<?php
/** @license MIT
 * Copyright 2017 J. King, Dustin Wilson et al.
 * See LICENSE and AUTHORS files for details */
declare(strict_types=1);
namespace JKingWeb\Arsse;

use JKingWeb\Arsse\Misc\Date;
use PicoFeed\PicoFeedException;
use PicoFeed\Config\Config;
use PicoFeed\Client\Client;
use PicoFeed\Reader\Reader;
use PicoFeed\Reader\Favicon;
use PicoFeed\Scraper\Scraper;

class Feed {
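    // Public state consumed by callers: the parsed feed data, the discovered
    // favicon URL, the underlying PicoFeed HTTP client, and the caching
    // metadata and change-lists computed during parsing.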
    public $data = null;
    public $favicon;
    public $resource;
    public $modified = false;
    public $lastModified;
    public $nextFetch;
    public $newItems = [];
    public $changedItems = [];
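
    /** Discovers the feed URL of an HTML page
     *
     * If the URL already points to a feed it is returned as-is; otherwise the
     * first feed link found in the page is returned, and a Feed\Exception is
     * thrown if none is found. An illustrative example (the URLs are
     * hypothetical):
     *
     *     Feed::discover("http://example.com/") === "http://example.com/feed.atom"
     */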
    public static function discover(string $url, string $username = '', string $password = ''): string {
        // fetch the candidate feed
        $f = self::download($url, "", "", $username, $password);
        if ($f->reader->detectFormat($f->getContent())) {
            // if the prospective URL is a feed, use it
            $out = $url;
        } else {
            $links = $f->reader->find($f->getUrl(), $f->getContent());
            if (!$links) {
                // work around a PicoFeed memory leak
                libxml_use_internal_errors(false);
                throw new Feed\Exception($url, new \PicoFeed\Reader\SubscriptionNotFoundException('Unable to find a subscription'));
            } else {
                $out = $links[0];
            }
        }
        // work around a PicoFeed memory leak
        libxml_use_internal_errors(false);
        return $out;
    }
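
    /** Downloads a feed and, if it has changed since the last fetch, parses
     * it and computes the differences.
     *
     * A minimal usage sketch (the URL is hypothetical):
     *
     *     $feed = new Feed($feedID, "http://example.com/feed.atom");
     *     if ($feed->modified) {
     *         // act on $feed->newItems and $feed->changedItems
     *     }
     */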
    public function __construct(int $feedID = null, string $url, string $lastModified = '', string $etag = '', string $username = '', string $password = '', bool $scrape = false) {
        // fetch the feed
        $this->resource = self::download($url, $lastModified, $etag, $username, $password);
        // format the HTTP Last-Modified date returned
        $lastMod = $this->resource->getLastModified();
        if (strlen($lastMod)) {
            $this->lastModified = Date::normalize($lastMod, "http");
        }
        $this->modified = $this->resource->isModified();
        // parse the feed, if it has been modified
        if ($this->modified) {
            $this->parse();
            // ascertain whether there are any articles not in the database
            $this->matchToDatabase($feedID);
            // if caching header fields are not sent by the server, try to ascertain a last-modified date from the feed contents
            if (!$this->lastModified) {
                $this->lastModified = $this->computeLastModified();
            }
            // we only really care if articles have been modified; if there are no new articles, act as if the feed is unchanged
            if (!sizeof($this->newItems) && !sizeof($this->changedItems)) {
                $this->modified = false;
            }
            // if requested, scrape full content for any new and changed items
            if ($scrape) {
                $this->scrape();
            }
        }
        // compute the time at which the feed should next be fetched
        $this->nextFetch = $this->computeNextFetch();
    }
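
    /** Assembles a PicoFeed configuration object from the Arsse's own configuration */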
    protected static function configure(): Config {
        $userAgent = Arsse::$conf->fetchUserAgentString ?? sprintf(
            'Arsse/%s (%s %s; %s; https://thearsse.com/)',
            Arsse::VERSION, // Arsse version
            php_uname('s'), // OS
            php_uname('r'), // OS version
            php_uname('m') // platform architecture
        );
        $config = new Config;
        $config->setMaxBodySize(Arsse::$conf->fetchSizeLimit);
        $config->setClientTimeout(Arsse::$conf->fetchTimeout);
        $config->setGrabberTimeout(Arsse::$conf->fetchTimeout);
        $config->setClientUserAgent($userAgent);
        $config->setGrabberUserAgent($userAgent);
        return $config;
    }
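
    /** Performs the conditional HTTP request for a feed and returns the PicoFeed client
     *
     * The Reader instance used for the download is attached to the returned
     * client so that a parser can later be obtained from it.
     */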
    protected static function download(string $url, string $lastModified, string $etag, string $username, string $password): Client {
        try {
            $reader = new Reader(self::configure());
            $client = $reader->download($url, $lastModified, $etag, $username, $password);
            $client->reader = $reader;
            return $client;
        } catch (PicoFeedException $e) {
            throw new Feed\Exception($url, $e);
        }
    }
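
    /** Parses the fetched feed and computes stable identifiers and categories for each item */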
    protected function parse(): bool {
        try {
            $feed = $this->resource->reader->getParser(
                $this->resource->getUrl(),
                $this->resource->getContent(),
                $this->resource->getEncoding()
            )->execute();
            // Grab the favicon for the feed; returns an empty string if it cannot find one.
            // Some feeds might use a different domain (e.g. FeedBurner), so the site URL is
            // used instead of the feed's URL.
            $this->favicon = (new Favicon)->find($feed->siteUrl);
            // work around a PicoFeed memory leak
            libxml_use_internal_errors(false);
        } catch (PicoFeedException $e) {
            // work around a PicoFeed memory leak
            libxml_use_internal_errors(false);
            throw new Feed\Exception($this->resource->getUrl(), $e);
        }

        // PicoFeed does not provide valid IDs when there is no id element. Its solution
        // of hashing the URL, title, and content together for the ID if there is no id
        // element is stupid. Many feeds are Frankenstein mixtures of Atom and RSS, but
        // some are pure RSS with guid elements while others use the Dublin Core spec for
        // identification. These feeds shouldn't be duplicated when updated; that should
        // be reserved for severely broken feeds.

        foreach ($feed->items as $f) {
            // Hashes used for comparison to check for updates and also to identify when an
            // ID doesn't exist.
            $content = $f->content.$f->enclosureUrl.$f->enclosureType;
            // if the item link URL and item title are both equal to the feed link URL, then the item has neither a link URL nor a title
            if ($f->url === $feed->siteUrl && $f->title === $feed->siteUrl) {
                $f->urlTitleHash = "";
            } else {
                $f->urlTitleHash = hash('sha256', $f->url.$f->title);
            }
            // if the item link URL is equal to the feed link URL, it has no link URL; if there is additionally no content, these should not be hashed
            if (!strlen($content) && $f->url === $feed->siteUrl) {
                $f->urlContentHash = "";
            } else {
                $f->urlContentHash = hash('sha256', $f->url.$content);
            }
            // if the item's title is the same as its link URL, it has no title; if there is additionally no content, these should not be hashed
            if (!strlen($content) && $f->title === $f->url) {
                $f->titleContentHash = "";
            } else {
                $f->titleContentHash = hash('sha256', $f->title.$content);
            }
            $f->id = null;
            // prefer an Atom ID as the item's ID
            $id = (string) $f->xml->children('http://www.w3.org/2005/Atom')->id;
            // otherwise use the RSS2 guid element
            if (!strlen($id)) {
                $id = (string) $f->xml->guid;
            }
            // otherwise use the Dublin Core identifier element
            if (!strlen($id)) {
                $id = (string) $f->xml->children('http://purl.org/dc/elements/1.1/')->identifier;
            }
            // otherwise there is no ID; if there is one, hash it
            if (strlen($id)) {
                $f->id = hash('sha256', $id);
            }

            // PicoFeed also doesn't gather up categories, so we do this as well
            $f->categories = [];
            // first add Atom categories
            foreach ($f->xml->children('http://www.w3.org/2005/Atom')->category as $c) {
                // if the category has a label, use that
                $name = (string) $c->attributes()->label;
                // otherwise use the term
                if (!strlen($name)) {
                    $name = (string) $c->attributes()->term;
                }
                // ... assuming it has that much
                if (strlen($name)) {
                    $f->categories[] = $name;
                }
            }
            // next add RSS2 categories
            foreach ($f->xml->children()->category as $c) {
                $name = (string) $c;
                if (strlen($name)) {
                    $f->categories[] = $name;
                }
            }
            // and finally try Dublin Core subjects
            foreach ($f->xml->children('http://purl.org/dc/elements/1.1/')->subject as $c) {
                $name = (string) $c;
                if (strlen($name)) {
                    $f->categories[] = $name;
                }
            }
            // sort the results
            sort($f->categories);
        }
        $this->data = $feed;
        return true;
    }
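
    /** Removes duplicates from a list of feed items, keeping only the latest version of each */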
    protected function deduplicateItems(array $items): array {
        /* Rationale:
            Some newsfeeds (notably Planet) include multiple versions of an
            item if it is updated. As we only care about the latest, we
            try to remove any "old" versions of an item that might also be
            present within the feed.
        */
        $out = [];
        foreach ($items as $item) {
            foreach ($out as $index => $check) {
                // if the two items both have IDs and they differ, they do not match, regardless of hashes
                if ($item->id && $check->id && $item->id !== $check->id) {
                    continue;
                }
                // if the two items have the same ID or any one hash matches, they are two versions of the same item
                if (
                    ($item->id && $check->id && $item->id === $check->id) ||
                    ($item->urlTitleHash && $item->urlTitleHash === $check->urlTitleHash) ||
                    ($item->urlContentHash && $item->urlContentHash === $check->urlContentHash) ||
                    ($item->titleContentHash && $item->titleContentHash === $check->titleContentHash)
                ) {
                    if (// because newsfeeds are usually ordered newest-first, the later item should only be used if...
                        // the later item has an update date and the existing item does not
                        ($item->updatedDate && !$check->updatedDate) ||
                        // the later item has an update date newer than the existing item's
                        ($item->updatedDate && $check->updatedDate && $item->updatedDate->getTimestamp() > $check->updatedDate->getTimestamp()) ||
                        // neither item has update dates, both have publish dates, and the later item has a newer publish date
                        (!$item->updatedDate && !$check->updatedDate && $item->publishedDate && $check->publishedDate && $item->publishedDate->getTimestamp() > $check->publishedDate->getTimestamp())
                    ) {
                        // if the later item should be used, replace the existing one
                        $out[$index] = $item;
                        continue 2;
                    } else {
                        // otherwise skip the item
                        continue 2;
                    }
                }
            }
            // if there was no match, add the item
            $out[] = $item;
        }
        return $out;
    }
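
    /** Matches the deduplicated feed items against the database, populating $newItems and $changedItems */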
    protected function matchToDatabase(int $feedID = null): bool {
        // first perform deduplication on items
        $items = $this->deduplicateItems($this->data->items);
        // if we haven't been given a database feed ID to check against, all items are new
        if (is_null($feedID)) {
            $this->newItems = $items;
            return true;
        }
        // get as many of the latest articles in the database as there are in the feed
        $articles = Arsse::$db->feedMatchLatest($feedID, sizeof($items))->getAll();
        // perform a first pass matching the latest articles against items in the feed
        list($this->newItems, $this->changedItems) = $this->matchItems($items, $articles);
        if (sizeof($this->newItems) && sizeof($items) <= sizeof($articles)) {
            // if we need to, perform a second pass on the database looking specifically for IDs and hashes of the new items
            $ids = $hashesUT = $hashesUC = $hashesTC = [];
            foreach ($this->newItems as $i) {
                if ($i->id) {
                    $ids[] = $i->id;
                }
                if ($i->urlTitleHash) {
                    $hashesUT[] = $i->urlTitleHash;
                }
                if ($i->urlContentHash) {
                    $hashesUC[] = $i->urlContentHash;
                }
                if ($i->titleContentHash) {
                    $hashesTC[] = $i->titleContentHash;
                }
            }
            $articles = Arsse::$db->feedMatchIds($feedID, $ids, $hashesUT, $hashesUC, $hashesTC)->getAll();
            list($this->newItems, $changed) = $this->matchItems($this->newItems, $articles);
            // merge the two change-lists, preserving keys
            $this->changedItems = array_combine(array_merge(array_keys($this->changedItems), array_keys($changed)), array_merge($this->changedItems, $changed));
        }
        return true;
    }
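
    /** Separates feed items into those new to the database and those which edit existing articles
     *
     * Returns a two-member array: a list of new items, and a map of database
     * article IDs to their edited counterparts.
     */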
    protected function matchItems(array $items, array $articles): array {
        $new = $edited = [];
        // iterate through the articles and for each determine whether it is existing, edited, or entirely new
        foreach ($items as $i) {
            $found = false;
            foreach ($articles as $a) {
                // if the item has an ID and it doesn't match the article ID, the two don't match, regardless of hashes
                if ($i->id && $i->id !== $a['guid']) {
                    continue;
                }
                if (
                    // the item matches if the GUID matches...
                    ($i->id && $i->id === $a['guid']) ||
                    // ... or if any one of the hashes match
                    ($i->urlTitleHash && $i->urlTitleHash === $a['url_title_hash']) ||
                    ($i->urlContentHash && $i->urlContentHash === $a['url_content_hash']) ||
                    ($i->titleContentHash && $i->titleContentHash === $a['title_content_hash'])
                ) {
                    if ($i->updatedDate && Date::transform($i->updatedDate, "sql") !== $a['edited']) {
                        // if the item has an edit timestamp and it doesn't match that of the article in the database, then the article has been edited
                        // we store the database record ID and the item as a key/value pair
                        $found = true;
                        $edited[$a['id']] = $i;
                        break;
                    } elseif ($i->urlTitleHash !== $a['url_title_hash'] || $i->urlContentHash !== $a['url_content_hash'] || $i->titleContentHash !== $a['title_content_hash']) {
                        // if any of the hashes do not match, then the article has been edited
                        $found = true;
                        $edited[$a['id']] = $i;
                        break;
                    } else {
                        // otherwise the item is unchanged and we can ignore it
                        $found = true;
                        break;
                    }
                }
            }
            if (!$found) {
                $new[] = $i;
            }
        }
        return [$new, $edited];
    }
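
    /** Computes when the feed should next be fetched, based on how frequently it appears to change */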
    protected function computeNextFetch(): \DateTimeImmutable {
        $now = Date::normalize(time());
        if (!$this->modified) {
            $diff = $now->getTimestamp() - $this->lastModified->getTimestamp();
            $offset = $this->normalizeDateDiff($diff);
            return $now->modify("+".$offset);
        } else {
            // the algorithm for updated feeds (returning 200 rather than 304) uses the same parameters as for 304,
            // save that the last three intervals between item dates are computed, and if any two fall within
            // the same interval range, that interval is used (e.g. if the intervals are 23m, 12m, and 4h, the used
            // interval is "less than 30m"). If there is no commonality, the feed is checked in 1 hour.
            $offsets = [];
            $dates = $this->gatherDates();
            if (sizeof($dates) > 3) {
                for ($a = 0; $a < 3; $a++) {
                    $diff = $dates[$a] - $dates[$a+1];
                    $offsets[] = $this->normalizeDateDiff($diff);
                }
                if ($offsets[0] === $offsets[1] || $offsets[0] === $offsets[2]) {
                    return $now->modify("+".$offsets[0]);
                } elseif ($offsets[1] === $offsets[2]) {
                    return $now->modify("+".$offsets[1]);
                } else {
                    return $now->modify("+ 1 hour");
                }
            } else {
                return $now->modify("+ 1 hour");
            }
        }
    }
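
    /** Computes when to next retry a feed which could not be fetched, based on the number of consecutive errors */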
    public static function nextFetchOnError($errCount): \DateTimeImmutable {
        if ($errCount < 3) {
            $offset = "5 minutes";
        } elseif ($errCount < 15) {
            $offset = "3 hours";
        } else {
            $offset = "1 day";
        }
        return Date::normalize("now + ".$offset);
    }
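
    /** Buckets a difference between two dates (in seconds) into one of five fetch intervals */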
    protected function normalizeDateDiff(int $diff): string {
        if ($diff < (30 * 60)) { // less than 30 minutes
            $offset = "15 minutes";
        } elseif ($diff < (60 * 60)) { // less than an hour
            $offset = "30 minutes";
        } elseif ($diff < (3 * 60 * 60)) { // less than three hours
            $offset = "1 hour";
        } elseif ($diff >= (36 * 60 * 60)) { // more than 36 hours
            $offset = "1 day";
        } else {
            $offset = "3 hours";
        }
        return $offset;
    }
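
    /** Approximates a last-modified date from the newest item date in the feed, for servers which do not supply one */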
    protected function computeLastModified() {
        if (!$this->modified) {
            return $this->lastModified; // @codeCoverageIgnore
        }
        $dates = $this->gatherDates();
        if (sizeof($dates)) {
            return Date::normalize($dates[0]);
        } else {
            return null; // @codeCoverageIgnore
        }
    }
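
    /** Gathers the unique updated and published dates of all items in the feed, sorted newest-first */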
    protected function gatherDates(): array {
        $dates = [];
        foreach ($this->data->items as $item) {
            if ($item->updatedDate) {
                $dates[] = $item->updatedDate->getTimestamp();
            }
            if ($item->publishedDate) {
                $dates[] = $item->publishedDate->getTimestamp();
            }
        }
        $dates = array_unique($dates, \SORT_NUMERIC);
        rsort($dates);
        return $dates;
    }
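
    /** Replaces the content of new and changed items with scraped full-article content, where available */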
    protected function scrape(): bool {
        $scraper = new Scraper(self::configure());
        foreach (array_merge($this->newItems, $this->changedItems) as $item) {
            $scraper->setUrl($item->url);
            $scraper->execute();
            if ($scraper->hasRelevantContent()) {
                $item->content = $scraper->getFilteredContent();
            }
        }
        return true;
    }
}