Several fixes for edge cases

Marian Steinbach 2018-12-17 23:54:09 +01:00
parent 3b8328d804
commit 16a05b751b
3 changed files with 81 additions and 5 deletions

View file

@@ -36,8 +36,10 @@ class Checker(AbstractChecker):
         feeds.
         """
         head = self.previous_results['html_head'][url]
-        assert 'link_rss_atom' in head
-        assert isinstance(head['link_rss_atom'], list)
+        if 'link_rss_atom' not in head:
+            return
+        if not isinstance(head['link_rss_atom'], list):
+            return
 
         for feed_url in head['link_rss_atom']:
             if feed_url not in self.feeds:
@@ -67,6 +69,9 @@ class Checker(AbstractChecker):
 
         if 'bozo_exception' in data:
             result['exception'] = data['bozo_exception']
 
+        if 'headers' not in data:
+            return result
+
         if data['headers'].get('status') not in ('200', '301', '302'):
             result['exception'] = 'Server responded with status %s' % data['headers'].get('status')
@@ -76,7 +81,10 @@ class Checker(AbstractChecker):
         result['num_entries'] = len(data['entries'])
         result['latest_entry'] = self.find_latest_entry(data['entries'])
         result['first_entry'] = self.find_first_entry(data['entries'])
-        if result['num_entries'] > 1 and result['first_entry'] < result['latest_entry']:
+        if (result['num_entries'] > 1 and
+            result['first_entry'] is not None and
+            result['latest_entry'] is not None and
+            result['first_entry'] < result['latest_entry']):
             result['average_interval'] = round((result['latest_entry'] - result['first_entry']).total_seconds() / (result['num_entries'] - 1))
 
         return result
@@ -86,7 +94,10 @@ class Checker(AbstractChecker):
         max_date = None
 
         for entry in entries:
-            timestamp = mktime(entry.get('published_parsed'))
+            published_parsed = entry.get('published_parsed')
+            if published_parsed is None:
+                return
+            timestamp = mktime(published_parsed)
             if max_date is None or timestamp > max_date:
                 max_date = timestamp
@@ -98,7 +109,10 @@ class Checker(AbstractChecker):
         min_date = None
 
         for entry in entries:
-            timestamp = mktime(entry.get('published_parsed'))
+            published_parsed = entry.get('published_parsed')
+            if published_parsed is None:
+                return
+            timestamp = mktime(published_parsed)
             if min_date is None or timestamp < min_date:
                 min_date = timestamp
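
For context, the crash these guards avoid comes from feedparser itself: an item without a pubDate/updated element produces an entry with no published_parsed key, so entry.get('published_parsed') returns None and time.mktime(None) raises a TypeError. A minimal standalone sketch, separate from the commit and assuming feedparser's usual behaviour:

    import feedparser
    from time import mktime

    # One RSS item that carries no date element at all
    data = feedparser.parse("""<?xml version="1.0"?>
    <rss version="2.0"><channel><item><title>No date</title></item></channel></rss>""")

    entry = data.entries[0]
    print(entry.get('published_parsed'))    # None - feedparser found no date
    # mktime(entry.get('published_parsed'))  # TypeError before this commit's guard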

View file

@@ -118,5 +118,63 @@ class TestFeed(unittest.TestCase):
         })
 
+    def test_feed_rss2_without_dates(self):
+        """
+        Checks RSS 2.0
+        """
+        feed = """<?xml version="1.0"?>
+            <rss version="2.0">
+                <channel>
+                    <title>Liftoff News</title>
+                    <link>http://liftoff.msfc.nasa.gov/</link>
+                    <description>Liftoff to Space Exploration.</description>
+                    <item>
+                        <title>Star City</title>
+                        <link>http://liftoff.msfc.nasa.gov/news/2003/news-starcity.asp</link>
+                        <guid>http://liftoff.msfc.nasa.gov/2003/06/03.html#item573</guid>
+                    </item>
+                    <item>
+                        <description>Sky watchers in Europe, Asia, and parts of Alaska and Canada will experience a &lt;a href="http://science.nasa.gov/headlines/y2003/30may_solareclipse.htm"&gt;partial eclipse of the Sun&lt;/a&gt; on Saturday, May 31st.</description>
+                        <guid>http://liftoff.msfc.nasa.gov/2003/05/30.html#item572</guid>
+                    </item>
+                </channel>
+            </rss>
+        """
+
+        feed_url = 'http://example.com/feed.xml'
+        httpretty.register_uri(httpretty.GET, feed_url,
+                               body=feed,
+                               adding_headers={
+                                   "Content-type": "application/rss+xml",
+                               })
+
+        # mocking a previous result from some page
+        results = {
+            'html_head': {
+                'http://example.com/': {
+                    'link_rss_atom': ['http://example.com/feed.xml']
+                }
+            }
+        }
+
+        config = Config(urls=['http://example.com/'])
+        checker = load_feeds.Checker(config=config, previous_results=results)
+        result = checker.run()
+        print(result)
+
+        self.assertEqual(result, {
+            'http://example.com/feed.xml': {
+                'exception': None,
+                'title': 'Liftoff News',
+                'latest_entry': None,
+                'first_entry': None,
+                'average_interval': None,
+                'num_entries': 2,
+            }
+        })
+
 
 if __name__ == '__main__':
     unittest.main()
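
One detail the hunk does not show is how the mocked feed URL actually gets served: httpretty only intercepts requests while it is enabled, usually via the @httpretty.activate decorator on the test method or httpretty.enable()/disable() in setUp/tearDown, neither of which is visible in this diff. A tiny self-contained sketch of that mechanism, separate from the commit:

    import httpretty
    import requests

    httpretty.enable()
    httpretty.register_uri(httpretty.GET, 'http://example.com/feed.xml',
                           body='<rss/>',
                           adding_headers={"Content-type": "application/rss+xml"})
    print(requests.get('http://example.com/feed.xml').text)  # '<rss/>', served by httpretty
    httpretty.disable()
    httpretty.reset()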

View file

@@ -25,6 +25,10 @@ class Rater(AbstractRater):
                 continue
             if 'sizes' not in self.check_results['load_in_browser'][url]:
                 continue
+            if self.check_results['load_in_browser'][url]['sizes'] == []:
+                continue
+            if self.check_results['load_in_browser'][url]['sizes'] is None:
+                continue
 
             if (self.check_results['load_in_browser'][url]['min_document_width'] <=
                 self.check_results['load_in_browser'][url]['sizes'][0]['viewport_width']):
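
These guards matter because the comparison just below indexes sizes[0]: an empty list raises IndexError and a None value raises TypeError before min_document_width can even be compared. A tiny illustration with made-up values (only the viewport_width key is taken from the code above):

    for sizes in (None, [], [{'viewport_width': 360}]):
        if sizes == [] or sizes is None:
            continue  # mirrors the two new guards
        print(sizes[0]['viewport_width'])  # only safe once the guards have run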