import collections
import collections.abc
import sys
from datetime import datetime
from os import getenv
from wsgiref import simple_server

import falcon
from falcon import media
from google.cloud import datastore

import jsonhandler
# Path to the Google Cloud service account JSON file, supplied via
# environment variable. NOTE(review): if the variable is unset this is
# None and the client constructor below will fail at import time.
credentials_path = getenv('GCLOUD_DATASTORE_CREDENTIALS_PATH')

# Module-wide Datastore client, created once at import time.
datastore_client = datastore.Client.from_service_account_json(credentials_path)

# Datastore entity kind holding one spider result per site.
spider_results_kind = 'spider-results'

# Datastore entity kind holding screenshot records.
webscreenshots_kind = 'webscreenshot'
def convert_datastore_datetime(field):
    """
    Normalize a Datastore 'created' value to a datetime.

    The datastore lib returns the value in different ways, depending on
    the entity: a datetime.datetime (or subclass), an int holding
    microseconds since the epoch, or a str holding such an int.

    Returns '' (the legacy fallback) for any other type, including None.
    """
    # isinstance instead of type() == ...: the google-cloud library
    # returns DatetimeWithNanoseconds, a *subclass* of datetime, which
    # an exact type comparison would wrongly send to the fallback.
    if isinstance(field, datetime):
        return field
    if isinstance(field, int):
        # Timestamps are stored as microseconds; convert to seconds.
        return datetime.utcfromtimestamp(field / 1000000)
    if isinstance(field, str):
        return datetime.utcfromtimestamp(int(field) / 1000000)
    # Unknown type: keep the legacy empty-string fallback so existing
    # callers see unchanged behaviour.
    return ''
def flatten(d, parent_key='', sep='.'):
    """
    Flatten a nested mapping into a single-level dict.

    Nested keys are joined with `sep`, e.g. {'a': {'b': 1}} becomes
    {'a.b': 1}. Non-mapping values are kept as-is.
    """
    items = []
    for k, v in d.items():
        new_key = parent_key + sep + k if parent_key else k
        # collections.MutableMapping was removed in Python 3.10; the
        # ABC lives in collections.abc.
        if isinstance(v, collections.abc.MutableMapping):
            items.extend(flatten(v, new_key, sep=sep).items())
        else:
            items.append((new_key, v))
    return dict(items)
def get_compact_results(client):
    """
    Fetch all spider results and return a compact representation
    (input URL, creation time, meta, score), newest first.
    """
    query = client.query(
        kind=spider_results_kind,
        order=['-created'],
    )

    def compact(entity):
        # Reduce an entity to the fields the overview endpoint needs.
        created = convert_datastore_datetime(entity.get('created'))
        return {
            'input_url': entity.key.name,
            'created': created.isoformat(),
            'meta': entity.get('meta'),
            'score': entity.get('score'),
        }

    return [compact(entity) for entity in query.fetch(eventual=True)]
def simplify_rating(d):
    """
    Strip '.type' and '.max_score' entries from a flattened rating dict.

    The dict is modified in place and returned for convenience.
    """
    # Collect first, then delete, so we never mutate while iterating.
    unwanted = [key for key in d if key.endswith((".type", ".max_score"))]
    for key in unwanted:
        del d[key]
    return d
def tablelize_checks(d):
    """
    Returns a dict with the check details we want to be contained
    in a table export.

    Missing 'generator' or 'url_canonicalization' entries are handled
    gracefully (both columns default to an empty string).
    """
    out = {}

    # CMS names, de-duplicated and space-separated. d.get() guards the
    # missing-key case (the original raised KeyError here, while
    # 'url_canonicalization' below was already guarded). Sorted so the
    # export is deterministic — set iteration order is not.
    generator_values = (d.get('generator') or {}).values()
    out['generator'] = " ".join(sorted({i for i in generator_values if i is not None}))

    # List of actual URLs crawled
    out['resulting_urls'] = ""
    if 'url_canonicalization' in d:
        out['resulting_urls'] = " ".join([i for i in d['url_canonicalization'] if i is not None])

    return out
def get_table_result(client):
    """
    Fetch all spider results and return one flat record per site,
    suitable for table export. Meta and rating sub-dicts are flattened
    into dotted keys; check details are reduced via tablelize_checks.
    """
    query = client.query(kind=spider_results_kind)

    rows = []
    for entity in query.fetch(eventual=True):
        created = convert_datastore_datetime(entity.get('created'))

        row = {
            'input_url': entity.key.name,
            'created': created.isoformat(),
            'score': entity.get('score'),
        }

        # Merge in flattened metadata, the simplified rating details
        # and the tabular check summary.
        for extra in (
            flatten(entity.get('meta'), parent_key='meta'),
            simplify_rating(flatten(entity.get('rating'), parent_key='rating')),
            tablelize_checks(entity.get('checks')),
        ):
            row.update(extra)

        rows.append(row)

    return rows
class LastUpdated(object):

    def on_get(self, req, resp):
        """
        Informs about the most recent update to the spider results data
        """
        query = datastore_client.query(kind=spider_results_kind,
                                       order=['-created'],
                                       projection=['created'])
        newest = list(query.fetch(limit=1, eventual=True))

        # 'created' is stored as microseconds since the epoch.
        seconds = int(newest[0].get('created')) / 1000000
        last_updated = datetime.utcfromtimestamp(seconds).isoformat()

        resp.cache_control = ["max_age=%d" % (60 * 60)]  # one hour in seconds
        resp.media = {
            "last_updated": last_updated
        }
class CompactResults(object):

    def on_get(self, req, resp):
        """
        Returns compact sites overview and score
        """
        results = get_compact_results(datastore_client)

        resp.cache_control = ["max_age=%d" % (6 * 60 * 60)]  # six hours in seconds
        resp.media = results
class TableResults(object):

    def on_get(self, req, resp):
        """
        Returns big sites results
        """
        results = get_table_result(datastore_client)

        resp.cache_control = ["max_age=%d" % (48 * 60 * 60)]  # two days
        resp.media = results
class SiteDetails(object):

    def on_get(self, req, resp):
        """
        Returns details for one URL
        """
        url = req.get_param('url')
        if url is None or url == '':
            raise falcon.HTTPError(falcon.HTTP_400,
                                   'Bad request',
                                   'The parameter url must not be empty')

        # Use the already-validated url instead of re-reading the
        # request parameter a second time.
        key = datastore_client.key(spider_results_kind, url)
        entity = datastore_client.get(key)
        if entity is None:
            raise falcon.HTTPError(falcon.HTTP_404,
                                   'Not found',
                                   'A site with this URL does not exist')

        maxage = 24 * 60 * 60  # 24 hours in seconds
        resp.cache_control = ["max_age=%d" % maxage]
        resp.media = dict(entity)
class SiteScreenshots(object):

    def on_get(self, req, resp):
        """
        Returns screenshots for one URL
        """
        url = req.get_param('url')
        if url is None or url == '':
            raise falcon.HTTPError(falcon.HTTP_400,
                                   'Bad request',
                                   'The parameter url must not be empty')

        query = datastore_client.query(kind=webscreenshots_kind)
        # Use the already-validated url instead of re-reading the
        # request parameter a second time.
        query.add_filter('url', '=', url)
        entities = list(query.fetch())

        maxage = 24 * 60 * 60  # 24 hours in seconds
        if len(entities) == 0:
            # Cache the "no screenshots yet" answer for a shorter time
            # so newly created screenshots show up sooner.
            maxage = 3 * 60 * 60  # 3 hours in seconds

        resp.cache_control = ["max_age=%d" % maxage]
        resp.media = entities
class Index(object):

    def on_get(self, req, resp):
        """
        Describes the service and lists the available endpoints.
        """
        endpoints = [
            "/api/v1/spider-results/last-updated/",
            "/api/v1/spider-results/table/",
            "/api/v1/spider-results/compact/",
            "/api/v1/spider-results/site",
            "/api/v1/screenshots/site",
        ]
        resp.media = {
            "message": "This is green-spider-api",
            "url": "https://github.com/netzbegruenung/green-spider-api",
            "endpoints": endpoints,
        }
# Register the custom JSON media handler for both requests and responses.
handlers = media.Handlers({
    'application/json': jsonhandler.JSONHandler(),
})

app = falcon.API()

app.req_options.media_handlers = handlers
app.resp_options.media_handlers = handlers

# Route table: spider-result views, per-site details/screenshots, index.
app.add_route('/api/v1/spider-results/last-updated/', LastUpdated())
app.add_route('/api/v1/spider-results/compact/', CompactResults())
app.add_route('/api/v1/spider-results/table/', TableResults())
app.add_route('/api/v1/spider-results/site', SiteDetails())
app.add_route('/api/v1/screenshots/site', SiteScreenshots())
app.add_route('/', Index())


if __name__ == '__main__':
    # Development server only; use a production WSGI server for deployment.
    httpd = simple_server.make_server('127.0.0.1', 5000, app)
    httpd.serve_forever()