changeset 16:a87969972d85
lib updates, auth update and cleanup, reformat
author   | drewp@bigasterisk.com
---------|--------------------------------
date     | Sun, 13 Sep 2020 23:31:25 -0700
parents  | 72ebd60d640e
children | bfd55a2fd358
files    | .style.yapf Dockerfile calendar_connection.py gcalendarwatch.py login.py requirements.txt run_login.sh
diffstat | 7 files changed, 197 insertions(+), 97 deletions(-)
--- a/.style.yapf	Sun Sep 13 14:35:37 2020 -0700
+++ b/.style.yapf	Sun Sep 13 23:31:25 2020 -0700
@@ -1,4 +1,4 @@
 # overwritten by /home/drewp/bin/setup_home_venv
 [style]
 based_on_style = google
-column_limit = 130
+column_limit = 160
--- a/Dockerfile	Sun Sep 13 14:35:37 2020 -0700
+++ b/Dockerfile	Sun Sep 13 23:31:25 2020 -0700
@@ -18,8 +18,9 @@
 # not sure why this doesn't work from inside requirements.txt
 #RUN pip3 install --index-url https://projects.bigasterisk.com/ --extra-index-url https://pypi.org/simple -U 'https://github.com/drewp/cyclone/archive/python3.zip?v2'
 
-COPY gcalendarwatch.py *.credentials *.conf *.html *.json *.dat ./
+COPY *.py *.conf *.html *.json ./
+COPY credentials credentials
 
 EXPOSE 9105
 
-CMD [ "python3", "gcalendarwatch.py" ]
+CMD [ "python3", "gcalendarwatch.py", "-v" ]
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/calendar_connection.py	Sun Sep 13 23:31:25 2020 -0700
@@ -0,0 +1,32 @@
+import os
+import pickle
+
+from google.auth.transport.requests import Request
+from google_auth_oauthlib.flow import InstalledAppFlow
+from googleapiclient.discovery import build
+
+SCOPES = ['https://www.googleapis.com/auth/calendar.readonly']
+
+
+def getCalendarService(scope='https://www.googleapis.com/auth/calendar.readonly'):
+    """
+    """
+    creds = None
+    # The file token.pickle stores the user's access and refresh tokens, and is
+    # created automatically when the authorization flow completes for the first
+    # time.
+    if os.path.exists('credentials/token.pickle'):
+        with open('credentials/token.pickle', 'rb') as token:
+            creds = pickle.load(token)
+    # If there are no (valid) credentials available, let the user log in.
+    if not creds or not creds.valid:
+        if creds and creds.expired and creds.refresh_token:
+            creds.refresh(Request())
+        else:
+            flow = InstalledAppFlow.from_client_secrets_file('credentials/cred.json', [scope])
+            creds = flow.run_local_server()
+        # Save the credentials for the next run
+        with open('credentials/token.pickle', 'wb') as token:
+            pickle.dump(creds, token)
+
+    return build('calendar', 'v3', credentials=creds)
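For reference, a minimal sketch of how the new helper might be exercised once `credentials/cred.json` is in place. The calendar id and query parameters below are illustrative, not part of this changeset; the `events().list()` call is the standard google-api-python-client interface.

```python
# Minimal sketch, assuming credentials/cred.json exists and token.pickle
# can be written on first run; query parameters here are illustrative only.
import datetime

from calendar_connection import getCalendarService

service = getCalendarService()

# List the next few upcoming events from the primary calendar.
now = datetime.datetime.utcnow().isoformat() + 'Z'
events = service.events().list(calendarId='primary',
                               timeMin=now,
                               maxResults=5,
                               singleEvents=True,
                               orderBy='startTime').execute()
for ev in events.get('items', []):
    print(ev['start'], ev.get('summary', ''))
```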
--- a/gcalendarwatch.py	Sun Sep 13 14:35:37 2020 -0700
+++ b/gcalendarwatch.py	Sun Sep 13 23:31:25 2020 -0700
@@ -12,23 +12,36 @@
 http://googledevelopers.blogspot.com/2013/07/google-calendar-api-push-notifications.html
 and update faster with less polling
 """
-import json, datetime, time, traceback, re, docopt
+import datetime
+import json
+import re
+import time
+import traceback
+import cyclone.web
+import pymongo.collection
 from dateutil.parser import parse
 from dateutil.tz import tzlocal
-from googleapiclient import discovery
-from googleapiclient.http import build_http
+from patchablegraph import (
+    CycloneGraphEventsHandler,
+    CycloneGraphHandler,
+    PatchableGraph,
+)
+from calendar_connection import getCalendarService
+from typing import Dict, Any
 from pymongo import MongoClient
-from rdflib import Namespace, Literal, Graph, URIRef, RDF
+from rdflib import Graph, Literal, Namespace, RDF, URIRef
+from standardservice.logsetup import log, verboseLogging
 from twisted.internet import reactor
-import cyclone.web
-import oauth2client, oauth2client.file
-from standardservice.logsetup import log, verboseLogging
-from patchablegraph import PatchableGraph, CycloneGraphEventsHandler, CycloneGraphHandler
+import docopt
+from prometheus_client import Summary
+from prometheus_client.exposition import generate_latest
+from prometheus_client.registry import REGISTRY
+
+UPDATE = Summary('gcalendarwatch_updates', 'update loop calls')
 
 EV = Namespace("http://bigasterisk.com/event#")
-
 """
 example:
 {
@@ -49,11 +62,15 @@
  'status': 'confirmed',
  'updated': '2014-09-17T04:28:56.997Z',
 }"""
-def recordFromEv(conf, calId, ev):
+
+
+def recordFromEv(conf: Dict, calId: str, ev: Dict):
+
     def dateOrTime(d):
         if 'date' in d:
             return d['date']
         return d['dateTime']
+
     rec = {
         'uri': conf['event_uri_ns'] + ev['id'],
         'feed': conf['event_uri_ns'] + 'feed/' + calId,
@@ -63,10 +80,9 @@
         'endTimeUnspecified': ev.get('endTimeUnspecified', False),
         'htmlLink': ev.get('htmlLink', ''),
         'creatorEmail': ev.get('creator', {}).get('email', ''),
-        }
+    }
 
-    for field, val in [('start', ev['start']),
-                       ('end', ev['end'])]:
+    for field, val in [('start', ev['start']), ('end', ev['end'])]:
         if 'date' in val:
             rec['%sTime' % field] = parse(val['date']).replace(tzinfo=tzlocal())
             rec['%sDate' % field] = val['date']
@@ -75,8 +91,9 @@
             rec['%sDate' % field] = parse(val['dateTime']).date().isoformat()
     return rec
 
+
 def asJsonLd(events):
-    ret = {'@graph':[]}
+    ret = {'@graph': []}  # type: Dict[Any, Any]
     for ev in events:
         ev['startTime'] = ev['startTime'].astimezone(tzlocal()).isoformat()
         ev['endTime'] = ev['endTime'].astimezone(tzlocal()).isoformat()
@@ -86,22 +103,44 @@
     ret['@context'] = {
         "xsd": "http://www.w3.org/2001/XMLSchema#",
         "ev": "http://bigasterisk.com/event#",
-        "startTime": {"@id": "ev:startTime", "@type": "xsd:dateTime"},
-        "endTime": {"@id": "ev:endTime", "@type": "xsd:dateTime"},
-        "startDate": {"@id": "ev:startDate", "@type": "xsd:date"},
-        "endDate": {"@id": "ev:endDate", "@type": "xsd:date"},
-        "title" : "ev:title",
-        "feed": {"@id": "ev:feed", "@type": "@id"},
-        "htmlLink": {"@id": "ev:htmlLink", "@type": "@id"},
+        "startTime": {
+            "@id": "ev:startTime",
+            "@type": "xsd:dateTime"
+        },
+        "endTime": {
+            "@id": "ev:endTime",
+            "@type": "xsd:dateTime"
+        },
+        "startDate": {
+            "@id": "ev:startDate",
+            "@type": "xsd:date"
+        },
+        "endDate": {
+            "@id": "ev:endDate",
+            "@type": "xsd:date"
+        },
+        "title": "ev:title",
+        "feed": {
+            "@id": "ev:feed",
+            "@type": "@id"
+        },
+        "htmlLink": {
+            "@id": "ev:htmlLink",
+            "@type": "@id"
+        },
     }
     return ret
 
+
 def asGraph(events, extraClasses=[]):
     graph = Graph()
     graph.namespace_manager.bind('ev', EV)
     for ev in events:
         uri = URIRef(ev['uri'])
-        add = lambda p, o: graph.add((uri, p, o))
+
+        def add(p, o):
+            return graph.add((uri, p, o))
+
         add(RDF.type, EV['Event'])
         for cls in extraClasses:
             add(RDF.type, cls)
@@ -110,55 +149,31 @@
         add(EV['startDate'], Literal(ev['startDate']))
         add(EV['end'], Literal(ev['end']))
         add(EV['feed'], URIRef(ev['feed']))
-        #graph.add((feed, RDFS.label, Literal(ev['feedTitle'])))
+        # graph.add((feed, RDFS.label, Literal(ev['feedTitle'])))
         if 'htmlLink' in ev:
             add(EV['htmlLink'], URIRef(ev['htmlLink']))
     return graph
 
-
-def getCalendarService(client_secrets='client_secret.json',
-                       credential_storage='calendar.dat',
-                       scope='https://www.googleapis.com/auth/calendar.readonly',
-                       name='calendar',
-                       version='v3'):
-    """
-    see
-    https://cloud.google.com/docs/authentication/end-user#creating_your_client_credentials
-    for getting client_secret.json . Use 'application type' of
-    'other'.
-    """
-    flow = oauth2client.client.flow_from_clientsecrets(client_secrets, scope=scope)
-
-    storage = oauth2client.file.Storage(credential_storage)
-    credentials = storage.get()
-    if credentials is None or credentials.invalid:
-        class Flags:
-            logging_level = 'INFO'
-            noauth_local_webserver = True
-        credentials = oauth2client.tools.run_flow(flow, storage, Flags)
-        # (storage now writes back to calendar.dat)
-    http = credentials.authorize(http=build_http())
-
-    service = discovery.build(name, version, http=http)
-    return service
-
 def getFirstPageOfCalendars(service):
     for row in service.calendarList().list().execute()['items']:
         yield row['id']
 
+
 def dayRange(days):
     now = datetime.datetime.now(tzlocal())
     start = now - datetime.timedelta(hours=12)
     end = now + datetime.timedelta(days=days)
     return start, end
 
+
 def limitDays(recs, days):
     start, end = dayRange(days)
     start = start - datetime.timedelta(hours=12)  # incomplete
     return [r for r in recs if r['startTime'] < end and r['endTime'] > start]
 
+
 def starred(graph, ev):
     title = graph.value(ev, EV['title'])
     m = re.search(r'(.*)\*\s*$', title)
@@ -167,6 +182,7 @@
     else:
         return None
 
+
 def filterStarred(recs, maxCount=15):
     recs = sorted(recs, key=lambda r: r['start'])
     out = []
@@ -177,16 +193,19 @@
             break
     return out
 
+
 class SyncToMongo(object):
     """reads gcal, writes to mongodb"""
-    def __init__(self, conf, collection, agendaGraph, countdownGraph):
+
+    def __init__(self, conf, collection: pymongo.collection.Collection, agendaGraph: PatchableGraph, countdownGraph: PatchableGraph):
        self.conf = conf
        self.service = getCalendarService()
        self.collection = collection
        self.agendaGraph = agendaGraph
        self.countdownGraph = countdownGraph
 
-    def update(self, days=30*6):
+    @UPDATE.time()
+    def update(self, days=30 * 6):
        start, end = dayRange(days)
 
        self.removeEntries(start, end)
@@ -210,11 +229,11 @@
         self.updateGraphs(currentRecords)
 
     def removeEntries(self, start, end):
-        for doc in list(self.collection.find({"startTime":{"$gte":start, "$lte":end}})):
-            self.collection.remove({'_id':doc['_id']})
+        for doc in list(self.collection.find({"startTime": {"$gte": start, "$lte": end}})):
+            self.collection.remove({'_id': doc['_id']})
 
     def upsertMongo(self, rec):
-        if self.collection.find_one({"_id" : rec['uri']}) is not None:
+        if self.collection.find_one({"_id": rec['uri']}) is not None:
             log.debug("existing record %s", rec['uri'])
             # this is not yet noticing updates
             return []
@@ -228,29 +247,26 @@
 
     def updateGraphs(self, currentRecords):
         c = EV['gcalendar']
         currentRecords = list(currentRecords)
-        self.agendaGraph.setToGraph(
-            [(s,p,o,c) for s,p,o in asGraph(limitDays(currentRecords, days=2))])
-        self.countdownGraph.setToGraph(
-            [(s,p,o,c) for s,p,o in asGraph(filterStarred(currentRecords, maxCount=15),
-                                            extraClasses=[EV['CountdownEvent']])])
+        self.agendaGraph.setToGraph([(s, p, o, c) for s, p, o in asGraph(limitDays(currentRecords, days=2))])
+        self.countdownGraph.setToGraph([(s, p, o, c) for s, p, o in asGraph(filterStarred(currentRecords, maxCount=15), extraClasses=[EV['CountdownEvent']])])
 
 
 class ReadMongoEvents(object):
     """read events from mongodb"""
+
     def __init__(self, collection):
         self.collection = collection
 
     def getEvents(self, t1, t2):
-        for doc in self.collection.find({"startTime" : {"$gte": t1, "$lt":t2}
-                                         }).sort([("startTime",1)]):
+        for doc in self.collection.find({"startTime": {"$gte": t1, "$lt": t2}}).sort([("startTime", 1)]):
             doc['uri'] = doc.pop('_id')
             if 'feedId' in doc:
                 doc['feed'] = URIRef('old_event')
             yield doc
 
+class Poller(object):
 
-class Poller(object):
     def __init__(self, sync, periodSec):
         self.sync = sync
         self.lastUpdateTime = time.time()
@@ -274,15 +290,18 @@
             self.lastUpdateTime = t1
             self.everUpdated = True
         took = time.time() - t1
-        self.scheduled = reactor.callLater(max(3, self.periodSec - took),
-                                           self._updateLoop)
+        self.scheduled = reactor.callLater(max(3, self.periodSec - took), self._updateLoop)
 
+
 class PollNow(cyclone.web.RequestHandler):
+
     def post(self):
         self.settings.poller.updateNow()
         self.set_status(202)
 
+
 class Index(cyclone.web.RequestHandler):
+
     def get(self):
         period = self.settings.conf['minutes_between_polls'] * 60
         ago = time.time() - self.settings.poller.lastUpdateTime
@@ -299,13 +318,15 @@
 class EventsPage(cyclone.web.RequestHandler):
+
     def get(self):
         """
         upcoming events as JSON-LD
         """
         arg = self.get_argument
         t1 = parse(arg('t1')) if arg('t1', default=None) else datetime.datetime.now().replace(hour=0, minute=0, second=0)
-        t2 = parse(arg('t2')) if arg('t2', default=None) else datetime.datetime.now() + datetime.timedelta(days=int(arg('days')) if arg('days', default=None) else 2)
+        t2 = parse(arg('t2')) if arg('t2',
+                                     default=None) else datetime.datetime.now() + datetime.timedelta(days=int(arg('days')) if arg('days', default=None) else 2)
         if 0:
             self.set_header("content-type", "application/ld+json")
             self.write(asJsonLd(self.settings.read.getEvents(t1, t2)))
@@ -315,29 +336,38 @@
 class Countdowns(cyclone.web.RequestHandler):
+
     def get(self):
         rows = []
         graph = self.settings.countdownGraph._graph
         for ev in graph.subjects(RDF.type, EV['Event']):
             starLabel = starred(graph, ev)
             if starLabel is not None:
-                rows.append({'@type':'countdown', 'time': graph.value(ev, EV['start']), 'label': starLabel})
+                rows.append({'@type': 'countdown', 'time': graph.value(ev, EV['start']), 'label': starLabel})
 
         self.set_header("content-type", "application/ld+json")
-        self.write(json.dumps({
-            "@context": {
-                "countdown":"http://bigasterisk.com/countdown#CountdownEvent",
-                "label": "http://www.w3.org/2000/01/rdf-schema#label",
-                "time": {
-                    "@id": "http://bigasterisk.com/event#time",
-                    "@type": "xsd:dateTime"
+        self.write(
+            json.dumps({
+                "@context": {
+                    "countdown": "http://bigasterisk.com/countdown#CountdownEvent",
+                    "label": "http://www.w3.org/2000/01/rdf-schema#label",
+                    "time": {
+                        "@id": "http://bigasterisk.com/event#time",
+                        "@type": "xsd:dateTime"
+                    },
+                    "xsd": "http://www.w3.org/2001/XMLSchema#",
+                    "rdfs": "http://www.w3.org/2000/01/rdf-schema#"
                 },
-                "xsd": "http://www.w3.org/2001/XMLSchema#",
-                "rdfs": "http://www.w3.org/2000/01/rdf-schema#"
-            },
-            "@graph": rows,
+                "@graph": rows,
             }))
 
 
+class Metrics(cyclone.web.RequestHandler):
+
+    def get(self):
+        self.add_header('content-type', 'text/plain')
+        self.write(generate_latest(REGISTRY))
+
+
 def main():
     args = docopt.docopt('''
 Usage:
@@ -350,12 +380,11 @@
     verboseLogging(args['--verbose'])
 
-    agendaGraph = PatchableGraph() # next few days
-    countdownGraph = PatchableGraph() # next n of starred events
+    agendaGraph = PatchableGraph()  # next few days
+    countdownGraph = PatchableGraph()  # next n of starred events
     conf = json.load(open("gcalendarwatch.conf"))
     m = conf['mongo']
-    mongoOut = MongoClient(m['host'], m['port'],
-                           tz_aware=True)[m['database']][m['collection']]
+    mongoOut = MongoClient(m['host'], m['port'], tz_aware=True)[m['database']][m['collection']]
 
     sync = SyncToMongo(conf, mongoOut, agendaGraph, countdownGraph)
     read = ReadMongoEvents(mongoOut)
@@ -367,26 +396,40 @@
         poller.updateNow()
 
     class Application(cyclone.web.Application):
+
         def __init__(self):
             handlers = [
                 (r"/", Index),
                 (r'/events', EventsPage),
                 (r'/pollNow', PollNow),
-                (r'/graph/calendar/upcoming', CycloneGraphHandler, {'masterGraph': agendaGraph}),
-                (r'/graph/calendar/upcoming/events', CycloneGraphEventsHandler, {'masterGraph': agendaGraph}),
-                (r'/graph/calendar/countdown', CycloneGraphHandler, {'masterGraph': countdownGraph}),
-                (r'/graph/calendar/countdown/events', CycloneGraphEventsHandler, {'masterGraph': countdownGraph}),
+                (r'/graph/calendar/upcoming', CycloneGraphHandler, {
+                    'masterGraph': agendaGraph
+                }),
+                (r'/graph/calendar/upcoming/events', CycloneGraphEventsHandler, {
+                    'masterGraph': agendaGraph
+                }),
+                (r'/graph/calendar/countdown', CycloneGraphHandler, {
+                    'masterGraph': countdownGraph
+                }),
+                (r'/graph/calendar/countdown/events', CycloneGraphEventsHandler, {
+                    'masterGraph': countdownGraph
+                }),
                 (r'/countdowns.json', Countdowns),
+                (r'/metrics', Metrics),
             ]
-            cyclone.web.Application.__init__(self, handlers,
-                                             conf=conf,
-                                             read=read,
-                                             poller=poller,
-                                             agendaGraph=agendaGraph,
-                                             countdownGraph=countdownGraph,
+            cyclone.web.Application.__init__(
+                self,
+                handlers,
+                conf=conf,
+                read=read,
+                poller=poller,
+                agendaGraph=agendaGraph,
+                countdownGraph=countdownGraph,
             )
 
+
     reactor.listenTCP(conf['serve_port'], Application())
     reactor.run()
 
+
 if __name__ == '__main__':
     main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/login.py	Sun Sep 13 23:31:25 2020 -0700
@@ -0,0 +1,8 @@
+"""run this to update auth files, then rebuild docker"""
+
+from standardservice.logsetup import log, verboseLogging
+
+from calendar_connection import getCalendarService
+
+verboseLogging(True)
+log.info(f'res={getCalendarService()}')
\ No newline at end of file
--- a/requirements.txt	Sun Sep 13 14:35:37 2020 -0700
+++ b/requirements.txt	Sun Sep 13 23:31:25 2020 -0700
@@ -1,7 +1,11 @@
-google_auth_oauthlib==0.4.1
+#google-auth-httplib2==0.0.4
+#google_auth_oauthlib==0.4.1
 google-api-python-client==1.11.0
-google-auth==1.21.1
+#google-auth==1.21.1
 oauth2client==4.1.3
+#oauthlib==3.1.0
+gcsa==0.3.2
+
 prometheus_client==0.8.0
 pymongo==3.11.0
 python-dateutil==2.8.1
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/run_login.sh	Sun Sep 13 23:31:25 2020 -0700
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+# this didn't work out with the internal webserver, even with my port forwarder
+#docker build -t gcalendarwatch_login .
+#docker run -it --rm -p 8080:8080 -v `pwd`/credentials:/opt/credentials -t gcalendarwatch_login /usr/bin/python3 login.py
+
+echo run this on dash
+setup_home_venv
+~/.venvs/gcalendarwatch/bin/python3 login.py
+
+echo wrote token.pickle
+