changeset 9:145779f2d79d

wedge in an allow-origin response for tests that aren't serving on our domain
author drewp@bigasterisk.com
date Sun, 07 May 2023 16:00:19 -0700
parents 92e31a078643
children e2be4c5d2531
files deploy.yaml environment.py
diffstat 2 files changed, 37 insertions(+), 1 deletions(-)
--- a/deploy.yaml	Sun May 07 15:59:10 2023 -0700
+++ b/deploy.yaml	Sun May 07 16:00:19 2023 -0700
@@ -18,6 +18,9 @@
         - name: environment
           image: bang5:5000/environment_image
           workingDir: /opt
+          env:
+            - name: ALLOW_ORIGIN
+              value: http://dash:8001
           command:
             - pdm
             - run
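
Note: the environment.py hunk below reads this variable; the intent is that CORS stays off unless a deployment opts in by setting ALLOW_ORIGIN. A minimal sketch of that opt-in pattern, reusing the names from the diff (corsHeaders itself is a hypothetical helper, not part of the changeset):

    import os

    # Unset or empty ALLOW_ORIGIN means no CORS header gets added, so a normal
    # same-origin deployment behaves exactly as before this change.
    allowOrigin = os.environ.get('ALLOW_ORIGIN', '')

    def corsHeaders() -> dict:
        # Only advertise the test origin when the deployment explicitly opts in.
        return {'Access-Control-Allow-Origin': allowOrigin} if allowOrigin else {}
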
--- a/environment.py	Sun May 07 15:59:10 2023 -0700
+++ b/environment.py	Sun May 07 16:00:19 2023 -0700
@@ -6,6 +6,7 @@
 """
 import datetime
 import logging
+import os
 
 import background_loop
 from dateutil.relativedelta import FR, relativedelta
@@ -13,7 +14,9 @@
 from patchablegraph import PatchableGraph
 from patchablegraph.handler import GraphEvents, StaticGraph
 from rdflib import Literal, Namespace
+from sse_starlette import EventSourceResponse, ServerSentEvent
 from starlette.applications import Starlette
+from starlette.requests import Request
 from starlette.routing import Route
 from starlette.staticfiles import StaticFiles
 from starlette_exporter import PrometheusMiddleware, handle_metrics
@@ -27,6 +30,36 @@
 logging.basicConfig(level=logging.INFO)
 logging.getLogger('patchablegraph').setLevel(logging.WARNING)
 
+allowOrigin = os.environ.get('ALLOW_ORIGIN', '')
+
+# TODO: factor this back into GraphEvents
+def GE2(masterGraph: PatchableGraph):
+
+    async def generateEvents():
+        events = masterGraph.subscribeToPatches()
+        while True:  # we'll get cancelled by EventSourceResponse when the conn drops
+            etype, data = await events.get()
+            # Are there more patches queued? We might throttle and combine them here;
+            # ideally we'd measure the latency to the client to pick a better rate.
+            yield ServerSentEvent(event=etype, data=data)
+
+    async def handle(request: Request):
+        """
+        One session with one client.
+
+        Returns the current graph plus future patches, to keep the remote
+        version in sync with ours.
+
+        Instead of turning off proxy buffering everywhere, it may be enough
+        for this response to send 'X-Accel-Buffering: no', per
+        http://nginx.org/en/docs/http/ngx_http_proxy_module.html#proxy_buffering
+        """
+        headers = {}
+        if allowOrigin:
+            headers = {'Access-Control-Allow-Origin': allowOrigin}
+        return EventSourceResponse(generateEvents(), headers=headers)
+
+    return handle
 
 def update(masterGraph):
 
@@ -62,7 +95,7 @@
         routes=[
             Route('/', StaticFiles(directory='.', html=True)),
             Route('/graph/environment', StaticGraph(masterGraph)),
-            Route('/graph/environment/events', GraphEvents(masterGraph)),
+            Route('/graph/environment/events', GE2(masterGraph)),
             # Route('/doc', Doc),
         ])
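
To sanity-check the new header from a host that isn't serving on our domain (the case in the commit message), something like the following should work. The service URL is an assumption about how the environment service is exposed; the connection is closed right after the headers arrive, since the SSE body streams indefinitely:

    import urllib.request

    # Hypothetical URL; substitute however the environment service is actually reachable.
    url = 'http://environment/graph/environment/events'

    # urlopen returns once the response headers are in, before any SSE events,
    # so this doesn't block on the never-ending event stream.
    with urllib.request.urlopen(url, timeout=10) as resp:
        print(resp.headers.get('Access-Control-Allow-Origin'))
        # Expect 'http://dash:8001' when ALLOW_ORIGIN is set as in deploy.yaml,
        # and None when it's unset.
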