changeset 2043:67575505c400

comment out more greplin so services can start
author drewp@bigasterisk.com
date Mon, 09 May 2022 23:18:05 -0700
parents 8849c9e5da30
children f8a5f579547a
files bin/effectsequencer bin/vidref light9/collector/collector_client.py light9/effect/sequencer.py light9/vidref/videorecorder.py
diffstat 5 files changed, 81 insertions(+), 79 deletions(-)
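This changeset disables metrics by commenting out every greplin.scales call site in the files listed above. A lighter-touch alternative (a sketch only, not what this commit does; the module and class names below are hypothetical) would be a small no-op shim that mimics just the scales calls these modules use, so the stat assignments, decorators and `with ... .time():` blocks could stay untouched. It does not cover StatsHandler or gatherProcessStats(), which would still need to stay disabled or get stubs of their own.

# scales_stub.py -- hypothetical no-op stand-in for the greplin.scales
# calls seen in this diff: collection(), the stat constructors,
# attribute assignment / `+=` on stats, .mark(), .time() and .rate().


class _NullTimer:
    """Usable as both a decorator and a context manager; does nothing."""

    def __call__(self, func):
        return func

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        return False


class _NullStat:

    def time(self):
        return _NullTimer()

    def rate(self):
        return _NullTimer()

    def mark(self):
        pass

    def __add__(self, other):  # lets `stats.liveClients += 1` work
        return self

    def __sub__(self, other):  # lets `stats.liveClients -= 1` work
        return self


def PmfStat(name, recalcPeriod=1):
    return _NullStat()


IntStat = DoubleStat = RecentFpsStat = PmfStat


class _Collection:

    def __init__(self, path, *stats):
        # the registered stat objects are ignored; any attribute lookup
        # below returns a fresh no-op stat instead
        self._path = path

    def __getattr__(self, name):
        return _NullStat()


def collection(path, *stats):
    return _Collection(path, *stats)

Each module could then import the shim under the old name, e.g. `from light9 import scales_stub as scales` (hypothetical path), until greplin is either restored or removed for good.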
--- a/bin/effectsequencer	Mon May 09 21:44:04 2022 -0700
+++ b/bin/effectsequencer	Mon May 09 23:18:05 2022 -0700
@@ -5,10 +5,10 @@
 
 from run_local import log
 from twisted.internet import reactor
-from greplin.scales.cyclonehandler import StatsHandler
+# from greplin.scales.cyclonehandler import StatsHandler
 from rdfdb.syncedgraph import SyncedGraph
 from light9 import networking, showconfig
-from greplin import scales
+# from greplin import scales
 import optparse, sys, logging
 import cyclone.web
 from rdflib import URIRef
@@ -27,14 +27,14 @@
         self.graph = SyncedGraph(networking.rdfdb.url, "effectSequencer")
         self.graph.initiallySynced.addCallback(self.launch)
 
-        self.stats = scales.collection(
-            '/',
-            scales.PmfStat('sendLevels', recalcPeriod=1),
-            scales.PmfStat('getMusic', recalcPeriod=1),
-            scales.PmfStat('evals', recalcPeriod=1),
-            scales.PmfStat('sendOutput', recalcPeriod=1),
-            scales.IntStat('errors'),
-        )
+        # self.stats = scales.collection(
+        #     '/',
+        #     scales.PmfStat('sendLevels', recalcPeriod=1),
+        #     scales.PmfStat('getMusic', recalcPeriod=1),
+        #     scales.PmfStat('evals', recalcPeriod=1),
+        #     scales.PmfStat('sendOutput', recalcPeriod=1),
+        #     scales.IntStat('errors'),
+        # )
 
     def launch(self, *args):
         self.seq = Sequencer(
@@ -54,14 +54,15 @@
                 "default_filename": "sequencer.html"
             }),
             (r'/updates', Updates),
-            (r'/stats/(.*)', StatsHandler, {
-                'serverName': 'effectsequencer'
-            }),
+            # (r'/stats/(.*)', StatsHandler, {
+            #     'serverName': 'effectsequencer'
+            # }),
         ],
                                                   debug=True,
                                                   seq=self.seq,
                                                   graph=self.graph,
-                                                  stats=self.stats)
+                                                #   stats=self.stats
+                                                  )
         reactor.listenTCP(networking.effectSequencer.port, self.cycloneApp)
         log.info("listening on %s" % networking.effectSequencer.port)
 
--- a/bin/vidref	Mon May 09 21:44:04 2022 -0700
+++ b/bin/vidref	Mon May 09 23:18:05 2022 -0700
@@ -19,8 +19,8 @@
 from typing import cast
 import logging, optparse, json, base64, os, glob
 
-from greplin import scales
-from greplin.scales.cyclonehandler import StatsHandler
+# from greplin import scales
+# from greplin.scales.cyclonehandler import StatsHandler
 from rdflib import URIRef
 from twisted.internet import reactor, defer
 import cyclone.web, cyclone.httpclient, cyclone.websocket
@@ -30,7 +30,7 @@
 from light9.newtypes import Song
 from light9.vidref import videorecorder
 from rdfdb.syncedgraph import SyncedGraph
-from standardservice.scalessetup import gatherProcessStats
+# from standardservice.scalessetup import gatherProcessStats
 
 parser = optparse.OptionParser()
 parser.add_option("-v", "--verbose", action="store_true", help="logging.DEBUG")
@@ -38,12 +38,12 @@
 
 log.setLevel(logging.DEBUG if options.verbose else logging.INFO)
 
-gatherProcessStats()
-stats = scales.collection(
-    '/webServer',
-    scales.RecentFpsStat('liveWebsocketFrameFps'),
-    scales.IntStat('liveClients'),
-)
+# gatherProcessStats()
+# stats = scales.collection(
+#     '/webServer',
+#     scales.RecentFpsStat('liveWebsocketFrameFps'),
+#     scales.IntStat('liveClients'),
+# )
 
 
 class Snapshot(cyclone.web.RequestHandler):
@@ -78,16 +78,17 @@
 
     def connectionMade(self, *args, **kwargs):
         pipeline.liveImages.subscribe(on_next=self.onFrame)
-        stats.liveClients += 1
+        # stats.liveClients += 1
 
     def connectionLost(self, reason):
         #self.subj.dispose()
-        stats.liveClients -= 1
+        # stats.liveClients -= 1
+        pass
 
     def onFrame(self, cf: videorecorder.CaptureFrame):
         if cf is None: return
 
-        stats.liveWebsocketFrameFps.mark()
+        # stats.liveWebsocketFrameFps.mark()
 
         self.sendMessage(
             json.dumps({
@@ -188,9 +189,9 @@
             }),
             (r'/time', Time),
             (r'/time/stream', TimeStream),
-            (r'/stats/(.*)', StatsHandler, {
-                'serverName': 'vidref'
-            }),
+            # (r'/stats/(.*)', StatsHandler, {
+            #     'serverName': 'vidref'
+            # }),
         ],
         debug=True,
     ))
--- a/light9/collector/collector_client.py	Mon May 09 21:44:04 2022 -0700
+++ b/light9/collector/collector_client.py	Mon May 09 23:18:05 2022 -0700
@@ -4,16 +4,16 @@
 from txzmq import ZmqEndpoint, ZmqFactory, ZmqPushConnection
 import json, time, logging
 import treq
-from greplin import scales
+# from greplin import scales
 
 log = logging.getLogger('coll_client')
 
 _zmqClient = None
 
-stats = scales.collection(
-    '/collectorClient',
-    scales.PmfStat('send', recalcPeriod=1),
-)
+# stats = scales.collection(
+#     '/collectorClient',
+#     scales.PmfStat('send', recalcPeriod=1),
+# )
 
 
 class TwistedZmqClient(object):
@@ -58,7 +58,7 @@
 
     def onDone(result):
         dt = time.time() - sendTime
-        stats.send = dt
+        # stats.send = dt
         if dt > .1:
             log.warn('sendToCollector request took %.1fms', dt * 1000)
         return dt
--- a/light9/effect/sequencer.py	Mon May 09 21:44:04 2022 -0700
+++ b/light9/effect/sequencer.py	Mon May 09 23:18:05 2022 -0700
@@ -22,30 +22,30 @@
 from light9.namespaces import L9, RDF
 from light9.newtypes import DeviceUri, DeviceAttr, NoteUri, Curve, Song
 from rdfdb.syncedgraph import SyncedGraph
-from standardservice.scalessetup import gatherProcessStats
+# from standardservice.scalessetup import gatherProcessStats
 
-from greplin import scales
+# from greplin import scales
 import imp
 
 log = logging.getLogger('sequencer')
 
-gatherProcessStats()
-updateStats = scales.collection(
-    '/update',
-    scales.PmfStat('s0_getMusic', recalcPeriod=1),
-    scales.PmfStat('s1_eval', recalcPeriod=1),
-    #scales.PmfStat('s3_send_client', recalcPeriod=1),
-    scales.PmfStat('s3_send', recalcPeriod=1),
-    scales.PmfStat('updateLoopLatency', recalcPeriod=1),
-    scales.DoubleStat('updateLoopLatencyGoal'),
-    scales.RecentFpsStat('updateFps'),
-    scales.DoubleStat('goalFps'),
-)
-compileStats = scales.collection(
-    '/compile',
-    scales.PmfStat('graph', recalcPeriod=1),
-    scales.PmfStat('song', recalcPeriod=1),
-)
+# gatherProcessStats()
+# updateStats = scales.collection(
+#     '/update',
+#     scales.PmfStat('s0_getMusic', recalcPeriod=1),
+#     scales.PmfStat('s1_eval', recalcPeriod=1),
+#     #scales.PmfStat('s3_send_client', recalcPeriod=1),
+#     scales.PmfStat('s3_send', recalcPeriod=1),
+#     scales.PmfStat('updateLoopLatency', recalcPeriod=1),
+#     scales.DoubleStat('updateLoopLatencyGoal'),
+#     scales.RecentFpsStat('updateFps'),
+#     scales.DoubleStat('goalFps'),
+# )
+# compileStats = scales.collection(
+#     '/compile',
+#     scales.PmfStat('graph', recalcPeriod=1),
+#     scales.PmfStat('song', recalcPeriod=1),
+# )
 
 
 def pyType(n):
@@ -171,8 +171,8 @@
                  fps=40):
         self.graph = graph
         self.fps = fps
-        updateStats.goalFps = self.fps
-        updateStats.updateLoopLatencyGoal = 1 / self.fps
+        # updateStats.goalFps = self.fps
+        # updateStats.updateLoopLatencyGoal = 1 / self.fps
         self.sendToCollector = sendToCollector
         self.music = MusicTime(period=.2, pollCurvecalc=False)
 
@@ -192,7 +192,7 @@
         self.graph.addHandler(self.compileGraph)
         #self.updateLoop()
 
-    @compileStats.graph.time()
+    # @compileStats.graph.time()
     def compileGraph(self) -> None:
         """rebuild our data from the graph"""
         for song in self.graph.subjects(RDF.type, L9['Song']):
@@ -202,7 +202,7 @@
 
             self.graph.addHandler(compileSong)
 
-    @compileStats.song.time()
+    # @compileStats.song.time()
     def compileSong(self, song: Song) -> None:
         anyErrors = False
         self.notes[song] = []
@@ -230,7 +230,7 @@
             reactor.callLater(1, self.updateLoop)
         else:
             took = time.time() - frameStart
-            updateStats.updateLoopLatency = took
+            # updateStats.updateLoopLatency = took
 
             if not self.lastLoopSucceeded:
                 log.info('Sequencer.update is working')
@@ -239,11 +239,11 @@
             delay = max(0, 1 / self.fps - took)
             reactor.callLater(delay, self.updateLoop)
 
-    @updateStats.updateFps.rate()
+    # @updateStats.updateFps.rate()
     @inlineCallbacks
     def update(self) -> Deferred:
 
-        with updateStats.s0_getMusic.time():
+        if 1:#with updateStats.s0_getMusic.time():
             musicState = self.music.getLatest()
             if not musicState.get('song') or not isinstance(
                     musicState.get('t'), float):
@@ -255,7 +255,7 @@
                                 't': musicState['t']
                             })
 
-        with updateStats.s1_eval.time():
+        if 1:#with updateStats.s1_eval.time():
             settings = []
             songNotes = sorted(self.notes.get(song, []), key=lambda n: n.uri)
             noteReports = []
@@ -271,7 +271,7 @@
 
         dispatcher.send('state', update={'songNotes': noteReports})
 
-        with updateStats.s3_send.time():  # our measurement
+        if 1:#with updateStats.s3_send.time():  # our measurement
             sendSecs = yield self.sendToCollector(devSettings)
 
         # sendToCollector's own measurement.
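The `with updateStats....time():` blocks in update() above are replaced with `if 1:` placeholders so the indented bodies can stay as they are. A slightly tidier equivalent (again only a sketch, not what this commit does) is contextlib.nullcontext from the standard library (Python 3.7+), which keeps the block structure without a dummy condition:

from contextlib import nullcontext

# e.g. in Sequencer.update(), instead of
# `if 1:  # with updateStats.s0_getMusic.time():`
with nullcontext():
    pass  # ...original block body, unchanged; no timing is recorded...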
--- a/light9/vidref/videorecorder.py	Mon May 09 21:44:04 2022 -0700
+++ b/light9/vidref/videorecorder.py	Mon May 09 23:18:05 2022 -0700
@@ -8,7 +8,7 @@
 gi.require_version('GstBase', '1.0')
 
 from gi.repository import Gst
-from greplin import scales
+# from greplin import scales
 from rdflib import URIRef
 from rx.subject import BehaviorSubject
 from twisted.internet import threads
@@ -22,15 +22,15 @@
 
 log = logging.getLogger()
 
-stats = scales.collection(
-    '/recorder',
-    scales.PmfStat('jpegEncode', recalcPeriod=1),
-    scales.IntStat('deletes'),
-    scales.PmfStat('waitForNextImg', recalcPeriod=1),
-    scales.PmfStat('crop', recalcPeriod=1),
-    scales.RecentFpsStat('encodeFrameFps'),
-    scales.RecentFpsStat('queueGstFrameFps'),
-)
+# stats = scales.collection(
+#     '/recorder',
+#     scales.PmfStat('jpegEncode', recalcPeriod=1),
+#     scales.IntStat('deletes'),
+#     scales.PmfStat('waitForNextImg', recalcPeriod=1),
+#     scales.PmfStat('crop', recalcPeriod=1),
+#     scales.RecentFpsStat('encodeFrameFps'),
+#     scales.RecentFpsStat('queueGstFrameFps'),
+# )
 
 
 @dataclass
@@ -41,7 +41,7 @@
     isPlaying: bool
     imgJpeg: Optional[bytes] = None
 
-    @stats.jpegEncode.time()
+    # @stats.jpegEncode.time()
     def asJpeg(self):
         if not self.imgJpeg:
             output = BytesIO()
@@ -73,7 +73,7 @@
         w[3]
     ])
     log.info(f'deleting {uri} {path}')
-    stats.deletes += 1
+    # stats.deletes += 1
     for fn in [path + '.mp4', path + '.timing']:
         os.remove(fn)
 
@@ -173,7 +173,7 @@
             deleteClip(takeUri(self.outMp4.encode('ascii')))
 
     def _bg_make_frame(self, video_time_secs):
-        stats.encodeFrameFps.mark()
+        # stats.encodeFrameFps.mark()
         if self.nextWriteAction == 'close':
             raise StopIteration  # the one in write_videofile
         elif self.nextWriteAction == 'notWritingClip':
@@ -187,7 +187,7 @@
         t1 = time.time()
         while self.nextImg is None:
             time.sleep(.015)
-        stats.waitForNextImg = time.time() - t1
+        # stats.waitForNextImg = time.time() - t1
         cf, self.nextImg = self.nextImg, None
 
         self.frameMap.write(f'video {video_time_secs:g} = song {cf.t:g}\n')
@@ -248,7 +248,7 @@
             # could get gst's frame time and pass it to getLatest
             latest = self.musicTime.getLatest()
             if 'song' in latest:
-                stats.queueGstFrameFps.mark()
+                # stats.queueGstFrameFps.mark()
                 self.liveImages.on_next(
                     CaptureFrame(img=img,
                                  song=Song(latest['song']),
@@ -258,7 +258,7 @@
             traceback.print_exc()
         return Gst.FlowReturn.OK
 
-    @stats.crop.time()
+    # @stats.crop.time()
     def crop(self, img):
         return img.crop((40, 100, 790, 310))