Files
@ 0480fc790527
Branch filter:
Location: light9/bin/collector
0480fc790527
6.1 KiB
text/plain
paint now looks for best match
Ignore-this: 5604a2a2181624a0612dddf1afa32985
Ignore-this: 5604a2a2181624a0612dddf1afa32985
#!bin/python
from __future__ import division
from rdflib import URIRef, Literal
from twisted.internet import reactor, utils
from txzmq import ZmqEndpoint, ZmqFactory, ZmqPullConnection
import json
import logging
import optparse
import time
import traceback
import cyclone.web, cyclone.websocket
from greplin import scales
from run_local import log
from lib.cycloneerr import PrettyErrorHandler
from light9.collector.output import EnttecDmx, Udmx
from light9.collector.collector import Collector
from light9.namespaces import L9
from light9 import networking
from light9.rdfdb.syncedgraph import SyncedGraph
from light9.greplin_cyclone import StatsForCyclone
def parseJsonMessage(msg):
    """Decode one json settings message from a client.

    Returns (client, clientSession, settings, sendTime), where settings
    is a list of (device URIRef, attr URIRef, value Literal) triples.
    """
    body = json.loads(msg)
    settings = [(URIRef(dev), URIRef(attr), Literal(value))
                for dev, attr, value in body['settings']]
    return body['client'], body['clientSession'], settings, body['sendTime']
def startZmq(port, collector):
    """Bind a zmq PULL socket on the given port and forward every
    incoming settings message to collector.setAttrs.
    """
    stats = scales.collection('/zmqServer', scales.PmfStat('setAttr'))

    addr = 'tcp://*:%s' % port
    log.info('creating zmq endpoint at %r', addr)
    factory = ZmqFactory()
    pullSock = ZmqPullConnection(factory, ZmqEndpoint('bind', addr))

    def onPull(message):
        # todo: new compressed protocol where you send all URIs up
        # front and then use small ints to refer to devices and
        # attributes in subsequent requests.
        with stats.setAttr.time():
            client, clientSession, settings, sendTime = parseJsonMessage(
                message[0])
            collector.setAttrs(client, clientSession, settings, sendTime)

    pullSock.onPull = onPull
class WebListeners(object):
    """Tracks connected websocket clients and pushes 'outputAttrsSet'
    messages to them, suppressing repeats a client has seen in the last
    few seconds."""

    def __init__(self):
        # list of [clientConnection, seenMessages] pairs, where
        # seenMessages maps msg json string -> last time it was sent
        self.clients = []

    def addClient(self, client):
        self.clients.append([client, {}])
        log.info('added client %s', client)

    def delClient(self, client):
        self.clients = [[c, t] for c, t in self.clients if c != client]
        log.info('delClient %s, %s left', client, len(self.clients))

    def outputAttrsSet(self, dev, attrs, outputMap):
        """Called when a device's output attrs change; broadcast to clients."""
        now = time.time()
        msg = self.makeMsg(dev, attrs, outputMap)
        # this omits repeats, but can still send many
        # messages/sec. Not sure if piling up messages for the browser
        # could lead to slowdowns in the real dmx output.
        for client, seen in self.clients:
            # Expire entries older than 5s. Iterate over a snapshot:
            # deleting from a dict while iterating its live items()
            # raises RuntimeError on python 3 (the original relied on
            # python 2's list-returning items()).
            for m, t in list(seen.items()):
                if t < now - 5:
                    del seen[m]
            if msg in seen:
                continue
            seen[msg] = now
            client.sendMessage(msg)

    def makeMsg(self, dev, attrs, outputMap):
        """Build the sorted json 'outputAttrsSet' message for one device.

        outputMap maps (dev, attr) -> (output, zero-based dmx index).
        """
        attrRows = []
        for attr, val in attrs.items():
            output, index = outputMap[(dev, attr)]
            attrRows.append({'attr': attr.rsplit('/')[-1],
                             'val': val,
                             # display channels as 1-based
                             'chan': (output.shortId(), index + 1)})
        attrRows.sort(key=lambda r: r['chan'])
        for row in attrRows:
            row['chan'] = '%s %s' % (row['chan'][0], row['chan'][1])
        msg = json.dumps({'outputAttrsSet': {'dev': dev, 'attrs': attrRows}},
                         sort_keys=True)
        return msg
class Updates(cyclone.websocket.WebSocketHandler):
    """Websocket endpoint for browsers watching live output-attr updates.

    Registers with the shared WebListeners on connect so it receives
    outputAttrsSet broadcasts; unregisters on disconnect.
    """

    def connectionMade(self, *args, **kwargs):
        log.info('socket connect %s', self)
        self.settings.listeners.addClient(self)

    def connectionLost(self, reason):
        self.settings.listeners.delClient(self)

    def messageReceived(self, message):
        # Incoming messages are not used; the parse result is discarded
        # (json.loads will raise on malformed input).
        json.loads(message)
# timing stats for the http attr-setting endpoint (Attrs.put below)
stats = scales.collection('/webServer', scales.PmfStat('setAttr'))
class Attrs(PrettyErrorHandler, cyclone.web.RequestHandler):
    """HTTP endpoint accepting a json settings message via PUT."""

    def put(self):
        with stats.setAttr.time():
            parsed = parseJsonMessage(self.request.body)
            client, clientSession, settings, sendTime = parsed
            self.settings.collector.setAttrs(client, clientSession,
                                             settings, sendTime)
            # 202 Accepted: the settings were queued for output
            self.set_status(202)
def launch(graph, doLoadTest=False):
    """Start the collector service: dmx outputs, zmq listener, http server.

    Called once the SyncedGraph is initially synced. If doLoadTest is
    true, also spawns bin/collector_loadtest.py in a subprocess and
    stops the reactor when it finishes.
    """
    try:
        # todo: drive outputs with config files
        outputs = [
            #EnttecDmx(L9['output/dmx0/'], '/dev/dmx0', 80),
            Udmx(L9['output/udmx/'], 510),
        ]
    except Exception as e:
        # log and re-raise: the service is useless without outputs
        log.error("setting up outputs: %r", e)
        traceback.print_exc()
        raise
    listeners = WebListeners()
    c = Collector(graph, outputs, listeners)
    startZmq(networking.collectorZmq.port, c)
    reactor.listenTCP(networking.collector.port,
                      cyclone.web.Application(handlers=[
                          (r'/()', cyclone.web.StaticFileHandler,
                           {"path" : "light9/collector/web", "default_filename" : "index.html"}),
                          (r'/updates', Updates),
                          (r'/attrs', Attrs),
                          (r'/stats', StatsForCyclone),
                      ], collector=c, listeners=listeners),
                      interface='::')
    log.info('serving http on %s, zmq on %s', networking.collector.port,
             networking.collectorZmq.port)
    if doLoadTest:
        # in a subprocess since we don't want this client to be
        # cooperating with the main event loop and only sending
        # requests when there's free time
        def afterWarmup():
            log.info('running collector_loadtest')
            d = utils.getProcessValue('bin/python', ['bin/collector_loadtest.py'])
            def done(*a):
                log.info('loadtest done')
                reactor.stop()
            d.addCallback(done)
        # give the server ~2s to warm up before the load starts
        reactor.callLater(2, afterWarmup)
def main():
    """Parse options, connect to rdfdb, and run the collector service."""
    parser = optparse.OptionParser()
    parser.add_option("-v", "--verbose", action="store_true",
                      help="logging.DEBUG")
    parser.add_option("--loadtest", action="store_true",
                      help="call myself with some synthetic load then exit")
    options = parser.parse_args()[0]

    level = logging.DEBUG if options.verbose else logging.INFO
    log.setLevel(level)

    graph = SyncedGraph(networking.rdfdb.url, "collector")
    # launch only after the graph has its initial data; report failures
    d = graph.initiallySynced.addCallback(
        lambda _: launch(graph, options.loadtest))
    d.addErrback(log.error)
    reactor.run()


if __name__ == '__main__':
    main()
|