Files @ 654c41ccf265
Branch filter:

Location: light9/bin/collector

Drew Perttula
more draft UI for paint page
Ignore-this: 38d7781ae0c870578eee64cc17ec7eb8
#!bin/python
from __future__ import division
from rdflib import URIRef, Literal
from twisted.internet import reactor, utils
from txzmq import ZmqEndpoint, ZmqFactory, ZmqPullConnection
import json
import logging
import optparse
import time
import traceback
import cyclone.web, cyclone.websocket
from greplin import scales

from run_local import log
from lib.cycloneerr import PrettyErrorHandler
from light9.collector.output import EnttecDmx, Udmx
from light9.collector.collector import Collector
from light9.namespaces import L9
from light9 import networking
from light9.rdfdb.syncedgraph import SyncedGraph
from light9.greplin_cyclone import StatsForCyclone

def parseJsonMessage(msg):
    """Decode one json settings message from a client.

    Returns (client, clientSession, settings, sendTime), where settings
    is a list of (device URIRef, attr URIRef, value Literal) triples.
    """
    body = json.loads(msg)
    settings = [(URIRef(dev), URIRef(attr), Literal(value))
                for dev, attr, value in body['settings']]
    return body['client'], body['clientSession'], settings, body['sendTime']

def startZmq(port, collector):
    """Bind a zmq PULL socket on *port* and forward each incoming
    settings message to collector.setAttrs."""
    stats = scales.collection('/zmqServer',
                              scales.PmfStat('setAttr'))

    factory = ZmqFactory()
    addr = 'tcp://*:%s' % port
    log.info('creating zmq endpoint at %r', addr)
    endpoint = ZmqEndpoint('bind', addr)
    sock = ZmqPullConnection(factory, endpoint)

    def onPull(message):
        # todo: new compressed protocol where you send all URIs up
        # front and then use small ints to refer to devices and
        # attributes in subsequent requests.
        with stats.setAttr.time():
            client, clientSession, settings, sendTime = parseJsonMessage(
                message[0])
            collector.setAttrs(client, clientSession, settings, sendTime)

    sock.onPull = onPull

class WebListeners(object):
    """Tracks connected websocket clients and fans out outputAttrsSet
    json messages to them, suppressing per-client repeats seen within
    the last 5 seconds."""

    def __init__(self):
        # list of [client, seen] pairs; seen maps json message -> last
        # time it was sent to that client (for repeat suppression)
        self.clients = []

    def addClient(self, client):
        self.clients.append([client, {}])
        log.info('added client %s', client)

    def delClient(self, client):
        self.clients = [[c, t] for c, t in self.clients if c != client]
        log.info('delClient %s, %s left', client, len(self.clients))

    def outputAttrsSet(self, dev, attrs, outputMap):
        """Announce dev's new attr values to every client that hasn't
        just seen this exact message."""
        now = time.time()

        msg = self.makeMsg(dev, attrs, outputMap)

        # this omits repeats, but can still send many
        # messages/sec. Not sure if piling up messages for the browser
        # could lead to slowdowns in the real dmx output.
        for client, seen in self.clients:
            # copy items: we mutate `seen` while scanning it. (py2's
            # items() returned a copy already; list() keeps this
            # correct under py3's view objects too)
            for m, t in list(seen.items()):
                if t < now - 5:
                    del seen[m]
            if msg in seen:
                continue
            seen[msg] = now
            client.sendMessage(msg)

    def makeMsg(self, dev, attrs, outputMap):
        """Build the json 'outputAttrsSet' message: one row per attr,
        sorted by output channel, chan rendered as
        '<outputShortId> <1-based index>'."""
        attrRows = []
        for attr, val in attrs.items():
            output, index = outputMap[(dev, attr)]
            attrRows.append({'attr': attr.rsplit('/')[-1],
                             'val': val,
                             'chan': (output.shortId(), index + 1)})
        attrRows.sort(key=lambda r: r['chan'])
        for row in attrRows:
            row['chan'] = '%s %s' % (row['chan'][0], row['chan'][1])

        msg = json.dumps({'outputAttrsSet': {'dev': dev, 'attrs': attrRows}},
                         sort_keys=True)
        return msg
    
class Updates(cyclone.websocket.WebSocketHandler):
    """Websocket for browsers watching computed output attrs.

    Each connection is registered with the shared WebListeners (from
    the cyclone app settings) and receives outputAttrsSet messages.
    """

    def connectionMade(self, *args, **kwargs):
        log.info('socket connect %s', self)
        self.settings.listeners.addClient(self)

    def connectionLost(self, reason):
        self.settings.listeners.delClient(self)

    def messageReceived(self, message):
        # parse to validate only; the result is discarded (clients are
        # listen-only so far)
        json.loads(message)

# timing stats for the http PUT path (the zmq path has its own
# /zmqServer collection inside startZmq)
stats = scales.collection('/webServer', scales.PmfStat('setAttr'))

class Attrs(PrettyErrorHandler, cyclone.web.RequestHandler):
    """HTTP alternative to the zmq interface: PUT a json settings body
    to apply device attr settings."""

    def put(self):
        with stats.setAttr.time():
            parsed = parseJsonMessage(self.request.body)
            client, clientSession, settings, sendTime = parsed
            self.settings.collector.setAttrs(client, clientSession,
                                             settings, sendTime)
            self.set_status(202)


            
def launch(graph, doLoadTest=False):
    """Build the outputs and Collector, then start the zmq and http
    servers. With doLoadTest, also spawn bin/collector_loadtest.py
    after a short warmup and stop the reactor when it finishes."""
    # todo: drive outputs with config files
    try:
        outputs = [
            #EnttecDmx(L9['output/dmx0/'], '/dev/dmx0', 80),
            Udmx(L9['output/udmx/'], 510),
        ]
    except Exception as e:
        log.error("setting up outputs: %r", e)
        traceback.print_exc()
        raise

    listeners = WebListeners()
    c = Collector(graph, outputs, listeners)

    startZmq(networking.collectorZmq.port, c)

    handlers = [
        (r'/()', cyclone.web.StaticFileHandler,
         {"path": "light9/collector/web", "default_filename": "index.html"}),
        (r'/updates', Updates),
        (r'/attrs', Attrs),
        (r'/stats', StatsForCyclone),
    ]
    app = cyclone.web.Application(handlers=handlers, collector=c,
                                  listeners=listeners)
    reactor.listenTCP(networking.collector.port, app, interface='::')
    log.info('serving http on %s, zmq on %s', networking.collector.port,
             networking.collectorZmq.port)

    if doLoadTest:
        # in a subprocess since we don't want this client to be
        # cooperating with the main event loop and only sending
        # requests when there's free time
        def afterWarmup():
            log.info('running collector_loadtest')
            d = utils.getProcessValue('bin/python',
                                      ['bin/collector_loadtest.py'])
            def done(*a):
                log.info('loadtest done')
                reactor.stop()
            d.addCallback(done)
        reactor.callLater(2, afterWarmup)
    
def main():
    """Parse options, connect to rdfdb, and run the collector service
    once the graph is initially synced."""
    parser = optparse.OptionParser()
    parser.add_option("-v", "--verbose", action="store_true",
                      help="logging.DEBUG")
    parser.add_option("--loadtest", action="store_true",
                      help="call myself with some synthetic load then exit")
    options = parser.parse_args()[0]
    log.setLevel(logging.DEBUG if options.verbose else logging.INFO)

    graph = SyncedGraph(networking.rdfdb.url, "collector")

    d = graph.initiallySynced.addCallback(
        lambda _: launch(graph, options.loadtest))
    d.addErrback(log.error)
    reactor.run()

# script entry point (run as bin/collector, not importable side effects)
if __name__ == '__main__':
    main()