2015-03-14 11:12:57 +01:00
|
|
|
#!/usr/bin/env python
|
|
|
|
"""Event Man(ager)
|
|
|
|
|
2015-03-22 11:08:23 +01:00
|
|
|
Your friendly manager of attendees at an event.
|
2015-03-22 09:36:32 +01:00
|
|
|
|
|
|
|
Copyright 2015 Davide Alberani <da@erlug.linux.it>
|
|
|
|
RaspiBO <info@raspibo.org>
|
|
|
|
|
|
|
|
Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
you may not use this file except in compliance with the License.
|
|
|
|
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
|
|
|
|
Unless required by applicable law or agreed to in writing, software
|
|
|
|
distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
See the License for the specific language governing permissions and
|
|
|
|
limitations under the License.
|
2015-03-14 11:12:57 +01:00
|
|
|
"""
|
|
|
|
|
|
|
|
import os
import re
import glob
import json
import logging
import datetime
import functools

import tornado.httpserver
import tornado.ioloop
import tornado.options
from tornado.options import define, options
import tornado.web
import tornado.websocket
from tornado import gen, escape, process

import utils
import backend
|
|
|
|
|
2015-04-25 15:46:46 +02:00
|
|
|
ENCODING = 'utf-8'
|
2015-04-17 20:31:50 +02:00
|
|
|
PROCESS_TIMEOUT = 60
|
|
|
|
|
2015-05-03 01:43:30 +02:00
|
|
|
API_VERSION = '1.0'
|
|
|
|
|
2015-04-25 15:46:46 +02:00
|
|
|
re_env_key = re.compile('[^A-Z_]+')
|
2015-04-26 01:26:00 +02:00
|
|
|
re_slashes = re.compile(r'//+')
|
2015-04-25 15:46:46 +02:00
|
|
|
|
2015-03-20 23:08:21 +01:00
|
|
|
|
2015-05-03 11:58:40 +02:00
|
|
|
def authenticated(method):
|
|
|
|
"""Decorator to handle authentication."""
|
|
|
|
original_wrapper = tornado.web.authenticated(method)
|
|
|
|
@tornado.web.functools.wraps(method)
|
|
|
|
def my_wrapper(self, *args, **kwargs):
|
|
|
|
# If no authentication was required from the command line or config file.
|
|
|
|
if not self.authentication:
|
|
|
|
return method(self, *args, **kwargs)
|
|
|
|
# un authenticated API calls gets redirected to /v1.0/[...]
|
|
|
|
if self.is_api() and not self.current_user:
|
|
|
|
self.redirect('/v%s%s' % (API_VERSION, self.get_login_url()))
|
|
|
|
return
|
|
|
|
return original_wrapper(self, *args, **kwargs)
|
|
|
|
return my_wrapper
|
|
|
|
|
|
|
|
|
2015-03-20 23:08:21 +01:00
|
|
|
class BaseHandler(tornado.web.RequestHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Base class for request handlers."""
|
2015-04-18 17:33:42 +02:00
|
|
|
# A property to access the first value of each argument.
|
|
|
|
arguments = property(lambda self: dict([(k, v[0])
|
|
|
|
for k, v in self.request.arguments.iteritems()]))
|
|
|
|
|
2015-04-06 17:19:20 +02:00
|
|
|
_bool_convert = {
|
|
|
|
'0': False,
|
|
|
|
'n': False,
|
2015-04-14 20:30:05 +02:00
|
|
|
'f': False,
|
2015-04-06 17:19:20 +02:00
|
|
|
'no': False,
|
|
|
|
'off': False,
|
2015-04-18 17:33:42 +02:00
|
|
|
'false': False,
|
|
|
|
'1': True,
|
|
|
|
'y': True,
|
|
|
|
't': True,
|
|
|
|
'on': True,
|
|
|
|
'yes': True,
|
|
|
|
'true': True
|
2015-04-06 17:19:20 +02:00
|
|
|
}
|
|
|
|
|
2015-05-03 01:43:30 +02:00
|
|
|
def is_api(self):
|
2015-05-03 11:58:40 +02:00
|
|
|
"""Return True if the path is from an API call."""
|
2015-05-03 01:43:30 +02:00
|
|
|
return self.request.path.startswith('/v%s' % API_VERSION)
|
|
|
|
|
2015-04-06 17:19:20 +02:00
|
|
|
def tobool(self, obj):
|
2015-05-03 11:58:40 +02:00
|
|
|
"""Convert some textual values to boolean."""
|
2015-04-06 17:19:20 +02:00
|
|
|
if isinstance(obj, (list, tuple)):
|
|
|
|
obj = obj[0]
|
|
|
|
if isinstance(obj, (str, unicode)):
|
|
|
|
obj = obj.lower()
|
2015-04-18 17:33:42 +02:00
|
|
|
return self._bool_convert.get(obj, obj)
|
|
|
|
|
2015-05-03 11:58:40 +02:00
|
|
|
def arguments_tobool(self):
|
|
|
|
"""Return a dictionary of arguments, converted to booleans where possible."""
|
2015-04-18 17:33:42 +02:00
|
|
|
return dict([(k, self.tobool(v)) for k, v in self.arguments.iteritems()])
|
2015-04-06 17:19:20 +02:00
|
|
|
|
2015-03-20 23:08:21 +01:00
|
|
|
def initialize(self, **kwargs):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Add every passed (key, value) as attributes of the instance."""
|
2015-03-20 23:08:21 +01:00
|
|
|
for key, value in kwargs.iteritems():
|
|
|
|
setattr(self, key, value)
|
|
|
|
|
2015-05-02 18:38:57 +02:00
|
|
|
def get_current_user(self):
|
|
|
|
"""Retrieve current user from the secure cookie."""
|
|
|
|
return self.get_secure_cookie("user")
|
|
|
|
|
|
|
|
def logout(self):
|
|
|
|
"""Remove the secure cookie used fro authentication."""
|
|
|
|
self.clear_cookie("user")
|
|
|
|
|
2015-03-20 23:08:21 +01:00
|
|
|
|
|
|
|
class RootHandler(BaseHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Handler for the / path."""
|
2015-03-15 15:47:04 +01:00
|
|
|
angular_app_path = os.path.join(os.path.dirname(__file__), "angular_app")
|
2015-03-22 08:58:25 +01:00
|
|
|
|
2015-03-14 13:05:04 +01:00
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2015-03-31 23:35:56 +02:00
|
|
|
def get(self, *args, **kwargs):
|
2015-03-22 08:58:25 +01:00
|
|
|
# serve the ./angular_app/index.html file
|
2015-03-15 15:47:04 +01:00
|
|
|
with open(self.angular_app_path + "/index.html", 'r') as fd:
|
|
|
|
self.write(fd.read())
|
2015-03-14 17:32:45 +01:00
|
|
|
|
2015-03-15 18:00:08 +01:00
|
|
|
|
2015-04-26 00:47:38 +02:00
|
|
|
# Keep track of WebSocket connections.
|
2015-04-26 01:26:00 +02:00
|
|
|
_ws_clients = {}
|
2015-04-26 00:47:38 +02:00
|
|
|
|
2015-03-21 20:32:39 +01:00
|
|
|
class CollectionHandler(BaseHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Base class for handlers that need to interact with the database backend.
|
|
|
|
|
|
|
|
Introduce basic CRUD operations."""
|
|
|
|
# set of documents we're managing (a collection in MongoDB or a table in a SQL database)
|
2015-03-21 20:32:39 +01:00
|
|
|
collection = None
|
2015-03-21 18:29:19 +01:00
|
|
|
|
2015-05-01 16:27:22 +02:00
|
|
|
# set of documents used to store incremental sequences
|
|
|
|
counters_collection = 'counters'
|
|
|
|
|
|
|
|
def get_next_seq(self, seq):
|
|
|
|
"""Increment and return the new value of a ever-incrementing counter.
|
|
|
|
|
|
|
|
:param seq: unique name of the sequence
|
|
|
|
:type seq: str
|
|
|
|
|
|
|
|
:return: the next value of the sequence
|
|
|
|
:rtype: int
|
|
|
|
"""
|
|
|
|
if not self.db.query(self.counters_collection, {'seq_name': seq}):
|
|
|
|
self.db.add(self.counters_collection, {'seq_name': seq, 'seq': 0})
|
|
|
|
merged, doc = self.db.update(self.counters_collection,
|
|
|
|
{'seq_name': seq},
|
|
|
|
{'seq': 1},
|
|
|
|
operation='increment')
|
|
|
|
return doc.get('seq', 0)
|
|
|
|
|
2015-04-14 23:44:55 +02:00
|
|
|
def _filter_results(self, results, params):
|
2015-04-15 00:12:35 +02:00
|
|
|
"""Filter a list using keys and values from a dictionary.
|
|
|
|
|
|
|
|
:param results: the list to be filtered
|
|
|
|
:type results: list
|
|
|
|
:param params: a dictionary of items that must all be present in an original list item to be included in the return
|
|
|
|
|
|
|
|
:return: list of items that have all the keys with the same values as params
|
|
|
|
:rtype: list"""
|
2015-04-14 23:44:55 +02:00
|
|
|
if not params:
|
|
|
|
return results
|
|
|
|
filtered = []
|
|
|
|
for result in results:
|
|
|
|
add = True
|
|
|
|
for key, value in params.iteritems():
|
|
|
|
if key not in result or result[key] != value:
|
|
|
|
add = False
|
|
|
|
break
|
|
|
|
if add:
|
|
|
|
filtered.append(result)
|
|
|
|
return filtered
|
|
|
|
|
2015-04-18 14:27:02 +02:00
|
|
|
def _dict2env(self, data):
|
|
|
|
"""Convert a dictionary into a form suitable to be passed as environment variables."""
|
|
|
|
ret = {}
|
|
|
|
for key, value in data.iteritems():
|
|
|
|
if isinstance(value, (list, tuple, dict)):
|
|
|
|
continue
|
|
|
|
try:
|
2015-04-25 15:46:46 +02:00
|
|
|
key = key.upper().encode('ascii', 'ignore')
|
|
|
|
key = re_env_key.sub('', key)
|
|
|
|
if not key:
|
|
|
|
continue
|
|
|
|
ret[key] = unicode(value).encode(ENCODING)
|
2015-04-18 14:27:02 +02:00
|
|
|
except:
|
|
|
|
continue
|
|
|
|
return ret
|
|
|
|
|
2015-03-14 17:32:45 +01:00
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2015-04-05 00:55:59 +02:00
|
|
|
def get(self, id_=None, resource=None, resource_id=None, **kwargs):
|
2015-03-31 23:35:56 +02:00
|
|
|
if resource:
|
2015-04-13 23:25:46 +02:00
|
|
|
# Handle access to sub-resources.
|
2015-04-04 14:56:41 +02:00
|
|
|
method = getattr(self, 'handle_get_%s' % resource, None)
|
2015-03-31 23:35:56 +02:00
|
|
|
if method and callable(method):
|
2015-04-05 22:16:11 +02:00
|
|
|
self.write(method(id_, resource_id, **kwargs))
|
|
|
|
return
|
2015-03-15 23:05:59 +01:00
|
|
|
if id_ is not None:
|
2015-03-22 08:58:25 +01:00
|
|
|
# read a single document
|
2015-03-21 20:32:39 +01:00
|
|
|
self.write(self.db.get(self.collection, id_))
|
2015-03-21 18:29:19 +01:00
|
|
|
else:
|
2015-03-22 08:58:25 +01:00
|
|
|
# return an object containing the list of all objects in the collection;
|
|
|
|
# e.g.: {'events': [{'_id': 'obj1-id, ...}, {'_id': 'obj2-id, ...}, ...]}
|
2015-03-22 17:08:25 +01:00
|
|
|
# Please, never return JSON lists that are not encapsulated into an object,
|
2015-03-22 08:58:25 +01:00
|
|
|
# to avoid XSS vulnerabilities.
|
2015-03-21 20:32:39 +01:00
|
|
|
self.write({self.collection: self.db.query(self.collection)})
|
2015-03-21 18:29:19 +01:00
|
|
|
|
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2015-04-05 00:55:59 +02:00
|
|
|
def post(self, id_=None, resource=None, resource_id=None, **kwargs):
|
2015-04-06 17:19:20 +02:00
|
|
|
data = escape.json_decode(self.request.body or '{}')
|
2015-04-05 00:55:59 +02:00
|
|
|
if resource:
|
2015-04-13 23:25:46 +02:00
|
|
|
# Handle access to sub-resources.
|
2015-04-05 00:55:59 +02:00
|
|
|
method = getattr(self, 'handle_%s_%s' % (self.request.method.lower(), resource), None)
|
|
|
|
if method and callable(method):
|
2015-04-05 22:16:11 +02:00
|
|
|
self.write(method(id_, resource_id, data, **kwargs))
|
|
|
|
return
|
2015-03-21 18:29:19 +01:00
|
|
|
if id_ is None:
|
|
|
|
newData = self.db.add(self.collection, data)
|
|
|
|
else:
|
2015-04-04 13:01:33 +02:00
|
|
|
merged, newData = self.db.update(self.collection, id_, data)
|
2015-03-21 18:29:19 +01:00
|
|
|
self.write(newData)
|
2015-03-14 11:12:57 +01:00
|
|
|
|
2015-03-22 17:08:25 +01:00
|
|
|
# PUT (update an existing document) is handled by the POST (create a new document) method
|
2015-03-21 20:32:39 +01:00
|
|
|
put = post
|
2015-03-15 18:00:08 +01:00
|
|
|
|
2015-03-22 17:08:25 +01:00
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2015-04-05 11:20:57 +02:00
|
|
|
def delete(self, id_=None, resource=None, resource_id=None, **kwargs):
|
|
|
|
if resource:
|
2015-04-13 23:25:46 +02:00
|
|
|
# Handle access to sub-resources.
|
2015-04-05 11:20:57 +02:00
|
|
|
method = getattr(self, 'handle_delete_%s' % resource, None)
|
|
|
|
if method and callable(method):
|
2015-04-05 22:16:11 +02:00
|
|
|
self.write(method(id_, resource_id, **kwargs))
|
|
|
|
return
|
2015-04-13 23:25:46 +02:00
|
|
|
if id_:
|
|
|
|
self.db.delete(self.collection, id_)
|
|
|
|
self.write({'success': True})
|
2015-03-22 17:08:25 +01:00
|
|
|
|
2015-04-18 15:01:30 +02:00
|
|
|
def on_timeout(self, cmd, pipe):
|
2015-04-18 12:53:08 +02:00
|
|
|
"""Kill a process that is taking too long to complete."""
|
2015-04-18 15:01:30 +02:00
|
|
|
logging.debug('cmd %s is taking too long: killing it' % ' '.join(cmd))
|
2015-04-18 12:53:08 +02:00
|
|
|
try:
|
|
|
|
pipe.proc.kill()
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
|
|
|
|
def on_exit(self, returncode, cmd, pipe):
|
|
|
|
"""Callback executed when a subprocess execution is over."""
|
|
|
|
self.ioloop.remove_timeout(self.timeout)
|
2015-04-18 15:01:30 +02:00
|
|
|
logging.debug('cmd: %s returncode: %d' % (' '.join(cmd), returncode))
|
2015-04-18 12:53:08 +02:00
|
|
|
|
|
|
|
@gen.coroutine
|
2015-04-18 14:27:02 +02:00
|
|
|
def run_subprocess(self, cmd, stdin_data=None, env=None):
|
2015-04-17 20:31:50 +02:00
|
|
|
"""Execute the given action.
|
|
|
|
|
|
|
|
:param cmd: the command to be run with its command line arguments
|
|
|
|
:type cmd: list
|
2015-04-18 12:53:08 +02:00
|
|
|
|
|
|
|
:param stdin_data: data to be sent over stdin
|
|
|
|
:type stdin_data: str
|
2015-04-18 14:27:02 +02:00
|
|
|
:param env: environment of the process
|
|
|
|
:type env: dict
|
2015-04-17 20:31:50 +02:00
|
|
|
"""
|
|
|
|
self.ioloop = tornado.ioloop.IOLoop.instance()
|
2015-04-18 12:53:08 +02:00
|
|
|
p = process.Subprocess(cmd, close_fds=True, stdin=process.Subprocess.STREAM,
|
2015-04-18 14:27:02 +02:00
|
|
|
stdout=process.Subprocess.STREAM, stderr=process.Subprocess.STREAM, env=env)
|
2015-04-18 12:53:08 +02:00
|
|
|
p.set_exit_callback(lambda returncode: self.on_exit(returncode, cmd, p))
|
2015-04-17 20:31:50 +02:00
|
|
|
self.timeout = self.ioloop.add_timeout(datetime.timedelta(seconds=PROCESS_TIMEOUT),
|
2015-04-18 15:01:30 +02:00
|
|
|
lambda: self.on_timeout(cmd, p))
|
2015-04-18 12:53:08 +02:00
|
|
|
yield gen.Task(p.stdin.write, stdin_data or '')
|
|
|
|
p.stdin.close()
|
|
|
|
out, err = yield [gen.Task(p.stdout.read_until_close),
|
|
|
|
gen.Task(p.stderr.read_until_close)]
|
2015-04-18 15:01:30 +02:00
|
|
|
logging.debug('cmd: %s' % ' '.join(cmd))
|
|
|
|
logging.debug('cmd stdout: %s' % out)
|
|
|
|
logging.debug('cmd strerr: %s' % err)
|
2015-04-18 12:53:08 +02:00
|
|
|
raise gen.Return((out, err))
|
2015-04-17 00:17:36 +02:00
|
|
|
|
|
|
|
@gen.coroutine
|
2015-04-18 14:27:02 +02:00
|
|
|
def run_triggers(self, action, stdin_data=None, env=None):
|
2015-04-17 20:31:50 +02:00
|
|
|
"""Asynchronously execute triggers for the given action.
|
|
|
|
|
|
|
|
:param action: action name; scripts in directory ./data/triggers/{action}.d will be run
|
|
|
|
:type action: str
|
2015-04-18 12:53:08 +02:00
|
|
|
:param stdin_data: a python dictionary that will be serialized in JSON and sent to the process over stdin
|
|
|
|
:type stdin_data: dict
|
2015-04-18 14:27:02 +02:00
|
|
|
:param env: environment of the process
|
|
|
|
:type stdin_data: dict
|
2015-04-17 20:31:50 +02:00
|
|
|
"""
|
2015-04-18 15:01:30 +02:00
|
|
|
logging.debug('running triggers for action "%s"' % action)
|
2015-04-18 12:53:08 +02:00
|
|
|
stdin_data = stdin_data or {}
|
|
|
|
try:
|
|
|
|
stdin_data = json.dumps(stdin_data)
|
|
|
|
except:
|
|
|
|
stdin_data = '{}'
|
2015-04-17 00:17:36 +02:00
|
|
|
for script in glob.glob(os.path.join(self.data_dir, 'triggers', '%s.d' % action, '*')):
|
|
|
|
if not (os.path.isfile(script) and os.access(script, os.X_OK)):
|
|
|
|
continue
|
2015-04-18 14:27:02 +02:00
|
|
|
out, err = yield gen.Task(self.run_subprocess, [script], stdin_data, env)
|
2015-03-22 08:58:25 +01:00
|
|
|
|
2015-04-26 00:47:38 +02:00
|
|
|
def build_ws_url(self, path, proto='ws', host=None):
|
|
|
|
"""Return a WebSocket url from a path."""
|
2015-04-26 11:49:59 +02:00
|
|
|
return 'ws://127.0.0.1:%s/ws/%s' % (self.listen_port + 1, path)
|
2015-04-26 00:47:38 +02:00
|
|
|
|
|
|
|
@gen.coroutine
|
2015-04-26 12:56:12 +02:00
|
|
|
def send_ws_message(self, path, message):
|
2015-04-26 00:47:38 +02:00
|
|
|
"""Send a WebSocket message to all the connected clients.
|
|
|
|
|
2015-04-26 12:56:12 +02:00
|
|
|
:param path: partial path used to build the WebSocket url
|
|
|
|
:type path: str
|
2015-04-26 00:47:38 +02:00
|
|
|
:param message: message to send
|
|
|
|
:type message: str
|
|
|
|
"""
|
2015-04-26 12:56:12 +02:00
|
|
|
ws = yield tornado.websocket.websocket_connect(self.build_ws_url(path))
|
2015-04-26 00:47:38 +02:00
|
|
|
ws.write_message(message)
|
|
|
|
ws.close()
|
|
|
|
|
2015-04-16 00:06:01 +02:00
|
|
|
|
2015-03-21 20:32:39 +01:00
|
|
|
class PersonsHandler(CollectionHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Handle requests for Persons."""
|
2015-03-21 20:32:39 +01:00
|
|
|
collection = 'persons'
|
2015-04-04 13:01:33 +02:00
|
|
|
object_id = 'person_id'
|
2015-03-31 23:35:56 +02:00
|
|
|
|
2015-04-05 00:55:59 +02:00
|
|
|
def handle_get_events(self, id_, resource_id=None, **kwargs):
|
2015-04-13 23:25:46 +02:00
|
|
|
# Get a list of events attended by this person.
|
|
|
|
# Inside the data of each event, a 'person_data' dictionary is
|
|
|
|
# created, duplicating the entry for the current person (so that
|
|
|
|
# there's no need to parse the 'persons' list on the client).
|
|
|
|
#
|
|
|
|
# If resource_id is given, only the specified event is considered.
|
|
|
|
#
|
|
|
|
# If the 'all' parameter is given, every event (also unattended ones) is returned.
|
2015-04-06 17:19:20 +02:00
|
|
|
args = self.request.arguments
|
|
|
|
query = {}
|
|
|
|
if id_ and not self.tobool(args.get('all')):
|
|
|
|
query = {'persons.person_id': id_}
|
2015-04-05 00:55:59 +02:00
|
|
|
if resource_id:
|
|
|
|
query['_id'] = resource_id
|
|
|
|
|
|
|
|
events = self.db.query('events', query)
|
2015-04-04 14:15:52 +02:00
|
|
|
for event in events:
|
|
|
|
person_data = {}
|
2015-04-04 14:56:41 +02:00
|
|
|
for persons in event.get('persons') or []:
|
|
|
|
if str(persons.get('person_id')) == id_:
|
|
|
|
person_data = persons
|
2015-04-04 14:15:52 +02:00
|
|
|
break
|
|
|
|
event['person_data'] = person_data
|
2015-04-13 23:25:46 +02:00
|
|
|
if resource_id and events:
|
|
|
|
return events[0]
|
2015-04-04 14:15:52 +02:00
|
|
|
return {'events': events}
|
|
|
|
|
2015-03-22 08:58:25 +01:00
|
|
|
|
2015-03-21 20:32:39 +01:00
|
|
|
class EventsHandler(CollectionHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Handle requests for Events."""
|
2015-03-21 20:32:39 +01:00
|
|
|
collection = 'events'
|
2015-04-04 13:01:33 +02:00
|
|
|
object_id = 'event_id'
|
2015-03-31 23:35:56 +02:00
|
|
|
|
2015-04-22 23:34:53 +02:00
|
|
|
def _get_person_data(self, person_id_or_query, persons):
|
|
|
|
"""Filter a list of persons returning the first item with a given person_id
|
|
|
|
or which set of keys specified in a dictionary match their respective values."""
|
2015-04-18 12:53:08 +02:00
|
|
|
for person in persons:
|
2015-04-22 23:34:53 +02:00
|
|
|
if isinstance(person_id_or_query, dict):
|
|
|
|
if all(person.get(k) == v for k, v in person_id_or_query.iteritems()):
|
|
|
|
return person
|
|
|
|
else:
|
|
|
|
if str(person.get('person_id')) == person_id_or_query:
|
|
|
|
return person
|
2015-04-18 12:53:08 +02:00
|
|
|
return {}
|
|
|
|
|
2015-04-05 00:55:59 +02:00
|
|
|
def handle_get_persons(self, id_, resource_id=None):
|
2015-04-13 23:25:46 +02:00
|
|
|
# Return every person registered at this event, or the information
|
|
|
|
# about a specific person.
|
2015-04-05 00:55:59 +02:00
|
|
|
query = {'_id': id_}
|
|
|
|
event = self.db.query('events', query)[0]
|
|
|
|
if resource_id:
|
2015-04-18 12:53:08 +02:00
|
|
|
return {'person': self._get_person_data(resource_id, event.get('persons') or [])}
|
2015-04-14 23:44:55 +02:00
|
|
|
persons = self._filter_results(event.get('persons') or [], self.arguments)
|
|
|
|
return {'persons': persons}
|
2015-04-05 00:55:59 +02:00
|
|
|
|
2015-04-06 17:19:20 +02:00
|
|
|
def handle_post_persons(self, id_, person_id, data):
|
2015-04-13 23:25:46 +02:00
|
|
|
# Add a person to the list of persons registered at this event.
|
2015-05-01 16:27:22 +02:00
|
|
|
data['seq'] = self.get_next_seq('event_%s_persons' % id_)
|
|
|
|
data['seq_hex'] = '%06X' % data['seq']
|
2015-04-06 21:08:52 +02:00
|
|
|
doc = self.db.query('events',
|
|
|
|
{'_id': id_, 'persons.person_id': person_id})
|
|
|
|
if '_id' in data:
|
|
|
|
del data['_id']
|
|
|
|
if not doc:
|
2015-04-06 17:59:31 +02:00
|
|
|
merged, doc = self.db.update('events',
|
|
|
|
{'_id': id_},
|
|
|
|
{'persons': data},
|
2015-05-01 14:51:11 +02:00
|
|
|
operation='appendUnique',
|
2015-04-06 17:59:31 +02:00
|
|
|
create=False)
|
2015-04-06 17:19:20 +02:00
|
|
|
return {'event': doc}
|
|
|
|
|
2015-04-05 00:55:59 +02:00
|
|
|
def handle_put_persons(self, id_, person_id, data):
|
2015-04-13 23:25:46 +02:00
|
|
|
# Update an existing entry for a person registered at this event.
|
2015-04-15 00:12:35 +02:00
|
|
|
query = dict([('persons.%s' % k, v) for k, v in self.arguments.iteritems()])
|
|
|
|
query['_id'] = id_
|
|
|
|
if person_id is not None:
|
|
|
|
query['persons.person_id'] = person_id
|
2015-04-18 12:53:08 +02:00
|
|
|
old_person_data = {}
|
|
|
|
current_event = self.db.query(self.collection, query)
|
|
|
|
if current_event:
|
|
|
|
current_event = current_event[0]
|
2015-04-22 23:34:53 +02:00
|
|
|
else:
|
|
|
|
current_event = {}
|
|
|
|
old_person_data = self._get_person_data(person_id or self.arguments,
|
|
|
|
current_event.get('persons') or [])
|
2015-04-15 00:12:35 +02:00
|
|
|
merged, doc = self.db.update('events', query,
|
2015-04-14 22:49:16 +02:00
|
|
|
data, updateList='persons', create=False)
|
2015-04-22 23:34:53 +02:00
|
|
|
new_person_data = self._get_person_data(person_id or self.arguments,
|
|
|
|
doc.get('persons') or [])
|
2015-04-18 14:27:02 +02:00
|
|
|
env = self._dict2env(new_person_data)
|
2015-04-26 09:36:45 +02:00
|
|
|
if person_id is None:
|
2015-05-01 20:40:23 +02:00
|
|
|
person_id = str(new_person_data.get('person_id'))
|
2015-04-18 14:27:02 +02:00
|
|
|
env.update({'PERSON_ID': person_id, 'EVENT_ID': id_, 'EVENT_TITLE': doc.get('title', '')})
|
|
|
|
stdin_data = {'old': old_person_data,
|
2015-04-18 12:53:08 +02:00
|
|
|
'new': new_person_data,
|
|
|
|
'event': doc,
|
|
|
|
'merged': merged
|
2015-04-18 14:27:02 +02:00
|
|
|
}
|
|
|
|
self.run_triggers('update_person_in_event', stdin_data=stdin_data, env=env)
|
2015-04-26 00:47:38 +02:00
|
|
|
if old_person_data and old_person_data.get('attended') != new_person_data.get('attended'):
|
|
|
|
if new_person_data.get('attended'):
|
|
|
|
self.run_triggers('attends', stdin_data=stdin_data, env=env)
|
2015-04-26 12:56:12 +02:00
|
|
|
|
|
|
|
if old_person_data != new_person_data:
|
|
|
|
self.send_ws_message('event/%s/updates' % id_,
|
|
|
|
json.dumps(doc.get('persons') or []))
|
2015-04-05 00:55:59 +02:00
|
|
|
return {'event': doc}
|
|
|
|
|
2015-04-05 11:20:57 +02:00
|
|
|
def handle_delete_persons(self, id_, person_id):
|
2015-04-13 23:25:46 +02:00
|
|
|
# Remove a specific person from the list of persons registered at this event.
|
2015-04-05 11:20:57 +02:00
|
|
|
merged, doc = self.db.update('events',
|
|
|
|
{'_id': id_},
|
|
|
|
{'persons': {'person_id': person_id}},
|
2015-04-14 20:27:45 +02:00
|
|
|
operation='delete',
|
2015-04-05 11:20:57 +02:00
|
|
|
create=False)
|
|
|
|
return {'event': doc}
|
|
|
|
|
2015-03-31 23:35:56 +02:00
|
|
|
|
2015-03-30 21:39:12 +02:00
|
|
|
class EbCSVImportPersonsHandler(BaseHandler):
|
|
|
|
"""Importer for CSV files exported from eventbrite."""
|
2015-03-29 15:05:01 +02:00
|
|
|
csvRemap = {
|
|
|
|
'Nome evento': 'event_title',
|
|
|
|
'ID evento': 'event_id',
|
|
|
|
'N. codice a barre': 'ebqrcode',
|
|
|
|
'Cognome acquirente': 'surname',
|
|
|
|
'Nome acquirente': 'name',
|
|
|
|
'E-mail acquirente': 'email',
|
2015-04-04 13:01:33 +02:00
|
|
|
'Cognome': 'surname',
|
|
|
|
'Nome': 'name',
|
|
|
|
'E-mail': 'email',
|
2015-04-04 14:56:41 +02:00
|
|
|
'Indirizzo e-mail': 'email',
|
2015-03-29 15:05:01 +02:00
|
|
|
'Tipologia biglietto': 'ticket_kind',
|
|
|
|
'Data partecipazione': 'attending_datetime',
|
|
|
|
'Data check-in': 'checkin_datetime',
|
|
|
|
'Ordine n.': 'order_nr',
|
2015-04-04 13:01:33 +02:00
|
|
|
'ID ordine': 'order_nr',
|
2015-04-25 10:04:47 +02:00
|
|
|
'Titolo professionale': 'job_title',
|
2015-04-18 18:26:50 +02:00
|
|
|
'Azienda': 'company',
|
|
|
|
'Prefisso': 'name_title',
|
2015-04-04 13:01:33 +02:00
|
|
|
'Prefisso (Sig., Sig.ra, ecc.)': 'name_title',
|
2015-04-25 10:04:47 +02:00
|
|
|
|
|
|
|
'Order #': 'order_nr',
|
|
|
|
'Prefix': 'name_title',
|
|
|
|
'First Name': 'name',
|
|
|
|
'Last Name': 'surname',
|
|
|
|
'Suffix': 'name_suffix',
|
|
|
|
'Email': 'email',
|
|
|
|
'Attendee #': 'attendee_nr',
|
|
|
|
'Barcode #': 'ebqrcode',
|
|
|
|
'Company': 'company',
|
2015-03-29 15:05:01 +02:00
|
|
|
}
|
2015-04-18 18:26:50 +02:00
|
|
|
# Only these information are stored in the person collection.
|
2015-04-25 12:37:59 +02:00
|
|
|
keepPersonData = ('name', 'surname', 'email', 'name_title', 'name_suffix',
|
|
|
|
'company', 'job_title')
|
2015-04-04 13:01:33 +02:00
|
|
|
|
2015-03-29 15:05:01 +02:00
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2015-03-29 15:05:01 +02:00
|
|
|
def post(self, **kwargs):
|
2015-05-03 11:58:40 +02:00
|
|
|
# import a CSV list of persons
|
2015-05-01 14:51:11 +02:00
|
|
|
event_handler = EventsHandler(self.application, self.request)
|
|
|
|
event_handler.db = self.db
|
2015-03-29 15:50:36 +02:00
|
|
|
targetEvent = None
|
|
|
|
try:
|
|
|
|
targetEvent = self.get_body_argument('targetEvent')
|
|
|
|
except:
|
|
|
|
pass
|
2015-04-04 13:01:33 +02:00
|
|
|
reply = dict(total=0, valid=0, merged=0, new_in_event=0)
|
2015-03-29 15:05:01 +02:00
|
|
|
for fieldname, contents in self.request.files.iteritems():
|
|
|
|
for content in contents:
|
|
|
|
filename = content['filename']
|
2015-03-30 21:31:09 +02:00
|
|
|
parseStats, persons = utils.csvParse(content['body'], remap=self.csvRemap)
|
2015-03-29 15:05:01 +02:00
|
|
|
reply['total'] += parseStats['total']
|
|
|
|
reply['valid'] += parseStats['valid']
|
2015-03-29 23:47:59 +02:00
|
|
|
for person in persons:
|
2015-04-04 13:01:33 +02:00
|
|
|
person_data = dict([(k, person[k]) for k in self.keepPersonData
|
|
|
|
if k in person])
|
2015-04-22 23:34:53 +02:00
|
|
|
merged, stored_person = self.db.update('persons',
|
2015-04-25 12:37:59 +02:00
|
|
|
[('email', 'name', 'surname')],
|
2015-04-04 13:01:33 +02:00
|
|
|
person_data)
|
2015-03-30 22:31:16 +02:00
|
|
|
if merged:
|
2015-03-29 23:47:59 +02:00
|
|
|
reply['merged'] += 1
|
2015-04-22 23:34:53 +02:00
|
|
|
if targetEvent and stored_person:
|
2015-04-04 17:26:00 +02:00
|
|
|
event_id = targetEvent
|
2015-04-22 23:34:53 +02:00
|
|
|
person_id = stored_person['_id']
|
2015-03-30 22:31:16 +02:00
|
|
|
registered_data = {
|
2015-04-04 13:01:33 +02:00
|
|
|
'person_id': person_id,
|
|
|
|
'attended': False,
|
2015-03-30 22:31:16 +02:00
|
|
|
'from_file': filename}
|
2015-04-04 13:01:33 +02:00
|
|
|
person.update(registered_data)
|
|
|
|
if not self.db.query('events',
|
2015-04-04 14:56:41 +02:00
|
|
|
{'_id': event_id, 'persons.person_id': person_id}):
|
2015-05-01 14:51:11 +02:00
|
|
|
event_handler.handle_post_persons(event_id, person_id, person)
|
2015-04-04 13:01:33 +02:00
|
|
|
reply['new_in_event'] += 1
|
2015-03-29 15:05:01 +02:00
|
|
|
self.write(reply)
|
|
|
|
|
|
|
|
|
2015-04-18 17:33:42 +02:00
|
|
|
class SettingsHandler(BaseHandler):
|
|
|
|
"""Handle requests for Settings."""
|
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2015-04-18 17:33:42 +02:00
|
|
|
def get(self, **kwds):
|
2015-05-03 11:58:40 +02:00
|
|
|
query = self.arguments_tobool()
|
2015-04-18 17:33:42 +02:00
|
|
|
settings = self.db.query('settings', query)
|
|
|
|
self.write({'settings': settings})
|
|
|
|
|
|
|
|
|
2015-04-26 00:47:38 +02:00
|
|
|
class WebSocketEventUpdatesHandler(tornado.websocket.WebSocketHandler):
|
2015-05-03 11:58:40 +02:00
|
|
|
"""Manage websockets."""
|
2015-04-26 01:26:00 +02:00
|
|
|
def _clean_url(self, url):
|
|
|
|
return re_slashes.sub('/', url)
|
|
|
|
|
2015-04-26 00:47:38 +02:00
|
|
|
def open(self, event_id, *args, **kwds):
|
|
|
|
logging.debug('WebSocketEventUpdatesHandler.on_open event_id:%s' % event_id)
|
2015-04-26 01:26:00 +02:00
|
|
|
|
|
|
|
_ws_clients.setdefault(self._clean_url(self.request.uri), set()).add(self)
|
2015-04-26 00:47:38 +02:00
|
|
|
logging.debug('WebSocketEventUpdatesHandler.on_open %s clients connected' % len(_ws_clients))
|
|
|
|
|
|
|
|
def on_message(self, message):
|
|
|
|
logging.debug('WebSocketEventUpdatesHandler.on_message')
|
|
|
|
count = 0
|
2015-04-26 01:26:00 +02:00
|
|
|
for client in _ws_clients.get(self._clean_url(self.request.uri), []):
|
2015-04-26 00:47:38 +02:00
|
|
|
if client == self:
|
|
|
|
continue
|
|
|
|
client.write_message(message)
|
|
|
|
count += 1
|
|
|
|
logging.debug('WebSocketEventUpdatesHandler.on_message sent message to %d clients' % count)
|
|
|
|
|
|
|
|
def on_close(self):
|
|
|
|
logging.debug('WebSocketEventUpdatesHandler.on_close')
|
|
|
|
try:
|
2015-04-26 01:26:00 +02:00
|
|
|
if self in _ws_clients.get(self._clean_url(self.request.uri), []):
|
|
|
|
_ws_clients[self._clean_url(self.request.uri)].remove(self)
|
2015-04-26 00:47:38 +02:00
|
|
|
except Exception, e:
|
|
|
|
logging.warn('WebSocketEventUpdatesHandler.on_close error closing websocket: %s', str(e))
|
|
|
|
|
|
|
|
|
2015-05-02 17:39:59 +02:00
|
|
|
class LoginHandler(RootHandler):
|
|
|
|
"""Handle user authentication requests."""
|
2015-05-02 19:26:23 +02:00
|
|
|
re_split_salt = re.compile(r'\$(?P<salt>.+)\$(?P<hash>.+)')
|
|
|
|
|
2015-05-02 17:39:59 +02:00
|
|
|
@gen.coroutine
|
|
|
|
def get(self, **kwds):
|
2015-05-03 11:58:40 +02:00
|
|
|
# show the login page
|
2015-05-03 01:58:09 +02:00
|
|
|
if self.is_api():
|
|
|
|
self.set_status(401)
|
2015-05-03 11:58:40 +02:00
|
|
|
self.write({'error': 'authentication required',
|
|
|
|
'message': 'please provide username and password'})
|
2015-05-03 01:58:09 +02:00
|
|
|
else:
|
|
|
|
with open(self.angular_app_path + "/login.html", 'r') as fd:
|
|
|
|
self.write(fd.read())
|
2015-05-02 17:39:59 +02:00
|
|
|
|
2015-05-02 19:26:23 +02:00
|
|
|
def _authorize(self, username, password):
|
2015-05-03 11:58:40 +02:00
|
|
|
"""Return True is this username/password is valid."""
|
2015-05-02 19:26:23 +02:00
|
|
|
res = self.db.query('users', {'username': username})
|
|
|
|
if not res:
|
|
|
|
return False
|
|
|
|
user = res[0]
|
|
|
|
db_password = user.get('password') or ''
|
|
|
|
if not db_password:
|
|
|
|
return False
|
|
|
|
match = self.re_split_salt.match(db_password)
|
|
|
|
if not match:
|
|
|
|
return False
|
|
|
|
salt = match.group('salt')
|
|
|
|
if utils.hash_password(password, salt=salt) == db_password:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2015-05-02 17:39:59 +02:00
|
|
|
@gen.coroutine
|
|
|
|
def post(self):
|
2015-05-03 11:58:40 +02:00
|
|
|
# authenticate a user
|
2015-05-02 17:39:59 +02:00
|
|
|
username = self.get_body_argument('username')
|
|
|
|
password = self.get_body_argument('password')
|
2015-05-02 19:26:23 +02:00
|
|
|
if self._authorize(username, password):
|
2015-05-03 01:25:33 +02:00
|
|
|
logging.info('successful login for user %s' % username)
|
2015-05-02 18:38:57 +02:00
|
|
|
self.set_secure_cookie("user", username)
|
2015-05-03 01:43:30 +02:00
|
|
|
if self.is_api():
|
|
|
|
self.write({'error': None, 'message': 'successful login'})
|
|
|
|
else:
|
|
|
|
self.redirect('/')
|
2015-05-03 01:25:33 +02:00
|
|
|
return
|
|
|
|
logging.info('login failed for user %s' % username)
|
2015-05-03 01:43:30 +02:00
|
|
|
if self.is_api():
|
2015-05-03 01:58:09 +02:00
|
|
|
self.set_status(401)
|
|
|
|
self.write({'error': 'authentication failed', 'message': 'wrong username and password'})
|
2015-05-03 01:43:30 +02:00
|
|
|
else:
|
|
|
|
self.redirect('/login?failed=1')
|
2015-05-02 17:39:59 +02:00
|
|
|
|
|
|
|
|
2015-05-02 18:38:57 +02:00
|
|
|
class LogoutHandler(RootHandler):
    """Handle user logout requests."""
    @gen.coroutine
    def get(self, **kwds):
        # Drop the session cookie, then send the client back to the
        # appropriate login page (versioned path for API clients).
        logging.info('logout')
        self.logout()
        destination = '/v%s/login' % API_VERSION if self.is_api() else '/login'
        self.redirect(destination)
|
2015-05-02 18:38:57 +02:00
|
|
|
|
|
|
|
|
2015-03-22 08:58:25 +01:00
|
|
|
def run():
    """Run the Tornado web application.

    Parses command line options (optionally merged from a --config file),
    connects to MongoDB, seeds a default admin user and a per-installation
    cookie secret, then starts the main HTTP(S) server and a localhost-only
    websocket helper server on port+1.
    """
    # command line arguments; can also be written in a configuration file,
    # specified with the --config argument.
    define("port", default=5242, help="run on the given port", type=int)
    define("data_dir", default=os.path.join(os.path.dirname(__file__), "data"),
            help="specify the directory used to store the data")
    define("ssl_cert", default=os.path.join(os.path.dirname(__file__), 'ssl', 'eventman_cert.pem'),
            help="specify the SSL certificate to use for secure connections")
    define("ssl_key", default=os.path.join(os.path.dirname(__file__), 'ssl', 'eventman_key.pem'),
            help="specify the SSL private key to use for secure connections")
    define("mongo_url", default=None,
            help="URL to MongoDB server", type=str)
    define("db_name", default='eventman',
            help="Name of the MongoDB database to use", type=str)
    define("authentication", default=True, help="if set to false, no authentication is required")
    define("debug", default=False, help="run in debug mode")
    define("config", help="read configuration file",
            callback=lambda path: tornado.options.parse_config_file(path, final=False))
    tornado.options.parse_command_line()

    if options.debug:
        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)

    # database backend connector
    db_connector = backend.EventManDB(url=options.mongo_url, dbName=options.db_name)
    init_params = dict(db=db_connector, data_dir=options.data_dir, listen_port=options.port,
            authentication=options.authentication)

    # If not present, we store a user 'admin' with password 'eventman' into the database.
    if not db_connector.query('users', {'username': 'admin'}):
        db_connector.add('users',
                {'username': 'admin', 'password': utils.hash_password('eventman')})

    # If present, use the cookie_secret stored into the database.
    cookie_secret = db_connector.query('settings', {'setting': 'server_cookie_secret'})
    if cookie_secret:
        cookie_secret = cookie_secret[0]['cookie_secret']
    else:
        # the salt guarantees its uniqueness
        cookie_secret = utils.hash_password('__COOKIE_SECRET__')
        db_connector.add('settings',
                {'setting': 'server_cookie_secret', 'cookie_secret': cookie_secret})

    _ws_handler = (r"/ws/+event/+(?P<event_id>\w+)/+updates/?", WebSocketEventUpdatesHandler)
    _persons_path = r"/persons/?(?P<id_>\w+)?/?(?P<resource>\w+)?/?(?P<resource_id>\w+)?"
    _events_path = r"/events/?(?P<id_>\w+)?/?(?P<resource>\w+)?/?(?P<resource_id>\w+)?"
    application = tornado.web.Application([
            (_persons_path, PersonsHandler, init_params),
            (r'/v%s%s' % (API_VERSION, _persons_path), PersonsHandler, init_params),
            (_events_path, EventsHandler, init_params),
            (r'/v%s%s' % (API_VERSION, _events_path), EventsHandler, init_params),
            (r"/(?:index.html)?", RootHandler, init_params),
            (r"/ebcsvpersons", EbCSVImportPersonsHandler, init_params),
            (r"/settings", SettingsHandler, init_params),
            _ws_handler,
            (r'/login', LoginHandler, init_params),
            (r'/v%s/login' % API_VERSION, LoginHandler, init_params),
            (r'/logout', LogoutHandler),
            (r'/v%s/logout' % API_VERSION, LogoutHandler),
            (r'/(.*)', tornado.web.StaticFileHandler, {"path": "angular_app"})
        ],
        template_path=os.path.join(os.path.dirname(__file__), "templates"),
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        # FIX: use the per-installation secret computed above; previously the
        # hard-coded placeholder '__COOKIE_SECRET__' was passed here, so secure
        # cookies were signed with a publicly-known constant.
        cookie_secret=cookie_secret,
        login_url='/login',
        debug=options.debug)
    # Serve over SSL only if both the key and certificate files exist.
    ssl_options = {}
    if os.path.isfile(options.ssl_key) and os.path.isfile(options.ssl_cert):
        ssl_options = dict(certfile=options.ssl_cert, keyfile=options.ssl_key)
    http_server = tornado.httpserver.HTTPServer(application, ssl_options=ssl_options or None)
    http_server.listen(options.port)

    # Also listen on options.port+1 for our local ws connection.
    ws_application = tornado.web.Application([
            _ws_handler,
        ],
        debug=options.debug)
    ws_http_server = tornado.httpserver.HTTPServer(ws_application)
    ws_http_server.listen(options.port+1, address='127.0.0.1')
    tornado.ioloop.IOLoop.instance().start()
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Script entry point: start the Event Man(ager) Tornado server.
    run()
|
2015-03-14 11:12:57 +01:00
|
|
|
|