#!/usr/bin/env python
"""Event Man(ager)

Your friendly manager of attendees at an event.

Copyright 2015-2016 Davide Alberani <da@erlug.linux.it>
                    RaspiBO <info@raspibo.org>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
|
|
|
|
|
|
|
|
import datetime
import functools
import glob
import json
import logging
import os
import random
import re
import string
import time

import tornado.httpserver
import tornado.ioloop
import tornado.options
from tornado.options import define, options
import tornado.web
import tornado.websocket
from tornado import gen, escape, process

import utils
import backend
|
|
|
|
|
2015-04-25 15:46:46 +02:00
|
|
|
ENCODING = 'utf-8'
|
2015-04-17 20:31:50 +02:00
|
|
|
PROCESS_TIMEOUT = 60
|
|
|
|
|
2015-05-03 01:43:30 +02:00
|
|
|
API_VERSION = '1.0'
|
|
|
|
|
2015-04-25 15:46:46 +02:00
|
|
|
re_env_key = re.compile('[^A-Z_]+')
|
2015-04-26 01:26:00 +02:00
|
|
|
re_slashes = re.compile(r'//+')
|
2015-04-25 15:46:46 +02:00
|
|
|
|
2015-03-20 23:08:21 +01:00
|
|
|
|
2015-05-03 11:58:40 +02:00
|
|
|
def authenticated(method):
|
|
|
|
"""Decorator to handle authentication."""
|
|
|
|
original_wrapper = tornado.web.authenticated(method)
|
|
|
|
@tornado.web.functools.wraps(method)
|
|
|
|
def my_wrapper(self, *args, **kwargs):
|
|
|
|
# If no authentication was required from the command line or config file.
|
|
|
|
if not self.authentication:
|
|
|
|
return method(self, *args, **kwargs)
|
2016-05-29 11:32:57 +02:00
|
|
|
# unauthenticated API calls gets redirected to /v1.0/[...]
|
2016-06-12 16:04:46 +02:00
|
|
|
if self.is_api() and not self.current_user:
|
2015-05-03 11:58:40 +02:00
|
|
|
self.redirect('/v%s%s' % (API_VERSION, self.get_login_url()))
|
|
|
|
return
|
|
|
|
return original_wrapper(self, *args, **kwargs)
|
|
|
|
return my_wrapper
|
|
|
|
|
|
|
|
|
2016-06-12 23:44:48 +02:00
|
|
|
class BaseException(Exception):
|
|
|
|
"""Base class for EventMan custom exceptions.
|
|
|
|
|
|
|
|
:param message: text message
|
|
|
|
:type message: str
|
|
|
|
:param status: numeric http status code
|
|
|
|
:type status: int"""
|
|
|
|
def __init__(self, message, status=400):
|
|
|
|
super(BaseException, self).__init__(message)
|
|
|
|
self.message = message
|
|
|
|
self.status = status
|
|
|
|
|
|
|
|
|
|
|
|
class InputException(BaseException):
|
|
|
|
"""Exception raised by errors in input handling."""
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
2015-03-20 23:08:21 +01:00
|
|
|
class BaseHandler(tornado.web.RequestHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Base class for request handlers."""
|
2016-06-06 21:44:04 +02:00
|
|
|
permissions = {
|
|
|
|
'event|read': True,
|
2016-06-09 23:13:56 +02:00
|
|
|
'event:tickets|all': True,
|
2016-06-11 16:14:28 +02:00
|
|
|
'event:tickets-all|create': True,
|
2016-06-11 17:56:23 +02:00
|
|
|
'events|read': True,
|
2016-06-12 23:44:48 +02:00
|
|
|
'persons|create': True,
|
|
|
|
'users|create': True
|
2016-06-06 21:44:04 +02:00
|
|
|
}
|
2016-05-31 22:26:38 +02:00
|
|
|
|
2016-06-27 18:19:37 +02:00
|
|
|
# Cache currently connected users.
|
2016-06-13 21:17:01 +02:00
|
|
|
_users_cache = {}
|
|
|
|
|
2015-04-18 17:33:42 +02:00
|
|
|
# A property to access the first value of each argument.
|
|
|
|
arguments = property(lambda self: dict([(k, v[0])
|
|
|
|
for k, v in self.request.arguments.iteritems()]))
|
|
|
|
|
2016-04-24 16:03:49 +02:00
|
|
|
# A property to access both the UUID and the clean arguments.
|
|
|
|
@property
|
|
|
|
def uuid_arguments(self):
|
|
|
|
uuid = None
|
|
|
|
arguments = self.arguments
|
|
|
|
if 'uuid' in arguments:
|
|
|
|
uuid = arguments['uuid']
|
|
|
|
del arguments['uuid']
|
|
|
|
return uuid, arguments
|
|
|
|
|
2015-04-06 17:19:20 +02:00
|
|
|
_bool_convert = {
|
|
|
|
'0': False,
|
|
|
|
'n': False,
|
2015-04-14 20:30:05 +02:00
|
|
|
'f': False,
|
2015-04-06 17:19:20 +02:00
|
|
|
'no': False,
|
|
|
|
'off': False,
|
2015-04-18 17:33:42 +02:00
|
|
|
'false': False,
|
|
|
|
'1': True,
|
|
|
|
'y': True,
|
|
|
|
't': True,
|
|
|
|
'on': True,
|
|
|
|
'yes': True,
|
|
|
|
'true': True
|
2015-04-06 17:19:20 +02:00
|
|
|
}
|
|
|
|
|
2016-06-12 23:44:48 +02:00
|
|
|
def write_error(self, status_code, **kwargs):
|
|
|
|
"""Default error handler."""
|
|
|
|
if isinstance(kwargs.get('exc_info', (None, None))[1], BaseException):
|
|
|
|
exc = kwargs['exc_info'][1]
|
|
|
|
status_code = exc.status
|
|
|
|
message = exc.message
|
|
|
|
else:
|
|
|
|
message = 'internal error'
|
|
|
|
self.build_error(message, status=status_code)
|
|
|
|
|
2015-05-03 01:43:30 +02:00
|
|
|
def is_api(self):
|
2015-05-03 11:58:40 +02:00
|
|
|
"""Return True if the path is from an API call."""
|
2015-05-03 01:43:30 +02:00
|
|
|
return self.request.path.startswith('/v%s' % API_VERSION)
|
|
|
|
|
2015-04-06 17:19:20 +02:00
|
|
|
def tobool(self, obj):
|
2015-05-03 11:58:40 +02:00
|
|
|
"""Convert some textual values to boolean."""
|
2015-04-06 17:19:20 +02:00
|
|
|
if isinstance(obj, (list, tuple)):
|
|
|
|
obj = obj[0]
|
|
|
|
if isinstance(obj, (str, unicode)):
|
|
|
|
obj = obj.lower()
|
2015-04-18 17:33:42 +02:00
|
|
|
return self._bool_convert.get(obj, obj)
|
|
|
|
|
2015-05-03 11:58:40 +02:00
|
|
|
def arguments_tobool(self):
|
|
|
|
"""Return a dictionary of arguments, converted to booleans where possible."""
|
2015-04-18 17:33:42 +02:00
|
|
|
return dict([(k, self.tobool(v)) for k, v in self.arguments.iteritems()])
|
2015-04-06 17:19:20 +02:00
|
|
|
|
2015-03-20 23:08:21 +01:00
|
|
|
def initialize(self, **kwargs):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Add every passed (key, value) as attributes of the instance."""
|
2015-03-20 23:08:21 +01:00
|
|
|
for key, value in kwargs.iteritems():
|
|
|
|
setattr(self, key, value)
|
|
|
|
|
2016-05-30 23:14:56 +02:00
|
|
|
@property
|
|
|
|
def current_user(self):
|
2016-06-11 17:56:23 +02:00
|
|
|
"""Retrieve current user name from the secure cookie."""
|
2015-05-02 18:38:57 +02:00
|
|
|
return self.get_secure_cookie("user")
|
|
|
|
|
2016-05-30 23:14:56 +02:00
|
|
|
@property
|
|
|
|
def current_user_info(self):
|
2016-06-11 16:14:28 +02:00
|
|
|
"""Information about the current user, including their permissions."""
|
2016-05-30 23:14:56 +02:00
|
|
|
current_user = self.current_user
|
2016-06-13 21:17:01 +02:00
|
|
|
if current_user in self._users_cache:
|
|
|
|
return self._users_cache[current_user]
|
2016-06-13 22:44:57 +02:00
|
|
|
permissions = set([k for (k, v) in self.permissions.iteritems() if v is True])
|
|
|
|
user_info = {'permissions': permissions}
|
2016-05-29 14:06:34 +02:00
|
|
|
if current_user:
|
|
|
|
user_info['username'] = current_user
|
|
|
|
res = self.db.query('users', {'username': current_user})
|
|
|
|
if res:
|
|
|
|
user = res[0]
|
2016-06-13 22:44:57 +02:00
|
|
|
user_info = user
|
|
|
|
permissions.update(set(user.get('permissions') or []))
|
|
|
|
user_info['permissions'] = permissions
|
2016-06-13 21:17:01 +02:00
|
|
|
self._users_cache[current_user] = user_info
|
2016-06-06 21:44:04 +02:00
|
|
|
return user_info
|
2016-05-29 14:06:34 +02:00
|
|
|
|
|
|
|
def has_permission(self, permission):
|
|
|
|
"""Check permissions of the current user.
|
|
|
|
|
|
|
|
:param permission: the permission to check
|
|
|
|
:type permission: str
|
|
|
|
|
|
|
|
:returns: True if the user is allowed to perform the action or False
|
|
|
|
:rtype: bool
|
|
|
|
"""
|
2016-06-02 16:06:12 +02:00
|
|
|
user_info = self.current_user_info or {}
|
2016-05-31 22:26:38 +02:00
|
|
|
user_permissions = user_info.get('permissions') or []
|
|
|
|
global_permission = '%s|all' % permission.split('|')[0]
|
|
|
|
if 'admin|all' in user_permissions or global_permission in user_permissions or permission in user_permissions:
|
|
|
|
return True
|
|
|
|
collection_permission = self.permissions.get(permission)
|
|
|
|
if isinstance(collection_permission, bool):
|
|
|
|
return collection_permission
|
|
|
|
if callable(collection_permission):
|
|
|
|
return collection_permission(permission)
|
|
|
|
return False
|
2016-05-29 14:06:34 +02:00
|
|
|
|
2016-05-30 23:14:56 +02:00
|
|
|
def build_error(self, message='', status=400):
|
2016-06-11 16:14:28 +02:00
|
|
|
"""Build and write an error message."""
|
2016-05-30 23:14:56 +02:00
|
|
|
self.set_status(status)
|
|
|
|
self.write({'error': True, 'message': message})
|
|
|
|
|
2015-05-02 18:38:57 +02:00
|
|
|
def logout(self):
|
|
|
|
"""Remove the secure cookie used fro authentication."""
|
2016-06-13 21:17:01 +02:00
|
|
|
if self.current_user in self._users_cache:
|
|
|
|
del self._users_cache[self.current_user]
|
2015-05-02 18:38:57 +02:00
|
|
|
self.clear_cookie("user")
|
|
|
|
|
2015-03-20 23:08:21 +01:00
|
|
|
|
|
|
|
class RootHandler(BaseHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Handler for the / path."""
|
2015-03-15 15:47:04 +01:00
|
|
|
angular_app_path = os.path.join(os.path.dirname(__file__), "angular_app")
|
2015-03-22 08:58:25 +01:00
|
|
|
|
2015-03-14 13:05:04 +01:00
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2015-03-31 23:35:56 +02:00
|
|
|
def get(self, *args, **kwargs):
|
2015-03-22 08:58:25 +01:00
|
|
|
# serve the ./angular_app/index.html file
|
2015-03-15 15:47:04 +01:00
|
|
|
with open(self.angular_app_path + "/index.html", 'r') as fd:
|
|
|
|
self.write(fd.read())
|
2015-03-14 17:32:45 +01:00
|
|
|
|
2015-03-15 18:00:08 +01:00
|
|
|
|
2015-04-26 00:47:38 +02:00
|
|
|
# Keep track of WebSocket connections.
|
2015-04-26 01:26:00 +02:00
|
|
|
_ws_clients = {}
|
2015-04-26 00:47:38 +02:00
|
|
|
|
2015-03-21 20:32:39 +01:00
|
|
|
class CollectionHandler(BaseHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Base class for handlers that need to interact with the database backend.
|
2016-04-10 17:21:46 +02:00
|
|
|
|
2015-03-22 08:58:25 +01:00
|
|
|
Introduce basic CRUD operations."""
|
|
|
|
# set of documents we're managing (a collection in MongoDB or a table in a SQL database)
|
2016-05-30 23:14:56 +02:00
|
|
|
document = None
|
2015-03-21 20:32:39 +01:00
|
|
|
collection = None
|
2015-03-21 18:29:19 +01:00
|
|
|
|
2015-05-01 16:27:22 +02:00
|
|
|
# set of documents used to store incremental sequences
|
|
|
|
counters_collection = 'counters'
|
|
|
|
|
2016-06-06 21:44:04 +02:00
|
|
|
_id_chars = string.ascii_lowercase + string.digits
|
|
|
|
|
2015-05-01 16:27:22 +02:00
|
|
|
def get_next_seq(self, seq):
|
|
|
|
"""Increment and return the new value of a ever-incrementing counter.
|
|
|
|
|
|
|
|
:param seq: unique name of the sequence
|
|
|
|
:type seq: str
|
|
|
|
|
2016-06-11 16:14:28 +02:00
|
|
|
:returns: the next value of the sequence
|
2015-05-01 16:27:22 +02:00
|
|
|
:rtype: int
|
|
|
|
"""
|
|
|
|
if not self.db.query(self.counters_collection, {'seq_name': seq}):
|
|
|
|
self.db.add(self.counters_collection, {'seq_name': seq, 'seq': 0})
|
|
|
|
merged, doc = self.db.update(self.counters_collection,
|
|
|
|
{'seq_name': seq},
|
|
|
|
{'seq': 1},
|
|
|
|
operation='increment')
|
|
|
|
return doc.get('seq', 0)
|
|
|
|
|
2016-06-11 00:47:29 +02:00
|
|
|
def gen_id(self, seq='ids', random_alpha=32):
|
|
|
|
"""Generate a unique, non-guessable ID.
|
|
|
|
|
|
|
|
:param seq: the scope of the ever-incrementing sequence
|
|
|
|
:type seq: str
|
|
|
|
:param random_alpha: number of random lowercase alphanumeric chars
|
|
|
|
:type random_alpha: int
|
|
|
|
|
2016-06-11 16:14:28 +02:00
|
|
|
:returns: unique ID
|
2016-06-11 00:47:29 +02:00
|
|
|
:rtype: str"""
|
2016-06-08 23:05:16 +02:00
|
|
|
t = str(time.time()).replace('.', '_')
|
|
|
|
seq = str(self.get_next_seq(seq))
|
2016-06-11 00:47:29 +02:00
|
|
|
rand = ''.join([random.choice(self._id_chars) for x in xrange(random_alpha)])
|
2016-06-06 21:44:04 +02:00
|
|
|
return '-'.join((t, seq, rand))
|
|
|
|
|
2015-04-14 23:44:55 +02:00
|
|
|
def _filter_results(self, results, params):
|
2015-04-15 00:12:35 +02:00
|
|
|
"""Filter a list using keys and values from a dictionary.
|
2016-04-10 17:21:46 +02:00
|
|
|
|
2015-04-15 00:12:35 +02:00
|
|
|
:param results: the list to be filtered
|
|
|
|
:type results: list
|
|
|
|
:param params: a dictionary of items that must all be present in an original list item to be included in the return
|
2016-05-11 21:09:57 +02:00
|
|
|
:type params: dict
|
2016-04-10 17:21:46 +02:00
|
|
|
|
2016-06-11 16:14:28 +02:00
|
|
|
:returns: list of items that have all the keys with the same values as params
|
2015-04-15 00:12:35 +02:00
|
|
|
:rtype: list"""
|
2015-04-14 23:44:55 +02:00
|
|
|
if not params:
|
|
|
|
return results
|
2016-05-11 21:09:57 +02:00
|
|
|
params = backend.convert(params)
|
2015-04-14 23:44:55 +02:00
|
|
|
filtered = []
|
|
|
|
for result in results:
|
|
|
|
add = True
|
|
|
|
for key, value in params.iteritems():
|
|
|
|
if key not in result or result[key] != value:
|
|
|
|
add = False
|
|
|
|
break
|
|
|
|
if add:
|
|
|
|
filtered.append(result)
|
|
|
|
return filtered
|
|
|
|
|
2016-04-17 16:24:38 +02:00
|
|
|
def _clean_dict(self, data):
|
|
|
|
"""Filter a dictionary (in place) to remove unwanted keywords.
|
|
|
|
|
|
|
|
:param data: dictionary to clean
|
|
|
|
:type data: dict"""
|
|
|
|
if isinstance(data, dict):
|
|
|
|
for key in data.keys():
|
|
|
|
if isinstance(key, (str, unicode)) and key.startswith('$'):
|
|
|
|
del data[key]
|
|
|
|
return data
|
|
|
|
|
2015-04-18 14:27:02 +02:00
|
|
|
def _dict2env(self, data):
|
2016-04-17 16:24:38 +02:00
|
|
|
"""Convert a dictionary into a form suitable to be passed as environment variables.
|
|
|
|
|
|
|
|
:param data: dictionary to convert
|
|
|
|
:type data: dict"""
|
2015-04-18 14:27:02 +02:00
|
|
|
ret = {}
|
|
|
|
for key, value in data.iteritems():
|
|
|
|
if isinstance(value, (list, tuple, dict)):
|
|
|
|
continue
|
|
|
|
try:
|
2015-04-25 15:46:46 +02:00
|
|
|
key = key.upper().encode('ascii', 'ignore')
|
|
|
|
key = re_env_key.sub('', key)
|
|
|
|
if not key:
|
|
|
|
continue
|
|
|
|
ret[key] = unicode(value).encode(ENCODING)
|
2015-04-18 14:27:02 +02:00
|
|
|
except:
|
|
|
|
continue
|
|
|
|
return ret
|
|
|
|
|
2016-06-11 16:14:28 +02:00
|
|
|
def apply_filter(self, data, filter_name):
|
|
|
|
"""Apply a filter to the data.
|
|
|
|
|
|
|
|
:param data: the data to filter
|
|
|
|
:returns: the modified (possibly also in place) data
|
|
|
|
"""
|
|
|
|
filter_method = getattr(self, 'filter_%s' % filter_name, None)
|
2016-06-11 00:47:29 +02:00
|
|
|
if filter_method is not None:
|
2016-06-11 16:14:28 +02:00
|
|
|
data = filter_method(data)
|
|
|
|
return data
|
2016-06-11 00:47:29 +02:00
|
|
|
|
2015-03-14 17:32:45 +01:00
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2016-06-02 16:06:12 +02:00
|
|
|
def get(self, id_=None, resource=None, resource_id=None, acl=True, **kwargs):
|
2015-03-31 23:35:56 +02:00
|
|
|
if resource:
|
2015-04-13 23:25:46 +02:00
|
|
|
# Handle access to sub-resources.
|
2016-06-10 12:39:25 +02:00
|
|
|
permission = '%s:%s%s|read' % (self.document, resource, '-all' if resource_id is None else '')
|
2016-06-02 16:06:12 +02:00
|
|
|
if acl and not self.has_permission(permission):
|
2016-05-31 22:26:38 +02:00
|
|
|
return self.build_error(status=401, message='insufficient permissions: %s' % permission)
|
2016-06-11 16:14:28 +02:00
|
|
|
handler = getattr(self, 'handle_get_%s' % resource, None)
|
|
|
|
if handler and callable(handler):
|
|
|
|
output = handler(id_, resource_id, **kwargs) or {}
|
|
|
|
output = self.apply_filter(output, 'get_%s' % resource)
|
2016-06-11 00:47:29 +02:00
|
|
|
self.write(output)
|
2015-04-05 22:16:11 +02:00
|
|
|
return
|
2016-06-07 23:21:08 +02:00
|
|
|
return self.build_error(status=404, message='unable to access resource: %s' % resource)
|
2015-03-15 23:05:59 +01:00
|
|
|
if id_ is not None:
|
2015-03-22 08:58:25 +01:00
|
|
|
# read a single document
|
2016-05-31 22:26:38 +02:00
|
|
|
permission = '%s|read' % self.document
|
2016-06-02 16:06:12 +02:00
|
|
|
if acl and not self.has_permission(permission):
|
2016-05-31 22:26:38 +02:00
|
|
|
return self.build_error(status=401, message='insufficient permissions: %s' % permission)
|
2016-06-11 00:47:29 +02:00
|
|
|
output = self.db.get(self.collection, id_)
|
2016-06-11 16:14:28 +02:00
|
|
|
output = self.apply_filter(output, 'get')
|
2016-06-11 00:47:29 +02:00
|
|
|
self.write(output)
|
2015-03-21 18:29:19 +01:00
|
|
|
else:
|
2015-03-22 08:58:25 +01:00
|
|
|
# return an object containing the list of all objects in the collection;
|
|
|
|
# e.g.: {'events': [{'_id': 'obj1-id, ...}, {'_id': 'obj2-id, ...}, ...]}
|
2015-03-22 17:08:25 +01:00
|
|
|
# Please, never return JSON lists that are not encapsulated into an object,
|
2015-03-22 08:58:25 +01:00
|
|
|
# to avoid XSS vulnerabilities.
|
2016-06-11 17:56:23 +02:00
|
|
|
permission = '%s|read' % self.collection
|
2016-06-02 16:06:12 +02:00
|
|
|
if acl and not self.has_permission(permission):
|
2016-05-31 22:26:38 +02:00
|
|
|
return self.build_error(status=401, message='insufficient permissions: %s' % permission)
|
2016-06-11 00:47:29 +02:00
|
|
|
output = {self.collection: self.db.query(self.collection, self.arguments)}
|
2016-06-11 16:14:28 +02:00
|
|
|
output = self.apply_filter(output, 'get_all')
|
2016-06-11 00:47:29 +02:00
|
|
|
self.write(output)
|
2015-03-21 18:29:19 +01:00
|
|
|
|
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2015-04-05 00:55:59 +02:00
|
|
|
def post(self, id_=None, resource=None, resource_id=None, **kwargs):
|
2015-04-06 17:19:20 +02:00
|
|
|
data = escape.json_decode(self.request.body or '{}')
|
2016-04-17 16:24:38 +02:00
|
|
|
self._clean_dict(data)
|
2016-05-30 23:14:56 +02:00
|
|
|
method = self.request.method.lower()
|
2016-06-11 16:14:28 +02:00
|
|
|
crud_method = 'create' if method == 'post' else 'update'
|
2016-06-11 17:56:23 +02:00
|
|
|
now = datetime.datetime.now()
|
2016-06-13 22:44:57 +02:00
|
|
|
user_info = self.current_user_info
|
|
|
|
user_id = user_info.get('_id')
|
2016-06-11 17:56:23 +02:00
|
|
|
if crud_method == 'create':
|
2016-06-13 22:44:57 +02:00
|
|
|
data['created_by'] = user_id
|
2016-06-11 17:56:23 +02:00
|
|
|
data['created_at'] = now
|
2016-06-13 22:44:57 +02:00
|
|
|
data['updated_by'] = user_id
|
2016-06-11 17:56:23 +02:00
|
|
|
data['updated_at'] = now
|
2015-04-05 00:55:59 +02:00
|
|
|
if resource:
|
2016-06-11 16:14:28 +02:00
|
|
|
permission = '%s:%s%s|%s' % (self.document, resource, '-all' if resource_id is None else '', crud_method)
|
2016-05-31 22:26:38 +02:00
|
|
|
if not self.has_permission(permission):
|
|
|
|
return self.build_error(status=401, message='insufficient permissions: %s' % permission)
|
2015-04-13 23:25:46 +02:00
|
|
|
# Handle access to sub-resources.
|
2016-05-30 23:14:56 +02:00
|
|
|
handler = getattr(self, 'handle_%s_%s' % (method, resource), None)
|
|
|
|
if handler and callable(handler):
|
2016-06-11 16:14:28 +02:00
|
|
|
data = self.apply_filter(data, 'input_%s_%s' % (method, resource))
|
|
|
|
output = handler(id_, resource_id, data, **kwargs)
|
|
|
|
output = self.apply_filter(output, 'get_%s' % resource)
|
|
|
|
self.write(output)
|
2015-04-05 22:16:11 +02:00
|
|
|
return
|
2016-06-11 16:14:28 +02:00
|
|
|
if id_ is not None:
|
|
|
|
permission = '%s|%s' % (self.document, crud_method)
|
2016-05-31 22:26:38 +02:00
|
|
|
if not self.has_permission(permission):
|
|
|
|
return self.build_error(status=401, message='insufficient permissions: %s' % permission)
|
2016-06-11 17:56:23 +02:00
|
|
|
data = self.apply_filter(data, 'input_%s' % method)
|
2016-06-11 16:14:28 +02:00
|
|
|
merged, newData = self.db.update(self.collection, id_, data)
|
|
|
|
newData = self.apply_filter(newData, method)
|
2015-03-21 18:29:19 +01:00
|
|
|
else:
|
2016-06-11 17:56:23 +02:00
|
|
|
permission = '%s|%s' % (self.collection, crud_method)
|
2016-05-31 22:26:38 +02:00
|
|
|
if not self.has_permission(permission):
|
|
|
|
return self.build_error(status=401, message='insufficient permissions: %s' % permission)
|
2016-06-11 16:14:28 +02:00
|
|
|
data = self.apply_filter(data, 'input_%s_all' % method)
|
|
|
|
newData = self.db.add(self.collection, data, _id=self.gen_id())
|
|
|
|
newData = self.apply_filter(newData, '%s_all' % method)
|
2015-03-21 18:29:19 +01:00
|
|
|
self.write(newData)
|
2015-03-14 11:12:57 +01:00
|
|
|
|
2015-03-22 17:08:25 +01:00
|
|
|
# PUT (update an existing document) is handled by the POST (create a new document) method
|
2015-03-21 20:32:39 +01:00
|
|
|
put = post
|
2015-03-15 18:00:08 +01:00
|
|
|
|
2015-03-22 17:08:25 +01:00
|
|
|
@gen.coroutine
|
2015-05-03 11:58:40 +02:00
|
|
|
@authenticated
|
2015-04-05 11:20:57 +02:00
|
|
|
def delete(self, id_=None, resource=None, resource_id=None, **kwargs):
|
|
|
|
if resource:
|
2015-04-13 23:25:46 +02:00
|
|
|
# Handle access to sub-resources.
|
2016-06-11 16:14:28 +02:00
|
|
|
permission = '%s:%s%s|delete' % (self.document, resource, '-all' if resource_id is None else '')
|
2016-05-31 22:26:38 +02:00
|
|
|
if not self.has_permission(permission):
|
|
|
|
return self.build_error(status=401, message='insufficient permissions: %s' % permission)
|
2015-04-05 11:20:57 +02:00
|
|
|
method = getattr(self, 'handle_delete_%s' % resource, None)
|
|
|
|
if method and callable(method):
|
2015-04-05 22:16:11 +02:00
|
|
|
self.write(method(id_, resource_id, **kwargs))
|
|
|
|
return
|
2015-04-13 23:25:46 +02:00
|
|
|
if id_:
|
2016-05-31 22:26:38 +02:00
|
|
|
permission = '%s|delete' % self.document
|
|
|
|
if not self.has_permission(permission):
|
|
|
|
return self.build_error(status=401, message='insufficient permissions: %s' % permission)
|
2015-04-13 23:25:46 +02:00
|
|
|
self.db.delete(self.collection, id_)
|
2016-06-11 16:14:28 +02:00
|
|
|
else:
|
|
|
|
self.write({'success': False})
|
2015-04-13 23:25:46 +02:00
|
|
|
self.write({'success': True})
|
2015-03-22 17:08:25 +01:00
|
|
|
|
2015-04-18 15:01:30 +02:00
|
|
|
def on_timeout(self, cmd, pipe):
|
2015-04-18 12:53:08 +02:00
|
|
|
"""Kill a process that is taking too long to complete."""
|
2015-04-18 15:01:30 +02:00
|
|
|
logging.debug('cmd %s is taking too long: killing it' % ' '.join(cmd))
|
2015-04-18 12:53:08 +02:00
|
|
|
try:
|
|
|
|
pipe.proc.kill()
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
|
|
|
|
def on_exit(self, returncode, cmd, pipe):
|
|
|
|
"""Callback executed when a subprocess execution is over."""
|
|
|
|
self.ioloop.remove_timeout(self.timeout)
|
2015-04-18 15:01:30 +02:00
|
|
|
logging.debug('cmd: %s returncode: %d' % (' '.join(cmd), returncode))
|
2015-04-18 12:53:08 +02:00
|
|
|
|
|
|
|
@gen.coroutine
|
2015-04-18 14:27:02 +02:00
|
|
|
def run_subprocess(self, cmd, stdin_data=None, env=None):
|
2015-04-17 20:31:50 +02:00
|
|
|
"""Execute the given action.
|
|
|
|
|
|
|
|
:param cmd: the command to be run with its command line arguments
|
|
|
|
:type cmd: list
|
2015-04-18 12:53:08 +02:00
|
|
|
|
|
|
|
:param stdin_data: data to be sent over stdin
|
|
|
|
:type stdin_data: str
|
2015-04-18 14:27:02 +02:00
|
|
|
:param env: environment of the process
|
|
|
|
:type env: dict
|
2015-04-17 20:31:50 +02:00
|
|
|
"""
|
|
|
|
self.ioloop = tornado.ioloop.IOLoop.instance()
|
2015-04-18 12:53:08 +02:00
|
|
|
p = process.Subprocess(cmd, close_fds=True, stdin=process.Subprocess.STREAM,
|
2015-04-18 14:27:02 +02:00
|
|
|
stdout=process.Subprocess.STREAM, stderr=process.Subprocess.STREAM, env=env)
|
2015-04-18 12:53:08 +02:00
|
|
|
p.set_exit_callback(lambda returncode: self.on_exit(returncode, cmd, p))
|
2015-04-17 20:31:50 +02:00
|
|
|
self.timeout = self.ioloop.add_timeout(datetime.timedelta(seconds=PROCESS_TIMEOUT),
|
2015-04-18 15:01:30 +02:00
|
|
|
lambda: self.on_timeout(cmd, p))
|
2015-04-18 12:53:08 +02:00
|
|
|
yield gen.Task(p.stdin.write, stdin_data or '')
|
|
|
|
p.stdin.close()
|
|
|
|
out, err = yield [gen.Task(p.stdout.read_until_close),
|
|
|
|
gen.Task(p.stderr.read_until_close)]
|
2015-04-18 15:01:30 +02:00
|
|
|
logging.debug('cmd: %s' % ' '.join(cmd))
|
|
|
|
logging.debug('cmd stdout: %s' % out)
|
|
|
|
logging.debug('cmd strerr: %s' % err)
|
2015-04-18 12:53:08 +02:00
|
|
|
raise gen.Return((out, err))
|
2015-04-17 00:17:36 +02:00
|
|
|
|
|
|
|
@gen.coroutine
|
2015-04-18 14:27:02 +02:00
|
|
|
def run_triggers(self, action, stdin_data=None, env=None):
|
2015-04-17 20:31:50 +02:00
|
|
|
"""Asynchronously execute triggers for the given action.
|
|
|
|
|
|
|
|
:param action: action name; scripts in directory ./data/triggers/{action}.d will be run
|
|
|
|
:type action: str
|
2015-04-18 12:53:08 +02:00
|
|
|
:param stdin_data: a python dictionary that will be serialized in JSON and sent to the process over stdin
|
|
|
|
:type stdin_data: dict
|
2015-04-18 14:27:02 +02:00
|
|
|
:param env: environment of the process
|
|
|
|
:type stdin_data: dict
|
2015-04-17 20:31:50 +02:00
|
|
|
"""
|
2015-04-18 15:01:30 +02:00
|
|
|
logging.debug('running triggers for action "%s"' % action)
|
2015-04-18 12:53:08 +02:00
|
|
|
stdin_data = stdin_data or {}
|
|
|
|
try:
|
|
|
|
stdin_data = json.dumps(stdin_data)
|
|
|
|
except:
|
|
|
|
stdin_data = '{}'
|
2015-04-17 00:17:36 +02:00
|
|
|
for script in glob.glob(os.path.join(self.data_dir, 'triggers', '%s.d' % action, '*')):
|
|
|
|
if not (os.path.isfile(script) and os.access(script, os.X_OK)):
|
|
|
|
continue
|
2015-04-18 14:27:02 +02:00
|
|
|
out, err = yield gen.Task(self.run_subprocess, [script], stdin_data, env)
|
2015-03-22 08:58:25 +01:00
|
|
|
|
2015-04-26 00:47:38 +02:00
|
|
|
def build_ws_url(self, path, proto='ws', host=None):
|
|
|
|
"""Return a WebSocket url from a path."""
|
2015-04-26 11:49:59 +02:00
|
|
|
return 'ws://127.0.0.1:%s/ws/%s' % (self.listen_port + 1, path)
|
2015-04-26 00:47:38 +02:00
|
|
|
|
|
|
|
@gen.coroutine
|
2015-04-26 12:56:12 +02:00
|
|
|
def send_ws_message(self, path, message):
|
2015-04-26 00:47:38 +02:00
|
|
|
"""Send a WebSocket message to all the connected clients.
|
|
|
|
|
2015-04-26 12:56:12 +02:00
|
|
|
:param path: partial path used to build the WebSocket url
|
|
|
|
:type path: str
|
2015-04-26 00:47:38 +02:00
|
|
|
:param message: message to send
|
|
|
|
:type message: str
|
|
|
|
"""
|
2016-06-26 20:45:12 +02:00
|
|
|
try:
|
|
|
|
ws = yield tornado.websocket.websocket_connect(self.build_ws_url(path))
|
|
|
|
ws.write_message(message)
|
|
|
|
ws.close()
|
|
|
|
except Exception, e:
|
|
|
|
self.logger.error('Error yielding WebSocket message: %s', e)
|
2015-04-26 00:47:38 +02:00
|
|
|
|
2015-04-16 00:06:01 +02:00
|
|
|
|
2015-03-21 20:32:39 +01:00
|
|
|
class PersonsHandler(CollectionHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Handle requests for Persons."""
|
2015-03-31 23:35:56 +02:00
|
|
|
|
2015-04-05 00:55:59 +02:00
|
|
|
def handle_get_events(self, id_, resource_id=None, **kwargs):
|
2015-04-13 23:25:46 +02:00
|
|
|
# Get a list of events attended by this person.
|
|
|
|
# Inside the data of each event, a 'person_data' dictionary is
|
|
|
|
# created, duplicating the entry for the current person (so that
|
|
|
|
# there's no need to parse the 'persons' list on the client).
|
|
|
|
#
|
|
|
|
# If resource_id is given, only the specified event is considered.
|
|
|
|
#
|
|
|
|
# If the 'all' parameter is given, every event (also unattended ones) is returned.
|
2015-04-06 17:19:20 +02:00
|
|
|
args = self.request.arguments
|
|
|
|
query = {}
|
|
|
|
if id_ and not self.tobool(args.get('all')):
|
|
|
|
query = {'persons.person_id': id_}
|
2015-04-05 00:55:59 +02:00
|
|
|
if resource_id:
|
|
|
|
query['_id'] = resource_id
|
|
|
|
|
|
|
|
events = self.db.query('events', query)
|
2015-04-04 14:15:52 +02:00
|
|
|
for event in events:
|
|
|
|
person_data = {}
|
2015-04-04 14:56:41 +02:00
|
|
|
for persons in event.get('persons') or []:
|
|
|
|
if str(persons.get('person_id')) == id_:
|
|
|
|
person_data = persons
|
2015-04-04 14:15:52 +02:00
|
|
|
break
|
2016-05-15 11:52:59 +02:00
|
|
|
if 'persons' in event:
|
|
|
|
del event['persons']
|
2015-04-04 14:15:52 +02:00
|
|
|
event['person_data'] = person_data
|
2015-04-13 23:25:46 +02:00
|
|
|
if resource_id and events:
|
|
|
|
return events[0]
|
2015-04-04 14:15:52 +02:00
|
|
|
return {'events': events}
|
|
|
|
|
2015-03-22 08:58:25 +01:00
|
|
|
|
2015-03-21 20:32:39 +01:00
|
|
|
class EventsHandler(CollectionHandler):
|
2015-03-22 08:58:25 +01:00
|
|
|
"""Handle requests for Events."""
|
2016-05-30 23:14:56 +02:00
|
|
|
document = 'event'
|
2015-03-21 20:32:39 +01:00
|
|
|
collection = 'events'
|
2015-03-31 23:35:56 +02:00
|
|
|
|
2016-06-11 00:47:29 +02:00
|
|
|
def filter_get(self, output):
|
|
|
|
if not self.has_permission('persons-all|read'):
|
|
|
|
if 'persons' in output:
|
|
|
|
output['persons'] = []
|
|
|
|
return output
|
|
|
|
|
|
|
|
def filter_get_all(self, output):
|
|
|
|
if not self.has_permission('persons-all|read'):
|
|
|
|
for event in output.get('events') or []:
|
|
|
|
if 'persons' in event:
|
|
|
|
event['persons'] = []
|
|
|
|
return output
|
|
|
|
|
2016-07-03 14:13:27 +02:00
|
|
|
def filter_input_post(self, data):
|
|
|
|
# Auto-generate the group_id, if missing.
|
|
|
|
if 'group_id' not in data:
|
|
|
|
data['group_id'] = self.gen_id()
|
|
|
|
return data
|
|
|
|
|
|
|
|
filter_input_post_all = filter_input_post
|
|
|
|
filter_input_put = filter_input_post
|
|
|
|
|
2016-06-11 16:14:28 +02:00
|
|
|
def filter_input_post_tickets(self, data):
|
|
|
|
if not self.has_permission('event|update'):
|
|
|
|
if 'attended' in data:
|
|
|
|
del data['attended']
|
|
|
|
return data
|
|
|
|
|
|
|
|
filter_input_put_tickets = filter_input_post_tickets
|
|
|
|
|
2016-07-03 14:13:27 +02:00
|
|
|
def handle_get_group_persons(self, id_, resource_id=None):
|
|
|
|
persons = []
|
|
|
|
this_query = {'_id': id_}
|
|
|
|
this_event = self.db.query('events', this_query)[0]
|
|
|
|
group_id = this_event.get('group_id')
|
|
|
|
if group_id is None:
|
|
|
|
return {'persons': persons}
|
|
|
|
this_persons = [p for p in (this_event.get('persons') or []) if not p.get('cancelled')]
|
|
|
|
this_emails = filter(None, [p.get('email') for p in this_persons])
|
|
|
|
all_query = {'group_id': group_id}
|
|
|
|
events = self.db.query('events', all_query)
|
|
|
|
for event in events:
|
2016-07-05 21:30:12 +02:00
|
|
|
if id_ is not None and str(event.get('_id')) == id_:
|
2016-07-03 14:13:27 +02:00
|
|
|
continue
|
|
|
|
persons += [p for p in (event.get('persons') or []) if p.get('email') and p.get('email') not in this_emails]
|
|
|
|
return {'persons': persons}
|
|
|
|
|
2015-04-22 23:34:53 +02:00
|
|
|
def _get_person_data(self, person_id_or_query, persons):
|
|
|
|
"""Filter a list of persons returning the first item with a given person_id
|
|
|
|
or which set of keys specified in a dictionary match their respective values."""
|
2015-04-18 12:53:08 +02:00
|
|
|
for person in persons:
|
2015-04-22 23:34:53 +02:00
|
|
|
if isinstance(person_id_or_query, dict):
|
|
|
|
if all(person.get(k) == v for k, v in person_id_or_query.iteritems()):
|
|
|
|
return person
|
|
|
|
else:
|
2016-07-02 13:46:39 +02:00
|
|
|
if str(person.get('_id')) == person_id_or_query:
|
2015-04-22 23:34:53 +02:00
|
|
|
return person
|
2015-04-18 12:53:08 +02:00
|
|
|
return {}
|
|
|
|
|
2016-06-08 23:05:16 +02:00
|
|
|
def handle_get_persons(self, id_, resource_id=None, match_query=None):
    """Return every person registered at this event, or the information
    about a specific person.

    :param id_: ID of the event
    :param resource_id: ID of a single person to look up, if any
    :param match_query: optional query used instead of resource_id to
        select the person
    :return: dict with a 'person' entry (single lookup) or a filtered
        'persons' list
    """
    event = self.db.query('events', {'_id': id_})[0]
    registered = event.get('persons') or []
    if match_query is None:
        match_query = resource_id
    if resource_id:
        return {'person': self._get_person_data(match_query, registered)}
    return {'persons': self._filter_results(registered, self.arguments)}
|
2015-04-05 00:55:59 +02:00
|
|
|
|
2016-06-07 23:21:08 +02:00
|
|
|
def handle_get_tickets(self, id_, resource_id=None):
    """Return the tickets of an event, or a single ticket.

    Listing every ticket requires the 'event:tickets|all' permission;
    the actual lookup is delegated to handle_get_persons.
    """
    wants_whole_list = resource_id is None
    if wants_whole_list and not self.has_permission('event:tickets|all'):
        return self.build_error(status=401, message='insufficient permissions: event:tickets|all')
    return self.handle_get_persons(id_, resource_id, {'_id': resource_id})
|
2016-06-07 23:21:08 +02:00
|
|
|
|
2015-04-06 17:19:20 +02:00
|
|
|
def handle_post_persons(self, id_, person_id, data):
    """Add a person to the list of persons registered at this event.

    :param id_: ID of the event
    :param person_id: ID of an already-registered person to look for;
        None to always add a new entry
    :param data: information about the person being added
    :return: dict describing the 'add' action (also sent over websocket)
    """
    uuid, arguments = self.uuid_arguments
    # sanitize the incoming data in place (semantics defined by
    # _clean_dict elsewhere in this class)
    self._clean_dict(data)
    # per-event progressive sequence number, also in 6-digit hex form
    data['seq'] = self.get_next_seq('event_%s_persons' % id_)
    data['seq_hex'] = '%06X' % data['seq']
    if person_id is None:
        doc = {}
    else:
        doc = self.db.query('events', {'_id': id_, 'persons._id': person_id})
    ret = {'action': 'add', '_id': person_id, 'person': data, 'uuid': uuid}
    # NOTE(review): 'data' is aliased inside 'ret', so dropping '_id' here
    # also removes it from the websocket payload serialized below.
    if '_id' in data:
        del data['_id']
    # NOTE(review): the notification is sent before the database write
    # below — confirm this ordering is intentional.
    self.send_ws_message('event/%s/tickets/updates' % id_, json.dumps(ret))
    if not doc:
        # person not registered yet: generate an ID and append the entry
        data['_id'] = self.gen_id()
        merged, doc = self.db.update('events',
                {'_id': id_},
                {'persons': data},
                operation='appendUnique',
                create=False)
    return ret
|
2015-04-06 17:19:20 +02:00
|
|
|
|
2016-06-07 23:21:08 +02:00
|
|
|
# POSTing a ticket behaves exactly like POSTing a person.
handle_post_tickets = handle_post_persons
|
2016-06-09 23:13:56 +02:00
|
|
|
def handle_put_persons(self, id_, person_id, data, ticket=False):
    """Update an existing entry for a person registered at this event.

    Runs the 'update_person_in_event' triggers (and 'attends' when the
    'attended' flag turns on) and sends a websocket notification when
    the person's data actually changed.

    :param id_: ID of the event
    :param person_id: ID of the person; None to select the person via
        the request's query arguments
    :param data: new data to merge into the person's entry
    :param ticket: not referenced in this body; kept for the
        handle_put_tickets wrapper
    :return: dict describing the 'update' action
    """
    self._clean_dict(data)
    uuid, arguments = self.uuid_arguments
    # restrict the match to persons having the given query arguments
    query = dict([('persons.%s' % k, v) for k, v in arguments.iteritems()])
    query['_id'] = id_
    if person_id is not None:
        query['persons._id'] = person_id
        person_query = {'_id': person_id}
    else:
        person_query = self.arguments
    old_person_data = {}
    current_event = self.db.query(self.collection, query)
    if current_event:
        current_event = current_event[0]
    else:
        current_event = {}
    # snapshot of the person before the update, used below to detect changes
    old_person_data = self._get_person_data(person_query,
            current_event.get('persons') or [])
    merged, doc = self.db.update('events', query,
            data, updateList='persons', create=False)
    new_person_data = self._get_person_data(person_query,
            doc.get('persons') or [])
    env = self._dict2env(new_person_data)
    # always takes the person_id from the new person (it may have
    # been a ticket_id).
    ticket_id = str(new_person_data.get('_id'))
    # NOTE(review): 'TICKED_ID' looks like a typo for 'TICKET_ID', but
    # external trigger scripts may depend on it — confirm before renaming.
    env.update({'PERSON_ID': ticket_id, 'TICKED_ID': ticket_id, 'EVENT_ID': id_,
            'EVENT_TITLE': doc.get('title', ''), 'WEB_USER': self.current_user,
            'WEB_REMOTE_IP': self.request.remote_ip})
    stdin_data = {'old': old_person_data,
            'new': new_person_data,
            'event': doc,
            'merged': merged
            }
    self.run_triggers('update_person_in_event', stdin_data=stdin_data, env=env)
    # fire the 'attends' triggers only on a transition to attended=True
    if old_person_data and old_person_data.get('attended') != new_person_data.get('attended'):
        if new_person_data.get('attended'):
            self.run_triggers('attends', stdin_data=stdin_data, env=env)

    ret = {'action': 'update', '_id': ticket_id, 'person': new_person_data, 'uuid': uuid}
    # notify websocket clients only if something really changed
    if old_person_data != new_person_data:
        self.send_ws_message('event/%s/tickets/updates' % id_, json.dumps(ret))
    return ret
|
2015-04-05 00:55:59 +02:00
|
|
|
|
2016-06-09 23:13:56 +02:00
|
|
|
def handle_put_tickets(self, id_, person_id, data):
    """Update a ticket; delegates to handle_put_persons in ticket mode."""
    outcome = self.handle_put_persons(id_, person_id, data, True)
    return outcome
|
2016-06-07 23:21:08 +02:00
|
|
|
|
2015-04-05 11:20:57 +02:00
|
|
|
def handle_delete_persons(self, id_, person_id):
    """Remove a specific person from the persons registered at this event.

    A websocket notification describing the deletion is always sent;
    the database is only touched when the person is actually present.
    """
    uuid, _ = self.uuid_arguments
    found = self.db.query('events', {'_id': id_, 'persons._id': person_id})
    ret = {'action': 'delete', '_id': person_id, 'uuid': uuid}
    if found:
        merged, doc = self.db.update('events',
                {'_id': id_},
                {'persons': {'_id': person_id}},
                operation='delete',
                create=False)
    self.send_ws_message('event/%s/tickets/updates' % id_, json.dumps(ret))
    return ret
|
2015-04-05 11:20:57 +02:00
|
|
|
|
2016-06-07 23:21:08 +02:00
|
|
|
# DELETE on a ticket behaves exactly like DELETE on a person.
handle_delete_tickets = handle_delete_persons
|
2015-03-31 23:35:56 +02:00
|
|
|
|
2016-06-12 16:04:46 +02:00
|
|
|
class UsersHandler(CollectionHandler):
    """Handle requests for Users."""
    document = 'user'
    collection = 'users'

    def filter_get_all(self, data):
        """Strip password hashes from a users listing before output."""
        if 'users' not in data:
            return data
        for entry in data['users']:
            entry.pop('password', None)
        return data

    def filter_input_post_all(self, data):
        """Validate and normalize the data used to create a new user.

        Raises InputException when username or password are missing,
        or when the username is already taken.
        """
        username = (data.get('username') or '').strip()
        password = (data.get('password') or '').strip()
        email = (data.get('email') or '').strip()
        if not (username and password):
            raise InputException('missing username or password')
        if self.db.query('users', {'username': username}):
            raise InputException('username already exists')
        return {'username': username, 'password': utils.hash_password(password),
                'email': email, '_id': self.gen_id()}
|
2016-06-12 23:44:48 +02:00
|
|
|
|
2016-06-12 16:04:46 +02:00
|
|
|
|
2015-03-30 21:39:12 +02:00
|
|
|
class EbCSVImportPersonsHandler(BaseHandler):
    """Importer for CSV files exported from eventbrite."""
    # Mapping from (localized) eventbrite CSV column names to our keys.
    csvRemap = {
        'Nome evento': 'event_title',
        'ID evento': 'event_id',
        'N. codice a barre': 'ebqrcode',
        'Cognome acquirente': 'surname',
        'Nome acquirente': 'name',
        'E-mail acquirente': 'email',
        'Cognome': 'surname',
        'Nome': 'name',
        'E-mail': 'email',
        'Indirizzo e-mail': 'email',
        'Tipologia biglietto': 'ticket_kind',
        'Data partecipazione': 'attending_datetime',
        'Data check-in': 'checkin_datetime',
        'Ordine n.': 'order_nr',
        'ID ordine': 'order_nr',
        'Titolo professionale': 'job_title',
        'Azienda': 'company',
        'Prefisso': 'name_title',
        'Prefisso (Sig., Sig.ra, ecc.)': 'name_title',

        'Order #': 'order_nr',
        'Prefix': 'name_title',
        'First Name': 'name',
        'Last Name': 'surname',
        'Suffix': 'name_suffix',
        'Email': 'email',
        'Attendee #': 'attendee_nr',
        'Barcode #': 'ebqrcode',
        'Company': 'company',
    }
    # Only these information are stored in the person collection.
    keepPersonData = ('name', 'surname', 'email', 'name_title', 'name_suffix',
                      'company', 'job_title')

    @gen.coroutine
    @authenticated
    def post(self, **kwargs):
        """Import a CSV list of persons into the target event.

        Replies with parse statistics: total/valid rows and the number
        of persons added to the event.
        """
        event_handler = EventsHandler(self.application, self.request)
        event_handler.db = self.db
        event_id = None
        try:
            event_id = self.get_body_argument('targetEvent')
        # FIX: was a bare "except:" that silently swallowed every error;
        # only the missing-argument case is expected and ignorable here.
        except tornado.web.MissingArgumentError:
            pass
        # NOTE(review): 'merged' is reported but never incremented below.
        reply = dict(total=0, valid=0, merged=0, new_in_event=0)
        for fieldname, contents in self.request.files.items():
            for content in contents:
                filename = content['filename']
                parseStats, persons = utils.csvParse(content['body'], remap=self.csvRemap)
                reply['total'] += parseStats['total']
                reply['valid'] += parseStats['valid']
                for person in persons:
                    person['attended'] = False
                    # keep track of the file each person came from
                    person['from_file'] = filename
                    event_handler.handle_post_persons(event_id, None, person)
                    reply['new_in_event'] += 1
        self.write(reply)
|
|
|
|
|
|
|
|
|
2015-04-18 17:33:42 +02:00
|
|
|
class SettingsHandler(BaseHandler):
    """Handle requests for Settings."""
    @gen.coroutine
    @authenticated
    def get(self, **kwds):
        """Return the settings matching the request arguments."""
        filters = self.arguments_tobool()
        matching = self.db.query('settings', filters)
        self.write({'settings': matching})
|
|
|
|
|
|
|
|
|
2016-05-01 23:23:07 +02:00
|
|
|
class InfoHandler(BaseHandler):
    """Handle requests for information about the logged in user."""
    @gen.coroutine
    @authenticated
    def get(self, **kwds):
        """Write the current user's information, when available."""
        details = self.current_user_info
        info = {'user': details} if details else {}
        self.write({'info': info})
|
|
|
|
|
|
|
|
|
2015-04-26 00:47:38 +02:00
|
|
|
class WebSocketEventUpdatesHandler(tornado.websocket.WebSocketHandler):
    """Manage websockets."""

    def _clean_url(self, url):
        """Collapse runs of consecutive slashes in *url*."""
        return re_slashes.sub('/', url)

    def open(self, event_id, *args, **kwds):
        """Register this client among the listeners of its URL."""
        logging.debug('WebSocketEventUpdatesHandler.on_open event_id:%s' % event_id)
        channel = self._clean_url(self.request.uri)
        _ws_clients.setdefault(channel, set()).add(self)
        logging.debug('WebSocketEventUpdatesHandler.on_open %s clients connected' % len(_ws_clients))

    def on_message(self, message):
        """Relay *message* to every other client listening on this URL."""
        logging.debug('WebSocketEventUpdatesHandler.on_message')
        count = 0
        channel = self._clean_url(self.request.uri)
        for client in _ws_clients.get(channel, []):
            if client == self:
                continue
            client.write_message(message)
            count += 1
        logging.debug('WebSocketEventUpdatesHandler.on_message sent message to %d clients' % count)

    def on_close(self):
        """Deregister this client from its URL's listeners."""
        logging.debug('WebSocketEventUpdatesHandler.on_close')
        try:
            channel = self._clean_url(self.request.uri)
            if self in _ws_clients.get(channel, []):
                _ws_clients[channel].remove(self)
        except Exception as e:
            logging.warn('WebSocketEventUpdatesHandler.on_close error closing websocket: %s', str(e))
|
|
|
|
|
|
|
|
|
2016-06-12 16:04:46 +02:00
|
|
|
class LoginHandler(BaseHandler):
    """Handle user authentication requests."""
    # matches the "$salt$hash" format produced by utils.hash_password
    re_split_salt = re.compile(r'\$(?P<salt>.+)\$(?P<hash>.+)')

    @gen.coroutine
    def get(self, **kwds):
        # show the login page
        if self.is_api():
            # API clients get a JSON 401 instead of the HTML login page
            self.set_status(401)
            self.write({'error': True,
                        'message': 'authentication required'})
        else:
            with open(self.angular_app_path + "/login.html", 'r') as fd:
                self.write(fd.read())

    def _authorize(self, username, password, email=None):
        """Return True if this username/password is valid."""
        query = [{'username': username}]
        if email is not None:
            query.append({'email': email})
        res = self.db.query('users', query)
        if not res:
            return False
        user = res[0]
        db_password = user.get('password') or ''
        if not db_password:
            # users with no stored password can never authenticate
            return False
        # extract the salt from the stored "$salt$hash" value
        match = self.re_split_salt.match(db_password)
        if not match:
            return False
        salt = match.group('salt')
        # hash the supplied password with the stored salt and compare
        if utils.hash_password(password, salt=salt) == db_password:
            return True
        return False

    @gen.coroutine
    def post(self, *args, **kwargs):
        # authenticate a user; credentials come either from form-encoded
        # body arguments or, as a fallback, from a JSON request body
        try:
            password = self.get_body_argument('password')
            username = self.get_body_argument('username')
        except tornado.web.MissingArgumentError:
            data = escape.json_decode(self.request.body or '{}')
            username = data.get('username')
            password = data.get('password')
        if not (username and password):
            self.set_status(401)
            self.write({'error': True, 'message': 'missing username or password'})
            return
        if self._authorize(username, password):
            logging.info('successful login for user %s' % username)
            # the secure cookie is what marks the session as authenticated
            self.set_secure_cookie("user", username)
            self.write({'error': False, 'message': 'successful login'})
            return
        logging.info('login failed for user %s' % username)
        self.set_status(401)
        self.write({'error': True, 'message': 'wrong username and password'})
|
2015-05-02 17:39:59 +02:00
|
|
|
|
|
|
|
|
2016-06-12 16:04:46 +02:00
|
|
|
class LogoutHandler(BaseHandler):
    """Handle user logout requests."""
    @gen.coroutine
    def get(self, **kwds):
        """Log the user out and acknowledge with a JSON message."""
        logging.info('logout')
        self.logout()
        self.write({'error': False, 'message': 'logged out'})
|
2015-05-02 18:38:57 +02:00
|
|
|
|
|
|
|
|
2015-03-22 08:58:25 +01:00
|
|
|
def run():
    """Run the Tornado web application."""
    # command line arguments; can also be written in a configuration file,
    # specified with the --config argument.
    define("port", default=5242, help="run on the given port", type=int)
    define("address", default='', help="bind the server at the given address", type=str)
    define("data_dir", default=os.path.join(os.path.dirname(__file__), "data"),
            help="specify the directory used to store the data")
    define("ssl_cert", default=os.path.join(os.path.dirname(__file__), 'ssl', 'eventman_cert.pem'),
            help="specify the SSL certificate to use for secure connections")
    define("ssl_key", default=os.path.join(os.path.dirname(__file__), 'ssl', 'eventman_key.pem'),
            help="specify the SSL private key to use for secure connections")
    define("mongo_url", default=None,
            help="URL to MongoDB server", type=str)
    define("db_name", default='eventman',
            help="Name of the MongoDB database to use", type=str)
    define("authentication", default=False, help="if set to true, authentication is required")
    define("debug", default=False, help="run in debug mode")
    define("config", help="read configuration file",
            callback=lambda path: tornado.options.parse_config_file(path, final=False))
    tornado.options.parse_command_line()

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    if options.debug:
        logger.setLevel(logging.DEBUG)

    # database backend connector
    db_connector = backend.EventManDB(url=options.mongo_url, dbName=options.db_name)
    init_params = dict(db=db_connector, data_dir=options.data_dir, listen_port=options.port,
            authentication=options.authentication, logger=logger)

    # If not present, we store a user 'admin' with password 'eventman' into the database.
    if not db_connector.query('users', {'username': 'admin'}):
        db_connector.add('users',
                {'username': 'admin', 'password': utils.hash_password('eventman'),
                 'permissions': ['admin|all']})

    # If present, use the cookie_secret stored into the database.
    cookie_secret = db_connector.query('settings', {'setting': 'server_cookie_secret'})
    if cookie_secret:
        cookie_secret = cookie_secret[0]['cookie_secret']
    else:
        # the salt guarantees its uniqueness
        cookie_secret = utils.hash_password('__COOKIE_SECRET__')
        db_connector.add('settings',
                {'setting': 'server_cookie_secret', 'cookie_secret': cookie_secret})

    _ws_handler = (r"/ws/+event/+(?P<event_id>[\w\d_-]+)/+tickets/+updates/?", WebSocketEventUpdatesHandler)
    _persons_path = r"/persons/?(?P<id_>[\w\d_-]+)?/?(?P<resource>[\w\d_-]+)?/?(?P<resource_id>[\w\d_-]+)?"
    _events_path = r"/events/?(?P<id_>[\w\d_-]+)?/?(?P<resource>[\w\d_-]+)?/?(?P<resource_id>[\w\d_-]+)?"
    _users_path = r"/users/?(?P<id_>[\w\d_-]+)?/?(?P<resource>[\w\d_-]+)?/?(?P<resource_id>[\w\d_-]+)?"
    application = tornado.web.Application([
            (_persons_path, PersonsHandler, init_params),
            (r'/v%s%s' % (API_VERSION, _persons_path), PersonsHandler, init_params),
            (_events_path, EventsHandler, init_params),
            (r'/v%s%s' % (API_VERSION, _events_path), EventsHandler, init_params),
            (_users_path, UsersHandler, init_params),
            (r'/v%s%s' % (API_VERSION, _users_path), UsersHandler, init_params),
            (r"/(?:index.html)?", RootHandler, init_params),
            (r"/ebcsvpersons", EbCSVImportPersonsHandler, init_params),
            (r"/settings", SettingsHandler, init_params),
            (r"/info", InfoHandler, init_params),
            _ws_handler,
            (r'/login', LoginHandler, init_params),
            (r'/v%s/login' % API_VERSION, LoginHandler, init_params),
            (r'/logout', LogoutHandler),
            (r'/v%s/logout' % API_VERSION, LogoutHandler),
            (r'/(.*)', tornado.web.StaticFileHandler, {"path": "angular_app"})
        ],
        template_path=os.path.join(os.path.dirname(__file__), "templates"),
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        # FIX: pass the persistent secret computed above; previously the
        # literal placeholder '__COOKIE_SECRET__' was used, which ignored
        # the value stored in the settings collection and invalidated all
        # secure cookies whenever a different literal was deployed.
        cookie_secret=cookie_secret,
        login_url='/login',
        debug=options.debug)
    ssl_options = {}
    if os.path.isfile(options.ssl_key) and os.path.isfile(options.ssl_cert):
        ssl_options = dict(certfile=options.ssl_cert, keyfile=options.ssl_key)
    http_server = tornado.httpserver.HTTPServer(application, ssl_options=ssl_options or None)
    logger.info('Start serving on %s://%s:%d', 'https' if ssl_options else 'http',
            options.address if options.address else '127.0.0.1',
            options.port)
    http_server.listen(options.port, options.address)

    # Also listen on options.port+1 for our local ws connection.
    ws_application = tornado.web.Application([_ws_handler], debug=options.debug)
    ws_http_server = tornado.httpserver.HTTPServer(ws_application)
    ws_http_server.listen(options.port+1, address='127.0.0.1')
    logger.debug('Starting WebSocket on ws://127.0.0.1:%d', options.port+1)
    tornado.ioloop.IOLoop.instance().start()
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # start the server only when executed as a script
    run()
|