#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""EventMan(ager)

Your friendly manager of attendees at an event.

Copyright 2015-2017 Davide Alberani <da@erlug.linux.it>
                    RaspiBO <info@raspibo.org>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import os
import re
import glob
import json
import time
import string
import random
import logging
import datetime
import functools
import dateutil.tz
import dateutil.parser

import tornado.httpserver
import tornado.ioloop
import tornado.options
from tornado.options import define, options
import tornado.web
import tornado.websocket
from tornado import gen, escape, process

import utils
import monco


ENCODING = 'utf-8'
PROCESS_TIMEOUT = 60

API_VERSION = '1.0'

re_env_key = re.compile('[^a-zA-Z_]+')
re_slashes = re.compile(r'//+')

# Keep track of WebSocket connections.
_ws_clients = {}


def authenticated(method):
    """Decorator to handle forced authentication."""
    original_wrapper = tornado.web.authenticated(method)

    @functools.wraps(method)
    def my_wrapper(self, *args, **kwargs):
        # If no authentication was required from the command line or config file.
        if not self.authentication:
            return method(self, *args, **kwargs)
        # Unauthenticated API calls get redirected to /v1.0/[...]
        if self.is_api() and not self.current_user:
            self.redirect('/v%s%s' % (API_VERSION, self.get_login_url()))
            return
        return original_wrapper(self, *args, **kwargs)
    return my_wrapper


class BaseException(Exception):
    """Base class for EventMan custom exceptions.

    Note: within this module this intentionally shadows the built-in
    BaseException, so write_error() below catches our exceptions only.

    :param message: text message
    :type message: str
    :param status: numeric http status code
    :type status: int"""
    def __init__(self, message, status=400):
        super(BaseException, self).__init__(message)
        self.message = message
        self.status = status


class InputException(BaseException):
    """Exception raised by errors in input handling."""
    pass


class BaseHandler(tornado.web.RequestHandler):
    """Base class for request handlers."""
    permissions = {
        'event|read': True,
        'event:tickets|read': True,
        'event:tickets|create': True,
        'event:tickets|update': True,
        'event:tickets-all|create': True,
        'events|read': True,
        'users|create': True
    }

    # Cache information about currently logged-in users.
    _users_cache = {}

    # A property to access the first value of each argument.
    arguments = property(lambda self: dict([(k, v[0].decode('utf-8'))
                                            for k, v in self.request.arguments.items()]))

    # A property to access both the UUID and the clean arguments.
    @property
    def uuid_arguments(self):
        uuid = None
        arguments = self.arguments
        if 'uuid' in arguments:
            uuid = arguments['uuid']
            del arguments['uuid']
        return uuid, arguments

    _bool_convert = {
        '0': False,
        'n': False,
        'f': False,
        'no': False,
        'off': False,
        'false': False,
        '1': True,
        'y': True,
        't': True,
        'on': True,
        'yes': True,
        'true': True
    }

    _re_split_salt = re.compile(r'\$(?P<salt>.+)\$(?P<hash>.+)')

    def write_error(self, status_code, **kwargs):
        """Default error handler."""
        if isinstance(kwargs.get('exc_info', (None, None))[1], BaseException):
            exc = kwargs['exc_info'][1]
            status_code = exc.status
            message = exc.message
        else:
            message = 'internal error'
        self.build_error(message, status=status_code)

    def is_api(self):
        """Return True if the path is from an API call."""
        return self.request.path.startswith('/v%s' % API_VERSION)

    def tobool(self, obj):
        """Convert some textual values to boolean."""
        if isinstance(obj, (list, tuple)):
            obj = obj[0]
        if isinstance(obj, str):
            obj = obj.lower()
        return self._bool_convert.get(obj, obj)
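
    # For example (illustrative values): tobool('Yes') -> True, tobool('0') -> False,
    # tobool(['off']) -> False; values with no mapping in _bool_convert are returned
    # unchanged, e.g. tobool('maybe') -> 'maybe'.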

    def arguments_tobool(self):
        """Return a dictionary of arguments, converted to booleans where possible."""
        return dict([(k, self.tobool(v)) for k, v in self.arguments.items()])

    def initialize(self, **kwargs):
        """Add every passed (key, value) as attributes of the instance."""
        for key, value in kwargs.items():
            setattr(self, key, value)

    @property
    def current_user(self):
        """Retrieve current user name from the secure cookie."""
        current_user = self.get_secure_cookie("user")
        if isinstance(current_user, bytes):
            current_user = current_user.decode('utf-8')
        return current_user

    @property
    def current_user_info(self):
        """Information about the current user, including their permissions."""
        current_user = self.current_user
        if current_user in self._users_cache:
            return self._users_cache[current_user]
        permissions = set([k for (k, v) in self.permissions.items() if v is True])
        user_info = {'permissions': permissions}
        if current_user:
            user_info['_id'] = current_user
            user = self.db.getOne('users', {'_id': current_user})
            if user:
                user_info = user
                permissions.update(set(user.get('permissions') or []))
                user_info['permissions'] = permissions
                user_info['isRegistered'] = True
        self._users_cache[current_user] = user_info
        return user_info

    def add_access_info(self, doc):
        """Add created/updated by/at to a document (modified in place and returned).

        :param doc: the doc to be updated
        :type doc: dict

        :returns: the updated document
        :rtype: dict"""
        user_id = self.current_user
        now = datetime.datetime.utcnow()
        if 'created_by' not in doc:
            doc['created_by'] = user_id
        if 'created_at' not in doc:
            doc['created_at'] = now
        doc['updated_by'] = user_id
        doc['updated_at'] = now
        return doc
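
    # A sketch of the effect (hypothetical values): given doc = {'title': 'x'} and a
    # logged-in user with id 'u1', the doc becomes {'title': 'x', 'created_by': 'u1',
    # 'created_at': <utcnow>, 'updated_by': 'u1', 'updated_at': <utcnow>}; on later
    # calls only updated_by/updated_at change.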

    def has_permission(self, permission):
        """Check permissions of the current user.

        :param permission: the permission to check
        :type permission: str

        :returns: True if the user is allowed to perform the action or False
        :rtype: bool
        """
        user_info = self.current_user_info or {}
        user_permissions = user_info.get('permissions') or []
        global_permission = '%s|all' % permission.split('|')[0]
        if 'admin|all' in user_permissions or global_permission in user_permissions or permission in user_permissions:
            return True
        collection_permission = self.permissions.get(permission)
        if isinstance(collection_permission, bool):
            return collection_permission
        if callable(collection_permission):
            return collection_permission(permission)
        return False
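
    # Permission strings follow the 'target|action' convention used in the
    # permissions dictionary above; e.g. checking 'event:tickets|update' succeeds
    # for a user holding that exact permission, the per-target catch-all
    # 'event:tickets|all', or the global 'admin|all'.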

    def user_authorized(self, username, password):
        """Check if a combination of username/password is valid.

        :param username: username or email
        :type username: str
        :param password: password
        :type password: str

        :returns: tuple like (bool_user_is_authorized, dict_user_info)
        :rtype: tuple"""
        query = [{'username': username}, {'email': username}]
        res = self.db.query('users', query)
        if not res:
            return (False, {})
        user = res[0]
        db_password = user.get('password') or ''
        if not db_password:
            return (False, {})
        match = self._re_split_salt.match(db_password)
        if not match:
            return (False, {})
        salt = match.group('salt')
        if utils.hash_password(password, salt=salt) == db_password:
            return (True, user)
        return (False, {})
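
    # Stored passwords are expected in the '$salt$hash' form matched by
    # _re_split_salt, e.g. (hypothetical) '$abc123$9f2e...'; the candidate password
    # is re-hashed with the stored salt and compared against the full stored string.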

    def build_error(self, message='', status=400):
        """Build and write an error message.

        :param message: textual message
        :type message: str
        :param status: HTTP status code
        :type status: int
        """
        self.set_status(status)
        self.write({'error': True, 'message': message})

    def logout(self):
        """Remove the secure cookie used for authentication."""
        if self.current_user in self._users_cache:
            del self._users_cache[self.current_user]
        self.clear_cookie("user")


class RootHandler(BaseHandler):
    """Handler for the / path."""
    angular_app_path = os.path.join(os.path.dirname(__file__), "angular_app")

    @gen.coroutine
    def get(self, *args, **kwargs):
        # serve the ./angular_app/index.html file
        with open(self.angular_app_path + "/index.html", 'r') as fd:
            self.write(fd.read())


class CollectionHandler(BaseHandler):
    """Base class for handlers that need to interact with the database backend.

    Introduce basic CRUD operations."""
    # set of documents we're managing (a collection in MongoDB or a table in a SQL database)
    document = None
    collection = None

    # name of the collection used to store incremental sequences
    counters_collection = 'counters'

    _id_chars = string.ascii_lowercase + string.digits

    def get_next_seq(self, seq):
        """Increment and return the new value of an ever-incrementing counter.

        :param seq: unique name of the sequence
        :type seq: str

        :returns: the next value of the sequence
        :rtype: int
        """
        if not self.db.query(self.counters_collection, {'seq_name': seq}):
            self.db.add(self.counters_collection, {'seq_name': seq, 'seq': 0})
        merged, doc = self.db.update(self.counters_collection,
                                     {'seq_name': seq},
                                     {'seq': 1},
                                     operation='increment')
        return doc.get('seq', 0)

    def gen_id(self, seq='ids', random_alpha=32):
        """Generate a unique, non-guessable ID.

        :param seq: the scope of the ever-incrementing sequence
        :type seq: str
        :param random_alpha: number of random lowercase alphanumeric chars
        :type random_alpha: int

        :returns: unique ID
        :rtype: str"""
        t = str(time.time()).replace('.', '_')
        seq = str(self.get_next_seq(seq))
        rand = ''.join([random.choice(self._id_chars) for x in range(random_alpha)])
        return '-'.join((t, seq, rand))
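
    # Generated IDs look like (hypothetical) '1490644660_12-42-3fz0...': the current
    # timestamp with '.' replaced by '_', the per-scope sequence value, and
    # random_alpha random [a-z0-9] characters, joined by '-'.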

    def _filter_results(self, results, params):
        """Filter a list using keys and values from a dictionary.

        :param results: the list to be filtered
        :type results: list
        :param params: a dictionary of items that must all be present in an original list item to be included in the return
        :type params: dict

        :returns: list of items that have all the keys with the same values as params
        :rtype: list"""
        if not params:
            return results
        params = monco.convert(params)
        filtered = []
        for result in results:
            add = True
            for key, value in params.items():
                if key not in result or result[key] != value:
                    add = False
                    break
            if add:
                filtered.append(result)
        return filtered
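
    # For example (illustrative): _filter_results([{'a': 1, 'b': 2}, {'a': 1}],
    # {'b': 2}) keeps only the first item; with an empty params dictionary the
    # original list is returned unchanged.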

    def _clean_dict(self, data):
        """Filter a dictionary (in place) to remove unwanted keywords in db queries.

        :param data: dictionary to clean
        :type data: dict"""
        if isinstance(data, dict):
            for key in list(data.keys()):
                if (isinstance(key, str) and key.startswith('$')) or key in ('_id', 'created_by', 'created_at',
                                                                             'updated_by', 'updated_at', 'isRegistered'):
                    del data[key]
        return data

    def _dict2env(self, data):
        """Convert a dictionary into a form suitable to be passed as environment variables.

        :param data: dictionary to convert
        :type data: dict"""
        ret = {}
        for key, value in data.items():
            if isinstance(value, (list, tuple, dict, set)):
                continue
            try:
                key = re_env_key.sub('', key)
                key = key.upper().encode('ascii', 'ignore')
                if not key:
                    continue
                if not isinstance(value, str):
                    value = str(value)
                ret[key] = value
            except:
                continue
        return ret
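
    # A sketch of the conversion (hypothetical input): {'event title': 'x', 'n': 3,
    # 'tags': ['a']} -> {b'EVENTTITLE': 'x', b'N': '3'}; container values are
    # skipped, keys are stripped to [a-zA-Z_], uppercased and ASCII-encoded, and
    # non-string values are stringified.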

    def apply_filter(self, data, filter_name):
        """Apply a filter to the data.

        :param data: the data to filter
        :returns: the modified (possibly also in place) data
        """
        filter_method = getattr(self, 'filter_%s' % filter_name, None)
        if filter_method is not None:
            data = filter_method(data)
        return data
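
    # Subclasses hook into this by defining methods named filter_<name>; e.g.
    # EventsHandler.filter_get below is picked up by apply_filter(output, 'get').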

    @gen.coroutine
    @authenticated
    def get(self, id_=None, resource=None, resource_id=None, acl=True, **kwargs):
        if resource:
            # Handle access to sub-resources.
            permission = '%s:%s%s|read' % (self.document, resource, '-all' if resource_id is None else '')
            if acl and not self.has_permission(permission):
                return self.build_error(status=401, message='insufficient permissions: %s' % permission)
            handler = getattr(self, 'handle_get_%s' % resource, None)
            if callable(handler):
                output = handler(id_, resource_id, **kwargs) or {}
                output = self.apply_filter(output, 'get_%s' % resource)
                self.write(output)
                return
            return self.build_error(status=404, message='unable to access resource: %s' % resource)
        if id_ is not None:
            # read a single document
            permission = '%s|read' % self.document
            if acl and not self.has_permission(permission):
                return self.build_error(status=401, message='insufficient permissions: %s' % permission)
            output = self.db.get(self.collection, id_)
            output = self.apply_filter(output, 'get')
            self.write(output)
        else:
            # return an object containing the list of all objects in the collection;
            # e.g.: {'events': [{'_id': 'obj1-id', ...}, {'_id': 'obj2-id', ...}, ...]}
            # Please, never return JSON lists that are not encapsulated into an object,
            # to avoid XSS vulnerabilities.
            permission = '%s|read' % self.collection
            if acl and not self.has_permission(permission):
                return self.build_error(status=401, message='insufficient permissions: %s' % permission)
            output = {self.collection: self.db.query(self.collection, self.arguments)}
            output = self.apply_filter(output, 'get_all')
            self.write(output)

    @gen.coroutine
    @authenticated
    def post(self, id_=None, resource=None, resource_id=None, **kwargs):
        data = escape.json_decode(self.request.body or '{}')
        self._clean_dict(data)
        method = self.request.method.lower()
        crud_method = 'create' if method == 'post' else 'update'
        env = {}
        if id_ is not None:
            env['%s_ID' % self.document.upper()] = id_
        self.add_access_info(data)
        if resource:
            permission = '%s:%s%s|%s' % (self.document, resource, '-all' if resource_id is None else '', crud_method)
            if not self.has_permission(permission):
                return self.build_error(status=401, message='insufficient permissions: %s' % permission)
            # Handle access to sub-resources.
            handler = getattr(self, 'handle_%s_%s' % (method, resource), None)
            if callable(handler):
                data = self.apply_filter(data, 'input_%s_%s' % (method, resource))
                output = handler(id_, resource_id, data, **kwargs)
                output = self.apply_filter(output, 'get_%s' % resource)
                env['RESOURCE'] = resource
                if resource_id:
                    env['%s_ID' % resource] = resource_id
                self.run_triggers('%s_%s_%s' % ('create' if resource_id is None else 'update', self.document, resource),
                                  stdin_data=output, env=env)
                self.write(output)
                return
            return self.build_error(status=404, message='unable to access resource: %s' % resource)
        if id_ is not None:
            permission = '%s|%s' % (self.document, crud_method)
            if not self.has_permission(permission):
                return self.build_error(status=401, message='insufficient permissions: %s' % permission)
            data = self.apply_filter(data, 'input_%s' % method)
            merged, newData = self.db.update(self.collection, id_, data)
            newData = self.apply_filter(newData, method)
            self.run_triggers('update_%s' % self.document, stdin_data=newData, env=env)
        else:
            permission = '%s|%s' % (self.collection, crud_method)
            if not self.has_permission(permission):
                return self.build_error(status=401, message='insufficient permissions: %s' % permission)
            data = self.apply_filter(data, 'input_%s_all' % method)
            newData = self.db.add(self.collection, data, _id=self.gen_id())
            newData = self.apply_filter(newData, '%s_all' % method)
            self.run_triggers('create_%s' % self.document, stdin_data=newData, env=env)
        self.write(newData)

    # PUT (update an existing document) is handled by the POST (create a new document) method;
    # in subclasses you can always separate sub-resources handlers like handle_post_tickets and handle_put_tickets
    put = post

    @gen.coroutine
    @authenticated
    def delete(self, id_=None, resource=None, resource_id=None, **kwargs):
        env = {}
        if id_ is not None:
            env['%s_ID' % self.document.upper()] = id_
        if resource:
            # Handle access to sub-resources.
            permission = '%s:%s%s|delete' % (self.document, resource, '-all' if resource_id is None else '')
            if not self.has_permission(permission):
                return self.build_error(status=401, message='insufficient permissions: %s' % permission)
            method = getattr(self, 'handle_delete_%s' % resource, None)
            if callable(method):
                output = method(id_, resource_id, **kwargs)
                env['RESOURCE'] = resource
                if resource_id:
                    env['%s_ID' % resource] = resource_id
                self.run_triggers('delete_%s_%s' % (self.document, resource), stdin_data=env, env=env)
                self.write(output)
                return
            return self.build_error(status=404, message='unable to access resource: %s' % resource)
        if id_ is not None:
            permission = '%s|delete' % self.document
            if not self.has_permission(permission):
                return self.build_error(status=401, message='insufficient permissions: %s' % permission)
            howMany = self.db.delete(self.collection, id_)
            env['DELETED_ITEMS'] = howMany
            self.run_triggers('delete_%s' % self.document, stdin_data=env, env=env)
        else:
            # Without an explicit ID there is nothing to delete; return here so the
            # success message below is not also written in the same response.
            return self.write({'success': False})
        self.write({'success': True})

    def on_timeout(self, cmd, pipe):
        """Kill a process that is taking too long to complete."""
        logging.debug('cmd %s is taking too long: killing it' % ' '.join(cmd))
        try:
            pipe.proc.kill()
        except:
            pass

    def on_exit(self, returncode, cmd, pipe):
        """Callback executed when a subprocess execution is over."""
        self.ioloop.remove_timeout(self.timeout)
        logging.debug('cmd: %s returncode: %d' % (' '.join(cmd), returncode))

    @gen.coroutine
    def run_subprocess(self, cmd, stdin_data=None, env=None):
        """Execute the given action.

        :param cmd: the command to be run with its command line arguments
        :type cmd: list

        :param stdin_data: data to be sent over stdin
        :type stdin_data: str
        :param env: environment of the process
        :type env: dict
        """
        self.ioloop = tornado.ioloop.IOLoop.instance()
        processed_env = self._dict2env(env)
        p = process.Subprocess(cmd, close_fds=True, stdin=process.Subprocess.STREAM,
                               stdout=process.Subprocess.STREAM, stderr=process.Subprocess.STREAM, env=processed_env)
        p.set_exit_callback(lambda returncode: self.on_exit(returncode, cmd, p))
        self.timeout = self.ioloop.add_timeout(datetime.timedelta(seconds=PROCESS_TIMEOUT),
                                               lambda: self.on_timeout(cmd, p))
        # tolerate a missing stdin_data (the default is None)
        yield gen.Task(p.stdin.write, (stdin_data or '').encode(ENCODING))
        p.stdin.close()
        out, err = yield [gen.Task(p.stdout.read_until_close),
                          gen.Task(p.stderr.read_until_close)]
        logging.debug('cmd: %s' % ' '.join(cmd))
        logging.debug('cmd stdout: %s' % out)
        logging.debug('cmd stderr: %s' % err)
        raise gen.Return((out, err))

    @gen.coroutine
    def run_triggers(self, action, stdin_data=None, env=None):
        """Asynchronously execute triggers for the given action.

        :param action: action name; scripts in directory ./data/triggers/{action}.d will be run
        :type action: str
        :param stdin_data: a python dictionary that will be serialized in JSON and sent to the process over stdin
        :type stdin_data: dict
        :param env: environment of the process
        :type env: dict
        """
        if not hasattr(self, 'data_dir'):
            return
        logging.debug('running triggers for action "%s"' % action)
        stdin_data = stdin_data or {}
        try:
            stdin_data = json.dumps(stdin_data)
        except:
            stdin_data = '{}'
        for script in glob.glob(os.path.join(self.data_dir, 'triggers', '%s.d' % action, '*')):
            if not (os.path.isfile(script) and os.access(script, os.X_OK)):
                continue
            out, err = yield gen.Task(self.run_subprocess, [script], stdin_data, env)
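
    # Triggers are plain executables; e.g. a (hypothetical) executable script at
    # ./data/triggers/attends.d/10-notify.sh receives the JSON-serialized
    # stdin_data on its standard input and the env dictionary as environment
    # variables.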

    def build_ws_url(self, path, proto='ws', host=None):
        """Return a WebSocket url from a path."""
        try:
            args = '?uuid=%s' % self.get_argument('uuid')
        except:
            args = ''
        return 'ws://127.0.0.1:%s/ws/%s%s' % (self.listen_port + 1, path, args)
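
    # E.g. (hypothetical listen_port=5242): build_ws_url('event/42/tickets/updates')
    # -> 'ws://127.0.0.1:5243/ws/event/42/tickets/updates?uuid=...'; note that the
    # proto and host parameters are currently unused.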

    @gen.coroutine
    def send_ws_message(self, path, message):
        """Send a WebSocket message to all the connected clients.

        :param path: partial path used to build the WebSocket url
        :type path: str
        :param message: message to send
        :type message: str
        """
        try:
            ws = yield tornado.websocket.websocket_connect(self.build_ws_url(path))
            ws.write_message(message)
            ws.close()
        except Exception as e:
            self.logger.error('Error yielding WebSocket message: %s', e)


class EventsHandler(CollectionHandler):
    """Handle requests for Events."""
    document = 'event'
    collection = 'events'

    def _mangle_event(self, event):
        # Some in-place changes to an event
        if 'tickets' in event:
            event['tickets_sold'] = len([t for t in event['tickets'] if not t.get('cancelled')])
            event['no_tickets_for_sale'] = False
            try:
                self._check_sales_datetime(event)
                self._check_number_of_tickets(event)
            except InputException:
                event['no_tickets_for_sale'] = True
            if not self.has_permission('tickets-all|read'):
                event['tickets'] = []
        return event

    def filter_get(self, output):
        return self._mangle_event(output)

    def filter_get_all(self, output):
        for event in output.get('events') or []:
            self._mangle_event(event)
        return output

    def filter_input_post(self, data):
        # Auto-generate the group_id, if missing.
        if 'group_id' not in data:
            data['group_id'] = self.gen_id()
        return data

    filter_input_post_all = filter_input_post
    filter_input_put = filter_input_post

    def filter_input_post_tickets(self, data):
        # Prevent users from updating their own 'attended' status.
        if not self.has_permission('event|update'):
            if 'attended' in data:
                del data['attended']
        self.add_access_info(data)
        return data

    filter_input_put_tickets = filter_input_post_tickets

    def handle_get_group_persons(self, id_, resource_id=None):
        persons = []
        this_query = {'_id': id_}
        this_event = self.db.query('events', this_query)[0]
        group_id = this_event.get('group_id')
        if group_id is None:
            return {'persons': persons}
        this_persons = [p for p in (this_event.get('tickets') or []) if not p.get('cancelled')]
        this_emails = [_f for _f in [p.get('email') for p in this_persons] if _f]
        all_query = {'group_id': group_id}
        events = self.db.query('events', all_query)
        for event in events:
            if id_ is not None and str(event.get('_id')) == id_:
                continue
            persons += [p for p in (event.get('tickets') or []) if p.get('email') and p.get('email') not in this_emails]
        return {'persons': persons}

    def _get_ticket_data(self, ticket_id_or_query, tickets, only_one=True):
        """Filter a list of tickets, returning the first item with a given _id
        or whose keys all match the respective values of a query dictionary."""
        matches = []
        for ticket in tickets:
            if isinstance(ticket_id_or_query, dict):
                if all(ticket.get(k) == v for k, v in ticket_id_or_query.items()):
                    matches.append(ticket)
                    if only_one:
                        break
            else:
                if str(ticket.get('_id')) == ticket_id_or_query:
                    matches.append(ticket)
                    if only_one:
                        break
        if only_one:
            if matches:
                return matches[0]
            return {}
        return matches
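
    # E.g. (illustrative): _get_ticket_data('abc', tickets) returns the ticket whose
    # _id stringifies to 'abc' (or {} if none does), while
    # _get_ticket_data({'email': 'x@y'}, tickets, only_one=False) returns the list
    # of every matching ticket.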

    def handle_get_tickets(self, id_, resource_id=None):
        # Return every ticket registered at this event, or the information
        # about a specific ticket.
        query = {'_id': id_}
        event = self.db.query('events', query)[0]
        if resource_id:
            return {'ticket': self._get_ticket_data(resource_id, event.get('tickets') or [])}
        tickets = self._filter_results(event.get('tickets') or [], self.arguments)
        return {'tickets': tickets}

    def _check_number_of_tickets(self, event):
        if self.has_permission('admin|all'):
            return
        number_of_tickets = event.get('number_of_tickets')
        if number_of_tickets is None:
            return
        try:
            number_of_tickets = int(number_of_tickets)
        except ValueError:
            return
        tickets = event.get('tickets') or []
        tickets = [t for t in tickets if not t.get('cancelled')]
        # compare against the value converted to an integer above
        if len(tickets) >= number_of_tickets:
            raise InputException('no more tickets available')

    def _check_sales_datetime(self, event):
        if self.has_permission('admin|all'):
            return
        begin_date = event.get('ticket_sales_begin_date')
        begin_time = event.get('ticket_sales_begin_time')
        end_date = event.get('ticket_sales_end_date')
        end_time = event.get('ticket_sales_end_time')
        utc = dateutil.tz.tzutc()
        is_dst = time.daylight and time.localtime().tm_isdst > 0
        utc_offset = - (time.altzone if is_dst else time.timezone)
        if begin_date is None:
            begin_date = datetime.datetime.now(tz=utc).replace(hour=0, minute=0, second=0, microsecond=0)
        else:
            begin_date = dateutil.parser.parse(begin_date)
            # Compensate UTC and DST offset, that otherwise would be added 2 times (one for date, one for time)
            begin_date = begin_date + datetime.timedelta(seconds=utc_offset)
        if begin_time is None:
            begin_time_h = 0
            begin_time_m = 0
        else:
            begin_time = dateutil.parser.parse(begin_time)
            begin_time_h = begin_time.hour
            begin_time_m = begin_time.minute
        now = datetime.datetime.now(tz=utc)
        begin_datetime = begin_date + datetime.timedelta(hours=begin_time_h, minutes=begin_time_m)
        if now < begin_datetime:
            raise InputException('ticket sales not yet started')

        if end_date is None:
            end_date = datetime.datetime.today().replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=utc)
        else:
            end_date = dateutil.parser.parse(end_date)
            end_date = end_date + datetime.timedelta(seconds=utc_offset)
        if end_time is None:
            end_time = end_date
            end_time_h = 23
            end_time_m = 59
        else:
            end_time = dateutil.parser.parse(end_time, yearfirst=True)
            end_time_h = end_time.hour
            end_time_m = end_time.minute
        end_datetime = end_date + datetime.timedelta(hours=end_time_h, minutes=end_time_m+1)
        if now > end_datetime:
            raise InputException('ticket sales have ended')
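
    # Offset sketch: on a server at UTC+2 without DST, time.timezone == -7200, so
    # utc_offset == +7200 seconds; with DST active, time.altzone is used instead.
    # Stored begin/end dates parsed by dateutil are shifted once by this amount so
    # the offset is not counted twice for the date and the time parts.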

    def handle_post_tickets(self, id_, resource_id, data):
        event = self.db.query('events', {'_id': id_})[0]
        self._check_sales_datetime(event)
        self._check_number_of_tickets(event)
        uuid, arguments = self.uuid_arguments
        self._clean_dict(data)
        data['seq'] = self.get_next_seq('event_%s_tickets' % id_)
        data['seq_hex'] = '%06X' % data['seq']
        data['_id'] = ticket_id = self.gen_id()
        self.add_access_info(data)
        ret = {'action': 'add', 'ticket': data, 'uuid': uuid}
        merged, doc = self.db.update('events',
                                     {'_id': id_},
                                     {'tickets': data},
                                     operation='appendUnique',
                                     create=False)
        if doc:
            self.send_ws_message('event/%s/tickets/updates' % id_, json.dumps(ret))
            ticket = self._get_ticket_data(ticket_id, doc.get('tickets') or [])
            env = dict(ticket)
            # 'TICKED_ID' (sic) is kept as-is: existing trigger scripts may rely on it.
            env.update({'PERSON_ID': ticket_id, 'TICKED_ID': ticket_id, 'EVENT_ID': id_,
                        'EVENT_TITLE': doc.get('title', ''), 'WEB_USER': self.current_user_info.get('username', ''),
                        'WEB_REMOTE_IP': self.request.remote_ip})
            stdin_data = {'new': ticket,
                          'event': doc,
                          'merged': merged
                          }
            self.run_triggers('create_ticket_in_event', stdin_data=stdin_data, env=env)
        return ret

    def handle_put_tickets(self, id_, ticket_id, data):
        # Update an existing entry for a ticket registered at this event.
        self._clean_dict(data)
        uuid, arguments = self.uuid_arguments
        _errorMessage = ''
        if '_errorMessage' in arguments:
            _errorMessage = arguments['_errorMessage']
            del arguments['_errorMessage']
        _searchFor = False
        if '_searchFor' in arguments:
            _searchFor = arguments['_searchFor']
            del arguments['_searchFor']
        query = dict([('tickets.%s' % k, v) for k, v in arguments.items()])
        query['_id'] = id_
        if ticket_id is not None:
            query['tickets._id'] = ticket_id
            ticket_query = {'_id': ticket_id}
        else:
            ticket_query = arguments
        old_ticket_data = {}
        current_event = self.db.query(self.collection, query)
        if current_event:
            current_event = current_event[0]
        else:
            current_event = {}
        self._check_sales_datetime(current_event)
        tickets = current_event.get('tickets') or []
        matching_tickets = self._get_ticket_data(ticket_query, tickets, only_one=False)
        nr_matches = len(matching_tickets)
        if nr_matches > 1:
            ret = {'error': True, 'message': 'more than one ticket matched. %s' % _errorMessage, 'query': query,
                   'searchFor': _searchFor, 'uuid': uuid, 'username': self.current_user_info.get('username', '')}
            self.send_ws_message('event/%s/tickets/updates' % id_, json.dumps(ret))
            self.set_status(400)
            return ret
        elif nr_matches == 0:
            ret = {'error': True, 'message': 'no ticket matched. %s' % _errorMessage, 'query': query,
                   'searchFor': _searchFor, 'uuid': uuid, 'username': self.current_user_info.get('username', '')}
            self.send_ws_message('event/%s/tickets/updates' % id_, json.dumps(ret))
            self.set_status(400)
            return ret
        else:
            old_ticket_data = matching_tickets[0]

        # We have changed the "cancelled" status of a ticket to False; check if we still have a ticket available
        if 'number_of_tickets' in current_event and old_ticket_data.get('cancelled') and not data.get('cancelled'):
            self._check_number_of_tickets(current_event)

        self.add_access_info(data)
        merged, doc = self.db.update('events', query,
                                     data, updateList='tickets', create=False)
        new_ticket_data = self._get_ticket_data(ticket_query,
                                                doc.get('tickets') or [])
        env = dict(new_ticket_data)
        # always take the ticket_id from the new ticket
        ticket_id = str(new_ticket_data.get('_id'))
        env.update({'PERSON_ID': ticket_id, 'TICKED_ID': ticket_id, 'EVENT_ID': id_,
                    'EVENT_TITLE': doc.get('title', ''), 'WEB_USER': self.current_user_info.get('username', ''),
                    'WEB_REMOTE_IP': self.request.remote_ip})
        stdin_data = {'old': old_ticket_data,
                      'new': new_ticket_data,
                      'event': doc,
                      'merged': merged
                      }
        self.run_triggers('update_ticket_in_event', stdin_data=stdin_data, env=env)
        if old_ticket_data and old_ticket_data.get('attended') != new_ticket_data.get('attended'):
            if new_ticket_data.get('attended'):
                self.run_triggers('attends', stdin_data=stdin_data, env=env)

        ret = {'action': 'update', '_id': ticket_id, 'ticket': new_ticket_data,
               'uuid': uuid, 'username': self.current_user_info.get('username', '')}
        if old_ticket_data != new_ticket_data:
            self.send_ws_message('event/%s/tickets/updates' % id_, json.dumps(ret))
        return ret

    def handle_delete_tickets(self, id_, ticket_id):
        # Remove a specific ticket from the list of tickets registered at this event.
        uuid, arguments = self.uuid_arguments
        doc = self.db.query('events',
                            {'_id': id_, 'tickets._id': ticket_id})
        ret = {'action': 'delete', '_id': ticket_id, 'uuid': uuid}
        if doc:
            ticket = self._get_ticket_data(ticket_id, doc[0].get('tickets') or [])
            merged, rdoc = self.db.update('events',
                                          {'_id': id_},
                                          {'tickets': {'_id': ticket_id}},
                                          operation='delete',
                                          create=False)
            self.send_ws_message('event/%s/tickets/updates' % id_, json.dumps(ret))
            env = dict(ticket)
            env.update({'PERSON_ID': ticket_id, 'TICKED_ID': ticket_id, 'EVENT_ID': id_,
                        'EVENT_TITLE': rdoc.get('title', ''), 'WEB_USER': self.current_user_info.get('username', ''),
                        'WEB_REMOTE_IP': self.request.remote_ip})
            stdin_data = {'old': ticket,
                          'event': rdoc,
                          'merged': merged
                          }
            self.run_triggers('delete_ticket_in_event', stdin_data=stdin_data, env=env)
        return ret


class UsersHandler(CollectionHandler):
    """Handle requests for Users."""
    document = 'user'
    collection = 'users'

    def filter_get(self, data):
        if 'password' in data:
            del data['password']
        if '_id' in data:
            # Also add a 'tickets' list with all the tickets created by this user
            tickets = []
            events = self.db.query('events', {'tickets.created_by': data['_id']})
            for event in events:
                event_title = event.get('title') or ''
                event_id = str(event.get('_id'))
                evt_tickets = self._filter_results(event.get('tickets') or [], {'created_by': data['_id']})
                for evt_ticket in evt_tickets:
                    evt_ticket['event_title'] = event_title
                    evt_ticket['event_id'] = event_id
                tickets.extend(evt_tickets)
            data['tickets'] = tickets
        return data

    def filter_get_all(self, data):
        if 'users' not in data:
            return data
        for user in data['users']:
            if 'password' in user:
                del user['password']
        return data

    @gen.coroutine
    @authenticated
    def get(self, id_=None, resource=None, resource_id=None, acl=True, **kwargs):
        if id_ is not None:
            if self.has_permission('user|read') or self.current_user == id_:
                acl = False
        super(UsersHandler, self).get(id_, resource, resource_id, acl=acl, **kwargs)

    def filter_input_post_all(self, data):
        username = (data.get('username') or '').strip()
        password = (data.get('password') or '').strip()
        email = (data.get('email') or '').strip()
        if not (username and password):
            raise InputException('missing username or password')
        res = self.db.query('users', {'username': username})
        if res:
            raise InputException('username already exists')
        return {'username': username, 'password': utils.hash_password(password),
                'email': email, '_id': self.gen_id()}

    def filter_input_put(self, data):
        old_pwd = data.get('old_password')
        new_pwd = data.get('new_password')
        if old_pwd is not None:
            del data['old_password']
        if new_pwd is not None:
            del data['new_password']
        authorized, user = self.user_authorized(data['username'], old_pwd)
        if not (self.has_permission('user|update') or (authorized and
                self.current_user_info.get('username') == data['username'])):
            raise InputException('not authorized to change password')
        data['password'] = utils.hash_password(new_pwd)
        if '_id' in data:
            del data['_id']
        if 'username' in data:
            del data['username']
        if not self.has_permission('admin|all'):
            if 'permissions' in data:
                del data['permissions']
        else:
            if 'isAdmin' in data:
                if 'permissions' not in data:
                    data['permissions'] = []
                if 'admin|all' in data['permissions'] and not data['isAdmin']:
                    data['permissions'].remove('admin|all')
                elif 'admin|all' not in data['permissions'] and data['isAdmin']:
                    data['permissions'].append('admin|all')
                del data['isAdmin']
        return data

    @gen.coroutine
    @authenticated
    def put(self, id_=None, resource=None, resource_id=None, **kwargs):
        if id_ is None:
            return self.build_error(status=404, message='unable to access the resource')
        if not (self.has_permission('user|update') or self.current_user == id_):
            return self.build_error(status=401, message='insufficient permissions: user|update or current user')
        super(UsersHandler, self).put(id_, resource, resource_id, **kwargs)


class EbCSVImportPersonsHandler(BaseHandler):
    """Importer for CSV files exported from Eventbrite."""
    csvRemap = {
        'Nome evento': 'event_title',
        'ID evento': 'event_id',
        'N. codice a barre': 'ebqrcode',
        'Cognome acquirente': 'surname',
        'Nome acquirente': 'name',
        'E-mail acquirente': 'email',
        'Cognome': 'surname',
        'Nome': 'name',
        'E-mail': 'email',
        'Indirizzo e-mail': 'email',
        'Tipologia biglietto': 'ticket_kind',
        'Data partecipazione': 'attending_datetime',
        'Data check-in': 'checkin_datetime',
        'Ordine n.': 'order_nr',
        'ID ordine': 'order_nr',
        'Titolo professionale': 'job title',
        'Azienda': 'company',
        'Prefisso': 'name_title',
        'Prefisso (Sig., Sig.ra, ecc.)': 'name title',

        'Order #': 'order_nr',
        'Prefix': 'name title',
        'First Name': 'name',
        'Last Name': 'surname',
        'Suffix': 'name suffix',
        'Email': 'email',
        'Attendee #': 'attendee_nr',
        'Barcode #': 'ebqrcode',
        'Company': 'company'
    }
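
    # The remap translates Eventbrite CSV column headers (Italian or English
    # exports) to internal ticket fields; e.g. a column named 'Cognome' or
    # 'Last Name' both end up in the 'surname' field of the imported person.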

    @gen.coroutine
    @authenticated
    def post(self, **kwargs):
        # import a CSV list of persons
        event_handler = EventsHandler(self.application, self.request)
        event_handler.db = self.db
        event_handler.logger = self.logger
        event_id = None
        try:
            event_id = self.get_body_argument('targetEvent')
        except:
            pass
        if event_id is None:
            return self.build_error('invalid event')
        reply = dict(total=0, valid=0, merged=0, new_in_event=0)
        event_details = event_handler.db.query('events', {'_id': event_id})
        if not event_details:
            return self.build_error('invalid event')
        all_emails = set()
        for ticket in (event_details[0].get('tickets') or []):
            all_emails.add('%s_%s_%s' % (ticket.get('name'), ticket.get('surname'), ticket.get('email')))
        for fieldname, contents in self.request.files.items():
            for content in contents:
                filename = content['filename']
                parseStats, persons = utils.csvParse(content['body'], remap=self.csvRemap)
                reply['total'] += parseStats['total']
                for person in persons:
                    if not person:
                        continue
                    reply['valid'] += 1
                    person['attended'] = False
                    person['from_file'] = filename
                    self.add_access_info(person)
                    duplicate_check = '%s_%s_%s' % (person.get('name'), person.get('surname'), person.get('email'))
                    if duplicate_check in all_emails:
                        continue
                    all_emails.add(duplicate_check)
                    event_handler.handle_post_tickets(event_id, None, person)
                    reply['new_in_event'] += 1
        self.write(reply)


class SettingsHandler(BaseHandler):
    """Handle requests for Settings."""
    @gen.coroutine
    @authenticated
    def get(self, **kwargs):
        query = self.arguments_tobool()
        settings = self.db.query('settings', query)
        self.write({'settings': settings})
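
    # Example (a sketch; query string arguments act as a filter on the
    # 'settings' collection, and 'some_setting' is a placeholder key):
    #
    #   import requests
    #   requests.get('http://127.0.0.1:5242/settings',
    #                params={'setting': 'some_setting'}).json()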


class InfoHandler(BaseHandler):
    """Handle requests for information about the logged in user."""
    @gen.coroutine
    def get(self, **kwargs):
        info = {}
        user_info = self.current_user_info
        if user_info:
            info['user'] = user_info
        info['authentication_required'] = self.authentication
        self.write({'info': info})
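
    # Example response (a sketch, for an anonymous client on an instance
    # running with authentication disabled):
    #
    #   import requests
    #   requests.get('http://127.0.0.1:5242/info').json()
    #   # -> {'info': {'authentication_required': False}}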


class WebSocketEventUpdatesHandler(tornado.websocket.WebSocketHandler):
    """Manage WebSockets."""
    def _clean_url(self, url):
        # normalize the URL: collapse repeated slashes and drop the query string
        url = re_slashes.sub('/', url)
        ridx = url.rfind('?')
        if ridx != -1:
            url = url[:ridx]
        return url

    def open(self, event_id, *args, **kwargs):
        try:
            self.uuid = self.get_argument('uuid')
        except tornado.web.MissingArgumentError:
            self.uuid = None
        url = self._clean_url(self.request.uri)
        logging.debug('WebSocketEventUpdatesHandler.open event_id:%s url:%s' % (event_id, url))
        _ws_clients.setdefault(url, {})
        if self.uuid and self.uuid not in _ws_clients[url]:
            _ws_clients[url][self.uuid] = self
        logging.debug('WebSocketEventUpdatesHandler.open %s clients connected' % len(_ws_clients[url]))

    def on_message(self, message):
        # relay the message to every client connected to the same URL
        url = self._clean_url(self.request.uri)
        logging.debug('WebSocketEventUpdatesHandler.on_message url:%s' % url)
        count = 0
        _to_delete = set()
        for uuid, client in _ws_clients.get(url, {}).items():
            try:
                client.write_message(message)
            except tornado.websocket.WebSocketClosedError:
                _to_delete.add(uuid)
                continue
            count += 1
        for uuid in _to_delete:
            try:
                del _ws_clients[url][uuid]
            except KeyError:
                pass
        logging.debug('WebSocketEventUpdatesHandler.on_message sent message to %d clients' % count)
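
    # Example client (a sketch, using Tornado's own WebSocket client against
    # the local ws server started below on port+1; EVENT_ID and the uuid are
    # placeholders):
    #
    #   from tornado import gen, ioloop, websocket
    #
    #   @gen.coroutine
    #   def listen():
    #       conn = yield websocket.websocket_connect(
    #               'ws://127.0.0.1:5243/ws/event/EVENT_ID/tickets/updates?uuid=abc')
    #       while True:
    #           msg = yield conn.read_message()
    #           if msg is None:
    #               break
    #           print(msg)
    #
    #   ioloop.IOLoop.instance().run_sync(listen)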


class LoginHandler(RootHandler):
    """Handle user authentication requests."""

    @gen.coroutine
    def get(self, **kwargs):
        # show the login page; API calls get a plain 401
        if self.is_api():
            self.set_status(401)
            self.write({'error': True,
                        'message': 'authentication required'})

    @gen.coroutine
    def post(self, *args, **kwargs):
        # authenticate a user; credentials are accepted both as form
        # arguments and as a JSON body
        try:
            password = self.get_body_argument('password')
            username = self.get_body_argument('username')
        except tornado.web.MissingArgumentError:
            data = escape.json_decode(self.request.body or '{}')
            username = data.get('username')
            password = data.get('password')
        if not (username and password):
            self.set_status(401)
            self.write({'error': True, 'message': 'missing username or password'})
            return
        authorized, user = self.user_authorized(username, password)
        if authorized and 'username' in user and '_id' in user:
            id_ = str(user['_id'])
            username = user['username']
            logging.info('successful login for user %s (id: %s)' % (username, id_))
            self.set_secure_cookie("user", id_)
            self.write({'error': False, 'message': 'successful login'})
            return
        logging.info('login failed for user %s' % username)
        self.set_status(401)
        self.write({'error': True, 'message': 'wrong username or password'})
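
    # Example login (a sketch; 'admin'/'eventman' are the credentials created
    # at startup if no admin user exists, and should be changed in production):
    #
    #   import requests
    #   s = requests.Session()
    #   s.post('http://127.0.0.1:5242/v1.0/login',
    #          json={'username': 'admin', 'password': 'eventman'})
    #   # the session cookie now authenticates subsequent API calls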


class LogoutHandler(BaseHandler):
    """Handle user logout requests."""
    @gen.coroutine
    def get(self, **kwargs):
        # log the user out
        logging.info('logout')
        self.logout()
        self.write({'error': False, 'message': 'logged out'})


def run():
    """Run the Tornado web application."""
    # command line arguments; can also be written in a configuration file,
    # specified with the --config argument.
    define("port", default=5242, help="run on the given port", type=int)
    define("address", default='', help="bind the server at the given address", type=str)
    define("data_dir", default=os.path.join(os.path.dirname(__file__), "data"),
           help="specify the directory used to store the data")
    define("ssl_cert", default=os.path.join(os.path.dirname(__file__), 'ssl', 'eventman_cert.pem'),
           help="specify the SSL certificate to use for secure connections")
    define("ssl_key", default=os.path.join(os.path.dirname(__file__), 'ssl', 'eventman_key.pem'),
           help="specify the SSL private key to use for secure connections")
    define("mongo_url", default=None,
           help="URL to MongoDB server", type=str)
    define("db_name", default='eventman',
           help="name of the MongoDB database to use", type=str)
    define("authentication", default=False, help="if set to true, authentication is required")
    define("debug", default=False, help="run in debug mode")
    define("config", help="read configuration file",
           callback=lambda path: tornado.options.parse_config_file(path, final=False))
    tornado.options.parse_command_line()
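
    # A configuration file (a sketch, passed with --config=/path/to/file) is
    # plain Python assigning the options defined above:
    #
    #   port = 8080
    #   db_name = 'eventman'
    #   authentication = True
    #   debug = False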

    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    if options.debug:
        logger.setLevel(logging.DEBUG)

    # enable SSL only if both the certificate and the private key are present
    ssl_options = {}
    if os.path.isfile(options.ssl_key) and os.path.isfile(options.ssl_cert):
        ssl_options = dict(certfile=options.ssl_cert, keyfile=options.ssl_key)

    # database backend connector
    db_connector = monco.Monco(url=options.mongo_url, dbName=options.db_name)
    init_params = dict(db=db_connector, data_dir=options.data_dir, listen_port=options.port,
                       authentication=options.authentication, logger=logger, ssl_options=ssl_options)

    # If not present, we store a user 'admin' with password 'eventman' into the database.
    if not db_connector.query('users', {'username': 'admin'}):
        db_connector.add('users',
                         {'username': 'admin', 'password': utils.hash_password('eventman'),
                          'permissions': ['admin|all']})

    # If present, use the cookie_secret stored into the database.
    cookie_secret = db_connector.query('settings', {'setting': 'server_cookie_secret'})
    if cookie_secret:
        cookie_secret = cookie_secret[0]['cookie_secret']
    else:
        # the salt guarantees its uniqueness
        cookie_secret = utils.hash_password('__COOKIE_SECRET__')
        db_connector.add('settings',
                         {'setting': 'server_cookie_secret', 'cookie_secret': cookie_secret})
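
    # Persisting the secret in the database keeps the secure cookies (and thus
    # the logged-in sessions) valid across server restarts.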

    _ws_handler = (r"/ws/+event/+(?P<event_id>[\w\d_-]+)/+tickets/+updates/?", WebSocketEventUpdatesHandler)
    _events_path = r"/events/?(?P<id_>[\w\d_-]+)?/?(?P<resource>[\w\d_-]+)?/?(?P<resource_id>[\w\d_-]+)?"
    _users_path = r"/users/?(?P<id_>[\w\d_-]+)?/?(?P<resource>[\w\d_-]+)?/?(?P<resource_id>[\w\d_-]+)?"
    application = tornado.web.Application([
            (_events_path, EventsHandler, init_params),
            (r'/v%s%s' % (API_VERSION, _events_path), EventsHandler, init_params),
            (_users_path, UsersHandler, init_params),
            (r'/v%s%s' % (API_VERSION, _users_path), UsersHandler, init_params),
            (r"/(?:index.html)?", RootHandler, init_params),
            (r"/ebcsvpersons", EbCSVImportPersonsHandler, init_params),
            (r"/settings", SettingsHandler, init_params),
            (r"/info", InfoHandler, init_params),
            _ws_handler,
            (r'/login', LoginHandler, init_params),
            (r'/v%s/login' % API_VERSION, LoginHandler, init_params),
            (r'/logout', LogoutHandler),
            (r'/v%s/logout' % API_VERSION, LogoutHandler),
            (r'/(.*)', tornado.web.StaticFileHandler, {"path": "angular_app"})
        ],
        template_path=os.path.join(os.path.dirname(__file__), "templates"),
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        cookie_secret=cookie_secret,
        login_url='/login',
        debug=options.debug)
    http_server = tornado.httpserver.HTTPServer(application, ssl_options=ssl_options or None)
    logger.info('Start serving on %s://%s:%d', 'https' if ssl_options else 'http',
                options.address if options.address else '127.0.0.1',
                options.port)
    http_server.listen(options.port, options.address)

    # Also listen on options.port+1 for our local ws connection.
    ws_application = tornado.web.Application([_ws_handler], debug=options.debug)
    ws_http_server = tornado.httpserver.HTTPServer(ws_application)
    ws_http_server.listen(options.port+1, address='127.0.0.1')
    logger.debug('Starting WebSocket on ws://127.0.0.1:%d', options.port+1)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == '__main__':
    try:
        run()
    except KeyboardInterrupt:
        print('Server stopped')