commit 30673a1f682d7756f8793608b2d899f6387cbaee
Author: Davide Alberani
Date:   Mon Jan 2 21:52:33 2017 +0100

    initial commit

diff --git a/README.md b/README.md
new file mode 100644
index 0000000..624ecd4
--- /dev/null
+++ b/README.md
@@ -0,0 +1,18 @@
+# ibt2
+
+> I'll be there, 2
+
+## Build Setup
+
+``` bash
+# install dependencies
+npm install
+
+# serve with hot reload at localhost:8080
+npm run dev
+
+# build for production with minification
+npm run build
+```
+
+For a detailed explanation of how things work, check out the [guide](http://vuejs-templates.github.io/webpack/) and the [docs for vue-loader](http://vuejs.github.io/vue-loader).
diff --git a/config/dev.env.js b/config/dev.env.js
new file mode 100644
index 0000000..efead7c
--- /dev/null
+++ b/config/dev.env.js
@@ -0,0 +1,6 @@
+var merge = require('webpack-merge')
+var prodEnv = require('./prod.env')
+
+module.exports = merge(prodEnv, {
+  NODE_ENV: '"development"'
+})
diff --git a/config/index.js b/config/index.js
new file mode 100644
index 0000000..207dfbd
--- /dev/null
+++ b/config/index.js
@@ -0,0 +1,32 @@
+// see http://vuejs-templates.github.io/webpack for documentation.
+var path = require('path')
+
+module.exports = {
+  build: {
+    env: require('./prod.env'),
+    index: path.resolve(__dirname, '../dist/index.html'),
+    assetsRoot: path.resolve(__dirname, '../dist'),
+    assetsSubDirectory: 'static',
+    assetsPublicPath: '/',
+    productionSourceMap: true,
+    // Gzip off by default as many popular static hosts such as
+    // Surge or Netlify already gzip all static assets for you.
+    // Before setting to `true`, make sure to:
+    // npm install --save-dev compression-webpack-plugin
+    productionGzip: false,
+    productionGzipExtensions: ['js', 'css']
+  },
+  dev: {
+    env: require('./dev.env'),
+    port: 8080,
+    assetsSubDirectory: 'static',
+    assetsPublicPath: '/',
+    proxyTable: {},
+    // CSS Sourcemaps off by default because relative paths are "buggy"
+    // with this option, according to the CSS-Loader README
+    // (https://github.com/webpack/css-loader#sourcemaps)
+    // In our experience, they generally work as expected,
+    // just be aware of this issue when enabling this option.
+    cssSourceMap: false
+  }
+}
diff --git a/config/prod.env.js b/config/prod.env.js
new file mode 100644
index 0000000..773d263
--- /dev/null
+++ b/config/prod.env.js
@@ -0,0 +1,3 @@
+module.exports = {
+  NODE_ENV: '"production"'
+}
diff --git a/ibt2.py b/ibt2.py
new file mode 100755
index 0000000..ce6f688
--- /dev/null
+++ b/ibt2.py
@@ -0,0 +1,705 @@
+#!/usr/bin/env python
+"""I'll Be There 2 (ibt2) - an oversimplified attendees registration system.
+
+Copyright 2016 Davide Alberani
+               RaspiBO
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+""" + +import os +import re +import string +import logging +import datetime +from operator import itemgetter +import itertools + +import tornado.httpserver +import tornado.ioloop +import tornado.options +from tornado.options import define, options +import tornado.web +from tornado import gen, escape + +import utils +import monco + +ENCODING = 'utf-8' +PROCESS_TIMEOUT = 60 + +API_VERSION = '1.0' + +re_env_key = re.compile('[^A-Z_]+') +re_slashes = re.compile(r'//+') + + +class BaseException(Exception): + """Base class for ibt2 custom exceptions. + + :param message: text message + :type message: str + :param status: numeric http status code + :type status: int""" + def __init__(self, message, status=400): + super(BaseException, self).__init__(message) + self.message = message + self.status = status + + +class InputException(BaseException): + """Exception raised by errors in input handling.""" + pass + + +class BaseHandler(tornado.web.RequestHandler): + """Base class for request handlers.""" + permissions = { + 'day|read': True, + 'day:groups|read': True, + 'day:groups|create': True, + 'day:groups|update': True, + 'day:groups-all|read': True, + 'day:groups-all|create': True, + 'days|read': True, + 'days|create': True, + 'users|create': True + } + + # Cache currently connected users. + _users_cache = {} + + # A property to access the first value of each argument. + arguments = property(lambda self: dict([(k, v[0]) + for k, v in self.request.arguments.iteritems()])) + + _bool_convert = { + '0': False, + 'n': False, + 'f': False, + 'no': False, + 'off': False, + 'false': False, + '1': True, + 'y': True, + 't': True, + 'on': True, + 'yes': True, + 'true': True + } + + _re_split_salt = re.compile(r'\$(?P.+)\$(?P.+)') + + def write_error(self, status_code, **kwargs): + """Default error handler.""" + if isinstance(kwargs.get('exc_info', (None, None))[1], BaseException): + exc = kwargs['exc_info'][1] + status_code = exc.status + message = exc.message + else: + message = 'internal error' + self.build_error(message, status=status_code) + + def is_api(self): + """Return True if the path is from an API call.""" + return self.request.path.startswith('/v%s' % API_VERSION) + + def tobool(self, obj): + """Convert some textual values to boolean.""" + if isinstance(obj, (list, tuple)): + obj = obj[0] + if isinstance(obj, (str, unicode)): + obj = obj.lower() + return self._bool_convert.get(obj, obj) + + def arguments_tobool(self): + """Return a dictionary of arguments, converted to booleans where possible.""" + return dict([(k, self.tobool(v)) for k, v in self.arguments.iteritems()]) + + def initialize(self, **kwargs): + """Add every passed (key, value) as attributes of the instance.""" + for key, value in kwargs.iteritems(): + setattr(self, key, value) + + @property + def current_user(self): + """Retrieve current user name from the secure cookie.""" + return self.get_secure_cookie("user") + + @property + def current_user_info(self): + """Information about the current user, including their permissions.""" + current_user = self.current_user + if current_user in self._users_cache: + return self._users_cache[current_user] + permissions = set([k for (k, v) in self.permissions.iteritems() if v is True]) + user_info = {'permissions': permissions} + if current_user: + user_info['username'] = current_user + res = self.db.query('users', {'username': current_user}) + if res: + user = res[0] + user_info = user + permissions.update(set(user.get('permissions') or [])) + user_info['permissions'] = permissions + 
self._users_cache[current_user] = user_info + return user_info + + def has_permission(self, permission): + """Check permissions of the current user. + + :param permission: the permission to check + :type permission: str + + :returns: True if the user is allowed to perform the action or False + :rtype: bool + """ + user_info = self.current_user_info or {} + user_permissions = user_info.get('permissions') or [] + global_permission = '%s|all' % permission.split('|')[0] + if 'admin|all' in user_permissions or global_permission in user_permissions or permission in user_permissions: + return True + collection_permission = self.permissions.get(permission) + if isinstance(collection_permission, bool): + return collection_permission + if callable(collection_permission): + return collection_permission(permission) + return False + + def user_authorized(self, username, password): + """Check if a combination of username/password is valid. + + :param username: username or email + :type username: str + :param password: password + :type password: str + + :returns: tuple like (bool_user_is_authorized, dict_user_info) + :rtype: dict""" + query = [{'username': username}, {'email': username}] + res = self.db.query('users', query) + if not res: + return (False, {}) + user = res[0] + db_password = user.get('password') or '' + if not db_password: + return (False, {}) + match = self._re_split_salt.match(db_password) + if not match: + return (False, {}) + salt = match.group('salt') + if utils.hash_password(password, salt=salt) == db_password: + return (True, user) + return (False, {}) + + def build_error(self, message='', status=400): + """Build and write an error message. + + :param message: textual message + :type message: str + :param status: HTTP status code + :type status: int + """ + self.set_status(status) + self.write({'error': True, 'message': message}) + + def logout(self): + """Remove the secure cookie used fro authentication.""" + if self.current_user in self._users_cache: + del self._users_cache[self.current_user] + self.clear_cookie("user") + + +class RootHandler(BaseHandler): + """Handler for the / path.""" + app_path = os.path.join(os.path.dirname(__file__), "dist") + + @gen.coroutine + def get(self, *args, **kwargs): + # serve the ./app/index.html file + with open(self.app_path + "/index.html", 'r') as fd: + self.write(fd.read()) + + +class CollectionHandler(BaseHandler): + """Base class for handlers that need to interact with the database backend. + + Introduce basic CRUD operations.""" + # set of documents we're managing (a collection in MongoDB or a table in a SQL database) + document = None + collection = None + + # set of documents used to store incremental sequences + counters_collection = 'counters' + + _id_chars = string.ascii_lowercase + string.digits + + def _filter_results(self, results, params): + """Filter a list using keys and values from a dictionary. 
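+
+        Example: _filter_results([{'a': 1, 'b': 2}, {'a': 3}], {'a': 1})
+        returns [{'a': 1, 'b': 2}].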
+ + :param results: the list to be filtered + :type results: list + :param params: a dictionary of items that must all be present in an original list item to be included in the return + :type params: dict + + :returns: list of items that have all the keys with the same values as params + :rtype: list""" + if not params: + return results + params = monco.convert(params) + filtered = [] + for result in results: + add = True + for key, value in params.iteritems(): + if key not in result or result[key] != value: + add = False + break + if add: + filtered.append(result) + return filtered + + def _clean_dict(self, data): + """Filter a dictionary (in place) to remove unwanted keywords in db queries. + + :param data: dictionary to clean + :type data: dict""" + if isinstance(data, dict): + for key in data.keys(): + if isinstance(key, (str, unicode)) and key.startswith('$'): + del data[key] + return data + + def apply_filter(self, data, filter_name): + """Apply a filter to the data. + + :param data: the data to filter + :returns: the modified (possibly also in place) data + """ + filter_method = getattr(self, 'filter_%s' % filter_name, None) + if filter_method is not None: + data = filter_method(data) + return data + + @gen.coroutine + def get(self, id_=None, resource=None, resource_id=None, acl=True, **kwargs): + if resource: + # Handle access to sub-resources. + permission = '%s:%s%s|read' % (self.document, resource, '-all' if resource_id is None else '') + if acl and not self.has_permission(permission): + return self.build_error(status=401, message='insufficient permissions: %s' % permission) + handler = getattr(self, 'handle_get_%s' % resource, None) + if callable(handler): + output = handler(id_, resource_id, **kwargs) or {} + output = self.apply_filter(output, 'get_%s' % resource) + self.write(output) + return + return self.build_error(status=404, message='unable to access resource: %s' % resource) + if id_ is not None: + # read a single document + permission = '%s|read' % self.document + if acl and not self.has_permission(permission): + return self.build_error(status=401, message='insufficient permissions: %s' % permission) + handler = getattr(self, 'handle_get', None) + if callable(handler): + output = handler(id_, **kwargs) or {} + else: + output = self.db.get(self.collection, id_) + output = self.apply_filter(output, 'get') + self.write(output) + else: + # return an object containing the list of all objects in the collection. + # Please, never return JSON lists that are not encapsulated into an object, + # to avoid XSS vulnerabilities. 
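+            # e.g. write {"attendees": [...]}, never a bare top-level [...] array.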
+ permission = '%s|read' % self.collection + if acl and not self.has_permission(permission): + return self.build_error(status=401, message='insufficient permissions: %s' % permission) + output = {self.collection: self.db.query(self.collection, self.arguments)} + output = self.apply_filter(output, 'get_all') + self.write(output) + + @gen.coroutine + def post(self, id_=None, resource=None, resource_id=None, **kwargs): + data = escape.json_decode(self.request.body or '{}') + self._clean_dict(data) + method = self.request.method.lower() + crud_method = 'create' if method == 'post' else 'update' + now = datetime.datetime.now() + user_info = self.current_user_info + user_id = user_info.get('_id') + if crud_method == 'create': + data['created_by'] = user_id + data['created_at'] = now + data['updated_by'] = user_id + data['updated_at'] = now + if resource: + permission = '%s:%s%s|%s' % (self.document, resource, '-all' if resource_id is None else '', crud_method) + if not self.has_permission(permission): + return self.build_error(status=401, message='insufficient permissions: %s' % permission) + # Handle access to sub-resources. + handler = getattr(self, 'handle_%s_%s' % (method, resource), None) + if handler and callable(handler): + data = self.apply_filter(data, 'input_%s_%s' % (method, resource)) + output = handler(id_, resource_id, data, **kwargs) + output = self.apply_filter(output, 'get_%s' % resource) + self.write(output) + return + return self.build_error(status=404, message='unable to access resource: %s' % resource) + if id_ is not None: + permission = '%s|%s' % (self.document, crud_method) + if not self.has_permission(permission): + return self.build_error(status=401, message='insufficient permissions: %s' % permission) + data = self.apply_filter(data, 'input_%s' % method) + merged, newData = self.db.update(self.collection, id_, data) + newData = self.apply_filter(newData, method) + else: + permission = '%s|%s' % (self.collection, crud_method) + if not self.has_permission(permission): + return self.build_error(status=401, message='insufficient permissions: %s' % permission) + data = self.apply_filter(data, 'input_%s_all' % method) + newData = self.db.add(self.collection, data) + newData = self.apply_filter(newData, '%s_all' % method) + self.write(newData) + + # PUT (update an existing document) is handled by the POST (create a new document) method; + # in subclasses you can always separate sub-resources handlers like handle_post_tickets and handle_put_tickets + put = post + + @gen.coroutine + def delete(self, id_=None, resource=None, resource_id=None, **kwargs): + if resource: + # Handle access to sub-resources. 
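+            # Subclasses can define handle_delete_<resource> to support this.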
+ permission = '%s:%s%s|delete' % (self.document, resource, '-all' if resource_id is None else '') + if not self.has_permission(permission): + return self.build_error(status=401, message='insufficient permissions: %s' % permission) + method = getattr(self, 'handle_delete_%s' % resource, None) + if method and callable(method): + output = method(id_, resource_id, **kwargs) + self.write(output) + return + return self.build_error(status=404, message='unable to access resource: %s' % resource) + if id_ is not None: + permission = '%s|delete' % self.document + if not self.has_permission(permission): + return self.build_error(status=401, message='insufficient permissions: %s' % permission) + howMany = self.db.delete(self.collection, id_) + else: + self.write({'success': False}) + self.write({'success': True}) + + +class AttendeesHandler(CollectionHandler): + document = 'attendee' + collection = 'attendees' + + @gen.coroutine + def get(self, id_=None, **kwargs): + if id_: + output = self.db.getOne(self.collection, {'_id': id_}) + else: + output = {self.collection: self.db.query(self.collection, self.arguments)} + self.write(output) + + @gen.coroutine + def post(self, **kwargs): + data = escape.json_decode(self.request.body or '{}') + self._clean_dict(data) + user_info = self.current_user_info + user_id = user_info.get('_id') + now = datetime.datetime.now() + data['created_by'] = user_id + data['created_at'] = now + data['updated_by'] = user_id + data['updated_at'] = now + doc = self.db.add(self.collection, data) + doc = self.apply_filter(doc, 'create') + self.write(doc) + + @gen.coroutine + def put(self, id_, **kwargs): + data = escape.json_decode(self.request.body or '{}') + self._clean_dict(data) + user_info = self.current_user_info + user_id = user_info.get('_id') + now = datetime.datetime.now() + data['updated_by'] = user_id + data['updated_at'] = now + merged, doc = self.db.update(self.collection, {'_id': id_}, data) + doc = self.apply_filter(doc, 'update') + self.write(doc) + + @gen.coroutine + def delete(self, id_=None, **kwargs): + if id_ is not None: + howMany = self.db.delete(self.collection, id_) + self.write({'success': True, 'deleted entries': howMany.get('n')}) + else: + self.write({'success': False}) + + +class DaysHandler(CollectionHandler): + """Handle requests for Days.""" + + def _summarize(self, days): + res = [] + for day in days: + print day['day'], [x['group'] for x in day.get('groups')] + res.append({'day': day['day'], 'groups_count': len(day.get('groups', []))}) + return res + + @gen.coroutine + def get(self, day=None, **kwargs): + params = self.arguments + summary = params.get('summary', False) + if summary: + del params['summary'] + start = params.get('start') + if start: + del params['start'] + end = params.get('end') + if end: + del params['end'] + if day: + params['day'] = day + else: + if start: + params['day'] = {'$gte': start} + if end: + if 'day' not in params: + params['day'] = {} + if end.count('-') == 0: + end += '-13' + elif end.count('-') == 1: + end += '-31' + params['day'].update({'$lte': end}) + res = self.db.query('attendees', params) + days = [] + for d, dayItems in itertools.groupby(sorted(res, key=itemgetter('day')), key=itemgetter('day')): + dayData = {'day': d, 'groups': []} + for group, attendees in itertools.groupby(sorted(dayItems, key=itemgetter('group')), key=itemgetter('group')): + attendees = sorted(attendees, key=itemgetter('_id')) + dayData['groups'].append({'group': group, 'attendees': attendees}) + days.append(dayData) + if summary: + days 
= self._summarize(days) + if not day: + self.write({'days': days}) + elif days: + self.write(days[0]) + else: + self.write({}) + + +class UsersHandler(CollectionHandler): + """Handle requests for Users.""" + document = 'user' + collection = 'users' + + def filter_get(self, data): + if 'password' in data: + del data['password'] + return data + + def filter_get_all(self, data): + if 'users' not in data: + return data + for user in data['users']: + if 'password' in user: + del user['password'] + return data + + @gen.coroutine + def get(self, id_=None, resource=None, resource_id=None, acl=True, **kwargs): + if id_ is not None: + if (self.has_permission('user|read') or str(self.current_user_info.get('_id')) == id_): + acl = False + super(UsersHandler, self).get(id_, resource, resource_id, acl=acl, **kwargs) + + def filter_input_post_all(self, data): + username = (data.get('username') or '').strip() + password = (data.get('password') or '').strip() + email = (data.get('email') or '').strip() + if not (username and password): + raise InputException('missing username or password') + res = self.db.query('users', {'username': username}) + if res: + raise InputException('username already exists') + return {'username': username, 'password': utils.hash_password(password), + 'email': email} + + def filter_input_put(self, data): + old_pwd = data.get('old_password') + new_pwd = data.get('new_password') + if old_pwd is not None: + del data['old_password'] + if new_pwd is not None: + del data['new_password'] + authorized, user = self.user_authorized(data['username'], old_pwd) + if not (authorized and self.current_user == data['username']): + raise InputException('not authorized to change password') + data['password'] = utils.hash_password(new_pwd) + if '_id' in data: + # Avoid overriding _id + del data['_id'] + return data + + @gen.coroutine + def put(self, id_=None, resource=None, resource_id=None, **kwargs): + if id_ is None: + return self.build_error(status=404, message='unable to access the resource') + if str(self.current_user_info.get('_id')) != id_: + return self.build_error(status=401, message='insufficient permissions: user|update or current user') + super(UsersHandler, self).put(id_, resource, resource_id, **kwargs) + + +class SettingsHandler(BaseHandler): + """Handle requests for Settings.""" + @gen.coroutine + def get(self, **kwargs): + query = self.arguments_tobool() + settings = self.db.query('settings', query) + self.write({'settings': settings}) + + +class LoginHandler(RootHandler): + """Handle user authentication requests.""" + + @gen.coroutine + def get(self, **kwargs): + # show the login page + if self.is_api(): + self.set_status(401) + self.write({'error': True, + 'message': 'authentication required'}) + + @gen.coroutine + def post(self, *args, **kwargs): + # authenticate a user + try: + password = self.get_body_argument('password') + username = self.get_body_argument('username') + except tornado.web.MissingArgumentError: + data = escape.json_decode(self.request.body or '{}') + username = data.get('username') + password = data.get('password') + if not (username and password): + self.set_status(401) + self.write({'error': True, 'message': 'missing username or password'}) + return + authorized, user = self.user_authorized(username, password) + if authorized and user.get('username'): + username = user['username'] + logging.info('successful login for user %s' % username) + self.set_secure_cookie("user", username) + self.write({'error': False, 'message': 'successful login'}) + return + 
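+        # Authentication failed: log it and reply with a generic 401 error.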
logging.info('login failed for user %s' % username) + self.set_status(401) + self.write({'error': True, 'message': 'wrong username and password'}) + + +class LogoutHandler(BaseHandler): + """Handle user logout requests.""" + @gen.coroutine + def get(self, **kwargs): + # log the user out + logging.info('logout') + self.logout() + self.write({'error': False, 'message': 'logged out'}) + + +def run(): + """Run the Tornado web application.""" + # command line arguments; can also be written in a configuration file, + # specified with the --config argument. + define("port", default=3000, help="run on the given port", type=int) + define("address", default='', help="bind the server at the given address", type=str) + define("data_dir", default=os.path.join(os.path.dirname(__file__), "data"), + help="specify the directory used to store the data") + define("ssl_cert", default=os.path.join(os.path.dirname(__file__), 'ssl', 'ibt2_cert.pem'), + help="specify the SSL certificate to use for secure connections") + define("ssl_key", default=os.path.join(os.path.dirname(__file__), 'ssl', 'ibt2_key.pem'), + help="specify the SSL private key to use for secure connections") + define("mongo_url", default=None, + help="URL to MongoDB server", type=str) + define("db_name", default='ibt2', + help="Name of the MongoDB database to use", type=str) + define("authentication", default=False, help="if set to true, authentication is required") + define("debug", default=False, help="run in debug mode") + define("config", help="read configuration file", + callback=lambda path: tornado.options.parse_config_file(path, final=False)) + tornado.options.parse_command_line() + + logger = logging.getLogger() + logger.setLevel(logging.INFO) + if options.debug: + logger.setLevel(logging.DEBUG) + + ssl_options = {} + if os.path.isfile(options.ssl_key) and os.path.isfile(options.ssl_cert): + ssl_options = dict(certfile=options.ssl_cert, keyfile=options.ssl_key) + + # database backend connector + db_connector = monco.Monco(url=options.mongo_url, dbName=options.db_name) + init_params = dict(db=db_connector, data_dir=options.data_dir, listen_port=options.port, + authentication=options.authentication, logger=logger, ssl_options=ssl_options) + + # If not present, we store a user 'admin' with password 'ibt2' into the database. + if not db_connector.query('users', {'username': 'admin'}): + db_connector.add('users', + {'username': 'admin', 'password': utils.hash_password('ibt2'), + 'isAdmin': True}) + + # If present, use the cookie_secret stored into the database. + cookie_secret = db_connector.query('settings', {'setting': 'server_cookie_secret'}) + if cookie_secret: + cookie_secret = cookie_secret[0]['cookie_secret'] + else: + # the salt guarantees its uniqueness + cookie_secret = utils.hash_password('__COOKIE_SECRET__') + db_connector.add('settings', + {'setting': 'server_cookie_secret', 'cookie_secret': cookie_secret}) + + _days_path = r"/days/?(?P[\d_-]+)?" + _attendees_path = r"/days/(?P[\d_-]+)/groups/(?P[\w\d_\ -]+)/attendees/?(?P[\w\d_\ -]+)?" + _users_path = r"/users/?(?P[\w\d_-]+)?/?(?P[\w\d_-]+)?/?(?P[\w\d_-]+)?" + _attendees_path = r"/attendees/?(?P[\w\d_-]+)?" 
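+    # API handlers are registered twice: at their bare path and under the /v1.0 version prefix.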
+ application = tornado.web.Application([ + (_attendees_path, AttendeesHandler, init_params), + (r'/v%s%s' % (API_VERSION, _attendees_path), AttendeesHandler, init_params), + (_days_path, DaysHandler, init_params), + (r'/v%s%s' % (API_VERSION, _days_path), DaysHandler, init_params), + (_users_path, UsersHandler, init_params), + (r'/v%s%s' % (API_VERSION, _users_path), UsersHandler, init_params), + (r"/(?:index.html)?", RootHandler, init_params), + (r"/settings", SettingsHandler, init_params), + (r'/login', LoginHandler, init_params), + (r'/v%s/login' % API_VERSION, LoginHandler, init_params), + (r'/logout', LogoutHandler), + (r'/v%s/logout' % API_VERSION, LogoutHandler), + (r'/?(.*)', tornado.web.StaticFileHandler, {"path": "dist"}) + ], + static_path=os.path.join(os.path.dirname(__file__), "dist/static"), + cookie_secret='__COOKIE_SECRET__', + login_url='/login', + debug=options.debug) + http_server = tornado.httpserver.HTTPServer(application, ssl_options=ssl_options or None) + logger.info('Start serving on %s://%s:%d', 'https' if ssl_options else 'http', + options.address if options.address else '127.0.0.1', + options.port) + http_server.listen(options.port, options.address) + tornado.ioloop.IOLoop.instance().start() + + +if __name__ == '__main__': + try: + run() + except KeyboardInterrupt: + print('Stop server') diff --git a/index.html b/index.html new file mode 100644 index 0000000..a2f6a13 --- /dev/null +++ b/index.html @@ -0,0 +1,12 @@ + + + + + ibt2 + + + +
+ + + diff --git a/monco.py b/monco.py new file mode 100644 index 0000000..34c11ba --- /dev/null +++ b/monco.py @@ -0,0 +1,285 @@ +"""Monco: a MongoDB database backend + +Classes and functions used to issue queries to a MongoDB database. + +Copyright 2016 Davide Alberani + RaspiBO + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import re +import pymongo +from bson.objectid import ObjectId + +re_objectid = re.compile(r'[0-9a-f]{24}') + +_force_conversion = { + '_id': ObjectId +} + + +def convert_obj(obj): + """Convert an object in a format suitable to be stored in MongoDB. + + :param obj: object to convert + + :returns: object that can be stored in MongoDB. + """ + if obj is None: + return None + if isinstance(obj, bool): + return obj + try: + if re_objectid.match(obj): + return ObjectId(obj) + except: + pass + return obj + + +def convert(seq): + """Convert an object to a format suitable to be stored in MongoDB, + descending lists, tuples and dictionaries (a copy is returned). + + :param seq: sequence or object to convert + + :returns: object that can be stored in MongoDB. + """ + if isinstance(seq, dict): + d = {} + for key, item in seq.iteritems(): + if key in _force_conversion: + try: + d[key] = _force_conversion[key](item) + except: + d[key] = item + else: + d[key] = convert(item) + return d + if isinstance(seq, (list, tuple)): + return [convert(x) for x in seq] + return convert_obj(seq) + + +class MoncoError(Exception): + """Base class for Monco exceptions.""" + pass + + +class MoncoConnection(MoncoError): + """Monco exceptions raise when a connection problem occurs.""" + pass + + +class Monco(object): + """MongoDB connector.""" + db = None + connection = None + + # map operations on lists of items. + _operations = { + 'update': '$set', + 'append': '$push', + 'appendUnique': '$addToSet', + 'delete': '$pull', + 'increment': '$inc' + } + + def __init__(self, dbName, url=None): + """Initialize the instance, connecting to the database. + + :param dbName: name of the database + :type dbName: str (or None to use the dbName passed at initialization) + :param url: URL of the database + :type url: str (or None to connect to localhost) + """ + self._url = url + self._dbName = dbName + self.connect(url) + + def connect(self, dbName=None, url=None): + """Connect to the database. + + :param dbName: name of the database + :type dbName: str (or None to use the dbName passed at initialization) + :param url: URL of the database + :type url: str (or None to connect to localhost) + + :returns: the database we're connected to + :rtype: :class:`~pymongo.database.Database` + """ + if self.db is not None: + return self.db + if url: + self._url = url + if dbName: + self._dbName = dbName + if not self._dbName: + raise MoncoConnection('no database name specified') + self.connection = pymongo.MongoClient(self._url) + self.db = self.connection[self._dbName] + return self.db + + def getOne(self, collection, query=None): + """Get a single document with the specified `query`. 
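+
+        Example: getOne('users', {'username': 'admin'}) returns the first
+        matching document, or an empty dict when nothing matches.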
+ + :param collection: search the document in this collection + :type collection: str + :param query: query to filter the documents + :type query: dict or None + + :returns: the first document matching the query + :rtype: dict + """ + results = self.query(collection, convert(query)) + return results and results[0] or {} + + def get(self, collection, _id): + """Get a single document with the specified `_id`. + + :param collection: search the document in this collection + :type collection: str + :param _id: unique ID of the document + :type _id: str or :class:`~bson.objectid.ObjectId` + + :returns: the document with the given `_id` + :rtype: dict + """ + return self.getOne(collection, {'_id': _id}) + + def query(self, collection, query=None, condition='or'): + """Get multiple documents matching a query. + + :param collection: search for documents in this collection + :type collection: str + :param query: search for documents with those attributes + :type query: dict or None + + :returns: list of matching documents + :rtype: list + """ + db = self.connect() + query = convert(query or {}) + if isinstance(query, (list, tuple)): + query = {'$%s' % condition: query} + return list(db[collection].find(query)) + + def add(self, collection, data, _id=None): + """Insert a new document. + + :param collection: insert the document in this collection + :type collection: str + :param data: the document to store + :type data: dict + :param _id: the _id of the document to store; if None, it's generated + :type _id: object + + :returns: the document, as created in the database + :rtype: dict + """ + db = self.connect() + data = convert(data) + if _id is not None: + data['_id'] = _id + _id = db[collection].insert(data) + return self.get(collection, _id) + + def insertOne(self, collection, data): + """Insert a document, avoiding duplicates. + + :param collection: update a document in this collection + :type collection: str + :param data: the document information to store + :type data: dict + + :returns: True if the document was already present + :rtype: bool + """ + db = self.connect() + data = convert(data) + ret = db[collection].update(data, {'$set': data}, upsert=True) + return ret['updatedExisting'] + + def _buildSearchPattern(self, data, searchBy): + """Return an OR condition.""" + _or = [] + for searchPattern in searchBy: + try: + _or.append(dict([(k, data[k]) for k in searchPattern if k in data])) + except KeyError: + continue + return _or + + def update(self, collection, _id_or_query, data, operation='update', + updateList=None, create=True): + """Update an existing document or create it, if requested. + _id_or_query can be an ID, a dict representing a query or a list of tuples. + In the latter case, the tuples are put in OR; a tuple match if all of its + items from 'data' are contained in the document. 
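+
+        Example: update('attendees', [('day', 'name')], data) modifies the first
+        attendee whose 'day' and 'name' equal the corresponding values in data
+        (creating it if missing, since create defaults to True), applying data via $set.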
+ + :param collection: update a document in this collection + :type collection: str + :param _id_or_query: ID of the document to be updated, or a query or a list of attributes in the data that must match + :type _id_or_query: str or :class:`~bson.objectid.ObjectId` or iterable + :param data: the updated information to store + :type data: dict + :param operation: operation used to update the document or a portion of it, like a list (update, append, appendUnique, delete, increment) + :type operation: str + :param updateList: if set, it's considered the name of a list (the first matching element will be updated) + :type updateList: str + :param create: if True, the document is created if no document matches + :type create: bool + + :returns: a boolean (True if an existing document was updated) and the document after the update + :rtype: tuple of (bool, dict) + """ + db = self.connect() + data = convert(data or {}) + _id_or_query = convert(_id_or_query) + if isinstance(_id_or_query, (list, tuple)): + _id_or_query = {'$or': self._buildSearchPattern(data, _id_or_query)} + elif not isinstance(_id_or_query, dict): + _id_or_query = {'_id': _id_or_query} + if '_id' in data: + del data['_id'] + operator = self._operations.get(operation) + if updateList: + newData = {} + for key, value in data.iteritems(): + newData['%s.$.%s' % (updateList, key)] = value + data = newData + res = db[collection].find_and_modify(query=_id_or_query, + update={operator: data}, full_response=True, new=True, upsert=create) + lastErrorObject = res.get('lastErrorObject') or {} + return lastErrorObject.get('updatedExisting', False), res.get('value') or {} + + def delete(self, collection, _id_or_query=None, force=False): + """Remove one or more documents from a collection. + + :param collection: search the documents in this collection + :type collection: str + :param _id_or_query: unique ID of the document or query to match multiple documents + :type _id_or_query: str or :class:`~bson.objectid.ObjectId` or dict + :param force: force the deletion of all documents, when `_id_or_query` is empty + :type force: bool + + :returns: how many documents were removed + :rtype: int + """ + if not _id_or_query and not force: + return + db = self.connect() + if not isinstance(_id_or_query, dict): + _id_or_query = {'_id': _id_or_query} + _id_or_query = convert(_id_or_query) + return db[collection].remove(_id_or_query) + diff --git a/package.json b/package.json new file mode 100644 index 0000000..c5ae8a6 --- /dev/null +++ b/package.json @@ -0,0 +1,59 @@ +{ + "name": "ibt2", + "version": "1.0.0", + "description": "I'll be there, 2", + "author": "Davide Alberani ", + "private": true, + "scripts": { + "dev": "node build/dev-server.js", + "pydev": "python ibt2.py --debug --db_name=ibt2_test", + "watch": "watch 'npm run build' src/", + "build": "node build/build.js" + }, + "dependencies": { + "babel": "^6.5.2", + "jquery": "^3.1.1", + "material-ui-vue": "^0.1.4", + "materialize-css": "^0.97.8", + "vue": "^2.1.6", + "vue-loader": "^10.0.2", + "vue-resource": "^1.0.3", + "vuejs-datepicker": "^0.4.27" + }, + "devDependencies": { + "autoprefixer": "^6.4.0", + "babel-core": "^6.0.0", + "babel-loader": "^6.0.0", + "babel-plugin-transform-runtime": "^6.0.0", + "babel-preset-es2015": "^6.0.0", + "babel-preset-stage-2": "^6.0.0", + "babel-register": "^6.0.0", + "chalk": "^1.1.3", + "connect-history-api-fallback": "^1.1.0", + "css-loader": "^0.25.0", + "eventsource-polyfill": "^0.9.6", + "express": "^4.13.3", + "extract-text-webpack-plugin": "^1.0.1", + 
"file-loader": "^0.9.0", + "function-bind": "^1.0.2", + "html-webpack-plugin": "^2.8.1", + "http-proxy-middleware": "^0.17.2", + "json-loader": "^0.5.4", + "semver": "^5.3.0", + "opn": "^4.0.2", + "ora": "^0.3.0", + "shelljs": "^0.7.4", + "url-loader": "^0.5.7", + "vue-loader": "^10.0.0", + "vue-style-loader": "^1.0.0", + "vue-template-compiler": "^2.1.0", + "webpack": "^1.13.2", + "webpack-dev-middleware": "^1.8.3", + "webpack-hot-middleware": "^2.12.2", + "webpack-merge": "^0.14.1" + }, + "engines": { + "node": ">= 4.0.0", + "npm": ">= 3.0.0" + } +} diff --git a/src/App.vue b/src/App.vue new file mode 100644 index 0000000..0726de4 --- /dev/null +++ b/src/App.vue @@ -0,0 +1,133 @@ + + + + + diff --git a/src/assets/logo.png b/src/assets/logo.png new file mode 100644 index 0000000..f3d2503 Binary files /dev/null and b/src/assets/logo.png differ diff --git a/src/components/Hello.vue b/src/components/Hello.vue new file mode 100644 index 0000000..2d80539 --- /dev/null +++ b/src/components/Hello.vue @@ -0,0 +1,53 @@ + + + + + + diff --git a/src/main.js b/src/main.js new file mode 100644 index 0000000..a9d813a --- /dev/null +++ b/src/main.js @@ -0,0 +1,22 @@ +// The Vue build version to load with the `import` command +// (runtime-only or standalone) has been set in webpack.base.conf with an alias. +import Vue from 'vue' +import App from './App' +require("vue-resource") +/* +import 'jquery/dist/jquery.min.js' +import 'materialize-css/bin/materialize.css' +import 'materialize-css/bin/materialize.js' +require("material-ui-vue") +*/ +var VueResource = require("vue-resource"); +require("jquery"); + +Vue.use(VueResource); + +/* eslint-disable no-new */ +new Vue({ + el: '#app', + template: '', + components: { App } +}) diff --git a/static/.gitkeep b/static/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/tests/ibt2_tests.py b/tests/ibt2_tests.py new file mode 100755 index 0000000..f08000d --- /dev/null +++ b/tests/ibt2_tests.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python +"""I'll Be There 2 (ibt2) - tests + +Copyright 2016 Davide Alberani + RaspiBO + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + +import unittest +import requests +import monco + +BASE_URL = 'http://localhost:3000/v1.0/' +DB_NAME = 'ibt2_test' + +def dictInDict(d, dContainer): + for k, v in d.viewitems(): + if k not in dContainer: + return False + if v != dContainer[k]: + return False + return True + + +class Ibt2Tests(unittest.TestCase): + #@classmethod + #def setUpClass(cls): + def setUp(self): + self.monco_conn = monco.Monco(dbName=DB_NAME) + self.connection = self.monco_conn.connection + self.db = self.monco_conn.db + self.connection.drop_database(DB_NAME) + + def tearDown(self): + self.add_attendee({'day': '2017-01-15', 'name': 'A name', 'group': 'group A'}) + self.add_attendee({'day': '2017-01-16', 'name': 'A new name', 'group': 'group C'}) + self.add_attendee({'day': '2017-01-15', 'name': 'Another name', 'group': 'group A'}) + self.add_attendee({'day': '2017-01-15', 'name': 'Yet another name', 'group': 'group B'}) + + def add_attendee(self, attendee): + r = requests.post('%sattendees' % BASE_URL, json=attendee) + r.raise_for_status() + return r + + def test_add_attendee(self): + # POST /attendees/ {name: 'A Name', day: '2017-01-15', group: 'A group'} + # GET /attendees/:id + attendee = {'name': 'A Name', 'day': '2017-01-15', 'group': 'A group'} + r = self.add_attendee(attendee) + rj = r.json() + id_ = rj.get('_id') + self.assertTrue(dictInDict(attendee, rj)) + r = requests.get(BASE_URL + 'attendees/' + id_) + r.raise_for_status() + rj = r.json() + self.assertTrue(dictInDict(attendee, rj)) + + def test_put_attendee(self): + # POST /attendees/ {name: 'A Name', day: '2017-01-15', group: 'A group'} + # GET /attendees/:id + attendee = {'name': 'A Name', 'day': '2017-01-15', 'group': 'A group'} + r = self.add_attendee(attendee) + update = {'notes': 'A note'} + r = requests.post(BASE_URL + 'attendees', json=attendee) + r.raise_for_status() + id_ = r.json().get('_id') + r = requests.put(BASE_URL + 'attendees/' + id_, json=update) + r.raise_for_status() + r = requests.get('%s%s/%s' % (BASE_URL, 'attendees', id_)) + r.raise_for_status() + rj = r.json() + final = attendee.copy() + final.update(update) + self.assertTrue(dictInDict(final, rj)) + + def test_delete_attendee(self): + # POST /attendees/ {name: 'A Name', day: '2017-01-15', group: 'A group'} + # GET /attendees/:id + attendee = {'name': 'A Name', 'day': '2017-01-15', 'group': 'A group'} + r = self.add_attendee(attendee) + id_ = r.json().get('_id') + r = requests.delete(BASE_URL + 'attendees/' + id_) + r.raise_for_status() + self.assertTrue(r.json().get('success')) + + def test_get_days(self): + self.add_attendee({'day': '2017-01-15', 'name': 'A name', 'group': 'group A'}) + self.add_attendee({'day': '2017-01-16', 'name': 'A new name', 'group': 'group C'}) + self.add_attendee({'day': '2017-01-15', 'name': 'Another name', 'group': 'group A'}) + self.add_attendee({'day': '2017-01-15', 'name': 'Yet another name', 'group': 'group B'}) + r = requests.get(BASE_URL + 'days') + r.raise_for_status() + rj = r.json() + self.assertEqual([x.get('day') for x in rj['days']], ['2017-01-15', '2017-01-16']) + self.assertEqual([x.get('group') for x in rj['days'][0]['groups']], ['group A', 'group B']) + self.assertTrue(len(rj['days'][0]['groups'][0]['attendees']) == 2) + self.assertTrue(len(rj['days'][0]['groups'][1]['attendees']) == 1) + self.assertEqual([x.get('group') for x in rj['days'][1]['groups']], ['group C']) + self.assertTrue(len(rj['days'][1]['groups'][0]['attendees']) == 1) + + def test_get_days_summary(self): + self.add_attendee({'day': '2017-01-15', 'name': 'A 
name', 'group': 'group A'}) + self.add_attendee({'day': '2017-01-16', 'name': 'A new name', 'group': 'group C'}) + self.add_attendee({'day': '2017-01-15', 'name': 'Another name', 'group': 'group A'}) + self.add_attendee({'day': '2017-01-15', 'name': 'Yet another name', 'group': 'group B'}) + r = requests.get(BASE_URL + 'days?summary=1') + r.raise_for_status() + rj = r.json() + self.assertEqual(rj, + {"days": [{"groups_count": 2, "day": "2017-01-15"}, {"groups_count": 1, "day": "2017-01-16"}]}) + + def _test_post_day_group(self): + # POST /days/ {day: '2017-01-04'} + # GET /days/2017-01-04 + day = '2017-01-15' + query = {'day': day, 'groups': [{'name': 'group1'}]} + r = requests.post(BASE_URL + 'days', json=query) + r.raise_for_status() + rj = r.json() + self.assertTrue(dictInDict(query, rj)) + r = requests.get('%s%s/%s' % (BASE_URL, 'days', day)) + r.raise_for_status() + rj = r.json() + self.assertTrue(dictInDict(query, rj)) + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/tests/monco.py b/tests/monco.py new file mode 120000 index 0000000..f5bb920 --- /dev/null +++ b/tests/monco.py @@ -0,0 +1 @@ +../monco.py \ No newline at end of file diff --git a/tests/monco.pyc b/tests/monco.pyc new file mode 100644 index 0000000..acebfd8 Binary files /dev/null and b/tests/monco.pyc differ diff --git a/utils.py b/utils.py new file mode 100644 index 0000000..0bdb26c --- /dev/null +++ b/utils.py @@ -0,0 +1,61 @@ +"""ibt2 utils + +Miscellaneous utilities. + +Copyright 2016 Davide Alberani + RaspiBO + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" + +import json +import string +import random +import hashlib +import datetime +import StringIO +from bson.objectid import ObjectId + + +def hash_password(password, salt=None): + """Hash a password. + + :param password: the cleartext password + :type password: str + :param salt: the optional salt (randomly generated, if None) + :type salt: str + + :returns: the hashed password + :rtype: str""" + if salt is None: + salt_pool = string.ascii_letters + string.digits + salt = ''.join(random.choice(salt_pool) for x in xrange(32)) + hash_ = hashlib.sha512('%s%s' % (salt, password)) + return '$%s$%s' % (salt, hash_.hexdigest()) + + +class ImprovedEncoder(json.JSONEncoder): + """Enhance the default JSON encoder to serialize datetime and ObjectId instances.""" + def default(self, o): + if isinstance(o, (datetime.datetime, datetime.date, + datetime.time, datetime.timedelta, ObjectId)): + try: + return str(o) + except: + pass + elif isinstance(o, set): + return list(o) + return json.JSONEncoder.default(self, o) + + +# Inject our class as the default encoder. +json._default_encoder = ImprovedEncoder() +
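For a quick manual check of the API this commit introduces, something along these lines should work against a locally running instance started with the defaults (port 3000, authentication disabled); the endpoints and payloads mirror the ones exercised in tests/ibt2_tests.py:

``` python
import requests

BASE_URL = 'http://localhost:3000/v1.0/'

# Register an attendee for a given day and group (POST /v1.0/attendees).
attendee = {'day': '2017-01-15', 'name': 'A Name', 'group': 'group A'}
r = requests.post(BASE_URL + 'attendees', json=attendee)
r.raise_for_status()
attendee_id = r.json()['_id']

# Update it (PUT /v1.0/attendees/:id) and read it back (GET /v1.0/attendees/:id).
requests.put(BASE_URL + 'attendees/' + attendee_id,
             json={'notes': 'arrives late'}).raise_for_status()
print(requests.get(BASE_URL + 'attendees/' + attendee_id).json())

# Per-day, per-group summary (GET /v1.0/days?summary=1), as used by the tests.
print(requests.get(BASE_URL + 'days?summary=1').json())

# Log in as the default 'admin' user created at first start-up (password 'ibt2').
print(requests.post(BASE_URL + 'login',
                    json={'username': 'admin', 'password': 'ibt2'}).json())
```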