
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Diffido - because the F5 key is a terrible thing to waste.

Copyright 2018 Davide Alberani <da@erlug.linux.it>

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import re
import json
import pytz
import shutil
import urllib.parse
import smtplib
from email.mime.text import MIMEText
import logging
import datetime
import requests
import subprocess
import multiprocessing
from lxml import etree
from xml.etree import ElementTree

from tornado.ioloop import IOLoop
from apscheduler.triggers.cron import CronTrigger
from apscheduler.schedulers.tornado import TornadoScheduler
from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore
import tornado.httpserver
import tornado.ioloop
import tornado.options
from tornado.options import define, options
import tornado.web
from tornado import gen, escape


JOBS_STORE = 'sqlite:///conf/jobs.db'
API_VERSION = '1.0'
SCHEDULES_FILE = 'conf/schedules.json'
DEFAULT_CONF = 'conf/diffido.conf'
EMAIL_FROM = 'diffido@localhost'
SMTP_SETTINGS = {}
GIT_CMD = 'git'
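
# re_commit parses the output of "git log --pretty=oneline --shortstat" (see get_history);
# re_insertion and re_deletion pick the counters out of the "git commit" summary (see run_job).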
re_commit = re.compile(r'^(?P<id>[0-9a-f]{40}) (?P<message>.*)\n(?: .* '
                       r'(?P<insertions>\d+) insertion.* (?P<deletions>\d+) deletion.*$)?', re.M)
re_insertion = re.compile(r'(\d+) insertion')
re_deletion = re.compile(r'(\d+) deletion')

logger = logging.getLogger()
logger.setLevel(logging.INFO)


def read_schedules():
    """Return the schedules configuration.

    :returns: dictionary from the JSON object in conf/schedules.json
    :rtype: dict"""
    if not os.path.isfile(SCHEDULES_FILE):
        return {'schedules': {}}
    try:
        with open(SCHEDULES_FILE, 'r') as fd:
            schedules = json.loads(fd.read())
            for id_ in schedules.get('schedules', {}).keys():
                schedule = schedules['schedules'][id_]
                try:
                    schedule['last_history'] = get_last_history(id_)
                except:
                    schedule['last_history'] = {}
                    continue
            return schedules
    except Exception as e:
        logger.error('unable to read %s: %s' % (SCHEDULES_FILE, e))
        return {'schedules': {}}
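

# For reference, conf/schedules.json is expected to look roughly like the sketch
# below (field names inferred from the keys read elsewhere in this module):
# {"schedules": {"1": {"title": "Example", "url": "https://example.com/",
#                      "xpath": "//div[@id='content']", "email": "admin@example.com",
#                      "enabled": true, "minimum_change": 0.05,
#                      "trigger": "interval", "interval_minutes": 10}}}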


def write_schedules(schedules):
    """Write the schedules configuration.

    :param schedules: the schedules to save
    :type schedules: dict

    :returns: True in case of success
    :rtype: bool"""
    try:
        with open(SCHEDULES_FILE, 'w') as fd:
            fd.write(json.dumps(schedules, indent=2))
    except Exception as e:
        logger.error('unable to write %s: %s' % (SCHEDULES_FILE, e))
        return False
    return True


def next_id(schedules):
    """Return the next available integer (as a string) in the list of schedules keys (holes are not filled).

    :param schedules: the schedules
    :type schedules: dict

    :returns: the ID of the next schedule
    :rtype: str"""
    ids = schedules.get('schedules', {}).keys()
    if not ids:
        return '1'
    return str(max([int(i) for i in ids]) + 1)


def get_schedule(id_, add_id=True, add_history=False):
    """Return information about a single schedule.

    :param id_: ID of the schedule
    :type id_: str
    :param add_id: if True, add the ID in the dictionary
    :type add_id: bool
    :param add_history: if True, add the last history entry
    :type add_history: bool

    :returns: the schedule
    :rtype: dict"""
    try:
        schedules = read_schedules()
    except Exception:
        return {}
    data = schedules.get('schedules', {}).get(id_, {})
    if add_history and data:
        data['last_history'] = get_last_history(id_)
    if add_id:
        data['id'] = str(id_)
    return data


def select_xpath(content, xpath):
    """Select a portion of an HTML document.

    :param content: the content of the document
    :type content: str
    :param xpath: the XPath selector
    :type xpath: str

    :returns: the selected portion of the document
    :rtype: str"""
    tree = etree.HTML(content)
    elems = tree.xpath(xpath)
    if not elems:
        return content
    selected_content = []
    for elem in elems:
        pieces = []
        if elem.text:
            pieces.append(elem.text)
        for sub_el in elem.getchildren():
            try:
                sub_el_text = ElementTree.tostring(sub_el, method='html').decode('utf-8', 'replace')
            except:
                continue
            if sub_el_text:
                pieces.append(sub_el_text)
        selected_content.append(''.join(pieces))
    content = ''.join(selected_content).strip()
    return content


def run_job(id_=None, force=False, *args, **kwargs):
    """Run a job

    :param id_: ID of the schedule to run
    :type id_: str
    :param force: run even if disabled
    :type force: bool
    :param args: positional arguments
    :type args: tuple
    :param kwargs: named arguments
    :type kwargs: dict

    :returns: True in case of success
    :rtype: bool"""
    schedule = get_schedule(id_, add_id=False)
    url = schedule.get('url')
    if not url:
        return False
    logger.debug('running job id:%s title:%s url: %s' % (id_, schedule.get('title', ''), url))
    if not schedule.get('enabled') and not force:
        logger.info('not running job %s: disabled' % id_)
        return True
    req = requests.get(url, allow_redirects=True, timeout=(30.10, 240))
    content = req.text
    xpath = schedule.get('xpath')
    if xpath:
        try:
            content = select_xpath(content, xpath)
        except Exception as e:
            logger.warn('unable to extract XPath %s: %s' % (xpath, e))
    req_path = urllib.parse.urlparse(req.url).path
    base_name = os.path.basename(req_path) or 'index.html'
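
    # The fetched page is written and committed to Git in a child process (_commit below):
    # it calls os.chdir(), and running it in a separate process keeps the working
    # directory of the main server process untouched.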
    def _commit(id_, filename, content, queue):
        try:
            os.chdir('storage/%s' % id_)
        except Exception as e:
            logger.info('unable to move to storage/%s directory: %s; trying to create it...' % (id_, e))
            _created = False
            try:
                _created = git_create_repo(id_)
            except Exception as e:
                logger.info('unable to create the storage/%s repository: %s' % (id_, e))
            if not _created:
                # unblock the parent process waiting on queue.get()
                queue.put({})
                return False
            os.chdir('storage/%s' % id_)
        current_lines = 0
        if os.path.isfile(filename):
            with open(filename, 'r') as fd:
                for line in fd:
                    current_lines += 1
        with open(filename, 'w') as fd:
            fd.write(content)
        p = subprocess.Popen([GIT_CMD, 'add', filename])
        p.communicate()
        p = subprocess.Popen([GIT_CMD, 'commit', '-m', '%s' % datetime.datetime.utcnow(), '--allow-empty'],
                             stdout=subprocess.PIPE)
        stdout, _ = p.communicate()
        stdout = stdout.decode('utf-8')
        insert = re_insertion.findall(stdout)
        if insert:
            insert = int(insert[0])
        else:
            insert = 0
        delete = re_deletion.findall(stdout)
        if delete:
            delete = int(delete[0])
        else:
            delete = 0
        queue.put({'insertions': insert, 'deletions': delete, 'previous_lines': current_lines,
                   'changes': max(insert, delete)})

    queue = multiprocessing.Queue()
    p = multiprocessing.Process(target=_commit, args=(id_, base_name, content, queue))
    p.start()
    res = queue.get()
    p.join()
    email = schedule.get('email')
    if not email:
        return True
    changes = res.get('changes')
    if not changes:
        return True
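    # 'minimum_change' is interpreted as a fraction of the previous file length:
    # e.g. with previous_lines=200 and minimum_change=0.05, at least 10 changed
    # lines are needed before a notification is sent.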
    min_change = schedule.get('minimum_change')
    previous_lines = res.get('previous_lines')
    if min_change and previous_lines:
        min_change = float(min_change)
        change_fraction = res.get('changes') / previous_lines
        if change_fraction < min_change:
            return True
    # send notification
    diff = get_diff(id_).get('diff')
    if not diff:
        return True
    send_email(to=email, subject='%s page changed' % schedule.get('title'),
               body='changes:\n\n%s' % diff)
    return True


def safe_run_job(id_=None, *args, **kwargs):
    """Safely run a job, catching all the exceptions.

    :param id_: ID of the schedule to run
    :type id_: str
    :param args: positional arguments
    :type args: tuple
    :param kwargs: named arguments
    :type kwargs: dict"""
    try:
        run_job(id_, *args, **kwargs)
    except Exception as e:
        # notify the administrator address (EMAIL_FROM is set from --admin-email when configured)
        send_email(to=EMAIL_FROM, subject='diffido error',
                   body='error executing job %s: %s' % (id_, e))


def send_email(to, subject='diffido', body='', from_=None):
    """Send an email

    :param to: destination address
    :type to: str
    :param subject: email subject
    :type subject: str
    :param body: body of the email
    :type body: str
    :param from_: sender address
    :type from_: str

    :returns: True in case of success
    :rtype: bool"""
    msg = MIMEText(body)
    msg['Subject'] = subject
    msg['From'] = from_ or EMAIL_FROM
    msg['To'] = to
    starttls = SMTP_SETTINGS.get('smtp-starttls')
    use_ssl = SMTP_SETTINGS.get('smtp-use-ssl')
    args = {}
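    # Map the remaining smtp-* settings to smtplib keyword arguments,
    # e.g. 'smtp-host' -> host, 'smtp-port' -> port, 'smtp-local-hostname' -> local_hostname.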
    for key, value in SMTP_SETTINGS.items():
        if key in ('smtp-starttls', 'smtp-use-ssl'):
            continue
        if key == 'smtp-port':
            value = int(value)
        key = key.replace('smtp-', '', 1).replace('-', '_')
        args[key] = value
    try:
        if use_ssl:
            with smtplib.SMTP_SSL(**args) as s:
                s.send_message(msg)
        else:
            tls_args = {}
            for key in ('ssl_keyfile', 'ssl_certfile', 'ssl_context'):
                if key in args:
                    tls_args[key] = args[key]
                    del args[key]
            with smtplib.SMTP(**args) as s:
                if starttls:
                    s.starttls(**tls_args)
                    s.ehlo_or_helo_if_needed()
                s.send_message(msg)
    except Exception as e:
        logger.error('unable to send email to %s: %s' % (to, e))
        return False
    return True


def get_history(id_, limit=None, add_info=False):
    """Read the history of a schedule.

    :param id_: ID of the schedule
    :type id_: str
    :param limit: number of entries to fetch
    :type limit: int
    :param add_info: add information about the schedule itself
    :type add_info: bool

    :returns: information about the schedule and its history
    :rtype: dict"""
    def _history(id_, limit, queue):
        try:
            os.chdir('storage/%s' % id_)
        except Exception as e:
            logger.info('unable to move to storage/%s directory: %s' % (id_, e))
            return queue.put(b'')
        cmd = [GIT_CMD, 'log', '--pretty=oneline', '--shortstat']
        if limit is not None:
            cmd.append('-%s' % limit)
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        stdout, _ = p.communicate()
        queue.put(stdout)

    queue = multiprocessing.Queue()
    p = multiprocessing.Process(target=_history, args=(id_, limit, queue))
    p.start()
    res = queue.get().decode('utf-8')
    p.join()
    history = []
    for match in re_commit.finditer(res):
        info = match.groupdict()
        info['insertions'] = int(info['insertions'] or 0)
        info['deletions'] = int(info['deletions'] or 0)
        info['changes'] = max(info['insertions'], info['deletions'])
        history.append(info)
    last_id = None
    if history and 'id' in history[0]:
        last_id = history[0]['id']
    for idx, item in enumerate(history):
        item['seq'] = idx + 1
    data = {'history': history, 'last_id': last_id}
    if add_info:
        data['schedule'] = get_schedule(id_)
    return data


def get_last_history(id_):
    """Read the last history entry of a schedule.

    :param id_: ID of the schedule
    :type id_: str

    :returns: the last history entry of the schedule
    :rtype: dict"""
    history = get_history(id_, limit=1)
    hist = history.get('history') or [{}]
    return hist[0]


def get_diff(id_, commit_id='HEAD', old_commit_id=None):
    """Return the diff between commits of a schedule

    :param id_: ID of the schedule
    :type id_: str
    :param commit_id: the most recent commit ID; HEAD by default
    :type commit_id: str
    :param old_commit_id: the older commit ID; if None, the previous commit is used
    :type old_commit_id: str

    :returns: information about the schedule and the diff between commits
    :rtype: dict"""
    def _history(id_, commit_id, old_commit_id, queue):
        try:
            os.chdir('storage/%s' % id_)
        except Exception as e:
            logger.info('unable to move to storage/%s directory: %s' % (id_, e))
            return queue.put(b'')
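        # with no old commit ID, diff against the parent of commit_id ("<commit>~" in Git)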
        p = subprocess.Popen([GIT_CMD, 'diff', old_commit_id or '%s~' % commit_id, commit_id],
                             stdout=subprocess.PIPE)
        stdout, _ = p.communicate()
        queue.put(stdout)

    queue = multiprocessing.Queue()
    p = multiprocessing.Process(target=_history, args=(id_, commit_id, old_commit_id, queue))
    p.start()
    res = queue.get().decode('utf-8')
    p.join()
    schedule = get_schedule(id_)
    return {'diff': res, 'schedule': schedule}


def scheduler_update(scheduler, id_):
    """Update a scheduler job, using information from the JSON object.

    :param scheduler: the TornadoScheduler instance to modify
    :type scheduler: TornadoScheduler
    :param id_: ID of the schedule that must be updated
    :type id_: str

    :returns: True in case of success
    :rtype: bool"""
    schedule = get_schedule(id_, add_id=False)
    if not schedule:
        logger.warn('unable to update empty schedule %s' % id_)
        return False
    trigger = schedule.get('trigger')
    if trigger not in ('interval', 'cron'):
        logger.warn('unable to update schedule %s: trigger not in ("cron", "interval")' % id_)
        return False
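    # Build the add_job() arguments: either plain interval keywords (weeks, days, hours,
    # minutes, seconds) or a CronTrigger built from the configured crontab string.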
    args = {}
    if trigger == 'interval':
        args['trigger'] = 'interval'
        for unit in 'weeks', 'days', 'hours', 'minutes', 'seconds':
            if 'interval_%s' % unit not in schedule:
                continue
            try:
                val = schedule['interval_%s' % unit]
                if not val:
                    continue
                args[unit] = int(val)
            except Exception:
                logger.warn('invalid argument on schedule %s: %s parameter %s is not an integer' %
                            (id_, 'interval_%s' % unit, schedule['interval_%s' % unit]))
        if len(args) == 1:
            logger.error('no valid interval specified, skipping schedule %s' % id_)
            return False
    elif trigger == 'cron':
        try:
            cron_trigger = CronTrigger.from_crontab(schedule['cron_crontab'])
            args['trigger'] = cron_trigger
        except Exception:
            logger.warn('invalid argument on schedule %s: cron_crontab parameter %s is not a valid crontab' %
                        (id_, schedule.get('cron_crontab')))
            return False
    git_create_repo(id_)
    try:
        scheduler.add_job(safe_run_job, id=id_, replace_existing=True, kwargs={'id_': id_}, **args)
    except Exception as e:
        logger.warn('unable to update job %s: %s' % (id_, e))
        return False
    return True


def scheduler_delete(scheduler, id_):
    """Delete a scheduler job and its Git repository.

    :param scheduler: the TornadoScheduler instance to modify
    :type scheduler: TornadoScheduler
    :param id_: ID of the schedule
    :type id_: str

    :returns: True in case of success
    :rtype: bool"""
    try:
        scheduler.remove_job(job_id=id_)
    except Exception as e:
        logger.warn('unable to delete job %s: %s' % (id_, e))
        return False
    return git_delete_repo(id_)


def reset_from_schedules(scheduler):
    """Reset all scheduler jobs, using information from the JSON object.

    :param scheduler: the TornadoScheduler instance to modify
    :type scheduler: TornadoScheduler

    :returns: True in case of success
    :rtype: bool"""
    ret = False
    try:
        scheduler.remove_all_jobs()
        for key in read_schedules().get('schedules', {}).keys():
            ret |= scheduler_update(scheduler, id_=key)
    except Exception as e:
        logger.warn('unable to reset all jobs: %s' % e)
        return False
    return ret


def git_init():
    """Initialize Git global settings"""
    p = subprocess.Popen([GIT_CMD, 'config', '--global', 'user.email', '"%s"' % EMAIL_FROM])
    p.communicate()
    p = subprocess.Popen([GIT_CMD, 'config', '--global', 'user.name', '"Diffido"'])
    p.communicate()


def git_create_repo(id_):
    """Create a Git repository

    :param id_: ID of the schedule
    :type id_: str

    :returns: True in case of success
    :rtype: bool"""
    repo_dir = 'storage/%s' % id_
    if os.path.isdir(repo_dir):
        return True
    p = subprocess.Popen([GIT_CMD, 'init', repo_dir])
    p.communicate()
    return p.returncode == 0


def git_delete_repo(id_):
    """Delete a Git repository

    :param id_: ID of the schedule
    :type id_: str

    :returns: True in case of success
    :rtype: bool"""
    repo_dir = 'storage/%s' % id_
    if not os.path.isdir(repo_dir):
        return False
    try:
        shutil.rmtree(repo_dir)
    except Exception as e:
        logger.warn('unable to delete Git repository %s: %s' % (id_, e))
        return False
    return True


class DiffidoBaseException(Exception):
    """Base class for diffido custom exceptions.

    :param message: text message
    :type message: str
    :param status: numeric http status code
    :type status: int"""
    def __init__(self, message, status=400):
        super(DiffidoBaseException, self).__init__(message)
        self.message = message
        self.status = status


class BaseHandler(tornado.web.RequestHandler):
    """Base class for request handlers."""

    # A property to access the first value of each argument.
    arguments = property(lambda self: dict([(k, v[0].decode('utf-8'))
                                            for k, v in self.request.arguments.items()]))

    @property
    def clean_body(self):
        """Return a dictionary from a JSON request body.

        :returns: a clean copy of the body arguments
        :rtype: dict"""
        return escape.json_decode(self.request.body or '{}')

    def write_error(self, status_code, **kwargs):
        """Default error handler."""
        if isinstance(kwargs.get('exc_info', (None, None))[1], DiffidoBaseException):
            exc = kwargs['exc_info'][1]
            status_code = exc.status
            message = exc.message
        else:
            message = 'internal error'
        self.build_error(message, status=status_code)

    def initialize(self, **kwargs):
        """Add every passed (key, value) as attributes of the instance."""
        for key, value in kwargs.items():
            setattr(self, key, value)

    def build_error(self, message='', status=400):
        """Build and write an error message.

        :param message: textual message
        :type message: str
        :param status: HTTP status code
        :type status: int
        """
        self.set_status(status)
        self.write({'error': True, 'message': message})

    def build_success(self, message='', status=200):
        """Build and write a success message.

        :param message: textual message
        :type message: str
        :param status: HTTP status code
        :type status: int
        """
        self.set_status(status)
        self.write({'error': False, 'message': message})


class SchedulesHandler(BaseHandler):
    """Schedules handler."""

    @gen.coroutine
    def get(self, id_=None, *args, **kwargs):
        """Get a schedule."""
        if id_ is not None:
            return self.write({'schedule': get_schedule(id_, add_history=True)})
        schedules = read_schedules()
        self.write(schedules)

    @gen.coroutine
    def put(self, id_=None, *args, **kwargs):
        """Update a schedule."""
        if id_ is None:
            return self.build_error(message='update action requires an ID')
        data = self.clean_body
        schedules = read_schedules()
        if id_ not in schedules.get('schedules', {}):
            return self.build_error(message='schedule %s not found' % id_)
        schedules['schedules'][id_] = data
        write_schedules(schedules)
        scheduler_update(scheduler=self.scheduler, id_=id_)
        self.write(get_schedule(id_=id_))

    @gen.coroutine
    def post(self, *args, **kwargs):
        """Add a schedule."""
        data = self.clean_body
        schedules = read_schedules()
        id_ = next_id(schedules)
        schedules['schedules'][id_] = data
        write_schedules(schedules)
        scheduler_update(scheduler=self.scheduler, id_=id_)
        self.write(get_schedule(id_=id_))

    @gen.coroutine
    def delete(self, id_=None, *args, **kwargs):
        """Delete a schedule."""
        if id_ is None:
            return self.build_error(message='an ID must be specified')
        schedules = read_schedules()
        if id_ in schedules.get('schedules', {}):
            del schedules['schedules'][id_]
            write_schedules(schedules)
            scheduler_delete(scheduler=self.scheduler, id_=id_)
        self.build_success(message='removed schedule %s' % id_)


class RunScheduleHandler(BaseHandler):
    """Run schedule handler."""

    @gen.coroutine
    def post(self, id_, *args, **kwargs):
        if run_job(id_, force=True):
            return self.build_success('job run')
        self.build_error('job not run')


class ResetSchedulesHandler(BaseHandler):
    """Reset schedules handler."""

    @gen.coroutine
    def post(self, *args, **kwargs):
        reset_from_schedules(self.scheduler)


class HistoryHandler(BaseHandler):
    """History handler."""

    @gen.coroutine
    def get(self, id_, *args, **kwargs):
        self.write(get_history(id_, add_info=True))


class DiffHandler(BaseHandler):
    """Diff handler."""

    @gen.coroutine
    def get(self, id_, commit_id, old_commit_id=None, *args, **kwargs):
        self.write(get_diff(id_, commit_id, old_commit_id))


class TemplateHandler(BaseHandler):
    """Handler for the template files in the / path."""

    @gen.coroutine
    def get(self, *args, **kwargs):
        """Get a template file."""
        page = 'index.html'
        if args and args[0]:
            page = args[0].strip('/')
        arguments = self.arguments
        self.render(page, **arguments)


def serve():
    """Read configuration and start the server."""
    global EMAIL_FROM, SMTP_SETTINGS
    jobstores = {'default': SQLAlchemyJobStore(url=JOBS_STORE)}
    scheduler = TornadoScheduler(jobstores=jobstores, timezone=pytz.utc)
    scheduler.start()

    define('port', default=3210, help='run on the given port', type=int)
    define('address', default='', help='bind the server at the given address', type=str)
    define('ssl_cert', default=os.path.join(os.path.dirname(__file__), 'ssl', 'diffido_cert.pem'),
           help='specify the SSL certificate to use for secure connections')
    define('ssl_key', default=os.path.join(os.path.dirname(__file__), 'ssl', 'diffido_key.pem'),
           help='specify the SSL private key to use for secure connections')
    define('admin-email', default='', help='email address of the site administrator', type=str)
    define('smtp-host', default='localhost', help='SMTP server address', type=str)
    define('smtp-port', default=0, help='SMTP server port', type=int)
    define('smtp-local-hostname', default=None, help='SMTP local hostname', type=str)
    define('smtp-use-ssl', default=False, help='Use SSL to connect to the SMTP server', type=bool)
    define('smtp-starttls', default=False, help='Use STARTTLS to connect to the SMTP server', type=bool)
    define('smtp-ssl-keyfile', default=None, help='SSL key file', type=str)
    define('smtp-ssl-certfile', default=None, help='SSL cert file', type=str)
    define('smtp-ssl-context', default=None, help='SSL context', type=str)
    define('debug', default=False, help='run in debug mode', type=bool)
    define('config', help='read configuration file',
           callback=lambda path: tornado.options.parse_config_file(path, final=False))

    if not options.config and os.path.isfile(DEFAULT_CONF):
        tornado.options.parse_config_file(DEFAULT_CONF, final=False)
    tornado.options.parse_command_line()
    if options.admin_email:
        EMAIL_FROM = options.admin_email
    for key, value in options.as_dict().items():
        if key.startswith('smtp-'):
            SMTP_SETTINGS[key] = value
    if options.debug:
        logger.setLevel(logging.DEBUG)

    ssl_options = {}
    if os.path.isfile(options.ssl_key) and os.path.isfile(options.ssl_cert):
        ssl_options = dict(certfile=options.ssl_cert, keyfile=options.ssl_key)
    init_params = dict(listen_port=options.port, logger=logger, ssl_options=ssl_options,
                       scheduler=scheduler)

    git_init()

    _reset_schedules_path = r'schedules/reset'
    _schedule_run_path = r'schedules/(?P<id_>\d+)/run'
    _schedules_path = r'schedules/?(?P<id_>\d+)?'
    _history_path = r'schedules/?(?P<id_>\d+)/history'
    _diff_path = r'schedules/(?P<id_>\d+)/diff/(?P<commit_id>[0-9a-f]+)/?(?P<old_commit_id>[0-9a-f]+)?/?'
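
    # every API route is registered twice: under /api/ and under the versioned /api/v1.0/ prefix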
    application = tornado.web.Application([
            (r'/api/%s' % _reset_schedules_path, ResetSchedulesHandler, init_params),
            (r'/api/v%s/%s' % (API_VERSION, _reset_schedules_path), ResetSchedulesHandler, init_params),
            (r'/api/%s' % _schedule_run_path, RunScheduleHandler, init_params),
            (r'/api/v%s/%s' % (API_VERSION, _schedule_run_path), RunScheduleHandler, init_params),
            (r'/api/%s' % _history_path, HistoryHandler, init_params),
            (r'/api/v%s/%s' % (API_VERSION, _history_path), HistoryHandler, init_params),
            (r'/api/%s' % _diff_path, DiffHandler, init_params),
            (r'/api/v%s/%s' % (API_VERSION, _diff_path), DiffHandler, init_params),
            (r'/api/%s' % _schedules_path, SchedulesHandler, init_params),
            (r'/api/v%s/%s' % (API_VERSION, _schedules_path), SchedulesHandler, init_params),
            (r'/?(.*)', TemplateHandler, init_params),
        ],
        static_path=os.path.join(os.path.dirname(__file__), 'dist/static'),
        template_path=os.path.join(os.path.dirname(__file__), 'dist/'),
        debug=options.debug)
    http_server = tornado.httpserver.HTTPServer(application, ssl_options=ssl_options or None)
    logger.info('Start serving on %s://%s:%d', 'https' if ssl_options else 'http',
                options.address if options.address else '127.0.0.1',
                options.port)
    http_server.listen(options.port, options.address)
    try:
        IOLoop.instance().start()
    except (KeyboardInterrupt, SystemExit):
        pass


if __name__ == '__main__':
    serve()