black reformatting

boyska 2019-11-15 22:35:45 +01:00
parent 5cd55d15bd
commit b956076fc5
10 changed files with 472 additions and 372 deletions
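For context, black is an opinionated formatter: with its default settings it normalizes string literals to double quotes, enforces an 88-character line length, and splits long call sites into one argument per line with a trailing comma — which is what the hunks below show. A minimal illustrative sketch of that kind of rewrite (the "before" forms are shown as comments; the formatter is typically applied by running black over the package, e.g. "black ."):

# Illustrative only: the style black converges on with default settings,
# mirroring the first hunks of this commit.
import logging
from argparse import ArgumentParser

# before black: logger = logging.getLogger('cli')
logger = logging.getLogger("cli")

# before black: parser.add_argument('--verbose', '-v', action='count', default=0,
#                                   help='Increase verbosity; can be used multiple times')
parser = ArgumentParser(description="creates mp3 from live recordings")
parser.add_argument(
    "--verbose",
    "-v",
    action="count",
    default=0,
    help="Increase verbosity; can be used multiple times",
)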

View file

@@ -3,8 +3,9 @@ import sys
from argparse import ArgumentParser, Action
from datetime import datetime
import logging
logging.basicConfig(stream=sys.stdout)
logger = logging.getLogger('cli')
logger = logging.getLogger("cli")
CWD = os.getcwd()
@@ -16,19 +17,18 @@ from . import server
def pre_check_permissions():
def is_writable(d):
return os.access(d, os.W_OK)
if is_writable(get_config()['AUDIO_INPUT']):
yield "Audio input '%s' writable" % get_config()['AUDIO_INPUT']
if not os.access(get_config()['AUDIO_INPUT'], os.R_OK):
yield "Audio input '%s' unreadable" % get_config()['AUDIO_INPUT']
if is_writable(get_config()["AUDIO_INPUT"]):
yield "Audio input '%s' writable" % get_config()["AUDIO_INPUT"]
if not os.access(get_config()["AUDIO_INPUT"], os.R_OK):
yield "Audio input '%s' unreadable" % get_config()["AUDIO_INPUT"]
sys.exit(10)
if is_writable(os.getcwd()):
yield "Code writable"
if not is_writable(get_config()['AUDIO_OUTPUT']):
yield "Audio output '%s' not writable" % get_config()['AUDIO_OUTPUT']
if not is_writable(get_config()["AUDIO_OUTPUT"]):
yield "Audio output '%s' not writable" % get_config()["AUDIO_OUTPUT"]
logger.critical("Aborting")
sys.exit(10)
@@ -37,13 +37,15 @@ def pre_check_user():
if os.geteuid() == 0:
yield "You're running as root; this is dangerous"
def pre_check_ffmpeg():
path = get_config()['FFMPEG_PATH']
if not path.startswith('/'):
path = get_config()["FFMPEG_PATH"]
if not path.startswith("/"):
yield "FFMPEG_PATH is not absolute: %s" % path
from subprocess import check_output
try:
check_output([path, '-version'])
check_output([path, "-version"])
except OSError:
yield "FFMPEG not found as " + path
else:
@@ -54,7 +56,7 @@ def pre_check_ffmpeg():
class DateTimeAction(Action):
def __call__(self, parser, namespace, values, option_string=None):
if len(values) == 15 or len(values) == 13:
parsed_val = datetime.strptime(values, '%Y%m%d-%H%M%S')
parsed_val = datetime.strptime(values, "%Y%m%d-%H%M%S")
else:
raise ValueError("'%s' is not a valid datetime" % values)
setattr(namespace, self.dest, parsed_val)
@@ -62,15 +64,14 @@ class DateTimeAction(Action):
def common_pre():
prechecks = [pre_check_user, pre_check_permissions, pre_check_ffmpeg]
configs = ['default_config.py']
if 'TECHREC_CONFIG' in os.environ:
for conf in os.environ['TECHREC_CONFIG'].split(':'):
configs = ["default_config.py"]
if "TECHREC_CONFIG" in os.environ:
for conf in os.environ["TECHREC_CONFIG"].split(":"):
if not conf:
continue
path = os.path.realpath(conf)
if not os.path.exists(path):
logger.warn("Configuration file '%s' does not exist; skipping"
% path)
logger.warn("Configuration file '%s' does not exist; skipping" % path)
continue
configs.append(path)
os.chdir(os.path.dirname(os.path.realpath(__file__)))
@@ -81,36 +82,65 @@ def common_pre():
for warn in check():
logger.warn(warn)
def main():
parser = ArgumentParser(description='creates mp3 from live recordings')
parser.add_argument('--verbose', '-v', action='count',
default=0,
help='Increase verbosity; can be used multiple times')
parser.add_argument('--pretend', '-p', action='store_true', default=False,
help='Only pretend; no real action will be done')
sub = parser.add_subparsers(title='main subcommands',
description='valid subcommands')
serve_p = sub.add_parser('serve', help="Start an HTTP server")
parser = ArgumentParser(description="creates mp3 from live recordings")
parser.add_argument(
"--verbose",
"-v",
action="count",
default=0,
help="Increase verbosity; can be used multiple times",
)
parser.add_argument(
"--pretend",
"-p",
action="store_true",
default=False,
help="Only pretend; no real action will be done",
)
sub = parser.add_subparsers(
title="main subcommands", description="valid subcommands"
)
serve_p = sub.add_parser("serve", help="Start an HTTP server")
serve_p.set_defaults(func=server.main_cmd)
forge_p = sub.add_parser('forge', help="Create an audio file")
forge_p.add_argument('starttime', metavar='START',
help='Start time, espressed as 19450425_1200 (%%Y%%m%%d-%%H%%M%%S)',
action=DateTimeAction)
forge_p.add_argument('endtime', metavar='END',
help='End time, espressed as 19450425_1200 (%%Y%%m%%d-%%H%%M%%S)',
action=DateTimeAction)
forge_p.add_argument('-o', metavar='OUTFILE', dest='outfile',
default='out.mp3', help='Path of the output mp3')
forge_p = sub.add_parser("forge", help="Create an audio file")
forge_p.add_argument(
"starttime",
metavar="START",
help="Start time, espressed as 19450425_1200 (%%Y%%m%%d-%%H%%M%%S)",
action=DateTimeAction,
)
forge_p.add_argument(
"endtime",
metavar="END",
help="End time, espressed as 19450425_1200 (%%Y%%m%%d-%%H%%M%%S)",
action=DateTimeAction,
)
forge_p.add_argument(
"-o",
metavar="OUTFILE",
dest="outfile",
default="out.mp3",
help="Path of the output mp3",
)
forge_p.set_defaults(func=forge.main_cmd)
cleanold_p = sub.add_parser('cleanold', help="Remove old files from DB",
description="Will remove oldfiles with no filename from DB")
cleanold_p.add_argument('-t', metavar='MINAGE', dest='minage',
default='14', type=int,
help='Minimum age (in days) for removal')
cleanold_p = sub.add_parser(
"cleanold",
help="Remove old files from DB",
description="Will remove oldfiles with no filename from DB",
)
cleanold_p.add_argument(
"-t",
metavar="MINAGE",
dest="minage",
default="14",
type=int,
help="Minimum age (in days) for removal",
)
cleanold_p.set_defaults(func=maint.cleanold_cmd)
options = parser.parse_args()
options.cwd = CWD
if options.verbose < 1:
@@ -123,5 +153,7 @@ def main():
logging.info("giving verbose flag >2 times is useless")
common_pre()
options.func(options)
if __name__ == "__main__":
main()

View file

@@ -10,6 +10,8 @@ def get_config():
if get_config.instance is None:
get_config.instance = Config(os.getcwd())
return get_config.instance
get_config.instance = None
@@ -78,11 +80,12 @@ class Config(dict):
if not rv:
if silent:
return False
raise RuntimeError('The environment variable %r is not set '
'and as such configuration could not be '
'loaded. Set this variable and make it '
'point to a configuration file' %
variable_name)
raise RuntimeError(
"The environment variable %r is not set "
"and as such configuration could not be "
"loaded. Set this variable and make it "
"point to a configuration file" % variable_name
)
return self.from_pyfile(rv, silent=silent)
def from_pyfile(self, filename, silent=False):
@@ -100,15 +103,15 @@ class Config(dict):
`silent` parameter.
"""
filename = os.path.join(self.root_path, filename)
d = imp.new_module('config')
d = imp.new_module("config")
d.__file__ = filename
try:
with open(filename) as config_file:
exec(compile(config_file.read(), filename, 'exec'), d.__dict__)
exec(compile(config_file.read(), filename, "exec"), d.__dict__)
except IOError as e:
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return False
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
e.strerror = "Unable to load configuration file (%s)" % e.strerror
raise
self.from_object(d)
return True
@@ -143,7 +146,7 @@ class Config(dict):
self[key] = getattr(obj, key)
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self))
return "<%s %s>" % (self.__class__.__name__, dict.__repr__(self))
def import_string(import_name, silent=False):
@@ -159,27 +162,27 @@ def import_string(import_name, silent=False):
`None` is returned instead.
:return: imported object
"""
#XXX: py3 review needed
# XXX: py3 review needed
assert isinstance(import_name, string_types)
# force the import name to automatically convert to strings
import_name = str(import_name)
try:
if ':' in import_name:
module, obj = import_name.split(':', 1)
elif '.' in import_name:
module, obj = import_name.rsplit('.', 1)
if ":" in import_name:
module, obj = import_name.split(":", 1)
elif "." in import_name:
module, obj = import_name.rsplit(".", 1)
else:
return __import__(import_name)
# __import__ is not able to handle unicode strings in the fromlist
# if the module is a package
if sys.version_info[0] == 2 and isinstance(obj, unicode):
obj = obj.encode('utf-8')
obj = obj.encode("utf-8")
try:
return getattr(__import__(module, None, None, [obj]), obj)
except (ImportError, AttributeError):
# support importing modules not yet set up by the parent module
# (or package for that matter)
modname = module + '.' + obj
modname = module + "." + obj
__import__(modname)
return sys.modules[modname]
except ImportError as e:

View file

@@ -1,6 +1,6 @@
'''
"""
This module contains DB logic
'''
"""
from __future__ import print_function
import logging
from datetime import datetime, timedelta
@@ -20,33 +20,38 @@ Base = declarative_base()
class Rec(Base):
'''Entry on the DB'''
__tablename__ = 'rec'
"""Entry on the DB"""
__tablename__ = "rec"
id = Column(Integer, primary_key=True)
name = Column(String, nullable=True)
starttime = Column(DateTime, nullable=True)
endtime = Column(DateTime, nullable=True)
filename = Column(String, nullable=True)
def __init__(self, name="", starttime=None, endtime=None,
filename=None):
def __init__(self, name="", starttime=None, endtime=None, filename=None):
self.name = name
self.starttime = starttime
self.endtime = endtime
self.filename = filename
def serialize(self):
'''json-friendly encoding'''
return {'id': self.id,
'name': self.name,
'starttime': self.starttime,
'endtime': self.endtime,
'filename': self.filename
}
"""json-friendly encoding"""
return {
"id": self.id,
"name": self.name,
"starttime": self.starttime,
"endtime": self.endtime,
"filename": self.filename,
}
def __repr__(self):
contents = "id:'%s',name:'%s',Start: '%s',End: '%s'" % \
(self.id, self.name, self.starttime, self.endtime)
contents = "id:'%s',name:'%s',Start: '%s',End: '%s'" % (
self.id,
self.name,
self.starttime,
self.endtime,
)
if self.filename is not None:
contents += ",Filename: '%s'" % self.filename
return "<Rec(%s)>" % contents
@@ -58,12 +63,11 @@ class RecDB:
self.conn = self.engine.connect()
self.log = logging.getLogger(name=self.__class__.__name__)
logging.getLogger('sqlalchemy.engine').setLevel(logging.FATAL)
logging.getLogger('sqlalchemy.engine.base.Engine')\
.setLevel(logging.FATAL)
logging.getLogger('sqlalchemy.dialects').setLevel(logging.FATAL)
logging.getLogger('sqlalchemy.pool').setLevel(logging.FATAL)
logging.getLogger('sqlalchemy.orm').setLevel(logging.FATAL)
logging.getLogger("sqlalchemy.engine").setLevel(logging.FATAL)
logging.getLogger("sqlalchemy.engine.base.Engine").setLevel(logging.FATAL)
logging.getLogger("sqlalchemy.dialects").setLevel(logging.FATAL)
logging.getLogger("sqlalchemy.pool").setLevel(logging.FATAL)
logging.getLogger("sqlalchemy.orm").setLevel(logging.FATAL)
Base.metadata.create_all(self.engine) # create Database
@@ -74,17 +78,17 @@ class RecDB:
def add(self, simplerecord):
s = self.get_session()
s.add( simplerecord )
s.add(simplerecord)
s.commit()
self.log.info("New Record: %s" % simplerecord)
return ( simplerecord )
return simplerecord
def update(self, id, rec):
# TODO: rlist = results list
_rlist = self._search(_id=id)
if not len(_rlist) == 1:
raise ValueError('Too many recs with id=%s' % id)
raise ValueError("Too many recs with id=%s" % id)
self.log.debug("DB:: Update request %s:%s " % (id, rec))
self.log.debug("DB:: Update: data before %s" % _rlist[0])
@@ -92,7 +96,7 @@ class RecDB:
# 2013-11-24 22:22:42
_rlist[0].starttime = rec["starttime"]
_rlist[0].endtime = rec["endtime"]
if 'name' in rec:
if "name" in rec:
_rlist[0].name = rec["name"]
self.log.debug("DB:: Update: data AFTER %s" % _rlist[0])
@@ -150,34 +154,34 @@ class RecDB:
return query.all()
def _query_ongoing(self, query=None):
'''
"""
Not terminated AND recent.
The meaning is "a query that makes sense to stop"
'''
delta = timedelta(seconds=get_config()['FORGE_MAX_DURATION'])
"""
delta = timedelta(seconds=get_config()["FORGE_MAX_DURATION"])
return self._query_newer(delta, self._query_not_saved(query))
def _query_not_saved(self, query=None):
'''Still not saved'''
"""Still not saved"""
if query is None:
query = self.get_session().query(Rec)
return query.filter(Rec.filename == None)
def _query_saved(self, query=None):
'''Still not saved'''
"""Still not saved"""
if query is None:
query = self.get_session().query(Rec)
return query.filter(Rec.filename != None)
def _query_newer(self, delta, query=None):
'''Get Rec older than delta seconds'''
"""Get Rec older than delta seconds"""
if query is None:
query = self.get_session().query(Rec)
return query.filter(Rec.starttime > datetime.now() - delta)
def _query_older(self, delta, query=None):
'''Get Rec older than delta seconds'''
"""Get Rec older than delta seconds"""
if query is None:
query = self.get_session().query(Rec)
return query.filter(Rec.starttime < datetime.now() - delta)
@@ -187,15 +191,14 @@ class RecDB:
page_size = int(page_size)
query = query.limit(page_size)
if page:
query = query.offset(page*page_size)
query = query.offset(page * page_size)
return query
def _query_generic(self, query, _id=None, name=None, starttime=None,
endtime=None):
def _query_generic(self, query, _id=None, name=None, starttime=None, endtime=None):
if _id is not None:
query = query.filter_by(id=_id)
if name is not None:
query = query.filter(Rec.name.like("%"+name+"%"))
query = query.filter(Rec.name.like("%" + name + "%"))
if starttime is not None:
_st = starttime
query = query.filter(Rec.starttime > _st)
@@ -204,15 +207,22 @@ class RecDB:
query = query.filter(Rec.endtime < _et)
return query
def _search(self, _id=None, name=None, starttime=None,
endtime=None, page=0, page_size=PAGESIZE):
def _search(
self,
_id=None,
name=None,
starttime=None,
endtime=None,
page=0,
page_size=PAGESIZE,
):
self.log.debug(
"DB: Search => id:%s name:%s starttime:%s endtime=%s" %
(_id, name, starttime, endtime))
"DB: Search => id:%s name:%s starttime:%s endtime=%s"
% (_id, name, starttime, endtime)
)
query = self.get_session().query(Rec)
query = self._query_generic(query, _id, name, starttime,
endtime)
query = self._query_generic(query, _id, name, starttime, endtime)
query = self._query_page(query, page, page_size)
self.log.debug("Searching: %s" % str(query))
ret = query.all()
@@ -226,6 +236,7 @@ class RecDB:
if __name__ == "__main__":
def printall(queryres):
for record in queryres:
print("Record: %s" % record)
@@ -245,12 +256,11 @@ if __name__ == "__main__":
print("Mimmo ")
printall(db._search(name="Mimmo1"))
print("Search")
printall(db._search(name="Mimmo1",
starttime=datetime(2014, 5, 24, 15, 16, 1) ))
printall(db._search(name="Mimmo1", starttime=datetime(2014, 5, 24, 15, 16, 1)))
a = db.get_by_id(5)
a.start()
db.delete(1)
db.delete(2)
db.delete(4)
db.delete(1)
printall( db._search() )
printall(db._search())

View file

@@ -1,37 +1,40 @@
import logging
HOST = 'localhost'
PORT = '8000'
HOST = "localhost"
PORT = "8000"
# pastelog is just "paste", but customized to accept logging options
WSGI_SERVER = 'pastelog'
WSGI_SERVER = "pastelog"
# these are pastelog-specific options for logging engine
TRANSLOGGER_OPTS = {
'logger_name': 'accesslog',
'set_logger_level': logging.WARNING,
'setup_console_handler': False }
"logger_name": "accesslog",
"set_logger_level": logging.WARNING,
"setup_console_handler": False,
}
WSGI_SERVER_OPTIONS = {}
DEBUG = True
DB_URI = 'sqlite:///techrec.db'
AUDIO_OUTPUT = 'output/'
AUDIO_INPUT = 'rec/'
AUDIO_INPUT_FORMAT = '%Y-%m/%d/rec-%Y-%m-%d-%H-%M-%S.mp3'
AUDIO_OUTPUT_FORMAT = 'techrec-%(startdt)s-%(endtime)s-%(name)s.mp3'
DB_URI = "sqlite:///techrec.db"
AUDIO_OUTPUT = "output/"
AUDIO_INPUT = "rec/"
AUDIO_INPUT_FORMAT = "%Y-%m/%d/rec-%Y-%m-%d-%H-%M-%S.mp3"
AUDIO_OUTPUT_FORMAT = "techrec-%(startdt)s-%(endtime)s-%(name)s.mp3"
FORGE_TIMEOUT = 20
FORGE_MAX_DURATION = 3600*5
FFMPEG_OUT_CODEC = ['-acodec', 'copy']
FFMPEG_OPTIONS = ['-loglevel', 'warning', '-n']
FFMPEG_PATH = 'ffmpeg'
FORGE_MAX_DURATION = 3600 * 5
FFMPEG_OUT_CODEC = ["-acodec", "copy"]
FFMPEG_OPTIONS = ["-loglevel", "warning", "-n"]
FFMPEG_PATH = "ffmpeg"
# tag:value pairs
TAG_EXTRA = {}
# LICENSE URI is special because date need to be added
TAG_LICENSE_URI = None
STATIC_FILES='static/'
STATIC_PAGES='pages/'
STATIC_FILES = "static/"
STATIC_PAGES = "pages/"
try:
from pkg_resources import resource_filename, resource_isdir
if resource_isdir('techrec', 'pages'):
STATIC_PAGES = resource_filename('techrec', 'pages')
STATIC_FILES = resource_filename('techrec', 'static')
if resource_isdir("techrec", "pages"):
STATIC_PAGES = resource_filename("techrec", "pages")
STATIC_FILES = resource_filename("techrec", "static")
except ImportError:
logging.exception("Error loading resources from installed part")

View file

@@ -8,20 +8,19 @@ from .config_manager import get_config
def get_timefile_exact(time):
'''
"""
time is of type `datetime`; it is not "rounded" to match the real file;
that work is done in get_timefile(time)
'''
"""
return os.path.join(
get_config()['AUDIO_INPUT'],
time.strftime(get_config()['AUDIO_INPUT_FORMAT'])
get_config()["AUDIO_INPUT"], time.strftime(get_config()["AUDIO_INPUT_FORMAT"])
)
def round_timefile(exact):
'''
"""
This will round the datetime, so to match the file organization structure
'''
"""
return datetime(exact.year, exact.month, exact.day, exact.hour)
@@ -30,11 +29,11 @@ def get_timefile(exact):
def get_files_and_intervals(start, end, rounder=round_timefile):
'''
"""
both arguments are datetime objects
returns an iterator whose elements are (filename, start_cut, end_cut)
Cuts are expressed in seconds
'''
"""
if end <= start:
raise ValueError("end < start!")
@@ -50,7 +49,7 @@ def get_files_and_intervals(start, end, rounder=round_timefile):
def mp3_join(named_intervals):
'''
"""
Note that these are NOT the intervals returned by get_files_and_intervals,
as they do not supply a filename, but only a datetime.
What we want in input is basically the same thing, but with get_timefile()
@@ -58,8 +57,8 @@
This function make the (quite usual) assumption that the only start_cut (if
any) is at the first file, and the last one is at the last file
'''
ffmpeg = get_config()['FFMPEG_PATH']
"""
ffmpeg = get_config()["FFMPEG_PATH"]
startskip = None
endskip = None
files = []
@@ -72,65 +71,64 @@ def mp3_join(named_intervals):
if end_cut:
assert endskip is None
endskip = end_cut
assert '|' not in filename
assert "|" not in filename
files.append(filename)
cmdline = [ffmpeg, '-i', 'concat:%s' % '|'.join(files)]
cmdline += get_config()['FFMPEG_OUT_CODEC']
cmdline = [ffmpeg, "-i", "concat:%s" % "|".join(files)]
cmdline += get_config()["FFMPEG_OUT_CODEC"]
if startskip is not None:
cmdline += ['-ss', str(startskip)]
cmdline += ["-ss", str(startskip)]
else:
startskip = 0
if endskip is not None:
cmdline += ['-t', str(len(files)*3600 - (startskip + endskip))]
cmdline += ["-t", str(len(files) * 3600 - (startskip + endskip))]
return cmdline
def create_mp3(start, end, outfile, options={}, **kwargs):
intervals = [(get_timefile(begin), start_cut, end_cut)
for begin, start_cut, end_cut
in get_files_and_intervals(start, end)]
intervals = [
(get_timefile(begin), start_cut, end_cut)
for begin, start_cut, end_cut in get_files_and_intervals(start, end)
]
if os.path.exists(outfile):
raise OSError("file '%s' already exists" % outfile)
for path, _s, _e in intervals:
if not os.path.exists(path):
raise OSError("file '%s' does not exist; recording system broken?"
% path)
raise OSError("file '%s' does not exist; recording system broken?" % path)
# metadata date/time formatted according to
# https://wiki.xiph.org/VorbisComment#Date_and_time
metadata = {}
if outfile.endswith('.mp3'):
metadata['TRDC'] = start.replace(microsecond=0).isoformat()
metadata['RECORDINGTIME'] = metadata['TRDC']
metadata['ENCODINGTIME'] = datetime.now().replace(
microsecond=0).isoformat()
if outfile.endswith(".mp3"):
metadata["TRDC"] = start.replace(microsecond=0).isoformat()
metadata["RECORDINGTIME"] = metadata["TRDC"]
metadata["ENCODINGTIME"] = datetime.now().replace(microsecond=0).isoformat()
else:
metadata['DATE'] = start.replace(microsecond=0).isoformat()
metadata['ENCODER'] = 'https://github.com/boyska/techrec'
if 'title' in options:
metadata['TITLE'] = options['title']
if options.get('license_uri', None) is not None:
metadata['RIGHTS-DATE'] = start.strftime('%Y-%m')
metadata['RIGHTS-URI'] = options['license_uri']
if 'extra_tags' in options:
metadata.update(options['extra_tags'])
metadata["DATE"] = start.replace(microsecond=0).isoformat()
metadata["ENCODER"] = "https://github.com/boyska/techrec"
if "title" in options:
metadata["TITLE"] = options["title"]
if options.get("license_uri", None) is not None:
metadata["RIGHTS-DATE"] = start.strftime("%Y-%m")
metadata["RIGHTS-URI"] = options["license_uri"]
if "extra_tags" in options:
metadata.update(options["extra_tags"])
metadata_list = []
for tag, value in metadata.items():
if '=' in tag:
if "=" in tag:
logging.error('Received a tag with "=" inside, skipping')
continue
metadata_list.append('-metadata')
metadata_list.append('%s=%s' % (tag, value))
metadata_list.append("-metadata")
metadata_list.append("%s=%s" % (tag, value))
p = Popen(mp3_join(intervals) + metadata_list +
get_config()['FFMPEG_OPTIONS'] + [outfile])
if get_config()['FORGE_TIMEOUT'] == 0:
p = Popen(
mp3_join(intervals) + metadata_list + get_config()["FFMPEG_OPTIONS"] + [outfile]
)
if get_config()["FORGE_TIMEOUT"] == 0:
p.wait()
else:
start = datetime.now()
while (datetime.now() - start).total_seconds() \
< get_config()['FORGE_TIMEOUT']:
while (datetime.now() - start).total_seconds() < get_config()["FORGE_TIMEOUT"]:
p.poll()
if p.returncode is None:
sleep(1)
@@ -142,14 +140,14 @@ def create_mp3(start, end, outfile, options={}, **kwargs):
os.remove(outfile)
except:
pass
raise Exception('timeout') # TODO: make a specific TimeoutError
raise Exception("timeout") # TODO: make a specific TimeoutError
if p.returncode != 0:
raise OSError("return code was %d" % p.returncode)
return True
def main_cmd(options):
log = logging.getLogger('forge_main')
log = logging.getLogger("forge_main")
outfile = os.path.abspath(os.path.join(options.cwd, options.outfile))
log.debug('will forge an mp3 into %s' % (outfile))
log.debug("will forge an mp3 into %s" % (outfile))
create_mp3(options.starttime, options.endtime, outfile)

View file

@@ -7,11 +7,12 @@ from sqlalchemy import inspect
from .config_manager import get_config
from .db import RecDB
def cleanold_cmd(options):
log = logging.getLogger('cleanold')
log = logging.getLogger("cleanold")
log.debug("starting cleanold[%d]" % options.minage)
db = RecDB(get_config()['DB_URI'])
res = db.get_not_completed(options.minage*3600*24)
db = RecDB(get_config()["DB_URI"])
res = db.get_not_completed(options.minage * 3600 * 24)
count = len(res)
if options.pretend:
for rec in res:
@@ -25,4 +26,5 @@ def cleanold_cmd(options):
logging.info("Cleanold complete: %d deleted" % count)
sys.exit(0)
# vim: set ai ts=4 sw=4 et:

View file

@@ -12,18 +12,18 @@ class JobQueue(object):
job_id = self.last_job_id
def clean_jobs(res):
'''this callback will remove the job from the queue'''
"""this callback will remove the job from the queue"""
del self.jobs[job_id]
self.jobs[job_id] = self.pool.apply_async(function, args, kwargs,
clean_jobs)
self.jobs[job_id] = self.pool.apply_async(function, args, kwargs, clean_jobs)
return job_id
def check_job(self, job_id):
'''
"""
If the job is running, return the asyncResult.
If it has already completed, returns True.
If no such job_id exists at all, returns False
'''
"""
if job_id <= 0:
raise ValueError("non-valid job_id")
if self.last_job_id < job_id:
@@ -38,13 +38,16 @@ class JobQueue(object):
self.pool = None
def simulate_long_job(recid=None, starttime=None, endtime=None, name='', filename=None):
def simulate_long_job(recid=None, starttime=None, endtime=None, name="", filename=None):
from time import sleep
print("evviva " + name)
sleep(2)
print("lavoro su " + name)
sleep(2)
print("done su " + name)
_queue = None
@@ -54,12 +57,15 @@ def get_process_queue():
_queue = JobQueue()
return _queue
if __name__ == '__main__':
if __name__ == "__main__":
from datetime import datetime
n = datetime.now()
def sleep(n):
import time
print("Inizio %d" % n)
time.sleep(n)
print("Finisco %d" % n)

View file

@@ -8,8 +8,8 @@ import unicodedata
from bottle import Bottle, request, static_file, redirect, abort, response
import bottle
logger = logging.getLogger('server')
botlog = logging.getLogger('bottle')
logger = logging.getLogger("server")
botlog = logging.getLogger("bottle")
botlog.setLevel(logging.INFO)
botlog.addHandler(logging.StreamHandler(sys.stdout))
bottle._stderr = lambda x: botlog.info(x.strip())
@@ -25,47 +25,49 @@ def date_read(s):
def date_write(dt):
return dt.strftime('%s')
return dt.strftime("%s")
def rec_sanitize(rec):
d = rec.serialize()
d['starttime'] = date_write(d['starttime'])
d['endtime'] = date_write(d['endtime'])
d["starttime"] = date_write(d["starttime"])
d["endtime"] = date_write(d["endtime"])
return d
class DateApp(Bottle):
'''
"""
This application will expose some date-related functions; it is intended to
be used when you need to know the server's time on the browser
'''
"""
def __init__(self):
Bottle.__init__(self)
self.route('/help', callback=self.help)
self.route('/date', callback=self.date)
self.route('/custom', callback=self.custom)
self.route("/help", callback=self.help)
self.route("/date", callback=self.date)
self.route("/custom", callback=self.custom)
def date(self):
n = datetime.now()
return {
'unix': n.strftime('%s'),
'isoformat': n.isoformat(),
'ctime': n.ctime()
"unix": n.strftime("%s"),
"isoformat": n.isoformat(),
"ctime": n.ctime(),
}
def custom(self):
n = datetime.now()
if 'strftime' not in request.query:
if "strftime" not in request.query:
abort(400, 'Need argument "strftime"')
response.content_type = 'text/plain'
return n.strftime(request.query['strftime'])
response.content_type = "text/plain"
return n.strftime(request.query["strftime"])
def help(self):
response.content_type = 'text/plain'
return \
'/date : get JSON dict containing multiple formats of now()\n' + \
'/custom?strftime=FORMAT : get now().strftime(FORMAT)'
response.content_type = "text/plain"
return (
"/date : get JSON dict containing multiple formats of now()\n"
+ "/custom?strftime=FORMAT : get now().strftime(FORMAT)"
)
class RecAPI(Bottle):
@@ -73,20 +75,20 @@ class RecAPI(Bottle):
Bottle.__init__(self)
self._route()
self._app = app
self.db = RecDB(get_config()['DB_URI'])
self.db = RecDB(get_config()["DB_URI"])
def _route(self):
self.post('/create', callback=self.create)
self.post('/delete', callback=self.delete)
self.post('/update/<recid:int>', callback=self.update)
self.post('/generate', callback=self.generate)
self.get('/help', callback=self.help)
self.get('/', callback=self.help)
self.get('/get/search', callback=self.search)
self.get('/get/ongoing', callback=self.get_ongoing)
self.get('/get/archive', callback=self.get_archive)
self.get('/jobs', callback=self.running_jobs)
self.get('/jobs/<job_id:int>', callback=self.check_job)
self.post("/create", callback=self.create)
self.post("/delete", callback=self.delete)
self.post("/update/<recid:int>", callback=self.update)
self.post("/generate", callback=self.generate)
self.get("/help", callback=self.help)
self.get("/", callback=self.help)
self.get("/get/search", callback=self.search)
self.get("/get/ongoing", callback=self.get_ongoing)
self.get("/get/archive", callback=self.get_archive)
self.get("/jobs", callback=self.running_jobs)
self.get("/jobs/<job_id:int>", callback=self.check_job)
def create(self):
req = dict(request.POST.decode().allitems())
@@ -94,22 +96,21 @@ class RecAPI(Bottle):
logger.debug("Create request %s " % req)
now = datetime.now()
start = date_read(req['starttime']) if 'starttime' in req else now
name = req['name'] if 'name' in req else u""
end = date_read(req['endtime']) if 'endtime' in req else now
start = date_read(req["starttime"]) if "starttime" in req else now
name = req["name"] if "name" in req else u""
end = date_read(req["endtime"]) if "endtime" in req else now
rec = Rec(name=name,
starttime=start,
endtime=end)
rec = Rec(name=name, starttime=start, endtime=end)
ret = self.db.add(rec)
return self.rec_msg("Nuova registrazione creata! (id:%d)" % ret.id,
rec=rec_sanitize(rec))
return self.rec_msg(
"Nuova registrazione creata! (id:%d)" % ret.id, rec=rec_sanitize(rec)
)
def delete(self):
req = dict(request.POST.decode().allitems())
logging.info("Server: request delete %s " % (req))
if 'id' not in req:
if "id" not in req:
return self.rec_err("No valid ID")
if self.db.delete(req["id"]):
@@ -122,16 +123,16 @@ class RecAPI(Bottle):
newrec = {}
now = datetime.now()
if 'starttime' not in req:
newrec['starttime'] = now
if "starttime" not in req:
newrec["starttime"] = now
else:
newrec['starttime'] = date_read(req['starttime'])
newrec["starttime"] = date_read(req["starttime"])
if "endtime" not in req:
newrec['endtime'] = now
newrec["endtime"] = now
else:
newrec['endtime'] = date_read(req['endtime'])
if 'name' in req:
newrec["name"] = req['name']
newrec["endtime"] = date_read(req["endtime"])
if "name" in req:
newrec["name"] = req["name"]
try:
logger.info("prima di update")
@@ -139,88 +140,101 @@ class RecAPI(Bottle):
logger.info("dopo update")
except Exception as exc:
return self.rec_err("Errore Aggiornamento", exception=exc)
return self.rec_msg("Aggiornamento completato!",
rec=rec_sanitize(result_rec))
return self.rec_msg("Aggiornamento completato!", rec=rec_sanitize(result_rec))
def generate(self):
# prendiamo la rec in causa
recid = dict(request.POST.decode().allitems())['id']
recid = dict(request.POST.decode().allitems())["id"]
rec = self.db._search(_id=recid)[0]
if rec.filename is not None and os.path.exists(rec.filename):
return {'status': 'ready',
'message': 'The file has already been generated at %s' %
rec.filename,
'rec': rec
}
if get_config()['FORGE_MAX_DURATION'] > 0 and \
(rec.endtime - rec.starttime).total_seconds() > \
get_config()['FORGE_MAX_DURATION']:
response.status = 400
return {'status': 'error',
'message': 'The requested recording is too long' +
' (%d seconds)' %
(rec.endtime - rec.starttime).total_seconds()
}
rec.filename = get_config()['AUDIO_OUTPUT_FORMAT'] % {
'time': rec.starttime.strftime('%y%m%d_%H%M'), # kept for retrocompatibility, should be dropped
'endtime': rec.endtime.strftime('%H%M'),
'startdt': rec.starttime.strftime('%y%m%d_%H%M'),
'enddt': rec.endtime.strftime('%y%m%d_%H%M'),
'name': ''.join(filter(lambda c: c.isalpha(),
unicodedata.normalize('NFKD', rec.name).encode('ascii', 'ignore').decode('ascii'))),
return {
"status": "ready",
"message": "The file has already been generated at %s" % rec.filename,
"rec": rec,
}
if (
get_config()["FORGE_MAX_DURATION"] > 0
and (rec.endtime - rec.starttime).total_seconds()
> get_config()["FORGE_MAX_DURATION"]
):
response.status = 400
return {
"status": "error",
"message": "The requested recording is too long"
+ " (%d seconds)" % (rec.endtime - rec.starttime).total_seconds(),
}
rec.filename = get_config()["AUDIO_OUTPUT_FORMAT"] % {
"time": rec.starttime.strftime(
"%y%m%d_%H%M"
), # kept for retrocompatibility, should be dropped
"endtime": rec.endtime.strftime("%H%M"),
"startdt": rec.starttime.strftime("%y%m%d_%H%M"),
"enddt": rec.endtime.strftime("%y%m%d_%H%M"),
"name": "".join(
filter(
lambda c: c.isalpha(),
unicodedata.normalize("NFKD", rec.name)
.encode("ascii", "ignore")
.decode("ascii"),
)
),
}
self.db.get_session(rec).commit()
job_id = self._app.pq.submit(
create_mp3,
start=rec.starttime,
end=rec.endtime,
outfile=os.path.join(get_config()['AUDIO_OUTPUT'], rec.filename),
outfile=os.path.join(get_config()["AUDIO_OUTPUT"], rec.filename),
options={
'title': rec.name,
'license_uri': get_config()['TAG_LICENSE_URI'],
'extra_tags': get_config()['TAG_EXTRA']
}
"title": rec.name,
"license_uri": get_config()["TAG_LICENSE_URI"],
"extra_tags": get_config()["TAG_EXTRA"],
},
)
logger.debug("SUBMITTED: %d" % job_id)
return self.rec_msg("Aggiornamento completato!",
job_id=job_id,
result='/output/' + rec.filename,
rec=rec_sanitize(rec))
return self.rec_msg(
"Aggiornamento completato!",
job_id=job_id,
result="/output/" + rec.filename,
rec=rec_sanitize(rec),
)
def check_job(self, job_id):
try:
job = self._app.pq.check_job(job_id)
except ValueError:
abort(400, 'job_id not valid')
abort(400, "job_id not valid")
def ret(status):
return {'job_status': status, 'job_id': job_id}
return {"job_status": status, "job_id": job_id}
if job is True:
return ret('DONE')
return ret("DONE")
if job is False:
abort(404, 'No such job has ever been spawned')
abort(404, "No such job has ever been spawned")
else:
if job.ready():
try:
res = job.get()
return res
except Exception as exc:
r = ret('FAILED')
r['exception'] = str(exc)
r = ret("FAILED")
r["exception"] = str(exc)
import traceback
tb = traceback.format_exc()
logger.warning(tb)
if get_config()['DEBUG']:
r['exception'] = "%s: %s" % (str(exc), tb)
r['traceback'] = tb
if get_config()["DEBUG"]:
r["exception"] = "%s: %s" % (str(exc), tb)
r["traceback"] = tb
return r
return ret('WIP')
return ret("WIP")
def running_jobs(self):
res = {}
res['last_job_id'] = self._app.pq.last_job_id
res['running'] = self._app.pq.jobs.keys()
res["last_job_id"] = self._app.pq.last_job_id
res["running"] = self._app.pq.jobs.keys()
return res
def search(self, args=None):
@@ -230,8 +244,8 @@ class RecAPI(Bottle):
values = self.db._search(**req)
from pprint import pprint
logger.debug("Returned Values %s" %
pprint([r.serialize() for r in values]))
logger.debug("Returned Values %s" % pprint([r.serialize() for r in values]))
ret = {}
for rec in values:
@@ -241,12 +255,10 @@ class RecAPI(Bottle):
return ret
def get_ongoing(self):
return {rec.id: rec_sanitize(rec)
for rec in self.db.get_ongoing()}
return {rec.id: rec_sanitize(rec) for rec in self.db.get_ongoing()}
def get_archive(self):
return {rec.id: rec_sanitize(rec)
for rec in self.db.get_archive_recent()}
return {rec.id: rec_sanitize(rec) for rec in self.db.get_archive_recent()}
# @route('/help')
def help(self):
@@ -279,104 +291,126 @@ class RecServer:
self._app.pq = get_process_queue()
self._route()
self.db = RecDB(get_config()['DB_URI'])
self.db = RecDB(get_config()["DB_URI"])
def _route(self):
# Static part of the site
self._app.route('/output/<filepath:path>',
callback=lambda filepath:
static_file(filepath,
root=get_config()['AUDIO_OUTPUT'],
download=True))
self._app.route(
"/output/<filepath:path>",
callback=lambda filepath: static_file(
filepath, root=get_config()["AUDIO_OUTPUT"], download=True
),
)
self._app.route('/static/<filepath:path>',
callback=lambda filepath: static_file(filepath,
root=get_config()['STATIC_FILES']))
self._app.route('/', callback=lambda: redirect('/new.html'))
self._app.route('/new.html',
callback=partial(static_file, 'new.html',
root=get_config()['STATIC_PAGES']))
self._app.route('/old.html',
callback=partial(static_file, 'old.html',
root=get_config()['STATIC_PAGES']))
self._app.route('/archive.html',
callback=partial(static_file, 'archive.html',
root=get_config()['STATIC_PAGES']))
self._app.route(
"/static/<filepath:path>",
callback=lambda filepath: static_file(
filepath, root=get_config()["STATIC_FILES"]
),
)
self._app.route("/", callback=lambda: redirect("/new.html"))
self._app.route(
"/new.html",
callback=partial(
static_file, "new.html", root=get_config()["STATIC_PAGES"]
),
)
self._app.route(
"/old.html",
callback=partial(
static_file, "old.html", root=get_config()["STATIC_PAGES"]
),
)
self._app.route(
"/archive.html",
callback=partial(
static_file, "archive.html", root=get_config()["STATIC_PAGES"]
),
)
class DebugAPI(Bottle):
'''
"""
This application is useful for testing the webserver itself
'''
"""
def __init__(self):
Bottle.__init__(self)
self.route('/sleep/:milliseconds', callback=self.sleep)
self.route('/cpusleep/:howmuch', callback=self.cpusleep)
self.route('/big/:exponent', callback=self.big)
self.route("/sleep/:milliseconds", callback=self.sleep)
self.route("/cpusleep/:howmuch", callback=self.cpusleep)
self.route("/big/:exponent", callback=self.big)
def sleep(self, milliseconds):
import time
time.sleep(int(milliseconds)/1000.0)
return 'ok'
time.sleep(int(milliseconds) / 1000.0)
return "ok"
def cpusleep(self, howmuch):
out = ''
for i in xrange(int(howmuch) * (10**3)):
out = ""
for i in xrange(int(howmuch) * (10 ** 3)):
if i % 11234 == 0:
out += 'a'
out += "a"
return out
def big(self, exponent):
'''
"""
returns a 2**n -1 string
'''
"""
for i in xrange(int(exponent)):
yield str(i) * (2 ** i)
def help(self):
response.content_type = 'text/plain'
return '''
response.content_type = "text/plain"
return """
/sleep/<int:milliseconds> : sleep, than say "ok"
/cpusleep/<int:howmuch> : busysleep, than say "ok"
/big/<int:exponent> : returns a 2**n -1 byte content
'''
"""
class PasteLoggingServer(bottle.PasteServer):
def run(self, handler): # pragma: no cover
from paste import httpserver
from paste.translogger import TransLogger
handler = TransLogger(handler, **self.options['translogger_opts'])
del self.options['translogger_opts']
httpserver.serve(handler, host=self.host, port=str(self.port),
**self.options)
bottle.server_names['pastelog'] = PasteLoggingServer
handler = TransLogger(handler, **self.options["translogger_opts"])
del self.options["translogger_opts"]
httpserver.serve(handler, host=self.host, port=str(self.port), **self.options)
bottle.server_names["pastelog"] = PasteLoggingServer
def main_cmd(*args):
"""meant to be called from argparse"""
c = RecServer()
c._app.mount('/date', DateApp())
c._app.mount('/api', RecAPI(c._app))
if get_config()['DEBUG']:
c._app.mount('/debug', DebugAPI())
c._app.mount("/date", DateApp())
c._app.mount("/api", RecAPI(c._app))
if get_config()["DEBUG"]:
c._app.mount("/debug", DebugAPI())
server = get_config()['WSGI_SERVER']
if server == 'pastelog':
server = get_config()["WSGI_SERVER"]
if server == "pastelog":
from paste.translogger import TransLogger
get_config()['WSGI_SERVER_OPTIONS']['translogger_opts'] = \
get_config()['TRANSLOGGER_OPTS']
c._app.run(server=server,
host=get_config()['HOST'],
port=get_config()['PORT'],
debug=get_config()['DEBUG'],
quiet=True, # this is to hide access.log style messages
**get_config()['WSGI_SERVER_OPTIONS']
)
get_config()["WSGI_SERVER_OPTIONS"]["translogger_opts"] = get_config()[
"TRANSLOGGER_OPTS"
]
if __name__ == '__main__':
c._app.run(
server=server,
host=get_config()["HOST"],
port=get_config()["PORT"],
debug=get_config()["DEBUG"],
quiet=True, # this is to hide access.log style messages
**get_config()["WSGI_SERVER_OPTIONS"]
)
if __name__ == "__main__":
from cli import common_pre
common_pre()
logger.warn("Usage of server.py is deprecated; use cli.py")
main_cmd()

View file

@@ -2,18 +2,23 @@ from datetime import datetime, timedelta
from nose.tools import raises, eq_
from .forge import get_files_and_intervals, get_timefile_exact, round_timefile,\
get_timefile, mp3_join
from .forge import (
get_files_and_intervals,
get_timefile_exact,
round_timefile,
get_timefile,
mp3_join,
)
from .config_manager import get_config
eight = datetime(2014, 5, 30, 20)
nine = datetime(2014, 5, 30, 21)
ten = datetime(2014, 5, 30, 22)
get_config()['AUDIO_INPUT'] = ''
get_config()['AUDIO_INPUT_FORMAT'] = '%Y-%m/%d/%Y-%m-%d-%H-%M-%S.mp3'
get_config()['FFMPEG_PATH'] = 'ffmpeg'
get_config()['FFMPEG_OUT_CODEC'] = ['-acodec', 'copy']
get_config()["AUDIO_INPUT"] = ""
get_config()["AUDIO_INPUT_FORMAT"] = "%Y-%m/%d/%Y-%m-%d-%H-%M-%S.mp3"
get_config()["FFMPEG_PATH"] = "ffmpeg"
get_config()["FFMPEG_OUT_CODEC"] = ["-acodec", "copy"]
def minutes(n):
@@ -23,18 +28,19 @@ def minutes(n):
def seconds(n):
return timedelta(seconds=n)
# timefile
def test_timefile_exact():
eq_(get_timefile_exact(eight),
'2014-05/30/2014-05-30-20-00-00.mp3')
eq_(get_timefile_exact(eight), "2014-05/30/2014-05-30-20-00-00.mp3")
# Rounding
def test_rounding_similarity():
eq_(round_timefile(eight), round_timefile(eight+minutes(20)))
eq_(round_timefile(eight), round_timefile(eight + minutes(20)))
assert round_timefile(eight) != round_timefile(nine)
@@ -47,13 +53,12 @@ def test_rounding_value():
def test_timefile_alreadyround():
eq_(get_timefile(eight),
'2014-05/30/2014-05-30-20-00-00.mp3')
eq_(get_timefile(eight), "2014-05/30/2014-05-30-20-00-00.mp3")
def test_timefile_toround():
eq_(get_timefile(eight + minutes(20)),
'2014-05/30/2014-05-30-20-00-00.mp3')
eq_(get_timefile(eight + minutes(20)), "2014-05/30/2014-05-30-20-00-00.mp3")
# Intervals
@@ -69,17 +74,17 @@ def test_intervals_before():
def test_intervals_full_1():
res = list(get_files_and_intervals(eight, nine-seconds(1)))
res = list(get_files_and_intervals(eight, nine - seconds(1)))
eq_(len(res), 1)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
def test_intervals_partial_1():
res = list(get_files_and_intervals(eight, nine-minutes(10)))
res = list(get_files_and_intervals(eight, nine - minutes(10)))
eq_(len(res), 1)
eq_(res[0][1], 0)
eq_(res[0][2], 10*60 - 1)
eq_(res[0][2], 10 * 60 - 1)
def test_intervals_exact_2():
@@ -101,8 +106,7 @@ def test_intervals_partial_2():
def test_intervals_full_2():
res = list(get_files_and_intervals(eight,
nine + minutes(59) + seconds(59)))
res = list(get_files_and_intervals(eight, nine + minutes(59) + seconds(59)))
eq_(len(res), 2)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
@@ -122,7 +126,7 @@ def test_intervals_exact_3():
def test_intervals_partial_3():
res = list(get_files_and_intervals(eight, ten+minutes(50)))
res = list(get_files_and_intervals(eight, ten + minutes(50)))
eq_(len(res), 3)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
@@ -133,7 +137,7 @@ def test_intervals_partial_3():
def test_intervals_full_3():
res = list(get_files_and_intervals(eight, ten+minutes(59) + seconds(59)))
res = list(get_files_and_intervals(eight, ten + minutes(59) + seconds(59)))
eq_(len(res), 3)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
@@ -144,49 +148,54 @@ def test_intervals_full_3():
def test_intervals_middle_1():
res = list(get_files_and_intervals(eight + minutes(20),
nine - minutes(20)))
res = list(get_files_and_intervals(eight + minutes(20), nine - minutes(20)))
eq_(len(res), 1)
eq_(res[0][1], 20*60)
eq_(res[0][2], 20*60-1)
eq_(res[0][1], 20 * 60)
eq_(res[0][2], 20 * 60 - 1)
def test_intervals_left_2():
res = list(get_files_and_intervals(eight+minutes(30), nine))
res = list(get_files_and_intervals(eight + minutes(30), nine))
eq_(len(res), 2)
eq_(res[0][1], 30*60)
eq_(res[0][1], 30 * 60)
eq_(res[0][2], 0)
eq_(res[1][1], 0)
eq_(res[1][2], 3599)
# MP3 Join
def test_mp3_1():
eq_(' '.join(mp3_join((('a', 0, 0),))),
'ffmpeg -i concat:a -acodec copy')
eq_(" ".join(mp3_join((("a", 0, 0),))), "ffmpeg -i concat:a -acodec copy")
def test_mp3_1_left():
eq_(' '.join(mp3_join((('a', 160, 0),))),
'ffmpeg -i concat:a -acodec copy -ss 160')
eq_(" ".join(mp3_join((("a", 160, 0),))), "ffmpeg -i concat:a -acodec copy -ss 160")
def test_mp3_1_right():
eq_(' '.join(mp3_join((('a', 0, 1600),))),
'ffmpeg -i concat:a -acodec copy -t 2000')
eq_(
" ".join(mp3_join((("a", 0, 1600),))), "ffmpeg -i concat:a -acodec copy -t 2000"
)
def test_mp3_1_leftright():
eq_(' '.join(mp3_join((('a', 160, 1600),))),
'ffmpeg -i concat:a -acodec copy -ss 160 -t 1840')
eq_(
" ".join(mp3_join((("a", 160, 1600),))),
"ffmpeg -i concat:a -acodec copy -ss 160 -t 1840",
)
def test_mp3_2():
eq_(' '.join(mp3_join((('a', 0, 0), ('b', 0, 0)))),
'ffmpeg -i concat:a|b -acodec copy')
eq_(
" ".join(mp3_join((("a", 0, 0), ("b", 0, 0)))),
"ffmpeg -i concat:a|b -acodec copy",
)
def test_mp3_2_leftright():
eq_(' '.join(mp3_join((('a', 1000, 0), ('b', 0, 1600)))),
'ffmpeg -i concat:a|b -acodec copy -ss 1000 -t 4600')
eq_(
" ".join(mp3_join((("a", 1000, 0), ("b", 0, 1600)))),
"ffmpeg -i concat:a|b -acodec copy -ss 1000 -t 4600",
)

View file

@@ -4,7 +4,7 @@ from setuptools import setup
setup(
name="techrec",
version="1.1.3",
version="1.2.0",
description="A Python2 web application "
"that assist radio speakers in recording their shows",
long_description=open("README.md").read(),
@@ -14,7 +14,10 @@ setup(
packages=["techrec"],
package_dir={"techrec": "server"},
install_requires=["Paste~=3.2", "SQLAlchemy==0.8.3", "bottle~=0.12"],
classifiers=["Programming Language :: Python :: 2.7"],
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.7",
],
entry_points={"console_scripts": ["techrec = techrec.cli:main"]},
zip_safe=False,
install_package_data=True,