Implement certificate requests for the URLs and fix permissions
This commit is contained in:
parent ec30f759be
commit e4f36a6d7c
1 changed file with 44 additions and 5 deletions

OTcerts.py (49 changed lines)
@@ -8,6 +8,9 @@ import configparser
 import logging
 import mysql.connector
 import subprocess
+from pwd import getpwnam
+from grp import getgrnam
+
 
 # Query for IMAP/POP3 certificate
 mbox_list_stmt = "SELECT DISTINCT(name) FROM records WHERE content in ({}) and (name LIKE 'imap.%' or name LIKE 'pop3.%' or name LIKE 'mail.%')"
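The new pwd and grp imports are used further down to turn the configured certificate owner and group into numeric ids. As a quick illustration (the account names below are placeholders, not values from this repository; both calls raise KeyError if the name does not exist on the system), the returned structs expose the id both by index, as the diff does with [2], and by named attribute:

from pwd import getpwnam
from grp import getgrnam

# Placeholder account names; the real values come from ot_certs.ini.
uid = getpwnam("www-data").pw_uid   # same value as getpwnam("www-data")[2]
gid = getgrnam("www-data").gr_gid   # same value as getgrnam("www-data")[2]
print(uid, gid)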
@@ -42,7 +45,7 @@ subdomains_list_stmt = "SELECT DISTINCT(urls.dns_name) AS domain_names "\
                        "urls.dns_name LIKE %(domain)s)"
 
 default_conf_file="./etc/ot_certs.ini"
-logging.basicConfig(level=logging.DEBUG)
+logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger()
 
 
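Raising the root logger from DEBUG to INFO silences the many logger.debug(...) calls in this script while keeping info and error output. A minimal standalone sketch of the effect, not tied to this repository's configuration:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()

logger.debug("suppressed at INFO level")  # no longer printed after this change
logger.info("still printed")
logger.error("still printed")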
@@ -59,7 +62,7 @@ def init_prog(argv):
                         help="Specify config file (default: {})".format(default_conf_file))
     parser.add_argument("--renew", default=False, action='store_true', required=False,
                         help="Only invoke the renew step for certificates that are already present")
 
     service_group = parser.add_mutually_exclusive_group(required=True)
     service_group.add_argument("--proxy", default=False, action='store_true', required=False,
                                help="Request the certificates for the proxied sites")
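The --renew flag and the required mutually exclusive service group shown above follow the standard argparse pattern. A reduced, self-contained sketch; the flag names are copied from the hunk, while the comment about further service flags is an assumption since only --proxy is visible here:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--renew", default=False, action='store_true',
                    help="Only invoke the renew step for certificates that are already present")

# required=True means exactly one of the service flags must be passed on the command line
service_group = parser.add_mutually_exclusive_group(required=True)
service_group.add_argument("--proxy", default=False, action='store_true',
                           help="Request the certificates for the proxied sites")
# the real init_prog() presumably registers further service flags here, outside this hunk

args = parser.parse_args(["--proxy", "--renew"])
print(args.proxy, args.renew)  # True True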
@@ -163,7 +166,10 @@ def get_domain_list(config, ot_conn, dns_conn):
 
 def get_url_list(config_section, server_name, ot_conn, dns_conn):
     """
-    Return a list
+    Return the list of the URLs configured for a specific server_name
+    NB: at the moment the data is taken from the ortiche db, but it is not
+    checked that the DNS is configured consistently. This could cause errors
+    at later stages (e.g. during the HTTP-01 challenge)
     """
 
     urls_list = []
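The docstring above warns that the URLs come straight from the ortiche database without verifying that DNS actually points at the host serving the challenge, which is exactly what HTTP-01 requires. A hypothetical pre-check, not part of this commit, could resolve each name before requesting a certificate; dns_points_here and expected_ips are invented names for the sketch, socket is stdlib:

import socket

def dns_points_here(hostname, expected_ips):
    """Return True if hostname resolves to at least one address we serve the challenge from."""
    try:
        resolved = {info[4][0] for info in socket.getaddrinfo(hostname, 80)}
    except socket.gaierror:
        return False
    return bool(resolved & set(expected_ips))

# URLs failing this check would very likely also fail the HTTP-01 challenge later on.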
@@ -284,6 +290,28 @@ def link_cert(config, source, dest, dryrun=False):
     symlink_force(src_name, link_name)
 
 
+def fix_permissions(config):
+    """
+    Fix the certificates' permissions so that they are readable by the daemons that need them
+
+    """
+    archive_dir = config['certbot']['archive_certificates_dir']
+    uid = getpwnam(config['certbot']['certificates_user'])[2]
+    gid = getgrnam(config['certbot']['certificates_group'])[2]
+    for root, dirs, files in os.walk(archive_dir):
+        for momo in dirs:
+            logger.debug('Fixing user/group and permissions on {}'.format(os.path.join(root, momo)))
+            os.chown(os.path.join(root, momo), uid, gid)
+            os.chmod(os.path.join(root, momo), 0o755)
+
+        for momo in files:
+            logger.debug('Fixing user/group and permissions on {}'.format(os.path.join(root, momo)))
+            os.chown(os.path.join(root, momo), uid, gid)
+            if momo.startswith('privkey'):
+                os.chmod(os.path.join(root, momo), 0o640)
+            else:
+                os.chmod(os.path.join(root, momo), 0o644)
+
 if __name__ == '__main__':
     args, config = init_prog(sys.argv)
 
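fix_permissions expects a [certbot] section providing the archive directory and the owner/group to apply. A sketch of how such a section could be built and handed to it; only the key names come from the code above, the path and account values are invented for the example:

import configparser

# Hypothetical ot_certs.ini fragment; paths and account names are placeholders.
sample_ini = """
[certbot]
archive_certificates_dir = /etc/letsencrypt/archive
certificates_user = root
certificates_group = ssl-cert
"""

config = configparser.ConfigParser()
config.read_string(sample_ini)

# fix_permissions(config) would then walk archive_certificates_dir, chown every
# entry to root:ssl-cert, and keep privkey* files at mode 0o640 (0o644 otherwise).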
@@ -339,14 +367,25 @@ if __name__ == '__main__':
         logger.error("Error parsing configuration, KeyError {}".format(e))
         exit(-1)
     ot_conn=connect_db(dict(config['ot_db']))
-    dns_conn=connect_db(dict(config['dns_db']))
 
     upstream_servers = [s.strip() for s in proxy_conf['upstream_servers'].split(',') if len(s.strip())>0]
     for server_name in upstream_servers:
         logger.debug("Upstream server {}".format(server_name))
         url_list = get_url_list(proxy_conf, server_name,
-                                ot_conn, dns_conn)
+                                ot_conn, None)
         logger.debug(url_list)
+        for url in url_list:
+            acme_request(config, url, acme_test='HTTP-01', webroot=proxy_conf['http-01_webroot'],
+                         dryrun=dryrun, domains_list=[url])
+
+    ot_conn.close()
+
+    if not dryrun:
+        fix_permissions(config)
+
+        logger.info("Reloading nginx")
+        ret = os.system("systemctl reload nginx")
+        logger.info(ret)
+
     # Special case for hosting
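The new tail of the main block skips fix_permissions and the nginx reload in dry-run mode; note that os.system returns the raw wait status, which logger.info(ret) prints as an integer. Since subprocess is already imported at the top of OTcerts.py, an equivalent call that raises on failure could look like this (a sketch, not what the commit does):

import subprocess

def reload_nginx():
    # check=True raises CalledProcessError when systemctl exits non-zero,
    # instead of returning a raw wait status as os.system() does.
    subprocess.run(["systemctl", "reload", "nginx"], check=True)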