[nanobox] Add Automated Backups (#4023)
This PR adds automatic backups to Nanobox instances. The database, Redis, and user files are backed up every day at 03:00 (server time) to the data warehouse component that comes with every Nanobox app. Old backups are pruned automatically; the number of backups to retain can be configured by setting the `BACKUP_COUNT` environment variable to any integer greater than 0 (the default is 1). This PR also updates `.env.nanobox` to reflect the current `.env.production.sample`.
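For example, to retain a week of daily backups instead of the default single copy, set `BACKUP_COUNT` on the app before the next scheduled run. A minimal sketch, assuming the variable is set through the Nanobox CLI's `evar` command (the exact subcommand form should be checked against the Nanobox docs for your CLI version):

```sh
# Keep the 7 most recent backups of each data component instead of the default 1.
# The cron jobs in boxfile.yml read the value at run time via ${BACKUP_COUNT:-1},
# falling back to 1 when the variable is unset.
nanobox evar add BACKUP_COUNT=7
```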
parent 2083000027
commit 185b41beb4
2 changed files with 52 additions and 1 deletion
.env.nanobox (+1, -1)

@@ -69,7 +69,7 @@ SMTP_FROM_ADDRESS=notifications@${APP_NAME}.nanoapp.io
 # PAPERCLIP_ROOT_URL=/system

 # Optional asset host for multi-server setups
-# CDN_HOST=assets.example.com
+# CDN_HOST=https://assets.example.com

 # S3 (optional)
 # S3_ENABLED=true
boxfile.yml (+51)
@@ -153,8 +153,59 @@ worker.sidekiq:
 data.db:
   image: nanobox/postgresql:9.5
+
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        PGPASSWORD=${DATA_POSTGRES_PASS} pg_dump -U ${DATA_POSTGRES_USER} -w -Fc -O gonano |
+          gzip |
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz --data-binary @- &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+          json_pp |
+          grep ${HOSTNAME} |
+          sort |
+          head -n-${BACKUP_COUNT:-1} |
+          sed 's/.*: "\(.*\)".*/\1/' |
+          while read file
+          do
+            curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+          done

 data.redis:
   image: nanobox/redis:3.0
+
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb --data-binary @/data/var/db/redis/dump.rdb &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+          json_pp |
+          grep ${HOSTNAME} |
+          sort |
+          head -n-${BACKUP_COUNT:-1} |
+          sed 's/.*: "\(.*\)".*/\1/' |
+          while read file
+          do
+            curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+          done

 data.storage:
   image: nanobox/unfs:0.9
+
+  cron:
+    - id: backup
+      schedule: '0 3 * * *'
+      command: |
+        tar cz -C /data/var/db/unfs/ |
+          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz --data-binary @- &&
+        curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+          json_pp |
+          grep ${HOSTNAME} |
+          sort |
+          head -n-${BACKUP_COUNT:-1} |
+          sed 's/.*: "\(.*\)".*/\1/' |
+          while read file
+          do
+            curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+          done
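The warehouse endpoints used by these cron jobs can also be called by hand to verify or restore a backup. A minimal sketch, assuming a console inside the data.db component (so the `WAREHOUSE_DATA_HOARDER_*` and Postgres variables from the diff above are already set); the blob name below is a made-up example:

```sh
# List the blobs currently stored in the warehouse component.
curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" \
  https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ | json_pp

# Fetch one of the listed database dumps (the blob name here is hypothetical)
# and restore it into the gonano database; the cron job writes pg_dump -Fc
# output, which pg_restore can read from stdin. An empty target database is assumed.
curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" \
  https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-db.example-2017-07-04.03-00-00.sql.gz |
  gunzip |
  PGPASSWORD=${DATA_POSTGRES_PASS} pg_restore -U ${DATA_POSTGRES_USER} -w -O -d gonano
```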