diff --git a/Dockerfile b/Dockerfile index c42b24a..c5d6308 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,7 +7,8 @@ LABEL org.label-schema.build-date=$BUILD_DATE \ org.label-schema.vcs-url="https://github.com/robbyoconnor/dockup.git" \ org.label-schema.vcs-ref=$VCS_REF -ENV CRON_TIME="0 0 * * *" +ENV CRON_INTERVALS false +WORKDIR "/dockup" CMD ["/dockup/run.sh"] ENV S3_BUCKET_NAME docker-backups.example.com ENV AWS_ACCESS_KEY_ID **DefineMe** diff --git a/README.md b/README.md index 4224b80..5cfcbae 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,7 @@ Why the name? Docker + Backup = Dockup Instead of backing up volumes you can also run tasks that provide the files to be backed up. See the following projects as examples on building on Dockup for that purpose: * [robbyoconnor/dockup-mongo](https://github.com/robbyoconnor/dockup-mongo) - Uses `mongodump` and `mongorestore` to backup and restore a MongoDB instance +* [mimicmobile/dockup-postgres](https://github.com/mimicmobile/dockup-postgres) - Uses `pg_dump` and `pg_restore` to backup and restore a PostgreSQL instance # Usage @@ -71,10 +72,22 @@ For more complex backup tasks as dumping a database, you can optionally define t Instead of providing paths manually you can set the `PATHS_TO_BACKUP` to `auto`. Using this setting the backup script will try to the detect the volumes mounted into the running backup container and include these into the backup archive. -### Scheduling +### Scheduling (snapshots) If you want `dockup` to run the backup as a cron task, you can set the environment variable `CRON_TIME` to the desired frequency, for example `CRON_TIME=0 0 * * *` to backup every day at midnight. +### Scheduling (intervals) + +Alternatively if you would like `dockup` to run interval backups as cron jobs (hourly, daily, weekly and monthly), set the environment variable `CRON_INTERVALS` to `true`. Interval backups are named `$BACKUP_NAME-<interval>.tar.gz` (for example `$BACKUP_NAME-hourly.tar.gz`) and will overwrite existing backups of the same interval name. 
The real benefit of interval backups comes when you enable [versioning](https://docs.aws.amazon.com/AmazonS3/latest/user-guide/enable-versioning.html) and create [lifecycle](https://docs.aws.amazon.com/AmazonS3/latest/user-guide/create-lifecycle.html) rules to automatically delete older previous versions of your backups. + +By default hourly backups are done every hour on the hour. Daily backups are done at 4:15am, weekly on Monday at 3:30am and monthly on the 1st of each month at 2:45am. + +You can specify when and if each backup will fire by setting any of the respective environment variables (`CRON_HOURLY`, `CRON_DAILY`, `CRON_WEEKLY` and `CRON_MONTHLY`) to the desired frequency. + +For example, `CRON_WEEKLY=0 2 * * 0` would run the weekly backup at 2am on Sundays. + +If you wish to disable any of the intervals, you can do so by setting the value to `#`. + ### Retries Sometimes creating the TAR archive may fail, often due to modifications to the files while `tar` is running. @@ -96,7 +109,7 @@ For more complex restore operations, you can define a command to be run once the ## Encryption -You can use GnuPG to encrypt backup archives and decrpyt them again when you need to restore them. +You can use GnuPG to encrypt backup archives and decrypt them again when you need to restore them. You need a GnuPG public key for encryption and the corresponding private key for decryption. Keep the private key safe (and secret), otherwise you will not be able to restore your backups. diff --git a/scripts/backup.sh b/scripts/backup.sh index 269706b..2906383 100755 --- a/scripts/backup.sh +++ b/scripts/backup.sh @@ -15,7 +15,11 @@ SECONDS=0 echo "[$start_time] Initiating backup $BACKUP_NAME..." 
# Get timestamp -: ${BACKUP_SUFFIX:=.$(date +"%Y-%m-%d-%H-%M-%S")} +if [[ "$CRON_INTERVALS" = "true" ]]; then + : ${BACKUP_SUFFIX:=-$1} +else + : ${BACKUP_SUFFIX:=.$(date +"%Y-%m-%d-%H-%M-%S")} +fi tarball=$BACKUP_NAME$BACKUP_SUFFIX.tar.gz # If a pre-backup command is defined, run it before creating the tarball diff --git a/scripts/cron.sh b/scripts/cron.sh new file mode 100755 index 0000000..4d982dc --- /dev/null +++ b/scripts/cron.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +: ${CRON_HOURLY:="0 */1 * * *"} +: ${CRON_DAILY:="15 4 * * *"} +: ${CRON_WEEKLY:="30 3 * * 1"} +: ${CRON_MONTHLY:="45 2 1 * *"} +LOGFIFO='/dockup/cron.fifo' + +if [[ ! -e "$LOGFIFO" ]]; then + mkfifo "$LOGFIFO" +fi +env | grep -v 'affinity:container' | sed -e 's/^\([^=]*\)=\(.*\)/export \1="\2"/' > /dockup/env.conf # Save current environment + +if [[ "$CRON_INTERVALS" = "true" ]]; then + echo "${CRON_HOURLY} cd /dockup && . ./env.conf && ./backup.sh hourly >> $LOGFIFO 2>&1" > crontab.conf + echo "${CRON_DAILY} cd /dockup && . ./env.conf && ./backup.sh daily >> $LOGFIFO 2>&1" >> crontab.conf + echo "${CRON_WEEKLY} cd /dockup && . ./env.conf && ./backup.sh weekly >> $LOGFIFO 2>&1" >> crontab.conf + echo "${CRON_MONTHLY} cd /dockup && . ./env.conf && ./backup.sh monthly >> $LOGFIFO 2>&1" >> crontab.conf + echo "=> Running dockup interval backups as a cronjob" +elif [ -n "$CRON_TIME" ]; then + echo "${CRON_TIME} cd /dockup && . ./env.conf && ./backup.sh >> $LOGFIFO 2>&1" > crontab.conf + echo "=> Running dockup backups as a cronjob for ${CRON_TIME}" +fi + +crontab ./crontab.conf +cron +tail -n +0 -f "$LOGFIFO" diff --git a/scripts/run.sh b/scripts/run.sh index 188ea3f..3ff97be 100755 --- a/scripts/run.sh +++ b/scripts/run.sh @@ -3,18 +3,9 @@ if [[ "$RESTORE" == "true" ]]; then ./restore.sh else - if [ -n "$CRON_TIME" ]; then - LOGFIFO='/dockup/cron.fifo' - if [[ ! 
-e "$LOGFIFO" ]]; then - mkfifo "$LOGFIFO" - fi - env | grep -v 'affinity:container' | sed -e 's/^\([^=]*\)=\(.*\)/export \1="\2"/' > /dockup/env.conf # Save current environment - echo "${CRON_TIME} cd /dockup && . ./env.conf && ./backup.sh >> $LOGFIFO 2>&1" > crontab.conf - crontab ./crontab.conf - echo "=> Running dockup backups as a cronjob for ${CRON_TIME}" - cron - tail -n +0 -f "$LOGFIFO" + if [[ "$CRON_INTERVALS" == "true" ]] || [ -n "$CRON_TIME" ]; then + ./cron.sh else ./backup.sh fi -fi \ No newline at end of file +fi