From 40279685fbbcc5185f2c0d68570e8ac8aaf554ef Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt?=
Date: Thu, 24 Sep 2020 16:50:45 +0200
Subject: [PATCH] Allow upload to s3

---
 Dockerfile |  5 +----
 README.md  | 13 +++++++++++++
 backups.sh | 27 ++++-----------------------
 start.sh   |  5 +++++
 4 files changed, 23 insertions(+), 27 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 98affab..2290965 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,6 @@
 FROM kartoza/postgis:12.1
-MAINTAINER tim@kartoza.com

-RUN apt-get -y update; apt-get -y --no-install-recommends install postgresql-client cron
+RUN apt-get -y update; apt-get -y --no-install-recommends install postgresql-client cron awscli
 RUN touch /var/log/cron.log

 COPY backups-cron /backups-cron
@@ -12,5 +11,3 @@ RUN chmod 0755 /*.sh

 ENTRYPOINT ["/bin/bash", "/start.sh"]
 CMD ["/docker-entrypoint.sh"]
-
-
diff --git a/README.md b/README.md
index 9aa8cf5..57d09f0 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,16 @@
+# This is a Fork
+
+This was originally forked from https://github.com/kartoza/docker-pg-backup
+
+Instead of backing up locally, backups go to an S3 bucket.
+
+Additional env vars:
+
+- `S3_BUCKET`: the bucket where the dumps are stored, e.g. `my-backups`
+- `S3_BUCKET_PREFIX`: a key prefix for the backups, e.g. `my-db/production`
+
+You will probably need authentication: either set the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` env vars, or give your EC2 instance an IAM role with write access to the bucket.
+
 # Docker PG Backup


diff --git a/backups.sh b/backups.sh
index 36634e8..b6f7337 100755
--- a/backups.sh
+++ b/backups.sh
@@ -8,8 +8,7 @@ source /pgenv.sh
 MYDATE=`date +%d-%B-%Y`
 MONTH=$(date +%B)
 YEAR=$(date +%Y)
-MYBASEDIR=/backups
-MYBACKUPDIR=${MYBASEDIR}/${YEAR}/${MONTH}
+MYBACKUPDIR=${S3_BUCKET_PREFIX}/${YEAR}/${MONTH}
 mkdir -p ${MYBACKUPDIR}
 cd ${MYBACKUPDIR}

@@ -21,25 +20,7 @@ echo "Backup running to $MYBACKUPDIR" >> /var/log/cron.log
 #echo "Databases to backup: ${DBLIST}" >> /var/log/cron.log
 for DB in ${DBLIST}
 do
-  echo "Backing up $DB" >> /var/log/cron.log
-  if [ -z "${ARCHIVE_FILENAME:-}" ]; then
-    FILENAME=${MYBACKUPDIR}/${DUMPPREFIX}_${DB}.${MYDATE}.dmp
-  else
-    FILENAME="${ARCHIVE_FILENAME}.${DB}.dmp"
-  fi
-  if [[ -f ${MYBASEDIR}/globals.sql ]]; then
-    rm ${MYBASEDIR}/globals.sql
-    pg_dumpall --globals-only -f ${MYBASEDIR}/globals.sql
-  else
-    echo "Dump users and permisions"
-    pg_dumpall --globals-only -f ${MYBASEDIR}/globals.sql
-  fi
-  pg_dump -Fc -f ${FILENAME} ${DB}
+  FILENAME=${MYBACKUPDIR}/${DUMPPREFIX}_${DB}.${MYDATE}.dmp
+  echo "Backing up $DB to s3://${S3_BUCKET}/${FILENAME}" >> /var/log/cron.log
+  pg_dump -Fc ${DB} | aws s3 cp - s3://${S3_BUCKET}/${FILENAME}
 done
-
-if [ "${REMOVE_BEFORE:-}" ]; then
-  TIME_MINUTES=$((REMOVE_BEFORE * 24 * 60))
-
-  echo "Removing following backups older than ${REMOVE_BEFORE} days" >> /var/log/cron.log
-  find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete &>> /var/log/cron.log
-fi
diff --git a/start.sh b/start.sh
index 3afbd21..b901bb2 100755
--- a/start.sh
+++ b/start.sh
@@ -65,6 +65,11 @@ export DUMPPREFIX=$DUMPPREFIX
 export ARCHIVE_FILENAME="${ARCHIVE_FILENAME}"
 export REMOVE_BEFORE=$REMOVE_BEFORE
 export DBLIST=\"$DBLIST\"
+export S3_BUCKET=\"$S3_BUCKET\"
+export S3_BUCKET_PREFIX=\"$S3_BUCKET_PREFIX\"
+export AWS_ACCESS_KEY_ID=\"$AWS_ACCESS_KEY_ID\"
+export AWS_SECRET_ACCESS_KEY=\"$AWS_SECRET_ACCESS_KEY\"
+
 " > /pgenv.sh
 echo "Start script running with these environment options"
 cat /pgenv.sh
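
For context, a minimal sketch of how the patched image might be launched with the new variables. The image tag, bucket, prefix, and credential values below are placeholders and do not appear in the patch; `DUMPPREFIX` is an existing variable from the upstream kartoza/docker-pg-backup image.

```sh
# Hypothetical invocation; image name and all values are placeholders.
# On an EC2 instance whose IAM role can write to the bucket, the two
# AWS_* variables can be omitted (awscli falls back to the instance role).
docker run -d \
  -e S3_BUCKET=my-backups \
  -e S3_BUCKET_PREFIX=my-db/production \
  -e AWS_ACCESS_KEY_ID=AKIA... \
  -e AWS_SECRET_ACCESS_KEY=... \
  -e DUMPPREFIX=PG \
  my-registry/docker-pg-backup-s3
```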