Merge pull request #1 from tamaringoapp/feat/s3
Allow upload to s3
bboure authored Sep 24, 2020
2 parents f834eb2 + 4027968 commit 49c3d6c
Showing 4 changed files with 23 additions and 27 deletions.
5 changes: 1 addition & 4 deletions Dockerfile
@@ -1,7 +1,6 @@
FROM kartoza/postgis:12.1
MAINTAINER [email protected]

-RUN apt-get -y update; apt-get -y --no-install-recommends install postgresql-client cron
+RUN apt-get -y update; apt-get -y --no-install-recommends install postgresql-client cron awscli
RUN touch /var/log/cron.log

COPY backups-cron /backups-cron
@@ -12,5 +11,3 @@ RUN chmod 0755 /*.sh

ENTRYPOINT ["/bin/bash", "/start.sh"]
CMD ["/docker-entrypoint.sh"]
-
-
13 changes: 13 additions & 0 deletions README.md
@@ -1,3 +1,16 @@
+# This is a Fork

+This was originally forked from https://github.com/kartoza/docker-pg-backup

+Instead of backing up locally, backups go to an S3 bucket.

+Additional env vars:

+`S3_BUCKET`: the bucket where the dumps are stored, e.g. `my-backups`
+`S3_BUCKET_PREFIX`: a prefix for the backups, e.g. `my-db/production`

+You will probably need authentication: either set the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` env vars, or give your EC2 instance the right IAM role.

# Docker PG Backup

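As a usage sketch of the README above (the image name and every value here are placeholders, not part of this commit), the new variables would be supplied when running the container:

```sh
# Hypothetical invocation; the image name and all values are placeholders.
docker run -d \
  -e S3_BUCKET=my-backups \
  -e S3_BUCKET_PREFIX=my-db/production \
  -e AWS_ACCESS_KEY_ID=... \
  -e AWS_SECRET_ACCESS_KEY=... \
  tamaringoapp/pg-backup
```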
27 changes: 4 additions & 23 deletions backups.sh
@@ -8,8 +8,7 @@ source /pgenv.sh
MYDATE=`date +%d-%B-%Y`
MONTH=$(date +%B)
YEAR=$(date +%Y)
-MYBASEDIR=/backups
-MYBACKUPDIR=${MYBASEDIR}/${YEAR}/${MONTH}
+MYBACKUPDIR=${S3_BUCKET_PREFIX}/${YEAR}/${MONTH}
mkdir -p ${MYBACKUPDIR}
cd ${MYBACKUPDIR}

@@ -21,25 +20,7 @@ echo "Backup running to $MYBACKUPDIR" >> /var/log/cron.log
#echo "Databases to backup: ${DBLIST}" >> /var/log/cron.log
for DB in ${DBLIST}
do
-  echo "Backing up $DB" >> /var/log/cron.log
-  if [ -z "${ARCHIVE_FILENAME:-}" ]; then
-    FILENAME=${MYBACKUPDIR}/${DUMPPREFIX}_${DB}.${MYDATE}.dmp
-  else
-    FILENAME="${ARCHIVE_FILENAME}.${DB}.dmp"
-  fi
-  if [[ -f ${MYBASEDIR}/globals.sql ]]; then
-    rm ${MYBASEDIR}/globals.sql
-    pg_dumpall --globals-only -f ${MYBASEDIR}/globals.sql
-  else
-    echo "Dump users and permisions"
-    pg_dumpall --globals-only -f ${MYBASEDIR}/globals.sql
-  fi
-  pg_dump -Fc -f ${FILENAME} ${DB}
+  FILENAME=${MYBACKUPDIR}/${DUMPPREFIX}_${DB}.${MYDATE}.dmp
+  echo "Backing up $DB to s3://${S3_BUCKET}/${FILENAME}" >> /var/log/cron.log
+  pg_dump -Fc ${DB} | aws s3 cp - s3://${S3_BUCKET}/${FILENAME}
done
-
-if [ "${REMOVE_BEFORE:-}" ]; then
-  TIME_MINUTES=$((REMOVE_BEFORE * 24 * 60))
-
-  echo "Removing following backups older than ${REMOVE_BEFORE} days" >> /var/log/cron.log
-  find ${MYBASEDIR}/* -type f -mmin +${TIME_MINUTES} -delete &>> /var/log/cron.log
-fi
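The rewritten loop streams each dump straight to S3: in `aws s3 cp - s3://...` the `-` makes the CLI read the object body from stdin, so no dump file lands on local disk, which is also why the local `REMOVE_BEFORE` cleanup block disappears (as does the `pg_dumpall --globals-only` backup of roles). A restore would reverse the pipe; a minimal sketch, with the bucket, key, and database name as placeholders:

```sh
# Stream a custom-format dump back out of S3 into pg_restore.
# Bucket, key, and database name are placeholders.
aws s3 cp s3://my-backups/my-db/production/2020/September/PG_mydb.24-September-2020.dmp - \
  | pg_restore --clean --if-exists --dbname=mydb
```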
5 changes: 5 additions & 0 deletions start.sh
@@ -65,6 +65,11 @@ export DUMPPREFIX=$DUMPPREFIX
export ARCHIVE_FILENAME="${ARCHIVE_FILENAME}"
export REMOVE_BEFORE=$REMOVE_BEFORE
export DBLIST=\"$DBLIST\"
+export S3_BUCKET=\"$S3_BUCKET\"
+export S3_BUCKET_PREFIX=\"$S3_BUCKET_PREFIX\"
+export AWS_ACCESS_KEY_ID=\"$AWS_ACCESS_KEY_ID\"
+export AWS_SECRET_ACCESS_KEY=\"$AWS_SECRET_ACCESS_KEY\"
" > /pgenv.sh
echo "Start script running with these environment options"
cat /pgenv.sh
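For context, `start.sh` writes these exports into `/pgenv.sh`, which `backups.sh` sources at the top of each cron run; the `\"` escapes mean the generated file carries quoted values. A sketch of the fragment it might produce (values illustrative):

```sh
# Possible /pgenv.sh fragment generated by start.sh; values are illustrative.
export S3_BUCKET="my-backups"
export S3_BUCKET_PREFIX="my-db/production"
export AWS_ACCESS_KEY_ID="..."
export AWS_SECRET_ACCESS_KEY="..."
```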
