Skip to content

Commit

Permalink
Add check if the tables have already been set up
Browse files — browse the repository at this point in the history
  • Loading branch information
ikennaokpala committed Aug 21, 2016
1 parent fb56d87 commit f8aeafc
Showing 1 changed file with 29 additions and 34 deletions.
63 changes: 29 additions & 34 deletions templates/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,28 +12,23 @@ fi
# Timestamp used to tag backup artifacts created later in this script
# (e.g. "used_dump_on_$NOW.sql" and "recover-$NOW").
NOW=$(date +"%Y-%m-%d-%H%M")
# Snapshot the full container environment to /root/.env so later shells or
# cron jobs can reload it.
# NOTE(review): this persists every env var — including credential material
# such as AWS_S3/GCS_AUTH used below — to a file on disk; confirm /root/.env
# permissions are restrictive and nothing else exposes it.
env > /root/.env
# Provision cloud-storage credentials from the environment, once per container.
# AWS (s3fs) takes precedence over GCS; each branch is skipped when the
# credential file(s) already exist, so container restarts do not append
# duplicate credential lines.
#
# NOTE(review): reconstructed post-commit version of this section — the scraped
# diff had interleaved the pre-commit ("if ...;" / "then" on separate lines)
# and post-commit ("...; then" inline) hunk lines without +/- markers, leaving
# duplicated openers and unbalanced if/fi.
if [[ $AWS_S3 ]]; then
    # s3fs reads ~/.passwd-s3fs (per-user) and /etc/passwd-s3fs (system-wide).
    if [[ ! -f ~/.passwd-s3fs || ! -f /etc/passwd-s3fs ]]; then
        # printf '%s\n' preserves the credential string exactly; the original
        # unquoted `echo $AWS_S3` would word-split and glob-expand it.
        printf '%s\n' "$AWS_S3" >> ~/.passwd-s3fs && cp ~/.passwd-s3fs /etc/passwd-s3fs

        # s3fs refuses credential files that are readable by other users.
        chmod 600 ~/.passwd-s3fs
        chmod 640 /etc/passwd-s3fs
    fi
elif [[ $GCS_AUTH ]]; then
    if [[ ! -f $GCS_AUTH_FILE ]]; then
        # echo $GCS_AUTH >> ~/.gcs-auth.txt && cp ~/.gcs-auth.txt $GCS_AUTH_FILE
        printf '%s\n' "$GCS_AUTH" >> "$GCS_AUTH_FILE"

        chmod 600 "$GCS_AUTH_FILE"
    fi
fi

# NOTE(review): everything below is a GitHub commit-page scrape of a diff on
# templates/entrypoint.sh. Pre-commit (removed) and post-commit (added) hunk
# lines appear back to back WITHOUT their +/- markers, so several statements
# occur twice in slightly different styles, and one hunk is collapsed behind
# the "Expand All @@ ... @@" marker line (the while-loop's counter/abort
# logic is hidden there). This region is NOT runnable shell as-is; recover
# the post-commit file from the repository instead. Comments below map which
# lines belong to which version — presumably, based on the diff structure;
# verify against the repository.
# v pre-commit form of the condition ("if ...;" / "then" on separate lines).
if [[ $MARIADB_PORT_3306_TCP_ADDR ]];
then
# v post-commit form of the same condition (inline "; then").
if [[ $MARIADB_PORT_3306_TCP_ADDR ]]; then
# Derive a subnet wildcard (e.g. 10.0.0.% ) from the MariaDB IP by dropping
# its last character and appending '%'.
export IP_SUBNET_WILDCARD=${MARIADB_PORT_3306_TCP_ADDR/%?/}%;
counter=0;
# Block until the MariaDB TCP port accepts connections.
while ! nc -vz $MARIADB_PORT_3306_TCP_ADDR $MARIADB_PORT_3306_TCP_PORT; do
Expand All @@ -42,35 +37,35 @@ then
sleep 10;
done

# v pre-commit hunk: always computed subdirectory count and dump path.
subdirectories=`find $BACKUP -maxdepth 1 -type d | wc -l`
path_to_sql_dump="";
if [[ -f $BACKUP/dump.sql ]];
then
path_to_sql_dump="$BACKUP/dump.sql";
elif [[ -f $BACKUP/gfb.sql ]];
then
path_to_sql_dump="$BACKUP/gfb.sql";
fi
# v post-commit hunk: the commit's new guard — only restore when no WordPress
# tables exist yet (counts tables matching "_post" in the target database).
# NOTE(review): `mysqli` here looks like a shell function/alias defined
# elsewhere in this file (outside this view) — confirm; it is not a standard
# command.
no_tables=$(mysqli<<<"use $MARIADB_ENV_MARIADB_DATABASE;show tables;"|grep -c _post);
if [[ $no_tables == 0 ]]; then
# subdirectories=`find $BACKUP -maxdepth 1 -type d | wc -l`
path_to_sql_dump="";
if [[ -f $BACKUP/dump.sql ]]; then
path_to_sql_dump="$BACKUP/dump.sql";
elif [[ -f $BACKUP/gfb.sql ]]; then
path_to_sql_dump="$BACKUP/gfb.sql";
fi

# v pre-commit form.
if [[ $path_to_sql_dump ]];
then
initiate_db
restore_db $path_to_sql_dump
# v post-commit form: if a local SQL dump exists, restore from it and archive
# the used dump under a timestamped name.
if [[ $path_to_sql_dump ]]; then
initiate_db
restore_db $path_to_sql_dump

sudo mv $path_to_sql_dump $BACKUP/"used_dump_on_$NOW.sql"
# v pre-commit branch. NOTE(review): `subdirectories` is missing its `$`, so
# this compared the literal string "subdirectories" — a latent bug in the
# removed code (removed by this commit).
elif [[ subdirectories -le 1 ]] && [[ $GCS_AUTH || $AWS_S3 ]];
then
initiate_db
mount_cloud_storage
sudo mv $path_to_sql_dump $BACKUP/"used_dump_on_$NOW.sql"
# v post-commit branch: otherwise fall back to restoring the newest archived
# dump from mounted cloud storage.
elif [[ $GCS_AUTH || $AWS_S3 ]]; then
# elif [[ subdirectories -le 1 ]] && [[ $GCS_AUTH || $AWS_S3 ]];
initiate_db
mount_cloud_storage

# v pre-commit copy of the recovery steps (re-indented by the commit).
recovery_dir="$BACKUP/recover-$NOW"
mkdir -p $recovery_dir
latest_dump_path=`find "$MOUNT/data" -type f|sort -r|head -n1`
dump_file=`basename $latest_dump_path .tar.gz`
# v post-commit copy: pick the lexically newest archive under $MOUNT/data,
# unpack it into a timestamped recovery dir, and restore from it.
recovery_dir="$BACKUP/recover-$NOW"
mkdir -p $recovery_dir
latest_dump_path=`find "$MOUNT/data" -type f|sort -r|head -n1`
dump_file=`basename $latest_dump_path .tar.gz`

tar -xzvf $latest_dump_path -C $recovery_dir
tar -xzvf $latest_dump_path -C $recovery_dir

restore_db "$recovery_dir$BACKUP/$dump_file"
restore_db "$recovery_dir$BACKUP/$dump_file"
fi
fi
fi

Expand Down

0 comments on commit f8aeafc

Please sign in to comment.