Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 27 additions & 8 deletions scripts/backup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,24 +2,43 @@ set -e

while true; do

# One gzip-compressed dump per hour, named YYYY-MM-DD.HH.sql.gz.
# (The stale pre-gzip ".sql" assignment from the old revision is removed.)
SQL_FILE=$(date '+%Y-%m-%d.%H').sql.gz

# Dump and compress the database in one stream (no intermediate .sql file).
# ${VAR:=default} assigns the default into the environment on first use, so
# later references see the same value.
# NOTE(review): the password on the command line is visible in `ps`; a
# mysql option file would be safer -- kept as-is for compatibility.
mysqldump --no-tablespaces \
  -h "${MYSQL_HOST:=127.0.0.1}" \
  -u "${MYSQL_USER:=gorse}" \
  -p"${MYSQL_PASSWORD:=gorse_pass}" \
  --ssl-verify-server-cert=0 \
  "${MYSQL_DATABASE:=gorse}" users items feedback flask_dance_oauth \
  | gzip > "$SQL_FILE"

# A pipeline's exit status is the LAST stage's (gzip), so `set -e` cannot
# catch a failed mysqldump and a truncated/empty archive would be uploaded.
# Check the dump stage explicitly and fail fast instead (PIPESTATUS is
# bash-specific -- this script must run under bash, not plain sh).
if [ "${PIPESTATUS[0]}" -ne 0 ]; then
  echo "ERROR: mysqldump failed; aborting backup" >&2
  rm -f -- "$SQL_FILE"
  exit 1
fi

# Upload the compressed dump to S3-compatible storage.
# (The stale `put ${SQL_FILE}.gz` line from the old revision is removed:
# that file no longer exists now that gzip writes $SQL_FILE directly,
# and the failed upload would kill the loop under `set -e`.)
s3cmd --access_key="$S3_ACCESS_KEY" \
  --secret_key="$S3_SECRET_KEY" \
  --region="$S3_BUCKET_LOCATION" \
  --host="$S3_HOST_BASE" \
  --host-bucket="$S3_HOST_BUCKET" \
  put "$SQL_FILE" "s3://${S3_BUCKET}${S3_PREFIX}/${SQL_FILE}"

# Remove only this cycle's local archive once it is uploaded.
# (The stale `rm *.sql.gz` from the old revision is removed: a glob would
# also delete any other archives present.)  -f keeps a missing file from
# aborting the daemon under `set -e`; -- guards against option-like names.
rm -f -- "$SQL_FILE"

# Keep only the 7 newest backups on the remote.  `s3cmd ls` prints one
# object per line with the s3:// path in field 4 (see awk below), and the
# line starts with the modification date/time, so a reverse text sort
# orders the list newest-first.
BACKUP_FILES=$(s3cmd --access_key="$S3_ACCESS_KEY" \
  --secret_key="$S3_SECRET_KEY" \
  --region="$S3_BUCKET_LOCATION" \
  --host="$S3_HOST_BASE" \
  --host-bucket="$S3_HOST_BUCKET" \
  ls "s3://${S3_BUCKET}${S3_PREFIX}/" | grep '\.sql\.gz$' | sort -r | awk '{print $4}')

# Delete everything past the 7 newest.  $BACKUP_FILES is expanded
# unquoted on purpose: one word per object key (the date-based key names
# contain no whitespace).
COUNT=0
for FILE in $BACKUP_FILES; do
  COUNT=$((COUNT + 1))
  if [ "$COUNT" -gt 7 ]; then
    s3cmd --access_key="$S3_ACCESS_KEY" \
      --secret_key="$S3_SECRET_KEY" \
      --region="$S3_BUCKET_LOCATION" \
      --host="$S3_HOST_BASE" \
      --host-bucket="$S3_HOST_BUCKET" \
      del "$FILE"
  fi
done

# Wait 24 hours (86400 s) before starting the next backup cycle.
sleep 86400
Expand Down
Loading