Created
October 14, 2025 03:22
-
-
Save Lamarcke/a2d16f52a9e92ede9152e4c85dd3b2d4 to your computer and use it in GitHub Desktop.
MySQL in Docker local + S3 backup
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
# Docker MySQL backup script
# Makes a local backup and optionally uploads files to cloud storage (S3, R2) with rclone.
# Works great with Dockerized MySQL in a shared network (e.g. my_app_network)
#
# Required env vars: NETWORK, DB1_NAME, DB1_USER, DB1_PASS, DB1_HOST
# Optional env vars: BACKUP_DIR, MYSQL_IMAGE, DB2_NAME/DB2_USER/DB2_PASS/DB2_HOST,
#                    UPLOAD_TO_RCLONE, RCLONE_CONFIG_NAME, RCLONE_BUCKET_NAME, CLEANUP_LOCAL
set -euo pipefail
IFS=$'\n\t'

# === CONFIGURATION ===

# Current date for unique backup filenames
DATE=$(date +%F-%H%M%S)

# Backup directory on host
BACKUP_DIR="${BACKUP_DIR:-/var/local/backups/sql}"
mkdir -p "$BACKUP_DIR"

# Docker network shared by both DB containers (required — fail fast with a clear message
# instead of an opaque "unbound variable" error under set -u)
NETWORK="${NETWORK:?NETWORK must be set to the shared Docker network name}"

# Docker MySQL image used for dumping
MYSQL_IMAGE="${MYSQL_IMAGE:-mysql:8.3}"

# Rclone config (optional unless UPLOAD_TO_RCLONE=true)
# E.g.: cloudflare:mysql-backup
RCLONE_CONFIG_NAME="${RCLONE_CONFIG_NAME:-}"
RCLONE_BUCKET_NAME="${RCLONE_BUCKET_NAME:-}"

# === DATABASES TO BACKUP ===
# These must be passed via environment variables in crontab, e.g.:
# DB1_NAME=my-db DB1_USER=root DB1_PASS=pass DB1_HOST=db
# DB2_NAME=my-db-2 DB2_USER=root DB2_PASS=pass DB2_HOST=db-2
# and so on.
# NOTE: the credential string is split on the first ':' and '@' below, so
# passwords containing those characters would be parsed incorrectly.
declare -A DBS=(
  ["${DB1_NAME:?DB1_NAME must be set}"]="${DB1_USER:?DB1_USER must be set}:${DB1_PASS:?DB1_PASS must be set}@${DB1_HOST:?DB1_HOST must be set}"
)

# Add second database if defined
# To add more DBs, replicate this if statement with more DBX_ variables (where X is the DB number).
if [[ -n "${DB2_NAME:-}" && -n "${DB2_USER:-}" && -n "${DB2_PASS:-}" && -n "${DB2_HOST:-}" ]]; then
  DBS["$DB2_NAME"]="$DB2_USER:$DB2_PASS@$DB2_HOST"
fi

# === BACKUP LOOP ===
for DB_NAME in "${!DBS[@]}"; do
  echo "🔹 Backing up database: $DB_NAME"

  # Unpack "user:pass@host" written above.
  CREDENTIALS="${DBS[$DB_NAME]}"
  DB_USER="${CREDENTIALS%%:*}"
  REST="${CREDENTIALS#*:}"
  DB_PASS="${REST%%@*}"
  DB_HOST="${REST#*@}"

  BACKUP_FILENAME="$BACKUP_DIR/${DB_NAME}-${DATE}.sql"
  COMPRESSED_BACKUP_FILENAME="${BACKUP_FILENAME}.zst"

  # Maximum number of retries
  MAX_RETRIES=3
  RETRY_DELAY=5 # seconds between attempts

  # Dump DB using Dockerized MySQL client with retries
  SUCCESS=false
  for ((i=1; i<=MAX_RETRIES; i++)); do
    echo "🔹 Attempt $i: Backing up database $DB_NAME..."
    # Pass the password via the MYSQL_PWD env var instead of -p"$DB_PASS"
    # so it is not exposed in `ps` / docker inspect output.
    if docker run --rm --network "$NETWORK" -e MYSQL_PWD="$DB_PASS" "$MYSQL_IMAGE" \
      mysqldump --compact --single-transaction --quick --lock-tables=false \
      -h "$DB_HOST" -u "$DB_USER" "$DB_NAME" > "$BACKUP_FILENAME"; then
      SUCCESS=true
      break
    else
      echo "⚠️ Backup attempt $i failed."
      # Discard the partial dump so a truncated file can never be compressed/uploaded.
      rm -f -- "$BACKUP_FILENAME"
      if [[ $i -lt $MAX_RETRIES ]]; then
        echo "⏳ Retrying in $RETRY_DELAY seconds..."
        sleep "$RETRY_DELAY"
      fi
    fi
  done

  if [[ "$SUCCESS" != true ]]; then
    echo "❌ Failed to backup database $DB_NAME after $MAX_RETRIES attempts. Exiting." >&2
    exit 1
  fi

  # Compress with all available threads
  # Make sure to have zstd installed (e.g.: apt install zstd -y)
  # -9 compression is high but reasonable, use -6 or -3 (default) if it's too slow.
  zstd -9 -T0 "$BACKUP_FILENAME" -o "$COMPRESSED_BACKUP_FILENAME"

  # Remove uncompressed dump
  rm -f -- "$BACKUP_FILENAME"
  echo "✅ Backup complete: $COMPRESSED_BACKUP_FILENAME"

  # Upload to S3 bucket via rclone if enabled
  # You need to have S3 configured in rclone for this to work.
  # For R2, see this guide: https://developers.cloudflare.com/r2/examples/rclone/
  if [[ "${UPLOAD_TO_RCLONE:-false}" == "true" ]]; then
    : "${RCLONE_CONFIG_NAME:?RCLONE_CONFIG_NAME is required when UPLOAD_TO_RCLONE=true}"
    : "${RCLONE_BUCKET_NAME:?RCLONE_BUCKET_NAME is required when UPLOAD_TO_RCLONE=true}"
    echo "☁️ Uploading $COMPRESSED_BACKUP_FILENAME to ${RCLONE_CONFIG_NAME}:${RCLONE_BUCKET_NAME}..."
    rclone copy "$COMPRESSED_BACKUP_FILENAME" "${RCLONE_CONFIG_NAME}:${RCLONE_BUCKET_NAME}" \
      --quiet --s3-no-check-bucket
  fi

  # Optionally remove local copy after upload
  if [[ "${CLEANUP_LOCAL:-false}" == "true" ]]; then
    echo "🧹 Removing local backup $COMPRESSED_BACKUP_FILENAME"
    rm -f -- "$COMPRESSED_BACKUP_FILENAME"
  fi
done

# === Cleanup old backups (older than 14 days) ===
# If you don't want this behaviour, delete the following lines until the last 'echo'.

# From local files
echo "🗑️ Removing local backups older than 14 days in $BACKUP_DIR..."
find "$BACKUP_DIR" -type f -name "*.zst" -mtime +14 -exec rm -f -- {} +

# From cloud storage bucket — only when uploads are enabled, otherwise the
# rclone remote is unconfigured and this call would abort the script.
# For R2, this helps reduce the overall GB-month pricing.
if [[ "${UPLOAD_TO_RCLONE:-false}" == "true" ]]; then
  echo "🗑️ Removing bucket backups older than 14 days in ${RCLONE_CONFIG_NAME}:${RCLONE_BUCKET_NAME}..."
  rclone delete --min-age 14d "${RCLONE_CONFIG_NAME}:${RCLONE_BUCKET_NAME}"
fi

echo "🎉 All backups completed successfully."
Author
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Add the script somewhere like /usr/local/bin, and then make it executable.
An example crontab entry for this script is shown below; it will run the backup every day at 2 AM.
Logs will be appended to /var/log/db_backup.log.