#!/bin/bash
# Automated database backup script
# Supports full and incremental backups
#
# Usage: database-backup.sh [full|incremental]   (default: full)
#
# Required env:
#   DATABASE_URL           - PostgreSQL connection string passed to pg_dump
# Optional env:
#   BACKUP_DIR             - destination directory (default: /backups)
#   DB_NAME                - database name label (default: theorder)
#   BACKUP_STORAGE_BUCKET  - if set, upload the backup to S3 or Azure blobs
#   AZURE_STORAGE_ACCOUNT  - storage account used for the az upload path
#
# pipefail is essential here: without it, `pg_dump | gzip` succeeds as long
# as gzip succeeds, so a failed dump silently produces a corrupt backup.
set -euo pipefail

BACKUP_DIR="${BACKUP_DIR:-/backups}"
DB_NAME="${DB_NAME:-theorder}"
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_TYPE="${1:-full}" # full or incremental

# Fail fast with a clear message instead of letting pg_dump guess a target.
: "${DATABASE_URL:?DATABASE_URL must be set}"

echo "📦 Starting database backup (type: $BACKUP_TYPE)..."
mkdir -p "$BACKUP_DIR"

# rotate_backups PREFIX KEEP
# Delete all but the newest KEEP backups whose names start with PREFIX.
# Filenames embed a %Y%m%d_%H%M%S timestamp, so a reverse lexical sort is
# newest-first — no need to parse `ls` output (SC2012) or risk xargs
# mangling paths with whitespace.
rotate_backups() {
  local prefix=$1 keep=$2 f
  local -a files=()
  shopt -s nullglob
  files=("$BACKUP_DIR/${prefix}"*.sql.gz)
  shopt -u nullglob
  if (( ${#files[@]} > keep )); then
    while IFS= read -r f; do
      rm -f -- "$f"
    done < <(printf '%s\n' "${files[@]}" | sort -r | tail -n +"$((keep + 1))")
  fi
}

case "$BACKUP_TYPE" in
  full)
    BACKUP_FILE="$BACKUP_DIR/full_backup_${TIMESTAMP}.sql.gz"
    echo "Creating full backup..."
    pg_dump "$DATABASE_URL" | gzip > "$BACKUP_FILE"
    echo "✅ Full backup created: $BACKUP_FILE"
    # Keep only last 7 full backups
    rotate_backups "full_backup_" 7
    ;;
  incremental)
    BACKUP_FILE="$BACKUP_DIR/incremental_backup_${TIMESTAMP}.sql.gz"
    echo "Creating incremental backup..."
    # For PostgreSQL, we use WAL archiving for true incremental backups
    # This is a simplified version using pg_dump with --data-only
    pg_dump "$DATABASE_URL" --data-only | gzip > "$BACKUP_FILE"
    echo "✅ Incremental backup created: $BACKUP_FILE"
    # Keep only last 24 incremental backups
    rotate_backups "incremental_backup_" 24
    ;;
  *)
    # Previously an unknown type fell through the if/elif silently and the
    # upload section then referenced an unset $BACKUP_FILE. Reject it.
    echo "Unknown backup type: $BACKUP_TYPE (expected 'full' or 'incremental')" >&2
    exit 2
    ;;
esac

# Upload to cloud storage if configured.
# Upload failures are deliberately non-fatal (|| true): the local backup
# already exists, and a transient network error should not fail the job.
if [ -n "${BACKUP_STORAGE_BUCKET:-}" ]; then
  echo "Uploading backup to cloud storage..."
  if command -v aws &> /dev/null; then
    aws s3 cp "$BACKUP_FILE" "s3://$BACKUP_STORAGE_BUCKET/$(basename "$BACKUP_FILE")" || true
  elif command -v az &> /dev/null; then
    az storage blob upload --file "$BACKUP_FILE" --container-name backups --name "$(basename "$BACKUP_FILE")" --account-name "${AZURE_STORAGE_ACCOUNT:-}" || true
  fi
fi

echo "✅ Backup complete!"