# --- Scraped viewer metadata (kept as comments so text above the shebang
# --- does not break execution when the file is run with `bash script.sh`):
# Files
# Sankofa/scripts/backup-database-automated.sh
# 156 lines
# 3.9 KiB
# Bash
# Raw Normal View History
#!/bin/bash
# Automated Database Backup Script
# Runs as a Kubernetes CronJob for daily backups
#
# Required tools: pg_dump, gzip. Optional: aws CLI (S3 upload), curl (webhook).
# All configuration comes from environment variables with sane defaults.
# Strict mode: abort on errors, unset variables, and mid-pipeline failures.
set -euo pipefail

# Configuration (override via environment)
BACKUP_DIR="${BACKUP_DIR:-/backups/postgres}"   # where dumps are written
DB_NAME="${DB_NAME:-sankofa}"                   # database to dump
DB_HOST="${DB_HOST:-postgres}"
DB_PORT="${DB_PORT:-5432}"
DB_USER="${DB_USER:-postgres}"
RETENTION_DAYS="${RETENTION_DAYS:-7}"           # prune local dumps older than this
S3_BUCKET="${S3_BUCKET:-}"                      # optional: S3 upload target (empty = skip)
WEBHOOK_URL="${WEBHOOK_URL:-}"                  # optional: notification endpoint (empty = skip)
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
# NOTE: BACKUP_FILE is intentionally mutable — it gains a .gz suffix after compression.
BACKUP_FILE="${BACKUP_DIR}/${DB_NAME}_${TIMESTAMP}.sql"

# ANSI colors for log output
readonly GREEN='\033[0;32m'
readonly RED='\033[0;31m'
readonly YELLOW='\033[1;33m'
readonly NC='\033[0m'
# Print an informational message to stdout, prefixed and colorized green.
log_info() {
  printf '%b[INFO]%b %b\n' "$GREEN" "$NC" "$1"
}
# Print an error message, colorized red, to stderr so failures are not
# mixed into captured/redirected stdout.
log_error() {
  echo -e "${RED}[ERROR]${NC} $1" >&2
}
# Print a warning message, colorized yellow, to stderr — diagnostics belong
# on stderr, consistent with log_error.
log_warn() {
  echo -e "${YELLOW}[WARN]${NC} $1" >&2
}
# Create backup directory if it doesn't exist
# (runs at load time so every later step can assume the directory exists;
# under set -e the script aborts here if the path cannot be created)
mkdir -p "$BACKUP_DIR"
# Perform backup
#######################################
# Dump the database with pg_dump, compress it with gzip, and verify the
# resulting archive.
# Globals:
#   DB_HOST, DB_PORT, DB_USER, DB_NAME (read)
#   BACKUP_FILE (read; rewritten to the .gz path after compression)
#   BACKUP_SIZE (written; human-readable archive size)
# Returns: 0 on success; exits 1 on any failure.
# NOTE(review): authentication presumably arrives via PGPASSWORD/.pgpass —
# nothing here passes a password.
#######################################
perform_backup() {
  log_info "Starting database backup..."
  log_info "Database: ${DB_NAME}"
  log_info "Backup file: ${BACKUP_FILE}"

  # Fail fast with a clear message when client tools are missing.
  if ! command -v pg_dump > /dev/null 2>&1; then
    log_error "pg_dump not found. Install PostgreSQL client tools."
    exit 1
  fi

  # Plain-format dump straight to file. pg_dump's diagnostics stay on
  # stderr (the original '2>&1' leaked them into stdout).
  if ! pg_dump -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" \
      -F p -f "$BACKUP_FILE"; then
    # Remove the partial dump so retention logic (or an operator) can't
    # mistake it for a good backup.
    rm -f -- "$BACKUP_FILE"
    log_error "Backup failed"
    exit 1
  fi
  log_info "Backup completed successfully"

  # Compress; from here on BACKUP_FILE refers to the .gz archive.
  log_info "Compressing backup..."
  gzip "$BACKUP_FILE"
  BACKUP_FILE="${BACKUP_FILE}.gz"

  # Human-readable size, for the log only.
  BACKUP_SIZE=$(du -h "$BACKUP_FILE" | cut -f1)
  log_info "Backup size: ${BACKUP_SIZE}"

  # Confirm the gzip archive is structurally sound before declaring success.
  if gzip -t "$BACKUP_FILE" 2>/dev/null; then
    log_info "Backup integrity verified"
  else
    log_error "Backup integrity check failed"
    exit 1
  fi
  return 0
}
# Clean up old backups
#######################################
# Delete local backups for this database older than RETENTION_DAYS days.
# Globals: BACKUP_DIR, DB_NAME, RETENTION_DAYS (read)
# Returns: 0 (nothing to delete is not an error)
#######################################
cleanup_old_backups() {
  log_info "Cleaning up backups older than ${RETENTION_DAYS} days..."
  local deleted_count=0
  # NUL-delimited read so filenames with whitespace are handled safely.
  while IFS= read -r -d '' backup_file; do
    log_info "Deleting old backup: $(basename "$backup_file")"
    rm -f -- "$backup_file"
    # BUGFIX: the original '((deleted_count++))' returns status 1 when the
    # counter is 0, which aborts the whole script under 'set -e' right
    # after the first deletion. Plain arithmetic assignment always
    # succeeds.
    deleted_count=$((deleted_count + 1))
  done < <(find "$BACKUP_DIR" -name "${DB_NAME}_*.sql.gz" -type f -mtime +"$RETENTION_DAYS" -print0)
  if [ "$deleted_count" -gt 0 ]; then
    log_info "Deleted ${deleted_count} old backup(s)"
  else
    log_info "No old backups to delete"
  fi
}
# Upload to S3 (optional)
# No-op unless S3_BUCKET is set and the aws CLI is installed; upload
# failures are only warned about, never fatal.
upload_to_s3() {
  if [ -z "$S3_BUCKET" ] || ! command -v aws > /dev/null 2>&1; then
    return 0
  fi
  log_info "Uploading backup to S3..."
  S3_PATH="s3://${S3_BUCKET}/postgres-backups/$(basename "$BACKUP_FILE")"
  if aws s3 cp "$BACKUP_FILE" "$S3_PATH"; then
    log_info "Backup uploaded to S3: ${S3_PATH}"
  else
    log_warn "Failed to upload backup to S3"
  fi
}
# Send notification (optional)
# POST a small JSON status payload to WEBHOOK_URL when configured.
# $1 - status word, e.g. "succeeded" or "failed"
# Delivery problems are deliberately swallowed ('|| true'): a notification
# failure must never change the backup's exit status.
send_notification() {
  [ -n "$WEBHOOK_URL" ] || return 0
  local status="$1"
  local message="Database backup ${status}: ${BACKUP_FILE}"
  curl -X POST "$WEBHOOK_URL" \
    -H "Content-Type: application/json" \
    -d "{\"text\": \"${message}\"}" \
    > /dev/null 2>&1 || true
}
# Main execution
# Orchestrates the full run: dump -> prune old local backups -> optional S3
# upload -> optional webhook notification. Exits 0 on success, 1 on failure.
main() {
  log_info "=========================================="
  log_info "Database Backup Automation"
  log_info "=========================================="
  log_info ""

  # Bail out early on a failed dump, reporting the failure first.
  if ! perform_backup; then
    send_notification "failed"
    log_error "Backup process failed"
    exit 1
  fi

  cleanup_old_backups
  upload_to_s3
  send_notification "succeeded"
  log_info "Backup process completed successfully"
  exit 0
}
# Entry point — forward any CLI arguments (currently unused) to main.
main "$@"