#!/usr/bin/env bash
#
# backup_db.sh
#
# Dumps the SOMA PostgreSQL database, compresses it, and uploads it to S3
# under a timestamped key. Run inside the backend container or on the EC2
# host with Docker access.
#
# Required environment variables (from .env):
#   POSTGRES_USER, POSTGRES_PASSWORD, POSTGRES_DB
#   AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_REGION
#   S3_BACKUP_BUCKET (set this in .env — not in TID default, add it)
#
# Usage: bash /app/scripts/backup_db.sh

set -euo pipefail

# Assign first, then mark readonly, so a failing $(…) is not masked (SC2155).
TIMESTAMP="$(date +"%Y%m%d_%H%M%S")"
BACKUP_DIR="/tmp/soma_backups"
FILENAME="somadb_${TIMESTAMP}.sql.gz"
FILEPATH="${BACKUP_DIR}/${FILENAME}"
readonly TIMESTAMP BACKUP_DIR FILENAME FILEPATH

# Defaults if env not set.
POSTGRES_USER="${POSTGRES_USER:-soma}"
POSTGRES_PASSWORD="${POSTGRES_PASSWORD:-somapass}"
POSTGRES_DB="${POSTGRES_DB:-somadb}"
POSTGRES_HOST="${POSTGRES_HOST:-postgres}"
S3_BUCKET="${S3_BACKUP_BUCKET:-soma-backups}"

mkdir -p "${BACKUP_DIR}"

#######################################
# Remove a partial dump left behind by a failed pg_dump/gzip pipeline.
# Under `set -o pipefail` a mid-stream failure aborts the script but would
# otherwise leave a truncated .sql.gz that looks like a valid backup.
# Globals:  FILEPATH (read)
# Outputs:  error message to stderr
#######################################
cleanup_on_error() {
  rm -f -- "${FILEPATH}"
  printf '[backup] ERROR: dump failed; removed partial file %s\n' "${FILEPATH}" >&2
}

printf '[backup] Starting database dump at %s\n' "${TIMESTAMP}"

# Dump and compress. --clean/--if-exists emit DROP … IF EXISTS before each
# CREATE so the dump restores cleanly over an existing schema. The ERR trap
# is scoped to this pipeline only: a later aws failure must NOT delete a
# dump that completed successfully.
trap cleanup_on_error ERR
PGPASSWORD="${POSTGRES_PASSWORD}" pg_dump \
  -h "${POSTGRES_HOST}" \
  -U "${POSTGRES_USER}" \
  -d "${POSTGRES_DB}" \
  --no-password \
  --format=plain \
  --clean \
  --if-exists \
  | gzip > "${FILEPATH}"
trap - ERR

FILESIZE="$(du -sh -- "${FILEPATH}" | cut -f1)"
printf '[backup] Dump complete: %s (%s)\n' "${FILENAME}" "${FILESIZE}"

# Upload to S3 under backups/YYYYMM/<file> (first 6 chars of TIMESTAMP are
# the year+month). A failed upload aborts the script via `set -e`, but the
# local dump is kept for manual retry.
if command -v aws >/dev/null 2>&1; then
  S3_KEY="backups/${TIMESTAMP:0:6}/${FILENAME}"
  aws s3 cp "${FILEPATH}" "s3://${S3_BUCKET}/${S3_KEY}" \
    --sse AES256 \
    --region "${AWS_REGION:-ap-south-1}"
  printf '[backup] Uploaded to s3://%s/%s\n' "${S3_BUCKET}" "${S3_KEY}"
else
  printf '[backup] WARNING: aws CLI not found. Backup saved locally at %s\n' "${FILEPATH}" >&2
  printf '[backup] Install aws CLI or add boto3 call to upload manually.\n' >&2
fi

# Prune local backups OLDER THAN 7 DAYS. Note this is age-based, not a
# "keep the last 7 files" policy: -mtime +7 matches files modified more
# than 7 full days ago, regardless of how many backups exist.
find "${BACKUP_DIR}" -name 'somadb_*.sql.gz' -mtime +7 -delete

printf '[backup] Done.\n'