#!/usr/bin/env bash
# =============================================================================
# Changemaker Lite V2 — Backup Script
# Backs up PostgreSQL databases, uploads, and generates a manifest.
# Usage: ./scripts/backup.sh [--s3] [--retention DAYS]
# =============================================================================
# Strict mode: exit on error, error on unset variables, and make a pipeline
# fail if any stage fails (several blocks below rely on pipefail).
set -euo pipefail

# --- Configuration ---
# Absolute directory containing this script, resolved through symlink-free pwd.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Project root is the parent of the scripts/ directory.
PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
# Destination for backup archives; overridable via the BACKUP_DIR env var.
BACKUP_DIR="${BACKUP_DIR:-$PROJECT_DIR/backups}"
# Days to keep local archives; env default, may be overridden by --retention.
RETENTION_DAYS="${RETENTION_DAYS:-30}"
# Timestamp baked into the backup name, e.g. 20240101_120000.
TIMESTAMP="$(date +%Y%m%d_%H%M%S)"
BACKUP_NAME="changemaker-v2-backup-${TIMESTAMP}"
# Staging directory; rolled into a single tar.gz and deleted at the end.
BACKUP_PATH="${BACKUP_DIR}/${BACKUP_NAME}"
# Flipped to true by the --s3 flag.
S3_UPLOAD=false
|
|
|
|
# --- Parse args ---
# Consume recognized flags left to right; anything unknown is a hard error.
while (( $# > 0 )); do
  case "$1" in
    --s3)
      S3_UPLOAD=true
      shift
      ;;
    --retention)
      RETENTION_DAYS="$2"
      shift 2
      ;;
    --help)
      echo "Usage: $0 [--s3] [--retention DAYS]"
      echo " --s3 Upload backup to S3 (requires AWS CLI + S3_BUCKET env var)"
      echo " --retention N Delete local backups older than N days (default: 30)"
      exit 0
      ;;
    *)
      echo "Unknown option: $1"
      exit 1
      ;;
  esac
done
|
|
|
|
# --- Load .env if present (safe parsing to handle special characters) ---
# Reads KEY=VALUE pairs line by line instead of sourcing the file, so values
# containing spaces, globs, or command substitutions stay inert. Comments and
# blank lines are skipped; only syntactically valid names are exported.
load_env_file() {
  local env_file="$1" key value
  [ -f "$env_file" ] || return 0
  # '|| [ -n "$key" ]' also processes a final line that lacks a trailing
  # newline (plain 'read' returns non-zero there and would drop the line).
  while IFS='=' read -r key value || [ -n "$key" ]; do
    # Skip comments and empty lines
    [[ -z "$key" || "$key" =~ ^[[:space:]]*# ]] && continue
    # Trim surrounding whitespace from the key via parameter expansion
    # (the old 'echo | xargs' broke on keys containing quote characters).
    key="${key#"${key%%[![:space:]]*}"}"
    key="${key%"${key##*[![:space:]]}"}"
    # Strip quotes only when they surround the value as a MATCHING pair;
    # the old code also stripped lone/mismatched quotes (e.g. '"abc').
    if [[ "$value" == \"*\" && ${#value} -ge 2 ]]; then
      value="${value:1:${#value}-2}"
    elif [[ "$value" == \'*\' && ${#value} -ge 2 ]]; then
      value="${value:1:${#value}-2}"
    fi
    # Only export valid variable names
    if [[ "$key" =~ ^[a-zA-Z_][a-zA-Z0-9_]*$ ]]; then
      export "$key=$value"
    fi
  done < "$env_file"
}
load_env_file "$PROJECT_DIR/.env"
|
|
|
|
# --- Derived vars ---
# Container names, credentials, and database names fall back to the stack's
# shipped defaults when the corresponding .env variables are absent.
PG_CONTAINER="${PG_CONTAINER:-changemaker-v2-postgres}"
PG_USER="${V2_POSTGRES_USER:-changemaker}"
PG_DB="${V2_POSTGRES_DB:-changemaker_v2}"
# Listmonk runs against its own PostgreSQL container.
LISTMONK_PG_CONTAINER="${LISTMONK_PG_CONTAINER:-listmonk-db}"
LISTMONK_PG_USER="${LISTMONK_DB_USER:-listmonk}"
LISTMONK_PG_DB="${LISTMONK_DB_NAME:-listmonk}"
# User-uploaded assets archived in step 3.
UPLOADS_DIR="${PROJECT_DIR}/assets/uploads"
|
|
|
|
# Print the run banner, then make sure the staging directory exists.
printf '%s\n' \
  "==========================================" \
  " Changemaker Lite V2 — Backup" \
  " ${TIMESTAMP}" \
  "==========================================" \
  ""

# --- Create backup directory ---
mkdir -p "$BACKUP_PATH"
|
|
|
|
# --- 1. V2 PostgreSQL Dump ---
# Dumps the main application database from inside the Postgres container and
# gzips it host-side. --no-owner/--no-acl keep the dump restorable on a server
# with different role names.
echo "[1/4] Dumping V2 PostgreSQL (${PG_DB})..."
# grep -Fxq: match the container name as a fixed whole line. The previous
# "^${PG_CONTAINER}$" treated the name as a regex, so a name containing
# metacharacters (e.g. '.') could falsely match other containers.
if docker ps --format '{{.Names}}' | grep -Fxq "${PG_CONTAINER}"; then
  docker exec "$PG_CONTAINER" pg_dump -U "$PG_USER" -d "$PG_DB" --no-owner --no-acl \
    | gzip > "${BACKUP_PATH}/v2-postgres.sql.gz"
  echo " -> v2-postgres.sql.gz ($(du -h "${BACKUP_PATH}/v2-postgres.sql.gz" | cut -f1))"
else
  echo " [WARN] Container ${PG_CONTAINER} not running, skipping V2 DB dump."
fi
|
|
|
|
# --- 1b. Gancio PostgreSQL Dump (shared PostgreSQL, separate database) ---
# The gancio database may not exist on every install, so a dump failure is
# informational, not fatal.
echo "[1b/4] Dumping Gancio PostgreSQL (gancio)..."
if docker ps --format '{{.Names}}' | grep -Fxq "${PG_CONTAINER}"; then
  GANCIO_DUMP="${BACKUP_PATH}/gancio-postgres.sql.gz"
  # Fixes vs. the previous version: 2>/dev/null now silences pg_dump (whose
  # error message we want hidden) rather than gzip's output redirect, and a
  # failed dump removes the zero-byte .gz stub so the manifest and archive
  # don't pick up a corrupt file. Running the pipeline as an 'if' condition
  # keeps the failure from tripping 'set -e'; 'set -o pipefail' (enabled at
  # the top of the script) propagates pg_dump's status through gzip.
  if docker exec "$PG_CONTAINER" pg_dump -U "$PG_USER" -d gancio --no-owner --no-acl 2>/dev/null \
      | gzip > "$GANCIO_DUMP"; then
    echo " -> gancio-postgres.sql.gz ($(du -h "$GANCIO_DUMP" | cut -f1))"
  else
    rm -f "$GANCIO_DUMP"
    echo " [INFO] Gancio database not found, skipping."
  fi
else
  echo " [INFO] V2 PostgreSQL not running, skipping Gancio dump."
fi
|
|
|
|
# --- 2. Listmonk PostgreSQL Dump (optional) ---
# Same pattern as step 1, against Listmonk's dedicated Postgres container.
echo "[2/4] Dumping Listmonk PostgreSQL (${LISTMONK_PG_DB})..."
# grep -Fxq: fixed whole-line match (the old "^name$" regex could misfire on
# container names containing regex metacharacters).
if docker ps --format '{{.Names}}' | grep -Fxq "${LISTMONK_PG_CONTAINER}"; then
  docker exec "$LISTMONK_PG_CONTAINER" pg_dump -U "$LISTMONK_PG_USER" -d "$LISTMONK_PG_DB" --no-owner --no-acl \
    | gzip > "${BACKUP_PATH}/listmonk-postgres.sql.gz"
  echo " -> listmonk-postgres.sql.gz ($(du -h "${BACKUP_PATH}/listmonk-postgres.sql.gz" | cut -f1))"
else
  echo " [WARN] Container ${LISTMONK_PG_CONTAINER} not running, skipping Listmonk dump."
fi
|
|
|
|
# --- 3. Uploads Archive ---
# Archive the uploads directory (if present and non-empty) into the staging
# directory. Extracted into a function so the emptiness check and the tar
# invocation stay together.
archive_uploads() {
  # 'find -mindepth 1 -print -quit' emits at most one entry: a cheap, robust
  # "directory is non-empty" test. Replaces the previous '[ "$(ls -A ...)" ]'
  # which parsed ls output (ShellCheck SC2012-class anti-pattern).
  if [ -d "$UPLOADS_DIR" ] && [ -n "$(find "$UPLOADS_DIR" -mindepth 1 -print -quit 2>/dev/null)" ]; then
    # -C parent + basename keeps tarball paths relative (uploads/...).
    tar -czf "${BACKUP_PATH}/uploads.tar.gz" -C "$(dirname "$UPLOADS_DIR")" "$(basename "$UPLOADS_DIR")"
    echo " -> uploads.tar.gz ($(du -h "${BACKUP_PATH}/uploads.tar.gz" | cut -f1))"
  else
    echo " [INFO] No uploads directory or empty, skipping."
  fi
}

echo "[3/4] Archiving uploads..."
archive_uploads
|
|
|
|
# --- 4. Manifest ---
# Write manifest.json into the staging directory: a JSON document listing
# every dump/archive produced above with its byte size and SHA-256 checksum,
# plus the backup's metadata. Assembled with echo/printf (no jq dependency).
echo "[4/4] Generating manifest..."
MANIFEST_FILE="${BACKUP_PATH}/manifest.json"
{
  echo "{"
  echo " \"timestamp\": \"${TIMESTAMP}\","
  echo " \"backup_name\": \"${BACKUP_NAME}\","
  echo " \"files\": ["

  # FIRST tracks whether a "," separator (plus newline, via echo) must be
  # emitted before the next entry, so the array has no trailing comma.
  FIRST=true
  # Brace expansion produces two globs: *.sql.gz and *.tar.gz. A glob that
  # matches nothing is left as the literal pattern; the -f test skips it.
  for f in "${BACKUP_PATH}"/*.{sql.gz,tar.gz}; do
    [ -f "$f" ] || continue
    # GNU stat first, BSD/macOS stat as fallback, literal 0 if both fail.
    SIZE="$(stat --printf='%s' "$f" 2>/dev/null || stat -f '%z' "$f" 2>/dev/null || echo 0)"
    # sha256sum (GNU) with shasum (macOS) fallback. NOTE(review): the
    # fallback only triggers because 'set -o pipefail' makes the first
    # pipeline's status reflect sha256sum's failure — confirm pipefail
    # stays enabled if this block is ever relocated.
    SHA256="$(sha256sum "$f" 2>/dev/null | cut -d' ' -f1 || shasum -a 256 "$f" | cut -d' ' -f1)"
    BASENAME="$(basename "$f")"
    if [ "$FIRST" = true ]; then
      FIRST=false
    else
      echo ","
    fi
    # printf emits no trailing newline; the next separator or the final
    # 'echo ""' below terminates the line.
    printf ' {"file": "%s", "size_bytes": %s, "sha256": "%s"}' "$BASENAME" "$SIZE" "$SHA256"
  done

  # Terminate the last (newline-less) entry, then close the array and object.
  echo ""
  echo " ],"
  echo " \"v2_database\": \"${PG_DB}\","
  echo " \"listmonk_database\": \"${LISTMONK_PG_DB}\","
  echo " \"retention_days\": ${RETENTION_DAYS}"
  echo "}"
} > "$MANIFEST_FILE"

echo " -> manifest.json"
echo ""
|
|
|
|
# --- Create single archive ---
# Roll the staging directory up into one timestamped tar.gz next to it,
# then delete the staging copy so only the archive remains in BACKUP_DIR.
ARCHIVE_FILE="${BACKUP_DIR}/${BACKUP_NAME}.tar.gz"
# -C BACKUP_DIR keeps paths inside the tarball relative (backup-name/...).
tar -czf "$ARCHIVE_FILE" -C "$BACKUP_DIR" "$BACKUP_NAME"
# Safe to remove: tar above would have aborted the script (set -e) on failure.
rm -rf "$BACKUP_PATH"
echo "Archive: ${ARCHIVE_FILE} ($(du -h "$ARCHIVE_FILE" | cut -f1))"
echo ""
|
|
|
|
# --- Optional S3 Upload ---
# Runs only when --s3 was given; requires the S3_BUCKET env var and the AWS
# CLI on PATH. Missing prerequisites downgrade to a warning, never an error.
if [ "$S3_UPLOAD" = true ]; then
  S3_BUCKET="${S3_BUCKET:-}"
  S3_PREFIX="${S3_PREFIX:-changemaker-backups}"
  S3_DEST="s3://${S3_BUCKET}/${S3_PREFIX}/${BACKUP_NAME}.tar.gz"
  if [ -z "$S3_BUCKET" ]; then
    echo "[WARN] S3_BUCKET not set, skipping S3 upload."
  elif ! command -v aws >/dev/null 2>&1; then
    echo "[WARN] AWS CLI not found, skipping S3 upload."
  else
    echo "Uploading to ${S3_DEST}..."
    aws s3 cp "$ARCHIVE_FILE" "$S3_DEST"
    echo "S3 upload complete."
  fi
  echo ""
fi
|
|
|
|
# --- Retention Cleanup ---
# Delete archives in BACKUP_DIR older than RETENTION_DAYS, counting removals.
echo "Cleaning up backups older than ${RETENTION_DAYS} days..."
DELETED=0
# One find call selects every stale archive; the redirected while-loop runs
# in the current shell so DELETED survives the loop.
while IFS= read -r stale; do
  rm -f "$stale"
  DELETED=$((DELETED + 1))
done < <(find "${BACKUP_DIR}" -maxdepth 1 -type f -name 'changemaker-v2-backup-*.tar.gz' -mtime +"$RETENTION_DAYS" -print 2>/dev/null)
echo " Deleted ${DELETED} old backup(s)."
echo ""
|
|
|
|
# --- Push metrics to Bunker Ops (if enabled) ---
# Best-effort observability hook: when BUNKER_OPS_ENABLED=true and a remote
# write URL is configured, POST two gauge samples (last-success timestamp and
# archive size). The body looks like Prometheus text exposition lines —
# NOTE(review): confirm the receiving endpoint expects this plain-text shape.
if [ "${BUNKER_OPS_ENABLED:-false}" = "true" ] && [ -n "${BUNKER_OPS_REMOTE_WRITE_URL:-}" ]; then
  METRIC_TIMESTAMP=$(date +%s)
  # Archive size in bytes; GNU stat first, BSD/macOS stat fallback, 0 on failure.
  BACKUP_SIZE=$(stat --printf='%s' "$ARCHIVE_FILE" 2>/dev/null || stat -f '%z' "$ARCHIVE_FILE" 2>/dev/null || echo 0)
  # Instance label precedence: INSTANCE_LABEL, then DOMAIN, then "unknown".
  INSTANCE="${INSTANCE_LABEL:-${DOMAIN:-unknown}}"
  # The '&& echo … || echo …' tail keeps a failed push from aborting the
  # script under 'set -e'. The newline embedded in the --data-binary string
  # separates the two metric lines and must be preserved.
  curl -sf -X POST "${BUNKER_OPS_REMOTE_WRITE_URL}" \
    --data-binary "cm_backup_last_success_timestamp{instance=\"${INSTANCE}\"} ${METRIC_TIMESTAMP}
cm_backup_size_bytes{instance=\"${INSTANCE}\"} ${BACKUP_SIZE}" \
    && echo "Pushed backup metrics to Bunker Ops." \
    || echo "[WARN] Failed to push backup metrics to Bunker Ops."
  echo ""
fi
|
|
|
|
# Closing banner with the final archive location.
cat <<EOF
==========================================
 Backup complete!
 Archive: ${ARCHIVE_FILE}
==========================================
EOF
|