infra/roles/backup/templates/backup.sh.j2
jack fc6b1c0cec
feat: Timeweb S3 offsite backup uploads
- Add vault_s3_access_key / vault_s3_secret_key to Ansible Vault
- Expose via s3_access_key / s3_secret_key in all/main.yml
- Add s3_endpoint + s3_bucket to backup role defaults
- Install awscli via apt in backup role tasks
- Extend backup.sh.j2: upload *.gz to S3 after local backup,
  prune S3 objects older than backup_retention_days

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-22 03:58:58 +07:00
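
For context, the variable wiring described in this commit would look roughly like the following sketch. Only the variable names come from the commit message; the file paths, default values, and endpoint URL are illustrative assumptions.

# group_vars/all/main.yml (assumed path): expose vault secrets under plain names
s3_access_key: "{{ vault_s3_access_key }}"
s3_secret_key: "{{ vault_s3_secret_key }}"

# roles/backup/defaults/main.yml (assumed path): values consumed by backup.sh.j2
s3_endpoint: "https://s3.twcstorage.ru"  # assumed Timeweb endpoint; verify
s3_bucket: "offsite-backups"             # assumed bucket name

# roles/backup/tasks/main.yml (assumed path): install the awscli dependency
- name: Install awscli
  ansible.builtin.apt:
    name: awscli
    state: present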


#!/usr/bin/env bash
# Generated by Ansible — do not edit manually
# Backs up PostgreSQL databases and Vaultwarden data.
# Runs daily at 03:00, keeps {{ backup_retention_days }} days of backups.
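# Scheduling is handled by the role rather than this script; a typical cron
# wiring would be (illustrative; the actual mechanism lives elsewhere in the role):
#   0 3 * * * root /usr/local/bin/backup.sh >> /var/log/backup.log 2>&1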
set -euo pipefail
BACKUP_DIR="{{ backup_dir }}"
DATE=$(date +%Y-%m-%d_%H-%M-%S)
KEEP_DAYS="{{ backup_retention_days }}"
log() { echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"; }
log "=== Backup started ==="
# ── Forgejo PostgreSQL ──────────────────────────────────────────────────────
log "Backing up forgejo-db..."
docker exec forgejo-db pg_dump -U forgejo forgejo \
  | gzip > "${BACKUP_DIR}/forgejo-db_${DATE}.sql.gz"
log " → ${BACKUP_DIR}/forgejo-db_${DATE}.sql.gz ($(du -sh "${BACKUP_DIR}/forgejo-db_${DATE}.sql.gz" | cut -f1))"
# ── Plane PostgreSQL ────────────────────────────────────────────────────────
log "Backing up plane-db..."
docker exec plane-db pg_dump -U plane plane \
  | gzip > "${BACKUP_DIR}/plane-db_${DATE}.sql.gz"
log " → ${BACKUP_DIR}/plane-db_${DATE}.sql.gz ($(du -sh "${BACKUP_DIR}/plane-db_${DATE}.sql.gz" | cut -f1))"
# ── Vaultwarden data ────────────────────────────────────────────────────────
log "Backing up Vaultwarden..."
docker run --rm \
  --volumes-from vaultwarden \
  -v "${BACKUP_DIR}:/backup" \
  alpine:3 \
  tar czf "/backup/vaultwarden_${DATE}.tar.gz" /data
log " → ${BACKUP_DIR}/vaultwarden_${DATE}.tar.gz ($(du -sh "${BACKUP_DIR}/vaultwarden_${DATE}.tar.gz" | cut -f1))"
# ── Forgejo repositories ────────────────────────────────────────────────────
log "Backing up Forgejo data..."
docker run --rm \
  --volumes-from forgejo \
  -v "${BACKUP_DIR}:/backup" \
  alpine:3 \
  tar czf "/backup/forgejo-data_${DATE}.tar.gz" /data
log " → ${BACKUP_DIR}/forgejo-data_${DATE}.tar.gz ($(du -sh "${BACKUP_DIR}/forgejo-data_${DATE}.tar.gz" | cut -f1))"
# ── Upload to Timeweb S3 ────────────────────────────────────────────────────
log "Uploading backups to S3 ({{ s3_bucket }})..."
AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
aws s3 sync "${BACKUP_DIR}/" "s3://{{ s3_bucket }}/" \
  --endpoint-url "{{ s3_endpoint }}" \
  --exclude "*" --include "*.gz" \
  --storage-class STANDARD \
  --no-progress \
  && log " → S3 upload complete" \
  || log " ⚠ S3 upload failed (local backups still intact)"
# ── Cleanup old backups ─────────────────────────────────────────────────────
log "Removing backups older than ${KEEP_DAYS} days..."
find "${BACKUP_DIR}" -name "*.gz" -mtime +${KEEP_DAYS} -delete
# Remove S3 objects older than KEEP_DAYS as well
log "Pruning S3 objects older than ${KEEP_DAYS} days..."
# Cutoff for S3 pruning; try GNU date (-d) first, BSD date (-v) as a fallback.
CUTOFF=$(date -d "-${KEEP_DAYS} days" +%Y-%m-%dT%H:%M:%S 2>/dev/null \
  || date -v-"${KEEP_DAYS}"d +%Y-%m-%dT%H:%M:%S)
AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
aws s3 ls "s3://{{ s3_bucket }}/" \
  --endpoint-url "{{ s3_endpoint }}" \
  | awk '{print $4}' \
  | while read -r obj; do
      # Backup names embed the date after a prefix (e.g. forgejo-db_2026-03-22_...),
      # so match the date anywhere in the name rather than anchoring at the start.
      obj_date=$(echo "$obj" | grep -oP '\d{4}-\d{2}-\d{2}' | head -n1 || true)
      if [[ -n "$obj_date" && "$obj_date" < "${CUTOFF:0:10}" ]]; then
        AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
        AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
        aws s3 rm "s3://{{ s3_bucket }}/$obj" \
          --endpoint-url "{{ s3_endpoint }}" \
          && log " → Deleted old S3 object: $obj" \
          || log " ⚠ Failed to delete S3 object: $obj"
      fi
    done
log " → Done. Current backups:"
du -sh "${BACKUP_DIR}"/*.gz 2>/dev/null | sort -k2 || true
log "=== Backup completed ==="