feat(backup): hourly schedule, cold S3 storage at data/ prefix
Some checks failed
CI/CD / syntax-check (push) Successful in 1m13s
CI/CD / deploy (push) Has been cancelled

- Change cron from daily 03:00 to every hour (minute=0)
- Change S3 path from main/ to data/ as requested
- Change storage class from STANDARD to COLD (Timeweb cold storage)
- Update S3 pruning to match new data/ prefix

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
jack 2026-03-22 19:44:34 +07:00
parent aa6b20c463
commit 624b85cd15
2 changed files with 10 additions and 11 deletions

View file

@@ -32,11 +32,10 @@
     group: root
     mode: "0750"
-- name: Schedule daily backup at 03:00
+- name: Schedule hourly backup
   ansible.builtin.cron:
-    name: "Daily services backup"
+    name: "Hourly services backup"
     minute: "0"
-    hour: "3"
     job: "/usr/local/bin/backup-services >> /var/log/backup-services.log 2>&1"
     user: root
     state: present

View file

@@ -5,8 +5,8 @@
 #   data/
 #     databases/ — PostgreSQL dumps (restore with psql)
 #     volumes/   — Docker volume contents (restore by copying)
-# Runs daily at 03:00, keeps {{ backup_retention_days }} days.
-# Upload to S3: s3://{{ s3_bucket }}/main/
+# Runs every hour, keeps {{ backup_retention_days }} days.
+# Upload to S3: s3://{{ s3_bucket }}/data/
 set -euo pipefail
 BACKUP_ROOT="{{ backup_dir }}"
@@ -112,12 +112,12 @@ rm -rf "${WORK_DIR}"
 log "  → Archive: $(du -sh "${ARCHIVE}" | cut -f1)"
 # ── Upload to S3 ─────────────────────────────────────────────────────────────
-log "Uploading to S3 (s3://{{ s3_bucket }}/main/)..."
+log "Uploading to S3 (s3://{{ s3_bucket }}/data/)..."
 AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
 AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
-aws s3 cp "${ARCHIVE}" "s3://{{ s3_bucket }}/main/$(basename "${ARCHIVE}")" \
+aws s3 cp "${ARCHIVE}" "s3://{{ s3_bucket }}/data/$(basename "${ARCHIVE}")" \
   --endpoint-url "{{ s3_endpoint }}" \
-  --storage-class STANDARD \
+  --storage-class COLD \
   --no-progress \
   && log "  → S3 upload complete" \
   || log "  ⚠ S3 upload failed (local backup still intact)"
@@ -132,7 +132,7 @@ CUTOFF=$(date -d "-${KEEP_DAYS} days" +%Y-%m-%d 2>/dev/null \
   || date -v-${KEEP_DAYS}d +%Y-%m-%d)
 AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
 AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
-aws s3 ls "s3://{{ s3_bucket }}/main/" \
+aws s3 ls "s3://{{ s3_bucket }}/data/" \
   --endpoint-url "{{ s3_endpoint }}" \
   | awk '{print $4}' \
   | while read -r obj; do
@@ -140,9 +140,9 @@ AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
   if [[ -n "$obj_date" && "$obj_date" < "$CUTOFF" ]]; then
     AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
     AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
-    aws s3 rm "s3://{{ s3_bucket }}/main/$obj" \
+    aws s3 rm "s3://{{ s3_bucket }}/data/$obj" \
       --endpoint-url "{{ s3_endpoint }}" \
-      && log "  → Deleted old S3 object: main/$obj"
+      && log "  → Deleted old S3 object: data/$obj"
   fi
 done