From 624b85cd15f9ffc5512317f0b97d4ae5cd309731 Mon Sep 17 00:00:00 2001
From: jack
Date: Sun, 22 Mar 2026 19:44:34 +0700
Subject: [PATCH] feat(backup): hourly schedule, S3 uploads under data/ prefix

- Change cron from daily 03:00 to every hour (minute=0)
- Remove the stale "Daily services backup" crontab entry: the cron
  module keys entries by name, so renaming the task creates a NEW entry
  and leaves the old daily 03:00 job running alongside the hourly one
  unless it is removed with state=absent
- Change S3 path from main/ to data/ as requested
- Drop the --storage-class flag instead of setting COLD: the AWS CLI
  validates --storage-class against the fixed S3 enum (STANDARD,
  STANDARD_IA, GLACIER, ...), so COLD is rejected client-side and every
  upload would fail — silently, because of the "|| log" fallback; on
  Timeweb, cold storage is selected by the bucket type, not per object
- Update S3 pruning to match new data/ prefix

Co-Authored-By: Claude Sonnet 4.6
---
 roles/backup/tasks/main.yml         | 14 +++++++++++---
 roles/backup/templates/backup.sh.j2 | 15 +++++++--------
 2 files changed, 18 insertions(+), 11 deletions(-)

diff --git a/roles/backup/tasks/main.yml b/roles/backup/tasks/main.yml
index f1e2255..1a473c9 100644
--- a/roles/backup/tasks/main.yml
+++ b/roles/backup/tasks/main.yml
@@ -32,11 +32,19 @@
     group: root
     mode: "0750"
 
-- name: Schedule daily backup at 03:00
+# The cron module keys crontab entries by name; renaming the task below
+# would leave the old "Daily services backup" entry running at 03:00,
+# so remove it explicitly.
+- name: Remove superseded daily backup cron entry
+  ansible.builtin.cron:
+    name: "Daily services backup"
+    user: root
+    state: absent
+
+- name: Schedule hourly backup
   ansible.builtin.cron:
-    name: "Daily services backup"
+    name: "Hourly services backup"
     minute: "0"
-    hour: "3"
     job: "/usr/local/bin/backup-services >> /var/log/backup-services.log 2>&1"
     user: root
     state: present
diff --git a/roles/backup/templates/backup.sh.j2 b/roles/backup/templates/backup.sh.j2
index c7a5ecf..4dd9452 100644
--- a/roles/backup/templates/backup.sh.j2
+++ b/roles/backup/templates/backup.sh.j2
@@ -5,8 +5,8 @@
 #   data/
 #     databases/ — PostgreSQL dumps (restore with psql)
 #     volumes/   — Docker volume contents (restore by copying)
-# Runs daily at 03:00, keeps {{ backup_retention_days }} days.
-# Upload to S3: s3://{{ s3_bucket }}/main/
+# Runs every hour, keeps {{ backup_retention_days }} days.
+# Upload to S3: s3://{{ s3_bucket }}/data/
 
 set -euo pipefail
 BACKUP_ROOT="{{ backup_dir }}"
@@ -112,12 +112,11 @@ rm -rf "${WORK_DIR}"
 log "  → Archive: $(du -sh "${ARCHIVE}" | cut -f1)"
 
 # ── Upload to S3 ─────────────────────────────────────────────────────────────
-log "Uploading to S3 (s3://{{ s3_bucket }}/main/)..."
+log "Uploading to S3 (s3://{{ s3_bucket }}/data/)..."
 AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
 AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
-  aws s3 cp "${ARCHIVE}" "s3://{{ s3_bucket }}/main/$(basename "${ARCHIVE}")" \
+  aws s3 cp "${ARCHIVE}" "s3://{{ s3_bucket }}/data/$(basename "${ARCHIVE}")" \
     --endpoint-url "{{ s3_endpoint }}" \
-    --storage-class STANDARD \
     --no-progress \
     && log "  → S3 upload complete" \
     || log "  ⚠ S3 upload failed (local backup still intact)"
@@ -132,7 +131,7 @@ CUTOFF=$(date -d "-${KEEP_DAYS} days" +%Y-%m-%d 2>/dev/null \
     || date -v-${KEEP_DAYS}d +%Y-%m-%d)
 AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
 AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
-  aws s3 ls "s3://{{ s3_bucket }}/main/" \
+  aws s3 ls "s3://{{ s3_bucket }}/data/" \
     --endpoint-url "{{ s3_endpoint }}" \
     | awk '{print $4}' \
     | while read -r obj; do
@@ -141,8 +140,8 @@ AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
     if [[ -n "$obj_date" && "$obj_date" < "$CUTOFF" ]]; then
       AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
       AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
-        aws s3 rm "s3://{{ s3_bucket }}/main/$obj" \
+        aws s3 rm "s3://{{ s3_bucket }}/data/$obj" \
           --endpoint-url "{{ s3_endpoint }}" \
-          && log "  → Deleted old S3 object: main/$obj"
+          && log "  → Deleted old S3 object: data/$obj"
       fi
     done