diff --git a/roles/backup/templates/backup.sh.j2 b/roles/backup/templates/backup.sh.j2
index 82933f2..c7a5ecf 100644
--- a/roles/backup/templates/backup.sh.j2
+++ b/roles/backup/templates/backup.sh.j2
@@ -1,83 +1,149 @@
 #!/usr/bin/env bash
 # Generated by Ansible — do not edit manually
-# Backs up PostgreSQL databases and Vaultwarden data.
-# Runs daily at 03:00, keeps {{ backup_retention_days }} days of backups.
+# Creates a single dated archive: main_data_YYYY-MM-DD_HH-MM.tar.gz
+# Structure:
+#   data/
+#     databases/ — PostgreSQL dumps (restore with psql)
+#     volumes/   — Docker volume contents (restore by copying)
+# Runs daily at 03:00, keeps {{ backup_retention_days }} days.
+# Upload to S3: s3://{{ s3_bucket }}/main/
 
 set -euo pipefail
 
-BACKUP_DIR="{{ backup_dir }}"
-DATE=$(date +%Y-%m-%d_%H-%M-%S)
+BACKUP_ROOT="{{ backup_dir }}"
+DATE=$(date +%Y-%m-%d_%H-%M)
+WORK_DIR="${BACKUP_ROOT}/tmp_${DATE}"
+ARCHIVE="${BACKUP_ROOT}/main_data_${DATE}.tar.gz"
 KEEP_DAYS="{{ backup_retention_days }}"
 
 log() { echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*"; }
 
-log "=== Backup started ==="
+log "=== Backup started: ${DATE} ==="
+mkdir -p "${WORK_DIR}/data/databases" "${WORK_DIR}/data/volumes"
 
-# ── Forgejo PostgreSQL ──────────────────────────────────────────────────────
-log "Backing up forgejo-db..."
+# ── PostgreSQL: Forgejo ──────────────────────────────────────────────────────
+log "Dumping forgejo-db..."
 docker exec forgejo-db pg_dump -U forgejo forgejo \
-  | gzip > "${BACKUP_DIR}/forgejo-db_${DATE}.sql.gz"
-log "  → ${BACKUP_DIR}/forgejo-db_${DATE}.sql.gz ($(du -sh "${BACKUP_DIR}/forgejo-db_${DATE}.sql.gz" | cut -f1))"
+  | gzip > "${WORK_DIR}/data/databases/forgejo.sql.gz"
+log "  → databases/forgejo.sql.gz ($(du -sh "${WORK_DIR}/data/databases/forgejo.sql.gz" | cut -f1))"
 
-# ── Plane PostgreSQL ────────────────────────────────────────────────────────
-log "Backing up plane-db..."
+# ── PostgreSQL: Plane ────────────────────────────────────────────────────────
+log "Dumping plane-db..."
 docker exec plane-db pg_dump -U plane plane \
-  | gzip > "${BACKUP_DIR}/plane-db_${DATE}.sql.gz"
-log "  → ${BACKUP_DIR}/plane-db_${DATE}.sql.gz ($(du -sh "${BACKUP_DIR}/plane-db_${DATE}.sql.gz" | cut -f1))"
+  | gzip > "${WORK_DIR}/data/databases/plane.sql.gz"
+log "  → databases/plane.sql.gz ($(du -sh "${WORK_DIR}/data/databases/plane.sql.gz" | cut -f1))"
 
-# ── Vaultwarden data ────────────────────────────────────────────────────────
-log "Backing up Vaultwarden..."
+# ── Vaultwarden data volume ──────────────────────────────────────────────────
+log "Backing up Vaultwarden data..."
 docker run --rm \
   --volumes-from vaultwarden \
-  -v "${BACKUP_DIR}:/backup" \
+  -v "${WORK_DIR}/data/volumes:/backup" \
   alpine:3 \
-  tar czf "/backup/vaultwarden_${DATE}.tar.gz" /data
-log "  → ${BACKUP_DIR}/vaultwarden_${DATE}.tar.gz ($(du -sh "${BACKUP_DIR}/vaultwarden_${DATE}.tar.gz" | cut -f1))"
+  tar czf /backup/vaultwarden.tar.gz /data
+log "  → volumes/vaultwarden.tar.gz ($(du -sh "${WORK_DIR}/data/volumes/vaultwarden.tar.gz" | cut -f1))"
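+
+# NOTE: the volume backups here tar data out of running containers via
+# --volumes-from (a throwaway alpine container mounts the service's volumes
+# at their original paths). A write landing mid-copy, e.g. in Vaultwarden's
+# SQLite file, can come out slightly stale; stop the container first if
+# point-in-time consistency is required.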
 
-# ── Forgejo repositories ────────────────────────────────────────────────────
+# ── Forgejo data volume (repos, attachments, LFS) ───────────────────────────
 log "Backing up Forgejo data..."
 docker run --rm \
   --volumes-from forgejo \
-  -v "${BACKUP_DIR}:/backup" \
+  -v "${WORK_DIR}/data/volumes:/backup" \
   alpine:3 \
-  tar czf "/backup/forgejo-data_${DATE}.tar.gz" /data
-log "  → ${BACKUP_DIR}/forgejo-data_${DATE}.tar.gz ($(du -sh "${BACKUP_DIR}/forgejo-data_${DATE}.tar.gz" | cut -f1))"
+  tar czf /backup/forgejo.tar.gz /data
+log "  → volumes/forgejo.tar.gz ($(du -sh "${WORK_DIR}/data/volumes/forgejo.tar.gz" | cut -f1))"
 
-# ── Upload to Timeweb S3 ────────────────────────────────────────────────────
-log "Uploading backups to S3 ({{ s3_bucket }})..."
+# ── Uptime Kuma ──────────────────────────────────────────────────────────────
+log "Backing up Uptime Kuma..."
+docker run --rm \
+  --volumes-from uptime-kuma \
+  -v "${WORK_DIR}/data/volumes:/backup" \
+  alpine:3 \
+  tar czf /backup/uptime-kuma.tar.gz /app/data
+log "  → volumes/uptime-kuma.tar.gz ($(du -sh "${WORK_DIR}/data/volumes/uptime-kuma.tar.gz" | cut -f1))"
+
+# ── Add restore instructions ─────────────────────────────────────────────────
+cat > "${WORK_DIR}/data/RESTORE.md" << 'RESTORE_EOF'
+# Restore Instructions
+
+## Prerequisites
+- Docker and docker compose running
+- Services stack deployed via Ansible (`ansible-playbook playbooks/deploy.yml`)
+
+## Step 1 — Stop services that need data restored
+Keep `forgejo-db` and `plane-db` running: Step 2 restores through
+`docker exec` into them.
+```bash
+cd /opt/services
+docker compose stop forgejo plane-api plane-web plane-admin plane-space plane-worker plane-beat uptime-kuma vaultwarden
+```
+
+## Step 2 — Restore PostgreSQL databases
+These are plain `pg_dump` dumps without `--clean`; restore into fresh/empty
+databases, or duplicate objects will cause errors.
+```bash
+# Forgejo DB
+zcat data/databases/forgejo.sql.gz | docker exec -i forgejo-db psql -U forgejo forgejo
+
+# Plane DB
+zcat data/databases/plane.sql.gz | docker exec -i plane-db psql -U plane plane
+```
+
+## Step 3 — Restore volume data
+```bash
+# Vaultwarden — extracts /data/ into the container
+docker run --rm --volumes-from vaultwarden -v "$(pwd)/data/volumes:/backup" \
+  alpine:3 sh -c "cd / && tar xzf /backup/vaultwarden.tar.gz"
+
+# Forgejo — extracts /data/ into the container
+docker run --rm --volumes-from forgejo -v "$(pwd)/data/volumes:/backup" \
+  alpine:3 sh -c "cd / && tar xzf /backup/forgejo.tar.gz"
+
+# Uptime Kuma — extracts /app/data/ into the container
+docker run --rm --volumes-from uptime-kuma -v "$(pwd)/data/volumes:/backup" \
+  alpine:3 sh -c "cd / && tar xzf /backup/uptime-kuma.tar.gz"
+```
+
+## Step 4 — Restart services
+```bash
+cd /opt/services
+docker compose up -d
+```
+RESTORE_EOF
+
+# ── Pack everything into one archive ─────────────────────────────────────────
+log "Creating archive: ${ARCHIVE}..."
+tar czf "${ARCHIVE}" -C "${WORK_DIR}" data
+rm -rf "${WORK_DIR}"
+log "  → Archive: $(du -sh "${ARCHIVE}" | cut -f1)"
+
+# ── Upload to S3 ──────────────────────────────────────────────────────────────
+log "Uploading to S3 (s3://{{ s3_bucket }}/main/)..."
 AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \
 AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \
-  aws s3 sync "${BACKUP_DIR}/" "s3://{{ s3_bucket }}/" \
+  aws s3 cp "${ARCHIVE}" "s3://{{ s3_bucket }}/main/$(basename "${ARCHIVE}")" \
   --endpoint-url "{{ s3_endpoint }}" \
-  --exclude "*" --include "*.gz" \
   --storage-class STANDARD \
   --no-progress \
   && log "  → S3 upload complete" \
-  || log "  ⚠ S3 upload failed (local backups still intact)"
+  || log "  ⚠ S3 upload failed (local backup still intact)"
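+
+# Spot-check the upload out-of-band (same credentials and endpoint assumed;
+# run by hand, not part of this script):
+#   AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... \
+#     aws s3 ls "s3://{{ s3_bucket }}/main/" --endpoint-url "{{ s3_endpoint }}"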
-find "${BACKUP_DIR}" -name "*.gz" -mtime +${KEEP_DAYS} -delete +# ── Cleanup old local backups ───────────────────────────────────────────────── +log "Removing local backups older than ${KEEP_DAYS} days..." +find "${BACKUP_ROOT}" -name "main_data_*.tar.gz" -mtime +${KEEP_DAYS} -delete -# Remove S3 objects older than KEEP_DAYS as well +# ── Prune old S3 objects ────────────────────────────────────────────────────── log "Pruning S3 objects older than ${KEEP_DAYS} days..." -CUTOFF=$(date -d "-${KEEP_DAYS} days" +%Y-%m-%dT%H:%M:%S 2>/dev/null || date -v-${KEEP_DAYS}d +%Y-%m-%dT%H:%M:%S) +CUTOFF=$(date -d "-${KEEP_DAYS} days" +%Y-%m-%d 2>/dev/null \ + || date -v-${KEEP_DAYS}d +%Y-%m-%d) AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \ AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \ - aws s3 ls "s3://{{ s3_bucket }}/" \ + aws s3 ls "s3://{{ s3_bucket }}/main/" \ --endpoint-url "{{ s3_endpoint }}" \ | awk '{print $4}' \ | while read -r obj; do obj_date=$(echo "$obj" | grep -oP '^\d{4}-\d{2}-\d{2}' || true) - if [[ -n "$obj_date" && "$obj_date" < "${CUTOFF:0:10}" ]]; then + if [[ -n "$obj_date" && "$obj_date" < "$CUTOFF" ]]; then AWS_ACCESS_KEY_ID="{{ s3_access_key }}" \ AWS_SECRET_ACCESS_KEY="{{ s3_secret_key }}" \ - aws s3 rm "s3://{{ s3_bucket }}/$obj" \ + aws s3 rm "s3://{{ s3_bucket }}/main/$obj" \ --endpoint-url "{{ s3_endpoint }}" \ - && log " → Deleted old S3 object: $obj" + && log " → Deleted old S3 object: main/$obj" fi done -log " → Done. Current backups:" -du -sh "${BACKUP_DIR}"/*.gz 2>/dev/null | sort -k2 || true - -log "=== Backup completed ===" +log "=== Backup completed: $(du -sh "${ARCHIVE}" | cut -f1) ===" diff --git a/roles/tools/defaults/main.yml b/roles/tools/defaults/main.yml index 63b0bc9..a3fb70d 100644 --- a/roles/tools/defaults/main.yml +++ b/roles/tools/defaults/main.yml @@ -4,3 +4,10 @@ outline_image: "outlinewiki/outline:0.80.2" outline_db_image: "postgres:15-alpine" outline_redis_image: "redis:7-alpine" n8n_image: "n8nio/n8n:1.89.2" # https://hub.docker.com/r/n8nio/n8n/tags + +# SMTP for Outline magic-link auth (override in vault) +outline_smtp_host: "smtp.csrx.ru" +outline_smtp_port: 587 +outline_smtp_from: "noreply@csrx.ru" +outline_smtp_username: "" +outline_smtp_password: "" diff --git a/roles/tools/templates/env.j2 b/roles/tools/templates/env.j2 index f6e7f3f..e45e9e1 100644 --- a/roles/tools/templates/env.j2 +++ b/roles/tools/templates/env.j2 @@ -27,6 +27,15 @@ FILE_STORAGE=s3 # Auth — local accounts (can add OIDC/Authelia later) AUTH_PROVIDERS=email +# SMTP (required to enable email magic-link auth) +SMTP_HOST={{ outline_smtp_host }} +SMTP_PORT={{ outline_smtp_port | default(587) }} +SMTP_FROM_EMAIL={{ outline_smtp_from }} +{% if outline_smtp_username is defined and outline_smtp_username %} +SMTP_USERNAME={{ outline_smtp_username }} +SMTP_PASSWORD={{ outline_smtp_password }} +{% endif %} + # Outline DB password (used in docker-compose) OUTLINE_DB_PASSWORD={{ outline_db_password }}