feat: ship blog platform admin and deploy stack

This commit is contained in:
2026-03-31 21:48:39 +08:00
parent a9a05aa105
commit 313f174fbc
210 changed files with 25476 additions and 5803 deletions

View File

@@ -0,0 +1,10 @@
#!/usr/bin/env bash
# Run every backup job in sequence; `set -e` aborts on the first failure.
set -euo pipefail

# Directory containing this script, resolved even when invoked via a relative path.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

for job in backup-postgres backup-markdown backup-media; do
  "${SCRIPT_DIR}/${job}.sh"
done

echo "All backup jobs finished successfully."

View File

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
# Archive the markdown posts directory into a timestamped tarball, refresh a
# "latest" pointer, and prune archives older than the retention window.
#
# Env:
#   MARKDOWN_SOURCE_DIR  directory to archive (default: ./backend/content/posts)
#   BACKUP_DIR           destination for archives (default: ./backups/markdown)
#   RETENTION_DAYS       archives older than this many days are deleted (default: 30)
set -euo pipefail

SOURCE_DIR="${MARKDOWN_SOURCE_DIR:-./backend/content/posts}"
BACKUP_DIR="${BACKUP_DIR:-./backups/markdown}"
RETENTION_DAYS="${RETENTION_DAYS:-30}"

# Refuse to run against a missing source tree.
if [[ ! -d "${SOURCE_DIR}" ]]; then
  echo "Markdown source directory not found: ${SOURCE_DIR}" >&2
  exit 1
fi

STAMP="$(date -u +%Y%m%dT%H%M%SZ)"
ARCHIVE="${BACKUP_DIR}/markdown-${STAMP}.tar.gz"

mkdir -p "${BACKUP_DIR}"
# `-C src .` archives the directory contents (including dotfiles), not the path.
tar -czf "${ARCHIVE}" -C "${SOURCE_DIR}" .

# Relative symlink so the backup directory stays relocatable.
ln -sfn "$(basename "${ARCHIVE}")" "${BACKUP_DIR}/latest.tar.gz"

# Drop archives past the retention window.
find "${BACKUP_DIR}" -type f -name 'markdown-*.tar.gz' -mtime +"${RETENTION_DAYS}" -delete

echo "Markdown backup written to ${ARCHIVE}"

View File

@@ -0,0 +1,31 @@
#!/usr/bin/env bash
# Back up media assets. Two modes:
#   1. MEDIA_S3_SOURCE set -> `aws s3 sync` the bucket into a timestamped dir.
#   2. otherwise           -> tar up MEDIA_SOURCE_DIR (default ./uploads).
# Both modes refresh a "latest" pointer and prune backups older than RETENTION_DAYS.
#
# Env:
#   MEDIA_S3_SOURCE   s3:// URI to sync from (optional)
#   MEDIA_SOURCE_DIR  local directory to archive (default: ./uploads)
#   BACKUP_DIR        destination directory (default: ./backups/media)
#   RETENTION_DAYS    backups older than this are deleted (default: 14)
#   AWS_EXTRA_ARGS    extra whitespace-separated flags for `aws s3 sync`
set -euo pipefail

BACKUP_DIR="${BACKUP_DIR:-./backups/media}"
RETENTION_DAYS="${RETENTION_DAYS:-14}"
TIMESTAMP="$(date -u +%Y%m%dT%H%M%SZ)"
mkdir -p "${BACKUP_DIR}"

if [[ -n "${MEDIA_S3_SOURCE:-}" ]]; then
  TARGET_DIR="${BACKUP_DIR}/media-${TIMESTAMP}"
  mkdir -p "${TARGET_DIR}"
  # FIX: split AWS_EXTRA_ARGS on whitespace into an array instead of expanding
  # it unquoted — the unquoted form also glob-expanded any * or ? in the value
  # against the current directory (ShellCheck SC2086).
  read -r -a aws_extra <<<"${AWS_EXTRA_ARGS:-}"
  aws s3 sync "${MEDIA_S3_SOURCE}" "${TARGET_DIR}" ${aws_extra[@]+"${aws_extra[@]}"}
  ln -sfn "$(basename "${TARGET_DIR}")" "${BACKUP_DIR}/latest"
  # Synced backups are directories, so prune by directory age.
  find "${BACKUP_DIR}" -maxdepth 1 -mindepth 1 -type d -name 'media-*' -mtime +"${RETENTION_DAYS}" -exec rm -rf {} +
  echo "Media backup synced from ${MEDIA_S3_SOURCE} to ${TARGET_DIR}"
  exit 0
fi

SOURCE_DIR="${MEDIA_SOURCE_DIR:-./uploads}"
FILE_PATH="${BACKUP_DIR}/media-${TIMESTAMP}.tar.gz"
if [[ ! -d "${SOURCE_DIR}" ]]; then
  echo "Set MEDIA_SOURCE_DIR or MEDIA_S3_SOURCE before running this script" >&2
  exit 1
fi
tar -czf "${FILE_PATH}" -C "${SOURCE_DIR}" .
ln -sfn "$(basename "${FILE_PATH}")" "${BACKUP_DIR}/latest.tar.gz"
find "${BACKUP_DIR}" -type f -name 'media-*.tar.gz' -mtime +"${RETENTION_DAYS}" -delete
echo "Media backup written to ${FILE_PATH}"

View File

@@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Dump the Postgres database (custom format) into a timestamped file,
# refresh the "latest" pointer, and prune dumps past the retention window.
#
# Env:
#   DATABASE_URL    connection string for pg_dump (required)
#   BACKUP_DIR      destination directory (default: ./backups/postgres)
#   RETENTION_DAYS  dumps older than this many days are deleted (default: 14)
set -euo pipefail

# Fail fast when no connection string is supplied.
if [[ -z "${DATABASE_URL:-}" ]]; then
  echo "DATABASE_URL is required" >&2
  exit 1
fi

BACKUP_DIR="${BACKUP_DIR:-./backups/postgres}"
RETENTION_DAYS="${RETENTION_DAYS:-14}"
DUMP_FILE="${BACKUP_DIR}/postgres-$(date -u +%Y%m%dT%H%M%SZ).dump"

mkdir -p "${BACKUP_DIR}"
# Custom format supports selective/parallel pg_restore later.
pg_dump --format=custom --file="${DUMP_FILE}" "${DATABASE_URL}"
# Relative symlink so the backup directory stays relocatable.
ln -sfn "$(basename "${DUMP_FILE}")" "${BACKUP_DIR}/latest.dump"
find "${BACKUP_DIR}" -type f -name 'postgres-*.dump' -mtime +"${RETENTION_DAYS}" -delete
echo "Postgres backup written to ${DUMP_FILE}"

View File

@@ -0,0 +1,49 @@
#!/usr/bin/env bash
# Prune aged backup artifacts (Postgres dumps, markdown/media tarballs, and
# synced media directories) under BACKUP_ROOT. Set DRY_RUN=true to list the
# deletion candidates without removing anything.
set -euo pipefail
# Root of the backup tree; each artifact type lives in its own subdirectory.
BACKUP_ROOT="${BACKUP_ROOT:-./backups}"
# Per-artifact retention windows, in days.
POSTGRES_RETENTION_DAYS="${POSTGRES_RETENTION_DAYS:-14}"
MARKDOWN_RETENTION_DAYS="${MARKDOWN_RETENTION_DAYS:-30}"
MEDIA_RETENTION_DAYS="${MEDIA_RETENTION_DAYS:-14}"
# "true" prints what would be removed instead of removing it.
DRY_RUN="${DRY_RUN:-false}"
# Delete files under a directory matching a glob once they age past the
# retention window; with DRY_RUN=true, list them instead of deleting.
#   $1 directory to scan, $2 filename glob, $3 retention in days
# A missing directory is silently skipped.
prune() {
  local dir="$1" glob="$2" days="$3"
  [[ -d "${dir}" ]] || return 0
  local -a finder=(find "${dir}" -type f -name "${glob}" -mtime +"${days}")
  if [[ "${DRY_RUN}" == "true" ]]; then
    "${finder[@]}" -print
  else
    "${finder[@]}" -delete
  fi
}
# Delete immediate subdirectories matching a glob once they age past the
# retention window; with DRY_RUN=true, list them instead of deleting.
#   $1 directory to scan, $2 subdirectory glob, $3 retention in days
# A missing directory is silently skipped.
prune_dirs() {
  local dir="$1" glob="$2" days="$3"
  [[ -d "${dir}" ]] || return 0
  local -a finder=(find "${dir}" -maxdepth 1 -mindepth 1 -type d -name "${glob}" -mtime +"${days}")
  if [[ "${DRY_RUN}" == "true" ]]; then
    "${finder[@]}" -print
  else
    "${finder[@]}" -exec rm -rf {} +
  fi
}
# Apply each retention policy to its backup subdirectory. Media is pruned
# twice because it has both tarball backups (local mode) and synced
# directory backups (S3 mode).
prune "${BACKUP_ROOT}/postgres" 'postgres-*.dump' "${POSTGRES_RETENTION_DAYS}"
prune "${BACKUP_ROOT}/markdown" 'markdown-*.tar.gz' "${MARKDOWN_RETENTION_DAYS}"
prune "${BACKUP_ROOT}/media" 'media-*.tar.gz' "${MEDIA_RETENTION_DAYS}"
prune_dirs "${BACKUP_ROOT}/media" 'media-*' "${MEDIA_RETENTION_DAYS}"
echo "Backup pruning completed under ${BACKUP_ROOT}"

View File

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
# Restore markdown posts from a tarball produced by backup-markdown.sh.
# The target directory is emptied first so the restore is a faithful replacement.
#
# Usage: restore-markdown.sh <backup-file.tar.gz>
# Env:   MARKDOWN_TARGET_DIR  restore destination (default: ./backend/content/posts)
set -euo pipefail

if [[ $# -lt 1 ]]; then
  echo "Usage: $0 <backup-file.tar.gz>" >&2
  exit 1
fi

TARGET_DIR="${MARKDOWN_TARGET_DIR:-./backend/content/posts}"
BACKUP_FILE="$1"

if [[ ! -f "${BACKUP_FILE}" ]]; then
  echo "Backup file not found: ${BACKUP_FILE}" >&2
  exit 1
fi

mkdir -p "${TARGET_DIR}"
# FIX: clear everything under the target, including dotfiles, which the old
# `rm -rf dir/*` glob missed — backups are created with `tar -C src .` and so
# can contain hidden files. ${TARGET_DIR:?} aborts if the variable is empty.
find "${TARGET_DIR:?}" -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +
tar -xzf "${BACKUP_FILE}" -C "${TARGET_DIR}"
echo "Markdown restore completed into ${TARGET_DIR}"

View File

@@ -0,0 +1,30 @@
#!/usr/bin/env bash
# Restore media assets from a backup produced by backup-media.sh.
#   - directory argument: sync it back to MEDIA_S3_TARGET via `aws s3 sync`
#   - file argument:      untar it into MEDIA_TARGET_DIR (default: ./uploads)
#
# Usage: restore-media.sh <backup-file-or-directory>
# Env:   MEDIA_S3_TARGET   s3:// URI to restore into (directory mode, required)
#        MEDIA_TARGET_DIR  local restore destination (file mode)
#        AWS_EXTRA_ARGS    extra whitespace-separated flags for aws
set -euo pipefail

if [[ $# -lt 1 ]]; then
  echo "Usage: $0 <backup-file-or-directory>" >&2
  exit 1
fi

SOURCE="$1"

if [[ -d "${SOURCE}" ]]; then
  if [[ -z "${MEDIA_S3_TARGET:-}" ]]; then
    echo "MEDIA_S3_TARGET is required when restoring from a synced directory backup" >&2
    exit 1
  fi
  # FIX: word-split AWS_EXTRA_ARGS into an array rather than expanding it
  # unquoted, which also glob-expanded the value (ShellCheck SC2086).
  read -r -a aws_extra <<<"${AWS_EXTRA_ARGS:-}"
  aws s3 sync "${SOURCE}" "${MEDIA_S3_TARGET}" ${aws_extra[@]+"${aws_extra[@]}"}
  echo "Media restore synced to ${MEDIA_S3_TARGET}"
  exit 0
fi

TARGET_DIR="${MEDIA_TARGET_DIR:-./uploads}"
if [[ ! -f "${SOURCE}" ]]; then
  echo "Backup source not found: ${SOURCE}" >&2
  exit 1
fi

mkdir -p "${TARGET_DIR}"
# FIX: remove everything including dotfiles; the old `rm -rf dir/*` left hidden
# files behind even though the tarball (created with `tar -C src .`) can contain
# them. ${TARGET_DIR:?} guards against an empty variable.
find "${TARGET_DIR:?}" -mindepth 1 -maxdepth 1 -exec rm -rf -- {} +
tar -xzf "${SOURCE}" -C "${TARGET_DIR}"
echo "Media restore completed into ${TARGET_DIR}"

View File

@@ -0,0 +1,21 @@
#!/usr/bin/env bash
# Restore a Postgres database from a custom-format dump produced by
# backup-postgres.sh (pg_dump --format=custom).
#
# Usage: restore-postgres.sh <backup-file.dump>
# Env:   DATABASE_URL  target connection string (required)
set -euo pipefail

# Print a message to stderr and abort.
fail() {
  echo "$1" >&2
  exit 1
}

[[ $# -ge 1 ]] || fail "Usage: $0 <backup-file.dump>"
[[ -n "${DATABASE_URL:-}" ]] || fail "DATABASE_URL is required"

BACKUP_FILE="$1"
[[ -f "${BACKUP_FILE}" ]] || fail "Backup file not found: ${BACKUP_FILE}"

# --clean --if-exists drops existing objects before recreating them; ownership
# and ACLs are skipped so the dump restores under the connecting role.
pg_restore --clean --if-exists --no-owner --no-privileges --dbname="${DATABASE_URL}" "${BACKUP_FILE}"
echo "Postgres restore completed from ${BACKUP_FILE}"

View File

@@ -0,0 +1,26 @@
#!/usr/bin/env bash
# Mirror the local backup tree to an offsite target: an s3:// URI (via
# `aws s3 sync`) or any rsync-reachable path.
#
# Env:
#   OFFSITE_TARGET    rsync path or s3:// bucket (required)
#   BACKUP_ROOT       local backup tree to mirror (default: ./backups)
#   AWS_EXTRA_ARGS    extra whitespace-separated flags for aws
#   RSYNC_EXTRA_ARGS  extra whitespace-separated flags for rsync
set -euo pipefail

BACKUP_ROOT="${BACKUP_ROOT:-./backups}"
OFFSITE_TARGET="${OFFSITE_TARGET:-}"

if [[ -z "${OFFSITE_TARGET}" ]]; then
  echo "OFFSITE_TARGET is required (rsync path or s3:// bucket)" >&2
  exit 1
fi
if [[ ! -d "${BACKUP_ROOT}" ]]; then
  echo "Backup root not found: ${BACKUP_ROOT}" >&2
  exit 1
fi

if [[ "${OFFSITE_TARGET}" == s3://* ]]; then
  # FIX: split extra args on whitespace into an array rather than expanding the
  # variable unquoted, which also subjected it to glob expansion (SC2086).
  read -r -a aws_extra <<<"${AWS_EXTRA_ARGS:-}"
  aws s3 sync "${BACKUP_ROOT}" "${OFFSITE_TARGET}" ${aws_extra[@]+"${aws_extra[@]}"}
  echo "Backups synced to ${OFFSITE_TARGET}"
  exit 0
fi

read -r -a rsync_extra <<<"${RSYNC_EXTRA_ARGS:-}"
# --delete makes the offsite copy an exact mirror (removes files pruned locally).
rsync -av --delete ${rsync_extra[@]+"${rsync_extra[@]}"} "${BACKUP_ROOT}/" "${OFFSITE_TARGET}/"
echo "Backups synced to ${OFFSITE_TARGET}"

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env bash
# Rehearse a full restore: Postgres dump, markdown tarball, and media backup,
# each via its (overridable) restore script. Intended for disaster-recovery drills.
#
# Required env: DATABASE_URL, POSTGRES_BACKUP, MARKDOWN_BACKUP, MEDIA_BACKUP
# Optional env: POSTGRES_RESTORE_CMD, MARKDOWN_RESTORE_CMD, MEDIA_RESTORE_CMD
set -euo pipefail

# Abort with a clear message when any required input is missing.
: "${DATABASE_URL:?DATABASE_URL is required}"
: "${POSTGRES_BACKUP:?POSTGRES_BACKUP is required}"
: "${MARKDOWN_BACKUP:?MARKDOWN_BACKUP is required}"
: "${MEDIA_BACKUP:?MEDIA_BACKUP is required}"

# Run the three restores in order; `set -e` stops the rehearsal on first failure.
"${POSTGRES_RESTORE_CMD:-./deploy/scripts/backup/restore-postgres.sh}" "${POSTGRES_BACKUP}"
"${MARKDOWN_RESTORE_CMD:-./deploy/scripts/backup/restore-markdown.sh}" "${MARKDOWN_BACKUP}"
"${MEDIA_RESTORE_CMD:-./deploy/scripts/backup/restore-media.sh}" "${MEDIA_BACKUP}"

echo "Restore rehearsal completed. Please verify homepage, article detail, media assets, admin login, revisions, audit logs, and subscriptions manually."