feat: ship blog platform admin and deploy stack
This commit is contained in:
10
deploy/scripts/backup/backup-all.sh
Normal file
10
deploy/scripts/backup/backup-all.sh
Normal file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env bash
# Run every backup job (Postgres, markdown content, media uploads) in
# sequence. Under `set -e`, the first failing job aborts the whole run.
set -euo pipefail

# Resolve the directory containing this script so the sibling job scripts
# can be invoked regardless of the caller's working directory.
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

for job in backup-postgres.sh backup-markdown.sh backup-media.sh; do
  "${script_dir}/${job}"
done

echo "All backup jobs finished successfully."
|
||||
20
deploy/scripts/backup/backup-markdown.sh
Normal file
20
deploy/scripts/backup/backup-markdown.sh
Normal file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
# Archive the markdown post content into a timestamped tarball and prune
# archives older than the retention window.
#
# Environment:
#   MARKDOWN_SOURCE_DIR  directory to back up (default: ./backend/content/posts)
#   BACKUP_DIR           where archives are written (default: ./backups/markdown)
#   RETENTION_DAYS       delete archives older than this many days (default: 30)
set -euo pipefail

src_dir="${MARKDOWN_SOURCE_DIR:-./backend/content/posts}"
dest_dir="${BACKUP_DIR:-./backups/markdown}"
keep_days="${RETENTION_DAYS:-30}"
stamp="$(date -u +%Y%m%dT%H%M%SZ)"
archive="${dest_dir}/markdown-${stamp}.tar.gz"

if [[ ! -d "${src_dir}" ]]; then
  echo "Markdown source directory not found: ${src_dir}" >&2
  exit 1
fi

mkdir -p "${dest_dir}"
tar -czf "${archive}" -C "${src_dir}" .
# Keep a stable "latest" pointer as a relative symlink inside the backup dir.
ln -sfn "$(basename "${archive}")" "${dest_dir}/latest.tar.gz"
# Age out old archives; latest.tar.gz is a symlink, not a regular file, so
# `-type f` never matches it.
find "${dest_dir}" -type f -name 'markdown-*.tar.gz' -mtime +"${keep_days}" -delete

echo "Markdown backup written to ${archive}"
|
||||
31
deploy/scripts/backup/backup-media.sh
Normal file
31
deploy/scripts/backup/backup-media.sh
Normal file
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env bash
# Back up media assets. Two modes:
#   * MEDIA_S3_SOURCE set  -> `aws s3 sync` the bucket into a timestamped dir
#   * otherwise            -> tar a local directory (MEDIA_SOURCE_DIR)
#
# Environment:
#   BACKUP_DIR        destination root (default: ./backups/media)
#   RETENTION_DAYS    prune backups older than this many days (default: 14)
#   MEDIA_S3_SOURCE   optional s3:// URI to sync from
#   MEDIA_SOURCE_DIR  local directory for tar mode (default: ./uploads)
#   AWS_EXTRA_ARGS    optional whitespace-separated extra flags for `aws s3 sync`
set -euo pipefail

BACKUP_DIR="${BACKUP_DIR:-./backups/media}"
RETENTION_DAYS="${RETENTION_DAYS:-14}"
TIMESTAMP="$(date -u +%Y%m%dT%H%M%SZ)"
mkdir -p "${BACKUP_DIR}"

if [[ -n "${MEDIA_S3_SOURCE:-}" ]]; then
  # Split AWS_EXTRA_ARGS into an array. The previous unquoted expansion was
  # also subject to accidental glob expansion (e.g. a `*` in the args).
  extra_args=()
  if [[ -n "${AWS_EXTRA_ARGS:-}" ]]; then
    read -r -a extra_args <<< "${AWS_EXTRA_ARGS}"
  fi

  TARGET_DIR="${BACKUP_DIR}/media-${TIMESTAMP}"
  mkdir -p "${TARGET_DIR}"
  # The ${arr[@]+...} guard keeps `set -u` happy on bash < 4.4 when empty.
  aws s3 sync "${MEDIA_S3_SOURCE}" "${TARGET_DIR}" ${extra_args[@]+"${extra_args[@]}"}
  # Maintain a relative "latest" symlink next to the dated directories.
  ln -sfn "$(basename "${TARGET_DIR}")" "${BACKUP_DIR}/latest"
  # Prune aged synced directories; the "latest" symlink is not -type d.
  find "${BACKUP_DIR}" -maxdepth 1 -mindepth 1 -type d -name 'media-*' -mtime +"${RETENTION_DAYS}" -exec rm -rf {} +
  echo "Media backup synced from ${MEDIA_S3_SOURCE} to ${TARGET_DIR}"
  exit 0
fi

SOURCE_DIR="${MEDIA_SOURCE_DIR:-./uploads}"
FILE_PATH="${BACKUP_DIR}/media-${TIMESTAMP}.tar.gz"

if [[ ! -d "${SOURCE_DIR}" ]]; then
  echo "Set MEDIA_SOURCE_DIR or MEDIA_S3_SOURCE before running this script" >&2
  exit 1
fi

tar -czf "${FILE_PATH}" -C "${SOURCE_DIR}" .
# Relative symlink keeps the backup directory relocatable.
ln -sfn "$(basename "${FILE_PATH}")" "${BACKUP_DIR}/latest.tar.gz"
find "${BACKUP_DIR}" -type f -name 'media-*.tar.gz' -mtime +"${RETENTION_DAYS}" -delete

echo "Media backup written to ${FILE_PATH}"
|
||||
19
deploy/scripts/backup/backup-postgres.sh
Normal file
19
deploy/scripts/backup/backup-postgres.sh
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env bash
# Dump the PostgreSQL database referenced by DATABASE_URL into a timestamped
# custom-format archive and prune dumps older than the retention window.
#
# Environment:
#   DATABASE_URL    connection string passed to pg_dump (required)
#   BACKUP_DIR      where dumps are written (default: ./backups/postgres)
#   RETENTION_DAYS  delete dumps older than this many days (default: 14)
set -euo pipefail

if [[ -z "${DATABASE_URL:-}" ]]; then
  echo "DATABASE_URL is required" >&2
  exit 1
fi

backup_dir="${BACKUP_DIR:-./backups/postgres}"
keep_days="${RETENTION_DAYS:-14}"
dump_path="${backup_dir}/postgres-$(date -u +%Y%m%dT%H%M%SZ).dump"

mkdir -p "${backup_dir}"
# Custom format keeps the dump compact and lets pg_restore select objects.
pg_dump --format=custom --file="${dump_path}" "${DATABASE_URL}"
# "latest.dump" is a relative symlink so the backup dir stays relocatable.
ln -sfn "$(basename "${dump_path}")" "${backup_dir}/latest.dump"
# The symlink is not a regular file, so `-type f` never deletes it.
find "${backup_dir}" -type f -name 'postgres-*.dump' -mtime +"${keep_days}" -delete

echo "Postgres backup written to ${dump_path}"
|
||||
49
deploy/scripts/backup/prune-backups.sh
Normal file
49
deploy/scripts/backup/prune-backups.sh
Normal file
@@ -0,0 +1,49 @@
|
||||
#!/usr/bin/env bash
# Prune aged backup artifacts under BACKUP_ROOT. Set DRY_RUN=true to list
# prune candidates without deleting anything.
set -euo pipefail

# Apply defaults only when the variable is unset or empty.
: "${BACKUP_ROOT:=./backups}"
: "${POSTGRES_RETENTION_DAYS:=14}"
: "${MARKDOWN_RETENTION_DAYS:=30}"
: "${MEDIA_RETENTION_DAYS:=14}"
: "${DRY_RUN:=false}"
|
||||
|
||||
prune() {
  # Delete files under $1 matching glob $2 that are older than $3 days.
  # When DRY_RUN=true the matches are printed instead of deleted.
  # A missing target directory is not an error.
  local dir="$1" glob="$2" days="$3"
  local action='-delete'

  [[ -d "${dir}" ]] || return 0

  if [[ "${DRY_RUN}" == "true" ]]; then
    action='-print'
  fi

  find "${dir}" -type f -name "${glob}" -mtime +"${days}" "${action}"
}
|
||||
|
||||
prune_dirs() {
  # Remove immediate subdirectories of $1 whose name matches glob $2 and
  # whose mtime is older than $3 days. When DRY_RUN=true, print instead of
  # removing. A missing target directory is not an error.
  local dir="$1" glob="$2" days="$3"

  [[ -d "${dir}" ]] || return 0

  if [[ "${DRY_RUN}" == "true" ]]; then
    find "${dir}" -maxdepth 1 -mindepth 1 -type d -name "${glob}" -mtime +"${days}" -print
    return 0
  fi

  find "${dir}" -maxdepth 1 -mindepth 1 -type d -name "${glob}" -mtime +"${days}" -exec rm -rf {} +
}
|
||||
|
||||
# Apply each backup family's own retention window.
prune "${BACKUP_ROOT}/postgres" 'postgres-*.dump' "${POSTGRES_RETENTION_DAYS}"
prune "${BACKUP_ROOT}/markdown" 'markdown-*.tar.gz' "${MARKDOWN_RETENTION_DAYS}"
prune "${BACKUP_ROOT}/media" 'media-*.tar.gz' "${MEDIA_RETENTION_DAYS}"
# S3-synced media backups are directories rather than tarballs, so prune
# aged media-* directories as well.
prune_dirs "${BACKUP_ROOT}/media" 'media-*' "${MEDIA_RETENTION_DAYS}"

echo "Backup pruning completed under ${BACKUP_ROOT}"
|
||||
20
deploy/scripts/backup/restore-markdown.sh
Normal file
20
deploy/scripts/backup/restore-markdown.sh
Normal file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
# Restore markdown content from a tar.gz produced by backup-markdown.sh.
#
# Usage: restore-markdown.sh <backup-file.tar.gz>
# Environment:
#   MARKDOWN_TARGET_DIR  restore destination (default: ./backend/content/posts)
set -euo pipefail

if [[ $# -lt 1 ]]; then
  echo "Usage: $0 <backup-file.tar.gz>" >&2
  exit 1
fi

TARGET_DIR="${MARKDOWN_TARGET_DIR:-./backend/content/posts}"
BACKUP_FILE="$1"

if [[ ! -f "${BACKUP_FILE}" ]]; then
  echo "Backup file not found: ${BACKUP_FILE}" >&2
  exit 1
fi

mkdir -p "${TARGET_DIR}"
# Clear the target completely before extracting. `rm -rf dir/*` would leave
# dotfiles behind; find -mindepth 1 also removes hidden entries. The :?
# expansion aborts rather than operating on an empty path.
find "${TARGET_DIR:?}" -mindepth 1 -delete
tar -xzf "${BACKUP_FILE}" -C "${TARGET_DIR}"
echo "Markdown restore completed into ${TARGET_DIR}"
|
||||
30
deploy/scripts/backup/restore-media.sh
Normal file
30
deploy/scripts/backup/restore-media.sh
Normal file
@@ -0,0 +1,30 @@
|
||||
#!/usr/bin/env bash
# Restore media assets from a backup produced by backup-media.sh.
# A directory argument is treated as an S3-sync style backup and synced back
# to MEDIA_S3_TARGET; a file argument is treated as a tarball and extracted
# into MEDIA_TARGET_DIR.
#
# Usage: restore-media.sh <backup-file-or-directory>
set -euo pipefail

if [[ $# -lt 1 ]]; then
  echo "Usage: $0 <backup-file-or-directory>" >&2
  exit 1
fi

SOURCE="$1"

if [[ -d "${SOURCE}" ]]; then
  if [[ -z "${MEDIA_S3_TARGET:-}" ]]; then
    echo "MEDIA_S3_TARGET is required when restoring from a synced directory backup" >&2
    exit 1
  fi
  # Split AWS_EXTRA_ARGS safely; an unquoted expansion would also be
  # subject to accidental glob expansion.
  extra_args=()
  if [[ -n "${AWS_EXTRA_ARGS:-}" ]]; then
    read -r -a extra_args <<< "${AWS_EXTRA_ARGS}"
  fi
  # The ${arr[@]+...} guard keeps `set -u` happy on bash < 4.4 when empty.
  aws s3 sync "${SOURCE}" "${MEDIA_S3_TARGET}" ${extra_args[@]+"${extra_args[@]}"}
  echo "Media restore synced to ${MEDIA_S3_TARGET}"
  exit 0
fi

TARGET_DIR="${MEDIA_TARGET_DIR:-./uploads}"
if [[ ! -f "${SOURCE}" ]]; then
  echo "Backup source not found: ${SOURCE}" >&2
  exit 1
fi

mkdir -p "${TARGET_DIR}"
# Remove everything, including dotfiles, which `rm -rf dir/*` would miss.
# The :? expansion aborts rather than operating on an empty path.
find "${TARGET_DIR:?}" -mindepth 1 -delete
tar -xzf "${SOURCE}" -C "${TARGET_DIR}"
echo "Media restore completed into ${TARGET_DIR}"
|
||||
21
deploy/scripts/backup/restore-postgres.sh
Normal file
21
deploy/scripts/backup/restore-postgres.sh
Normal file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env bash
# Restore a PostgreSQL database from a pg_dump custom-format archive.
#
# Usage: restore-postgres.sh <backup-file.dump>
# Environment:
#   DATABASE_URL  target database connection string (required)
set -euo pipefail

if (( $# < 1 )); then
  printf '%s\n' "Usage: $0 <backup-file.dump>" >&2
  exit 1
fi

if [[ -z "${DATABASE_URL:-}" ]]; then
  printf '%s\n' "DATABASE_URL is required" >&2
  exit 1
fi

dump_file="$1"
if [[ ! -f "${dump_file}" ]]; then
  printf '%s\n' "Backup file not found: ${dump_file}" >&2
  exit 1
fi

# --clean --if-exists drops existing objects before recreating them;
# --no-owner/--no-privileges avoid requiring the original roles to exist.
pg_restore --clean --if-exists --no-owner --no-privileges --dbname="${DATABASE_URL}" "${dump_file}"
printf '%s\n' "Postgres restore completed from ${dump_file}"
|
||||
26
deploy/scripts/backup/sync-backups-offsite.sh
Normal file
26
deploy/scripts/backup/sync-backups-offsite.sh
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env bash
# Mirror the local backup tree to an offsite target: either an s3:// bucket
# (via `aws s3 sync`) or an rsync destination.
#
# Environment:
#   BACKUP_ROOT       local backup tree (default: ./backups)
#   OFFSITE_TARGET    rsync path or s3:// bucket (required)
#   AWS_EXTRA_ARGS    optional extra flags for `aws s3 sync`
#   RSYNC_EXTRA_ARGS  optional extra flags for rsync
set -euo pipefail

BACKUP_ROOT="${BACKUP_ROOT:-./backups}"
OFFSITE_TARGET="${OFFSITE_TARGET:-}"

if [[ -z "${OFFSITE_TARGET}" ]]; then
  echo "OFFSITE_TARGET is required (rsync path or s3:// bucket)" >&2
  exit 1
fi

if [[ ! -d "${BACKUP_ROOT}" ]]; then
  echo "Backup root not found: ${BACKUP_ROOT}" >&2
  exit 1
fi

if [[ "${OFFSITE_TARGET}" == s3://* ]]; then
  # Word-split the extra args deliberately via `read -a` rather than an
  # unquoted expansion, which would also be subject to glob expansion.
  extra_args=()
  if [[ -n "${AWS_EXTRA_ARGS:-}" ]]; then
    read -r -a extra_args <<< "${AWS_EXTRA_ARGS}"
  fi
  # The ${arr[@]+...} guard keeps `set -u` happy on bash < 4.4 when empty.
  aws s3 sync "${BACKUP_ROOT}" "${OFFSITE_TARGET}" ${extra_args[@]+"${extra_args[@]}"}
  echo "Backups synced to ${OFFSITE_TARGET}"
  exit 0
fi

rsync_args=()
if [[ -n "${RSYNC_EXTRA_ARGS:-}" ]]; then
  read -r -a rsync_args <<< "${RSYNC_EXTRA_ARGS}"
fi
# --delete keeps the offsite copy an exact mirror: backups pruned locally
# are removed offsite as well.
rsync -av --delete ${rsync_args[@]+"${rsync_args[@]}"} "${BACKUP_ROOT}/" "${OFFSITE_TARGET}/"
echo "Backups synced to ${OFFSITE_TARGET}"
|
||||
17
deploy/scripts/backup/verify-restore.sh
Normal file
17
deploy/scripts/backup/verify-restore.sh
Normal file
@@ -0,0 +1,17 @@
|
||||
#!/usr/bin/env bash
# Restore rehearsal: run all three restore scripts so the backups are proven
# to be restorable, then prompt for manual verification of the app.
#
# Required environment:
#   DATABASE_URL     target database for the Postgres restore
#   POSTGRES_BACKUP  path to a postgres-*.dump file
#   MARKDOWN_BACKUP  path to a markdown-*.tar.gz file
#   MEDIA_BACKUP     path to a media backup (tarball or synced directory)
# Optional:
#   *_RESTORE_CMD    override the restore script invoked for each artifact
set -euo pipefail

# Fail fast with a descriptive message when any required input is missing.
: "${DATABASE_URL:?DATABASE_URL is required}"
: "${POSTGRES_BACKUP:?POSTGRES_BACKUP is required}"
: "${MARKDOWN_BACKUP:?MARKDOWN_BACKUP is required}"
: "${MEDIA_BACKUP:?MEDIA_BACKUP is required}"

POSTGRES_RESTORE_CMD="${POSTGRES_RESTORE_CMD:-./deploy/scripts/backup/restore-postgres.sh}"
MARKDOWN_RESTORE_CMD="${MARKDOWN_RESTORE_CMD:-./deploy/scripts/backup/restore-markdown.sh}"
MEDIA_RESTORE_CMD="${MEDIA_RESTORE_CMD:-./deploy/scripts/backup/restore-media.sh}"

# set -e aborts the rehearsal on the first failing restore step.
"${POSTGRES_RESTORE_CMD}" "${POSTGRES_BACKUP}"
"${MARKDOWN_RESTORE_CMD}" "${MARKDOWN_BACKUP}"
"${MEDIA_RESTORE_CMD}" "${MEDIA_BACKUP}"

echo "Restore rehearsal completed. Please verify homepage, article detail, media assets, admin login, revisions, audit logs, and subscriptions manually."
|
||||
122
deploy/scripts/render_compose_env.py
Normal file
122
deploy/scripts/render_compose_env.py
Normal file
@@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
try:
|
||||
import yaml
|
||||
except ImportError as exc: # pragma: no cover
|
||||
raise SystemExit(
|
||||
"Missing dependency: PyYAML. Install it with `python -m pip install pyyaml`."
|
||||
) from exc
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse CLI options for rendering a docker compose .env file.

    Returns:
        argparse.Namespace with ``input``, ``output``, ``section`` and
        ``stdout`` attributes.
    """
    parser = argparse.ArgumentParser(
        description="Render docker compose .env from deploy config.yaml"
    )
    # (flag, default, help) for the plain string-valued options.
    string_options = (
        (
            "--input",
            "deploy/docker/config.yaml",
            "Path to config.yaml (default: deploy/docker/config.yaml)",
        ),
        (
            "--output",
            "deploy/docker/.env",
            "Output dotenv file path (default: deploy/docker/.env)",
        ),
        (
            "--section",
            "compose_env",
            "Top-level mapping section to export (default: compose_env)",
        ),
    )
    for flag, default, help_text in string_options:
        parser.add_argument(flag, default=default, help=help_text)
    parser.add_argument(
        "--stdout",
        action="store_true",
        help="Print rendered dotenv to stdout instead of writing file",
    )
    return parser.parse_args()
|
||||
|
||||
|
||||
def load_config(path: Path) -> dict[str, Any]:
    """Load a YAML config file, requiring a mapping at the document root.

    Raises:
        SystemExit: when the file is missing or the root is not a mapping.
    """
    if not path.exists():
        raise SystemExit(f"Config file not found: {path}")

    parsed = yaml.safe_load(path.read_text(encoding="utf-8"))
    if not isinstance(parsed, dict):
        raise SystemExit("config.yaml root must be a mapping/object")
    return parsed
|
||||
|
||||
|
||||
def encode_env_value(value: Any) -> str:
    """Render one scalar config value as a dotenv-compatible value string.

    ``None`` and the empty string become ``""``; booleans become lowercase
    ``true``/``false``; numbers are rendered via ``str``; strings are quoted
    and escaped only when they contain characters a dotenv parser could
    misinterpret.

    Raises:
        SystemExit: for non-scalar (list/dict/etc.) values.
    """
    if value is None:
        return '""'
    if isinstance(value, bool):
        # bool must be tested before int/float: bool is an int subclass.
        return "true" if value else "false"
    if isinstance(value, (int, float)):
        return str(value)
    if not isinstance(value, str):
        raise SystemExit(f"compose_env only supports scalar values, got: {type(value).__name__}")

    if value == "":
        return '""'

    # Quote when any risky character appears, or the value could be taken
    # as a variable reference.
    risky = set(' #"\'\t\n\r')
    if not any(ch in risky for ch in value) and not value.startswith("$"):
        return value

    # Backslash first so later escapes are not double-escaped.
    replacements = (
        ("\\", "\\\\"),
        ('"', '\\"'),
        ("\n", "\\n"),
        ("\r", "\\r"),
        ("\t", "\\t"),
    )
    escaped = value
    for old, new in replacements:
        escaped = escaped.replace(old, new)
    return f'"{escaped}"'
|
||||
|
||||
|
||||
def render_env(section_name: str, values: dict[str, Any], source_path: Path) -> str:
    """Render ``values`` as dotenv text with a provenance header.

    Raises:
        SystemExit: when a key is not a non-empty string.
    """
    header = [
        f"# Generated from {source_path.as_posix()}::{section_name}",
        "# Do not edit this file directly; edit config.yaml and re-render.",
        "",
    ]

    body = []
    for key, value in values.items():
        if not isinstance(key, str) or not key:
            raise SystemExit(f"Invalid env key: {key!r}")
        body.append(f"{key}={encode_env_value(value)}")

    # Trailing "" yields a final newline when joined.
    return "\n".join(header + body + [""])
|
||||
|
||||
|
||||
def main() -> int:
    """Entry point: load the config, render the section, write or print it.

    Returns:
        Process exit code (0 on success; SystemExit is raised on errors).
    """
    args = parse_args()
    config_path = Path(args.input)
    env_path = Path(args.output)

    config = load_config(config_path)
    section = config.get(args.section)
    if not isinstance(section, dict):
        raise SystemExit(
            f"Section `{args.section}` must exist in config.yaml and must be a mapping/object"
        )

    rendered = render_env(args.section, section, config_path)

    if args.stdout:
        sys.stdout.write(rendered)
        return 0

    # Create the destination directory if needed, then write with LF newlines
    # so the generated .env is byte-stable across platforms.
    env_path.parent.mkdir(parents=True, exist_ok=True)
    env_path.write_text(rendered, encoding="utf-8", newline="\n")
    print(f"Wrote {env_path}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s return code as the process exit status.
    sys.exit(main())
|
||||
Reference in New Issue
Block a user