12 Commits

Author SHA1 Message Date
73c261eca4 ci: split ui regression artifacts by frontend and admin 2026-04-01 16:52:22 +00:00
7de4ddc3ee feat: refresh content workflow and verification settings
All checks were successful
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 43s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Successful in 25m9s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 51s
2026-04-01 18:47:17 +08:00
f2c07df320 build: slim backend release profile safely
All checks were successful
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 12s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Successful in 26m8s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 56s
2026-04-01 13:46:11 +08:00
09d7cbfbf3 ci: allow optional Docker Hub login for buildx pulls
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 54s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Has been cancelled
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Has been cancelled
2026-04-01 13:24:26 +08:00
497a9d713d feat: ship public ops features and cache docker builds
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Failing after 13s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Has been cancelled
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Has been cancelled
2026-04-01 13:22:19 +08:00
669b79cc95 Fix backend build toolchain and Debian base
All checks were successful
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 1m5s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Successful in 30m13s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 1m17s
2026-04-01 02:02:52 +08:00
a305817b78 Stabilize docker workflow on constrained runner
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 1m41s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Failing after 27m6s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 1m13s
2026-04-01 00:26:25 +08:00
81fd785d60 Fix docker build syntax frontend for actions
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 26s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Failing after 31s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 25s
2026-04-01 00:22:58 +08:00
660b255700 Fix admin login and add subscription popup settings
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Failing after 6s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Failing after 5s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Failing after 6s
2026-04-01 00:05:16 +08:00
350262c910 fix: try multiple registry usernames in docker workflow
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Failing after 8s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Failing after 4s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Failing after 5s
2026-03-31 22:09:02 +08:00
ef2010cb48 fix: allow docker workflow to fallback to gitea token
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Failing after 4s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Failing after 4s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Failing after 4s
2026-03-31 22:06:11 +08:00
43eaaf3602 merge: integrate blog platform admin and deploy stack
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Failing after 5s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Failing after 4s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Failing after 5s
2026-03-31 21:53:06 +08:00
123 changed files with 9533 additions and 3225 deletions

View File

@@ -13,11 +13,16 @@ on:
- .gitea/workflows/backend-docker.yml
workflow_dispatch:
permissions:
contents: read
packages: write
jobs:
build-and-push:
runs-on: ubuntu-latest
strategy:
fail-fast: false
max-parallel: 1
matrix:
include:
- component: backend
@@ -110,16 +115,82 @@ jobs:
REGISTRY_HOST: ${{ steps.meta.outputs.registry_host }}
REGISTRY_USER: ${{ secrets.REGISTRY_USERNAME }}
REGISTRY_TOKEN: ${{ secrets.REGISTRY_TOKEN }}
BUILTIN_GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
GITHUB_ACTOR_NAME: ${{ github.actor }}
GITHUB_REPOSITORY_OWNER_NAME: ${{ github.repository_owner }}
run: |
set -euo pipefail
if [ -z "${REGISTRY_USER}" ] || [ -z "${REGISTRY_TOKEN}" ]; then
echo "Missing secrets: REGISTRY_USERNAME / REGISTRY_TOKEN"
CUSTOM_REGISTRY_USER="${REGISTRY_USER:-}"
CUSTOM_REGISTRY_TOKEN="${REGISTRY_TOKEN:-}"
BUILTIN_REGISTRY_TOKEN="${BUILTIN_GITEA_TOKEN:-}"
ACTOR_USER="${GITHUB_ACTOR_NAME:-}"
OWNER_USER="${GITHUB_REPOSITORY_OWNER_NAME:-}"
if [ -n "${CUSTOM_REGISTRY_TOKEN}" ]; then
REGISTRY_TOKEN="${CUSTOM_REGISTRY_TOKEN}"
else
REGISTRY_TOKEN="${BUILTIN_REGISTRY_TOKEN}"
fi
if [ -z "${REGISTRY_TOKEN}" ]; then
echo "Missing registry credentials: set REGISTRY_USERNAME/REGISTRY_TOKEN, or rely on the built-in GITEA_TOKEN with packages:write permission."
exit 1
fi
echo "${REGISTRY_TOKEN}" | docker login "${REGISTRY_HOST}" --username "${REGISTRY_USER}" --password-stdin
CANDIDATE_USERS=()
for candidate in "${CUSTOM_REGISTRY_USER}" "${ACTOR_USER}" "${OWNER_USER}"; do
if [ -n "${candidate}" ] && [[ ! " ${CANDIDATE_USERS[*]} " =~ [[:space:]]${candidate}[[:space:]] ]]; then
CANDIDATE_USERS+=("${candidate}")
fi
done
- name: Build image
if [ ${#CANDIDATE_USERS[@]} -eq 0 ]; then
echo "Missing registry username: set REGISTRY_USERNAME or ensure github.actor/repository_owner are available."
exit 1
fi
LOGIN_OK=0
for candidate in "${CANDIDATE_USERS[@]}"; do
if echo "${REGISTRY_TOKEN}" | docker login "${REGISTRY_HOST}" --username "${candidate}" --password-stdin; then
LOGIN_OK=1
break
fi
done
if [ "${LOGIN_OK}" -ne 1 ]; then
echo "Registry login failed for all candidate usernames."
exit 1
fi
- name: Setup docker buildx
shell: bash
run: |
set -euo pipefail
if docker buildx inspect gitea-builder >/dev/null 2>&1; then
docker buildx use gitea-builder
else
docker buildx create --name gitea-builder --driver docker-container --use
fi
docker buildx inspect --bootstrap
- name: Login Docker Hub (optional)
shell: bash
env:
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
run: |
set -euo pipefail
if [ -n "${DOCKERHUB_USERNAME:-}" ] && [ -n "${DOCKERHUB_TOKEN:-}" ]; then
echo "${DOCKERHUB_TOKEN}" | docker login docker.io --username "${DOCKERHUB_USERNAME}" --password-stdin
else
echo "Docker Hub credentials not configured, continuing with anonymous pulls."
fi
- name: Build and push image
shell: bash
env:
COMPONENT: ${{ matrix.component }}
@@ -147,27 +218,19 @@ jobs:
BUILD_ARGS+=(--build-arg "VITE_ADMIN_BASENAME=${ADMIN_VITE_BASENAME}")
fi
docker build \
docker buildx build \
--file "${DOCKERFILE}" \
"${BUILD_ARGS[@]}" \
--build-arg BUILDKIT_INLINE_CACHE=1 \
--cache-from "type=registry,ref=${IMAGE_BASE}:${TAG_BRANCH}" \
--cache-from "type=registry,ref=${IMAGE_BASE}:${TAG_LATEST}" \
--cache-to "type=inline" \
--tag "${IMAGE_BASE}:${TAG_LATEST}" \
--tag "${IMAGE_BASE}:${TAG_BRANCH}" \
--tag "${IMAGE_BASE}:${TAG_SHA}" \
--push \
"${CONTEXT_DIR}"
- name: Push image
shell: bash
env:
IMAGE_BASE: ${{ steps.meta.outputs.image_base }}
TAG_LATEST: ${{ steps.meta.outputs.tag_latest }}
TAG_BRANCH: ${{ steps.meta.outputs.tag_branch }}
TAG_SHA: ${{ steps.meta.outputs.tag_sha }}
run: |
set -euo pipefail
docker push "${IMAGE_BASE}:${TAG_LATEST}"
docker push "${IMAGE_BASE}:${TAG_BRANCH}"
docker push "${IMAGE_BASE}:${TAG_SHA}"
- name: Output image tags
shell: bash
env:

View File

@@ -0,0 +1,167 @@
# UI regression workflow: runs the Playwright smoke suites against the
# frontend and admin apps, then publishes HTML reports, raw results, and
# failure artifacts per target. Both suites run with continue-on-error so
# a frontend failure never hides admin results; a final step fails the job.
name: ui-regression
on:
  push:
    branches:
      - main
      - master
    paths:
      - admin/**
      - frontend/**
      - playwright-smoke/**
      - .gitea/workflows/ui-regression.yml
  pull_request:
    paths:
      - admin/**
      - frontend/**
      - playwright-smoke/**
      - .gitea/workflows/ui-regression.yml
  workflow_dispatch:
jobs:
  playwright-regression:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup pnpm
        uses: pnpm/action-setup@v4
        with:
          version: 10
      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 22
          cache: pnpm
          cache-dependency-path: |
            frontend/pnpm-lock.yaml
            admin/pnpm-lock.yaml
            playwright-smoke/pnpm-lock.yaml
      - name: Install frontend deps
        working-directory: frontend
        run: pnpm install --frozen-lockfile
      - name: Install admin deps
        working-directory: admin
        run: pnpm install --frozen-lockfile
      - name: Install Playwright deps
        working-directory: playwright-smoke
        run: pnpm install --frozen-lockfile
      - name: Install Playwright browsers
        working-directory: playwright-smoke
        run: pnpm exec playwright install --with-deps chromium
      - name: Typecheck Playwright suite
        working-directory: playwright-smoke
        run: pnpm exec tsc -p tsconfig.json --noEmit
      # Fresh per-target artifact folders so frontend/admin outputs never mix.
      - name: Prepare Playwright artifact folders
        run: |
          rm -rf playwright-smoke/.artifacts
          mkdir -p playwright-smoke/.artifacts/frontend
          mkdir -p playwright-smoke/.artifacts/admin
      - name: Run frontend UI regression suite
        id: ui_frontend
        working-directory: playwright-smoke
        continue-on-error: true
        run: pnpm test:frontend
      - name: Collect frontend Playwright artifacts
        if: always()
        run: |
          if [ -d playwright-smoke/playwright-report ]; then
            cp -R playwright-smoke/playwright-report playwright-smoke/.artifacts/frontend/playwright-report
          fi
          if [ -d playwright-smoke/test-results ]; then
            cp -R playwright-smoke/test-results playwright-smoke/.artifacts/frontend/test-results
          fi
          rm -rf playwright-smoke/playwright-report playwright-smoke/test-results
      - name: Run admin UI regression suite
        id: ui_admin
        working-directory: playwright-smoke
        continue-on-error: true
        run: pnpm test:admin
      - name: Collect admin Playwright artifacts
        if: always()
        run: |
          if [ -d playwright-smoke/playwright-report ]; then
            cp -R playwright-smoke/playwright-report playwright-smoke/.artifacts/admin/playwright-report
          fi
          if [ -d playwright-smoke/test-results ]; then
            cp -R playwright-smoke/test-results playwright-smoke/.artifacts/admin/test-results
          fi
          # Mirror the frontend collection step: clear the shared output dirs
          # so stale reports cannot leak into any later step.
          rm -rf playwright-smoke/playwright-report playwright-smoke/test-results
      - name: Upload frontend HTML report
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: playwright-html-report-frontend
          path: playwright-smoke/.artifacts/frontend/playwright-report
          retention-days: 14
          if-no-files-found: ignore
      - name: Upload admin HTML report
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: playwright-html-report-admin
          path: playwright-smoke/.artifacts/admin/playwright-report
          retention-days: 14
          if-no-files-found: ignore
      - name: Upload frontend raw results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: playwright-raw-results-frontend
          path: playwright-smoke/.artifacts/frontend/test-results
          retention-days: 14
          if-no-files-found: ignore
      - name: Upload admin raw results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: playwright-raw-results-admin
          path: playwright-smoke/.artifacts/admin/test-results
          retention-days: 14
          if-no-files-found: ignore
      - name: Upload frontend failure screenshots / videos / traces
        if: steps.ui_frontend.outcome != 'success'
        uses: actions/upload-artifact@v4
        with:
          name: playwright-failure-artifacts-frontend
          path: |
            playwright-smoke/.artifacts/frontend/test-results/**/*.png
            playwright-smoke/.artifacts/frontend/test-results/**/*.webm
            playwright-smoke/.artifacts/frontend/test-results/**/*.zip
            playwright-smoke/.artifacts/frontend/test-results/**/error-context.md
          retention-days: 21
          if-no-files-found: ignore
      - name: Upload admin failure screenshots / videos / traces
        if: steps.ui_admin.outcome != 'success'
        uses: actions/upload-artifact@v4
        with:
          name: playwright-failure-artifacts-admin
          path: |
            playwright-smoke/.artifacts/admin/test-results/**/*.png
            playwright-smoke/.artifacts/admin/test-results/**/*.webm
            playwright-smoke/.artifacts/admin/test-results/**/*.zip
            playwright-smoke/.artifacts/admin/test-results/**/error-context.md
          retention-days: 21
          if-no-files-found: ignore
      # Propagate the real outcome now that all artifacts are uploaded.
      - name: Mark workflow failed when any suite failed
        if: steps.ui_frontend.outcome != 'success' || steps.ui_admin.outcome != 'success'
        run: exit 1

14
.gitignore vendored
View File

@@ -24,5 +24,19 @@ backend-start.log
deploy/docker/.env
deploy/docker/config.yaml
admin/tmp-playwright.*
admin/.vite/
test-results/
playwright-report/
blob-report/
*-playwright.err.log
*-playwright.out.log
backend-restart.err.log
backend-restart.out.log
frontend.dev.err.log
frontend.dev.out.log
admin.dev.err.log
admin.dev.out.log
backend.dev.err.log
backend.dev.out.log
lighthouse-*/
lighthouse-*.json

View File

@@ -1,5 +1,3 @@
# syntax=docker/dockerfile:1.7
FROM node:22-alpine AS builder
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"

View File

@@ -38,6 +38,18 @@ const PostsPage = lazy(async () => {
const mod = await import('@/pages/posts-page')
return { default: mod.PostsPage }
})
const CategoriesPage = lazy(async () => {
const mod = await import('@/pages/categories-page')
return { default: mod.CategoriesPage }
})
const TagsPage = lazy(async () => {
const mod = await import('@/pages/tags-page')
return { default: mod.TagsPage }
})
const BackupsPage = lazy(async () => {
const mod = await import('@/pages/backups-page')
return { default: mod.BackupsPage }
})
const RevisionsPage = lazy(async () => {
const mod = await import('@/pages/revisions-page')
return { default: mod.RevisionsPage }
@@ -251,6 +263,30 @@ function AppRoutes() {
</LazyRoute>
}
/>
<Route
path="categories"
element={
<LazyRoute>
<CategoriesPage />
</LazyRoute>
}
/>
<Route
path="tags"
element={
<LazyRoute>
<TagsPage />
</LazyRoute>
}
/>
<Route
path="backups"
element={
<LazyRoute>
<BackupsPage />
</LazyRoute>
}
/>
<Route
path="revisions"
element={

View File

@@ -2,7 +2,9 @@ import {
BarChart3,
BellRing,
BookOpenText,
Download,
ExternalLink,
Folders,
History,
Image as ImageIcon,
LayoutDashboard,
@@ -13,6 +15,7 @@ import {
ScrollText,
Settings,
Sparkles,
Tags,
} from 'lucide-react'
import type { ReactNode } from 'react'
import { NavLink } from 'react-router-dom'
@@ -42,6 +45,24 @@ const primaryNav = [
description: 'Markdown 内容管理',
icon: ScrollText,
},
{
to: '/categories',
label: '分类',
description: '分类目录与聚合统计',
icon: Folders,
},
{
to: '/tags',
label: '标签',
description: '标签库与引用整理',
icon: Tags,
},
{
to: '/backups',
label: '备份',
description: '全站导出与恢复',
icon: Download,
},
{
to: '/revisions',
label: '版本',

View File

@@ -179,7 +179,7 @@ export function MarkdownWorkbench({
<span className="h-3 w-3 rounded-full bg-[#ffbd2e]" />
<span className="h-3 w-3 rounded-full bg-[#27c93f]" />
</div>
<p className="font-mono text-xs text-slate-400">{path}</p>
<p className="font-mono text-xs text-slate-400">Markdown </p>
</div>
<div className="flex flex-wrap items-center gap-2">
@@ -258,9 +258,7 @@ export function MarkdownWorkbench({
<span>
{originalLabel} / {modifiedLabel}
</span>
) : (
<span>{path}</span>
)}
) : null}
</div>
{panel === 'edit' ? (

View File

@@ -7,6 +7,7 @@ import type {
AdminMediaBatchDeleteResponse,
AdminMediaDeleteResponse,
AdminMediaListResponse,
AdminMediaMetadataResponse,
AdminMediaReplaceResponse,
AdminMediaUploadResponse,
AdminPostCoverImageRequest,
@@ -20,6 +21,7 @@ import type {
AdminSessionResponse,
AdminSiteSettingsResponse,
AuditLogRecord,
CategoryRecord,
CommentListQuery,
CommentBlacklistRecord,
CommentPersonaAnalysisLogRecord,
@@ -33,7 +35,9 @@ import type {
MarkdownDeleteResponse,
MarkdownDocumentResponse,
MarkdownImportResponse,
MediaAssetMetadataPayload,
NotificationDeliveryRecord,
PostPageResponse,
PostListQuery,
PostRevisionDetail,
PostRevisionRecord,
@@ -41,11 +45,16 @@ import type {
ReviewRecord,
RestoreRevisionResponse,
SiteSettingsPayload,
SiteBackupDocument,
SiteBackupImportPayload,
SiteBackupImportResponse,
SubscriptionDigestResponse,
SubscriptionListResponse,
SubscriptionPayload,
SubscriptionRecord,
SubscriptionUpdatePayload,
TagRecord,
TaxonomyPayload,
UpdateCommentPayload,
UpdatePostPayload,
UpdateReviewPayload,
@@ -53,8 +62,8 @@ import type {
import { getRuntimeAdminBaseUrl, normalizeAdminBaseUrl } from '@/lib/runtime-config'
const envApiBase = normalizeAdminBaseUrl(import.meta.env.VITE_API_BASE)
const DEV_API_BASE = 'http://localhost:5150'
const PROD_DEFAULT_API_PORT = '5150'
const DEV_DEFAULT_API_HOST = '127.0.0.1'
function getApiBase() {
const runtimeApiBase = getRuntimeAdminBaseUrl('apiBaseUrl')
@@ -67,11 +76,12 @@ function getApiBase() {
}
if (import.meta.env.DEV) {
return DEV_API_BASE
}
if (typeof window !== 'undefined') {
const { protocol, hostname } = window.location
return `${protocol}//${hostname}:${PROD_DEFAULT_API_PORT}`
}
if (typeof window === 'undefined') {
return DEV_API_BASE
return `http://${DEV_DEFAULT_API_HOST}:${PROD_DEFAULT_API_PORT}`
}
const { protocol, hostname } = window.location
@@ -240,6 +250,68 @@ export const adminApi = {
}),
dashboard: () => request<AdminDashboardResponse>('/api/admin/dashboard'),
analytics: () => request<AdminAnalyticsResponse>('/api/admin/analytics'),
listCategories: () => request<CategoryRecord[]>('/api/admin/categories'),
createCategory: (payload: TaxonomyPayload) =>
request<CategoryRecord>('/api/admin/categories', {
method: 'POST',
body: JSON.stringify({
name: payload.name,
slug: payload.slug,
description: payload.description,
cover_image: payload.coverImage,
accent_color: payload.accentColor,
seo_title: payload.seoTitle,
seo_description: payload.seoDescription,
}),
}),
updateCategory: (id: number, payload: TaxonomyPayload) =>
request<CategoryRecord>(`/api/admin/categories/${id}`, {
method: 'PATCH',
body: JSON.stringify({
name: payload.name,
slug: payload.slug,
description: payload.description,
cover_image: payload.coverImage,
accent_color: payload.accentColor,
seo_title: payload.seoTitle,
seo_description: payload.seoDescription,
}),
}),
deleteCategory: (id: number) =>
request<void>(`/api/admin/categories/${id}`, {
method: 'DELETE',
}),
listTags: () => request<TagRecord[]>('/api/admin/tags'),
createTag: (payload: TaxonomyPayload) =>
request<TagRecord>('/api/admin/tags', {
method: 'POST',
body: JSON.stringify({
name: payload.name,
slug: payload.slug,
description: payload.description,
cover_image: payload.coverImage,
accent_color: payload.accentColor,
seo_title: payload.seoTitle,
seo_description: payload.seoDescription,
}),
}),
updateTag: (id: number, payload: TaxonomyPayload) =>
request<TagRecord>(`/api/admin/tags/${id}`, {
method: 'PATCH',
body: JSON.stringify({
name: payload.name,
slug: payload.slug,
description: payload.description,
cover_image: payload.coverImage,
accent_color: payload.accentColor,
seo_title: payload.seoTitle,
seo_description: payload.seoDescription,
}),
}),
deleteTag: (id: number) =>
request<void>(`/api/admin/tags/${id}`, {
method: 'DELETE',
}),
getSiteSettings: () => request<AdminSiteSettingsResponse>('/api/admin/site-settings'),
updateSiteSettings: (payload: SiteSettingsPayload) =>
request<AdminSiteSettingsResponse>('/api/admin/site-settings', {
@@ -333,6 +405,24 @@ export const adminApi = {
body: formData,
})
},
updateMediaObjectMetadata: (payload: MediaAssetMetadataPayload) =>
request<AdminMediaMetadataResponse>('/api/admin/storage/media/metadata', {
method: 'PATCH',
body: JSON.stringify({
key: payload.key,
title: payload.title,
alt_text: payload.altText,
caption: payload.caption,
tags: payload.tags,
notes: payload.notes,
}),
}),
exportSiteBackup: () => request<SiteBackupDocument>('/api/admin/site-backup/export'),
importSiteBackup: (payload: SiteBackupImportPayload) =>
request<SiteBackupImportResponse>('/api/admin/site-backup/import', {
method: 'POST',
body: JSON.stringify(payload),
}),
generatePostMetadata: (markdown: string) =>
request<AdminPostMetadataResponse>('/api/admin/ai/post-metadata', {
method: 'POST',
@@ -386,6 +476,27 @@ export const adminApi = {
preview: query?.preview ?? true,
}),
),
listPostsPage: (query?: PostListQuery) =>
request<PostPageResponse>(
appendQueryParams('/api/posts/page', {
slug: query?.slug,
category: query?.category,
tag: query?.tag,
search: query?.search,
type: query?.postType,
pinned: query?.pinned,
status: query?.status,
visibility: query?.visibility,
listed_only: query?.listedOnly,
include_private: query?.includePrivate ?? true,
include_redirects: query?.includeRedirects ?? true,
preview: query?.preview ?? true,
page: query?.page,
page_size: query?.pageSize,
sort_by: query?.sortBy,
sort_order: query?.sortOrder,
}),
),
getPostBySlug: (slug: string) =>
request<PostRecord>(`/api/posts/slug/${encodeURIComponent(slug)}?preview=true&include_private=true`),
createPost: (payload: CreatePostPayload) =>

View File

@@ -301,6 +301,16 @@ export interface AdminSiteSettingsResponse {
music_playlist: MusicTrack[]
ai_enabled: boolean
paragraph_comments_enabled: boolean
comment_verification_mode: HumanVerificationMode
comment_turnstile_enabled: boolean
subscription_verification_mode: HumanVerificationMode
subscription_turnstile_enabled: boolean
web_push_enabled: boolean
turnstile_site_key: string | null
turnstile_secret_key: string | null
web_push_vapid_public_key: string | null
web_push_vapid_private_key: string | null
web_push_vapid_subject: string | null
ai_provider: string | null
ai_api_base: string | null
ai_api_key: string | null
@@ -327,8 +337,13 @@ export interface AdminSiteSettingsResponse {
seo_default_og_image: string | null
seo_default_twitter_handle: string | null
notification_webhook_url: string | null
notification_channel_type: 'webhook' | 'ntfy' | string
notification_comment_enabled: boolean
notification_friend_link_enabled: boolean
subscription_popup_enabled: boolean
subscription_popup_title: string
subscription_popup_description: string
subscription_popup_delay_seconds: number
search_synonyms: string[]
}
@@ -362,6 +377,16 @@ export interface SiteSettingsPayload {
musicPlaylist?: MusicTrack[]
aiEnabled?: boolean
paragraphCommentsEnabled?: boolean
commentVerificationMode?: HumanVerificationMode | null
commentTurnstileEnabled?: boolean
subscriptionVerificationMode?: HumanVerificationMode | null
subscriptionTurnstileEnabled?: boolean
webPushEnabled?: boolean
turnstileSiteKey?: string | null
turnstileSecretKey?: string | null
webPushVapidPublicKey?: string | null
webPushVapidPrivateKey?: string | null
webPushVapidSubject?: string | null
aiProvider?: string | null
aiApiBase?: string | null
aiApiKey?: string | null
@@ -385,11 +410,56 @@ export interface SiteSettingsPayload {
seoDefaultOgImage?: string | null
seoDefaultTwitterHandle?: string | null
notificationWebhookUrl?: string | null
notificationChannelType?: 'webhook' | 'ntfy' | string | null
notificationCommentEnabled?: boolean
notificationFriendLinkEnabled?: boolean
subscriptionPopupEnabled?: boolean
subscriptionPopupTitle?: string | null
subscriptionPopupDescription?: string | null
subscriptionPopupDelaySeconds?: number | null
searchSynonyms?: string[]
}
// Human-verification strategy for public forms; the trailing `| string`
// keeps the union open so unknown server-added modes do not break the client.
export type HumanVerificationMode = 'off' | 'captcha' | 'turnstile' | string

// Category as returned by the admin categories endpoints
// (server-shaped: snake_case fields, ISO-string timestamps).
export interface CategoryRecord {
  id: number
  name: string
  slug: string
  // NOTE(review): presumably the number of posts in this category — confirm
  // against the backend; the count's semantics are not visible here.
  count: number
  description: string | null
  cover_image: string | null
  accent_color: string | null
  seo_title: string | null
  seo_description: string | null
  created_at: string
  updated_at: string
}

// Tag as returned by the admin tag endpoints; mirrors CategoryRecord field-for-field.
export interface TagRecord {
  id: number
  name: string
  slug: string
  // NOTE(review): presumably the number of posts carrying this tag — confirm.
  count: number
  description: string | null
  cover_image: string | null
  accent_color: string | null
  seo_title: string | null
  seo_description: string | null
  created_at: string
  updated_at: string
}

// Client-side create/update payload shared by categories and tags.
// camelCase here; adminApi maps these to the server's snake_case keys
// (e.g. coverImage -> cover_image) before sending. Only `name` is required.
export interface TaxonomyPayload {
  name: string
  slug?: string | null
  description?: string | null
  coverImage?: string | null
  accentColor?: string | null
  seoTitle?: string | null
  seoDescription?: string | null
}
export interface AdminAiReindexResponse {
indexed_chunks: number
last_indexed_at: string | null
@@ -424,6 +494,11 @@ export interface AdminMediaObjectResponse {
url: string
size_bytes: number
last_modified: string | null
title: string | null
alt_text: string | null
caption: string | null
tags: string[]
notes: string | null
}
export interface AdminMediaListResponse {
@@ -458,6 +533,64 @@ export interface AdminMediaReplaceResponse {
url: string
}
export interface MediaAssetMetadataPayload {
key: string
title?: string | null
altText?: string | null
caption?: string | null
tags?: string[]
notes?: string | null
}
export interface AdminMediaMetadataResponse {
saved: boolean
key: string
title: string | null
alt_text: string | null
caption: string | null
tags: string[]
notes: string | null
}
export interface SiteBackupDocument {
version: string
exported_at: string
includes_storage_binaries: boolean
warning: string
site_settings: Record<string, unknown>
categories: Record<string, unknown>[]
tags: Record<string, unknown>[]
reviews: Record<string, unknown>[]
friend_links: Record<string, unknown>[]
media_assets: Record<string, unknown>[]
storage_manifest?: Record<string, unknown>[] | null
posts: Array<{
slug: string
file_name: string
markdown: string
}>
}
export interface SiteBackupImportPayload {
backup: SiteBackupDocument
mode?: 'merge' | 'replace' | string
}
export interface SiteBackupImportResponse {
imported: boolean
mode: string
site_settings_restored: boolean
posts_written: number
categories_upserted: number
tags_upserted: number
reviews_upserted: number
friend_links_upserted: number
media_assets_upserted: number
storage_manifest_items: number
includes_storage_binaries: boolean
warning: string
}
export interface CommentBlacklistRecord {
id: number
matcher_type: 'ip' | 'email' | 'user_agent' | string
@@ -595,6 +728,20 @@ export interface PostListQuery {
includePrivate?: boolean
includeRedirects?: boolean
preview?: boolean
page?: number
pageSize?: number
sortBy?: 'created_at' | 'updated_at' | 'title' | string
sortOrder?: 'asc' | 'desc' | string
}
export interface PostPageResponse {
items: PostRecord[]
page: number
page_size: number
total: number
total_pages: number
sort_by: string
sort_order: string
}
export interface CreatePostPayload {

View File

@@ -0,0 +1,248 @@
import { Download, RefreshCcw, Upload } from 'lucide-react'
import { useMemo, useState } from 'react'
import { toast } from 'sonner'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'
import { Input } from '@/components/ui/input'
import { Select } from '@/components/ui/select'
import { adminApi, ApiError } from '@/lib/api'
import type { SiteBackupDocument, SiteBackupImportResponse } from '@/lib/types'
/**
 * Serializes `payload` as pretty-printed JSON (2-space indent) and triggers
 * a browser download of it under `filename` via a temporary anchor element
 * and object URL. The URL is revoked afterwards to avoid leaking the blob.
 */
function downloadJson(filename: string, payload: unknown) {
  const serialized = JSON.stringify(payload, null, 2)
  const objectUrl = URL.createObjectURL(
    new Blob([serialized], { type: 'application/json' }),
  )
  const anchor = document.createElement('a')
  anchor.href = objectUrl
  anchor.download = filename
  document.body.appendChild(anchor)
  anchor.click()
  anchor.remove()
  URL.revokeObjectURL(objectUrl)
}
// Admin "backups" page: exports the whole site as one JSON document and
// re-imports a previously exported backup in 'merge' or 'replace' mode.
// NOTE(review): many static JSX labels below render empty text (e.g. bare
// <CardTitle></CardTitle>, lone "/" or single characters) — the original
// (apparently CJK) label text looks stripped by extraction; confirm against
// the real source file before shipping.
export function BackupsPage() {
  // Busy flags for the two async actions (export / import).
  const [exporting, setExporting] = useState(false)
  const [importing, setImporting] = useState(false)
  // 'replace' overwrites existing content; 'merge' upserts on top of it.
  const [importMode, setImportMode] = useState<'merge' | 'replace'>('merge')
  const [selectedFile, setSelectedFile] = useState<File | null>(null)
  // Parsed backup document; null until a chosen file passes JSON.parse.
  const [selectedBackup, setSelectedBackup] = useState<SiteBackupDocument | null>(null)
  const [lastImportResult, setLastImportResult] = useState<SiteBackupImportResponse | null>(null)
  // Derived entity counts for the preview card; null when nothing selected.
  const backupStats = useMemo(() => {
    if (!selectedBackup) {
      return null
    }
    return {
      posts: selectedBackup.posts.length,
      categories: selectedBackup.categories.length,
      tags: selectedBackup.tags.length,
      reviews: selectedBackup.reviews.length,
      friendLinks: selectedBackup.friend_links.length,
      mediaAssets: selectedBackup.media_assets.length,
      storageManifest: selectedBackup.storage_manifest?.length ?? 0,
    }
  }, [selectedBackup])
  return (
    <div className="space-y-6">
      <div className="flex flex-col gap-4 xl:flex-row xl:items-end xl:justify-between">
        <div className="space-y-3">
          <Badge variant="secondary"> / </Badge>
          <div>
            <h2 className="text-3xl font-semibold tracking-tight"></h2>
            <p className="mt-2 max-w-3xl text-sm leading-7 text-muted-foreground">
            </p>
          </div>
        </div>
      </div>
      <div className="grid gap-6 xl:grid-cols-[minmax(0,0.9fr)_minmax(0,1.1fr)]">
        <Card>
          <CardHeader>
            <CardTitle></CardTitle>
            <CardDescription>
              /
            </CardDescription>
          </CardHeader>
          <CardContent className="space-y-4">
            <div className="rounded-3xl border border-border/70 bg-background/50 p-4 text-sm leading-7 text-muted-foreground">
              <p></p>
              <ul className="mt-2 list-disc space-y-1 pl-5">
                <li></li>
                <li>Markdown </li>
                <li> / </li>
                <li></li>
                <li></li>
              </ul>
            </div>
            <Button
              disabled={exporting}
              onClick={async () => {
                // Fetch the full backup document and save it client-side;
                // the timestamp is sanitized for use in a filename.
                try {
                  setExporting(true)
                  const backup = await adminApi.exportSiteBackup()
                  const exportedAt = backup.exported_at.replaceAll(':', '-').replaceAll('.', '-')
                  downloadJson(`termi-backup-${exportedAt}.json`, backup)
                  toast.success('备份已导出到本地。')
                } catch (error) {
                  toast.error(error instanceof ApiError ? error.message : '导出备份失败。')
                } finally {
                  setExporting(false)
                }
              }}
            >
              <Download className="h-4 w-4" />
              {exporting ? '导出中...' : '下载备份 JSON'}
            </Button>
          </CardContent>
        </Card>
        <Card>
          <CardHeader>
            <CardTitle></CardTitle>
            <CardDescription>
              merge / replace replace markdown
            </CardDescription>
          </CardHeader>
          <CardContent className="space-y-5">
            <div className="grid gap-4 md:grid-cols-[220px_minmax(0,1fr)]">
              <Select
                value={importMode}
                onChange={(event) => setImportMode(event.target.value as 'merge' | 'replace')}
              >
                <option value="merge">merge</option>
                <option value="replace">replace</option>
              </Select>
              <Input
                type="file"
                accept="application/json"
                onChange={async (event) => {
                  // Parse the chosen file eagerly so the preview card can show
                  // counts before the user commits to an import.
                  const file = event.target.files?.item(0) ?? null
                  setSelectedFile(file)
                  setLastImportResult(null)
                  if (!file) {
                    setSelectedBackup(null)
                    return
                  }
                  try {
                    const parsed = JSON.parse(await file.text()) as SiteBackupDocument
                    setSelectedBackup(parsed)
                  } catch {
                    setSelectedBackup(null)
                    toast.error('备份文件不是合法的 JSON。')
                  }
                }}
              />
            </div>
            <div className="rounded-3xl border border-dashed border-border/70 bg-background/40 p-4 text-sm text-muted-foreground">
              <p className="font-medium text-foreground"></p>
              <ul className="mt-2 list-disc space-y-1 pl-5 leading-6">
                <li>replace markdown / / </li>
                <li>访</li>
                <li></li>
              </ul>
            </div>
            {selectedBackup ? (
              <div className="rounded-3xl border border-border/70 bg-background/50 p-4">
                <div className="flex flex-wrap items-center gap-2">
                  <Badge variant="outline"> {selectedBackup.version}</Badge>
                  <Badge variant="outline"> {selectedBackup.exported_at}</Badge>
                  <Badge variant="secondary">{selectedBackup.includes_storage_binaries ? '包含二进制' : '仅对象清单'}</Badge>
                </div>
                <div className="mt-4 grid gap-3 sm:grid-cols-2 xl:grid-cols-3 text-sm text-muted-foreground">
                  <div>{backupStats?.posts ?? 0}</div>
                  <div>{backupStats?.categories ?? 0}</div>
                  <div>{backupStats?.tags ?? 0}</div>
                  <div>{backupStats?.reviews ?? 0}</div>
                  <div>{backupStats?.friendLinks ?? 0}</div>
                  <div>{backupStats?.mediaAssets ?? 0}</div>
                  <div>{backupStats?.storageManifest ?? 0}</div>
                </div>
                <p className="mt-4 text-sm leading-6 text-muted-foreground">{selectedBackup.warning}</p>
              </div>
            ) : (
              <div className="rounded-3xl border border-dashed border-border/70 bg-background/40 px-5 py-8 text-center text-sm text-muted-foreground">
                {selectedFile ? '当前文件未通过 JSON 校验。' : '选择一个备份 JSON 后,这里会显示导入概览。'}
              </div>
            )}
            <div className="flex flex-wrap items-center gap-3">
              <Button
                disabled={!selectedBackup || importing}
                variant={importMode === 'replace' ? 'danger' : 'default'}
                onClick={async () => {
                  // Guard: replace mode is destructive, so require an explicit
                  // browser confirm before calling the import endpoint.
                  if (!selectedBackup) {
                    return
                  }
                  if (
                    importMode === 'replace' &&
                    !window.confirm('replace 会覆盖当前内容,确认继续吗?')
                  ) {
                    return
                  }
                  try {
                    setImporting(true)
                    const result = await adminApi.importSiteBackup({
                      backup: selectedBackup,
                      mode: importMode,
                    })
                    setLastImportResult(result)
                    toast.success('备份已导入。')
                  } catch (error) {
                    toast.error(error instanceof ApiError ? error.message : '导入备份失败。')
                  } finally {
                    setImporting(false)
                  }
                }}
              >
                <Upload className="h-4 w-4" />
                {importing ? '导入中...' : importMode === 'replace' ? '执行覆盖恢复' : '执行合并恢复'}
              </Button>
              <Button
                variant="outline"
                onClick={() => {
                  // Reset the import form without touching server state.
                  setSelectedFile(null)
                  setSelectedBackup(null)
                  setLastImportResult(null)
                }}
              >
                <RefreshCcw className="h-4 w-4" />
              </Button>
            </div>
          </CardContent>
        </Card>
      </div>
      {lastImportResult ? (
        <Card>
          <CardHeader>
            <CardTitle></CardTitle>
            <CardDescription>{lastImportResult.mode}</CardDescription>
          </CardHeader>
          <CardContent className="grid gap-3 sm:grid-cols-2 xl:grid-cols-4 text-sm text-muted-foreground">
            <div>{lastImportResult.site_settings_restored ? '已恢复' : '未恢复'}</div>
            <div>{lastImportResult.posts_written}</div>
            <div>{lastImportResult.categories_upserted}</div>
            <div>{lastImportResult.tags_upserted}</div>
            <div>{lastImportResult.reviews_upserted}</div>
            <div>{lastImportResult.friend_links_upserted}</div>
            <div>{lastImportResult.media_assets_upserted}</div>
            <div>{lastImportResult.storage_manifest_items}</div>
            <div className="sm:col-span-2 xl:col-span-4">{lastImportResult.warning}</div>
          </CardContent>
        </Card>
      ) : null}
    </div>
  )
}

View File

@@ -0,0 +1,402 @@
import { Folders, Plus, RefreshCcw, Save, Trash2 } from 'lucide-react'
import { startTransition, useCallback, useEffect, useMemo, useState } from 'react'
import { toast } from 'sonner'
import { FormField } from '@/components/form-field'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'
import { Input } from '@/components/ui/input'
import { Skeleton } from '@/components/ui/skeleton'
import { Textarea } from '@/components/ui/textarea'
import { adminApi, ApiError } from '@/lib/api'
import { emptyToNull, formatDateTime } from '@/lib/admin-format'
import type { CategoryRecord, TaxonomyPayload } from '@/lib/types'
// Editable, always-string representation of a category. Nullable backend
// columns are represented as empty strings so every <Input>/<Textarea> stays
// a controlled component.
type CategoryFormState = {
  name: string
  slug: string
  description: string
  coverImage: string
  accentColor: string
  seoTitle: string
  seoDescription: string
}
// Blank initial state used for the "create category" form and after resets.
const defaultCategoryForm: CategoryFormState = {
  name: '',
  slug: '',
  description: '',
  coverImage: '',
  accentColor: '',
  seoTitle: '',
  seoDescription: '',
}
/**
 * Maps a backend CategoryRecord onto the editable form shape, coalescing
 * nullable columns to empty strings so inputs remain controlled.
 */
function toFormState(item: CategoryRecord): CategoryFormState {
  const orEmpty = (value: string | null | undefined) => value ?? ''
  return {
    name: item.name,
    slug: item.slug,
    description: orEmpty(item.description),
    coverImage: orEmpty(item.cover_image),
    accentColor: orEmpty(item.accent_color),
    seoTitle: orEmpty(item.seo_title),
    seoDescription: orEmpty(item.seo_description),
  }
}
/**
 * Converts the editable form state into the API payload: the name is trimmed,
 * and every optional field is collapsed to null when blank via the shared
 * emptyToNull helper.
 */
function toPayload(form: CategoryFormState): TaxonomyPayload {
  const { name, slug, description, coverImage, accentColor, seoTitle, seoDescription } = form
  return {
    name: name.trim(),
    slug: emptyToNull(slug),
    description: emptyToNull(description),
    coverImage: emptyToNull(coverImage),
    accentColor: emptyToNull(accentColor),
    seoTitle: emptyToNull(seoTitle),
    seoDescription: emptyToNull(seoDescription),
  }
}
/**
 * Admin page for managing blog categories.
 *
 * Left column: searchable category list (filter over name / slug /
 * description / SEO title). Right column: create/edit form with SEO fields,
 * accent color picker, and per-category stats.
 *
 * NOTE(review): several JSX text nodes below are empty (e.g. <Badge>,
 * <CardTitle>, section labels) — this looks like CJK copy lost in extraction;
 * confirm against the original file before relying on the rendered labels.
 */
export function CategoriesPage() {
  const [items, setItems] = useState<CategoryRecord[]>([])
  // id of the category being edited; null means "create new".
  const [selectedId, setSelectedId] = useState<number | null>(null)
  const [form, setForm] = useState<CategoryFormState>(defaultCategoryForm)
  const [loading, setLoading] = useState(true)
  const [refreshing, setRefreshing] = useState(false)
  const [saving, setSaving] = useState(false)
  const [deleting, setDeleting] = useState(false)
  const [searchTerm, setSearchTerm] = useState('')
  // Fetches the category list; `showToast` enables the spinner + success toast
  // used by the manual refresh button (initial load stays silent).
  const loadCategories = useCallback(async (showToast = false) => {
    try {
      if (showToast) {
        setRefreshing(true)
      }
      const next = await adminApi.listCategories()
      startTransition(() => {
        setItems(next)
      })
      if (showToast) {
        toast.success('分类列表已刷新。')
      }
    } catch (error) {
      // 401 = session expired; handled by the app-level auth flow, so no toast.
      if (error instanceof ApiError && error.status === 401) {
        return
      }
      toast.error(error instanceof ApiError ? error.message : '无法加载分类列表。')
    } finally {
      setLoading(false)
      setRefreshing(false)
    }
  }, [])
  useEffect(() => {
    void loadCategories(false)
  }, [loadCategories])
  // Case-insensitive substring filter across the searchable fields.
  const filteredItems = useMemo(() => {
    const keyword = searchTerm.trim().toLowerCase()
    if (!keyword) {
      return items
    }
    return items.filter((item) =>
      [item.name, item.slug, item.description ?? '', item.seo_title ?? '']
        .join('\n')
        .toLowerCase()
        .includes(keyword),
    )
  }, [items, searchTerm])
  const selectedItem = useMemo(
    () => items.find((item) => item.id === selectedId) ?? null,
    [items, selectedId],
  )
  // Clears the selection and returns the form to "create new" mode.
  const resetForm = useCallback(() => {
    setSelectedId(null)
    setForm(defaultCategoryForm)
  }, [])
  // Creates or updates depending on whether a category is selected; on success
  // the local list and form are synced to the server response.
  const handleSave = useCallback(async () => {
    if (!form.name.trim()) {
      toast.error('请先填写分类名称。')
      return
    }
    try {
      setSaving(true)
      if (selectedId) {
        const updated = await adminApi.updateCategory(selectedId, toPayload(form))
        startTransition(() => {
          setItems((current) => current.map((item) => (item.id === updated.id ? updated : item)))
          setSelectedId(updated.id)
          setForm(toFormState(updated))
        })
        toast.success('分类已更新。')
      } else {
        const created = await adminApi.createCategory(toPayload(form))
        startTransition(() => {
          setItems((current) => [created, ...current])
          setSelectedId(created.id)
          setForm(toFormState(created))
        })
        toast.success('分类已创建。')
      }
    } catch (error) {
      toast.error(error instanceof ApiError ? error.message : '保存分类失败。')
    } finally {
      setSaving(false)
    }
  }, [form, selectedId])
  // Deletes the selected category after a confirm dialog, then resets the form.
  const handleDelete = useCallback(async () => {
    if (!selectedItem) {
      return
    }
    if (!window.confirm(`确认删除分类「${selectedItem.name}」吗?相关文章会同步移除该分类引用。`)) {
      return
    }
    try {
      setDeleting(true)
      await adminApi.deleteCategory(selectedItem.id)
      startTransition(() => {
        setItems((current) => current.filter((item) => item.id !== selectedItem.id))
      })
      toast.success('分类已删除。')
      resetForm()
    } catch (error) {
      toast.error(error instanceof ApiError ? error.message : '删除分类失败。')
    } finally {
      setDeleting(false)
    }
  }, [resetForm, selectedItem])
  // Skeleton placeholders while the first load is in flight.
  if (loading) {
    return (
      <div className="space-y-6">
        <Skeleton className="h-40 rounded-3xl" />
        <Skeleton className="h-[720px] rounded-3xl" />
      </div>
    )
  }
  return (
    <div className="space-y-6">
      <div className="flex flex-col gap-4 xl:flex-row xl:items-end xl:justify-between">
        <div className="space-y-3">
          <Badge variant="secondary"></Badge>
          <div>
            <h2 className="text-3xl font-semibold tracking-tight"></h2>
            <p className="mt-2 max-w-3xl text-sm leading-7 text-muted-foreground">
              SEO
            </p>
          </div>
        </div>
        <div className="flex flex-wrap items-center gap-3">
          <Button variant="outline" onClick={resetForm}>
            <Plus className="h-4 w-4" />
          </Button>
          <Button variant="secondary" onClick={() => void loadCategories(true)} disabled={refreshing}>
            <RefreshCcw className="h-4 w-4" />
            {refreshing ? '刷新中...' : '刷新'}
          </Button>
        </div>
      </div>
      <div className="grid gap-6 xl:grid-cols-[0.92fr_1.08fr]">
        <Card>
          <CardHeader>
            <CardTitle></CardTitle>
            <CardDescription>slug SEO </CardDescription>
          </CardHeader>
          <CardContent className="space-y-4">
            <Input
              placeholder="按分类名 / slug / 描述搜索"
              value={searchTerm}
              onChange={(event) => setSearchTerm(event.target.value)}
            />
            {filteredItems.length ? (
              <div className="space-y-3">
                {filteredItems.map((item) => (
                  <button
                    key={item.id}
                    type="button"
                    onClick={() => {
                      setSelectedId(item.id)
                      setForm(toFormState(item))
                    }}
                    className={`w-full rounded-3xl border px-4 py-4 text-left transition ${
                      selectedId === item.id
                        ? 'border-primary/30 bg-primary/10 shadow-[0_12px_30px_rgba(37,99,235,0.12)]'
                        : 'border-border/70 bg-background/60 hover:border-border'
                    }`}
                  >
                    <div className="flex items-start justify-between gap-3">
                      <div className="min-w-0 space-y-2">
                        <div className="flex flex-wrap items-center gap-2">
                          <span className="font-medium">{item.name}</span>
                          <Badge variant="outline">{item.slug}</Badge>
                          {item.accent_color ? (
                            <span
                              className="inline-flex h-5 w-5 rounded-full border border-border/80"
                              style={{ backgroundColor: item.accent_color }}
                            />
                          ) : null}
                        </div>
                        <p className="text-sm text-muted-foreground">
                          {item.description || `${item.count} 篇文章正在使用这个分类`}
                        </p>
                      </div>
                      <Badge variant={item.count > 0 ? 'success' : 'secondary'}>{item.count}</Badge>
                    </div>
                  </button>
                ))}
              </div>
            ) : (
              <div className="rounded-3xl border border-dashed border-border/70 bg-background/40 px-5 py-10 text-center text-sm text-muted-foreground">
              </div>
            )}
          </CardContent>
        </Card>
        <Card>
          <CardHeader>
            <div className="flex items-center gap-3">
              <div className="flex h-11 w-11 items-center justify-center rounded-2xl border border-primary/20 bg-primary/10 text-primary">
                <Folders className="h-5 w-5" />
              </div>
              <div>
                <CardTitle>{selectedItem ? '编辑分类' : '新建分类'}</CardTitle>
                <CardDescription>
                  / slug SEO
                </CardDescription>
              </div>
            </div>
          </CardHeader>
          <CardContent className="space-y-6">
            <div className="grid gap-4 lg:grid-cols-2">
              <FormField label="分类名称" hint="例如:前端工程、随笔、工具链。">
                <Input
                  value={form.name}
                  onChange={(event) => setForm((current) => ({ ...current, name: event.target.value }))}
                  placeholder="输入分类名称"
                />
              </FormField>
              <FormField label="分类 slug" hint="留空时自动从英文名称生成;中文建议手填。">
                <Input
                  value={form.slug}
                  onChange={(event) => setForm((current) => ({ ...current, slug: event.target.value }))}
                  placeholder="frontend-engineering"
                />
              </FormField>
              <FormField label="封面图 URL" hint="可选,用于前台分类头图。">
                <Input
                  value={form.coverImage}
                  onChange={(event) =>
                    setForm((current) => ({ ...current, coverImage: event.target.value }))
                  }
                  placeholder="https://cdn.example.com/covers/frontend.jpg"
                />
              </FormField>
              <FormField label="强调色" hint="可选,用于前台分类详情强调色。">
                <div className="flex items-center gap-3">
                  <Input
                    value={form.accentColor}
                    onChange={(event) =>
                      setForm((current) => ({ ...current, accentColor: event.target.value }))
                    }
                    placeholder="#3b82f6"
                  />
                  <input
                    type="color"
                    value={form.accentColor || '#2563eb'}
                    onChange={(event) =>
                      setForm((current) => ({ ...current, accentColor: event.target.value }))
                    }
                    className="h-10 w-14 rounded-xl border border-input bg-background px-1"
                  />
                </div>
              </FormField>
            </div>
            <FormField label="分类描述" hint="会展示在前台分类卡片和分类详情区域。">
              <Textarea
                value={form.description}
                onChange={(event) =>
                  setForm((current) => ({ ...current, description: event.target.value }))
                }
                rows={4}
                placeholder="介绍这个分类主要收录哪些内容。"
              />
            </FormField>
            <div className="grid gap-4 lg:grid-cols-2">
              <FormField label="SEO 标题" hint="留空时前台继续回退到常规标题。">
                <Input
                  value={form.seoTitle}
                  onChange={(event) =>
                    setForm((current) => ({ ...current, seoTitle: event.target.value }))
                  }
                  placeholder="前端工程专题 - Termi"
                />
              </FormField>
              <FormField label="SEO 描述" hint="搜索引擎摘要或社交分享描述。">
                <Textarea
                  value={form.seoDescription}
                  onChange={(event) =>
                    setForm((current) => ({ ...current, seoDescription: event.target.value }))
                  }
                  rows={4}
                  placeholder="这个分类汇总了工程化、构建链路与调优经验。"
                />
              </FormField>
            </div>
            <div className="grid gap-4 rounded-3xl border border-border/70 bg-background/50 p-4 md:grid-cols-3">
              <div>
                <p className="text-xs uppercase tracking-[0.18em] text-muted-foreground"></p>
                <p className="mt-2 text-2xl font-semibold text-foreground">{selectedItem?.count ?? 0}</p>
              </div>
              <div>
                <p className="text-xs uppercase tracking-[0.18em] text-muted-foreground"></p>
                <p className="mt-2 text-sm text-muted-foreground">{formatDateTime(selectedItem?.created_at)}</p>
              </div>
              <div>
                <p className="text-xs uppercase tracking-[0.18em] text-muted-foreground"></p>
                <p className="mt-2 text-sm text-muted-foreground">{formatDateTime(selectedItem?.updated_at)}</p>
              </div>
            </div>
            <div className="flex flex-wrap items-center gap-3">
              <Button onClick={() => void handleSave()} disabled={saving}>
                <Save className="h-4 w-4" />
                {saving ? '保存中...' : selectedItem ? '保存分类' : '创建分类'}
              </Button>
              <Button variant="outline" onClick={resetForm}>
              </Button>
              <Button
                variant="ghost"
                onClick={() => void handleDelete()}
                disabled={!selectedItem || deleting}
                className="text-rose-600 hover:text-rose-600"
              >
                <Trash2 className="h-4 w-4" />
                {deleting ? '删除中...' : '删除分类'}
              </Button>
            </div>
          </CardContent>
        </Card>
      </div>
    </div>
  )
}

View File

@@ -4,6 +4,7 @@ import {
Image as ImageIcon,
RefreshCcw,
Replace,
Save,
Square,
Trash2,
Upload,
@@ -24,6 +25,8 @@ import {
normalizeCoverImageWithPrompt,
} from '@/lib/image-compress'
import type { AdminMediaObjectResponse } from '@/lib/types'
import { FormField } from '@/components/form-field'
import { Textarea } from '@/components/ui/textarea'
function formatBytes(value: number) {
if (!Number.isFinite(value) || value <= 0) {
@@ -39,6 +42,47 @@ function formatBytes(value: number) {
return `${size >= 10 || unitIndex === 0 ? size.toFixed(0) : size.toFixed(1)} ${units[unitIndex]}`
}
// Editable metadata for a media object. All fields are strings (tags are a
// comma-separated string) so the plain text inputs stay controlled.
type MediaMetadataFormState = {
  title: string
  altText: string
  caption: string
  tags: string
  notes: string
}
// Blank state used when no media object is selected or after a reset.
const defaultMetadataForm: MediaMetadataFormState = {
  title: '',
  altText: '',
  caption: '',
  tags: '',
  notes: '',
}
/**
 * Builds the metadata form state for a media object, coalescing nullable
 * fields to empty strings and flattening tags to a comma-separated string.
 * Falls back to the blank defaults when no item is selected.
 */
function toMetadataForm(item: AdminMediaObjectResponse | null): MediaMetadataFormState {
  if (!item) {
    return defaultMetadataForm
  }
  const { title, alt_text, caption, tags, notes } = item
  return {
    title: title ?? '',
    altText: alt_text ?? '',
    caption: caption ?? '',
    tags: tags.join(', '),
    notes: notes ?? '',
  }
}
/**
 * Splits a comma-separated tag string into trimmed, non-empty,
 * de-duplicated tags, preserving first-seen order.
 */
function parseTagList(value: string) {
  const seen = new Set<string>()
  for (const raw of value.split(',')) {
    const tag = raw.trim()
    if (tag) {
      seen.add(tag)
    }
  }
  return [...seen]
}
export function MediaPage() {
const [items, setItems] = useState<AdminMediaObjectResponse[]>([])
const [loading, setLoading] = useState(true)
@@ -54,6 +98,9 @@ export function MediaPage() {
const [bucket, setBucket] = useState<string | null>(null)
const [uploadFiles, setUploadFiles] = useState<File[]>([])
const [selectedKeys, setSelectedKeys] = useState<string[]>([])
const [activeKey, setActiveKey] = useState<string | null>(null)
const [metadataForm, setMetadataForm] = useState<MediaMetadataFormState>(defaultMetadataForm)
const [metadataSaving, setMetadataSaving] = useState(false)
const [compressBeforeUpload, setCompressBeforeUpload] = useState(true)
const [compressQuality, setCompressQuality] = useState('0.82')
@@ -90,6 +137,25 @@ export function MediaPage() {
)
}, [items])
useEffect(() => {
if (!items.length) {
setActiveKey(null)
setMetadataForm(defaultMetadataForm)
return
}
setActiveKey((current) => (current && items.some((item) => item.key === current) ? current : items[0].key))
}, [items])
const activeItem = useMemo(
() => items.find((item) => item.key === activeKey) ?? null,
[activeKey, items],
)
useEffect(() => {
setMetadataForm(toMetadataForm(activeItem))
}, [activeItem])
const filteredItems = useMemo(() => {
const keyword = searchTerm.trim().toLowerCase()
if (!keyword) {
@@ -266,6 +332,140 @@ export function MediaPage() {
</CardContent>
</Card>
{activeItem ? (
<Card>
<CardHeader>
<CardTitle></CardTitle>
<CardDescription>
{activeItem.key}alt /
</CardDescription>
</CardHeader>
<CardContent className="space-y-6">
<div className="grid gap-4 lg:grid-cols-[minmax(0,1.2fr)_minmax(320px,0.8fr)]">
<div className="space-y-4">
<div className="grid gap-4 lg:grid-cols-2">
<FormField label="标题" hint="媒体资源的人类可读名称。">
<Input
value={metadataForm.title}
onChange={(event) =>
setMetadataForm((current) => ({ ...current, title: event.target.value }))
}
placeholder="文章封面 / 站点横幅"
/>
</FormField>
<FormField label="Alt 文本" hint="用于 img alt 和无障碍描述。">
<Input
value={metadataForm.altText}
onChange={(event) =>
setMetadataForm((current) => ({ ...current, altText: event.target.value }))
}
placeholder="夜色下的终端风格博客封面"
/>
</FormField>
</div>
<FormField label="标签" hint="多个标签用英文逗号分隔。">
<Input
value={metadataForm.tags}
onChange={(event) =>
setMetadataForm((current) => ({ ...current, tags: event.target.value }))
}
placeholder="cover, astro, terminal"
/>
</FormField>
<FormField label="Caption" hint="适合前台图注、图片说明。">
<Textarea
value={metadataForm.caption}
onChange={(event) =>
setMetadataForm((current) => ({ ...current, caption: event.target.value }))
}
rows={4}
placeholder="这张图通常用于文章列表和详情页头图。"
/>
</FormField>
<FormField label="内部备注" hint="仅后台使用,例如素材来源、版权或推荐用途。">
<Textarea
value={metadataForm.notes}
onChange={(event) =>
setMetadataForm((current) => ({ ...current, notes: event.target.value }))
}
rows={4}
placeholder="来源Unsplash / 站点截图 / AI 生成"
/>
</FormField>
<div className="flex flex-wrap items-center gap-3">
<Button
disabled={metadataSaving}
onClick={async () => {
if (!activeItem) {
return
}
try {
setMetadataSaving(true)
const result = await adminApi.updateMediaObjectMetadata({
key: activeItem.key,
title: metadataForm.title || null,
altText: metadataForm.altText || null,
caption: metadataForm.caption || null,
tags: parseTagList(metadataForm.tags),
notes: metadataForm.notes || null,
})
startTransition(() => {
setItems((current) =>
current.map((item) =>
item.key === result.key
? {
...item,
title: result.title,
alt_text: result.alt_text,
caption: result.caption,
tags: result.tags,
notes: result.notes,
}
: item,
),
)
})
toast.success('媒体元数据已保存。')
} catch (error) {
toast.error(error instanceof ApiError ? error.message : '保存媒体元数据失败。')
} finally {
setMetadataSaving(false)
}
}}
>
<Save className="h-4 w-4" />
{metadataSaving ? '保存中...' : '保存元数据'}
</Button>
<Button variant="outline" onClick={() => setMetadataForm(toMetadataForm(activeItem))}>
</Button>
</div>
</div>
<div className="space-y-4 rounded-3xl border border-border/70 bg-background/50 p-4">
<div className="aspect-[16/9] overflow-hidden rounded-2xl border border-border/70 bg-muted/30">
<img
src={activeItem.url}
alt={metadataForm.altText || activeItem.key}
className="h-full w-full object-cover"
/>
</div>
<div className="space-y-2 text-sm text-muted-foreground">
<p className="break-all font-medium text-foreground">{activeItem.key}</p>
<p>{formatBytes(activeItem.size_bytes)} · {activeItem.last_modified ?? '未知修改时间'}</p>
<p>{metadataForm.altText || '尚未填写 alt 文本'}</p>
</div>
</div>
</div>
</CardContent>
</Card>
) : null}
{loading ? (
<Skeleton className="h-[520px] rounded-3xl" />
) : (
@@ -275,7 +475,10 @@ export function MediaPage() {
const replaceInputId = `replace-media-${index}`
return (
<Card key={item.key} className="overflow-hidden">
<Card
key={item.key}
className={`overflow-hidden ${activeKey === item.key ? 'ring-1 ring-primary/40' : ''}`}
>
<div className="relative aspect-[16/9] overflow-hidden bg-muted/30">
<img src={item.url} alt={item.key} className="h-full w-full object-cover" />
<button
@@ -300,8 +503,21 @@ export function MediaPage() {
<span>{formatBytes(item.size_bytes)}</span>
{item.last_modified ? <span>{item.last_modified}</span> : null}
</div>
{item.title ? <p className="text-sm text-foreground">{item.title}</p> : null}
{item.tags.length ? (
<div className="flex flex-wrap gap-2">
{item.tags.slice(0, 4).map((tag) => (
<Badge key={`${item.key}-${tag}`} variant="outline">
{tag}
</Badge>
))}
</div>
) : null}
</div>
<div className="flex flex-wrap gap-2">
<Button size="sm" variant="outline" onClick={() => setActiveKey(item.key)}>
</Button>
<Button
size="sm"
variant="outline"

View File

@@ -139,7 +139,7 @@ export function PostComparePage({ slugOverride }: { slugOverride?: string }) {
<GitCompareArrows className="h-4 w-4" />
vs 稿
</CardTitle>
<CardDescription>{state.path}</CardDescription>
<CardDescription>稿</CardDescription>
</CardHeader>
</Card>

View File

@@ -177,7 +177,7 @@ export function PostPolishPage() {
<Card>
<CardHeader>
<CardTitle> vs </CardTitle>
<CardDescription>{snapshot.path}</CardDescription>
<CardDescription> AI </CardDescription>
</CardHeader>
<CardContent className="space-y-4">
<div className="flex flex-wrap items-center gap-3">

View File

@@ -237,6 +237,11 @@ function formatWorkbenchStateLabel(
.join(' / ')}`
}
/**
 * Produces the virtual editor path for a post slug; blank/whitespace slugs
 * map to the "new-post" placeholder.
 */
function buildVirtualPostPath(slug: string) {
  const trimmed = slug.trim()
  return `article://posts/${trimmed === '' ? 'new-post' : trimmed}`
}
function parseImageList(value: string) {
return value
.split('\n')
@@ -820,6 +825,26 @@ export function PostsPage() {
const [pinnedFilter, setPinnedFilter] = useState('all')
const [currentPage, setCurrentPage] = useState(1)
const [pageSize, setPageSize] = useState<number>(POSTS_PAGE_SIZE_OPTIONS[0])
const [sortKey, setSortKey] = useState('updated_at_desc')
const [totalPosts, setTotalPosts] = useState(0)
const [totalPages, setTotalPages] = useState(1)
const { sortBy, sortOrder } = useMemo(() => {
switch (sortKey) {
case 'created_at_asc':
return { sortBy: 'created_at', sortOrder: 'asc' }
case 'created_at_desc':
return { sortBy: 'created_at', sortOrder: 'desc' }
case 'title_asc':
return { sortBy: 'title', sortOrder: 'asc' }
case 'title_desc':
return { sortBy: 'title', sortOrder: 'desc' }
case 'updated_at_asc':
return { sortBy: 'updated_at', sortOrder: 'asc' }
default:
return { sortBy: 'updated_at', sortOrder: 'desc' }
}
}, [sortKey])
const loadPosts = useCallback(async (showToast = false) => {
try {
@@ -827,9 +852,28 @@ export function PostsPage() {
setRefreshing(true)
}
const next = await adminApi.listPosts()
const next = await adminApi.listPostsPage({
search: searchTerm.trim() || undefined,
postType: typeFilter === 'all' ? undefined : typeFilter,
pinned:
pinnedFilter === 'all'
? undefined
: pinnedFilter === 'pinned',
includePrivate: true,
includeRedirects: true,
preview: true,
page: currentPage,
pageSize,
sortBy,
sortOrder,
})
startTransition(() => {
setPosts(next)
setPosts(next.items)
setTotalPosts(next.total)
setTotalPages(next.total_pages)
if (next.page !== currentPage) {
setCurrentPage(next.page)
}
})
if (showToast) {
@@ -844,7 +888,7 @@ export function PostsPage() {
setLoading(false)
setRefreshing(false)
}
}, [])
}, [currentPage, pageSize, pinnedFilter, searchTerm, sortBy, sortOrder, typeFilter])
const loadEditor = useCallback(
async (nextSlug: string) => {
@@ -931,49 +975,17 @@ export function PostsPage() {
}
}, [createDialogOpen, metadataDialog, navigate, slug])
const normalizedSearchTerm = searchTerm.trim().toLowerCase()
const filteredPosts = useMemo(() => {
return posts.filter((post) => {
const matchesSearch =
!normalizedSearchTerm ||
[
post.title ?? '',
post.slug,
post.category ?? '',
post.description ?? '',
post.post_type ?? '',
postTagsToList(post.tags).join(' '),
]
.join('\n')
.toLowerCase()
.includes(normalizedSearchTerm)
const matchesType = typeFilter === 'all' || (post.post_type ?? 'article') === typeFilter
const pinnedValue = Boolean(post.pinned)
const matchesPinned =
pinnedFilter === 'all' ||
(pinnedFilter === 'pinned' && pinnedValue) ||
(pinnedFilter === 'regular' && !pinnedValue)
return matchesSearch && matchesType && matchesPinned
})
}, [normalizedSearchTerm, pinnedFilter, posts, typeFilter])
useEffect(() => {
setCurrentPage(1)
}, [pageSize, pinnedFilter, searchTerm, typeFilter])
}, [pageSize, pinnedFilter, searchTerm, sortKey, typeFilter])
const totalPages = Math.max(1, Math.ceil(filteredPosts.length / pageSize))
const safeCurrentPage = Math.min(currentPage, totalPages)
useEffect(() => {
setCurrentPage((current) => Math.min(current, totalPages))
}, [totalPages])
const paginatedPosts = useMemo(() => {
const startIndex = (safeCurrentPage - 1) * pageSize
return filteredPosts.slice(startIndex, startIndex + pageSize)
}, [filteredPosts, pageSize, safeCurrentPage])
const paginatedPosts = posts
const paginationItems = useMemo(() => {
const maxVisiblePages = 5
@@ -988,8 +1000,8 @@ export function PostsPage() {
return Array.from({ length: endPage - startPage + 1 }, (_, index) => startPage + index)
}, [safeCurrentPage, totalPages])
const pageStart = filteredPosts.length ? (safeCurrentPage - 1) * pageSize + 1 : 0
const pageEnd = filteredPosts.length ? Math.min(safeCurrentPage * pageSize, filteredPosts.length) : 0
const pageStart = totalPosts ? (safeCurrentPage - 1) * pageSize + 1 : 0
const pageEnd = totalPosts ? Math.min(safeCurrentPage * pageSize, totalPosts) : 0
const pinnedPostCount = useMemo(
() => posts.filter((post) => Boolean(post.pinned)).length,
[posts],
@@ -1138,9 +1150,7 @@ export function PostsPage() {
setMetadataDialog({
target: 'create',
title: createForm.title.trim() || createForm.slug.trim() || '新建草稿',
path: createForm.slug.trim()
? `backend/content/posts/${createForm.slug.trim()}.md`
: 'backend/content/posts/new-post.md',
path: buildVirtualPostPath(createForm.slug),
proposal: nextProposal,
})
})
@@ -1904,7 +1914,7 @@ export function PostsPage() {
</CardDescription>
</div>
<Badge variant="outline">{filteredPosts.length} / {posts.length}</Badge>
<Badge variant="outline">{paginatedPosts.length} / {totalPosts}</Badge>
</div>
<div className="grid gap-3">
<div className="flex flex-col gap-3 lg:flex-row">
@@ -1921,7 +1931,7 @@ export function PostsPage() {
</Button>
) : null}
</div>
<div className="grid gap-3 sm:grid-cols-2 xl:grid-cols-3">
<div className="grid gap-3 sm:grid-cols-2 xl:grid-cols-4">
<Select value={typeFilter} onChange={(event) => setTypeFilter(event.target.value)}>
<option value="all"></option>
<option value="article"></option>
@@ -1947,11 +1957,18 @@ export function PostsPage() {
</option>
))}
</Select>
<Select value={sortKey} onChange={(event) => setSortKey(event.target.value)}>
<option value="updated_at_desc"></option>
<option value="created_at_desc"></option>
<option value="created_at_asc"></option>
<option value="title_asc"> A Z</option>
<option value="title_desc"> Z A</option>
</Select>
</div>
</div>
<div className="flex flex-wrap gap-2">
<Badge variant="secondary"> {filteredPosts.length}</Badge>
<Badge variant="outline"> {pinnedPostCount}</Badge>
<Badge variant="secondary"> {totalPosts}</Badge>
<Badge variant="outline"> {pinnedPostCount}</Badge>
<Badge variant="outline">
{safeCurrentPage} / {totalPages}
</Badge>
@@ -2008,18 +2025,18 @@ export function PostsPage() {
)
})}
{!filteredPosts.length ? (
{!totalPosts ? (
<div className="rounded-[1.8rem] border border-dashed border-border/80 px-5 py-12 text-center text-sm text-muted-foreground">
</div>
) : null}
</div>
{filteredPosts.length ? (
{totalPosts ? (
<div className="rounded-[1.5rem] border border-border/70 bg-background/65 px-4 py-3">
<div className="flex flex-col gap-3 lg:flex-row lg:items-center lg:justify-between">
<p className="text-sm text-muted-foreground">
{pageStart} - {pageEnd} {filteredPosts.length}
{pageStart} - {pageEnd} {totalPosts}
</p>
<div className="flex flex-wrap items-center gap-2">
<Button
@@ -2116,8 +2133,7 @@ export function PostsPage() {
<Badge variant="outline">{editor.markdown.split(/\r?\n/).length} </Badge>
</div>
<div className="rounded-2xl border border-border/70 bg-background/70 p-4">
<p className="break-all font-mono text-xs text-muted-foreground">{editor.path}</p>
<p className="mt-2 text-sm text-muted-foreground">
<p className="text-sm text-muted-foreground">
{formatDateTime(editor.createdAt)} · {formatDateTime(editor.updatedAt)}
</p>
</div>
@@ -2931,11 +2947,7 @@ export function PostsPage() {
value={createForm.markdown}
originalValue={buildCreateMarkdownForWindow(defaultCreateForm)}
diffValue={buildCreateMarkdownForWindow(createForm)}
path={
createForm.slug.trim()
? `backend/content/posts/${createForm.slug.trim()}.md`
: 'backend/content/posts/new-post.md'
}
path={buildVirtualPostPath(createForm.slug)}
workspaceHeightClassName="h-[clamp(620px,74dvh,920px)]"
mode={createMode}
visiblePanels={createPanels}
@@ -3033,9 +3045,6 @@ export function PostsPage() {
<p className="mt-3 text-base font-semibold">
{metadataDialog.title}
</p>
<p className="mt-2 break-all font-mono text-xs text-muted-foreground">
{metadataDialog.path}
</p>
</div>
<div className="grid gap-3 sm:grid-cols-3 xl:grid-cols-1">

View File

@@ -15,6 +15,7 @@ import { adminApi, ApiError } from '@/lib/api'
import type {
AdminSiteSettingsResponse,
AiProviderConfig,
HumanVerificationMode,
MusicTrack,
SiteSettingsPayload,
} from '@/lib/types'
@@ -65,6 +66,35 @@ const MEDIA_STORAGE_PROVIDER_OPTIONS = [
{ value: 'minio', label: 'MinIO' },
] as const
// Notification delivery channels offered in the settings <Select>.
const NOTIFICATION_CHANNEL_OPTIONS = [
  { value: 'webhook', label: 'Webhook' },
  { value: 'ntfy', label: 'ntfy' },
] as const
// Human-verification modes for comment/subscription forms, in display order.
const HUMAN_VERIFICATION_MODE_OPTIONS = [
  { value: 'off', label: '关闭' },
  { value: 'captcha', label: '普通验证码' },
  { value: 'turnstile', label: 'Turnstile' },
] as const
/**
 * Normalizes a raw verification-mode string to a canonical
 * HumanVerificationMode. Matching is whitespace- and case-insensitive and
 * accepts the legacy aliases 'normal'/'simple' for 'captcha'; anything
 * unrecognized (including null/undefined) resolves to `fallback`.
 */
function normalizeHumanVerificationMode(
  value: string | null | undefined,
  fallback: HumanVerificationMode,
): HumanVerificationMode {
  // Map (not a plain object) so arbitrary input can never hit prototype keys.
  const aliasToMode = new Map<string, HumanVerificationMode>([
    ['off', 'off'],
    ['captcha', 'captcha'],
    ['normal', 'captcha'],
    ['simple', 'captcha'],
    ['turnstile', 'turnstile'],
  ])
  const key = (value ?? '').trim().toLowerCase()
  return aliasToMode.get(key) ?? fallback
}
function isCloudflareProvider(provider: string | null | undefined) {
const normalized = provider?.trim().toLowerCase()
return normalized === 'cloudflare' || normalized === 'cloudflare-workers-ai' || normalized === 'workers-ai'
@@ -89,6 +119,19 @@ function normalizeSettingsResponse(
...input,
ai_providers: aiProviders,
search_synonyms: searchSynonyms,
comment_verification_mode: normalizeHumanVerificationMode(
input.comment_verification_mode,
input.comment_turnstile_enabled ? 'turnstile' : 'captcha',
),
subscription_verification_mode: normalizeHumanVerificationMode(
input.subscription_verification_mode,
input.subscription_turnstile_enabled ? 'turnstile' : 'off',
),
turnstile_site_key: input.turnstile_site_key ?? null,
turnstile_secret_key: input.turnstile_secret_key ?? null,
web_push_vapid_public_key: input.web_push_vapid_public_key ?? null,
web_push_vapid_private_key: input.web_push_vapid_private_key ?? null,
web_push_vapid_subject: input.web_push_vapid_subject ?? null,
ai_active_provider_id:
input.ai_active_provider_id ?? aiProviders[0]?.id ?? null,
}
@@ -113,6 +156,9 @@ function Field({
}
function toPayload(form: AdminSiteSettingsResponse): SiteSettingsPayload {
const commentTurnstileEnabled = form.comment_verification_mode === 'turnstile'
const subscriptionTurnstileEnabled = form.subscription_verification_mode === 'turnstile'
return {
siteName: form.site_name,
siteShortName: form.site_short_name,
@@ -133,6 +179,16 @@ function toPayload(form: AdminSiteSettingsResponse): SiteSettingsPayload {
musicPlaylist: form.music_playlist,
aiEnabled: form.ai_enabled,
paragraphCommentsEnabled: form.paragraph_comments_enabled,
commentVerificationMode: form.comment_verification_mode,
commentTurnstileEnabled,
subscriptionVerificationMode: form.subscription_verification_mode,
subscriptionTurnstileEnabled,
webPushEnabled: form.web_push_enabled,
turnstileSiteKey: form.turnstile_site_key,
turnstileSecretKey: form.turnstile_secret_key,
webPushVapidPublicKey: form.web_push_vapid_public_key,
webPushVapidPrivateKey: form.web_push_vapid_private_key,
webPushVapidSubject: form.web_push_vapid_subject,
aiProvider: form.ai_provider,
aiApiBase: form.ai_api_base,
aiApiKey: form.ai_api_key,
@@ -156,8 +212,13 @@ function toPayload(form: AdminSiteSettingsResponse): SiteSettingsPayload {
seoDefaultOgImage: form.seo_default_og_image,
seoDefaultTwitterHandle: form.seo_default_twitter_handle,
notificationWebhookUrl: form.notification_webhook_url,
notificationChannelType: form.notification_channel_type,
notificationCommentEnabled: form.notification_comment_enabled,
notificationFriendLinkEnabled: form.notification_friend_link_enabled,
subscriptionPopupEnabled: form.subscription_popup_enabled,
subscriptionPopupTitle: form.subscription_popup_title,
subscriptionPopupDescription: form.subscription_popup_description,
subscriptionPopupDelaySeconds: form.subscription_popup_delay_seconds,
searchSynonyms: form.search_synonyms,
}
}
@@ -595,6 +656,181 @@ export function SiteSettingsPage() {
</CardContent>
</Card>
<Card>
<CardHeader>
<CardTitle></CardTitle>
<CardDescription>
/
</CardDescription>
</CardHeader>
<CardContent className="space-y-5">
<label className="flex items-start gap-3 rounded-2xl border border-border/70 bg-background/60 p-4">
<input
type="checkbox"
checked={form.subscription_popup_enabled}
onChange={(event) =>
updateField('subscription_popup_enabled', event.target.checked)
}
className="mt-1 h-4 w-4 rounded border-input text-primary focus:ring-ring"
/>
<div>
<div className="font-medium"></div>
<p className="mt-1 text-sm leading-6 text-muted-foreground">
</p>
</div>
</label>
<div className="grid gap-4 md:grid-cols-2">
<label className="flex items-start gap-3 rounded-2xl border border-border/70 bg-background/60 p-4">
<input
type="checkbox"
checked={form.web_push_enabled}
onChange={(event) => updateField('web_push_enabled', event.target.checked)}
className="mt-1 h-4 w-4 rounded border-input text-primary focus:ring-ring"
/>
<div>
<div className="font-medium"></div>
<p className="mt-1 text-sm leading-6 text-muted-foreground">
VAPID
</p>
</div>
</label>
<div className="rounded-2xl border border-border/70 bg-background/60 p-4">
<Field
label="订阅提交验证方式"
hint="可选 关闭 / 普通验证码 / Turnstile若 Turnstile key 未配置完整,会自动回退到普通验证码。"
>
<Select
value={form.subscription_verification_mode}
onChange={(event) =>
updateField(
'subscription_verification_mode',
normalizeHumanVerificationMode(event.target.value, 'off'),
)
}
>
{HUMAN_VERIFICATION_MODE_OPTIONS.map((option) => (
<option key={option.value} value={option.value}>
{option.label}
</option>
))}
</Select>
</Field>
</div>
</div>
<div className="grid gap-4 lg:grid-cols-2">
<Field label="弹窗标题" hint="建议直接传达价值,例如“订阅更新”或“别错过新文章”。">
<Input
value={form.subscription_popup_title}
onChange={(event) =>
updateField('subscription_popup_title', event.target.value)
}
/>
</Field>
<Field label="触发延迟(秒)" hint="建议保持在 1020 秒,避免首屏强打断。">
<Input
type="number"
min={3}
max={120}
value={form.subscription_popup_delay_seconds}
onChange={(event) =>
updateField(
'subscription_popup_delay_seconds',
event.target.value ? Number(event.target.value) : 18,
)
}
/>
</Field>
</div>
<Field
label="弹窗说明"
hint="建议明确订阅收益、需要邮箱确认,以及可随时退订,降低打扰感。"
>
<Textarea
value={form.subscription_popup_description}
onChange={(event) =>
updateField('subscription_popup_description', event.target.value)
}
/>
</Field>
</CardContent>
</Card>
<Card>
<CardHeader>
<CardTitle> / </CardTitle>
<CardDescription>
退
</CardDescription>
</CardHeader>
<CardContent className="grid gap-6 lg:grid-cols-2">
<Field
label="Turnstile Site Key"
hint="评论区和订阅弹窗共用这一套站点 key保存后前台会在运行时读取。"
>
<Input
value={form.turnstile_site_key ?? ''}
onChange={(event) => updateField('turnstile_site_key', event.target.value)}
placeholder="0x4AAAA..."
/>
</Field>
<Field
label="Turnstile Secret Key"
hint="后端验证 token 使用;留空可清除数据库配置并回退到环境变量。"
>
<Input
value={form.turnstile_secret_key ?? ''}
onChange={(event) => updateField('turnstile_secret_key', event.target.value)}
placeholder="ts-secret-key"
/>
</Field>
<Field
label="Web Push VAPID Public Key"
hint="浏览器订阅按钮会把这把 public key 下发到前台。"
>
<Textarea
value={form.web_push_vapid_public_key ?? ''}
onChange={(event) =>
updateField('web_push_vapid_public_key', event.target.value)
}
placeholder="BEl6..."
rows={3}
/>
</Field>
<Field
label="Web Push VAPID Private Key"
hint="后端发送浏览器推送时签名使用。"
>
<Textarea
value={form.web_push_vapid_private_key ?? ''}
onChange={(event) =>
updateField('web_push_vapid_private_key', event.target.value)
}
placeholder="5aQ..."
rows={3}
/>
</Field>
<div className="lg:col-span-2">
<Field
label="Web Push Subject"
hint="推荐填写 mailto:you@example.com留空时会优先回退到环境变量再退回站点 URL / 默认值。"
>
<Input
value={form.web_push_vapid_subject ?? ''}
onChange={(event) =>
updateField('web_push_vapid_subject', event.target.value)
}
placeholder="mailto:admin@example.com"
/>
</Field>
</div>
</CardContent>
</Card>
<Card>
<CardHeader>
<CardTitle>SEO</CardTitle>
@@ -617,13 +853,39 @@ export function SiteSettingsPage() {
}
/>
</Field>
<div className="lg:col-span-2">
<Field label="Webhook URL" hint="评论和友链申请会向这个地址推送 JSON。">
<div className="grid gap-4 lg:col-span-2 md:grid-cols-[220px_minmax(0,1fr)]">
<Field label="通知渠道" hint="可选 Webhook 或 ntfy。">
<Select
value={form.notification_channel_type}
onChange={(event) =>
updateField('notification_channel_type', event.target.value)
}
>
{NOTIFICATION_CHANNEL_OPTIONS.map((item) => (
<option key={item.value} value={item.value}>
{item.label}
</option>
))}
</Select>
</Field>
<Field
label="通知目标"
hint={
form.notification_channel_type === 'ntfy'
? '支持 topic 名称或完整 ntfy URL。'
: '评论和友链申请会向这个地址推送 JSON。'
}
>
<Input
value={form.notification_webhook_url ?? ''}
onChange={(event) =>
updateField('notification_webhook_url', event.target.value)
}
placeholder={
form.notification_channel_type === 'ntfy'
? 'blog-admin 或 https://ntfy.example.com/blog-admin'
: 'https://example.com/hooks/termi'
}
/>
</Field>
</div>
@@ -640,7 +902,7 @@ export function SiteSettingsPage() {
<div>
<div className="font-medium"></div>
<p className="mt-1 text-sm leading-6 text-muted-foreground">
Webhook
</p>
</div>
</label>
@@ -656,7 +918,7 @@ export function SiteSettingsPage() {
<div>
<div className="font-medium"></div>
<p className="mt-1 text-sm leading-6 text-muted-foreground">
Webhook
</p>
</div>
</label>
@@ -707,6 +969,29 @@ export function SiteSettingsPage() {
</p>
</div>
</label>
<div className="rounded-2xl border border-border/70 bg-background/60 p-4">
<Field
label="评论区验证方式"
hint="文章评论和段落评论都走这里;若选择 Turnstile 但 key / secret 不完整,会自动回退到普通验证码。"
>
<Select
value={form.comment_verification_mode}
onChange={(event) =>
updateField(
'comment_verification_mode',
normalizeHumanVerificationMode(event.target.value, 'captcha'),
)
}
>
{HUMAN_VERIFICATION_MODE_OPTIONS.map((option) => (
<option key={option.value} value={option.value}>
{option.label}
</option>
))}
</Select>
</Field>
</div>
</CardContent>
</Card>

View File

@@ -27,6 +27,7 @@ const CHANNEL_OPTIONS = [
{ value: 'discord', label: 'Discord Webhook' },
{ value: 'telegram', label: 'Telegram Bot API' },
{ value: 'ntfy', label: 'ntfy' },
{ value: 'web_push', label: 'Web Push / Browser Push' },
] as const
const DEFAULT_FILTERS = {
@@ -172,9 +173,9 @@ export function SubscriptionsPage() {
<div className="space-y-3">
<Badge variant="secondary"></Badge>
<div>
<h2 className="text-3xl font-semibold tracking-tight"> / / Digest</h2>
<h2 className="text-3xl font-semibold tracking-tight"> / / </h2>
<p className="mt-2 max-w-3xl text-sm leading-7 text-muted-foreground">
Webhook / Discord / Telegram / ntfy retry pending
Webhook / Discord / Telegram / ntfy / Web Push retry pending
</p>
</div>
</div>
@@ -251,7 +252,15 @@ export function SubscriptionsPage() {
<Input
value={form.target}
onChange={(event) => setForm((current) => ({ ...current, target: event.target.value }))}
placeholder={form.channelType === 'email' ? 'name@example.com' : 'https://...'}
placeholder={
form.channelType === 'email'
? 'name@example.com'
: form.channelType === 'ntfy'
? 'topic-name 或 https://ntfy.example.com/topic'
: form.channelType === 'web_push'
? 'https://push-service/...'
: 'https://...'
}
/>
</div>
<div className="space-y-2">

View File

@@ -0,0 +1,402 @@
import { Plus, RefreshCcw, Save, Tags, Trash2 } from 'lucide-react'
import { startTransition, useCallback, useEffect, useMemo, useState } from 'react'
import { toast } from 'sonner'
import { FormField } from '@/components/form-field'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from '@/components/ui/card'
import { Input } from '@/components/ui/input'
import { Skeleton } from '@/components/ui/skeleton'
import { Textarea } from '@/components/ui/textarea'
import { adminApi, ApiError } from '@/lib/api'
import { emptyToNull, formatDateTime } from '@/lib/admin-format'
import type { TagRecord, TaxonomyPayload } from '@/lib/types'
// Shape of the tag editor form. Every field is a plain string so it can
// drive a controlled input; nullable backend fields are represented as ''.
type TagFormState = {
  name: string
  slug: string
  description: string
  coverImage: string
  accentColor: string
  seoTitle: string
  seoDescription: string
}

// Initial state for the "create tag" form: all fields blank.
const defaultTagForm: TagFormState = {
  name: '',
  slug: '',
  description: '',
  coverImage: '',
  accentColor: '',
  seoTitle: '',
  seoDescription: '',
}
/**
 * Convert an API tag record into editable form state.
 * Nullable backend fields are normalized to empty strings so the
 * controlled inputs never receive null/undefined.
 */
function toFormState(item: TagRecord): TagFormState {
  const orEmpty = (value: string | null | undefined) => value ?? ''
  return {
    name: item.name,
    slug: item.slug,
    description: orEmpty(item.description),
    coverImage: orEmpty(item.cover_image),
    accentColor: orEmpty(item.accent_color),
    seoTitle: orEmpty(item.seo_title),
    seoDescription: orEmpty(item.seo_description),
  }
}
/**
 * Build the create/update payload from the form state.
 * The name is trimmed; all optional fields are converted to null when
 * blank so the backend can distinguish "cleared" from "set".
 */
function toPayload(form: TagFormState): TaxonomyPayload {
  const { name, slug, description, coverImage, accentColor, seoTitle, seoDescription } = form
  return {
    name: name.trim(),
    slug: emptyToNull(slug),
    description: emptyToNull(description),
    coverImage: emptyToNull(coverImage),
    accentColor: emptyToNull(accentColor),
    seoTitle: emptyToNull(seoTitle),
    seoDescription: emptyToNull(seoDescription),
  }
}
/**
 * Admin page for managing tags: a searchable list on the left and a
 * create/edit form on the right, backed by the admin tag CRUD API.
 *
 * NOTE(review): several JSX text nodes below are empty or fragmentary in
 * this revision (e.g. empty <CardTitle> / button labels) — presumably
 * display copy; confirm against the rendered page.
 */
export function TagsPage() {
  // Server data and current selection (null selection = "create" mode).
  const [items, setItems] = useState<TagRecord[]>([])
  const [selectedId, setSelectedId] = useState<number | null>(null)
  const [form, setForm] = useState<TagFormState>(defaultTagForm)
  // Async-operation flags used to disable buttons and swap their labels.
  const [loading, setLoading] = useState(true)
  const [refreshing, setRefreshing] = useState(false)
  const [saving, setSaving] = useState(false)
  const [deleting, setDeleting] = useState(false)
  const [searchTerm, setSearchTerm] = useState('')

  // Fetch the tag list. `showToast` marks a user-triggered refresh: it
  // drives the spinner flag and a success toast. 401 errors are swallowed
  // (presumably the auth layer redirects — confirm).
  const loadTags = useCallback(async (showToast = false) => {
    try {
      if (showToast) {
        setRefreshing(true)
      }
      const next = await adminApi.listTags()
      startTransition(() => {
        setItems(next)
      })
      if (showToast) {
        toast.success('标签列表已刷新。')
      }
    } catch (error) {
      if (error instanceof ApiError && error.status === 401) {
        return
      }
      toast.error(error instanceof ApiError ? error.message : '无法加载标签列表。')
    } finally {
      setLoading(false)
      setRefreshing(false)
    }
  }, [])

  // Initial load on mount.
  useEffect(() => {
    void loadTags(false)
  }, [loadTags])

  // Case-insensitive substring filter across name / slug / description /
  // SEO title (joined with newlines so a match in any field qualifies).
  const filteredItems = useMemo(() => {
    const keyword = searchTerm.trim().toLowerCase()
    if (!keyword) {
      return items
    }
    return items.filter((item) =>
      [item.name, item.slug, item.description ?? '', item.seo_title ?? '']
        .join('\n')
        .toLowerCase()
        .includes(keyword),
    )
  }, [items, searchTerm])

  // Resolve the selected record from the live list (stays fresh after updates).
  const selectedItem = useMemo(
    () => items.find((item) => item.id === selectedId) ?? null,
    [items, selectedId],
  )

  // Switch back to "create" mode with a blank form.
  const resetForm = useCallback(() => {
    setSelectedId(null)
    setForm(defaultTagForm)
  }, [])

  // Create or update depending on whether a tag is selected; on success
  // the list, selection, and form are synced to the server response.
  const handleSave = useCallback(async () => {
    if (!form.name.trim()) {
      toast.error('请先填写标签名称。')
      return
    }
    try {
      setSaving(true)
      if (selectedId) {
        const updated = await adminApi.updateTag(selectedId, toPayload(form))
        startTransition(() => {
          setItems((current) => current.map((item) => (item.id === updated.id ? updated : item)))
          setSelectedId(updated.id)
          setForm(toFormState(updated))
        })
        toast.success('标签已更新。')
      } else {
        const created = await adminApi.createTag(toPayload(form))
        startTransition(() => {
          setItems((current) => [created, ...current])
          setSelectedId(created.id)
          setForm(toFormState(created))
        })
        toast.success('标签已创建。')
      }
    } catch (error) {
      toast.error(error instanceof ApiError ? error.message : '保存标签失败。')
    } finally {
      setSaving(false)
    }
  }, [form, selectedId])

  // Delete the selected tag after a native confirm dialog, then clear the form.
  const handleDelete = useCallback(async () => {
    if (!selectedItem) {
      return
    }
    if (!window.confirm(`确认删除标签「${selectedItem.name}」吗?相关文章会同步移除该标签引用。`)) {
      return
    }
    try {
      setDeleting(true)
      await adminApi.deleteTag(selectedItem.id)
      startTransition(() => {
        setItems((current) => current.filter((item) => item.id !== selectedItem.id))
      })
      toast.success('标签已删除。')
      resetForm()
    } catch (error) {
      toast.error(error instanceof ApiError ? error.message : '删除标签失败。')
    } finally {
      setDeleting(false)
    }
  }, [resetForm, selectedItem])

  // Skeleton placeholders while the first load is in flight.
  if (loading) {
    return (
      <div className="space-y-6">
        <Skeleton className="h-40 rounded-3xl" />
        <Skeleton className="h-[720px] rounded-3xl" />
      </div>
    )
  }

  return (
    <div className="space-y-6">
      <div className="flex flex-col gap-4 xl:flex-row xl:items-end xl:justify-between">
        <div className="space-y-3">
          <Badge variant="secondary"></Badge>
          <div>
            <h2 className="text-3xl font-semibold tracking-tight"></h2>
            <p className="mt-2 max-w-3xl text-sm leading-7 text-muted-foreground">
              SEO 便
            </p>
          </div>
        </div>
        <div className="flex flex-wrap items-center gap-3">
          <Button variant="outline" onClick={resetForm}>
            <Plus className="h-4 w-4" />
          </Button>
          <Button variant="secondary" onClick={() => void loadTags(true)} disabled={refreshing}>
            <RefreshCcw className="h-4 w-4" />
            {refreshing ? '刷新中...' : '刷新'}
          </Button>
        </div>
      </div>
      <div className="grid gap-6 xl:grid-cols-[0.92fr_1.08fr]">
        <Card>
          <CardHeader>
            <CardTitle></CardTitle>
            <CardDescription>slug SEO </CardDescription>
          </CardHeader>
          <CardContent className="space-y-4">
            <Input
              placeholder="按标签名 / slug / 描述搜索"
              value={searchTerm}
              onChange={(event) => setSearchTerm(event.target.value)}
            />
            {filteredItems.length ? (
              <div className="space-y-3">
                {filteredItems.map((item) => (
                  <button
                    key={item.id}
                    type="button"
                    onClick={() => {
                      setSelectedId(item.id)
                      setForm(toFormState(item))
                    }}
                    className={`w-full rounded-3xl border px-4 py-4 text-left transition ${
                      selectedId === item.id
                        ? 'border-primary/30 bg-primary/10 shadow-[0_12px_30px_rgba(37,99,235,0.12)]'
                        : 'border-border/70 bg-background/60 hover:border-border'
                    }`}
                  >
                    <div className="flex items-start justify-between gap-3">
                      <div className="min-w-0 space-y-2">
                        <div className="flex flex-wrap items-center gap-2">
                          <span className="font-medium">{item.name}</span>
                          <Badge variant="outline">#{item.slug}</Badge>
                          {item.accent_color ? (
                            <span
                              className="inline-flex h-5 w-5 rounded-full border border-border/80"
                              style={{ backgroundColor: item.accent_color }}
                            />
                          ) : null}
                        </div>
                        <p className="text-sm text-muted-foreground">
                          {item.description || `${item.count} 篇文章引用了这个标签`}
                        </p>
                      </div>
                      <Badge variant={item.count > 0 ? 'success' : 'secondary'}>{item.count}</Badge>
                    </div>
                  </button>
                ))}
              </div>
            ) : (
              <div className="rounded-3xl border border-dashed border-border/70 bg-background/40 px-5 py-10 text-center text-sm text-muted-foreground">
              </div>
            )}
          </CardContent>
        </Card>
        <Card>
          <CardHeader>
            <div className="flex items-center gap-3">
              <div className="flex h-11 w-11 items-center justify-center rounded-2xl border border-primary/20 bg-primary/10 text-primary">
                <Tags className="h-5 w-5" />
              </div>
              <div>
                <CardTitle>{selectedItem ? '编辑标签' : '新建标签'}</CardTitle>
                <CardDescription>
                  SEO
                </CardDescription>
              </div>
            </div>
          </CardHeader>
          <CardContent className="space-y-6">
            <div className="grid gap-4 lg:grid-cols-2">
              <FormField label="标签名称" hint="例如astro、rust、workflow。">
                <Input
                  value={form.name}
                  onChange={(event) => setForm((current) => ({ ...current, name: event.target.value }))}
                  placeholder="输入标签名称"
                />
              </FormField>
              <FormField label="标签 slug" hint="留空时自动从英文名称生成;中文建议手填。">
                <Input
                  value={form.slug}
                  onChange={(event) => setForm((current) => ({ ...current, slug: event.target.value }))}
                  placeholder="astro"
                />
              </FormField>
              <FormField label="封面图 URL" hint="可选,用于前台标签头图。">
                <Input
                  value={form.coverImage}
                  onChange={(event) =>
                    setForm((current) => ({ ...current, coverImage: event.target.value }))
                  }
                  placeholder="https://cdn.example.com/covers/astro.jpg"
                />
              </FormField>
              <FormField label="强调色" hint="可选,用于标签专题头部强调色。">
                <div className="flex items-center gap-3">
                  <Input
                    value={form.accentColor}
                    onChange={(event) =>
                      setForm((current) => ({ ...current, accentColor: event.target.value }))
                    }
                    placeholder="#14b8a6"
                  />
                  <input
                    type="color"
                    value={form.accentColor || '#14b8a6'}
                    onChange={(event) =>
                      setForm((current) => ({ ...current, accentColor: event.target.value }))
                    }
                    className="h-10 w-14 rounded-xl border border-input bg-background px-1"
                  />
                </div>
              </FormField>
            </div>
            <FormField label="标签描述" hint="用于前台标签卡片与专题说明。">
              <Textarea
                value={form.description}
                onChange={(event) =>
                  setForm((current) => ({ ...current, description: event.target.value }))
                }
                rows={4}
                placeholder="介绍这个标签常见主题、适合谁看。"
              />
            </FormField>
            <div className="grid gap-4 lg:grid-cols-2">
              <FormField label="SEO 标题" hint="留空时前台继续使用标签名拼接默认标题。">
                <Input
                  value={form.seoTitle}
                  onChange={(event) =>
                    setForm((current) => ({ ...current, seoTitle: event.target.value }))
                  }
                  placeholder="Astro 相关文章 - Termi"
                />
              </FormField>
              <FormField label="SEO 描述" hint="搜索引擎摘要与分享描述。">
                <Textarea
                  value={form.seoDescription}
                  onChange={(event) =>
                    setForm((current) => ({ ...current, seoDescription: event.target.value }))
                  }
                  rows={4}
                  placeholder="围绕 Astro、内容站与渲染策略的文章汇总。"
                />
              </FormField>
            </div>
            <div className="grid gap-4 rounded-3xl border border-border/70 bg-background/50 p-4 md:grid-cols-3">
              <div>
                <p className="text-xs uppercase tracking-[0.18em] text-muted-foreground"></p>
                <p className="mt-2 text-2xl font-semibold text-foreground">{selectedItem?.count ?? 0}</p>
              </div>
              <div>
                <p className="text-xs uppercase tracking-[0.18em] text-muted-foreground"></p>
                <p className="mt-2 text-sm text-muted-foreground">{formatDateTime(selectedItem?.created_at)}</p>
              </div>
              <div>
                <p className="text-xs uppercase tracking-[0.18em] text-muted-foreground"></p>
                <p className="mt-2 text-sm text-muted-foreground">{formatDateTime(selectedItem?.updated_at)}</p>
              </div>
            </div>
            <div className="flex flex-wrap items-center gap-3">
              <Button onClick={() => void handleSave()} disabled={saving}>
                <Save className="h-4 w-4" />
                {saving ? '保存中...' : selectedItem ? '保存标签' : '创建标签'}
              </Button>
              <Button variant="outline" onClick={resetForm}>
              </Button>
              <Button
                variant="ghost"
                onClick={() => void handleDelete()}
                disabled={!selectedItem || deleting}
                className="text-rose-600 hover:text-rose-600"
              >
                <Trash2 className="h-4 w-4" />
                {deleting ? '删除中...' : '删除标签'}
              </Button>
            </div>
          </CardContent>
        </Card>
      </div>
    </div>
  )
}

View File

@@ -1,4 +1,5 @@
target
target-*
.git
.github
.gitea
@@ -6,3 +7,4 @@ node_modules
*.log
*.out
*.err
storage

3
backend/.gitignore vendored
View File

@@ -5,6 +5,7 @@
# will have compiled files and executables
debug/
target/
target-*/
# include cargo lock
!Cargo.lock
@@ -16,4 +17,4 @@ target/
*.pdb
*.sqlite
*.sqlite-*
*.sqlite-*

589
backend/Cargo.lock generated
View File

@@ -8,6 +8,62 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "aead"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0"
dependencies = [
"crypto-common 0.1.7",
"generic-array",
]
[[package]]
name = "aes"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
dependencies = [
"cfg-if",
"cipher 0.4.4",
"cpufeatures 0.2.17",
]
[[package]]
name = "aes"
version = "0.9.0-rc.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04097e08a47d9ad181c2e1f4a5fabc9ae06ce8839a333ba9a949bcb0d31fd2a3"
dependencies = [
"cipher 0.5.1",
"cpubits",
"cpufeatures 0.2.17",
]
[[package]]
name = "aes-gcm"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1"
dependencies = [
"aead",
"aes 0.8.4",
"cipher 0.4.4",
"ctr",
"ghash",
"subtle",
]
[[package]]
name = "aes-keywrap"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10b6f24a1f796bc46415a1d0d18dc0a8203ccba088acf5def3291c4f61225522"
dependencies = [
"aes 0.9.0-rc.4",
"byteorder",
]
[[package]]
name = "ahash"
version = "0.7.8"
@@ -190,6 +246,12 @@ dependencies = [
"password-hash",
]
[[package]]
name = "arrayref"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"
[[package]]
name = "arrayvec"
version = "0.7.6"
@@ -547,7 +609,7 @@ dependencies = [
"hmac",
"http 0.2.12",
"http 1.4.0",
"p256",
"p256 0.11.1",
"percent-encoding",
"ring",
"sha2",
@@ -950,12 +1012,24 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce"
[[package]]
name = "base16ct"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64"
version = "0.22.1"
@@ -992,6 +1066,12 @@ dependencies = [
"serde",
]
[[package]]
name = "binstring"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0669d5a35b64fdb5ab7fb19cae13148b6b5cbdf4b8247faf54ece47f699c8cef"
[[package]]
name = "bit_field"
version = "0.10.3"
@@ -1043,6 +1123,17 @@ dependencies = [
"digest",
]
[[package]]
name = "blake2b_simd"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b79834656f71332577234b50bfc009996f7449e0c056884e6a02492ded0ca2f3"
dependencies = [
"arrayref",
"arrayvec",
"constant_time_eq",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
@@ -1287,6 +1378,26 @@ dependencies = [
"stacker",
]
[[package]]
name = "cipher"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
dependencies = [
"crypto-common 0.1.7",
"inout 0.1.4",
]
[[package]]
name = "cipher"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e34d8227fe1ba289043aeb13792056ff80fd6de1a9f49137a5f499de8e8c78ea"
dependencies = [
"crypto-common 0.2.1",
"inout 0.2.2",
]
[[package]]
name = "clap"
version = "4.6.0"
@@ -1336,6 +1447,17 @@ dependencies = [
"cc",
]
[[package]]
name = "coarsetime"
version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e58eb270476aa4fc7843849f8a35063e8743b4dbcdf6dd0f8ea0886980c204c2"
dependencies = [
"libc",
"wasix",
"wasm-bindgen",
]
[[package]]
name = "color_quant"
version = "1.1.0"
@@ -1449,12 +1571,24 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "const-oid"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d6f2aa4d0537bcc1c74df8755072bd31c1ef1a3a1b85a68e8404a8c353b7b8b"
[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "constant_time_eq"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b"
[[package]]
name = "cookie"
version = "0.18.1"
@@ -1501,6 +1635,12 @@ dependencies = [
"memchr",
]
[[package]]
name = "cpubits"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ef0c543070d296ea414df2dd7625d1b24866ce206709d8a4a424f28377f5861"
[[package]]
name = "cpufeatures"
version = "0.2.17"
@@ -1654,8 +1794,10 @@ version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
dependencies = [
"generic-array",
"rand_core 0.6.4",
"subtle",
"zeroize",
]
[[package]]
@@ -1665,9 +1807,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
"generic-array",
"rand_core 0.6.4",
"typenum",
]
[[package]]
name = "crypto-common"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77727bb15fa921304124b128af125e7e3b968275d1b108b379190264f4423710"
dependencies = [
"hybrid-array",
]
[[package]]
name = "cssparser"
version = "0.34.0"
@@ -1691,6 +1843,21 @@ dependencies = [
"syn 2.0.117",
]
[[package]]
name = "ct-codecs"
version = "1.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b10589d1a5e400d61f9f38f12f884cfd080ff345de8f17efda36fe0e4a02aa8"
[[package]]
name = "ctr"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835"
dependencies = [
"cipher 0.4.4",
]
[[package]]
name = "darling"
version = "0.20.11"
@@ -1749,13 +1916,23 @@ dependencies = [
"parking_lot_core",
]
[[package]]
name = "der"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79b71cca7d95d7681a4b3b9cdf63c8dbc3730d0584c2c74e31416d64a90493f4"
dependencies = [
"const-oid 0.6.2",
"der_derive",
]
[[package]]
name = "der"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de"
dependencies = [
"const-oid",
"const-oid 0.9.6",
"zeroize",
]
@@ -1765,7 +1942,7 @@ version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
dependencies = [
"const-oid",
"const-oid 0.9.6",
"pem-rfc7468 0.7.0",
"zeroize",
]
@@ -1780,6 +1957,18 @@ dependencies = [
"zeroize",
]
[[package]]
name = "der_derive"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8aed3b3c608dc56cf36c45fe979d04eda51242e6703d8d0bb03426ef7c41db6a"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"synstructure 0.12.6",
]
[[package]]
name = "deranged"
version = "0.5.8"
@@ -1873,8 +2062,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"const-oid",
"crypto-common",
"const-oid 0.9.6",
"crypto-common 0.1.7",
"subtle",
]
@@ -1965,11 +2154,53 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c"
dependencies = [
"der 0.6.1",
"elliptic-curve",
"rfc6979",
"elliptic-curve 0.12.3",
"rfc6979 0.3.1",
"signature 1.6.4",
]
[[package]]
name = "ecdsa"
version = "0.16.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca"
dependencies = [
"der 0.7.10",
"digest",
"elliptic-curve 0.13.8",
"rfc6979 0.4.0",
"signature 2.2.0",
"spki 0.7.3",
]
[[package]]
name = "ece"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2ea1d2f2cc974957a4e2575d8e5bb494549bab66338d6320c2789abcfff5746"
dependencies = [
"base64 0.21.7",
"byteorder",
"hex",
"hkdf",
"lazy_static",
"once_cell",
"openssl",
"serde",
"sha2",
"thiserror 1.0.69",
]
[[package]]
name = "ed25519-compact"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33ce99a9e19c84beb4cc35ece85374335ccc398240712114c85038319ed709bd"
dependencies = [
"ct-codecs",
"getrandom 0.3.4",
]
[[package]]
name = "ego-tree"
version = "0.9.0"
@@ -1991,16 +2222,37 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3"
dependencies = [
"base16ct",
"base16ct 0.1.1",
"crypto-bigint 0.4.9",
"der 0.6.1",
"digest",
"ff",
"ff 0.12.1",
"generic-array",
"group",
"group 0.12.1",
"pkcs8 0.9.0",
"rand_core 0.6.4",
"sec1",
"sec1 0.3.0",
"subtle",
"zeroize",
]
[[package]]
name = "elliptic-curve"
version = "0.13.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47"
dependencies = [
"base16ct 0.2.0",
"crypto-bigint 0.5.5",
"digest",
"ff 0.13.1",
"generic-array",
"group 0.13.0",
"hkdf",
"pem-rfc7468 0.7.0",
"pkcs8 0.10.2",
"rand_core 0.6.4",
"sec1 0.7.3",
"subtle",
"zeroize",
]
@@ -2186,6 +2438,16 @@ dependencies = [
"subtle",
]
[[package]]
name = "ff"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393"
dependencies = [
"rand_core 0.6.4",
"subtle",
]
[[package]]
name = "find-msvc-tools"
version = "0.1.9"
@@ -2416,6 +2678,7 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
"zeroize",
]
[[package]]
@@ -2468,6 +2731,16 @@ dependencies = [
"wasip3",
]
[[package]]
name = "ghash"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1"
dependencies = [
"opaque-debug",
"polyval",
]
[[package]]
name = "gif"
version = "0.14.1"
@@ -2526,7 +2799,18 @@ version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7"
dependencies = [
"ff",
"ff 0.12.1",
"rand_core 0.6.4",
"subtle",
]
[[package]]
name = "group"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63"
dependencies = [
"ff 0.13.1",
"rand_core 0.6.4",
"subtle",
]
@@ -2689,11 +2973,29 @@ dependencies = [
"digest",
]
[[package]]
name = "hmac-sha1-compact"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0b3ba31f6dc772cc8221ce81dbbbd64fa1e668255a6737d95eeace59b5a8823"
[[package]]
name = "hmac-sha256"
version = "1.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec9d92d097f4749b64e8cc33d924d9f40a2d4eb91402b458014b781f5733d60f"
dependencies = [
"digest",
]
[[package]]
name = "hmac-sha512"
version = "1.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "019ece39bbefc17f13f677a690328cb978dbf6790e141a3c24e66372cb38588b"
dependencies = [
"digest",
]
[[package]]
name = "home"
@@ -2809,6 +3111,15 @@ dependencies = [
"libm",
]
[[package]]
name = "hybrid-array"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3944cf8cf766b40e2a1a333ee5e9b563f854d5fa49d6a8ca2764e97c6eddb214"
dependencies = [
"typenum",
]
[[package]]
name = "hyper"
version = "0.14.32"
@@ -2889,6 +3200,19 @@ dependencies = [
"webpki-roots 1.0.6",
]
[[package]]
name = "hyper-tls"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
dependencies = [
"bytes",
"hyper 0.14.32",
"native-tls",
"tokio",
"tokio-native-tls",
]
[[package]]
name = "hyper-tls"
version = "0.6.0"
@@ -3199,6 +3523,24 @@ dependencies = [
"libc",
]
[[package]]
name = "inout"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
dependencies = [
"generic-array",
]
[[package]]
name = "inout"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4250ce6452e92010fdf7268ccc5d14faa80bb12fc741938534c58f16804e03c7"
dependencies = [
"hybrid-array",
]
[[package]]
name = "insta"
version = "1.47.0"
@@ -3300,13 +3642,53 @@ checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde"
dependencies = [
"base64 0.22.1",
"js-sys",
"pem",
"pem 3.0.6",
"ring",
"serde",
"serde_json",
"simple_asn1",
]
[[package]]
name = "jwt-simple"
version = "0.12.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3991f54af4b009bb6efe01aa5a4fcce9ca52f3de7a104a3f6b6e2ad36c852c48"
dependencies = [
"anyhow",
"binstring",
"blake2b_simd",
"coarsetime",
"ct-codecs",
"ed25519-compact",
"hmac-sha1-compact",
"hmac-sha256",
"hmac-sha512",
"k256",
"p256 0.13.2",
"p384",
"rand 0.8.5",
"serde",
"serde_json",
"superboring",
"thiserror 2.0.18",
"zeroize",
]
[[package]]
name = "k256"
version = "0.13.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b"
dependencies = [
"cfg-if",
"ecdsa 0.16.9",
"elliptic-curve 0.13.8",
"once_cell",
"sha2",
"signature 2.2.0",
]
[[package]]
name = "kqueue"
version = "1.1.1"
@@ -4019,6 +4401,12 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "opaque-debug"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
[[package]]
name = "opendal"
version = "0.54.1"
@@ -4174,8 +4562,32 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594"
dependencies = [
"ecdsa",
"elliptic-curve",
"ecdsa 0.14.8",
"elliptic-curve 0.12.3",
"sha2",
]
[[package]]
name = "p256"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b"
dependencies = [
"ecdsa 0.16.9",
"elliptic-curve 0.13.8",
"primeorder",
"sha2",
]
[[package]]
name = "p384"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe42f1670a52a47d448f14b6a5c61dd78fce51856e68edaa38f7ae3a46b8d6b6"
dependencies = [
"ecdsa 0.16.9",
"elliptic-curve 0.13.8",
"primeorder",
"sha2",
]
@@ -4240,6 +4652,17 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec"
[[package]]
name = "pem"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb"
dependencies = [
"base64 0.13.1",
"once_cell",
"regex",
]
[[package]]
name = "pem"
version = "3.0.6"
@@ -4446,6 +4869,18 @@ dependencies = [
"miniz_oxide",
]
[[package]]
name = "polyval"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25"
dependencies = [
"cfg-if",
"cpufeatures 0.2.17",
"opaque-debug",
"universal-hash",
]
[[package]]
name = "portable-atomic"
version = "1.13.1"
@@ -4511,6 +4946,15 @@ dependencies = [
"syn 2.0.117",
]
[[package]]
name = "primeorder"
version = "0.13.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6"
dependencies = [
"elliptic-curve 0.13.8",
]
[[package]]
name = "proc-macro-crate"
version = "3.5.0"
@@ -5014,7 +5458,7 @@ dependencies = [
"http-body-util",
"hyper 1.8.1",
"hyper-rustls 0.27.7",
"hyper-tls",
"hyper-tls 0.6.0",
"hyper-util",
"js-sys",
"log",
@@ -5065,6 +5509,16 @@ dependencies = [
"zeroize",
]
[[package]]
name = "rfc6979"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2"
dependencies = [
"hmac",
"subtle",
]
[[package]]
name = "rgb"
version = "0.8.53"
@@ -5139,7 +5593,7 @@ version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d"
dependencies = [
"const-oid",
"const-oid 0.9.6",
"digest",
"num-bigint-dig",
"num-integer",
@@ -5147,6 +5601,7 @@ dependencies = [
"pkcs1",
"pkcs8 0.10.2",
"rand_core 0.6.4",
"sha2",
"signature 2.2.0",
"spki 0.7.3",
"subtle",
@@ -5579,7 +6034,7 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928"
dependencies = [
"base16ct",
"base16ct 0.1.1",
"der 0.6.1",
"generic-array",
"pkcs8 0.9.0",
@@ -5587,6 +6042,31 @@ dependencies = [
"zeroize",
]
[[package]]
name = "sec1"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc"
dependencies = [
"base16ct 0.2.0",
"der 0.7.10",
"generic-array",
"pkcs8 0.10.2",
"subtle",
"zeroize",
]
[[package]]
name = "sec1_decode"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6326ddc956378a0739200b2c30892dccaf198992dfd7323274690b9e188af23"
dependencies = [
"der 0.4.5",
"pem 0.8.3",
"thiserror 1.0.69",
]
[[package]]
name = "security-framework"
version = "3.7.0"
@@ -6331,6 +6811,21 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "superboring"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af44d8b60bc4ffb966f80d1582d579c84f559419e7abafb948d706fc6f95b3d4"
dependencies = [
"aes-gcm",
"aes-keywrap",
"getrandom 0.2.17",
"hmac-sha256",
"hmac-sha512",
"rand 0.8.5",
"rsa",
]
[[package]]
name = "syn"
version = "1.0.109"
@@ -6362,6 +6857,18 @@ dependencies = [
"futures-core",
]
[[package]]
name = "synstructure"
version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"unicode-xid",
]
[[package]]
name = "synstructure"
version = "0.13.2"
@@ -6483,6 +6990,7 @@ dependencies = [
"tracing-subscriber",
"uuid",
"validator",
"web-push",
]
[[package]]
@@ -7063,6 +7571,16 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
[[package]]
name = "universal-hash"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea"
dependencies = [
"crypto-common 0.1.7",
"subtle",
]
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
@@ -7300,6 +7818,15 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
[[package]]
name = "wasix"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1757e0d1f8456693c7e5c6c629bdb54884e032aa0bb53c155f6a39f94440d332"
dependencies = [
"wasi",
]
[[package]]
name = "wasm-bindgen"
version = "0.2.114"
@@ -7406,6 +7933,28 @@ dependencies = [
"semver",
]
[[package]]
name = "web-push"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5c305b9ee2993ab68b7744b13ef32231d83600dd879ac8183b4c76ae31d28ac"
dependencies = [
"async-trait",
"chrono",
"ct-codecs",
"ece",
"http 0.2.12",
"hyper 0.14.32",
"hyper-tls 0.5.0",
"jwt-simple",
"log",
"pem 3.0.6",
"sec1_decode",
"serde",
"serde_derive",
"serde_json",
]
[[package]]
name = "web-sys"
version = "0.3.91"
@@ -7960,7 +8509,7 @@ dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
"synstructure",
"synstructure 0.13.2",
]
[[package]]
@@ -8001,7 +8550,7 @@ dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
"synstructure",
"synstructure 0.13.2",
]
[[package]]

View File

@@ -3,7 +3,8 @@
[package]
name = "termi-api"
version = "0.1.0"
edition = "2021"
edition = "2024"
rust-version = "1.85"
publish = false
default-run = "termi_api-cli"
@@ -44,6 +45,7 @@ async-stream = "0.3"
base64 = "0.22"
aws-config = "1"
aws-sdk-s3 = "1"
web-push = { version = "0.11.0", default-features = false, features = ["hyper-client"] }
[[bin]]
name = "termi_api-cli"
@@ -54,6 +56,10 @@ name = "tool"
path = "src/bin/tool.rs"
required-features = []
[profile.release]
strip = "symbols"
lto = "thin"
[dev-dependencies]
loco-rs = { workspace = true, features = ["testing"] }
serial_test = { version = "3.1.1" }

View File

@@ -1,20 +1,23 @@
# syntax=docker/dockerfile:1.7
FROM rust:1.88-bookworm AS builder
FROM rust:1.94-trixie AS chef
RUN cargo install cargo-chef --locked
WORKDIR /app
COPY Cargo.toml Cargo.lock ./
COPY migration/Cargo.toml migration/Cargo.toml
COPY src src
COPY migration/src migration/src
COPY config config
COPY assets assets
FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release --locked --recipe-path recipe.json
COPY . .
RUN cargo build --release --locked --bin termi_api-cli
FROM debian:bookworm-slim AS runtime
FROM debian:trixie-slim AS runtime
RUN apt-get update \
&& apt-get install -y --no-install-recommends ca-certificates tzdata wget \
&& apt-get install -y --no-install-recommends ca-certificates libgomp1 libstdc++6 tzdata wget \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app

View File

@@ -45,6 +45,10 @@
description: "节奏更明显一点,适合切换阅读状态。"
ai_enabled: false
paragraph_comments_enabled: true
comment_verification_mode: "captcha"
subscription_verification_mode: "off"
turnstile_site_key: "0x4AAAAAACy58kMBSwXwqMhx"
turnstile_secret_key: "REPLACE_ME"  # NOTE(review): a real Turnstile secret key was committed here — rotate it and load it from an env var / secret store instead of the repo
ai_provider: "newapi"
ai_api_base: "https://91code.jiangnight.com/v1"
ai_api_key: "REPLACE_ME"  # NOTE(review): a real API key was committed here — revoke/rotate it and inject it via environment or secret manager

View File

@@ -0,0 +1,40 @@
<!doctype html>
<html lang="zh-CN">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>404 Not Found</title>
<style>
body {
margin: 0;
min-height: 100vh;
display: grid;
place-items: center;
background: #0f172a;
color: #e2e8f0;
font: 16px/1.6 -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
}
main {
padding: 24px;
text-align: center;
}
h1 {
margin: 0 0 8px;
font-size: 28px;
}
p {
margin: 0;
color: #94a3b8;
}
</style>
</head>
<body>
<main>
<h1>404</h1>
<p>Not Found</p>
</main>
</body>
</html>

View File

@@ -0,0 +1,24 @@
---
title: 徐霞客游记·游太和山日记(下)
slug: building-blog-with-astro
description: 《徐霞客游记》太和山下篇,包含琼台、南岩与五龙宫等段落。
category: 古籍游记
post_type: article
pinned: false
status: published
visibility: public
noindex: false
tags:
- 徐霞客
- 游记
- 太和山
- 长文测试
---
# 徐霞客游记·游太和山日记(下)
更衣上金顶。瞻叩毕,天宇澄朗,下瞰诸峰,近者鹄峙,远者罗列,诚天真奥区也。
遂从三天门之右小径下峡中。此径无级无索,乱峰离立,路穿其间,迥觉幽胜。三里馀,抵蜡烛峰右,泉涓涓溢出路旁,下为蜡烛涧。
从宫左趋雷公洞。洞在悬崖间。乃从北天门下,一径阴森,滴水、仙侣二岩,俱在路左,飞崖上突,泉滴沥于中。

View File

@@ -1,242 +0,0 @@
---
title: "Canokey入门指南:2FA、OpenPGP、PIV"
description: 本文是一份Canokey入门指南将介绍如何使用Canokey进行2FA、OpenPGP和PIV等操作。其中2FA部分将介绍如何使用Yubikey Authenticator进行管理OpenPGP部分将介绍如何生成GPG密钥并使用Canokey进行身份验证和加密解密PIV部分将介绍如何在Canokey中生成PIV证书并使用其进行身份验证。
date: 2022-08-19T16:42:40+08:00
draft: false
slug: canokeys
image:
categories:
- Linux
tags:
- Linux
---
# 2FA
`Canokey`使用`Yubikey Authenticator`来进行管理`2FA`
下载`Yubikey Authenticator`,以下为`Yubikey Authenticator`官方下载网址
```http
https://www.yubico.com/products/yubico-authenticator/#h-download-yubico-authenticator
```
运行`Yubikey Authenticator`
进入`custom reader`,在`Custom reader filter`处填入 `CanoKey`
![填入CanoKey](https://upload-images.jianshu.io/upload_images/9676051-ff0cd60f38ac7334.png)
右上角`Add account` 增加`2FA`
![添加2FA](https://upload-images.jianshu.io/upload_images/9676051-1031857fe0f13d08.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
```yaml
Issuer: 备注 可选
Account name : 用户名 必填项
Secret Key : Hotp或Totp的key 必填项
```
# OpenPGP
## 安装GPG
Windows 用户可下载 [Gpg4Win](https://gpg4win.org/download.html)Linux/macOS 用户使用对应包管理软件安装即可.
## 生成主密钥
```shell
gpg --expert --full-gen-key #生成GPG KEY
```
推荐使用`ECC`算法
![image-20220102223722475](https://upload-images.jianshu.io/upload_images/9676051-df42e4b958e9a238.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
```shell
选择(11) ECC (set your own capabilities) # 设置自己的功能 主密钥只保留 Certify 功能其他功能Encr,Sign,Auth使用子密钥
# 子密钥分成三份,分别获得三个不同的功能
# encr 解密功能
# sign 签名功能
# auth 登录验证功能
```
```shell
先选择 (S) Toggle the sign capability
```
![image-20220102224151589](https://upload-images.jianshu.io/upload_images/9676051-c3bb19eb398419e1.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
```
之后输入q 退出
```
键入1,选择默认算法
![键入1,选择默认算法](https://upload-images.jianshu.io/upload_images/9676051-7a2c5ee8ed4800af.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
设置主密钥永不过期
![image-20220102224451731](https://upload-images.jianshu.io/upload_images/9676051-cca6100917c2ffaa.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
填写信息,按照实际情况填写即可
![image-20220102224612167](https://upload-images.jianshu.io/upload_images/9676051-10430afe3aa592c7.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
```
Windnows 下会弹出窗口输入密码,注意一定要保管好!!!
```
```shell
```
```shell
# 会自动生成吊销证书,注意保存到安全的地方
gpg: AllowSetForegroundWindow(22428) failed: 拒绝访问。
gpg: revocation certificate stored as 'C:\\Users\\Andorid\\AppData\\Roaming\\gnupg\\openpgp-revocs.d\\<此处为私钥>.rev'
# 以上的REV文件即为吊销证书
public and secret key created and signed.
```
```shell
pub ed25519 2022-01-02 [SC]
<此处为Pub>
uid <此处为Name> <此处为email>
```
生成子密钥
```shell
gpg --fingerprint --keyid-format long -K
```
下面生成不同功能的子密钥,其中 `<fingerprint>` 为上面输出的密钥指纹,本示例中即为 `私钥`。最后的 `2y` 为密钥过期时间,可自行设置,如不填写默认永不过期。
```shell
gpg --quick-add-key <fingerprint> cv25519 encr 2y
gpg --quick-add-key <fingerprint> ed25519 auth 2y
gpg --quick-add-key <fingerprint> ed25519 sign 2y
```
再次查看目前的私钥,可以看到已经包含了这三个子密钥。
```shell
gpg --fingerprint --keyid-format long -K
```
上面生成了三种功能的子密钥ssb分别为加密E、认证A、签名S对应 `OpenPGP Applet` 中的三个插槽。由于 `ECC` 实现的原因,加密密钥的算法区别于其他密钥的算法。
加密密钥用于加密文件和信息。签名密钥主要用于给自己的信息签名,保证这真的是来自**我**的信息。认证密钥主要用于 SSH 登录。
## 备份GPG
```shell
# 公钥
gpg -ao public-key.pub --export <ed25519/16位>
# 主密钥,请务必保存好!!!
# 注意 key id 后面的 !,表示只导出这一个私钥,若没有的话默认导出全部私钥。
gpg -ao sec-key.asc --export-secret-key <ed25519/16位>!
# sign子密钥
gpg -ao sign-key.asc --export-secret-key <ed25519/16位>!
gpg -ao auth-key.asc --export-secret-key <ed25519/16位>!
gpg -ao encr-key.asc --export-secret-key <ed25519/16位>!
```
## 导入Canokey
```shell
# 查看智能卡设备状态
gpg --card-status
# 写入GPG
gpg --edit-key <ed25519/16位> # 为上方的sec-key
# 选中第一个子密钥
key 1
# 写入到智能卡
keytocard
# 再次输入,取消选择
key 1
# 选择第二个子密钥
key 2
keytocard
key 2
key 3
keytocard
# 保存修改并退出
save
#再次查看设备状态,可以看到此时子密钥标识符为 ssb>,表示本地只有一个指向 card-no: F1D0 xxxxxxxx 智能卡的指针,已不存在私钥。现在可以删除掉主密钥了,请再次确认你已安全备份好主密钥。
gpg --card-status
```
## 删除本地密钥
```shell
gpg --delete-secret-keys <ed25519/16位> # 为上方的sec-key
```
为确保安全,也可直接删除 gpg 的工作目录。Windows: `%APPDATA%\gnupg`;Linux/macOS: `~/.gnupg`
## 使用 Canokey
此时切换回日常使用的环境,首先导入公钥
```shell
gpg --import public-key.pub
```
然后设置子密钥指向 Canokey
```shell
gpg --edit-card
gpg/card> fetch
```
此时查看本地的私钥,可以看到已经指向了 Canokey
```
gpg --fingerprint --keyid-format long -K
```
配置gpg路径
```bash
git config --global gpg.program "C:\Program Files (x86)\GnuPG\bin\gpg.exe" --replace-all
```
## Git Commit 签名
首先确保 Git 本地配置以及 GitHub 中的邮箱信息包含在 `UID` 中,然后设置 Git 来指定使用子密钥中的签名S密钥。
```shell
git config --global user.signingkey <ed25519/16位> # 为上方的Sign密钥
```
之后在 `git commit` 时增加 `-S` 参数即可使用 gpg 进行签名。也可在配置中设置自动 gpg 签名,此处不建议全局开启该选项,因为有的脚本可能会使用 `git am` 之类的涉及到 `commit` 的命令,如果全局开启的话会导致问题。
```shell
git config commit.gpgsign true
```
如果提交到 GitHub前往 [GitHub SSH and GPG keys](https://github.com/settings/keys) 添加公钥。此处添加后,可以直接通过对应 GitHub ID 来获取公钥:`https://github.com/<yourid>.gpg`
## PIV
首先在Web端添加自己的私钥到智能卡之后前往 [WinCrypt SSH Agent](https://github.com/buptczq/WinCryptSSHAgent) 下载并运行,此时查看 `ssh-agent` 读取到的公钥信息,把输出的公钥信息添加到服务器的 `~/.ssh/authorized_keys`
```shell
# 设置环境池
$Env:SSH_AUTH_SOCK="\\.\pipe\openssh-ssh-agent"
# 查看ssh列表
ssh-add -L
```
此时连接 `ssh user@host`,会弹出提示输入 `PIN` 的页面,注意此时输入的是 `PIV Applet PIN`,输入后即可成功连接服务器。
```yaml
tips: 可能会出现权限不够的情况,需要禁用Windows服务OpenSSH Authentication Agent
```
最后可以把该程序快捷方式添加到启动目录 `%AppData%\Microsoft\Windows\Start Menu\Programs\Startup`,方便直接使用。

View File

@@ -1,67 +0,0 @@
---
title: "如何使用FFmpeg处理音视频文件"
description: 本文提供了FFmpeg处理音视频文件的完整指南包括将单张图片转换为视频、拼接多个视频、设置转场特效等多种操作。
date: 2022-07-25T14:05:04+08:00
draft: true
slug: ffmpeg
image:
categories:
  - ffmpeg
tags:
  - ffmpeg
---
# `ffmpeg`图片转视频
使用单张图片生成5秒视频
```bash
# -loop 1 指定开启单帧图片loop
# -t 5 指定loop时长为5秒
# -i input 指定输入图片文件路径 示例:pic.jpg
# -pix_fmt 指定编码格式为yuv420p
# -y 若输出文件已存在,则强制进行覆盖。
# ffmpeg会根据输出文件后缀,自动选择编码格式。
# 也可以使用 -f 指定输出格式
ffmpeg -loop 1 -t 5 -i <filename>.jpg -pix_fmt yuv420p -y output.ts
```
# `ffmpeg`拼接视频
```bash
# windows
# -i input 指定需要合并的文件,使用concat进行合并.示例:"concat:0.ts|1.ts|2.ts"
# -vcodec 指定视频编码器的参数为copy
# -acodec 指定音频编码器的参数为copy
# -y 若输出文件已存在,则强制进行覆盖。
ffmpeg -i "concat:0.ts|1.ts" -vcodec copy -acodec copy -y output.ts
```
# `ffmpeg`设置转场特效
```bash
# Linux
ffmpeg -i v0.mp4 -i v1.mp4 -i v2.mp4 -i v3.mp4 -i v4.mp4 -filter_complex \
"[0][1:v]xfade=transition=fade:duration=1:offset=3[vfade1]; \
[vfade1][2:v]xfade=transition=fade:duration=1:offset=10[vfade2]; \
[vfade2][3:v]xfade=transition=fade:duration=1:offset=21[vfade3]; \
[vfade3][4:v]xfade=transition=fade:duration=1:offset=25,format=yuv420p; \
[0:a][1:a]acrossfade=d=1[afade1]; \
[afade1][2:a]acrossfade=d=1[afade2]; \
[afade2][3:a]acrossfade=d=1[afade3]; \
[afade3][4:a]acrossfade=d=1" \
-movflags +faststart out.mp4
```
| 输入文件 | 输入文件的视频总长 | + | previous xfade `offset` | - | xfade `duration` | `offset` = |
| :------- | :----------------- | :--: | :---------------------- | :--: | :--------------- | :--------- |
| `v0.mp4` | 4 | + | 0 | - | 1 | 3 |
| `v1.mp4` | 8 | + | 3 | - | 1 | 10 |
| `v2.mp4` | 12 | + | 10 | - | 1 | 21 |
| `v3.mp4` | 5 | + | 21 | - | 1 | 25 |
// 将音频转为单声道
```
ffmpeg -i .\1.mp3 -ac 1 -ar 44100 -ab 16k -vol 50 1s.mp3
ffmpeg -i one.ts -i 1s.mp3 -map 0:v -map 1:a -c:v copy -shortest -af apad -y one1.ts
```

View File

@@ -1,121 +0,0 @@
---
title: "使用arm交叉编译工具并解决GLIBC版本不匹配的问题"
description: 介绍如何使用arm交叉编译工具来编译Go程序并解决在arm平台上运行时出现GLIBC版本不匹配的问题。
date: 2022-06-10T15:00:26+08:00
draft: false
slug: go-arm
image:
categories:
- Go
tags:
- Arm
- Go
- GLIBC
---
1. 下载 ARM 交叉编译工具,可以从官方网站下载。比如,可以从如下链接下载 GNU 工具链:[https://developer.arm.com/downloads/-/gnu-a](https://developer.arm.com/downloads/-/gnu-a)
示例:https://developer.arm.com/-/media/Files/downloads/gnu-a/10.3-2021.07/binrel/gcc-arm-10.3-2021.07-mingw-w64-i686-aarch64-none-elf.tar.xz
2. 设置 Go ARM 交叉编译环境变量。具体来说,需要设置以下变量:
```ruby
$env:GOOS="linux"
$env:GOARCH="arm64"
$env:CGO_ENABLED=1
$env:CC="D:\arm\gcc-arm-10.3-2021.07-mingw-w64-i686-aarch64-none-linux-gnu\bin\aarch64-none-linux-gnu-gcc.exe"
$env:CXX="D:\arm\gcc-arm-10.3-2021.07-mingw-w64-i686-aarch64-none-linux-gnu\bin\aarch64-none-linux-gnu-g++.exe"
```
3. 在 ARM 上运行程序时可能会出现如下错误:
```bash
./bupload: /lib/aarch64-linux-gnu/libc.so.6: version `GLIBC_2.28' not found (required by ./bupload)
./bupload: /lib/aarch64-linux-gnu/libc.so.6: version `GLIBC_2.32' not found (required by ./bupload)
./bupload: /lib/aarch64-linux-gnu/libc.so.6: version `GLIBC_2.33' not found (required by ./bupload)
```
这是因为程序需要使用较新版本的 GLIBC 库,而 ARM 上安装的库版本较旧。可以通过以下步骤来解决这个问题:
4. 查看当前系统中 libc 库所支持的版本:
```bash
strings /lib/aarch64-linux-gnu/libc.so.6 | grep GLIBC_
```
5. 备份整个 `/lib` 目录和 `/usr/include` 目录,以便稍后还原。
6. 从 GNU libc 官方网站下载对应版本的 libc 库。例如,可以从如下链接下载 2.35 版本的 libc 库:[http://ftp.gnu.org/gnu/glibc/glibc-2.35.tar.xz](http://ftp.gnu.org/gnu/glibc/glibc-2.35.tar.xz)
7. 解压 libc 库:
```
xz -d glibc-2.35.tar.xz
tar xvf glibc-2.35.tar glibc-2.35
```
8. 创建并进入 build 目录:
```bash
mkdir build
cd build
```
9. 配置 libc 库的安装选项:
```javascript
../configure --prefix=/usr --disable-profile --enable-add-ons --with-headers=/usr/include --with-binutils=/usr/bin
```
10. 编译并安装 libc 库:
```go
make -j4
make install
```
接下来是关于 `make` 报错的部分:
```yaml
asm/errno.h: No such file or directory
```
这个报错是因为 `errno.h` 文件中包含了 `asm/errno.h` 文件,但是找不到这个文件。为了解决这个问题,我们需要创建一个软链接:
```bash
ln -s /usr/include/asm-generic /usr/include/asm
```
然后又出现了另一个报错:
```bash
/usr/include/aarch64-linux-gnu/asm/sigcontext.h: No such file or directory
```
这个问题也可以通过重新安装`linux-libc-dev`后创建软链接来解决:
```bash
# find / -name sigcontext.h
sudo apt-get install --reinstall linux-libc-dev
ln -s /usr/include/aarch64-linux-gnu/asm/sigcontext.h /usr/include/asm/sigcontext.h
```
接下来,还有一个报错:
```yaml
asm/sve_context.h: No such file or directory
```
这个报错是因为最新的 Linux 内核在启用 ARM Scalable Vector Extension (SVE) 后,需要包含 `asm/sve_context.h` 文件。我们需要创建一个软链接来解决这个问题:
```bash
# find / -name sve_context.h
ln -s /usr/include/aarch64-linux-gnu/asm/sve_context.h /usr/include/asm/sve_context.h
```
最后,还需要创建一个软链接:
```bash
# find / -name byteorder.h
ln -s /usr/include/aarch64-linux-gnu/asm/byteorder.h /usr/include/asm/byteorder.h
```
完成以上步骤后,我们再次执行 `make` 命令,就应该可以顺利地编译和安装 glibc 了。

View File

@@ -1,173 +0,0 @@
---
title: "Go使用gRPC进行通信"
description: RPC是远程过程调用的简称是分布式系统中不同节点间流行的通信方式。
date: 2022-05-26T14:17:33+08:00
draft: false
slug: go-grpc
image:
categories:
- Go
tags:
- Go
- gRPC
---
# 安装`gRPC`和`Protoc`
## 安装`protobuf`
```bash
go get -u google.golang.org/protobuf
go get -u google.golang.org/protobuf/proto
go get -u google.golang.org/protobuf/protoc-gen-go
```
## 安装`Protoc`
```shell
# 下载二进制文件并添加至环境变量
https://github.com/protocolbuffers/protobuf/releases
```
安装`Protoc`插件`protoc-gen-go`
```shell
# go install 会自动编译项目并添加至环境变量中
go install google.golang.org/protobuf/cmd/protoc-gen-go@latest
```
```shell
#protoc-gen-go 文档地址
https://developers.google.com/protocol-buffers/docs/reference/go-generated
```
# 创建`proto`文件并定义服务
## 新建 `task.proto`文件
```shell
touch task.proto
```
## 编写`task.proto`
```protobuf
// 指定proto版本
syntax = "proto3";
// 指定包名
package task;
// 指定输出 go 语言的源码到哪个目录和 包名
// 主要 目录和包名用 ; 隔开
// 将在当前目录生成 task.pb.go
// 也可以只填写 "./",生成的包名会变成 "----"
option go_package = "./;task";
// 指定RPC的服务名
service TaskService {
// 调用 AddTaskCompletion 方法
rpc AddTaskCompletion(request) returns (response);
}
// RPC TaskService服务,AddTaskCompletion函数的请求参数,即消息
message request {
uint32 id = 1;//任务id
string module = 2;//所属模块
int32 value = 3;//此次完成值
string guid = 4;//用户id
}
// RPC TaskService服务,TaskService函数的返回值,即消息
message response{
}
```
## 使用`Protoc`来生成Go代码
```bash
protoc --go_out=. --go-grpc_out=. <要进行生成代码的文件>.proto
# example
protoc --go_out=. --go-grpc_out=. .\task.proto
```
这样生成会生成两个`.go`文件,一个是对应消息`task.pb.go`,一个对应服务接口`task_grpc.pb.go`
`task_grpc.pb.go`中,在我们定义的服务接口中,多增加了一个私有的接口方法:
`mustEmbedUnimplementedTaskServiceServer()`
# 使用`Go`监听`gRPC`服务端及客户端
## 监听服务端
并有生成的一个`UnimplementedTaskServiceServer`结构体来实现了所有的服务接口。因此,在我们自己实现的服务类中,需要继承这个结构体,如:
```go
// 用于实现grpc服务 TaskServiceServer 接口
type TaskServiceImpl struct {
// 需要继承结构体 UnimplementedServiceServer 或mustEmbedUnimplementedTaskServiceServer
	task.UnimplementedTaskServiceServer
}
func main() {
// 创建Grpc服务
// 创建tcp连接
listener, err := net.Listen("tcp", ":8082")
if err != nil {
fmt.Println(err)
return
}
// 创建grpc服务
grpcServer := grpc.NewServer()
// 此函数在task.pb.go中,自动生成
task.RegisterTaskServiceServer(grpcServer, &TaskServiceImpl{})
// 在grpc服务上注册反射服务
reflection.Register(grpcServer)
// 启动grpc服务
err = grpcServer.Serve(listener)
if err != nil {
fmt.Println(err)
return
}
}
func (s *TaskServiceImpl) AddTaskCompletion(ctx context.Context, in *task.Request) (*task.Response, error) {
fmt.Println("收到一个Grpc 请求, 请求参数为", in.Guid)
r := &task.Response{
}
return r, nil
}
```
然后在`TaskService`上实现我们的服务接口。
## 客户端
```go
conn, err := grpc.Dial("127.0.0.1:8082", grpc.WithInsecure())
if err != nil {
panic(err)
}
defer conn.Close()
// 创建grpc客户端
client := task.NewTaskServiceClient(conn)
// 创建请求
req := &task.Request{
Id: 1,
Module: "test",
Value: 3,
Guid: "test",
}
// 调用rpc TaskService AddTaskCompletion函数
response, err := client.AddTaskCompletion(context.Background(), req)
if err != nil {
log.Println(err)
return
}
log.Println(response)
```
[本文参考](https://www.cnblogs.com/whuanle/p/14588031.html)

View File

@@ -1,98 +0,0 @@
---
title: "Go语言解析Xml"
slug: "go-xml"
date: 2022-05-20T14:38:05+08:00
draft: false
description: "使用Go简简单单的解析Xml"
tags:
- Go
- Xml
categories:
- Go
---
# 开始之前
```go
import "encoding/xml"
```
## 简单的`Xml`解析
### 1.假设我们解析的`Xml`内容如下:
```xml
<feed>
<person name="initcool" id="1" age="18" />
</feed>
```
<!--more-->
### 2.接着我们构造对应的结构体
```go
type Feed struct {
	XMLName xml.Name `xml:"feed"`
	Person  struct {
		Name string `xml:"name,attr"`
		Id   string `xml:"id,attr"`
		Age  int    `xml:"age,attr"`
	} `xml:"person"`
}
```
### 3.对`Xml`数据进行反序列化
```go
var feed Feed
// 读取Xml文件并返回字节流
content,err := ioutil.ReadFile(XmlFilename)
if err != nil {
log.Fatal(err)
}
// 将读取到的内容反序列化到feed
xml.Unmarshal(content,&feed)
```
## 带有命名空间的`Xml`解析
部分`xml`文件会带有`命名空间`(`Namespace`),也就是冒号左侧的内容,此时我们需要在`go`结构体的`tag` 中加入`命名空间`
### 1.带有命名空间(Namespace)的`Xml`文件
```xml
<feed xmlns:yt="http://www.youtube.com/xml/schemas/2015" xmlns:media="http://search.yahoo.com/mrss/" xmlns="http://www.w3.org/2005/Atom">
<!-- yt即是命名空间 -->
<yt:videoId>XXXXXXX</yt:videoId>
<!-- media是另一个命名空间 -->
<media:community></media:community>
</feed>
```
### 2.针对命名空间构造结构体
```go
type Feed struct {
XMLName xml.Name `xml:"feed"` // 指定最外层的标签为feed
VideoId string `xml:"http://www.youtube.com/xml/schemas/2015 videoId"`
Community string `xml:"http://search.yahoo.com/mrss/ community"`
}
```
### 3.对`Xml`数据进行反序列化
```go
var feed Feed
// 读取Xml文件并返回字节流
content,err := ioutil.ReadFile(XmlFilename)
if err != nil {
log.Fatal(err)
}
// 将读取到的内容反序列化到feed
xml.Unmarshal(content,&feed)
```

View File

@@ -1,36 +0,0 @@
---
title: "Hugo使用指南"
slug: "hugo"
draft: false
date: 2022-05-20T10:23:53+08:00
description: "快速上手hugo"
tags:
- Go
- Hugo
categories:
- Go
---
查看Hugo版本号
```bash
hugo version
```
新建一个Hugo页面
```
hugo new site <siteName>
```
设置主题
```bash
cd <siteName>
git init
# 设置为 Stack主题
git clone https://github.com/CaiJimmy/hugo-theme-stack/ themes/hugo-theme-stack
git submodule add https://github.com/CaiJimmy/hugo-theme-stack/ themes/hugo-theme-stack
```
部署Hugo到github

View File

@@ -1,67 +0,0 @@
---
title: "Linux部署DHCP服务"
description: Debian下使用docker镜像部署DHCP服务
date: 2022-05-23T11:11:40+08:00
draft: false
slug: linux-dhcp
image:
categories: Linux
tags:
- Linux
- DHCP
---
拉取`networkboot/dhcpd`镜像
```shell
docker pull networkboot/dhcpd
```
新建`data/dhcpd.conf`文件
```shell
touch /data/dhcpd.conf
```
修改`data/dhcpd.conf`文件
```
subnet 204.254.239.0 netmask 255.255.255.224 {
option subnet-mask 255.255.0.0;
option domain-name "cname.nmslwsnd.com";
option domain-name-servers 8.8.8.8;
range 204.254.239.10 204.254.239.30;
}
```
修改`/etc/network/interfaces`
```
# The loopback network interface (always required)
auto lo
iface lo inet loopback
# Get our IP address from any DHCP server
auto dhcp
iface dhcp inet static
address 204.254.239.0
netmask 255.255.255.224
```
获取帮助命令
```shell
docker run -it --rm networkboot/dhcpd man dhcpd.conf
```
运行`DHCP`服务
```shell
docker run -it --rm --init --net host -v "/data":/data networkboot/dhcpd <网卡名称>
# 示例
docker run -it --rm --init --net host -v "/data":/data networkboot/dhcpd dhcp
```

View File

@@ -1,36 +0,0 @@
---
title: "Linux Shell"
description:
date: 2022-05-21T10:02:09+08:00
draft: false
Hidden: true
slug: linux-shell
image:
categories:
  - Linux
tags:
  - Linux
  - Shell
---
Linux守护进程:no_good:
```bash
#!/bin/bash
# nohup.sh
while true
do
# -f 后跟进程名,判断进程是否正在运行
if [ `pgrep -f <ProcessName> | wc -l` -eq 0 ];then
echo "进程已终止"
push
# /dev/null 无输出日志
nohup ./<ProcessName> > /dev/null 2>&1 &
else
echo "进程正在运行"
fi
# 每隔1分钟检查一次
sleep 1m
done
```

View File

@@ -1,65 +0,0 @@
---
title: "Linux"
description:
date: 2022-09-08T15:19:00+08:00
draft: true
slug: linux
image:
categories:
- Linux
tags:
- Linux
---
```bash
# 使用cd 进入到上一个目录
cd -
```
复制和粘贴
```bash
ctrl + shift + c
ctrl + shift + v
```
快速移动
```bash
# 移动到行首
ctrl + a
# 移动到行尾
ctrl + e
```
快速删除
```bash
# 删除光标之前的内容
ctrl + u
# 删除光标之后的内容
ctrl + k
# 恢复之前删除的内容
ctrl + y
```
不使用 cat
```
使用less 查看 顶部的文件
less filename
```
使用alt+backspace删除,以单词为单位
```
tcpdump host 1.1.1.1
```
```
# 并行执行命令 Parallel
find . -type f -name '*.html' -print | parallel gzip
```

View File

@@ -0,0 +1,24 @@
---
title: 游黄山记(中)
slug: loco-rs-framework
description: 钱谦益《游黄山记》中篇,适合测试中文长文、检索与段落锚点。
category: 古籍游记
post_type: article
pinned: false
status: published
visibility: public
noindex: false
tags:
- 钱谦益
- 黄山
- 游记
- 长文测试
---
# 游黄山记(中)
由祥符寺度石桥而北,逾慈光寺,行数里,径朱砂庵而上。过此取道钵盂、老人两峰之间,峰趾相并,两崖合遝,弥望削成。
憩桃源庵,指天都为诸峰之中峰,山形络绎,未有以殊异也。云生峰腰,层叠如裼衣焉。
清晓,出文殊院,神鸦背行而先。避莲华沟险,从支径右折,险益甚。上平天矼,转始信峰,经散花坞,看扰龙松。

View File

@@ -1,569 +0,0 @@
---
title: "mysql个人常用命令及操作"
description:
date: 2021-09-21T16:13:24+08:00
draft: true
slug: mysql
image:
categories:
- Database
tags:
- Linux
- Mysql
- Sql
---
启动`mysql`
```bash
sudo service mysql start
```
使用`root`账户登录`mysql`
```bash
sudo mysql -u root
```
查看数据库信息
```mysql
show databases;
```
新增数据库
```mysql
create database <数据库名>;
# 示例新增一个名为gradesystem的数据库
create database gradesystem;
```
切换数据库
```mysql
use <数据库名>;
# 示例切换至gradesystem数据库
use gradesystem;
```
查看数据库中的表
```mysql
# 查看数据库中所有的表
show tables;
```
新增表
```mysql
# MySQL不区分大小写
CREATE TABLE student(
sid int NOT NULL AUTO_INCREMENT,
sname varchar(20) NOT NULL,
gender varchar(10) NOT NULL,
PRIMARY KEY(sid)
);
# 新增一个表名为学生的表。
# AUTO_INCREMENT, 自动地创建主键字段的值。
# PRIMARY KEY(sid) 设置主键为sid
CREATE TABLE course(
cid int not null auto_increment,
cname varchar(20) not null,
primary key(cid)
);
# 新增一个表名为课程的表。
# primary key(cid) 设置主键为cid
CREATE TABLE mark(
mid int not null auto_increment,
sid int not null,
cid int not null,
score int not null,
primary key(mid),
foreign key(sid) references student(sid),
foreign key(cid) references course(cid)
);
# 新增一个表明为mark的表
# primary key(cid) 设置主键为cid
# foreign 设置外键为sid
# foreign 设置外键为cid
insert into student values(1,'Tom','male'),(2,'Jack','male'),(3,'Rose','female');
# 向student表插入数据sid为1sname为'Tom',gender为'male'
insert into course values(1,'math'),(2,'physics'),(3,'chemistry');
# 向course表插入数据sid为1cname为'math'
insert into mark values(1,1,1,80);
# 向mark表插入数据mid为1sid为1,cid为1score为80
```
### 向数据库插入数据
```mysql
source <sql文件路径>
```
## SELECT语句查询
SELECT 语句的基本格式为:
```bash
SELECT 要查询的列名 FROM 表名字 WHERE 限制条件;
```
```mysql
select name,age from employee;
# 查看employee的name列和age列
select name,age from employee where age > 25;
# 筛选出age 大于25的结果
select name,age,phone from employee where name = 'Mary';
# 筛选出name为'Mary'的name,age,phone
select name,age,phone from employee where age < 25 or age >30;
# 筛选出age小于30或大于25的name,age,phone
select name,age,phone from employee where age > 25 and age < 30;
# 筛选出age大于25且小于30的name,age,phone
select name,age,phone from employee where age between 25 and 30;
# 筛选出包含25和30的name,age,phone
select name,age,phone,in_dpt from employee where in_dpt in('dpt3','dpt4');
# 筛选出在dpt3或dpt4里面的name,age,phone,in_dpt
select name,age,phone,in_dpt from employee where in_dpt not in('dpt1','dpt3');
# 筛选出不在dpt1和dpt3的name,age,phone,in_dpt
```
## 通配符
关键字 **LIKE** 可用于实现模糊查询,常见于搜索功能中。
和 LIKE 联用的通常还有通配符,代表未知字符。SQL 中的通配符是 `_` 和 `%`。其中 `_` 代表一个**未指定**字符,`%` 代表**不定个**未指定字符
```mysql
select name,age,phone from employee where phone like '1101__';
# 筛选出1101开头的六位数字的name,age,phone
select name,age,phone from employee where name like 'J%';
# 筛选出name位J开头的人的name,age,phone
```
## 排序
为了使查询结果看起来更顺眼,我们可能需要对结果按某一列来排序,这就要用到 **ORDER BY** 排序关键词。默认情况下,**ORDER BY** 的结果是**升序**排列,而使用关键词 **ASC** 或 **DESC** 可指定**升序**或**降序**排序。 比如,我们**按 salary 降序排列**,SQL 语句为:
```mysql
select name,age,salary,phone from employee order by salary desc;
# salary列按降序排列
select name,age,salary,phone from employee order by salary;
# 不加 DESC 或 ASC 将默认按照升序排列。
```
## SQL 内置函数和计算
置函数,这些函数都对 SELECT 的结果做操作:
| 函数名: | COUNT | SUM | AVG | MAX | MIN |
| -------- | ----- | ---- | -------- | ------ | ------ |
| 作用: | 计数 | 求和 | 求平均值 | 最大值 | 最小值 |
> 其中 COUNT 函数可用于任何数据类型(因为它只是计数),而 SUM 、AVG 函数都只能对数字类数据类型做计算MAX 和 MIN 可用于数值、字符串或是日期时间数据类型。
```mysql
select max(salary) as max_salary,min(salary) from employee;
# 使用as关键字可以给值重命名
```
## 连接查询
在处理多个表时,子查询只有在结果来自一个表时才有用。但如果需要显示两个表或多个表中的数据,这时就必须使用连接 **(join)** 操作。 连接的基本思想是把两个或多个表当作一个新的表来操作,如下:
```mysql
select id,name,people_num from employee,department where employee.in_dpt = department.dpt_name order by id;
# 这条语句查询出的是,各员工所在部门的人数,其中员工的 id 和 name 来自 employee 表people_num 来自 department 表:
select id,name,people_num from employee join department on employee.in_dpt = department.dpt_name order by id;
# 另一个连接语句格式是使用 JOIN ON 语法,刚才的语句等同于以上语句
```
## 删除数据库
```mysql
drop database test_01;
# 删除名为test_01的数据库;
```
### 修改表
重命名一张表的语句有多种形式,以下 3 种格式效果是一样的:
```sql
RENAME TABLE <旧表名> TO <新表名>;
ALTER TABLE <旧表名> RENAME <新表名>;
ALTER TABLE <旧表名> RENAME TO <新表名>;
```
进入数据库 mysql_shiyan
```mysql
use mysql_shiyan
```
使用命令尝试修改 `table_1` 的名字为 `table_2`
```mysql
RENAME TABLE table_1 TO table_2;
```
删除一张表的语句,类似于刚才用过的删除数据库的语句,格式是这样的:
```sql
DROP TABLE 表名字;
```
比如我们把 `table_2` 表删除:
```mysql
DROP TABLE table_2;
```
#### 增加一列
在表中增加一列的语句格式为:
```sql
ALTER TABLE 表名字 ADD COLUMN 列名字 数据类型 约束;
ALTER TABLE 表名字 ADD 列名字 数据类型 约束;
```
现在 employee 表中有 `id、name、age、salary、phone、in_dpt` 这 6 个列,我们尝试加入 `height` (身高)一个列并指定 DEFAULT 约束:
```mysql
ALTER TABLE employee ADD height INT(4) DEFAULT 170;
```
可以发现:新增加的列,被默认放置在这张表的最右边。如果要把增加的列插入在指定位置,则需要在语句的最后使用 AFTER 关键词(**“AFTER 列 1” 表示新增的列被放置在 “列 1” 的后面**)。
> 提醒:语句中的 INT(4) 不是表示整数的字节数,而是表示该值的显示宽度,如果设置填充字符为 0则 170 显示为 0170
比如我们新增一列 `weight`(体重) 放置在 `age`(年龄) 的后面:
```mysql
ALTER TABLE employee ADD weight INT(4) DEFAULT 120 AFTER age;
```
上面的效果是把新增的列加在某位置的后面,如果想放在第一列的位置,则使用 `FIRST` 关键词,如语句:
```sql
ALTER TABLE employee ADD test INT(10) DEFAULT 11 FIRST;
```
#### 删除一列
删除表中的一列和刚才使用的新增一列的语句格式十分相似,只是把关键词 `ADD` 改为 `DROP` ,语句后面不需要有数据类型、约束或位置信息。具体语句格式:
```sql
ALTER TABLE 表名字 DROP COLUMN 列名字;
ALTER TABLE 表名字 DROP 列名字;
```
我们把刚才新增的 `test` 删除:
```sql
ALTER TABLE employee DROP test;
```
#### 重命名一列
这条语句其实不只可用于重命名一列,准确地说,它是对一个列做修改(CHANGE)
```sql
ALTER TABLE 表名字 CHANGE 原列名 新列名 数据类型 约束;
```
> **注意:这条重命名语句后面的 “数据类型” 不能省略,否则重命名失败。**
当**原列名**和**新列名**相同的时候,指定新的**数据类型**或**约束**,就可以用于修改数据类型或约束。需要注意的是,修改数据类型可能会导致数据丢失,所以要慎重使用。
我们用这条语句将 “height” 一列重命名为汉语拼音 “shengao” ,效果如下:
```mysql
ALTER TABLE employee CHANGE height shengao INT(4) DEFAULT 170;
```
#### 改变数据类型
要修改一列的数据类型,除了使用刚才的 **CHANGE** 语句外,还可以用这样的 **MODIFY** 语句:
```sql
ALTER TABLE 表名字 MODIFY 列名字 数据类型;
```
再次提醒,修改数据类型必须小心,因为这可能会导致数据丢失。在尝试修改数据类型之前,请慎重考虑。
#### 修改表中某个值
大多数时候我们需要做修改的不会是整个数据库或整张表,而是表中的某一个或几个数据,这就需要我们用下面这条命令达到精确的修改:
```sql
UPDATE 表名字 SET 列1=值1,列2=值2 WHERE 条件;
```
比如,我们要把 Tom 的 age 改为 21salary 改为 3000
```mysql
UPDATE employee SET age=21,salary=3000 WHERE name='Tom';
```
> **注意:一定要有 WHERE 条件,否则会出现你不想看到的后果**
#### 删除一行记录
删除表中的一行数据,也必须加上 WHERE 条件,否则整列的数据都会被删除。删除语句:
```sql
DELETE FROM 表名字 WHERE 条件;
```
我们尝试把 Tom 的数据删除:
```mysql
DELETE FROM employee WHERE name='Tom';
```
#### 索引
索引是一种与表有关的结构,它的作用相当于书的目录,可以根据目录中的页码快速找到所需的内容。
当表中有大量记录时,若要对表进行查询,没有索引的情况是全表搜索:将所有记录一一取出,和查询条件进行对比,然后返回满足条件的记录。这样做会执行大量磁盘 I/O 操作,并花费大量数据库系统时间。
而如果在表中已建立索引,在索引中找到符合查询条件的索引值,通过索引值就可以快速找到表中的数据,可以**大大加快查询速度**。
对一张表中的某个列建立索引,有以下两种语句格式:
```sql
ALTER TABLE 表名字 ADD INDEX 索引名 (列名);
CREATE INDEX 索引名 ON 表名字 (列名);
```
我们用这两种语句分别建立索引:
```sql
ALTER TABLE employee ADD INDEX idx_id (id); #employee表的id列上建立名为idx_id的索引
CREATE INDEX idx_name ON employee (name); #employee表的name列上建立名为idx_name的索引
```
索引的效果是加快查询速度,当表中数据不够多的时候是感受不出它的效果的。这里我们使用命令 **SHOW INDEX FROM 表名字;** 查看刚才新建的索引:
![01](https://doc.shiyanlou.com/MySQL/sql-06-01.png)
在使用 SELECT 语句查询的时候,语句中 WHERE 里面的条件,会**自动判断有没有可用的索引**。
比如有一个用户表,它拥有用户名(username)和个人签名(note)两个字段。其中用户名具有唯一性,并且格式具有较强的限制,我们给用户名加上一个唯一索引;个性签名格式多变,而且允许不同用户使用重复的签名,不加任何索引。
这时候,如果你要查找某一用户,使用语句 `select * from user where username=?``select * from user where note=?` 性能是有很大差距的,对**建立了索引的用户名**进行条件查询会比**没有索引的个性签名**条件查询快几倍,在数据量大的时候,这个差距只会更大。
一些字段不适合创建索引,比如性别,这个字段存在大量的重复记录无法享受索引带来的速度加成,甚至会拖累数据库,导致数据冗余和额外的 CPU 开销。
## 视图
视图是从一个或多个表中导出来的表,是一种**虚拟存在的表**。它就像一个窗口,通过这个窗口可以看到系统专门提供的数据,这样,用户可以不用看到整个数据库中的数据,而只关心对自己有用的数据。
注意理解视图是虚拟的表:
- 数据库中只存放了视图的定义,而没有存放视图中的数据,这些数据存放在原来的表中;
- 使用视图查询数据时,数据库系统会从原来的表中取出对应的数据;
- 视图中的数据依赖于原来表中的数据,一旦表中数据发生改变,显示在视图中的数据也会发生改变;
- 在使用视图的时候,可以把它当作一张表。
创建视图的语句格式为:
```sql
CREATE VIEW 视图名(a,b,c) AS SELECT 列1,列2,列3 FROM 表名字;
```
可见创建视图的语句,后半句是一个 SELECT 查询语句,所以**视图也可以建立在多张表上**,只需在 SELECT 语句中使用**子查询**或**连接查询**,这些在之前的实验已经进行过。
现在我们创建一个简单的视图,名为 **v_emp**,包含**v_name****v_age****v_phone**三个列:
```sql
CREATE VIEW v_emp (v_name,v_age,v_phone) AS SELECT name,age,phone FROM employee;
```
![02](https://doc.shiyanlou.com/MySQL/sql-06-02.png)
## 导出
导出与导入是相反的过程,是把数据库某个表中的数据保存到一个文件之中。导出语句基本格式为:
```sql
SELECT 列1,列2 INTO OUTFILE '文件路径和文件名' FROM 表名字;
```
**注意:语句中 “文件路径” 之下不能已经有同名文件。**
现在我们把整个 employee 表的数据导出到 /var/lib/mysql-files/ 目录下,导出文件命名为 **out.txt** 具体语句为:
```sql
SELECT * INTO OUTFILE '/var/lib/mysql-files/out.txt' FROM employee;
```
用 gedit 可以查看导出文件 `/var/lib/mysql-files/out.txt` 的内容:
> 也可以使用 `sudo cat /var/lib/mysql-files/out.txt` 命令查看。
## 备份
数据库中的数据十分重要,出于安全性考虑,在数据库的使用中,应该注意使用备份功能。
> 备份与导出的区别:导出的文件只是保存数据库中的数据;而备份,则是把数据库的结构,包括数据、约束、索引、视图等全部另存为一个文件。
**mysqldump** 是 MySQL 用于备份数据库的实用程序。它主要产生一个 SQL 脚本文件,其中包含从头重新创建数据库所必需的命令 CREATE TABLE INSERT 等。
使用 mysqldump 备份的语句:
```bash
mysqldump -u root 数据库名>备份文件名; #备份整个数据库
mysqldump -u root 数据库名 表名字>备份文件名; #备份数据库中的某张表
```
> mysqldump 是一个备份工具,因此该命令是在终端中执行的,而不是在 mysql 交互环境下
我们尝试备份整个数据库 `mysql_shiyan`,将备份文件命名为 `bak.sql`,先 `Ctrl+D` 退出 MySQL 控制台,再打开 Xfce 终端,在终端中输入命令:
```bash
cd /home/shiyanlou/
mysqldump -u root mysql_shiyan > bak.sql;
```
使用命令 “ls” 可见已经生成备份文件 `bak.sql`
![07](https://doc.shiyanlou.com/MySQL/sql-06-07.png)
> 你可以用 gedit 查看备份文件的内容,可以看见里面不仅保存了数据,还有所备份的数据库的其它信息。
## 恢复
用备份文件恢复数据库,其实我们早就使用过了。在本次实验的开始,我们使用过这样一条命令:
```bash
source /tmp/SQL6/MySQL-06.sql
```
这就是一条恢复语句,它把 MySQL-06.sql 文件中保存的 `mysql_shiyan` 数据库恢复。
还有另一种方式恢复数据库,但是在这之前我们先使用命令新建一个**空的数据库 test**
```bash
mysql -u root #因为在上一步已经退出了 MySQL现在需要重新登录
CREATE DATABASE test; #新建一个名为test的数据库
```
再次 **Ctrl+D** 退出 MySQL然后输入语句进行恢复把刚才备份的 **bak.sql** 恢复到 **test** 数据库:
```bash
mysql -u root test < bak.sql
```
我们输入命令查看 test 数据库的表,便可验证是否恢复成功:
```bash
mysql -u root # 因为在上一步已经退出了 MySQL现在需要重新登录
use test # 连接数据库 test
SHOW TABLES; # 查看 test 数据库的表
```
可以看见原数据库的 4 张表和 1 个视图,现在已经恢复到 test 数据库中:
![08](https://doc.shiyanlou.com/MySQL/sql-06-08.png)
再查看 employee 表的恢复情况:
![09](https://doc.shiyanlou.com/MySQL/sql-06-09.png)
## Mysql授权
1. 登录MySQL
```sql
mysql -u root -p
```
2. 进入MySQL并查看用户和主机
```sql
use mysql;
select host,user from user;
```
3. 更新root用户允许远程连接
```sql
update user set host='%' where user='root';
```
4. 设置root用户密码
```sql
alter user 'root'@'localhost' identified by 'your_password';
```
注意:不要使用临时密码。
5. 授权允许远程访问:
```sql
grant all privileges on *.* to 'root'@'%' identified by 'password';
```
请将命令中的“password”更改为您的MySQL密码。
6. 刷新授权:
```sql
flush privileges;
```
7. 关闭授权:
```sql
revoke all on *.* from dba@localhost;
```
8. 查看MySQL初始密码
```bash
grep "password" /var/log/mysqld.log
```
通过以上操作您的MySQL可以被远程连接并进行管理。请注意在授权和更新用户权限时应只授权特定的数据库或表格而不是使用通配符以提高安全性和减少不必要的权限。在进行远程访问授权时应只授权特定的IP地址或IP地址段而不是使用通配符以减少潜在的安全威胁。同时建议使用强密码并定期更换密码以提高安全性。

View File

@@ -1,119 +0,0 @@
---
title: "Redis 安装与常用命令整理"
slug: redis
description: "文章介绍了 Redis 在 Debian 下的安装方法、Windows 图形客户端的安装方式以及监听端口修改、BitMap、消息队列、LREM 和 Pipeline 等常用操作示例。"
category: "数据库"
post_type: "article"
pinned: false
published: true
tags:
- "Redis安装"
- "Debian"
- "BitMap"
- "消息队列"
- "Pipeline"
- "go-redis"
---
# 安装`Redis`
## `Debian`下安装`Redis`服务端
```bash
curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list
sudo apt-get update
sudo apt-get install redis
```
## `Windows`下安装`Redis` 第三方`GUI`客户端
Redis (GUI)管理客户端
```bash
winget install qishibo.AnotherRedisDesktopManager
```
## `Redis`修改监听端口
```bash
vim /etc/redis/redis.conf
```
# `Redis`常用命令
## `bitMap`
使用`BitMap`实现签到,`setbit key offset value,` `key`做为时间,`offset`做为用户`id` ,`value`做为签到状态
```shell
# 示例
setbit key offset value
# 设置用户10086在2022/04/21进行签到
setbit check_in_2022_04_21 10086 1
# 获取用户10086是否在2022/04/21签到
getbit check_in_2022_04_21 10086
# bitcount 获取20220421签到的用户数量
# 可选 start和end参数
# start 和 end 参数的设置和 GETRANGE 命令类似,都可以使用负数值:比如 -1 表示最后一个位,而 -2 表示倒数第二个位
BITCOUNT 20220421
# BITOP 对一个或多个保存二进制位的字符串 key 进行位元操作,并将结果保存到 destkey 上
# operation 可以是 AND 、 OR 、 NOT 、 XOR 这四种操作中的任意一种:
# BITOP AND destkey key [key ...] ,对一个或多个 key 求逻辑并,并将结果保存到 destkey 。
# BITOP OR destkey key [key ...] ,对一个或多个 key 求逻辑或,并将结果保存到 destkey 。
# BITOP XOR destkey key [key ...] ,对一个或多个 key 求逻辑异或,并将结果保存到 destkey 。
# BITOP NOT destkey key ,对给定 key 求逻辑非,并将结果保存到 destkey 。
# 除了 NOT 操作之外,其他操作都可以接受一个或多个 key 作为输入。
BITOP AND and-result 20220421 20220420
GETBIT and-result 10086
```
## `Redis` 消息队列
```
# LPUSH key value, Lpush用于生产并添加消息
# LPOP key,用于取出消息
```
## `Lrem`
```shell
# count > 0 : 从表头开始向表尾搜索,移除与 VALUE 相等的元素,数量为 COUNT 。
# count < 0 : 从表尾开始向表头搜索,移除与 VALUE 相等的元素,数量为 COUNT 的绝对值。
# count = 0 : 移除表中所有与 VALUE 相等的值。
LREM key count VALUE
```
## `Pipeline`
`Redis` 使用的是客户端-服务器(`CS`)模型和请求/响应协议的 TCP 服务器。这意味着通常情况下一个请求会遵循以下步骤:
客户端向服务端发送一个查询请求,并监听 Socket 返回,通常是以阻塞模式,等待服务端响应。
服务端处理命令,并将结果返回给客户端。
管道(`pipeline`可以一次性发送多条命令并在执行完后一次性将结果返回pipeline 通过减少客户端与 redis 的通信次数来实现降低往返延时时间,而且 `Pipeline` 实现的原理是队列,而队列的原理是时先进先出,这样就保证数据的顺序性。
通俗点:`pipeline`就是把一组命令进行打包然后一次性通过网络发送到Redis。同时将执行的结果批量的返回回来
```go
// 使用 go-redis
p := Client.Pipeline()
for _, v := range val {
p.LRem("user:watched:"+guid, 0, v)
}
// p.Exec()执行pipeline 请求
p.Exec()
```
[本文参考](https://blog.csdn.net/mumuwang1234/article/details/118603697)

View File

@@ -1,169 +0,0 @@
---
title: "手把手教你用Rust进行Dll注入"
description: 我是一个懒惰的男孩,我甚至懒的不想按键盘上的按键和挪动鼠标.可是我还是想玩游戏,该怎么做呢?通过 google 了解到我可以通过将我自己编写的dll文件注入到目标程序内,来实现这个事情.
date: 2022-09-17T15:10:26+08:00
draft: false
slug: rust-dll
image:
categories:
- Rust
tags:
- Rust
- Dll
---
# 前言
我是一个懒惰的男孩,我甚至懒的不想按键盘上的按键和挪动鼠标.可是我还是想玩游戏,该怎么做呢?
通过google了解到我可以通过将我自己编写的 `dll` 文件注入到目标程序内,来实现这个事情.
将大象放在冰箱里需要几步?
答案是三步。
# `snes9x` 模拟器 `Dll` 注入实战
## 一、现在我们需要进行第一步,生成 `Dll` 文件
准确说是我们需要生成符合 `C` 标准的 `dll` 文件,如果你使用 `go` 语言,直接使用 `Cgo``C` 进行互动,即可生成符合 `C` 标准的 `dll` .
但是很明显,我要用 `Rust` 来做这件事。
由于 `Rust` 拥有出色的所有权机制,和其他语言的交互会导致 `Rust` 失去这个特性,所以这一块是属于 `Unsafe` 区域的。
`Rust` 默认生成的 `Dll` 是提供给 `Rust` 语言来调用的,而非C系语言的 `dll`.
我们现在来生成 `C` 系语言的 `Dll` 吧。
### 1.新建项目 `lib` 目录 `lib` 目录主要作为库文件以方便其他开发者调用
```bash
# 新建库项目
cargo new --lib <project name>
cargo new --lib joy
```
### 2.修改 `Cargo.toml` 文件 增加 `bin` 区域
```toml
[package]
name = "joy"
version = "0.1.0"
edition = "2021"
[lib]
name = "joy"
path = "src/lib.rs"
crate-type = ["cdylib"]
[[bin]]
name = "joyrun"
path = "src/main.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
```
```bash
# 为项目导入依赖ctor来生成符合c标准的dll
cargo add ctor
```
### 3.修改 `lib.rs` 使用 `ctor`
```rust
// lib.rs
#[ctor::ctor]
fn ctor() {
println!("我是一个dll")
}
```
#### 4.编译项目生成 `joy.dll` 以及 `joyrun.exe`
```bash
cargo build
```
现在我们有了我们自己的 `dll` 文件,该如何将他注入到目标的进程呢?
## 二、使用 `dll-syringe` 进行dll注入
```
cargo add dll-syringe
```
### 1.修改main.rs 将刚刚编写的dll注入到目标应用
```rust
// main.rs
use dll_syringe::{Syringe, process::OwnedProcess};
fn main() {
// 通过进程名找到目标进程
let target_process = OwnedProcess::find_first_by_name("snes9x").unwrap();
// 新建一个注入器
let syringe = Syringe::for_process(target_process);
// 将我们刚刚编写的dll加载进去
let injected_payload = syringe.inject("joy.dll").unwrap();
// do something else
// 将我们刚刚注入的dll从目标程序内移除
syringe.eject(injected_payload).unwrap();
}
```
### 2.运行项目
```shell
# 运行项目
cargo run
```
此时你可能会遇到一个新问题,我的`dll`已经加载进目标程序了,为什么没有打印 "我是一个dll"
### 3.解决控制台无输出问题
这是由于目标程序没有控制台,所以我们没有看到 `dll` 的输出,接下来让我们来获取 `dll` 的输出。
此时我们可以使用 `TCP` 交互的方式或采用 `OutputDebugStringA function (debugapi.h)` 来进行打印
`OutputDebugStringA` ,需要额外开启`features` `Win32_System_Diagnostics_Debug`
```rust
// Rust Unsafe fn
// windows::Win32::System::Diagnostics::Debug::OutputDebugStringA
pub unsafe fn OutputDebugStringA<'a, P0>(lpoutputstring: P0)
where
P0: Into<PCSTR>,
// Required features: "Win32_System_Diagnostics_Debug"
```
采用 `Tcp` 通信交互
```rust
// 在lib.rs 新建tcp客户端
let stream = TcpStream::connect("127.0.0.1:7331").unwrap();
```
```rust
// 在main.rs 新建tcp服务端
let (mut stream, addr) = listener.accept()?;
info!(%addr,"Accepted!");
let mut buf = vec![0u8; 1024];
let mut stdout = std::io::stdout();
while let Ok(n) = stream.read(&mut buf[..]) {
if n == 0 {
break;
}
stdout.write_all(&buf[..n])?
}
```
```shell
# 运行项目
cargo run
# 运行之后,大功告成,成功在Tcp服务端看到了,客户端对我们发起了请求。
```

View File

@@ -0,0 +1,24 @@
---
title: 徐霞客游记·游恒山日记
slug: rust-programming-tips
description: 游恒山、悬空寺与北岳登顶的古文纪行,适合做中文长文测试。
category: 古籍游记
post_type: article
pinned: false
status: published
visibility: public
noindex: false
tags:
- 徐霞客
- 恒山
- 悬空寺
- 长文测试
---
# 徐霞客游记·游恒山日记
出南山。大溪从山中俱来者,别而西去。余北驰平陆中,望外界之山,高不及台山十之四,其长缭绕如垣。
余溯西涧入,又一涧自北来,遂从其西登岭,道甚峻。北向直上者六七里,西转,又北跻而上者五六里,登峰两重,造其巅,是名箭筸岭。
三转,峡愈隘,崖愈高。西崖之半,层楼高悬,曲榭斜倚,望之如蜃吐重台者,悬空寺也。

View File

@@ -1,96 +0,0 @@
---
title: "Rust使用Serde进行序列化及反序列化"
description: 这篇文章将介绍如何在Rust编程语言中使用Serde库进行序列化和反序列化操作。Serde是一个广泛使用的序列化和反序列化库能够支持JSON、BSON、CBOR、MessagePack和YAML等常见数据格式。
date: 2022-07-25T14:02:22+08:00
draft: false
slug: rust-serde
image:
categories:
- Rust
tags:
- Rust
- Xml
---
# 开始之前
```toml
# 在Cargo.toml 新增以下依赖
[dependencies]
serde = { version = "1.0.140",features = ["derive"] }
serde_json = "1.0.82"
serde_yaml = "0.8"
serde_urlencoded = "0.7.1"
# 使用yaserde解析xml
yaserde = "0.8.0"
yaserde_derive = "0.8.0"
```
## `Serde`通用规则(`json`,`yaml`,`xml`)
### 1.使用`Serde`宏通过具体结构实现序列化及反序列化
```rust
use serde::{Deserialize, Serialize};
// 为结构体实现 Serialize(序列化)属性和Deserialize(反序列化)
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Person {
// 将该字段名称修改为lastname
#[serde(rename = "lastname")]
name: String,
// 反序列化及序列化时忽略该字段(nickname)
#[serde(skip)]
nickname: String,
// 分别设置序列化及反序列化时输出的字段名称
#[serde(rename(serialize = "serialize_id", deserialize = "deserialize_id"))]
id: i32,
// 为age设置默认值
#[serde(default)]
age: i32,
}
```
### 2.使用`serde_json`序列化及反序列化
```rust
use serde_json::{json, Value};
let v:serde_json::Value = json!(
{
"x":20.0,
"y":15.0
}
);
println!("x:{:#?},y:{:#?}",v["x"],v["y"]); // x:20.0, y:15.0
```
### 3.使用`Serde`宏统一格式化输入、输出字段名称
| 方法名 | 方法效果 |
| ------------------------------- | ------------------------------------------------------------ |
| `PascalCase` | 首字母为大写的驼峰式命名,推荐结构体、枚举等名称以及`Yaml`配置文件读取使用。 |
| `camelCase` | 首字母为小写的驼峰式命名,推荐`Yaml`配置文件读取使用。 |
| `snake_case` | 小蛇形命名,用下划线"`_`"连接单词,推荐函数命名以及变量名称使用此种方式。 |
| `SCREAMING_SNAKE_CASE` | 大蛇形命名,单词均为大写形式,用下划线"`_`"连接单词。推荐常数及全局变量使用此种方式。 |
| `kebab-case`(小串烤肉) | 同`snake_case`,使用中横线"`-`"替换了下划线"`_`"。 |
| `SCREAMING-KEBAB-CAS`(大串烤肉) | 同`SCREAMING_SNAKE_CASE`,使用中横线"`-`"替换了下划线"`_`"。 |
示例:
```rust
#[serde(rename_all = "PascalCase")]
pub struct App {
/// 统一格式化输入、输出字段名称
/// #[serde(rename_all = "camelCase")]
/// #[serde(rename_all = "snake_case")]
/// #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
/// 仅设置
version: String,
app_name: String,
host: String,
}
```
[本文参考:yaserde](https://github.com/media-io/yaserde)
[本文参考:magiclen](https://magiclen.org/rust-serde/)

View File

@@ -1,37 +0,0 @@
---
title: "Rust Sqlx"
description:
date: 2022-08-29T13:55:08+08:00
draft: true
slug: rust-sqlx
image:
categories: []
tags: []
---
# sqlx-cli
## 创建 migration
```shell
sqlx migrate add categories
```
```sql
-- Add migration script here
CREATE TABLE IF NOT EXISTS categories(
    id INT AUTO_INCREMENT PRIMARY KEY,
type_id INT UNIQUE NOT NULL,
parent_id INT NOT NULL,
    name TEXT UNIQUE NOT NULL
);
```
## 运行 migration
```sh
sqlx migrate run
```

View File

@@ -0,0 +1,24 @@
---
title: 游黄山记(上)
slug: terminal-ui-design
description: 钱谦益《游黄山记》上篇,包含序、记之一与记之二。
category: 古籍游记
post_type: article
pinned: false
status: published
visibility: public
noindex: false
tags:
- 钱谦益
- 黄山
- 游记
- 长文测试
---
# 游黄山记(上)
辛巳春,余与程孟阳订黄山之游,约以梅花时相寻于武林之西溪。徐维翰书来劝驾,读之两腋欲举,遂挟吴去尘以行。
黄山耸秀峻极,作镇一方。江南诸山,天台、天目为最,以地形准之,黄山之趾与二山齐。
自山口至汤口,山之麓也,登山之径于是始。汤泉之流,自紫石峰六百仞县布,其下有香泉溪。

View File

@@ -1,54 +0,0 @@
---
title: "在 Tmux 会话窗格中发送命令的方法"
slug: tmux
description: "介绍如何在 Tmux 中创建分离会话、向指定窗格发送命令并执行回车,同时说明连接会话和发送特殊按键的基本用法。"
category: "Linux"
post_type: "article"
pinned: false
published: true
tags:
- "Tmux"
- "终端复用"
- "send-keys"
- "会话管理"
- "命令行"
---
## 在 Tmux 会话窗格中发送命令的方法
`Tmux` 中,可以使用 `send-keys` 命令将命令发送到会话窗格中。以下是在 `Tmux` 中发送命令的步骤:
### 1. 新建一个分离(`Detached`)会话
使用以下命令新建一个分离会话:
```bash
tmux new -d -s mySession
```
### 2. 发送命令至会话窗格
使用以下命令将命令发送到会话窗格:
```bash
tmux send-keys -t mySession "echo 'Hello World!'" ENTER
```
这将发送 `echo 'Hello World!'` 命令,并模拟按下回车键(`ENTER`),以在会话窗格中执行该命令。
### 3. 连接(`Attach`)会话窗格
使用以下命令连接会话窗格:
```bash
tmux a -t mySession
```
这将连接到名为 `mySession` 的会话窗格。
### 4. 发送特殊命令
要发送特殊命令,例如清除当前行或使用管理员权限运行命令,请使用以下命令:
- 清除当前行:`tmux send-keys C-c`
- 以管理员身份运行命令:`sudo tmux send-keys ...`

View File

@@ -0,0 +1,24 @@
---
title: 徐霞客游记·游太和山日记(上)
slug: welcome-to-termi
description: 《徐霞客游记》太和山上篇,适合作为中文长文测试样本。
category: 古籍游记
post_type: article
pinned: true
status: published
visibility: public
noindex: false
tags:
- 徐霞客
- 游记
- 太和山
- 长文测试
---
# 徐霞客游记·游太和山日记(上)
登仙猿岭。十馀里,有枯溪小桥,为郧县境,乃河南、湖广界。东五里,有池一泓,曰青泉,上源不见所自来,而下流淙淙,地又属淅川。
自此连逾山岭,桃李缤纷,山花夹道,幽艳异常。山坞之中,居庐相望,沿流稻畦,高下鳞次,不似山、陕间矣。
骑而南趋,石道平敞。三十里,越一石梁,有溪自西东注,即太和下流入汉者。越桥为迎恩宫,西向。前有碑大书“第一山”三字,乃米襄阳笔。

View File

@@ -36,6 +36,13 @@ mod m20260331_000025_create_post_revisions;
mod m20260331_000026_create_subscriptions;
mod m20260331_000027_create_notification_deliveries;
mod m20260331_000028_expand_subscriptions_and_deliveries;
mod m20260331_000029_add_subscription_popup_settings_to_site_settings;
mod m20260401_000030_add_public_security_and_web_push_to_site_settings;
mod m20260401_000031_add_notification_channel_type_to_site_settings;
mod m20260401_000032_add_runtime_security_keys_to_site_settings;
mod m20260401_000033_add_taxonomy_metadata_and_media_assets;
mod m20260401_000034_add_source_markdown_to_posts;
mod m20260401_000035_add_human_verification_modes_to_site_settings;
pub struct Migrator;
#[async_trait::async_trait]
@@ -76,6 +83,13 @@ impl MigratorTrait for Migrator {
Box::new(m20260331_000026_create_subscriptions::Migration),
Box::new(m20260331_000027_create_notification_deliveries::Migration),
Box::new(m20260331_000028_expand_subscriptions_and_deliveries::Migration),
Box::new(m20260331_000029_add_subscription_popup_settings_to_site_settings::Migration),
Box::new(m20260401_000030_add_public_security_and_web_push_to_site_settings::Migration),
Box::new(m20260401_000031_add_notification_channel_type_to_site_settings::Migration),
Box::new(m20260401_000032_add_runtime_security_keys_to_site_settings::Migration),
Box::new(m20260401_000033_add_taxonomy_metadata_and_media_assets::Migration),
Box::new(m20260401_000034_add_source_markdown_to_posts::Migration),
Box::new(m20260401_000035_add_human_verification_modes_to_site_settings::Migration),
// inject-above (do not remove this comment)
]
}

View File

@@ -0,0 +1,111 @@
use sea_orm_migration::prelude::*;
// Migration: add the "subscription popup" settings columns to `site_settings`.
// Each column is added behind a `has_column` guard, so re-running against a
// database where some columns already exist is safe.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Adds four nullable columns to `site_settings`:
    /// - `subscription_popup_enabled`       boolean, default `true`
    /// - `subscription_popup_title`         string
    /// - `subscription_popup_description`   text
    /// - `subscription_popup_delay_seconds` integer, default `18`
    ///
    /// All columns are nullable, so existing rows need no backfill.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        // Toggle for showing the subscription popup; defaults to on.
        if !manager
            .has_column("site_settings", "subscription_popup_enabled")
            .await?
        {
            manager
                .alter_table(
                    Table::alter()
                        .table(table.clone())
                        .add_column(
                            ColumnDef::new(Alias::new("subscription_popup_enabled"))
                                .boolean()
                                .null()
                                .default(true),
                        )
                        .to_owned(),
                )
                .await?;
        }
        // Popup headline text (nullable).
        if !manager
            .has_column("site_settings", "subscription_popup_title")
            .await?
        {
            manager
                .alter_table(
                    Table::alter()
                        .table(table.clone())
                        .add_column(
                            ColumnDef::new(Alias::new("subscription_popup_title"))
                                .string()
                                .null(),
                        )
                        .to_owned(),
                )
                .await?;
        }
        // Popup body text; declared as a text column (longer content).
        if !manager
            .has_column("site_settings", "subscription_popup_description")
            .await?
        {
            manager
                .alter_table(
                    Table::alter()
                        .table(table.clone())
                        .add_column(
                            ColumnDef::new(Alias::new("subscription_popup_description"))
                                .text()
                                .null(),
                        )
                        .to_owned(),
                )
                .await?;
        }
        // Delay before the popup appears; defaults to 18 (seconds, per name).
        if !manager
            .has_column("site_settings", "subscription_popup_delay_seconds")
            .await?
        {
            manager
                .alter_table(
                    Table::alter()
                        // Last use of `table`, so it is moved rather than cloned.
                        .table(table)
                        .add_column(
                            ColumnDef::new(Alias::new("subscription_popup_delay_seconds"))
                                .integer()
                                .null()
                                .default(18),
                        )
                        .to_owned(),
                )
                .await?;
        }
        Ok(())
    }

    /// Drops the four popup columns in reverse order of creation, skipping any
    /// column that is already absent.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        for column in [
            "subscription_popup_delay_seconds",
            "subscription_popup_description",
            "subscription_popup_title",
            "subscription_popup_enabled",
        ] {
            if manager.has_column("site_settings", column).await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(table.clone())
                            .drop_column(Alias::new(column))
                            .to_owned(),
                    )
                    .await?;
            }
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,59 @@
use sea_orm_migration::prelude::*;
// Migration: add the public security / web-push feature toggles to
// `site_settings`.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Adds three nullable boolean columns in a single ALTER TABLE:
    /// `comment_turnstile_enabled`, `subscription_turnstile_enabled` and
    /// `web_push_enabled`. `add_column_if_not_exists` makes the call
    /// idempotent, so no `has_column` guards are needed here.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        manager
            .alter_table(
                Table::alter()
                    .table(table.clone())
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("comment_turnstile_enabled"))
                            .boolean()
                            .null(),
                    )
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("subscription_turnstile_enabled"))
                            .boolean()
                            .null(),
                    )
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("web_push_enabled"))
                            .boolean()
                            .null(),
                    )
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Drops the three columns in reverse order of creation, skipping any
    /// column that is already absent.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        for column in [
            "web_push_enabled",
            "subscription_turnstile_enabled",
            "comment_turnstile_enabled",
        ] {
            if manager.has_column("site_settings", column).await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(table.clone())
                            .drop_column(Alias::new(column))
                            .to_owned(),
                    )
                    .await?;
            }
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,51 @@
use sea_orm_migration::prelude::*;
// Migration: add the `notification_channel_type` column to `site_settings`.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Adds a nullable string column `notification_channel_type`, guarded by
    /// `has_column` so the migration is safe to re-run.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        if !manager
            .has_column("site_settings", "notification_channel_type")
            .await?
        {
            manager
                .alter_table(
                    Table::alter()
                        .table(table.clone())
                        .add_column(
                            ColumnDef::new(Alias::new("notification_channel_type"))
                                .string()
                                .null(),
                        )
                        .to_owned(),
                )
                .await?;
        }
        Ok(())
    }

    /// Drops `notification_channel_type` if it exists.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        if manager
            .has_column("site_settings", "notification_channel_type")
            .await?
        {
            manager
                .alter_table(
                    Table::alter()
                        .table(table)
                        .drop_column(Alias::new("notification_channel_type"))
                        .to_owned(),
                )
                .await?;
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,71 @@
use sea_orm_migration::prelude::*;
// Migration: add runtime security key columns (Turnstile keys and web-push
// VAPID key material) to `site_settings`. All columns are nullable text.
// NOTE(review): `turnstile_secret_key` and `web_push_vapid_private_key` are
// secrets stored in the database as plain text columns — confirm that access
// to this table is appropriately restricted.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Adds five nullable text columns in a single idempotent ALTER TABLE:
    /// `turnstile_site_key`, `turnstile_secret_key`,
    /// `web_push_vapid_public_key`, `web_push_vapid_private_key` and
    /// `web_push_vapid_subject`.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        manager
            .alter_table(
                Table::alter()
                    .table(table)
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("turnstile_site_key"))
                            .text()
                            .null(),
                    )
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("turnstile_secret_key"))
                            .text()
                            .null(),
                    )
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("web_push_vapid_public_key"))
                            .text()
                            .null(),
                    )
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("web_push_vapid_private_key"))
                            .text()
                            .null(),
                    )
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("web_push_vapid_subject"))
                            .text()
                            .null(),
                    )
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Drops the five key columns in reverse order of creation, skipping any
    /// column that is already absent.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        for column in [
            "web_push_vapid_subject",
            "web_push_vapid_private_key",
            "web_push_vapid_public_key",
            "turnstile_secret_key",
            "turnstile_site_key",
        ] {
            if manager.has_column("site_settings", column).await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(table.clone())
                            .drop_column(Alias::new(column))
                            .to_owned(),
                    )
                    .await?;
            }
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,161 @@
use sea_orm_migration::prelude::*;
// Migration: add presentation/SEO metadata columns to `categories` and `tags`,
// and create the `media_assets` table with a unique index on `object_key`.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Three steps, each idempotent:
    /// 1. For `categories` and `tags`, add (when missing) the nullable columns
    ///    `description` (text), `cover_image`, `accent_color`, `seo_title`
    ///    (strings) and `seo_description` (text).
    /// 2. Create `media_assets` if it does not exist.
    /// 3. Ensure a unique index on `media_assets.object_key`.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Same metadata columns are added to both taxonomy tables.
        for table_name in ["categories", "tags"] {
            if !manager.has_column(table_name, "description").await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(Alias::new(table_name))
                            .add_column(ColumnDef::new(Alias::new("description")).text().null())
                            .to_owned(),
                    )
                    .await?;
            }
            if !manager.has_column(table_name, "cover_image").await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(Alias::new(table_name))
                            .add_column(ColumnDef::new(Alias::new("cover_image")).string().null())
                            .to_owned(),
                    )
                    .await?;
            }
            if !manager.has_column(table_name, "accent_color").await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(Alias::new(table_name))
                            .add_column(ColumnDef::new(Alias::new("accent_color")).string().null())
                            .to_owned(),
                    )
                    .await?;
            }
            if !manager.has_column(table_name, "seo_title").await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(Alias::new(table_name))
                            .add_column(ColumnDef::new(Alias::new("seo_title")).string().null())
                            .to_owned(),
                    )
                    .await?;
            }
            if !manager.has_column(table_name, "seo_description").await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(Alias::new(table_name))
                            .add_column(ColumnDef::new(Alias::new("seo_description")).text().null())
                            .to_owned(),
                    )
                    .await?;
            }
        }
        // Media library table: timestamps first, auto-increment PK, then the
        // storage key plus optional descriptive metadata.
        if !manager.has_table("media_assets").await? {
            manager
                .create_table(
                    Table::create()
                        .table(Alias::new("media_assets"))
                        .if_not_exists()
                        .col(
                            ColumnDef::new(Alias::new("created_at"))
                                .timestamp_with_time_zone()
                                .not_null()
                                .default(Expr::current_timestamp()),
                        )
                        .col(
                            ColumnDef::new(Alias::new("updated_at"))
                                .timestamp_with_time_zone()
                                .not_null()
                                .default(Expr::current_timestamp()),
                        )
                        .col(
                            ColumnDef::new(Alias::new("id"))
                                .integer()
                                .not_null()
                                .auto_increment()
                                .primary_key(),
                        )
                        .col(ColumnDef::new(Alias::new("object_key")).string().not_null())
                        .col(ColumnDef::new(Alias::new("title")).string().null())
                        .col(ColumnDef::new(Alias::new("alt_text")).string().null())
                        .col(ColumnDef::new(Alias::new("caption")).text().null())
                        // Tags stored as JSONB rather than a join table.
                        .col(ColumnDef::new(Alias::new("tags")).json_binary().null())
                        .col(ColumnDef::new(Alias::new("notes")).text().null())
                        .to_owned(),
                )
                .await?;
        }
        // One asset row per storage object; `if_not_exists` keeps this
        // idempotent even though the create_index call itself is unguarded.
        manager
            .create_index(
                Index::create()
                    .name("idx_media_assets_object_key_unique")
                    .table(Alias::new("media_assets"))
                    .col(Alias::new("object_key"))
                    .unique()
                    .if_not_exists()
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Reverses `up`: drops the index, then the `media_assets` table, then
    /// the taxonomy metadata columns — each step only if present.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        if manager
            .has_index("media_assets", "idx_media_assets_object_key_unique")
            .await?
        {
            manager
                .drop_index(
                    Index::drop()
                        .name("idx_media_assets_object_key_unique")
                        .table(Alias::new("media_assets"))
                        .to_owned(),
                )
                .await?;
        }
        if manager.has_table("media_assets").await? {
            manager
                .drop_table(Table::drop().table(Alias::new("media_assets")).to_owned())
                .await?;
        }
        for table_name in ["categories", "tags"] {
            // Reverse order of creation.
            for column in [
                "seo_description",
                "seo_title",
                "accent_color",
                "cover_image",
                "description",
            ] {
                if manager.has_column(table_name, column).await? {
                    manager
                        .alter_table(
                            Table::alter()
                                .table(Alias::new(table_name))
                                .drop_column(Alias::new(column))
                                .to_owned(),
                        )
                        .await?;
                }
            }
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,37 @@
use sea_orm_migration::prelude::*;
// Migration: add the `source_markdown` column to `posts`, which stores the
// post's original markdown (nullable text).
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Adds `posts.source_markdown` (nullable text) when it is missing.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        if !manager.has_column("posts", "source_markdown").await? {
            manager
                .alter_table(
                    Table::alter()
                        .table(Alias::new("posts"))
                        .add_column(ColumnDef::new(Alias::new("source_markdown")).text().null())
                        .to_owned(),
                )
                .await?;
        }
        Ok(())
    }

    /// Drops `posts.source_markdown` if it exists.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        if manager.has_column("posts", "source_markdown").await? {
            manager
                .alter_table(
                    Table::alter()
                        .table(Alias::new("posts"))
                        .drop_column(Alias::new("source_markdown"))
                        .to_owned(),
                )
                .await?;
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,86 @@
use sea_orm::{DbBackend, Statement};
use sea_orm_migration::prelude::*;
// Migration: add human-verification mode columns to `site_settings` and
// backfill them from the legacy `*_turnstile_enabled` boolean flags.
// NOTE(review): the backfill uses raw SQL tagged DbBackend::Postgres, so this
// migration is Postgres-specific.
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Adds nullable string columns `comment_verification_mode` and
    /// `subscription_verification_mode` (idempotently), then backfills rows
    /// whose mode is NULL or blank:
    /// - comments: 'turnstile' if `comment_turnstile_enabled`, else 'captcha'
    /// - subscriptions: 'turnstile' if `subscription_turnstile_enabled`,
    ///   else 'off'
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        manager
            .alter_table(
                Table::alter()
                    .table(table.clone())
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("comment_verification_mode"))
                            .string()
                            .null(),
                    )
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("subscription_verification_mode"))
                            .string()
                            .null(),
                    )
                    .to_owned(),
            )
            .await?;
        // Backfill comment mode; the WHERE clause restricts the update to
        // rows where the mode is still NULL or empty/whitespace-only.
        manager
            .get_connection()
            .execute(Statement::from_string(
                DbBackend::Postgres,
                r#"
UPDATE site_settings
SET comment_verification_mode = CASE
WHEN COALESCE(comment_turnstile_enabled, false) THEN 'turnstile'
ELSE 'captcha'
END
WHERE COALESCE(trim(comment_verification_mode), '') = ''
"#
                .to_string(),
            ))
            .await?;
        // Backfill subscription mode; note the non-Turnstile default here is
        // 'off', unlike 'captcha' for comments.
        manager
            .get_connection()
            .execute(Statement::from_string(
                DbBackend::Postgres,
                r#"
UPDATE site_settings
SET subscription_verification_mode = CASE
WHEN COALESCE(subscription_turnstile_enabled, false) THEN 'turnstile'
ELSE 'off'
END
WHERE COALESCE(trim(subscription_verification_mode), '') = ''
"#
                .to_string(),
            ))
            .await?;
        Ok(())
    }

    /// Drops the two mode columns if present. The backfilled values are not
    /// restored to the legacy flags (the flags were never modified).
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        for column in [
            "subscription_verification_mode",
            "comment_verification_mode",
        ] {
            if manager.has_column("site_settings", column).await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(table.clone())
                            .drop_column(Alias::new(column))
                            .to_owned(),
                    )
                    .await?;
            }
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,3 @@
[toolchain]
channel = "1.94.1"
components = ["rustfmt", "clippy"]

View File

@@ -1,5 +1,8 @@
use async_trait::async_trait;
use axum::{http::Method, Router as AxumRouter};
use axum::{
http::{header, HeaderName, Method},
Router as AxumRouter,
};
use loco_rs::{
app::{AppContext, Hooks, Initializer},
bgworker::{BackgroundWorker, Queue},
@@ -15,8 +18,8 @@ use migration::Migrator;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, QueryOrder, Set,
};
use std::path::Path;
use tower_http::cors::{Any, CorsLayer};
use std::{collections::BTreeSet, path::Path};
use tower_http::cors::CorsLayer;
#[allow(unused_imports)]
use crate::{
@@ -29,6 +32,48 @@ use crate::{
};
pub struct App;
/// Canonicalizes a CORS origin candidate: strips surrounding whitespace and
/// any trailing `/` characters. Returns `None` when nothing remains.
fn normalized_origin(value: &str) -> Option<String> {
    let cleaned = value.trim().trim_end_matches('/');
    (!cleaned.is_empty()).then(|| cleaned.to_string())
}
fn collect_cors_origins() -> Vec<String> {
let mut origins = BTreeSet::new();
for origin in [
"http://127.0.0.1:4321",
"http://127.0.0.1:4322",
"http://localhost:4321",
"http://localhost:4322",
] {
origins.insert(origin.to_string());
}
for key in [
"APP_BASE_URL",
"ADMIN_API_BASE_URL",
"ADMIN_FRONTEND_BASE_URL",
"PUBLIC_API_BASE_URL",
"PUBLIC_FRONTEND_BASE_URL",
"TERMI_CORS_ALLOWED_ORIGINS",
] {
if let Ok(value) = std::env::var(key) {
for origin in value.split([',', ';', ' ']) {
if let Some(origin) = normalized_origin(origin) {
origins.insert(origin);
}
}
}
}
origins.into_iter().collect()
}
#[async_trait]
impl Hooks for App {
fn app_name() -> &'static str {
@@ -61,6 +106,7 @@ impl Hooks for App {
AppRoutes::with_default_routes() // controller routes below
.add_route(controllers::health::routes())
.add_route(controllers::admin_api::routes())
.add_route(controllers::admin_taxonomy::routes())
.add_route(controllers::admin_ops::routes())
.add_route(controllers::review::routes())
.add_route(controllers::category::routes())
@@ -76,8 +122,22 @@ impl Hooks for App {
.add_route(controllers::subscription::routes())
}
async fn after_routes(router: AxumRouter, _ctx: &AppContext) -> Result<AxumRouter> {
let allowed_origins = collect_cors_origins()
.into_iter()
.filter_map(|origin| origin.parse().ok())
.collect::<Vec<_>>();
let allowed_headers = [
header::ACCEPT,
header::ACCEPT_LANGUAGE,
header::AUTHORIZATION,
header::CONTENT_LANGUAGE,
header::CONTENT_TYPE,
header::COOKIE,
header::ORIGIN,
HeaderName::from_static("x-requested-with"),
];
let cors = CorsLayer::new()
.allow_origin(Any)
.allow_origin(allowed_origins)
.allow_methods([
Method::GET,
Method::POST,
@@ -85,7 +145,8 @@ impl Hooks for App {
Method::PATCH,
Method::DELETE,
])
.allow_headers(Any);
.allow_headers(allowed_headers)
.allow_credentials(true);
Ok(router.layer(cors))
}
@@ -270,6 +331,24 @@ impl Hooks for App {
.await?;
if existing.is_none() {
let comment_verification_mode = settings["comment_verification_mode"]
.as_str()
.map(ToString::to_string);
let subscription_verification_mode = settings
["subscription_verification_mode"]
.as_str()
.map(ToString::to_string);
let comment_turnstile_enabled = settings["comment_turnstile_enabled"]
.as_bool()
.or(comment_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let subscription_turnstile_enabled = settings
["subscription_turnstile_enabled"]
.as_bool()
.or(subscription_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let tech_stack = settings["tech_stack"]
.as_array()
.map(|items| {
@@ -347,6 +426,16 @@ impl Hooks for App {
paragraph_comments_enabled: Set(settings["paragraph_comments_enabled"]
.as_bool()
.or(Some(true))),
comment_verification_mode: Set(comment_verification_mode),
comment_turnstile_enabled: Set(comment_turnstile_enabled),
subscription_verification_mode: Set(subscription_verification_mode),
subscription_turnstile_enabled: Set(subscription_turnstile_enabled),
turnstile_site_key: Set(settings["turnstile_site_key"]
.as_str()
.map(ToString::to_string)),
turnstile_secret_key: Set(settings["turnstile_secret_key"]
.as_str()
.map(ToString::to_string)),
ai_provider: Set(settings["ai_provider"].as_str().map(ToString::to_string)),
ai_api_base: Set(settings["ai_api_base"].as_str().map(ToString::to_string)),
ai_api_key: Set(settings["ai_api_key"].as_str().map(ToString::to_string)),

View File

@@ -1,6 +1,6 @@
use axum::{
extract::{Multipart, Query},
http::{header, HeaderMap},
http::{HeaderMap, header},
};
use loco_rs::prelude::*;
use sea_orm::{
@@ -22,7 +22,7 @@ use crate::{
ai_chunks, comment_blacklist, comment_persona_analysis_logs, comments, friend_links, posts,
reviews,
},
services::{admin_audit, ai, analytics, comment_guard, content, storage},
services::{admin_audit, ai, analytics, comment_guard, content, media_assets, storage},
};
#[derive(Clone, Debug, Deserialize)]
@@ -44,8 +44,13 @@ pub struct AdminSessionResponse {
pub can_logout: bool,
}
fn build_session_response(identity: Option<crate::controllers::admin::AdminIdentity>) -> AdminSessionResponse {
let can_logout = matches!(identity.as_ref().map(|item| item.source.as_str()), Some("local"));
fn build_session_response(
identity: Option<crate::controllers::admin::AdminIdentity>,
) -> AdminSessionResponse {
let can_logout = matches!(
identity.as_ref().map(|item| item.source.as_str()),
Some("local")
);
AdminSessionResponse {
authenticated: identity.is_some(),
@@ -165,6 +170,16 @@ pub struct AdminSiteSettingsResponse {
pub music_playlist: Vec<site_settings::MusicTrackPayload>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_verification_mode: String,
pub comment_turnstile_enabled: bool,
pub subscription_verification_mode: String,
pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>,
pub turnstile_secret_key: Option<String>,
pub web_push_vapid_public_key: Option<String>,
pub web_push_vapid_private_key: Option<String>,
pub web_push_vapid_subject: Option<String>,
pub ai_provider: Option<String>,
pub ai_api_base: Option<String>,
pub ai_api_key: Option<String>,
@@ -191,8 +206,13 @@ pub struct AdminSiteSettingsResponse {
pub seo_default_og_image: Option<String>,
pub seo_default_twitter_handle: Option<String>,
pub notification_webhook_url: Option<String>,
pub notification_channel_type: String,
pub notification_comment_enabled: bool,
pub notification_friend_link_enabled: bool,
pub subscription_popup_enabled: bool,
pub subscription_popup_title: String,
pub subscription_popup_description: String,
pub subscription_popup_delay_seconds: i32,
pub search_synonyms: Vec<String>,
}
@@ -249,6 +269,11 @@ pub struct AdminMediaObjectResponse {
pub url: String,
pub size_bytes: i64,
pub last_modified: Option<String>,
pub title: Option<String>,
pub alt_text: Option<String>,
pub caption: Option<String>,
pub tags: Vec<String>,
pub notes: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
@@ -295,6 +320,32 @@ pub struct AdminMediaReplaceResponse {
pub url: String,
}
/// Request body for PATCH /storage/media/metadata.
///
/// `key` identifies the stored object; all other fields are optional and,
/// when omitted, deserialize to `None` via `#[serde(default)]`.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaMetadataPayload {
    /// Object key in storage; trimmed and validated non-empty by the handler.
    pub key: String,
    #[serde(default)]
    pub title: Option<String>,
    #[serde(default)]
    pub alt_text: Option<String>,
    #[serde(default)]
    pub caption: Option<String>,
    /// Free-form tags; passed through to `media_assets::upsert_by_key`.
    #[serde(default)]
    pub tags: Option<Vec<String>>,
    #[serde(default)]
    pub notes: Option<String>,
}
/// Response echoing the persisted media metadata after an upsert.
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaMetadataResponse {
    /// Always `true` on success (errors are returned as HTTP errors instead).
    pub saved: bool,
    pub key: String,
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    pub tags: Vec<String>,
    pub notes: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaListQuery {
pub prefix: Option<String>,
@@ -625,6 +676,23 @@ fn normalize_media_key(value: Option<String>) -> Option<String> {
})
}
/// Combine a storage listing entry with its optional DB metadata row into
/// the admin API response shape.
///
/// When `metadata` is `None` (no row for this key), all metadata fields
/// come back as `None` / empty tags.
fn build_media_object_response(
    item: storage::StoredObjectSummary,
    metadata: Option<&crate::models::_entities::media_assets::Model>,
) -> AdminMediaObjectResponse {
    AdminMediaObjectResponse {
        key: item.key,
        url: item.url,
        size_bytes: item.size_bytes,
        last_modified: item.last_modified,
        title: metadata.and_then(|entry| entry.title.clone()),
        alt_text: metadata.and_then(|entry| entry.alt_text.clone()),
        caption: metadata.and_then(|entry| entry.caption.clone()),
        // tag_list normalizes the stored tags column into a Vec<String>.
        tags: metadata.map(media_assets::tag_list).unwrap_or_default(),
        notes: metadata.and_then(|entry| entry.notes.clone()),
    }
}
fn tech_stack_values(value: &Option<serde_json::Value>) -> Vec<String> {
value
.as_ref()
@@ -656,6 +724,19 @@ fn build_settings_response(
) -> AdminSiteSettingsResponse {
let ai_providers = site_settings::ai_provider_configs(&item);
let ai_active_provider_id = site_settings::active_ai_provider_id(&item);
let comment_verification_mode = crate::services::turnstile::selected_mode(
&item,
crate::services::turnstile::TurnstileScope::Comment,
);
let subscription_verification_mode = crate::services::turnstile::selected_mode(
&item,
crate::services::turnstile::TurnstileScope::Subscription,
);
let turnstile_site_key = crate::services::turnstile::site_key(&item);
let turnstile_secret_key = crate::services::turnstile::secret_key(&item);
let web_push_vapid_public_key = crate::services::web_push::public_key(&item);
let web_push_vapid_private_key = crate::services::web_push::private_key(&item);
let web_push_vapid_subject = crate::services::web_push::vapid_subject(&item);
AdminSiteSettingsResponse {
id: item.id,
@@ -678,6 +759,22 @@ fn build_settings_response(
music_playlist: music_playlist_values(&item.music_playlist),
ai_enabled: item.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true),
comment_verification_mode: comment_verification_mode.as_str().to_string(),
comment_turnstile_enabled: matches!(
comment_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
subscription_verification_mode: subscription_verification_mode.as_str().to_string(),
subscription_turnstile_enabled: matches!(
subscription_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
web_push_enabled: item.web_push_enabled.unwrap_or(false),
turnstile_site_key,
turnstile_secret_key,
web_push_vapid_public_key,
web_push_vapid_private_key,
web_push_vapid_subject,
ai_provider: item.ai_provider,
ai_api_base: item.ai_api_base,
ai_api_key: item.ai_api_key,
@@ -704,8 +801,23 @@ fn build_settings_response(
seo_default_og_image: item.seo_default_og_image,
seo_default_twitter_handle: item.seo_default_twitter_handle,
notification_webhook_url: item.notification_webhook_url,
notification_channel_type: item
.notification_channel_type
.unwrap_or_else(|| "webhook".to_string()),
notification_comment_enabled: item.notification_comment_enabled.unwrap_or(false),
notification_friend_link_enabled: item.notification_friend_link_enabled.unwrap_or(false),
subscription_popup_enabled: item
.subscription_popup_enabled
.unwrap_or_else(site_settings::default_subscription_popup_enabled),
subscription_popup_title: item
.subscription_popup_title
.unwrap_or_else(site_settings::default_subscription_popup_title),
subscription_popup_description: item
.subscription_popup_description
.unwrap_or_else(site_settings::default_subscription_popup_description),
subscription_popup_delay_seconds: item
.subscription_popup_delay_seconds
.unwrap_or_else(site_settings::default_subscription_popup_delay_seconds),
search_synonyms: tech_stack_values(&item.search_synonyms),
}
}
@@ -753,7 +865,10 @@ pub async fn session_login(
#[debug_handler]
pub async fn session_logout(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
let before = resolve_admin_identity(&headers);
if matches!(before.as_ref().map(|item| item.source.as_str()), Some("local")) {
if matches!(
before.as_ref().map(|item| item.source.as_str()),
Some("local")
) {
clear_local_session(&headers);
}
@@ -764,7 +879,10 @@ pub async fn session_logout(headers: HeaderMap, State(ctx): State<AppContext>) -
"admin.logout",
"admin_session",
None,
identity.email.clone().or_else(|| Some(identity.username.clone())),
identity
.email
.clone()
.or_else(|| Some(identity.username.clone())),
None,
)
.await?;
@@ -785,7 +903,6 @@ pub async fn session_logout(headers: HeaderMap, State(ctx): State<AppContext>) -
#[debug_handler]
pub async fn dashboard(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let all_posts = posts::Entity::find().all(&ctx.db).await?;
let total_posts = all_posts.len() as u64;
@@ -843,10 +960,7 @@ pub async fn dashboard(headers: HeaderMap, State(ctx): State<AppContext>) -> Res
}
}
let mut recent_posts = all_posts
.clone()
.into_iter()
.collect::<Vec<_>>();
let mut recent_posts = all_posts.clone().into_iter().collect::<Vec<_>>();
recent_posts.sort_by(|left, right| right.created_at.cmp(&left.created_at));
let recent_posts = recent_posts
.into_iter()
@@ -959,13 +1073,19 @@ pub async fn dashboard(headers: HeaderMap, State(ctx): State<AppContext>) -> Res
}
#[debug_handler]
pub async fn analytics_overview(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
pub async fn analytics_overview(
headers: HeaderMap,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
format::json(analytics::build_admin_analytics(&ctx).await?)
}
#[debug_handler]
pub async fn get_site_settings(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
pub async fn get_site_settings(
headers: HeaderMap,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
let current = site_settings::load_current(&ctx).await?;
let ai_chunks_count = ai_chunks::Entity::find().count(&ctx.db).await?;
@@ -1061,7 +1181,10 @@ pub async fn test_ai_image_provider(
}
#[debug_handler]
pub async fn test_r2_storage(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
pub async fn test_r2_storage(
headers: HeaderMap,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
let settings = storage::require_r2_settings(&ctx).await?;
@@ -1082,14 +1205,18 @@ pub async fn list_media_objects(
check_auth(&headers)?;
let settings = storage::require_r2_settings(&ctx).await?;
let items = storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200))
.await?
let objects =
storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200)).await?;
let keys = objects
.iter()
.map(|item| item.key.clone())
.collect::<Vec<_>>();
let metadata_map = media_assets::list_by_keys(&ctx, &keys).await?;
let items = objects
.into_iter()
.map(|item| AdminMediaObjectResponse {
key: item.key,
url: item.url,
size_bytes: item.size_bytes,
last_modified: item.last_modified,
.map(|item| {
let metadata = metadata_map.get(&item.key);
build_media_object_response(item, metadata)
})
.collect::<Vec<_>>();
@@ -1115,6 +1242,13 @@ pub async fn delete_media_object(
}
storage::delete_object(&ctx, key).await?;
if let Err(error) = media_assets::delete_by_key(&ctx, key).await {
tracing::warn!(
?error,
key,
"failed to delete media metadata after object deletion"
);
}
format::json(AdminMediaDeleteResponse {
deleted: true,
@@ -1208,7 +1342,16 @@ pub async fn batch_delete_media_objects(
for key in keys {
match storage::delete_object(&ctx, &key).await {
Ok(()) => deleted.push(key),
Ok(()) => {
if let Err(error) = media_assets::delete_by_key(&ctx, &key).await {
tracing::warn!(
?error,
key,
"failed to delete media metadata after batch removal"
);
}
deleted.push(key)
}
Err(_) => failed.push(key),
}
}
@@ -1216,6 +1359,43 @@ pub async fn batch_delete_media_objects(
format::json(AdminMediaBatchDeleteResponse { deleted, failed })
}
/// PATCH handler: create or update editorial metadata (title, alt text,
/// caption, tags, notes) for a stored media object.
///
/// # Errors
/// - `Error::BadRequest` ("缺少对象 key") when `key` is blank.
/// - Auth/DB errors propagate from `check_auth` and `upsert_by_key`.
#[debug_handler]
pub async fn update_media_object_metadata(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<AdminMediaMetadataPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let key = payload.key.trim();
    if key.is_empty() {
        return Err(Error::BadRequest("缺少对象 key".to_string()));
    }
    // Upsert keyed on the object key; fields left as None are handled by
    // the service layer (semantics not visible here — see media_assets).
    let metadata = media_assets::upsert_by_key(
        &ctx,
        key,
        media_assets::MediaAssetMetadataInput {
            title: payload.title,
            alt_text: payload.alt_text,
            caption: payload.caption,
            tags: payload.tags,
            notes: payload.notes,
        },
    )
    .await?;
    // Echo back the persisted row so the client can refresh its view.
    format::json(AdminMediaMetadataResponse {
        saved: true,
        key: metadata.object_key.clone(),
        title: metadata.title.clone(),
        alt_text: metadata.alt_text.clone(),
        caption: metadata.caption.clone(),
        tags: media_assets::tag_list(&metadata),
        notes: metadata.notes.clone(),
    })
}
#[debug_handler]
pub async fn replace_media_object(
headers: HeaderMap,
@@ -1278,7 +1458,10 @@ pub async fn replace_media_object(
}
#[debug_handler]
pub async fn list_comment_blacklist(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
pub async fn list_comment_blacklist(
headers: HeaderMap,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
let items = comment_blacklist::Entity::find()
@@ -1795,6 +1978,10 @@ pub fn routes() -> Routes {
"/storage/media/batch-delete",
post(batch_delete_media_objects),
)
.add(
"/storage/media/metadata",
patch(update_media_object_metadata),
)
.add("/storage/media/replace", post(replace_media_object))
.add(
"/comments/blacklist",

View File

@@ -11,7 +11,10 @@ use crate::{
models::_entities::{
admin_audit_logs, notification_deliveries, post_revisions, subscriptions,
},
services::{admin_audit, post_revisions as revision_service, subscriptions as subscription_service},
services::{
admin_audit, backups, post_revisions as revision_service,
subscriptions as subscription_service,
},
};
#[derive(Clone, Debug, Default, Deserialize)]
@@ -82,6 +85,13 @@ pub struct DigestDispatchRequest {
pub period: Option<String>,
}
/// Request body for POST /api/admin/site-backup/import.
#[derive(Clone, Debug, Deserialize)]
pub struct SiteBackupImportRequest {
    /// Full backup document, as produced by the export endpoint.
    pub backup: backups::SiteBackupDocument,
    /// Optional import strategy string; interpreted by
    /// `backups::import_site_backup` (valid values not visible here).
    #[serde(default)]
    pub mode: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
pub struct PostRevisionListItem {
pub id: i32,
@@ -440,6 +450,25 @@ pub async fn send_subscription_digest(
format::json(summary)
}
/// GET handler: export the whole site as a JSON backup document.
/// Admin-only; delegates all serialization to the backups service.
#[debug_handler]
pub async fn export_site_backup(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    format::json(backups::export_site_backup(&ctx).await?)
}
/// POST handler: restore site data from a previously exported backup.
/// Admin-only; `mode` (optional) selects the import strategy in the service.
#[debug_handler]
pub async fn import_site_backup(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<SiteBackupImportRequest>,
) -> Result<Response> {
    check_auth(&headers)?;
    format::json(backups::import_site_backup(&ctx, payload.backup, payload.mode.as_deref()).await?)
}
pub fn routes() -> Routes {
Routes::new()
.prefix("/api/admin")
@@ -452,4 +481,6 @@ pub fn routes() -> Routes {
.add("/subscriptions/digest", post(send_subscription_digest))
.add("/subscriptions/{id}", patch(update_subscription).delete(delete_subscription))
.add("/subscriptions/{id}/test", post(test_subscription))
.add("/site-backup/export", get(export_site_backup))
.add("/site-backup/import", post(import_site_backup))
}

View File

@@ -0,0 +1,483 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sea_orm::{ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use crate::{
controllers::admin::check_auth,
models::_entities::{categories, posts, tags},
services::content,
};
/// Request body shared by category and tag create/update endpoints.
///
/// Only `name` is required (validated by `normalized_name`); all other
/// fields default to `None` and blank strings are dropped via
/// `trim_to_option` before persisting.
#[derive(Clone, Debug, Deserialize)]
pub struct TaxonomyPayload {
    pub name: Option<String>,
    /// URL slug; derived from `name` via `slugify` when omitted.
    #[serde(default)]
    pub slug: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub cover_image: Option<String>,
    #[serde(default)]
    pub accent_color: Option<String>,
    #[serde(default)]
    pub seo_title: Option<String>,
    #[serde(default)]
    pub seo_description: Option<String>,
}
/// Admin-facing category row, including a usage `count` computed against
/// all posts (see `build_category_record`).
#[derive(Clone, Debug, Serialize)]
pub struct AdminCategoryRecord {
    pub id: i32,
    pub name: String,
    pub slug: String,
    /// Number of posts whose `category` matches this name or slug.
    pub count: usize,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    /// RFC 3339 timestamps.
    pub created_at: String,
    pub updated_at: String,
}
/// Admin-facing tag row, including a usage `count` computed against all
/// posts (see `build_tag_record`).
#[derive(Clone, Debug, Serialize)]
pub struct AdminTagRecord {
    pub id: i32,
    pub name: String,
    pub slug: String,
    /// Number of posts whose tag list contains this name or slug.
    pub count: usize,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    /// RFC 3339 timestamps.
    pub created_at: String,
    pub updated_at: String,
}
/// Convert an arbitrary display name into a URL-safe slug.
///
/// ASCII alphanumerics are kept (lowercased); runs of whitespace, `-`,
/// and `_` collapse into a single `-`; every other character (including
/// non-ASCII) is dropped. Leading/trailing dashes are trimmed away.
fn slugify(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    let mut pending_dash = false;
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
            pending_dash = false;
        } else if !pending_dash && (ch == '-' || ch == '_' || ch.is_whitespace()) {
            out.push('-');
            pending_dash = true;
        }
    }
    out.trim_matches('-').to_string()
}
fn normalized_name(params: &TaxonomyPayload, label: &str) -> Result<String> {
params
.name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
.ok_or_else(|| Error::BadRequest(format!("{label}名称不能为空")))
}
fn normalized_slug(value: Option<&str>, fallback: &str, label: &str) -> Result<String> {
let slug = value
.map(str::trim)
.filter(|item| !item.is_empty())
.map(ToString::to_string)
.unwrap_or_else(|| slugify(fallback));
if slug.is_empty() {
return Err(Error::BadRequest(format!(
"{label} slug 不能为空,请填写英文字母 / 数字 / 连字符"
)));
}
Ok(slug)
}
/// Canonical comparison form for taxonomy matching: trimmed and
/// ASCII-lowercased.
fn normalized_token(value: &str) -> String {
    let trimmed = value.trim();
    trimmed.to_ascii_lowercase()
}
/// Trim an optional string, mapping missing or whitespace-only input to
/// `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    match value {
        Some(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        None => None,
    }
}
/// Decode a post's JSON `tags` column into normalized, non-empty tokens.
///
/// Missing or malformed JSON yields an empty list rather than an error,
/// so counting never fails on legacy rows.
fn post_tag_values(post: &posts::Model) -> Vec<String> {
    let raw: Vec<String> = match post.tags.as_ref() {
        Some(value) => serde_json::from_value(value.clone()).unwrap_or_default(),
        None => Vec::new(),
    };
    let mut tokens = Vec::with_capacity(raw.len());
    for item in raw {
        let token = normalized_token(&item);
        if !token.is_empty() {
            tokens.push(token);
        }
    }
    tokens
}
/// Display name for a category, falling back to its slug when unnamed.
fn category_name(item: &categories::Model) -> String {
    match &item.name {
        Some(name) => name.clone(),
        None => item.slug.clone(),
    }
}
/// Display name for a tag, falling back to its slug when unnamed.
fn tag_name(item: &tags::Model) -> String {
    match &item.name {
        Some(name) => name.clone(),
        None => item.slug.clone(),
    }
}
/// Build the admin response row for one category, counting how many posts
/// reference it by either display name or slug (case-insensitive, via
/// `normalized_token`).
fn build_category_record(
    item: &categories::Model,
    post_items: &[posts::Model],
) -> AdminCategoryRecord {
    let name = category_name(item);
    // A post may store either the name or the slug in its category column;
    // match against both normalized forms.
    let aliases = [normalized_token(&name), normalized_token(&item.slug)];
    let count = post_items
        .iter()
        .filter(|post| {
            post.category
                .as_deref()
                .map(normalized_token)
                .is_some_and(|value| aliases.iter().any(|alias| alias == &value))
        })
        .count();
    AdminCategoryRecord {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}
/// Build the admin response row for one tag, counting posts whose tag list
/// contains the tag's name or slug (normalized, case-insensitive).
fn build_tag_record(item: &tags::Model, post_items: &[posts::Model]) -> AdminTagRecord {
    let name = tag_name(item);
    // Posts may store either the display name or the slug as a tag value.
    let aliases = [normalized_token(&name), normalized_token(&item.slug)];
    let count = post_items
        .iter()
        .filter(|post| {
            post_tag_values(post)
                .into_iter()
                .any(|value| aliases.iter().any(|alias| alias == &value))
        })
        .count();
    AdminTagRecord {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}
async fn load_category(ctx: &AppContext, id: i32) -> Result<categories::Model> {
categories::Entity::find_by_id(id)
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)
}
/// Fetch a tag by primary key.
///
/// # Errors
/// `Error::NotFound` when no row exists; DB errors propagate via `?`.
async fn load_tag(ctx: &AppContext, id: i32) -> Result<tags::Model> {
    tags::Entity::find_by_id(id)
        .one(&ctx.db)
        .await?
        .ok_or(Error::NotFound)
}
/// Reject a category slug already used by a *different* row.
///
/// `exclude_id` lets an update keep its own current slug.
/// NOTE(review): `.one()` inspects a single matching row, which assumes the
/// slug column is unique at the DB level — confirm a unique constraint.
///
/// # Errors
/// `Error::BadRequest` ("分类 slug 已存在") on conflict; DB errors propagate.
async fn ensure_category_slug_unique(
    ctx: &AppContext,
    slug: &str,
    exclude_id: Option<i32>,
) -> Result<()> {
    if let Some(existing) = categories::Entity::find()
        .filter(categories::Column::Slug.eq(slug))
        .one(&ctx.db)
        .await?
    {
        if Some(existing.id) != exclude_id {
            return Err(Error::BadRequest("分类 slug 已存在".to_string()));
        }
    }
    Ok(())
}
/// Reject a tag slug already used by a *different* row.
///
/// `exclude_id` lets an update keep its own current slug.
/// NOTE(review): `.one()` inspects a single matching row, which assumes the
/// slug column is unique at the DB level — confirm a unique constraint.
///
/// # Errors
/// `Error::BadRequest` ("标签 slug 已存在") on conflict; DB errors propagate.
async fn ensure_tag_slug_unique(
    ctx: &AppContext,
    slug: &str,
    exclude_id: Option<i32>,
) -> Result<()> {
    if let Some(existing) = tags::Entity::find()
        .filter(tags::Column::Slug.eq(slug))
        .one(&ctx.db)
        .await?
    {
        if Some(existing.id) != exclude_id {
            return Err(Error::BadRequest("标签 slug 已存在".to_string()));
        }
    }
    Ok(())
}
/// Load every post; used to compute per-taxonomy usage counts in memory.
async fn load_posts(ctx: &AppContext) -> Result<Vec<posts::Model>> {
    let items = posts::Entity::find().all(&ctx.db).await?;
    Ok(items)
}
/// GET handler: list all categories ordered by slug, each with its post
/// usage count. Admin-only.
#[debug_handler]
pub async fn list_categories(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let items = categories::Entity::find()
        .order_by_asc(categories::Column::Slug)
        .all(&ctx.db)
        .await?;
    // All posts are loaded once and shared across the count computation.
    let post_items = load_posts(&ctx).await?;
    format::json(
        items
            .into_iter()
            .map(|item| build_category_record(&item, &post_items))
            .collect::<Vec<_>>(),
    )
}
/// POST handler: create a category from the payload.
///
/// Validates the name, derives/validates the slug, enforces slug
/// uniqueness, then inserts and returns the new row with its usage count.
#[debug_handler]
pub async fn create_category(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "分类")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "分类")?;
    ensure_category_slug_unique(&ctx, &slug, None).await?;
    // Blank optional fields are normalized to None before insert.
    let item = categories::ActiveModel {
        name: Set(Some(name)),
        slug: Set(slug),
        description: Set(trim_to_option(payload.description)),
        cover_image: Set(trim_to_option(payload.cover_image)),
        accent_color: Set(trim_to_option(payload.accent_color)),
        seo_title: Set(trim_to_option(payload.seo_title)),
        seo_description: Set(trim_to_option(payload.seo_description)),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_category_record(&item, &post_items))
}
/// PATCH handler: update a category and, when its display name changes,
/// rewrite posts that reference the old name before persisting.
///
/// Order matters: the reference rewrite runs *before* the row update so
/// `rewrite_category_references` still sees the previous name/slug.
#[debug_handler]
pub async fn update_category(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "分类")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "分类")?;
    // Uniqueness check excludes this row so keeping the same slug is allowed.
    ensure_category_slug_unique(&ctx, &slug, Some(id)).await?;
    let item = load_category(&ctx, id).await?;
    let previous_name = item.name.clone();
    let previous_slug = item.slug.clone();
    // Rewrite only when the trimmed previous name differs from the new one;
    // a None/blank previous name also triggers the rewrite branch.
    if previous_name
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
        != Some(name.as_str())
    {
        content::rewrite_category_references(
            &ctx,
            previous_name.as_deref(),
            &previous_slug,
            Some(&name),
        )
        .await?;
    }
    let mut active = item.into_active_model();
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(payload.description));
    active.cover_image = Set(trim_to_option(payload.cover_image));
    active.accent_color = Set(trim_to_option(payload.accent_color));
    active.seo_title = Set(trim_to_option(payload.seo_title));
    active.seo_description = Set(trim_to_option(payload.seo_description));
    let updated = active.update(&ctx.db).await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_category_record(&updated, &post_items))
}
/// DELETE handler: remove a category, first clearing references to it in
/// post content (passing `None` as the replacement name).
#[debug_handler]
pub async fn delete_category(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let item = load_category(&ctx, id).await?;
    content::rewrite_category_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
    item.delete(&ctx.db).await?;
    format::empty()
}
/// GET handler: list all tags ordered by slug, each with its post usage
/// count. Admin-only.
#[debug_handler]
pub async fn list_tags(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
    check_auth(&headers)?;
    let items = tags::Entity::find()
        .order_by_asc(tags::Column::Slug)
        .all(&ctx.db)
        .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(
        items
            .into_iter()
            .map(|item| build_tag_record(&item, &post_items))
            .collect::<Vec<_>>(),
    )
}
/// POST handler: create a tag from the payload.
///
/// Mirrors `create_category`: validate name, derive/validate slug, enforce
/// uniqueness, insert, and return the row with its usage count.
#[debug_handler]
pub async fn create_tag(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "标签")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "标签")?;
    ensure_tag_slug_unique(&ctx, &slug, None).await?;
    let item = tags::ActiveModel {
        name: Set(Some(name)),
        slug: Set(slug),
        description: Set(trim_to_option(payload.description)),
        cover_image: Set(trim_to_option(payload.cover_image)),
        accent_color: Set(trim_to_option(payload.accent_color)),
        seo_title: Set(trim_to_option(payload.seo_title)),
        seo_description: Set(trim_to_option(payload.seo_description)),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_tag_record(&item, &post_items))
}
/// PATCH handler: update a tag and, when its display name changes, rewrite
/// posts that reference the old name before persisting.
///
/// Order matters: the reference rewrite runs *before* the row update so
/// `rewrite_tag_references` still sees the previous name/slug.
#[debug_handler]
pub async fn update_tag(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "标签")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "标签")?;
    // Uniqueness check excludes this row so keeping the same slug is allowed.
    ensure_tag_slug_unique(&ctx, &slug, Some(id)).await?;
    let item = load_tag(&ctx, id).await?;
    let previous_name = item.name.clone();
    let previous_slug = item.slug.clone();
    // Rewrite only when the trimmed previous name differs from the new one.
    if previous_name
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
        != Some(name.as_str())
    {
        content::rewrite_tag_references(
            &ctx,
            previous_name.as_deref(),
            &previous_slug,
            Some(&name),
        )
        .await?;
    }
    let mut active = item.into_active_model();
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(payload.description));
    active.cover_image = Set(trim_to_option(payload.cover_image));
    active.accent_color = Set(trim_to_option(payload.accent_color));
    active.seo_title = Set(trim_to_option(payload.seo_title));
    active.seo_description = Set(trim_to_option(payload.seo_description));
    let updated = active.update(&ctx.db).await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_tag_record(&updated, &post_items))
}
/// DELETE handler: remove a tag, first clearing references to it in post
/// content (passing `None` as the replacement name).
#[debug_handler]
pub async fn delete_tag(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let item = load_tag(&ctx, id).await?;
    content::rewrite_tag_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
    item.delete(&ctx.db).await?;
    format::empty()
}
/// Route table for the admin taxonomy API:
/// CRUD for categories and tags under /api/admin.
pub fn routes() -> Routes {
    Routes::new()
        .add(
            "/api/admin/categories",
            get(list_categories).post(create_category),
        )
        .add(
            "/api/admin/categories/{id}",
            patch(update_category).delete(delete_category),
        )
        .add("/api/admin/tags", get(list_tags).post(create_tag))
        .add("/api/admin/tags/{id}", patch(update_tag).delete(delete_tag))
}

View File

@@ -14,12 +14,41 @@ pub struct CategorySummary {
pub name: String,
pub slug: String,
pub count: usize,
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
pub seo_description: Option<String>,
}
/// Full category row as returned by create/update/get endpoints
/// (see `build_record`); unlike `CategorySummary`, keeps `name` optional
/// and includes timestamps instead of a usage count.
#[derive(Clone, Debug, Serialize)]
pub struct CategoryRecord {
    pub id: i32,
    pub name: Option<String>,
    pub slug: String,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    /// RFC 3339 timestamps.
    pub created_at: String,
    pub updated_at: String,
}
/// Create/update payload for the public category controller; blank optional
/// fields are normalized away via `trim_to_option` before persisting.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Params {
    pub name: Option<String>,
    /// URL slug; derived from `name` via `slugify` when omitted.
    pub slug: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub cover_image: Option<String>,
    #[serde(default)]
    pub accent_color: Option<String>,
    #[serde(default)]
    pub seo_title: Option<String>,
    #[serde(default)]
    pub seo_description: Option<String>,
}
fn slugify(value: &str) -> String {
@@ -39,6 +68,17 @@ fn slugify(value: &str) -> String {
slug.trim_matches('-').to_string()
}
/// Trim an optional string; missing or whitespace-only input collapses to
/// `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value
        .map(|item| item.trim().to_string())
        .filter(|trimmed| !trimmed.is_empty())
}
fn normalized_name(params: &Params) -> Result<String> {
let name = params
.name
@@ -60,6 +100,52 @@ fn normalized_slug(params: &Params, fallback: &str) -> String {
.unwrap_or_else(|| slugify(fallback))
}
/// Display name for a category, falling back to its slug when unnamed.
fn category_name(item: &categories::Model) -> String {
    match &item.name {
        Some(name) => name.clone(),
        None => item.slug.clone(),
    }
}
/// Build the public-facing summary for one category, counting posts whose
/// trimmed `category` value equals the name or slug (ASCII
/// case-insensitive).
fn build_summary(item: &categories::Model, post_items: &[posts::Model]) -> CategorySummary {
    let name = category_name(item);
    let count = post_items
        .iter()
        .filter(|post| {
            post.category
                .as_deref()
                .map(str::trim)
                .is_some_and(|value| {
                    value.eq_ignore_ascii_case(&name) || value.eq_ignore_ascii_case(&item.slug)
                })
        })
        .count();
    CategorySummary {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
    }
}
/// Convert a category model into the serializable `CategoryRecord`,
/// consuming the model and formatting timestamps as RFC 3339.
fn build_record(item: categories::Model) -> CategoryRecord {
    CategoryRecord {
        id: item.id,
        name: item.name,
        slug: item.slug,
        description: item.description,
        cover_image: item.cover_image,
        accent_color: item.accent_color,
        seo_title: item.seo_title,
        seo_description: item.seo_description,
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<categories::Model> {
let item = categories::Entity::find_by_id(id).one(&ctx.db).await?;
item.ok_or(Error::NotFound)
@@ -67,8 +153,6 @@ async fn load_item(ctx: &AppContext, id: i32) -> Result<categories::Model> {
#[debug_handler]
pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let category_items = categories::Entity::find()
.order_by_asc(categories::Column::Slug)
.all(&ctx.db)
@@ -77,23 +161,7 @@ pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
let categories = category_items
.into_iter()
.map(|category| {
let name = category
.name
.clone()
.unwrap_or_else(|| category.slug.clone());
let count = post_items
.iter()
.filter(|post| post.category.as_deref().map(str::trim) == Some(name.as_str()))
.count();
CategorySummary {
id: category.id,
name,
slug: category.slug,
count,
}
})
.map(|category| build_summary(&category, &post_items))
.collect::<Vec<_>>();
format::json(categories)
@@ -113,18 +181,28 @@ pub async fn add(State(ctx): State<AppContext>, Json(params): Json<Params>) -> R
let mut model = existing_category.into_active_model();
model.name = Set(Some(name));
model.slug = Set(slug);
model.description = Set(trim_to_option(params.description));
model.cover_image = Set(trim_to_option(params.cover_image));
model.accent_color = Set(trim_to_option(params.accent_color));
model.seo_title = Set(trim_to_option(params.seo_title));
model.seo_description = Set(trim_to_option(params.seo_description));
model.update(&ctx.db).await?
} else {
categories::ActiveModel {
name: Set(Some(name)),
slug: Set(slug),
description: Set(trim_to_option(params.description)),
cover_image: Set(trim_to_option(params.cover_image)),
accent_color: Set(trim_to_option(params.accent_color)),
seo_title: Set(trim_to_option(params.seo_title)),
seo_description: Set(trim_to_option(params.seo_description)),
..Default::default()
}
.insert(&ctx.db)
.await?
};
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -146,32 +224,37 @@ pub async fn update(
!= Some(name.as_str())
{
content::rewrite_category_references(
&ctx,
previous_name.as_deref(),
&previous_slug,
Some(&name),
)?;
)
.await?;
}
let mut item = item.into_active_model();
item.name = Set(Some(name));
item.slug = Set(slug);
item.description = Set(trim_to_option(params.description));
item.cover_image = Set(trim_to_option(params.cover_image));
item.accent_color = Set(trim_to_option(params.accent_color));
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
let item = load_item(&ctx, id).await?;
content::rewrite_category_references(item.name.as_deref(), &item.slug, None)?;
content::rewrite_category_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty()
}
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
format::json(load_item(&ctx, id).await?)
format::json(build_record(load_item(&ctx, id).await?))
}
pub fn routes() -> Routes {

View File

@@ -122,6 +122,8 @@ pub struct CreateCommentRequest {
pub captcha_token: Option<String>,
#[serde(default, alias = "captchaAnswer")]
pub captcha_answer: Option<String>,
#[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>,
#[serde(default)]
pub website: Option<String>,
}
@@ -383,6 +385,7 @@ pub async fn add(
author: author.as_deref(),
content: content.as_deref(),
honeypot_website: params.website.as_deref(),
turnstile_token: params.turnstile_token.as_deref(),
captcha_token: params.captcha_token.as_deref(),
captcha_answer: params.captcha_answer.as_deref(),
},

View File

@@ -1,5 +1,6 @@
pub mod admin;
pub mod admin_api;
pub mod admin_taxonomy;
pub mod admin_ops;
pub mod ai;
pub mod auth;

View File

@@ -80,7 +80,9 @@ fn post_has_tag(post: &Model, wanted_tag: &str) -> bool {
fn effective_status(post: &Model) -> String {
content::effective_post_state(
post.status.as_deref().unwrap_or(content::POST_STATUS_PUBLISHED),
post.status
.as_deref()
.unwrap_or(content::POST_STATUS_PUBLISHED),
post.publish_at,
post.unpublish_at,
Utc::now().fixed_offset(),
@@ -95,6 +97,60 @@ fn publicly_accessible(post: &Model) -> bool {
content::is_post_publicly_accessible(post, Utc::now().fixed_offset())
}
/// Maps a caller-supplied sort key for the posts listing onto one of the
/// supported column names. `"updated"`/`"updated_at"` select the update
/// timestamp, `"title"` selects the title; anything else (including `None`
/// or blank input) falls back to `"created_at"`.
fn normalize_post_sort_by(value: Option<&str>) -> String {
    let requested = value.map(str::trim).unwrap_or_default().to_ascii_lowercase();
    match requested.as_str() {
        "updated_at" | "updated" => "updated_at",
        "title" => "title",
        _ => "created_at",
    }
    .to_string()
}
/// Resolves the sort direction for the posts listing. Only an explicit
/// (case-insensitive, trimmed) `"asc"` selects ascending order; every other
/// input — including `None` — defaults to `"desc"`.
fn normalize_sort_order(value: Option<&str>) -> String {
    let requested = value.map(str::trim).unwrap_or_default().to_ascii_lowercase();
    if requested == "asc" { "asc" } else { "desc" }.to_string()
}
/// Sorts posts in place by the normalized `sort_by` key ("updated_at",
/// "title", or anything else → created_at) in the direction given by
/// `sort_order`. Title comparison is case-insensitive and falls back to the
/// slug when a post has no title. Ties are always broken by ascending id
/// (after the direction reversal), keeping the order deterministic.
fn sort_posts(items: &mut [Model], sort_by: &str, sort_order: &str) {
    items.sort_by(|a, b| {
        let key_ordering = match sort_by {
            "updated_at" => a.updated_at.cmp(&b.updated_at),
            "title" => {
                let a_key = a.title.as_deref().unwrap_or(&a.slug).to_ascii_lowercase();
                let b_key = b.title.as_deref().unwrap_or(&b.slug).to_ascii_lowercase();
                a_key.cmp(&b_key)
            }
            _ => a.created_at.cmp(&b.created_at),
        };
        let directed = if sort_order == "asc" {
            key_ordering
        } else {
            key_ordering.reverse()
        };
        directed.then_with(|| a.id.cmp(&b.id))
    });
}
fn parse_optional_markdown_datetime(
value: Option<&str>,
) -> Option<chrono::DateTime<chrono::FixedOffset>> {
@@ -103,16 +159,18 @@ fn parse_optional_markdown_datetime(
return None;
}
chrono::DateTime::parse_from_rfc3339(value).ok().or_else(|| {
chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d")
.ok()
.and_then(|date| date.and_hms_opt(0, 0, 0))
.and_then(|naive| {
chrono::FixedOffset::east_opt(0)?
.from_local_datetime(&naive)
.single()
})
})
chrono::DateTime::parse_from_rfc3339(value)
.ok()
.or_else(|| {
chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d")
.ok()
.and_then(|date| date.and_hms_opt(0, 0, 0))
.and_then(|naive| {
chrono::FixedOffset::east_opt(0)?
.from_local_datetime(&naive)
.single()
})
})
}
fn markdown_post_listed_publicly(post: &content::MarkdownPost) -> bool {
@@ -199,7 +257,9 @@ fn should_include_post(
}
if let Some(status) = &query.status {
if effective_status(post) != content::normalize_post_status(Some(status)) && effective_status(post) != status.trim().to_ascii_lowercase() {
if effective_status(post) != content::normalize_post_status(Some(status))
&& effective_status(post) != status.trim().to_ascii_lowercase()
{
return false;
}
}
@@ -343,22 +403,22 @@ impl Params {
item.image = Set(self.image.clone());
item.images = Set(self.images.clone());
item.pinned = Set(self.pinned);
item.status = Set(self.status.clone().map(|value| requested_status(Some(value), None)));
item.visibility = Set(
self.visibility
.clone()
.map(|value| normalize_visibility(Some(value))),
);
item.publish_at = Set(
self.publish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()),
);
item.unpublish_at = Set(
self.unpublish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()),
);
item.status = Set(self
.status
.clone()
.map(|value| requested_status(Some(value), None)));
item.visibility = Set(self
.visibility
.clone()
.map(|value| normalize_visibility(Some(value))));
item.publish_at = Set(self
.publish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()));
item.unpublish_at = Set(self
.unpublish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()));
item.canonical_url = Set(self.canonical_url.clone());
item.noindex = Set(self.noindex);
item.og_image = Set(self.og_image.clone());
@@ -388,6 +448,28 @@ pub struct ListQuery {
pub preview: Option<bool>,
}
/// Query parameters for the paged posts endpoint (`GET api/posts/page`).
#[derive(Clone, Debug, Default, Deserialize)]
pub struct PagedPostsQuery {
    /// The same filter set accepted by the non-paged `list` handler,
    /// flattened into this query string.
    #[serde(flatten)]
    pub filters: ListQuery,
    /// 1-based page number; the handler defaults it to 1 and clamps it to
    /// the computed page count.
    pub page: Option<u64>,
    /// Items per page (clamped to 1..=100 by the handler).
    /// NOTE(review): the alias equals the field's own name, which is only
    /// meaningful if a container-level rename applies — verify intent.
    #[serde(alias = "page_size")]
    pub page_size: Option<u64>,
    /// Requested sort key; normalized by `normalize_post_sort_by`.
    pub sort_by: Option<String>,
    /// Requested sort direction; normalized by `normalize_sort_order`.
    pub sort_order: Option<String>,
}
/// Response envelope for the paged posts endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct PagedPostsResponse {
    /// The page slice of post models, already filtered and sorted.
    pub items: Vec<Model>,
    /// The effective (clamped) 1-based page that was returned.
    pub page: u64,
    /// The effective (clamped) page size used for slicing.
    pub page_size: u64,
    /// Total number of posts matching the filters, across all pages.
    pub total: usize,
    /// Total page count; always at least 1, even with zero results.
    pub total_pages: u64,
    /// The normalized sort key actually applied.
    pub sort_by: String,
    /// The normalized sort direction actually applied.
    pub sort_order: String,
}
#[derive(Clone, Debug, Default, Deserialize)]
pub struct LookupQuery {
#[serde(default, deserialize_with = "deserialize_boolish_option")]
@@ -450,8 +532,6 @@ pub async fn list(
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers);
let include_private = preview && query.include_private.unwrap_or(true);
let include_redirects = query.include_redirects.unwrap_or(preview);
@@ -463,12 +543,67 @@ pub async fn list(
let filtered = posts
.into_iter()
.filter(|post| should_include_post(post, &query, preview, include_private, include_redirects))
.filter(|post| {
should_include_post(post, &query, preview, include_private, include_redirects)
})
.collect::<Vec<_>>();
format::json(filtered)
}
/// Paged variant of the public posts listing: loads all posts, applies the
/// same visibility filtering as `list`, sorts by the requested key, and
/// returns one page slice wrapped in a `PagedPostsResponse`.
#[debug_handler]
pub async fn list_page(
    Query(query): Query<PagedPostsQuery>,
    State(ctx): State<AppContext>,
    headers: HeaderMap,
) -> Result<Response> {
    // Resolve preview/visibility flags exactly as the non-paged handler does.
    let preview = request_preview_mode(query.filters.preview, &headers);
    let include_private = preview && query.filters.include_private.unwrap_or(true);
    let include_redirects = query.filters.include_redirects.unwrap_or(preview);

    // Normalize paging and sorting inputs before touching the database.
    let page_size = query.page_size.unwrap_or(20).clamp(1, 100);
    let sort_by = normalize_post_sort_by(query.sort_by.as_deref());
    let sort_order = normalize_sort_order(query.sort_order.as_deref());

    // Load everything newest-first, filter in memory, then re-sort with the
    // requested key/direction.
    let all_posts = Entity::find()
        .order_by_desc(Column::CreatedAt)
        .all(&ctx.db)
        .await?;
    let mut visible = all_posts
        .into_iter()
        .filter(|post| {
            should_include_post(
                post,
                &query.filters,
                preview,
                include_private,
                include_redirects,
            )
        })
        .collect::<Vec<_>>();
    sort_posts(&mut visible, &sort_by, &sort_order);

    // Page math: ceiling division with a minimum of one page, and the
    // requested page clamped so out-of-range requests land on the last page.
    let total = visible.len();
    let total_pages = std::cmp::max(1, ((total as u64) + page_size - 1) / page_size);
    let page = query.page.unwrap_or(1).clamp(1, total_pages);
    let start = ((page - 1) * page_size) as usize;
    let end = std::cmp::min(start + page_size as usize, total);
    let items = if start >= total {
        Vec::new()
    } else {
        visible[start..end].to_vec()
    };

    format::json(PagedPostsResponse {
        items,
        page,
        page_size,
        total,
        total_pages,
        sort_by,
        sort_order,
    })
}
#[debug_handler]
pub async fn add(
headers: HeaderMap,
@@ -541,7 +676,10 @@ pub async fn update(
.into_iter()
.filter_map(|tag| tag.as_str().map(ToString::to_string))
.collect(),
post_type: item.post_type.clone().unwrap_or_else(|| "article".to_string()),
post_type: item
.post_type
.clone()
.unwrap_or_else(|| "article".to_string()),
image: item.image.clone(),
images: item
.images
@@ -553,7 +691,10 @@ pub async fn update(
.filter_map(|tag| tag.as_str().map(ToString::to_string))
.collect(),
pinned: item.pinned.unwrap_or(false),
status: item.status.clone().unwrap_or_else(|| content::POST_STATUS_PUBLISHED.to_string()),
status: item
.status
.clone()
.unwrap_or_else(|| content::POST_STATUS_PUBLISHED.to_string()),
visibility: item
.visibility
.clone()
@@ -565,9 +706,7 @@ pub async fn update(
og_image: item.og_image.clone(),
redirect_from: content::post_redirects_from_json(&item.redirect_from),
redirect_to: item.redirect_to.clone(),
file_path: content::markdown_post_path(&item.slug)
.to_string_lossy()
.to_string(),
file_path: content::virtual_markdown_document_path(&item.slug),
};
let _ = subscriptions::notify_post_published(&ctx, &post).await;
}
@@ -605,7 +744,6 @@ pub async fn get_one(
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers);
let post = load_item(&ctx, id).await?;
@@ -623,7 +761,6 @@ pub async fn get_by_slug(
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers);
let include_private = preview && query.include_private.unwrap_or(true);
let post = resolve_post_by_slug(&ctx, &slug).await?;
@@ -649,8 +786,7 @@ pub async fn get_markdown_by_slug(
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let (path, markdown) = content::read_markdown_document(&slug)?;
let (path, markdown) = content::read_markdown_document_from_store(&ctx, &slug).await?;
format::json(MarkdownDocumentResponse {
slug,
path,
@@ -676,7 +812,7 @@ pub async fn update_markdown_by_slug(
)
.await?;
let updated = content::write_markdown_document(&ctx, &slug, &params.markdown).await?;
let (path, markdown) = content::read_markdown_document(&updated.slug)?;
let (path, markdown) = content::read_markdown_document_from_store(&ctx, &updated.slug).await?;
let _ = post_revisions::capture_snapshot_from_markdown(
&ctx,
Some(&actor),
@@ -743,7 +879,7 @@ pub async fn create_markdown(
},
)
.await?;
let (path, markdown) = content::read_markdown_document(&created.slug)?;
let (path, markdown) = content::read_markdown_document_from_store(&ctx, &created.slug).await?;
let _ = post_revisions::capture_snapshot_from_markdown(
&ctx,
Some(&actor),
@@ -805,7 +941,9 @@ pub async fn import_markdown(
let imported = content::import_markdown_documents(&ctx, files).await?;
for item in &imported {
if let Ok((_path, markdown)) = content::read_markdown_document(&item.slug) {
if let Ok((_path, markdown)) =
content::read_markdown_document_from_store(&ctx, &item.slug).await
{
let _ = post_revisions::capture_snapshot_from_markdown(
&ctx,
Some(&actor),
@@ -876,6 +1014,7 @@ pub async fn delete_markdown_by_slug(
pub fn routes() -> Routes {
Routes::new()
.prefix("api/posts/")
.add("page", get(list_page))
.add("/", get(list))
.add("/", post(add))
.add("markdown", post(create_markdown))

View File

@@ -63,9 +63,7 @@ fn levenshtein_distance(left: &str, right: &str) -> usize {
let mut curr = vec![i + 1; right_chars.len() + 1];
for (j, right_ch) in right_chars.iter().enumerate() {
let cost = usize::from(left_ch != *right_ch);
curr[j + 1] = (curr[j] + 1)
.min(prev[j + 1] + 1)
.min(prev[j] + cost);
curr[j + 1] = (curr[j] + 1).min(prev[j + 1] + 1).min(prev[j] + cost);
}
prev = curr;
}
@@ -157,7 +155,11 @@ fn candidate_terms(posts: &[posts::Model]) -> Vec<String> {
candidates
}
fn find_spelling_fallback(query: &str, posts: &[posts::Model], synonym_groups: &[Vec<String>]) -> Vec<String> {
fn find_spelling_fallback(
query: &str,
posts: &[posts::Model],
synonym_groups: &[Vec<String>],
) -> Vec<String> {
let primary_token = tokenize(query).into_iter().next().unwrap_or_default();
if primary_token.len() < 3 {
return Vec::new();
@@ -274,6 +276,71 @@ fn is_preview_search(query: &SearchQuery, headers: &HeaderMap) -> bool {
.unwrap_or(false)
}
/// Maps a caller-supplied search sort key onto one of the four supported
/// values: "newest" (also accepts "created_at"), "oldest", "title", with
/// anything unrecognized — including `None` — falling back to "relevance".
fn normalize_search_sort_by(value: Option<&str>) -> String {
    let requested = value.map(str::trim).unwrap_or_default().to_ascii_lowercase();
    let key = match requested.as_str() {
        "newest" | "created_at" => "newest",
        "oldest" => "oldest",
        "title" => "title",
        _ => "relevance",
    };
    key.to_string()
}
/// Resolves the search sort direction. An explicit (trimmed,
/// case-insensitive) "asc" or "desc" wins; otherwise the default depends on
/// the sort key — titles read A→Z ("asc"), everything else newest-first
/// ("desc").
fn normalize_sort_order(value: Option<&str>, sort_by: &str) -> String {
    let requested = value.map(str::trim).unwrap_or_default().to_ascii_lowercase();
    if requested == "asc" || requested == "desc" {
        return requested;
    }
    if sort_by == "title" { "asc" } else { "desc" }.to_string()
}
/// Sorts search results in place by `sort_by` / `sort_order`.
///
/// Note the asymmetry: "relevance", "newest" and "oldest" return their
/// ordering before the direction step, so `sort_order` is ignored for them
/// and they get no slug tie-break. Only the remaining key ("title") honours
/// asc/desc and breaks ties by slug.
fn sort_search_results(items: &mut [SearchResult], sort_by: &str, sort_order: &str) {
    items.sort_by(|left, right| {
        let ordering = match sort_by {
            // Newest first: right vs. left so larger timestamps sort earlier.
            "newest" => right.created_at.cmp(&left.created_at),
            "oldest" => left.created_at.cmp(&right.created_at),
            // Case-insensitive title compare, slug as fallback when untitled.
            "title" => left
                .title
                .as_deref()
                .unwrap_or(&left.slug)
                .to_ascii_lowercase()
                .cmp(
                    &right
                        .title
                        .as_deref()
                        .unwrap_or(&right.slug)
                        .to_ascii_lowercase(),
                ),
            // Default (relevance): descending rank; NaN-safe via partial_cmp
            // falling back to Equal, then newest-first as tie-breaker.
            _ => right
                .rank
                .partial_cmp(&left.rank)
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| right.created_at.cmp(&left.created_at)),
        };
        // These keys already encode their direction; skip the asc/desc step.
        if sort_by == "relevance" || sort_by == "newest" || sort_by == "oldest" {
            return ordering;
        }
        let ordering = if sort_order == "asc" {
            ordering
        } else {
            ordering.reverse()
        };
        ordering.then_with(|| left.slug.cmp(&right.slug))
    });
}
#[derive(Clone, Debug, Default, Deserialize)]
pub struct SearchQuery {
pub q: Option<String>,
@@ -286,6 +353,17 @@ pub struct SearchQuery {
pub preview: Option<bool>,
}
/// Query parameters for the paged search endpoint (`GET api/search/page`).
#[derive(Clone, Debug, Default, Deserialize)]
pub struct SearchPageQuery {
    /// The same search filters accepted by the non-paged `search` handler,
    /// flattened into this query string.
    #[serde(flatten)]
    pub search: SearchQuery,
    /// 1-based page number; defaulted to 1 and clamped by the handler.
    pub page: Option<u64>,
    /// Items per page (clamped to 1..=100 by the handler).
    /// NOTE(review): the alias equals the field's own name, which is only
    /// meaningful if a container-level rename applies — verify intent.
    #[serde(alias = "page_size")]
    pub page_size: Option<u64>,
    /// Requested sort key; normalized by `normalize_search_sort_by`.
    pub sort_by: Option<String>,
    /// Requested sort direction; normalized by `normalize_sort_order`.
    pub sort_order: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
pub struct SearchResult {
pub id: i32,
@@ -296,37 +374,46 @@ pub struct SearchResult {
pub category: Option<String>,
pub tags: Option<Value>,
pub post_type: Option<String>,
pub image: Option<String>,
pub pinned: Option<bool>,
pub created_at: chrono::DateTime<chrono::Utc>,
pub updated_at: chrono::DateTime<chrono::Utc>,
pub rank: f64,
}
#[debug_handler]
pub async fn search(
Query(query): Query<SearchQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
let started_at = Instant::now();
let preview_search = is_preview_search(&query, &headers);
content::sync_markdown_posts(&ctx).await?;
/// Response envelope for the paged search endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct PagedSearchResponse {
    /// The (trimmed) query string that was searched.
    pub query: String,
    /// The page slice of search results, sorted as requested.
    pub items: Vec<SearchResult>,
    /// The effective (clamped) 1-based page that was returned.
    pub page: u64,
    /// The effective (clamped) page size used for slicing.
    pub page_size: u64,
    /// Total number of matches across all pages.
    pub total: usize,
    /// Total page count; always at least 1, even with zero results.
    pub total_pages: u64,
    /// The normalized sort key actually applied.
    pub sort_by: String,
    /// The normalized sort direction actually applied.
    pub sort_order: String,
}
let q = query.q.unwrap_or_default().trim().to_string();
async fn build_search_results(
ctx: &AppContext,
query: &SearchQuery,
headers: &HeaderMap,
) -> Result<(String, bool, Vec<SearchResult>)> {
let preview_search = is_preview_search(query, headers);
let q = query.q.clone().unwrap_or_default().trim().to_string();
if q.is_empty() {
return format::json(Vec::<SearchResult>::new());
return Ok((q, preview_search, Vec::new()));
}
if !preview_search {
abuse_guard::enforce_public_scope(
"search",
abuse_guard::detect_client_ip(&headers).as_deref(),
abuse_guard::detect_client_ip(headers).as_deref(),
Some(&q),
)?;
}
let limit = query.limit.unwrap_or(20).clamp(1, 100) as usize;
let settings = site_settings::load_current(&ctx).await.ok();
let settings = site_settings::load_current(ctx).await.ok();
let synonym_groups = settings
.as_ref()
.map(|item| parse_synonym_groups(&item.search_synonyms))
@@ -342,7 +429,12 @@ pub async fn search(
})
.collect::<Vec<_>>();
if let Some(category) = query.category.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
if let Some(category) = query
.category
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
all_posts.retain(|post| {
post.category
.as_deref()
@@ -351,11 +443,21 @@ pub async fn search(
});
}
if let Some(tag) = query.tag.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
if let Some(tag) = query
.tag
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
all_posts.retain(|post| post_has_tag(post, tag));
}
if let Some(post_type) = query.post_type.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
if let Some(post_type) = query
.post_type
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
all_posts.retain(|post| {
post.post_type
.as_deref()
@@ -378,6 +480,7 @@ pub async fn search(
category: post.category.clone(),
tags: post.tags.clone(),
post_type: post.post_type.clone(),
image: post.image.clone(),
pinned: post.pinned,
created_at: post.created_at.into(),
updated_at: post.updated_at.into(),
@@ -401,6 +504,7 @@ pub async fn search(
category: post.category.clone(),
tags: post.tags.clone(),
post_type: post.post_type.clone(),
image: post.image.clone(),
pinned: post.pinned,
created_at: post.created_at.into(),
updated_at: post.updated_at.into(),
@@ -410,13 +514,22 @@ pub async fn search(
}
}
results.sort_by(|left, right| {
right
.rank
.partial_cmp(&left.rank)
.unwrap_or(std::cmp::Ordering::Equal)
.then_with(|| right.created_at.cmp(&left.created_at))
});
sort_search_results(&mut results, "relevance", "desc");
Ok((q, preview_search, results))
}
#[debug_handler]
pub async fn search(
Query(query): Query<SearchQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
let started_at = Instant::now();
let limit = query.limit.unwrap_or(20).clamp(1, 100) as usize;
let (q, preview_search, mut results) = build_search_results(&ctx, &query, &headers).await?;
if q.is_empty() {
return format::json(Vec::<SearchResult>::new());
}
results.truncate(limit);
if !preview_search {
@@ -433,6 +546,71 @@ pub async fn search(
format::json(results)
}
pub fn routes() -> Routes {
Routes::new().prefix("api/search/").add("/", get(search))
/// Paged search handler: runs the shared `build_search_results` pipeline,
/// applies the requested sort, slices one page, and (for non-preview
/// searches) records an analytics event with the total hit count.
#[debug_handler]
pub async fn search_page(
    Query(query): Query<SearchPageQuery>,
    State(ctx): State<AppContext>,
    headers: HeaderMap,
) -> Result<Response> {
    // Started before the search so the analytics latency covers the full request.
    let started_at = Instant::now();
    // Normalize paging/sorting inputs up front.
    let page_size = query.page_size.unwrap_or(20).clamp(1, 100);
    let sort_by = normalize_search_sort_by(query.sort_by.as_deref());
    let sort_order = normalize_sort_order(query.sort_order.as_deref(), &sort_by);
    let (q, preview_search, mut results) =
        build_search_results(&ctx, &query.search, &headers).await?;
    // Empty query: return an empty first page and skip analytics entirely.
    if q.is_empty() {
        return format::json(PagedSearchResponse {
            query: q,
            items: Vec::new(),
            page: 1,
            page_size,
            total: 0,
            total_pages: 1,
            sort_by,
            sort_order,
        });
    }
    sort_search_results(&mut results, &sort_by, &sort_order);
    let total = results.len();
    // Ceiling division with a minimum of one page; out-of-range page
    // requests are clamped to the last page.
    let total_pages = std::cmp::max(1, ((total as u64) + page_size - 1) / page_size);
    let page = query.page.unwrap_or(1).clamp(1, total_pages);
    let start = ((page - 1) * page_size) as usize;
    let end = std::cmp::min(start + page_size as usize, total);
    let items = if start >= total {
        Vec::new()
    } else {
        results[start..end].to_vec()
    };
    // Record the total match count (not the page slice); preview searches
    // are excluded so admin usage does not skew the stats.
    if !preview_search {
        analytics::record_search_event(
            &ctx,
            &q,
            total,
            &headers,
            started_at.elapsed().as_millis() as i64,
        )
        .await;
    }
    format::json(PagedSearchResponse {
        query: q,
        items,
        page,
        page_size,
        total,
        total_pages,
        sort_by,
        sort_order,
    })
}
/// Mounts the public search routes under `api/search/`: `page` for the
/// paged endpoint and the root path for the legacy limit-based search.
pub fn routes() -> Routes {
    Routes::new()
        .prefix("api/search/")
        .add("page", get(search_page))
        .add("/", get(search))
}

View File

@@ -93,6 +93,26 @@ pub struct SiteSettingsPayload {
pub ai_enabled: Option<bool>,
#[serde(default, alias = "paragraphCommentsEnabled")]
pub paragraph_comments_enabled: Option<bool>,
#[serde(default, alias = "commentVerificationMode")]
pub comment_verification_mode: Option<String>,
#[serde(default, alias = "commentTurnstileEnabled")]
pub comment_turnstile_enabled: Option<bool>,
#[serde(default, alias = "subscriptionVerificationMode")]
pub subscription_verification_mode: Option<String>,
#[serde(default, alias = "subscriptionTurnstileEnabled")]
pub subscription_turnstile_enabled: Option<bool>,
#[serde(default, alias = "webPushEnabled")]
pub web_push_enabled: Option<bool>,
#[serde(default, alias = "turnstileSiteKey")]
pub turnstile_site_key: Option<String>,
#[serde(default, alias = "turnstileSecretKey")]
pub turnstile_secret_key: Option<String>,
#[serde(default, alias = "webPushVapidPublicKey")]
pub web_push_vapid_public_key: Option<String>,
#[serde(default, alias = "webPushVapidPrivateKey")]
pub web_push_vapid_private_key: Option<String>,
#[serde(default, alias = "webPushVapidSubject")]
pub web_push_vapid_subject: Option<String>,
#[serde(default, alias = "aiProvider")]
pub ai_provider: Option<String>,
#[serde(default, alias = "aiApiBase")]
@@ -139,10 +159,20 @@ pub struct SiteSettingsPayload {
pub seo_default_twitter_handle: Option<String>,
#[serde(default, alias = "notificationWebhookUrl")]
pub notification_webhook_url: Option<String>,
#[serde(default, alias = "notificationChannelType")]
pub notification_channel_type: Option<String>,
#[serde(default, alias = "notificationCommentEnabled")]
pub notification_comment_enabled: Option<bool>,
#[serde(default, alias = "notificationFriendLinkEnabled")]
pub notification_friend_link_enabled: Option<bool>,
#[serde(default, alias = "subscriptionPopupEnabled")]
pub subscription_popup_enabled: Option<bool>,
#[serde(default, alias = "subscriptionPopupTitle")]
pub subscription_popup_title: Option<String>,
#[serde(default, alias = "subscriptionPopupDescription")]
pub subscription_popup_description: Option<String>,
#[serde(default, alias = "subscriptionPopupDelaySeconds")]
pub subscription_popup_delay_seconds: Option<i32>,
#[serde(default, alias = "searchSynonyms")]
pub search_synonyms: Option<Vec<String>>,
}
@@ -169,6 +199,17 @@ pub struct PublicSiteSettingsResponse {
pub music_playlist: Option<serde_json::Value>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_verification_mode: String,
pub comment_turnstile_enabled: bool,
pub subscription_verification_mode: String,
pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>,
pub web_push_vapid_public_key: Option<String>,
pub subscription_popup_enabled: bool,
pub subscription_popup_title: String,
pub subscription_popup_description: String,
pub subscription_popup_delay_seconds: i32,
pub seo_default_og_image: Option<String>,
pub seo_default_twitter_handle: Option<String>,
}
@@ -208,6 +249,36 @@ fn normalize_optional_int(value: Option<i32>, min: i32, max: i32) -> Option<i32>
value.map(|item| item.clamp(min, max))
}
/// Validates and canonicalizes a notification channel type. Trims and
/// lowercases the input, then accepts only "ntfy" or "webhook"; anything
/// else (or `None`) yields `None`.
fn normalize_notification_channel_type(value: Option<String>) -> Option<String> {
    let normalized = value?.trim().to_ascii_lowercase();
    matches!(normalized.as_str(), "ntfy" | "webhook").then_some(normalized)
}
/// Default: the subscription popup is enabled unless explicitly disabled.
pub(crate) fn default_subscription_popup_enabled() -> bool {
    true
}

/// Default popup title (Chinese UI copy, roughly "Subscribe to updates").
pub(crate) fn default_subscription_popup_title() -> String {
    "订阅更新".to_string()
}

/// Default popup body (Chinese UI copy describing email notifications,
/// address confirmation, and unsubscribe).
pub(crate) fn default_subscription_popup_description() -> String {
    "有新文章或汇总简报时,通过邮件第一时间收到提醒。需要先确认邮箱,可随时退订。".to_string()
}

/// Default delay before showing the popup, in seconds.
pub(crate) fn default_subscription_popup_delay_seconds() -> i32 {
    18
}
// NOTE(review): hard-coded Cloudflare Turnstile credentials, including the
// *secret* key, committed to source. Secrets in VCS should be moved to
// configuration / a secret store and the exposed key rotated.
const DEFAULT_TURNSTILE_SITE_KEY: &str = "0x4AAAAAACy58kMBSwXwqMhx";
const DEFAULT_TURNSTILE_SECRET_KEY: &str = "0x4AAAAAACy58m3gYfSqM-VIz4QK4wuO73U";
fn normalize_string_list(values: Vec<String>) -> Vec<String> {
values
.into_iter()
@@ -487,6 +558,69 @@ impl SiteSettingsPayload {
if let Some(paragraph_comments_enabled) = self.paragraph_comments_enabled {
item.paragraph_comments_enabled = Some(paragraph_comments_enabled);
}
if let Some(comment_verification_mode) = self
.comment_verification_mode
.as_deref()
.and_then(|value| crate::services::turnstile::normalize_verification_mode(Some(value)))
{
item.comment_verification_mode = Some(comment_verification_mode.as_str().to_string());
item.comment_turnstile_enabled = Some(matches!(
comment_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
));
} else if let Some(comment_turnstile_enabled) = self.comment_turnstile_enabled {
item.comment_turnstile_enabled = Some(comment_turnstile_enabled);
item.comment_verification_mode = Some(
if comment_turnstile_enabled {
crate::services::turnstile::VERIFICATION_MODE_TURNSTILE
} else {
crate::services::turnstile::VERIFICATION_MODE_CAPTCHA
}
.to_string(),
);
}
if let Some(subscription_verification_mode) = self
.subscription_verification_mode
.as_deref()
.and_then(|value| crate::services::turnstile::normalize_verification_mode(Some(value)))
{
item.subscription_verification_mode =
Some(subscription_verification_mode.as_str().to_string());
item.subscription_turnstile_enabled = Some(matches!(
subscription_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
));
} else if let Some(subscription_turnstile_enabled) = self.subscription_turnstile_enabled {
item.subscription_turnstile_enabled = Some(subscription_turnstile_enabled);
item.subscription_verification_mode = Some(
if subscription_turnstile_enabled {
crate::services::turnstile::VERIFICATION_MODE_TURNSTILE
} else {
crate::services::turnstile::VERIFICATION_MODE_OFF
}
.to_string(),
);
}
if let Some(web_push_enabled) = self.web_push_enabled {
item.web_push_enabled = Some(web_push_enabled);
}
if let Some(turnstile_site_key) = self.turnstile_site_key {
item.turnstile_site_key = normalize_optional_string(Some(turnstile_site_key));
}
if let Some(turnstile_secret_key) = self.turnstile_secret_key {
item.turnstile_secret_key = normalize_optional_string(Some(turnstile_secret_key));
}
if let Some(web_push_vapid_public_key) = self.web_push_vapid_public_key {
item.web_push_vapid_public_key =
normalize_optional_string(Some(web_push_vapid_public_key));
}
if let Some(web_push_vapid_private_key) = self.web_push_vapid_private_key {
item.web_push_vapid_private_key =
normalize_optional_string(Some(web_push_vapid_private_key));
}
if let Some(web_push_vapid_subject) = self.web_push_vapid_subject {
item.web_push_vapid_subject = normalize_optional_string(Some(web_push_vapid_subject));
}
let provider_list_supplied = self.ai_providers.is_some();
let provided_ai_providers = self.ai_providers.map(normalize_ai_provider_configs);
let requested_active_provider_id = self
@@ -563,12 +697,31 @@ impl SiteSettingsPayload {
item.notification_webhook_url =
normalize_optional_string(Some(notification_webhook_url));
}
if self.notification_channel_type.is_some() {
item.notification_channel_type =
normalize_notification_channel_type(self.notification_channel_type);
}
if let Some(notification_comment_enabled) = self.notification_comment_enabled {
item.notification_comment_enabled = Some(notification_comment_enabled);
}
if let Some(notification_friend_link_enabled) = self.notification_friend_link_enabled {
item.notification_friend_link_enabled = Some(notification_friend_link_enabled);
}
if let Some(subscription_popup_enabled) = self.subscription_popup_enabled {
item.subscription_popup_enabled = Some(subscription_popup_enabled);
}
if let Some(subscription_popup_title) = self.subscription_popup_title {
item.subscription_popup_title =
normalize_optional_string(Some(subscription_popup_title));
}
if let Some(subscription_popup_description) = self.subscription_popup_description {
item.subscription_popup_description =
normalize_optional_string(Some(subscription_popup_description));
}
if self.subscription_popup_delay_seconds.is_some() {
item.subscription_popup_delay_seconds =
normalize_optional_int(self.subscription_popup_delay_seconds, 3, 120);
}
if let Some(search_synonyms) = self.search_synonyms {
let normalized = normalize_string_list(search_synonyms);
item.search_synonyms = (!normalized.is_empty()).then(|| serde_json::json!(normalized));
@@ -656,6 +809,20 @@ fn default_payload() -> SiteSettingsPayload {
]),
ai_enabled: Some(false),
paragraph_comments_enabled: Some(true),
comment_verification_mode: Some(
crate::services::turnstile::VERIFICATION_MODE_CAPTCHA.to_string(),
),
comment_turnstile_enabled: Some(false),
subscription_verification_mode: Some(
crate::services::turnstile::VERIFICATION_MODE_OFF.to_string(),
),
subscription_turnstile_enabled: Some(false),
web_push_enabled: Some(false),
turnstile_site_key: Some(DEFAULT_TURNSTILE_SITE_KEY.to_string()),
turnstile_secret_key: Some(DEFAULT_TURNSTILE_SECRET_KEY.to_string()),
web_push_vapid_public_key: None,
web_push_vapid_private_key: None,
web_push_vapid_subject: None,
ai_provider: Some(ai::provider_name(None)),
ai_api_base: Some(ai::default_api_base().to_string()),
ai_api_key: Some(ai::default_api_key().to_string()),
@@ -682,8 +849,13 @@ fn default_payload() -> SiteSettingsPayload {
seo_default_og_image: None,
seo_default_twitter_handle: None,
notification_webhook_url: None,
notification_channel_type: Some("webhook".to_string()),
notification_comment_enabled: Some(false),
notification_friend_link_enabled: Some(false),
subscription_popup_enabled: Some(default_subscription_popup_enabled()),
subscription_popup_title: Some(default_subscription_popup_title()),
subscription_popup_description: Some(default_subscription_popup_description()),
subscription_popup_delay_seconds: Some(default_subscription_popup_delay_seconds()),
search_synonyms: Some(Vec::new()),
}
}
@@ -713,6 +885,18 @@ pub(crate) async fn load_current(ctx: &AppContext) -> Result<Model> {
}
fn public_response(model: Model) -> PublicSiteSettingsResponse {
let turnstile_site_key = crate::services::turnstile::site_key(&model);
let web_push_vapid_public_key = crate::services::web_push::public_key(&model);
let comment_verification_mode = crate::services::turnstile::effective_mode(
&model,
crate::services::turnstile::TurnstileScope::Comment,
);
let subscription_verification_mode = crate::services::turnstile::effective_mode(
&model,
crate::services::turnstile::TurnstileScope::Subscription,
);
let web_push_enabled = crate::services::web_push::is_enabled(&model);
PublicSiteSettingsResponse {
id: model.id,
site_name: model.site_name,
@@ -734,6 +918,31 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
music_playlist: model.music_playlist,
ai_enabled: model.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true),
comment_verification_mode: comment_verification_mode.as_str().to_string(),
comment_turnstile_enabled: matches!(
comment_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
subscription_verification_mode: subscription_verification_mode.as_str().to_string(),
subscription_turnstile_enabled: matches!(
subscription_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
web_push_enabled,
turnstile_site_key,
web_push_vapid_public_key,
subscription_popup_enabled: model
.subscription_popup_enabled
.unwrap_or_else(default_subscription_popup_enabled),
subscription_popup_title: model
.subscription_popup_title
.unwrap_or_else(default_subscription_popup_title),
subscription_popup_description: model
.subscription_popup_description
.unwrap_or_else(default_subscription_popup_description),
subscription_popup_delay_seconds: model
.subscription_popup_delay_seconds
.unwrap_or_else(default_subscription_popup_delay_seconds),
seo_default_og_image: model.seo_default_og_image,
seo_default_twitter_handle: model.seo_default_twitter_handle,
}
@@ -741,8 +950,6 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
#[debug_handler]
pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let site_settings = public_response(load_current(&ctx).await?);
let posts = posts::Entity::find()
.order_by_desc(posts::Column::CreatedAt)
@@ -784,7 +991,8 @@ pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
.collect::<Vec<_>>();
let content_highlights =
crate::services::analytics::build_public_content_highlights(&ctx, &posts).await?;
let content_ranges = crate::services::analytics::build_public_content_windows(&ctx, &posts).await?;
let content_ranges =
crate::services::analytics::build_public_content_windows(&ctx, &posts).await?;
format::json(HomePageResponse {
site_settings,

View File

@@ -1,7 +1,9 @@
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use crate::services::{abuse_guard, admin_audit, subscriptions};
use axum::http::header;
use crate::services::{abuse_guard, admin_audit, subscriptions, turnstile};
#[derive(Clone, Debug, Deserialize)]
pub struct PublicSubscriptionPayload {
@@ -10,6 +12,25 @@ pub struct PublicSubscriptionPayload {
pub display_name: Option<String>,
#[serde(default)]
pub source: Option<String>,
#[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>,
#[serde(default, alias = "captchaToken")]
pub captcha_token: Option<String>,
#[serde(default, alias = "captchaAnswer")]
pub captcha_answer: Option<String>,
}
/// Request body for the public browser (Web Push) subscription endpoint.
///
/// `subscription` carries the raw `PushSubscription` JSON produced by the
/// browser's Push API; the handler validates its `endpoint` field. The
/// optional token/answer fields feed the human-verification check and accept
/// both snake_case and camelCase spellings via serde aliases.
#[derive(Clone, Debug, Deserialize)]
pub struct PublicBrowserPushSubscriptionPayload {
    // Raw PushSubscription object exactly as serialized by the browser.
    pub subscription: serde_json::Value,
    // Free-form origin label, stored into the subscription metadata.
    #[serde(default)]
    pub source: Option<String>,
    // Cloudflare Turnstile response token (used when turnstile mode is active).
    #[serde(default, alias = "turnstileToken")]
    pub turnstile_token: Option<String>,
    // Local captcha token/answer pair (used when captcha mode is active).
    #[serde(default, alias = "captchaToken")]
    pub captcha_token: Option<String>,
    #[serde(default, alias = "captchaAnswer")]
    pub captcha_answer: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -55,6 +76,41 @@ fn public_subscription_metadata(source: Option<String>) -> serde_json::Value {
})
}
/// Builds the JSON metadata blob stored alongside a browser-push subscription.
///
/// Records the originating `source` label, a fixed `"browser-push"` kind
/// marker, the full browser `subscription` object, and the caller's user
/// agent (when present). `None` values serialize as JSON `null`.
fn public_browser_push_metadata(
    source: Option<String>,
    subscription: serde_json::Value,
    user_agent: Option<String>,
) -> serde_json::Value {
    serde_json::json!({
        "source": source,
        "kind": "browser-push",
        "subscription": subscription,
        "user_agent": user_agent,
    })
}
async fn verify_subscription_human_check(
settings: &crate::models::_entities::site_settings::Model,
turnstile_token: Option<&str>,
captcha_token: Option<&str>,
captcha_answer: Option<&str>,
client_ip: Option<&str>,
) -> Result<()> {
match turnstile::effective_mode(settings, turnstile::TurnstileScope::Subscription) {
turnstile::VerificationMode::Off => Ok(()),
turnstile::VerificationMode::Captcha => {
crate::services::comment_guard::verify_captcha_solution(
captcha_token,
captcha_answer,
client_ip,
)
}
turnstile::VerificationMode::Turnstile => {
turnstile::verify_token(settings, turnstile_token, client_ip).await
}
}
}
#[debug_handler]
pub async fn subscribe(
State(ctx): State<AppContext>,
@@ -62,11 +118,21 @@ pub async fn subscribe(
Json(payload): Json<PublicSubscriptionPayload>,
) -> Result<Response> {
let email = payload.email.trim().to_ascii_lowercase();
let client_ip = abuse_guard::detect_client_ip(&headers);
abuse_guard::enforce_public_scope(
"subscription",
abuse_guard::detect_client_ip(&headers).as_deref(),
client_ip.as_deref(),
Some(&email),
)?;
let settings = crate::controllers::site_settings::load_current(&ctx).await?;
verify_subscription_human_check(
&settings,
payload.turnstile_token.as_deref(),
payload.captcha_token.as_deref(),
payload.captcha_answer.as_deref(),
client_ip.as_deref(),
)
.await?;
let result = subscriptions::create_public_email_subscription(
&ctx,
@@ -103,6 +169,77 @@ pub async fn subscribe(
})
}
/// Public endpoint: registers a browser Web Push subscription.
///
/// Flow: (1) reject early unless web push is enabled in site settings,
/// (2) extract and validate the subscription's `endpoint`, (3) apply the
/// per-scope abuse guard keyed by client IP + endpoint, (4) run the
/// configured human-verification check, (5) persist the subscription with
/// its metadata, (6) write an admin audit event, (7) return the public
/// subscription response (web push needs no confirmation step).
#[debug_handler]
pub async fn subscribe_browser_push(
    State(ctx): State<AppContext>,
    headers: axum::http::HeaderMap,
    Json(payload): Json<PublicBrowserPushSubscriptionPayload>,
) -> Result<Response> {
    let settings = crate::controllers::site_settings::load_current(&ctx).await?;
    if !crate::services::web_push::is_enabled(&settings) {
        return Err(Error::BadRequest("浏览器推送未启用".to_string()));
    }
    // The push endpoint URL doubles as the subscription's unique target.
    let endpoint = payload
        .subscription
        .get("endpoint")
        .and_then(serde_json::Value::as_str)
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .ok_or_else(|| Error::BadRequest("browser push subscription.endpoint 不能为空".to_string()))?
        .to_string();
    let client_ip = abuse_guard::detect_client_ip(&headers);
    // Normalize the UA header: non-UTF-8 or blank values become None.
    let user_agent = headers
        .get(header::USER_AGENT)
        .and_then(|value| value.to_str().ok())
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(ToString::to_string);
    abuse_guard::enforce_public_scope("browser-push-subscription", client_ip.as_deref(), Some(&endpoint))?;
    verify_subscription_human_check(
        &settings,
        payload.turnstile_token.as_deref(),
        payload.captcha_token.as_deref(),
        payload.captcha_answer.as_deref(),
        client_ip.as_deref(),
    )
    .await?;
    // `subscription` is cloned because it is stored both as the channel
    // payload and inside the metadata blob.
    let result = subscriptions::create_public_web_push_subscription(
        &ctx,
        payload.subscription.clone(),
        Some(public_browser_push_metadata(
            payload.source,
            payload.subscription,
            user_agent,
        )),
    )
    .await?;
    // Audit with no acting admin (None): this is a public-facing action.
    admin_audit::log_event(
        &ctx,
        None,
        "subscription.public.web_push.active",
        "subscription",
        Some(result.subscription.id.to_string()),
        Some(result.subscription.target.clone()),
        Some(serde_json::json!({
            "channel_type": result.subscription.channel_type,
            "status": result.subscription.status,
        })),
    )
    .await?;
    format::json(PublicSubscriptionResponse {
        ok: true,
        subscription_id: result.subscription.id,
        status: result.subscription.status,
        // Browser push subscriptions activate immediately; no email confirm.
        requires_confirmation: false,
        message: result.message,
    })
}
#[debug_handler]
pub async fn confirm(
State(ctx): State<AppContext>,
@@ -196,6 +333,7 @@ pub fn routes() -> Routes {
Routes::new()
.prefix("/api/subscriptions")
.add("/", post(subscribe))
.add("/browser-push", post(subscribe_browser_push))
.add("/confirm", post(confirm))
.add("/manage", get(manage).patch(update_manage))
.add("/unsubscribe", post(unsubscribe))

View File

@@ -2,43 +2,219 @@
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use loco_rs::prelude::*;
use sea_orm::{ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::models::_entities::tags::{ActiveModel, Entity, Model};
use crate::models::_entities::{posts, tags};
use crate::services::content;
/// Public-facing tag summary returned by the tag list endpoint.
///
/// `name` is already resolved (falls back to `slug` when the stored name is
/// absent) and `count` is the number of posts referencing this tag.
#[derive(Clone, Debug, Serialize)]
pub struct TagSummary {
    pub id: i32,
    pub name: String,
    pub slug: String,
    // Number of posts whose tag list matches this tag's name or slug.
    pub count: usize,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
}
/// Admin-facing tag record with raw stored fields and RFC 3339 timestamps.
///
/// Unlike `TagSummary`, `name` stays optional (mirrors the database column)
/// and no post count is computed.
#[derive(Clone, Debug, Serialize)]
pub struct TagRecord {
    pub id: i32,
    pub name: Option<String>,
    pub slug: String,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    // RFC 3339 strings derived from the entity's timezone-aware timestamps.
    pub created_at: String,
    pub updated_at: String,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Params {
pub name: Option<String>,
pub slug: String,
pub slug: Option<String>,
#[serde(default)]
pub description: Option<String>,
#[serde(default)]
pub cover_image: Option<String>,
#[serde(default)]
pub accent_color: Option<String>,
#[serde(default)]
pub seo_title: Option<String>,
#[serde(default)]
pub seo_description: Option<String>,
}
impl Params {
fn update(&self, item: &mut ActiveModel) {
item.name = Set(self.name.clone());
item.slug = Set(self.slug.clone());
/// Normalizes an optional user-supplied string: trims surrounding whitespace
/// and maps empty or whitespace-only input to `None`.
///
/// Improvement over the previous version: the trimmed slice is inspected
/// before allocating, so whitespace-only input no longer allocates a String.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value.and_then(|item| {
        let trimmed = item.trim();
        if trimmed.is_empty() {
            None
        } else {
            // Allocate only when there is content worth keeping.
            Some(trimmed.to_string())
        }
    })
}
/// Converts arbitrary text into a URL-safe slug.
///
/// ASCII alphanumerics are lowercased and kept; runs of whitespace, `-` and
/// `_` collapse into a single dash; every other character (including
/// non-ASCII letters) is dropped; leading/trailing dashes are stripped.
fn slugify(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
        } else if (ch.is_whitespace() || ch == '-' || ch == '_') && !out.ends_with('-') {
            // Separator: emit at most one dash per run.
            out.push('-');
        }
        // All other characters are silently discarded.
    }
    out.trim_matches('-').to_string()
}
fn normalized_name(params: &Params) -> Result<String> {
params
.name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
.ok_or_else(|| Error::BadRequest("tag name is required".to_string()))
}
/// Resolves the slug for a tag: the trimmed request slug when one was
/// supplied, otherwise a slug derived from `fallback` (typically the name).
fn normalized_slug(params: &Params, fallback: &str) -> String {
    match params.slug.as_deref().map(str::trim) {
        Some(value) if !value.is_empty() => value.to_string(),
        _ => slugify(fallback),
    }
}
/// Display name for a tag: the stored name when present, else the slug.
fn tag_name(item: &tags::Model) -> String {
    match &item.name {
        Some(name) => name.clone(),
        None => item.slug.clone(),
    }
}
/// Extracts a post's tag list as trimmed, lowercased, non-empty strings.
///
/// Posts store tags as a JSON array; non-array values, non-string entries
/// and blank entries are all ignored.
fn tag_values(post: &posts::Model) -> Vec<String> {
    match post.tags.as_ref().and_then(Value::as_array) {
        Some(items) => items
            .iter()
            .filter_map(|item| item.as_str())
            .map(|value| value.trim().to_ascii_lowercase())
            .filter(|value| !value.is_empty())
            .collect(),
        None => Vec::new(),
    }
}
/// Builds the public summary for one tag, counting matching posts.
///
/// A post matches when any of its (lowercased) tag values equals either the
/// tag's lowercased name or its lowercased slug. NOTE(review): this scans
/// every post per tag — O(tags x posts); acceptable for small blogs, worth
/// revisiting if the post count grows.
fn build_summary(item: &tags::Model, post_items: &[posts::Model]) -> TagSummary {
    let name = tag_name(item);
    // Both spellings a post might reference this tag by.
    let aliases = [
        name.trim().to_ascii_lowercase(),
        item.slug.trim().to_ascii_lowercase(),
    ];
    let count = post_items
        .iter()
        .filter(|post| {
            tag_values(post)
                .into_iter()
                .any(|value| aliases.iter().any(|alias| alias == &value))
        })
        .count();
    TagSummary {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
    }
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
let item = Entity::find_by_id(id).one(&ctx.db).await?;
/// Maps a tag entity into the admin `TagRecord` response shape,
/// formatting both timestamps as RFC 3339 strings.
fn build_record(item: tags::Model) -> TagRecord {
    TagRecord {
        id: item.id,
        name: item.name,
        slug: item.slug,
        description: item.description,
        cover_image: item.cover_image,
        accent_color: item.accent_color,
        seo_title: item.seo_title,
        seo_description: item.seo_description,
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<tags::Model> {
let item = tags::Entity::find_by_id(id).one(&ctx.db).await?;
item.ok_or_else(|| Error::NotFound)
}
#[debug_handler]
pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
format::json(Entity::find().all(&ctx.db).await?)
let tag_items = tags::Entity::find()
.order_by_asc(tags::Column::Slug)
.all(&ctx.db)
.await?;
let post_items = posts::Entity::find().all(&ctx.db).await?;
format::json(
tag_items
.into_iter()
.map(|item| build_summary(&item, &post_items))
.collect::<Vec<_>>(),
)
}
#[debug_handler]
pub async fn add(State(ctx): State<AppContext>, Json(params): Json<Params>) -> Result<Response> {
let mut item = ActiveModel {
..Default::default()
let name = normalized_name(&params)?;
let slug = normalized_slug(&params, &name);
let existing = tags::Entity::find()
.filter(tags::Column::Slug.eq(&slug))
.one(&ctx.db)
.await?;
let item = if let Some(existing_tag) = existing {
let mut item = existing_tag.into_active_model();
item.name = Set(Some(name));
item.slug = Set(slug);
item.description = Set(trim_to_option(params.description));
item.cover_image = Set(trim_to_option(params.cover_image));
item.accent_color = Set(trim_to_option(params.accent_color));
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
item.update(&ctx.db).await?
} else {
tags::ActiveModel {
name: Set(Some(name)),
slug: Set(slug),
description: Set(trim_to_option(params.description)),
cover_image: Set(trim_to_option(params.cover_image)),
accent_color: Set(trim_to_option(params.accent_color)),
seo_title: Set(trim_to_option(params.seo_title)),
seo_description: Set(trim_to_option(params.seo_description)),
..Default::default()
}
.insert(&ctx.db)
.await?
};
params.update(&mut item);
let item = item.insert(&ctx.db).await?;
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -47,49 +223,50 @@ pub async fn update(
State(ctx): State<AppContext>,
Json(params): Json<Params>,
) -> Result<Response> {
let name = normalized_name(&params)?;
let slug = normalized_slug(&params, &name);
let item = load_item(&ctx, id).await?;
let previous_name = item.name.clone();
let previous_slug = item.slug.clone();
let next_name = params
.name
if previous_name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty());
if let Some(next_name) = next_name {
if previous_name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
!= Some(next_name)
{
content::rewrite_tag_references(
previous_name.as_deref(),
&previous_slug,
Some(next_name),
)?;
}
.filter(|value| !value.is_empty())
!= Some(name.as_str())
{
content::rewrite_tag_references(
&ctx,
previous_name.as_deref(),
&previous_slug,
Some(&name),
)
.await?;
}
let mut item = item.into_active_model();
params.update(&mut item);
item.name = Set(Some(name));
item.slug = Set(slug);
item.description = Set(trim_to_option(params.description));
item.cover_image = Set(trim_to_option(params.cover_image));
item.accent_color = Set(trim_to_option(params.accent_color));
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
let item = load_item(&ctx, id).await?;
content::rewrite_tag_references(item.name.as_deref(), &item.slug, None)?;
content::rewrite_tag_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty()
}
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
format::json(load_item(&ctx, id).await?)
format::json(build_record(load_item(&ctx, id).await?))
}
pub fn routes() -> Routes {

View File

@@ -45,6 +45,10 @@
description: "节奏更明显一点,适合切换阅读状态。"
ai_enabled: false
paragraph_comments_enabled: true
comment_verification_mode: "captcha"
subscription_verification_mode: "off"
turnstile_site_key: "0x4AAAAAACy58kMBSwXwqMhx"
# SECURITY(review): a real Turnstile secret key was committed here — rotate the
# key and inject it via environment/secret store instead of fixture data.
turnstile_secret_key: "REPLACE_WITH_TURNSTILE_SECRET_KEY"
ai_provider: "newapi"
ai_api_base: "https://91code.jiangnight.com/v1"
# SECURITY(review): a live-looking API key was committed here — revoke it and
# load the real key from environment/secret store instead of fixture data.
ai_api_key: "REPLACE_WITH_AI_API_KEY"

View File

@@ -1,14 +1,12 @@
use async_trait::async_trait;
use loco_rs::{
app::{AppContext, Initializer},
Result,
app::{AppContext, Initializer},
};
use sea_orm::{ActiveModelTrait, EntityTrait, IntoActiveModel, QueryOrder, Set};
use std::path::{Path, PathBuf};
use crate::models::_entities::{comments, posts, site_settings};
use crate::services::content;
const FIXTURES_DIR: &str = "src/fixtures";
pub struct ContentSyncInitializer;
@@ -25,7 +23,6 @@ impl Initializer for ContentSyncInitializer {
}
async fn sync_content(ctx: &AppContext, base: &Path) -> Result<()> {
content::sync_markdown_posts(ctx).await?;
sync_site_settings(ctx, base).await?;
sync_comment_post_slugs(ctx, base).await?;
Ok(())
@@ -111,6 +108,19 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
})
.filter(|items| !items.is_empty())
.map(serde_json::Value::Array);
let comment_verification_mode = as_optional_string(&seed["comment_verification_mode"]);
let subscription_verification_mode =
as_optional_string(&seed["subscription_verification_mode"]);
let comment_turnstile_enabled = seed["comment_turnstile_enabled"]
.as_bool()
.or(comment_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let subscription_turnstile_enabled = seed["subscription_turnstile_enabled"]
.as_bool()
.or(subscription_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let existing = site_settings::Entity::find()
.order_by_asc(site_settings::Column::Id)
@@ -179,6 +189,24 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
model.paragraph_comments_enabled =
Set(seed["paragraph_comments_enabled"].as_bool().or(Some(true)));
}
if existing.comment_verification_mode.is_none() {
model.comment_verification_mode = Set(comment_verification_mode.clone());
}
if existing.comment_turnstile_enabled.is_none() {
model.comment_turnstile_enabled = Set(comment_turnstile_enabled);
}
if existing.subscription_verification_mode.is_none() {
model.subscription_verification_mode = Set(subscription_verification_mode.clone());
}
if existing.subscription_turnstile_enabled.is_none() {
model.subscription_turnstile_enabled = Set(subscription_turnstile_enabled);
}
if is_blank(&existing.turnstile_site_key) {
model.turnstile_site_key = Set(as_optional_string(&seed["turnstile_site_key"]));
}
if is_blank(&existing.turnstile_secret_key) {
model.turnstile_secret_key = Set(as_optional_string(&seed["turnstile_secret_key"]));
}
if should_upgrade_legacy_ai_defaults {
model.ai_provider = Set(as_optional_string(&seed["ai_provider"]));
model.ai_api_base = Set(as_optional_string(&seed["ai_api_base"]));
@@ -237,6 +265,12 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
paragraph_comments_enabled: Set(seed["paragraph_comments_enabled"]
.as_bool()
.or(Some(true))),
comment_verification_mode: Set(comment_verification_mode),
comment_turnstile_enabled: Set(comment_turnstile_enabled),
subscription_verification_mode: Set(subscription_verification_mode),
subscription_turnstile_enabled: Set(subscription_turnstile_enabled),
turnstile_site_key: Set(as_optional_string(&seed["turnstile_site_key"])),
turnstile_secret_key: Set(as_optional_string(&seed["turnstile_secret_key"])),
ai_provider: Set(as_optional_string(&seed["ai_provider"])),
ai_api_base: Set(as_optional_string(&seed["ai_api_base"])),
ai_api_key: Set(as_optional_string(&seed["ai_api_key"])),

View File

@@ -10,6 +10,13 @@ pub struct Model {
pub id: i32,
pub name: Option<String>,
pub slug: String,
#[sea_orm(column_type = "Text", nullable)]
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub seo_description: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -0,0 +1,25 @@
//! `SeaORM` Entity, manually maintained
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// SeaORM entity for the `media_assets` table: editorial metadata attached
/// to objects living in external storage (referenced by `object_key`).
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "media_assets")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Storage object key this metadata row describes.
    pub object_key: String,
    pub title: Option<String>,
    // Accessibility text for image assets.
    pub alt_text: Option<String>,
    #[sea_orm(column_type = "Text", nullable)]
    pub caption: Option<String>,
    // JSON array of tag strings (see the media_assets service for parsing).
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub tags: Option<Json>,
    #[sea_orm(column_type = "Text", nullable)]
    pub notes: Option<String>,
}

// No foreign-key relations are defined for this entity.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

View File

@@ -10,6 +10,7 @@ pub mod comment_persona_analysis_logs;
pub mod comments;
pub mod content_events;
pub mod friend_links;
pub mod media_assets;
pub mod notification_deliveries;
pub mod post_revisions;
pub mod posts;

View File

@@ -15,6 +15,9 @@ pub struct Model {
pub description: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub content: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
#[serde(skip_serializing, skip_deserializing)]
pub source_markdown: Option<String>,
pub category: Option<String>,
#[sea_orm(column_type = "JsonBinary", nullable)]
pub tags: Option<Json>,

View File

@@ -8,6 +8,7 @@ pub use super::comment_persona_analysis_logs::Entity as CommentPersonaAnalysisLo
pub use super::comments::Entity as Comments;
pub use super::content_events::Entity as ContentEvents;
pub use super::friend_links::Entity as FriendLinks;
pub use super::media_assets::Entity as MediaAssets;
pub use super::notification_deliveries::Entity as NotificationDeliveries;
pub use super::post_revisions::Entity as PostRevisions;
pub use super::posts::Entity as Posts;

View File

@@ -32,6 +32,21 @@ pub struct Model {
pub music_playlist: Option<Json>,
pub ai_enabled: Option<bool>,
pub paragraph_comments_enabled: Option<bool>,
pub comment_turnstile_enabled: Option<bool>,
pub subscription_turnstile_enabled: Option<bool>,
pub comment_verification_mode: Option<String>,
pub subscription_verification_mode: Option<String>,
pub web_push_enabled: Option<bool>,
#[sea_orm(column_type = "Text", nullable)]
pub turnstile_site_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub turnstile_secret_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub web_push_vapid_public_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub web_push_vapid_private_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub web_push_vapid_subject: Option<String>,
pub ai_provider: Option<String>,
pub ai_api_base: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
@@ -63,8 +78,14 @@ pub struct Model {
pub seo_default_twitter_handle: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub notification_webhook_url: Option<String>,
pub notification_channel_type: Option<String>,
pub notification_comment_enabled: Option<bool>,
pub notification_friend_link_enabled: Option<bool>,
pub subscription_popup_enabled: Option<bool>,
pub subscription_popup_title: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub subscription_popup_description: Option<String>,
pub subscription_popup_delay_seconds: Option<i32>,
#[sea_orm(column_type = "JsonBinary", nullable)]
pub search_synonyms: Option<Json>,
}

View File

@@ -12,6 +12,13 @@ pub struct Model {
pub id: i32,
pub name: Option<String>,
pub slug: String,
#[sea_orm(column_type = "Text", nullable)]
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub seo_description: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -0,0 +1,23 @@
pub use super::_entities::media_assets::{ActiveModel, Entity, Model};
use sea_orm::entity::prelude::*;
pub type MediaAssets = Entity;
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {
    /// Refreshes `updated_at` on UPDATE operations when the caller has not
    /// set it explicitly, so every edit bumps the timestamp. INSERTs are
    /// left untouched here.
    async fn before_save<C>(self, _db: &C, insert: bool) -> std::result::Result<Self, DbErr>
    where
        C: ConnectionTrait,
    {
        if !insert && self.updated_at.is_unchanged() {
            // Take ownership to mutate, then hand the model back to SeaORM.
            let mut this = self;
            this.updated_at = sea_orm::ActiveValue::Set(chrono::Utc::now().into());
            Ok(this)
        } else {
            Ok(self)
        }
    }
}
impl Model {}
impl ActiveModel {}
impl Entity {}

View File

@@ -3,6 +3,7 @@ pub mod ai_chunks;
pub mod categories;
pub mod comments;
pub mod friend_links;
pub mod media_assets;
pub mod posts;
pub mod site_settings;
pub mod tags;

View File

@@ -1,16 +1,16 @@
use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, Engine as _};
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD};
use chrono::{DateTime, Utc};
use fastembed::{
InitOptionsUserDefined, Pooling, TextEmbedding, TokenizerFiles, UserDefinedEmbeddingModel,
};
use loco_rs::prelude::*;
use reqwest::{header::CONTENT_TYPE, multipart, Client, Url};
use reqwest::{Client, Url, header::CONTENT_TYPE, multipart};
use sea_orm::{
ActiveModelTrait, ConnectionTrait, DbBackend, EntityTrait, FromQueryResult, IntoActiveModel,
PaginatorTrait, QueryOrder, Set, Statement,
};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use serde_json::{Value, json};
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::{Mutex, OnceLock};
@@ -34,8 +34,7 @@ const DEFAULT_CLOUDFLARE_CHAT_MODEL: &str = "@cf/meta/llama-3.1-8b-instruct";
const DEFAULT_CLOUDFLARE_IMAGE_MODEL: &str = "@cf/black-forest-labs/flux-2-klein-4b";
const DEFAULT_TOP_K: usize = 4;
const DEFAULT_CHUNK_SIZE: usize = 1200;
const DEFAULT_SYSTEM_PROMPT: &str =
"你是这个博客的站内 AI 助手。请严格基于提供的博客上下文回答,优先给出准确结论,再补充细节;如果上下文不足,请明确说明。";
const DEFAULT_SYSTEM_PROMPT: &str = "你是这个博客的站内 AI 助手。请严格基于提供的博客上下文回答,优先给出准确结论,再补充细节;如果上下文不足,请明确说明。";
const EMBEDDING_BATCH_SIZE: usize = 32;
const EMBEDDING_DIMENSION: usize = 384;
const LOCAL_EMBEDDING_MODEL_LABEL: &str = "fastembed / local all-MiniLM-L6-v2";
@@ -2096,8 +2095,8 @@ pub(crate) fn build_provider_url(request: &AiProviderRequest) -> String {
#[cfg(test)]
mod tests {
use super::{
build_provider_url, extract_provider_text, is_profile_question,
normalize_provider_api_base, parse_provider_sse_body, AiProviderRequest,
AiProviderRequest, build_provider_url, extract_provider_text, is_profile_question,
normalize_provider_api_base, parse_provider_sse_body,
};
fn build_request(provider: &str, api_base: &str) -> AiProviderRequest {
@@ -2643,7 +2642,7 @@ async fn retrieve_matches(
pub async fn rebuild_index(ctx: &AppContext) -> Result<AiIndexSummary> {
let settings = load_runtime_settings(ctx, false).await?;
let posts = content::sync_markdown_posts(ctx).await?;
let posts = content::load_markdown_posts_from_store(ctx).await?;
let mut chunk_drafts = build_chunks(&posts, settings.chunk_size);
chunk_drafts.extend(build_profile_chunks(&settings.raw, settings.chunk_size));
let embeddings = if chunk_drafts.is_empty() {

View File

@@ -0,0 +1,620 @@
use std::path::Path;
use chrono::Utc;
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, QueryFilter,
QueryOrder, Set,
};
use serde::{Deserialize, Serialize};
use crate::{
controllers::site_settings,
models::_entities::{
categories, comments, friend_links, media_assets, posts, reviews,
site_settings as site_settings_entity, tags,
},
services::{content, media_assets as media_assets_service, storage},
};
const BACKUP_VERSION: &str = "2026-04-01";
const WARNING_STORAGE_BINARIES: &str = "当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
/// Portable taxonomy (category or tag) record inside a site backup.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupTaxonomyRecord {
    pub name: String,
    pub slug: String,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
}

/// Review entry inside a site backup; mirrors the reviews entity fields.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupReviewRecord {
    pub title: Option<String>,
    pub review_type: Option<String>,
    pub rating: Option<i32>,
    pub review_date: Option<String>,
    pub status: Option<String>,
    pub description: Option<String>,
    // Stored as a single string in the reviews table (not a JSON array).
    pub tags: Option<String>,
    pub cover: Option<String>,
    pub link_url: Option<String>,
}

/// Friend-link entry inside a site backup; `site_url` is the natural key.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupFriendLinkRecord {
    pub site_name: Option<String>,
    pub site_url: String,
    pub avatar_url: Option<String>,
    pub description: Option<String>,
    pub category: Option<String>,
    pub status: Option<String>,
}

/// Media-asset metadata inside a site backup, keyed by the storage object.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupMediaAssetRecord {
    pub object_key: String,
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    pub tags: Vec<String>,
    pub notes: Option<String>,
}

/// One entry of the object-storage manifest (listing only — no binaries).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupStorageObjectRecord {
    pub key: String,
    pub url: String,
    pub size_bytes: i64,
    pub last_modified: Option<String>,
}

/// One markdown post inside a site backup (full source document).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupPostDocument {
    pub slug: String,
    pub file_name: String,
    pub markdown: String,
}

/// Top-level site backup payload.
///
/// `includes_storage_binaries` is always false for this backup version:
/// `storage_manifest` lists objects but the backup carries no file bytes
/// (see `warning`).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SiteBackupDocument {
    pub version: String,
    pub exported_at: String,
    pub includes_storage_binaries: bool,
    pub warning: String,
    pub site_settings: site_settings_entity::Model,
    pub categories: Vec<BackupTaxonomyRecord>,
    pub tags: Vec<BackupTaxonomyRecord>,
    pub reviews: Vec<BackupReviewRecord>,
    pub friend_links: Vec<BackupFriendLinkRecord>,
    pub media_assets: Vec<BackupMediaAssetRecord>,
    // None when object storage (R2) is not configured.
    pub storage_manifest: Option<Vec<BackupStorageObjectRecord>>,
    pub posts: Vec<BackupPostDocument>,
}

/// Per-section counters returned after importing a site backup.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SiteBackupImportSummary {
    pub imported: bool,
    // "merge" or "replace" (see `normalize_backup_mode`).
    pub mode: String,
    pub site_settings_restored: bool,
    pub posts_written: usize,
    pub categories_upserted: usize,
    pub tags_upserted: usize,
    pub reviews_upserted: usize,
    pub friend_links_upserted: usize,
    pub media_assets_upserted: usize,
    pub storage_manifest_items: usize,
    pub includes_storage_binaries: bool,
    pub warning: String,
}
/// Normalizes an optional user-supplied string: trims surrounding whitespace
/// and maps empty or whitespace-only input to `None`.
///
/// Improvement over the previous version: the trimmed slice is inspected
/// before allocating, so whitespace-only input no longer allocates a String.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value.and_then(|item| {
        let trimmed = item.trim();
        if trimmed.is_empty() {
            None
        } else {
            // Allocate only when there is content worth keeping.
            Some(trimmed.to_string())
        }
    })
}
/// Converts arbitrary text into a URL-safe slug.
///
/// ASCII alphanumerics are lowercased and kept; runs of whitespace, `-` and
/// `_` collapse into a single dash; every other character (including
/// non-ASCII letters) is dropped; leading/trailing dashes are stripped.
fn slugify(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
        } else if (ch.is_whitespace() || ch == '-' || ch == '_') && !out.ends_with('-') {
            // Separator: emit at most one dash per run.
            out.push('-');
        }
        // All other characters are silently discarded.
    }
    out.trim_matches('-').to_string()
}
/// Normalizes the requested import mode to exactly `"replace"` or `"merge"`.
///
/// Missing input, blank input, and anything other than a case-insensitive
/// "replace" fall back to the safe default, "merge".
///
/// Improvement over the previous version: compares case-insensitively in
/// place (`eq_ignore_ascii_case`) instead of allocating a lowercased copy.
fn normalize_backup_mode(value: Option<&str>) -> String {
    let mode = value.map(str::trim).unwrap_or("merge");
    if mode.eq_ignore_ascii_case("replace") {
        "replace".to_string()
    } else {
        "merge".to_string()
    }
}
/// Normalizes line endings to LF. Only CRLF pairs are rewritten; a bare
/// `\r` (classic-Mac ending) is deliberately left untouched.
fn normalize_markdown(value: &str) -> String {
    value.replace("\r\n", "\n")
}
/// Validates one backup post and resolves its canonical `(slug, markdown)`.
///
/// Slug resolution order: the slug parsed out of the markdown front matter
/// wins; otherwise the record's own `slug` field is used. A file name is
/// synthesized from the slug when the record lacks one. Fails with
/// `BadRequest` when no non-empty slug can be determined.
fn normalized_backup_post(document: &BackupPostDocument) -> Result<(String, String)> {
    let candidate_slug = trim_to_option(Some(document.slug.clone())).unwrap_or_default();
    // Fall back to "<slug>.md", or "post.md" when the slug is also empty.
    let file_name = trim_to_option(Some(document.file_name.clone())).unwrap_or_else(|| {
        format!(
            "{}.md",
            if candidate_slug.is_empty() {
                "post"
            } else {
                &candidate_slug
            }
        )
    });
    let file_stem = Path::new(&file_name)
        .file_stem()
        .and_then(|value| value.to_str())
        .unwrap_or("post");
    let markdown = normalize_markdown(&document.markdown);
    // Parsing also validates the document's front matter.
    let parsed = content::parse_markdown_source(file_stem, &markdown, &file_name)?;
    let slug = if parsed.slug.trim().is_empty() {
        candidate_slug
    } else {
        parsed.slug
    };
    if slug.trim().is_empty() {
        return Err(Error::BadRequest("备份中的文章 slug 不能为空".to_string()));
    }
    Ok((slug, markdown))
}
/// Lists object-storage contents for the backup manifest.
///
/// Returns `None` when no R2 storage is configured (manifest omitted from
/// the backup). NOTE(review): the listing is capped at 1000 objects — buckets
/// larger than that produce a truncated manifest; confirm whether
/// `storage::list_objects` paginates internally.
async fn export_storage_manifest(
    ctx: &AppContext,
) -> Result<Option<Vec<BackupStorageObjectRecord>>> {
    if storage::optional_r2_settings(ctx).await?.is_none() {
        return Ok(None);
    }
    Ok(Some(
        storage::list_objects(ctx, None, 1000)
            .await?
            .into_iter()
            .map(|item| BackupStorageObjectRecord {
                key: item.key,
                url: item.url,
                size_bytes: item.size_bytes,
                last_modified: item.last_modified,
            })
            .collect(),
    ))
}
/// Maps a category entity into a backup record; name falls back to slug.
fn export_category_record(item: categories::Model) -> BackupTaxonomyRecord {
    BackupTaxonomyRecord {
        name: item.name.unwrap_or_else(|| item.slug.clone()),
        slug: item.slug,
        description: item.description,
        cover_image: item.cover_image,
        accent_color: item.accent_color,
        seo_title: item.seo_title,
        seo_description: item.seo_description,
    }
}

/// Maps a tag entity into a backup record; name falls back to slug.
/// (Intentionally mirrors `export_category_record` — both taxonomies share
/// the `BackupTaxonomyRecord` shape.)
fn export_tag_record(item: tags::Model) -> BackupTaxonomyRecord {
    BackupTaxonomyRecord {
        name: item.name.unwrap_or_else(|| item.slug.clone()),
        slug: item.slug,
        description: item.description,
        cover_image: item.cover_image,
        accent_color: item.accent_color,
        seo_title: item.seo_title,
        seo_description: item.seo_description,
    }
}

/// Maps a review entity into a backup record (straight field copy).
fn export_review_record(item: reviews::Model) -> BackupReviewRecord {
    BackupReviewRecord {
        title: item.title,
        review_type: item.review_type,
        rating: item.rating,
        review_date: item.review_date,
        status: item.status,
        description: item.description,
        tags: item.tags,
        cover: item.cover,
        link_url: item.link_url,
    }
}

/// Maps a friend-link entity into a backup record (straight field copy).
fn export_friend_link_record(item: friend_links::Model) -> BackupFriendLinkRecord {
    BackupFriendLinkRecord {
        site_name: item.site_name,
        site_url: item.site_url,
        avatar_url: item.avatar_url,
        description: item.description,
        category: item.category,
        status: item.status,
    }
}

/// Maps a media-asset entity into a backup record; the JSON tags column is
/// flattened into a `Vec<String>` via the media_assets service helper.
fn export_media_asset_record(item: media_assets::Model) -> BackupMediaAssetRecord {
    let tags = media_assets_service::tag_list(&item);
    BackupMediaAssetRecord {
        object_key: item.object_key,
        title: item.title,
        alt_text: item.alt_text,
        caption: item.caption,
        tags,
        notes: item.notes,
    }
}
/// Assembles the full site backup document.
///
/// Gathers site settings, taxonomies, reviews, friend links, media-asset
/// metadata, the markdown source of every post, and (best-effort) the
/// object-storage manifest. The backup never contains storage binaries —
/// only the manifest — which `warning` spells out for operators.
pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument> {
    let site_settings_row = site_settings::load_current(ctx).await?;
    // Taxonomies are exported in slug order for stable, diffable backups.
    let categories = categories::Entity::find()
        .order_by_asc(categories::Column::Slug)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_category_record)
        .collect::<Vec<_>>();
    let tags = tags::Entity::find()
        .order_by_asc(tags::Column::Slug)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_tag_record)
        .collect::<Vec<_>>();
    let reviews = reviews::Entity::find()
        .order_by_desc(reviews::Column::UpdatedAt)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_review_record)
        .collect::<Vec<_>>();
    let friend_links = friend_links::Entity::find()
        .order_by_asc(friend_links::Column::SiteUrl)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_friend_link_record)
        .collect::<Vec<_>>();
    let media_assets = media_assets::Entity::find()
        .order_by_asc(media_assets::Column::ObjectKey)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_media_asset_record)
        .collect::<Vec<_>>();
    // Posts are re-serialized from the canonical markdown store.
    let posts = content::load_markdown_posts_from_store(ctx)
        .await?
        .into_iter()
        .map(|post| {
            Ok(BackupPostDocument {
                slug: post.slug.clone(),
                file_name: format!("{}.md", post.slug),
                markdown: content::build_markdown_document(&post),
            })
        })
        .collect::<Result<Vec<_>>>()?;
    // Best-effort: a failing storage listing must not block the backup.
    let storage_manifest = match export_storage_manifest(ctx).await {
        Ok(items) => items,
        Err(error) => {
            tracing::warn!(
                ?error,
                "failed to export storage manifest, continuing without it"
            );
            None
        }
    };
    Ok(SiteBackupDocument {
        version: BACKUP_VERSION.to_string(),
        exported_at: Utc::now().to_rfc3339(),
        includes_storage_binaries: false,
        warning: WARNING_STORAGE_BINARIES.to_string(),
        site_settings: site_settings_row,
        categories,
        tags,
        reviews,
        friend_links,
        media_assets,
        storage_manifest,
        posts,
    })
}
/// Overwrite the current site settings row with values from a backup.
///
/// The backup payload replaces every column except the row identity and
/// creation timestamp, which are kept from the currently-stored row;
/// `updated_at` is refreshed to now.
async fn restore_site_settings(
    ctx: &AppContext,
    value: &site_settings_entity::Model,
) -> Result<()> {
    let current = site_settings::load_current(ctx).await?;
    let mut active = value.clone().into_active_model();
    // Preserve the live row's identity and creation time so the restore
    // updates in place instead of inventing a new settings row.
    active.id = Set(current.id);
    active.created_at = Set(current.created_at);
    active.updated_at = Set(Utc::now().into());
    // reset_all marks every field dirty so values that happen to match the
    // stored row are still written back.
    active.reset_all().update(&ctx.db).await?;
    Ok(())
}
/// Insert or update a category from a backup taxonomy record.
///
/// The display name falls back to the slug and the slug falls back to the
/// slugified name, so either field alone is enough to restore the row. An
/// existing row is matched by slug OR name to avoid creating duplicates.
///
/// # Errors
/// Returns `Error::BadRequest` when no usable slug can be derived;
/// propagates database errors.
async fn upsert_category(ctx: &AppContext, item: &BackupTaxonomyRecord) -> Result<()> {
    let name = trim_to_option(Some(item.name.clone())).unwrap_or_else(|| item.slug.clone());
    let slug = trim_to_option(Some(item.slug.clone())).unwrap_or_else(|| slugify(&name));
    if slug.is_empty() {
        return Err(Error::BadRequest("分类 slug 不能为空".to_string()));
    }
    // Either identifier (slug or name) may claim an existing row.
    let existing = categories::Entity::find()
        .filter(
            Condition::any()
                .add(categories::Column::Slug.eq(&slug))
                .add(categories::Column::Name.eq(name.clone())),
        )
        .one(&ctx.db)
        .await?;
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    // Optional text fields are trimmed; blank values collapse to NULL.
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(item.description.clone()));
    active.cover_image = Set(trim_to_option(item.cover_image.clone()));
    active.accent_color = Set(trim_to_option(item.accent_color.clone()));
    active.seo_title = Set(trim_to_option(item.seo_title.clone()));
    active.seo_description = Set(trim_to_option(item.seo_description.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Insert or update a tag from a backup taxonomy record.
///
/// Mirrors `upsert_category`: name falls back to slug, slug falls back to
/// the slugified name, and an existing row is matched by slug OR name so
/// restores never duplicate tags.
///
/// # Errors
/// Returns `Error::BadRequest` when no usable slug can be derived;
/// propagates database errors.
async fn upsert_tag(ctx: &AppContext, item: &BackupTaxonomyRecord) -> Result<()> {
    let name = trim_to_option(Some(item.name.clone())).unwrap_or_else(|| item.slug.clone());
    let slug = trim_to_option(Some(item.slug.clone())).unwrap_or_else(|| slugify(&name));
    if slug.is_empty() {
        return Err(Error::BadRequest("标签 slug 不能为空".to_string()));
    }
    // Either identifier (slug or name) may claim an existing row.
    let existing = tags::Entity::find()
        .filter(
            Condition::any()
                .add(tags::Column::Slug.eq(&slug))
                .add(tags::Column::Name.eq(name.clone())),
        )
        .one(&ctx.db)
        .await?;
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    // Optional text fields are trimmed; blank values collapse to NULL.
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(item.description.clone()));
    active.cover_image = Set(trim_to_option(item.cover_image.clone()));
    active.accent_color = Set(trim_to_option(item.accent_color.clone()));
    active.seo_title = Set(trim_to_option(item.seo_title.clone()));
    active.seo_description = Set(trim_to_option(item.seo_description.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Insert or update a friend link from a backup record.
///
/// The site URL is the natural key: an existing row with the same URL is
/// updated in place, otherwise a new row is inserted.
///
/// # Errors
/// Returns `Error::BadRequest` when `site_url` is empty after trimming;
/// propagates database errors.
async fn upsert_friend_link(ctx: &AppContext, item: &BackupFriendLinkRecord) -> Result<()> {
    let site_url = trim_to_option(Some(item.site_url.clone()))
        .ok_or_else(|| Error::BadRequest("友链 site_url 不能为空".to_string()))?;
    let existing = friend_links::Entity::find()
        .filter(friend_links::Column::SiteUrl.eq(&site_url))
        .one(&ctx.db)
        .await?;
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    // Optional text fields are trimmed; blank values collapse to NULL.
    active.site_name = Set(trim_to_option(item.site_name.clone()));
    active.site_url = Set(site_url);
    active.avatar_url = Set(trim_to_option(item.avatar_url.clone()));
    active.description = Set(trim_to_option(item.description.clone()));
    active.category = Set(trim_to_option(item.category.clone()));
    active.status = Set(trim_to_option(item.status.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Insert or update a review from a backup record.
///
/// Reviews have no single natural key, so an existing row is matched by the
/// combination of whichever of title / review type / review date are
/// present (lowest id wins on ties). If all three are absent no lookup is
/// attempted and a new row is always inserted.
async fn upsert_review(ctx: &AppContext, item: &BackupReviewRecord) -> Result<()> {
    let title = trim_to_option(item.title.clone());
    let review_type = trim_to_option(item.review_type.clone());
    let review_date = trim_to_option(item.review_date.clone());
    // Build the match query from only the fields that are actually set.
    let mut query = reviews::Entity::find();
    if let Some(value) = title.clone() {
        query = query.filter(reviews::Column::Title.eq(value));
    }
    if let Some(value) = review_type.clone() {
        query = query.filter(reviews::Column::ReviewType.eq(value));
    }
    if let Some(value) = review_date.clone() {
        query = query.filter(reviews::Column::ReviewDate.eq(value));
    }
    // With no identifying fields at all, skip the lookup — an unfiltered
    // query would match an arbitrary existing review.
    let existing = if title.is_some() || review_type.is_some() || review_date.is_some() {
        query.order_by_asc(reviews::Column::Id).one(&ctx.db).await?
    } else {
        None
    };
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    active.title = Set(title);
    active.review_type = Set(review_type);
    active.rating = Set(item.rating);
    active.review_date = Set(review_date);
    active.status = Set(trim_to_option(item.status.clone()));
    active.description = Set(trim_to_option(item.description.clone()));
    active.tags = Set(trim_to_option(item.tags.clone()));
    active.cover = Set(trim_to_option(item.cover.clone()));
    active.link_url = Set(trim_to_option(item.link_url.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Restore one media asset's metadata, keyed by its storage object key.
///
/// Delegates to the media-assets service, which performs the actual
/// insert-or-update.
async fn upsert_media_asset(ctx: &AppContext, item: &BackupMediaAssetRecord) -> Result<()> {
    let metadata = media_assets_service::MediaAssetMetadataInput {
        title: item.title.clone(),
        alt_text: item.alt_text.clone(),
        caption: item.caption.clone(),
        tags: Some(item.tags.clone()),
        notes: item.notes.clone(),
    };
    media_assets_service::upsert_by_key(ctx, &item.object_key, metadata).await?;
    Ok(())
}
/// Persist backup post documents into the post store.
///
/// When `replace_existing` is true, every current post and its comments are
/// purged first. Returns the number of distinct slugs written — duplicate
/// slugs within `documents` are only counted (and stored) once, last one
/// wins.
///
/// # Errors
/// Propagates normalization, content-store, and post-deletion errors;
/// comment-deletion failures are deliberately ignored (best effort).
async fn write_backup_posts(
    ctx: &AppContext,
    documents: &[BackupPostDocument],
    replace_existing: bool,
) -> Result<usize> {
    if replace_existing {
        let existing_posts = posts::Entity::find().all(&ctx.db).await?;
        for post in &existing_posts {
            // Bulk-delete the post's comments in one query per slug instead
            // of loading every comment row and deleting it individually
            // (the previous N+1 pattern). Errors are ignored, preserving
            // the best-effort behavior of the original per-row deletes.
            let _ = comments::Entity::delete_many()
                .filter(comments::Column::PostSlug.eq(&post.slug))
                .exec(&ctx.db)
                .await;
        }
        posts::Entity::delete_many().exec(&ctx.db).await?;
    }
    if documents.is_empty() {
        return Ok(0);
    }
    // Track slugs in a set so repeated documents are counted once.
    let mut written = std::collections::HashSet::new();
    for document in documents {
        let (slug, markdown) = normalized_backup_post(document)?;
        content::upsert_markdown_document(ctx, Some(&slug), &markdown).await?;
        written.insert(slug);
    }
    Ok(written.len())
}
/// Import a site backup document, either merging into or replacing the
/// current data set.
///
/// `mode` is normalized via `normalize_backup_mode`; in "replace" mode the
/// friend links, reviews, media asset metadata, categories, and tags are
/// purged up front (posts and their comments are purged inside
/// `write_backup_posts`). Any other mode upserts on top of existing data.
/// Storage binaries are never part of a backup; the returned summary
/// repeats that warning and reports per-collection counts.
///
/// # Errors
/// Propagates the first failure from any restore step; partial progress is
/// not rolled back.
pub async fn import_site_backup(
    ctx: &AppContext,
    backup: SiteBackupDocument,
    mode: Option<&str>,
) -> Result<SiteBackupImportSummary> {
    let mode = normalize_backup_mode(mode);
    let replace_existing = mode == "replace";
    if replace_existing {
        friend_links::Entity::delete_many().exec(&ctx.db).await?;
        reviews::Entity::delete_many().exec(&ctx.db).await?;
        media_assets::Entity::delete_many().exec(&ctx.db).await?;
        categories::Entity::delete_many().exec(&ctx.db).await?;
        tags::Entity::delete_many().exec(&ctx.db).await?;
    }
    restore_site_settings(ctx, &backup.site_settings).await?;
    let posts_written = write_backup_posts(ctx, &backup.posts, replace_existing).await?;
    // Counters equal the collection sizes on success; any upsert failure
    // aborts the import early via `?`.
    let mut categories_upserted = 0_usize;
    for item in &backup.categories {
        upsert_category(ctx, item).await?;
        categories_upserted += 1;
    }
    let mut tags_upserted = 0_usize;
    for item in &backup.tags {
        upsert_tag(ctx, item).await?;
        tags_upserted += 1;
    }
    let mut reviews_upserted = 0_usize;
    for item in &backup.reviews {
        upsert_review(ctx, item).await?;
        reviews_upserted += 1;
    }
    let mut friend_links_upserted = 0_usize;
    for item in &backup.friend_links {
        upsert_friend_link(ctx, item).await?;
        friend_links_upserted += 1;
    }
    let mut media_assets_upserted = 0_usize;
    for item in &backup.media_assets {
        upsert_media_asset(ctx, item).await?;
        media_assets_upserted += 1;
    }
    Ok(SiteBackupImportSummary {
        imported: true,
        mode,
        site_settings_restored: true,
        posts_written,
        categories_upserted,
        tags_upserted,
        reviews_upserted,
        friend_links_upserted,
        media_assets_upserted,
        storage_manifest_items: backup.storage_manifest.as_ref().map(Vec::len).unwrap_or(0),
        includes_storage_binaries: backup.includes_storage_binaries,
        warning: WARNING_STORAGE_BINARIES.to_string(),
    })
}

View File

@@ -39,6 +39,7 @@ pub struct CommentGuardInput<'a> {
pub author: Option<&'a str>,
pub content: Option<&'a str>,
pub honeypot_website: Option<&'a str>,
pub turnstile_token: Option<&'a str>,
pub captcha_token: Option<&'a str>,
pub captcha_answer: Option<&'a str>,
}
@@ -362,7 +363,24 @@ pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'
return Err(Error::BadRequest("提交未通过校验".to_string()));
}
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
let settings = crate::controllers::site_settings::load_current(ctx).await?;
match crate::services::turnstile::effective_mode(
&settings,
crate::services::turnstile::TurnstileScope::Comment,
) {
crate::services::turnstile::VerificationMode::Off => {}
crate::services::turnstile::VerificationMode::Captcha => {
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
}
crate::services::turnstile::VerificationMode::Turnstile => {
crate::services::turnstile::verify_token(
&settings,
input.turnstile_token,
input.ip_address,
)
.await?;
}
}
if contains_blocked_keyword(input).is_some() {
return Err(Error::BadRequest("评论内容包含敏感关键词".to_string()));

View File

@@ -6,19 +6,17 @@ use sea_orm::{
};
use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value;
use std::fs;
use std::path::{Path, PathBuf};
use std::path::Path;
use crate::models::_entities::{categories, comments, posts, tags};
pub const MARKDOWN_POSTS_DIR: &str = "content/posts";
const FIXTURE_POSTS_FILE: &str = "src/fixtures/posts.yaml";
pub const POST_STATUS_DRAFT: &str = "draft";
pub const POST_STATUS_PUBLISHED: &str = "published";
pub const POST_STATUS_OFFLINE: &str = "offline";
pub const POST_VISIBILITY_PUBLIC: &str = "public";
pub const POST_VISIBILITY_UNLISTED: &str = "unlisted";
pub const POST_VISIBILITY_PRIVATE: &str = "private";
const VIRTUAL_MARKDOWN_PATH_PREFIX: &str = "article://posts";
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
struct MarkdownFrontmatter {
@@ -105,32 +103,18 @@ pub struct MarkdownImportFile {
pub content: String,
}
#[derive(Debug, Clone, Deserialize)]
struct LegacyFixturePost {
title: String,
slug: String,
content: String,
excerpt: Option<String>,
category: Option<String>,
tags: Option<Vec<String>>,
pinned: Option<bool>,
published: Option<bool>,
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
#[derive(Debug, Clone)]
struct MarkdownDocumentSource {
post: MarkdownPost,
raw_markdown: String,
}
fn yaml_error(err: serde_yaml::Error) -> Error {
Error::string(&err.to_string())
}
fn posts_dir() -> PathBuf {
PathBuf::from(MARKDOWN_POSTS_DIR)
}
pub fn markdown_post_path(slug: &str) -> PathBuf {
posts_dir().join(format!("{slug}.md"))
pub fn virtual_markdown_document_path(slug: &str) -> String {
format!("{VIRTUAL_MARKDOWN_PATH_PREFIX}/{slug}")
}
fn normalize_newlines(input: &str) -> String {
@@ -157,6 +141,15 @@ fn normalize_string_list(values: Option<Vec<String>>) -> Vec<String> {
.collect()
}
fn normalize_post_tags(values: Vec<String>) -> Vec<String> {
let mut seen = std::collections::HashSet::new();
normalize_string_list(Some(values))
.into_iter()
.filter(|item| seen.insert(normalized_match_key(item)))
.collect()
}
fn yaml_scalar(value: &str) -> String {
serde_yaml::to_string(value)
.unwrap_or_else(|_| format!("{value:?}"))
@@ -214,7 +207,9 @@ fn parse_frontmatter_datetime(value: Option<String>) -> Option<DateTime<FixedOff
if let Ok(date_only) = NaiveDate::parse_from_str(&raw, "%Y-%m-%d") {
let naive = date_only.and_hms_opt(0, 0, 0)?;
return FixedOffset::east_opt(0)?.from_local_datetime(&naive).single();
return FixedOffset::east_opt(0)?
.from_local_datetime(&naive)
.single();
}
None
@@ -278,6 +273,46 @@ pub fn post_redirects_from_json(value: &Option<Value>) -> Vec<String> {
.collect()
}
fn json_string_array(value: &Option<Value>) -> Vec<String> {
value
.as_ref()
.and_then(Value::as_array)
.cloned()
.unwrap_or_default()
.into_iter()
.filter_map(|item| item.as_str().map(ToString::to_string))
.map(|item| item.trim().to_string())
.filter(|item| !item.is_empty())
.collect()
}
fn markdown_post_from_model(post: &posts::Model) -> MarkdownPost {
MarkdownPost {
title: trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone()),
slug: post.slug.clone(),
description: trim_to_option(post.description.clone())
.or_else(|| post.content.as_deref().and_then(excerpt_from_content)),
content: post.content.clone().unwrap_or_default(),
category: trim_to_option(post.category.clone()),
tags: json_string_array(&post.tags),
post_type: trim_to_option(post.post_type.clone()).unwrap_or_else(|| "article".to_string()),
image: trim_to_option(post.image.clone()),
images: json_string_array(&post.images),
pinned: post.pinned.unwrap_or(false),
status: normalize_post_status(post.status.as_deref()),
visibility: normalize_post_visibility(post.visibility.as_deref()),
publish_at: format_frontmatter_datetime(post.publish_at.clone()),
unpublish_at: format_frontmatter_datetime(post.unpublish_at.clone()),
canonical_url: normalize_url_like(post.canonical_url.clone()),
noindex: post.noindex.unwrap_or(false),
og_image: normalize_url_like(post.og_image.clone()),
redirect_from: post_redirects_from_json(&post.redirect_from),
redirect_to: trim_to_option(post.redirect_to.clone())
.map(|item| item.trim_matches('/').to_string()),
file_path: virtual_markdown_document_path(&post.slug),
}
}
pub fn is_post_listed_publicly(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
effective_post_state(
post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
@@ -431,17 +466,6 @@ fn split_frontmatter(raw: &str) -> Result<(MarkdownFrontmatter, String)> {
Ok((parsed, content))
}
fn parse_markdown_post(path: &Path) -> Result<MarkdownPost> {
let raw = fs::read_to_string(path).map_err(io_error)?;
let file_stem = path
.file_stem()
.and_then(|value| value.to_str())
.unwrap_or("post")
.to_string();
parse_markdown_source(&file_stem, &raw, &path.to_string_lossy())
}
pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
let (frontmatter, content) = split_frontmatter(raw)?;
@@ -567,103 +591,40 @@ pub fn build_markdown_document(post: &MarkdownPost) -> String {
lines.join("\n")
}
fn ensure_markdown_posts_bootstrapped() -> Result<()> {
let dir = posts_dir();
fs::create_dir_all(&dir).map_err(io_error)?;
fn markdown_document_from_model(model: &posts::Model) -> Result<MarkdownDocumentSource> {
let raw_markdown = model
.source_markdown
.clone()
.map(|value| normalize_newlines(&value))
.filter(|value| !value.trim().is_empty())
.unwrap_or_else(|| build_markdown_document(&markdown_post_from_model(model)));
let virtual_path = virtual_markdown_document_path(&model.slug);
let post = parse_markdown_source(&model.slug, &raw_markdown, &virtual_path)?;
let has_markdown = fs::read_dir(&dir)
.map_err(io_error)?
.filter_map(|entry| entry.ok())
.any(|entry| entry.path().extension().and_then(|value| value.to_str()) == Some("md"));
if has_markdown {
return Ok(());
}
let raw = fs::read_to_string(FIXTURE_POSTS_FILE).map_err(io_error)?;
let fixtures = serde_yaml::from_str::<Vec<LegacyFixturePost>>(&raw).map_err(yaml_error)?;
for fixture in fixtures {
let post = MarkdownPost {
title: fixture.title,
slug: fixture.slug.clone(),
description: trim_to_option(fixture.excerpt),
content: fixture.content,
category: trim_to_option(fixture.category),
tags: fixture.tags.unwrap_or_default(),
post_type: "article".to_string(),
image: None,
images: Vec::new(),
pinned: fixture.pinned.unwrap_or(false),
status: if fixture.published.unwrap_or(true) {
POST_STATUS_PUBLISHED.to_string()
} else {
POST_STATUS_DRAFT.to_string()
},
visibility: POST_VISIBILITY_PUBLIC.to_string(),
publish_at: None,
unpublish_at: None,
canonical_url: None,
noindex: false,
og_image: None,
redirect_from: Vec::new(),
redirect_to: None,
file_path: markdown_post_path(&fixture.slug)
.to_string_lossy()
.to_string(),
};
fs::write(
markdown_post_path(&fixture.slug),
build_markdown_document(&post),
)
.map_err(io_error)?;
}
Ok(())
Ok(MarkdownDocumentSource { post, raw_markdown })
}
fn load_markdown_posts_from_disk() -> Result<Vec<MarkdownPost>> {
ensure_markdown_posts_bootstrapped()?;
let mut posts = fs::read_dir(posts_dir())
.map_err(io_error)?
.filter_map(|entry| entry.ok())
.map(|entry| entry.path())
.filter(|path| path.extension().and_then(|value| value.to_str()) == Some("md"))
.map(|path| parse_markdown_post(&path))
async fn load_markdown_documents_from_store(
ctx: &AppContext,
) -> Result<Vec<MarkdownDocumentSource>> {
let mut documents = posts::Entity::find()
.order_by_asc(posts::Column::Slug)
.all(&ctx.db)
.await?
.into_iter()
.map(|item| markdown_document_from_model(&item))
.collect::<Result<Vec<_>>>()?;
posts.sort_by(|left, right| left.slug.cmp(&right.slug));
Ok(posts)
documents.sort_by(|left, right| left.post.slug.cmp(&right.post.slug));
Ok(documents)
}
async fn sync_tags_from_posts(ctx: &AppContext, posts: &[MarkdownPost]) -> Result<()> {
for post in posts {
for tag_name in &post.tags {
let slug = slugify(tag_name);
let trimmed = tag_name.trim();
let existing = tags::Entity::find()
.filter(
Condition::any()
.add(tags::Column::Slug.eq(&slug))
.add(tags::Column::Name.eq(trimmed)),
)
.one(&ctx.db)
.await?;
if existing.is_none() {
let item = tags::ActiveModel {
name: Set(Some(trimmed.to_string())),
slug: Set(slug),
..Default::default()
};
let _ = item.insert(&ctx.db).await;
}
}
}
Ok(())
pub async fn load_markdown_posts_from_store(ctx: &AppContext) -> Result<Vec<MarkdownPost>> {
Ok(load_markdown_documents_from_store(ctx)
.await?
.into_iter()
.map(|document| document.post)
.collect())
}
async fn ensure_category(ctx: &AppContext, raw_name: &str) -> Result<Option<String>> {
@@ -768,21 +729,138 @@ async fn canonicalize_tags(ctx: &AppContext, raw_tags: &[String]) -> Result<Vec<
Ok(canonical_tags)
}
fn write_markdown_post_to_disk(post: &MarkdownPost) -> Result<()> {
fs::write(
markdown_post_path(&post.slug),
build_markdown_document(post),
)
.map_err(io_error)
fn string_array_json(values: &[String]) -> Option<Value> {
(!values.is_empty()).then(|| Value::Array(values.iter().cloned().map(Value::String).collect()))
}
pub fn rewrite_category_references(
fn apply_markdown_post_to_active_model(
model: &mut posts::ActiveModel,
post: &MarkdownPost,
raw_markdown: &str,
) {
model.title = Set(Some(post.title.clone()));
model.slug = Set(post.slug.clone());
model.description = Set(post.description.clone());
model.content = Set(Some(post.content.clone()));
model.source_markdown = Set(Some(raw_markdown.to_string()));
model.category = Set(post.category.clone());
model.tags = Set(string_array_json(&post.tags));
model.post_type = Set(Some(post.post_type.clone()));
model.image = Set(post.image.clone());
model.images = Set(string_array_json(&post.images));
model.pinned = Set(Some(post.pinned));
model.status = Set(Some(normalize_post_status(Some(&post.status))));
model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
model.noindex = Set(Some(post.noindex));
model.og_image = Set(normalize_url_like(post.og_image.clone()));
model.redirect_from = Set(string_array_json(&post.redirect_from));
model.redirect_to = Set(
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string())
);
}
async fn save_markdown_post_to_store(
ctx: &AppContext,
mut post: MarkdownPost,
slug_hint: Option<&str>,
canonicalize_taxonomy: bool,
) -> Result<MarkdownPost> {
let normalized_slug_hint = slug_hint
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string);
post.title = trim_to_option(Some(post.title.clone())).unwrap_or_else(|| post.slug.clone());
post.slug = trim_to_option(Some(post.slug.clone()))
.or_else(|| normalized_slug_hint.clone())
.unwrap_or_else(|| slugify(&post.title));
post.description =
trim_to_option(post.description.clone()).or_else(|| excerpt_from_content(&post.content));
post.content = normalize_newlines(post.content.trim());
post.category = trim_to_option(post.category.clone());
post.tags = normalize_post_tags(post.tags.clone());
post.post_type =
trim_to_option(Some(post.post_type.clone())).unwrap_or_else(|| "article".to_string());
post.image = trim_to_option(post.image.clone());
post.images = normalize_string_list(Some(post.images.clone()));
post.status = normalize_post_status(Some(&post.status));
post.visibility = normalize_post_visibility(Some(&post.visibility));
post.publish_at =
format_frontmatter_datetime(parse_frontmatter_datetime(post.publish_at.clone()));
post.unpublish_at =
format_frontmatter_datetime(parse_frontmatter_datetime(post.unpublish_at.clone()));
post.canonical_url = normalize_url_like(post.canonical_url.clone());
post.og_image = normalize_url_like(post.og_image.clone());
post.redirect_from = normalize_redirect_list(Some(post.redirect_from.clone()));
post.redirect_to =
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string());
if post.slug.trim().is_empty() {
return Err(Error::BadRequest("slug is required".to_string()));
}
if canonicalize_taxonomy {
post.category = match post.category.as_deref() {
Some(category) => ensure_category(ctx, category).await?,
None => None,
};
post.tags = canonicalize_tags(ctx, &post.tags).await?;
}
let existing_by_hint = if let Some(hint) = normalized_slug_hint.as_deref() {
posts::Entity::find()
.filter(posts::Column::Slug.eq(hint))
.one(&ctx.db)
.await?
} else {
None
};
let existing_by_slug =
if existing_by_hint.as_ref().map(|item| item.slug.as_str()) == Some(post.slug.as_str()) {
None
} else {
posts::Entity::find()
.filter(posts::Column::Slug.eq(&post.slug))
.one(&ctx.db)
.await?
};
if let (Some(by_hint), Some(by_slug)) = (&existing_by_hint, &existing_by_slug) {
if by_hint.id != by_slug.id {
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {}",
post.slug
)));
}
}
let has_existing = existing_by_hint.is_some() || existing_by_slug.is_some();
let mut model = existing_by_hint
.or(existing_by_slug)
.map(|item| item.into_active_model())
.unwrap_or_default();
post.file_path = virtual_markdown_document_path(&post.slug);
let raw_markdown = build_markdown_document(&post);
apply_markdown_post_to_active_model(&mut model, &post, &raw_markdown);
if has_existing {
model.update(&ctx.db).await?;
} else {
model.insert(&ctx.db).await?;
}
Ok(post)
}
pub async fn rewrite_category_references(
ctx: &AppContext,
current_name: Option<&str>,
current_slug: &str,
next_name: Option<&str>,
) -> Result<usize> {
ensure_markdown_posts_bootstrapped()?;
let mut match_keys = Vec::new();
if let Some(name) = current_name {
let normalized = normalized_match_key(name);
@@ -805,9 +883,9 @@ pub fn rewrite_category_references(
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let mut changed = 0_usize;
let mut posts = load_markdown_posts_from_disk()?;
let posts = load_markdown_posts_from_store(ctx).await?;
for post in &mut posts {
for mut post in posts {
let Some(category) = post.category.as_deref() else {
continue;
};
@@ -816,16 +894,17 @@ pub fn rewrite_category_references(
continue;
}
let existing_slug = post.slug.clone();
match &next_category {
Some(updated_name) if same_text(category, updated_name) => {}
Some(updated_name) => {
post.category = Some(updated_name.clone());
write_markdown_post_to_disk(post)?;
save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1;
}
None => {
post.category = None;
write_markdown_post_to_disk(post)?;
save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1;
}
}
@@ -834,13 +913,12 @@ pub fn rewrite_category_references(
Ok(changed)
}
pub fn rewrite_tag_references(
pub async fn rewrite_tag_references(
ctx: &AppContext,
current_name: Option<&str>,
current_slug: &str,
next_name: Option<&str>,
) -> Result<usize> {
ensure_markdown_posts_bootstrapped()?;
let mut match_keys = Vec::new();
if let Some(name) = current_name {
let normalized = normalized_match_key(name);
@@ -863,9 +941,9 @@ pub fn rewrite_tag_references(
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let mut changed = 0_usize;
let mut posts = load_markdown_posts_from_disk()?;
let posts = load_markdown_posts_from_store(ctx).await?;
for post in &mut posts {
for mut post in posts {
let mut updated_tags = Vec::new();
let mut seen = std::collections::HashSet::new();
let mut post_changed = false;
@@ -889,8 +967,9 @@ pub fn rewrite_tag_references(
}
if post_changed {
let existing_slug = post.slug.clone();
post.tags = updated_tags;
write_markdown_post_to_disk(post)?;
save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1;
}
}
@@ -898,167 +977,43 @@ pub fn rewrite_tag_references(
Ok(changed)
}
async fn dedupe_tags(ctx: &AppContext) -> Result<()> {
let existing_tags = tags::Entity::find()
.order_by_asc(tags::Column::Id)
.all(&ctx.db)
.await?;
let mut seen = std::collections::HashSet::new();
for tag in existing_tags {
let key = if tag.slug.trim().is_empty() {
tag.name.as_deref().map(slugify).unwrap_or_default()
} else {
slugify(&tag.slug)
};
if key.is_empty() || seen.insert(key) {
continue;
}
let _ = tag.delete(&ctx.db).await;
}
Ok(())
pub async fn read_markdown_document_from_store(
ctx: &AppContext,
slug: &str,
) -> Result<(String, String)> {
let post = posts::Entity::find()
.filter(posts::Column::Slug.eq(slug))
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)?;
let document = markdown_document_from_model(&post)?;
Ok((
virtual_markdown_document_path(&document.post.slug),
document.raw_markdown,
))
}
async fn dedupe_categories(ctx: &AppContext) -> Result<()> {
let existing_categories = categories::Entity::find()
.order_by_asc(categories::Column::Id)
.all(&ctx.db)
.await?;
pub async fn upsert_markdown_document(
ctx: &AppContext,
slug_hint: Option<&str>,
markdown: &str,
) -> Result<MarkdownPost> {
let normalized_markdown = normalize_newlines(markdown);
let normalized_slug_hint = slug_hint
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let file_stem = normalized_slug_hint
.as_deref()
.filter(|value| !value.is_empty())
.unwrap_or("post");
let virtual_path = normalized_slug_hint
.as_deref()
.map(virtual_markdown_document_path)
.unwrap_or_else(|| format!("{VIRTUAL_MARKDOWN_PATH_PREFIX}/draft"));
let post = parse_markdown_source(file_stem, &normalized_markdown, &virtual_path)?;
let mut seen = std::collections::HashSet::new();
for category in existing_categories {
let key = if category.slug.trim().is_empty() {
category.name.as_deref().map(slugify).unwrap_or_default()
} else {
slugify(&category.slug)
};
if key.is_empty() || seen.insert(key) {
continue;
}
let _ = category.delete(&ctx.db).await;
}
Ok(())
}
pub async fn sync_markdown_posts(ctx: &AppContext) -> Result<Vec<MarkdownPost>> {
let markdown_posts = load_markdown_posts_from_disk()?;
let markdown_slugs = markdown_posts
.iter()
.map(|post| post.slug.clone())
.collect::<std::collections::HashSet<_>>();
let existing_posts = posts::Entity::find().all(&ctx.db).await?;
for stale_post in existing_posts
.into_iter()
.filter(|post| !markdown_slugs.contains(&post.slug))
{
let stale_slug = stale_post.slug.clone();
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(&stale_slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
}
let _ = stale_post.delete(&ctx.db).await;
}
for post in &markdown_posts {
let canonical_category = match post.category.as_deref() {
Some(category) => ensure_category(ctx, category).await?,
None => None,
};
let canonical_tags = canonicalize_tags(ctx, &post.tags).await?;
let existing = posts::Entity::find()
.filter(posts::Column::Slug.eq(&post.slug))
.one(&ctx.db)
.await?;
let has_existing = existing.is_some();
let mut model = existing
.map(|item| item.into_active_model())
.unwrap_or_default();
model.title = Set(Some(post.title.clone()));
model.slug = Set(post.slug.clone());
model.description = Set(post.description.clone());
model.content = Set(Some(post.content.clone()));
model.category = Set(canonical_category);
model.tags = Set(if canonical_tags.is_empty() {
None
} else {
Some(Value::Array(
canonical_tags.into_iter().map(Value::String).collect(),
))
});
model.post_type = Set(Some(post.post_type.clone()));
model.image = Set(post.image.clone());
model.images = Set(if post.images.is_empty() {
None
} else {
Some(Value::Array(
post.images
.iter()
.cloned()
.map(Value::String)
.collect::<Vec<_>>(),
))
});
model.pinned = Set(Some(post.pinned));
model.status = Set(Some(normalize_post_status(Some(&post.status))));
model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
model.noindex = Set(Some(post.noindex));
model.og_image = Set(normalize_url_like(post.og_image.clone()));
model.redirect_from = Set(if post.redirect_from.is_empty() {
None
} else {
Some(Value::Array(
post.redirect_from
.iter()
.cloned()
.map(Value::String)
.collect::<Vec<_>>(),
))
});
model.redirect_to = Set(
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string()),
);
if has_existing {
let _ = model.update(&ctx.db).await;
} else {
let _ = model.insert(&ctx.db).await;
}
}
sync_tags_from_posts(ctx, &markdown_posts).await?;
dedupe_tags(ctx).await?;
dedupe_categories(ctx).await?;
Ok(markdown_posts)
}
pub fn read_markdown_document(slug: &str) -> Result<(String, String)> {
let path = markdown_post_path(slug);
if !path.exists() {
return Err(Error::NotFound);
}
let raw = fs::read_to_string(&path).map_err(io_error)?;
Ok((path.to_string_lossy().to_string(), raw))
save_markdown_post_to_store(ctx, post, normalized_slug_hint.as_deref(), true).await
}
pub async fn write_markdown_document(
@@ -1066,24 +1021,25 @@ pub async fn write_markdown_document(
slug: &str,
markdown: &str,
) -> Result<MarkdownPost> {
ensure_markdown_posts_bootstrapped()?;
let path = markdown_post_path(slug);
fs::write(&path, normalize_newlines(markdown)).map_err(io_error)?;
let updated = parse_markdown_post(&path)?;
sync_markdown_posts(ctx).await?;
Ok(updated)
upsert_markdown_document(ctx, Some(slug), markdown).await
}
pub async fn delete_markdown_post(ctx: &AppContext, slug: &str) -> Result<()> {
ensure_markdown_posts_bootstrapped()?;
let path = markdown_post_path(slug);
if !path.exists() {
return Err(Error::NotFound);
let post = posts::Entity::find()
.filter(posts::Column::Slug.eq(slug))
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)?;
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
}
fs::remove_file(&path).map_err(io_error)?;
sync_markdown_posts(ctx).await?;
post.delete(&ctx.db).await?;
Ok(())
}
@@ -1091,8 +1047,6 @@ pub async fn create_markdown_post(
ctx: &AppContext,
draft: MarkdownPostDraft,
) -> Result<MarkdownPost> {
ensure_markdown_posts_bootstrapped()?;
let title = draft.title.trim().to_string();
if title.is_empty() {
return Err(Error::BadRequest("title is required".to_string()));
@@ -1110,6 +1064,17 @@ pub async fn create_markdown_post(
return Err(Error::BadRequest("slug is required".to_string()));
}
if posts::Entity::find()
.filter(posts::Column::Slug.eq(&slug))
.one(&ctx.db)
.await?
.is_some()
{
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {slug}"
)));
}
let post = MarkdownPost {
title,
slug: slug.clone(),
@@ -1143,28 +1108,16 @@ pub async fn create_markdown_post(
redirect_from: normalize_redirect_list(Some(draft.redirect_from)),
redirect_to: trim_to_option(draft.redirect_to)
.map(|item| item.trim_matches('/').to_string()),
file_path: markdown_post_path(&slug).to_string_lossy().to_string(),
file_path: virtual_markdown_document_path(&slug),
};
let path = markdown_post_path(&slug);
if path.exists() {
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {slug}"
)));
}
fs::write(&path, build_markdown_document(&post)).map_err(io_error)?;
sync_markdown_posts(ctx).await?;
parse_markdown_post(&path)
save_markdown_post_to_store(ctx, post, Some(&slug), true).await
}
pub async fn import_markdown_documents(
ctx: &AppContext,
files: Vec<MarkdownImportFile>,
) -> Result<Vec<MarkdownPost>> {
ensure_markdown_posts_bootstrapped()?;
let mut imported_slugs = Vec::new();
let mut imported = Vec::new();
for file in files {
let path = Path::new(&file.file_name);
@@ -1194,15 +1147,8 @@ pub async fn import_markdown_documents(
continue;
}
fs::write(markdown_post_path(&slug), normalize_newlines(&file.content))
.map_err(io_error)?;
imported_slugs.push(slug);
imported.push(upsert_markdown_document(ctx, Some(&slug), &file.content).await?);
}
sync_markdown_posts(ctx).await?;
imported_slugs
.into_iter()
.map(|slug| parse_markdown_post(&markdown_post_path(&slug)))
.collect()
Ok(imported)
}

View File

@@ -0,0 +1,125 @@
use std::collections::HashMap;
use loco_rs::prelude::*;
use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, Set};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::models::_entities::media_assets;
/// Caller-supplied metadata for a media asset.
///
/// Every field is optional; values are trimmed (and tags lowercased and
/// de-duplicated) before being persisted. An absent field clears the stored
/// value on upsert.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct MediaAssetMetadataInput {
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    pub tags: Option<Vec<String>>,
    pub notes: Option<String>,
}
/// Trim an optional string; whitespace-only or absent input becomes `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let trimmed = value?.trim().to_string();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed)
    }
}
/// Normalize a tag list into a JSON array: trim each entry, lowercase it,
/// and drop duplicates while preserving first-seen order. Returns `None`
/// when nothing usable remains.
fn normalize_tag_list(values: Option<Vec<String>>) -> Option<Value> {
    let mut seen = std::collections::HashSet::new();
    let mut tags = Vec::new();
    for raw in values.unwrap_or_default() {
        if let Some(cleaned) = trim_to_option(Some(raw)) {
            let lowered = cleaned.to_ascii_lowercase();
            if seen.insert(lowered.clone()) {
                tags.push(Value::String(lowered));
            }
        }
    }
    if tags.is_empty() {
        None
    } else {
        Some(Value::Array(tags))
    }
}
/// Read the stored tags back out as plain strings.
///
/// Tags are stored as a JSON array of strings; a missing column, a
/// non-array value, or non-string entries all degrade gracefully.
pub fn tag_list(model: &media_assets::Model) -> Vec<String> {
    match model.tags.as_ref().and_then(Value::as_array) {
        Some(items) => items
            .iter()
            .filter_map(|item| item.as_str())
            .map(str::to_string)
            .collect(),
        None => Vec::new(),
    }
}
/// Fetch all media-asset rows matching `keys`, indexed by object key.
///
/// Keys with no matching row are simply absent from the returned map.
pub async fn list_by_keys(
    ctx: &AppContext,
    keys: &[String],
) -> Result<HashMap<String, media_assets::Model>> {
    // Short-circuit so we never issue an `IN ()` query.
    if keys.is_empty() {
        return Ok(HashMap::new());
    }
    let rows = media_assets::Entity::find()
        .filter(media_assets::Column::ObjectKey.is_in(keys.iter().cloned()))
        .all(&ctx.db)
        .await?;
    let mut by_key = HashMap::with_capacity(rows.len());
    for row in rows {
        by_key.insert(row.object_key.clone(), row);
    }
    Ok(by_key)
}
/// Look up a single media-asset row by its exact object key.
pub async fn get_by_key(ctx: &AppContext, object_key: &str) -> Result<Option<media_assets::Model>> {
    let found = media_assets::Entity::find()
        .filter(media_assets::Column::ObjectKey.eq(object_key))
        .one(&ctx.db)
        .await?;
    Ok(found)
}
/// Create or update the metadata row identified by `object_key`.
///
/// The key is trimmed and must be non-empty (otherwise `Error::BadRequest`).
/// Every payload field is normalized and written unconditionally, so an
/// absent field clears the previously stored value.
pub async fn upsert_by_key(
    ctx: &AppContext,
    object_key: &str,
    payload: MediaAssetMetadataInput,
) -> Result<media_assets::Model> {
    let normalized_key = object_key.trim();
    if normalized_key.is_empty() {
        return Err(Error::BadRequest("object key 不能为空".to_string()));
    }
    // NOTE(review): find-then-insert is not atomic; concurrent upserts on the
    // same key could race — confirm a unique index exists on object_key.
    let existing = get_by_key(ctx, normalized_key).await?;
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|item| item.into_active_model())
        .unwrap_or_else(|| media_assets::ActiveModel {
            object_key: Set(normalized_key.to_string()),
            ..Default::default()
        });
    // All fields are Set unconditionally — this is a full overwrite, not a patch.
    active.title = Set(trim_to_option(payload.title));
    active.alt_text = Set(trim_to_option(payload.alt_text));
    active.caption = Set(trim_to_option(payload.caption));
    active.tags = Set(normalize_tag_list(payload.tags));
    active.notes = Set(trim_to_option(payload.notes));
    if has_existing {
        active.update(&ctx.db).await.map_err(Into::into)
    } else {
        active.insert(&ctx.db).await.map_err(Into::into)
    }
}
/// Delete the row for `object_key` if it exists; missing rows are not an error.
pub async fn delete_by_key(ctx: &AppContext, object_key: &str) -> Result<()> {
    match get_by_key(ctx, object_key).await? {
        Some(model) => {
            model.delete(&ctx.db).await?;
            Ok(())
        }
        None => Ok(()),
    }
}
/// Delete metadata rows for every key in `object_keys`, one at a time.
/// Each delete is idempotent; the first database error aborts the loop.
pub async fn delete_by_keys(ctx: &AppContext, object_keys: &[String]) -> Result<()> {
    for object_key in object_keys.iter() {
        delete_by_key(ctx, object_key).await?;
    }
    Ok(())
}

View File

@@ -2,9 +2,13 @@ pub mod admin_audit;
pub mod abuse_guard;
pub mod ai;
pub mod analytics;
pub mod backups;
pub mod comment_guard;
pub mod content;
pub mod media_assets;
pub mod notifications;
pub mod post_revisions;
pub mod storage;
pub mod subscriptions;
pub mod turnstile;
pub mod web_push;

View File

@@ -1,10 +1,23 @@
use loco_rs::prelude::*;
use crate::{
controllers::site_settings,
models::_entities::{comments, friend_links},
models::_entities::{comments, friend_links, site_settings as site_settings_model},
services::subscriptions,
};
fn notification_channel_type(settings: &site_settings_model::Model) -> &'static str {
match settings
.notification_channel_type
.as_deref()
.map(str::trim)
.map(str::to_ascii_lowercase)
.as_deref()
{
Some("ntfy") => subscriptions::CHANNEL_NTFY,
_ => subscriptions::CHANNEL_WEBHOOK,
}
}
fn trim_to_option(value: Option<String>) -> Option<String> {
value.and_then(|item| {
let trimmed = item.trim().to_string();
@@ -81,9 +94,10 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
if settings.notification_comment_enabled.unwrap_or(false) {
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
let channel_type = notification_channel_type(&settings);
if let Err(error) = subscriptions::queue_direct_notification(
ctx,
subscriptions::CHANNEL_WEBHOOK,
channel_type,
&target,
subscriptions::EVENT_COMMENT_CREATED,
"新评论通知",
@@ -94,7 +108,7 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
)
.await
{
tracing::warn!("failed to queue legacy comment webhook notification: {error}");
tracing::warn!("failed to queue comment admin notification: {error}");
}
}
}
@@ -144,9 +158,10 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
if settings.notification_friend_link_enabled.unwrap_or(false) {
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
let channel_type = notification_channel_type(&settings);
if let Err(error) = subscriptions::queue_direct_notification(
ctx,
subscriptions::CHANNEL_WEBHOOK,
channel_type,
&target,
subscriptions::EVENT_FRIEND_LINK_CREATED,
"新友链申请通知",
@@ -157,7 +172,7 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
)
.await
{
tracing::warn!("failed to queue legacy friend-link webhook notification: {error}");
tracing::warn!("failed to queue friend-link admin notification: {error}");
}
}
}

View File

@@ -2,7 +2,6 @@ use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, Order, QueryFilter, QueryOrder, QuerySelect, Set,
};
use std::fs;
use crate::{
controllers::admin::AdminIdentity,
@@ -48,10 +47,10 @@ fn trim_to_option(value: Option<String>) -> Option<String> {
fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
let normalized = markdown.replace("\r\n", "\n");
if let Some(frontmatter) = normalized
.strip_prefix("---\n")
.and_then(|rest| rest.split_once("\n---\n").map(|(frontmatter, _)| frontmatter))
{
if let Some(frontmatter) = normalized.strip_prefix("---\n").and_then(|rest| {
rest.split_once("\n---\n")
.map(|(frontmatter, _)| frontmatter)
}) {
for line in frontmatter.lines() {
let trimmed = line.trim();
if let Some(raw) = trimmed.strip_prefix("title:") {
@@ -63,14 +62,16 @@ fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
}
}
normalized.lines().find_map(|line| {
line.trim()
.strip_prefix("# ")
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
})
.or_else(|| trim_to_option(Some(slug.to_string())))
normalized
.lines()
.find_map(|line| {
line.trim()
.strip_prefix("# ")
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
})
.or_else(|| trim_to_option(Some(slug.to_string())))
}
async fn lookup_post_title(ctx: &AppContext, slug: &str) -> Option<String> {
@@ -122,7 +123,7 @@ pub async fn capture_current_snapshot(
reason: Option<&str>,
metadata: Option<serde_json::Value>,
) -> Result<Option<post_revisions::Model>> {
let Ok((_path, markdown)) = content::read_markdown_document(slug) else {
let Ok((_path, markdown)) = content::read_markdown_document_from_store(ctx, slug).await else {
return Ok(None);
};
@@ -136,17 +137,14 @@ pub async fn list_revisions(
slug: Option<&str>,
limit: u64,
) -> Result<Vec<post_revisions::Model>> {
let mut query = post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
let mut query =
post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
if let Some(slug) = slug.map(str::trim).filter(|value| !value.is_empty()) {
query = query.filter(post_revisions::Column::PostSlug.eq(slug));
}
query
.limit(limit)
.all(&ctx.db)
.await
.map_err(Into::into)
query.limit(limit).all(&ctx.db).await.map_err(Into::into)
}
pub async fn get_revision(ctx: &AppContext, id: i32) -> Result<post_revisions::Model> {
@@ -187,13 +185,18 @@ pub async fn restore_revision(
let markdown = match restore_mode {
RestoreMode::Full => revision_markdown.clone(),
RestoreMode::Markdown | RestoreMode::Metadata => {
let (_path, current_markdown) = content::read_markdown_document(&slug).map_err(|_| {
Error::BadRequest("当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string())
})?;
let (_path, current_markdown) = content::read_markdown_document_from_store(ctx, &slug)
.await
.map_err(|_| {
Error::BadRequest(
"当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string(),
)
})?;
let virtual_path = content::virtual_markdown_document_path(&slug);
let revision_post =
content::parse_markdown_source(&slug, &revision_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
content::parse_markdown_source(&slug, &revision_markdown, &virtual_path)?;
let current_post =
content::parse_markdown_source(&slug, &current_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
content::parse_markdown_source(&slug, &current_markdown, &virtual_path)?;
let mut merged = current_post.clone();
match restore_mode {
RestoreMode::Markdown => {
@@ -224,10 +227,7 @@ pub async fn restore_revision(
}
};
fs::create_dir_all(content::MARKDOWN_POSTS_DIR).map_err(|error| Error::BadRequest(error.to_string()))?;
fs::write(content::markdown_post_path(&slug), markdown.replace("\r\n", "\n"))
.map_err(|error| Error::BadRequest(error.to_string()))?;
content::sync_markdown_posts(ctx).await?;
content::write_markdown_document(ctx, &slug, &markdown).await?;
let _ = capture_snapshot_from_markdown(
ctx,

View File

@@ -15,7 +15,7 @@ use uuid::Uuid;
use crate::{
mailers::subscription::SubscriptionMailer,
models::_entities::{notification_deliveries, posts, subscriptions},
services::content,
services::{content, web_push as web_push_service},
workers::notification_delivery::{
NotificationDeliveryWorker, NotificationDeliveryWorkerArgs,
},
@@ -26,6 +26,7 @@ pub const CHANNEL_WEBHOOK: &str = "webhook";
pub const CHANNEL_DISCORD: &str = "discord";
pub const CHANNEL_TELEGRAM: &str = "telegram";
pub const CHANNEL_NTFY: &str = "ntfy";
pub const CHANNEL_WEB_PUSH: &str = "web_push";
pub const STATUS_PENDING: &str = "pending";
pub const STATUS_ACTIVE: &str = "active";
@@ -139,6 +140,9 @@ pub fn normalize_channel_type(value: &str) -> String {
CHANNEL_DISCORD => CHANNEL_DISCORD.to_string(),
CHANNEL_TELEGRAM => CHANNEL_TELEGRAM.to_string(),
CHANNEL_NTFY => CHANNEL_NTFY.to_string(),
CHANNEL_WEB_PUSH | "browser_push" | "browser-push" | "webpush" => {
CHANNEL_WEB_PUSH.to_string()
}
_ => CHANNEL_EMAIL.to_string(),
}
}
@@ -225,6 +229,35 @@ fn merge_metadata(existing: Option<&Value>, incoming: Option<Value>) -> Option<V
}
}
/// Decode a raw browser-push subscription JSON value, trim its fields, and
/// reject it when any of endpoint / p256dh / auth is blank. Returns the
/// re-serialized, normalized subscription.
fn normalize_browser_push_subscription(raw: Value) -> Result<Value> {
    let mut subscription = serde_json::from_value::<web_push::SubscriptionInfo>(raw)
        .map_err(|_| Error::BadRequest("browser push subscription 非法".to_string()))?;
    subscription.endpoint = subscription.endpoint.trim().to_string();
    subscription.keys.p256dh = subscription.keys.p256dh.trim().to_string();
    subscription.keys.auth = subscription.keys.auth.trim().to_string();
    let complete = !subscription.endpoint.is_empty()
        && !subscription.keys.p256dh.is_empty()
        && !subscription.keys.auth.is_empty();
    if !complete {
        return Err(Error::BadRequest(
            "browser push subscription 缺少 endpoint / keys".to_string(),
        ));
    }
    serde_json::to_value(subscription).map_err(Into::into)
}
/// Merge caller metadata, then force the browser-push marker fields on top:
/// `kind` is always "browser-push" and `subscription` holds the normalized
/// subscription JSON.
fn merge_browser_push_metadata(existing: Option<&Value>, incoming: Option<Value>, subscription: Value) -> Value {
    // Non-object merge results (or no metadata at all) start from empty.
    let mut object = match merge_metadata(existing, incoming) {
        Some(Value::Object(map)) => map,
        _ => Default::default(),
    };
    object.insert("kind".to_string(), Value::String("browser-push".to_string()));
    object.insert("subscription".to_string(), subscription);
    Value::Object(object)
}
fn json_string_list(value: Option<&Value>, key: &str) -> Vec<String> {
value
.and_then(Value::as_object)
@@ -592,6 +625,88 @@ pub async fn create_public_email_subscription(
})
}
/// Register (or refresh) a public browser-push subscription.
///
/// The subscription JSON is validated/normalized first; its endpoint acts as
/// the unique target. An existing row for the same endpoint is reactivated
/// in place (status -> active, fresh manage token kept or minted, metadata
/// re-merged); otherwise a new active row is inserted. Browser push needs no
/// email-style confirmation, so `requires_confirmation` is always false.
pub async fn create_public_web_push_subscription(
    ctx: &AppContext,
    subscription: Value,
    metadata: Option<Value>,
) -> Result<PublicSubscriptionResult> {
    let normalized_subscription = normalize_browser_push_subscription(subscription)?;
    let endpoint = normalized_subscription
        .get("endpoint")
        .and_then(Value::as_str)
        .ok_or_else(|| Error::BadRequest("browser push endpoint 非法".to_string()))?
        .to_string();
    // One row per (channel, endpoint): look for an earlier registration.
    let existing = subscriptions::Entity::find()
        .filter(subscriptions::Column::ChannelType.eq(CHANNEL_WEB_PUSH))
        .filter(subscriptions::Column::Target.eq(&endpoint))
        .one(&ctx.db)
        .await?;
    if let Some(existing) = existing {
        let mut active = existing.clone().into_active_model();
        // Keep a usable manage token; mint a new one if the old is blank.
        let manage_token = existing
            .manage_token
            .clone()
            .filter(|value| !value.trim().is_empty())
            .unwrap_or_else(generate_subscription_token);
        active.manage_token = Set(Some(manage_token));
        active.status = Set(STATUS_ACTIVE.to_string());
        active.confirm_token = Set(None);
        active.verified_at = Set(Some(Utc::now().to_rfc3339()));
        active.metadata = Set(Some(merge_browser_push_metadata(
            existing.metadata.as_ref(),
            metadata,
            normalized_subscription,
        )));
        // Backfill a display name only when none was ever set.
        if existing
            .display_name
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .is_none()
        {
            active.display_name = Set(Some("Browser Push".to_string()));
        }
        let updated = active.update(&ctx.db).await?;
        return Ok(PublicSubscriptionResult {
            subscription: to_public_subscription_view(&updated),
            requires_confirmation: false,
            message: "浏览器推送已更新,后续有新内容时会直接提醒。".to_string(),
        });
    }
    // First registration for this endpoint: insert an already-active row.
    let created = subscriptions::ActiveModel {
        channel_type: Set(CHANNEL_WEB_PUSH.to_string()),
        target: Set(endpoint),
        display_name: Set(Some("Browser Push".to_string())),
        status: Set(STATUS_ACTIVE.to_string()),
        filters: Set(Some(default_public_filters())),
        secret: Set(None),
        notes: Set(None),
        confirm_token: Set(None),
        manage_token: Set(Some(generate_subscription_token())),
        metadata: Set(Some(merge_browser_push_metadata(
            None,
            metadata,
            normalized_subscription,
        ))),
        verified_at: Set(Some(Utc::now().to_rfc3339())),
        last_notified_at: Set(None),
        failure_count: Set(Some(0)),
        last_delivery_status: Set(None),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    Ok(PublicSubscriptionResult {
        subscription: to_public_subscription_view(&created),
        requires_confirmation: false,
        message: "浏览器推送已开启,后续有新内容时会直接提醒。".to_string(),
    })
}
pub async fn confirm_subscription(ctx: &AppContext, token: &str) -> Result<subscriptions::Model> {
let token = token.trim();
if token.is_empty() {
@@ -869,6 +984,7 @@ fn provider_name(channel_type: &str) -> &'static str {
CHANNEL_DISCORD => "discord-webhook",
CHANNEL_TELEGRAM => "telegram-bot-api",
CHANNEL_NTFY => "ntfy",
CHANNEL_WEB_PUSH => "web-push",
_ => "webhook",
}
}
@@ -882,10 +998,65 @@ fn resolve_ntfy_target(target: &str) -> String {
}
}
/// Collapse every run of whitespace (spaces, tabs, newlines) to one space.
fn collapse_whitespace(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    for word in value.split_whitespace() {
        if !out.is_empty() {
            out.push(' ');
        }
        out.push_str(word);
    }
    out
}
/// Keep the first `max_chars` characters and mark any cut with an ellipsis.
/// NOTE: the '…' is appended after the cut, so a truncated result is
/// `max_chars + 1` chars long — preserved deliberately.
fn truncate_chars(value: &str, max_chars: usize) -> String {
    let mut chars = value.chars();
    let head: String = chars.by_ref().take(max_chars).collect();
    if chars.next().is_none() {
        value.to_string()
    } else {
        format!("{head}…")
    }
}
/// Join `path` onto the trimmed site base URL (trailing slashes stripped).
/// Returns `None` when no usable base URL is available.
fn site_asset_url(site_url: Option<&str>, path: &str) -> Option<String> {
    let base = site_url?.trim().trim_end_matches('/');
    (!base.is_empty()).then(|| format!("{base}{path}"))
}
/// Pick the click-through URL for a push: the event's explicit "url" field
/// when present, otherwise the site root.
fn web_push_target_url(message: &QueuedDeliveryPayload) -> Option<String> {
    if let Some(url) = message.payload.get("url").and_then(Value::as_str) {
        return Some(url.to_string());
    }
    message.site_url.clone()
}
/// Build the notification JSON consumed by the browser's push handler.
///
/// Body text is whitespace-collapsed and capped at 220 chars; icon/badge
/// URLs are derived from the site URL when available; `tag` (the event type,
/// defaulting to "subscription") lets the browser coalesce repeat events.
fn build_web_push_payload(message: &QueuedDeliveryPayload) -> Value {
    let body = truncate_chars(&collapse_whitespace(&message.text), 220);
    serde_json::json!({
        "title": message.subject,
        "body": body,
        "icon": site_asset_url(message.site_url.as_deref(), "/favicon.svg"),
        "badge": site_asset_url(message.site_url.as_deref(), "/favicon.ico"),
        "url": web_push_target_url(message),
        "tag": message
            .payload
            .get("event_type")
            .and_then(Value::as_str)
            .unwrap_or("subscription"),
        "data": {
            "event_type": message.payload.get("event_type").cloned().unwrap_or(Value::Null),
            "payload": message.payload,
        }
    })
}
async fn deliver_via_channel(
ctx: &AppContext,
channel_type: &str,
target: &str,
message: &QueuedDeliveryPayload,
metadata: Option<&Value>,
) -> Result<Option<String>> {
match channel_type {
CHANNEL_EMAIL => Err(Error::BadRequest(
@@ -923,6 +1094,21 @@ async fn deliver_via_channel(
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_WEB_PUSH => {
let settings = crate::controllers::site_settings::load_current(ctx).await?;
let subscription_info = web_push_service::subscription_info_from_metadata(metadata)?;
let payload = serde_json::to_vec(&build_web_push_payload(message))?;
web_push_service::send_payload(
&settings,
&subscription_info,
&payload,
Some(web_push::Urgency::Normal),
24 * 60 * 60,
message.site_url.as_deref(),
)
.await?;
Ok(None)
}
_ => {
let envelope = DeliveryEnvelope {
event: message
@@ -1010,10 +1196,17 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
.await
.map(|_| None)
} else {
deliver_via_channel(&subscription.channel_type, &subscription.target, &message).await
deliver_via_channel(
ctx,
&subscription.channel_type,
&subscription.target,
&message,
subscription.metadata.as_ref(),
)
.await
}
} else {
deliver_via_channel(&delivery.channel_type, &delivery.target, &message).await
deliver_via_channel(ctx, &delivery.channel_type, &delivery.target, &message, None).await
};
let subscription_id = delivery.subscription_id;
let delivery_channel_type = delivery.channel_type.clone();

View File

@@ -0,0 +1,243 @@
use std::sync::OnceLock;
use loco_rs::prelude::*;
use reqwest::Client;
use serde::Deserialize;
use crate::models::_entities::site_settings;
/// Cloudflare's production siteverify endpoint (overridable via env below).
const DEFAULT_TURNSTILE_VERIFY_URL: &str =
    "https://challenges.cloudflare.com/turnstile/v0/siteverify";
// Environment-variable fallbacks for keys not stored in site settings.
const ENV_TURNSTILE_SECRET_KEY: &str = "TERMI_TURNSTILE_SECRET_KEY";
const ENV_LEGACY_TURNSTILE_SECRET_KEY: &str = "TERMI_COMMENT_TURNSTILE_SECRET_KEY";
const ENV_TURNSTILE_SITE_KEY: &str = "PUBLIC_COMMENT_TURNSTILE_SITE_KEY";
const ENV_TURNSTILE_VERIFY_URL: &str = "TERMI_TURNSTILE_VERIFY_URL";
/// Which public feature a human-verification check protects.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum TurnstileScope {
    Comment,
    Subscription,
}

/// Verification strategy selected in site settings.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerificationMode {
    Off,
    Captcha,
    Turnstile,
}

// Canonical string forms stored in settings / exchanged with the API.
pub const VERIFICATION_MODE_OFF: &str = "off";
pub const VERIFICATION_MODE_CAPTCHA: &str = "captcha";
pub const VERIFICATION_MODE_TURNSTILE: &str = "turnstile";

impl VerificationMode {
    /// Stable string form of this mode (matches the constants above).
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::Off => VERIFICATION_MODE_OFF,
            Self::Captcha => VERIFICATION_MODE_CAPTCHA,
            Self::Turnstile => VERIFICATION_MODE_TURNSTILE,
        }
    }
}
/// Subset of Cloudflare's siteverify response we care about: the overall
/// verdict plus the hyphenated "error-codes" list (logged on rejection).
#[derive(Clone, Debug, Deserialize)]
struct TurnstileVerifyResponse {
    success: bool,
    #[serde(default, rename = "error-codes")]
    error_codes: Vec<String>,
}
/// Trim an optional &str; whitespace-only or absent input becomes `None`.
fn trim_to_option(value: Option<&str>) -> Option<String> {
    let trimmed = value?.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
/// Read an environment variable, treating unset and whitespace-only values
/// identically as absent.
fn env_value(name: &str) -> Option<String> {
    match std::env::var(name) {
        Ok(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        Err(_) => None,
    }
}
/// Trim an optional settings value; blank or absent becomes `None`.
fn configured_value(value: Option<&String>) -> Option<String> {
    let trimmed = value?.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
pub fn normalize_verification_mode(value: Option<&str>) -> Option<VerificationMode> {
match value?.trim().to_ascii_lowercase().as_str() {
VERIFICATION_MODE_OFF => Some(VerificationMode::Off),
VERIFICATION_MODE_CAPTCHA | "normal" | "simple" => Some(VerificationMode::Captcha),
VERIFICATION_MODE_TURNSTILE => Some(VerificationMode::Turnstile),
_ => None,
}
}
/// Trim the client IP and cap it at 96 chars to keep pathological header
/// values out of the verify form body.
fn normalize_ip(value: Option<&str>) -> Option<String> {
    let trimmed = trim_to_option(value)?;
    Some(trimmed.chars().take(96).collect())
}
/// The siteverify endpoint: env override first, else Cloudflare's default.
fn verify_url() -> String {
    match env_value(ENV_TURNSTILE_VERIFY_URL) {
        Some(url) => url,
        None => DEFAULT_TURNSTILE_VERIFY_URL.to_string(),
    }
}
/// Process-wide HTTP client, lazily created once and reused for all verifies.
fn client() -> &'static Client {
    static CLIENT: OnceLock<Client> = OnceLock::new();
    CLIENT.get_or_init(Client::new)
}
/// Resolve the Turnstile secret key.
/// Precedence: site settings, then the current env var, then the legacy one.
pub fn secret_key(settings: &site_settings::Model) -> Option<String> {
    if let Some(configured) = configured_value(settings.turnstile_secret_key.as_ref()) {
        return Some(configured);
    }
    env_value(ENV_TURNSTILE_SECRET_KEY).or_else(|| env_value(ENV_LEGACY_TURNSTILE_SECRET_KEY))
}
/// Resolve the Turnstile site (public) key: settings first, env fallback.
pub fn site_key(settings: &site_settings::Model) -> Option<String> {
    if let Some(configured) = configured_value(settings.turnstile_site_key.as_ref()) {
        return Some(configured);
    }
    env_value(ENV_TURNSTILE_SITE_KEY)
}
/// True when a site key is resolvable from settings or environment.
fn site_key_configured(settings: &site_settings::Model) -> bool {
    site_key(settings).is_some()
}
/// True when a secret key is resolvable from settings or environment.
pub fn secret_key_configured(settings: &site_settings::Model) -> bool {
    secret_key(settings).is_some()
}
/// Map the pre-mode boolean toggles onto the mode enum for older settings
/// rows: an enabled toggle means Turnstile; otherwise comments fall back to
/// captcha and subscriptions to off.
fn legacy_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
    let (turnstile_on, fallback) = match scope {
        TurnstileScope::Comment => (
            settings.comment_turnstile_enabled.unwrap_or(false),
            VerificationMode::Captcha,
        ),
        TurnstileScope::Subscription => (
            settings.subscription_turnstile_enabled.unwrap_or(false),
            VerificationMode::Off,
        ),
    };
    if turnstile_on {
        VerificationMode::Turnstile
    } else {
        fallback
    }
}
/// The mode the admin chose for this scope, falling back to the legacy
/// boolean toggles when the new mode field is unset or unrecognized.
pub fn selected_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
    let configured = match scope {
        TurnstileScope::Comment => settings.comment_verification_mode.as_deref(),
        TurnstileScope::Subscription => settings.subscription_verification_mode.as_deref(),
    };
    match normalize_verification_mode(configured) {
        Some(mode) => mode,
        None => legacy_mode(settings, scope),
    }
}
/// The mode that will actually run: a Turnstile selection degrades to the
/// built-in captcha unless BOTH the site key and secret key are configured.
pub fn effective_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
    let mode = selected_mode(settings, scope);
    if mode != VerificationMode::Turnstile {
        return mode;
    }
    if site_key_configured(settings) && secret_key_configured(settings) {
        VerificationMode::Turnstile
    } else {
        VerificationMode::Captcha
    }
}
/// True when Turnstile is both selected and fully configured for `scope`.
pub fn is_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
    effective_mode(settings, scope) == VerificationMode::Turnstile
}
/// Convenience wrapper: load current site settings, then check `is_enabled`.
pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Result<bool> {
    let settings = crate::controllers::site_settings::load_current(ctx).await?;
    Ok(is_enabled(&settings, scope))
}
/// Verify a Turnstile response token against the siteverify API.
///
/// Requires a configured secret key and a non-blank token. `client_ip`, when
/// present, is forwarded as `remoteip` (truncated to 96 chars). Every
/// failure path returns `Error::BadRequest` with a user-facing message;
/// transport/decode problems are additionally logged via `tracing::warn!`.
pub async fn verify_token(
    settings: &site_settings::Model,
    token: Option<&str>,
    client_ip: Option<&str>,
) -> Result<()> {
    let secret = secret_key(settings).ok_or_else(|| {
        Error::BadRequest("人机验证尚未配置完成,请稍后重试".to_string())
    })?;
    let response_token = trim_to_option(token)
        .ok_or_else(|| Error::BadRequest("请先完成人机验证".to_string()))?;
    // siteverify expects application/x-www-form-urlencoded fields.
    let mut form_data = vec![
        ("secret".to_string(), secret),
        ("response".to_string(), response_token),
    ];
    if let Some(remote_ip) = normalize_ip(client_ip) {
        form_data.push(("remoteip".to_string(), remote_ip));
    }
    let response = client()
        .post(verify_url())
        .form(&form_data)
        .send()
        .await
        .map_err(|error| {
            tracing::warn!("turnstile verify request failed: {error}");
            Error::BadRequest("人机验证服务暂时不可用,请稍后重试".to_string())
        })?;
    if !response.status().is_success() {
        tracing::warn!(
            "turnstile verify returned unexpected status: {}",
            response.status()
        );
        return Err(Error::BadRequest(
            "人机验证服务暂时不可用,请稍后重试".to_string(),
        ));
    }
    let payload = response
        .json::<TurnstileVerifyResponse>()
        .await
        .map_err(|error| {
            tracing::warn!("turnstile verify decode failed: {error}");
            Error::BadRequest("人机验证服务暂时不可用,请稍后重试".to_string())
        })?;
    // A 200 response can still carry success=false with error codes.
    if !payload.success {
        tracing::warn!(
            error_codes = ?payload.error_codes,
            "turnstile verify rejected request"
        );
        return Err(Error::BadRequest("人机验证未通过,请重试".to_string()));
    }
    Ok(())
}
/// Verify a token only when the effective mode for `scope` is Turnstile.
///
/// Returns `Ok(true)` when Turnstile was active AND the token verified;
/// `Ok(false)` when verification was not required; `Err` on verify failure.
pub async fn verify_if_enabled(
    ctx: &AppContext,
    scope: TurnstileScope,
    token: Option<&str>,
    client_ip: Option<&str>,
) -> Result<bool> {
    let settings = crate::controllers::site_settings::load_current(ctx).await?;
    match effective_mode(&settings, scope) {
        VerificationMode::Turnstile => {
            verify_token(&settings, token, client_ip).await?;
            Ok(true)
        }
        _ => Ok(false),
    }
}

View File

@@ -0,0 +1,122 @@
use loco_rs::prelude::*;
use serde_json::Value;
use web_push::{
ContentEncoding, HyperWebPushClient, SubscriptionInfo, Urgency, VapidSignatureBuilder,
WebPushClient, WebPushMessageBuilder,
};
use crate::models::_entities::site_settings;
// Environment-variable fallbacks for VAPID configuration. The public key has
// both a current and a legacy variable name; private key and subject each
// have one.
const ENV_PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY: &str = "PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY";
const ENV_LEGACY_WEB_PUSH_VAPID_PUBLIC_KEY: &str = "TERMI_WEB_PUSH_VAPID_PUBLIC_KEY";
const ENV_WEB_PUSH_VAPID_PRIVATE_KEY: &str = "TERMI_WEB_PUSH_VAPID_PRIVATE_KEY";
const ENV_WEB_PUSH_VAPID_SUBJECT: &str = "TERMI_WEB_PUSH_VAPID_SUBJECT";
/// Read an env var; unset and whitespace-only values both count as absent.
fn env_value(name: &str) -> Option<String> {
    let raw = std::env::var(name).ok()?;
    let trimmed = raw.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
/// Trim an optional settings value; blank or absent becomes `None`.
fn configured_value(value: Option<&String>) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Resolve the VAPID public key.
/// Precedence: site settings, then the current env var, then the legacy one.
pub fn public_key(settings: &site_settings::Model) -> Option<String> {
    if let Some(configured) = configured_value(settings.web_push_vapid_public_key.as_ref()) {
        return Some(configured);
    }
    env_value(ENV_PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY)
        .or_else(|| env_value(ENV_LEGACY_WEB_PUSH_VAPID_PUBLIC_KEY))
}
/// Resolve the VAPID private key: site settings first, env var fallback.
pub fn private_key(settings: &site_settings::Model) -> Option<String> {
    configured_value(settings.web_push_vapid_private_key.as_ref())
        .or_else(|| env_value(ENV_WEB_PUSH_VAPID_PRIVATE_KEY))
}
/// Resolve the explicit VAPID subject (mailto:/https: URI identifying the
/// sender): site settings first, env var fallback.
pub fn vapid_subject(settings: &site_settings::Model) -> Option<String> {
    configured_value(settings.web_push_vapid_subject.as_ref())
        .or_else(|| env_value(ENV_WEB_PUSH_VAPID_SUBJECT))
}
/// The subject actually used when signing.
/// Precedence: explicit subject, then an http(s) site URL, then a
/// placeholder mailto address.
fn effective_vapid_subject(settings: &site_settings::Model, site_url: Option<&str>) -> String {
    if let Some(subject) = vapid_subject(settings) {
        return subject;
    }
    if let Some(url) = site_url.map(str::trim) {
        if url.starts_with("http://") || url.starts_with("https://") {
            return url.to_string();
        }
    }
    "mailto:noreply@example.com".to_string()
}
/// True when a VAPID public key is resolvable from settings or environment.
pub fn public_key_configured(settings: &site_settings::Model) -> bool {
    public_key(settings).is_some()
}
/// True when a VAPID private key is resolvable from settings or environment.
pub fn private_key_configured(settings: &site_settings::Model) -> bool {
    private_key(settings).is_some()
}
/// Web push is usable only when the feature toggle is on AND both VAPID keys
/// are configured.
pub fn is_enabled(settings: &site_settings::Model) -> bool {
    settings.web_push_enabled.unwrap_or(false)
        && public_key_configured(settings)
        && private_key_configured(settings)
}
/// Extract the browser subscription stored under `metadata["subscription"]`
/// and decode it into a `SubscriptionInfo`. Missing or malformed data maps
/// to `Error::BadRequest`.
pub fn subscription_info_from_metadata(metadata: Option<&Value>) -> Result<SubscriptionInfo> {
    let raw = match metadata.and_then(Value::as_object) {
        Some(object) => object.get("subscription").cloned(),
        None => None,
    };
    let subscription = raw
        .ok_or_else(|| Error::BadRequest("browser push metadata 缺少 subscription".to_string()))?;
    serde_json::from_value::<SubscriptionInfo>(subscription)
        .map_err(|_| Error::BadRequest("browser push metadata 非法".to_string()))
}
/// Sign and send one Web Push message to a single browser subscription.
///
/// Requires a configured VAPID private key. The "sub" claim comes from
/// `effective_vapid_subject` (explicit subject, site URL, or a placeholder
/// mailto). The payload is encrypted with AES-128-GCM. Every failure is
/// mapped to `Error::BadRequest` carrying the underlying error text.
pub async fn send_payload(
    settings: &site_settings::Model,
    subscription_info: &SubscriptionInfo,
    payload: &[u8],
    urgency: Option<Urgency>,
    ttl: u32,
    site_url: Option<&str>,
) -> Result<()> {
    let private_key = private_key(settings)
        .ok_or_else(|| Error::BadRequest("web push VAPID private key 未配置".to_string()))?;
    let mut signature_builder = VapidSignatureBuilder::from_base64(&private_key, subscription_info)
        .map_err(|error| Error::BadRequest(format!("web push vapid build failed: {error}")))?;
    // "sub" identifies the sender to the push service.
    signature_builder.add_claim("sub", effective_vapid_subject(settings, site_url));
    let signature = signature_builder
        .build()
        .map_err(|error| Error::BadRequest(format!("web push vapid sign failed: {error}")))?;
    let mut builder = WebPushMessageBuilder::new(subscription_info);
    builder.set_ttl(ttl);
    if let Some(urgency) = urgency {
        builder.set_urgency(urgency);
    }
    builder.set_payload(ContentEncoding::Aes128Gcm, payload);
    builder.set_vapid_signature(signature);
    // NOTE(review): a fresh client per call — could be cached if volume grows.
    let client = HyperWebPushClient::new();
    let message = builder
        .build()
        .map_err(|error| Error::BadRequest(format!("web push message build failed: {error}")))?;
    client
        .send(message)
        .await
        .map_err(|error| Error::BadRequest(format!("web push send failed: {error}")))?;
    Ok(())
}

View File

@@ -1 +0,0 @@
{"rustc_fingerprint":10734737548331824535,"outputs":{"17747080675513052775":{"success":true,"status":"","code":0,"stdout":"rustc 1.92.0 (ded5c06cf 2025-12-08)\nbinary: rustc\ncommit-hash: ded5c06cf21d2b93bffd5d884aa6e96934ee4234\ncommit-date: 2025-12-08\nhost: x86_64-pc-windows-msvc\nrelease: 1.92.0\nLLVM version: 21.1.3\n","stderr":""},"7971740275564407648":{"success":true,"status":"","code":0,"stdout":"___.exe\nlib___.rlib\n___.dll\n___.dll\n___.lib\n___.dll\nC:\\Users\\Andorid\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\npacked\n___\ndebug_assertions\npanic=\"unwind\"\nproc_macro\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"msvc\"\ntarget_family=\"windows\"\ntarget_feature=\"cmpxchg16b\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_feature=\"sse3\"\ntarget_has_atomic=\"128\"\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_os=\"windows\"\ntarget_pointer_width=\"64\"\ntarget_vendor=\"pc\"\nwindows\n","stderr":""}},"successes":{}}

View File

@@ -1,3 +0,0 @@
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by cargo.
# For information about cache directory tags see https://bford.info/cachedir/

View File

@@ -204,10 +204,8 @@ Caddy -> frontend Node server
当前仓库内已经补了:
- `deploy/scripts/backup/backup-postgres.sh`
- `deploy/scripts/backup/backup-markdown.sh`
- `deploy/scripts/backup/backup-media.sh`
- `deploy/scripts/backup/restore-postgres.sh`
- `deploy/scripts/backup/restore-markdown.sh`
- `deploy/scripts/backup/restore-media.sh`
- `deploy/docker/BACKUP_AND_RECOVERY.md`

View File

@@ -1,13 +1,22 @@
# 备份与恢复说明
这套博客现在已经有
当前站点的内容已经是 **DB-only**
- PostgreSQL 数据库
- Markdown 原文内容
- 文章结构化字段
- 文章 Markdown 原文(`posts.source_markdown`
- 分类 / 标签
- 版本历史 / 审计日志 / 订阅数据
- 站点配置
- 媒体文件 / 对象存储
- 版本历史 / 审计日志 / 订阅数据
所以生产上最重要的不是再多一两个功能,而是**出事后能不能快速恢复**。
因此生产上最重要的是:
1. **数据库备份**
2. **媒体资源备份**
3. 定期做恢复演练
> 不再需要单独备份 `backend/content/posts` 之类的本地 Markdown 目录。
## 1. 建议的最小备份策略
@@ -15,11 +24,7 @@
- **频率**:每天至少 1 次;高频站点建议每 6~12 小时 1 次
- **工具**`pg_dump --format=custom`
- **脚本**`deploy/scripts/backup/backup-postgres.sh`
### Markdown 原文
- **频率**:每次发布后 + 每天定时 1 次
- **脚本**`deploy/scripts/backup/backup-markdown.sh`
- **原因**Markdown 是内容源,恢复速度最快
- **说明**:文章内容原文已经跟随数据库一起备份
### 媒体文件
- 如果是本地目录:打包归档
@@ -35,9 +40,6 @@
# 单独备份数据库
DATABASE_URL=postgres://... ./deploy/scripts/backup/backup-postgres.sh
# 单独备份 Markdown
MARKDOWN_SOURCE_DIR=./backend/content/posts ./deploy/scripts/backup/backup-markdown.sh
# 单独备份媒体(本地目录)
MEDIA_SOURCE_DIR=./uploads ./deploy/scripts/backup/backup-media.sh
@@ -53,12 +55,6 @@ MEDIA_S3_SOURCE=s3://bucket-name ./deploy/scripts/backup/backup-media.sh
DATABASE_URL=postgres://... ./deploy/scripts/backup/restore-postgres.sh ./backups/postgres/latest.dump
```
### 恢复 Markdown
```bash
MARKDOWN_TARGET_DIR=./backend/content/posts ./deploy/scripts/backup/restore-markdown.sh ./backups/markdown/latest.tar.gz
```
### 恢复媒体
```bash
@@ -75,9 +71,6 @@ MEDIA_S3_TARGET=s3://bucket-name ./deploy/scripts/backup/restore-media.sh ./back
# 每天 03:10 备份 PostgreSQL
10 3 * * * cd /opt/termi-astro && DATABASE_URL=postgres://... ./deploy/scripts/backup/backup-postgres.sh >> /var/log/termi-backup.log 2>&1
# 每天 03:25 备份 Markdown
25 3 * * * cd /opt/termi-astro && MARKDOWN_SOURCE_DIR=./backend/content/posts ./deploy/scripts/backup/backup-markdown.sh >> /var/log/termi-backup.log 2>&1
# 每天 03:40 备份媒体
40 3 * * * cd /opt/termi-astro && MEDIA_S3_SOURCE=s3://bucket-name ./deploy/scripts/backup/backup-media.sh >> /var/log/termi-backup.log 2>&1
@@ -88,7 +81,7 @@ MEDIA_S3_TARGET=s3://bucket-name ./deploy/scripts/backup/restore-media.sh ./back
40 4 * * * cd /opt/termi-astro && OFFSITE_TARGET=/mnt/offsite/termi-astro-backups ./deploy/scripts/backup/sync-backups-offsite.sh >> /var/log/termi-backup.log 2>&1
```
## 5. 建议你们再加一层异地备份
## 5. 建议再加一层异地备份
仅仅把备份留在同一台服务器上不够。
@@ -101,9 +94,8 @@ MEDIA_S3_TARGET=s3://bucket-name ./deploy/scripts/backup/restore-media.sh ./back
建议每个月至少做 1 次演练:
1. 用最新数据库备份恢复到临时环境
2. Markdown 备份恢复内容目录
3. 用媒体备份恢复对象
4. 校验:
2.媒体备份恢复对象
3. 校验:
- 首页可打开
- 文章详情可打开
- 图片可访问
@@ -115,7 +107,6 @@ MEDIA_S3_TARGET=s3://bucket-name ./deploy/scripts/backup/restore-media.sh ./back
```bash
DATABASE_URL=postgres://... \
POSTGRES_BACKUP=./backups/postgres/latest.dump \
MARKDOWN_BACKUP=./backups/markdown/latest.tar.gz \
MEDIA_BACKUP=./backups/media/latest.tar.gz \
./deploy/scripts/backup/verify-restore.sh
```
@@ -125,17 +116,16 @@ MEDIA_BACKUP=./backups/media/latest.tar.gz \
发生事故时建议按这个顺序:
1. 恢复数据库
2. 恢复 Markdown 原文
3. 恢复媒体资源
4. 启动 backend / frontend / admin
5. 进入后台检查:
2. 恢复媒体资源
3. 启动 backend / frontend / admin
4. 进入后台检查:
- 审计日志
- 文章版本历史
- 订阅目标与最近投递
## 8. 说明
这些脚本是**仓库内参考实现**,没有在你们生产机上自动执行。
这些脚本是**仓库内参考实现**,没有在生产机上自动执行。
正式上线前请按你们实际目录、R2/S3 桶、数据库连接串、cron 规范再过一遍。
另外仓库里已经提供:

View File

@@ -45,12 +45,17 @@ python deploy/scripts/render_compose_env.py \
- `INTERNAL_API_BASE_URL`frontend SSR 容器访问 backend 用compose 默认推荐 `http://backend:5150/api`
- `PUBLIC_API_BASE_URL`:浏览器访问 backend API 用;留空时前台会回退到“当前主机 + `:5150/api`
- `PUBLIC_COMMENT_TURNSTILE_SITE_KEY`:前台评论 / 订阅表单使用的 Cloudflare Turnstile site key
- `PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY`:前台浏览器推送订阅使用的 VAPID public key
- `PUBLIC_IMAGE_ALLOWED_HOSTS`:前台 `/_img` 图片优化端点允许的额外图片 host逗号分隔
- `ADMIN_API_BASE_URL`admin 浏览器访问 backend API 用;留空时后台会回退到“当前主机 + `:5150`
- `ADMIN_FRONTEND_BASE_URL`admin 里“打开前台 / 问答页 / 文章页预览”跳转用
- `TERMI_ADMIN_TRUST_PROXY_AUTH`:是否信任前置代理(如 Caddy + TinyAuth注入的后台认证头
- `TERMI_ADMIN_LOCAL_LOGIN_ENABLED`:是否保留本地账号密码登录兜底
- `TERMI_ADMIN_PROXY_SHARED_SECRET`:代理 SSO 共享密钥;建议和 Caddy 的 `X-Termi-Proxy-Secret` 配套使用
- `TERMI_TURNSTILE_SECRET_KEY`backend 评论 / 订阅接口使用的 Cloudflare Turnstile secret key兼容旧的 `TERMI_COMMENT_TURNSTILE_SECRET_KEY`
- `TERMI_WEB_PUSH_VAPID_PRIVATE_KEY`backend / worker 发送浏览器推送时使用的 VAPID private key
- `TERMI_WEB_PUSH_VAPID_SUBJECT`:浏览器推送 VAPID subject推荐 `mailto:xxx@example.com`
- `SMTP_ENABLE / SMTP_HOST / SMTP_PORT / SMTP_SECURE / SMTP_USER / SMTP_PASSWORD / SMTP_HELLO_NAME`:订阅确认和邮件通知需要
例如:
@@ -58,11 +63,16 @@ python deploy/scripts/render_compose_env.py \
```yaml
compose_env:
PUBLIC_API_BASE_URL: https://api.blog.init.cool
PUBLIC_COMMENT_TURNSTILE_SITE_KEY: 1x00000000000000000000AA
PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY: replace-with-web-push-vapid-public-key
ADMIN_API_BASE_URL: https://admin.blog.init.cool
ADMIN_FRONTEND_BASE_URL: https://blog.init.cool
TERMI_ADMIN_TRUST_PROXY_AUTH: true
TERMI_ADMIN_LOCAL_LOGIN_ENABLED: false
TERMI_ADMIN_PROXY_SHARED_SECRET: replace-with-a-long-random-secret
TERMI_TURNSTILE_SECRET_KEY: replace-with-turnstile-secret-key
TERMI_WEB_PUSH_VAPID_PRIVATE_KEY: replace-with-web-push-vapid-private-key
TERMI_WEB_PUSH_VAPID_SUBJECT: mailto:noreply@blog.init.cool
```
> 这些值最终会被渲染成 `deploy/docker/.env`,再由 `compose.package.yml` 读取。

View File

@@ -16,6 +16,10 @@ services:
TERMI_ADMIN_TRUST_PROXY_AUTH: ${TERMI_ADMIN_TRUST_PROXY_AUTH:-false}
TERMI_ADMIN_LOCAL_LOGIN_ENABLED: ${TERMI_ADMIN_LOCAL_LOGIN_ENABLED:-true}
TERMI_ADMIN_PROXY_SHARED_SECRET: ${TERMI_ADMIN_PROXY_SHARED_SECRET:-}
TERMI_TURNSTILE_SECRET_KEY: ${TERMI_TURNSTILE_SECRET_KEY:-}
PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY: ${PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY:-}
TERMI_WEB_PUSH_VAPID_PRIVATE_KEY: ${TERMI_WEB_PUSH_VAPID_PRIVATE_KEY:-}
TERMI_WEB_PUSH_VAPID_SUBJECT: ${TERMI_WEB_PUSH_VAPID_SUBJECT:-}
RUST_LOG: ${RUST_LOG:-info}
ports:
# 这是“直连端口”示例;如果前面接 tohka 宿主机 Caddy
@@ -39,6 +43,9 @@ services:
TERMI_ADMIN_TRUST_PROXY_AUTH: ${TERMI_ADMIN_TRUST_PROXY_AUTH:-false}
TERMI_ADMIN_LOCAL_LOGIN_ENABLED: ${TERMI_ADMIN_LOCAL_LOGIN_ENABLED:-true}
TERMI_ADMIN_PROXY_SHARED_SECRET: ${TERMI_ADMIN_PROXY_SHARED_SECRET:-}
PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY: ${PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY:-}
TERMI_WEB_PUSH_VAPID_PRIVATE_KEY: ${TERMI_WEB_PUSH_VAPID_PRIVATE_KEY:-}
TERMI_WEB_PUSH_VAPID_SUBJECT: ${TERMI_WEB_PUSH_VAPID_SUBJECT:-}
RUST_LOG: ${RUST_LOG:-info}
TERMI_SKIP_MIGRATIONS: 'true'
@@ -53,9 +60,13 @@ services:
# frontend 是 Astro SSR(Node)
# - INTERNAL_API_BASE_URL 给服务端渲染访问 backend 用
# - PUBLIC_API_BASE_URL 给浏览器里的评论 / AI 问答等请求用
# - PUBLIC_COMMENT_TURNSTILE_SITE_KEY 给评论 / 订阅表单的人机验证组件用
# - PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY 给浏览器推送订阅用
# - PUBLIC_IMAGE_ALLOWED_HOSTS 给前台图片优化端点 /_img 放行额外图片域名
INTERNAL_API_BASE_URL: ${INTERNAL_API_BASE_URL:-http://backend:5150/api}
PUBLIC_API_BASE_URL: ${PUBLIC_API_BASE_URL:-}
PUBLIC_COMMENT_TURNSTILE_SITE_KEY: ${PUBLIC_COMMENT_TURNSTILE_SITE_KEY:-}
PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY: ${PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY:-}
PUBLIC_IMAGE_ALLOWED_HOSTS: ${PUBLIC_IMAGE_ALLOWED_HOSTS:-}
# frontend 是 Astro SSR(Node) 服务,容器内部监听 4321
# 生产建议由网关统一反代,仅对外开放 80/443

View File

@@ -29,6 +29,8 @@ compose_env:
APP_BASE_URL: https://admin.blog.init.cool
INTERNAL_API_BASE_URL: http://backend:5150/api
PUBLIC_API_BASE_URL: https://api.blog.init.cool
PUBLIC_COMMENT_TURNSTILE_SITE_KEY: 1x00000000000000000000AA
PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY: replace-with-web-push-vapid-public-key
ADMIN_API_BASE_URL: https://admin.blog.init.cool
ADMIN_FRONTEND_BASE_URL: https://blog.init.cool
PUBLIC_IMAGE_ALLOWED_HOSTS: cdn.example.com,pub-xxxx.r2.dev
@@ -50,6 +52,9 @@ compose_env:
TERMI_ADMIN_TRUST_PROXY_AUTH: true
TERMI_ADMIN_LOCAL_LOGIN_ENABLED: false
TERMI_ADMIN_PROXY_SHARED_SECRET: replace-with-another-long-random-secret
TERMI_TURNSTILE_SECRET_KEY: replace-with-turnstile-secret-key
TERMI_WEB_PUSH_VAPID_PRIVATE_KEY: replace-with-web-push-vapid-private-key
TERMI_WEB_PUSH_VAPID_SUBJECT: mailto:noreply@blog.init.cool
BACKEND_IMAGE: git.init.cool/cool/termi-astro-backend:latest
FRONTEND_IMAGE: git.init.cool/cool/termi-astro-frontend:latest

View File

@@ -4,7 +4,6 @@ set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
"${SCRIPT_DIR}/backup-postgres.sh"
"${SCRIPT_DIR}/backup-markdown.sh"
"${SCRIPT_DIR}/backup-media.sh"
echo "All backup jobs finished successfully."

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
SOURCE_DIR="${MARKDOWN_SOURCE_DIR:-./backend/content/posts}"
BACKUP_DIR="${BACKUP_DIR:-./backups/markdown}"
RETENTION_DAYS="${RETENTION_DAYS:-30}"
TIMESTAMP="$(date -u +%Y%m%dT%H%M%SZ)"
FILE_PATH="${BACKUP_DIR}/markdown-${TIMESTAMP}.tar.gz"
if [[ ! -d "${SOURCE_DIR}" ]]; then
echo "Markdown source directory not found: ${SOURCE_DIR}" >&2
exit 1
fi
mkdir -p "${BACKUP_DIR}"
tar -czf "${FILE_PATH}" -C "${SOURCE_DIR}" .
ln -sfn "$(basename "${FILE_PATH}")" "${BACKUP_DIR}/latest.tar.gz"
find "${BACKUP_DIR}" -type f -name 'markdown-*.tar.gz' -mtime +"${RETENTION_DAYS}" -delete
echo "Markdown backup written to ${FILE_PATH}"

View File

@@ -3,7 +3,6 @@ set -euo pipefail
BACKUP_ROOT="${BACKUP_ROOT:-./backups}"
POSTGRES_RETENTION_DAYS="${POSTGRES_RETENTION_DAYS:-14}"
MARKDOWN_RETENTION_DAYS="${MARKDOWN_RETENTION_DAYS:-30}"
MEDIA_RETENTION_DAYS="${MEDIA_RETENTION_DAYS:-14}"
DRY_RUN="${DRY_RUN:-false}"
@@ -42,7 +41,6 @@ prune_dirs() {
}
prune "${BACKUP_ROOT}/postgres" 'postgres-*.dump' "${POSTGRES_RETENTION_DAYS}"
prune "${BACKUP_ROOT}/markdown" 'markdown-*.tar.gz' "${MARKDOWN_RETENTION_DAYS}"
prune "${BACKUP_ROOT}/media" 'media-*.tar.gz' "${MEDIA_RETENTION_DAYS}"
prune_dirs "${BACKUP_ROOT}/media" 'media-*' "${MEDIA_RETENTION_DAYS}"

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
if [[ $# -lt 1 ]]; then
echo "Usage: $0 <backup-file.tar.gz>" >&2
exit 1
fi
TARGET_DIR="${MARKDOWN_TARGET_DIR:-./backend/content/posts}"
BACKUP_FILE="$1"
if [[ ! -f "${BACKUP_FILE}" ]]; then
echo "Backup file not found: ${BACKUP_FILE}" >&2
exit 1
fi
mkdir -p "${TARGET_DIR}"
rm -rf "${TARGET_DIR}"/*
tar -xzf "${BACKUP_FILE}" -C "${TARGET_DIR}"
echo "Markdown restore completed into ${TARGET_DIR}"

View File

@@ -3,15 +3,12 @@ set -euo pipefail
: "${DATABASE_URL:?DATABASE_URL is required}"
: "${POSTGRES_BACKUP:?POSTGRES_BACKUP is required}"
: "${MARKDOWN_BACKUP:?MARKDOWN_BACKUP is required}"
: "${MEDIA_BACKUP:?MEDIA_BACKUP is required}"
POSTGRES_RESTORE_CMD="${POSTGRES_RESTORE_CMD:-./deploy/scripts/backup/restore-postgres.sh}"
MARKDOWN_RESTORE_CMD="${MARKDOWN_RESTORE_CMD:-./deploy/scripts/backup/restore-markdown.sh}"
MEDIA_RESTORE_CMD="${MEDIA_RESTORE_CMD:-./deploy/scripts/backup/restore-media.sh}"
"${POSTGRES_RESTORE_CMD}" "${POSTGRES_BACKUP}"
"${MARKDOWN_RESTORE_CMD}" "${MARKDOWN_BACKUP}"
"${MEDIA_RESTORE_CMD}" "${MEDIA_BACKUP}"
echo "Restore rehearsal completed. Please verify homepage, article detail, media assets, admin login, revisions, audit logs, and subscriptions manually."

Some files were not shown because too many files have changed in this diff Show More