From 92a85eef204bd74c8285de012c118398bad24fd4 Mon Sep 17 00:00:00 2001 From: limitcool Date: Sun, 29 Mar 2026 21:36:13 +0800 Subject: [PATCH] feat: Refactor service management scripts to use a unified dev script - Added package.json to manage development scripts. - Updated restart-services.ps1 to call the new dev script for starting services. - Refactored start-admin.ps1, start-backend.ps1, start-frontend.ps1, and start-mcp.ps1 to utilize the dev script for starting respective services. - Enhanced stop-services.ps1 to improve process termination logic by matching command patterns. --- README.md | 59 +- admin/package-lock.json | 92 + admin/package.json | 4 + admin/src/App.tsx | 50 +- admin/src/components/app-shell.tsx | 55 +- admin/src/components/markdown-preview.tsx | 40 + admin/src/components/markdown-workbench.tsx | 332 +++ admin/src/lib/admin-format.ts | 77 +- admin/src/lib/api.ts | 42 +- admin/src/lib/markdown-diff.ts | 32 + admin/src/lib/markdown-document.ts | 247 ++ admin/src/lib/markdown-merge.ts | 149 ++ admin/src/lib/post-draft-window.ts | 82 + admin/src/lib/types.ts | 57 + admin/src/pages/comments-page.tsx | 99 +- admin/src/pages/dashboard-page.tsx | 101 +- admin/src/pages/friend-links-page.tsx | 158 +- admin/src/pages/login-page.tsx | 24 +- admin/src/pages/post-compare-page.tsx | 166 ++ admin/src/pages/post-polish-page.tsx | 302 +++ admin/src/pages/post-preview-page.tsx | 165 ++ admin/src/pages/posts-page.tsx | 2033 +++++++++++++---- admin/src/pages/reviews-page.tsx | 126 +- admin/src/pages/site-settings-page.tsx | 699 +++++- backend/Cargo.lock | 1 + backend/Cargo.toml | 1 + backend/assets/seeds/comments.yaml | 44 +- backend/assets/seeds/friend_links.yaml | 48 +- backend/assets/seeds/posts.yaml | 196 +- backend/assets/seeds/reviews.yaml | 106 +- backend/assets/seeds/site_settings.yaml | 55 + backend/assets/views/admin/reviews.html | 5 + backend/assets/views/admin/site_settings.html | 6 +- backend/backend-manual.err.log | 330 +++ 
backend/backend-manual.log | 26 + backend/backend-restart.err.log | 529 +++++ backend/backend-restart.log | 25 + .../content/posts/building-blog-with-astro.md | 37 - backend/content/posts/canokeys.md | 242 ++ backend/content/posts/ffmpeg.md | 67 + backend/content/posts/go-arm.md | 121 + backend/content/posts/go-grpc.md | 173 ++ backend/content/posts/go-xml.md | 98 + backend/content/posts/hugo.md | 36 + backend/content/posts/linux-dhcp.md | 67 + backend/content/posts/linux-shell.md | 36 + backend/content/posts/linux.md | 65 + backend/content/posts/loco-rs-framework.md | 44 - backend/content/posts/mysql.md | 569 +++++ backend/content/posts/redis.md | 116 + backend/content/posts/rust-dll.md | 169 ++ .../content/posts/rust-programming-tips.md | 38 - backend/content/posts/rust-serde.md | 96 + backend/content/posts/rust-sqlx.md | 37 + backend/content/posts/terminal-ui-design.md | 38 - backend/content/posts/tmux.md | 52 + backend/content/posts/welcome-to-termi.md | 35 - backend/migration/src/lib.rs | 8 + ...m20260328_000009_add_paragraph_comments.rs | 6 +- ...agraph_comments_toggle_to_site_settings.rs | 48 + ...0011_add_post_images_and_music_playlist.rs | 75 + ...20260329_000012_add_link_url_to_reviews.rs | 35 + ...dd_ai_provider_presets_to_site_settings.rs | 98 + backend/playwright-backend.err.log | 3 + backend/playwright-backend.out.log | 0 backend/src/app.rs | 31 + backend/src/controllers/admin.rs | 30 +- backend/src/controllers/admin_api.rs | 102 +- backend/src/controllers/ai.rs | 8 +- backend/src/controllers/category.rs | 6 +- backend/src/controllers/comment.rs | 5 +- backend/src/controllers/post.rs | 46 + backend/src/controllers/review.rs | 10 + backend/src/controllers/search.rs | 3 +- backend/src/controllers/site_settings.rs | 400 +++- backend/src/controllers/tag.rs | 6 +- backend/src/fixtures/comments.yaml | 44 +- backend/src/fixtures/friend_links.yaml | 48 +- backend/src/fixtures/posts.yaml | 196 +- backend/src/fixtures/reviews.yaml | 106 +- 
backend/src/fixtures/site_settings.yaml | 61 +- backend/src/initializers/content_sync.rs | 50 +- backend/src/models/_entities/posts.rs | 2 + backend/src/models/_entities/reviews.rs | 1 + backend/src/models/_entities/site_settings.rs | 6 + backend/src/services/ai.rs | 1346 ++++++++++- backend/src/services/content.rs | 112 +- backend/target-codex-ai-fix/.rustc_info.json | 1 + backend/target-codex-ai-fix/CACHEDIR.TAG | 3 + dev.ps1 | 246 +- .../review-covers/black-myth-wukong.svg | 24 + .../hero-dreams-in-tired-life.svg | 22 + .../journey-to-the-west-editorial.svg | 19 + .../public/review-covers/placed-within.svg | 20 + .../public/review-covers/the-long-season.svg | 22 + .../public/review-covers/thirteen-invites.svg | 20 + frontend/src/components/Comments.astro | 10 +- frontend/src/components/Footer.astro | 4 +- .../components/FriendLinkApplication.astro | 10 +- frontend/src/components/FriendLinkCard.astro | 2 +- frontend/src/components/Header.astro | 573 +++-- frontend/src/components/Lightbox.astro | 4 +- .../src/components/ParagraphComments.astro | 211 +- frontend/src/components/PostCard.astro | 84 +- frontend/src/components/RelatedPosts.astro | 25 +- frontend/src/components/StatsList.astro | 23 +- frontend/src/components/TechStackList.astro | 24 +- .../src/components/ui/CommandPrompt.astro | 89 +- frontend/src/components/ui/FilterPill.astro | 3 +- frontend/src/components/ui/ViewMoreLink.astro | 77 +- frontend/src/env.d.ts | 8 + frontend/src/layouts/BaseLayout.astro | 86 +- frontend/src/lib/api/client.ts | 92 +- frontend/src/lib/config/terminal.ts | 4 +- frontend/src/lib/i18n/index.ts | 7 +- frontend/src/lib/i18n/messages.ts | 160 +- frontend/src/lib/types/index.ts | 14 + frontend/src/lib/utils/index.ts | 143 +- frontend/src/pages/about/index.astro | 81 +- frontend/src/pages/admin.astro | 4 +- frontend/src/pages/articles/[slug].astro | 59 +- frontend/src/pages/articles/index.astro | 345 ++- frontend/src/pages/ask/index.astro | 107 +- 
frontend/src/pages/categories/index.astro | 280 ++- frontend/src/pages/friends/index.astro | 6 +- frontend/src/pages/index.astro | 626 ++++- frontend/src/pages/reviews/index.astro | 688 +++++- frontend/src/pages/tags/index.astro | 243 +- frontend/src/pages/timeline/index.astro | 72 +- frontend/src/styles/global.css | 556 ++++- package.json | 14 + restart-services.ps1 | 40 +- start-admin.ps1 | 28 +- start-backend.ps1 | 22 +- start-frontend.ps1 | 28 +- start-mcp.ps1 | 33 +- stop-services.ps1 | 19 +- 137 files changed, 14181 insertions(+), 2691 deletions(-) create mode 100644 admin/src/components/markdown-preview.tsx create mode 100644 admin/src/components/markdown-workbench.tsx create mode 100644 admin/src/lib/markdown-diff.ts create mode 100644 admin/src/lib/markdown-document.ts create mode 100644 admin/src/lib/markdown-merge.ts create mode 100644 admin/src/lib/post-draft-window.ts create mode 100644 admin/src/pages/post-compare-page.tsx create mode 100644 admin/src/pages/post-polish-page.tsx create mode 100644 admin/src/pages/post-preview-page.tsx create mode 100644 backend/assets/seeds/site_settings.yaml create mode 100644 backend/backend-manual.err.log create mode 100644 backend/backend-manual.log create mode 100644 backend/backend-restart.err.log create mode 100644 backend/backend-restart.log delete mode 100644 backend/content/posts/building-blog-with-astro.md create mode 100644 backend/content/posts/canokeys.md create mode 100644 backend/content/posts/ffmpeg.md create mode 100644 backend/content/posts/go-arm.md create mode 100644 backend/content/posts/go-grpc.md create mode 100644 backend/content/posts/go-xml.md create mode 100644 backend/content/posts/hugo.md create mode 100644 backend/content/posts/linux-dhcp.md create mode 100644 backend/content/posts/linux-shell.md create mode 100644 backend/content/posts/linux.md delete mode 100644 backend/content/posts/loco-rs-framework.md create mode 100644 backend/content/posts/mysql.md create mode 100644 
backend/content/posts/redis.md create mode 100644 backend/content/posts/rust-dll.md delete mode 100644 backend/content/posts/rust-programming-tips.md create mode 100644 backend/content/posts/rust-serde.md create mode 100644 backend/content/posts/rust-sqlx.md delete mode 100644 backend/content/posts/terminal-ui-design.md create mode 100644 backend/content/posts/tmux.md delete mode 100644 backend/content/posts/welcome-to-termi.md create mode 100644 backend/migration/src/m20260328_000010_add_paragraph_comments_toggle_to_site_settings.rs create mode 100644 backend/migration/src/m20260328_000011_add_post_images_and_music_playlist.rs create mode 100644 backend/migration/src/m20260329_000012_add_link_url_to_reviews.rs create mode 100644 backend/migration/src/m20260329_000013_add_ai_provider_presets_to_site_settings.rs create mode 100644 backend/playwright-backend.err.log create mode 100644 backend/playwright-backend.out.log create mode 100644 backend/target-codex-ai-fix/.rustc_info.json create mode 100644 backend/target-codex-ai-fix/CACHEDIR.TAG create mode 100644 frontend/public/review-covers/black-myth-wukong.svg create mode 100644 frontend/public/review-covers/hero-dreams-in-tired-life.svg create mode 100644 frontend/public/review-covers/journey-to-the-west-editorial.svg create mode 100644 frontend/public/review-covers/placed-within.svg create mode 100644 frontend/public/review-covers/the-long-season.svg create mode 100644 frontend/public/review-covers/thirteen-invites.svg create mode 100644 package.json diff --git a/README.md b/README.md index 274cb4c..035e7c1 100644 --- a/README.md +++ b/README.md @@ -16,45 +16,48 @@ Monorepo for the Termi blog system. ## Run -### Monorepo scripts +### Recommended From the repository root: +```powershell +npm run dev +``` + +This starts `frontend + admin + backend` in a single Windows Terminal window with multiple tabs. 
+ +Common shortcuts: + +```powershell +npm run dev:mcp +npm run dev:frontend +npm run dev:admin +npm run dev:backend +npm run dev:mcp-only +npm run stop +npm run restart +``` + +### PowerShell entrypoint + +If you prefer direct scripts, use the single root entrypoint: + ```powershell .\dev.ps1 -``` - -Frontend + backend + MCP: - -```powershell .\dev.ps1 -WithMcp +.\dev.ps1 -Only frontend +.\dev.ps1 -Only admin +.\dev.ps1 -Only backend +.\dev.ps1 -Only mcp ``` -Only frontend: +If you want a single service to be opened as a new Windows Terminal tab instead of running in the current shell: ```powershell -.\dev.ps1 -FrontendOnly +.\dev.ps1 -Only frontend -Spawn ``` -Only backend: - -```powershell -.\dev.ps1 -BackendOnly -``` - -Only admin: - -```powershell -.\dev.ps1 -AdminOnly -``` - -Only MCP: - -```powershell -.\dev.ps1 -McpOnly -``` - -Direct scripts: +Legacy aliases are still available and now just forward to `dev.ps1`: ```powershell .\start-frontend.ps1 @@ -90,7 +93,7 @@ cargo loco start 2>&1 ### MCP Server ```powershell -.\start-mcp.ps1 +.\dev.ps1 -Only mcp ``` Default MCP endpoint: diff --git a/admin/package-lock.json b/admin/package-lock.json index 1361386..6929b5e 100644 --- a/admin/package-lock.json +++ b/admin/package-lock.json @@ -8,12 +8,16 @@ "name": "admin", "version": "0.0.0", "dependencies": { + "@monaco-editor/react": "^4.7.0", "@radix-ui/react-label": "^2.1.8", "@radix-ui/react-slot": "^1.2.4", "@tailwindcss/vite": "^4.2.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", + "dompurify": "^3.3.3", "lucide-react": "^1.7.0", + "marked": "^17.0.5", + "monaco-editor": "^0.55.1", "react": "^19.2.4", "react-dom": "^19.2.4", "react-router-dom": "^7.13.2", @@ -561,6 +565,29 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@monaco-editor/loader": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.7.0.tgz", + "integrity": 
"sha512-gIwR1HrJrrx+vfyOhYmCZ0/JcWqG5kbfG7+d3f/C1LXk2EvzAbHSg3MQ5lO2sMlo9izoAZ04shohfKLVT6crVA==", + "license": "MIT", + "dependencies": { + "state-local": "^1.0.6" + } + }, + "node_modules/@monaco-editor/react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@monaco-editor/react/-/react-4.7.0.tgz", + "integrity": "sha512-cyzXQCtO47ydzxpQtCGSQGOC8Gk3ZUeBXFAxD+CWXYFo5OqZyZUonFl0DwUlTyAfRHntBfw2p3w4s9R6oe1eCA==", + "license": "MIT", + "dependencies": { + "@monaco-editor/loader": "^1.5.0" + }, + "peerDependencies": { + "monaco-editor": ">= 0.25.0 < 1", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, "node_modules/@napi-rs/wasm-runtime": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/@napi-rs/wasm-runtime/-/wasm-runtime-1.1.1.tgz", @@ -1223,6 +1250,13 @@ "@types/react": "^19.2.0" } }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "license": "MIT", + "optional": true + }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.57.2", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.2.tgz", @@ -1844,6 +1878,15 @@ "node": ">=8" } }, + "node_modules/dompurify": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.3.tgz", + "integrity": "sha512-Oj6pzI2+RqBfFG+qOaOLbFXLQ90ARpcGG6UePL82bJLtdsa6CYJD7nmiU8MW9nQNOtCHV3lZ/Bzq1X0QYbBZCA==", + "license": "(MPL-2.0 OR Apache-2.0)", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, "node_modules/electron-to-chromium": { "version": "1.5.328", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.328.tgz", @@ -2710,6 +2753,18 @@ "@jridgewell/sourcemap-codec": 
"^1.5.5" } }, + "node_modules/marked": { + "version": "17.0.5", + "resolved": "https://registry.npmjs.org/marked/-/marked-17.0.5.tgz", + "integrity": "sha512-6hLvc0/JEbRjRgzI6wnT2P1XuM1/RrrDEX0kPt0N7jGm1133g6X7DlxFasUIx+72aKAr904GTxhSLDrd5DIlZg==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 20" + } + }, "node_modules/minimatch": { "version": "3.1.5", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz", @@ -2723,6 +2778,37 @@ "node": "*" } }, + "node_modules/monaco-editor": { + "version": "0.55.1", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.55.1.tgz", + "integrity": "sha512-jz4x+TJNFHwHtwuV9vA9rMujcZRb0CEilTEwG2rRSpe/A7Jdkuj8xPKttCgOh+v/lkHy7HsZ64oj+q3xoAFl9A==", + "license": "MIT", + "dependencies": { + "dompurify": "3.2.7", + "marked": "14.0.0" + } + }, + "node_modules/monaco-editor/node_modules/dompurify": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.7.tgz", + "integrity": "sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==", + "license": "(MPL-2.0 OR Apache-2.0)", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, + "node_modules/monaco-editor/node_modules/marked": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-14.0.0.tgz", + "integrity": "sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -3083,6 +3169,12 @@ "node": ">=0.10.0" } }, + "node_modules/state-local": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/state-local/-/state-local-1.0.7.tgz", + "integrity": 
"sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==", + "license": "MIT" + }, "node_modules/strip-json-comments": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", diff --git a/admin/package.json b/admin/package.json index 2ef9c84..7281a6a 100644 --- a/admin/package.json +++ b/admin/package.json @@ -10,12 +10,16 @@ "preview": "vite preview" }, "dependencies": { + "@monaco-editor/react": "^4.7.0", "@radix-ui/react-label": "^2.1.8", "@radix-ui/react-slot": "^1.2.4", "@tailwindcss/vite": "^4.2.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", + "dompurify": "^3.3.3", "lucide-react": "^1.7.0", + "marked": "^17.0.5", + "monaco-editor": "^0.55.1", "react": "^19.2.4", "react-dom": "^19.2.4", "react-router-dom": "^7.13.2", diff --git a/admin/src/App.tsx b/admin/src/App.tsx index 00f4ed2..0d6163f 100644 --- a/admin/src/App.tsx +++ b/admin/src/App.tsx @@ -6,6 +6,7 @@ import { useCallback, useMemo, useState, + type ReactNode, } from 'react' import { BrowserRouter, @@ -56,11 +57,11 @@ function AppLoadingScreen() {

- Termi admin + Termi 后台

-

Booting control room

+

正在进入管理后台

- Checking the current admin session and preparing the new React workspace. + 正在检查当前登录状态,并准备新的 React 管理工作台。

@@ -68,14 +69,14 @@ function AppLoadingScreen() { ) } -function SessionGuard() { +function RequireAuth({ children }: { children: ReactNode }) { const { session } = useSession() if (!session.authenticated) { return } - return + return <>{children} } function PublicOnly() { @@ -97,12 +98,10 @@ function PublicOnly() { startTransition(() => { setSession(nextSession) }) - toast.success('Admin session unlocked.') + toast.success('后台登录成功。') navigate('/', { replace: true }) } catch (error) { - toast.error( - error instanceof ApiError ? error.message : 'Unable to sign in right now.', - ) + toast.error(error instanceof ApiError ? error.message : '当前无法登录后台。') } finally { setSubmitting(false) } @@ -127,12 +126,10 @@ function ProtectedLayout() { startTransition(() => { setSession(nextSession) }) - toast.success('Admin session closed.') + toast.success('已退出后台。') navigate('/login', { replace: true }) } catch (error) { - toast.error( - error instanceof ApiError ? error.message : 'Unable to sign out right now.', - ) + toast.error(error instanceof ApiError ? error.message : '当前无法退出后台。') } finally { setLoggingOut(false) } @@ -147,16 +144,21 @@ function AppRoutes() { return ( } /> - }> - }> - } /> - } /> - } /> - } /> - } /> - } /> - } /> - + + + + } + > + } /> + } /> + } /> + } /> + } /> + } /> + } /> } /> @@ -178,7 +180,7 @@ export default function App() { }) } catch (error) { toast.error( - error instanceof ApiError ? error.message : 'Unable to reach the backend session API.', + error instanceof ApiError ? 
error.message : '当前无法连接后台会话接口。', ) } finally { setLoading(false) diff --git a/admin/src/components/app-shell.tsx b/admin/src/components/app-shell.tsx index 681dcd5..eae44c4 100644 --- a/admin/src/components/app-shell.tsx +++ b/admin/src/components/app-shell.tsx @@ -21,38 +21,38 @@ import { cn } from '@/lib/utils' const primaryNav = [ { to: '/', - label: 'Overview', - description: 'Live operational dashboard', + label: '概览', + description: '站点运营总览', icon: LayoutDashboard, }, { to: '/posts', - label: 'Posts', - description: 'Markdown content workspace', + label: '文章', + description: 'Markdown 内容管理', icon: ScrollText, }, { to: '/comments', - label: 'Comments', - description: 'Moderation and paragraph replies', + label: '评论', + description: '审核与段落回复', icon: MessageSquareText, }, { to: '/friend-links', - label: 'Friend links', - description: 'Partner queue and reciprocity', + label: '友链', + description: '友链申请与互链管理', icon: Link2, }, { to: '/reviews', - label: 'Reviews', - description: 'Curated review library', + label: '评测', + description: '评测内容库', icon: BookOpenText, }, { to: '/settings', - label: 'Site settings', - description: 'Brand, profile, and AI config', + label: '设置', + description: '品牌、资料与 AI 配置', icon: Settings, }, ] @@ -77,15 +77,12 @@ export function AppShell({
- Termi admin + Termi 后台
-

- Control room for the blog system -

+

博客系统控制台

- A dedicated React workspace for publishing, moderation, operations, and - AI-related site controls. + 一个独立的 React 管理工作台,用来处理发布、审核、运营以及站内 AI 配置。

@@ -141,20 +138,20 @@ export function AppShell({

- Workspace status + 工作台状态

- Core admin flows are now available in the standalone app. + 核心后台流程已经迁移到独立管理端。

- live + 运行中
- Public site and admin stay decoupled. + 前台站点与后台管理保持解耦。
- Backend remains the shared auth and data layer. + 后端继续作为统一认证与数据层。
@@ -168,12 +165,14 @@ export function AppShell({
- New admin workspace + 新版管理工作台
-

Signed in as {username ?? 'admin'}

+

+ 当前登录:{username ?? 'admin'} +

- React + shadcn/ui foundation + React + shadcn/ui 基础架构

@@ -202,12 +201,12 @@ export function AppShell({ diff --git a/admin/src/components/markdown-preview.tsx b/admin/src/components/markdown-preview.tsx new file mode 100644 index 0000000..12756ce --- /dev/null +++ b/admin/src/components/markdown-preview.tsx @@ -0,0 +1,40 @@ +import DOMPurify from 'dompurify' +import { marked } from 'marked' +import { useMemo } from 'react' + +import { cn } from '@/lib/utils' + +type MarkdownPreviewProps = { + markdown: string + className?: string +} + +marked.setOptions({ + breaks: true, + gfm: true, +}) + +export function MarkdownPreview({ markdown, className }: MarkdownPreviewProps) { + const html = useMemo(() => { + const rendered = marked.parse(markdown || '暂无内容。') + return DOMPurify.sanitize(typeof rendered === 'string' ? rendered : '') + }, [markdown]) + + return ( +
+
+
+ ) +} diff --git a/admin/src/components/markdown-workbench.tsx b/admin/src/components/markdown-workbench.tsx new file mode 100644 index 0000000..778928f --- /dev/null +++ b/admin/src/components/markdown-workbench.tsx @@ -0,0 +1,332 @@ +import type { ReactNode } from 'react' +import { useEffect, useState } from 'react' +import { createPortal } from 'react-dom' + +import Editor, { DiffEditor, type BeforeMount } from '@monaco-editor/react' +import { Expand, Minimize2, Sparkles } from 'lucide-react' + +import { Button } from '@/components/ui/button' +import { cn } from '@/lib/utils' + +export type MarkdownWorkbenchPanel = 'edit' | 'preview' | 'diff' +export type MarkdownWorkbenchMode = 'workspace' | 'polish' + +type MarkdownWorkbenchProps = { + value: string + originalValue: string + diffValue?: string + path: string + readOnly?: boolean + mode: MarkdownWorkbenchMode + visiblePanels: MarkdownWorkbenchPanel[] + availablePanels?: MarkdownWorkbenchPanel[] + allowPolish?: boolean + preview: ReactNode + polishPanel?: ReactNode + originalLabel?: string + modifiedLabel?: string + onChange: (value: string) => void + onModeChange: (next: MarkdownWorkbenchMode) => void + onVisiblePanelsChange: (next: MarkdownWorkbenchPanel[]) => void +} + +export const editorTheme = 'termi-vscode' + +const orderedWorkbenchPanels: MarkdownWorkbenchPanel[] = ['edit', 'preview', 'diff'] + +function formatPanelLabel(panel: MarkdownWorkbenchPanel) { + switch (panel) { + case 'preview': + return '预览' + case 'diff': + return '改动对比' + case 'edit': + default: + return '编辑' + } +} + +function resolveVisiblePanels( + visiblePanels: MarkdownWorkbenchPanel[], + availablePanels: MarkdownWorkbenchPanel[], +) { + const orderedAvailablePanels = orderedWorkbenchPanels.filter((panel) => + availablePanels.includes(panel), + ) + const nextPanels = orderedAvailablePanels.filter((panel) => visiblePanels.includes(panel)) + return nextPanels.length ? 
nextPanels : orderedAvailablePanels.slice(0, 1) +} + +export const configureMonaco: BeforeMount = (monaco) => { + monaco.editor.defineTheme(editorTheme, { + base: 'vs-dark', + inherit: true, + rules: [ + { token: 'comment', foreground: '6A9955' }, + { token: 'keyword', foreground: 'C586C0' }, + { token: 'string', foreground: 'CE9178' }, + { token: 'number', foreground: 'B5CEA8' }, + { token: 'delimiter', foreground: 'D4D4D4' }, + { token: 'type.identifier', foreground: '4EC9B0' }, + ], + colors: { + 'editor.background': '#1e1e1e', + 'editor.foreground': '#d4d4d4', + 'editor.lineHighlightBackground': '#2a2d2e', + 'editor.lineHighlightBorder': '#00000000', + 'editorCursor.foreground': '#aeafad', + 'editor.selectionBackground': '#264f78', + 'editor.inactiveSelectionBackground': '#3a3d41', + 'editorWhitespace.foreground': '#3b3b3b', + 'editorIndentGuide.background1': '#404040', + 'editorIndentGuide.activeBackground1': '#707070', + 'editorLineNumber.foreground': '#858585', + 'editorLineNumber.activeForeground': '#c6c6c6', + 'editorGutter.background': '#1e1e1e', + 'editorOverviewRuler.border': '#00000000', + 'diffEditor.insertedTextBackground': '#9ccc2c33', + 'diffEditor.removedTextBackground': '#ff6b6b2d', + 'diffEditor.insertedLineBackground': '#9ccc2c18', + 'diffEditor.removedLineBackground': '#ff6b6b18', + }, + }) +} + +export const sharedOptions = { + automaticLayout: true, + fontFamily: + '"JetBrains Mono", "Cascadia Code", "Fira Code", ui-monospace, SFMono-Regular, monospace', + fontLigatures: true, + fontSize: 14, + lineHeight: 22, + minimap: { enabled: false }, + padding: { top: 16, bottom: 16 }, + renderWhitespace: 'selection' as const, + roundedSelection: false, + scrollBeyondLastLine: false, + smoothScrolling: true, + tabSize: 2, + wordWrap: 'on' as const, +} + +export function MarkdownWorkbench({ + value, + originalValue, + diffValue, + path, + readOnly = false, + mode, + visiblePanels, + availablePanels = ['edit', 'preview', 'diff'], + allowPolish, + 
preview, + polishPanel, + originalLabel = '基线版本', + modifiedLabel = '目标版本', + onChange, + onModeChange, + onVisiblePanelsChange, +}: MarkdownWorkbenchProps) { + const [fullscreen, setFullscreen] = useState(false) + const editorHeight = fullscreen ? 'h-[calc(100dvh-82px)]' : 'h-[560px]' + const diffContent = diffValue ?? value + const polishEnabled = allowPolish ?? Boolean(polishPanel) + const workspacePanels = resolveVisiblePanels(visiblePanels, availablePanels) + const renderDiffSideBySide = workspacePanels.length < 3 || fullscreen + + useEffect(() => { + if (!fullscreen) { + return + } + + const previousOverflow = document.body.style.overflow + document.body.style.overflow = 'hidden' + + return () => { + document.body.style.overflow = previousOverflow + } + }, [fullscreen]) + + const togglePanel = (panel: MarkdownWorkbenchPanel) => { + const currentPanels = resolveVisiblePanels(visiblePanels, availablePanels) + const nextPanels = currentPanels.includes(panel) + ? currentPanels.filter((item) => item !== panel) + : orderedWorkbenchPanels.filter( + (item) => availablePanels.includes(item) && (currentPanels.includes(item) || item === panel), + ) + + onVisiblePanelsChange(nextPanels.length ? nextPanels : availablePanels.slice(0, 1)) + + if (mode !== 'workspace') { + onModeChange('workspace') + } + } + + const workbench = ( +
+
+
+
+ + + +
+

{path}

+
+ +
+ {availablePanels.map((panel) => { + const active = mode === 'workspace' && workspacePanels.includes(panel) + + return ( + + ) + })} + {polishEnabled ? ( + + ) : null} + +
+
+ +
+ {mode === 'polish' ? ( +
{polishPanel}
+ ) : ( +
+ {workspacePanels.map((panel, index) => ( +
+
+ {formatPanelLabel(panel)} + {panel === 'diff' ? ( + + {originalLabel} / {modifiedLabel} + + ) : ( + {path} + )} +
+ + {panel === 'edit' ? ( +
+ onChange(next ?? '')} + /> +
+ ) : null} + + {panel === 'preview' ? ( +
{preview}
+ ) : null} + + {panel === 'diff' ? ( +
+ +
+ ) : null} +
+ ))} +
+ )} +
+
+ ) + + if (!fullscreen) { + return workbench + } + + if (typeof document === 'undefined') { + return workbench + } + + return createPortal( + <> +
+
{workbench}
+ , + document.body, + ) +} diff --git a/admin/src/lib/admin-format.ts b/admin/src/lib/admin-format.ts index 4fced2d..d03e3e0 100644 --- a/admin/src/lib/admin-format.ts +++ b/admin/src/lib/admin-format.ts @@ -1,6 +1,6 @@ export function formatDateTime(value: string | null | undefined) { if (!value) { - return 'Not available' + return '暂无' } const date = new Date(value) @@ -9,12 +9,85 @@ export function formatDateTime(value: string | null | undefined) { return value } - return new Intl.DateTimeFormat('en-US', { + return new Intl.DateTimeFormat('zh-CN', { dateStyle: 'medium', timeStyle: 'short', }).format(date) } +export function formatPostType(value: string | null | undefined) { + switch (value) { + case 'article': + return '文章' + case 'note': + return '笔记' + case 'page': + return '页面' + case 'snippet': + return '片段' + default: + return value || '文章' + } +} + +export function formatCommentScope(value: string | null | undefined) { + switch (value) { + case 'paragraph': + return '段落' + case 'article': + return '全文' + default: + return value || '全文' + } +} + +export function formatFriendLinkStatus(value: string | null | undefined) { + switch (value) { + case 'approved': + return '已通过' + case 'rejected': + return '已拒绝' + case 'pending': + return '待审核' + default: + return value || '待审核' + } +} + +export function formatReviewType(value: string | null | undefined) { + switch (value) { + case 'book': + return '图书' + case 'movie': + return '电影' + case 'game': + return '游戏' + case 'anime': + return '动画' + case 'music': + return '音乐' + default: + return value || '未分类' + } +} + +export function formatReviewStatus(value: string | null | undefined) { + switch (value) { + case 'published': + return '已发布' + case 'draft': + return '草稿' + case 'archived': + return '已归档' + case 'completed': + return '已完成' + case 'in-progress': + return '进行中' + default: + return value || '未知状态' + } +} + export function emptyToNull(value: string) { const trimmed = value.trim() return trimmed ? 
trimmed : null diff --git a/admin/src/lib/api.ts b/admin/src/lib/api.ts index f1c3b41..0860ddf 100644 --- a/admin/src/lib/api.ts +++ b/admin/src/lib/api.ts @@ -1,6 +1,9 @@ import type { AdminAiReindexResponse, + AdminAiProviderTestResponse, AdminDashboardResponse, + AdminPostMetadataResponse, + AdminPostPolishResponse, AdminSessionResponse, AdminSiteSettingsResponse, CommentListQuery, @@ -12,6 +15,7 @@ import type { FriendLinkRecord, MarkdownDeleteResponse, MarkdownDocumentResponse, + MarkdownImportResponse, PostListQuery, PostRecord, ReviewRecord, @@ -37,7 +41,7 @@ async function readErrorMessage(response: Response) { const text = await response.text().catch(() => '') if (!text) { - return `Request failed with status ${response.status}.` + return `请求失败,状态码 ${response.status}。` } try { @@ -123,6 +127,28 @@ export const adminApi = { request('/api/admin/ai/reindex', { method: 'POST', }), + testAiProvider: (provider: { + id: string + name: string + provider: string + api_base: string | null + api_key: string | null + chat_model: string | null + }) => + request('/api/admin/ai/test-provider', { + method: 'POST', + body: JSON.stringify({ provider }), + }), + generatePostMetadata: (markdown: string) => + request('/api/admin/ai/post-metadata', { + method: 'POST', + body: JSON.stringify({ markdown }), + }), + polishPostMarkdown: (markdown: string) => + request('/api/admin/ai/polish-post', { + method: 'POST', + body: JSON.stringify({ markdown }), + }), listPosts: (query?: PostListQuery) => request( appendQueryParams('/api/posts', { @@ -147,6 +173,7 @@ export const adminApi = { tags: payload.tags, post_type: payload.postType, image: payload.image, + images: payload.images, pinned: payload.pinned, published: payload.published, }), @@ -163,11 +190,24 @@ export const adminApi = { tags: payload.tags, post_type: payload.postType, image: payload.image, + images: payload.images, pinned: payload.pinned, }), }), getPostMarkdown: (slug: string) => 
request(`/api/posts/slug/${encodeURIComponent(slug)}/markdown`), + importPosts: async (files: File[]) => { + const formData = new FormData() + + files.forEach((file) => { + formData.append('files', file, file.webkitRelativePath || file.name) + }) + + return request('/api/posts/markdown/import', { + method: 'POST', + body: formData, + }) + }, updatePostMarkdown: (slug: string, markdown: string) => request(`/api/posts/slug/${encodeURIComponent(slug)}/markdown`, { method: 'PATCH', diff --git a/admin/src/lib/markdown-diff.ts b/admin/src/lib/markdown-diff.ts new file mode 100644 index 0000000..55a463c --- /dev/null +++ b/admin/src/lib/markdown-diff.ts @@ -0,0 +1,32 @@ +export function normalizeMarkdown(value: string) { + return value.replace(/\r\n/g, '\n') +} + +export function countLineDiff(left: string, right: string) { + const leftLines = normalizeMarkdown(left).split('\n') + const rightLines = normalizeMarkdown(right).split('\n') + const previous = new Array(rightLines.length + 1).fill(0) + + for (let leftIndex = 1; leftIndex <= leftLines.length; leftIndex += 1) { + const current = new Array(rightLines.length + 1).fill(0) + + for (let rightIndex = 1; rightIndex <= rightLines.length; rightIndex += 1) { + if (leftLines[leftIndex - 1] === rightLines[rightIndex - 1]) { + current[rightIndex] = previous[rightIndex - 1] + 1 + } else { + current[rightIndex] = Math.max(previous[rightIndex], current[rightIndex - 1]) + } + } + + for (let rightIndex = 0; rightIndex <= rightLines.length; rightIndex += 1) { + previous[rightIndex] = current[rightIndex] + } + } + + const common = previous[rightLines.length] + + return { + additions: Math.max(rightLines.length - common, 0), + deletions: Math.max(leftLines.length - common, 0), + } +} diff --git a/admin/src/lib/markdown-document.ts b/admin/src/lib/markdown-document.ts new file mode 100644 index 0000000..6060993 --- /dev/null +++ b/admin/src/lib/markdown-document.ts @@ -0,0 +1,247 @@ +import { normalizeMarkdown } from 
'@/lib/markdown-diff' + +export type ParsedMarkdownMeta = { + title: string + slug: string + description: string + category: string + postType: string + image: string + images: string[] + pinned: boolean + published: boolean + tags: string[] +} + +export type ParsedMarkdownDocument = { + meta: ParsedMarkdownMeta + body: string + markdown: string +} + +const defaultMeta: ParsedMarkdownMeta = { + title: '', + slug: '', + description: '', + category: '', + postType: 'article', + image: '', + images: [], + pinned: false, + published: true, + tags: [], +} + +function parseScalar(value: string) { + const trimmed = value.trim() + if (!trimmed) { + return '' + } + + if ( + trimmed.startsWith('"') || + trimmed.startsWith("'") || + trimmed.startsWith('[') || + trimmed.startsWith('{') + ) { + try { + return JSON.parse(trimmed) + } catch { + return trimmed.replace(/^['"]|['"]$/g, '') + } + } + + if (trimmed === 'true') { + return true + } + + if (trimmed === 'false') { + return false + } + + return trimmed +} + +function toStringList(value: unknown) { + if (Array.isArray(value)) { + return value + .map((item) => String(item).trim()) + .filter(Boolean) + } + + if (typeof value === 'string') { + return value + .split(/[,,]/) + .map((item) => item.trim()) + .filter(Boolean) + } + + return [] +} + +export function parseMarkdownDocument(markdown: string): ParsedMarkdownDocument { + const normalized = normalizeMarkdown(markdown) + const meta: ParsedMarkdownMeta = { ...defaultMeta } + + if (!normalized.startsWith('---\n')) { + return { + meta, + body: normalized.trimStart(), + markdown: normalized, + } + } + + const endIndex = normalized.indexOf('\n---\n', 4) + if (endIndex === -1) { + return { + meta, + body: normalized.trimStart(), + markdown: normalized, + } + } + + const frontmatter = normalized.slice(4, endIndex) + const body = normalized.slice(endIndex + 5).trimStart() + let currentListKey: 'tags' | 'images' | 'categories' | null = null + const categories: string[] = [] + + 
frontmatter.split('\n').forEach((line) => { + const listItemMatch = line.match(/^\s*-\s*(.+)\s*$/) + if (listItemMatch && currentListKey) { + const parsed = parseScalar(listItemMatch[1]) + const nextValue = typeof parsed === 'string' ? parsed.trim() : String(parsed).trim() + if (!nextValue) { + return + } + + if (currentListKey === 'tags') { + meta.tags.push(nextValue) + } else if (currentListKey === 'images') { + meta.images.push(nextValue) + } else { + categories.push(nextValue) + } + return + } + + currentListKey = null + + const keyMatch = line.match(/^([A-Za-z_]+):\s*(.*)$/) + if (!keyMatch) { + return + } + + const [, rawKey, rawValue] = keyMatch + const key = rawKey.trim() + const value = parseScalar(rawValue) + + if (key === 'tags') { + const tags = toStringList(value) + if (tags.length) { + meta.tags = tags + } else if (!String(rawValue).trim()) { + currentListKey = 'tags' + } + return + } + + if (key === 'images') { + const images = toStringList(value) + if (images.length) { + meta.images = images + } else if (!String(rawValue).trim()) { + currentListKey = 'images' + } + return + } + + if (key === 'categories' || key === 'category') { + const parsedCategories = toStringList(value) + if (parsedCategories.length) { + categories.push(...parsedCategories) + } else if (!String(rawValue).trim()) { + currentListKey = 'categories' + } + return + } + + switch (key) { + case 'title': + meta.title = String(value).trim() + break + case 'slug': + meta.slug = String(value).trim() + break + case 'description': + meta.description = String(value).trim() + break + case 'post_type': + meta.postType = String(value).trim() || 'article' + break + case 'image': + meta.image = String(value).trim() + break + case 'pinned': + meta.pinned = Boolean(value) + break + case 'published': + meta.published = value !== false + break + case 'draft': + if (value === true) { + meta.published = false + } + break + default: + break + } + }) + + meta.category = categories[0] ?? 
meta.category + + return { + meta, + body, + markdown: normalized, + } +} + +export function buildMarkdownDocument(meta: ParsedMarkdownMeta, body: string) { + const lines = [ + '---', + `title: ${JSON.stringify(meta.title.trim() || meta.slug || 'untitled-post')}`, + `slug: ${meta.slug.trim() || 'untitled-post'}`, + ] + + if (meta.description.trim()) { + lines.push(`description: ${JSON.stringify(meta.description.trim())}`) + } + + if (meta.category.trim()) { + lines.push(`category: ${JSON.stringify(meta.category.trim())}`) + } + + lines.push(`post_type: ${JSON.stringify(meta.postType.trim() || 'article')}`) + lines.push(`pinned: ${meta.pinned ? 'true' : 'false'}`) + lines.push(`published: ${meta.published ? 'true' : 'false'}`) + + if (meta.image.trim()) { + lines.push(`image: ${JSON.stringify(meta.image.trim())}`) + } + + if (meta.images.length) { + lines.push('images:') + meta.images.forEach((image) => { + lines.push(` - ${JSON.stringify(image)}`) + }) + } + + if (meta.tags.length) { + lines.push('tags:') + meta.tags.forEach((tag) => { + lines.push(` - ${JSON.stringify(tag)}`) + }) + } + + return `${lines.join('\n')}\n---\n\n${body.trim()}\n` +} diff --git a/admin/src/lib/markdown-merge.ts b/admin/src/lib/markdown-merge.ts new file mode 100644 index 0000000..5c34f20 --- /dev/null +++ b/admin/src/lib/markdown-merge.ts @@ -0,0 +1,149 @@ +import { normalizeMarkdown } from '@/lib/markdown-diff' + +type DiffOperation = + | { type: 'equal'; line: string } + | { type: 'delete'; line: string } + | { type: 'insert'; line: string } + +export type DiffHunk = { + id: string + originalStart: number + originalEnd: number + modifiedStart: number + modifiedEnd: number + removedLines: string[] + addedLines: string[] + preview: string +} + +function diffOperations(originalLines: string[], modifiedLines: string[]) { + const rows = originalLines.length + const cols = modifiedLines.length + const dp = Array.from({ length: rows + 1 }, () => new Array(cols + 1).fill(0)) + + for (let row 
= 1; row <= rows; row += 1) { + for (let col = 1; col <= cols; col += 1) { + if (originalLines[row - 1] === modifiedLines[col - 1]) { + dp[row][col] = dp[row - 1][col - 1] + 1 + } else { + dp[row][col] = Math.max(dp[row - 1][col], dp[row][col - 1]) + } + } + } + + const operations: DiffOperation[] = [] + let row = rows + let col = cols + + while (row > 0 || col > 0) { + if (row > 0 && col > 0 && originalLines[row - 1] === modifiedLines[col - 1]) { + operations.push({ type: 'equal', line: originalLines[row - 1] }) + row -= 1 + col -= 1 + continue + } + + if (col > 0 && (row === 0 || dp[row][col - 1] >= dp[row - 1][col])) { + operations.push({ type: 'insert', line: modifiedLines[col - 1] }) + col -= 1 + continue + } + + operations.push({ type: 'delete', line: originalLines[row - 1] }) + row -= 1 + } + + return operations.reverse() +} + +export function computeDiffHunks(original: string, modified: string): DiffHunk[] { + const originalLines = normalizeMarkdown(original).split('\n') + const modifiedLines = normalizeMarkdown(modified).split('\n') + const operations = diffOperations(originalLines, modifiedLines) + const hunks: DiffHunk[] = [] + let originalLine = 1 + let modifiedLine = 1 + let current: + | (Omit & { + idSeed: number + }) + | null = null + + const flush = () => { + if (!current) { + return + } + + const previewSource = current.addedLines.join(' ').trim() || current.removedLines.join(' ').trim() + hunks.push({ + id: `hunk-${current.idSeed}`, + originalStart: current.originalStart, + originalEnd: originalLine - 1, + modifiedStart: current.modifiedStart, + modifiedEnd: modifiedLine - 1, + removedLines: current.removedLines, + addedLines: current.addedLines, + preview: previewSource.slice(0, 120) || '空白改动', + }) + current = null + } + + operations.forEach((operation) => { + if (operation.type === 'equal') { + flush() + originalLine += 1 + modifiedLine += 1 + return + } + + if (!current) { + current = { + idSeed: hunks.length + 1, + originalStart: 
originalLine, + modifiedStart: modifiedLine, + removedLines: [], + addedLines: [], + } + } + + if (operation.type === 'delete') { + current.removedLines.push(operation.line) + originalLine += 1 + return + } + + current.addedLines.push(operation.line) + modifiedLine += 1 + }) + + flush() + + return hunks +} + +export function applySelectedDiffHunks( + original: string, + hunks: DiffHunk[], + selectedIds: Set, +) { + const originalLines = normalizeMarkdown(original).split('\n') + const resultLines: string[] = [] + let cursor = 1 + + hunks.forEach((hunk) => { + const unchangedEnd = Math.max(hunk.originalStart - 1, cursor - 1) + resultLines.push(...originalLines.slice(cursor - 1, unchangedEnd)) + + if (selectedIds.has(hunk.id)) { + resultLines.push(...hunk.addedLines) + } else if (hunk.originalEnd >= hunk.originalStart) { + resultLines.push(...originalLines.slice(hunk.originalStart - 1, hunk.originalEnd)) + } + + cursor = Math.max(hunk.originalEnd + 1, hunk.originalStart) + }) + + resultLines.push(...originalLines.slice(cursor - 1)) + + return resultLines.join('\n') +} diff --git a/admin/src/lib/post-draft-window.ts b/admin/src/lib/post-draft-window.ts new file mode 100644 index 0000000..353d907 --- /dev/null +++ b/admin/src/lib/post-draft-window.ts @@ -0,0 +1,82 @@ +export type DraftWindowSnapshot = { + title: string + slug: string + path: string + markdown: string + savedMarkdown: string + createdAt: number +} + +const STORAGE_PREFIX = 'termi-admin-post-draft:' +const POLISH_RESULT_PREFIX = 'termi-admin-post-polish-result:' + +export type PolishWindowResult = { + draftKey: string + markdown: string + target: 'editor' | 'create' + createdAt: number +} + +export function saveDraftWindowSnapshot(snapshot: Omit) { + const key = `${STORAGE_PREFIX}${snapshot.slug}:${Date.now()}` + const payload: DraftWindowSnapshot = { + ...snapshot, + createdAt: Date.now(), + } + + window.localStorage.setItem(key, JSON.stringify(payload)) + return key +} + +export function 
loadDraftWindowSnapshot(key: string | null) { + if (!key) { + return null + } + + const raw = window.localStorage.getItem(key) + if (!raw) { + return null + } + + try { + return JSON.parse(raw) as DraftWindowSnapshot + } catch { + return null + } +} + +export function savePolishWindowResult( + draftKey: string, + markdown: string, + target: 'editor' | 'create', +) { + const payload: PolishWindowResult = { + draftKey, + markdown, + target, + createdAt: Date.now(), + } + + window.localStorage.setItem(`${POLISH_RESULT_PREFIX}${draftKey}`, JSON.stringify(payload)) + return payload +} + +export function consumePolishWindowResult(key: string | null) { + if (!key) { + return null + } + + const storageKey = `${POLISH_RESULT_PREFIX}${key}` + const raw = window.localStorage.getItem(storageKey) + if (!raw) { + return null + } + + window.localStorage.removeItem(storageKey) + + try { + return JSON.parse(raw) as PolishWindowResult + } catch { + return null + } +} diff --git a/admin/src/lib/types.ts b/admin/src/lib/types.ts index 868f735..8ed7c64 100644 --- a/admin/src/lib/types.ts +++ b/admin/src/lib/types.ts @@ -89,11 +89,15 @@ export interface AdminSiteSettingsResponse { social_email: string | null location: string | null tech_stack: string[] + music_playlist: MusicTrack[] ai_enabled: boolean + paragraph_comments_enabled: boolean ai_provider: string | null ai_api_base: string | null ai_api_key: string | null ai_chat_model: string | null + ai_providers: AiProviderConfig[] + ai_active_provider_id: string | null ai_embedding_model: string | null ai_system_prompt: string | null ai_top_k: number | null @@ -103,6 +107,15 @@ export interface AdminSiteSettingsResponse { ai_local_embedding: string } +export interface AiProviderConfig { + id: string + name: string + provider: string + api_base: string | null + api_key: string | null + chat_model: string | null +} + export interface SiteSettingsPayload { siteName?: string | null siteShortName?: string | null @@ -120,11 +133,15 @@ export 
interface SiteSettingsPayload { socialEmail?: string | null location?: string | null techStack?: string[] + musicPlaylist?: MusicTrack[] aiEnabled?: boolean + paragraphCommentsEnabled?: boolean aiProvider?: string | null aiApiBase?: string | null aiApiKey?: string | null aiChatModel?: string | null + aiProviders?: AiProviderConfig[] + aiActiveProviderId?: string | null aiEmbeddingModel?: string | null aiSystemPrompt?: string | null aiTopK?: number | null @@ -136,6 +153,35 @@ export interface AdminAiReindexResponse { last_indexed_at: string | null } +export interface AdminAiProviderTestResponse { + provider: string + endpoint: string + chat_model: string + reply_preview: string +} + +export interface MusicTrack { + title: string + artist?: string | null + album?: string | null + url: string + cover_image_url?: string | null + accent_color?: string | null + description?: string | null +} + +export interface AdminPostMetadataResponse { + title: string + description: string + category: string + tags: string[] + slug: string +} + +export interface AdminPostPolishResponse { + polished_markdown: string +} + export interface PostRecord { created_at: string updated_at: string @@ -148,6 +194,7 @@ export interface PostRecord { tags: unknown post_type: string | null image: string | null + images: string[] | null pinned: boolean | null } @@ -169,6 +216,7 @@ export interface CreatePostPayload { tags?: string[] postType?: string | null image?: string | null + images?: string[] | null pinned?: boolean published?: boolean } @@ -182,6 +230,7 @@ export interface UpdatePostPayload { tags?: unknown postType?: string | null image?: string | null + images?: string[] | null pinned?: boolean | null } @@ -196,6 +245,11 @@ export interface MarkdownDeleteResponse { deleted: boolean } +export interface MarkdownImportResponse { + count: number + slugs: string[] +} + export interface CommentRecord { created_at: string updated_at: string @@ -273,6 +327,7 @@ export interface ReviewRecord { 
description: string | null tags: string | null cover: string | null + link_url: string | null created_at: string updated_at: string } @@ -286,6 +341,7 @@ export interface CreateReviewPayload { description: string tags: string[] cover: string + link_url?: string | null } export interface UpdateReviewPayload { @@ -297,4 +353,5 @@ export interface UpdateReviewPayload { description?: string tags?: string[] cover?: string + link_url?: string | null } diff --git a/admin/src/pages/comments-page.tsx b/admin/src/pages/comments-page.tsx index 51d3877..80472b3 100644 --- a/admin/src/pages/comments-page.tsx +++ b/admin/src/pages/comments-page.tsx @@ -17,7 +17,7 @@ import { TableRow, } from '@/components/ui/table' import { adminApi, ApiError } from '@/lib/api' -import { formatDateTime } from '@/lib/admin-format' +import { formatCommentScope, formatDateTime } from '@/lib/admin-format' import type { CommentRecord } from '@/lib/types' function moderationBadgeVariant(approved: boolean | null) { @@ -49,13 +49,13 @@ export function CommentsPage() { }) if (showToast) { - toast.success('Comments refreshed.') + toast.success('评论列表已刷新。') } } catch (error) { if (error instanceof ApiError && error.status === 401) { return } - toast.error(error instanceof ApiError ? error.message : 'Unable to load comments.') + toast.error(error instanceof ApiError ? error.message : '无法加载评论列表。') } finally { setLoading(false) setRefreshing(false) @@ -106,59 +106,58 @@ export function CommentsPage() {
- Comments + 评论
-

Moderation queue

+

评论审核队列

- Review article comments and paragraph-specific responses from one place, with fast - approval controls for the public discussion layer. + 在一个页面中处理全文评论与段落评论,快速完成公开讨论区的审核工作。

-

Pending

+

待审核

{pendingCount}
-

Needs moderation attention.

+

需要人工审核处理。

- Paragraph replies + 段落评论

{paragraphCount}
-

Scoped to paragraph anchors.

+

挂载到具体段落锚点。

-

Total

+

总数

{comments.length}
-

Everything currently stored.

+

当前系统中全部评论。

- Comment list + 评论列表 - Filter the queue, then approve, hide, or remove entries without leaving the page. + 先筛选,再直接通过、隐藏或删除评论,无需离开当前页面。
setSearchTerm(event.target.value)} /> @@ -166,14 +165,14 @@ export function CommentsPage() { value={approvalFilter} onChange={(event) => setApprovalFilter(event.target.value)} > - - - + + +
@@ -183,10 +182,10 @@ export function CommentsPage() { - Comment - Status - Context - Actions + 评论内容 + 状态 + 上下文 + 操作 @@ -195,20 +194,20 @@ export function CommentsPage() {
- {comment.author ?? 'Anonymous'} - {comment.scope} + {comment.author ?? '匿名用户'} + {formatCommentScope(comment.scope)} {formatDateTime(comment.created_at)}

- {comment.content ?? 'No content provided.'} + {comment.content ?? '暂无评论内容。'}

{comment.scope === 'paragraph' ? (
-

{comment.paragraph_key ?? 'missing-key'}

+

{comment.paragraph_key ?? '缺少段落键'}

- {comment.paragraph_excerpt ?? 'No paragraph excerpt stored.'} + {comment.paragraph_excerpt ?? '没有保存段落摘录。'}

) : null} @@ -216,16 +215,16 @@ export function CommentsPage() { - {comment.approved ? 'Approved' : 'Pending'} + {comment.approved ? '已通过' : '待审核'}
-

{comment.post_slug ?? 'unknown-post'}

+

{comment.post_slug ?? '未知文章'}

{comment.reply_to_comment_id ? ( -

Replying to #{comment.reply_to_comment_id}

+

回复评论 #{comment.reply_to_comment_id}

) : ( -

Top-level comment

+

顶级评论

)}
@@ -239,13 +238,11 @@ export function CommentsPage() { try { setActingId(comment.id) await adminApi.updateComment(comment.id, { approved: true }) - toast.success('Comment approved.') + toast.success('评论已通过。') await loadComments(false) } catch (error) { toast.error( - error instanceof ApiError - ? error.message - : 'Unable to approve comment.', + error instanceof ApiError ? error.message : '无法通过该评论。', ) } finally { setActingId(null) @@ -253,7 +250,7 @@ export function CommentsPage() { }} > - Approve + 通过
@@ -316,7 +309,7 @@ export function CommentsPage() {
-

No comments match the current moderation filters.

+

当前筛选条件下没有匹配的评论。

diff --git a/admin/src/pages/dashboard-page.tsx b/admin/src/pages/dashboard-page.tsx index 2828711..e5f04c3 100644 --- a/admin/src/pages/dashboard-page.tsx +++ b/admin/src/pages/dashboard-page.tsx @@ -24,6 +24,13 @@ import { TableRow, } from '@/components/ui/table' import { adminApi, ApiError } from '@/lib/api' +import { + formatCommentScope, + formatFriendLinkStatus, + formatPostType, + formatReviewStatus, + formatReviewType, +} from '@/lib/admin-format' import type { AdminDashboardResponse } from '@/lib/types' function StatCard({ @@ -70,13 +77,13 @@ export function DashboardPage() { }) if (showToast) { - toast.success('Dashboard refreshed.') + toast.success('仪表盘已刷新。') } } catch (error) { if (error instanceof ApiError && error.status === 401) { return } - toast.error(error instanceof ApiError ? error.message : 'Unable to load dashboard.') + toast.error(error instanceof ApiError ? error.message : '无法加载仪表盘。') } finally { setLoading(false) setRefreshing(false) @@ -102,27 +109,27 @@ export function DashboardPage() { const statCards = [ { - label: 'Posts', + label: '文章总数', value: data.stats.total_posts, - note: `${data.stats.total_comments} comments across the content library`, + note: `内容库中共有 ${data.stats.total_comments} 条评论`, icon: Rss, }, { - label: 'Pending comments', + label: '待审核评论', value: data.stats.pending_comments, - note: 'Queued for moderation follow-up', + note: '等待审核处理', icon: MessageSquareWarning, }, { - label: 'Categories', + label: '分类数量', value: data.stats.total_categories, - note: `${data.stats.total_tags} tags currently in circulation`, + note: `当前共有 ${data.stats.total_tags} 个标签`, icon: FolderTree, }, { - label: 'AI chunks', + label: 'AI 分块', value: data.stats.ai_chunks, - note: data.stats.ai_enabled ? 'Knowledge base is enabled' : 'AI is currently disabled', + note: data.stats.ai_enabled ? '知识库已启用' : 'AI 功能当前关闭', icon: BrainCircuit, }, ] @@ -131,12 +138,11 @@ export function DashboardPage() {
- Dashboard + 仪表盘
-

Operations overview

+

运营总览

- This screen brings the most important publishing, moderation, and AI signals into the - new standalone admin so the day-to-day control loop stays in one place. + 这里汇总了最重要的发布、审核和 AI 信号,让日常运营在一个独立后台里完成闭环。

@@ -145,7 +151,7 @@ export function DashboardPage() {
@@ -169,21 +175,21 @@ export function DashboardPage() {
- Recent posts + 最近文章 - Freshly imported or updated content flowing into the public site. + 最近同步到前台的文章内容。
- {data.recent_posts.length} rows + {data.recent_posts.length} 条
- Title - Type - Category - Created + 标题 + 类型 + 分类 + 创建时间 @@ -193,13 +199,13 @@ export function DashboardPage() {
{post.title} - {post.pinned ? pinned : null} + {post.pinned ? 置顶 : null}

{post.slug}

- {post.post_type} + {formatPostType(post.post_type)} {post.category} {post.created_at} @@ -212,9 +218,9 @@ export function DashboardPage() { - Site heartbeat + 站点状态 - A quick read on the public-facing site and the AI index state. + 快速查看前台站点与 AI 索引状态。 @@ -225,7 +231,7 @@ export function DashboardPage() {

{data.site.site_url}

- {data.site.ai_enabled ? 'AI on' : 'AI off'} + {data.site.ai_enabled ? 'AI 已开启' : 'AI 已关闭'} @@ -233,7 +239,7 @@ export function DashboardPage() {

- Reviews + 评测

{data.stats.total_reviews} @@ -242,7 +248,7 @@ export function DashboardPage() {

- Friend links + 友链

{data.stats.total_links} @@ -253,10 +259,10 @@ export function DashboardPage() {

- Last AI index + 最近一次 AI 索引

- {data.site.ai_last_indexed_at ?? 'The site has not been indexed yet.'} + {data.site.ai_last_indexed_at ?? '站点还没有建立过索引。'}

@@ -267,21 +273,21 @@ export function DashboardPage() {
- Pending comments + 待审核评论 - Queue visibility without opening the old moderation page. + 不进入旧后台也能查看审核队列。
- {data.pending_comments.length} queued + {data.pending_comments.length} 条待处理
- Author - Scope - Post - Created + 作者 + 范围 + 文章 + 创建时间 @@ -296,7 +302,7 @@ export function DashboardPage() { - {comment.scope} + {formatCommentScope(comment.scope)} {comment.post_slug} @@ -313,12 +319,12 @@ export function DashboardPage() {
- Pending friend links + 待审核友链 - Requests waiting for review and reciprocal checks. + 等待审核和互链确认的申请。
- {data.pending_friend_links.length} pending + {data.pending_friend_links.length} 条待处理
{data.pending_friend_links.map((link) => ( @@ -335,6 +341,9 @@ export function DashboardPage() { {link.category} +

+ 状态:{formatFriendLinkStatus(link.status)} +

{link.created_at}

@@ -345,9 +354,9 @@ export function DashboardPage() { - Recent reviews + 最近评测 - The latest review entries flowing into the public reviews page. + 最近同步到前台评测页的内容。 @@ -359,7 +368,7 @@ export function DashboardPage() {

{review.title}

- {review.review_type} · {review.status} + {formatReviewType(review.review_type)} · {formatReviewStatus(review.status)}

diff --git a/admin/src/pages/friend-links-page.tsx b/admin/src/pages/friend-links-page.tsx index fb28a7f..f0ae866 100644 --- a/admin/src/pages/friend-links-page.tsx +++ b/admin/src/pages/friend-links-page.tsx @@ -11,7 +11,7 @@ import { Select } from '@/components/ui/select' import { Skeleton } from '@/components/ui/skeleton' import { Textarea } from '@/components/ui/textarea' import { adminApi, ApiError } from '@/lib/api' -import { emptyToNull, formatDateTime } from '@/lib/admin-format' +import { emptyToNull, formatDateTime, formatFriendLinkStatus } from '@/lib/admin-format' import type { FriendLinkPayload, FriendLinkRecord } from '@/lib/types' type FriendLinkFormState = { @@ -88,13 +88,13 @@ export function FriendLinksPage() { }) if (showToast) { - toast.success('Friend links refreshed.') + toast.success('友链列表已刷新。') } } catch (error) { if (error instanceof ApiError && error.status === 401) { return } - toast.error(error instanceof ApiError ? error.message : 'Unable to load friend links.') + toast.error(error instanceof ApiError ? error.message : '无法加载友链列表。') } finally { setLoading(false) setRefreshing(false) @@ -135,12 +135,11 @@ export function FriendLinksPage() {
- Friend links + 友链
-

Partner site queue

+

友链申请队列

- Review inbound link exchanges, keep metadata accurate, and move requests through - pending, approved, or rejected states in one dedicated workspace. + 审核前台提交的友链申请,维护站点信息,并在待审核、已通过、已拒绝之间完成流转。

@@ -153,11 +152,11 @@ export function FriendLinksPage() { setForm(defaultFriendLinkForm) }} > - New link + 新建友链
@@ -165,15 +164,15 @@ export function FriendLinksPage() {
- Link inventory + 友链列表 - Pick an item to edit it, or start a new record from the right-hand form. + 选择一条友链进行编辑,或者直接在右侧创建新记录。
setSearchTerm(event.target.value)} /> @@ -181,10 +180,10 @@ export function FriendLinksPage() { value={statusFilter} onChange={(event) => setStatusFilter(event.target.value)} > - - - - + + + +
@@ -209,18 +208,18 @@ export function FriendLinksPage() {
- {link.site_name ?? 'Untitled partner'} + {link.site_name ?? '未命名站点'} - {link.status ?? 'pending'} + {formatFriendLinkStatus(link.status)}

{link.site_url}

- {link.description ?? 'No description yet.'} + {link.description ?? '暂无简介。'}

-

{link.category ?? 'uncategorized'}

+

{link.category ?? '未分类'}

{formatDateTime(link.created_at)}

@@ -230,7 +229,7 @@ export function FriendLinksPage() { {!filteredLinks.length ? (
-

No friend links match the current filters.

+

当前筛选条件下没有匹配的友链。

) : null}
@@ -241,10 +240,9 @@ export function FriendLinksPage() {
- {selectedLink ? 'Edit friend link' : 'Create friend link'} + {selectedLink ? '编辑友链' : '新建友链'} - Capture the reciprocal URL, classification, and moderation status the public link - page depends on. + 维护前台友链页依赖的互链地址、分类和审核状态。
@@ -252,14 +250,36 @@ export function FriendLinksPage() { ) : null} + {selectedLink ? ( + <> + + + + + ) : null} {selectedLink ? ( ) : null}
@@ -332,21 +348,21 @@ export function FriendLinksPage() {

- Selected record + 当前记录

- Created {formatDateTime(selectedLink.created_at)} + 创建于 {formatDateTime(selectedLink.created_at)}

- {selectedLink.status ?? 'pending'} + {formatFriendLinkStatus(selectedLink.status)}
) : null}
- + @@ -354,7 +370,7 @@ export function FriendLinksPage() { } /> - + @@ -362,7 +378,7 @@ export function FriendLinksPage() { } /> - + @@ -370,7 +386,7 @@ export function FriendLinksPage() { } /> - + @@ -379,21 +395,49 @@ export function FriendLinksPage() { />
- - + +
+ + + +
- +
@@ -78,11 +79,15 @@ +
+ {% if row.link_url %} + 跳转 + {% endif %} API
diff --git a/backend/assets/views/admin/site_settings.html b/backend/assets/views/admin/site_settings.html index 7722505..4b9ca16 100644 --- a/backend/assets/views/admin/site_settings.html +++ b/backend/assets/views/admin/site_settings.html @@ -88,12 +88,12 @@
- +
- -
这里只保存在后端数据库里,前台公开接口不会返回这个字段。当前默认接入本地 NewAPI 网关,未配置时前台仍可做本地检索,但不会生成完整聊天回答。
+ +
这里只保存在后端数据库里,前台公开接口不会返回这个字段。当前默认接入 91code.jiangnight.com 的 NewAPI 兼容接口,未配置时前台仍可做本地检索,但不会生成完整聊天回答。
diff --git a/backend/backend-manual.err.log b/backend/backend-manual.err.log new file mode 100644 index 0000000..227eedd --- /dev/null +++ b/backend/backend-manual.err.log @@ -0,0 +1,330 @@ + Compiling proc-macro2 v1.0.106 + Compiling quote v1.0.45 + Compiling unicode-ident v1.0.24 + Compiling serde_core v1.0.228 + Compiling serde v1.0.228 + Compiling getrandom v0.3.4 + Compiling autocfg v1.5.0 + Compiling find-msvc-tools v0.1.9 + Compiling shlex v1.3.0 + Compiling version_check v0.9.5 + Compiling crossbeam-utils v0.8.21 + Compiling zmij v1.0.21 + Compiling zerocopy v0.8.47 + Compiling serde_json v1.0.149 + Compiling pkg-config v0.3.32 + Compiling icu_normalizer_data v2.1.1 + Compiling icu_properties_data v2.1.2 + Compiling thiserror v2.0.18 + Compiling libc v0.2.183 + Compiling typenum v1.19.0 + Compiling generic-array v0.14.7 + Compiling rustls v0.23.37 + Compiling num-traits v0.2.19 + Compiling libm v0.2.16 + Compiling getrandom v0.4.2 + Compiling windows_x86_64_msvc v0.52.6 + Compiling jobserver v0.1.34 + Compiling ident_case v1.0.1 + Compiling parking_lot_core v0.9.12 + Compiling regex-syntax v0.8.10 + Compiling crc32fast v1.5.0 + Compiling httparse v1.10.1 + Compiling bigdecimal v0.4.10 + Compiling cc v1.2.57 + Compiling crossbeam-epoch v0.9.18 + Compiling rust_decimal v1.40.0 + Compiling windows-targets v0.52.6 + Compiling rand v0.10.0 + Compiling proc-macro-hack v0.5.20+deprecated + Compiling crossbeam-deque v0.8.6 + Compiling rand_core v0.6.4 + Compiling windows_x86_64_msvc v0.48.5 + Compiling flate2 v1.1.9 + Compiling windows_x86_64_msvc v0.53.1 + Compiling syn v2.0.117 + Compiling rand v0.8.5 + Compiling rayon-core v1.13.0 + Compiling regex-automata v0.4.14 + Compiling num-integer v0.1.46 + Compiling zstd-safe v7.2.4 + Compiling windows-sys v0.59.0 + Compiling concurrent-queue v2.5.0 + Compiling log v0.4.29 + Compiling num-bigint v0.4.6 + Compiling phf_generator v0.11.3 + Compiling block-buffer v0.10.4 + Compiling crypto-common v0.1.7 + Compiling winapi 
v0.3.9 + Compiling vcpkg v0.2.15 + Compiling anyhow v1.0.102 + Compiling native-tls v0.2.18 + Compiling digest v0.10.7 + Compiling object v0.37.3 + Compiling phf_codegen v0.11.3 + Compiling sha2 v0.10.9 + Compiling event-listener v5.4.1 + Compiling hashbrown v0.16.1 + Compiling deranged v0.5.8 + Compiling uuid v1.23.0 + Compiling ring v0.17.14 + Compiling zstd-sys v2.0.16+zstd.1.5.7 + Compiling windows-targets v0.53.5 + Compiling libsqlite3-sys v0.30.1 + Compiling windows-targets v0.48.5 + Compiling crossbeam-queue v0.3.12 + Compiling ahash v0.8.12 + Compiling windows-sys v0.48.0 + Compiling indexmap v2.13.0 + Compiling windows-sys v0.60.2 + Compiling time v0.3.47 + Compiling hmac v0.12.1 + Compiling regex v1.12.3 + Compiling md-5 v0.10.6 + Compiling atoi v2.0.0 + Compiling proc-macro-error-attr2 v2.0.0 + Compiling rustversion v1.0.22 + Compiling parse-zoneinfo v0.3.1 + Compiling etcetera v0.8.0 + Compiling hkdf v0.12.4 + Compiling rand_core v0.9.5 + Compiling chrono-tz-build v0.3.0 + Compiling proc-macro2-diagnostics v0.10.1 + Compiling portable-atomic v1.13.1 + Compiling base64ct v1.8.3 + Compiling socks v0.3.4 + Compiling paste v1.0.15 + Compiling pem-rfc7468 v1.0.0 + Compiling ignore v0.4.25 + Compiling ordered-float v4.6.0 + Compiling yansi v1.0.1 + Compiling thiserror v1.0.69 + Compiling ureq-proto v0.6.0 + Compiling der v0.8.0 + Compiling globwalk v0.9.1 + Compiling stacker v0.1.23 + Compiling num-rational v0.4.2 + Compiling humansize v2.1.3 + Compiling fs-err v2.11.0 + Compiling synstructure v0.13.2 + Compiling darling_core v0.20.11 + Compiling proc-macro-error2 v2.0.1 + Compiling pest_generator v2.8.6 + Compiling multer v3.1.0 + Compiling chrono-tz v0.9.0 + Compiling av-scenechange v0.14.1 + Compiling utf8-zero v0.8.1 + Compiling unicode-xid v0.2.6 + Compiling built v0.8.0 + Compiling ureq v3.3.0 + Compiling shared_child v1.1.1 + Compiling onig_sys v69.9.1 + Compiling matrixmultiply v0.3.10 + Compiling cookie v0.18.1 + Compiling hmac-sha256 v1.1.14 + 
Compiling rav1e v0.8.1 + Compiling pastey v0.1.1 + Compiling lzma-rust2 v0.15.7 + Compiling duct v1.1.1 + Compiling serde_path_to_error v0.1.20 + Compiling ar_archive_writer v0.5.1 + Compiling simd_helpers v0.1.0 + Compiling include_dir_macros v0.7.4 + Compiling windows-sys v0.52.0 + Compiling crossbeam-channel v0.5.15 + Compiling esaxx-rs v0.1.10 + Compiling tokio-cron-scheduler v0.11.1 + Compiling noop_proc_macro v0.3.0 + Compiling console v0.15.11 + Compiling include_dir v0.7.4 + Compiling castaway v0.2.4 + Compiling globset v0.4.18 + Compiling serde_derive v1.0.228 + Compiling displaydoc v0.2.5 + Compiling zerofrom-derive v0.1.6 + Compiling yoke-derive v0.8.1 + Compiling zerovec-derive v0.11.2 + Compiling tokio-macros v2.6.1 + Compiling tracing-attributes v0.1.31 + Compiling zerocopy-derive v0.8.47 + Compiling thiserror-impl v2.0.18 + Compiling futures-macro v0.3.32 + Compiling rustls-webpki v0.103.10 + Compiling darling_macro v0.20.11 + Compiling tinystr v0.8.2 + Compiling tokio v1.50.0 + Compiling unic-langid-impl v0.9.6 + Compiling equator-macro v0.4.2 + Compiling psm v0.1.30 + Compiling zerofrom v0.1.6 + Compiling darling v0.20.11 + Compiling futures-util v0.3.32 + Compiling yoke v0.8.1 + Compiling inherent v1.0.13 + Compiling num-derive v0.4.2 + Compiling tracing v0.1.44 + Compiling unic-langid-macros-impl v0.9.6 + Compiling zerovec v0.11.5 + Compiling zerotrie v0.2.3 + Compiling equator v0.4.2 + Compiling clap_derive v4.6.0 + Compiling pest_derive v2.8.6 + Compiling sea-query-derive v0.4.3 + Compiling aligned-vec v0.6.4 + Compiling thiserror-impl v1.0.69 + Compiling v_frame v0.3.9 + Compiling sea-bae v0.2.1 + Compiling async-trait v0.1.89 + Compiling profiling-procmacros v1.0.17 + Compiling derive_more-impl v2.1.1 + Compiling potential_utf v0.1.4 + Compiling icu_locale_core v2.1.1 + Compiling icu_collections v2.1.1 + Compiling arg_enum_proc_macro v0.3.4 + Compiling unic-langid-macros v0.9.6 + Compiling futures-executor v0.3.32 + Compiling futures v0.3.32 
+ Compiling icu_provider v2.1.1 + Compiling unic-langid v0.9.6 + Compiling smallvec v1.15.1 + Compiling chrono v0.4.44 + Compiling either v1.15.0 + Compiling serde_urlencoded v0.7.1 + Compiling icu_properties v2.1.2 + Compiling tracing-serde v0.2.0 + Compiling icu_normalizer v2.1.1 + Compiling tokio-util v0.7.18 + Compiling tokio-stream v0.1.18 + Compiling tower v0.5.3 + Compiling parking_lot v0.12.5 + Compiling rayon v1.11.0 + Compiling tokio-rustls v0.26.4 + Compiling idna_adapter v1.2.1 + Compiling h2 v0.4.13 + Compiling ppv-lite86 v0.2.21 + Compiling futures-intrusive v0.5.0 + Compiling idna v1.1.0 + Compiling tokio-native-tls v0.3.1 + Compiling sea-query v0.32.7 + Compiling rand_chacha v0.3.1 + Compiling rand_chacha v0.9.0 + Compiling itertools v0.14.0 + Compiling url v2.5.8 + Compiling hashbrown v0.14.5 + Compiling rand v0.9.2 + Compiling clap v4.6.0 + Compiling sqlx-core v0.8.6 + Compiling tracing-subscriber v0.3.23 + Compiling async-stream-impl v0.3.6 + Compiling ouroboros_macro v0.18.5 + Compiling maybe-rayon v0.1.1 + Compiling half v2.7.1 + Compiling derive_more v2.1.1 + Compiling serde_spanned v0.6.9 + Compiling serde_regex v1.1.0 + Compiling serde_yaml v0.9.34+deprecated + Compiling toml_datetime v0.6.11 + Compiling tera v1.20.1 + Compiling async-stream v0.3.6 + Compiling sea-orm-macros v1.1.19 + Compiling profiling v1.0.17 + Compiling av1-grain v0.2.5 + Compiling hyper v1.8.1 + Compiling axum-core v0.5.6 + Compiling derive_builder_core v0.20.2 + Compiling sqlx-postgres v0.8.6 + Compiling sqlx-sqlite v0.8.6 + Compiling hyper-util v0.1.20 + Compiling ouroboros v0.18.5 + Compiling ort-sys v2.0.0-rc.11 + Compiling fax_derive v0.2.0 + Compiling axum-macros v0.5.0 + Compiling sea-schema-derive v0.3.0 + Compiling fax v0.2.6 + Compiling hyper-tls v0.6.0 + Compiling hyper-rustls v0.27.7 + Compiling rrgen v0.5.6 + Compiling derive_builder_macro v0.20.2 + Compiling chumsky v0.9.3 + Compiling sea-orm-cli v1.1.19 + Compiling toml_edit v0.22.27 + Compiling combine 
v4.6.7 + Compiling cron v0.12.1 + Compiling backon v1.6.0 + Compiling quick-xml v0.38.4 + Compiling simple_asn1 v0.6.4 + Compiling validator_derive v0.20.0 + Compiling socket2 v0.5.10 + Compiling monostate-impl v0.1.18 + Compiling serde_html_form v0.2.8 + Compiling sqlx v0.8.6 + Compiling colored v2.2.0 + Compiling blake2 v0.10.6 + Compiling sea-query-binder v0.7.0 + Compiling num-complex v0.4.6 + Compiling macro_rules_attribute-proc_macro v0.2.2 + Compiling loco-rs v0.16.4 + Compiling moxcms v0.8.1 + Compiling axum v0.8.8 + Compiling sea-schema v0.16.2 + Compiling sea-orm v1.1.19 + Compiling validator v0.20.0 + Compiling ndarray v0.17.2 + Compiling macro_rules_attribute v0.2.2 + Compiling spm_precompiled v0.1.4 + Compiling lettre v0.11.19 + Compiling exr v1.74.0 + Compiling backtrace_printer v1.3.0 + Compiling zstd v0.13.3 + Compiling moka v0.12.15 + Compiling compression-codecs v0.4.37 + Compiling ravif v0.13.0 + Compiling async-compression v0.4.41 + Compiling redis v0.31.0 + Compiling tower-http v0.6.8 + Compiling indicatif v0.17.11 + Compiling argon2 v0.5.3 + Compiling reqwest v0.12.28 + Compiling axum-extra v0.10.3 + Compiling byte-unit v4.0.19 + Compiling loco-gen v0.16.4 + Compiling jsonwebtoken v9.3.1 + Compiling notify v8.2.0 + Compiling png v0.18.1 + Compiling monostate v0.1.18 + Compiling toml v0.8.23 + Compiling onig v6.5.1 + Compiling derive_builder v0.20.2 + Compiling tiff v0.11.3 + Compiling tracing-appender v0.2.4 + Compiling opendal v0.54.1 + Compiling rayon-cond v0.4.0 + Compiling ulid v1.2.1 + Compiling dashmap v6.1.0 + Compiling ureq v2.12.1 + Compiling unicode-normalization-alignments v0.1.12 + Compiling intl_pluralrules v7.0.2 + Compiling intl-memoizer v0.5.3 + Compiling fluent-langneg v0.13.1 + Compiling compact_str v0.9.0 + Compiling ipnetwork v0.20.0 + Compiling dary_heap v0.3.8 + Compiling serde_variant v0.1.3 + Compiling fluent-syntax v0.12.0 + Compiling tower v0.4.13 + Compiling duct_sh v1.0.0 + Compiling fluent-bundle v0.16.0 + 
Compiling tokenizers v0.22.2 + Compiling hf-hub v0.4.3 + Compiling image v0.25.10 + Compiling ort v2.0.0-rc.11 + Compiling safetensors v0.7.0 + Compiling sea-orm-migration v1.1.19 + Compiling fluent-template-macros v0.13.3 + Compiling fluent-templates v0.13.3 + Compiling fastembed v5.13.0 + Compiling migration v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend\migration) + Compiling termi-api v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend) + Finished `dev` profile [unoptimized + debuginfo] target(s) in 2m 23s + Running `target\debug\termi_api-cli.exe start` +error: process didn't exit successfully: `target\debug\termi_api-cli.exe start` (exit code: 0xffffffff) diff --git a/backend/backend-manual.log b/backend/backend-manual.log new file mode 100644 index 0000000..01abf73 --- /dev/null +++ b/backend/backend-manual.log @@ -0,0 +1,26 @@ +2026-03-29T11:49:41.902355Z  WARN loco_rs::boot: pretty backtraces are enabled (this is great for development but has a runtime cost for production. disable with `logger.pretty_backtrace` in your config yaml) + + ▄ ▀ + ▀ ▄ + ▄ ▀ ▄ ▄ ▄▀ + ▄ ▀▄▄ + ▄ ▀ ▀ ▀▄▀█▄ + ▀█▄ +▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▀▀█ + ██████ █████ ███ █████ ███ █████ ███ ▀█ + ██████ █████ ███ █████ ▀▀▀ █████ ███ ▄█▄ + ██████ █████ ███ █████ █████ ███ ████▄ + ██████ █████ ███ █████ ▄▄▄ █████ ███ █████ + ██████ █████ ███ ████ ███ █████ ███ ████▀ + ▀▀▀██▄ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ██▀ + ▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀ + https://loco.rs + +environment: development + database: automigrate + logger: debug +compilation: debug + modes: server + +listening on http://localhost:5150 +2026-03-29T11:50:40.675162Z ERROR http-request: loco_rs::controller: controller_error error.msg=AI provider returned 429 Too Many Requests: {"error":{"message":"Concurrency limit exceeded for user, please retry later","type":"rate_limit_error"}} error.details=BadRequest("AI provider returned 429 Too Many Requests: {\"error\":{\"message\":\"Concurrency limit exceeded for 
user, please retry later\",\"type\":\"rate_limit_error\"}}") http.method=POST http.uri=/api/ai/ask http.version=HTTP/1.1 http.user_agent=Mozilla/5.0 (Windows NT 10.0; Microsoft Windows 10.0.26200; zh-CN) PowerShell/7.5.5 environment=development request_id=160e41d4-83b3-49d9-ad6d-e26498301ab9 diff --git a/backend/backend-restart.err.log b/backend/backend-restart.err.log new file mode 100644 index 0000000..01e2e9f --- /dev/null +++ b/backend/backend-restart.err.log @@ -0,0 +1,529 @@ + Compiling proc-macro2 v1.0.106 + Compiling unicode-ident v1.0.24 + Compiling quote v1.0.45 + Compiling syn v2.0.117 + Compiling cfg-if v1.0.4 + Compiling serde_core v1.0.228 + Compiling memchr v2.8.0 + Compiling windows-link v0.2.1 + Compiling serde v1.0.228 + Compiling serde_derive v1.0.228 + Compiling windows-sys v0.61.2 + Compiling getrandom v0.3.4 + Compiling itoa v1.0.18 + Compiling autocfg v1.5.0 + Compiling once_cell v1.21.4 + Compiling jobserver v0.1.34 + Compiling find-msvc-tools v0.1.9 + Compiling shlex v1.3.0 + Compiling cc v1.2.57 + Compiling log v0.4.29 + Compiling pin-project-lite v0.2.17 + Compiling bytes v1.11.1 + Compiling stable_deref_trait v1.2.1 + Compiling version_check v0.9.5 + Compiling num-traits v0.2.19 + Compiling smallvec v1.15.1 + Compiling displaydoc v0.2.5 + Compiling synstructure v0.13.2 + Compiling zerofrom-derive v0.1.6 + Compiling zerofrom v0.1.6 + Compiling yoke-derive v0.8.1 + Compiling futures-core v0.3.32 + Compiling yoke v0.8.1 + Compiling percent-encoding v2.3.2 + Compiling zerovec-derive v0.11.2 + Compiling crossbeam-utils v0.8.21 + Compiling zerovec v0.11.5 + Compiling allocator-api2 v0.2.21 + Compiling socket2 v0.6.3 + Compiling mio v1.1.1 + Compiling tokio-macros v2.6.1 + Compiling tokio v1.50.0 + Compiling tinystr v0.8.2 + Compiling aho-corasick v1.1.4 + Compiling futures-sink v0.3.32 + Compiling tracing-core v0.1.36 + Compiling equivalent v1.0.2 + Compiling zerocopy v0.8.47 + Compiling zmij v1.0.21 + Compiling getrandom v0.2.17 + Compiling 
tracing-attributes v0.1.31 + Compiling zerocopy-derive v0.8.47 + Compiling serde_json v1.0.149 + Compiling zeroize v1.8.2 + Compiling tracing v0.1.44 + Compiling foldhash v0.2.0 + Compiling base64 v0.22.1 + Compiling hashbrown v0.16.1 + Compiling slab v0.4.12 + Compiling pkg-config v0.3.32 + Compiling futures-channel v0.3.32 + Compiling fnv v1.0.7 + Compiling indexmap v2.13.0 + Compiling futures-macro v0.3.32 + Compiling thiserror-impl v2.0.18 + Compiling futures-io v0.3.32 + Compiling subtle v2.6.1 + Compiling futures-task v0.3.32 + Compiling futures-util v0.3.32 + Compiling litemap v0.8.1 + Compiling writeable v0.6.2 + Compiling icu_locale_core v2.1.1 + Compiling potential_utf v0.1.4 + Compiling zerotrie v0.2.3 + Compiling num-integer v0.1.46 + Compiling icu_properties_data v2.1.2 + Compiling thiserror v2.0.18 + Compiling icu_normalizer_data v2.1.1 + Compiling icu_provider v2.1.1 + Compiling icu_collections v2.1.1 + Compiling form_urlencoded v1.2.2 + Compiling ring v0.17.14 + Compiling libc v0.2.183 + Compiling bitflags v2.11.0 + Compiling regex-syntax v0.8.10 + Compiling regex-automata v0.4.14 + Compiling scopeguard v1.2.0 + Compiling typenum v1.19.0 + Compiling lock_api v0.4.14 + Compiling icu_normalizer v2.1.1 + Compiling icu_properties v2.1.2 + Compiling num-bigint v0.4.6 + Compiling rustls-pki-types v1.14.0 + Compiling generic-array v0.14.7 + Compiling ryu v1.0.23 + Compiling untrusted v0.9.0 + Compiling strsim v0.11.1 + Compiling idna_adapter v1.2.1 + Compiling crossbeam-epoch v0.9.18 + Compiling utf8_iter v1.0.4 + Compiling idna v1.1.0 + Compiling ppv-lite86 v0.2.21 + Compiling chrono v0.4.44 + Compiling either v1.15.0 + Compiling rustls v0.23.37 + Compiling url v2.5.8 + Compiling crossbeam-deque v0.8.6 + Compiling rustls-webpki v0.103.10 + Compiling arrayvec v0.7.6 + Compiling libm v0.2.16 + Compiling crypto-common v0.1.7 + Compiling block-buffer v0.10.4 + Compiling webpki-roots v1.0.6 + Compiling http v1.4.0 + Compiling getrandom v0.4.2 + Compiling 
num-conv v0.2.1 + Compiling ident_case v1.0.1 + Compiling powerfmt v0.2.0 + Compiling windows_x86_64_msvc v0.52.6 + Compiling time-core v0.1.8 + Compiling rand_core v0.10.0 + Compiling simd-adler32 v0.3.9 + Compiling time-macros v0.2.27 + Compiling deranged v0.5.8 + Compiling darling_core v0.20.11 + Compiling digest v0.10.7 + Compiling cpufeatures v0.3.0 + Compiling byteorder v1.5.0 + Compiling chacha20 v0.10.0 + Compiling darling_macro v0.20.11 + Compiling time v0.3.47 + Compiling regex v1.12.3 + Compiling rand_core v0.6.4 + Compiling tokio-util v0.7.18 + Compiling crc32fast v1.5.0 + Compiling parking_lot_core v0.9.12 + Compiling adler2 v2.0.1 + Compiling siphasher v1.0.2 + Compiling miniz_oxide v0.8.9 + Compiling windows-targets v0.52.6 + Compiling darling v0.20.11 + Compiling rand v0.10.0 + Compiling http-body v1.0.1 + Compiling spin v0.9.8 + Compiling heck v0.4.1 + Compiling httparse v1.10.1 + Compiling tower-service v0.3.3 + Compiling uuid v1.23.0 + Compiling serde_urlencoded v0.7.1 + Compiling zstd-sys v2.0.16+zstd.1.5.7 + Compiling httpdate v1.0.3 + Compiling flate2 v1.1.9 + Compiling phf_shared v0.11.3 + Compiling rand_chacha v0.3.1 + Compiling webpki-roots v0.26.11 + Compiling bigdecimal v0.4.10 + Compiling windows_x86_64_msvc v0.48.5 + Compiling proc-macro-hack v0.5.20+deprecated + Compiling atomic-waker v1.1.2 + Compiling windows_x86_64_msvc v0.53.1 + Compiling rust_decimal v1.40.0 + Compiling try-lock v0.2.5 + Compiling mime v0.3.17 + Compiling lazy_static v1.5.0 + Compiling want v0.3.1 + Compiling h2 v0.4.13 + Compiling rand v0.8.5 + Compiling parking_lot v0.12.5 + Compiling windows-strings v0.5.1 + Compiling windows-result v0.4.1 + Compiling bstr v1.12.1 + Compiling tower-layer v0.3.3 + Compiling pin-utils v0.1.0 + Compiling zstd-safe v7.2.4 + Compiling alloc-no-stdlib v2.0.4 + Compiling cpufeatures v0.2.17 + Compiling rayon-core v1.13.0 + Compiling foldhash v0.1.5 + Compiling hashbrown v0.15.5 + Compiling alloc-stdlib v0.2.2 + Compiling hyper v1.8.1 
+ Compiling windows-registry v0.6.1 + Compiling unic-langid-impl v0.9.6 + Compiling phf_generator v0.11.3 + Compiling http-body-util v0.1.3 + Compiling windows-sys v0.59.0 + Compiling concurrent-queue v2.5.0 + Compiling sync_wrapper v1.0.2 + Compiling winapi-util v0.1.11 + Compiling parking v2.2.1 + Compiling native-tls v0.2.18 + Compiling tinyvec_macros v0.1.1 + Compiling object v0.37.3 + Compiling anyhow v1.0.102 + Compiling vcpkg v0.2.15 + Compiling winapi v0.3.9 + Compiling ipnet v2.12.0 + Compiling crc-catalog v2.4.0 + Compiling crc v3.4.0 + Compiling hyper-util v0.1.20 + Compiling libsqlite3-sys v0.30.1 + Compiling tinyvec v1.11.0 + Compiling event-listener v5.4.1 + Compiling same-file v1.0.6 + Compiling parse-zoneinfo v0.3.1 + Compiling windows-targets v0.53.5 + Compiling unic-langid-macros-impl v0.9.6 + Compiling windows-targets v0.48.5 + Compiling phf_codegen v0.11.3 + Compiling brotli-decompressor v5.0.0 + Compiling hashlink v0.10.0 + Compiling sha2 v0.10.9 + Compiling futures-intrusive v0.5.0 + Compiling phf v0.11.3 + Compiling tokio-stream v0.1.18 + Compiling crossbeam-queue v0.3.12 + Compiling ahash v0.8.12 + Compiling schannel v0.1.29 + Compiling unicase v2.9.0 + Compiling ucd-trie v0.1.7 + Compiling heck v0.5.0 + Compiling pest v2.8.6 + Compiling mime_guess v2.0.5 + Compiling sqlx-core v0.8.6 + Compiling chrono-tz-build v0.3.0 + Compiling brotli v8.0.2 + Compiling windows-sys v0.48.0 + Compiling windows-sys v0.60.2 + Compiling zstd v0.13.3 + Compiling walkdir v2.5.0 + Compiling unicode-normalization v0.1.25 + Compiling tower v0.5.3 + Compiling flume v0.11.1 + Compiling hmac v0.12.1 + Compiling md-5 v0.10.6 + Compiling atoi v2.0.0 + Compiling home v0.5.12 + Compiling encoding_rs v0.8.35 + Compiling equator-macro v0.4.2 + Compiling proc-macro-error-attr2 v2.0.0 + Compiling rustversion v1.0.22 + Compiling compression-core v0.4.31 + Compiling utf8parse v0.2.2 + Compiling anstyle v1.0.14 + Compiling unicode-bidi v0.3.18 + Compiling unicode-segmentation 
v1.13.2 + Compiling unicode-properties v0.1.4 + Compiling once_cell_polyfill v1.70.2 + Compiling dotenvy v0.15.7 + Compiling anstyle-wincon v3.0.11 + Compiling stringprep v0.1.5 + Compiling anstyle-parse v1.0.0 + Compiling compression-codecs v0.4.37 + Compiling proc-macro-error2 v2.0.1 + Compiling equator v0.4.2 + Compiling etcetera v0.8.0 + Compiling hkdf v0.12.4 + Compiling socks v0.3.4 + Compiling ar_archive_writer v0.5.1 + Compiling chrono-tz v0.9.0 + Compiling pest_meta v2.8.6 + Compiling rayon v1.11.0 + Compiling globset v0.4.18 + Compiling tokio-rustls v0.26.4 + Compiling futures-executor v0.3.32 + Compiling proc-macro2-diagnostics v0.10.1 + Compiling rand_core v0.9.5 + Compiling anstyle-query v1.1.5 + Compiling nom v8.0.0 + Compiling colorchoice v1.0.5 + Compiling whoami v1.6.1 + Compiling is_terminal_polyfill v1.70.2 + Compiling hex v0.4.3 + Compiling base64ct v1.8.3 + Compiling paste v1.0.15 + Compiling portable-atomic v1.13.1 + Compiling static_assertions v1.1.0 + Compiling minimal-lexical v0.2.1 + Compiling nom v7.1.3 + Compiling pem-rfc7468 v1.0.0 + Compiling sqlx-postgres v0.8.6 + Compiling anstream v1.0.0 + Compiling rand_chacha v0.9.0 + Compiling sqlx-sqlite v0.8.6 + Compiling ignore v0.4.25 + Compiling sea-query-derive v0.4.3 + Compiling pest_generator v2.8.6 + Compiling psm v0.1.30 + Compiling aligned-vec v0.6.4 + Compiling async-compression v0.4.41 + Compiling tokio-native-tls v0.3.1 + Compiling ordered-float v4.6.0 + Compiling inherent v1.0.13 + Compiling num-derive v0.4.2 + Compiling clap_lex v1.1.0 + Compiling http-range-header v0.4.2 + Compiling deunicode v1.6.2 + Compiling yansi v1.0.1 + Compiling iri-string v0.7.11 + Compiling thiserror v1.0.69 + Compiling tower-http v0.6.8 + Compiling slug v0.1.6 + Compiling clap_builder v4.6.0 + Compiling sea-query v0.32.7 + Compiling ureq-proto v0.6.0 + Compiling unic-langid-macros v0.9.6 + Compiling webpki-root-certs v1.0.6 + Compiling hyper-tls v0.6.0 + Compiling v_frame v0.3.9 + Compiling pest_derive 
v2.8.6 + Compiling globwalk v0.9.1 + Compiling sqlx v0.8.6 + Compiling rand v0.9.2 + Compiling der v0.8.0 + Compiling hyper-rustls v0.27.7 + Compiling clap_derive v4.6.0 + Compiling sharded-slab v0.1.7 + Compiling humansize v2.1.3 + Compiling itertools v0.14.0 + Compiling num-rational v0.4.2 + Compiling matchers v0.2.0 + Compiling tracing-serde v0.2.0 + Compiling tracing-log v0.2.0 + Compiling multer v3.1.0 + Compiling as-slice v0.2.1 + Compiling stacker v0.1.23 + Compiling fs-err v2.11.0 + Compiling nu-ansi-term v0.50.3 + Compiling thread_local v1.1.9 + Compiling thiserror-impl v1.0.69 + Compiling av-scenechange v0.14.1 + Compiling utf8-zero v0.8.1 + Compiling glob v0.3.3 + Compiling built v0.8.0 + Compiling unicode-xid v0.2.6 + Compiling derive_more-impl v2.1.1 + Compiling rav1e v0.8.1 + Compiling ureq v3.3.0 + Compiling tracing-subscriber v0.3.23 + Compiling aligned v0.4.3 + Compiling tera v1.20.1 + Compiling clap v4.6.0 + Compiling reqwest v0.12.28 + Compiling sea-query-binder v0.7.0 + Compiling unic-langid v0.9.6 + Compiling ouroboros_macro v0.18.5 + Compiling hashbrown v0.14.5 + Compiling sea-bae v0.2.1 + Compiling shared_child v1.1.1 + Compiling futures v0.3.32 + Compiling onig_sys v69.9.1 + Compiling cookie v0.18.1 + Compiling matrixmultiply v0.3.10 + Compiling os_pipe v1.2.3 + Compiling core2 v0.4.0 + Compiling profiling-procmacros v1.0.17 + Compiling arg_enum_proc_macro v0.3.4 + Compiling async-trait v0.1.89 + Compiling async-stream-impl v0.3.6 + Compiling aliasable v0.1.3 + Compiling unsafe-libyaml v0.2.11 + Compiling lzma-rust2 v0.15.7 + Compiling y4m v0.8.0 + Compiling hmac-sha256 v1.1.14 + Compiling quick-error v2.0.1 + Compiling pastey v0.1.1 + Compiling fastrand v2.3.0 + Compiling shared_thread v0.2.0 + Compiling duct v1.1.1 + Compiling ort-sys v2.0.0-rc.11 + Compiling serde_yaml v0.9.34+deprecated + Compiling ouroboros v0.18.5 + Compiling async-stream v0.3.6 + Compiling profiling v1.0.17 + Compiling bitstream-io v4.9.0 + Compiling sea-orm-macros 
v1.1.19 + Compiling derive_more v2.1.1 + Compiling av1-grain v0.2.5 + Compiling maybe-rayon v0.1.1 + Compiling axum-core v0.5.6 + Compiling sea-schema-derive v0.3.0 + Compiling derive_builder_core v0.20.2 + Compiling windows-sys v0.52.0 + Compiling serde_regex v1.1.0 + Compiling cruet v0.13.3 + Compiling half v2.7.1 + Compiling crossbeam-channel v0.5.15 + Compiling serde_path_to_error v0.1.20 + Compiling toml_datetime v0.6.11 + Compiling serde_spanned v0.6.9 + Compiling fax_derive v0.2.0 + Compiling axum-macros v0.5.0 + Compiling include_dir_macros v0.7.4 + Compiling simd_helpers v0.1.0 + Compiling noop_proc_macro v0.3.0 + Compiling new_debug_unreachable v1.0.6 + Compiling matchit v0.8.4 + Compiling toml_write v0.1.2 + Compiling esaxx-rs v0.1.10 + Compiling rustc-hash v2.1.1 + Compiling winnow v0.7.15 + Compiling strum v0.26.3 + Compiling zune-core v0.5.1 + Compiling imgref v1.12.0 + Compiling unicode-width v0.2.2 + Compiling option-ext v0.2.0 + Compiling rawpointer v0.2.1 + Compiling tokio-cron-scheduler v0.11.1 + Compiling encode_unicode v1.0.0 + Compiling weezl v0.1.12 + Compiling console v0.15.11 + Compiling password-hash v0.5.0 + Compiling dirs-sys v0.5.0 + Compiling loop9 v0.1.5 + Compiling zune-jpeg v0.5.15 + Compiling sea-orm v1.1.19 + Compiling toml_edit v0.22.27 + Compiling type-map v0.5.1 + Compiling axum v0.8.8 + Compiling include_dir v0.7.4 + Compiling fax v0.2.6 + Compiling rrgen v0.5.6 + Compiling socket2 v0.5.10 + Compiling derive_builder_macro v0.20.2 + Compiling sea-schema v0.16.2 + Compiling chumsky v0.9.3 + Compiling backon v1.6.0 + Compiling sea-orm-cli v1.1.19 + Compiling castaway v0.2.4 + Compiling cron v0.12.1 + Compiling validator_derive v0.20.0 + Compiling colored v2.2.0 + Compiling combine v4.6.7 + Compiling cruet v0.14.0 + Compiling simple_asn1 v0.6.4 + Compiling blake2 v0.10.6 + Compiling zune-inflate v0.2.54 + Compiling fdeflate v0.3.7 + Compiling avif-serialize v0.8.8 + Compiling serde_html_form v0.2.8 + Compiling notify-types v2.1.0 
+ Compiling pem v3.0.6 + Compiling email-encoding v0.4.1 + Compiling num-complex v0.4.6 + Compiling colored v3.1.1 + Compiling quick-xml v0.38.4 + Compiling hostname v0.4.2 + Compiling monostate-impl v0.1.18 + Compiling utf8-width v0.1.8 + Compiling byteorder-lite v0.1.0 + Compiling quoted_printable v0.5.2 + Compiling base64 v0.13.1 + Compiling color_quant v1.1.0 + Compiling pxfm v0.1.28 + Compiling tagptr v0.2.0 + Compiling loco-rs v0.16.4 + Compiling macro_rules_attribute-proc_macro v0.2.2 + Compiling sha1_smol v1.0.1 + Compiling rgb v0.8.53 + Compiling bytemuck v1.25.0 + Compiling email_address v0.2.9 + Compiling bit_field v0.10.3 + Compiling btparse-stable v0.1.2 + Compiling lebe v0.5.3 + Compiling number_prefix v0.4.0 + Compiling indicatif v0.17.11 + Compiling exr v1.74.0 + Compiling backtrace_printer v1.3.0 + Compiling lettre v0.11.19 + Compiling qoi v0.4.1 + Compiling ravif v0.13.0 + Compiling redis v0.31.0 + Compiling macro_rules_attribute v0.2.2 + Compiling moka v0.12.15 + Compiling moxcms v0.8.1 + Compiling gif v0.14.1 + Compiling spm_precompiled v0.1.4 + Compiling image-webp v0.2.4 + Compiling byte-unit v4.0.19 + Compiling monostate v0.1.18 + Compiling opendal v0.54.1 + Compiling loco-gen v0.16.4 + Compiling ndarray v0.17.2 + Compiling jsonwebtoken v9.3.1 + Compiling notify v8.2.0 + Compiling axum-extra v0.10.3 + Compiling png v0.18.1 + Compiling argon2 v0.5.3 + Compiling validator v0.20.0 + Compiling compact_str v0.9.0 + Compiling sea-orm-migration v1.1.19 + Compiling onig v6.5.1 + Compiling derive_builder v0.20.2 + Compiling tiff v0.11.3 + Compiling intl-memoizer v0.5.3 + Compiling toml v0.8.23 + Compiling dirs v6.0.0 + Compiling tracing-appender v0.2.4 + Compiling duct_sh v1.0.0 + Compiling dashmap v6.1.0 + Compiling intl_pluralrules v7.0.2 + Compiling fluent-langneg v0.13.1 + Compiling rayon-cond v0.4.0 + Compiling ulid v1.2.1 + Compiling ureq v2.12.1 + Compiling tower v0.4.13 + Compiling english-to-cron v0.1.7 + Compiling fluent-syntax v0.12.0 + 
Compiling unicode-normalization-alignments v0.1.12 + Compiling ipnetwork v0.20.0 + Compiling serde_variant v0.1.3 + Compiling dary_heap v0.3.8 + Compiling unicode_categories v0.1.1 + Compiling self_cell v1.2.2 + Compiling semver v1.0.27 + Compiling fluent-bundle v0.16.0 + Compiling tokenizers v0.22.2 + Compiling fluent-template-macros v0.13.3 + Compiling hf-hub v0.4.3 + Compiling image v0.25.10 + Compiling ort v2.0.0-rc.11 + Compiling safetensors v0.7.0 + Compiling fastembed v5.13.0 + Compiling fluent-templates v0.13.3 + Compiling migration v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend\migration) + Compiling termi-api v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend) + Finished `dev` profile [unoptimized] target(s) in 8m 53s + Running `target\debug\termi_api-cli.exe start` +error: process didn't exit successfully: `target\debug\termi_api-cli.exe start` (exit code: 1073807364) diff --git a/backend/backend-restart.log b/backend/backend-restart.log new file mode 100644 index 0000000..7eefdd2 --- /dev/null +++ b/backend/backend-restart.log @@ -0,0 +1,25 @@ +2026-03-28T15:13:51.613322Z  WARN loco_rs::boot: pretty backtraces are enabled (this is great for development but has a runtime cost for production. 
disable with `logger.pretty_backtrace` in your config yaml) + + ▄ ▀ + ▀ ▄ + ▄ ▀ ▄ ▄ ▄▀ + ▄ ▀▄▄ + ▄ ▀ ▀ ▀▄▀█▄ + ▀█▄ +▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▀▀█ + ██████ █████ ███ █████ ███ █████ ███ ▀█ + ██████ █████ ███ █████ ▀▀▀ █████ ███ ▄█▄ + ██████ █████ ███ █████ █████ ███ ████▄ + ██████ █████ ███ █████ ▄▄▄ █████ ███ █████ + ██████ █████ ███ ████ ███ █████ ███ ████▀ + ▀▀▀██▄ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ██▀ + ▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀ + https://loco.rs + +environment: development + database: automigrate + logger: debug +compilation: debug + modes: server + +listening on http://localhost:5150 diff --git a/backend/content/posts/building-blog-with-astro.md b/backend/content/posts/building-blog-with-astro.md deleted file mode 100644 index f66cb70..0000000 --- a/backend/content/posts/building-blog-with-astro.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: Building a Blog with Astro -slug: building-blog-with-astro -description: Learn why Astro is the perfect choice for building fast, content-focused blogs. -category: tech -post_type: article -pinned: false -published: true -tags: - - astro - - web-dev - - static-site ---- - -# Building a Blog with Astro - -Astro is a modern static site generator that delivers lightning-fast performance. - -## Why Astro? - -- Zero JavaScript by default -- Island Architecture -- Framework Agnostic -- Great DX - -## Getting Started - -```bash -npm create astro@latest -cd my-astro-project -npm install -npm run dev -``` - -## Conclusion - -Astro is perfect for content-focused websites like blogs. 
diff --git a/backend/content/posts/canokeys.md b/backend/content/posts/canokeys.md new file mode 100644 index 0000000..0c48a83 --- /dev/null +++ b/backend/content/posts/canokeys.md @@ -0,0 +1,242 @@ +--- +title: "Canokey入门指南:2FA、OpenPGP、PIV" +description: 本文是一份Canokey入门指南,将介绍如何使用Canokey进行2FA、OpenPGP和PIV等操作。其中,2FA部分将介绍如何使用Yubikey Authenticator进行管理,OpenPGP部分将介绍如何生成GPG密钥并使用Canokey进行身份验证和加密解密,PIV部分将介绍如何在Canokey中生成PIV证书并使用其进行身份验证。 +date: 2022-08-19T16:42:40+08:00 +draft: false +slug: canokeys +image: +categories: + - Linux +tags: + - Linux +--- + + + +# 2FA + +`Canokey`使用`Yubikey Authenticator`来进行管理`2FA`。 + +下载`Yubikey Authenticator`,以下为`Yubikey Authenticator`官方下载网址 + +```http +https://www.yubico.com/products/yubico-authenticator/#h-download-yubico-authenticator +``` + +运行`Yubikey Authenticator` + +进入`custom reader`,在`Custom reader fiter`处填入 `CanoKey` + +![填入CanoKey](https://upload-images.jianshu.io/upload_images/9676051-ff0cd60f38ac7334.png) + +右上角`Add account` 增加`2FA` + +![添加2FA](https://upload-images.jianshu.io/upload_images/9676051-1031857fe0f13d08.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240) + +```yaml +Issuer: 备注 可选 +Account name : 用户名 必填项 +Secret Key : Hotp或Totp的key 必填项 +``` + + +# OpenPGP + +## 安装GPG + +Windows 用户可下载 [Gpg4Win](https://gpg4win.org/download.html),Linux/macOS 用户使用对应包管理软件安装即可. 
+ +## 生成主密钥 + +```shell +gpg --expert --full-gen-key #生成GPG KEY +``` + +推荐使用`ECC`算法 + +![image-20220102223722475](https://upload-images.jianshu.io/upload_images/9676051-df42e4b958e9a238.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240) + +```shell +选择(11) ECC (set your own capabilities) # 设置自己的功能 主密钥只保留 Certify 功能,其他功能(Encr,Sign,Auth)使用子密钥 +# 子密钥分成三份,分别获得三个不同的功能 +# encr 解密功能 +# sign 签名功能 +# auth 登录验证功能 +``` + +```shell +先选择 (S) Toggle the sign capability +``` + +![image-20220102224151589](https://upload-images.jianshu.io/upload_images/9676051-c3bb19eb398419e1.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240) + +``` +之后输入q 退出 +``` + +键入1,选择默认算法 + +![键入1,选择默认算法](https://upload-images.jianshu.io/upload_images/9676051-7a2c5ee8ed4800af.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240) + +设置主密钥永不过期 + +![image-20220102224451731](https://upload-images.jianshu.io/upload_images/9676051-cca6100917c2ffaa.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240) + +填写信息,按照实际情况填写即可 + +![image-20220102224612167](https://upload-images.jianshu.io/upload_images/9676051-10430afe3aa592c7.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240) + +``` +Windnows 下会弹出窗口输入密码,注意一定要保管好!!! +``` + +```shell + +``` + +```shell +# 会自动生成吊销证书,注意保存到安全的地方 +gpg: AllowSetForegroundWindow(22428) failed: �ܾ����ʡ� +gpg: revocation certificate stored as 'C:\\Users\\Andorid\\AppData\\Roaming\\gnupg\\openpgp-revocs.d\\<此处为私钥>.rev' +# 以上的REV文件即为吊销证书 +public and secret key created and signed. 
+``` + +```shell +pub ed25519 2022-01-02 [SC] + <此处为Pub> +uid <此处为Name> <此处为email> +``` + +生成子密钥 + +```shell + gpg --fingerprint --keyid-format long -K +``` + +下面生成不同功能的子密钥,其中 `` 为上面输出的密钥指纹,本示例中即为 `私钥`。最后的 `2y` 为密钥过期时间,可自行设置,如不填写默认永不过期。 + +```shell +gpg --quick-add-key cv25519 encr 2y +gpg --quick-add-key ed25519 auth 2y +gpg --quick-add-key ed25519 sign 2y +``` + +再次查看目前的私钥,可以看到已经包含了这三个子密钥。 + +```shell +gpg --fingerprint --keyid-format long -K +``` + +上面生成了三种功能的子密钥(ssb),分别为加密(E)、认证(A)、签名(S),对应 `OpenPGP Applet` 中的三个插槽。由于 `ECC` 实现的原因,加密密钥的算法区别于其他密钥的算法。 + +加密密钥用于加密文件和信息。签名密钥主要用于给自己的信息签名,保证这真的是来自**我**的信息。认证密钥主要用于 SSH 登录。 + +## 备份GPG + +```shell +# 公钥 +gpg -ao public-key.pub --export +# 主密钥,请务必保存好!!! +# 注意 key id 后面的 !,表示只导出这一个私钥,若没有的话默认导出全部私钥。 +gpg -ao sec-key.asc --export-secret-key ! +# sign子密钥 +gpg -ao sign-key.asc --export-secret-key ! +gpg -ao auth-key.asc --export-secret-key ! +gpg -ao encr-key.asc --export-secret-key ! +``` + +## 导入Canokey + +```shell +# 查看智能卡设备状态 +gpg --card-status +# 写入GPG +gpg --edit-key # 为上方的sec-key +# 选中第一个子密钥 +key 1 +# 写入到智能卡 +keytocard +# 再次输入,取消选择 +key 1 +# 选择第二个子密钥 +key 2 +keytocard +key 2 +key 3 +keytocard +# 保存修改并退出 +save + +#再次查看设备状态,可以看到此时子密钥标识符为 ssb>,表示本地只有一个指向 card-no: F1D0 xxxxxxxx 智能卡的指针,已不存在私钥。现在可以删除掉主密钥了,请再次确认你已安全备份好主密钥。 +gpg --card-status +``` +## 删除本地密钥 + +```shell +gpg --delete-secret-keys # 为上方的sec-key +``` + +为确保安全,也可直接删除 gpg 的工作目录:`%APPDATA%\gnupg`,Linux/macOS: `~/.gunpg`。 + +## 使用 Canokey + +此时切换回日常使用的环境,首先导入公钥 + +```shell +gpg --import public-key.pub +``` + +然后设置子密钥指向 Canokey + +```shell +gpg --edit-card +gpg/card> fetch +``` + +此时查看本地的私钥,可以看到已经指向了 Canokey + +``` +gpg --fingerprint --keyid-format long -K +``` + +配置gpg路径 + +```bash +git config --global gpg.program "C:\Program Files (x86)\GnuPG\bin\gpg.exe" --replace-all +``` + +## Git Commit 签名 + +首先确保 Git 本地配置以及 GitHub 中的邮箱信息包含在 `UID` 中,然后设置 Git 来指定使用子密钥中的签名(S)密钥。 + +```shell +git config --global user.signingkey # 为上方的Sign密钥 +``` + +之后在 `git commit` 时增加 `-S` 参数即可使用 
gpg 进行签名。也可在配置中设置自动 gpg 签名,此处不建议全局开启该选项,因为有的脚本可能会使用 `git am` 之类的涉及到 `commit` 的命令,如果全局开启的话会导致问题。 + +```shell +git config commit.gpgsign true +``` + +如果提交到 GitHub,前往 [GitHub SSH and GPG keys](https://github.com/settings/keys) 添加公钥。此处添加后,可以直接通过对应 GitHub ID 来获取公钥:`https://github.com/.gpg` + +## PIV + +首先在Web端添加自己的私钥到智能卡,之后前往 [WinCrypt SSH Agent](https://github.com/buptczq/WinCryptSSHAgent) 下载并运行,此时查看 `ssh-agent` 读取到的公钥信息,把输出的公钥信息添加到服务器的 `~/.ssh/authorized_keys` + +```shell +# 设置环境池 +$Env:SSH_AUTH_SOCK="\\.\pipe\openssh-ssh-agent" +# 查看ssh列表 +ssh-add -L +``` + +此时连接 `ssh user@host`,会弹出提示输入 `PIN` 的页面,注意此时输入的是 `PIV Applet PIN`,输入后即可成功连接服务器。 + +```yaml +tips: 可能会出现权限不够的情况,需要禁用Windows服务OpenSSH Authentication Agent +``` + +最后可以把该程序快捷方式添加到启动目录 `%AppData%\Microsoft\Windows\Start Menu\Programs\Startup`,方便直接使用。 diff --git a/backend/content/posts/ffmpeg.md b/backend/content/posts/ffmpeg.md new file mode 100644 index 0000000..36b8692 --- /dev/null +++ b/backend/content/posts/ffmpeg.md @@ -0,0 +1,67 @@ +--- +title: "如何使用FFmpeg处理音视频文件" +description: 本文提供了FFmpeg处理音视频文件的完整指南,包括将单张图片转换为视频、拼接多个视频、设置转场特效等多种操作。 +date: 2022-07-25T14:05:04+08:00 +draft: true +slug: ffmpeg +image: +categories: ffmpeg +tags: ffmpeg +--- + +# `ffmpeg`图片转视频 + +使用单张图片生成5秒视频 + +```bash +# -loop 1 指定开启单帧图片loop +# -t 5 指定loop时长为5秒 +# -i input 指定输入图片文件路径 示例:pic.jpg +# -pix_fmt 指定编码格式为yuv420p +# -y 若输出文件已存在,则强制进行覆盖。 +# ffmpeg会根据输出文件后缀,自动选择编码格式。 +# 也可以使用 -f 指定输出格式 +ffmpeg -loop 1 -t 5 -i .jpg -pix_fmt yuv420p -y output.ts +``` + +# `ffmpeg`拼接视频 + +```bash +# windows +# -i input 指定需要合并的文件,使用concat进行合并.示例:"concat:0.ts|1.ts|2.ts" +# -vcodec 指定视频编码器的参数为copy +# -acodec 指定音频编码器的参数为copy +# -y 若输出文件已存在,则强制进行覆盖。 +ffmpeg -i "concat:0.ts|1.ts" -vcodec copy -acodec copy -y output.ts +``` + +# `ffmpeg`设置转场特效 + +```bash +# Linux +ffmpeg -i v0.mp4 -i v1.mp4 -i v2.mp4 -i v3.mp4 -i v4.mp4 -filter_complex \ +"[0][1:v]xfade=transition=fade:duration=1:offset=3[vfade1]; \ + [vfade1][2:v]xfade=transition=fade:duration=1:offset=10[vfade2]; 
\ + [vfade2][3:v]xfade=transition=fade:duration=1:offset=21[vfade3]; \ + [vfade3][4:v]xfade=transition=fade:duration=1:offset=25,format=yuv420p; \ + [0:a][1:a]acrossfade=d=1[afade1]; \ + [afade1][2:a]acrossfade=d=1[afade2]; \ + [afade2][3:a]acrossfade=d=1[afade3]; \ + [afade3][4:a]acrossfade=d=1" \ +-movflags +faststart out.mp4 +``` + +| 输入文件 | 输入文件的视频总长 | + | previous xfade `offset` | - | xfade `duration` | `offset` = | +| :------- | :----------------- | :--: | :---------------------- | :--: | :--------------- | :--------- | +| `v0.mp4` | 4 | + | 0 | - | 1 | 3 | +| `v1.mp4` | 8 | + | 3 | - | 1 | 10 | +| `v2.mp4` | 12 | + | 10 | - | 1 | 21 | +| `v3.mp4` | 5 | + | 21 | - | 1 | 25 | + +// 将音频转为单声道 + +``` +ffmpeg -i .\1.mp3 -ac 1 -ar 44100 -ab 16k -vol 50 -f 1s.mp3 +ffmpeg -i one.ts -i 1s.mp3 -map 0:v -map 1:a -c:v copy -shortest -af apad -y one1.ts +``` + diff --git a/backend/content/posts/go-arm.md b/backend/content/posts/go-arm.md new file mode 100644 index 0000000..4526651 --- /dev/null +++ b/backend/content/posts/go-arm.md @@ -0,0 +1,121 @@ +--- +title: "使用arm交叉编译工具并解决GLIBC版本不匹配的问题" +description: 介绍如何使用arm交叉编译工具来编译Go程序,并解决在arm平台上运行时出现GLIBC版本不匹配的问题。 +date: 2022-06-10T15:00:26+08:00 +draft: false +slug: go-arm +image: +categories: + - Go +tags: + - Arm + - Go + - GLIBC +--- + +1. 下载 ARM 交叉编译工具,可以从官方网站下载。比如,可以从如下链接下载 GNU 工具链:[https://developer.arm.com/downloads/-/gnu-a](https://developer.arm.com/downloads/-/gnu-a) + + 示例:https://developer.arm.com/-/media/Files/downloads/gnu-a/10.3-2021.07/binrel/gcc-arm-10.3-2021.07-mingw-w64-i686-aarch64-none-elf.tar.xz + +2. 设置 Go ARM 交叉编译环境变量。具体来说,需要设置以下变量: + +```ruby +$env:GOOS="linux" +$env:GOARCH="arm64" +$env:CGO_ENABLED=1 +$env:CC="D:\arm\gcc-arm-10.3-2021.07-mingw-w64-i686-aarch64-none-linux-gnu\bin\aarch64-none-linux-gnu-gcc.exe" +$env:CXX="D:\arm\gcc-arm-10.3-2021.07-mingw-w64-i686-aarch64-none-linux-gnu\bin\aarch64-none-linux-gnu-g++.exe" +``` + +3. 
在 ARM 上运行程序时可能会出现如下错误: + +```bash +./bupload: /lib/aarch64-linux-gnu/libc.so.6: version `GLIBC_2.28' not found (required by ./bupload) +./bupload: /lib/aarch64-linux-gnu/libc.so.6: version `GLIBC_2.32' not found (required by ./bupload) +./bupload: /lib/aarch64-linux-gnu/libc.so.6: version `GLIBC_2.33' not found (required by ./bupload) +``` + +这是因为程序需要使用较新版本的 GLIBC 库,而 ARM 上安装的库版本较旧。可以通过以下步骤来解决这个问题: + +4. 查看当前系统中 libc 库所支持的版本: + +```bash +strings /lib/aarch64-linux-gnu/libc.so.6 | grep GLIBC_ +``` + +5. 备份整个 `/lib` 目录和 `/usr/include` 目录,以便稍后还原。 +6. 从 GNU libc 官方网站下载对应版本的 libc 库。例如,可以从如下链接下载 2.35 版本的 libc 库:[http://ftp.gnu.org/gnu/glibc/glibc-2.35.tar.xz](http://ftp.gnu.org/gnu/glibc/glibc-2.35.tar.xz) +7. 解压 libc 库: + +``` +xz -d glibc-2.35.tar.xz +tar xvf glibc-2.35.tar glibc-2.35 +``` + +8. 创建并进入 build 目录: + +```bash +mkdir build +cd build +``` + +9. 配置 libc 库的安装选项: + +```javascript +../configure --prefix=/usr --disable-profile --enable-add-ons --with-headers=/usr/include --with-binutils=/usr/bin +``` + +10. 
编译并安装 libc 库: + +```go +make -j4 +make install +``` + +接下来是关于 `make` 报错的部分: + +```yaml +asm/errno.h: No such file or directory +``` + +这个报错是因为 `errno.h` 文件中包含了 `asm/errno.h` 文件,但是找不到这个文件。为了解决这个问题,我们需要创建一个软链接: + +```bash +ln -s /usr/include/asm-generic /usr/include/asm +``` + +然后又出现了另一个报错: + +```bash +/usr/include/aarch64-linux-gnu/asm/sigcontext.h: No such file or directory +``` + +这个问题也可以通过重新安装`linux-libc-dev`后创建软链接来解决: + +```bash +# find / -name sigcontext.h +sudo apt-get install --reinstall linux-libc-dev +ln -s /usr/include/aarch64-linux-gnu/asm/sigcontext.h /usr/include/asm/sigcontext.h +``` + +接下来,还有一个报错: + +```yaml +asm/sve_context.h: No such file or directory +``` + +这个报错是因为最新的 Linux 内核在启用 ARM Scalable Vector Extension (SVE) 后,需要包含 `asm/sve_context.h` 文件。我们需要创建一个软链接来解决这个问题: + +```bash +# find / -name sve_context.h +ln -s /usr/include/aarch64-linux-gnu/asm/sve_context.h /usr/include/asm/sve_context.h +``` + +最后,还需要创建一个软链接: + +```bash +# find / -name byteorder.h +ln -s /usr/include/aarch64-linux-gnu/asm/byteorder.h /usr/include/asm/byteorder.h +``` + +完成以上步骤后,我们再次执行 `make` 命令,就应该可以顺利地编译和安装 glibc 了。 diff --git a/backend/content/posts/go-grpc.md b/backend/content/posts/go-grpc.md new file mode 100644 index 0000000..e1af849 --- /dev/null +++ b/backend/content/posts/go-grpc.md @@ -0,0 +1,173 @@ +--- +title: "Go使用gRPC进行通信" +description: RPC是远程过程调用的简称,是分布式系统中不同节点间流行的通信方式。 +date: 2022-05-26T14:17:33+08:00 +draft: false +slug: go-grpc +image: +categories: + - Go +tags: + - Go + - gRPC +--- + +# 安装`gRPC`和`Protoc` + +## 安装`protobuf` + +```bash +go get -u google.golang.org/protobuf +go get -u google.golang.org/protobuf/proto +go get -u google.golang.org/protobuf/protoc-gen-go +``` + + + +## 安装`Protoc` + +```shell +# 下载二进制文件并添加至环境变量 +https://github.com/protocolbuffers/protobuf/releases +``` + +安装`Protoc`插件`protoc-gen-go` + +```shell +# go install 会自动编译项目并添加至环境变量中 +go install google.golang.org/protobuf/cmd/protoc-gen-go@latest +``` + +```shell +#protoc-gen-go 文档地址 
+https://developers.google.com/protocol-buffers/docs/reference/go-generated +``` + +# 创建`proto`文件并定义服务 + +## 新建 `task.proto`文件 + +```shell +touch task.proto +``` + +## 编写`task.proto` + +```protobuf +// 指定proto版本 +syntax = "proto3"; +// 指定包名 +package task; +// 指定输出 go 语言的源码到哪个目录和 包名 +// 主要 目录和包名用 ; 隔开 +// 将在当前目录生成 task.pb.go +// 也可以只填写 "./",会生成的包名会变成 "----" +option go_package = "./;task"; + +// 指定RPC的服务名 +service TaskService { + // 调用 AddTaskCompletion 方法 + rpc AddTaskCompletion(request) returns (response); +} + +// RPC TaskService服务,AddTaskCompletion函数的请求参数,即消息 +message request { + uint32 id = 1;//任务id + string module = 2;//所属模块 + int32 value = 3;//此次完成值 + string guid = 4;//用户id +} +// RPC TaskService服务,TaskService函数的返回值,即消息 +message response{ + +} +``` + +## 使用`Protoc`来生成Go代码 + +```bash +protoc --go_out=. --go-grpc_out=. <要进行生成代码的文件>.proto +# example +protoc --go_out=. --go-grpc_out=. .\task.proto +``` + +这样生成会生成两个`.go`文件,一个是对应消息`task.pb.go`,一个对应服务接口`task_grpc.pb.go`。 + +在`task_grpc.pb.go`中,在我们定义的服务接口中,多增加了一个私有的接口方法: +`mustEmbedUnimplementedTaskServiceServer()` + +# 使用`Go`监听`gRPC`服务端及客户端 + +## 监听服务端 + +并有生成的一个`UnimplementedTaskServiceServer`结构体来实现了所有的服务接口。因此,在我们自己实现的服务类中,需要继承这个结构体,如: + +```go +// 用于实现grpc服务 TaskServiceServer 接口 +type TaskServiceImpl struct { + // 需要继承结构体 UnimplementedServiceServer 或mustEmbedUnimplementedTaskServiceServer + task.mustEmbedUnimplementedTaskServiceServer() +} + +func main() { + // 创建Grpc服务 + // 创建tcp连接 + listener, err := net.Listen("tcp", ":8082") + if err != nil { + fmt.Println(err) + return + } + // 创建grpc服务 + grpcServer := grpc.NewServer() + // 此函数在task.pb.go中,自动生成 + task.RegisterTaskServiceServer(grpcServer, &TaskServiceImpl{}) + // 在grpc服务上注册反射服务 + reflection.Register(grpcServer) + // 启动grpc服务 + err = grpcServer.Serve(listener) + if err != nil { + fmt.Println(err) + return + } + +} + +func (s *TaskServiceImpl) AddTaskCompletion(ctx context.Context, in *task.Request) (*task.Response, error) { + fmt.Println("收到一个Grpc 请求, 请求参数为", 
in.Guid) + r := &task.Response{ + } + return r, nil +} + +``` + +然后在`TaskService`上实现我们的服务接口。 + + +## 客户端 + +```go + conn, err := grpc.Dial("127.0.0.1:8082", grpc.WithInsecure()) + if err != nil { + panic(err) + } + defer conn.Close() + // 创建grpc客户端 + client := task.NewTaskServiceClient(conn) + // 创建请求 + req := &task.Request{ + Id: 1, + Module: "test", + Value: 3, + Guid: "test", + } + // 调用rpc TaskService AddTaskCompletion函数 + response, err := client.AddTaskCompletion(context.Background(), req) + if err != nil { + log.Println(err) + return + } + log.Println(response) +``` + +[本文参考](https://www.cnblogs.com/whuanle/p/14588031.html) diff --git a/backend/content/posts/go-xml.md b/backend/content/posts/go-xml.md new file mode 100644 index 0000000..bcdbdc4 --- /dev/null +++ b/backend/content/posts/go-xml.md @@ -0,0 +1,98 @@ +--- +title: "Go语言解析Xml" +slug: "go-xml" +date: 2022-05-20T14:38:05+08:00 +draft: false +description: "使用Go简简单单的解析Xml!" +tags: + - Go + - Xml +categories: + - Go +--- + +# 开始之前 + +```go +import "encoding/xml" +``` + +## 简单的`Xml`解析 + +### 1.假设我们解析的`Xml`内容如下: + +```xml + + + +``` + + + +### 2.接着我们构造对应的结构体 + +```go +type Feed struct { + XMLName xml.Name `xml:"feed"` + Person struct{ + Name string `xml:"name"` + Id string `xml:"id"` + Age int `xml:"age"` + } `xml:"person"` +} +``` + +### 3.对`Xml`数据进行反序列化 + +```go +var feed Feed + +// 读取Xml文件,并返回字节流 +content,err := ioutil.ReadFile(XmlFilename) +if err != nil { + log.Fatal(err) +} + +// 将读取到的内容反序列化到feed +xml.Unmarshal(content,&feed) +``` + +## 带有命名空间的`Xml`解析 + +部分`xml`文件会带有`命名空间`(`Namespace`),也就是冒号左侧的内容,此时我们需要在`go`结构体的`tag` 中加入`命名空间`。 + +### 1.带有命名空间(Namespace)的`Xml`文件 + +```xml + + +XXXXXXX + + + +``` + +### 2.针对命名空间构造结构体 + +```go +type Feed struct { + XMLName xml.Name `xml:"feed"` // 指定最外层的标签为feed + VideoId string `xml:"http://www.youtube.com/xml/schemas/2015 videoId"` + Community string `xml:"http://search.yahoo.com/mrss/ community"` +} +``` + +### 3.对`Xml`数据进行反序列化 + +```go +var feed Feed + +// 
读取Xml文件,并返回字节流 +content,err := ioutil.ReadFile(XmlFilename) +if err != nil { + log.Fatal(err) +} + +// 将读取到的内容反序列化到feed +xml.Unmarshal(content,&feed) +``` diff --git a/backend/content/posts/hugo.md b/backend/content/posts/hugo.md new file mode 100644 index 0000000..678d5da --- /dev/null +++ b/backend/content/posts/hugo.md @@ -0,0 +1,36 @@ +--- +title: "Hugo使用指南!" +slug: "hugo" +draft: false +date: 2022-05-20T10:23:53+08:00 +description: "快速上手hugo!" +tags: + - Go + - Hugo +categories: + - Go +--- +查看Hugo版本号 + +```bash +hugo version +``` + +新建一个Hugo页面 + +``` +hugo new site +``` + +设置主题 + +```bash +cd +git init + +# 设置为 Stack主题 +git clone https://github.com/CaiJimmy/hugo-theme-stack/ themes/hugo-theme-stack +git submodule add https://github.com/CaiJimmy/hugo-theme-stack/ themes/hugo-theme-stack +``` + +部署Hugo到github diff --git a/backend/content/posts/linux-dhcp.md b/backend/content/posts/linux-dhcp.md new file mode 100644 index 0000000..b4804df --- /dev/null +++ b/backend/content/posts/linux-dhcp.md @@ -0,0 +1,67 @@ +--- +title: "Linux部署DHCP服务" +description: Debian下使用docker镜像部署DHCP服务 +date: 2022-05-23T11:11:40+08:00 +draft: false +slug: linux-dhcp +image: +categories: Linux +tags: + - Linux + - DHCP +--- + +拉取`networkboot/dhcpd`镜像 + +```shell +docker pull networkboot/dhcpd +``` + +新建`data/dhcpd.conf`文件 + +```shell +touch /data/dhcpd.conf +``` + +修改`data/dhcpd.conf`文件 + +``` +subnet 204.254.239.0 netmask 255.255.255.224 { +option subnet-mask 255.255.0.0; +option domain-name "cname.nmslwsnd.com"; +option domain-name-servers 8.8.8.8; +range 204.254.239.10 204.254.239.30; +} +``` + +修改`/etc/network/interfaces` + +``` +# The loopback network interface (always required) +auto lo +iface lo inet loopback + +# Get our IP address from any DHCP server +auto dhcp +iface dhcp inet static +address 204.254.239.0 +netmask 255.255.255.224 + +``` + + + +获取帮助命令 + +```shell +docker run -it --rm networkboot/dhcpd man dhcpd.conf +``` + +运行`DHCP`服务 + +```shell +docker run -it --rm --init 
--net host -v "/data":/data networkboot/dhcpd <网卡名称> +# 示例 +docker run -it --rm --init --net host -v "/data":/data networkboot/dhcpd dhcp +``` + diff --git a/backend/content/posts/linux-shell.md b/backend/content/posts/linux-shell.md new file mode 100644 index 0000000..17f74e9 --- /dev/null +++ b/backend/content/posts/linux-shell.md @@ -0,0 +1,36 @@ +--- +title: "Linux Shell" +description: +date: 2022-05-21T10:02:09+08:00 +draft: false +Hidden: true +slug: linux-shell +image: +categories: + Linux +tag: + Linux + Shell +--- + +Linux守护进程:no_good: + +```bash +#!/bin/bash +# nohup.sh +while true +do + # -f 后跟进程名,判断进程是否正在运行 + if [ `pgrep -f | wc -l` -eq 0 ];then + echo "进程已终止" + push + # /dev/null 无输出日志 + nohup ./ > /dev/null 2>&1 & + else + echo "进程正在运行" + fi + # 每隔1分钟检查一次 + sleep 1m +done +``` + diff --git a/backend/content/posts/linux.md b/backend/content/posts/linux.md new file mode 100644 index 0000000..cd92c78 --- /dev/null +++ b/backend/content/posts/linux.md @@ -0,0 +1,65 @@ +--- +title: "Linux" +description: +date: 2022-09-08T15:19:00+08:00 +draft: true +slug: linux +image: +categories: + - Linux +tags: + - Linux +--- + +```bash +# 使用cd 进入到上一个目录 +cd - +``` + +复制和粘贴 + +```bash +ctrl + shift + c +ctrl + shift + v +``` + + + +快速移动 + +```bash +# 移动到行首 +ctrl + a +# 移动到行尾 +ctrl + e +``` + +快速删除 + +```bash +# 删除光标之前的内容 +ctrl + u +# 删除光标之后的内容 +ctrl + k +# 恢复之前删除的内容 +ctrl + y +``` + +不适用cat + +``` +使用less 查看 顶部的文件 +less filename +``` + +使用alt+backspace删除,以单词为单位 + +``` + tcpdump host 1.1.1.1 +``` + +``` +# 并行执行命令 Parallel +find . -type f -name '*.html' -print | parallel gzip +``` + diff --git a/backend/content/posts/loco-rs-framework.md b/backend/content/posts/loco-rs-framework.md deleted file mode 100644 index 89fc686..0000000 --- a/backend/content/posts/loco-rs-framework.md +++ /dev/null @@ -1,44 +0,0 @@ ---- -title: Loco.rs Backend Framework -slug: loco-rs-framework -description: An introduction to Loco.rs, the Rails-inspired web framework for Rust. 
-category: tech -post_type: article -pinned: false -published: true -tags: - - rust - - loco-rs - - backend - - api ---- - -# Introduction to Loco.rs - -Loco.rs is a web and API framework for Rust inspired by Rails. - -## Features - -- MVC Architecture -- SeaORM Integration -- Background Jobs -- Authentication -- CLI Generator - -## Quick Start - -```bash -cargo install loco -loco new myapp -cd myapp -cargo loco start -``` - -## Why Loco.rs? - -- Opinionated but flexible -- Production-ready defaults -- Excellent documentation -- Active community - -Perfect for building APIs and web applications in Rust. diff --git a/backend/content/posts/mysql.md b/backend/content/posts/mysql.md new file mode 100644 index 0000000..a85fae2 --- /dev/null +++ b/backend/content/posts/mysql.md @@ -0,0 +1,569 @@ +--- +title: "mysql个人常用命令及操作" +description: +date: 2021-09-21T16:13:24+08:00 +draft: true +slug: mysql +image: +categories: + - Database +tags: + - Linux + - Mysql + - Sql +--- + +启动`mysql` + +```bash +sudo service mysql start +``` + +使用`root`账户登录`mysql` + +```bash +sudo mysql -u root +``` + +查看数据库信息 + +```mysql +show databases; +``` + +新增数据库 + +```mysql +create database <新增的数据库名>; +# 示例,新增一个名为gradesystem的数据库 +create database gradesystem; + +``` + +切换数据库 + +```mysql +use <切换的数据库名>; +# 示例,切换至gradesystem数据库 +use gradesystem; +``` + +查看数据库中的表 + +```mysql +# 查看数据库中所有的表 +show tables; +``` + +新增表 + +```mysql +# MySQL不区分大小写 +CREATE TABLE student( + sid int NOT NULL AUTO_INCREMENT, + sname varchar(20) NOT NULL, + gender varchar(10) NOT NULL, + PRIMARY KEY(sid) + ); +# 新增一个表名为学生的表。 +# AUTO_INCREMENT, 自动地创建主键字段的值。 +# PRIMARY KEY(sid) 设置主键为sid +CREATE TABLE course( + cid int not null auto_increment, + cname varchar(20) not null, + primary key(cid) +); +# 新增一个表名为课程的表。 +# primary key(cid) 设置主键为cid + +CREATE TABLE mark( + mid int not null auto_increment, + sid int not null, + cid int not null, + score int not null, + primary key(mid), + foreign key(sid) references student(sid), + foreign 
key(cid) references course(cid) +); +# 新增一个表明为mark的表 +# primary key(cid) 设置主键为cid +# foreign 设置外键为sid +# foreign 设置外键为cid + +insert into student values(1,'Tom','male'),(2,'Jack','male'),(3,'Rose','female'); +# 向student表插入数据,sid为1,sname为'Tom',gender为'male' + +insert into course values(1,'math'),(2,'physics'),(3,'chemistry'); +# 向course表插入数据,sid为1,cname为'math' + +insert into mark values(1,1,1,80); +# 向mark表插入数据,mid为1,sid为1,cid为1,score为80 +``` + +### 向数据库插入数据 + +```mysql + source <数据库文件所在目录> + + +``` + +## SELECT语句查询 + + SELECT 语句的基本格式为: + +```bash +SELECT 要查询的列名 FROM 表名字 WHERE 限制条件; +``` + +```mysql +select name,age from employee; +# 查看employee的name列和age列 + +select name,age from employee where age > 25; +# 筛选出age 大于25的结果 + +select name,age,phone from employee where name = 'Mary'; +# 筛选出name为'Mary'的name,age,phone + +select name,age,phone from employee where age < 25 or age >30; +# 筛选出age小于30或大于25的name,age,phone + +select name,age,phone from employee where age > 25 and age < 30; +# 筛选出age大于25且小于30的name,age,phone + +select name,age,phone from employee where age between 25 and 30; +# 筛选出包含25和30的,name,age,phone + +select name,age,phone,in_dpt from employee where in_dpt in('dpt3','dpt4'); +# 筛选出在dpt3或dpt4里面的name,age,phone,in_dpt + +select name,age,phone,in_dpt from employee where in_dpt not in('dpt1','dpt3'); +# 筛选出不在dpt1和dpt3的name,age,phone,in_dpt + + +``` + +## 通配符 + +关键字 **LIKE** 可用于实现模糊查询,常见于搜索功能中。 + +和 LIKE 联用的通常还有通配符,代表未知字符。SQL 中的通配符是 `_` 和 `%` 。其中 `_` 代表一个**未指定**字符,`%` 代表**不定个**未指定字符 + +```mysql +select name,age,phone from employee where phone like '1101__'; +# 筛选出1101开头的六位数字的name,age,phone + +select name,age,phone from employee where name like 'J%'; +# 筛选出name位J开头的人的name,age,phone +``` + +## 排序 + + 为了使查询结果看起来更顺眼,我们可能需要对结果按某一列来排序,这就要用到 **ORDER BY** 排序关键词。默认情况下,**ORDER BY** 的结果是**升序**排列,而使用关键词 **ASC** 和 **DESC** 可指定**升序**或**降序**排序。 比如,我们**按 salary 降序排列**,SQL 语句为 + +```mysql +select name,age,salary,phone from employee order by salary desc; +# salary列按降序排列 +select 
name,age,salary,phone from employee order by salary; +# 不加 DESC 或 ASC 将默认按照升序排列。 +``` + +## SQL 内置函数和计算 + +置函数,这些函数都对 SELECT 的结果做操作: + +| 函数名: | COUNT | SUM | AVG | MAX | MIN | +| -------- | ----- | ---- | -------- | ------ | ------ | +| 作用: | 计数 | 求和 | 求平均值 | 最大值 | 最小值 | + +> 其中 COUNT 函数可用于任何数据类型(因为它只是计数),而 SUM 、AVG 函数都只能对数字类数据类型做计算,MAX 和 MIN 可用于数值、字符串或是日期时间数据类型。 + + + +```mysql +select max(salary) as max_salary,min(salary) from employee; +# 使用as关键字可以给值重命名, +``` + +## 连接查询 + + 在处理多个表时,子查询只有在结果来自一个表时才有用。但如果需要显示两个表或多个表中的数据,这时就必须使用连接 **(join)** 操作。 连接的基本思想是把两个或多个表当作一个新的表来操作,如下: + +```mysql +select id,name,people_num from employee,department where employee.in_dpt = department.dpt_name order by id; +# 这条语句查询出的是,各员工所在部门的人数,其中员工的 id 和 name 来自 employee 表,people_num 来自 department 表: + +select id,name,people_num from employee join department on employee.in_dpt = department.dpt_name order by id; +# 另一个连接语句格式是使用 JOIN ON 语法,刚才的语句等同于以上语句 +``` + +## 删除数据库 + +```mysql +drop database test_01; +# 删除名为test_01的数据库; +``` + +### 修改表 + +重命名一张表的语句有多种形式,以下 3 种格式效果是一样的: + +```sql +RENAME TABLE 原名 TO 新名字; + +ALTER TABLE 原名 RENAME 新名; + +ALTER TABLE 原名 RENAME TO 新名; +``` + +进入数据库 mysql_shiyan : + +```mysql +use mysql_shiyan +``` + +使用命令尝试修改 `table_1` 的名字为 `table_2` : + +```mysql +RENAME TABLE table_1 TO table_2; +``` + +删除一张表的语句,类似于刚才用过的删除数据库的语句,格式是这样的: + +```sql +DROP TABLE 表名字; +``` + +比如我们把 `table_2` 表删除: + +```mysql +DROP TABLE table_2; +``` + +#### 增加一列 + +在表中增加一列的语句格式为: + +```sql +ALTER TABLE 表名字 ADD COLUMN 列名字 数据类型 约束; +或: +ALTER TABLE 表名字 ADD 列名字 数据类型 约束; +``` + +现在 employee 表中有 `id、name、age、salary、phone、in_dpt` 这 6 个列,我们尝试加入 `height` (身高)一个列并指定 DEFAULT 约束: + +```mysql +ALTER TABLE employee ADD height INT(4) DEFAULT 170; +``` + +可以发现:新增加的列,被默认放置在这张表的最右边。如果要把增加的列插入在指定位置,则需要在语句的最后使用 AFTER 关键词(**“AFTER 列 1” 表示新增的列被放置在 “列 1” 的后面**)。 + +> 提醒:语句中的 INT(4) 不是表示整数的字节数,而是表示该值的显示宽度,如果设置填充字符为 0,则 170 显示为 0170 + +比如我们新增一列 `weight`(体重) 放置在 `age`(年龄) 的后面: + +```mysql +ALTER TABLE employee ADD 
weight INT(4) DEFAULT 120 AFTER age; +``` + + + +上面的效果是把新增的列加在某位置的后面,如果想放在第一列的位置,则使用 `FIRST` 关键词,如语句: + +```sql +ALTER TABLE employee ADD test INT(10) DEFAULT 11 FIRST; +``` + +#### 删除一列 + +删除表中的一列和刚才使用的新增一列的语句格式十分相似,只是把关键词 `ADD` 改为 `DROP` ,语句后面不需要有数据类型、约束或位置信息。具体语句格式: + +```sql +ALTER TABLE 表名字 DROP COLUMN 列名字; + +或: ALTER TABLE 表名字 DROP 列名字; +``` + +我们把刚才新增的 `test` 删除: + +```sql +ALTER TABLE employee DROP test; +``` + +#### 重命名一列 + +这条语句其实不只可用于重命名一列,准确地说,它是对一个列做修改(CHANGE) : + +```sql +ALTER TABLE 表名字 CHANGE 原列名 新列名 数据类型 约束; +``` + +> **注意:这条重命名语句后面的 “数据类型” 不能省略,否则重命名失败。** + +当**原列名**和**新列名**相同的时候,指定新的**数据类型**或**约束**,就可以用于修改数据类型或约束。需要注意的是,修改数据类型可能会导致数据丢失,所以要慎重使用。 + +我们用这条语句将 “height” 一列重命名为汉语拼音 “shengao” ,效果如下: + +```mysql +ALTER TABLE employee CHANGE height shengao INT(4) DEFAULT 170; +``` + +#### 改变数据类型 + +要修改一列的数据类型,除了使用刚才的 **CHANGE** 语句外,还可以用这样的 **MODIFY** 语句: + +```sql +ALTER TABLE 表名字 MODIFY 列名字 新数据类型; +``` + +再次提醒,修改数据类型必须小心,因为这可能会导致数据丢失。在尝试修改数据类型之前,请慎重考虑。 + +#### 修改表中某个值 + +大多数时候我们需要做修改的不会是整个数据库或整张表,而是表中的某一个或几个数据,这就需要我们用下面这条命令达到精确的修改: + +```sql +UPDATE 表名字 SET 列1=值1,列2=值2 WHERE 条件; +``` + +比如,我们要把 Tom 的 age 改为 21,salary 改为 3000: + +```mysql +UPDATE employee SET age=21,salary=3000 WHERE name='Tom'; +``` + +> **注意:一定要有 WHERE 条件,否则会出现你不想看到的后果** + +#### 删除一行记录 + +删除表中的一行数据,也必须加上 WHERE 条件,否则整列的数据都会被删除。删除语句: + +```sql +DELETE FROM 表名字 WHERE 条件; +``` + +我们尝试把 Tom 的数据删除: + +```mysql +DELETE FROM employee WHERE name='Tom'; +``` + +#### 索引 + +索引是一种与表有关的结构,它的作用相当于书的目录,可以根据目录中的页码快速找到所需的内容。 + +当表中有大量记录时,若要对表进行查询,没有索引的情况是全表搜索:将所有记录一一取出,和查询条件进行对比,然后返回满足条件的记录。这样做会执行大量磁盘 I/O 操作,并花费大量数据库系统时间。 + +而如果在表中已建立索引,在索引中找到符合查询条件的索引值,通过索引值就可以快速找到表中的数据,可以**大大加快查询速度**。 + +对一张表中的某个列建立索引,有以下两种语句格式: + +```sql +ALTER TABLE 表名字 ADD INDEX 索引名 (列名); + +CREATE INDEX 索引名 ON 表名字 (列名); +``` + +我们用这两种语句分别建立索引: + +```sql +ALTER TABLE employee ADD INDEX idx_id (id); #在employee表的id列上建立名为idx_id的索引 + +CREATE INDEX idx_name ON employee (name); #在employee表的name列上建立名为idx_name的索引 +``` + 
+索引的效果是加快查询速度,当表中数据不够多的时候是感受不出它的效果的。这里我们使用命令 **SHOW INDEX FROM 表名字;** 查看刚才新建的索引: + +![01](https://doc.shiyanlou.com/MySQL/sql-06-01.png) + +在使用 SELECT 语句查询的时候,语句中 WHERE 里面的条件,会**自动判断有没有可用的索引**。 + +比如有一个用户表,它拥有用户名(username)和个人签名(note)两个字段。其中用户名具有唯一性,并且格式具有较强的限制,我们给用户名加上一个唯一索引;个性签名格式多变,而且允许不同用户使用重复的签名,不加任何索引。 + +这时候,如果你要查找某一用户,使用语句 `select * from user where username=?` 和 `select * from user where note=?` 性能是有很大差距的,对**建立了索引的用户名**进行条件查询会比**没有索引的个性签名**条件查询快几倍,在数据量大的时候,这个差距只会更大。 + +一些字段不适合创建索引,比如性别,这个字段存在大量的重复记录无法享受索引带来的速度加成,甚至会拖累数据库,导致数据冗余和额外的 CPU 开销。 + +## 视图 + + + +视图是从一个或多个表中导出来的表,是一种**虚拟存在的表**。它就像一个窗口,通过这个窗口可以看到系统专门提供的数据,这样,用户可以不用看到整个数据库中的数据,而只关心对自己有用的数据。 + +注意理解视图是虚拟的表: + +- 数据库中只存放了视图的定义,而没有存放视图中的数据,这些数据存放在原来的表中; +- 使用视图查询数据时,数据库系统会从原来的表中取出对应的数据; +- 视图中的数据依赖于原来表中的数据,一旦表中数据发生改变,显示在视图中的数据也会发生改变; +- 在使用视图的时候,可以把它当作一张表。 + +创建视图的语句格式为: + +```sql +CREATE VIEW 视图名(列a,列b,列c) AS SELECT 列1,列2,列3 FROM 表名字; +``` + +可见创建视图的语句,后半句是一个 SELECT 查询语句,所以**视图也可以建立在多张表上**,只需在 SELECT 语句中使用**子查询**或**连接查询**,这些在之前的实验已经进行过。 + +现在我们创建一个简单的视图,名为 **v_emp**,包含**v_name**,**v_age**,**v_phone**三个列: + +```sql +CREATE VIEW v_emp (v_name,v_age,v_phone) AS SELECT name,age,phone FROM employee; +``` + +![02](https://doc.shiyanlou.com/MySQL/sql-06-02.png) + +## 导出 + + + +导出与导入是相反的过程,是把数据库某个表中的数据保存到一个文件之中。导出语句基本格式为: + +```sql +SELECT 列1,列2 INTO OUTFILE '文件路径和文件名' FROM 表名字; +``` + +**注意:语句中 “文件路径” 之下不能已经有同名文件。** + +现在我们把整个 employee 表的数据导出到 /var/lib/mysql-files/ 目录下,导出文件命名为 **out.txt** 具体语句为: + +```sql +SELECT * INTO OUTFILE '/var/lib/mysql-files/out.txt' FROM employee; +``` + +用 gedit 可以查看导出文件 `/var/lib/mysql-files/out.txt` 的内容: + +> 也可以使用 `sudo cat /var/lib/mysql-files/out.txt` 命令查看。 + +## 备份 + + + +数据库中的数据十分重要,出于安全性考虑,在数据库的使用中,应该注意使用备份功能。 + +> 备份与导出的区别:导出的文件只是保存数据库中的数据;而备份,则是把数据库的结构,包括数据、约束、索引、视图等全部另存为一个文件。 + +**mysqldump** 是 MySQL 用于备份数据库的实用程序。它主要产生一个 SQL 脚本文件,其中包含从头重新创建数据库所必需的命令 CREATE TABLE INSERT 等。 + +使用 mysqldump 备份的语句: + +```bash +mysqldump -u root 数据库名>备份文件名; #备份整个数据库 + +mysqldump -u root 数据库名 
表名字>备份文件名; #备份整个表 +``` + +> mysqldump 是一个备份工具,因此该命令是在终端中执行的,而不是在 mysql 交互环境下 + +我们尝试备份整个数据库 `mysql_shiyan`,将备份文件命名为 `bak.sql`,先 `Ctrl+D` 退出 MySQL 控制台,再打开 Xfce 终端,在终端中输入命令: + +```bash +cd /home/shiyanlou/ +mysqldump -u root mysql_shiyan > bak.sql; +``` + +使用命令 “ls” 可见已经生成备份文件 `bak.sql`: + +![07](https://doc.shiyanlou.com/MySQL/sql-06-07.png) + +> 你可以用 gedit 查看备份文件的内容,可以看见里面不仅保存了数据,还有所备份的数据库的其它信息。 + +## 恢复 + + + +用备份文件恢复数据库,其实我们早就使用过了。在本次实验的开始,我们使用过这样一条命令: + +```bash +source /tmp/SQL6/MySQL-06.sql +``` + +这就是一条恢复语句,它把 MySQL-06.sql 文件中保存的 `mysql_shiyan` 数据库恢复。 + +还有另一种方式恢复数据库,但是在这之前我们先使用命令新建一个**空的数据库 test**: + +```bash +mysql -u root #因为在上一步已经退出了 MySQL,现在需要重新登录 +CREATE DATABASE test; #新建一个名为test的数据库 +``` + +再次 **Ctrl+D** 退出 MySQL,然后输入语句进行恢复,把刚才备份的 **bak.sql** 恢复到 **test** 数据库: + +```bash +mysql -u root test < bak.sql +``` + +我们输入命令查看 test 数据库的表,便可验证是否恢复成功: + +```bash +mysql -u root # 因为在上一步已经退出了 MySQL,现在需要重新登录 +use test # 连接数据库 test + +SHOW TABLES; # 查看 test 数据库的表 +``` + +可以看见原数据库的 4 张表和 1 个视图,现在已经恢复到 test 数据库中: + +![08](https://doc.shiyanlou.com/MySQL/sql-06-08.png) + +再查看 employee 表的恢复情况: + +![09](https://doc.shiyanlou.com/MySQL/sql-06-09.png) + +## Mysql授权 + +1. 登录MySQL: + +```sql +mysql -u root -p +``` + +2. 进入MySQL并查看用户和主机: + +```sql +use mysql; +select host,user from user; +``` + +3. 更新root用户允许远程连接: + +```sql +update user set host='%' where user='root'; +``` + +4. 设置root用户密码: + +```sql +alter user 'root'@'localhost' identified by 'your_password'; +``` + +注意:不要使用临时密码。 + +5. 授权允许远程访问: + +```sql +grant all privileges on *.* to 'root'@'%' identified by 'password'; +``` + +请将命令中的“password”更改为您的MySQL密码。 + +6. 刷新授权: + +```sql +flush privileges; +``` + +7. 关闭授权: + +```sql +revoke all on *.* from dba@localhost; +``` + +8. 
查看MySQL初始密码: + +```bash +grep "password" /var/log/mysqld.log +``` + +通过以上操作,您的MySQL可以被远程连接并进行管理。请注意在授权和更新用户权限时,应只授权特定的数据库或表格,而不是使用通配符,以提高安全性和减少不必要的权限。在进行远程访问授权时,应只授权特定的IP地址或IP地址段,而不是使用通配符,以减少潜在的安全威胁。同时,建议使用强密码,并定期更换密码以提高安全性。 diff --git a/backend/content/posts/redis.md b/backend/content/posts/redis.md new file mode 100644 index 0000000..61da6f3 --- /dev/null +++ b/backend/content/posts/redis.md @@ -0,0 +1,116 @@ +--- +title: "Redis常用命令" +description: +date: 2022-04-21T09:42:24+08:00 +draft: false +slug: redis +image: +categories: + - Database +tags: + - Database + - Redis +--- + +# 安装`Redis` + +## `Debian`下安装`Redis`服务端 + +```bash +curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg + +echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list + +sudo apt-get update +sudo apt-get install redis +``` + +## `Windows`下安装`Redis` 第三方`GUI`客户端 + +Redis (GUI)管理客户端 + +```bash +winget install qishibo.AnotherRedisDesktopManager +``` + +## `Redis`修改监听端口 + +```bash +vim /etc/redis/redis.conf +``` + +# `Redis`常用命令 + +## `bitMap` + +使用`BitMap`实现签到,`setbit key offset value,` `key`做为时间,`offset`做为用户`id` ,`value`做为签到状态 + +```shell +# 示例 +setbit key offset value key +# 设置用户10086在2022/04/21进行签到 +setbit check_in_2022_04_21 10086 1 +# 获取用户10086是否在2022/04/21签到 +getbit check_in_2022_04_21 10086 +# bitcount 获取20220421签到的用户数量 +# 可选 start和end参数 +# start 和 end 参数的设置和 GETRANGE 命令类似,都可以使用负数值:比如 -1 表示最后一个位,而 -2 表示倒数第二个位 +BITCOUNT 20220421 +# BITOP 对一个或多个保存二进制位的字符串 key 进行位元操作,并将结果保存到 destkey 上 + +# operation 可以是 AND 、 OR 、 NOT 、 XOR 这四种操作中的任意一种: + +# BITOP AND destkey key [key ...] ,对一个或多个 key 求逻辑并,并将结果保存到 destkey 。 + +# BITOP OR destkey key [key ...] ,对一个或多个 key 求逻辑或,并将结果保存到 destkey 。 + +# BITOP XOR destkey key [key ...] 
,对一个或多个 key 求逻辑异或,并将结果保存到 destkey 。 + +# BITOP NOT destkey key ,对给定 key 求逻辑非,并将结果保存到 destkey 。 + +# 除了 NOT 操作之外,其他操作都可以接受一个或多个 key 作为输入。 + +BITOP AND and-result 20220421 20220420 +GETBIT and-result + +``` + +## `Redis` 消息队列 + +``` +# LPUSH key value, Lpush用于生产并添加消息 +# LPOP key,用于取出消息 +``` + +## `Lrem` + +```shell +# count > 0 : 从表头开始向表尾搜索,移除与 VALUE 相等的元素,数量为 COUNT 。 +# count < 0 : 从表尾开始向表头搜索,移除与 VALUE 相等的元素,数量为 COUNT 的绝对值。 +# count = 0 : 移除表中所有与 VALUE 相等的值。 +LREM key count VALUE +``` + +## `Pipeline` + +`Redis` 使用的是客户端-服务器(`CS`)模型和请求/响应协议的 TCP 服务器。这意味着通常情况下一个请求会遵循以下步骤: + +客户端向服务端发送一个查询请求,并监听 Socket 返回,通常是以阻塞模式,等待服务端响应。 +服务端处理命令,并将结果返回给客户端。 +管道(`pipeline`)可以一次性发送多条命令并在执行完后一次性将结果返回,pipeline 通过减少客户端与 redis 的通信次数来实现降低往返延时时间,而且 `Pipeline` 实现的原理是队列,而队列的原理是时先进先出,这样就保证数据的顺序性。 + +通俗点:`pipeline`就是把一组命令进行打包,然后一次性通过网络发送到Redis。同时将执行的结果批量的返回回来 + +```go +// 使用 go-redis + p := Client.Pipeline() + for _, v := range val { + p.LRem("user:watched:"+guid, 0, v) + } +// p.Exec()执行pipeline 请求 + p.Exec() +``` + + + +[本文参考](https://blog.csdn.net/mumuwang1234/article/details/118603697) diff --git a/backend/content/posts/rust-dll.md b/backend/content/posts/rust-dll.md new file mode 100644 index 0000000..47b5a49 --- /dev/null +++ b/backend/content/posts/rust-dll.md @@ -0,0 +1,169 @@ +--- +title: "手把手教你用Rust进行Dll注入" +description: 我是一个懒惰的男孩,我甚至懒的不想按键盘上的按键和挪动鼠标.可是我还是想玩游戏,该怎么做呢?通过 google 了解到我可以通过将我自己编写的dll文件注入到目标程序内,来实现这个事情. +date: 2022-09-17T15:10:26+08:00 +draft: false +slug: rust-dll +image: +categories: + - Rust +tags: + - Rust + - Dll +--- + +# 前言 + +我是一个懒惰的男孩,我甚至懒的不想按键盘上的按键和挪动鼠标.可是我还是想玩游戏,该怎么做呢? + +通过google了解到我可以通过将我自己编写的 `dll` 文件注入到目标程序内,来实现这个事情. + +将大象放在冰箱里需要几步? + +答案是三步。 + +# `snes9x` 模拟器 `Dll` 注入实战 + +## 一、现在我们需要进行第一步,生成 `Dll` 文件 + +准确说是我们需要生成符合 `C` 标准的 `dll` 文件,如果你使用 `go` 语言,直接使用 `Cgo` 与 `C` 进行互动,即可生成符合 `C` 标准的 `dll` . + +但是很明显,我要用 `Rust` 来做这件事。 + +由于 `Rust` 拥有出色的所有权机制,和其他语言的交互会导致 `Rust` 失去这个特性,所以这一块是属于 `Unsafe` 区域的。 + +`Rust` 默认生成的 `Dll` 是提供给 `Rust` 语言来调用的,而非C系语言的 `dll`. 
+ +我们现在来生成 `C` 系语言的 `Dll` 吧。 + +### 1.新建项目 `lib` 目录 `lib` 目录主要作为库文件以方便其他开发者调用 + +```bash +# 新建库项目 +Cargo new --lib +Cargo new --lib joy +``` + +### 2.修改 `Cargo.toml` 文件 增加 `bin` 区域 + +```toml +[package] +name = "joy" +version = "0.1.0" +edition = "2021" + +[lib] +name = "joy" +path = "src/lib.rs" +crate-type = ["cdylib"] + +[[bin]] +name = "joyrun" +path = "src/main.rs" +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +``` + +```bash +# 为项目导入依赖ctor来生成符合c标准的dll +cargo add ctor +``` + +### 3.修改 `lib.rs` 使用 `ctor` + +```rust +// lib.rs +#[ctor::ctor] +fn ctor() { + println!("我是一个dll") +} +``` + +#### 4.编译项目生成 `joy.dll` 以及 `joyrun.exe` + +```bash +cargo build +``` + +现在我们有了我们自己的 `dll` 文件,该如何将他注入到目标的进程呢? + +## 二、使用 `dll-syringe` 进行dll注入 + +``` +cargo add dll-syringe +``` + +### 1.修改main.rs 将刚刚编写的dll注入到目标应用 + +```rust +// main.rs +use dll_syringe::{Syringe, process::OwnedProcess}; + +fn main() { + // 通过进程名找到目标进程 + let target_process = OwnedProcess::find_first_by_name("snes9x").unwrap(); + + // 新建一个注入器 + let syringe = Syringe::for_process(target_process); + + // 将我们刚刚编写的dll加载进去 + let injected_payload = syringe.inject("joy.dll").unwrap(); + + // do something else + + // 将我们刚刚注入的dll从目标程序内移除 + syringe.eject(injected_payload).unwrap(); +} +``` + +### 2.运行项目 + +```shell +# 运行项目 +cargo run +``` + +此时你可能会遇到一个新问题,我的`dll`已经加载进目标程序了,为什么没有打印 "我是一个dll" + +### 3.解决控制台无输出问题 + +这是由于目标程序没有控制台,所以我们没有看到 `dll` 的输出,接下来让我们来获取 `dll` 的输出。 + +此时我们可以使用 `TCP` 交互的方式或采用 `OutputDebugStringA function (debugapi.h)` 来进行打印 + +`OutputDebugStringA` ,需要额外开启`features` `Win32_System_Diagnostics_Debug` + +```rust +// Rust Unsafe fn +// windows::Win32::System::Diagnostics::Debug::OutputDebugStringA +pub unsafe fn OutputDebugStringA<'a, P0>(lpoutputstring: P0) +where + P0: Into, +// Required features: "Win32_System_Diagnostics_Debug" +``` + +采用 `Tcp` 通信交互 + +```rust +// 在lib.rs 新建tcp客户端 +let stream = TcpStream::connect("127.0.0.1:7331").unwrap(); +``` + +```rust + 
// 在main.rs 新建tcp服务端 + let (mut stream, addr) = listener.accept()?; + info!(%addr,"Accepted!"); + let mut buf = vec![0u8; 1024]; + let mut stdout = std::io::stdout(); + while let Ok(n) = stream.read(&mut buf[..]) { + if n == 0 { + break; + } + stdout.write_all(&buf[..n])? + } +``` + +```shell +# 运行项目 +cargo run +# 运行之后,大功告成,成功在Tcp服务端看到了,客户端对我们发起了请求。 +``` diff --git a/backend/content/posts/rust-programming-tips.md b/backend/content/posts/rust-programming-tips.md deleted file mode 100644 index e51f1f1..0000000 --- a/backend/content/posts/rust-programming-tips.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -title: Rust Programming Tips -slug: rust-programming-tips -description: Essential tips for Rust developers including ownership, pattern matching, and error handling. -category: tech -post_type: article -pinned: false -published: true -tags: - - rust - - programming - - tips ---- - -# Rust Programming Tips - -Here are some essential tips for Rust developers: - -## 1. Ownership and Borrowing - -Understanding ownership is crucial in Rust. Every value has an owner, and there can only be one owner at a time. - -## 2. Pattern Matching - -Use `match` expressions for exhaustive pattern matching: - -```rust -match result { - Ok(value) => println!("Success: {}", value), - Err(e) => println!("Error: {}", e), -} -``` - -## 3. Error Handling - -Use `Result` and `Option` types effectively with the `?` operator. - -Happy coding! 
diff --git a/backend/content/posts/rust-serde.md b/backend/content/posts/rust-serde.md new file mode 100644 index 0000000..0a6d8a0 --- /dev/null +++ b/backend/content/posts/rust-serde.md @@ -0,0 +1,96 @@ +--- +title: "Rust使用Serde进行序列化及反序列化" +description: 这篇文章将介绍如何在Rust编程语言中使用Serde库进行序列化和反序列化操作。Serde是一个广泛使用的序列化和反序列化库,能够支持JSON、BSON、CBOR、MessagePack和YAML等常见数据格式。 +date: 2022-07-25T14:02:22+08:00 +draft: false +slug: rust-serde +image: +categories: + - Rust +tags: + - Rust + - Xml +--- + +# 开始之前 + +```toml +# 在Cargo.toml 新增以下依赖 +[dependencies] +serde = { version = "1.0.140", features = ["derive"] } +serde_json = "1.0.82" +serde_yaml = "0.8" +serde_urlencoded = "0.7.1" +# 使用yaserde解析xml +yaserde = "0.8.0" +yaserde_derive = "0.8.0" +``` + +## `Serde`通用规则(`json`,`yaml`,`xml`) + +### 1.使用`Serde`宏通过具体结构实现序列化及反序列化 + +```rust +use serde::{Deserialize, Serialize}; +// 为结构体实现 Serialize(序列化)属性和Deserialize(反序列化) +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct Person { + // 将该字段名称修改为lastname + #[serde(rename = "lastname")] + name: String, + // 反序列化及序列化时忽略该字段(nickname) + #[serde(skip)] + nickname: String, + // 分别设置序列化及反序列化时输出的字段名称 + #[serde(rename(serialize = "serialize_id", deserialize = "deserialize_id"))] + id: i32, + // 为age设置默认值 + #[serde(default)] + age: i32, + +} +``` + +### 2.使用`serde_json`序列化及反序列化 + +```rust +use serde_json::{json, Value}; +let v:serde_json::Value = json!( + { + "x":20.0, + "y":15.0 + } +); +println!("x:{:#?},y:{:#?}",v["x"],v["y"]); // x:20.0, y:15.0 +``` + +### 3.使用`Serde`宏统一格式化输入、输出字段名称 + +| 方法名 | 方法效果 | +| ------------------------------- | ------------------------------------------------------------ | +| `PascalCase` | 首字母为大写的驼峰式命名,推荐结构体、枚举等名称以及`Yaml`配置文件读取使用。 | +| `camelCase` | 首字母为小写的驼峰式命名,推荐`Yaml`配置文件读取使用。 | +| `snake_case` | 小蛇形命名,用下划线"`_`"连接单词,推荐函数命名以及变量名称使用此种方式。 | +| `SCREAMING_SNAKE_CASE` | 大蛇形命名,单词均为大写形式,用下划线"`_`"连接单词。推荐常数及全局变量使用此种方式。 | +| `kebab-case`(小串烤肉) | 同`snake_case`,使用中横线"`-`"替换了下划线"`_`"。 | +| `SCREAMING-KEBAB-CASE`(大串烤肉) | 
同`SCREAMING_SNAKE_CASE`,使用中横线"`-`"替换了下划线"`_`"。 | + +示例: + +```rust +#[serde(rename_all = "PascalCase")] +pub struct App { + /// 统一格式化输入、输出字段名称 + /// #[serde(rename_all = "camelCase")] + /// #[serde(rename_all = "snake_case")] + /// #[serde(rename_all = "SCREAMING_SNAKE_CASE")] + /// 仅设置 + version: String, + app_name: String, + host: String, +} +``` + +[本文参考:yaserde](https://github.com/media-io/yaserde) + +[本文参考:magiclen](https://magiclen.org/rust-serde/) \ No newline at end of file diff --git a/backend/content/posts/rust-sqlx.md b/backend/content/posts/rust-sqlx.md new file mode 100644 index 0000000..40d2539 --- /dev/null +++ b/backend/content/posts/rust-sqlx.md @@ -0,0 +1,37 @@ +--- +title: "Rust Sqlx" +description: +date: 2022-08-29T13:55:08+08:00 +draft: true +slug: rust-sqlx +image: +categories: + - +tags: + - +--- + +# sqlx-cli + +## 创建 migration + +```shell +sqlx migrate add categories +``` + +```sql +-- Add migration script here +CREATE TABLE IF NOT EXISTS categories( + id INT AUTO_INCREMENT PRIMARY KEY, + type_id INT UNIQUE NOT NULL, + parent_id INT NOT NULL, + name TEXT UNIQUE NOT NULL + ); +``` + +## 运行 migration + +```sh +sqlx migrate run +``` + diff --git a/backend/content/posts/terminal-ui-design.md b/backend/content/posts/terminal-ui-design.md deleted file mode 100644 index 69978ce..0000000 --- a/backend/content/posts/terminal-ui-design.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -title: Terminal UI Design Principles -slug: terminal-ui-design -description: Learn the key principles of designing beautiful terminal-style user interfaces. -category: design -post_type: article -pinned: false -published: true -tags: - - design - - terminal - - ui ---- - -# Terminal UI Design Principles - -Terminal-style interfaces are making a comeback in modern web design. - -## Key Elements - -1. Monospace fonts -2. Dark themes -3. Command prompts -4. ASCII art -5. 
Blinking cursor - -## Color Palette - -- Background: `#0d1117` -- Text: `#c9d1d9` -- Accent: `#58a6ff` -- Success: `#3fb950` -- Warning: `#d29922` -- Error: `#f85149` - -## Implementation - -Use CSS to create the terminal aesthetic while maintaining accessibility. diff --git a/backend/content/posts/tmux.md b/backend/content/posts/tmux.md new file mode 100644 index 0000000..a5135ae --- /dev/null +++ b/backend/content/posts/tmux.md @@ -0,0 +1,52 @@ +--- +title: "如何在 Tmux 会话窗格中发送命令" +description: 本文介绍了在 Tmux 中发送命令的步骤,包括新建分离会话、发送命令至会话窗格、连接会话窗格、以及发送特殊命令。通过本文,读者将了解如何在 Tmux 中发送命令,并能够更加高效地使用 Tmux。 +date: 2022-08-02T14:54:08+08:00 +draft: false +slug: tmux +image: +categories: + - Linux +tags: + - Linux + - Tmux +--- + +## 在 Tmux 会话窗格中发送命令的方法 + +在 `Tmux` 中,可以使用 `send-keys` 命令将命令发送到会话窗格中。以下是在 `Tmux` 中发送命令的步骤: + +### 1. 新建一个分离(`Detached`)会话 + +使用以下命令新建一个分离会话: + +```bash +tmux new -d -s mySession +``` + +### 2. 发送命令至会话窗格 + +使用以下命令将命令发送到会话窗格: + +```bash +tmux send-keys -t mySession "echo 'Hello World!'" ENTER +``` + +这将发送 `echo 'Hello World!'` 命令,并模拟按下回车键(`ENTER`),以在会话窗格中执行该命令。 + +### 3. 连接(`Attach`)会话窗格 + +使用以下命令连接会话窗格: + +```bash +tmux a -t mySession +``` + +这将连接到名为 `mySession` 的会话窗格。 + +### 4. 发送特殊命令 + +要发送特殊命令,例如清除当前行或使用管理员权限运行命令,请使用以下命令: + +- 清除当前行:`tmux send-keys C-c` +- 以管理员身份运行命令:`sudo tmux send-keys ...` diff --git a/backend/content/posts/welcome-to-termi.md b/backend/content/posts/welcome-to-termi.md deleted file mode 100644 index 0c2dc7c..0000000 --- a/backend/content/posts/welcome-to-termi.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -title: Welcome to Termi Blog -slug: welcome-to-termi -description: Welcome to our new blog built with Astro and Loco.rs backend. -category: general -post_type: article -pinned: true -published: true -tags: - - welcome - - astro - - loco-rs ---- - -# Welcome to Termi Blog - -This is the first post on our new blog built with Astro and Loco.rs backend. 
- -## Features - -- Fast performance with Astro -- Terminal-style UI design -- Comments system -- Friend links -- Tags and categories - -## Code Example - -```rust -fn main() { - println!("Hello, Termi!"); -} -``` - -Stay tuned for more posts! diff --git a/backend/migration/src/lib.rs b/backend/migration/src/lib.rs index 6d93c6a..d497c28 100644 --- a/backend/migration/src/lib.rs +++ b/backend/migration/src/lib.rs @@ -17,6 +17,10 @@ mod m20260328_000006_add_ai_to_site_settings; mod m20260328_000007_create_ai_chunks; mod m20260328_000008_enable_pgvector_for_ai_chunks; mod m20260328_000009_add_paragraph_comments; +mod m20260328_000010_add_paragraph_comments_toggle_to_site_settings; +mod m20260328_000011_add_post_images_and_music_playlist; +mod m20260329_000012_add_link_url_to_reviews; +mod m20260329_000013_add_ai_provider_presets_to_site_settings; pub struct Migrator; #[async_trait::async_trait] @@ -38,6 +42,10 @@ impl MigratorTrait for Migrator { Box::new(m20260328_000007_create_ai_chunks::Migration), Box::new(m20260328_000008_enable_pgvector_for_ai_chunks::Migration), Box::new(m20260328_000009_add_paragraph_comments::Migration), + Box::new(m20260328_000010_add_paragraph_comments_toggle_to_site_settings::Migration), + Box::new(m20260328_000011_add_post_images_and_music_playlist::Migration), + Box::new(m20260329_000012_add_link_url_to_reviews::Migration), + Box::new(m20260329_000013_add_ai_provider_presets_to_site_settings::Migration), // inject-above (do not remove this comment) ] } diff --git a/backend/migration/src/m20260328_000009_add_paragraph_comments.rs b/backend/migration/src/m20260328_000009_add_paragraph_comments.rs index 73ef7b4..627de97 100644 --- a/backend/migration/src/m20260328_000009_add_paragraph_comments.rs +++ b/backend/migration/src/m20260328_000009_add_paragraph_comments.rs @@ -42,7 +42,11 @@ impl MigrationTrait for Migration { .alter_table( Table::alter() .table(table.clone()) - 
.add_column(ColumnDef::new(Alias::new("paragraph_excerpt")).string().null()) + .add_column( + ColumnDef::new(Alias::new("paragraph_excerpt")) + .string() + .null(), + ) .to_owned(), ) .await?; diff --git a/backend/migration/src/m20260328_000010_add_paragraph_comments_toggle_to_site_settings.rs b/backend/migration/src/m20260328_000010_add_paragraph_comments_toggle_to_site_settings.rs new file mode 100644 index 0000000..2f44832 --- /dev/null +++ b/backend/migration/src/m20260328_000010_add_paragraph_comments_toggle_to_site_settings.rs @@ -0,0 +1,48 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + if !manager + .has_column("site_settings", "paragraph_comments_enabled") + .await? + { + manager + .alter_table( + Table::alter() + .table(Alias::new("site_settings")) + .add_column( + ColumnDef::new(Alias::new("paragraph_comments_enabled")) + .boolean() + .null() + .default(true), + ) + .to_owned(), + ) + .await?; + } + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + if manager + .has_column("site_settings", "paragraph_comments_enabled") + .await? 
+ { + manager + .alter_table( + Table::alter() + .table(Alias::new("site_settings")) + .drop_column(Alias::new("paragraph_comments_enabled")) + .to_owned(), + ) + .await?; + } + + Ok(()) + } +} diff --git a/backend/migration/src/m20260328_000011_add_post_images_and_music_playlist.rs b/backend/migration/src/m20260328_000011_add_post_images_and_music_playlist.rs new file mode 100644 index 0000000..f99facb --- /dev/null +++ b/backend/migration/src/m20260328_000011_add_post_images_and_music_playlist.rs @@ -0,0 +1,75 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let posts_table = Alias::new("posts"); + let site_settings_table = Alias::new("site_settings"); + + if !manager.has_column("posts", "images").await? { + manager + .alter_table( + Table::alter() + .table(posts_table.clone()) + .add_column(ColumnDef::new(Alias::new("images")).json_binary().null()) + .to_owned(), + ) + .await?; + } + + if !manager + .has_column("site_settings", "music_playlist") + .await? + { + manager + .alter_table( + Table::alter() + .table(site_settings_table.clone()) + .add_column( + ColumnDef::new(Alias::new("music_playlist")) + .json_binary() + .null(), + ) + .to_owned(), + ) + .await?; + } + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let posts_table = Alias::new("posts"); + let site_settings_table = Alias::new("site_settings"); + + if manager.has_column("posts", "images").await? { + manager + .alter_table( + Table::alter() + .table(posts_table) + .drop_column(Alias::new("images")) + .to_owned(), + ) + .await?; + } + + if manager + .has_column("site_settings", "music_playlist") + .await? 
+ { + manager + .alter_table( + Table::alter() + .table(site_settings_table) + .drop_column(Alias::new("music_playlist")) + .to_owned(), + ) + .await?; + } + + Ok(()) + } +} diff --git a/backend/migration/src/m20260329_000012_add_link_url_to_reviews.rs b/backend/migration/src/m20260329_000012_add_link_url_to_reviews.rs new file mode 100644 index 0000000..d152ba4 --- /dev/null +++ b/backend/migration/src/m20260329_000012_add_link_url_to_reviews.rs @@ -0,0 +1,35 @@ +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Reviews::Table) + .add_column(ColumnDef::new(Reviews::LinkUrl).string().null()) + .to_owned(), + ) + .await + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + manager + .alter_table( + Table::alter() + .table(Reviews::Table) + .drop_column(Reviews::LinkUrl) + .to_owned(), + ) + .await + } +} + +#[derive(DeriveIden)] +enum Reviews { + Table, + LinkUrl, +} diff --git a/backend/migration/src/m20260329_000013_add_ai_provider_presets_to_site_settings.rs b/backend/migration/src/m20260329_000013_add_ai_provider_presets_to_site_settings.rs new file mode 100644 index 0000000..64a12fd --- /dev/null +++ b/backend/migration/src/m20260329_000013_add_ai_provider_presets_to_site_settings.rs @@ -0,0 +1,98 @@ +use sea_orm::{DbBackend, Statement}; +use sea_orm_migration::prelude::*; + +#[derive(DeriveMigrationName)] +pub struct Migration; + +#[async_trait::async_trait] +impl MigrationTrait for Migration { + async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let table = Alias::new("site_settings"); + + if !manager.has_column("site_settings", "ai_providers").await? 
{ + manager + .alter_table( + Table::alter() + .table(table.clone()) + .add_column( + ColumnDef::new(Alias::new("ai_providers")) + .json_binary() + .null(), + ) + .to_owned(), + ) + .await?; + } + + if !manager + .has_column("site_settings", "ai_active_provider_id") + .await? + { + manager + .alter_table( + Table::alter() + .table(table) + .add_column( + ColumnDef::new(Alias::new("ai_active_provider_id")) + .string() + .null(), + ) + .to_owned(), + ) + .await?; + } + + manager + .get_connection() + .execute(Statement::from_string( + DbBackend::Postgres, + r#" + UPDATE site_settings + SET + ai_providers = jsonb_build_array( + jsonb_strip_nulls( + jsonb_build_object( + 'id', 'default', + 'name', COALESCE(NULLIF(trim(ai_provider), ''), '默认提供商'), + 'provider', COALESCE(NULLIF(trim(ai_provider), ''), 'newapi'), + 'api_base', NULLIF(trim(ai_api_base), ''), + 'api_key', NULLIF(trim(ai_api_key), ''), + 'chat_model', NULLIF(trim(ai_chat_model), '') + ) + ) + ), + ai_active_provider_id = COALESCE(NULLIF(trim(ai_active_provider_id), ''), 'default') + WHERE ai_providers IS NULL + AND ( + COALESCE(trim(ai_provider), '') <> '' + OR COALESCE(trim(ai_api_base), '') <> '' + OR COALESCE(trim(ai_api_key), '') <> '' + OR COALESCE(trim(ai_chat_model), '') <> '' + ) + "# + .to_string(), + )) + .await?; + + Ok(()) + } + + async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> { + let table = Alias::new("site_settings"); + + for column in ["ai_active_provider_id", "ai_providers"] { + if manager.has_column("site_settings", column).await? 
{ + manager + .alter_table( + Table::alter() + .table(table.clone()) + .drop_column(Alias::new(column)) + .to_owned(), + ) + .await?; + } + } + + Ok(()) + } +} diff --git a/backend/playwright-backend.err.log b/backend/playwright-backend.err.log new file mode 100644 index 0000000..ef45a51 --- /dev/null +++ b/backend/playwright-backend.err.log @@ -0,0 +1,3 @@ + Compiling termi-api v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend) + Finished `dev` profile [unoptimized + debuginfo] target(s) in 17.22s + Running `target\debug\termi_api-cli.exe start` diff --git a/backend/playwright-backend.out.log b/backend/playwright-backend.out.log new file mode 100644 index 0000000..e69de29 diff --git a/backend/src/app.rs b/backend/src/app.rs index 59a91bc..0419f63 100644 --- a/backend/src/app.rs +++ b/backend/src/app.rs @@ -277,6 +277,27 @@ impl Hooks for App { }) .filter(|items| !items.is_empty()) .map(|items| serde_json::json!(items)); + let music_playlist = settings["music_playlist"] + .as_array() + .map(|items| { + items + .iter() + .filter_map(|item| { + let title = item["title"].as_str()?.trim(); + let url = item["url"].as_str()?.trim(); + if title.is_empty() || url.is_empty() { + None + } else { + Some(serde_json::json!({ + "title": title, + "url": url, + })) + } + }) + .collect::>() + }) + .filter(|items| !items.is_empty()) + .map(serde_json::Value::Array); let item = site_settings::ActiveModel { id: Set(settings["id"].as_i64().unwrap_or(1) as i32), @@ -317,7 +338,11 @@ impl Hooks for App { .map(ToString::to_string)), location: Set(settings["location"].as_str().map(ToString::to_string)), tech_stack: Set(tech_stack), + music_playlist: Set(music_playlist), ai_enabled: Set(settings["ai_enabled"].as_bool()), + paragraph_comments_enabled: Set(settings["paragraph_comments_enabled"] + .as_bool() + .or(Some(true))), ai_provider: Set(settings["ai_provider"].as_str().map(ToString::to_string)), ai_api_base: Set(settings["ai_api_base"].as_str().map(ToString::to_string)), ai_api_key: 
Set(settings["ai_api_key"].as_str().map(ToString::to_string)), @@ -353,6 +378,11 @@ impl Hooks for App { let status = review["status"].as_str().unwrap_or("completed").to_string(); let description = review["description"].as_str().unwrap_or("").to_string(); let cover = review["cover"].as_str().unwrap_or("📝").to_string(); + let link_url = review["link_url"] + .as_str() + .map(str::trim) + .filter(|value| !value.is_empty()) + .map(ToString::to_string); let tags_vec = review["tags"] .as_array() .map(|arr| { @@ -376,6 +406,7 @@ impl Hooks for App { status: Set(Some(status)), description: Set(Some(description)), cover: Set(Some(cover)), + link_url: Set(link_url), tags: Set(Some(serde_json::to_string(&tags_vec).unwrap_or_default())), ..Default::default() }; diff --git a/backend/src/controllers/admin.rs b/backend/src/controllers/admin.rs index 4d80db3..2368007 100644 --- a/backend/src/controllers/admin.rs +++ b/backend/src/controllers/admin.rs @@ -167,6 +167,7 @@ struct ReviewRow { description: String, tags_input: String, cover: String, + link_url: String, api_url: String, } @@ -205,6 +206,7 @@ pub struct ReviewForm { description: String, tags: String, cover: String, + link_url: String, } fn url_encode(value: &str) -> String { @@ -704,6 +706,7 @@ pub async fn posts_create( tags: parse_tag_input(&form.tags), post_type: normalize_admin_text(&form.post_type), image: Some(normalize_admin_text(&form.image)), + images: Vec::new(), pinned: form.pinned.is_some(), published: form.published.is_some(), }, @@ -818,8 +821,14 @@ pub async fn comments_admin( let text_filter = normalized_filter_value(query.q.as_deref()); let total_count = items.len(); - let article_count = items.iter().filter(|comment| comment.scope != "paragraph").count(); - let paragraph_count = items.iter().filter(|comment| comment.scope == "paragraph").count(); + let article_count = items + .iter() + .filter(|comment| comment.scope != "paragraph") + .count(); + let paragraph_count = items + .iter() + .filter(|comment| 
comment.scope == "paragraph") + .count(); let pending_count = items .iter() .filter(|comment| !comment.approved.unwrap_or(false)) @@ -827,12 +836,7 @@ pub async fn comments_admin( let author_by_id = items .iter() - .map(|comment| { - ( - comment.id, - non_empty(comment.author.as_deref(), "匿名"), - ) - }) + .map(|comment| (comment.id, non_empty(comment.author.as_deref(), "匿名"))) .collect::>(); let post_options = items @@ -1263,6 +1267,7 @@ pub async fn reviews_admin( description: non_empty(review.description.as_deref(), ""), tags_input: review_tags_input(review.tags.as_deref()), cover: non_empty(review.cover.as_deref(), "🎮"), + link_url: non_empty(review.link_url.as_deref(), ""), api_url: format!("/api/reviews/{}", review.id), }) .collect::>(); @@ -1290,6 +1295,7 @@ pub async fn reviews_admin( "description": "", "tags": "", "cover": "🎮", + "link_url": "", }), ); context.insert("rows".into(), json!(rows)); @@ -1314,6 +1320,10 @@ pub async fn reviews_create( serde_json::to_string(&parse_review_tags(&form.tags)).unwrap_or_default(), )), cover: Set(Some(normalize_admin_text(&form.cover))), + link_url: Set({ + let value = normalize_admin_text(&form.link_url); + (!value.is_empty()).then_some(value) + }), ..Default::default() } .insert(&ctx.db) @@ -1345,6 +1355,10 @@ pub async fn reviews_update( serde_json::to_string(&parse_review_tags(&form.tags)).unwrap_or_default(), )); model.cover = Set(Some(normalize_admin_text(&form.cover))); + model.link_url = Set({ + let value = normalize_admin_text(&form.link_url); + (!value.is_empty()).then_some(value) + }); let _ = model.update(&ctx.db).await?; Ok(format::redirect("/admin/reviews")) diff --git a/backend/src/controllers/admin_api.rs b/backend/src/controllers/admin_api.rs index 1276583..14ba21d 100644 --- a/backend/src/controllers/admin_api.rs +++ b/backend/src/controllers/admin_api.rs @@ -7,7 +7,10 @@ use serde::{Deserialize, Serialize}; use crate::{ controllers::{ - admin::{admin_username, check_auth, is_admin_logged_in, 
set_admin_logged_in, validate_admin_credentials}, + admin::{ + admin_username, check_auth, is_admin_logged_in, set_admin_logged_in, + validate_admin_credentials, + }, site_settings::{self, SiteSettingsPayload}, }, models::_entities::{ai_chunks, comments, friend_links, posts, reviews}, @@ -120,11 +123,15 @@ pub struct AdminSiteSettingsResponse { pub social_email: Option, pub location: Option, pub tech_stack: Vec, + pub music_playlist: Vec, pub ai_enabled: bool, + pub paragraph_comments_enabled: bool, pub ai_provider: Option, pub ai_api_base: Option, pub ai_api_key: Option, pub ai_chat_model: Option, + pub ai_providers: Vec, + pub ai_active_provider_id: Option, pub ai_embedding_model: Option, pub ai_system_prompt: Option, pub ai_top_k: Option, @@ -140,6 +147,29 @@ pub struct AdminAiReindexResponse { pub last_indexed_at: Option, } +#[derive(Clone, Debug, Deserialize)] +pub struct AdminAiProviderTestRequest { + pub provider: site_settings::AiProviderConfig, +} + +#[derive(Clone, Debug, Serialize)] +pub struct AdminAiProviderTestResponse { + pub provider: String, + pub endpoint: String, + pub chat_model: String, + pub reply_preview: String, +} + +#[derive(Clone, Debug, Deserialize)] +pub struct AdminPostMetadataRequest { + pub markdown: String, +} + +#[derive(Clone, Debug, Deserialize)] +pub struct AdminPostPolishRequest { + pub markdown: String, +} + fn format_timestamp( value: Option, pattern: &str, @@ -166,10 +196,27 @@ fn tech_stack_values(value: &Option) -> Vec { .collect() } +fn music_playlist_values( + value: &Option, +) -> Vec { + value + .as_ref() + .and_then(serde_json::Value::as_array) + .cloned() + .unwrap_or_default() + .into_iter() + .filter_map(|item| serde_json::from_value::(item).ok()) + .filter(|item| !item.title.trim().is_empty() && !item.url.trim().is_empty()) + .collect() +} + fn build_settings_response( item: crate::models::_entities::site_settings::Model, ai_chunks_count: u64, ) -> AdminSiteSettingsResponse { + let ai_providers = 
site_settings::ai_provider_configs(&item); + let ai_active_provider_id = site_settings::active_ai_provider_id(&item); + AdminSiteSettingsResponse { id: item.id, site_name: item.site_name, @@ -188,11 +235,15 @@ fn build_settings_response( social_email: item.social_email, location: item.location, tech_stack: tech_stack_values(&item.tech_stack), + music_playlist: music_playlist_values(&item.music_playlist), ai_enabled: item.ai_enabled.unwrap_or(false), + paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true), ai_provider: item.ai_provider, ai_api_base: item.ai_api_base, ai_api_key: item.ai_api_key, ai_chat_model: item.ai_chat_model, + ai_providers, + ai_active_provider_id, ai_embedding_model: item.ai_embedding_model, ai_system_prompt: item.ai_system_prompt, ai_top_k: item.ai_top_k, @@ -375,8 +426,9 @@ pub async fn update_site_settings( check_auth()?; let current = site_settings::load_current(&ctx).await?; - let mut item = current.into_active_model(); + let mut item = current; params.apply(&mut item); + let item = item.into_active_model(); let updated = item.update(&ctx.db).await?; let ai_chunks_count = ai_chunks::Entity::find().count(&ctx.db).await?; @@ -390,10 +442,51 @@ pub async fn reindex_ai(State(ctx): State) -> Result { format::json(AdminAiReindexResponse { indexed_chunks: summary.indexed_chunks, - last_indexed_at: format_timestamp(summary.last_indexed_at.map(Into::into), "%Y-%m-%d %H:%M:%S UTC"), + last_indexed_at: format_timestamp( + summary.last_indexed_at.map(Into::into), + "%Y-%m-%d %H:%M:%S UTC", + ), }) } +#[debug_handler] +pub async fn test_ai_provider(Json(payload): Json) -> Result { + check_auth()?; + + let result = ai::test_provider_connectivity( + &payload.provider.provider, + payload.provider.api_base.as_deref().unwrap_or_default(), + payload.provider.api_key.as_deref().unwrap_or_default(), + payload.provider.chat_model.as_deref().unwrap_or_default(), + ) + .await?; + + format::json(AdminAiProviderTestResponse { + provider: 
result.provider, + endpoint: result.endpoint, + chat_model: result.chat_model, + reply_preview: result.reply_preview, + }) +} + +#[debug_handler] +pub async fn generate_post_metadata( + State(ctx): State, + Json(payload): Json, +) -> Result { + check_auth()?; + format::json(ai::generate_post_metadata(&ctx, &payload.markdown).await?) +} + +#[debug_handler] +pub async fn polish_post_markdown( + State(ctx): State, + Json(payload): Json, +) -> Result { + check_auth()?; + format::json(ai::polish_post_markdown(&ctx, &payload.markdown).await?) +} + pub fn routes() -> Routes { Routes::new() .prefix("/api/admin") @@ -405,4 +498,7 @@ pub fn routes() -> Routes { .add("/site-settings", patch(update_site_settings)) .add("/site-settings", put(update_site_settings)) .add("/ai/reindex", post(reindex_ai)) + .add("/ai/test-provider", post(test_ai_provider)) + .add("/ai/post-metadata", post(generate_post_metadata)) + .add("/ai/polish-post", post(polish_post_markdown)) } diff --git a/backend/src/controllers/ai.rs b/backend/src/controllers/ai.rs index d82ac24..4a0a40d 100644 --- a/backend/src/controllers/ai.rs +++ b/backend/src/controllers/ai.rs @@ -56,9 +56,8 @@ fn format_timestamp(value: Option>) -> Option { } fn sse_bytes(event: &str, payload: &T) -> Bytes { - let data = serde_json::to_string(payload).unwrap_or_else(|_| { - "{\"message\":\"failed to serialize SSE payload\"}".to_string() - }); + let data = serde_json::to_string(payload) + .unwrap_or_else(|_| "{\"message\":\"failed to serialize SSE payload\"}".to_string()); Bytes::from(format!("event: {event}\ndata: {data}\n\n")) } @@ -127,7 +126,8 @@ fn extract_stream_delta(value: &Value) -> Option { } } - value.get("choices") + value + .get("choices") .and_then(Value::as_array) .and_then(|choices| choices.first()) .and_then(|choice| choice.get("text")) diff --git a/backend/src/controllers/category.rs b/backend/src/controllers/category.rs index 7e2a4ba..f14c37b 100644 --- a/backend/src/controllers/category.rs +++ 
b/backend/src/controllers/category.rs @@ -145,7 +145,11 @@ pub async fn update( .filter(|value| !value.is_empty()) != Some(name.as_str()) { - content::rewrite_category_references(previous_name.as_deref(), &previous_slug, Some(&name))?; + content::rewrite_category_references( + previous_name.as_deref(), + &previous_slug, + Some(&name), + )?; } let mut item = item.into_active_model(); diff --git a/backend/src/controllers/comment.rs b/backend/src/controllers/comment.rs index f2e357a..af83f1b 100644 --- a/backend/src/controllers/comment.rs +++ b/backend/src/controllers/comment.rs @@ -243,7 +243,10 @@ pub async fn paragraph_summary( let summary = counts .into_iter() - .map(|(paragraph_key, count)| ParagraphCommentSummary { paragraph_key, count }) + .map(|(paragraph_key, count)| ParagraphCommentSummary { + paragraph_key, + count, + }) .collect::>(); format::json(summary) diff --git a/backend/src/controllers/post.rs b/backend/src/controllers/post.rs index 2374dbd..51ea36c 100644 --- a/backend/src/controllers/post.rs +++ b/backend/src/controllers/post.rs @@ -1,6 +1,7 @@ #![allow(clippy::missing_errors_doc)] #![allow(clippy::unnecessary_struct_initialization)] #![allow(clippy::unused_async)] +use axum::extract::Multipart; use loco_rs::prelude::*; use sea_orm::QueryOrder; use serde::{Deserialize, Serialize}; @@ -18,6 +19,7 @@ pub struct Params { pub tags: Option, pub post_type: Option, pub image: Option, + pub images: Option, pub pinned: Option, } @@ -31,6 +33,7 @@ impl Params { item.tags = Set(self.tags.clone()); item.post_type = Set(self.post_type.clone()); item.image = Set(self.image.clone()); + item.images = Set(self.images.clone()); item.pinned = Set(self.pinned); } } @@ -61,6 +64,7 @@ pub struct MarkdownCreateParams { pub tags: Option>, pub post_type: Option, pub image: Option, + pub images: Option>, pub pinned: Option, pub published: Option, } @@ -78,6 +82,12 @@ pub struct MarkdownDeleteResponse { pub deleted: bool, } +#[derive(Clone, Debug, Serialize)] +pub struct 
MarkdownImportResponse { + pub count: usize, + pub slugs: Vec, +} + async fn load_item(ctx: &AppContext, id: i32) -> Result { let item = Entity::find_by_id(id).one(&ctx.db).await?; item.ok_or_else(|| Error::NotFound) @@ -293,6 +303,7 @@ pub async fn create_markdown( tags: params.tags.unwrap_or_default(), post_type: params.post_type.unwrap_or_else(|| "article".to_string()), image: params.image, + images: params.images.unwrap_or_default(), pinned: params.pinned.unwrap_or(false), published: params.published.unwrap_or(true), }, @@ -307,6 +318,40 @@ pub async fn create_markdown( }) } +#[debug_handler] +pub async fn import_markdown( + State(ctx): State, + mut multipart: Multipart, +) -> Result { + let mut files = Vec::new(); + + while let Some(field) = multipart + .next_field() + .await + .map_err(|error| Error::BadRequest(error.to_string()))? + { + let file_name = field + .file_name() + .map(ToString::to_string) + .unwrap_or_else(|| "imported.md".to_string()); + let bytes = field + .bytes() + .await + .map_err(|error| Error::BadRequest(error.to_string()))?; + let content = String::from_utf8(bytes.to_vec()) + .map_err(|_| Error::BadRequest("markdown file must be utf-8".to_string()))?; + + files.push(content::MarkdownImportFile { file_name, content }); + } + + let imported = content::import_markdown_documents(&ctx, files).await?; + + format::json(MarkdownImportResponse { + count: imported.len(), + slugs: imported.into_iter().map(|item| item.slug).collect(), + }) +} + #[debug_handler] pub async fn delete_markdown_by_slug( Path(slug): Path, @@ -325,6 +370,7 @@ pub fn routes() -> Routes { .add("/", get(list)) .add("/", post(add)) .add("markdown", post(create_markdown)) + .add("markdown/import", post(import_markdown)) .add("slug/{slug}/markdown", get(get_markdown_by_slug)) .add("slug/{slug}/markdown", put(update_markdown_by_slug)) .add("slug/{slug}/markdown", patch(update_markdown_by_slug)) diff --git a/backend/src/controllers/review.rs b/backend/src/controllers/review.rs 
index 6757444..691dfee 100644 --- a/backend/src/controllers/review.rs +++ b/backend/src/controllers/review.rs @@ -15,6 +15,7 @@ pub struct CreateReviewRequest { pub description: String, pub tags: Vec, pub cover: String, + pub link_url: Option, } #[derive(Serialize, Deserialize, Debug)] @@ -27,6 +28,7 @@ pub struct UpdateReviewRequest { pub description: Option, pub tags: Option>, pub cover: Option, + pub link_url: Option, } pub async fn list(State(ctx): State) -> Result { @@ -63,6 +65,10 @@ pub async fn create( description: Set(Some(req.description)), tags: Set(Some(serde_json::to_string(&req.tags).unwrap_or_default())), cover: Set(Some(req.cover)), + link_url: Set(req.link_url.and_then(|value| { + let trimmed = value.trim().to_string(); + (!trimmed.is_empty()).then_some(trimmed) + })), ..Default::default() }; @@ -105,6 +111,10 @@ pub async fn update( if let Some(cover) = req.cover { review.cover = Set(Some(cover)); } + if let Some(link_url) = req.link_url { + let trimmed = link_url.trim().to_string(); + review.link_url = Set((!trimmed.is_empty()).then_some(trimmed)); + } let review = review.update(&ctx.db).await?; format::json(review) diff --git a/backend/src/controllers/search.rs b/backend/src/controllers/search.rs index c608509..b22b75f 100644 --- a/backend/src/controllers/search.rs +++ b/backend/src/controllers/search.rs @@ -178,7 +178,8 @@ pub async fn search( .all(&ctx.db) .await { - Ok(rows) => rows, + Ok(rows) if !rows.is_empty() => rows, + Ok(_) => fallback_search(&ctx, &q, limit).await?, Err(_) => fallback_search(&ctx, &q, limit).await?, } } else { diff --git a/backend/src/controllers/site_settings.rs b/backend/src/controllers/site_settings.rs index 5197681..0dcc667 100644 --- a/backend/src/controllers/site_settings.rs +++ b/backend/src/controllers/site_settings.rs @@ -5,6 +5,8 @@ use loco_rs::prelude::*; use sea_orm::{ActiveModelTrait, EntityTrait, IntoActiveModel, QueryOrder, Set}; use serde::{Deserialize, Serialize}; +use std::collections::HashSet; +use 
uuid::Uuid; use crate::{ controllers::admin::check_auth, @@ -12,6 +14,38 @@ use crate::{ services::ai, }; +#[derive(Clone, Debug, Default, Deserialize, Serialize)] +pub struct MusicTrackPayload { + pub title: String, + #[serde(default)] + pub artist: Option, + #[serde(default)] + pub album: Option, + pub url: String, + #[serde(default, alias = "coverImageUrl")] + pub cover_image_url: Option, + #[serde(default, alias = "accentColor")] + pub accent_color: Option, + #[serde(default)] + pub description: Option, +} + +#[derive(Clone, Debug, Default, Deserialize, Serialize)] +pub struct AiProviderConfig { + #[serde(default)] + pub id: String, + #[serde(default, alias = "label")] + pub name: String, + #[serde(default)] + pub provider: String, + #[serde(default, alias = "apiBase")] + pub api_base: Option, + #[serde(default, alias = "apiKey")] + pub api_key: Option, + #[serde(default, alias = "chatModel")] + pub chat_model: Option, +} + #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct SiteSettingsPayload { #[serde(default, alias = "siteName")] @@ -46,8 +80,12 @@ pub struct SiteSettingsPayload { pub location: Option, #[serde(default, alias = "techStack")] pub tech_stack: Option>, + #[serde(default, alias = "musicPlaylist")] + pub music_playlist: Option>, #[serde(default, alias = "aiEnabled")] pub ai_enabled: Option, + #[serde(default, alias = "paragraphCommentsEnabled")] + pub paragraph_comments_enabled: Option, #[serde(default, alias = "aiProvider")] pub ai_provider: Option, #[serde(default, alias = "aiApiBase")] @@ -56,6 +94,10 @@ pub struct SiteSettingsPayload { pub ai_api_key: Option, #[serde(default, alias = "aiChatModel")] pub ai_chat_model: Option, + #[serde(default, alias = "aiProviders")] + pub ai_providers: Option>, + #[serde(default, alias = "aiActiveProviderId")] + pub ai_active_provider_id: Option, #[serde(default, alias = "aiEmbeddingModel")] pub ai_embedding_model: Option, #[serde(default, alias = "aiSystemPrompt")] @@ -85,7 +127,9 @@ pub 
struct PublicSiteSettingsResponse { pub social_email: Option, pub location: Option, pub tech_stack: Option, + pub music_playlist: Option, pub ai_enabled: bool, + pub paragraph_comments_enabled: bool, } fn normalize_optional_string(value: Option) -> Option { @@ -103,82 +147,307 @@ fn normalize_optional_int(value: Option, min: i32, max: i32) -> Option value.map(|item| item.clamp(min, max)) } +fn create_ai_provider_id() -> String { + format!("provider-{}", Uuid::new_v4().simple()) +} + +fn default_ai_provider_config() -> AiProviderConfig { + AiProviderConfig { + id: "default".to_string(), + name: "默认提供商".to_string(), + provider: ai::provider_name(None), + api_base: Some(ai::default_api_base().to_string()), + api_key: Some(ai::default_api_key().to_string()), + chat_model: Some(ai::default_chat_model().to_string()), + } +} + +fn normalize_ai_provider_configs(items: Vec) -> Vec { + let mut seen_ids = HashSet::new(); + + items + .into_iter() + .enumerate() + .filter_map(|(index, item)| { + let provider = normalize_optional_string(Some(item.provider)) + .unwrap_or_else(|| ai::provider_name(None)); + let api_base = normalize_optional_string(item.api_base); + let api_key = normalize_optional_string(item.api_key); + let chat_model = normalize_optional_string(item.chat_model); + let has_content = !item.name.trim().is_empty() + || !provider.trim().is_empty() + || api_base.is_some() + || api_key.is_some() + || chat_model.is_some(); + + if !has_content { + return None; + } + + let mut id = + normalize_optional_string(Some(item.id)).unwrap_or_else(create_ai_provider_id); + if !seen_ids.insert(id.clone()) { + id = create_ai_provider_id(); + seen_ids.insert(id.clone()); + } + + let name = normalize_optional_string(Some(item.name)) + .unwrap_or_else(|| format!("提供商 {}", index + 1)); + + Some(AiProviderConfig { + id, + name, + provider, + api_base, + api_key, + chat_model, + }) + }) + .collect() +} + +fn legacy_ai_provider_config(model: &Model) -> Option { + let provider = 
normalize_optional_string(model.ai_provider.clone()); + let api_base = normalize_optional_string(model.ai_api_base.clone()); + let api_key = normalize_optional_string(model.ai_api_key.clone()); + let chat_model = normalize_optional_string(model.ai_chat_model.clone()); + + if provider.is_none() && api_base.is_none() && api_key.is_none() && chat_model.is_none() { + return None; + } + + Some(AiProviderConfig { + id: "default".to_string(), + name: "当前提供商".to_string(), + provider: provider.unwrap_or_else(|| ai::provider_name(None)), + api_base, + api_key, + chat_model, + }) +} + +pub(crate) fn ai_provider_configs(model: &Model) -> Vec { + let parsed = model + .ai_providers + .as_ref() + .and_then(|value| serde_json::from_value::>(value.clone()).ok()) + .map(normalize_ai_provider_configs) + .unwrap_or_default(); + + if !parsed.is_empty() { + parsed + } else { + legacy_ai_provider_config(model).into_iter().collect() + } +} + +pub(crate) fn active_ai_provider_id(model: &Model) -> Option { + let configs = ai_provider_configs(model); + let requested = normalize_optional_string(model.ai_active_provider_id.clone()); + + if let Some(active_id) = requested.filter(|id| configs.iter().any(|item| item.id == *id)) { + Some(active_id) + } else { + configs.first().map(|item| item.id.clone()) + } +} + +fn write_ai_provider_state( + model: &mut Model, + configs: Vec, + requested_active_id: Option, +) { + let normalized = normalize_ai_provider_configs(configs); + let active_id = requested_active_id + .filter(|id| normalized.iter().any(|item| item.id == *id)) + .or_else(|| normalized.first().map(|item| item.id.clone())); + + model.ai_providers = (!normalized.is_empty()).then(|| serde_json::json!(normalized.clone())); + model.ai_active_provider_id = active_id.clone(); + + if let Some(active) = active_id.and_then(|id| normalized.into_iter().find(|item| item.id == id)) + { + model.ai_provider = Some(active.provider); + model.ai_api_base = active.api_base; + model.ai_api_key = active.api_key; 
+ model.ai_chat_model = active.chat_model; + } else { + model.ai_provider = None; + model.ai_api_base = None; + model.ai_api_key = None; + model.ai_chat_model = None; + } +} + +fn sync_ai_provider_fields(model: &mut Model) { + write_ai_provider_state( + model, + ai_provider_configs(model), + active_ai_provider_id(model), + ); +} + +fn update_active_provider_from_legacy_fields(model: &mut Model) { + let provider = model.ai_provider.clone(); + let api_base = model.ai_api_base.clone(); + let api_key = model.ai_api_key.clone(); + let chat_model = model.ai_chat_model.clone(); + let mut configs = ai_provider_configs(model); + let active_id = active_ai_provider_id(model); + + if configs.is_empty() { + let mut config = default_ai_provider_config(); + config.provider = provider.unwrap_or_else(|| ai::provider_name(None)); + config.api_base = api_base; + config.api_key = api_key; + config.chat_model = chat_model; + write_ai_provider_state( + model, + vec![config], + Some(active_id.unwrap_or_else(|| "default".to_string())), + ); + return; + } + + let target_id = active_id + .clone() + .or_else(|| configs.first().map(|item| item.id.clone())); + + if let Some(target_id) = target_id { + for config in &mut configs { + if config.id == target_id { + if let Some(next_provider) = provider.clone() { + config.provider = next_provider; + } + config.api_base = api_base.clone(); + config.api_key = api_key.clone(); + config.chat_model = chat_model.clone(); + } + } + } + + write_ai_provider_state(model, configs, active_id); +} + +fn normalize_music_playlist(items: Vec) -> Vec { + items + .into_iter() + .map(|item| MusicTrackPayload { + title: item.title.trim().to_string(), + artist: normalize_optional_string(item.artist), + album: normalize_optional_string(item.album), + url: item.url.trim().to_string(), + cover_image_url: normalize_optional_string(item.cover_image_url), + accent_color: normalize_optional_string(item.accent_color), + description: normalize_optional_string(item.description), 
+ }) + .filter(|item| !item.title.is_empty() && !item.url.is_empty()) + .collect() +} + impl SiteSettingsPayload { - pub(crate) fn apply(self, item: &mut ActiveModel) { + pub(crate) fn apply(self, item: &mut Model) { if let Some(site_name) = self.site_name { - item.site_name = Set(normalize_optional_string(Some(site_name))); + item.site_name = normalize_optional_string(Some(site_name)); } if let Some(site_short_name) = self.site_short_name { - item.site_short_name = Set(normalize_optional_string(Some(site_short_name))); + item.site_short_name = normalize_optional_string(Some(site_short_name)); } if let Some(site_url) = self.site_url { - item.site_url = Set(normalize_optional_string(Some(site_url))); + item.site_url = normalize_optional_string(Some(site_url)); } if let Some(site_title) = self.site_title { - item.site_title = Set(normalize_optional_string(Some(site_title))); + item.site_title = normalize_optional_string(Some(site_title)); } if let Some(site_description) = self.site_description { - item.site_description = Set(normalize_optional_string(Some(site_description))); + item.site_description = normalize_optional_string(Some(site_description)); } if let Some(hero_title) = self.hero_title { - item.hero_title = Set(normalize_optional_string(Some(hero_title))); + item.hero_title = normalize_optional_string(Some(hero_title)); } if let Some(hero_subtitle) = self.hero_subtitle { - item.hero_subtitle = Set(normalize_optional_string(Some(hero_subtitle))); + item.hero_subtitle = normalize_optional_string(Some(hero_subtitle)); } if let Some(owner_name) = self.owner_name { - item.owner_name = Set(normalize_optional_string(Some(owner_name))); + item.owner_name = normalize_optional_string(Some(owner_name)); } if let Some(owner_title) = self.owner_title { - item.owner_title = Set(normalize_optional_string(Some(owner_title))); + item.owner_title = normalize_optional_string(Some(owner_title)); } if let Some(owner_bio) = self.owner_bio { - item.owner_bio = 
Set(normalize_optional_string(Some(owner_bio))); + item.owner_bio = normalize_optional_string(Some(owner_bio)); } if let Some(owner_avatar_url) = self.owner_avatar_url { - item.owner_avatar_url = Set(normalize_optional_string(Some(owner_avatar_url))); + item.owner_avatar_url = normalize_optional_string(Some(owner_avatar_url)); } if let Some(social_github) = self.social_github { - item.social_github = Set(normalize_optional_string(Some(social_github))); + item.social_github = normalize_optional_string(Some(social_github)); } if let Some(social_twitter) = self.social_twitter { - item.social_twitter = Set(normalize_optional_string(Some(social_twitter))); + item.social_twitter = normalize_optional_string(Some(social_twitter)); } if let Some(social_email) = self.social_email { - item.social_email = Set(normalize_optional_string(Some(social_email))); + item.social_email = normalize_optional_string(Some(social_email)); } if let Some(location) = self.location { - item.location = Set(normalize_optional_string(Some(location))); + item.location = normalize_optional_string(Some(location)); } if let Some(tech_stack) = self.tech_stack { - item.tech_stack = Set(Some(serde_json::json!(tech_stack))); + item.tech_stack = Some(serde_json::json!(tech_stack)); + } + if let Some(music_playlist) = self.music_playlist { + item.music_playlist = Some(serde_json::json!(normalize_music_playlist(music_playlist))); } if let Some(ai_enabled) = self.ai_enabled { - item.ai_enabled = Set(Some(ai_enabled)); + item.ai_enabled = Some(ai_enabled); } + if let Some(paragraph_comments_enabled) = self.paragraph_comments_enabled { + item.paragraph_comments_enabled = Some(paragraph_comments_enabled); + } + let provider_list_supplied = self.ai_providers.is_some(); + let provided_ai_providers = self.ai_providers.map(normalize_ai_provider_configs); + let requested_active_provider_id = self + .ai_active_provider_id + .and_then(|value| normalize_optional_string(Some(value))); + let legacy_provider_fields_updated 
= self.ai_provider.is_some() + || self.ai_api_base.is_some() + || self.ai_api_key.is_some() + || self.ai_chat_model.is_some(); if let Some(ai_provider) = self.ai_provider { - item.ai_provider = Set(normalize_optional_string(Some(ai_provider))); + item.ai_provider = normalize_optional_string(Some(ai_provider)); } if let Some(ai_api_base) = self.ai_api_base { - item.ai_api_base = Set(normalize_optional_string(Some(ai_api_base))); + item.ai_api_base = normalize_optional_string(Some(ai_api_base)); } if let Some(ai_api_key) = self.ai_api_key { - item.ai_api_key = Set(normalize_optional_string(Some(ai_api_key))); + item.ai_api_key = normalize_optional_string(Some(ai_api_key)); } if let Some(ai_chat_model) = self.ai_chat_model { - item.ai_chat_model = Set(normalize_optional_string(Some(ai_chat_model))); + item.ai_chat_model = normalize_optional_string(Some(ai_chat_model)); } if let Some(ai_embedding_model) = self.ai_embedding_model { - item.ai_embedding_model = Set(normalize_optional_string(Some(ai_embedding_model))); + item.ai_embedding_model = normalize_optional_string(Some(ai_embedding_model)); } if let Some(ai_system_prompt) = self.ai_system_prompt { - item.ai_system_prompt = Set(normalize_optional_string(Some(ai_system_prompt))); + item.ai_system_prompt = normalize_optional_string(Some(ai_system_prompt)); } if self.ai_top_k.is_some() { - item.ai_top_k = Set(normalize_optional_int(self.ai_top_k, 1, 12)); + item.ai_top_k = normalize_optional_int(self.ai_top_k, 1, 12); } if self.ai_chunk_size.is_some() { - item.ai_chunk_size = Set(normalize_optional_int(self.ai_chunk_size, 400, 4000)); + item.ai_chunk_size = normalize_optional_int(self.ai_chunk_size, 400, 4000); + } + + if provider_list_supplied { + write_ai_provider_state( + item, + provided_ai_providers.unwrap_or_default(), + requested_active_provider_id.or_else(|| item.ai_active_provider_id.clone()), + ); + } else if legacy_provider_fields_updated { + update_active_provider_from_legacy_fields(item); + } else { + 
sync_ai_provider_fields(item); } } } @@ -187,33 +456,76 @@ fn default_payload() -> SiteSettingsPayload { SiteSettingsPayload { site_name: Some("InitCool".to_string()), site_short_name: Some("Termi".to_string()), - site_url: Some("https://termi.dev".to_string()), + site_url: Some("https://init.cool".to_string()), site_title: Some("InitCool - 终端风格的内容平台".to_string()), site_description: Some("一个基于终端美学的个人内容站,记录代码、设计和生活。".to_string()), hero_title: Some("欢迎来到我的极客终端博客".to_string()), hero_subtitle: Some("这里记录技术、代码和生活点滴".to_string()), owner_name: Some("InitCool".to_string()), - owner_title: Some("前端开发者 / 技术博主".to_string()), + owner_title: Some("Rust / Go / Python Developer · Builder @ init.cool".to_string()), owner_bio: Some( - "一名热爱技术的前端开发者,专注于构建高性能、优雅的用户界面。相信代码不仅是工具,更是一种艺术表达。" + "InitCool,GitHub 用户名 limitcool。坚持不要重复造轮子,当前在维护 starter,平时主要写 Rust、Go、Python 相关项目,也在持续学习 AI 与 Web3。" .to_string(), ), - owner_avatar_url: None, - social_github: Some("https://github.com".to_string()), - social_twitter: Some("https://twitter.com".to_string()), - social_email: Some("mailto:hello@termi.dev".to_string()), + owner_avatar_url: Some("https://github.com/limitcool.png".to_string()), + social_github: Some("https://github.com/limitcool".to_string()), + social_twitter: None, + social_email: Some("mailto:initcoool@gmail.com".to_string()), location: Some("Hong Kong".to_string()), tech_stack: Some(vec![ - "Astro".to_string(), + "Rust".to_string(), + "Go".to_string(), + "Python".to_string(), "Svelte".to_string(), - "Tailwind CSS".to_string(), - "TypeScript".to_string(), + "Astro".to_string(), + "Loco.rs".to_string(), + ]), + music_playlist: Some(vec![ + MusicTrackPayload { + title: "山中来信".to_string(), + artist: Some("InitCool Radio".to_string()), + album: Some("站点默认歌单".to_string()), + url: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3".to_string(), + cover_image_url: Some( + "https://images.unsplash.com/photo-1510915228340-29c85a43dcfe?auto=format&fit=crop&w=600&q=80" + 
.to_string(), + ), + accent_color: Some("#2f6b5f".to_string()), + description: Some("适合文章阅读时循环播放的轻氛围曲。".to_string()), + }, + MusicTrackPayload { + title: "风吹松声".to_string(), + artist: Some("InitCool Radio".to_string()), + album: Some("站点默认歌单".to_string()), + url: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-2.mp3".to_string(), + cover_image_url: Some( + "https://images.unsplash.com/photo-1500530855697-b586d89ba3ee?auto=format&fit=crop&w=600&q=80" + .to_string(), + ), + accent_color: Some("#8a5b35".to_string()), + description: Some("偏木质感的器乐氛围,适合深夜浏览。".to_string()), + }, + MusicTrackPayload { + title: "夜航小记".to_string(), + artist: Some("InitCool Radio".to_string()), + album: Some("站点默认歌单".to_string()), + url: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-3.mp3".to_string(), + cover_image_url: Some( + "https://images.unsplash.com/photo-1493225457124-a3eb161ffa5f?auto=format&fit=crop&w=600&q=80" + .to_string(), + ), + accent_color: Some("#375a7f".to_string()), + description: Some("节奏更明显一点,适合切换阅读状态。".to_string()), + }, ]), ai_enabled: Some(false), + paragraph_comments_enabled: Some(true), ai_provider: Some(ai::provider_name(None)), ai_api_base: Some(ai::default_api_base().to_string()), ai_api_key: Some(ai::default_api_key().to_string()), ai_chat_model: Some(ai::default_chat_model().to_string()), + ai_providers: Some(vec![default_ai_provider_config()]), + ai_active_provider_id: Some("default".to_string()), ai_embedding_model: Some(ai::local_embedding_label().to_string()), ai_system_prompt: Some( "你是这个博客的站内 AI 助手。请优先基于提供的上下文回答,答案要准确、简洁、实用;如果上下文不足,请明确说明。" @@ -233,12 +545,15 @@ pub(crate) async fn load_current(ctx: &AppContext) -> Result { return Ok(settings); } - let mut item = ActiveModel { + let inserted = ActiveModel { id: Set(1), ..Default::default() - }; - default_payload().apply(&mut item); - Ok(item.insert(&ctx.db).await?) 
+ } + .insert(&ctx.db) + .await?; + let mut model = inserted; + default_payload().apply(&mut model); + Ok(model.into_active_model().update(&ctx.db).await?) } fn public_response(model: Model) -> PublicSiteSettingsResponse { @@ -260,7 +575,9 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse { social_email: model.social_email, location: model.location, tech_stack: model.tech_stack, + music_playlist: model.music_playlist, ai_enabled: model.ai_enabled.unwrap_or(false), + paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true), } } @@ -277,8 +594,9 @@ pub async fn update( check_auth()?; let current = load_current(&ctx).await?; - let mut item = current.into_active_model(); + let mut item = current; params.apply(&mut item); + let item = item.into_active_model(); let updated = item.update(&ctx.db).await?; format::json(public_response(updated)) } diff --git a/backend/src/controllers/tag.rs b/backend/src/controllers/tag.rs index 575bb7f..e73d547 100644 --- a/backend/src/controllers/tag.rs +++ b/backend/src/controllers/tag.rs @@ -63,7 +63,11 @@ pub async fn update( .filter(|value| !value.is_empty()) != Some(next_name) { - content::rewrite_tag_references(previous_name.as_deref(), &previous_slug, Some(next_name))?; + content::rewrite_tag_references( + previous_name.as_deref(), + &previous_slug, + Some(next_name), + )?; } } diff --git a/backend/src/fixtures/comments.yaml b/backend/src/fixtures/comments.yaml index d2c4931..948fa75 100644 --- a/backend/src/fixtures/comments.yaml +++ b/backend/src/fixtures/comments.yaml @@ -1,48 +1,48 @@ - id: 1 pid: 1 - author: "Alice" - email: "alice@example.com" - content: "Great introduction! Looking forward to more content." + author: "林川" + email: "linchuan@example.com" + content: "这篇做长文测试很合适,段落密度和古文节奏都不错。" approved: true - id: 2 pid: 1 - author: "Bob" - email: "bob@example.com" - content: "The terminal UI looks amazing. Love the design!" 
+ author: "阿青" + email: "aqing@example.com" + content: "建议后面再加几篇山水游记,方便测试问答检索是否能区分不同山名。" approved: true - id: 3 pid: 2 - author: "Charlie" - email: "charlie@example.com" - content: "Thanks for the Rust tips! The ownership concept finally clicked for me." + author: "周宁" + email: "zhouling@example.com" + content: "这一段关于南岩和琼台的描写很好,适合测试段落评论锚点。" approved: true - id: 4 pid: 3 - author: "Diana" - email: "diana@example.com" - content: "Astro is indeed fast. I've been using it for my personal blog too." + author: "顾远" + email: "guyuan@example.com" + content: "悬空寺这一段信息量很大,拿来测试 AI 摘要应该很有代表性。" approved: true - id: 5 pid: 4 - author: "Eve" - email: "eve@example.com" - content: "The color palette you shared is perfect. Using it for my terminal theme now!" + author: "清嘉" + email: "qingjia@example.com" + content: "黄山记的序文很适合测试首屏摘要生成。" approved: true - id: 6 pid: 5 - author: "Frank" - email: "frank@example.com" - content: "Loco.rs looks promising. Might use it for my next project." + author: "石霁" + email: "shiji@example.com" + content: "想看看评测页和文章页共存时,搜索能不能把这类古文结果排在前面。" approved: false - id: 7 - pid: 2 - author: "Grace" - email: "grace@example.com" - content: "Would love to see more advanced Rust patterns in future posts." 
+ pid: 3 + author: "江禾" + email: "jianghe@example.com" + content: "如果后续要做段落评论,这篇恒山记很适合,因为章节分段比较清晰。" approved: true diff --git a/backend/src/fixtures/friend_links.yaml b/backend/src/fixtures/friend_links.yaml index 50e9946..b1964af 100644 --- a/backend/src/fixtures/friend_links.yaml +++ b/backend/src/fixtures/friend_links.yaml @@ -1,38 +1,38 @@ - id: 1 - site_name: "Tech Blog Daily" - site_url: "https://techblog.example.com" - avatar_url: "https://techblog.example.com/avatar.png" - description: "Daily tech news and tutorials" - category: "tech" + site_name: "山中札记" + site_url: "https://mountain-notes.example.com" + avatar_url: "https://mountain-notes.example.com/avatar.png" + description: "记录古籍、游记与自然地理的中文内容站。" + category: "文化" status: "approved" - id: 2 - site_name: "Rustacean Station" - site_url: "https://rustacean.example.com" - avatar_url: "https://rustacean.example.com/logo.png" - description: "All things Rust programming" - category: "tech" + site_name: "旧书与远方" + site_url: "https://oldbooks.example.com" + avatar_url: "https://oldbooks.example.com/logo.png" + description: "分享古典文学、读书笔记和旅行随笔。" + category: "阅读" status: "approved" - id: 3 - site_name: "Design Patterns" - site_url: "https://designpatterns.example.com" - avatar_url: "https://designpatterns.example.com/icon.png" - description: "UI/UX design inspiration" - category: "design" + site_name: "山海数据局" + site_url: "https://shanhai-data.example.com" + avatar_url: "https://shanhai-data.example.com/icon.png" + description: "偏技术向的中文站点,关注搜索、知识库与可视化。" + category: "技术" status: "approved" - id: 4 - site_name: "Code Snippets" - site_url: "https://codesnippets.example.com" - description: "Useful code snippets for developers" - category: "dev" + site_name: "风物手册" + site_url: "https://fengwu.example.com" + description: "整理地方风物、古迹与旅行地图。" + category: "旅行" status: "pending" - id: 5 - site_name: "Web Dev Weekly" - site_url: "https://webdevweekly.example.com" - avatar_url: "https://webdevweekly.example.com/favicon.png" - 
description: "Weekly web development newsletter" - category: "dev" + site_name: "慢读周刊" + site_url: "https://slowread.example.com" + avatar_url: "https://slowread.example.com/favicon.png" + description: "每周推荐中文长文、读书摘录与站点发现。" + category: "内容" status: "pending" diff --git a/backend/src/fixtures/posts.yaml b/backend/src/fixtures/posts.yaml index 75295c4..74a0c78 100644 --- a/backend/src/fixtures/posts.yaml +++ b/backend/src/fixtures/posts.yaml @@ -1,191 +1,109 @@ - id: 1 pid: 1 - title: "Welcome to Termi Blog" + title: "徐霞客游记·游太和山日记(上)" slug: "welcome-to-termi" content: | - # Welcome to Termi Blog + # 徐霞客游记·游太和山日记(上) - This is the first post on our new blog built with Astro and Loco.rs backend. + 登仙猿岭。十馀里,有枯溪小桥,为郧县境,乃河南、湖广界。东五里,有池一泓,曰青泉,上源不见所自来,而下流淙淙,地又属淅川。 - ## Features + 自此连逾山岭,桃李缤纷,山花夹道,幽艳异常。山坞之中,居庐相望,沿流稻畦,高下鳞次,不似山、陕间矣。 - - 🚀 Fast performance with Astro - - 🎨 Terminal-style UI design - - 💬 Comments system - - 🔗 Friend links - - 🏷️ Tags and categories - - ## Code Example - - ```rust - fn main() { - println!("Hello, Termi!"); - } - ``` - - Stay tuned for more posts! - excerpt: "Welcome to our new blog built with Astro and Loco.rs backend." - category: "general" + 骑而南趋,石道平敞。三十里,越一石梁,有溪自西东注,即太和下流入汉者。越桥为迎恩宫,西向。前有碑大书“第一山”三字,乃米襄阳笔。 + excerpt: "《徐霞客游记》太和山上篇,适合作为中文长文测试样本。" + category: "古籍游记" published: true pinned: true tags: - - welcome - - astro - - loco-rs + - 徐霞客 + - 游记 + - 太和山 + - 长文测试 - id: 2 pid: 2 - title: "Rust Programming Tips" - slug: "rust-programming-tips" + title: "徐霞客游记·游太和山日记(下)" + slug: "building-blog-with-astro" content: | - # Rust Programming Tips + # 徐霞客游记·游太和山日记(下) - Here are some essential tips for Rust developers: + 更衣上金顶。瞻叩毕,天宇澄朗,下瞰诸峰,近者鹄峙,远者罗列,诚天真奥区也。 - ## 1. Ownership and Borrowing + 遂从三天门之右小径下峡中。此径无级无索,乱峰离立,路穿其间,迥觉幽胜。三里馀,抵蜡烛峰右,泉涓涓溢出路旁,下为蜡烛涧。 - Understanding ownership is crucial in Rust. Every value has an owner, and there can only be one owner at a time. - - ## 2. 
Pattern Matching - - Use `match` expressions for exhaustive pattern matching: - - ```rust - match result { - Ok(value) => println!("Success: {}", value), - Err(e) => println!("Error: {}", e), - } - ``` - - ## 3. Error Handling - - Use `Result` and `Option` types effectively with the `?` operator. - - Happy coding! - excerpt: "Essential tips for Rust developers including ownership, pattern matching, and error handling." - category: "tech" + 从宫左趋雷公洞。洞在悬崖间。乃从北天门下,一径阴森,滴水、仙侣二岩,俱在路左,飞崖上突,泉滴沥于中。 + excerpt: "《徐霞客游记》太和山下篇,包含琼台、南岩与五龙宫等段落。" + category: "古籍游记" published: true pinned: false tags: - - rust - - programming - - tips + - 徐霞客 + - 游记 + - 太和山 + - 长文测试 - id: 3 pid: 3 - title: "Building a Blog with Astro" - slug: "building-blog-with-astro" + title: "徐霞客游记·游恒山日记" + slug: "rust-programming-tips" content: | - # Building a Blog with Astro + # 徐霞客游记·游恒山日记 - Astro is a modern static site generator that delivers lightning-fast performance. + 出南山。大溪从山中俱来者,别而西去。余北驰平陆中,望外界之山,高不及台山十之四,其长缭绕如垣。 - ## Why Astro? + 余溯西涧入,又一涧自北来,遂从其西登岭,道甚峻。北向直上者六七里,西转,又北跻而上者五六里,登峰两重,造其巅,是名箭筸岭。 - - **Zero JavaScript by default**: Ships less JavaScript to the client - - **Island Architecture**: Hydrate only interactive components - - **Framework Agnostic**: Use React, Vue, Svelte, or vanilla JS - - **Great DX**: Excellent developer experience with hot module replacement - - ## Getting Started - - ```bash - npm create astro@latest - cd my-astro-project - npm install - npm run dev - ``` - - ## Conclusion - - Astro is perfect for content-focused websites like blogs. - excerpt: "Learn why Astro is the perfect choice for building fast, content-focused blogs." 
- category: "tech" + 三转,峡愈隘,崖愈高。西崖之半,层楼高悬,曲榭斜倚,望之如蜃吐重台者,悬空寺也。 + excerpt: "游恒山、悬空寺与北岳登顶的古文纪行,适合做中文长文测试。" + category: "古籍游记" published: true pinned: false tags: - - astro - - web-dev - - static-site + - 徐霞客 + - 恒山 + - 悬空寺 + - 长文测试 - id: 4 pid: 4 - title: "Terminal UI Design Principles" + title: "游黄山记(上)" slug: "terminal-ui-design" content: | - # Terminal UI Design Principles + # 游黄山记(上) - Terminal-style interfaces are making a comeback in modern web design. + 辛巳春,余与程孟阳订黄山之游,约以梅花时相寻于武林之西溪。徐维翰书来劝驾,读之两腋欲举,遂挟吴去尘以行。 - ## Key Elements + 黄山耸秀峻极,作镇一方。江南诸山,天台、天目为最,以地形准之,黄山之趾与二山齐。 - 1. **Monospace Fonts**: Use fonts like Fira Code, JetBrains Mono - 2. **Dark Themes**: Black or dark backgrounds with vibrant text colors - 3. **Command Prompts**: Use `$` or `>` as visual indicators - 4. **ASCII Art**: Decorative elements using text characters - 5. **Blinking Cursor**: The iconic terminal cursor - - ## Color Palette - - - Background: `#0d1117` - - Text: `#c9d1d9` - - Accent: `#58a6ff` - - Success: `#3fb950` - - Warning: `#d29922` - - Error: `#f85149` - - ## Implementation - - Use CSS to create the terminal aesthetic while maintaining accessibility. - excerpt: "Learn the key principles of designing beautiful terminal-style user interfaces." - category: "design" + 自山口至汤口,山之麓也,登山之径于是始。汤泉之流,自紫石峰六百仞县布,其下有香泉溪。 + excerpt: "钱谦益《游黄山记》上篇,包含序、记之一与记之二。" + category: "古籍游记" published: true pinned: false tags: - - design - - terminal - - ui + - 钱谦益 + - 黄山 + - 游记 + - 长文测试 - id: 5 pid: 5 - title: "Loco.rs Backend Framework" + title: "游黄山记(中)" slug: "loco-rs-framework" content: | - # Introduction to Loco.rs + # 游黄山记(中) - Loco.rs is a web and API framework for Rust inspired by Rails. 
+ 由祥符寺度石桥而北,逾慈光寺,行数里,径朱砂庵而上。过此取道钵盂、老人两峰之间,峰趾相并,两崖合遝,弥望削成。 - ## Features + 憩桃源庵,指天都为诸峰之中峰,山形络绎,未有以殊异也。云生峰腰,层叠如裼衣焉。 - - **MVC Architecture**: Model-View-Controller pattern - - **SeaORM Integration**: Powerful ORM for database operations - - **Background Jobs**: Built-in job processing - - **Authentication**: Ready-to-use auth system - - **CLI Generator**: Scaffold resources quickly - - ## Quick Start - - ```bash - cargo install loco - loco new myapp - cd myapp - cargo loco start - ``` - - ## Why Loco.rs? - - - Opinionated but flexible - - Production-ready defaults - - Excellent documentation - - Active community - - Perfect for building APIs and web applications in Rust. - excerpt: "An introduction to Loco.rs, the Rails-inspired web framework for Rust." - category: "tech" + 清晓,出文殊院,神鸦背行而先。避莲华沟险,从支径右折,险益甚。上平天矼,转始信峰,经散花坞,看扰龙松。 + excerpt: "钱谦益《游黄山记》中篇,适合测试中文长文、检索与段落锚点。" + category: "古籍游记" published: true pinned: false tags: - - rust - - loco-rs - - backend - - api + - 钱谦益 + - 黄山 + - 游记 + - 长文测试 diff --git a/backend/src/fixtures/reviews.yaml b/backend/src/fixtures/reviews.yaml index 73f63e9..b5d2192 100644 --- a/backend/src/fixtures/reviews.yaml +++ b/backend/src/fixtures/reviews.yaml @@ -1,59 +1,59 @@ - id: 1 - title: "塞尔达传说:王国之泪" - review_type: "game" - rating: 5 - review_date: "2024-03-20" - status: "completed" - description: "开放世界的巅峰之作,究极手能力带来无限创意空间" - tags: ["Switch", "开放世界", "冒险"] - cover: "🎮" - -- id: 2 - title: "进击的巨人" - review_type: "anime" - rating: 5 - review_date: "2023-11-10" - status: "completed" - description: "史诗级完结,剧情反转令人震撼" - tags: ["热血", "悬疑", "神作"] - cover: "🎭" - -- id: 3 - title: "赛博朋克 2077" - review_type: "game" - rating: 4 - review_date: "2024-01-15" - status: "completed" - description: "夜之城的故事,虽然首发有问题但后续更新很棒" - tags: ["PC", "RPG", "科幻"] - cover: "🎮" - -- id: 4 - title: "三体" - review_type: "book" - rating: 5 - review_date: "2023-08-05" - status: "completed" - description: "硬科幻巅峰,宇宙社会学的黑暗森林法则" - tags: ["科幻", "经典", "雨果奖"] - cover: "📚" - -- id: 5 - 
title: "星际穿越" + title: "《漫长的季节》" review_type: "movie" rating: 5 - review_date: "2024-02-14" - status: "completed" - description: "诺兰神作,五维空间和黑洞的视觉奇观" - tags: ["科幻", "IMAX", "诺兰"] - cover: "🎬" + review_date: "2024-03-20" + status: "published" + description: "极有质感的中文悬疑剧,人物命运与时代氛围都很扎实。" + tags: ["国产剧", "悬疑", "年度推荐"] + cover: "/review-covers/the-long-season.svg" -- id: 6 - title: "博德之门3" +- id: 2 + title: "《十三邀》" + review_type: "movie" + rating: 4 + review_date: "2024-01-10" + status: "published" + description: "更像一组人物观察样本,适合慢慢看,不适合倍速。" + tags: ["访谈", "人文", "纪实"] + cover: "/review-covers/thirteen-invites.svg" + +- id: 3 + title: "《黑神话:悟空》" review_type: "game" rating: 5 - review_date: "2024-04-01" - status: "in-progress" - description: "CRPG的文艺复兴,骰子决定命运" - tags: ["PC", "CRPG", "多人"] - cover: "🎮" + review_date: "2024-08-25" + status: "published" + description: "美术和演出都很强,战斗手感也足够扎实,是非常好的中文游戏样本。" + tags: ["国产游戏", "动作", "神话"] + cover: "/review-covers/black-myth-wukong.svg" + +- id: 4 + title: "《置身事内》" + review_type: "book" + rating: 5 + review_date: "2024-02-18" + status: "published" + description: "把很多宏观经济问题讲得非常清楚,适合做深阅读测试。" + tags: ["经济", "非虚构", "中国"] + cover: "/review-covers/placed-within.svg" + +- id: 5 + title: "《宇宙探索编辑部》" + review_type: "movie" + rating: 4 + review_date: "2024-04-12" + status: "published" + description: "荒诞和真诚并存,气质很特别,很适合作为中文评论内容。" + tags: ["电影", "科幻", "荒诞"] + cover: "/review-covers/journey-to-the-west-editorial.svg" + +- id: 6 + title: "《疲惫生活中的英雄梦想》" + review_type: "music" + rating: 4 + review_date: "2024-05-01" + status: "draft" + description: "适合深夜循环,文字和旋律都带一点诚恳的钝感。" + tags: ["音乐", "中文", "独立"] + cover: "/review-covers/hero-dreams-in-tired-life.svg" diff --git a/backend/src/fixtures/site_settings.yaml b/backend/src/fixtures/site_settings.yaml index fd16577..b30d822 100644 --- a/backend/src/fixtures/site_settings.yaml +++ b/backend/src/fixtures/site_settings.yaml @@ -1,30 +1,55 @@ - id: 1 site_name: "InitCool" site_short_name: "Termi" - site_url: 
"https://termi.dev" - site_title: "InitCool - 终端风格的内容平台" - site_description: "一个基于终端美学的个人内容站,记录代码、设计和生活。" - hero_title: "欢迎来到我的极客终端博客" - hero_subtitle: "这里记录技术、代码和生活点滴" + site_url: "https://init.cool" + site_title: "InitCool · 中文长文与 AI 搜索实验站" + site_description: "一个偏终端审美的中文内容站,用来测试文章检索、AI 问答、段落评论与后台工作流。" + hero_title: "欢迎来到我的中文内容实验站" + hero_subtitle: "这里有长文章、评测、友链,以及逐步打磨中的 AI 搜索体验" owner_name: "InitCool" - owner_title: "前端开发者 / 技术博主" - owner_bio: "一名热爱技术的前端开发者,专注于构建高性能、优雅的用户界面。相信代码不仅是工具,更是一种艺术表达。" - owner_avatar_url: "" - social_github: "https://github.com" - social_twitter: "https://twitter.com" - social_email: "mailto:hello@termi.dev" - location: "Hong Kong" + owner_title: "Rust / Go / Python Developer · Builder @ init.cool" + owner_bio: "InitCool,GitHub 用户名 limitcool。坚持不要重复造轮子,当前在维护 starter,平时主要写 Rust、Go、Python 相关项目,也在持续学习 AI 与 Web3。" + owner_avatar_url: "https://github.com/limitcool.png" + social_github: "https://github.com/limitcool" + social_twitter: "" + social_email: "mailto:initcoool@gmail.com" + location: "中国香港" tech_stack: - - "Astro" + - "Rust" + - "Go" + - "Python" - "Svelte" - - "Tailwind CSS" - - "TypeScript" + - "Astro" + - "Loco.rs" + music_playlist: + - title: "山中来信" + artist: "InitCool Radio" + album: "站点默认歌单" + url: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3" + cover_image_url: "https://images.unsplash.com/photo-1510915228340-29c85a43dcfe?auto=format&fit=crop&w=600&q=80" + accent_color: "#2f6b5f" + description: "适合文章阅读时循环播放的轻氛围曲。" + - title: "风吹松声" + artist: "InitCool Radio" + album: "站点默认歌单" + url: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-2.mp3" + cover_image_url: "https://images.unsplash.com/photo-1500530855697-b586d89ba3ee?auto=format&fit=crop&w=600&q=80" + accent_color: "#8a5b35" + description: "偏木质感的器乐氛围,适合深夜浏览。" + - title: "夜航小记" + artist: "InitCool Radio" + album: "站点默认歌单" + url: "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-3.mp3" + cover_image_url: 
"https://images.unsplash.com/photo-1493225457124-a3eb161ffa5f?auto=format&fit=crop&w=600&q=80" + accent_color: "#375a7f" + description: "节奏更明显一点,适合切换阅读状态。" ai_enabled: false + paragraph_comments_enabled: true ai_provider: "newapi" - ai_api_base: "http://localhost:8317/v1" - ai_api_key: "your-api-key-1" + ai_api_base: "https://91code.jiangnight.com/v1" + ai_api_key: "sk-5a5e27db9fb8f8ee7e1d8e3c6a44638c2e50cdb0a0cf9d926fefb5418ff62571" ai_chat_model: "gpt-5.4" ai_embedding_model: "fastembed / local all-MiniLM-L6-v2" - ai_system_prompt: "你是这个博客的站内 AI 助手。请优先基于提供的上下文回答,答案要准确、简洁、实用;如果上下文不足,请明确说明。" + ai_system_prompt: "你是这个博客的站内 AI 助手。请优先依据检索到的站内内容回答问题,回答保持准确、简洁、清晰;如果上下文不足,请明确说明,不要编造。" ai_top_k: 4 ai_chunk_size: 1200 diff --git a/backend/src/initializers/content_sync.rs b/backend/src/initializers/content_sync.rs index 4e27754..b28d137 100644 --- a/backend/src/initializers/content_sync.rs +++ b/backend/src/initializers/content_sync.rs @@ -57,10 +57,20 @@ fn is_blank(value: &Option) -> bool { } fn matches_legacy_ai_defaults(settings: &site_settings::Model) -> bool { - settings.ai_provider.as_deref().map(str::trim) == Some("openai-compatible") - && settings.ai_api_base.as_deref().map(str::trim) == Some("https://api.openai.com/v1") - && settings.ai_chat_model.as_deref().map(str::trim) == Some("gpt-4.1-mini") - && is_blank(&settings.ai_api_key) + let provider = settings.ai_provider.as_deref().map(str::trim); + let api_base = settings.ai_api_base.as_deref().map(str::trim); + let chat_model = settings.ai_chat_model.as_deref().map(str::trim); + + (provider == Some("openai-compatible") + && api_base == Some("https://api.openai.com/v1") + && chat_model == Some("gpt-4.1-mini") + && is_blank(&settings.ai_api_key)) + || (provider == Some("newapi") + && matches!( + api_base, + Some("https://cliproxy.ai.init.cool") | Some("https://cliproxy.ai.init.cool/v1") + ) + && chat_model == Some("gpt-5.4")) } async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> { @@ -80,6 
+90,27 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> { }) .filter(|items| !items.is_empty()) .map(|items| serde_json::json!(items)); + let music_playlist = seed["music_playlist"] + .as_array() + .map(|items| { + items + .iter() + .filter_map(|item| { + let title = item["title"].as_str()?.trim(); + let url = item["url"].as_str()?.trim(); + if title.is_empty() || url.is_empty() { + None + } else { + Some(serde_json::json!({ + "title": title, + "url": url, + })) + } + }) + .collect::>() + }) + .filter(|items| !items.is_empty()) + .map(serde_json::Value::Array); let existing = site_settings::Entity::find() .order_by_asc(site_settings::Column::Id) @@ -138,9 +169,16 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> { if existing.tech_stack.is_none() { model.tech_stack = Set(tech_stack); } + if existing.music_playlist.is_none() { + model.music_playlist = Set(music_playlist); + } if existing.ai_enabled.is_none() { model.ai_enabled = Set(seed["ai_enabled"].as_bool()); } + if existing.paragraph_comments_enabled.is_none() { + model.paragraph_comments_enabled = + Set(seed["paragraph_comments_enabled"].as_bool().or(Some(true))); + } if should_upgrade_legacy_ai_defaults { model.ai_provider = Set(as_optional_string(&seed["ai_provider"])); model.ai_api_base = Set(as_optional_string(&seed["ai_api_base"])); @@ -194,7 +232,11 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> { social_email: Set(as_optional_string(&seed["social_email"])), location: Set(as_optional_string(&seed["location"])), tech_stack: Set(tech_stack), + music_playlist: Set(music_playlist), ai_enabled: Set(seed["ai_enabled"].as_bool()), + paragraph_comments_enabled: Set(seed["paragraph_comments_enabled"] + .as_bool() + .or(Some(true))), ai_provider: Set(as_optional_string(&seed["ai_provider"])), ai_api_base: Set(as_optional_string(&seed["ai_api_base"])), ai_api_key: Set(as_optional_string(&seed["ai_api_key"])), diff --git 
a/backend/src/models/_entities/posts.rs b/backend/src/models/_entities/posts.rs index 489f197..4b3cd48 100644 --- a/backend/src/models/_entities/posts.rs +++ b/backend/src/models/_entities/posts.rs @@ -20,6 +20,8 @@ pub struct Model { pub tags: Option, pub post_type: Option, pub image: Option, + #[sea_orm(column_type = "JsonBinary", nullable)] + pub images: Option, pub pinned: Option, } diff --git a/backend/src/models/_entities/reviews.rs b/backend/src/models/_entities/reviews.rs index 1f15361..9bf78be 100644 --- a/backend/src/models/_entities/reviews.rs +++ b/backend/src/models/_entities/reviews.rs @@ -14,6 +14,7 @@ pub struct Model { pub description: Option, pub tags: Option, pub cover: Option, + pub link_url: Option, pub created_at: DateTimeWithTimeZone, pub updated_at: DateTimeWithTimeZone, } diff --git a/backend/src/models/_entities/site_settings.rs b/backend/src/models/_entities/site_settings.rs index 0e3cb3e..39a6de5 100644 --- a/backend/src/models/_entities/site_settings.rs +++ b/backend/src/models/_entities/site_settings.rs @@ -28,12 +28,18 @@ pub struct Model { pub location: Option, #[sea_orm(column_type = "JsonBinary", nullable)] pub tech_stack: Option, + #[sea_orm(column_type = "JsonBinary", nullable)] + pub music_playlist: Option, pub ai_enabled: Option, + pub paragraph_comments_enabled: Option, pub ai_provider: Option, pub ai_api_base: Option, #[sea_orm(column_type = "Text", nullable)] pub ai_api_key: Option, pub ai_chat_model: Option, + #[sea_orm(column_type = "JsonBinary", nullable)] + pub ai_providers: Option, + pub ai_active_provider_id: Option, pub ai_embedding_model: Option, #[sea_orm(column_type = "Text", nullable)] pub ai_system_prompt: Option, diff --git a/backend/src/services/ai.rs b/backend/src/services/ai.rs index d9b82b4..9e4fb62 100644 --- a/backend/src/services/ai.rs +++ b/backend/src/services/ai.rs @@ -1,18 +1,20 @@ +use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, Engine as _}; use chrono::{DateTime, Utc}; use 
fastembed::{ InitOptionsUserDefined, Pooling, TextEmbedding, TokenizerFiles, UserDefinedEmbeddingModel, }; use loco_rs::prelude::*; -use reqwest::Client; +use reqwest::{Client, Url}; use sea_orm::{ ActiveModelTrait, ConnectionTrait, DbBackend, EntityTrait, FromQueryResult, IntoActiveModel, PaginatorTrait, QueryOrder, Set, Statement, }; -use serde::Serialize; +use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; use std::fs; use std::path::{Path, PathBuf}; use std::sync::{Mutex, OnceLock}; +use uuid::Uuid; use crate::{ models::_entities::{ai_chunks, site_settings}, @@ -20,11 +22,13 @@ use crate::{ }; const DEFAULT_AI_PROVIDER: &str = "newapi"; -const DEFAULT_AI_API_BASE: &str = "http://localhost:8317/v1"; -const DEFAULT_AI_API_KEY: &str = "your-api-key-1"; +const DEFAULT_AI_API_BASE: &str = "https://91code.jiangnight.com/v1"; +const DEFAULT_AI_API_KEY: &str = + "sk-5a5e27db9fb8f8ee7e1d8e3c6a44638c2e50cdb0a0cf9d926fefb5418ff62571"; const DEFAULT_CHAT_MODEL: &str = "gpt-5.4"; const DEFAULT_REASONING_EFFORT: &str = "medium"; const DEFAULT_DISABLE_RESPONSE_STORAGE: bool = true; +const DEFAULT_IMAGE_MODEL: &str = "gpt-image-1"; const DEFAULT_TOP_K: usize = 4; const DEFAULT_CHUNK_SIZE: usize = 1200; const DEFAULT_SYSTEM_PROMPT: &str = @@ -79,6 +83,7 @@ struct ScoredChunk { struct SimilarChunkRow { source_slug: String, source_title: Option, + source_type: String, chunk_index: i32, content: String, content_preview: Option, @@ -95,12 +100,50 @@ enum EmbeddingKind { #[derive(Clone, Debug, Serialize)] pub struct AiSource { pub slug: String, + pub href: String, pub title: String, pub excerpt: String, pub score: f64, pub chunk_index: i32, } +#[derive(Clone, Debug, Serialize)] +pub struct GeneratedPostMetadata { + pub title: String, + pub description: String, + pub category: String, + pub tags: Vec, + pub slug: String, +} + +#[derive(Clone, Debug, Serialize)] +pub struct PolishedPostMarkdown { + pub polished_markdown: String, +} + +#[derive(Clone, Debug, 
Serialize)] +pub struct GeneratedPostCoverImage { + pub image_url: String, + pub prompt: String, +} + +#[derive(Clone, Debug, Serialize)] +pub struct AiProviderConnectivityResult { + pub provider: String, + pub endpoint: String, + pub chat_model: String, + pub reply_preview: String, +} + +#[derive(Clone, Debug, Default, Deserialize)] +struct GeneratedPostMetadataDraft { + title: Option, + description: Option, + category: Option, + tags: Option>, + slug: Option, +} + #[derive(Clone, Debug)] pub struct AiAnswer { pub answer: String, @@ -146,6 +189,10 @@ fn trim_to_option(value: Option) -> Option { }) } +fn normalize_newlines(input: &str) -> String { + input.replace("\r\n", "\n") +} + fn preview_text(content: &str, limit: usize) -> Option { let flattened = content .split_whitespace() @@ -162,6 +209,16 @@ fn preview_text(content: &str, limit: usize) -> Option { Some(preview) } +fn json_string_array(value: &Option) -> Vec { + value + .as_ref() + .and_then(|raw| serde_json::from_value::>(raw.clone()).ok()) + .unwrap_or_default() + .into_iter() + .filter_map(|item| trim_to_option(Some(item))) + .collect() +} + fn build_endpoint(api_base: &str, path: &str) -> String { format!( "{}/{}", @@ -170,6 +227,31 @@ fn build_endpoint(api_base: &str, path: &str) -> String { ) } +fn provider_uses_openai_api_prefix(provider: &str) -> bool { + provider_uses_responses(provider) || provider.eq_ignore_ascii_case("openai-compatible") +} + +fn normalize_provider_api_base(provider: &str, api_base: &str) -> String { + let trimmed = api_base.trim(); + if trimmed.is_empty() { + return String::new(); + } + + if !provider_uses_openai_api_prefix(provider) { + return trimmed.trim_end_matches('/').to_string(); + } + + let Ok(mut parsed) = Url::parse(trimmed) else { + return trimmed.trim_end_matches('/').to_string(); + }; + + if parsed.path().trim_end_matches('/').is_empty() { + parsed.set_path("/v1"); + } + + parsed.to_string().trim_end_matches('/').to_string() +} + fn local_embedding_dir() -> 
PathBuf { PathBuf::from(LOCAL_EMBEDDING_CACHE_DIR) } @@ -307,6 +389,101 @@ fn split_long_text(text: &str, chunk_size: usize) -> Vec { parts } +fn build_source_chunks( + source_slug: String, + source_title: Option, + source_path: Option, + source_type: &str, + source_text: String, + chunk_size: usize, +) -> Vec { + let mut chunks = Vec::new(); + let paragraphs = source_text + .split("\n\n") + .map(str::trim) + .filter(|value| !value.is_empty()) + .collect::>(); + + let mut buffer = String::new(); + let mut chunk_index = 0_i32; + + for paragraph in paragraphs { + if paragraph.chars().count() > chunk_size { + if !buffer.trim().is_empty() { + chunks.push(ChunkDraft { + source_slug: source_slug.clone(), + source_title: source_title.clone(), + source_path: source_path.clone(), + source_type: source_type.to_string(), + chunk_index, + content: buffer.trim().to_string(), + content_preview: preview_text(&buffer, 180), + word_count: Some(buffer.split_whitespace().count() as i32), + }); + chunk_index += 1; + buffer.clear(); + } + + for part in split_long_text(paragraph, chunk_size) { + if part.trim().is_empty() { + continue; + } + + chunks.push(ChunkDraft { + source_slug: source_slug.clone(), + source_title: source_title.clone(), + source_path: source_path.clone(), + source_type: source_type.to_string(), + chunk_index, + content_preview: preview_text(&part, 180), + word_count: Some(part.split_whitespace().count() as i32), + content: part, + }); + chunk_index += 1; + } + continue; + } + + let candidate = if buffer.is_empty() { + paragraph.to_string() + } else { + format!("{buffer}\n\n{paragraph}") + }; + + if candidate.chars().count() > chunk_size && !buffer.trim().is_empty() { + chunks.push(ChunkDraft { + source_slug: source_slug.clone(), + source_title: source_title.clone(), + source_path: source_path.clone(), + source_type: source_type.to_string(), + chunk_index, + content_preview: preview_text(&buffer, 180), + word_count: Some(buffer.split_whitespace().count() as i32), + 
content: buffer.trim().to_string(), + }); + chunk_index += 1; + buffer = paragraph.to_string(); + } else { + buffer = candidate; + } + } + + if !buffer.trim().is_empty() { + chunks.push(ChunkDraft { + source_slug, + source_title, + source_path, + source_type: source_type.to_string(), + chunk_index, + content_preview: preview_text(&buffer, 180), + word_count: Some(buffer.split_whitespace().count() as i32), + content: buffer.trim().to_string(), + }); + } + + chunks +} + fn build_chunks(posts: &[content::MarkdownPost], chunk_size: usize) -> Vec { let mut chunks = Vec::new(); @@ -328,93 +505,105 @@ fn build_chunks(posts: &[content::MarkdownPost], chunk_size: usize) -> Vec>() .join("\n\n"); - let paragraphs = source_text - .split("\n\n") - .map(str::trim) - .filter(|value| !value.is_empty()) - .collect::>(); - - let mut buffer = String::new(); - let mut chunk_index = 0_i32; - - for paragraph in paragraphs { - if paragraph.chars().count() > chunk_size { - if !buffer.trim().is_empty() { - chunks.push(ChunkDraft { - source_slug: post.slug.clone(), - source_title: Some(post.title.clone()), - source_path: Some(post.file_path.clone()), - source_type: "post".to_string(), - chunk_index, - content: buffer.trim().to_string(), - content_preview: preview_text(&buffer, 180), - word_count: Some(buffer.split_whitespace().count() as i32), - }); - chunk_index += 1; - buffer.clear(); - } - - for part in split_long_text(paragraph, chunk_size) { - if part.trim().is_empty() { - continue; - } - - chunks.push(ChunkDraft { - source_slug: post.slug.clone(), - source_title: Some(post.title.clone()), - source_path: Some(post.file_path.clone()), - source_type: "post".to_string(), - chunk_index, - content_preview: preview_text(&part, 180), - word_count: Some(part.split_whitespace().count() as i32), - content: part, - }); - chunk_index += 1; - } - continue; - } - - let candidate = if buffer.is_empty() { - paragraph.to_string() - } else { - format!("{buffer}\n\n{paragraph}") - }; - - if 
candidate.chars().count() > chunk_size && !buffer.trim().is_empty() { - chunks.push(ChunkDraft { - source_slug: post.slug.clone(), - source_title: Some(post.title.clone()), - source_path: Some(post.file_path.clone()), - source_type: "post".to_string(), - chunk_index, - content_preview: preview_text(&buffer, 180), - word_count: Some(buffer.split_whitespace().count() as i32), - content: buffer.trim().to_string(), - }); - chunk_index += 1; - buffer = paragraph.to_string(); - } else { - buffer = candidate; - } - } - - if !buffer.trim().is_empty() { - chunks.push(ChunkDraft { - source_slug: post.slug.clone(), - source_title: Some(post.title.clone()), - source_path: Some(post.file_path.clone()), - source_type: "post".to_string(), - chunk_index, - content_preview: preview_text(&buffer, 180), - word_count: Some(buffer.split_whitespace().count() as i32), - content: buffer.trim().to_string(), - }); - } + chunks.extend(build_source_chunks( + post.slug.clone(), + Some(post.title.clone()), + Some(post.file_path.clone()), + "post", + source_text, + chunk_size, + )); } chunks } +fn build_profile_chunks(settings: &site_settings::Model, chunk_size: usize) -> Vec { + let owner_name = + trim_to_option(settings.owner_name.clone()).unwrap_or_else(|| "InitCool".to_string()); + let owner_title = trim_to_option(settings.owner_title.clone()); + let owner_bio = trim_to_option(settings.owner_bio.clone()); + let owner_avatar = trim_to_option(settings.owner_avatar_url.clone()); + let github = trim_to_option(settings.social_github.clone()); + let email = trim_to_option(settings.social_email.clone()); + let site_url = trim_to_option(settings.site_url.clone()); + let location = trim_to_option(settings.location.clone()); + let tech_stack = json_string_array(&settings.tech_stack); + + let mut sections = vec![format!("# 关于作者 {owner_name}")]; + + if let Some(title) = owner_title.as_deref() { + sections.push(format!("身份: {title}")); + } + if let Some(bio) = owner_bio.as_deref() { + 
sections.push(format!("简介: {bio}")); + } + if let Some(location) = location.as_deref() { + sections.push(format!("位置: {location}")); + } + if !tech_stack.is_empty() { + sections.push(format!("技术栈: {}", tech_stack.join(" / "))); + } + if let Some(github) = github.as_deref() { + sections.push(format!("GitHub: {github}")); + } + if let Some(site_url) = site_url.as_deref() { + sections.push(format!("网站: {site_url}")); + } + if let Some(email) = email.as_deref() { + sections.push(format!("邮箱: {email}")); + } + if let Some(owner_avatar) = owner_avatar.as_deref() { + sections.push(format!("头像: {owner_avatar}")); + } + + let profile_text = sections + .into_iter() + .filter(|item| !item.trim().is_empty()) + .collect::>() + .join("\n\n"); + + build_source_chunks( + "about".to_string(), + Some(format!("关于作者 {owner_name}")), + Some("site_settings".to_string()), + "profile", + profile_text, + chunk_size, + ) +} + +fn parse_provider_sse_body(body: &str) -> Option { + let normalized = normalize_newlines(body); + let mut latest_payload = None; + let mut latest_response = None; + + for event in normalized.split("\n\n") { + let data = event + .lines() + .filter_map(|line| line.strip_prefix("data:")) + .map(str::trim_start) + .collect::>() + .join("\n"); + + if data.is_empty() || data == "[DONE]" { + continue; + } + + let Ok(parsed) = serde_json::from_str::(&data) else { + continue; + }; + + if let Some(response) = parsed.get("response") { + latest_response = Some(response.clone()); + } + + latest_payload = Some(parsed); + } + + latest_response.or(latest_payload) +} + async fn request_json(client: &Client, url: &str, api_key: &str, payload: Value) -> Result { let response = client .post(url) @@ -438,6 +627,14 @@ async fn request_json(client: &Client, url: &str, api_key: &str, payload: Value) } serde_json::from_str(&body) + .or_else(|_| { + parse_provider_sse_body(&body).ok_or_else(|| { + serde_json::Error::io(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "provider 
returned neither JSON nor SSE JSON payload", + )) + }) + }) .map_err(|error| Error::BadRequest(format!("AI response parse failed: {error}"))) } @@ -471,6 +668,31 @@ async fn embed_texts_locally(inputs: Vec, kind: EmbeddingKind) -> Result } fn extract_message_content(value: &Value) -> Option { + if let Some(content) = value + .get("message") + .and_then(|message| message.get("content")) + { + if let Some(text) = content.as_str() { + return Some(text.trim().to_string()); + } + + if let Some(parts) = content.as_array() { + let merged = parts + .iter() + .filter_map(|part| { + part.get("text") + .and_then(Value::as_str) + .or_else(|| part.get("output_text").and_then(Value::as_str)) + }) + .collect::>() + .join("\n"); + + if !merged.trim().is_empty() { + return Some(merged.trim().to_string()); + } + } + } + if let Some(content) = value .get("choices") .and_then(Value::as_array) @@ -519,6 +741,639 @@ fn merge_text_segments(parts: Vec) -> Option { } } +fn extract_text_from_content_items(items: &[Value]) -> Option { + let mut segments = Vec::new(); + + for item in items { + if let Some(text) = item.get("text").and_then(Value::as_str) { + segments.push(text.to_string()); + continue; + } + + if let Some(text) = item + .get("output_text") + .and_then(|output_text| output_text.get("text")) + .and_then(Value::as_str) + { + segments.push(text.to_string()); + continue; + } + + if let Some(text) = item.get("output_text").and_then(Value::as_str) { + segments.push(text.to_string()); + } + } + + merge_text_segments(segments) +} + +fn title_from_markdown(markdown: &str) -> Option { + markdown.lines().find_map(|line| { + line.trim() + .strip_prefix("# ") + .map(str::trim) + .filter(|value| !value.is_empty()) + .map(ToString::to_string) + }) +} + +fn excerpt_from_markdown(markdown: &str, limit: usize) -> Option { + let mut in_code_block = false; + + for line in markdown.lines() { + let trimmed = line.trim(); + if trimmed.starts_with("```") { + in_code_block = !in_code_block; + continue; 
+ } + + if in_code_block || trimmed.is_empty() || trimmed.starts_with('#') { + continue; + } + + let excerpt = trimmed.chars().take(limit).collect::(); + if !excerpt.is_empty() { + return Some(excerpt); + } + } + + None +} + +fn strip_markdown_frontmatter(markdown: &str) -> String { + let normalized = markdown.replace("\r\n", "\n"); + if !normalized.starts_with("---\n") { + return normalized; + } + + let Some(end_index) = normalized[4..].find("\n---\n") else { + return normalized; + }; + + normalized[(end_index + 9)..].to_string() +} + +fn metadata_slugify(value: &str) -> String { + let mut slug = String::new(); + let mut last_was_dash = false; + + for ch in value.trim().chars() { + if ch.is_alphanumeric() { + for lower in ch.to_lowercase() { + slug.push(lower); + } + last_was_dash = false; + } else if (ch.is_whitespace() || ch == '-' || ch == '_') && !last_was_dash { + slug.push('-'); + last_was_dash = true; + } + } + + slug.trim_matches('-').to_string() +} + +fn take_json_block(text: &str) -> Option { + let trimmed = text.trim(); + if trimmed.is_empty() { + return None; + } + + if trimmed.starts_with('{') && trimmed.ends_with('}') { + return Some(trimmed.to_string()); + } + + for marker in ["```json", "```JSON", "```"] { + if let Some(start) = trimmed.find(marker) { + let rest = &trimmed[(start + marker.len())..]; + if let Some(end) = rest.find("```") { + let candidate = rest[..end].trim(); + if candidate.starts_with('{') && candidate.ends_with('}') { + return Some(candidate.to_string()); + } + } + } + } + + let start = trimmed.find('{')?; + let end = trimmed.rfind('}')?; + (start < end).then(|| trimmed[start..=end].to_string()) +} + +fn normalize_generated_metadata( + markdown: &str, + draft: GeneratedPostMetadataDraft, +) -> GeneratedPostMetadata { + let fallback_title = title_from_markdown(markdown).unwrap_or_else(|| "未命名文章".to_string()); + let title = trim_to_option(draft.title).unwrap_or_else(|| fallback_title.clone()); + let description = 
trim_to_option(draft.description) + .or_else(|| excerpt_from_markdown(markdown, 140)) + .unwrap_or_else(|| format!("关于《{title}》的文章摘要。")); + let category = trim_to_option(draft.category).unwrap_or_else(|| "未分类".to_string()); + + let mut seen = std::collections::HashSet::new(); + let tags = draft + .tags + .unwrap_or_default() + .into_iter() + .filter_map(|tag| trim_to_option(Some(tag))) + .filter(|tag| { + let key = tag.to_lowercase(); + seen.insert(key) + }) + .take(6) + .collect::>(); + + let slug_source = trim_to_option(draft.slug) + .filter(|value| !value.is_empty()) + .unwrap_or_else(|| title.clone()); + let slug = metadata_slugify(&slug_source); + + GeneratedPostMetadata { + title, + description, + category, + tags, + slug: if slug.is_empty() { + metadata_slugify(&fallback_title) + } else { + slug + }, + } +} + +fn build_post_metadata_prompt(markdown: &str) -> String { + let content = strip_markdown_frontmatter(markdown) + .chars() + .take(6000) + .collect::(); + + format!( + "请根据下面的 Markdown 文章内容,生成适合博客后台直接回填的元数据。\n\ +要求:\n\ +1. 使用中文理解文章,但 slug 必须是适合 URL 的短横线形式。\n\ +2. title 要自然,不要保留“未命名文章”之类的占位词。\n\ +3. description 控制在 40 到 120 个中文字符之间,像站点摘要,不要分点。\n\ +4. category 只输出 1 个分类名称。\n\ +5. tags 输出 3 到 6 个标签,尽量具体,不要和 category 完全重复。\n\ +6. 
只返回 JSON,不要解释,不要代码块。\n\ +JSON 结构:\n\ +{{\n\ + \"title\": \"\",\n\ + \"description\": \"\",\n\ + \"category\": \"\",\n\ + \"tags\": [\"\", \"\"],\n\ + \"slug\": \"\"\n\ +}}\n\n\ +文章内容:\n{content}" + ) +} + +fn infer_category_and_tags(markdown: &str) -> (String, Vec) { + let normalized = strip_markdown_frontmatter(markdown).to_lowercase(); + let candidates = [ + ("canokey", "Linux", "CanoKey"), + ("ffmpeg", "ffmpeg", "ffmpeg"), + ("grpc", "Go", "gRPC"), + ("protobuf", "Go", "Protobuf"), + ("protoc", "Go", "Go"), + ("go ", "Go", "Go"), + ("golang", "Go", "Go"), + ("rust", "Rust", "Rust"), + ("serde", "Rust", "Serde"), + ("sqlx", "Rust", "Sqlx"), + ("dll", "Rust", "Dll"), + ("mysql", "Database", "Mysql"), + ("redis", "Database", "Redis"), + ("sql", "Database", "Sql"), + ("linux", "Linux", "Linux"), + ("shell", "Linux", "Shell"), + ("tmux", "Linux", "Tmux"), + ("dhcp", "Linux", "DHCP"), + ("glibc", "Linux", "GLIBC"), + ("hugo", "Go", "Hugo"), + ("xml", "Go", "Xml"), + ("arm", "Go", "Arm"), + ]; + + let mut category = None; + let mut tags = Vec::new(); + let mut seen = std::collections::HashSet::new(); + + for (needle, matched_category, tag) in candidates { + if !normalized.contains(needle) { + continue; + } + + if category.is_none() { + category = Some(matched_category.to_string()); + } + + if seen.insert(tag.to_lowercase()) { + tags.push(tag.to_string()); + } + } + + if tags.is_empty() { + tags.push("技术笔记".to_string()); + } + + (category.unwrap_or_else(|| "开发".to_string()), tags) +} + +fn fallback_generated_metadata(markdown: &str) -> GeneratedPostMetadata { + let fallback_title = title_from_markdown(markdown).unwrap_or_else(|| "未命名文章".to_string()); + let (category, tags) = infer_category_and_tags(markdown); + + normalize_generated_metadata( + markdown, + GeneratedPostMetadataDraft { + title: Some(fallback_title.clone()), + description: excerpt_from_markdown(markdown, 96) + .or_else(|| Some(format!("关于《{fallback_title}》的技术实践记录。"))), + category: Some(category), + tags: 
Some(tags), + slug: Some(metadata_slugify(&fallback_title)), + }, + ) +} + +fn build_polish_markdown_prompt(markdown: &str) -> String { + let content = normalize_newlines(markdown) + .chars() + .take(12000) + .collect::(); + + format!( + "请润色下面这篇博客 Markdown 文档,输出一份可直接保存的新版本。\n\ +要求:\n\ +1. 直接返回完整 Markdown 文档,不要解释,不要代码块包裹。\n\ +2. 如果文档包含 frontmatter,请一起优化 title、description、category、tags、slug;保留图片、发布状态等合理字段。\n\ +3. 正文要更顺、更准确、更适合发布,但不要改掉核心事实。\n\ +4. 保持 Markdown 结构清晰,标题层级合理,列表和代码块不要损坏。\n\ +5. 使用中文润色,技术名词保持正确。\n\n\ +原始 Markdown:\n{content}" + ) +} + +fn build_post_cover_prompt( + title: &str, + description: Option<&str>, + category: Option<&str>, + tags: &[String], + post_type: &str, + markdown: &str, +) -> String { + let excerpt = strip_markdown_frontmatter(markdown) + .chars() + .take(1600) + .collect::(); + let description_text = description + .map(str::trim) + .filter(|value| !value.is_empty()) + .unwrap_or("暂无摘要"); + let category_text = category + .map(str::trim) + .filter(|value| !value.is_empty()) + .unwrap_or("未分类"); + let tag_text = if tags.is_empty() { + "无".to_string() + } else { + tags.join(" / ") + }; + let post_type_text = if post_type.trim().is_empty() { + "article" + } else { + post_type.trim() + }; + + format!( + "为一篇中文技术博客生成横版封面图。\n\ +要求:\n\ +1. 画面比例 16:9,适合作为文章头图。\n\ +2. 风格偏终端审美、现代、克制、有设计感,不要廉价素材感。\n\ +3. 不要在图片里放可读文字、logo、水印、UI 截图。\n\ +4. 画面要能体现文章主题,但保持抽象和高级感。\n\ +5. 
适合中文技术博客首页与文章页展示。\n\n\ +文章标题:{title}\n\ +文章摘要:{description_text}\n\ +分类:{category_text}\n\ +标签:{tag_text}\n\ +文章类型:{post_type_text}\n\n\ +正文摘录:\n{excerpt}" + ) +} + +fn build_image_generation_url(provider: &str, api_base: &str) -> String { + let normalized = normalize_provider_api_base(provider, api_base); + build_endpoint(&normalized, "/images/generations") +} + +fn extract_image_generation_result(value: &Value) -> Option { + let data = value.get("data").and_then(Value::as_array)?; + + for item in data { + if let Some(url) = item.get("url").and_then(Value::as_str) { + let trimmed = url.trim(); + if !trimmed.is_empty() { + return Some(trimmed.to_string()); + } + } + + if let Some(base64_data) = item.get("b64_json").and_then(Value::as_str) { + let trimmed = base64_data.trim(); + if !trimmed.is_empty() { + return Some(trimmed.to_string()); + } + } + } + + None +} + +fn generated_cover_directory() -> PathBuf { + let current_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from(".")); + let candidates = [ + current_dir + .join("frontend") + .join("public") + .join("generated-covers"), + current_dir + .join("..") + .join("frontend") + .join("public") + .join("generated-covers"), + ]; + + candidates + .into_iter() + .find(|path| path.parent().map(|parent| parent.exists()).unwrap_or(false)) + .unwrap_or_else(|| { + PathBuf::from("..") + .join("frontend") + .join("public") + .join("generated-covers") + }) +} + +fn persist_generated_cover_image(slug_hint: &str, base64_data: &str) -> Result { + let directory = generated_cover_directory(); + fs::create_dir_all(&directory) + .map_err(|error| Error::BadRequest(format!("创建封面图目录失败: {error}")))?; + + let safe_slug = metadata_slugify(slug_hint); + let file_name = format!( + "{}-{}.png", + if safe_slug.is_empty() { + "cover".to_string() + } else { + safe_slug + }, + Uuid::new_v4().simple() + ); + let file_path = directory.join(&file_name); + let image_bytes = BASE64_STANDARD + .decode(base64_data) + .map_err(|error| 
Error::BadRequest(format!("解析 AI 封面图失败: {error}")))?; + + fs::write(&file_path, image_bytes) + .map_err(|error| Error::BadRequest(format!("写入 AI 封面图失败: {error}")))?; + + Ok(format!("/generated-covers/{file_name}")) +} + +fn fallback_polished_markdown(markdown: &str) -> String { + let metadata = fallback_generated_metadata(markdown); + let body = strip_markdown_frontmatter(markdown) + .lines() + .map(str::trim_end) + .collect::>() + .join("\n") + .replace("\n\n\n", "\n\n"); + + format!( + "---\n\ +title: {}\n\ +slug: {}\n\ +description: {}\n\ +category: {}\n\ +post_type: \"article\"\n\ +pinned: false\n\ +published: true\n\ +tags:\n{}\n\ +---\n\n{}\n", + serde_json::to_string(&metadata.title).unwrap_or_else(|_| "\"未命名文章\"".to_string()), + metadata.slug, + serde_json::to_string(&metadata.description) + .unwrap_or_else(|_| "\"关于文章内容的摘要。\"".to_string()), + serde_json::to_string(&metadata.category).unwrap_or_else(|_| "\"开发\"".to_string()), + metadata + .tags + .iter() + .map(|tag| format!(" - {}", serde_json::to_string(tag).unwrap_or_default())) + .collect::>() + .join("\n"), + body.trim() + ) +} + +pub async fn generate_post_metadata( + ctx: &AppContext, + markdown: &str, +) -> Result { + let trimmed_markdown = markdown.trim(); + if trimmed_markdown.is_empty() { + return Err(Error::BadRequest("文章内容不能为空".to_string())); + } + + let settings = load_runtime_settings(ctx, false).await?; + let remote_result: Result = match ( + settings.api_base.clone(), + settings.api_key.clone(), + ) { + (Some(api_base), Some(api_key)) => { + let request = AiProviderRequest { + provider: settings.provider.clone(), + api_base, + api_key, + chat_model: settings.chat_model.clone(), + system_prompt: "你是博客后台的内容编辑助手。你只负责提取和整理文章元数据,输出必须是合法 JSON。不要输出额外解释。".to_string(), + prompt: build_post_metadata_prompt(trimmed_markdown), + }; + + let client = Client::new(); + let response = request_json( + &client, + &build_provider_url(&request), + &request.api_key, + build_provider_payload(&request, false), + ) 
+ .await; + + match response { + Ok(response) => { + let text = extract_provider_text(&response).ok_or_else(|| { + Error::BadRequest("AI 元数据响应里没有可读取内容。".to_string()) + })?; + let json_block = take_json_block(&text).ok_or_else(|| { + Error::BadRequest("AI 返回的元数据不是合法 JSON。".to_string()) + })?; + let draft = serde_json::from_str::(&json_block) + .map_err(|error| { + Error::BadRequest(format!("AI 元数据解析失败: {error}")) + })?; + + Ok(normalize_generated_metadata(trimmed_markdown, draft)) + } + Err(error) => Err(error), + } + } + _ => Err(Error::BadRequest( + "AI 服务未配置完整,已自动切换为本地智能推断。".to_string(), + )), + }; + + match remote_result { + Ok(metadata) => Ok(metadata), + Err(error) => { + tracing::warn!("AI metadata generation fallback: {error}"); + Ok(fallback_generated_metadata(trimmed_markdown)) + } + } +} + +pub async fn polish_post_markdown( + ctx: &AppContext, + markdown: &str, +) -> Result { + let trimmed_markdown = markdown.trim(); + if trimmed_markdown.is_empty() { + return Err(Error::BadRequest("文章内容不能为空".to_string())); + } + + let settings = load_runtime_settings(ctx, false).await?; + let remote_result: Result = match ( + settings.api_base.clone(), + settings.api_key.clone(), + ) { + (Some(api_base), Some(api_key)) => { + let request = AiProviderRequest { + provider: settings.provider.clone(), + api_base, + api_key, + chat_model: settings.chat_model.clone(), + system_prompt: "你是博客后台的资深编辑。你的任务是把用户给出的 Markdown 文档润色成更适合发布的版本,并且只返回完整 Markdown。".to_string(), + prompt: build_polish_markdown_prompt(trimmed_markdown), + }; + + let client = Client::new(); + let response = request_json( + &client, + &build_provider_url(&request), + &request.api_key, + build_provider_payload(&request, false), + ) + .await; + + match response { + Ok(response) => { + let polished_markdown = extract_provider_text(&response).ok_or_else(|| { + Error::BadRequest("AI 润色响应里没有可读取内容。".to_string()) + })?; + + Ok(PolishedPostMarkdown { + polished_markdown: normalize_newlines(&polished_markdown), + }) + } 
+ Err(error) => Err(error), + } + } + _ => Err(Error::BadRequest( + "AI 服务未配置完整,已自动切换为本地智能推断。".to_string(), + )), + }; + + match remote_result { + Ok(result) => Ok(result), + Err(error) => { + tracing::warn!("AI post polish fallback: {error}"); + Ok(PolishedPostMarkdown { + polished_markdown: fallback_polished_markdown(trimmed_markdown), + }) + } + } +} + +pub async fn generate_post_cover_image( + ctx: &AppContext, + title: &str, + description: Option<&str>, + category: Option<&str>, + tags: &[String], + post_type: &str, + slug: Option<&str>, + markdown: &str, +) -> Result { + let trimmed_title = title.trim(); + let trimmed_markdown = markdown.trim(); + + if trimmed_title.is_empty() && trimmed_markdown.is_empty() { + return Err(Error::BadRequest( + "请至少填写标题或正文,再生成封面图。".to_string(), + )); + } + + let settings = load_runtime_settings(ctx, false).await?; + let api_base = settings + .api_base + .clone() + .ok_or_else(|| Error::BadRequest("AI API Base 未配置,无法生成封面图。".to_string()))?; + let api_key = settings + .api_key + .clone() + .ok_or_else(|| Error::BadRequest("AI API Key 未配置,无法生成封面图。".to_string()))?; + let prompt = build_post_cover_prompt( + if trimmed_title.is_empty() { + "未命名文章" + } else { + trimmed_title + }, + description, + category, + tags, + post_type, + trimmed_markdown, + ); + let payload = json!({ + "model": DEFAULT_IMAGE_MODEL, + "prompt": prompt, + "size": "1536x1024", + "quality": "high", + "output_format": "png" + }); + + let client = Client::new(); + let response = request_json( + &client, + &build_image_generation_url(&settings.provider, &api_base), + &api_key, + payload, + ) + .await?; + let image_result = extract_image_generation_result(&response) + .ok_or_else(|| Error::BadRequest("AI 封面图响应里没有可读取图片。".to_string()))?; + let image_url = if image_result.starts_with("http://") || image_result.starts_with("https://") { + image_result + } else { + let slug_hint = slug + .map(str::trim) + .filter(|value| !value.is_empty()) + .unwrap_or(trimmed_title); + 
persist_generated_cover_image(slug_hint, &image_result)? + }; + + Ok(GeneratedPostCoverImage { image_url, prompt }) +} + fn extract_response_output(value: &Value) -> Option { if let Some(text) = value.get("output_text").and_then(Value::as_str) { let trimmed = text.trim(); @@ -527,26 +1382,44 @@ fn extract_response_output(value: &Value) -> Option { } } + if let Some(text) = value.get("text").and_then(Value::as_str) { + let trimmed = text.trim(); + if !trimmed.is_empty() { + return Some(trimmed.to_string()); + } + } + + if let Some(response) = value.get("response") { + if let Some(text) = extract_response_output(response) { + return Some(text); + } + } + + if let Some(item) = value.get("item") { + if let Some(text) = extract_response_output(item) { + return Some(text); + } + } + + if let Some(part) = value.get("part") { + if let Some(text) = extract_response_output(part) { + return Some(text); + } + } + + if let Some(content_items) = value.get("content").and_then(Value::as_array) { + if let Some(text) = extract_text_from_content_items(content_items) { + return Some(text); + } + } + let output_items = value.get("output").and_then(Value::as_array)?; let mut segments = Vec::new(); for item in output_items { - let Some(content_items) = item.get("content").and_then(Value::as_array) else { - continue; - }; - - for content in content_items { - if let Some(text) = content.get("text").and_then(Value::as_str) { - segments.push(text.to_string()); - continue; - } - - if let Some(text) = content - .get("output_text") - .and_then(|output_text| output_text.get("text")) - .and_then(Value::as_str) - { - segments.push(text.to_string()); + if let Some(content_items) = item.get("content").and_then(Value::as_array) { + if let Some(text) = extract_text_from_content_items(content_items) { + segments.push(text); } } } @@ -589,11 +1462,115 @@ fn build_chat_prompt(question: &str, matches: &[ScoredChunk]) -> String { ) } +fn is_profile_question(question: &str) -> bool { + let normalized = 
question.trim().to_lowercase(); + [ + "站长", + "博主", + "作者", + "个人介绍", + "个人信息", + "技术栈", + "github", + "邮箱", + "联系方式", + "init.cool", + "owner", + "author", + "maintainer", + "profile", + "tech stack", + "who runs", + "who built", + ] + .iter() + .any(|keyword| normalized.contains(keyword)) +} + +async fn prioritize_profile_matches( + ctx: &AppContext, + question: &str, + matches: Vec, + limit: usize, +) -> Result> { + if !is_profile_question(question) { + return Ok(matches); + } + + let statement = Statement::from_sql_and_values( + DbBackend::Postgres, + r#" + SELECT + source_slug, + source_title, + source_type, + chunk_index, + content, + content_preview, + word_count, + 1.0::float8 AS score + FROM ai_chunks + WHERE source_type = $1 + ORDER BY chunk_index ASC + LIMIT $2 + "#, + ["profile".into(), (limit as i64).into()], + ); + + let profile_rows = SimilarChunkRow::find_by_statement(statement) + .all(&ctx.db) + .await? + .into_iter() + .map(|row| ScoredChunk { + score: row.score, + row: ai_chunks::Model { + created_at: Utc::now().into(), + updated_at: Utc::now().into(), + id: 0, + source_slug: row.source_slug, + source_title: row.source_title, + source_path: None, + source_type: row.source_type, + chunk_index: row.chunk_index, + content: row.content, + content_preview: row.content_preview, + embedding: None, + word_count: row.word_count, + }, + }) + .collect::>(); + + if profile_rows.is_empty() { + return Ok(matches); + } + + let mut merged = profile_rows; + + for item in matches { + let duplicated = merged.iter().any(|existing| { + existing.row.source_slug == item.row.source_slug + && existing.row.chunk_index == item.row.chunk_index + }); + + if !duplicated { + merged.push(item); + } + } + + merged.truncate(limit); + Ok(merged) +} + fn build_sources(matches: &[ScoredChunk]) -> Vec { matches .iter() .map(|item| AiSource { slug: item.row.source_slug.clone(), + href: if item.row.source_type == "profile" { + "/about".to_string() + } else { + format!("/articles/{}", 
item.row.source_slug) + }, title: item .row .source_title @@ -639,7 +1616,6 @@ pub(crate) fn build_provider_payload(request: &AiProviderRequest, stream: bool) "reasoning": { "effort": DEFAULT_REASONING_EFFORT }, - "max_output_tokens": 520, "store": !DEFAULT_DISABLE_RESPONSE_STORAGE, "stream": stream }) @@ -663,13 +1639,126 @@ pub(crate) fn build_provider_payload(request: &AiProviderRequest, stream: bool) } pub(crate) fn build_provider_url(request: &AiProviderRequest) -> String { + let api_base = normalize_provider_api_base(&request.provider, &request.api_base); let path = if provider_uses_responses(&request.provider) { "/responses" } else { "/chat/completions" }; - build_endpoint(&request.api_base, path) + build_endpoint(&api_base, path) +} + +#[cfg(test)] +mod tests { + use super::{ + build_provider_url, extract_provider_text, is_profile_question, + normalize_provider_api_base, parse_provider_sse_body, AiProviderRequest, + }; + + fn build_request(provider: &str, api_base: &str) -> AiProviderRequest { + AiProviderRequest { + provider: provider.to_string(), + api_base: api_base.to_string(), + api_key: "test-key".to_string(), + chat_model: "gpt-5.4".to_string(), + system_prompt: "system".to_string(), + prompt: "hello".to_string(), + } + } + + #[test] + fn normalize_provider_api_base_adds_v1_for_root_openai_style_urls() { + assert_eq!( + normalize_provider_api_base("newapi", "https://91code.jiangnight.com"), + "https://91code.jiangnight.com/v1" + ); + } + + #[test] + fn normalize_provider_api_base_keeps_existing_version_path() { + assert_eq!( + normalize_provider_api_base("newapi", "https://91code.jiangnight.com/v1/"), + "https://91code.jiangnight.com/v1" + ); + } + + #[test] + fn normalize_provider_api_base_preserves_custom_subpaths() { + assert_eq!( + normalize_provider_api_base("openai-compatible", "https://proxy.example.com/openai"), + "https://proxy.example.com/openai" + ); + } + + #[test] + fn build_provider_url_uses_normalized_base_for_responses_api() { + let 
request = build_request("newapi", "https://91code.jiangnight.com"); + + assert_eq!( + build_provider_url(&request), + "https://91code.jiangnight.com/v1/responses" + ); + } + + #[test] + fn profile_question_detects_owner_keywords() { + assert!(is_profile_question("站长的技术栈和个人介绍是什么?")); + assert!(is_profile_question("Who runs init.cool?")); + } + + #[test] + fn parse_provider_sse_body_extracts_final_response_payload() { + let body = concat!( + "event: response.created\n", + "data: {\"type\":\"response.created\",\"response\":{\"id\":\"resp_1\",\"output\":[]}}\n\n", + "event: response.completed\n", + "data: {\"type\":\"response.completed\",\"response\":{\"id\":\"resp_1\",\"output\":[{\"type\":\"message\",\"content\":[{\"type\":\"output_text\",\"text\":\"ok\"}]}]}}\n\n" + ); + + let parsed = parse_provider_sse_body(body).expect("expected SSE body to parse"); + + assert_eq!(parsed["id"], "resp_1"); + assert_eq!(parsed["output"][0]["content"][0]["text"], "ok"); + } + + #[test] + fn extract_provider_text_reads_completed_response_payload() { + let payload = serde_json::json!({ + "response": { + "output": [ + { + "type": "message", + "content": [ + { + "type": "output_text", + "text": "ok" + } + ] + } + ] + } + }); + + assert_eq!(extract_provider_text(&payload).as_deref(), Some("ok")); + } + + #[test] + fn extract_provider_text_reads_output_item_done_payload() { + let payload = serde_json::json!({ + "item": { + "type": "message", + "content": [ + { + "type": "output_text", + "text": "hello" + } + ] + } + }); + + assert_eq!(extract_provider_text(&payload).as_deref(), Some("hello")); + } } pub(crate) fn extract_provider_text(value: &Value) -> Option { @@ -691,6 +1780,40 @@ async fn request_chat_answer(request: &AiProviderRequest) -> Result { }) } +pub async fn test_provider_connectivity( + provider: &str, + api_base: &str, + api_key: &str, + chat_model: &str, +) -> Result { + let provider = trim_to_option(Some(provider.to_string())) + .unwrap_or_else(|| 
DEFAULT_AI_PROVIDER.to_string()); + let api_base = trim_to_option(Some(api_base.to_string())) + .ok_or_else(|| Error::BadRequest("请先填写 API 地址".to_string()))?; + let api_key = trim_to_option(Some(api_key.to_string())) + .ok_or_else(|| Error::BadRequest("请先填写 API 密钥".to_string()))?; + let chat_model = trim_to_option(Some(chat_model.to_string())) + .ok_or_else(|| Error::BadRequest("请先填写对话模型".to_string()))?; + + let request = AiProviderRequest { + provider: provider.clone(), + api_base, + api_key, + chat_model: chat_model.clone(), + system_prompt: "你是一个连通性检测助手。".to_string(), + prompt: "请只回复 pong".to_string(), + }; + let reply = request_chat_answer(&request).await?; + let reply_preview = reply.trim().chars().take(160).collect::(); + + Ok(AiProviderConnectivityResult { + provider, + endpoint: build_provider_url(&request), + chat_model, + reply_preview, + }) +} + pub(crate) async fn prepare_answer(ctx: &AppContext, question: &str) -> Result { let trimmed_question = question.trim(); if trimmed_question.is_empty() { @@ -847,6 +1970,7 @@ async fn retrieve_matches( SELECT source_slug, source_title, + source_type, chunk_index, content, content_preview, @@ -873,7 +1997,7 @@ async fn retrieve_matches( source_slug: row.source_slug, source_title: row.source_title, source_path: None, - source_type: "post".to_string(), + source_type: row.source_type, chunk_index: row.chunk_index, content: row.content, content_preview: row.content_preview, @@ -883,13 +2007,16 @@ async fn retrieve_matches( }) .collect::>(); + let matches = prioritize_profile_matches(ctx, question, matches, settings.top_k).await?; + Ok((matches, indexed_chunks, last_indexed_at)) } pub async fn rebuild_index(ctx: &AppContext) -> Result { let settings = load_runtime_settings(ctx, false).await?; let posts = content::sync_markdown_posts(ctx).await?; - let chunk_drafts = build_chunks(&posts, settings.chunk_size); + let mut chunk_drafts = build_chunks(&posts, settings.chunk_size); + 
chunk_drafts.extend(build_profile_chunks(&settings.raw, settings.chunk_size)); let embeddings = if chunk_drafts.is_empty() { Vec::new() } else { @@ -957,9 +2084,10 @@ pub async fn answer_question(ctx: &AppContext, question: &str) -> Result, slug: Option, description: Option, - category: Option, + #[serde( + default, + alias = "category", + alias = "categories", + deserialize_with = "deserialize_optional_string_list" + )] + categories: Option>, + #[serde(default, deserialize_with = "deserialize_optional_string_list")] tags: Option>, post_type: Option, image: Option, + images: Option>, pinned: Option, published: Option, + draft: Option, } #[derive(Debug, Clone, Serialize)] @@ -36,6 +45,7 @@ pub struct MarkdownPost { pub tags: Vec, pub post_type: String, pub image: Option, + pub images: Vec, pub pinned: bool, pub published: bool, pub file_path: String, @@ -51,6 +61,7 @@ pub struct MarkdownPostDraft { pub tags: Vec, pub post_type: String, pub image: Option, + pub images: Vec, pub pinned: bool, pub published: bool, } @@ -104,13 +115,71 @@ fn trim_to_option(input: Option) -> Option { }) } +fn normalize_string_list(values: Option>) -> Vec { + values + .unwrap_or_default() + .into_iter() + .map(|item| item.trim().to_string()) + .filter(|item| !item.is_empty()) + .collect() +} + +fn split_inline_list(value: &str) -> Vec { + value + .split([',', ',']) + .map(|item| item.trim().to_string()) + .filter(|item| !item.is_empty()) + .collect() +} + +fn deserialize_optional_string_list<'de, D>( + deserializer: D, +) -> std::result::Result>, D::Error> +where + D: Deserializer<'de>, +{ + let raw = Option::::deserialize(deserializer)?; + + match raw { + None | Some(serde_yaml::Value::Null) => Ok(None), + Some(serde_yaml::Value::String(value)) => { + let items = split_inline_list(&value); + if items.is_empty() && !value.trim().is_empty() { + Ok(Some(vec![value.trim().to_string()])) + } else if items.is_empty() { + Ok(None) + } else { + Ok(Some(items)) + } + } + 
Some(serde_yaml::Value::Sequence(items)) => Ok(Some( + items + .into_iter() + .filter_map(|item| match item { + serde_yaml::Value::String(value) => { + let trimmed = value.trim().to_string(); + (!trimmed.is_empty()).then_some(trimmed) + } + serde_yaml::Value::Number(value) => Some(value.to_string()), + _ => None, + }) + .collect(), + )), + Some(other) => Err(serde::de::Error::custom(format!( + "unsupported frontmatter list value: {other:?}" + ))), + } +} + fn slugify(value: &str) -> String { let mut slug = String::new(); let mut last_was_dash = false; for ch in value.trim().chars() { - if ch.is_ascii_alphanumeric() { - slug.push(ch.to_ascii_lowercase()); + if ch.is_alphanumeric() { + for lower in ch.to_lowercase() { + slug.push(lower); + } last_was_dash = false; } else if (ch.is_whitespace() || ch == '-' || ch == '_') && !last_was_dash { slug.push('-'); @@ -208,7 +277,9 @@ fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result< .unwrap_or_else(|| slug.clone()); let description = trim_to_option(frontmatter.description.clone()).or_else(|| excerpt_from_content(&content)); - let category = trim_to_option(frontmatter.category.clone()); + let category = normalize_string_list(frontmatter.categories.clone()) + .into_iter() + .next(); let tags = frontmatter .tags .unwrap_or_default() @@ -227,8 +298,11 @@ fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result< post_type: trim_to_option(frontmatter.post_type.clone()) .unwrap_or_else(|| "article".to_string()), image: trim_to_option(frontmatter.image.clone()), + images: normalize_string_list(frontmatter.images.clone()), pinned: frontmatter.pinned.unwrap_or(false), - published: frontmatter.published.unwrap_or(true), + published: frontmatter + .published + .unwrap_or(!frontmatter.draft.unwrap_or(false)), file_path: file_path.to_string(), }) } @@ -266,6 +340,13 @@ fn build_markdown_document(post: &MarkdownPost) -> String { lines.push(format!("image: {}", image)); } + if 
!post.images.is_empty() { + lines.push("images:".to_string()); + for image in &post.images { + lines.push(format!(" - {}", image)); + } + } + if !post.tags.is_empty() { lines.push("tags:".to_string()); for tag in &post.tags { @@ -307,6 +388,7 @@ fn ensure_markdown_posts_bootstrapped() -> Result<()> { tags: fixture.tags.unwrap_or_default(), post_type: "article".to_string(), image: None, + images: Vec::new(), pinned: fixture.pinned.unwrap_or(false), published: fixture.published.unwrap_or(true), file_path: markdown_post_path(&fixture.slug) @@ -470,7 +552,11 @@ async fn canonicalize_tags(ctx: &AppContext, raw_tags: &[String]) -> Result Result<()> { - fs::write(markdown_post_path(&post.slug), build_markdown_document(post)).map_err(io_error) + fs::write( + markdown_post_path(&post.slug), + build_markdown_document(post), + ) + .map_err(io_error) } pub fn rewrite_category_references( @@ -701,6 +787,17 @@ pub async fn sync_markdown_posts(ctx: &AppContext) -> Result> }); model.post_type = Set(Some(post.post_type.clone())); model.image = Set(post.image.clone()); + model.images = Set(if post.images.is_empty() { + None + } else { + Some(Value::Array( + post.images + .iter() + .cloned() + .map(Value::String) + .collect::>(), + )) + }); model.pinned = Set(Some(post.pinned)); if has_existing { @@ -796,6 +893,7 @@ pub async fn create_markdown_post( } }, image: trim_to_option(draft.image), + images: normalize_string_list(Some(draft.images)), pinned: draft.pinned, published: draft.published, file_path: markdown_post_path(&slug).to_string_lossy().to_string(), diff --git a/backend/target-codex-ai-fix/.rustc_info.json b/backend/target-codex-ai-fix/.rustc_info.json new file mode 100644 index 0000000..18c0488 --- /dev/null +++ b/backend/target-codex-ai-fix/.rustc_info.json @@ -0,0 +1 @@ +{"rustc_fingerprint":10734737548331824535,"outputs":{"17747080675513052775":{"success":true,"status":"","code":0,"stdout":"rustc 1.92.0 (ded5c06cf 2025-12-08)\nbinary: rustc\ncommit-hash: 
ded5c06cf21d2b93bffd5d884aa6e96934ee4234\ncommit-date: 2025-12-08\nhost: x86_64-pc-windows-msvc\nrelease: 1.92.0\nLLVM version: 21.1.3\n","stderr":""},"7971740275564407648":{"success":true,"status":"","code":0,"stdout":"___.exe\nlib___.rlib\n___.dll\n___.dll\n___.lib\n___.dll\nC:\\Users\\Andorid\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\npacked\n___\ndebug_assertions\npanic=\"unwind\"\nproc_macro\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"msvc\"\ntarget_family=\"windows\"\ntarget_feature=\"cmpxchg16b\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_feature=\"sse3\"\ntarget_has_atomic=\"128\"\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_os=\"windows\"\ntarget_pointer_width=\"64\"\ntarget_vendor=\"pc\"\nwindows\n","stderr":""}},"successes":{}} \ No newline at end of file diff --git a/backend/target-codex-ai-fix/CACHEDIR.TAG b/backend/target-codex-ai-fix/CACHEDIR.TAG new file mode 100644 index 0000000..20d7c31 --- /dev/null +++ b/backend/target-codex-ai-fix/CACHEDIR.TAG @@ -0,0 +1,3 @@ +Signature: 8a477f597d28d172789f06886806bc55 +# This file is a cache directory tag created by cargo. 
+# For information about cache directory tags see https://bford.info/cachedir/ diff --git a/dev.ps1 b/dev.ps1 index fcc25f0..c3ba5aa 100644 --- a/dev.ps1 +++ b/dev.ps1 @@ -1,73 +1,231 @@ param( + [ValidateSet("frontend", "backend", "admin", "mcp")] + [string]$Only, + [switch]$Spawn, + [switch]$WithMcp, + [switch]$Install, + [string]$DatabaseUrl = "postgres://postgres:postgres%402025%21@10.0.0.2:5432/termi-api_development", + [string]$McpApiKey = "termi-mcp-local-dev-key", + [string]$McpBackendApiBase = "http://127.0.0.1:5150/api", + [int]$McpPort = 5151, [switch]$FrontendOnly, [switch]$BackendOnly, [switch]$AdminOnly, - [switch]$McpOnly, - [switch]$WithMcp, - [string]$DatabaseUrl = "postgres://postgres:postgres%402025%21@10.0.0.2:5432/termi-api_development" + [switch]$McpOnly ) $ErrorActionPreference = "Stop" $repoRoot = Split-Path -Parent $MyInvocation.MyCommand.Path -$frontendScript = Join-Path $repoRoot "start-frontend.ps1" -$backendScript = Join-Path $repoRoot "start-backend.ps1" -$adminScript = Join-Path $repoRoot "start-admin.ps1" -$mcpScript = Join-Path $repoRoot "start-mcp.ps1" +$devScriptPath = $MyInvocation.MyCommand.Path +$serviceOrder = @("frontend", "admin", "backend") -if (@($FrontendOnly, $BackendOnly, $AdminOnly, $McpOnly).Where({ $_ }).Count -gt 1) { - throw "Use only one of -FrontendOnly, -BackendOnly, -AdminOnly, or -McpOnly." +function Resolve-TargetService { + if ($Only) { + return $Only + } + + $legacyTargets = @( + @{ Enabled = $FrontendOnly; Name = "frontend" } + @{ Enabled = $BackendOnly; Name = "backend" } + @{ Enabled = $AdminOnly; Name = "admin" } + @{ Enabled = $McpOnly; Name = "mcp" } + ) | Where-Object { $_.Enabled } + + if ($legacyTargets.Count -gt 1) { + throw "Use only one of -Only, -FrontendOnly, -BackendOnly, -AdminOnly, or -McpOnly." 
+ } + + if ($legacyTargets.Count -eq 1) { + return $legacyTargets[0].Name + } + + return $null } -if ($FrontendOnly) { - & $frontendScript - exit $LASTEXITCODE +function Invoke-RepoCommand { + param( + [string]$Name, + [string]$WorkingDirectory, + [scriptblock]$Run, + [switch]$UsesNode + ) + + if (-not (Test-Path $WorkingDirectory)) { + throw "$Name directory not found: $WorkingDirectory" + } + + Push-Location $WorkingDirectory + + try { + if ($UsesNode -and ($Install -or -not (Test-Path (Join-Path $WorkingDirectory "node_modules")))) { + Write-Host "[$Name] Installing dependencies..." -ForegroundColor Cyan + npm install + if ($LASTEXITCODE -ne 0) { + throw "npm install failed for $Name" + } + } + + & $Run + if ($LASTEXITCODE -ne 0) { + throw "$Name failed to start" + } + } + finally { + Pop-Location + } } -if ($BackendOnly) { - & $backendScript -DatabaseUrl $DatabaseUrl - exit $LASTEXITCODE +function Start-Frontend { + Invoke-RepoCommand ` + -Name "frontend" ` + -WorkingDirectory (Join-Path $repoRoot "frontend") ` + -UsesNode ` + -Run { + Write-Host "[frontend] Starting Astro dev server..." -ForegroundColor Green + npm run dev + } } -if ($AdminOnly) { - & $adminScript - exit $LASTEXITCODE +function Start-Admin { + Invoke-RepoCommand ` + -Name "admin" ` + -WorkingDirectory (Join-Path $repoRoot "admin") ` + -UsesNode ` + -Run { + Write-Host "[admin] Starting Vite admin workspace..." -ForegroundColor Green + npm run dev + } } -if ($McpOnly) { - & $mcpScript - exit $LASTEXITCODE +function Start-Backend { + Invoke-RepoCommand ` + -Name "backend" ` + -WorkingDirectory (Join-Path $repoRoot "backend") ` + -Run { + $env:DATABASE_URL = $DatabaseUrl + Write-Host "[backend] DATABASE_URL set to $DatabaseUrl" -ForegroundColor Cyan + Write-Host "[backend] Starting Loco.rs server..." 
-ForegroundColor Green + cargo loco start 2>&1 + } } -$services = if ($WithMcp) { "frontend, admin, backend, and MCP" } else { "frontend, admin, and backend" } -Write-Host "[monorepo] Starting $services in separate PowerShell windows..." -ForegroundColor Cyan +function Start-Mcp { + Invoke-RepoCommand ` + -Name "mcp" ` + -WorkingDirectory (Join-Path $repoRoot "mcp-server") ` + -UsesNode ` + -Run { + $env:TERMI_MCP_API_KEY = $McpApiKey + $env:TERMI_BACKEND_API_BASE = $McpBackendApiBase + $env:TERMI_MCP_PORT = "$McpPort" -Start-Process powershell -ArgumentList @( - "-NoExit", - "-ExecutionPolicy", "Bypass", - "-File", $frontendScript -) + Write-Host "[mcp] Backend API base set to $McpBackendApiBase" -ForegroundColor Cyan + Write-Host "[mcp] Starting MCP server on port $McpPort..." -ForegroundColor Green + npm run start + } +} -Start-Process powershell -ArgumentList @( - "-NoExit", - "-ExecutionPolicy", "Bypass", - "-File", $backendScript, - "-DatabaseUrl", $DatabaseUrl -) +function Invoke-Service { + param([string]$Name) -Start-Process powershell -ArgumentList @( - "-NoExit", - "-ExecutionPolicy", "Bypass", - "-File", $adminScript -) + switch ($Name) { + "frontend" { Start-Frontend; return } + "admin" { Start-Admin; return } + "backend" { Start-Backend; return } + "mcp" { Start-Mcp; return } + default { throw "Unsupported service: $Name" } + } +} -if ($WithMcp) { - Start-Process powershell -ArgumentList @( +function Get-ServiceLaunchArguments { + param([string]$Name) + + $arguments = @( + "powershell", "-NoExit", "-ExecutionPolicy", "Bypass", - "-File", $mcpScript + "-File", $devScriptPath, + "-Only", $Name ) + + if ($Install -and $Name -ne "backend") { + $arguments += "-Install" + } + + if ($Name -eq "backend") { + $arguments += @("-DatabaseUrl", $DatabaseUrl) + } + + if ($Name -eq "mcp") { + $arguments += @( + "-McpApiKey", $McpApiKey, + "-McpBackendApiBase", $McpBackendApiBase, + "-McpPort", $McpPort + ) + } + + return $arguments } -$servicesStarted = if 
($WithMcp) { "Frontend, admin, backend, and MCP windows started." } else { "Frontend, admin, and backend windows started." } -Write-Host "[monorepo] $servicesStarted" -ForegroundColor Green +function Start-ServiceWindow { + param([string]$Name) + + $arguments = Get-ServiceLaunchArguments -Name $Name + Start-Process powershell -ArgumentList $arguments[1..($arguments.Length - 1)] +} + +function Start-ServiceHost { + param([string[]]$Services) + + $wt = Get-Command wt.exe -ErrorAction SilentlyContinue + if (-not $wt) { + Write-Warning "[dev] Windows Terminal (wt.exe) not found. Falling back to separate PowerShell windows." + foreach ($service in $Services) { + Start-ServiceWindow $service + } + return + } + + $wtArguments = @("-w", "0") + $isFirst = $true + + foreach ($service in $Services) { + if (-not $isFirst) { + $wtArguments += ";" + } + + $wtArguments += @( + "new-tab", + "--title", "termi:$service" + ) + $wtArguments += Get-ServiceLaunchArguments -Name $service + $isFirst = $false + } + + Start-Process -FilePath $wt.Source -ArgumentList $wtArguments +} + +$targetService = Resolve-TargetService + +if ($targetService -and -not $Spawn) { + Invoke-Service $targetService + exit $LASTEXITCODE +} + +$servicesToStart = [System.Collections.Generic.List[string]]::new() +if ($targetService) { + [void]$servicesToStart.Add($targetService) +} +else { + $serviceOrder | ForEach-Object { [void]$servicesToStart.Add($_) } + if ($WithMcp) { + [void]$servicesToStart.Add("mcp") + } +} + +$serviceLabel = ($servicesToStart -join ", ") +Write-Host "[dev] Starting $serviceLabel in one Windows Terminal window..." -ForegroundColor Cyan +Start-ServiceHost -Services $servicesToStart + +Write-Host "[dev] Ready. Use .\\stop-services.ps1 to stop everything." 
-ForegroundColor Green diff --git a/frontend/public/review-covers/black-myth-wukong.svg b/frontend/public/review-covers/black-myth-wukong.svg new file mode 100644 index 0000000..6a2eb52 --- /dev/null +++ b/frontend/public/review-covers/black-myth-wukong.svg @@ -0,0 +1,24 @@ + + + + + + + + + + 黑神话:悟空 + BLACK MYTH / WUKONG / GAME + + + + + + + + + + + + + diff --git a/frontend/public/review-covers/hero-dreams-in-tired-life.svg b/frontend/public/review-covers/hero-dreams-in-tired-life.svg new file mode 100644 index 0000000..a7f7139 --- /dev/null +++ b/frontend/public/review-covers/hero-dreams-in-tired-life.svg @@ -0,0 +1,22 @@ + + + + + + + + + + + LATE NIGHT LOOP / INDIE POP + 疲惫生活中的 + 英雄梦想 + MUSIC REVIEW / MIDNIGHT LISTENING + + + + + + + + diff --git a/frontend/public/review-covers/journey-to-the-west-editorial.svg b/frontend/public/review-covers/journey-to-the-west-editorial.svg new file mode 100644 index 0000000..7e3fa14 --- /dev/null +++ b/frontend/public/review-covers/journey-to-the-west-editorial.svg @@ -0,0 +1,19 @@ + + + + + + + + + RETRO SCI-FI / FIELD NOTES + 宇宙探索编辑部 + JOURNEY TO THE WEST EDITORIAL + + + + + + + + diff --git a/frontend/public/review-covers/placed-within.svg b/frontend/public/review-covers/placed-within.svg new file mode 100644 index 0000000..5c66389 --- /dev/null +++ b/frontend/public/review-covers/placed-within.svg @@ -0,0 +1,20 @@ + + + + + + + + + + MACRO / CHINA / NOTES + 置身事内 + ECONOMY / NONFICTION / BOOK + + + + + + + + diff --git a/frontend/public/review-covers/the-long-season.svg b/frontend/public/review-covers/the-long-season.svg new file mode 100644 index 0000000..ea9099d --- /dev/null +++ b/frontend/public/review-covers/the-long-season.svg @@ -0,0 +1,22 @@ + + + + + + + + + + + + 漫长的季节 + THE LONG SEASON + FILM LOG / 2024 / NO.01 + + + + + + + + diff --git a/frontend/public/review-covers/thirteen-invites.svg b/frontend/public/review-covers/thirteen-invites.svg new file mode 100644 index 0000000..bfd002c --- /dev/null +++ 
b/frontend/public/review-covers/thirteen-invites.svg @@ -0,0 +1,20 @@ + + + + + + + + + + INTERVIEW DOSSIER + 十三邀 + THIRTEEN INVITES / VOL.13 + + + + + + + + diff --git a/frontend/src/components/Comments.astro b/frontend/src/components/Comments.astro index e2a0bab..f9bbcc9 100644 --- a/frontend/src/components/Comments.astro +++ b/frontend/src/components/Comments.astro @@ -40,7 +40,7 @@ function formatCommentDate(dateStr: string): string {
- discussion buffer + {t('comments.kicker')}
@@ -72,7 +72,7 @@ function formatCommentDate(dateStr: string): string { type="text" name="nickname" required - placeholder="anonymous_operator" + placeholder={t('comments.nicknamePlaceholder')} class="terminal-form-input" />
@@ -83,7 +83,7 @@ function formatCommentDate(dateStr: string): string {
@@ -210,7 +210,7 @@ function formatCommentDate(dateStr: string): string { const replyBtns = document.querySelectorAll('.reply-btn'); const messageBox = document.getElementById('comment-message'); const postSlug = wrapper?.getAttribute('data-post-slug') || ''; - const apiBase = wrapper?.getAttribute('data-api-base') || 'http://localhost:5150/api'; + const apiBase = wrapper?.getAttribute('data-api-base') || '/api'; function showMessage(message: string, type: 'success' | 'error' | 'info') { if (!messageBox) return; @@ -319,7 +319,7 @@ function formatCommentDate(dateStr: string): string { formContainer?.classList.add('hidden'); showMessage(t('comments.submitSuccess'), 'success'); } catch (error) { - showMessage(t('comments.submitFailed', { message: error instanceof Error ? error.message : 'unknown error' }), 'error'); + showMessage(t('comments.submitFailed', { message: error instanceof Error ? error.message : t('common.unknownError') }), 'error'); } }); diff --git a/frontend/src/components/Footer.astro b/frontend/src/components/Footer.astro index e71b8d4..6654a59 100644 --- a/frontend/src/components/Footer.astro +++ b/frontend/src/components/Footer.astro @@ -75,8 +75,8 @@ const tools = [
-

- user@{siteSettings.siteShortName.toLowerCase()}:~$ echo "{siteSettings.siteDescription}" +

+ {t('footer.summary')}

diff --git a/frontend/src/components/FriendLinkApplication.astro b/frontend/src/components/FriendLinkApplication.astro index 0aa7a75..05a6adc 100644 --- a/frontend/src/components/FriendLinkApplication.astro +++ b/frontend/src/components/FriendLinkApplication.astro @@ -130,21 +130,21 @@ const { t } = getI18n(Astro);

{t('friends.name')}: {siteSettings.siteName} -

{t('friends.link')}: {siteSettings.siteUrl} -

{t('friends.description')}: {siteSettings.siteDescription} -

@@ -178,7 +178,7 @@ const { t } = getI18n(Astro); const reciprocalInfo = document.getElementById('reciprocal-info') as HTMLDivElement | null; const messageDiv = document.getElementById('form-message') as HTMLDivElement | null; const copyBtns = document.querySelectorAll('.copy-btn'); - const apiBase = wrapper?.getAttribute('data-api-base') || 'http://localhost:5150/api'; + const apiBase = wrapper?.getAttribute('data-api-base') || '/api'; reciprocalCheckbox?.addEventListener('change', () => { reciprocalInfo?.classList.toggle('hidden', !reciprocalCheckbox.checked); @@ -248,7 +248,7 @@ const { t } = getI18n(Astro); reciprocalInfo?.classList.add('hidden'); showMessage(t('friendForm.submitSuccess'), 'success'); } catch (error) { - showMessage(t('friendForm.submitFailed', { message: error instanceof Error ? error.message : 'unknown error' }), 'error'); + showMessage(t('friendForm.submitFailed', { message: error instanceof Error ? error.message : t('common.unknownError') }), 'error'); } }); diff --git a/frontend/src/components/FriendLinkCard.astro b/frontend/src/components/FriendLinkCard.astro index 5a86f09..2d18c0e 100644 --- a/frontend/src/components/FriendLinkCard.astro +++ b/frontend/src/components/FriendLinkCard.astro @@ -14,7 +14,7 @@ const { t } = getI18n(Astro); href={friend.url} target="_blank" rel="noopener noreferrer" - class="terminal-panel group flex h-full items-start gap-4 p-4 transition-all duration-300 hover:-translate-y-1 hover:border-[var(--primary)]" + class="terminal-panel terminal-interactive-card group flex h-full items-start gap-4 p-4" >
{friend.avatar ? ( diff --git a/frontend/src/components/Header.astro b/frontend/src/components/Header.astro index 06bc835..8eadafc 100644 --- a/frontend/src/components/Header.astro +++ b/frontend/src/components/Header.astro @@ -1,4 +1,5 @@ --- +import { API_BASE_URL } from '../lib/api/client'; import { terminalConfig } from '../lib/config/terminal'; import { getI18n, SUPPORTED_LOCALES } from '../lib/i18n'; import type { SiteSettings } from '../lib/types'; @@ -14,6 +15,12 @@ const { const { locale, t, buildLocaleUrl } = getI18n(Astro); const aiEnabled = Boolean(Astro.props.siteSettings?.ai?.enabled); +const musicPlaylist = (Astro.props.siteSettings?.musicPlaylist || []).filter( + (item) => item?.title?.trim() && item?.url?.trim() +); +const musicPlaylistPayload = JSON.stringify(musicPlaylist); +const hasMusicPlaylist = musicPlaylist.length > 0; +const currentMusicTrack = hasMusicPlaylist ? musicPlaylist[0] : null; const navItems = [ { icon: 'fa-file-code', text: t('nav.articles'), href: '/articles' }, { icon: 'fa-folder', text: t('nav.categories'), href: '/categories' }, @@ -31,54 +38,34 @@ const localeLinks = SUPPORTED_LOCALES.map((item) => ({ shortLabel: item === 'zh-CN' ? '中' : 'EN', })); const currentPath = Astro.url.pathname; +const currentNavLabel = + navItems.find((item) => currentPath === item.href || (item.href !== '/' && currentPath.startsWith(item.href))) + ?.text || t('header.navigation'); ---
-
+
-
-
- - - +
+
+ + + - root@termi - {siteName} + {t('header.shellLabel')} + {siteName} - - -
- diff --git a/frontend/src/components/Lightbox.astro b/frontend/src/components/Lightbox.astro index 299cd2f..cc3b0ee 100644 --- a/frontend/src/components/Lightbox.astro +++ b/frontend/src/components/Lightbox.astro @@ -49,10 +49,10 @@ // Initialize lightbox for all article images function initLightbox() { - const content = document.querySelector('.article-content'); + const content = document.querySelector('[data-article-slug]'); if (!content) return; - images = Array.from(content.querySelectorAll('img')); + images = Array.from(content.querySelectorAll('.article-content img, [data-lightbox-image="true"]')); images.forEach((img, index) => { img.style.cursor = 'zoom-in'; diff --git a/frontend/src/components/ParagraphComments.astro b/frontend/src/components/ParagraphComments.astro index 4f9e4cd..c5e71e2 100644 --- a/frontend/src/components/ParagraphComments.astro +++ b/frontend/src/components/ParagraphComments.astro @@ -11,24 +11,32 @@ const { postSlug, class: className = '' } = Astro.props; const { t } = getI18n(Astro); --- -
-
-
+
+
+
- paragraph annotations + {t('paragraphComments.kicker')} -
-

{t('paragraphComments.title')}

-

- {t('paragraphComments.intro')} -

-
-
-
- - {t('paragraphComments.scanning')} +

+ {t('paragraphComments.scanning')} +

+ +
@@ -62,13 +70,17 @@ const { t } = getI18n(Astro); const wrappers = document.querySelectorAll('.paragraph-comments-shell'); const wrapper = wrappers.item(wrappers.length - 1) as HTMLElement | null; const postSlug = wrapper?.dataset.postSlug || ''; - const apiBase = wrapper?.dataset.apiBase || 'http://localhost:5150/api'; - const articleRoot = wrapper?.closest('[data-article-slug]') || document; - const articleContent = articleRoot.querySelector('.article-content') as HTMLElement | null; + const apiBase = wrapper?.dataset.apiBase || '/api'; + const storageKey = wrapper?.dataset.storageKey || 'termi:paragraph-comments'; + const articleRoot = wrapper?.closest('[data-article-slug]') as HTMLElement | null; + const articleContent = articleRoot?.querySelector('.article-content') as HTMLElement | null; const summaryText = wrapper?.querySelector('[data-summary-text]') as HTMLElement | null; + const toggleButton = wrapper?.querySelector('[data-display-toggle]') as HTMLButtonElement | null; + const toggleLabel = wrapper?.querySelector('[data-toggle-label]') as HTMLElement | null; + if (wrapper && articleRoot && articleContent && postSlug) { const paragraphCounts = new Map(); - const paragraphRows = new Map(); + const paragraphMarkers = new Map(); const paragraphDescriptors = new Map< string, ReturnType[number] @@ -79,6 +91,7 @@ const { t } = getI18n(Astro); let activeParagraphKey: string | null = null; let activeReplyToCommentId: number | null = null; let pendingCounter = 0; + let markersVisible = true; function escapeHtml(value: string): string { return value @@ -119,6 +132,14 @@ const { t } = getI18n(Astro); return t('paragraphComments.manyNotes', { count }); } + function markerCountText(count: number): string { + if (count <= 0) { + return ''; + } + + return count > 99 ? 
'99+' : String(count); + } + function previewReplyText(value: string | null | undefined, limit = 88) { const normalized = (value || '').replace(/\s+/g, ' ').trim(); if (normalized.length <= limit) { @@ -128,10 +149,6 @@ const { t } = getI18n(Astro); return `${normalized.slice(0, limit).trimEnd()}...`; } - function promptLabel(key: string, active: boolean) { - return active ? `./comment --paragraph ${key} --open` : `./comment --paragraph ${key}`; - } - function anchorForParagraph(key: string) { return `#paragraph-${key}`; } @@ -146,23 +163,6 @@ const { t } = getI18n(Astro); return key || null; } - function updateRowState() { - paragraphRows.forEach((row, rowKey) => { - const trigger = row.querySelector('[data-trigger-label]') as HTMLElement | null; - const prompt = row.querySelector('[data-command-text]') as HTMLElement | null; - const count = paragraphCounts.get(rowKey) || 0; - const isActive = rowKey === activeParagraphKey; - - row.classList.toggle('is-active', isActive); - if (trigger) { - trigger.textContent = countLabel(count); - } - if (prompt) { - prompt.textContent = promptLabel(rowKey, isActive); - } - }); - } - function updateSummaryFromCounts() { const paragraphCount = paragraphDescriptors.size; const discussedParagraphs = Array.from(paragraphCounts.values()).filter(count => count > 0).length; @@ -173,6 +173,11 @@ const { t } = getI18n(Astro); return; } + if (!markersVisible) { + setSummaryMessage(t('paragraphComments.markersHidden')); + return; + } + setSummaryMessage( t('paragraphComments.summary', { paragraphCount, @@ -182,30 +187,74 @@ const { t } = getI18n(Astro); ); } - function createParagraphRow(key: string, excerpt: string) { - const row = document.createElement('div'); - row.className = 'paragraph-comment-row'; - row.dataset.paragraphKey = key; - row.innerHTML = ` -
- user@blog:~/articles$ - ${escapeHtml(promptLabel(key, false))} -
-
- ${escapeHtml(t('paragraphComments.focusCurrent'))} - -
+ function updateMarkerState() { + paragraphMarkers.forEach((marker, key) => { + const count = paragraphCounts.get(key) || 0; + const countNode = marker.querySelector('[data-marker-count]') as HTMLElement | null; + const isActive = key === activeParagraphKey; + + marker.classList.toggle('has-comments', count > 0); + marker.classList.toggle('is-active', isActive); + marker.setAttribute( + 'aria-label', + count > 0 + ? `${t('paragraphComments.badgeLabel')} (${countLabel(count)})` + : t('paragraphComments.badgeLabel') + ); + + if (countNode) { + countNode.textContent = markerCountText(count); + countNode.classList.toggle('hidden', count <= 0); + } + }); + } + + function applyMarkerVisibility(visible: boolean, options?: { persist?: boolean }) { + markersVisible = visible; + + if (articleRoot) { + articleRoot.dataset.paragraphCommentsVisible = visible ? 'true' : 'false'; + } + + if (toggleButton) { + toggleButton.setAttribute('aria-pressed', visible ? 'true' : 'false'); + } + + if (toggleLabel) { + toggleLabel.textContent = visible + ? t('paragraphComments.hideMarkers') + : t('paragraphComments.showMarkers'); + } + + if (options?.persist !== false) { + localStorage.setItem(storageKey, visible ? 'true' : 'false'); + } + + if (!visible) { + closePanel(true); + } + + updateSummaryFromCounts(); + } + + function createParagraphMarker(key: string, excerpt: string) { + const marker = document.createElement('button'); + marker.type = 'button'; + marker.className = 'paragraph-comment-marker'; + marker.dataset.paragraphKey = key; + marker.title = excerpt; + marker.innerHTML = ` + + `; - const button = row.querySelector('.paragraph-comment-trigger') as HTMLButtonElement | null; - button?.addEventListener('click', () => { + marker.addEventListener('click', () => { void openPanelForParagraph(key, { focusForm: true, syncHash: true }); }); - return row; + return marker; } const panel = document.createElement('section'); @@ -215,7 +264,7 @@ const { t } = getI18n(Astro);
- paragraph thread + ${escapeHtml(t('paragraphComments.panelKicker'))}

${escapeHtml(t('paragraphComments.panelTitle'))}

@@ -244,7 +293,7 @@ const { t } = getI18n(Astro); type="text" name="nickname" required - placeholder="inline_operator" + placeholder="${escapeHtml(t('paragraphComments.nicknamePlaceholder'))}" class="terminal-form-input" />
@@ -255,7 +304,7 @@ const { t } = getI18n(Astro);
@@ -413,7 +462,7 @@ const { t } = getI18n(Astro); paragraphCounts.set(paragraphKey, comments.length); pendingCountChip.textContent = `${pending.length} ${t('common.pending')}`; pendingCountChip.classList.toggle('hidden', pending.length === 0); - updateRowState(); + updateMarkerState(); updateSummaryFromCounts(); const approvedMarkup = @@ -514,17 +563,20 @@ const { t } = getI18n(Astro); } ) { const descriptor = paragraphDescriptors.get(paragraphKey); - const row = paragraphRows.get(paragraphKey); - if (!descriptor || !row) { + if (!descriptor) { return; } + if (!markersVisible) { + applyMarkerVisibility(true, { persist: false }); + } + activeParagraphKey = paragraphKey; clearStatus(); resetReplyState(); panelExcerpt.textContent = descriptor.excerpt; - row.insertAdjacentElement('afterend', panel); + descriptor.element.insertAdjacentElement('afterend', panel); panel.classList.remove('hidden'); panel.dataset.paragraphKey = paragraphKey; @@ -540,7 +592,7 @@ const { t } = getI18n(Astro); descriptor.element.scrollIntoView({ behavior: 'smooth', block: 'center' }); } - updateRowState(); + updateMarkerState(); threadContainer.innerHTML = `
${escapeHtml(t('paragraphComments.loadingThread'))} @@ -558,7 +610,7 @@ const { t } = getI18n(Astro); pendingCountChip.classList.add('hidden'); threadContainer.innerHTML = `
- ${escapeHtml(t('paragraphComments.loadFailed', { message: error instanceof Error ? error.message : 'unknown error' }))} + ${escapeHtml(t('paragraphComments.loadFailed', { message: error instanceof Error ? error.message : t('common.unknownError') }))}
`; } @@ -570,7 +622,7 @@ const { t } = getI18n(Astro); resetReplyState(); clearStatus(); paragraphDescriptors.forEach(item => item.element.classList.remove('is-comment-focused')); - updateRowState(); + updateMarkerState(); if (clearHash) { syncHashForParagraph(null); @@ -690,15 +742,22 @@ const { t } = getI18n(Astro); renderThread(descriptor.key, approvedComments); setStatus(t('paragraphComments.submitSuccess'), 'success'); } catch (error) { - setStatus(t('paragraphComments.submitFailed', { message: error instanceof Error ? error.message : 'unknown error' }), 'error'); + setStatus(t('paragraphComments.submitFailed', { message: error instanceof Error ? error.message : t('common.unknownError') }), 'error'); } }); + toggleButton?.addEventListener('click', () => { + applyMarkerVisibility(!markersVisible); + }); + async function init() { - if (!wrapper || !articleContent || !postSlug) { + if (!wrapper || !articleRoot || !articleContent || !postSlug) { return; } + const storedVisibility = localStorage.getItem(storageKey); + markersVisible = storedVisibility !== 'false'; + const descriptors = buildParagraphDescriptors(articleContent); if (descriptors.length === 0) { setSummaryMessage(t('paragraphComments.noParagraphs')); @@ -710,6 +769,10 @@ const { t } = getI18n(Astro); descriptor.element.id = `paragraph-${descriptor.key}`; descriptor.element.dataset.paragraphKey = descriptor.key; descriptor.element.classList.add('paragraph-comment-paragraph'); + + const marker = createParagraphMarker(descriptor.key, descriptor.excerpt); + paragraphMarkers.set(descriptor.key, marker); + descriptor.element.appendChild(marker); }); try { @@ -730,14 +793,11 @@ const { t } = getI18n(Astro); } descriptors.forEach(descriptor => { - const row = createParagraphRow(descriptor.key, descriptor.excerpt); - paragraphRows.set(descriptor.key, row); paragraphCounts.set(descriptor.key, paragraphCounts.get(descriptor.key) || 0); - descriptor.element.insertAdjacentElement('afterend', row); }); - 
updateRowState(); - updateSummaryFromCounts(); + updateMarkerState(); + applyMarkerVisibility(markersVisible, { persist: false }); await openFromHash(); window.addEventListener('hashchange', () => { void openFromHash(); @@ -745,4 +805,5 @@ const { t } = getI18n(Astro); } void init(); + } diff --git a/frontend/src/components/PostCard.astro b/frontend/src/components/PostCard.astro index 56465c0..3fa92ff 100644 --- a/frontend/src/components/PostCard.astro +++ b/frontend/src/components/PostCard.astro @@ -1,20 +1,29 @@ --- import type { Post } from '../lib/types'; -import TerminalButton from './ui/TerminalButton.astro'; import CodeBlock from './CodeBlock.astro'; import { formatReadTime, getI18n } from '../lib/i18n'; -import { resolveFileRef, getPostTypeColor } from '../lib/utils'; +import { + getAccentVars, + getCategoryTheme, + getPostTypeColor, + getPostTypeTheme, + getTagTheme, + resolveFileRef, +} from '../lib/utils'; interface Props { post: Post; selectedTag?: string; highlightTerm?: string; + tagHrefPrefix?: string; } -const { post, selectedTag = '', highlightTerm = '' } = Astro.props; +const { post, selectedTag = '', highlightTerm = '', tagHrefPrefix = '/tags?tag=' } = Astro.props; const { locale, t } = getI18n(Astro); const typeColor = getPostTypeColor(post.type); +const typeTheme = getPostTypeTheme(post.type); +const categoryTheme = getCategoryTheme(post.category); const escapeHtml = (value: string) => value @@ -42,15 +51,23 @@ const normalizedSelectedTag = selectedTag.trim().toLowerCase(); --- + + diff --git a/frontend/src/components/RelatedPosts.astro b/frontend/src/components/RelatedPosts.astro index edc12c8..14e30a8 100644 --- a/frontend/src/components/RelatedPosts.astro +++ b/frontend/src/components/RelatedPosts.astro @@ -1,6 +1,7 @@ --- import { apiClient } from '../lib/api/client'; import { getI18n } from '../lib/i18n'; +import { getAccentVars, getCategoryTheme, getPostTypeTheme, getTagTheme } from '../lib/utils'; interface Props { currentSlug: string; 
@@ -38,7 +39,7 @@ const relatedPosts = allPosts
- related traces + {t('relatedPosts.kicker')}
@@ -63,13 +64,13 @@ const relatedPosts = allPosts {relatedPosts.map(post => (
- - - {post.type} + + {post.type === 'article' ? t('common.article') : t('common.tweet')}

{post.title} @@ -85,11 +86,17 @@ const relatedPosts = allPosts {post.date} + + + {post.category} + {post.sharedTags.length > 0 && ( - - - {post.sharedTags.map(tag => `#${tag}`).join(' ')} - + post.sharedTags.map(tag => ( + + + {tag} + + )) )}

diff --git a/frontend/src/components/StatsList.astro b/frontend/src/components/StatsList.astro index 694b126..da35a72 100644 --- a/frontend/src/components/StatsList.astro +++ b/frontend/src/components/StatsList.astro @@ -1,6 +1,5 @@ --- import type { SystemStat } from '../lib/types'; -import InfoTile from './ui/InfoTile.astro'; interface Props { stats: SystemStat[]; @@ -9,13 +8,21 @@ interface Props { const { stats } = Astro.props; --- -
    - {stats.map(stat => ( -
  • - - {stat.label} - {stat.value} - +
      + {stats.map((stat, index) => ( +
    • +
      +
      + + {String(index + 1).padStart(2, '0')} + +
      +
      {stat.label}
      +
      {stat.value}
      +
      +
      + +
    • ))}
    diff --git a/frontend/src/components/TechStackList.astro b/frontend/src/components/TechStackList.astro index d67f6ef..b9e2b75 100644 --- a/frontend/src/components/TechStackList.astro +++ b/frontend/src/components/TechStackList.astro @@ -1,6 +1,5 @@ --- import type { TechStackItem } from '../lib/types'; -import InfoTile from './ui/InfoTile.astro'; interface Props { items: TechStackItem[]; @@ -9,20 +8,23 @@ interface Props { const { items } = Astro.props; --- -
      - {items.map(item => ( -
    • - - +
        + {items.map((item) => ( +
      • +
        + - {item.name} - {item.level && ( - {item.level} - )} + + stack://module + + {item.name} + + {item.level || 'active'} + - +
      • ))}
      diff --git a/frontend/src/components/ui/CommandPrompt.astro b/frontend/src/components/ui/CommandPrompt.astro index 9ca0fa3..6add803 100644 --- a/frontend/src/components/ui/CommandPrompt.astro +++ b/frontend/src/components/ui/CommandPrompt.astro @@ -5,13 +5,20 @@ interface Props { clickable?: boolean; href?: string; typing?: boolean; + promptId?: string; } -const { command, path = '~/', clickable = false, href = '/', typing = true } = Astro.props; +const { command, path = '~/', clickable = false, href = '/', typing = true, promptId = '' } = Astro.props; const uniqueId = Math.random().toString(36).slice(2, 11); --- -
      +
      {clickable ? ( user@blog @@ -35,44 +42,64 @@ const uniqueId = Math.random().toString(36).slice(2, 11); diff --git a/frontend/src/components/ui/FilterPill.astro b/frontend/src/components/ui/FilterPill.astro index 03ed2dd..05e2a38 100644 --- a/frontend/src/components/ui/FilterPill.astro +++ b/frontend/src/components/ui/FilterPill.astro @@ -2,8 +2,9 @@ interface Props { href?: string; active?: boolean; - tone?: 'blue' | 'amber' | 'teal' | 'violet' | 'neutral'; + tone?: 'blue' | 'amber' | 'teal' | 'violet' | 'neutral' | 'accent'; class?: string; + style?: string; } const { diff --git a/frontend/src/components/ui/ViewMoreLink.astro b/frontend/src/components/ui/ViewMoreLink.astro index 633da7d..7dffdd8 100644 --- a/frontend/src/components/ui/ViewMoreLink.astro +++ b/frontend/src/components/ui/ViewMoreLink.astro @@ -2,15 +2,84 @@ interface Props { href: string; text: string; + command?: string; } -const { href, text } = Astro.props; +const { href, text, command = 'cd' } = Astro.props; --- - {text} - + {command} + {text} + + + diff --git a/frontend/src/env.d.ts b/frontend/src/env.d.ts index 180d54d..40f90db 100644 --- a/frontend/src/env.d.ts +++ b/frontend/src/env.d.ts @@ -1,5 +1,13 @@ /// +interface ImportMetaEnv { + readonly PUBLIC_API_BASE_URL?: string; +} + +interface ImportMeta { + readonly env: ImportMetaEnv; +} + declare global { interface Window { __TERMI_I18N__?: { diff --git a/frontend/src/layouts/BaseLayout.astro b/frontend/src/layouts/BaseLayout.astro index d5b6c27..89e6629 100644 --- a/frontend/src/layouts/BaseLayout.astro +++ b/frontend/src/layouts/BaseLayout.astro @@ -121,7 +121,7 @@ const i18nPayload = JSON.stringify({ locale, messages }); } @media (prefers-color-scheme: dark) { - :root:not(.light) { + :root:not(.light):not(.dark) { --primary: #00ff9d; --primary-rgb: 0 255 157; --primary-light: #00ff9d33; @@ -193,12 +193,84 @@ const i18nPayload = JSON.stringify({ locale, messages }); diff --git a/frontend/src/lib/api/client.ts 
b/frontend/src/lib/api/client.ts index 918e201..1a3ec82 100644 --- a/frontend/src/lib/api/client.ts +++ b/frontend/src/lib/api/client.ts @@ -6,7 +6,14 @@ import type { Tag as UiTag, } from '../types'; -export const API_BASE_URL = 'http://localhost:5150/api'; +const envApiBaseUrl = import.meta.env.PUBLIC_API_BASE_URL?.trim(); + +export const API_BASE_URL = + envApiBaseUrl && envApiBaseUrl.length > 0 + ? envApiBaseUrl.replace(/\/$/, '') + : import.meta.env.DEV + ? 'http://127.0.0.1:5150/api' + : 'https://init.cool/api'; export interface ApiPost { id: number; @@ -18,6 +25,7 @@ export interface ApiPost { tags: string[]; post_type: 'article' | 'tweet'; image: string | null; + images: string[] | null; pinned: boolean; created_at: string; updated_at: string; @@ -111,11 +119,22 @@ export interface ApiSiteSettings { social_email: string | null; location: string | null; tech_stack: string[] | null; + music_playlist: Array<{ + title: string; + artist?: string | null; + album?: string | null; + url: string; + cover_image_url?: string | null; + accent_color?: string | null; + description?: string | null; + }> | null; ai_enabled: boolean; + paragraph_comments_enabled: boolean; } export interface AiSource { slug: string; + href: string; title: string; excerpt: string; score: number; @@ -152,10 +171,11 @@ export interface Review { review_type: 'game' | 'anime' | 'music' | 'book' | 'movie'; rating: number; review_date: string; - status: 'completed' | 'in-progress' | 'dropped'; + status: 'published' | 'draft' | 'completed' | 'in-progress' | 'dropped'; description: string; tags: string; cover: string; + link_url: string | null; created_at: string; updated_at: string; } @@ -168,24 +188,59 @@ export const DEFAULT_SITE_SETTINGS: SiteSettings = { id: '1', siteName: 'InitCool', siteShortName: 'Termi', - siteUrl: 'https://termi.dev', + siteUrl: 'https://init.cool', siteTitle: 'InitCool - 终端风格的内容平台', siteDescription: '一个基于终端美学的个人内容站,记录代码、设计和生活。', heroTitle: '欢迎来到我的极客终端博客', heroSubtitle: 
'这里记录技术、代码和生活点滴', ownerName: 'InitCool', - ownerTitle: '前端开发者 / 技术博主', - ownerBio: '一名热爱技术的前端开发者,专注于构建高性能、优雅的用户界面。相信代码不仅是工具,更是一种艺术表达。', + ownerTitle: 'Rust / Go / Python Developer · Builder @ init.cool', + ownerBio: 'InitCool,GitHub 用户名 limitcool。坚持不要重复造轮子,当前在维护 starter,平时主要写 Rust、Go、Python 相关项目,也在持续学习 AI 与 Web3。', location: 'Hong Kong', social: { - github: 'https://github.com', - twitter: 'https://twitter.com', - email: 'mailto:hello@termi.dev', + github: 'https://github.com/limitcool', + twitter: '', + email: 'mailto:initcoool@gmail.com', }, - techStack: ['Astro', 'Svelte', 'Tailwind CSS', 'TypeScript'], + techStack: ['Rust', 'Go', 'Python', 'Svelte', 'Astro', 'Loco.rs'], + musicPlaylist: [ + { + title: '山中来信', + artist: 'InitCool Radio', + album: '站点默认歌单', + url: 'https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3', + coverImageUrl: + 'https://images.unsplash.com/photo-1510915228340-29c85a43dcfe?auto=format&fit=crop&w=600&q=80', + accentColor: '#2f6b5f', + description: '适合文章阅读时循环播放的轻氛围曲。', + }, + { + title: '风吹松声', + artist: 'InitCool Radio', + album: '站点默认歌单', + url: 'https://www.soundhelix.com/examples/mp3/SoundHelix-Song-2.mp3', + coverImageUrl: + 'https://images.unsplash.com/photo-1500530855697-b586d89ba3ee?auto=format&fit=crop&w=600&q=80', + accentColor: '#8a5b35', + description: '偏木质感的器乐氛围,适合深夜浏览。', + }, + { + title: '夜航小记', + artist: 'InitCool Radio', + album: '站点默认歌单', + url: 'https://www.soundhelix.com/examples/mp3/SoundHelix-Song-3.mp3', + coverImageUrl: + 'https://images.unsplash.com/photo-1493225457124-a3eb161ffa5f?auto=format&fit=crop&w=600&q=80', + accentColor: '#375a7f', + description: '节奏更明显一点,适合切换阅读状态。', + }, + ], ai: { enabled: false, }, + comments: { + paragraphsEnabled: true, + }, }; const formatPostDate = (dateString: string) => dateString.slice(0, 10); @@ -208,6 +263,7 @@ const normalizePost = (post: ApiPost): UiPost => ({ tags: post.tags ?? [], category: post.category, image: post.image ?? undefined, + images: post.images ?? 
undefined, pinned: post.pinned, }); @@ -277,9 +333,26 @@ const normalizeSiteSettings = (settings: ApiSiteSettings): SiteSettings => ({ email: settings.social_email || DEFAULT_SITE_SETTINGS.social.email, }, techStack: settings.tech_stack?.length ? settings.tech_stack : DEFAULT_SITE_SETTINGS.techStack, + musicPlaylist: + settings.music_playlist?.filter((item) => item?.title?.trim() && item?.url?.trim())?.length + ? settings.music_playlist + .filter((item) => item.title.trim() && item.url.trim()) + .map((item) => ({ + title: item.title, + artist: item.artist ?? undefined, + album: item.album ?? undefined, + url: item.url, + coverImageUrl: item.cover_image_url ?? undefined, + accentColor: item.accent_color ?? undefined, + description: item.description ?? undefined, + })) + : DEFAULT_SITE_SETTINGS.musicPlaylist, ai: { enabled: Boolean(settings.ai_enabled), }, + comments: { + paragraphsEnabled: settings.paragraph_comments_enabled ?? true, + }, }); class ApiClient { @@ -450,6 +523,7 @@ class ApiClient { tags: result.tags ?? [], post_type: result.post_type || 'article', image: result.image, + images: null, pinned: result.pinned ?? false, created_at: result.created_at, updated_at: result.updated_at, diff --git a/frontend/src/lib/config/terminal.ts b/frontend/src/lib/config/terminal.ts index eb06567..1b63877 100644 --- a/frontend/src/lib/config/terminal.ts +++ b/frontend/src/lib/config/terminal.ts @@ -140,11 +140,11 @@ I N N I T CCCC OOO OOO LLLLL`, ], search: { placeholders: { - default: "'关键词' articles/*.md", + default: "'关键词' 文章 / 标签 / 分类", small: "搜索...", medium: "搜索文章..." 
}, - promptText: "grep -i", + promptText: "搜索", emptyResultText: "输入关键词搜索文章" }, terminal: { diff --git a/frontend/src/lib/i18n/index.ts b/frontend/src/lib/i18n/index.ts index d3a9fae..c840862 100644 --- a/frontend/src/lib/i18n/index.ts +++ b/frontend/src/lib/i18n/index.ts @@ -66,12 +66,7 @@ export function resolveLocale(options: { return fromCookie; } - const acceptLanguages = String(options.acceptLanguage || '') - .split(',') - .map((part) => normalizeLocale(part.split(';')[0])) - .filter(Boolean) as Locale[]; - - return acceptLanguages[0] || DEFAULT_LOCALE; + return DEFAULT_LOCALE; } export function translate(locale: Locale, key: string, params?: TranslateParams): string { diff --git a/frontend/src/lib/i18n/messages.ts b/frontend/src/lib/i18n/messages.ts index 7b990d3..1d321ec 100644 --- a/frontend/src/lib/i18n/messages.ts +++ b/frontend/src/lib/i18n/messages.ts @@ -4,7 +4,7 @@ export const messages = { language: '语言', languages: { 'zh-CN': '简体中文', - en: 'English', + en: '英文', }, all: '全部', search: '搜索', @@ -63,6 +63,7 @@ export const messages = { featureOff: '功能未开启', emptyState: '当前还没有内容。', apiUnavailable: 'API 暂时不可用', + unknownError: '未知错误', }, nav: { articles: '文章', @@ -77,19 +78,31 @@ export const messages = { header: { navigation: '导航', themeToggle: '切换主题', + themePanelTitle: '外观模式', + themeLight: '浅色', + themeDark: '深色', + themeSystem: '跟随系统', + themeLightHint: '始终使用亮色界面', + themeDarkHint: '始终使用暗色界面', + themeSystemHint: '跟随设备当前主题', + themeResolvedAs: '当前生效:{mode}', toggleMenu: '切换菜单', searchModeKeyword: '搜索', searchModeAi: 'AI', searchModeKeywordMobile: '关键词搜索', searchModeAiMobile: 'AI 搜索', + shellLabel: '站点终端', + musicPanel: '播放控制', + searchPromptKeyword: '站内搜索', + searchPromptAi: 'AI 问答', searchPlaceholderKeyword: "'关键词'", searchPlaceholderAi: '输入问题,交给站内 AI', - searchHintKeyword: 'articles/*.md', - searchHintAi: '手动确认', + searchHintKeyword: '文章 / 标签 / 分类', + searchHintAi: '前往问答页', aiModeTitle: 'AI 问答模式', aiModeHeading: '把这个问题交给站内 AI', - aiModeDescription: 
'AI 会先检索站内知识库,再给出总结式回答,并附带相关文章来源。', - aiModeNotice: '进入问答页后不会自动调用模型,需要你手动确认发送。', + aiModeDescription: '在问答页输入问题后,系统会优先参考站内内容并给出整理后的回答。', + aiModeNotice: '回答会附带相关文章,方便继续阅读。', aiModeCta: '前往 AI 问答页确认', liveResults: '实时搜索结果', searching: '正在搜索 {query} ...', @@ -106,12 +119,22 @@ export const messages = { copyright: '© {year} {site}. 保留所有权利。', sitemap: '站点地图', rss: 'RSS 订阅', + summary: '持续整理文章、记录与站内阅读入口。', }, home: { pinned: '置顶', + quickJump: '快速跳转', about: '关于我', techStack: '技术栈', systemStatus: '系统状态', + promptWelcome: 'pwd', + promptDiscoverDefault: "find ./posts -type f | sort", + promptDiscoverFiltered: 'grep -Ril "{filters}" ./posts', + promptPinned: 'grep -Ril "^pinned: true$" ./posts', + promptPostsDefault: "find ./posts -type f | head -n {count}", + promptPostsFiltered: 'grep -Ril "{filters}" ./posts | head -n {count}', + promptFriends: "find ./links -maxdepth 1 -type f | sort", + promptAbout: "sed -n '1,80p' ~/profile.md", }, articlesPage: { title: '文章索引', @@ -131,16 +154,20 @@ export const messages = { filePath: '文件路径', }, relatedPosts: { + kicker: '关联轨迹', title: '相关文章', description: '基于当前分类与标签关联出的相近内容,延续同一条阅读链路。', linked: '{count} 条关联', }, comments: { title: '评论终端', + kicker: '讨论缓冲区', description: '这里是整篇文章的讨论区,当前缓冲区共有 {count} 条已展示评论,新的留言提交后会进入审核队列。', writeComment: '写评论', nickname: '昵称', + nicknamePlaceholder: '山客', email: '邮箱', + emailPlaceholder: 'name@example.com', message: '内容', messagePlaceholder: "$ echo '留下你的想法...'", maxChars: '最多 500 字', @@ -160,15 +187,19 @@ export const messages = { }, paragraphComments: { title: '段落评论已启用', + kicker: '段落批注', intro: '正文里的自然段都会挂一个轻量讨论入口,适合只针对某一段补充上下文、指出问题或继续展开讨论。', scanning: '正在扫描段落缓冲区...', noParagraphs: '当前文章没有可挂载评论的自然段。', summary: '已为 {paragraphCount} 个自然段挂载评论入口,其中 {discussedCount} 段已有讨论,当前共展示 {approvedCount} 条已审核段落评论。', focusCurrent: '聚焦当前段落', panelTitle: '段落讨论面板', + panelKicker: '段落讨论线程', close: '关闭', nickname: '昵称', + nicknamePlaceholder: '林泉', email: '邮箱', + emailPlaceholder: 'name@example.com', comment: '评论', 
commentPlaceholder: "$ echo '只评论这一段...'", maxChars: '最多 500 字', @@ -192,22 +223,29 @@ export const messages = { zeroNotes: '评论', waitingReview: '等待审核', locateParagraph: '定位段落', + showMarkers: '显示段落评论', + hideMarkers: '隐藏段落评论', + markersHidden: '段落评论入口已隐藏,你仍然可以随时重新打开。', + badgeLabel: '打开这一段的评论面板', }, ask: { pageTitle: 'AI 问答', - pageDescription: '基于 {siteName} 内容知识库的站内 AI 问答', + pageDescription: '{siteName} 的站内 AI 问答入口', title: 'AI 站内问答', - subtitle: '基于博客 Markdown 内容建立索引,回答会优先引用站内真实资料。', + subtitle: '围绕本站内容回答问题,并附上可继续阅读的相关文章。', + terminalLabel: '问答助手', + assistantLabel: '回答输出', + disabledStateLabel: '功能已关闭', disabledTitle: '后台暂未开启 AI 问答', disabledDescription: '这个入口已经接好了真实后端,但当前站点设置里没有开启公开问答。管理员开启后,这里会自动变成可用状态,导航也会同步显示。', textareaPlaceholder: '输入你想问的问题,比如:这个博客关于前端写过哪些内容?', submit: '开始提问', - idleStatus: '知识库已接入,等待问题输入。', + idleStatus: '可以直接输入问题开始提问。', examples: '示例问题', - workflow: '工作流', - workflow1: '1. 后台开启 AI 开关并配置聊天模型。', - workflow2: '2. 重建索引,把 Markdown 文章切块后由后端本地生成 embedding,并写入 PostgreSQL pgvector。', - workflow3: '3. 前台提问时先在 pgvector 中做相似度检索,再交给聊天模型基于上下文回答。', + guide: '提问建议', + guide1: '1. 直接问主题、文章、观点或站内某类内容。', + guide2: '2. 回答会优先结合本站已有内容,并给出可继续阅读的文章。', + guide3: '3. 
如果问题太宽泛,换成更具体的关键词通常会更准确。', emptyAnswer: '暂无回答。', requestFailed: '请求失败:{message}', streamUnsupported: '当前浏览器无法读取流式响应。', @@ -220,7 +258,15 @@ export const messages = { streamInterrupted: '流式响应被提前中断。', retryLater: '这次请求没有成功,可以稍后重试。', prefixedQuestion: '已带入搜索词,确认后开始提问。', + promptIdle: 'cat > question.txt', + promptEditing: "sed -n '1,12p' question.txt", + promptSubmitting: 'tail -f answer.stream', + promptComplete: "printf 'sources=%s\\n' {count}", + promptFailed: "echo 'retry'", sources: '来源', + sourceScore: '相关度 {score}', + metaSources: '{count} 篇相关文章', + metaSourcesWithTime: '{count} 篇相关文章 · 更新于 {time}', }, about: { pageTitle: '关于', @@ -236,8 +282,11 @@ export const messages = { title: '文章分类', intro: '按内容主题浏览文章,分类页现在和其他列表页保持同一套终端面板语言。', quickJump: '快速跳转分类文章', + allCategoriesDescription: '查看全部分类下的文章与更新记录。', categoryPosts: '浏览 {name} 主题下的全部文章和更新记录。', + selectedSummary: '{name} 分类下找到 {count} 篇文章', empty: '暂无分类数据', + emptyPosts: '当前分类下没有文章', }, friends: { pageTitle: '友情链接', @@ -254,6 +303,9 @@ export const messages = { name: '名称', description: '描述', link: '链接', + promptBrowse: "find ./links -maxdepth 1 -type f | sort", + promptApply: 'cat > friend-link.txt', + promptRules: "sed -n '1,120p' rules.md", }, friendForm: { title: '提交友链申请', @@ -312,6 +364,9 @@ export const messages = { emptyData: '暂无评价数据,请检查后端 API 连接', emptyFiltered: '当前筛选下暂无评价', currentFilter: '当前筛选: {type}', + statusCompleted: '已完成', + statusInProgress: '进行中', + statusDropped: '已弃置', typeAll: '全部', typeGame: '游戏', typeAnime: '动画', @@ -330,7 +385,7 @@ export const messages = { time: '时间', actions: '可执行操作', actionsIntro: '像命令面板一样,优先给出直接可走的恢复路径。', - searchHint: '也可以直接使用顶部的搜索输入框,在 `articles/*.md` 里重新 grep 一次相关关键字。', + searchHint: '也可以直接使用顶部的搜索输入框,重新搜索相关文章。', recommended: '推荐入口', recommendedIntro: '使用真实文章数据,避免 404 页面再把人带进不存在的地址。', cannotLoad: '暂时无法读取文章列表。', @@ -409,6 +464,7 @@ export const messages = { featureOff: 'Feature off', emptyState: 'Nothing here yet.', apiUnavailable: 'API temporarily unavailable', + 
unknownError: 'unknown error', }, nav: { articles: 'Articles', @@ -423,19 +479,31 @@ export const messages = { header: { navigation: 'Navigation', themeToggle: 'Toggle theme', + themePanelTitle: 'Appearance', + themeLight: 'Light', + themeDark: 'Dark', + themeSystem: 'System', + themeLightHint: 'Always use the light interface', + themeDarkHint: 'Always use the dark interface', + themeSystemHint: 'Follow the device appearance', + themeResolvedAs: 'Currently applied: {mode}', toggleMenu: 'Toggle menu', searchModeKeyword: 'Search', searchModeAi: 'AI', searchModeKeywordMobile: 'Keyword Search', searchModeAiMobile: 'AI Search', + shellLabel: 'Site Terminal', + musicPanel: 'Playback', + searchPromptKeyword: 'Site Search', + searchPromptAi: 'Ask AI', searchPlaceholderKeyword: "'keyword'", searchPlaceholderAi: 'Type a question for the site AI', - searchHintKeyword: 'articles/*.md', - searchHintAi: 'manual confirm', + searchHintKeyword: 'posts / tags / categories', + searchHintAi: 'open AI Q&A', aiModeTitle: 'AI Q&A mode', aiModeHeading: 'Send this question to the site AI', - aiModeDescription: 'The AI will search the site knowledge base first, then answer with source-backed summaries.', - aiModeNotice: 'The model will not run automatically after navigation. You must confirm manually.', + aiModeDescription: 'Ask on the Q&A page and the system will answer with priority given to on-site content.', + aiModeNotice: 'Answers include related articles so visitors can keep reading.', aiModeCta: 'Open AI Q&A to confirm', liveResults: 'Live results', searching: 'Searching {query} ...', @@ -452,12 +520,22 @@ export const messages = { copyright: '© {year} {site}. 
All rights reserved.', sitemap: 'Sitemap', rss: 'RSS feed', + summary: 'A place for posts, notes, and on-site reading paths.', }, home: { pinned: 'Pinned', + quickJump: 'Quick jump', about: 'About', techStack: 'Tech stack', systemStatus: 'System status', + promptWelcome: 'pwd', + promptDiscoverDefault: "find ./posts -type f | sort", + promptDiscoverFiltered: 'grep -Ril "{filters}" ./posts', + promptPinned: 'grep -Ril "^pinned: true$" ./posts', + promptPostsDefault: "find ./posts -type f | head -n {count}", + promptPostsFiltered: 'grep -Ril "{filters}" ./posts | head -n {count}', + promptFriends: "find ./links -maxdepth 1 -type f | sort", + promptAbout: "sed -n '1,80p' ~/profile.md", }, articlesPage: { title: 'Article Index', @@ -477,16 +555,20 @@ export const messages = { filePath: 'File path', }, relatedPosts: { + kicker: 'Related traces', title: 'Related Posts', description: 'More nearby reading paths based on the current category and shared tags.', linked: '{count} linked', }, comments: { title: 'Comment Terminal', + kicker: 'Discussion Buffer', description: 'This is the discussion thread for the whole article. 
{count} approved comments are shown right now, and new messages enter moderation first.', writeComment: 'Write comment', nickname: 'Nickname', + nicknamePlaceholder: 'trail_reader', email: 'Email', + emailPlaceholder: 'you@example.com', message: 'Message', messagePlaceholder: "$ echo 'Leave your thoughts here...'", maxChars: 'Max 500 chars', @@ -506,15 +588,19 @@ export const messages = { }, paragraphComments: { title: 'Paragraph comments are enabled', + kicker: 'Paragraph Notes', intro: 'Each natural paragraph in the article gets a lightweight discussion entry point, perfect for focused context, corrections, or follow-up questions.', scanning: 'Scanning paragraph buffer...', noParagraphs: 'No commentable paragraphs were found in this article.', summary: '{paragraphCount} paragraphs have comment entries, {discussedCount} already have discussion, and {approvedCount} approved paragraph comments are currently visible.', focusCurrent: 'Focus current paragraph', panelTitle: 'Paragraph discussion panel', + panelKicker: 'Paragraph thread', close: 'Close', nickname: 'Nickname', + nicknamePlaceholder: 'inline_reader', email: 'Email', + emailPlaceholder: 'you@example.com', comment: 'Comment', commentPlaceholder: "$ echo 'Comment on this paragraph only...'", maxChars: 'Max 500 chars', @@ -538,22 +624,29 @@ export const messages = { zeroNotes: 'comment', waitingReview: 'waiting review', locateParagraph: 'Locate paragraph', + showMarkers: 'Show paragraph comments', + hideMarkers: 'Hide paragraph comments', + markersHidden: 'Paragraph comment markers are hidden. 
You can turn them back on anytime.', + badgeLabel: 'Open comments for this paragraph', }, ask: { pageTitle: 'Ask AI', - pageDescription: 'An on-site AI Q&A experience grounded in the {siteName} knowledge base', + pageDescription: 'An on-site AI Q&A entry for {siteName}', title: 'On-site AI Q&A', - subtitle: 'Answers are grounded in indexed Markdown content from the blog and prioritize real on-site references.', + subtitle: 'Ask about the site and get answers with related articles attached for follow-up reading.', + terminalLabel: 'Q&A Assistant', + assistantLabel: 'Assistant Output', + disabledStateLabel: 'Feature Disabled', disabledTitle: 'AI Q&A is not enabled yet', disabledDescription: 'The real backend integration is already in place, but public Q&A is still disabled in site settings. Once it is enabled, this page and the navigation entry will become available automatically.', textareaPlaceholder: 'Ask anything, for example: what has this blog written about frontend topics?', submit: 'Ask now', - idleStatus: 'Knowledge base connected. Waiting for a question.', + idleStatus: 'Type a question to get started.', examples: 'Example questions', - workflow: 'Workflow', - workflow1: '1. Enable the AI switch in the admin and configure the chat model.', - workflow2: '2. Rebuild the index so Markdown content is chunked, embedded locally by the backend, and written into PostgreSQL pgvector.', - workflow3: '3. Each user question retrieves similar chunks from pgvector first, then the chat model answers with that context.', + guide: 'Asking tips', + guide1: '1. Ask directly about topics, posts, viewpoints, or recurring themes on the site.', + guide2: '2. Answers prioritize on-site material and include related reading when available.', + guide3: '3. 
If the answer feels broad, try a more specific keyword or article topic.', emptyAnswer: 'No answer yet.', requestFailed: 'Request failed: {message}', streamUnsupported: 'This browser cannot read streaming responses.', @@ -566,7 +659,15 @@ export const messages = { streamInterrupted: 'The streaming response ended early.', retryLater: 'This request did not complete successfully. Please try again later.', prefixedQuestion: 'The search query has been prefilled. Confirm manually to ask AI.', + promptIdle: 'cat > question.txt', + promptEditing: "sed -n '1,12p' question.txt", + promptSubmitting: 'tail -f answer.stream', + promptComplete: "printf 'sources=%s\\n' {count}", + promptFailed: "echo 'retry'", sources: 'Sources', + sourceScore: 'Score {score}', + metaSources: '{count} related articles', + metaSourcesWithTime: '{count} related articles · updated {time}', }, about: { pageTitle: 'About', @@ -582,8 +683,11 @@ export const messages = { title: 'Categories', intro: 'Browse posts by topic. This page now follows the same terminal language as the other list views.', quickJump: 'Jump straight into category posts', + allCategoriesDescription: 'Browse posts and updates from every category.', categoryPosts: 'Browse all posts and updates under {name}.', + selectedSummary: '{count} posts in {name}', empty: 'No category data yet', + emptyPosts: 'No posts found in this category', }, friends: { pageTitle: 'Links', @@ -600,6 +704,9 @@ export const messages = { name: 'Name', description: 'Description', link: 'Link', + promptBrowse: "find ./links -maxdepth 1 -type f | sort", + promptApply: 'cat > friend-link.txt', + promptRules: "sed -n '1,120p' rules.md", }, friendForm: { title: 'Submit a link request', @@ -658,6 +765,9 @@ export const messages = { emptyData: 'No review data yet. 
Please check the backend API connection.', emptyFiltered: 'No reviews match the current filter', currentFilter: 'Current filter: {type}', + statusCompleted: 'Completed', + statusInProgress: 'In progress', + statusDropped: 'Dropped', typeAll: 'All', typeGame: 'Games', typeAnime: 'Anime', @@ -676,7 +786,7 @@ export const messages = { time: 'time', actions: 'Actions', actionsIntro: 'Like a command palette, this page surfaces the most direct recovery paths first.', - searchHint: 'You can also use the search box in the header and grep through `articles/*.md` again.', + searchHint: 'You can also use the search box in the header to search related posts again.', recommended: 'Recommended entries', recommendedIntro: 'These use real article data so the 404 page does not send people into more dead ends.', cannotLoad: 'Unable to load the article list right now.', diff --git a/frontend/src/lib/types/index.ts b/frontend/src/lib/types/index.ts index 0c17ff2..5b43bf2 100644 --- a/frontend/src/lib/types/index.ts +++ b/frontend/src/lib/types/index.ts @@ -61,9 +61,23 @@ export interface SiteSettings { email?: string; }; techStack: string[]; + musicPlaylist: MusicTrack[]; ai: { enabled: boolean; }; + comments: { + paragraphsEnabled: boolean; + }; +} + +export interface MusicTrack { + title: string; + artist?: string; + album?: string; + url: string; + coverImageUrl?: string; + accentColor?: string; + description?: string; } export interface SiteConfig { diff --git a/frontend/src/lib/utils/index.ts b/frontend/src/lib/utils/index.ts index 68432b3..9ec75f4 100644 --- a/frontend/src/lib/utils/index.ts +++ b/frontend/src/lib/utils/index.ts @@ -68,6 +68,147 @@ export function debounce unknown>( }; } +export interface AccentTheme { + color: string; + rgb: string; +} + +const POST_TYPE_THEMES: Record = { + article: { + color: '#2563eb', + rgb: '37 99 235', + }, + tweet: { + color: '#f97316', + rgb: '249 115 22', + }, +}; + +const DEFAULT_THEME: AccentTheme = { + color: '#64748b', + rgb: '100 
116 139', +}; + +function normalizeToken(value: string | null | undefined): string { + return value?.trim().toLowerCase() || ''; +} + +function hashToken(value: string): number { + let hash = 2166136261; + + for (let index = 0; index < value.length; index += 1) { + hash ^= value.charCodeAt(index); + hash = Math.imul(hash, 16777619); + } + + return hash >>> 0; +} + +function hexToRgbTriplet(hex: string): string { + const normalized = hex.replace('#', ''); + const safeHex = normalized.length === 3 + ? normalized.split('').map((char) => `${char}${char}`).join('') + : normalized; + const value = parseInt(safeHex, 16); + + return `${(value >> 16) & 255} ${(value >> 8) & 255} ${value & 255}`; +} + +function hslToHex(hue: number, saturation: number, lightness: number): string { + const normalizedHue = ((hue % 360) + 360) % 360; + const s = saturation / 100; + const l = lightness / 100; + const chroma = (1 - Math.abs(2 * l - 1)) * s; + const section = normalizedHue / 60; + const x = chroma * (1 - Math.abs((section % 2) - 1)); + + let red = 0; + let green = 0; + let blue = 0; + + if (section >= 0 && section < 1) { + red = chroma; + green = x; + } else if (section < 2) { + red = x; + green = chroma; + } else if (section < 3) { + green = chroma; + blue = x; + } else if (section < 4) { + green = x; + blue = chroma; + } else if (section < 5) { + red = x; + blue = chroma; + } else { + red = chroma; + blue = x; + } + + const match = l - chroma / 2; + const toHex = (value: number) => Math.round((value + match) * 255).toString(16).padStart(2, '0'); + + return `#${toHex(red)}${toHex(green)}${toHex(blue)}`; +} + +function getGeneratedTheme( + value: string | null | undefined, + { + salt, + saturation, + lightness, + }: { + salt: string; + saturation: number; + lightness: number; + } +): AccentTheme { + const normalized = normalizeToken(value); + + if (!normalized) { + return DEFAULT_THEME; + } + + const hue = hashToken(`${salt}:${normalized}`) % 360; + const color = hslToHex(hue, 
saturation, lightness); + + return { + color, + rgb: hexToRgbTriplet(color), + }; +} + +export function getAccentVars(theme: AccentTheme): string { + return [ + `--accent-color:${theme.color}`, + `--accent-rgb:${theme.rgb}`, + `--pill-fg:${theme.color}`, + `--pill-rgb:${theme.rgb}`, + `--tile-rgb:${theme.rgb}`, + ].join(';') + ';'; +} + +export function getPostTypeTheme(type: string | null | undefined): AccentTheme { + return POST_TYPE_THEMES[normalizeToken(type)] || DEFAULT_THEME; +} + +export function getCategoryTheme(category: string | null | undefined): AccentTheme { + return getGeneratedTheme(category, { + salt: 'category', + saturation: 72, + lightness: 46, + }); +} + +export function getTagTheme(tag: string | null | undefined): AccentTheme { + return getGeneratedTheme(tag, { + salt: 'tag', + saturation: 68, + lightness: 50, + }); +} + /** * Filter posts by type and tag */ @@ -90,7 +231,7 @@ export function filterPosts( * Get color for post type */ export function getPostTypeColor(type: string): string { - return type === 'article' ? 'var(--primary)' : 'var(--secondary)'; + return getPostTypeTheme(type).color; } export { diff --git a/frontend/src/pages/about/index.astro b/frontend/src/pages/about/index.astro index 537e483..33149bd 100644 --- a/frontend/src/pages/about/index.astro +++ b/frontend/src/pages/about/index.astro @@ -4,7 +4,6 @@ import TerminalWindow from '../../components/ui/TerminalWindow.astro'; import CommandPrompt from '../../components/ui/CommandPrompt.astro'; import StatsList from '../../components/StatsList.astro'; import TechStackList from '../../components/TechStackList.astro'; -import InfoTile from '../../components/ui/InfoTile.astro'; import { api, DEFAULT_SITE_SETTINGS } from '../../lib/api/client'; import { getI18n } from '../../lib/i18n'; @@ -49,7 +48,7 @@ const ownerInitial = siteSettings.ownerName.charAt(0) || 'T';
      - +
      identity profile
      @@ -79,7 +78,7 @@ const ownerInitial = siteSettings.ownerName.charAt(0) || 'T';
      - +
      @@ -104,7 +103,7 @@ const ownerInitial = siteSettings.ownerName.charAt(0) || 'T';
      - +
      @@ -112,59 +111,75 @@ const ownerInitial = siteSettings.ownerName.charAt(0) || 'T';
      - +
      - +
      -
      +
      {siteSettings.social.github && ( - - - GitHub - + + + + + {t('about.contact')} + GitHub + + )} - {siteSettings.social.twitter && ( - - - Twitter - + + + + + {t('about.contact')} + Twitter + + )} - {siteSettings.social.email && ( - - - {t('comments.email')} - + + + + + {t('about.contact')} + {t('comments.email')} + + )} - - - {t('about.website')} - + + + + + {t('about.contact')} + {t('about.website')} + +
      diff --git a/frontend/src/pages/admin.astro b/frontend/src/pages/admin.astro index 0bfb7c0..4a3bdb6 100644 --- a/frontend/src/pages/admin.astro +++ b/frontend/src/pages/admin.astro @@ -308,7 +308,7 @@ const recentReviews = [...reviews].sort((a, b) => b.review_date.localeCompare(a.
      评价页
      - + @@ -321,7 +321,7 @@ const recentReviews = [...reviews].sort((a, b) => b.review_date.localeCompare(a.
      api endpoint
      -

      http://localhost:5150/api

      +

      https://init.cool/api

      diff --git a/frontend/src/pages/articles/[slug].astro b/frontend/src/pages/articles/[slug].astro index 37c2d92..26a4cc7 100644 --- a/frontend/src/pages/articles/[slug].astro +++ b/frontend/src/pages/articles/[slug].astro @@ -11,15 +11,23 @@ import Lightbox from '../../components/Lightbox.astro'; import CodeCopyButton from '../../components/CodeCopyButton.astro'; import Comments from '../../components/Comments.astro'; import ParagraphComments from '../../components/ParagraphComments.astro'; -import { apiClient } from '../../lib/api/client'; +import { apiClient, DEFAULT_SITE_SETTINGS } from '../../lib/api/client'; import { formatReadTime, getI18n } from '../../lib/i18n'; -import { resolveFileRef, getPostTypeColor } from '../../lib/utils'; +import { + getAccentVars, + getCategoryTheme, + getPostTypeColor, + getPostTypeTheme, + getTagTheme, + resolveFileRef, +} from '../../lib/utils'; export const prerender = false; const { slug } = Astro.params; let post = null; +let siteSettings = DEFAULT_SITE_SETTINGS; try { post = await apiClient.getPostBySlug(slug ?? 
''); @@ -27,22 +35,31 @@ try { console.error('API Error:', error); } +try { + siteSettings = await apiClient.getSiteSettings(); +} catch (error) { + console.error('Site settings API Error:', error); +} + if (!post) { return new Response(null, { status: 404 }); } const typeColor = getPostTypeColor(post.type || 'article'); +const typeTheme = getPostTypeTheme(post.type || 'article'); +const categoryTheme = getCategoryTheme(post.category); const contentText = post.content || post.description || ''; const wordCount = contentText.length; const readTimeMinutes = Math.ceil(wordCount / 300); const { locale, t } = getI18n(Astro); const articleMarkdown = contentText.replace(/^#\s+.+\r?\n+/, ''); +const paragraphCommentsEnabled = siteSettings.comments.paragraphsEnabled; const markdownProcessor = await createMarkdownProcessor(); const renderedContent = await markdownProcessor.render(articleMarkdown); --- - + @@ -66,12 +83,12 @@ const renderedContent = await markdownProcessor.render(articleMarkdown); {t('article.documentSession')} - + {post.type === 'article' ? t('common.article') : t('common.tweet')} - - + + {post.category}
      @@ -101,7 +118,11 @@ const renderedContent = await markdownProcessor.render(articleMarkdown); {post.tags?.length > 0 && (
      {post.tags.map(tag => ( - + {tag} @@ -120,18 +141,30 @@ const renderedContent = await markdownProcessor.render(articleMarkdown); {post.title}
      )} -
      -
      -
      + {post.images && post.images.length > 0 && ( +
      + {post.images.map((image, index) => ( +
      + {`${post.title} +
      + ))} +
      + )} -
      -
      - + {paragraphCommentsEnabled && } + +
      diff --git a/frontend/src/pages/articles/index.astro b/frontend/src/pages/articles/index.astro index eeb0916..18bf23d 100644 --- a/frontend/src/pages/articles/index.astro +++ b/frontend/src/pages/articles/index.astro @@ -7,6 +7,7 @@ import PostCard from '../../components/PostCard.astro'; import { api } from '../../lib/api/client'; import { getI18n } from '../../lib/i18n'; import type { Category, Post, Tag } from '../../lib/types'; +import { getAccentVars, getCategoryTheme, getPostTypeTheme, getTagTheme } from '../../lib/utils'; export const prerender = false; @@ -60,9 +61,22 @@ const postTypeFilters = [ { id: 'tweet', name: t('common.tweet'), icon: 'fa-comment-dots' } ]; -const typePromptCommand = `./filter --type ${selectedType || 'all'}`; -const categoryPromptCommand = `./filter --category ${selectedCategory ? `"${selectedCategory}"` : 'all'}`; -const tagPromptCommand = `./filter --tag ${selectedTag ? `"${selectedTag}"` : 'all'}`; +const typePromptCommand = + selectedType === 'all' + ? `grep -E "^type: (article|tweet)$" ./posts/*.md` + : `grep -E "^type: ${selectedType}$" ./posts/*.md`; +const categoryPromptCommand = selectedCategory + ? `grep -El "^category: ${selectedCategory}$" ./posts/*.md` + : `cut -d: -f2 ./categories.index | sort -u`; +const tagPromptCommand = selectedTag + ? `grep -Ril "#${selectedTag}" ./posts` + : `cut -d: -f2 ./tags.index | sort -u`; +const categoryAccentMap = Object.fromEntries( + allCategories.map((category) => [category.name.toLowerCase(), getAccentVars(getCategoryTheme(category.name))]) +); +const tagAccentMap = Object.fromEntries( + allTags.map((tag) => [String(tag.slug || tag.name).toLowerCase(), getAccentVars(getTagTheme(tag.name))]) +); const buildArticlesUrl = ({ type = selectedType, @@ -94,7 +108,7 @@ const buildArticlesUrl = ({
      - +

      {t('articlesPage.title')}

      @@ -104,7 +118,7 @@ const buildArticlesUrl = ({
      - {t('articlesPage.totalPosts', { count: filteredPosts.length })} + {t('articlesPage.totalPosts', { count: filteredPosts.length })} {selectedSearch && ( @@ -112,31 +126,43 @@ const buildArticlesUrl = ({ grep: {selectedSearch} )} - {selectedCategory && ( - - - {selectedCategory} - - )} - {selectedTag && ( - - - {selectedTag} - - )} + + + {selectedCategory} + + + + {selectedTag} +
      - +
      {postTypeFilters.map(filter => ( {filter.name} @@ -147,10 +173,11 @@ const buildArticlesUrl = ({ {allCategories.length > 0 && (
      - +
      @@ -160,8 +187,10 @@ const buildArticlesUrl = ({ {allCategories.map(category => ( {category.name} @@ -174,10 +203,11 @@ const buildArticlesUrl = ({ {allTags.length > 0 && (
      - +
      @@ -187,8 +217,10 @@ const buildArticlesUrl = ({ {allTags.map(tag => ( {tag.name} @@ -200,14 +232,34 @@ const buildArticlesUrl = ({
      - {paginatedPosts.length > 0 ? ( + {allPosts.length > 0 ? (
      - {paginatedPosts.map(post => ( - - ))} + {allPosts.map((post, index) => { + const matchesCurrentFilter = + (selectedType === 'all' || post.type === selectedType) && + (!selectedTag || post.tags?.some(isMatchingTag)) && + (!selectedCategory || post.category?.toLowerCase() === selectedCategory.toLowerCase()); + const filteredIndex = matchesCurrentFilter + ? filteredPosts.findIndex((item) => item.slug === post.slug) + : -1; + const isVisible = matchesCurrentFilter && filteredIndex >= startIndex && filteredIndex < startIndex + postsPerPage; + + return ( +
      tag.trim().toLowerCase()).join('|')} + data-article-index={index} + class:list={[!isVisible && 'hidden']} + > + +
      + ); + })}
      - ) : ( -
      + ) : null} +
      0 && 'hidden']}>
      @@ -222,38 +274,237 @@ const buildArticlesUrl = ({
      - )}
      - {totalPages > 1 && ( -
      -
      +
      +
      - {t('articlesPage.pageSummary', { current: currentPage, total: totalPages, count: totalPosts })} + {t('articlesPage.pageSummary', { current: currentPage, total: totalPages, count: totalPosts })}
      - {currentPage > 1 && ( - {t('articlesPage.previous')} - - )} - {currentPage < totalPages && ( - +
      - )}
      + + diff --git a/frontend/src/pages/ask/index.astro b/frontend/src/pages/ask/index.astro index a8fd7e1..d018639 100644 --- a/frontend/src/pages/ask/index.astro +++ b/frontend/src/pages/ask/index.astro @@ -1,6 +1,7 @@ --- import BaseLayout from '../../layouts/BaseLayout.astro'; -import { api, DEFAULT_SITE_SETTINGS } from '../../lib/api/client'; +import CommandPrompt from '../../components/ui/CommandPrompt.astro'; +import { API_BASE_URL, api, DEFAULT_SITE_SETTINGS } from '../../lib/api/client'; import { getI18n } from '../../lib/i18n'; export const prerender = false; @@ -33,7 +34,7 @@ const sampleQuestions = [
      -
      knowledge terminal
      +
      {t('ask.terminalLabel')}

      {t('ask.title')}

      {t('ask.subtitle')}

      @@ -52,14 +53,14 @@ const sampleQuestions = [ {aiEnabled ? ( <>
      - +
      - @@ -69,7 +70,7 @@ const sampleQuestions = [