feat: refresh content workflow and verification settings
All checks were successful
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 43s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Successful in 25m9s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 51s

This commit is contained in:
2026-04-01 18:47:17 +08:00
parent f2c07df320
commit 7de4ddc3ee
66 changed files with 1455 additions and 2759 deletions

6
.gitignore vendored
View File

@@ -32,5 +32,11 @@ blob-report/
*-playwright.out.log *-playwright.out.log
backend-restart.err.log backend-restart.err.log
backend-restart.out.log backend-restart.out.log
frontend.dev.err.log
frontend.dev.out.log
admin.dev.err.log
admin.dev.out.log
backend.dev.err.log
backend.dev.out.log
lighthouse-*/ lighthouse-*/
lighthouse-*.json lighthouse-*.json

View File

@@ -179,7 +179,7 @@ export function MarkdownWorkbench({
<span className="h-3 w-3 rounded-full bg-[#ffbd2e]" /> <span className="h-3 w-3 rounded-full bg-[#ffbd2e]" />
<span className="h-3 w-3 rounded-full bg-[#27c93f]" /> <span className="h-3 w-3 rounded-full bg-[#27c93f]" />
</div> </div>
<p className="font-mono text-xs text-slate-400">{path}</p> <p className="font-mono text-xs text-slate-400">Markdown </p>
</div> </div>
<div className="flex flex-wrap items-center gap-2"> <div className="flex flex-wrap items-center gap-2">
@@ -258,9 +258,7 @@ export function MarkdownWorkbench({
<span> <span>
{originalLabel} / {modifiedLabel} {originalLabel} / {modifiedLabel}
</span> </span>
) : ( ) : null}
<span>{path}</span>
)}
</div> </div>
{panel === 'edit' ? ( {panel === 'edit' ? (

View File

@@ -301,7 +301,9 @@ export interface AdminSiteSettingsResponse {
music_playlist: MusicTrack[] music_playlist: MusicTrack[]
ai_enabled: boolean ai_enabled: boolean
paragraph_comments_enabled: boolean paragraph_comments_enabled: boolean
comment_verification_mode: HumanVerificationMode
comment_turnstile_enabled: boolean comment_turnstile_enabled: boolean
subscription_verification_mode: HumanVerificationMode
subscription_turnstile_enabled: boolean subscription_turnstile_enabled: boolean
web_push_enabled: boolean web_push_enabled: boolean
turnstile_site_key: string | null turnstile_site_key: string | null
@@ -375,7 +377,9 @@ export interface SiteSettingsPayload {
musicPlaylist?: MusicTrack[] musicPlaylist?: MusicTrack[]
aiEnabled?: boolean aiEnabled?: boolean
paragraphCommentsEnabled?: boolean paragraphCommentsEnabled?: boolean
commentVerificationMode?: HumanVerificationMode | null
commentTurnstileEnabled?: boolean commentTurnstileEnabled?: boolean
subscriptionVerificationMode?: HumanVerificationMode | null
subscriptionTurnstileEnabled?: boolean subscriptionTurnstileEnabled?: boolean
webPushEnabled?: boolean webPushEnabled?: boolean
turnstileSiteKey?: string | null turnstileSiteKey?: string | null
@@ -416,6 +420,8 @@ export interface SiteSettingsPayload {
searchSynonyms?: string[] searchSynonyms?: string[]
} }
// Canonical human-verification modes. `(string & {})` keeps the three literal
// members visible to editor autocomplete while still accepting arbitrary
// backend-supplied strings; a plain `| string` would widen the whole union to `string`.
export type HumanVerificationMode = 'off' | 'captcha' | 'turnstile' | (string & {})
export interface CategoryRecord { export interface CategoryRecord {
id: number id: number
name: string name: string

View File

@@ -139,7 +139,7 @@ export function PostComparePage({ slugOverride }: { slugOverride?: string }) {
<GitCompareArrows className="h-4 w-4" /> <GitCompareArrows className="h-4 w-4" />
vs 稿 vs 稿
</CardTitle> </CardTitle>
<CardDescription>{state.path}</CardDescription> <CardDescription>稿</CardDescription>
</CardHeader> </CardHeader>
</Card> </Card>

View File

@@ -177,7 +177,7 @@ export function PostPolishPage() {
<Card> <Card>
<CardHeader> <CardHeader>
<CardTitle> vs </CardTitle> <CardTitle> vs </CardTitle>
<CardDescription>{snapshot.path}</CardDescription> <CardDescription> AI </CardDescription>
</CardHeader> </CardHeader>
<CardContent className="space-y-4"> <CardContent className="space-y-4">
<div className="flex flex-wrap items-center gap-3"> <div className="flex flex-wrap items-center gap-3">

View File

@@ -237,6 +237,11 @@ function formatWorkbenchStateLabel(
.join(' / ')}` .join(' / ')}`
} }
/**
 * Builds the virtual `article://` path shown for a post draft.
 * Falls back to the placeholder slug `new-post` when the slug is blank.
 */
function buildVirtualPostPath(slug: string) {
  const trimmed = slug.trim()
  return trimmed === ''
    ? 'article://posts/new-post'
    : `article://posts/${trimmed}`
}
function parseImageList(value: string) { function parseImageList(value: string) {
return value return value
.split('\n') .split('\n')
@@ -1145,9 +1150,7 @@ export function PostsPage() {
setMetadataDialog({ setMetadataDialog({
target: 'create', target: 'create',
title: createForm.title.trim() || createForm.slug.trim() || '新建草稿', title: createForm.title.trim() || createForm.slug.trim() || '新建草稿',
path: createForm.slug.trim() path: buildVirtualPostPath(createForm.slug),
? `backend/content/posts/${createForm.slug.trim()}.md`
: 'backend/content/posts/new-post.md',
proposal: nextProposal, proposal: nextProposal,
}) })
}) })
@@ -2130,8 +2133,7 @@ export function PostsPage() {
<Badge variant="outline">{editor.markdown.split(/\r?\n/).length} </Badge> <Badge variant="outline">{editor.markdown.split(/\r?\n/).length} </Badge>
</div> </div>
<div className="rounded-2xl border border-border/70 bg-background/70 p-4"> <div className="rounded-2xl border border-border/70 bg-background/70 p-4">
<p className="break-all font-mono text-xs text-muted-foreground">{editor.path}</p> <p className="text-sm text-muted-foreground">
<p className="mt-2 text-sm text-muted-foreground">
{formatDateTime(editor.createdAt)} · {formatDateTime(editor.updatedAt)} {formatDateTime(editor.createdAt)} · {formatDateTime(editor.updatedAt)}
</p> </p>
</div> </div>
@@ -2945,11 +2947,7 @@ export function PostsPage() {
value={createForm.markdown} value={createForm.markdown}
originalValue={buildCreateMarkdownForWindow(defaultCreateForm)} originalValue={buildCreateMarkdownForWindow(defaultCreateForm)}
diffValue={buildCreateMarkdownForWindow(createForm)} diffValue={buildCreateMarkdownForWindow(createForm)}
path={ path={buildVirtualPostPath(createForm.slug)}
createForm.slug.trim()
? `backend/content/posts/${createForm.slug.trim()}.md`
: 'backend/content/posts/new-post.md'
}
workspaceHeightClassName="h-[clamp(620px,74dvh,920px)]" workspaceHeightClassName="h-[clamp(620px,74dvh,920px)]"
mode={createMode} mode={createMode}
visiblePanels={createPanels} visiblePanels={createPanels}
@@ -3047,9 +3045,6 @@ export function PostsPage() {
<p className="mt-3 text-base font-semibold"> <p className="mt-3 text-base font-semibold">
{metadataDialog.title} {metadataDialog.title}
</p> </p>
<p className="mt-2 break-all font-mono text-xs text-muted-foreground">
{metadataDialog.path}
</p>
</div> </div>
<div className="grid gap-3 sm:grid-cols-3 xl:grid-cols-1"> <div className="grid gap-3 sm:grid-cols-3 xl:grid-cols-1">

View File

@@ -15,6 +15,7 @@ import { adminApi, ApiError } from '@/lib/api'
import type { import type {
AdminSiteSettingsResponse, AdminSiteSettingsResponse,
AiProviderConfig, AiProviderConfig,
HumanVerificationMode,
MusicTrack, MusicTrack,
SiteSettingsPayload, SiteSettingsPayload,
} from '@/lib/types' } from '@/lib/types'
@@ -70,6 +71,30 @@ const NOTIFICATION_CHANNEL_OPTIONS = [
{ value: 'ntfy', label: 'ntfy' }, { value: 'ntfy', label: 'ntfy' },
] as const ] as const
// Options for the verification-mode <Select> controls; `value`s must stay in
// sync with the canonical outputs of normalizeHumanVerificationMode.
const HUMAN_VERIFICATION_MODE_OPTIONS = [
{ value: 'off', label: '关闭' },
{ value: 'captcha', label: '普通验证码' },
{ value: 'turnstile', label: 'Turnstile' },
] as const
/**
 * Maps loose or legacy spellings of a verification mode onto the canonical
 * values ('off' | 'captcha' | 'turnstile'), returning `fallback` for anything
 * unrecognized (including null/undefined/empty input).
 */
function normalizeHumanVerificationMode(
  value: string | null | undefined,
  fallback: HumanVerificationMode,
): HumanVerificationMode {
  const canonical = (value ?? '').trim().toLowerCase()
  if (canonical === 'off') {
    return 'off'
  }
  // 'normal' / 'simple' are accepted as legacy aliases for the plain captcha.
  if (canonical === 'captcha' || canonical === 'normal' || canonical === 'simple') {
    return 'captcha'
  }
  if (canonical === 'turnstile') {
    return 'turnstile'
  }
  return fallback
}
function isCloudflareProvider(provider: string | null | undefined) { function isCloudflareProvider(provider: string | null | undefined) {
const normalized = provider?.trim().toLowerCase() const normalized = provider?.trim().toLowerCase()
return normalized === 'cloudflare' || normalized === 'cloudflare-workers-ai' || normalized === 'workers-ai' return normalized === 'cloudflare' || normalized === 'cloudflare-workers-ai' || normalized === 'workers-ai'
@@ -94,6 +119,14 @@ function normalizeSettingsResponse(
...input, ...input,
ai_providers: aiProviders, ai_providers: aiProviders,
search_synonyms: searchSynonyms, search_synonyms: searchSynonyms,
comment_verification_mode: normalizeHumanVerificationMode(
input.comment_verification_mode,
input.comment_turnstile_enabled ? 'turnstile' : 'captcha',
),
subscription_verification_mode: normalizeHumanVerificationMode(
input.subscription_verification_mode,
input.subscription_turnstile_enabled ? 'turnstile' : 'off',
),
turnstile_site_key: input.turnstile_site_key ?? null, turnstile_site_key: input.turnstile_site_key ?? null,
turnstile_secret_key: input.turnstile_secret_key ?? null, turnstile_secret_key: input.turnstile_secret_key ?? null,
web_push_vapid_public_key: input.web_push_vapid_public_key ?? null, web_push_vapid_public_key: input.web_push_vapid_public_key ?? null,
@@ -123,6 +156,9 @@ function Field({
} }
function toPayload(form: AdminSiteSettingsResponse): SiteSettingsPayload { function toPayload(form: AdminSiteSettingsResponse): SiteSettingsPayload {
const commentTurnstileEnabled = form.comment_verification_mode === 'turnstile'
const subscriptionTurnstileEnabled = form.subscription_verification_mode === 'turnstile'
return { return {
siteName: form.site_name, siteName: form.site_name,
siteShortName: form.site_short_name, siteShortName: form.site_short_name,
@@ -143,8 +179,10 @@ function toPayload(form: AdminSiteSettingsResponse): SiteSettingsPayload {
musicPlaylist: form.music_playlist, musicPlaylist: form.music_playlist,
aiEnabled: form.ai_enabled, aiEnabled: form.ai_enabled,
paragraphCommentsEnabled: form.paragraph_comments_enabled, paragraphCommentsEnabled: form.paragraph_comments_enabled,
commentTurnstileEnabled: form.comment_turnstile_enabled, commentVerificationMode: form.comment_verification_mode,
subscriptionTurnstileEnabled: form.subscription_turnstile_enabled, commentTurnstileEnabled,
subscriptionVerificationMode: form.subscription_verification_mode,
subscriptionTurnstileEnabled,
webPushEnabled: form.web_push_enabled, webPushEnabled: form.web_push_enabled,
turnstileSiteKey: form.turnstile_site_key, turnstileSiteKey: form.turnstile_site_key,
turnstileSecretKey: form.turnstile_secret_key, turnstileSecretKey: form.turnstile_secret_key,
@@ -659,22 +697,28 @@ export function SiteSettingsPage() {
</div> </div>
</label> </label>
<label className="flex items-start gap-3 rounded-2xl border border-border/70 bg-background/60 p-4"> <div className="rounded-2xl border border-border/70 bg-background/60 p-4">
<input <Field
type="checkbox" label="订阅提交验证方式"
checked={form.subscription_turnstile_enabled} hint="可选 关闭 / 普通验证码 / Turnstile若 Turnstile key 未配置完整,会自动回退到普通验证码。"
onChange={(event) => >
updateField('subscription_turnstile_enabled', event.target.checked) <Select
} value={form.subscription_verification_mode}
className="mt-1 h-4 w-4 rounded border-input text-primary focus:ring-ring" onChange={(event) =>
/> updateField(
<div> 'subscription_verification_mode',
<div className="font-medium"> Turnstile</div> normalizeHumanVerificationMode(event.target.value, 'off'),
<p className="mt-1 text-sm leading-6 text-muted-foreground"> )
Cloudflare Turnstile key }
</p> >
</div> {HUMAN_VERIFICATION_MODE_OPTIONS.map((option) => (
</label> <option key={option.value} value={option.value}>
{option.label}
</option>
))}
</Select>
</Field>
</div>
</div> </div>
<div className="grid gap-4 lg:grid-cols-2"> <div className="grid gap-4 lg:grid-cols-2">
@@ -926,22 +970,28 @@ export function SiteSettingsPage() {
</div> </div>
</label> </label>
<label className="flex items-start gap-3 rounded-2xl border border-border/70 bg-background/60 p-4"> <div className="rounded-2xl border border-border/70 bg-background/60 p-4">
<input <Field
type="checkbox" label="评论区验证方式"
checked={form.comment_turnstile_enabled} hint="文章评论和段落评论都走这里;若选择 Turnstile 但 key / secret 不完整,会自动回退到普通验证码。"
onChange={(event) => >
updateField('comment_turnstile_enabled', event.target.checked) <Select
} value={form.comment_verification_mode}
className="mt-1 h-4 w-4 rounded border-input text-primary focus:ring-ring" onChange={(event) =>
/> updateField(
<div> 'comment_verification_mode',
<div className="font-medium"> Turnstile</div> normalizeHumanVerificationMode(event.target.value, 'captcha'),
<p className="mt-1 text-sm leading-6 text-muted-foreground"> )
使 Cloudflare Turnstile key / secret退 }
</p> >
</div> {HUMAN_VERIFICATION_MODE_OPTIONS.map((option) => (
</label> <option key={option.value} value={option.value}>
{option.label}
</option>
))}
</Select>
</Field>
</div>
</CardContent> </CardContent>
</Card> </Card>

View File

@@ -45,6 +45,10 @@
description: "节奏更明显一点,适合切换阅读状态。" description: "节奏更明显一点,适合切换阅读状态。"
ai_enabled: false ai_enabled: false
paragraph_comments_enabled: true paragraph_comments_enabled: true
comment_verification_mode: "captcha"
subscription_verification_mode: "off"
turnstile_site_key: "0x4AAAAAACy58kMBSwXwqMhx"
turnstile_secret_key: "0x4AAAAAACy58m3gYfSqM-VIz4QK4wuO73U"
ai_provider: "newapi" ai_provider: "newapi"
ai_api_base: "https://91code.jiangnight.com/v1" ai_api_base: "https://91code.jiangnight.com/v1"
ai_api_key: "sk-5a5e27db9fb8f8ee7e1d8e3c6a44638c2e50cdb0a0cf9d926fefb5418ff62571" ai_api_key: "sk-5a5e27db9fb8f8ee7e1d8e3c6a44638c2e50cdb0a0cf9d926fefb5418ff62571"

View File

@@ -0,0 +1,24 @@
---
title: 徐霞客游记·游太和山日记(下)
slug: building-blog-with-astro
description: 《徐霞客游记》太和山下篇,包含琼台、南岩与五龙宫等段落。
category: 古籍游记
post_type: article
pinned: false
status: published
visibility: public
noindex: false
tags:
- 徐霞客
- 游记
- 太和山
- 长文测试
---
# 徐霞客游记·游太和山日记(下)
更衣上金顶。瞻叩毕,天宇澄朗,下瞰诸峰,近者鹄峙,远者罗列,诚天真奥区也。
遂从三天门之右小径下峡中。此径无级无索,乱峰离立,路穿其间,迥觉幽胜。三里馀,抵蜡烛峰右,泉涓涓溢出路旁,下为蜡烛涧。
从宫左趋雷公洞。洞在悬崖间。乃从北天门下,一径阴森,滴水、仙侣二岩,俱在路左,飞崖上突,泉滴沥于中。

View File

@@ -1,242 +0,0 @@
---
title: "Canokey入门指南:2FA、OpenPGP、PIV"
description: 本文是一份Canokey入门指南将介绍如何使用Canokey进行2FA、OpenPGP和PIV等操作。其中2FA部分将介绍如何使用Yubikey Authenticator进行管理OpenPGP部分将介绍如何生成GPG密钥并使用Canokey进行身份验证和加密解密PIV部分将介绍如何在Canokey中生成PIV证书并使用其进行身份验证。
date: 2022-08-19T16:42:40+08:00
draft: false
slug: canokeys
image:
categories:
- Linux
tags:
- Linux
---
# 2FA
`Canokey`使用`Yubikey Authenticator`来进行管理`2FA`
下载`Yubikey Authenticator`,以下为`Yubikey Authenticator`官方下载网址
```http
https://www.yubico.com/products/yubico-authenticator/#h-download-yubico-authenticator
```
运行`Yubikey Authenticator`
进入`custom reader`,在`Custom reader filter`处填入 `CanoKey`
![填入CanoKey](https://upload-images.jianshu.io/upload_images/9676051-ff0cd60f38ac7334.png)
右上角`Add account` 增加`2FA`
![添加2FA](https://upload-images.jianshu.io/upload_images/9676051-1031857fe0f13d08.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
```yaml
Issuer: 备注 可选
Account name : 用户名 必填项
Secret Key : Hotp或Totp的key 必填项
```
# OpenPGP
## 安装GPG
Windows 用户可下载 [Gpg4Win](https://gpg4win.org/download.html)Linux/macOS 用户使用对应包管理软件安装即可.
## 生成主密钥
```shell
gpg --expert --full-gen-key #生成GPG KEY
```
推荐使用`ECC`算法
![image-20220102223722475](https://upload-images.jianshu.io/upload_images/9676051-df42e4b958e9a238.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
```shell
选择(11) ECC (set your own capabilities) # 设置自己的功能 主密钥只保留 Certify 功能其他功能Encr,Sign,Auth使用子密钥
# 子密钥分成三份,分别获得三个不同的功能
# encr 解密功能
# sign 签名功能
# auth 登录验证功能
```
```shell
先选择 (S) Toggle the sign capability
```
![image-20220102224151589](https://upload-images.jianshu.io/upload_images/9676051-c3bb19eb398419e1.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
```
之后输入q 退出
```
键入1,选择默认算法
![键入1,选择默认算法](https://upload-images.jianshu.io/upload_images/9676051-7a2c5ee8ed4800af.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
设置主密钥永不过期
![image-20220102224451731](https://upload-images.jianshu.io/upload_images/9676051-cca6100917c2ffaa.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
填写信息,按照实际情况填写即可
![image-20220102224612167](https://upload-images.jianshu.io/upload_images/9676051-10430afe3aa592c7.png?imageMogr2/auto-orient/strip%7CimageView2/2/w/1240)
```
Windnows 下会弹出窗口输入密码,注意一定要保管好!!!
```
```shell
```
```shell
# 会自动生成吊销证书,注意保存到安全的地方
gpg: AllowSetForegroundWindow(22428) failed: 拒绝访问。
gpg: revocation certificate stored as 'C:\\Users\\Andorid\\AppData\\Roaming\\gnupg\\openpgp-revocs.d\\<此处为私钥>.rev'
# 以上的REV文件即为吊销证书
public and secret key created and signed.
```
```shell
pub ed25519 2022-01-02 [SC]
<此处为Pub>
uid <此处为Name> <此处为email>
```
生成子密钥
```shell
gpg --fingerprint --keyid-format long -K
```
下面生成不同功能的子密钥,其中 `<fingerprint>` 为上面输出的密钥指纹,本示例中即为 `私钥`。最后的 `2y` 为密钥过期时间,可自行设置,如不填写默认永不过期。
```shell
gpg --quick-add-key <fingerprint> cv25519 encr 2y
gpg --quick-add-key <fingerprint> ed25519 auth 2y
gpg --quick-add-key <fingerprint> ed25519 sign 2y
```
再次查看目前的私钥,可以看到已经包含了这三个子密钥。
```shell
gpg --fingerprint --keyid-format long -K
```
上面生成了三种功能的子密钥ssb分别为加密E、认证A、签名S对应 `OpenPGP Applet` 中的三个插槽。由于 `ECC` 实现的原因,加密密钥的算法区别于其他密钥的算法。
加密密钥用于加密文件和信息。签名密钥主要用于给自己的信息签名,保证这真的是来自**我**的信息。认证密钥主要用于 SSH 登录。
## 备份GPG
```shell
# 公钥
gpg -ao public-key.pub --export <ed25519/16位>
# 主密钥,请务必保存好!!!
# 注意 key id 后面的 !,表示只导出这一个私钥,若没有的话默认导出全部私钥。
gpg -ao sec-key.asc --export-secret-key <ed25519/16位>!
# sign子密钥
gpg -ao sign-key.asc --export-secret-key <ed25519/16位>!
gpg -ao auth-key.asc --export-secret-key <ed25519/16位>!
gpg -ao encr-key.asc --export-secret-key <ed25519/16位>!
```
## 导入Canokey
```shell
# 查看智能卡设备状态
gpg --card-status
# 写入GPG
gpg --edit-key <ed25519/16位> # 为上方的sec-key
# 选中第一个子密钥
key 1
# 写入到智能卡
keytocard
# 再次输入,取消选择
key 1
# 选择第二个子密钥
key 2
keytocard
key 2
key 3
keytocard
# 保存修改并退出
save
#再次查看设备状态,可以看到此时子密钥标识符为 ssb>,表示本地只有一个指向 card-no: F1D0 xxxxxxxx 智能卡的指针,已不存在私钥。现在可以删除掉主密钥了,请再次确认你已安全备份好主密钥。
gpg --card-status
```
## 删除本地密钥
```shell
gpg --delete-secret-keys <ed25519/16位> # 为上方的sec-key
```
为确保安全也可直接删除 gpg 的工作目录Windows 为 `%APPDATA%\gnupg`Linux/macOS 为 `~/.gnupg`
## 使用 Canokey
此时切换回日常使用的环境,首先导入公钥
```shell
gpg --import public-key.pub
```
然后设置子密钥指向 Canokey
```shell
gpg --edit-card
gpg/card> fetch
```
此时查看本地的私钥,可以看到已经指向了 Canokey
```
gpg --fingerprint --keyid-format long -K
```
配置gpg路径
```bash
git config --global gpg.program "C:\Program Files (x86)\GnuPG\bin\gpg.exe" --replace-all
```
## Git Commit 签名
首先确保 Git 本地配置以及 GitHub 中的邮箱信息包含在 `UID` 中,然后设置 Git 来指定使用子密钥中的签名S密钥。
```shell
git config --global user.signingkey <ed25519/16位> # 为上方的Sign密钥
```
之后在 `git commit` 时增加 `-S` 参数即可使用 gpg 进行签名。也可在配置中设置自动 gpg 签名,此处不建议全局开启该选项,因为有的脚本可能会使用 `git am` 之类的涉及到 `commit` 的命令,如果全局开启的话会导致问题。
```shell
git config commit.gpgsign true
```
如果提交到 GitHub前往 [GitHub SSH and GPG keys](https://github.com/settings/keys) 添加公钥。此处添加后,可以直接通过对应 GitHub ID 来获取公钥:`https://github.com/<yourid>.gpg`
## PIV
首先在Web端添加自己的私钥到智能卡之后前往 [WinCrypt SSH Agent](https://github.com/buptczq/WinCryptSSHAgent) 下载并运行,此时查看 `ssh-agent` 读取到的公钥信息,把输出的公钥信息添加到服务器的 `~/.ssh/authorized_keys`
```shell
# 设置环境池
$Env:SSH_AUTH_SOCK="\\.\pipe\openssh-ssh-agent"
# 查看ssh列表
ssh-add -L
```
此时连接 `ssh user@host`,会弹出提示输入 `PIN` 的页面,注意此时输入的是 `PIV Applet PIN`,输入后即可成功连接服务器。
```yaml
tips: 可能会出现权限不够的情况,需要禁用Windows服务OpenSSH Authentication Agent
```
最后可以把该程序快捷方式添加到启动目录 `%AppData%\Microsoft\Windows\Start Menu\Programs\Startup`,方便直接使用。

View File

@@ -1,67 +0,0 @@
---
title: "如何使用FFmpeg处理音视频文件"
description: 本文提供了FFmpeg处理音视频文件的完整指南包括将单张图片转换为视频、拼接多个视频、设置转场特效等多种操作。
date: 2022-07-25T14:05:04+08:00
draft: true
slug: ffmpeg
image:
categories: ffmpeg
tags: ffmpeg
---
# `ffmpeg`图片转视频
使用单张图片生成5秒视频
```bash
# -loop 1 指定开启单帧图片loop
# -t 5 指定loop时长为5秒
# -i input 指定输入图片文件路径 示例:pic.jpg
# -pix_fmt 指定编码格式为yuv420p
# -y 若输出文件已存在,则强制进行覆盖。
# ffmpeg会根据输出文件后缀,自动选择编码格式。
# 也可以使用 -f 指定输出格式
ffmpeg -loop 1 -t 5 -i <filename>.jpg -pix_fmt yuv420p -y output.ts
```
# `ffmpeg`拼接视频
```bash
# windows
# -i input 指定需要合并的文件,使用concat进行合并.示例:"concat:0.ts|1.ts|2.ts"
# -vcodec 指定视频编码器的参数为copy
# -acodec 指定音频编码器的参数为copy
# -y 若输出文件已存在,则强制进行覆盖。
ffmpeg -i "concat:0.ts|1.ts" -vcodec copy -acodec copy -y output.ts
```
# `ffmpeg`设置转场特效
```bash
# Linux
ffmpeg -i v0.mp4 -i v1.mp4 -i v2.mp4 -i v3.mp4 -i v4.mp4 -filter_complex \
"[0][1:v]xfade=transition=fade:duration=1:offset=3[vfade1]; \
[vfade1][2:v]xfade=transition=fade:duration=1:offset=10[vfade2]; \
[vfade2][3:v]xfade=transition=fade:duration=1:offset=21[vfade3]; \
[vfade3][4:v]xfade=transition=fade:duration=1:offset=25,format=yuv420p; \
[0:a][1:a]acrossfade=d=1[afade1]; \
[afade1][2:a]acrossfade=d=1[afade2]; \
[afade2][3:a]acrossfade=d=1[afade3]; \
[afade3][4:a]acrossfade=d=1" \
-movflags +faststart out.mp4
```
| 输入文件 | 输入文件的视频总长 | + | previous xfade `offset` | - | xfade `duration` | `offset` = |
| :------- | :----------------- | :--: | :---------------------- | :--: | :--------------- | :--------- |
| `v0.mp4` | 4 | + | 0 | - | 1 | 3 |
| `v1.mp4` | 8 | + | 3 | - | 1 | 10 |
| `v2.mp4` | 12 | + | 10 | - | 1 | 21 |
| `v3.mp4` | 5 | + | 21 | - | 1 | 25 |
// 将音频转为单声道
```
ffmpeg -i .\1.mp3 -ac 1 -ar 44100 -ab 16k -vol 50 1s.mp3
ffmpeg -i one.ts -i 1s.mp3 -map 0:v -map 1:a -c:v copy -shortest -af apad -y one1.ts
```

View File

@@ -1,121 +0,0 @@
---
title: "使用arm交叉编译工具并解决GLIBC版本不匹配的问题"
description: 介绍如何使用arm交叉编译工具来编译Go程序并解决在arm平台上运行时出现GLIBC版本不匹配的问题。
date: 2022-06-10T15:00:26+08:00
draft: false
slug: go-arm
image:
categories:
- Go
tags:
- Arm
- Go
- GLIBC
---
1. 下载 ARM 交叉编译工具,可以从官方网站下载。比如,可以从如下链接下载 GNU 工具链:[https://developer.arm.com/downloads/-/gnu-a](https://developer.arm.com/downloads/-/gnu-a)
示例:https://developer.arm.com/-/media/Files/downloads/gnu-a/10.3-2021.07/binrel/gcc-arm-10.3-2021.07-mingw-w64-i686-aarch64-none-elf.tar.xz
2. 设置 Go ARM 交叉编译环境变量。具体来说,需要设置以下变量:
```ruby
$env:GOOS="linux"
$env:GOARCH="arm64"
$env:CGO_ENABLED=1
$env:CC="D:\arm\gcc-arm-10.3-2021.07-mingw-w64-i686-aarch64-none-linux-gnu\bin\aarch64-none-linux-gnu-gcc.exe"
$env:CXX="D:\arm\gcc-arm-10.3-2021.07-mingw-w64-i686-aarch64-none-linux-gnu\bin\aarch64-none-linux-gnu-g++.exe"
```
3. 在 ARM 上运行程序时可能会出现如下错误:
```bash
./bupload: /lib/aarch64-linux-gnu/libc.so.6: version `GLIBC_2.28' not found (required by ./bupload)
./bupload: /lib/aarch64-linux-gnu/libc.so.6: version `GLIBC_2.32' not found (required by ./bupload)
./bupload: /lib/aarch64-linux-gnu/libc.so.6: version `GLIBC_2.33' not found (required by ./bupload)
```
这是因为程序需要使用较新版本的 GLIBC 库,而 ARM 上安装的库版本较旧。可以通过以下步骤来解决这个问题:
4. 查看当前系统中 libc 库所支持的版本:
```bash
strings /lib/aarch64-linux-gnu/libc.so.6 | grep GLIBC_
```
5. 备份整个 `/lib` 目录和 `/usr/include` 目录,以便稍后还原。
6. 从 GNU libc 官方网站下载对应版本的 libc 库。例如,可以从如下链接下载 2.35 版本的 libc 库:[http://ftp.gnu.org/gnu/glibc/glibc-2.35.tar.xz](http://ftp.gnu.org/gnu/glibc/glibc-2.35.tar.xz)
7. 解压 libc 库:
```
xz -d glibc-2.35.tar.xz
tar xvf glibc-2.35.tar glibc-2.35
```
8. 创建并进入 build 目录:
```bash
mkdir build
cd build
```
9. 配置 libc 库的安装选项:
```javascript
../configure --prefix=/usr --disable-profile --enable-add-ons --with-headers=/usr/include --with-binutils=/usr/bin
```
10. 编译并安装 libc 库:
```go
make -j4
make install
```
接下来是关于 `make` 报错的部分:
```yaml
asm/errno.h: No such file or directory
```
这个报错是因为 `errno.h` 文件中包含了 `asm/errno.h` 文件,但是找不到这个文件。为了解决这个问题,我们需要创建一个软链接:
```bash
ln -s /usr/include/asm-generic /usr/include/asm
```
然后又出现了另一个报错:
```bash
/usr/include/aarch64-linux-gnu/asm/sigcontext.h: No such file or directory
```
这个问题也可以通过重新安装`linux-libc-dev`后创建软链接来解决:
```bash
# find / -name sigcontext.h
sudo apt-get install --reinstall linux-libc-dev
ln -s /usr/include/aarch64-linux-gnu/asm/sigcontext.h /usr/include/asm/sigcontext.h
```
接下来,还有一个报错:
```yaml
asm/sve_context.h: No such file or directory
```
这个报错是因为最新的 Linux 内核在启用 ARM Scalable Vector Extension (SVE) 后,需要包含 `asm/sve_context.h` 文件。我们需要创建一个软链接来解决这个问题:
```bash
# find / -name sve_context.h
ln -s /usr/include/aarch64-linux-gnu/asm/sve_context.h /usr/include/asm/sve_context.h
```
最后,还需要创建一个软链接:
```bash
# find / -name byteorder.h
ln -s /usr/include/aarch64-linux-gnu/asm/byteorder.h /usr/include/asm/byteorder.h
```
完成以上步骤后,我们再次执行 `make` 命令,就应该可以顺利地编译和安装 glibc 了。

View File

@@ -1,173 +0,0 @@
---
title: "Go使用gRPC进行通信"
description: RPC是远程过程调用的简称是分布式系统中不同节点间流行的通信方式。
date: 2022-05-26T14:17:33+08:00
draft: false
slug: go-grpc
image:
categories:
- Go
tags:
- Go
- gRPC
---
# 安装`gRPC`和`Protoc`
## 安装`protobuf`
```bash
go get -u google.golang.org/protobuf
go get -u google.golang.org/protobuf/proto
go get -u google.golang.org/protobuf/protoc-gen-go
```
## 安装`Protoc`
```shell
# 下载二进制文件并添加至环境变量
https://github.com/protocolbuffers/protobuf/releases
```
安装`Protoc`插件`protoc-gen-go`
```shell
# go install 会自动编译项目并添加至环境变量中
go install google.golang.org/protobuf/cmd/protoc-gen-go@latest
```
```shell
#protoc-gen-go 文档地址
https://developers.google.com/protocol-buffers/docs/reference/go-generated
```
# 创建`proto`文件并定义服务
## 新建 `task.proto`文件
```shell
touch task.proto
```
## 编写`task.proto`
```protobuf
// 指定proto版本
syntax = "proto3";
// 指定包名
package task;
// 指定输出 go 语言的源码到哪个目录和 包名
// 主要 目录和包名用 ; 隔开
// 将在当前目录生成 task.pb.go
// 也可以只填写 "./",会生成的包名会变成 "----"
option go_package = "./;task";
// 指定RPC的服务名
service TaskService {
// 调用 AddTaskCompletion 方法
rpc AddTaskCompletion(request) returns (response);
}
// RPC TaskService服务,AddTaskCompletion函数的请求参数,即消息
message request {
uint32 id = 1;//任务id
string module = 2;//所属模块
int32 value = 3;//此次完成值
string guid = 4;//用户id
}
// RPC TaskService服务,TaskService函数的返回值,即消息
message response{
}
```
## 使用`Protoc`来生成Go代码
```bash
protoc --go_out=. --go-grpc_out=. <要进行生成代码的文件>.proto
# example
protoc --go_out=. --go-grpc_out=. .\task.proto
```
这样会生成两个`.go`文件,一个是对应消息`task.pb.go`,一个对应服务接口`task_grpc.pb.go`
`task_grpc.pb.go`中,在我们定义的服务接口中,多增加了一个私有的接口方法:
`mustEmbedUnimplementedTaskServiceServer()`
# 使用`Go`监听`gRPC`服务端及客户端
## 监听服务端
并有生成的一个`UnimplementedTaskServiceServer`结构体来实现了所有的服务接口。因此,在我们自己实现的服务类中,需要继承这个结构体,如:
```go
// 用于实现grpc服务 TaskServiceServer 接口
type TaskServiceImpl struct {
// 需要继承结构体 UnimplementedServiceServer 或mustEmbedUnimplementedTaskServiceServer
task.mustEmbedUnimplementedTaskServiceServer()
}
func main() {
// 创建Grpc服务
// 创建tcp连接
listener, err := net.Listen("tcp", ":8082")
if err != nil {
fmt.Println(err)
return
}
// 创建grpc服务
grpcServer := grpc.NewServer()
// 此函数在task.pb.go中,自动生成
task.RegisterTaskServiceServer(grpcServer, &TaskServiceImpl{})
// 在grpc服务上注册反射服务
reflection.Register(grpcServer)
// 启动grpc服务
err = grpcServer.Serve(listener)
if err != nil {
fmt.Println(err)
return
}
}
func (s *TaskServiceImpl) AddTaskCompletion(ctx context.Context, in *task.Request) (*task.Response, error) {
fmt.Println("收到一个Grpc 请求, 请求参数为", in.Guid)
r := &task.Response{
}
return r, nil
}
```
然后在`TaskService`上实现我们的服务接口。
## 客户端
```go
conn, err := grpc.Dial("127.0.0.1:8082", grpc.WithInsecure())
if err != nil {
panic(err)
}
defer conn.Close()
// 创建grpc客户端
client := task.NewTaskServiceClient(conn)
// 创建请求
req := &task.Request{
Id: 1,
Module: "test",
Value: 3,
Guid: "test",
}
// 调用rpc TaskService AddTaskCompletion函数
response, err := client.AddTaskCompletion(context.Background(), req)
if err != nil {
log.Println(err)
return
}
log.Println(response)
```
[本文参考](https://www.cnblogs.com/whuanle/p/14588031.html)

View File

@@ -1,98 +0,0 @@
---
title: "Go语言解析Xml"
slug: "go-xml"
date: 2022-05-20T14:38:05+08:00
draft: false
description: "使用Go简简单单的解析Xml"
tags:
- Go
- Xml
categories:
- Go
---
# 开始之前
```go
import "encoding/xml"
```
## 简单的`Xml`解析
### 1.假设我们解析的`Xml`内容如下:
```xml
<feed>
<person name="initcool" id="1" age="18" />
</feed>
```
<!--more-->
### 2.接着我们构造对应的结构体
```go
type Feed struct {
XMLName xml.Name `xml:"feed"`
Person struct{
Name string `xml:"name,attr"`
Id string `xml:"id,attr"`
Age int `xml:"age,attr"`
} `xml:"person"`
}
```
### 3.对`Xml`数据进行反序列化
```go
var feed Feed
// 读取Xml文件并返回字节流
content,err := ioutil.ReadFile(XmlFilename)
if err != nil {
log.Fatal(err)
}
// 将读取到的内容反序列化到feed
xml.Unmarshal(content,&feed)
```
## 带有命名空间的`Xml`解析
部分`xml`文件会带有`命名空间`(`Namespace`),也就是冒号左侧的内容,此时我们需要在`go`结构体的`tag` 中加入`命名空间`
### 1.带有命名空间(Namespace)的`Xml`文件
```xml
<feed xmlns:yt="http://www.youtube.com/xml/schemas/2015" xmlns:media="http://search.yahoo.com/mrss/" xmlns="http://www.w3.org/2005/Atom">
<!-- yt即是命名空间 -->
<yt:videoId>XXXXXXX</yt:videoId>
<!-- media是另一个命名空间 -->
<media:community></media:community>
</feed>
```
### 2.针对命名空间构造结构体
```go
type Feed struct {
XMLName xml.Name `xml:"feed"` // 指定最外层的标签为feed
VideoId string `xml:"http://www.youtube.com/xml/schemas/2015 videoId"`
Community string `xml:"http://search.yahoo.com/mrss/ community"`
}
```
### 3.对`Xml`数据进行反序列化
```go
var feed Feed
// 读取Xml文件并返回字节流
content,err := ioutil.ReadFile(XmlFilename)
if err != nil {
log.Fatal(err)
}
// 将读取到的内容反序列化到feed
xml.Unmarshal(content,&feed)
```

View File

@@ -1,36 +0,0 @@
---
title: "Hugo使用指南"
slug: "hugo"
draft: false
date: 2022-05-20T10:23:53+08:00
description: "快速上手hugo"
tags:
- Go
- Hugo
categories:
- Go
---
查看Hugo版本号
```bash
hugo version
```
新建一个Hugo页面
```
hugo new site <siteName>
```
设置主题
```bash
cd <siteName>
git init
# 设置为 Stack主题
git clone https://github.com/CaiJimmy/hugo-theme-stack/ themes/hugo-theme-stack
git submodule add https://github.com/CaiJimmy/hugo-theme-stack/ themes/hugo-theme-stack
```
部署Hugo到github

View File

@@ -1,67 +0,0 @@
---
title: "Linux部署DHCP服务"
description: Debian下使用docker镜像部署DHCP服务
date: 2022-05-23T11:11:40+08:00
draft: false
slug: linux-dhcp
image:
categories: Linux
tags:
- Linux
- DHCP
---
拉取`networkboot/dhcpd`镜像
```shell
docker pull networkboot/dhcpd
```
新建`data/dhcpd.conf`文件
```shell
touch /data/dhcpd.conf
```
修改`data/dhcpd.conf`文件
```
subnet 204.254.239.0 netmask 255.255.255.224 {
option subnet-mask 255.255.0.0;
option domain-name "cname.nmslwsnd.com";
option domain-name-servers 8.8.8.8;
range 204.254.239.10 204.254.239.30;
}
```
修改`/etc/network/interfaces`
```
# The loopback network interface (always required)
auto lo
iface lo inet loopback
# Get our IP address from any DHCP server
auto dhcp
iface dhcp inet static
address 204.254.239.0
netmask 255.255.255.224
```
获取帮助命令
```shell
docker run -it --rm networkboot/dhcpd man dhcpd.conf
```
运行`DHCP`服务
```shell
docker run -it --rm --init --net host -v "/data":/data networkboot/dhcpd <网卡名称>
# 示例
docker run -it --rm --init --net host -v "/data":/data networkboot/dhcpd dhcp
```

View File

@@ -1,36 +0,0 @@
---
title: "Linux Shell"
description:
date: 2022-05-21T10:02:09+08:00
draft: false
Hidden: true
slug: linux-shell
image:
categories:
Linux
tag:
Linux
Shell
---
Linux 守护进程脚本:
```bash
#!/bin/bash
# nohup.sh
while true
do
# -f 后跟进程名,判断进程是否正在运行
if [ `pgrep -f <ProcessName> | wc -l` -eq 0 ];then
echo "进程已终止"
push
# /dev/null 无输出日志
nohup ./<ProcessName> > /dev/null 2>&1 &
else
echo "进程正在运行"
fi
# 每隔1分钟检查一次
sleep 1m
done
```

View File

@@ -1,65 +0,0 @@
---
title: "Linux"
description:
date: 2022-09-08T15:19:00+08:00
draft: true
slug: linux
image:
categories:
- Linux
tags:
- Linux
---
```bash
# 使用cd 进入到上一个目录
cd -
```
复制和粘贴
```bash
ctrl + shift + c
ctrl + shift + v
```
快速移动
```bash
# 移动到行首
ctrl + a
# 移动到行尾
ctrl + e
```
快速删除
```bash
# 删除光标之前的内容
ctrl + u
# 删除光标之后的内容
ctrl + k
# 恢复之前删除的内容
ctrl + y
```
不使用 cat
```
使用less 查看 顶部的文件
less filename
```
使用alt+backspace删除,以单词为单位
```
tcpdump host 1.1.1.1
```
```
# 并行执行命令 Parallel
find . -type f -name '*.html' -print | parallel gzip
```

View File

@@ -0,0 +1,24 @@
---
title: 游黄山记(中)
slug: loco-rs-framework
description: 钱谦益《游黄山记》中篇,适合测试中文长文、检索与段落锚点。
category: 古籍游记
post_type: article
pinned: false
status: published
visibility: public
noindex: false
tags:
- 钱谦益
- 黄山
- 游记
- 长文测试
---
# 游黄山记(中)
由祥符寺度石桥而北,逾慈光寺,行数里,径朱砂庵而上。过此取道钵盂、老人两峰之间,峰趾相并,两崖合遝,弥望削成。
憩桃源庵,指天都为诸峰之中峰,山形络绎,未有以殊异也。云生峰腰,层叠如裼衣焉。
清晓,出文殊院,神鸦背行而先。避莲华沟险,从支径右折,险益甚。上平天矼,转始信峰,经散花坞,看扰龙松。

View File

@@ -1,569 +0,0 @@
---
title: "mysql个人常用命令及操作"
description:
date: 2021-09-21T16:13:24+08:00
draft: true
slug: mysql
image:
categories:
- Database
tags:
- Linux
- Mysql
- Sql
---
启动`mysql`
```bash
sudo service mysql start
```
使用`root`账户登录`mysql`
```bash
sudo mysql -u root
```
查看数据库信息
```mysql
show databases;
```
新增数据库
```mysql
create database <>;
# 示例新增一个名为gradesystem的数据库
create database gradesystem;
```
切换数据库
```mysql
use <数据库名>;
# 示例切换至gradesystem数据库
use gradesystem;
```
查看数据库中的表
```mysql
# 查看数据库中所有的表
show tables;
```
新增表
```mysql
# MySQL不区分大小写
CREATE TABLE student(
sid int NOT NULL AUTO_INCREMENT,
sname varchar(20) NOT NULL,
gender varchar(10) NOT NULL,
PRIMARY KEY(sid)
);
# 新增一个表名为学生的表。
# AUTO_INCREMENT, 自动地创建主键字段的值。
# PRIMARY KEY(sid) 设置主键为sid
CREATE TABLE course(
cid int not null auto_increment,
cname varchar(20) not null,
primary key(cid)
);
# 新增一个表名为课程的表。
# primary key(cid) 设置主键为cid
CREATE TABLE mark(
mid int not null auto_increment,
sid int not null,
cid int not null,
score int not null,
primary key(mid),
foreign key(sid) references student(sid),
foreign key(cid) references course(cid)
);
# 新增一个表明为mark的表
# primary key(cid) 设置主键为cid
# foreign 设置外键为sid
# foreign 设置外键为cid
insert into student values(1,'Tom','male'),(2,'Jack','male'),(3,'Rose','female');
# 向student表插入数据sid为1sname为'Tom',gender为'male'
insert into course values(1,'math'),(2,'physics'),(3,'chemistry');
# 向course表插入数据sid为1cname为'math'
insert into mark values(1,1,1,80);
# 向mark表插入数据mid为1sid为1,cid为1score为80
```
### 向数据库插入数据
```mysql
source <SQL文件路径>
```
## SELECT语句查询
SELECT 语句的基本格式为:
```bash
SELECT 要查询的列名 FROM 表名字 WHERE 限制条件;
```
```mysql
select name,age from employee;
# 查看employee的name列和age列
select name,age from employee where age > 25;
# 筛选出age 大于25的结果
select name,age,phone from employee where name = 'Mary';
# 筛选出name为'Mary'的name,age,phone
select name,age,phone from employee where age < 25 or age >30;
# 筛选出age小于25或大于30的name,age,phone
select name,age,phone from employee where age > 25 and age < 30;
# 筛选出age大于25且小于30的name,age,phone
select name,age,phone from employee where age between 25 and 30;
# 筛选出age在25和30之间(含边界)的name,age,phone
select name,age,phone,in_dpt from employee where in_dpt in('dpt3','dpt4');
# 筛选出在dpt3或dpt4里面的name,age,phone,in_dpt
select name,age,phone,in_dpt from employee where in_dpt not in('dpt1','dpt3');
# 筛选出不在dpt1和dpt3的name,age,phone,in_dpt
```
## 通配符
关键字 **LIKE** 可用于实现模糊查询,常见于搜索功能中。
和 LIKE 联用的通常还有通配符代表未知字符。SQL 中的通配符是 `_``%` 。其中 `_` 代表一个**未指定**字符,`%` 代表**不定个**未指定字符
```mysql
select name,age,phone from employee where phone like '1101__';
# 筛选出1101开头的六位数字的name,age,phone
select name,age,phone from employee where name like 'J%';
# 筛选出name为J开头的人的name,age,phone
```
## 排序
为了使查询结果看起来更顺眼,我们可能需要对结果按某一列来排序,这就要用到 **ORDER BY** 排序关键词。默认情况下,**ORDER BY** 的结果是**升序**排列,而使用关键词 **ASC****DESC** 可指定**升序**或**降序**排序。 比如,我们**按 salary 降序排列**SQL 语句为
```mysql
select name,age,salary,phone from employee order by salary desc;
# salary列按降序排列
select name,age,salary,phone from employee order by salary;
# 不加 DESC 或 ASC 将默认按照升序排列。
```
## SQL 内置函数和计算
SQL 提供了一些内置函数,这些函数都对 SELECT 的结果做操作:
| 函数名: | COUNT | SUM | AVG | MAX | MIN |
| -------- | ----- | ---- | -------- | ------ | ------ |
| 作用: | 计数 | 求和 | 求平均值 | 最大值 | 最小值 |
> 其中 COUNT 函数可用于任何数据类型(因为它只是计数),而 SUM 、AVG 函数都只能对数字类数据类型做计算MAX 和 MIN 可用于数值、字符串或是日期时间数据类型。
```mysql
select max(salary) as max_salary,min(salary) from employee;
# 使用as关键字可以给值重命名
```
## 连接查询
在处理多个表时,子查询只有在结果来自一个表时才有用。但如果需要显示两个表或多个表中的数据,这时就必须使用连接 **(join)** 操作。 连接的基本思想是把两个或多个表当作一个新的表来操作,如下:
```mysql
select id,name,people_num from employee,department where employee.in_dpt = department.dpt_name order by id;
# 这条语句查询出的是,各员工所在部门的人数,其中员工的 id 和 name 来自 employee 表people_num 来自 department 表:
select id,name,people_num from employee join department on employee.in_dpt = department.dpt_name order by id;
# 另一个连接语句格式是使用 JOIN ON 语法,刚才的语句等同于以上语句
```
## 删除数据库
```mysql
drop database test_01;
# 删除名为test_01的数据库;
```
### 修改表
重命名一张表的语句有多种形式,以下 3 种格式效果是一样的:
```sql
RENAME TABLE <旧表名> TO <新表名>;
ALTER TABLE <旧表名> RENAME <新表名>;
ALTER TABLE <旧表名> RENAME TO <新表名>;
```
进入数据库 mysql_shiyan
```mysql
use mysql_shiyan
```
使用命令尝试修改 `table_1` 的名字为 `table_2`
```mysql
RENAME TABLE table_1 TO table_2;
```
删除一张表的语句,类似于刚才用过的删除数据库的语句,格式是这样的:
```sql
DROP TABLE <表名>;
```
比如我们把 `table_2` 表删除:
```mysql
DROP TABLE table_2;
```
#### 增加一列
在表中增加一列的语句格式为:
```sql
ALTER TABLE <表名> ADD COLUMN <列名> <数据类型>;
ALTER TABLE <表名> ADD <列名> <数据类型>;
```
现在 employee 表中有 `id、name、age、salary、phone、in_dpt` 这 6 个列,我们尝试加入 `height` (身高)一个列并指定 DEFAULT 约束:
```mysql
ALTER TABLE employee ADD height INT(4) DEFAULT 170;
```
可以发现:新增加的列,被默认放置在这张表的最右边。如果要把增加的列插入在指定位置,则需要在语句的最后使用 AFTER 关键词(**“AFTER 列 1” 表示新增的列被放置在 “列 1” 的后面**)。
> 提醒:语句中的 INT(4) 不是表示整数的字节数,而是表示该值的显示宽度,如果设置填充字符为 0则 170 显示为 0170
比如我们新增一列 `weight`(体重) 放置在 `age`(年龄) 的后面:
```mysql
ALTER TABLE employee ADD weight INT(4) DEFAULT 120 AFTER age;
```
上面的效果是把新增的列加在某位置的后面,如果想放在第一列的位置,则使用 `FIRST` 关键词,如语句:
```sql
ALTER TABLE employee ADD test INT(10) DEFAULT 11 FIRST;
```
#### 删除一列
删除表中的一列和刚才使用的新增一列的语句格式十分相似,只是把关键词 `ADD` 改为 `DROP` ,语句后面不需要有数据类型、约束或位置信息。具体语句格式:
```sql
ALTER TABLE <表名> DROP COLUMN <列名>;
ALTER TABLE <表名> DROP <列名>;
```
我们把刚才新增的 `test` 删除:
```sql
ALTER TABLE employee DROP test;
```
#### 重命名一列
这条语句其实不只可用于重命名一列,准确地说,它是对一个列做修改(CHANGE)
```sql
ALTER TABLE <表名> CHANGE <原列名> <新列名> <数据类型>;
```
> **注意:这条重命名语句后面的 “数据类型” 不能省略,否则重命名失败。**
当**原列名**和**新列名**相同的时候,指定新的**数据类型**或**约束**,就可以用于修改数据类型或约束。需要注意的是,修改数据类型可能会导致数据丢失,所以要慎重使用。
我们用这条语句将 “height” 一列重命名为汉语拼音 “shengao” ,效果如下:
```mysql
ALTER TABLE employee CHANGE height shengao INT(4) DEFAULT 170;
```
#### 改变数据类型
要修改一列的数据类型,除了使用刚才的 **CHANGE** 语句外,还可以用这样的 **MODIFY** 语句:
```sql
ALTER TABLE <表名> MODIFY <列名> <新数据类型>;
```
再次提醒,修改数据类型必须小心,因为这可能会导致数据丢失。在尝试修改数据类型之前,请慎重考虑。
#### 修改表中某个值
大多数时候我们需要做修改的不会是整个数据库或整张表,而是表中的某一个或几个数据,这就需要我们用下面这条命令达到精确的修改:
```sql
UPDATE <表名> SET <列名1>=<值1>,<列名2>=<值2> WHERE <条件>;
```
比如,我们要把 Tom 的 age 改为 21salary 改为 3000
```mysql
UPDATE employee SET age=21,salary=3000 WHERE name='Tom';
```
> **注意:一定要有 WHERE 条件,否则会出现你不想看到的后果**
#### 删除一行记录
删除表中的一行数据,也必须加上 WHERE 条件,否则整列的数据都会被删除。删除语句:
```sql
DELETE FROM <表名> WHERE <条件>;
```
我们尝试把 Tom 的数据删除:
```mysql
DELETE FROM employee WHERE name='Tom';
```
#### 索引
索引是一种与表有关的结构,它的作用相当于书的目录,可以根据目录中的页码快速找到所需的内容。
当表中有大量记录时,若要对表进行查询,没有索引的情况是全表搜索:将所有记录一一取出,和查询条件进行对比,然后返回满足条件的记录。这样做会执行大量磁盘 I/O 操作,并花费大量数据库系统时间。
而如果在表中已建立索引,在索引中找到符合查询条件的索引值,通过索引值就可以快速找到表中的数据,可以**大大加快查询速度**。
对一张表中的某个列建立索引,有以下两种语句格式:
```sql
ALTER TABLE <表名> ADD INDEX <索引名> (<列名>);
CREATE INDEX <索引名> ON <表名> (<列名>);
```
我们用这两种语句分别建立索引:
```sql
ALTER TABLE employee ADD INDEX idx_id (id); #employee表的id列上建立名为idx_id的索引
CREATE INDEX idx_name ON employee (name); #employee表的name列上建立名为idx_name的索引
```
索引的效果是加快查询速度,当表中数据不够多的时候是感受不出它的效果的。这里我们使用命令 **SHOW INDEX FROM 表名字;** 查看刚才新建的索引:
![01](https://doc.shiyanlou.com/MySQL/sql-06-01.png)
在使用 SELECT 语句查询的时候,语句中 WHERE 里面的条件,会**自动判断有没有可用的索引**。
比如有一个用户表,它拥有用户名(username)和个人签名(note)两个字段。其中用户名具有唯一性,并且格式具有较强的限制,我们给用户名加上一个唯一索引;个性签名格式多变,而且允许不同用户使用重复的签名,不加任何索引。
这时候,如果你要查找某一用户,使用语句 `select * from user where username=?``select * from user where note=?` 性能是有很大差距的,对**建立了索引的用户名**进行条件查询会比**没有索引的个性签名**条件查询快几倍,在数据量大的时候,这个差距只会更大。
一些字段不适合创建索引,比如性别,这个字段存在大量的重复记录无法享受索引带来的速度加成,甚至会拖累数据库,导致数据冗余和额外的 CPU 开销。
## 视图
视图是从一个或多个表中导出来的表,是一种**虚拟存在的表**。它就像一个窗口,通过这个窗口可以看到系统专门提供的数据,这样,用户可以不用看到整个数据库中的数据,而只关心对自己有用的数据。
注意理解视图是虚拟的表:
- 数据库中只存放了视图的定义,而没有存放视图中的数据,这些数据存放在原来的表中;
- 使用视图查询数据时,数据库系统会从原来的表中取出对应的数据;
- 视图中的数据依赖于原来表中的数据,一旦表中数据发生改变,显示在视图中的数据也会发生改变;
- 在使用视图的时候,可以把它当作一张表。
创建视图的语句格式为:
```sql
CREATE VIEW <视图名>(<列a>,<列b>,<列c>) AS SELECT <列1>,<列2>,<列3> FROM <表名>;
```
可见创建视图的语句,后半句是一个 SELECT 查询语句,所以**视图也可以建立在多张表上**,只需在 SELECT 语句中使用**子查询**或**连接查询**,这些在之前的实验已经进行过。
现在我们创建一个简单的视图,名为 **v_emp**,包含**v_name****v_age****v_phone**三个列:
```sql
CREATE VIEW v_emp (v_name,v_age,v_phone) AS SELECT name,age,phone FROM employee;
```
![02](https://doc.shiyanlou.com/MySQL/sql-06-02.png)
## 导出
导出与导入是相反的过程,是把数据库某个表中的数据保存到一个文件之中。导出语句基本格式为:
```sql
SELECT <列1>,<列2> INTO OUTFILE '文件路径和文件名' FROM <表名>;
```
**注意:语句中 “文件路径” 之下不能已经有同名文件。**
现在我们把整个 employee 表的数据导出到 /var/lib/mysql-files/ 目录下,导出文件命名为 **out.txt** 具体语句为:
```sql
SELECT * INTO OUTFILE '/var/lib/mysql-files/out.txt' FROM employee;
```
用 gedit 可以查看导出文件 `/var/lib/mysql-files/out.txt` 的内容:
> 也可以使用 `sudo cat /var/lib/mysql-files/out.txt` 命令查看。
## 备份
数据库中的数据十分重要,出于安全性考虑,在数据库的使用中,应该注意使用备份功能。
> 备份与导出的区别:导出的文件只是保存数据库中的数据;而备份,则是把数据库的结构,包括数据、约束、索引、视图等全部另存为一个文件。
**mysqldump** 是 MySQL 用于备份数据库的实用程序。它主要产生一个 SQL 脚本文件,其中包含从头重新创建数据库所必需的命令 CREATE TABLE INSERT 等。
使用 mysqldump 备份的语句:
```bash
mysqldump -u root 数据库名>备份文件名; #备份整个数据库
mysqldump -u root 数据库名 表名字>备份文件名; #备份整个表
```
> mysqldump 是一个备份工具,因此该命令是在终端中执行的,而不是在 mysql 交互环境下
我们尝试备份整个数据库 `mysql_shiyan`,将备份文件命名为 `bak.sql`,先 `Ctrl+D` 退出 MySQL 控制台,再打开 Xfce 终端,在终端中输入命令:
```bash
cd /home/shiyanlou/
mysqldump -u root mysql_shiyan > bak.sql;
```
使用命令 “ls” 可见已经生成备份文件 `bak.sql`
![07](https://doc.shiyanlou.com/MySQL/sql-06-07.png)
> 你可以用 gedit 查看备份文件的内容,可以看见里面不仅保存了数据,还有所备份的数据库的其它信息。
## 恢复
用备份文件恢复数据库,其实我们早就使用过了。在本次实验的开始,我们使用过这样一条命令:
```bash
source /tmp/SQL6/MySQL-06.sql
```
这就是一条恢复语句,它把 MySQL-06.sql 文件中保存的 `mysql_shiyan` 数据库恢复。
还有另一种方式恢复数据库,但是在这之前我们先使用命令新建一个**空的数据库 test**
```bash
mysql -u root #因为在上一步已经退出了 MySQL现在需要重新登录
CREATE DATABASE test; #新建一个名为test的数据库
```
再次 **Ctrl+D** 退出 MySQL然后输入语句进行恢复把刚才备份的 **bak.sql** 恢复到 **test** 数据库:
```bash
mysql -u root test < bak.sql
```
我们输入命令查看 test 数据库的表,便可验证是否恢复成功:
```bash
mysql -u root # 因为在上一步已经退出了 MySQL现在需要重新登录
use test # 连接数据库 test
SHOW TABLES; # 查看 test 数据库的表
```
可以看见原数据库的 4 张表和 1 个视图,现在已经恢复到 test 数据库中:
![08](https://doc.shiyanlou.com/MySQL/sql-06-08.png)
再查看 employee 表的恢复情况:
![09](https://doc.shiyanlou.com/MySQL/sql-06-09.png)
## Mysql授权
1. 登录MySQL
```sql
mysql -u root -p
```
2. 进入MySQL并查看用户和主机
```sql
use mysql;
select host,user from user;
```
3. 更新root用户允许远程连接
```sql
update user set host='%' where user='root';
```
4. 设置root用户密码
```sql
alter user 'root'@'localhost' identified by 'your_password';
```
注意:不要使用临时密码。
5. 授权允许远程访问:
```sql
grant all privileges on *.* to 'root'@'%' identified by 'password';
```
请将命令中的“password”更改为您的MySQL密码。
6. 刷新授权:
```sql
flush privileges;
```
7. 关闭授权:
```sql
revoke all on *.* from dba@localhost;
```
8. 查看MySQL初始密码
```bash
grep "password" /var/log/mysqld.log
```
通过以上操作您的MySQL可以被远程连接并进行管理。请注意在授权和更新用户权限时应只授权特定的数据库或表格而不是使用通配符以提高安全性和减少不必要的权限。在进行远程访问授权时应只授权特定的IP地址或IP地址段而不是使用通配符以减少潜在的安全威胁。同时建议使用强密码并定期更换密码以提高安全性。

View File

@@ -1,119 +0,0 @@
---
title: "Redis 安装与常用命令整理"
slug: redis
description: "文章介绍了 Redis 在 Debian 下的安装方法、Windows 图形客户端的安装方式以及监听端口修改、BitMap、消息队列、LREM 和 Pipeline 等常用操作示例。"
category: "数据库"
post_type: "article"
pinned: false
published: true
tags:
- "Redis安装"
- "Debian"
- "BitMap"
- "消息队列"
- "Pipeline"
- "go-redis"
---
# 安装`Redis`
## `Debian`下安装`Redis`服务端
```bash
curl -fsSL https://packages.redis.io/gpg | sudo gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/redis.list
sudo apt-get update
sudo apt-get install redis
```
## `Windows`下安装`Redis` 第三方`GUI`客户端
Redis (GUI)管理客户端
```bash
winget install qishibo.AnotherRedisDesktopManager
```
## `Redis`修改监听端口
```bash
vim /etc/redis/redis.conf
```
# `Redis`常用命令
## `bitMap`
使用`BitMap`实现签到,`setbit key offset value,` `key`做为时间,`offset`做为用户`id` ,`value`做为签到状态
```shell
# 示例
setbit key offset value key
# 设置用户10086在2022/04/21进行签到
setbit check_in_2022_04_21 10086 1
# 获取用户10086是否在2022/04/21签到
getbit check_in_2022_04_21 10086
# bitcount 获取20220421签到的用户数量
# 可选 start和end参数
# start 和 end 参数的设置和 GETRANGE 命令类似,都可以使用负数值:比如 -1 表示最后一个位,而 -2 表示倒数第二个位
BITCOUNT 20220421
# BITOP 对一个或多个保存二进制位的字符串 key 进行位元操作,并将结果保存到 destkey 上
# operation 可以是 AND 、 OR 、 NOT 、 XOR 这四种操作中的任意一种:
# BITOP AND destkey key [key ...] ,对一个或多个 key 求逻辑并,并将结果保存到 destkey 。
# BITOP OR destkey key [key ...] ,对一个或多个 key 求逻辑或,并将结果保存到 destkey 。
# BITOP XOR destkey key [key ...] ,对一个或多个 key 求逻辑异或,并将结果保存到 destkey 。
# BITOP NOT destkey key ,对给定 key 求逻辑非,并将结果保存到 destkey 。
# 除了 NOT 操作之外,其他操作都可以接受一个或多个 key 作为输入。
BITOP AND and-result 20220421 20220420
GETBIT and-result
```
## `Redis` 消息队列
```
# LPUSH key value, Lpush用于生产并添加消息
# LPOP key,用于取出消息
```
## `Lrem`
```shell
# count > 0 : 从表头开始向表尾搜索,移除与 VALUE 相等的元素,数量为 COUNT 。
# count < 0 : 从表尾开始向表头搜索,移除与 VALUE 相等的元素,数量为 COUNT 的绝对值。
# count = 0 : 移除表中所有与 VALUE 相等的值。
LREM key count VALUE
```
## `Pipeline`
`Redis` 使用的是客户端-服务器(`CS`)模型和请求/响应协议的 TCP 服务器。这意味着通常情况下一个请求会遵循以下步骤:
客户端向服务端发送一个查询请求,并监听 Socket 返回,通常是以阻塞模式,等待服务端响应。
服务端处理命令,并将结果返回给客户端。
管道(`pipeline`)可以一次性发送多条命令,并在执行完后一次性将结果返回。pipeline 通过减少客户端与 redis 的通信次数来降低往返延时时间,而且 `Pipeline` 实现的原理是队列,而队列的原理是先进先出,这样就保证了数据的顺序性。
通俗点:`pipeline` 就是把一组命令进行打包,然后一次性通过网络发送到 Redis,同时将执行的结果批量地返回。
```go
// 使用 go-redis
p := Client.Pipeline()
for _, v := range val {
p.LRem("user:watched:"+guid, 0, v)
}
// p.Exec()执行pipeline 请求
p.Exec()
```
[本文参考](https://blog.csdn.net/mumuwang1234/article/details/118603697)

View File

@@ -1,169 +0,0 @@
---
title: "手把手教你用Rust进行Dll注入"
description: 我是一个懒惰的男孩,我甚至懒的不想按键盘上的按键和挪动鼠标.可是我还是想玩游戏,该怎么做呢?通过 google 了解到我可以通过将我自己编写的dll文件注入到目标程序内,来实现这个事情.
date: 2022-09-17T15:10:26+08:00
draft: false
slug: rust-dll
image:
categories:
- Rust
tags:
- Rust
- Dll
---
# 前言
我是一个懒惰的男孩,我甚至懒的不想按键盘上的按键和挪动鼠标.可是我还是想玩游戏,该怎么做呢?
通过google了解到我可以通过将我自己编写的 `dll` 文件注入到目标程序内,来实现这个事情.
将大象放在冰箱里需要几步?
答案是三步。
# `snes9x` 模拟器 `Dll` 注入实战
## 一、现在我们需要进行第一步,生成 `Dll` 文件
准确说是我们需要生成符合 `C` 标准的 `dll` 文件,如果你使用 `go` 语言,直接使用 `Cgo``C` 进行互动,即可生成符合 `C` 标准的 `dll` .
但是很明显,我要用 `Rust` 来做这件事。
由于 `Rust` 拥有出色的所有权机制,和其他语言的交互会导致 `Rust` 失去这个特性,所以这一块是属于 `Unsafe` 区域的。
`Rust` 默认生成的 `Dll` 是提供给 `Rust` 语言来调用的,而非C系语言的 `dll`.
我们现在来生成 `C` 系语言的 `Dll` 吧。
### 1.新建项目 `lib` 目录 `lib` 目录主要作为库文件以方便其他开发者调用
```bash
# 新建库项目
Cargo new --lib <project name>
Cargo new --lib joy
```
### 2.修改 `Cargo.toml` 文件 增加 `bin` 区域
```toml
[package]
name = "joy"
version = "0.1.0"
edition = "2021"
[lib]
name = "joy"
path = "src/lib.rs"
crate-type = ["cdylib"]
[[bin]]
name = "joyrun"
path = "src/main.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
```
```bash
# 为项目导入依赖ctor来生成符合c标准的dll
cargo add ctor
```
### 3.修改 `lib.rs` 使用 `ctor`
```rust
// lib.rs
#[ctor::ctor]
fn ctor() {
println!("我是一个dll")
}
```
#### 4.编译项目生成 `joy.dll` 以及 `joyrun.exe`
```bash
cargo build
```
现在我们有了我们自己的 `dll` 文件,该如何将他注入到目标的进程呢?
## 二、使用 `dll-syringe` 进行dll注入
```
cargo add dll-syringe
```
### 1.修改main.rs 将刚刚编写的dll注入到目标应用
```rust
// main.rs
use dll_syringe::{Syringe, process::OwnedProcess};
fn main() {
// 通过进程名找到目标进程
let target_process = OwnedProcess::find_first_by_name("snes9x").unwrap();
// 新建一个注入器
let syringe = Syringe::for_process(target_process);
// 将我们刚刚编写的dll加载进去
let injected_payload = syringe.inject("joy.dll").unwrap();
// do something else
// 将我们刚刚注入的dll从目标程序内移除
syringe.eject(injected_payload).unwrap();
}
```
### 2.运行项目
```shell
# 运行项目
cargo run
```
此时你可能会遇到一个新问题,我的`dll`已经加载进目标程序了,为什么没有打印 "我是一个dll"
### 3.解决控制台无输出问题
这是由于目标程序没有控制台,所以我们没有看到 `dll` 的输出,接下来让我们来获取 `dll` 的输出。
此时我们可以使用 `TCP` 交互的方式或采用 `OutputDebugStringA function (debugapi.h)` 来进行打印
`OutputDebugStringA` ,需要额外开启`features` `Win32_System_Diagnostics_Debug`
```rust
// Rust Unsafe fn
// windows::Win32::System::Diagnostics::Debug::OutputDebugStringA
pub unsafe fn OutputDebugStringA<'a, P0>(lpoutputstring: P0)
where
P0: Into<PCSTR>,
// Required features: "Win32_System_Diagnostics_Debug"
```
采用 `Tcp` 通信交互
```rust
// 在lib.rs 新建tcp客户端
let stream = TcpStream::connect("127.0.0.1:7331").unwrap();
```
```rust
// 在main.rs 新建tcp服务端
let (mut stream, addr) = listener.accept()?;
info!(%addr,"Accepted!");
let mut buf = vec![0u8; 1024];
let mut stdout = std::io::stdout();
while let Ok(n) = stream.read(&mut buf[..]) {
if n == 0 {
break;
}
stdout.write_all(&buf[..n])?
}
```
```shell
# 运行项目
cargo run
# 运行之后,大功告成,成功在Tcp服务端看到了,客户端对我们发起了请求。
```

View File

@@ -0,0 +1,24 @@
---
title: 徐霞客游记·游恒山日记
slug: rust-programming-tips
description: 游恒山、悬空寺与北岳登顶的古文纪行,适合做中文长文测试。
category: 古籍游记
post_type: article
pinned: false
status: published
visibility: public
noindex: false
tags:
- 徐霞客
- 恒山
- 悬空寺
- 长文测试
---
# 徐霞客游记·游恒山日记
出南山。大溪从山中俱来者,别而西去。余北驰平陆中,望外界之山,高不及台山十之四,其长缭绕如垣。
余溯西涧入,又一涧自北来,遂从其西登岭,道甚峻。北向直上者六七里,西转,又北跻而上者五六里,登峰两重,造其巅,是名箭筸岭。
三转,峡愈隘,崖愈高。西崖之半,层楼高悬,曲榭斜倚,望之如蜃吐重台者,悬空寺也。

View File

@@ -1,96 +0,0 @@
---
title: "Rust使用Serde进行序列化及反序列化"
description: 这篇文章将介绍如何在Rust编程语言中使用Serde库进行序列化和反序列化操作。Serde是一个广泛使用的序列化和反序列化库能够支持JSON、BSON、CBOR、MessagePack和YAML等常见数据格式。
date: 2022-07-25T14:02:22+08:00
draft: false
slug: rust-serde
image:
categories:
- Rust
tags:
- Rust
- Xml
---
# 开始之前
```toml
# 在Cargo.toml 新增以下依赖
[dependencies]
serde = { version = "1.0.140",features = ["derive"] }
serde_json = "1.0.82"
serde_yaml = "0.8"
serde_urlencoded = "0.7.1"
# 使用yaserde解析xml
yaserde = "0.8.0"
yaserde_derive = "0.8.0"
```
## `Serde`通用规则(`json`,`yaml`,`xml`)
### 1.使用`Serde`宏通过具体结构实现序列化及反序列化
```rust
use serde::{Deserialize, Serialize};
// 为结构体实现 Serialize(序列化)属性和Deserialize(反序列化)
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Person {
// 将该字段名称修改为lastname
#[serde(rename = "lastname")]
name: String,
// 反序列化及序列化时忽略该字段(nickname)
#[serde(skip)]
nickname: String,
// 分别设置序列化及反序列化时输出的字段名称
#[serde(rename(serialize = "serialize_id", deserialize = "deserialize_id"))]
id: i32,
// 为age设置默认值
#[serde(default)]
age: i32,
}
```
### 2.使用`serde_json`序列化及反序列化
```rust
use serde_json::{json, Value};
let v:serde_json::Value = json!(
{
"x":20.0,
"y":15.0
}
);
println!("x:{:#?},y:{:#?}",v["x"],v["y"]); // x:20.0, y:15.0
```
### 3.使用`Serde`宏统一格式化输入、输出字段名称
| 方法名 | 方法效果 |
| ------------------------------- | ------------------------------------------------------------ |
| `PascalCase` | 首字母为大写的驼峰式命名,推荐结构体、枚举等名称以及`Yaml`配置文件读取使用。 |
| `camelCase` | 首字母为小写的驼峰式命名,推荐`Yaml`配置文件读取使用。 |
| `snake_case` | 小蛇形命名,用下划线"`_`"连接单词,推荐函数命名以及变量名称使用此种方式。 |
| `SCREAMING_SNAKE_CASE` | 大蛇形命名,单词均为大写形式,用下划线"`_`"连接单词。推荐常数及全局变量使用此种方式。 |
| `kebab-case`(小串烤肉) | 同`snake_case`,使用中横线"`-`"替换了下划线"`_`"。 |
| `SCREAMING-KEBAB-CAS`(大串烤肉) | 同`SCREAMING_SNAKE_CASE`,使用中横线"`-`"替换了下划线"`_`"。 |
示例:
```rust
pub struct App {
#[serde(rename_all = "PascalCase")]
/// 统一格式化输入、输出字段名称
/// #[serde(rename_all = "camelCase")]
/// #[serde(rename_all = "snake_case")]
/// #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
/// 仅设置
version: String,
app_name: String,
host: String,
}
```
[本文参考:yaserde](https://github.com/media-io/yaserde)
[本文参考:magiclen](https://magiclen.org/rust-serde/)

View File

@@ -1,37 +0,0 @@
---
title: "Rust Sqlx"
description:
date: 2022-08-29T13:55:08+08:00
draft: true
slug: rust-sqlx
image:
categories:
-
tags:
-
---
# sqlx-cli
## 创建 migration
```shell
sqlx migrate add categories
```
```sql
-- Add migration script here
CREATE TABLE IF NOT EXISTS categories(
    id INT PRIMARY KEY AUTO_INCREMENT,
    type_id INT UNIQUE NOT NULL,
    parent_id INT NOT NULL,
    name TEXT UNIQUE NOT NULL
);
```
## 运行 migration
```sh
sqlx migrate run
```

View File

@@ -0,0 +1,24 @@
---
title: 游黄山记(上)
slug: terminal-ui-design
description: 钱谦益《游黄山记》上篇,包含序、记之一与记之二。
category: 古籍游记
post_type: article
pinned: false
status: published
visibility: public
noindex: false
tags:
- 钱谦益
- 黄山
- 游记
- 长文测试
---
# 游黄山记(上)
辛巳春,余与程孟阳订黄山之游,约以梅花时相寻于武林之西溪。徐维翰书来劝驾,读之两腋欲举,遂挟吴去尘以行。
黄山耸秀峻极,作镇一方。江南诸山,天台、天目为最,以地形准之,黄山之趾与二山齐。
自山口至汤口,山之麓也,登山之径于是始。汤泉之流,自紫石峰六百仞县布,其下有香泉溪。

View File

@@ -1,54 +0,0 @@
---
title: "在 Tmux 会话窗格中发送命令的方法"
slug: tmux
description: "介绍如何在 Tmux 中创建分离会话、向指定窗格发送命令并执行回车,同时说明连接会话和发送特殊按键的基本用法。"
category: "Linux"
post_type: "article"
pinned: false
published: true
tags:
- "Tmux"
- "终端复用"
- "send-keys"
- "会话管理"
- "命令行"
---
## 在 Tmux 会话窗格中发送命令的方法
`Tmux` 中,可以使用 `send-keys` 命令将命令发送到会话窗格中。以下是在 `Tmux` 中发送命令的步骤:
### 1. 新建一个分离(`Detached`)会话
使用以下命令新建一个分离会话:
```bash
tmux new -d -s mySession
```
### 2. 发送命令至会话窗格
使用以下命令将命令发送到会话窗格:
```bash
tmux send-keys -t mySession "echo 'Hello World!'" ENTER
```
这将发送 `echo 'Hello World!'` 命令,并模拟按下回车键(`ENTER`),以在会话窗格中执行该命令。
### 3. 连接(`Attach`)会话窗格
使用以下命令连接会话窗格:
```bash
tmux a -t mySession
```
这将连接到名为 `mySession` 的会话窗格。
### 4. 发送特殊命令
要发送特殊命令,例如清除当前行或使用管理员权限运行命令,请使用以下命令:
- 清除当前行:`tmux send-keys C-c`
- 以管理员身份运行命令:`sudo tmux send-keys ...`

View File

@@ -0,0 +1,24 @@
---
title: 徐霞客游记·游太和山日记(上)
slug: welcome-to-termi
description: 《徐霞客游记》太和山上篇,适合作为中文长文测试样本。
category: 古籍游记
post_type: article
pinned: true
status: published
visibility: public
noindex: false
tags:
- 徐霞客
- 游记
- 太和山
- 长文测试
---
# 徐霞客游记·游太和山日记(上)
登仙猿岭。十馀里,有枯溪小桥,为郧县境,乃河南、湖广界。东五里,有池一泓,曰青泉,上源不见所自来,而下流淙淙,地又属淅川。
自此连逾山岭,桃李缤纷,山花夹道,幽艳异常。山坞之中,居庐相望,沿流稻畦,高下鳞次,不似山、陕间矣。
骑而南趋,石道平敞。三十里,越一石梁,有溪自西东注,即太和下流入汉者。越桥为迎恩宫,西向。前有碑大书“第一山”三字,乃米襄阳笔。

View File

@@ -41,6 +41,8 @@ mod m20260401_000030_add_public_security_and_web_push_to_site_settings;
mod m20260401_000031_add_notification_channel_type_to_site_settings; mod m20260401_000031_add_notification_channel_type_to_site_settings;
mod m20260401_000032_add_runtime_security_keys_to_site_settings; mod m20260401_000032_add_runtime_security_keys_to_site_settings;
mod m20260401_000033_add_taxonomy_metadata_and_media_assets; mod m20260401_000033_add_taxonomy_metadata_and_media_assets;
mod m20260401_000034_add_source_markdown_to_posts;
mod m20260401_000035_add_human_verification_modes_to_site_settings;
pub struct Migrator; pub struct Migrator;
#[async_trait::async_trait] #[async_trait::async_trait]
@@ -86,6 +88,8 @@ impl MigratorTrait for Migrator {
Box::new(m20260401_000031_add_notification_channel_type_to_site_settings::Migration), Box::new(m20260401_000031_add_notification_channel_type_to_site_settings::Migration),
Box::new(m20260401_000032_add_runtime_security_keys_to_site_settings::Migration), Box::new(m20260401_000032_add_runtime_security_keys_to_site_settings::Migration),
Box::new(m20260401_000033_add_taxonomy_metadata_and_media_assets::Migration), Box::new(m20260401_000033_add_taxonomy_metadata_and_media_assets::Migration),
Box::new(m20260401_000034_add_source_markdown_to_posts::Migration),
Box::new(m20260401_000035_add_human_verification_modes_to_site_settings::Migration),
// inject-above (do not remove this comment) // inject-above (do not remove this comment)
] ]
} }

View File

@@ -0,0 +1,37 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Add a nullable `source_markdown` TEXT column to `posts`.
    ///
    /// Idempotent: checks for the column first and does nothing when it is
    /// already present, so re-running the migration is safe.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        if manager.has_column("posts", "source_markdown").await? {
            // Column already exists (e.g. re-run); nothing to change.
            return Ok(());
        }
        let add_column = Table::alter()
            .table(Alias::new("posts"))
            .add_column(ColumnDef::new(Alias::new("source_markdown")).text().null())
            .to_owned();
        manager.alter_table(add_column).await
    }

    /// Revert: drop `source_markdown` from `posts` when it exists.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        if !manager.has_column("posts", "source_markdown").await? {
            // Column was never created (or already dropped); nothing to do.
            return Ok(());
        }
        let drop_column = Table::alter()
            .table(Alias::new("posts"))
            .drop_column(Alias::new("source_markdown"))
            .to_owned();
        manager.alter_table(drop_column).await
    }
}

View File

@@ -0,0 +1,86 @@
use sea_orm::{DbBackend, Statement};
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Add per-scope human-verification mode columns to `site_settings`.
    ///
    /// Introduces `comment_verification_mode` and
    /// `subscription_verification_mode` as nullable string columns, then
    /// backfills any row whose mode is NULL/blank from the legacy boolean
    /// `*_turnstile_enabled` flags so existing installs keep their behavior:
    /// comments default to 'turnstile' or 'captcha', subscriptions to
    /// 'turnstile' or 'off'.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        // Add both columns; `add_column_if_not_exists` keeps this idempotent.
        manager
            .alter_table(
                Table::alter()
                    .table(table.clone())
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("comment_verification_mode"))
                            .string()
                            .null(),
                    )
                    .add_column_if_not_exists(
                        ColumnDef::new(Alias::new("subscription_verification_mode"))
                            .string()
                            .null(),
                    )
                    .to_owned(),
            )
            .await?;

        // Backfill comment mode from the legacy boolean flag. Only rows with
        // a NULL/blank mode are touched, so explicit values are preserved.
        // NOTE(review): this raw statement is built for DbBackend::Postgres;
        // confirm no other backend is supported by the app.
        manager
            .get_connection()
            .execute(Statement::from_string(
                DbBackend::Postgres,
                r#"
UPDATE site_settings
SET comment_verification_mode = CASE
WHEN COALESCE(comment_turnstile_enabled, false) THEN 'turnstile'
ELSE 'captcha'
END
WHERE COALESCE(trim(comment_verification_mode), '') = ''
"#
                .to_string(),
            ))
            .await?;

        // Same backfill for the subscription scope; the fallback is 'off'
        // rather than 'captcha'.
        manager
            .get_connection()
            .execute(Statement::from_string(
                DbBackend::Postgres,
                r#"
UPDATE site_settings
SET subscription_verification_mode = CASE
WHEN COALESCE(subscription_turnstile_enabled, false) THEN 'turnstile'
ELSE 'off'
END
WHERE COALESCE(trim(subscription_verification_mode), '') = ''
"#
                .to_string(),
            ))
            .await?;

        Ok(())
    }

    /// Revert: drop both mode columns, guarding each drop with `has_column`
    /// so a partially-applied migration can still be rolled back.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let table = Alias::new("site_settings");
        for column in [
            "subscription_verification_mode",
            "comment_verification_mode",
        ] {
            if manager.has_column("site_settings", column).await? {
                manager
                    .alter_table(
                        Table::alter()
                            .table(table.clone())
                            .drop_column(Alias::new(column))
                            .to_owned(),
                    )
                    .await?;
            }
        }
        Ok(())
    }
}

View File

@@ -331,6 +331,24 @@ impl Hooks for App {
.await?; .await?;
if existing.is_none() { if existing.is_none() {
let comment_verification_mode = settings["comment_verification_mode"]
.as_str()
.map(ToString::to_string);
let subscription_verification_mode = settings
["subscription_verification_mode"]
.as_str()
.map(ToString::to_string);
let comment_turnstile_enabled = settings["comment_turnstile_enabled"]
.as_bool()
.or(comment_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let subscription_turnstile_enabled = settings
["subscription_turnstile_enabled"]
.as_bool()
.or(subscription_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let tech_stack = settings["tech_stack"] let tech_stack = settings["tech_stack"]
.as_array() .as_array()
.map(|items| { .map(|items| {
@@ -408,6 +426,16 @@ impl Hooks for App {
paragraph_comments_enabled: Set(settings["paragraph_comments_enabled"] paragraph_comments_enabled: Set(settings["paragraph_comments_enabled"]
.as_bool() .as_bool()
.or(Some(true))), .or(Some(true))),
comment_verification_mode: Set(comment_verification_mode),
comment_turnstile_enabled: Set(comment_turnstile_enabled),
subscription_verification_mode: Set(subscription_verification_mode),
subscription_turnstile_enabled: Set(subscription_turnstile_enabled),
turnstile_site_key: Set(settings["turnstile_site_key"]
.as_str()
.map(ToString::to_string)),
turnstile_secret_key: Set(settings["turnstile_secret_key"]
.as_str()
.map(ToString::to_string)),
ai_provider: Set(settings["ai_provider"].as_str().map(ToString::to_string)), ai_provider: Set(settings["ai_provider"].as_str().map(ToString::to_string)),
ai_api_base: Set(settings["ai_api_base"].as_str().map(ToString::to_string)), ai_api_base: Set(settings["ai_api_base"].as_str().map(ToString::to_string)),
ai_api_key: Set(settings["ai_api_key"].as_str().map(ToString::to_string)), ai_api_key: Set(settings["ai_api_key"].as_str().map(ToString::to_string)),

View File

@@ -1,6 +1,6 @@
use axum::{ use axum::{
extract::{Multipart, Query}, extract::{Multipart, Query},
http::{header, HeaderMap}, http::{HeaderMap, header},
}; };
use loco_rs::prelude::*; use loco_rs::prelude::*;
use sea_orm::{ use sea_orm::{
@@ -170,7 +170,9 @@ pub struct AdminSiteSettingsResponse {
pub music_playlist: Vec<site_settings::MusicTrackPayload>, pub music_playlist: Vec<site_settings::MusicTrackPayload>,
pub ai_enabled: bool, pub ai_enabled: bool,
pub paragraph_comments_enabled: bool, pub paragraph_comments_enabled: bool,
pub comment_verification_mode: String,
pub comment_turnstile_enabled: bool, pub comment_turnstile_enabled: bool,
pub subscription_verification_mode: String,
pub subscription_turnstile_enabled: bool, pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool, pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>, pub turnstile_site_key: Option<String>,
@@ -686,9 +688,7 @@ fn build_media_object_response(
title: metadata.and_then(|entry| entry.title.clone()), title: metadata.and_then(|entry| entry.title.clone()),
alt_text: metadata.and_then(|entry| entry.alt_text.clone()), alt_text: metadata.and_then(|entry| entry.alt_text.clone()),
caption: metadata.and_then(|entry| entry.caption.clone()), caption: metadata.and_then(|entry| entry.caption.clone()),
tags: metadata tags: metadata.map(media_assets::tag_list).unwrap_or_default(),
.map(media_assets::tag_list)
.unwrap_or_default(),
notes: metadata.and_then(|entry| entry.notes.clone()), notes: metadata.and_then(|entry| entry.notes.clone()),
} }
} }
@@ -724,6 +724,14 @@ fn build_settings_response(
) -> AdminSiteSettingsResponse { ) -> AdminSiteSettingsResponse {
let ai_providers = site_settings::ai_provider_configs(&item); let ai_providers = site_settings::ai_provider_configs(&item);
let ai_active_provider_id = site_settings::active_ai_provider_id(&item); let ai_active_provider_id = site_settings::active_ai_provider_id(&item);
let comment_verification_mode = crate::services::turnstile::selected_mode(
&item,
crate::services::turnstile::TurnstileScope::Comment,
);
let subscription_verification_mode = crate::services::turnstile::selected_mode(
&item,
crate::services::turnstile::TurnstileScope::Subscription,
);
let turnstile_site_key = crate::services::turnstile::site_key(&item); let turnstile_site_key = crate::services::turnstile::site_key(&item);
let turnstile_secret_key = crate::services::turnstile::secret_key(&item); let turnstile_secret_key = crate::services::turnstile::secret_key(&item);
let web_push_vapid_public_key = crate::services::web_push::public_key(&item); let web_push_vapid_public_key = crate::services::web_push::public_key(&item);
@@ -751,8 +759,16 @@ fn build_settings_response(
music_playlist: music_playlist_values(&item.music_playlist), music_playlist: music_playlist_values(&item.music_playlist),
ai_enabled: item.ai_enabled.unwrap_or(false), ai_enabled: item.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true), paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true),
comment_turnstile_enabled: item.comment_turnstile_enabled.unwrap_or(false), comment_verification_mode: comment_verification_mode.as_str().to_string(),
subscription_turnstile_enabled: item.subscription_turnstile_enabled.unwrap_or(false), comment_turnstile_enabled: matches!(
comment_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
subscription_verification_mode: subscription_verification_mode.as_str().to_string(),
subscription_turnstile_enabled: matches!(
subscription_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
web_push_enabled: item.web_push_enabled.unwrap_or(false), web_push_enabled: item.web_push_enabled.unwrap_or(false),
turnstile_site_key, turnstile_site_key,
turnstile_secret_key, turnstile_secret_key,
@@ -887,7 +903,6 @@ pub async fn session_logout(headers: HeaderMap, State(ctx): State<AppContext>) -
#[debug_handler] #[debug_handler]
pub async fn dashboard(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> { pub async fn dashboard(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
check_auth(&headers)?; check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let all_posts = posts::Entity::find().all(&ctx.db).await?; let all_posts = posts::Entity::find().all(&ctx.db).await?;
let total_posts = all_posts.len() as u64; let total_posts = all_posts.len() as u64;
@@ -1190,8 +1205,8 @@ pub async fn list_media_objects(
check_auth(&headers)?; check_auth(&headers)?;
let settings = storage::require_r2_settings(&ctx).await?; let settings = storage::require_r2_settings(&ctx).await?;
let objects = storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200)) let objects =
.await?; storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200)).await?;
let keys = objects let keys = objects
.iter() .iter()
.map(|item| item.key.clone()) .map(|item| item.key.clone())
@@ -1228,7 +1243,11 @@ pub async fn delete_media_object(
storage::delete_object(&ctx, key).await?; storage::delete_object(&ctx, key).await?;
if let Err(error) = media_assets::delete_by_key(&ctx, key).await { if let Err(error) = media_assets::delete_by_key(&ctx, key).await {
tracing::warn!(?error, key, "failed to delete media metadata after object deletion"); tracing::warn!(
?error,
key,
"failed to delete media metadata after object deletion"
);
} }
format::json(AdminMediaDeleteResponse { format::json(AdminMediaDeleteResponse {
@@ -1325,7 +1344,11 @@ pub async fn batch_delete_media_objects(
match storage::delete_object(&ctx, &key).await { match storage::delete_object(&ctx, &key).await {
Ok(()) => { Ok(()) => {
if let Err(error) = media_assets::delete_by_key(&ctx, &key).await { if let Err(error) = media_assets::delete_by_key(&ctx, &key).await {
tracing::warn!(?error, key, "failed to delete media metadata after batch removal"); tracing::warn!(
?error,
key,
"failed to delete media metadata after batch removal"
);
} }
deleted.push(key) deleted.push(key)
} }
@@ -1955,7 +1978,10 @@ pub fn routes() -> Routes {
"/storage/media/batch-delete", "/storage/media/batch-delete",
post(batch_delete_media_objects), post(batch_delete_media_objects),
) )
.add("/storage/media/metadata", patch(update_media_object_metadata)) .add(
"/storage/media/metadata",
patch(update_media_object_metadata),
)
.add("/storage/media/replace", post(replace_media_object)) .add("/storage/media/replace", post(replace_media_object))
.add( .add(
"/comments/blacklist", "/comments/blacklist",

View File

@@ -137,7 +137,10 @@ fn tag_name(item: &tags::Model) -> String {
item.name.clone().unwrap_or_else(|| item.slug.clone()) item.name.clone().unwrap_or_else(|| item.slug.clone())
} }
fn build_category_record(item: &categories::Model, post_items: &[posts::Model]) -> AdminCategoryRecord { fn build_category_record(
item: &categories::Model,
post_items: &[posts::Model],
) -> AdminCategoryRecord {
let name = category_name(item); let name = category_name(item);
let aliases = [normalized_token(&name), normalized_token(&item.slug)]; let aliases = [normalized_token(&name), normalized_token(&item.slug)];
let count = post_items let count = post_items
@@ -224,7 +227,11 @@ async fn ensure_category_slug_unique(
Ok(()) Ok(())
} }
async fn ensure_tag_slug_unique(ctx: &AppContext, slug: &str, exclude_id: Option<i32>) -> Result<()> { async fn ensure_tag_slug_unique(
ctx: &AppContext,
slug: &str,
exclude_id: Option<i32>,
) -> Result<()> {
if let Some(existing) = tags::Entity::find() if let Some(existing) = tags::Entity::find()
.filter(tags::Column::Slug.eq(slug)) .filter(tags::Column::Slug.eq(slug))
.one(&ctx.db) .one(&ctx.db)
@@ -243,9 +250,11 @@ async fn load_posts(ctx: &AppContext) -> Result<Vec<posts::Model>> {
} }
#[debug_handler] #[debug_handler]
pub async fn list_categories(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> { pub async fn list_categories(
headers: HeaderMap,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?; check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let items = categories::Entity::find() let items = categories::Entity::find()
.order_by_asc(categories::Column::Slug) .order_by_asc(categories::Column::Slug)
@@ -254,7 +263,8 @@ pub async fn list_categories(headers: HeaderMap, State(ctx): State<AppContext>)
let post_items = load_posts(&ctx).await?; let post_items = load_posts(&ctx).await?;
format::json( format::json(
items.into_iter() items
.into_iter()
.map(|item| build_category_record(&item, &post_items)) .map(|item| build_category_record(&item, &post_items))
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
) )
@@ -312,7 +322,13 @@ pub async fn update_category(
.filter(|value| !value.is_empty()) .filter(|value| !value.is_empty())
!= Some(name.as_str()) != Some(name.as_str())
{ {
content::rewrite_category_references(previous_name.as_deref(), &previous_slug, Some(&name))?; content::rewrite_category_references(
&ctx,
previous_name.as_deref(),
&previous_slug,
Some(&name),
)
.await?;
} }
let mut active = item.into_active_model(); let mut active = item.into_active_model();
@@ -324,7 +340,6 @@ pub async fn update_category(
active.seo_title = Set(trim_to_option(payload.seo_title)); active.seo_title = Set(trim_to_option(payload.seo_title));
active.seo_description = Set(trim_to_option(payload.seo_description)); active.seo_description = Set(trim_to_option(payload.seo_description));
let updated = active.update(&ctx.db).await?; let updated = active.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
let post_items = load_posts(&ctx).await?; let post_items = load_posts(&ctx).await?;
format::json(build_category_record(&updated, &post_items)) format::json(build_category_record(&updated, &post_items))
@@ -339,9 +354,8 @@ pub async fn delete_category(
check_auth(&headers)?; check_auth(&headers)?;
let item = load_category(&ctx, id).await?; let item = load_category(&ctx, id).await?;
content::rewrite_category_references(item.name.as_deref(), &item.slug, None)?; content::rewrite_category_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?; item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty() format::empty()
} }
@@ -349,7 +363,6 @@ pub async fn delete_category(
#[debug_handler] #[debug_handler]
pub async fn list_tags(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> { pub async fn list_tags(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
check_auth(&headers)?; check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let items = tags::Entity::find() let items = tags::Entity::find()
.order_by_asc(tags::Column::Slug) .order_by_asc(tags::Column::Slug)
@@ -358,7 +371,8 @@ pub async fn list_tags(headers: HeaderMap, State(ctx): State<AppContext>) -> Res
let post_items = load_posts(&ctx).await?; let post_items = load_posts(&ctx).await?;
format::json( format::json(
items.into_iter() items
.into_iter()
.map(|item| build_tag_record(&item, &post_items)) .map(|item| build_tag_record(&item, &post_items))
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
) )
@@ -416,7 +430,13 @@ pub async fn update_tag(
.filter(|value| !value.is_empty()) .filter(|value| !value.is_empty())
!= Some(name.as_str()) != Some(name.as_str())
{ {
content::rewrite_tag_references(previous_name.as_deref(), &previous_slug, Some(&name))?; content::rewrite_tag_references(
&ctx,
previous_name.as_deref(),
&previous_slug,
Some(&name),
)
.await?;
} }
let mut active = item.into_active_model(); let mut active = item.into_active_model();
@@ -428,7 +448,6 @@ pub async fn update_tag(
active.seo_title = Set(trim_to_option(payload.seo_title)); active.seo_title = Set(trim_to_option(payload.seo_title));
active.seo_description = Set(trim_to_option(payload.seo_description)); active.seo_description = Set(trim_to_option(payload.seo_description));
let updated = active.update(&ctx.db).await?; let updated = active.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
let post_items = load_posts(&ctx).await?; let post_items = load_posts(&ctx).await?;
format::json(build_tag_record(&updated, &post_items)) format::json(build_tag_record(&updated, &post_items))
@@ -443,9 +462,8 @@ pub async fn delete_tag(
check_auth(&headers)?; check_auth(&headers)?;
let item = load_tag(&ctx, id).await?; let item = load_tag(&ctx, id).await?;
content::rewrite_tag_references(item.name.as_deref(), &item.slug, None)?; content::rewrite_tag_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?; item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty() format::empty()
} }

View File

@@ -112,7 +112,9 @@ fn build_summary(item: &categories::Model, post_items: &[posts::Model]) -> Categ
post.category post.category
.as_deref() .as_deref()
.map(str::trim) .map(str::trim)
.is_some_and(|value| value.eq_ignore_ascii_case(&name) || value.eq_ignore_ascii_case(&item.slug)) .is_some_and(|value| {
value.eq_ignore_ascii_case(&name) || value.eq_ignore_ascii_case(&item.slug)
})
}) })
.count(); .count();
@@ -151,8 +153,6 @@ async fn load_item(ctx: &AppContext, id: i32) -> Result<categories::Model> {
#[debug_handler] #[debug_handler]
pub async fn list(State(ctx): State<AppContext>) -> Result<Response> { pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let category_items = categories::Entity::find() let category_items = categories::Entity::find()
.order_by_asc(categories::Column::Slug) .order_by_asc(categories::Column::Slug)
.all(&ctx.db) .all(&ctx.db)
@@ -224,10 +224,12 @@ pub async fn update(
!= Some(name.as_str()) != Some(name.as_str())
{ {
content::rewrite_category_references( content::rewrite_category_references(
&ctx,
previous_name.as_deref(), previous_name.as_deref(),
&previous_slug, &previous_slug,
Some(&name), Some(&name),
)?; )
.await?;
} }
let mut item = item.into_active_model(); let mut item = item.into_active_model();
@@ -239,16 +241,14 @@ pub async fn update(
item.seo_title = Set(trim_to_option(params.seo_title)); item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description)); item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?; let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(build_record(item)) format::json(build_record(item))
} }
#[debug_handler] #[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> { pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
let item = load_item(&ctx, id).await?; let item = load_item(&ctx, id).await?;
content::rewrite_category_references(item.name.as_deref(), &item.slug, None)?; content::rewrite_category_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?; item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty() format::empty()
} }

View File

@@ -80,7 +80,9 @@ fn post_has_tag(post: &Model, wanted_tag: &str) -> bool {
fn effective_status(post: &Model) -> String { fn effective_status(post: &Model) -> String {
content::effective_post_state( content::effective_post_state(
post.status.as_deref().unwrap_or(content::POST_STATUS_PUBLISHED), post.status
.as_deref()
.unwrap_or(content::POST_STATUS_PUBLISHED),
post.publish_at, post.publish_at,
post.unpublish_at, post.unpublish_at,
Utc::now().fixed_offset(), Utc::now().fixed_offset(),
@@ -157,16 +159,18 @@ fn parse_optional_markdown_datetime(
return None; return None;
} }
chrono::DateTime::parse_from_rfc3339(value).ok().or_else(|| { chrono::DateTime::parse_from_rfc3339(value)
chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d") .ok()
.ok() .or_else(|| {
.and_then(|date| date.and_hms_opt(0, 0, 0)) chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d")
.and_then(|naive| { .ok()
chrono::FixedOffset::east_opt(0)? .and_then(|date| date.and_hms_opt(0, 0, 0))
.from_local_datetime(&naive) .and_then(|naive| {
.single() chrono::FixedOffset::east_opt(0)?
}) .from_local_datetime(&naive)
}) .single()
})
})
} }
fn markdown_post_listed_publicly(post: &content::MarkdownPost) -> bool { fn markdown_post_listed_publicly(post: &content::MarkdownPost) -> bool {
@@ -253,7 +257,9 @@ fn should_include_post(
} }
if let Some(status) = &query.status { if let Some(status) = &query.status {
if effective_status(post) != content::normalize_post_status(Some(status)) && effective_status(post) != status.trim().to_ascii_lowercase() { if effective_status(post) != content::normalize_post_status(Some(status))
&& effective_status(post) != status.trim().to_ascii_lowercase()
{
return false; return false;
} }
} }
@@ -397,22 +403,22 @@ impl Params {
item.image = Set(self.image.clone()); item.image = Set(self.image.clone());
item.images = Set(self.images.clone()); item.images = Set(self.images.clone());
item.pinned = Set(self.pinned); item.pinned = Set(self.pinned);
item.status = Set(self.status.clone().map(|value| requested_status(Some(value), None))); item.status = Set(self
item.visibility = Set( .status
self.visibility .clone()
.clone() .map(|value| requested_status(Some(value), None)));
.map(|value| normalize_visibility(Some(value))), item.visibility = Set(self
); .visibility
item.publish_at = Set( .clone()
self.publish_at .map(|value| normalize_visibility(Some(value))));
.clone() item.publish_at = Set(self
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()), .publish_at
); .clone()
item.unpublish_at = Set( .and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()));
self.unpublish_at item.unpublish_at = Set(self
.clone() .unpublish_at
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()), .clone()
); .and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()));
item.canonical_url = Set(self.canonical_url.clone()); item.canonical_url = Set(self.canonical_url.clone());
item.noindex = Set(self.noindex); item.noindex = Set(self.noindex);
item.og_image = Set(self.og_image.clone()); item.og_image = Set(self.og_image.clone());
@@ -526,8 +532,6 @@ pub async fn list(
State(ctx): State<AppContext>, State(ctx): State<AppContext>,
headers: HeaderMap, headers: HeaderMap,
) -> Result<Response> { ) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers); let preview = request_preview_mode(query.preview, &headers);
let include_private = preview && query.include_private.unwrap_or(true); let include_private = preview && query.include_private.unwrap_or(true);
let include_redirects = query.include_redirects.unwrap_or(preview); let include_redirects = query.include_redirects.unwrap_or(preview);
@@ -539,7 +543,9 @@ pub async fn list(
let filtered = posts let filtered = posts
.into_iter() .into_iter()
.filter(|post| should_include_post(post, &query, preview, include_private, include_redirects)) .filter(|post| {
should_include_post(post, &query, preview, include_private, include_redirects)
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
format::json(filtered) format::json(filtered)
@@ -551,8 +557,6 @@ pub async fn list_page(
State(ctx): State<AppContext>, State(ctx): State<AppContext>,
headers: HeaderMap, headers: HeaderMap,
) -> Result<Response> { ) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.filters.preview, &headers); let preview = request_preview_mode(query.filters.preview, &headers);
let include_private = preview && query.filters.include_private.unwrap_or(true); let include_private = preview && query.filters.include_private.unwrap_or(true);
let include_redirects = query.filters.include_redirects.unwrap_or(preview); let include_redirects = query.filters.include_redirects.unwrap_or(preview);
@@ -672,7 +676,10 @@ pub async fn update(
.into_iter() .into_iter()
.filter_map(|tag| tag.as_str().map(ToString::to_string)) .filter_map(|tag| tag.as_str().map(ToString::to_string))
.collect(), .collect(),
post_type: item.post_type.clone().unwrap_or_else(|| "article".to_string()), post_type: item
.post_type
.clone()
.unwrap_or_else(|| "article".to_string()),
image: item.image.clone(), image: item.image.clone(),
images: item images: item
.images .images
@@ -684,7 +691,10 @@ pub async fn update(
.filter_map(|tag| tag.as_str().map(ToString::to_string)) .filter_map(|tag| tag.as_str().map(ToString::to_string))
.collect(), .collect(),
pinned: item.pinned.unwrap_or(false), pinned: item.pinned.unwrap_or(false),
status: item.status.clone().unwrap_or_else(|| content::POST_STATUS_PUBLISHED.to_string()), status: item
.status
.clone()
.unwrap_or_else(|| content::POST_STATUS_PUBLISHED.to_string()),
visibility: item visibility: item
.visibility .visibility
.clone() .clone()
@@ -696,9 +706,7 @@ pub async fn update(
og_image: item.og_image.clone(), og_image: item.og_image.clone(),
redirect_from: content::post_redirects_from_json(&item.redirect_from), redirect_from: content::post_redirects_from_json(&item.redirect_from),
redirect_to: item.redirect_to.clone(), redirect_to: item.redirect_to.clone(),
file_path: content::markdown_post_path(&item.slug) file_path: content::virtual_markdown_document_path(&item.slug),
.to_string_lossy()
.to_string(),
}; };
let _ = subscriptions::notify_post_published(&ctx, &post).await; let _ = subscriptions::notify_post_published(&ctx, &post).await;
} }
@@ -736,7 +744,6 @@ pub async fn get_one(
State(ctx): State<AppContext>, State(ctx): State<AppContext>,
headers: HeaderMap, headers: HeaderMap,
) -> Result<Response> { ) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers); let preview = request_preview_mode(query.preview, &headers);
let post = load_item(&ctx, id).await?; let post = load_item(&ctx, id).await?;
@@ -754,7 +761,6 @@ pub async fn get_by_slug(
State(ctx): State<AppContext>, State(ctx): State<AppContext>,
headers: HeaderMap, headers: HeaderMap,
) -> Result<Response> { ) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers); let preview = request_preview_mode(query.preview, &headers);
let include_private = preview && query.include_private.unwrap_or(true); let include_private = preview && query.include_private.unwrap_or(true);
let post = resolve_post_by_slug(&ctx, &slug).await?; let post = resolve_post_by_slug(&ctx, &slug).await?;
@@ -780,8 +786,7 @@ pub async fn get_markdown_by_slug(
State(ctx): State<AppContext>, State(ctx): State<AppContext>,
) -> Result<Response> { ) -> Result<Response> {
check_auth(&headers)?; check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?; let (path, markdown) = content::read_markdown_document_from_store(&ctx, &slug).await?;
let (path, markdown) = content::read_markdown_document(&slug)?;
format::json(MarkdownDocumentResponse { format::json(MarkdownDocumentResponse {
slug, slug,
path, path,
@@ -807,7 +812,7 @@ pub async fn update_markdown_by_slug(
) )
.await?; .await?;
let updated = content::write_markdown_document(&ctx, &slug, &params.markdown).await?; let updated = content::write_markdown_document(&ctx, &slug, &params.markdown).await?;
let (path, markdown) = content::read_markdown_document(&updated.slug)?; let (path, markdown) = content::read_markdown_document_from_store(&ctx, &updated.slug).await?;
let _ = post_revisions::capture_snapshot_from_markdown( let _ = post_revisions::capture_snapshot_from_markdown(
&ctx, &ctx,
Some(&actor), Some(&actor),
@@ -874,7 +879,7 @@ pub async fn create_markdown(
}, },
) )
.await?; .await?;
let (path, markdown) = content::read_markdown_document(&created.slug)?; let (path, markdown) = content::read_markdown_document_from_store(&ctx, &created.slug).await?;
let _ = post_revisions::capture_snapshot_from_markdown( let _ = post_revisions::capture_snapshot_from_markdown(
&ctx, &ctx,
Some(&actor), Some(&actor),
@@ -936,7 +941,9 @@ pub async fn import_markdown(
let imported = content::import_markdown_documents(&ctx, files).await?; let imported = content::import_markdown_documents(&ctx, files).await?;
for item in &imported { for item in &imported {
if let Ok((_path, markdown)) = content::read_markdown_document(&item.slug) { if let Ok((_path, markdown)) =
content::read_markdown_document_from_store(&ctx, &item.slug).await
{
let _ = post_revisions::capture_snapshot_from_markdown( let _ = post_revisions::capture_snapshot_from_markdown(
&ctx, &ctx,
Some(&actor), Some(&actor),

View File

@@ -63,9 +63,7 @@ fn levenshtein_distance(left: &str, right: &str) -> usize {
let mut curr = vec![i + 1; right_chars.len() + 1]; let mut curr = vec![i + 1; right_chars.len() + 1];
for (j, right_ch) in right_chars.iter().enumerate() { for (j, right_ch) in right_chars.iter().enumerate() {
let cost = usize::from(left_ch != *right_ch); let cost = usize::from(left_ch != *right_ch);
curr[j + 1] = (curr[j] + 1) curr[j + 1] = (curr[j] + 1).min(prev[j + 1] + 1).min(prev[j] + cost);
.min(prev[j + 1] + 1)
.min(prev[j] + cost);
} }
prev = curr; prev = curr;
} }
@@ -157,7 +155,11 @@ fn candidate_terms(posts: &[posts::Model]) -> Vec<String> {
candidates candidates
} }
fn find_spelling_fallback(query: &str, posts: &[posts::Model], synonym_groups: &[Vec<String>]) -> Vec<String> { fn find_spelling_fallback(
query: &str,
posts: &[posts::Model],
synonym_groups: &[Vec<String>],
) -> Vec<String> {
let primary_token = tokenize(query).into_iter().next().unwrap_or_default(); let primary_token = tokenize(query).into_iter().next().unwrap_or_default();
if primary_token.len() < 3 { if primary_token.len() < 3 {
return Vec::new(); return Vec::new();
@@ -397,7 +399,6 @@ async fn build_search_results(
headers: &HeaderMap, headers: &HeaderMap,
) -> Result<(String, bool, Vec<SearchResult>)> { ) -> Result<(String, bool, Vec<SearchResult>)> {
let preview_search = is_preview_search(query, headers); let preview_search = is_preview_search(query, headers);
content::sync_markdown_posts(ctx).await?;
let q = query.q.clone().unwrap_or_default().trim().to_string(); let q = query.q.clone().unwrap_or_default().trim().to_string();
if q.is_empty() { if q.is_empty() {
@@ -442,7 +443,12 @@ async fn build_search_results(
}); });
} }
if let Some(tag) = query.tag.as_deref().map(str::trim).filter(|value| !value.is_empty()) { if let Some(tag) = query
.tag
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
all_posts.retain(|post| post_has_tag(post, tag)); all_posts.retain(|post| post_has_tag(post, tag));
} }
@@ -550,7 +556,8 @@ pub async fn search_page(
let page_size = query.page_size.unwrap_or(20).clamp(1, 100); let page_size = query.page_size.unwrap_or(20).clamp(1, 100);
let sort_by = normalize_search_sort_by(query.sort_by.as_deref()); let sort_by = normalize_search_sort_by(query.sort_by.as_deref());
let sort_order = normalize_sort_order(query.sort_order.as_deref(), &sort_by); let sort_order = normalize_sort_order(query.sort_order.as_deref(), &sort_by);
let (q, preview_search, mut results) = build_search_results(&ctx, &query.search, &headers).await?; let (q, preview_search, mut results) =
build_search_results(&ctx, &query.search, &headers).await?;
if q.is_empty() { if q.is_empty() {
return format::json(PagedSearchResponse { return format::json(PagedSearchResponse {

View File

@@ -93,8 +93,12 @@ pub struct SiteSettingsPayload {
pub ai_enabled: Option<bool>, pub ai_enabled: Option<bool>,
#[serde(default, alias = "paragraphCommentsEnabled")] #[serde(default, alias = "paragraphCommentsEnabled")]
pub paragraph_comments_enabled: Option<bool>, pub paragraph_comments_enabled: Option<bool>,
#[serde(default, alias = "commentVerificationMode")]
pub comment_verification_mode: Option<String>,
#[serde(default, alias = "commentTurnstileEnabled")] #[serde(default, alias = "commentTurnstileEnabled")]
pub comment_turnstile_enabled: Option<bool>, pub comment_turnstile_enabled: Option<bool>,
#[serde(default, alias = "subscriptionVerificationMode")]
pub subscription_verification_mode: Option<String>,
#[serde(default, alias = "subscriptionTurnstileEnabled")] #[serde(default, alias = "subscriptionTurnstileEnabled")]
pub subscription_turnstile_enabled: Option<bool>, pub subscription_turnstile_enabled: Option<bool>,
#[serde(default, alias = "webPushEnabled")] #[serde(default, alias = "webPushEnabled")]
@@ -195,7 +199,9 @@ pub struct PublicSiteSettingsResponse {
pub music_playlist: Option<serde_json::Value>, pub music_playlist: Option<serde_json::Value>,
pub ai_enabled: bool, pub ai_enabled: bool,
pub paragraph_comments_enabled: bool, pub paragraph_comments_enabled: bool,
pub comment_verification_mode: String,
pub comment_turnstile_enabled: bool, pub comment_turnstile_enabled: bool,
pub subscription_verification_mode: String,
pub subscription_turnstile_enabled: bool, pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool, pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>, pub turnstile_site_key: Option<String>,
@@ -270,6 +276,9 @@ pub(crate) fn default_subscription_popup_delay_seconds() -> i32 {
18 18
} }
const DEFAULT_TURNSTILE_SITE_KEY: &str = "0x4AAAAAACy58kMBSwXwqMhx";
const DEFAULT_TURNSTILE_SECRET_KEY: &str = "0x4AAAAAACy58m3gYfSqM-VIz4QK4wuO73U";
fn normalize_string_list(values: Vec<String>) -> Vec<String> { fn normalize_string_list(values: Vec<String>) -> Vec<String> {
values values
.into_iter() .into_iter()
@@ -549,11 +558,48 @@ impl SiteSettingsPayload {
if let Some(paragraph_comments_enabled) = self.paragraph_comments_enabled { if let Some(paragraph_comments_enabled) = self.paragraph_comments_enabled {
item.paragraph_comments_enabled = Some(paragraph_comments_enabled); item.paragraph_comments_enabled = Some(paragraph_comments_enabled);
} }
if let Some(comment_turnstile_enabled) = self.comment_turnstile_enabled { if let Some(comment_verification_mode) = self
.comment_verification_mode
.as_deref()
.and_then(|value| crate::services::turnstile::normalize_verification_mode(Some(value)))
{
item.comment_verification_mode = Some(comment_verification_mode.as_str().to_string());
item.comment_turnstile_enabled = Some(matches!(
comment_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
));
} else if let Some(comment_turnstile_enabled) = self.comment_turnstile_enabled {
item.comment_turnstile_enabled = Some(comment_turnstile_enabled); item.comment_turnstile_enabled = Some(comment_turnstile_enabled);
item.comment_verification_mode = Some(
if comment_turnstile_enabled {
crate::services::turnstile::VERIFICATION_MODE_TURNSTILE
} else {
crate::services::turnstile::VERIFICATION_MODE_CAPTCHA
}
.to_string(),
);
} }
if let Some(subscription_turnstile_enabled) = self.subscription_turnstile_enabled { if let Some(subscription_verification_mode) = self
.subscription_verification_mode
.as_deref()
.and_then(|value| crate::services::turnstile::normalize_verification_mode(Some(value)))
{
item.subscription_verification_mode =
Some(subscription_verification_mode.as_str().to_string());
item.subscription_turnstile_enabled = Some(matches!(
subscription_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
));
} else if let Some(subscription_turnstile_enabled) = self.subscription_turnstile_enabled {
item.subscription_turnstile_enabled = Some(subscription_turnstile_enabled); item.subscription_turnstile_enabled = Some(subscription_turnstile_enabled);
item.subscription_verification_mode = Some(
if subscription_turnstile_enabled {
crate::services::turnstile::VERIFICATION_MODE_TURNSTILE
} else {
crate::services::turnstile::VERIFICATION_MODE_OFF
}
.to_string(),
);
} }
if let Some(web_push_enabled) = self.web_push_enabled { if let Some(web_push_enabled) = self.web_push_enabled {
item.web_push_enabled = Some(web_push_enabled); item.web_push_enabled = Some(web_push_enabled);
@@ -763,11 +809,17 @@ fn default_payload() -> SiteSettingsPayload {
]), ]),
ai_enabled: Some(false), ai_enabled: Some(false),
paragraph_comments_enabled: Some(true), paragraph_comments_enabled: Some(true),
comment_verification_mode: Some(
crate::services::turnstile::VERIFICATION_MODE_CAPTCHA.to_string(),
),
comment_turnstile_enabled: Some(false), comment_turnstile_enabled: Some(false),
subscription_verification_mode: Some(
crate::services::turnstile::VERIFICATION_MODE_OFF.to_string(),
),
subscription_turnstile_enabled: Some(false), subscription_turnstile_enabled: Some(false),
web_push_enabled: Some(false), web_push_enabled: Some(false),
turnstile_site_key: None, turnstile_site_key: Some(DEFAULT_TURNSTILE_SITE_KEY.to_string()),
turnstile_secret_key: None, turnstile_secret_key: Some(DEFAULT_TURNSTILE_SECRET_KEY.to_string()),
web_push_vapid_public_key: None, web_push_vapid_public_key: None,
web_push_vapid_private_key: None, web_push_vapid_private_key: None,
web_push_vapid_subject: None, web_push_vapid_subject: None,
@@ -835,11 +887,11 @@ pub(crate) async fn load_current(ctx: &AppContext) -> Result<Model> {
fn public_response(model: Model) -> PublicSiteSettingsResponse { fn public_response(model: Model) -> PublicSiteSettingsResponse {
let turnstile_site_key = crate::services::turnstile::site_key(&model); let turnstile_site_key = crate::services::turnstile::site_key(&model);
let web_push_vapid_public_key = crate::services::web_push::public_key(&model); let web_push_vapid_public_key = crate::services::web_push::public_key(&model);
let comment_turnstile_enabled = crate::services::turnstile::is_enabled( let comment_verification_mode = crate::services::turnstile::effective_mode(
&model, &model,
crate::services::turnstile::TurnstileScope::Comment, crate::services::turnstile::TurnstileScope::Comment,
); );
let subscription_turnstile_enabled = crate::services::turnstile::is_enabled( let subscription_verification_mode = crate::services::turnstile::effective_mode(
&model, &model,
crate::services::turnstile::TurnstileScope::Subscription, crate::services::turnstile::TurnstileScope::Subscription,
); );
@@ -866,8 +918,16 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
music_playlist: model.music_playlist, music_playlist: model.music_playlist,
ai_enabled: model.ai_enabled.unwrap_or(false), ai_enabled: model.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true), paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true),
comment_turnstile_enabled, comment_verification_mode: comment_verification_mode.as_str().to_string(),
subscription_turnstile_enabled, comment_turnstile_enabled: matches!(
comment_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
subscription_verification_mode: subscription_verification_mode.as_str().to_string(),
subscription_turnstile_enabled: matches!(
subscription_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
web_push_enabled, web_push_enabled,
turnstile_site_key, turnstile_site_key,
web_push_vapid_public_key, web_push_vapid_public_key,
@@ -890,8 +950,6 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
#[debug_handler] #[debug_handler]
pub async fn home(State(ctx): State<AppContext>) -> Result<Response> { pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let site_settings = public_response(load_current(&ctx).await?); let site_settings = public_response(load_current(&ctx).await?);
let posts = posts::Entity::find() let posts = posts::Entity::find()
.order_by_desc(posts::Column::CreatedAt) .order_by_desc(posts::Column::CreatedAt)

View File

@@ -14,6 +14,10 @@ pub struct PublicSubscriptionPayload {
pub source: Option<String>, pub source: Option<String>,
#[serde(default, alias = "turnstileToken")] #[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>, pub turnstile_token: Option<String>,
#[serde(default, alias = "captchaToken")]
pub captcha_token: Option<String>,
#[serde(default, alias = "captchaAnswer")]
pub captcha_answer: Option<String>,
} }
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Deserialize)]
@@ -23,6 +27,10 @@ pub struct PublicBrowserPushSubscriptionPayload {
pub source: Option<String>, pub source: Option<String>,
#[serde(default, alias = "turnstileToken")] #[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>, pub turnstile_token: Option<String>,
#[serde(default, alias = "captchaToken")]
pub captcha_token: Option<String>,
#[serde(default, alias = "captchaAnswer")]
pub captcha_answer: Option<String>,
} }
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Deserialize)]
@@ -81,6 +89,28 @@ fn public_browser_push_metadata(
}) })
} }
async fn verify_subscription_human_check(
settings: &crate::models::_entities::site_settings::Model,
turnstile_token: Option<&str>,
captcha_token: Option<&str>,
captcha_answer: Option<&str>,
client_ip: Option<&str>,
) -> Result<()> {
match turnstile::effective_mode(settings, turnstile::TurnstileScope::Subscription) {
turnstile::VerificationMode::Off => Ok(()),
turnstile::VerificationMode::Captcha => {
crate::services::comment_guard::verify_captcha_solution(
captcha_token,
captcha_answer,
client_ip,
)
}
turnstile::VerificationMode::Turnstile => {
turnstile::verify_token(settings, turnstile_token, client_ip).await
}
}
}
#[debug_handler] #[debug_handler]
pub async fn subscribe( pub async fn subscribe(
State(ctx): State<AppContext>, State(ctx): State<AppContext>,
@@ -94,10 +124,12 @@ pub async fn subscribe(
client_ip.as_deref(), client_ip.as_deref(),
Some(&email), Some(&email),
)?; )?;
let _ = turnstile::verify_if_enabled( let settings = crate::controllers::site_settings::load_current(&ctx).await?;
&ctx, verify_subscription_human_check(
turnstile::TurnstileScope::Subscription, &settings,
payload.turnstile_token.as_deref(), payload.turnstile_token.as_deref(),
payload.captcha_token.as_deref(),
payload.captcha_answer.as_deref(),
client_ip.as_deref(), client_ip.as_deref(),
) )
.await?; .await?;
@@ -165,10 +197,11 @@ pub async fn subscribe_browser_push(
.map(ToString::to_string); .map(ToString::to_string);
abuse_guard::enforce_public_scope("browser-push-subscription", client_ip.as_deref(), Some(&endpoint))?; abuse_guard::enforce_public_scope("browser-push-subscription", client_ip.as_deref(), Some(&endpoint))?;
let _ = turnstile::verify_if_enabled( verify_subscription_human_check(
&ctx, &settings,
turnstile::TurnstileScope::Subscription,
payload.turnstile_token.as_deref(), payload.turnstile_token.as_deref(),
payload.captcha_token.as_deref(),
payload.captcha_answer.as_deref(),
client_ip.as_deref(), client_ip.as_deref(),
) )
.await?; .await?;

View File

@@ -118,7 +118,10 @@ fn tag_values(post: &posts::Model) -> Vec<String> {
fn build_summary(item: &tags::Model, post_items: &[posts::Model]) -> TagSummary { fn build_summary(item: &tags::Model, post_items: &[posts::Model]) -> TagSummary {
let name = tag_name(item); let name = tag_name(item);
let aliases = [name.trim().to_ascii_lowercase(), item.slug.trim().to_ascii_lowercase()]; let aliases = [
name.trim().to_ascii_lowercase(),
item.slug.trim().to_ascii_lowercase(),
];
let count = post_items let count = post_items
.iter() .iter()
.filter(|post| { .filter(|post| {
@@ -163,7 +166,6 @@ async fn load_item(ctx: &AppContext, id: i32) -> Result<tags::Model> {
#[debug_handler] #[debug_handler]
pub async fn list(State(ctx): State<AppContext>) -> Result<Response> { pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let tag_items = tags::Entity::find() let tag_items = tags::Entity::find()
.order_by_asc(tags::Column::Slug) .order_by_asc(tags::Column::Slug)
.all(&ctx.db) .all(&ctx.db)
@@ -234,10 +236,12 @@ pub async fn update(
!= Some(name.as_str()) != Some(name.as_str())
{ {
content::rewrite_tag_references( content::rewrite_tag_references(
&ctx,
previous_name.as_deref(), previous_name.as_deref(),
&previous_slug, &previous_slug,
Some(&name), Some(&name),
)?; )
.await?;
} }
let mut item = item.into_active_model(); let mut item = item.into_active_model();
@@ -249,16 +253,14 @@ pub async fn update(
item.seo_title = Set(trim_to_option(params.seo_title)); item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description)); item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?; let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(build_record(item)) format::json(build_record(item))
} }
#[debug_handler] #[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> { pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
let item = load_item(&ctx, id).await?; let item = load_item(&ctx, id).await?;
content::rewrite_tag_references(item.name.as_deref(), &item.slug, None)?; content::rewrite_tag_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?; item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty() format::empty()
} }

View File

@@ -45,6 +45,10 @@
description: "节奏更明显一点,适合切换阅读状态。" description: "节奏更明显一点,适合切换阅读状态。"
ai_enabled: false ai_enabled: false
paragraph_comments_enabled: true paragraph_comments_enabled: true
comment_verification_mode: "captcha"
subscription_verification_mode: "off"
turnstile_site_key: "0x4AAAAAACy58kMBSwXwqMhx"
turnstile_secret_key: "0x4AAAAAACy58m3gYfSqM-VIz4QK4wuO73U"
ai_provider: "newapi" ai_provider: "newapi"
ai_api_base: "https://91code.jiangnight.com/v1" ai_api_base: "https://91code.jiangnight.com/v1"
ai_api_key: "sk-5a5e27db9fb8f8ee7e1d8e3c6a44638c2e50cdb0a0cf9d926fefb5418ff62571" ai_api_key: "sk-5a5e27db9fb8f8ee7e1d8e3c6a44638c2e50cdb0a0cf9d926fefb5418ff62571"

View File

@@ -1,14 +1,12 @@
use async_trait::async_trait; use async_trait::async_trait;
use loco_rs::{ use loco_rs::{
app::{AppContext, Initializer},
Result, Result,
app::{AppContext, Initializer},
}; };
use sea_orm::{ActiveModelTrait, EntityTrait, IntoActiveModel, QueryOrder, Set}; use sea_orm::{ActiveModelTrait, EntityTrait, IntoActiveModel, QueryOrder, Set};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use crate::models::_entities::{comments, posts, site_settings}; use crate::models::_entities::{comments, posts, site_settings};
use crate::services::content;
const FIXTURES_DIR: &str = "src/fixtures"; const FIXTURES_DIR: &str = "src/fixtures";
pub struct ContentSyncInitializer; pub struct ContentSyncInitializer;
@@ -25,7 +23,6 @@ impl Initializer for ContentSyncInitializer {
} }
async fn sync_content(ctx: &AppContext, base: &Path) -> Result<()> { async fn sync_content(ctx: &AppContext, base: &Path) -> Result<()> {
content::sync_markdown_posts(ctx).await?;
sync_site_settings(ctx, base).await?; sync_site_settings(ctx, base).await?;
sync_comment_post_slugs(ctx, base).await?; sync_comment_post_slugs(ctx, base).await?;
Ok(()) Ok(())
@@ -111,6 +108,19 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
}) })
.filter(|items| !items.is_empty()) .filter(|items| !items.is_empty())
.map(serde_json::Value::Array); .map(serde_json::Value::Array);
let comment_verification_mode = as_optional_string(&seed["comment_verification_mode"]);
let subscription_verification_mode =
as_optional_string(&seed["subscription_verification_mode"]);
let comment_turnstile_enabled = seed["comment_turnstile_enabled"]
.as_bool()
.or(comment_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let subscription_turnstile_enabled = seed["subscription_turnstile_enabled"]
.as_bool()
.or(subscription_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let existing = site_settings::Entity::find() let existing = site_settings::Entity::find()
.order_by_asc(site_settings::Column::Id) .order_by_asc(site_settings::Column::Id)
@@ -179,6 +189,24 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
model.paragraph_comments_enabled = model.paragraph_comments_enabled =
Set(seed["paragraph_comments_enabled"].as_bool().or(Some(true))); Set(seed["paragraph_comments_enabled"].as_bool().or(Some(true)));
} }
if existing.comment_verification_mode.is_none() {
model.comment_verification_mode = Set(comment_verification_mode.clone());
}
if existing.comment_turnstile_enabled.is_none() {
model.comment_turnstile_enabled = Set(comment_turnstile_enabled);
}
if existing.subscription_verification_mode.is_none() {
model.subscription_verification_mode = Set(subscription_verification_mode.clone());
}
if existing.subscription_turnstile_enabled.is_none() {
model.subscription_turnstile_enabled = Set(subscription_turnstile_enabled);
}
if is_blank(&existing.turnstile_site_key) {
model.turnstile_site_key = Set(as_optional_string(&seed["turnstile_site_key"]));
}
if is_blank(&existing.turnstile_secret_key) {
model.turnstile_secret_key = Set(as_optional_string(&seed["turnstile_secret_key"]));
}
if should_upgrade_legacy_ai_defaults { if should_upgrade_legacy_ai_defaults {
model.ai_provider = Set(as_optional_string(&seed["ai_provider"])); model.ai_provider = Set(as_optional_string(&seed["ai_provider"]));
model.ai_api_base = Set(as_optional_string(&seed["ai_api_base"])); model.ai_api_base = Set(as_optional_string(&seed["ai_api_base"]));
@@ -237,6 +265,12 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
paragraph_comments_enabled: Set(seed["paragraph_comments_enabled"] paragraph_comments_enabled: Set(seed["paragraph_comments_enabled"]
.as_bool() .as_bool()
.or(Some(true))), .or(Some(true))),
comment_verification_mode: Set(comment_verification_mode),
comment_turnstile_enabled: Set(comment_turnstile_enabled),
subscription_verification_mode: Set(subscription_verification_mode),
subscription_turnstile_enabled: Set(subscription_turnstile_enabled),
turnstile_site_key: Set(as_optional_string(&seed["turnstile_site_key"])),
turnstile_secret_key: Set(as_optional_string(&seed["turnstile_secret_key"])),
ai_provider: Set(as_optional_string(&seed["ai_provider"])), ai_provider: Set(as_optional_string(&seed["ai_provider"])),
ai_api_base: Set(as_optional_string(&seed["ai_api_base"])), ai_api_base: Set(as_optional_string(&seed["ai_api_base"])),
ai_api_key: Set(as_optional_string(&seed["ai_api_key"])), ai_api_key: Set(as_optional_string(&seed["ai_api_key"])),

View File

@@ -15,6 +15,9 @@ pub struct Model {
pub description: Option<String>, pub description: Option<String>,
#[sea_orm(column_type = "Text", nullable)] #[sea_orm(column_type = "Text", nullable)]
pub content: Option<String>, pub content: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
#[serde(skip_serializing, skip_deserializing)]
pub source_markdown: Option<String>,
pub category: Option<String>, pub category: Option<String>,
#[sea_orm(column_type = "JsonBinary", nullable)] #[sea_orm(column_type = "JsonBinary", nullable)]
pub tags: Option<Json>, pub tags: Option<Json>,

View File

@@ -34,6 +34,8 @@ pub struct Model {
pub paragraph_comments_enabled: Option<bool>, pub paragraph_comments_enabled: Option<bool>,
pub comment_turnstile_enabled: Option<bool>, pub comment_turnstile_enabled: Option<bool>,
pub subscription_turnstile_enabled: Option<bool>, pub subscription_turnstile_enabled: Option<bool>,
pub comment_verification_mode: Option<String>,
pub subscription_verification_mode: Option<String>,
pub web_push_enabled: Option<bool>, pub web_push_enabled: Option<bool>,
#[sea_orm(column_type = "Text", nullable)] #[sea_orm(column_type = "Text", nullable)]
pub turnstile_site_key: Option<String>, pub turnstile_site_key: Option<String>,

View File

@@ -1,16 +1,16 @@
use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, Engine as _}; use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use fastembed::{ use fastembed::{
InitOptionsUserDefined, Pooling, TextEmbedding, TokenizerFiles, UserDefinedEmbeddingModel, InitOptionsUserDefined, Pooling, TextEmbedding, TokenizerFiles, UserDefinedEmbeddingModel,
}; };
use loco_rs::prelude::*; use loco_rs::prelude::*;
use reqwest::{header::CONTENT_TYPE, multipart, Client, Url}; use reqwest::{Client, Url, header::CONTENT_TYPE, multipart};
use sea_orm::{ use sea_orm::{
ActiveModelTrait, ConnectionTrait, DbBackend, EntityTrait, FromQueryResult, IntoActiveModel, ActiveModelTrait, ConnectionTrait, DbBackend, EntityTrait, FromQueryResult, IntoActiveModel,
PaginatorTrait, QueryOrder, Set, Statement, PaginatorTrait, QueryOrder, Set, Statement,
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::{json, Value}; use serde_json::{Value, json};
use std::fs; use std::fs;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::{Mutex, OnceLock}; use std::sync::{Mutex, OnceLock};
@@ -34,8 +34,7 @@ const DEFAULT_CLOUDFLARE_CHAT_MODEL: &str = "@cf/meta/llama-3.1-8b-instruct";
const DEFAULT_CLOUDFLARE_IMAGE_MODEL: &str = "@cf/black-forest-labs/flux-2-klein-4b"; const DEFAULT_CLOUDFLARE_IMAGE_MODEL: &str = "@cf/black-forest-labs/flux-2-klein-4b";
const DEFAULT_TOP_K: usize = 4; const DEFAULT_TOP_K: usize = 4;
const DEFAULT_CHUNK_SIZE: usize = 1200; const DEFAULT_CHUNK_SIZE: usize = 1200;
const DEFAULT_SYSTEM_PROMPT: &str = const DEFAULT_SYSTEM_PROMPT: &str = "你是这个博客的站内 AI 助手。请严格基于提供的博客上下文回答,优先给出准确结论,再补充细节;如果上下文不足,请明确说明。";
"你是这个博客的站内 AI 助手。请严格基于提供的博客上下文回答,优先给出准确结论,再补充细节;如果上下文不足,请明确说明。";
const EMBEDDING_BATCH_SIZE: usize = 32; const EMBEDDING_BATCH_SIZE: usize = 32;
const EMBEDDING_DIMENSION: usize = 384; const EMBEDDING_DIMENSION: usize = 384;
const LOCAL_EMBEDDING_MODEL_LABEL: &str = "fastembed / local all-MiniLM-L6-v2"; const LOCAL_EMBEDDING_MODEL_LABEL: &str = "fastembed / local all-MiniLM-L6-v2";
@@ -2096,8 +2095,8 @@ pub(crate) fn build_provider_url(request: &AiProviderRequest) -> String {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{ use super::{
build_provider_url, extract_provider_text, is_profile_question, AiProviderRequest, build_provider_url, extract_provider_text, is_profile_question,
normalize_provider_api_base, parse_provider_sse_body, AiProviderRequest, normalize_provider_api_base, parse_provider_sse_body,
}; };
fn build_request(provider: &str, api_base: &str) -> AiProviderRequest { fn build_request(provider: &str, api_base: &str) -> AiProviderRequest {
@@ -2643,7 +2642,7 @@ async fn retrieve_matches(
pub async fn rebuild_index(ctx: &AppContext) -> Result<AiIndexSummary> { pub async fn rebuild_index(ctx: &AppContext) -> Result<AiIndexSummary> {
let settings = load_runtime_settings(ctx, false).await?; let settings = load_runtime_settings(ctx, false).await?;
let posts = content::sync_markdown_posts(ctx).await?; let posts = content::load_markdown_posts_from_store(ctx).await?;
let mut chunk_drafts = build_chunks(&posts, settings.chunk_size); let mut chunk_drafts = build_chunks(&posts, settings.chunk_size);
chunk_drafts.extend(build_profile_chunks(&settings.raw, settings.chunk_size)); chunk_drafts.extend(build_profile_chunks(&settings.raw, settings.chunk_size));
let embeddings = if chunk_drafts.is_empty() { let embeddings = if chunk_drafts.is_empty() {

View File

@@ -1,4 +1,4 @@
use std::{fs, path::Path, path::PathBuf}; use std::path::Path;
use chrono::Utc; use chrono::Utc;
use loco_rs::prelude::*; use loco_rs::prelude::*;
@@ -11,15 +11,14 @@ use serde::{Deserialize, Serialize};
use crate::{ use crate::{
controllers::site_settings, controllers::site_settings,
models::_entities::{ models::_entities::{
categories, friend_links, media_assets, posts, reviews, site_settings as site_settings_entity, categories, comments, friend_links, media_assets, posts, reviews,
tags, site_settings as site_settings_entity, tags,
}, },
services::{content, media_assets as media_assets_service, storage}, services::{content, media_assets as media_assets_service, storage},
}; };
const BACKUP_VERSION: &str = "2026-04-01"; const BACKUP_VERSION: &str = "2026-04-01";
const WARNING_STORAGE_BINARIES: &str = const WARNING_STORAGE_BINARIES: &str = "当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
"当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupTaxonomyRecord { pub struct BackupTaxonomyRecord {
@@ -152,47 +151,22 @@ fn normalize_backup_mode(value: Option<&str>) -> String {
} }
} }
fn markdown_posts_dir() -> PathBuf {
PathBuf::from(content::MARKDOWN_POSTS_DIR)
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
}
fn remove_existing_markdown_documents() -> Result<usize> {
let dir = markdown_posts_dir();
fs::create_dir_all(&dir).map_err(io_error)?;
let mut removed = 0_usize;
for path in fs::read_dir(&dir)
.map_err(io_error)?
.filter_map(|entry| entry.ok())
.map(|entry| entry.path())
{
let extension = path
.extension()
.and_then(|value| value.to_str())
.map(|value| value.to_ascii_lowercase())
.unwrap_or_default();
if extension == "md" || extension == "markdown" {
fs::remove_file(&path).map_err(io_error)?;
removed += 1;
}
}
Ok(removed)
}
fn normalize_markdown(value: &str) -> String { fn normalize_markdown(value: &str) -> String {
value.replace("\r\n", "\n") value.replace("\r\n", "\n")
} }
fn normalized_backup_post(document: &BackupPostDocument) -> Result<(String, String)> { fn normalized_backup_post(document: &BackupPostDocument) -> Result<(String, String)> {
let candidate_slug = trim_to_option(Some(document.slug.clone())).unwrap_or_default(); let candidate_slug = trim_to_option(Some(document.slug.clone())).unwrap_or_default();
let file_name = trim_to_option(Some(document.file_name.clone())) let file_name = trim_to_option(Some(document.file_name.clone())).unwrap_or_else(|| {
.unwrap_or_else(|| format!("{}.md", if candidate_slug.is_empty() { "post" } else { &candidate_slug })); format!(
"{}.md",
if candidate_slug.is_empty() {
"post"
} else {
&candidate_slug
}
)
});
let file_stem = Path::new(&file_name) let file_stem = Path::new(&file_name)
.file_stem() .file_stem()
.and_then(|value| value.to_str()) .and_then(|value| value.to_str())
@@ -296,7 +270,6 @@ fn export_media_asset_record(item: media_assets::Model) -> BackupMediaAssetRecor
pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument> { pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument> {
let site_settings_row = site_settings::load_current(ctx).await?; let site_settings_row = site_settings::load_current(ctx).await?;
let markdown_posts = content::sync_markdown_posts(ctx).await?;
let categories = categories::Entity::find() let categories = categories::Entity::find()
.order_by_asc(categories::Column::Slug) .order_by_asc(categories::Column::Slug)
.all(&ctx.db) .all(&ctx.db)
@@ -332,21 +305,24 @@ pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument>
.into_iter() .into_iter()
.map(export_media_asset_record) .map(export_media_asset_record)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let posts = markdown_posts let posts = content::load_markdown_posts_from_store(ctx)
.await?
.into_iter() .into_iter()
.map(|post| { .map(|post| {
let (_, markdown) = content::read_markdown_document(&post.slug)?;
Ok(BackupPostDocument { Ok(BackupPostDocument {
slug: post.slug.clone(), slug: post.slug.clone(),
file_name: format!("{}.md", post.slug), file_name: format!("{}.md", post.slug),
markdown, markdown: content::build_markdown_document(&post),
}) })
}) })
.collect::<Result<Vec<_>>>()?; .collect::<Result<Vec<_>>>()?;
let storage_manifest = match export_storage_manifest(ctx).await { let storage_manifest = match export_storage_manifest(ctx).await {
Ok(items) => items, Ok(items) => items,
Err(error) => { Err(error) => {
tracing::warn!(?error, "failed to export storage manifest, continuing without it"); tracing::warn!(
?error,
"failed to export storage manifest, continuing without it"
);
None None
} }
}; };
@@ -549,28 +525,32 @@ async fn write_backup_posts(
documents: &[BackupPostDocument], documents: &[BackupPostDocument],
replace_existing: bool, replace_existing: bool,
) -> Result<usize> { ) -> Result<usize> {
let dir = markdown_posts_dir();
fs::create_dir_all(&dir).map_err(io_error)?;
if replace_existing { if replace_existing {
remove_existing_markdown_documents()?; let existing_posts = posts::Entity::find().all(&ctx.db).await?;
for post in &existing_posts {
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(&post.slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
}
}
posts::Entity::delete_many().exec(&ctx.db).await?;
} }
if documents.is_empty() { if documents.is_empty() {
if replace_existing {
posts::Entity::delete_many().exec(&ctx.db).await?;
}
return Ok(0); return Ok(0);
} }
let mut written = std::collections::HashSet::new(); let mut written = std::collections::HashSet::new();
for document in documents { for document in documents {
let (slug, markdown) = normalized_backup_post(document)?; let (slug, markdown) = normalized_backup_post(document)?;
fs::write(content::markdown_post_path(&slug), markdown).map_err(io_error)?; content::upsert_markdown_document(ctx, Some(&slug), &markdown).await?;
written.insert(slug); written.insert(slug);
} }
content::sync_markdown_posts(ctx).await?;
Ok(written.len()) Ok(written.len())
} }

View File

@@ -363,15 +363,23 @@ pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'
return Err(Error::BadRequest("提交未通过校验".to_string())); return Err(Error::BadRequest("提交未通过校验".to_string()));
} }
if !crate::services::turnstile::verify_if_enabled( let settings = crate::controllers::site_settings::load_current(ctx).await?;
ctx, match crate::services::turnstile::effective_mode(
&settings,
crate::services::turnstile::TurnstileScope::Comment, crate::services::turnstile::TurnstileScope::Comment,
input.turnstile_token, ) {
input.ip_address, crate::services::turnstile::VerificationMode::Off => {}
) crate::services::turnstile::VerificationMode::Captcha => {
.await? verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
{ }
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?; crate::services::turnstile::VerificationMode::Turnstile => {
crate::services::turnstile::verify_token(
&settings,
input.turnstile_token,
input.ip_address,
)
.await?;
}
} }
if contains_blocked_keyword(input).is_some() { if contains_blocked_keyword(input).is_some() {

View File

@@ -6,19 +6,17 @@ use sea_orm::{
}; };
use serde::{Deserialize, Deserializer, Serialize}; use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value; use serde_json::Value;
use std::fs; use std::path::Path;
use std::path::{Path, PathBuf};
use crate::models::_entities::{categories, comments, posts, tags}; use crate::models::_entities::{categories, comments, posts, tags};
pub const MARKDOWN_POSTS_DIR: &str = "content/posts";
const FIXTURE_POSTS_FILE: &str = "src/fixtures/posts.yaml";
pub const POST_STATUS_DRAFT: &str = "draft"; pub const POST_STATUS_DRAFT: &str = "draft";
pub const POST_STATUS_PUBLISHED: &str = "published"; pub const POST_STATUS_PUBLISHED: &str = "published";
pub const POST_STATUS_OFFLINE: &str = "offline"; pub const POST_STATUS_OFFLINE: &str = "offline";
pub const POST_VISIBILITY_PUBLIC: &str = "public"; pub const POST_VISIBILITY_PUBLIC: &str = "public";
pub const POST_VISIBILITY_UNLISTED: &str = "unlisted"; pub const POST_VISIBILITY_UNLISTED: &str = "unlisted";
pub const POST_VISIBILITY_PRIVATE: &str = "private"; pub const POST_VISIBILITY_PRIVATE: &str = "private";
const VIRTUAL_MARKDOWN_PATH_PREFIX: &str = "article://posts";
#[derive(Debug, Clone, Default, Deserialize, Serialize)] #[derive(Debug, Clone, Default, Deserialize, Serialize)]
struct MarkdownFrontmatter { struct MarkdownFrontmatter {
@@ -105,32 +103,18 @@ pub struct MarkdownImportFile {
pub content: String, pub content: String,
} }
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone)]
struct LegacyFixturePost { struct MarkdownDocumentSource {
title: String, post: MarkdownPost,
slug: String, raw_markdown: String,
content: String,
excerpt: Option<String>,
category: Option<String>,
tags: Option<Vec<String>>,
pinned: Option<bool>,
published: Option<bool>,
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
} }
fn yaml_error(err: serde_yaml::Error) -> Error { fn yaml_error(err: serde_yaml::Error) -> Error {
Error::string(&err.to_string()) Error::string(&err.to_string())
} }
fn posts_dir() -> PathBuf { pub fn virtual_markdown_document_path(slug: &str) -> String {
PathBuf::from(MARKDOWN_POSTS_DIR) format!("{VIRTUAL_MARKDOWN_PATH_PREFIX}/{slug}")
}
pub fn markdown_post_path(slug: &str) -> PathBuf {
posts_dir().join(format!("{slug}.md"))
} }
fn normalize_newlines(input: &str) -> String { fn normalize_newlines(input: &str) -> String {
@@ -157,6 +141,15 @@ fn normalize_string_list(values: Option<Vec<String>>) -> Vec<String> {
.collect() .collect()
} }
fn normalize_post_tags(values: Vec<String>) -> Vec<String> {
let mut seen = std::collections::HashSet::new();
normalize_string_list(Some(values))
.into_iter()
.filter(|item| seen.insert(normalized_match_key(item)))
.collect()
}
fn yaml_scalar(value: &str) -> String { fn yaml_scalar(value: &str) -> String {
serde_yaml::to_string(value) serde_yaml::to_string(value)
.unwrap_or_else(|_| format!("{value:?}")) .unwrap_or_else(|_| format!("{value:?}"))
@@ -214,7 +207,9 @@ fn parse_frontmatter_datetime(value: Option<String>) -> Option<DateTime<FixedOff
if let Ok(date_only) = NaiveDate::parse_from_str(&raw, "%Y-%m-%d") { if let Ok(date_only) = NaiveDate::parse_from_str(&raw, "%Y-%m-%d") {
let naive = date_only.and_hms_opt(0, 0, 0)?; let naive = date_only.and_hms_opt(0, 0, 0)?;
return FixedOffset::east_opt(0)?.from_local_datetime(&naive).single(); return FixedOffset::east_opt(0)?
.from_local_datetime(&naive)
.single();
} }
None None
@@ -278,6 +273,46 @@ pub fn post_redirects_from_json(value: &Option<Value>) -> Vec<String> {
.collect() .collect()
} }
fn json_string_array(value: &Option<Value>) -> Vec<String> {
value
.as_ref()
.and_then(Value::as_array)
.cloned()
.unwrap_or_default()
.into_iter()
.filter_map(|item| item.as_str().map(ToString::to_string))
.map(|item| item.trim().to_string())
.filter(|item| !item.is_empty())
.collect()
}
fn markdown_post_from_model(post: &posts::Model) -> MarkdownPost {
MarkdownPost {
title: trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone()),
slug: post.slug.clone(),
description: trim_to_option(post.description.clone())
.or_else(|| post.content.as_deref().and_then(excerpt_from_content)),
content: post.content.clone().unwrap_or_default(),
category: trim_to_option(post.category.clone()),
tags: json_string_array(&post.tags),
post_type: trim_to_option(post.post_type.clone()).unwrap_or_else(|| "article".to_string()),
image: trim_to_option(post.image.clone()),
images: json_string_array(&post.images),
pinned: post.pinned.unwrap_or(false),
status: normalize_post_status(post.status.as_deref()),
visibility: normalize_post_visibility(post.visibility.as_deref()),
publish_at: format_frontmatter_datetime(post.publish_at.clone()),
unpublish_at: format_frontmatter_datetime(post.unpublish_at.clone()),
canonical_url: normalize_url_like(post.canonical_url.clone()),
noindex: post.noindex.unwrap_or(false),
og_image: normalize_url_like(post.og_image.clone()),
redirect_from: post_redirects_from_json(&post.redirect_from),
redirect_to: trim_to_option(post.redirect_to.clone())
.map(|item| item.trim_matches('/').to_string()),
file_path: virtual_markdown_document_path(&post.slug),
}
}
pub fn is_post_listed_publicly(post: &posts::Model, now: DateTime<FixedOffset>) -> bool { pub fn is_post_listed_publicly(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
effective_post_state( effective_post_state(
post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED), post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
@@ -431,17 +466,6 @@ fn split_frontmatter(raw: &str) -> Result<(MarkdownFrontmatter, String)> {
Ok((parsed, content)) Ok((parsed, content))
} }
fn parse_markdown_post(path: &Path) -> Result<MarkdownPost> {
let raw = fs::read_to_string(path).map_err(io_error)?;
let file_stem = path
.file_stem()
.and_then(|value| value.to_str())
.unwrap_or("post")
.to_string();
parse_markdown_source(&file_stem, &raw, &path.to_string_lossy())
}
pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> { pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
let (frontmatter, content) = split_frontmatter(raw)?; let (frontmatter, content) = split_frontmatter(raw)?;
@@ -567,103 +591,40 @@ pub fn build_markdown_document(post: &MarkdownPost) -> String {
lines.join("\n") lines.join("\n")
} }
fn ensure_markdown_posts_bootstrapped() -> Result<()> { fn markdown_document_from_model(model: &posts::Model) -> Result<MarkdownDocumentSource> {
let dir = posts_dir(); let raw_markdown = model
fs::create_dir_all(&dir).map_err(io_error)?; .source_markdown
.clone()
.map(|value| normalize_newlines(&value))
.filter(|value| !value.trim().is_empty())
.unwrap_or_else(|| build_markdown_document(&markdown_post_from_model(model)));
let virtual_path = virtual_markdown_document_path(&model.slug);
let post = parse_markdown_source(&model.slug, &raw_markdown, &virtual_path)?;
let has_markdown = fs::read_dir(&dir) Ok(MarkdownDocumentSource { post, raw_markdown })
.map_err(io_error)?
.filter_map(|entry| entry.ok())
.any(|entry| entry.path().extension().and_then(|value| value.to_str()) == Some("md"));
if has_markdown {
return Ok(());
}
let raw = fs::read_to_string(FIXTURE_POSTS_FILE).map_err(io_error)?;
let fixtures = serde_yaml::from_str::<Vec<LegacyFixturePost>>(&raw).map_err(yaml_error)?;
for fixture in fixtures {
let post = MarkdownPost {
title: fixture.title,
slug: fixture.slug.clone(),
description: trim_to_option(fixture.excerpt),
content: fixture.content,
category: trim_to_option(fixture.category),
tags: fixture.tags.unwrap_or_default(),
post_type: "article".to_string(),
image: None,
images: Vec::new(),
pinned: fixture.pinned.unwrap_or(false),
status: if fixture.published.unwrap_or(true) {
POST_STATUS_PUBLISHED.to_string()
} else {
POST_STATUS_DRAFT.to_string()
},
visibility: POST_VISIBILITY_PUBLIC.to_string(),
publish_at: None,
unpublish_at: None,
canonical_url: None,
noindex: false,
og_image: None,
redirect_from: Vec::new(),
redirect_to: None,
file_path: markdown_post_path(&fixture.slug)
.to_string_lossy()
.to_string(),
};
fs::write(
markdown_post_path(&fixture.slug),
build_markdown_document(&post),
)
.map_err(io_error)?;
}
Ok(())
} }
fn load_markdown_posts_from_disk() -> Result<Vec<MarkdownPost>> { async fn load_markdown_documents_from_store(
ensure_markdown_posts_bootstrapped()?; ctx: &AppContext,
) -> Result<Vec<MarkdownDocumentSource>> {
let mut posts = fs::read_dir(posts_dir()) let mut documents = posts::Entity::find()
.map_err(io_error)? .order_by_asc(posts::Column::Slug)
.filter_map(|entry| entry.ok()) .all(&ctx.db)
.map(|entry| entry.path()) .await?
.filter(|path| path.extension().and_then(|value| value.to_str()) == Some("md")) .into_iter()
.map(|path| parse_markdown_post(&path)) .map(|item| markdown_document_from_model(&item))
.collect::<Result<Vec<_>>>()?; .collect::<Result<Vec<_>>>()?;
posts.sort_by(|left, right| left.slug.cmp(&right.slug)); documents.sort_by(|left, right| left.post.slug.cmp(&right.post.slug));
Ok(posts) Ok(documents)
} }
async fn sync_tags_from_posts(ctx: &AppContext, posts: &[MarkdownPost]) -> Result<()> { pub async fn load_markdown_posts_from_store(ctx: &AppContext) -> Result<Vec<MarkdownPost>> {
for post in posts { Ok(load_markdown_documents_from_store(ctx)
for tag_name in &post.tags { .await?
let slug = slugify(tag_name); .into_iter()
let trimmed = tag_name.trim(); .map(|document| document.post)
let existing = tags::Entity::find() .collect())
.filter(
Condition::any()
.add(tags::Column::Slug.eq(&slug))
.add(tags::Column::Name.eq(trimmed)),
)
.one(&ctx.db)
.await?;
if existing.is_none() {
let item = tags::ActiveModel {
name: Set(Some(trimmed.to_string())),
slug: Set(slug),
..Default::default()
};
let _ = item.insert(&ctx.db).await;
}
}
}
Ok(())
} }
async fn ensure_category(ctx: &AppContext, raw_name: &str) -> Result<Option<String>> { async fn ensure_category(ctx: &AppContext, raw_name: &str) -> Result<Option<String>> {
@@ -768,21 +729,138 @@ async fn canonicalize_tags(ctx: &AppContext, raw_tags: &[String]) -> Result<Vec<
Ok(canonical_tags) Ok(canonical_tags)
} }
fn write_markdown_post_to_disk(post: &MarkdownPost) -> Result<()> { fn string_array_json(values: &[String]) -> Option<Value> {
fs::write( (!values.is_empty()).then(|| Value::Array(values.iter().cloned().map(Value::String).collect()))
markdown_post_path(&post.slug),
build_markdown_document(post),
)
.map_err(io_error)
} }
pub fn rewrite_category_references( fn apply_markdown_post_to_active_model(
model: &mut posts::ActiveModel,
post: &MarkdownPost,
raw_markdown: &str,
) {
model.title = Set(Some(post.title.clone()));
model.slug = Set(post.slug.clone());
model.description = Set(post.description.clone());
model.content = Set(Some(post.content.clone()));
model.source_markdown = Set(Some(raw_markdown.to_string()));
model.category = Set(post.category.clone());
model.tags = Set(string_array_json(&post.tags));
model.post_type = Set(Some(post.post_type.clone()));
model.image = Set(post.image.clone());
model.images = Set(string_array_json(&post.images));
model.pinned = Set(Some(post.pinned));
model.status = Set(Some(normalize_post_status(Some(&post.status))));
model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
model.noindex = Set(Some(post.noindex));
model.og_image = Set(normalize_url_like(post.og_image.clone()));
model.redirect_from = Set(string_array_json(&post.redirect_from));
model.redirect_to = Set(
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string())
);
}
async fn save_markdown_post_to_store(
ctx: &AppContext,
mut post: MarkdownPost,
slug_hint: Option<&str>,
canonicalize_taxonomy: bool,
) -> Result<MarkdownPost> {
let normalized_slug_hint = slug_hint
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string);
post.title = trim_to_option(Some(post.title.clone())).unwrap_or_else(|| post.slug.clone());
post.slug = trim_to_option(Some(post.slug.clone()))
.or_else(|| normalized_slug_hint.clone())
.unwrap_or_else(|| slugify(&post.title));
post.description =
trim_to_option(post.description.clone()).or_else(|| excerpt_from_content(&post.content));
post.content = normalize_newlines(post.content.trim());
post.category = trim_to_option(post.category.clone());
post.tags = normalize_post_tags(post.tags.clone());
post.post_type =
trim_to_option(Some(post.post_type.clone())).unwrap_or_else(|| "article".to_string());
post.image = trim_to_option(post.image.clone());
post.images = normalize_string_list(Some(post.images.clone()));
post.status = normalize_post_status(Some(&post.status));
post.visibility = normalize_post_visibility(Some(&post.visibility));
post.publish_at =
format_frontmatter_datetime(parse_frontmatter_datetime(post.publish_at.clone()));
post.unpublish_at =
format_frontmatter_datetime(parse_frontmatter_datetime(post.unpublish_at.clone()));
post.canonical_url = normalize_url_like(post.canonical_url.clone());
post.og_image = normalize_url_like(post.og_image.clone());
post.redirect_from = normalize_redirect_list(Some(post.redirect_from.clone()));
post.redirect_to =
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string());
if post.slug.trim().is_empty() {
return Err(Error::BadRequest("slug is required".to_string()));
}
if canonicalize_taxonomy {
post.category = match post.category.as_deref() {
Some(category) => ensure_category(ctx, category).await?,
None => None,
};
post.tags = canonicalize_tags(ctx, &post.tags).await?;
}
let existing_by_hint = if let Some(hint) = normalized_slug_hint.as_deref() {
posts::Entity::find()
.filter(posts::Column::Slug.eq(hint))
.one(&ctx.db)
.await?
} else {
None
};
let existing_by_slug =
if existing_by_hint.as_ref().map(|item| item.slug.as_str()) == Some(post.slug.as_str()) {
None
} else {
posts::Entity::find()
.filter(posts::Column::Slug.eq(&post.slug))
.one(&ctx.db)
.await?
};
if let (Some(by_hint), Some(by_slug)) = (&existing_by_hint, &existing_by_slug) {
if by_hint.id != by_slug.id {
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {}",
post.slug
)));
}
}
let has_existing = existing_by_hint.is_some() || existing_by_slug.is_some();
let mut model = existing_by_hint
.or(existing_by_slug)
.map(|item| item.into_active_model())
.unwrap_or_default();
post.file_path = virtual_markdown_document_path(&post.slug);
let raw_markdown = build_markdown_document(&post);
apply_markdown_post_to_active_model(&mut model, &post, &raw_markdown);
if has_existing {
model.update(&ctx.db).await?;
} else {
model.insert(&ctx.db).await?;
}
Ok(post)
}
pub async fn rewrite_category_references(
ctx: &AppContext,
current_name: Option<&str>, current_name: Option<&str>,
current_slug: &str, current_slug: &str,
next_name: Option<&str>, next_name: Option<&str>,
) -> Result<usize> { ) -> Result<usize> {
ensure_markdown_posts_bootstrapped()?;
let mut match_keys = Vec::new(); let mut match_keys = Vec::new();
if let Some(name) = current_name { if let Some(name) = current_name {
let normalized = normalized_match_key(name); let normalized = normalized_match_key(name);
@@ -805,9 +883,9 @@ pub fn rewrite_category_references(
.filter(|value| !value.is_empty()) .filter(|value| !value.is_empty())
.map(ToString::to_string); .map(ToString::to_string);
let mut changed = 0_usize; let mut changed = 0_usize;
let mut posts = load_markdown_posts_from_disk()?; let posts = load_markdown_posts_from_store(ctx).await?;
for post in &mut posts { for mut post in posts {
let Some(category) = post.category.as_deref() else { let Some(category) = post.category.as_deref() else {
continue; continue;
}; };
@@ -816,16 +894,17 @@ pub fn rewrite_category_references(
continue; continue;
} }
let existing_slug = post.slug.clone();
match &next_category { match &next_category {
Some(updated_name) if same_text(category, updated_name) => {} Some(updated_name) if same_text(category, updated_name) => {}
Some(updated_name) => { Some(updated_name) => {
post.category = Some(updated_name.clone()); post.category = Some(updated_name.clone());
write_markdown_post_to_disk(post)?; save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1; changed += 1;
} }
None => { None => {
post.category = None; post.category = None;
write_markdown_post_to_disk(post)?; save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1; changed += 1;
} }
} }
@@ -834,13 +913,12 @@ pub fn rewrite_category_references(
Ok(changed) Ok(changed)
} }
pub fn rewrite_tag_references( pub async fn rewrite_tag_references(
ctx: &AppContext,
current_name: Option<&str>, current_name: Option<&str>,
current_slug: &str, current_slug: &str,
next_name: Option<&str>, next_name: Option<&str>,
) -> Result<usize> { ) -> Result<usize> {
ensure_markdown_posts_bootstrapped()?;
let mut match_keys = Vec::new(); let mut match_keys = Vec::new();
if let Some(name) = current_name { if let Some(name) = current_name {
let normalized = normalized_match_key(name); let normalized = normalized_match_key(name);
@@ -863,9 +941,9 @@ pub fn rewrite_tag_references(
.filter(|value| !value.is_empty()) .filter(|value| !value.is_empty())
.map(ToString::to_string); .map(ToString::to_string);
let mut changed = 0_usize; let mut changed = 0_usize;
let mut posts = load_markdown_posts_from_disk()?; let posts = load_markdown_posts_from_store(ctx).await?;
for post in &mut posts { for mut post in posts {
let mut updated_tags = Vec::new(); let mut updated_tags = Vec::new();
let mut seen = std::collections::HashSet::new(); let mut seen = std::collections::HashSet::new();
let mut post_changed = false; let mut post_changed = false;
@@ -889,8 +967,9 @@ pub fn rewrite_tag_references(
} }
if post_changed { if post_changed {
let existing_slug = post.slug.clone();
post.tags = updated_tags; post.tags = updated_tags;
write_markdown_post_to_disk(post)?; save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1; changed += 1;
} }
} }
@@ -898,167 +977,43 @@ pub fn rewrite_tag_references(
Ok(changed) Ok(changed)
} }
async fn dedupe_tags(ctx: &AppContext) -> Result<()> { pub async fn read_markdown_document_from_store(
let existing_tags = tags::Entity::find() ctx: &AppContext,
.order_by_asc(tags::Column::Id) slug: &str,
.all(&ctx.db) ) -> Result<(String, String)> {
.await?; let post = posts::Entity::find()
.filter(posts::Column::Slug.eq(slug))
let mut seen = std::collections::HashSet::new(); .one(&ctx.db)
.await?
for tag in existing_tags { .ok_or(Error::NotFound)?;
let key = if tag.slug.trim().is_empty() { let document = markdown_document_from_model(&post)?;
tag.name.as_deref().map(slugify).unwrap_or_default() Ok((
} else { virtual_markdown_document_path(&document.post.slug),
slugify(&tag.slug) document.raw_markdown,
}; ))
if key.is_empty() || seen.insert(key) {
continue;
}
let _ = tag.delete(&ctx.db).await;
}
Ok(())
} }
async fn dedupe_categories(ctx: &AppContext) -> Result<()> { pub async fn upsert_markdown_document(
let existing_categories = categories::Entity::find() ctx: &AppContext,
.order_by_asc(categories::Column::Id) slug_hint: Option<&str>,
.all(&ctx.db) markdown: &str,
.await?; ) -> Result<MarkdownPost> {
let normalized_markdown = normalize_newlines(markdown);
let normalized_slug_hint = slug_hint
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let file_stem = normalized_slug_hint
.as_deref()
.filter(|value| !value.is_empty())
.unwrap_or("post");
let virtual_path = normalized_slug_hint
.as_deref()
.map(virtual_markdown_document_path)
.unwrap_or_else(|| format!("{VIRTUAL_MARKDOWN_PATH_PREFIX}/draft"));
let post = parse_markdown_source(file_stem, &normalized_markdown, &virtual_path)?;
let mut seen = std::collections::HashSet::new(); save_markdown_post_to_store(ctx, post, normalized_slug_hint.as_deref(), true).await
for category in existing_categories {
let key = if category.slug.trim().is_empty() {
category.name.as_deref().map(slugify).unwrap_or_default()
} else {
slugify(&category.slug)
};
if key.is_empty() || seen.insert(key) {
continue;
}
let _ = category.delete(&ctx.db).await;
}
Ok(())
}
pub async fn sync_markdown_posts(ctx: &AppContext) -> Result<Vec<MarkdownPost>> {
let markdown_posts = load_markdown_posts_from_disk()?;
let markdown_slugs = markdown_posts
.iter()
.map(|post| post.slug.clone())
.collect::<std::collections::HashSet<_>>();
let existing_posts = posts::Entity::find().all(&ctx.db).await?;
for stale_post in existing_posts
.into_iter()
.filter(|post| !markdown_slugs.contains(&post.slug))
{
let stale_slug = stale_post.slug.clone();
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(&stale_slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
}
let _ = stale_post.delete(&ctx.db).await;
}
for post in &markdown_posts {
let canonical_category = match post.category.as_deref() {
Some(category) => ensure_category(ctx, category).await?,
None => None,
};
let canonical_tags = canonicalize_tags(ctx, &post.tags).await?;
let existing = posts::Entity::find()
.filter(posts::Column::Slug.eq(&post.slug))
.one(&ctx.db)
.await?;
let has_existing = existing.is_some();
let mut model = existing
.map(|item| item.into_active_model())
.unwrap_or_default();
model.title = Set(Some(post.title.clone()));
model.slug = Set(post.slug.clone());
model.description = Set(post.description.clone());
model.content = Set(Some(post.content.clone()));
model.category = Set(canonical_category);
model.tags = Set(if canonical_tags.is_empty() {
None
} else {
Some(Value::Array(
canonical_tags.into_iter().map(Value::String).collect(),
))
});
model.post_type = Set(Some(post.post_type.clone()));
model.image = Set(post.image.clone());
model.images = Set(if post.images.is_empty() {
None
} else {
Some(Value::Array(
post.images
.iter()
.cloned()
.map(Value::String)
.collect::<Vec<_>>(),
))
});
model.pinned = Set(Some(post.pinned));
model.status = Set(Some(normalize_post_status(Some(&post.status))));
model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
model.noindex = Set(Some(post.noindex));
model.og_image = Set(normalize_url_like(post.og_image.clone()));
model.redirect_from = Set(if post.redirect_from.is_empty() {
None
} else {
Some(Value::Array(
post.redirect_from
.iter()
.cloned()
.map(Value::String)
.collect::<Vec<_>>(),
))
});
model.redirect_to = Set(
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string()),
);
if has_existing {
let _ = model.update(&ctx.db).await;
} else {
let _ = model.insert(&ctx.db).await;
}
}
sync_tags_from_posts(ctx, &markdown_posts).await?;
dedupe_tags(ctx).await?;
dedupe_categories(ctx).await?;
Ok(markdown_posts)
}
pub fn read_markdown_document(slug: &str) -> Result<(String, String)> {
let path = markdown_post_path(slug);
if !path.exists() {
return Err(Error::NotFound);
}
let raw = fs::read_to_string(&path).map_err(io_error)?;
Ok((path.to_string_lossy().to_string(), raw))
} }
pub async fn write_markdown_document( pub async fn write_markdown_document(
@@ -1066,24 +1021,25 @@ pub async fn write_markdown_document(
slug: &str, slug: &str,
markdown: &str, markdown: &str,
) -> Result<MarkdownPost> { ) -> Result<MarkdownPost> {
ensure_markdown_posts_bootstrapped()?; upsert_markdown_document(ctx, Some(slug), markdown).await
let path = markdown_post_path(slug);
fs::write(&path, normalize_newlines(markdown)).map_err(io_error)?;
let updated = parse_markdown_post(&path)?;
sync_markdown_posts(ctx).await?;
Ok(updated)
} }
pub async fn delete_markdown_post(ctx: &AppContext, slug: &str) -> Result<()> { pub async fn delete_markdown_post(ctx: &AppContext, slug: &str) -> Result<()> {
ensure_markdown_posts_bootstrapped()?; let post = posts::Entity::find()
let path = markdown_post_path(slug); .filter(posts::Column::Slug.eq(slug))
if !path.exists() { .one(&ctx.db)
return Err(Error::NotFound); .await?
.ok_or(Error::NotFound)?;
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
} }
fs::remove_file(&path).map_err(io_error)?; post.delete(&ctx.db).await?;
sync_markdown_posts(ctx).await?;
Ok(()) Ok(())
} }
@@ -1091,8 +1047,6 @@ pub async fn create_markdown_post(
ctx: &AppContext, ctx: &AppContext,
draft: MarkdownPostDraft, draft: MarkdownPostDraft,
) -> Result<MarkdownPost> { ) -> Result<MarkdownPost> {
ensure_markdown_posts_bootstrapped()?;
let title = draft.title.trim().to_string(); let title = draft.title.trim().to_string();
if title.is_empty() { if title.is_empty() {
return Err(Error::BadRequest("title is required".to_string())); return Err(Error::BadRequest("title is required".to_string()));
@@ -1110,6 +1064,17 @@ pub async fn create_markdown_post(
return Err(Error::BadRequest("slug is required".to_string())); return Err(Error::BadRequest("slug is required".to_string()));
} }
if posts::Entity::find()
.filter(posts::Column::Slug.eq(&slug))
.one(&ctx.db)
.await?
.is_some()
{
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {slug}"
)));
}
let post = MarkdownPost { let post = MarkdownPost {
title, title,
slug: slug.clone(), slug: slug.clone(),
@@ -1143,28 +1108,16 @@ pub async fn create_markdown_post(
redirect_from: normalize_redirect_list(Some(draft.redirect_from)), redirect_from: normalize_redirect_list(Some(draft.redirect_from)),
redirect_to: trim_to_option(draft.redirect_to) redirect_to: trim_to_option(draft.redirect_to)
.map(|item| item.trim_matches('/').to_string()), .map(|item| item.trim_matches('/').to_string()),
file_path: markdown_post_path(&slug).to_string_lossy().to_string(), file_path: virtual_markdown_document_path(&slug),
}; };
save_markdown_post_to_store(ctx, post, Some(&slug), true).await
let path = markdown_post_path(&slug);
if path.exists() {
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {slug}"
)));
}
fs::write(&path, build_markdown_document(&post)).map_err(io_error)?;
sync_markdown_posts(ctx).await?;
parse_markdown_post(&path)
} }
pub async fn import_markdown_documents( pub async fn import_markdown_documents(
ctx: &AppContext, ctx: &AppContext,
files: Vec<MarkdownImportFile>, files: Vec<MarkdownImportFile>,
) -> Result<Vec<MarkdownPost>> { ) -> Result<Vec<MarkdownPost>> {
ensure_markdown_posts_bootstrapped()?; let mut imported = Vec::new();
let mut imported_slugs = Vec::new();
for file in files { for file in files {
let path = Path::new(&file.file_name); let path = Path::new(&file.file_name);
@@ -1194,15 +1147,8 @@ pub async fn import_markdown_documents(
continue; continue;
} }
fs::write(markdown_post_path(&slug), normalize_newlines(&file.content)) imported.push(upsert_markdown_document(ctx, Some(&slug), &file.content).await?);
.map_err(io_error)?;
imported_slugs.push(slug);
} }
sync_markdown_posts(ctx).await?; Ok(imported)
imported_slugs
.into_iter()
.map(|slug| parse_markdown_post(&markdown_post_path(&slug)))
.collect()
} }

View File

@@ -2,7 +2,6 @@ use loco_rs::prelude::*;
use sea_orm::{ use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, Order, QueryFilter, QueryOrder, QuerySelect, Set, ActiveModelTrait, ColumnTrait, EntityTrait, Order, QueryFilter, QueryOrder, QuerySelect, Set,
}; };
use std::fs;
use crate::{ use crate::{
controllers::admin::AdminIdentity, controllers::admin::AdminIdentity,
@@ -48,10 +47,10 @@ fn trim_to_option(value: Option<String>) -> Option<String> {
fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> { fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
let normalized = markdown.replace("\r\n", "\n"); let normalized = markdown.replace("\r\n", "\n");
if let Some(frontmatter) = normalized if let Some(frontmatter) = normalized.strip_prefix("---\n").and_then(|rest| {
.strip_prefix("---\n") rest.split_once("\n---\n")
.and_then(|rest| rest.split_once("\n---\n").map(|(frontmatter, _)| frontmatter)) .map(|(frontmatter, _)| frontmatter)
{ }) {
for line in frontmatter.lines() { for line in frontmatter.lines() {
let trimmed = line.trim(); let trimmed = line.trim();
if let Some(raw) = trimmed.strip_prefix("title:") { if let Some(raw) = trimmed.strip_prefix("title:") {
@@ -63,14 +62,16 @@ fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
} }
} }
normalized.lines().find_map(|line| { normalized
line.trim() .lines()
.strip_prefix("# ") .find_map(|line| {
.map(str::trim) line.trim()
.filter(|value| !value.is_empty()) .strip_prefix("# ")
.map(ToString::to_string) .map(str::trim)
}) .filter(|value| !value.is_empty())
.or_else(|| trim_to_option(Some(slug.to_string()))) .map(ToString::to_string)
})
.or_else(|| trim_to_option(Some(slug.to_string())))
} }
async fn lookup_post_title(ctx: &AppContext, slug: &str) -> Option<String> { async fn lookup_post_title(ctx: &AppContext, slug: &str) -> Option<String> {
@@ -122,7 +123,7 @@ pub async fn capture_current_snapshot(
reason: Option<&str>, reason: Option<&str>,
metadata: Option<serde_json::Value>, metadata: Option<serde_json::Value>,
) -> Result<Option<post_revisions::Model>> { ) -> Result<Option<post_revisions::Model>> {
let Ok((_path, markdown)) = content::read_markdown_document(slug) else { let Ok((_path, markdown)) = content::read_markdown_document_from_store(ctx, slug).await else {
return Ok(None); return Ok(None);
}; };
@@ -136,17 +137,14 @@ pub async fn list_revisions(
slug: Option<&str>, slug: Option<&str>,
limit: u64, limit: u64,
) -> Result<Vec<post_revisions::Model>> { ) -> Result<Vec<post_revisions::Model>> {
let mut query = post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc); let mut query =
post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
if let Some(slug) = slug.map(str::trim).filter(|value| !value.is_empty()) { if let Some(slug) = slug.map(str::trim).filter(|value| !value.is_empty()) {
query = query.filter(post_revisions::Column::PostSlug.eq(slug)); query = query.filter(post_revisions::Column::PostSlug.eq(slug));
} }
query query.limit(limit).all(&ctx.db).await.map_err(Into::into)
.limit(limit)
.all(&ctx.db)
.await
.map_err(Into::into)
} }
pub async fn get_revision(ctx: &AppContext, id: i32) -> Result<post_revisions::Model> { pub async fn get_revision(ctx: &AppContext, id: i32) -> Result<post_revisions::Model> {
@@ -187,13 +185,18 @@ pub async fn restore_revision(
let markdown = match restore_mode { let markdown = match restore_mode {
RestoreMode::Full => revision_markdown.clone(), RestoreMode::Full => revision_markdown.clone(),
RestoreMode::Markdown | RestoreMode::Metadata => { RestoreMode::Markdown | RestoreMode::Metadata => {
let (_path, current_markdown) = content::read_markdown_document(&slug).map_err(|_| { let (_path, current_markdown) = content::read_markdown_document_from_store(ctx, &slug)
Error::BadRequest("当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string()) .await
})?; .map_err(|_| {
Error::BadRequest(
"当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string(),
)
})?;
let virtual_path = content::virtual_markdown_document_path(&slug);
let revision_post = let revision_post =
content::parse_markdown_source(&slug, &revision_markdown, &content::markdown_post_path(&slug).to_string_lossy())?; content::parse_markdown_source(&slug, &revision_markdown, &virtual_path)?;
let current_post = let current_post =
content::parse_markdown_source(&slug, &current_markdown, &content::markdown_post_path(&slug).to_string_lossy())?; content::parse_markdown_source(&slug, &current_markdown, &virtual_path)?;
let mut merged = current_post.clone(); let mut merged = current_post.clone();
match restore_mode { match restore_mode {
RestoreMode::Markdown => { RestoreMode::Markdown => {
@@ -224,10 +227,7 @@ pub async fn restore_revision(
} }
}; };
fs::create_dir_all(content::MARKDOWN_POSTS_DIR).map_err(|error| Error::BadRequest(error.to_string()))?; content::write_markdown_document(ctx, &slug, &markdown).await?;
fs::write(content::markdown_post_path(&slug), markdown.replace("\r\n", "\n"))
.map_err(|error| Error::BadRequest(error.to_string()))?;
content::sync_markdown_posts(ctx).await?;
let _ = capture_snapshot_from_markdown( let _ = capture_snapshot_from_markdown(
ctx, ctx,

View File

@@ -20,6 +20,27 @@ pub enum TurnstileScope {
Subscription, Subscription,
} }
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerificationMode {
Off,
Captcha,
Turnstile,
}
pub const VERIFICATION_MODE_OFF: &str = "off";
pub const VERIFICATION_MODE_CAPTCHA: &str = "captcha";
pub const VERIFICATION_MODE_TURNSTILE: &str = "turnstile";
impl VerificationMode {
pub const fn as_str(self) -> &'static str {
match self {
Self::Off => VERIFICATION_MODE_OFF,
Self::Captcha => VERIFICATION_MODE_CAPTCHA,
Self::Turnstile => VERIFICATION_MODE_TURNSTILE,
}
}
}
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Deserialize)]
struct TurnstileVerifyResponse { struct TurnstileVerifyResponse {
success: bool, success: bool,
@@ -56,6 +77,15 @@ fn configured_value(value: Option<&String>) -> Option<String> {
}) })
} }
pub fn normalize_verification_mode(value: Option<&str>) -> Option<VerificationMode> {
match value?.trim().to_ascii_lowercase().as_str() {
VERIFICATION_MODE_OFF => Some(VerificationMode::Off),
VERIFICATION_MODE_CAPTCHA | "normal" | "simple" => Some(VerificationMode::Captcha),
VERIFICATION_MODE_TURNSTILE => Some(VerificationMode::Turnstile),
_ => None,
}
}
fn normalize_ip(value: Option<&str>) -> Option<String> { fn normalize_ip(value: Option<&str>) -> Option<String> {
trim_to_option(value).map(|item| item.chars().take(96).collect::<String>()) trim_to_option(value).map(|item| item.chars().take(96).collect::<String>())
} }
@@ -89,17 +119,48 @@ pub fn secret_key_configured(settings: &site_settings::Model) -> bool {
secret_key(settings).is_some() secret_key(settings).is_some()
} }
fn scope_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool { fn legacy_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
match scope { match scope {
TurnstileScope::Comment => settings.comment_turnstile_enabled.unwrap_or(false), TurnstileScope::Comment => {
TurnstileScope::Subscription => settings.subscription_turnstile_enabled.unwrap_or(false), if settings.comment_turnstile_enabled.unwrap_or(false) {
VerificationMode::Turnstile
} else {
VerificationMode::Captcha
}
}
TurnstileScope::Subscription => {
if settings.subscription_turnstile_enabled.unwrap_or(false) {
VerificationMode::Turnstile
} else {
VerificationMode::Off
}
}
}
}
pub fn selected_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
let configured = match scope {
TurnstileScope::Comment => settings.comment_verification_mode.as_deref(),
TurnstileScope::Subscription => settings.subscription_verification_mode.as_deref(),
};
normalize_verification_mode(configured).unwrap_or_else(|| legacy_mode(settings, scope))
}
pub fn effective_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
match selected_mode(settings, scope) {
VerificationMode::Turnstile
if site_key_configured(settings) && secret_key_configured(settings) =>
{
VerificationMode::Turnstile
}
VerificationMode::Turnstile => VerificationMode::Captcha,
mode => mode,
} }
} }
pub fn is_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool { pub fn is_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
scope_enabled(settings, scope) effective_mode(settings, scope) == VerificationMode::Turnstile
&& site_key_configured(settings)
&& secret_key_configured(settings)
} }
pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Result<bool> { pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Result<bool> {
@@ -107,7 +168,7 @@ pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Resu
Ok(is_enabled(&settings, scope)) Ok(is_enabled(&settings, scope))
} }
async fn verify_token( pub async fn verify_token(
settings: &site_settings::Model, settings: &site_settings::Model,
token: Option<&str>, token: Option<&str>,
client_ip: Option<&str>, client_ip: Option<&str>,
@@ -173,7 +234,7 @@ pub async fn verify_if_enabled(
client_ip: Option<&str>, client_ip: Option<&str>,
) -> Result<bool> { ) -> Result<bool> {
let settings = crate::controllers::site_settings::load_current(ctx).await?; let settings = crate::controllers::site_settings::load_current(ctx).await?;
if !is_enabled(&settings, scope) { if effective_mode(&settings, scope) != VerificationMode::Turnstile {
return Ok(false); return Ok(false);
} }

View File

@@ -204,10 +204,8 @@ Caddy -> frontend Node server
当前仓库内已经补了: 当前仓库内已经补了:
- `deploy/scripts/backup/backup-postgres.sh` - `deploy/scripts/backup/backup-postgres.sh`
- `deploy/scripts/backup/backup-markdown.sh`
- `deploy/scripts/backup/backup-media.sh` - `deploy/scripts/backup/backup-media.sh`
- `deploy/scripts/backup/restore-postgres.sh` - `deploy/scripts/backup/restore-postgres.sh`
- `deploy/scripts/backup/restore-markdown.sh`
- `deploy/scripts/backup/restore-media.sh` - `deploy/scripts/backup/restore-media.sh`
- `deploy/docker/BACKUP_AND_RECOVERY.md` - `deploy/docker/BACKUP_AND_RECOVERY.md`

View File

@@ -1,13 +1,22 @@
# 备份与恢复说明 # 备份与恢复说明
这套博客现在已经有 当前站点的内容已经是 **DB-only**
- PostgreSQL 数据库 - PostgreSQL 数据库
- Markdown 原文内容 - 文章结构化字段
- 文章 Markdown 原文(`posts.source_markdown`
- 分类 / 标签
- 版本历史 / 审计日志 / 订阅数据
- 站点配置
- 媒体文件 / 对象存储 - 媒体文件 / 对象存储
- 版本历史 / 审计日志 / 订阅数据
所以生产上最重要的不是再多一两个功能,而是**出事后能不能快速恢复**。 因此生产上最重要的是:
1. **数据库备份**
2. **媒体资源备份**
3. 定期做恢复演练
> 不再需要单独备份 `backend/content/posts` 之类的本地 Markdown 目录。
## 1. 建议的最小备份策略 ## 1. 建议的最小备份策略
@@ -15,11 +24,7 @@
- **频率**:每天至少 1 次;高频站点建议每 6~12 小时 1 次 - **频率**:每天至少 1 次;高频站点建议每 6~12 小时 1 次
- **工具**`pg_dump --format=custom` - **工具**`pg_dump --format=custom`
- **脚本**`deploy/scripts/backup/backup-postgres.sh` - **脚本**`deploy/scripts/backup/backup-postgres.sh`
- **说明**:文章内容原文已经跟随数据库一起备份
### Markdown 原文
- **频率**:每次发布后 + 每天定时 1 次
- **脚本**`deploy/scripts/backup/backup-markdown.sh`
- **原因**Markdown 是内容源,恢复速度最快
### 媒体文件 ### 媒体文件
- 如果是本地目录:打包归档 - 如果是本地目录:打包归档
@@ -35,9 +40,6 @@
# 单独备份数据库 # 单独备份数据库
DATABASE_URL=postgres://... ./deploy/scripts/backup/backup-postgres.sh DATABASE_URL=postgres://... ./deploy/scripts/backup/backup-postgres.sh
# 单独备份 Markdown
MARKDOWN_SOURCE_DIR=./backend/content/posts ./deploy/scripts/backup/backup-markdown.sh
# 单独备份媒体(本地目录) # 单独备份媒体(本地目录)
MEDIA_SOURCE_DIR=./uploads ./deploy/scripts/backup/backup-media.sh MEDIA_SOURCE_DIR=./uploads ./deploy/scripts/backup/backup-media.sh
@@ -53,12 +55,6 @@ MEDIA_S3_SOURCE=s3://bucket-name ./deploy/scripts/backup/backup-media.sh
DATABASE_URL=postgres://... ./deploy/scripts/backup/restore-postgres.sh ./backups/postgres/latest.dump DATABASE_URL=postgres://... ./deploy/scripts/backup/restore-postgres.sh ./backups/postgres/latest.dump
``` ```
### 恢复 Markdown
```bash
MARKDOWN_TARGET_DIR=./backend/content/posts ./deploy/scripts/backup/restore-markdown.sh ./backups/markdown/latest.tar.gz
```
### 恢复媒体 ### 恢复媒体
```bash ```bash
@@ -75,9 +71,6 @@ MEDIA_S3_TARGET=s3://bucket-name ./deploy/scripts/backup/restore-media.sh ./back
# 每天 03:10 备份 PostgreSQL # 每天 03:10 备份 PostgreSQL
10 3 * * * cd /opt/termi-astro && DATABASE_URL=postgres://... ./deploy/scripts/backup/backup-postgres.sh >> /var/log/termi-backup.log 2>&1 10 3 * * * cd /opt/termi-astro && DATABASE_URL=postgres://... ./deploy/scripts/backup/backup-postgres.sh >> /var/log/termi-backup.log 2>&1
# 每天 03:25 备份 Markdown
25 3 * * * cd /opt/termi-astro && MARKDOWN_SOURCE_DIR=./backend/content/posts ./deploy/scripts/backup/backup-markdown.sh >> /var/log/termi-backup.log 2>&1
# 每天 03:40 备份媒体 # 每天 03:40 备份媒体
40 3 * * * cd /opt/termi-astro && MEDIA_S3_SOURCE=s3://bucket-name ./deploy/scripts/backup/backup-media.sh >> /var/log/termi-backup.log 2>&1 40 3 * * * cd /opt/termi-astro && MEDIA_S3_SOURCE=s3://bucket-name ./deploy/scripts/backup/backup-media.sh >> /var/log/termi-backup.log 2>&1
@@ -88,7 +81,7 @@ MEDIA_S3_TARGET=s3://bucket-name ./deploy/scripts/backup/restore-media.sh ./back
40 4 * * * cd /opt/termi-astro && OFFSITE_TARGET=/mnt/offsite/termi-astro-backups ./deploy/scripts/backup/sync-backups-offsite.sh >> /var/log/termi-backup.log 2>&1 40 4 * * * cd /opt/termi-astro && OFFSITE_TARGET=/mnt/offsite/termi-astro-backups ./deploy/scripts/backup/sync-backups-offsite.sh >> /var/log/termi-backup.log 2>&1
``` ```
## 5. 建议你们再加一层异地备份 ## 5. 建议再加一层异地备份
仅仅把备份留在同一台服务器上不够。 仅仅把备份留在同一台服务器上不够。
@@ -101,9 +94,8 @@ MEDIA_S3_TARGET=s3://bucket-name ./deploy/scripts/backup/restore-media.sh ./back
建议每个月至少做 1 次演练: 建议每个月至少做 1 次演练:
1. 用最新数据库备份恢复到临时环境 1. 用最新数据库备份恢复到临时环境
2. Markdown 备份恢复内容目录 2.媒体备份恢复对象
3. 用媒体备份恢复对象 3. 校验:
4. 校验:
- 首页可打开 - 首页可打开
- 文章详情可打开 - 文章详情可打开
- 图片可访问 - 图片可访问
@@ -115,7 +107,6 @@ MEDIA_S3_TARGET=s3://bucket-name ./deploy/scripts/backup/restore-media.sh ./back
```bash ```bash
DATABASE_URL=postgres://... \ DATABASE_URL=postgres://... \
POSTGRES_BACKUP=./backups/postgres/latest.dump \ POSTGRES_BACKUP=./backups/postgres/latest.dump \
MARKDOWN_BACKUP=./backups/markdown/latest.tar.gz \
MEDIA_BACKUP=./backups/media/latest.tar.gz \ MEDIA_BACKUP=./backups/media/latest.tar.gz \
./deploy/scripts/backup/verify-restore.sh ./deploy/scripts/backup/verify-restore.sh
``` ```
@@ -125,17 +116,16 @@ MEDIA_BACKUP=./backups/media/latest.tar.gz \
发生事故时建议按这个顺序: 发生事故时建议按这个顺序:
1. 恢复数据库 1. 恢复数据库
2. 恢复 Markdown 原文 2. 恢复媒体资源
3. 恢复媒体资源 3. 启动 backend / frontend / admin
4. 启动 backend / frontend / admin 4. 进入后台检查:
5. 进入后台检查:
- 审计日志 - 审计日志
- 文章版本历史 - 文章版本历史
- 订阅目标与最近投递 - 订阅目标与最近投递
## 8. 说明 ## 8. 说明
这些脚本是**仓库内参考实现**,没有在你们生产机上自动执行。 这些脚本是**仓库内参考实现**,没有在生产机上自动执行。
正式上线前请按你们实际目录、R2/S3 桶、数据库连接串、cron 规范再过一遍。 正式上线前请按你们实际目录、R2/S3 桶、数据库连接串、cron 规范再过一遍。
另外仓库里已经提供: 另外仓库里已经提供:

View File

@@ -4,7 +4,6 @@ set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
"${SCRIPT_DIR}/backup-postgres.sh" "${SCRIPT_DIR}/backup-postgres.sh"
"${SCRIPT_DIR}/backup-markdown.sh"
"${SCRIPT_DIR}/backup-media.sh" "${SCRIPT_DIR}/backup-media.sh"
echo "All backup jobs finished successfully." echo "All backup jobs finished successfully."

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
SOURCE_DIR="${MARKDOWN_SOURCE_DIR:-./backend/content/posts}"
BACKUP_DIR="${BACKUP_DIR:-./backups/markdown}"
RETENTION_DAYS="${RETENTION_DAYS:-30}"
TIMESTAMP="$(date -u +%Y%m%dT%H%M%SZ)"
FILE_PATH="${BACKUP_DIR}/markdown-${TIMESTAMP}.tar.gz"
if [[ ! -d "${SOURCE_DIR}" ]]; then
echo "Markdown source directory not found: ${SOURCE_DIR}" >&2
exit 1
fi
mkdir -p "${BACKUP_DIR}"
tar -czf "${FILE_PATH}" -C "${SOURCE_DIR}" .
ln -sfn "$(basename "${FILE_PATH}")" "${BACKUP_DIR}/latest.tar.gz"
find "${BACKUP_DIR}" -type f -name 'markdown-*.tar.gz' -mtime +"${RETENTION_DAYS}" -delete
echo "Markdown backup written to ${FILE_PATH}"

View File

@@ -3,7 +3,6 @@ set -euo pipefail
BACKUP_ROOT="${BACKUP_ROOT:-./backups}" BACKUP_ROOT="${BACKUP_ROOT:-./backups}"
POSTGRES_RETENTION_DAYS="${POSTGRES_RETENTION_DAYS:-14}" POSTGRES_RETENTION_DAYS="${POSTGRES_RETENTION_DAYS:-14}"
MARKDOWN_RETENTION_DAYS="${MARKDOWN_RETENTION_DAYS:-30}"
MEDIA_RETENTION_DAYS="${MEDIA_RETENTION_DAYS:-14}" MEDIA_RETENTION_DAYS="${MEDIA_RETENTION_DAYS:-14}"
DRY_RUN="${DRY_RUN:-false}" DRY_RUN="${DRY_RUN:-false}"
@@ -42,7 +41,6 @@ prune_dirs() {
} }
prune "${BACKUP_ROOT}/postgres" 'postgres-*.dump' "${POSTGRES_RETENTION_DAYS}" prune "${BACKUP_ROOT}/postgres" 'postgres-*.dump' "${POSTGRES_RETENTION_DAYS}"
prune "${BACKUP_ROOT}/markdown" 'markdown-*.tar.gz' "${MARKDOWN_RETENTION_DAYS}"
prune "${BACKUP_ROOT}/media" 'media-*.tar.gz' "${MEDIA_RETENTION_DAYS}" prune "${BACKUP_ROOT}/media" 'media-*.tar.gz' "${MEDIA_RETENTION_DAYS}"
prune_dirs "${BACKUP_ROOT}/media" 'media-*' "${MEDIA_RETENTION_DAYS}" prune_dirs "${BACKUP_ROOT}/media" 'media-*' "${MEDIA_RETENTION_DAYS}"

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
if [[ $# -lt 1 ]]; then
echo "Usage: $0 <backup-file.tar.gz>" >&2
exit 1
fi
TARGET_DIR="${MARKDOWN_TARGET_DIR:-./backend/content/posts}"
BACKUP_FILE="$1"
if [[ ! -f "${BACKUP_FILE}" ]]; then
echo "Backup file not found: ${BACKUP_FILE}" >&2
exit 1
fi
mkdir -p "${TARGET_DIR}"
rm -rf "${TARGET_DIR}"/*
tar -xzf "${BACKUP_FILE}" -C "${TARGET_DIR}"
echo "Markdown restore completed into ${TARGET_DIR}"

View File

@@ -3,15 +3,12 @@ set -euo pipefail
: "${DATABASE_URL:?DATABASE_URL is required}" : "${DATABASE_URL:?DATABASE_URL is required}"
: "${POSTGRES_BACKUP:?POSTGRES_BACKUP is required}" : "${POSTGRES_BACKUP:?POSTGRES_BACKUP is required}"
: "${MARKDOWN_BACKUP:?MARKDOWN_BACKUP is required}"
: "${MEDIA_BACKUP:?MEDIA_BACKUP is required}" : "${MEDIA_BACKUP:?MEDIA_BACKUP is required}"
POSTGRES_RESTORE_CMD="${POSTGRES_RESTORE_CMD:-./deploy/scripts/backup/restore-postgres.sh}" POSTGRES_RESTORE_CMD="${POSTGRES_RESTORE_CMD:-./deploy/scripts/backup/restore-postgres.sh}"
MARKDOWN_RESTORE_CMD="${MARKDOWN_RESTORE_CMD:-./deploy/scripts/backup/restore-markdown.sh}"
MEDIA_RESTORE_CMD="${MEDIA_RESTORE_CMD:-./deploy/scripts/backup/restore-media.sh}" MEDIA_RESTORE_CMD="${MEDIA_RESTORE_CMD:-./deploy/scripts/backup/restore-media.sh}"
"${POSTGRES_RESTORE_CMD}" "${POSTGRES_BACKUP}" "${POSTGRES_RESTORE_CMD}" "${POSTGRES_BACKUP}"
"${MARKDOWN_RESTORE_CMD}" "${MARKDOWN_BACKUP}"
"${MEDIA_RESTORE_CMD}" "${MEDIA_BACKUP}" "${MEDIA_RESTORE_CMD}" "${MEDIA_BACKUP}"
echo "Restore rehearsal completed. Please verify homepage, article detail, media assets, admin login, revisions, audit logs, and subscriptions manually." echo "Restore rehearsal completed. Please verify homepage, article detail, media assets, admin login, revisions, audit logs, and subscriptions manually."

View File

@@ -17,7 +17,8 @@ interface Props {
const { postSlug, class: className = '', siteSettings } = Astro.props as Props; const { postSlug, class: className = '', siteSettings } = Astro.props as Props;
const { locale, t } = getI18n(Astro); const { locale, t } = getI18n(Astro);
const publicApiBaseUrl = resolvePublicApiBaseUrl(Astro.url); const publicApiBaseUrl = resolvePublicApiBaseUrl(Astro.url);
const turnstileSiteKey = siteSettings.comments.turnstileEnabled const commentVerificationMode = siteSettings.comments.verificationMode;
const turnstileSiteKey = commentVerificationMode === 'turnstile'
? siteSettings.comments.turnstileSiteKey || resolvePublicCommentTurnstileSiteKey() ? siteSettings.comments.turnstileSiteKey || resolvePublicCommentTurnstileSiteKey()
: ''; : '';
@@ -49,6 +50,7 @@ function formatCommentDate(dateStr: string): string {
class={`terminal-comments ${className}`} class={`terminal-comments ${className}`}
data-post-slug={postSlug} data-post-slug={postSlug}
data-api-base={publicApiBaseUrl} data-api-base={publicApiBaseUrl}
data-verification-mode={commentVerificationMode}
data-turnstile-site-key={turnstileSiteKey || undefined} data-turnstile-site-key={turnstileSiteKey || undefined}
> >
<div class="flex flex-col gap-4 sm:flex-row sm:items-start sm:justify-between"> <div class="flex flex-col gap-4 sm:flex-row sm:items-start sm:justify-between">
@@ -126,42 +128,44 @@ function formatCommentDate(dateStr: string): string {
</label> </label>
</div> </div>
<div class="rounded-2xl border border-[var(--border-color)] bg-[var(--header-bg)]/60 px-4 py-3"> {commentVerificationMode !== 'off' && (
<div class="flex flex-wrap items-center justify-between gap-2"> <div class="rounded-2xl border border-[var(--border-color)] bg-[var(--header-bg)]/60 px-4 py-3">
<p class="text-xs font-semibold uppercase tracking-[0.18em] text-[var(--text-tertiary)]"> <div class="flex flex-wrap items-center justify-between gap-2">
{t('common.humanVerification')} <p class="text-xs font-semibold uppercase tracking-[0.18em] text-[var(--text-tertiary)]">
</p> {t('common.humanVerification')}
{turnstileSiteKey ? ( </p>
<span class="text-xs text-[var(--text-tertiary)]">Cloudflare Turnstile</span> {commentVerificationMode === 'turnstile' ? (
<span class="text-xs text-[var(--text-tertiary)]">Cloudflare Turnstile</span>
) : (
<button type="button" id="refresh-captcha" class="terminal-action-button px-3 py-2 text-xs">
<i class="fas fa-rotate-right"></i>
<span>{t('common.refresh')}</span>
</button>
)}
</div>
{commentVerificationMode === 'turnstile' ? (
<>
<div class="mt-3" data-turnstile-container></div>
<input type="hidden" name="turnstileToken" />
<p class="mt-3 text-sm text-[var(--text-secondary)]">{t('common.turnstileHint')}</p>
</>
) : ( ) : (
<button type="button" id="refresh-captcha" class="terminal-action-button px-3 py-2 text-xs"> <>
<i class="fas fa-rotate-right"></i> <p id="captcha-question" class="mt-2 text-sm text-[var(--text-secondary)]">加载中...</p>
<span>{t('common.refresh')}</span> <input type="hidden" name="captchaToken" />
</button> <input
type="text"
name="captchaAnswer"
required
inputmode="numeric"
placeholder="请输入上方答案"
class="mt-3 terminal-form-input"
/>
</>
)} )}
</div> </div>
)}
{turnstileSiteKey ? (
<>
<div class="mt-3" data-turnstile-container></div>
<input type="hidden" name="turnstileToken" />
<p class="mt-3 text-sm text-[var(--text-secondary)]">{t('common.turnstileHint')}</p>
</>
) : (
<>
<p id="captcha-question" class="mt-2 text-sm text-[var(--text-secondary)]">加载中...</p>
<input type="hidden" name="captchaToken" />
<input
type="text"
name="captchaAnswer"
required
inputmode="numeric"
placeholder="请输入上方答案"
class="mt-3 terminal-form-input"
/>
</>
)}
</div>
<div id="replying-to" class="terminal-panel-muted hidden items-center justify-between gap-3 py-3"> <div id="replying-to" class="terminal-panel-muted hidden items-center justify-between gap-3 py-3">
<span class="text-sm text-[var(--text-secondary)]"> <span class="text-sm text-[var(--text-secondary)]">
@@ -274,6 +278,9 @@ function formatCommentDate(dateStr: string): string {
const refreshCaptchaBtn = document.getElementById('refresh-captcha'); const refreshCaptchaBtn = document.getElementById('refresh-captcha');
const postSlug = wrapper?.getAttribute('data-post-slug') || ''; const postSlug = wrapper?.getAttribute('data-post-slug') || '';
const apiBase = wrapper?.getAttribute('data-api-base') || '/api'; const apiBase = wrapper?.getAttribute('data-api-base') || '/api';
const verificationMode = wrapper?.getAttribute('data-verification-mode') || 'captcha';
const useTurnstile = verificationMode === 'turnstile';
const useCaptcha = verificationMode === 'captcha';
const turnstileSiteKey = wrapper?.getAttribute('data-turnstile-site-key') || ''; const turnstileSiteKey = wrapper?.getAttribute('data-turnstile-site-key') || '';
const turnstileContainer = form?.querySelector('[data-turnstile-container]') as HTMLElement | null; const turnstileContainer = form?.querySelector('[data-turnstile-container]') as HTMLElement | null;
const turnstileTokenInput = form?.querySelector('input[name="turnstileToken"]') as HTMLInputElement | null; const turnstileTokenInput = form?.querySelector('input[name="turnstileToken"]') as HTMLInputElement | null;
@@ -390,13 +397,15 @@ function formatCommentDate(dateStr: string): string {
} }
function resetHumanCheck() { function resetHumanCheck() {
if (turnstileSiteKey) { if (useTurnstile) {
turnstileTokenInput && (turnstileTokenInput.value = ''); turnstileTokenInput && (turnstileTokenInput.value = '');
turnstileWidget?.reset(); turnstileWidget?.reset();
return; return;
} }
void loadCaptcha(false); if (useCaptcha) {
void loadCaptcha(false);
}
} }
toggleBtn?.addEventListener('click', () => { toggleBtn?.addEventListener('click', () => {
@@ -443,7 +452,7 @@ function formatCommentDate(dateStr: string): string {
const formData = new FormData(form); const formData = new FormData(form);
const replyToId = replyingTo?.getAttribute('data-reply-to'); const replyToId = replyingTo?.getAttribute('data-reply-to');
if (turnstileSiteKey) { if (useTurnstile) {
const token = String(formData.get('turnstileToken') || '').trim(); const token = String(formData.get('turnstileToken') || '').trim();
if (!token) { if (!token) {
showMessage(t('common.turnstileRequired'), 'error'); showMessage(t('common.turnstileRequired'), 'error');
@@ -502,9 +511,9 @@ function formatCommentDate(dateStr: string): string {
}); });
}); });
if (turnstileSiteKey) { if (useTurnstile) {
void ensureTurnstile(false); void ensureTurnstile(false);
} else { } else if (useCaptcha) {
void loadCaptcha(false); void loadCaptcha(false);
} }
</script> </script>

View File

@@ -15,7 +15,8 @@ interface Props {
const { postSlug, class: className = '', siteSettings } = Astro.props as Props; const { postSlug, class: className = '', siteSettings } = Astro.props as Props;
const { t } = getI18n(Astro); const { t } = getI18n(Astro);
const publicApiBaseUrl = resolvePublicApiBaseUrl(Astro.url); const publicApiBaseUrl = resolvePublicApiBaseUrl(Astro.url);
const turnstileSiteKey = siteSettings.comments.turnstileEnabled const commentVerificationMode = siteSettings.comments.verificationMode;
const turnstileSiteKey = commentVerificationMode === 'turnstile'
? siteSettings.comments.turnstileSiteKey || resolvePublicCommentTurnstileSiteKey() ? siteSettings.comments.turnstileSiteKey || resolvePublicCommentTurnstileSiteKey()
: ''; : '';
--- ---
@@ -25,6 +26,7 @@ const turnstileSiteKey = siteSettings.comments.turnstileEnabled
data-post-slug={postSlug} data-post-slug={postSlug}
data-api-base={publicApiBaseUrl} data-api-base={publicApiBaseUrl}
data-storage-key={`termi:paragraph-comments:${postSlug}`} data-storage-key={`termi:paragraph-comments:${postSlug}`}
data-verification-mode={commentVerificationMode}
data-turnstile-site-key={turnstileSiteKey || undefined} data-turnstile-site-key={turnstileSiteKey || undefined}
> >
<div class="paragraph-comments-toolbar terminal-panel-muted"> <div class="paragraph-comments-toolbar terminal-panel-muted">
@@ -83,6 +85,9 @@ const turnstileSiteKey = siteSettings.comments.turnstileEnabled
const postSlug = wrapper?.dataset.postSlug || ''; const postSlug = wrapper?.dataset.postSlug || '';
const apiBase = wrapper?.dataset.apiBase || '/api'; const apiBase = wrapper?.dataset.apiBase || '/api';
const storageKey = wrapper?.dataset.storageKey || 'termi:paragraph-comments'; const storageKey = wrapper?.dataset.storageKey || 'termi:paragraph-comments';
const verificationMode = wrapper?.dataset.verificationMode || 'captcha';
const useTurnstile = verificationMode === 'turnstile';
const useCaptcha = verificationMode === 'captcha';
const turnstileSiteKey = wrapper?.dataset.turnstileSiteKey || ''; const turnstileSiteKey = wrapper?.dataset.turnstileSiteKey || '';
const articleRoot = wrapper?.closest('[data-article-slug]') as HTMLElement | null; const articleRoot = wrapper?.closest('[data-article-slug]') as HTMLElement | null;
const articleContent = articleRoot?.querySelector('.article-content') as HTMLElement | null; const articleContent = articleRoot?.querySelector('.article-content') as HTMLElement | null;
@@ -355,35 +360,39 @@ const turnstileSiteKey = siteSettings.comments.turnstileEnabled
</label> </label>
</div> </div>
<div class="rounded-2xl border border-[var(--border-color)] bg-[var(--header-bg)]/60 px-4 py-3"> ${
<div class="flex flex-wrap items-center justify-between gap-2"> useTurnstile || useCaptcha
<p class="text-xs font-semibold uppercase tracking-[0.18em] text-[var(--text-tertiary)]">${escapeHtml(t('common.humanVerification'))}</p> ? `<div class="rounded-2xl border border-[var(--border-color)] bg-[var(--header-bg)]/60 px-4 py-3">
${ <div class="flex flex-wrap items-center justify-between gap-2">
turnstileSiteKey <p class="text-xs font-semibold uppercase tracking-[0.18em] text-[var(--text-tertiary)]">${escapeHtml(t('common.humanVerification'))}</p>
? `<span class="text-xs text-[var(--text-tertiary)]">Cloudflare Turnstile</span>` ${
: `<button type="button" class="terminal-action-button px-3 py-2 text-xs" data-refresh-captcha> useTurnstile
<i class="fas fa-rotate-right"></i> ? `<span class="text-xs text-[var(--text-tertiary)]">Cloudflare Turnstile</span>`
<span>${escapeHtml(t('common.refresh'))}</span> : `<button type="button" class="terminal-action-button px-3 py-2 text-xs" data-refresh-captcha>
</button>` <i class="fas fa-rotate-right"></i>
} <span>${escapeHtml(t('common.refresh'))}</span>
</div> </button>`
${ }
turnstileSiteKey </div>
? `<div class="mt-3" data-turnstile-container></div> ${
<input type="hidden" name="turnstileToken" /> useTurnstile
<p class="mt-3 text-sm text-[var(--text-secondary)]">${escapeHtml(t('common.turnstileHint'))}</p>` ? `<div class="mt-3" data-turnstile-container></div>
: `<p class="mt-2 text-sm text-[var(--text-secondary)]" data-captcha-question>加载中...</p> <input type="hidden" name="turnstileToken" />
<input type="hidden" name="captchaToken" /> <p class="mt-3 text-sm text-[var(--text-secondary)]">${escapeHtml(t('common.turnstileHint'))}</p>`
<input : `<p class="mt-2 text-sm text-[var(--text-secondary)]" data-captcha-question>加载中...</p>
type="text" <input type="hidden" name="captchaToken" />
name="captchaAnswer" <input
required type="text"
inputmode="numeric" name="captchaAnswer"
placeholder="请输入上方答案" required
class="mt-3 terminal-form-input" inputmode="numeric"
/>` placeholder="请输入上方答案"
} class="mt-3 terminal-form-input"
</div> />`
}
</div>`
: ''
}
<div class="flex flex-wrap gap-3"> <div class="flex flex-wrap gap-3">
<button type="submit" class="terminal-action-button terminal-action-button-primary"> <button type="submit" class="terminal-action-button terminal-action-button-primary">
@@ -496,13 +505,15 @@ const turnstileSiteKey = siteSettings.comments.turnstileEnabled
} }
function resetHumanCheck() { function resetHumanCheck() {
if (turnstileSiteKey) { if (useTurnstile) {
turnstileTokenInput && (turnstileTokenInput.value = ''); turnstileTokenInput && (turnstileTokenInput.value = '');
turnstileWidget?.reset(); turnstileWidget?.reset();
return; return;
} }
void loadCaptcha(false); if (useCaptcha) {
void loadCaptcha(false);
}
} }
function resetReplyState() { function resetReplyState() {
@@ -714,11 +725,11 @@ const turnstileSiteKey = siteSettings.comments.turnstileEnabled
descriptor.element.insertAdjacentElement('afterend', panel); descriptor.element.insertAdjacentElement('afterend', panel);
panel.classList.remove('hidden'); panel.classList.remove('hidden');
panel.dataset.paragraphKey = paragraphKey; panel.dataset.paragraphKey = paragraphKey;
if (turnstileSiteKey) { if (useTurnstile) {
if (!turnstileTokenInput?.value) { if (!turnstileTokenInput?.value) {
await ensureTurnstile(false); await ensureTurnstile(false);
} }
} else if (!captchaTokenInput?.value) { } else if (useCaptcha && !captchaTokenInput?.value) {
await loadCaptcha(false); await loadCaptcha(false);
} }
@@ -849,7 +860,7 @@ const turnstileSiteKey = siteSettings.comments.turnstileEnabled
clearStatus(); clearStatus();
setStatus(t('paragraphComments.submitting'), 'info'); setStatus(t('paragraphComments.submitting'), 'info');
if (turnstileSiteKey) { if (useTurnstile) {
const token = String(formData.get('turnstileToken') || '').trim(); const token = String(formData.get('turnstileToken') || '').trim();
if (!token) { if (!token) {
setStatus(t('common.turnstileRequired'), 'error'); setStatus(t('common.turnstileRequired'), 'error');
@@ -958,9 +969,9 @@ const turnstileSiteKey = siteSettings.comments.turnstileEnabled
updateMarkerState(); updateMarkerState();
applyMarkerVisibility(markersVisible, { persist: false }); applyMarkerVisibility(markersVisible, { persist: false });
if (turnstileSiteKey) { if (useTurnstile) {
await ensureTurnstile(false); await ensureTurnstile(false);
} else { } else if (useCaptcha) {
await loadCaptcha(false); await loadCaptcha(false);
} }
await openFromHash(); await openFromHash();

View File

@@ -14,8 +14,10 @@ interface Props {
const { requestUrl, siteSettings } = Astro.props as Props; const { requestUrl, siteSettings } = Astro.props as Props;
const subscribeApiUrl = `${resolvePublicApiBaseUrl(requestUrl)}/subscriptions`; const subscribeApiUrl = `${resolvePublicApiBaseUrl(requestUrl)}/subscriptions`;
const browserPushApiUrl = `${resolvePublicApiBaseUrl(requestUrl)}/subscriptions/browser-push`; const browserPushApiUrl = `${resolvePublicApiBaseUrl(requestUrl)}/subscriptions/browser-push`;
const captchaApiUrl = `${resolvePublicApiBaseUrl(requestUrl)}/comments/captcha`;
const popupSettings = siteSettings.subscriptions; const popupSettings = siteSettings.subscriptions;
const turnstileSiteKey = popupSettings.turnstileEnabled const verificationMode = popupSettings.verificationMode;
const turnstileSiteKey = verificationMode === 'turnstile'
? popupSettings.turnstileSiteKey || resolvePublicCommentTurnstileSiteKey() ? popupSettings.turnstileSiteKey || resolvePublicCommentTurnstileSiteKey()
: ''; : '';
const webPushPublicKey = popupSettings.webPushEnabled const webPushPublicKey = popupSettings.webPushEnabled
@@ -29,7 +31,9 @@ const webPushPublicKey = popupSettings.webPushEnabled
data-subscription-popup-root data-subscription-popup-root
data-api-url={subscribeApiUrl} data-api-url={subscribeApiUrl}
data-browser-push-api-url={browserPushApiUrl} data-browser-push-api-url={browserPushApiUrl}
data-captcha-url={captchaApiUrl}
data-delay-ms={String(Math.max(popupSettings.popupDelaySeconds, 3) * 1000)} data-delay-ms={String(Math.max(popupSettings.popupDelaySeconds, 3) * 1000)}
data-verification-mode={verificationMode}
data-turnstile-site-key={turnstileSiteKey || undefined} data-turnstile-site-key={turnstileSiteKey || undefined}
data-web-push-public-key={webPushPublicKey || undefined} data-web-push-public-key={webPushPublicKey || undefined}
hidden hidden
@@ -137,16 +141,48 @@ const webPushPublicKey = popupSettings.webPushEnabled
/> />
</label> </label>
{turnstileSiteKey && ( {verificationMode !== 'off' && (
<div class="mt-4 rounded-2xl border border-[var(--border-color)] bg-[var(--header-bg)]/60 px-4 py-3"> <div class="mt-4 rounded-2xl border border-[var(--border-color)] bg-[var(--header-bg)]/60 px-4 py-3">
<div class="flex items-center justify-between gap-3"> <div class="flex items-center justify-between gap-3">
<p class="text-xs font-semibold uppercase tracking-[0.18em] text-[var(--text-tertiary)]"> <p class="text-xs font-semibold uppercase tracking-[0.18em] text-[var(--text-tertiary)]">
人机验证 人机验证
</p> </p>
<span class="text-xs text-[var(--text-tertiary)]">Cloudflare Turnstile</span> {verificationMode === 'turnstile' ? (
<span class="text-xs text-[var(--text-tertiary)]">Cloudflare Turnstile</span>
) : (
<button
type="button"
class="terminal-action-button px-3 py-2 text-xs"
data-subscription-popup-refresh-captcha
>
<i class="fas fa-rotate-right"></i>
<span>刷新</span>
</button>
)}
</div> </div>
<div class="mt-3" data-subscription-popup-turnstile></div> {verificationMode === 'turnstile' ? (
<input type="hidden" name="turnstileToken" /> <>
<div class="mt-3" data-subscription-popup-turnstile></div>
<input type="hidden" name="turnstileToken" />
</>
) : (
<>
<p
class="mt-3 text-sm text-[var(--text-secondary)]"
data-subscription-popup-captcha-question
>
加载中...
</p>
<input type="hidden" name="captchaToken" />
<input
type="text"
name="captchaAnswer"
inputmode="numeric"
placeholder="请输入上方答案"
class="mt-3 terminal-form-input"
/>
</>
)}
</div> </div>
)} )}
@@ -199,8 +235,12 @@ const webPushPublicKey = popupSettings.webPushEnabled
const dismissButton = root.querySelector('[data-subscription-popup-dismiss]'); const dismissButton = root.querySelector('[data-subscription-popup-dismiss]');
const apiUrl = root.getAttribute('data-api-url'); const apiUrl = root.getAttribute('data-api-url');
const browserPushApiUrl = root.getAttribute('data-browser-push-api-url'); const browserPushApiUrl = root.getAttribute('data-browser-push-api-url');
const captchaApiUrl = root.getAttribute('data-captcha-url') || '/api/comments/captcha';
const browserPushPublicKey = root.getAttribute('data-web-push-public-key') || ''; const browserPushPublicKey = root.getAttribute('data-web-push-public-key') || '';
const browserPushButton = root.querySelector('[data-subscription-popup-browser-push]'); const browserPushButton = root.querySelector('[data-subscription-popup-browser-push]');
const verificationMode = root.getAttribute('data-verification-mode') || 'off';
const useTurnstile = verificationMode === 'turnstile';
const useCaptcha = verificationMode === 'captcha';
const turnstileSiteKey = root.getAttribute('data-turnstile-site-key') || ''; const turnstileSiteKey = root.getAttribute('data-turnstile-site-key') || '';
const turnstileContainer = root.querySelector( const turnstileContainer = root.querySelector(
'[data-subscription-popup-turnstile]', '[data-subscription-popup-turnstile]',
@@ -208,6 +248,18 @@ const webPushPublicKey = popupSettings.webPushEnabled
const turnstileTokenInput = form?.querySelector( const turnstileTokenInput = form?.querySelector(
'input[name="turnstileToken"]', 'input[name="turnstileToken"]',
) as HTMLInputElement | null; ) as HTMLInputElement | null;
const captchaQuestion = root.querySelector(
'[data-subscription-popup-captcha-question]',
) as HTMLElement | null;
const refreshCaptchaButton = root.querySelector(
'[data-subscription-popup-refresh-captcha]',
) as HTMLButtonElement | null;
const captchaTokenInput = form?.querySelector(
'input[name="captchaToken"]',
) as HTMLInputElement | null;
const captchaAnswerInput = form?.querySelector(
'input[name="captchaAnswer"]',
) as HTMLInputElement | null;
const pathname = window.location.pathname || '/'; const pathname = window.location.pathname || '/';
const delayMs = Math.max(3000, Number(root.getAttribute('data-delay-ms') || '18000')); const delayMs = Math.max(3000, Number(root.getAttribute('data-delay-ms') || '18000'));
const defaultStatus = status instanceof HTMLElement ? status.textContent?.trim() || '' : ''; const defaultStatus = status instanceof HTMLElement ? status.textContent?.trim() || '' : '';
@@ -322,8 +374,10 @@ const webPushPublicKey = popupSettings.webPushEnabled
if (focusEmail && shouldFocusEmail()) { if (focusEmail && shouldFocusEmail()) {
emailInput.focus({ preventScroll: true }); emailInput.focus({ preventScroll: true });
} }
if (turnstileSiteKey) { if (useTurnstile) {
void ensureTurnstile(false); void ensureTurnstile(false);
} else if (useCaptcha) {
void loadCaptcha(false);
} }
}); });
}; };
@@ -408,13 +462,42 @@ const webPushPublicKey = popupSettings.webPushEnabled
} }
}; };
const resetHumanCheck = () => { const loadCaptcha = async (showError = true) => {
if (!turnstileSiteKey || !turnstileTokenInput) { if (!captchaQuestion || !captchaTokenInput || !captchaAnswerInput) {
return; return;
} }
turnstileTokenInput.value = ''; captchaQuestion.textContent = '加载中...';
turnstileWidget?.reset(); captchaTokenInput.value = '';
captchaAnswerInput.value = '';
try {
const response = await fetch(captchaApiUrl);
if (!response.ok) {
throw new Error(await response.text());
}
const payload = (await response.json()) as { token?: string; question?: string };
captchaTokenInput.value = payload.token || '';
captchaQuestion.textContent = payload.question || '请刷新验证码';
} catch (error) {
captchaQuestion.textContent = '验证码加载失败,请刷新重试';
if (showError) {
setError(error instanceof Error ? error.message : '验证码加载失败,请刷新后重试。');
}
}
};
const resetHumanCheck = () => {
if (useTurnstile && turnstileTokenInput) {
turnstileTokenInput.value = '';
turnstileWidget?.reset();
return;
}
if (useCaptcha) {
void loadCaptcha(false);
}
}; };
const syncBrowserPushState = async () => { const syncBrowserPushState = async () => {
@@ -481,6 +564,9 @@ const webPushPublicKey = popupSettings.webPushEnabled
}); });
dismissButton.addEventListener('click', () => closePopup(true)); dismissButton.addEventListener('click', () => closePopup(true));
refreshCaptchaButton?.addEventListener('click', () => {
void loadCaptcha(false);
});
window.addEventListener('keydown', (event) => { window.addEventListener('keydown', (event) => {
if (event.key === 'Escape' && opened) { if (event.key === 'Escape' && opened) {
@@ -495,12 +581,24 @@ const webPushPublicKey = popupSettings.webPushEnabled
return; return;
} }
if (turnstileSiteKey) { if (useTurnstile) {
const token = turnstileTokenInput?.value.trim() || ''; const token = turnstileTokenInput?.value.trim() || '';
if (!token) { if (!token) {
setError('请先完成人机验证。'); setError('请先完成人机验证。');
return; return;
} }
} else if (useCaptcha) {
const captchaToken = captchaTokenInput?.value.trim() || '';
const captchaAnswer = captchaAnswerInput?.value.trim() || '';
if (!captchaToken) {
setError('验证码加载失败,请刷新后重试。');
return;
}
if (!captchaAnswer) {
setError('请先填写验证码答案。');
captchaAnswerInput?.focus();
return;
}
} }
setPending('正在申请浏览器通知权限...'); setPending('正在申请浏览器通知权限...');
@@ -517,6 +615,8 @@ const webPushPublicKey = popupSettings.webPushEnabled
subscription, subscription,
source: 'frontend-popup', source: 'frontend-popup',
turnstileToken: turnstileTokenInput?.value || undefined, turnstileToken: turnstileTokenInput?.value || undefined,
captchaToken: captchaTokenInput?.value || undefined,
captchaAnswer: captchaAnswerInput?.value || undefined,
}), }),
}); });
@@ -554,12 +654,24 @@ const webPushPublicKey = popupSettings.webPushEnabled
return; return;
} }
if (turnstileSiteKey) { if (useTurnstile) {
const token = String(formData.get('turnstileToken') || '').trim(); const token = String(formData.get('turnstileToken') || '').trim();
if (!token) { if (!token) {
setError('请先完成人机验证。'); setError('请先完成人机验证。');
return; return;
} }
} else if (useCaptcha) {
const captchaToken = String(formData.get('captchaToken') || '').trim();
const captchaAnswer = String(formData.get('captchaAnswer') || '').trim();
if (!captchaToken) {
setError('验证码加载失败,请刷新后重试。');
return;
}
if (!captchaAnswer) {
setError('请先填写验证码答案。');
captchaAnswerInput?.focus();
return;
}
} }
setPending('正在提交订阅申请...'); setPending('正在提交订阅申请...');
@@ -575,6 +687,8 @@ const webPushPublicKey = popupSettings.webPushEnabled
displayName, displayName,
source: 'frontend-popup', source: 'frontend-popup',
turnstileToken: formData.get('turnstileToken'), turnstileToken: formData.get('turnstileToken'),
captchaToken: formData.get('captchaToken'),
captchaAnswer: formData.get('captchaAnswer'),
}), }),
}); });

View File

@@ -3,6 +3,7 @@ import type {
ContentOverview, ContentOverview,
ContentWindowHighlight, ContentWindowHighlight,
FriendLink as UiFriendLink, FriendLink as UiFriendLink,
HumanVerificationMode,
Post as UiPost, Post as UiPost,
PopularPostHighlight, PopularPostHighlight,
SiteSettings, SiteSettings,
@@ -36,6 +37,24 @@ function toUrlLike(value: string | URL) {
return value instanceof URL ? value : new URL(value); return value instanceof URL ? value : new URL(value);
} }
function normalizeVerificationMode(
value: string | null | undefined,
fallback: HumanVerificationMode,
): HumanVerificationMode {
switch ((value ?? '').trim().toLowerCase()) {
case 'off':
return 'off';
case 'captcha':
case 'normal':
case 'simple':
return 'captcha';
case 'turnstile':
return 'turnstile';
default:
return fallback;
}
}
const buildTimePublicApiBaseUrl = normalizeApiBaseUrl(import.meta.env.PUBLIC_API_BASE_URL); const buildTimePublicApiBaseUrl = normalizeApiBaseUrl(import.meta.env.PUBLIC_API_BASE_URL);
const buildTimeCommentTurnstileSiteKey = const buildTimeCommentTurnstileSiteKey =
import.meta.env.PUBLIC_COMMENT_TURNSTILE_SITE_KEY?.trim() ?? ''; import.meta.env.PUBLIC_COMMENT_TURNSTILE_SITE_KEY?.trim() ?? '';
@@ -262,7 +281,9 @@ export interface ApiSiteSettings {
}> | null; }> | null;
ai_enabled: boolean; ai_enabled: boolean;
paragraph_comments_enabled: boolean; paragraph_comments_enabled: boolean;
comment_verification_mode?: HumanVerificationMode | null;
comment_turnstile_enabled: boolean; comment_turnstile_enabled: boolean;
subscription_verification_mode?: HumanVerificationMode | null;
subscription_turnstile_enabled: boolean; subscription_turnstile_enabled: boolean;
web_push_enabled: boolean; web_push_enabled: boolean;
turnstile_site_key: string | null; turnstile_site_key: string | null;
@@ -452,6 +473,7 @@ export const DEFAULT_SITE_SETTINGS: SiteSettings = {
}, },
comments: { comments: {
paragraphsEnabled: true, paragraphsEnabled: true,
verificationMode: 'captcha',
turnstileEnabled: false, turnstileEnabled: false,
turnstileSiteKey: undefined, turnstileSiteKey: undefined,
}, },
@@ -460,6 +482,7 @@ export const DEFAULT_SITE_SETTINGS: SiteSettings = {
popupTitle: '订阅更新', popupTitle: '订阅更新',
popupDescription: '有新文章或汇总简报时,通过邮件第一时间收到提醒。需要先确认邮箱,可随时退订。', popupDescription: '有新文章或汇总简报时,通过邮件第一时间收到提醒。需要先确认邮箱,可随时退订。',
popupDelaySeconds: 18, popupDelaySeconds: 18,
verificationMode: 'off',
turnstileEnabled: false, turnstileEnabled: false,
turnstileSiteKey: undefined, turnstileSiteKey: undefined,
webPushEnabled: false, webPushEnabled: false,
@@ -561,7 +584,17 @@ const normalizeFriendLink = (friendLink: ApiFriendLink): AppFriendLink => ({
status: friendLink.status, status: friendLink.status,
}); });
const normalizeSiteSettings = (settings: ApiSiteSettings): SiteSettings => ({ const normalizeSiteSettings = (settings: ApiSiteSettings): SiteSettings => {
const commentVerificationMode = normalizeVerificationMode(
settings.comment_verification_mode,
settings.comment_turnstile_enabled ? 'turnstile' : 'captcha',
);
const subscriptionVerificationMode = normalizeVerificationMode(
settings.subscription_verification_mode,
settings.subscription_turnstile_enabled ? 'turnstile' : 'off',
);
return {
id: String(settings.id), id: String(settings.id),
siteName: settings.site_name || DEFAULT_SITE_SETTINGS.siteName, siteName: settings.site_name || DEFAULT_SITE_SETTINGS.siteName,
siteShortName: settings.site_short_name || DEFAULT_SITE_SETTINGS.siteShortName, siteShortName: settings.site_short_name || DEFAULT_SITE_SETTINGS.siteShortName,
@@ -599,8 +632,9 @@ const normalizeSiteSettings = (settings: ApiSiteSettings): SiteSettings => ({
enabled: Boolean(settings.ai_enabled), enabled: Boolean(settings.ai_enabled),
}, },
comments: { comments: {
verificationMode: commentVerificationMode,
paragraphsEnabled: settings.paragraph_comments_enabled ?? true, paragraphsEnabled: settings.paragraph_comments_enabled ?? true,
turnstileEnabled: Boolean(settings.comment_turnstile_enabled), turnstileEnabled: commentVerificationMode === 'turnstile',
turnstileSiteKey: turnstileSiteKey:
settings.turnstile_site_key || resolvePublicCommentTurnstileSiteKey() || undefined, settings.turnstile_site_key || resolvePublicCommentTurnstileSiteKey() || undefined,
}, },
@@ -615,7 +649,8 @@ const normalizeSiteSettings = (settings: ApiSiteSettings): SiteSettings => ({
popupDelaySeconds: popupDelaySeconds:
settings.subscription_popup_delay_seconds ?? settings.subscription_popup_delay_seconds ??
DEFAULT_SITE_SETTINGS.subscriptions.popupDelaySeconds, DEFAULT_SITE_SETTINGS.subscriptions.popupDelaySeconds,
turnstileEnabled: Boolean(settings.subscription_turnstile_enabled), verificationMode: subscriptionVerificationMode,
turnstileEnabled: subscriptionVerificationMode === 'turnstile',
turnstileSiteKey: turnstileSiteKey:
settings.turnstile_site_key || resolvePublicCommentTurnstileSiteKey() || undefined, settings.turnstile_site_key || resolvePublicCommentTurnstileSiteKey() || undefined,
webPushEnabled: Boolean(settings.web_push_enabled), webPushEnabled: Boolean(settings.web_push_enabled),
@@ -628,7 +663,8 @@ const normalizeSiteSettings = (settings: ApiSiteSettings): SiteSettings => ({
defaultOgImage: settings.seo_default_og_image ?? undefined, defaultOgImage: settings.seo_default_og_image ?? undefined,
defaultTwitterHandle: settings.seo_default_twitter_handle ?? undefined, defaultTwitterHandle: settings.seo_default_twitter_handle ?? undefined,
}, },
}); };
};
const normalizeContentOverview = ( const normalizeContentOverview = (
overview: ApiHomePagePayload['content_overview'] | undefined, overview: ApiHomePagePayload['content_overview'] | undefined,
@@ -937,13 +973,23 @@ class ApiClient {
}); });
} }
async subscribe(input: { email: string; displayName?: string; source?: string }): Promise<PublicSubscriptionResponse> { async subscribe(input: {
email: string;
displayName?: string;
source?: string;
turnstileToken?: string;
captchaToken?: string;
captchaAnswer?: string;
}): Promise<PublicSubscriptionResponse> {
return this.fetch<PublicSubscriptionResponse>('/subscriptions', { return this.fetch<PublicSubscriptionResponse>('/subscriptions', {
method: 'POST', method: 'POST',
body: JSON.stringify({ body: JSON.stringify({
email: input.email, email: input.email,
displayName: input.displayName, displayName: input.displayName,
source: input.source, source: input.source,
turnstileToken: input.turnstileToken,
captchaToken: input.captchaToken,
captchaAnswer: input.captchaAnswer,
}), }),
}); });
} }

View File

@@ -59,6 +59,8 @@ export interface FriendLink {
category?: string; category?: string;
} }
export type HumanVerificationMode = 'off' | 'captcha' | 'turnstile';
export interface SiteSettings { export interface SiteSettings {
id: string; id: string;
siteName: string; siteName: string;
@@ -85,6 +87,7 @@ export interface SiteSettings {
}; };
comments: { comments: {
paragraphsEnabled: boolean; paragraphsEnabled: boolean;
verificationMode: HumanVerificationMode;
turnstileEnabled: boolean; turnstileEnabled: boolean;
turnstileSiteKey?: string; turnstileSiteKey?: string;
}; };
@@ -93,6 +96,7 @@ export interface SiteSettings {
popupTitle: string; popupTitle: string;
popupDescription: string; popupDescription: string;
popupDelaySeconds: number; popupDelaySeconds: number;
verificationMode: HumanVerificationMode;
turnstileEnabled: boolean; turnstileEnabled: boolean;
turnstileSiteKey?: string; turnstileSiteKey?: string;
webPushEnabled: boolean; webPushEnabled: boolean;

View File

@@ -187,7 +187,7 @@ const breadcrumbJsonLd = {
<div class="max-w-6xl mx-auto px-4 sm:px-6 lg:px-8 py-8" data-article-slug={post.slug}> <div class="max-w-6xl mx-auto px-4 sm:px-6 lg:px-8 py-8" data-article-slug={post.slug}>
<div class="flex flex-col gap-8 lg:flex-row"> <div class="flex flex-col gap-8 lg:flex-row">
<div class="min-w-0 flex-1"> <div class="min-w-0 flex-1">
<TerminalWindow title={`~/content/posts/${post.slug}.md`} class="w-full"> <TerminalWindow title={`~/articles/${post.slug}`} class="w-full">
<div class="px-4 pb-2"> <div class="px-4 pb-2">
<div class="terminal-panel ml-4 mt-4 space-y-5"> <div class="terminal-panel ml-4 mt-4 space-y-5">
<div class="flex flex-wrap items-start justify-between gap-4"> <div class="flex flex-wrap items-start justify-between gap-4">
@@ -252,7 +252,7 @@ const breadcrumbJsonLd = {
</div> </div>
<div class="px-4 pb-2"> <div class="px-4 pb-2">
<CommandPrompt command={`bat --style=plain ${post.slug}.md`} /> <CommandPrompt command={`preview article --slug ${post.slug}`} />
<div class="ml-4 mt-4 space-y-6"> <div class="ml-4 mt-4 space-y-6">
{post.image && ( {post.image && (
@@ -298,10 +298,7 @@ const breadcrumbJsonLd = {
</div> </div>
<div class="px-4 py-6"> <div class="px-4 py-6">
<div class="terminal-panel-muted ml-4 mt-4 flex flex-col gap-4 sm:flex-row sm:items-center sm:justify-between"> <div class="terminal-panel-muted ml-4 mt-4 flex flex-col gap-4 sm:flex-row sm:items-center sm:justify-end">
<span class="text-sm text-[var(--text-secondary)]">
file://content/posts/{post.slug}.md
</span>
<div class="flex flex-wrap gap-2"> <div class="flex flex-wrap gap-2">
<a href="/articles" class="terminal-action-button"> <a href="/articles" class="terminal-action-button">
<i class="fas fa-list"></i> <i class="fas fa-list"></i>

View File

@@ -80,14 +80,14 @@ const postTypeFilters = [
const typePromptCommand = const typePromptCommand =
selectedType === 'all' selectedType === 'all'
? `grep -E "^type: (article|tweet)$" ./posts/*.md` ? 'posts query --type all'
: `grep -E "^type: ${selectedType}$" ./posts/*.md`; : `posts query --type ${selectedType}`;
const categoryPromptCommand = selectedCategory const categoryPromptCommand = selectedCategory
? `grep -El "^category: ${selectedCategory}$" ./posts/*.md` ? `posts query --category "${selectedCategory}"`
: `cut -d: -f2 ./categories.index | sort -u`; : 'categories list --sort name';
const tagPromptCommand = selectedTag const tagPromptCommand = selectedTag
? `grep -Ril "#${selectedTag}" ./posts` ? `posts query --tag "${selectedTag}"`
: `cut -d: -f2 ./tags.index | sort -u`; : 'tags list --sort popularity';
const hasActiveFilters = const hasActiveFilters =
Boolean(selectedSearch || selectedTag || selectedCategory || selectedType !== 'all' || currentPage > 1); Boolean(selectedSearch || selectedTag || selectedCategory || selectedType !== 'all' || currentPage > 1);
const canonicalUrl = hasActiveFilters ? '/articles' : undefined; const canonicalUrl = hasActiveFilters ? '/articles' : undefined;
@@ -126,7 +126,7 @@ const buildArticlesUrl = ({
<div class="max-w-5xl mx-auto px-4 sm:px-6 lg:px-8 py-8"> <div class="max-w-5xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
<TerminalWindow title="~/articles/index" class="w-full"> <TerminalWindow title="~/articles/index" class="w-full">
<div class="px-4 pb-2"> <div class="px-4 pb-2">
<CommandPrompt command="find ./posts -type f -name '*.md' | sort" /> <CommandPrompt command="posts list --sort published_at --order desc" />
<div class="ml-4 mt-4 space-y-3"> <div class="ml-4 mt-4 space-y-3">
<h1 class="text-3xl font-bold tracking-tight text-[var(--title-color)]">{t('articlesPage.title')}</h1> <h1 class="text-3xl font-bold tracking-tight text-[var(--title-color)]">{t('articlesPage.title')}</h1>
@@ -141,7 +141,7 @@ const buildArticlesUrl = ({
{selectedSearch && ( {selectedSearch && (
<span class="terminal-stat-pill"> <span class="terminal-stat-pill">
<i class="fas fa-magnifying-glass text-[var(--primary)]"></i> <i class="fas fa-magnifying-glass text-[var(--primary)]"></i>
grep: {selectedSearch} search: {selectedSearch}
</span> </span>
)} )}
{selectedCategory && ( {selectedCategory && (

View File

@@ -38,11 +38,11 @@ const filteredPosts = selectedCategory
? allPosts.filter((post) => (post.category || '').trim().toLowerCase() === normalizedSelectedCategory) ? allPosts.filter((post) => (post.category || '').trim().toLowerCase() === normalizedSelectedCategory)
: []; : [];
const categoryPromptCommand = selectedCategory const categoryPromptCommand = selectedCategory
? `grep -El "^category: ${selectedCategory}$" ./posts/*.md` ? `posts query --category "${selectedCategory}"`
: 'cut -d: -f2 ./categories.index | sort -u'; : 'categories list --sort name';
const resultsPromptCommand = selectedCategory const resultsPromptCommand = selectedCategory
? `find ./posts -type f | xargs grep -il "^category: ${selectedCategory}$"` ? `posts list --category "${selectedCategory}"`
: 'find ./posts -type f | sort'; : 'posts list --group-by category';
const categoryAccentMap = Object.fromEntries( const categoryAccentMap = Object.fromEntries(
categories.map((category) => [category.name.trim().toLowerCase(), getAccentVars(getCategoryTheme(category.name))]) categories.map((category) => [category.name.trim().toLowerCase(), getAccentVars(getCategoryTheme(category.name))])
); );
@@ -58,7 +58,7 @@ const pageDescription = selectedCategoryRecord?.seoDescription || selectedCatego
<div class="max-w-5xl mx-auto px-4 sm:px-6 lg:px-8 py-8"> <div class="max-w-5xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
<TerminalWindow title="~/categories" class="w-full"> <TerminalWindow title="~/categories" class="w-full">
<div class="mb-6 px-4"> <div class="mb-6 px-4">
<CommandPrompt command="find ./categories -maxdepth 1 -type d | sort" /> <CommandPrompt command="categories list --with-counts" />
<div class="terminal-panel ml-4 mt-4"> <div class="terminal-panel ml-4 mt-4">
<div class="terminal-kicker">content taxonomy</div> <div class="terminal-kicker">content taxonomy</div>
<div class="terminal-section-title mt-4"> <div class="terminal-section-title mt-4">
@@ -273,11 +273,11 @@ const pageDescription = selectedCategoryRecord?.seoDescription || selectedCatego
function updatePrompts() { function updatePrompts() {
const filterCommand = state.category const filterCommand = state.category
? `grep -El "^category: ${state.category}$" ./posts/*.md` ? `posts query --category "${state.category}"`
: 'cut -d: -f2 ./categories.index | sort -u'; : 'categories list --sort name';
const resultsCommand = state.category const resultsCommand = state.category
? `find ./posts -type f | xargs grep -il "^category: ${state.category}$"` ? `posts list --category "${state.category}"`
: 'find ./posts -type f | sort'; : 'posts list --group-by category';
promptApi?.set?.('categories-filter-prompt', filterCommand, { typing: false }); promptApi?.set?.('categories-filter-prompt', filterCommand, { typing: false });
promptApi?.set?.('categories-results-prompt', resultsCommand, { typing: false }); promptApi?.set?.('categories-results-prompt', resultsCommand, { typing: false });

View File

@@ -59,7 +59,7 @@ export const GET: APIRoute = async ({ params }) => {
<circle cx="92" cy="77" r="10" fill="#FF5F56"/> <circle cx="92" cy="77" r="10" fill="#FF5F56"/>
<circle cx="124" cy="77" r="10" fill="#FFBD2E"/> <circle cx="124" cy="77" r="10" fill="#FFBD2E"/>
<circle cx="156" cy="77" r="10" fill="#27C93F"/> <circle cx="156" cy="77" r="10" fill="#27C93F"/>
<text x="190" y="83" fill="#9CA3AF" font-family="'JetBrains Mono', monospace" font-size="22">~/content/posts/${escapeXml(post.slug)}.md</text> <text x="190" y="83" fill="#9CA3AF" font-family="'JetBrains Mono', monospace" font-size="22">~/articles/${escapeXml(post.slug)}</text>
<rect x="88" y="150" width="180" height="44" rx="22" fill="rgba(0,255,157,0.12)" stroke="url(#accent)"/> <rect x="88" y="150" width="180" height="44" rx="22" fill="rgba(0,255,157,0.12)" stroke="url(#accent)"/>
<text x="178" y="178" text-anchor="middle" fill="#8BFFD3" font-family="'JetBrains Mono', monospace" font-size="24">${category}</text> <text x="178" y="178" text-anchor="middle" fill="#8BFFD3" font-family="'JetBrains Mono', monospace" font-size="24">${category}</text>
<text x="88" y="274" fill="#F8FAFC" font-family="'IBM Plex Sans', Arial, sans-serif" font-size="64" font-weight="700">${title}</text> <text x="88" y="274" fill="#F8FAFC" font-family="'IBM Plex Sans', Arial, sans-serif" font-size="64" font-weight="700">${title}</text>