feat: refresh content workflow and verification settings
All checks were successful
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 43s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Successful in 25m9s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 51s

This commit is contained in:
2026-04-01 18:47:17 +08:00
parent f2c07df320
commit 7de4ddc3ee
66 changed files with 1455 additions and 2759 deletions

View File

@@ -1,16 +1,16 @@
use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, Engine as _};
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD};
use chrono::{DateTime, Utc};
use fastembed::{
InitOptionsUserDefined, Pooling, TextEmbedding, TokenizerFiles, UserDefinedEmbeddingModel,
};
use loco_rs::prelude::*;
use reqwest::{header::CONTENT_TYPE, multipart, Client, Url};
use reqwest::{Client, Url, header::CONTENT_TYPE, multipart};
use sea_orm::{
ActiveModelTrait, ConnectionTrait, DbBackend, EntityTrait, FromQueryResult, IntoActiveModel,
PaginatorTrait, QueryOrder, Set, Statement,
};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use serde_json::{Value, json};
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::{Mutex, OnceLock};
@@ -34,8 +34,7 @@ const DEFAULT_CLOUDFLARE_CHAT_MODEL: &str = "@cf/meta/llama-3.1-8b-instruct";
const DEFAULT_CLOUDFLARE_IMAGE_MODEL: &str = "@cf/black-forest-labs/flux-2-klein-4b";
const DEFAULT_TOP_K: usize = 4;
const DEFAULT_CHUNK_SIZE: usize = 1200;
const DEFAULT_SYSTEM_PROMPT: &str =
"你是这个博客的站内 AI 助手。请严格基于提供的博客上下文回答,优先给出准确结论,再补充细节;如果上下文不足,请明确说明。";
const DEFAULT_SYSTEM_PROMPT: &str = "你是这个博客的站内 AI 助手。请严格基于提供的博客上下文回答,优先给出准确结论,再补充细节;如果上下文不足,请明确说明。";
const EMBEDDING_BATCH_SIZE: usize = 32;
const EMBEDDING_DIMENSION: usize = 384;
const LOCAL_EMBEDDING_MODEL_LABEL: &str = "fastembed / local all-MiniLM-L6-v2";
@@ -2096,8 +2095,8 @@ pub(crate) fn build_provider_url(request: &AiProviderRequest) -> String {
#[cfg(test)]
mod tests {
use super::{
build_provider_url, extract_provider_text, is_profile_question,
normalize_provider_api_base, parse_provider_sse_body, AiProviderRequest,
AiProviderRequest, build_provider_url, extract_provider_text, is_profile_question,
normalize_provider_api_base, parse_provider_sse_body,
};
fn build_request(provider: &str, api_base: &str) -> AiProviderRequest {
@@ -2643,7 +2642,7 @@ async fn retrieve_matches(
pub async fn rebuild_index(ctx: &AppContext) -> Result<AiIndexSummary> {
let settings = load_runtime_settings(ctx, false).await?;
let posts = content::sync_markdown_posts(ctx).await?;
let posts = content::load_markdown_posts_from_store(ctx).await?;
let mut chunk_drafts = build_chunks(&posts, settings.chunk_size);
chunk_drafts.extend(build_profile_chunks(&settings.raw, settings.chunk_size));
let embeddings = if chunk_drafts.is_empty() {

View File

@@ -1,4 +1,4 @@
use std::{fs, path::Path, path::PathBuf};
use std::path::Path;
use chrono::Utc;
use loco_rs::prelude::*;
@@ -11,15 +11,14 @@ use serde::{Deserialize, Serialize};
use crate::{
controllers::site_settings,
models::_entities::{
categories, friend_links, media_assets, posts, reviews, site_settings as site_settings_entity,
tags,
categories, comments, friend_links, media_assets, posts, reviews,
site_settings as site_settings_entity, tags,
},
services::{content, media_assets as media_assets_service, storage},
};
const BACKUP_VERSION: &str = "2026-04-01";
const WARNING_STORAGE_BINARIES: &str =
"当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
const WARNING_STORAGE_BINARIES: &str = "当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupTaxonomyRecord {
@@ -152,47 +151,22 @@ fn normalize_backup_mode(value: Option<&str>) -> String {
}
}
/// Resolves the directory that holds the markdown post documents.
fn markdown_posts_dir() -> PathBuf {
    content::MARKDOWN_POSTS_DIR.into()
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
}
/// Deletes every `.md`/`.markdown` file inside the markdown posts directory,
/// creating the directory first when it does not exist yet.
///
/// Returns the number of files removed. Directory entries that fail to read
/// are skipped; individual delete failures abort with an error.
fn remove_existing_markdown_documents() -> Result<usize> {
    let dir = markdown_posts_dir();
    fs::create_dir_all(&dir).map_err(io_error)?;
    let entries = fs::read_dir(&dir).map_err(io_error)?;
    let mut removed = 0_usize;
    for entry in entries.flatten() {
        let path = entry.path();
        // Extension comparison is case-insensitive ("MD" counts too).
        let is_markdown = matches!(
            path.extension()
                .and_then(|value| value.to_str())
                .map(|value| value.to_ascii_lowercase())
                .as_deref(),
            Some("md") | Some("markdown")
        );
        if is_markdown {
            fs::remove_file(&path).map_err(io_error)?;
            removed += 1;
        }
    }
    Ok(removed)
}
/// Converts Windows CRLF line endings into plain LF.
fn normalize_markdown(value: &str) -> String {
    value.split("\r\n").collect::<Vec<_>>().join("\n")
}
fn normalized_backup_post(document: &BackupPostDocument) -> Result<(String, String)> {
let candidate_slug = trim_to_option(Some(document.slug.clone())).unwrap_or_default();
let file_name = trim_to_option(Some(document.file_name.clone()))
.unwrap_or_else(|| format!("{}.md", if candidate_slug.is_empty() { "post" } else { &candidate_slug }));
let file_name = trim_to_option(Some(document.file_name.clone())).unwrap_or_else(|| {
format!(
"{}.md",
if candidate_slug.is_empty() {
"post"
} else {
&candidate_slug
}
)
});
let file_stem = Path::new(&file_name)
.file_stem()
.and_then(|value| value.to_str())
@@ -296,7 +270,6 @@ fn export_media_asset_record(item: media_assets::Model) -> BackupMediaAssetRecor
pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument> {
let site_settings_row = site_settings::load_current(ctx).await?;
let markdown_posts = content::sync_markdown_posts(ctx).await?;
let categories = categories::Entity::find()
.order_by_asc(categories::Column::Slug)
.all(&ctx.db)
@@ -332,21 +305,24 @@ pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument>
.into_iter()
.map(export_media_asset_record)
.collect::<Vec<_>>();
let posts = markdown_posts
let posts = content::load_markdown_posts_from_store(ctx)
.await?
.into_iter()
.map(|post| {
let (_, markdown) = content::read_markdown_document(&post.slug)?;
Ok(BackupPostDocument {
slug: post.slug.clone(),
file_name: format!("{}.md", post.slug),
markdown,
markdown: content::build_markdown_document(&post),
})
})
.collect::<Result<Vec<_>>>()?;
let storage_manifest = match export_storage_manifest(ctx).await {
Ok(items) => items,
Err(error) => {
tracing::warn!(?error, "failed to export storage manifest, continuing without it");
tracing::warn!(
?error,
"failed to export storage manifest, continuing without it"
);
None
}
};
@@ -549,28 +525,32 @@ async fn write_backup_posts(
documents: &[BackupPostDocument],
replace_existing: bool,
) -> Result<usize> {
let dir = markdown_posts_dir();
fs::create_dir_all(&dir).map_err(io_error)?;
if replace_existing {
remove_existing_markdown_documents()?;
let existing_posts = posts::Entity::find().all(&ctx.db).await?;
for post in &existing_posts {
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(&post.slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
}
}
posts::Entity::delete_many().exec(&ctx.db).await?;
}
if documents.is_empty() {
if replace_existing {
posts::Entity::delete_many().exec(&ctx.db).await?;
}
return Ok(0);
}
let mut written = std::collections::HashSet::new();
for document in documents {
let (slug, markdown) = normalized_backup_post(document)?;
fs::write(content::markdown_post_path(&slug), markdown).map_err(io_error)?;
content::upsert_markdown_document(ctx, Some(&slug), &markdown).await?;
written.insert(slug);
}
content::sync_markdown_posts(ctx).await?;
Ok(written.len())
}

View File

@@ -363,15 +363,23 @@ pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'
return Err(Error::BadRequest("提交未通过校验".to_string()));
}
if !crate::services::turnstile::verify_if_enabled(
ctx,
let settings = crate::controllers::site_settings::load_current(ctx).await?;
match crate::services::turnstile::effective_mode(
&settings,
crate::services::turnstile::TurnstileScope::Comment,
input.turnstile_token,
input.ip_address,
)
.await?
{
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
) {
crate::services::turnstile::VerificationMode::Off => {}
crate::services::turnstile::VerificationMode::Captcha => {
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
}
crate::services::turnstile::VerificationMode::Turnstile => {
crate::services::turnstile::verify_token(
&settings,
input.turnstile_token,
input.ip_address,
)
.await?;
}
}
if contains_blocked_keyword(input).is_some() {

View File

@@ -6,19 +6,17 @@ use sea_orm::{
};
use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value;
use std::fs;
use std::path::{Path, PathBuf};
use std::path::Path;
use crate::models::_entities::{categories, comments, posts, tags};
pub const MARKDOWN_POSTS_DIR: &str = "content/posts";
const FIXTURE_POSTS_FILE: &str = "src/fixtures/posts.yaml";
pub const POST_STATUS_DRAFT: &str = "draft";
pub const POST_STATUS_PUBLISHED: &str = "published";
pub const POST_STATUS_OFFLINE: &str = "offline";
pub const POST_VISIBILITY_PUBLIC: &str = "public";
pub const POST_VISIBILITY_UNLISTED: &str = "unlisted";
pub const POST_VISIBILITY_PRIVATE: &str = "private";
const VIRTUAL_MARKDOWN_PATH_PREFIX: &str = "article://posts";
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
struct MarkdownFrontmatter {
@@ -105,32 +103,18 @@ pub struct MarkdownImportFile {
pub content: String,
}
#[derive(Debug, Clone, Deserialize)]
struct LegacyFixturePost {
title: String,
slug: String,
content: String,
excerpt: Option<String>,
category: Option<String>,
tags: Option<Vec<String>>,
pinned: Option<bool>,
published: Option<bool>,
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
#[derive(Debug, Clone)]
struct MarkdownDocumentSource {
post: MarkdownPost,
raw_markdown: String,
}
fn yaml_error(err: serde_yaml::Error) -> Error {
Error::string(&err.to_string())
}
fn posts_dir() -> PathBuf {
PathBuf::from(MARKDOWN_POSTS_DIR)
}
pub fn markdown_post_path(slug: &str) -> PathBuf {
posts_dir().join(format!("{slug}.md"))
pub fn virtual_markdown_document_path(slug: &str) -> String {
format!("{VIRTUAL_MARKDOWN_PATH_PREFIX}/{slug}")
}
fn normalize_newlines(input: &str) -> String {
@@ -157,6 +141,15 @@ fn normalize_string_list(values: Option<Vec<String>>) -> Vec<String> {
.collect()
}
/// Trims and de-duplicates a tag list while preserving first-seen order.
/// Duplicates are detected via `normalized_match_key`, so tags that differ
/// only by that key's normalization collapse to the first occurrence.
fn normalize_post_tags(values: Vec<String>) -> Vec<String> {
    let mut seen = std::collections::HashSet::new();
    let mut unique = Vec::new();
    for tag in normalize_string_list(Some(values)) {
        if seen.insert(normalized_match_key(&tag)) {
            unique.push(tag);
        }
    }
    unique
}
fn yaml_scalar(value: &str) -> String {
serde_yaml::to_string(value)
.unwrap_or_else(|_| format!("{value:?}"))
@@ -214,7 +207,9 @@ fn parse_frontmatter_datetime(value: Option<String>) -> Option<DateTime<FixedOff
if let Ok(date_only) = NaiveDate::parse_from_str(&raw, "%Y-%m-%d") {
let naive = date_only.and_hms_opt(0, 0, 0)?;
return FixedOffset::east_opt(0)?.from_local_datetime(&naive).single();
return FixedOffset::east_opt(0)?
.from_local_datetime(&naive)
.single();
}
None
@@ -278,6 +273,46 @@ pub fn post_redirects_from_json(value: &Option<Value>) -> Vec<String> {
.collect()
}
/// Extracts the string elements of an optional JSON array column into a
/// trimmed `Vec<String>`.
///
/// Non-string array entries and entries that trim to the empty string are
/// dropped. Returns an empty vector when the value is absent or not an array.
fn json_string_array(value: &Option<Value>) -> Vec<String> {
    // Borrow the array instead of cloning it wholesale; only the surviving
    // string elements allocate.
    let Some(items) = value.as_ref().and_then(Value::as_array) else {
        return Vec::new();
    };
    items
        .iter()
        .filter_map(Value::as_str)
        .map(str::trim)
        .filter(|item| !item.is_empty())
        .map(ToString::to_string)
        .collect()
}
/// Maps a `posts` database row onto the in-memory `MarkdownPost` structure.
///
/// Blank columns collapse to `None`/defaults via `trim_to_option`, JSON array
/// columns are flattened into string vectors, and `file_path` is set to the
/// virtual `article://posts/<slug>` path rather than a filesystem location.
fn markdown_post_from_model(post: &posts::Model) -> MarkdownPost {
    MarkdownPost {
        // Fall back to the slug when the stored title is blank.
        title: trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone()),
        slug: post.slug.clone(),
        // Prefer the stored description; otherwise derive an excerpt from the body.
        description: trim_to_option(post.description.clone())
            .or_else(|| post.content.as_deref().and_then(excerpt_from_content)),
        content: post.content.clone().unwrap_or_default(),
        category: trim_to_option(post.category.clone()),
        tags: json_string_array(&post.tags),
        // Default post type when the column is blank.
        post_type: trim_to_option(post.post_type.clone()).unwrap_or_else(|| "article".to_string()),
        image: trim_to_option(post.image.clone()),
        images: json_string_array(&post.images),
        pinned: post.pinned.unwrap_or(false),
        status: normalize_post_status(post.status.as_deref()),
        visibility: normalize_post_visibility(post.visibility.as_deref()),
        publish_at: format_frontmatter_datetime(post.publish_at.clone()),
        unpublish_at: format_frontmatter_datetime(post.unpublish_at.clone()),
        canonical_url: normalize_url_like(post.canonical_url.clone()),
        noindex: post.noindex.unwrap_or(false),
        og_image: normalize_url_like(post.og_image.clone()),
        redirect_from: post_redirects_from_json(&post.redirect_from),
        // Redirect targets are stored without surrounding slashes.
        redirect_to: trim_to_option(post.redirect_to.clone())
            .map(|item| item.trim_matches('/').to_string()),
        file_path: virtual_markdown_document_path(&post.slug),
    }
}
pub fn is_post_listed_publicly(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
effective_post_state(
post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
@@ -431,17 +466,6 @@ fn split_frontmatter(raw: &str) -> Result<(MarkdownFrontmatter, String)> {
Ok((parsed, content))
}
/// Reads a markdown file from disk and parses it into a `MarkdownPost`,
/// using the file stem (or `"post"` when it is unavailable) as the slug
/// source passed to `parse_markdown_source`.
fn parse_markdown_post(path: &Path) -> Result<MarkdownPost> {
    let raw = fs::read_to_string(path).map_err(io_error)?;
    let file_stem = match path.file_stem().and_then(|value| value.to_str()) {
        Some(stem) => stem.to_string(),
        None => "post".to_string(),
    };
    parse_markdown_source(&file_stem, &raw, &path.to_string_lossy())
}
pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
let (frontmatter, content) = split_frontmatter(raw)?;
@@ -567,103 +591,40 @@ pub fn build_markdown_document(post: &MarkdownPost) -> String {
lines.join("\n")
}
fn ensure_markdown_posts_bootstrapped() -> Result<()> {
let dir = posts_dir();
fs::create_dir_all(&dir).map_err(io_error)?;
fn markdown_document_from_model(model: &posts::Model) -> Result<MarkdownDocumentSource> {
let raw_markdown = model
.source_markdown
.clone()
.map(|value| normalize_newlines(&value))
.filter(|value| !value.trim().is_empty())
.unwrap_or_else(|| build_markdown_document(&markdown_post_from_model(model)));
let virtual_path = virtual_markdown_document_path(&model.slug);
let post = parse_markdown_source(&model.slug, &raw_markdown, &virtual_path)?;
let has_markdown = fs::read_dir(&dir)
.map_err(io_error)?
.filter_map(|entry| entry.ok())
.any(|entry| entry.path().extension().and_then(|value| value.to_str()) == Some("md"));
if has_markdown {
return Ok(());
}
let raw = fs::read_to_string(FIXTURE_POSTS_FILE).map_err(io_error)?;
let fixtures = serde_yaml::from_str::<Vec<LegacyFixturePost>>(&raw).map_err(yaml_error)?;
for fixture in fixtures {
let post = MarkdownPost {
title: fixture.title,
slug: fixture.slug.clone(),
description: trim_to_option(fixture.excerpt),
content: fixture.content,
category: trim_to_option(fixture.category),
tags: fixture.tags.unwrap_or_default(),
post_type: "article".to_string(),
image: None,
images: Vec::new(),
pinned: fixture.pinned.unwrap_or(false),
status: if fixture.published.unwrap_or(true) {
POST_STATUS_PUBLISHED.to_string()
} else {
POST_STATUS_DRAFT.to_string()
},
visibility: POST_VISIBILITY_PUBLIC.to_string(),
publish_at: None,
unpublish_at: None,
canonical_url: None,
noindex: false,
og_image: None,
redirect_from: Vec::new(),
redirect_to: None,
file_path: markdown_post_path(&fixture.slug)
.to_string_lossy()
.to_string(),
};
fs::write(
markdown_post_path(&fixture.slug),
build_markdown_document(&post),
)
.map_err(io_error)?;
}
Ok(())
Ok(MarkdownDocumentSource { post, raw_markdown })
}
fn load_markdown_posts_from_disk() -> Result<Vec<MarkdownPost>> {
ensure_markdown_posts_bootstrapped()?;
let mut posts = fs::read_dir(posts_dir())
.map_err(io_error)?
.filter_map(|entry| entry.ok())
.map(|entry| entry.path())
.filter(|path| path.extension().and_then(|value| value.to_str()) == Some("md"))
.map(|path| parse_markdown_post(&path))
async fn load_markdown_documents_from_store(
ctx: &AppContext,
) -> Result<Vec<MarkdownDocumentSource>> {
let mut documents = posts::Entity::find()
.order_by_asc(posts::Column::Slug)
.all(&ctx.db)
.await?
.into_iter()
.map(|item| markdown_document_from_model(&item))
.collect::<Result<Vec<_>>>()?;
posts.sort_by(|left, right| left.slug.cmp(&right.slug));
Ok(posts)
documents.sort_by(|left, right| left.post.slug.cmp(&right.post.slug));
Ok(documents)
}
async fn sync_tags_from_posts(ctx: &AppContext, posts: &[MarkdownPost]) -> Result<()> {
for post in posts {
for tag_name in &post.tags {
let slug = slugify(tag_name);
let trimmed = tag_name.trim();
let existing = tags::Entity::find()
.filter(
Condition::any()
.add(tags::Column::Slug.eq(&slug))
.add(tags::Column::Name.eq(trimmed)),
)
.one(&ctx.db)
.await?;
if existing.is_none() {
let item = tags::ActiveModel {
name: Set(Some(trimmed.to_string())),
slug: Set(slug),
..Default::default()
};
let _ = item.insert(&ctx.db).await;
}
}
}
Ok(())
/// Loads every markdown post from the database store, dropping the raw
/// markdown source and keeping only the parsed `MarkdownPost` views.
pub async fn load_markdown_posts_from_store(ctx: &AppContext) -> Result<Vec<MarkdownPost>> {
    let documents = load_markdown_documents_from_store(ctx).await?;
    let posts = documents
        .into_iter()
        .map(|document| document.post)
        .collect::<Vec<_>>();
    Ok(posts)
}
async fn ensure_category(ctx: &AppContext, raw_name: &str) -> Result<Option<String>> {
@@ -768,21 +729,138 @@ async fn canonicalize_tags(ctx: &AppContext, raw_tags: &[String]) -> Result<Vec<
Ok(canonical_tags)
}
fn write_markdown_post_to_disk(post: &MarkdownPost) -> Result<()> {
fs::write(
markdown_post_path(&post.slug),
build_markdown_document(post),
)
.map_err(io_error)
/// Serializes a string slice into a JSON array `Value`, returning `None`
/// when the slice is empty.
fn string_array_json(values: &[String]) -> Option<Value> {
    if values.is_empty() {
        return None;
    }
    let items = values.iter().cloned().map(Value::String).collect();
    Some(Value::Array(items))
}
pub fn rewrite_category_references(
/// Copies every field of a normalized `MarkdownPost` — plus its rendered raw
/// markdown document — into a `posts` active model ahead of insert/update.
///
/// Empty collections are stored as `None` via `string_array_json`; status and
/// visibility are re-normalized defensively; frontmatter timestamp strings
/// are parsed back into datetimes.
fn apply_markdown_post_to_active_model(
    model: &mut posts::ActiveModel,
    post: &MarkdownPost,
    raw_markdown: &str,
) {
    model.title = Set(Some(post.title.clone()));
    model.slug = Set(post.slug.clone());
    model.description = Set(post.description.clone());
    model.content = Set(Some(post.content.clone()));
    // Persist the authoritative markdown document alongside the parsed columns.
    model.source_markdown = Set(Some(raw_markdown.to_string()));
    model.category = Set(post.category.clone());
    model.tags = Set(string_array_json(&post.tags));
    model.post_type = Set(Some(post.post_type.clone()));
    model.image = Set(post.image.clone());
    model.images = Set(string_array_json(&post.images));
    model.pinned = Set(Some(post.pinned));
    model.status = Set(Some(normalize_post_status(Some(&post.status))));
    model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
    model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
    model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
    model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
    model.noindex = Set(Some(post.noindex));
    model.og_image = Set(normalize_url_like(post.og_image.clone()));
    model.redirect_from = Set(string_array_json(&post.redirect_from));
    // Redirect targets are stored without surrounding slashes.
    model.redirect_to = Set(
        trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string())
    );
}
/// Normalizes a `MarkdownPost` and upserts it into the `posts` table.
///
/// `slug_hint` identifies the row being edited when the post is renamed (the
/// hint is the pre-rename slug). When `canonicalize_taxonomy` is set, the
/// category and tags are resolved against the taxonomy tables before saving.
///
/// Errors with `BadRequest` when the slug ends up empty, or when a rename
/// would collide with a different existing row holding the target slug.
/// Returns the normalized post that was persisted.
async fn save_markdown_post_to_store(
    ctx: &AppContext,
    mut post: MarkdownPost,
    slug_hint: Option<&str>,
    canonicalize_taxonomy: bool,
) -> Result<MarkdownPost> {
    // Treat a blank hint as absent.
    let normalized_slug_hint = slug_hint
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(ToString::to_string);
    // --- Field normalization: trim strings, apply defaults, re-derive
    // --- computed values so the stored row is always in canonical form.
    post.title = trim_to_option(Some(post.title.clone())).unwrap_or_else(|| post.slug.clone());
    // Slug resolution order: explicit slug -> hint -> slugified title.
    post.slug = trim_to_option(Some(post.slug.clone()))
        .or_else(|| normalized_slug_hint.clone())
        .unwrap_or_else(|| slugify(&post.title));
    post.description =
        trim_to_option(post.description.clone()).or_else(|| excerpt_from_content(&post.content));
    post.content = normalize_newlines(post.content.trim());
    post.category = trim_to_option(post.category.clone());
    post.tags = normalize_post_tags(post.tags.clone());
    post.post_type =
        trim_to_option(Some(post.post_type.clone())).unwrap_or_else(|| "article".to_string());
    post.image = trim_to_option(post.image.clone());
    post.images = normalize_string_list(Some(post.images.clone()));
    post.status = normalize_post_status(Some(&post.status));
    post.visibility = normalize_post_visibility(Some(&post.visibility));
    // Round-trip the timestamps through parse/format to canonicalize them.
    post.publish_at =
        format_frontmatter_datetime(parse_frontmatter_datetime(post.publish_at.clone()));
    post.unpublish_at =
        format_frontmatter_datetime(parse_frontmatter_datetime(post.unpublish_at.clone()));
    post.canonical_url = normalize_url_like(post.canonical_url.clone());
    post.og_image = normalize_url_like(post.og_image.clone());
    post.redirect_from = normalize_redirect_list(Some(post.redirect_from.clone()));
    post.redirect_to =
        trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string());
    if post.slug.trim().is_empty() {
        return Err(Error::BadRequest("slug is required".to_string()));
    }
    if canonicalize_taxonomy {
        post.category = match post.category.as_deref() {
            Some(category) => ensure_category(ctx, category).await?,
            None => None,
        };
        post.tags = canonicalize_tags(ctx, &post.tags).await?;
    }
    // --- Row lookup: find the row being edited (by hint) and, separately,
    // --- any row already holding the target slug.
    let existing_by_hint = if let Some(hint) = normalized_slug_hint.as_deref() {
        posts::Entity::find()
            .filter(posts::Column::Slug.eq(hint))
            .one(&ctx.db)
            .await?
    } else {
        None
    };
    // Skip the second lookup when the hint row already has the target slug.
    let existing_by_slug =
        if existing_by_hint.as_ref().map(|item| item.slug.as_str()) == Some(post.slug.as_str()) {
            None
        } else {
            posts::Entity::find()
                .filter(posts::Column::Slug.eq(&post.slug))
                .one(&ctx.db)
                .await?
        };
    // A rename whose target slug belongs to a different row is a conflict.
    if let (Some(by_hint), Some(by_slug)) = (&existing_by_hint, &existing_by_slug) {
        if by_hint.id != by_slug.id {
            return Err(Error::BadRequest(format!(
                "markdown post already exists for slug: {}",
                post.slug
            )));
        }
    }
    let has_existing = existing_by_hint.is_some() || existing_by_slug.is_some();
    // Prefer the hinted (pre-rename) row as the update target.
    let mut model = existing_by_hint
        .or(existing_by_slug)
        .map(|item| item.into_active_model())
        .unwrap_or_default();
    post.file_path = virtual_markdown_document_path(&post.slug);
    // Re-render the canonical markdown document from the normalized post.
    let raw_markdown = build_markdown_document(&post);
    apply_markdown_post_to_active_model(&mut model, &post, &raw_markdown);
    if has_existing {
        model.update(&ctx.db).await?;
    } else {
        model.insert(&ctx.db).await?;
    }
    Ok(post)
}
pub async fn rewrite_category_references(
ctx: &AppContext,
current_name: Option<&str>,
current_slug: &str,
next_name: Option<&str>,
) -> Result<usize> {
ensure_markdown_posts_bootstrapped()?;
let mut match_keys = Vec::new();
if let Some(name) = current_name {
let normalized = normalized_match_key(name);
@@ -805,9 +883,9 @@ pub fn rewrite_category_references(
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let mut changed = 0_usize;
let mut posts = load_markdown_posts_from_disk()?;
let posts = load_markdown_posts_from_store(ctx).await?;
for post in &mut posts {
for mut post in posts {
let Some(category) = post.category.as_deref() else {
continue;
};
@@ -816,16 +894,17 @@ pub fn rewrite_category_references(
continue;
}
let existing_slug = post.slug.clone();
match &next_category {
Some(updated_name) if same_text(category, updated_name) => {}
Some(updated_name) => {
post.category = Some(updated_name.clone());
write_markdown_post_to_disk(post)?;
save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1;
}
None => {
post.category = None;
write_markdown_post_to_disk(post)?;
save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1;
}
}
@@ -834,13 +913,12 @@ pub fn rewrite_category_references(
Ok(changed)
}
pub fn rewrite_tag_references(
pub async fn rewrite_tag_references(
ctx: &AppContext,
current_name: Option<&str>,
current_slug: &str,
next_name: Option<&str>,
) -> Result<usize> {
ensure_markdown_posts_bootstrapped()?;
let mut match_keys = Vec::new();
if let Some(name) = current_name {
let normalized = normalized_match_key(name);
@@ -863,9 +941,9 @@ pub fn rewrite_tag_references(
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let mut changed = 0_usize;
let mut posts = load_markdown_posts_from_disk()?;
let posts = load_markdown_posts_from_store(ctx).await?;
for post in &mut posts {
for mut post in posts {
let mut updated_tags = Vec::new();
let mut seen = std::collections::HashSet::new();
let mut post_changed = false;
@@ -889,8 +967,9 @@ pub fn rewrite_tag_references(
}
if post_changed {
let existing_slug = post.slug.clone();
post.tags = updated_tags;
write_markdown_post_to_disk(post)?;
save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1;
}
}
@@ -898,167 +977,43 @@ pub fn rewrite_tag_references(
Ok(changed)
}
async fn dedupe_tags(ctx: &AppContext) -> Result<()> {
let existing_tags = tags::Entity::find()
.order_by_asc(tags::Column::Id)
.all(&ctx.db)
.await?;
let mut seen = std::collections::HashSet::new();
for tag in existing_tags {
let key = if tag.slug.trim().is_empty() {
tag.name.as_deref().map(slugify).unwrap_or_default()
} else {
slugify(&tag.slug)
};
if key.is_empty() || seen.insert(key) {
continue;
}
let _ = tag.delete(&ctx.db).await;
}
Ok(())
pub async fn read_markdown_document_from_store(
ctx: &AppContext,
slug: &str,
) -> Result<(String, String)> {
let post = posts::Entity::find()
.filter(posts::Column::Slug.eq(slug))
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)?;
let document = markdown_document_from_model(&post)?;
Ok((
virtual_markdown_document_path(&document.post.slug),
document.raw_markdown,
))
}
async fn dedupe_categories(ctx: &AppContext) -> Result<()> {
let existing_categories = categories::Entity::find()
.order_by_asc(categories::Column::Id)
.all(&ctx.db)
.await?;
pub async fn upsert_markdown_document(
ctx: &AppContext,
slug_hint: Option<&str>,
markdown: &str,
) -> Result<MarkdownPost> {
let normalized_markdown = normalize_newlines(markdown);
let normalized_slug_hint = slug_hint
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let file_stem = normalized_slug_hint
.as_deref()
.filter(|value| !value.is_empty())
.unwrap_or("post");
let virtual_path = normalized_slug_hint
.as_deref()
.map(virtual_markdown_document_path)
.unwrap_or_else(|| format!("{VIRTUAL_MARKDOWN_PATH_PREFIX}/draft"));
let post = parse_markdown_source(file_stem, &normalized_markdown, &virtual_path)?;
let mut seen = std::collections::HashSet::new();
for category in existing_categories {
let key = if category.slug.trim().is_empty() {
category.name.as_deref().map(slugify).unwrap_or_default()
} else {
slugify(&category.slug)
};
if key.is_empty() || seen.insert(key) {
continue;
}
let _ = category.delete(&ctx.db).await;
}
Ok(())
}
/// Synchronizes the `posts` table with the markdown files on disk.
///
/// Disk is treated as the source of truth: database rows whose slug has no
/// matching markdown file are deleted together with their comments, every
/// on-disk post is upserted into the table, and the tag/category tables are
/// refreshed and de-duplicated afterwards. Returns the parsed on-disk posts.
pub async fn sync_markdown_posts(ctx: &AppContext) -> Result<Vec<MarkdownPost>> {
    let markdown_posts = load_markdown_posts_from_disk()?;
    // Slug set of everything currently present on disk.
    let markdown_slugs = markdown_posts
        .iter()
        .map(|post| post.slug.clone())
        .collect::<std::collections::HashSet<_>>();
    let existing_posts = posts::Entity::find().all(&ctx.db).await?;
    // Drop stale rows (and their comments) that no longer have a file.
    for stale_post in existing_posts
        .into_iter()
        .filter(|post| !markdown_slugs.contains(&post.slug))
    {
        let stale_slug = stale_post.slug.clone();
        let related_comments = comments::Entity::find()
            .filter(comments::Column::PostSlug.eq(&stale_slug))
            .all(&ctx.db)
            .await?;
        for comment in related_comments {
            // Best-effort delete; individual failures are intentionally ignored.
            let _ = comment.delete(&ctx.db).await;
        }
        let _ = stale_post.delete(&ctx.db).await;
    }
    // Upsert every on-disk post into the table.
    for post in &markdown_posts {
        // Resolve canonical category/tag names before persisting.
        let canonical_category = match post.category.as_deref() {
            Some(category) => ensure_category(ctx, category).await?,
            None => None,
        };
        let canonical_tags = canonicalize_tags(ctx, &post.tags).await?;
        let existing = posts::Entity::find()
            .filter(posts::Column::Slug.eq(&post.slug))
            .one(&ctx.db)
            .await?;
        let has_existing = existing.is_some();
        let mut model = existing
            .map(|item| item.into_active_model())
            .unwrap_or_default();
        model.title = Set(Some(post.title.clone()));
        model.slug = Set(post.slug.clone());
        model.description = Set(post.description.clone());
        model.content = Set(Some(post.content.clone()));
        model.category = Set(canonical_category);
        // Empty lists are stored as NULL rather than as empty JSON arrays.
        model.tags = Set(if canonical_tags.is_empty() {
            None
        } else {
            Some(Value::Array(
                canonical_tags.into_iter().map(Value::String).collect(),
            ))
        });
        model.post_type = Set(Some(post.post_type.clone()));
        model.image = Set(post.image.clone());
        model.images = Set(if post.images.is_empty() {
            None
        } else {
            Some(Value::Array(
                post.images
                    .iter()
                    .cloned()
                    .map(Value::String)
                    .collect::<Vec<_>>(),
            ))
        });
        model.pinned = Set(Some(post.pinned));
        model.status = Set(Some(normalize_post_status(Some(&post.status))));
        model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
        model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
        model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
        model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
        model.noindex = Set(Some(post.noindex));
        model.og_image = Set(normalize_url_like(post.og_image.clone()));
        model.redirect_from = Set(if post.redirect_from.is_empty() {
            None
        } else {
            Some(Value::Array(
                post.redirect_from
                    .iter()
                    .cloned()
                    .map(Value::String)
                    .collect::<Vec<_>>(),
            ))
        });
        // Redirect targets are stored without surrounding slashes.
        model.redirect_to = Set(
            trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string()),
        );
        // Best-effort persistence: insert/update errors are intentionally ignored.
        if has_existing {
            let _ = model.update(&ctx.db).await;
        } else {
            let _ = model.insert(&ctx.db).await;
        }
    }
    // Refresh taxonomy tables from the synced posts, then de-duplicate them.
    sync_tags_from_posts(ctx, &markdown_posts).await?;
    dedupe_tags(ctx).await?;
    dedupe_categories(ctx).await?;
    Ok(markdown_posts)
}
pub fn read_markdown_document(slug: &str) -> Result<(String, String)> {
let path = markdown_post_path(slug);
if !path.exists() {
return Err(Error::NotFound);
}
let raw = fs::read_to_string(&path).map_err(io_error)?;
Ok((path.to_string_lossy().to_string(), raw))
save_markdown_post_to_store(ctx, post, normalized_slug_hint.as_deref(), true).await
}
pub async fn write_markdown_document(
@@ -1066,24 +1021,25 @@ pub async fn write_markdown_document(
slug: &str,
markdown: &str,
) -> Result<MarkdownPost> {
ensure_markdown_posts_bootstrapped()?;
let path = markdown_post_path(slug);
fs::write(&path, normalize_newlines(markdown)).map_err(io_error)?;
let updated = parse_markdown_post(&path)?;
sync_markdown_posts(ctx).await?;
Ok(updated)
upsert_markdown_document(ctx, Some(slug), markdown).await
}
pub async fn delete_markdown_post(ctx: &AppContext, slug: &str) -> Result<()> {
ensure_markdown_posts_bootstrapped()?;
let path = markdown_post_path(slug);
if !path.exists() {
return Err(Error::NotFound);
let post = posts::Entity::find()
.filter(posts::Column::Slug.eq(slug))
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)?;
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
}
fs::remove_file(&path).map_err(io_error)?;
sync_markdown_posts(ctx).await?;
post.delete(&ctx.db).await?;
Ok(())
}
@@ -1091,8 +1047,6 @@ pub async fn create_markdown_post(
ctx: &AppContext,
draft: MarkdownPostDraft,
) -> Result<MarkdownPost> {
ensure_markdown_posts_bootstrapped()?;
let title = draft.title.trim().to_string();
if title.is_empty() {
return Err(Error::BadRequest("title is required".to_string()));
@@ -1110,6 +1064,17 @@ pub async fn create_markdown_post(
return Err(Error::BadRequest("slug is required".to_string()));
}
if posts::Entity::find()
.filter(posts::Column::Slug.eq(&slug))
.one(&ctx.db)
.await?
.is_some()
{
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {slug}"
)));
}
let post = MarkdownPost {
title,
slug: slug.clone(),
@@ -1143,28 +1108,16 @@ pub async fn create_markdown_post(
redirect_from: normalize_redirect_list(Some(draft.redirect_from)),
redirect_to: trim_to_option(draft.redirect_to)
.map(|item| item.trim_matches('/').to_string()),
file_path: markdown_post_path(&slug).to_string_lossy().to_string(),
file_path: virtual_markdown_document_path(&slug),
};
let path = markdown_post_path(&slug);
if path.exists() {
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {slug}"
)));
}
fs::write(&path, build_markdown_document(&post)).map_err(io_error)?;
sync_markdown_posts(ctx).await?;
parse_markdown_post(&path)
save_markdown_post_to_store(ctx, post, Some(&slug), true).await
}
pub async fn import_markdown_documents(
ctx: &AppContext,
files: Vec<MarkdownImportFile>,
) -> Result<Vec<MarkdownPost>> {
ensure_markdown_posts_bootstrapped()?;
let mut imported_slugs = Vec::new();
let mut imported = Vec::new();
for file in files {
let path = Path::new(&file.file_name);
@@ -1194,15 +1147,8 @@ pub async fn import_markdown_documents(
continue;
}
fs::write(markdown_post_path(&slug), normalize_newlines(&file.content))
.map_err(io_error)?;
imported_slugs.push(slug);
imported.push(upsert_markdown_document(ctx, Some(&slug), &file.content).await?);
}
sync_markdown_posts(ctx).await?;
imported_slugs
.into_iter()
.map(|slug| parse_markdown_post(&markdown_post_path(&slug)))
.collect()
Ok(imported)
}

View File

@@ -2,7 +2,6 @@ use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, Order, QueryFilter, QueryOrder, QuerySelect, Set,
};
use std::fs;
use crate::{
controllers::admin::AdminIdentity,
@@ -48,10 +47,10 @@ fn trim_to_option(value: Option<String>) -> Option<String> {
fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
let normalized = markdown.replace("\r\n", "\n");
if let Some(frontmatter) = normalized
.strip_prefix("---\n")
.and_then(|rest| rest.split_once("\n---\n").map(|(frontmatter, _)| frontmatter))
{
if let Some(frontmatter) = normalized.strip_prefix("---\n").and_then(|rest| {
rest.split_once("\n---\n")
.map(|(frontmatter, _)| frontmatter)
}) {
for line in frontmatter.lines() {
let trimmed = line.trim();
if let Some(raw) = trimmed.strip_prefix("title:") {
@@ -63,14 +62,16 @@ fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
}
}
normalized.lines().find_map(|line| {
line.trim()
.strip_prefix("# ")
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
})
.or_else(|| trim_to_option(Some(slug.to_string())))
normalized
.lines()
.find_map(|line| {
line.trim()
.strip_prefix("# ")
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
})
.or_else(|| trim_to_option(Some(slug.to_string())))
}
async fn lookup_post_title(ctx: &AppContext, slug: &str) -> Option<String> {
@@ -122,7 +123,7 @@ pub async fn capture_current_snapshot(
reason: Option<&str>,
metadata: Option<serde_json::Value>,
) -> Result<Option<post_revisions::Model>> {
let Ok((_path, markdown)) = content::read_markdown_document(slug) else {
let Ok((_path, markdown)) = content::read_markdown_document_from_store(ctx, slug).await else {
return Ok(None);
};
@@ -136,17 +137,14 @@ pub async fn list_revisions(
slug: Option<&str>,
limit: u64,
) -> Result<Vec<post_revisions::Model>> {
let mut query = post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
let mut query =
post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
if let Some(slug) = slug.map(str::trim).filter(|value| !value.is_empty()) {
query = query.filter(post_revisions::Column::PostSlug.eq(slug));
}
query
.limit(limit)
.all(&ctx.db)
.await
.map_err(Into::into)
query.limit(limit).all(&ctx.db).await.map_err(Into::into)
}
pub async fn get_revision(ctx: &AppContext, id: i32) -> Result<post_revisions::Model> {
@@ -187,13 +185,18 @@ pub async fn restore_revision(
let markdown = match restore_mode {
RestoreMode::Full => revision_markdown.clone(),
RestoreMode::Markdown | RestoreMode::Metadata => {
let (_path, current_markdown) = content::read_markdown_document(&slug).map_err(|_| {
Error::BadRequest("当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string())
})?;
let (_path, current_markdown) = content::read_markdown_document_from_store(ctx, &slug)
.await
.map_err(|_| {
Error::BadRequest(
"当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string(),
)
})?;
let virtual_path = content::virtual_markdown_document_path(&slug);
let revision_post =
content::parse_markdown_source(&slug, &revision_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
content::parse_markdown_source(&slug, &revision_markdown, &virtual_path)?;
let current_post =
content::parse_markdown_source(&slug, &current_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
content::parse_markdown_source(&slug, &current_markdown, &virtual_path)?;
let mut merged = current_post.clone();
match restore_mode {
RestoreMode::Markdown => {
@@ -224,10 +227,7 @@ pub async fn restore_revision(
}
};
fs::create_dir_all(content::MARKDOWN_POSTS_DIR).map_err(|error| Error::BadRequest(error.to_string()))?;
fs::write(content::markdown_post_path(&slug), markdown.replace("\r\n", "\n"))
.map_err(|error| Error::BadRequest(error.to_string()))?;
content::sync_markdown_posts(ctx).await?;
content::write_markdown_document(ctx, &slug, &markdown).await?;
let _ = capture_snapshot_from_markdown(
ctx,

View File

@@ -20,6 +20,27 @@ pub enum TurnstileScope {
Subscription,
}
/// Verification strategy applied to a public-facing form scope.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerificationMode {
    /// No verification challenge.
    Off,
    /// Built-in captcha challenge (stored as "captcha").
    Captcha,
    /// Cloudflare Turnstile challenge (stored as "turnstile").
    Turnstile,
}

/// Stored-setting string for [`VerificationMode::Off`].
pub const VERIFICATION_MODE_OFF: &str = "off";
/// Stored-setting string for [`VerificationMode::Captcha`].
pub const VERIFICATION_MODE_CAPTCHA: &str = "captcha";
/// Stored-setting string for [`VerificationMode::Turnstile`].
pub const VERIFICATION_MODE_TURNSTILE: &str = "turnstile";

impl VerificationMode {
    /// Returns the canonical setting string for this mode.
    pub const fn as_str(self) -> &'static str {
        match self {
            VerificationMode::Off => VERIFICATION_MODE_OFF,
            VerificationMode::Captcha => VERIFICATION_MODE_CAPTCHA,
            VerificationMode::Turnstile => VERIFICATION_MODE_TURNSTILE,
        }
    }
}
#[derive(Clone, Debug, Deserialize)]
struct TurnstileVerifyResponse {
success: bool,
@@ -56,6 +77,15 @@ fn configured_value(value: Option<&String>) -> Option<String> {
})
}
pub fn normalize_verification_mode(value: Option<&str>) -> Option<VerificationMode> {
match value?.trim().to_ascii_lowercase().as_str() {
VERIFICATION_MODE_OFF => Some(VerificationMode::Off),
VERIFICATION_MODE_CAPTCHA | "normal" | "simple" => Some(VerificationMode::Captcha),
VERIFICATION_MODE_TURNSTILE => Some(VerificationMode::Turnstile),
_ => None,
}
}
/// Normalizes an optional client IP string: trims it via `trim_to_option`
/// and truncates the result to at most 96 characters.
fn normalize_ip(value: Option<&str>) -> Option<String> {
    let trimmed = trim_to_option(value)?;
    // Defensive length cap before the value is stored/forwarded.
    Some(trimmed.chars().take(96).collect::<String>())
}
@@ -89,17 +119,48 @@ pub fn secret_key_configured(settings: &site_settings::Model) -> bool {
secret_key(settings).is_some()
}
fn scope_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
fn legacy_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
match scope {
TurnstileScope::Comment => settings.comment_turnstile_enabled.unwrap_or(false),
TurnstileScope::Subscription => settings.subscription_turnstile_enabled.unwrap_or(false),
TurnstileScope::Comment => {
if settings.comment_turnstile_enabled.unwrap_or(false) {
VerificationMode::Turnstile
} else {
VerificationMode::Captcha
}
}
TurnstileScope::Subscription => {
if settings.subscription_turnstile_enabled.unwrap_or(false) {
VerificationMode::Turnstile
} else {
VerificationMode::Off
}
}
}
}
/// Reads the verification mode configured for `scope` from site settings,
/// falling back to the legacy boolean toggles (via `legacy_mode`) when no
/// valid explicit mode is stored.
pub fn selected_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
    let configured = match scope {
        TurnstileScope::Comment => settings.comment_verification_mode.as_deref(),
        TurnstileScope::Subscription => settings.subscription_verification_mode.as_deref(),
    };
    match normalize_verification_mode(configured) {
        Some(mode) => mode,
        None => legacy_mode(settings, scope),
    }
}
/// Resolves the mode that will actually be enforced for `scope`.
///
/// A selected Turnstile mode is honored only when both the site key and the
/// secret key are configured; otherwise it downgrades to the captcha mode.
/// All other selections pass through unchanged.
pub fn effective_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
    let mode = selected_mode(settings, scope);
    if mode != VerificationMode::Turnstile {
        return mode;
    }
    if site_key_configured(settings) && secret_key_configured(settings) {
        VerificationMode::Turnstile
    } else {
        // Turnstile requested but keys are incomplete: fall back to captcha.
        VerificationMode::Captcha
    }
}
pub fn is_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
scope_enabled(settings, scope)
&& site_key_configured(settings)
&& secret_key_configured(settings)
effective_mode(settings, scope) == VerificationMode::Turnstile
}
pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Result<bool> {
@@ -107,7 +168,7 @@ pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Resu
Ok(is_enabled(&settings, scope))
}
async fn verify_token(
pub async fn verify_token(
settings: &site_settings::Model,
token: Option<&str>,
client_ip: Option<&str>,
@@ -173,7 +234,7 @@ pub async fn verify_if_enabled(
client_ip: Option<&str>,
) -> Result<bool> {
let settings = crate::controllers::site_settings::load_current(ctx).await?;
if !is_enabled(&settings, scope) {
if effective_mode(&settings, scope) != VerificationMode::Turnstile {
return Ok(false);
}