feat: refresh content workflow and verification settings
All checks were successful
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 43s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Successful in 25m9s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 51s

This commit is contained in:
2026-04-01 18:47:17 +08:00
parent f2c07df320
commit 7de4ddc3ee
66 changed files with 1455 additions and 2759 deletions

View File

@@ -331,6 +331,24 @@ impl Hooks for App {
.await?;
if existing.is_none() {
let comment_verification_mode = settings["comment_verification_mode"]
.as_str()
.map(ToString::to_string);
let subscription_verification_mode = settings
["subscription_verification_mode"]
.as_str()
.map(ToString::to_string);
let comment_turnstile_enabled = settings["comment_turnstile_enabled"]
.as_bool()
.or(comment_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let subscription_turnstile_enabled = settings
["subscription_turnstile_enabled"]
.as_bool()
.or(subscription_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let tech_stack = settings["tech_stack"]
.as_array()
.map(|items| {
@@ -408,6 +426,16 @@ impl Hooks for App {
paragraph_comments_enabled: Set(settings["paragraph_comments_enabled"]
.as_bool()
.or(Some(true))),
comment_verification_mode: Set(comment_verification_mode),
comment_turnstile_enabled: Set(comment_turnstile_enabled),
subscription_verification_mode: Set(subscription_verification_mode),
subscription_turnstile_enabled: Set(subscription_turnstile_enabled),
turnstile_site_key: Set(settings["turnstile_site_key"]
.as_str()
.map(ToString::to_string)),
turnstile_secret_key: Set(settings["turnstile_secret_key"]
.as_str()
.map(ToString::to_string)),
ai_provider: Set(settings["ai_provider"].as_str().map(ToString::to_string)),
ai_api_base: Set(settings["ai_api_base"].as_str().map(ToString::to_string)),
ai_api_key: Set(settings["ai_api_key"].as_str().map(ToString::to_string)),

View File

@@ -1,6 +1,6 @@
use axum::{
extract::{Multipart, Query},
http::{header, HeaderMap},
http::{HeaderMap, header},
};
use loco_rs::prelude::*;
use sea_orm::{
@@ -170,7 +170,9 @@ pub struct AdminSiteSettingsResponse {
pub music_playlist: Vec<site_settings::MusicTrackPayload>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_verification_mode: String,
pub comment_turnstile_enabled: bool,
pub subscription_verification_mode: String,
pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>,
@@ -686,9 +688,7 @@ fn build_media_object_response(
title: metadata.and_then(|entry| entry.title.clone()),
alt_text: metadata.and_then(|entry| entry.alt_text.clone()),
caption: metadata.and_then(|entry| entry.caption.clone()),
tags: metadata
.map(media_assets::tag_list)
.unwrap_or_default(),
tags: metadata.map(media_assets::tag_list).unwrap_or_default(),
notes: metadata.and_then(|entry| entry.notes.clone()),
}
}
@@ -724,6 +724,14 @@ fn build_settings_response(
) -> AdminSiteSettingsResponse {
let ai_providers = site_settings::ai_provider_configs(&item);
let ai_active_provider_id = site_settings::active_ai_provider_id(&item);
let comment_verification_mode = crate::services::turnstile::selected_mode(
&item,
crate::services::turnstile::TurnstileScope::Comment,
);
let subscription_verification_mode = crate::services::turnstile::selected_mode(
&item,
crate::services::turnstile::TurnstileScope::Subscription,
);
let turnstile_site_key = crate::services::turnstile::site_key(&item);
let turnstile_secret_key = crate::services::turnstile::secret_key(&item);
let web_push_vapid_public_key = crate::services::web_push::public_key(&item);
@@ -751,8 +759,16 @@ fn build_settings_response(
music_playlist: music_playlist_values(&item.music_playlist),
ai_enabled: item.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true),
comment_turnstile_enabled: item.comment_turnstile_enabled.unwrap_or(false),
subscription_turnstile_enabled: item.subscription_turnstile_enabled.unwrap_or(false),
comment_verification_mode: comment_verification_mode.as_str().to_string(),
comment_turnstile_enabled: matches!(
comment_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
subscription_verification_mode: subscription_verification_mode.as_str().to_string(),
subscription_turnstile_enabled: matches!(
subscription_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
web_push_enabled: item.web_push_enabled.unwrap_or(false),
turnstile_site_key,
turnstile_secret_key,
@@ -887,7 +903,6 @@ pub async fn session_logout(headers: HeaderMap, State(ctx): State<AppContext>) -
#[debug_handler]
pub async fn dashboard(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let all_posts = posts::Entity::find().all(&ctx.db).await?;
let total_posts = all_posts.len() as u64;
@@ -1190,8 +1205,8 @@ pub async fn list_media_objects(
check_auth(&headers)?;
let settings = storage::require_r2_settings(&ctx).await?;
let objects = storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200))
.await?;
let objects =
storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200)).await?;
let keys = objects
.iter()
.map(|item| item.key.clone())
@@ -1228,7 +1243,11 @@ pub async fn delete_media_object(
storage::delete_object(&ctx, key).await?;
if let Err(error) = media_assets::delete_by_key(&ctx, key).await {
tracing::warn!(?error, key, "failed to delete media metadata after object deletion");
tracing::warn!(
?error,
key,
"failed to delete media metadata after object deletion"
);
}
format::json(AdminMediaDeleteResponse {
@@ -1325,7 +1344,11 @@ pub async fn batch_delete_media_objects(
match storage::delete_object(&ctx, &key).await {
Ok(()) => {
if let Err(error) = media_assets::delete_by_key(&ctx, &key).await {
tracing::warn!(?error, key, "failed to delete media metadata after batch removal");
tracing::warn!(
?error,
key,
"failed to delete media metadata after batch removal"
);
}
deleted.push(key)
}
@@ -1955,7 +1978,10 @@ pub fn routes() -> Routes {
"/storage/media/batch-delete",
post(batch_delete_media_objects),
)
.add("/storage/media/metadata", patch(update_media_object_metadata))
.add(
"/storage/media/metadata",
patch(update_media_object_metadata),
)
.add("/storage/media/replace", post(replace_media_object))
.add(
"/comments/blacklist",

View File

@@ -137,7 +137,10 @@ fn tag_name(item: &tags::Model) -> String {
item.name.clone().unwrap_or_else(|| item.slug.clone())
}
fn build_category_record(item: &categories::Model, post_items: &[posts::Model]) -> AdminCategoryRecord {
fn build_category_record(
item: &categories::Model,
post_items: &[posts::Model],
) -> AdminCategoryRecord {
let name = category_name(item);
let aliases = [normalized_token(&name), normalized_token(&item.slug)];
let count = post_items
@@ -224,7 +227,11 @@ async fn ensure_category_slug_unique(
Ok(())
}
async fn ensure_tag_slug_unique(ctx: &AppContext, slug: &str, exclude_id: Option<i32>) -> Result<()> {
async fn ensure_tag_slug_unique(
ctx: &AppContext,
slug: &str,
exclude_id: Option<i32>,
) -> Result<()> {
if let Some(existing) = tags::Entity::find()
.filter(tags::Column::Slug.eq(slug))
.one(&ctx.db)
@@ -243,9 +250,11 @@ async fn load_posts(ctx: &AppContext) -> Result<Vec<posts::Model>> {
}
#[debug_handler]
pub async fn list_categories(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
pub async fn list_categories(
headers: HeaderMap,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let items = categories::Entity::find()
.order_by_asc(categories::Column::Slug)
@@ -254,7 +263,8 @@ pub async fn list_categories(headers: HeaderMap, State(ctx): State<AppContext>)
let post_items = load_posts(&ctx).await?;
format::json(
items.into_iter()
items
.into_iter()
.map(|item| build_category_record(&item, &post_items))
.collect::<Vec<_>>(),
)
@@ -312,7 +322,13 @@ pub async fn update_category(
.filter(|value| !value.is_empty())
!= Some(name.as_str())
{
content::rewrite_category_references(previous_name.as_deref(), &previous_slug, Some(&name))?;
content::rewrite_category_references(
&ctx,
previous_name.as_deref(),
&previous_slug,
Some(&name),
)
.await?;
}
let mut active = item.into_active_model();
@@ -324,7 +340,6 @@ pub async fn update_category(
active.seo_title = Set(trim_to_option(payload.seo_title));
active.seo_description = Set(trim_to_option(payload.seo_description));
let updated = active.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
let post_items = load_posts(&ctx).await?;
format::json(build_category_record(&updated, &post_items))
@@ -339,9 +354,8 @@ pub async fn delete_category(
check_auth(&headers)?;
let item = load_category(&ctx, id).await?;
content::rewrite_category_references(item.name.as_deref(), &item.slug, None)?;
content::rewrite_category_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty()
}
@@ -349,7 +363,6 @@ pub async fn delete_category(
#[debug_handler]
pub async fn list_tags(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let items = tags::Entity::find()
.order_by_asc(tags::Column::Slug)
@@ -358,7 +371,8 @@ pub async fn list_tags(headers: HeaderMap, State(ctx): State<AppContext>) -> Res
let post_items = load_posts(&ctx).await?;
format::json(
items.into_iter()
items
.into_iter()
.map(|item| build_tag_record(&item, &post_items))
.collect::<Vec<_>>(),
)
@@ -416,7 +430,13 @@ pub async fn update_tag(
.filter(|value| !value.is_empty())
!= Some(name.as_str())
{
content::rewrite_tag_references(previous_name.as_deref(), &previous_slug, Some(&name))?;
content::rewrite_tag_references(
&ctx,
previous_name.as_deref(),
&previous_slug,
Some(&name),
)
.await?;
}
let mut active = item.into_active_model();
@@ -428,7 +448,6 @@ pub async fn update_tag(
active.seo_title = Set(trim_to_option(payload.seo_title));
active.seo_description = Set(trim_to_option(payload.seo_description));
let updated = active.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
let post_items = load_posts(&ctx).await?;
format::json(build_tag_record(&updated, &post_items))
@@ -443,9 +462,8 @@ pub async fn delete_tag(
check_auth(&headers)?;
let item = load_tag(&ctx, id).await?;
content::rewrite_tag_references(item.name.as_deref(), &item.slug, None)?;
content::rewrite_tag_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty()
}

View File

@@ -112,7 +112,9 @@ fn build_summary(item: &categories::Model, post_items: &[posts::Model]) -> Categ
post.category
.as_deref()
.map(str::trim)
.is_some_and(|value| value.eq_ignore_ascii_case(&name) || value.eq_ignore_ascii_case(&item.slug))
.is_some_and(|value| {
value.eq_ignore_ascii_case(&name) || value.eq_ignore_ascii_case(&item.slug)
})
})
.count();
@@ -151,8 +153,6 @@ async fn load_item(ctx: &AppContext, id: i32) -> Result<categories::Model> {
#[debug_handler]
pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let category_items = categories::Entity::find()
.order_by_asc(categories::Column::Slug)
.all(&ctx.db)
@@ -224,10 +224,12 @@ pub async fn update(
!= Some(name.as_str())
{
content::rewrite_category_references(
&ctx,
previous_name.as_deref(),
&previous_slug,
Some(&name),
)?;
)
.await?;
}
let mut item = item.into_active_model();
@@ -239,16 +241,14 @@ pub async fn update(
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(build_record(item))
}
#[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
let item = load_item(&ctx, id).await?;
content::rewrite_category_references(item.name.as_deref(), &item.slug, None)?;
content::rewrite_category_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty()
}

View File

@@ -80,7 +80,9 @@ fn post_has_tag(post: &Model, wanted_tag: &str) -> bool {
fn effective_status(post: &Model) -> String {
content::effective_post_state(
post.status.as_deref().unwrap_or(content::POST_STATUS_PUBLISHED),
post.status
.as_deref()
.unwrap_or(content::POST_STATUS_PUBLISHED),
post.publish_at,
post.unpublish_at,
Utc::now().fixed_offset(),
@@ -157,16 +159,18 @@ fn parse_optional_markdown_datetime(
return None;
}
chrono::DateTime::parse_from_rfc3339(value).ok().or_else(|| {
chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d")
.ok()
.and_then(|date| date.and_hms_opt(0, 0, 0))
.and_then(|naive| {
chrono::FixedOffset::east_opt(0)?
.from_local_datetime(&naive)
.single()
})
})
chrono::DateTime::parse_from_rfc3339(value)
.ok()
.or_else(|| {
chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d")
.ok()
.and_then(|date| date.and_hms_opt(0, 0, 0))
.and_then(|naive| {
chrono::FixedOffset::east_opt(0)?
.from_local_datetime(&naive)
.single()
})
})
}
fn markdown_post_listed_publicly(post: &content::MarkdownPost) -> bool {
@@ -253,7 +257,9 @@ fn should_include_post(
}
if let Some(status) = &query.status {
if effective_status(post) != content::normalize_post_status(Some(status)) && effective_status(post) != status.trim().to_ascii_lowercase() {
if effective_status(post) != content::normalize_post_status(Some(status))
&& effective_status(post) != status.trim().to_ascii_lowercase()
{
return false;
}
}
@@ -397,22 +403,22 @@ impl Params {
item.image = Set(self.image.clone());
item.images = Set(self.images.clone());
item.pinned = Set(self.pinned);
item.status = Set(self.status.clone().map(|value| requested_status(Some(value), None)));
item.visibility = Set(
self.visibility
.clone()
.map(|value| normalize_visibility(Some(value))),
);
item.publish_at = Set(
self.publish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()),
);
item.unpublish_at = Set(
self.unpublish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()),
);
item.status = Set(self
.status
.clone()
.map(|value| requested_status(Some(value), None)));
item.visibility = Set(self
.visibility
.clone()
.map(|value| normalize_visibility(Some(value))));
item.publish_at = Set(self
.publish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()));
item.unpublish_at = Set(self
.unpublish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()));
item.canonical_url = Set(self.canonical_url.clone());
item.noindex = Set(self.noindex);
item.og_image = Set(self.og_image.clone());
@@ -526,8 +532,6 @@ pub async fn list(
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers);
let include_private = preview && query.include_private.unwrap_or(true);
let include_redirects = query.include_redirects.unwrap_or(preview);
@@ -539,7 +543,9 @@ pub async fn list(
let filtered = posts
.into_iter()
.filter(|post| should_include_post(post, &query, preview, include_private, include_redirects))
.filter(|post| {
should_include_post(post, &query, preview, include_private, include_redirects)
})
.collect::<Vec<_>>();
format::json(filtered)
@@ -551,8 +557,6 @@ pub async fn list_page(
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.filters.preview, &headers);
let include_private = preview && query.filters.include_private.unwrap_or(true);
let include_redirects = query.filters.include_redirects.unwrap_or(preview);
@@ -672,7 +676,10 @@ pub async fn update(
.into_iter()
.filter_map(|tag| tag.as_str().map(ToString::to_string))
.collect(),
post_type: item.post_type.clone().unwrap_or_else(|| "article".to_string()),
post_type: item
.post_type
.clone()
.unwrap_or_else(|| "article".to_string()),
image: item.image.clone(),
images: item
.images
@@ -684,7 +691,10 @@ pub async fn update(
.filter_map(|tag| tag.as_str().map(ToString::to_string))
.collect(),
pinned: item.pinned.unwrap_or(false),
status: item.status.clone().unwrap_or_else(|| content::POST_STATUS_PUBLISHED.to_string()),
status: item
.status
.clone()
.unwrap_or_else(|| content::POST_STATUS_PUBLISHED.to_string()),
visibility: item
.visibility
.clone()
@@ -696,9 +706,7 @@ pub async fn update(
og_image: item.og_image.clone(),
redirect_from: content::post_redirects_from_json(&item.redirect_from),
redirect_to: item.redirect_to.clone(),
file_path: content::markdown_post_path(&item.slug)
.to_string_lossy()
.to_string(),
file_path: content::virtual_markdown_document_path(&item.slug),
};
let _ = subscriptions::notify_post_published(&ctx, &post).await;
}
@@ -736,7 +744,6 @@ pub async fn get_one(
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers);
let post = load_item(&ctx, id).await?;
@@ -754,7 +761,6 @@ pub async fn get_by_slug(
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers);
let include_private = preview && query.include_private.unwrap_or(true);
let post = resolve_post_by_slug(&ctx, &slug).await?;
@@ -780,8 +786,7 @@ pub async fn get_markdown_by_slug(
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let (path, markdown) = content::read_markdown_document(&slug)?;
let (path, markdown) = content::read_markdown_document_from_store(&ctx, &slug).await?;
format::json(MarkdownDocumentResponse {
slug,
path,
@@ -807,7 +812,7 @@ pub async fn update_markdown_by_slug(
)
.await?;
let updated = content::write_markdown_document(&ctx, &slug, &params.markdown).await?;
let (path, markdown) = content::read_markdown_document(&updated.slug)?;
let (path, markdown) = content::read_markdown_document_from_store(&ctx, &updated.slug).await?;
let _ = post_revisions::capture_snapshot_from_markdown(
&ctx,
Some(&actor),
@@ -874,7 +879,7 @@ pub async fn create_markdown(
},
)
.await?;
let (path, markdown) = content::read_markdown_document(&created.slug)?;
let (path, markdown) = content::read_markdown_document_from_store(&ctx, &created.slug).await?;
let _ = post_revisions::capture_snapshot_from_markdown(
&ctx,
Some(&actor),
@@ -936,7 +941,9 @@ pub async fn import_markdown(
let imported = content::import_markdown_documents(&ctx, files).await?;
for item in &imported {
if let Ok((_path, markdown)) = content::read_markdown_document(&item.slug) {
if let Ok((_path, markdown)) =
content::read_markdown_document_from_store(&ctx, &item.slug).await
{
let _ = post_revisions::capture_snapshot_from_markdown(
&ctx,
Some(&actor),

View File

@@ -63,9 +63,7 @@ fn levenshtein_distance(left: &str, right: &str) -> usize {
let mut curr = vec![i + 1; right_chars.len() + 1];
for (j, right_ch) in right_chars.iter().enumerate() {
let cost = usize::from(left_ch != *right_ch);
curr[j + 1] = (curr[j] + 1)
.min(prev[j + 1] + 1)
.min(prev[j] + cost);
curr[j + 1] = (curr[j] + 1).min(prev[j + 1] + 1).min(prev[j] + cost);
}
prev = curr;
}
@@ -157,7 +155,11 @@ fn candidate_terms(posts: &[posts::Model]) -> Vec<String> {
candidates
}
fn find_spelling_fallback(query: &str, posts: &[posts::Model], synonym_groups: &[Vec<String>]) -> Vec<String> {
fn find_spelling_fallback(
query: &str,
posts: &[posts::Model],
synonym_groups: &[Vec<String>],
) -> Vec<String> {
let primary_token = tokenize(query).into_iter().next().unwrap_or_default();
if primary_token.len() < 3 {
return Vec::new();
@@ -397,7 +399,6 @@ async fn build_search_results(
headers: &HeaderMap,
) -> Result<(String, bool, Vec<SearchResult>)> {
let preview_search = is_preview_search(query, headers);
content::sync_markdown_posts(ctx).await?;
let q = query.q.clone().unwrap_or_default().trim().to_string();
if q.is_empty() {
@@ -442,7 +443,12 @@ async fn build_search_results(
});
}
if let Some(tag) = query.tag.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
if let Some(tag) = query
.tag
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
all_posts.retain(|post| post_has_tag(post, tag));
}
@@ -550,7 +556,8 @@ pub async fn search_page(
let page_size = query.page_size.unwrap_or(20).clamp(1, 100);
let sort_by = normalize_search_sort_by(query.sort_by.as_deref());
let sort_order = normalize_sort_order(query.sort_order.as_deref(), &sort_by);
let (q, preview_search, mut results) = build_search_results(&ctx, &query.search, &headers).await?;
let (q, preview_search, mut results) =
build_search_results(&ctx, &query.search, &headers).await?;
if q.is_empty() {
return format::json(PagedSearchResponse {

View File

@@ -93,8 +93,12 @@ pub struct SiteSettingsPayload {
pub ai_enabled: Option<bool>,
#[serde(default, alias = "paragraphCommentsEnabled")]
pub paragraph_comments_enabled: Option<bool>,
#[serde(default, alias = "commentVerificationMode")]
pub comment_verification_mode: Option<String>,
#[serde(default, alias = "commentTurnstileEnabled")]
pub comment_turnstile_enabled: Option<bool>,
#[serde(default, alias = "subscriptionVerificationMode")]
pub subscription_verification_mode: Option<String>,
#[serde(default, alias = "subscriptionTurnstileEnabled")]
pub subscription_turnstile_enabled: Option<bool>,
#[serde(default, alias = "webPushEnabled")]
@@ -195,7 +199,9 @@ pub struct PublicSiteSettingsResponse {
pub music_playlist: Option<serde_json::Value>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_verification_mode: String,
pub comment_turnstile_enabled: bool,
pub subscription_verification_mode: String,
pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>,
@@ -270,6 +276,9 @@ pub(crate) fn default_subscription_popup_delay_seconds() -> i32 {
18
}
const DEFAULT_TURNSTILE_SITE_KEY: &str = "0x4AAAAAACy58kMBSwXwqMhx";
const DEFAULT_TURNSTILE_SECRET_KEY: &str = "0x4AAAAAACy58m3gYfSqM-VIz4QK4wuO73U";
fn normalize_string_list(values: Vec<String>) -> Vec<String> {
values
.into_iter()
@@ -549,11 +558,48 @@ impl SiteSettingsPayload {
if let Some(paragraph_comments_enabled) = self.paragraph_comments_enabled {
item.paragraph_comments_enabled = Some(paragraph_comments_enabled);
}
if let Some(comment_turnstile_enabled) = self.comment_turnstile_enabled {
if let Some(comment_verification_mode) = self
.comment_verification_mode
.as_deref()
.and_then(|value| crate::services::turnstile::normalize_verification_mode(Some(value)))
{
item.comment_verification_mode = Some(comment_verification_mode.as_str().to_string());
item.comment_turnstile_enabled = Some(matches!(
comment_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
));
} else if let Some(comment_turnstile_enabled) = self.comment_turnstile_enabled {
item.comment_turnstile_enabled = Some(comment_turnstile_enabled);
item.comment_verification_mode = Some(
if comment_turnstile_enabled {
crate::services::turnstile::VERIFICATION_MODE_TURNSTILE
} else {
crate::services::turnstile::VERIFICATION_MODE_CAPTCHA
}
.to_string(),
);
}
if let Some(subscription_turnstile_enabled) = self.subscription_turnstile_enabled {
if let Some(subscription_verification_mode) = self
.subscription_verification_mode
.as_deref()
.and_then(|value| crate::services::turnstile::normalize_verification_mode(Some(value)))
{
item.subscription_verification_mode =
Some(subscription_verification_mode.as_str().to_string());
item.subscription_turnstile_enabled = Some(matches!(
subscription_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
));
} else if let Some(subscription_turnstile_enabled) = self.subscription_turnstile_enabled {
item.subscription_turnstile_enabled = Some(subscription_turnstile_enabled);
item.subscription_verification_mode = Some(
if subscription_turnstile_enabled {
crate::services::turnstile::VERIFICATION_MODE_TURNSTILE
} else {
crate::services::turnstile::VERIFICATION_MODE_OFF
}
.to_string(),
);
}
if let Some(web_push_enabled) = self.web_push_enabled {
item.web_push_enabled = Some(web_push_enabled);
@@ -763,11 +809,17 @@ fn default_payload() -> SiteSettingsPayload {
]),
ai_enabled: Some(false),
paragraph_comments_enabled: Some(true),
comment_verification_mode: Some(
crate::services::turnstile::VERIFICATION_MODE_CAPTCHA.to_string(),
),
comment_turnstile_enabled: Some(false),
subscription_verification_mode: Some(
crate::services::turnstile::VERIFICATION_MODE_OFF.to_string(),
),
subscription_turnstile_enabled: Some(false),
web_push_enabled: Some(false),
turnstile_site_key: None,
turnstile_secret_key: None,
turnstile_site_key: Some(DEFAULT_TURNSTILE_SITE_KEY.to_string()),
turnstile_secret_key: Some(DEFAULT_TURNSTILE_SECRET_KEY.to_string()),
web_push_vapid_public_key: None,
web_push_vapid_private_key: None,
web_push_vapid_subject: None,
@@ -835,11 +887,11 @@ pub(crate) async fn load_current(ctx: &AppContext) -> Result<Model> {
fn public_response(model: Model) -> PublicSiteSettingsResponse {
let turnstile_site_key = crate::services::turnstile::site_key(&model);
let web_push_vapid_public_key = crate::services::web_push::public_key(&model);
let comment_turnstile_enabled = crate::services::turnstile::is_enabled(
let comment_verification_mode = crate::services::turnstile::effective_mode(
&model,
crate::services::turnstile::TurnstileScope::Comment,
);
let subscription_turnstile_enabled = crate::services::turnstile::is_enabled(
let subscription_verification_mode = crate::services::turnstile::effective_mode(
&model,
crate::services::turnstile::TurnstileScope::Subscription,
);
@@ -866,8 +918,16 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
music_playlist: model.music_playlist,
ai_enabled: model.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true),
comment_turnstile_enabled,
subscription_turnstile_enabled,
comment_verification_mode: comment_verification_mode.as_str().to_string(),
comment_turnstile_enabled: matches!(
comment_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
subscription_verification_mode: subscription_verification_mode.as_str().to_string(),
subscription_turnstile_enabled: matches!(
subscription_verification_mode,
crate::services::turnstile::VerificationMode::Turnstile
),
web_push_enabled,
turnstile_site_key,
web_push_vapid_public_key,
@@ -890,8 +950,6 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
#[debug_handler]
pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let site_settings = public_response(load_current(&ctx).await?);
let posts = posts::Entity::find()
.order_by_desc(posts::Column::CreatedAt)

View File

@@ -14,6 +14,10 @@ pub struct PublicSubscriptionPayload {
pub source: Option<String>,
#[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>,
#[serde(default, alias = "captchaToken")]
pub captcha_token: Option<String>,
#[serde(default, alias = "captchaAnswer")]
pub captcha_answer: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -23,6 +27,10 @@ pub struct PublicBrowserPushSubscriptionPayload {
pub source: Option<String>,
#[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>,
#[serde(default, alias = "captchaToken")]
pub captcha_token: Option<String>,
#[serde(default, alias = "captchaAnswer")]
pub captcha_answer: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -81,6 +89,28 @@ fn public_browser_push_metadata(
})
}
async fn verify_subscription_human_check(
settings: &crate::models::_entities::site_settings::Model,
turnstile_token: Option<&str>,
captcha_token: Option<&str>,
captcha_answer: Option<&str>,
client_ip: Option<&str>,
) -> Result<()> {
match turnstile::effective_mode(settings, turnstile::TurnstileScope::Subscription) {
turnstile::VerificationMode::Off => Ok(()),
turnstile::VerificationMode::Captcha => {
crate::services::comment_guard::verify_captcha_solution(
captcha_token,
captcha_answer,
client_ip,
)
}
turnstile::VerificationMode::Turnstile => {
turnstile::verify_token(settings, turnstile_token, client_ip).await
}
}
}
#[debug_handler]
pub async fn subscribe(
State(ctx): State<AppContext>,
@@ -94,10 +124,12 @@ pub async fn subscribe(
client_ip.as_deref(),
Some(&email),
)?;
let _ = turnstile::verify_if_enabled(
&ctx,
turnstile::TurnstileScope::Subscription,
let settings = crate::controllers::site_settings::load_current(&ctx).await?;
verify_subscription_human_check(
&settings,
payload.turnstile_token.as_deref(),
payload.captcha_token.as_deref(),
payload.captcha_answer.as_deref(),
client_ip.as_deref(),
)
.await?;
@@ -165,10 +197,11 @@ pub async fn subscribe_browser_push(
.map(ToString::to_string);
abuse_guard::enforce_public_scope("browser-push-subscription", client_ip.as_deref(), Some(&endpoint))?;
let _ = turnstile::verify_if_enabled(
&ctx,
turnstile::TurnstileScope::Subscription,
verify_subscription_human_check(
&settings,
payload.turnstile_token.as_deref(),
payload.captcha_token.as_deref(),
payload.captcha_answer.as_deref(),
client_ip.as_deref(),
)
.await?;

View File

@@ -118,7 +118,10 @@ fn tag_values(post: &posts::Model) -> Vec<String> {
fn build_summary(item: &tags::Model, post_items: &[posts::Model]) -> TagSummary {
let name = tag_name(item);
let aliases = [name.trim().to_ascii_lowercase(), item.slug.trim().to_ascii_lowercase()];
let aliases = [
name.trim().to_ascii_lowercase(),
item.slug.trim().to_ascii_lowercase(),
];
let count = post_items
.iter()
.filter(|post| {
@@ -163,7 +166,6 @@ async fn load_item(ctx: &AppContext, id: i32) -> Result<tags::Model> {
#[debug_handler]
pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let tag_items = tags::Entity::find()
.order_by_asc(tags::Column::Slug)
.all(&ctx.db)
@@ -234,10 +236,12 @@ pub async fn update(
!= Some(name.as_str())
{
content::rewrite_tag_references(
&ctx,
previous_name.as_deref(),
&previous_slug,
Some(&name),
)?;
)
.await?;
}
let mut item = item.into_active_model();
@@ -249,16 +253,14 @@ pub async fn update(
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(build_record(item))
}
#[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
let item = load_item(&ctx, id).await?;
content::rewrite_tag_references(item.name.as_deref(), &item.slug, None)?;
content::rewrite_tag_references(&ctx, item.name.as_deref(), &item.slug, None).await?;
item.delete(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::empty()
}

View File

@@ -45,6 +45,10 @@
description: "节奏更明显一点,适合切换阅读状态。"
ai_enabled: false
paragraph_comments_enabled: true
comment_verification_mode: "captcha"
subscription_verification_mode: "off"
turnstile_site_key: "0x4AAAAAACy58kMBSwXwqMhx"
turnstile_secret_key: "0x4AAAAAACy58m3gYfSqM-VIz4QK4wuO73U"
ai_provider: "newapi"
ai_api_base: "https://91code.jiangnight.com/v1"
ai_api_key: "sk-5a5e27db9fb8f8ee7e1d8e3c6a44638c2e50cdb0a0cf9d926fefb5418ff62571"

View File

@@ -1,14 +1,12 @@
use async_trait::async_trait;
use loco_rs::{
app::{AppContext, Initializer},
Result,
app::{AppContext, Initializer},
};
use sea_orm::{ActiveModelTrait, EntityTrait, IntoActiveModel, QueryOrder, Set};
use std::path::{Path, PathBuf};
use crate::models::_entities::{comments, posts, site_settings};
use crate::services::content;
const FIXTURES_DIR: &str = "src/fixtures";
pub struct ContentSyncInitializer;
@@ -25,7 +23,6 @@ impl Initializer for ContentSyncInitializer {
}
async fn sync_content(ctx: &AppContext, base: &Path) -> Result<()> {
content::sync_markdown_posts(ctx).await?;
sync_site_settings(ctx, base).await?;
sync_comment_post_slugs(ctx, base).await?;
Ok(())
@@ -111,6 +108,19 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
})
.filter(|items| !items.is_empty())
.map(serde_json::Value::Array);
let comment_verification_mode = as_optional_string(&seed["comment_verification_mode"]);
let subscription_verification_mode =
as_optional_string(&seed["subscription_verification_mode"]);
let comment_turnstile_enabled = seed["comment_turnstile_enabled"]
.as_bool()
.or(comment_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let subscription_turnstile_enabled = seed["subscription_turnstile_enabled"]
.as_bool()
.or(subscription_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let existing = site_settings::Entity::find()
.order_by_asc(site_settings::Column::Id)
@@ -179,6 +189,24 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
model.paragraph_comments_enabled =
Set(seed["paragraph_comments_enabled"].as_bool().or(Some(true)));
}
if existing.comment_verification_mode.is_none() {
model.comment_verification_mode = Set(comment_verification_mode.clone());
}
if existing.comment_turnstile_enabled.is_none() {
model.comment_turnstile_enabled = Set(comment_turnstile_enabled);
}
if existing.subscription_verification_mode.is_none() {
model.subscription_verification_mode = Set(subscription_verification_mode.clone());
}
if existing.subscription_turnstile_enabled.is_none() {
model.subscription_turnstile_enabled = Set(subscription_turnstile_enabled);
}
if is_blank(&existing.turnstile_site_key) {
model.turnstile_site_key = Set(as_optional_string(&seed["turnstile_site_key"]));
}
if is_blank(&existing.turnstile_secret_key) {
model.turnstile_secret_key = Set(as_optional_string(&seed["turnstile_secret_key"]));
}
if should_upgrade_legacy_ai_defaults {
model.ai_provider = Set(as_optional_string(&seed["ai_provider"]));
model.ai_api_base = Set(as_optional_string(&seed["ai_api_base"]));
@@ -237,6 +265,12 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
paragraph_comments_enabled: Set(seed["paragraph_comments_enabled"]
.as_bool()
.or(Some(true))),
comment_verification_mode: Set(comment_verification_mode),
comment_turnstile_enabled: Set(comment_turnstile_enabled),
subscription_verification_mode: Set(subscription_verification_mode),
subscription_turnstile_enabled: Set(subscription_turnstile_enabled),
turnstile_site_key: Set(as_optional_string(&seed["turnstile_site_key"])),
turnstile_secret_key: Set(as_optional_string(&seed["turnstile_secret_key"])),
ai_provider: Set(as_optional_string(&seed["ai_provider"])),
ai_api_base: Set(as_optional_string(&seed["ai_api_base"])),
ai_api_key: Set(as_optional_string(&seed["ai_api_key"])),

View File

@@ -15,6 +15,9 @@ pub struct Model {
pub description: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub content: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
#[serde(skip_serializing, skip_deserializing)]
pub source_markdown: Option<String>,
pub category: Option<String>,
#[sea_orm(column_type = "JsonBinary", nullable)]
pub tags: Option<Json>,

View File

@@ -34,6 +34,8 @@ pub struct Model {
pub paragraph_comments_enabled: Option<bool>,
pub comment_turnstile_enabled: Option<bool>,
pub subscription_turnstile_enabled: Option<bool>,
pub comment_verification_mode: Option<String>,
pub subscription_verification_mode: Option<String>,
pub web_push_enabled: Option<bool>,
#[sea_orm(column_type = "Text", nullable)]
pub turnstile_site_key: Option<String>,

View File

@@ -1,16 +1,16 @@
use base64::{engine::general_purpose::STANDARD as BASE64_STANDARD, Engine as _};
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD};
use chrono::{DateTime, Utc};
use fastembed::{
InitOptionsUserDefined, Pooling, TextEmbedding, TokenizerFiles, UserDefinedEmbeddingModel,
};
use loco_rs::prelude::*;
use reqwest::{header::CONTENT_TYPE, multipart, Client, Url};
use reqwest::{Client, Url, header::CONTENT_TYPE, multipart};
use sea_orm::{
ActiveModelTrait, ConnectionTrait, DbBackend, EntityTrait, FromQueryResult, IntoActiveModel,
PaginatorTrait, QueryOrder, Set, Statement,
};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use serde_json::{Value, json};
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::{Mutex, OnceLock};
@@ -34,8 +34,7 @@ const DEFAULT_CLOUDFLARE_CHAT_MODEL: &str = "@cf/meta/llama-3.1-8b-instruct";
const DEFAULT_CLOUDFLARE_IMAGE_MODEL: &str = "@cf/black-forest-labs/flux-2-klein-4b";
const DEFAULT_TOP_K: usize = 4;
const DEFAULT_CHUNK_SIZE: usize = 1200;
const DEFAULT_SYSTEM_PROMPT: &str =
"你是这个博客的站内 AI 助手。请严格基于提供的博客上下文回答,优先给出准确结论,再补充细节;如果上下文不足,请明确说明。";
const DEFAULT_SYSTEM_PROMPT: &str = "你是这个博客的站内 AI 助手。请严格基于提供的博客上下文回答,优先给出准确结论,再补充细节;如果上下文不足,请明确说明。";
const EMBEDDING_BATCH_SIZE: usize = 32;
const EMBEDDING_DIMENSION: usize = 384;
const LOCAL_EMBEDDING_MODEL_LABEL: &str = "fastembed / local all-MiniLM-L6-v2";
@@ -2096,8 +2095,8 @@ pub(crate) fn build_provider_url(request: &AiProviderRequest) -> String {
#[cfg(test)]
mod tests {
use super::{
build_provider_url, extract_provider_text, is_profile_question,
normalize_provider_api_base, parse_provider_sse_body, AiProviderRequest,
AiProviderRequest, build_provider_url, extract_provider_text, is_profile_question,
normalize_provider_api_base, parse_provider_sse_body,
};
fn build_request(provider: &str, api_base: &str) -> AiProviderRequest {
@@ -2643,7 +2642,7 @@ async fn retrieve_matches(
pub async fn rebuild_index(ctx: &AppContext) -> Result<AiIndexSummary> {
let settings = load_runtime_settings(ctx, false).await?;
let posts = content::sync_markdown_posts(ctx).await?;
let posts = content::load_markdown_posts_from_store(ctx).await?;
let mut chunk_drafts = build_chunks(&posts, settings.chunk_size);
chunk_drafts.extend(build_profile_chunks(&settings.raw, settings.chunk_size));
let embeddings = if chunk_drafts.is_empty() {

View File

@@ -1,4 +1,4 @@
use std::{fs, path::Path, path::PathBuf};
use std::path::Path;
use chrono::Utc;
use loco_rs::prelude::*;
@@ -11,15 +11,14 @@ use serde::{Deserialize, Serialize};
use crate::{
controllers::site_settings,
models::_entities::{
categories, friend_links, media_assets, posts, reviews, site_settings as site_settings_entity,
tags,
categories, comments, friend_links, media_assets, posts, reviews,
site_settings as site_settings_entity, tags,
},
services::{content, media_assets as media_assets_service, storage},
};
const BACKUP_VERSION: &str = "2026-04-01";
const WARNING_STORAGE_BINARIES: &str =
"当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
const WARNING_STORAGE_BINARIES: &str = "当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupTaxonomyRecord {
@@ -152,47 +151,22 @@ fn normalize_backup_mode(value: Option<&str>) -> String {
}
}
fn markdown_posts_dir() -> PathBuf {
PathBuf::from(content::MARKDOWN_POSTS_DIR)
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
}
fn remove_existing_markdown_documents() -> Result<usize> {
let dir = markdown_posts_dir();
fs::create_dir_all(&dir).map_err(io_error)?;
let mut removed = 0_usize;
for path in fs::read_dir(&dir)
.map_err(io_error)?
.filter_map(|entry| entry.ok())
.map(|entry| entry.path())
{
let extension = path
.extension()
.and_then(|value| value.to_str())
.map(|value| value.to_ascii_lowercase())
.unwrap_or_default();
if extension == "md" || extension == "markdown" {
fs::remove_file(&path).map_err(io_error)?;
removed += 1;
}
}
Ok(removed)
}
fn normalize_markdown(value: &str) -> String {
value.replace("\r\n", "\n")
}
fn normalized_backup_post(document: &BackupPostDocument) -> Result<(String, String)> {
let candidate_slug = trim_to_option(Some(document.slug.clone())).unwrap_or_default();
let file_name = trim_to_option(Some(document.file_name.clone()))
.unwrap_or_else(|| format!("{}.md", if candidate_slug.is_empty() { "post" } else { &candidate_slug }));
let file_name = trim_to_option(Some(document.file_name.clone())).unwrap_or_else(|| {
format!(
"{}.md",
if candidate_slug.is_empty() {
"post"
} else {
&candidate_slug
}
)
});
let file_stem = Path::new(&file_name)
.file_stem()
.and_then(|value| value.to_str())
@@ -296,7 +270,6 @@ fn export_media_asset_record(item: media_assets::Model) -> BackupMediaAssetRecor
pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument> {
let site_settings_row = site_settings::load_current(ctx).await?;
let markdown_posts = content::sync_markdown_posts(ctx).await?;
let categories = categories::Entity::find()
.order_by_asc(categories::Column::Slug)
.all(&ctx.db)
@@ -332,21 +305,24 @@ pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument>
.into_iter()
.map(export_media_asset_record)
.collect::<Vec<_>>();
let posts = markdown_posts
let posts = content::load_markdown_posts_from_store(ctx)
.await?
.into_iter()
.map(|post| {
let (_, markdown) = content::read_markdown_document(&post.slug)?;
Ok(BackupPostDocument {
slug: post.slug.clone(),
file_name: format!("{}.md", post.slug),
markdown,
markdown: content::build_markdown_document(&post),
})
})
.collect::<Result<Vec<_>>>()?;
let storage_manifest = match export_storage_manifest(ctx).await {
Ok(items) => items,
Err(error) => {
tracing::warn!(?error, "failed to export storage manifest, continuing without it");
tracing::warn!(
?error,
"failed to export storage manifest, continuing without it"
);
None
}
};
@@ -549,28 +525,32 @@ async fn write_backup_posts(
documents: &[BackupPostDocument],
replace_existing: bool,
) -> Result<usize> {
let dir = markdown_posts_dir();
fs::create_dir_all(&dir).map_err(io_error)?;
if replace_existing {
remove_existing_markdown_documents()?;
let existing_posts = posts::Entity::find().all(&ctx.db).await?;
for post in &existing_posts {
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(&post.slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
}
}
posts::Entity::delete_many().exec(&ctx.db).await?;
}
if documents.is_empty() {
if replace_existing {
posts::Entity::delete_many().exec(&ctx.db).await?;
}
return Ok(0);
}
let mut written = std::collections::HashSet::new();
for document in documents {
let (slug, markdown) = normalized_backup_post(document)?;
fs::write(content::markdown_post_path(&slug), markdown).map_err(io_error)?;
content::upsert_markdown_document(ctx, Some(&slug), &markdown).await?;
written.insert(slug);
}
content::sync_markdown_posts(ctx).await?;
Ok(written.len())
}

View File

@@ -363,15 +363,23 @@ pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'
return Err(Error::BadRequest("提交未通过校验".to_string()));
}
if !crate::services::turnstile::verify_if_enabled(
ctx,
let settings = crate::controllers::site_settings::load_current(ctx).await?;
match crate::services::turnstile::effective_mode(
&settings,
crate::services::turnstile::TurnstileScope::Comment,
input.turnstile_token,
input.ip_address,
)
.await?
{
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
) {
crate::services::turnstile::VerificationMode::Off => {}
crate::services::turnstile::VerificationMode::Captcha => {
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
}
crate::services::turnstile::VerificationMode::Turnstile => {
crate::services::turnstile::verify_token(
&settings,
input.turnstile_token,
input.ip_address,
)
.await?;
}
}
if contains_blocked_keyword(input).is_some() {

View File

@@ -6,19 +6,17 @@ use sea_orm::{
};
use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value;
use std::fs;
use std::path::{Path, PathBuf};
use std::path::Path;
use crate::models::_entities::{categories, comments, posts, tags};
pub const MARKDOWN_POSTS_DIR: &str = "content/posts";
const FIXTURE_POSTS_FILE: &str = "src/fixtures/posts.yaml";
pub const POST_STATUS_DRAFT: &str = "draft";
pub const POST_STATUS_PUBLISHED: &str = "published";
pub const POST_STATUS_OFFLINE: &str = "offline";
pub const POST_VISIBILITY_PUBLIC: &str = "public";
pub const POST_VISIBILITY_UNLISTED: &str = "unlisted";
pub const POST_VISIBILITY_PRIVATE: &str = "private";
const VIRTUAL_MARKDOWN_PATH_PREFIX: &str = "article://posts";
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
struct MarkdownFrontmatter {
@@ -105,32 +103,18 @@ pub struct MarkdownImportFile {
pub content: String,
}
#[derive(Debug, Clone, Deserialize)]
struct LegacyFixturePost {
title: String,
slug: String,
content: String,
excerpt: Option<String>,
category: Option<String>,
tags: Option<Vec<String>>,
pinned: Option<bool>,
published: Option<bool>,
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
/// A parsed markdown post paired with the exact markdown text it came from.
///
/// `raw_markdown` is the verbatim document (frontmatter + body) that
/// `parse_markdown_source` consumed to produce `post`, so callers can round-trip
/// the original text without re-serializing.
#[derive(Debug, Clone)]
struct MarkdownDocumentSource {
    // Structured representation of the document.
    post: MarkdownPost,
    // Verbatim markdown source backing `post`.
    raw_markdown: String,
}
fn yaml_error(err: serde_yaml::Error) -> Error {
Error::string(&err.to_string())
}
fn posts_dir() -> PathBuf {
PathBuf::from(MARKDOWN_POSTS_DIR)
}
pub fn markdown_post_path(slug: &str) -> PathBuf {
posts_dir().join(format!("{slug}.md"))
/// Build the virtual (non-filesystem) document path for a post slug,
/// e.g. `article://posts/<slug>`.
pub fn virtual_markdown_document_path(slug: &str) -> String {
    let mut path = String::from(VIRTUAL_MARKDOWN_PATH_PREFIX);
    path.push('/');
    path.push_str(slug);
    path
}
fn normalize_newlines(input: &str) -> String {
@@ -157,6 +141,15 @@ fn normalize_string_list(values: Option<Vec<String>>) -> Vec<String> {
.collect()
}
/// Clean up a raw tag list: trim/drop empties via `normalize_string_list`,
/// then keep only the first occurrence of each tag when compared by its
/// normalized match key (order of first appearance is preserved).
fn normalize_post_tags(values: Vec<String>) -> Vec<String> {
    let mut seen_keys = std::collections::HashSet::new();
    let mut unique = Vec::new();
    for tag in normalize_string_list(Some(values)) {
        // `insert` returns false for keys we have already accepted.
        if seen_keys.insert(normalized_match_key(&tag)) {
            unique.push(tag);
        }
    }
    unique
}
fn yaml_scalar(value: &str) -> String {
serde_yaml::to_string(value)
.unwrap_or_else(|_| format!("{value:?}"))
@@ -214,7 +207,9 @@ fn parse_frontmatter_datetime(value: Option<String>) -> Option<DateTime<FixedOff
if let Ok(date_only) = NaiveDate::parse_from_str(&raw, "%Y-%m-%d") {
let naive = date_only.and_hms_opt(0, 0, 0)?;
return FixedOffset::east_opt(0)?.from_local_datetime(&naive).single();
return FixedOffset::east_opt(0)?
.from_local_datetime(&naive)
.single();
}
None
@@ -278,6 +273,46 @@ pub fn post_redirects_from_json(value: &Option<Value>) -> Vec<String> {
.collect()
}
/// Extract the non-empty, trimmed string items from an optional JSON array.
///
/// Non-array values and non-string elements are silently skipped; a missing
/// or non-array `value` yields an empty vector. Element order is preserved.
///
/// Unlike the previous version, this borrows the array instead of cloning the
/// whole `Vec<Value>`, and trims the `&str` slice before the single owned
/// conversion — one allocation per surviving item instead of two.
fn json_string_array(value: &Option<Value>) -> Vec<String> {
    value
        .as_ref()
        .and_then(Value::as_array)
        .map(|items| {
            items
                .iter()
                .filter_map(Value::as_str)
                .map(str::trim)
                .filter(|item| !item.is_empty())
                .map(ToString::to_string)
                .collect()
        })
        .unwrap_or_default()
}
/// Rebuild the in-memory markdown representation from a persisted post row.
///
/// Blank optional columns are normalized to `None`, the title falls back to the
/// slug, the description falls back to an excerpt of the content, and the file
/// path is the virtual `article://` path (posts are stored in the DB, not on disk).
fn markdown_post_from_model(post: &posts::Model) -> MarkdownPost {
    let title = trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone());
    let description = trim_to_option(post.description.clone())
        .or_else(|| post.content.as_deref().and_then(excerpt_from_content));
    let post_type =
        trim_to_option(post.post_type.clone()).unwrap_or_else(|| "article".to_string());
    // Redirect targets are stored without surrounding slashes.
    let redirect_to =
        trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string());

    MarkdownPost {
        title,
        slug: post.slug.clone(),
        description,
        content: post.content.clone().unwrap_or_default(),
        category: trim_to_option(post.category.clone()),
        tags: json_string_array(&post.tags),
        post_type,
        image: trim_to_option(post.image.clone()),
        images: json_string_array(&post.images),
        pinned: post.pinned.unwrap_or(false),
        status: normalize_post_status(post.status.as_deref()),
        visibility: normalize_post_visibility(post.visibility.as_deref()),
        publish_at: format_frontmatter_datetime(post.publish_at.clone()),
        unpublish_at: format_frontmatter_datetime(post.unpublish_at.clone()),
        canonical_url: normalize_url_like(post.canonical_url.clone()),
        noindex: post.noindex.unwrap_or(false),
        og_image: normalize_url_like(post.og_image.clone()),
        redirect_from: post_redirects_from_json(&post.redirect_from),
        redirect_to,
        file_path: virtual_markdown_document_path(&post.slug),
    }
}
pub fn is_post_listed_publicly(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
effective_post_state(
post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
@@ -431,17 +466,6 @@ fn split_frontmatter(raw: &str) -> Result<(MarkdownFrontmatter, String)> {
Ok((parsed, content))
}
fn parse_markdown_post(path: &Path) -> Result<MarkdownPost> {
let raw = fs::read_to_string(path).map_err(io_error)?;
let file_stem = path
.file_stem()
.and_then(|value| value.to_str())
.unwrap_or("post")
.to_string();
parse_markdown_source(&file_stem, &raw, &path.to_string_lossy())
}
pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
let (frontmatter, content) = split_frontmatter(raw)?;
@@ -567,103 +591,40 @@ pub fn build_markdown_document(post: &MarkdownPost) -> String {
lines.join("\n")
}
fn ensure_markdown_posts_bootstrapped() -> Result<()> {
let dir = posts_dir();
fs::create_dir_all(&dir).map_err(io_error)?;
fn markdown_document_from_model(model: &posts::Model) -> Result<MarkdownDocumentSource> {
let raw_markdown = model
.source_markdown
.clone()
.map(|value| normalize_newlines(&value))
.filter(|value| !value.trim().is_empty())
.unwrap_or_else(|| build_markdown_document(&markdown_post_from_model(model)));
let virtual_path = virtual_markdown_document_path(&model.slug);
let post = parse_markdown_source(&model.slug, &raw_markdown, &virtual_path)?;
let has_markdown = fs::read_dir(&dir)
.map_err(io_error)?
.filter_map(|entry| entry.ok())
.any(|entry| entry.path().extension().and_then(|value| value.to_str()) == Some("md"));
if has_markdown {
return Ok(());
}
let raw = fs::read_to_string(FIXTURE_POSTS_FILE).map_err(io_error)?;
let fixtures = serde_yaml::from_str::<Vec<LegacyFixturePost>>(&raw).map_err(yaml_error)?;
for fixture in fixtures {
let post = MarkdownPost {
title: fixture.title,
slug: fixture.slug.clone(),
description: trim_to_option(fixture.excerpt),
content: fixture.content,
category: trim_to_option(fixture.category),
tags: fixture.tags.unwrap_or_default(),
post_type: "article".to_string(),
image: None,
images: Vec::new(),
pinned: fixture.pinned.unwrap_or(false),
status: if fixture.published.unwrap_or(true) {
POST_STATUS_PUBLISHED.to_string()
} else {
POST_STATUS_DRAFT.to_string()
},
visibility: POST_VISIBILITY_PUBLIC.to_string(),
publish_at: None,
unpublish_at: None,
canonical_url: None,
noindex: false,
og_image: None,
redirect_from: Vec::new(),
redirect_to: None,
file_path: markdown_post_path(&fixture.slug)
.to_string_lossy()
.to_string(),
};
fs::write(
markdown_post_path(&fixture.slug),
build_markdown_document(&post),
)
.map_err(io_error)?;
}
Ok(())
Ok(MarkdownDocumentSource { post, raw_markdown })
}
fn load_markdown_posts_from_disk() -> Result<Vec<MarkdownPost>> {
ensure_markdown_posts_bootstrapped()?;
let mut posts = fs::read_dir(posts_dir())
.map_err(io_error)?
.filter_map(|entry| entry.ok())
.map(|entry| entry.path())
.filter(|path| path.extension().and_then(|value| value.to_str()) == Some("md"))
.map(|path| parse_markdown_post(&path))
async fn load_markdown_documents_from_store(
ctx: &AppContext,
) -> Result<Vec<MarkdownDocumentSource>> {
let mut documents = posts::Entity::find()
.order_by_asc(posts::Column::Slug)
.all(&ctx.db)
.await?
.into_iter()
.map(|item| markdown_document_from_model(&item))
.collect::<Result<Vec<_>>>()?;
posts.sort_by(|left, right| left.slug.cmp(&right.slug));
Ok(posts)
documents.sort_by(|left, right| left.post.slug.cmp(&right.post.slug));
Ok(documents)
}
async fn sync_tags_from_posts(ctx: &AppContext, posts: &[MarkdownPost]) -> Result<()> {
for post in posts {
for tag_name in &post.tags {
let slug = slugify(tag_name);
let trimmed = tag_name.trim();
let existing = tags::Entity::find()
.filter(
Condition::any()
.add(tags::Column::Slug.eq(&slug))
.add(tags::Column::Name.eq(trimmed)),
)
.one(&ctx.db)
.await?;
if existing.is_none() {
let item = tags::ActiveModel {
name: Set(Some(trimmed.to_string())),
slug: Set(slug),
..Default::default()
};
let _ = item.insert(&ctx.db).await;
}
}
}
Ok(())
/// Load all posts from the database as parsed markdown posts,
/// discarding the raw markdown text each document carries.
pub async fn load_markdown_posts_from_store(ctx: &AppContext) -> Result<Vec<MarkdownPost>> {
    let documents = load_markdown_documents_from_store(ctx).await?;
    let posts = documents
        .into_iter()
        .map(|document| document.post)
        .collect();
    Ok(posts)
}
async fn ensure_category(ctx: &AppContext, raw_name: &str) -> Result<Option<String>> {
@@ -768,21 +729,138 @@ async fn canonicalize_tags(ctx: &AppContext, raw_tags: &[String]) -> Result<Vec<
Ok(canonical_tags)
}
fn write_markdown_post_to_disk(post: &MarkdownPost) -> Result<()> {
fs::write(
markdown_post_path(&post.slug),
build_markdown_document(post),
)
.map_err(io_error)
/// Serialize a string slice to a JSON array value.
///
/// Empty input maps to `None` so callers store a NULL-able column value
/// rather than an empty JSON array.
fn string_array_json(values: &[String]) -> Option<Value> {
    if values.is_empty() {
        return None;
    }
    let items = values.iter().map(|item| Value::String(item.clone())).collect();
    Some(Value::Array(items))
}
pub fn rewrite_category_references(
/// Copy every field of a normalized `MarkdownPost` onto a posts `ActiveModel`,
/// marking each column as `Set` so an insert or update persists them all.
///
/// `raw_markdown` is stored verbatim in `source_markdown` so the original
/// document can be reproduced later without re-serializing.
/// Callers are expected to pass already-normalized data (see
/// `save_markdown_post_to_store`); no validation happens here.
fn apply_markdown_post_to_active_model(
    model: &mut posts::ActiveModel,
    post: &MarkdownPost,
    raw_markdown: &str,
) {
    model.title = Set(Some(post.title.clone()));
    model.slug = Set(post.slug.clone());
    model.description = Set(post.description.clone());
    model.content = Set(Some(post.content.clone()));
    // Verbatim markdown source (frontmatter + body).
    model.source_markdown = Set(Some(raw_markdown.to_string()));
    model.category = Set(post.category.clone());
    // Empty lists are stored as NULL rather than `[]` (see string_array_json).
    model.tags = Set(string_array_json(&post.tags));
    model.post_type = Set(Some(post.post_type.clone()));
    model.image = Set(post.image.clone());
    model.images = Set(string_array_json(&post.images));
    model.pinned = Set(Some(post.pinned));
    model.status = Set(Some(normalize_post_status(Some(&post.status))));
    model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
    // Frontmatter datetimes are strings; parse them into column-typed values.
    model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
    model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
    model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
    model.noindex = Set(Some(post.noindex));
    model.og_image = Set(normalize_url_like(post.og_image.clone()));
    model.redirect_from = Set(string_array_json(&post.redirect_from));
    // Redirect targets are stored without surrounding slashes.
    model.redirect_to = Set(
        trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string())
    );
}
/// Normalize a markdown post and upsert it into the posts table.
///
/// * `slug_hint` — the slug the post was previously stored under (if any);
///   used both as a fallback slug and to find the existing row when the
///   post is being renamed.
/// * `canonicalize_taxonomy` — when true, the category is resolved/created via
///   `ensure_category` and tags via `canonicalize_tags`; rewrite passes that
///   have already canonicalized pass false.
///
/// Returns the fully normalized post as persisted.
///
/// # Errors
/// * `BadRequest` when no usable slug can be derived, or when the target slug
///   already belongs to a different row than the one named by `slug_hint`.
/// * Database errors from the lookup/insert/update.
async fn save_markdown_post_to_store(
    ctx: &AppContext,
    mut post: MarkdownPost,
    slug_hint: Option<&str>,
    canonicalize_taxonomy: bool,
) -> Result<MarkdownPost> {
    // Blank hints are treated as absent.
    let normalized_slug_hint = slug_hint
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(ToString::to_string);
    // --- Field normalization: trim/blank-to-None every field and apply
    // fallbacks (title <- slug, slug <- hint <- slugified title,
    // description <- content excerpt, post_type <- "article"). ---
    post.title = trim_to_option(Some(post.title.clone())).unwrap_or_else(|| post.slug.clone());
    post.slug = trim_to_option(Some(post.slug.clone()))
        .or_else(|| normalized_slug_hint.clone())
        .unwrap_or_else(|| slugify(&post.title));
    post.description =
        trim_to_option(post.description.clone()).or_else(|| excerpt_from_content(&post.content));
    post.content = normalize_newlines(post.content.trim());
    post.category = trim_to_option(post.category.clone());
    post.tags = normalize_post_tags(post.tags.clone());
    post.post_type =
        trim_to_option(Some(post.post_type.clone())).unwrap_or_else(|| "article".to_string());
    post.image = trim_to_option(post.image.clone());
    post.images = normalize_string_list(Some(post.images.clone()));
    post.status = normalize_post_status(Some(&post.status));
    post.visibility = normalize_post_visibility(Some(&post.visibility));
    // Round-trip datetimes through parse/format so stored strings are canonical.
    post.publish_at =
        format_frontmatter_datetime(parse_frontmatter_datetime(post.publish_at.clone()));
    post.unpublish_at =
        format_frontmatter_datetime(parse_frontmatter_datetime(post.unpublish_at.clone()));
    post.canonical_url = normalize_url_like(post.canonical_url.clone());
    post.og_image = normalize_url_like(post.og_image.clone());
    post.redirect_from = normalize_redirect_list(Some(post.redirect_from.clone()));
    post.redirect_to =
        trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string());
    if post.slug.trim().is_empty() {
        return Err(Error::BadRequest("slug is required".to_string()));
    }
    if canonicalize_taxonomy {
        post.category = match post.category.as_deref() {
            Some(category) => ensure_category(ctx, category).await?,
            None => None,
        };
        post.tags = canonicalize_tags(ctx, &post.tags).await?;
    }
    // --- Locate the target row. The post may be found under its old slug
    // (the hint, e.g. a rename) and/or under its new slug. ---
    let existing_by_hint = if let Some(hint) = normalized_slug_hint.as_deref() {
        posts::Entity::find()
            .filter(posts::Column::Slug.eq(hint))
            .one(&ctx.db)
            .await?
    } else {
        None
    };
    // Skip the second query when the hint row already has the target slug.
    let existing_by_slug =
        if existing_by_hint.as_ref().map(|item| item.slug.as_str()) == Some(post.slug.as_str()) {
            None
        } else {
            posts::Entity::find()
                .filter(posts::Column::Slug.eq(&post.slug))
                .one(&ctx.db)
                .await?
        };
    // Renaming onto a slug owned by a different row is a conflict.
    if let (Some(by_hint), Some(by_slug)) = (&existing_by_hint, &existing_by_slug) {
        if by_hint.id != by_slug.id {
            return Err(Error::BadRequest(format!(
                "markdown post already exists for slug: {}",
                post.slug
            )));
        }
    }
    let has_existing = existing_by_hint.is_some() || existing_by_slug.is_some();
    // Prefer the hint row (the one being renamed) as the update target.
    let mut model = existing_by_hint
        .or(existing_by_slug)
        .map(|item| item.into_active_model())
        .unwrap_or_default();
    post.file_path = virtual_markdown_document_path(&post.slug);
    // Re-serialize so the stored source matches the normalized fields.
    let raw_markdown = build_markdown_document(&post);
    apply_markdown_post_to_active_model(&mut model, &post, &raw_markdown);
    if has_existing {
        model.update(&ctx.db).await?;
    } else {
        model.insert(&ctx.db).await?;
    }
    Ok(post)
}
pub async fn rewrite_category_references(
ctx: &AppContext,
current_name: Option<&str>,
current_slug: &str,
next_name: Option<&str>,
) -> Result<usize> {
ensure_markdown_posts_bootstrapped()?;
let mut match_keys = Vec::new();
if let Some(name) = current_name {
let normalized = normalized_match_key(name);
@@ -805,9 +883,9 @@ pub fn rewrite_category_references(
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let mut changed = 0_usize;
let mut posts = load_markdown_posts_from_disk()?;
let posts = load_markdown_posts_from_store(ctx).await?;
for post in &mut posts {
for mut post in posts {
let Some(category) = post.category.as_deref() else {
continue;
};
@@ -816,16 +894,17 @@ pub fn rewrite_category_references(
continue;
}
let existing_slug = post.slug.clone();
match &next_category {
Some(updated_name) if same_text(category, updated_name) => {}
Some(updated_name) => {
post.category = Some(updated_name.clone());
write_markdown_post_to_disk(post)?;
save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1;
}
None => {
post.category = None;
write_markdown_post_to_disk(post)?;
save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1;
}
}
@@ -834,13 +913,12 @@ pub fn rewrite_category_references(
Ok(changed)
}
pub fn rewrite_tag_references(
pub async fn rewrite_tag_references(
ctx: &AppContext,
current_name: Option<&str>,
current_slug: &str,
next_name: Option<&str>,
) -> Result<usize> {
ensure_markdown_posts_bootstrapped()?;
let mut match_keys = Vec::new();
if let Some(name) = current_name {
let normalized = normalized_match_key(name);
@@ -863,9 +941,9 @@ pub fn rewrite_tag_references(
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let mut changed = 0_usize;
let mut posts = load_markdown_posts_from_disk()?;
let posts = load_markdown_posts_from_store(ctx).await?;
for post in &mut posts {
for mut post in posts {
let mut updated_tags = Vec::new();
let mut seen = std::collections::HashSet::new();
let mut post_changed = false;
@@ -889,8 +967,9 @@ pub fn rewrite_tag_references(
}
if post_changed {
let existing_slug = post.slug.clone();
post.tags = updated_tags;
write_markdown_post_to_disk(post)?;
save_markdown_post_to_store(ctx, post, Some(&existing_slug), false).await?;
changed += 1;
}
}
@@ -898,167 +977,43 @@ pub fn rewrite_tag_references(
Ok(changed)
}
async fn dedupe_tags(ctx: &AppContext) -> Result<()> {
let existing_tags = tags::Entity::find()
.order_by_asc(tags::Column::Id)
.all(&ctx.db)
.await?;
let mut seen = std::collections::HashSet::new();
for tag in existing_tags {
let key = if tag.slug.trim().is_empty() {
tag.name.as_deref().map(slugify).unwrap_or_default()
} else {
slugify(&tag.slug)
};
if key.is_empty() || seen.insert(key) {
continue;
}
let _ = tag.delete(&ctx.db).await;
}
Ok(())
pub async fn read_markdown_document_from_store(
ctx: &AppContext,
slug: &str,
) -> Result<(String, String)> {
let post = posts::Entity::find()
.filter(posts::Column::Slug.eq(slug))
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)?;
let document = markdown_document_from_model(&post)?;
Ok((
virtual_markdown_document_path(&document.post.slug),
document.raw_markdown,
))
}
async fn dedupe_categories(ctx: &AppContext) -> Result<()> {
let existing_categories = categories::Entity::find()
.order_by_asc(categories::Column::Id)
.all(&ctx.db)
.await?;
pub async fn upsert_markdown_document(
ctx: &AppContext,
slug_hint: Option<&str>,
markdown: &str,
) -> Result<MarkdownPost> {
let normalized_markdown = normalize_newlines(markdown);
let normalized_slug_hint = slug_hint
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string);
let file_stem = normalized_slug_hint
.as_deref()
.filter(|value| !value.is_empty())
.unwrap_or("post");
let virtual_path = normalized_slug_hint
.as_deref()
.map(virtual_markdown_document_path)
.unwrap_or_else(|| format!("{VIRTUAL_MARKDOWN_PATH_PREFIX}/draft"));
let post = parse_markdown_source(file_stem, &normalized_markdown, &virtual_path)?;
let mut seen = std::collections::HashSet::new();
for category in existing_categories {
let key = if category.slug.trim().is_empty() {
category.name.as_deref().map(slugify).unwrap_or_default()
} else {
slugify(&category.slug)
};
if key.is_empty() || seen.insert(key) {
continue;
}
let _ = category.delete(&ctx.db).await;
}
Ok(())
}
pub async fn sync_markdown_posts(ctx: &AppContext) -> Result<Vec<MarkdownPost>> {
let markdown_posts = load_markdown_posts_from_disk()?;
let markdown_slugs = markdown_posts
.iter()
.map(|post| post.slug.clone())
.collect::<std::collections::HashSet<_>>();
let existing_posts = posts::Entity::find().all(&ctx.db).await?;
for stale_post in existing_posts
.into_iter()
.filter(|post| !markdown_slugs.contains(&post.slug))
{
let stale_slug = stale_post.slug.clone();
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(&stale_slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
}
let _ = stale_post.delete(&ctx.db).await;
}
for post in &markdown_posts {
let canonical_category = match post.category.as_deref() {
Some(category) => ensure_category(ctx, category).await?,
None => None,
};
let canonical_tags = canonicalize_tags(ctx, &post.tags).await?;
let existing = posts::Entity::find()
.filter(posts::Column::Slug.eq(&post.slug))
.one(&ctx.db)
.await?;
let has_existing = existing.is_some();
let mut model = existing
.map(|item| item.into_active_model())
.unwrap_or_default();
model.title = Set(Some(post.title.clone()));
model.slug = Set(post.slug.clone());
model.description = Set(post.description.clone());
model.content = Set(Some(post.content.clone()));
model.category = Set(canonical_category);
model.tags = Set(if canonical_tags.is_empty() {
None
} else {
Some(Value::Array(
canonical_tags.into_iter().map(Value::String).collect(),
))
});
model.post_type = Set(Some(post.post_type.clone()));
model.image = Set(post.image.clone());
model.images = Set(if post.images.is_empty() {
None
} else {
Some(Value::Array(
post.images
.iter()
.cloned()
.map(Value::String)
.collect::<Vec<_>>(),
))
});
model.pinned = Set(Some(post.pinned));
model.status = Set(Some(normalize_post_status(Some(&post.status))));
model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
model.noindex = Set(Some(post.noindex));
model.og_image = Set(normalize_url_like(post.og_image.clone()));
model.redirect_from = Set(if post.redirect_from.is_empty() {
None
} else {
Some(Value::Array(
post.redirect_from
.iter()
.cloned()
.map(Value::String)
.collect::<Vec<_>>(),
))
});
model.redirect_to = Set(
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string()),
);
if has_existing {
let _ = model.update(&ctx.db).await;
} else {
let _ = model.insert(&ctx.db).await;
}
}
sync_tags_from_posts(ctx, &markdown_posts).await?;
dedupe_tags(ctx).await?;
dedupe_categories(ctx).await?;
Ok(markdown_posts)
}
pub fn read_markdown_document(slug: &str) -> Result<(String, String)> {
let path = markdown_post_path(slug);
if !path.exists() {
return Err(Error::NotFound);
}
let raw = fs::read_to_string(&path).map_err(io_error)?;
Ok((path.to_string_lossy().to_string(), raw))
save_markdown_post_to_store(ctx, post, normalized_slug_hint.as_deref(), true).await
}
pub async fn write_markdown_document(
@@ -1066,24 +1021,25 @@ pub async fn write_markdown_document(
slug: &str,
markdown: &str,
) -> Result<MarkdownPost> {
ensure_markdown_posts_bootstrapped()?;
let path = markdown_post_path(slug);
fs::write(&path, normalize_newlines(markdown)).map_err(io_error)?;
let updated = parse_markdown_post(&path)?;
sync_markdown_posts(ctx).await?;
Ok(updated)
upsert_markdown_document(ctx, Some(slug), markdown).await
}
pub async fn delete_markdown_post(ctx: &AppContext, slug: &str) -> Result<()> {
ensure_markdown_posts_bootstrapped()?;
let path = markdown_post_path(slug);
if !path.exists() {
return Err(Error::NotFound);
let post = posts::Entity::find()
.filter(posts::Column::Slug.eq(slug))
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)?;
let related_comments = comments::Entity::find()
.filter(comments::Column::PostSlug.eq(slug))
.all(&ctx.db)
.await?;
for comment in related_comments {
let _ = comment.delete(&ctx.db).await;
}
fs::remove_file(&path).map_err(io_error)?;
sync_markdown_posts(ctx).await?;
post.delete(&ctx.db).await?;
Ok(())
}
@@ -1091,8 +1047,6 @@ pub async fn create_markdown_post(
ctx: &AppContext,
draft: MarkdownPostDraft,
) -> Result<MarkdownPost> {
ensure_markdown_posts_bootstrapped()?;
let title = draft.title.trim().to_string();
if title.is_empty() {
return Err(Error::BadRequest("title is required".to_string()));
@@ -1110,6 +1064,17 @@ pub async fn create_markdown_post(
return Err(Error::BadRequest("slug is required".to_string()));
}
if posts::Entity::find()
.filter(posts::Column::Slug.eq(&slug))
.one(&ctx.db)
.await?
.is_some()
{
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {slug}"
)));
}
let post = MarkdownPost {
title,
slug: slug.clone(),
@@ -1143,28 +1108,16 @@ pub async fn create_markdown_post(
redirect_from: normalize_redirect_list(Some(draft.redirect_from)),
redirect_to: trim_to_option(draft.redirect_to)
.map(|item| item.trim_matches('/').to_string()),
file_path: markdown_post_path(&slug).to_string_lossy().to_string(),
file_path: virtual_markdown_document_path(&slug),
};
let path = markdown_post_path(&slug);
if path.exists() {
return Err(Error::BadRequest(format!(
"markdown post already exists for slug: {slug}"
)));
}
fs::write(&path, build_markdown_document(&post)).map_err(io_error)?;
sync_markdown_posts(ctx).await?;
parse_markdown_post(&path)
save_markdown_post_to_store(ctx, post, Some(&slug), true).await
}
pub async fn import_markdown_documents(
ctx: &AppContext,
files: Vec<MarkdownImportFile>,
) -> Result<Vec<MarkdownPost>> {
ensure_markdown_posts_bootstrapped()?;
let mut imported_slugs = Vec::new();
let mut imported = Vec::new();
for file in files {
let path = Path::new(&file.file_name);
@@ -1194,15 +1147,8 @@ pub async fn import_markdown_documents(
continue;
}
fs::write(markdown_post_path(&slug), normalize_newlines(&file.content))
.map_err(io_error)?;
imported_slugs.push(slug);
imported.push(upsert_markdown_document(ctx, Some(&slug), &file.content).await?);
}
sync_markdown_posts(ctx).await?;
imported_slugs
.into_iter()
.map(|slug| parse_markdown_post(&markdown_post_path(&slug)))
.collect()
Ok(imported)
}

View File

@@ -2,7 +2,6 @@ use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, Order, QueryFilter, QueryOrder, QuerySelect, Set,
};
use std::fs;
use crate::{
controllers::admin::AdminIdentity,
@@ -48,10 +47,10 @@ fn trim_to_option(value: Option<String>) -> Option<String> {
fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
let normalized = markdown.replace("\r\n", "\n");
if let Some(frontmatter) = normalized
.strip_prefix("---\n")
.and_then(|rest| rest.split_once("\n---\n").map(|(frontmatter, _)| frontmatter))
{
if let Some(frontmatter) = normalized.strip_prefix("---\n").and_then(|rest| {
rest.split_once("\n---\n")
.map(|(frontmatter, _)| frontmatter)
}) {
for line in frontmatter.lines() {
let trimmed = line.trim();
if let Some(raw) = trimmed.strip_prefix("title:") {
@@ -63,14 +62,16 @@ fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
}
}
normalized.lines().find_map(|line| {
line.trim()
.strip_prefix("# ")
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
})
.or_else(|| trim_to_option(Some(slug.to_string())))
normalized
.lines()
.find_map(|line| {
line.trim()
.strip_prefix("# ")
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
})
.or_else(|| trim_to_option(Some(slug.to_string())))
}
async fn lookup_post_title(ctx: &AppContext, slug: &str) -> Option<String> {
@@ -122,7 +123,7 @@ pub async fn capture_current_snapshot(
reason: Option<&str>,
metadata: Option<serde_json::Value>,
) -> Result<Option<post_revisions::Model>> {
let Ok((_path, markdown)) = content::read_markdown_document(slug) else {
let Ok((_path, markdown)) = content::read_markdown_document_from_store(ctx, slug).await else {
return Ok(None);
};
@@ -136,17 +137,14 @@ pub async fn list_revisions(
slug: Option<&str>,
limit: u64,
) -> Result<Vec<post_revisions::Model>> {
let mut query = post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
let mut query =
post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
if let Some(slug) = slug.map(str::trim).filter(|value| !value.is_empty()) {
query = query.filter(post_revisions::Column::PostSlug.eq(slug));
}
query
.limit(limit)
.all(&ctx.db)
.await
.map_err(Into::into)
query.limit(limit).all(&ctx.db).await.map_err(Into::into)
}
pub async fn get_revision(ctx: &AppContext, id: i32) -> Result<post_revisions::Model> {
@@ -187,13 +185,18 @@ pub async fn restore_revision(
let markdown = match restore_mode {
RestoreMode::Full => revision_markdown.clone(),
RestoreMode::Markdown | RestoreMode::Metadata => {
let (_path, current_markdown) = content::read_markdown_document(&slug).map_err(|_| {
Error::BadRequest("当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string())
})?;
let (_path, current_markdown) = content::read_markdown_document_from_store(ctx, &slug)
.await
.map_err(|_| {
Error::BadRequest(
"当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string(),
)
})?;
let virtual_path = content::virtual_markdown_document_path(&slug);
let revision_post =
content::parse_markdown_source(&slug, &revision_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
content::parse_markdown_source(&slug, &revision_markdown, &virtual_path)?;
let current_post =
content::parse_markdown_source(&slug, &current_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
content::parse_markdown_source(&slug, &current_markdown, &virtual_path)?;
let mut merged = current_post.clone();
match restore_mode {
RestoreMode::Markdown => {
@@ -224,10 +227,7 @@ pub async fn restore_revision(
}
};
fs::create_dir_all(content::MARKDOWN_POSTS_DIR).map_err(|error| Error::BadRequest(error.to_string()))?;
fs::write(content::markdown_post_path(&slug), markdown.replace("\r\n", "\n"))
.map_err(|error| Error::BadRequest(error.to_string()))?;
content::sync_markdown_posts(ctx).await?;
content::write_markdown_document(ctx, &slug, &markdown).await?;
let _ = capture_snapshot_from_markdown(
ctx,

View File

@@ -20,6 +20,27 @@ pub enum TurnstileScope {
Subscription,
}
/// Visitor-verification strategy applied to a form scope.
///
/// `Off` performs no verification, `Captcha` uses the built-in captcha
/// flow, and `Turnstile` delegates verification to Cloudflare Turnstile.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerificationMode {
    Off,
    Captcha,
    Turnstile,
}

/// Canonical persisted value for [`VerificationMode::Off`].
pub const VERIFICATION_MODE_OFF: &str = "off";
/// Canonical persisted value for [`VerificationMode::Captcha`].
pub const VERIFICATION_MODE_CAPTCHA: &str = "captcha";
/// Canonical persisted value for [`VerificationMode::Turnstile`].
pub const VERIFICATION_MODE_TURNSTILE: &str = "turnstile";

impl VerificationMode {
    /// Returns the canonical settings string for this mode.
    pub const fn as_str(self) -> &'static str {
        match self {
            VerificationMode::Off => VERIFICATION_MODE_OFF,
            VerificationMode::Captcha => VERIFICATION_MODE_CAPTCHA,
            VerificationMode::Turnstile => VERIFICATION_MODE_TURNSTILE,
        }
    }
}
#[derive(Clone, Debug, Deserialize)]
struct TurnstileVerifyResponse {
success: bool,
@@ -56,6 +77,15 @@ fn configured_value(value: Option<&String>) -> Option<String> {
})
}
pub fn normalize_verification_mode(value: Option<&str>) -> Option<VerificationMode> {
match value?.trim().to_ascii_lowercase().as_str() {
VERIFICATION_MODE_OFF => Some(VerificationMode::Off),
VERIFICATION_MODE_CAPTCHA | "normal" | "simple" => Some(VerificationMode::Captcha),
VERIFICATION_MODE_TURNSTILE => Some(VerificationMode::Turnstile),
_ => None,
}
}
/// Normalizes a client IP string: trims it via `trim_to_option` and caps
/// it at 96 characters; yields `None` for missing or blank input.
fn normalize_ip(value: Option<&str>) -> Option<String> {
    let trimmed = trim_to_option(value)?;
    Some(trimmed.chars().take(96).collect())
}
@@ -89,17 +119,48 @@ pub fn secret_key_configured(settings: &site_settings::Model) -> bool {
secret_key(settings).is_some()
}
fn scope_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
fn legacy_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
match scope {
TurnstileScope::Comment => settings.comment_turnstile_enabled.unwrap_or(false),
TurnstileScope::Subscription => settings.subscription_turnstile_enabled.unwrap_or(false),
TurnstileScope::Comment => {
if settings.comment_turnstile_enabled.unwrap_or(false) {
VerificationMode::Turnstile
} else {
VerificationMode::Captcha
}
}
TurnstileScope::Subscription => {
if settings.subscription_turnstile_enabled.unwrap_or(false) {
VerificationMode::Turnstile
} else {
VerificationMode::Off
}
}
}
}
/// Resolves the verification mode the site owner selected for `scope`.
///
/// Reads the scope's explicit `*_verification_mode` setting first; when
/// that value is absent or unparseable, falls back to `legacy_mode`,
/// which derives a mode from the older boolean flags.
pub fn selected_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
    let raw = match scope {
        TurnstileScope::Comment => settings.comment_verification_mode.as_deref(),
        TurnstileScope::Subscription => settings.subscription_verification_mode.as_deref(),
    };
    match normalize_verification_mode(raw) {
        Some(mode) => mode,
        None => legacy_mode(settings, scope),
    }
}
/// Returns the mode that will actually be enforced for `scope`.
///
/// A selected `Turnstile` mode is honored only when both the site key
/// and the secret key are configured; otherwise it degrades to
/// `Captcha`. Every other selection is returned unchanged.
pub fn effective_mode(settings: &site_settings::Model, scope: TurnstileScope) -> VerificationMode {
    let selected = selected_mode(settings, scope);
    if selected != VerificationMode::Turnstile {
        return selected;
    }
    if site_key_configured(settings) && secret_key_configured(settings) {
        VerificationMode::Turnstile
    } else {
        VerificationMode::Captcha
    }
}
pub fn is_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
scope_enabled(settings, scope)
&& site_key_configured(settings)
&& secret_key_configured(settings)
effective_mode(settings, scope) == VerificationMode::Turnstile
}
pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Result<bool> {
@@ -107,7 +168,7 @@ pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Resu
Ok(is_enabled(&settings, scope))
}
async fn verify_token(
pub async fn verify_token(
settings: &site_settings::Model,
token: Option<&str>,
client_ip: Option<&str>,
@@ -173,7 +234,7 @@ pub async fn verify_if_enabled(
client_ip: Option<&str>,
) -> Result<bool> {
let settings = crate::controllers::site_settings::load_current(ctx).await?;
if !is_enabled(&settings, scope) {
if effective_mode(&settings, scope) != VerificationMode::Turnstile {
return Ok(false);
}