feat: ship public ops features and cache docker builds
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Failing after 13s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Has been cancelled
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Has been cancelled

This commit is contained in:
2026-04-01 13:22:19 +08:00
parent 669b79cc95
commit 497a9d713d
75 changed files with 6985 additions and 668 deletions

View File

@@ -106,6 +106,7 @@ impl Hooks for App {
AppRoutes::with_default_routes() // controller routes below
.add_route(controllers::health::routes())
.add_route(controllers::admin_api::routes())
.add_route(controllers::admin_taxonomy::routes())
.add_route(controllers::admin_ops::routes())
.add_route(controllers::review::routes())
.add_route(controllers::category::routes())

View File

@@ -22,7 +22,7 @@ use crate::{
ai_chunks, comment_blacklist, comment_persona_analysis_logs, comments, friend_links, posts,
reviews,
},
services::{admin_audit, ai, analytics, comment_guard, content, storage},
services::{admin_audit, ai, analytics, comment_guard, content, media_assets, storage},
};
#[derive(Clone, Debug, Deserialize)]
@@ -170,6 +170,14 @@ pub struct AdminSiteSettingsResponse {
pub music_playlist: Vec<site_settings::MusicTrackPayload>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_turnstile_enabled: bool,
pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>,
pub turnstile_secret_key: Option<String>,
pub web_push_vapid_public_key: Option<String>,
pub web_push_vapid_private_key: Option<String>,
pub web_push_vapid_subject: Option<String>,
pub ai_provider: Option<String>,
pub ai_api_base: Option<String>,
pub ai_api_key: Option<String>,
@@ -196,6 +204,7 @@ pub struct AdminSiteSettingsResponse {
pub seo_default_og_image: Option<String>,
pub seo_default_twitter_handle: Option<String>,
pub notification_webhook_url: Option<String>,
pub notification_channel_type: String,
pub notification_comment_enabled: bool,
pub notification_friend_link_enabled: bool,
pub subscription_popup_enabled: bool,
@@ -258,6 +267,11 @@ pub struct AdminMediaObjectResponse {
pub url: String,
pub size_bytes: i64,
pub last_modified: Option<String>,
pub title: Option<String>,
pub alt_text: Option<String>,
pub caption: Option<String>,
pub tags: Vec<String>,
pub notes: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
@@ -304,6 +318,32 @@ pub struct AdminMediaReplaceResponse {
pub url: String,
}
/// Request body for `PATCH /storage/media/metadata`.
///
/// Every editorial field is optional; the semantics of `None` vs. a
/// provided value are decided by `media_assets::upsert_by_key`
/// (not visible here — presumably `None` leaves the field untouched;
/// confirm against the service).
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaMetadataPayload {
    /// Object-storage key identifying the media object; required,
    /// trimmed and rejected when blank by the handler.
    pub key: String,
    #[serde(default)]
    pub title: Option<String>,
    #[serde(default)]
    pub alt_text: Option<String>,
    #[serde(default)]
    pub caption: Option<String>,
    #[serde(default)]
    pub tags: Option<Vec<String>>,
    #[serde(default)]
    pub notes: Option<String>,
}
/// Response for the media-metadata upsert: echoes the persisted row so
/// the admin UI can refresh without a second fetch. `saved` is always
/// `true` on success (errors surface via the `Result` instead).
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaMetadataResponse {
    pub saved: bool,
    /// The stored object key (as persisted, not as submitted).
    pub key: String,
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    /// Normalized tag list from `media_assets::tag_list`.
    pub tags: Vec<String>,
    pub notes: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaListQuery {
pub prefix: Option<String>,
@@ -634,6 +674,25 @@ fn normalize_media_key(value: Option<String>) -> Option<String> {
})
}
/// Merge a storage listing entry with its optional editorial metadata
/// row into the admin-facing response shape.
///
/// When `metadata` is `None` (no row recorded for this key), all
/// editorial fields come back as `None` and `tags` as an empty list.
fn build_media_object_response(
    item: storage::StoredObjectSummary,
    metadata: Option<&crate::models::_entities::media_assets::Model>,
) -> AdminMediaObjectResponse {
    AdminMediaObjectResponse {
        key: item.key,
        url: item.url,
        size_bytes: item.size_bytes,
        last_modified: item.last_modified,
        title: metadata.and_then(|entry| entry.title.clone()),
        alt_text: metadata.and_then(|entry| entry.alt_text.clone()),
        caption: metadata.and_then(|entry| entry.caption.clone()),
        tags: metadata
            .map(media_assets::tag_list)
            .unwrap_or_default(),
        notes: metadata.and_then(|entry| entry.notes.clone()),
    }
}
fn tech_stack_values(value: &Option<serde_json::Value>) -> Vec<String> {
value
.as_ref()
@@ -665,6 +724,11 @@ fn build_settings_response(
) -> AdminSiteSettingsResponse {
let ai_providers = site_settings::ai_provider_configs(&item);
let ai_active_provider_id = site_settings::active_ai_provider_id(&item);
let turnstile_site_key = crate::services::turnstile::site_key(&item);
let turnstile_secret_key = crate::services::turnstile::secret_key(&item);
let web_push_vapid_public_key = crate::services::web_push::public_key(&item);
let web_push_vapid_private_key = crate::services::web_push::private_key(&item);
let web_push_vapid_subject = crate::services::web_push::vapid_subject(&item);
AdminSiteSettingsResponse {
id: item.id,
@@ -687,6 +751,14 @@ fn build_settings_response(
music_playlist: music_playlist_values(&item.music_playlist),
ai_enabled: item.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true),
comment_turnstile_enabled: item.comment_turnstile_enabled.unwrap_or(false),
subscription_turnstile_enabled: item.subscription_turnstile_enabled.unwrap_or(false),
web_push_enabled: item.web_push_enabled.unwrap_or(false),
turnstile_site_key,
turnstile_secret_key,
web_push_vapid_public_key,
web_push_vapid_private_key,
web_push_vapid_subject,
ai_provider: item.ai_provider,
ai_api_base: item.ai_api_base,
ai_api_key: item.ai_api_key,
@@ -713,6 +785,9 @@ fn build_settings_response(
seo_default_og_image: item.seo_default_og_image,
seo_default_twitter_handle: item.seo_default_twitter_handle,
notification_webhook_url: item.notification_webhook_url,
notification_channel_type: item
.notification_channel_type
.unwrap_or_else(|| "webhook".to_string()),
notification_comment_enabled: item.notification_comment_enabled.unwrap_or(false),
notification_friend_link_enabled: item.notification_friend_link_enabled.unwrap_or(false),
subscription_popup_enabled: item
@@ -1115,14 +1190,18 @@ pub async fn list_media_objects(
check_auth(&headers)?;
let settings = storage::require_r2_settings(&ctx).await?;
let items = storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200))
.await?
let objects = storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200))
.await?;
let keys = objects
.iter()
.map(|item| item.key.clone())
.collect::<Vec<_>>();
let metadata_map = media_assets::list_by_keys(&ctx, &keys).await?;
let items = objects
.into_iter()
.map(|item| AdminMediaObjectResponse {
key: item.key,
url: item.url,
size_bytes: item.size_bytes,
last_modified: item.last_modified,
.map(|item| {
let metadata = metadata_map.get(&item.key);
build_media_object_response(item, metadata)
})
.collect::<Vec<_>>();
@@ -1148,6 +1227,9 @@ pub async fn delete_media_object(
}
storage::delete_object(&ctx, key).await?;
if let Err(error) = media_assets::delete_by_key(&ctx, key).await {
tracing::warn!(?error, key, "failed to delete media metadata after object deletion");
}
format::json(AdminMediaDeleteResponse {
deleted: true,
@@ -1241,7 +1323,12 @@ pub async fn batch_delete_media_objects(
for key in keys {
match storage::delete_object(&ctx, &key).await {
Ok(()) => deleted.push(key),
Ok(()) => {
if let Err(error) = media_assets::delete_by_key(&ctx, &key).await {
tracing::warn!(?error, key, "failed to delete media metadata after batch removal");
}
deleted.push(key)
}
Err(_) => failed.push(key),
}
}
@@ -1249,6 +1336,43 @@ pub async fn batch_delete_media_objects(
format::json(AdminMediaBatchDeleteResponse { deleted, failed })
}
/// `PATCH /storage/media/metadata` — create or update editorial
/// metadata (title, alt text, caption, tags, notes) for a stored media
/// object addressed by `key`.
///
/// Requires admin auth via `check_auth`. A blank/whitespace-only key is
/// rejected with `BadRequest` before touching the database. The upsert
/// itself is delegated to `media_assets::upsert_by_key`; the persisted
/// row is echoed back so the client can refresh its view.
#[debug_handler]
pub async fn update_media_object_metadata(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<AdminMediaMetadataPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    // Normalize the object key; an empty key cannot address an object.
    let key = payload.key.trim();
    if key.is_empty() {
        return Err(Error::BadRequest("缺少对象 key".to_string()));
    }
    let metadata = media_assets::upsert_by_key(
        &ctx,
        key,
        media_assets::MediaAssetMetadataInput {
            title: payload.title,
            alt_text: payload.alt_text,
            caption: payload.caption,
            tags: payload.tags,
            notes: payload.notes,
        },
    )
    .await?;
    // Respond with the stored values, not the submitted ones.
    format::json(AdminMediaMetadataResponse {
        saved: true,
        key: metadata.object_key.clone(),
        title: metadata.title.clone(),
        alt_text: metadata.alt_text.clone(),
        caption: metadata.caption.clone(),
        tags: media_assets::tag_list(&metadata),
        notes: metadata.notes.clone(),
    })
}
#[debug_handler]
pub async fn replace_media_object(
headers: HeaderMap,
@@ -1831,6 +1955,7 @@ pub fn routes() -> Routes {
"/storage/media/batch-delete",
post(batch_delete_media_objects),
)
.add("/storage/media/metadata", patch(update_media_object_metadata))
.add("/storage/media/replace", post(replace_media_object))
.add(
"/comments/blacklist",

View File

@@ -11,7 +11,10 @@ use crate::{
models::_entities::{
admin_audit_logs, notification_deliveries, post_revisions, subscriptions,
},
services::{admin_audit, post_revisions as revision_service, subscriptions as subscription_service},
services::{
admin_audit, backups, post_revisions as revision_service,
subscriptions as subscription_service,
},
};
#[derive(Clone, Debug, Default, Deserialize)]
@@ -82,6 +85,13 @@ pub struct DigestDispatchRequest {
pub period: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct SiteBackupImportRequest {
pub backup: backups::SiteBackupDocument,
#[serde(default)]
pub mode: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
pub struct PostRevisionListItem {
pub id: i32,
@@ -440,6 +450,25 @@ pub async fn send_subscription_digest(
format::json(summary)
}
/// `GET /api/admin/site-backup/export` — serialize the full site backup
/// document as JSON. Admin-only; the document shape and contents are
/// defined by `backups::export_site_backup`.
#[debug_handler]
pub async fn export_site_backup(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    format::json(backups::export_site_backup(&ctx).await?)
}
/// `POST /api/admin/site-backup/import` — restore a previously exported
/// backup document. Admin-only. The optional `mode` string selects the
/// import strategy; interpretation (and the result echoed back) is
/// delegated entirely to `backups::import_site_backup`.
#[debug_handler]
pub async fn import_site_backup(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<SiteBackupImportRequest>,
) -> Result<Response> {
    check_auth(&headers)?;
    format::json(backups::import_site_backup(&ctx, payload.backup, payload.mode.as_deref()).await?)
}
pub fn routes() -> Routes {
Routes::new()
.prefix("/api/admin")
@@ -452,4 +481,6 @@ pub fn routes() -> Routes {
.add("/subscriptions/digest", post(send_subscription_digest))
.add("/subscriptions/{id}", patch(update_subscription).delete(delete_subscription))
.add("/subscriptions/{id}/test", post(test_subscription))
.add("/site-backup/export", get(export_site_backup))
.add("/site-backup/import", post(import_site_backup))
}

View File

@@ -0,0 +1,465 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sea_orm::{ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use crate::{
controllers::admin::check_auth,
models::_entities::{categories, posts, tags},
services::content,
};
/// Shared create/update request body for both category and tag admin
/// endpoints. All fields except `name` are optional; blank strings are
/// normalized to `None` via `trim_to_option` before persisting.
#[derive(Clone, Debug, Deserialize)]
pub struct TaxonomyPayload {
    /// Display name; effectively required (validated by `normalized_name`).
    pub name: Option<String>,
    /// URL slug; derived from `name` via `slugify` when omitted/blank.
    #[serde(default)]
    pub slug: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub cover_image: Option<String>,
    #[serde(default)]
    pub accent_color: Option<String>,
    #[serde(default)]
    pub seo_title: Option<String>,
    #[serde(default)]
    pub seo_description: Option<String>,
}
/// Admin API view of a category, including how many posts reference it
/// by display name or slug (see `build_category_record`).
#[derive(Clone, Debug, Serialize)]
pub struct AdminCategoryRecord {
    pub id: i32,
    /// Display name; falls back to the slug when unset in the DB row.
    pub name: String,
    pub slug: String,
    /// Number of posts whose category matches this record,
    /// case-insensitively against name or slug.
    pub count: usize,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    /// RFC 3339 timestamp strings.
    pub created_at: String,
    pub updated_at: String,
}
/// Admin API view of a tag; mirror of `AdminCategoryRecord` with the
/// count computed from posts' normalized tag lists (see
/// `build_tag_record`).
#[derive(Clone, Debug, Serialize)]
pub struct AdminTagRecord {
    pub id: i32,
    /// Display name; falls back to the slug when unset in the DB row.
    pub name: String,
    pub slug: String,
    /// Number of posts carrying this tag (matched by name or slug).
    pub count: usize,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    /// RFC 3339 timestamp strings.
    pub created_at: String,
    pub updated_at: String,
}
/// Turn an arbitrary display name into a lowercase ASCII slug.
///
/// ASCII alphanumerics are kept (lowercased); runs of whitespace, `-`,
/// or `_` collapse into a single `-`; all other characters are dropped.
/// The result never starts or ends with a dash.
fn slugify(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    // A pending separator is only materialized when another
    // alphanumeric follows, which also strips leading/trailing dashes.
    let mut pending_dash = false;
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            if pending_dash && !out.is_empty() {
                out.push('-');
            }
            pending_dash = false;
            out.push(ch.to_ascii_lowercase());
        } else if ch.is_whitespace() || ch == '-' || ch == '_' {
            pending_dash = true;
        }
    }
    out
}
fn normalized_name(params: &TaxonomyPayload, label: &str) -> Result<String> {
params
.name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
.ok_or_else(|| Error::BadRequest(format!("{label}名称不能为空")))
}
fn normalized_slug(value: Option<&str>, fallback: &str, label: &str) -> Result<String> {
let slug = value
.map(str::trim)
.filter(|item| !item.is_empty())
.map(ToString::to_string)
.unwrap_or_else(|| slugify(fallback));
if slug.is_empty() {
return Err(Error::BadRequest(format!(
"{label} slug 不能为空,请填写英文字母 / 数字 / 连字符"
)));
}
Ok(slug)
}
/// Canonical comparison form for taxonomy tokens: trimmed and
/// ASCII-lowercased.
fn normalized_token(value: &str) -> String {
    let trimmed = value.trim();
    trimmed.to_ascii_lowercase()
}
/// Trim an optional string, collapsing blank/whitespace-only input to
/// `None` so empty form fields clear the column instead of storing "".
fn trim_to_option(value: Option<String>) -> Option<String> {
    match value {
        Some(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        None => None,
    }
}
/// Extract a post's tags from its JSON `tags` column as normalized
/// (trimmed, ASCII-lowercased, non-empty) tokens.
///
/// Malformed or non-array JSON is treated as "no tags" rather than an
/// error — deliberate best-effort so one bad row cannot break counting.
fn post_tag_values(post: &posts::Model) -> Vec<String> {
    post.tags
        .as_ref()
        .and_then(|value| serde_json::from_value::<Vec<String>>(value.clone()).ok())
        .unwrap_or_default()
        .into_iter()
        .map(|item| normalized_token(&item))
        .filter(|item| !item.is_empty())
        .collect()
}
/// Display name for a category; the slug doubles as the name when the
/// `name` column is unset.
fn category_name(item: &categories::Model) -> String {
    match item.name.clone() {
        Some(name) => name,
        None => item.slug.clone(),
    }
}
/// Display name for a tag; the slug doubles as the name when the
/// `name` column is unset.
fn tag_name(item: &tags::Model) -> String {
    item.name
        .as_ref()
        .map_or_else(|| item.slug.clone(), Clone::clone)
}
/// Build the admin record for a category, counting posts whose
/// `category` field matches either the display name or the slug after
/// normalization (trim + ASCII lowercase).
fn build_category_record(item: &categories::Model, post_items: &[posts::Model]) -> AdminCategoryRecord {
    let name = category_name(item);
    // A post may reference the category by name or by slug; both are
    // accepted aliases.
    let aliases = [normalized_token(&name), normalized_token(&item.slug)];
    let count = post_items
        .iter()
        .filter(|post| {
            post.category
                .as_deref()
                .map(normalized_token)
                .is_some_and(|value| aliases.iter().any(|alias| alias == &value))
        })
        .count();
    AdminCategoryRecord {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}
/// Build the admin record for a tag, counting posts whose normalized
/// tag list contains either the display name or the slug.
fn build_tag_record(item: &tags::Model, post_items: &[posts::Model]) -> AdminTagRecord {
    let name = tag_name(item);
    // Tags may be referenced by name or slug; both normalized aliases count.
    let aliases = [normalized_token(&name), normalized_token(&item.slug)];
    let count = post_items
        .iter()
        .filter(|post| {
            post_tag_values(post)
                .into_iter()
                .any(|value| aliases.iter().any(|alias| alias == &value))
        })
        .count();
    AdminTagRecord {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}
async fn load_category(ctx: &AppContext, id: i32) -> Result<categories::Model> {
categories::Entity::find_by_id(id)
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)
}
async fn load_tag(ctx: &AppContext, id: i32) -> Result<tags::Model> {
tags::Entity::find_by_id(id)
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)
}
async fn ensure_category_slug_unique(
ctx: &AppContext,
slug: &str,
exclude_id: Option<i32>,
) -> Result<()> {
if let Some(existing) = categories::Entity::find()
.filter(categories::Column::Slug.eq(slug))
.one(&ctx.db)
.await?
{
if Some(existing.id) != exclude_id {
return Err(Error::BadRequest("分类 slug 已存在".to_string()));
}
}
Ok(())
}
/// Reject the request when another tag already owns `slug`; a row whose
/// id equals `exclude_id` (the record being updated) is not a conflict.
async fn ensure_tag_slug_unique(ctx: &AppContext, slug: &str, exclude_id: Option<i32>) -> Result<()> {
    let existing = tags::Entity::find()
        .filter(tags::Column::Slug.eq(slug))
        .one(&ctx.db)
        .await?;
    let conflict = existing.is_some_and(|record| Some(record.id) != exclude_id);
    if conflict {
        return Err(Error::BadRequest("标签 slug 已存在".to_string()));
    }
    Ok(())
}
/// Load every post row; used to compute per-category / per-tag counts.
async fn load_posts(ctx: &AppContext) -> Result<Vec<posts::Model>> {
    let items = posts::Entity::find().all(&ctx.db).await?;
    Ok(items)
}
/// `GET /api/admin/categories` — list all categories (ordered by slug)
/// with per-category post counts. Markdown-backed posts are synced
/// first so counts reflect the files on disk. Admin-only.
#[debug_handler]
pub async fn list_categories(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
    check_auth(&headers)?;
    content::sync_markdown_posts(&ctx).await?;
    let items = categories::Entity::find()
        .order_by_asc(categories::Column::Slug)
        .all(&ctx.db)
        .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(
        items.into_iter()
            .map(|item| build_category_record(&item, &post_items))
            .collect::<Vec<_>>(),
    )
}
/// `POST /api/admin/categories` — create a category.
///
/// Validates the name (required, non-blank), resolves the slug
/// (explicit or derived from the name), and enforces slug uniqueness
/// before inserting. Blank optional fields are stored as NULL via
/// `trim_to_option`. Returns the created record with its post count.
#[debug_handler]
pub async fn create_category(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "分类")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "分类")?;
    ensure_category_slug_unique(&ctx, &slug, None).await?;
    let item = categories::ActiveModel {
        name: Set(Some(name)),
        slug: Set(slug),
        description: Set(trim_to_option(payload.description)),
        cover_image: Set(trim_to_option(payload.cover_image)),
        accent_color: Set(trim_to_option(payload.accent_color)),
        seo_title: Set(trim_to_option(payload.seo_title)),
        seo_description: Set(trim_to_option(payload.seo_description)),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_category_record(&item, &post_items))
}
/// `PATCH /api/admin/categories/{id}` — update a category and keep
/// markdown content in sync.
///
/// Validates name/slug and slug uniqueness (excluding this row), then
/// rewrites category references in the markdown sources *before*
/// persisting — the rewrite needs the previous name/slug.
/// NOTE(review): the rewrite fires only when the display name changes;
/// a slug-only change does not rewrite references — confirm intended.
#[debug_handler]
pub async fn update_category(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "分类")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "分类")?;
    ensure_category_slug_unique(&ctx, &slug, Some(id)).await?;
    let item = load_category(&ctx, id).await?;
    let previous_name = item.name.clone();
    let previous_slug = item.slug.clone();
    // An unset or blank previous name also counts as "changed".
    if previous_name
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
        != Some(name.as_str())
    {
        content::rewrite_category_references(previous_name.as_deref(), &previous_slug, Some(&name))?;
    }
    let mut active = item.into_active_model();
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(payload.description));
    active.cover_image = Set(trim_to_option(payload.cover_image));
    active.accent_color = Set(trim_to_option(payload.accent_color));
    active.seo_title = Set(trim_to_option(payload.seo_title));
    active.seo_description = Set(trim_to_option(payload.seo_description));
    let updated = active.update(&ctx.db).await?;
    // Re-sync so the returned count reflects the rewritten markdown.
    content::sync_markdown_posts(&ctx).await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_category_record(&updated, &post_items))
}
/// `DELETE /api/admin/categories/{id}` — remove a category.
///
/// References to the category are stripped from the markdown sources
/// (rewrite target `None`) before the row is deleted, then posts are
/// re-synced so the change is reflected immediately.
#[debug_handler]
pub async fn delete_category(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let item = load_category(&ctx, id).await?;
    content::rewrite_category_references(item.name.as_deref(), &item.slug, None)?;
    item.delete(&ctx.db).await?;
    content::sync_markdown_posts(&ctx).await?;
    format::empty()
}
/// `GET /api/admin/tags` — list all tags (ordered by slug) with
/// per-tag post counts. Markdown-backed posts are synced first so
/// counts reflect the files on disk. Admin-only.
#[debug_handler]
pub async fn list_tags(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
    check_auth(&headers)?;
    content::sync_markdown_posts(&ctx).await?;
    let items = tags::Entity::find()
        .order_by_asc(tags::Column::Slug)
        .all(&ctx.db)
        .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(
        items.into_iter()
            .map(|item| build_tag_record(&item, &post_items))
            .collect::<Vec<_>>(),
    )
}
/// `POST /api/admin/tags` — create a tag; mirror of `create_category`.
///
/// Validates the required name, resolves/derives the slug, enforces
/// slug uniqueness, stores blank optional fields as NULL, and returns
/// the created record with its post count.
#[debug_handler]
pub async fn create_tag(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "标签")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "标签")?;
    ensure_tag_slug_unique(&ctx, &slug, None).await?;
    let item = tags::ActiveModel {
        name: Set(Some(name)),
        slug: Set(slug),
        description: Set(trim_to_option(payload.description)),
        cover_image: Set(trim_to_option(payload.cover_image)),
        accent_color: Set(trim_to_option(payload.accent_color)),
        seo_title: Set(trim_to_option(payload.seo_title)),
        seo_description: Set(trim_to_option(payload.seo_description)),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_tag_record(&item, &post_items))
}
/// `PATCH /api/admin/tags/{id}` — update a tag; mirror of
/// `update_category`.
///
/// Rewrites tag references in the markdown sources *before* persisting
/// when the display name changes.
/// NOTE(review): as with categories, a slug-only change does not
/// trigger a reference rewrite — confirm intended.
#[debug_handler]
pub async fn update_tag(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "标签")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "标签")?;
    ensure_tag_slug_unique(&ctx, &slug, Some(id)).await?;
    let item = load_tag(&ctx, id).await?;
    let previous_name = item.name.clone();
    let previous_slug = item.slug.clone();
    // An unset or blank previous name also counts as "changed".
    if previous_name
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
        != Some(name.as_str())
    {
        content::rewrite_tag_references(previous_name.as_deref(), &previous_slug, Some(&name))?;
    }
    let mut active = item.into_active_model();
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(payload.description));
    active.cover_image = Set(trim_to_option(payload.cover_image));
    active.accent_color = Set(trim_to_option(payload.accent_color));
    active.seo_title = Set(trim_to_option(payload.seo_title));
    active.seo_description = Set(trim_to_option(payload.seo_description));
    let updated = active.update(&ctx.db).await?;
    // Re-sync so the returned count reflects the rewritten markdown.
    content::sync_markdown_posts(&ctx).await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_tag_record(&updated, &post_items))
}
/// `DELETE /api/admin/tags/{id}` — remove a tag, stripping its
/// references from markdown sources first, then re-syncing posts.
#[debug_handler]
pub async fn delete_tag(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let item = load_tag(&ctx, id).await?;
    content::rewrite_tag_references(item.name.as_deref(), &item.slug, None)?;
    item.delete(&ctx.db).await?;
    content::sync_markdown_posts(&ctx).await?;
    format::empty()
}
/// Admin taxonomy routes: CRUD for categories and tags under
/// `/api/admin/categories` and `/api/admin/tags`. Auth is enforced
/// inside each handler via `check_auth`, not at the router level.
pub fn routes() -> Routes {
    Routes::new()
        .add(
            "/api/admin/categories",
            get(list_categories).post(create_category),
        )
        .add(
            "/api/admin/categories/{id}",
            patch(update_category).delete(delete_category),
        )
        .add("/api/admin/tags", get(list_tags).post(create_tag))
        .add("/api/admin/tags/{id}", patch(update_tag).delete(delete_tag))
}

View File

@@ -14,12 +14,41 @@ pub struct CategorySummary {
pub name: String,
pub slug: String,
pub count: usize,
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
pub seo_description: Option<String>,
}
/// Full serialized category row returned by the CRUD endpoints
/// (`add`, `update`, `get_one`). Unlike `CategorySummary` it exposes
/// the raw nullable `name` and audit timestamps instead of a post count.
#[derive(Clone, Debug, Serialize)]
pub struct CategoryRecord {
    pub id: i32,
    /// Raw DB value; may be NULL (the slug serves as the display name then).
    pub name: Option<String>,
    pub slug: String,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    /// RFC 3339 timestamp strings.
    pub created_at: String,
    pub updated_at: String,
}
/// Create/update request body for the category endpoints. Blank
/// optional fields are normalized to `None` (stored as NULL) via
/// `trim_to_option` before persisting.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Params {
    /// Display name; validated as required by `normalized_name`.
    pub name: Option<String>,
    /// Explicit slug; derived from the name via `slugify` when absent.
    pub slug: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub cover_image: Option<String>,
    #[serde(default)]
    pub accent_color: Option<String>,
    #[serde(default)]
    pub seo_title: Option<String>,
    #[serde(default)]
    pub seo_description: Option<String>,
}
fn slugify(value: &str) -> String {
@@ -39,6 +68,17 @@ fn slugify(value: &str) -> String {
slug.trim_matches('-').to_string()
}
/// Trim an optional string, mapping blank/whitespace-only input to
/// `None` so empty form fields clear the column instead of storing "".
fn trim_to_option(value: Option<String>) -> Option<String> {
    value
        .map(|item| item.trim().to_string())
        .filter(|trimmed| !trimmed.is_empty())
}
fn normalized_name(params: &Params) -> Result<String> {
let name = params
.name
@@ -60,6 +100,50 @@ fn normalized_slug(params: &Params, fallback: &str) -> String {
.unwrap_or_else(|| slugify(fallback))
}
/// Display name for a category; the slug is used when `name` is NULL.
fn category_name(item: &categories::Model) -> String {
    if let Some(name) = item.name.clone() {
        name
    } else {
        item.slug.clone()
    }
}
/// Build the public list-view summary for a category.
///
/// The post count matches a post's trimmed `category` value against
/// either the display name or the slug, ASCII-case-insensitively —
/// broader than the old exact-match counting this replaced.
fn build_summary(item: &categories::Model, post_items: &[posts::Model]) -> CategorySummary {
    let name = category_name(item);
    let count = post_items
        .iter()
        .filter(|post| {
            post.category
                .as_deref()
                .map(str::trim)
                .is_some_and(|value| value.eq_ignore_ascii_case(&name) || value.eq_ignore_ascii_case(&item.slug))
        })
        .count();
    CategorySummary {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
    }
}
/// Convert a category row into the serializable `CategoryRecord`,
/// formatting the audit timestamps as RFC 3339 strings. Consumes the
/// model so the string fields move without cloning.
fn build_record(item: categories::Model) -> CategoryRecord {
    CategoryRecord {
        id: item.id,
        name: item.name,
        slug: item.slug,
        description: item.description,
        cover_image: item.cover_image,
        accent_color: item.accent_color,
        seo_title: item.seo_title,
        seo_description: item.seo_description,
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<categories::Model> {
let item = categories::Entity::find_by_id(id).one(&ctx.db).await?;
item.ok_or(Error::NotFound)
@@ -77,23 +161,7 @@ pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
let categories = category_items
.into_iter()
.map(|category| {
let name = category
.name
.clone()
.unwrap_or_else(|| category.slug.clone());
let count = post_items
.iter()
.filter(|post| post.category.as_deref().map(str::trim) == Some(name.as_str()))
.count();
CategorySummary {
id: category.id,
name,
slug: category.slug,
count,
}
})
.map(|category| build_summary(&category, &post_items))
.collect::<Vec<_>>();
format::json(categories)
@@ -113,18 +181,28 @@ pub async fn add(State(ctx): State<AppContext>, Json(params): Json<Params>) -> R
let mut model = existing_category.into_active_model();
model.name = Set(Some(name));
model.slug = Set(slug);
model.description = Set(trim_to_option(params.description));
model.cover_image = Set(trim_to_option(params.cover_image));
model.accent_color = Set(trim_to_option(params.accent_color));
model.seo_title = Set(trim_to_option(params.seo_title));
model.seo_description = Set(trim_to_option(params.seo_description));
model.update(&ctx.db).await?
} else {
categories::ActiveModel {
name: Set(Some(name)),
slug: Set(slug),
description: Set(trim_to_option(params.description)),
cover_image: Set(trim_to_option(params.cover_image)),
accent_color: Set(trim_to_option(params.accent_color)),
seo_title: Set(trim_to_option(params.seo_title)),
seo_description: Set(trim_to_option(params.seo_description)),
..Default::default()
}
.insert(&ctx.db)
.await?
};
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -155,9 +233,14 @@ pub async fn update(
let mut item = item.into_active_model();
item.name = Set(Some(name));
item.slug = Set(slug);
item.description = Set(trim_to_option(params.description));
item.cover_image = Set(trim_to_option(params.cover_image));
item.accent_color = Set(trim_to_option(params.accent_color));
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -171,7 +254,7 @@ pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Resul
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
format::json(load_item(&ctx, id).await?)
format::json(build_record(load_item(&ctx, id).await?))
}
pub fn routes() -> Routes {

View File

@@ -122,6 +122,8 @@ pub struct CreateCommentRequest {
pub captcha_token: Option<String>,
#[serde(default, alias = "captchaAnswer")]
pub captcha_answer: Option<String>,
#[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>,
#[serde(default)]
pub website: Option<String>,
}
@@ -383,6 +385,7 @@ pub async fn add(
author: author.as_deref(),
content: content.as_deref(),
honeypot_website: params.website.as_deref(),
turnstile_token: params.turnstile_token.as_deref(),
captcha_token: params.captcha_token.as_deref(),
captcha_answer: params.captcha_answer.as_deref(),
},

View File

@@ -1,5 +1,6 @@
pub mod admin;
pub mod admin_api;
pub mod admin_taxonomy;
pub mod admin_ops;
pub mod ai;
pub mod auth;

View File

@@ -95,6 +95,60 @@ fn publicly_accessible(post: &Model) -> bool {
content::is_post_publicly_accessible(post, Utc::now().fixed_offset())
}
/// Canonicalize the requested post sort key.
///
/// Accepts "updated_at"/"updated" and "title" (case-insensitive,
/// whitespace-tolerant); everything else — including `None` — falls
/// back to "created_at".
fn normalize_post_sort_by(value: Option<&str>) -> String {
    let requested = value.unwrap_or("").trim().to_ascii_lowercase();
    let canonical = match requested.as_str() {
        "updated_at" | "updated" => "updated_at",
        "title" => "title",
        _ => "created_at",
    };
    canonical.to_owned()
}
/// Canonicalize the requested sort direction: only an explicit "asc"
/// (case-insensitive, whitespace-tolerant) yields ascending; anything
/// else — including `None` — is "desc".
fn normalize_sort_order(value: Option<&str>) -> String {
    let requested = value.unwrap_or("").trim().to_ascii_lowercase();
    if requested == "asc" {
        "asc".to_string()
    } else {
        "desc".to_string()
    }
}
/// Sort posts in place by the (already normalized) `sort_by` key:
/// "updated_at", "title" (ASCII-case-insensitive, slug fallback for
/// untitled posts), or default creation time. The ordering is reversed
/// unless `sort_order` is "asc", and ties always break on ascending id
/// so pagination is deterministic in either direction.
fn sort_posts(items: &mut [Model], sort_by: &str, sort_order: &str) {
    items.sort_by(|left, right| {
        let ordering = match sort_by {
            "updated_at" => left.updated_at.cmp(&right.updated_at),
            "title" => left
                .title
                .as_deref()
                .unwrap_or(&left.slug)
                .to_ascii_lowercase()
                .cmp(
                    &right
                        .title
                        .as_deref()
                        .unwrap_or(&right.slug)
                        .to_ascii_lowercase(),
                ),
            _ => left.created_at.cmp(&right.created_at),
        };
        let ordering = if sort_order == "asc" {
            ordering
        } else {
            ordering.reverse()
        };
        // Stable tiebreaker: id ascending regardless of direction.
        ordering.then_with(|| left.id.cmp(&right.id))
    });
}
fn parse_optional_markdown_datetime(
value: Option<&str>,
) -> Option<chrono::DateTime<chrono::FixedOffset>> {
@@ -388,6 +442,28 @@ pub struct ListQuery {
pub preview: Option<bool>,
}
/// Query string for `GET /api/posts/page` — list filters plus paging
/// and sorting controls.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct PagedPostsQuery {
    /// All `ListQuery` filters, flattened into the same query string.
    #[serde(flatten)]
    pub filters: ListQuery,
    /// 1-based page number; clamped to [1, total_pages] by the handler.
    pub page: Option<u64>,
    /// Items per page; clamped to 1..=100 by the handler.
    /// NOTE(review): `alias = "page_size"` duplicates the field name and
    /// is a no-op unless a container-level rename applies — possibly
    /// `pageSize` was intended; confirm.
    #[serde(alias = "page_size")]
    pub page_size: Option<u64>,
    /// Raw sort key; canonicalized by `normalize_post_sort_by`.
    pub sort_by: Option<String>,
    /// Raw direction; canonicalized by `normalize_sort_order`.
    pub sort_order: Option<String>,
}
/// Response envelope for the paginated post listing. Echoes the
/// effective (normalized/clamped) paging and sort parameters so clients
/// can render controls without re-deriving them.
#[derive(Clone, Debug, Serialize)]
pub struct PagedPostsResponse {
    pub items: Vec<Model>,
    /// Effective 1-based page after clamping.
    pub page: u64,
    /// Effective page size after clamping to 1..=100.
    pub page_size: u64,
    /// Total matching posts across all pages.
    pub total: usize,
    /// Always at least 1, even when `total` is 0.
    pub total_pages: u64,
    /// Canonical sort key actually applied.
    pub sort_by: String,
    /// Canonical direction actually applied ("asc" or "desc").
    pub sort_order: String,
}
#[derive(Clone, Debug, Default, Deserialize)]
pub struct LookupQuery {
#[serde(default, deserialize_with = "deserialize_boolish_option")]
@@ -469,6 +545,61 @@ pub async fn list(
format::json(filtered)
}
/// `GET /api/posts/page` — filtered, sorted, paginated post listing.
///
/// Loads all posts and applies `ListQuery` filters in memory (matching
/// the behavior of the non-paged `list` handler), sorts with
/// `sort_posts`, then slices out the requested page. `page_size` is
/// clamped to 1..=100 and `page` to [1, total_pages]; `total_pages` is
/// floored at 1 so the clamp stays well-defined on an empty result.
#[debug_handler]
pub async fn list_page(
    Query(query): Query<PagedPostsQuery>,
    State(ctx): State<AppContext>,
    headers: HeaderMap,
) -> Result<Response> {
    // Pick up markdown-backed posts created/edited on disk first.
    content::sync_markdown_posts(&ctx).await?;
    let preview = request_preview_mode(query.filters.preview, &headers);
    // Private posts are only ever visible in preview mode (default on there).
    let include_private = preview && query.filters.include_private.unwrap_or(true);
    let include_redirects = query.filters.include_redirects.unwrap_or(preview);
    let page_size = query.page_size.unwrap_or(20).clamp(1, 100);
    let sort_by = normalize_post_sort_by(query.sort_by.as_deref());
    let sort_order = normalize_sort_order(query.sort_order.as_deref());
    let mut filtered = Entity::find()
        .order_by_desc(Column::CreatedAt)
        .all(&ctx.db)
        .await?
        .into_iter()
        .filter(|post| {
            should_include_post(
                post,
                &query.filters,
                preview,
                include_private,
                include_redirects,
            )
        })
        .collect::<Vec<_>>();
    sort_posts(&mut filtered, &sort_by, &sort_order);
    let total = filtered.len();
    // Ceiling division, floored at 1 page.
    let total_pages = std::cmp::max(1, ((total as u64) + page_size - 1) / page_size);
    let page = query.page.unwrap_or(1).clamp(1, total_pages);
    let start = ((page - 1) * page_size) as usize;
    let end = std::cmp::min(start + page_size as usize, total);
    // `start >= total` happens only when total == 0 (page is clamped),
    // in which case an empty slice is returned.
    let items = if start >= total {
        Vec::new()
    } else {
        filtered[start..end].to_vec()
    };
    format::json(PagedPostsResponse {
        items,
        page,
        page_size,
        total,
        total_pages,
        sort_by,
        sort_order,
    })
}
#[debug_handler]
pub async fn add(
headers: HeaderMap,
@@ -876,6 +1007,7 @@ pub async fn delete_markdown_by_slug(
pub fn routes() -> Routes {
Routes::new()
.prefix("api/posts/")
.add("page", get(list_page))
.add("/", get(list))
.add("/", post(add))
.add("markdown", post(create_markdown))

View File

@@ -274,6 +274,71 @@ fn is_preview_search(query: &SearchQuery, headers: &HeaderMap) -> bool {
.unwrap_or(false)
}
/// Map a user-supplied sort key onto one of the supported search
/// orderings; unrecognized or missing values fall back to relevance.
fn normalize_search_sort_by(value: Option<&str>) -> String {
    let normalized = value.map(str::trim).unwrap_or_default().to_ascii_lowercase();
    let canonical = match normalized.as_str() {
        // "created_at" is accepted as a legacy spelling of "newest".
        "newest" | "created_at" => "newest",
        "oldest" => "oldest",
        "title" => "title",
        _ => "relevance",
    };
    canonical.to_string()
}
/// Normalize a sort direction. Explicit "asc"/"desc" (any case, padded)
/// win; otherwise the default depends on the sort key: title sorts
/// ascending, everything else descending.
fn normalize_sort_order(value: Option<&str>, sort_by: &str) -> String {
    let normalized = value.map(str::trim).unwrap_or_default().to_ascii_lowercase();
    if normalized == "asc" || normalized == "desc" {
        return normalized;
    }
    if sort_by == "title" { "asc" } else { "desc" }.to_string()
}
/// Sort search hits in place.
///
/// For the relevance/newest/oldest keys the direction is fixed by the
/// key itself and `sort_order` is ignored (the early return below).
/// Only the "title" key honors `sort_order`, with a slug tie-breaker so
/// equal titles order deterministically.
fn sort_search_results(items: &mut [SearchResult], sort_by: &str, sort_order: &str) {
    items.sort_by(|left, right| {
        let ordering = match sort_by {
            // Most recent first: compare right-to-left on created_at.
            "newest" => right.created_at.cmp(&left.created_at),
            "oldest" => left.created_at.cmp(&right.created_at),
            // Case-insensitive title compare, falling back to slug when
            // a result has no title.
            "title" => left
                .title
                .as_deref()
                .unwrap_or(&left.slug)
                .to_ascii_lowercase()
                .cmp(
                    &right
                        .title
                        .as_deref()
                        .unwrap_or(&right.slug)
                        .to_ascii_lowercase(),
                ),
            // Relevance (and any unknown key): higher rank first.
            // partial_cmp guards against NaN ranks; ties fall back to recency.
            _ => right
                .rank
                .partial_cmp(&left.rank)
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| right.created_at.cmp(&left.created_at)),
        };
        if sort_by == "relevance" || sort_by == "newest" || sort_by == "oldest" {
            return ordering;
        }
        // Only "title" reaches here: apply the requested direction, then
        // tie-break on slug for a stable total order.
        let ordering = if sort_order == "asc" {
            ordering
        } else {
            ordering.reverse()
        };
        ordering.then_with(|| left.slug.cmp(&right.slug))
    });
}
#[derive(Clone, Debug, Default, Deserialize)]
pub struct SearchQuery {
pub q: Option<String>,
@@ -286,6 +351,17 @@ pub struct SearchQuery {
pub preview: Option<bool>,
}
/// Query parameters for the paginated search endpoint.
///
/// Fix: the original `#[serde(alias = "page_size")]` was a no-op (the
/// alias equaled the field's own name). The rest of this file accepts
/// camelCase aliases (e.g. `paragraphCommentsEnabled`), so camelCase
/// aliases are added here too — a backward-compatible widening: the
/// snake_case forms still work via the field names themselves.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct SearchPageQuery {
    /// Base search filters (q, category, tag, post type, preview, ...).
    #[serde(flatten)]
    pub search: SearchQuery,
    /// 1-based page number; clamped into range by the handler.
    pub page: Option<u64>,
    /// Items per page; the handler clamps to 1..=100 (default 20).
    #[serde(alias = "pageSize")]
    pub page_size: Option<u64>,
    /// Sort key: relevance | newest | oldest | title.
    #[serde(alias = "sortBy")]
    pub sort_by: Option<String>,
    /// Sort direction: asc | desc (only honored for the title sort).
    #[serde(alias = "sortOrder")]
    pub sort_order: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
pub struct SearchResult {
pub id: i32,
@@ -296,37 +372,47 @@ pub struct SearchResult {
pub category: Option<String>,
pub tags: Option<Value>,
pub post_type: Option<String>,
pub image: Option<String>,
pub pinned: Option<bool>,
pub created_at: chrono::DateTime<chrono::Utc>,
pub updated_at: chrono::DateTime<chrono::Utc>,
pub rank: f64,
}
#[debug_handler]
pub async fn search(
Query(query): Query<SearchQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
let started_at = Instant::now();
let preview_search = is_preview_search(&query, &headers);
content::sync_markdown_posts(&ctx).await?;
/// JSON body returned by `search_page`: one page of results plus
/// pagination metadata echoing the normalized query parameters.
#[derive(Clone, Debug, Serialize)]
pub struct PagedSearchResponse {
    /// The (trimmed) query string that was executed.
    pub query: String,
    pub items: Vec<SearchResult>,
    /// 1-based page number after clamping.
    pub page: u64,
    pub page_size: u64,
    /// Total matching results before pagination.
    pub total: usize,
    /// Always >= 1, even for an empty result set.
    pub total_pages: u64,
    pub sort_by: String,
    pub sort_order: String,
}
let q = query.q.unwrap_or_default().trim().to_string();
async fn build_search_results(
ctx: &AppContext,
query: &SearchQuery,
headers: &HeaderMap,
) -> Result<(String, bool, Vec<SearchResult>)> {
let preview_search = is_preview_search(query, headers);
content::sync_markdown_posts(ctx).await?;
let q = query.q.clone().unwrap_or_default().trim().to_string();
if q.is_empty() {
return format::json(Vec::<SearchResult>::new());
return Ok((q, preview_search, Vec::new()));
}
if !preview_search {
abuse_guard::enforce_public_scope(
"search",
abuse_guard::detect_client_ip(&headers).as_deref(),
abuse_guard::detect_client_ip(headers).as_deref(),
Some(&q),
)?;
}
let limit = query.limit.unwrap_or(20).clamp(1, 100) as usize;
let settings = site_settings::load_current(&ctx).await.ok();
let settings = site_settings::load_current(ctx).await.ok();
let synonym_groups = settings
.as_ref()
.map(|item| parse_synonym_groups(&item.search_synonyms))
@@ -342,7 +428,12 @@ pub async fn search(
})
.collect::<Vec<_>>();
if let Some(category) = query.category.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
if let Some(category) = query
.category
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
all_posts.retain(|post| {
post.category
.as_deref()
@@ -355,7 +446,12 @@ pub async fn search(
all_posts.retain(|post| post_has_tag(post, tag));
}
if let Some(post_type) = query.post_type.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
if let Some(post_type) = query
.post_type
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
all_posts.retain(|post| {
post.post_type
.as_deref()
@@ -378,6 +474,7 @@ pub async fn search(
category: post.category.clone(),
tags: post.tags.clone(),
post_type: post.post_type.clone(),
image: post.image.clone(),
pinned: post.pinned,
created_at: post.created_at.into(),
updated_at: post.updated_at.into(),
@@ -401,6 +498,7 @@ pub async fn search(
category: post.category.clone(),
tags: post.tags.clone(),
post_type: post.post_type.clone(),
image: post.image.clone(),
pinned: post.pinned,
created_at: post.created_at.into(),
updated_at: post.updated_at.into(),
@@ -410,13 +508,22 @@ pub async fn search(
}
}
results.sort_by(|left, right| {
right
.rank
.partial_cmp(&left.rank)
.unwrap_or(std::cmp::Ordering::Equal)
.then_with(|| right.created_at.cmp(&left.created_at))
});
sort_search_results(&mut results, "relevance", "desc");
Ok((q, preview_search, results))
}
#[debug_handler]
pub async fn search(
Query(query): Query<SearchQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
let started_at = Instant::now();
let limit = query.limit.unwrap_or(20).clamp(1, 100) as usize;
let (q, preview_search, mut results) = build_search_results(&ctx, &query, &headers).await?;
if q.is_empty() {
return format::json(Vec::<SearchResult>::new());
}
results.truncate(limit);
if !preview_search {
@@ -433,6 +540,70 @@ pub async fn search(
format::json(results)
}
pub fn routes() -> Routes {
Routes::new().prefix("api/search/").add("/", get(search))
/// Paginated variant of `search`: runs the same matching pipeline via
/// `build_search_results`, then re-sorts and slices one page, returning
/// a `PagedSearchResponse` instead of a flat list.
#[debug_handler]
pub async fn search_page(
    Query(query): Query<SearchPageQuery>,
    State(ctx): State<AppContext>,
    headers: HeaderMap,
) -> Result<Response> {
    let started_at = Instant::now();
    let page_size = query.page_size.unwrap_or(20).clamp(1, 100);
    let sort_by = normalize_search_sort_by(query.sort_by.as_deref());
    let sort_order = normalize_sort_order(query.sort_order.as_deref(), &sort_by);
    let (q, preview_search, mut results) = build_search_results(&ctx, &query.search, &headers).await?;
    // An empty query short-circuits with an empty first page and no
    // analytics event.
    if q.is_empty() {
        return format::json(PagedSearchResponse {
            query: q,
            items: Vec::new(),
            page: 1,
            page_size,
            total: 0,
            total_pages: 1,
            sort_by,
            sort_order,
        });
    }
    // build_search_results returns relevance order; re-sort to the
    // requested key/direction before slicing.
    sort_search_results(&mut results, &sort_by, &sort_order);
    let total = results.len();
    let total_pages = std::cmp::max(1, ((total as u64) + page_size - 1) / page_size);
    // Clamp the page into range so an oversized page number yields the
    // last page rather than an error.
    let page = query.page.unwrap_or(1).clamp(1, total_pages);
    let start = ((page - 1) * page_size) as usize;
    let end = std::cmp::min(start + page_size as usize, total);
    let items = if start >= total {
        Vec::new()
    } else {
        results[start..end].to_vec()
    };
    // Preview requests are excluded from search analytics.
    if !preview_search {
        analytics::record_search_event(
            &ctx,
            &q,
            total,
            &headers,
            started_at.elapsed().as_millis() as i64,
        )
        .await;
    }
    format::json(PagedSearchResponse {
        query: q,
        items,
        page,
        page_size,
        total,
        total_pages,
        sort_by,
        sort_order,
    })
}
/// Public search routes: GET /api/search/ (flat list) and
/// GET /api/search/page (paginated).
pub fn routes() -> Routes {
    // NOTE(review): "page" is registered before the "/" route — keep this
    // order in case the router matches in registration order.
    Routes::new()
        .prefix("api/search/")
        .add("page", get(search_page))
        .add("/", get(search))
}

View File

@@ -93,6 +93,22 @@ pub struct SiteSettingsPayload {
pub ai_enabled: Option<bool>,
#[serde(default, alias = "paragraphCommentsEnabled")]
pub paragraph_comments_enabled: Option<bool>,
#[serde(default, alias = "commentTurnstileEnabled")]
pub comment_turnstile_enabled: Option<bool>,
#[serde(default, alias = "subscriptionTurnstileEnabled")]
pub subscription_turnstile_enabled: Option<bool>,
#[serde(default, alias = "webPushEnabled")]
pub web_push_enabled: Option<bool>,
#[serde(default, alias = "turnstileSiteKey")]
pub turnstile_site_key: Option<String>,
#[serde(default, alias = "turnstileSecretKey")]
pub turnstile_secret_key: Option<String>,
#[serde(default, alias = "webPushVapidPublicKey")]
pub web_push_vapid_public_key: Option<String>,
#[serde(default, alias = "webPushVapidPrivateKey")]
pub web_push_vapid_private_key: Option<String>,
#[serde(default, alias = "webPushVapidSubject")]
pub web_push_vapid_subject: Option<String>,
#[serde(default, alias = "aiProvider")]
pub ai_provider: Option<String>,
#[serde(default, alias = "aiApiBase")]
@@ -139,6 +155,8 @@ pub struct SiteSettingsPayload {
pub seo_default_twitter_handle: Option<String>,
#[serde(default, alias = "notificationWebhookUrl")]
pub notification_webhook_url: Option<String>,
#[serde(default, alias = "notificationChannelType")]
pub notification_channel_type: Option<String>,
#[serde(default, alias = "notificationCommentEnabled")]
pub notification_comment_enabled: Option<bool>,
#[serde(default, alias = "notificationFriendLinkEnabled")]
@@ -177,6 +195,11 @@ pub struct PublicSiteSettingsResponse {
pub music_playlist: Option<serde_json::Value>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_turnstile_enabled: bool,
pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>,
pub web_push_vapid_public_key: Option<String>,
pub subscription_popup_enabled: bool,
pub subscription_popup_title: String,
pub subscription_popup_description: String,
@@ -220,6 +243,17 @@ fn normalize_optional_int(value: Option<i32>, min: i32, max: i32) -> Option<i32>
value.map(|item| item.clamp(min, max))
}
/// Validate a notification channel type. Only "ntfy" and "webhook" are
/// accepted (case-insensitive, surrounding whitespace ignored); any
/// other value — including None — maps to `None`.
fn normalize_notification_channel_type(value: Option<String>) -> Option<String> {
    let raw = value?;
    let normalized = raw.trim().to_ascii_lowercase();
    if matches!(normalized.as_str(), "ntfy" | "webhook") {
        Some(normalized)
    } else {
        None
    }
}
/// Default for the subscription popup flag: enabled unless explicitly
/// turned off in settings.
pub(crate) fn default_subscription_popup_enabled() -> bool {
    true
}
@@ -515,6 +549,32 @@ impl SiteSettingsPayload {
if let Some(paragraph_comments_enabled) = self.paragraph_comments_enabled {
item.paragraph_comments_enabled = Some(paragraph_comments_enabled);
}
if let Some(comment_turnstile_enabled) = self.comment_turnstile_enabled {
item.comment_turnstile_enabled = Some(comment_turnstile_enabled);
}
if let Some(subscription_turnstile_enabled) = self.subscription_turnstile_enabled {
item.subscription_turnstile_enabled = Some(subscription_turnstile_enabled);
}
if let Some(web_push_enabled) = self.web_push_enabled {
item.web_push_enabled = Some(web_push_enabled);
}
if let Some(turnstile_site_key) = self.turnstile_site_key {
item.turnstile_site_key = normalize_optional_string(Some(turnstile_site_key));
}
if let Some(turnstile_secret_key) = self.turnstile_secret_key {
item.turnstile_secret_key = normalize_optional_string(Some(turnstile_secret_key));
}
if let Some(web_push_vapid_public_key) = self.web_push_vapid_public_key {
item.web_push_vapid_public_key =
normalize_optional_string(Some(web_push_vapid_public_key));
}
if let Some(web_push_vapid_private_key) = self.web_push_vapid_private_key {
item.web_push_vapid_private_key =
normalize_optional_string(Some(web_push_vapid_private_key));
}
if let Some(web_push_vapid_subject) = self.web_push_vapid_subject {
item.web_push_vapid_subject = normalize_optional_string(Some(web_push_vapid_subject));
}
let provider_list_supplied = self.ai_providers.is_some();
let provided_ai_providers = self.ai_providers.map(normalize_ai_provider_configs);
let requested_active_provider_id = self
@@ -591,6 +651,10 @@ impl SiteSettingsPayload {
item.notification_webhook_url =
normalize_optional_string(Some(notification_webhook_url));
}
if self.notification_channel_type.is_some() {
item.notification_channel_type =
normalize_notification_channel_type(self.notification_channel_type);
}
if let Some(notification_comment_enabled) = self.notification_comment_enabled {
item.notification_comment_enabled = Some(notification_comment_enabled);
}
@@ -699,6 +763,14 @@ fn default_payload() -> SiteSettingsPayload {
]),
ai_enabled: Some(false),
paragraph_comments_enabled: Some(true),
comment_turnstile_enabled: Some(false),
subscription_turnstile_enabled: Some(false),
web_push_enabled: Some(false),
turnstile_site_key: None,
turnstile_secret_key: None,
web_push_vapid_public_key: None,
web_push_vapid_private_key: None,
web_push_vapid_subject: None,
ai_provider: Some(ai::provider_name(None)),
ai_api_base: Some(ai::default_api_base().to_string()),
ai_api_key: Some(ai::default_api_key().to_string()),
@@ -725,6 +797,7 @@ fn default_payload() -> SiteSettingsPayload {
seo_default_og_image: None,
seo_default_twitter_handle: None,
notification_webhook_url: None,
notification_channel_type: Some("webhook".to_string()),
notification_comment_enabled: Some(false),
notification_friend_link_enabled: Some(false),
subscription_popup_enabled: Some(default_subscription_popup_enabled()),
@@ -760,6 +833,18 @@ pub(crate) async fn load_current(ctx: &AppContext) -> Result<Model> {
}
fn public_response(model: Model) -> PublicSiteSettingsResponse {
let turnstile_site_key = crate::services::turnstile::site_key(&model);
let web_push_vapid_public_key = crate::services::web_push::public_key(&model);
let comment_turnstile_enabled = crate::services::turnstile::is_enabled(
&model,
crate::services::turnstile::TurnstileScope::Comment,
);
let subscription_turnstile_enabled = crate::services::turnstile::is_enabled(
&model,
crate::services::turnstile::TurnstileScope::Subscription,
);
let web_push_enabled = crate::services::web_push::is_enabled(&model);
PublicSiteSettingsResponse {
id: model.id,
site_name: model.site_name,
@@ -781,6 +866,11 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
music_playlist: model.music_playlist,
ai_enabled: model.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true),
comment_turnstile_enabled,
subscription_turnstile_enabled,
web_push_enabled,
turnstile_site_key,
web_push_vapid_public_key,
subscription_popup_enabled: model
.subscription_popup_enabled
.unwrap_or_else(default_subscription_popup_enabled),

View File

@@ -1,7 +1,9 @@
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use crate::services::{abuse_guard, admin_audit, subscriptions};
use axum::http::header;
use crate::services::{abuse_guard, admin_audit, subscriptions, turnstile};
#[derive(Clone, Debug, Deserialize)]
pub struct PublicSubscriptionPayload {
@@ -10,6 +12,17 @@ pub struct PublicSubscriptionPayload {
pub display_name: Option<String>,
#[serde(default)]
pub source: Option<String>,
#[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct PublicBrowserPushSubscriptionPayload {
pub subscription: serde_json::Value,
#[serde(default)]
pub source: Option<String>,
#[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -55,6 +68,19 @@ fn public_subscription_metadata(source: Option<String>) -> serde_json::Value {
})
}
/// Build the metadata JSON stored alongside a public browser-push
/// subscription: the caller-provided `source`, the raw PushSubscription
/// object, and the subscriber's user agent. The fixed `kind` field tags
/// the record as browser push.
fn public_browser_push_metadata(
    source: Option<String>,
    subscription: serde_json::Value,
    user_agent: Option<String>,
) -> serde_json::Value {
    serde_json::json!({
        "source": source,
        "kind": "browser-push",
        "subscription": subscription,
        "user_agent": user_agent,
    })
}
#[debug_handler]
pub async fn subscribe(
State(ctx): State<AppContext>,
@@ -62,11 +88,19 @@ pub async fn subscribe(
Json(payload): Json<PublicSubscriptionPayload>,
) -> Result<Response> {
let email = payload.email.trim().to_ascii_lowercase();
let client_ip = abuse_guard::detect_client_ip(&headers);
abuse_guard::enforce_public_scope(
"subscription",
abuse_guard::detect_client_ip(&headers).as_deref(),
client_ip.as_deref(),
Some(&email),
)?;
let _ = turnstile::verify_if_enabled(
&ctx,
turnstile::TurnstileScope::Subscription,
payload.turnstile_token.as_deref(),
client_ip.as_deref(),
)
.await?;
let result = subscriptions::create_public_email_subscription(
&ctx,
@@ -103,6 +137,76 @@ pub async fn subscribe(
})
}
/// POST /api/subscriptions/browser-push — register a Web Push subscription.
///
/// Flow: feature-flag check → endpoint extraction/validation → abuse
/// guard (keyed by client IP and endpoint) → optional Turnstile
/// verification → persist subscription → audit log → JSON response.
#[debug_handler]
pub async fn subscribe_browser_push(
    State(ctx): State<AppContext>,
    headers: axum::http::HeaderMap,
    Json(payload): Json<PublicBrowserPushSubscriptionPayload>,
) -> Result<Response> {
    // Reject outright when web push is disabled in site settings.
    let settings = crate::controllers::site_settings::load_current(&ctx).await?;
    if !crate::services::web_push::is_enabled(&settings) {
        return Err(Error::BadRequest("浏览器推送未启用".to_string()));
    }
    // The push endpoint URL serves as the subscription's identity; a
    // missing/blank endpoint is a 400.
    let endpoint = payload
        .subscription
        .get("endpoint")
        .and_then(serde_json::Value::as_str)
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .ok_or_else(|| Error::BadRequest("browser push subscription.endpoint 不能为空".to_string()))?
        .to_string();
    let client_ip = abuse_guard::detect_client_ip(&headers);
    let user_agent = headers
        .get(header::USER_AGENT)
        .and_then(|value| value.to_str().ok())
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(ToString::to_string);
    abuse_guard::enforce_public_scope("browser-push-subscription", client_ip.as_deref(), Some(&endpoint))?;
    // Turnstile runs only when enabled for the subscription scope; the
    // verification result itself is not needed beyond pass/fail.
    let _ = turnstile::verify_if_enabled(
        &ctx,
        turnstile::TurnstileScope::Subscription,
        payload.turnstile_token.as_deref(),
        client_ip.as_deref(),
    )
    .await?;
    let result = subscriptions::create_public_web_push_subscription(
        &ctx,
        payload.subscription.clone(),
        Some(public_browser_push_metadata(
            payload.source,
            payload.subscription,
            user_agent,
        )),
    )
    .await?;
    // Audit trail: actor is None because this is an anonymous public endpoint.
    admin_audit::log_event(
        &ctx,
        None,
        "subscription.public.web_push.active",
        "subscription",
        Some(result.subscription.id.to_string()),
        Some(result.subscription.target.clone()),
        Some(serde_json::json!({
            "channel_type": result.subscription.channel_type,
            "status": result.subscription.status,
        })),
    )
    .await?;
    // Browser push has no e-mail confirmation step.
    format::json(PublicSubscriptionResponse {
        ok: true,
        subscription_id: result.subscription.id,
        status: result.subscription.status,
        requires_confirmation: false,
        message: result.message,
    })
}
#[debug_handler]
pub async fn confirm(
State(ctx): State<AppContext>,
@@ -196,6 +300,7 @@ pub fn routes() -> Routes {
Routes::new()
.prefix("/api/subscriptions")
.add("/", post(subscribe))
.add("/browser-push", post(subscribe_browser_push))
.add("/confirm", post(confirm))
.add("/manage", get(manage).patch(update_manage))
.add("/unsubscribe", post(unsubscribe))

View File

@@ -2,43 +2,217 @@
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use loco_rs::prelude::*;
use sea_orm::{ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::models::_entities::tags::{ActiveModel, Entity, Model};
use crate::models::_entities::{posts, tags};
use crate::services::content;
#[derive(Clone, Debug, Serialize)]
pub struct TagSummary {
pub id: i32,
pub name: String,
pub slug: String,
pub count: usize,
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
pub seo_description: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
pub struct TagRecord {
pub id: i32,
pub name: Option<String>,
pub slug: String,
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
pub seo_description: Option<String>,
pub created_at: String,
pub updated_at: String,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Params {
pub name: Option<String>,
pub slug: String,
pub slug: Option<String>,
#[serde(default)]
pub description: Option<String>,
#[serde(default)]
pub cover_image: Option<String>,
#[serde(default)]
pub accent_color: Option<String>,
#[serde(default)]
pub seo_title: Option<String>,
#[serde(default)]
pub seo_description: Option<String>,
}
impl Params {
fn update(&self, item: &mut ActiveModel) {
item.name = Set(self.name.clone());
item.slug = Set(self.slug.clone());
/// Trim a string option, mapping empty or whitespace-only input to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value
        .map(|item| item.trim().to_string())
        .filter(|trimmed| !trimmed.is_empty())
}
/// Derive a URL slug: ASCII alphanumerics lowercased, runs of
/// whitespace/'-'/'_' collapsed to a single '-', all other characters
/// dropped, and no leading or trailing dashes.
fn slugify(value: &str) -> String {
    let mut out = String::new();
    let mut pending_separator = false;
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            // Flush at most one deferred separator, and never at the start.
            if pending_separator && !out.is_empty() {
                out.push('-');
            }
            pending_separator = false;
            out.push(ch.to_ascii_lowercase());
        } else if ch.is_whitespace() || ch == '-' || ch == '_' {
            pending_separator = true;
        }
        // Any other character is silently dropped.
    }
    out
}
fn normalized_name(params: &Params) -> Result<String> {
params
.name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
.ok_or_else(|| Error::BadRequest("tag name is required".to_string()))
}
/// Prefer an explicitly supplied slug (trimmed); otherwise derive one
/// from `fallback` (normally the normalized tag name).
fn normalized_slug(params: &Params, fallback: &str) -> String {
    match params.slug.as_deref().map(str::trim) {
        Some(slug) if !slug.is_empty() => slug.to_string(),
        _ => slugify(fallback),
    }
}
/// Display name for a tag, falling back to the slug when no name is set.
fn tag_name(item: &tags::Model) -> String {
    item.name.clone().unwrap_or_else(|| item.slug.clone())
}

/// Extract a post's tag list as trimmed, lowercased strings.
/// Non-string entries and blanks are dropped; a missing or non-array
/// `tags` column yields an empty vec.
fn tag_values(post: &posts::Model) -> Vec<String> {
    post.tags
        .as_ref()
        .and_then(Value::as_array)
        .cloned()
        .unwrap_or_default()
        .into_iter()
        .filter_map(|item| item.as_str().map(|value| value.trim().to_ascii_lowercase()))
        .filter(|item| !item.is_empty())
        .collect()
}

/// Build the public summary for one tag, counting how many of
/// `post_items` reference it. A post matches when any of its tag values
/// equals the tag's name or slug (case-insensitive).
fn build_summary(item: &tags::Model, post_items: &[posts::Model]) -> TagSummary {
    let name = tag_name(item);
    // Both the display name and the slug count as aliases, since post
    // tag arrays may use either form.
    let aliases = [name.trim().to_ascii_lowercase(), item.slug.trim().to_ascii_lowercase()];
    let count = post_items
        .iter()
        .filter(|post| {
            tag_values(post)
                .into_iter()
                .any(|value| aliases.iter().any(|alias| alias == &value))
        })
        .count();
    TagSummary {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
    }
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
let item = Entity::find_by_id(id).one(&ctx.db).await?;
/// Map a tag entity into the admin-facing record shape, with timestamps
/// serialized as RFC 3339 strings.
fn build_record(item: tags::Model) -> TagRecord {
    TagRecord {
        id: item.id,
        name: item.name,
        slug: item.slug,
        description: item.description,
        cover_image: item.cover_image,
        accent_color: item.accent_color,
        seo_title: item.seo_title,
        seo_description: item.seo_description,
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}

/// Fetch a tag by id, translating "no row" into a 404.
async fn load_item(ctx: &AppContext, id: i32) -> Result<tags::Model> {
    let item = tags::Entity::find_by_id(id).one(&ctx.db).await?;
    item.ok_or_else(|| Error::NotFound)
}
#[debug_handler]
pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
format::json(Entity::find().all(&ctx.db).await?)
let tag_items = tags::Entity::find()
.order_by_asc(tags::Column::Slug)
.all(&ctx.db)
.await?;
let post_items = posts::Entity::find().all(&ctx.db).await?;
format::json(
tag_items
.into_iter()
.map(|item| build_summary(&item, &post_items))
.collect::<Vec<_>>(),
)
}
#[debug_handler]
pub async fn add(State(ctx): State<AppContext>, Json(params): Json<Params>) -> Result<Response> {
let mut item = ActiveModel {
..Default::default()
let name = normalized_name(&params)?;
let slug = normalized_slug(&params, &name);
let existing = tags::Entity::find()
.filter(tags::Column::Slug.eq(&slug))
.one(&ctx.db)
.await?;
let item = if let Some(existing_tag) = existing {
let mut item = existing_tag.into_active_model();
item.name = Set(Some(name));
item.slug = Set(slug);
item.description = Set(trim_to_option(params.description));
item.cover_image = Set(trim_to_option(params.cover_image));
item.accent_color = Set(trim_to_option(params.accent_color));
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
item.update(&ctx.db).await?
} else {
tags::ActiveModel {
name: Set(Some(name)),
slug: Set(slug),
description: Set(trim_to_option(params.description)),
cover_image: Set(trim_to_option(params.cover_image)),
accent_color: Set(trim_to_option(params.accent_color)),
seo_title: Set(trim_to_option(params.seo_title)),
seo_description: Set(trim_to_option(params.seo_description)),
..Default::default()
}
.insert(&ctx.db)
.await?
};
params.update(&mut item);
let item = item.insert(&ctx.db).await?;
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -47,35 +221,36 @@ pub async fn update(
State(ctx): State<AppContext>,
Json(params): Json<Params>,
) -> Result<Response> {
let name = normalized_name(&params)?;
let slug = normalized_slug(&params, &name);
let item = load_item(&ctx, id).await?;
let previous_name = item.name.clone();
let previous_slug = item.slug.clone();
let next_name = params
.name
if previous_name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty());
if let Some(next_name) = next_name {
if previous_name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
!= Some(next_name)
{
content::rewrite_tag_references(
previous_name.as_deref(),
&previous_slug,
Some(next_name),
)?;
}
.filter(|value| !value.is_empty())
!= Some(name.as_str())
{
content::rewrite_tag_references(
previous_name.as_deref(),
&previous_slug,
Some(&name),
)?;
}
let mut item = item.into_active_model();
params.update(&mut item);
item.name = Set(Some(name));
item.slug = Set(slug);
item.description = Set(trim_to_option(params.description));
item.cover_image = Set(trim_to_option(params.cover_image));
item.accent_color = Set(trim_to_option(params.accent_color));
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -89,7 +264,7 @@ pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Resul
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
format::json(load_item(&ctx, id).await?)
format::json(build_record(load_item(&ctx, id).await?))
}
pub fn routes() -> Routes {

View File

@@ -10,6 +10,13 @@ pub struct Model {
pub id: i32,
pub name: Option<String>,
pub slug: String,
#[sea_orm(column_type = "Text", nullable)]
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub seo_description: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -0,0 +1,25 @@
//! `SeaORM` Entity, manually maintained
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// Database row for editor-managed metadata about one object-storage asset.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "media_assets")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Object-storage key this metadata row describes; the binary itself
    // lives in the storage bucket, not in this table.
    pub object_key: String,
    pub title: Option<String>,
    // Accessibility text for image assets.
    pub alt_text: Option<String>,
    #[sea_orm(column_type = "Text", nullable)]
    pub caption: Option<String>,
    // Free-form tag list stored as JSONB.
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub tags: Option<Json>,
    #[sea_orm(column_type = "Text", nullable)]
    pub notes: Option<String>,
}

// media_assets has no foreign-key relations.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

View File

@@ -10,6 +10,7 @@ pub mod comment_persona_analysis_logs;
pub mod comments;
pub mod content_events;
pub mod friend_links;
pub mod media_assets;
pub mod notification_deliveries;
pub mod post_revisions;
pub mod posts;

View File

@@ -8,6 +8,7 @@ pub use super::comment_persona_analysis_logs::Entity as CommentPersonaAnalysisLo
pub use super::comments::Entity as Comments;
pub use super::content_events::Entity as ContentEvents;
pub use super::friend_links::Entity as FriendLinks;
pub use super::media_assets::Entity as MediaAssets;
pub use super::notification_deliveries::Entity as NotificationDeliveries;
pub use super::post_revisions::Entity as PostRevisions;
pub use super::posts::Entity as Posts;

View File

@@ -32,6 +32,19 @@ pub struct Model {
pub music_playlist: Option<Json>,
pub ai_enabled: Option<bool>,
pub paragraph_comments_enabled: Option<bool>,
pub comment_turnstile_enabled: Option<bool>,
pub subscription_turnstile_enabled: Option<bool>,
pub web_push_enabled: Option<bool>,
#[sea_orm(column_type = "Text", nullable)]
pub turnstile_site_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub turnstile_secret_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub web_push_vapid_public_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub web_push_vapid_private_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub web_push_vapid_subject: Option<String>,
pub ai_provider: Option<String>,
pub ai_api_base: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
@@ -63,6 +76,7 @@ pub struct Model {
pub seo_default_twitter_handle: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub notification_webhook_url: Option<String>,
pub notification_channel_type: Option<String>,
pub notification_comment_enabled: Option<bool>,
pub notification_friend_link_enabled: Option<bool>,
pub subscription_popup_enabled: Option<bool>,

View File

@@ -12,6 +12,13 @@ pub struct Model {
pub id: i32,
pub name: Option<String>,
pub slug: String,
#[sea_orm(column_type = "Text", nullable)]
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub seo_description: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -0,0 +1,23 @@
pub use super::_entities::media_assets::{ActiveModel, Entity, Model};
use sea_orm::entity::prelude::*;
pub type MediaAssets = Entity;
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {
    /// On UPDATE, stamp `updated_at` with the current time when the
    /// caller left it untouched; inserts and explicit timestamps pass
    /// through unchanged.
    async fn before_save<C>(self, _db: &C, insert: bool) -> std::result::Result<Self, DbErr>
    where
        C: ConnectionTrait,
    {
        if !insert && self.updated_at.is_unchanged() {
            let mut this = self;
            this.updated_at = sea_orm::ActiveValue::Set(chrono::Utc::now().into());
            Ok(this)
        } else {
            Ok(self)
        }
    }
}

// Extension points kept empty for now, matching the other model modules.
impl Model {}
impl ActiveModel {}
impl Entity {}

View File

@@ -3,6 +3,7 @@ pub mod ai_chunks;
pub mod categories;
pub mod comments;
pub mod friend_links;
pub mod media_assets;
pub mod posts;
pub mod site_settings;
pub mod tags;

View File

@@ -0,0 +1,640 @@
use std::{fs, path::Path, path::PathBuf};
use chrono::Utc;
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, QueryFilter,
QueryOrder, Set,
};
use serde::{Deserialize, Serialize};
use crate::{
controllers::site_settings,
models::_entities::{
categories, friend_links, media_assets, posts, reviews, site_settings as site_settings_entity,
tags,
},
services::{content, media_assets as media_assets_service, storage},
};
const BACKUP_VERSION: &str = "2026-04-01";
const WARNING_STORAGE_BINARIES: &str =
"当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
/// Name/slug plus presentation metadata for a category or tag, as
/// stored in a site backup.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupTaxonomyRecord {
    pub name: String,
    pub slug: String,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
}

/// One review entry in a backup; every field is optional so partially
/// filled rows survive export/import.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupReviewRecord {
    pub title: Option<String>,
    pub review_type: Option<String>,
    pub rating: Option<i32>,
    pub review_date: Option<String>,
    pub status: Option<String>,
    pub description: Option<String>,
    pub tags: Option<String>,
    pub cover: Option<String>,
    pub link_url: Option<String>,
}

/// One friend link in a backup; `site_url` is the only required field.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupFriendLinkRecord {
    pub site_name: Option<String>,
    pub site_url: String,
    pub avatar_url: Option<String>,
    pub description: Option<String>,
    pub category: Option<String>,
    pub status: Option<String>,
}

/// Metadata for a media asset, keyed by its object-storage key.
/// Only metadata is backed up — not the stored binary itself.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupMediaAssetRecord {
    pub object_key: String,
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    pub tags: Vec<String>,
    pub notes: Option<String>,
}

/// Manifest entry describing one object in the storage bucket.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupStorageObjectRecord {
    pub key: String,
    pub url: String,
    pub size_bytes: i64,
    pub last_modified: Option<String>,
}

/// A markdown post source: slug, its on-disk file name, and the raw
/// markdown content.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupPostDocument {
    pub slug: String,
    pub file_name: String,
    pub markdown: String,
}

/// Top-level backup document; `version` tags the schema (see
/// BACKUP_VERSION) and `warning` carries the user-facing caveat about
/// storage binaries not being included.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SiteBackupDocument {
    pub version: String,
    pub exported_at: String,
    // Whether binary objects accompany this backup (see the warning text).
    pub includes_storage_binaries: bool,
    pub warning: String,
    pub site_settings: site_settings_entity::Model,
    pub categories: Vec<BackupTaxonomyRecord>,
    pub tags: Vec<BackupTaxonomyRecord>,
    pub reviews: Vec<BackupReviewRecord>,
    pub friend_links: Vec<BackupFriendLinkRecord>,
    pub media_assets: Vec<BackupMediaAssetRecord>,
    pub storage_manifest: Option<Vec<BackupStorageObjectRecord>>,
    pub posts: Vec<BackupPostDocument>,
}

/// Per-entity counters returned after importing a backup, plus the
/// import `mode` ("merge" or "replace").
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SiteBackupImportSummary {
    pub imported: bool,
    pub mode: String,
    pub site_settings_restored: bool,
    pub posts_written: usize,
    pub categories_upserted: usize,
    pub tags_upserted: usize,
    pub reviews_upserted: usize,
    pub friend_links_upserted: usize,
    pub media_assets_upserted: usize,
    pub storage_manifest_items: usize,
    pub includes_storage_binaries: bool,
    pub warning: String,
}
/// Trims the wrapped string and collapses blank results to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    match value {
        Some(raw) => {
            let cleaned = raw.trim();
            (!cleaned.is_empty()).then(|| cleaned.to_string())
        }
        None => None,
    }
}
/// Builds a lowercase ASCII slug: alphanumerics are kept, runs of
/// whitespace / `-` / `_` collapse to a single dash, every other character
/// is dropped, and leading/trailing dashes are stripped.
fn slugify(value: &str) -> String {
    let mut slug = String::with_capacity(value.len());
    let mut dash_emitted = false;
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            slug.push(ch.to_ascii_lowercase());
            dash_emitted = false;
        } else if matches!(ch, '-' | '_') || ch.is_whitespace() {
            if !dash_emitted {
                slug.push('-');
                dash_emitted = true;
            }
        }
        // Any other character (punctuation, non-ASCII) is ignored.
    }
    slug.trim_matches('-').to_string()
}
/// Normalizes the requested import mode: anything other than "replace"
/// (case-insensitive, surrounding whitespace ignored) becomes "merge".
fn normalize_backup_mode(value: Option<&str>) -> String {
    let normalized = value.unwrap_or("merge").trim().to_ascii_lowercase();
    if normalized == "replace" {
        "replace".to_string()
    } else {
        "merge".to_string()
    }
}
fn markdown_posts_dir() -> PathBuf {
PathBuf::from(content::MARKDOWN_POSTS_DIR)
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
}
/// Deletes every `.md` / `.markdown` file in the markdown posts directory and
/// returns how many files were removed. Creates the directory first so a
/// fresh deployment does not error; unreadable dir entries are skipped.
fn remove_existing_markdown_documents() -> Result<usize> {
    let dir = markdown_posts_dir();
    fs::create_dir_all(&dir).map_err(io_error)?;
    let mut removed = 0_usize;
    for path in fs::read_dir(&dir)
        .map_err(io_error)?
        .filter_map(|entry| entry.ok())
        .map(|entry| entry.path())
    {
        // Case-insensitive extension match; files without an extension yield
        // an empty string and are left alone.
        let extension = path
            .extension()
            .and_then(|value| value.to_str())
            .map(|value| value.to_ascii_lowercase())
            .unwrap_or_default();
        if extension == "md" || extension == "markdown" {
            fs::remove_file(&path).map_err(io_error)?;
            removed += 1;
        }
    }
    Ok(removed)
}
/// Normalizes CRLF line endings to bare LF (lone `\r` is left untouched).
fn normalize_markdown(value: &str) -> String {
    value.split("\r\n").collect::<Vec<_>>().join("\n")
}
/// Normalizes one backup post into `(slug, markdown)`: derives a file stem,
/// runs the source through `content::parse_markdown_source`, and resolves
/// the definitive slug (a non-blank parsed slug wins over `document.slug`).
/// Errors when no usable slug remains.
fn normalized_backup_post(document: &BackupPostDocument) -> Result<(String, String)> {
    let candidate_slug = trim_to_option(Some(document.slug.clone())).unwrap_or_default();
    // A missing file name falls back to "<slug>.md" ("post.md" when the slug is blank).
    let file_name = trim_to_option(Some(document.file_name.clone()))
        .unwrap_or_else(|| format!("{}.md", if candidate_slug.is_empty() { "post" } else { &candidate_slug }));
    let file_stem = Path::new(&file_name)
        .file_stem()
        .and_then(|value| value.to_str())
        .unwrap_or("post");
    let markdown = normalize_markdown(&document.markdown);
    let parsed = content::parse_markdown_source(file_stem, &markdown, &file_name)?;
    let slug = if parsed.slug.trim().is_empty() {
        candidate_slug
    } else {
        parsed.slug
    };
    if slug.trim().is_empty() {
        return Err(Error::BadRequest("备份中的文章 slug 不能为空".to_string()));
    }
    Ok((slug, markdown))
}
/// Lists remote storage objects into backup manifest records. Returns `None`
/// when no R2 settings are configured.
/// NOTE(review): only 1000 objects are requested — larger buckets would be
/// silently truncated unless `storage::list_objects` paginates internally;
/// confirm against that helper.
async fn export_storage_manifest(
    ctx: &AppContext,
) -> Result<Option<Vec<BackupStorageObjectRecord>>> {
    if storage::optional_r2_settings(ctx).await?.is_none() {
        return Ok(None);
    }
    Ok(Some(
        storage::list_objects(ctx, None, 1000)
            .await?
            .into_iter()
            .map(|item| BackupStorageObjectRecord {
                key: item.key,
                url: item.url,
                size_bytes: item.size_bytes,
                last_modified: item.last_modified,
            })
            .collect(),
    ))
}
/// Converts a category row into its portable backup form; a missing name
/// falls back to the slug.
fn export_category_record(item: categories::Model) -> BackupTaxonomyRecord {
    let categories::Model {
        name,
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
        ..
    } = item;
    BackupTaxonomyRecord {
        name: name.unwrap_or_else(|| slug.clone()),
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
    }
}
/// Converts a tag row into its portable backup form; a missing name falls
/// back to the slug.
fn export_tag_record(item: tags::Model) -> BackupTaxonomyRecord {
    let tags::Model {
        name,
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
        ..
    } = item;
    BackupTaxonomyRecord {
        name: name.unwrap_or_else(|| slug.clone()),
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
    }
}
/// Converts a review row into its portable backup form (straight field copy).
fn export_review_record(item: reviews::Model) -> BackupReviewRecord {
    let reviews::Model {
        title,
        review_type,
        rating,
        review_date,
        status,
        description,
        tags,
        cover,
        link_url,
        ..
    } = item;
    BackupReviewRecord {
        title,
        review_type,
        rating,
        review_date,
        status,
        description,
        tags,
        cover,
        link_url,
    }
}
/// Converts a friend-link row into its portable backup form.
fn export_friend_link_record(item: friend_links::Model) -> BackupFriendLinkRecord {
    let friend_links::Model {
        site_name,
        site_url,
        avatar_url,
        description,
        category,
        status,
        ..
    } = item;
    BackupFriendLinkRecord {
        site_name,
        site_url,
        avatar_url,
        description,
        category,
        status,
    }
}
/// Converts a media-asset row into its portable backup form, flattening the
/// stored JSON tags into a plain string list.
fn export_media_asset_record(item: media_assets::Model) -> BackupMediaAssetRecord {
    let tags = media_assets_service::tag_list(&item);
    let media_assets::Model {
        object_key,
        title,
        alt_text,
        caption,
        notes,
        ..
    } = item;
    BackupMediaAssetRecord {
        object_key,
        title,
        alt_text,
        caption,
        tags,
        notes,
    }
}
/// Assembles the full portable backup: current settings row, every
/// taxonomy/review/friend-link/media-asset row, the raw markdown of every
/// post, and (when R2 is configured) a metadata-only storage manifest.
pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument> {
    let site_settings_row = site_settings::load_current(ctx).await?;
    // Sync first so the exported post list matches what is on disk.
    let markdown_posts = content::sync_markdown_posts(ctx).await?;
    let categories = categories::Entity::find()
        .order_by_asc(categories::Column::Slug)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_category_record)
        .collect::<Vec<_>>();
    let tags = tags::Entity::find()
        .order_by_asc(tags::Column::Slug)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_tag_record)
        .collect::<Vec<_>>();
    let reviews = reviews::Entity::find()
        .order_by_desc(reviews::Column::UpdatedAt)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_review_record)
        .collect::<Vec<_>>();
    let friend_links = friend_links::Entity::find()
        .order_by_asc(friend_links::Column::SiteUrl)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_friend_link_record)
        .collect::<Vec<_>>();
    let media_assets = media_assets::Entity::find()
        .order_by_asc(media_assets::Column::ObjectKey)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_media_asset_record)
        .collect::<Vec<_>>();
    let posts = markdown_posts
        .into_iter()
        .map(|post| {
            // Embed the raw on-disk markdown so an import can rewrite the file.
            let (_, markdown) = content::read_markdown_document(&post.slug)?;
            Ok(BackupPostDocument {
                slug: post.slug.clone(),
                file_name: format!("{}.md", post.slug),
                markdown,
            })
        })
        .collect::<Result<Vec<_>>>()?;
    // Manifest export is best-effort: a storage outage must not block a backup.
    let storage_manifest = match export_storage_manifest(ctx).await {
        Ok(items) => items,
        Err(error) => {
            tracing::warn!(?error, "failed to export storage manifest, continuing without it");
            None
        }
    };
    Ok(SiteBackupDocument {
        version: BACKUP_VERSION.to_string(),
        exported_at: Utc::now().to_rfc3339(),
        // Binaries are never embedded; the manifest above is metadata only.
        includes_storage_binaries: false,
        warning: WARNING_STORAGE_BINARIES.to_string(),
        site_settings: site_settings_row,
        categories,
        tags,
        reviews,
        friend_links,
        media_assets,
        storage_manifest,
        posts,
    })
}
/// Overwrites the current site-settings row with the backed-up values while
/// keeping the live row's id and created_at. `reset_all()` marks every field
/// of the active model as changed so the UPDATE rewrites all columns, not
/// just the three explicitly set here.
async fn restore_site_settings(
    ctx: &AppContext,
    value: &site_settings_entity::Model,
) -> Result<()> {
    let current = site_settings::load_current(ctx).await?;
    let mut active = value.clone().into_active_model();
    active.id = Set(current.id);
    active.created_at = Set(current.created_at);
    active.updated_at = Set(Utc::now().into());
    active.reset_all().update(&ctx.db).await?;
    Ok(())
}
/// Inserts or updates a category matched by slug OR name, so a backup with a
/// changed slug but unchanged name still updates the same row.
/// NOTE(review): intentionally mirrors `upsert_tag` — keep the two in sync.
async fn upsert_category(ctx: &AppContext, item: &BackupTaxonomyRecord) -> Result<()> {
    // Fallbacks: blank name -> slug; blank slug -> slugified name.
    let name = trim_to_option(Some(item.name.clone())).unwrap_or_else(|| item.slug.clone());
    let slug = trim_to_option(Some(item.slug.clone())).unwrap_or_else(|| slugify(&name));
    if slug.is_empty() {
        return Err(Error::BadRequest("分类 slug 不能为空".to_string()));
    }
    let existing = categories::Entity::find()
        .filter(
            Condition::any()
                .add(categories::Column::Slug.eq(&slug))
                .add(categories::Column::Name.eq(name.clone())),
        )
        .one(&ctx.db)
        .await?;
    let has_existing = existing.is_some();
    // A default ActiveModel leaves all columns NotSet, so the insert path only
    // writes the fields assigned below.
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(item.description.clone()));
    active.cover_image = Set(trim_to_option(item.cover_image.clone()));
    active.accent_color = Set(trim_to_option(item.accent_color.clone()));
    active.seo_title = Set(trim_to_option(item.seo_title.clone()));
    active.seo_description = Set(trim_to_option(item.seo_description.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Inserts or updates a tag matched by slug OR name.
/// NOTE(review): intentionally mirrors `upsert_category` — keep in sync.
async fn upsert_tag(ctx: &AppContext, item: &BackupTaxonomyRecord) -> Result<()> {
    // Fallbacks: blank name -> slug; blank slug -> slugified name.
    let name = trim_to_option(Some(item.name.clone())).unwrap_or_else(|| item.slug.clone());
    let slug = trim_to_option(Some(item.slug.clone())).unwrap_or_else(|| slugify(&name));
    if slug.is_empty() {
        return Err(Error::BadRequest("标签 slug 不能为空".to_string()));
    }
    let existing = tags::Entity::find()
        .filter(
            Condition::any()
                .add(tags::Column::Slug.eq(&slug))
                .add(tags::Column::Name.eq(name.clone())),
        )
        .one(&ctx.db)
        .await?;
    let has_existing = existing.is_some();
    // Default ActiveModel => all columns NotSet; insert only writes the
    // fields assigned below.
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(item.description.clone()));
    active.cover_image = Set(trim_to_option(item.cover_image.clone()));
    active.accent_color = Set(trim_to_option(item.accent_color.clone()));
    active.seo_title = Set(trim_to_option(item.seo_title.clone()));
    active.seo_description = Set(trim_to_option(item.seo_description.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Inserts or updates a friend link keyed by its (trimmed) `site_url`.
/// All other columns are overwritten from the backup record.
async fn upsert_friend_link(ctx: &AppContext, item: &BackupFriendLinkRecord) -> Result<()> {
    let site_url = trim_to_option(Some(item.site_url.clone()))
        .ok_or_else(|| Error::BadRequest("友链 site_url 不能为空".to_string()))?;
    let existing = friend_links::Entity::find()
        .filter(friend_links::Column::SiteUrl.eq(&site_url))
        .one(&ctx.db)
        .await?;
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    active.site_name = Set(trim_to_option(item.site_name.clone()));
    active.site_url = Set(site_url);
    active.avatar_url = Set(trim_to_option(item.avatar_url.clone()));
    active.description = Set(trim_to_option(item.description.clone()));
    active.category = Set(trim_to_option(item.category.clone()));
    active.status = Set(trim_to_option(item.status.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Inserts or updates a review. Matching is fuzzy: the first row (lowest id)
/// whose title / review_type / review_date equal the non-blank values in the
/// record is updated.
/// NOTE(review): when all three match fields are blank, no lookup is done
/// and every import inserts a fresh row — repeated merges can duplicate.
async fn upsert_review(ctx: &AppContext, item: &BackupReviewRecord) -> Result<()> {
    let title = trim_to_option(item.title.clone());
    let review_type = trim_to_option(item.review_type.clone());
    let review_date = trim_to_option(item.review_date.clone());
    let mut query = reviews::Entity::find();
    if let Some(value) = title.clone() {
        query = query.filter(reviews::Column::Title.eq(value));
    }
    if let Some(value) = review_type.clone() {
        query = query.filter(reviews::Column::ReviewType.eq(value));
    }
    if let Some(value) = review_date.clone() {
        query = query.filter(reviews::Column::ReviewDate.eq(value));
    }
    let existing = if title.is_some() || review_type.is_some() || review_date.is_some() {
        query.order_by_asc(reviews::Column::Id).one(&ctx.db).await?
    } else {
        None
    };
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    active.title = Set(title);
    active.review_type = Set(review_type);
    active.rating = Set(item.rating);
    active.review_date = Set(review_date);
    active.status = Set(trim_to_option(item.status.clone()));
    active.description = Set(trim_to_option(item.description.clone()));
    active.tags = Set(trim_to_option(item.tags.clone()));
    active.cover = Set(trim_to_option(item.cover.clone()));
    active.link_url = Set(trim_to_option(item.link_url.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Restores one media-asset metadata record by delegating to the shared
/// media-assets upsert (keyed by object key).
async fn upsert_media_asset(ctx: &AppContext, item: &BackupMediaAssetRecord) -> Result<()> {
    let payload = media_assets_service::MediaAssetMetadataInput {
        title: item.title.clone(),
        alt_text: item.alt_text.clone(),
        caption: item.caption.clone(),
        tags: Some(item.tags.clone()),
        notes: item.notes.clone(),
    };
    media_assets_service::upsert_by_key(ctx, &item.object_key, payload).await?;
    Ok(())
}
/// Writes each backup post to the markdown directory and re-syncs posts from
/// disk. Returns the number of distinct slugs written. With
/// `replace_existing`, all on-disk markdown is removed first; if the backup
/// also carries no posts, the posts table is cleared outright.
async fn write_backup_posts(
    ctx: &AppContext,
    documents: &[BackupPostDocument],
    replace_existing: bool,
) -> Result<usize> {
    let dir = markdown_posts_dir();
    fs::create_dir_all(&dir).map_err(io_error)?;
    if replace_existing {
        remove_existing_markdown_documents()?;
    }
    if documents.is_empty() {
        if replace_existing {
            posts::Entity::delete_many().exec(&ctx.db).await?;
        }
        return Ok(0);
    }
    // Dedupe by slug: a later document with the same slug overwrites the file
    // but is counted once.
    let mut written = std::collections::HashSet::new();
    for document in documents {
        let (slug, markdown) = normalized_backup_post(document)?;
        fs::write(content::markdown_post_path(&slug), markdown).map_err(io_error)?;
        written.insert(slug);
    }
    // Presumably rebuilds the posts table from the files just written — see
    // content::sync_markdown_posts.
    content::sync_markdown_posts(ctx).await?;
    Ok(written.len())
}
/// Applies a backup document to this site. `mode` is "merge" (default:
/// upsert on top of existing data) or "replace" (clear the affected tables
/// and on-disk markdown first). Site settings are always overwritten.
/// NOTE(review): not wrapped in a transaction — a mid-import failure leaves
/// a partially restored state.
pub async fn import_site_backup(
    ctx: &AppContext,
    backup: SiteBackupDocument,
    mode: Option<&str>,
) -> Result<SiteBackupImportSummary> {
    let mode = normalize_backup_mode(mode);
    let replace_existing = mode == "replace";
    if replace_existing {
        friend_links::Entity::delete_many().exec(&ctx.db).await?;
        reviews::Entity::delete_many().exec(&ctx.db).await?;
        media_assets::Entity::delete_many().exec(&ctx.db).await?;
        categories::Entity::delete_many().exec(&ctx.db).await?;
        tags::Entity::delete_many().exec(&ctx.db).await?;
    }
    restore_site_settings(ctx, &backup.site_settings).await?;
    let posts_written = write_backup_posts(ctx, &backup.posts, replace_existing).await?;
    let mut categories_upserted = 0_usize;
    for item in &backup.categories {
        upsert_category(ctx, item).await?;
        categories_upserted += 1;
    }
    let mut tags_upserted = 0_usize;
    for item in &backup.tags {
        upsert_tag(ctx, item).await?;
        tags_upserted += 1;
    }
    let mut reviews_upserted = 0_usize;
    for item in &backup.reviews {
        upsert_review(ctx, item).await?;
        reviews_upserted += 1;
    }
    let mut friend_links_upserted = 0_usize;
    for item in &backup.friend_links {
        upsert_friend_link(ctx, item).await?;
        friend_links_upserted += 1;
    }
    let mut media_assets_upserted = 0_usize;
    for item in &backup.media_assets {
        upsert_media_asset(ctx, item).await?;
        media_assets_upserted += 1;
    }
    Ok(SiteBackupImportSummary {
        imported: true,
        mode,
        site_settings_restored: true,
        posts_written,
        categories_upserted,
        tags_upserted,
        reviews_upserted,
        friend_links_upserted,
        media_assets_upserted,
        // The manifest is informational only; no binaries are restored.
        storage_manifest_items: backup.storage_manifest.as_ref().map(Vec::len).unwrap_or(0),
        includes_storage_binaries: backup.includes_storage_binaries,
        warning: WARNING_STORAGE_BINARIES.to_string(),
    })
}

View File

@@ -39,6 +39,7 @@ pub struct CommentGuardInput<'a> {
pub author: Option<&'a str>,
pub content: Option<&'a str>,
pub honeypot_website: Option<&'a str>,
pub turnstile_token: Option<&'a str>,
pub captcha_token: Option<&'a str>,
pub captcha_answer: Option<&'a str>,
}
@@ -362,7 +363,16 @@ pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'
return Err(Error::BadRequest("提交未通过校验".to_string()));
}
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
if !crate::services::turnstile::verify_if_enabled(
ctx,
crate::services::turnstile::TurnstileScope::Comment,
input.turnstile_token,
input.ip_address,
)
.await?
{
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
}
if contains_blocked_keyword(input).is_some() {
return Err(Error::BadRequest("评论内容包含敏感关键词".to_string()));

View File

@@ -0,0 +1,125 @@
use std::collections::HashMap;
use loco_rs::prelude::*;
use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, Set};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::models::_entities::media_assets;
/// Editable metadata for one storage object. On upsert every column is
/// rewritten from this payload — a `None` (or blank) field clears the stored
/// value rather than preserving it.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct MediaAssetMetadataInput {
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    /// Trimmed, lowercased and deduped before storage.
    pub tags: Option<Vec<String>>,
    pub notes: Option<String>,
}
/// Trims the wrapped string; blank input collapses to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let trimmed = value?.trim().to_string();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed)
    }
}
/// Cleans a tag list into the stored JSON form: trims each tag, lowercases
/// it, drops blanks and duplicates (first occurrence wins). Returns `None`
/// when nothing survives so an empty list is stored as NULL.
fn normalize_tag_list(values: Option<Vec<String>>) -> Option<Value> {
    let mut seen = std::collections::HashSet::new();
    let mut tags = Vec::new();
    for raw in values.unwrap_or_default() {
        let lowered = raw.trim().to_ascii_lowercase();
        if lowered.is_empty() || !seen.insert(lowered.clone()) {
            continue;
        }
        tags.push(Value::String(lowered));
    }
    if tags.is_empty() {
        None
    } else {
        Some(Value::Array(tags))
    }
}
/// Reads the stored JSON tag array back into plain strings; non-string
/// entries and non-array values yield an empty list.
pub fn tag_list(model: &media_assets::Model) -> Vec<String> {
    match model.tags.as_ref().and_then(Value::as_array) {
        Some(items) => items
            .iter()
            .filter_map(|item| item.as_str())
            .map(str::to_string)
            .collect(),
        None => Vec::new(),
    }
}
/// Fetches the metadata rows for the given object keys, keyed by
/// `object_key`. Keys without a row are simply absent from the map; an empty
/// input returns an empty map without touching the database.
pub async fn list_by_keys(
    ctx: &AppContext,
    keys: &[String],
) -> Result<HashMap<String, media_assets::Model>> {
    if keys.is_empty() {
        return Ok(HashMap::new());
    }
    Ok(media_assets::Entity::find()
        .filter(media_assets::Column::ObjectKey.is_in(keys.iter().cloned()))
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(|item| (item.object_key.clone(), item))
        .collect())
}
/// Looks up the metadata row for an exact object key.
pub async fn get_by_key(ctx: &AppContext, object_key: &str) -> Result<Option<media_assets::Model>> {
    let found = media_assets::Entity::find()
        .filter(media_assets::Column::ObjectKey.eq(object_key))
        .one(&ctx.db)
        .await?;
    Ok(found)
}
/// Creates or overwrites the metadata row for `object_key` (trimmed).
/// Every metadata column is rewritten from `payload` — omitted or blank
/// fields are cleared, not preserved. Errors on a blank key.
pub async fn upsert_by_key(
    ctx: &AppContext,
    object_key: &str,
    payload: MediaAssetMetadataInput,
) -> Result<media_assets::Model> {
    let normalized_key = object_key.trim();
    if normalized_key.is_empty() {
        return Err(Error::BadRequest("object key 不能为空".to_string()));
    }
    let existing = get_by_key(ctx, normalized_key).await?;
    let has_existing = existing.is_some();
    // New rows start from a fresh ActiveModel carrying only the object key.
    let mut active = existing
        .map(|item| item.into_active_model())
        .unwrap_or_else(|| media_assets::ActiveModel {
            object_key: Set(normalized_key.to_string()),
            ..Default::default()
        });
    active.title = Set(trim_to_option(payload.title));
    active.alt_text = Set(trim_to_option(payload.alt_text));
    active.caption = Set(trim_to_option(payload.caption));
    active.tags = Set(normalize_tag_list(payload.tags));
    active.notes = Set(trim_to_option(payload.notes));
    if has_existing {
        active.update(&ctx.db).await.map_err(Into::into)
    } else {
        active.insert(&ctx.db).await.map_err(Into::into)
    }
}
/// Deletes the metadata row for `object_key`; a missing row is a no-op.
pub async fn delete_by_key(ctx: &AppContext, object_key: &str) -> Result<()> {
    match get_by_key(ctx, object_key).await? {
        Some(found) => {
            found.delete(&ctx.db).await?;
            Ok(())
        }
        None => Ok(()),
    }
}
/// Deletes the metadata rows for each key in turn (one lookup + delete per
/// key); missing rows are skipped.
pub async fn delete_by_keys(ctx: &AppContext, object_keys: &[String]) -> Result<()> {
    for object_key in object_keys {
        delete_by_key(ctx, object_key.as_str()).await?;
    }
    Ok(())
}

View File

@@ -2,9 +2,13 @@ pub mod admin_audit;
pub mod abuse_guard;
pub mod ai;
pub mod analytics;
pub mod backups;
pub mod comment_guard;
pub mod content;
pub mod media_assets;
pub mod notifications;
pub mod post_revisions;
pub mod storage;
pub mod subscriptions;
pub mod turnstile;
pub mod web_push;

View File

@@ -1,10 +1,23 @@
use loco_rs::prelude::*;
use crate::{
controllers::site_settings,
models::_entities::{comments, friend_links},
models::_entities::{comments, friend_links, site_settings as site_settings_model},
services::subscriptions,
};
/// Resolves the admin notification channel from site settings: "ntfy"
/// (case-insensitive, trimmed) selects the ntfy channel, anything else —
/// including an unset value — falls back to the webhook channel.
fn notification_channel_type(settings: &site_settings_model::Model) -> &'static str {
    let configured = settings
        .notification_channel_type
        .as_deref()
        .map(|value| value.trim().to_ascii_lowercase());
    if configured.as_deref() == Some("ntfy") {
        subscriptions::CHANNEL_NTFY
    } else {
        subscriptions::CHANNEL_WEBHOOK
    }
}
fn trim_to_option(value: Option<String>) -> Option<String> {
value.and_then(|item| {
let trimmed = item.trim().to_string();
@@ -81,9 +94,10 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
if settings.notification_comment_enabled.unwrap_or(false) {
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
let channel_type = notification_channel_type(&settings);
if let Err(error) = subscriptions::queue_direct_notification(
ctx,
subscriptions::CHANNEL_WEBHOOK,
channel_type,
&target,
subscriptions::EVENT_COMMENT_CREATED,
"新评论通知",
@@ -94,7 +108,7 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
)
.await
{
tracing::warn!("failed to queue legacy comment webhook notification: {error}");
tracing::warn!("failed to queue comment admin notification: {error}");
}
}
}
@@ -144,9 +158,10 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
if settings.notification_friend_link_enabled.unwrap_or(false) {
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
let channel_type = notification_channel_type(&settings);
if let Err(error) = subscriptions::queue_direct_notification(
ctx,
subscriptions::CHANNEL_WEBHOOK,
channel_type,
&target,
subscriptions::EVENT_FRIEND_LINK_CREATED,
"新友链申请通知",
@@ -157,7 +172,7 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
)
.await
{
tracing::warn!("failed to queue legacy friend-link webhook notification: {error}");
tracing::warn!("failed to queue friend-link admin notification: {error}");
}
}
}

View File

@@ -15,7 +15,7 @@ use uuid::Uuid;
use crate::{
mailers::subscription::SubscriptionMailer,
models::_entities::{notification_deliveries, posts, subscriptions},
services::content,
services::{content, web_push as web_push_service},
workers::notification_delivery::{
NotificationDeliveryWorker, NotificationDeliveryWorkerArgs,
},
@@ -26,6 +26,7 @@ pub const CHANNEL_WEBHOOK: &str = "webhook";
pub const CHANNEL_DISCORD: &str = "discord";
pub const CHANNEL_TELEGRAM: &str = "telegram";
pub const CHANNEL_NTFY: &str = "ntfy";
pub const CHANNEL_WEB_PUSH: &str = "web_push";
pub const STATUS_PENDING: &str = "pending";
pub const STATUS_ACTIVE: &str = "active";
@@ -139,6 +140,9 @@ pub fn normalize_channel_type(value: &str) -> String {
CHANNEL_DISCORD => CHANNEL_DISCORD.to_string(),
CHANNEL_TELEGRAM => CHANNEL_TELEGRAM.to_string(),
CHANNEL_NTFY => CHANNEL_NTFY.to_string(),
CHANNEL_WEB_PUSH | "browser_push" | "browser-push" | "webpush" => {
CHANNEL_WEB_PUSH.to_string()
}
_ => CHANNEL_EMAIL.to_string(),
}
}
@@ -225,6 +229,35 @@ fn merge_metadata(existing: Option<&Value>, incoming: Option<Value>) -> Option<V
}
}
/// Validates a raw browser-push subscription JSON blob by deserializing it
/// into `web_push::SubscriptionInfo`, trims the endpoint and both keys, and
/// rejects the payload when any of them ends up blank. Returns the cleaned
/// subscription re-serialized to JSON.
fn normalize_browser_push_subscription(raw: Value) -> Result<Value> {
    let mut subscription = serde_json::from_value::<web_push::SubscriptionInfo>(raw)
        .map_err(|_| Error::BadRequest("browser push subscription 非法".to_string()))?;
    subscription.endpoint = subscription.endpoint.trim().to_string();
    subscription.keys.p256dh = subscription.keys.p256dh.trim().to_string();
    subscription.keys.auth = subscription.keys.auth.trim().to_string();
    if subscription.endpoint.is_empty()
        || subscription.keys.p256dh.is_empty()
        || subscription.keys.auth.is_empty()
    {
        return Err(Error::BadRequest(
            "browser push subscription 缺少 endpoint / keys".to_string(),
        ));
    }
    serde_json::to_value(subscription).map_err(Into::into)
}
/// Merges caller metadata over any stored metadata, then stamps the fixed
/// `kind: "browser-push"` marker and the normalized `subscription` object —
/// delivery later rebuilds a `SubscriptionInfo` from that key. Non-object
/// merge results are discarded and replaced by a fresh object.
fn merge_browser_push_metadata(existing: Option<&Value>, incoming: Option<Value>, subscription: Value) -> Value {
    let mut object = merge_metadata(existing, incoming)
        .and_then(|value| value.as_object().cloned())
        .unwrap_or_default();
    object.insert("kind".to_string(), Value::String("browser-push".to_string()));
    object.insert("subscription".to_string(), subscription);
    Value::Object(object)
}
fn json_string_list(value: Option<&Value>, key: &str) -> Vec<String> {
value
.and_then(Value::as_object)
@@ -592,6 +625,88 @@ pub async fn create_public_email_subscription(
})
}
/// Registers (or refreshes) a public browser-push subscription. The push
/// endpoint URL acts as the unique key within the web-push channel; a
/// resubscribe from the same endpoint updates the stored row in place.
/// Web-push needs no email-style confirmation, so rows are activated
/// immediately and any pending confirm token is cleared.
pub async fn create_public_web_push_subscription(
    ctx: &AppContext,
    subscription: Value,
    metadata: Option<Value>,
) -> Result<PublicSubscriptionResult> {
    let normalized_subscription = normalize_browser_push_subscription(subscription)?;
    let endpoint = normalized_subscription
        .get("endpoint")
        .and_then(Value::as_str)
        .ok_or_else(|| Error::BadRequest("browser push endpoint 非法".to_string()))?
        .to_string();
    let existing = subscriptions::Entity::find()
        .filter(subscriptions::Column::ChannelType.eq(CHANNEL_WEB_PUSH))
        .filter(subscriptions::Column::Target.eq(&endpoint))
        .one(&ctx.db)
        .await?;
    if let Some(existing) = existing {
        let mut active = existing.clone().into_active_model();
        // Keep the existing manage token when present so previously issued
        // management links stay valid; mint one only if it is missing/blank.
        let manage_token = existing
            .manage_token
            .clone()
            .filter(|value| !value.trim().is_empty())
            .unwrap_or_else(generate_subscription_token);
        active.manage_token = Set(Some(manage_token));
        active.status = Set(STATUS_ACTIVE.to_string());
        active.confirm_token = Set(None);
        active.verified_at = Set(Some(Utc::now().to_rfc3339()));
        active.metadata = Set(Some(merge_browser_push_metadata(
            existing.metadata.as_ref(),
            metadata,
            normalized_subscription,
        )));
        // Backfill a display name only when none was ever set.
        if existing
            .display_name
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .is_none()
        {
            active.display_name = Set(Some("Browser Push".to_string()));
        }
        let updated = active.update(&ctx.db).await?;
        return Ok(PublicSubscriptionResult {
            subscription: to_public_subscription_view(&updated),
            requires_confirmation: false,
            message: "浏览器推送已更新,后续有新内容时会直接提醒。".to_string(),
        });
    }
    // First registration for this endpoint: create an active row directly.
    let created = subscriptions::ActiveModel {
        channel_type: Set(CHANNEL_WEB_PUSH.to_string()),
        target: Set(endpoint),
        display_name: Set(Some("Browser Push".to_string())),
        status: Set(STATUS_ACTIVE.to_string()),
        filters: Set(Some(default_public_filters())),
        secret: Set(None),
        notes: Set(None),
        confirm_token: Set(None),
        manage_token: Set(Some(generate_subscription_token())),
        metadata: Set(Some(merge_browser_push_metadata(
            None,
            metadata,
            normalized_subscription,
        ))),
        verified_at: Set(Some(Utc::now().to_rfc3339())),
        last_notified_at: Set(None),
        failure_count: Set(Some(0)),
        last_delivery_status: Set(None),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    Ok(PublicSubscriptionResult {
        subscription: to_public_subscription_view(&created),
        requires_confirmation: false,
        message: "浏览器推送已开启,后续有新内容时会直接提醒。".to_string(),
    })
}
pub async fn confirm_subscription(ctx: &AppContext, token: &str) -> Result<subscriptions::Model> {
let token = token.trim();
if token.is_empty() {
@@ -869,6 +984,7 @@ fn provider_name(channel_type: &str) -> &'static str {
CHANNEL_DISCORD => "discord-webhook",
CHANNEL_TELEGRAM => "telegram-bot-api",
CHANNEL_NTFY => "ntfy",
CHANNEL_WEB_PUSH => "web-push",
_ => "webhook",
}
}
@@ -882,10 +998,65 @@ fn resolve_ntfy_target(target: &str) -> String {
}
}
/// Collapses every run of whitespace (including newlines) into a single
/// ASCII space and drops leading/trailing whitespace.
fn collapse_whitespace(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    for word in value.split_whitespace() {
        if !out.is_empty() {
            out.push(' ');
        }
        out.push_str(word);
    }
    out
}
/// Truncates `value` to at most `max_chars` characters (chars, not bytes),
/// replacing the cut tail with a single `…`.
///
/// Fix: the previous version appended `…` *after* taking `max_chars`
/// characters, so truncated output was one character over the stated limit
/// (e.g. the 220-char web-push body cap produced 221 chars).
fn truncate_chars(value: &str, max_chars: usize) -> String {
    if value.chars().count() <= max_chars {
        return value.to_string();
    }
    if max_chars == 0 {
        return String::new();
    }
    // Keep max_chars - 1 characters so the ellipsis fits within the limit.
    let mut truncated = value.chars().take(max_chars - 1).collect::<String>();
    truncated.push('…');
    truncated
}
/// Joins `path` onto the site base URL (trimmed, trailing slashes removed).
/// Returns `None` when no usable base URL is available.
fn site_asset_url(site_url: Option<&str>, path: &str) -> Option<String> {
    match site_url.map(|raw| raw.trim().trim_end_matches('/')) {
        Some(base) if !base.is_empty() => Some(format!("{base}{path}")),
        _ => None,
    }
}
/// Resolves the click-through URL for a web-push notification: the event
/// payload's `url` field when present, otherwise the site base URL, else
/// `None`.
fn web_push_target_url(message: &QueuedDeliveryPayload) -> Option<String> {
    message
        .payload
        .get("url")
        .and_then(Value::as_str)
        .map(ToString::to_string)
        .or_else(|| message.site_url.clone())
}
/// Builds the JSON document sent as the push message body: subject as title,
/// a whitespace-collapsed body capped around 220 chars, site-derived
/// icon/badge URLs, a click-through URL, the event type as the notification
/// tag (for browser-side coalescing), and the full event payload as data.
fn build_web_push_payload(message: &QueuedDeliveryPayload) -> Value {
    let body = truncate_chars(&collapse_whitespace(&message.text), 220);
    serde_json::json!({
        "title": message.subject,
        "body": body,
        "icon": site_asset_url(message.site_url.as_deref(), "/favicon.svg"),
        "badge": site_asset_url(message.site_url.as_deref(), "/favicon.ico"),
        "url": web_push_target_url(message),
        "tag": message
            .payload
            .get("event_type")
            .and_then(Value::as_str)
            .unwrap_or("subscription"),
        "data": {
            "event_type": message.payload.get("event_type").cloned().unwrap_or(Value::Null),
            "payload": message.payload,
        }
    })
}
async fn deliver_via_channel(
ctx: &AppContext,
channel_type: &str,
target: &str,
message: &QueuedDeliveryPayload,
metadata: Option<&Value>,
) -> Result<Option<String>> {
match channel_type {
CHANNEL_EMAIL => Err(Error::BadRequest(
@@ -923,6 +1094,21 @@ async fn deliver_via_channel(
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_WEB_PUSH => {
let settings = crate::controllers::site_settings::load_current(ctx).await?;
let subscription_info = web_push_service::subscription_info_from_metadata(metadata)?;
let payload = serde_json::to_vec(&build_web_push_payload(message))?;
web_push_service::send_payload(
&settings,
&subscription_info,
&payload,
Some(web_push::Urgency::Normal),
24 * 60 * 60,
message.site_url.as_deref(),
)
.await?;
Ok(None)
}
_ => {
let envelope = DeliveryEnvelope {
event: message
@@ -1010,10 +1196,17 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
.await
.map(|_| None)
} else {
deliver_via_channel(&subscription.channel_type, &subscription.target, &message).await
deliver_via_channel(
ctx,
&subscription.channel_type,
&subscription.target,
&message,
subscription.metadata.as_ref(),
)
.await
}
} else {
deliver_via_channel(&delivery.channel_type, &delivery.target, &message).await
deliver_via_channel(ctx, &delivery.channel_type, &delivery.target, &message, None).await
};
let subscription_id = delivery.subscription_id;
let delivery_channel_type = delivery.channel_type.clone();

View File

@@ -0,0 +1,182 @@
use std::sync::OnceLock;
use loco_rs::prelude::*;
use reqwest::Client;
use serde::Deserialize;
use crate::models::_entities::site_settings;
const DEFAULT_TURNSTILE_VERIFY_URL: &str =
"https://challenges.cloudflare.com/turnstile/v0/siteverify";
const ENV_TURNSTILE_SECRET_KEY: &str = "TERMI_TURNSTILE_SECRET_KEY";
const ENV_LEGACY_TURNSTILE_SECRET_KEY: &str = "TERMI_COMMENT_TURNSTILE_SECRET_KEY";
const ENV_TURNSTILE_SITE_KEY: &str = "PUBLIC_COMMENT_TURNSTILE_SITE_KEY";
const ENV_TURNSTILE_VERIFY_URL: &str = "TERMI_TURNSTILE_VERIFY_URL";
/// Which public form a Turnstile check protects; each scope has its own
/// enable flag in site settings.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum TurnstileScope {
    Comment,
    Subscription,
}
/// Subset of the Cloudflare siteverify response body that we consume.
#[derive(Clone, Debug, Deserialize)]
struct TurnstileVerifyResponse {
    success: bool,
    /// Diagnostic codes from Cloudflare; logged when verification is rejected.
    #[serde(default, rename = "error-codes")]
    error_codes: Vec<String>,
}
/// Trims the borrowed string and collapses blank input to `None`.
fn trim_to_option(value: Option<&str>) -> Option<String> {
    match value.map(str::trim) {
        Some(trimmed) if !trimmed.is_empty() => Some(trimmed.to_string()),
        _ => None,
    }
}
/// Reads an environment variable, treating missing, unreadable, or blank
/// values uniformly as `None`.
fn env_value(name: &str) -> Option<String> {
    let raw = std::env::var(name).ok()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Trims a settings value and collapses blank strings to `None`.
fn configured_value(value: Option<&String>) -> Option<String> {
    let trimmed = value?.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
/// Trims the client IP and caps it at 96 characters before forwarding it to
/// the verify endpoint.
fn normalize_ip(value: Option<&str>) -> Option<String> {
    let cleaned = trim_to_option(value)?;
    Some(cleaned.chars().take(96).collect())
}
/// Verify endpoint: an env override when set, otherwise Cloudflare's default.
fn verify_url() -> String {
    match env_value(ENV_TURNSTILE_VERIFY_URL) {
        Some(configured) => configured,
        None => DEFAULT_TURNSTILE_VERIFY_URL.to_string(),
    }
}
/// Lazily-initialized, process-wide HTTP client reused across verifications.
fn client() -> &'static Client {
    static CLIENT: OnceLock<Client> = OnceLock::new();
    CLIENT.get_or_init(Client::new)
}
/// Resolves the Turnstile secret: site settings first, then the current env
/// var, then the legacy comment-specific env var.
pub fn secret_key(settings: &site_settings::Model) -> Option<String> {
    if let Some(configured) = configured_value(settings.turnstile_secret_key.as_ref()) {
        return Some(configured);
    }
    env_value(ENV_TURNSTILE_SECRET_KEY).or_else(|| env_value(ENV_LEGACY_TURNSTILE_SECRET_KEY))
}
/// Resolves the Turnstile site key: site settings first, then the env var.
pub fn site_key(settings: &site_settings::Model) -> Option<String> {
    match configured_value(settings.turnstile_site_key.as_ref()) {
        Some(configured) => Some(configured),
        None => env_value(ENV_TURNSTILE_SITE_KEY),
    }
}
/// True when a usable site key is configured anywhere.
fn site_key_configured(settings: &site_settings::Model) -> bool {
    matches!(site_key(settings), Some(_))
}
/// True when a usable secret key is configured anywhere.
pub fn secret_key_configured(settings: &site_settings::Model) -> bool {
    matches!(secret_key(settings), Some(_))
}
/// Reads the per-scope enable flag from site settings; unset means disabled.
fn scope_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
    let flag = match scope {
        TurnstileScope::Comment => settings.comment_turnstile_enabled,
        TurnstileScope::Subscription => settings.subscription_turnstile_enabled,
    };
    flag.unwrap_or(false)
}
/// A scope is only considered enabled when its flag is on AND both the site
/// key and secret key are configured — otherwise the check cannot succeed.
pub fn is_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
    if !scope_enabled(settings, scope) {
        return false;
    }
    site_key_configured(settings) && secret_key_configured(settings)
}
/// Convenience wrapper: loads the current settings row and reports whether
/// the given scope is fully enabled (flag on + both keys configured).
pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Result<bool> {
    let settings = crate::controllers::site_settings::load_current(ctx).await?;
    Ok(is_enabled(&settings, scope))
}
/// Verifies a Turnstile response token against the siteverify endpoint.
/// Every failure mode (missing config, missing token, transport error,
/// non-2xx status, undecodable body, rejected token) surfaces as a
/// `BadRequest` with a user-facing message; the technical cause only goes to
/// the logs.
async fn verify_token(
    settings: &site_settings::Model,
    token: Option<&str>,
    client_ip: Option<&str>,
) -> Result<()> {
    let secret = secret_key(settings).ok_or_else(|| {
        Error::BadRequest("人机验证尚未配置完成,请稍后重试".to_string())
    })?;
    let response_token = trim_to_option(token)
        .ok_or_else(|| Error::BadRequest("请先完成人机验证".to_string()))?;
    let mut form_data = vec![
        ("secret".to_string(), secret),
        ("response".to_string(), response_token),
    ];
    // The client IP is optional but improves Cloudflare's validation.
    if let Some(remote_ip) = normalize_ip(client_ip) {
        form_data.push(("remoteip".to_string(), remote_ip));
    }
    let response = client()
        .post(verify_url())
        .form(&form_data)
        .send()
        .await
        .map_err(|error| {
            tracing::warn!("turnstile verify request failed: {error}");
            Error::BadRequest("人机验证服务暂时不可用,请稍后重试".to_string())
        })?;
    if !response.status().is_success() {
        tracing::warn!(
            "turnstile verify returned unexpected status: {}",
            response.status()
        );
        return Err(Error::BadRequest(
            "人机验证服务暂时不可用,请稍后重试".to_string(),
        ));
    }
    let payload = response
        .json::<TurnstileVerifyResponse>()
        .await
        .map_err(|error| {
            tracing::warn!("turnstile verify decode failed: {error}");
            Error::BadRequest("人机验证服务暂时不可用,请稍后重试".to_string())
        })?;
    if !payload.success {
        // Cloudflare's error codes go to the log; the user sees a generic message.
        tracing::warn!(
            error_codes = ?payload.error_codes,
            "turnstile verify rejected request"
        );
        return Err(Error::BadRequest("人机验证未通过,请重试".to_string()));
    }
    Ok(())
}
/// Run Turnstile verification when it is enabled for `scope`.
///
/// Returns `Ok(false)` when verification is disabled or not fully configured
/// (no check performed), `Ok(true)` after a successful verification, and an
/// error when the token is missing or rejected.
pub async fn verify_if_enabled(
    ctx: &AppContext,
    scope: TurnstileScope,
    token: Option<&str>,
    client_ip: Option<&str>,
) -> Result<bool> {
    let settings = crate::controllers::site_settings::load_current(ctx).await?;
    if is_enabled(&settings, scope) {
        verify_token(&settings, token, client_ip).await?;
        Ok(true)
    } else {
        Ok(false)
    }
}

View File

@@ -0,0 +1,122 @@
use loco_rs::prelude::*;
use serde_json::Value;
use web_push::{
ContentEncoding, HyperWebPushClient, SubscriptionInfo, Urgency, VapidSignatureBuilder,
WebPushClient, WebPushMessageBuilder,
};
use crate::models::_entities::site_settings;
// Env-var fallbacks used when no value is stored in site settings.
// The public key is looked up under PUBLIC_* first, then the TERMI_* legacy
// name (see `public_key`); the private key and subject only have TERMI_* names.
// NOTE(review): the PUBLIC_ prefix presumably matches the frontend build's env
// convention — confirm against the frontend config.
const ENV_PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY: &str = "PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY";
const ENV_LEGACY_WEB_PUSH_VAPID_PUBLIC_KEY: &str = "TERMI_WEB_PUSH_VAPID_PUBLIC_KEY";
const ENV_WEB_PUSH_VAPID_PRIVATE_KEY: &str = "TERMI_WEB_PUSH_VAPID_PRIVATE_KEY";
const ENV_WEB_PUSH_VAPID_SUBJECT: &str = "TERMI_WEB_PUSH_VAPID_SUBJECT";
/// Read an environment variable, treating unset and whitespace-only values as absent.
fn env_value(name: &str) -> Option<String> {
    let raw = std::env::var(name).ok()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Normalise a stored setting value: trim it, and treat whitespace-only
/// (or absent) values as "not configured".
fn configured_value(value: Option<&String>) -> Option<String> {
    value
        .map(|item| item.trim())
        .filter(|trimmed| !trimmed.is_empty())
        .map(str::to_string)
}
/// Resolve the VAPID public key: stored setting first, then the public env var,
/// then the legacy env var name.
pub fn public_key(settings: &site_settings::Model) -> Option<String> {
    if let Some(stored) = configured_value(settings.web_push_vapid_public_key.as_ref()) {
        return Some(stored);
    }
    env_value(ENV_PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY)
        .or_else(|| env_value(ENV_LEGACY_WEB_PUSH_VAPID_PUBLIC_KEY))
}
/// Resolve the VAPID private key; a stored setting wins over the env fallback.
pub fn private_key(settings: &site_settings::Model) -> Option<String> {
    match configured_value(settings.web_push_vapid_private_key.as_ref()) {
        Some(stored) => Some(stored),
        None => env_value(ENV_WEB_PUSH_VAPID_PRIVATE_KEY),
    }
}
/// Resolve the VAPID subject claim; a stored setting wins over the env fallback.
pub fn vapid_subject(settings: &site_settings::Model) -> Option<String> {
    match configured_value(settings.web_push_vapid_subject.as_ref()) {
        Some(stored) => Some(stored),
        None => env_value(ENV_WEB_PUSH_VAPID_SUBJECT),
    }
}
/// Pick the VAPID "sub" claim to sign with: an explicitly configured subject,
/// else the site URL when it is an absolute http(s) URL, else a placeholder
/// mailto address so signing never fails for lack of a subject.
fn effective_vapid_subject(settings: &site_settings::Model, site_url: Option<&str>) -> String {
    if let Some(subject) = vapid_subject(settings) {
        return subject;
    }
    if let Some(url) = site_url {
        let trimmed = url.trim();
        if trimmed.starts_with("http://") || trimmed.starts_with("https://") {
            return trimmed.to_string();
        }
    }
    "mailto:noreply@example.com".to_string()
}
/// True when a non-empty VAPID public key resolves from settings or the environment.
pub fn public_key_configured(settings: &site_settings::Model) -> bool {
    let key = public_key(settings);
    key.is_some()
}
/// True when a non-empty VAPID private key resolves from settings or the environment.
pub fn private_key_configured(settings: &site_settings::Model) -> bool {
    let key = private_key(settings);
    key.is_some()
}
/// Web push is usable only when the toggle is on AND both VAPID keys resolve.
pub fn is_enabled(settings: &site_settings::Model) -> bool {
    let toggled_on = settings.web_push_enabled.unwrap_or(false);
    toggled_on && public_key_configured(settings) && private_key_configured(settings)
}
pub fn subscription_info_from_metadata(metadata: Option<&Value>) -> Result<SubscriptionInfo> {
let subscription = metadata
.and_then(Value::as_object)
.and_then(|object| object.get("subscription"))
.cloned()
.ok_or_else(|| Error::BadRequest("browser push metadata 缺少 subscription".to_string()))?;
serde_json::from_value::<SubscriptionInfo>(subscription)
.map_err(|_| Error::BadRequest("browser push metadata 非法".to_string()))
}
/// Send one encrypted Web Push message to a single browser subscription.
///
/// Builds a VAPID signature from the configured private key (the `sub` claim
/// comes from `effective_vapid_subject`), encrypts `payload` with aes128gcm,
/// applies the given TTL and optional urgency, and delivers the message with
/// `HyperWebPushClient`. Every failure is mapped to `Error::BadRequest` with a
/// diagnostic message.
pub async fn send_payload(
    settings: &site_settings::Model,
    subscription_info: &SubscriptionInfo,
    payload: &[u8],
    urgency: Option<Urgency>,
    ttl: u32,
    site_url: Option<&str>,
) -> Result<()> {
    // Without a private key we cannot sign; treat as a configuration error.
    let private_key = private_key(settings)
        .ok_or_else(|| Error::BadRequest("web push VAPID private key 未配置".to_string()))?;
    // The signature is bound to this subscription's push endpoint origin.
    let mut signature_builder = VapidSignatureBuilder::from_base64(&private_key, subscription_info)
        .map_err(|error| Error::BadRequest(format!("web push vapid build failed: {error}")))?;
    signature_builder.add_claim("sub", effective_vapid_subject(settings, site_url));
    let signature = signature_builder
        .build()
        .map_err(|error| Error::BadRequest(format!("web push vapid sign failed: {error}")))?;
    let mut builder = WebPushMessageBuilder::new(subscription_info);
    builder.set_ttl(ttl);
    // Urgency is optional; when absent the push service default applies.
    if let Some(urgency) = urgency {
        builder.set_urgency(urgency);
    }
    builder.set_payload(ContentEncoding::Aes128Gcm, payload);
    builder.set_vapid_signature(signature);
    let client = HyperWebPushClient::new();
    let message = builder
        .build()
        .map_err(|error| Error::BadRequest(format!("web push message build failed: {error}")))?;
    client
        .send(message)
        .await
        .map_err(|error| Error::BadRequest(format!("web push send failed: {error}")))?;
    Ok(())
}