feat: ship public ops features and cache docker builds
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Failing after 13s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Has been cancelled
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Has been cancelled

This commit is contained in:
2026-04-01 13:22:19 +08:00
parent 669b79cc95
commit 497a9d713d
75 changed files with 6985 additions and 668 deletions

View File

@@ -0,0 +1,640 @@
use std::{fs, path::Path, path::PathBuf};
use chrono::Utc;
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, QueryFilter,
QueryOrder, Set,
};
use serde::{Deserialize, Serialize};
use crate::{
controllers::site_settings,
models::_entities::{
categories, friend_links, media_assets, posts, reviews, site_settings as site_settings_entity,
tags,
},
services::{content, media_assets as media_assets_service, storage},
};
/// Schema version stamped into every exported backup document.
const BACKUP_VERSION: &str = "2026-04-01";
/// Disclaimer attached to every backup/import summary: the backup carries
/// content, settings, and an object manifest — never the storage binaries.
const WARNING_STORAGE_BINARIES: &str =
    "当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
/// Portable snapshot of one taxonomy row; shared by categories and tags.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupTaxonomyRecord {
    pub name: String,
    // Primary identity used by the upsert on re-import.
    pub slug: String,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
}
/// Snapshot of one review row. Every field is optional, mirroring the
/// nullable columns of the reviews entity; on import, title/review_type/
/// review_date together act as the (fuzzy) identity.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupReviewRecord {
    pub title: Option<String>,
    pub review_type: Option<String>,
    pub rating: Option<i32>,
    pub review_date: Option<String>,
    pub status: Option<String>,
    pub description: Option<String>,
    pub tags: Option<String>,
    pub cover: Option<String>,
    pub link_url: Option<String>,
}
/// Snapshot of one friend link; `site_url` is the identity key on re-import.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupFriendLinkRecord {
    pub site_name: Option<String>,
    pub site_url: String,
    pub avatar_url: Option<String>,
    pub description: Option<String>,
    pub category: Option<String>,
    pub status: Option<String>,
}
/// Metadata snapshot for one object-storage item (the binary itself is
/// never part of the backup); keyed by `object_key` on re-import.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupMediaAssetRecord {
    pub object_key: String,
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    pub tags: Vec<String>,
    pub notes: Option<String>,
}
/// One entry of the object-storage manifest: key, public URL, size, and
/// (when reported by the store) last-modified timestamp.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupStorageObjectRecord {
    pub key: String,
    pub url: String,
    pub size_bytes: i64,
    pub last_modified: Option<String>,
}
/// Raw markdown source of one post, addressed by slug; `file_name` is the
/// on-disk name the document was (or will be) written under.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupPostDocument {
    pub slug: String,
    pub file_name: String,
    pub markdown: String,
}
/// The complete exported backup: settings, taxonomies, reviews, friend
/// links, media metadata, an optional storage manifest, and raw post
/// markdown. `includes_storage_binaries` is always false today (see
/// `WARNING_STORAGE_BINARIES`); `version` carries `BACKUP_VERSION`.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SiteBackupDocument {
    pub version: String,
    // RFC 3339 timestamp taken at export time.
    pub exported_at: String,
    pub includes_storage_binaries: bool,
    pub warning: String,
    pub site_settings: site_settings_entity::Model,
    pub categories: Vec<BackupTaxonomyRecord>,
    pub tags: Vec<BackupTaxonomyRecord>,
    pub reviews: Vec<BackupReviewRecord>,
    pub friend_links: Vec<BackupFriendLinkRecord>,
    pub media_assets: Vec<BackupMediaAssetRecord>,
    // None when object storage is not configured; Some([]) when it is
    // configured but empty.
    pub storage_manifest: Option<Vec<BackupStorageObjectRecord>>,
    pub posts: Vec<BackupPostDocument>,
}
/// Result of `import_site_backup`: what mode ran and how many records of
/// each kind were processed (counts equal the backup list lengths, since
/// any failure aborts the import with an error instead of a summary).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SiteBackupImportSummary {
    pub imported: bool,
    // "merge" or "replace" (normalized).
    pub mode: String,
    pub site_settings_restored: bool,
    // Count of distinct post slugs written to disk.
    pub posts_written: usize,
    pub categories_upserted: usize,
    pub tags_upserted: usize,
    pub reviews_upserted: usize,
    pub friend_links_upserted: usize,
    pub media_assets_upserted: usize,
    pub storage_manifest_items: usize,
    pub includes_storage_binaries: bool,
    pub warning: String,
}
/// Trim an optional string, mapping blank/whitespace-only values to `None`.
///
/// Unlike the previous version this only allocates when trimming actually
/// removes characters: an already-trimmed input is returned as-is, and a
/// blank input allocates nothing at all.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value.and_then(|item| {
        let trimmed = item.trim();
        if trimmed.is_empty() {
            None
        } else if trimmed.len() == item.len() {
            // Nothing was trimmed — reuse the existing allocation.
            Some(item)
        } else {
            Some(trimmed.to_string())
        }
    })
}
/// Build a URL-safe slug from `value`.
///
/// ASCII alphanumerics are kept (lowercased); runs of whitespace, `-`, or
/// `_` collapse into a single dash. Any other character is dropped outright
/// and does NOT act as a separator. Leading/trailing dashes are stripped.
fn slugify(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    // True immediately after a dash has been emitted, to collapse runs.
    let mut pending_separator = false;
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
            pending_separator = false;
        } else if !pending_separator && (ch.is_whitespace() || matches!(ch, '-' | '_')) {
            out.push('-');
            pending_separator = true;
        }
    }
    out.trim_matches('-').to_string()
}
/// Normalize a user-supplied backup mode string.
///
/// Only a trimmed, case-insensitive "replace" selects replace mode;
/// everything else — including `None` — falls back to "merge".
fn normalize_backup_mode(value: Option<&str>) -> String {
    let requested = value.map(str::trim).unwrap_or("merge").to_ascii_lowercase();
    if requested == "replace" {
        "replace".to_string()
    } else {
        "merge".to_string()
    }
}
/// Filesystem directory holding the markdown post documents.
fn markdown_posts_dir() -> PathBuf {
    content::MARKDOWN_POSTS_DIR.into()
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
}
/// Delete every `.md` / `.markdown` file (extension matched case-
/// insensitively) in the posts directory, creating the directory first if
/// needed. Returns how many files were removed; unreadable directory
/// entries are silently skipped.
fn remove_existing_markdown_documents() -> Result<usize> {
    let dir = markdown_posts_dir();
    fs::create_dir_all(&dir).map_err(io_error)?;
    let entries = fs::read_dir(&dir).map_err(io_error)?;
    let mut removed = 0_usize;
    for entry in entries.flatten() {
        let path = entry.path();
        let is_markdown = path
            .extension()
            .and_then(|ext| ext.to_str())
            .map(|ext| ext.eq_ignore_ascii_case("md") || ext.eq_ignore_ascii_case("markdown"))
            .unwrap_or(false);
        if is_markdown {
            fs::remove_file(&path).map_err(io_error)?;
            removed += 1;
        }
    }
    Ok(removed)
}
/// Convert CRLF line endings to LF so stored documents stay consistent.
fn normalize_markdown(value: &str) -> String {
    value.split("\r\n").collect::<Vec<_>>().join("\n")
}
/// Normalize one backup post into a `(slug, markdown)` pair ready to write
/// to disk.
///
/// Slug preference order: the slug parsed out of the markdown (front
/// matter), then the backup record's own `slug` field. `file_name` defaults
/// to `<slug>.md` (or `post.md` when the slug is blank); its stem is passed
/// to the parser as the fallback slug source.
///
/// # Errors
/// Fails when the markdown cannot be parsed, or when no non-empty slug can
/// be determined from either source.
fn normalized_backup_post(document: &BackupPostDocument) -> Result<(String, String)> {
    let candidate_slug = trim_to_option(Some(document.slug.clone())).unwrap_or_default();
    let file_name = trim_to_option(Some(document.file_name.clone()))
        .unwrap_or_else(|| format!("{}.md", if candidate_slug.is_empty() { "post" } else { &candidate_slug }));
    // The file-name stem doubles as a default slug hint for the parser.
    let file_stem = Path::new(&file_name)
        .file_stem()
        .and_then(|value| value.to_str())
        .unwrap_or("post");
    let markdown = normalize_markdown(&document.markdown);
    let parsed = content::parse_markdown_source(file_stem, &markdown, &file_name)?;
    // A slug found in the document wins over the record's slug field.
    let slug = if parsed.slug.trim().is_empty() {
        candidate_slug
    } else {
        parsed.slug
    };
    if slug.trim().is_empty() {
        return Err(Error::BadRequest("备份中的文章 slug 不能为空".to_string()));
    }
    Ok((slug, markdown))
}
/// Build the object-storage manifest section of a backup.
///
/// Returns `None` when no R2/object-storage settings are configured, so the
/// backup can distinguish "storage disabled" from "bucket is empty".
///
/// NOTE(review): `list_objects(ctx, None, 1000)` passes a limit of 1000 with
/// no continuation token — if `list_objects` does not paginate internally,
/// manifests for larger buckets will be silently truncated; confirm.
async fn export_storage_manifest(
    ctx: &AppContext,
) -> Result<Option<Vec<BackupStorageObjectRecord>>> {
    if storage::optional_r2_settings(ctx).await?.is_none() {
        return Ok(None);
    }
    Ok(Some(
        storage::list_objects(ctx, None, 1000)
            .await?
            .into_iter()
            .map(|item| BackupStorageObjectRecord {
                key: item.key,
                url: item.url,
                size_bytes: item.size_bytes,
                last_modified: item.last_modified,
            })
            .collect(),
    ))
}
/// Map a category row to its backup form; the slug stands in for a missing
/// name.
fn export_category_record(item: categories::Model) -> BackupTaxonomyRecord {
    let categories::Model {
        name,
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
        ..
    } = item;
    BackupTaxonomyRecord {
        name: name.unwrap_or_else(|| slug.clone()),
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
    }
}
/// Map a tag row to its backup form; the slug stands in for a missing name.
fn export_tag_record(item: tags::Model) -> BackupTaxonomyRecord {
    let tags::Model {
        name,
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
        ..
    } = item;
    BackupTaxonomyRecord {
        name: name.unwrap_or_else(|| slug.clone()),
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
    }
}
/// Map a review row to its backup form (straight field copy).
fn export_review_record(item: reviews::Model) -> BackupReviewRecord {
    let reviews::Model {
        title,
        review_type,
        rating,
        review_date,
        status,
        description,
        tags,
        cover,
        link_url,
        ..
    } = item;
    BackupReviewRecord {
        title,
        review_type,
        rating,
        review_date,
        status,
        description,
        tags,
        cover,
        link_url,
    }
}
/// Map a friend link row to its backup form (straight field copy).
fn export_friend_link_record(item: friend_links::Model) -> BackupFriendLinkRecord {
    let friend_links::Model {
        site_name,
        site_url,
        avatar_url,
        description,
        category,
        status,
        ..
    } = item;
    BackupFriendLinkRecord {
        site_name,
        site_url,
        avatar_url,
        description,
        category,
        status,
    }
}
/// Map a media asset row to its backup form; tags are decoded from the JSON
/// column via the shared service helper before the row is consumed.
fn export_media_asset_record(item: media_assets::Model) -> BackupMediaAssetRecord {
    let tags = media_assets_service::tag_list(&item);
    let media_assets::Model {
        object_key,
        title,
        alt_text,
        caption,
        notes,
        ..
    } = item;
    BackupMediaAssetRecord {
        object_key,
        title,
        alt_text,
        caption,
        tags,
        notes,
    }
}
/// Assemble a complete, self-contained backup document for the site.
///
/// Markdown posts are synced into the database first so the export reflects
/// what is actually on disk; settings, taxonomies, reviews, friend links,
/// media metadata, an optional storage manifest, and the raw markdown of
/// every post are then snapshotted. Storage binaries are never included
/// (see `WARNING_STORAGE_BINARIES`).
///
/// # Errors
/// Propagates database and filesystem errors; a manifest failure alone is
/// downgraded to a warning and the backup continues without it.
pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument> {
    let site_settings_row = site_settings::load_current(ctx).await?;
    // Sync first so the DB post list matches the markdown files read below.
    let markdown_posts = content::sync_markdown_posts(ctx).await?;
    let categories = categories::Entity::find()
        .order_by_asc(categories::Column::Slug)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_category_record)
        .collect::<Vec<_>>();
    let tags = tags::Entity::find()
        .order_by_asc(tags::Column::Slug)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_tag_record)
        .collect::<Vec<_>>();
    let reviews = reviews::Entity::find()
        .order_by_desc(reviews::Column::UpdatedAt)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_review_record)
        .collect::<Vec<_>>();
    let friend_links = friend_links::Entity::find()
        .order_by_asc(friend_links::Column::SiteUrl)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_friend_link_record)
        .collect::<Vec<_>>();
    let media_assets = media_assets::Entity::find()
        .order_by_asc(media_assets::Column::ObjectKey)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_media_asset_record)
        .collect::<Vec<_>>();
    // Re-read each post's markdown from disk; any read failure aborts the
    // export (a backup missing a post body would be silently lossy).
    let posts = markdown_posts
        .into_iter()
        .map(|post| {
            let (_, markdown) = content::read_markdown_document(&post.slug)?;
            Ok(BackupPostDocument {
                slug: post.slug.clone(),
                file_name: format!("{}.md", post.slug),
                markdown,
            })
        })
        .collect::<Result<Vec<_>>>()?;
    // The manifest is best-effort: storage trouble must not block a content
    // backup, so failures are logged and the field is left as None.
    let storage_manifest = match export_storage_manifest(ctx).await {
        Ok(items) => items,
        Err(error) => {
            tracing::warn!(?error, "failed to export storage manifest, continuing without it");
            None
        }
    };
    Ok(SiteBackupDocument {
        version: BACKUP_VERSION.to_string(),
        exported_at: Utc::now().to_rfc3339(),
        includes_storage_binaries: false,
        warning: WARNING_STORAGE_BINARIES.to_string(),
        site_settings: site_settings_row,
        categories,
        tags,
        reviews,
        friend_links,
        media_assets,
        storage_manifest,
        posts,
    })
}
/// Overwrite the current site-settings row with the values from a backup.
///
/// The existing row's `id` and `created_at` are preserved and `updated_at`
/// is stamped with "now"; `reset_all()` re-marks every field as changed
/// (sea-orm) so the update writes the full backup payload rather than only
/// the three fields set above.
async fn restore_site_settings(
    ctx: &AppContext,
    value: &site_settings_entity::Model,
) -> Result<()> {
    let current = site_settings::load_current(ctx).await?;
    let mut active = value.clone().into_active_model();
    active.id = Set(current.id);
    active.created_at = Set(current.created_at);
    active.updated_at = Set(Utc::now().into());
    active.reset_all().update(&ctx.db).await?;
    Ok(())
}
/// Insert or update one category from a backup record.
///
/// An existing row is matched by slug OR name so re-imports do not create
/// duplicates; every descriptive column is then overwritten from the
/// record. A blank name falls back to the slug, and a blank slug is derived
/// from the name via `slugify`.
async fn upsert_category(ctx: &AppContext, item: &BackupTaxonomyRecord) -> Result<()> {
    let name = trim_to_option(Some(item.name.clone())).unwrap_or_else(|| item.slug.clone());
    let slug = trim_to_option(Some(item.slug.clone())).unwrap_or_else(|| slugify(&name));
    if slug.is_empty() {
        return Err(Error::BadRequest("分类 slug 不能为空".to_string()));
    }
    let matcher = Condition::any()
        .add(categories::Column::Slug.eq(&slug))
        .add(categories::Column::Name.eq(name.clone()));
    let existing = categories::Entity::find()
        .filter(matcher)
        .one(&ctx.db)
        .await?;
    let is_update = existing.is_some();
    let mut record = match existing {
        Some(model) => model.into_active_model(),
        None => Default::default(),
    };
    record.name = Set(Some(name));
    record.slug = Set(slug);
    record.description = Set(trim_to_option(item.description.clone()));
    record.cover_image = Set(trim_to_option(item.cover_image.clone()));
    record.accent_color = Set(trim_to_option(item.accent_color.clone()));
    record.seo_title = Set(trim_to_option(item.seo_title.clone()));
    record.seo_description = Set(trim_to_option(item.seo_description.clone()));
    if is_update {
        record.update(&ctx.db).await?;
    } else {
        record.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Insert or update one tag from a backup record.
///
/// Same strategy as `upsert_category`: match by slug OR name, overwrite all
/// descriptive columns; blank name falls back to the slug, blank slug is
/// derived from the name.
async fn upsert_tag(ctx: &AppContext, item: &BackupTaxonomyRecord) -> Result<()> {
    let name = trim_to_option(Some(item.name.clone())).unwrap_or_else(|| item.slug.clone());
    let slug = trim_to_option(Some(item.slug.clone())).unwrap_or_else(|| slugify(&name));
    if slug.is_empty() {
        return Err(Error::BadRequest("标签 slug 不能为空".to_string()));
    }
    let matcher = Condition::any()
        .add(tags::Column::Slug.eq(&slug))
        .add(tags::Column::Name.eq(name.clone()));
    let existing = tags::Entity::find().filter(matcher).one(&ctx.db).await?;
    let is_update = existing.is_some();
    let mut record = match existing {
        Some(model) => model.into_active_model(),
        None => Default::default(),
    };
    record.name = Set(Some(name));
    record.slug = Set(slug);
    record.description = Set(trim_to_option(item.description.clone()));
    record.cover_image = Set(trim_to_option(item.cover_image.clone()));
    record.accent_color = Set(trim_to_option(item.accent_color.clone()));
    record.seo_title = Set(trim_to_option(item.seo_title.clone()));
    record.seo_description = Set(trim_to_option(item.seo_description.clone()));
    if is_update {
        record.update(&ctx.db).await?;
    } else {
        record.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Insert or update one friend link from a backup record, keyed by its
/// (trimmed, required) `site_url`; all other columns are overwritten.
async fn upsert_friend_link(ctx: &AppContext, item: &BackupFriendLinkRecord) -> Result<()> {
    let site_url = trim_to_option(Some(item.site_url.clone()))
        .ok_or_else(|| Error::BadRequest("友链 site_url 不能为空".to_string()))?;
    let existing = friend_links::Entity::find()
        .filter(friend_links::Column::SiteUrl.eq(&site_url))
        .one(&ctx.db)
        .await?;
    let is_update = existing.is_some();
    let mut record = match existing {
        Some(model) => model.into_active_model(),
        None => Default::default(),
    };
    record.site_name = Set(trim_to_option(item.site_name.clone()));
    record.site_url = Set(site_url);
    record.avatar_url = Set(trim_to_option(item.avatar_url.clone()));
    record.description = Set(trim_to_option(item.description.clone()));
    record.category = Set(trim_to_option(item.category.clone()));
    record.status = Set(trim_to_option(item.status.clone()));
    if is_update {
        record.update(&ctx.db).await?;
    } else {
        record.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Insert or update one review from a backup record.
///
/// Identity matching is fuzzy: the existing row is located by whichever of
/// title / review_type / review_date the record provides (all provided
/// fields must match), with `order_by_asc(Id)` making the pick
/// deterministic when several rows qualify (oldest wins). When none of the
/// three identity fields is set, no lookup happens and a fresh row is
/// inserted — so repeated merge-imports of such records accumulate
/// duplicates.
async fn upsert_review(ctx: &AppContext, item: &BackupReviewRecord) -> Result<()> {
    let title = trim_to_option(item.title.clone());
    let review_type = trim_to_option(item.review_type.clone());
    let review_date = trim_to_option(item.review_date.clone());
    // Narrow the lookup by each identity field the record actually has.
    let mut query = reviews::Entity::find();
    if let Some(value) = title.clone() {
        query = query.filter(reviews::Column::Title.eq(value));
    }
    if let Some(value) = review_type.clone() {
        query = query.filter(reviews::Column::ReviewType.eq(value));
    }
    if let Some(value) = review_date.clone() {
        query = query.filter(reviews::Column::ReviewDate.eq(value));
    }
    let existing = if title.is_some() || review_type.is_some() || review_date.is_some() {
        query.order_by_asc(reviews::Column::Id).one(&ctx.db).await?
    } else {
        // No identity available — always insert a new row.
        None
    };
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    active.title = Set(title);
    active.review_type = Set(review_type);
    active.rating = Set(item.rating);
    active.review_date = Set(review_date);
    active.status = Set(trim_to_option(item.status.clone()));
    active.description = Set(trim_to_option(item.description.clone()));
    active.tags = Set(trim_to_option(item.tags.clone()));
    active.cover = Set(trim_to_option(item.cover.clone()));
    active.link_url = Set(trim_to_option(item.link_url.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Restore one media asset's metadata through the shared upsert service,
/// keyed by its object key.
async fn upsert_media_asset(ctx: &AppContext, item: &BackupMediaAssetRecord) -> Result<()> {
    let payload = media_assets_service::MediaAssetMetadataInput {
        title: item.title.clone(),
        alt_text: item.alt_text.clone(),
        caption: item.caption.clone(),
        tags: Some(item.tags.clone()),
        notes: item.notes.clone(),
    };
    media_assets_service::upsert_by_key(ctx, &item.object_key, payload).await?;
    Ok(())
}
/// Write the backup's post documents into the markdown directory, then
/// re-sync the posts table from disk.
///
/// In replace mode existing markdown files are removed first. If the backup
/// contains no posts at all, the sync is skipped entirely and (in replace
/// mode) the posts table is emptied directly. Returns the number of
/// distinct slugs written — duplicate slugs overwrite the same file and
/// count once.
async fn write_backup_posts(
    ctx: &AppContext,
    documents: &[BackupPostDocument],
    replace_existing: bool,
) -> Result<usize> {
    let dir = markdown_posts_dir();
    fs::create_dir_all(&dir).map_err(io_error)?;
    if replace_existing {
        remove_existing_markdown_documents()?;
    }
    if documents.is_empty() {
        // No sync runs on this path, so the table must be cleared here.
        if replace_existing {
            posts::Entity::delete_many().exec(&ctx.db).await?;
        }
        return Ok(0);
    }
    let mut written = std::collections::HashSet::new();
    for document in documents {
        let (slug, markdown) = normalized_backup_post(document)?;
        fs::write(content::markdown_post_path(&slug), markdown).map_err(io_error)?;
        written.insert(slug);
    }
    // NOTE(review): assumes sync_markdown_posts reconciles the posts table
    // with the files now on disk (including pruning rows for files removed
    // above in replace mode) — confirm.
    content::sync_markdown_posts(ctx).await?;
    Ok(written.len())
}
/// Restore a backup document into the running site.
///
/// `mode` is "merge" (default) or "replace" (see `normalize_backup_mode`).
/// Replace mode first wipes friend links, reviews, media metadata,
/// categories, and tags; posts are handled inside `write_backup_posts`.
/// Site settings are always overwritten, then each record list is upserted
/// in order. On success the summary counters simply equal the backup list
/// lengths, because any upsert error aborts the import immediately.
///
/// All operations run directly on `ctx.db` without a wrapping transaction,
/// so a failed import can leave the site partially restored.
pub async fn import_site_backup(
    ctx: &AppContext,
    backup: SiteBackupDocument,
    mode: Option<&str>,
) -> Result<SiteBackupImportSummary> {
    let mode = normalize_backup_mode(mode);
    let replace_existing = mode == "replace";
    if replace_existing {
        friend_links::Entity::delete_many().exec(&ctx.db).await?;
        reviews::Entity::delete_many().exec(&ctx.db).await?;
        media_assets::Entity::delete_many().exec(&ctx.db).await?;
        categories::Entity::delete_many().exec(&ctx.db).await?;
        tags::Entity::delete_many().exec(&ctx.db).await?;
    }
    restore_site_settings(ctx, &backup.site_settings).await?;
    let posts_written = write_backup_posts(ctx, &backup.posts, replace_existing).await?;
    let mut categories_upserted = 0_usize;
    for item in &backup.categories {
        upsert_category(ctx, item).await?;
        categories_upserted += 1;
    }
    let mut tags_upserted = 0_usize;
    for item in &backup.tags {
        upsert_tag(ctx, item).await?;
        tags_upserted += 1;
    }
    let mut reviews_upserted = 0_usize;
    for item in &backup.reviews {
        upsert_review(ctx, item).await?;
        reviews_upserted += 1;
    }
    let mut friend_links_upserted = 0_usize;
    for item in &backup.friend_links {
        upsert_friend_link(ctx, item).await?;
        friend_links_upserted += 1;
    }
    let mut media_assets_upserted = 0_usize;
    for item in &backup.media_assets {
        upsert_media_asset(ctx, item).await?;
        media_assets_upserted += 1;
    }
    Ok(SiteBackupImportSummary {
        imported: true,
        mode,
        site_settings_restored: true,
        posts_written,
        categories_upserted,
        tags_upserted,
        reviews_upserted,
        friend_links_upserted,
        media_assets_upserted,
        // The manifest is informational only — nothing is restored from it.
        storage_manifest_items: backup.storage_manifest.as_ref().map(Vec::len).unwrap_or(0),
        includes_storage_binaries: backup.includes_storage_binaries,
        warning: WARNING_STORAGE_BINARIES.to_string(),
    })
}

View File

@@ -39,6 +39,7 @@ pub struct CommentGuardInput<'a> {
pub author: Option<&'a str>,
pub content: Option<&'a str>,
pub honeypot_website: Option<&'a str>,
pub turnstile_token: Option<&'a str>,
pub captcha_token: Option<&'a str>,
pub captcha_answer: Option<&'a str>,
}
@@ -362,7 +363,16 @@ pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'
return Err(Error::BadRequest("提交未通过校验".to_string()));
}
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
if !crate::services::turnstile::verify_if_enabled(
ctx,
crate::services::turnstile::TurnstileScope::Comment,
input.turnstile_token,
input.ip_address,
)
.await?
{
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
}
if contains_blocked_keyword(input).is_some() {
return Err(Error::BadRequest("评论内容包含敏感关键词".to_string()));

View File

@@ -0,0 +1,125 @@
use std::collections::HashMap;
use loco_rs::prelude::*;
use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, Set};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::models::_entities::media_assets;
/// Caller-supplied metadata for a media asset; every field is optional so
/// the same shape serves both create and full-overwrite update paths.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct MediaAssetMetadataInput {
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    // Normalized (trimmed, lowercased, deduplicated) before storage.
    pub tags: Option<Vec<String>>,
    pub notes: Option<String>,
}
/// Trim an optional string; blank or whitespace-only input becomes `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    match value {
        Some(item) => {
            let trimmed = item.trim();
            (!trimmed.is_empty()).then(|| trimmed.to_string())
        }
        None => None,
    }
}
/// Canonicalize a tag list into a JSON array value.
///
/// Tags are trimmed, ASCII-lowercased, and de-duplicated in first-seen
/// order; blanks are dropped. Returns `None` when nothing survives so the
/// column can stay NULL instead of holding an empty array.
fn normalize_tag_list(values: Option<Vec<String>>) -> Option<Value> {
    let mut seen = std::collections::HashSet::new();
    let mut tags = Vec::new();
    for raw in values.unwrap_or_default() {
        let trimmed = raw.trim();
        if trimmed.is_empty() {
            continue;
        }
        let normalized = trimmed.to_ascii_lowercase();
        if seen.insert(normalized.clone()) {
            tags.push(Value::String(normalized));
        }
    }
    if tags.is_empty() {
        None
    } else {
        Some(Value::Array(tags))
    }
}
/// Extract the string tags stored in a media asset row's JSON column.
///
/// A NULL column, a non-array value, or non-string array entries simply
/// contribute nothing to the result.
pub fn tag_list(model: &media_assets::Model) -> Vec<String> {
    match model.tags.as_ref().and_then(Value::as_array) {
        Some(items) => items
            .iter()
            .filter_map(|item| item.as_str())
            .map(str::to_string)
            .collect(),
        None => Vec::new(),
    }
}
/// Fetch media asset rows for the given object keys, indexed by key.
///
/// Short-circuits with an empty map for an empty `keys` slice to avoid a
/// pointless `IN ()` query.
pub async fn list_by_keys(
    ctx: &AppContext,
    keys: &[String],
) -> Result<HashMap<String, media_assets::Model>> {
    if keys.is_empty() {
        return Ok(HashMap::new());
    }
    let rows = media_assets::Entity::find()
        .filter(media_assets::Column::ObjectKey.is_in(keys.iter().cloned()))
        .all(&ctx.db)
        .await?;
    let mut by_key = HashMap::with_capacity(rows.len());
    for row in rows {
        by_key.insert(row.object_key.clone(), row);
    }
    Ok(by_key)
}
/// Look up a single media asset row by its exact object key.
pub async fn get_by_key(ctx: &AppContext, object_key: &str) -> Result<Option<media_assets::Model>> {
    let row = media_assets::Entity::find()
        .filter(media_assets::Column::ObjectKey.eq(object_key))
        .one(&ctx.db)
        .await?;
    Ok(row)
}
/// Create or fully overwrite the metadata row for `object_key`.
///
/// The key is trimmed and must be non-empty. Every metadata column is
/// rewritten from `payload` — blank strings become NULL and tags are
/// normalized — so callers cannot partially update a single field.
///
/// # Errors
/// `BadRequest` for a blank key; database errors otherwise.
pub async fn upsert_by_key(
    ctx: &AppContext,
    object_key: &str,
    payload: MediaAssetMetadataInput,
) -> Result<media_assets::Model> {
    let normalized_key = object_key.trim();
    if normalized_key.is_empty() {
        return Err(Error::BadRequest("object key 不能为空".to_string()));
    }
    let existing = get_by_key(ctx, normalized_key).await?;
    let has_existing = existing.is_some();
    // Reuse the existing row's active model when present; otherwise start a
    // fresh one carrying only the object key.
    let mut active = existing
        .map(|item| item.into_active_model())
        .unwrap_or_else(|| media_assets::ActiveModel {
            object_key: Set(normalized_key.to_string()),
            ..Default::default()
        });
    active.title = Set(trim_to_option(payload.title));
    active.alt_text = Set(trim_to_option(payload.alt_text));
    active.caption = Set(trim_to_option(payload.caption));
    active.tags = Set(normalize_tag_list(payload.tags));
    active.notes = Set(trim_to_option(payload.notes));
    if has_existing {
        active.update(&ctx.db).await.map_err(Into::into)
    } else {
        active.insert(&ctx.db).await.map_err(Into::into)
    }
}
/// Remove the metadata row for `object_key` if one exists (idempotent).
pub async fn delete_by_key(ctx: &AppContext, object_key: &str) -> Result<()> {
    let existing = get_by_key(ctx, object_key).await?;
    if let Some(row) = existing {
        row.delete(&ctx.db).await?;
    }
    Ok(())
}
/// Remove metadata rows for each key in turn, stopping at the first error.
pub async fn delete_by_keys(ctx: &AppContext, object_keys: &[String]) -> Result<()> {
    for key in object_keys.iter() {
        delete_by_key(ctx, key).await?;
    }
    Ok(())
}

View File

@@ -2,9 +2,13 @@ pub mod admin_audit;
pub mod abuse_guard;
pub mod ai;
pub mod analytics;
pub mod backups;
pub mod comment_guard;
pub mod content;
pub mod media_assets;
pub mod notifications;
pub mod post_revisions;
pub mod storage;
pub mod subscriptions;
pub mod turnstile;
pub mod web_push;

View File

@@ -1,10 +1,23 @@
use loco_rs::prelude::*;
use crate::{
controllers::site_settings,
models::_entities::{comments, friend_links},
models::_entities::{comments, friend_links, site_settings as site_settings_model},
services::subscriptions,
};
/// Resolve the admin notification channel from site settings.
///
/// Only a trimmed, case-insensitive "ntfy" selects the ntfy channel; any
/// other value — or none at all — falls back to the webhook channel.
fn notification_channel_type(settings: &site_settings_model::Model) -> &'static str {
    let configured = settings
        .notification_channel_type
        .as_deref()
        .map(|value| value.trim().to_ascii_lowercase());
    if configured.as_deref() == Some("ntfy") {
        subscriptions::CHANNEL_NTFY
    } else {
        subscriptions::CHANNEL_WEBHOOK
    }
}
fn trim_to_option(value: Option<String>) -> Option<String> {
value.and_then(|item| {
let trimmed = item.trim().to_string();
@@ -81,9 +94,10 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
if settings.notification_comment_enabled.unwrap_or(false) {
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
let channel_type = notification_channel_type(&settings);
if let Err(error) = subscriptions::queue_direct_notification(
ctx,
subscriptions::CHANNEL_WEBHOOK,
channel_type,
&target,
subscriptions::EVENT_COMMENT_CREATED,
"新评论通知",
@@ -94,7 +108,7 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
)
.await
{
tracing::warn!("failed to queue legacy comment webhook notification: {error}");
tracing::warn!("failed to queue comment admin notification: {error}");
}
}
}
@@ -144,9 +158,10 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
if settings.notification_friend_link_enabled.unwrap_or(false) {
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
let channel_type = notification_channel_type(&settings);
if let Err(error) = subscriptions::queue_direct_notification(
ctx,
subscriptions::CHANNEL_WEBHOOK,
channel_type,
&target,
subscriptions::EVENT_FRIEND_LINK_CREATED,
"新友链申请通知",
@@ -157,7 +172,7 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
)
.await
{
tracing::warn!("failed to queue legacy friend-link webhook notification: {error}");
tracing::warn!("failed to queue friend-link admin notification: {error}");
}
}
}

View File

@@ -15,7 +15,7 @@ use uuid::Uuid;
use crate::{
mailers::subscription::SubscriptionMailer,
models::_entities::{notification_deliveries, posts, subscriptions},
services::content,
services::{content, web_push as web_push_service},
workers::notification_delivery::{
NotificationDeliveryWorker, NotificationDeliveryWorkerArgs,
},
@@ -26,6 +26,7 @@ pub const CHANNEL_WEBHOOK: &str = "webhook";
pub const CHANNEL_DISCORD: &str = "discord";
pub const CHANNEL_TELEGRAM: &str = "telegram";
pub const CHANNEL_NTFY: &str = "ntfy";
pub const CHANNEL_WEB_PUSH: &str = "web_push";
pub const STATUS_PENDING: &str = "pending";
pub const STATUS_ACTIVE: &str = "active";
@@ -139,6 +140,9 @@ pub fn normalize_channel_type(value: &str) -> String {
CHANNEL_DISCORD => CHANNEL_DISCORD.to_string(),
CHANNEL_TELEGRAM => CHANNEL_TELEGRAM.to_string(),
CHANNEL_NTFY => CHANNEL_NTFY.to_string(),
CHANNEL_WEB_PUSH | "browser_push" | "browser-push" | "webpush" => {
CHANNEL_WEB_PUSH.to_string()
}
_ => CHANNEL_EMAIL.to_string(),
}
}
@@ -225,6 +229,35 @@ fn merge_metadata(existing: Option<&Value>, incoming: Option<Value>) -> Option<V
}
}
/// Validate and canonicalize a raw browser push subscription JSON value.
///
/// Deserializes into `web_push::SubscriptionInfo`, trims the endpoint and
/// both crypto keys, rejects the payload if any of the three ends up blank,
/// and returns the trimmed subscription re-serialized to JSON.
///
/// # Errors
/// `BadRequest` when the JSON does not match the expected shape or a
/// required field is empty.
fn normalize_browser_push_subscription(raw: Value) -> Result<Value> {
    let mut subscription = serde_json::from_value::<web_push::SubscriptionInfo>(raw)
        .map_err(|_| Error::BadRequest("browser push subscription 非法".to_string()))?;
    subscription.endpoint = subscription.endpoint.trim().to_string();
    subscription.keys.p256dh = subscription.keys.p256dh.trim().to_string();
    subscription.keys.auth = subscription.keys.auth.trim().to_string();
    if subscription.endpoint.is_empty()
        || subscription.keys.p256dh.is_empty()
        || subscription.keys.auth.is_empty()
    {
        return Err(Error::BadRequest(
            "browser push subscription 缺少 endpoint / keys".to_string(),
        ));
    }
    serde_json::to_value(subscription).map_err(Into::into)
}
/// Merge stored and incoming metadata, then stamp the browser-push marker
/// fields (`kind`, `subscription`) on top — the markers always win.
fn merge_browser_push_metadata(existing: Option<&Value>, incoming: Option<Value>, subscription: Value) -> Value {
    let merged = merge_metadata(existing, incoming);
    let mut object = match merged.as_ref().and_then(Value::as_object) {
        Some(map) => map.clone(),
        None => Default::default(),
    };
    object.insert("kind".to_string(), Value::String("browser-push".to_string()));
    object.insert("subscription".to_string(), subscription);
    Value::Object(object)
}
fn json_string_list(value: Option<&Value>, key: &str) -> Vec<String> {
value
.and_then(Value::as_object)
@@ -592,6 +625,88 @@ pub async fn create_public_email_subscription(
})
}
pub async fn create_public_web_push_subscription(
ctx: &AppContext,
subscription: Value,
metadata: Option<Value>,
) -> Result<PublicSubscriptionResult> {
let normalized_subscription = normalize_browser_push_subscription(subscription)?;
let endpoint = normalized_subscription
.get("endpoint")
.and_then(Value::as_str)
.ok_or_else(|| Error::BadRequest("browser push endpoint 非法".to_string()))?
.to_string();
let existing = subscriptions::Entity::find()
.filter(subscriptions::Column::ChannelType.eq(CHANNEL_WEB_PUSH))
.filter(subscriptions::Column::Target.eq(&endpoint))
.one(&ctx.db)
.await?;
if let Some(existing) = existing {
let mut active = existing.clone().into_active_model();
let manage_token = existing
.manage_token
.clone()
.filter(|value| !value.trim().is_empty())
.unwrap_or_else(generate_subscription_token);
active.manage_token = Set(Some(manage_token));
active.status = Set(STATUS_ACTIVE.to_string());
active.confirm_token = Set(None);
active.verified_at = Set(Some(Utc::now().to_rfc3339()));
active.metadata = Set(Some(merge_browser_push_metadata(
existing.metadata.as_ref(),
metadata,
normalized_subscription,
)));
if existing
.display_name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.is_none()
{
active.display_name = Set(Some("Browser Push".to_string()));
}
let updated = active.update(&ctx.db).await?;
return Ok(PublicSubscriptionResult {
subscription: to_public_subscription_view(&updated),
requires_confirmation: false,
message: "浏览器推送已更新,后续有新内容时会直接提醒。".to_string(),
});
}
let created = subscriptions::ActiveModel {
channel_type: Set(CHANNEL_WEB_PUSH.to_string()),
target: Set(endpoint),
display_name: Set(Some("Browser Push".to_string())),
status: Set(STATUS_ACTIVE.to_string()),
filters: Set(Some(default_public_filters())),
secret: Set(None),
notes: Set(None),
confirm_token: Set(None),
manage_token: Set(Some(generate_subscription_token())),
metadata: Set(Some(merge_browser_push_metadata(
None,
metadata,
normalized_subscription,
))),
verified_at: Set(Some(Utc::now().to_rfc3339())),
last_notified_at: Set(None),
failure_count: Set(Some(0)),
last_delivery_status: Set(None),
..Default::default()
}
.insert(&ctx.db)
.await?;
Ok(PublicSubscriptionResult {
subscription: to_public_subscription_view(&created),
requires_confirmation: false,
message: "浏览器推送已开启,后续有新内容时会直接提醒。".to_string(),
})
}
pub async fn confirm_subscription(ctx: &AppContext, token: &str) -> Result<subscriptions::Model> {
let token = token.trim();
if token.is_empty() {
@@ -869,6 +984,7 @@ fn provider_name(channel_type: &str) -> &'static str {
CHANNEL_DISCORD => "discord-webhook",
CHANNEL_TELEGRAM => "telegram-bot-api",
CHANNEL_NTFY => "ntfy",
CHANNEL_WEB_PUSH => "web-push",
_ => "webhook",
}
}
@@ -882,10 +998,65 @@ fn resolve_ntfy_target(target: &str) -> String {
}
}
/// Collapse every run of whitespace into a single space (leading and
/// trailing whitespace is dropped entirely).
///
/// Streams directly into one output `String` instead of collecting words
/// into an intermediate `Vec` and joining, halving the allocations.
fn collapse_whitespace(value: &str) -> String {
    let mut collapsed = String::with_capacity(value.len());
    for word in value.split_whitespace() {
        if !collapsed.is_empty() {
            collapsed.push(' ');
        }
        collapsed.push_str(word);
    }
    collapsed
}
/// Truncate `value` to at most `max_chars` characters, appending `…` when
/// anything was cut (so a truncated result is `max_chars + 1` chars long,
/// preserving the original behavior).
///
/// Finds the cut point with a single `char_indices().nth()` pass instead of
/// counting all characters and then re-collecting the prefix.
fn truncate_chars(value: &str, max_chars: usize) -> String {
    match value.char_indices().nth(max_chars) {
        // Fewer than or exactly `max_chars` characters: nothing to cut.
        None => value.to_string(),
        Some((byte_index, _)) => {
            let mut truncated = String::with_capacity(byte_index + '…'.len_utf8());
            truncated.push_str(&value[..byte_index]);
            truncated.push('…');
            truncated
        }
    }
}
/// Join `path` onto the configured site URL (trailing slashes trimmed).
/// Returns `None` when no usable base URL is available.
fn site_asset_url(site_url: Option<&str>, path: &str) -> Option<String> {
    let raw = site_url?;
    let base = raw.trim().trim_end_matches('/');
    if base.is_empty() {
        None
    } else {
        Some(format!("{base}{path}"))
    }
}
/// Pick the click-through URL for a push notification: an explicit string
/// `url` in the payload wins, otherwise fall back to the site URL.
fn web_push_target_url(message: &QueuedDeliveryPayload) -> Option<String> {
    if let Some(url) = message.payload.get("url").and_then(Value::as_str) {
        return Some(url.to_string());
    }
    message.site_url.clone()
}
/// Assemble the JSON document delivered to the browser for one push.
///
/// Body text is whitespace-collapsed and capped at 220 characters; icon and
/// badge point at the site's favicon assets when a site URL is configured.
/// `tag` carries the event type so the client can collapse notifications of
/// the same kind; the full original payload rides along under `data`.
fn build_web_push_payload(message: &QueuedDeliveryPayload) -> Value {
    let body = truncate_chars(&collapse_whitespace(&message.text), 220);
    serde_json::json!({
        "title": message.subject,
        "body": body,
        "icon": site_asset_url(message.site_url.as_deref(), "/favicon.svg"),
        "badge": site_asset_url(message.site_url.as_deref(), "/favicon.ico"),
        "url": web_push_target_url(message),
        "tag": message
            .payload
            .get("event_type")
            .and_then(Value::as_str)
            .unwrap_or("subscription"),
        "data": {
            "event_type": message.payload.get("event_type").cloned().unwrap_or(Value::Null),
            "payload": message.payload,
        }
    })
}
async fn deliver_via_channel(
ctx: &AppContext,
channel_type: &str,
target: &str,
message: &QueuedDeliveryPayload,
metadata: Option<&Value>,
) -> Result<Option<String>> {
match channel_type {
CHANNEL_EMAIL => Err(Error::BadRequest(
@@ -923,6 +1094,21 @@ async fn deliver_via_channel(
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_WEB_PUSH => {
let settings = crate::controllers::site_settings::load_current(ctx).await?;
let subscription_info = web_push_service::subscription_info_from_metadata(metadata)?;
let payload = serde_json::to_vec(&build_web_push_payload(message))?;
web_push_service::send_payload(
&settings,
&subscription_info,
&payload,
Some(web_push::Urgency::Normal),
24 * 60 * 60,
message.site_url.as_deref(),
)
.await?;
Ok(None)
}
_ => {
let envelope = DeliveryEnvelope {
event: message
@@ -1010,10 +1196,17 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
.await
.map(|_| None)
} else {
deliver_via_channel(&subscription.channel_type, &subscription.target, &message).await
deliver_via_channel(
ctx,
&subscription.channel_type,
&subscription.target,
&message,
subscription.metadata.as_ref(),
)
.await
}
} else {
deliver_via_channel(&delivery.channel_type, &delivery.target, &message).await
deliver_via_channel(ctx, &delivery.channel_type, &delivery.target, &message, None).await
};
let subscription_id = delivery.subscription_id;
let delivery_channel_type = delivery.channel_type.clone();

View File

@@ -0,0 +1,182 @@
use std::sync::OnceLock;
use loco_rs::prelude::*;
use reqwest::Client;
use serde::Deserialize;
use crate::models::_entities::site_settings;
// Default Cloudflare siteverify endpoint; can be overridden via TERMI_TURNSTILE_VERIFY_URL.
const DEFAULT_TURNSTILE_VERIFY_URL: &str =
    "https://challenges.cloudflare.com/turnstile/v0/siteverify";
// Env-var fallbacks consulted when no key is stored in site settings.
const ENV_TURNSTILE_SECRET_KEY: &str = "TERMI_TURNSTILE_SECRET_KEY";
// Legacy secret-key name kept for backward compatibility with older deployments.
const ENV_LEGACY_TURNSTILE_SECRET_KEY: &str = "TERMI_COMMENT_TURNSTILE_SECRET_KEY";
// NOTE(review): PUBLIC_ prefix suggests this is also exposed to the frontend build — confirm.
const ENV_TURNSTILE_SITE_KEY: &str = "PUBLIC_COMMENT_TURNSTILE_SITE_KEY";
const ENV_TURNSTILE_VERIFY_URL: &str = "TERMI_TURNSTILE_VERIFY_URL";
/// Public surface that a Turnstile challenge can protect.
///
/// Each scope maps to its own enable flag in site settings (see `scope_enabled`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum TurnstileScope {
    /// Comment submission flow.
    Comment,
    /// Subscription sign-up flow.
    Subscription,
}
/// Subset of Cloudflare's siteverify JSON response that this module inspects.
#[derive(Clone, Debug, Deserialize)]
struct TurnstileVerifyResponse {
    // Whether the submitted token passed verification.
    success: bool,
    // Cloudflare reports failure reasons under the kebab-case key "error-codes";
    // default to empty when the field is absent.
    #[serde(default, rename = "error-codes")]
    error_codes: Vec<String>,
}
/// Trims an optional string, mapping absent or whitespace-only input to `None`.
fn trim_to_option(value: Option<&str>) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        return None;
    }
    Some(trimmed.to_string())
}
/// Reads an environment variable, treating missing, unreadable, or
/// whitespace-only values as absent.
fn env_value(name: &str) -> Option<String> {
    match std::env::var(name) {
        Ok(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        Err(_) => None,
    }
}
/// Trims a setting value from the database, mapping absent or
/// whitespace-only values to `None`.
fn configured_value(value: Option<&String>) -> Option<String> {
    value
        .map(|item| item.trim())
        .filter(|trimmed| !trimmed.is_empty())
        .map(ToString::to_string)
}
/// Trims the caller-supplied client IP and caps it at 96 characters so an
/// oversized header value cannot bloat the verification request.
fn normalize_ip(value: Option<&str>) -> Option<String> {
    let cleaned = trim_to_option(value)?;
    Some(cleaned.chars().take(96).collect())
}
/// Resolves the siteverify endpoint, preferring the env override and falling
/// back to Cloudflare's hosted URL.
fn verify_url() -> String {
    match env_value(ENV_TURNSTILE_VERIFY_URL) {
        Some(custom) => custom,
        None => DEFAULT_TURNSTILE_VERIFY_URL.to_string(),
    }
}
/// Returns a process-wide shared HTTP client, built lazily on first use.
fn client() -> &'static Client {
    // OnceLock gives thread-safe one-time initialization without an init step.
    static CLIENT: OnceLock<Client> = OnceLock::new();
    CLIENT.get_or_init(Client::new)
}
/// Resolves the Turnstile secret key: site settings first, then the current
/// env var, then the legacy env var.
pub fn secret_key(settings: &site_settings::Model) -> Option<String> {
    if let Some(from_settings) = configured_value(settings.turnstile_secret_key.as_ref()) {
        return Some(from_settings);
    }
    if let Some(from_env) = env_value(ENV_TURNSTILE_SECRET_KEY) {
        return Some(from_env);
    }
    env_value(ENV_LEGACY_TURNSTILE_SECRET_KEY)
}
/// Resolves the Turnstile site key, preferring site settings over the env var.
pub fn site_key(settings: &site_settings::Model) -> Option<String> {
    match configured_value(settings.turnstile_site_key.as_ref()) {
        Some(key) => Some(key),
        None => env_value(ENV_TURNSTILE_SITE_KEY),
    }
}
/// True when a site key is available from settings or the environment.
fn site_key_configured(settings: &site_settings::Model) -> bool {
    site_key(settings).is_some()
}
/// True when a secret key is available from settings or the environment.
pub fn secret_key_configured(settings: &site_settings::Model) -> bool {
    secret_key(settings).is_some()
}
/// Reads the per-scope enable flag from site settings; an unset flag counts
/// as disabled.
fn scope_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
    let flag = match scope {
        TurnstileScope::Comment => settings.comment_turnstile_enabled,
        TurnstileScope::Subscription => settings.subscription_turnstile_enabled,
    };
    flag == Some(true)
}
/// Turnstile is effectively enabled only when the scope flag is on AND both
/// the site key and the secret key are configured.
pub fn is_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
    if !scope_enabled(settings, scope) {
        return false;
    }
    site_key_configured(settings) && secret_key_configured(settings)
}
/// Loads current site settings and reports whether Turnstile is enabled for
/// the given scope. Errors propagate from the settings load.
pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Result<bool> {
    let settings = crate::controllers::site_settings::load_current(ctx).await?;
    Ok(is_enabled(&settings, scope))
}
/// Validates a Turnstile response token against the siteverify endpoint.
///
/// Returns a user-facing `BadRequest` (messages are localized strings) when:
/// the secret key is not configured, the token is missing, the remote service
/// is unreachable or returns a non-success status, the response cannot be
/// decoded, or verification is rejected.
async fn verify_token(
    settings: &site_settings::Model,
    token: Option<&str>,
    client_ip: Option<&str>,
) -> Result<()> {
    let secret = secret_key(settings).ok_or_else(|| {
        Error::BadRequest("人机验证尚未配置完成,请稍后重试".to_string())
    })?;
    // Reject empty/whitespace-only tokens before hitting the network.
    let response_token = trim_to_option(token)
        .ok_or_else(|| Error::BadRequest("请先完成人机验证".to_string()))?;
    let mut form_data = vec![
        ("secret".to_string(), secret),
        ("response".to_string(), response_token),
    ];
    // "remoteip" is optional in the siteverify API; only send it when present.
    if let Some(remote_ip) = normalize_ip(client_ip) {
        form_data.push(("remoteip".to_string(), remote_ip));
    }
    // Transport failures are logged with detail but surfaced generically.
    let response = client()
        .post(verify_url())
        .form(&form_data)
        .send()
        .await
        .map_err(|error| {
            tracing::warn!("turnstile verify request failed: {error}");
            Error::BadRequest("人机验证服务暂时不可用,请稍后重试".to_string())
        })?;
    if !response.status().is_success() {
        tracing::warn!(
            "turnstile verify returned unexpected status: {}",
            response.status()
        );
        return Err(Error::BadRequest(
            "人机验证服务暂时不可用,请稍后重试".to_string(),
        ));
    }
    let payload = response
        .json::<TurnstileVerifyResponse>()
        .await
        .map_err(|error| {
            tracing::warn!("turnstile verify decode failed: {error}");
            Error::BadRequest("人机验证服务暂时不可用,请稍后重试".to_string())
        })?;
    if !payload.success {
        // Error codes are logged for operators but not shown to end users.
        tracing::warn!(
            error_codes = ?payload.error_codes,
            "turnstile verify rejected request"
        );
        return Err(Error::BadRequest("人机验证未通过,请重试".to_string()));
    }
    Ok(())
}
/// Runs Turnstile verification only when it is enabled for the given scope.
///
/// Returns `Ok(false)` when verification is disabled/unconfigured (no check
/// performed), `Ok(true)` when the token verified successfully, and an error
/// when verification was attempted and failed.
pub async fn verify_if_enabled(
    ctx: &AppContext,
    scope: TurnstileScope,
    token: Option<&str>,
    client_ip: Option<&str>,
) -> Result<bool> {
    let settings = crate::controllers::site_settings::load_current(ctx).await?;
    if !is_enabled(&settings, scope) {
        return Ok(false);
    }
    verify_token(&settings, token, client_ip).await?;
    Ok(true)
}

View File

@@ -0,0 +1,122 @@
use loco_rs::prelude::*;
use serde_json::Value;
use web_push::{
ContentEncoding, HyperWebPushClient, SubscriptionInfo, Urgency, VapidSignatureBuilder,
WebPushClient, WebPushMessageBuilder,
};
use crate::models::_entities::site_settings;
// VAPID public key env vars. NOTE(review): the PUBLIC_ prefix suggests the
// first name is also consumed by the frontend build — confirm. The TERMI_
// legacy name is kept for backward compatibility.
const ENV_PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY: &str = "PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY";
const ENV_LEGACY_WEB_PUSH_VAPID_PUBLIC_KEY: &str = "TERMI_WEB_PUSH_VAPID_PUBLIC_KEY";
// Private signing key and VAPID "sub" claim overrides.
const ENV_WEB_PUSH_VAPID_PRIVATE_KEY: &str = "TERMI_WEB_PUSH_VAPID_PRIVATE_KEY";
const ENV_WEB_PUSH_VAPID_SUBJECT: &str = "TERMI_WEB_PUSH_VAPID_SUBJECT";
/// Reads an environment variable; missing, unreadable, or whitespace-only
/// values are treated as absent.
fn env_value(name: &str) -> Option<String> {
    let raw = std::env::var(name).ok()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Trims a stored setting value, mapping absent or whitespace-only values
/// to `None`.
fn configured_value(value: Option<&String>) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        return None;
    }
    Some(trimmed.to_string())
}
/// Resolves the VAPID public key: site settings first, then the PUBLIC_ env
/// var, then the legacy TERMI_ env var.
pub fn public_key(settings: &site_settings::Model) -> Option<String> {
    if let Some(configured) = configured_value(settings.web_push_vapid_public_key.as_ref()) {
        return Some(configured);
    }
    env_value(ENV_PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY)
        .or_else(|| env_value(ENV_LEGACY_WEB_PUSH_VAPID_PUBLIC_KEY))
}
/// Resolves the VAPID private key, preferring site settings over the env var.
pub fn private_key(settings: &site_settings::Model) -> Option<String> {
    match configured_value(settings.web_push_vapid_private_key.as_ref()) {
        Some(key) => Some(key),
        None => env_value(ENV_WEB_PUSH_VAPID_PRIVATE_KEY),
    }
}
/// Resolves the explicitly configured VAPID subject, preferring site settings
/// over the env var. Returns `None` when neither is set.
pub fn vapid_subject(settings: &site_settings::Model) -> Option<String> {
    match configured_value(settings.web_push_vapid_subject.as_ref()) {
        Some(subject) => Some(subject),
        None => env_value(ENV_WEB_PUSH_VAPID_SUBJECT),
    }
}
/// Picks the VAPID "sub" claim to sign with: the configured subject, else the
/// site URL (only when it is an http(s) URL), else a placeholder mailto.
fn effective_vapid_subject(settings: &site_settings::Model, site_url: Option<&str>) -> String {
    if let Some(subject) = vapid_subject(settings) {
        return subject;
    }
    if let Some(url) = site_url {
        let trimmed = url.trim();
        if trimmed.starts_with("http://") || trimmed.starts_with("https://") {
            return trimmed.to_string();
        }
    }
    "mailto:noreply@example.com".to_string()
}
/// True when a VAPID public key is available from settings or the environment.
pub fn public_key_configured(settings: &site_settings::Model) -> bool {
    public_key(settings).is_some()
}
/// True when a VAPID private key is available from settings or the environment.
pub fn private_key_configured(settings: &site_settings::Model) -> bool {
    private_key(settings).is_some()
}
/// Web push is effectively enabled only when the feature flag is on AND both
/// VAPID keys are configured.
pub fn is_enabled(settings: &site_settings::Model) -> bool {
    if settings.web_push_enabled != Some(true) {
        return false;
    }
    public_key_configured(settings) && private_key_configured(settings)
}
/// Extracts the browser `PushSubscription` stored under the `"subscription"`
/// key of the channel metadata and decodes it into a `SubscriptionInfo`.
///
/// Errors with a `BadRequest` when the key is missing or the value does not
/// match the expected subscription shape.
pub fn subscription_info_from_metadata(metadata: Option<&Value>) -> Result<SubscriptionInfo> {
    let object = metadata.and_then(Value::as_object);
    let subscription = object
        .and_then(|map| map.get("subscription"))
        .ok_or_else(|| Error::BadRequest("browser push metadata 缺少 subscription".to_string()))?;
    serde_json::from_value::<SubscriptionInfo>(subscription.clone())
        .map_err(|_| Error::BadRequest("browser push metadata 非法".to_string()))
}
/// Signs and sends one web-push message to a single browser subscription.
///
/// The payload is encrypted with aes128gcm content encoding and signed with a
/// VAPID signature built from the configured private key; the "sub" claim is
/// resolved via `effective_vapid_subject`. `ttl` is passed through to the push
/// service, and `urgency` is set only when provided. All failures are mapped
/// to `BadRequest` with the underlying error message attached.
pub async fn send_payload(
    settings: &site_settings::Model,
    subscription_info: &SubscriptionInfo,
    payload: &[u8],
    urgency: Option<Urgency>,
    ttl: u32,
    site_url: Option<&str>,
) -> Result<()> {
    let private_key = private_key(settings)
        .ok_or_else(|| Error::BadRequest("web push VAPID private key 未配置".to_string()))?;
    // The signature builder derives the audience from the subscription endpoint.
    let mut signature_builder = VapidSignatureBuilder::from_base64(&private_key, subscription_info)
        .map_err(|error| Error::BadRequest(format!("web push vapid build failed: {error}")))?;
    signature_builder.add_claim("sub", effective_vapid_subject(settings, site_url));
    let signature = signature_builder
        .build()
        .map_err(|error| Error::BadRequest(format!("web push vapid sign failed: {error}")))?;
    let mut builder = WebPushMessageBuilder::new(subscription_info);
    builder.set_ttl(ttl);
    if let Some(urgency) = urgency {
        builder.set_urgency(urgency);
    }
    builder.set_payload(ContentEncoding::Aes128Gcm, payload);
    builder.set_vapid_signature(signature);
    // NOTE(review): a new client per call — presumably acceptable at current
    // delivery volume; consider sharing one if throughput grows.
    let client = HyperWebPushClient::new();
    let message = builder
        .build()
        .map_err(|error| Error::BadRequest(format!("web push message build failed: {error}")))?;
    client
        .send(message)
        .await
        .map_err(|error| Error::BadRequest(format!("web push send failed: {error}")))?;
    Ok(())
}