feat: ship blog platform admin and deploy stack

This commit is contained in:
2026-03-31 21:48:39 +08:00
parent a9a05aa105
commit 313f174fbc
210 changed files with 25476 additions and 5803 deletions

View File

@@ -25,7 +25,7 @@ use crate::{
ai_chunks, categories, comments, friend_links, posts, reviews, site_settings, tags, users,
},
tasks,
workers::downloader::DownloadWorker,
workers::{downloader::DownloadWorker, notification_delivery::NotificationDeliveryWorker},
};
pub struct App;
@@ -54,16 +54,14 @@ impl Hooks for App {
}
async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
Ok(vec![
Box::new(initializers::content_sync::ContentSyncInitializer),
Box::new(initializers::view_engine::ViewEngineInitializer),
])
Ok(vec![Box::new(initializers::content_sync::ContentSyncInitializer)])
}
fn routes(_ctx: &AppContext) -> AppRoutes {
AppRoutes::with_default_routes() // controller routes below
.add_route(controllers::admin::routes())
.add_route(controllers::health::routes())
.add_route(controllers::admin_api::routes())
.add_route(controllers::admin_ops::routes())
.add_route(controllers::review::routes())
.add_route(controllers::category::routes())
.add_route(controllers::friend_link::routes())
@@ -71,9 +69,11 @@ impl Hooks for App {
.add_route(controllers::comment::routes())
.add_route(controllers::post::routes())
.add_route(controllers::search::routes())
.add_route(controllers::content_analytics::routes())
.add_route(controllers::site_settings::routes())
.add_route(controllers::ai::routes())
.add_route(controllers::auth::routes())
.add_route(controllers::subscription::routes())
}
async fn after_routes(router: AxumRouter, _ctx: &AppContext) -> Result<AxumRouter> {
let cors = CorsLayer::new()
@@ -91,11 +91,15 @@ impl Hooks for App {
}
async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
queue.register(DownloadWorker::build(ctx)).await?;
queue.register(NotificationDeliveryWorker::build(ctx)).await?;
Ok(())
}
#[allow(unused_variables)]
fn register_tasks(tasks: &mut Tasks) {
tasks.register(tasks::retry_deliveries::RetryDeliveries);
tasks.register(tasks::send_weekly_digest::SendWeeklyDigest);
tasks.register(tasks::send_monthly_digest::SendMonthlyDigest);
// tasks-inject (do not remove)
}
async fn seed(ctx: &AppContext, base: &Path) -> Result<()> {

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,455 @@
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, Order, QueryFilter, QueryOrder,
QuerySelect, Set,
};
use serde::{Deserialize, Serialize};
use crate::{
controllers::admin::check_auth,
models::_entities::{
admin_audit_logs, notification_deliveries, post_revisions, subscriptions,
},
services::{admin_audit, post_revisions as revision_service, subscriptions as subscription_service},
};
/// Query-string filters for `GET /api/admin/audit-logs`.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct AuditLogQuery {
    // Exact-match filter on the audit action name; blank values are ignored.
    pub action: Option<String>,
    // Exact-match filter on the audited target type; blank values are ignored.
    pub target_type: Option<String>,
    // Max rows to return; the handler defaults this to 80.
    pub limit: Option<u64>,
}
/// Query-string filters for `GET /api/admin/post-revisions`.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct RevisionQuery {
    // Restrict results to one post's revisions; `None` lists across posts.
    pub slug: Option<String>,
    // Max rows to return; the handler defaults this to 120.
    pub limit: Option<u64>,
}
/// Query-string filters for `GET /api/admin/subscriptions/deliveries`.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct DeliveriesQuery {
    // Max rows to return; the handler defaults this to 80.
    pub limit: Option<u64>,
}
/// JSON body for creating a subscription (camelCase aliases accepted).
#[derive(Clone, Debug, Deserialize)]
pub struct SubscriptionPayload {
    #[serde(alias = "channelType")]
    pub channel_type: String,
    // Delivery address/identifier for the channel; must be non-blank.
    pub target: String,
    #[serde(default, alias = "displayName")]
    pub display_name: Option<String>,
    // Defaults to "active" in the create handler when omitted.
    #[serde(default)]
    pub status: Option<String>,
    #[serde(default)]
    pub filters: Option<serde_json::Value>,
    #[serde(default)]
    pub metadata: Option<serde_json::Value>,
    #[serde(default)]
    pub secret: Option<String>,
    #[serde(default)]
    pub notes: Option<String>,
}
/// JSON body for PATCHing a subscription; only keys present in the body are
/// applied by the update handler.
#[derive(Clone, Debug, Deserialize)]
pub struct SubscriptionUpdatePayload {
    #[serde(default, alias = "channelType")]
    pub channel_type: Option<String>,
    #[serde(default)]
    pub target: Option<String>,
    #[serde(default, alias = "displayName")]
    pub display_name: Option<String>,
    #[serde(default)]
    pub status: Option<String>,
    #[serde(default)]
    pub filters: Option<serde_json::Value>,
    #[serde(default)]
    pub metadata: Option<serde_json::Value>,
    #[serde(default)]
    pub secret: Option<String>,
    #[serde(default)]
    pub notes: Option<String>,
}
/// JSON body for restoring a post revision.
#[derive(Clone, Debug, Deserialize)]
pub struct RestoreRevisionRequest {
    // Restore mode; the handler defaults this to "full".
    #[serde(default)]
    pub mode: Option<String>,
}
/// JSON body for triggering a digest dispatch.
#[derive(Clone, Debug, Deserialize)]
pub struct DigestDispatchRequest {
    // Digest period; the handler defaults this to "weekly".
    pub period: Option<String>,
}
/// Revision row as exposed by the admin list endpoint (markdown body elided).
#[derive(Clone, Debug, Serialize)]
pub struct PostRevisionListItem {
    pub id: i32,
    pub post_slug: String,
    pub post_title: Option<String>,
    pub operation: String,
    pub revision_reason: Option<String>,
    pub actor_username: Option<String>,
    pub actor_email: Option<String>,
    pub actor_source: Option<String>,
    // Formatted with "%Y-%m-%d %H:%M:%S" in `format_revision`.
    pub created_at: String,
    // True when the stored markdown body is non-blank.
    pub has_markdown: bool,
    pub metadata: Option<serde_json::Value>,
}
/// Detail view: the list item fields (flattened) plus the full markdown body.
#[derive(Clone, Debug, Serialize)]
pub struct PostRevisionDetailResponse {
    #[serde(flatten)]
    pub item: PostRevisionListItem,
    pub markdown: Option<String>,
}
/// Response for a successful revision restore.
#[derive(Clone, Debug, Serialize)]
pub struct RestoreRevisionResponse {
    pub restored: bool,
    pub revision_id: i32,
    pub post_slug: String,
    pub mode: String,
}
/// Wrapper for the subscription list endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct SubscriptionListResponse {
    pub subscriptions: Vec<subscriptions::Model>,
}
/// Wrapper for the recent-deliveries endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct DeliveryListResponse {
    pub deliveries: Vec<notification_deliveries::Model>,
}
/// Trims an optional string, mapping absent or blank input to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let raw = value?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Projects a `post_revisions::Model` into the list-item DTO returned by the
/// admin revision endpoints; the markdown body is elided and only a presence
/// flag is kept.
fn format_revision(item: post_revisions::Model) -> PostRevisionListItem {
    // A revision "has markdown" only when the stored body is non-blank.
    let has_markdown = item
        .markdown
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .is_some();
    PostRevisionListItem {
        id: item.id,
        post_slug: item.post_slug,
        post_title: item.post_title,
        operation: item.operation,
        revision_reason: item.revision_reason,
        actor_username: item.actor_username,
        actor_email: item.actor_email,
        actor_source: item.actor_source,
        created_at: item.created_at.format("%Y-%m-%d %H:%M:%S").to_string(),
        has_markdown,
        metadata: item.metadata,
    }
}
/// Admin-only: lists audit log rows, newest first, with optional exact-match
/// filters on action and target type and a default page size of 80.
#[debug_handler]
pub async fn list_audit_logs(
    headers: HeaderMap,
    Query(query): Query<AuditLogQuery>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let mut finder = admin_audit_logs::Entity::find()
        .order_by(admin_audit_logs::Column::CreatedAt, Order::Desc);
    // Blank filter values are treated as "no filter".
    let cleaned =
        |raw: Option<String>| raw.map(|v| v.trim().to_string()).filter(|v| !v.is_empty());
    if let Some(action) = cleaned(query.action) {
        finder = finder.filter(admin_audit_logs::Column::Action.eq(action));
    }
    if let Some(target_type) = cleaned(query.target_type) {
        finder = finder.filter(admin_audit_logs::Column::TargetType.eq(target_type));
    }
    format::json(finder.limit(query.limit.unwrap_or(80)).all(&ctx.db).await?)
}
#[debug_handler]
pub async fn list_post_revisions(
headers: HeaderMap,
Query(query): Query<RevisionQuery>,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
let items = revision_service::list_revisions(&ctx, query.slug.as_deref(), query.limit.unwrap_or(120)).await?;
format::json(items.into_iter().map(format_revision).collect::<Vec<_>>())
}
/// Admin-only: returns one revision in detail form (list fields plus the full
/// markdown body).
#[debug_handler]
pub async fn get_post_revision(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let revision = revision_service::get_revision(&ctx, id).await?;
    // Keep the markdown body aside; format_revision only records its presence.
    let markdown = revision.markdown.clone();
    format::json(PostRevisionDetailResponse {
        item: format_revision(revision),
        markdown,
    })
}
/// Admin-only: restores a post to the state captured in revision `id`.
///
/// The optional `mode` (default "full") is passed through to the revision
/// service untranslated. Successful restores are recorded in the audit log.
#[debug_handler]
pub async fn restore_post_revision(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(payload): Json<RestoreRevisionRequest>,
) -> Result<Response> {
    let actor = check_auth(&headers)?;
    // Default restore mode when the client omits it.
    let mode = payload.mode.unwrap_or_else(|| "full".to_string());
    let restored =
        revision_service::restore_revision(&ctx, Some(&actor), id, &mode).await?;
    // Audit only after the restore succeeds; the detail JSON keeps the
    // originating revision id and mode for traceability.
    admin_audit::log_event(
        &ctx,
        Some(&actor),
        "post.revision.restore",
        "post_revision",
        Some(restored.id.to_string()),
        Some(restored.post_slug.clone()),
        Some(serde_json::json!({
            "post_slug": restored.post_slug,
            "source_revision_id": id,
            "mode": mode,
        })),
    )
    .await?;
    format::json(RestoreRevisionResponse {
        restored: true,
        revision_id: id,
        post_slug: restored.post_slug,
        mode,
    })
}
/// Admin-only: returns every subscription (no channel/status filtering).
#[debug_handler]
pub async fn list_subscriptions(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let subscriptions = subscription_service::list_subscriptions(&ctx, None, None).await?;
    format::json(SubscriptionListResponse { subscriptions })
}
/// Admin-only: lists recent notification deliveries, capped at the requested
/// limit (default 80).
#[debug_handler]
pub async fn list_subscription_deliveries(
    headers: HeaderMap,
    Query(query): Query<DeliveriesQuery>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let limit = query.limit.unwrap_or(80);
    let deliveries = subscription_service::list_recent_deliveries(&ctx, limit).await?;
    format::json(DeliveryListResponse { deliveries })
}
/// Admin-only: creates a subscription directly.
///
/// Admin-created rows are effectively pre-verified: no confirm token is
/// issued, `verified_at` is stamped immediately, and a manage token is
/// generated so the recipient can still self-service later.
#[debug_handler]
pub async fn create_subscription(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<SubscriptionPayload>,
) -> Result<Response> {
    let actor = check_auth(&headers)?;
    let channel_type = subscription_service::normalize_channel_type(&payload.channel_type);
    // Target is mandatory and must be non-blank after trimming.
    let target = payload.target.trim().to_string();
    if target.is_empty() {
        return Err(Error::BadRequest("target 不能为空".to_string()));
    }
    let created = subscriptions::ActiveModel {
        channel_type: Set(channel_type.clone()),
        target: Set(target.clone()),
        display_name: Set(trim_to_option(payload.display_name)),
        // Missing status falls back to "active" before normalization.
        status: Set(subscription_service::normalize_status(payload.status.as_deref().unwrap_or("active"))),
        filters: Set(subscription_service::normalize_filters(payload.filters)),
        metadata: Set(payload.metadata),
        secret: Set(trim_to_option(payload.secret)),
        notes: Set(trim_to_option(payload.notes)),
        confirm_token: Set(None),
        manage_token: Set(Some(subscription_service::generate_subscription_token())),
        verified_at: Set(Some(chrono::Utc::now().to_rfc3339())),
        failure_count: Set(Some(0)),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    // Audit with a "<channel>:<target>" label for quick scanning.
    admin_audit::log_event(
        &ctx,
        Some(&actor),
        "subscription.create",
        "subscription",
        Some(created.id.to_string()),
        Some(format!("{}:{}", created.channel_type, created.target)),
        Some(serde_json::json!({ "channel_type": created.channel_type, "target": created.target })),
    )
    .await?;
    format::json(created)
}
/// Admin-only: partially updates a subscription.
///
/// Only keys present in the JSON body are applied. For optional-valued fields
/// a present key replaces the stored value (blank strings normalize to NULL
/// via `trim_to_option`). The target may be changed but never blanked.
#[debug_handler]
pub async fn update_subscription(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(payload): Json<SubscriptionUpdatePayload>,
) -> Result<Response> {
    let actor = check_auth(&headers)?;
    let item = subscriptions::Entity::find_by_id(id)
        .one(&ctx.db)
        .await?
        .ok_or(Error::NotFound)?;
    let mut active = item.clone().into_active_model();
    if let Some(channel_type) = payload.channel_type {
        active.channel_type = Set(subscription_service::normalize_channel_type(&channel_type));
    }
    if let Some(target) = payload.target {
        // Reject attempts to blank the delivery target.
        let normalized_target = target.trim().to_string();
        if normalized_target.is_empty() {
            return Err(Error::BadRequest("target 不能为空".to_string()));
        }
        active.target = Set(normalized_target);
    }
    if payload.display_name.is_some() {
        active.display_name = Set(trim_to_option(payload.display_name));
    }
    if let Some(status) = payload.status {
        active.status = Set(subscription_service::normalize_status(&status));
    }
    if payload.filters.is_some() {
        active.filters = Set(subscription_service::normalize_filters(payload.filters));
    }
    if payload.metadata.is_some() {
        active.metadata = Set(payload.metadata);
    }
    if payload.secret.is_some() {
        active.secret = Set(trim_to_option(payload.secret));
    }
    if payload.notes.is_some() {
        active.notes = Set(trim_to_option(payload.notes));
    }
    let updated = active.update(&ctx.db).await?;
    // Audit the update; no detail payload is recorded for this action.
    admin_audit::log_event(
        &ctx,
        Some(&actor),
        "subscription.update",
        "subscription",
        Some(updated.id.to_string()),
        Some(format!("{}:{}", updated.channel_type, updated.target)),
        None,
    )
    .await?;
    format::json(updated)
}
/// Admin-only: deletes a subscription and audits the removal.
#[debug_handler]
pub async fn delete_subscription(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    let actor = check_auth(&headers)?;
    let item = subscriptions::Entity::find_by_id(id)
        .one(&ctx.db)
        .await?
        .ok_or(Error::NotFound)?;
    // Capture the audit label before the row is deleted.
    let label = format!("{}:{}", item.channel_type, item.target);
    item.delete(&ctx.db).await?;
    admin_audit::log_event(
        &ctx,
        Some(&actor),
        "subscription.delete",
        "subscription",
        Some(id.to_string()),
        Some(label),
        None,
    )
    .await?;
    format::empty()
}
/// Admin-only: queues a test notification for one subscription and audits the
/// attempt. The response includes the created delivery's id for follow-up.
#[debug_handler]
pub async fn test_subscription(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    let actor = check_auth(&headers)?;
    let item = subscriptions::Entity::find_by_id(id)
        .one(&ctx.db)
        .await?
        .ok_or(Error::NotFound)?;
    let delivery = subscription_service::send_test_notification(&ctx, &item).await?;
    admin_audit::log_event(
        &ctx,
        Some(&actor),
        "subscription.test",
        "subscription",
        Some(item.id.to_string()),
        Some(format!("{}:{}", item.channel_type, item.target)),
        None,
    )
    .await?;
    format::json(serde_json::json!({ "queued": true, "id": item.id, "delivery_id": delivery.id }))
}
/// Admin-only: triggers a digest send for the given period (default "weekly")
/// and audits the dispatch summary (post count, queued, skipped).
#[debug_handler]
pub async fn send_subscription_digest(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<DigestDispatchRequest>,
) -> Result<Response> {
    let actor = check_auth(&headers)?;
    let summary = subscription_service::send_digest(&ctx, payload.period.as_deref().unwrap_or("weekly")).await?;
    admin_audit::log_event(
        &ctx,
        Some(&actor),
        "subscription.digest.send",
        "subscription_digest",
        None,
        Some(summary.period.clone()),
        Some(serde_json::json!({
            "period": summary.period,
            "post_count": summary.post_count,
            "queued": summary.queued,
            "skipped": summary.skipped,
        })),
    )
    .await?;
    format::json(summary)
}
/// Route table for the admin operational endpoints, all under `/api/admin`.
/// The literal `/subscriptions/...` paths are registered before the `{id}`
/// parameterized routes.
pub fn routes() -> Routes {
    Routes::new()
        .prefix("/api/admin")
        .add("/audit-logs", get(list_audit_logs))
        .add("/post-revisions", get(list_post_revisions))
        .add("/post-revisions/{id}", get(get_post_revision))
        .add("/post-revisions/{id}/restore", post(restore_post_revision))
        .add("/subscriptions", get(list_subscriptions).post(create_subscription))
        .add("/subscriptions/deliveries", get(list_subscription_deliveries))
        .add("/subscriptions/digest", post(send_subscription_digest))
        .add("/subscriptions/{id}", patch(update_subscription).delete(delete_subscription))
        .add("/subscriptions/{id}/test", post(test_subscription))
}

View File

@@ -16,7 +16,7 @@ use std::time::Instant;
use crate::{
controllers::{admin::check_auth, site_settings},
services::{ai, analytics},
services::{abuse_guard, ai, analytics},
};
#[derive(Clone, Debug, Deserialize)]
@@ -212,6 +212,11 @@ pub async fn ask(
let started_at = Instant::now();
let question = payload.question.trim().to_string();
let (provider, chat_model) = current_provider_metadata(&ctx).await;
abuse_guard::enforce_public_scope(
"ai_ask",
abuse_guard::detect_client_ip(&headers).as_deref(),
Some(&question),
)?;
match ai::answer_question(&ctx, &payload.question).await {
Ok(result) => {
@@ -263,6 +268,11 @@ pub async fn ask_stream(
let request_headers = headers.clone();
let question = payload.question.trim().to_string();
let (fallback_provider, fallback_chat_model) = current_provider_metadata(&ctx).await;
abuse_guard::enforce_public_scope(
"ai_stream",
abuse_guard::detect_client_ip(&headers).as_deref(),
Some(&question),
)?;
let stream = stream! {
let started_at = Instant::now();
@@ -503,8 +513,8 @@ pub async fn ask_stream(
}
#[debug_handler]
pub async fn reindex(State(ctx): State<AppContext>) -> Result<Response> {
check_auth()?;
pub async fn reindex(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
check_auth(&headers)?;
let summary = ai::rebuild_index(&ctx).await?;
format::json(ReindexResponse {

View File

@@ -5,11 +5,23 @@ use loco_rs::prelude::*;
use sea_orm::{ColumnTrait, QueryFilter, QueryOrder};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::net::SocketAddr;
use axum::{
extract::{rejection::ExtensionRejection, ConnectInfo},
http::{header, HeaderMap},
};
use crate::models::_entities::{
comments::{ActiveModel, Column, Entity, Model},
posts,
};
use crate::services::{
admin_audit,
comment_guard::{self, CommentGuardInput},
notifications,
};
use crate::controllers::admin::check_auth;
const ARTICLE_SCOPE: &str = "article";
const PARAGRAPH_SCOPE: &str = "paragraph";
@@ -106,6 +118,12 @@ pub struct CreateCommentRequest {
pub paragraph_excerpt: Option<String>,
#[serde(default)]
pub approved: Option<bool>,
#[serde(default, alias = "captchaToken")]
pub captcha_token: Option<String>,
#[serde(default, alias = "captchaAnswer")]
pub captcha_answer: Option<String>,
#[serde(default)]
pub website: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
@@ -125,6 +143,50 @@ fn normalize_optional_string(value: Option<String>) -> Option<String> {
})
}
/// Trims `value`; returns `None` when absent or blank, otherwise the first
/// `max_chars` characters (char-based, so multi-byte text is never split
/// mid-character).
fn normalize_with_limit(value: Option<&str>, max_chars: usize) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.chars().take(max_chars).collect())
    }
}
/// Returns the header's value as `&str`, or `None` when the header is absent
/// or its bytes are not valid visible ASCII for `to_str`.
fn header_value<'a>(headers: &'a HeaderMap, key: header::HeaderName) -> Option<&'a str> {
    headers.get(key).and_then(|value| value.to_str().ok())
}
/// Picks the first non-blank entry of a comma-separated `X-Forwarded-For`
/// style header value.
fn first_forwarded_ip(value: &str) -> Option<&str> {
    for part in value.split(',') {
        let candidate = part.trim();
        if !candidate.is_empty() {
            return Some(candidate);
        }
    }
    None
}
/// Best-effort client IP detection for comment handling.
///
/// Precedence: `x-forwarded-for` (first entry) > `x-real-ip` >
/// `cf-connecting-ip` > `true-client-ip` > the socket peer address. The
/// result is trimmed and capped at 96 characters.
fn detect_client_ip(
    headers: &HeaderMap,
    connect_info: Option<&ConnectInfo<SocketAddr>>,
) -> Option<String> {
    let socket_ip = connect_info.map(|info| info.0.ip().to_string());
    let candidate = header_value(headers, header::HeaderName::from_static("x-forwarded-for"))
        .and_then(first_forwarded_ip)
        .or_else(|| header_value(headers, header::HeaderName::from_static("x-real-ip")))
        .or_else(|| header_value(headers, header::HeaderName::from_static("cf-connecting-ip")))
        .or_else(|| header_value(headers, header::HeaderName::from_static("true-client-ip")))
        .or(socket_ip.as_deref());
    normalize_with_limit(candidate, 96)
}
fn normalized_scope(value: Option<String>) -> Result<String> {
match value
.unwrap_or_else(|| ARTICLE_SCOPE.to_string())
@@ -171,7 +233,12 @@ async fn resolve_post_slug(ctx: &AppContext, raw: &str) -> Result<Option<String>
pub async fn list(
Query(query): Query<ListQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
if query.approved != Some(true) {
check_auth(&headers)?;
}
let mut db_query = Entity::find().order_by_asc(Column::CreatedAt);
let post_slug = if let Some(post_slug) = query.post_slug {
@@ -252,9 +319,22 @@ pub async fn paragraph_summary(
format::json(summary)
}
/// Public endpoint: issues a CAPTCHA challenge for the comment form, handing
/// the guard the best-effort client IP so challenges can be scoped per
/// address.
#[debug_handler]
pub async fn captcha_challenge(
    headers: HeaderMap,
    // ConnectInfo extraction may fail (e.g. when not configured); in that
    // case IP detection degrades to headers only.
    connect_info: Result<ConnectInfo<SocketAddr>, ExtensionRejection>,
) -> Result<Response> {
    let ip_address = detect_client_ip(&headers, connect_info.as_ref().ok());
    format::json(comment_guard::create_captcha_challenge(
        ip_address.as_deref(),
    )?)
}
#[debug_handler]
pub async fn add(
State(ctx): State<AppContext>,
headers: HeaderMap,
connect_info: Result<ConnectInfo<SocketAddr>, ExtensionRejection>,
Json(params): Json<CreateCommentRequest>,
) -> Result<Response> {
let scope = normalized_scope(params.scope.clone())?;
@@ -271,6 +351,9 @@ pub async fn add(
let email = normalize_optional_string(params.email);
let avatar = normalize_optional_string(params.avatar);
let content = normalize_optional_string(params.content);
let ip_address = detect_client_ip(&headers, connect_info.as_ref().ok());
let user_agent = normalize_with_limit(header_value(&headers, header::USER_AGENT), 512);
let referer = normalize_with_limit(header_value(&headers, header::REFERER), 1024);
let paragraph_key = normalize_optional_string(params.paragraph_key);
let paragraph_excerpt = normalize_optional_string(params.paragraph_excerpt)
.or_else(|| content.as_deref().and_then(preview_excerpt));
@@ -291,6 +374,21 @@ pub async fn add(
return Err(Error::BadRequest("paragraph_key is required".to_string()));
}
comment_guard::enforce_comment_guard(
&ctx,
&CommentGuardInput {
ip_address: ip_address.as_deref(),
email: email.as_deref(),
user_agent: user_agent.as_deref(),
author: author.as_deref(),
content: content.as_deref(),
honeypot_website: params.website.as_deref(),
captcha_token: params.captcha_token.as_deref(),
captcha_answer: params.captcha_answer.as_deref(),
},
)
.await?;
let mut item = ActiveModel {
..Default::default()
};
@@ -302,6 +400,9 @@ pub async fn add(
item.author = Set(author);
item.email = Set(email);
item.avatar = Set(avatar);
item.ip_address = Set(ip_address);
item.user_agent = Set(user_agent);
item.referer = Set(referer);
item.content = Set(content);
item.scope = Set(scope);
item.paragraph_key = Set(paragraph_key);
@@ -313,36 +414,72 @@ pub async fn add(
item.reply_to_comment_id = Set(params.reply_to_comment_id);
item.approved = Set(Some(params.approved.unwrap_or(false)));
let item = item.insert(&ctx.db).await?;
notifications::notify_new_comment(&ctx, &item).await;
format::json(item)
}
/// Admin-only: applies `Params` to an existing comment and records the change
/// (including the resulting approval state) in the audit log.
#[debug_handler]
pub async fn update(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(params): Json<Params>,
) -> Result<Response> {
    let actor = check_auth(&headers)?;
    let item = load_item(&ctx, id).await?;
    let mut item = item.into_active_model();
    params.update(&mut item);
    let item = item.update(&ctx.db).await?;
    // Audit with the post slug as the label.
    admin_audit::log_event(
        &ctx,
        Some(&actor),
        "comment.update",
        "comment",
        Some(item.id.to_string()),
        item.post_slug.clone(),
        Some(serde_json::json!({ "approved": item.approved })),
    )
    .await?;
    format::json(item)
}
#[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
load_item(&ctx, id).await?.delete(&ctx.db).await?;
pub async fn remove(
headers: HeaderMap,
Path(id): Path<i32>,
State(ctx): State<AppContext>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let item = load_item(&ctx, id).await?;
let label = item.post_slug.clone();
item.delete(&ctx.db).await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"comment.delete",
"comment",
Some(id.to_string()),
label,
None,
)
.await?;
format::empty()
}
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
pub async fn get_one(
headers: HeaderMap,
Path(id): Path<i32>,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
format::json(load_item(&ctx, id).await?)
}
pub fn routes() -> Routes {
Routes::new()
.prefix("api/comments/")
.add("captcha", get(captcha_challenge))
.add("/", get(list))
.add("paragraphs/summary", get(paragraph_summary))
.add("/", post(add))

View File

@@ -0,0 +1,68 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::services::analytics;
/// Client-submitted analytics event (JSON body of `POST /api/analytics/content`).
#[derive(Clone, Debug, Deserialize)]
pub struct ContentAnalyticsEventPayload {
    pub event_type: String,
    // Page path the event happened on.
    pub path: String,
    #[serde(default)]
    pub post_slug: Option<String>,
    #[serde(default)]
    pub session_id: Option<String>,
    #[serde(default)]
    pub duration_ms: Option<i32>,
    #[serde(default)]
    pub progress_percent: Option<i32>,
    #[serde(default)]
    pub metadata: Option<Value>,
    // Optional explicit referrer; when non-blank it overrides the
    // header-derived referrer in the request context.
    #[serde(default)]
    pub referrer: Option<String>,
}
/// Ack body; `recorded` is always true — ingestion is best effort.
#[derive(Clone, Debug, Serialize)]
pub struct ContentAnalyticsEventResponse {
    pub recorded: bool,
}
/// Records a single client-side content analytics event. The endpoint always
/// acknowledges success; the recording call's outcome is not propagated.
#[debug_handler]
pub async fn record(
    State(ctx): State<AppContext>,
    headers: HeaderMap,
    Json(payload): Json<ContentAnalyticsEventPayload>,
) -> Result<Response> {
    let mut request_context =
        analytics::content_request_context_from_headers(&payload.path, &headers);
    // An explicit, non-blank payload referrer wins over the header-derived one.
    let referrer_present = payload
        .referrer
        .as_deref()
        .map(str::trim)
        .map(|value| !value.is_empty())
        .unwrap_or(false);
    if referrer_present {
        request_context.referrer = payload.referrer;
    }
    let draft = analytics::ContentEventDraft {
        event_type: payload.event_type,
        path: payload.path,
        post_slug: payload.post_slug,
        session_id: payload.session_id,
        request_context,
        duration_ms: payload.duration_ms,
        progress_percent: payload.progress_percent,
        metadata: payload.metadata,
    };
    analytics::record_content_event(&ctx, draft).await;
    format::json(ContentAnalyticsEventResponse { recorded: true })
}
/// Public analytics ingestion route: `POST /api/analytics/content`.
pub fn routes() -> Routes {
    Routes::new()
        .prefix("api/analytics/")
        .add("content", post(record))
}

View File

@@ -1,11 +1,14 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sea_orm::{ColumnTrait, QueryFilter, QueryOrder};
use serde::{Deserialize, Serialize};
use crate::controllers::admin::check_auth;
use crate::models::_entities::friend_links::{ActiveModel, Column, Entity, Model};
use crate::services::{admin_audit, notifications};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Params {
@@ -69,11 +72,15 @@ async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
pub async fn list(
Query(query): Query<ListQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
let authenticated = check_auth(&headers).ok();
let mut db_query = Entity::find().order_by_desc(Column::CreatedAt);
if let Some(status) = query.status {
db_query = db_query.filter(Column::Status.eq(status));
} else if authenticated.is_none() {
db_query = db_query.filter(Column::Status.eq("approved"));
}
if let Some(category) = query.category {
@@ -98,30 +105,65 @@ pub async fn add(
item.category = Set(params.category);
item.status = Set(Some(params.status.unwrap_or_else(|| "pending".to_string())));
let item = item.insert(&ctx.db).await?;
notifications::notify_new_friend_link(&ctx, &item).await;
format::json(item)
}
/// Admin-only: applies `Params` to an existing friend link and audits the
/// change; the audit label prefers the site name, falling back to the URL.
#[debug_handler]
pub async fn update(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(params): Json<Params>,
) -> Result<Response> {
    let actor = check_auth(&headers)?;
    let item = load_item(&ctx, id).await?;
    let mut item = item.into_active_model();
    params.update(&mut item);
    let item = item.update(&ctx.db).await?;
    admin_audit::log_event(
        &ctx,
        Some(&actor),
        "friend_link.update",
        "friend_link",
        Some(item.id.to_string()),
        item.site_name.clone().or_else(|| Some(item.site_url.clone())),
        Some(serde_json::json!({ "status": item.status })),
    )
    .await?;
    format::json(item)
}
#[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
load_item(&ctx, id).await?.delete(&ctx.db).await?;
pub async fn remove(
headers: HeaderMap,
Path(id): Path<i32>,
State(ctx): State<AppContext>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let item = load_item(&ctx, id).await?;
let label = item.site_name.clone().or_else(|| Some(item.site_url.clone()));
item.delete(&ctx.db).await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"friend_link.delete",
"friend_link",
Some(id.to_string()),
label,
None,
)
.await?;
format::empty()
}
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
pub async fn get_one(
headers: HeaderMap,
Path(id): Path<i32>,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
format::json(load_item(&ctx, id).await?)
}

View File

@@ -0,0 +1,13 @@
use loco_rs::prelude::*;
/// Liveness probe: always answers `{"ok": true, "service": "backend"}`.
#[debug_handler]
pub async fn healthz() -> Result<Response> {
    let body = serde_json::json!({
        "ok": true,
        "service": "backend",
    });
    format::json(body)
}
/// Unauthenticated health-check route (`GET /healthz`).
pub fn routes() -> Routes {
    Routes::new().add("/healthz", get(healthz))
}

View File

@@ -1,12 +1,16 @@
// Controller modules, kept in alphabetical order.
pub mod admin;
pub mod admin_api;
pub mod admin_ops;
pub mod ai;
pub mod auth;
pub mod category;
pub mod comment;
pub mod content_analytics;
pub mod friend_link;
pub mod health;
pub mod post;
pub mod review;
pub mod search;
pub mod site_settings;
pub mod subscription;
pub mod tag;

View File

@@ -1,13 +1,312 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use axum::extract::Multipart;
use std::collections::HashSet;
use axum::{extract::Multipart, http::HeaderMap};
use chrono::{TimeZone, Utc};
use loco_rs::prelude::*;
use sea_orm::QueryOrder;
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize};
use crate::models::_entities::posts::{ActiveModel, Column, Entity, Model};
use crate::services::content;
use crate::{
controllers::admin::check_auth,
services::{admin_audit, content, post_revisions, subscriptions},
};
/// Deserializes an optional boolean that may arrive as a string flag:
/// "1"/"true"/"yes"/"on" => true, "0"/"false"/"no"/"off" => false
/// (case-insensitive, surrounding whitespace ignored); anything else errors.
fn deserialize_boolish_option<'de, D>(
    deserializer: D,
) -> std::result::Result<Option<bool>, D::Error>
where
    D: Deserializer<'de>,
{
    let raw = Option::<String>::deserialize(deserializer)?;
    let Some(value) = raw else {
        return Ok(None);
    };
    match value.trim().to_ascii_lowercase().as_str() {
        "1" | "true" | "yes" | "on" => Ok(Some(true)),
        "0" | "false" | "no" | "off" => Ok(Some(false)),
        other => Err(serde::de::Error::custom(format!(
            "invalid boolean value `{other}`"
        ))),
    }
}
/// Canonicalizes a slug for lookup: strips surrounding whitespace and any
/// leading/trailing `/` separators.
fn normalize_slug_key(value: &str) -> String {
    let without_space = value.trim();
    without_space.trim_matches('/').to_owned()
}
/// True when the caller asked for preview mode, either via the query flag or
/// an `x-termi-post-mode: preview` header (case-insensitive value).
fn request_preview_mode(preview: Option<bool>, headers: &HeaderMap) -> bool {
    if preview.unwrap_or(false) {
        return true;
    }
    headers
        .get("x-termi-post-mode")
        .and_then(|value| value.to_str().ok())
        .map(|value| value.eq_ignore_ascii_case("preview"))
        .unwrap_or(false)
}
/// Resolves the status a write request intends: an explicit `status` wins
/// (normalized by the content service); otherwise `published == Some(false)`
/// maps to draft and anything else defaults to published.
fn requested_status(status: Option<String>, published: Option<bool>) -> String {
    if let Some(status) = status.as_deref() {
        return content::normalize_post_status(Some(status));
    }
    if published == Some(false) {
        content::POST_STATUS_DRAFT.to_string()
    } else {
        content::POST_STATUS_PUBLISHED.to_string()
    }
}
/// Thin wrapper over the content service's visibility normalization.
fn normalize_visibility(value: Option<String>) -> String {
    content::normalize_post_visibility(value.as_deref())
}
/// Case-insensitive membership test for `wanted_tag` inside the post's JSON
/// `tags` array; non-array or missing tags mean "no match".
fn post_has_tag(post: &Model, wanted_tag: &str) -> bool {
    let wanted = wanted_tag.trim().to_lowercase();
    match post.tags.as_ref().and_then(|value| value.as_array()) {
        Some(tags) => tags
            .iter()
            .filter_map(|tag| tag.as_str())
            .any(|tag| tag.trim().to_lowercase() == wanted),
        None => false,
    }
}
/// Computes the post's effective status "now": the stored status (defaulting
/// to published) combined with the publish/unpublish schedule via the content
/// service.
fn effective_status(post: &Model) -> String {
    content::effective_post_state(
        post.status.as_deref().unwrap_or(content::POST_STATUS_PUBLISHED),
        post.publish_at,
        post.unpublish_at,
        Utc::now().fixed_offset(),
    )
}
/// True when the post should appear in public listings at this moment
/// (delegates to the content service).
fn listed_publicly(post: &Model) -> bool {
    content::is_post_listed_publicly(post, Utc::now().fixed_offset())
}
/// True when the post is publicly accessible right now (delegates to the
/// content service's accessibility rules).
fn publicly_accessible(post: &Model) -> bool {
    content::is_post_publicly_accessible(post, Utc::now().fixed_offset())
}
/// Parses an optional front-matter datetime. Accepts full RFC 3339 timestamps
/// or bare `YYYY-MM-DD` dates (interpreted as midnight at UTC offset zero);
/// absent, blank, or unparseable values yield `None`.
fn parse_optional_markdown_datetime(
    value: Option<&str>,
) -> Option<chrono::DateTime<chrono::FixedOffset>> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        return None;
    }
    if let Ok(parsed) = chrono::DateTime::parse_from_rfc3339(trimmed) {
        return Some(parsed);
    }
    // Fallback: date-only form pinned to 00:00:00 +00:00.
    let date = chrono::NaiveDate::parse_from_str(trimmed, "%Y-%m-%d").ok()?;
    let naive = date.and_hms_opt(0, 0, 0)?;
    chrono::FixedOffset::east_opt(0)?
        .from_local_datetime(&naive)
        .single()
}
/// Listing check for markdown-sourced posts: the effective state (using
/// front-matter publish/unpublish timestamps) must be published AND the
/// visibility must be public.
fn markdown_post_listed_publicly(post: &content::MarkdownPost) -> bool {
    content::effective_post_state(
        &post.status,
        parse_optional_markdown_datetime(post.publish_at.as_deref()),
        parse_optional_markdown_datetime(post.unpublish_at.as_deref()),
        Utc::now().fixed_offset(),
    ) == content::POST_STATUS_PUBLISHED
        && post.visibility == content::POST_VISIBILITY_PUBLIC
}
/// Applies every list-endpoint filter to a single post.
///
/// `preview` bypasses the public-listing gate (admin preview);
/// `include_private` / `include_redirects` control whether private posts and
/// redirect stubs survive filtering. Returns `true` when the post belongs in
/// the list response.
fn should_include_post(
    post: &Model,
    query: &ListQuery,
    preview: bool,
    include_private: bool,
    include_redirects: bool,
) -> bool {
    // Outside preview mode, only publicly-listed posts are visible at all.
    if !preview && !listed_publicly(post) {
        return false;
    }
    // `listed_only` defaults to true for public requests, false in preview.
    if query.listed_only.unwrap_or(!preview) && !listed_publicly(post) {
        return false;
    }
    if !include_private
        && content::normalize_post_visibility(post.visibility.as_deref())
            == content::POST_VISIBILITY_PRIVATE
    {
        return false;
    }
    // A non-blank redirect_to marks a redirect stub, not a real post.
    if !include_redirects
        && post
            .redirect_to
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .is_some()
    {
        return false;
    }
    if let Some(slug) = &query.slug {
        if post.slug != *slug {
            return false;
        }
    }
    if let Some(category) = &query.category {
        if post
            .category
            .as_deref()
            .map(|value| !value.eq_ignore_ascii_case(category))
            .unwrap_or(true)
        {
            return false;
        }
    }
    if let Some(post_type) = &query.post_type {
        if post
            .post_type
            .as_deref()
            .map(|value| !value.eq_ignore_ascii_case(post_type))
            .unwrap_or(true)
        {
            return false;
        }
    }
    if let Some(pinned) = query.pinned {
        if post.pinned.unwrap_or(false) != pinned {
            return false;
        }
    }
    if let Some(tag) = &query.tag {
        if !post_has_tag(post, tag) {
            return false;
        }
    }
    if let Some(status) = &query.status {
        // Hoisted: effective_status(post) was previously recomputed for each
        // half of the comparison. Accept either the normalized or the raw
        // lowercase form of the requested status.
        let actual = effective_status(post);
        if actual != content::normalize_post_status(Some(status))
            && actual != status.trim().to_ascii_lowercase()
        {
            return false;
        }
    }
    if let Some(visibility) = &query.visibility {
        if content::normalize_post_visibility(post.visibility.as_deref())
            != content::normalize_post_visibility(Some(visibility))
        {
            return false;
        }
    }
    if let Some(search) = &query.search {
        // Case-insensitive substring search over title, description, body,
        // category, slug, and (separately) each tag.
        let wanted = search.trim().to_lowercase();
        let haystack = [
            post.title.as_deref().unwrap_or_default(),
            post.description.as_deref().unwrap_or_default(),
            post.content.as_deref().unwrap_or_default(),
            post.category.as_deref().unwrap_or_default(),
            &post.slug,
        ]
        .join("\n")
        .to_lowercase();
        if !haystack.contains(&wanted)
            && !post
                .tags
                .as_ref()
                .and_then(|value| value.as_array())
                .map(|tags| {
                    tags.iter()
                        .filter_map(|tag| tag.as_str())
                        .any(|tag| tag.to_lowercase().contains(&wanted))
                })
                .unwrap_or(false)
        {
            return false;
        }
    }
    true
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
let item = Entity::find_by_id(id).one(&ctx.db).await?;
item.ok_or(Error::NotFound)
}
/// Looks up a post by its exact slug. `Ok(None)` means no such post exists;
/// database errors are converted into the app error type via `?`.
async fn load_item_by_slug_once(ctx: &AppContext, slug: &str) -> Result<Option<Model>> {
    let found = Entity::find()
        .filter(Column::Slug.eq(slug))
        .one(&ctx.db)
        .await?;
    Ok(found)
}
/// Resolves `slug` to a post, following `redirect_to` chains and
/// `redirect_from` aliases until it lands on a concrete post.
///
/// Returns `Error::NotFound` for an empty slug, a dangling redirect, or a
/// redirect cycle (detected via the `visited` set).
async fn resolve_post_by_slug(ctx: &AppContext, slug: &str) -> Result<Model> {
    let mut current_slug = normalize_slug_key(slug);
    if current_slug.is_empty() {
        return Err(Error::NotFound);
    }
    let mut visited = HashSet::new();
    loop {
        // Seeing the same slug twice means a redirect cycle — bail out.
        if !visited.insert(current_slug.clone()) {
            return Err(Error::NotFound);
        }
        if let Some(post) = load_item_by_slug_once(ctx, &current_slug).await? {
            // A non-empty redirect target that differs from the post's own
            // slug sends us around the loop again; otherwise this post is it.
            let next_slug = post
                .redirect_to
                .as_deref()
                .map(normalize_slug_key)
                .filter(|value| !value.is_empty() && *value != post.slug);
            if let Some(next_slug) = next_slug {
                current_slug = next_slug;
                continue;
            }
            return Ok(post);
        }
        // No direct slug match: look for a post whose redirect_from aliases
        // include the current slug. NOTE(review): this loads every post per
        // redirect hop — fine for a small blog, worth indexing if it grows.
        let candidates = Entity::find().all(&ctx.db).await?;
        let Some(candidate) = candidates.into_iter().find(|item| {
            content::post_redirects_from_json(&item.redirect_from)
                .into_iter()
                .any(|redirect| redirect.eq_ignore_ascii_case(&current_slug))
        }) else {
            return Err(Error::NotFound);
        };
        // The alias owner may itself redirect elsewhere; default to its own
        // slug, which terminates the loop below.
        let next_slug = candidate
            .redirect_to
            .as_deref()
            .map(normalize_slug_key)
            .filter(|value| !value.is_empty())
            .unwrap_or_else(|| candidate.slug.clone());
        if next_slug == candidate.slug {
            return Ok(candidate);
        }
        current_slug = next_slug;
    }
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Params {
@@ -21,6 +320,15 @@ pub struct Params {
pub image: Option<String>,
pub images: Option<serde_json::Value>,
pub pinned: Option<bool>,
pub status: Option<String>,
pub visibility: Option<String>,
pub publish_at: Option<String>,
pub unpublish_at: Option<String>,
pub canonical_url: Option<String>,
pub noindex: Option<bool>,
pub og_image: Option<String>,
pub redirect_from: Option<serde_json::Value>,
pub redirect_to: Option<String>,
}
impl Params {
@@ -35,6 +343,27 @@ impl Params {
item.image = Set(self.image.clone());
item.images = Set(self.images.clone());
item.pinned = Set(self.pinned);
item.status = Set(self.status.clone().map(|value| requested_status(Some(value), None)));
item.visibility = Set(
self.visibility
.clone()
.map(|value| normalize_visibility(Some(value))),
);
item.publish_at = Set(
self.publish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()),
);
item.unpublish_at = Set(
self.unpublish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()),
);
item.canonical_url = Set(self.canonical_url.clone());
item.noindex = Set(self.noindex);
item.og_image = Set(self.og_image.clone());
item.redirect_from = Set(self.redirect_from.clone());
item.redirect_to = Set(self.redirect_to.clone());
}
}
@@ -47,6 +376,24 @@ pub struct ListQuery {
#[serde(alias = "type")]
pub post_type: Option<String>,
pub pinned: Option<bool>,
pub status: Option<String>,
pub visibility: Option<String>,
#[serde(default, deserialize_with = "deserialize_boolish_option")]
pub listed_only: Option<bool>,
#[serde(default, deserialize_with = "deserialize_boolish_option")]
pub include_private: Option<bool>,
#[serde(default, deserialize_with = "deserialize_boolish_option")]
pub include_redirects: Option<bool>,
#[serde(default, deserialize_with = "deserialize_boolish_option")]
pub preview: Option<bool>,
}
/// Query-string options for the single-post lookup endpoints.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct LookupQuery {
    /// Enables preview mode so not-yet-public posts become visible.
    #[serde(default, deserialize_with = "deserialize_boolish_option")]
    pub preview: Option<bool>,
    /// Whether private posts may be returned; handlers only honor this in
    /// preview mode and default it to true there.
    #[serde(default, deserialize_with = "deserialize_boolish_option")]
    pub include_private: Option<bool>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -66,6 +413,15 @@ pub struct MarkdownCreateParams {
pub image: Option<String>,
pub images: Option<Vec<String>>,
pub pinned: Option<bool>,
pub status: Option<String>,
pub visibility: Option<String>,
pub publish_at: Option<String>,
pub unpublish_at: Option<String>,
pub canonical_url: Option<String>,
pub noindex: Option<bool>,
pub og_image: Option<String>,
pub redirect_from: Option<Vec<String>>,
pub redirect_to: Option<String>,
pub published: Option<bool>,
}
@@ -88,174 +444,211 @@ pub struct MarkdownImportResponse {
pub slugs: Vec<String>,
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
let item = Entity::find_by_id(id).one(&ctx.db).await?;
item.ok_or_else(|| Error::NotFound)
}
async fn load_item_by_slug(ctx: &AppContext, slug: &str) -> Result<Model> {
let item = Entity::find()
.filter(Column::Slug.eq(slug))
.one(&ctx.db)
.await?;
item.ok_or_else(|| Error::NotFound)
}
/// True when any of the post's JSON tags equals `wanted_tag` after trimming
/// and lower-casing both sides.
fn post_has_tag(post: &Model, wanted_tag: &str) -> bool {
    let needle = wanted_tag.trim().to_lowercase();
    let Some(tags) = post.tags.as_ref().and_then(|value| value.as_array()) else {
        return false;
    };
    tags.iter()
        .filter_map(|tag| tag.as_str())
        .any(|tag| tag.trim().to_lowercase() == needle)
}
#[debug_handler]
pub async fn list(
Query(query): Query<ListQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
let preview = request_preview_mode(query.preview, &headers);
let include_private = preview && query.include_private.unwrap_or(true);
let include_redirects = query.include_redirects.unwrap_or(preview);
let posts = Entity::find()
.order_by_desc(Column::CreatedAt)
.all(&ctx.db)
.await?;
let filtered: Vec<Model> = posts
let filtered = posts
.into_iter()
.filter(|post| {
if let Some(slug) = &query.slug {
if post.slug != *slug {
return false;
}
}
if let Some(category) = &query.category {
if post
.category
.as_deref()
.map(|value| !value.eq_ignore_ascii_case(category))
.unwrap_or(true)
{
return false;
}
}
if let Some(post_type) = &query.post_type {
if post
.post_type
.as_deref()
.map(|value| !value.eq_ignore_ascii_case(post_type))
.unwrap_or(true)
{
return false;
}
}
if let Some(pinned) = query.pinned {
if post.pinned.unwrap_or(false) != pinned {
return false;
}
}
if let Some(tag) = &query.tag {
if !post_has_tag(post, tag) {
return false;
}
}
if let Some(search) = &query.search {
let wanted = search.trim().to_lowercase();
let haystack = [
post.title.as_deref().unwrap_or_default(),
post.description.as_deref().unwrap_or_default(),
post.content.as_deref().unwrap_or_default(),
post.category.as_deref().unwrap_or_default(),
&post.slug,
]
.join("\n")
.to_lowercase();
if !haystack.contains(&wanted)
&& !post
.tags
.as_ref()
.and_then(|value| value.as_array())
.map(|tags| {
tags.iter()
.filter_map(|tag| tag.as_str())
.any(|tag| tag.to_lowercase().contains(&wanted))
})
.unwrap_or(false)
{
return false;
}
}
true
})
.collect();
.filter(|post| should_include_post(post, &query, preview, include_private, include_redirects))
.collect::<Vec<_>>();
format::json(filtered)
}
#[debug_handler]
pub async fn add(State(ctx): State<AppContext>, Json(params): Json<Params>) -> Result<Response> {
pub async fn add(
headers: HeaderMap,
State(ctx): State<AppContext>,
Json(params): Json<Params>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let mut item = ActiveModel {
..Default::default()
};
params.update(&mut item);
let item = item.insert(&ctx.db).await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"post.create",
"post",
Some(item.id.to_string()),
Some(item.slug.clone()),
None,
)
.await?;
format::json(item)
}
#[debug_handler]
pub async fn update(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(params): Json<Params>,
) -> Result<Response> {
    let actor = check_auth(&headers)?;
    // Load once; the duplicate pre-refactor `load_item` lines were removed.
    let previous = load_item(&ctx, id).await?;
    // Remember the pre-update public state so subscribers are only notified
    // when the post transitions from hidden to publicly listed.
    let was_public = content::is_post_listed_publicly(&previous, Utc::now().fixed_offset());
    let previous_slug = previous.slug.clone();
    let mut item = previous.into_active_model();
    params.update(&mut item);
    let item = item.update(&ctx.db).await?;
    let is_public = content::is_post_listed_publicly(&item, Utc::now().fixed_offset());
    admin_audit::log_event(
        &ctx,
        Some(&actor),
        "post.update",
        "post",
        Some(item.id.to_string()),
        Some(item.slug.clone()),
        Some(serde_json::json!({
            "previous_slug": previous_slug,
            "published": is_public,
        })),
    )
    .await?;
    if is_public && !was_public {
        // Build the markdown-post representation the subscription notifier
        // expects from the freshly updated row.
        let post = content::MarkdownPost {
            title: item.title.clone().unwrap_or_else(|| item.slug.clone()),
            slug: item.slug.clone(),
            description: item.description.clone(),
            content: item.content.clone().unwrap_or_default(),
            category: item.category.clone(),
            tags: item
                .tags
                .as_ref()
                .and_then(|value| value.as_array())
                .cloned()
                .unwrap_or_default()
                .into_iter()
                .filter_map(|tag| tag.as_str().map(ToString::to_string))
                .collect(),
            post_type: item.post_type.clone().unwrap_or_else(|| "article".to_string()),
            image: item.image.clone(),
            images: item
                .images
                .as_ref()
                .and_then(|value| value.as_array())
                .cloned()
                .unwrap_or_default()
                .into_iter()
                .filter_map(|tag| tag.as_str().map(ToString::to_string))
                .collect(),
            pinned: item.pinned.unwrap_or(false),
            status: item.status.clone().unwrap_or_else(|| content::POST_STATUS_PUBLISHED.to_string()),
            visibility: item
                .visibility
                .clone()
                .unwrap_or_else(|| content::POST_VISIBILITY_PUBLIC.to_string()),
            publish_at: item.publish_at.map(|value| value.to_rfc3339()),
            unpublish_at: item.unpublish_at.map(|value| value.to_rfc3339()),
            canonical_url: item.canonical_url.clone(),
            noindex: item.noindex.unwrap_or(false),
            og_image: item.og_image.clone(),
            redirect_from: content::post_redirects_from_json(&item.redirect_from),
            redirect_to: item.redirect_to.clone(),
            file_path: content::markdown_post_path(&item.slug)
                .to_string_lossy()
                .to_string(),
        };
        // Best-effort: a notification failure must not fail the update.
        let _ = subscriptions::notify_post_published(&ctx, &post).await;
    }
    format::json(item)
}
#[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
load_item(&ctx, id).await?.delete(&ctx.db).await?;
pub async fn remove(
headers: HeaderMap,
Path(id): Path<i32>,
State(ctx): State<AppContext>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let item = load_item(&ctx, id).await?;
let slug = item.slug.clone();
item.delete(&ctx.db).await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"post.delete",
"post",
Some(id.to_string()),
Some(slug),
None,
)
.await?;
format::empty()
}
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
pub async fn get_one(
Path(id): Path<i32>,
Query(query): Query<LookupQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
format::json(load_item(&ctx, id).await?)
let preview = request_preview_mode(query.preview, &headers);
let post = load_item(&ctx, id).await?;
if !preview && !publicly_accessible(&post) {
return Err(Error::NotFound);
}
format::json(post)
}
#[debug_handler]
pub async fn get_by_slug(
    Path(slug): Path<String>,
    Query(query): Query<LookupQuery>,
    State(ctx): State<AppContext>,
    headers: HeaderMap,
) -> Result<Response> {
    // Make sure the DB mirrors the markdown sources before resolving.
    content::sync_markdown_posts(&ctx).await?;
    let preview = request_preview_mode(query.preview, &headers);
    // Private posts are only reachable in preview mode (default: included there).
    let include_private = preview && query.include_private.unwrap_or(true);
    let post = resolve_post_by_slug(&ctx, &slug).await?;
    // Outside preview mode, hidden posts must look like they don't exist.
    if !preview && !publicly_accessible(&post) {
        return Err(Error::NotFound);
    }
    // Private posts stay hidden unless preview explicitly allows them.
    if !include_private
        && content::normalize_post_visibility(post.visibility.as_deref())
            == content::POST_VISIBILITY_PRIVATE
    {
        return Err(Error::NotFound);
    }
    format::json(post)
}
#[debug_handler]
pub async fn get_markdown_by_slug(
headers: HeaderMap,
Path(slug): Path<String>,
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
content::sync_markdown_posts(&ctx).await?;
let (path, markdown) = content::read_markdown_document(&slug)?;
format::json(MarkdownDocumentResponse {
@@ -267,12 +660,43 @@ pub async fn get_markdown_by_slug(
#[debug_handler]
pub async fn update_markdown_by_slug(
headers: HeaderMap,
Path(slug): Path<String>,
State(ctx): State<AppContext>,
Json(params): Json<MarkdownUpdateParams>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let _ = post_revisions::capture_current_snapshot(
&ctx,
Some(&actor),
&slug,
"update",
Some("保存文章前的自动快照"),
None,
)
.await?;
let updated = content::write_markdown_document(&ctx, &slug, &params.markdown).await?;
let (path, markdown) = content::read_markdown_document(&updated.slug)?;
let _ = post_revisions::capture_snapshot_from_markdown(
&ctx,
Some(&actor),
&updated.slug,
&markdown,
"saved",
Some("保存后的当前版本"),
None,
)
.await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"post.markdown.update",
"post",
None,
Some(updated.slug.clone()),
None,
)
.await?;
format::json(MarkdownDocumentResponse {
slug: updated.slug,
@@ -283,9 +707,11 @@ pub async fn update_markdown_by_slug(
#[debug_handler]
pub async fn create_markdown(
headers: HeaderMap,
State(ctx): State<AppContext>,
Json(params): Json<MarkdownCreateParams>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let title = params.title.trim();
if title.is_empty() {
return Err(Error::BadRequest("title is required".to_string()));
@@ -305,11 +731,42 @@ pub async fn create_markdown(
image: params.image,
images: params.images.unwrap_or_default(),
pinned: params.pinned.unwrap_or(false),
published: params.published.unwrap_or(true),
status: requested_status(params.status, params.published),
visibility: normalize_visibility(params.visibility),
publish_at: params.publish_at,
unpublish_at: params.unpublish_at,
canonical_url: params.canonical_url,
noindex: params.noindex.unwrap_or(false),
og_image: params.og_image,
redirect_from: params.redirect_from.unwrap_or_default(),
redirect_to: params.redirect_to,
},
)
.await?;
let (path, markdown) = content::read_markdown_document(&created.slug)?;
let _ = post_revisions::capture_snapshot_from_markdown(
&ctx,
Some(&actor),
&created.slug,
&markdown,
"create",
Some("新建文章"),
None,
)
.await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"post.markdown.create",
"post",
None,
Some(created.slug.clone()),
None,
)
.await?;
if markdown_post_listed_publicly(&created) {
let _ = subscriptions::notify_post_published(&ctx, &created).await;
}
format::json(MarkdownDocumentResponse {
slug: created.slug,
@@ -320,9 +777,11 @@ pub async fn create_markdown(
#[debug_handler]
pub async fn import_markdown(
headers: HeaderMap,
State(ctx): State<AppContext>,
mut multipart: Multipart,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let mut files = Vec::new();
while let Some(field) = multipart
@@ -345,6 +804,35 @@ pub async fn import_markdown(
}
let imported = content::import_markdown_documents(&ctx, files).await?;
for item in &imported {
if let Ok((_path, markdown)) = content::read_markdown_document(&item.slug) {
let _ = post_revisions::capture_snapshot_from_markdown(
&ctx,
Some(&actor),
&item.slug,
&markdown,
"import",
Some("批量导入 Markdown"),
None,
)
.await;
}
if markdown_post_listed_publicly(item) {
let _ = subscriptions::notify_post_published(&ctx, item).await;
}
}
admin_audit::log_event(
&ctx,
Some(&actor),
"post.markdown.import",
"post_import",
None,
Some(format!("{} files", imported.len())),
Some(serde_json::json!({
"slugs": imported.iter().map(|item| item.slug.clone()).collect::<Vec<_>>(),
})),
)
.await?;
format::json(MarkdownImportResponse {
count: imported.len(),
@@ -354,10 +842,31 @@ pub async fn import_markdown(
#[debug_handler]
pub async fn delete_markdown_by_slug(
headers: HeaderMap,
Path(slug): Path<String>,
State(ctx): State<AppContext>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let _ = post_revisions::capture_current_snapshot(
&ctx,
Some(&actor),
&slug,
"delete",
Some("删除前自动快照"),
None,
)
.await?;
content::delete_markdown_post(&ctx, &slug).await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"post.markdown.delete",
"post",
None,
Some(slug.clone()),
None,
)
.await?;
format::json(MarkdownDeleteResponse {
slug,
deleted: true,

View File

@@ -1,11 +1,15 @@
use axum::extract::{Path, State};
use axum::{
extract::{Path, State},
http::HeaderMap,
};
use loco_rs::prelude::*;
use sea_orm::{EntityTrait, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use crate::{
controllers::admin::check_auth,
models::_entities::reviews::{self, Entity as ReviewEntity},
services::storage,
services::{admin_audit, storage},
};
#[derive(Serialize, Deserialize, Debug)]
@@ -56,9 +60,11 @@ pub async fn get_one(
}
pub async fn create(
headers: HeaderMap,
State(ctx): State<AppContext>,
Json(req): Json<CreateReviewRequest>,
) -> Result<impl IntoResponse> {
let actor = check_auth(&headers)?;
let new_review = reviews::ActiveModel {
title: Set(Some(req.title)),
review_type: Set(Some(req.review_type)),
@@ -76,14 +82,26 @@ pub async fn create(
};
let review = new_review.insert(&ctx.db).await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"review.create",
"review",
Some(review.id.to_string()),
review.title.clone(),
None,
)
.await?;
format::json(review)
}
pub async fn update(
headers: HeaderMap,
Path(id): Path<i32>,
State(ctx): State<AppContext>,
Json(req): Json<UpdateReviewRequest>,
) -> Result<impl IntoResponse> {
let actor = check_auth(&headers)?;
let review = ReviewEntity::find_by_id(id).one(&ctx.db).await?;
let Some(existing_review) = review else {
@@ -132,24 +150,47 @@ pub async fn update(
tracing::warn!("failed to cleanup replaced review cover: {error}");
}
}
admin_audit::log_event(
&ctx,
Some(&actor),
"review.update",
"review",
Some(review.id.to_string()),
review.title.clone(),
None,
)
.await?;
format::json(review)
}
pub async fn remove(
headers: HeaderMap,
Path(id): Path<i32>,
State(ctx): State<AppContext>,
) -> Result<impl IntoResponse> {
let actor = check_auth(&headers)?;
let review = ReviewEntity::find_by_id(id).one(&ctx.db).await?;
match review {
Some(r) => {
let cover = r.cover.clone();
let title = r.title.clone();
r.delete(&ctx.db).await?;
if let Some(cover) = cover.filter(|value| !value.trim().is_empty()) {
if let Err(error) = storage::delete_managed_url(&ctx, &cover).await {
tracing::warn!("failed to cleanup deleted review cover: {error}");
}
}
admin_audit::log_event(
&ctx,
Some(&actor),
"review.delete",
"review",
Some(id.to_string()),
title,
None,
)
.await?;
format::empty()
}
None => Err(Error::NotFound),

View File

@@ -1,12 +1,14 @@
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sea_orm::{ConnectionTrait, DatabaseBackend, DbBackend, FromQueryResult, Statement};
use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value;
use std::time::Instant;
use std::{collections::HashSet, time::Instant};
use crate::models::_entities::posts;
use crate::services::{analytics, content};
use crate::{
controllers::site_settings,
models::_entities::posts,
services::{abuse_guard, analytics, content},
};
fn deserialize_boolish_option<'de, D>(
deserializer: D,
@@ -26,6 +28,243 @@ where
.transpose()
}
/// Collapses every run of whitespace to a single space and lower-cases
/// ASCII letters (non-ASCII characters are left untouched).
///
/// `split_whitespace` already drops leading/trailing whitespace and empty
/// tokens, so the joined string never needs the extra `trim` the previous
/// version applied.
fn normalize_text(value: &str) -> String {
    value
        .split_whitespace()
        .collect::<Vec<_>>()
        .join(" ")
        .to_ascii_lowercase()
}
/// Splits `value` into lower-cased tokens. Tokens keep alphanumerics plus
/// `-` and `_`; everything else is a separator, and tokens that normalize
/// to nothing are dropped.
fn tokenize(value: &str) -> Vec<String> {
    let is_separator = |ch: char| !ch.is_alphanumeric() && ch != '-' && ch != '_';
    value
        .split(is_separator)
        .map(normalize_text)
        .filter(|token| !token.is_empty())
        .collect()
}
/// Computes the Levenshtein edit distance between `left` and `right`,
/// counting Unicode scalar values (chars), not bytes.
///
/// Uses the classic single-row dynamic program: the row is updated in place
/// with one saved diagonal value, so no per-row `Vec` is allocated (the old
/// version allocated a fresh `curr` row for every character of `left`).
fn levenshtein_distance(left: &str, right: &str) -> usize {
    if left == right {
        return 0;
    }
    if left.is_empty() {
        return right.chars().count();
    }
    if right.is_empty() {
        return left.chars().count();
    }
    let right_chars = right.chars().collect::<Vec<_>>();
    // row[j] holds the distance between the processed prefix of `left`
    // and the first j chars of `right`.
    let mut row = (0..=right_chars.len()).collect::<Vec<_>>();
    for (i, left_ch) in left.chars().enumerate() {
        // `diagonal` is the value from the previous row at column j.
        let mut diagonal = row[0];
        row[0] = i + 1;
        for (j, right_ch) in right_chars.iter().enumerate() {
            let cost = usize::from(left_ch != *right_ch);
            let next = (row[j] + 1) // deletion
                .min(row[j + 1] + 1) // insertion
                .min(diagonal + cost); // substitution / match
            diagonal = row[j + 1];
            row[j + 1] = next;
        }
    }
    row[right_chars.len()]
}
fn parse_synonym_groups(value: &Option<Value>) -> Vec<Vec<String>> {
value
.as_ref()
.and_then(Value::as_array)
.cloned()
.unwrap_or_default()
.into_iter()
.filter_map(|item| item.as_str().map(ToString::to_string))
.map(|item| {
let normalized = item.replace("=>", ",").replace('|', ",");
normalized
.split([',', ''])
.map(normalize_text)
.filter(|token| !token.is_empty())
.collect::<Vec<_>>()
})
.filter(|group| !group.is_empty())
.collect()
}
/// Expands a raw query into a deduplicated term list: the normalized whole
/// query first, then its tokens, then every synonym group the query matches
/// (by whole-query equality, token equality, or substring containment).
fn expand_search_terms(query: &str, synonym_groups: &[Vec<String>]) -> Vec<String> {
    // Push `term` if it has not been emitted yet, preserving first-seen order.
    fn push_unique(term: &str, seen: &mut HashSet<String>, out: &mut Vec<String>) {
        if seen.insert(term.to_string()) {
            out.push(term.to_string());
        }
    }

    let normalized_query = normalize_text(query);
    let query_tokens = tokenize(query);
    let mut seen = HashSet::new();
    let mut expanded = Vec::new();
    if !normalized_query.is_empty() {
        push_unique(&normalized_query, &mut seen, &mut expanded);
    }
    for token in &query_tokens {
        push_unique(token, &mut seen, &mut expanded);
    }
    for group in synonym_groups {
        let matched = group.iter().any(|item| {
            *item == normalized_query
                || query_tokens.iter().any(|token| token == item)
                || normalized_query.contains(item)
        });
        if !matched {
            continue;
        }
        for token in group {
            push_unique(token, &mut seen, &mut expanded);
        }
    }
    expanded
}
/// Collects unique tokens usable as spelling-correction candidates:
/// title/category/slug tokens of length >= 3, plus tag tokens of length >= 2,
/// in first-seen order.
fn candidate_terms(posts: &[posts::Model]) -> Vec<String> {
    let mut seen = HashSet::new();
    let mut candidates = Vec::new();
    for post in posts {
        let text_sources = [
            post.title.as_deref().unwrap_or_default(),
            post.category.as_deref().unwrap_or_default(),
            &post.slug,
        ];
        for token in text_sources.iter().flat_map(|source| tokenize(source)) {
            if token.len() >= 3 && seen.insert(token.clone()) {
                candidates.push(token);
            }
        }
        // Tags are noisier but shorter, so a lower length threshold applies.
        let tag_tokens = post
            .tags
            .as_ref()
            .and_then(Value::as_array)
            .into_iter()
            .flatten()
            .filter_map(Value::as_str)
            .flat_map(tokenize);
        for token in tag_tokens {
            if token.len() >= 2 && seen.insert(token.clone()) {
                candidates.push(token);
            }
        }
    }
    candidates
}
/// "Did you mean" fallback: when a query matched nothing, finds candidate
/// terms within Levenshtein distance 2 of the query's first token, keeps the
/// three nearest (ties broken alphabetically), and expands them through the
/// synonym groups.
fn find_spelling_fallback(
    query: &str,
    posts: &[posts::Model],
    synonym_groups: &[Vec<String>],
) -> Vec<String> {
    let primary_token = tokenize(query).into_iter().next().unwrap_or_default();
    // Very short tokens produce too many false neighbors — skip them.
    if primary_token.len() < 3 {
        return Vec::new();
    }
    let mut nearest = candidate_terms(posts)
        .into_iter()
        .filter_map(|candidate| {
            let distance = levenshtein_distance(&primary_token, &candidate);
            (distance <= 2).then_some((candidate, distance))
        })
        .collect::<Vec<_>>();
    nearest.sort_by(|a, b| a.1.cmp(&b.1).then_with(|| a.0.cmp(&b.0)));
    nearest
        .into_iter()
        .take(3)
        .flat_map(|(candidate, _)| expand_search_terms(&candidate, synonym_groups))
        .collect()
}
/// True when any of the post's JSON tags equals `wanted_tag` after both are
/// whitespace-normalized and lower-cased.
fn post_has_tag(post: &posts::Model, wanted_tag: &str) -> bool {
    let wanted = normalize_text(wanted_tag);
    match post.tags.as_ref().and_then(Value::as_array) {
        Some(tags) => tags
            .iter()
            .filter_map(Value::as_str)
            .any(|tag| normalize_text(tag) == wanted),
        None => false,
    }
}
/// Scores a post against the raw query plus its expanded terms; higher is
/// better. Whole-query substring hits outweigh per-term hits, and
/// title/description/slug outweigh category and body text. Field order is
/// kept identical to the original so the floating-point sum is bit-identical.
fn score_post(post: &posts::Model, query: &str, terms: &[String]) -> f64 {
    let normalized_query = normalize_text(query);
    let title = normalize_text(post.title.as_deref().unwrap_or_default());
    let description = normalize_text(post.description.as_deref().unwrap_or_default());
    let content_text = normalize_text(post.content.as_deref().unwrap_or_default());
    let category = normalize_text(post.category.as_deref().unwrap_or_default());
    let slug = normalize_text(&post.slug);
    let tags = post
        .tags
        .as_ref()
        .and_then(Value::as_array)
        .cloned()
        .unwrap_or_default()
        .into_iter()
        .filter_map(|item| item.as_str().map(normalize_text))
        .collect::<Vec<_>>();
    let mut score = 0.0;
    // Whole-query substring matches, weighted by field importance.
    if !normalized_query.is_empty() {
        let weighted_fields = [
            (&title, 6.0),
            (&description, 4.0),
            (&slug, 4.0),
            (&category, 3.0),
        ];
        for (field, weight) in weighted_fields {
            if field.contains(&normalized_query) {
                score += weight;
            }
        }
        if tags.iter().any(|tag| tag.contains(&normalized_query)) {
            score += 4.0;
        }
        if content_text.contains(&normalized_query) {
            score += 2.0;
        }
    }
    // Per-term matches carry smaller weights; exact tag hits beat partial ones.
    for term in terms {
        if term.is_empty() {
            continue;
        }
        let weighted_fields = [
            (&title, 3.5),
            (&description, 2.2),
            (&slug, 2.0),
            (&category, 1.8),
        ];
        for (field, weight) in weighted_fields {
            if field.contains(term) {
                score += weight;
            }
        }
        if tags.iter().any(|tag| tag == term) {
            score += 2.5;
        } else if tags.iter().any(|tag| tag.contains(term)) {
            score += 1.5;
        }
        if content_text.contains(term) {
            score += 0.8;
        }
    }
    score
}
fn is_preview_search(query: &SearchQuery, headers: &HeaderMap) -> bool {
query.preview.unwrap_or(false)
|| headers
@@ -39,11 +278,15 @@ fn is_preview_search(query: &SearchQuery, headers: &HeaderMap) -> bool {
pub struct SearchQuery {
pub q: Option<String>,
pub limit: Option<u64>,
pub category: Option<String>,
pub tag: Option<String>,
#[serde(alias = "type")]
pub post_type: Option<String>,
#[serde(default, deserialize_with = "deserialize_boolish_option")]
pub preview: Option<bool>,
}
#[derive(Clone, Debug, Serialize, FromQueryResult)]
#[derive(Clone, Debug, Serialize)]
pub struct SearchResult {
pub id: i32,
pub title: Option<String>,
@@ -59,131 +302,6 @@ pub struct SearchResult {
pub rank: f64,
}
/// Postgres full-text search statement used by the primary search path.
///
/// Fields are weighted title (A) > description (B) > category/tags (C) >
/// content (D) and ranked with `ts_rank_cd`. `$1` is the user query (parsed
/// via `plainto_tsquery` with the `simple` config, i.e. no stemming) and
/// `$2` caps the number of rows returned.
fn search_sql() -> &'static str {
    r#"
    SELECT
        p.id,
        p.title,
        p.slug,
        p.description,
        p.content,
        p.category,
        p.tags,
        p.post_type,
        p.pinned,
        p.created_at,
        p.updated_at,
        ts_rank_cd(
            setweight(to_tsvector('simple', coalesce(p.title, '')), 'A') ||
            setweight(to_tsvector('simple', coalesce(p.description, '')), 'B') ||
            setweight(to_tsvector('simple', coalesce(p.category, '')), 'C') ||
            setweight(to_tsvector('simple', coalesce(p.tags::text, '')), 'C') ||
            setweight(to_tsvector('simple', coalesce(p.content, '')), 'D'),
            plainto_tsquery('simple', $1)
        )::float8 AS rank
    FROM posts p
    WHERE (
        setweight(to_tsvector('simple', coalesce(p.title, '')), 'A') ||
        setweight(to_tsvector('simple', coalesce(p.description, '')), 'B') ||
        setweight(to_tsvector('simple', coalesce(p.category, '')), 'C') ||
        setweight(to_tsvector('simple', coalesce(p.tags::text, '')), 'C') ||
        setweight(to_tsvector('simple', coalesce(p.content, '')), 'D')
    ) @@ plainto_tsquery('simple', $1)
    ORDER BY rank DESC, p.created_at DESC
    LIMIT $2
    "#
}
/// Substring-based relevance rank for the non-Postgres fallback path:
/// title 4.0, description 2.5, content 1.0, category 1.5, any tag 2.0.
fn app_level_rank(post: &posts::Model, wanted: &str) -> f64 {
    let wanted_lower = wanted.to_lowercase();
    // Case-insensitive containment check for an optional text field.
    let contains_wanted = |field: Option<&str>| {
        field
            .unwrap_or_default()
            .to_lowercase()
            .contains(&wanted_lower)
    };
    let mut rank = 0.0;
    if contains_wanted(post.title.as_deref()) {
        rank += 4.0;
    }
    if contains_wanted(post.description.as_deref()) {
        rank += 2.5;
    }
    if contains_wanted(post.content.as_deref()) {
        rank += 1.0;
    }
    if contains_wanted(post.category.as_deref()) {
        rank += 1.5;
    }
    let tag_hit = post
        .tags
        .as_ref()
        .and_then(Value::as_array)
        .map(|tags| {
            tags.iter()
                .filter_map(Value::as_str)
                .any(|tag| tag.to_lowercase().contains(&wanted_lower))
        })
        .unwrap_or(false);
    if tag_hit {
        rank += 2.0;
    }
    rank
}
/// In-memory search fallback: orders all posts newest-first, keeps those
/// with a positive substring rank, and returns at most `limit` of them.
/// Note the rank only filters — result order stays newest-first.
async fn fallback_search(ctx: &AppContext, q: &str, limit: u64) -> Result<Vec<SearchResult>> {
    let mut posts = posts::Entity::find().all(&ctx.db).await?;
    posts.sort_by(|a, b| b.created_at.cmp(&a.created_at));
    let results = posts
        .into_iter()
        .filter_map(|post| {
            let rank = app_level_rank(&post, q);
            if rank > 0.0 {
                Some((post, rank))
            } else {
                None
            }
        })
        .take(limit as usize)
        .map(|(post, rank)| SearchResult {
            id: post.id,
            title: post.title,
            slug: post.slug,
            description: post.description,
            content: post.content,
            category: post.category,
            tags: post.tags,
            post_type: post.post_type,
            pinned: post.pinned,
            created_at: post.created_at.into(),
            updated_at: post.updated_at.into(),
            rank,
        })
        .collect::<Vec<_>>();
    Ok(results)
}
#[debug_handler]
pub async fn search(
Query(query): Query<SearchQuery>,
@@ -199,26 +317,107 @@ pub async fn search(
return format::json(Vec::<SearchResult>::new());
}
let limit = query.limit.unwrap_or(20).clamp(1, 100);
if !preview_search {
abuse_guard::enforce_public_scope(
"search",
abuse_guard::detect_client_ip(&headers).as_deref(),
Some(&q),
)?;
}
let results = if ctx.db.get_database_backend() == DatabaseBackend::Postgres {
let statement = Statement::from_sql_and_values(
DbBackend::Postgres,
search_sql(),
[q.clone().into(), (limit as i64).into()],
);
let limit = query.limit.unwrap_or(20).clamp(1, 100) as usize;
let settings = site_settings::load_current(&ctx).await.ok();
let synonym_groups = settings
.as_ref()
.map(|item| parse_synonym_groups(&item.search_synonyms))
.unwrap_or_default();
match SearchResult::find_by_statement(statement)
.all(&ctx.db)
.await
{
Ok(rows) if !rows.is_empty() => rows,
Ok(_) => fallback_search(&ctx, &q, limit).await?,
Err(_) => fallback_search(&ctx, &q, limit).await?,
let mut all_posts = posts::Entity::find()
.all(&ctx.db)
.await?
.into_iter()
.filter(|post| {
preview_search
|| content::is_post_listed_publicly(post, chrono::Utc::now().fixed_offset())
})
.collect::<Vec<_>>();
if let Some(category) = query.category.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
all_posts.retain(|post| {
post.category
.as_deref()
.map(|value| value.eq_ignore_ascii_case(category))
.unwrap_or(false)
});
}
if let Some(tag) = query.tag.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
all_posts.retain(|post| post_has_tag(post, tag));
}
if let Some(post_type) = query.post_type.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
all_posts.retain(|post| {
post.post_type
.as_deref()
.map(|value| value.eq_ignore_ascii_case(post_type))
.unwrap_or(false)
});
}
let mut expanded_terms = expand_search_terms(&q, &synonym_groups);
let mut results = all_posts
.iter()
.map(|post| (post, score_post(post, &q, &expanded_terms)))
.filter(|(_, rank)| *rank > 0.0)
.map(|(post, rank)| SearchResult {
id: post.id,
title: post.title.clone(),
slug: post.slug.clone(),
description: post.description.clone(),
content: post.content.clone(),
category: post.category.clone(),
tags: post.tags.clone(),
post_type: post.post_type.clone(),
pinned: post.pinned,
created_at: post.created_at.into(),
updated_at: post.updated_at.into(),
rank,
})
.collect::<Vec<_>>();
if results.is_empty() {
expanded_terms = find_spelling_fallback(&q, &all_posts, &synonym_groups);
if !expanded_terms.is_empty() {
results = all_posts
.iter()
.map(|post| (post, score_post(post, &q, &expanded_terms)))
.filter(|(_, rank)| *rank > 0.0)
.map(|(post, rank)| SearchResult {
id: post.id,
title: post.title.clone(),
slug: post.slug.clone(),
description: post.description.clone(),
content: post.content.clone(),
category: post.category.clone(),
tags: post.tags.clone(),
post_type: post.post_type.clone(),
pinned: post.pinned,
created_at: post.created_at.into(),
updated_at: post.updated_at.into(),
rank,
})
.collect::<Vec<_>>();
}
} else {
fallback_search(&ctx, &q, limit).await?
};
}
results.sort_by(|left, right| {
right
.rank
.partial_cmp(&left.rank)
.unwrap_or(std::cmp::Ordering::Equal)
.then_with(|| right.created_at.cmp(&left.created_at))
});
results.truncate(limit);
if !preview_search {
analytics::record_search_event(

View File

@@ -2,6 +2,7 @@
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sea_orm::{ActiveModelTrait, EntityTrait, IntoActiveModel, QueryOrder, Set};
use serde::{Deserialize, Serialize};
@@ -11,7 +12,9 @@ use uuid::Uuid;
use crate::{
controllers::admin::check_auth,
models::_entities::{
categories, friend_links, posts, site_settings::{self, ActiveModel, Entity, Model}, tags,
categories, friend_links, posts,
site_settings::{self, ActiveModel, Entity, Model},
tags,
},
services::{ai, content},
};
@@ -130,6 +133,18 @@ pub struct SiteSettingsPayload {
pub media_r2_access_key_id: Option<String>,
#[serde(default, alias = "mediaR2SecretAccessKey")]
pub media_r2_secret_access_key: Option<String>,
#[serde(default, alias = "seoDefaultOgImage")]
pub seo_default_og_image: Option<String>,
#[serde(default, alias = "seoDefaultTwitterHandle")]
pub seo_default_twitter_handle: Option<String>,
#[serde(default, alias = "notificationWebhookUrl")]
pub notification_webhook_url: Option<String>,
#[serde(default, alias = "notificationCommentEnabled")]
pub notification_comment_enabled: Option<bool>,
#[serde(default, alias = "notificationFriendLinkEnabled")]
pub notification_friend_link_enabled: Option<bool>,
#[serde(default, alias = "searchSynonyms")]
pub search_synonyms: Option<Vec<String>>,
}
#[derive(Clone, Debug, Serialize)]
@@ -154,6 +169,8 @@ pub struct PublicSiteSettingsResponse {
pub music_playlist: Option<serde_json::Value>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub seo_default_og_image: Option<String>,
pub seo_default_twitter_handle: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
@@ -171,6 +188,9 @@ pub struct HomePageResponse {
pub tags: Vec<tags::Model>,
pub friend_links: Vec<friend_links::Model>,
pub categories: Vec<HomeCategorySummary>,
pub content_overview: crate::services::analytics::ContentAnalyticsOverview,
pub popular_posts: Vec<crate::services::analytics::AnalyticsPopularPost>,
pub content_ranges: Vec<crate::services::analytics::PublicContentWindowHighlights>,
}
fn normalize_optional_string(value: Option<String>) -> Option<String> {
@@ -188,6 +208,13 @@ fn normalize_optional_int(value: Option<i32>, min: i32, max: i32) -> Option<i32>
value.map(|item| item.clamp(min, max))
}
/// Normalizes each entry via `normalize_optional_string`, dropping entries
/// that normalize to `None`.
fn normalize_string_list(values: Vec<String>) -> Vec<String> {
    let mut normalized = Vec::with_capacity(values.len());
    for value in values {
        if let Some(kept) = normalize_optional_string(Some(value)) {
            normalized.push(kept);
        }
    }
    normalized
}
/// Generates a fresh AI-provider identifier of the form `provider-<uuid>`.
fn create_ai_provider_id() -> String {
    let id = Uuid::new_v4().simple();
    format!("provider-{id}")
}
@@ -525,6 +552,27 @@ impl SiteSettingsPayload {
item.media_r2_secret_access_key =
normalize_optional_string(Some(media_r2_secret_access_key));
}
if let Some(seo_default_og_image) = self.seo_default_og_image {
item.seo_default_og_image = normalize_optional_string(Some(seo_default_og_image));
}
if let Some(seo_default_twitter_handle) = self.seo_default_twitter_handle {
item.seo_default_twitter_handle =
normalize_optional_string(Some(seo_default_twitter_handle));
}
if let Some(notification_webhook_url) = self.notification_webhook_url {
item.notification_webhook_url =
normalize_optional_string(Some(notification_webhook_url));
}
if let Some(notification_comment_enabled) = self.notification_comment_enabled {
item.notification_comment_enabled = Some(notification_comment_enabled);
}
if let Some(notification_friend_link_enabled) = self.notification_friend_link_enabled {
item.notification_friend_link_enabled = Some(notification_friend_link_enabled);
}
if let Some(search_synonyms) = self.search_synonyms {
let normalized = normalize_string_list(search_synonyms);
item.search_synonyms = (!normalized.is_empty()).then(|| serde_json::json!(normalized));
}
if provider_list_supplied {
write_ai_provider_state(
@@ -631,6 +679,12 @@ fn default_payload() -> SiteSettingsPayload {
media_r2_public_base_url: None,
media_r2_access_key_id: None,
media_r2_secret_access_key: None,
seo_default_og_image: None,
seo_default_twitter_handle: None,
notification_webhook_url: None,
notification_comment_enabled: Some(false),
notification_friend_link_enabled: Some(false),
search_synonyms: Some(Vec::new()),
}
}
@@ -680,6 +734,8 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
music_playlist: model.music_playlist,
ai_enabled: model.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true),
seo_default_og_image: model.seo_default_og_image,
seo_default_twitter_handle: model.seo_default_twitter_handle,
}
}
@@ -691,9 +747,13 @@ pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
let posts = posts::Entity::find()
.order_by_desc(posts::Column::CreatedAt)
.all(&ctx.db)
.await?;
.await?
.into_iter()
.filter(|post| content::is_post_listed_publicly(post, chrono::Utc::now().fixed_offset()))
.collect::<Vec<_>>();
let tags = tags::Entity::find().all(&ctx.db).await?;
let friend_links = friend_links::Entity::find()
.filter(friend_links::Column::Status.eq("approved"))
.order_by_desc(friend_links::Column::CreatedAt)
.all(&ctx.db)
.await?;
@@ -722,6 +782,9 @@ pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
}
})
.collect::<Vec<_>>();
let content_highlights =
crate::services::analytics::build_public_content_highlights(&ctx, &posts).await?;
let content_ranges = crate::services::analytics::build_public_content_windows(&ctx, &posts).await?;
format::json(HomePageResponse {
site_settings,
@@ -729,6 +792,9 @@ pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
tags,
friend_links,
categories,
content_overview: content_highlights.overview,
popular_posts: content_highlights.popular_posts,
content_ranges,
})
}
@@ -739,10 +805,11 @@ pub async fn show(State(ctx): State<AppContext>) -> Result<Response> {
#[debug_handler]
pub async fn update(
headers: HeaderMap,
State(ctx): State<AppContext>,
Json(params): Json<SiteSettingsPayload>,
) -> Result<Response> {
check_auth()?;
check_auth(&headers)?;
let current = load_current(&ctx).await?;
let mut item = current;

View File

@@ -0,0 +1,202 @@
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use crate::services::{abuse_guard, admin_audit, subscriptions};
/// Request body for the public subscribe endpoint.
#[derive(Clone, Debug, Deserialize)]
pub struct PublicSubscriptionPayload {
    // Subscriber e-mail; the handler trims and lowercases it before use.
    pub email: String,
    // Optional display name (JSON clients may send `displayName`).
    #[serde(default, alias = "displayName")]
    pub display_name: Option<String>,
    // Free-form origin tag recorded in the subscription metadata.
    #[serde(default)]
    pub source: Option<String>,
}

/// Request body carrying a single confirm/unsubscribe token.
#[derive(Clone, Debug, Deserialize)]
pub struct SubscriptionTokenPayload {
    pub token: String,
}

/// Query string for reading a subscription via its manage token.
#[derive(Clone, Debug, Deserialize)]
pub struct SubscriptionManageQuery {
    pub token: String,
}

/// Request body for updating subscription preferences via the manage token.
#[derive(Clone, Debug, Deserialize)]
pub struct SubscriptionManageUpdatePayload {
    pub token: String,
    #[serde(default, alias = "displayName")]
    pub display_name: Option<String>,
    // Desired status, when provided; semantics enforced by the subscriptions service.
    #[serde(default)]
    pub status: Option<String>,
    // Arbitrary JSON filter preferences; schema owned by the subscriptions service.
    #[serde(default)]
    pub filters: Option<serde_json::Value>,
}

/// Response returned by the public subscribe endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct PublicSubscriptionResponse {
    pub ok: bool,
    pub subscription_id: i32,
    pub status: String,
    // True when the subscriber still has to confirm via e-mail.
    pub requires_confirmation: bool,
    pub message: String,
}

/// Response wrapping the public view of one subscription.
#[derive(Clone, Debug, Serialize)]
pub struct SubscriptionManageResponse {
    pub ok: bool,
    pub subscription: subscriptions::PublicSubscriptionView,
}
/// Metadata blob stored on subscriptions created through the public form.
fn public_subscription_metadata(source: Option<String>) -> serde_json::Value {
    // Keys and ordering mirror the original `json!` literal: source, then kind.
    let mut fields = serde_json::Map::new();
    fields.insert("source".to_string(), serde_json::json!(source));
    fields.insert(
        "kind".to_string(),
        serde_json::Value::String("public-form".to_string()),
    );
    serde_json::Value::Object(fields)
}
#[debug_handler]
pub async fn subscribe(
State(ctx): State<AppContext>,
headers: axum::http::HeaderMap,
Json(payload): Json<PublicSubscriptionPayload>,
) -> Result<Response> {
let email = payload.email.trim().to_ascii_lowercase();
abuse_guard::enforce_public_scope(
"subscription",
abuse_guard::detect_client_ip(&headers).as_deref(),
Some(&email),
)?;
let result = subscriptions::create_public_email_subscription(
&ctx,
&email,
payload.display_name,
Some(public_subscription_metadata(payload.source)),
)
.await?;
admin_audit::log_event(
&ctx,
None,
if result.requires_confirmation {
"subscription.public.pending"
} else {
"subscription.public.active"
},
"subscription",
Some(result.subscription.id.to_string()),
Some(result.subscription.target.clone()),
Some(serde_json::json!({
"channel_type": result.subscription.channel_type,
"status": result.subscription.status,
})),
)
.await?;
format::json(PublicSubscriptionResponse {
ok: true,
subscription_id: result.subscription.id,
status: result.subscription.status,
requires_confirmation: result.requires_confirmation,
message: result.message,
})
}
/// Confirms a pending subscription via its confirm token and audits the action.
#[debug_handler]
pub async fn confirm(
    State(ctx): State<AppContext>,
    Json(payload): Json<SubscriptionTokenPayload>,
) -> Result<Response> {
    let confirmed = subscriptions::confirm_subscription(&ctx, &payload.token).await?;
    let audit_metadata = serde_json::json!({ "channel_type": confirmed.channel_type });
    // Anonymous audit entry: confirmation is performed by the subscriber.
    admin_audit::log_event(
        &ctx,
        None,
        "subscription.public.confirm",
        "subscription",
        Some(confirmed.id.to_string()),
        Some(confirmed.target.clone()),
        Some(audit_metadata),
    )
    .await?;
    let body = SubscriptionManageResponse {
        ok: true,
        subscription: subscriptions::to_public_subscription_view(&confirmed),
    };
    format::json(body)
}
/// Returns the public view of the subscription identified by its manage token.
#[debug_handler]
pub async fn manage(
    State(ctx): State<AppContext>,
    Query(query): Query<SubscriptionManageQuery>,
) -> Result<Response> {
    let found = subscriptions::get_subscription_by_manage_token(&ctx, &query.token).await?;
    let view = subscriptions::to_public_subscription_view(&found);
    format::json(SubscriptionManageResponse {
        ok: true,
        subscription: view,
    })
}
#[debug_handler]
pub async fn update_manage(
State(ctx): State<AppContext>,
Json(payload): Json<SubscriptionManageUpdatePayload>,
) -> Result<Response> {
let item = subscriptions::update_subscription_preferences(
&ctx,
&payload.token,
payload.display_name,
payload.status,
payload.filters,
)
.await?;
admin_audit::log_event(
&ctx,
None,
"subscription.public.update",
"subscription",
Some(item.id.to_string()),
Some(item.target.clone()),
None,
)
.await?;
format::json(SubscriptionManageResponse {
ok: true,
subscription: subscriptions::to_public_subscription_view(&item),
})
}
/// Deactivates the subscription matching the supplied token and audits it.
#[debug_handler]
pub async fn unsubscribe(
    State(ctx): State<AppContext>,
    Json(payload): Json<SubscriptionTokenPayload>,
) -> Result<Response> {
    let removed = subscriptions::unsubscribe_subscription(&ctx, &payload.token).await?;
    // Audit entries for public endpoints carry no admin actor.
    admin_audit::log_event(
        &ctx,
        None,
        "subscription.public.unsubscribe",
        "subscription",
        Some(removed.id.to_string()),
        Some(removed.target.clone()),
        None,
    )
    .await?;
    let view = subscriptions::to_public_subscription_view(&removed);
    format::json(SubscriptionManageResponse {
        ok: true,
        subscription: view,
    })
}
/// Public subscription routes, mounted under `/api/subscriptions`.
pub fn routes() -> Routes {
    Routes::new()
        .prefix("/api/subscriptions")
        // Anonymous signup (rate limited inside the handler).
        .add("/", post(subscribe))
        // Double-opt-in confirmation via the e-mailed token.
        .add("/confirm", post(confirm))
        // Token-authenticated read (GET) / preference update (PATCH).
        .add("/manage", get(manage).patch(update_manage))
        .add("/unsubscribe", post(unsubscribe))
}

View File

@@ -1,2 +1 @@
pub mod content_sync;
pub mod view_engine;

View File

@@ -1,43 +0,0 @@
use async_trait::async_trait;
use axum::{Extension, Router as AxumRouter};
use fluent_templates::{ArcLoader, FluentLoader};
use loco_rs::{
app::{AppContext, Initializer},
controller::views::{engines, ViewEngine},
Error, Result,
};
use tracing::info;
const I18N_DIR: &str = "assets/i18n";
const I18N_SHARED: &str = "assets/i18n/shared.ftl";
#[allow(clippy::module_name_repetitions)]
pub struct ViewEngineInitializer;
#[async_trait]
impl Initializer for ViewEngineInitializer {
fn name(&self) -> String {
"view-engine".to_string()
}
async fn after_routes(&self, router: AxumRouter, _ctx: &AppContext) -> Result<AxumRouter> {
let tera_engine = if std::path::Path::new(I18N_DIR).exists() {
let arc = std::sync::Arc::new(
ArcLoader::builder(&I18N_DIR, unic_langid::langid!("en-US"))
.shared_resources(Some(&[I18N_SHARED.into()]))
.customize(|bundle| bundle.set_use_isolating(false))
.build()
.map_err(|e| Error::string(&e.to_string()))?,
);
info!("locales loaded");
engines::TeraView::build()?.post_process(move |tera| {
tera.register_function("t", FluentLoader::new(arc.clone()));
Ok(())
})?
} else {
engines::TeraView::build()?
};
Ok(router.layer(Extension(ViewEngine::from(tera_engine))))
}
}

View File

@@ -1 +1,2 @@
pub mod auth;
pub mod subscription;

View File

@@ -0,0 +1,77 @@
use loco_rs::prelude::*;
use serde_json::json;

// Embedded Tera template directories (compiled into the binary) for the two
// subscription e-mail kinds. SCREAMING_SNAKE_CASE removes the need for the
// previous `#![allow(non_upper_case_globals)]` escape hatch.
static CONFIRM_TEMPLATES: Dir<'_> = include_dir!("src/mailers/subscription/confirm");
static NOTIFICATION_TEMPLATES: Dir<'_> = include_dir!("src/mailers/subscription/notification");

/// Mailer for subscription lifecycle e-mails: double-opt-in confirmation and
/// subsequent content notifications.
pub struct SubscriptionMailer {}
impl Mailer for SubscriptionMailer {}

impl SubscriptionMailer {
    /// Sends the double-opt-in confirmation e-mail to `to`.
    ///
    /// `site_name` defaults to "Termi"; `site_url` falls back to the server's
    /// configured full URL; `manage_url` is rendered only when present.
    pub async fn send_confirmation(
        ctx: &AppContext,
        to: &str,
        site_name: Option<&str>,
        site_url: Option<&str>,
        confirm_url: &str,
        manage_url: Option<&str>,
    ) -> Result<()> {
        Self::mail_template(
            ctx,
            &CONFIRM_TEMPLATES,
            mailer::Args {
                to: to.to_string(),
                locals: json!({
                    "subject": "请确认你的订阅",
                    "siteName": site_name.unwrap_or("Termi"),
                    "siteUrl": site_url
                        .map(ToString::to_string)
                        .unwrap_or_else(|| ctx.config.server.full_url()),
                    "confirmUrl": confirm_url,
                    "manageUrl": manage_url,
                }),
                ..Default::default()
            },
        )
        .await?;
        Ok(())
    }

    /// Sends a generic notification e-mail with caller-provided copy.
    ///
    /// The optional manage/unsubscribe URLs control which footer links the
    /// template renders.
    pub async fn send_notification(
        ctx: &AppContext,
        to: &str,
        subject: &str,
        headline: &str,
        body: &str,
        site_name: Option<&str>,
        site_url: Option<&str>,
        manage_url: Option<&str>,
        unsubscribe_url: Option<&str>,
    ) -> Result<()> {
        Self::mail_template(
            ctx,
            &NOTIFICATION_TEMPLATES,
            mailer::Args {
                to: to.to_string(),
                locals: json!({
                    "subject": subject,
                    "headline": headline,
                    "body": body,
                    "siteName": site_name.unwrap_or("Termi"),
                    "siteUrl": site_url
                        .map(ToString::to_string)
                        .unwrap_or_else(|| ctx.config.server.full_url()),
                    "manageUrl": manage_url,
                    "unsubscribeUrl": unsubscribe_url,
                }),
                ..Default::default()
            },
        )
        .await?;
        Ok(())
    }
}

View File

@@ -0,0 +1,25 @@
<html>
<body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; color: #0f172a; line-height: 1.7;">
<div style="max-width: 640px; margin: 0 auto; padding: 24px;">
<p style="font-size: 12px; letter-spacing: 0.18em; text-transform: uppercase; color: #64748b;">{{ siteName }}</p>
<h1 style="margin-top: 8px; font-size: 24px;">请确认你的订阅</h1>
<p style="margin-top: 20px;">为了确认这是你本人提交的邮箱请点击下面的确认按钮</p>
<p style="margin-top: 24px;">
<a href="{{ confirmUrl }}" style="display: inline-block; padding: 12px 18px; border-radius: 9999px; background: #0f172a; color: #ffffff; text-decoration: none;">确认订阅</a>
</p>
<p style="margin-top: 20px; font-size: 14px; color: #475569; word-break: break-all;">
如果按钮无法点击请直接打开<br />
<a href="{{ confirmUrl }}">{{ confirmUrl }}</a>
</p>
{% if manageUrl %}
<p style="margin-top: 20px; font-size: 14px; color: #475569;">
确认完成后你可以在这里管理偏好<br />
<a href="{{ manageUrl }}">{{ manageUrl }}</a>
</p>
{% endif %}
<p style="margin-top: 28px; font-size: 13px; color: #64748b;">
来自 {{ siteName }} · <a href="{{ siteUrl }}">{{ siteUrl }}</a>
</p>
</div>
</body>
</html>

View File

@@ -0,0 +1 @@
请确认你的订阅

View File

@@ -0,0 +1,13 @@
你好
请点击下面的链接确认你的订阅
{{ confirmUrl }}
{% if manageUrl %}
确认完成后你也可以通过这个链接管理偏好
{{ manageUrl }}
{% endif %}
--
{{ siteName }}
{{ siteUrl }}

View File

@@ -0,0 +1,22 @@
<html>
<body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; color: #0f172a; line-height: 1.7;">
<div style="max-width: 640px; margin: 0 auto; padding: 24px;">
<p style="font-size: 12px; letter-spacing: 0.18em; text-transform: uppercase; color: #64748b;">{{ siteName }}</p>
<h1 style="margin-top: 8px; font-size: 24px;">{{ headline }}</h1>
<div style="margin-top: 20px; white-space: pre-wrap;">{{ body }}</div>
{% if manageUrl or unsubscribeUrl %}
<div style="margin-top: 24px; display: flex; flex-wrap: wrap; gap: 12px;">
{% if manageUrl %}
<a href="{{ manageUrl }}" style="display: inline-block; padding: 10px 16px; border-radius: 9999px; background: #0f172a; color: #ffffff; text-decoration: none;">管理订阅</a>
{% endif %}
{% if unsubscribeUrl %}
<a href="{{ unsubscribeUrl }}" style="display: inline-block; padding: 10px 16px; border-radius: 9999px; border: 1px solid #cbd5e1; color: #334155; text-decoration: none;">取消订阅</a>
{% endif %}
</div>
{% endif %}
<p style="margin-top: 28px; font-size: 13px; color: #64748b;">
来自 {{ siteName }} · <a href="{{ siteUrl }}">{{ siteUrl }}</a>
</p>
</div>
</body>
</html>

View File

@@ -0,0 +1 @@
{{ subject }}

View File

@@ -0,0 +1,14 @@
{{ headline }}
{{ body }}
{% if manageUrl %}
管理订阅{{ manageUrl }}
{% endif %}
{% if unsubscribeUrl %}
取消订阅{{ unsubscribeUrl }}
{% endif %}
--
{{ siteName }}
{{ siteUrl }}

View File

@@ -0,0 +1,27 @@
//! `SeaORM` Entity, manually maintained

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Append-only audit trail of admin and public actions.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "admin_audit_logs")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Actor columns are all None for anonymous/public actions.
    pub actor_username: Option<String>,
    pub actor_email: Option<String>,
    pub actor_source: Option<String>,
    // Dotted action identifier, e.g. "subscription.public.confirm".
    pub action: String,
    // Kind of entity acted on (e.g. "subscription") plus optional id/label.
    pub target_type: String,
    pub target_id: Option<String>,
    pub target_label: Option<String>,
    // Free-form JSON details about the event.
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub metadata: Option<Json>,
}

// No foreign-key relations are modeled for this table.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,24 @@
//! `SeaORM` Entity, manually maintained

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// A rule used to block incoming comments.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "comment_blacklist")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Matching strategy and the value to match.
    // NOTE(review): allowed matcher_type values live in the comment service — confirm there.
    pub matcher_type: String,
    pub matcher_value: String,
    // Optional human-readable justification for the block.
    #[sea_orm(column_type = "Text", nullable)]
    pub reason: Option<String>,
    // Whether the rule is enabled; None semantics owned by consumers — TODO confirm.
    pub active: Option<bool>,
    // When set, the rule stops applying after this instant.
    pub expires_at: Option<DateTimeWithTimeZone>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,28 @@
//! `SeaORM` Entity, manually maintained

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Stored result of one persona-analysis run over the comments matching a
/// matcher within an optional [from_at, to_at] window.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "comment_persona_analysis_logs")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Which commenter(s) this run analyzed — same matcher shape as the blacklist.
    pub matcher_type: String,
    pub matcher_value: String,
    // Analysis window bounds; None means unbounded on that side — TODO confirm.
    pub from_at: Option<DateTimeWithTimeZone>,
    pub to_at: Option<DateTimeWithTimeZone>,
    // Aggregate counters for the analyzed comment set.
    pub total_comments: i32,
    pub pending_comments: i32,
    pub distinct_posts: i32,
    // Full analysis output text.
    #[sea_orm(column_type = "Text")]
    pub analysis_text: String,
    // Sampled comment data fed into the analysis — presumably; verify against the producer.
    pub sample_json: Option<Json>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

View File

@@ -15,6 +15,9 @@ pub struct Model {
pub author: Option<String>,
pub email: Option<String>,
pub avatar: Option<String>,
pub ip_address: Option<String>,
pub user_agent: Option<String>,
pub referer: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub content: Option<String>,
pub scope: String,

View File

@@ -0,0 +1,29 @@
//! `SeaORM` Entity, manually maintained

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Raw content-analytics events. Event-type constants live in
/// `services::analytics` (page_view / read_progress / read_complete).
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "content_events")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    pub event_type: String,
    // Visited path; post_slug set only when the path maps to a post — TODO confirm.
    pub path: String,
    pub post_slug: Option<String>,
    // Anonymous session identifier supplied by the client, when available.
    pub session_id: Option<String>,
    pub referrer: Option<String>,
    #[sea_orm(column_type = "Text", nullable)]
    pub user_agent: Option<String>,
    // Reading metrics; presumably only populated for read_* events — verify against the producer.
    pub duration_ms: Option<i32>,
    pub progress_percent: Option<i32>,
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub metadata: Option<Json>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

View File

@@ -3,12 +3,19 @@
// Entity modules, alphabetical; `prelude` is listed first so the re-export
// hub is easy to find. Declaration order has no semantic effect in Rust.
pub mod prelude;

pub mod admin_audit_logs;
pub mod ai_chunks;
pub mod categories;
pub mod comment_blacklist;
pub mod comment_persona_analysis_logs;
pub mod comments;
pub mod content_events;
pub mod friend_links;
pub mod notification_deliveries;
pub mod post_revisions;
pub mod posts;
pub mod query_events;
pub mod reviews;
pub mod site_settings;
pub mod subscriptions;
pub mod tags;
pub mod users;

View File

@@ -0,0 +1,32 @@
//! `SeaORM` Entity, manually maintained

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// One delivery record for a notification sent toward a subscription target.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "notification_deliveries")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Originating subscription; None when the delivery is not tied to one — TODO confirm.
    pub subscription_id: Option<i32>,
    pub channel_type: String,
    pub target: String,
    pub event_type: String,
    // Delivery status string; values managed by the delivery worker — TODO confirm.
    pub status: String,
    pub provider: Option<String>,
    // Raw provider response captured for debugging.
    #[sea_orm(column_type = "Text", nullable)]
    pub response_text: Option<String>,
    // Payload that was (or will be) delivered.
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub payload: Option<Json>,
    pub attempts_count: i32,
    // NOTE(review): these timestamps are stored as strings, unlike the
    // DateTimeWithTimeZone used by sibling entities — confirm the underlying
    // column type before changing anything here.
    pub next_retry_at: Option<String>,
    pub last_attempt_at: Option<String>,
    pub delivered_at: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,30 @@
//! `SeaORM` Entity, manually maintained

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// History entry recording one change made to a post.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "post_revisions")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Post is referenced by slug (and snapshot title), not a foreign key.
    pub post_slug: String,
    pub post_title: Option<String>,
    // What was done to the post — TODO confirm the exact operation values.
    pub operation: String,
    #[sea_orm(column_type = "Text", nullable)]
    pub revision_reason: Option<String>,
    // Who made the change; mirrors the actor columns of admin_audit_logs.
    pub actor_username: Option<String>,
    pub actor_email: Option<String>,
    pub actor_source: Option<String>,
    // Markdown snapshot of the post at this revision, when captured.
    #[sea_orm(column_type = "Text", nullable)]
    pub markdown: Option<String>,
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub metadata: Option<Json>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

View File

@@ -1,4 +1,4 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.10
//! `SeaORM` Entity, manually maintained
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
@@ -23,6 +23,19 @@ pub struct Model {
#[sea_orm(column_type = "JsonBinary", nullable)]
pub images: Option<Json>,
pub pinned: Option<bool>,
pub status: Option<String>,
pub visibility: Option<String>,
pub publish_at: Option<DateTimeWithTimeZone>,
pub unpublish_at: Option<DateTimeWithTimeZone>,
#[sea_orm(column_type = "Text", nullable)]
pub canonical_url: Option<String>,
pub noindex: Option<bool>,
#[sea_orm(column_type = "Text", nullable)]
pub og_image: Option<String>,
#[sea_orm(column_type = "JsonBinary", nullable)]
pub redirect_from: Option<Json>,
#[sea_orm(column_type = "Text", nullable)]
pub redirect_to: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -1,12 +1,19 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.10

// Re-exports kept in alphabetical order so new entities are easy to slot in.
pub use super::admin_audit_logs::Entity as AdminAuditLogs;
pub use super::ai_chunks::Entity as AiChunks;
pub use super::categories::Entity as Categories;
pub use super::comment_blacklist::Entity as CommentBlacklist;
pub use super::comment_persona_analysis_logs::Entity as CommentPersonaAnalysisLogs;
pub use super::comments::Entity as Comments;
pub use super::content_events::Entity as ContentEvents;
pub use super::friend_links::Entity as FriendLinks;
pub use super::notification_deliveries::Entity as NotificationDeliveries;
pub use super::post_revisions::Entity as PostRevisions;
pub use super::posts::Entity as Posts;
pub use super::query_events::Entity as QueryEvents;
pub use super::reviews::Entity as Reviews;
pub use super::site_settings::Entity as SiteSettings;
pub use super::subscriptions::Entity as Subscriptions;
pub use super::tags::Entity as Tags;
pub use super::users::Entity as Users;

View File

@@ -58,6 +58,15 @@ pub struct Model {
pub media_r2_access_key_id: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub media_r2_secret_access_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub seo_default_og_image: Option<String>,
pub seo_default_twitter_handle: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub notification_webhook_url: Option<String>,
pub notification_comment_enabled: Option<bool>,
pub notification_friend_link_enabled: Option<bool>,
#[sea_orm(column_type = "JsonBinary", nullable)]
pub search_synonyms: Option<Json>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -0,0 +1,36 @@
//! `SeaORM` Entity, manually maintained

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// A notification subscription, identified by channel_type + target
/// (e.g. the public e-mail channel driven by the subscription controller).
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "subscriptions")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    // Delivery channel and its address (e.g. an e-mail address).
    pub channel_type: String,
    pub target: String,
    pub display_name: Option<String>,
    // Lifecycle status string; values managed by the subscriptions service — TODO confirm.
    pub status: String,
    // Subscriber preference filters; schema owned by the subscriptions service.
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub filters: Option<Json>,
    #[sea_orm(column_type = "Text", nullable)]
    pub secret: Option<String>,
    #[sea_orm(column_type = "Text", nullable)]
    pub notes: Option<String>,
    // Tokens for the public double-opt-in and manage/unsubscribe flows.
    pub confirm_token: Option<String>,
    pub manage_token: Option<String>,
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub metadata: Option<Json>,
    // NOTE(review): stored as strings rather than DateTimeWithTimeZone like
    // other entities — confirm the underlying column type before changing.
    pub verified_at: Option<String>,
    pub last_notified_at: Option<String>,
    // Delivery health bookkeeping.
    pub failure_count: Option<i32>,
    pub last_delivery_status: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,210 @@
use std::{
collections::HashMap,
sync::{Mutex, OnceLock},
};
use axum::http::{header, HeaderMap, StatusCode};
use chrono::{DateTime, Duration, Utc};
use loco_rs::{
controller::ErrorDetail,
prelude::*,
};
// Defaults for the public-endpoint rate limiter; each is overridable at
// runtime via the TERMI_PUBLIC_RATE_LIMIT_* environment variables below.
const DEFAULT_WINDOW_SECONDS: i64 = 5 * 60;
const DEFAULT_MAX_REQUESTS_PER_WINDOW: u32 = 45;
const DEFAULT_BAN_MINUTES: i64 = 30;
// A second, short "burst" window catches rapid-fire abuse inside the main window.
const DEFAULT_BURST_LIMIT: u32 = 8;
const DEFAULT_BURST_WINDOW_SECONDS: i64 = 30;

// Environment-variable names for the runtime overrides.
const ENV_WINDOW_SECONDS: &str = "TERMI_PUBLIC_RATE_LIMIT_WINDOW_SECONDS";
const ENV_MAX_REQUESTS_PER_WINDOW: &str = "TERMI_PUBLIC_RATE_LIMIT_MAX";
const ENV_BAN_MINUTES: &str = "TERMI_PUBLIC_RATE_LIMIT_BAN_MINUTES";
const ENV_BURST_LIMIT: &str = "TERMI_PUBLIC_RATE_LIMIT_BURST_MAX";
const ENV_BURST_WINDOW_SECONDS: &str = "TERMI_PUBLIC_RATE_LIMIT_BURST_WINDOW_SECONDS";
/// Effective rate-limiter configuration (defaults merged with env overrides).
#[derive(Clone, Debug)]
struct AbuseGuardConfig {
    window_seconds: i64,
    max_requests_per_window: u32,
    // How long an offender stays banned after tripping either limit.
    ban_minutes: i64,
    burst_limit: u32,
    burst_window_seconds: i64,
}

/// Per-key (scope + ip [+ fingerprint]) counters kept in the in-memory store.
#[derive(Clone, Debug)]
struct AbuseGuardEntry {
    window_started_at: DateTime<Utc>,
    request_count: u32,
    burst_window_started_at: DateTime<Utc>,
    burst_count: u32,
    // Set when a limit tripped; requests are rejected until this instant.
    banned_until: Option<DateTime<Utc>>,
    // Which limit tripped last ("burst_limit" / "window_limit"); kept for debugging.
    last_reason: Option<String>,
}
/// Reads `name` from the environment as an `i64` clamped to `[min, max]`;
/// returns `fallback` when the variable is unset or unparsable.
fn parse_env_i64(name: &str, fallback: i64, min: i64, max: i64) -> i64 {
    match std::env::var(name) {
        Ok(raw) => match raw.trim().parse::<i64>() {
            Ok(parsed) => parsed.clamp(min, max),
            Err(_) => fallback,
        },
        Err(_) => fallback,
    }
}
/// `u32` twin of `parse_env_i64`: env override clamped to `[min, max]`,
/// falling back to `fallback` on unset or unparsable values.
fn parse_env_u32(name: &str, fallback: u32, min: u32, max: u32) -> u32 {
    let parsed = std::env::var(name)
        .ok()
        .and_then(|raw| raw.trim().parse::<u32>().ok());
    match parsed {
        Some(value) => value.clamp(min, max),
        None => fallback,
    }
}
/// Builds the runtime rate-limiter config from the defaults plus env
/// overrides, clamping each knob to a sane operational range.
fn load_config() -> AbuseGuardConfig {
    let window_seconds =
        parse_env_i64(ENV_WINDOW_SECONDS, DEFAULT_WINDOW_SECONDS, 10, 24 * 60 * 60);
    let max_requests_per_window = parse_env_u32(
        ENV_MAX_REQUESTS_PER_WINDOW,
        DEFAULT_MAX_REQUESTS_PER_WINDOW,
        1,
        50_000,
    );
    let ban_minutes = parse_env_i64(ENV_BAN_MINUTES, DEFAULT_BAN_MINUTES, 1, 7 * 24 * 60);
    let burst_limit = parse_env_u32(ENV_BURST_LIMIT, DEFAULT_BURST_LIMIT, 1, 1_000);
    let burst_window_seconds = parse_env_i64(
        ENV_BURST_WINDOW_SECONDS,
        DEFAULT_BURST_WINDOW_SECONDS,
        5,
        60 * 60,
    );
    AbuseGuardConfig {
        window_seconds,
        max_requests_per_window,
        ban_minutes,
        burst_limit,
        burst_window_seconds,
    }
}
/// Trims `value`; returns `None` when missing or blank, otherwise at most
/// `max_chars` characters of the trimmed text.
fn normalize_token(value: Option<&str>, max_chars: usize) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        return None;
    }
    Some(trimmed.chars().take(max_chars).collect())
}
/// IP-specific wrapper around `normalize_token` with a generous cap that
/// comfortably fits IPv6 addresses and proxy-forwarded values.
fn normalize_ip(value: Option<&str>) -> Option<String> {
    const MAX_IP_CHARS: usize = 96;
    normalize_token(value, MAX_IP_CHARS)
}
/// Returns the header's value as UTF-8 text, or `None` when absent or not
/// valid UTF-8.
pub fn header_value<'a>(headers: &'a HeaderMap, key: header::HeaderName) -> Option<&'a str> {
    let raw = headers.get(key)?;
    raw.to_str().ok()
}
/// Picks the first non-empty, trimmed element of a comma-separated
/// `X-Forwarded-For` style list (the leftmost entry, i.e. the originating
/// client by convention).
fn first_forwarded_ip(value: &str) -> Option<&str> {
    for part in value.split(',') {
        let candidate = part.trim();
        if !candidate.is_empty() {
            return Some(candidate);
        }
    }
    None
}
/// Best-effort client IP from common proxy headers, in precedence order:
/// `x-forwarded-for` (first hop), `x-real-ip`, `cf-connecting-ip`,
/// `true-client-ip`.
///
/// NOTE(review): these headers are client-supplied and spoofable unless a
/// trusted proxy strips/sets them — confirm the deployment topology.
pub fn detect_client_ip(headers: &HeaderMap) -> Option<String> {
    let candidates = [
        header_value(headers, header::HeaderName::from_static("x-forwarded-for"))
            .and_then(first_forwarded_ip),
        header_value(headers, header::HeaderName::from_static("x-real-ip")),
        header_value(headers, header::HeaderName::from_static("cf-connecting-ip")),
        header_value(headers, header::HeaderName::from_static("true-client-ip")),
    ];
    normalize_ip(candidates.iter().copied().flatten().next())
}
/// Process-wide, lazily initialized store of per-key rate-limit counters.
/// Note: in-memory only, so counters are per-instance and reset on restart.
fn abuse_store() -> &'static Mutex<HashMap<String, AbuseGuardEntry>> {
    static STORE: OnceLock<Mutex<HashMap<String, AbuseGuardEntry>>> = OnceLock::new();
    STORE.get_or_init(Mutex::default)
}
/// Builds the store key: `scope:ip` or `scope:ip:fingerprint`, all normalized;
/// a missing IP is bucketed under "unknown".
fn make_key(scope: &str, client_ip: Option<&str>, fingerprint: Option<&str>) -> String {
    let scope_part = scope.trim().to_ascii_lowercase();
    let ip_part = normalize_ip(client_ip).unwrap_or_else(|| "unknown".to_string());
    match normalize_token(fingerprint, 160) {
        Some(fingerprint_part) => format!("{scope_part}:{ip_part}:{fingerprint_part}"),
        None => format!("{scope_part}:{ip_part}"),
    }
}
fn too_many_requests(message: impl Into<String>) -> Error {
let message = message.into();
Error::CustomError(
StatusCode::TOO_MANY_REQUESTS,
ErrorDetail::new("rate_limited".to_string(), message),
)
}
/// Rate-limits one request against the public-endpoint abuse guard.
///
/// Counters are keyed by `scope:ip[:fingerprint]` and kept in process memory,
/// so limits are per-instance and reset on restart. Returns `Ok(())` when the
/// request is allowed, otherwise a 429 error (Chinese user-facing message).
pub fn enforce_public_scope(
    scope: &str,
    client_ip: Option<&str>,
    fingerprint: Option<&str>,
) -> Result<()> {
    let config = load_config();
    let key = make_key(scope, client_ip, fingerprint);
    let now = Utc::now();
    // A poisoned lock means another request panicked mid-update; surface a 500.
    let mut store = abuse_store()
        .lock()
        .map_err(|_| Error::InternalServerError)?;
    // Bound memory: drop entries whose ban expired (or, unbanned, whose
    // window started) more than a day ago.
    store.retain(|_, entry| {
        entry
            .banned_until
            .map(|until| until > now - Duration::days(1))
            .unwrap_or_else(|| entry.window_started_at > now - Duration::days(1))
    });
    let entry = store.entry(key).or_insert_with(|| AbuseGuardEntry {
        window_started_at: now,
        request_count: 0,
        burst_window_started_at: now,
        burst_count: 0,
        banned_until: None,
        last_reason: None,
    });
    // Still banned: reject with a retry hint in whole minutes (at least 1).
    if let Some(banned_until) = entry.banned_until {
        if banned_until > now {
            let retry_after = (banned_until - now).num_minutes().max(1);
            return Err(too_many_requests(format!(
                "请求过于频繁,请在 {retry_after} 分钟后重试"
            )));
        }
        entry.banned_until = None;
    }
    // Start fresh counting windows once the previous ones have fully elapsed.
    if entry.window_started_at + Duration::seconds(config.window_seconds) <= now {
        entry.window_started_at = now;
        entry.request_count = 0;
    }
    if entry.burst_window_started_at + Duration::seconds(config.burst_window_seconds) <= now {
        entry.burst_window_started_at = now;
        entry.burst_count = 0;
    }
    entry.request_count += 1;
    entry.burst_count += 1;
    // The burst limit is checked first and escalates straight to a timed ban.
    if entry.burst_count > config.burst_limit {
        entry.banned_until = Some(now + Duration::minutes(config.ban_minutes));
        entry.last_reason = Some("burst_limit".to_string());
        return Err(too_many_requests("短时间请求过多,已临时封禁,请稍后再试"));
    }
    if entry.request_count > config.max_requests_per_window {
        entry.banned_until = Some(now + Duration::minutes(config.ban_minutes));
        entry.last_reason = Some("window_limit".to_string());
        return Err(too_many_requests("请求过于频繁,已临时封禁,请稍后再试"));
    }
    Ok(())
}

View File

@@ -0,0 +1,33 @@
use loco_rs::prelude::*;
use sea_orm::{ActiveModelTrait, Set};
use crate::{
controllers::admin::AdminIdentity,
models::_entities::admin_audit_logs,
};
/// Persists one audit-log row. `actor` is `None` for anonymous/public actions;
/// the actor columns are then stored as NULL.
pub async fn log_event(
    ctx: &AppContext,
    actor: Option<&AdminIdentity>,
    action: &str,
    target_type: &str,
    target_id: Option<String>,
    target_label: Option<String>,
    metadata: Option<serde_json::Value>,
) -> Result<()> {
    // Remaining columns (id, timestamps) are filled by defaults/model behavior.
    let record = admin_audit_logs::ActiveModel {
        actor_username: Set(actor.map(|identity| identity.username.clone())),
        actor_email: Set(actor.and_then(|identity| identity.email.clone())),
        actor_source: Set(actor.map(|identity| identity.source.clone())),
        action: Set(action.to_string()),
        target_type: Set(target_type.to_string()),
        target_id: Set(target_id),
        target_label: Set(target_label),
        metadata: Set(metadata),
        ..Default::default()
    };
    record.insert(&ctx.db).await?;
    Ok(())
}

View File

@@ -552,8 +552,22 @@ fn build_source_chunks(
fn build_chunks(posts: &[content::MarkdownPost], chunk_size: usize) -> Vec<ChunkDraft> {
let mut chunks = Vec::new();
let now = chrono::Utc::now().fixed_offset();
for post in posts.iter().filter(|post| post.published) {
for post in posts.iter().filter(|post| {
content::effective_post_state(
&post.status,
post.publish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(&value).ok()),
post.unpublish_at
.clone()
.and_then(|value| chrono::DateTime::parse_from_rfc3339(&value).ok()),
now,
) == content::POST_STATUS_PUBLISHED
&& content::normalize_post_visibility(Some(&post.visibility))
!= content::POST_VISIBILITY_PRIVATE
}) {
let mut sections = Vec::new();
sections.push(format!("# {}", post.title));
if let Some(description) = post
@@ -2714,6 +2728,30 @@ pub async fn answer_question(ctx: &AppContext, question: &str) -> Result<AiAnswe
})
}
pub async fn admin_chat_completion(
ctx: &AppContext,
system_prompt: &str,
prompt: &str,
) -> Result<String> {
let settings = load_runtime_settings(ctx, false).await?;
let api_base = settings
.api_base
.ok_or_else(|| Error::BadRequest("请先在后台配置 AI API Base".to_string()))?;
let api_key = settings
.api_key
.ok_or_else(|| Error::BadRequest("请先在后台配置 AI API Key".to_string()))?;
request_chat_answer(&AiProviderRequest {
provider: settings.provider,
api_base,
api_key,
chat_model: settings.chat_model,
system_prompt: system_prompt.trim().to_string(),
prompt: prompt.trim().to_string(),
})
.await
}
pub fn provider_name(value: Option<&str>) -> String {
trim_to_option(value.map(ToString::to_string))
.unwrap_or_else(|| DEFAULT_AI_PROVIDER.to_string())

View File

@@ -9,10 +9,13 @@ use sea_orm::{
};
use serde::Serialize;
use crate::models::_entities::query_events;
use crate::models::_entities::{content_events, posts, query_events};
const EVENT_TYPE_SEARCH: &str = "search";
const EVENT_TYPE_AI_QUESTION: &str = "ai_question";
/// Content analytics event type: a page was viewed.
pub const CONTENT_EVENT_PAGE_VIEW: &str = "page_view";
/// Content analytics event type: the reader scrolled partway through a post.
pub const CONTENT_EVENT_READ_PROGRESS: &str = "read_progress";
/// Content analytics event type: the reader reached the end of a post.
pub const CONTENT_EVENT_READ_COMPLETE: &str = "read_complete";
#[derive(Clone, Debug, Default)]
pub struct QueryEventRequestContext {
@@ -34,6 +37,25 @@ pub struct QueryEventDraft {
pub latency_ms: Option<i32>,
}
/// Request-derived metadata attached to a content analytics event.
#[derive(Clone, Debug, Default)]
pub struct ContentEventRequestContext {
    /// Request path the event was reported from.
    pub path: Option<String>,
    /// Raw `Referer` header value, if present.
    pub referrer: Option<String>,
    /// Raw `User-Agent` header value, if present.
    pub user_agent: Option<String>,
}
/// A content analytics event waiting to be validated and persisted.
#[derive(Clone, Debug)]
pub struct ContentEventDraft {
    /// One of the `CONTENT_EVENT_*` constants; other values are dropped on insert.
    pub event_type: String,
    /// Page path the event occurred on; blank paths are dropped on insert.
    pub path: String,
    /// Slug of the post the event belongs to, when attributable.
    pub post_slug: Option<String>,
    /// Opaque visitor session identifier, if the client supplied one.
    pub session_id: Option<String>,
    /// Referrer / user-agent / path captured from the HTTP request.
    pub request_context: ContentEventRequestContext,
    /// Reading duration in milliseconds (negative values are clamped to 0 on insert).
    pub duration_ms: Option<i32>,
    /// Read progress in percent (clamped to 0..=100 on insert).
    pub progress_percent: Option<i32>,
    /// Arbitrary structured extras recorded alongside the event.
    pub metadata: Option<serde_json::Value>,
}
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsOverview {
pub total_searches: u64,
@@ -48,6 +70,17 @@ pub struct AnalyticsOverview {
pub avg_ai_latency_ms_last_7d: Option<f64>,
}
/// Aggregate page-view and reading metrics for the analytics overview.
#[derive(Clone, Debug, Serialize)]
pub struct ContentAnalyticsOverview {
    /// All-time page-view events.
    pub total_page_views: u64,
    /// Page views in the trailing 24 hours.
    pub page_views_last_24h: u64,
    /// Page views in the trailing 7 days.
    pub page_views_last_7d: u64,
    /// All-time read-complete events.
    pub total_read_completes: u64,
    /// Read-complete events in the trailing 7 days.
    pub read_completes_last_7d: u64,
    /// Mean reported read progress (percent) over the trailing 7 days; 0.0 when no data.
    pub avg_read_progress_last_7d: f64,
    /// Mean read duration (ms) over the trailing 7 days; `None` when no data.
    pub avg_read_duration_ms_last_7d: Option<f64>,
}
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsTopQuery {
pub query: String,
@@ -75,6 +108,22 @@ pub struct AnalyticsProviderBucket {
pub count: u64,
}
/// Page-view count attributed to a single normalized referrer source.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsReferrerBucket {
    /// Referrer host, the raw value when it is not a URL, or "direct" when absent.
    pub referrer: String,
    /// Number of page views from this source.
    pub count: u64,
}
/// Per-post engagement summary used in "popular posts" rankings.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsPopularPost {
    /// Post slug the events were attributed to.
    pub slug: String,
    /// Display title; falls back to the slug when no title is known.
    pub title: String,
    /// Page views in the summarized window.
    pub page_views: u64,
    /// Read-complete events in the summarized window.
    pub read_completes: u64,
    /// Mean reported read progress in percent; 0.0 when no progress events.
    pub avg_progress_percent: f64,
    /// Mean read duration in milliseconds; `None` when no duration events.
    pub avg_duration_ms: Option<f64>,
}
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsDailyBucket {
pub date: String,
@@ -85,13 +134,39 @@ pub struct AnalyticsDailyBucket {
/// Full payload returned by the admin analytics endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct AdminAnalyticsResponse {
    /// Search / AI query metrics.
    pub overview: AnalyticsOverview,
    /// Page-view and reading metrics.
    pub content_overview: ContentAnalyticsOverview,
    pub top_search_terms: Vec<AnalyticsTopQuery>,
    pub top_ai_questions: Vec<AnalyticsTopQuery>,
    pub recent_events: Vec<AnalyticsRecentEvent>,
    /// AI provider usage buckets over the trailing 7 days.
    pub providers_last_7d: Vec<AnalyticsProviderBucket>,
    pub top_referrers: Vec<AnalyticsReferrerBucket>,
    pub popular_posts: Vec<AnalyticsPopularPost>,
    pub daily_activity: Vec<AnalyticsDailyBucket>,
}
/// Public-facing content analytics: sitewide overview plus popular posts.
#[derive(Clone, Debug, Serialize)]
pub struct PublicContentHighlights {
    pub overview: ContentAnalyticsOverview,
    pub popular_posts: Vec<AnalyticsPopularPost>,
}
/// Aggregate metrics for one rolling window of public content analytics.
#[derive(Clone, Debug, Serialize)]
pub struct PublicContentWindowOverview {
    pub page_views: u64,
    pub read_completes: u64,
    /// Mean read progress in percent; 0.0 when the window has no progress data.
    pub avg_read_progress: f64,
    /// Mean read duration in milliseconds; `None` when the window has no data.
    pub avg_read_duration_ms: Option<f64>,
}
/// One labelled rolling window (e.g. "24h" / "7d" / "30d") of public analytics.
#[derive(Clone, Debug, Serialize)]
pub struct PublicContentWindowHighlights {
    /// Stable identifier for the window (e.g. "7d").
    pub key: String,
    /// Human-readable label for display.
    pub label: String,
    /// Window length in days.
    pub days: i32,
    pub overview: PublicContentWindowOverview,
    pub popular_posts: Vec<AnalyticsPopularPost>,
}
#[derive(Clone, Debug)]
struct QueryAggregate {
query: String,
@@ -122,6 +197,18 @@ fn format_timestamp(value: DateTime<Utc>) -> String {
value.format("%Y-%m-%d %H:%M").to_string()
}
/// Collapses a raw referrer value into a display source: the URL's host when
/// the value parses as a URL, the raw trimmed value when it does not, or
/// "direct" when no referrer was supplied.
fn normalize_referrer_source(value: Option<String>) -> String {
    let Some(raw) = trim_to_option(value) else {
        return "direct".to_string();
    };
    match reqwest::Url::parse(&raw) {
        Ok(url) => match url.host_str() {
            Some(host) if !host.trim().is_empty() => host.to_string(),
            _ => raw,
        },
        Err(_) => raw,
    }
}
fn header_value(headers: &HeaderMap, key: &str) -> Option<String> {
headers
.get(key)
@@ -134,6 +221,10 @@ fn clamp_latency(latency_ms: i64) -> i32 {
latency_ms.clamp(0, i64::from(i32::MAX)) as i32
}
/// Restricts a reported read-progress value to the valid 0..=100 percent range.
fn clamp_percentage(value: i32) -> i32 {
    if value < 0 {
        0
    } else if value > 100 {
        100
    } else {
        value
    }
}
fn build_query_aggregates(
events: &[query_events::Model],
wanted_type: &str,
@@ -199,6 +290,17 @@ pub fn request_context_from_headers(path: &str, headers: &HeaderMap) -> QueryEve
}
}
/// Builds a `ContentEventRequestContext` from the request path plus the
/// `referer` and `user-agent` headers, each normalized via `trim_to_option` /
/// `header_value` (blank values become `None`).
pub fn content_request_context_from_headers(
    path: &str,
    headers: &HeaderMap,
) -> ContentEventRequestContext {
    let path = trim_to_option(Some(path.to_string()));
    let referrer = header_value(headers, "referer");
    let user_agent = header_value(headers, "user-agent");
    ContentEventRequestContext {
        path,
        referrer,
        user_agent,
    }
}
pub async fn record_event(ctx: &AppContext, draft: QueryEventDraft) {
let query_text = draft.query_text.trim().to_string();
if query_text.is_empty() {
@@ -226,6 +328,38 @@ pub async fn record_event(ctx: &AppContext, draft: QueryEventDraft) {
}
}
/// Records a single content analytics event. Drafts with a blank path or an
/// unrecognized event type are silently dropped; insert failures are logged
/// rather than surfaced, so analytics never breaks the request path.
pub async fn record_content_event(ctx: &AppContext, draft: ContentEventDraft) {
    let path = draft.path.trim().to_string();
    if path.is_empty() {
        return;
    }
    let event_type = draft.event_type.trim().to_ascii_lowercase();
    let known_type = matches!(
        event_type.as_str(),
        CONTENT_EVENT_PAGE_VIEW | CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
    );
    if !known_type {
        return;
    }
    let row = content_events::ActiveModel {
        event_type: Set(event_type),
        path: Set(path),
        post_slug: Set(trim_to_option(draft.post_slug)),
        session_id: Set(trim_to_option(draft.session_id)),
        referrer: Set(trim_to_option(draft.request_context.referrer)),
        user_agent: Set(trim_to_option(draft.request_context.user_agent)),
        // Negative durations are client noise; floor them at zero.
        duration_ms: Set(draft.duration_ms.map(|value| value.max(0))),
        progress_percent: Set(draft.progress_percent.map(clamp_percentage)),
        metadata: Set(draft.metadata),
        ..Default::default()
    };
    if let Err(error) = row.insert(&ctx.db).await {
        tracing::warn!("failed to record content analytics event: {error}");
    }
}
pub async fn record_search_event(
ctx: &AppContext,
query_text: &str,
@@ -309,12 +443,25 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
.filter(query_events::Column::CreatedAt.gte(since_24h))
.count(&ctx.db)
.await?;
let total_page_views = content_events::Entity::find()
.filter(content_events::Column::EventType.eq(CONTENT_EVENT_PAGE_VIEW))
.count(&ctx.db)
.await?;
let total_read_completes = content_events::Entity::find()
.filter(content_events::Column::EventType.eq(CONTENT_EVENT_READ_COMPLETE))
.count(&ctx.db)
.await?;
let last_7d_events = query_events::Entity::find()
.filter(query_events::Column::CreatedAt.gte(since_7d))
.order_by_desc(query_events::Column::CreatedAt)
.all(&ctx.db)
.await?;
let last_7d_content_events = content_events::Entity::find()
.filter(content_events::Column::CreatedAt.gte(since_7d))
.order_by_desc(content_events::Column::CreatedAt)
.all(&ctx.db)
.await?;
let searches_last_7d = last_7d_events
.iter()
@@ -336,6 +483,14 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
let mut counted_search_results = 0_u64;
let mut total_ai_latency = 0.0_f64;
let mut counted_ai_latency = 0_u64;
let mut referrer_breakdown: HashMap<String, u64> = HashMap::new();
let mut total_read_progress = 0.0_f64;
let mut counted_read_progress = 0_u64;
let mut total_read_duration = 0.0_f64;
let mut counted_read_duration = 0_u64;
let mut page_views_last_24h = 0_u64;
let mut page_views_last_7d = 0_u64;
let mut read_completes_last_7d = 0_u64;
for offset in 0..7 {
let date = (now - Duration::days(offset)).date_naive();
@@ -372,6 +527,104 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
}
}
let post_titles = posts::Entity::find()
.all(&ctx.db)
.await?
.into_iter()
.map(|post| {
(
post.slug,
post.title.unwrap_or_else(|| "Untitled post".to_string()),
)
})
.collect::<HashMap<_, _>>();
let mut post_breakdown: HashMap<String, (u64, u64, f64, u64, f64, u64)> = HashMap::new();
for event in &last_7d_content_events {
let created_at = DateTime::<Utc>::from(event.created_at);
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
page_views_last_7d += 1;
if created_at >= since_24h {
page_views_last_24h += 1;
}
let referrer = normalize_referrer_source(event.referrer.clone());
*referrer_breakdown.entry(referrer).or_insert(0) += 1;
}
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
read_completes_last_7d += 1;
}
if matches!(
event.event_type.as_str(),
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
) {
let progress = event.progress_percent.unwrap_or({
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
100
} else {
0
}
});
if progress > 0 {
total_read_progress += f64::from(progress);
counted_read_progress += 1;
}
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
total_read_duration += f64::from(duration_ms);
counted_read_duration += 1;
}
}
let Some(post_slug) = event
.post_slug
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
else {
continue;
};
let entry = post_breakdown
.entry(post_slug)
.or_insert((0, 0, 0.0, 0, 0.0, 0));
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
entry.0 += 1;
}
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
entry.1 += 1;
}
if matches!(
event.event_type.as_str(),
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
) {
let progress = event.progress_percent.unwrap_or({
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
100
} else {
0
}
});
if progress > 0 {
entry.2 += f64::from(progress);
entry.3 += 1;
}
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
entry.4 += f64::from(duration_ms);
entry.5 += 1;
}
}
}
let mut providers_last_7d = provider_breakdown
.into_iter()
.map(|(provider, count)| AnalyticsProviderBucket { provider, count })
@@ -384,6 +637,50 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
});
providers_last_7d.truncate(6);
let mut top_referrers = referrer_breakdown
.into_iter()
.map(|(referrer, count)| AnalyticsReferrerBucket { referrer, count })
.collect::<Vec<_>>();
top_referrers.sort_by(|left, right| {
right
.count
.cmp(&left.count)
.then_with(|| left.referrer.cmp(&right.referrer))
});
top_referrers.truncate(8);
let mut popular_posts = post_breakdown
.into_iter()
.map(
|(slug, (page_views, read_completes, total_progress, progress_count, total_duration, duration_count))| {
AnalyticsPopularPost {
title: post_titles
.get(&slug)
.cloned()
.unwrap_or_else(|| slug.clone()),
slug,
page_views,
read_completes,
avg_progress_percent: if progress_count > 0 {
total_progress / progress_count as f64
} else {
0.0
},
avg_duration_ms: (duration_count > 0)
.then(|| total_duration / duration_count as f64),
}
},
)
.collect::<Vec<_>>();
popular_posts.sort_by(|left, right| {
right
.page_views
.cmp(&left.page_views)
.then_with(|| right.read_completes.cmp(&left.read_completes))
.then_with(|| left.slug.cmp(&right.slug))
});
popular_posts.truncate(10);
let mut daily_activity = daily_map
.into_iter()
.map(|(date, (searches, ai_questions))| AnalyticsDailyBucket {
@@ -432,10 +729,448 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
avg_ai_latency_ms_last_7d: (counted_ai_latency > 0)
.then(|| total_ai_latency / counted_ai_latency as f64),
},
content_overview: ContentAnalyticsOverview {
total_page_views,
page_views_last_24h,
page_views_last_7d,
total_read_completes,
read_completes_last_7d,
avg_read_progress_last_7d: if counted_read_progress > 0 {
total_read_progress / counted_read_progress as f64
} else {
0.0
},
avg_read_duration_ms_last_7d: (counted_read_duration > 0)
.then(|| total_read_duration / counted_read_duration as f64),
},
top_search_terms,
top_ai_questions,
recent_events,
providers_last_7d,
top_referrers,
popular_posts,
daily_activity,
})
}
/// Builds public content analytics (sitewide overview plus popular posts),
/// restricted to events attributed to the given publicly visible posts.
///
/// All-time totals come from COUNT queries; windowed stats (24h / 7d) and the
/// popularity ranking are aggregated in memory from the last 7 days of events.
pub async fn build_public_content_highlights(
    ctx: &AppContext,
    public_posts: &[posts::Model],
) -> Result<PublicContentHighlights> {
    // No public posts means nothing can be counted; short-circuit with zeros.
    if public_posts.is_empty() {
        return Ok(PublicContentHighlights {
            overview: ContentAnalyticsOverview {
                total_page_views: 0,
                page_views_last_24h: 0,
                page_views_last_7d: 0,
                total_read_completes: 0,
                read_completes_last_7d: 0,
                avg_read_progress_last_7d: 0.0,
                avg_read_duration_ms_last_7d: None,
            },
            popular_posts: Vec::new(),
        });
    }
    let now = Utc::now();
    let since_24h = now - Duration::hours(24);
    let since_7d = now - Duration::days(7);
    let public_slugs = public_posts
        .iter()
        .map(|post| post.slug.clone())
        .collect::<Vec<_>>();
    // slug -> display title (falls back to the slug when the title is blank).
    let post_titles = public_posts
        .iter()
        .map(|post| {
            (
                post.slug.clone(),
                trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone()),
            )
        })
        .collect::<HashMap<_, _>>();
    let total_page_views = content_events::Entity::find()
        .filter(content_events::Column::EventType.eq(CONTENT_EVENT_PAGE_VIEW))
        .filter(content_events::Column::PostSlug.is_in(public_slugs.clone()))
        .count(&ctx.db)
        .await?;
    let total_read_completes = content_events::Entity::find()
        .filter(content_events::Column::EventType.eq(CONTENT_EVENT_READ_COMPLETE))
        .filter(content_events::Column::PostSlug.is_in(public_slugs.clone()))
        .count(&ctx.db)
        .await?;
    let last_7d_content_events = content_events::Entity::find()
        .filter(content_events::Column::CreatedAt.gte(since_7d))
        .filter(content_events::Column::PostSlug.is_in(public_slugs))
        .all(&ctx.db)
        .await?;
    let mut page_views_last_24h = 0_u64;
    let mut page_views_last_7d = 0_u64;
    let mut read_completes_last_7d = 0_u64;
    let mut total_read_progress = 0.0_f64;
    let mut counted_read_progress = 0_u64;
    let mut total_read_duration = 0.0_f64;
    let mut counted_read_duration = 0_u64;
    // slug -> (page_views, read_completes, progress_sum, progress_count,
    //          duration_sum, duration_count)
    let mut post_breakdown = HashMap::<String, (u64, u64, f64, u64, f64, u64)>::new();
    for event in &last_7d_content_events {
        let created_at = DateTime::<Utc>::from(event.created_at);
        // Events without a usable post slug cannot be attributed; skip them.
        let Some(post_slug) = event
            .post_slug
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .map(ToString::to_string)
        else {
            continue;
        };
        if event.event_type == CONTENT_EVENT_PAGE_VIEW {
            page_views_last_7d += 1;
            if created_at >= since_24h {
                page_views_last_24h += 1;
            }
        }
        if event.event_type == CONTENT_EVENT_READ_COMPLETE {
            read_completes_last_7d += 1;
        }
        if matches!(
            event.event_type.as_str(),
            CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
        ) {
            // A completion without an explicit progress value counts as 100%.
            let progress = event.progress_percent.unwrap_or({
                if event.event_type == CONTENT_EVENT_READ_COMPLETE {
                    100
                } else {
                    0
                }
            });
            if progress > 0 {
                total_read_progress += f64::from(progress);
                counted_read_progress += 1;
            }
            if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
                total_read_duration += f64::from(duration_ms);
                counted_read_duration += 1;
            }
        }
        // Mirror the same tallies per post for the popularity ranking.
        let entry = post_breakdown
            .entry(post_slug)
            .or_insert((0, 0, 0.0, 0, 0.0, 0));
        if event.event_type == CONTENT_EVENT_PAGE_VIEW {
            entry.0 += 1;
        }
        if event.event_type == CONTENT_EVENT_READ_COMPLETE {
            entry.1 += 1;
        }
        if matches!(
            event.event_type.as_str(),
            CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
        ) {
            let progress = event.progress_percent.unwrap_or({
                if event.event_type == CONTENT_EVENT_READ_COMPLETE {
                    100
                } else {
                    0
                }
            });
            if progress > 0 {
                entry.2 += f64::from(progress);
                entry.3 += 1;
            }
            if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
                entry.4 += f64::from(duration_ms);
                entry.5 += 1;
            }
        }
    }
    let mut popular_posts = post_breakdown
        .into_iter()
        .map(
            |(
                slug,
                (
                    page_views,
                    read_completes,
                    total_progress,
                    progress_count,
                    total_duration,
                    duration_count,
                ),
            )| AnalyticsPopularPost {
                title: post_titles
                    .get(&slug)
                    .cloned()
                    .unwrap_or_else(|| slug.clone()),
                slug,
                page_views,
                read_completes,
                avg_progress_percent: if progress_count > 0 {
                    total_progress / progress_count as f64
                } else {
                    0.0
                },
                avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
            },
        )
        .collect::<Vec<_>>();
    // Rank by views, then completions, then slug for a deterministic order.
    popular_posts.sort_by(|left, right| {
        right
            .page_views
            .cmp(&left.page_views)
            .then_with(|| right.read_completes.cmp(&left.read_completes))
            .then_with(|| left.slug.cmp(&right.slug))
    });
    popular_posts.truncate(6);
    Ok(PublicContentHighlights {
        overview: ContentAnalyticsOverview {
            total_page_views,
            page_views_last_24h,
            page_views_last_7d,
            total_read_completes,
            read_completes_last_7d,
            avg_read_progress_last_7d: if counted_read_progress > 0 {
                total_read_progress / counted_read_progress as f64
            } else {
                0.0
            },
            avg_read_duration_ms_last_7d: (counted_read_duration > 0)
                .then(|| total_read_duration / counted_read_duration as f64),
        },
        popular_posts,
    })
}
pub async fn build_public_content_windows(
ctx: &AppContext,
public_posts: &[posts::Model],
) -> Result<Vec<PublicContentWindowHighlights>> {
if public_posts.is_empty() {
return Ok(vec![
build_empty_public_content_window("24h", "24h", 1),
build_empty_public_content_window("7d", "7d", 7),
build_empty_public_content_window("30d", "30d", 30),
]);
}
let now = Utc::now();
let since_30d = now - Duration::days(30);
let public_slugs = public_posts
.iter()
.map(|post| post.slug.clone())
.collect::<Vec<_>>();
let post_titles = public_posts
.iter()
.map(|post| {
(
post.slug.clone(),
trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone()),
)
})
.collect::<HashMap<_, _>>();
let events = content_events::Entity::find()
.filter(content_events::Column::CreatedAt.gte(since_30d))
.filter(content_events::Column::PostSlug.is_in(public_slugs))
.all(&ctx.db)
.await?;
Ok(vec![
summarize_public_content_window(&events, &post_titles, now - Duration::hours(24), "24h", "24h", 1),
summarize_public_content_window(&events, &post_titles, now - Duration::days(7), "7d", "7d", 7),
summarize_public_content_window(&events, &post_titles, since_30d, "30d", "30d", 30),
])
}
/// Produces a zeroed analytics window, used when there is nothing to report.
fn build_empty_public_content_window(
    key: &str,
    label: &str,
    days: i32,
) -> PublicContentWindowHighlights {
    let overview = PublicContentWindowOverview {
        page_views: 0,
        read_completes: 0,
        avg_read_progress: 0.0,
        avg_read_duration_ms: None,
    };
    PublicContentWindowHighlights {
        key: key.to_owned(),
        label: label.to_owned(),
        days,
        overview,
        popular_posts: Vec::new(),
    }
}
/// Aggregates one labelled analytics window (`key` / `label`, `days` long)
/// from pre-fetched content events.
///
/// `events` may span a wider range than the window: rows created before
/// `since` are skipped, so a single 30-day fetch can feed the 24h, 7d, and
/// 30d summaries.
fn summarize_public_content_window(
    events: &[content_events::Model],
    post_titles: &HashMap<String, String>,
    since: DateTime<Utc>,
    key: &str,
    label: &str,
    days: i32,
) -> PublicContentWindowHighlights {
    let mut page_views = 0_u64;
    let mut read_completes = 0_u64;
    let mut total_read_progress = 0.0_f64;
    let mut counted_read_progress = 0_u64;
    let mut total_read_duration = 0.0_f64;
    let mut counted_read_duration = 0_u64;
    // slug -> (page_views, read_completes, progress_sum, progress_count,
    //          duration_sum, duration_count)
    let mut post_breakdown = HashMap::<String, (u64, u64, f64, u64, f64, u64)>::new();
    for event in events {
        let created_at = DateTime::<Utc>::from(event.created_at);
        // Outside this window.
        if created_at < since {
            continue;
        }
        // Events without a usable post slug cannot be attributed; skip them.
        let Some(post_slug) = event
            .post_slug
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .map(ToString::to_string)
        else {
            continue;
        };
        if event.event_type == CONTENT_EVENT_PAGE_VIEW {
            page_views += 1;
        }
        if event.event_type == CONTENT_EVENT_READ_COMPLETE {
            read_completes += 1;
        }
        if matches!(
            event.event_type.as_str(),
            CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
        ) {
            // A completion without an explicit progress value counts as 100%.
            let progress = event.progress_percent.unwrap_or({
                if event.event_type == CONTENT_EVENT_READ_COMPLETE {
                    100
                } else {
                    0
                }
            });
            if progress > 0 {
                total_read_progress += f64::from(progress);
                counted_read_progress += 1;
            }
            if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
                total_read_duration += f64::from(duration_ms);
                counted_read_duration += 1;
            }
        }
        // Mirror the same tallies per post for the popularity ranking.
        let entry = post_breakdown
            .entry(post_slug)
            .or_insert((0, 0, 0.0, 0, 0.0, 0));
        if event.event_type == CONTENT_EVENT_PAGE_VIEW {
            entry.0 += 1;
        }
        if event.event_type == CONTENT_EVENT_READ_COMPLETE {
            entry.1 += 1;
        }
        if matches!(
            event.event_type.as_str(),
            CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
        ) {
            let progress = event.progress_percent.unwrap_or({
                if event.event_type == CONTENT_EVENT_READ_COMPLETE {
                    100
                } else {
                    0
                }
            });
            if progress > 0 {
                entry.2 += f64::from(progress);
                entry.3 += 1;
            }
            if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
                entry.4 += f64::from(duration_ms);
                entry.5 += 1;
            }
        }
    }
    let mut popular_posts = post_breakdown
        .into_iter()
        .map(
            |(
                slug,
                (
                    item_page_views,
                    item_read_completes,
                    total_progress,
                    progress_count,
                    total_duration,
                    duration_count,
                ),
            )| AnalyticsPopularPost {
                title: post_titles
                    .get(&slug)
                    .cloned()
                    .unwrap_or_else(|| slug.clone()),
                slug,
                page_views: item_page_views,
                read_completes: item_read_completes,
                avg_progress_percent: if progress_count > 0 {
                    total_progress / progress_count as f64
                } else {
                    0.0
                },
                avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
            },
        )
        .collect::<Vec<_>>();
    // Rank by views, completions, average progress, then slug for stability.
    popular_posts.sort_by(|left, right| {
        right
            .page_views
            .cmp(&left.page_views)
            .then_with(|| right.read_completes.cmp(&left.read_completes))
            .then_with(|| {
                right
                    .avg_progress_percent
                    .partial_cmp(&left.avg_progress_percent)
                    .unwrap_or(std::cmp::Ordering::Equal)
            })
            .then_with(|| left.slug.cmp(&right.slug))
    });
    popular_posts.truncate(6);
    PublicContentWindowHighlights {
        key: key.to_string(),
        label: label.to_string(),
        days,
        overview: PublicContentWindowOverview {
            page_views,
            read_completes,
            avg_read_progress: if counted_read_progress > 0 {
                total_read_progress / counted_read_progress as f64
            } else {
                0.0
            },
            avg_read_duration_ms: (counted_read_duration > 0)
                .then(|| total_read_duration / counted_read_duration as f64),
        },
        popular_posts,
    }
}

View File

@@ -0,0 +1,375 @@
use std::collections::HashMap;
use std::sync::{Mutex, OnceLock};
use chrono::{DateTime, Duration, Utc};
use loco_rs::prelude::*;
use sea_orm::{ColumnTrait, Condition, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
use serde::Serialize;
use uuid::Uuid;
use crate::models::_entities::{comment_blacklist, comments};
// Default throttle settings: 8 comments per 10-minute window, with at least
// 12 seconds between consecutive comments from the same IP.
const DEFAULT_RATE_LIMIT_WINDOW_SECONDS: i64 = 10 * 60;
const DEFAULT_RATE_LIMIT_MAX_PER_WINDOW: u64 = 8;
const DEFAULT_MIN_INTERVAL_SECONDS: i64 = 12;
// Default captcha challenge lifetime before it must be refreshed.
const DEFAULT_CAPTCHA_TTL_SECONDS: i64 = 10 * 60;
// Environment variable names that override the defaults above at runtime.
const ENV_RATE_LIMIT_WINDOW_SECONDS: &str = "TERMI_COMMENT_RATE_LIMIT_WINDOW_SECONDS";
const ENV_RATE_LIMIT_MAX_PER_WINDOW: &str = "TERMI_COMMENT_RATE_LIMIT_MAX_PER_WINDOW";
const ENV_MIN_INTERVAL_SECONDS: &str = "TERMI_COMMENT_MIN_INTERVAL_SECONDS";
const ENV_BLOCK_KEYWORDS: &str = "TERMI_COMMENT_BLOCK_KEYWORDS";
const ENV_CAPTCHA_TTL_SECONDS: &str = "TERMI_COMMENT_CAPTCHA_TTL_SECONDS";
// Matcher types supported by the comment blacklist.
pub const MATCHER_TYPE_IP: &str = "ip";
pub const MATCHER_TYPE_EMAIL: &str = "email";
pub const MATCHER_TYPE_USER_AGENT: &str = "user_agent";
/// Captcha challenge returned to the client; the answer stays server-side.
#[derive(Clone, Debug, Serialize)]
pub struct CommentCaptchaChallenge {
    /// Opaque single-use token identifying the stored answer.
    pub token: String,
    /// Arithmetic question shown to the user, e.g. "3 + 4 = ?".
    pub question: String,
    /// Seconds until the challenge expires and must be refreshed.
    pub expires_in_seconds: i64,
}
/// Borrowed view of an incoming comment submission consumed by the spam guard.
#[derive(Clone, Debug)]
pub struct CommentGuardInput<'a> {
    pub ip_address: Option<&'a str>,
    pub email: Option<&'a str>,
    pub user_agent: Option<&'a str>,
    pub author: Option<&'a str>,
    pub content: Option<&'a str>,
    /// Hidden honeypot form field; any non-blank value rejects the submission.
    pub honeypot_website: Option<&'a str>,
    /// Token from a previously issued captcha challenge.
    pub captcha_token: Option<&'a str>,
    /// User-supplied answer to the captcha question.
    pub captcha_answer: Option<&'a str>,
}
/// Effective guard settings after applying environment-variable overrides.
#[derive(Clone, Debug)]
struct GuardConfig {
    /// Length of the rolling per-IP rate-limit window, in seconds.
    rate_limit_window_seconds: i64,
    /// Maximum comments allowed per IP within one window.
    rate_limit_max_per_window: u64,
    /// Minimum seconds between consecutive comments from one IP (<= 0 disables).
    min_interval_seconds: i64,
    /// Lowercased keywords that cause outright rejection when matched.
    blocked_keywords: Vec<String>,
    /// Captcha challenge lifetime, in seconds.
    captcha_ttl_seconds: i64,
}
/// Server-side record of an outstanding captcha challenge.
#[derive(Clone, Debug)]
struct CaptchaEntry {
    /// Expected answer, stored as a decimal string.
    answer: String,
    /// Moment after which the challenge is rejected and purged.
    expires_at: DateTime<Utc>,
    /// IP the challenge was issued to, when known; checked on verification.
    ip_address: Option<String>,
}
/// Reads an i64 from the environment variable `name`, clamping the parsed
/// value into `[min, max]`; returns `fallback` when the variable is unset or
/// does not parse as an integer.
fn parse_env_i64(name: &str, fallback: i64, min: i64, max: i64) -> i64 {
    match std::env::var(name) {
        Ok(raw) => match raw.trim().parse::<i64>() {
            Ok(parsed) => parsed.clamp(min, max),
            Err(_) => fallback,
        },
        Err(_) => fallback,
    }
}
/// Trims an optional string and drops it entirely when the result is empty,
/// so blank form fields behave the same as absent ones.
fn trim_to_option(value: Option<&str>) -> Option<String> {
    let trimmed = value?.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
/// Canonicalizes an email for matching: trimmed and lowercased; None if blank.
fn normalize_email(value: Option<&str>) -> Option<String> {
    let trimmed = trim_to_option(value)?;
    Some(trimmed.to_lowercase())
}
/// Trims a user-agent string and truncates it to 512 chars; None if blank.
fn normalize_user_agent(value: Option<&str>) -> Option<String> {
    let trimmed = trim_to_option(value)?;
    Some(trimmed.chars().take(512).collect())
}
/// Trims an IP string and truncates it to 96 chars; None if blank.
fn normalize_ip(value: Option<&str>) -> Option<String> {
    let trimmed = trim_to_option(value)?;
    Some(trimmed.chars().take(96).collect())
}
/// Parses the blocked-keyword environment variable into lowercase keywords,
/// splitting on commas and line breaks; an unset variable yields no keywords.
fn parse_keywords() -> Vec<String> {
    let Ok(raw) = std::env::var(ENV_BLOCK_KEYWORDS) else {
        return Vec::new();
    };
    raw.split([',', '\n', '\r'])
        .map(str::trim)
        .filter(|keyword| !keyword.is_empty())
        .map(str::to_lowercase)
        .collect()
}
/// Assembles the guard configuration from environment overrides, clamping
/// each numeric knob to a sane range and falling back to the compile-time
/// defaults when unset or invalid.
fn load_config() -> GuardConfig {
    let window_seconds = parse_env_i64(
        ENV_RATE_LIMIT_WINDOW_SECONDS,
        DEFAULT_RATE_LIMIT_WINDOW_SECONDS,
        10,
        24 * 60 * 60,
    );
    let max_per_window = parse_env_i64(
        ENV_RATE_LIMIT_MAX_PER_WINDOW,
        DEFAULT_RATE_LIMIT_MAX_PER_WINDOW as i64,
        1,
        500,
    ) as u64;
    let min_interval = parse_env_i64(
        ENV_MIN_INTERVAL_SECONDS,
        DEFAULT_MIN_INTERVAL_SECONDS,
        0,
        6 * 60 * 60,
    );
    let captcha_ttl = parse_env_i64(
        ENV_CAPTCHA_TTL_SECONDS,
        DEFAULT_CAPTCHA_TTL_SECONDS,
        30,
        24 * 60 * 60,
    );
    GuardConfig {
        rate_limit_window_seconds: window_seconds,
        rate_limit_max_per_window: max_per_window,
        min_interval_seconds: min_interval,
        blocked_keywords: parse_keywords(),
        captcha_ttl_seconds: captcha_ttl,
    }
}
/// Process-wide in-memory captcha store (token -> pending challenge),
/// lazily initialized on first use.
fn captcha_store() -> &'static Mutex<HashMap<String, CaptchaEntry>> {
    static STORE: OnceLock<Mutex<HashMap<String, CaptchaEntry>>> = OnceLock::new();
    STORE.get_or_init(|| Mutex::new(HashMap::new()))
}
/// Drops every captcha entry whose `expires_at` is at or before `now`.
fn cleanup_expired_captcha_entries(store: &mut HashMap<String, CaptchaEntry>, now: DateTime<Utc>) {
    // `retain` removes expired entries in a single in-place pass, avoiding the
    // intermediate Vec of cloned tokens the collect-then-remove approach built.
    store.retain(|_, entry| entry.expires_at > now);
}
pub fn normalize_matcher_type(value: &str) -> Option<&'static str> {
match value.trim().to_ascii_lowercase().as_str() {
MATCHER_TYPE_IP => Some(MATCHER_TYPE_IP),
MATCHER_TYPE_EMAIL => Some(MATCHER_TYPE_EMAIL),
MATCHER_TYPE_USER_AGENT | "ua" | "useragent" => Some(MATCHER_TYPE_USER_AGENT),
_ => None,
}
}
/// Normalizes a blacklist value according to its (normalized) matcher type;
/// returns `None` for unknown types or values that normalize to empty.
pub fn normalize_matcher_value(matcher_type: &str, raw_value: &str) -> Option<String> {
    match normalize_matcher_type(matcher_type)? {
        MATCHER_TYPE_IP => normalize_ip(Some(raw_value)),
        MATCHER_TYPE_EMAIL => normalize_email(Some(raw_value)),
        MATCHER_TYPE_USER_AGENT => normalize_user_agent(Some(raw_value)),
        // normalize_matcher_type only ever yields the three constants above.
        _ => None,
    }
}
/// Generates a simple arithmetic captcha, stores its answer keyed by a fresh
/// token (bound to the caller's IP when known), and returns the challenge.
pub fn create_captcha_challenge(client_ip: Option<&str>) -> Result<CommentCaptchaChallenge> {
    let config = load_config();
    // Derive two operands in 1..=9 from a single random UUID.
    let seed = Uuid::new_v4().as_u128();
    let left = ((seed % 9) + 1) as i64;
    let right = (((seed / 11) % 9) + 1) as i64;
    // Subtraction is only offered when the result stays positive.
    let subtract = seed % 2 == 0 && left > right;
    let (question, answer) = if subtract {
        (format!("{} - {} = ?", left, right), (left - right).to_string())
    } else {
        (format!("{} + {} = ?", left, right), (left + right).to_string())
    };
    let token = Uuid::new_v4().to_string();
    let now = Utc::now();
    let entry = CaptchaEntry {
        answer,
        expires_at: now + Duration::seconds(config.captcha_ttl_seconds),
        ip_address: normalize_ip(client_ip),
    };
    let mut store = captcha_store()
        .lock()
        .map_err(|_| Error::InternalServerError)?;
    // Opportunistically purge stale challenges while the lock is held.
    cleanup_expired_captcha_entries(&mut store, now);
    store.insert(token.clone(), entry);
    Ok(CommentCaptchaChallenge {
        token,
        question,
        expires_in_seconds: config.captcha_ttl_seconds,
    })
}
pub fn verify_captcha_solution(
captcha_token: Option<&str>,
captcha_answer: Option<&str>,
client_ip: Option<&str>,
) -> Result<()> {
let token = trim_to_option(captcha_token)
.ok_or_else(|| Error::BadRequest("请先完成验证码".to_string()))?;
let answer = trim_to_option(captcha_answer)
.ok_or_else(|| Error::BadRequest("请填写验证码答案".to_string()))?;
let now = Utc::now();
let normalized_ip = normalize_ip(client_ip);
let mut store = captcha_store()
.lock()
.map_err(|_| Error::InternalServerError)?;
cleanup_expired_captcha_entries(&mut store, now);
let Some(entry) = store.remove(&token) else {
return Err(Error::BadRequest("验证码已失效,请刷新后重试".to_string()));
};
if entry.expires_at <= now {
return Err(Error::BadRequest("验证码已过期,请刷新后重试".to_string()));
}
if entry
.ip_address
.as_deref()
.zip(normalized_ip.as_deref())
.is_some_and(|(expected, current)| expected != current)
{
return Err(Error::BadRequest(
"验证码校验失败,请刷新后重试".to_string(),
));
}
if entry.answer.trim() != answer.trim() {
return Err(Error::BadRequest("验证码答案错误".to_string()));
}
Ok(())
}
/// Rejects the submission when the requester's IP, email, or user-agent
/// matches an active, unexpired blacklist entry.
async fn check_blacklist(ctx: &AppContext, input: &CommentGuardInput<'_>) -> Result<()> {
    let now = Utc::now();
    // Each candidate pairs a matcher type with its normalized request value;
    // missing values are simply skipped.
    let candidates = [
        (MATCHER_TYPE_IP, normalize_ip(input.ip_address)),
        (MATCHER_TYPE_EMAIL, normalize_email(input.email)),
        (
            MATCHER_TYPE_USER_AGENT,
            normalize_user_agent(input.user_agent),
        ),
    ];
    for (matcher_type, matcher_value) in candidates {
        let Some(matcher_value) = matcher_value else {
            continue;
        };
        // A NULL `active` column is treated as active, and a NULL `expires_at`
        // means the entry never expires.
        let matched = comment_blacklist::Entity::find()
            .filter(comment_blacklist::Column::MatcherType.eq(matcher_type))
            .filter(comment_blacklist::Column::MatcherValue.eq(&matcher_value))
            .filter(
                Condition::any()
                    .add(comment_blacklist::Column::Active.is_null())
                    .add(comment_blacklist::Column::Active.eq(true)),
            )
            .filter(
                Condition::any()
                    .add(comment_blacklist::Column::ExpiresAt.is_null())
                    .add(comment_blacklist::Column::ExpiresAt.gt(now)),
            )
            .one(&ctx.db)
            .await?;
        if matched.is_some() {
            return Err(Error::BadRequest("评论请求已被拦截".to_string()));
        }
    }
    Ok(())
}
/// Enforces per-IP comment throttling: a rolling-window cap plus a minimum
/// interval between consecutive comments. A request without a usable IP is
/// allowed through unchecked.
async fn check_rate_limit(ctx: &AppContext, input: &CommentGuardInput<'_>) -> Result<()> {
    let config = load_config();
    let Some(ip_address) = normalize_ip(input.ip_address) else {
        return Ok(());
    };
    let now = Utc::now();
    let window_start = now - Duration::seconds(config.rate_limit_window_seconds);
    let recent_count = comments::Entity::find()
        .filter(comments::Column::IpAddress.eq(&ip_address))
        .filter(comments::Column::CreatedAt.gte(window_start))
        .count(&ctx.db)
        .await?;
    if recent_count >= config.rate_limit_max_per_window {
        return Err(Error::BadRequest("评论过于频繁,请稍后再试".to_string()));
    }
    // A non-positive interval disables the consecutive-comment check.
    if config.min_interval_seconds <= 0 {
        return Ok(());
    }
    let latest = comments::Entity::find()
        .filter(comments::Column::IpAddress.eq(&ip_address))
        .order_by_desc(comments::Column::CreatedAt)
        .one(&ctx.db)
        .await?;
    if let Some(last_comment) = latest {
        let last_created_at = DateTime::<Utc>::from(last_comment.created_at);
        let elapsed_seconds = now.signed_duration_since(last_created_at).num_seconds();
        if elapsed_seconds < config.min_interval_seconds {
            return Err(Error::BadRequest("提交过快,请稍后再试".to_string()));
        }
    }
    Ok(())
}
/// Returns the first configured blocked keyword found (case-insensitively) in
/// the author, email, or content fields; `None` when nothing matches or no
/// keywords are configured.
fn contains_blocked_keyword(input: &CommentGuardInput<'_>) -> Option<String> {
    let config = load_config();
    if config.blocked_keywords.is_empty() {
        return None;
    }
    // Concatenate the present fields, newline-separated, and compare lowercase.
    let mut haystack = String::new();
    for field in [input.author, input.email, input.content].into_iter().flatten() {
        haystack.push_str(field);
        haystack.push('\n');
    }
    let haystack = haystack.to_lowercase();
    config
        .blocked_keywords
        .into_iter()
        .find(|keyword| haystack.contains(keyword.as_str()))
}
/// Runs every anti-spam check against a comment submission, in order:
/// honeypot, captcha, keyword filter, blacklist lookup, then rate limiting.
/// Returns `Ok(())` only when all checks pass.
pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'_>) -> Result<()> {
    // The honeypot field is hidden from humans; any non-blank value rejects.
    if trim_to_option(input.honeypot_website).is_some() {
        return Err(Error::BadRequest("提交未通过校验".to_string()));
    }
    verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
    if contains_blocked_keyword(input).is_some() {
        return Err(Error::BadRequest("评论内容包含敏感关键词".to_string()));
    }
    check_blacklist(ctx, input).await?;
    check_rate_limit(ctx, input).await?;
    Ok(())
}

View File

@@ -1,3 +1,4 @@
use chrono::{DateTime, FixedOffset, NaiveDate, TimeZone, Utc};
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, QueryFilter,
@@ -12,6 +13,12 @@ use crate::models::_entities::{categories, comments, posts, tags};
pub const MARKDOWN_POSTS_DIR: &str = "content/posts";
const FIXTURE_POSTS_FILE: &str = "src/fixtures/posts.yaml";
pub const POST_STATUS_DRAFT: &str = "draft";
pub const POST_STATUS_PUBLISHED: &str = "published";
pub const POST_STATUS_OFFLINE: &str = "offline";
pub const POST_VISIBILITY_PUBLIC: &str = "public";
pub const POST_VISIBILITY_UNLISTED: &str = "unlisted";
pub const POST_VISIBILITY_PRIVATE: &str = "private";
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
struct MarkdownFrontmatter {
@@ -33,6 +40,16 @@ struct MarkdownFrontmatter {
pinned: Option<bool>,
published: Option<bool>,
draft: Option<bool>,
status: Option<String>,
visibility: Option<String>,
publish_at: Option<String>,
unpublish_at: Option<String>,
canonical_url: Option<String>,
noindex: Option<bool>,
og_image: Option<String>,
#[serde(default, deserialize_with = "deserialize_optional_string_list")]
redirect_from: Option<Vec<String>>,
redirect_to: Option<String>,
}
#[derive(Debug, Clone, Serialize)]
@@ -47,7 +64,15 @@ pub struct MarkdownPost {
pub image: Option<String>,
pub images: Vec<String>,
pub pinned: bool,
pub published: bool,
pub status: String,
pub visibility: String,
pub publish_at: Option<String>,
pub unpublish_at: Option<String>,
pub canonical_url: Option<String>,
pub noindex: bool,
pub og_image: Option<String>,
pub redirect_from: Vec<String>,
pub redirect_to: Option<String>,
pub file_path: String,
}
@@ -63,7 +88,15 @@ pub struct MarkdownPostDraft {
pub image: Option<String>,
pub images: Vec<String>,
pub pinned: bool,
pub published: bool,
pub status: String,
pub visibility: String,
pub publish_at: Option<String>,
pub unpublish_at: Option<String>,
pub canonical_url: Option<String>,
pub noindex: bool,
pub og_image: Option<String>,
pub redirect_from: Vec<String>,
pub redirect_to: Option<String>,
}
#[derive(Debug, Clone)]
@@ -124,6 +157,147 @@ fn normalize_string_list(values: Option<Vec<String>>) -> Vec<String> {
.collect()
}
/// Renders a string as a safely quoted YAML scalar (trailing newline trimmed).
///
/// Falls back to Rust debug quoting if YAML serialization fails.
fn yaml_scalar(value: &str) -> String {
    let rendered = match serde_yaml::to_string(value) {
        Ok(text) => text,
        Err(_) => format!("{value:?}"),
    };
    rendered.trim().to_string()
}
/// Cleans a frontmatter redirect list: strips surrounding slashes, drops
/// empties, and removes case-insensitive duplicates (first occurrence wins).
fn normalize_redirect_list(values: Option<Vec<String>>) -> Vec<String> {
    let mut seen = std::collections::HashSet::new();
    let mut result = Vec::new();
    for entry in normalize_string_list(values) {
        let trimmed = entry.trim_matches('/').to_string();
        if trimmed.is_empty() {
            continue;
        }
        // Dedup key is lowercased; the original casing is what gets kept.
        if seen.insert(trimmed.to_lowercase()) {
            result.push(trimmed);
        }
    }
    result
}
/// Trims a URL-ish value and drops any trailing slashes; blank input → `None`.
fn normalize_url_like(value: Option<String>) -> Option<String> {
    let trimmed = trim_to_option(value)?;
    Some(trimmed.trim_end_matches('/').to_string())
}
pub fn normalize_post_status(value: Option<&str>) -> String {
match value
.map(str::trim)
.unwrap_or_default()
.to_ascii_lowercase()
.as_str()
{
POST_STATUS_DRAFT => POST_STATUS_DRAFT.to_string(),
POST_STATUS_OFFLINE => POST_STATUS_OFFLINE.to_string(),
_ => POST_STATUS_PUBLISHED.to_string(),
}
}
pub fn normalize_post_visibility(value: Option<&str>) -> String {
match value
.map(str::trim)
.unwrap_or_default()
.to_ascii_lowercase()
.as_str()
{
POST_VISIBILITY_UNLISTED => POST_VISIBILITY_UNLISTED.to_string(),
POST_VISIBILITY_PRIVATE => POST_VISIBILITY_PRIVATE.to_string(),
_ => POST_VISIBILITY_PUBLIC.to_string(),
}
}
/// Parses a frontmatter date value into a fixed-offset datetime.
///
/// Accepts full RFC 3339 timestamps, or a bare `YYYY-MM-DD` date which is
/// interpreted as midnight UTC. Blank or unparsable input yields `None`.
fn parse_frontmatter_datetime(value: Option<String>) -> Option<DateTime<FixedOffset>> {
    let raw = trim_to_option(value)?;
    match DateTime::parse_from_rfc3339(&raw) {
        Ok(parsed) => Some(parsed),
        Err(_) => {
            // Fallback: date-only values become 00:00:00 at UTC (+00:00).
            let date = NaiveDate::parse_from_str(&raw, "%Y-%m-%d").ok()?;
            let naive = date.and_hms_opt(0, 0, 0)?;
            FixedOffset::east_opt(0)?.from_local_datetime(&naive).single()
        }
    }
}
/// Formats a datetime as an RFC 3339 string normalized to UTC.
pub fn format_frontmatter_datetime(value: Option<DateTime<FixedOffset>>) -> Option<String> {
    let instant = value?;
    Some(instant.with_timezone(&Utc).to_rfc3339())
}
/// Determines a post's status from frontmatter.
///
/// An explicit `status:` key wins. Otherwise the legacy flags decide:
/// `draft: true` or `published: false` → draft; the default is published.
fn resolve_post_status(frontmatter: &MarkdownFrontmatter) -> String {
    if let Some(status) = trim_to_option(frontmatter.status.clone()) {
        return normalize_post_status(Some(&status));
    }
    let is_draft =
        frontmatter.draft.unwrap_or(false) || !frontmatter.published.unwrap_or(true);
    if is_draft {
        POST_STATUS_DRAFT.to_string()
    } else {
        POST_STATUS_PUBLISHED.to_string()
    }
}
/// Computes the effective publication state of a post at a given instant.
///
/// Draft and offline are terminal and ignore the schedule. A published post
/// whose `publish_at` is still in the future is `"scheduled"`; one whose
/// `unpublish_at` has passed (inclusive) is `"expired"`; otherwise it is
/// `published`.
pub fn effective_post_state(
    status: &str,
    publish_at: Option<DateTime<FixedOffset>>,
    unpublish_at: Option<DateTime<FixedOffset>>,
    now: DateTime<FixedOffset>,
) -> String {
    match normalize_post_status(Some(status)).as_str() {
        POST_STATUS_DRAFT => POST_STATUS_DRAFT.to_string(),
        POST_STATUS_OFFLINE => POST_STATUS_OFFLINE.to_string(),
        _ => {
            let not_yet_published = matches!(publish_at, Some(value) if value > now);
            if not_yet_published {
                return "scheduled".to_string();
            }
            let already_expired = matches!(unpublish_at, Some(value) if value <= now);
            if already_expired {
                return "expired".to_string();
            }
            POST_STATUS_PUBLISHED.to_string()
        }
    }
}
/// Extracts the redirect-path list from a post's stored JSON column.
///
/// Non-array values (or `None`) yield an empty list. Each string entry has
/// surrounding slashes stripped; empty and non-string entries are dropped.
///
/// Iterates the array by reference instead of cloning the whole `Value`
/// array up front (the original `.cloned().unwrap_or_default()` deep-copied
/// every element just to read `&str`s out of them).
pub fn post_redirects_from_json(value: &Option<Value>) -> Vec<String> {
    value
        .as_ref()
        .and_then(Value::as_array)
        .map(|items| {
            items
                .iter()
                .filter_map(Value::as_str)
                .map(|item| item.trim_matches('/').to_string())
                .filter(|item| !item.is_empty())
                .collect()
        })
        .unwrap_or_default()
}
/// True when the post should appear in public listings: effectively
/// published right now AND visibility is `public`.
pub fn is_post_listed_publicly(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
    let state = effective_post_state(
        post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
        post.publish_at,
        post.unpublish_at,
        now,
    );
    let visibility = normalize_post_visibility(post.visibility.as_deref());
    state == POST_STATUS_PUBLISHED && visibility == POST_VISIBILITY_PUBLIC
}
/// True when the post can be fetched by anyone who has its URL: effectively
/// published right now AND not `private` (so `unlisted` still passes).
pub fn is_post_publicly_accessible(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
    let state = effective_post_state(
        post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
        post.publish_at,
        post.unpublish_at,
        now,
    );
    let visibility = normalize_post_visibility(post.visibility.as_deref());
    state == POST_STATUS_PUBLISHED && visibility != POST_VISIBILITY_PRIVATE
}
fn split_inline_list(value: &str) -> Vec<String> {
value
.split([',', ''])
@@ -268,7 +442,7 @@ fn parse_markdown_post(path: &Path) -> Result<MarkdownPost> {
parse_markdown_source(&file_stem, &raw, &path.to_string_lossy())
}
fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
let (frontmatter, content) = split_frontmatter(raw)?;
let slug = trim_to_option(frontmatter.slug.clone()).unwrap_or_else(|| file_stem.to_string());
@@ -282,6 +456,7 @@ fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<
.next();
let tags = frontmatter
.tags
.clone()
.unwrap_or_default()
.into_iter()
.map(|item| item.trim().to_string())
@@ -300,60 +475,90 @@ fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<
image: trim_to_option(frontmatter.image.clone()),
images: normalize_string_list(frontmatter.images.clone()),
pinned: frontmatter.pinned.unwrap_or(false),
published: frontmatter
.published
.unwrap_or(!frontmatter.draft.unwrap_or(false)),
status: resolve_post_status(&frontmatter),
visibility: normalize_post_visibility(frontmatter.visibility.as_deref()),
publish_at: format_frontmatter_datetime(parse_frontmatter_datetime(
frontmatter.publish_at.clone(),
)),
unpublish_at: format_frontmatter_datetime(parse_frontmatter_datetime(
frontmatter.unpublish_at.clone(),
)),
canonical_url: normalize_url_like(frontmatter.canonical_url.clone()),
noindex: frontmatter.noindex.unwrap_or(false),
og_image: normalize_url_like(frontmatter.og_image.clone()),
redirect_from: normalize_redirect_list(frontmatter.redirect_from.clone()),
redirect_to: trim_to_option(frontmatter.redirect_to.clone())
.map(|item| item.trim_matches('/').to_string()),
file_path: file_path.to_string(),
})
}
fn build_markdown_document(post: &MarkdownPost) -> String {
pub fn build_markdown_document(post: &MarkdownPost) -> String {
let mut lines = vec![
"---".to_string(),
format!(
"title: {}",
serde_yaml::to_string(&post.title)
.unwrap_or_else(|_| format!("{:?}", post.title))
.trim()
),
format!("slug: {}", post.slug),
format!("title: {}", yaml_scalar(&post.title)),
format!("slug: {}", yaml_scalar(&post.slug)),
];
if let Some(description) = &post.description {
lines.push(format!(
"description: {}",
serde_yaml::to_string(description)
.unwrap_or_else(|_| format!("{description:?}"))
.trim()
));
lines.push(format!("description: {}", yaml_scalar(description)));
}
if let Some(category) = &post.category {
lines.push(format!("category: {}", category));
lines.push(format!("category: {}", yaml_scalar(category)));
}
lines.push(format!("post_type: {}", post.post_type));
lines.push(format!("post_type: {}", yaml_scalar(&post.post_type)));
lines.push(format!("pinned: {}", post.pinned));
lines.push(format!("published: {}", post.published));
lines.push(format!("status: {}", yaml_scalar(&post.status)));
lines.push(format!("visibility: {}", yaml_scalar(&post.visibility)));
lines.push(format!("noindex: {}", post.noindex));
if let Some(publish_at) = &post.publish_at {
lines.push(format!("publish_at: {}", yaml_scalar(publish_at)));
}
if let Some(unpublish_at) = &post.unpublish_at {
lines.push(format!("unpublish_at: {}", yaml_scalar(unpublish_at)));
}
if let Some(image) = &post.image {
lines.push(format!("image: {}", image));
lines.push(format!("image: {}", yaml_scalar(image)));
}
if !post.images.is_empty() {
lines.push("images:".to_string());
for image in &post.images {
lines.push(format!(" - {}", image));
lines.push(format!(" - {}", yaml_scalar(image)));
}
}
if !post.tags.is_empty() {
lines.push("tags:".to_string());
for tag in &post.tags {
lines.push(format!(" - {}", tag));
lines.push(format!(" - {}", yaml_scalar(tag)));
}
}
if let Some(canonical_url) = &post.canonical_url {
lines.push(format!("canonical_url: {}", yaml_scalar(canonical_url)));
}
if let Some(og_image) = &post.og_image {
lines.push(format!("og_image: {}", yaml_scalar(og_image)));
}
if !post.redirect_from.is_empty() {
lines.push("redirect_from:".to_string());
for redirect in &post.redirect_from {
lines.push(format!(" - {}", yaml_scalar(redirect)));
}
}
if let Some(redirect_to) = &post.redirect_to {
lines.push(format!("redirect_to: {}", yaml_scalar(redirect_to)));
}
lines.push("---".to_string());
lines.push(String::new());
lines.push(post.content.trim().to_string());
@@ -390,7 +595,19 @@ fn ensure_markdown_posts_bootstrapped() -> Result<()> {
image: None,
images: Vec::new(),
pinned: fixture.pinned.unwrap_or(false),
published: fixture.published.unwrap_or(true),
status: if fixture.published.unwrap_or(true) {
POST_STATUS_PUBLISHED.to_string()
} else {
POST_STATUS_DRAFT.to_string()
},
visibility: POST_VISIBILITY_PUBLIC.to_string(),
publish_at: None,
unpublish_at: None,
canonical_url: None,
noindex: false,
og_image: None,
redirect_from: Vec::new(),
redirect_to: None,
file_path: markdown_post_path(&fixture.slug)
.to_string_lossy()
.to_string(),
@@ -799,6 +1016,27 @@ pub async fn sync_markdown_posts(ctx: &AppContext) -> Result<Vec<MarkdownPost>>
))
});
model.pinned = Set(Some(post.pinned));
model.status = Set(Some(normalize_post_status(Some(&post.status))));
model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
model.noindex = Set(Some(post.noindex));
model.og_image = Set(normalize_url_like(post.og_image.clone()));
model.redirect_from = Set(if post.redirect_from.is_empty() {
None
} else {
Some(Value::Array(
post.redirect_from
.iter()
.cloned()
.map(Value::String)
.collect::<Vec<_>>(),
))
});
model.redirect_to = Set(
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string()),
);
if has_existing {
let _ = model.update(&ctx.db).await;
@@ -895,7 +1133,16 @@ pub async fn create_markdown_post(
image: trim_to_option(draft.image),
images: normalize_string_list(Some(draft.images)),
pinned: draft.pinned,
published: draft.published,
status: normalize_post_status(Some(&draft.status)),
visibility: normalize_post_visibility(Some(&draft.visibility)),
publish_at: format_frontmatter_datetime(parse_frontmatter_datetime(draft.publish_at)),
unpublish_at: format_frontmatter_datetime(parse_frontmatter_datetime(draft.unpublish_at)),
canonical_url: normalize_url_like(draft.canonical_url),
noindex: draft.noindex,
og_image: normalize_url_like(draft.og_image),
redirect_from: normalize_redirect_list(Some(draft.redirect_from)),
redirect_to: trim_to_option(draft.redirect_to)
.map(|item| item.trim_matches('/').to_string()),
file_path: markdown_post_path(&slug).to_string_lossy().to_string(),
};

View File

@@ -1,4 +1,10 @@
pub mod admin_audit;
pub mod abuse_guard;
pub mod ai;
pub mod analytics;
pub mod comment_guard;
pub mod content;
pub mod notifications;
pub mod post_revisions;
pub mod storage;
pub mod subscriptions;

View File

@@ -0,0 +1,164 @@
use loco_rs::prelude::*;
use crate::{
controllers::site_settings,
models::_entities::{comments, friend_links},
services::subscriptions,
};
/// Trims the contained string; whitespace-only or absent input → `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let trimmed = value?.trim().to_string();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed)
    }
}
/// Produces a whitespace-collapsed excerpt of at most `limit` characters.
///
/// All runs of whitespace (including newlines) collapse to single spaces.
/// If the flattened text exceeds `limit` chars it is truncated and "..." is
/// appended. Empty/whitespace-only/absent input yields `None`.
fn excerpt(value: Option<&str>, limit: usize) -> Option<String> {
    let words: Vec<&str> = value?.split_whitespace().collect();
    if words.is_empty() {
        return None;
    }
    let flattened = words.join(" ");
    let total_chars = flattened.chars().count();
    // Truncate on char boundaries, not bytes, so CJK text stays valid.
    let mut shortened: String = flattened.chars().take(limit).collect();
    if total_chars > limit {
        shortened.push_str("...");
    }
    Some(shortened)
}
/// Fans out notifications for a newly created comment.
///
/// Queues the event for all active subscriptions, and — when the legacy
/// per-event webhook is enabled in site settings — additionally queues a
/// direct webhook delivery. Best-effort: every failure is logged via
/// `tracing::warn!` and swallowed, so comment creation never fails because
/// of notification plumbing.
///
/// Improvement over the previous version: the excerpt and the trimmed site
/// name/URL are each computed once and reused, instead of being recomputed
/// for the payload, the text body, and each delivery path.
pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
    let settings = match site_settings::load_current(ctx).await {
        Ok(settings) => settings,
        Err(error) => {
            tracing::warn!("failed to load site settings before comment notification: {error}");
            return;
        }
    };
    // Shared fragments, computed once for both delivery paths.
    let summary = excerpt(item.content.as_deref(), 200);
    let site_name = trim_to_option(settings.site_name.clone());
    let site_url = trim_to_option(settings.site_url.clone());
    let payload = serde_json::json!({
        "event_type": subscriptions::EVENT_COMMENT_CREATED,
        "id": item.id,
        "post_slug": item.post_slug,
        "author": item.author,
        "email": item.email,
        "scope": item.scope,
        "paragraph_key": item.paragraph_key,
        "approved": item.approved.unwrap_or(false),
        "excerpt": summary.as_deref(),
        "created_at": item.created_at.to_rfc3339(),
    });
    let text = format!(
        "收到一条新的评论。\n\n文章:{}\n作者:{}\n范围:{}\n状态:{}\n摘要:{}",
        item.post_slug.clone().unwrap_or_else(|| "未知文章".to_string()),
        item.author.clone().unwrap_or_else(|| "匿名".to_string()),
        item.scope,
        if item.approved.unwrap_or(false) { "已通过" } else { "待审核" },
        summary.unwrap_or_default(),
    );
    if let Err(error) = subscriptions::queue_event_for_active_subscriptions(
        ctx,
        subscriptions::EVENT_COMMENT_CREATED,
        "新评论通知",
        &text,
        payload.clone(),
        site_name.clone(),
        site_url.clone(),
    )
    .await
    {
        tracing::warn!("failed to queue comment subscription notification: {error}");
    }
    // Legacy single-webhook path, gated by a site-settings toggle.
    if settings.notification_comment_enabled.unwrap_or(false) {
        if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
            if let Err(error) = subscriptions::queue_direct_notification(
                ctx,
                subscriptions::CHANNEL_WEBHOOK,
                &target,
                subscriptions::EVENT_COMMENT_CREATED,
                "新评论通知",
                &text,
                payload,
                site_name,
                site_url,
            )
            .await
            {
                tracing::warn!("failed to queue legacy comment webhook notification: {error}");
            }
        }
    }
}
/// Fans out notifications for a newly submitted friend-link application.
///
/// Queues the event for all active subscriptions, and — when the legacy
/// per-event webhook is enabled in site settings — additionally queues a
/// direct webhook delivery. Best-effort: failures are logged and swallowed.
pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model) {
    let settings = match site_settings::load_current(ctx).await {
        Ok(settings) => settings,
        Err(error) => {
            tracing::warn!("failed to load site settings before friend-link notification: {error}");
            return;
        }
    };
    let payload = serde_json::json!({
        "event_type": subscriptions::EVENT_FRIEND_LINK_CREATED,
        "id": item.id,
        "site_name": item.site_name,
        "site_url": item.site_url,
        "category": item.category,
        "status": item.status,
        "description": item.description,
        "created_at": item.created_at.to_rfc3339(),
    });
    let text = format!(
        "收到新的友链申请。\n\n站点:{}\n链接:{}\n分类:{}\n状态:{}\n描述:{}",
        item.site_name.clone().unwrap_or_else(|| "未命名站点".to_string()),
        item.site_url,
        item.category.clone().unwrap_or_else(|| "未分类".to_string()),
        item.status.clone().unwrap_or_else(|| "pending".to_string()),
        item.description.clone().unwrap_or_else(|| "".to_string()),
    );
    let site_name = trim_to_option(settings.site_name.clone());
    let site_url = trim_to_option(settings.site_url.clone());
    if let Err(error) = subscriptions::queue_event_for_active_subscriptions(
        ctx,
        subscriptions::EVENT_FRIEND_LINK_CREATED,
        "新友链申请通知",
        &text,
        payload.clone(),
        site_name.clone(),
        site_url.clone(),
    )
    .await
    {
        tracing::warn!("failed to queue friend-link subscription notification: {error}");
    }
    // Legacy single-webhook path: requires both the toggle and a target URL.
    if !settings.notification_friend_link_enabled.unwrap_or(false) {
        return;
    }
    let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) else {
        return;
    };
    if let Err(error) = subscriptions::queue_direct_notification(
        ctx,
        subscriptions::CHANNEL_WEBHOOK,
        &target,
        subscriptions::EVENT_FRIEND_LINK_CREATED,
        "新友链申请通知",
        &text,
        payload,
        site_name,
        site_url,
    )
    .await
    {
        tracing::warn!("failed to queue legacy friend-link webhook notification: {error}");
    }
}

View File

@@ -0,0 +1,247 @@
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, Order, QueryFilter, QueryOrder, QuerySelect, Set,
};
use std::fs;
use crate::{
controllers::admin::AdminIdentity,
models::_entities::{post_revisions, posts},
services::content,
};
/// How much of a revision to apply when restoring a post.
#[derive(Clone, Copy, Debug)]
pub enum RestoreMode {
    /// Replace the whole document with the revision snapshot.
    Full,
    /// Keep current frontmatter, restore only the markdown body.
    Markdown,
    /// Keep current body, restore only the frontmatter metadata.
    Metadata,
}

impl RestoreMode {
    /// Parses a user-supplied mode string (case/whitespace-insensitive).
    /// Unknown values default to `Full`.
    pub fn parse(value: &str) -> Self {
        let normalized = value.trim().to_ascii_lowercase();
        if matches!(normalized.as_str(), "markdown" | "content" | "body") {
            Self::Markdown
        } else if matches!(normalized.as_str(), "metadata" | "frontmatter") {
            Self::Metadata
        } else {
            Self::Full
        }
    }

    /// Canonical name used in audit metadata.
    pub fn as_str(self) -> &'static str {
        match self {
            Self::Full => "full",
            Self::Markdown => "markdown",
            Self::Metadata => "metadata",
        }
    }
}
/// Trims the contained string; whitespace-only or absent input → `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value
        .map(|item| item.trim().to_string())
        .filter(|item| !item.is_empty())
}
/// Derives a display title from a markdown document.
///
/// Priority: an explicit `title:` key in the leading YAML frontmatter,
/// then the first `# ` heading anywhere in the text, then the slug itself
/// (unless blank, in which case `None`).
fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
    let normalized = markdown.replace("\r\n", "\n");
    // 1. Explicit `title:` in the frontmatter block, if one exists.
    let frontmatter = normalized
        .strip_prefix("---\n")
        .and_then(|rest| rest.split_once("\n---\n"))
        .map(|(frontmatter, _)| frontmatter);
    if let Some(frontmatter) = frontmatter {
        for line in frontmatter.lines() {
            if let Some(raw) = line.trim().strip_prefix("title:") {
                // Strip optional surrounding quotes (double or single).
                let title = raw.trim().trim_matches('"').trim_matches('\'').trim();
                if !title.is_empty() {
                    return Some(title.to_string());
                }
            }
        }
    }
    // 2. First markdown H1 heading.
    let heading = normalized.lines().find_map(|line| {
        line.trim()
            .strip_prefix("# ")
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .map(ToString::to_string)
    });
    // 3. Fall back to the (trimmed, non-empty) slug.
    heading.or_else(|| {
        let fallback = slug.trim();
        if fallback.is_empty() {
            None
        } else {
            Some(fallback.to_string())
        }
    })
}
/// Fetches a post's stored title by slug, best-effort.
///
/// DB errors, missing rows, and blank titles all map to `None` on purpose:
/// callers have a markdown-derived fallback.
async fn lookup_post_title(ctx: &AppContext, slug: &str) -> Option<String> {
    let post = posts::Entity::find()
        .filter(posts::Column::Slug.eq(slug))
        .one(&ctx.db)
        .await
        .ok()
        .flatten()?;
    trim_to_option(post.title)
}
/// Persists a revision row snapshotting the given markdown for `slug`.
///
/// The revision title prefers the live DB title; when the post isn't in the
/// DB (or has no title) it is derived from the snapshot itself. Line endings
/// are normalized to `\n` before storage. Actor fields are recorded when an
/// admin identity is supplied.
pub async fn capture_snapshot_from_markdown(
    ctx: &AppContext,
    actor: Option<&AdminIdentity>,
    slug: &str,
    markdown: &str,
    operation: &str,
    reason: Option<&str>,
    metadata: Option<serde_json::Value>,
) -> Result<post_revisions::Model> {
    let post_title = match lookup_post_title(ctx, slug).await {
        Some(title) => Some(title),
        None => title_from_markdown(markdown, slug),
    };
    let normalized_markdown = markdown.replace("\r\n", "\n");
    let revision = post_revisions::ActiveModel {
        post_slug: Set(slug.to_string()),
        post_title: Set(post_title),
        operation: Set(operation.to_string()),
        revision_reason: Set(reason.map(ToString::to_string)),
        actor_username: Set(actor.map(|identity| identity.username.clone())),
        actor_email: Set(actor.and_then(|identity| identity.email.clone())),
        actor_source: Set(actor.map(|identity| identity.source.clone())),
        markdown: Set(Some(normalized_markdown)),
        metadata: Set(metadata),
        ..Default::default()
    };
    revision.insert(&ctx.db).await.map_err(Into::into)
}
/// Snapshots the post's current on-disk markdown, if the file exists.
///
/// Returns `Ok(None)` when there is no current document to snapshot
/// (not an error: e.g. the first save of a brand-new post).
pub async fn capture_current_snapshot(
    ctx: &AppContext,
    actor: Option<&AdminIdentity>,
    slug: &str,
    operation: &str,
    reason: Option<&str>,
    metadata: Option<serde_json::Value>,
) -> Result<Option<post_revisions::Model>> {
    match content::read_markdown_document(slug) {
        Ok((_path, markdown)) => {
            let snapshot = capture_snapshot_from_markdown(
                ctx, actor, slug, &markdown, operation, reason, metadata,
            )
            .await?;
            Ok(Some(snapshot))
        }
        Err(_) => Ok(None),
    }
}
/// Lists revisions, newest first, capped at `limit`.
///
/// When `slug` is provided and non-blank, only that post's revisions are
/// returned; otherwise revisions across all posts.
pub async fn list_revisions(
    ctx: &AppContext,
    slug: Option<&str>,
    limit: u64,
) -> Result<Vec<post_revisions::Model>> {
    let slug_filter = slug.map(str::trim).filter(|value| !value.is_empty());
    let mut query =
        post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
    if let Some(slug) = slug_filter {
        query = query.filter(post_revisions::Column::PostSlug.eq(slug));
    }
    let rows = query.limit(limit).all(&ctx.db).await?;
    Ok(rows)
}
pub async fn get_revision(ctx: &AppContext, id: i32) -> Result<post_revisions::Model> {
post_revisions::Entity::find_by_id(id)
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)
}
/// Restores a post's markdown file from a stored revision.
///
/// Steps, in order:
/// 1. Load the revision and require a non-empty markdown snapshot.
/// 2. Snapshot the CURRENT on-disk document ("restore_backup") so the
///    restore itself is reversible.
/// 3. Build the new document: either the snapshot verbatim (`full`), or a
///    merge of snapshot body / snapshot metadata onto the current document
///    (`markdown` / `metadata` — both require the current file to exist).
/// 4. Write the file (line endings normalized to `\n`), re-sync posts into
///    the DB, and record a "restore" revision of what was written.
///
/// Returns the revision that was restored from. Errors surface as
/// `BadRequest` (user-correctable) or propagate from the DB/sync layers.
pub async fn restore_revision(
    ctx: &AppContext,
    actor: Option<&AdminIdentity>,
    revision_id: i32,
    mode: &str,
) -> Result<post_revisions::Model> {
    let revision = get_revision(ctx, revision_id).await?;
    let slug = revision.post_slug.clone();
    // A revision without a markdown snapshot cannot be restored.
    let revision_markdown = revision
        .markdown
        .clone()
        .filter(|value| !value.trim().is_empty())
        .ok_or_else(|| Error::BadRequest("该版本没有可恢复的 Markdown 快照".to_string()))?;
    let restore_mode = RestoreMode::parse(mode);
    // Safety net: snapshot the current file before overwriting it.
    // The Option result is intentionally discarded; a missing current file
    // is fine (capture_current_snapshot returns Ok(None)).
    let _ = capture_current_snapshot(
        ctx,
        actor,
        &slug,
        "restore_backup",
        Some("恢复前自动备份"),
        Some(serde_json::json!({
            "source_revision_id": revision_id,
            "mode": restore_mode.as_str(),
        })),
    )
    .await?;
    let markdown = match restore_mode {
        RestoreMode::Full => revision_markdown.clone(),
        RestoreMode::Markdown | RestoreMode::Metadata => {
            // Partial restores merge into the current document, so it must exist.
            let (_path, current_markdown) = content::read_markdown_document(&slug).map_err(|_| {
                Error::BadRequest("当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string())
            })?;
            let revision_post = content::parse_markdown_source(
                &slug,
                &revision_markdown,
                &content::markdown_post_path(&slug).to_string_lossy(),
            )?;
            let current_post = content::parse_markdown_source(
                &slug,
                &current_markdown,
                &content::markdown_post_path(&slug).to_string_lossy(),
            )?;
            let mut merged = current_post.clone();
            match restore_mode {
                RestoreMode::Markdown => {
                    // Body only; current frontmatter stays.
                    merged.content = revision_post.content;
                }
                RestoreMode::Metadata => {
                    // Frontmatter only; current body stays.
                    merged.title = revision_post.title;
                    merged.description = revision_post.description;
                    merged.category = revision_post.category;
                    merged.tags = revision_post.tags;
                    merged.post_type = revision_post.post_type;
                    merged.image = revision_post.image;
                    merged.images = revision_post.images;
                    merged.pinned = revision_post.pinned;
                    merged.status = revision_post.status;
                    merged.visibility = revision_post.visibility;
                    merged.publish_at = revision_post.publish_at;
                    merged.unpublish_at = revision_post.unpublish_at;
                    merged.canonical_url = revision_post.canonical_url;
                    merged.noindex = revision_post.noindex;
                    merged.og_image = revision_post.og_image;
                    merged.redirect_from = revision_post.redirect_from;
                    merged.redirect_to = revision_post.redirect_to;
                }
                // Full is handled by the outer match arm above.
                RestoreMode::Full => unreachable!(),
            }
            content::build_markdown_document(&merged)
        }
    };
    fs::create_dir_all(content::MARKDOWN_POSTS_DIR)
        .map_err(|error| Error::BadRequest(error.to_string()))?;
    fs::write(content::markdown_post_path(&slug), markdown.replace("\r\n", "\n"))
        .map_err(|error| Error::BadRequest(error.to_string()))?;
    // Propagate the restored file into the posts table.
    content::sync_markdown_posts(ctx).await?;
    // Audit trail: record what was actually written.
    let _ = capture_snapshot_from_markdown(
        ctx,
        actor,
        &slug,
        &markdown,
        "restore",
        Some("通过版本历史恢复"),
        Some(serde_json::json!({
            "source_revision_id": revision_id,
            "mode": restore_mode.as_str(),
        })),
    )
    .await?;
    Ok(revision)
}

File diff suppressed because it is too large Load Diff

View File

@@ -1 +1,3 @@
pub mod retry_deliveries;
pub mod send_monthly_digest;
pub mod send_weekly_digest;

View File

@@ -0,0 +1,26 @@
use loco_rs::prelude::*;
use crate::services::subscriptions;
/// CLI/scheduler task that re-enqueues notification deliveries whose retry
/// time has come due.
pub struct RetryDeliveries;

#[async_trait]
impl Task for RetryDeliveries {
    fn task(&self) -> TaskInfo {
        TaskInfo {
            name: "retry_deliveries".to_string(),
            detail: "enqueue due notification deliveries for retry".to_string(),
        }
    }

    async fn run(&self, app_context: &AppContext, vars: &task::Vars) -> Result<()> {
        // Optional CLI var `limit=<n>`; missing or unparsable → 200.
        let limit = match vars.cli.get("limit") {
            Some(raw) => raw.parse::<u64>().unwrap_or(200),
            None => 200,
        };
        let queued = subscriptions::retry_due_deliveries(app_context, limit).await?;
        tracing::info!("retry_deliveries queued {queued} jobs");
        Ok(())
    }
}

View File

@@ -0,0 +1,26 @@
use loco_rs::prelude::*;
use crate::services::subscriptions;
/// Scheduler task that queues the monthly digest for subscribers.
pub struct SendMonthlyDigest;

#[async_trait]
impl Task for SendMonthlyDigest {
    fn task(&self) -> TaskInfo {
        TaskInfo {
            name: String::from("send_monthly_digest"),
            detail: String::from("queue monthly digest notifications"),
        }
    }

    async fn run(&self, app_context: &AppContext, _vars: &task::Vars) -> Result<()> {
        let summary = subscriptions::send_digest(app_context, "monthly").await?;
        // Log the outcome so operators can see digest fan-out counts.
        tracing::info!(
            "send_monthly_digest queued={} skipped={} posts={}",
            summary.queued,
            summary.skipped,
            summary.post_count
        );
        Ok(())
    }
}

View File

@@ -0,0 +1,26 @@
use loco_rs::prelude::*;
use crate::services::subscriptions;
/// Scheduler task that queues the weekly digest for subscribers.
pub struct SendWeeklyDigest;

#[async_trait]
impl Task for SendWeeklyDigest {
    fn task(&self) -> TaskInfo {
        TaskInfo {
            name: String::from("send_weekly_digest"),
            detail: String::from("queue weekly digest notifications"),
        }
    }

    async fn run(&self, app_context: &AppContext, _vars: &task::Vars) -> Result<()> {
        let summary = subscriptions::send_digest(app_context, "weekly").await?;
        // Log the outcome so operators can see digest fan-out counts.
        tracing::info!(
            "send_weekly_digest queued={} skipped={} posts={}",
            summary.queued,
            summary.skipped,
            summary.post_count
        );
        Ok(())
    }
}

View File

@@ -1 +1,2 @@
pub mod downloader;
pub mod notification_delivery;

View File

@@ -0,0 +1,28 @@
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use crate::services::subscriptions;
/// Background worker that processes one queued notification delivery per job.
pub struct NotificationDeliveryWorker {
    pub ctx: AppContext,
}

/// Job payload: the primary key of the delivery row to process.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct NotificationDeliveryWorkerArgs {
    pub delivery_id: i32,
}

#[async_trait]
impl BackgroundWorker<NotificationDeliveryWorkerArgs> for NotificationDeliveryWorker {
    fn build(ctx: &AppContext) -> Self {
        let ctx = ctx.clone();
        Self { ctx }
    }

    fn tags() -> Vec<String> {
        vec![String::from("notifications")]
    }

    async fn perform(&self, args: NotificationDeliveryWorkerArgs) -> Result<()> {
        let NotificationDeliveryWorkerArgs { delivery_id } = args;
        subscriptions::process_delivery(&self.ctx, delivery_id).await
    }
}