feat: ship blog platform admin and deploy stack
This commit is contained in:
210
backend/src/services/abuse_guard.rs
Normal file
210
backend/src/services/abuse_guard.rs
Normal file
@@ -0,0 +1,210 @@
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{Mutex, OnceLock},
|
||||
};
|
||||
|
||||
use axum::http::{header, HeaderMap, StatusCode};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use loco_rs::{
|
||||
controller::ErrorDetail,
|
||||
prelude::*,
|
||||
};
|
||||
|
||||
/// Built-in rate-limit tuning used when no environment override is set:
/// a 5-minute counting window, 45 requests per window, a 30-minute ban,
/// and a short burst window of 8 requests per 30 seconds.
const DEFAULT_WINDOW_SECONDS: i64 = 5 * 60;
const DEFAULT_MAX_REQUESTS_PER_WINDOW: u32 = 45;
const DEFAULT_BAN_MINUTES: i64 = 30;
const DEFAULT_BURST_LIMIT: u32 = 8;
const DEFAULT_BURST_WINDOW_SECONDS: i64 = 30;

/// Environment variable names that override the defaults above at
/// process start (read on every request by `load_config`).
const ENV_WINDOW_SECONDS: &str = "TERMI_PUBLIC_RATE_LIMIT_WINDOW_SECONDS";
const ENV_MAX_REQUESTS_PER_WINDOW: &str = "TERMI_PUBLIC_RATE_LIMIT_MAX";
const ENV_BAN_MINUTES: &str = "TERMI_PUBLIC_RATE_LIMIT_BAN_MINUTES";
const ENV_BURST_LIMIT: &str = "TERMI_PUBLIC_RATE_LIMIT_BURST_MAX";
const ENV_BURST_WINDOW_SECONDS: &str = "TERMI_PUBLIC_RATE_LIMIT_BURST_WINDOW_SECONDS";
|
||||
|
||||
/// Effective rate-limit configuration, resolved from environment
/// overrides with compile-time defaults as fallback.
#[derive(Clone, Debug)]
struct AbuseGuardConfig {
    /// Length of the long counting window, in seconds.
    window_seconds: i64,
    /// Requests allowed inside one long window before a ban is issued.
    max_requests_per_window: u32,
    /// How long an offending key stays banned, in minutes.
    ban_minutes: i64,
    /// Requests allowed inside one short burst window before a ban.
    burst_limit: u32,
    /// Length of the short burst window, in seconds.
    burst_window_seconds: i64,
}
|
||||
|
||||
/// Per-key (scope + IP [+ fingerprint]) rate-limit state kept in the
/// in-memory store.
#[derive(Clone, Debug)]
struct AbuseGuardEntry {
    /// Start of the current long counting window.
    window_started_at: DateTime<Utc>,
    /// Requests seen inside the current long window.
    request_count: u32,
    /// Start of the current short burst window.
    burst_window_started_at: DateTime<Utc>,
    /// Requests seen inside the current burst window.
    burst_count: u32,
    /// When set and in the future, all requests for this key are rejected.
    banned_until: Option<DateTime<Utc>>,
    /// Why the last ban was issued ("burst_limit" or "window_limit").
    last_reason: Option<String>,
}
|
||||
|
||||
/// Read an `i64` from the environment variable `name`, clamped to
/// `[min, max]`; returns `fallback` when the variable is unset,
/// non-UTF-8, or does not parse as an integer after trimming.
fn parse_env_i64(name: &str, fallback: i64, min: i64, max: i64) -> i64 {
    match std::env::var(name) {
        Ok(raw) => match raw.trim().parse::<i64>() {
            Ok(parsed) => parsed.clamp(min, max),
            Err(_) => fallback,
        },
        Err(_) => fallback,
    }
}
|
||||
|
||||
/// Read a `u32` from the environment variable `name`, clamped to
/// `[min, max]`; returns `fallback` when the variable is unset,
/// non-UTF-8, or does not parse as an unsigned integer after trimming.
fn parse_env_u32(name: &str, fallback: u32, min: u32, max: u32) -> u32 {
    let Ok(raw) = std::env::var(name) else {
        return fallback;
    };
    raw.trim()
        .parse::<u32>()
        .map_or(fallback, |parsed| parsed.clamp(min, max))
}
|
||||
|
||||
fn load_config() -> AbuseGuardConfig {
|
||||
AbuseGuardConfig {
|
||||
window_seconds: parse_env_i64(ENV_WINDOW_SECONDS, DEFAULT_WINDOW_SECONDS, 10, 24 * 60 * 60),
|
||||
max_requests_per_window: parse_env_u32(
|
||||
ENV_MAX_REQUESTS_PER_WINDOW,
|
||||
DEFAULT_MAX_REQUESTS_PER_WINDOW,
|
||||
1,
|
||||
50_000,
|
||||
),
|
||||
ban_minutes: parse_env_i64(ENV_BAN_MINUTES, DEFAULT_BAN_MINUTES, 1, 7 * 24 * 60),
|
||||
burst_limit: parse_env_u32(ENV_BURST_LIMIT, DEFAULT_BURST_LIMIT, 1, 1_000),
|
||||
burst_window_seconds: parse_env_i64(
|
||||
ENV_BURST_WINDOW_SECONDS,
|
||||
DEFAULT_BURST_WINDOW_SECONDS,
|
||||
5,
|
||||
60 * 60,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
/// Trim an optional string, drop it when empty, and cap it at
/// `max_chars` characters (not bytes, so multi-byte text stays valid).
fn normalize_token(value: Option<&str>, max_chars: usize) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        return None;
    }
    Some(trimmed.chars().take(max_chars).collect())
}
|
||||
|
||||
/// Normalize a client-IP-ish string: trim, drop empties, and cap at
/// 96 characters as a defensive bound on attacker-supplied headers.
fn normalize_ip(value: Option<&str>) -> Option<String> {
    normalize_token(value, 96)
}
|
||||
|
||||
/// Borrow a header's value as UTF-8 text; `None` when the header is
/// absent or its bytes are not valid visible ASCII/UTF-8.
pub fn header_value<'a>(headers: &'a HeaderMap, key: header::HeaderName) -> Option<&'a str> {
    let raw = headers.get(key)?;
    raw.to_str().ok()
}
|
||||
|
||||
/// Pick the first non-empty entry from a comma-separated
/// `X-Forwarded-For` list (the left-most hop, i.e. the original client).
fn first_forwarded_ip(value: &str) -> Option<&str> {
    for candidate in value.split(',') {
        let candidate = candidate.trim();
        if !candidate.is_empty() {
            return Some(candidate);
        }
    }
    None
}
|
||||
|
||||
pub fn detect_client_ip(headers: &HeaderMap) -> Option<String> {
|
||||
let forwarded = header_value(headers, header::HeaderName::from_static("x-forwarded-for"))
|
||||
.and_then(first_forwarded_ip);
|
||||
let real_ip = header_value(headers, header::HeaderName::from_static("x-real-ip"));
|
||||
let cf_connecting_ip =
|
||||
header_value(headers, header::HeaderName::from_static("cf-connecting-ip"));
|
||||
let true_client_ip = header_value(headers, header::HeaderName::from_static("true-client-ip"));
|
||||
|
||||
normalize_ip(
|
||||
forwarded
|
||||
.or(real_ip)
|
||||
.or(cf_connecting_ip)
|
||||
.or(true_client_ip),
|
||||
)
|
||||
}
|
||||
|
||||
/// Process-global, lazily-initialised map of rate-limit entries, keyed
/// by `make_key` output. Guarded by a `Mutex` because request handlers
/// on different threads share it. State is lost on restart by design.
fn abuse_store() -> &'static Mutex<HashMap<String, AbuseGuardEntry>> {
    static STORE: OnceLock<Mutex<HashMap<String, AbuseGuardEntry>>> = OnceLock::new();
    STORE.get_or_init(|| Mutex::new(HashMap::new()))
}
|
||||
|
||||
fn make_key(scope: &str, client_ip: Option<&str>, fingerprint: Option<&str>) -> String {
|
||||
let normalized_scope = scope.trim().to_ascii_lowercase();
|
||||
let normalized_ip = normalize_ip(client_ip).unwrap_or_else(|| "unknown".to_string());
|
||||
let normalized_fingerprint = normalize_token(fingerprint, 160).unwrap_or_default();
|
||||
if normalized_fingerprint.is_empty() {
|
||||
format!("{normalized_scope}:{normalized_ip}")
|
||||
} else {
|
||||
format!("{normalized_scope}:{normalized_ip}:{normalized_fingerprint}")
|
||||
}
|
||||
}
|
||||
|
||||
/// Build a 429 (Too Many Requests) framework error carrying the given
/// user-facing message under the stable machine-readable code
/// `"rate_limited"`.
fn too_many_requests(message: impl Into<String>) -> Error {
    let message = message.into();
    Error::CustomError(
        StatusCode::TOO_MANY_REQUESTS,
        ErrorDetail::new("rate_limited".to_string(), message),
    )
}
|
||||
|
||||
/// Rate-limit gate for public endpoints. Counts this request against the
/// key derived from `scope` + `client_ip` [+ `fingerprint`], and returns
/// a 429 error when the key is currently banned or when this request
/// trips the burst or long-window limit (which also issues a new ban).
///
/// # Errors
/// `Error::CustomError(429, ...)` on limit/ban, `Error::InternalServerError`
/// when the store mutex is poisoned; `Ok(())` otherwise.
pub fn enforce_public_scope(
    scope: &str,
    client_ip: Option<&str>,
    fingerprint: Option<&str>,
) -> Result<()> {
    // Config is re-read from the environment on every call, so overrides
    // take effect without a restart.
    let config = load_config();
    let key = make_key(scope, client_ip, fingerprint);
    let now = Utc::now();
    let mut store = abuse_store()
        .lock()
        .map_err(|_| Error::InternalServerError)?;

    // Opportunistic garbage collection: drop entries whose ban (or, if
    // never banned, last window start) is more than a day old. Runs on
    // every request, which bounds memory at the cost of an O(n) sweep.
    store.retain(|_, entry| {
        entry
            .banned_until
            .map(|until| until > now - Duration::days(1))
            .unwrap_or_else(|| entry.window_started_at > now - Duration::days(1))
    });

    let entry = store.entry(key).or_insert_with(|| AbuseGuardEntry {
        window_started_at: now,
        request_count: 0,
        burst_window_started_at: now,
        burst_count: 0,
        banned_until: None,
        last_reason: None,
    });

    // Active ban short-circuits everything; an expired ban is cleared so
    // the counters below get a fresh chance.
    if let Some(banned_until) = entry.banned_until {
        if banned_until > now {
            let retry_after = (banned_until - now).num_minutes().max(1);
            return Err(too_many_requests(format!(
                "请求过于频繁,请在 {retry_after} 分钟后重试"
            )));
        }
        entry.banned_until = None;
    }

    // Reset either window when it has fully elapsed (fixed windows, not
    // sliding: counts restart from zero at each rollover).
    if entry.window_started_at + Duration::seconds(config.window_seconds) <= now {
        entry.window_started_at = now;
        entry.request_count = 0;
    }

    if entry.burst_window_started_at + Duration::seconds(config.burst_window_seconds) <= now {
        entry.burst_window_started_at = now;
        entry.burst_count = 0;
    }

    // Count this request in both windows before checking the limits.
    entry.request_count += 1;
    entry.burst_count += 1;

    // Burst check first: a tight spike bans immediately even if the long
    // window still has headroom.
    if entry.burst_count > config.burst_limit {
        entry.banned_until = Some(now + Duration::minutes(config.ban_minutes));
        entry.last_reason = Some("burst_limit".to_string());
        return Err(too_many_requests("短时间请求过多,已临时封禁,请稍后再试"));
    }

    if entry.request_count > config.max_requests_per_window {
        entry.banned_until = Some(now + Duration::minutes(config.ban_minutes));
        entry.last_reason = Some("window_limit".to_string());
        return Err(too_many_requests("请求过于频繁,已临时封禁,请稍后再试"));
    }

    Ok(())
}
|
||||
33
backend/src/services/admin_audit.rs
Normal file
33
backend/src/services/admin_audit.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{ActiveModelTrait, Set};
|
||||
|
||||
use crate::{
|
||||
controllers::admin::AdminIdentity,
|
||||
models::_entities::admin_audit_logs,
|
||||
};
|
||||
|
||||
/// Persist one admin audit-log row recording that `actor` (or an
/// anonymous/system actor when `None`) performed `action` against the
/// target identified by `target_type` / `target_id` / `target_label`.
///
/// `metadata` is stored verbatim as JSON. Remaining columns (id,
/// timestamps) come from the entity's `Default` — presumably filled by
/// the ORM's insert conventions; verify against the migration.
///
/// # Errors
/// Propagates the database error when the insert fails.
pub async fn log_event(
    ctx: &AppContext,
    actor: Option<&AdminIdentity>,
    action: &str,
    target_type: &str,
    target_id: Option<String>,
    target_label: Option<String>,
    metadata: Option<serde_json::Value>,
) -> Result<()> {
    admin_audit_logs::ActiveModel {
        actor_username: Set(actor.map(|item| item.username.clone())),
        actor_email: Set(actor.and_then(|item| item.email.clone())),
        actor_source: Set(actor.map(|item| item.source.clone())),
        action: Set(action.to_string()),
        target_type: Set(target_type.to_string()),
        target_id: Set(target_id),
        target_label: Set(target_label),
        metadata: Set(metadata),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;

    Ok(())
}
|
||||
@@ -552,8 +552,22 @@ fn build_source_chunks(
|
||||
|
||||
fn build_chunks(posts: &[content::MarkdownPost], chunk_size: usize) -> Vec<ChunkDraft> {
|
||||
let mut chunks = Vec::new();
|
||||
let now = chrono::Utc::now().fixed_offset();
|
||||
|
||||
for post in posts.iter().filter(|post| post.published) {
|
||||
for post in posts.iter().filter(|post| {
|
||||
content::effective_post_state(
|
||||
&post.status,
|
||||
post.publish_at
|
||||
.clone()
|
||||
.and_then(|value| chrono::DateTime::parse_from_rfc3339(&value).ok()),
|
||||
post.unpublish_at
|
||||
.clone()
|
||||
.and_then(|value| chrono::DateTime::parse_from_rfc3339(&value).ok()),
|
||||
now,
|
||||
) == content::POST_STATUS_PUBLISHED
|
||||
&& content::normalize_post_visibility(Some(&post.visibility))
|
||||
!= content::POST_VISIBILITY_PRIVATE
|
||||
}) {
|
||||
let mut sections = Vec::new();
|
||||
sections.push(format!("# {}", post.title));
|
||||
if let Some(description) = post
|
||||
@@ -2714,6 +2728,30 @@ pub async fn answer_question(ctx: &AppContext, question: &str) -> Result<AiAnswe
|
||||
})
|
||||
}
|
||||
|
||||
/// Run a one-shot chat completion against the admin-configured AI
/// provider, using `system_prompt` and `prompt` (both trimmed).
///
/// # Errors
/// Returns `Error::BadRequest` when the AI API base or key has not been
/// configured in the admin settings, and propagates any failure from
/// `request_chat_answer` (provider/transport errors).
pub async fn admin_chat_completion(
    ctx: &AppContext,
    system_prompt: &str,
    prompt: &str,
) -> Result<String> {
    // `false`: do not use cached settings — NOTE(review): inferred from
    // the call shape; confirm the flag's meaning in load_runtime_settings.
    let settings = load_runtime_settings(ctx, false).await?;
    let api_base = settings
        .api_base
        .ok_or_else(|| Error::BadRequest("请先在后台配置 AI API Base".to_string()))?;
    let api_key = settings
        .api_key
        .ok_or_else(|| Error::BadRequest("请先在后台配置 AI API Key".to_string()))?;

    request_chat_answer(&AiProviderRequest {
        provider: settings.provider,
        api_base,
        api_key,
        chat_model: settings.chat_model,
        system_prompt: system_prompt.trim().to_string(),
        prompt: prompt.trim().to_string(),
    })
    .await
}
|
||||
|
||||
pub fn provider_name(value: Option<&str>) -> String {
|
||||
trim_to_option(value.map(ToString::to_string))
|
||||
.unwrap_or_else(|| DEFAULT_AI_PROVIDER.to_string())
|
||||
|
||||
@@ -9,10 +9,13 @@ use sea_orm::{
|
||||
};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::models::_entities::query_events;
|
||||
use crate::models::_entities::{content_events, posts, query_events};
|
||||
|
||||
const EVENT_TYPE_SEARCH: &str = "search";
|
||||
const EVENT_TYPE_AI_QUESTION: &str = "ai_question";
|
||||
pub const CONTENT_EVENT_PAGE_VIEW: &str = "page_view";
|
||||
pub const CONTENT_EVENT_READ_PROGRESS: &str = "read_progress";
|
||||
pub const CONTENT_EVENT_READ_COMPLETE: &str = "read_complete";
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct QueryEventRequestContext {
|
||||
@@ -34,6 +37,25 @@ pub struct QueryEventDraft {
|
||||
pub latency_ms: Option<i32>,
|
||||
}
|
||||
|
||||
/// Request-derived context captured alongside a content analytics event.
#[derive(Clone, Debug, Default)]
pub struct ContentEventRequestContext {
    /// Normalized request path, when available.
    pub path: Option<String>,
    /// Raw `Referer` header value, if sent.
    pub referrer: Option<String>,
    /// Raw `User-Agent` header value, if sent.
    pub user_agent: Option<String>,
}
|
||||
|
||||
/// Unvalidated content analytics event as received from the client;
/// `record_content_event` trims/validates it before persisting.
#[derive(Clone, Debug)]
pub struct ContentEventDraft {
    /// One of the `CONTENT_EVENT_*` constants (others are dropped).
    pub event_type: String,
    /// Page path the event occurred on; required (empty drafts are dropped).
    pub path: String,
    /// Slug of the post involved, when the page maps to a post.
    pub post_slug: Option<String>,
    /// Opaque client session identifier, if any.
    pub session_id: Option<String>,
    /// Referrer / user-agent / path captured from the HTTP request.
    pub request_context: ContentEventRequestContext,
    /// Reading duration in milliseconds (negative values are clamped to 0).
    pub duration_ms: Option<i32>,
    /// Reported read progress, clamped into 0..=100 on insert.
    pub progress_percent: Option<i32>,
    /// Free-form extra payload stored verbatim as JSON.
    pub metadata: Option<serde_json::Value>,
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct AnalyticsOverview {
|
||||
pub total_searches: u64,
|
||||
@@ -48,6 +70,17 @@ pub struct AnalyticsOverview {
|
||||
pub avg_ai_latency_ms_last_7d: Option<f64>,
|
||||
}
|
||||
|
||||
/// Aggregated page-view / read-completion metrics for the admin
/// dashboard (and the public highlights payload).
#[derive(Clone, Debug, Serialize)]
pub struct ContentAnalyticsOverview {
    /// All-time page views.
    pub total_page_views: u64,
    /// Page views in the trailing 24 hours.
    pub page_views_last_24h: u64,
    /// Page views in the trailing 7 days.
    pub page_views_last_7d: u64,
    /// All-time read-complete events.
    pub total_read_completes: u64,
    /// Read-complete events in the trailing 7 days.
    pub read_completes_last_7d: u64,
    /// Mean reported read progress (0-100) over the trailing 7 days;
    /// 0.0 when no progress events were counted.
    pub avg_read_progress_last_7d: f64,
    /// Mean read duration in ms over the trailing 7 days; `None` when
    /// no duration-bearing events were counted.
    pub avg_read_duration_ms_last_7d: Option<f64>,
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct AnalyticsTopQuery {
|
||||
pub query: String,
|
||||
@@ -75,6 +108,22 @@ pub struct AnalyticsProviderBucket {
|
||||
pub count: u64,
|
||||
}
|
||||
|
||||
/// One referrer-source bucket (host or `"direct"`) with its page-view count.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsReferrerBucket {
    /// Referrer host, or the raw value when it is not a parsable URL,
    /// or `"direct"` for empty/absent referrers.
    pub referrer: String,
    /// Page views attributed to this source.
    pub count: u64,
}
|
||||
|
||||
/// Per-post engagement summary used in "popular posts" rankings.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsPopularPost {
    /// Post slug (the aggregation key).
    pub slug: String,
    /// Display title; falls back to the slug when the post is unknown.
    pub title: String,
    /// Page views in the aggregation window.
    pub page_views: u64,
    /// Read-complete events in the aggregation window.
    pub read_completes: u64,
    /// Mean reported progress (0-100); 0.0 when nothing was counted.
    pub avg_progress_percent: f64,
    /// Mean read duration in ms; `None` when nothing was counted.
    pub avg_duration_ms: Option<f64>,
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct AnalyticsDailyBucket {
|
||||
pub date: String,
|
||||
@@ -85,13 +134,39 @@ pub struct AnalyticsDailyBucket {
|
||||
/// Full analytics payload for the admin dashboard: search/AI overview,
/// content overview, and the various top-N breakdowns.
#[derive(Clone, Debug, Serialize)]
pub struct AdminAnalyticsResponse {
    pub overview: AnalyticsOverview,
    pub content_overview: ContentAnalyticsOverview,
    /// Most frequent search terms.
    pub top_search_terms: Vec<AnalyticsTopQuery>,
    /// Most frequent AI questions.
    pub top_ai_questions: Vec<AnalyticsTopQuery>,
    pub recent_events: Vec<AnalyticsRecentEvent>,
    /// AI provider usage over the trailing 7 days.
    pub providers_last_7d: Vec<AnalyticsProviderBucket>,
    /// Top referrer sources by page views.
    pub top_referrers: Vec<AnalyticsReferrerBucket>,
    /// Top posts by page views / read completes.
    pub popular_posts: Vec<AnalyticsPopularPost>,
    /// Per-day search/AI activity buckets.
    pub daily_activity: Vec<AnalyticsDailyBucket>,
}
|
||||
|
||||
/// Public (unauthenticated) content highlights: overall metrics plus a
/// short popular-posts list, restricted to publicly visible posts.
#[derive(Clone, Debug, Serialize)]
pub struct PublicContentHighlights {
    pub overview: ContentAnalyticsOverview,
    pub popular_posts: Vec<AnalyticsPopularPost>,
}
|
||||
|
||||
/// Aggregate metrics for a single public time window (24h / 7d / 30d).
#[derive(Clone, Debug, Serialize)]
pub struct PublicContentWindowOverview {
    /// Page views inside the window.
    pub page_views: u64,
    /// Read-complete events inside the window.
    pub read_completes: u64,
    /// Mean reported progress (0-100); 0.0 when nothing was counted.
    pub avg_read_progress: f64,
    /// Mean read duration in ms; `None` when nothing was counted.
    pub avg_read_duration_ms: Option<f64>,
}
|
||||
|
||||
/// One labelled public analytics window (e.g. "24h") with its overview
/// metrics and popular posts.
#[derive(Clone, Debug, Serialize)]
pub struct PublicContentWindowHighlights {
    /// Stable machine key for the window ("24h", "7d", "30d").
    pub key: String,
    /// Human-facing label for the window.
    pub label: String,
    /// Window length in days (24h is encoded as 1).
    pub days: i32,
    pub overview: PublicContentWindowOverview,
    pub popular_posts: Vec<AnalyticsPopularPost>,
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct QueryAggregate {
|
||||
query: String,
|
||||
@@ -122,6 +197,18 @@ fn format_timestamp(value: DateTime<Utc>) -> String {
|
||||
value.format("%Y-%m-%d %H:%M").to_string()
|
||||
}
|
||||
|
||||
/// Reduce a raw `Referer` value to a display source: the URL's host when
/// the value parses as a URL with a non-empty host, the trimmed original
/// string otherwise, or `"direct"` when the value is empty/absent.
fn normalize_referrer_source(value: Option<String>) -> String {
    let Some(value) = trim_to_option(value) else {
        return "direct".to_string();
    };

    reqwest::Url::parse(&value)
        .ok()
        .and_then(|url| url.host_str().map(ToString::to_string))
        .filter(|item| !item.trim().is_empty())
        .unwrap_or(value)
}
|
||||
|
||||
fn header_value(headers: &HeaderMap, key: &str) -> Option<String> {
|
||||
headers
|
||||
.get(key)
|
||||
@@ -134,6 +221,10 @@ fn clamp_latency(latency_ms: i64) -> i32 {
|
||||
latency_ms.clamp(0, i64::from(i32::MAX)) as i32
|
||||
}
|
||||
|
||||
/// Clamp a reported progress percentage into the valid `0..=100` range.
fn clamp_percentage(value: i32) -> i32 {
    if value < 0 {
        0
    } else if value > 100 {
        100
    } else {
        value
    }
}
|
||||
|
||||
fn build_query_aggregates(
|
||||
events: &[query_events::Model],
|
||||
wanted_type: &str,
|
||||
@@ -199,6 +290,17 @@ pub fn request_context_from_headers(path: &str, headers: &HeaderMap) -> QueryEve
|
||||
}
|
||||
}
|
||||
|
||||
/// Capture the request context for a content event from the request
/// path and HTTP headers (`Referer`, `User-Agent`); empty/whitespace
/// values become `None`.
pub fn content_request_context_from_headers(
    path: &str,
    headers: &HeaderMap,
) -> ContentEventRequestContext {
    ContentEventRequestContext {
        path: trim_to_option(Some(path.to_string())),
        referrer: header_value(headers, "referer"),
        user_agent: header_value(headers, "user-agent"),
    }
}
|
||||
|
||||
pub async fn record_event(ctx: &AppContext, draft: QueryEventDraft) {
|
||||
let query_text = draft.query_text.trim().to_string();
|
||||
if query_text.is_empty() {
|
||||
@@ -226,6 +328,38 @@ pub async fn record_event(ctx: &AppContext, draft: QueryEventDraft) {
|
||||
}
|
||||
}
|
||||
|
||||
/// Best-effort persistence of one content analytics event.
///
/// Drafts with an empty path or an unknown event type are silently
/// dropped; insert failures are logged at `warn` rather than propagated
/// so analytics can never break the serving path.
pub async fn record_content_event(ctx: &AppContext, draft: ContentEventDraft) {
    let path = draft.path.trim().to_string();
    if path.is_empty() {
        return;
    }

    // Only the three known event types are accepted; anything else from
    // the client is ignored.
    let event_type = draft.event_type.trim().to_ascii_lowercase();
    if !matches!(
        event_type.as_str(),
        CONTENT_EVENT_PAGE_VIEW | CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
    ) {
        return;
    }

    // Sanitise client-supplied numbers: negative durations become 0 and
    // progress is clamped into 0..=100.
    let active_model = content_events::ActiveModel {
        event_type: Set(event_type),
        path: Set(path),
        post_slug: Set(trim_to_option(draft.post_slug)),
        session_id: Set(trim_to_option(draft.session_id)),
        referrer: Set(trim_to_option(draft.request_context.referrer)),
        user_agent: Set(trim_to_option(draft.request_context.user_agent)),
        duration_ms: Set(draft.duration_ms.map(|value| value.max(0))),
        progress_percent: Set(draft.progress_percent.map(clamp_percentage)),
        metadata: Set(draft.metadata),
        ..Default::default()
    };

    if let Err(error) = active_model.insert(&ctx.db).await {
        tracing::warn!("failed to record content analytics event: {error}");
    }
}
|
||||
|
||||
pub async fn record_search_event(
|
||||
ctx: &AppContext,
|
||||
query_text: &str,
|
||||
@@ -309,12 +443,25 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
.filter(query_events::Column::CreatedAt.gte(since_24h))
|
||||
.count(&ctx.db)
|
||||
.await?;
|
||||
let total_page_views = content_events::Entity::find()
|
||||
.filter(content_events::Column::EventType.eq(CONTENT_EVENT_PAGE_VIEW))
|
||||
.count(&ctx.db)
|
||||
.await?;
|
||||
let total_read_completes = content_events::Entity::find()
|
||||
.filter(content_events::Column::EventType.eq(CONTENT_EVENT_READ_COMPLETE))
|
||||
.count(&ctx.db)
|
||||
.await?;
|
||||
|
||||
let last_7d_events = query_events::Entity::find()
|
||||
.filter(query_events::Column::CreatedAt.gte(since_7d))
|
||||
.order_by_desc(query_events::Column::CreatedAt)
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
let last_7d_content_events = content_events::Entity::find()
|
||||
.filter(content_events::Column::CreatedAt.gte(since_7d))
|
||||
.order_by_desc(content_events::Column::CreatedAt)
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
|
||||
let searches_last_7d = last_7d_events
|
||||
.iter()
|
||||
@@ -336,6 +483,14 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
let mut counted_search_results = 0_u64;
|
||||
let mut total_ai_latency = 0.0_f64;
|
||||
let mut counted_ai_latency = 0_u64;
|
||||
let mut referrer_breakdown: HashMap<String, u64> = HashMap::new();
|
||||
let mut total_read_progress = 0.0_f64;
|
||||
let mut counted_read_progress = 0_u64;
|
||||
let mut total_read_duration = 0.0_f64;
|
||||
let mut counted_read_duration = 0_u64;
|
||||
let mut page_views_last_24h = 0_u64;
|
||||
let mut page_views_last_7d = 0_u64;
|
||||
let mut read_completes_last_7d = 0_u64;
|
||||
|
||||
for offset in 0..7 {
|
||||
let date = (now - Duration::days(offset)).date_naive();
|
||||
@@ -372,6 +527,104 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
}
|
||||
}
|
||||
|
||||
let post_titles = posts::Entity::find()
|
||||
.all(&ctx.db)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|post| {
|
||||
(
|
||||
post.slug,
|
||||
post.title.unwrap_or_else(|| "Untitled post".to_string()),
|
||||
)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let mut post_breakdown: HashMap<String, (u64, u64, f64, u64, f64, u64)> = HashMap::new();
|
||||
|
||||
for event in &last_7d_content_events {
|
||||
let created_at = DateTime::<Utc>::from(event.created_at);
|
||||
|
||||
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
|
||||
page_views_last_7d += 1;
|
||||
if created_at >= since_24h {
|
||||
page_views_last_24h += 1;
|
||||
}
|
||||
|
||||
let referrer = normalize_referrer_source(event.referrer.clone());
|
||||
*referrer_breakdown.entry(referrer).or_insert(0) += 1;
|
||||
}
|
||||
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
read_completes_last_7d += 1;
|
||||
}
|
||||
|
||||
if matches!(
|
||||
event.event_type.as_str(),
|
||||
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
|
||||
) {
|
||||
let progress = event.progress_percent.unwrap_or({
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
100
|
||||
} else {
|
||||
0
|
||||
}
|
||||
});
|
||||
if progress > 0 {
|
||||
total_read_progress += f64::from(progress);
|
||||
counted_read_progress += 1;
|
||||
}
|
||||
|
||||
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
|
||||
total_read_duration += f64::from(duration_ms);
|
||||
counted_read_duration += 1;
|
||||
}
|
||||
}
|
||||
|
||||
let Some(post_slug) = event
|
||||
.post_slug
|
||||
.as_deref()
|
||||
.map(str::trim)
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(ToString::to_string)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let entry = post_breakdown
|
||||
.entry(post_slug)
|
||||
.or_insert((0, 0, 0.0, 0, 0.0, 0));
|
||||
|
||||
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
|
||||
entry.0 += 1;
|
||||
}
|
||||
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
entry.1 += 1;
|
||||
}
|
||||
|
||||
if matches!(
|
||||
event.event_type.as_str(),
|
||||
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
|
||||
) {
|
||||
let progress = event.progress_percent.unwrap_or({
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
100
|
||||
} else {
|
||||
0
|
||||
}
|
||||
});
|
||||
if progress > 0 {
|
||||
entry.2 += f64::from(progress);
|
||||
entry.3 += 1;
|
||||
}
|
||||
|
||||
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
|
||||
entry.4 += f64::from(duration_ms);
|
||||
entry.5 += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut providers_last_7d = provider_breakdown
|
||||
.into_iter()
|
||||
.map(|(provider, count)| AnalyticsProviderBucket { provider, count })
|
||||
@@ -384,6 +637,50 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
});
|
||||
providers_last_7d.truncate(6);
|
||||
|
||||
let mut top_referrers = referrer_breakdown
|
||||
.into_iter()
|
||||
.map(|(referrer, count)| AnalyticsReferrerBucket { referrer, count })
|
||||
.collect::<Vec<_>>();
|
||||
top_referrers.sort_by(|left, right| {
|
||||
right
|
||||
.count
|
||||
.cmp(&left.count)
|
||||
.then_with(|| left.referrer.cmp(&right.referrer))
|
||||
});
|
||||
top_referrers.truncate(8);
|
||||
|
||||
let mut popular_posts = post_breakdown
|
||||
.into_iter()
|
||||
.map(
|
||||
|(slug, (page_views, read_completes, total_progress, progress_count, total_duration, duration_count))| {
|
||||
AnalyticsPopularPost {
|
||||
title: post_titles
|
||||
.get(&slug)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| slug.clone()),
|
||||
slug,
|
||||
page_views,
|
||||
read_completes,
|
||||
avg_progress_percent: if progress_count > 0 {
|
||||
total_progress / progress_count as f64
|
||||
} else {
|
||||
0.0
|
||||
},
|
||||
avg_duration_ms: (duration_count > 0)
|
||||
.then(|| total_duration / duration_count as f64),
|
||||
}
|
||||
},
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
popular_posts.sort_by(|left, right| {
|
||||
right
|
||||
.page_views
|
||||
.cmp(&left.page_views)
|
||||
.then_with(|| right.read_completes.cmp(&left.read_completes))
|
||||
.then_with(|| left.slug.cmp(&right.slug))
|
||||
});
|
||||
popular_posts.truncate(10);
|
||||
|
||||
let mut daily_activity = daily_map
|
||||
.into_iter()
|
||||
.map(|(date, (searches, ai_questions))| AnalyticsDailyBucket {
|
||||
@@ -432,10 +729,448 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
avg_ai_latency_ms_last_7d: (counted_ai_latency > 0)
|
||||
.then(|| total_ai_latency / counted_ai_latency as f64),
|
||||
},
|
||||
content_overview: ContentAnalyticsOverview {
|
||||
total_page_views,
|
||||
page_views_last_24h,
|
||||
page_views_last_7d,
|
||||
total_read_completes,
|
||||
read_completes_last_7d,
|
||||
avg_read_progress_last_7d: if counted_read_progress > 0 {
|
||||
total_read_progress / counted_read_progress as f64
|
||||
} else {
|
||||
0.0
|
||||
},
|
||||
avg_read_duration_ms_last_7d: (counted_read_duration > 0)
|
||||
.then(|| total_read_duration / counted_read_duration as f64),
|
||||
},
|
||||
top_search_terms,
|
||||
top_ai_questions,
|
||||
recent_events,
|
||||
providers_last_7d,
|
||||
top_referrers,
|
||||
popular_posts,
|
||||
daily_activity,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn build_public_content_highlights(
|
||||
ctx: &AppContext,
|
||||
public_posts: &[posts::Model],
|
||||
) -> Result<PublicContentHighlights> {
|
||||
if public_posts.is_empty() {
|
||||
return Ok(PublicContentHighlights {
|
||||
overview: ContentAnalyticsOverview {
|
||||
total_page_views: 0,
|
||||
page_views_last_24h: 0,
|
||||
page_views_last_7d: 0,
|
||||
total_read_completes: 0,
|
||||
read_completes_last_7d: 0,
|
||||
avg_read_progress_last_7d: 0.0,
|
||||
avg_read_duration_ms_last_7d: None,
|
||||
},
|
||||
popular_posts: Vec::new(),
|
||||
});
|
||||
}
|
||||
|
||||
let now = Utc::now();
|
||||
let since_24h = now - Duration::hours(24);
|
||||
let since_7d = now - Duration::days(7);
|
||||
let public_slugs = public_posts
|
||||
.iter()
|
||||
.map(|post| post.slug.clone())
|
||||
.collect::<Vec<_>>();
|
||||
let post_titles = public_posts
|
||||
.iter()
|
||||
.map(|post| {
|
||||
(
|
||||
post.slug.clone(),
|
||||
trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone()),
|
||||
)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let total_page_views = content_events::Entity::find()
|
||||
.filter(content_events::Column::EventType.eq(CONTENT_EVENT_PAGE_VIEW))
|
||||
.filter(content_events::Column::PostSlug.is_in(public_slugs.clone()))
|
||||
.count(&ctx.db)
|
||||
.await?;
|
||||
let total_read_completes = content_events::Entity::find()
|
||||
.filter(content_events::Column::EventType.eq(CONTENT_EVENT_READ_COMPLETE))
|
||||
.filter(content_events::Column::PostSlug.is_in(public_slugs.clone()))
|
||||
.count(&ctx.db)
|
||||
.await?;
|
||||
|
||||
let last_7d_content_events = content_events::Entity::find()
|
||||
.filter(content_events::Column::CreatedAt.gte(since_7d))
|
||||
.filter(content_events::Column::PostSlug.is_in(public_slugs))
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
|
||||
let mut page_views_last_24h = 0_u64;
|
||||
let mut page_views_last_7d = 0_u64;
|
||||
let mut read_completes_last_7d = 0_u64;
|
||||
let mut total_read_progress = 0.0_f64;
|
||||
let mut counted_read_progress = 0_u64;
|
||||
let mut total_read_duration = 0.0_f64;
|
||||
let mut counted_read_duration = 0_u64;
|
||||
let mut post_breakdown = HashMap::<String, (u64, u64, f64, u64, f64, u64)>::new();
|
||||
|
||||
for event in &last_7d_content_events {
|
||||
let created_at = DateTime::<Utc>::from(event.created_at);
|
||||
let Some(post_slug) = event
|
||||
.post_slug
|
||||
.as_deref()
|
||||
.map(str::trim)
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(ToString::to_string)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
|
||||
page_views_last_7d += 1;
|
||||
if created_at >= since_24h {
|
||||
page_views_last_24h += 1;
|
||||
}
|
||||
}
|
||||
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
read_completes_last_7d += 1;
|
||||
}
|
||||
|
||||
if matches!(
|
||||
event.event_type.as_str(),
|
||||
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
|
||||
) {
|
||||
let progress = event.progress_percent.unwrap_or({
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
100
|
||||
} else {
|
||||
0
|
||||
}
|
||||
});
|
||||
if progress > 0 {
|
||||
total_read_progress += f64::from(progress);
|
||||
counted_read_progress += 1;
|
||||
}
|
||||
|
||||
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
|
||||
total_read_duration += f64::from(duration_ms);
|
||||
counted_read_duration += 1;
|
||||
}
|
||||
}
|
||||
|
||||
let entry = post_breakdown
|
||||
.entry(post_slug)
|
||||
.or_insert((0, 0, 0.0, 0, 0.0, 0));
|
||||
|
||||
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
|
||||
entry.0 += 1;
|
||||
}
|
||||
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
entry.1 += 1;
|
||||
}
|
||||
|
||||
if matches!(
|
||||
event.event_type.as_str(),
|
||||
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
|
||||
) {
|
||||
let progress = event.progress_percent.unwrap_or({
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
100
|
||||
} else {
|
||||
0
|
||||
}
|
||||
});
|
||||
if progress > 0 {
|
||||
entry.2 += f64::from(progress);
|
||||
entry.3 += 1;
|
||||
}
|
||||
|
||||
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
|
||||
entry.4 += f64::from(duration_ms);
|
||||
entry.5 += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut popular_posts = post_breakdown
|
||||
.into_iter()
|
||||
.map(
|
||||
|(
|
||||
slug,
|
||||
(
|
||||
page_views,
|
||||
read_completes,
|
||||
total_progress,
|
||||
progress_count,
|
||||
total_duration,
|
||||
duration_count,
|
||||
),
|
||||
)| AnalyticsPopularPost {
|
||||
title: post_titles
|
||||
.get(&slug)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| slug.clone()),
|
||||
slug,
|
||||
page_views,
|
||||
read_completes,
|
||||
avg_progress_percent: if progress_count > 0 {
|
||||
total_progress / progress_count as f64
|
||||
} else {
|
||||
0.0
|
||||
},
|
||||
avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
|
||||
},
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
popular_posts.sort_by(|left, right| {
|
||||
right
|
||||
.page_views
|
||||
.cmp(&left.page_views)
|
||||
.then_with(|| right.read_completes.cmp(&left.read_completes))
|
||||
.then_with(|| left.slug.cmp(&right.slug))
|
||||
});
|
||||
popular_posts.truncate(6);
|
||||
|
||||
Ok(PublicContentHighlights {
|
||||
overview: ContentAnalyticsOverview {
|
||||
total_page_views,
|
||||
page_views_last_24h,
|
||||
page_views_last_7d,
|
||||
total_read_completes,
|
||||
read_completes_last_7d,
|
||||
avg_read_progress_last_7d: if counted_read_progress > 0 {
|
||||
total_read_progress / counted_read_progress as f64
|
||||
} else {
|
||||
0.0
|
||||
},
|
||||
avg_read_duration_ms_last_7d: (counted_read_duration > 0)
|
||||
.then(|| total_read_duration / counted_read_duration as f64),
|
||||
},
|
||||
popular_posts,
|
||||
})
|
||||
}
|
||||
|
||||
/// Build the three public analytics windows (24h / 7d / 30d) restricted
/// to the given publicly-visible posts.
///
/// Fetches the trailing 30 days of content events once and summarises
/// each window from that single result set. With no public posts it
/// short-circuits to three empty windows without touching the database.
///
/// # Errors
/// Propagates database errors from the event query.
pub async fn build_public_content_windows(
    ctx: &AppContext,
    public_posts: &[posts::Model],
) -> Result<Vec<PublicContentWindowHighlights>> {
    if public_posts.is_empty() {
        return Ok(vec![
            build_empty_public_content_window("24h", "24h", 1),
            build_empty_public_content_window("7d", "7d", 7),
            build_empty_public_content_window("30d", "30d", 30),
        ]);
    }

    let now = Utc::now();
    let since_30d = now - Duration::days(30);
    let public_slugs = public_posts
        .iter()
        .map(|post| post.slug.clone())
        .collect::<Vec<_>>();
    // slug -> display title, falling back to the slug when the title is
    // empty/absent.
    let post_titles = public_posts
        .iter()
        .map(|post| {
            (
                post.slug.clone(),
                trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone()),
            )
        })
        .collect::<HashMap<_, _>>();

    // One query covers the widest window; the per-window summaries
    // filter it in memory by timestamp.
    let events = content_events::Entity::find()
        .filter(content_events::Column::CreatedAt.gte(since_30d))
        .filter(content_events::Column::PostSlug.is_in(public_slugs))
        .all(&ctx.db)
        .await?;

    Ok(vec![
        summarize_public_content_window(&events, &post_titles, now - Duration::hours(24), "24h", "24h", 1),
        summarize_public_content_window(&events, &post_titles, now - Duration::days(7), "7d", "7d", 7),
        summarize_public_content_window(&events, &post_titles, since_30d, "30d", "30d", 30),
    ])
}
|
||||
|
||||
fn build_empty_public_content_window(
|
||||
key: &str,
|
||||
label: &str,
|
||||
days: i32,
|
||||
) -> PublicContentWindowHighlights {
|
||||
PublicContentWindowHighlights {
|
||||
key: key.to_string(),
|
||||
label: label.to_string(),
|
||||
days,
|
||||
overview: PublicContentWindowOverview {
|
||||
page_views: 0,
|
||||
read_completes: 0,
|
||||
avg_read_progress: 0.0,
|
||||
avg_read_duration_ms: None,
|
||||
},
|
||||
popular_posts: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn summarize_public_content_window(
|
||||
events: &[content_events::Model],
|
||||
post_titles: &HashMap<String, String>,
|
||||
since: DateTime<Utc>,
|
||||
key: &str,
|
||||
label: &str,
|
||||
days: i32,
|
||||
) -> PublicContentWindowHighlights {
|
||||
let mut page_views = 0_u64;
|
||||
let mut read_completes = 0_u64;
|
||||
let mut total_read_progress = 0.0_f64;
|
||||
let mut counted_read_progress = 0_u64;
|
||||
let mut total_read_duration = 0.0_f64;
|
||||
let mut counted_read_duration = 0_u64;
|
||||
let mut post_breakdown = HashMap::<String, (u64, u64, f64, u64, f64, u64)>::new();
|
||||
|
||||
for event in events {
|
||||
let created_at = DateTime::<Utc>::from(event.created_at);
|
||||
if created_at < since {
|
||||
continue;
|
||||
}
|
||||
|
||||
let Some(post_slug) = event
|
||||
.post_slug
|
||||
.as_deref()
|
||||
.map(str::trim)
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(ToString::to_string)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
|
||||
page_views += 1;
|
||||
}
|
||||
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
read_completes += 1;
|
||||
}
|
||||
|
||||
if matches!(
|
||||
event.event_type.as_str(),
|
||||
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
|
||||
) {
|
||||
let progress = event.progress_percent.unwrap_or({
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
100
|
||||
} else {
|
||||
0
|
||||
}
|
||||
});
|
||||
if progress > 0 {
|
||||
total_read_progress += f64::from(progress);
|
||||
counted_read_progress += 1;
|
||||
}
|
||||
|
||||
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
|
||||
total_read_duration += f64::from(duration_ms);
|
||||
counted_read_duration += 1;
|
||||
}
|
||||
}
|
||||
|
||||
let entry = post_breakdown
|
||||
.entry(post_slug)
|
||||
.or_insert((0, 0, 0.0, 0, 0.0, 0));
|
||||
|
||||
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
|
||||
entry.0 += 1;
|
||||
}
|
||||
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
entry.1 += 1;
|
||||
}
|
||||
|
||||
if matches!(
|
||||
event.event_type.as_str(),
|
||||
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
|
||||
) {
|
||||
let progress = event.progress_percent.unwrap_or({
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
100
|
||||
} else {
|
||||
0
|
||||
}
|
||||
});
|
||||
if progress > 0 {
|
||||
entry.2 += f64::from(progress);
|
||||
entry.3 += 1;
|
||||
}
|
||||
|
||||
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
|
||||
entry.4 += f64::from(duration_ms);
|
||||
entry.5 += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut popular_posts = post_breakdown
|
||||
.into_iter()
|
||||
.map(
|
||||
|(
|
||||
slug,
|
||||
(
|
||||
item_page_views,
|
||||
item_read_completes,
|
||||
total_progress,
|
||||
progress_count,
|
||||
total_duration,
|
||||
duration_count,
|
||||
),
|
||||
)| AnalyticsPopularPost {
|
||||
title: post_titles
|
||||
.get(&slug)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| slug.clone()),
|
||||
slug,
|
||||
page_views: item_page_views,
|
||||
read_completes: item_read_completes,
|
||||
avg_progress_percent: if progress_count > 0 {
|
||||
total_progress / progress_count as f64
|
||||
} else {
|
||||
0.0
|
||||
},
|
||||
avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
|
||||
},
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
popular_posts.sort_by(|left, right| {
|
||||
right
|
||||
.page_views
|
||||
.cmp(&left.page_views)
|
||||
.then_with(|| right.read_completes.cmp(&left.read_completes))
|
||||
.then_with(|| {
|
||||
right
|
||||
.avg_progress_percent
|
||||
.partial_cmp(&left.avg_progress_percent)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
})
|
||||
.then_with(|| left.slug.cmp(&right.slug))
|
||||
});
|
||||
popular_posts.truncate(6);
|
||||
|
||||
PublicContentWindowHighlights {
|
||||
key: key.to_string(),
|
||||
label: label.to_string(),
|
||||
days,
|
||||
overview: PublicContentWindowOverview {
|
||||
page_views,
|
||||
read_completes,
|
||||
avg_read_progress: if counted_read_progress > 0 {
|
||||
total_read_progress / counted_read_progress as f64
|
||||
} else {
|
||||
0.0
|
||||
},
|
||||
avg_read_duration_ms: (counted_read_duration > 0)
|
||||
.then(|| total_read_duration / counted_read_duration as f64),
|
||||
},
|
||||
popular_posts,
|
||||
}
|
||||
}
|
||||
|
||||
375
backend/src/services/comment_guard.rs
Normal file
375
backend/src/services/comment_guard.rs
Normal file
@@ -0,0 +1,375 @@
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{Mutex, OnceLock};
|
||||
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{ColumnTrait, Condition, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
|
||||
use serde::Serialize;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::models::_entities::{comment_blacklist, comments};
|
||||
|
||||
const DEFAULT_RATE_LIMIT_WINDOW_SECONDS: i64 = 10 * 60;
|
||||
const DEFAULT_RATE_LIMIT_MAX_PER_WINDOW: u64 = 8;
|
||||
const DEFAULT_MIN_INTERVAL_SECONDS: i64 = 12;
|
||||
const DEFAULT_CAPTCHA_TTL_SECONDS: i64 = 10 * 60;
|
||||
|
||||
const ENV_RATE_LIMIT_WINDOW_SECONDS: &str = "TERMI_COMMENT_RATE_LIMIT_WINDOW_SECONDS";
|
||||
const ENV_RATE_LIMIT_MAX_PER_WINDOW: &str = "TERMI_COMMENT_RATE_LIMIT_MAX_PER_WINDOW";
|
||||
const ENV_MIN_INTERVAL_SECONDS: &str = "TERMI_COMMENT_MIN_INTERVAL_SECONDS";
|
||||
const ENV_BLOCK_KEYWORDS: &str = "TERMI_COMMENT_BLOCK_KEYWORDS";
|
||||
const ENV_CAPTCHA_TTL_SECONDS: &str = "TERMI_COMMENT_CAPTCHA_TTL_SECONDS";
|
||||
|
||||
pub const MATCHER_TYPE_IP: &str = "ip";
|
||||
pub const MATCHER_TYPE_EMAIL: &str = "email";
|
||||
pub const MATCHER_TYPE_USER_AGENT: &str = "user_agent";
|
||||
|
||||
/// A math captcha challenge handed to the client before it may comment.
#[derive(Clone, Debug, Serialize)]
pub struct CommentCaptchaChallenge {
    /// Opaque one-time token identifying the stored answer.
    pub token: String,
    /// Human-readable arithmetic question (e.g. "3 + 4 = ?").
    pub question: String,
    /// Seconds until this challenge expires.
    pub expires_in_seconds: i64,
}
|
||||
|
||||
/// Borrowed view of a comment submission, as seen by the anti-abuse guard.
/// All fields are optional raw inputs; normalization happens inside the guard.
#[derive(Clone, Debug)]
pub struct CommentGuardInput<'a> {
    /// Client IP, used for rate limiting, blacklist, and captcha binding.
    pub ip_address: Option<&'a str>,
    /// Commenter email (blacklist + keyword scan).
    pub email: Option<&'a str>,
    /// Request User-Agent header (blacklist matching).
    pub user_agent: Option<&'a str>,
    /// Display name of the commenter (keyword scan).
    pub author: Option<&'a str>,
    /// Comment body (keyword scan).
    pub content: Option<&'a str>,
    /// Hidden honeypot field; any non-empty value marks the request as a bot.
    pub honeypot_website: Option<&'a str>,
    /// Token from a previously issued captcha challenge.
    pub captcha_token: Option<&'a str>,
    /// Client's answer to the captcha question.
    pub captcha_answer: Option<&'a str>,
}
|
||||
|
||||
/// Environment-tunable comment-guard settings (see the `TERMI_COMMENT_*`
/// env constants); values are clamped into sane ranges by `load_config`.
#[derive(Clone, Debug)]
struct GuardConfig {
    // Length of the rolling rate-limit window, in seconds.
    rate_limit_window_seconds: i64,
    // Max comments allowed per IP within one window.
    rate_limit_max_per_window: u64,
    // Minimum seconds between two consecutive comments from the same IP.
    min_interval_seconds: i64,
    // Lowercased substrings that reject a comment when found.
    blocked_keywords: Vec<String>,
    // Lifetime of an issued captcha challenge, in seconds.
    captcha_ttl_seconds: i64,
}
|
||||
|
||||
/// Server-side state of one outstanding captcha challenge, keyed by token
/// in the in-memory captcha store.
#[derive(Clone, Debug)]
struct CaptchaEntry {
    // Expected answer, stored as a string for direct comparison.
    answer: String,
    // Absolute expiry; entries at or past this moment are pruned.
    expires_at: DateTime<Utc>,
    // IP the challenge was issued to, when known — verified on solve.
    ip_address: Option<String>,
}
|
||||
|
||||
/// Reads an `i64` from environment variable `name`, clamping the parsed
/// value into `[min, max]`; returns `fallback` when the variable is unset
/// or does not parse as an integer.
fn parse_env_i64(name: &str, fallback: i64, min: i64, max: i64) -> i64 {
    match std::env::var(name) {
        Ok(raw) => match raw.trim().parse::<i64>() {
            Ok(parsed) => parsed.clamp(min, max),
            Err(_) => fallback,
        },
        Err(_) => fallback,
    }
}
|
||||
|
||||
/// Trims the borrowed string and returns it as an owned `String`;
/// `None` when the input is absent or whitespace-only.
fn trim_to_option(value: Option<&str>) -> Option<String> {
    let trimmed = value?.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
|
||||
|
||||
fn normalize_email(value: Option<&str>) -> Option<String> {
|
||||
trim_to_option(value).map(|item| item.to_lowercase())
|
||||
}
|
||||
|
||||
fn normalize_user_agent(value: Option<&str>) -> Option<String> {
|
||||
trim_to_option(value).map(|item| item.chars().take(512).collect::<String>())
|
||||
}
|
||||
|
||||
fn normalize_ip(value: Option<&str>) -> Option<String> {
|
||||
trim_to_option(value).map(|item| item.chars().take(96).collect::<String>())
|
||||
}
|
||||
|
||||
fn parse_keywords() -> Vec<String> {
|
||||
std::env::var(ENV_BLOCK_KEYWORDS)
|
||||
.ok()
|
||||
.map(|value| {
|
||||
value
|
||||
.split([',', '\n', '\r'])
|
||||
.map(str::trim)
|
||||
.filter(|item| !item.is_empty())
|
||||
.map(|item| item.to_lowercase())
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn load_config() -> GuardConfig {
|
||||
GuardConfig {
|
||||
rate_limit_window_seconds: parse_env_i64(
|
||||
ENV_RATE_LIMIT_WINDOW_SECONDS,
|
||||
DEFAULT_RATE_LIMIT_WINDOW_SECONDS,
|
||||
10,
|
||||
24 * 60 * 60,
|
||||
),
|
||||
rate_limit_max_per_window: parse_env_i64(
|
||||
ENV_RATE_LIMIT_MAX_PER_WINDOW,
|
||||
DEFAULT_RATE_LIMIT_MAX_PER_WINDOW as i64,
|
||||
1,
|
||||
500,
|
||||
) as u64,
|
||||
min_interval_seconds: parse_env_i64(
|
||||
ENV_MIN_INTERVAL_SECONDS,
|
||||
DEFAULT_MIN_INTERVAL_SECONDS,
|
||||
0,
|
||||
6 * 60 * 60,
|
||||
),
|
||||
blocked_keywords: parse_keywords(),
|
||||
captcha_ttl_seconds: parse_env_i64(
|
||||
ENV_CAPTCHA_TTL_SECONDS,
|
||||
DEFAULT_CAPTCHA_TTL_SECONDS,
|
||||
30,
|
||||
24 * 60 * 60,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
fn captcha_store() -> &'static Mutex<HashMap<String, CaptchaEntry>> {
|
||||
static STORE: OnceLock<Mutex<HashMap<String, CaptchaEntry>>> = OnceLock::new();
|
||||
STORE.get_or_init(|| Mutex::new(HashMap::new()))
|
||||
}
|
||||
|
||||
/// Drops every captcha entry whose expiry is at or before `now`.
///
/// Uses `HashMap::retain` to prune in place, instead of collecting expired
/// tokens into a temporary `Vec` and removing them one at a time — same
/// behavior, one pass, no allocation.
fn cleanup_expired_captcha_entries(store: &mut HashMap<String, CaptchaEntry>, now: DateTime<Utc>) {
    store.retain(|_, entry| entry.expires_at > now);
}
|
||||
|
||||
pub fn normalize_matcher_type(value: &str) -> Option<&'static str> {
|
||||
match value.trim().to_ascii_lowercase().as_str() {
|
||||
MATCHER_TYPE_IP => Some(MATCHER_TYPE_IP),
|
||||
MATCHER_TYPE_EMAIL => Some(MATCHER_TYPE_EMAIL),
|
||||
MATCHER_TYPE_USER_AGENT | "ua" | "useragent" => Some(MATCHER_TYPE_USER_AGENT),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn normalize_matcher_value(matcher_type: &str, raw_value: &str) -> Option<String> {
|
||||
let normalized_type = normalize_matcher_type(matcher_type)?;
|
||||
|
||||
match normalized_type {
|
||||
MATCHER_TYPE_IP => normalize_ip(Some(raw_value)),
|
||||
MATCHER_TYPE_EMAIL => normalize_email(Some(raw_value)),
|
||||
MATCHER_TYPE_USER_AGENT => normalize_user_agent(Some(raw_value)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_captcha_challenge(client_ip: Option<&str>) -> Result<CommentCaptchaChallenge> {
|
||||
let config = load_config();
|
||||
let seed = Uuid::new_v4().as_u128();
|
||||
let left = ((seed % 9) + 1) as i64;
|
||||
let right = (((seed / 11) % 9) + 1) as i64;
|
||||
let use_subtract = seed % 2 == 0 && left > right;
|
||||
let (question, answer) = if use_subtract {
|
||||
(
|
||||
format!("{} - {} = ?", left, right),
|
||||
(left - right).to_string(),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
format!("{} + {} = ?", left, right),
|
||||
(left + right).to_string(),
|
||||
)
|
||||
};
|
||||
|
||||
let token = Uuid::new_v4().to_string();
|
||||
let now = Utc::now();
|
||||
let expires_at = now + Duration::seconds(config.captcha_ttl_seconds);
|
||||
let ip_address = normalize_ip(client_ip);
|
||||
|
||||
let mut store = captcha_store()
|
||||
.lock()
|
||||
.map_err(|_| Error::InternalServerError)?;
|
||||
cleanup_expired_captcha_entries(&mut store, now);
|
||||
store.insert(
|
||||
token.clone(),
|
||||
CaptchaEntry {
|
||||
answer,
|
||||
expires_at,
|
||||
ip_address,
|
||||
},
|
||||
);
|
||||
|
||||
Ok(CommentCaptchaChallenge {
|
||||
token,
|
||||
question,
|
||||
expires_in_seconds: config.captcha_ttl_seconds,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn verify_captcha_solution(
|
||||
captcha_token: Option<&str>,
|
||||
captcha_answer: Option<&str>,
|
||||
client_ip: Option<&str>,
|
||||
) -> Result<()> {
|
||||
let token = trim_to_option(captcha_token)
|
||||
.ok_or_else(|| Error::BadRequest("请先完成验证码".to_string()))?;
|
||||
let answer = trim_to_option(captcha_answer)
|
||||
.ok_or_else(|| Error::BadRequest("请填写验证码答案".to_string()))?;
|
||||
|
||||
let now = Utc::now();
|
||||
let normalized_ip = normalize_ip(client_ip);
|
||||
|
||||
let mut store = captcha_store()
|
||||
.lock()
|
||||
.map_err(|_| Error::InternalServerError)?;
|
||||
cleanup_expired_captcha_entries(&mut store, now);
|
||||
|
||||
let Some(entry) = store.remove(&token) else {
|
||||
return Err(Error::BadRequest("验证码已失效,请刷新后重试".to_string()));
|
||||
};
|
||||
|
||||
if entry.expires_at <= now {
|
||||
return Err(Error::BadRequest("验证码已过期,请刷新后重试".to_string()));
|
||||
}
|
||||
|
||||
if entry
|
||||
.ip_address
|
||||
.as_deref()
|
||||
.zip(normalized_ip.as_deref())
|
||||
.is_some_and(|(expected, current)| expected != current)
|
||||
{
|
||||
return Err(Error::BadRequest(
|
||||
"验证码校验失败,请刷新后重试".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
if entry.answer.trim() != answer.trim() {
|
||||
return Err(Error::BadRequest("验证码答案错误".to_string()));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn check_blacklist(ctx: &AppContext, input: &CommentGuardInput<'_>) -> Result<()> {
|
||||
let now = Utc::now();
|
||||
let candidates = [
|
||||
(MATCHER_TYPE_IP, normalize_ip(input.ip_address)),
|
||||
(MATCHER_TYPE_EMAIL, normalize_email(input.email)),
|
||||
(
|
||||
MATCHER_TYPE_USER_AGENT,
|
||||
normalize_user_agent(input.user_agent),
|
||||
),
|
||||
];
|
||||
|
||||
for (matcher_type, matcher_value) in candidates {
|
||||
let Some(matcher_value) = matcher_value else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let matched = comment_blacklist::Entity::find()
|
||||
.filter(comment_blacklist::Column::MatcherType.eq(matcher_type))
|
||||
.filter(comment_blacklist::Column::MatcherValue.eq(&matcher_value))
|
||||
.filter(
|
||||
Condition::any()
|
||||
.add(comment_blacklist::Column::Active.is_null())
|
||||
.add(comment_blacklist::Column::Active.eq(true)),
|
||||
)
|
||||
.filter(
|
||||
Condition::any()
|
||||
.add(comment_blacklist::Column::ExpiresAt.is_null())
|
||||
.add(comment_blacklist::Column::ExpiresAt.gt(now)),
|
||||
)
|
||||
.one(&ctx.db)
|
||||
.await?;
|
||||
|
||||
if matched.is_some() {
|
||||
return Err(Error::BadRequest("评论请求已被拦截".to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn check_rate_limit(ctx: &AppContext, input: &CommentGuardInput<'_>) -> Result<()> {
|
||||
let config = load_config();
|
||||
let Some(ip_address) = normalize_ip(input.ip_address) else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let now = Utc::now();
|
||||
let since = now - Duration::seconds(config.rate_limit_window_seconds);
|
||||
|
||||
let count = comments::Entity::find()
|
||||
.filter(comments::Column::IpAddress.eq(&ip_address))
|
||||
.filter(comments::Column::CreatedAt.gte(since))
|
||||
.count(&ctx.db)
|
||||
.await?;
|
||||
|
||||
if count >= config.rate_limit_max_per_window {
|
||||
return Err(Error::BadRequest("评论过于频繁,请稍后再试".to_string()));
|
||||
}
|
||||
|
||||
if config.min_interval_seconds <= 0 {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Some(last_comment) = comments::Entity::find()
|
||||
.filter(comments::Column::IpAddress.eq(&ip_address))
|
||||
.order_by_desc(comments::Column::CreatedAt)
|
||||
.one(&ctx.db)
|
||||
.await?
|
||||
{
|
||||
let last_created_at = DateTime::<Utc>::from(last_comment.created_at);
|
||||
let elapsed = now.signed_duration_since(last_created_at).num_seconds();
|
||||
if elapsed < config.min_interval_seconds {
|
||||
return Err(Error::BadRequest("提交过快,请稍后再试".to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn contains_blocked_keyword(input: &CommentGuardInput<'_>) -> Option<String> {
|
||||
let config = load_config();
|
||||
if config.blocked_keywords.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut merged = String::new();
|
||||
for value in [input.author, input.email, input.content] {
|
||||
if let Some(value) = value {
|
||||
merged.push_str(value);
|
||||
merged.push('\n');
|
||||
}
|
||||
}
|
||||
let lower = merged.to_lowercase();
|
||||
|
||||
config
|
||||
.blocked_keywords
|
||||
.into_iter()
|
||||
.find(|keyword| lower.contains(keyword))
|
||||
}
|
||||
|
||||
/// Runs every anti-abuse check for a comment submission, failing fast on
/// the first violation.
///
/// Check order: honeypot field, captcha solution, blocked-keyword scan,
/// blacklist lookup (DB), then per-IP rate limit (DB).
///
/// # Errors
/// `Error::BadRequest` with a user-facing (Chinese) message for any check
/// that fails; database errors propagate from the async checks.
pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'_>) -> Result<()> {
    // Bots typically auto-fill the hidden "website" honeypot field.
    if trim_to_option(input.honeypot_website).is_some() {
        return Err(Error::BadRequest("提交未通过校验".to_string()));
    }

    verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;

    if contains_blocked_keyword(input).is_some() {
        return Err(Error::BadRequest("评论内容包含敏感关键词".to_string()));
    }

    check_blacklist(ctx, input).await?;
    check_rate_limit(ctx, input).await?;

    Ok(())
}
|
||||
@@ -1,3 +1,4 @@
|
||||
use chrono::{DateTime, FixedOffset, NaiveDate, TimeZone, Utc};
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, QueryFilter,
|
||||
@@ -12,6 +13,12 @@ use crate::models::_entities::{categories, comments, posts, tags};
|
||||
|
||||
pub const MARKDOWN_POSTS_DIR: &str = "content/posts";
|
||||
const FIXTURE_POSTS_FILE: &str = "src/fixtures/posts.yaml";
|
||||
pub const POST_STATUS_DRAFT: &str = "draft";
|
||||
pub const POST_STATUS_PUBLISHED: &str = "published";
|
||||
pub const POST_STATUS_OFFLINE: &str = "offline";
|
||||
pub const POST_VISIBILITY_PUBLIC: &str = "public";
|
||||
pub const POST_VISIBILITY_UNLISTED: &str = "unlisted";
|
||||
pub const POST_VISIBILITY_PRIVATE: &str = "private";
|
||||
|
||||
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
|
||||
struct MarkdownFrontmatter {
|
||||
@@ -33,6 +40,16 @@ struct MarkdownFrontmatter {
|
||||
pinned: Option<bool>,
|
||||
published: Option<bool>,
|
||||
draft: Option<bool>,
|
||||
status: Option<String>,
|
||||
visibility: Option<String>,
|
||||
publish_at: Option<String>,
|
||||
unpublish_at: Option<String>,
|
||||
canonical_url: Option<String>,
|
||||
noindex: Option<bool>,
|
||||
og_image: Option<String>,
|
||||
#[serde(default, deserialize_with = "deserialize_optional_string_list")]
|
||||
redirect_from: Option<Vec<String>>,
|
||||
redirect_to: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
@@ -47,7 +64,15 @@ pub struct MarkdownPost {
|
||||
pub image: Option<String>,
|
||||
pub images: Vec<String>,
|
||||
pub pinned: bool,
|
||||
pub published: bool,
|
||||
pub status: String,
|
||||
pub visibility: String,
|
||||
pub publish_at: Option<String>,
|
||||
pub unpublish_at: Option<String>,
|
||||
pub canonical_url: Option<String>,
|
||||
pub noindex: bool,
|
||||
pub og_image: Option<String>,
|
||||
pub redirect_from: Vec<String>,
|
||||
pub redirect_to: Option<String>,
|
||||
pub file_path: String,
|
||||
}
|
||||
|
||||
@@ -63,7 +88,15 @@ pub struct MarkdownPostDraft {
|
||||
pub image: Option<String>,
|
||||
pub images: Vec<String>,
|
||||
pub pinned: bool,
|
||||
pub published: bool,
|
||||
pub status: String,
|
||||
pub visibility: String,
|
||||
pub publish_at: Option<String>,
|
||||
pub unpublish_at: Option<String>,
|
||||
pub canonical_url: Option<String>,
|
||||
pub noindex: bool,
|
||||
pub og_image: Option<String>,
|
||||
pub redirect_from: Vec<String>,
|
||||
pub redirect_to: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -124,6 +157,147 @@ fn normalize_string_list(values: Option<Vec<String>>) -> Vec<String> {
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn yaml_scalar(value: &str) -> String {
|
||||
serde_yaml::to_string(value)
|
||||
.unwrap_or_else(|_| format!("{value:?}"))
|
||||
.trim()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
fn normalize_redirect_list(values: Option<Vec<String>>) -> Vec<String> {
|
||||
let mut seen = std::collections::HashSet::new();
|
||||
|
||||
normalize_string_list(values)
|
||||
.into_iter()
|
||||
.map(|item| item.trim_matches('/').to_string())
|
||||
.filter(|item| !item.is_empty())
|
||||
.filter(|item| seen.insert(item.to_lowercase()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn normalize_url_like(value: Option<String>) -> Option<String> {
|
||||
trim_to_option(value).map(|item| item.trim_end_matches('/').to_string())
|
||||
}
|
||||
|
||||
pub fn normalize_post_status(value: Option<&str>) -> String {
|
||||
match value
|
||||
.map(str::trim)
|
||||
.unwrap_or_default()
|
||||
.to_ascii_lowercase()
|
||||
.as_str()
|
||||
{
|
||||
POST_STATUS_DRAFT => POST_STATUS_DRAFT.to_string(),
|
||||
POST_STATUS_OFFLINE => POST_STATUS_OFFLINE.to_string(),
|
||||
_ => POST_STATUS_PUBLISHED.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn normalize_post_visibility(value: Option<&str>) -> String {
|
||||
match value
|
||||
.map(str::trim)
|
||||
.unwrap_or_default()
|
||||
.to_ascii_lowercase()
|
||||
.as_str()
|
||||
{
|
||||
POST_VISIBILITY_UNLISTED => POST_VISIBILITY_UNLISTED.to_string(),
|
||||
POST_VISIBILITY_PRIVATE => POST_VISIBILITY_PRIVATE.to_string(),
|
||||
_ => POST_VISIBILITY_PUBLIC.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_frontmatter_datetime(value: Option<String>) -> Option<DateTime<FixedOffset>> {
|
||||
let raw = trim_to_option(value)?;
|
||||
|
||||
if let Ok(parsed) = DateTime::parse_from_rfc3339(&raw) {
|
||||
return Some(parsed);
|
||||
}
|
||||
|
||||
if let Ok(date_only) = NaiveDate::parse_from_str(&raw, "%Y-%m-%d") {
|
||||
let naive = date_only.and_hms_opt(0, 0, 0)?;
|
||||
return FixedOffset::east_opt(0)?.from_local_datetime(&naive).single();
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn format_frontmatter_datetime(value: Option<DateTime<FixedOffset>>) -> Option<String> {
|
||||
value.map(|item| item.with_timezone(&Utc).to_rfc3339())
|
||||
}
|
||||
|
||||
fn resolve_post_status(frontmatter: &MarkdownFrontmatter) -> String {
|
||||
if let Some(status) = trim_to_option(frontmatter.status.clone()) {
|
||||
return normalize_post_status(Some(&status));
|
||||
}
|
||||
|
||||
if frontmatter.draft.unwrap_or(false) {
|
||||
POST_STATUS_DRAFT.to_string()
|
||||
} else if frontmatter.published.unwrap_or(true) {
|
||||
POST_STATUS_PUBLISHED.to_string()
|
||||
} else {
|
||||
POST_STATUS_DRAFT.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn effective_post_state(
|
||||
status: &str,
|
||||
publish_at: Option<DateTime<FixedOffset>>,
|
||||
unpublish_at: Option<DateTime<FixedOffset>>,
|
||||
now: DateTime<FixedOffset>,
|
||||
) -> String {
|
||||
let normalized_status = normalize_post_status(Some(status));
|
||||
|
||||
if normalized_status == POST_STATUS_DRAFT {
|
||||
return POST_STATUS_DRAFT.to_string();
|
||||
}
|
||||
|
||||
if normalized_status == POST_STATUS_OFFLINE {
|
||||
return POST_STATUS_OFFLINE.to_string();
|
||||
}
|
||||
|
||||
if publish_at.map(|value| value > now).unwrap_or(false) {
|
||||
return "scheduled".to_string();
|
||||
}
|
||||
|
||||
if unpublish_at.map(|value| value <= now).unwrap_or(false) {
|
||||
return "expired".to_string();
|
||||
}
|
||||
|
||||
POST_STATUS_PUBLISHED.to_string()
|
||||
}
|
||||
|
||||
pub fn post_redirects_from_json(value: &Option<Value>) -> Vec<String> {
|
||||
value
|
||||
.as_ref()
|
||||
.and_then(Value::as_array)
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.filter_map(|item| item.as_str().map(ToString::to_string))
|
||||
.map(|item| item.trim_matches('/').to_string())
|
||||
.filter(|item| !item.is_empty())
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn is_post_listed_publicly(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
|
||||
effective_post_state(
|
||||
post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
|
||||
post.publish_at,
|
||||
post.unpublish_at,
|
||||
now,
|
||||
) == POST_STATUS_PUBLISHED
|
||||
&& normalize_post_visibility(post.visibility.as_deref()) == POST_VISIBILITY_PUBLIC
|
||||
}
|
||||
|
||||
pub fn is_post_publicly_accessible(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
|
||||
effective_post_state(
|
||||
post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
|
||||
post.publish_at,
|
||||
post.unpublish_at,
|
||||
now,
|
||||
) == POST_STATUS_PUBLISHED
|
||||
&& normalize_post_visibility(post.visibility.as_deref()) != POST_VISIBILITY_PRIVATE
|
||||
}
|
||||
|
||||
fn split_inline_list(value: &str) -> Vec<String> {
|
||||
value
|
||||
.split([',', ','])
|
||||
@@ -268,7 +442,7 @@ fn parse_markdown_post(path: &Path) -> Result<MarkdownPost> {
|
||||
parse_markdown_source(&file_stem, &raw, &path.to_string_lossy())
|
||||
}
|
||||
|
||||
fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
|
||||
pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
|
||||
let (frontmatter, content) = split_frontmatter(raw)?;
|
||||
|
||||
let slug = trim_to_option(frontmatter.slug.clone()).unwrap_or_else(|| file_stem.to_string());
|
||||
@@ -282,6 +456,7 @@ fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<
|
||||
.next();
|
||||
let tags = frontmatter
|
||||
.tags
|
||||
.clone()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.map(|item| item.trim().to_string())
|
||||
@@ -300,60 +475,90 @@ fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<
|
||||
image: trim_to_option(frontmatter.image.clone()),
|
||||
images: normalize_string_list(frontmatter.images.clone()),
|
||||
pinned: frontmatter.pinned.unwrap_or(false),
|
||||
published: frontmatter
|
||||
.published
|
||||
.unwrap_or(!frontmatter.draft.unwrap_or(false)),
|
||||
status: resolve_post_status(&frontmatter),
|
||||
visibility: normalize_post_visibility(frontmatter.visibility.as_deref()),
|
||||
publish_at: format_frontmatter_datetime(parse_frontmatter_datetime(
|
||||
frontmatter.publish_at.clone(),
|
||||
)),
|
||||
unpublish_at: format_frontmatter_datetime(parse_frontmatter_datetime(
|
||||
frontmatter.unpublish_at.clone(),
|
||||
)),
|
||||
canonical_url: normalize_url_like(frontmatter.canonical_url.clone()),
|
||||
noindex: frontmatter.noindex.unwrap_or(false),
|
||||
og_image: normalize_url_like(frontmatter.og_image.clone()),
|
||||
redirect_from: normalize_redirect_list(frontmatter.redirect_from.clone()),
|
||||
redirect_to: trim_to_option(frontmatter.redirect_to.clone())
|
||||
.map(|item| item.trim_matches('/').to_string()),
|
||||
file_path: file_path.to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
fn build_markdown_document(post: &MarkdownPost) -> String {
|
||||
pub fn build_markdown_document(post: &MarkdownPost) -> String {
|
||||
let mut lines = vec![
|
||||
"---".to_string(),
|
||||
format!(
|
||||
"title: {}",
|
||||
serde_yaml::to_string(&post.title)
|
||||
.unwrap_or_else(|_| format!("{:?}", post.title))
|
||||
.trim()
|
||||
),
|
||||
format!("slug: {}", post.slug),
|
||||
format!("title: {}", yaml_scalar(&post.title)),
|
||||
format!("slug: {}", yaml_scalar(&post.slug)),
|
||||
];
|
||||
|
||||
if let Some(description) = &post.description {
|
||||
lines.push(format!(
|
||||
"description: {}",
|
||||
serde_yaml::to_string(description)
|
||||
.unwrap_or_else(|_| format!("{description:?}"))
|
||||
.trim()
|
||||
));
|
||||
lines.push(format!("description: {}", yaml_scalar(description)));
|
||||
}
|
||||
|
||||
if let Some(category) = &post.category {
|
||||
lines.push(format!("category: {}", category));
|
||||
lines.push(format!("category: {}", yaml_scalar(category)));
|
||||
}
|
||||
|
||||
lines.push(format!("post_type: {}", post.post_type));
|
||||
lines.push(format!("post_type: {}", yaml_scalar(&post.post_type)));
|
||||
lines.push(format!("pinned: {}", post.pinned));
|
||||
lines.push(format!("published: {}", post.published));
|
||||
lines.push(format!("status: {}", yaml_scalar(&post.status)));
|
||||
lines.push(format!("visibility: {}", yaml_scalar(&post.visibility)));
|
||||
lines.push(format!("noindex: {}", post.noindex));
|
||||
|
||||
if let Some(publish_at) = &post.publish_at {
|
||||
lines.push(format!("publish_at: {}", yaml_scalar(publish_at)));
|
||||
}
|
||||
|
||||
if let Some(unpublish_at) = &post.unpublish_at {
|
||||
lines.push(format!("unpublish_at: {}", yaml_scalar(unpublish_at)));
|
||||
}
|
||||
|
||||
if let Some(image) = &post.image {
|
||||
lines.push(format!("image: {}", image));
|
||||
lines.push(format!("image: {}", yaml_scalar(image)));
|
||||
}
|
||||
|
||||
if !post.images.is_empty() {
|
||||
lines.push("images:".to_string());
|
||||
for image in &post.images {
|
||||
lines.push(format!(" - {}", image));
|
||||
lines.push(format!(" - {}", yaml_scalar(image)));
|
||||
}
|
||||
}
|
||||
|
||||
if !post.tags.is_empty() {
|
||||
lines.push("tags:".to_string());
|
||||
for tag in &post.tags {
|
||||
lines.push(format!(" - {}", tag));
|
||||
lines.push(format!(" - {}", yaml_scalar(tag)));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(canonical_url) = &post.canonical_url {
|
||||
lines.push(format!("canonical_url: {}", yaml_scalar(canonical_url)));
|
||||
}
|
||||
|
||||
if let Some(og_image) = &post.og_image {
|
||||
lines.push(format!("og_image: {}", yaml_scalar(og_image)));
|
||||
}
|
||||
|
||||
if !post.redirect_from.is_empty() {
|
||||
lines.push("redirect_from:".to_string());
|
||||
for redirect in &post.redirect_from {
|
||||
lines.push(format!(" - {}", yaml_scalar(redirect)));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(redirect_to) = &post.redirect_to {
|
||||
lines.push(format!("redirect_to: {}", yaml_scalar(redirect_to)));
|
||||
}
|
||||
|
||||
lines.push("---".to_string());
|
||||
lines.push(String::new());
|
||||
lines.push(post.content.trim().to_string());
|
||||
@@ -390,7 +595,19 @@ fn ensure_markdown_posts_bootstrapped() -> Result<()> {
|
||||
image: None,
|
||||
images: Vec::new(),
|
||||
pinned: fixture.pinned.unwrap_or(false),
|
||||
published: fixture.published.unwrap_or(true),
|
||||
status: if fixture.published.unwrap_or(true) {
|
||||
POST_STATUS_PUBLISHED.to_string()
|
||||
} else {
|
||||
POST_STATUS_DRAFT.to_string()
|
||||
},
|
||||
visibility: POST_VISIBILITY_PUBLIC.to_string(),
|
||||
publish_at: None,
|
||||
unpublish_at: None,
|
||||
canonical_url: None,
|
||||
noindex: false,
|
||||
og_image: None,
|
||||
redirect_from: Vec::new(),
|
||||
redirect_to: None,
|
||||
file_path: markdown_post_path(&fixture.slug)
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
@@ -799,6 +1016,27 @@ pub async fn sync_markdown_posts(ctx: &AppContext) -> Result<Vec<MarkdownPost>>
|
||||
))
|
||||
});
|
||||
model.pinned = Set(Some(post.pinned));
|
||||
model.status = Set(Some(normalize_post_status(Some(&post.status))));
|
||||
model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
|
||||
model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
|
||||
model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
|
||||
model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
|
||||
model.noindex = Set(Some(post.noindex));
|
||||
model.og_image = Set(normalize_url_like(post.og_image.clone()));
|
||||
model.redirect_from = Set(if post.redirect_from.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(Value::Array(
|
||||
post.redirect_from
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(Value::String)
|
||||
.collect::<Vec<_>>(),
|
||||
))
|
||||
});
|
||||
model.redirect_to = Set(
|
||||
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string()),
|
||||
);
|
||||
|
||||
if has_existing {
|
||||
let _ = model.update(&ctx.db).await;
|
||||
@@ -895,7 +1133,16 @@ pub async fn create_markdown_post(
|
||||
image: trim_to_option(draft.image),
|
||||
images: normalize_string_list(Some(draft.images)),
|
||||
pinned: draft.pinned,
|
||||
published: draft.published,
|
||||
status: normalize_post_status(Some(&draft.status)),
|
||||
visibility: normalize_post_visibility(Some(&draft.visibility)),
|
||||
publish_at: format_frontmatter_datetime(parse_frontmatter_datetime(draft.publish_at)),
|
||||
unpublish_at: format_frontmatter_datetime(parse_frontmatter_datetime(draft.unpublish_at)),
|
||||
canonical_url: normalize_url_like(draft.canonical_url),
|
||||
noindex: draft.noindex,
|
||||
og_image: normalize_url_like(draft.og_image),
|
||||
redirect_from: normalize_redirect_list(Some(draft.redirect_from)),
|
||||
redirect_to: trim_to_option(draft.redirect_to)
|
||||
.map(|item| item.trim_matches('/').to_string()),
|
||||
file_path: markdown_post_path(&slug).to_string_lossy().to_string(),
|
||||
};
|
||||
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
pub mod admin_audit;
|
||||
pub mod abuse_guard;
|
||||
pub mod ai;
|
||||
pub mod analytics;
|
||||
pub mod comment_guard;
|
||||
pub mod content;
|
||||
pub mod notifications;
|
||||
pub mod post_revisions;
|
||||
pub mod storage;
|
||||
pub mod subscriptions;
|
||||
|
||||
164
backend/src/services/notifications.rs
Normal file
164
backend/src/services/notifications.rs
Normal file
@@ -0,0 +1,164 @@
|
||||
use loco_rs::prelude::*;
|
||||
use crate::{
|
||||
controllers::site_settings,
|
||||
models::_entities::{comments, friend_links},
|
||||
services::subscriptions,
|
||||
};
|
||||
|
||||
/// Collapses an optional string to `None` when it is absent or blank,
/// otherwise returns the trimmed content as an owned `String`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let raw = value?;
    let cleaned = raw.trim();
    if cleaned.is_empty() {
        None
    } else {
        Some(cleaned.to_string())
    }
}
|
||||
|
||||
/// Produces a short, single-line excerpt of `value`.
///
/// All runs of whitespace are collapsed to single spaces; the result is
/// clipped to `limit` characters with a trailing `"..."` when text was cut.
/// Returns `None` for missing or whitespace-only input.
fn excerpt(value: Option<&str>, limit: usize) -> Option<String> {
    let words: Vec<&str> = value?.split_whitespace().collect();
    if words.is_empty() {
        return None;
    }

    let flattened = words.join(" ");
    let total_chars = flattened.chars().count();

    // Clip on char boundaries (not bytes) so multi-byte text stays valid.
    let mut clipped: String = flattened.chars().take(limit).collect();
    if total_chars > limit {
        clipped.push_str("...");
    }
    Some(clipped)
}
|
||||
|
||||
pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
|
||||
let settings = match site_settings::load_current(ctx).await {
|
||||
Ok(settings) => settings,
|
||||
Err(error) => {
|
||||
tracing::warn!("failed to load site settings before comment notification: {error}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let payload = serde_json::json!({
|
||||
"event_type": subscriptions::EVENT_COMMENT_CREATED,
|
||||
"id": item.id,
|
||||
"post_slug": item.post_slug,
|
||||
"author": item.author,
|
||||
"email": item.email,
|
||||
"scope": item.scope,
|
||||
"paragraph_key": item.paragraph_key,
|
||||
"approved": item.approved.unwrap_or(false),
|
||||
"excerpt": excerpt(item.content.as_deref(), 200),
|
||||
"created_at": item.created_at.to_rfc3339(),
|
||||
});
|
||||
let text = format!(
|
||||
"收到一条新的评论。\n\n文章:{}\n作者:{}\n范围:{}\n状态:{}\n摘要:{}",
|
||||
item.post_slug.clone().unwrap_or_else(|| "未知文章".to_string()),
|
||||
item.author.clone().unwrap_or_else(|| "匿名".to_string()),
|
||||
item.scope,
|
||||
if item.approved.unwrap_or(false) { "已通过" } else { "待审核" },
|
||||
excerpt(item.content.as_deref(), 200).unwrap_or_else(|| "无".to_string()),
|
||||
);
|
||||
|
||||
if let Err(error) = subscriptions::queue_event_for_active_subscriptions(
|
||||
ctx,
|
||||
subscriptions::EVENT_COMMENT_CREATED,
|
||||
"新评论通知",
|
||||
&text,
|
||||
payload.clone(),
|
||||
trim_to_option(settings.site_name.clone()),
|
||||
trim_to_option(settings.site_url.clone()),
|
||||
)
|
||||
.await
|
||||
{
|
||||
tracing::warn!("failed to queue comment subscription notification: {error}");
|
||||
}
|
||||
|
||||
if settings.notification_comment_enabled.unwrap_or(false) {
|
||||
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
|
||||
if let Err(error) = subscriptions::queue_direct_notification(
|
||||
ctx,
|
||||
subscriptions::CHANNEL_WEBHOOK,
|
||||
&target,
|
||||
subscriptions::EVENT_COMMENT_CREATED,
|
||||
"新评论通知",
|
||||
&text,
|
||||
payload,
|
||||
trim_to_option(settings.site_name),
|
||||
trim_to_option(settings.site_url),
|
||||
)
|
||||
.await
|
||||
{
|
||||
tracing::warn!("failed to queue legacy comment webhook notification: {error}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model) {
|
||||
let settings = match site_settings::load_current(ctx).await {
|
||||
Ok(settings) => settings,
|
||||
Err(error) => {
|
||||
tracing::warn!("failed to load site settings before friend-link notification: {error}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let payload = serde_json::json!({
|
||||
"event_type": subscriptions::EVENT_FRIEND_LINK_CREATED,
|
||||
"id": item.id,
|
||||
"site_name": item.site_name,
|
||||
"site_url": item.site_url,
|
||||
"category": item.category,
|
||||
"status": item.status,
|
||||
"description": item.description,
|
||||
"created_at": item.created_at.to_rfc3339(),
|
||||
});
|
||||
let text = format!(
|
||||
"收到新的友链申请。\n\n站点:{}\n链接:{}\n分类:{}\n状态:{}\n描述:{}",
|
||||
item.site_name.clone().unwrap_or_else(|| "未命名站点".to_string()),
|
||||
item.site_url,
|
||||
item.category.clone().unwrap_or_else(|| "未分类".to_string()),
|
||||
item.status.clone().unwrap_or_else(|| "pending".to_string()),
|
||||
item.description.clone().unwrap_or_else(|| "无".to_string()),
|
||||
);
|
||||
|
||||
if let Err(error) = subscriptions::queue_event_for_active_subscriptions(
|
||||
ctx,
|
||||
subscriptions::EVENT_FRIEND_LINK_CREATED,
|
||||
"新友链申请通知",
|
||||
&text,
|
||||
payload.clone(),
|
||||
trim_to_option(settings.site_name.clone()),
|
||||
trim_to_option(settings.site_url.clone()),
|
||||
)
|
||||
.await
|
||||
{
|
||||
tracing::warn!("failed to queue friend-link subscription notification: {error}");
|
||||
}
|
||||
|
||||
if settings.notification_friend_link_enabled.unwrap_or(false) {
|
||||
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
|
||||
if let Err(error) = subscriptions::queue_direct_notification(
|
||||
ctx,
|
||||
subscriptions::CHANNEL_WEBHOOK,
|
||||
&target,
|
||||
subscriptions::EVENT_FRIEND_LINK_CREATED,
|
||||
"新友链申请通知",
|
||||
&text,
|
||||
payload,
|
||||
trim_to_option(settings.site_name),
|
||||
trim_to_option(settings.site_url),
|
||||
)
|
||||
.await
|
||||
{
|
||||
tracing::warn!("failed to queue legacy friend-link webhook notification: {error}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
247
backend/src/services/post_revisions.rs
Normal file
247
backend/src/services/post_revisions.rs
Normal file
@@ -0,0 +1,247 @@
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ColumnTrait, EntityTrait, Order, QueryFilter, QueryOrder, QuerySelect, Set,
|
||||
};
|
||||
use std::fs;
|
||||
|
||||
use crate::{
|
||||
controllers::admin::AdminIdentity,
|
||||
models::_entities::{post_revisions, posts},
|
||||
services::content,
|
||||
};
|
||||
|
||||
/// How much of a stored revision to apply when restoring a post.
#[derive(Clone, Copy, Debug)]
pub enum RestoreMode {
    /// Replace the whole document (frontmatter + body).
    Full,
    /// Replace only the markdown body, keeping current frontmatter.
    Markdown,
    /// Replace only the frontmatter, keeping the current body.
    Metadata,
}

impl RestoreMode {
    /// Parses a user-supplied mode string (case-insensitive, whitespace
    /// tolerant). Unknown values fall back to [`RestoreMode::Full`].
    pub fn parse(value: &str) -> Self {
        let normalized = value.trim().to_ascii_lowercase();
        if matches!(normalized.as_str(), "markdown" | "content" | "body") {
            Self::Markdown
        } else if matches!(normalized.as_str(), "metadata" | "frontmatter") {
            Self::Metadata
        } else {
            Self::Full
        }
    }

    /// Canonical lowercase label, used in audit metadata.
    pub fn as_str(self) -> &'static str {
        match self {
            Self::Markdown => "markdown",
            Self::Metadata => "metadata",
            Self::Full => "full",
        }
    }
}
|
||||
|
||||
/// Maps an optional string to `None` when missing or blank after trimming,
/// otherwise to the trimmed owned copy.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value
        .map(|item| item.trim().to_string())
        .filter(|trimmed| !trimmed.is_empty())
}
|
||||
|
||||
/// Best-effort human title for a post snapshot.
///
/// Priority: frontmatter `title:` field, then the first `# ` ATX heading,
/// finally the (trimmed) slug itself. Returns `None` only when all three
/// sources are blank. CRLF input is normalized before scanning.
fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
    let normalized = markdown.replace("\r\n", "\n");

    // A frontmatter block exists only when the document opens with `---`
    // and a matching closing fence follows.
    let frontmatter = normalized
        .strip_prefix("---\n")
        .and_then(|rest| rest.split_once("\n---\n"))
        .map(|(head, _)| head);

    if let Some(block) = frontmatter {
        for line in block.lines() {
            if let Some(raw) = line.trim().strip_prefix("title:") {
                // Strip optional surrounding quotes (either style).
                let candidate = raw.trim().trim_matches('"').trim_matches('\'').trim();
                if !candidate.is_empty() {
                    return Some(candidate.to_string());
                }
            }
        }
    }

    let heading = normalized.lines().find_map(|line| {
        let body = line.trim().strip_prefix("# ")?.trim();
        (!body.is_empty()).then(|| body.to_string())
    });
    if heading.is_some() {
        return heading;
    }

    // Last resort: the slug, unless it is blank.
    let fallback = slug.trim();
    (!fallback.is_empty()).then(|| fallback.to_string())
}
|
||||
|
||||
async fn lookup_post_title(ctx: &AppContext, slug: &str) -> Option<String> {
|
||||
posts::Entity::find()
|
||||
.filter(posts::Column::Slug.eq(slug))
|
||||
.one(&ctx.db)
|
||||
.await
|
||||
.ok()
|
||||
.flatten()
|
||||
.and_then(|item| item.title)
|
||||
.and_then(|value| trim_to_option(Some(value)))
|
||||
}
|
||||
|
||||
/// Persists a revision snapshot of `markdown` for the post identified by `slug`.
///
/// * `actor` — the admin performing the operation; `None` for system-triggered
///   snapshots, in which case the actor columns are stored as NULL.
/// * `operation` — short label for why the snapshot was taken
///   (e.g. "restore", "restore_backup").
/// * `reason` / `metadata` — free-form audit context stored alongside.
///
/// The revision title is taken from the posts table when available, otherwise
/// derived from the markdown itself (frontmatter `title:` or first heading).
/// Returns the inserted revision row; DB failures propagate as `Err`.
pub async fn capture_snapshot_from_markdown(
    ctx: &AppContext,
    actor: Option<&AdminIdentity>,
    slug: &str,
    markdown: &str,
    operation: &str,
    reason: Option<&str>,
    metadata: Option<serde_json::Value>,
) -> Result<post_revisions::Model> {
    // Prefer the canonical DB title; fall back to parsing the snapshot text.
    let post_title = lookup_post_title(ctx, slug)
        .await
        .or_else(|| title_from_markdown(markdown, slug));

    post_revisions::ActiveModel {
        post_slug: Set(slug.to_string()),
        post_title: Set(post_title),
        operation: Set(operation.to_string()),
        revision_reason: Set(reason.map(ToString::to_string)),
        actor_username: Set(actor.map(|item| item.username.clone())),
        actor_email: Set(actor.and_then(|item| item.email.clone())),
        actor_source: Set(actor.map(|item| item.source.clone())),
        // Normalize CRLF so stored snapshots diff cleanly across platforms.
        markdown: Set(Some(markdown.replace("\r\n", "\n"))),
        metadata: Set(metadata),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await
    .map_err(Into::into)
}
|
||||
|
||||
pub async fn capture_current_snapshot(
|
||||
ctx: &AppContext,
|
||||
actor: Option<&AdminIdentity>,
|
||||
slug: &str,
|
||||
operation: &str,
|
||||
reason: Option<&str>,
|
||||
metadata: Option<serde_json::Value>,
|
||||
) -> Result<Option<post_revisions::Model>> {
|
||||
let Ok((_path, markdown)) = content::read_markdown_document(slug) else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
capture_snapshot_from_markdown(ctx, actor, slug, &markdown, operation, reason, metadata)
|
||||
.await
|
||||
.map(Some)
|
||||
}
|
||||
|
||||
pub async fn list_revisions(
|
||||
ctx: &AppContext,
|
||||
slug: Option<&str>,
|
||||
limit: u64,
|
||||
) -> Result<Vec<post_revisions::Model>> {
|
||||
let mut query = post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
|
||||
|
||||
if let Some(slug) = slug.map(str::trim).filter(|value| !value.is_empty()) {
|
||||
query = query.filter(post_revisions::Column::PostSlug.eq(slug));
|
||||
}
|
||||
|
||||
query
|
||||
.limit(limit)
|
||||
.all(&ctx.db)
|
||||
.await
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
/// Fetches a single post revision by primary key.
///
/// Returns `Error::NotFound` when no revision with `id` exists, so HTTP
/// handlers can map the failure straight to a 404.
pub async fn get_revision(ctx: &AppContext, id: i32) -> Result<post_revisions::Model> {
    post_revisions::Entity::find_by_id(id)
        .one(&ctx.db)
        .await?
        .ok_or(Error::NotFound)
}
|
||||
|
||||
/// Restores a post from a stored revision.
///
/// `mode` selects how much to restore (see [`RestoreMode::parse`]): the whole
/// document, only the markdown body, or only the frontmatter metadata.
/// The ordering below is deliberate and must not change:
///   1. take an automatic backup snapshot of the current file ("restore_backup"),
///   2. build the markdown to write (full copy, or a merge of revision and
///      current document for partial modes),
///   3. write the file and re-sync the markdown posts into the database,
///   4. record a post-restore snapshot ("restore") for the audit trail.
///
/// Errors: `BadRequest` when the revision has no markdown snapshot, when a
/// partial restore is requested but the current file is missing, or on any
/// filesystem failure; DB/sync errors propagate unchanged.
pub async fn restore_revision(
    ctx: &AppContext,
    actor: Option<&AdminIdentity>,
    revision_id: i32,
    mode: &str,
) -> Result<post_revisions::Model> {
    let revision = get_revision(ctx, revision_id).await?;
    let slug = revision.post_slug.clone();
    // A revision without a non-blank markdown snapshot cannot be restored.
    let revision_markdown = revision
        .markdown
        .clone()
        .filter(|value| !value.trim().is_empty())
        .ok_or_else(|| Error::BadRequest("该版本没有可恢复的 Markdown 快照".to_string()))?;
    let restore_mode = RestoreMode::parse(mode);

    // Step 1: automatic backup of the current state before touching anything.
    // `Ok(None)` (no current file) is acceptable; hard errors abort the restore.
    let _ = capture_current_snapshot(
        ctx,
        actor,
        &slug,
        "restore_backup",
        Some("恢复前自动备份"),
        Some(serde_json::json!({
            "source_revision_id": revision_id,
            "mode": restore_mode.as_str(),
        })),
    )
    .await?;

    // Step 2: decide what to write.
    let markdown = match restore_mode {
        RestoreMode::Full => revision_markdown.clone(),
        RestoreMode::Markdown | RestoreMode::Metadata => {
            // Partial restore needs the current document to merge against.
            let (_path, current_markdown) = content::read_markdown_document(&slug).map_err(|_| {
                Error::BadRequest("当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string())
            })?;
            let revision_post =
                content::parse_markdown_source(&slug, &revision_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
            let current_post =
                content::parse_markdown_source(&slug, &current_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
            let mut merged = current_post.clone();
            match restore_mode {
                RestoreMode::Markdown => {
                    // Body from the revision, frontmatter from the current post.
                    merged.content = revision_post.content;
                }
                RestoreMode::Metadata => {
                    // Frontmatter from the revision, body from the current post.
                    merged.title = revision_post.title;
                    merged.description = revision_post.description;
                    merged.category = revision_post.category;
                    merged.tags = revision_post.tags;
                    merged.post_type = revision_post.post_type;
                    merged.image = revision_post.image;
                    merged.images = revision_post.images;
                    merged.pinned = revision_post.pinned;
                    merged.status = revision_post.status;
                    merged.visibility = revision_post.visibility;
                    merged.publish_at = revision_post.publish_at;
                    merged.unpublish_at = revision_post.unpublish_at;
                    merged.canonical_url = revision_post.canonical_url;
                    merged.noindex = revision_post.noindex;
                    merged.og_image = revision_post.og_image;
                    merged.redirect_from = revision_post.redirect_from;
                    merged.redirect_to = revision_post.redirect_to;
                }
                // Full is handled by the outer match arm above.
                RestoreMode::Full => unreachable!(),
            }
            content::build_markdown_document(&merged)
        }
    };

    // Step 3: write the file (LF-normalized) and re-sync posts into the DB.
    fs::create_dir_all(content::MARKDOWN_POSTS_DIR).map_err(|error| Error::BadRequest(error.to_string()))?;
    fs::write(content::markdown_post_path(&slug), markdown.replace("\r\n", "\n"))
        .map_err(|error| Error::BadRequest(error.to_string()))?;
    content::sync_markdown_posts(ctx).await?;

    // Step 4: record the restored state itself in the revision history.
    let _ = capture_snapshot_from_markdown(
        ctx,
        actor,
        &slug,
        &markdown,
        "restore",
        Some("通过版本历史恢复"),
        Some(serde_json::json!({
            "source_revision_id": revision_id,
            "mode": restore_mode.as_str(),
        })),
    )
    .await?;

    Ok(revision)
}
|
||||
1216
backend/src/services/subscriptions.rs
Normal file
1216
backend/src/services/subscriptions.rs
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user