chore: checkpoint admin editor and perf work

This commit is contained in:
2026-03-31 00:12:02 +08:00
parent 92a85eef20
commit 99b308e800
45 changed files with 7265 additions and 833 deletions

View File

@@ -1,3 +1,4 @@
use axum::extract::{Multipart, Query};
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, PaginatorTrait, QueryFilter,
@@ -14,7 +15,7 @@ use crate::{
site_settings::{self, SiteSettingsPayload},
},
models::_entities::{ai_chunks, comments, friend_links, posts, reviews},
services::{ai, content},
services::{ai, analytics, content, storage},
};
#[derive(Clone, Debug, Deserialize)]
@@ -130,6 +131,10 @@ pub struct AdminSiteSettingsResponse {
pub ai_api_base: Option<String>,
pub ai_api_key: Option<String>,
pub ai_chat_model: Option<String>,
pub ai_image_provider: Option<String>,
pub ai_image_api_base: Option<String>,
pub ai_image_api_key: Option<String>,
pub ai_image_model: Option<String>,
pub ai_providers: Vec<site_settings::AiProviderConfig>,
pub ai_active_provider_id: Option<String>,
pub ai_embedding_model: Option<String>,
@@ -139,6 +144,12 @@ pub struct AdminSiteSettingsResponse {
pub ai_last_indexed_at: Option<String>,
pub ai_chunks_count: u64,
pub ai_local_embedding: String,
pub media_storage_provider: Option<String>,
pub media_r2_account_id: Option<String>,
pub media_r2_bucket: Option<String>,
pub media_r2_public_base_url: Option<String>,
pub media_r2_access_key_id: Option<String>,
pub media_r2_secret_access_key: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
@@ -160,6 +171,67 @@ pub struct AdminAiProviderTestResponse {
pub reply_preview: String,
}
/// Request payload for probing connectivity of an AI image-generation provider.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminAiImageProviderTestRequest {
pub provider: String,
pub api_base: String,
pub api_key: String,
pub image_model: String,
}
/// Result of an AI image provider connectivity test, echoed back to the admin UI.
#[derive(Clone, Debug, Serialize)]
pub struct AdminAiImageProviderTestResponse {
pub provider: String,
/// Concrete endpoint the test request was sent to.
pub endpoint: String,
pub image_model: String,
/// Short preview of the provider's reply, for display only.
pub result_preview: String,
}
/// Response after a successful image upload: public URL plus storage key.
#[derive(Clone, Debug, Serialize)]
pub struct AdminImageUploadResponse {
pub url: String,
pub key: String,
}
/// Result of an R2 storage connectivity check.
#[derive(Clone, Debug, Serialize)]
pub struct AdminR2ConnectivityResponse {
pub bucket: String,
pub public_base_url: String,
}
/// One stored media object in a listing.
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaObjectResponse {
pub key: String,
pub url: String,
pub size_bytes: i64,
/// Formatted timestamp, if the storage backend reported one.
pub last_modified: Option<String>,
}
/// Listing of media objects together with the storage configuration in effect.
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaListResponse {
pub provider: String,
pub bucket: String,
pub public_base_url: String,
pub items: Vec<AdminMediaObjectResponse>,
}
/// Acknowledgement for a media object deletion.
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaDeleteResponse {
pub deleted: bool,
pub key: String,
}
/// Query parameters for listing media objects.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaListQuery {
/// Optional key prefix filter.
pub prefix: Option<String>,
/// Maximum number of objects to return (handler defaults to 200).
pub limit: Option<i32>,
}
/// Query parameters for deleting a single media object by key.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaDeleteQuery {
pub key: String,
}
#[derive(Clone, Debug, Deserialize)]
pub struct AdminPostMetadataRequest {
pub markdown: String,
@@ -170,6 +242,30 @@ pub struct AdminPostPolishRequest {
pub markdown: String,
}
/// Full review context sent to the AI polish endpoint; every field is used as
/// prompt context for rewriting `description`.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminReviewPolishRequest {
pub title: String,
pub review_type: String,
pub rating: i32,
pub review_date: Option<String>,
pub status: String,
/// Defaults to an empty list when omitted from the JSON body.
#[serde(default)]
pub tags: Vec<String>,
/// The text to be polished.
pub description: String,
}
/// Post context sent to the AI cover-image generation endpoint.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminPostCoverImageRequest {
pub title: String,
pub description: Option<String>,
pub category: Option<String>,
/// Defaults to an empty list when omitted from the JSON body.
#[serde(default)]
pub tags: Vec<String>,
pub post_type: String,
pub slug: Option<String>,
pub markdown: String,
}
fn format_timestamp(
value: Option<sea_orm::prelude::DateTimeWithTimeZone>,
pattern: &str,
@@ -242,6 +338,10 @@ fn build_settings_response(
ai_api_base: item.ai_api_base,
ai_api_key: item.ai_api_key,
ai_chat_model: item.ai_chat_model,
ai_image_provider: item.ai_image_provider,
ai_image_api_base: item.ai_image_api_base,
ai_image_api_key: item.ai_image_api_key,
ai_image_model: item.ai_image_model,
ai_providers,
ai_active_provider_id,
ai_embedding_model: item.ai_embedding_model,
@@ -251,6 +351,12 @@ fn build_settings_response(
ai_last_indexed_at: format_timestamp(item.ai_last_indexed_at, "%Y-%m-%d %H:%M:%S UTC"),
ai_chunks_count,
ai_local_embedding: ai::local_embedding_label().to_string(),
media_storage_provider: item.media_storage_provider,
media_r2_account_id: item.media_r2_account_id,
media_r2_bucket: item.media_r2_bucket,
media_r2_public_base_url: item.media_r2_public_base_url,
media_r2_access_key_id: item.media_r2_access_key_id,
media_r2_secret_access_key: item.media_r2_secret_access_key,
}
}
@@ -410,6 +516,12 @@ pub async fn dashboard(State(ctx): State<AppContext>) -> Result<Response> {
})
}
/// Admin endpoint: return the aggregated analytics overview as JSON.
///
/// Requires an authenticated admin session; all aggregation work is delegated
/// to `analytics::build_admin_analytics`.
#[debug_handler]
pub async fn analytics_overview(State(ctx): State<AppContext>) -> Result<Response> {
    check_auth()?;
    let overview = analytics::build_admin_analytics(&ctx).await?;
    format::json(overview)
}
#[debug_handler]
pub async fn get_site_settings(State(ctx): State<AppContext>) -> Result<Response> {
check_auth()?;
@@ -428,7 +540,7 @@ pub async fn update_site_settings(
let current = site_settings::load_current(&ctx).await?;
let mut item = current;
params.apply(&mut item);
let item = item.into_active_model();
let item = item.into_active_model().reset_all();
let updated = item.update(&ctx.db).await?;
let ai_chunks_count = ai_chunks::Entity::find().count(&ctx.db).await?;
@@ -469,6 +581,88 @@ pub async fn test_ai_provider(Json(payload): Json<AdminAiProviderTestRequest>) -
})
}
/// Admin endpoint: verify the supplied image-provider credentials by issuing a
/// lightweight test request via `ai::test_image_provider_connectivity` and
/// echoing the probe result back to the caller.
#[debug_handler]
pub async fn test_ai_image_provider(
Json(payload): Json<AdminAiImageProviderTestRequest>,
) -> Result<Response> {
check_auth()?;
let result = ai::test_image_provider_connectivity(
&payload.provider,
&payload.api_base,
&payload.api_key,
&payload.image_model,
)
.await?;
format::json(AdminAiImageProviderTestResponse {
provider: result.provider,
endpoint: result.endpoint,
image_model: result.image_model,
result_preview: result.result_preview,
})
}
/// Admin endpoint: verify that the configured R2 credentials can reach the bucket.
///
/// NOTE(review): `require_r2_settings` and `test_r2_connectivity` are separate
/// calls — the latter presumably re-loads the settings internally; consider
/// passing `settings` through to avoid the duplicate load. Confirm against
/// the `storage` service.
#[debug_handler]
pub async fn test_r2_storage(State(ctx): State<AppContext>) -> Result<Response> {
check_auth()?;
let settings = storage::require_r2_settings(&ctx).await?;
let bucket = storage::test_r2_connectivity(&ctx).await?;
format::json(AdminR2ConnectivityResponse {
bucket,
public_base_url: settings.public_base_url,
})
}
/// Admin endpoint: list stored media objects, optionally filtered by key prefix.
///
/// `limit` defaults to 200 when absent. NOTE(review): no upper bound is
/// enforced here — confirm `storage::list_objects` clamps oversized or
/// negative limits.
#[debug_handler]
pub async fn list_media_objects(
State(ctx): State<AppContext>,
Query(query): Query<AdminMediaListQuery>,
) -> Result<Response> {
check_auth()?;
// Settings provide the provider/bucket metadata echoed in the response.
let settings = storage::require_r2_settings(&ctx).await?;
let items = storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200))
.await?
.into_iter()
.map(|item| AdminMediaObjectResponse {
key: item.key,
url: item.url,
size_bytes: item.size_bytes,
last_modified: item.last_modified,
})
.collect::<Vec<_>>();
format::json(AdminMediaListResponse {
provider: settings.provider_name,
bucket: settings.bucket,
public_base_url: settings.public_base_url,
items,
})
}
/// Admin endpoint: delete a single stored media object addressed by `?key=...`.
///
/// Blank (or whitespace-only) keys are rejected with a 400 before any storage
/// call is made.
#[debug_handler]
pub async fn delete_media_object(
    State(ctx): State<AppContext>,
    Query(query): Query<AdminMediaDeleteQuery>,
) -> Result<Response> {
    check_auth()?;
    let object_key = query.key.trim();
    if object_key.is_empty() {
        return Err(Error::BadRequest("缺少对象 key".to_string()));
    }
    storage::delete_object(&ctx, object_key).await?;
    let acknowledgement = AdminMediaDeleteResponse {
        deleted: true,
        key: object_key.to_string(),
    };
    format::json(acknowledgement)
}
#[debug_handler]
pub async fn generate_post_metadata(
State(ctx): State<AppContext>,
@@ -487,6 +681,127 @@ pub async fn polish_post_markdown(
format::json(ai::polish_post_markdown(&ctx, &payload.markdown).await?)
}
/// Admin endpoint: ask the AI service to polish a review description, passing
/// the full review context (title, type, rating, date, status, tags) as prompt
/// material alongside the description text itself.
#[debug_handler]
pub async fn polish_review_description(
State(ctx): State<AppContext>,
Json(payload): Json<AdminReviewPolishRequest>,
) -> Result<Response> {
check_auth()?;
format::json(
ai::polish_review_description(
&ctx,
&payload.title,
&payload.review_type,
payload.rating,
payload.review_date.as_deref(),
&payload.status,
&payload.tags,
&payload.description,
)
.await?,
)
}
/// Admin endpoint: generate a cover image for a post via the configured AI
/// image provider, using the post's metadata and markdown body as context.
#[debug_handler]
pub async fn generate_post_cover_image(
State(ctx): State<AppContext>,
Json(payload): Json<AdminPostCoverImageRequest>,
) -> Result<Response> {
check_auth()?;
format::json(
ai::generate_post_cover_image(
&ctx,
&payload.title,
payload.description.as_deref(),
payload.category.as_deref(),
&payload.tags,
&payload.post_type,
payload.slug.as_deref(),
&payload.markdown,
)
.await?,
)
}
/// Resolve the storage file extension for an uploaded review cover image.
///
/// The extension is derived from the uploaded file name first; when the name
/// does not carry a recognized image suffix, the declared MIME type is used as
/// a fallback. Returns `None` when neither source identifies a supported
/// image format.
///
/// Fix over the previous version: `rsplit('.').next()` on a dotless name
/// returned the whole name, so a file literally named `png` was treated as
/// having a `.png` extension. `rsplit_once('.')` only yields a suffix when a
/// dot is actually present.
fn review_cover_extension(
    file_name: Option<&str>,
    content_type: Option<&str>,
) -> Option<&'static str> {
    // Only treat the trailing segment as an extension if the name contains a dot.
    let from_file_name = file_name
        .and_then(|name| name.rsplit_once('.'))
        .map(|(_, ext)| ext.trim().to_ascii_lowercase());
    if let Some(ext) = from_file_name.as_deref() {
        match ext {
            "png" => return Some("png"),
            "jpg" | "jpeg" => return Some("jpg"),
            "webp" => return Some("webp"),
            "gif" => return Some("gif"),
            "avif" => return Some("avif"),
            "svg" => return Some("svg"),
            // Unrecognized suffix: fall through to the MIME-type check.
            _ => {}
        }
    }
    match content_type
        .unwrap_or_default()
        .trim()
        .to_ascii_lowercase()
        .as_str()
    {
        "image/png" => Some("png"),
        "image/jpeg" => Some("jpg"),
        "image/webp" => Some("webp"),
        "image/gif" => Some("gif"),
        "image/avif" => Some("avif"),
        "image/svg+xml" => Some("svg"),
        _ => None,
    }
}
/// Admin endpoint: accept a multipart image upload, store it in R2 under the
/// `review-covers` prefix, and return the public URL plus object key.
#[debug_handler]
pub async fn upload_review_cover_image(
State(ctx): State<AppContext>,
mut multipart: Multipart,
) -> Result<Response> {
check_auth()?;
// Only the first multipart field is consumed; any additional fields are ignored.
let field = multipart
.next_field()
.await
.map_err(|error| Error::BadRequest(error.to_string()))?
.ok_or_else(|| Error::BadRequest("请先选择图片文件".to_string()))?;
let file_name = field.file_name().map(ToString::to_string);
let content_type = field.content_type().map(ToString::to_string);
// Reject anything not recognized as an image by file name or MIME type.
let extension = review_cover_extension(file_name.as_deref(), content_type.as_deref())
.ok_or_else(|| Error::BadRequest("仅支持常见图片格式上传".to_string()))?;
// NOTE(review): the whole upload is buffered in memory here — confirm a
// body-size limit is enforced upstream for this route.
let bytes = field
.bytes()
.await
.map_err(|error| Error::BadRequest(error.to_string()))?;
if bytes.is_empty() {
return Err(Error::BadRequest("上传的图片内容为空".to_string()));
}
let key = crate::services::storage::build_object_key(
"review-covers",
file_name.as_deref().unwrap_or("review-cover"),
extension,
);
// Covers get a unique key per upload, so a long-lived immutable cache policy is safe.
let stored = crate::services::storage::upload_bytes_to_r2(
&ctx,
&key,
bytes.to_vec(),
content_type.as_deref(),
Some("public, max-age=31536000, immutable"),
)
.await?;
format::json(AdminImageUploadResponse {
url: stored.url,
key: stored.key,
})
}
pub fn routes() -> Routes {
Routes::new()
.prefix("/api/admin")
@@ -494,11 +809,21 @@ pub fn routes() -> Routes {
.add("/session", delete(session_logout))
.add("/session/login", post(session_login))
.add("/dashboard", get(dashboard))
.add("/analytics", get(analytics_overview))
.add("/site-settings", get(get_site_settings))
.add("/site-settings", patch(update_site_settings))
.add("/site-settings", put(update_site_settings))
.add("/ai/reindex", post(reindex_ai))
.add("/ai/test-provider", post(test_ai_provider))
.add("/ai/test-image-provider", post(test_ai_image_provider))
.add("/storage/r2/test", post(test_r2_storage))
.add(
"/storage/media",
get(list_media_objects).delete(delete_media_object),
)
.add("/ai/post-metadata", post(generate_post_metadata))
.add("/ai/polish-post", post(polish_post_markdown))
.add("/ai/polish-review", post(polish_review_description))
.add("/ai/post-cover", post(generate_post_cover_image))
.add("/storage/review-cover", post(upload_review_cover_image))
}

View File

@@ -5,15 +5,19 @@ use axum::{
body::{Body, Bytes},
http::{
header::{CACHE_CONTROL, CONNECTION, CONTENT_TYPE},
HeaderValue,
HeaderMap, HeaderValue,
},
};
use chrono::{DateTime, Utc};
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::time::Instant;
use crate::{controllers::admin::check_auth, services::ai};
use crate::{
controllers::{admin::check_auth, site_settings},
services::{ai, analytics},
};
#[derive(Clone, Debug, Deserialize)]
pub struct AskPayload {
@@ -55,6 +59,30 @@ fn format_timestamp(value: Option<DateTime<Utc>>) -> Option<String> {
value.map(|item| item.to_rfc3339())
}
/// Normalize an optional string: trim surrounding whitespace and collapse
/// empty or whitespace-only values to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    match value {
        Some(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        None => None,
    }
}
/// Best-effort lookup of the currently configured AI provider name and chat
/// model, used only to tag analytics events. Failures are logged and mapped
/// to `(None, None)` so analytics recording never fails a user request.
async fn current_provider_metadata(ctx: &AppContext) -> (Option<String>, Option<String>) {
match site_settings::load_current(ctx).await {
Ok(settings) => (
trim_to_option(settings.ai_provider),
trim_to_option(settings.ai_chat_model),
),
Err(error) => {
tracing::warn!("failed to load ai provider metadata for analytics: {error}");
(None, None)
}
}
}
fn sse_bytes<T: Serialize>(event: &str, payload: &T) -> Bytes {
let data = serde_json::to_string(payload)
.unwrap_or_else(|_| "{\"message\":\"failed to serialize SSE payload\"}".to_string());
@@ -178,24 +206,66 @@ fn build_ask_response(prepared: &ai::PreparedAiAnswer, answer: String) -> AskRes
#[debug_handler]
pub async fn ask(
State(ctx): State<AppContext>,
headers: HeaderMap,
Json(payload): Json<AskPayload>,
) -> Result<Response> {
let result = ai::answer_question(&ctx, &payload.question).await?;
format::json(AskResponse {
question: payload.question.trim().to_string(),
answer: result.answer,
sources: result.sources,
indexed_chunks: result.indexed_chunks,
last_indexed_at: format_timestamp(result.last_indexed_at),
})
let started_at = Instant::now();
let question = payload.question.trim().to_string();
let (provider, chat_model) = current_provider_metadata(&ctx).await;
match ai::answer_question(&ctx, &payload.question).await {
Ok(result) => {
analytics::record_ai_question_event(
&ctx,
&question,
&headers,
true,
"sync",
provider,
chat_model,
Some(result.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
format::json(AskResponse {
question,
answer: result.answer,
sources: result.sources,
indexed_chunks: result.indexed_chunks,
last_indexed_at: format_timestamp(result.last_indexed_at),
})
}
Err(error) => {
analytics::record_ai_question_event(
&ctx,
&question,
&headers,
false,
"sync",
provider,
chat_model,
None,
started_at.elapsed().as_millis() as i64,
)
.await;
Err(error)
}
}
}
#[debug_handler]
pub async fn ask_stream(
State(ctx): State<AppContext>,
headers: HeaderMap,
Json(payload): Json<AskPayload>,
) -> Result<Response> {
let request_headers = headers.clone();
let question = payload.question.trim().to_string();
let (fallback_provider, fallback_chat_model) = current_provider_metadata(&ctx).await;
let stream = stream! {
let started_at = Instant::now();
yield Ok::<Bytes, std::io::Error>(sse_bytes("status", &StreamStatusEvent {
phase: "retrieving".to_string(),
message: "正在检索知识库上下文...".to_string(),
@@ -204,6 +274,18 @@ pub async fn ask_stream(
let prepared = match ai::prepare_answer(&ctx, &payload.question).await {
Ok(prepared) => prepared,
Err(error) => {
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
fallback_provider.clone(),
fallback_chat_model.clone(),
None,
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: error.to_string(),
}));
@@ -212,6 +294,16 @@ pub async fn ask_stream(
};
let mut accumulated_answer = String::new();
let active_provider = prepared
.provider_request
.as_ref()
.map(|request| request.provider.clone())
.or_else(|| fallback_provider.clone());
let active_chat_model = prepared
.provider_request
.as_ref()
.map(|request| request.chat_model.clone())
.or_else(|| fallback_chat_model.clone());
if let Some(answer) = prepared.immediate_answer.as_deref() {
yield Ok(sse_bytes("status", &StreamStatusEvent {
@@ -241,6 +333,18 @@ pub async fn ask_stream(
let mut response = match response {
Ok(response) => response,
Err(error) => {
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
active_provider.clone(),
active_chat_model.clone(),
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: format!("AI request failed: {error}"),
}));
@@ -251,6 +355,18 @@ pub async fn ask_stream(
if !response.status().is_success() {
let status = response.status();
let body = response.text().await.unwrap_or_default();
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
active_provider.clone(),
active_chat_model.clone(),
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: format!("AI provider returned {status}: {body}"),
}));
@@ -265,6 +381,18 @@ pub async fn ask_stream(
let Some(chunk) = (match next_chunk {
Ok(chunk) => chunk,
Err(error) => {
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
active_provider.clone(),
active_chat_model.clone(),
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: format!("AI stream read failed: {error}"),
}));
@@ -323,6 +451,18 @@ pub async fn ask_stream(
}
if accumulated_answer.is_empty() {
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
active_provider.clone(),
active_chat_model.clone(),
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: "AI chat response did not contain readable content".to_string(),
}));
@@ -330,6 +470,19 @@ pub async fn ask_stream(
}
}
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
true,
"stream",
active_provider,
active_chat_model,
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
let final_payload = build_ask_response(&prepared, accumulated_answer);
yield Ok(sse_bytes("complete", &final_payload));
};

View File

@@ -3,7 +3,10 @@ use loco_rs::prelude::*;
use sea_orm::{EntityTrait, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use crate::models::_entities::reviews::{self, Entity as ReviewEntity};
use crate::{
models::_entities::reviews::{self, Entity as ReviewEntity},
services::storage,
};
#[derive(Serialize, Deserialize, Debug)]
pub struct CreateReviewRequest {
@@ -83,9 +86,11 @@ pub async fn update(
) -> Result<impl IntoResponse> {
let review = ReviewEntity::find_by_id(id).one(&ctx.db).await?;
let Some(mut review) = review.map(|r| r.into_active_model()) else {
let Some(existing_review) = review else {
return Err(Error::NotFound);
};
let old_cover = existing_review.cover.clone();
let mut review = existing_review.into_active_model();
if let Some(title) = req.title {
review.title = Set(Some(title));
@@ -108,7 +113,9 @@ pub async fn update(
if let Some(tags) = req.tags {
review.tags = Set(Some(serde_json::to_string(&tags).unwrap_or_default()));
}
let mut next_cover = old_cover.clone();
if let Some(cover) = req.cover {
next_cover = Some(cover.clone());
review.cover = Set(Some(cover));
}
if let Some(link_url) = req.link_url {
@@ -117,6 +124,14 @@ pub async fn update(
}
let review = review.update(&ctx.db).await?;
if let Some(old_cover) = old_cover
.filter(|old| Some(old.clone()) != next_cover)
.filter(|old| !old.trim().is_empty())
{
if let Err(error) = storage::delete_managed_url(&ctx, &old_cover).await {
tracing::warn!("failed to cleanup replaced review cover: {error}");
}
}
format::json(review)
}
@@ -128,7 +143,13 @@ pub async fn remove(
match review {
Some(r) => {
let cover = r.cover.clone();
r.delete(&ctx.db).await?;
if let Some(cover) = cover.filter(|value| !value.trim().is_empty()) {
if let Err(error) = storage::delete_managed_url(&ctx, &cover).await {
tracing::warn!("failed to cleanup deleted review cover: {error}");
}
}
format::empty()
}
None => Err(Error::NotFound),

View File

@@ -1,15 +1,46 @@
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sea_orm::{ConnectionTrait, DatabaseBackend, DbBackend, FromQueryResult, Statement};
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value;
use std::time::Instant;
use crate::models::_entities::posts;
use crate::services::content;
use crate::services::{analytics, content};
/// Deserialize an optional boolean that may arrive as a textual flag.
///
/// Accepts `1`/`true`/`yes`/`on` and `0`/`false`/`no`/`off` (trimmed,
/// case-insensitive). Any other non-empty value is a deserialization error;
/// an absent value deserializes to `None`.
fn deserialize_boolish_option<'de, D>(
    deserializer: D,
) -> std::result::Result<Option<bool>, D::Error>
where
    D: Deserializer<'de>,
{
    match Option::<String>::deserialize(deserializer)? {
        None => Ok(None),
        Some(raw) => {
            let normalized = raw.trim().to_ascii_lowercase();
            match normalized.as_str() {
                "1" | "true" | "yes" | "on" => Ok(Some(true)),
                "0" | "false" | "no" | "off" => Ok(Some(false)),
                other => Err(serde::de::Error::custom(format!(
                    "invalid boolean value `{other}`"
                ))),
            }
        }
    }
}
/// A search counts as a preview when either the `preview` query flag is set
/// or the request carries an `x-termi-search-mode: preview` header
/// (case-insensitive value comparison).
fn is_preview_search(query: &SearchQuery, headers: &HeaderMap) -> bool {
    if query.preview.unwrap_or(false) {
        return true;
    }
    match headers
        .get("x-termi-search-mode")
        .and_then(|value| value.to_str().ok())
    {
        Some(mode) => mode.eq_ignore_ascii_case("preview"),
        None => false,
    }
}
/// Query-string parameters for the public search endpoint.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct SearchQuery {
/// Search terms; treated as empty when absent.
pub q: Option<String>,
/// Maximum number of results.
pub limit: Option<u64>,
/// Preview flag; accepts textual booleans ("1"/"true"/"yes"/"on", etc.).
/// Preview searches are excluded from analytics recording.
#[serde(default, deserialize_with = "deserialize_boolish_option")]
pub preview: Option<bool>,
}
#[derive(Clone, Debug, Serialize, FromQueryResult)]
@@ -157,7 +188,10 @@ async fn fallback_search(ctx: &AppContext, q: &str, limit: u64) -> Result<Vec<Se
pub async fn search(
Query(query): Query<SearchQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
let started_at = Instant::now();
let preview_search = is_preview_search(&query, &headers);
content::sync_markdown_posts(&ctx).await?;
let q = query.q.unwrap_or_default().trim().to_string();
@@ -186,6 +220,17 @@ pub async fn search(
fallback_search(&ctx, &q, limit).await?
};
if !preview_search {
analytics::record_search_event(
&ctx,
&q,
results.len(),
&headers,
started_at.elapsed().as_millis() as i64,
)
.await;
}
format::json(results)
}

View File

@@ -44,6 +44,8 @@ pub struct AiProviderConfig {
pub api_key: Option<String>,
#[serde(default, alias = "chatModel")]
pub chat_model: Option<String>,
#[serde(default, alias = "imageModel")]
pub image_model: Option<String>,
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
@@ -94,6 +96,14 @@ pub struct SiteSettingsPayload {
pub ai_api_key: Option<String>,
#[serde(default, alias = "aiChatModel")]
pub ai_chat_model: Option<String>,
#[serde(default, alias = "aiImageProvider")]
pub ai_image_provider: Option<String>,
#[serde(default, alias = "aiImageApiBase")]
pub ai_image_api_base: Option<String>,
#[serde(default, alias = "aiImageApiKey")]
pub ai_image_api_key: Option<String>,
#[serde(default, alias = "aiImageModel")]
pub ai_image_model: Option<String>,
#[serde(default, alias = "aiProviders")]
pub ai_providers: Option<Vec<AiProviderConfig>>,
#[serde(default, alias = "aiActiveProviderId")]
@@ -106,6 +116,18 @@ pub struct SiteSettingsPayload {
pub ai_top_k: Option<i32>,
#[serde(default, alias = "aiChunkSize")]
pub ai_chunk_size: Option<i32>,
#[serde(default, alias = "mediaR2AccountId")]
pub media_r2_account_id: Option<String>,
#[serde(default, alias = "mediaStorageProvider")]
pub media_storage_provider: Option<String>,
#[serde(default, alias = "mediaR2Bucket")]
pub media_r2_bucket: Option<String>,
#[serde(default, alias = "mediaR2PublicBaseUrl")]
pub media_r2_public_base_url: Option<String>,
#[serde(default, alias = "mediaR2AccessKeyId")]
pub media_r2_access_key_id: Option<String>,
#[serde(default, alias = "mediaR2SecretAccessKey")]
pub media_r2_secret_access_key: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
@@ -152,13 +174,16 @@ fn create_ai_provider_id() -> String {
}
fn default_ai_provider_config() -> AiProviderConfig {
let provider = ai::provider_name(None);
AiProviderConfig {
id: "default".to_string(),
name: "默认提供商".to_string(),
provider: ai::provider_name(None),
provider: provider.clone(),
api_base: Some(ai::default_api_base().to_string()),
api_key: Some(ai::default_api_key().to_string()),
chat_model: Some(ai::default_chat_model().to_string()),
image_model: Some(ai::default_image_model_for_provider(&provider).to_string()),
}
}
@@ -174,11 +199,13 @@ fn normalize_ai_provider_configs(items: Vec<AiProviderConfig>) -> Vec<AiProvider
let api_base = normalize_optional_string(item.api_base);
let api_key = normalize_optional_string(item.api_key);
let chat_model = normalize_optional_string(item.chat_model);
let image_model = normalize_optional_string(item.image_model);
let has_content = !item.name.trim().is_empty()
|| !provider.trim().is_empty()
|| api_base.is_some()
|| api_key.is_some()
|| chat_model.is_some();
|| chat_model.is_some()
|| image_model.is_some();
if !has_content {
return None;
@@ -201,6 +228,7 @@ fn normalize_ai_provider_configs(items: Vec<AiProviderConfig>) -> Vec<AiProvider
api_base,
api_key,
chat_model,
image_model,
})
})
.collect()
@@ -216,13 +244,16 @@ fn legacy_ai_provider_config(model: &Model) -> Option<AiProviderConfig> {
return None;
}
let normalized_provider = provider.unwrap_or_else(|| ai::provider_name(None));
Some(AiProviderConfig {
id: "default".to_string(),
name: "当前提供商".to_string(),
provider: provider.unwrap_or_else(|| ai::provider_name(None)),
provider: normalized_provider.clone(),
api_base,
api_key,
chat_model,
image_model: Some(ai::default_image_model_for_provider(&normalized_provider).to_string()),
})
}
@@ -301,6 +332,8 @@ fn update_active_provider_from_legacy_fields(model: &mut Model) {
config.api_base = api_base;
config.api_key = api_key;
config.chat_model = chat_model;
config.image_model =
Some(ai::default_image_model_for_provider(&config.provider).to_string());
write_ai_provider_state(
model,
vec![config],
@@ -322,6 +355,10 @@ fn update_active_provider_from_legacy_fields(model: &mut Model) {
config.api_base = api_base.clone();
config.api_key = api_key.clone();
config.chat_model = chat_model.clone();
if config.image_model.is_none() {
config.image_model =
Some(ai::default_image_model_for_provider(&config.provider).to_string());
}
}
}
}
@@ -425,6 +462,18 @@ impl SiteSettingsPayload {
if let Some(ai_chat_model) = self.ai_chat_model {
item.ai_chat_model = normalize_optional_string(Some(ai_chat_model));
}
if let Some(ai_image_provider) = self.ai_image_provider {
item.ai_image_provider = normalize_optional_string(Some(ai_image_provider));
}
if let Some(ai_image_api_base) = self.ai_image_api_base {
item.ai_image_api_base = normalize_optional_string(Some(ai_image_api_base));
}
if let Some(ai_image_api_key) = self.ai_image_api_key {
item.ai_image_api_key = normalize_optional_string(Some(ai_image_api_key));
}
if let Some(ai_image_model) = self.ai_image_model {
item.ai_image_model = normalize_optional_string(Some(ai_image_model));
}
if let Some(ai_embedding_model) = self.ai_embedding_model {
item.ai_embedding_model = normalize_optional_string(Some(ai_embedding_model));
}
@@ -437,6 +486,26 @@ impl SiteSettingsPayload {
if self.ai_chunk_size.is_some() {
item.ai_chunk_size = normalize_optional_int(self.ai_chunk_size, 400, 4000);
}
if let Some(media_r2_account_id) = self.media_r2_account_id {
item.media_r2_account_id = normalize_optional_string(Some(media_r2_account_id));
}
if let Some(media_storage_provider) = self.media_storage_provider {
item.media_storage_provider = normalize_optional_string(Some(media_storage_provider));
}
if let Some(media_r2_bucket) = self.media_r2_bucket {
item.media_r2_bucket = normalize_optional_string(Some(media_r2_bucket));
}
if let Some(media_r2_public_base_url) = self.media_r2_public_base_url {
item.media_r2_public_base_url =
normalize_optional_string(Some(media_r2_public_base_url));
}
if let Some(media_r2_access_key_id) = self.media_r2_access_key_id {
item.media_r2_access_key_id = normalize_optional_string(Some(media_r2_access_key_id));
}
if let Some(media_r2_secret_access_key) = self.media_r2_secret_access_key {
item.media_r2_secret_access_key =
normalize_optional_string(Some(media_r2_secret_access_key));
}
if provider_list_supplied {
write_ai_provider_state(
@@ -524,6 +593,10 @@ fn default_payload() -> SiteSettingsPayload {
ai_api_base: Some(ai::default_api_base().to_string()),
ai_api_key: Some(ai::default_api_key().to_string()),
ai_chat_model: Some(ai::default_chat_model().to_string()),
ai_image_provider: None,
ai_image_api_base: None,
ai_image_api_key: None,
ai_image_model: None,
ai_providers: Some(vec![default_ai_provider_config()]),
ai_active_provider_id: Some("default".to_string()),
ai_embedding_model: Some(ai::local_embedding_label().to_string()),
@@ -533,6 +606,12 @@ fn default_payload() -> SiteSettingsPayload {
),
ai_top_k: Some(4),
ai_chunk_size: Some(1200),
media_storage_provider: None,
media_r2_account_id: None,
media_r2_bucket: None,
media_r2_public_base_url: None,
media_r2_access_key_id: None,
media_r2_secret_access_key: None,
}
}
@@ -553,7 +632,11 @@ pub(crate) async fn load_current(ctx: &AppContext) -> Result<Model> {
.await?;
let mut model = inserted;
default_payload().apply(&mut model);
Ok(model.into_active_model().update(&ctx.db).await?)
Ok(model
.into_active_model()
.reset_all()
.update(&ctx.db)
.await?)
}
fn public_response(model: Model) -> PublicSiteSettingsResponse {
@@ -596,7 +679,7 @@ pub async fn update(
let current = load_current(&ctx).await?;
let mut item = current;
params.apply(&mut item);
let item = item.into_active_model();
let item = item.into_active_model().reset_all();
let updated = item.update(&ctx.db).await?;
format::json(public_response(updated))
}

View File

@@ -7,6 +7,7 @@ pub mod categories;
pub mod comments;
pub mod friend_links;
pub mod posts;
pub mod query_events;
pub mod reviews;
pub mod site_settings;
pub mod tags;

View File

@@ -5,6 +5,7 @@ pub use super::categories::Entity as Categories;
pub use super::comments::Entity as Comments;
pub use super::friend_links::Entity as FriendLinks;
pub use super::posts::Entity as Posts;
pub use super::query_events::Entity as QueryEvents;
pub use super::reviews::Entity as Reviews;
pub use super::site_settings::Entity as SiteSettings;
pub use super::tags::Entity as Tags;

View File

@@ -0,0 +1,33 @@
//! `SeaORM` Entity, manually maintained
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// One row per tracked query event (site search or AI question), used by the
/// admin analytics views.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "query_events")]
pub struct Model {
pub created_at: DateTimeWithTimeZone,
pub updated_at: DateTimeWithTimeZone,
#[sea_orm(primary_key)]
pub id: i32,
// NOTE(review): values presumably distinguish search vs. AI-question events —
// confirm against the `analytics` service's record_* functions.
pub event_type: String,
/// Raw query text as submitted.
#[sea_orm(column_type = "Text")]
pub query_text: String,
/// Normalized form of the query (for grouping/deduplication).
#[sea_orm(column_type = "Text")]
pub normalized_query: String,
pub request_path: Option<String>,
pub referrer: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub user_agent: Option<String>,
/// Number of results/sources returned, when known.
pub result_count: Option<i32>,
pub success: Option<bool>,
// Callers pass "sync" or "stream" for AI questions.
pub response_mode: Option<String>,
pub provider: Option<String>,
pub chat_model: Option<String>,
/// End-to-end handler latency in milliseconds.
pub latency_ms: Option<i32>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -37,6 +37,11 @@ pub struct Model {
#[sea_orm(column_type = "Text", nullable)]
pub ai_api_key: Option<String>,
pub ai_chat_model: Option<String>,
pub ai_image_provider: Option<String>,
pub ai_image_api_base: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub ai_image_api_key: Option<String>,
pub ai_image_model: Option<String>,
#[sea_orm(column_type = "JsonBinary", nullable)]
pub ai_providers: Option<Json>,
pub ai_active_provider_id: Option<String>,
@@ -46,6 +51,13 @@ pub struct Model {
pub ai_top_k: Option<i32>,
pub ai_chunk_size: Option<i32>,
pub ai_last_indexed_at: Option<DateTimeWithTimeZone>,
pub media_storage_provider: Option<String>,
pub media_r2_account_id: Option<String>,
pub media_r2_bucket: Option<String>,
pub media_r2_public_base_url: Option<String>,
pub media_r2_access_key_id: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub media_r2_secret_access_key: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -4,7 +4,7 @@ use fastembed::{
InitOptionsUserDefined, Pooling, TextEmbedding, TokenizerFiles, UserDefinedEmbeddingModel,
};
use loco_rs::prelude::*;
use reqwest::{Client, Url};
use reqwest::{header::CONTENT_TYPE, multipart, Client, Url};
use sea_orm::{
ActiveModelTrait, ConnectionTrait, DbBackend, EntityTrait, FromQueryResult, IntoActiveModel,
PaginatorTrait, QueryOrder, Set, Statement,
@@ -17,11 +17,12 @@ use std::sync::{Mutex, OnceLock};
use uuid::Uuid;
use crate::{
controllers::site_settings as site_settings_controller,
models::_entities::{ai_chunks, site_settings},
services::content,
services::{content, storage},
};
const DEFAULT_AI_PROVIDER: &str = "newapi";
const DEFAULT_AI_PROVIDER: &str = "openai";
const DEFAULT_AI_API_BASE: &str = "https://91code.jiangnight.com/v1";
const DEFAULT_AI_API_KEY: &str =
"sk-5a5e27db9fb8f8ee7e1d8e3c6a44638c2e50cdb0a0cf9d926fefb5418ff62571";
@@ -29,6 +30,8 @@ const DEFAULT_CHAT_MODEL: &str = "gpt-5.4";
const DEFAULT_REASONING_EFFORT: &str = "medium";
const DEFAULT_DISABLE_RESPONSE_STORAGE: bool = true;
const DEFAULT_IMAGE_MODEL: &str = "gpt-image-1";
const DEFAULT_CLOUDFLARE_CHAT_MODEL: &str = "@cf/meta/llama-3.1-8b-instruct";
const DEFAULT_CLOUDFLARE_IMAGE_MODEL: &str = "@cf/black-forest-labs/flux-2-klein-4b";
const DEFAULT_TOP_K: usize = 4;
const DEFAULT_CHUNK_SIZE: usize = 1200;
const DEFAULT_SYSTEM_PROMPT: &str =
@@ -49,6 +52,14 @@ const LOCAL_EMBEDDING_FILES: [&str; 5] = [
static TEXT_EMBEDDING_MODEL: OnceLock<Mutex<TextEmbedding>> = OnceLock::new();
/// Resolved runtime configuration for the image-generation provider.
/// May differ from the chat provider when dedicated image settings are stored.
#[derive(Clone, Debug)]
struct AiImageRuntimeSettings {
    // Provider name (e.g. "openai" or a Cloudflare Workers AI alias);
    // compared case-insensitively by the provider_uses_* helpers.
    provider: String,
    api_base: Option<String>,
    api_key: Option<String>,
    image_model: String,
}
#[derive(Clone, Debug)]
struct AiRuntimeSettings {
raw: site_settings::Model,
@@ -56,6 +67,7 @@ struct AiRuntimeSettings {
api_base: Option<String>,
api_key: Option<String>,
chat_model: String,
image: AiImageRuntimeSettings,
system_prompt: String,
top_k: usize,
chunk_size: usize,
@@ -121,6 +133,11 @@ pub struct PolishedPostMarkdown {
pub polished_markdown: String,
}
/// Response payload for AI-polished review description text.
#[derive(Clone, Debug, Serialize)]
pub struct PolishedReviewDescription {
    pub polished_description: String,
}
#[derive(Clone, Debug, Serialize)]
pub struct GeneratedPostCoverImage {
pub image_url: String,
@@ -135,6 +152,14 @@ pub struct AiProviderConnectivityResult {
pub reply_preview: String,
}
/// Result of an admin-triggered image-provider connectivity test.
#[derive(Clone, Debug, Serialize)]
pub struct AiImageProviderConnectivityResult {
    pub provider: String,
    // Fully-built image generation URL that was exercised.
    pub endpoint: String,
    pub image_model: String,
    // Either the returned image URL or a "base64 image ok (N chars)" summary.
    pub result_preview: String,
}
#[derive(Clone, Debug, Default, Deserialize)]
struct GeneratedPostMetadataDraft {
title: Option<String>,
@@ -227,16 +252,57 @@ fn build_endpoint(api_base: &str, path: &str) -> String {
)
}
/// True when `provider` names Cloudflare Workers AI under any accepted alias
/// (matched case-insensitively).
fn provider_uses_cloudflare(provider: &str) -> bool {
    ["cloudflare", "cloudflare-workers-ai", "workers-ai"]
        .iter()
        .any(|alias| provider.eq_ignore_ascii_case(alias))
}
/// Providers whose API base should be treated as OpenAI-style (gets the
/// standard path-prefix normalization): Responses-API providers plus the
/// explicit "openai-compatible" alias.
fn provider_uses_openai_api_prefix(provider: &str) -> bool {
    if provider.eq_ignore_ascii_case("openai-compatible") {
        return true;
    }
    provider_uses_responses(provider)
}
/// Normalize a Cloudflare Workers AI base into the canonical
/// `https://api.cloudflare.com/client/v4/accounts/{account_id}` form.
///
/// Accepts either a bare account id or a full URL; for full URLs the path is
/// rewritten to end at the account id (any trailing `/ai/run/...` is dropped).
/// Unparseable URLs are returned trimmed but otherwise unchanged.
fn normalize_cloudflare_api_base(api_base: &str) -> String {
    let trimmed = api_base.trim().trim_end_matches('/');
    if trimmed.is_empty() {
        return String::new();
    }
    // No scheme means the admin entered a bare account id; expand it.
    if !trimmed.starts_with("http://") && !trimmed.starts_with("https://") {
        return format!(
            "https://api.cloudflare.com/client/v4/accounts/{}",
            trimmed.trim_matches('/'),
        );
    }
    let Ok(mut parsed) = Url::parse(trimmed) else {
        return trimmed.to_string();
    };
    let segments = parsed
        .path_segments()
        .map(|items| items.collect::<Vec<_>>())
        .unwrap_or_default();
    // Truncate the path right after ".../accounts/{id}" if that pair exists.
    if let Some(account_index) = segments.iter().position(|segment| *segment == "accounts") {
        if let Some(account_id) = segments.get(account_index + 1) {
            parsed.set_path(&format!("/client/v4/accounts/{account_id}"));
        }
    }
    parsed.to_string().trim_end_matches('/').to_string()
}
fn normalize_provider_api_base(provider: &str, api_base: &str) -> String {
let trimmed = api_base.trim();
if trimmed.is_empty() {
return String::new();
}
if provider_uses_cloudflare(provider) {
return normalize_cloudflare_api_base(trimmed);
}
if !provider_uses_openai_api_prefix(provider) {
return trimmed.trim_end_matches('/').to_string();
}
@@ -604,6 +670,19 @@ fn parse_provider_sse_body(body: &str) -> Option<Value> {
latest_response.or(latest_payload)
}
/// Parse a provider response body as JSON, falling back to extracting the
/// final JSON payload from an SSE stream when plain parsing fails.
///
/// Both failure modes surface as `Error::BadRequest` with a uniform
/// "AI response parse failed" prefix.
fn parse_json_body(body: &str) -> Result<Value> {
    serde_json::from_str(body)
        .or_else(|_| {
            // Some providers answer with text/event-stream even for
            // non-streaming requests; salvage the last JSON event if present.
            parse_provider_sse_body(body).ok_or_else(|| {
                serde_json::Error::io(std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    "provider returned neither JSON nor SSE JSON payload",
                ))
            })
        })
        .map_err(|error| Error::BadRequest(format!("AI response parse failed: {error}")))
}
async fn request_json(client: &Client, url: &str, api_key: &str, payload: Value) -> Result<Value> {
let response = client
.post(url)
@@ -626,20 +705,56 @@ async fn request_json(client: &Client, url: &str, api_key: &str, payload: Value)
)));
}
serde_json::from_str(&body)
.or_else(|_| {
parse_provider_sse_body(&body).ok_or_else(|| {
serde_json::Error::io(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"provider returned neither JSON nor SSE JSON payload",
))
})
})
.map_err(|error| Error::BadRequest(format!("AI response parse failed: {error}")))
parse_json_body(&body)
}
async fn request_multipart_json(
client: &Client,
url: &str,
api_key: &str,
form: multipart::Form,
) -> Result<Value> {
let response = client
.post(url)
.bearer_auth(api_key)
.header("Accept", "application/json")
.multipart(form)
.send()
.await
.map_err(|error| Error::BadRequest(format!("AI request failed: {error}")))?;
let status = response.status();
let body = response
.text()
.await
.map_err(|error| Error::BadRequest(format!("AI response read failed: {error}")))?;
if !status.is_success() {
return Err(Error::BadRequest(format!(
"AI provider returned {status}: {body}"
)));
}
parse_json_body(&body)
}
/// Providers that speak the OpenAI Responses API (`/responses`) rather than
/// classic chat-completions. Matching is case-insensitive.
fn provider_uses_responses(provider: &str) -> bool {
    ["newapi", "openai", "anthropic", "gemini"]
        .iter()
        .any(|name| provider.eq_ignore_ascii_case(name))
}
/// Default image model for a provider family: Cloudflare Workers AI gets its
/// dedicated model constant; every other provider uses the OpenAI-style one.
fn default_image_model_for_provider_name(provider: &str) -> &'static str {
    if provider_uses_cloudflare(provider) {
        return DEFAULT_CLOUDFLARE_IMAGE_MODEL;
    }
    DEFAULT_IMAGE_MODEL
}
/// Public wrapper around [`default_image_model_for_provider_name`] so
/// controllers can resolve the default without the crate-private helper.
pub fn default_image_model_for_provider(provider: &str) -> &'static str {
    default_image_model_for_provider_name(provider)
}
async fn embed_texts_locally(inputs: Vec<String>, kind: EmbeddingKind) -> Result<Vec<Vec<f64>>> {
@@ -1015,6 +1130,66 @@ fn build_polish_markdown_prompt(markdown: &str) -> String {
)
}
/// Build the Chinese user prompt asking the chat model to polish a review
/// description, embedding the review's metadata (title, type, rating, date,
/// status, tags) as context so the rewrite stays faithful to the original.
fn build_polish_review_prompt(
    title: &str,
    review_type: &str,
    rating: i32,
    review_date: Option<&str>,
    status: &str,
    tags: &[String],
    description: &str,
) -> String {
    // Blank or missing dates display as "未填写" ("not filled in").
    let review_date_text = review_date
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .unwrap_or("未填写");
    // NOTE(review): empty tags render as an empty label after "标签:" —
    // possibly intended to be a placeholder like "无"; confirm with the UI.
    let tag_text = if tags.is_empty() {
        "".to_string()
    } else {
        tags.join(" / ")
    };
    format!(
        "请润色一段中文作品评测简介/点评文案。\n\
        要求:\n\
        1. 保留原文观点、倾向和结论,不要杜撰剧情、设定或体验细节。\n\
        2. 语言更凝练、更自然,适合放在评测页中展示。\n\
        3. 可以优化句式与节奏,但不要改写成标题、列表或营销文案。\n\
        4. 默认输出一到三段正文,总长度尽量控制在 80 到 220 字之间;如果原文本身更长,可适度保留信息密度。\n\
        5. 只返回润色后的简介正文,不要附加解释。\n\n\
        作品标题:{}\n\
        评测类型:{}\n\
        评分:{}/5\n\
        评测日期:{}\n\
        状态:{}\n\
        标签:{}\n\n\
        当前简介:\n{}",
        title.trim(),
        review_type.trim(),
        rating,
        review_date_text,
        status.trim(),
        tag_text,
        description.trim(),
    )
}
/// Ensure a non-empty text ends with sentence-final punctuation, appending a
/// Chinese full stop (`。`) when it does not.
///
/// Returns the trimmed input unchanged when it already ends with CJK or ASCII
/// sentence punctuation, and an empty string for blank input.
///
/// NOTE(review): the committed source contained empty char literals
/// (`'' `) in the punctuation set and appended nothing in the else branch —
/// mojibake from an encoding round-trip. Reconstructed here with the
/// full-width `!`/`?` variants and the `。` append the function name implies.
fn ensure_sentence_ending(text: &str) -> String {
    let trimmed = text.trim();
    if trimmed.is_empty() {
        return String::new();
    }
    if matches!(
        trimmed.chars().last(),
        Some('。' | '!' | '?' | '.' | '!' | '?')
    ) {
        trimmed.to_string()
    } else {
        format!("{trimmed}。")
    }
}
fn build_post_cover_prompt(
title: &str,
description: Option<&str>,
@@ -1063,12 +1238,40 @@ fn build_post_cover_prompt(
)
}
fn build_image_generation_url(provider: &str, api_base: &str) -> String {
/// Build the image-generation endpoint URL: Cloudflare Workers AI uses the
/// per-model `/ai/run/{model}` route, all other providers use the
/// OpenAI-style `/images/generations`.
fn build_image_generation_url(provider: &str, api_base: &str, image_model: &str) -> String {
    let base = normalize_provider_api_base(provider, api_base);
    let path = if provider_uses_cloudflare(provider) {
        format!("/ai/run/{}", image_model.trim())
    } else {
        "/images/generations".to_string()
    };
    build_endpoint(&base, &path)
}
fn extract_image_generation_result(value: &Value) -> Option<String> {
if let Some(result) = value.get("result") {
if let Some(image) = result.get("image").and_then(Value::as_str) {
let trimmed = image.trim();
if !trimmed.is_empty() {
return Some(trimmed.to_string());
}
}
if let Some(url) = result.get("url").and_then(Value::as_str) {
let trimmed = url.trim();
if !trimmed.is_empty() {
return Some(trimmed.to_string());
}
}
}
if let Some(image) = value.get("image").and_then(Value::as_str) {
let trimmed = image.trim();
if !trimmed.is_empty() {
return Some(trimmed.to_string());
}
}
let data = value.get("data").and_then(Value::as_array)?;
for item in data {
@@ -1115,25 +1318,113 @@ fn generated_cover_directory() -> PathBuf {
})
}
fn persist_generated_cover_image(slug_hint: &str, base64_data: &str) -> Result<String> {
/// Map a MIME content-type (parameters after `;` ignored, case-insensitive)
/// to a `(file_extension, canonical_mime)` pair for supported image formats.
fn image_details_from_mime(content_type: &str) -> Option<(&'static str, &'static str)> {
    let essence = content_type
        .split(';')
        .next()
        .unwrap_or_default()
        .trim()
        .to_ascii_lowercase();
    let details = match essence.as_str() {
        "image/png" => ("png", "image/png"),
        "image/jpeg" | "image/jpg" => ("jpg", "image/jpeg"),
        "image/webp" => ("webp", "image/webp"),
        "image/gif" => ("gif", "image/gif"),
        "image/svg+xml" => ("svg", "image/svg+xml"),
        _ => return None,
    };
    Some(details)
}
/// Map a file extension (leading dot optional, case-insensitive) to a
/// `(normalized_extension, mime_type)` pair for supported image formats.
fn image_details_from_extension(extension: &str) -> Option<(&'static str, &'static str)> {
    let cleaned = extension.trim().trim_start_matches('.').to_ascii_lowercase();
    let details = match cleaned.as_str() {
        "png" => ("png", "image/png"),
        "jpg" | "jpeg" => ("jpg", "image/jpeg"),
        "webp" => ("webp", "image/webp"),
        "gif" => ("gif", "image/gif"),
        "svg" => ("svg", "image/svg+xml"),
        _ => return None,
    };
    Some(details)
}
/// Infer image `(extension, mime)` from the extension of a URL's path, if the
/// URL parses and the extension is a supported image format.
fn image_details_from_url(url: &str) -> Option<(&'static str, &'static str)> {
    let parsed = Url::parse(url).ok()?;
    let (_stem, extension) = parsed.path().rsplit_once('.')?;
    image_details_from_extension(extension)
}
/// Decode a base64 image payload into raw bytes plus `(extension, mime)`.
///
/// Accepts either a bare base64 string (assumed PNG) or a `data:` URL, whose
/// MIME type selects the extension/content-type (falling back to PNG for
/// unknown types). Returns `BadRequest` on malformed data URLs or invalid
/// base64.
fn decode_base64_image_payload(base64_data: &str) -> Result<(Vec<u8>, &'static str, &'static str)> {
    let trimmed = base64_data.trim();
    let (payload, extension, content_type) = if let Some(rest) = trimmed.strip_prefix("data:") {
        // data:<mime>[;base64],<payload> — everything before the comma is metadata.
        let (metadata, encoded) = rest
            .split_once(',')
            .ok_or_else(|| Error::BadRequest("AI 封面图 data URL 格式不正确".to_string()))?;
        let mime = metadata
            .split(';')
            .next()
            .filter(|value| !value.trim().is_empty())
            .unwrap_or("image/png");
        let (extension, content_type) =
            image_details_from_mime(mime).unwrap_or(("png", "image/png"));
        (encoded, extension, content_type)
    } else {
        // No data-URL wrapper: treat the whole string as base64 PNG bytes.
        (trimmed, "png", "image/png")
    };
    let image_bytes = BASE64_STANDARD
        .decode(payload)
        .map_err(|error| Error::BadRequest(format!("解析 AI 封面图失败: {error}")))?;
    Ok((image_bytes, extension, content_type))
}
async fn persist_generated_cover_image_bytes(
ctx: &AppContext,
slug_hint: &str,
image_bytes: Vec<u8>,
extension: &str,
content_type: &str,
) -> Result<String> {
if storage::optional_r2_settings(ctx).await?.is_some() {
let key = storage::build_object_key("post-covers", slug_hint, extension);
let stored = storage::upload_bytes_to_r2(
ctx,
&key,
image_bytes,
Some(content_type),
Some("public, max-age=31536000, immutable"),
)
.await?;
return Ok(stored.url);
}
let directory = generated_cover_directory();
fs::create_dir_all(&directory)
.map_err(|error| Error::BadRequest(format!("创建封面图目录失败: {error}")))?;
let safe_slug = metadata_slugify(slug_hint);
let file_name = format!(
"{}-{}.png",
"{}-{}.{}",
if safe_slug.is_empty() {
"cover".to_string()
} else {
safe_slug
},
Uuid::new_v4().simple()
Uuid::new_v4().simple(),
extension
.trim()
.trim_start_matches('.')
.to_ascii_lowercase()
);
let file_path = directory.join(&file_name);
let image_bytes = BASE64_STANDARD
.decode(base64_data)
.map_err(|error| Error::BadRequest(format!("解析 AI 封面图失败: {error}")))?;
fs::write(&file_path, image_bytes)
.map_err(|error| Error::BadRequest(format!("写入 AI 封面图失败: {error}")))?;
@@ -1141,6 +1432,49 @@ fn persist_generated_cover_image(slug_hint: &str, base64_data: &str) -> Result<S
Ok(format!("/generated-covers/{file_name}"))
}
/// Persist an AI-generated cover image and return its public URL.
///
/// `image_result` is either an http(s) URL (the image is downloaded, its
/// format inferred from the Content-Type header or URL extension, defaulting
/// to PNG) or a base64 / data-URL payload (decoded locally). Storage is
/// delegated to `persist_generated_cover_image_bytes` (R2 or local disk).
async fn persist_generated_cover_image(
    ctx: &AppContext,
    slug_hint: &str,
    image_result: &str,
) -> Result<String> {
    if image_result.starts_with("http://") || image_result.starts_with("https://") {
        let client = Client::new();
        let response = client
            .get(image_result)
            .send()
            .await
            .map_err(|error| Error::BadRequest(format!("下载 AI 封面图失败: {error}")))?
            .error_for_status()
            .map_err(|error| Error::BadRequest(format!("下载 AI 封面图失败: {error}")))?;
        // Read the header before consuming the body.
        let content_type_header = response
            .headers()
            .get(CONTENT_TYPE)
            .and_then(|value| value.to_str().ok())
            .map(ToString::to_string);
        let image_bytes = response
            .bytes()
            .await
            .map_err(|error| Error::BadRequest(format!("读取 AI 封面图失败: {error}")))?;
        // Format detection: Content-Type header first, then URL extension,
        // finally assume PNG.
        let (extension, content_type) = content_type_header
            .as_deref()
            .and_then(image_details_from_mime)
            .or_else(|| image_details_from_url(image_result))
            .unwrap_or(("png", "image/png"));
        return persist_generated_cover_image_bytes(
            ctx,
            slug_hint,
            image_bytes.to_vec(),
            extension,
            content_type,
        )
        .await;
    }
    // Not a URL: assume base64 (optionally data-URL wrapped) image bytes.
    let (image_bytes, extension, content_type) = decode_base64_image_payload(image_result)?;
    persist_generated_cover_image_bytes(ctx, slug_hint, image_bytes, extension, content_type).await
}
fn fallback_polished_markdown(markdown: &str) -> String {
let metadata = fallback_generated_metadata(markdown);
let body = strip_markdown_frontmatter(markdown)
@@ -1176,6 +1510,17 @@ tags:\n{}\n\
)
}
/// Local fallback used when the remote AI polish call fails: collapse the
/// text into trimmed, non-empty lines separated by paragraph breaks, then
/// ensure the result ends with sentence punctuation.
fn fallback_polished_review_description(description: &str) -> String {
    let normalized = normalize_newlines(description);
    let mut paragraphs: Vec<&str> = Vec::new();
    for line in normalized.lines() {
        let trimmed = line.trim();
        if !trimmed.is_empty() {
            paragraphs.push(trimmed);
        }
    }
    ensure_sentence_ending(&paragraphs.join("\n\n"))
}
pub async fn generate_post_metadata(
ctx: &AppContext,
markdown: &str,
@@ -1303,6 +1648,85 @@ pub async fn polish_post_markdown(
}
}
pub async fn polish_review_description(
ctx: &AppContext,
title: &str,
review_type: &str,
rating: i32,
review_date: Option<&str>,
status: &str,
tags: &[String],
description: &str,
) -> Result<PolishedReviewDescription> {
let trimmed_description = description.trim();
if trimmed_description.is_empty() {
return Err(Error::BadRequest(
"请先填写点评内容,再进行润色。".to_string(),
));
}
let settings = load_runtime_settings(ctx, false).await?;
let remote_result: Result<PolishedReviewDescription> = match (
settings.api_base.clone(),
settings.api_key.clone(),
) {
(Some(api_base), Some(api_key)) => {
let request = AiProviderRequest {
provider: settings.provider.clone(),
api_base,
api_key,
chat_model: settings.chat_model.clone(),
system_prompt: "你是中文内容平台里的资深评测编辑。你只负责润色用户已有的作品点评文案,不要改写核心观点,不要虚构事实,不要输出额外解释。".to_string(),
prompt: build_polish_review_prompt(
title,
review_type,
rating,
review_date,
status,
tags,
trimmed_description,
),
};
let client = Client::new();
let response = request_json(
&client,
&build_provider_url(&request),
&request.api_key,
build_provider_payload(&request, false),
)
.await;
match response {
Ok(response) => {
let polished_description =
extract_provider_text(&response).ok_or_else(|| {
Error::BadRequest("AI 润色响应里没有可读取内容。".to_string())
})?;
Ok(PolishedReviewDescription {
polished_description: normalize_newlines(polished_description.trim()),
})
}
Err(error) => Err(error),
}
}
_ => Err(Error::BadRequest(
"AI 服务未配置完整,已自动切换为本地智能润色。".to_string(),
)),
};
match remote_result {
Ok(result) => Ok(result),
Err(error) => {
tracing::warn!("AI review polish fallback: {error}");
Ok(PolishedReviewDescription {
polished_description: fallback_polished_review_description(trimmed_description),
})
}
}
}
pub async fn generate_post_cover_image(
ctx: &AppContext,
title: &str,
@@ -1323,14 +1747,14 @@ pub async fn generate_post_cover_image(
}
let settings = load_runtime_settings(ctx, false).await?;
let api_base = settings
.api_base
.clone()
.ok_or_else(|| Error::BadRequest("AI API Base 未配置,无法生成封面图。".to_string()))?;
let api_key = settings
let image_settings = settings.image.clone();
let api_base = image_settings.api_base.clone().ok_or_else(|| {
Error::BadRequest("图片 AI API Base 未配置,无法生成封面图。".to_string())
})?;
let api_key = image_settings
.api_key
.clone()
.ok_or_else(|| Error::BadRequest("AI API Key 未配置,无法生成封面图。".to_string()))?;
.ok_or_else(|| Error::BadRequest("图片 AI API Key 未配置,无法生成封面图。".to_string()))?;
let prompt = build_post_cover_prompt(
if trimmed_title.is_empty() {
"未命名文章"
@@ -1343,33 +1767,22 @@ pub async fn generate_post_cover_image(
post_type,
trimmed_markdown,
);
let payload = json!({
"model": DEFAULT_IMAGE_MODEL,
"prompt": prompt,
"size": "1536x1024",
"quality": "high",
"output_format": "png"
});
let client = Client::new();
let response = request_json(
&client,
&build_image_generation_url(&settings.provider, &api_base),
let image_model = image_settings.image_model.clone();
let response = request_image_generation(
&image_settings.provider,
&api_base,
&api_key,
payload,
&image_model,
&prompt,
)
.await?;
let image_result = extract_image_generation_result(&response)
.ok_or_else(|| Error::BadRequest("AI 封面图响应里没有可读取图片。".to_string()))?;
let image_url = if image_result.starts_with("http://") || image_result.starts_with("https://") {
image_result
} else {
let slug_hint = slug
.map(str::trim)
.filter(|value| !value.is_empty())
.unwrap_or(trimmed_title);
persist_generated_cover_image(slug_hint, &image_result)?
};
let slug_hint = slug
.map(str::trim)
.filter(|value| !value.is_empty())
.unwrap_or(trimmed_title);
let image_url = persist_generated_cover_image(ctx, slug_hint, &image_result).await?;
Ok(GeneratedPostCoverImage { image_url, prompt })
}
@@ -1389,6 +1802,12 @@ fn extract_response_output(value: &Value) -> Option<String> {
}
}
if let Some(result) = value.get("result") {
if let Some(text) = extract_response_output(result) {
return Some(text);
}
}
if let Some(response) = value.get("response") {
if let Some(text) = extract_response_output(response) {
return Some(text);
@@ -1590,7 +2009,16 @@ fn build_sources(matches: &[ScoredChunk]) -> Vec<AiSource> {
}
pub(crate) fn build_provider_payload(request: &AiProviderRequest, stream: bool) -> Value {
if provider_uses_responses(&request.provider) {
if provider_uses_cloudflare(&request.provider) {
json!({
"prompt": format!(
"系统指令:{}
用户请求:{}",
request.system_prompt, request.prompt,
)
})
} else if provider_uses_responses(&request.provider) {
json!({
"model": request.chat_model,
"input": [
@@ -1640,13 +2068,15 @@ pub(crate) fn build_provider_payload(request: &AiProviderRequest, stream: bool)
pub(crate) fn build_provider_url(request: &AiProviderRequest) -> String {
let api_base = normalize_provider_api_base(&request.provider, &request.api_base);
let path = if provider_uses_responses(&request.provider) {
"/responses"
let path = if provider_uses_cloudflare(&request.provider) {
format!("/ai/run/{}", request.chat_model.trim())
} else if provider_uses_responses(&request.provider) {
"/responses".to_string()
} else {
"/chat/completions"
"/chat/completions".to_string()
};
build_endpoint(&api_base, path)
build_endpoint(&api_base, &path)
}
#[cfg(test)]
@@ -1701,6 +2131,25 @@ mod tests {
);
}
#[test]
fn normalize_provider_api_base_supports_cloudflare_account_id() {
    // A bare account id (no scheme) expands to the canonical Cloudflare
    // accounts URL.
    assert_eq!(
        normalize_provider_api_base("cloudflare", "test-account-id"),
        "https://api.cloudflare.com/client/v4/accounts/test-account-id"
    );
}
#[test]
fn build_provider_url_uses_cloudflare_run_endpoint() {
    // Cloudflare chat requests hit /ai/run/{model} rather than the
    // OpenAI-style /chat/completions or /responses paths.
    let mut request = build_request("cloudflare", "test-account-id");
    request.chat_model = "@cf/meta/llama-3.1-8b-instruct".to_string();
    assert_eq!(
        build_provider_url(&request),
        "https://api.cloudflare.com/client/v4/accounts/test-account-id/ai/run/@cf/meta/llama-3.1-8b-instruct"
    );
}
#[test]
fn profile_question_detects_owner_keywords() {
assert!(is_profile_question("站长的技术栈和个人介绍是什么?"));
@@ -1780,6 +2229,59 @@ async fn request_chat_answer(request: &AiProviderRequest) -> Result<String> {
})
}
/// Dispatch an image-generation request in the shape the provider expects.
///
/// Three shapes are used:
/// - Cloudflare + the default FLUX model: multipart form with prompt and
///   fixed dimensions (1024x576, 16 steps — TODO confirm these match the
///   model's accepted ranges);
/// - other Cloudflare models: JSON body with `prompt` and `steps`;
/// - everything else: OpenAI-style `/images/generations` JSON payload.
async fn request_image_generation(
    provider: &str,
    api_base: &str,
    api_key: &str,
    image_model: &str,
    prompt: &str,
) -> Result<Value> {
    let client = Client::new();
    if provider_uses_cloudflare(provider) {
        // The default FLUX model takes its parameters as a multipart form.
        if image_model.eq_ignore_ascii_case(DEFAULT_CLOUDFLARE_IMAGE_MODEL) {
            let form = multipart::Form::new()
                .text("prompt", prompt.to_string())
                .text("width", "1024")
                .text("height", "576")
                .text("steps", "16");
            return request_multipart_json(
                &client,
                &build_image_generation_url(provider, api_base, image_model),
                api_key,
                form,
            )
            .await;
        }
        // Other Workers AI image models accept a simple JSON body.
        return request_json(
            &client,
            &build_image_generation_url(provider, api_base, image_model),
            api_key,
            json!({
                "prompt": prompt,
                "steps": 4
            }),
        )
        .await;
    }
    // OpenAI-compatible providers: standard images/generations payload.
    request_json(
        &client,
        &build_image_generation_url(provider, api_base, image_model),
        api_key,
        json!({
            "model": image_model,
            "prompt": prompt,
            "size": "1536x1024",
            "quality": "high",
            "output_format": "png"
        }),
    )
    .await
}
pub async fn test_provider_connectivity(
provider: &str,
api_base: &str,
@@ -1814,6 +2316,59 @@ pub async fn test_provider_connectivity(
})
}
/// Admin-triggered connectivity check for the image provider: issues a small
/// real generation and summarizes the result.
///
/// For the Cloudflare FLUX default model a reduced multipart request
/// (512x288, 4 steps) is sent to keep the test cheap; all other
/// provider/model combinations reuse the production
/// `request_image_generation` path.
///
/// # Errors
/// `BadRequest` when any required field is blank, the provider call fails, or
/// the response contains no readable image.
pub async fn test_image_provider_connectivity(
    provider: &str,
    api_base: &str,
    api_key: &str,
    image_model: &str,
) -> Result<AiImageProviderConnectivityResult> {
    let provider = trim_to_option(Some(provider.to_string()))
        .unwrap_or_else(|| DEFAULT_AI_PROVIDER.to_string());
    let api_base = trim_to_option(Some(api_base.to_string()))
        .ok_or_else(|| Error::BadRequest("请先填写图片 API 地址".to_string()))?;
    let api_key = trim_to_option(Some(api_key.to_string()))
        .ok_or_else(|| Error::BadRequest("请先填写图片 API 密钥".to_string()))?;
    let image_model = trim_to_option(Some(image_model.to_string()))
        .ok_or_else(|| Error::BadRequest("请先填写图片模型".to_string()))?;
    let prompt = "Minimal abstract technology cover art, blue gradient, no text, no watermark";
    let response = if provider_uses_cloudflare(&provider)
        && image_model.eq_ignore_ascii_case(DEFAULT_CLOUDFLARE_IMAGE_MODEL)
    {
        // Smaller dimensions/steps than production to make the probe fast.
        let client = Client::new();
        let form = multipart::Form::new()
            .text("prompt", prompt.to_string())
            .text("width", "512")
            .text("height", "288")
            .text("steps", "4");
        request_multipart_json(
            &client,
            &build_image_generation_url(&provider, &api_base, &image_model),
            &api_key,
            form,
        )
        .await?
    } else {
        request_image_generation(&provider, &api_base, &api_key, &image_model, prompt).await?
    };
    let image_result = extract_image_generation_result(&response)
        .ok_or_else(|| Error::BadRequest("图片接口响应里没有可读取的图片结果".to_string()))?;
    // URLs are shown verbatim; base64 payloads are summarized by length so
    // the admin UI is not flooded with data.
    let result_preview =
        if image_result.starts_with("http://") || image_result.starts_with("https://") {
            image_result
        } else {
            format!("base64 image ok ({} chars)", image_result.len())
        };
    Ok(AiImageProviderConnectivityResult {
        provider: provider.clone(),
        endpoint: build_image_generation_url(&provider, &api_base, &image_model),
        image_model,
        result_preview,
    })
}
pub(crate) async fn prepare_answer(ctx: &AppContext, question: &str) -> Result<PreparedAiAnswer> {
let trimmed_question = question.trim();
if trimmed_question.is_empty() {
@@ -1907,12 +2462,72 @@ async fn load_runtime_settings(
return Err(Error::NotFound);
}
let active_provider =
site_settings_controller::active_ai_provider_id(&raw).and_then(|active_id| {
site_settings_controller::ai_provider_configs(&raw)
.into_iter()
.find(|item| item.id == active_id)
});
let provider = active_provider
.as_ref()
.map(|item| provider_name(Some(item.provider.as_str())))
.unwrap_or_else(|| provider_name(raw.ai_provider.as_deref()));
let api_base = active_provider
.as_ref()
.and_then(|item| trim_to_option(item.api_base.clone()))
.or_else(|| trim_to_option(raw.ai_api_base.clone()));
let api_key = active_provider
.as_ref()
.and_then(|item| trim_to_option(item.api_key.clone()))
.or_else(|| trim_to_option(raw.ai_api_key.clone()));
let chat_model = active_provider
.as_ref()
.and_then(|item| trim_to_option(item.chat_model.clone()))
.unwrap_or_else(|| {
if provider_uses_cloudflare(&provider) {
DEFAULT_CLOUDFLARE_CHAT_MODEL.to_string()
} else {
DEFAULT_CHAT_MODEL.to_string()
}
});
let legacy_image_model = active_provider
.as_ref()
.and_then(|item| trim_to_option(item.image_model.clone()))
.unwrap_or_else(|| default_image_model_for_provider_name(&provider).to_string());
let image_provider = trim_to_option(raw.ai_image_provider.clone());
let image_api_base = trim_to_option(raw.ai_image_api_base.clone());
let image_api_key = trim_to_option(raw.ai_image_api_key.clone());
let image_model = trim_to_option(raw.ai_image_model.clone());
let has_dedicated_image_settings = image_provider.is_some()
|| image_api_base.is_some()
|| image_api_key.is_some()
|| image_model.is_some();
let image = if has_dedicated_image_settings {
let provider = image_provider.unwrap_or_else(|| provider.clone());
let image_model = image_model
.unwrap_or_else(|| default_image_model_for_provider_name(&provider).to_string());
AiImageRuntimeSettings {
provider,
api_base: image_api_base,
api_key: image_api_key,
image_model,
}
} else {
AiImageRuntimeSettings {
provider: provider.clone(),
api_base: api_base.clone(),
api_key: api_key.clone(),
image_model: legacy_image_model,
}
};
Ok(AiRuntimeSettings {
provider: provider_name(raw.ai_provider.as_deref()),
api_base: trim_to_option(raw.ai_api_base.clone()),
api_key: trim_to_option(raw.ai_api_key.clone()),
chat_model: trim_to_option(raw.ai_chat_model.clone())
.unwrap_or_else(|| DEFAULT_CHAT_MODEL.to_string()),
provider,
api_base,
api_key,
chat_model,
image,
system_prompt: trim_to_option(raw.ai_system_prompt.clone())
.unwrap_or_else(|| DEFAULT_SYSTEM_PROMPT.to_string()),
top_k: raw

View File

@@ -0,0 +1,441 @@
use std::collections::{BTreeMap, HashMap};
use axum::http::HeaderMap;
use chrono::{DateTime, Duration, NaiveDate, Utc};
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder,
QuerySelect, Set,
};
use serde::Serialize;
use crate::models::_entities::query_events;
const EVENT_TYPE_SEARCH: &str = "search";
const EVENT_TYPE_AI_QUESTION: &str = "ai_question";
/// Request metadata (path, referrer, user agent) captured alongside a
/// recorded query event.
#[derive(Clone, Debug, Default)]
pub struct QueryEventRequestContext {
    pub request_path: Option<String>,
    pub referrer: Option<String>,
    pub user_agent: Option<String>,
}
/// Un-persisted analytics event; `record_event` trims its fields and inserts
/// it into `query_events`.
#[derive(Clone, Debug)]
pub struct QueryEventDraft {
    // One of the EVENT_TYPE_* constants ("search" / "ai_question").
    pub event_type: String,
    pub query_text: String,
    pub request_context: QueryEventRequestContext,
    pub result_count: Option<i32>,
    pub success: Option<bool>,
    // e.g. "stream" for streamed AI answers (see record_ai_question_event).
    pub response_mode: Option<String>,
    pub provider: Option<String>,
    pub chat_model: Option<String>,
    pub latency_ms: Option<i32>,
}
/// Headline counters for the admin analytics dashboard (all-time, 24h and
/// 7-day windows).
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsOverview {
    pub total_searches: u64,
    pub total_ai_questions: u64,
    pub searches_last_24h: u64,
    pub ai_questions_last_24h: u64,
    pub searches_last_7d: u64,
    pub ai_questions_last_7d: u64,
    pub unique_search_terms_last_7d: usize,
    pub unique_ai_questions_last_7d: usize,
    pub avg_search_results_last_7d: f64,
    // None when no AI question in the window carried a latency value.
    pub avg_ai_latency_ms_last_7d: Option<f64>,
}
/// One ranked query with its occurrence count and last-seen timestamp
/// (formatted "YYYY-MM-DD HH:MM").
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsTopQuery {
    pub query: String,
    pub count: u64,
    pub last_seen_at: String,
}
/// A single recent query event as shown in the admin activity feed.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsRecentEvent {
    pub id: i32,
    pub event_type: String,
    pub query: String,
    pub result_count: Option<i32>,
    pub success: Option<bool>,
    pub response_mode: Option<String>,
    pub provider: Option<String>,
    pub chat_model: Option<String>,
    pub latency_ms: Option<i32>,
    // Formatted timestamp, "YYYY-MM-DD HH:MM".
    pub created_at: String,
}
/// Per-provider event count bucket (7-day breakdown).
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsProviderBucket {
    pub provider: String,
    pub count: u64,
}
/// Per-day activity bucket for the daily chart.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsDailyBucket {
    // Calendar date string for the bucket.
    pub date: String,
    pub searches: u64,
    pub ai_questions: u64,
}
/// Full payload returned to the admin analytics endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct AdminAnalyticsResponse {
    pub overview: AnalyticsOverview,
    pub top_search_terms: Vec<AnalyticsTopQuery>,
    pub top_ai_questions: Vec<AnalyticsTopQuery>,
    pub recent_events: Vec<AnalyticsRecentEvent>,
    pub providers_last_7d: Vec<AnalyticsProviderBucket>,
    pub daily_activity: Vec<AnalyticsDailyBucket>,
}
/// Intermediate per-normalized-query aggregate built by
/// `build_query_aggregates`; `query` holds the most recent raw text.
#[derive(Clone, Debug)]
struct QueryAggregate {
    query: String,
    count: u64,
    last_seen_at: DateTime<Utc>,
}
/// Trim an optional string and drop it entirely when blank.
///
/// Returns `Some(trimmed)` for non-empty content, `None` for `None` or
/// whitespace-only input. Compared to the previous version this checks
/// emptiness on the borrowed `&str` slice before allocating, so blank inputs
/// no longer pay for a `String` that is immediately discarded.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value.and_then(|item| {
        let trimmed = item.trim();
        if trimmed.is_empty() {
            None
        } else {
            Some(trimmed.to_string())
        }
    })
}
/// Canonicalize a query for grouping: collapse all whitespace runs to single
/// spaces and lowercase the result.
fn normalize_query(value: &str) -> String {
    let mut collapsed = String::with_capacity(value.len());
    for (index, word) in value.split_whitespace().enumerate() {
        if index > 0 {
            collapsed.push(' ');
        }
        collapsed.push_str(word);
    }
    collapsed.to_lowercase()
}
/// Render a UTC timestamp as "YYYY-MM-DD HH:MM" for admin display.
fn format_timestamp(value: DateTime<Utc>) -> String {
    value.format("%Y-%m-%d %H:%M").to_string()
}
/// Read a header as trimmed, non-empty text; `None` when the header is
/// absent, not valid UTF-8, or blank after trimming.
fn header_value(headers: &HeaderMap, key: &str) -> Option<String> {
    let raw = headers.get(key)?;
    let text = raw.to_str().ok()?;
    trim_to_option(Some(text.to_string()))
}
/// Clamp a measured latency into the storable non-negative `i32` range:
/// negatives become 0, values above `i32::MAX` saturate.
fn clamp_latency(latency_ms: i64) -> i32 {
    i32::try_from(latency_ms.max(0)).unwrap_or(i32::MAX)
}
/// Group events of `wanted_type` by their normalized query, counting
/// occurrences and keeping the raw text of the most recent occurrence, then
/// sort by count (desc) with recency (desc) as tie-breaker.
fn build_query_aggregates(
    events: &[query_events::Model],
    wanted_type: &str,
) -> Vec<QueryAggregate> {
    let mut grouped: HashMap<String, QueryAggregate> = HashMap::new();
    for event in events
        .iter()
        .filter(|event| event.event_type == wanted_type)
    {
        let entry = grouped
            .entry(event.normalized_query.clone())
            .or_insert_with(|| QueryAggregate {
                query: event.query_text.clone(),
                count: 0,
                last_seen_at: event.created_at.into(),
            });
        entry.count += 1;
        let created_at = DateTime::<Utc>::from(event.created_at);
        // `>=` so the displayed text tracks the newest event; on exact
        // timestamp ties the winner depends on iteration order.
        if created_at >= entry.last_seen_at {
            entry.query = event.query_text.clone();
            entry.last_seen_at = created_at;
        }
    }
    let mut items = grouped.into_values().collect::<Vec<_>>();
    items.sort_by(|left, right| {
        right
            .count
            .cmp(&left.count)
            .then_with(|| right.last_seen_at.cmp(&left.last_seen_at))
    });
    items
}
/// Aggregate events of `wanted_type` and return the total number of unique
/// queries together with the top `limit` entries formatted for the admin UI.
fn aggregate_queries(
    events: &[query_events::Model],
    wanted_type: &str,
    limit: usize,
) -> (usize, Vec<AnalyticsTopQuery>) {
    let ranked = build_query_aggregates(events, wanted_type);
    let total_unique = ranked.len();
    let mut top = Vec::with_capacity(limit.min(total_unique));
    for aggregate in ranked.into_iter().take(limit) {
        top.push(AnalyticsTopQuery {
            query: aggregate.query,
            count: aggregate.count,
            last_seen_at: format_timestamp(aggregate.last_seen_at),
        });
    }
    (total_unique, top)
}
/// Build a [`QueryEventRequestContext`] from the request path plus the
/// `Referer` and `User-Agent` headers (blank values become `None`).
pub fn request_context_from_headers(path: &str, headers: &HeaderMap) -> QueryEventRequestContext {
    QueryEventRequestContext {
        request_path: trim_to_option(Some(path.to_string())),
        referrer: header_value(headers, "referer"),
        user_agent: header_value(headers, "user-agent"),
    }
}
/// Best-effort persistence of a query analytics event.
///
/// Blank queries are silently dropped, and insert failures are logged and
/// swallowed so that analytics can never break the request path that
/// triggered them.
pub async fn record_event(ctx: &AppContext, draft: QueryEventDraft) {
    let query_text = draft.query_text.trim().to_string();
    if query_text.is_empty() {
        return;
    }
    let active_model = query_events::ActiveModel {
        event_type: Set(draft.event_type),
        query_text: Set(query_text.clone()),
        // Normalized form used for grouping in the analytics aggregations.
        normalized_query: Set(normalize_query(&query_text)),
        request_path: Set(trim_to_option(draft.request_context.request_path)),
        referrer: Set(trim_to_option(draft.request_context.referrer)),
        user_agent: Set(trim_to_option(draft.request_context.user_agent)),
        result_count: Set(draft.result_count),
        success: Set(draft.success),
        response_mode: Set(trim_to_option(draft.response_mode)),
        provider: Set(trim_to_option(draft.provider)),
        chat_model: Set(trim_to_option(draft.chat_model)),
        // Negative latencies are clamped to zero before storage.
        latency_ms: Set(draft.latency_ms.map(|value| value.max(0))),
        ..Default::default()
    };
    if let Err(error) = active_model.insert(&ctx.db).await {
        tracing::warn!("failed to record query analytics event: {error}");
    }
}
/// Record a site-search event (always marked successful) with its result
/// count and latency. Fire-and-forget: delegates to `record_event`, which
/// never surfaces errors.
pub async fn record_search_event(
    ctx: &AppContext,
    query_text: &str,
    result_count: usize,
    headers: &HeaderMap,
    latency_ms: i64,
) {
    record_event(
        ctx,
        QueryEventDraft {
            event_type: EVENT_TYPE_SEARCH.to_string(),
            query_text: query_text.to_string(),
            request_context: request_context_from_headers("/api/search", headers),
            // Saturate counts that exceed i32 range before the cast.
            result_count: Some(result_count.min(i32::MAX as usize) as i32),
            success: Some(true),
            response_mode: None,
            provider: None,
            chat_model: None,
            latency_ms: Some(clamp_latency(latency_ms)),
        },
    )
    .await;
}
/// Record an AI question event, tagging the request path by response mode
/// ("stream" vs. regular ask) and capturing provider/model/latency metadata.
/// Fire-and-forget: delegates to `record_event`, which never surfaces errors.
pub async fn record_ai_question_event(
    ctx: &AppContext,
    question: &str,
    headers: &HeaderMap,
    success: bool,
    response_mode: &str,
    provider: Option<String>,
    chat_model: Option<String>,
    result_count: Option<usize>,
    latency_ms: i64,
) {
    record_event(
        ctx,
        QueryEventDraft {
            event_type: EVENT_TYPE_AI_QUESTION.to_string(),
            query_text: question.to_string(),
            request_context: request_context_from_headers(
                // Path mirrors the API route that served the question.
                if response_mode == "stream" {
                    "/api/ai/ask/stream"
                } else {
                    "/api/ai/ask"
                },
                headers,
            ),
            result_count: result_count.map(|value| value.min(i32::MAX as usize) as i32),
            success: Some(success),
            response_mode: Some(response_mode.to_string()),
            provider,
            chat_model,
            latency_ms: Some(clamp_latency(latency_ms)),
        },
    )
    .await;
}
/// Builds the admin analytics dashboard payload from recorded query events.
///
/// All-time and 24-hour counters are computed with SQL `COUNT` queries;
/// everything scoped to the last 7 days (top terms, provider breakdown,
/// per-day buckets, averages) is derived in memory from a single fetch of
/// that window's events.
pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsResponse> {
    let now = Utc::now();
    let since_24h = now - Duration::hours(24);
    let since_7d = now - Duration::days(7);
    // All-time totals, split by event type.
    let total_searches = query_events::Entity::find()
        .filter(query_events::Column::EventType.eq(EVENT_TYPE_SEARCH))
        .count(&ctx.db)
        .await?;
    let total_ai_questions = query_events::Entity::find()
        .filter(query_events::Column::EventType.eq(EVENT_TYPE_AI_QUESTION))
        .count(&ctx.db)
        .await?;
    // Rolling 24-hour counts.
    let searches_last_24h = query_events::Entity::find()
        .filter(query_events::Column::EventType.eq(EVENT_TYPE_SEARCH))
        .filter(query_events::Column::CreatedAt.gte(since_24h))
        .count(&ctx.db)
        .await?;
    let ai_questions_last_24h = query_events::Entity::find()
        .filter(query_events::Column::EventType.eq(EVENT_TYPE_AI_QUESTION))
        .filter(query_events::Column::CreatedAt.gte(since_24h))
        .count(&ctx.db)
        .await?;
    // One fetch of the 7-day window; all remaining aggregations reuse it.
    let last_7d_events = query_events::Entity::find()
        .filter(query_events::Column::CreatedAt.gte(since_7d))
        .order_by_desc(query_events::Column::CreatedAt)
        .all(&ctx.db)
        .await?;
    let searches_last_7d = last_7d_events
        .iter()
        .filter(|event| event.event_type == EVENT_TYPE_SEARCH)
        .count() as u64;
    let ai_questions_last_7d = last_7d_events
        .iter()
        .filter(|event| event.event_type == EVENT_TYPE_AI_QUESTION)
        .count() as u64;
    // Unique-term counts plus top-8 lists per event type (helper defined
    // elsewhere in this file).
    let (unique_search_terms_last_7d, top_search_terms) =
        aggregate_queries(&last_7d_events, EVENT_TYPE_SEARCH, 8);
    let (unique_ai_questions_last_7d, top_ai_questions) =
        aggregate_queries(&last_7d_events, EVENT_TYPE_AI_QUESTION, 8);
    let mut provider_breakdown: HashMap<String, u64> = HashMap::new();
    // BTreeMap keeps the daily buckets ordered by date.
    let mut daily_map: BTreeMap<NaiveDate, (u64, u64)> = BTreeMap::new();
    let mut total_search_results = 0.0_f64;
    let mut counted_search_results = 0_u64;
    let mut total_ai_latency = 0.0_f64;
    let mut counted_ai_latency = 0_u64;
    // Pre-seed the last seven calendar days so empty days still chart as 0.
    for offset in 0..7 {
        let date = (now - Duration::days(offset)).date_naive();
        daily_map.entry(date).or_insert((0, 0));
    }
    for event in &last_7d_events {
        let day = DateTime::<Utc>::from(event.created_at).date_naive();
        // Bucket tuple is (searches, ai_questions) for that day.
        let entry = daily_map.entry(day).or_insert((0, 0));
        if event.event_type == EVENT_TYPE_SEARCH {
            entry.0 += 1;
            if let Some(result_count) = event.result_count {
                // Negative stored counts are clamped to 0 before averaging.
                total_search_results += f64::from(result_count.max(0));
                counted_search_results += 1;
            }
            continue;
        }
        if event.event_type == EVENT_TYPE_AI_QUESTION {
            entry.1 += 1;
            // Blank or missing providers are grouped under one bucket.
            let provider = event
                .provider
                .clone()
                .filter(|value| !value.trim().is_empty())
                .unwrap_or_else(|| "local-or-unspecified".to_string());
            *provider_breakdown.entry(provider).or_insert(0) += 1;
            if let Some(latency_ms) = event.latency_ms {
                total_ai_latency += f64::from(latency_ms.max(0));
                counted_ai_latency += 1;
            }
        }
    }
    // Sort providers by count desc, then name asc for a stable order; keep
    // only the top six for display.
    let mut providers_last_7d = provider_breakdown
        .into_iter()
        .map(|(provider, count)| AnalyticsProviderBucket { provider, count })
        .collect::<Vec<_>>();
    providers_last_7d.sort_by(|left, right| {
        right
            .count
            .cmp(&left.count)
            .then_with(|| left.provider.cmp(&right.provider))
    });
    providers_last_7d.truncate(6);
    let mut daily_activity = daily_map
        .into_iter()
        .map(|(date, (searches, ai_questions))| AnalyticsDailyBucket {
            date: date.format("%Y-%m-%d").to_string(),
            searches,
            ai_questions,
        })
        .collect::<Vec<_>>();
    // "%Y-%m-%d" strings sort lexicographically in chronological order.
    daily_activity.sort_by(|left, right| left.date.cmp(&right.date));
    // Most recent 24 events of any type, for the activity feed.
    let recent_events = query_events::Entity::find()
        .order_by_desc(query_events::Column::CreatedAt)
        .limit(24)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(|event| AnalyticsRecentEvent {
            id: event.id,
            event_type: event.event_type,
            query: event.query_text,
            result_count: event.result_count,
            success: event.success,
            response_mode: event.response_mode,
            provider: event.provider,
            chat_model: event.chat_model,
            latency_ms: event.latency_ms,
            created_at: format_timestamp(event.created_at.into()),
        })
        .collect::<Vec<_>>();
    Ok(AdminAnalyticsResponse {
        overview: AnalyticsOverview {
            total_searches,
            total_ai_questions,
            searches_last_24h,
            ai_questions_last_24h,
            searches_last_7d,
            ai_questions_last_7d,
            unique_search_terms_last_7d,
            unique_ai_questions_last_7d,
            // Averages guard against division by zero on empty windows.
            avg_search_results_last_7d: if counted_search_results > 0 {
                total_search_results / counted_search_results as f64
            } else {
                0.0
            },
            avg_ai_latency_ms_last_7d: (counted_ai_latency > 0)
                .then(|| total_ai_latency / counted_ai_latency as f64),
        },
        top_search_terms,
        top_ai_questions,
        recent_events,
        providers_last_7d,
        daily_activity,
    })
}

View File

@@ -1,2 +1,4 @@
// Service-layer modules shared by the HTTP controllers.
pub mod ai;
// Query/search analytics recording and aggregation.
pub mod analytics;
pub mod content;
// S3-compatible media object storage (Cloudflare R2 / MinIO).
pub mod storage;

View File

@@ -0,0 +1,513 @@
use aws_config::BehaviorVersion;
use aws_sdk_s3::{config::Credentials, primitives::ByteStream, Client};
use loco_rs::prelude::*;
use sea_orm::{EntityTrait, QueryOrder};
use std::path::{Path, PathBuf};
use uuid::Uuid;
use crate::models::_entities::site_settings;
// Environment-variable fallbacks for the media object-storage settings.
// Values saved in the site_settings row take precedence; these are only
// consulted when the corresponding database field is empty (see
// `optional_r2_settings`).
const ENV_MEDIA_PROVIDER: &str = "TERMI_MEDIA_PROVIDER";
const ENV_MEDIA_ENDPOINT: &str = "TERMI_MEDIA_ENDPOINT";
const ENV_MEDIA_BUCKET: &str = "TERMI_MEDIA_BUCKET";
const ENV_MEDIA_PUBLIC_BASE_URL: &str = "TERMI_MEDIA_PUBLIC_BASE_URL";
const ENV_MEDIA_ACCESS_KEY_ID: &str = "TERMI_MEDIA_ACCESS_KEY_ID";
const ENV_MEDIA_SECRET_ACCESS_KEY: &str = "TERMI_MEDIA_SECRET_ACCESS_KEY";
// R2-prefixed variants, consulted after the generic TERMI_MEDIA_* names
// (presumably retained for backward compatibility — TODO confirm).
const ENV_R2_ACCOUNT_ID: &str = "TERMI_R2_ACCOUNT_ID";
const ENV_R2_BUCKET: &str = "TERMI_R2_BUCKET";
const ENV_R2_PUBLIC_BASE_URL: &str = "TERMI_R2_PUBLIC_BASE_URL";
const ENV_R2_ACCESS_KEY_ID: &str = "TERMI_R2_ACCESS_KEY_ID";
const ENV_R2_SECRET_ACCESS_KEY: &str = "TERMI_R2_SECRET_ACCESS_KEY";
/// Supported S3-compatible object-storage backends for media uploads.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum MediaStorageProvider {
    /// Cloudflare R2 (endpoint derived from the account id).
    R2,
    /// MinIO (explicit endpoint, path-style addressing).
    Minio,
}
/// Fully resolved connection settings for the configured storage backend,
/// as produced by `optional_r2_settings`.
#[derive(Clone, Debug)]
pub struct MediaStorageSettings {
    // Which backend these settings target.
    pub provider: MediaStorageProvider,
    // Lowercase provider label: "r2" or "minio".
    pub provider_name: String,
    // S3 API endpoint URL.
    pub endpoint: String,
    // Bucket objects are stored in.
    pub bucket: String,
    // Base URL (normalized without a trailing slash) for public object URLs.
    pub public_base_url: String,
    // S3 credentials.
    pub access_key_id: String,
    pub secret_access_key: String,
    // Region handed to the S3 client: "auto" for R2, "us-east-1" for MinIO.
    pub region: String,
    // Path-style addressing toggle; enabled for MinIO.
    pub force_path_style: bool,
}
/// A freshly uploaded object: its bucket key and derived public URL.
#[derive(Clone, Debug)]
pub struct StoredObject {
    pub key: String,
    pub url: String,
}
/// One listing entry returned by `list_objects`.
#[derive(Clone, Debug)]
pub struct StoredObjectSummary {
    pub key: String,
    pub url: String,
    // Size in bytes as reported by the backend; 0 when unreported.
    pub size_bytes: i64,
    // Debug-formatted timestamp, if the backend reported one.
    pub last_modified: Option<String>,
}
/// Trims an optional string; empty and whitespace-only values become `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    match value {
        Some(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        None => None,
    }
}
/// Reads an environment variable and normalizes it through `trim_to_option`,
/// so unset, empty, and whitespace-only values all yield `None`.
fn env_value(name: &str) -> Option<String> {
    let raw = std::env::var(name).ok()?;
    trim_to_option(Some(raw))
}
/// Lowercases ASCII alphanumerics and collapses every other run of
/// characters into a single dash, trimming dashes from both ends.
fn slugify_segment(value: &str) -> String {
    let mut slug = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            c if c.is_ascii_alphanumeric() => slug.push(c.to_ascii_lowercase()),
            // Collapse consecutive separators into one dash.
            _ if slug.ends_with('-') => {}
            _ => slug.push('-'),
        }
    }
    slug.trim_matches('-').to_string()
}
/// Strips surrounding whitespace and any trailing slashes from a base URL.
fn normalize_public_base_url(value: String) -> String {
    let trimmed = value.trim();
    trimmed.trim_end_matches('/').to_string()
}
/// Maps a free-form provider string to a `MediaStorageProvider`.
///
/// Only "minio" (case-insensitive, surrounding whitespace ignored) selects
/// MinIO; everything else — including `None` — falls back to R2.
fn normalize_provider(value: Option<String>) -> MediaStorageProvider {
    let normalized = value
        .as_deref()
        .map(str::trim)
        .unwrap_or_default()
        .to_ascii_lowercase();
    if normalized == "minio" {
        MediaStorageProvider::Minio
    } else {
        MediaStorageProvider::R2
    }
}
/// Loads the first (lowest-id) site_settings row, if one exists.
async fn load_settings_row(ctx: &AppContext) -> Result<Option<site_settings::Model>> {
    let row = site_settings::Entity::find()
        .order_by_asc(site_settings::Column::Id)
        .one(&ctx.db)
        .await?;
    Ok(row)
}
/// Derives the R2 S3 endpoint from an account id; values that are already
/// full URLs are passed through unchanged (minus trailing slashes).
fn build_r2_endpoint(account_id: &str) -> String {
    let cleaned = account_id.trim().trim_end_matches('/');
    let is_url = cleaned.starts_with("http://") || cleaned.starts_with("https://");
    if is_url {
        cleaned.to_string()
    } else {
        format!("https://{cleaned}.r2.cloudflarestorage.com")
    }
}
/// Default public base URL for MinIO: `<endpoint>/<bucket>`, with the
/// slash between them normalized.
fn default_public_base_url_for_minio(endpoint: &str, bucket: &str) -> String {
    let base = endpoint.trim().trim_end_matches('/');
    let path = bucket.trim().trim_start_matches('/');
    format!("{base}/{path}")
}
/// Locates the frontend's `generated-covers` directory.
///
/// Tries `<cwd>/frontend/public/generated-covers` and then
/// `<cwd>/../frontend/public/generated-covers`, returning the first whose
/// parent directory exists; falls back to the relative `..`-based path.
pub fn generated_cover_directory() -> PathBuf {
    let cwd = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let suffix = Path::new("frontend").join("public").join("generated-covers");
    let candidates = [cwd.join(&suffix), cwd.join("..").join(&suffix)];
    for candidate in candidates {
        // Only require the parent to exist; the directory itself may be
        // created later.
        let parent_exists = candidate
            .parent()
            .map(Path::exists)
            .unwrap_or(false);
        if parent_exists {
            return candidate;
        }
    }
    PathBuf::from("..").join(&suffix)
}
/// Returns the path's trimmed extension, defaulting to "png" when the path
/// has no usable extension.
pub fn file_extension_for_path(path: &Path) -> &str {
    match path.extension().and_then(|ext| ext.to_str()) {
        Some(ext) if !ext.trim().is_empty() => ext.trim(),
        _ => "png",
    }
}
/// Resolves media object-storage settings from the site-settings row,
/// falling back field by field to `TERMI_MEDIA_*` and then `TERMI_R2_*`
/// environment variables.
///
/// Returns `Ok(None)` when nothing is configured at all, and
/// `Err(Error::BadRequest)` listing the missing fields when the
/// configuration is only partially filled in.
pub async fn optional_r2_settings(ctx: &AppContext) -> Result<Option<MediaStorageSettings>> {
    let row = load_settings_row(ctx).await?;
    let provider_raw = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_storage_provider.clone()))
        .or_else(|| env_value(ENV_MEDIA_PROVIDER));
    // Remember whether a provider was explicitly configured before
    // `provider_raw` is consumed (avoids cloning it just for the check);
    // unknown values default to R2 inside `normalize_provider`.
    let provider_was_set = provider_raw.is_some();
    let provider = normalize_provider(provider_raw);
    // For R2 this field holds the account id; for MinIO, the endpoint URL.
    let endpoint_or_account = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_account_id.clone()))
        .or_else(|| env_value(ENV_MEDIA_ENDPOINT))
        .or_else(|| env_value(ENV_R2_ACCOUNT_ID));
    let bucket = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_bucket.clone()))
        .or_else(|| env_value(ENV_MEDIA_BUCKET))
        .or_else(|| env_value(ENV_R2_BUCKET));
    let access_key_id = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_access_key_id.clone()))
        .or_else(|| env_value(ENV_MEDIA_ACCESS_KEY_ID))
        .or_else(|| env_value(ENV_R2_ACCESS_KEY_ID));
    let secret_access_key = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_secret_access_key.clone()))
        .or_else(|| env_value(ENV_MEDIA_SECRET_ACCESS_KEY))
        .or_else(|| env_value(ENV_R2_SECRET_ACCESS_KEY));
    let public_base_url = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_public_base_url.clone()))
        .or_else(|| env_value(ENV_MEDIA_PUBLIC_BASE_URL))
        .or_else(|| env_value(ENV_R2_PUBLIC_BASE_URL))
        // MinIO can derive a sensible default (`<endpoint>/<bucket>`); R2
        // has no derivable public URL, so there it remains required.
        .or_else(
            || match (&provider, endpoint_or_account.as_deref(), bucket.as_deref()) {
                (MediaStorageProvider::Minio, Some(endpoint), Some(bucket)) => {
                    Some(default_public_base_url_for_minio(endpoint, bucket))
                }
                _ => None,
            },
        );
    // Nothing configured anywhere: storage is simply disabled.
    let has_any = endpoint_or_account.is_some()
        || bucket.is_some()
        || public_base_url.is_some()
        || access_key_id.is_some()
        || secret_access_key.is_some()
        || provider_was_set;
    if !has_any {
        return Ok(None);
    }
    // Partial configuration is an error; report every missing field at once.
    let missing = [
        (
            match provider {
                MediaStorageProvider::Minio => "Endpoint",
                MediaStorageProvider::R2 => "Account ID",
            },
            endpoint_or_account.is_some(),
        ),
        ("Bucket", bucket.is_some()),
        ("Public Base URL", public_base_url.is_some()),
        ("Access Key ID", access_key_id.is_some()),
        ("Secret Access Key", secret_access_key.is_some()),
    ]
    .into_iter()
    .filter_map(|(label, present)| (!present).then_some(label))
    .collect::<Vec<_>>();
    if !missing.is_empty() {
        return Err(Error::BadRequest(format!(
            "对象存储配置不完整,请补齐:{}",
            missing.join(" / ")
        )));
    }
    // The completeness check above guarantees the value is present;
    // `unwrap_or_default` keeps this panic-free regardless. Consuming the
    // Option here (instead of cloning it per match arm) avoids redundant
    // String clones.
    let endpoint_source = endpoint_or_account.unwrap_or_default();
    let endpoint = match provider {
        MediaStorageProvider::R2 => build_r2_endpoint(&endpoint_source),
        MediaStorageProvider::Minio => endpoint_source,
    };
    // Derive label/region before `provider` is moved into the struct, so no
    // clone of the enum is needed. R2 uses the pseudo-region "auto"; MinIO
    // accepts any region, "us-east-1" being the conventional default.
    let (provider_name, region) = match provider {
        MediaStorageProvider::R2 => ("r2", "auto"),
        MediaStorageProvider::Minio => ("minio", "us-east-1"),
    };
    let force_path_style = provider == MediaStorageProvider::Minio;
    Ok(Some(MediaStorageSettings {
        provider,
        provider_name: provider_name.to_string(),
        endpoint,
        bucket: bucket.unwrap_or_default(),
        public_base_url: normalize_public_base_url(public_base_url.unwrap_or_default()),
        access_key_id: access_key_id.unwrap_or_default(),
        secret_access_key: secret_access_key.unwrap_or_default(),
        region: region.to_string(),
        force_path_style,
    }))
}
pub async fn require_r2_settings(ctx: &AppContext) -> Result<MediaStorageSettings> {
optional_r2_settings(ctx)
.await?
.ok_or_else(|| Error::BadRequest("请先在后台配置媒体对象存储".to_string()))
}
/// Builds an S3 client for the resolved settings (custom endpoint, static
/// credentials, and path-style addressing when the backend requires it).
async fn r2_client(settings: &MediaStorageSettings) -> Client {
    // The last Credentials argument is only a diagnostic provider label.
    let provider_label = match settings.provider {
        MediaStorageProvider::R2 => "r2",
        MediaStorageProvider::Minio => "minio",
    };
    let credentials = Credentials::new(
        settings.access_key_id.clone(),
        settings.secret_access_key.clone(),
        None,
        None,
        provider_label,
    );
    let shared_config = aws_config::defaults(BehaviorVersion::latest())
        .endpoint_url(settings.endpoint.clone())
        .credentials_provider(credentials)
        .region(aws_sdk_s3::config::Region::new(settings.region.clone()))
        .load()
        .await;
    let service_config = aws_sdk_s3::config::Builder::from(&shared_config)
        .force_path_style(settings.force_path_style)
        .build();
    Client::from_conf(service_config)
}
/// Joins the configured public base URL and an object key with one slash.
fn build_public_url(settings: &MediaStorageSettings, key: &str) -> String {
    let relative_key = key.trim_start_matches('/');
    format!("{}/{relative_key}", settings.public_base_url)
}
/// Recovers the object key from a public URL by stripping the configured
/// base URL; returns `None` when the URL is not under this storage's base.
pub fn object_key_from_public_url(settings: &MediaStorageSettings, url: &str) -> Option<String> {
    let base = settings.public_base_url.trim().trim_end_matches('/');
    let candidate = url.trim();
    if base.is_empty() || candidate.is_empty() {
        return None;
    }
    let suffix = candidate.strip_prefix(base)?.trim_start_matches('/');
    if suffix.is_empty() {
        None
    } else {
        Some(suffix.to_string())
    }
}
/// Builds a collision-resistant object key: `<prefix>/<stem>-<uuid>.<ext>`.
///
/// The stem is slugified (falling back to "asset"), the extension is
/// lowercased without its leading dot (falling back to "bin"), and an
/// empty prefix drops the directory part entirely.
pub fn build_object_key(prefix: &str, stem: &str, extension: &str) -> String {
    let clean_prefix = prefix.trim_matches('/');
    let stem_part = {
        let slug = slugify_segment(stem);
        if slug.is_empty() {
            "asset".to_string()
        } else {
            slug
        }
    };
    let ext_part = {
        let ext = extension.trim().trim_start_matches('.').to_ascii_lowercase();
        if ext.is_empty() {
            "bin".to_string()
        } else {
            ext
        }
    };
    // The random UUID makes each generated key unique.
    let object_name = format!("{stem_part}-{}.{ext_part}", Uuid::new_v4().simple());
    if clean_prefix.is_empty() {
        object_name
    } else {
        format!("{clean_prefix}/{object_name}")
    }
}
async fn ensure_bucket_exists(client: &Client, settings: &MediaStorageSettings) -> Result<()> {
if client
.head_bucket()
.bucket(&settings.bucket)
.send()
.await
.is_ok()
{
return Ok(());
}
if settings.provider != MediaStorageProvider::Minio {
return Err(Error::BadRequest(format!(
"对象存储 bucket 不存在或不可访问:{}",
settings.bucket
)));
}
client
.create_bucket()
.bucket(&settings.bucket)
.send()
.await
.map_err(|error| Error::BadRequest(format!("自动创建 MinIO bucket 失败: {error}")))?;
Ok(())
}
/// Uploads a byte buffer under `key`, returning the stored key and its
/// public URL.
///
/// `content_type` / `cache_control` are attached only when present and
/// non-blank after trimming.
pub async fn upload_bytes_to_r2(
    ctx: &AppContext,
    key: &str,
    bytes: Vec<u8>,
    content_type: Option<&str>,
    cache_control: Option<&str>,
) -> Result<StoredObject> {
    let settings = require_r2_settings(ctx).await?;
    let client = r2_client(&settings).await;
    ensure_bucket_exists(&client, &settings).await?;
    let normalized_content_type = content_type.map(str::trim).filter(|value| !value.is_empty());
    let normalized_cache_control = cache_control.map(str::trim).filter(|value| !value.is_empty());
    let mut put = client
        .put_object()
        .bucket(&settings.bucket)
        .key(key)
        .body(ByteStream::from(bytes));
    if let Some(value) = normalized_content_type {
        put = put.content_type(value);
    }
    if let Some(value) = normalized_cache_control {
        put = put.cache_control(value);
    }
    put.send()
        .await
        .map_err(|error| Error::BadRequest(format!("上传文件到对象存储失败: {error}")))?;
    let url = build_public_url(&settings, key);
    Ok(StoredObject {
        key: key.to_string(),
        url,
    })
}
/// Deletes the object stored under `key` from the configured bucket.
pub async fn delete_object(ctx: &AppContext, key: &str) -> Result<()> {
    let settings = require_r2_settings(ctx).await?;
    r2_client(&settings)
        .await
        .delete_object()
        .bucket(&settings.bucket)
        .key(key)
        .send()
        .await
        .map_err(|error| Error::BadRequest(format!("删除对象存储文件失败: {error}")))?;
    Ok(())
}
/// Deletes the object behind a public URL when that URL belongs to the
/// configured storage.
///
/// Returns `Ok(false)` — without erroring — when storage is unconfigured
/// or the URL is not under its public base, and `Ok(true)` after deletion.
pub async fn delete_managed_url(ctx: &AppContext, url: &str) -> Result<bool> {
    let settings = match optional_r2_settings(ctx).await? {
        Some(settings) => settings,
        None => return Ok(false),
    };
    let key = match object_key_from_public_url(&settings, url) {
        Some(key) => key,
        None => return Ok(false),
    };
    r2_client(&settings)
        .await
        .delete_object()
        .bucket(&settings.bucket)
        .key(&key)
        .send()
        .await
        .map_err(|error| Error::BadRequest(format!("删除对象存储文件失败: {error}")))?;
    Ok(true)
}
/// Lists up to `limit` objects (clamped to 1..=1000), optionally filtered
/// by a non-blank key prefix, as summaries with public URLs.
pub async fn list_objects(
    ctx: &AppContext,
    prefix: Option<&str>,
    limit: i32,
) -> Result<Vec<StoredObjectSummary>> {
    let settings = require_r2_settings(ctx).await?;
    let client = r2_client(&settings).await;
    ensure_bucket_exists(&client, &settings).await?;
    let mut request = client
        .list_objects_v2()
        .bucket(&settings.bucket)
        .max_keys(limit.clamp(1, 1000));
    let normalized_prefix = prefix.map(str::trim).filter(|value| !value.is_empty());
    if let Some(value) = normalized_prefix {
        request = request.prefix(value);
    }
    let response = request
        .send()
        .await
        .map_err(|error| Error::BadRequest(format!("读取对象存储列表失败: {error}")))?;
    let mut summaries = Vec::new();
    for item in response.contents() {
        // Entries without a key are skipped.
        let Some(key) = item.key().map(str::to_string) else {
            continue;
        };
        summaries.push(StoredObjectSummary {
            url: build_public_url(&settings, &key),
            key,
            size_bytes: item.size().unwrap_or_default(),
            last_modified: item.last_modified().map(|ts| format!("{ts:?}")),
        });
    }
    Ok(summaries)
}
pub async fn test_r2_connectivity(ctx: &AppContext) -> Result<String> {
let settings = require_r2_settings(ctx).await?;
let client = r2_client(&settings).await;
ensure_bucket_exists(&client, &settings).await?;
let healthcheck_key = format!(".healthchecks/{}.txt", Uuid::new_v4().simple());
client
.put_object()
.bucket(&settings.bucket)
.key(&healthcheck_key)
.body(ByteStream::from_static(b"termi-storage-ok"))
.content_type("text/plain")
.send()
.await
.map_err(|error| Error::BadRequest(format!("对象存储连接测试失败: {error}")))?;
client
.delete_object()
.bucket(&settings.bucket)
.key(&healthcheck_key)
.send()
.await
.map_err(|error| Error::BadRequest(format!("对象存储清理测试文件失败: {error}")))?;
Ok(settings.bucket)
}