chore: checkpoint admin editor and perf work

This commit is contained in:
2026-03-31 00:12:02 +08:00
parent 92a85eef20
commit 99b308e800
45 changed files with 7265 additions and 833 deletions

View File

@@ -1,3 +1,4 @@
use axum::extract::{Multipart, Query};
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, PaginatorTrait, QueryFilter,
@@ -14,7 +15,7 @@ use crate::{
site_settings::{self, SiteSettingsPayload},
},
models::_entities::{ai_chunks, comments, friend_links, posts, reviews},
services::{ai, content},
services::{ai, analytics, content, storage},
};
#[derive(Clone, Debug, Deserialize)]
@@ -130,6 +131,10 @@ pub struct AdminSiteSettingsResponse {
pub ai_api_base: Option<String>,
pub ai_api_key: Option<String>,
pub ai_chat_model: Option<String>,
pub ai_image_provider: Option<String>,
pub ai_image_api_base: Option<String>,
pub ai_image_api_key: Option<String>,
pub ai_image_model: Option<String>,
pub ai_providers: Vec<site_settings::AiProviderConfig>,
pub ai_active_provider_id: Option<String>,
pub ai_embedding_model: Option<String>,
@@ -139,6 +144,12 @@ pub struct AdminSiteSettingsResponse {
pub ai_last_indexed_at: Option<String>,
pub ai_chunks_count: u64,
pub ai_local_embedding: String,
pub media_storage_provider: Option<String>,
pub media_r2_account_id: Option<String>,
pub media_r2_bucket: Option<String>,
pub media_r2_public_base_url: Option<String>,
pub media_r2_access_key_id: Option<String>,
pub media_r2_secret_access_key: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
@@ -160,6 +171,67 @@ pub struct AdminAiProviderTestResponse {
pub reply_preview: String,
}
/// Request body for testing connectivity to an AI image-generation provider.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminAiImageProviderTestRequest {
/// Provider identifier selecting which image backend to exercise.
pub provider: String,
/// Base URL of the provider's API.
pub api_base: String,
/// API key used to authenticate the test call.
pub api_key: String,
/// Image model name to test against.
pub image_model: String,
}
/// Result of an AI image-provider connectivity test.
#[derive(Clone, Debug, Serialize)]
pub struct AdminAiImageProviderTestResponse {
pub provider: String,
/// Endpoint that was actually called during the test.
pub endpoint: String,
pub image_model: String,
/// Preview of the provider's response to the test request.
pub result_preview: String,
}
/// Response returned after an image is uploaded to object storage.
#[derive(Clone, Debug, Serialize)]
pub struct AdminImageUploadResponse {
/// Publicly reachable URL of the stored object.
pub url: String,
/// Object key under which the image was stored.
pub key: String,
}
/// Result of an R2 storage connectivity check.
#[derive(Clone, Debug, Serialize)]
pub struct AdminR2ConnectivityResponse {
/// Bucket that the connectivity test reached.
pub bucket: String,
/// Public base URL configured for serving objects from this bucket.
pub public_base_url: String,
}
/// A single stored media object, as listed to the admin UI.
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaObjectResponse {
pub key: String,
pub url: String,
pub size_bytes: i64,
/// Last-modified timestamp, when the storage backend reports one.
pub last_modified: Option<String>,
}
/// Page of media objects plus the storage configuration they came from.
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaListResponse {
pub provider: String,
pub bucket: String,
pub public_base_url: String,
pub items: Vec<AdminMediaObjectResponse>,
}
/// Acknowledgement returned after deleting a media object.
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaDeleteResponse {
pub deleted: bool,
pub key: String,
}
/// Query-string parameters for listing media objects.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaListQuery {
/// Optional key prefix to filter the listing.
pub prefix: Option<String>,
/// Maximum number of objects to return (handler defaults to 200).
pub limit: Option<i32>,
}
/// Query-string parameters for deleting a single media object by key.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaDeleteQuery {
pub key: String,
}
#[derive(Clone, Debug, Deserialize)]
pub struct AdminPostMetadataRequest {
pub markdown: String,
@@ -170,6 +242,30 @@ pub struct AdminPostPolishRequest {
pub markdown: String,
}
/// Request to have AI polish a review's description, with the review's
/// surrounding metadata supplied as context for the model.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminReviewPolishRequest {
pub title: String,
pub review_type: String,
pub rating: i32,
pub review_date: Option<String>,
pub status: String,
/// Defaults to an empty list when omitted from the JSON body.
#[serde(default)]
pub tags: Vec<String>,
/// Description text to be polished.
pub description: String,
}
/// Request to generate a cover image for a post; metadata and the markdown
/// body are forwarded to the AI service as generation context.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminPostCoverImageRequest {
pub title: String,
pub description: Option<String>,
pub category: Option<String>,
/// Defaults to an empty list when omitted from the JSON body.
#[serde(default)]
pub tags: Vec<String>,
pub post_type: String,
pub slug: Option<String>,
pub markdown: String,
}
fn format_timestamp(
value: Option<sea_orm::prelude::DateTimeWithTimeZone>,
pattern: &str,
@@ -242,6 +338,10 @@ fn build_settings_response(
ai_api_base: item.ai_api_base,
ai_api_key: item.ai_api_key,
ai_chat_model: item.ai_chat_model,
ai_image_provider: item.ai_image_provider,
ai_image_api_base: item.ai_image_api_base,
ai_image_api_key: item.ai_image_api_key,
ai_image_model: item.ai_image_model,
ai_providers,
ai_active_provider_id,
ai_embedding_model: item.ai_embedding_model,
@@ -251,6 +351,12 @@ fn build_settings_response(
ai_last_indexed_at: format_timestamp(item.ai_last_indexed_at, "%Y-%m-%d %H:%M:%S UTC"),
ai_chunks_count,
ai_local_embedding: ai::local_embedding_label().to_string(),
media_storage_provider: item.media_storage_provider,
media_r2_account_id: item.media_r2_account_id,
media_r2_bucket: item.media_r2_bucket,
media_r2_public_base_url: item.media_r2_public_base_url,
media_r2_access_key_id: item.media_r2_access_key_id,
media_r2_secret_access_key: item.media_r2_secret_access_key,
}
}
@@ -410,6 +516,12 @@ pub async fn dashboard(State(ctx): State<AppContext>) -> Result<Response> {
})
}
/// GET /api/admin/analytics — admin analytics overview.
///
/// Requires an authenticated admin session (`check_auth`), then returns the
/// aggregated payload built by `analytics::build_admin_analytics` as JSON.
#[debug_handler]
pub async fn analytics_overview(State(ctx): State<AppContext>) -> Result<Response> {
check_auth()?;
format::json(analytics::build_admin_analytics(&ctx).await?)
}
#[debug_handler]
pub async fn get_site_settings(State(ctx): State<AppContext>) -> Result<Response> {
check_auth()?;
@@ -428,7 +540,7 @@ pub async fn update_site_settings(
let current = site_settings::load_current(&ctx).await?;
let mut item = current;
params.apply(&mut item);
let item = item.into_active_model();
let item = item.into_active_model().reset_all();
let updated = item.update(&ctx.db).await?;
let ai_chunks_count = ai_chunks::Entity::find().count(&ctx.db).await?;
@@ -469,6 +581,88 @@ pub async fn test_ai_provider(Json(payload): Json<AdminAiProviderTestRequest>) -
})
}
#[debug_handler]
pub async fn test_ai_image_provider(
Json(payload): Json<AdminAiImageProviderTestRequest>,
) -> Result<Response> {
check_auth()?;
let result = ai::test_image_provider_connectivity(
&payload.provider,
&payload.api_base,
&payload.api_key,
&payload.image_model,
)
.await?;
format::json(AdminAiImageProviderTestResponse {
provider: result.provider,
endpoint: result.endpoint,
image_model: result.image_model,
result_preview: result.result_preview,
})
}
/// POST /api/admin/storage/r2/test — verify the configured R2 storage is
/// reachable and report the bucket plus its public base URL.
#[debug_handler]
pub async fn test_r2_storage(State(ctx): State<AppContext>) -> Result<Response> {
    check_auth()?;
    // Loading settings first also validates that R2 is fully configured.
    let r2_settings = storage::require_r2_settings(&ctx).await?;
    let bucket_name = storage::test_r2_connectivity(&ctx).await?;
    let response = AdminR2ConnectivityResponse {
        bucket: bucket_name,
        public_base_url: r2_settings.public_base_url,
    };
    format::json(response)
}
/// GET /api/admin/storage/media — list stored media objects, optionally
/// filtered by key prefix and capped by `limit` (default 200).
#[debug_handler]
pub async fn list_media_objects(
    State(ctx): State<AppContext>,
    Query(query): Query<AdminMediaListQuery>,
) -> Result<Response> {
    check_auth()?;
    let settings = storage::require_r2_settings(&ctx).await?;
    let limit = query.limit.unwrap_or(200);
    let objects = storage::list_objects(&ctx, query.prefix.as_deref(), limit).await?;
    let mut items = Vec::with_capacity(objects.len());
    for object in objects {
        items.push(AdminMediaObjectResponse {
            key: object.key,
            url: object.url,
            size_bytes: object.size_bytes,
            last_modified: object.last_modified,
        });
    }
    format::json(AdminMediaListResponse {
        provider: settings.provider_name,
        bucket: settings.bucket,
        public_base_url: settings.public_base_url,
        items,
    })
}
/// DELETE /api/admin/storage/media — remove a single object by key.
/// Rejects a blank key with a 400 before touching storage.
#[debug_handler]
pub async fn delete_media_object(
    State(ctx): State<AppContext>,
    Query(query): Query<AdminMediaDeleteQuery>,
) -> Result<Response> {
    check_auth()?;
    let trimmed_key = query.key.trim();
    if trimmed_key.is_empty() {
        return Err(Error::BadRequest("缺少对象 key".to_string()));
    }
    storage::delete_object(&ctx, trimmed_key).await?;
    let response = AdminMediaDeleteResponse {
        deleted: true,
        key: trimmed_key.to_string(),
    };
    format::json(response)
}
#[debug_handler]
pub async fn generate_post_metadata(
State(ctx): State<AppContext>,
@@ -487,6 +681,127 @@ pub async fn polish_post_markdown(
format::json(ai::polish_post_markdown(&ctx, &payload.markdown).await?)
}
/// POST /api/admin/ai/polish-review — ask the AI service to polish a review
/// description, passing the review's metadata along as context.
#[debug_handler]
pub async fn polish_review_description(
    State(ctx): State<AppContext>,
    Json(payload): Json<AdminReviewPolishRequest>,
) -> Result<Response> {
    check_auth()?;
    let polished = ai::polish_review_description(
        &ctx,
        &payload.title,
        &payload.review_type,
        payload.rating,
        payload.review_date.as_deref(),
        &payload.status,
        &payload.tags,
        &payload.description,
    )
    .await?;
    format::json(polished)
}
/// POST /api/admin/ai/post-cover — generate a cover image for a post from its
/// metadata and markdown body via the AI service.
#[debug_handler]
pub async fn generate_post_cover_image(
    State(ctx): State<AppContext>,
    Json(payload): Json<AdminPostCoverImageRequest>,
) -> Result<Response> {
    check_auth()?;
    let generated = ai::generate_post_cover_image(
        &ctx,
        &payload.title,
        payload.description.as_deref(),
        payload.category.as_deref(),
        &payload.tags,
        &payload.post_type,
        payload.slug.as_deref(),
        &payload.markdown,
    )
    .await?;
    format::json(generated)
}
/// Pick a canonical file extension for an uploaded review cover image.
///
/// The file name's extension (case-insensitive, whitespace-trimmed) wins when
/// it is a recognised image type; otherwise the declared MIME content type is
/// consulted. Returns `None` when neither identifies a supported format.
fn review_cover_extension(
    file_name: Option<&str>,
    content_type: Option<&str>,
) -> Option<&'static str> {
    // First preference: the extension embedded in the original file name.
    if let Some(name) = file_name {
        if let Some(raw_ext) = name.rsplit('.').next() {
            let ext = raw_ext.trim().to_ascii_lowercase();
            let mapped = match ext.as_str() {
                "png" => Some("png"),
                "jpg" | "jpeg" => Some("jpg"),
                "webp" => Some("webp"),
                "gif" => Some("gif"),
                "avif" => Some("avif"),
                "svg" => Some("svg"),
                _ => None,
            };
            if mapped.is_some() {
                return mapped;
            }
        }
    }
    // Fallback: the declared MIME type from the multipart field.
    let mime = content_type.unwrap_or_default().trim().to_ascii_lowercase();
    match mime.as_str() {
        "image/png" => Some("png"),
        "image/jpeg" => Some("jpg"),
        "image/webp" => Some("webp"),
        "image/gif" => Some("gif"),
        "image/avif" => Some("avif"),
        "image/svg+xml" => Some("svg"),
        _ => None,
    }
}
#[debug_handler]
pub async fn upload_review_cover_image(
State(ctx): State<AppContext>,
mut multipart: Multipart,
) -> Result<Response> {
check_auth()?;
let field = multipart
.next_field()
.await
.map_err(|error| Error::BadRequest(error.to_string()))?
.ok_or_else(|| Error::BadRequest("请先选择图片文件".to_string()))?;
let file_name = field.file_name().map(ToString::to_string);
let content_type = field.content_type().map(ToString::to_string);
let extension = review_cover_extension(file_name.as_deref(), content_type.as_deref())
.ok_or_else(|| Error::BadRequest("仅支持常见图片格式上传".to_string()))?;
let bytes = field
.bytes()
.await
.map_err(|error| Error::BadRequest(error.to_string()))?;
if bytes.is_empty() {
return Err(Error::BadRequest("上传的图片内容为空".to_string()));
}
let key = crate::services::storage::build_object_key(
"review-covers",
file_name.as_deref().unwrap_or("review-cover"),
extension,
);
let stored = crate::services::storage::upload_bytes_to_r2(
&ctx,
&key,
bytes.to_vec(),
content_type.as_deref(),
Some("public, max-age=31536000, immutable"),
)
.await?;
format::json(AdminImageUploadResponse {
url: stored.url,
key: stored.key,
})
}
pub fn routes() -> Routes {
Routes::new()
.prefix("/api/admin")
@@ -494,11 +809,21 @@ pub fn routes() -> Routes {
.add("/session", delete(session_logout))
.add("/session/login", post(session_login))
.add("/dashboard", get(dashboard))
.add("/analytics", get(analytics_overview))
.add("/site-settings", get(get_site_settings))
.add("/site-settings", patch(update_site_settings))
.add("/site-settings", put(update_site_settings))
.add("/ai/reindex", post(reindex_ai))
.add("/ai/test-provider", post(test_ai_provider))
.add("/ai/test-image-provider", post(test_ai_image_provider))
.add("/storage/r2/test", post(test_r2_storage))
.add(
"/storage/media",
get(list_media_objects).delete(delete_media_object),
)
.add("/ai/post-metadata", post(generate_post_metadata))
.add("/ai/polish-post", post(polish_post_markdown))
.add("/ai/polish-review", post(polish_review_description))
.add("/ai/post-cover", post(generate_post_cover_image))
.add("/storage/review-cover", post(upload_review_cover_image))
}

View File

@@ -5,15 +5,19 @@ use axum::{
body::{Body, Bytes},
http::{
header::{CACHE_CONTROL, CONNECTION, CONTENT_TYPE},
HeaderValue,
HeaderMap, HeaderValue,
},
};
use chrono::{DateTime, Utc};
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::time::Instant;
use crate::{controllers::admin::check_auth, services::ai};
use crate::{
controllers::{admin::check_auth, site_settings},
services::{ai, analytics},
};
#[derive(Clone, Debug, Deserialize)]
pub struct AskPayload {
@@ -55,6 +59,30 @@ fn format_timestamp(value: Option<DateTime<Utc>>) -> Option<String> {
value.map(|item| item.to_rfc3339())
}
/// Trim the wrapped string and collapse a blank result to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let raw = value?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Best-effort lookup of the configured AI provider name and chat model for
/// analytics tagging; returns `(None, None)` (after logging a warning) when
/// the site settings cannot be loaded.
async fn current_provider_metadata(ctx: &AppContext) -> (Option<String>, Option<String>) {
    let settings = match site_settings::load_current(ctx).await {
        Ok(settings) => settings,
        Err(error) => {
            tracing::warn!("failed to load ai provider metadata for analytics: {error}");
            return (None, None);
        }
    };
    (
        trim_to_option(settings.ai_provider),
        trim_to_option(settings.ai_chat_model),
    )
}
fn sse_bytes<T: Serialize>(event: &str, payload: &T) -> Bytes {
let data = serde_json::to_string(payload)
.unwrap_or_else(|_| "{\"message\":\"failed to serialize SSE payload\"}".to_string());
@@ -178,24 +206,66 @@ fn build_ask_response(prepared: &ai::PreparedAiAnswer, answer: String) -> AskRes
#[debug_handler]
pub async fn ask(
State(ctx): State<AppContext>,
headers: HeaderMap,
Json(payload): Json<AskPayload>,
) -> Result<Response> {
let result = ai::answer_question(&ctx, &payload.question).await?;
format::json(AskResponse {
question: payload.question.trim().to_string(),
answer: result.answer,
sources: result.sources,
indexed_chunks: result.indexed_chunks,
last_indexed_at: format_timestamp(result.last_indexed_at),
})
let started_at = Instant::now();
let question = payload.question.trim().to_string();
let (provider, chat_model) = current_provider_metadata(&ctx).await;
match ai::answer_question(&ctx, &payload.question).await {
Ok(result) => {
analytics::record_ai_question_event(
&ctx,
&question,
&headers,
true,
"sync",
provider,
chat_model,
Some(result.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
format::json(AskResponse {
question,
answer: result.answer,
sources: result.sources,
indexed_chunks: result.indexed_chunks,
last_indexed_at: format_timestamp(result.last_indexed_at),
})
}
Err(error) => {
analytics::record_ai_question_event(
&ctx,
&question,
&headers,
false,
"sync",
provider,
chat_model,
None,
started_at.elapsed().as_millis() as i64,
)
.await;
Err(error)
}
}
}
#[debug_handler]
pub async fn ask_stream(
State(ctx): State<AppContext>,
headers: HeaderMap,
Json(payload): Json<AskPayload>,
) -> Result<Response> {
let request_headers = headers.clone();
let question = payload.question.trim().to_string();
let (fallback_provider, fallback_chat_model) = current_provider_metadata(&ctx).await;
let stream = stream! {
let started_at = Instant::now();
yield Ok::<Bytes, std::io::Error>(sse_bytes("status", &StreamStatusEvent {
phase: "retrieving".to_string(),
message: "正在检索知识库上下文...".to_string(),
@@ -204,6 +274,18 @@ pub async fn ask_stream(
let prepared = match ai::prepare_answer(&ctx, &payload.question).await {
Ok(prepared) => prepared,
Err(error) => {
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
fallback_provider.clone(),
fallback_chat_model.clone(),
None,
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: error.to_string(),
}));
@@ -212,6 +294,16 @@ pub async fn ask_stream(
};
let mut accumulated_answer = String::new();
let active_provider = prepared
.provider_request
.as_ref()
.map(|request| request.provider.clone())
.or_else(|| fallback_provider.clone());
let active_chat_model = prepared
.provider_request
.as_ref()
.map(|request| request.chat_model.clone())
.or_else(|| fallback_chat_model.clone());
if let Some(answer) = prepared.immediate_answer.as_deref() {
yield Ok(sse_bytes("status", &StreamStatusEvent {
@@ -241,6 +333,18 @@ pub async fn ask_stream(
let mut response = match response {
Ok(response) => response,
Err(error) => {
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
active_provider.clone(),
active_chat_model.clone(),
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: format!("AI request failed: {error}"),
}));
@@ -251,6 +355,18 @@ pub async fn ask_stream(
if !response.status().is_success() {
let status = response.status();
let body = response.text().await.unwrap_or_default();
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
active_provider.clone(),
active_chat_model.clone(),
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: format!("AI provider returned {status}: {body}"),
}));
@@ -265,6 +381,18 @@ pub async fn ask_stream(
let Some(chunk) = (match next_chunk {
Ok(chunk) => chunk,
Err(error) => {
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
active_provider.clone(),
active_chat_model.clone(),
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: format!("AI stream read failed: {error}"),
}));
@@ -323,6 +451,18 @@ pub async fn ask_stream(
}
if accumulated_answer.is_empty() {
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
false,
"stream",
active_provider.clone(),
active_chat_model.clone(),
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
yield Ok(sse_bytes("error", &StreamErrorEvent {
message: "AI chat response did not contain readable content".to_string(),
}));
@@ -330,6 +470,19 @@ pub async fn ask_stream(
}
}
analytics::record_ai_question_event(
&ctx,
&question,
&request_headers,
true,
"stream",
active_provider,
active_chat_model,
Some(prepared.sources.len()),
started_at.elapsed().as_millis() as i64,
)
.await;
let final_payload = build_ask_response(&prepared, accumulated_answer);
yield Ok(sse_bytes("complete", &final_payload));
};

View File

@@ -3,7 +3,10 @@ use loco_rs::prelude::*;
use sea_orm::{EntityTrait, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use crate::models::_entities::reviews::{self, Entity as ReviewEntity};
use crate::{
models::_entities::reviews::{self, Entity as ReviewEntity},
services::storage,
};
#[derive(Serialize, Deserialize, Debug)]
pub struct CreateReviewRequest {
@@ -83,9 +86,11 @@ pub async fn update(
) -> Result<impl IntoResponse> {
let review = ReviewEntity::find_by_id(id).one(&ctx.db).await?;
let Some(mut review) = review.map(|r| r.into_active_model()) else {
let Some(existing_review) = review else {
return Err(Error::NotFound);
};
let old_cover = existing_review.cover.clone();
let mut review = existing_review.into_active_model();
if let Some(title) = req.title {
review.title = Set(Some(title));
@@ -108,7 +113,9 @@ pub async fn update(
if let Some(tags) = req.tags {
review.tags = Set(Some(serde_json::to_string(&tags).unwrap_or_default()));
}
let mut next_cover = old_cover.clone();
if let Some(cover) = req.cover {
next_cover = Some(cover.clone());
review.cover = Set(Some(cover));
}
if let Some(link_url) = req.link_url {
@@ -117,6 +124,14 @@ pub async fn update(
}
let review = review.update(&ctx.db).await?;
if let Some(old_cover) = old_cover
.filter(|old| Some(old.clone()) != next_cover)
.filter(|old| !old.trim().is_empty())
{
if let Err(error) = storage::delete_managed_url(&ctx, &old_cover).await {
tracing::warn!("failed to cleanup replaced review cover: {error}");
}
}
format::json(review)
}
@@ -128,7 +143,13 @@ pub async fn remove(
match review {
Some(r) => {
let cover = r.cover.clone();
r.delete(&ctx.db).await?;
if let Some(cover) = cover.filter(|value| !value.trim().is_empty()) {
if let Err(error) = storage::delete_managed_url(&ctx, &cover).await {
tracing::warn!("failed to cleanup deleted review cover: {error}");
}
}
format::empty()
}
None => Err(Error::NotFound),

View File

@@ -1,15 +1,46 @@
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sea_orm::{ConnectionTrait, DatabaseBackend, DbBackend, FromQueryResult, Statement};
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value;
use std::time::Instant;
use crate::models::_entities::posts;
use crate::services::content;
use crate::services::{analytics, content};
/// Deserialize an optional query-string boolean that may arrive as a string
/// token: "1"/"true"/"yes"/"on" → `Some(true)`, "0"/"false"/"no"/"off" →
/// `Some(false)`, absent → `None`. Any other token is a deserialization error.
fn deserialize_boolish_option<'de, D>(
    deserializer: D,
) -> std::result::Result<Option<bool>, D::Error>
where
    D: Deserializer<'de>,
{
    let raw = Option::<String>::deserialize(deserializer)?;
    let Some(token) = raw else {
        return Ok(None);
    };
    match token.trim().to_ascii_lowercase().as_str() {
        "1" | "true" | "yes" | "on" => Ok(Some(true)),
        "0" | "false" | "no" | "off" => Ok(Some(false)),
        other => Err(serde::de::Error::custom(format!(
            "invalid boolean value `{other}`"
        ))),
    }
}
/// True when the request should be treated as a preview search, either via the
/// `preview` query flag or an `x-termi-search-mode: preview` header
/// (case-insensitive value comparison).
fn is_preview_search(query: &SearchQuery, headers: &HeaderMap) -> bool {
    if query.preview.unwrap_or(false) {
        return true;
    }
    match headers.get("x-termi-search-mode") {
        Some(value) => value
            .to_str()
            .map(|mode| mode.eq_ignore_ascii_case("preview"))
            .unwrap_or(false),
        None => false,
    }
}
/// Query-string parameters accepted by the search endpoint.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct SearchQuery {
/// Search term; the handler treats an absent value as an empty string.
pub q: Option<String>,
/// Maximum number of results to return.
pub limit: Option<u64>,
/// String-ish boolean ("1/true/yes/on", "0/false/no/off" — see
/// `deserialize_boolish_option`); preview searches skip analytics recording.
#[serde(default, deserialize_with = "deserialize_boolish_option")]
pub preview: Option<bool>,
}
#[derive(Clone, Debug, Serialize, FromQueryResult)]
@@ -157,7 +188,10 @@ async fn fallback_search(ctx: &AppContext, q: &str, limit: u64) -> Result<Vec<Se
pub async fn search(
Query(query): Query<SearchQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
let started_at = Instant::now();
let preview_search = is_preview_search(&query, &headers);
content::sync_markdown_posts(&ctx).await?;
let q = query.q.unwrap_or_default().trim().to_string();
@@ -186,6 +220,17 @@ pub async fn search(
fallback_search(&ctx, &q, limit).await?
};
if !preview_search {
analytics::record_search_event(
&ctx,
&q,
results.len(),
&headers,
started_at.elapsed().as_millis() as i64,
)
.await;
}
format::json(results)
}

View File

@@ -44,6 +44,8 @@ pub struct AiProviderConfig {
pub api_key: Option<String>,
#[serde(default, alias = "chatModel")]
pub chat_model: Option<String>,
#[serde(default, alias = "imageModel")]
pub image_model: Option<String>,
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
@@ -94,6 +96,14 @@ pub struct SiteSettingsPayload {
pub ai_api_key: Option<String>,
#[serde(default, alias = "aiChatModel")]
pub ai_chat_model: Option<String>,
#[serde(default, alias = "aiImageProvider")]
pub ai_image_provider: Option<String>,
#[serde(default, alias = "aiImageApiBase")]
pub ai_image_api_base: Option<String>,
#[serde(default, alias = "aiImageApiKey")]
pub ai_image_api_key: Option<String>,
#[serde(default, alias = "aiImageModel")]
pub ai_image_model: Option<String>,
#[serde(default, alias = "aiProviders")]
pub ai_providers: Option<Vec<AiProviderConfig>>,
#[serde(default, alias = "aiActiveProviderId")]
@@ -106,6 +116,18 @@ pub struct SiteSettingsPayload {
pub ai_top_k: Option<i32>,
#[serde(default, alias = "aiChunkSize")]
pub ai_chunk_size: Option<i32>,
#[serde(default, alias = "mediaR2AccountId")]
pub media_r2_account_id: Option<String>,
#[serde(default, alias = "mediaStorageProvider")]
pub media_storage_provider: Option<String>,
#[serde(default, alias = "mediaR2Bucket")]
pub media_r2_bucket: Option<String>,
#[serde(default, alias = "mediaR2PublicBaseUrl")]
pub media_r2_public_base_url: Option<String>,
#[serde(default, alias = "mediaR2AccessKeyId")]
pub media_r2_access_key_id: Option<String>,
#[serde(default, alias = "mediaR2SecretAccessKey")]
pub media_r2_secret_access_key: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
@@ -152,13 +174,16 @@ fn create_ai_provider_id() -> String {
}
fn default_ai_provider_config() -> AiProviderConfig {
let provider = ai::provider_name(None);
AiProviderConfig {
id: "default".to_string(),
name: "默认提供商".to_string(),
provider: ai::provider_name(None),
provider: provider.clone(),
api_base: Some(ai::default_api_base().to_string()),
api_key: Some(ai::default_api_key().to_string()),
chat_model: Some(ai::default_chat_model().to_string()),
image_model: Some(ai::default_image_model_for_provider(&provider).to_string()),
}
}
@@ -174,11 +199,13 @@ fn normalize_ai_provider_configs(items: Vec<AiProviderConfig>) -> Vec<AiProvider
let api_base = normalize_optional_string(item.api_base);
let api_key = normalize_optional_string(item.api_key);
let chat_model = normalize_optional_string(item.chat_model);
let image_model = normalize_optional_string(item.image_model);
let has_content = !item.name.trim().is_empty()
|| !provider.trim().is_empty()
|| api_base.is_some()
|| api_key.is_some()
|| chat_model.is_some();
|| chat_model.is_some()
|| image_model.is_some();
if !has_content {
return None;
@@ -201,6 +228,7 @@ fn normalize_ai_provider_configs(items: Vec<AiProviderConfig>) -> Vec<AiProvider
api_base,
api_key,
chat_model,
image_model,
})
})
.collect()
@@ -216,13 +244,16 @@ fn legacy_ai_provider_config(model: &Model) -> Option<AiProviderConfig> {
return None;
}
let normalized_provider = provider.unwrap_or_else(|| ai::provider_name(None));
Some(AiProviderConfig {
id: "default".to_string(),
name: "当前提供商".to_string(),
provider: provider.unwrap_or_else(|| ai::provider_name(None)),
provider: normalized_provider.clone(),
api_base,
api_key,
chat_model,
image_model: Some(ai::default_image_model_for_provider(&normalized_provider).to_string()),
})
}
@@ -301,6 +332,8 @@ fn update_active_provider_from_legacy_fields(model: &mut Model) {
config.api_base = api_base;
config.api_key = api_key;
config.chat_model = chat_model;
config.image_model =
Some(ai::default_image_model_for_provider(&config.provider).to_string());
write_ai_provider_state(
model,
vec![config],
@@ -322,6 +355,10 @@ fn update_active_provider_from_legacy_fields(model: &mut Model) {
config.api_base = api_base.clone();
config.api_key = api_key.clone();
config.chat_model = chat_model.clone();
if config.image_model.is_none() {
config.image_model =
Some(ai::default_image_model_for_provider(&config.provider).to_string());
}
}
}
}
@@ -425,6 +462,18 @@ impl SiteSettingsPayload {
if let Some(ai_chat_model) = self.ai_chat_model {
item.ai_chat_model = normalize_optional_string(Some(ai_chat_model));
}
if let Some(ai_image_provider) = self.ai_image_provider {
item.ai_image_provider = normalize_optional_string(Some(ai_image_provider));
}
if let Some(ai_image_api_base) = self.ai_image_api_base {
item.ai_image_api_base = normalize_optional_string(Some(ai_image_api_base));
}
if let Some(ai_image_api_key) = self.ai_image_api_key {
item.ai_image_api_key = normalize_optional_string(Some(ai_image_api_key));
}
if let Some(ai_image_model) = self.ai_image_model {
item.ai_image_model = normalize_optional_string(Some(ai_image_model));
}
if let Some(ai_embedding_model) = self.ai_embedding_model {
item.ai_embedding_model = normalize_optional_string(Some(ai_embedding_model));
}
@@ -437,6 +486,26 @@ impl SiteSettingsPayload {
if self.ai_chunk_size.is_some() {
item.ai_chunk_size = normalize_optional_int(self.ai_chunk_size, 400, 4000);
}
if let Some(media_r2_account_id) = self.media_r2_account_id {
item.media_r2_account_id = normalize_optional_string(Some(media_r2_account_id));
}
if let Some(media_storage_provider) = self.media_storage_provider {
item.media_storage_provider = normalize_optional_string(Some(media_storage_provider));
}
if let Some(media_r2_bucket) = self.media_r2_bucket {
item.media_r2_bucket = normalize_optional_string(Some(media_r2_bucket));
}
if let Some(media_r2_public_base_url) = self.media_r2_public_base_url {
item.media_r2_public_base_url =
normalize_optional_string(Some(media_r2_public_base_url));
}
if let Some(media_r2_access_key_id) = self.media_r2_access_key_id {
item.media_r2_access_key_id = normalize_optional_string(Some(media_r2_access_key_id));
}
if let Some(media_r2_secret_access_key) = self.media_r2_secret_access_key {
item.media_r2_secret_access_key =
normalize_optional_string(Some(media_r2_secret_access_key));
}
if provider_list_supplied {
write_ai_provider_state(
@@ -524,6 +593,10 @@ fn default_payload() -> SiteSettingsPayload {
ai_api_base: Some(ai::default_api_base().to_string()),
ai_api_key: Some(ai::default_api_key().to_string()),
ai_chat_model: Some(ai::default_chat_model().to_string()),
ai_image_provider: None,
ai_image_api_base: None,
ai_image_api_key: None,
ai_image_model: None,
ai_providers: Some(vec![default_ai_provider_config()]),
ai_active_provider_id: Some("default".to_string()),
ai_embedding_model: Some(ai::local_embedding_label().to_string()),
@@ -533,6 +606,12 @@ fn default_payload() -> SiteSettingsPayload {
),
ai_top_k: Some(4),
ai_chunk_size: Some(1200),
media_storage_provider: None,
media_r2_account_id: None,
media_r2_bucket: None,
media_r2_public_base_url: None,
media_r2_access_key_id: None,
media_r2_secret_access_key: None,
}
}
@@ -553,7 +632,11 @@ pub(crate) async fn load_current(ctx: &AppContext) -> Result<Model> {
.await?;
let mut model = inserted;
default_payload().apply(&mut model);
Ok(model.into_active_model().update(&ctx.db).await?)
Ok(model
.into_active_model()
.reset_all()
.update(&ctx.db)
.await?)
}
fn public_response(model: Model) -> PublicSiteSettingsResponse {
@@ -596,7 +679,7 @@ pub async fn update(
let current = load_current(&ctx).await?;
let mut item = current;
params.apply(&mut item);
let item = item.into_active_model();
let item = item.into_active_model().reset_all();
let updated = item.update(&ctx.db).await?;
format::json(public_response(updated))
}