chore: checkpoint ai search comments and i18n foundation

This commit is contained in:
2026-03-28 17:17:31 +08:00
parent d18a709987
commit ec96d91548
71 changed files with 9494 additions and 423 deletions

View File

@@ -6,7 +6,11 @@ use loco_rs::prelude::*;
use sea_orm::{ActiveModelTrait, EntityTrait, IntoActiveModel, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use crate::models::_entities::site_settings::{self, ActiveModel, Entity, Model};
use crate::{
controllers::admin::check_auth,
models::_entities::site_settings::{self, ActiveModel, Entity, Model},
services::ai,
};
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct SiteSettingsPayload {
@@ -42,6 +46,46 @@ pub struct SiteSettingsPayload {
pub location: Option<String>,
#[serde(default, alias = "techStack")]
pub tech_stack: Option<Vec<String>>,
#[serde(default, alias = "aiEnabled")]
pub ai_enabled: Option<bool>,
#[serde(default, alias = "aiProvider")]
pub ai_provider: Option<String>,
#[serde(default, alias = "aiApiBase")]
pub ai_api_base: Option<String>,
#[serde(default, alias = "aiApiKey")]
pub ai_api_key: Option<String>,
#[serde(default, alias = "aiChatModel")]
pub ai_chat_model: Option<String>,
#[serde(default, alias = "aiEmbeddingModel")]
pub ai_embedding_model: Option<String>,
#[serde(default, alias = "aiSystemPrompt")]
pub ai_system_prompt: Option<String>,
#[serde(default, alias = "aiTopK")]
pub ai_top_k: Option<i32>,
#[serde(default, alias = "aiChunkSize")]
pub ai_chunk_size: Option<i32>,
}
/// Public-facing projection of the site settings row.
///
/// Deliberately narrower than the persisted `Model`: the AI provider
/// configuration (api base, api key, chat/embedding models, system prompt,
/// top_k, chunk size) is omitted so secrets never reach unauthenticated
/// clients — only the boolean `ai_enabled` flag is surfaced.
#[derive(Clone, Debug, Serialize)]
pub struct PublicSiteSettingsResponse {
pub id: i32,
// Site branding / SEO strings (all optional, may be unset).
pub site_name: Option<String>,
pub site_short_name: Option<String>,
pub site_url: Option<String>,
pub site_title: Option<String>,
pub site_description: Option<String>,
// Landing-page hero copy.
pub hero_title: Option<String>,
pub hero_subtitle: Option<String>,
// Owner profile shown on the site.
pub owner_name: Option<String>,
pub owner_title: Option<String>,
pub owner_bio: Option<String>,
pub owner_avatar_url: Option<String>,
// Social / contact links.
pub social_github: Option<String>,
pub social_twitter: Option<String>,
pub social_email: Option<String>,
pub location: Option<String>,
// Stored as a JSON array of strings (see `apply`, which sets it via `serde_json::json!`).
pub tech_stack: Option<serde_json::Value>,
// Defaults to `false` when the DB column is NULL (see `public_response`).
pub ai_enabled: bool,
}
fn normalize_optional_string(value: Option<String>) -> Option<String> {
@@ -55,6 +99,10 @@ fn normalize_optional_string(value: Option<String>) -> Option<String> {
})
}
/// Clamp an optional integer into the inclusive range `[min, max]`.
///
/// `None` passes through unchanged; `Some(v)` is forced into bounds.
/// Note: `i32::clamp` panics if `min > max` — callers pass literal
/// constants, so that invariant holds at every call site.
fn normalize_optional_int(value: Option<i32>, min: i32, max: i32) -> Option<i32> {
    // Early-return on None, then clamp the payload into range.
    let raw = value?;
    Some(raw.clamp(min, max))
}
impl SiteSettingsPayload {
fn apply(self, item: &mut ActiveModel) {
if let Some(site_name) = self.site_name {
@@ -105,6 +153,33 @@ impl SiteSettingsPayload {
if let Some(tech_stack) = self.tech_stack {
item.tech_stack = Set(Some(serde_json::json!(tech_stack)));
}
if let Some(ai_enabled) = self.ai_enabled {
item.ai_enabled = Set(Some(ai_enabled));
}
if let Some(ai_provider) = self.ai_provider {
item.ai_provider = Set(normalize_optional_string(Some(ai_provider)));
}
if let Some(ai_api_base) = self.ai_api_base {
item.ai_api_base = Set(normalize_optional_string(Some(ai_api_base)));
}
if let Some(ai_api_key) = self.ai_api_key {
item.ai_api_key = Set(normalize_optional_string(Some(ai_api_key)));
}
if let Some(ai_chat_model) = self.ai_chat_model {
item.ai_chat_model = Set(normalize_optional_string(Some(ai_chat_model)));
}
if let Some(ai_embedding_model) = self.ai_embedding_model {
item.ai_embedding_model = Set(normalize_optional_string(Some(ai_embedding_model)));
}
if let Some(ai_system_prompt) = self.ai_system_prompt {
item.ai_system_prompt = Set(normalize_optional_string(Some(ai_system_prompt)));
}
if self.ai_top_k.is_some() {
item.ai_top_k = Set(normalize_optional_int(self.ai_top_k, 1, 12));
}
if self.ai_chunk_size.is_some() {
item.ai_chunk_size = Set(normalize_optional_int(self.ai_chunk_size, 400, 4000));
}
}
}
@@ -134,10 +209,22 @@ fn default_payload() -> SiteSettingsPayload {
"Tailwind CSS".to_string(),
"TypeScript".to_string(),
]),
ai_enabled: Some(false),
ai_provider: Some(ai::provider_name(None)),
ai_api_base: Some(ai::default_api_base().to_string()),
ai_api_key: Some(ai::default_api_key().to_string()),
ai_chat_model: Some(ai::default_chat_model().to_string()),
ai_embedding_model: Some(ai::local_embedding_label().to_string()),
ai_system_prompt: Some(
"你是这个博客的站内 AI 助手。请优先基于提供的上下文回答,答案要准确、简洁、实用;如果上下文不足,请明确说明。"
.to_string(),
),
ai_top_k: Some(4),
ai_chunk_size: Some(1200),
}
}
async fn load_current(ctx: &AppContext) -> Result<Model> {
pub(crate) async fn load_current(ctx: &AppContext) -> Result<Model> {
if let Some(settings) = Entity::find()
.order_by_asc(site_settings::Column::Id)
.one(&ctx.db)
@@ -154,9 +241,32 @@ async fn load_current(ctx: &AppContext) -> Result<Model> {
Ok(item.insert(&ctx.db).await?)
}
fn public_response(model: Model) -> PublicSiteSettingsResponse {
PublicSiteSettingsResponse {
id: model.id,
site_name: model.site_name,
site_short_name: model.site_short_name,
site_url: model.site_url,
site_title: model.site_title,
site_description: model.site_description,
hero_title: model.hero_title,
hero_subtitle: model.hero_subtitle,
owner_name: model.owner_name,
owner_title: model.owner_title,
owner_bio: model.owner_bio,
owner_avatar_url: model.owner_avatar_url,
social_github: model.social_github,
social_twitter: model.social_twitter,
social_email: model.social_email,
location: model.location,
tech_stack: model.tech_stack,
ai_enabled: model.ai_enabled.unwrap_or(false),
}
}
/// `GET` handler for the site settings endpoint.
///
/// Loads the current settings row (creating a default one if none exists —
/// see `load_current`) and serializes only the public projection, so fields
/// absent from `PublicSiteSettingsResponse` (e.g. the AI API key) are never
/// sent to clients.
///
/// Fix: this span carried diff residue — both the old statement
/// (`format::json(load_current(...))`, which leaked the raw model) and the
/// new one were present back-to-back, which is not valid Rust. Only the
/// intended new statement is kept.
#[debug_handler]
pub async fn show(State(ctx): State<AppContext>) -> Result<Response> {
    format::json(public_response(load_current(&ctx).await?))
}
#[debug_handler]
@@ -164,10 +274,13 @@ pub async fn update(
State(ctx): State<AppContext>,
Json(params): Json<SiteSettingsPayload>,
) -> Result<Response> {
check_auth()?;
let current = load_current(&ctx).await?;
let mut item = current.into_active_model();
params.apply(&mut item);
format::json(item.update(&ctx.db).await?)
let updated = item.update(&ctx.db).await?;
format::json(public_response(updated))
}
pub fn routes() -> Routes {