chore: checkpoint ai search comments and i18n foundation

This commit is contained in:
2026-03-28 17:17:31 +08:00
parent d18a709987
commit ec96d91548
71 changed files with 9494 additions and 423 deletions

View File

@@ -56,6 +56,13 @@ fn is_blank(value: &Option<String>) -> bool {
value.as_deref().map(str::trim).unwrap_or("").is_empty()
}
/// Reports whether the stored AI settings still hold the legacy
/// OpenAI-compatible defaults with no API key configured — i.e. the user
/// never customized them, so the seeder may safely overwrite them.
fn matches_legacy_ai_defaults(settings: &site_settings::Model) -> bool {
    // Borrow a field as a whitespace-trimmed &str, if present.
    fn trimmed(field: &Option<String>) -> Option<&str> {
        field.as_deref().map(str::trim)
    }
    // All conjuncts are pure, so evaluation order is irrelevant;
    // the cheapest check (blank key) goes first.
    is_blank(&settings.ai_api_key)
        && trimmed(&settings.ai_provider) == Some("openai-compatible")
        && trimmed(&settings.ai_api_base) == Some("https://api.openai.com/v1")
        && trimmed(&settings.ai_chat_model) == Some("gpt-4.1-mini")
}
async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
let rows = read_fixture_rows(base, "site_settings.yaml");
let Some(seed) = rows.first() else {
@@ -81,6 +88,7 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
if let Some(existing) = existing {
let mut model = existing.clone().into_active_model();
let should_upgrade_legacy_ai_defaults = matches_legacy_ai_defaults(&existing);
if is_blank(&existing.site_name) {
model.site_name = Set(as_optional_string(&seed["site_name"]));
@@ -130,6 +138,39 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
if existing.tech_stack.is_none() {
model.tech_stack = Set(tech_stack);
}
if existing.ai_enabled.is_none() {
model.ai_enabled = Set(seed["ai_enabled"].as_bool());
}
if should_upgrade_legacy_ai_defaults {
model.ai_provider = Set(as_optional_string(&seed["ai_provider"]));
model.ai_api_base = Set(as_optional_string(&seed["ai_api_base"]));
model.ai_api_key = Set(as_optional_string(&seed["ai_api_key"]));
model.ai_chat_model = Set(as_optional_string(&seed["ai_chat_model"]));
}
if is_blank(&existing.ai_provider) {
model.ai_provider = Set(as_optional_string(&seed["ai_provider"]));
}
if is_blank(&existing.ai_api_base) {
model.ai_api_base = Set(as_optional_string(&seed["ai_api_base"]));
}
if is_blank(&existing.ai_api_key) {
model.ai_api_key = Set(as_optional_string(&seed["ai_api_key"]));
}
if is_blank(&existing.ai_chat_model) {
model.ai_chat_model = Set(as_optional_string(&seed["ai_chat_model"]));
}
if is_blank(&existing.ai_embedding_model) {
model.ai_embedding_model = Set(as_optional_string(&seed["ai_embedding_model"]));
}
if is_blank(&existing.ai_system_prompt) {
model.ai_system_prompt = Set(as_optional_string(&seed["ai_system_prompt"]));
}
if existing.ai_top_k.is_none() {
model.ai_top_k = Set(seed["ai_top_k"].as_i64().map(|value| value as i32));
}
if existing.ai_chunk_size.is_none() {
model.ai_chunk_size = Set(seed["ai_chunk_size"].as_i64().map(|value| value as i32));
}
let _ = model.update(&ctx.db).await;
return Ok(());
@@ -153,6 +194,15 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
social_email: Set(as_optional_string(&seed["social_email"])),
location: Set(as_optional_string(&seed["location"])),
tech_stack: Set(tech_stack),
ai_enabled: Set(seed["ai_enabled"].as_bool()),
ai_provider: Set(as_optional_string(&seed["ai_provider"])),
ai_api_base: Set(as_optional_string(&seed["ai_api_base"])),
ai_api_key: Set(as_optional_string(&seed["ai_api_key"])),
ai_chat_model: Set(as_optional_string(&seed["ai_chat_model"])),
ai_embedding_model: Set(as_optional_string(&seed["ai_embedding_model"])),
ai_system_prompt: Set(as_optional_string(&seed["ai_system_prompt"])),
ai_top_k: Set(seed["ai_top_k"].as_i64().map(|value| value as i32)),
ai_chunk_size: Set(seed["ai_chunk_size"].as_i64().map(|value| value as i32)),
..Default::default()
};