chore: checkpoint admin editor and perf work

This commit is contained in:
2026-03-31 00:12:02 +08:00
parent 92a85eef20
commit 99b308e800
45 changed files with 7265 additions and 833 deletions

View File

@@ -4,7 +4,7 @@ use fastembed::{
InitOptionsUserDefined, Pooling, TextEmbedding, TokenizerFiles, UserDefinedEmbeddingModel,
};
use loco_rs::prelude::*;
use reqwest::{Client, Url};
use reqwest::{header::CONTENT_TYPE, multipart, Client, Url};
use sea_orm::{
ActiveModelTrait, ConnectionTrait, DbBackend, EntityTrait, FromQueryResult, IntoActiveModel,
PaginatorTrait, QueryOrder, Set, Statement,
@@ -17,11 +17,12 @@ use std::sync::{Mutex, OnceLock};
use uuid::Uuid;
use crate::{
controllers::site_settings as site_settings_controller,
models::_entities::{ai_chunks, site_settings},
services::content,
services::{content, storage},
};
const DEFAULT_AI_PROVIDER: &str = "newapi";
const DEFAULT_AI_PROVIDER: &str = "openai";
const DEFAULT_AI_API_BASE: &str = "https://91code.jiangnight.com/v1";
const DEFAULT_AI_API_KEY: &str =
"sk-5a5e27db9fb8f8ee7e1d8e3c6a44638c2e50cdb0a0cf9d926fefb5418ff62571";
@@ -29,6 +30,8 @@ const DEFAULT_CHAT_MODEL: &str = "gpt-5.4";
const DEFAULT_REASONING_EFFORT: &str = "medium";
const DEFAULT_DISABLE_RESPONSE_STORAGE: bool = true;
const DEFAULT_IMAGE_MODEL: &str = "gpt-image-1";
const DEFAULT_CLOUDFLARE_CHAT_MODEL: &str = "@cf/meta/llama-3.1-8b-instruct";
const DEFAULT_CLOUDFLARE_IMAGE_MODEL: &str = "@cf/black-forest-labs/flux-2-klein-4b";
const DEFAULT_TOP_K: usize = 4;
const DEFAULT_CHUNK_SIZE: usize = 1200;
const DEFAULT_SYSTEM_PROMPT: &str =
@@ -49,6 +52,14 @@ const LOCAL_EMBEDDING_FILES: [&str; 5] = [
static TEXT_EMBEDDING_MODEL: OnceLock<Mutex<TextEmbedding>> = OnceLock::new();
/// Resolved runtime settings for the image-generation provider, which may be
/// a dedicated configuration or inherited from the chat provider.
#[derive(Clone, Debug)]
struct AiImageRuntimeSettings {
    /// Provider id, e.g. "openai" or "cloudflare".
    provider: String,
    /// Image API base URL; `None` when not configured.
    api_base: Option<String>,
    /// Image API key; `None` when not configured.
    api_key: Option<String>,
    /// Model identifier sent with generation requests.
    image_model: String,
}
#[derive(Clone, Debug)]
struct AiRuntimeSettings {
raw: site_settings::Model,
@@ -56,6 +67,7 @@ struct AiRuntimeSettings {
api_base: Option<String>,
api_key: Option<String>,
chat_model: String,
image: AiImageRuntimeSettings,
system_prompt: String,
top_k: usize,
chunk_size: usize,
@@ -121,6 +133,11 @@ pub struct PolishedPostMarkdown {
pub polished_markdown: String,
}
/// Result of polishing a review blurb (remote AI or local fallback).
#[derive(Clone, Debug, Serialize)]
pub struct PolishedReviewDescription {
    /// The rewritten description text.
    pub polished_description: String,
}
#[derive(Clone, Debug, Serialize)]
pub struct GeneratedPostCoverImage {
pub image_url: String,
@@ -135,6 +152,14 @@ pub struct AiProviderConnectivityResult {
pub reply_preview: String,
}
/// Outcome of an admin "test image provider" probe, for display in the UI.
#[derive(Clone, Debug, Serialize)]
pub struct AiImageProviderConnectivityResult {
    /// Provider id that was probed.
    pub provider: String,
    /// Fully resolved generation endpoint that was called.
    pub endpoint: String,
    /// Image model used for the probe.
    pub image_model: String,
    /// Short human-readable summary (URL or base64 size).
    pub result_preview: String,
}
#[derive(Clone, Debug, Default, Deserialize)]
struct GeneratedPostMetadataDraft {
title: Option<String>,
@@ -227,16 +252,57 @@ fn build_endpoint(api_base: &str, path: &str) -> String {
)
}
/// True when `provider` names the Cloudflare Workers AI backend under any of
/// its accepted aliases (case-insensitive).
fn provider_uses_cloudflare(provider: &str) -> bool {
    const ALIASES: [&str; 3] = ["cloudflare", "cloudflare-workers-ai", "workers-ai"];
    ALIASES
        .iter()
        .any(|alias| provider.eq_ignore_ascii_case(alias))
}
/// True when the provider's API base should be given the OpenAI-style path
/// prefix (Responses-style providers plus explicit "openai-compatible").
fn provider_uses_openai_api_prefix(provider: &str) -> bool {
    if provider.eq_ignore_ascii_case("openai-compatible") {
        return true;
    }
    provider_uses_responses(provider)
}
/// Canonicalize a Cloudflare Workers AI base value.
///
/// A bare account id expands to the full `api.cloudflare.com` accounts URL; a
/// full URL containing an `accounts/{id}` segment is reduced to its
/// `/client/v4/accounts/{id}` root. Unparseable URLs are returned trimmed.
fn normalize_cloudflare_api_base(api_base: &str) -> String {
    let trimmed = api_base.trim().trim_end_matches('/');
    if trimmed.is_empty() {
        return String::new();
    }
    let has_scheme = trimmed.starts_with("http://") || trimmed.starts_with("https://");
    if !has_scheme {
        // No scheme: treat the whole value as a bare account id.
        return format!(
            "https://api.cloudflare.com/client/v4/accounts/{}",
            trimmed.trim_matches('/'),
        );
    }
    match Url::parse(trimmed) {
        Err(_) => trimmed.to_string(),
        Ok(mut parsed) => {
            // Own the segments so the URL can be mutated afterwards.
            let segments: Vec<String> = parsed
                .path_segments()
                .map(|items| items.map(str::to_string).collect())
                .unwrap_or_default();
            let account_id = segments
                .iter()
                .position(|segment| segment == "accounts")
                .and_then(|index| segments.get(index + 1));
            if let Some(account_id) = account_id {
                parsed.set_path(&format!("/client/v4/accounts/{account_id}"));
            }
            parsed.to_string().trim_end_matches('/').to_string()
        }
    }
}
fn normalize_provider_api_base(provider: &str, api_base: &str) -> String {
let trimmed = api_base.trim();
if trimmed.is_empty() {
return String::new();
}
if provider_uses_cloudflare(provider) {
return normalize_cloudflare_api_base(trimmed);
}
if !provider_uses_openai_api_prefix(provider) {
return trimmed.trim_end_matches('/').to_string();
}
@@ -604,6 +670,19 @@ fn parse_provider_sse_body(body: &str) -> Option<Value> {
latest_response.or(latest_payload)
}
fn parse_json_body(body: &str) -> Result<Value> {
serde_json::from_str(body)
.or_else(|_| {
parse_provider_sse_body(body).ok_or_else(|| {
serde_json::Error::io(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"provider returned neither JSON nor SSE JSON payload",
))
})
})
.map_err(|error| Error::BadRequest(format!("AI response parse failed: {error}")))
}
async fn request_json(client: &Client, url: &str, api_key: &str, payload: Value) -> Result<Value> {
let response = client
.post(url)
@@ -626,20 +705,56 @@ async fn request_json(client: &Client, url: &str, api_key: &str, payload: Value)
)));
}
serde_json::from_str(&body)
.or_else(|_| {
parse_provider_sse_body(&body).ok_or_else(|| {
serde_json::Error::io(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"provider returned neither JSON nor SSE JSON payload",
))
})
})
.map_err(|error| Error::BadRequest(format!("AI response parse failed: {error}")))
parse_json_body(&body)
}
async fn request_multipart_json(
client: &Client,
url: &str,
api_key: &str,
form: multipart::Form,
) -> Result<Value> {
let response = client
.post(url)
.bearer_auth(api_key)
.header("Accept", "application/json")
.multipart(form)
.send()
.await
.map_err(|error| Error::BadRequest(format!("AI request failed: {error}")))?;
let status = response.status();
let body = response
.text()
.await
.map_err(|error| Error::BadRequest(format!("AI response read failed: {error}")))?;
if !status.is_success() {
return Err(Error::BadRequest(format!(
"AI provider returned {status}: {body}"
)));
}
parse_json_body(&body)
}
/// True when the provider speaks the OpenAI Responses API dialect
/// (case-insensitive match on the known provider ids).
fn provider_uses_responses(provider: &str) -> bool {
    ["newapi", "openai", "anthropic", "gemini"]
        .iter()
        .any(|name| provider.eq_ignore_ascii_case(name))
}
/// Choose the built-in image-model default matching the provider family.
fn default_image_model_for_provider_name(provider: &str) -> &'static str {
    match provider_uses_cloudflare(provider) {
        true => DEFAULT_CLOUDFLARE_IMAGE_MODEL,
        false => DEFAULT_IMAGE_MODEL,
    }
}
/// Public wrapper over [`default_image_model_for_provider_name`] so other
/// modules can resolve the per-provider image-model default.
pub fn default_image_model_for_provider(provider: &str) -> &'static str {
    default_image_model_for_provider_name(provider)
}
async fn embed_texts_locally(inputs: Vec<String>, kind: EmbeddingKind) -> Result<Vec<Vec<f64>>> {
@@ -1015,6 +1130,66 @@ fn build_polish_markdown_prompt(markdown: &str) -> String {
)
}
/// Compose the Chinese prompt used to polish a review blurb.
///
/// All metadata fields are trimmed before insertion; a missing or blank
/// review date renders as "未填写", and tags are joined with " / ".
fn build_polish_review_prompt(
    title: &str,
    review_type: &str,
    rating: i32,
    review_date: Option<&str>,
    status: &str,
    tags: &[String],
    description: &str,
) -> String {
    let review_date_text = match review_date.map(str::trim) {
        Some(value) if !value.is_empty() => value,
        _ => "未填写",
    };
    let tag_text = match tags {
        [] => String::new(),
        items => items.join(" / "),
    };
    format!(
        "请润色一段中文作品评测简介/点评文案。\n\
         要求:\n\
         1. 保留原文观点、倾向和结论,不要杜撰剧情、设定或体验细节。\n\
         2. 语言更凝练、更自然,适合放在评测页中展示。\n\
         3. 可以优化句式与节奏,但不要改写成标题、列表或营销文案。\n\
         4. 默认输出一到三段正文,总长度尽量控制在 80 到 220 字之间;如果原文本身更长,可适度保留信息密度。\n\
         5. 只返回润色后的简介正文,不要附加解释。\n\n\
         作品标题:{}\n\
         评测类型:{}\n\
         评分:{}/5\n\
         评测日期:{}\n\
         状态:{}\n\
         标签:{}\n\n\
         当前简介:\n{}",
        title.trim(),
        review_type.trim(),
        rating,
        review_date_text,
        status.trim(),
        tag_text,
        description.trim(),
    )
}
/// Ensure a polished snippet ends with terminal punctuation.
///
/// Returns the trimmed text unchanged when it already ends with a Chinese or
/// ASCII sentence terminator, otherwise appends a fullwidth period.
///
/// NOTE(review): two char literals rendered as empty (`''`) in the diff view,
/// which is not valid Rust, and the append arm read `format!("{trimmed}")`,
/// a no-op that contradicts the function's name. They are restored here as
/// fullwidth "!" / "?" and an appended "。" — confirm against the original
/// source file.
fn ensure_sentence_ending(text: &str) -> String {
    let trimmed = text.trim();
    if trimmed.is_empty() {
        return String::new();
    }
    let already_terminated = matches!(
        trimmed.chars().last(),
        Some('。' | '!' | '?' | '.' | '!' | '?')
    );
    if already_terminated {
        trimmed.to_string()
    } else {
        format!("{trimmed}。")
    }
}
fn build_post_cover_prompt(
title: &str,
description: Option<&str>,
@@ -1063,12 +1238,40 @@ fn build_post_cover_prompt(
)
}
fn build_image_generation_url(provider: &str, api_base: &str) -> String {
fn build_image_generation_url(provider: &str, api_base: &str, image_model: &str) -> String {
let normalized = normalize_provider_api_base(provider, api_base);
if provider_uses_cloudflare(provider) {
return build_endpoint(&normalized, &format!("/ai/run/{}", image_model.trim()));
}
build_endpoint(&normalized, "/images/generations")
}
fn extract_image_generation_result(value: &Value) -> Option<String> {
if let Some(result) = value.get("result") {
if let Some(image) = result.get("image").and_then(Value::as_str) {
let trimmed = image.trim();
if !trimmed.is_empty() {
return Some(trimmed.to_string());
}
}
if let Some(url) = result.get("url").and_then(Value::as_str) {
let trimmed = url.trim();
if !trimmed.is_empty() {
return Some(trimmed.to_string());
}
}
}
if let Some(image) = value.get("image").and_then(Value::as_str) {
let trimmed = image.trim();
if !trimmed.is_empty() {
return Some(trimmed.to_string());
}
}
let data = value.get("data").and_then(Value::as_array)?;
for item in data {
@@ -1115,25 +1318,113 @@ fn generated_cover_directory() -> PathBuf {
})
}
fn persist_generated_cover_image(slug_hint: &str, base64_data: &str) -> Result<String> {
/// Map a MIME content type (parameters such as "; charset=…" are ignored) to
/// a canonical `(file extension, content type)` pair; `None` if unsupported.
fn image_details_from_mime(content_type: &str) -> Option<(&'static str, &'static str)> {
    const KNOWN: [(&str, (&'static str, &'static str)); 6] = [
        ("image/png", ("png", "image/png")),
        ("image/jpeg", ("jpg", "image/jpeg")),
        ("image/jpg", ("jpg", "image/jpeg")),
        ("image/webp", ("webp", "image/webp")),
        ("image/gif", ("gif", "image/gif")),
        ("image/svg+xml", ("svg", "image/svg+xml")),
    ];
    // Keep only the media-type "essence" before any ';' parameter list.
    let essence = content_type
        .trim()
        .split(';')
        .next()
        .unwrap_or_default()
        .trim()
        .to_ascii_lowercase();
    KNOWN
        .iter()
        .find(|(mime, _)| *mime == essence)
        .map(|(_, details)| *details)
}
/// Map a file extension (with or without a leading dot, any case) to a
/// canonical `(extension, content type)` pair; `None` if unsupported.
fn image_details_from_extension(extension: &str) -> Option<(&'static str, &'static str)> {
    const KNOWN: [(&str, (&'static str, &'static str)); 6] = [
        ("png", ("png", "image/png")),
        ("jpg", ("jpg", "image/jpeg")),
        ("jpeg", ("jpg", "image/jpeg")),
        ("webp", ("webp", "image/webp")),
        ("gif", ("gif", "image/gif")),
        ("svg", ("svg", "image/svg+xml")),
    ];
    let normalized = extension
        .trim()
        .trim_start_matches('.')
        .to_ascii_lowercase();
    KNOWN
        .iter()
        .find(|(name, _)| *name == normalized)
        .map(|(_, details)| *details)
}
/// Infer image details from the extension of a URL's path component
/// (query string and fragment are ignored by using `Url::path`).
fn image_details_from_url(url: &str) -> Option<(&'static str, &'static str)> {
    let parsed = Url::parse(url).ok()?;
    let (_, extension) = parsed.path().rsplit_once('.')?;
    image_details_from_extension(extension)
}
/// Decode a base64 cover-image payload.
///
/// Accepts either a bare base64 string (assumed PNG) or a `data:` URL whose
/// MIME type selects the extension/content-type; an unrecognized MIME falls
/// back to PNG. Malformed data URLs and bad base64 yield `Error::BadRequest`.
fn decode_base64_image_payload(base64_data: &str) -> Result<(Vec<u8>, &'static str, &'static str)> {
    let trimmed = base64_data.trim();
    let (payload, extension, content_type) = match trimmed.strip_prefix("data:") {
        None => (trimmed, "png", "image/png"),
        Some(rest) => {
            let (metadata, encoded) = rest
                .split_once(',')
                .ok_or_else(|| Error::BadRequest("AI 封面图 data URL 格式不正确".to_string()))?;
            // The MIME type is the first ';'-separated item; blank means PNG.
            let mime = metadata
                .split(';')
                .next()
                .filter(|value| !value.trim().is_empty())
                .unwrap_or("image/png");
            let (extension, content_type) =
                image_details_from_mime(mime).unwrap_or(("png", "image/png"));
            (encoded, extension, content_type)
        }
    };
    let image_bytes = BASE64_STANDARD
        .decode(payload)
        .map_err(|error| Error::BadRequest(format!("解析 AI 封面图失败: {error}")))?;
    Ok((image_bytes, extension, content_type))
}
async fn persist_generated_cover_image_bytes(
ctx: &AppContext,
slug_hint: &str,
image_bytes: Vec<u8>,
extension: &str,
content_type: &str,
) -> Result<String> {
if storage::optional_r2_settings(ctx).await?.is_some() {
let key = storage::build_object_key("post-covers", slug_hint, extension);
let stored = storage::upload_bytes_to_r2(
ctx,
&key,
image_bytes,
Some(content_type),
Some("public, max-age=31536000, immutable"),
)
.await?;
return Ok(stored.url);
}
let directory = generated_cover_directory();
fs::create_dir_all(&directory)
.map_err(|error| Error::BadRequest(format!("创建封面图目录失败: {error}")))?;
let safe_slug = metadata_slugify(slug_hint);
let file_name = format!(
"{}-{}.png",
"{}-{}.{}",
if safe_slug.is_empty() {
"cover".to_string()
} else {
safe_slug
},
Uuid::new_v4().simple()
Uuid::new_v4().simple(),
extension
.trim()
.trim_start_matches('.')
.to_ascii_lowercase()
);
let file_path = directory.join(&file_name);
let image_bytes = BASE64_STANDARD
.decode(base64_data)
.map_err(|error| Error::BadRequest(format!("解析 AI 封面图失败: {error}")))?;
fs::write(&file_path, image_bytes)
.map_err(|error| Error::BadRequest(format!("写入 AI 封面图失败: {error}")))?;
@@ -1141,6 +1432,49 @@ fn persist_generated_cover_image(slug_hint: &str, base64_data: &str) -> Result<S
Ok(format!("/generated-covers/{file_name}"))
}
/// Persist an AI-generated cover image and return a servable URL.
///
/// `image_result` is either an http(s) URL (downloaded and re-hosted) or a
/// base64 payload / data URL (decoded locally); actual storage is delegated
/// to `persist_generated_cover_image_bytes`.
async fn persist_generated_cover_image(
    ctx: &AppContext,
    slug_hint: &str,
    image_result: &str,
) -> Result<String> {
    if image_result.starts_with("http://") || image_result.starts_with("https://") {
        let client = Client::new();
        let response = client
            .get(image_result)
            .send()
            .await
            .map_err(|error| Error::BadRequest(format!("下载 AI 封面图失败: {error}")))?
            // Treat non-2xx statuses as download failures as well.
            .error_for_status()
            .map_err(|error| Error::BadRequest(format!("下载 AI 封面图失败: {error}")))?;
        // Capture Content-Type before the response body is consumed below.
        let content_type_header = response
            .headers()
            .get(CONTENT_TYPE)
            .and_then(|value| value.to_str().ok())
            .map(ToString::to_string);
        let image_bytes = response
            .bytes()
            .await
            .map_err(|error| Error::BadRequest(format!("读取 AI 封面图失败: {error}")))?;
        // Prefer the server-declared MIME type, then the URL's extension,
        // and finally assume PNG.
        let (extension, content_type) = content_type_header
            .as_deref()
            .and_then(image_details_from_mime)
            .or_else(|| image_details_from_url(image_result))
            .unwrap_or(("png", "image/png"));
        return persist_generated_cover_image_bytes(
            ctx,
            slug_hint,
            image_bytes.to_vec(),
            extension,
            content_type,
        )
        .await;
    }
    // Not a URL: treat the result as base64 (bare or data: URL) and decode.
    let (image_bytes, extension, content_type) = decode_base64_image_payload(image_result)?;
    persist_generated_cover_image_bytes(ctx, slug_hint, image_bytes, extension, content_type).await
}
fn fallback_polished_markdown(markdown: &str) -> String {
let metadata = fallback_generated_metadata(markdown);
let body = strip_markdown_frontmatter(markdown)
@@ -1176,6 +1510,17 @@ tags:\n{}\n\
)
}
/// Local fallback when remote polishing fails: collapse the description to
/// trimmed, non-empty lines separated by blank lines, then ensure the result
/// ends with terminal punctuation.
fn fallback_polished_review_description(description: &str) -> String {
    let normalized_source = normalize_newlines(description);
    let compact = normalized_source
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .collect::<Vec<_>>()
        .join("\n\n");
    ensure_sentence_ending(&compact)
}
pub async fn generate_post_metadata(
ctx: &AppContext,
markdown: &str,
@@ -1303,6 +1648,85 @@ pub async fn polish_post_markdown(
}
}
/// Polish a review blurb with the configured chat provider, falling back to
/// local normalization when the provider is unconfigured or the call fails.
///
/// Errors only when the description itself is empty; every remote failure is
/// logged and converted into the local fallback result.
pub async fn polish_review_description(
    ctx: &AppContext,
    title: &str,
    review_type: &str,
    rating: i32,
    review_date: Option<&str>,
    status: &str,
    tags: &[String],
    description: &str,
) -> Result<PolishedReviewDescription> {
    let trimmed_description = description.trim();
    if trimmed_description.is_empty() {
        return Err(Error::BadRequest(
            "请先填写点评内容,再进行润色。".to_string(),
        ));
    }
    let settings = load_runtime_settings(ctx, false).await?;
    // Attempt the remote provider only when both base URL and key exist;
    // otherwise synthesize an error so the fallback path below kicks in.
    let remote_result: Result<PolishedReviewDescription> = match (
        settings.api_base.clone(),
        settings.api_key.clone(),
    ) {
        (Some(api_base), Some(api_key)) => {
            let request = AiProviderRequest {
                provider: settings.provider.clone(),
                api_base,
                api_key,
                chat_model: settings.chat_model.clone(),
                system_prompt: "你是中文内容平台里的资深评测编辑。你只负责润色用户已有的作品点评文案,不要改写核心观点,不要虚构事实,不要输出额外解释。".to_string(),
                prompt: build_polish_review_prompt(
                    title,
                    review_type,
                    rating,
                    review_date,
                    status,
                    tags,
                    trimmed_description,
                ),
            };
            let client = Client::new();
            // Non-streaming request: we want a single complete rewrite.
            let response = request_json(
                &client,
                &build_provider_url(&request),
                &request.api_key,
                build_provider_payload(&request, false),
            )
            .await;
            match response {
                Ok(response) => {
                    let polished_description =
                        extract_provider_text(&response).ok_or_else(|| {
                            Error::BadRequest("AI 润色响应里没有可读取内容。".to_string())
                        })?;
                    Ok(PolishedReviewDescription {
                        polished_description: normalize_newlines(polished_description.trim()),
                    })
                }
                Err(error) => Err(error),
            }
        }
        _ => Err(Error::BadRequest(
            "AI 服务未配置完整,已自动切换为本地智能润色。".to_string(),
        )),
    };
    // Any remote error (including misconfiguration) degrades gracefully to
    // the local fallback instead of surfacing to the caller.
    match remote_result {
        Ok(result) => Ok(result),
        Err(error) => {
            tracing::warn!("AI review polish fallback: {error}");
            Ok(PolishedReviewDescription {
                polished_description: fallback_polished_review_description(trimmed_description),
            })
        }
    }
}
pub async fn generate_post_cover_image(
ctx: &AppContext,
title: &str,
@@ -1323,14 +1747,14 @@ pub async fn generate_post_cover_image(
}
let settings = load_runtime_settings(ctx, false).await?;
let api_base = settings
.api_base
.clone()
.ok_or_else(|| Error::BadRequest("AI API Base 未配置,无法生成封面图。".to_string()))?;
let api_key = settings
let image_settings = settings.image.clone();
let api_base = image_settings.api_base.clone().ok_or_else(|| {
Error::BadRequest("图片 AI API Base 未配置,无法生成封面图。".to_string())
})?;
let api_key = image_settings
.api_key
.clone()
.ok_or_else(|| Error::BadRequest("AI API Key 未配置,无法生成封面图。".to_string()))?;
.ok_or_else(|| Error::BadRequest("图片 AI API Key 未配置,无法生成封面图。".to_string()))?;
let prompt = build_post_cover_prompt(
if trimmed_title.is_empty() {
"未命名文章"
@@ -1343,33 +1767,22 @@ pub async fn generate_post_cover_image(
post_type,
trimmed_markdown,
);
let payload = json!({
"model": DEFAULT_IMAGE_MODEL,
"prompt": prompt,
"size": "1536x1024",
"quality": "high",
"output_format": "png"
});
let client = Client::new();
let response = request_json(
&client,
&build_image_generation_url(&settings.provider, &api_base),
let image_model = image_settings.image_model.clone();
let response = request_image_generation(
&image_settings.provider,
&api_base,
&api_key,
payload,
&image_model,
&prompt,
)
.await?;
let image_result = extract_image_generation_result(&response)
.ok_or_else(|| Error::BadRequest("AI 封面图响应里没有可读取图片。".to_string()))?;
let image_url = if image_result.starts_with("http://") || image_result.starts_with("https://") {
image_result
} else {
let slug_hint = slug
.map(str::trim)
.filter(|value| !value.is_empty())
.unwrap_or(trimmed_title);
persist_generated_cover_image(slug_hint, &image_result)?
};
let slug_hint = slug
.map(str::trim)
.filter(|value| !value.is_empty())
.unwrap_or(trimmed_title);
let image_url = persist_generated_cover_image(ctx, slug_hint, &image_result).await?;
Ok(GeneratedPostCoverImage { image_url, prompt })
}
@@ -1389,6 +1802,12 @@ fn extract_response_output(value: &Value) -> Option<String> {
}
}
if let Some(result) = value.get("result") {
if let Some(text) = extract_response_output(result) {
return Some(text);
}
}
if let Some(response) = value.get("response") {
if let Some(text) = extract_response_output(response) {
return Some(text);
@@ -1590,7 +2009,16 @@ fn build_sources(matches: &[ScoredChunk]) -> Vec<AiSource> {
}
pub(crate) fn build_provider_payload(request: &AiProviderRequest, stream: bool) -> Value {
if provider_uses_responses(&request.provider) {
if provider_uses_cloudflare(&request.provider) {
json!({
"prompt": format!(
"系统指令:{}
用户请求:{}",
request.system_prompt, request.prompt,
)
})
} else if provider_uses_responses(&request.provider) {
json!({
"model": request.chat_model,
"input": [
@@ -1640,13 +2068,15 @@ pub(crate) fn build_provider_payload(request: &AiProviderRequest, stream: bool)
pub(crate) fn build_provider_url(request: &AiProviderRequest) -> String {
let api_base = normalize_provider_api_base(&request.provider, &request.api_base);
let path = if provider_uses_responses(&request.provider) {
"/responses"
let path = if provider_uses_cloudflare(&request.provider) {
format!("/ai/run/{}", request.chat_model.trim())
} else if provider_uses_responses(&request.provider) {
"/responses".to_string()
} else {
"/chat/completions"
"/chat/completions".to_string()
};
build_endpoint(&api_base, path)
build_endpoint(&api_base, &path)
}
#[cfg(test)]
@@ -1701,6 +2131,25 @@ mod tests {
);
}
/// A bare Cloudflare account id expands to the full accounts API base.
#[test]
fn normalize_provider_api_base_supports_cloudflare_account_id() {
    assert_eq!(
        normalize_provider_api_base("cloudflare", "test-account-id"),
        "https://api.cloudflare.com/client/v4/accounts/test-account-id"
    );
}
/// Cloudflare chat requests are routed to the `/ai/run/{model}` endpoint.
#[test]
fn build_provider_url_uses_cloudflare_run_endpoint() {
    let mut request = build_request("cloudflare", "test-account-id");
    request.chat_model = "@cf/meta/llama-3.1-8b-instruct".to_string();
    assert_eq!(
        build_provider_url(&request),
        "https://api.cloudflare.com/client/v4/accounts/test-account-id/ai/run/@cf/meta/llama-3.1-8b-instruct"
    );
}
#[test]
fn profile_question_detects_owner_keywords() {
assert!(is_profile_question("站长的技术栈和个人介绍是什么?"));
@@ -1780,6 +2229,59 @@ async fn request_chat_answer(request: &AiProviderRequest) -> Result<String> {
})
}
/// Call the provider's image-generation endpoint and return the raw JSON.
///
/// The Cloudflare FLUX default model expects a multipart form (prompt plus
/// fixed 1024x576 / 16-step parameters); other Cloudflare models take a small
/// JSON body; every other provider gets the OpenAI-style generation payload.
///
/// Improvement: the endpoint URL was built identically in all three branches;
/// it is now computed once up front.
async fn request_image_generation(
    provider: &str,
    api_base: &str,
    api_key: &str,
    image_model: &str,
    prompt: &str,
) -> Result<Value> {
    let client = Client::new();
    // The URL depends only on provider/base/model, so build it once.
    let url = build_image_generation_url(provider, api_base, image_model);
    if provider_uses_cloudflare(provider) {
        if image_model.eq_ignore_ascii_case(DEFAULT_CLOUDFLARE_IMAGE_MODEL) {
            let form = multipart::Form::new()
                .text("prompt", prompt.to_string())
                .text("width", "1024")
                .text("height", "576")
                .text("steps", "16");
            return request_multipart_json(&client, &url, api_key, form).await;
        }
        return request_json(
            &client,
            &url,
            api_key,
            json!({
                "prompt": prompt,
                "steps": 4
            }),
        )
        .await;
    }
    request_json(
        &client,
        &url,
        api_key,
        json!({
            "model": image_model,
            "prompt": prompt,
            "size": "1536x1024",
            "quality": "high",
            "output_format": "png"
        }),
    )
    .await
}
pub async fn test_provider_connectivity(
provider: &str,
api_base: &str,
@@ -1814,6 +2316,59 @@ pub async fn test_provider_connectivity(
})
}
/// Admin "test connection" for the image-provider settings.
///
/// Validates and trims the four inputs, fires a small generation request
/// (a cheap low-step multipart probe for the Cloudflare FLUX default model,
/// otherwise the regular generation path) and summarizes the outcome.
pub async fn test_image_provider_connectivity(
    provider: &str,
    api_base: &str,
    api_key: &str,
    image_model: &str,
) -> Result<AiImageProviderConnectivityResult> {
    // A blank provider falls back to the module default; the other three
    // fields are required.
    let provider = trim_to_option(Some(provider.to_string()))
        .unwrap_or_else(|| DEFAULT_AI_PROVIDER.to_string());
    let api_base = trim_to_option(Some(api_base.to_string()))
        .ok_or_else(|| Error::BadRequest("请先填写图片 API 地址".to_string()))?;
    let api_key = trim_to_option(Some(api_key.to_string()))
        .ok_or_else(|| Error::BadRequest("请先填写图片 API 密钥".to_string()))?;
    let image_model = trim_to_option(Some(image_model.to_string()))
        .ok_or_else(|| Error::BadRequest("请先填写图片模型".to_string()))?;
    let prompt = "Minimal abstract technology cover art, blue gradient, no text, no watermark";
    // Probe the Cloudflare FLUX default with a smaller/cheaper render
    // (512x288, 4 steps) than real generation would use.
    let response = if provider_uses_cloudflare(&provider)
        && image_model.eq_ignore_ascii_case(DEFAULT_CLOUDFLARE_IMAGE_MODEL)
    {
        let client = Client::new();
        let form = multipart::Form::new()
            .text("prompt", prompt.to_string())
            .text("width", "512")
            .text("height", "288")
            .text("steps", "4");
        request_multipart_json(
            &client,
            &build_image_generation_url(&provider, &api_base, &image_model),
            &api_key,
            form,
        )
        .await?
    } else {
        request_image_generation(&provider, &api_base, &api_key, &image_model, prompt).await?
    };
    let image_result = extract_image_generation_result(&response)
        .ok_or_else(|| Error::BadRequest("图片接口响应里没有可读取的图片结果".to_string()))?;
    // URLs are shown verbatim; base64 blobs are summarized by size only.
    let result_preview =
        if image_result.starts_with("http://") || image_result.starts_with("https://") {
            image_result
        } else {
            format!("base64 image ok ({} chars)", image_result.len())
        };
    Ok(AiImageProviderConnectivityResult {
        provider: provider.clone(),
        endpoint: build_image_generation_url(&provider, &api_base, &image_model),
        image_model,
        result_preview,
    })
}
pub(crate) async fn prepare_answer(ctx: &AppContext, question: &str) -> Result<PreparedAiAnswer> {
let trimmed_question = question.trim();
if trimmed_question.is_empty() {
@@ -1907,12 +2462,72 @@ async fn load_runtime_settings(
return Err(Error::NotFound);
}
let active_provider =
site_settings_controller::active_ai_provider_id(&raw).and_then(|active_id| {
site_settings_controller::ai_provider_configs(&raw)
.into_iter()
.find(|item| item.id == active_id)
});
let provider = active_provider
.as_ref()
.map(|item| provider_name(Some(item.provider.as_str())))
.unwrap_or_else(|| provider_name(raw.ai_provider.as_deref()));
let api_base = active_provider
.as_ref()
.and_then(|item| trim_to_option(item.api_base.clone()))
.or_else(|| trim_to_option(raw.ai_api_base.clone()));
let api_key = active_provider
.as_ref()
.and_then(|item| trim_to_option(item.api_key.clone()))
.or_else(|| trim_to_option(raw.ai_api_key.clone()));
let chat_model = active_provider
.as_ref()
.and_then(|item| trim_to_option(item.chat_model.clone()))
.unwrap_or_else(|| {
if provider_uses_cloudflare(&provider) {
DEFAULT_CLOUDFLARE_CHAT_MODEL.to_string()
} else {
DEFAULT_CHAT_MODEL.to_string()
}
});
let legacy_image_model = active_provider
.as_ref()
.and_then(|item| trim_to_option(item.image_model.clone()))
.unwrap_or_else(|| default_image_model_for_provider_name(&provider).to_string());
let image_provider = trim_to_option(raw.ai_image_provider.clone());
let image_api_base = trim_to_option(raw.ai_image_api_base.clone());
let image_api_key = trim_to_option(raw.ai_image_api_key.clone());
let image_model = trim_to_option(raw.ai_image_model.clone());
let has_dedicated_image_settings = image_provider.is_some()
|| image_api_base.is_some()
|| image_api_key.is_some()
|| image_model.is_some();
let image = if has_dedicated_image_settings {
let provider = image_provider.unwrap_or_else(|| provider.clone());
let image_model = image_model
.unwrap_or_else(|| default_image_model_for_provider_name(&provider).to_string());
AiImageRuntimeSettings {
provider,
api_base: image_api_base,
api_key: image_api_key,
image_model,
}
} else {
AiImageRuntimeSettings {
provider: provider.clone(),
api_base: api_base.clone(),
api_key: api_key.clone(),
image_model: legacy_image_model,
}
};
Ok(AiRuntimeSettings {
provider: provider_name(raw.ai_provider.as_deref()),
api_base: trim_to_option(raw.ai_api_base.clone()),
api_key: trim_to_option(raw.ai_api_key.clone()),
chat_model: trim_to_option(raw.ai_chat_model.clone())
.unwrap_or_else(|| DEFAULT_CHAT_MODEL.to_string()),
provider,
api_base,
api_key,
chat_model,
image,
system_prompt: trim_to_option(raw.ai_system_prompt.clone())
.unwrap_or_else(|| DEFAULT_SYSTEM_PROMPT.to_string()),
top_k: raw

View File

@@ -0,0 +1,441 @@
use std::collections::{BTreeMap, HashMap};
use axum::http::HeaderMap;
use chrono::{DateTime, Duration, NaiveDate, Utc};
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder,
QuerySelect, Set,
};
use serde::Serialize;
use crate::models::_entities::query_events;
const EVENT_TYPE_SEARCH: &str = "search";
const EVENT_TYPE_AI_QUESTION: &str = "ai_question";
/// Request-scoped context captured alongside a query event.
#[derive(Clone, Debug, Default)]
pub struct QueryEventRequestContext {
    /// Path the event arrived on, e.g. "/api/search".
    pub request_path: Option<String>,
    /// `Referer` header, when present and non-blank.
    pub referrer: Option<String>,
    /// `User-Agent` header, when present and non-blank.
    pub user_agent: Option<String>,
}
/// Everything needed to persist one analytics event via [`record_event`].
#[derive(Clone, Debug)]
pub struct QueryEventDraft {
    /// Either "search" or "ai_question".
    pub event_type: String,
    /// Raw query/question text as the user submitted it.
    pub query_text: String,
    /// Request headers/path captured at ingestion time.
    pub request_context: QueryEventRequestContext,
    /// Number of results returned, when applicable.
    pub result_count: Option<i32>,
    /// Whether the request succeeded, when known.
    pub success: Option<bool>,
    /// e.g. "stream" vs. non-streaming AI responses.
    pub response_mode: Option<String>,
    /// AI provider id, for AI events.
    pub provider: Option<String>,
    /// Chat model id, for AI events.
    pub chat_model: Option<String>,
    /// End-to-end latency in milliseconds.
    pub latency_ms: Option<i32>,
}
/// Headline counters for the admin analytics dashboard.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsOverview {
    /// All-time search event count.
    pub total_searches: u64,
    /// All-time AI question count.
    pub total_ai_questions: u64,
    pub searches_last_24h: u64,
    pub ai_questions_last_24h: u64,
    pub searches_last_7d: u64,
    pub ai_questions_last_7d: u64,
    /// Distinct normalized search terms over the last 7 days.
    pub unique_search_terms_last_7d: usize,
    /// Distinct normalized AI questions over the last 7 days.
    pub unique_ai_questions_last_7d: usize,
    pub avg_search_results_last_7d: f64,
    /// `None` when no AI event in the window carried a latency.
    pub avg_ai_latency_ms_last_7d: Option<f64>,
}
/// One row in the "top queries" tables: a query with its frequency.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsTopQuery {
    /// Most recent raw text seen for this normalized query.
    pub query: String,
    pub count: u64,
    /// Formatted "YYYY-MM-DD HH:MM" UTC timestamp of the latest occurrence.
    pub last_seen_at: String,
}
/// A single recent query event, serialized for the admin event feed.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsRecentEvent {
    pub id: i32,
    /// "search" or "ai_question".
    pub event_type: String,
    pub query: String,
    pub result_count: Option<i32>,
    pub success: Option<bool>,
    pub response_mode: Option<String>,
    pub provider: Option<String>,
    pub chat_model: Option<String>,
    pub latency_ms: Option<i32>,
    /// Formatted "YYYY-MM-DD HH:MM" UTC timestamp.
    pub created_at: String,
}
/// Per-provider event count used for the provider breakdown chart.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsProviderBucket {
    pub provider: String,
    pub count: u64,
}
/// Per-day activity counts for the daily chart.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsDailyBucket {
    /// Calendar date (UTC) this bucket covers.
    pub date: String,
    pub searches: u64,
    pub ai_questions: u64,
}
/// Full payload returned to the admin analytics page.
#[derive(Clone, Debug, Serialize)]
pub struct AdminAnalyticsResponse {
    pub overview: AnalyticsOverview,
    pub top_search_terms: Vec<AnalyticsTopQuery>,
    pub top_ai_questions: Vec<AnalyticsTopQuery>,
    pub recent_events: Vec<AnalyticsRecentEvent>,
    pub providers_last_7d: Vec<AnalyticsProviderBucket>,
    pub daily_activity: Vec<AnalyticsDailyBucket>,
}
/// Internal accumulator while grouping events by normalized query.
#[derive(Clone, Debug)]
struct QueryAggregate {
    /// Latest raw query text observed for the group.
    query: String,
    count: u64,
    last_seen_at: DateTime<Utc>,
}
/// Trim an optional string; absent or whitespace-only values become `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let trimmed = value?.trim().to_string();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed)
    }
}
/// Canonical form used to group equivalent queries: collapse whitespace runs
/// to single spaces and lowercase the result.
fn normalize_query(value: &str) -> String {
    let mut words = value.split_whitespace();
    let mut collapsed = String::with_capacity(value.len());
    if let Some(first) = words.next() {
        collapsed.push_str(first);
    }
    for word in words {
        collapsed.push(' ');
        collapsed.push_str(word);
    }
    collapsed.to_lowercase()
}
/// Render a UTC timestamp as "YYYY-MM-DD HH:MM" for admin display.
fn format_timestamp(value: DateTime<Utc>) -> String {
    value.format("%Y-%m-%d %H:%M").to_string()
}
/// Read a header as trimmed UTF-8 text; missing, non-UTF-8, or blank values
/// all yield `None`.
fn header_value(headers: &HeaderMap, key: &str) -> Option<String> {
    let raw = headers.get(key)?.to_str().ok()?;
    trim_to_option(Some(raw.to_string()))
}
/// Clamp a measured latency into the non-negative `i32` range stored in the
/// database column.
fn clamp_latency(latency_ms: i64) -> i32 {
    i32::try_from(latency_ms.max(0)).unwrap_or(i32::MAX)
}
/// Group events of `wanted_type` by normalized query, counting occurrences
/// and keeping the most recent raw text and timestamp per group. The result
/// is sorted by count, then recency, both descending.
fn build_query_aggregates(
    events: &[query_events::Model],
    wanted_type: &str,
) -> Vec<QueryAggregate> {
    let mut grouped: HashMap<String, QueryAggregate> = HashMap::new();
    for event in events {
        if event.event_type != wanted_type {
            continue;
        }
        let created_at = DateTime::<Utc>::from(event.created_at);
        let entry = grouped
            .entry(event.normalized_query.clone())
            .or_insert_with(|| QueryAggregate {
                query: event.query_text.clone(),
                count: 0,
                last_seen_at: created_at,
            });
        entry.count += 1;
        // Keep whichever raw spelling was seen most recently.
        if created_at >= entry.last_seen_at {
            entry.query = event.query_text.clone();
            entry.last_seen_at = created_at;
        }
    }
    let mut aggregates: Vec<QueryAggregate> = grouped.into_values().collect();
    aggregates.sort_by(|a, b| {
        b.count
            .cmp(&a.count)
            .then_with(|| b.last_seen_at.cmp(&a.last_seen_at))
    });
    aggregates
}
/// Build top-query rows for one event type: returns the number of unique
/// normalized queries plus at most `limit` serialized entries, ordered by
/// frequency then recency.
fn aggregate_queries(
    events: &[query_events::Model],
    wanted_type: &str,
    limit: usize,
) -> (usize, Vec<AnalyticsTopQuery>) {
    let aggregates = build_query_aggregates(events, wanted_type);
    let total_unique = aggregates.len();
    let mut items = Vec::with_capacity(limit.min(total_unique));
    for aggregate in aggregates.into_iter().take(limit) {
        items.push(AnalyticsTopQuery {
            query: aggregate.query,
            count: aggregate.count,
            last_seen_at: format_timestamp(aggregate.last_seen_at),
        });
    }
    (total_unique, items)
}
/// Capture the request path plus the `Referer` and `User-Agent` headers for
/// analytics; blank values collapse to `None`.
pub fn request_context_from_headers(path: &str, headers: &HeaderMap) -> QueryEventRequestContext {
    let request_path = trim_to_option(Some(path.to_string()));
    let referrer = header_value(headers, "referer");
    let user_agent = header_value(headers, "user-agent");
    QueryEventRequestContext {
        request_path,
        referrer,
        user_agent,
    }
}
/// Best-effort insert of one analytics event.
///
/// Empty queries are silently dropped and database errors are only logged —
/// analytics must never fail the request that triggered it.
pub async fn record_event(ctx: &AppContext, draft: QueryEventDraft) {
    let query_text = draft.query_text.trim().to_string();
    if query_text.is_empty() {
        return;
    }
    let active_model = query_events::ActiveModel {
        event_type: Set(draft.event_type),
        query_text: Set(query_text.clone()),
        // Store a lowercased, whitespace-collapsed copy for grouping.
        normalized_query: Set(normalize_query(&query_text)),
        request_path: Set(trim_to_option(draft.request_context.request_path)),
        referrer: Set(trim_to_option(draft.request_context.referrer)),
        user_agent: Set(trim_to_option(draft.request_context.user_agent)),
        result_count: Set(draft.result_count),
        success: Set(draft.success),
        response_mode: Set(trim_to_option(draft.response_mode)),
        provider: Set(trim_to_option(draft.provider)),
        chat_model: Set(trim_to_option(draft.chat_model)),
        // Negative latencies are clamped to zero before persisting.
        latency_ms: Set(draft.latency_ms.map(|value| value.max(0))),
        ..Default::default()
    };
    if let Err(error) = active_model.insert(&ctx.db).await {
        tracing::warn!("failed to record query analytics event: {error}");
    }
}
/// Record a successful site-search event, clamping the result count and
/// latency into the `i32` columns.
pub async fn record_search_event(
    ctx: &AppContext,
    query_text: &str,
    result_count: usize,
    headers: &HeaderMap,
    latency_ms: i64,
) {
    record_event(
        ctx,
        QueryEventDraft {
            event_type: EVENT_TYPE_SEARCH.to_string(),
            query_text: query_text.to_string(),
            // Searches always arrive through the fixed /api/search route.
            request_context: request_context_from_headers("/api/search", headers),
            result_count: Some(result_count.min(i32::MAX as usize) as i32),
            // This helper is only called for successful searches.
            success: Some(true),
            response_mode: None,
            provider: None,
            chat_model: None,
            latency_ms: Some(clamp_latency(latency_ms)),
        },
    )
    .await;
}
/// Record an AI question event (success or failure) with provider/model
/// attribution; the request path is derived from the response mode.
pub async fn record_ai_question_event(
    ctx: &AppContext,
    question: &str,
    headers: &HeaderMap,
    success: bool,
    response_mode: &str,
    provider: Option<String>,
    chat_model: Option<String>,
    result_count: Option<usize>,
    latency_ms: i64,
) {
    record_event(
        ctx,
        QueryEventDraft {
            event_type: EVENT_TYPE_AI_QUESTION.to_string(),
            query_text: question.to_string(),
            request_context: request_context_from_headers(
                // Streaming answers come in on a dedicated route.
                if response_mode == "stream" {
                    "/api/ai/ask/stream"
                } else {
                    "/api/ai/ask"
                },
                headers,
            ),
            result_count: result_count.map(|value| value.min(i32::MAX as usize) as i32),
            success: Some(success),
            response_mode: Some(response_mode.to_string()),
            provider,
            chat_model,
            latency_ms: Some(clamp_latency(latency_ms)),
        },
    )
    .await;
}
/// Build the aggregated analytics payload for the admin dashboard.
///
/// Combines lifetime totals (counted in the database), 24-hour counts,
/// 7-day in-memory aggregates (top terms, provider breakdown, per-day
/// activity, averages), and the 24 most recent raw events.
pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsResponse> {
    let now = Utc::now();
    let since_24h = now - Duration::hours(24);
    let since_7d = now - Duration::days(7);
    // Lifetime counters, computed with COUNT queries in the database.
    let total_searches = query_events::Entity::find()
        .filter(query_events::Column::EventType.eq(EVENT_TYPE_SEARCH))
        .count(&ctx.db)
        .await?;
    let total_ai_questions = query_events::Entity::find()
        .filter(query_events::Column::EventType.eq(EVENT_TYPE_AI_QUESTION))
        .count(&ctx.db)
        .await?;
    let searches_last_24h = query_events::Entity::find()
        .filter(query_events::Column::EventType.eq(EVENT_TYPE_SEARCH))
        .filter(query_events::Column::CreatedAt.gte(since_24h))
        .count(&ctx.db)
        .await?;
    let ai_questions_last_24h = query_events::Entity::find()
        .filter(query_events::Column::EventType.eq(EVENT_TYPE_AI_QUESTION))
        .filter(query_events::Column::CreatedAt.gte(since_24h))
        .count(&ctx.db)
        .await?;
    // Load the full 7-day window once; all remaining aggregates are
    // computed in memory from this single result set.
    let last_7d_events = query_events::Entity::find()
        .filter(query_events::Column::CreatedAt.gte(since_7d))
        .order_by_desc(query_events::Column::CreatedAt)
        .all(&ctx.db)
        .await?;
    let searches_last_7d = last_7d_events
        .iter()
        .filter(|event| event.event_type == EVENT_TYPE_SEARCH)
        .count() as u64;
    let ai_questions_last_7d = last_7d_events
        .iter()
        .filter(|event| event.event_type == EVENT_TYPE_AI_QUESTION)
        .count() as u64;
    // aggregate_queries yields (unique query count, top-N list) per event type.
    let (unique_search_terms_last_7d, top_search_terms) =
        aggregate_queries(&last_7d_events, EVENT_TYPE_SEARCH, 8);
    let (unique_ai_questions_last_7d, top_ai_questions) =
        aggregate_queries(&last_7d_events, EVENT_TYPE_AI_QUESTION, 8);
    let mut provider_breakdown: HashMap<String, u64> = HashMap::new();
    // Keyed by UTC date; value is (search count, AI question count).
    let mut daily_map: BTreeMap<NaiveDate, (u64, u64)> = BTreeMap::new();
    let mut total_search_results = 0.0_f64;
    let mut counted_search_results = 0_u64;
    let mut total_ai_latency = 0.0_f64;
    let mut counted_ai_latency = 0_u64;
    // Seed all 7 days up front so days with zero activity still appear.
    for offset in 0..7 {
        let date = (now - Duration::days(offset)).date_naive();
        daily_map.entry(date).or_insert((0, 0));
    }
    for event in &last_7d_events {
        // Bucket by UTC calendar day.
        let day = DateTime::<Utc>::from(event.created_at).date_naive();
        let entry = daily_map.entry(day).or_insert((0, 0));
        if event.event_type == EVENT_TYPE_SEARCH {
            entry.0 += 1;
            // Averages only count events that actually recorded a result count.
            if let Some(result_count) = event.result_count {
                total_search_results += f64::from(result_count.max(0));
                counted_search_results += 1;
            }
            continue;
        }
        if event.event_type == EVENT_TYPE_AI_QUESTION {
            entry.1 += 1;
            // Blank/missing providers are grouped under a sentinel label.
            let provider = event
                .provider
                .clone()
                .filter(|value| !value.trim().is_empty())
                .unwrap_or_else(|| "local-or-unspecified".to_string());
            *provider_breakdown.entry(provider).or_insert(0) += 1;
            if let Some(latency_ms) = event.latency_ms {
                total_ai_latency += f64::from(latency_ms.max(0));
                counted_ai_latency += 1;
            }
        }
    }
    let mut providers_last_7d = provider_breakdown
        .into_iter()
        .map(|(provider, count)| AnalyticsProviderBucket { provider, count })
        .collect::<Vec<_>>();
    // Sort by count descending, breaking ties alphabetically; keep top 6.
    providers_last_7d.sort_by(|left, right| {
        right
            .count
            .cmp(&left.count)
            .then_with(|| left.provider.cmp(&right.provider))
    });
    providers_last_7d.truncate(6);
    let mut daily_activity = daily_map
        .into_iter()
        .map(|(date, (searches, ai_questions))| AnalyticsDailyBucket {
            date: date.format("%Y-%m-%d").to_string(),
            searches,
            ai_questions,
        })
        .collect::<Vec<_>>();
    // ISO date strings sort lexicographically == chronologically.
    daily_activity.sort_by(|left, right| left.date.cmp(&right.date));
    // Most recent 24 raw events for the activity feed.
    let recent_events = query_events::Entity::find()
        .order_by_desc(query_events::Column::CreatedAt)
        .limit(24)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(|event| AnalyticsRecentEvent {
            id: event.id,
            event_type: event.event_type,
            query: event.query_text,
            result_count: event.result_count,
            success: event.success,
            response_mode: event.response_mode,
            provider: event.provider,
            chat_model: event.chat_model,
            latency_ms: event.latency_ms,
            created_at: format_timestamp(event.created_at.into()),
        })
        .collect::<Vec<_>>();
    Ok(AdminAnalyticsResponse {
        overview: AnalyticsOverview {
            total_searches,
            total_ai_questions,
            searches_last_24h,
            ai_questions_last_24h,
            searches_last_7d,
            ai_questions_last_7d,
            unique_search_terms_last_7d,
            unique_ai_questions_last_7d,
            avg_search_results_last_7d: if counted_search_results > 0 {
                total_search_results / counted_search_results as f64
            } else {
                0.0
            },
            // None (rather than 0) when no latency samples exist.
            avg_ai_latency_ms_last_7d: (counted_ai_latency > 0)
                .then(|| total_ai_latency / counted_ai_latency as f64),
        },
        top_search_terms,
        top_ai_questions,
        recent_events,
        providers_last_7d,
        daily_activity,
    })
}

View File

@@ -1,2 +1,4 @@
pub mod ai;
pub mod analytics;
pub mod content;
pub mod storage;

View File

@@ -0,0 +1,513 @@
use aws_config::BehaviorVersion;
use aws_sdk_s3::{config::Credentials, primitives::ByteStream, Client};
use loco_rs::prelude::*;
use sea_orm::{EntityTrait, QueryOrder};
use std::path::{Path, PathBuf};
use uuid::Uuid;
use crate::models::_entities::site_settings;
// Environment-variable fallbacks for media storage configuration.
// The generic `TERMI_MEDIA_*` names are the preferred spelling and are
// consulted before the legacy `TERMI_R2_*` names below.
const ENV_MEDIA_PROVIDER: &str = "TERMI_MEDIA_PROVIDER";
const ENV_MEDIA_ENDPOINT: &str = "TERMI_MEDIA_ENDPOINT";
const ENV_MEDIA_BUCKET: &str = "TERMI_MEDIA_BUCKET";
const ENV_MEDIA_PUBLIC_BASE_URL: &str = "TERMI_MEDIA_PUBLIC_BASE_URL";
const ENV_MEDIA_ACCESS_KEY_ID: &str = "TERMI_MEDIA_ACCESS_KEY_ID";
const ENV_MEDIA_SECRET_ACCESS_KEY: &str = "TERMI_MEDIA_SECRET_ACCESS_KEY";
// Legacy Cloudflare-R2-specific variables, kept for backward compatibility.
const ENV_R2_ACCOUNT_ID: &str = "TERMI_R2_ACCOUNT_ID";
const ENV_R2_BUCKET: &str = "TERMI_R2_BUCKET";
const ENV_R2_PUBLIC_BASE_URL: &str = "TERMI_R2_PUBLIC_BASE_URL";
const ENV_R2_ACCESS_KEY_ID: &str = "TERMI_R2_ACCESS_KEY_ID";
const ENV_R2_SECRET_ACCESS_KEY: &str = "TERMI_R2_SECRET_ACCESS_KEY";
/// Supported S3-compatible media storage backends.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum MediaStorageProvider {
    /// Cloudflare R2 (endpoint derived from the account id).
    R2,
    /// Self-hosted MinIO (explicit endpoint, path-style addressing).
    Minio,
}
/// Fully-resolved connection settings for the media object store.
#[derive(Clone, Debug)]
pub struct MediaStorageSettings {
    pub provider: MediaStorageProvider,
    // Lowercase provider label ("r2" / "minio") for display and logging.
    pub provider_name: String,
    // S3 API endpoint URL.
    pub endpoint: String,
    pub bucket: String,
    // Base URL (no trailing slash) used to build public object URLs.
    pub public_base_url: String,
    pub access_key_id: String,
    pub secret_access_key: String,
    // "auto" for R2, "us-east-1" for MinIO (see `optional_r2_settings`).
    pub region: String,
    // true for MinIO, which requires path-style bucket addressing.
    pub force_path_style: bool,
}
/// Result of a successful upload: the object key plus its public URL.
#[derive(Clone, Debug)]
pub struct StoredObject {
    pub key: String,
    pub url: String,
}
/// One entry of a bucket listing, as returned by [`list_objects`].
#[derive(Clone, Debug)]
pub struct StoredObjectSummary {
    pub key: String,
    pub url: String,
    pub size_bytes: i64,
    // Debug-formatted S3 timestamp, when the backend reported one.
    pub last_modified: Option<String>,
}
/// Normalize an optional string: trim surrounding whitespace and map
/// blank/empty results to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value.and_then(|item| {
        let trimmed = item.trim();
        // Allocate only when something non-empty survives the trim
        // (the original built an owned String before checking emptiness).
        (!trimmed.is_empty()).then(|| trimmed.to_string())
    })
}
/// Read an environment variable, returning `None` when it is unset,
/// non-UTF-8, or blank after trimming.
fn env_value(name: &str) -> Option<String> {
    match std::env::var(name) {
        Ok(raw) => trim_to_option(Some(raw)),
        Err(_) => None,
    }
}
/// Slugify a name for use in an object key: ASCII alphanumerics are
/// lowercased, every other run of characters collapses to a single `-`,
/// and leading/trailing dashes are stripped.
fn slugify_segment(value: &str) -> String {
    // Preallocate: output is at most one char per input char.
    let mut output = String::with_capacity(value.len());
    let mut previous_dash = false;
    for ch in value.chars() {
        if ch.is_ascii_alphanumeric() {
            output.push(ch.to_ascii_lowercase());
            previous_dash = false;
        } else if !previous_dash {
            // Collapse consecutive separators into one dash.
            output.push('-');
            previous_dash = true;
        }
    }
    output.trim_matches('-').to_string()
}
/// Canonicalize a public base URL: strip surrounding whitespace, then any
/// trailing slashes, so object URLs can be built as `{base}/{key}`.
fn normalize_public_base_url(value: String) -> String {
    let without_whitespace = value.trim();
    without_whitespace.trim_end_matches('/').to_string()
}
/// Parse a provider label (case/whitespace-insensitive).
/// Anything other than "minio" — including `None` — defaults to R2.
fn normalize_provider(value: Option<String>) -> MediaStorageProvider {
    let normalized = value
        .as_deref()
        .map(str::trim)
        .unwrap_or("")
        .to_ascii_lowercase();
    if normalized == "minio" {
        MediaStorageProvider::Minio
    } else {
        MediaStorageProvider::R2
    }
}
/// Fetch the first (lowest-id) site-settings row, if one exists.
async fn load_settings_row(ctx: &AppContext) -> Result<Option<site_settings::Model>> {
    let row = site_settings::Entity::find()
        .order_by_asc(site_settings::Column::Id)
        .one(&ctx.db)
        .await?;
    Ok(row)
}
/// Derive the R2 S3 endpoint. A full `http(s)://` URL is passed through
/// (minus trailing slashes); a bare account id is expanded to the standard
/// `https://{account}.r2.cloudflarestorage.com` host.
fn build_r2_endpoint(account_id: &str) -> String {
    let trimmed = account_id.trim().trim_end_matches('/');
    let is_full_url = trimmed.starts_with("http://") || trimmed.starts_with("https://");
    if is_full_url {
        trimmed.to_string()
    } else {
        format!("https://{trimmed}.r2.cloudflarestorage.com")
    }
}
/// Default public base URL for MinIO: `{endpoint}/{bucket}` with the
/// slashes between them normalized.
fn default_public_base_url_for_minio(endpoint: &str, bucket: &str) -> String {
    let base = endpoint.trim().trim_end_matches('/');
    let bucket_segment = bucket.trim().trim_start_matches('/');
    format!("{base}/{bucket_segment}")
}
/// Locate the frontend's `generated-covers` directory.
///
/// Tries `./frontend/public/generated-covers` and then the same path one
/// level up; the first candidate whose PARENT directory exists wins (the
/// leaf itself may not have been created yet). Falls back to the relative
/// `../frontend/...` path when neither parent is found.
pub fn generated_cover_directory() -> PathBuf {
    let current_dir = std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."));
    let candidates = [
        current_dir
            .join("frontend")
            .join("public")
            .join("generated-covers"),
        current_dir
            .join("..")
            .join("frontend")
            .join("public")
            .join("generated-covers"),
    ];
    for candidate in candidates {
        let parent_exists = candidate
            .parent()
            .map(Path::exists)
            .unwrap_or(false);
        if parent_exists {
            return candidate;
        }
    }
    PathBuf::from("..")
        .join("frontend")
        .join("public")
        .join("generated-covers")
}
/// Return the path's file extension (trimmed, case preserved), or "png"
/// when there is no usable extension.
pub fn file_extension_for_path(path: &Path) -> &str {
    match path.extension().and_then(|ext| ext.to_str()) {
        Some(ext) if !ext.trim().is_empty() => ext.trim(),
        _ => "png",
    }
}
/// Resolve media storage settings, if any are configured.
///
/// Per-field resolution order: site-settings DB row first, then generic
/// `TERMI_MEDIA_*` env vars, then legacy `TERMI_R2_*` env vars.
///
/// Returns `Ok(None)` when nothing at all is configured, and
/// `Err(BadRequest)` listing the missing fields when the configuration is
/// only partially filled in.
pub async fn optional_r2_settings(ctx: &AppContext) -> Result<Option<MediaStorageSettings>> {
    let row = load_settings_row(ctx).await?;
    let provider_raw = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_storage_provider.clone()))
        .or_else(|| env_value(ENV_MEDIA_PROVIDER));
    // Unspecified/unknown provider defaults to R2.
    let provider = normalize_provider(provider_raw.clone());
    // For R2 this is the account id; for MinIO it is the endpoint URL.
    let endpoint_or_account = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_account_id.clone()))
        .or_else(|| env_value(ENV_MEDIA_ENDPOINT))
        .or_else(|| env_value(ENV_R2_ACCOUNT_ID));
    let bucket = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_bucket.clone()))
        .or_else(|| env_value(ENV_MEDIA_BUCKET))
        .or_else(|| env_value(ENV_R2_BUCKET));
    let access_key_id = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_access_key_id.clone()))
        .or_else(|| env_value(ENV_MEDIA_ACCESS_KEY_ID))
        .or_else(|| env_value(ENV_R2_ACCESS_KEY_ID));
    let secret_access_key = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_secret_access_key.clone()))
        .or_else(|| env_value(ENV_MEDIA_SECRET_ACCESS_KEY))
        .or_else(|| env_value(ENV_R2_SECRET_ACCESS_KEY));
    // MinIO can fall back to `{endpoint}/{bucket}` when no explicit public
    // base URL was configured; R2 has no such derivable default.
    let public_base_url = row
        .as_ref()
        .and_then(|item| trim_to_option(item.media_r2_public_base_url.clone()))
        .or_else(|| env_value(ENV_MEDIA_PUBLIC_BASE_URL))
        .or_else(|| env_value(ENV_R2_PUBLIC_BASE_URL))
        .or_else(
            || match (&provider, endpoint_or_account.as_deref(), bucket.as_deref()) {
                (MediaStorageProvider::Minio, Some(endpoint), Some(bucket)) => {
                    Some(default_public_base_url_for_minio(endpoint, bucket))
                }
                _ => None,
            },
        );
    // Nothing configured anywhere -> storage feature is simply off.
    let has_any = endpoint_or_account.is_some()
        || bucket.is_some()
        || public_base_url.is_some()
        || access_key_id.is_some()
        || secret_access_key.is_some()
        || provider_raw.is_some();
    if !has_any {
        return Ok(None);
    }
    // Partially configured -> surface every missing field in one error.
    let missing = [
        (
            match provider {
                MediaStorageProvider::Minio => "Endpoint",
                MediaStorageProvider::R2 => "Account ID",
            },
            endpoint_or_account.is_some(),
        ),
        ("Bucket", bucket.is_some()),
        ("Public Base URL", public_base_url.is_some()),
        ("Access Key ID", access_key_id.is_some()),
        ("Secret Access Key", secret_access_key.is_some()),
    ]
    .into_iter()
    .filter_map(|(label, present)| (!present).then_some(label))
    .collect::<Vec<_>>();
    if !missing.is_empty() {
        return Err(Error::BadRequest(format!(
            "对象存储配置不完整,请补齐:{}",
            missing.join(" / ")
        )));
    }
    let endpoint = match provider {
        MediaStorageProvider::R2 => {
            build_r2_endpoint(&endpoint_or_account.clone().unwrap_or_default())
        }
        MediaStorageProvider::Minio => endpoint_or_account.clone().unwrap_or_default(),
    };
    Ok(Some(MediaStorageSettings {
        provider: provider.clone(),
        provider_name: match provider {
            MediaStorageProvider::R2 => "r2".to_string(),
            MediaStorageProvider::Minio => "minio".to_string(),
        },
        endpoint,
        bucket: bucket.unwrap_or_default(),
        public_base_url: normalize_public_base_url(public_base_url.unwrap_or_default()),
        access_key_id: access_key_id.unwrap_or_default(),
        secret_access_key: secret_access_key.unwrap_or_default(),
        region: match provider {
            // R2 expects the literal region "auto"; MinIO accepts any
            // region and "us-east-1" is the conventional default.
            MediaStorageProvider::R2 => "auto".to_string(),
            MediaStorageProvider::Minio => "us-east-1".to_string(),
        },
        force_path_style: provider == MediaStorageProvider::Minio,
    }))
}
pub async fn require_r2_settings(ctx: &AppContext) -> Result<MediaStorageSettings> {
optional_r2_settings(ctx)
.await?
.ok_or_else(|| Error::BadRequest("请先在后台配置媒体对象存储".to_string()))
}
/// Construct an S3 client for the configured backend.
///
/// Both R2 and MinIO speak the S3 API; the differences are the endpoint,
/// the region label, and path-style addressing (required by MinIO).
async fn r2_client(settings: &MediaStorageSettings) -> Client {
    let shared_config = aws_config::defaults(BehaviorVersion::latest())
        .endpoint_url(settings.endpoint.clone())
        .credentials_provider(Credentials::new(
            settings.access_key_id.clone(),
            settings.secret_access_key.clone(),
            None,
            None,
            // Provider name is informational only (shows in credential debugging).
            match settings.provider {
                MediaStorageProvider::R2 => "r2",
                MediaStorageProvider::Minio => "minio",
            },
        ))
        .region(aws_sdk_s3::config::Region::new(settings.region.clone()))
        .load()
        .await;
    let conf = aws_sdk_s3::config::Builder::from(&shared_config)
        // Path-style URLs (http://host/bucket/key) for MinIO.
        .force_path_style(settings.force_path_style)
        .build();
    Client::from_conf(conf)
}
/// Build the public URL for an object key as `{public_base_url}/{key}`.
/// The base is already normalized to have no trailing slash.
fn build_public_url(settings: &MediaStorageSettings, key: &str) -> String {
    let object_path = key.trim_start_matches('/');
    format!("{}/{object_path}", settings.public_base_url)
}
/// Inverse of `build_public_url`: extract the object key from a public URL,
/// returning `None` when the URL is not under our configured base.
///
/// Fix: require a `/` boundary right after the base. The previous bare
/// `strip_prefix(base)` let base `…/bucket` match `…/bucket2/key`
/// (yielding key "2/key"), which could make `delete_managed_url` remove an
/// object that was never ours.
pub fn object_key_from_public_url(settings: &MediaStorageSettings, url: &str) -> Option<String> {
    let normalized_base = settings.public_base_url.trim().trim_end_matches('/');
    let normalized_url = url.trim();
    if normalized_base.is_empty() || normalized_url.is_empty() {
        return None;
    }
    normalized_url
        .strip_prefix(normalized_base)
        // Boundary check: the character after the base must be '/'.
        .and_then(|suffix| suffix.strip_prefix('/'))
        .map(|suffix| suffix.trim_start_matches('/').to_string())
        .filter(|suffix| !suffix.is_empty())
}
/// Build a collision-resistant object key: `{prefix}/{stem}-{uuid}.{ext}`.
///
/// The stem is slugified (falling back to "asset" when empty), the
/// extension is lowercased with any leading dot removed (falling back to
/// "bin"), and a random UUID guarantees uniqueness.
pub fn build_object_key(prefix: &str, stem: &str, extension: &str) -> String {
    let safe_prefix = prefix.trim_matches('/');
    let stem_slug = slugify_segment(stem);
    let stem_part = if stem_slug.is_empty() {
        "asset".to_string()
    } else {
        stem_slug
    };
    let ext_slug = extension
        .trim()
        .trim_start_matches('.')
        .to_ascii_lowercase();
    let ext_part = if ext_slug.is_empty() {
        "bin".to_string()
    } else {
        ext_slug
    };
    let object_name = format!("{}-{}.{}", stem_part, Uuid::new_v4().simple(), ext_part);
    if safe_prefix.is_empty() {
        object_name
    } else {
        format!("{safe_prefix}/{object_name}")
    }
}
async fn ensure_bucket_exists(client: &Client, settings: &MediaStorageSettings) -> Result<()> {
if client
.head_bucket()
.bucket(&settings.bucket)
.send()
.await
.is_ok()
{
return Ok(());
}
if settings.provider != MediaStorageProvider::Minio {
return Err(Error::BadRequest(format!(
"对象存储 bucket 不存在或不可访问:{}",
settings.bucket
)));
}
client
.create_bucket()
.bucket(&settings.bucket)
.send()
.await
.map_err(|error| Error::BadRequest(format!("自动创建 MinIO bucket 失败: {error}")))?;
Ok(())
}
pub async fn upload_bytes_to_r2(
ctx: &AppContext,
key: &str,
bytes: Vec<u8>,
content_type: Option<&str>,
cache_control: Option<&str>,
) -> Result<StoredObject> {
let settings = require_r2_settings(ctx).await?;
let client = r2_client(&settings).await;
ensure_bucket_exists(&client, &settings).await?;
let mut request = client
.put_object()
.bucket(&settings.bucket)
.key(key)
.body(ByteStream::from(bytes));
if let Some(content_type) = content_type
.map(str::trim)
.filter(|value| !value.is_empty())
{
request = request.content_type(content_type);
}
if let Some(cache_control) = cache_control
.map(str::trim)
.filter(|value| !value.is_empty())
{
request = request.cache_control(cache_control);
}
request
.send()
.await
.map_err(|error| Error::BadRequest(format!("上传文件到对象存储失败: {error}")))?;
Ok(StoredObject {
key: key.to_string(),
url: build_public_url(&settings, key),
})
}
pub async fn delete_object(ctx: &AppContext, key: &str) -> Result<()> {
let settings = require_r2_settings(ctx).await?;
let client = r2_client(&settings).await;
client
.delete_object()
.bucket(&settings.bucket)
.key(key)
.send()
.await
.map_err(|error| Error::BadRequest(format!("删除对象存储文件失败: {error}")))?;
Ok(())
}
pub async fn delete_managed_url(ctx: &AppContext, url: &str) -> Result<bool> {
let Some(settings) = optional_r2_settings(ctx).await? else {
return Ok(false);
};
let Some(key) = object_key_from_public_url(&settings, url) else {
return Ok(false);
};
let client = r2_client(&settings).await;
client
.delete_object()
.bucket(&settings.bucket)
.key(&key)
.send()
.await
.map_err(|error| Error::BadRequest(format!("删除对象存储文件失败: {error}")))?;
Ok(true)
}
/// List up to `limit` objects (clamped to 1..=1000, the S3 page maximum),
/// optionally filtered by a non-blank key prefix.
pub async fn list_objects(
    ctx: &AppContext,
    prefix: Option<&str>,
    limit: i32,
) -> Result<Vec<StoredObjectSummary>> {
    let settings = require_r2_settings(ctx).await?;
    let client = r2_client(&settings).await;
    ensure_bucket_exists(&client, &settings).await?;
    let prefix_value = prefix.map(str::trim).filter(|value| !value.is_empty());
    let mut request = client
        .list_objects_v2()
        .bucket(&settings.bucket)
        .max_keys(limit.clamp(1, 1000));
    if let Some(value) = prefix_value {
        request = request.prefix(value);
    }
    let result = request
        .send()
        .await
        .map_err(|error| Error::BadRequest(format!("读取对象存储列表失败: {error}")))?;
    let mut summaries = Vec::new();
    for item in result.contents() {
        // Entries without a key are skipped.
        let Some(key) = item.key().map(str::to_string) else {
            continue;
        };
        summaries.push(StoredObjectSummary {
            url: build_public_url(&settings, &key),
            key,
            size_bytes: item.size().unwrap_or_default(),
            last_modified: item.last_modified().map(|ts| format!("{ts:?}")),
        });
    }
    Ok(summaries)
}
pub async fn test_r2_connectivity(ctx: &AppContext) -> Result<String> {
let settings = require_r2_settings(ctx).await?;
let client = r2_client(&settings).await;
ensure_bucket_exists(&client, &settings).await?;
let healthcheck_key = format!(".healthchecks/{}.txt", Uuid::new_v4().simple());
client
.put_object()
.bucket(&settings.bucket)
.key(&healthcheck_key)
.body(ByteStream::from_static(b"termi-storage-ok"))
.content_type("text/plain")
.send()
.await
.map_err(|error| Error::BadRequest(format!("对象存储连接测试失败: {error}")))?;
client
.delete_object()
.bucket(&settings.bucket)
.key(&healthcheck_key)
.send()
.await
.map_err(|error| Error::BadRequest(format!("对象存储清理测试文件失败: {error}")))?;
Ok(settings.bucket)
}