// -----------------------------------------------------------------------------
// Admin controller: session auth, dashboard aggregation, site settings, AI
// re-indexing / provider connectivity tests, R2 media management, and
// comment-blacklist CRUD.
//
// NOTE(review): every generic type argument in this span appears to have been
// stripped in transit (e.g. `pub username: Option,`, `pub groups: Vec,`,
// `identity: Option,`). Presumably these were `Option<String>`, `Vec<String>`,
// `Option<AdminIdentity>`, etc. — TODO recover the exact parameters from
// version control; the code below is kept byte-for-byte as received.
// -----------------------------------------------------------------------------
use axum::{ extract::{Multipart, Query}, http::{header, HeaderMap}, }; use loco_rs::prelude::*; use sea_orm::{ ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, PaginatorTrait, QueryFilter, QueryOrder, QuerySelect, Set, }; use serde::{Deserialize, Serialize}; use crate::{ controllers::{ admin::{ admin_username, check_auth, clear_local_session, clear_local_session_cookie, local_login_enabled, proxy_auth_enabled, resolve_admin_identity, start_local_session, validate_admin_credentials, }, site_settings::{self, SiteSettingsPayload}, }, models::_entities::{ ai_chunks, comment_blacklist, comment_persona_analysis_logs, comments, friend_links, posts, reviews, }, services::{admin_audit, ai, analytics, comment_guard, content, media_assets, storage}, }; #[derive(Clone, Debug, Deserialize)] pub struct AdminLoginPayload { pub username: String, pub password: String, } #[derive(Clone, Debug, Serialize)] pub struct AdminSessionResponse { pub authenticated: bool, pub username: Option, pub email: Option, pub auth_source: Option, pub auth_provider: Option, pub groups: Vec, pub proxy_auth_enabled: bool, pub local_login_enabled: bool, pub can_logout: bool, } fn build_session_response( identity: Option, ) -> AdminSessionResponse { let can_logout = matches!( identity.as_ref().map(|item| item.source.as_str()), Some("local") ); AdminSessionResponse { authenticated: identity.is_some(), username: identity.as_ref().map(|item| item.username.clone()), email: identity.as_ref().and_then(|item| item.email.clone()), auth_source: identity.as_ref().map(|item| item.source.clone()), auth_provider: identity.as_ref().and_then(|item| item.provider.clone()), groups: identity.map(|item| item.groups).unwrap_or_default(), proxy_auth_enabled: proxy_auth_enabled(), local_login_enabled: local_login_enabled(), can_logout, } } #[derive(Clone, Debug, Serialize)] pub struct DashboardStats { pub total_posts: u64, pub total_comments: u64, pub pending_comments: u64, pub draft_posts: u64, pub scheduled_posts: 
u64, pub offline_posts: u64, pub expired_posts: u64, pub private_posts: u64, pub unlisted_posts: u64, pub total_categories: u64, pub total_tags: u64, pub total_reviews: u64, pub total_links: u64, pub pending_links: u64, pub ai_chunks: u64, pub ai_enabled: bool, } #[derive(Clone, Debug, Serialize)] pub struct DashboardPostItem { pub id: i32, pub title: String, pub slug: String, pub category: String, pub post_type: String, pub pinned: bool, pub status: String, pub visibility: String, pub created_at: String, } #[derive(Clone, Debug, Serialize)] pub struct DashboardCommentItem { pub id: i32, pub author: String, pub post_slug: String, pub scope: String, pub excerpt: String, pub approved: bool, pub created_at: String, } #[derive(Clone, Debug, Serialize)] pub struct DashboardFriendLinkItem { pub id: i32, pub site_name: String, pub site_url: String, pub category: String, pub status: String, pub created_at: String, } #[derive(Clone, Debug, Serialize)] pub struct DashboardReviewItem { pub id: i32, pub title: String, pub review_type: String, pub rating: i32, pub status: String, pub review_date: String, } #[derive(Clone, Debug, Serialize)] pub struct DashboardSiteSummary { pub site_name: String, pub site_url: String, pub ai_enabled: bool, pub ai_chunks: u64, pub ai_last_indexed_at: Option, } #[derive(Clone, Debug, Serialize)] pub struct AdminDashboardResponse { pub stats: DashboardStats, pub site: DashboardSiteSummary, pub recent_posts: Vec, pub pending_comments: Vec, pub pending_friend_links: Vec, pub recent_reviews: Vec, } #[derive(Clone, Debug, Serialize)] pub struct AdminSiteSettingsResponse { pub id: i32, pub site_name: Option, pub site_short_name: Option, pub site_url: Option, pub site_title: Option, pub site_description: Option, pub hero_title: Option, pub hero_subtitle: Option, pub owner_name: Option, pub owner_title: Option, pub owner_bio: Option, pub owner_avatar_url: Option, pub social_github: Option, pub social_twitter: Option, pub social_email: Option, pub 
location: Option, pub tech_stack: Vec, pub music_playlist: Vec, pub ai_enabled: bool, pub paragraph_comments_enabled: bool, pub comment_turnstile_enabled: bool, pub subscription_turnstile_enabled: bool, pub web_push_enabled: bool, pub turnstile_site_key: Option, pub turnstile_secret_key: Option, pub web_push_vapid_public_key: Option, pub web_push_vapid_private_key: Option, pub web_push_vapid_subject: Option, pub ai_provider: Option, pub ai_api_base: Option, pub ai_api_key: Option, pub ai_chat_model: Option, pub ai_image_provider: Option, pub ai_image_api_base: Option, pub ai_image_api_key: Option, pub ai_image_model: Option, pub ai_providers: Vec, pub ai_active_provider_id: Option, pub ai_embedding_model: Option, pub ai_system_prompt: Option, pub ai_top_k: Option, pub ai_chunk_size: Option, pub ai_last_indexed_at: Option, pub ai_chunks_count: u64, pub ai_local_embedding: String, pub media_storage_provider: Option, pub media_r2_account_id: Option, pub media_r2_bucket: Option, pub media_r2_public_base_url: Option, pub media_r2_access_key_id: Option, pub media_r2_secret_access_key: Option, pub seo_default_og_image: Option, pub seo_default_twitter_handle: Option, pub notification_webhook_url: Option, pub notification_channel_type: String, pub notification_comment_enabled: bool, pub notification_friend_link_enabled: bool, pub subscription_popup_enabled: bool, pub subscription_popup_title: String, pub subscription_popup_description: String, pub subscription_popup_delay_seconds: i32, pub search_synonyms: Vec, } #[derive(Clone, Debug, Serialize)] pub struct AdminAiReindexResponse { pub indexed_chunks: usize, pub last_indexed_at: Option, } #[derive(Clone, Debug, Deserialize)] pub struct AdminAiProviderTestRequest { pub provider: site_settings::AiProviderConfig, } #[derive(Clone, Debug, Serialize)] pub struct AdminAiProviderTestResponse { pub provider: String, pub endpoint: String, pub chat_model: String, pub reply_preview: String, } #[derive(Clone, Debug, Deserialize)] pub 
struct AdminAiImageProviderTestRequest { pub provider: String, pub api_base: String, pub api_key: String, pub image_model: String, } #[derive(Clone, Debug, Serialize)] pub struct AdminAiImageProviderTestResponse { pub provider: String, pub endpoint: String, pub image_model: String, pub result_preview: String, } #[derive(Clone, Debug, Serialize)] pub struct AdminImageUploadResponse { pub url: String, pub key: String, } #[derive(Clone, Debug, Serialize)] pub struct AdminR2ConnectivityResponse { pub bucket: String, pub public_base_url: String, } #[derive(Clone, Debug, Serialize)] pub struct AdminMediaObjectResponse { pub key: String, pub url: String, pub size_bytes: i64, pub last_modified: Option, pub title: Option, pub alt_text: Option, pub caption: Option, pub tags: Vec, pub notes: Option, } #[derive(Clone, Debug, Serialize)] pub struct AdminMediaListResponse { pub provider: String, pub bucket: String, pub public_base_url: String, pub items: Vec, } #[derive(Clone, Debug, Serialize)] pub struct AdminMediaDeleteResponse { pub deleted: bool, pub key: String, } #[derive(Clone, Debug, Serialize)] pub struct AdminMediaUploadItem { pub key: String, pub url: String, pub size_bytes: i64, } #[derive(Clone, Debug, Serialize)] pub struct AdminMediaUploadResponse { pub uploaded: Vec, } #[derive(Clone, Debug, Deserialize)] pub struct AdminMediaBatchDeleteRequest { #[serde(default)] pub keys: Vec, } #[derive(Clone, Debug, Serialize)] pub struct AdminMediaBatchDeleteResponse { pub deleted: Vec, pub failed: Vec, } #[derive(Clone, Debug, Serialize)] pub struct AdminMediaReplaceResponse { pub key: String, pub url: String, } #[derive(Clone, Debug, Deserialize)] pub struct AdminMediaMetadataPayload { pub key: String, #[serde(default)] pub title: Option, #[serde(default)] pub alt_text: Option, #[serde(default)] pub caption: Option, #[serde(default)] pub tags: Option>, #[serde(default)] pub notes: Option, } #[derive(Clone, Debug, Serialize)] pub struct AdminMediaMetadataResponse { pub 
saved: bool, pub key: String, pub title: Option, pub alt_text: Option, pub caption: Option, pub tags: Vec, pub notes: Option, } #[derive(Clone, Debug, Deserialize)] pub struct AdminMediaListQuery { pub prefix: Option, pub limit: Option, } #[derive(Clone, Debug, Deserialize)] pub struct AdminMediaDeleteQuery { pub key: String, } #[derive(Clone, Debug, Serialize)] pub struct AdminCommentBlacklistItem { pub id: i32, pub matcher_type: String, pub matcher_value: String, pub reason: Option, pub active: bool, pub expires_at: Option, pub created_at: String, pub updated_at: String, pub effective: bool, } #[derive(Clone, Debug, Deserialize)] pub struct AdminCommentBlacklistCreateRequest { pub matcher_type: String, pub matcher_value: String, pub reason: Option, pub active: Option, pub expires_at: Option, } #[derive(Clone, Debug, Deserialize)] pub struct AdminCommentBlacklistUpdateRequest { pub reason: Option, pub active: Option, pub expires_at: Option, #[serde(default)] pub clear_expires_at: bool, } #[derive(Clone, Debug, Serialize)] pub struct AdminCommentBlacklistDeleteResponse { pub deleted: bool, pub id: i32, } #[derive(Clone, Debug, Deserialize)] pub struct AdminCommentAnalyzeRequest { pub matcher_type: String, pub matcher_value: String, pub from: Option, pub to: Option, pub limit: Option, } #[derive(Clone, Debug, Deserialize)] pub struct AdminCommentAnalyzeLogsQuery { pub matcher_type: Option, pub matcher_value: Option, pub limit: Option, } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct AdminCommentAnalyzeSample { pub id: i32, pub created_at: String, pub post_slug: String, pub author: String, pub email: String, pub approved: bool, pub content_preview: String, } #[derive(Clone, Debug, Serialize)] pub struct AdminCommentAnalyzeResponse { pub matcher_type: String, pub matcher_value: String, pub total_comments: u64, pub pending_comments: u64, pub first_seen_at: Option, pub latest_seen_at: Option, pub distinct_posts: usize, pub analysis: String, pub samples: Vec, 
}

// NOTE(review): the generic type arguments in this span were lost in transit
// (`Option,`, `Vec,`, `Result>>`, `collect::()`, `DateTime::::from`). They are
// reconstructed here from the producers/consumers visible in this file — e.g.
// `parse_from_rfc3339` fixes `DateTime<FixedOffset>`, and
// `format_comment_analyze_log_item` fixes `Option<String>` /
// `Vec<AdminCommentAnalyzeSample>`. Verify against version control.

/// One persisted persona-analysis run, shaped for the admin API.
#[derive(Clone, Debug, Serialize)]
pub struct AdminCommentAnalyzeLogItem {
    pub id: i32,
    pub matcher_type: String,
    pub matcher_value: String,
    pub from_at: Option<String>,
    pub to_at: Option<String>,
    pub total_comments: u64,
    pub pending_comments: u64,
    pub distinct_posts: usize,
    pub analysis: String,
    pub samples: Vec<AdminCommentAnalyzeSample>,
    pub created_at: String,
}

/// Request body carrying raw markdown for AI metadata extraction.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminPostMetadataRequest {
    pub markdown: String,
}

/// Request body carrying raw markdown for AI polishing.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminPostPolishRequest {
    pub markdown: String,
}

/// Request body describing a review to polish with AI assistance.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminReviewPolishRequest {
    pub title: String,
    pub review_type: String,
    pub rating: i32,
    pub review_date: Option<String>,
    pub status: String,
    #[serde(default)]
    pub tags: Vec<String>,
    pub description: String,
}

/// Request body describing a post for AI cover-image generation.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminPostCoverImageRequest {
    pub title: String,
    pub description: Option<String>,
    pub category: Option<String>,
    #[serde(default)]
    pub tags: Vec<String>,
    pub post_type: String,
    pub slug: Option<String>,
    pub markdown: String,
}

/// Formats an optional fixed-offset timestamp with the given chrono pattern.
fn format_timestamp(
    value: Option<chrono::DateTime<chrono::FixedOffset>>,
    pattern: &str,
) -> Option<String> {
    value.map(|item| item.format(pattern).to_string())
}

/// Returns the trimmed text, falling back to `fallback` when the value is
/// missing or blank.
fn required_text(value: Option<&str>, fallback: &str) -> String {
    value
        .map(str::trim)
        .filter(|item| !item.is_empty())
        .unwrap_or(fallback)
        .to_string()
}

/// Trims an optional string, mapping blank results to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value.and_then(|item| {
        let trimmed = item.trim().to_string();
        if trimmed.is_empty() {
            None
        } else {
            Some(trimmed)
        }
    })
}

/// Parses an optional RFC3339 timestamp; blank or missing input yields
/// `Ok(None)`, malformed input is a `BadRequest`.
fn parse_optional_timestamp(
    value: Option<&str>,
) -> Result<Option<chrono::DateTime<chrono::FixedOffset>>> {
    let Some(value) = value.map(str::trim).filter(|value| !value.is_empty()) else {
        return Ok(None);
    };
    chrono::DateTime::parse_from_rfc3339(value)
        .map(Some)
        .map_err(|_| Error::BadRequest("expires_at 必须是 RFC3339 时间格式".to_string()))
}

/// Parses an optional RFC3339 or `YYYY-MM-DD` (midnight UTC) timestamp; blank
/// input yields `Ok(None)`, anything else malformed is a `BadRequest`.
fn parse_optional_datetime_utc(
    value: Option<&str>,
) -> Result<Option<chrono::DateTime<chrono::Utc>>> {
    let Some(value) = value.map(str::trim).filter(|value| !value.is_empty()) else {
        return Ok(None);
    };
    if let Ok(parsed) = chrono::DateTime::parse_from_rfc3339(value) {
        return Ok(Some(parsed.with_timezone(&chrono::Utc)));
    }
    if let Ok(date_only) = chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d") {
        let Some(naive) = date_only.and_hms_opt(0, 0, 0) else {
            return Ok(None);
        };
        return Ok(Some(chrono::DateTime::<chrono::Utc>::from_naive_utc_and_offset(
            naive,
            chrono::Utc,
        )));
    }
    Err(Error::BadRequest(
        "from/to 必须是 RFC3339 或 YYYY-MM-DD 格式".to_string(),
    ))
}

/// Truncates the trimmed text to at most `max_chars` characters (not bytes),
/// appending `...` when anything was cut.
fn truncate_chars(value: &str, max_chars: usize) -> String {
    let trimmed = value.trim();
    if trimmed.chars().count() <= max_chars {
        return trimmed.to_string();
    }
    let mut sliced = trimmed.chars().take(max_chars).collect::<String>();
    sliced.push_str("...");
    sliced
}

/// Persists one persona-analysis run (counts, analysis text, sampled comments)
/// into `comment_persona_analysis_logs`.
async fn save_comment_persona_analysis_log(
    ctx: &AppContext,
    matcher_type: &str,
    matcher_value: &str,
    from: Option<chrono::DateTime<chrono::Utc>>,
    to: Option<chrono::DateTime<chrono::Utc>>,
    total_comments: u64,
    pending_comments: u64,
    distinct_posts: usize,
    analysis_text: &str,
    samples: &[AdminCommentAnalyzeSample],
) -> Result<()> {
    // Best-effort serialization: a failure simply stores NULL samples.
    let sample_json = serde_json::to_value(samples).ok();
    comment_persona_analysis_logs::ActiveModel {
        matcher_type: Set(matcher_type.to_string()),
        matcher_value: Set(matcher_value.to_string()),
        from_at: Set(from.map(|value| value.fixed_offset())),
        to_at: Set(to.map(|value| value.fixed_offset())),
        // The columns are i32; clamp the wider counters before casting.
        total_comments: Set(total_comments.min(i32::MAX as u64) as i32),
        pending_comments: Set(pending_comments.min(i32::MAX as u64) as i32),
        distinct_posts: Set(distinct_posts.min(i32::MAX as usize) as i32),
        analysis_text: Set(analysis_text.to_string()),
        sample_json: Set(sample_json),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    Ok(())
}

/// Converts a stored persona-analysis row back into the admin API shape,
/// tolerating missing/invalid `sample_json`.
fn format_comment_analyze_log_item(
    item: comment_persona_analysis_logs::Model,
) -> AdminCommentAnalyzeLogItem {
    let samples = item
        .sample_json
        .clone()
        .and_then(|value| serde_json::from_value::<Vec<AdminCommentAnalyzeSample>>(value).ok())
        .unwrap_or_default();
    AdminCommentAnalyzeLogItem {
        id: item.id,
        matcher_type: item.matcher_type,
        matcher_value: item.matcher_value,
        from_at: format_timestamp(item.from_at, "%Y-%m-%d %H:%M:%S UTC"),
        to_at: format_timestamp(item.to_at, "%Y-%m-%d %H:%M:%S UTC"),
        // Stored as i32; clamp negatives rather than wrap on the cast.
        total_comments: item.total_comments.max(0) as u64,
        pending_comments: item.pending_comments.max(0) as u64,
        distinct_posts: item.distinct_posts.max(0) as usize,
        analysis: item.analysis_text,
        samples,
        created_at: item.created_at.format("%Y-%m-%d %H:%M:%S").to_string(),
    }
}

/// Converts a blacklist row into its admin API shape, computing whether the
/// rule is currently effective (active and not yet expired).
fn format_comment_blacklist_item(item: comment_blacklist::Model) -> AdminCommentBlacklistItem {
    let now = chrono::Utc::now();
    let active = item.active.unwrap_or(true);
    let not_expired = item
        .expires_at
        .map(|value| chrono::DateTime::<chrono::Utc>::from(value) > now)
        .unwrap_or(true);
    AdminCommentBlacklistItem {
        id: item.id,
        matcher_type: item.matcher_type,
        matcher_value: item.matcher_value,
        reason: item.reason,
        active,
        expires_at: format_timestamp(item.expires_at, "%Y-%m-%d %H:%M:%S UTC"),
        created_at: item.created_at.format("%Y-%m-%d %H:%M:%S").to_string(),
        updated_at: item.updated_at.format("%Y-%m-%d %H:%M:%S").to_string(),
        effective: active && not_expired,
    }
}

/// Infers a storage file extension from the uploaded file name, falling back
/// to the MIME type, then to `bin`.
fn infer_media_extension(file_name: Option<&str>, content_type: Option<&str>) -> String {
    let from_name = file_name
        .and_then(|name| name.rsplit('.').next())
        .map(str::trim)
        .filter(|ext| !ext.is_empty())
        .map(str::to_ascii_lowercase);
    // Only trust name-derived extensions that look sane (short, alphanumeric).
    if let Some(ext) = from_name
        .as_deref()
        .filter(|ext| ext.chars().all(|ch| ch.is_ascii_alphanumeric()) && ext.len() <= 10)
    {
        return ext.to_string();
    }
    match content_type
        .unwrap_or_default()
        .trim()
        .to_ascii_lowercase()
        .as_str()
    {
        "image/png" => "png".to_string(),
        "image/jpeg" => "jpg".to_string(),
        "image/webp" => "webp".to_string(),
        "image/gif" => "gif".to_string(),
        "image/avif" => "avif".to_string(),
        "image/svg+xml" => "svg".to_string(),
        "application/pdf" => "pdf".to_string(),
        _ => "bin".to_string(),
    }
}

/// Normalizes a user-supplied object key: trims whitespace, strips leading
/// slashes, and maps empty results to `None`.
fn normalize_media_key(value: Option<String>) -> Option<String> {
    value.and_then(|raw| {
        let trimmed = raw.trim().trim_start_matches('/').to_string();
        if trimmed.is_empty() {
            None
        } else {
            Some(trimmed)
        }
    })
}

/// Merges a stored-object summary with its optional DB metadata row.
fn build_media_object_response(
    item: storage::StoredObjectSummary,
    metadata:
// NOTE(review): this span completes `build_media_object_response`, then maps a
// site-settings row into the admin response (`tech_stack_values`,
// `music_playlist_values`, `build_settings_response`) and implements the
// session endpoints (`session_status`, `session_login`; `session_logout`
// begins at the end of the span). Generic type arguments appear stripped in
// transit (`Option,`, `Vec,`) — code kept byte-identical; recover the exact
// parameters from version control.
Option<&crate::models::_entities::media_assets::Model>, ) -> AdminMediaObjectResponse { AdminMediaObjectResponse { key: item.key, url: item.url, size_bytes: item.size_bytes, last_modified: item.last_modified, title: metadata.and_then(|entry| entry.title.clone()), alt_text: metadata.and_then(|entry| entry.alt_text.clone()), caption: metadata.and_then(|entry| entry.caption.clone()), tags: metadata .map(media_assets::tag_list) .unwrap_or_default(), notes: metadata.and_then(|entry| entry.notes.clone()), } } fn tech_stack_values(value: &Option) -> Vec { value .as_ref() .and_then(serde_json::Value::as_array) .cloned() .unwrap_or_default() .into_iter() .filter_map(|item| item.as_str().map(ToString::to_string)) .collect() } fn music_playlist_values( value: &Option, ) -> Vec { value .as_ref() .and_then(serde_json::Value::as_array) .cloned() .unwrap_or_default() .into_iter() .filter_map(|item| serde_json::from_value::(item).ok()) .filter(|item| !item.title.trim().is_empty() && !item.url.trim().is_empty()) .collect() } fn build_settings_response( item: crate::models::_entities::site_settings::Model, ai_chunks_count: u64, ) -> AdminSiteSettingsResponse { let ai_providers = site_settings::ai_provider_configs(&item); let ai_active_provider_id = site_settings::active_ai_provider_id(&item); let turnstile_site_key = crate::services::turnstile::site_key(&item); let turnstile_secret_key = crate::services::turnstile::secret_key(&item); let web_push_vapid_public_key = crate::services::web_push::public_key(&item); let web_push_vapid_private_key = crate::services::web_push::private_key(&item); let web_push_vapid_subject = crate::services::web_push::vapid_subject(&item); AdminSiteSettingsResponse { id: item.id, site_name: item.site_name, site_short_name: item.site_short_name, site_url: item.site_url, site_title: item.site_title, site_description: item.site_description, hero_title: item.hero_title, hero_subtitle: item.hero_subtitle, owner_name: item.owner_name, owner_title: 
item.owner_title, owner_bio: item.owner_bio, owner_avatar_url: item.owner_avatar_url, social_github: item.social_github, social_twitter: item.social_twitter, social_email: item.social_email, location: item.location, tech_stack: tech_stack_values(&item.tech_stack), music_playlist: music_playlist_values(&item.music_playlist), ai_enabled: item.ai_enabled.unwrap_or(false), paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true), comment_turnstile_enabled: item.comment_turnstile_enabled.unwrap_or(false), subscription_turnstile_enabled: item.subscription_turnstile_enabled.unwrap_or(false), web_push_enabled: item.web_push_enabled.unwrap_or(false), turnstile_site_key, turnstile_secret_key, web_push_vapid_public_key, web_push_vapid_private_key, web_push_vapid_subject, ai_provider: item.ai_provider, ai_api_base: item.ai_api_base, ai_api_key: item.ai_api_key, ai_chat_model: item.ai_chat_model, ai_image_provider: item.ai_image_provider, ai_image_api_base: item.ai_image_api_base, ai_image_api_key: item.ai_image_api_key, ai_image_model: item.ai_image_model, ai_providers, ai_active_provider_id, ai_embedding_model: item.ai_embedding_model, ai_system_prompt: item.ai_system_prompt, ai_top_k: item.ai_top_k, ai_chunk_size: item.ai_chunk_size, ai_last_indexed_at: format_timestamp(item.ai_last_indexed_at, "%Y-%m-%d %H:%M:%S UTC"), ai_chunks_count, ai_local_embedding: ai::local_embedding_label().to_string(), media_storage_provider: item.media_storage_provider, media_r2_account_id: item.media_r2_account_id, media_r2_bucket: item.media_r2_bucket, media_r2_public_base_url: item.media_r2_public_base_url, media_r2_access_key_id: item.media_r2_access_key_id, media_r2_secret_access_key: item.media_r2_secret_access_key, seo_default_og_image: item.seo_default_og_image, seo_default_twitter_handle: item.seo_default_twitter_handle, notification_webhook_url: item.notification_webhook_url, notification_channel_type: item .notification_channel_type .unwrap_or_else(|| 
"webhook".to_string()), notification_comment_enabled: item.notification_comment_enabled.unwrap_or(false), notification_friend_link_enabled: item.notification_friend_link_enabled.unwrap_or(false), subscription_popup_enabled: item .subscription_popup_enabled .unwrap_or_else(site_settings::default_subscription_popup_enabled), subscription_popup_title: item .subscription_popup_title .unwrap_or_else(site_settings::default_subscription_popup_title), subscription_popup_description: item .subscription_popup_description .unwrap_or_else(site_settings::default_subscription_popup_description), subscription_popup_delay_seconds: item .subscription_popup_delay_seconds .unwrap_or_else(site_settings::default_subscription_popup_delay_seconds), search_synonyms: tech_stack_values(&item.search_synonyms), } } #[debug_handler] pub async fn session_status(headers: HeaderMap) -> Result { format::json(build_session_response(resolve_admin_identity(&headers))) } #[debug_handler] pub async fn session_login( State(ctx): State, Json(payload): Json, ) -> Result { if !local_login_enabled() { return unauthorized("Local admin login is disabled"); } if !validate_admin_credentials(payload.username.trim(), payload.password.trim()) { return unauthorized("Invalid credentials"); } let (identity, _token, cookie) = start_local_session(&admin_username()); admin_audit::log_event( &ctx, Some(&identity), "admin.login", "admin_session", None, Some(identity.username.clone()), None, ) .await?; let mut response = format::json(build_session_response(Some(identity.clone())))?; response.headers_mut().append( header::SET_COOKIE, cookie .parse() .map_err(|error| Error::BadRequest(format!("invalid session cookie: {error}")))?, ); Ok(response) } #[debug_handler] pub async fn session_logout(headers: HeaderMap, State(ctx): State) -> Result { let before = resolve_admin_identity(&headers); if matches!( before.as_ref().map(|item| item.source.as_str()), Some("local") ) { clear_local_session(&headers); } if let Some(identity) = 
// NOTE(review): remainder of `session_logout` (audit log + cookie clearing),
// then the `dashboard` handler — it syncs markdown posts, counts
// posts/comments/categories/tags/reviews/links/AI chunks, classifies post
// states and visibility in one pass, and builds the recent/pending lists.
// `analytics_overview` starts at the end of the span. Code kept byte-identical
// because the exact statement ordering (sync before counting) matters.
before.as_ref() { admin_audit::log_event( &ctx, Some(identity), "admin.logout", "admin_session", None, identity .email .clone() .or_else(|| Some(identity.username.clone())), None, ) .await?; } let after = resolve_admin_identity(&headers).filter(|item| item.source != "local"); let mut response = format::json(build_session_response(after))?; response.headers_mut().append( header::SET_COOKIE, clear_local_session_cookie() .parse() .map_err(|error| Error::BadRequest(format!("invalid logout cookie: {error}")))?, ); Ok(response) } #[debug_handler] pub async fn dashboard(headers: HeaderMap, State(ctx): State) -> Result { check_auth(&headers)?; content::sync_markdown_posts(&ctx).await?; let all_posts = posts::Entity::find().all(&ctx.db).await?; let total_posts = all_posts.len() as u64; let total_comments = comments::Entity::find().count(&ctx.db).await?; let pending_comments = comments::Entity::find() .filter(comments::Column::Approved.eq(false)) .count(&ctx.db) .await?; let total_categories = crate::models::_entities::categories::Entity::find() .count(&ctx.db) .await?; let total_tags = crate::models::_entities::tags::Entity::find() .count(&ctx.db) .await?; let total_reviews = reviews::Entity::find().count(&ctx.db).await?; let total_links = friend_links::Entity::find().count(&ctx.db).await?; let pending_links = friend_links::Entity::find() .filter(friend_links::Column::Status.eq("pending")) .count(&ctx.db) .await?; let ai_chunks_count = ai_chunks::Entity::find().count(&ctx.db).await?; let site_settings = site_settings::load_current(&ctx).await?; let now = chrono::Utc::now().fixed_offset(); let mut draft_posts = 0_u64; let mut scheduled_posts = 0_u64; let mut offline_posts = 0_u64; let mut expired_posts = 0_u64; let mut private_posts = 0_u64; let mut unlisted_posts = 0_u64; for post in &all_posts { let effective_state = content::effective_post_state( post.status .as_deref() .unwrap_or(content::POST_STATUS_PUBLISHED), post.publish_at, post.unpublish_at, now, ); let visibility 
= content::normalize_post_visibility(post.visibility.as_deref()); match effective_state.as_str() { content::POST_STATUS_DRAFT => draft_posts += 1, content::POST_STATUS_OFFLINE => offline_posts += 1, "scheduled" => scheduled_posts += 1, "expired" => expired_posts += 1, _ => {} } match visibility.as_str() { content::POST_VISIBILITY_PRIVATE => private_posts += 1, content::POST_VISIBILITY_UNLISTED => unlisted_posts += 1, _ => {} } } let mut recent_posts = all_posts.clone().into_iter().collect::>(); recent_posts.sort_by(|left, right| right.created_at.cmp(&left.created_at)); let recent_posts = recent_posts .into_iter() .take(6) .map(|post| DashboardPostItem { id: post.id, title: required_text(post.title.as_deref(), "Untitled post"), slug: post.slug, category: required_text(post.category.as_deref(), "Uncategorized"), post_type: required_text(post.post_type.as_deref(), "article"), pinned: post.pinned.unwrap_or(false), status: content::effective_post_state( post.status .as_deref() .unwrap_or(content::POST_STATUS_PUBLISHED), post.publish_at, post.unpublish_at, now, ), visibility: content::normalize_post_visibility(post.visibility.as_deref()), created_at: post.created_at.format("%Y-%m-%d %H:%M").to_string(), }) .collect::>(); let pending_comment_rows = comments::Entity::find() .filter(comments::Column::Approved.eq(false)) .order_by_desc(comments::Column::CreatedAt) .limit(8) .all(&ctx.db) .await? 
.into_iter() .map(|comment| DashboardCommentItem { id: comment.id, author: required_text(comment.author.as_deref(), "Anonymous"), post_slug: required_text(comment.post_slug.as_deref(), "unknown-post"), scope: required_text(Some(comment.scope.as_str()), "global"), excerpt: required_text(comment.content.as_deref(), ""), approved: comment.approved.unwrap_or(false), created_at: comment.created_at.format("%Y-%m-%d %H:%M").to_string(), }) .collect::>(); let pending_friend_links = friend_links::Entity::find() .filter(friend_links::Column::Status.eq("pending")) .order_by_desc(friend_links::Column::CreatedAt) .limit(6) .all(&ctx.db) .await? .into_iter() .map(|link| DashboardFriendLinkItem { id: link.id, site_name: required_text(link.site_name.as_deref(), "Unnamed site"), site_url: link.site_url, category: required_text(link.category.as_deref(), "Other"), status: required_text(link.status.as_deref(), "pending"), created_at: link.created_at.format("%Y-%m-%d %H:%M").to_string(), }) .collect::>(); let recent_reviews = reviews::Entity::find() .order_by_desc(reviews::Column::CreatedAt) .limit(6) .all(&ctx.db) .await? 
.into_iter() .map(|review| DashboardReviewItem { id: review.id, title: required_text(review.title.as_deref(), "Untitled review"), review_type: required_text(review.review_type.as_deref(), "game"), rating: review.rating.unwrap_or(0), status: required_text(review.status.as_deref(), "completed"), review_date: required_text(review.review_date.as_deref(), ""), }) .collect::>(); format::json(AdminDashboardResponse { stats: DashboardStats { total_posts, total_comments, pending_comments, draft_posts, scheduled_posts, offline_posts, expired_posts, private_posts, unlisted_posts, total_categories, total_tags, total_reviews, total_links, pending_links, ai_chunks: ai_chunks_count, ai_enabled: site_settings.ai_enabled.unwrap_or(false), }, site: DashboardSiteSummary { site_name: required_text(site_settings.site_name.as_deref(), "Unnamed site"), site_url: required_text(site_settings.site_url.as_deref(), ""), ai_enabled: site_settings.ai_enabled.unwrap_or(false), ai_chunks: ai_chunks_count, ai_last_indexed_at: format_timestamp( site_settings.ai_last_indexed_at, "%Y-%m-%d %H:%M:%S UTC", ), }, recent_posts, pending_comments: pending_comment_rows, pending_friend_links, recent_reviews, }) } #[debug_handler] pub async fn analytics_overview( headers: HeaderMap, State(ctx): State, ) -> Result { check_auth(&headers)?; format::json(analytics::build_admin_analytics(&ctx).await?) 
// NOTE(review): site-settings get/update (with audit logging), AI reindex and
// provider connectivity tests, R2 connectivity check, media list/upload/
// delete/batch-delete/metadata/replace handlers, and comment-blacklist
// list/create. Every handler authenticates via `check_auth(&headers)` first.
// `update_comment_blacklist` is truncated at the end of this chunk — its body
// continues beyond this view; do not infer its behavior from here. Generic
// type arguments appear stripped in transit; code kept byte-identical.
} #[debug_handler] pub async fn get_site_settings( headers: HeaderMap, State(ctx): State, ) -> Result { check_auth(&headers)?; let current = site_settings::load_current(&ctx).await?; let ai_chunks_count = ai_chunks::Entity::find().count(&ctx.db).await?; format::json(build_settings_response(current, ai_chunks_count)) } #[debug_handler] pub async fn update_site_settings( headers: HeaderMap, State(ctx): State, Json(params): Json, ) -> Result { let actor = check_auth(&headers)?; let current = site_settings::load_current(&ctx).await?; let mut item = current; params.apply(&mut item); let item = item.into_active_model().reset_all(); let updated = item.update(&ctx.db).await?; let ai_chunks_count = ai_chunks::Entity::find().count(&ctx.db).await?; admin_audit::log_event( &ctx, Some(&actor), "site_settings.update", "site_settings", Some(updated.id.to_string()), updated.site_name.clone(), None, ) .await?; format::json(build_settings_response(updated, ai_chunks_count)) } #[debug_handler] pub async fn reindex_ai(headers: HeaderMap, State(ctx): State) -> Result { check_auth(&headers)?; let summary = ai::rebuild_index(&ctx).await?; format::json(AdminAiReindexResponse { indexed_chunks: summary.indexed_chunks, last_indexed_at: format_timestamp( summary.last_indexed_at.map(Into::into), "%Y-%m-%d %H:%M:%S UTC", ), }) } #[debug_handler] pub async fn test_ai_provider( headers: HeaderMap, Json(payload): Json, ) -> Result { check_auth(&headers)?; let result = ai::test_provider_connectivity( &payload.provider.provider, payload.provider.api_base.as_deref().unwrap_or_default(), payload.provider.api_key.as_deref().unwrap_or_default(), payload.provider.chat_model.as_deref().unwrap_or_default(), ) .await?; format::json(AdminAiProviderTestResponse { provider: result.provider, endpoint: result.endpoint, chat_model: result.chat_model, reply_preview: result.reply_preview, }) } #[debug_handler] pub async fn test_ai_image_provider( headers: HeaderMap, Json(payload): Json, ) -> Result { 
check_auth(&headers)?; let result = ai::test_image_provider_connectivity( &payload.provider, &payload.api_base, &payload.api_key, &payload.image_model, ) .await?; format::json(AdminAiImageProviderTestResponse { provider: result.provider, endpoint: result.endpoint, image_model: result.image_model, result_preview: result.result_preview, }) } #[debug_handler] pub async fn test_r2_storage( headers: HeaderMap, State(ctx): State, ) -> Result { check_auth(&headers)?; let settings = storage::require_r2_settings(&ctx).await?; let bucket = storage::test_r2_connectivity(&ctx).await?; format::json(AdminR2ConnectivityResponse { bucket, public_base_url: settings.public_base_url, }) } #[debug_handler] pub async fn list_media_objects( headers: HeaderMap, State(ctx): State, Query(query): Query, ) -> Result { check_auth(&headers)?; let settings = storage::require_r2_settings(&ctx).await?; let objects = storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200)) .await?; let keys = objects .iter() .map(|item| item.key.clone()) .collect::>(); let metadata_map = media_assets::list_by_keys(&ctx, &keys).await?; let items = objects .into_iter() .map(|item| { let metadata = metadata_map.get(&item.key); build_media_object_response(item, metadata) }) .collect::>(); format::json(AdminMediaListResponse { provider: settings.provider_name, bucket: settings.bucket, public_base_url: settings.public_base_url, items, }) } #[debug_handler] pub async fn delete_media_object( headers: HeaderMap, State(ctx): State, Query(query): Query, ) -> Result { check_auth(&headers)?; let key = query.key.trim(); if key.is_empty() { return Err(Error::BadRequest("缺少对象 key".to_string())); } storage::delete_object(&ctx, key).await?; if let Err(error) = media_assets::delete_by_key(&ctx, key).await { tracing::warn!(?error, key, "failed to delete media metadata after object deletion"); } format::json(AdminMediaDeleteResponse { deleted: true, key: key.to_string(), }) } #[debug_handler] pub async fn 
upload_media_objects( headers: HeaderMap, State(ctx): State, mut multipart: Multipart, ) -> Result { check_auth(&headers)?; let mut prefix = "uploads".to_string(); let mut uploaded = Vec::::new(); while let Some(field) = multipart .next_field() .await .map_err(|error| Error::BadRequest(error.to_string()))? { let name = field.name().unwrap_or_default().to_string(); if name == "prefix" { if let Ok(value) = field.text().await { if let Some(next_prefix) = trim_to_option(Some(value)) { prefix = next_prefix.trim_matches('/').to_string(); } } continue; } let file_name = field.file_name().map(ToString::to_string); let content_type = field.content_type().map(ToString::to_string); let bytes = field .bytes() .await .map_err(|error| Error::BadRequest(error.to_string()))?; if bytes.is_empty() { continue; } let extension = infer_media_extension(file_name.as_deref(), content_type.as_deref()); let key = storage::build_object_key(&prefix, file_name.as_deref().unwrap_or("asset"), &extension); let stored = storage::upload_bytes_to_r2( &ctx, &key, bytes.to_vec(), content_type.as_deref(), Some("public, max-age=31536000, immutable"), ) .await?; uploaded.push(AdminMediaUploadItem { key: stored.key, url: stored.url, size_bytes: bytes.len() as i64, }); } if uploaded.is_empty() { return Err(Error::BadRequest("请至少选择一个文件上传".to_string())); } format::json(AdminMediaUploadResponse { uploaded }) } #[debug_handler] pub async fn batch_delete_media_objects( headers: HeaderMap, State(ctx): State, Json(payload): Json, ) -> Result { check_auth(&headers)?; let keys = payload .keys .into_iter() .filter_map(|key| normalize_media_key(Some(key))) .collect::>(); if keys.is_empty() { return Err(Error::BadRequest("请至少传入一个对象 key".to_string())); } let mut deleted = Vec::new(); let mut failed = Vec::new(); for key in keys { match storage::delete_object(&ctx, &key).await { Ok(()) => { if let Err(error) = media_assets::delete_by_key(&ctx, &key).await { tracing::warn!(?error, key, "failed to delete media metadata 
after batch removal"); } deleted.push(key) } Err(_) => failed.push(key), } } format::json(AdminMediaBatchDeleteResponse { deleted, failed }) } #[debug_handler] pub async fn update_media_object_metadata( headers: HeaderMap, State(ctx): State, Json(payload): Json, ) -> Result { check_auth(&headers)?; let key = payload.key.trim(); if key.is_empty() { return Err(Error::BadRequest("缺少对象 key".to_string())); } let metadata = media_assets::upsert_by_key( &ctx, key, media_assets::MediaAssetMetadataInput { title: payload.title, alt_text: payload.alt_text, caption: payload.caption, tags: payload.tags, notes: payload.notes, }, ) .await?; format::json(AdminMediaMetadataResponse { saved: true, key: metadata.object_key.clone(), title: metadata.title.clone(), alt_text: metadata.alt_text.clone(), caption: metadata.caption.clone(), tags: media_assets::tag_list(&metadata), notes: metadata.notes.clone(), }) } #[debug_handler] pub async fn replace_media_object( headers: HeaderMap, State(ctx): State, mut multipart: Multipart, ) -> Result { check_auth(&headers)?; let mut key: Option = None; let mut bytes: Option> = None; let mut content_type: Option = None; while let Some(field) = multipart .next_field() .await .map_err(|error| Error::BadRequest(error.to_string()))? { let name = field.name().unwrap_or_default().to_string(); if name == "key" { let text = field .text() .await .map_err(|error| Error::BadRequest(error.to_string()))?; key = normalize_media_key(Some(text)); continue; } if bytes.is_none() { content_type = field.content_type().map(ToString::to_string); bytes = Some( field .bytes() .await .map_err(|error| Error::BadRequest(error.to_string()))? 
.to_vec(), ); } } let key = key.ok_or_else(|| Error::BadRequest("缺少待替换对象 key".to_string()))?; let bytes = bytes.ok_or_else(|| Error::BadRequest("请先选择替换文件".to_string()))?; if bytes.is_empty() { return Err(Error::BadRequest("替换文件内容为空".to_string())); } let stored = storage::upload_bytes_to_r2( &ctx, &key, bytes, content_type.as_deref(), Some("public, max-age=31536000, immutable"), ) .await?; format::json(AdminMediaReplaceResponse { key: stored.key, url: stored.url, }) } #[debug_handler] pub async fn list_comment_blacklist( headers: HeaderMap, State(ctx): State, ) -> Result { check_auth(&headers)?; let items = comment_blacklist::Entity::find() .order_by_desc(comment_blacklist::Column::CreatedAt) .all(&ctx.db) .await? .into_iter() .map(format_comment_blacklist_item) .collect::>(); format::json(items) } #[debug_handler] pub async fn create_comment_blacklist( headers: HeaderMap, State(ctx): State, Json(payload): Json, ) -> Result { check_auth(&headers)?; let matcher_type = comment_guard::normalize_matcher_type(&payload.matcher_type).ok_or_else(|| { Error::BadRequest("matcher_type 仅支持 ip / email / user_agent".to_string()) })?; let matcher_value = comment_guard::normalize_matcher_value(matcher_type, &payload.matcher_value) .ok_or_else(|| Error::BadRequest("matcher_value 不能为空".to_string()))?; let expires_at = parse_optional_timestamp(payload.expires_at.as_deref())?; let item = comment_blacklist::ActiveModel { matcher_type: Set(matcher_type.to_string()), matcher_value: Set(matcher_value), reason: Set(trim_to_option(payload.reason)), active: Set(Some(payload.active.unwrap_or(true))), expires_at: Set(expires_at), ..Default::default() } .insert(&ctx.db) .await?; format::json(format_comment_blacklist_item(item)) } #[debug_handler] pub async fn update_comment_blacklist( headers: HeaderMap, Path(id): Path, State(ctx): State, Json(payload): Json, ) -> Result { check_auth(&headers)?; let item = comment_blacklist::Entity::find_by_id(id) .one(&ctx.db) .await? 
.ok_or(Error::NotFound)?; let mut item = item.into_active_model(); if let Some(reason) = payload.reason { item.reason = Set(trim_to_option(Some(reason))); } if let Some(active) = payload.active { item.active = Set(Some(active)); } if payload.clear_expires_at { item.expires_at = Set(None); } else if payload.expires_at.is_some() { item.expires_at = Set(parse_optional_timestamp(payload.expires_at.as_deref())?); } let updated = item.update(&ctx.db).await?; format::json(format_comment_blacklist_item(updated)) } #[debug_handler] pub async fn delete_comment_blacklist( headers: HeaderMap, Path(id): Path, State(ctx): State, ) -> Result { check_auth(&headers)?; if let Some(item) = comment_blacklist::Entity::find_by_id(id) .one(&ctx.db) .await? { item.delete(&ctx.db).await?; } format::json(AdminCommentBlacklistDeleteResponse { deleted: true, id }) } #[debug_handler] pub async fn list_comment_persona_analysis_logs( headers: HeaderMap, State(ctx): State, Query(query): Query, ) -> Result { check_auth(&headers)?; let matcher_type = query .matcher_type .as_deref() .map(str::trim) .filter(|value| !value.is_empty()) .map(|value| { comment_guard::normalize_matcher_type(value).ok_or_else(|| { Error::BadRequest("matcher_type 仅支持 ip / email / user_agent".to_string()) }) }) .transpose()?; let matcher_value = query .matcher_value .as_deref() .map(str::trim) .filter(|value| !value.is_empty()) .map(|value| { if let Some(matcher_type) = matcher_type { comment_guard::normalize_matcher_value(matcher_type, value) } else { Some(value.to_string()) } }) .flatten(); if query.matcher_value.is_some() && matcher_value.is_none() { return Err(Error::BadRequest("matcher_value 不能为空".to_string())); } let mut query_builder = comment_persona_analysis_logs::Entity::find(); if let Some(matcher_type) = matcher_type { query_builder = query_builder .filter(comment_persona_analysis_logs::Column::MatcherType.eq(matcher_type)); } if let Some(matcher_value) = matcher_value { query_builder = query_builder 
.filter(comment_persona_analysis_logs::Column::MatcherValue.eq(matcher_value)); } let limit = query.limit.unwrap_or(20).clamp(1, 100); let items = query_builder .order_by_desc(comment_persona_analysis_logs::Column::CreatedAt) .limit(limit) .all(&ctx.db) .await? .into_iter() .map(format_comment_analyze_log_item) .collect::>(); format::json(items) } #[debug_handler] pub async fn analyze_comment_persona( headers: HeaderMap, State(ctx): State, Json(payload): Json, ) -> Result { check_auth(&headers)?; let matcher_type = comment_guard::normalize_matcher_type(&payload.matcher_type).ok_or_else(|| { Error::BadRequest("matcher_type 仅支持 ip / email / user_agent".to_string()) })?; let matcher_value = comment_guard::normalize_matcher_value(matcher_type, &payload.matcher_value) .ok_or_else(|| Error::BadRequest("matcher_value 不能为空".to_string()))?; let from = parse_optional_datetime_utc(payload.from.as_deref())?; let to = parse_optional_datetime_utc(payload.to.as_deref())?; let limit = payload.limit.unwrap_or(20).clamp(5, 80); let build_query = || { let mut query = comments::Entity::find(); query = match matcher_type { comment_guard::MATCHER_TYPE_IP => { query.filter(comments::Column::IpAddress.eq(&matcher_value)) } comment_guard::MATCHER_TYPE_EMAIL => { query.filter(comments::Column::Email.eq(&matcher_value)) } comment_guard::MATCHER_TYPE_USER_AGENT => { query.filter(comments::Column::UserAgent.eq(&matcher_value)) } _ => query, }; if let Some(from) = from { query = query.filter(comments::Column::CreatedAt.gte(from)); } if let Some(to) = to { query = query.filter(comments::Column::CreatedAt.lte(to)); } query }; let total_comments = build_query().count(&ctx.db).await?; if total_comments == 0 { let analysis = "当前条件下没有匹配评论,无法生成画像。".to_string(); save_comment_persona_analysis_log( &ctx, matcher_type, &matcher_value, from, to, 0, 0, 0, &analysis, &[], ) .await?; return format::json(AdminCommentAnalyzeResponse { matcher_type: matcher_type.to_string(), matcher_value, total_comments: 0, 
pending_comments: 0, first_seen_at: None, latest_seen_at: None, distinct_posts: 0, analysis, samples: Vec::new(), }); } let pending_comments = build_query() .filter(comments::Column::Approved.eq(false)) .count(&ctx.db) .await?; let first_item = build_query() .order_by_asc(comments::Column::CreatedAt) .one(&ctx.db) .await?; let latest_item = build_query() .order_by_desc(comments::Column::CreatedAt) .one(&ctx.db) .await?; let distinct_posts = build_query() .select_only() .column(comments::Column::PostSlug) .distinct() .into_tuple::>() .all(&ctx.db) .await? .into_iter() .filter_map(|item| item.map(|value| value.trim().to_string())) .filter(|value| !value.is_empty()) .collect::>() .len(); let sample_rows = build_query() .order_by_desc(comments::Column::CreatedAt) .limit(limit) .all(&ctx.db) .await?; let samples = sample_rows .iter() .map(|item| AdminCommentAnalyzeSample { id: item.id, created_at: item.created_at.format("%Y-%m-%d %H:%M:%S").to_string(), post_slug: required_text(item.post_slug.as_deref(), "unknown-post"), author: required_text(item.author.as_deref(), "匿名"), email: required_text(item.email.as_deref(), ""), approved: item.approved.unwrap_or(false), content_preview: truncate_chars(item.content.as_deref().unwrap_or_default(), 220), }) .collect::>(); let sample_text = samples .iter() .map(|item| { format!( "- [{}] {} | post={} | author={} | status={} | content={}", item.id, item.created_at, item.post_slug, item.author, if item.approved { "approved" } else { "pending" }, item.content_preview ) }) .collect::>() .join("\n"); let analysis = ai::admin_chat_completion( &ctx, "你是博客评论风控分析助手。请输出中文,先结论后细节,不要编造。", &format!( "请基于以下评论画像数据,输出:\n\ 1) 风险等级(低/中/高)和理由;\n\ 2) 行为特征总结;\n\ 3) 建议动作(通过/观察/限速/临时封禁/永久封禁)及理由;\n\ 4) 误伤风险提示。\n\n\ 画像维度: type={matcher_type}, value={matcher_value}\n\ 评论总数: {total_comments}\n\ 待审核数: {pending_comments}\n\ 涉及文章数: {distinct_posts}\n\ 时间范围: from={} to={}\n\ 样本:\n{}", payload.from.as_deref().unwrap_or("-"), payload.to.as_deref().unwrap_or("-"), 
sample_text ), ) .await?; save_comment_persona_analysis_log( &ctx, matcher_type, &matcher_value, from, to, total_comments, pending_comments, distinct_posts, &analysis, &samples, ) .await?; format::json(AdminCommentAnalyzeResponse { matcher_type: matcher_type.to_string(), matcher_value, total_comments, pending_comments, first_seen_at: first_item .map(|item| item.created_at.format("%Y-%m-%d %H:%M:%S").to_string()), latest_seen_at: latest_item .map(|item| item.created_at.format("%Y-%m-%d %H:%M:%S").to_string()), distinct_posts, analysis, samples, }) } #[debug_handler] pub async fn generate_post_metadata( headers: HeaderMap, State(ctx): State, Json(payload): Json, ) -> Result { check_auth(&headers)?; format::json(ai::generate_post_metadata(&ctx, &payload.markdown).await?) } #[debug_handler] pub async fn polish_post_markdown( headers: HeaderMap, State(ctx): State, Json(payload): Json, ) -> Result { check_auth(&headers)?; format::json(ai::polish_post_markdown(&ctx, &payload.markdown).await?) } #[debug_handler] pub async fn polish_review_description( headers: HeaderMap, State(ctx): State, Json(payload): Json, ) -> Result { check_auth(&headers)?; format::json( ai::polish_review_description( &ctx, &payload.title, &payload.review_type, payload.rating, payload.review_date.as_deref(), &payload.status, &payload.tags, &payload.description, ) .await?, ) } #[debug_handler] pub async fn generate_post_cover_image( headers: HeaderMap, State(ctx): State, Json(payload): Json, ) -> Result { check_auth(&headers)?; format::json( ai::generate_post_cover_image( &ctx, &payload.title, payload.description.as_deref(), payload.category.as_deref(), &payload.tags, &payload.post_type, payload.slug.as_deref(), &payload.markdown, ) .await?, ) } fn review_cover_extension( file_name: Option<&str>, content_type: Option<&str>, ) -> Option<&'static str> { let from_file_name = file_name .and_then(|name| name.rsplit('.').next()) .map(|ext| ext.trim().to_ascii_lowercase()); match from_file_name.as_deref() { 
Some("png") => return Some("png"), Some("jpg") | Some("jpeg") => return Some("jpg"), Some("webp") => return Some("webp"), Some("gif") => return Some("gif"), Some("avif") => return Some("avif"), Some("svg") => return Some("svg"), _ => {} } match content_type .unwrap_or_default() .trim() .to_ascii_lowercase() .as_str() { "image/png" => Some("png"), "image/jpeg" => Some("jpg"), "image/webp" => Some("webp"), "image/gif" => Some("gif"), "image/avif" => Some("avif"), "image/svg+xml" => Some("svg"), _ => None, } } #[debug_handler] pub async fn upload_review_cover_image( headers: HeaderMap, State(ctx): State, mut multipart: Multipart, ) -> Result { check_auth(&headers)?; let field = multipart .next_field() .await .map_err(|error| Error::BadRequest(error.to_string()))? .ok_or_else(|| Error::BadRequest("请先选择图片文件".to_string()))?; let file_name = field.file_name().map(ToString::to_string); let content_type = field.content_type().map(ToString::to_string); let extension = review_cover_extension(file_name.as_deref(), content_type.as_deref()) .ok_or_else(|| Error::BadRequest("仅支持常见图片格式上传".to_string()))?; let bytes = field .bytes() .await .map_err(|error| Error::BadRequest(error.to_string()))?; if bytes.is_empty() { return Err(Error::BadRequest("上传的图片内容为空".to_string())); } let key = crate::services::storage::build_object_key( "review-covers", file_name.as_deref().unwrap_or("review-cover"), extension, ); let stored = crate::services::storage::upload_bytes_to_r2( &ctx, &key, bytes.to_vec(), content_type.as_deref(), Some("public, max-age=31536000, immutable"), ) .await?; format::json(AdminImageUploadResponse { url: stored.url, key: stored.key, }) } pub fn routes() -> Routes { Routes::new() .prefix("/api/admin") .add("/session", get(session_status)) .add("/session", delete(session_logout)) .add("/session/login", post(session_login)) .add("/dashboard", get(dashboard)) .add("/analytics", get(analytics_overview)) .add("/site-settings", get(get_site_settings)) .add("/site-settings", 
patch(update_site_settings)) .add("/site-settings", put(update_site_settings)) .add("/ai/reindex", post(reindex_ai)) .add("/ai/test-provider", post(test_ai_provider)) .add("/ai/test-image-provider", post(test_ai_image_provider)) .add("/storage/r2/test", post(test_r2_storage)) .add( "/storage/media", get(list_media_objects) .post(upload_media_objects) .delete(delete_media_object), ) .add( "/storage/media/batch-delete", post(batch_delete_media_objects), ) .add("/storage/media/metadata", patch(update_media_object_metadata)) .add("/storage/media/replace", post(replace_media_object)) .add( "/comments/blacklist", get(list_comment_blacklist).post(create_comment_blacklist), ) .add( "/comments/blacklist/{id}", patch(update_comment_blacklist).delete(delete_comment_blacklist), ) .add( "/comments/analyze/logs", get(list_comment_persona_analysis_logs), ) .add("/comments/analyze", post(analyze_comment_persona)) .add("/ai/post-metadata", post(generate_post_metadata)) .add("/ai/polish-post", post(polish_post_markdown)) .add("/ai/polish-review", post(polish_review_description)) .add("/ai/post-cover", post(generate_post_cover_image)) .add("/storage/review-cover", post(upload_review_cover_image)) }