feat: update tag and timeline share panel copy for clarity and conciseness
Some checks failed
docker-images / resolve-build-targets (push) Successful in 7s
ui-regression / playwright-regression (push) Failing after 13m4s
docker-images / build-and-push (admin) (push) Successful in 1m17s
docker-images / build-and-push (backend) (push) Successful in 28m13s
docker-images / build-and-push (frontend) (push) Successful in 47s
docker-images / submit-indexnow (push) Successful in 13s

style: enhance global CSS for better responsiveness of terminal chips and navigation pills

test: remove inline subscription test and add maintenance mode access code test

feat: implement media library picker dialog for selecting images from the media library

feat: add media URL controls for uploading and managing media assets

feat: add migration for music_enabled and maintenance_mode settings in site settings

feat: implement maintenance mode functionality with access control

feat: create maintenance page with access code input and error handling

chore: add TypeScript declaration for QR code module
This commit is contained in:
2026-04-02 23:05:49 +08:00
parent 6a50dd478c
commit 9665c933b5
94 changed files with 5266 additions and 1612 deletions

View File

@@ -1,18 +1,18 @@
use async_trait::async_trait;
use axum::{
http::{header, HeaderName, Method},
Router as AxumRouter,
http::{HeaderName, Method, header},
};
use loco_rs::{
Result,
app::{AppContext, Hooks, Initializer},
bgworker::{BackgroundWorker, Queue},
boot::{create_app, BootResult, StartMode},
boot::{BootResult, StartMode, create_app},
config::Config,
controller::AppRoutes,
db::{self, truncate_table},
environment::Environment,
task::Tasks,
Result,
};
use migration::Migrator;
use sea_orm::{
@@ -99,7 +99,9 @@ impl Hooks for App {
}
async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
Ok(vec![Box::new(initializers::content_sync::ContentSyncInitializer)])
Ok(vec![Box::new(
initializers::content_sync::ContentSyncInitializer,
)])
}
fn routes(_ctx: &AppContext) -> AppRoutes {
@@ -152,7 +154,9 @@ impl Hooks for App {
}
async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
queue.register(DownloadWorker::build(ctx)).await?;
queue.register(NotificationDeliveryWorker::build(ctx)).await?;
queue
.register(NotificationDeliveryWorker::build(ctx))
.await?;
Ok(())
}
@@ -334,8 +338,7 @@ impl Hooks for App {
let comment_verification_mode = settings["comment_verification_mode"]
.as_str()
.map(ToString::to_string);
let subscription_verification_mode = settings
["subscription_verification_mode"]
let subscription_verification_mode = settings["subscription_verification_mode"]
.as_str()
.map(ToString::to_string);
let comment_turnstile_enabled = settings["comment_turnstile_enabled"]
@@ -343,8 +346,7 @@ impl Hooks for App {
.or(comment_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let subscription_turnstile_enabled = settings
["subscription_turnstile_enabled"]
let subscription_turnstile_enabled = settings["subscription_turnstile_enabled"]
.as_bool()
.or(subscription_verification_mode
.as_deref()
@@ -381,6 +383,20 @@ impl Hooks for App {
})
.filter(|items| !items.is_empty())
.map(serde_json::Value::Array);
let music_enabled = settings["music_enabled"].as_bool().or(Some(true));
let maintenance_mode_enabled = settings["maintenance_mode_enabled"]
.as_bool()
.or(Some(false));
let maintenance_access_code = settings["maintenance_access_code"]
.as_str()
.and_then(|value| {
let trimmed = value.trim();
if trimmed.is_empty() {
None
} else {
Some(trimmed.to_string())
}
});
let item = site_settings::ActiveModel {
id: Set(settings["id"].as_i64().unwrap_or(1) as i32),
@@ -422,6 +438,9 @@ impl Hooks for App {
location: Set(settings["location"].as_str().map(ToString::to_string)),
tech_stack: Set(tech_stack),
music_playlist: Set(music_playlist),
music_enabled: Set(music_enabled),
maintenance_mode_enabled: Set(maintenance_mode_enabled),
maintenance_access_code: Set(maintenance_access_code),
ai_enabled: Set(settings["ai_enabled"].as_bool()),
paragraph_comments_enabled: Set(settings["paragraph_comments_enabled"]
.as_bool()

View File

@@ -1,4 +1,4 @@
use axum::http::{header, HeaderMap};
use axum::http::{HeaderMap, header};
use loco_rs::prelude::*;
use serde::Serialize;
use std::{
@@ -75,7 +75,8 @@ fn header_value(headers: &HeaderMap, key: &'static str) -> Option<String> {
}
fn split_groups(value: Option<String>) -> Vec<String> {
value.unwrap_or_default()
value
.unwrap_or_default()
.split([',', ';', ' '])
.map(str::trim)
.filter(|item| !item.is_empty())
@@ -192,8 +193,7 @@ pub(crate) fn resolve_admin_identity(headers: &HeaderMap) -> Option<AdminIdentit
}
pub(crate) fn check_auth(headers: &HeaderMap) -> Result<AdminIdentity> {
resolve_admin_identity(headers)
.ok_or_else(|| Error::Unauthorized("Not logged in".to_string()))
resolve_admin_identity(headers).ok_or_else(|| Error::Unauthorized("Not logged in".to_string()))
}
pub(crate) fn start_local_session(username: &str) -> (AdminIdentity, String, String) {

View File

@@ -1,8 +1,12 @@
use std::collections::{HashMap, HashSet};
use axum::{
extract::{Multipart, Query},
http::{HeaderMap, header},
};
use loco_rs::prelude::*;
use regex::Regex;
use reqwest::Url;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, PaginatorTrait, QueryFilter,
QueryOrder, QuerySelect, Set,
@@ -25,7 +29,7 @@ use crate::{
services::{
admin_audit, ai, analytics, comment_guard, content, media_assets, storage, worker_jobs,
},
workers::downloader::DownloadWorkerArgs,
workers::downloader::{DownloadWorkerArgs, download_media_to_storage, normalize_target_format},
};
#[derive(Clone, Debug, Deserialize)]
@@ -171,6 +175,9 @@ pub struct AdminSiteSettingsResponse {
pub location: Option<String>,
pub tech_stack: Vec<String>,
pub music_playlist: Vec<site_settings::MusicTrackPayload>,
pub music_enabled: bool,
pub maintenance_mode_enabled: bool,
pub maintenance_access_code: Option<String>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_verification_mode: String,
@@ -356,6 +363,8 @@ pub struct AdminMediaDownloadPayload {
#[serde(default)]
pub prefix: Option<String>,
#[serde(default)]
pub target_format: Option<String>,
#[serde(default)]
pub title: Option<String>,
#[serde(default)]
pub alt_text: Option<String>,
@@ -365,13 +374,19 @@ pub struct AdminMediaDownloadPayload {
pub tags: Option<Vec<String>>,
#[serde(default)]
pub notes: Option<String>,
#[serde(default)]
pub sync: bool,
}
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaDownloadResponse {
pub queued: bool,
pub job_id: i32,
pub status: String,
pub job_id: Option<i32>,
pub status: Option<String>,
pub key: Option<String>,
pub url: Option<String>,
pub size_bytes: Option<i64>,
pub content_type: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -487,6 +502,37 @@ pub struct AdminPostPolishRequest {
pub markdown: String,
}
/// Request body for the admin "localize post images" endpoint.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminPostLocalizeImagesRequest {
    /// Post markdown whose remote image references should be localized.
    pub markdown: String,
    /// Optional storage key prefix for the uploaded copies; when blank the
    /// handler falls back to a default (see `normalize_localize_image_prefix`).
    #[serde(default)]
    pub prefix: Option<String>,
}
/// One successfully localized image: the original remote URL plus where the
/// stored copy now lives.
#[derive(Clone, Debug, Serialize)]
pub struct AdminPostLocalizedImageItem {
    pub source_url: String,
    pub localized_url: String,
    /// Object key of the uploaded copy in media storage.
    pub key: String,
}
/// A remote image URL that could not be localized, with the error message
/// produced by the download attempt.
#[derive(Clone, Debug, Serialize)]
pub struct AdminPostLocalizeImagesFailure {
    pub source_url: String,
    pub error: String,
}
/// Result of localizing a post's images: the rewritten markdown plus counters
/// and per-URL success/failure details.
#[derive(Clone, Debug, Serialize)]
pub struct AdminPostLocalizeImagesResponse {
    /// Markdown with remote image URLs replaced by localized ones.
    pub markdown: String,
    /// Remote-image candidates found in the body (before de-duplication).
    pub detected_count: usize,
    /// References actually rewritten; can exceed `uploaded_count` when the
    /// same URL appears multiple times in the body.
    pub localized_count: usize,
    /// Unique URLs successfully downloaded and stored.
    pub uploaded_count: usize,
    /// Unique URLs whose download failed.
    pub failed_count: usize,
    pub items: Vec<AdminPostLocalizedImageItem>,
    pub failures: Vec<AdminPostLocalizeImagesFailure>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct AdminReviewPolishRequest {
pub title: String,
@@ -537,6 +583,199 @@ fn trim_to_option(value: Option<String>) -> Option<String> {
})
}
/// Resolve the storage prefix used when localizing inline markdown images.
///
/// Trims the caller-supplied value, strips surrounding slashes, and falls
/// back to `"post-inline-images"` when nothing usable remains.
fn normalize_localize_image_prefix(value: Option<String>) -> String {
    match trim_to_option(value) {
        Some(candidate) => {
            let stripped = candidate.trim_matches('/');
            if stripped.is_empty() {
                "post-inline-images".to_string()
            } else {
                stripped.to_string()
            }
        }
        None => "post-inline-images".to_string(),
    }
}
/// Clean up a URL captured from a markdown or HTML image reference.
///
/// Returns `None` for whitespace-only input. Angle-bracketed targets
/// (`<https://…>`) have the brackets removed and the inner text trimmed;
/// everything else is returned trimmed as-is.
fn normalize_markdown_image_target(value: &str) -> Option<String> {
    let trimmed = value.trim();
    if trimmed.is_empty() {
        return None;
    }
    let bracketed = trimmed.len() > 2 && trimmed.starts_with('<') && trimmed.ends_with('>');
    let target = if bracketed {
        trimmed[1..trimmed.len() - 1].trim()
    } else {
        trimmed
    };
    Some(target.to_string())
}
/// Collect every image URL referenced in `markdown`.
///
/// Scans markdown image syntax (`![alt](url "title")`) plus `<img src="…">`
/// tags in both double- and single-quoted forms. Each captured target is
/// normalized via `normalize_markdown_image_target`; duplicates are kept,
/// grouped by pattern in scan order.
fn markdown_image_reference_urls(markdown: &str) -> Vec<String> {
    let patterns = [
        Regex::new(r#"!\[[^\]]*]\((?P<url><[^>\n]+>|[^)\s]+)(?:\s+(?:"[^"]*"|'[^']*'))?\)"#)
            .expect("valid markdown image regex"),
        Regex::new(r#"(?i)<img\b[^>]*?\bsrc\s*=\s*"(?P<url>[^"]+)""#)
            .expect("valid html img double quote regex"),
        Regex::new(r#"(?i)<img\b[^>]*?\bsrc\s*=\s*'(?P<url>[^']+)'"#)
            .expect("valid html img single quote regex"),
    ];
    let mut urls = Vec::new();
    for pattern in &patterns {
        urls.extend(
            pattern
                .captures_iter(markdown)
                .filter_map(|captures| captures.name("url"))
                .filter_map(|capture| normalize_markdown_image_target(capture.as_str())),
        );
    }
    urls
}
/// Decide whether `url` should be downloaded and re-hosted locally.
///
/// A candidate must parse as an absolute `http`/`https` URL and must not
/// already resolve to an object in our own media storage (checked via
/// `object_key_from_public_url` when storage settings are available).
fn is_remote_markdown_image_candidate(
    url: &str,
    settings: Option<&storage::MediaStorageSettings>,
) -> bool {
    let parsed = match Url::parse(url) {
        Ok(parsed) => parsed,
        Err(_) => return false,
    };
    if parsed.scheme() != "http" && parsed.scheme() != "https" {
        return false;
    }
    let already_hosted = settings
        .and_then(|item| storage::object_key_from_public_url(item, url))
        .is_some();
    !already_hosted
}
/// Rewrite image URLs in `markdown` using `replacements` (source → local URL).
///
/// Handles markdown image syntax plus single/double-quoted `<img src>` tags,
/// applying the three patterns in sequence over the running result. Returns
/// the rewritten markdown together with the number of references replaced.
fn replace_markdown_image_urls(
    markdown: &str,
    replacements: &HashMap<String, String>,
) -> (String, usize) {
    // Shared rewrite rule for all three patterns: keep the lead/trail syntax,
    // swap the URL when a replacement exists, otherwise emit the match as-is.
    fn rewrite_capture(
        captures: &regex::Captures<'_>,
        replacements: &HashMap<String, String>,
        replaced: &mut usize,
    ) -> String {
        let whole_match = || {
            captures
                .get(0)
                .map(|item| item.as_str())
                .unwrap_or_default()
                .to_string()
        };
        let raw_url = captures
            .name("url")
            .map(|item| item.as_str())
            .unwrap_or_default();
        let Some(normalized_url) = normalize_markdown_image_target(raw_url) else {
            return whole_match();
        };
        match replacements.get(&normalized_url) {
            Some(localized_url) => {
                *replaced += 1;
                format!(
                    "{}{}{}",
                    &captures["lead"], localized_url, &captures["trail"]
                )
            }
            None => whole_match(),
        }
    }

    let markdown_pattern = Regex::new(
        r#"(?P<lead>!\[[^\]]*]\()(?P<url><[^>\n]+>|[^)\s]+)(?P<trail>(?:\s+(?:"[^"]*"|'[^']*'))?\))"#,
    )
    .expect("valid markdown image replacement regex");
    let html_double_quote_pattern =
        Regex::new(r#"(?i)(?P<lead><img\b[^>]*?\bsrc\s*=\s*")(?P<url>[^"]+)(?P<trail>"[^>]*>)"#)
            .expect("valid html img double quote replacement regex");
    let html_single_quote_pattern =
        Regex::new(r#"(?i)(?P<lead><img\b[^>]*?\bsrc\s*=\s*')(?P<url>[^']+)(?P<trail>'[^>]*>)"#)
            .expect("valid html img single quote replacement regex");

    let mut localized_count = 0usize;
    let mut output = markdown.to_string();
    for pattern in [
        &markdown_pattern,
        &html_double_quote_pattern,
        &html_single_quote_pattern,
    ] {
        output = pattern
            .replace_all(&output, |captures: &regex::Captures<'_>| {
                rewrite_capture(captures, replacements, &mut localized_count)
            })
            .to_string();
    }
    (output, localized_count)
}
fn parse_optional_timestamp(
value: Option<&str>,
) -> Result<Option<chrono::DateTime<chrono::FixedOffset>>> {
@@ -785,6 +1024,9 @@ fn build_settings_response(
location: item.location,
tech_stack: tech_stack_values(&item.tech_stack),
music_playlist: music_playlist_values(&item.music_playlist),
music_enabled: item.music_enabled.unwrap_or(true),
maintenance_mode_enabled: item.maintenance_mode_enabled.unwrap_or(false),
maintenance_access_code: item.maintenance_access_code,
ai_enabled: item.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true),
comment_verification_mode: comment_verification_mode.as_str().to_string(),
@@ -1493,9 +1735,11 @@ pub async fn download_media_object(
Json(payload): Json<AdminMediaDownloadPayload>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let target_format = normalize_target_format(payload.target_format.clone())?;
let worker_args = DownloadWorkerArgs {
source_url: payload.source_url.clone(),
prefix: payload.prefix.clone(),
target_format,
title: payload.title.clone(),
alt_text: payload.alt_text.clone(),
caption: payload.caption.clone(),
@@ -1503,6 +1747,38 @@ pub async fn download_media_object(
notes: payload.notes.clone(),
job_id: None,
};
if payload.sync {
let downloaded = download_media_to_storage(&ctx, &worker_args).await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"media.download",
"media",
Some(downloaded.key.clone()),
Some(payload.source_url.clone()),
Some(serde_json::json!({
"queued": false,
"source_url": payload.source_url,
"target_format": worker_args.target_format,
"key": downloaded.key,
"url": downloaded.url,
})),
)
.await?;
return format::json(AdminMediaDownloadResponse {
queued: false,
job_id: None,
status: Some("completed".to_string()),
key: Some(downloaded.key),
url: Some(downloaded.url),
size_bytes: Some(downloaded.size_bytes),
content_type: downloaded.content_type,
});
}
let job = worker_jobs::queue_download_job(
&ctx,
&worker_args,
@@ -1524,14 +1800,19 @@ pub async fn download_media_object(
"job_id": job.id,
"queued": true,
"source_url": payload.source_url,
"target_format": worker_args.target_format,
})),
)
.await?;
format::json(AdminMediaDownloadResponse {
queued: true,
job_id: job.id,
status: job.status,
job_id: Some(job.id),
status: Some(job.status),
key: None,
url: None,
size_bytes: None,
content_type: None,
})
}
@@ -1907,6 +2188,89 @@ pub async fn polish_post_markdown(
format::json(ai::polish_post_markdown(&ctx, &payload.markdown).await?)
}
/// Admin endpoint: localize remote images referenced in a post's markdown.
///
/// Downloads each remote image into media storage and rewrites the markdown
/// to reference the stored copies. Returns the rewritten markdown plus
/// counters and per-URL success/failure details.
#[debug_handler]
pub async fn localize_post_markdown_images(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<AdminPostLocalizeImagesRequest>,
) -> Result<Response> {
    // Auth gate only — the actor identity is not recorded for this operation.
    check_auth(&headers)?;
    // Normalize line endings so regex scanning and replacement behave the same
    // regardless of how the markdown was submitted.
    let normalized_markdown = payload.markdown.replace("\r\n", "\n");
    let prefix = normalize_localize_image_prefix(payload.prefix);
    let settings = storage::optional_r2_settings(&ctx).await?;
    let detected_urls = markdown_image_reference_urls(&normalized_markdown);
    // Keep only http(s) URLs that are not already hosted in our own storage.
    let candidate_urls = detected_urls
        .into_iter()
        .filter(|url| is_remote_markdown_image_candidate(url, settings.as_ref()))
        .collect::<Vec<_>>();
    if candidate_urls.is_empty() {
        // Nothing to do — echo back the (line-ending-normalized) markdown.
        return format::json(AdminPostLocalizeImagesResponse {
            markdown: normalized_markdown,
            detected_count: 0,
            localized_count: 0,
            uploaded_count: 0,
            failed_count: 0,
            items: Vec::new(),
            failures: Vec::new(),
        });
    }
    // De-duplicate while preserving first-seen order so each remote URL is
    // downloaded at most once.
    let mut seen = HashSet::new();
    let unique_urls = candidate_urls
        .iter()
        .filter(|url| seen.insert((*url).clone()))
        .cloned()
        .collect::<Vec<_>>();
    let mut replacements = HashMap::<String, String>::new();
    let mut items = Vec::<AdminPostLocalizedImageItem>::new();
    let mut failures = Vec::<AdminPostLocalizeImagesFailure>::new();
    for source_url in unique_urls {
        let args = DownloadWorkerArgs {
            source_url: source_url.clone(),
            prefix: Some(prefix.clone()),
            target_format: None,
            title: None,
            alt_text: None,
            caption: None,
            tags: vec!["markdown-image".to_string()],
            notes: Some("localized from markdown body".to_string()),
            job_id: None,
        };
        // Download synchronously (no background job). Failures are collected
        // per-URL instead of aborting the whole request.
        match download_media_to_storage(&ctx, &args).await {
            Ok(downloaded) => {
                replacements.insert(source_url.clone(), downloaded.url.clone());
                items.push(AdminPostLocalizedImageItem {
                    source_url,
                    localized_url: downloaded.url,
                    key: downloaded.key,
                });
            }
            Err(error) => failures.push(AdminPostLocalizeImagesFailure {
                source_url,
                error: error.to_string(),
            }),
        }
    }
    // localized_count counts rewritten references, which can exceed
    // uploaded_count when the same URL appears multiple times in the body.
    let (markdown, localized_count) =
        replace_markdown_image_urls(&normalized_markdown, &replacements);
    format::json(AdminPostLocalizeImagesResponse {
        markdown,
        detected_count: candidate_urls.len(),
        localized_count,
        uploaded_count: items.len(),
        failed_count: failures.len(),
        items,
        failures,
    })
}
#[debug_handler]
pub async fn polish_review_description(
headers: HeaderMap,
@@ -2045,6 +2409,10 @@ pub fn routes() -> Routes {
.add("/ai/reindex", post(reindex_ai))
.add("/ai/test-provider", post(test_ai_provider))
.add("/ai/test-image-provider", post(test_ai_image_provider))
.add(
"/posts/localize-images",
post(localize_post_markdown_images),
)
.add("/storage/r2/test", post(test_r2_storage))
.add(
"/storage/media",

View File

@@ -8,9 +8,7 @@ use serde::{Deserialize, Serialize};
use crate::{
controllers::admin::check_auth,
models::_entities::{
admin_audit_logs, notification_deliveries, post_revisions, subscriptions,
},
models::_entities::{admin_audit_logs, notification_deliveries, post_revisions, subscriptions},
services::{
admin_audit, backups, post_revisions as revision_service,
subscriptions as subscription_service, worker_jobs,
@@ -174,7 +172,12 @@ fn format_revision(item: post_revisions::Model) -> PostRevisionListItem {
actor_email: item.actor_email,
actor_source: item.actor_source,
created_at: item.created_at.format("%Y-%m-%d %H:%M:%S").to_string(),
has_markdown: item.markdown.as_deref().map(str::trim).filter(|value| !value.is_empty()).is_some(),
has_markdown: item
.markdown
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.is_some(),
metadata: item.metadata,
}
}
@@ -187,17 +190,31 @@ pub async fn list_audit_logs(
) -> Result<Response> {
check_auth(&headers)?;
let mut db_query = admin_audit_logs::Entity::find().order_by(admin_audit_logs::Column::CreatedAt, Order::Desc);
let mut db_query =
admin_audit_logs::Entity::find().order_by(admin_audit_logs::Column::CreatedAt, Order::Desc);
if let Some(action) = query.action.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(action) = query
.action
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(admin_audit_logs::Column::Action.eq(action));
}
if let Some(target_type) = query.target_type.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(target_type) = query
.target_type
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(admin_audit_logs::Column::TargetType.eq(target_type));
}
format::json(db_query.limit(query.limit.unwrap_or(80)).all(&ctx.db).await?)
format::json(
db_query
.limit(query.limit.unwrap_or(80))
.all(&ctx.db)
.await?,
)
}
#[debug_handler]
@@ -207,7 +224,9 @@ pub async fn list_post_revisions(
State(ctx): State<AppContext>,
) -> Result<Response> {
check_auth(&headers)?;
let items = revision_service::list_revisions(&ctx, query.slug.as_deref(), query.limit.unwrap_or(120)).await?;
let items =
revision_service::list_revisions(&ctx, query.slug.as_deref(), query.limit.unwrap_or(120))
.await?;
format::json(items.into_iter().map(format_revision).collect::<Vec<_>>())
}
@@ -234,8 +253,7 @@ pub async fn restore_post_revision(
) -> Result<Response> {
let actor = check_auth(&headers)?;
let mode = payload.mode.unwrap_or_else(|| "full".to_string());
let restored =
revision_service::restore_revision(&ctx, Some(&actor), id, &mode).await?;
let restored = revision_service::restore_revision(&ctx, Some(&actor), id, &mode).await?;
admin_audit::log_event(
&ctx,
Some(&actor),
@@ -278,7 +296,8 @@ pub async fn list_subscription_deliveries(
) -> Result<Response> {
check_auth(&headers)?;
format::json(DeliveryListResponse {
deliveries: subscription_service::list_recent_deliveries(&ctx, query.limit.unwrap_or(80)).await?,
deliveries: subscription_service::list_recent_deliveries(&ctx, query.limit.unwrap_or(80))
.await?,
})
}
@@ -300,7 +319,9 @@ pub async fn create_subscription(
channel_type: Set(channel_type.clone()),
target: Set(target.clone()),
display_name: Set(trim_to_option(payload.display_name)),
status: Set(subscription_service::normalize_status(payload.status.as_deref().unwrap_or("active"))),
status: Set(subscription_service::normalize_status(
payload.status.as_deref().unwrap_or("active"),
)),
filters: Set(subscription_service::normalize_filters(payload.filters)),
metadata: Set(payload.metadata),
secret: Set(trim_to_option(payload.secret)),
@@ -461,7 +482,9 @@ pub async fn send_subscription_digest(
Json(payload): Json<DigestDispatchRequest>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let summary = subscription_service::send_digest(&ctx, payload.period.as_deref().unwrap_or("weekly")).await?;
let summary =
subscription_service::send_digest(&ctx, payload.period.as_deref().unwrap_or("weekly"))
.await?;
admin_audit::log_event(
&ctx,
@@ -664,17 +687,29 @@ pub fn routes() -> Routes {
.add("/post-revisions", get(list_post_revisions))
.add("/post-revisions/{id}", get(get_post_revision))
.add("/post-revisions/{id}/restore", post(restore_post_revision))
.add("/subscriptions", get(list_subscriptions).post(create_subscription))
.add("/subscriptions/deliveries", get(list_subscription_deliveries))
.add(
"/subscriptions",
get(list_subscriptions).post(create_subscription),
)
.add(
"/subscriptions/deliveries",
get(list_subscription_deliveries),
)
.add("/subscriptions/digest", post(send_subscription_digest))
.add("/subscriptions/{id}", patch(update_subscription).delete(delete_subscription))
.add(
"/subscriptions/{id}",
patch(update_subscription).delete(delete_subscription),
)
.add("/subscriptions/{id}/test", post(test_subscription))
.add("/workers/overview", get(workers_overview))
.add("/workers/jobs", get(list_worker_jobs))
.add("/workers/jobs/{id}", get(get_worker_job))
.add("/workers/jobs/{id}/cancel", post(cancel_worker_job))
.add("/workers/jobs/{id}/retry", post(retry_worker_job))
.add("/workers/tasks/retry-deliveries", post(run_retry_deliveries_job))
.add(
"/workers/tasks/retry-deliveries",
post(run_retry_deliveries_job),
)
.add("/workers/tasks/digest", post(run_digest_worker_job))
.add("/site-backup/export", get(export_site_backup))
.add("/site-backup/import", post(import_site_backup))

View File

@@ -4,8 +4,8 @@ use async_stream::stream;
use axum::{
body::{Body, Bytes},
http::{
header::{CACHE_CONTROL, CONNECTION, CONTENT_TYPE},
HeaderMap, HeaderValue,
header::{CACHE_CONTROL, CONNECTION, CONTENT_TYPE},
},
};
use chrono::{DateTime, Utc};

View File

@@ -8,10 +8,11 @@ use std::collections::BTreeMap;
use std::net::SocketAddr;
use axum::{
extract::{rejection::ExtensionRejection, ConnectInfo},
http::{header, HeaderMap},
extract::{ConnectInfo, rejection::ExtensionRejection},
http::{HeaderMap, header},
};
use crate::controllers::admin::check_auth;
use crate::models::_entities::{
comments::{ActiveModel, Column, Entity, Model},
posts,
@@ -21,7 +22,6 @@ use crate::services::{
comment_guard::{self, CommentGuardInput},
notifications,
};
use crate::controllers::admin::check_auth;
const ARTICLE_SCOPE: &str = "article";
const PARAGRAPH_SCOPE: &str = "paragraph";

View File

@@ -38,8 +38,15 @@ pub async fn record(
headers: HeaderMap,
Json(payload): Json<ContentAnalyticsEventPayload>,
) -> Result<Response> {
let mut request_context = analytics::content_request_context_from_headers(&payload.path, &headers);
if payload.referrer.as_deref().map(str::trim).filter(|value| !value.is_empty()).is_some() {
let mut request_context =
analytics::content_request_context_from_headers(&payload.path, &headers);
if payload
.referrer
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.is_some()
{
request_context.referrer = payload.referrer;
}

View File

@@ -127,7 +127,9 @@ pub async fn update(
"friend_link.update",
"friend_link",
Some(item.id.to_string()),
item.site_name.clone().or_else(|| Some(item.site_url.clone())),
item.site_name
.clone()
.or_else(|| Some(item.site_url.clone())),
Some(serde_json::json!({ "status": item.status })),
)
.await?;
@@ -142,7 +144,10 @@ pub async fn remove(
) -> Result<Response> {
let actor = check_auth(&headers)?;
let item = load_item(&ctx, id).await?;
let label = item.site_name.clone().or_else(|| Some(item.site_url.clone()));
let label = item
.site_name
.clone()
.or_else(|| Some(item.site_url.clone()));
item.delete(&ctx.db).await?;
admin_audit::log_event(
&ctx,

View File

@@ -1,12 +1,12 @@
pub mod admin;
pub mod admin_api;
pub mod admin_taxonomy;
pub mod admin_ops;
pub mod admin_taxonomy;
pub mod ai;
pub mod auth;
pub mod content_analytics;
pub mod category;
pub mod comment;
pub mod content_analytics;
pub mod friend_link;
pub mod health;
pub mod post;

View File

@@ -14,7 +14,11 @@ use crate::{
fn is_public_review_status(status: Option<&str>) -> bool {
matches!(
status.unwrap_or_default().trim().to_ascii_lowercase().as_str(),
status
.unwrap_or_default()
.trim()
.to_ascii_lowercase()
.as_str(),
"published" | "completed" | "done"
)
}
@@ -67,7 +71,9 @@ pub async fn get_one(
let review = ReviewEntity::find_by_id(id).one(&ctx.db).await?;
match review {
Some(r) if include_private || is_public_review_status(r.status.as_deref()) => format::json(r),
Some(r) if include_private || is_public_review_status(r.status.as_deref()) => {
format::json(r)
}
Some(_) => Err(Error::NotFound),
None => Err(Error::NotFound),
}

View File

@@ -4,6 +4,7 @@
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sha2::{Digest, Sha256};
use sea_orm::{ActiveModelTrait, EntityTrait, IntoActiveModel, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use std::collections::HashSet;
@@ -89,6 +90,12 @@ pub struct SiteSettingsPayload {
pub tech_stack: Option<Vec<String>>,
#[serde(default, alias = "musicPlaylist")]
pub music_playlist: Option<Vec<MusicTrackPayload>>,
#[serde(default, alias = "musicEnabled")]
pub music_enabled: Option<bool>,
#[serde(default, alias = "maintenanceModeEnabled")]
pub maintenance_mode_enabled: Option<bool>,
#[serde(default, alias = "maintenanceAccessCode")]
pub maintenance_access_code: Option<String>,
#[serde(default, alias = "aiEnabled")]
pub ai_enabled: Option<bool>,
#[serde(default, alias = "paragraphCommentsEnabled")]
@@ -199,6 +206,7 @@ pub struct PublicSiteSettingsResponse {
pub location: Option<String>,
pub tech_stack: Option<serde_json::Value>,
pub music_playlist: Option<serde_json::Value>,
pub music_enabled: bool,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_verification_mode: String,
@@ -217,6 +225,31 @@ pub struct PublicSiteSettingsResponse {
pub seo_wechat_share_qr_enabled: bool,
}
/// Optional access token sent by a visitor when checking maintenance status.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct MaintenanceAccessTokenPayload {
    #[serde(default, alias = "accessToken")]
    pub access_token: Option<String>,
}
/// Access code submitted by a visitor trying to unlock the site during
/// maintenance mode.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct MaintenanceVerifyPayload {
    #[serde(default)]
    pub code: Option<String>,
}
/// Response for the maintenance-status check: whether maintenance mode is on
/// and whether the caller's token (if any) grants access.
#[derive(Clone, Debug, Serialize)]
pub struct MaintenanceAccessStatusResponse {
    pub maintenance_mode_enabled: bool,
    pub access_granted: bool,
}
/// Response for code verification; `access_token` is populated only when the
/// submitted code matched the configured one.
#[derive(Clone, Debug, Serialize)]
pub struct MaintenanceVerifyResponse {
    pub maintenance_mode_enabled: bool,
    pub access_granted: bool,
    pub access_token: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
pub struct HomeCategorySummary {
pub id: i32,
@@ -252,6 +285,51 @@ fn normalize_optional_int(value: Option<i32>, min: i32, max: i32) -> Option<i32>
value.map(|item| item.clamp(min, max))
}
/// Whether maintenance mode is switched on; an unset column counts as off.
fn maintenance_mode_enabled(model: &Model) -> bool {
    matches!(model.maintenance_mode_enabled, Some(true))
}
/// The configured maintenance access code, run through
/// `normalize_optional_string` (presumably blank values become `None` —
/// see that helper for the exact normalization).
fn maintenance_access_code(model: &Model) -> Option<String> {
    normalize_optional_string(model.maintenance_access_code.clone())
}
/// Derive the opaque access token handed to clients from the secret code.
///
/// The token is the lowercase-hex SHA-256 of a fixed version tag plus the
/// secret, so the raw code itself never round-trips to the browser.
fn maintenance_access_token_from_secret(secret: &str) -> String {
    let mut hasher = Sha256::new();
    hasher.update(b"termi-maintenance-access:v1:");
    hasher.update(secret.as_bytes());
    let mut token = String::with_capacity(64);
    for byte in hasher.finalize() {
        token.push_str(&format!("{byte:02x}"));
    }
    token
}
/// Check a client-supplied token against the token derived from the secret.
///
/// Returns `false` when the token is missing/blank or when no access code is
/// configured on the settings model.
fn validate_maintenance_access_token(model: &Model, token: Option<&str>) -> bool {
    let candidate = token.map(str::trim).filter(|item| !item.is_empty());
    match (candidate, maintenance_access_code(model)) {
        (Some(candidate), Some(secret)) => {
            candidate == maintenance_access_token_from_secret(&secret)
        }
        _ => false,
    }
}
/// Exchange a correct access code for the derived access token.
///
/// Returns `None` when the submitted code is blank, no code is configured,
/// or the code does not match.
fn verify_maintenance_access_code(model: &Model, code: Option<&str>) -> Option<String> {
    let candidate = code.map(str::trim).filter(|item| !item.is_empty())?;
    let secret = maintenance_access_code(model)?;
    if candidate == secret {
        Some(maintenance_access_token_from_secret(&secret))
    } else {
        None
    }
}
fn normalize_notification_channel_type(value: Option<String>) -> Option<String> {
value.and_then(|item| {
let normalized = item.trim().to_ascii_lowercase();
@@ -272,7 +350,7 @@ pub(crate) fn default_subscription_popup_title() -> String {
}
pub(crate) fn default_subscription_popup_description() -> String {
"有新文章或汇总简报时,通过邮件第一时间收到提醒。需要先确认邮箱,可随时退订".to_string()
"有新内容时及时提醒你;如果愿意,也可以再留一个邮箱备份".to_string()
}
pub(crate) fn default_subscription_popup_delay_seconds() -> i32 {
@@ -555,6 +633,15 @@ impl SiteSettingsPayload {
if let Some(music_playlist) = self.music_playlist {
item.music_playlist = Some(serde_json::json!(normalize_music_playlist(music_playlist)));
}
if let Some(music_enabled) = self.music_enabled {
item.music_enabled = Some(music_enabled);
}
if let Some(maintenance_mode_enabled) = self.maintenance_mode_enabled {
item.maintenance_mode_enabled = Some(maintenance_mode_enabled);
}
if self.maintenance_access_code.is_some() {
item.maintenance_access_code = normalize_optional_string(self.maintenance_access_code);
}
if let Some(ai_enabled) = self.ai_enabled {
item.ai_enabled = Some(ai_enabled);
}
@@ -752,10 +839,10 @@ fn default_payload() -> SiteSettingsPayload {
site_name: Some("InitCool".to_string()),
site_short_name: Some("Termi".to_string()),
site_url: Some("https://init.cool".to_string()),
site_title: Some("InitCool - 终端风格的内容平台".to_string()),
site_description: Some("一个基于终端美学的个人内容站,记录代码、设计和生活".to_string()),
hero_title: Some("欢迎来到我的极客终端博客".to_string()),
hero_subtitle: Some("这里记录技术、代码和生活点滴".to_string()),
site_title: Some("InitCool · 技术笔记与内容档案".to_string()),
site_description: Some("围绕开发实践、产品观察与长期积累整理的中文内容站".to_string()),
hero_title: Some("欢迎来到 InitCool".to_string()),
hero_subtitle: Some("记录开发实践、产品观察与长期积累,分享清晰、耐读、可回看的内容。".to_string()),
owner_name: Some("InitCool".to_string()),
owner_title: Some("Rust / Go / Python Developer · Builder @ init.cool".to_string()),
owner_bio: Some(
@@ -813,6 +900,9 @@ fn default_payload() -> SiteSettingsPayload {
description: Some("节奏更明显一点,适合切换阅读状态。".to_string()),
},
]),
music_enabled: Some(true),
maintenance_mode_enabled: Some(false),
maintenance_access_code: None,
ai_enabled: Some(false),
paragraph_comments_enabled: Some(true),
comment_verification_mode: Some(
@@ -923,6 +1013,7 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
location: model.location,
tech_stack: model.tech_stack,
music_playlist: model.music_playlist,
music_enabled: model.music_enabled.unwrap_or(true),
ai_enabled: model.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true),
comment_verification_mode: comment_verification_mode.as_str().to_string(),
@@ -1019,6 +1110,50 @@ pub async fn show(State(ctx): State<AppContext>) -> Result<Response> {
format::json(public_response(load_current(&ctx).await?))
}
#[debug_handler]
/// Reports whether maintenance mode is active and whether the caller's
/// access token (if any) grants entry while it is active.
pub async fn maintenance_status(
    State(ctx): State<AppContext>,
    Json(params): Json<MaintenanceAccessTokenPayload>,
) -> Result<Response> {
    let settings = load_current(&ctx).await?;
    let mode_active = maintenance_mode_enabled(&settings);
    // When maintenance mode is off, access is unconditionally granted;
    // otherwise the supplied token must validate.
    let granted = !mode_active
        || validate_maintenance_access_token(&settings, params.access_token.as_deref());
    format::json(MaintenanceAccessStatusResponse {
        maintenance_mode_enabled: mode_active,
        access_granted: granted,
    })
}
#[debug_handler]
/// Exchanges a maintenance access code for an access token.
///
/// If maintenance mode is disabled the response simply grants access with no
/// token; otherwise the code is verified and a token is issued on success.
pub async fn maintenance_verify(
    State(ctx): State<AppContext>,
    Json(params): Json<MaintenanceVerifyPayload>,
) -> Result<Response> {
    let settings = load_current(&ctx).await?;
    if maintenance_mode_enabled(&settings) {
        // Some(token) means the code matched; None means it was rejected.
        let token = verify_maintenance_access_code(&settings, params.code.as_deref());
        format::json(MaintenanceVerifyResponse {
            maintenance_mode_enabled: true,
            access_granted: token.is_some(),
            access_token: token,
        })
    } else {
        format::json(MaintenanceVerifyResponse {
            maintenance_mode_enabled: false,
            access_granted: true,
            access_token: None,
        })
    }
}
#[debug_handler]
pub async fn update(
headers: HeaderMap,
@@ -1039,6 +1174,8 @@ pub fn routes() -> Routes {
Routes::new()
.prefix("api/site_settings/")
.add("home", get(home))
.add("maintenance/status", post(maintenance_status))
.add("maintenance/verify", post(maintenance_verify))
.add("/", get(show))
.add("/", put(update))
.add("/", patch(update))

View File

@@ -33,6 +33,26 @@ pub struct PublicBrowserPushSubscriptionPayload {
pub captcha_answer: Option<String>,
}
/// Request body for the combined subscription endpoint, letting a client opt
/// into one or more channels (email and/or browser push) in a single call.
#[derive(Clone, Debug, Deserialize)]
pub struct PublicCombinedSubscriptionPayload {
    /// Raw channel names as sent by the client; normalized server-side.
    #[serde(default)]
    pub channels: Vec<String>,
    /// Email address — required when the email channel is selected.
    #[serde(default)]
    pub email: Option<String>,
    /// Optional display name for the email subscription (camelCase alias accepted).
    #[serde(default, alias = "displayName")]
    pub display_name: Option<String>,
    /// Web Push subscription JSON — required when the browser push channel is selected.
    #[serde(default)]
    pub subscription: Option<serde_json::Value>,
    /// Free-form origin marker stored in subscription metadata.
    #[serde(default)]
    pub source: Option<String>,
    /// Turnstile token for human verification (camelCase alias accepted).
    #[serde(default, alias = "turnstileToken")]
    pub turnstile_token: Option<String>,
    /// Captcha token for human verification (camelCase alias accepted).
    #[serde(default, alias = "captchaToken")]
    pub captcha_token: Option<String>,
    /// Captcha answer for human verification (camelCase alias accepted).
    #[serde(default, alias = "captchaAnswer")]
    pub captcha_answer: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct SubscriptionTokenPayload {
pub token: String,
@@ -63,6 +83,21 @@ pub struct PublicSubscriptionResponse {
pub message: String,
}
/// Per-channel outcome reported back by the combined subscription endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct PublicCombinedSubscriptionItemResponse {
    /// Channel identifier of the created subscription (e.g. email vs push).
    pub channel_type: String,
    /// Identifier of the subscription record.
    pub subscription_id: i32,
    /// Current subscription status (e.g. pending vs active).
    pub status: String,
    /// True when the subscriber must still confirm (email double opt-in);
    /// always false for browser push.
    pub requires_confirmation: bool,
}
/// Aggregate response for the combined subscription endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct PublicCombinedSubscriptionResponse {
    /// Overall success flag; the handler returns this only when every
    /// selected channel was processed.
    pub ok: bool,
    /// One entry per processed channel.
    pub channels: Vec<PublicCombinedSubscriptionItemResponse>,
    /// Human-readable summary joined from the per-channel messages.
    pub message: String,
}
#[derive(Clone, Debug, Serialize)]
pub struct SubscriptionManageResponse {
pub ok: bool,
@@ -89,6 +124,30 @@ fn public_browser_push_metadata(
})
}
/// Maps user-supplied channel names to the canonical channel identifiers
/// (`"email"` or `"browser_push"`), dropping unknown entries and duplicates
/// while preserving first-seen order.
fn normalize_public_subscription_channels(channels: &[String]) -> Vec<String> {
    let mut normalized: Vec<String> = Vec::new();
    for raw in channels {
        // Accept common aliases, case- and whitespace-insensitively.
        let channel = match raw.trim().to_ascii_lowercase().as_str() {
            "email" | "mail" => "email",
            "browser" | "browser-push" | "browser_push" | "webpush" | "web-push" => "browser_push",
            _ => continue, // unknown channel name — ignore silently
        };
        // De-duplicate while keeping the first occurrence's position.
        if !normalized.iter().any(|value| value == channel) {
            normalized.push(channel.to_string());
        }
    }
    normalized
}
async fn verify_subscription_human_check(
settings: &crate::models::_entities::site_settings::Model,
turnstile_token: Option<&str>,
@@ -119,11 +178,7 @@ pub async fn subscribe(
) -> Result<Response> {
let email = payload.email.trim().to_ascii_lowercase();
let client_ip = abuse_guard::detect_client_ip(&headers);
abuse_guard::enforce_public_scope(
"subscription",
client_ip.as_deref(),
Some(&email),
)?;
abuse_guard::enforce_public_scope("subscription", client_ip.as_deref(), Some(&email))?;
let settings = crate::controllers::site_settings::load_current(&ctx).await?;
verify_subscription_human_check(
&settings,
@@ -186,7 +241,9 @@ pub async fn subscribe_browser_push(
.and_then(serde_json::Value::as_str)
.map(str::trim)
.filter(|value| !value.is_empty())
.ok_or_else(|| Error::BadRequest("browser push subscription.endpoint 不能为空".to_string()))?
.ok_or_else(|| {
Error::BadRequest("browser push subscription.endpoint 不能为空".to_string())
})?
.to_string();
let client_ip = abuse_guard::detect_client_ip(&headers);
let user_agent = headers
@@ -196,15 +253,11 @@ pub async fn subscribe_browser_push(
.filter(|value| !value.is_empty())
.map(ToString::to_string);
abuse_guard::enforce_public_scope("browser-push-subscription", client_ip.as_deref(), Some(&endpoint))?;
verify_subscription_human_check(
&settings,
payload.turnstile_token.as_deref(),
payload.captcha_token.as_deref(),
payload.captcha_answer.as_deref(),
abuse_guard::enforce_public_scope(
"browser-push-subscription",
client_ip.as_deref(),
)
.await?;
Some(&endpoint),
)?;
let result = subscriptions::create_public_web_push_subscription(
&ctx,
@@ -240,6 +293,174 @@ pub async fn subscribe_browser_push(
})
}
#[debug_handler]
/// Handles a combined subscription request that may target the email channel,
/// the browser-push channel, or both in a single call.
///
/// Flow: normalize the requested channels, then run all per-channel
/// validation and abuse-guard checks up front (so nothing is persisted when
/// any check fails), then create each subscription, audit-log it, and return
/// a per-channel summary.
pub async fn subscribe_combined(
    State(ctx): State<AppContext>,
    headers: axum::http::HeaderMap,
    Json(payload): Json<PublicCombinedSubscriptionPayload>,
) -> Result<Response> {
    let selected_channels = normalize_public_subscription_channels(&payload.channels);
    if selected_channels.is_empty() {
        return Err(Error::BadRequest("请至少选择一种订阅方式".to_string()));
    }
    let wants_email = selected_channels.iter().any(|value| value == "email");
    let wants_browser_push = selected_channels
        .iter()
        .any(|value| value == "browser_push");
    let settings = crate::controllers::site_settings::load_current(&ctx).await?;
    let client_ip = abuse_guard::detect_client_ip(&headers);
    // Trim + lowercase the email; blank input is treated as absent.
    let normalized_email = payload
        .email
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(|value| value.to_ascii_lowercase());
    if wants_email {
        // Email channel requires an address; also rate-limit per IP + email.
        let email = normalized_email
            .as_deref()
            .ok_or_else(|| Error::BadRequest("请选择邮箱订阅后填写邮箱地址".to_string()))?;
        abuse_guard::enforce_public_scope("subscription", client_ip.as_deref(), Some(email))?;
    }
    // Validate the push subscription payload before any writes happen.
    let normalized_browser_subscription = if wants_browser_push {
        if !crate::services::web_push::is_enabled(&settings) {
            return Err(Error::BadRequest("浏览器推送未启用".to_string()));
        }
        let subscription = payload
            .subscription
            .clone()
            .ok_or_else(|| Error::BadRequest("缺少浏览器推送订阅信息".to_string()))?;
        // The push endpoint URL is mandatory and doubles as the abuse-guard key.
        let endpoint = subscription
            .get("endpoint")
            .and_then(serde_json::Value::as_str)
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .ok_or_else(|| {
                Error::BadRequest("browser push subscription.endpoint 不能为空".to_string())
            })?
            .to_string();
        abuse_guard::enforce_public_scope(
            "browser-push-subscription",
            client_ip.as_deref(),
            Some(&endpoint),
        )?;
        Some(subscription)
    } else {
        None
    };
    if wants_email {
        // Human verification (turnstile/captcha) is only enforced for email.
        verify_subscription_human_check(
            &settings,
            payload.turnstile_token.as_deref(),
            payload.captcha_token.as_deref(),
            payload.captcha_answer.as_deref(),
            client_ip.as_deref(),
        )
        .await?;
    }
    let user_agent = headers
        .get(header::USER_AGENT)
        .and_then(|value| value.to_str().ok())
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(ToString::to_string);
    let mut items = Vec::new();
    let mut message_parts = Vec::new();
    // Browser push first: it is created immediately active (no confirmation).
    if let Some(subscription) = normalized_browser_subscription {
        let browser_result = subscriptions::create_public_web_push_subscription(
            &ctx,
            subscription.clone(),
            Some(public_browser_push_metadata(
                payload.source.clone(),
                subscription,
                user_agent,
            )),
        )
        .await?;
        admin_audit::log_event(
            &ctx,
            None,
            "subscription.public.web_push.active",
            "subscription",
            Some(browser_result.subscription.id.to_string()),
            Some(browser_result.subscription.target.clone()),
            Some(serde_json::json!({
                "channel_type": browser_result.subscription.channel_type,
                "status": browser_result.subscription.status,
            })),
        )
        .await?;
        message_parts.push(browser_result.message.clone());
        items.push(PublicCombinedSubscriptionItemResponse {
            channel_type: browser_result.subscription.channel_type,
            subscription_id: browser_result.subscription.id,
            status: browser_result.subscription.status,
            requires_confirmation: false,
        });
    }
    // Email second: may come back as pending when double opt-in is required.
    if wants_email {
        let email_result = subscriptions::create_public_email_subscription(
            &ctx,
            normalized_email.as_deref().unwrap_or_default(),
            payload.display_name,
            Some(public_subscription_metadata(payload.source)),
        )
        .await?;
        admin_audit::log_event(
            &ctx,
            None,
            if email_result.requires_confirmation {
                "subscription.public.pending"
            } else {
                "subscription.public.active"
            },
            "subscription",
            Some(email_result.subscription.id.to_string()),
            Some(email_result.subscription.target.clone()),
            Some(serde_json::json!({
                "channel_type": email_result.subscription.channel_type,
                "status": email_result.subscription.status,
            })),
        )
        .await?;
        message_parts.push(email_result.message.clone());
        items.push(PublicCombinedSubscriptionItemResponse {
            channel_type: email_result.subscription.channel_type,
            subscription_id: email_result.subscription.id,
            status: email_result.subscription.status,
            requires_confirmation: email_result.requires_confirmation,
        });
    }
    // Join the per-channel human-readable messages into one summary.
    let message = if message_parts.is_empty() {
        "订阅请求已处理。".to_string()
    } else {
        message_parts.join(" ")
    };
    format::json(PublicCombinedSubscriptionResponse {
        ok: true,
        channels: items,
        message,
    })
}
#[debug_handler]
pub async fn confirm(
State(ctx): State<AppContext>,
@@ -333,6 +554,7 @@ pub fn routes() -> Routes {
Routes::new()
.prefix("/api/subscriptions")
.add("/", post(subscribe))
.add("/combined", post(subscribe_combined))
.add("/browser-push", post(subscribe_browser_push))
.add("/confirm", post(confirm))
.add("/manage", get(manage).patch(update_manage))

View File

@@ -2,35 +2,35 @@
pid: 1
author: "林川"
email: "linchuan@example.com"
content: "这篇做长文测试很合适,段落密度和古文节奏都不错。"
content: "这篇读起来很稳,段落密度和古文节奏都很舒服。"
approved: true
- id: 2
pid: 1
author: "阿青"
email: "aqing@example.com"
content: "建议后面再加几篇山水游记,方便测试问答检索是否能区分不同山名。"
content: "建议后面再加几篇山水游记,读者会更容易比较不同山名与路线。"
approved: true
- id: 3
pid: 2
author: "周宁"
email: "zhouling@example.com"
content: "这一段关于南岩和琼台的描写很好,适合测试段落评论锚点。"
content: "这一段关于南岩和琼台的描写很好,细节很有画面感。"
approved: true
- id: 4
pid: 3
author: "顾远"
email: "guyuan@example.com"
content: "悬空寺这一段信息量很大,拿来测试 AI 摘要应该很有代表性。"
content: "悬空寺这一段信息量很大,拿来做导读或摘录都很有代表性。"
approved: true
- id: 5
pid: 4
author: "清嘉"
email: "qingjia@example.com"
content: "黄山记的序文很适合测试首屏摘要生成。"
content: "黄山记的序文很适合作为开篇导读,气势一下就起来了。"
approved: true
- id: 6

View File

@@ -10,7 +10,7 @@
自此连逾山岭,桃李缤纷,山花夹道,幽艳异常。山坞之中,居庐相望,沿流稻畦,高下鳞次,不似山、陕间矣。
骑而南趋,石道平敞。三十里,越一石梁,有溪自西东注,即太和下流入汉者。越桥为迎恩宫,西向。前有碑大书“第一山”三字,乃米襄阳笔。
excerpt: "《徐霞客游记》太和山上篇,适合作为中文长文测试样本。"
excerpt: "《徐霞客游记》太和山上篇,写山路、水势与沿途景物,适合静心细读。"
category: "古籍游记"
published: true
pinned: true
@@ -18,7 +18,7 @@
- 徐霞客
- 游记
- 太和山
- 长文测试
- 山水游记
- id: 2
pid: 2
@@ -40,7 +40,7 @@
- 徐霞客
- 游记
- 太和山
- 长文测试
- 山水游记
- id: 3
pid: 3
@@ -54,7 +54,7 @@
余溯西涧入,又一涧自北来,遂从其西登岭,道甚峻。北向直上者六七里,西转,又北跻而上者五六里,登峰两重,造其巅,是名箭筸岭。
三转,峡愈隘,崖愈高。西崖之半,层楼高悬,曲榭斜倚,望之如蜃吐重台者,悬空寺也。
excerpt: "游恒山、悬空寺与北岳登顶的古文纪行,适合做中文长文测试。"
excerpt: "游恒山、悬空寺与北岳登顶的古文纪行,气象开阔,层次分明。"
category: "古籍游记"
published: true
pinned: false
@@ -62,7 +62,7 @@
- 徐霞客
- 恒山
- 悬空寺
- 长文测试
- 山水游记
- id: 4
pid: 4
@@ -84,7 +84,7 @@
- 钱谦益
- 黄山
- 游记
- 长文测试
- 山水游记
- id: 5
pid: 5
@@ -98,7 +98,7 @@
憩桃源庵,指天都为诸峰之中峰,山形络绎,未有以殊异也。云生峰腰,层叠如裼衣焉。
清晓,出文殊院,神鸦背行而先。避莲华沟险,从支径右折,险益甚。上平天矼,转始信峰,经散花坞,看扰龙松。
excerpt: "钱谦益《游黄山记》中篇,适合测试中文长文、检索与段落锚点。"
excerpt: "钱谦益《游黄山记》中篇,写奇峰云气与山行转折,节奏峻拔。"
category: "古籍游记"
published: true
pinned: false
@@ -106,4 +106,4 @@
- 钱谦益
- 黄山
- 游记
- 长文测试
- 山水游记

View File

@@ -34,7 +34,7 @@
rating: 5
review_date: "2024-02-18"
status: "published"
description: "把很多宏观经济问题讲得非常清楚,适合做深阅读测试。"
description: "把很多宏观经济问题讲得非常清楚,适合反复阅读。"
tags: ["经济", "非虚构", "中国"]
cover: "/review-covers/placed-within.svg"

View File

@@ -2,10 +2,10 @@
site_name: "InitCool"
site_short_name: "Termi"
site_url: "https://init.cool"
site_title: "InitCool · 中文长文与 AI 搜索实验站"
site_description: "一个偏终端审美的中文内容站用来测试文章检索、AI 问答、段落评论与后台工作流。"
hero_title: "欢迎来到我的中文内容实验站"
hero_subtitle: "这里有长文章、评测、友链,以及逐步打磨中的 AI 搜索体验"
site_title: "InitCool · 技术笔记与内容档案"
site_description: "围绕开发实践、产品观察与长期积累整理的中文内容站。"
hero_title: "欢迎来到 InitCool"
hero_subtitle: "记录开发实践、产品观察与长期积累,分享清晰、耐读、可回看的内容。"
owner_name: "InitCool"
owner_title: "Rust / Go / Python Developer · Builder @ init.cool"
owner_bio: "InitCoolGitHub 用户名 limitcool。坚持不要重复造轮子当前在维护 starter平时主要写 Rust、Go、Python 相关项目,也在持续学习 AI 与 Web3。"
@@ -43,6 +43,9 @@
cover_image_url: "https://images.unsplash.com/photo-1493225457124-a3eb161ffa5f?auto=format&fit=crop&w=600&q=80"
accent_color: "#375a7f"
description: "节奏更明显一点,适合切换阅读状态。"
music_enabled: true
maintenance_mode_enabled: false
maintenance_access_code: null
ai_enabled: false
paragraph_comments_enabled: true
comment_verification_mode: "captcha"

View File

@@ -108,19 +108,24 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
})
.filter(|items| !items.is_empty())
.map(serde_json::Value::Array);
let music_enabled = seed["music_enabled"].as_bool().or(Some(true));
let maintenance_mode_enabled = seed["maintenance_mode_enabled"].as_bool().or(Some(false));
let maintenance_access_code = as_optional_string(&seed["maintenance_access_code"]);
let comment_verification_mode = as_optional_string(&seed["comment_verification_mode"]);
let subscription_verification_mode =
as_optional_string(&seed["subscription_verification_mode"]);
let comment_turnstile_enabled = seed["comment_turnstile_enabled"]
.as_bool()
.or(comment_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let subscription_turnstile_enabled = seed["subscription_turnstile_enabled"]
.as_bool()
.or(subscription_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let comment_turnstile_enabled =
seed["comment_turnstile_enabled"]
.as_bool()
.or(comment_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let subscription_turnstile_enabled =
seed["subscription_turnstile_enabled"]
.as_bool()
.or(subscription_verification_mode
.as_deref()
.map(|value| value.eq_ignore_ascii_case("turnstile")));
let existing = site_settings::Entity::find()
.order_by_asc(site_settings::Column::Id)
@@ -182,6 +187,15 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
if existing.music_playlist.is_none() {
model.music_playlist = Set(music_playlist);
}
if existing.music_enabled.is_none() {
model.music_enabled = Set(music_enabled);
}
if existing.maintenance_mode_enabled.is_none() {
model.maintenance_mode_enabled = Set(maintenance_mode_enabled);
}
if is_blank(&existing.maintenance_access_code) {
model.maintenance_access_code = Set(maintenance_access_code.clone());
}
if existing.ai_enabled.is_none() {
model.ai_enabled = Set(seed["ai_enabled"].as_bool());
}
@@ -261,6 +275,9 @@ async fn sync_site_settings(ctx: &AppContext, base: &Path) -> Result<()> {
location: Set(as_optional_string(&seed["location"])),
tech_stack: Set(tech_stack),
music_playlist: Set(music_playlist),
music_enabled: Set(music_enabled),
maintenance_mode_enabled: Set(maintenance_mode_enabled),
maintenance_access_code: Set(maintenance_access_code),
ai_enabled: Set(seed["ai_enabled"].as_bool()),
paragraph_comments_enabled: Set(seed["paragraph_comments_enabled"]
.as_bool()

View File

@@ -1,7 +1,7 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.10
pub use super::ai_chunks::Entity as AiChunks;
pub use super::admin_audit_logs::Entity as AdminAuditLogs;
pub use super::ai_chunks::Entity as AiChunks;
pub use super::categories::Entity as Categories;
pub use super::comment_blacklist::Entity as CommentBlacklist;
pub use super::comment_persona_analysis_logs::Entity as CommentPersonaAnalysisLogs;

View File

@@ -30,6 +30,10 @@ pub struct Model {
pub tech_stack: Option<Json>,
#[sea_orm(column_type = "JsonBinary", nullable)]
pub music_playlist: Option<Json>,
pub music_enabled: Option<bool>,
pub maintenance_mode_enabled: Option<bool>,
#[sea_orm(column_type = "Text", nullable)]
pub maintenance_access_code: Option<String>,
pub ai_enabled: Option<bool>,
pub paragraph_comments_enabled: Option<bool>,
pub comment_turnstile_enabled: Option<bool>,

View File

@@ -1,5 +1,5 @@
use async_trait::async_trait;
use chrono::{offset::Local, Duration};
use chrono::{Duration, offset::Local};
use loco_rs::{auth::jwt, hash, prelude::*};
use serde::{Deserialize, Serialize};
use serde_json::Map;

View File

@@ -3,12 +3,9 @@ use std::{
sync::{Mutex, OnceLock},
};
use axum::http::{header, HeaderMap, StatusCode};
use axum::http::{HeaderMap, StatusCode, header};
use chrono::{DateTime, Duration, Utc};
use loco_rs::{
controller::ErrorDetail,
prelude::*,
};
use loco_rs::{controller::ErrorDetail, prelude::*};
const DEFAULT_WINDOW_SECONDS: i64 = 5 * 60;
const DEFAULT_MAX_REQUESTS_PER_WINDOW: u32 = 45;

View File

@@ -1,33 +1,15 @@
use loco_rs::prelude::*;
use sea_orm::{ActiveModelTrait, Set};
use loco_rs::prelude::{AppContext, Result};
use crate::{
controllers::admin::AdminIdentity,
models::_entities::admin_audit_logs,
};
use crate::controllers::admin::AdminIdentity;
pub async fn log_event(
ctx: &AppContext,
actor: Option<&AdminIdentity>,
action: &str,
target_type: &str,
target_id: Option<String>,
target_label: Option<String>,
metadata: Option<serde_json::Value>,
_ctx: &AppContext,
_actor: Option<&AdminIdentity>,
_action: &str,
_target_type: &str,
_target_id: Option<String>,
_target_label: Option<String>,
_metadata: Option<serde_json::Value>,
) -> Result<()> {
admin_audit_logs::ActiveModel {
actor_username: Set(actor.map(|item| item.username.clone())),
actor_email: Set(actor.and_then(|item| item.email.clone())),
actor_source: Set(actor.map(|item| item.source.clone())),
action: Set(action.to_string()),
target_type: Set(target_type.to_string()),
target_id: Set(target_id),
target_label: Set(target_label),
metadata: Set(metadata),
..Default::default()
}
.insert(&ctx.db)
.await?;
Ok(())
}

View File

@@ -246,9 +246,7 @@ fn normalize_tracking_source_token(value: Option<String>) -> String {
"chatgpt-search".to_string()
}
value if value.contains("perplexity") => "perplexity".to_string(),
value if value.contains("copilot") || value.contains("bing") => {
"copilot-bing".to_string()
}
value if value.contains("copilot") || value.contains("bing") => "copilot-bing".to_string(),
value if value.contains("gemini") => "gemini".to_string(),
value if value.contains("google") => "google".to_string(),
value if value.contains("claude") => "claude".to_string(),
@@ -289,11 +287,10 @@ fn sorted_referrer_buckets(
let mut items = breakdown
.iter()
.filter_map(|(referrer, count)| {
predicate(referrer)
.then(|| AnalyticsReferrerBucket {
referrer: referrer.clone(),
count: *count,
})
predicate(referrer).then(|| AnalyticsReferrerBucket {
referrer: referrer.clone(),
count: *count,
})
})
.collect::<Vec<_>>();
@@ -648,8 +645,11 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
page_views_last_24h += 1;
}
let referrer =
normalize_tracking_source(Some(&event.path), event.referrer.clone(), event.metadata.as_ref());
let referrer = normalize_tracking_source(
Some(&event.path),
event.referrer.clone(),
event.metadata.as_ref(),
);
*referrer_breakdown.entry(referrer).or_insert(0) += 1;
}
@@ -737,7 +737,8 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
providers_last_7d.truncate(6);
let top_referrers = sorted_referrer_buckets(&referrer_breakdown, |_| true, 8);
let ai_referrers_last_7d = sorted_referrer_buckets(&referrer_breakdown, is_ai_discovery_source, 6);
let ai_referrers_last_7d =
sorted_referrer_buckets(&referrer_breakdown, is_ai_discovery_source, 6);
let ai_discovery_page_views_last_7d = referrer_breakdown
.iter()
.filter(|(referrer, _)| is_ai_discovery_source(referrer))
@@ -747,7 +748,17 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
let mut popular_posts = post_breakdown
.into_iter()
.map(
|(slug, (page_views, read_completes, total_progress, progress_count, total_duration, duration_count))| {
|(
slug,
(
page_views,
read_completes,
total_progress,
progress_count,
total_duration,
duration_count,
),
)| {
AnalyticsPopularPost {
title: post_titles
.get(&slug)
@@ -1018,7 +1029,8 @@ pub async fn build_public_content_highlights(
} else {
0.0
},
avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
avg_duration_ms: (duration_count > 0)
.then(|| total_duration / duration_count as f64),
},
)
.collect::<Vec<_>>();
@@ -1085,8 +1097,22 @@ pub async fn build_public_content_windows(
.await?;
Ok(vec![
summarize_public_content_window(&events, &post_titles, now - Duration::hours(24), "24h", "24h", 1),
summarize_public_content_window(&events, &post_titles, now - Duration::days(7), "7d", "7d", 7),
summarize_public_content_window(
&events,
&post_titles,
now - Duration::hours(24),
"24h",
"24h",
1,
),
summarize_public_content_window(
&events,
&post_titles,
now - Duration::days(7),
"7d",
"7d",
7,
),
summarize_public_content_window(&events, &post_titles, since_30d, "30d", "30d", 30),
])
}
@@ -1233,7 +1259,8 @@ fn summarize_public_content_window(
} else {
0.0
},
avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
avg_duration_ms: (duration_count > 0)
.then(|| total_duration / duration_count as f64),
},
)
.collect::<Vec<_>>();

View File

@@ -30,16 +30,23 @@ struct MarkdownFrontmatter {
deserialize_with = "deserialize_optional_string_list"
)]
categories: Option<Vec<String>>,
#[serde(default, deserialize_with = "deserialize_optional_string_list")]
#[serde(
default,
alias = "tag",
deserialize_with = "deserialize_optional_string_list"
)]
tags: Option<Vec<String>>,
post_type: Option<String>,
image: Option<String>,
images: Option<Vec<String>>,
pinned: Option<bool>,
#[serde(alias = "Hidden")]
hidden: Option<bool>,
published: Option<bool>,
draft: Option<bool>,
status: Option<String>,
visibility: Option<String>,
#[serde(alias = "date")]
publish_at: Option<String>,
unpublish_at: Option<String>,
canonical_url: Option<String>,
@@ -233,6 +240,18 @@ fn resolve_post_status(frontmatter: &MarkdownFrontmatter) -> String {
}
}
/// Resolves a post's visibility from frontmatter: an explicit `visibility`
/// field wins; otherwise a truthy `hidden`/`Hidden` flag maps to "unlisted",
/// and everything else defaults to "public".
fn resolve_post_visibility(frontmatter: &MarkdownFrontmatter) -> String {
    match trim_to_option(frontmatter.visibility.clone()) {
        // Explicit visibility string takes precedence over the hidden flag.
        Some(visibility) => normalize_post_visibility(Some(&visibility)),
        None if frontmatter.hidden.unwrap_or(false) => POST_VISIBILITY_UNLISTED.to_string(),
        None => POST_VISIBILITY_PUBLIC.to_string(),
    }
}
pub fn effective_post_state(
status: &str,
publish_at: Option<DateTime<FixedOffset>>,
@@ -500,7 +519,7 @@ pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Res
images: normalize_string_list(frontmatter.images.clone()),
pinned: frontmatter.pinned.unwrap_or(false),
status: resolve_post_status(&frontmatter),
visibility: normalize_post_visibility(frontmatter.visibility.as_deref()),
visibility: resolve_post_visibility(&frontmatter),
publish_at: format_frontmatter_datetime(parse_frontmatter_datetime(
frontmatter.publish_at.clone(),
)),
@@ -1152,3 +1171,39 @@ pub async fn import_markdown_documents(
Ok(imported)
}
#[cfg(test)]
mod tests {
    use super::{POST_VISIBILITY_UNLISTED, parse_markdown_source};

    /// Verifies Hugo-flavoured frontmatter aliases: `date` feeds publish_at,
    /// `tag` feeds tags, and capitalized `Hidden` yields unlisted visibility.
    #[test]
    fn parse_markdown_source_supports_hugo_aliases() {
        let markdown = r#"---
title: "Linux Shell"
date: 2022-05-21T10:02:09+08:00
draft: false
Hidden: true
slug: linux-shell
categories:
- Linux
tag:
- Linux
- Shell
---
# Linux Shell
"#;
        let post = parse_markdown_source("linux-shell", markdown, "content/posts/linux-shell.md")
            .expect("markdown should parse");
        assert_eq!(post.slug, "linux-shell");
        assert_eq!(post.category.as_deref(), Some("Linux"));
        assert_eq!(post.tags, vec!["Linux", "Shell"]);
        assert_eq!(post.visibility, POST_VISIBILITY_UNLISTED);
        // Publish time is normalized to UTC (+08:00 input becomes +00:00).
        assert_eq!(
            post.publish_at.as_deref(),
            Some("2022-05-21T02:02:09+00:00")
        );
    }
}

View File

@@ -1,5 +1,5 @@
pub mod admin_audit;
pub mod abuse_guard;
pub mod admin_audit;
pub mod ai;
pub mod analytics;
pub mod backups;

View File

@@ -1,9 +1,9 @@
use loco_rs::prelude::*;
use crate::{
controllers::site_settings,
models::_entities::{comments, friend_links, site_settings as site_settings_model},
services::subscriptions,
};
use loco_rs::prelude::*;
fn notification_channel_type(settings: &site_settings_model::Model) -> &'static str {
match settings
@@ -71,10 +71,16 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
});
let text = format!(
"收到一条新的评论。\n\n文章:{}\n作者:{}\n范围:{}\n状态:{}\n摘要:{}",
item.post_slug.clone().unwrap_or_else(|| "未知文章".to_string()),
item.post_slug
.clone()
.unwrap_or_else(|| "未知文章".to_string()),
item.author.clone().unwrap_or_else(|| "匿名".to_string()),
item.scope,
if item.approved.unwrap_or(false) { "已通过" } else { "待审核" },
if item.approved.unwrap_or(false) {
"已通过"
} else {
"待审核"
},
excerpt(item.content.as_deref(), 200).unwrap_or_else(|| "".to_string()),
);
@@ -135,9 +141,13 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
});
let text = format!(
"收到新的友链申请。\n\n站点:{}\n链接:{}\n分类:{}\n状态:{}\n描述:{}",
item.site_name.clone().unwrap_or_else(|| "未命名站点".to_string()),
item.site_name
.clone()
.unwrap_or_else(|| "未命名站点".to_string()),
item.site_url,
item.category.clone().unwrap_or_else(|| "未分类".to_string()),
item.category
.clone()
.unwrap_or_else(|| "未分类".to_string()),
item.status.clone().unwrap_or_else(|| "pending".to_string()),
item.description.clone().unwrap_or_else(|| "".to_string()),
);

View File

@@ -1,5 +1,5 @@
use aws_config::BehaviorVersion;
use aws_sdk_s3::{config::Credentials, primitives::ByteStream, Client};
use aws_sdk_s3::{Client, config::Credentials, primitives::ByteStream};
use loco_rs::prelude::*;
use sea_orm::{EntityTrait, QueryOrder};
use std::path::{Path, PathBuf};

View File

@@ -243,11 +243,18 @@ fn normalize_browser_push_subscription(raw: Value) -> Result<Value> {
serde_json::to_value(subscription).map_err(Into::into)
}
fn merge_browser_push_metadata(existing: Option<&Value>, incoming: Option<Value>, subscription: Value) -> Value {
fn merge_browser_push_metadata(
existing: Option<&Value>,
incoming: Option<Value>,
subscription: Value,
) -> Value {
let mut object = merge_metadata(existing, incoming)
.and_then(|value| value.as_object().cloned())
.unwrap_or_default();
object.insert("kind".to_string(), Value::String("browser-push".to_string()));
object.insert(
"kind".to_string(),
Value::String("browser-push".to_string()),
);
object.insert("subscription".to_string(), subscription);
Value::Object(object)
}
@@ -280,7 +287,8 @@ fn payload_match_strings(payload: &Value, key: &str) -> Vec<String> {
if let Some(items) = payload.get(key).and_then(Value::as_array) {
values.extend(
items.iter()
items
.iter()
.filter_map(Value::as_str)
.map(normalize_string)
.filter(|item| !item.is_empty()),
@@ -298,7 +306,8 @@ fn payload_match_strings(payload: &Value, key: &str) -> Vec<String> {
if let Some(items) = post.get(key).and_then(Value::as_array) {
values.extend(
items.iter()
items
.iter()
.filter_map(Value::as_str)
.map(normalize_string)
.filter(|item| !item.is_empty()),
@@ -410,19 +419,31 @@ pub fn to_public_subscription_view(item: &subscriptions::Model) -> PublicSubscri
}
}
fn subscription_links(item: &subscriptions::Model, site_context: &SiteContext) -> (Option<String>, Option<String>, Option<String>) {
let manage_url = item
.manage_token
.as_deref()
.and_then(|token| build_token_link(site_context.site_url.as_deref(), "/subscriptions/manage", token));
let unsubscribe_url = item
.manage_token
.as_deref()
.and_then(|token| build_token_link(site_context.site_url.as_deref(), "/subscriptions/unsubscribe", token));
let confirm_url = item
.confirm_token
.as_deref()
.and_then(|token| build_token_link(site_context.site_url.as_deref(), "/subscriptions/confirm", token));
/// Builds the (manage, unsubscribe, confirm) URLs for a subscription.
///
/// Each slot is `Some` only when the corresponding token exists on the model
/// and `build_token_link` can combine it with the configured site URL.
fn subscription_links(
    item: &subscriptions::Model,
    site_context: &SiteContext,
) -> (Option<String>, Option<String>, Option<String>) {
    // manage and unsubscribe links share the same manage token,
    // differing only in path.
    let manage_url = item.manage_token.as_deref().and_then(|token| {
        build_token_link(
            site_context.site_url.as_deref(),
            "/subscriptions/manage",
            token,
        )
    });
    let unsubscribe_url = item.manage_token.as_deref().and_then(|token| {
        build_token_link(
            site_context.site_url.as_deref(),
            "/subscriptions/unsubscribe",
            token,
        )
    });
    // confirm link uses the separate confirmation token.
    let confirm_url = item.confirm_token.as_deref().and_then(|token| {
        build_token_link(
            site_context.site_url.as_deref(),
            "/subscriptions/confirm",
            token,
        )
    });
    (manage_url, unsubscribe_url, confirm_url)
}
@@ -449,7 +470,11 @@ async fn send_confirmation_email(ctx: &AppContext, item: &subscriptions::Model)
.await
}
fn subscription_allows_event(item: &subscriptions::Model, event_type: &str, payload: &Value) -> bool {
fn subscription_allows_event(
item: &subscriptions::Model,
event_type: &str,
payload: &Value,
) -> bool {
if normalize_status(&item.status) != STATUS_ACTIVE {
return false;
}
@@ -487,7 +512,9 @@ fn subscription_allows_event(item: &subscriptions::Model, event_type: &str, payl
if !tags.is_empty() {
let payload_tags = payload_match_strings(payload, "tags");
if payload_tags.is_empty()
|| !tags.iter().any(|tag| payload_tags.iter().any(|item| item == tag))
|| !tags
.iter()
.any(|tag| payload_tags.iter().any(|item| item == tag))
{
return false;
}
@@ -501,10 +528,15 @@ pub async fn list_subscriptions(
channel_type: Option<&str>,
status: Option<&str>,
) -> Result<Vec<subscriptions::Model>> {
let mut query = subscriptions::Entity::find().order_by(subscriptions::Column::CreatedAt, Order::Desc);
let mut query =
subscriptions::Entity::find().order_by(subscriptions::Column::CreatedAt, Order::Desc);
if let Some(channel_type) = channel_type.map(str::trim).filter(|value| !value.is_empty()) {
query = query.filter(subscriptions::Column::ChannelType.eq(normalize_channel_type(channel_type)));
if let Some(channel_type) = channel_type
.map(str::trim)
.filter(|value| !value.is_empty())
{
query = query
.filter(subscriptions::Column::ChannelType.eq(normalize_channel_type(channel_type)));
}
if let Some(status) = status.map(str::trim).filter(|value| !value.is_empty()) {
@@ -771,7 +803,9 @@ pub async fn update_subscription_preferences(
if let Some(status) = status {
let normalized = normalize_status(&status);
if normalized == STATUS_PENDING {
return Err(Error::BadRequest("偏好页不支持将状态改回 pending".to_string()));
return Err(Error::BadRequest(
"偏好页不支持将状态改回 pending".to_string(),
));
}
active.status = Set(normalized);
}
@@ -783,7 +817,10 @@ pub async fn update_subscription_preferences(
active.update(&ctx.db).await.map_err(Into::into)
}
pub async fn unsubscribe_subscription(ctx: &AppContext, token: &str) -> Result<subscriptions::Model> {
pub async fn unsubscribe_subscription(
ctx: &AppContext,
token: &str,
) -> Result<subscriptions::Model> {
let item = get_subscription_by_manage_token(ctx, token).await?;
let mut active = item.into_active_model();
active.status = Set(STATUS_UNSUBSCRIBED.to_string());
@@ -821,11 +858,7 @@ async fn update_subscription_delivery_state(
let mut active = subscription.into_active_model();
active.last_notified_at = Set(Some(Utc::now().to_rfc3339()));
active.last_delivery_status = Set(Some(status.to_string()));
active.failure_count = Set(Some(if success {
0
} else {
current_failures + 1
}));
active.failure_count = Set(Some(if success { 0 } else { current_failures + 1 }));
let _ = active.update(&ctx.db).await?;
Ok(())
}
@@ -945,10 +978,16 @@ pub async fn queue_event_for_active_subscriptions(
) -> Result<QueueDispatchSummary> {
let subscriptions = active_subscriptions(ctx).await?;
if subscriptions.is_empty() {
return Ok(QueueDispatchSummary { queued: 0, skipped: 0 });
return Ok(QueueDispatchSummary {
queued: 0,
skipped: 0,
});
}
let site_context = SiteContext { site_name, site_url };
let site_context = SiteContext {
site_name,
site_url,
};
let mut queued = 0usize;
let mut skipped = 0usize;
@@ -1058,38 +1097,32 @@ async fn deliver_via_channel(
CHANNEL_EMAIL => Err(Error::BadRequest(
"email channel must be delivered via subscription context".to_string(),
)),
CHANNEL_DISCORD => {
Client::new()
.post(target)
.json(&serde_json::json!({ "content": message.text }))
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_TELEGRAM => {
Client::new()
.post(target)
.json(&serde_json::json!({ "text": message.text }))
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_NTFY => {
Client::new()
.post(resolve_ntfy_target(target))
.header("Title", &message.subject)
.header("Content-Type", "text/plain; charset=utf-8")
.body(message.text.clone())
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_DISCORD => Client::new()
.post(target)
.json(&serde_json::json!({ "content": message.text }))
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string())),
CHANNEL_TELEGRAM => Client::new()
.post(target)
.json(&serde_json::json!({ "text": message.text }))
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string())),
CHANNEL_NTFY => Client::new()
.post(resolve_ntfy_target(target))
.header("Title", &message.subject)
.header("Content-Type", "text/plain; charset=utf-8")
.body(message.text.clone())
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string())),
CHANNEL_WEB_PUSH => {
let settings = crate::controllers::site_settings::load_current(ctx).await?;
let subscription_info = web_push_service::subscription_info_from_metadata(metadata)?;
@@ -1141,7 +1174,10 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
return Ok(());
};
if matches!(delivery.status.as_str(), DELIVERY_STATUS_SENT | DELIVERY_STATUS_SKIPPED | DELIVERY_STATUS_EXHAUSTED) {
if matches!(
delivery.status.as_str(),
DELIVERY_STATUS_SENT | DELIVERY_STATUS_SKIPPED | DELIVERY_STATUS_EXHAUSTED
) {
return Ok(());
}
@@ -1149,15 +1185,19 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
.payload
.clone()
.ok_or_else(|| Error::BadRequest("delivery payload 为空".to_string()))
.and_then(|value| serde_json::from_value::<QueuedDeliveryPayload>(value).map_err(Into::into))?;
.and_then(|value| {
serde_json::from_value::<QueuedDeliveryPayload>(value).map_err(Into::into)
})?;
let attempts = delivery.attempts_count + 1;
let now = Utc::now().to_rfc3339();
let subscription = match delivery.subscription_id {
Some(subscription_id) => subscriptions::Entity::find_by_id(subscription_id)
.one(&ctx.db)
.await?,
Some(subscription_id) => {
subscriptions::Entity::find_by_id(subscription_id)
.one(&ctx.db)
.await?
}
None => None,
};
@@ -1171,7 +1211,13 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
active.next_retry_at = Set(None);
active.delivered_at = Set(Some(Utc::now().to_rfc3339()));
let _ = active.update(&ctx.db).await?;
update_subscription_delivery_state(ctx, Some(subscription.id), DELIVERY_STATUS_SKIPPED, false).await?;
update_subscription_delivery_state(
ctx,
Some(subscription.id),
DELIVERY_STATUS_SKIPPED,
false,
)
.await?;
return Ok(());
}
}
@@ -1202,7 +1248,14 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
.await
}
} else {
deliver_via_channel(ctx, &delivery.channel_type, &delivery.target, &message, None).await
deliver_via_channel(
ctx,
&delivery.channel_type,
&delivery.target,
&message,
None,
)
.await
};
let subscription_id = delivery.subscription_id;
let delivery_channel_type = delivery.channel_type.clone();
@@ -1218,7 +1271,8 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
active.next_retry_at = Set(None);
active.delivered_at = Set(Some(Utc::now().to_rfc3339()));
let _ = active.update(&ctx.db).await?;
update_subscription_delivery_state(ctx, subscription_id, DELIVERY_STATUS_SENT, true).await?;
update_subscription_delivery_state(ctx, subscription_id, DELIVERY_STATUS_SENT, true)
.await?;
}
Err(error) => {
let next_retry_at = (attempts < MAX_DELIVERY_ATTEMPTS)
@@ -1298,7 +1352,10 @@ pub async fn send_test_notification(
.await
}
pub async fn notify_post_published(ctx: &AppContext, post: &content::MarkdownPost) -> Result<QueueDispatchSummary> {
pub async fn notify_post_published(
ctx: &AppContext,
post: &content::MarkdownPost,
) -> Result<QueueDispatchSummary> {
let site_context = load_site_context(ctx).await;
let public_url = post_public_url(site_context.site_url.as_deref(), &post.slug);
let subject = format!("新文章发布:{}", post.title);
@@ -1315,13 +1372,17 @@ pub async fn notify_post_published(ctx: &AppContext, post: &content::MarkdownPos
let text = format!(
"{}》已发布。\n\n分类:{}\n标签:{}\n链接:{}\n\n{}",
post.title,
post.category.clone().unwrap_or_else(|| "未分类".to_string()),
post.category
.clone()
.unwrap_or_else(|| "未分类".to_string()),
if post.tags.is_empty() {
"".to_string()
} else {
post.tags.join(", ")
},
public_url.clone().unwrap_or_else(|| format!("/articles/{}", post.slug)),
public_url
.clone()
.unwrap_or_else(|| format!("/articles/{}", post.slug)),
post.description.clone().unwrap_or_default(),
);
@@ -1355,7 +1416,8 @@ pub async fn send_digest(ctx: &AppContext, period: &str) -> Result<DigestDispatc
let lines = if posts.is_empty() {
vec![format!("最近 {} 天还没有新的公开文章。", days)]
} else {
posts.iter()
posts
.iter()
.map(|post| {
let url = post_public_url(site_context.site_url.as_deref(), &post.slug)
.unwrap_or_else(|| format!("/articles/{}", post.slug));
@@ -1369,7 +1431,14 @@ pub async fn send_digest(ctx: &AppContext, period: &str) -> Result<DigestDispatc
.collect::<Vec<_>>()
};
let subject = format!("{} 内容摘要", if normalized_period == "monthly" { "月报" } else { "周报" });
let subject = format!(
"{} 内容摘要",
if normalized_period == "monthly" {
"月报"
} else {
"周报"
}
);
let body = format!("统计周期:最近 {}\n\n{}", days, lines.join("\n\n"));
let payload = serde_json::json!({
"event_type": event_type,

View File

@@ -91,8 +91,7 @@ fn normalize_ip(value: Option<&str>) -> Option<String> {
}
fn verify_url() -> String {
env_value(ENV_TURNSTILE_VERIFY_URL)
.unwrap_or_else(|| DEFAULT_TURNSTILE_VERIFY_URL.to_string())
env_value(ENV_TURNSTILE_VERIFY_URL).unwrap_or_else(|| DEFAULT_TURNSTILE_VERIFY_URL.to_string())
}
fn client() -> &'static Client {
@@ -173,11 +172,10 @@ pub async fn verify_token(
token: Option<&str>,
client_ip: Option<&str>,
) -> Result<()> {
let secret = secret_key(settings).ok_or_else(|| {
Error::BadRequest("人机验证尚未配置完成,请稍后重试".to_string())
})?;
let response_token = trim_to_option(token)
.ok_or_else(|| Error::BadRequest("请先完成人机验证".to_string()))?;
let secret = secret_key(settings)
.ok_or_else(|| Error::BadRequest("人机验证尚未配置完成,请稍后重试".to_string()))?;
let response_token =
trim_to_option(token).ok_or_else(|| Error::BadRequest("请先完成人机验证".to_string()))?;
let mut form_data = vec![
("secret".to_string(), secret),

View File

@@ -66,9 +66,7 @@ pub fn private_key_configured(settings: &site_settings::Model) -> bool {
}
pub fn is_enabled(settings: &site_settings::Model) -> bool {
settings.web_push_enabled.unwrap_or(false)
&& public_key_configured(settings)
&& private_key_configured(settings)
public_key_configured(settings) && private_key_configured(settings)
}
pub fn subscription_info_from_metadata(metadata: Option<&Value>) -> Result<SubscriptionInfo> {

View File

@@ -1,14 +1,11 @@
use chrono::Utc;
use loco_rs::{
bgworker::BackgroundWorker,
prelude::*,
};
use loco_rs::{bgworker::BackgroundWorker, prelude::*};
use sea_orm::{
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, Order,
PaginatorTrait, QueryFilter, QueryOrder, QuerySelect, Set,
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, Order, PaginatorTrait,
QueryFilter, QueryOrder, QuerySelect, Set,
};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use serde_json::{Value, json};
use crate::{
models::_entities::{notification_deliveries, worker_jobs},
@@ -213,7 +210,10 @@ fn can_cancel_status(status: &str, cancel_requested: bool) -> bool {
}
fn can_retry_status(status: &str) -> bool {
matches!(status, JOB_STATUS_FAILED | JOB_STATUS_CANCELLED | JOB_STATUS_SUCCEEDED)
matches!(
status,
JOB_STATUS_FAILED | JOB_STATUS_CANCELLED | JOB_STATUS_SUCCEEDED
)
}
fn to_job_record(item: worker_jobs::Model) -> WorkerJobRecord {
@@ -256,15 +256,17 @@ fn catalog_entries() -> Vec<WorkerCatalogEntry> {
(TASK_SEND_MONTHLY_DIGEST, JOB_KIND_TASK, true, true),
]
.into_iter()
.map(|(worker_name, job_kind, supports_cancel, supports_retry)| WorkerCatalogEntry {
worker_name: worker_name.to_string(),
job_kind: job_kind.to_string(),
label: label_for(worker_name),
description: description_for(worker_name),
queue_name: queue_name_for(worker_name),
supports_cancel,
supports_retry,
})
.map(
|(worker_name, job_kind, supports_cancel, supports_retry)| WorkerCatalogEntry {
worker_name: worker_name.to_string(),
job_kind: job_kind.to_string(),
label: label_for(worker_name),
description: description_for(worker_name),
queue_name: queue_name_for(worker_name),
supports_cancel,
supports_retry,
},
)
.collect()
}
@@ -311,7 +313,10 @@ async fn dispatch_download(args_ctx: AppContext, args: DownloadWorkerArgs) {
}
}
async fn dispatch_notification_delivery(args_ctx: AppContext, args: NotificationDeliveryWorkerArgs) {
async fn dispatch_notification_delivery(
args_ctx: AppContext,
args: NotificationDeliveryWorkerArgs,
) {
let worker = NotificationDeliveryWorker::build(&args_ctx);
if let Err(error) = worker.perform(args).await {
tracing::warn!("notification delivery worker execution failed: {error}");
@@ -326,7 +331,9 @@ async fn enqueue_download_worker(ctx: &AppContext, args: DownloadWorkerArgs) ->
Ok(())
}
Err(error) => {
tracing::warn!("download worker queue unavailable, falling back to local task: {error}");
tracing::warn!(
"download worker queue unavailable, falling back to local task: {error}"
);
tokio::spawn(dispatch_download(ctx.clone(), args));
Ok(())
}
@@ -344,7 +351,9 @@ async fn enqueue_notification_worker(
Ok(())
}
Err(error) => {
tracing::warn!("notification worker queue unavailable, falling back to local task: {error}");
tracing::warn!(
"notification worker queue unavailable, falling back to local task: {error}"
);
tokio::spawn(dispatch_notification_delivery(ctx.clone(), args));
Ok(())
}
@@ -442,17 +451,19 @@ pub async fn get_overview(ctx: &AppContext) -> Result<WorkerOverview> {
_ => {}
}
let entry = grouped.entry(item.worker_name.clone()).or_insert_with(|| WorkerStats {
worker_name: item.worker_name.clone(),
job_kind: item.job_kind.clone(),
label: label_for(&item.worker_name),
queued: 0,
running: 0,
succeeded: 0,
failed: 0,
cancelled: 0,
last_job_at: None,
});
let entry = grouped
.entry(item.worker_name.clone())
.or_insert_with(|| WorkerStats {
worker_name: item.worker_name.clone(),
job_kind: item.job_kind.clone(),
label: label_for(&item.worker_name),
queued: 0,
running: 0,
succeeded: 0,
failed: 0,
cancelled: 0,
last_job_at: None,
});
match item.status.as_str() {
JOB_STATUS_QUEUED => entry.queued += 1,
@@ -473,18 +484,35 @@ pub async fn get_overview(ctx: &AppContext) -> Result<WorkerOverview> {
}
pub async fn list_jobs(ctx: &AppContext, query: WorkerJobListQuery) -> Result<WorkerJobListResult> {
let mut db_query = worker_jobs::Entity::find().order_by(worker_jobs::Column::CreatedAt, Order::Desc);
let mut db_query =
worker_jobs::Entity::find().order_by(worker_jobs::Column::CreatedAt, Order::Desc);
if let Some(status) = query.status.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(status) = query
.status
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(worker_jobs::Column::Status.eq(status));
}
if let Some(job_kind) = query.job_kind.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(job_kind) = query
.job_kind
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(worker_jobs::Column::JobKind.eq(job_kind));
}
if let Some(worker_name) = query.worker_name.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(worker_name) = query
.worker_name
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(worker_jobs::Column::WorkerName.eq(worker_name));
}
if let Some(search) = query.search.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(search) = query
.search
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(
Condition::any()
.add(worker_jobs::Column::WorkerName.contains(search.clone()))
@@ -830,6 +858,9 @@ pub async fn retry_job(
)
.await
}
_ => Err(Error::BadRequest(format!("不支持重试任务:{}", item.worker_name))),
_ => Err(Error::BadRequest(format!(
"不支持重试任务:{}",
item.worker_name
))),
}
}

View File

@@ -1,5 +1,8 @@
use std::io::Cursor;
use image::{ImageFormat, load_from_memory};
use loco_rs::prelude::*;
use reqwest::{header, redirect::Policy, Url};
use reqwest::{Url, header, redirect::Policy};
use serde::{Deserialize, Serialize};
use crate::services::{media_assets, storage, worker_jobs};
@@ -14,6 +17,8 @@ pub struct DownloadWorkerArgs {
#[serde(default)]
pub prefix: Option<String>,
#[serde(default)]
pub target_format: Option<String>,
#[serde(default)]
pub title: Option<String>,
#[serde(default)]
pub alt_text: Option<String>,
@@ -48,12 +53,30 @@ fn trim_to_option(value: Option<String>) -> Option<String> {
}
fn normalize_prefix(value: Option<String>) -> String {
value.unwrap_or_else(|| "uploads".to_string())
value
.unwrap_or_else(|| "uploads".to_string())
.trim()
.trim_matches('/')
.to_string()
}
pub fn normalize_target_format(value: Option<String>) -> Result<Option<String>> {
let Some(value) = value.map(|item| item.trim().to_ascii_lowercase()) else {
return Ok(None);
};
if value.is_empty() || value == "original" {
return Ok(None);
}
match value.as_str() {
"webp" | "avif" => Ok(Some(value)),
_ => Err(Error::BadRequest(
"target_format 仅支持 webp、avif 或 original".to_string(),
)),
}
}
fn derive_file_name(url: &Url) -> Option<String> {
url.path_segments()
.and_then(|segments| segments.last())
@@ -102,10 +125,78 @@ fn is_supported_content_type(value: Option<&str>) -> bool {
.trim()
.split(';')
.next()
.map(|item| matches!(item, "image/png" | "image/jpeg" | "image/webp" | "image/gif" | "image/avif" | "image/svg+xml" | "application/pdf"))
.map(|item| {
matches!(
item,
"image/png"
| "image/jpeg"
| "image/webp"
| "image/gif"
| "image/avif"
| "image/svg+xml"
| "application/pdf"
)
})
.unwrap_or(false)
}
fn is_convertible_bitmap_content_type(value: Option<&str>) -> bool {
value
.unwrap_or_default()
.trim()
.split(';')
.next()
.map(|item| {
matches!(
item,
"image/png" | "image/jpeg" | "image/webp" | "image/avif"
)
})
.unwrap_or(false)
}
fn target_mime_type(target_format: &str) -> Option<&'static str> {
match target_format {
"webp" => Some("image/webp"),
"avif" => Some("image/avif"),
_ => None,
}
}
fn convert_media_bytes(
bytes: &[u8],
content_type: Option<&str>,
target_format: &str,
) -> Result<(Vec<u8>, String, String)> {
let target_mime = target_mime_type(target_format)
.ok_or_else(|| Error::BadRequest("不支持的目标媒体格式".to_string()))?;
if !is_convertible_bitmap_content_type(content_type) {
return Err(Error::BadRequest(
"当前仅支持把 PNG / JPEG / WebP / AVIF 转成 WebP 或 AVIF".to_string(),
));
}
let image = load_from_memory(bytes)
.map_err(|error| Error::BadRequest(format!("解析远程图片失败: {error}")))?;
let image_format = match target_format {
"webp" => ImageFormat::WebP,
"avif" => ImageFormat::Avif,
_ => return Err(Error::BadRequest("不支持的目标媒体格式".to_string())),
};
let mut cursor = Cursor::new(Vec::new());
image
.write_to(&mut cursor, image_format)
.map_err(|error| Error::BadRequest(format!("转换远程图片格式失败: {error}")))?;
Ok((
cursor.into_inner(),
target_format.to_string(),
target_mime.to_string(),
))
}
fn default_title(args: &DownloadWorkerArgs, file_name: Option<&str>) -> String {
trim_to_option(args.title.clone())
.or_else(|| {
@@ -188,8 +279,41 @@ pub async fn download_media_to_storage(
}
let file_name = derive_file_name(&final_url);
let extension = infer_extension(file_name.as_deref(), content_type.as_deref())
.ok_or_else(|| Error::BadRequest("无法识别远程媒体文件类型".to_string()))?;
let target_format = normalize_target_format(args.target_format.clone())?;
let normalized_source_content_type = content_type
.as_deref()
.map(str::trim)
.and_then(|value| value.split(';').next())
.map(str::to_ascii_lowercase);
let already_target_format = target_format
.as_deref()
.and_then(target_mime_type)
.zip(normalized_source_content_type.as_deref())
.map(|(target_mime, source_mime)| source_mime == target_mime)
.unwrap_or(false);
let (payload_bytes, extension, resolved_content_type) =
if let Some(target_format) = target_format.as_deref() {
if already_target_format {
(
bytes.to_vec(),
target_format.to_string(),
target_mime_type(target_format)
.unwrap_or_default()
.to_string(),
)
} else {
convert_media_bytes(&bytes, content_type.as_deref(), target_format)?
}
} else {
(
bytes.to_vec(),
infer_extension(file_name.as_deref(), content_type.as_deref())
.ok_or_else(|| Error::BadRequest("无法识别远程媒体文件类型".to_string()))?,
content_type
.clone()
.unwrap_or_else(|| "application/octet-stream".to_string()),
)
};
let prefix = normalize_prefix(args.prefix.clone());
let object_key = storage::build_object_key(
&prefix,
@@ -199,8 +323,8 @@ pub async fn download_media_to_storage(
let stored = storage::upload_bytes_to_r2(
ctx,
&object_key,
bytes.to_vec(),
content_type.as_deref(),
payload_bytes.clone(),
Some(resolved_content_type.as_str()),
Some("public, max-age=31536000, immutable"),
)
.await?;
@@ -222,9 +346,9 @@ pub async fn download_media_to_storage(
Ok(DownloadedMediaObject {
key: stored.key,
url: stored.url,
size_bytes: bytes.len() as i64,
size_bytes: payload_bytes.len() as i64,
source_url: final_url.to_string(),
content_type,
content_type: Some(resolved_content_type),
})
}