feat: update tag and timeline share panel copy for clarity and conciseness
Some checks failed
docker-images / resolve-build-targets (push) Successful in 7s
ui-regression / playwright-regression (push) Failing after 13m4s
docker-images / build-and-push (admin) (push) Successful in 1m17s
docker-images / build-and-push (backend) (push) Successful in 28m13s
docker-images / build-and-push (frontend) (push) Successful in 47s
docker-images / submit-indexnow (push) Successful in 13s

style: enhance global CSS for better responsiveness of terminal chips and navigation pills

test: remove inline subscription test and add maintenance mode access code test

feat: implement media library picker dialog for selecting images from the media library

feat: add media URL controls for uploading and managing media assets

feat: add migration for music_enabled and maintenance_mode settings in site settings

feat: implement maintenance mode functionality with access control

feat: create maintenance page with access code input and error handling

chore: add TypeScript declaration for QR code module
This commit is contained in:
2026-04-02 23:05:49 +08:00
parent 6a50dd478c
commit 9665c933b5
94 changed files with 5266 additions and 1612 deletions

View File

@@ -3,12 +3,9 @@ use std::{
sync::{Mutex, OnceLock},
};
use axum::http::{header, HeaderMap, StatusCode};
use axum::http::{HeaderMap, StatusCode, header};
use chrono::{DateTime, Duration, Utc};
use loco_rs::{
controller::ErrorDetail,
prelude::*,
};
use loco_rs::{controller::ErrorDetail, prelude::*};
const DEFAULT_WINDOW_SECONDS: i64 = 5 * 60;
const DEFAULT_MAX_REQUESTS_PER_WINDOW: u32 = 45;

View File

@@ -1,33 +1,15 @@
use loco_rs::prelude::*;
use sea_orm::{ActiveModelTrait, Set};
use loco_rs::prelude::{AppContext, Result};
use crate::{
controllers::admin::AdminIdentity,
models::_entities::admin_audit_logs,
};
use crate::controllers::admin::AdminIdentity;
pub async fn log_event(
ctx: &AppContext,
actor: Option<&AdminIdentity>,
action: &str,
target_type: &str,
target_id: Option<String>,
target_label: Option<String>,
metadata: Option<serde_json::Value>,
_ctx: &AppContext,
_actor: Option<&AdminIdentity>,
_action: &str,
_target_type: &str,
_target_id: Option<String>,
_target_label: Option<String>,
_metadata: Option<serde_json::Value>,
) -> Result<()> {
admin_audit_logs::ActiveModel {
actor_username: Set(actor.map(|item| item.username.clone())),
actor_email: Set(actor.and_then(|item| item.email.clone())),
actor_source: Set(actor.map(|item| item.source.clone())),
action: Set(action.to_string()),
target_type: Set(target_type.to_string()),
target_id: Set(target_id),
target_label: Set(target_label),
metadata: Set(metadata),
..Default::default()
}
.insert(&ctx.db)
.await?;
Ok(())
}

View File

@@ -246,9 +246,7 @@ fn normalize_tracking_source_token(value: Option<String>) -> String {
"chatgpt-search".to_string()
}
value if value.contains("perplexity") => "perplexity".to_string(),
value if value.contains("copilot") || value.contains("bing") => {
"copilot-bing".to_string()
}
value if value.contains("copilot") || value.contains("bing") => "copilot-bing".to_string(),
value if value.contains("gemini") => "gemini".to_string(),
value if value.contains("google") => "google".to_string(),
value if value.contains("claude") => "claude".to_string(),
@@ -289,11 +287,10 @@ fn sorted_referrer_buckets(
let mut items = breakdown
.iter()
.filter_map(|(referrer, count)| {
predicate(referrer)
.then(|| AnalyticsReferrerBucket {
referrer: referrer.clone(),
count: *count,
})
predicate(referrer).then(|| AnalyticsReferrerBucket {
referrer: referrer.clone(),
count: *count,
})
})
.collect::<Vec<_>>();
@@ -648,8 +645,11 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
page_views_last_24h += 1;
}
let referrer =
normalize_tracking_source(Some(&event.path), event.referrer.clone(), event.metadata.as_ref());
let referrer = normalize_tracking_source(
Some(&event.path),
event.referrer.clone(),
event.metadata.as_ref(),
);
*referrer_breakdown.entry(referrer).or_insert(0) += 1;
}
@@ -737,7 +737,8 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
providers_last_7d.truncate(6);
let top_referrers = sorted_referrer_buckets(&referrer_breakdown, |_| true, 8);
let ai_referrers_last_7d = sorted_referrer_buckets(&referrer_breakdown, is_ai_discovery_source, 6);
let ai_referrers_last_7d =
sorted_referrer_buckets(&referrer_breakdown, is_ai_discovery_source, 6);
let ai_discovery_page_views_last_7d = referrer_breakdown
.iter()
.filter(|(referrer, _)| is_ai_discovery_source(referrer))
@@ -747,7 +748,17 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
let mut popular_posts = post_breakdown
.into_iter()
.map(
|(slug, (page_views, read_completes, total_progress, progress_count, total_duration, duration_count))| {
|(
slug,
(
page_views,
read_completes,
total_progress,
progress_count,
total_duration,
duration_count,
),
)| {
AnalyticsPopularPost {
title: post_titles
.get(&slug)
@@ -1018,7 +1029,8 @@ pub async fn build_public_content_highlights(
} else {
0.0
},
avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
avg_duration_ms: (duration_count > 0)
.then(|| total_duration / duration_count as f64),
},
)
.collect::<Vec<_>>();
@@ -1085,8 +1097,22 @@ pub async fn build_public_content_windows(
.await?;
Ok(vec![
summarize_public_content_window(&events, &post_titles, now - Duration::hours(24), "24h", "24h", 1),
summarize_public_content_window(&events, &post_titles, now - Duration::days(7), "7d", "7d", 7),
summarize_public_content_window(
&events,
&post_titles,
now - Duration::hours(24),
"24h",
"24h",
1,
),
summarize_public_content_window(
&events,
&post_titles,
now - Duration::days(7),
"7d",
"7d",
7,
),
summarize_public_content_window(&events, &post_titles, since_30d, "30d", "30d", 30),
])
}
@@ -1233,7 +1259,8 @@ fn summarize_public_content_window(
} else {
0.0
},
avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
avg_duration_ms: (duration_count > 0)
.then(|| total_duration / duration_count as f64),
},
)
.collect::<Vec<_>>();

View File

@@ -30,16 +30,23 @@ struct MarkdownFrontmatter {
deserialize_with = "deserialize_optional_string_list"
)]
categories: Option<Vec<String>>,
#[serde(default, deserialize_with = "deserialize_optional_string_list")]
#[serde(
default,
alias = "tag",
deserialize_with = "deserialize_optional_string_list"
)]
tags: Option<Vec<String>>,
post_type: Option<String>,
image: Option<String>,
images: Option<Vec<String>>,
pinned: Option<bool>,
#[serde(alias = "Hidden")]
hidden: Option<bool>,
published: Option<bool>,
draft: Option<bool>,
status: Option<String>,
visibility: Option<String>,
#[serde(alias = "date")]
publish_at: Option<String>,
unpublish_at: Option<String>,
canonical_url: Option<String>,
@@ -233,6 +240,18 @@ fn resolve_post_status(frontmatter: &MarkdownFrontmatter) -> String {
}
}
/// Resolves a post's visibility string from its parsed frontmatter.
///
/// An explicit `visibility` field wins and is normalized via
/// `normalize_post_visibility`; otherwise the Hugo-style `hidden` flag
/// maps to the unlisted visibility, and everything else is public.
fn resolve_post_visibility(frontmatter: &MarkdownFrontmatter) -> String {
    match trim_to_option(frontmatter.visibility.clone()) {
        Some(explicit) => normalize_post_visibility(Some(&explicit)),
        None if frontmatter.hidden.unwrap_or(false) => POST_VISIBILITY_UNLISTED.to_string(),
        None => POST_VISIBILITY_PUBLIC.to_string(),
    }
}
pub fn effective_post_state(
status: &str,
publish_at: Option<DateTime<FixedOffset>>,
@@ -500,7 +519,7 @@ pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Res
images: normalize_string_list(frontmatter.images.clone()),
pinned: frontmatter.pinned.unwrap_or(false),
status: resolve_post_status(&frontmatter),
visibility: normalize_post_visibility(frontmatter.visibility.as_deref()),
visibility: resolve_post_visibility(&frontmatter),
publish_at: format_frontmatter_datetime(parse_frontmatter_datetime(
frontmatter.publish_at.clone(),
)),
@@ -1152,3 +1171,39 @@ pub async fn import_markdown_documents(
Ok(imported)
}
#[cfg(test)]
mod tests {
use super::{POST_VISIBILITY_UNLISTED, parse_markdown_source};
// Exercises Hugo-compatible frontmatter handling end to end:
// - `tag:` is accepted as an alias for `tags`
// - `Hidden: true` resolves to unlisted visibility
// - `date:` feeds `publish_at`, normalized to UTC (+08:00 -> +00:00)
#[test]
fn parse_markdown_source_supports_hugo_aliases() {
let markdown = r#"---
title: "Linux Shell"
date: 2022-05-21T10:02:09+08:00
draft: false
Hidden: true
slug: linux-shell
categories:
- Linux
tag:
- Linux
- Shell
---
# Linux Shell
"#;
let post = parse_markdown_source("linux-shell", markdown, "content/posts/linux-shell.md")
.expect("markdown should parse");
assert_eq!(post.slug, "linux-shell");
// The lone `categories` entry ends up in the singular `category` field.
assert_eq!(post.category.as_deref(), Some("Linux"));
assert_eq!(post.tags, vec!["Linux", "Shell"]);
assert_eq!(post.visibility, POST_VISIBILITY_UNLISTED);
// +08:00 local time is stored shifted to the +00:00 offset.
assert_eq!(
post.publish_at.as_deref(),
Some("2022-05-21T02:02:09+00:00")
);
}
}

View File

@@ -1,5 +1,5 @@
pub mod admin_audit;
pub mod abuse_guard;
pub mod admin_audit;
pub mod ai;
pub mod analytics;
pub mod backups;

View File

@@ -1,9 +1,9 @@
use loco_rs::prelude::*;
use crate::{
controllers::site_settings,
models::_entities::{comments, friend_links, site_settings as site_settings_model},
services::subscriptions,
};
use loco_rs::prelude::*;
fn notification_channel_type(settings: &site_settings_model::Model) -> &'static str {
match settings
@@ -71,10 +71,16 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
});
let text = format!(
"收到一条新的评论。\n\n文章:{}\n作者:{}\n范围:{}\n状态:{}\n摘要:{}",
item.post_slug.clone().unwrap_or_else(|| "未知文章".to_string()),
item.post_slug
.clone()
.unwrap_or_else(|| "未知文章".to_string()),
item.author.clone().unwrap_or_else(|| "匿名".to_string()),
item.scope,
if item.approved.unwrap_or(false) { "已通过" } else { "待审核" },
if item.approved.unwrap_or(false) {
"已通过"
} else {
"待审核"
},
excerpt(item.content.as_deref(), 200).unwrap_or_else(|| "".to_string()),
);
@@ -135,9 +141,13 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
});
let text = format!(
"收到新的友链申请。\n\n站点:{}\n链接:{}\n分类:{}\n状态:{}\n描述:{}",
item.site_name.clone().unwrap_or_else(|| "未命名站点".to_string()),
item.site_name
.clone()
.unwrap_or_else(|| "未命名站点".to_string()),
item.site_url,
item.category.clone().unwrap_or_else(|| "未分类".to_string()),
item.category
.clone()
.unwrap_or_else(|| "未分类".to_string()),
item.status.clone().unwrap_or_else(|| "pending".to_string()),
item.description.clone().unwrap_or_else(|| "".to_string()),
);

View File

@@ -1,5 +1,5 @@
use aws_config::BehaviorVersion;
use aws_sdk_s3::{config::Credentials, primitives::ByteStream, Client};
use aws_sdk_s3::{Client, config::Credentials, primitives::ByteStream};
use loco_rs::prelude::*;
use sea_orm::{EntityTrait, QueryOrder};
use std::path::{Path, PathBuf};

View File

@@ -243,11 +243,18 @@ fn normalize_browser_push_subscription(raw: Value) -> Result<Value> {
serde_json::to_value(subscription).map_err(Into::into)
}
fn merge_browser_push_metadata(existing: Option<&Value>, incoming: Option<Value>, subscription: Value) -> Value {
fn merge_browser_push_metadata(
existing: Option<&Value>,
incoming: Option<Value>,
subscription: Value,
) -> Value {
let mut object = merge_metadata(existing, incoming)
.and_then(|value| value.as_object().cloned())
.unwrap_or_default();
object.insert("kind".to_string(), Value::String("browser-push".to_string()));
object.insert(
"kind".to_string(),
Value::String("browser-push".to_string()),
);
object.insert("subscription".to_string(), subscription);
Value::Object(object)
}
@@ -280,7 +287,8 @@ fn payload_match_strings(payload: &Value, key: &str) -> Vec<String> {
if let Some(items) = payload.get(key).and_then(Value::as_array) {
values.extend(
items.iter()
items
.iter()
.filter_map(Value::as_str)
.map(normalize_string)
.filter(|item| !item.is_empty()),
@@ -298,7 +306,8 @@ fn payload_match_strings(payload: &Value, key: &str) -> Vec<String> {
if let Some(items) = post.get(key).and_then(Value::as_array) {
values.extend(
items.iter()
items
.iter()
.filter_map(Value::as_str)
.map(normalize_string)
.filter(|item| !item.is_empty()),
@@ -410,19 +419,31 @@ pub fn to_public_subscription_view(item: &subscriptions::Model) -> PublicSubscri
}
}
fn subscription_links(item: &subscriptions::Model, site_context: &SiteContext) -> (Option<String>, Option<String>, Option<String>) {
let manage_url = item
.manage_token
.as_deref()
.and_then(|token| build_token_link(site_context.site_url.as_deref(), "/subscriptions/manage", token));
let unsubscribe_url = item
.manage_token
.as_deref()
.and_then(|token| build_token_link(site_context.site_url.as_deref(), "/subscriptions/unsubscribe", token));
let confirm_url = item
.confirm_token
.as_deref()
.and_then(|token| build_token_link(site_context.site_url.as_deref(), "/subscriptions/confirm", token));
/// Builds the (manage, unsubscribe, confirm) URLs for a subscription.
///
/// Each link is produced only when its backing token is present; the
/// manage token backs both the manage and unsubscribe URLs, while the
/// confirm URL comes from the separate confirm token.
fn subscription_links(
    item: &subscriptions::Model,
    site_context: &SiteContext,
) -> (Option<String>, Option<String>, Option<String>) {
    let base = site_context.site_url.as_deref();
    // Shared plumbing: turn an optional token plus a path into a full link.
    let tokenized = |token: Option<&str>, path: &str| {
        token.and_then(|value| build_token_link(base, path, value))
    };
    (
        tokenized(item.manage_token.as_deref(), "/subscriptions/manage"),
        tokenized(item.manage_token.as_deref(), "/subscriptions/unsubscribe"),
        tokenized(item.confirm_token.as_deref(), "/subscriptions/confirm"),
    )
}
@@ -449,7 +470,11 @@ async fn send_confirmation_email(ctx: &AppContext, item: &subscriptions::Model)
.await
}
fn subscription_allows_event(item: &subscriptions::Model, event_type: &str, payload: &Value) -> bool {
fn subscription_allows_event(
item: &subscriptions::Model,
event_type: &str,
payload: &Value,
) -> bool {
if normalize_status(&item.status) != STATUS_ACTIVE {
return false;
}
@@ -487,7 +512,9 @@ fn subscription_allows_event(item: &subscriptions::Model, event_type: &str, payl
if !tags.is_empty() {
let payload_tags = payload_match_strings(payload, "tags");
if payload_tags.is_empty()
|| !tags.iter().any(|tag| payload_tags.iter().any(|item| item == tag))
|| !tags
.iter()
.any(|tag| payload_tags.iter().any(|item| item == tag))
{
return false;
}
@@ -501,10 +528,15 @@ pub async fn list_subscriptions(
channel_type: Option<&str>,
status: Option<&str>,
) -> Result<Vec<subscriptions::Model>> {
let mut query = subscriptions::Entity::find().order_by(subscriptions::Column::CreatedAt, Order::Desc);
let mut query =
subscriptions::Entity::find().order_by(subscriptions::Column::CreatedAt, Order::Desc);
if let Some(channel_type) = channel_type.map(str::trim).filter(|value| !value.is_empty()) {
query = query.filter(subscriptions::Column::ChannelType.eq(normalize_channel_type(channel_type)));
if let Some(channel_type) = channel_type
.map(str::trim)
.filter(|value| !value.is_empty())
{
query = query
.filter(subscriptions::Column::ChannelType.eq(normalize_channel_type(channel_type)));
}
if let Some(status) = status.map(str::trim).filter(|value| !value.is_empty()) {
@@ -771,7 +803,9 @@ pub async fn update_subscription_preferences(
if let Some(status) = status {
let normalized = normalize_status(&status);
if normalized == STATUS_PENDING {
return Err(Error::BadRequest("偏好页不支持将状态改回 pending".to_string()));
return Err(Error::BadRequest(
"偏好页不支持将状态改回 pending".to_string(),
));
}
active.status = Set(normalized);
}
@@ -783,7 +817,10 @@ pub async fn update_subscription_preferences(
active.update(&ctx.db).await.map_err(Into::into)
}
pub async fn unsubscribe_subscription(ctx: &AppContext, token: &str) -> Result<subscriptions::Model> {
pub async fn unsubscribe_subscription(
ctx: &AppContext,
token: &str,
) -> Result<subscriptions::Model> {
let item = get_subscription_by_manage_token(ctx, token).await?;
let mut active = item.into_active_model();
active.status = Set(STATUS_UNSUBSCRIBED.to_string());
@@ -821,11 +858,7 @@ async fn update_subscription_delivery_state(
let mut active = subscription.into_active_model();
active.last_notified_at = Set(Some(Utc::now().to_rfc3339()));
active.last_delivery_status = Set(Some(status.to_string()));
active.failure_count = Set(Some(if success {
0
} else {
current_failures + 1
}));
active.failure_count = Set(Some(if success { 0 } else { current_failures + 1 }));
let _ = active.update(&ctx.db).await?;
Ok(())
}
@@ -945,10 +978,16 @@ pub async fn queue_event_for_active_subscriptions(
) -> Result<QueueDispatchSummary> {
let subscriptions = active_subscriptions(ctx).await?;
if subscriptions.is_empty() {
return Ok(QueueDispatchSummary { queued: 0, skipped: 0 });
return Ok(QueueDispatchSummary {
queued: 0,
skipped: 0,
});
}
let site_context = SiteContext { site_name, site_url };
let site_context = SiteContext {
site_name,
site_url,
};
let mut queued = 0usize;
let mut skipped = 0usize;
@@ -1058,38 +1097,32 @@ async fn deliver_via_channel(
CHANNEL_EMAIL => Err(Error::BadRequest(
"email channel must be delivered via subscription context".to_string(),
)),
CHANNEL_DISCORD => {
Client::new()
.post(target)
.json(&serde_json::json!({ "content": message.text }))
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_TELEGRAM => {
Client::new()
.post(target)
.json(&serde_json::json!({ "text": message.text }))
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_NTFY => {
Client::new()
.post(resolve_ntfy_target(target))
.header("Title", &message.subject)
.header("Content-Type", "text/plain; charset=utf-8")
.body(message.text.clone())
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_DISCORD => Client::new()
.post(target)
.json(&serde_json::json!({ "content": message.text }))
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string())),
CHANNEL_TELEGRAM => Client::new()
.post(target)
.json(&serde_json::json!({ "text": message.text }))
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string())),
CHANNEL_NTFY => Client::new()
.post(resolve_ntfy_target(target))
.header("Title", &message.subject)
.header("Content-Type", "text/plain; charset=utf-8")
.body(message.text.clone())
.send()
.await
.and_then(|response| response.error_for_status())
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string())),
CHANNEL_WEB_PUSH => {
let settings = crate::controllers::site_settings::load_current(ctx).await?;
let subscription_info = web_push_service::subscription_info_from_metadata(metadata)?;
@@ -1141,7 +1174,10 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
return Ok(());
};
if matches!(delivery.status.as_str(), DELIVERY_STATUS_SENT | DELIVERY_STATUS_SKIPPED | DELIVERY_STATUS_EXHAUSTED) {
if matches!(
delivery.status.as_str(),
DELIVERY_STATUS_SENT | DELIVERY_STATUS_SKIPPED | DELIVERY_STATUS_EXHAUSTED
) {
return Ok(());
}
@@ -1149,15 +1185,19 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
.payload
.clone()
.ok_or_else(|| Error::BadRequest("delivery payload 为空".to_string()))
.and_then(|value| serde_json::from_value::<QueuedDeliveryPayload>(value).map_err(Into::into))?;
.and_then(|value| {
serde_json::from_value::<QueuedDeliveryPayload>(value).map_err(Into::into)
})?;
let attempts = delivery.attempts_count + 1;
let now = Utc::now().to_rfc3339();
let subscription = match delivery.subscription_id {
Some(subscription_id) => subscriptions::Entity::find_by_id(subscription_id)
.one(&ctx.db)
.await?,
Some(subscription_id) => {
subscriptions::Entity::find_by_id(subscription_id)
.one(&ctx.db)
.await?
}
None => None,
};
@@ -1171,7 +1211,13 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
active.next_retry_at = Set(None);
active.delivered_at = Set(Some(Utc::now().to_rfc3339()));
let _ = active.update(&ctx.db).await?;
update_subscription_delivery_state(ctx, Some(subscription.id), DELIVERY_STATUS_SKIPPED, false).await?;
update_subscription_delivery_state(
ctx,
Some(subscription.id),
DELIVERY_STATUS_SKIPPED,
false,
)
.await?;
return Ok(());
}
}
@@ -1202,7 +1248,14 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
.await
}
} else {
deliver_via_channel(ctx, &delivery.channel_type, &delivery.target, &message, None).await
deliver_via_channel(
ctx,
&delivery.channel_type,
&delivery.target,
&message,
None,
)
.await
};
let subscription_id = delivery.subscription_id;
let delivery_channel_type = delivery.channel_type.clone();
@@ -1218,7 +1271,8 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
active.next_retry_at = Set(None);
active.delivered_at = Set(Some(Utc::now().to_rfc3339()));
let _ = active.update(&ctx.db).await?;
update_subscription_delivery_state(ctx, subscription_id, DELIVERY_STATUS_SENT, true).await?;
update_subscription_delivery_state(ctx, subscription_id, DELIVERY_STATUS_SENT, true)
.await?;
}
Err(error) => {
let next_retry_at = (attempts < MAX_DELIVERY_ATTEMPTS)
@@ -1298,7 +1352,10 @@ pub async fn send_test_notification(
.await
}
pub async fn notify_post_published(ctx: &AppContext, post: &content::MarkdownPost) -> Result<QueueDispatchSummary> {
pub async fn notify_post_published(
ctx: &AppContext,
post: &content::MarkdownPost,
) -> Result<QueueDispatchSummary> {
let site_context = load_site_context(ctx).await;
let public_url = post_public_url(site_context.site_url.as_deref(), &post.slug);
let subject = format!("新文章发布:{}", post.title);
@@ -1315,13 +1372,17 @@ pub async fn notify_post_published(ctx: &AppContext, post: &content::MarkdownPos
let text = format!(
"{}》已发布。\n\n分类:{}\n标签:{}\n链接:{}\n\n{}",
post.title,
post.category.clone().unwrap_or_else(|| "未分类".to_string()),
post.category
.clone()
.unwrap_or_else(|| "未分类".to_string()),
if post.tags.is_empty() {
"".to_string()
} else {
post.tags.join(", ")
},
public_url.clone().unwrap_or_else(|| format!("/articles/{}", post.slug)),
public_url
.clone()
.unwrap_or_else(|| format!("/articles/{}", post.slug)),
post.description.clone().unwrap_or_default(),
);
@@ -1355,7 +1416,8 @@ pub async fn send_digest(ctx: &AppContext, period: &str) -> Result<DigestDispatc
let lines = if posts.is_empty() {
vec![format!("最近 {} 天还没有新的公开文章。", days)]
} else {
posts.iter()
posts
.iter()
.map(|post| {
let url = post_public_url(site_context.site_url.as_deref(), &post.slug)
.unwrap_or_else(|| format!("/articles/{}", post.slug));
@@ -1369,7 +1431,14 @@ pub async fn send_digest(ctx: &AppContext, period: &str) -> Result<DigestDispatc
.collect::<Vec<_>>()
};
let subject = format!("{} 内容摘要", if normalized_period == "monthly" { "月报" } else { "周报" });
let subject = format!(
"{} 内容摘要",
if normalized_period == "monthly" {
"月报"
} else {
"周报"
}
);
let body = format!("统计周期:最近 {}\n\n{}", days, lines.join("\n\n"));
let payload = serde_json::json!({
"event_type": event_type,

View File

@@ -91,8 +91,7 @@ fn normalize_ip(value: Option<&str>) -> Option<String> {
}
fn verify_url() -> String {
env_value(ENV_TURNSTILE_VERIFY_URL)
.unwrap_or_else(|| DEFAULT_TURNSTILE_VERIFY_URL.to_string())
env_value(ENV_TURNSTILE_VERIFY_URL).unwrap_or_else(|| DEFAULT_TURNSTILE_VERIFY_URL.to_string())
}
fn client() -> &'static Client {
@@ -173,11 +172,10 @@ pub async fn verify_token(
token: Option<&str>,
client_ip: Option<&str>,
) -> Result<()> {
let secret = secret_key(settings).ok_or_else(|| {
Error::BadRequest("人机验证尚未配置完成,请稍后重试".to_string())
})?;
let response_token = trim_to_option(token)
.ok_or_else(|| Error::BadRequest("请先完成人机验证".to_string()))?;
let secret = secret_key(settings)
.ok_or_else(|| Error::BadRequest("人机验证尚未配置完成,请稍后重试".to_string()))?;
let response_token =
trim_to_option(token).ok_or_else(|| Error::BadRequest("请先完成人机验证".to_string()))?;
let mut form_data = vec![
("secret".to_string(), secret),

View File

@@ -66,9 +66,7 @@ pub fn private_key_configured(settings: &site_settings::Model) -> bool {
}
pub fn is_enabled(settings: &site_settings::Model) -> bool {
settings.web_push_enabled.unwrap_or(false)
&& public_key_configured(settings)
&& private_key_configured(settings)
public_key_configured(settings) && private_key_configured(settings)
}
pub fn subscription_info_from_metadata(metadata: Option<&Value>) -> Result<SubscriptionInfo> {

View File

@@ -1,14 +1,11 @@
use chrono::Utc;
use loco_rs::{
bgworker::BackgroundWorker,
prelude::*,
};
use loco_rs::{bgworker::BackgroundWorker, prelude::*};
use sea_orm::{
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, Order,
PaginatorTrait, QueryFilter, QueryOrder, QuerySelect, Set,
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, Order, PaginatorTrait,
QueryFilter, QueryOrder, QuerySelect, Set,
};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use serde_json::{Value, json};
use crate::{
models::_entities::{notification_deliveries, worker_jobs},
@@ -213,7 +210,10 @@ fn can_cancel_status(status: &str, cancel_requested: bool) -> bool {
}
fn can_retry_status(status: &str) -> bool {
matches!(status, JOB_STATUS_FAILED | JOB_STATUS_CANCELLED | JOB_STATUS_SUCCEEDED)
matches!(
status,
JOB_STATUS_FAILED | JOB_STATUS_CANCELLED | JOB_STATUS_SUCCEEDED
)
}
fn to_job_record(item: worker_jobs::Model) -> WorkerJobRecord {
@@ -256,15 +256,17 @@ fn catalog_entries() -> Vec<WorkerCatalogEntry> {
(TASK_SEND_MONTHLY_DIGEST, JOB_KIND_TASK, true, true),
]
.into_iter()
.map(|(worker_name, job_kind, supports_cancel, supports_retry)| WorkerCatalogEntry {
worker_name: worker_name.to_string(),
job_kind: job_kind.to_string(),
label: label_for(worker_name),
description: description_for(worker_name),
queue_name: queue_name_for(worker_name),
supports_cancel,
supports_retry,
})
.map(
|(worker_name, job_kind, supports_cancel, supports_retry)| WorkerCatalogEntry {
worker_name: worker_name.to_string(),
job_kind: job_kind.to_string(),
label: label_for(worker_name),
description: description_for(worker_name),
queue_name: queue_name_for(worker_name),
supports_cancel,
supports_retry,
},
)
.collect()
}
@@ -311,7 +313,10 @@ async fn dispatch_download(args_ctx: AppContext, args: DownloadWorkerArgs) {
}
}
async fn dispatch_notification_delivery(args_ctx: AppContext, args: NotificationDeliveryWorkerArgs) {
async fn dispatch_notification_delivery(
args_ctx: AppContext,
args: NotificationDeliveryWorkerArgs,
) {
let worker = NotificationDeliveryWorker::build(&args_ctx);
if let Err(error) = worker.perform(args).await {
tracing::warn!("notification delivery worker execution failed: {error}");
@@ -326,7 +331,9 @@ async fn enqueue_download_worker(ctx: &AppContext, args: DownloadWorkerArgs) ->
Ok(())
}
Err(error) => {
tracing::warn!("download worker queue unavailable, falling back to local task: {error}");
tracing::warn!(
"download worker queue unavailable, falling back to local task: {error}"
);
tokio::spawn(dispatch_download(ctx.clone(), args));
Ok(())
}
@@ -344,7 +351,9 @@ async fn enqueue_notification_worker(
Ok(())
}
Err(error) => {
tracing::warn!("notification worker queue unavailable, falling back to local task: {error}");
tracing::warn!(
"notification worker queue unavailable, falling back to local task: {error}"
);
tokio::spawn(dispatch_notification_delivery(ctx.clone(), args));
Ok(())
}
@@ -442,17 +451,19 @@ pub async fn get_overview(ctx: &AppContext) -> Result<WorkerOverview> {
_ => {}
}
let entry = grouped.entry(item.worker_name.clone()).or_insert_with(|| WorkerStats {
worker_name: item.worker_name.clone(),
job_kind: item.job_kind.clone(),
label: label_for(&item.worker_name),
queued: 0,
running: 0,
succeeded: 0,
failed: 0,
cancelled: 0,
last_job_at: None,
});
let entry = grouped
.entry(item.worker_name.clone())
.or_insert_with(|| WorkerStats {
worker_name: item.worker_name.clone(),
job_kind: item.job_kind.clone(),
label: label_for(&item.worker_name),
queued: 0,
running: 0,
succeeded: 0,
failed: 0,
cancelled: 0,
last_job_at: None,
});
match item.status.as_str() {
JOB_STATUS_QUEUED => entry.queued += 1,
@@ -473,18 +484,35 @@ pub async fn get_overview(ctx: &AppContext) -> Result<WorkerOverview> {
}
pub async fn list_jobs(ctx: &AppContext, query: WorkerJobListQuery) -> Result<WorkerJobListResult> {
let mut db_query = worker_jobs::Entity::find().order_by(worker_jobs::Column::CreatedAt, Order::Desc);
let mut db_query =
worker_jobs::Entity::find().order_by(worker_jobs::Column::CreatedAt, Order::Desc);
if let Some(status) = query.status.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(status) = query
.status
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(worker_jobs::Column::Status.eq(status));
}
if let Some(job_kind) = query.job_kind.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(job_kind) = query
.job_kind
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(worker_jobs::Column::JobKind.eq(job_kind));
}
if let Some(worker_name) = query.worker_name.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(worker_name) = query
.worker_name
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(worker_jobs::Column::WorkerName.eq(worker_name));
}
if let Some(search) = query.search.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
if let Some(search) = query
.search
.map(|value| value.trim().to_string())
.filter(|value| !value.is_empty())
{
db_query = db_query.filter(
Condition::any()
.add(worker_jobs::Column::WorkerName.contains(search.clone()))
@@ -830,6 +858,9 @@ pub async fn retry_job(
)
.await
}
_ => Err(Error::BadRequest(format!("不支持重试任务:{}", item.worker_name))),
_ => Err(Error::BadRequest(format!(
"不支持重试任务:{}",
item.worker_name
))),
}
}