feat: update tag and timeline share panel copy for clarity and conciseness
Some checks failed
docker-images / resolve-build-targets (push) Successful in 7s
ui-regression / playwright-regression (push) Failing after 13m4s
docker-images / build-and-push (admin) (push) Successful in 1m17s
docker-images / build-and-push (backend) (push) Successful in 28m13s
docker-images / build-and-push (frontend) (push) Successful in 47s
docker-images / submit-indexnow (push) Successful in 13s

style: enhance global CSS for better responsiveness of terminal chips and navigation pills

test: remove inline subscription test and add maintenance mode access code test

feat: implement media library picker dialog for selecting images

feat: add media URL controls for uploading and managing media assets

feat: add migration for music_enabled and maintenance_mode settings in site settings

feat: implement maintenance mode functionality with access control

feat: create maintenance page with access code input and error handling

chore: add TypeScript declaration for QR code module
This commit is contained in:
2026-04-02 23:05:49 +08:00
parent 6a50dd478c
commit 9665c933b5
94 changed files with 5266 additions and 1612 deletions

View File

@@ -1,8 +1,12 @@
use std::collections::{HashMap, HashSet};
use axum::{
extract::{Multipart, Query},
http::{HeaderMap, header},
};
use loco_rs::prelude::*;
use regex::Regex;
use reqwest::Url;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, PaginatorTrait, QueryFilter,
QueryOrder, QuerySelect, Set,
@@ -25,7 +29,7 @@ use crate::{
services::{
admin_audit, ai, analytics, comment_guard, content, media_assets, storage, worker_jobs,
},
workers::downloader::DownloadWorkerArgs,
workers::downloader::{DownloadWorkerArgs, download_media_to_storage, normalize_target_format},
};
#[derive(Clone, Debug, Deserialize)]
@@ -171,6 +175,9 @@ pub struct AdminSiteSettingsResponse {
pub location: Option<String>,
pub tech_stack: Vec<String>,
pub music_playlist: Vec<site_settings::MusicTrackPayload>,
pub music_enabled: bool,
pub maintenance_mode_enabled: bool,
pub maintenance_access_code: Option<String>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_verification_mode: String,
@@ -356,6 +363,8 @@ pub struct AdminMediaDownloadPayload {
#[serde(default)]
pub prefix: Option<String>,
#[serde(default)]
pub target_format: Option<String>,
#[serde(default)]
pub title: Option<String>,
#[serde(default)]
pub alt_text: Option<String>,
@@ -365,13 +374,19 @@ pub struct AdminMediaDownloadPayload {
pub tags: Option<Vec<String>>,
#[serde(default)]
pub notes: Option<String>,
#[serde(default)]
pub sync: bool,
}
/// Response for the admin media download endpoint. Two shapes:
/// queued mode (`queued == true`): `job_id`/`status` are set and the
/// object fields are `None`; synchronous mode (`queued == false`):
/// the object fields are populated and `status` is `"completed"`.
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaDownloadResponse {
/// Whether the download was enqueued as a background job.
pub queued: bool,
/// Background job id; `None` for synchronous downloads.
pub job_id: Option<i32>,
/// Job status for queued downloads, `"completed"` for synchronous ones.
pub status: Option<String>,
/// Storage object key; populated only for synchronous downloads.
pub key: Option<String>,
/// Public URL of the stored object; synchronous downloads only.
pub url: Option<String>,
/// Stored object size in bytes; synchronous downloads only.
pub size_bytes: Option<i64>,
/// Content type of the downloaded object, when known; synchronous only.
pub content_type: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -487,6 +502,37 @@ pub struct AdminPostPolishRequest {
pub markdown: String,
}
/// Request body for `/posts/localize-images`: markdown whose remote
/// image references should be downloaded into local media storage.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminPostLocalizeImagesRequest {
/// Markdown source to scan and rewrite.
pub markdown: String,
/// Optional storage key prefix for the downloaded images; falls back
/// to "post-inline-images" when missing or empty.
#[serde(default)]
pub prefix: Option<String>,
}
/// One image reference that was successfully downloaded and rewritten.
#[derive(Clone, Debug, Serialize)]
pub struct AdminPostLocalizedImageItem {
/// The original remote URL found in the markdown.
pub source_url: String,
/// The public URL of the locally stored copy.
pub localized_url: String,
/// The storage object key of the stored copy.
pub key: String,
}
/// One remote image URL whose download failed; reported back to the
/// caller instead of aborting the whole localization run.
#[derive(Clone, Debug, Serialize)]
pub struct AdminPostLocalizeImagesFailure {
/// The remote URL that could not be downloaded.
pub source_url: String,
/// Human-readable description of the download error.
pub error: String,
}
/// Response for `/posts/localize-images`.
#[derive(Clone, Debug, Serialize)]
pub struct AdminPostLocalizeImagesResponse {
/// The markdown with successfully downloaded image URLs rewritten.
pub markdown: String,
/// Remote image references found (duplicates counted separately).
pub detected_count: usize,
/// References actually rewritten in the returned markdown.
pub localized_count: usize,
/// Unique URLs downloaded into storage successfully.
pub uploaded_count: usize,
/// Unique URLs whose download failed.
pub failed_count: usize,
/// Details for each successful download.
pub items: Vec<AdminPostLocalizedImageItem>,
/// Details for each failed download.
pub failures: Vec<AdminPostLocalizeImagesFailure>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct AdminReviewPolishRequest {
pub title: String,
@@ -537,6 +583,199 @@ fn trim_to_option(value: Option<String>) -> Option<String> {
})
}
/// Resolves the storage key prefix for localized markdown images.
/// Trims whitespace and surrounding slashes from the caller-supplied
/// value and falls back to "post-inline-images" when nothing usable
/// remains.
fn normalize_localize_image_prefix(value: Option<String>) -> String {
    let candidate = trim_to_option(value)
        .map(|prefix| prefix.trim_matches('/').to_string())
        .filter(|prefix| !prefix.is_empty());
    match candidate {
        Some(prefix) => prefix,
        None => "post-inline-images".to_string(),
    }
}
fn normalize_markdown_image_target(value: &str) -> Option<String> {
let trimmed = value.trim();
if trimmed.is_empty() {
return None;
}
if trimmed.starts_with('<') && trimmed.ends_with('>') && trimmed.len() > 2 {
Some(trimmed[1..trimmed.len() - 1].trim().to_string())
} else {
Some(trimmed.to_string())
}
}
/// Collects every image URL referenced in `markdown`, scanning three
/// syntaxes in this order: markdown images `![alt](url)`, HTML `<img>`
/// with a double-quoted `src`, then with a single-quoted `src`.
/// Duplicates are preserved; empty/unparseable targets are skipped.
fn markdown_image_reference_urls(markdown: &str) -> Vec<String> {
    // One pattern per supported syntax; each exposes the URL via the
    // `url` named capture group so extraction below is uniform.
    let patterns = [
        // ![alt](url "optional title") — URL may be angle-bracketed.
        Regex::new(r#"!\[[^\]]*]\((?P<url><[^>\n]+>|[^)\s]+)(?:\s+(?:"[^"]*"|'[^']*'))?\)"#)
            .expect("valid markdown image regex"),
        // <img src="url" …>
        Regex::new(r#"(?i)<img\b[^>]*?\bsrc\s*=\s*"(?P<url>[^"]+)""#)
            .expect("valid html img double quote regex"),
        // <img src='url' …>
        Regex::new(r#"(?i)<img\b[^>]*?\bsrc\s*=\s*'(?P<url>[^']+)'"#)
            .expect("valid html img single quote regex"),
    ];
    let mut urls = Vec::new();
    for pattern in &patterns {
        for captures in pattern.captures_iter(markdown) {
            if let Some(url) = captures
                .name("url")
                .and_then(|item| normalize_markdown_image_target(item.as_str()))
            {
                urls.push(url);
            }
        }
    }
    urls
}
/// Returns `true` when `url` is an absolute http(s) URL that should be
/// downloaded — i.e. it does not already resolve to an object in our
/// own media storage (per `settings`, when configured).
fn is_remote_markdown_image_candidate(
    url: &str,
    settings: Option<&storage::MediaStorageSettings>,
) -> bool {
    let parsed = match Url::parse(url) {
        Ok(parsed) => parsed,
        // Relative or malformed references are never download candidates.
        Err(_) => return false,
    };
    if !matches!(parsed.scheme(), "http" | "https") {
        return false;
    }
    // Skip URLs that already point into our storage bucket; the lookup
    // is only consulted when storage settings exist.
    settings
        .and_then(|storage_settings| storage::object_key_from_public_url(storage_settings, url))
        .is_none()
}
/// Rewrites image URLs in `markdown` according to `replacements`
/// (original URL -> localized URL), covering the same three syntaxes
/// as `markdown_image_reference_urls`. Returns the rewritten markdown
/// and the number of references replaced.
///
/// Matches whose (normalized) URL has no entry in `replacements` are
/// emitted unchanged.
fn replace_markdown_image_urls(
    markdown: &str,
    replacements: &HashMap<String, String>,
) -> (String, usize) {
    // Each pattern captures `lead`/`trail` (the syntax surrounding the
    // URL) so a match can be rebuilt with only the URL swapped out.
    // Passes are applied in order: markdown images, then double-quoted,
    // then single-quoted `<img>` tags.
    let patterns = [
        Regex::new(
            r#"(?P<lead>!\[[^\]]*]\()(?P<url><[^>\n]+>|[^)\s]+)(?P<trail>(?:\s+(?:"[^"]*"|'[^']*'))?\))"#,
        )
        .expect("valid markdown image replacement regex"),
        Regex::new(r#"(?i)(?P<lead><img\b[^>]*?\bsrc\s*=\s*")(?P<url>[^"]+)(?P<trail>"[^>]*>)"#)
            .expect("valid html img double quote replacement regex"),
        Regex::new(r#"(?i)(?P<lead><img\b[^>]*?\bsrc\s*=\s*')(?P<url>[^']+)(?P<trail>'[^>]*>)"#)
            .expect("valid html img single quote replacement regex"),
    ];
    let mut localized_count = 0usize;
    let mut result = markdown.to_string();
    for pattern in &patterns {
        result = pattern
            .replace_all(&result, |captures: &regex::Captures<'_>| {
                let raw_url = captures
                    .name("url")
                    .map(|item| item.as_str())
                    .unwrap_or_default();
                // Normalize (trim, unwrap angle brackets) before the
                // lookup so keys match what the scanner collected.
                let localized = normalize_markdown_image_target(raw_url)
                    .and_then(|url| replacements.get(&url));
                match localized {
                    Some(localized_url) => {
                        localized_count += 1;
                        format!(
                            "{}{}{}",
                            &captures["lead"], localized_url, &captures["trail"]
                        )
                    }
                    // No replacement known: reproduce the match verbatim.
                    None => captures
                        .get(0)
                        .map(|item| item.as_str())
                        .unwrap_or_default()
                        .to_string(),
                }
            })
            .to_string();
    }
    (result, localized_count)
}
fn parse_optional_timestamp(
value: Option<&str>,
) -> Result<Option<chrono::DateTime<chrono::FixedOffset>>> {
@@ -785,6 +1024,9 @@ fn build_settings_response(
location: item.location,
tech_stack: tech_stack_values(&item.tech_stack),
music_playlist: music_playlist_values(&item.music_playlist),
music_enabled: item.music_enabled.unwrap_or(true),
maintenance_mode_enabled: item.maintenance_mode_enabled.unwrap_or(false),
maintenance_access_code: item.maintenance_access_code,
ai_enabled: item.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true),
comment_verification_mode: comment_verification_mode.as_str().to_string(),
@@ -1493,9 +1735,11 @@ pub async fn download_media_object(
Json(payload): Json<AdminMediaDownloadPayload>,
) -> Result<Response> {
let actor = check_auth(&headers)?;
let target_format = normalize_target_format(payload.target_format.clone())?;
let worker_args = DownloadWorkerArgs {
source_url: payload.source_url.clone(),
prefix: payload.prefix.clone(),
target_format,
title: payload.title.clone(),
alt_text: payload.alt_text.clone(),
caption: payload.caption.clone(),
@@ -1503,6 +1747,38 @@ pub async fn download_media_object(
notes: payload.notes.clone(),
job_id: None,
};
if payload.sync {
let downloaded = download_media_to_storage(&ctx, &worker_args).await?;
admin_audit::log_event(
&ctx,
Some(&actor),
"media.download",
"media",
Some(downloaded.key.clone()),
Some(payload.source_url.clone()),
Some(serde_json::json!({
"queued": false,
"source_url": payload.source_url,
"target_format": worker_args.target_format,
"key": downloaded.key,
"url": downloaded.url,
})),
)
.await?;
return format::json(AdminMediaDownloadResponse {
queued: false,
job_id: None,
status: Some("completed".to_string()),
key: Some(downloaded.key),
url: Some(downloaded.url),
size_bytes: Some(downloaded.size_bytes),
content_type: downloaded.content_type,
});
}
let job = worker_jobs::queue_download_job(
&ctx,
&worker_args,
@@ -1524,14 +1800,19 @@ pub async fn download_media_object(
"job_id": job.id,
"queued": true,
"source_url": payload.source_url,
"target_format": worker_args.target_format,
})),
)
.await?;
format::json(AdminMediaDownloadResponse {
queued: true,
job_id: job.id,
status: job.status,
job_id: Some(job.id),
status: Some(job.status),
key: None,
url: None,
size_bytes: None,
content_type: None,
})
}
@@ -1907,6 +2188,89 @@ pub async fn polish_post_markdown(
format::json(ai::polish_post_markdown(&ctx, &payload.markdown).await?)
}
/// Admin endpoint: downloads every remote image referenced in a post's
/// markdown into local media storage and rewrites the markdown to
/// point at the stored copies. Individual download failures are
/// collected and reported rather than failing the whole request.
#[debug_handler]
pub async fn localize_post_markdown_images(
headers: HeaderMap,
State(ctx): State<AppContext>,
Json(payload): Json<AdminPostLocalizeImagesRequest>,
) -> Result<Response> {
check_auth(&headers)?;
// Normalize line endings so regex scanning behaves the same for
// CRLF and LF input.
let normalized_markdown = payload.markdown.replace("\r\n", "\n");
let prefix = normalize_localize_image_prefix(payload.prefix);
// Storage settings, when configured, let us recognize URLs that
// already point into our own bucket and skip them.
let settings = storage::optional_r2_settings(&ctx).await?;
let detected_urls = markdown_image_reference_urls(&normalized_markdown);
// Keep only absolute http(s) URLs not already hosted by us.
let candidate_urls = detected_urls
.into_iter()
.filter(|url| is_remote_markdown_image_candidate(url, settings.as_ref()))
.collect::<Vec<_>>();
// Nothing remote to localize: echo the normalized markdown back.
if candidate_urls.is_empty() {
return format::json(AdminPostLocalizeImagesResponse {
markdown: normalized_markdown,
detected_count: 0,
localized_count: 0,
uploaded_count: 0,
failed_count: 0,
items: Vec::new(),
failures: Vec::new(),
});
}
// Download each distinct URL once, even if referenced several times;
// the replacement pass below rewrites every occurrence.
let mut seen = HashSet::new();
let unique_urls = candidate_urls
.iter()
.filter(|url| seen.insert((*url).clone()))
.cloned()
.collect::<Vec<_>>();
let mut replacements = HashMap::<String, String>::new();
let mut items = Vec::<AdminPostLocalizedImageItem>::new();
let mut failures = Vec::<AdminPostLocalizeImagesFailure>::new();
// Downloads run sequentially; each success registers a URL rewrite,
// each failure is recorded without aborting the loop.
for source_url in unique_urls {
let args = DownloadWorkerArgs {
source_url: source_url.clone(),
prefix: Some(prefix.clone()),
target_format: None,
title: None,
alt_text: None,
caption: None,
tags: vec!["markdown-image".to_string()],
notes: Some("localized from markdown body".to_string()),
job_id: None,
};
match download_media_to_storage(&ctx, &args).await {
Ok(downloaded) => {
replacements.insert(source_url.clone(), downloaded.url.clone());
items.push(AdminPostLocalizedImageItem {
source_url,
localized_url: downloaded.url,
key: downloaded.key,
});
}
Err(error) => failures.push(AdminPostLocalizeImagesFailure {
source_url,
error: error.to_string(),
}),
}
}
let (markdown, localized_count) =
replace_markdown_image_urls(&normalized_markdown, &replacements);
// NOTE(review): `detected_count` reports remote candidates (duplicates
// included), not all detected references — local/relative references
// are excluded by the filter above. Confirm this matches what the
// admin UI expects to display.
format::json(AdminPostLocalizeImagesResponse {
markdown,
detected_count: candidate_urls.len(),
localized_count,
uploaded_count: items.len(),
failed_count: failures.len(),
items,
failures,
})
}
#[debug_handler]
pub async fn polish_review_description(
headers: HeaderMap,
@@ -2045,6 +2409,10 @@ pub fn routes() -> Routes {
.add("/ai/reindex", post(reindex_ai))
.add("/ai/test-provider", post(test_ai_provider))
.add("/ai/test-image-provider", post(test_ai_image_provider))
.add(
"/posts/localize-images",
post(localize_post_markdown_images),
)
.add("/storage/r2/test", post(test_r2_storage))
.add(
"/storage/media",