feat: add worker operations and fix gitea actions
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 29s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Successful in 33m13s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 58s
ui-regression / playwright-regression (push) Failing after 13m24s
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Successful in 29s
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Successful in 33m13s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Successful in 58s
ui-regression / playwright-regression (push) Failing after 13m24s
This commit is contained in:
@@ -1,5 +1,3 @@
|
||||
# syntax=docker/dockerfile:1.7
|
||||
|
||||
FROM rust:1.94-trixie AS chef
|
||||
RUN cargo install cargo-chef --locked
|
||||
WORKDIR /app
|
||||
|
||||
@@ -43,6 +43,7 @@ mod m20260401_000032_add_runtime_security_keys_to_site_settings;
|
||||
mod m20260401_000033_add_taxonomy_metadata_and_media_assets;
|
||||
mod m20260401_000034_add_source_markdown_to_posts;
|
||||
mod m20260401_000035_add_human_verification_modes_to_site_settings;
|
||||
mod m20260402_000036_create_worker_jobs;
|
||||
pub struct Migrator;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
@@ -90,6 +91,7 @@ impl MigratorTrait for Migrator {
|
||||
Box::new(m20260401_000033_add_taxonomy_metadata_and_media_assets::Migration),
|
||||
Box::new(m20260401_000034_add_source_markdown_to_posts::Migration),
|
||||
Box::new(m20260401_000035_add_human_verification_modes_to_site_settings::Migration),
|
||||
Box::new(m20260402_000036_create_worker_jobs::Migration),
|
||||
// inject-above (do not remove this comment)
|
||||
]
|
||||
}
|
||||
|
||||
98
backend/migration/src/m20260402_000036_create_worker_jobs.rs
Normal file
98
backend/migration/src/m20260402_000036_create_worker_jobs.rs
Normal file
@@ -0,0 +1,98 @@
|
||||
use loco_rs::schema::*;
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
create_table(
|
||||
manager,
|
||||
"worker_jobs",
|
||||
&[
|
||||
("id", ColType::PkAuto),
|
||||
("parent_job_id", ColType::IntegerNull),
|
||||
("job_kind", ColType::String),
|
||||
("worker_name", ColType::String),
|
||||
("display_name", ColType::StringNull),
|
||||
("status", ColType::String),
|
||||
("queue_name", ColType::StringNull),
|
||||
("requested_by", ColType::StringNull),
|
||||
("requested_source", ColType::StringNull),
|
||||
("trigger_mode", ColType::StringNull),
|
||||
("payload", ColType::JsonBinaryNull),
|
||||
("result", ColType::JsonBinaryNull),
|
||||
("error_text", ColType::TextNull),
|
||||
("tags", ColType::JsonBinaryNull),
|
||||
("related_entity_type", ColType::StringNull),
|
||||
("related_entity_id", ColType::StringNull),
|
||||
("attempts_count", ColType::Integer),
|
||||
("max_attempts", ColType::Integer),
|
||||
("cancel_requested", ColType::Boolean),
|
||||
("queued_at", ColType::StringNull),
|
||||
("started_at", ColType::StringNull),
|
||||
("finished_at", ColType::StringNull),
|
||||
],
|
||||
&[],
|
||||
)
|
||||
.await?;
|
||||
|
||||
for (name, columns) in [
|
||||
(
|
||||
"idx_worker_jobs_status_created_at",
|
||||
vec![Alias::new("status"), Alias::new("created_at")],
|
||||
),
|
||||
(
|
||||
"idx_worker_jobs_worker_status_created_at",
|
||||
vec![
|
||||
Alias::new("worker_name"),
|
||||
Alias::new("status"),
|
||||
Alias::new("created_at"),
|
||||
],
|
||||
),
|
||||
(
|
||||
"idx_worker_jobs_kind_created_at",
|
||||
vec![Alias::new("job_kind"), Alias::new("created_at")],
|
||||
),
|
||||
(
|
||||
"idx_worker_jobs_related_entity",
|
||||
vec![Alias::new("related_entity_type"), Alias::new("related_entity_id")],
|
||||
),
|
||||
(
|
||||
"idx_worker_jobs_parent_job_id",
|
||||
vec![Alias::new("parent_job_id")],
|
||||
),
|
||||
] {
|
||||
let mut statement = Index::create();
|
||||
statement.name(name).table(Alias::new("worker_jobs"));
|
||||
for column in columns {
|
||||
statement.col(column);
|
||||
}
|
||||
manager.create_index(statement.to_owned()).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
for index_name in [
|
||||
"idx_worker_jobs_parent_job_id",
|
||||
"idx_worker_jobs_related_entity",
|
||||
"idx_worker_jobs_kind_created_at",
|
||||
"idx_worker_jobs_worker_status_created_at",
|
||||
"idx_worker_jobs_status_created_at",
|
||||
] {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name(index_name)
|
||||
.table(Alias::new("worker_jobs"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
drop_table(manager, "worker_jobs").await
|
||||
}
|
||||
}
|
||||
@@ -22,7 +22,10 @@ use crate::{
|
||||
ai_chunks, comment_blacklist, comment_persona_analysis_logs, comments, friend_links, posts,
|
||||
reviews,
|
||||
},
|
||||
services::{admin_audit, ai, analytics, comment_guard, content, media_assets, storage},
|
||||
services::{
|
||||
admin_audit, ai, analytics, comment_guard, content, media_assets, storage, worker_jobs,
|
||||
},
|
||||
workers::downloader::DownloadWorkerArgs,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
@@ -346,6 +349,30 @@ pub struct AdminMediaMetadataResponse {
|
||||
pub notes: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct AdminMediaDownloadPayload {
|
||||
pub source_url: String,
|
||||
#[serde(default)]
|
||||
pub prefix: Option<String>,
|
||||
#[serde(default)]
|
||||
pub title: Option<String>,
|
||||
#[serde(default)]
|
||||
pub alt_text: Option<String>,
|
||||
#[serde(default)]
|
||||
pub caption: Option<String>,
|
||||
#[serde(default)]
|
||||
pub tags: Option<Vec<String>>,
|
||||
#[serde(default)]
|
||||
pub notes: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct AdminMediaDownloadResponse {
|
||||
pub queued: bool,
|
||||
pub job_id: i32,
|
||||
pub status: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct AdminMediaListQuery {
|
||||
pub prefix: Option<String>,
|
||||
@@ -1457,6 +1484,55 @@ pub async fn replace_media_object(
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn download_media_object(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<AdminMediaDownloadPayload>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let worker_args = DownloadWorkerArgs {
|
||||
source_url: payload.source_url.clone(),
|
||||
prefix: payload.prefix.clone(),
|
||||
title: payload.title.clone(),
|
||||
alt_text: payload.alt_text.clone(),
|
||||
caption: payload.caption.clone(),
|
||||
tags: payload.tags.unwrap_or_default(),
|
||||
notes: payload.notes.clone(),
|
||||
job_id: None,
|
||||
};
|
||||
let job = worker_jobs::queue_download_job(
|
||||
&ctx,
|
||||
&worker_args,
|
||||
Some(actor.username.clone()),
|
||||
Some(actor.source.clone()),
|
||||
None,
|
||||
Some("manual".to_string()),
|
||||
)
|
||||
.await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"media.download",
|
||||
"media",
|
||||
Some(job.id.to_string()),
|
||||
Some(payload.source_url.clone()),
|
||||
Some(serde_json::json!({
|
||||
"job_id": job.id,
|
||||
"queued": true,
|
||||
"source_url": payload.source_url,
|
||||
})),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(AdminMediaDownloadResponse {
|
||||
queued: true,
|
||||
job_id: job.id,
|
||||
status: job.status,
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn list_comment_blacklist(
|
||||
headers: HeaderMap,
|
||||
@@ -1982,6 +2058,7 @@ pub fn routes() -> Routes {
|
||||
"/storage/media/metadata",
|
||||
patch(update_media_object_metadata),
|
||||
)
|
||||
.add("/storage/media/download", post(download_media_object))
|
||||
.add("/storage/media/replace", post(replace_media_object))
|
||||
.add(
|
||||
"/comments/blacklist",
|
||||
|
||||
@@ -13,7 +13,7 @@ use crate::{
|
||||
},
|
||||
services::{
|
||||
admin_audit, backups, post_revisions as revision_service,
|
||||
subscriptions as subscription_service,
|
||||
subscriptions as subscription_service, worker_jobs,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -35,6 +35,15 @@ pub struct DeliveriesQuery {
|
||||
pub limit: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct WorkerJobsQuery {
|
||||
pub status: Option<String>,
|
||||
pub job_kind: Option<String>,
|
||||
pub worker_name: Option<String>,
|
||||
pub search: Option<String>,
|
||||
pub limit: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct SubscriptionPayload {
|
||||
#[serde(alias = "channelType")]
|
||||
@@ -85,6 +94,11 @@ pub struct DigestDispatchRequest {
|
||||
pub period: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct RetryDeliveriesRequest {
|
||||
pub limit: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct SiteBackupImportRequest {
|
||||
pub backup: backups::SiteBackupDocument,
|
||||
@@ -132,6 +146,12 @@ pub struct DeliveryListResponse {
|
||||
pub deliveries: Vec<notification_deliveries::Model>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct WorkerTaskActionResponse {
|
||||
pub queued: bool,
|
||||
pub job: worker_jobs::WorkerJobRecord,
|
||||
}
|
||||
|
||||
fn trim_to_option(value: Option<String>) -> Option<String> {
|
||||
value.and_then(|item| {
|
||||
let trimmed = item.trim().to_string();
|
||||
@@ -408,6 +428,13 @@ pub async fn test_subscription(
|
||||
.ok_or(Error::NotFound)?;
|
||||
|
||||
let delivery = subscription_service::send_test_notification(&ctx, &item).await?;
|
||||
let job = worker_jobs::find_latest_job_by_related_entity(
|
||||
&ctx,
|
||||
"notification_delivery",
|
||||
&delivery.id.to_string(),
|
||||
Some(worker_jobs::WORKER_NOTIFICATION_DELIVERY),
|
||||
)
|
||||
.await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
@@ -419,7 +446,12 @@ pub async fn test_subscription(
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(serde_json::json!({ "queued": true, "id": item.id, "delivery_id": delivery.id }))
|
||||
format::json(serde_json::json!({
|
||||
"queued": true,
|
||||
"id": item.id,
|
||||
"delivery_id": delivery.id,
|
||||
"job_id": job.as_ref().map(|value| value.id),
|
||||
}))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
@@ -450,6 +482,162 @@ pub async fn send_subscription_digest(
|
||||
format::json(summary)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn workers_overview(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
format::json(worker_jobs::get_overview(&ctx).await?)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn list_worker_jobs(
|
||||
headers: HeaderMap,
|
||||
Query(query): Query<WorkerJobsQuery>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
format::json(
|
||||
worker_jobs::list_jobs(
|
||||
&ctx,
|
||||
worker_jobs::WorkerJobListQuery {
|
||||
status: query.status,
|
||||
job_kind: query.job_kind,
|
||||
worker_name: query.worker_name,
|
||||
search: query.search,
|
||||
limit: query.limit,
|
||||
},
|
||||
)
|
||||
.await?,
|
||||
)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn get_worker_job(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
format::json(worker_jobs::get_job_record(&ctx, id).await?)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn cancel_worker_job(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let updated = worker_jobs::request_cancel(&ctx, id).await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"worker.cancel",
|
||||
"worker_job",
|
||||
Some(id.to_string()),
|
||||
Some(updated.worker_name.clone()),
|
||||
Some(serde_json::json!({ "status": updated.status })),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(updated)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn retry_worker_job(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let job = worker_jobs::retry_job(
|
||||
&ctx,
|
||||
id,
|
||||
Some(actor.username.clone()),
|
||||
Some(actor.source.clone()),
|
||||
)
|
||||
.await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"worker.retry",
|
||||
"worker_job",
|
||||
Some(job.id.to_string()),
|
||||
Some(job.worker_name.clone()),
|
||||
Some(serde_json::json!({ "source_job_id": id })),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(WorkerTaskActionResponse { queued: true, job })
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn run_retry_deliveries_job(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<RetryDeliveriesRequest>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let job = worker_jobs::spawn_retry_deliveries_task(
|
||||
&ctx,
|
||||
payload.limit,
|
||||
Some(actor.username.clone()),
|
||||
Some(actor.source.clone()),
|
||||
None,
|
||||
Some("manual".to_string()),
|
||||
)
|
||||
.await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"worker.task.retry_deliveries",
|
||||
"worker_job",
|
||||
Some(job.id.to_string()),
|
||||
Some(job.worker_name.clone()),
|
||||
Some(serde_json::json!({ "limit": payload.limit })),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(WorkerTaskActionResponse { queued: true, job })
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn run_digest_worker_job(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<DigestDispatchRequest>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let period = payload.period.unwrap_or_else(|| "weekly".to_string());
|
||||
let job = worker_jobs::spawn_digest_task(
|
||||
&ctx,
|
||||
&period,
|
||||
Some(actor.username.clone()),
|
||||
Some(actor.source.clone()),
|
||||
None,
|
||||
Some("manual".to_string()),
|
||||
)
|
||||
.await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"worker.task.digest",
|
||||
"worker_job",
|
||||
Some(job.id.to_string()),
|
||||
Some(job.worker_name.clone()),
|
||||
Some(serde_json::json!({ "period": period })),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(WorkerTaskActionResponse { queued: true, job })
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn export_site_backup(
|
||||
headers: HeaderMap,
|
||||
@@ -481,6 +669,13 @@ pub fn routes() -> Routes {
|
||||
.add("/subscriptions/digest", post(send_subscription_digest))
|
||||
.add("/subscriptions/{id}", patch(update_subscription).delete(delete_subscription))
|
||||
.add("/subscriptions/{id}/test", post(test_subscription))
|
||||
.add("/workers/overview", get(workers_overview))
|
||||
.add("/workers/jobs", get(list_worker_jobs))
|
||||
.add("/workers/jobs/{id}", get(get_worker_job))
|
||||
.add("/workers/jobs/{id}/cancel", post(cancel_worker_job))
|
||||
.add("/workers/jobs/{id}/retry", post(retry_worker_job))
|
||||
.add("/workers/tasks/retry-deliveries", post(run_retry_deliveries_job))
|
||||
.add("/workers/tasks/digest", post(run_digest_worker_job))
|
||||
.add("/site-backup/export", get(export_site_backup))
|
||||
.add("/site-backup/import", post(import_site_backup))
|
||||
}
|
||||
|
||||
@@ -7,12 +7,19 @@ use sea_orm::{EntityTrait, QueryOrder, Set};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
controllers::admin::check_auth,
|
||||
controllers::admin::{check_auth, resolve_admin_identity},
|
||||
models::_entities::reviews::{self, Entity as ReviewEntity},
|
||||
services::{admin_audit, storage},
|
||||
};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
fn is_public_review_status(status: Option<&str>) -> bool {
|
||||
matches!(
|
||||
status.unwrap_or_default().trim().to_ascii_lowercase().as_str(),
|
||||
"published" | "completed" | "done"
|
||||
)
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct CreateReviewRequest {
|
||||
pub title: String,
|
||||
pub review_type: String,
|
||||
@@ -25,7 +32,7 @@ pub struct CreateReviewRequest {
|
||||
pub link_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct UpdateReviewRequest {
|
||||
pub title: Option<String>,
|
||||
pub review_type: Option<String>,
|
||||
@@ -38,23 +45,30 @@ pub struct UpdateReviewRequest {
|
||||
pub link_url: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn list(State(ctx): State<AppContext>) -> Result<impl IntoResponse> {
|
||||
pub async fn list(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<impl IntoResponse> {
|
||||
let include_private = resolve_admin_identity(&headers).is_some();
|
||||
let reviews = ReviewEntity::find()
|
||||
.order_by_desc(reviews::Column::CreatedAt)
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|review| include_private || is_public_review_status(review.status.as_deref()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
format::json(reviews)
|
||||
}
|
||||
|
||||
pub async fn get_one(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<impl IntoResponse> {
|
||||
let include_private = resolve_admin_identity(&headers).is_some();
|
||||
let review = ReviewEntity::find_by_id(id).one(&ctx.db).await?;
|
||||
|
||||
match review {
|
||||
Some(r) => format::json(r),
|
||||
Some(r) if include_private || is_public_review_status(r.status.as_deref()) => format::json(r),
|
||||
Some(_) => Err(Error::NotFound),
|
||||
None => Err(Error::NotFound),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,3 +20,4 @@ pub mod site_settings;
|
||||
pub mod subscriptions;
|
||||
pub mod tags;
|
||||
pub mod users;
|
||||
pub mod worker_jobs;
|
||||
|
||||
@@ -18,3 +18,4 @@ pub use super::site_settings::Entity as SiteSettings;
|
||||
pub use super::subscriptions::Entity as Subscriptions;
|
||||
pub use super::tags::Entity as Tags;
|
||||
pub use super::users::Entity as Users;
|
||||
pub use super::worker_jobs::Entity as WorkerJobs;
|
||||
|
||||
43
backend/src/models/_entities/worker_jobs.rs
Normal file
43
backend/src/models/_entities/worker_jobs.rs
Normal file
@@ -0,0 +1,43 @@
|
||||
//! `SeaORM` Entity, manually maintained
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "worker_jobs")]
|
||||
pub struct Model {
|
||||
pub created_at: DateTimeWithTimeZone,
|
||||
pub updated_at: DateTimeWithTimeZone,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub parent_job_id: Option<i32>,
|
||||
pub job_kind: String,
|
||||
pub worker_name: String,
|
||||
pub display_name: Option<String>,
|
||||
pub status: String,
|
||||
pub queue_name: Option<String>,
|
||||
pub requested_by: Option<String>,
|
||||
pub requested_source: Option<String>,
|
||||
pub trigger_mode: Option<String>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub payload: Option<Json>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub result: Option<Json>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub error_text: Option<String>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub tags: Option<Json>,
|
||||
pub related_entity_type: Option<String>,
|
||||
pub related_entity_id: Option<String>,
|
||||
pub attempts_count: i32,
|
||||
pub max_attempts: i32,
|
||||
pub cancel_requested: bool,
|
||||
pub queued_at: Option<String>,
|
||||
pub started_at: Option<String>,
|
||||
pub finished_at: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
@@ -12,3 +12,4 @@ pub mod storage;
|
||||
pub mod subscriptions;
|
||||
pub mod turnstile;
|
||||
pub mod web_push;
|
||||
pub mod worker_jobs;
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
use chrono::{Duration, Utc};
|
||||
use loco_rs::{
|
||||
bgworker::BackgroundWorker,
|
||||
prelude::*,
|
||||
};
|
||||
use loco_rs::prelude::*;
|
||||
use reqwest::Client;
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, Order, QueryFilter, QueryOrder,
|
||||
@@ -15,10 +12,7 @@ use uuid::Uuid;
|
||||
use crate::{
|
||||
mailers::subscription::SubscriptionMailer,
|
||||
models::_entities::{notification_deliveries, posts, subscriptions},
|
||||
services::{content, web_push as web_push_service},
|
||||
workers::notification_delivery::{
|
||||
NotificationDeliveryWorker, NotificationDeliveryWorkerArgs,
|
||||
},
|
||||
services::{content, web_push as web_push_service, worker_jobs},
|
||||
};
|
||||
|
||||
pub const CHANNEL_EMAIL: &str = "email";
|
||||
@@ -837,14 +831,16 @@ async fn update_subscription_delivery_state(
|
||||
}
|
||||
|
||||
async fn enqueue_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()> {
|
||||
match NotificationDeliveryWorker::perform_later(ctx, NotificationDeliveryWorkerArgs { delivery_id }).await {
|
||||
Ok(_) => Ok(()),
|
||||
Err(Error::QueueProviderMissing) => process_delivery(ctx, delivery_id).await,
|
||||
Err(error) => {
|
||||
tracing::warn!("failed to enqueue delivery #{delivery_id}, falling back to sync processing: {error}");
|
||||
process_delivery(ctx, delivery_id).await
|
||||
}
|
||||
}
|
||||
let _ = worker_jobs::queue_notification_delivery_job(
|
||||
ctx,
|
||||
delivery_id,
|
||||
None,
|
||||
Some("system".to_string()),
|
||||
None,
|
||||
Some("system".to_string()),
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn queue_direct_notification(
|
||||
|
||||
835
backend/src/services/worker_jobs.rs
Normal file
835
backend/src/services/worker_jobs.rs
Normal file
@@ -0,0 +1,835 @@
|
||||
use chrono::Utc;
|
||||
use loco_rs::{
|
||||
bgworker::BackgroundWorker,
|
||||
prelude::*,
|
||||
};
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, Order,
|
||||
PaginatorTrait, QueryFilter, QueryOrder, QuerySelect, Set,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{json, Value};
|
||||
|
||||
use crate::{
|
||||
models::_entities::{notification_deliveries, worker_jobs},
|
||||
services::subscriptions,
|
||||
workers::{
|
||||
downloader::{DownloadWorker, DownloadWorkerArgs},
|
||||
notification_delivery::{NotificationDeliveryWorker, NotificationDeliveryWorkerArgs},
|
||||
},
|
||||
};
|
||||
|
||||
pub const JOB_KIND_WORKER: &str = "worker";
|
||||
pub const JOB_KIND_TASK: &str = "task";
|
||||
|
||||
pub const JOB_STATUS_QUEUED: &str = "queued";
|
||||
pub const JOB_STATUS_RUNNING: &str = "running";
|
||||
pub const JOB_STATUS_SUCCEEDED: &str = "succeeded";
|
||||
pub const JOB_STATUS_FAILED: &str = "failed";
|
||||
pub const JOB_STATUS_CANCELLED: &str = "cancelled";
|
||||
|
||||
pub const WORKER_DOWNLOAD_MEDIA: &str = "worker.download_media";
|
||||
pub const WORKER_NOTIFICATION_DELIVERY: &str = "worker.notification_delivery";
|
||||
pub const TASK_RETRY_DELIVERIES: &str = "task.retry_deliveries";
|
||||
pub const TASK_SEND_WEEKLY_DIGEST: &str = "task.send_weekly_digest";
|
||||
pub const TASK_SEND_MONTHLY_DIGEST: &str = "task.send_monthly_digest";
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct WorkerJobListQuery {
|
||||
pub status: Option<String>,
|
||||
pub job_kind: Option<String>,
|
||||
pub worker_name: Option<String>,
|
||||
pub search: Option<String>,
|
||||
pub limit: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct WorkerCatalogEntry {
|
||||
pub worker_name: String,
|
||||
pub job_kind: String,
|
||||
pub label: String,
|
||||
pub description: String,
|
||||
pub queue_name: Option<String>,
|
||||
pub supports_cancel: bool,
|
||||
pub supports_retry: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct WorkerStats {
|
||||
pub worker_name: String,
|
||||
pub job_kind: String,
|
||||
pub label: String,
|
||||
pub queued: usize,
|
||||
pub running: usize,
|
||||
pub succeeded: usize,
|
||||
pub failed: usize,
|
||||
pub cancelled: usize,
|
||||
pub last_job_at: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct WorkerOverview {
|
||||
pub total_jobs: usize,
|
||||
pub queued: usize,
|
||||
pub running: usize,
|
||||
pub succeeded: usize,
|
||||
pub failed: usize,
|
||||
pub cancelled: usize,
|
||||
pub active_jobs: usize,
|
||||
pub worker_stats: Vec<WorkerStats>,
|
||||
pub catalog: Vec<WorkerCatalogEntry>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct WorkerJobRecord {
|
||||
pub created_at: String,
|
||||
pub updated_at: String,
|
||||
pub id: i32,
|
||||
pub parent_job_id: Option<i32>,
|
||||
pub job_kind: String,
|
||||
pub worker_name: String,
|
||||
pub display_name: Option<String>,
|
||||
pub status: String,
|
||||
pub queue_name: Option<String>,
|
||||
pub requested_by: Option<String>,
|
||||
pub requested_source: Option<String>,
|
||||
pub trigger_mode: Option<String>,
|
||||
pub payload: Option<Value>,
|
||||
pub result: Option<Value>,
|
||||
pub error_text: Option<String>,
|
||||
pub tags: Option<Value>,
|
||||
pub related_entity_type: Option<String>,
|
||||
pub related_entity_id: Option<String>,
|
||||
pub attempts_count: i32,
|
||||
pub max_attempts: i32,
|
||||
pub cancel_requested: bool,
|
||||
pub queued_at: Option<String>,
|
||||
pub started_at: Option<String>,
|
||||
pub finished_at: Option<String>,
|
||||
pub can_cancel: bool,
|
||||
pub can_retry: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct WorkerJobListResult {
|
||||
pub total: u64,
|
||||
pub jobs: Vec<WorkerJobRecord>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct WorkerTaskDispatchResult {
|
||||
pub queued: bool,
|
||||
pub job: WorkerJobRecord,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct CreateWorkerJobInput {
|
||||
parent_job_id: Option<i32>,
|
||||
job_kind: String,
|
||||
worker_name: String,
|
||||
display_name: Option<String>,
|
||||
queue_name: Option<String>,
|
||||
requested_by: Option<String>,
|
||||
requested_source: Option<String>,
|
||||
trigger_mode: Option<String>,
|
||||
payload: Option<Value>,
|
||||
tags: Option<Value>,
|
||||
related_entity_type: Option<String>,
|
||||
related_entity_id: Option<String>,
|
||||
max_attempts: i32,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct RetryDeliveriesTaskPayload {
|
||||
#[serde(default)]
|
||||
limit: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
struct DigestTaskPayload {
|
||||
period: String,
|
||||
}
|
||||
|
||||
fn now_rfc3339() -> String {
|
||||
Utc::now().to_rfc3339()
|
||||
}
|
||||
|
||||
fn trim_to_option(value: Option<String>) -> Option<String> {
|
||||
value.and_then(|item| {
|
||||
let trimmed = item.trim().to_string();
|
||||
if trimmed.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(trimmed)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn queue_name_for(worker_name: &str) -> Option<String> {
|
||||
match worker_name {
|
||||
WORKER_DOWNLOAD_MEDIA => Some("media".to_string()),
|
||||
WORKER_NOTIFICATION_DELIVERY => Some("notifications".to_string()),
|
||||
TASK_RETRY_DELIVERIES => Some("maintenance".to_string()),
|
||||
TASK_SEND_WEEKLY_DIGEST | TASK_SEND_MONTHLY_DIGEST => Some("digests".to_string()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn label_for(worker_name: &str) -> String {
|
||||
match worker_name {
|
||||
WORKER_DOWNLOAD_MEDIA => "远程媒体下载".to_string(),
|
||||
WORKER_NOTIFICATION_DELIVERY => "通知投递".to_string(),
|
||||
TASK_RETRY_DELIVERIES => "重试待投递通知".to_string(),
|
||||
TASK_SEND_WEEKLY_DIGEST => "发送周报".to_string(),
|
||||
TASK_SEND_MONTHLY_DIGEST => "发送月报".to_string(),
|
||||
_ => worker_name.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn description_for(worker_name: &str) -> String {
|
||||
match worker_name {
|
||||
WORKER_DOWNLOAD_MEDIA => "抓取远程图片 / PDF 到媒体库,并回写媒体元数据。".to_string(),
|
||||
WORKER_NOTIFICATION_DELIVERY => "执行订阅通知、测试通知与 digest 投递。".to_string(),
|
||||
TASK_RETRY_DELIVERIES => "扫描 retry_pending 的通知记录并重新入队。".to_string(),
|
||||
TASK_SEND_WEEKLY_DIGEST => "根据近期内容生成周报,并为活跃订阅目标入队。".to_string(),
|
||||
TASK_SEND_MONTHLY_DIGEST => "根据近期内容生成月报,并为活跃订阅目标入队。".to_string(),
|
||||
_ => "后台异步任务。".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn tags_for(worker_name: &str) -> Value {
|
||||
match worker_name {
|
||||
WORKER_DOWNLOAD_MEDIA => json!(["media", "download"]),
|
||||
WORKER_NOTIFICATION_DELIVERY => json!(["notifications", "delivery"]),
|
||||
TASK_RETRY_DELIVERIES => json!(["maintenance", "retry"]),
|
||||
TASK_SEND_WEEKLY_DIGEST => json!(["digest", "weekly"]),
|
||||
TASK_SEND_MONTHLY_DIGEST => json!(["digest", "monthly"]),
|
||||
_ => json!([]),
|
||||
}
|
||||
}
|
||||
|
||||
fn can_cancel_status(status: &str, cancel_requested: bool) -> bool {
|
||||
!cancel_requested && matches!(status, JOB_STATUS_QUEUED | JOB_STATUS_RUNNING)
|
||||
}
|
||||
|
||||
fn can_retry_status(status: &str) -> bool {
|
||||
matches!(status, JOB_STATUS_FAILED | JOB_STATUS_CANCELLED | JOB_STATUS_SUCCEEDED)
|
||||
}
|
||||
|
||||
fn to_job_record(item: worker_jobs::Model) -> WorkerJobRecord {
|
||||
WorkerJobRecord {
|
||||
created_at: item.created_at.to_rfc3339(),
|
||||
updated_at: item.updated_at.to_rfc3339(),
|
||||
id: item.id,
|
||||
parent_job_id: item.parent_job_id,
|
||||
job_kind: item.job_kind,
|
||||
worker_name: item.worker_name,
|
||||
display_name: item.display_name,
|
||||
status: item.status.clone(),
|
||||
queue_name: item.queue_name,
|
||||
requested_by: item.requested_by,
|
||||
requested_source: item.requested_source,
|
||||
trigger_mode: item.trigger_mode,
|
||||
payload: item.payload,
|
||||
result: item.result,
|
||||
error_text: item.error_text,
|
||||
tags: item.tags,
|
||||
related_entity_type: item.related_entity_type,
|
||||
related_entity_id: item.related_entity_id,
|
||||
attempts_count: item.attempts_count,
|
||||
max_attempts: item.max_attempts,
|
||||
cancel_requested: item.cancel_requested,
|
||||
queued_at: item.queued_at,
|
||||
started_at: item.started_at,
|
||||
finished_at: item.finished_at,
|
||||
can_cancel: can_cancel_status(&item.status, item.cancel_requested),
|
||||
can_retry: can_retry_status(&item.status),
|
||||
}
|
||||
}
|
||||
|
||||
fn catalog_entries() -> Vec<WorkerCatalogEntry> {
|
||||
[
|
||||
(WORKER_DOWNLOAD_MEDIA, JOB_KIND_WORKER, true, true),
|
||||
(WORKER_NOTIFICATION_DELIVERY, JOB_KIND_WORKER, true, true),
|
||||
(TASK_RETRY_DELIVERIES, JOB_KIND_TASK, true, true),
|
||||
(TASK_SEND_WEEKLY_DIGEST, JOB_KIND_TASK, true, true),
|
||||
(TASK_SEND_MONTHLY_DIGEST, JOB_KIND_TASK, true, true),
|
||||
]
|
||||
.into_iter()
|
||||
.map(|(worker_name, job_kind, supports_cancel, supports_retry)| WorkerCatalogEntry {
|
||||
worker_name: worker_name.to_string(),
|
||||
job_kind: job_kind.to_string(),
|
||||
label: label_for(worker_name),
|
||||
description: description_for(worker_name),
|
||||
queue_name: queue_name_for(worker_name),
|
||||
supports_cancel,
|
||||
supports_retry,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Inserts a new worker job row in the `queued` state and returns the
/// persisted model.
async fn create_job(ctx: &AppContext, input: CreateWorkerJobInput) -> Result<worker_jobs::Model> {
    Ok(worker_jobs::ActiveModel {
        parent_job_id: Set(input.parent_job_id),
        job_kind: Set(input.job_kind),
        worker_name: Set(input.worker_name),
        // Free-text fields are normalized: blank/whitespace strings become None.
        display_name: Set(trim_to_option(input.display_name)),
        status: Set(JOB_STATUS_QUEUED.to_string()),
        queue_name: Set(trim_to_option(input.queue_name)),
        requested_by: Set(trim_to_option(input.requested_by)),
        requested_source: Set(trim_to_option(input.requested_source)),
        trigger_mode: Set(trim_to_option(input.trigger_mode)),
        payload: Set(input.payload),
        // Execution output starts empty; filled in when the job finishes.
        result: Set(None),
        error_text: Set(None),
        tags: Set(input.tags),
        related_entity_type: Set(trim_to_option(input.related_entity_type)),
        related_entity_id: Set(trim_to_option(input.related_entity_id)),
        attempts_count: Set(0),
        // Guard against callers passing 0: every job gets at least one attempt.
        max_attempts: Set(input.max_attempts.max(1)),
        cancel_requested: Set(false),
        queued_at: Set(Some(now_rfc3339())),
        started_at: Set(None),
        finished_at: Set(None),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?)
}
|
||||
|
||||
async fn find_job(ctx: &AppContext, id: i32) -> Result<worker_jobs::Model> {
|
||||
worker_jobs::Entity::find_by_id(id)
|
||||
.one(&ctx.db)
|
||||
.await?
|
||||
.ok_or(Error::NotFound)
|
||||
}
|
||||
|
||||
async fn dispatch_download(args_ctx: AppContext, args: DownloadWorkerArgs) {
|
||||
let worker = DownloadWorker::build(&args_ctx);
|
||||
if let Err(error) = worker.perform(args).await {
|
||||
tracing::warn!("download worker execution failed: {error}");
|
||||
}
|
||||
}
|
||||
|
||||
async fn dispatch_notification_delivery(args_ctx: AppContext, args: NotificationDeliveryWorkerArgs) {
|
||||
let worker = NotificationDeliveryWorker::build(&args_ctx);
|
||||
if let Err(error) = worker.perform(args).await {
|
||||
tracing::warn!("notification delivery worker execution failed: {error}");
|
||||
}
|
||||
}
|
||||
|
||||
async fn enqueue_download_worker(ctx: &AppContext, args: DownloadWorkerArgs) -> Result<()> {
|
||||
match DownloadWorker::perform_later(ctx, args.clone()).await {
|
||||
Ok(_) => Ok(()),
|
||||
Err(Error::QueueProviderMissing) => {
|
||||
tokio::spawn(dispatch_download(ctx.clone(), args));
|
||||
Ok(())
|
||||
}
|
||||
Err(error) => {
|
||||
tracing::warn!("download worker queue unavailable, falling back to local task: {error}");
|
||||
tokio::spawn(dispatch_download(ctx.clone(), args));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn enqueue_notification_worker(
|
||||
ctx: &AppContext,
|
||||
args: NotificationDeliveryWorkerArgs,
|
||||
) -> Result<()> {
|
||||
match NotificationDeliveryWorker::perform_later(ctx, args.clone()).await {
|
||||
Ok(_) => Ok(()),
|
||||
Err(Error::QueueProviderMissing) => {
|
||||
tokio::spawn(dispatch_notification_delivery(ctx.clone(), args));
|
||||
Ok(())
|
||||
}
|
||||
Err(error) => {
|
||||
tracing::warn!("notification worker queue unavailable, falling back to local task: {error}");
|
||||
tokio::spawn(dispatch_notification_delivery(ctx.clone(), args));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Detached task body: retries due notification deliveries and records the
/// outcome on worker job `job_id`.
async fn run_retry_deliveries_task(ctx: AppContext, job_id: i32, limit: Option<u64>) {
    // Transition the job to running; bail out if it was cancelled meanwhile.
    match begin_job_execution(&ctx, job_id).await {
        Ok(true) => {}
        Ok(false) => return,
        Err(error) => {
            tracing::warn!("failed to start retry deliveries job #{job_id}: {error}");
            return;
        }
    }

    // Wrap the work in an async block so both fallible steps funnel into a
    // single Result handled once below.
    let result = async {
        // Default batch size when the caller did not specify one.
        let effective_limit = limit.unwrap_or(60);
        let queued = subscriptions::retry_due_deliveries(&ctx, effective_limit).await?;
        mark_job_succeeded(
            &ctx,
            job_id,
            Some(json!({
                "limit": effective_limit,
                "queued": queued,
            })),
        )
        .await
    }
    .await;

    if let Err(error) = result {
        // Best-effort bookkeeping; nothing else to do in a detached task.
        let _ = mark_job_failed(&ctx, job_id, error.to_string()).await;
    }
}
|
||||
|
||||
/// Detached task body: sends the subscription digest for `period` and
/// records the outcome on worker job `job_id`.
async fn run_digest_task(ctx: AppContext, job_id: i32, period: String) {
    // Transition the job to running; bail out if it was cancelled meanwhile.
    match begin_job_execution(&ctx, job_id).await {
        Ok(true) => {}
        Ok(false) => return,
        Err(error) => {
            tracing::warn!("failed to start digest job #{job_id}: {error}");
            return;
        }
    }

    // Wrap the work in an async block so both fallible steps funnel into a
    // single Result handled once below.
    let result = async {
        let summary = subscriptions::send_digest(&ctx, &period).await?;
        mark_job_succeeded(
            &ctx,
            job_id,
            // Persist the digest summary as the job result payload.
            Some(json!({
                "period": summary.period,
                "post_count": summary.post_count,
                "queued": summary.queued,
                "skipped": summary.skipped,
            })),
        )
        .await
    }
    .await;

    if let Err(error) = result {
        // Best-effort bookkeeping; nothing else to do in a detached task.
        let _ = mark_job_failed(&ctx, job_id, error.to_string()).await;
    }
}
|
||||
|
||||
/// Builds the dashboard overview: global status counters, per-worker
/// statistics, and the static worker catalog.
///
/// NOTE(review): this loads every worker_jobs row into memory; presumably
/// acceptable at current volumes — confirm before the table grows large.
pub async fn get_overview(ctx: &AppContext) -> Result<WorkerOverview> {
    // Newest first, so the first row seen per worker is its latest job.
    let items = worker_jobs::Entity::find()
        .order_by(worker_jobs::Column::CreatedAt, Order::Desc)
        .all(&ctx.db)
        .await?;

    let mut overview = WorkerOverview {
        total_jobs: items.len(),
        queued: 0,
        running: 0,
        succeeded: 0,
        failed: 0,
        cancelled: 0,
        active_jobs: 0,
        worker_stats: Vec::new(),
        catalog: catalog_entries(),
    };

    // BTreeMap keeps worker_stats sorted by worker name.
    let mut grouped = std::collections::BTreeMap::<String, WorkerStats>::new();

    for item in items {
        // Global counters; unknown statuses are ignored.
        match item.status.as_str() {
            JOB_STATUS_QUEUED => overview.queued += 1,
            JOB_STATUS_RUNNING => overview.running += 1,
            JOB_STATUS_SUCCEEDED => overview.succeeded += 1,
            JOB_STATUS_FAILED => overview.failed += 1,
            JOB_STATUS_CANCELLED => overview.cancelled += 1,
            _ => {}
        }

        let entry = grouped.entry(item.worker_name.clone()).or_insert_with(|| WorkerStats {
            worker_name: item.worker_name.clone(),
            job_kind: item.job_kind.clone(),
            label: label_for(&item.worker_name),
            queued: 0,
            running: 0,
            succeeded: 0,
            failed: 0,
            cancelled: 0,
            last_job_at: None,
        });

        // Per-worker counters, mirroring the global tally above.
        match item.status.as_str() {
            JOB_STATUS_QUEUED => entry.queued += 1,
            JOB_STATUS_RUNNING => entry.running += 1,
            JOB_STATUS_SUCCEEDED => entry.succeeded += 1,
            JOB_STATUS_FAILED => entry.failed += 1,
            JOB_STATUS_CANCELLED => entry.cancelled += 1,
            _ => {}
        }
        // Rows arrive newest-first, so only the first one per worker sets this.
        if entry.last_job_at.is_none() {
            entry.last_job_at = Some(item.created_at.to_rfc3339());
        }
    }

    overview.active_jobs = overview.queued + overview.running;
    overview.worker_stats = grouped.into_values().collect();
    Ok(overview)
}
|
||||
|
||||
/// Lists worker jobs, newest first, with optional filters on status, job
/// kind, worker name, and a free-text search over name/entity columns.
///
/// `total` reflects the filtered count before the limit (default 120) is
/// applied.
pub async fn list_jobs(ctx: &AppContext, query: WorkerJobListQuery) -> Result<WorkerJobListResult> {
    let mut db_query = worker_jobs::Entity::find().order_by(worker_jobs::Column::CreatedAt, Order::Desc);

    // Each filter is applied only when the parameter is non-blank after trimming.
    if let Some(status) = query.status.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
        db_query = db_query.filter(worker_jobs::Column::Status.eq(status));
    }
    if let Some(job_kind) = query.job_kind.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
        db_query = db_query.filter(worker_jobs::Column::JobKind.eq(job_kind));
    }
    if let Some(worker_name) = query.worker_name.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
        db_query = db_query.filter(worker_jobs::Column::WorkerName.eq(worker_name));
    }
    if let Some(search) = query.search.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
        // OR-match the search term across the human-facing columns.
        db_query = db_query.filter(
            Condition::any()
                .add(worker_jobs::Column::WorkerName.contains(search.clone()))
                .add(worker_jobs::Column::DisplayName.contains(search.clone()))
                .add(worker_jobs::Column::RelatedEntityId.contains(search.clone()))
                .add(worker_jobs::Column::RelatedEntityType.contains(search)),
        );
    }

    // Count before limiting so pagination metadata stays accurate.
    let total = db_query.clone().count(&ctx.db).await?;
    let limit = query.limit.unwrap_or(120);
    let items = db_query.limit(limit).all(&ctx.db).await?;

    Ok(WorkerJobListResult {
        total,
        jobs: items.into_iter().map(to_job_record).collect(),
    })
}
|
||||
|
||||
pub async fn get_job_record(ctx: &AppContext, id: i32) -> Result<WorkerJobRecord> {
|
||||
Ok(to_job_record(find_job(ctx, id).await?))
|
||||
}
|
||||
|
||||
pub async fn find_latest_job_by_related_entity(
|
||||
ctx: &AppContext,
|
||||
related_entity_type: &str,
|
||||
related_entity_id: &str,
|
||||
worker_name: Option<&str>,
|
||||
) -> Result<Option<WorkerJobRecord>> {
|
||||
let mut query = worker_jobs::Entity::find()
|
||||
.filter(worker_jobs::Column::RelatedEntityType.eq(related_entity_type.to_string()))
|
||||
.filter(worker_jobs::Column::RelatedEntityId.eq(related_entity_id.to_string()))
|
||||
.order_by(worker_jobs::Column::CreatedAt, Order::Desc);
|
||||
|
||||
if let Some(worker_name) = worker_name.map(str::trim).filter(|value| !value.is_empty()) {
|
||||
query = query.filter(worker_jobs::Column::WorkerName.eq(worker_name.to_string()));
|
||||
}
|
||||
|
||||
Ok(query.one(&ctx.db).await?.map(to_job_record))
|
||||
}
|
||||
|
||||
/// Transitions a job into the `running` state for a new attempt.
///
/// Returns `Ok(false)` when the job must not run: it is already cancelled,
/// or a cancel was requested before it started (in which case the job is
/// finalized as cancelled here). Returns `Ok(true)` once the row has been
/// moved to `running`.
pub async fn begin_job_execution(ctx: &AppContext, id: i32) -> Result<bool> {
    let item = find_job(ctx, id).await?;
    if item.status == JOB_STATUS_CANCELLED {
        return Ok(false);
    }
    if item.cancel_requested {
        // Cancel arrived between enqueue and execution: finalize and skip.
        finish_job_cancelled(ctx, id, Some("job cancelled before execution".to_string())).await?;
        return Ok(false);
    }

    let attempts_count = item.attempts_count + 1;
    let mut active = item.into_active_model();
    active.status = Set(JOB_STATUS_RUNNING.to_string());
    active.started_at = Set(Some(now_rfc3339()));
    // Clear any leftovers from a previous attempt.
    active.finished_at = Set(None);
    active.error_text = Set(None);
    active.result = Set(None);
    active.attempts_count = Set(attempts_count);
    let _ = active.update(&ctx.db).await?;
    Ok(true)
}
|
||||
|
||||
pub async fn mark_job_succeeded(ctx: &AppContext, id: i32, result: Option<Value>) -> Result<()> {
|
||||
let item = find_job(ctx, id).await?;
|
||||
let mut active = item.into_active_model();
|
||||
active.status = Set(JOB_STATUS_SUCCEEDED.to_string());
|
||||
active.result = Set(result);
|
||||
active.error_text = Set(None);
|
||||
active.finished_at = Set(Some(now_rfc3339()));
|
||||
active.update(&ctx.db).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn mark_job_failed(ctx: &AppContext, id: i32, error_text: String) -> Result<()> {
|
||||
let item = find_job(ctx, id).await?;
|
||||
let mut active = item.into_active_model();
|
||||
active.status = Set(JOB_STATUS_FAILED.to_string());
|
||||
active.error_text = Set(Some(error_text));
|
||||
active.finished_at = Set(Some(now_rfc3339()));
|
||||
active.update(&ctx.db).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn finish_job_cancelled(
|
||||
ctx: &AppContext,
|
||||
id: i32,
|
||||
error_text: Option<String>,
|
||||
) -> Result<()> {
|
||||
let item = find_job(ctx, id).await?;
|
||||
let mut active = item.into_active_model();
|
||||
active.status = Set(JOB_STATUS_CANCELLED.to_string());
|
||||
active.cancel_requested = Set(true);
|
||||
active.finished_at = Set(Some(now_rfc3339()));
|
||||
if error_text.is_some() {
|
||||
active.error_text = Set(error_text);
|
||||
}
|
||||
active.update(&ctx.db).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Flags a job for cancellation and returns the updated record.
///
/// A job that has not started yet is finalized as cancelled immediately;
/// a running job only gets `cancel_requested` set and is expected to
/// observe it before/at its next execution checkpoint.
pub async fn request_cancel(ctx: &AppContext, id: i32) -> Result<WorkerJobRecord> {
    let item = find_job(ctx, id).await?;
    let mut active = item.clone().into_active_model();
    active.cancel_requested = Set(true);

    if item.status == JOB_STATUS_QUEUED {
        // Not started yet: we can settle the final state right here.
        active.status = Set(JOB_STATUS_CANCELLED.to_string());
        active.finished_at = Set(Some(now_rfc3339()));
        active.error_text = Set(Some("job cancelled before start".to_string()));
    }

    let updated = active.update(&ctx.db).await?;
    Ok(to_job_record(updated))
}
|
||||
|
||||
/// Creates a tracked worker-job row for a media download and enqueues the
/// download worker with the job id attached, so the worker can report its
/// status back. Returns the freshly created job record.
pub async fn queue_download_job(
    ctx: &AppContext,
    args: &DownloadWorkerArgs,
    requested_by: Option<String>,
    requested_source: Option<String>,
    parent_job_id: Option<i32>,
    trigger_mode: Option<String>,
) -> Result<WorkerJobRecord> {
    // The full worker args are persisted as the job payload so the job can
    // later be retried from the stored payload.
    let payload = serde_json::to_value(args)?;
    let job = create_job(
        ctx,
        CreateWorkerJobInput {
            parent_job_id,
            job_kind: JOB_KIND_WORKER.to_string(),
            worker_name: WORKER_DOWNLOAD_MEDIA.to_string(),
            // Prefer the caller-supplied title; fall back to the source URL.
            display_name: Some(
                args.title
                    .clone()
                    .filter(|value| !value.trim().is_empty())
                    .unwrap_or_else(|| format!("download {}", args.source_url)),
            ),
            queue_name: queue_name_for(WORKER_DOWNLOAD_MEDIA),
            requested_by,
            requested_source,
            trigger_mode,
            payload: Some(payload),
            tags: Some(tags_for(WORKER_DOWNLOAD_MEDIA)),
            related_entity_type: Some("media_download".to_string()),
            related_entity_id: Some(args.source_url.clone()),
            max_attempts: 1,
        },
    )
    .await?;

    // Attach the new job id so the worker reports status to this row.
    let mut worker_args = args.clone();
    worker_args.job_id = Some(job.id);
    enqueue_download_worker(ctx, worker_args).await?;
    // Re-read so the returned record reflects any state change since insert.
    get_job_record(ctx, job.id).await
}
|
||||
|
||||
/// Creates a tracked worker-job row for a single notification delivery and
/// enqueues the delivery worker with the job id attached.
///
/// Fails with `Error::NotFound` when the delivery row does not exist.
pub async fn queue_notification_delivery_job(
    ctx: &AppContext,
    delivery_id: i32,
    requested_by: Option<String>,
    requested_source: Option<String>,
    parent_job_id: Option<i32>,
    trigger_mode: Option<String>,
) -> Result<WorkerJobRecord> {
    // Load the delivery up front: validates the id and supplies the
    // event type / target used for the display name.
    let delivery = notification_deliveries::Entity::find_by_id(delivery_id)
        .one(&ctx.db)
        .await?
        .ok_or(Error::NotFound)?;

    // Payload stored without a job id; the id is attached after insert below.
    let base_args = NotificationDeliveryWorkerArgs {
        delivery_id,
        job_id: None,
    };
    let payload = serde_json::to_value(&base_args)?;
    let display_name = format!("{} → {}", delivery.event_type, delivery.target);

    let job = create_job(
        ctx,
        CreateWorkerJobInput {
            parent_job_id,
            job_kind: JOB_KIND_WORKER.to_string(),
            worker_name: WORKER_NOTIFICATION_DELIVERY.to_string(),
            display_name: Some(display_name),
            queue_name: queue_name_for(WORKER_NOTIFICATION_DELIVERY),
            requested_by,
            requested_source,
            trigger_mode,
            payload: Some(payload),
            tags: Some(tags_for(WORKER_NOTIFICATION_DELIVERY)),
            related_entity_type: Some("notification_delivery".to_string()),
            related_entity_id: Some(delivery_id.to_string()),
            max_attempts: 1,
        },
    )
    .await?;

    // Attach the new job id so the worker reports status to this row.
    let args = NotificationDeliveryWorkerArgs {
        delivery_id,
        job_id: Some(job.id),
    };
    enqueue_notification_worker(ctx, args).await?;
    // Re-read so the returned record reflects any state change since insert.
    get_job_record(ctx, job.id).await
}
|
||||
|
||||
/// Creates a tracked task-job row for retrying due notification deliveries
/// and runs the task on a detached tokio task (not the background queue).
pub async fn spawn_retry_deliveries_task(
    ctx: &AppContext,
    limit: Option<u64>,
    requested_by: Option<String>,
    requested_source: Option<String>,
    parent_job_id: Option<i32>,
    trigger_mode: Option<String>,
) -> Result<WorkerJobRecord> {
    // Persist the limit as the payload so a retry replays the same batch size.
    let payload = serde_json::to_value(RetryDeliveriesTaskPayload { limit })?;
    let job = create_job(
        ctx,
        CreateWorkerJobInput {
            parent_job_id,
            job_kind: JOB_KIND_TASK.to_string(),
            worker_name: TASK_RETRY_DELIVERIES.to_string(),
            display_name: Some("重试待投递通知".to_string()),
            queue_name: queue_name_for(TASK_RETRY_DELIVERIES),
            requested_by,
            requested_source,
            trigger_mode,
            payload: Some(payload),
            tags: Some(tags_for(TASK_RETRY_DELIVERIES)),
            related_entity_type: Some("notification_delivery".to_string()),
            // No single entity: this task sweeps all due deliveries.
            related_entity_id: None,
            max_attempts: 1,
        },
    )
    .await?;

    // Fire-and-forget: the task records its own outcome on the job row.
    tokio::spawn(run_retry_deliveries_task(ctx.clone(), job.id, limit));
    get_job_record(ctx, job.id).await
}
|
||||
|
||||
/// Creates a tracked task-job row for a subscription digest run and starts
/// it on a detached tokio task.
///
/// `period` is normalized to "monthly" or "weekly" (anything that is not
/// "monthly", case-insensitively, becomes "weekly").
pub async fn spawn_digest_task(
    ctx: &AppContext,
    period: &str,
    requested_by: Option<String>,
    requested_source: Option<String>,
    parent_job_id: Option<i32>,
    trigger_mode: Option<String>,
) -> Result<WorkerJobRecord> {
    let normalized_period = match period.trim().to_ascii_lowercase().as_str() {
        "monthly" => "monthly",
        // Anything else (including blank) falls back to the weekly digest.
        _ => "weekly",
    }
    .to_string();
    let payload = serde_json::to_value(DigestTaskPayload {
        period: normalized_period.clone(),
    })?;
    // The two digest periods are modeled as distinct worker names.
    let worker_name = if normalized_period == "monthly" {
        TASK_SEND_MONTHLY_DIGEST
    } else {
        TASK_SEND_WEEKLY_DIGEST
    };

    let job = create_job(
        ctx,
        CreateWorkerJobInput {
            parent_job_id,
            job_kind: JOB_KIND_TASK.to_string(),
            worker_name: worker_name.to_string(),
            display_name: Some(if normalized_period == "monthly" {
                "发送月报".to_string()
            } else {
                "发送周报".to_string()
            }),
            queue_name: queue_name_for(worker_name),
            requested_by,
            requested_source,
            trigger_mode,
            payload: Some(payload),
            tags: Some(tags_for(worker_name)),
            related_entity_type: Some("subscription_digest".to_string()),
            related_entity_id: Some(normalized_period.clone()),
            max_attempts: 1,
        },
    )
    .await?;

    // Fire-and-forget: the task records its own outcome on the job row.
    tokio::spawn(run_digest_task(ctx.clone(), job.id, normalized_period));
    get_job_record(ctx, job.id).await
}
|
||||
|
||||
/// Retries a job by replaying its stored payload through the matching
/// queue/spawn entry point, creating a NEW job whose `parent_job_id` points
/// at the original. The original row itself is not mutated.
///
/// Fails with `BadRequest` for worker names that have no retry path.
pub async fn retry_job(
    ctx: &AppContext,
    id: i32,
    requested_by: Option<String>,
    requested_source: Option<String>,
) -> Result<WorkerJobRecord> {
    let item = find_job(ctx, id).await?;
    // A missing payload deserializes as JSON null; the per-worker
    // `from_value` calls below will reject it with a deserialization error.
    let payload = item.payload.clone().unwrap_or(Value::Null);

    match item.worker_name.as_str() {
        WORKER_DOWNLOAD_MEDIA => {
            let args = serde_json::from_value::<DownloadWorkerArgs>(payload)?;
            queue_download_job(
                ctx,
                &args,
                requested_by,
                requested_source,
                // Link the new job back to the one being retried.
                Some(item.id),
                Some("retry".to_string()),
            )
            .await
        }
        WORKER_NOTIFICATION_DELIVERY => {
            let args = serde_json::from_value::<NotificationDeliveryWorkerArgs>(payload)?;
            queue_notification_delivery_job(
                ctx,
                args.delivery_id,
                requested_by,
                requested_source,
                Some(item.id),
                Some("retry".to_string()),
            )
            .await
        }
        TASK_RETRY_DELIVERIES => {
            let args = serde_json::from_value::<RetryDeliveriesTaskPayload>(payload)?;
            spawn_retry_deliveries_task(
                ctx,
                args.limit,
                requested_by,
                requested_source,
                Some(item.id),
                Some("retry".to_string()),
            )
            .await
        }
        TASK_SEND_WEEKLY_DIGEST | TASK_SEND_MONTHLY_DIGEST => {
            let args = serde_json::from_value::<DigestTaskPayload>(payload)?;
            spawn_digest_task(
                ctx,
                &args.period,
                requested_by,
                requested_source,
                Some(item.id),
                Some("retry".to_string()),
            )
            .await
        }
        _ => Err(Error::BadRequest(format!("不支持重试任务:{}", item.worker_name))),
    }
}
|
||||
@@ -1,13 +1,231 @@
|
||||
use loco_rs::prelude::*;
|
||||
use reqwest::{header, redirect::Policy, Url};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::services::{media_assets, storage, worker_jobs};
|
||||
|
||||
/// Background worker that fetches a remote media file and stores it.
pub struct DownloadWorker {
    // Application context used for storage, database, and job bookkeeping.
    pub ctx: AppContext,
}
|
||||
|
||||
#[derive(Deserialize, Debug, Serialize)]
|
||||
#[derive(Clone, Deserialize, Debug, Serialize)]
|
||||
pub struct DownloadWorkerArgs {
|
||||
pub user_guid: String,
|
||||
pub source_url: String,
|
||||
#[serde(default)]
|
||||
pub prefix: Option<String>,
|
||||
#[serde(default)]
|
||||
pub title: Option<String>,
|
||||
#[serde(default)]
|
||||
pub alt_text: Option<String>,
|
||||
#[serde(default)]
|
||||
pub caption: Option<String>,
|
||||
#[serde(default)]
|
||||
pub tags: Vec<String>,
|
||||
#[serde(default)]
|
||||
pub notes: Option<String>,
|
||||
#[serde(default)]
|
||||
pub job_id: Option<i32>,
|
||||
}
|
||||
|
||||
/// Result of a completed remote-media download, describing the object as
/// it now exists in storage.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct DownloadedMediaObject {
    // Object key within the storage backend.
    pub key: String,
    // URL of the stored object as returned by the storage layer.
    pub url: String,
    // Size of the downloaded payload in bytes.
    pub size_bytes: i64,
    // Final URL after redirects, i.e. where the bytes actually came from.
    pub source_url: String,
    // Content type reported by the remote server, if any.
    pub content_type: Option<String>,
}
|
||||
|
||||
/// Trims `value` and maps absent or whitespace-only strings to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let trimmed = value?.trim().to_string();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed)
    }
}
|
||||
|
||||
/// Normalizes an optional storage prefix: defaults to "uploads" when
/// absent, then strips surrounding whitespace and slashes.
fn normalize_prefix(value: Option<String>) -> String {
    let raw = value.unwrap_or_else(|| "uploads".to_string());
    raw.trim().trim_matches('/').to_string()
}
|
||||
|
||||
fn derive_file_name(url: &Url) -> Option<String> {
|
||||
url.path_segments()
|
||||
.and_then(|segments| segments.last())
|
||||
.map(str::trim)
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(ToString::to_string)
|
||||
}
|
||||
|
||||
/// Picks a file extension for a downloaded asset.
///
/// Preference order:
/// 1. the extension embedded in `file_name` — only when the name actually
///    contains a dot and the part after the last dot is short plain ASCII —
///    lowercased;
/// 2. a well-known mapping from the MIME type in `content_type`
///    (parameters such as `; charset=...` are ignored).
///
/// Returns `None` when neither source yields a usable extension.
fn infer_extension(file_name: Option<&str>, content_type: Option<&str>) -> Option<String> {
    // Bug fix: the previous `name.rsplit('.').next()` returned the WHOLE
    // name when it contained no dot (e.g. "download"), so a dot-less file
    // name shadowed a perfectly good content type. Split on the last dot
    // explicitly instead.
    let from_name = file_name
        .and_then(|name| name.rsplit_once('.'))
        .map(|(_, ext)| ext.trim())
        .filter(|ext| !ext.is_empty())
        .map(str::to_ascii_lowercase);

    if let Some(ext) = from_name
        .as_deref()
        .filter(|ext| ext.chars().all(|ch| ch.is_ascii_alphanumeric()) && ext.len() <= 10)
    {
        return Some(ext.to_string());
    }

    // Strip MIME parameters, then normalize case/whitespace before matching.
    match content_type
        .unwrap_or_default()
        .split(';')
        .next()
        .unwrap_or_default()
        .trim()
        .to_ascii_lowercase()
        .as_str()
    {
        "image/png" => Some("png".to_string()),
        "image/jpeg" => Some("jpg".to_string()),
        "image/webp" => Some("webp".to_string()),
        "image/gif" => Some("gif".to_string()),
        "image/avif" => Some("avif".to_string()),
        "image/svg+xml" => Some("svg".to_string()),
        "application/pdf" => Some("pdf".to_string()),
        _ => None,
    }
}
|
||||
|
||||
/// Whether the remote content type is one we accept for download
/// (raster/vector images and PDF).
///
/// Fixes two defects in the original: HTTP media types are case-insensitive
/// (RFC 9110 §8.3.1) but the match was case-sensitive, and trimming ran
/// BEFORE `split(';')`, so "image/png ; charset=x" left a trailing space on
/// the compared token. The token is now trimmed and lowercased after the
/// parameter split, consistent with `infer_extension`.
fn is_supported_content_type(value: Option<&str>) -> bool {
    let mime = value
        .unwrap_or_default()
        .split(';')
        .next()
        .unwrap_or_default()
        .trim()
        .to_ascii_lowercase();
    matches!(
        mime.as_str(),
        "image/png"
            | "image/jpeg"
            | "image/webp"
            | "image/gif"
            | "image/avif"
            | "image/svg+xml"
            | "application/pdf"
    )
}
|
||||
|
||||
/// Chooses a human-readable title for the downloaded asset: the explicit
/// title from the request when present, otherwise a cleaned-up version of
/// the remote file name (extension stripped, dashes/underscores turned
/// into spaces), and as a last resort the placeholder "remote asset".
fn default_title(args: &DownloadWorkerArgs, file_name: Option<&str>) -> String {
    trim_to_option(args.title.clone())
        .or_else(|| {
            file_name.map(|value| {
                value
                    // Drop everything after the last dot; keep the whole
                    // name when there is no dot.
                    .rsplit_once('.')
                    .map(|(stem, _)| stem)
                    .unwrap_or(value)
                    .replace(['-', '_'], " ")
                    .trim()
                    .to_string()
            })
        })
        // A file name that reduced to nothing also falls through.
        .filter(|value| !value.is_empty())
        .unwrap_or_else(|| "remote asset".to_string())
}
|
||||
|
||||
fn merge_notes(notes: Option<String>, source_url: &str) -> Option<String> {
|
||||
let note = notes.unwrap_or_default().trim().to_string();
|
||||
let source_line = format!("source_url: {source_url}");
|
||||
|
||||
if note.is_empty() {
|
||||
return Some(source_line);
|
||||
}
|
||||
|
||||
if note.contains(&source_line) {
|
||||
return Some(note);
|
||||
}
|
||||
|
||||
Some(format!("{note}\n{source_line}"))
|
||||
}
|
||||
|
||||
/// Downloads a remote media file, uploads it to object storage, and
/// upserts its metadata record. Returns a description of the stored object.
///
/// Pipeline: validate/parse URL → GET with bounded redirects → verify
/// status and content type → read body → derive key → upload → upsert
/// metadata. All failure modes surface as `Error::BadRequest`.
///
/// NOTE(review): the whole body is buffered in memory and there is no size
/// limit on the remote response — confirm this is acceptable for the
/// expected asset sizes.
pub async fn download_media_to_storage(
    ctx: &AppContext,
    args: &DownloadWorkerArgs,
) -> Result<DownloadedMediaObject> {
    // Reject blank URLs before attempting to parse.
    let source_url = trim_to_option(Some(args.source_url.clone()))
        .ok_or_else(|| Error::BadRequest("source_url 不能为空".to_string()))?;
    let parsed_url = Url::parse(&source_url)
        .map_err(|_| Error::BadRequest("source_url 必须是合法的绝对 URL".to_string()))?;

    // Follow at most 5 redirects to avoid redirect loops.
    let client = reqwest::Client::builder()
        .redirect(Policy::limited(5))
        .build()
        .map_err(|error| Error::BadRequest(format!("初始化下载客户端失败: {error}")))?;

    let response = client
        .get(parsed_url.clone())
        .send()
        .await
        .map_err(|error| Error::BadRequest(format!("下载远程媒体失败: {error}")))?;

    if !response.status().is_success() {
        return Err(Error::BadRequest(format!(
            "下载远程媒体失败,状态码:{}",
            response.status()
        )));
    }

    // The post-redirect URL: used for file-name derivation and provenance.
    let final_url = response.url().clone();
    let content_type = response
        .headers()
        .get(header::CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
        .map(ToString::to_string);

    // Only images and PDFs are accepted.
    if !is_supported_content_type(content_type.as_deref()) {
        return Err(Error::BadRequest(
            "仅支持图片或 PDF 资源的远程抓取".to_string(),
        ));
    }

    let bytes = response
        .bytes()
        .await
        .map_err(|error| Error::BadRequest(format!("读取远程媒体内容失败: {error}")))?;

    if bytes.is_empty() {
        return Err(Error::BadRequest("下载到的远程媒体内容为空".to_string()));
    }

    // Build the storage key from prefix + title + inferred extension.
    let file_name = derive_file_name(&final_url);
    let extension = infer_extension(file_name.as_deref(), content_type.as_deref())
        .ok_or_else(|| Error::BadRequest("无法识别远程媒体文件类型".to_string()))?;
    let prefix = normalize_prefix(args.prefix.clone());
    let object_key = storage::build_object_key(
        &prefix,
        &default_title(args, file_name.as_deref()),
        &extension,
    );
    // Assets are immutable once stored, hence the aggressive cache policy.
    let stored = storage::upload_bytes_to_r2(
        ctx,
        &object_key,
        bytes.to_vec(),
        content_type.as_deref(),
        Some("public, max-age=31536000, immutable"),
    )
    .await?;

    // Record/refresh the asset metadata keyed by the stored object key.
    media_assets::upsert_by_key(
        ctx,
        &stored.key,
        media_assets::MediaAssetMetadataInput {
            title: trim_to_option(args.title.clone())
                .or_else(|| trim_to_option(Some(default_title(args, file_name.as_deref())))),
            alt_text: trim_to_option(args.alt_text.clone()),
            caption: trim_to_option(args.caption.clone()),
            // Only attach tags when the caller supplied any.
            tags: (!args.tags.is_empty()).then_some(args.tags.clone()),
            // Provenance: always record where the bytes came from.
            notes: merge_notes(args.notes.clone(), final_url.as_str()),
        },
    )
    .await?;

    Ok(DownloadedMediaObject {
        key: stored.key,
        url: stored.url,
        size_bytes: bytes.len() as i64,
        source_url: final_url.to_string(),
        content_type,
    })
}
|
||||
|
||||
#[async_trait]
|
||||
@@ -15,9 +233,31 @@ impl BackgroundWorker<DownloadWorkerArgs> for DownloadWorker {
|
||||
/// Constructs the worker with its own clone of the application context.
fn build(ctx: &AppContext) -> Self {
    Self { ctx: ctx.clone() }
}
|
||||
async fn perform(&self, _args: DownloadWorkerArgs) -> Result<()> {
|
||||
// TODO: Some actual work goes here...
|
||||
|
||||
Ok(())
|
||||
async fn perform(&self, args: DownloadWorkerArgs) -> Result<()> {
|
||||
if let Some(job_id) = args.job_id {
|
||||
if !worker_jobs::begin_job_execution(&self.ctx, job_id).await? {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
match download_media_to_storage(&self.ctx, &args).await {
|
||||
Ok(downloaded) => {
|
||||
worker_jobs::mark_job_succeeded(
|
||||
&self.ctx,
|
||||
job_id,
|
||||
Some(serde_json::to_value(downloaded)?),
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
Err(error) => {
|
||||
worker_jobs::mark_job_failed(&self.ctx, job_id, error.to_string()).await?;
|
||||
Err(error)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
download_media_to_storage(&self.ctx, &args).await?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use loco_rs::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::services::subscriptions;
|
||||
use crate::services::{subscriptions, worker_jobs};
|
||||
|
||||
pub struct NotificationDeliveryWorker {
|
||||
pub ctx: AppContext,
|
||||
@@ -10,6 +10,8 @@ pub struct NotificationDeliveryWorker {
|
||||
/// Arguments for the notification delivery worker.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct NotificationDeliveryWorkerArgs {
    // Primary key of the notification delivery row to process.
    pub delivery_id: i32,
    // Optional worker_jobs tracking id; `None` means an untracked run.
    #[serde(default)]
    pub job_id: Option<i32>,
}
|
||||
|
||||
#[async_trait]
|
||||
@@ -23,6 +25,28 @@ impl BackgroundWorker<NotificationDeliveryWorkerArgs> for NotificationDeliveryWo
|
||||
}
|
||||
|
||||
async fn perform(&self, args: NotificationDeliveryWorkerArgs) -> Result<()> {
|
||||
subscriptions::process_delivery(&self.ctx, args.delivery_id).await
|
||||
if let Some(job_id) = args.job_id {
|
||||
if !worker_jobs::begin_job_execution(&self.ctx, job_id).await? {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
match subscriptions::process_delivery(&self.ctx, args.delivery_id).await {
|
||||
Ok(_) => {
|
||||
worker_jobs::mark_job_succeeded(
|
||||
&self.ctx,
|
||||
job_id,
|
||||
Some(serde_json::json!({ "delivery_id": args.delivery_id })),
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
Err(error) => {
|
||||
worker_jobs::mark_job_failed(&self.ctx, job_id, error.to_string()).await?;
|
||||
Err(error)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
subscriptions::process_delivery(&self.ctx, args.delivery_id).await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user