feat: ship blog platform admin and deploy stack
This commit is contained in:
8
backend/.dockerignore
Normal file
8
backend/.dockerignore
Normal file
@@ -0,0 +1,8 @@
|
||||
target
|
||||
.git
|
||||
.github
|
||||
.gitea
|
||||
node_modules
|
||||
*.log
|
||||
*.out
|
||||
*.err
|
||||
1
backend/.gitignore
vendored
1
backend/.gitignore
vendored
@@ -1,6 +1,5 @@
|
||||
**/config/local.yaml
|
||||
**/config/*.local.yaml
|
||||
**/config/production.yaml
|
||||
|
||||
# Generated by Cargo
|
||||
# will have compiled files and executables
|
||||
|
||||
157
backend/Cargo.lock
generated
157
backend/Cargo.lock
generated
@@ -2202,76 +2202,6 @@ dependencies = [
|
||||
"miniz_oxide",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fluent-bundle"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "01203cb8918f5711e73891b347816d932046f95f54207710bda99beaeb423bf4"
|
||||
dependencies = [
|
||||
"fluent-langneg",
|
||||
"fluent-syntax",
|
||||
"intl-memoizer",
|
||||
"intl_pluralrules",
|
||||
"rustc-hash",
|
||||
"self_cell",
|
||||
"smallvec",
|
||||
"unic-langid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fluent-langneg"
|
||||
version = "0.13.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7eebbe59450baee8282d71676f3bfed5689aeab00b27545e83e5f14b1195e8b0"
|
||||
dependencies = [
|
||||
"unic-langid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fluent-syntax"
|
||||
version = "0.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "54f0d287c53ffd184d04d8677f590f4ac5379785529e5e08b1c8083acdd5c198"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"thiserror 2.0.18",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fluent-template-macros"
|
||||
version = "0.13.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "748050b3fb6fd97b566aedff8e9e021389c963e73d5afbeb92752c2b8d686c6c"
|
||||
dependencies = [
|
||||
"flume",
|
||||
"ignore",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.117",
|
||||
"unic-langid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fluent-templates"
|
||||
version = "0.13.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "56264446a01f404469aef9cc5fd4a4d736f68a0f52482bf6d1a54d6e9bbd9476"
|
||||
dependencies = [
|
||||
"fluent-bundle",
|
||||
"fluent-langneg",
|
||||
"fluent-syntax",
|
||||
"fluent-template-macros",
|
||||
"flume",
|
||||
"heck 0.5.0",
|
||||
"ignore",
|
||||
"intl-memoizer",
|
||||
"log",
|
||||
"serde_json",
|
||||
"tera",
|
||||
"thiserror 2.0.18",
|
||||
"unic-langid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "flume"
|
||||
version = "0.11.1"
|
||||
@@ -3296,25 +3226,6 @@ dependencies = [
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "intl-memoizer"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "310da2e345f5eb861e7a07ee182262e94975051db9e4223e909ba90f392f163f"
|
||||
dependencies = [
|
||||
"type-map",
|
||||
"unic-langid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "intl_pluralrules"
|
||||
version = "7.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "078ea7b7c29a2b4df841a7f6ac8775ff6074020c6776d48491ce2268e068f972"
|
||||
dependencies = [
|
||||
"unic-langid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.12.0"
|
||||
@@ -4631,12 +4542,6 @@ dependencies = [
|
||||
"syn 2.0.117",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro-hack"
|
||||
version = "0.5.20+deprecated"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.106"
|
||||
@@ -5724,12 +5629,6 @@ dependencies = [
|
||||
"smallvec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "self_cell"
|
||||
version = "1.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b12e76d157a900eb52e81bc6e9f3069344290341720e9178cde2407113ac8d89"
|
||||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "1.0.27"
|
||||
@@ -6566,7 +6465,6 @@ dependencies = [
|
||||
"base64 0.22.1",
|
||||
"chrono",
|
||||
"fastembed",
|
||||
"fluent-templates",
|
||||
"include_dir",
|
||||
"insta",
|
||||
"loco-rs",
|
||||
@@ -6583,7 +6481,6 @@ dependencies = [
|
||||
"tower-http",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
"unic-langid",
|
||||
"uuid",
|
||||
"validator",
|
||||
]
|
||||
@@ -6689,7 +6586,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869"
|
||||
dependencies = [
|
||||
"displaydoc",
|
||||
"serde_core",
|
||||
"zerovec",
|
||||
]
|
||||
|
||||
@@ -7079,15 +6975,6 @@ version = "0.2.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
|
||||
|
||||
[[package]]
|
||||
name = "type-map"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cb30dbbd9036155e74adad6812e9898d03ec374946234fbcebd5dfc7b9187b90"
|
||||
dependencies = [
|
||||
"rustc-hash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typenum"
|
||||
version = "1.19.0"
|
||||
@@ -7110,49 +6997,6 @@ dependencies = [
|
||||
"web-time",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unic-langid"
|
||||
version = "0.9.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a28ba52c9b05311f4f6e62d5d9d46f094bd6e84cb8df7b3ef952748d752a7d05"
|
||||
dependencies = [
|
||||
"unic-langid-impl",
|
||||
"unic-langid-macros",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unic-langid-impl"
|
||||
version = "0.9.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dce1bf08044d4b7a94028c93786f8566047edc11110595914de93362559bc658"
|
||||
dependencies = [
|
||||
"tinystr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unic-langid-macros"
|
||||
version = "0.9.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d5957eb82e346d7add14182a3315a7e298f04e1ba4baac36f7f0dbfedba5fc25"
|
||||
dependencies = [
|
||||
"proc-macro-hack",
|
||||
"tinystr",
|
||||
"unic-langid-impl",
|
||||
"unic-langid-macros-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unic-langid-macros-impl"
|
||||
version = "0.9.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a1249a628de3ad34b821ecb1001355bca3940bcb2f88558f1a8bd82e977f75b5"
|
||||
dependencies = [
|
||||
"proc-macro-hack",
|
||||
"quote",
|
||||
"syn 2.0.117",
|
||||
"unic-langid-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicase"
|
||||
version = "2.9.0"
|
||||
@@ -8183,7 +8027,6 @@ version = "0.11.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"yoke",
|
||||
"zerofrom",
|
||||
"zerovec-derive",
|
||||
|
||||
@@ -36,10 +36,6 @@ chrono = { version = "0.4" }
|
||||
validator = { version = "0.20" }
|
||||
uuid = { version = "1.6", features = ["v4"] }
|
||||
include_dir = { version = "0.7" }
|
||||
# view engine i18n
|
||||
fluent-templates = { version = "0.13", features = ["tera"] }
|
||||
unic-langid = { version = "0.9" }
|
||||
# /view engine
|
||||
axum-extra = { version = "0.10", features = ["form"] }
|
||||
tower-http = { version = "0.6", features = ["cors"] }
|
||||
reqwest = { version = "0.12", default-features = false, features = ["blocking", "json", "multipart", "rustls-tls"] }
|
||||
|
||||
32
backend/Dockerfile
Normal file
32
backend/Dockerfile
Normal file
@@ -0,0 +1,32 @@
|
||||
# syntax=docker/dockerfile:1.7
|
||||
|
||||
FROM rust:1.88-bookworm AS builder
|
||||
WORKDIR /app
|
||||
|
||||
COPY Cargo.toml Cargo.lock ./
|
||||
COPY migration/Cargo.toml migration/Cargo.toml
|
||||
COPY src src
|
||||
COPY migration/src migration/src
|
||||
COPY config config
|
||||
COPY assets assets
|
||||
|
||||
RUN cargo build --release --locked --bin termi_api-cli
|
||||
|
||||
FROM debian:bookworm-slim AS runtime
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends ca-certificates tzdata wget \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /app
|
||||
COPY --from=builder /app/target/release/termi_api-cli /usr/local/bin/termi_api-cli
|
||||
COPY --from=builder /app/config ./config
|
||||
COPY --from=builder /app/assets ./assets
|
||||
COPY docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh
|
||||
RUN chmod +x /usr/local/bin/docker-entrypoint.sh
|
||||
|
||||
ENV RUST_LOG=info
|
||||
EXPOSE 5150
|
||||
HEALTHCHECK --interval=30s --timeout=3s --start-period=15s --retries=5 CMD wget -q -O /dev/null http://127.0.0.1:5150/healthz || exit 1
|
||||
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
|
||||
CMD ["termi_api-cli", "-e", "production", "start", "--no-banner"]
|
||||
@@ -1,58 +1,37 @@
|
||||
# Welcome to Loco :train:
|
||||
# backend
|
||||
|
||||
[Loco](https://loco.rs) is a web and API framework running on Rust.
|
||||
Loco.rs backend,当前仅保留 API 与后台鉴权相关逻辑,不再提供旧的 Tera HTML 后台页面。
|
||||
|
||||
This is the **SaaS starter** which includes a `User` model and authentication based on JWT.
|
||||
It also include configuration sections that help you pick either a frontend or a server-side template set up for your fullstack server.
|
||||
## 本地启动
|
||||
|
||||
|
||||
## Quick Start
|
||||
|
||||
```sh
|
||||
```powershell
|
||||
cargo loco start
|
||||
```
|
||||
|
||||
```sh
|
||||
$ cargo loco start
|
||||
Finished dev [unoptimized + debuginfo] target(s) in 21.63s
|
||||
Running `target/debug/myapp start`
|
||||
默认本地监听:
|
||||
|
||||
:
|
||||
:
|
||||
:
|
||||
- `http://localhost:5150`
|
||||
|
||||
controller/app_routes.rs:203: [Middleware] Adding log trace id
|
||||
## 当前职责
|
||||
|
||||
▄ ▀
|
||||
▀ ▄
|
||||
▄ ▀ ▄ ▄ ▄▀
|
||||
▄ ▀▄▄
|
||||
▄ ▀ ▀ ▀▄▀█▄
|
||||
▀█▄
|
||||
▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▀▀█
|
||||
██████ █████ ███ █████ ███ █████ ███ ▀█
|
||||
██████ █████ ███ █████ ▀▀▀ █████ ███ ▄█▄
|
||||
██████ █████ ███ █████ █████ ███ ████▄
|
||||
██████ █████ ███ █████ ▄▄▄ █████ ███ █████
|
||||
██████ █████ ███ ████ ███ █████ ███ ████▀
|
||||
▀▀▀██▄ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ██▀
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
https://loco.rs
|
||||
- 文章 / 分类 / 标签 / 评论 / 友链 / 评测 API
|
||||
- admin 登录态与后台接口
|
||||
- 站点设置与 AI 相关后端能力
|
||||
- Markdown frontmatter 与数据库双向同步
|
||||
- 内容生命周期:`draft / published / scheduled / offline / expired`
|
||||
- 可见性与 SEO:`public / unlisted / private`、`canonical`、`noindex`、`OG`、redirect
|
||||
- Webhook 通知:新评论 / 新友链申请
|
||||
- 内容消费统计:`page_view / read_progress / read_complete`
|
||||
|
||||
environment: development
|
||||
database: automigrate
|
||||
logger: debug
|
||||
compilation: debug
|
||||
modes: server
|
||||
## 生产部署
|
||||
|
||||
listening on http://localhost:5150
|
||||
```
|
||||
生产环境推荐通过环境变量注入:
|
||||
|
||||
## Full Stack Serving
|
||||
- `APP_BASE_URL`
|
||||
- `DATABASE_URL`
|
||||
- `REDIS_URL`
|
||||
- `JWT_SECRET`
|
||||
|
||||
You can check your [configuration](config/development.yaml) to pick either frontend setup or server-side rendered template, and activate the relevant configuration sections.
|
||||
Docker / compose 相关示例见仓库根目录:
|
||||
|
||||
|
||||
## Getting help
|
||||
|
||||
Check out [a quick tour](https://loco.rs/docs/getting-started/tour/) or [the complete guide](https://loco.rs/docs/getting-started/guide/).
|
||||
- `deploy/docker/compose.package.yml`
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
<html><body>
|
||||
not found :-(
|
||||
</body></html>
|
||||
@@ -1,11 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="refresh" content="0; url=/admin">
|
||||
<title>Redirecting...</title>
|
||||
</head>
|
||||
<body>
|
||||
<p>Redirecting to <a href="/admin">Admin Dashboard</a>...</p>
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,705 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="zh-CN">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>{{ page_title | default(value="Termi Admin") }} · Termi Admin</title>
|
||||
<style>
|
||||
:root {
|
||||
--bg: #f4f4f5;
|
||||
--bg-panel: rgba(255, 255, 255, 0.88);
|
||||
--bg-panel-strong: rgba(255, 255, 255, 0.98);
|
||||
--line: rgba(24, 24, 27, 0.09);
|
||||
--line-strong: rgba(24, 24, 27, 0.16);
|
||||
--text: #09090b;
|
||||
--text-soft: #52525b;
|
||||
--text-mute: #71717a;
|
||||
--accent: #18181b;
|
||||
--accent-2: #2563eb;
|
||||
--accent-3: #dc2626;
|
||||
--accent-4: #16a34a;
|
||||
--shadow: 0 10px 30px rgba(15, 23, 42, 0.08);
|
||||
--radius-xl: 24px;
|
||||
--radius-lg: 18px;
|
||||
--radius-md: 12px;
|
||||
--font-sans: "Inter", "Segoe UI", "PingFang SC", sans-serif;
|
||||
--font-mono: "JetBrains Mono", "Cascadia Code", monospace;
|
||||
}
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
min-height: 100vh;
|
||||
font-family: var(--font-sans);
|
||||
color: var(--text);
|
||||
background:
|
||||
radial-gradient(circle at top, rgba(37, 99, 235, 0.08), transparent 30%),
|
||||
linear-gradient(180deg, #fafafa 0%, #f4f4f5 100%);
|
||||
}
|
||||
|
||||
a {
|
||||
color: inherit;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
button,
|
||||
input,
|
||||
textarea,
|
||||
select {
|
||||
font: inherit;
|
||||
}
|
||||
|
||||
.shell {
|
||||
display: grid;
|
||||
grid-template-columns: 290px minmax(0, 1fr);
|
||||
min-height: 100vh;
|
||||
gap: 24px;
|
||||
padding: 24px;
|
||||
}
|
||||
|
||||
.sidebar,
|
||||
.surface,
|
||||
.stat,
|
||||
.table-panel,
|
||||
.hero-card,
|
||||
.form-panel,
|
||||
.login-panel {
|
||||
border: 1px solid var(--line);
|
||||
border-radius: var(--radius-xl);
|
||||
background: var(--bg-panel);
|
||||
box-shadow: var(--shadow);
|
||||
backdrop-filter: blur(16px);
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
padding: 28px 22px;
|
||||
position: sticky;
|
||||
top: 24px;
|
||||
height: calc(100vh - 48px);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 24px;
|
||||
}
|
||||
|
||||
.brand-mark {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 52px;
|
||||
height: 52px;
|
||||
border-radius: 16px;
|
||||
background: #111827;
|
||||
border: 1px solid #111827;
|
||||
font-family: var(--font-mono);
|
||||
font-weight: 700;
|
||||
color: #fafafa;
|
||||
}
|
||||
|
||||
.brand-title {
|
||||
margin: 14px 0 6px;
|
||||
font-size: 1.35rem;
|
||||
}
|
||||
|
||||
.brand-copy {
|
||||
margin: 0;
|
||||
color: var(--text-soft);
|
||||
line-height: 1.6;
|
||||
font-size: 0.95rem;
|
||||
}
|
||||
|
||||
.nav-group {
|
||||
display: grid;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.nav-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
padding: 14px 16px;
|
||||
border-radius: 18px;
|
||||
color: var(--text-soft);
|
||||
border: 1px solid transparent;
|
||||
transition: 160ms ease;
|
||||
}
|
||||
|
||||
.nav-item:hover,
|
||||
.nav-item.active {
|
||||
background: rgba(255, 255, 255, 0.98);
|
||||
border-color: var(--line);
|
||||
color: var(--text);
|
||||
transform: translateX(2px);
|
||||
}
|
||||
|
||||
.nav-item.active {
|
||||
box-shadow: inset 0 0 0 1px rgba(24, 24, 27, 0.06);
|
||||
}
|
||||
|
||||
.nav-kicker {
|
||||
margin-top: auto;
|
||||
padding: 18px;
|
||||
border-radius: 22px;
|
||||
background: rgba(255, 255, 255, 0.82);
|
||||
border: 1px solid var(--line);
|
||||
}
|
||||
|
||||
.nav-kicker strong {
|
||||
display: block;
|
||||
margin-bottom: 6px;
|
||||
font-size: 0.98rem;
|
||||
}
|
||||
|
||||
.nav-kicker p {
|
||||
margin: 0;
|
||||
color: var(--text-soft);
|
||||
line-height: 1.55;
|
||||
font-size: 0.92rem;
|
||||
}
|
||||
|
||||
.content-shell {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 18px;
|
||||
}
|
||||
|
||||
.surface {
|
||||
padding: 26px 28px;
|
||||
}
|
||||
|
||||
.topbar {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
gap: 20px;
|
||||
align-items: flex-start;
|
||||
}
|
||||
|
||||
.eyebrow {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
padding: 6px 10px;
|
||||
border-radius: 999px;
|
||||
background: rgba(24, 24, 27, 0.05);
|
||||
color: var(--text-soft);
|
||||
font-size: 0.84rem;
|
||||
letter-spacing: 0.04em;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.page-title {
|
||||
margin: 12px 0 8px;
|
||||
font-size: clamp(1.7rem, 2.2vw, 2.5rem);
|
||||
line-height: 1.1;
|
||||
}
|
||||
|
||||
.page-description {
|
||||
margin: 0;
|
||||
max-width: 760px;
|
||||
color: var(--text-soft);
|
||||
line-height: 1.7;
|
||||
}
|
||||
|
||||
.toolbar {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 10px;
|
||||
justify-content: flex-end;
|
||||
}
|
||||
|
||||
.btn {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 8px;
|
||||
min-height: 44px;
|
||||
padding: 0 16px;
|
||||
border-radius: 14px;
|
||||
border: 1px solid transparent;
|
||||
cursor: pointer;
|
||||
transition: 160ms ease;
|
||||
font-weight: 600;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.btn:hover {
|
||||
transform: translateY(-1px);
|
||||
}
|
||||
|
||||
.btn-primary {
|
||||
background: var(--accent);
|
||||
color: #fafafa;
|
||||
box-shadow: 0 10px 24px rgba(24, 24, 27, 0.16);
|
||||
}
|
||||
|
||||
.btn-ghost {
|
||||
background: rgba(255, 255, 255, 0.98);
|
||||
border-color: var(--line);
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
.btn-danger {
|
||||
background: rgba(220, 38, 38, 0.08);
|
||||
border-color: rgba(220, 38, 38, 0.14);
|
||||
color: var(--accent-3);
|
||||
}
|
||||
|
||||
.btn-success {
|
||||
background: rgba(22, 163, 74, 0.08);
|
||||
border-color: rgba(22, 163, 74, 0.14);
|
||||
color: var(--accent-4);
|
||||
}
|
||||
|
||||
.btn-warning {
|
||||
background: rgba(245, 158, 11, 0.08);
|
||||
border-color: rgba(245, 158, 11, 0.16);
|
||||
color: #b45309;
|
||||
}
|
||||
|
||||
.content-grid {
|
||||
display: grid;
|
||||
gap: 18px;
|
||||
}
|
||||
|
||||
.stats-grid,
|
||||
.card-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
.stat,
|
||||
.hero-card,
|
||||
.table-panel,
|
||||
.form-panel {
|
||||
padding: 22px;
|
||||
border-radius: var(--radius-lg);
|
||||
background: var(--bg-panel-strong);
|
||||
}
|
||||
|
||||
.stat-label,
|
||||
.muted,
|
||||
.table-note,
|
||||
.field-hint,
|
||||
.badge-soft {
|
||||
color: var(--text-mute);
|
||||
}
|
||||
|
||||
.stat-value {
|
||||
margin: 10px 0 6px;
|
||||
font-size: 2rem;
|
||||
font-weight: 700;
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
.tone-blue .stat-value { color: var(--accent-2); }
|
||||
.tone-gold .stat-value { color: var(--accent); }
|
||||
.tone-green .stat-value { color: var(--accent-4); }
|
||||
.tone-pink .stat-value { color: var(--accent-3); }
|
||||
.tone-violet .stat-value { color: #7a5ef4; }
|
||||
|
||||
.table-panel {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.table-head {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
gap: 14px;
|
||||
align-items: flex-end;
|
||||
margin-bottom: 14px;
|
||||
}
|
||||
|
||||
.table-head h2,
|
||||
.hero-card h2 {
|
||||
margin: 0 0 6px;
|
||||
font-size: 1.15rem;
|
||||
}
|
||||
|
||||
.table-wrap {
|
||||
overflow: auto;
|
||||
border-radius: 16px;
|
||||
border: 1px solid var(--line);
|
||||
background: rgba(255, 255, 255, 0.98);
|
||||
}
|
||||
|
||||
table {
|
||||
width: 100%;
|
||||
border-collapse: collapse;
|
||||
min-width: 880px;
|
||||
}
|
||||
|
||||
th,
|
||||
td {
|
||||
padding: 16px;
|
||||
text-align: left;
|
||||
vertical-align: top;
|
||||
border-bottom: 1px solid rgba(93, 76, 56, 0.1);
|
||||
}
|
||||
|
||||
th {
|
||||
position: sticky;
|
||||
top: 0;
|
||||
background: rgba(250, 250, 250, 0.98);
|
||||
color: var(--text-soft);
|
||||
font-size: 0.8rem;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: 0.06em;
|
||||
}
|
||||
|
||||
tr:last-child td {
|
||||
border-bottom: 0;
|
||||
}
|
||||
|
||||
.item-title {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 6px;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.item-title strong {
|
||||
font-size: 0.98rem;
|
||||
}
|
||||
|
||||
.item-meta,
|
||||
.mono {
|
||||
color: var(--text-soft);
|
||||
font-family: var(--font-mono);
|
||||
font-size: 0.84rem;
|
||||
word-break: break-all;
|
||||
}
|
||||
|
||||
.badge,
|
||||
.chip {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
min-height: 28px;
|
||||
padding: 0 10px;
|
||||
border-radius: 999px;
|
||||
font-size: 0.78rem;
|
||||
border: 1px solid transparent;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.badge-success {
|
||||
color: var(--accent-4);
|
||||
background: rgba(93, 122, 45, 0.1);
|
||||
border-color: rgba(93, 122, 45, 0.14);
|
||||
}
|
||||
|
||||
.badge-warning {
|
||||
color: var(--accent);
|
||||
background: rgba(202, 94, 45, 0.1);
|
||||
border-color: rgba(202, 94, 45, 0.14);
|
||||
}
|
||||
|
||||
.badge-danger {
|
||||
color: var(--accent-3);
|
||||
background: rgba(156, 61, 84, 0.1);
|
||||
border-color: rgba(156, 61, 84, 0.14);
|
||||
}
|
||||
|
||||
.chip {
|
||||
background: rgba(241, 245, 249, 0.95);
|
||||
color: var(--text-soft);
|
||||
border-color: rgba(148, 163, 184, 0.18);
|
||||
}
|
||||
|
||||
.actions {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.inline-links {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.inline-link {
|
||||
color: var(--accent-2);
|
||||
font-size: 0.88rem;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.inline-link:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.empty {
|
||||
padding: 40px 18px;
|
||||
text-align: center;
|
||||
color: var(--text-soft);
|
||||
}
|
||||
|
||||
.form-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(260px, 1fr));
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
.field-wide {
|
||||
grid-column: 1 / -1;
|
||||
}
|
||||
|
||||
.field label {
|
||||
display: block;
|
||||
margin-bottom: 8px;
|
||||
font-size: 0.9rem;
|
||||
color: var(--text-soft);
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
.field input,
|
||||
.field textarea,
|
||||
.field select {
|
||||
width: 100%;
|
||||
border: 1px solid var(--line);
|
||||
border-radius: 14px;
|
||||
background: rgba(255, 255, 255, 0.98);
|
||||
color: var(--text);
|
||||
padding: 14px 16px;
|
||||
}
|
||||
|
||||
.field textarea {
|
||||
resize: vertical;
|
||||
min-height: 132px;
|
||||
}
|
||||
|
||||
.inline-form {
|
||||
display: grid;
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.inline-form.compact {
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.compact-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(140px, 1fr));
|
||||
gap: 8px;
|
||||
align-items: end;
|
||||
}
|
||||
|
||||
.compact-grid textarea,
|
||||
.compact-grid input,
|
||||
.compact-grid select {
|
||||
width: 100%;
|
||||
min-height: 40px;
|
||||
border: 1px solid var(--line);
|
||||
border-radius: 12px;
|
||||
background: rgba(255, 255, 255, 0.98);
|
||||
color: var(--text);
|
||||
padding: 10px 12px;
|
||||
}
|
||||
|
||||
.compact-grid textarea {
|
||||
min-height: 84px;
|
||||
resize: vertical;
|
||||
}
|
||||
|
||||
.compact-actions {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.notice {
|
||||
display: none;
|
||||
margin-top: 14px;
|
||||
padding: 14px 16px;
|
||||
border-radius: 14px;
|
||||
border: 1px solid transparent;
|
||||
}
|
||||
|
||||
.notice.show {
|
||||
display: block;
|
||||
}
|
||||
|
||||
.notice-success {
|
||||
color: var(--accent-4);
|
||||
background: rgba(22, 163, 74, 0.08);
|
||||
border-color: rgba(22, 163, 74, 0.14);
|
||||
}
|
||||
|
||||
.notice-error {
|
||||
color: var(--accent-3);
|
||||
background: rgba(220, 38, 38, 0.08);
|
||||
border-color: rgba(220, 38, 38, 0.14);
|
||||
}
|
||||
|
||||
.login-shell {
|
||||
min-height: 100vh;
|
||||
display: grid;
|
||||
place-items: center;
|
||||
padding: 24px;
|
||||
}
|
||||
|
||||
.login-panel {
|
||||
width: min(520px, 100%);
|
||||
padding: 34px;
|
||||
}
|
||||
|
||||
.login-panel h1 {
|
||||
margin: 18px 0 10px;
|
||||
font-size: 2rem;
|
||||
}
|
||||
|
||||
.login-panel p {
|
||||
margin: 0;
|
||||
color: var(--text-soft);
|
||||
line-height: 1.7;
|
||||
}
|
||||
|
||||
.login-error {
|
||||
display: none;
|
||||
margin-top: 18px;
|
||||
padding: 14px 16px;
|
||||
border-radius: 14px;
|
||||
background: rgba(220, 38, 38, 0.08);
|
||||
border: 1px solid rgba(220, 38, 38, 0.14);
|
||||
color: var(--accent-3);
|
||||
}
|
||||
|
||||
.login-error.show {
|
||||
display: block;
|
||||
}
|
||||
|
||||
@media (max-width: 1100px) {
|
||||
.shell {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
|
||||
.sidebar {
|
||||
position: static;
|
||||
height: auto;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 760px) {
|
||||
.shell,
|
||||
.surface {
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
.topbar {
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.toolbar {
|
||||
width: 100%;
|
||||
justify-content: flex-start;
|
||||
}
|
||||
|
||||
table {
|
||||
min-width: 760px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
{% block body %}
|
||||
<div class="shell">
|
||||
<aside class="sidebar">
|
||||
<div>
|
||||
<div class="brand-mark">/></div>
|
||||
<h1 class="brand-title">Termi Admin</h1>
|
||||
<p class="brand-copy">后台数据直接联动前台页面。你可以在这里审核评论和友链、检查分类标签,并跳到对应前台页面确认效果。</p>
|
||||
</div>
|
||||
|
||||
<nav class="nav-group">
|
||||
<a href="/admin" class="nav-item {% if active_nav == 'dashboard' %}active{% endif %}">概览面板</a>
|
||||
<a href="/admin/posts" class="nav-item {% if active_nav == 'posts' %}active{% endif %}">文章管理</a>
|
||||
<a href="/admin/comments" class="nav-item {% if active_nav == 'comments' %}active{% endif %}">评论审核</a>
|
||||
<a href="/admin/categories" class="nav-item {% if active_nav == 'categories' %}active{% endif %}">分类管理</a>
|
||||
<a href="/admin/tags" class="nav-item {% if active_nav == 'tags' %}active{% endif %}">标签管理</a>
|
||||
<a href="/admin/reviews" class="nav-item {% if active_nav == 'reviews' %}active{% endif %}">评价管理</a>
|
||||
<a href="/admin/friend_links" class="nav-item {% if active_nav == 'friend_links' %}active{% endif %}">友链申请</a>
|
||||
<a href="/admin/site-settings" class="nav-item {% if active_nav == 'site_settings' %}active{% endif %}">站点设置</a>
|
||||
</nav>
|
||||
|
||||
<div class="nav-kicker">
|
||||
<strong>前台联调入口</strong>
|
||||
<p>所有管理页都带了前台直达链接,处理完数据后可以立刻跳转验证。</p>
|
||||
</div>
|
||||
</aside>
|
||||
|
||||
<div class="content-shell">
|
||||
<header class="surface topbar">
|
||||
<div>
|
||||
<span class="eyebrow">Unified Admin</span>
|
||||
<h1 class="page-title">{{ page_title | default(value="Termi Admin") }}</h1>
|
||||
<p class="page-description">{{ page_description | default(value="统一处理后台数据与前台联调。") }}</p>
|
||||
</div>
|
||||
<div class="toolbar">
|
||||
{% for item in header_actions | default(value=[]) %}
|
||||
<a
|
||||
href="{{ item.href }}"
|
||||
class="btn btn-{{ item.variant }}"
|
||||
{% if item.external %}target="_blank" rel="noreferrer noopener"{% endif %}
|
||||
>
|
||||
{{ item.label }}
|
||||
</a>
|
||||
{% endfor %}
|
||||
<a href="/admin/logout" class="btn btn-danger">退出后台</a>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<main class="content-grid">
|
||||
{% block main_content %}{% endblock %}
|
||||
</main>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
<script>
|
||||
async function adminPatch(url, payload, successMessage) {
|
||||
const response = await fetch(url, {
|
||||
method: "PATCH",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify(payload)
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(await response.text() || "request failed");
|
||||
}
|
||||
|
||||
if (successMessage) {
|
||||
alert(successMessage);
|
||||
}
|
||||
|
||||
location.reload();
|
||||
}
|
||||
|
||||
async function adminDelete(url, successMessage) {
|
||||
const confirmed = confirm("确认删除这条记录吗?此操作无法撤销。");
|
||||
if (!confirmed) {
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: "DELETE"
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(await response.text() || "request failed");
|
||||
}
|
||||
|
||||
if (successMessage) {
|
||||
alert(successMessage);
|
||||
}
|
||||
|
||||
location.reload();
|
||||
}
|
||||
</script>
|
||||
{% block page_scripts %}{% endblock %}
|
||||
</body>
|
||||
</html>
|
||||
@@ -1,85 +0,0 @@
|
||||
{% extends "admin/base.html" %}
|
||||
|
||||
{% block main_content %}
|
||||
<section class="form-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>新增分类</h2>
|
||||
<div class="table-note">这里维护分类字典。文章 Markdown 导入时会优先复用这里的分类,不存在才自动创建。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<form method="post" action="/admin/categories" class="inline-form">
|
||||
<div class="compact-grid">
|
||||
<input type="text" name="name" placeholder="分类名,例如 Technology" value="{{ create_form.name }}" required>
|
||||
<input type="text" name="slug" placeholder="slug,可留空自动生成" value="{{ create_form.slug }}">
|
||||
</div>
|
||||
<div class="compact-actions">
|
||||
<button type="submit" class="btn btn-primary">创建分类</button>
|
||||
</div>
|
||||
</form>
|
||||
</section>
|
||||
|
||||
<section class="table-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>分类列表</h2>
|
||||
<div class="table-note">分类名称会作为文章展示名称使用,文章数来自当前已同步的真实内容。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if rows | length > 0 %}
|
||||
<div class="table-wrap">
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>分类</th>
|
||||
<th>文章数</th>
|
||||
<th>最近文章</th>
|
||||
<th>操作</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row in rows %}
|
||||
<tr>
|
||||
<td class="mono">#{{ row.id }}</td>
|
||||
<td>
|
||||
<form method="post" action="/admin/categories/{{ row.id }}/update" class="inline-form compact">
|
||||
<div class="compact-grid">
|
||||
<input type="text" name="name" value="{{ row.name }}" required>
|
||||
<input type="text" name="slug" value="{{ row.slug }}" required>
|
||||
</div>
|
||||
<div class="compact-actions">
|
||||
<button type="submit" class="btn btn-success">保存</button>
|
||||
<a href="{{ row.api_url }}" class="btn btn-ghost" target="_blank" rel="noreferrer noopener">API</a>
|
||||
</div>
|
||||
</form>
|
||||
</td>
|
||||
<td><span class="chip">{{ row.count }} 篇</span></td>
|
||||
<td>
|
||||
{% if row.latest_frontend_url %}
|
||||
<a href="{{ row.latest_frontend_url }}" class="inline-link" target="_blank" rel="noreferrer noopener">{{ row.latest_title }}</a>
|
||||
{% else %}
|
||||
<span class="badge-soft">{{ row.latest_title }}</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
<div class="actions">
|
||||
<a href="{{ row.frontend_url }}" class="btn btn-ghost" target="_blank" rel="noreferrer noopener">前台分类页</a>
|
||||
<a href="{{ row.articles_url }}" class="btn btn-primary" target="_blank" rel="noreferrer noopener">前台筛选</a>
|
||||
<form method="post" action="/admin/categories/{{ row.id }}/delete">
|
||||
<button type="submit" class="btn btn-danger">删除</button>
|
||||
</form>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="empty">暂无分类数据。</div>
|
||||
{% endif %}
|
||||
</section>
|
||||
{% endblock %}
|
||||
@@ -1,147 +0,0 @@
|
||||
{% extends "admin/base.html" %}
|
||||
|
||||
{% block main_content %}
|
||||
<section class="form-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>评论筛选</h2>
|
||||
<div class="table-note">按 scope、审核状态、文章 slug 或关键词快速定位评论,尤其适合处理段落评论和垃圾留言。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<form method="get" action="/admin/comments" class="inline-form compact">
|
||||
<div class="compact-grid">
|
||||
<div class="field">
|
||||
<label for="scope">评论类型</label>
|
||||
<select id="scope" name="scope">
|
||||
<option value="" {% if filters.scope == "" %}selected{% endif %}>全部</option>
|
||||
<option value="article" {% if filters.scope == "article" %}selected{% endif %}>全文评论</option>
|
||||
<option value="paragraph" {% if filters.scope == "paragraph" %}selected{% endif %}>段落评论</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="field">
|
||||
<label for="approved">审核状态</label>
|
||||
<select id="approved" name="approved">
|
||||
<option value="" {% if filters.approved == "" %}selected{% endif %}>全部</option>
|
||||
<option value="true" {% if filters.approved == "true" %}selected{% endif %}>已审核</option>
|
||||
<option value="false" {% if filters.approved == "false" %}selected{% endif %}>待审核</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="field">
|
||||
<label for="post_slug">文章</label>
|
||||
<select id="post_slug" name="post_slug">
|
||||
<option value="" {% if filters.post_slug == "" %}selected{% endif %}>全部文章</option>
|
||||
{% for slug in post_options %}
|
||||
<option value="{{ slug }}" {% if filters.post_slug == slug %}selected{% endif %}>{{ slug }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<div class="field">
|
||||
<label for="q">关键词</label>
|
||||
<input
|
||||
id="q"
|
||||
type="text"
|
||||
name="q"
|
||||
value="{{ filters.q }}"
|
||||
placeholder="作者 / 内容 / 段落 key"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="compact-actions">
|
||||
<button type="submit" class="btn btn-primary">应用筛选</button>
|
||||
<a href="/admin/comments" class="btn btn-ghost">清空</a>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div class="inline-links" style="margin-top: 14px;">
|
||||
{% for stat in stats %}
|
||||
<span class="chip">{{ stat.label }} · {{ stat.value }}</span>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section class="table-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>评论队列</h2>
|
||||
<div class="table-note">处理前台真实评论,并能一键跳到对应文章或段落核对上下文。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if rows | length > 0 %}
|
||||
<div class="table-wrap">
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>作者 / 文章</th>
|
||||
<th>内容与上下文</th>
|
||||
<th>状态</th>
|
||||
<th>时间</th>
|
||||
<th>操作</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row in rows %}
|
||||
<tr>
|
||||
<td class="mono">#{{ row.id }}</td>
|
||||
<td>
|
||||
<div class="item-title">
|
||||
<strong>{{ row.author }}</strong>
|
||||
<span class="item-meta">{{ row.post_slug }}</span>
|
||||
{% if row.scope == "paragraph" %}
|
||||
<span class="badge badge-warning">{{ row.scope_label }}</span>
|
||||
{% else %}
|
||||
<span class="badge">{{ row.scope_label }}</span>
|
||||
{% endif %}
|
||||
{% if row.frontend_url %}
|
||||
<a href="{{ row.frontend_url }}" class="inline-link" target="_blank" rel="noreferrer noopener">
|
||||
{% if row.scope == "paragraph" %}跳到前台段落{% else %}跳到前台文章{% endif %}
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
</td>
|
||||
<td>
|
||||
<div class="item-title">
|
||||
<strong>{{ row.content }}</strong>
|
||||
{% if row.reply_target != "-" %}
|
||||
<span class="item-meta">回复目标:{{ row.reply_target }}</span>
|
||||
{% endif %}
|
||||
{% if row.scope == "paragraph" and row.paragraph_excerpt != "-" %}
|
||||
<span class="item-meta">段落上下文:{{ row.paragraph_excerpt }}</span>
|
||||
{% endif %}
|
||||
{% if row.scope == "paragraph" and row.paragraph_key != "-" %}
|
||||
<span class="item-meta">段落 key:{{ row.paragraph_key }}</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</td>
|
||||
<td>
|
||||
{% if row.approved %}
|
||||
<span class="badge badge-success">已审核</span>
|
||||
{% else %}
|
||||
<span class="badge badge-warning">待审核</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
<td class="mono">{{ row.created_at }}</td>
|
||||
<td>
|
||||
<div class="actions">
|
||||
<button class="btn btn-success" onclick='adminPatch("{{ row.api_url }}", {"approved": true}, "评论状态已更新")'>通过</button>
|
||||
<button class="btn btn-warning" onclick='adminPatch("{{ row.api_url }}", {"approved": false}, "评论状态已更新")'>待审</button>
|
||||
<button class="btn btn-danger" onclick='adminDelete("{{ row.api_url }}", "评论已删除")'>删除</button>
|
||||
<a href="{{ row.api_url }}" class="btn btn-ghost" target="_blank" rel="noreferrer noopener">API</a>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="empty">当前筛选条件下暂无评论数据。</div>
|
||||
{% endif %}
|
||||
</section>
|
||||
{% endblock %}
|
||||
@@ -1,64 +0,0 @@
|
||||
{% extends "admin/base.html" %}
|
||||
|
||||
{% block main_content %}
|
||||
<section class="table-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>友链审核</h2>
|
||||
<div class="table-note">前台提交后会进入这里,你可以审核状态,再跳去前台友链页确认展示。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if rows | length > 0 %}
|
||||
<div class="table-wrap">
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>站点</th>
|
||||
<th>分类</th>
|
||||
<th>状态</th>
|
||||
<th>时间</th>
|
||||
<th>操作</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row in rows %}
|
||||
<tr>
|
||||
<td class="mono">#{{ row.id }}</td>
|
||||
<td>
|
||||
<div class="item-title">
|
||||
<strong>{{ row.site_name }}</strong>
|
||||
<a href="{{ row.site_url }}" class="inline-link" target="_blank" rel="noreferrer noopener">{{ row.site_url }}</a>
|
||||
<span class="item-meta">{{ row.description }}</span>
|
||||
</div>
|
||||
</td>
|
||||
<td>{{ row.category_name }}</td>
|
||||
<td>
|
||||
{% if row.status == "已通过" %}
|
||||
<span class="badge badge-success">{{ row.status }}</span>
|
||||
{% elif row.status == "已拒绝" %}
|
||||
<span class="badge badge-danger">{{ row.status }}</span>
|
||||
{% else %}
|
||||
<span class="badge badge-warning">{{ row.status }}</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
<td class="mono">{{ row.created_at }}</td>
|
||||
<td>
|
||||
<div class="actions">
|
||||
<button class="btn btn-success" onclick='adminPatch("{{ row.api_url }}", {"status": "approved"}, "友链状态已更新")'>通过</button>
|
||||
<button class="btn btn-warning" onclick='adminPatch("{{ row.api_url }}", {"status": "pending"}, "友链状态已更新")'>待审</button>
|
||||
<button class="btn btn-danger" onclick='adminPatch("{{ row.api_url }}", {"status": "rejected"}, "友链状态已更新")'>拒绝</button>
|
||||
<a href="{{ row.frontend_page_url }}" class="btn btn-ghost" target="_blank" rel="noreferrer noopener">前台友链页</a>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="empty">暂无友链申请数据。</div>
|
||||
{% endif %}
|
||||
</section>
|
||||
{% endblock %}
|
||||
@@ -1,29 +0,0 @@
|
||||
{% extends "admin/base.html" %}
|
||||
|
||||
{% block main_content %}
|
||||
<section class="stats-grid">
|
||||
{% for stat in stats %}
|
||||
<article class="stat tone-{{ stat.tone }}">
|
||||
<div class="stat-label">{{ stat.label }}</div>
|
||||
<div class="stat-value">{{ stat.value }}</div>
|
||||
<div class="muted">{{ stat.note }}</div>
|
||||
</article>
|
||||
{% endfor %}
|
||||
</section>
|
||||
|
||||
<section class="hero-card">
|
||||
<h2>{{ site_profile.site_name }}</h2>
|
||||
<p class="page-description" style="margin-bottom: 10px;">{{ site_profile.site_description }}</p>
|
||||
<a href="{{ site_profile.site_url }}" class="inline-link" target="_blank" rel="noreferrer noopener">{{ site_profile.site_url }}</a>
|
||||
</section>
|
||||
|
||||
<section class="card-grid">
|
||||
{% for card in nav_cards %}
|
||||
<a href="{{ card.href }}" class="hero-card">
|
||||
<h2>{{ card.title }}</h2>
|
||||
<p class="page-description" style="margin-bottom: 10px;">{{ card.description }}</p>
|
||||
<span class="chip">{{ card.meta }}</span>
|
||||
</a>
|
||||
{% endfor %}
|
||||
</section>
|
||||
{% endblock %}
|
||||
@@ -1,35 +0,0 @@
|
||||
{% extends "admin/base.html" %}
|
||||
|
||||
{% block body %}
|
||||
<div class="login-shell">
|
||||
<section class="login-panel">
|
||||
<span class="eyebrow">Termi Admin</span>
|
||||
<div class="brand-mark" style="margin-top: 18px;">/></div>
|
||||
<h1>后台管理入口</h1>
|
||||
<p>评论审核、友链申请、分类标签检查和站点设置都在这里统一处理。当前后台界面已经走 Tera 模板,不再在 Rust 里硬拼整页 HTML。</p>
|
||||
|
||||
<div class="login-error {% if show_error %}show{% endif %}">
|
||||
用户名或密码错误,请重试。
|
||||
</div>
|
||||
|
||||
<form method="POST" action="/admin/login" class="form-grid" style="margin-top: 22px;">
|
||||
<div class="field field-wide">
|
||||
<label>用户名</label>
|
||||
<input name="username" placeholder="admin" required>
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>密码</label>
|
||||
<input type="password" name="password" placeholder="admin123" required>
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<button type="submit" class="btn btn-primary" style="width: 100%;">进入后台</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div class="hero-card" style="margin-top: 18px;">
|
||||
<h2>默认测试账号</h2>
|
||||
<p class="mono">admin / admin123</p>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -1,70 +0,0 @@
|
||||
{% extends "admin/base.html" %}
|
||||
|
||||
{% block main_content %}
|
||||
<section class="form-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>{{ editor.title }}</h2>
|
||||
<div class="table-note">当前源文件:<span class="mono">{{ editor.file_path }}</span></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<form id="markdown-editor-form" class="form-grid">
|
||||
<div class="field field-wide">
|
||||
<label>Slug</label>
|
||||
<input value="{{ editor.slug }}" readonly>
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>Markdown 文件内容</label>
|
||||
<textarea id="markdown-content" name="markdown" style="min-height: 65vh; font-family: var(--font-mono); line-height: 1.65;">{{ editor.markdown }}</textarea>
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<div class="actions">
|
||||
<button type="submit" class="btn btn-primary">保存 Markdown</button>
|
||||
</div>
|
||||
<div class="field-hint" style="margin-top: 10px;">这里保存的是服务器上的原始 Markdown 文件。你也可以直接在服务器用编辑器打开这个路径修改。</div>
|
||||
<div id="notice" class="notice"></div>
|
||||
</div>
|
||||
</form>
|
||||
</section>
|
||||
{% endblock %}
|
||||
|
||||
{% block page_scripts %}
|
||||
<script>
|
||||
const markdownForm = document.getElementById("markdown-editor-form");
|
||||
const markdownField = document.getElementById("markdown-content");
|
||||
const markdownNotice = document.getElementById("notice");
|
||||
const markdownSlug = "{{ editor.slug }}";
|
||||
|
||||
function showMarkdownNotice(message, kind) {
|
||||
markdownNotice.textContent = message;
|
||||
markdownNotice.className = "notice show " + (kind === "success" ? "notice-success" : "notice-error");
|
||||
}
|
||||
|
||||
markdownForm?.addEventListener("submit", async (event) => {
|
||||
event.preventDefault();
|
||||
|
||||
try {
|
||||
const response = await fetch(`/api/posts/slug/${markdownSlug}/markdown`, {
|
||||
method: "PUT",
|
||||
headers: {
|
||||
"Content-Type": "application/json"
|
||||
},
|
||||
body: JSON.stringify({
|
||||
markdown: markdownField.value
|
||||
})
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(await response.text() || "save failed");
|
||||
}
|
||||
|
||||
const payload = await response.json();
|
||||
markdownField.value = payload.markdown;
|
||||
showMarkdownNotice("Markdown 文件已保存并同步到数据库。", "success");
|
||||
} catch (error) {
|
||||
showMarkdownNotice("保存失败:" + (error?.message || "unknown error"), "error");
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
@@ -1,199 +0,0 @@
|
||||
{% extends "admin/base.html" %}
|
||||
|
||||
{% block main_content %}
|
||||
<section class="form-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>新建 Markdown 文章</h2>
|
||||
<div class="table-note">直接生成 `content/posts/*.md` 文件,后端会自动解析 frontmatter、同步分类和标签。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<form method="post" action="/admin/posts" class="form-grid">
|
||||
<div class="field">
|
||||
<label>标题</label>
|
||||
<input type="text" name="title" value="{{ create_form.title }}" required>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>Slug</label>
|
||||
<input type="text" name="slug" value="{{ create_form.slug }}" placeholder="可留空自动生成">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>分类</label>
|
||||
<input type="text" name="category" value="{{ create_form.category }}" placeholder="例如 tech">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>标签</label>
|
||||
<input type="text" name="tags" value="{{ create_form.tags }}" placeholder="逗号分隔">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>文章类型</label>
|
||||
<input type="text" name="post_type" value="{{ create_form.post_type }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>封面图</label>
|
||||
<input type="text" name="image" value="{{ create_form.image }}" placeholder="可选">
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>摘要</label>
|
||||
<textarea name="description">{{ create_form.description }}</textarea>
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>正文 Markdown</label>
|
||||
<textarea name="content" style="min-height: 22rem; font-family: var(--font-mono); line-height: 1.65;">{{ create_form.content }}</textarea>
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<div class="actions">
|
||||
<label class="chip"><input type="checkbox" name="published" checked style="margin-right: 8px;">发布</label>
|
||||
<label class="chip"><input type="checkbox" name="pinned" style="margin-right: 8px;">置顶</label>
|
||||
<button type="submit" class="btn btn-primary">创建文章</button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</section>
|
||||
|
||||
<section class="form-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>导入 Markdown 文件</h2>
|
||||
<div class="table-note">支持选择单个 `.md/.markdown` 文件,也支持直接选择一个本地 Markdown 文件夹批量导入。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<form id="markdown-import-form" class="form-grid">
|
||||
<div class="field">
|
||||
<label>选择文件</label>
|
||||
<input id="markdown-files" type="file" accept=".md,.markdown" multiple>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>选择文件夹</label>
|
||||
<input id="markdown-folder" type="file" accept=".md,.markdown" webkitdirectory directory multiple>
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<div class="actions">
|
||||
<button id="import-submit" type="submit" class="btn btn-success">导入 Markdown</button>
|
||||
</div>
|
||||
<div class="field-hint" style="margin-top: 10px;">导入时会从 frontmatter 和正文里提取标题、slug、摘要、分类、标签与内容,并写入服务器 `content/posts`。</div>
|
||||
<div id="import-notice" class="notice"></div>
|
||||
</div>
|
||||
</form>
|
||||
</section>
|
||||
|
||||
<section class="table-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>内容列表</h2>
|
||||
<div class="table-note">直接跳到前台文章、分类筛选和 API 明细。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if rows | length > 0 %}
|
||||
<div class="table-wrap">
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>文章</th>
|
||||
<th>分类</th>
|
||||
<th>标签</th>
|
||||
<th>时间</th>
|
||||
<th>跳转</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row in rows %}
|
||||
<tr>
|
||||
<td class="mono">#{{ row.id }}</td>
|
||||
<td>
|
||||
<div class="item-title">
|
||||
<strong>{{ row.title }}</strong>
|
||||
<span class="item-meta">{{ row.slug }}</span>
|
||||
<span class="item-meta">{{ row.file_path }}</span>
|
||||
</div>
|
||||
</td>
|
||||
<td>
|
||||
<div class="item-title">
|
||||
<strong>{{ row.category_name }}</strong>
|
||||
<a href="{{ row.category_frontend_url }}" class="inline-link" target="_blank" rel="noreferrer noopener">查看该分类文章</a>
|
||||
</div>
|
||||
</td>
|
||||
<td>
|
||||
<div class="inline-links">
|
||||
{% if row.tags | length > 0 %}
|
||||
{% for tag in row.tags %}
|
||||
<span class="chip">#{{ tag }}</span>
|
||||
{% endfor %}
|
||||
{% else %}
|
||||
<span class="badge-soft">暂无标签</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</td>
|
||||
<td class="mono">{{ row.created_at }}</td>
|
||||
<td>
|
||||
<div class="actions">
|
||||
<a href="{{ row.edit_url }}" class="btn btn-success">编辑 Markdown</a>
|
||||
<a href="{{ row.frontend_url }}" class="btn btn-primary" target="_blank" rel="noreferrer noopener">前台详情</a>
|
||||
<a href="{{ row.api_url }}" class="btn btn-ghost" target="_blank" rel="noreferrer noopener">API</a>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="empty">当前没有可管理的文章数据。</div>
|
||||
{% endif %}
|
||||
</section>
|
||||
{% endblock %}
|
||||
|
||||
{% block page_scripts %}
|
||||
<script>
|
||||
const importForm = document.getElementById("markdown-import-form");
|
||||
const importFiles = document.getElementById("markdown-files");
|
||||
const importFolder = document.getElementById("markdown-folder");
|
||||
const importNotice = document.getElementById("import-notice");
|
||||
|
||||
function showImportNotice(message, kind) {
|
||||
importNotice.textContent = message;
|
||||
importNotice.className = "notice show " + (kind === "success" ? "notice-success" : "notice-error");
|
||||
}
|
||||
|
||||
importForm?.addEventListener("submit", async (event) => {
|
||||
event.preventDefault();
|
||||
|
||||
const selectedFiles = [
|
||||
...(importFiles?.files ? Array.from(importFiles.files) : []),
|
||||
...(importFolder?.files ? Array.from(importFolder.files) : []),
|
||||
].filter((file) => file.name.endsWith(".md") || file.name.endsWith(".markdown"));
|
||||
|
||||
if (!selectedFiles.length) {
|
||||
showImportNotice("请先选择要导入的 Markdown 文件或文件夹。", "error");
|
||||
return;
|
||||
}
|
||||
|
||||
const payload = new FormData();
|
||||
selectedFiles.forEach((file) => {
|
||||
const uploadName = file.webkitRelativePath || file.name;
|
||||
payload.append("files", file, uploadName);
|
||||
});
|
||||
|
||||
try {
|
||||
const response = await fetch("/admin/posts/import", {
|
||||
method: "POST",
|
||||
body: payload,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(await response.text() || "import failed");
|
||||
}
|
||||
|
||||
const result = await response.json();
|
||||
showImportNotice(`已导入 ${result.count} 个 Markdown 文件,正在刷新列表。`, "success");
|
||||
setTimeout(() => window.location.reload(), 900);
|
||||
} catch (error) {
|
||||
showImportNotice("导入失败:" + (error?.message || "unknown error"), "error");
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
@@ -1,113 +0,0 @@
|
||||
{% extends "admin/base.html" %}
|
||||
|
||||
{% block main_content %}
|
||||
<section class="form-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>新增评价</h2>
|
||||
<div class="table-note">这里创建的评价会立刻出现在前台 `/reviews` 页面。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<form method="post" action="/admin/reviews" class="inline-form">
|
||||
<div class="compact-grid">
|
||||
<input type="text" name="title" placeholder="标题" value="{{ create_form.title }}" required>
|
||||
<select name="review_type">
|
||||
<option value="game" {% if create_form.review_type == "game" %}selected{% endif %}>游戏</option>
|
||||
<option value="anime" {% if create_form.review_type == "anime" %}selected{% endif %}>动画</option>
|
||||
<option value="music" {% if create_form.review_type == "music" %}selected{% endif %}>音乐</option>
|
||||
<option value="book" {% if create_form.review_type == "book" %}selected{% endif %}>书籍</option>
|
||||
<option value="movie" {% if create_form.review_type == "movie" %}selected{% endif %}>影视</option>
|
||||
</select>
|
||||
<input type="number" name="rating" min="0" max="5" value="{{ create_form.rating }}" required>
|
||||
<input type="date" name="review_date" value="{{ create_form.review_date }}">
|
||||
<select name="status">
|
||||
<option value="completed" {% if create_form.status == "completed" %}selected{% endif %}>已完成</option>
|
||||
<option value="in-progress" {% if create_form.status == "in-progress" %}selected{% endif %}>进行中</option>
|
||||
<option value="dropped" {% if create_form.status == "dropped" %}selected{% endif %}>已弃坑</option>
|
||||
</select>
|
||||
<input type="text" name="cover" value="{{ create_form.cover }}" placeholder="封面图标或 emoji">
|
||||
<input type="url" name="link_url" value="{{ create_form.link_url }}" placeholder="跳转链接,可选">
|
||||
<input type="text" name="tags" value="{{ create_form.tags }}" placeholder="标签,逗号分隔">
|
||||
<textarea name="description" placeholder="评价描述">{{ create_form.description }}</textarea>
|
||||
</div>
|
||||
<div class="compact-actions">
|
||||
<button type="submit" class="btn btn-primary">创建评价</button>
|
||||
</div>
|
||||
</form>
|
||||
</section>
|
||||
|
||||
<section class="table-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>评价列表</h2>
|
||||
<div class="table-note">这里的每一行都可以直接编辑,保存后前台评价页会读取最新数据。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if rows | length > 0 %}
|
||||
<div class="table-wrap">
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>评价内容</th>
|
||||
<th>状态</th>
|
||||
<th>操作</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for row in rows %}
|
||||
<tr>
|
||||
<td class="mono">#{{ row.id }}</td>
|
||||
<td>
|
||||
<form method="post" action="/admin/reviews/{{ row.id }}/update" class="inline-form compact">
|
||||
<div class="compact-grid">
|
||||
<input type="text" name="title" value="{{ row.title }}" required>
|
||||
<select name="review_type">
|
||||
<option value="game" {% if row.review_type == "game" %}selected{% endif %}>游戏</option>
|
||||
<option value="anime" {% if row.review_type == "anime" %}selected{% endif %}>动画</option>
|
||||
<option value="music" {% if row.review_type == "music" %}selected{% endif %}>音乐</option>
|
||||
<option value="book" {% if row.review_type == "book" %}selected{% endif %}>书籍</option>
|
||||
<option value="movie" {% if row.review_type == "movie" %}selected{% endif %}>影视</option>
|
||||
</select>
|
||||
<input type="number" name="rating" min="0" max="5" value="{{ row.rating }}" required>
|
||||
<input type="date" name="review_date" value="{{ row.review_date }}">
|
||||
<select name="status">
|
||||
<option value="completed" {% if row.status == "completed" %}selected{% endif %}>已完成</option>
|
||||
<option value="in-progress" {% if row.status == "in-progress" %}selected{% endif %}>进行中</option>
|
||||
<option value="dropped" {% if row.status == "dropped" %}selected{% endif %}>已弃坑</option>
|
||||
</select>
|
||||
<input type="text" name="cover" value="{{ row.cover }}" placeholder="封面图标或 emoji">
|
||||
<input type="url" name="link_url" value="{{ row.link_url }}" placeholder="跳转链接,可选">
|
||||
<input type="text" name="tags" value="{{ row.tags_input }}" placeholder="标签,逗号分隔">
|
||||
<textarea name="description" placeholder="评价描述">{{ row.description }}</textarea>
|
||||
</div>
|
||||
<div class="compact-actions">
|
||||
<button type="submit" class="btn btn-success">保存</button>
|
||||
{% if row.link_url %}
|
||||
<a href="{{ row.link_url }}" class="btn btn-ghost" target="_blank" rel="noreferrer noopener">跳转</a>
|
||||
{% endif %}
|
||||
<a href="{{ row.api_url }}" class="btn btn-ghost" target="_blank" rel="noreferrer noopener">API</a>
|
||||
</div>
|
||||
</form>
|
||||
</td>
|
||||
<td><span class="chip">{{ row.status }}</span></td>
|
||||
<td>
|
||||
<div class="actions">
|
||||
<a href="http://localhost:4321/reviews" class="btn btn-primary" target="_blank" rel="noreferrer noopener">前台查看</a>
|
||||
<form method="post" action="/admin/reviews/{{ row.id }}/delete">
|
||||
<button type="submit" class="btn btn-danger">删除</button>
|
||||
</form>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="empty">暂无评价数据。</div>
|
||||
{% endif %}
|
||||
</section>
|
||||
{% endblock %}
|
||||
@@ -1,225 +0,0 @@
|
||||
{% extends "admin/base.html" %}
|
||||
|
||||
{% block main_content %}
|
||||
<section class="form-panel">
|
||||
<div class="table-head">
|
||||
<div>
|
||||
<h2>站点资料</h2>
|
||||
<div class="table-note">保存后首页、关于页、页脚和友链页中的本站信息会直接读取这里的配置。AI 问答也在这里统一开启和配置。</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<form id="site-settings-form" class="form-grid">
|
||||
<div class="field">
|
||||
<label>站点名称</label>
|
||||
<input name="site_name" value="{{ form.site_name }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>短名称</label>
|
||||
<input name="site_short_name" value="{{ form.site_short_name }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>站点链接</label>
|
||||
<input name="site_url" value="{{ form.site_url }}">
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>站点标题</label>
|
||||
<input name="site_title" value="{{ form.site_title }}">
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>站点简介</label>
|
||||
<textarea name="site_description">{{ form.site_description }}</textarea>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>首页主标题</label>
|
||||
<input name="hero_title" value="{{ form.hero_title }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>首页副标题</label>
|
||||
<input name="hero_subtitle" value="{{ form.hero_subtitle }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>个人名称</label>
|
||||
<input name="owner_name" value="{{ form.owner_name }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>个人头衔</label>
|
||||
<input name="owner_title" value="{{ form.owner_title }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>头像 URL</label>
|
||||
<input name="owner_avatar_url" value="{{ form.owner_avatar_url }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>所在地</label>
|
||||
<input name="location" value="{{ form.location }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>GitHub</label>
|
||||
<input name="social_github" value="{{ form.social_github }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>Twitter / X</label>
|
||||
<input name="social_twitter" value="{{ form.social_twitter }}">
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>Email / mailto</label>
|
||||
<input name="social_email" value="{{ form.social_email }}">
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>个人简介</label>
|
||||
<textarea name="owner_bio">{{ form.owner_bio }}</textarea>
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>技术栈(每行一个)</label>
|
||||
<textarea name="tech_stack">{{ form.tech_stack }}</textarea>
|
||||
</div>
|
||||
|
||||
<div class="field field-wide" style="border-top: 1px solid rgba(148, 163, 184, 0.18); padding-top: 18px; margin-top: 10px;">
|
||||
<label style="display:flex; align-items:center; gap:10px;">
|
||||
<input type="checkbox" name="ai_enabled" {% if form.ai_enabled %}checked{% endif %}>
|
||||
<span>启用前台 AI 问答</span>
|
||||
</label>
|
||||
<div class="field-hint">关闭后,前台导航不会显示 AI 页面,公开接口也不会对外提供回答。Embedding 已改为后端本地生成,并使用 PostgreSQL 的 pgvector 存储与检索。</div>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>接入类型 / 协议</label>
|
||||
<input name="ai_provider" value="{{ form.ai_provider }}" placeholder="newapi">
|
||||
<div class="field-hint">这里是后端适配器类型,不是模型厂商名。`newapi` 表示走 NewAPI 兼容的 Responses 接口;厂商和型号建议写在你的通道备注与模型名里。</div>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>聊天 API Base</label>
|
||||
<input name="ai_api_base" value="{{ form.ai_api_base }}" placeholder="https://91code.jiangnight.com/v1">
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>聊天 API Key</label>
|
||||
<input name="ai_api_key" value="{{ form.ai_api_key }}" placeholder="sk-...">
|
||||
<div class="field-hint">这里只保存在后端数据库里,前台公开接口不会返回这个字段。当前默认接入 91code.jiangnight.com 的 NewAPI 兼容接口,未配置时前台仍可做本地检索,但不会生成完整聊天回答。</div>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>聊天模型</label>
|
||||
<input name="ai_chat_model" value="{{ form.ai_chat_model }}" placeholder="gpt-5.4">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>本地 Embedding</label>
|
||||
<input value="{{ form.ai_local_embedding }}" disabled>
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>Top K</label>
|
||||
<input type="number" min="1" max="12" name="ai_top_k" value="{{ form.ai_top_k }}">
|
||||
</div>
|
||||
<div class="field">
|
||||
<label>Chunk Size</label>
|
||||
<input type="number" min="400" max="4000" step="50" name="ai_chunk_size" value="{{ form.ai_chunk_size }}">
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<label>系统提示词</label>
|
||||
<textarea name="ai_system_prompt">{{ form.ai_system_prompt }}</textarea>
|
||||
</div>
|
||||
<div class="field field-wide">
|
||||
<div class="table-note">AI 索引状态:已索引 {{ form.ai_chunks_count }} 个片段,最近建立时间 {{ form.ai_last_indexed_at }}。</div>
|
||||
<div class="actions">
|
||||
<button type="submit" class="btn btn-primary">保存设置</button>
|
||||
<button type="button" id="reindex-btn" class="btn">重建 AI 索引</button>
|
||||
</div>
|
||||
<div class="field-hint" style="margin-top: 10px;">文章内容变化后建议手动重建一次 AI 索引。本地 embedding 使用后端内置 `fastembed` 生成,向量会写入 PostgreSQL 的 `pgvector` 列,并通过 HNSW 索引做相似度检索;聊天回答默认走 `newapi -> /responses -> gpt-5.4`。前台用户提交过的搜索词和 AI 问题会单独写入分析日志,方便在新版后台里查看。</div>
|
||||
<div id="notice" class="notice"></div>
|
||||
</div>
|
||||
</form>
|
||||
</section>
|
||||
{% endblock %}
|
||||
|
||||
{% block page_scripts %}
<script>
  // Admin "site settings" page: saves the settings form via PATCH
  // /api/site_settings and rebuilds the AI index via POST /api/ai/reindex.
  // Elements are looked up once; listeners use optional chaining so the
  // script is a no-op if a partial render omits an element.
  const form = document.getElementById("site-settings-form");
  const notice = document.getElementById("notice");
  const reindexBtn = document.getElementById("reindex-btn");

  // Show a status banner; kind is "success" or "error".
  function showNotice(message, kind) {
    notice.textContent = message;
    notice.className = "notice show " + (kind === "success" ? "notice-success" : "notice-error");
  }

  // Parse a form value into a finite number, or null.
  // BUG FIX: Number("") === 0 and Number(null) === 0, both finite, so the
  // previous version silently sent 0 for an empty ai_top_k / ai_chunk_size
  // field instead of null. Blank or missing input must map to null so the
  // backend keeps its configured default.
  function numericOrNull(value) {
    if (value == null || String(value).trim() === "") {
      return null;
    }
    const parsed = Number(value);
    return Number.isFinite(parsed) ? parsed : null;
  }

  form?.addEventListener("submit", async (event) => {
    event.preventDefault();

    const data = new FormData(form);
    // Form fields are snake_case; the API payload is camelCase.
    const payload = {
      siteName: data.get("site_name"),
      siteShortName: data.get("site_short_name"),
      siteUrl: data.get("site_url"),
      siteTitle: data.get("site_title"),
      siteDescription: data.get("site_description"),
      heroTitle: data.get("hero_title"),
      heroSubtitle: data.get("hero_subtitle"),
      ownerName: data.get("owner_name"),
      ownerTitle: data.get("owner_title"),
      ownerAvatarUrl: data.get("owner_avatar_url"),
      location: data.get("location"),
      socialGithub: data.get("social_github"),
      socialTwitter: data.get("social_twitter"),
      socialEmail: data.get("social_email"),
      ownerBio: data.get("owner_bio"),
      // One entry per non-empty line of the textarea.
      techStack: String(data.get("tech_stack") || "")
        .split("\n")
        .map((item) => item.trim())
        .filter(Boolean),
      // Checkbox posts "on" when checked, nothing otherwise.
      aiEnabled: data.get("ai_enabled") === "on",
      aiProvider: data.get("ai_provider"),
      aiApiBase: data.get("ai_api_base"),
      aiApiKey: data.get("ai_api_key"),
      aiChatModel: data.get("ai_chat_model"),
      aiTopK: numericOrNull(data.get("ai_top_k")),
      aiChunkSize: numericOrNull(data.get("ai_chunk_size")),
      aiSystemPrompt: data.get("ai_system_prompt")
    };

    try {
      const response = await fetch("/api/site_settings", {
        method: "PATCH",
        headers: {
          "Content-Type": "application/json"
        },
        body: JSON.stringify(payload)
      });

      if (!response.ok) {
        // Surface the server's error body when available.
        throw new Error(await response.text() || "save failed");
      }

      showNotice("站点信息与 AI 配置已保存。", "success");
    } catch (error) {
      showNotice("保存失败:" + (error?.message || "unknown error"), "error");
    }
  });

  reindexBtn?.addEventListener("click", async () => {
    // Disable while the rebuild runs to prevent double submits.
    reindexBtn.disabled = true;
    reindexBtn.textContent = "正在重建...";

    try {
      const response = await fetch("/api/ai/reindex", {
        method: "POST"
      });

      if (!response.ok) {
        throw new Error(await response.text() || "reindex failed");
      }

      const data = await response.json();
      showNotice(`AI 索引已重建,当前共有 ${data.indexed_chunks} 个片段。`, "success");
      // Brief delay so the user sees the success notice before reload.
      window.setTimeout(() => window.location.reload(), 900);
    } catch (error) {
      showNotice("重建失败:" + (error?.message || "unknown error"), "error");
    } finally {
      reindexBtn.disabled = false;
      reindexBtn.textContent = "重建 AI 索引";
    }
  });
</script>
{% endblock %}
|
||||
@@ -1,77 +0,0 @@
|
||||
{% extends "admin/base.html" %}

{% block main_content %}
{# Panel: create a new tag. Imported Markdown reuses tags from this
   dictionary first and only auto-creates ones that do not exist. #}
<section class="form-panel">
  <div class="table-head">
    <div>
      <h2>新增标签</h2>
      <div class="table-note">这里维护标签字典。文章 Markdown 导入时会优先复用这里的标签,不存在才自动创建。</div>
    </div>
  </div>

  <form method="post" action="/admin/tags" class="inline-form">
    <div class="compact-grid">
      <input type="text" name="name" placeholder="标签名,例如 Rust" value="{{ create_form.name }}" required>
      <input type="text" name="slug" placeholder="slug,可留空自动生成" value="{{ create_form.slug }}">
    </div>
    <div class="compact-actions">
      <button type="submit" class="btn btn-primary">创建标签</button>
    </div>
  </form>
</section>

{# Panel: list existing tags with inline edit, usage count and links. #}
<section class="table-panel">
  <div class="table-head">
    <div>
      <h2>标签映射</h2>
      <div class="table-note">标签名称会作为文章展示名称使用,使用次数来自当前已同步的真实文章内容。</div>
    </div>
  </div>

  {% if rows | length > 0 %}
  <div class="table-wrap">
    <table>
      <thead>
        <tr>
          <th>ID</th>
          <th>标签</th>
          <th>使用次数</th>
          <th>跳转</th>
        </tr>
      </thead>
      <tbody>
        {% for row in rows %}
        <tr>
          <td class="mono">#{{ row.id }}</td>
          <td>
            {# Inline edit form; posts name/slug for this row only. #}
            <form method="post" action="/admin/tags/{{ row.id }}/update" class="inline-form compact">
              <div class="compact-grid">
                <input type="text" name="name" value="{{ row.name }}" required>
                <input type="text" name="slug" value="{{ row.slug }}" required>
              </div>
              <div class="compact-actions">
                <button type="submit" class="btn btn-success">保存</button>
                <a href="{{ row.api_url }}" class="btn btn-ghost" target="_blank" rel="noreferrer noopener">API</a>
              </div>
            </form>
          </td>
          <td><span class="chip">{{ row.usage_count }} 篇文章</span></td>
          <td>
            <div class="actions">
              <a href="{{ row.frontend_url }}" class="btn btn-ghost" target="_blank" rel="noreferrer noopener">前台标签页</a>
              <a href="{{ row.articles_url }}" class="btn btn-primary" target="_blank" rel="noreferrer noopener">前台筛选</a>
              <form method="post" action="/admin/tags/{{ row.id }}/delete">
                <button type="submit" class="btn btn-danger">删除</button>
              </form>
            </div>
          </td>
        </tr>
        {% endfor %}
      </tbody>
    </table>
  </div>
  {% else %}
  <div class="empty">暂无标签数据。</div>
  {% endif %}
</section>
{% endblock %}
|
||||
@@ -1,12 +0,0 @@
|
||||
<html><body>
{# Demo page: renders the localized "hello-world" message in two locales. #}
<img src="/static/image.png" width="200"/>
<br/>
find this tera template at <code>assets/views/home/hello.html</code>:
<br/>
<br/>
{{ t(key="hello-world", lang="en-US") }},
<br/>
{{ t(key="hello-world", lang="de-DE") }}

</body></html>
|
||||
|
||||
@@ -1,330 +0,0 @@
|
||||
Compiling proc-macro2 v1.0.106
|
||||
Compiling quote v1.0.45
|
||||
Compiling unicode-ident v1.0.24
|
||||
Compiling serde_core v1.0.228
|
||||
Compiling serde v1.0.228
|
||||
Compiling getrandom v0.3.4
|
||||
Compiling autocfg v1.5.0
|
||||
Compiling find-msvc-tools v0.1.9
|
||||
Compiling shlex v1.3.0
|
||||
Compiling version_check v0.9.5
|
||||
Compiling crossbeam-utils v0.8.21
|
||||
Compiling zmij v1.0.21
|
||||
Compiling zerocopy v0.8.47
|
||||
Compiling serde_json v1.0.149
|
||||
Compiling pkg-config v0.3.32
|
||||
Compiling icu_normalizer_data v2.1.1
|
||||
Compiling icu_properties_data v2.1.2
|
||||
Compiling thiserror v2.0.18
|
||||
Compiling libc v0.2.183
|
||||
Compiling typenum v1.19.0
|
||||
Compiling generic-array v0.14.7
|
||||
Compiling rustls v0.23.37
|
||||
Compiling num-traits v0.2.19
|
||||
Compiling libm v0.2.16
|
||||
Compiling getrandom v0.4.2
|
||||
Compiling windows_x86_64_msvc v0.52.6
|
||||
Compiling jobserver v0.1.34
|
||||
Compiling ident_case v1.0.1
|
||||
Compiling parking_lot_core v0.9.12
|
||||
Compiling regex-syntax v0.8.10
|
||||
Compiling crc32fast v1.5.0
|
||||
Compiling httparse v1.10.1
|
||||
Compiling bigdecimal v0.4.10
|
||||
Compiling cc v1.2.57
|
||||
Compiling crossbeam-epoch v0.9.18
|
||||
Compiling rust_decimal v1.40.0
|
||||
Compiling windows-targets v0.52.6
|
||||
Compiling rand v0.10.0
|
||||
Compiling proc-macro-hack v0.5.20+deprecated
|
||||
Compiling crossbeam-deque v0.8.6
|
||||
Compiling rand_core v0.6.4
|
||||
Compiling windows_x86_64_msvc v0.48.5
|
||||
Compiling flate2 v1.1.9
|
||||
Compiling windows_x86_64_msvc v0.53.1
|
||||
Compiling syn v2.0.117
|
||||
Compiling rand v0.8.5
|
||||
Compiling rayon-core v1.13.0
|
||||
Compiling regex-automata v0.4.14
|
||||
Compiling num-integer v0.1.46
|
||||
Compiling zstd-safe v7.2.4
|
||||
Compiling windows-sys v0.59.0
|
||||
Compiling concurrent-queue v2.5.0
|
||||
Compiling log v0.4.29
|
||||
Compiling num-bigint v0.4.6
|
||||
Compiling phf_generator v0.11.3
|
||||
Compiling block-buffer v0.10.4
|
||||
Compiling crypto-common v0.1.7
|
||||
Compiling winapi v0.3.9
|
||||
Compiling vcpkg v0.2.15
|
||||
Compiling anyhow v1.0.102
|
||||
Compiling native-tls v0.2.18
|
||||
Compiling digest v0.10.7
|
||||
Compiling object v0.37.3
|
||||
Compiling phf_codegen v0.11.3
|
||||
Compiling sha2 v0.10.9
|
||||
Compiling event-listener v5.4.1
|
||||
Compiling hashbrown v0.16.1
|
||||
Compiling deranged v0.5.8
|
||||
Compiling uuid v1.23.0
|
||||
Compiling ring v0.17.14
|
||||
Compiling zstd-sys v2.0.16+zstd.1.5.7
|
||||
Compiling windows-targets v0.53.5
|
||||
Compiling libsqlite3-sys v0.30.1
|
||||
Compiling windows-targets v0.48.5
|
||||
Compiling crossbeam-queue v0.3.12
|
||||
Compiling ahash v0.8.12
|
||||
Compiling windows-sys v0.48.0
|
||||
Compiling indexmap v2.13.0
|
||||
Compiling windows-sys v0.60.2
|
||||
Compiling time v0.3.47
|
||||
Compiling hmac v0.12.1
|
||||
Compiling regex v1.12.3
|
||||
Compiling md-5 v0.10.6
|
||||
Compiling atoi v2.0.0
|
||||
Compiling proc-macro-error-attr2 v2.0.0
|
||||
Compiling rustversion v1.0.22
|
||||
Compiling parse-zoneinfo v0.3.1
|
||||
Compiling etcetera v0.8.0
|
||||
Compiling hkdf v0.12.4
|
||||
Compiling rand_core v0.9.5
|
||||
Compiling chrono-tz-build v0.3.0
|
||||
Compiling proc-macro2-diagnostics v0.10.1
|
||||
Compiling portable-atomic v1.13.1
|
||||
Compiling base64ct v1.8.3
|
||||
Compiling socks v0.3.4
|
||||
Compiling paste v1.0.15
|
||||
Compiling pem-rfc7468 v1.0.0
|
||||
Compiling ignore v0.4.25
|
||||
Compiling ordered-float v4.6.0
|
||||
Compiling yansi v1.0.1
|
||||
Compiling thiserror v1.0.69
|
||||
Compiling ureq-proto v0.6.0
|
||||
Compiling der v0.8.0
|
||||
Compiling globwalk v0.9.1
|
||||
Compiling stacker v0.1.23
|
||||
Compiling num-rational v0.4.2
|
||||
Compiling humansize v2.1.3
|
||||
Compiling fs-err v2.11.0
|
||||
Compiling synstructure v0.13.2
|
||||
Compiling darling_core v0.20.11
|
||||
Compiling proc-macro-error2 v2.0.1
|
||||
Compiling pest_generator v2.8.6
|
||||
Compiling multer v3.1.0
|
||||
Compiling chrono-tz v0.9.0
|
||||
Compiling av-scenechange v0.14.1
|
||||
Compiling utf8-zero v0.8.1
|
||||
Compiling unicode-xid v0.2.6
|
||||
Compiling built v0.8.0
|
||||
Compiling ureq v3.3.0
|
||||
Compiling shared_child v1.1.1
|
||||
Compiling onig_sys v69.9.1
|
||||
Compiling matrixmultiply v0.3.10
|
||||
Compiling cookie v0.18.1
|
||||
Compiling hmac-sha256 v1.1.14
|
||||
Compiling rav1e v0.8.1
|
||||
Compiling pastey v0.1.1
|
||||
Compiling lzma-rust2 v0.15.7
|
||||
Compiling duct v1.1.1
|
||||
Compiling serde_path_to_error v0.1.20
|
||||
Compiling ar_archive_writer v0.5.1
|
||||
Compiling simd_helpers v0.1.0
|
||||
Compiling include_dir_macros v0.7.4
|
||||
Compiling windows-sys v0.52.0
|
||||
Compiling crossbeam-channel v0.5.15
|
||||
Compiling esaxx-rs v0.1.10
|
||||
Compiling tokio-cron-scheduler v0.11.1
|
||||
Compiling noop_proc_macro v0.3.0
|
||||
Compiling console v0.15.11
|
||||
Compiling include_dir v0.7.4
|
||||
Compiling castaway v0.2.4
|
||||
Compiling globset v0.4.18
|
||||
Compiling serde_derive v1.0.228
|
||||
Compiling displaydoc v0.2.5
|
||||
Compiling zerofrom-derive v0.1.6
|
||||
Compiling yoke-derive v0.8.1
|
||||
Compiling zerovec-derive v0.11.2
|
||||
Compiling tokio-macros v2.6.1
|
||||
Compiling tracing-attributes v0.1.31
|
||||
Compiling zerocopy-derive v0.8.47
|
||||
Compiling thiserror-impl v2.0.18
|
||||
Compiling futures-macro v0.3.32
|
||||
Compiling rustls-webpki v0.103.10
|
||||
Compiling darling_macro v0.20.11
|
||||
Compiling tinystr v0.8.2
|
||||
Compiling tokio v1.50.0
|
||||
Compiling unic-langid-impl v0.9.6
|
||||
Compiling equator-macro v0.4.2
|
||||
Compiling psm v0.1.30
|
||||
Compiling zerofrom v0.1.6
|
||||
Compiling darling v0.20.11
|
||||
Compiling futures-util v0.3.32
|
||||
Compiling yoke v0.8.1
|
||||
Compiling inherent v1.0.13
|
||||
Compiling num-derive v0.4.2
|
||||
Compiling tracing v0.1.44
|
||||
Compiling unic-langid-macros-impl v0.9.6
|
||||
Compiling zerovec v0.11.5
|
||||
Compiling zerotrie v0.2.3
|
||||
Compiling equator v0.4.2
|
||||
Compiling clap_derive v4.6.0
|
||||
Compiling pest_derive v2.8.6
|
||||
Compiling sea-query-derive v0.4.3
|
||||
Compiling aligned-vec v0.6.4
|
||||
Compiling thiserror-impl v1.0.69
|
||||
Compiling v_frame v0.3.9
|
||||
Compiling sea-bae v0.2.1
|
||||
Compiling async-trait v0.1.89
|
||||
Compiling profiling-procmacros v1.0.17
|
||||
Compiling derive_more-impl v2.1.1
|
||||
Compiling potential_utf v0.1.4
|
||||
Compiling icu_locale_core v2.1.1
|
||||
Compiling icu_collections v2.1.1
|
||||
Compiling arg_enum_proc_macro v0.3.4
|
||||
Compiling unic-langid-macros v0.9.6
|
||||
Compiling futures-executor v0.3.32
|
||||
Compiling futures v0.3.32
|
||||
Compiling icu_provider v2.1.1
|
||||
Compiling unic-langid v0.9.6
|
||||
Compiling smallvec v1.15.1
|
||||
Compiling chrono v0.4.44
|
||||
Compiling either v1.15.0
|
||||
Compiling serde_urlencoded v0.7.1
|
||||
Compiling icu_properties v2.1.2
|
||||
Compiling tracing-serde v0.2.0
|
||||
Compiling icu_normalizer v2.1.1
|
||||
Compiling tokio-util v0.7.18
|
||||
Compiling tokio-stream v0.1.18
|
||||
Compiling tower v0.5.3
|
||||
Compiling parking_lot v0.12.5
|
||||
Compiling rayon v1.11.0
|
||||
Compiling tokio-rustls v0.26.4
|
||||
Compiling idna_adapter v1.2.1
|
||||
Compiling h2 v0.4.13
|
||||
Compiling ppv-lite86 v0.2.21
|
||||
Compiling futures-intrusive v0.5.0
|
||||
Compiling idna v1.1.0
|
||||
Compiling tokio-native-tls v0.3.1
|
||||
Compiling sea-query v0.32.7
|
||||
Compiling rand_chacha v0.3.1
|
||||
Compiling rand_chacha v0.9.0
|
||||
Compiling itertools v0.14.0
|
||||
Compiling url v2.5.8
|
||||
Compiling hashbrown v0.14.5
|
||||
Compiling rand v0.9.2
|
||||
Compiling clap v4.6.0
|
||||
Compiling sqlx-core v0.8.6
|
||||
Compiling tracing-subscriber v0.3.23
|
||||
Compiling async-stream-impl v0.3.6
|
||||
Compiling ouroboros_macro v0.18.5
|
||||
Compiling maybe-rayon v0.1.1
|
||||
Compiling half v2.7.1
|
||||
Compiling derive_more v2.1.1
|
||||
Compiling serde_spanned v0.6.9
|
||||
Compiling serde_regex v1.1.0
|
||||
Compiling serde_yaml v0.9.34+deprecated
|
||||
Compiling toml_datetime v0.6.11
|
||||
Compiling tera v1.20.1
|
||||
Compiling async-stream v0.3.6
|
||||
Compiling sea-orm-macros v1.1.19
|
||||
Compiling profiling v1.0.17
|
||||
Compiling av1-grain v0.2.5
|
||||
Compiling hyper v1.8.1
|
||||
Compiling axum-core v0.5.6
|
||||
Compiling derive_builder_core v0.20.2
|
||||
Compiling sqlx-postgres v0.8.6
|
||||
Compiling sqlx-sqlite v0.8.6
|
||||
Compiling hyper-util v0.1.20
|
||||
Compiling ouroboros v0.18.5
|
||||
Compiling ort-sys v2.0.0-rc.11
|
||||
Compiling fax_derive v0.2.0
|
||||
Compiling axum-macros v0.5.0
|
||||
Compiling sea-schema-derive v0.3.0
|
||||
Compiling fax v0.2.6
|
||||
Compiling hyper-tls v0.6.0
|
||||
Compiling hyper-rustls v0.27.7
|
||||
Compiling rrgen v0.5.6
|
||||
Compiling derive_builder_macro v0.20.2
|
||||
Compiling chumsky v0.9.3
|
||||
Compiling sea-orm-cli v1.1.19
|
||||
Compiling toml_edit v0.22.27
|
||||
Compiling combine v4.6.7
|
||||
Compiling cron v0.12.1
|
||||
Compiling backon v1.6.0
|
||||
Compiling quick-xml v0.38.4
|
||||
Compiling simple_asn1 v0.6.4
|
||||
Compiling validator_derive v0.20.0
|
||||
Compiling socket2 v0.5.10
|
||||
Compiling monostate-impl v0.1.18
|
||||
Compiling serde_html_form v0.2.8
|
||||
Compiling sqlx v0.8.6
|
||||
Compiling colored v2.2.0
|
||||
Compiling blake2 v0.10.6
|
||||
Compiling sea-query-binder v0.7.0
|
||||
Compiling num-complex v0.4.6
|
||||
Compiling macro_rules_attribute-proc_macro v0.2.2
|
||||
Compiling loco-rs v0.16.4
|
||||
Compiling moxcms v0.8.1
|
||||
Compiling axum v0.8.8
|
||||
Compiling sea-schema v0.16.2
|
||||
Compiling sea-orm v1.1.19
|
||||
Compiling validator v0.20.0
|
||||
Compiling ndarray v0.17.2
|
||||
Compiling macro_rules_attribute v0.2.2
|
||||
Compiling spm_precompiled v0.1.4
|
||||
Compiling lettre v0.11.19
|
||||
Compiling exr v1.74.0
|
||||
Compiling backtrace_printer v1.3.0
|
||||
Compiling zstd v0.13.3
|
||||
Compiling moka v0.12.15
|
||||
Compiling compression-codecs v0.4.37
|
||||
Compiling ravif v0.13.0
|
||||
Compiling async-compression v0.4.41
|
||||
Compiling redis v0.31.0
|
||||
Compiling tower-http v0.6.8
|
||||
Compiling indicatif v0.17.11
|
||||
Compiling argon2 v0.5.3
|
||||
Compiling reqwest v0.12.28
|
||||
Compiling axum-extra v0.10.3
|
||||
Compiling byte-unit v4.0.19
|
||||
Compiling loco-gen v0.16.4
|
||||
Compiling jsonwebtoken v9.3.1
|
||||
Compiling notify v8.2.0
|
||||
Compiling png v0.18.1
|
||||
Compiling monostate v0.1.18
|
||||
Compiling toml v0.8.23
|
||||
Compiling onig v6.5.1
|
||||
Compiling derive_builder v0.20.2
|
||||
Compiling tiff v0.11.3
|
||||
Compiling tracing-appender v0.2.4
|
||||
Compiling opendal v0.54.1
|
||||
Compiling rayon-cond v0.4.0
|
||||
Compiling ulid v1.2.1
|
||||
Compiling dashmap v6.1.0
|
||||
Compiling ureq v2.12.1
|
||||
Compiling unicode-normalization-alignments v0.1.12
|
||||
Compiling intl_pluralrules v7.0.2
|
||||
Compiling intl-memoizer v0.5.3
|
||||
Compiling fluent-langneg v0.13.1
|
||||
Compiling compact_str v0.9.0
|
||||
Compiling ipnetwork v0.20.0
|
||||
Compiling dary_heap v0.3.8
|
||||
Compiling serde_variant v0.1.3
|
||||
Compiling fluent-syntax v0.12.0
|
||||
Compiling tower v0.4.13
|
||||
Compiling duct_sh v1.0.0
|
||||
Compiling fluent-bundle v0.16.0
|
||||
Compiling tokenizers v0.22.2
|
||||
Compiling hf-hub v0.4.3
|
||||
Compiling image v0.25.10
|
||||
Compiling ort v2.0.0-rc.11
|
||||
Compiling safetensors v0.7.0
|
||||
Compiling sea-orm-migration v1.1.19
|
||||
Compiling fluent-template-macros v0.13.3
|
||||
Compiling fluent-templates v0.13.3
|
||||
Compiling fastembed v5.13.0
|
||||
Compiling migration v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend\migration)
|
||||
Compiling termi-api v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend)
|
||||
Finished `dev` profile [unoptimized + debuginfo] target(s) in 2m 23s
|
||||
Running `target\debug\termi_api-cli.exe start`
|
||||
error: process didn't exit successfully: `target\debug\termi_api-cli.exe start` (exit code: 0xffffffff)
|
||||
@@ -1,26 +0,0 @@
|
||||
2026-03-29T11:49:41.902355Z  WARN loco_rs::boot: pretty backtraces are enabled (this is great for development but has a runtime cost for production. disable with `logger.pretty_backtrace` in your config yaml)
|
||||
|
||||
▄ ▀
|
||||
▀ ▄
|
||||
▄ ▀ ▄ ▄ ▄▀
|
||||
▄ ▀▄▄
|
||||
▄ ▀ ▀ ▀▄▀█▄
|
||||
▀█▄
|
||||
▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▀▀█
|
||||
██████ █████ ███ █████ ███ █████ ███ ▀█
|
||||
██████ █████ ███ █████ ▀▀▀ █████ ███ ▄█▄
|
||||
██████ █████ ███ █████ █████ ███ ████▄
|
||||
██████ █████ ███ █████ ▄▄▄ █████ ███ █████
|
||||
██████ █████ ███ ████ ███ █████ ███ ████▀
|
||||
▀▀▀██▄ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ██▀
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
https://loco.rs
|
||||
|
||||
environment: development
|
||||
database: automigrate
|
||||
logger: debug
|
||||
compilation: debug
|
||||
modes: server
|
||||
|
||||
listening on http://localhost:5150
|
||||
2026-03-29T11:50:40.675162Z ERROR http-request: loco_rs::controller: controller_error error.msg=AI provider returned 429 Too Many Requests: {"error":{"message":"Concurrency limit exceeded for user, please retry later","type":"rate_limit_error"}} error.details=BadRequest("AI provider returned 429 Too Many Requests: {\"error\":{\"message\":\"Concurrency limit exceeded for user, please retry later\",\"type\":\"rate_limit_error\"}}") http.method=POST http.uri=/api/ai/ask http.version=HTTP/1.1 http.user_agent=Mozilla/5.0 (Windows NT 10.0; Microsoft Windows 10.0.26200; zh-CN) PowerShell/7.5.5 environment=development request_id=160e41d4-83b3-49d9-ad6d-e26498301ab9
|
||||
@@ -1,529 +0,0 @@
|
||||
Compiling proc-macro2 v1.0.106
|
||||
Compiling unicode-ident v1.0.24
|
||||
Compiling quote v1.0.45
|
||||
Compiling syn v2.0.117
|
||||
Compiling cfg-if v1.0.4
|
||||
Compiling serde_core v1.0.228
|
||||
Compiling memchr v2.8.0
|
||||
Compiling windows-link v0.2.1
|
||||
Compiling serde v1.0.228
|
||||
Compiling serde_derive v1.0.228
|
||||
Compiling windows-sys v0.61.2
|
||||
Compiling getrandom v0.3.4
|
||||
Compiling itoa v1.0.18
|
||||
Compiling autocfg v1.5.0
|
||||
Compiling once_cell v1.21.4
|
||||
Compiling jobserver v0.1.34
|
||||
Compiling find-msvc-tools v0.1.9
|
||||
Compiling shlex v1.3.0
|
||||
Compiling cc v1.2.57
|
||||
Compiling log v0.4.29
|
||||
Compiling pin-project-lite v0.2.17
|
||||
Compiling bytes v1.11.1
|
||||
Compiling stable_deref_trait v1.2.1
|
||||
Compiling version_check v0.9.5
|
||||
Compiling num-traits v0.2.19
|
||||
Compiling smallvec v1.15.1
|
||||
Compiling displaydoc v0.2.5
|
||||
Compiling synstructure v0.13.2
|
||||
Compiling zerofrom-derive v0.1.6
|
||||
Compiling zerofrom v0.1.6
|
||||
Compiling yoke-derive v0.8.1
|
||||
Compiling futures-core v0.3.32
|
||||
Compiling yoke v0.8.1
|
||||
Compiling percent-encoding v2.3.2
|
||||
Compiling zerovec-derive v0.11.2
|
||||
Compiling crossbeam-utils v0.8.21
|
||||
Compiling zerovec v0.11.5
|
||||
Compiling allocator-api2 v0.2.21
|
||||
Compiling socket2 v0.6.3
|
||||
Compiling mio v1.1.1
|
||||
Compiling tokio-macros v2.6.1
|
||||
Compiling tokio v1.50.0
|
||||
Compiling tinystr v0.8.2
|
||||
Compiling aho-corasick v1.1.4
|
||||
Compiling futures-sink v0.3.32
|
||||
Compiling tracing-core v0.1.36
|
||||
Compiling equivalent v1.0.2
|
||||
Compiling zerocopy v0.8.47
|
||||
Compiling zmij v1.0.21
|
||||
Compiling getrandom v0.2.17
|
||||
Compiling tracing-attributes v0.1.31
|
||||
Compiling zerocopy-derive v0.8.47
|
||||
Compiling serde_json v1.0.149
|
||||
Compiling zeroize v1.8.2
|
||||
Compiling tracing v0.1.44
|
||||
Compiling foldhash v0.2.0
|
||||
Compiling base64 v0.22.1
|
||||
Compiling hashbrown v0.16.1
|
||||
Compiling slab v0.4.12
|
||||
Compiling pkg-config v0.3.32
|
||||
Compiling futures-channel v0.3.32
|
||||
Compiling fnv v1.0.7
|
||||
Compiling indexmap v2.13.0
|
||||
Compiling futures-macro v0.3.32
|
||||
Compiling thiserror-impl v2.0.18
|
||||
Compiling futures-io v0.3.32
|
||||
Compiling subtle v2.6.1
|
||||
Compiling futures-task v0.3.32
|
||||
Compiling futures-util v0.3.32
|
||||
Compiling litemap v0.8.1
|
||||
Compiling writeable v0.6.2
|
||||
Compiling icu_locale_core v2.1.1
|
||||
Compiling potential_utf v0.1.4
|
||||
Compiling zerotrie v0.2.3
|
||||
Compiling num-integer v0.1.46
|
||||
Compiling icu_properties_data v2.1.2
|
||||
Compiling thiserror v2.0.18
|
||||
Compiling icu_normalizer_data v2.1.1
|
||||
Compiling icu_provider v2.1.1
|
||||
Compiling icu_collections v2.1.1
|
||||
Compiling form_urlencoded v1.2.2
|
||||
Compiling ring v0.17.14
|
||||
Compiling libc v0.2.183
|
||||
Compiling bitflags v2.11.0
|
||||
Compiling regex-syntax v0.8.10
|
||||
Compiling regex-automata v0.4.14
|
||||
Compiling scopeguard v1.2.0
|
||||
Compiling typenum v1.19.0
|
||||
Compiling lock_api v0.4.14
|
||||
Compiling icu_normalizer v2.1.1
|
||||
Compiling icu_properties v2.1.2
|
||||
Compiling num-bigint v0.4.6
|
||||
Compiling rustls-pki-types v1.14.0
|
||||
Compiling generic-array v0.14.7
|
||||
Compiling ryu v1.0.23
|
||||
Compiling untrusted v0.9.0
|
||||
Compiling strsim v0.11.1
|
||||
Compiling idna_adapter v1.2.1
|
||||
Compiling crossbeam-epoch v0.9.18
|
||||
Compiling utf8_iter v1.0.4
|
||||
Compiling idna v1.1.0
|
||||
Compiling ppv-lite86 v0.2.21
|
||||
Compiling chrono v0.4.44
|
||||
Compiling either v1.15.0
|
||||
Compiling rustls v0.23.37
|
||||
Compiling url v2.5.8
|
||||
Compiling crossbeam-deque v0.8.6
|
||||
Compiling rustls-webpki v0.103.10
|
||||
Compiling arrayvec v0.7.6
|
||||
Compiling libm v0.2.16
|
||||
Compiling crypto-common v0.1.7
|
||||
Compiling block-buffer v0.10.4
|
||||
Compiling webpki-roots v1.0.6
|
||||
Compiling http v1.4.0
|
||||
Compiling getrandom v0.4.2
|
||||
Compiling num-conv v0.2.1
|
||||
Compiling ident_case v1.0.1
|
||||
Compiling powerfmt v0.2.0
|
||||
Compiling windows_x86_64_msvc v0.52.6
|
||||
Compiling time-core v0.1.8
|
||||
Compiling rand_core v0.10.0
|
||||
Compiling simd-adler32 v0.3.9
|
||||
Compiling time-macros v0.2.27
|
||||
Compiling deranged v0.5.8
|
||||
Compiling darling_core v0.20.11
|
||||
Compiling digest v0.10.7
|
||||
Compiling cpufeatures v0.3.0
|
||||
Compiling byteorder v1.5.0
|
||||
Compiling chacha20 v0.10.0
|
||||
Compiling darling_macro v0.20.11
|
||||
Compiling time v0.3.47
|
||||
Compiling regex v1.12.3
|
||||
Compiling rand_core v0.6.4
|
||||
Compiling tokio-util v0.7.18
|
||||
Compiling crc32fast v1.5.0
|
||||
Compiling parking_lot_core v0.9.12
|
||||
Compiling adler2 v2.0.1
|
||||
Compiling siphasher v1.0.2
|
||||
Compiling miniz_oxide v0.8.9
|
||||
Compiling windows-targets v0.52.6
|
||||
Compiling darling v0.20.11
|
||||
Compiling rand v0.10.0
|
||||
Compiling http-body v1.0.1
|
||||
Compiling spin v0.9.8
|
||||
Compiling heck v0.4.1
|
||||
Compiling httparse v1.10.1
|
||||
Compiling tower-service v0.3.3
|
||||
Compiling uuid v1.23.0
|
||||
Compiling serde_urlencoded v0.7.1
|
||||
Compiling zstd-sys v2.0.16+zstd.1.5.7
|
||||
Compiling httpdate v1.0.3
|
||||
Compiling flate2 v1.1.9
|
||||
Compiling phf_shared v0.11.3
|
||||
Compiling rand_chacha v0.3.1
|
||||
Compiling webpki-roots v0.26.11
|
||||
Compiling bigdecimal v0.4.10
|
||||
Compiling windows_x86_64_msvc v0.48.5
|
||||
Compiling proc-macro-hack v0.5.20+deprecated
|
||||
Compiling atomic-waker v1.1.2
|
||||
Compiling windows_x86_64_msvc v0.53.1
|
||||
Compiling rust_decimal v1.40.0
|
||||
Compiling try-lock v0.2.5
|
||||
Compiling mime v0.3.17
|
||||
Compiling lazy_static v1.5.0
|
||||
Compiling want v0.3.1
|
||||
Compiling h2 v0.4.13
|
||||
Compiling rand v0.8.5
|
||||
Compiling parking_lot v0.12.5
|
||||
Compiling windows-strings v0.5.1
|
||||
Compiling windows-result v0.4.1
|
||||
Compiling bstr v1.12.1
|
||||
Compiling tower-layer v0.3.3
|
||||
Compiling pin-utils v0.1.0
|
||||
Compiling zstd-safe v7.2.4
|
||||
Compiling alloc-no-stdlib v2.0.4
|
||||
Compiling cpufeatures v0.2.17
|
||||
Compiling rayon-core v1.13.0
|
||||
Compiling foldhash v0.1.5
|
||||
Compiling hashbrown v0.15.5
|
||||
Compiling alloc-stdlib v0.2.2
|
||||
Compiling hyper v1.8.1
|
||||
Compiling windows-registry v0.6.1
|
||||
Compiling unic-langid-impl v0.9.6
|
||||
Compiling phf_generator v0.11.3
|
||||
Compiling http-body-util v0.1.3
|
||||
Compiling windows-sys v0.59.0
|
||||
Compiling concurrent-queue v2.5.0
|
||||
Compiling sync_wrapper v1.0.2
|
||||
Compiling winapi-util v0.1.11
|
||||
Compiling parking v2.2.1
|
||||
Compiling native-tls v0.2.18
|
||||
Compiling tinyvec_macros v0.1.1
|
||||
Compiling object v0.37.3
|
||||
Compiling anyhow v1.0.102
|
||||
Compiling vcpkg v0.2.15
|
||||
Compiling winapi v0.3.9
|
||||
Compiling ipnet v2.12.0
|
||||
Compiling crc-catalog v2.4.0
|
||||
Compiling crc v3.4.0
|
||||
Compiling hyper-util v0.1.20
|
||||
Compiling libsqlite3-sys v0.30.1
|
||||
Compiling tinyvec v1.11.0
|
||||
Compiling event-listener v5.4.1
|
||||
Compiling same-file v1.0.6
|
||||
Compiling parse-zoneinfo v0.3.1
|
||||
Compiling windows-targets v0.53.5
|
||||
Compiling unic-langid-macros-impl v0.9.6
|
||||
Compiling windows-targets v0.48.5
|
||||
Compiling phf_codegen v0.11.3
|
||||
Compiling brotli-decompressor v5.0.0
|
||||
Compiling hashlink v0.10.0
|
||||
Compiling sha2 v0.10.9
|
||||
Compiling futures-intrusive v0.5.0
|
||||
Compiling phf v0.11.3
|
||||
Compiling tokio-stream v0.1.18
|
||||
Compiling crossbeam-queue v0.3.12
|
||||
Compiling ahash v0.8.12
|
||||
Compiling schannel v0.1.29
|
||||
Compiling unicase v2.9.0
|
||||
Compiling ucd-trie v0.1.7
|
||||
Compiling heck v0.5.0
|
||||
Compiling pest v2.8.6
|
||||
Compiling mime_guess v2.0.5
|
||||
Compiling sqlx-core v0.8.6
|
||||
Compiling chrono-tz-build v0.3.0
|
||||
Compiling brotli v8.0.2
|
||||
Compiling windows-sys v0.48.0
|
||||
Compiling windows-sys v0.60.2
|
||||
Compiling zstd v0.13.3
|
||||
Compiling walkdir v2.5.0
|
||||
Compiling unicode-normalization v0.1.25
|
||||
Compiling tower v0.5.3
|
||||
Compiling flume v0.11.1
|
||||
Compiling hmac v0.12.1
|
||||
Compiling md-5 v0.10.6
|
||||
Compiling atoi v2.0.0
|
||||
Compiling home v0.5.12
|
||||
Compiling encoding_rs v0.8.35
|
||||
Compiling equator-macro v0.4.2
|
||||
Compiling proc-macro-error-attr2 v2.0.0
|
||||
Compiling rustversion v1.0.22
|
||||
Compiling compression-core v0.4.31
|
||||
Compiling utf8parse v0.2.2
|
||||
Compiling anstyle v1.0.14
|
||||
Compiling unicode-bidi v0.3.18
|
||||
Compiling unicode-segmentation v1.13.2
|
||||
Compiling unicode-properties v0.1.4
|
||||
Compiling once_cell_polyfill v1.70.2
|
||||
Compiling dotenvy v0.15.7
|
||||
Compiling anstyle-wincon v3.0.11
|
||||
Compiling stringprep v0.1.5
|
||||
Compiling anstyle-parse v1.0.0
|
||||
Compiling compression-codecs v0.4.37
|
||||
Compiling proc-macro-error2 v2.0.1
|
||||
Compiling equator v0.4.2
|
||||
Compiling etcetera v0.8.0
|
||||
Compiling hkdf v0.12.4
|
||||
Compiling socks v0.3.4
|
||||
Compiling ar_archive_writer v0.5.1
|
||||
Compiling chrono-tz v0.9.0
|
||||
Compiling pest_meta v2.8.6
|
||||
Compiling rayon v1.11.0
|
||||
Compiling globset v0.4.18
|
||||
Compiling tokio-rustls v0.26.4
|
||||
Compiling futures-executor v0.3.32
|
||||
Compiling proc-macro2-diagnostics v0.10.1
|
||||
Compiling rand_core v0.9.5
|
||||
Compiling anstyle-query v1.1.5
|
||||
Compiling nom v8.0.0
|
||||
Compiling colorchoice v1.0.5
|
||||
Compiling whoami v1.6.1
|
||||
Compiling is_terminal_polyfill v1.70.2
|
||||
Compiling hex v0.4.3
|
||||
Compiling base64ct v1.8.3
|
||||
Compiling paste v1.0.15
|
||||
Compiling portable-atomic v1.13.1
|
||||
Compiling static_assertions v1.1.0
|
||||
Compiling minimal-lexical v0.2.1
|
||||
Compiling nom v7.1.3
|
||||
Compiling pem-rfc7468 v1.0.0
|
||||
Compiling sqlx-postgres v0.8.6
|
||||
Compiling anstream v1.0.0
|
||||
Compiling rand_chacha v0.9.0
|
||||
Compiling sqlx-sqlite v0.8.6
|
||||
Compiling ignore v0.4.25
|
||||
Compiling sea-query-derive v0.4.3
|
||||
Compiling pest_generator v2.8.6
|
||||
Compiling psm v0.1.30
|
||||
Compiling aligned-vec v0.6.4
|
||||
Compiling async-compression v0.4.41
|
||||
Compiling tokio-native-tls v0.3.1
|
||||
Compiling ordered-float v4.6.0
|
||||
Compiling inherent v1.0.13
|
||||
Compiling num-derive v0.4.2
|
||||
Compiling clap_lex v1.1.0
|
||||
Compiling http-range-header v0.4.2
|
||||
Compiling deunicode v1.6.2
|
||||
Compiling yansi v1.0.1
|
||||
Compiling iri-string v0.7.11
|
||||
Compiling thiserror v1.0.69
|
||||
Compiling tower-http v0.6.8
|
||||
Compiling slug v0.1.6
|
||||
Compiling clap_builder v4.6.0
|
||||
Compiling sea-query v0.32.7
|
||||
Compiling ureq-proto v0.6.0
|
||||
Compiling unic-langid-macros v0.9.6
|
||||
Compiling webpki-root-certs v1.0.6
|
||||
Compiling hyper-tls v0.6.0
|
||||
Compiling v_frame v0.3.9
|
||||
Compiling pest_derive v2.8.6
|
||||
Compiling globwalk v0.9.1
|
||||
Compiling sqlx v0.8.6
|
||||
Compiling rand v0.9.2
|
||||
Compiling der v0.8.0
|
||||
Compiling hyper-rustls v0.27.7
|
||||
Compiling clap_derive v4.6.0
|
||||
Compiling sharded-slab v0.1.7
|
||||
Compiling humansize v2.1.3
|
||||
Compiling itertools v0.14.0
|
||||
Compiling num-rational v0.4.2
|
||||
Compiling matchers v0.2.0
|
||||
Compiling tracing-serde v0.2.0
|
||||
Compiling tracing-log v0.2.0
|
||||
Compiling multer v3.1.0
|
||||
Compiling as-slice v0.2.1
|
||||
Compiling stacker v0.1.23
|
||||
Compiling fs-err v2.11.0
|
||||
Compiling nu-ansi-term v0.50.3
|
||||
Compiling thread_local v1.1.9
|
||||
Compiling thiserror-impl v1.0.69
|
||||
Compiling av-scenechange v0.14.1
|
||||
Compiling utf8-zero v0.8.1
|
||||
Compiling glob v0.3.3
|
||||
Compiling built v0.8.0
|
||||
Compiling unicode-xid v0.2.6
|
||||
Compiling derive_more-impl v2.1.1
|
||||
Compiling rav1e v0.8.1
|
||||
Compiling ureq v3.3.0
|
||||
Compiling tracing-subscriber v0.3.23
|
||||
Compiling aligned v0.4.3
|
||||
Compiling tera v1.20.1
|
||||
Compiling clap v4.6.0
|
||||
Compiling reqwest v0.12.28
|
||||
Compiling sea-query-binder v0.7.0
|
||||
Compiling unic-langid v0.9.6
|
||||
Compiling ouroboros_macro v0.18.5
|
||||
Compiling hashbrown v0.14.5
|
||||
Compiling sea-bae v0.2.1
|
||||
Compiling shared_child v1.1.1
|
||||
Compiling futures v0.3.32
|
||||
Compiling onig_sys v69.9.1
|
||||
Compiling cookie v0.18.1
|
||||
Compiling matrixmultiply v0.3.10
|
||||
Compiling os_pipe v1.2.3
|
||||
Compiling core2 v0.4.0
|
||||
Compiling profiling-procmacros v1.0.17
|
||||
Compiling arg_enum_proc_macro v0.3.4
|
||||
Compiling async-trait v0.1.89
|
||||
Compiling async-stream-impl v0.3.6
|
||||
Compiling aliasable v0.1.3
|
||||
Compiling unsafe-libyaml v0.2.11
|
||||
Compiling lzma-rust2 v0.15.7
|
||||
Compiling y4m v0.8.0
|
||||
Compiling hmac-sha256 v1.1.14
|
||||
Compiling quick-error v2.0.1
|
||||
Compiling pastey v0.1.1
|
||||
Compiling fastrand v2.3.0
|
||||
Compiling shared_thread v0.2.0
|
||||
Compiling duct v1.1.1
|
||||
Compiling ort-sys v2.0.0-rc.11
|
||||
Compiling serde_yaml v0.9.34+deprecated
|
||||
Compiling ouroboros v0.18.5
|
||||
Compiling async-stream v0.3.6
|
||||
Compiling profiling v1.0.17
|
||||
Compiling bitstream-io v4.9.0
|
||||
Compiling sea-orm-macros v1.1.19
|
||||
Compiling derive_more v2.1.1
|
||||
Compiling av1-grain v0.2.5
|
||||
Compiling maybe-rayon v0.1.1
|
||||
Compiling axum-core v0.5.6
|
||||
Compiling sea-schema-derive v0.3.0
|
||||
Compiling derive_builder_core v0.20.2
|
||||
Compiling windows-sys v0.52.0
|
||||
Compiling serde_regex v1.1.0
|
||||
Compiling cruet v0.13.3
|
||||
Compiling half v2.7.1
|
||||
Compiling crossbeam-channel v0.5.15
|
||||
Compiling serde_path_to_error v0.1.20
|
||||
Compiling toml_datetime v0.6.11
|
||||
Compiling serde_spanned v0.6.9
|
||||
Compiling fax_derive v0.2.0
|
||||
Compiling axum-macros v0.5.0
|
||||
Compiling include_dir_macros v0.7.4
|
||||
Compiling simd_helpers v0.1.0
|
||||
Compiling noop_proc_macro v0.3.0
|
||||
Compiling new_debug_unreachable v1.0.6
|
||||
Compiling matchit v0.8.4
|
||||
Compiling toml_write v0.1.2
|
||||
Compiling esaxx-rs v0.1.10
|
||||
Compiling rustc-hash v2.1.1
|
||||
Compiling winnow v0.7.15
|
||||
Compiling strum v0.26.3
|
||||
Compiling zune-core v0.5.1
|
||||
Compiling imgref v1.12.0
|
||||
Compiling unicode-width v0.2.2
|
||||
Compiling option-ext v0.2.0
|
||||
Compiling rawpointer v0.2.1
|
||||
Compiling tokio-cron-scheduler v0.11.1
|
||||
Compiling encode_unicode v1.0.0
|
||||
Compiling weezl v0.1.12
|
||||
Compiling console v0.15.11
|
||||
Compiling password-hash v0.5.0
|
||||
Compiling dirs-sys v0.5.0
|
||||
Compiling loop9 v0.1.5
|
||||
Compiling zune-jpeg v0.5.15
|
||||
Compiling sea-orm v1.1.19
|
||||
Compiling toml_edit v0.22.27
|
||||
Compiling type-map v0.5.1
|
||||
Compiling axum v0.8.8
|
||||
Compiling include_dir v0.7.4
|
||||
Compiling fax v0.2.6
|
||||
Compiling rrgen v0.5.6
|
||||
Compiling socket2 v0.5.10
|
||||
Compiling derive_builder_macro v0.20.2
|
||||
Compiling sea-schema v0.16.2
|
||||
Compiling chumsky v0.9.3
|
||||
Compiling backon v1.6.0
|
||||
Compiling sea-orm-cli v1.1.19
|
||||
Compiling castaway v0.2.4
|
||||
Compiling cron v0.12.1
|
||||
Compiling validator_derive v0.20.0
|
||||
Compiling colored v2.2.0
|
||||
Compiling combine v4.6.7
|
||||
Compiling cruet v0.14.0
|
||||
Compiling simple_asn1 v0.6.4
|
||||
Compiling blake2 v0.10.6
|
||||
Compiling zune-inflate v0.2.54
|
||||
Compiling fdeflate v0.3.7
|
||||
Compiling avif-serialize v0.8.8
|
||||
Compiling serde_html_form v0.2.8
|
||||
Compiling notify-types v2.1.0
|
||||
Compiling pem v3.0.6
|
||||
Compiling email-encoding v0.4.1
|
||||
Compiling num-complex v0.4.6
|
||||
Compiling colored v3.1.1
|
||||
Compiling quick-xml v0.38.4
|
||||
Compiling hostname v0.4.2
|
||||
Compiling monostate-impl v0.1.18
|
||||
Compiling utf8-width v0.1.8
|
||||
Compiling byteorder-lite v0.1.0
|
||||
Compiling quoted_printable v0.5.2
|
||||
Compiling base64 v0.13.1
|
||||
Compiling color_quant v1.1.0
|
||||
Compiling pxfm v0.1.28
|
||||
Compiling tagptr v0.2.0
|
||||
Compiling loco-rs v0.16.4
|
||||
Compiling macro_rules_attribute-proc_macro v0.2.2
|
||||
Compiling sha1_smol v1.0.1
|
||||
Compiling rgb v0.8.53
|
||||
Compiling bytemuck v1.25.0
|
||||
Compiling email_address v0.2.9
|
||||
Compiling bit_field v0.10.3
|
||||
Compiling btparse-stable v0.1.2
|
||||
Compiling lebe v0.5.3
|
||||
Compiling number_prefix v0.4.0
|
||||
Compiling indicatif v0.17.11
|
||||
Compiling exr v1.74.0
|
||||
Compiling backtrace_printer v1.3.0
|
||||
Compiling lettre v0.11.19
|
||||
Compiling qoi v0.4.1
|
||||
Compiling ravif v0.13.0
|
||||
Compiling redis v0.31.0
|
||||
Compiling macro_rules_attribute v0.2.2
|
||||
Compiling moka v0.12.15
|
||||
Compiling moxcms v0.8.1
|
||||
Compiling gif v0.14.1
|
||||
Compiling spm_precompiled v0.1.4
|
||||
Compiling image-webp v0.2.4
|
||||
Compiling byte-unit v4.0.19
|
||||
Compiling monostate v0.1.18
|
||||
Compiling opendal v0.54.1
|
||||
Compiling loco-gen v0.16.4
|
||||
Compiling ndarray v0.17.2
|
||||
Compiling jsonwebtoken v9.3.1
|
||||
Compiling notify v8.2.0
|
||||
Compiling axum-extra v0.10.3
|
||||
Compiling png v0.18.1
|
||||
Compiling argon2 v0.5.3
|
||||
Compiling validator v0.20.0
|
||||
Compiling compact_str v0.9.0
|
||||
Compiling sea-orm-migration v1.1.19
|
||||
Compiling onig v6.5.1
|
||||
Compiling derive_builder v0.20.2
|
||||
Compiling tiff v0.11.3
|
||||
Compiling intl-memoizer v0.5.3
|
||||
Compiling toml v0.8.23
|
||||
Compiling dirs v6.0.0
|
||||
Compiling tracing-appender v0.2.4
|
||||
Compiling duct_sh v1.0.0
|
||||
Compiling dashmap v6.1.0
|
||||
Compiling intl_pluralrules v7.0.2
|
||||
Compiling fluent-langneg v0.13.1
|
||||
Compiling rayon-cond v0.4.0
|
||||
Compiling ulid v1.2.1
|
||||
Compiling ureq v2.12.1
|
||||
Compiling tower v0.4.13
|
||||
Compiling english-to-cron v0.1.7
|
||||
Compiling fluent-syntax v0.12.0
|
||||
Compiling unicode-normalization-alignments v0.1.12
|
||||
Compiling ipnetwork v0.20.0
|
||||
Compiling serde_variant v0.1.3
|
||||
Compiling dary_heap v0.3.8
|
||||
Compiling unicode_categories v0.1.1
|
||||
Compiling self_cell v1.2.2
|
||||
Compiling semver v1.0.27
|
||||
Compiling fluent-bundle v0.16.0
|
||||
Compiling tokenizers v0.22.2
|
||||
Compiling fluent-template-macros v0.13.3
|
||||
Compiling hf-hub v0.4.3
|
||||
Compiling image v0.25.10
|
||||
Compiling ort v2.0.0-rc.11
|
||||
Compiling safetensors v0.7.0
|
||||
Compiling fastembed v5.13.0
|
||||
Compiling fluent-templates v0.13.3
|
||||
Compiling migration v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend\migration)
|
||||
Compiling termi-api v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend)
|
||||
Finished `dev` profile [unoptimized] target(s) in 8m 53s
|
||||
Running `target\debug\termi_api-cli.exe start`
|
||||
error: process didn't exit successfully: `target\debug\termi_api-cli.exe start` (exit code: 1073807364)
|
||||
@@ -1,25 +0,0 @@
|
||||
[2m2026-03-28T15:13:51.613322Z[0m [33m WARN[0m [2mloco_rs::boot[0m[2m:[0m pretty backtraces are enabled (this is great for development but has a runtime cost for production. disable with `logger.pretty_backtrace` in your config yaml)
|
||||
|
||||
▄ ▀
|
||||
▀ ▄
|
||||
▄ ▀ ▄ ▄ ▄▀
|
||||
▄ ▀▄▄
|
||||
▄ ▀ ▀ ▀▄▀█▄
|
||||
▀█▄
|
||||
▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄▄▄ ▄▄▄▄▄▄▄▄▄ ▀▀█
|
||||
██████ █████ ███ █████ ███ █████ ███ ▀█
|
||||
██████ █████ ███ █████ ▀▀▀ █████ ███ ▄█▄
|
||||
██████ █████ ███ █████ █████ ███ ████▄
|
||||
██████ █████ ███ █████ ▄▄▄ █████ ███ █████
|
||||
██████ █████ ███ ████ ███ █████ ███ ████▀
|
||||
▀▀▀██▄ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀ ██▀
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
https://loco.rs
|
||||
|
||||
environment: development
|
||||
database: automigrate
|
||||
logger: debug
|
||||
compilation: debug
|
||||
modes: server
|
||||
|
||||
listening on http://localhost:5150
|
||||
@@ -31,8 +31,6 @@ server:
|
||||
folder:
|
||||
uri: "/static"
|
||||
path: "assets/static"
|
||||
# fallback to index.html which redirects to /admin
|
||||
fallback: "assets/static/index.html"
|
||||
|
||||
# Worker Configuration
|
||||
workers:
|
||||
|
||||
59
backend/config/production.yaml
Normal file
59
backend/config/production.yaml
Normal file
@@ -0,0 +1,59 @@
|
||||
logger:
|
||||
enable: true
|
||||
pretty_backtrace: false
|
||||
level: info
|
||||
format: json
|
||||
|
||||
server:
|
||||
port: {{ get_env(name="PORT", default="5150") }}
|
||||
binding: 0.0.0.0
|
||||
host: {{ get_env(name="APP_BASE_URL", default="http://localhost:5150") }}
|
||||
middlewares:
|
||||
static:
|
||||
enable: true
|
||||
must_exist: true
|
||||
precompressed: false
|
||||
folder:
|
||||
uri: "/static"
|
||||
path: "assets/static"
|
||||
|
||||
workers:
|
||||
mode: BackgroundQueue
|
||||
|
||||
queue:
|
||||
kind: Redis
|
||||
uri: {{ get_env(name="REDIS_URL", default="redis://redis:6379") }}
|
||||
dangerously_flush: false
|
||||
|
||||
mailer:
|
||||
smtp:
|
||||
enable: {{ get_env(name="SMTP_ENABLE", default="false") }}
|
||||
host: '{{ get_env(name="SMTP_HOST", default="localhost") }}'
|
||||
port: {{ get_env(name="SMTP_PORT", default="1025") }}
|
||||
secure: {{ get_env(name="SMTP_SECURE", default="false") }}
|
||||
{% set smtp_user = get_env(name="SMTP_USER", default="") %}
|
||||
{% if smtp_user != "" %}
|
||||
auth:
|
||||
user: '{{ smtp_user }}'
|
||||
password: '{{ get_env(name="SMTP_PASSWORD", default="") }}'
|
||||
{% endif %}
|
||||
{% set smtp_hello_name = get_env(name="SMTP_HELLO_NAME", default="") %}
|
||||
{% if smtp_hello_name != "" %}
|
||||
hello_name: '{{ smtp_hello_name }}'
|
||||
{% endif %}
|
||||
|
||||
database:
|
||||
uri: {{ get_env(name="DATABASE_URL", default="postgres://termi:termi@db:5432/termi_api") }}
|
||||
enable_logging: false
|
||||
connect_timeout: {{ get_env(name="DB_CONNECT_TIMEOUT", default="500") }}
|
||||
idle_timeout: {{ get_env(name="DB_IDLE_TIMEOUT", default="500") }}
|
||||
min_connections: {{ get_env(name="DB_MIN_CONNECTIONS", default="1") }}
|
||||
max_connections: {{ get_env(name="DB_MAX_CONNECTIONS", default="10") }}
|
||||
auto_migrate: true
|
||||
dangerously_truncate: false
|
||||
dangerously_recreate: false
|
||||
|
||||
auth:
|
||||
jwt:
|
||||
secret: {{ get_env(name="JWT_SECRET", default="please-change-me") }}
|
||||
expiration: {{ get_env(name="JWT_EXPIRATION_SECONDS", default="604800") }}
|
||||
@@ -29,7 +29,6 @@ server:
|
||||
folder:
|
||||
uri: "/static"
|
||||
path: "assets/static"
|
||||
fallback: "assets/static/404.html"
|
||||
|
||||
# Worker Configuration
|
||||
workers:
|
||||
|
||||
9
backend/docker-entrypoint.sh
Normal file
9
backend/docker-entrypoint.sh
Normal file
@@ -0,0 +1,9 @@
|
||||
#!/bin/sh
|
||||
set -eu
|
||||
|
||||
if [ "${TERMI_SKIP_MIGRATIONS:-false}" != "true" ]; then
|
||||
echo "[entrypoint] running database migrations..."
|
||||
termi_api-cli -e production db migrate
|
||||
fi
|
||||
|
||||
exec "$@"
|
||||
@@ -25,6 +25,17 @@ mod m20260329_000014_create_query_events;
|
||||
mod m20260330_000015_add_image_ai_settings_to_site_settings;
|
||||
mod m20260330_000016_add_r2_media_settings_to_site_settings;
|
||||
mod m20260330_000017_add_media_storage_provider_to_site_settings;
|
||||
mod m20260331_000018_add_comment_request_metadata;
|
||||
mod m20260331_000019_create_comment_blacklist;
|
||||
mod m20260331_000020_create_comment_persona_analysis_logs;
|
||||
mod m20260331_000021_add_post_lifecycle_and_seo;
|
||||
mod m20260331_000022_add_site_settings_notifications_and_seo;
|
||||
mod m20260331_000023_create_content_events;
|
||||
mod m20260331_000024_create_admin_audit_logs;
|
||||
mod m20260331_000025_create_post_revisions;
|
||||
mod m20260331_000026_create_subscriptions;
|
||||
mod m20260331_000027_create_notification_deliveries;
|
||||
mod m20260331_000028_expand_subscriptions_and_deliveries;
|
||||
pub struct Migrator;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
@@ -54,6 +65,17 @@ impl MigratorTrait for Migrator {
|
||||
Box::new(m20260330_000015_add_image_ai_settings_to_site_settings::Migration),
|
||||
Box::new(m20260330_000016_add_r2_media_settings_to_site_settings::Migration),
|
||||
Box::new(m20260330_000017_add_media_storage_provider_to_site_settings::Migration),
|
||||
Box::new(m20260331_000018_add_comment_request_metadata::Migration),
|
||||
Box::new(m20260331_000019_create_comment_blacklist::Migration),
|
||||
Box::new(m20260331_000020_create_comment_persona_analysis_logs::Migration),
|
||||
Box::new(m20260331_000021_add_post_lifecycle_and_seo::Migration),
|
||||
Box::new(m20260331_000022_add_site_settings_notifications_and_seo::Migration),
|
||||
Box::new(m20260331_000023_create_content_events::Migration),
|
||||
Box::new(m20260331_000024_create_admin_audit_logs::Migration),
|
||||
Box::new(m20260331_000025_create_post_revisions::Migration),
|
||||
Box::new(m20260331_000026_create_subscriptions::Migration),
|
||||
Box::new(m20260331_000027_create_notification_deliveries::Migration),
|
||||
Box::new(m20260331_000028_expand_subscriptions_and_deliveries::Migration),
|
||||
// inject-above (do not remove this comment)
|
||||
]
|
||||
}
|
||||
|
||||
@@ -0,0 +1,85 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
const TABLE: &str = "comments";
|
||||
const IP_ADDRESS_COLUMN: &str = "ip_address";
|
||||
const USER_AGENT_COLUMN: &str = "user_agent";
|
||||
const REFERER_COLUMN: &str = "referer";
|
||||
const COMMENT_IP_INDEX: &str = "idx_comments_ip_address";
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
let table = Alias::new(TABLE);
|
||||
|
||||
if !manager.has_column(TABLE, IP_ADDRESS_COLUMN).await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new(IP_ADDRESS_COLUMN))
|
||||
.string()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column(TABLE, USER_AGENT_COLUMN).await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(ColumnDef::new(Alias::new(USER_AGENT_COLUMN)).text().null())
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column(TABLE, REFERER_COLUMN).await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table)
|
||||
.add_column(ColumnDef::new(Alias::new(REFERER_COLUMN)).text().null())
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
manager
|
||||
.get_connection()
|
||||
.execute_unprepared(&format!(
|
||||
"CREATE INDEX IF NOT EXISTS {COMMENT_IP_INDEX} ON {TABLE} ({IP_ADDRESS_COLUMN})"
|
||||
))
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.get_connection()
|
||||
.execute_unprepared(&format!("DROP INDEX IF EXISTS {COMMENT_IP_INDEX}"))
|
||||
.await?;
|
||||
|
||||
for column in [REFERER_COLUMN, USER_AGENT_COLUMN, IP_ADDRESS_COLUMN] {
|
||||
if manager.has_column(TABLE, column).await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Alias::new(TABLE))
|
||||
.drop_column(Alias::new(column))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,103 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Alias::new("comment_blacklist"))
|
||||
.if_not_exists()
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("id"))
|
||||
.integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("matcher_type"))
|
||||
.string()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("matcher_value"))
|
||||
.string()
|
||||
.not_null(),
|
||||
)
|
||||
.col(ColumnDef::new(Alias::new("reason")).text().null())
|
||||
.col(ColumnDef::new(Alias::new("active")).boolean().null())
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("expires_at"))
|
||||
.timestamp_with_time_zone()
|
||||
.null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("created_at"))
|
||||
.timestamp_with_time_zone()
|
||||
.not_null()
|
||||
.default(Expr::current_timestamp()),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("updated_at"))
|
||||
.timestamp_with_time_zone()
|
||||
.not_null()
|
||||
.default(Expr::current_timestamp()),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_comment_blacklist_matcher")
|
||||
.table(Alias::new("comment_blacklist"))
|
||||
.col(Alias::new("matcher_type"))
|
||||
.col(Alias::new("matcher_value"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_comment_blacklist_active_expires")
|
||||
.table(Alias::new("comment_blacklist"))
|
||||
.col(Alias::new("active"))
|
||||
.col(Alias::new("expires_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
for index_name in [
|
||||
"idx_comment_blacklist_active_expires",
|
||||
"idx_comment_blacklist_matcher",
|
||||
] {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name(index_name)
|
||||
.table(Alias::new("comment_blacklist"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
manager
|
||||
.drop_table(
|
||||
Table::drop()
|
||||
.table(Alias::new("comment_blacklist"))
|
||||
.if_exists()
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,131 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Alias::new("comment_persona_analysis_logs"))
|
||||
.if_not_exists()
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("id"))
|
||||
.integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("matcher_type"))
|
||||
.string()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("matcher_value"))
|
||||
.string()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("from_at"))
|
||||
.timestamp_with_time_zone()
|
||||
.null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("to_at"))
|
||||
.timestamp_with_time_zone()
|
||||
.null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("total_comments"))
|
||||
.integer()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("pending_comments"))
|
||||
.integer()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("distinct_posts"))
|
||||
.integer()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("analysis_text"))
|
||||
.text()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("sample_json"))
|
||||
.json_binary()
|
||||
.null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("created_at"))
|
||||
.timestamp_with_time_zone()
|
||||
.not_null()
|
||||
.default(Expr::current_timestamp()),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Alias::new("updated_at"))
|
||||
.timestamp_with_time_zone()
|
||||
.not_null()
|
||||
.default(Expr::current_timestamp()),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_comment_persona_analysis_logs_matcher_created_at")
|
||||
.table(Alias::new("comment_persona_analysis_logs"))
|
||||
.col(Alias::new("matcher_type"))
|
||||
.col(Alias::new("matcher_value"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_comment_persona_analysis_logs_created_at")
|
||||
.table(Alias::new("comment_persona_analysis_logs"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
for index_name in [
|
||||
"idx_comment_persona_analysis_logs_created_at",
|
||||
"idx_comment_persona_analysis_logs_matcher_created_at",
|
||||
] {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name(index_name)
|
||||
.table(Alias::new("comment_persona_analysis_logs"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
manager
|
||||
.drop_table(
|
||||
Table::drop()
|
||||
.table(Alias::new("comment_persona_analysis_logs"))
|
||||
.if_exists()
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,168 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
let table = Alias::new("posts");
|
||||
|
||||
if !manager.has_column("posts", "status").await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("status"))
|
||||
.string()
|
||||
.null()
|
||||
.default("published"),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column("posts", "visibility").await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("visibility"))
|
||||
.string()
|
||||
.null()
|
||||
.default("public"),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column("posts", "publish_at").await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("publish_at"))
|
||||
.timestamp_with_time_zone()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column("posts", "unpublish_at").await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("unpublish_at"))
|
||||
.timestamp_with_time_zone()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column("posts", "canonical_url").await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(ColumnDef::new(Alias::new("canonical_url")).text().null())
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column("posts", "noindex").await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("noindex"))
|
||||
.boolean()
|
||||
.null()
|
||||
.default(false),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column("posts", "og_image").await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(ColumnDef::new(Alias::new("og_image")).text().null())
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column("posts", "redirect_from").await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("redirect_from"))
|
||||
.json_binary()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager.has_column("posts", "redirect_to").await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(ColumnDef::new(Alias::new("redirect_to")).text().null())
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
let table = Alias::new("posts");
|
||||
|
||||
for column in [
|
||||
"redirect_to",
|
||||
"redirect_from",
|
||||
"og_image",
|
||||
"noindex",
|
||||
"canonical_url",
|
||||
"unpublish_at",
|
||||
"publish_at",
|
||||
"visibility",
|
||||
"status",
|
||||
] {
|
||||
if manager.has_column("posts", column).await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.drop_column(Alias::new(column))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,149 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
let table = Alias::new("site_settings");
|
||||
|
||||
if !manager
|
||||
.has_column("site_settings", "seo_default_og_image")
|
||||
.await?
|
||||
{
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("seo_default_og_image"))
|
||||
.text()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager
|
||||
.has_column("site_settings", "seo_default_twitter_handle")
|
||||
.await?
|
||||
{
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("seo_default_twitter_handle"))
|
||||
.string()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager
|
||||
.has_column("site_settings", "notification_webhook_url")
|
||||
.await?
|
||||
{
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("notification_webhook_url"))
|
||||
.text()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager
|
||||
.has_column("site_settings", "notification_comment_enabled")
|
||||
.await?
|
||||
{
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("notification_comment_enabled"))
|
||||
.boolean()
|
||||
.null()
|
||||
.default(false),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager
|
||||
.has_column("site_settings", "notification_friend_link_enabled")
|
||||
.await?
|
||||
{
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("notification_friend_link_enabled"))
|
||||
.boolean()
|
||||
.null()
|
||||
.default(false),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !manager
|
||||
.has_column("site_settings", "search_synonyms")
|
||||
.await?
|
||||
{
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.add_column(
|
||||
ColumnDef::new(Alias::new("search_synonyms"))
|
||||
.json_binary()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
let table = Alias::new("site_settings");
|
||||
|
||||
for column in [
|
||||
"search_synonyms",
|
||||
"notification_friend_link_enabled",
|
||||
"notification_comment_enabled",
|
||||
"notification_webhook_url",
|
||||
"seo_default_twitter_handle",
|
||||
"seo_default_og_image",
|
||||
] {
|
||||
if manager.has_column("site_settings", column).await? {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(table.clone())
|
||||
.drop_column(Alias::new(column))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,82 @@
|
||||
use loco_rs::schema::*;
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
create_table(
|
||||
manager,
|
||||
"content_events",
|
||||
&[
|
||||
("id", ColType::PkAuto),
|
||||
("event_type", ColType::String),
|
||||
("path", ColType::String),
|
||||
("post_slug", ColType::StringNull),
|
||||
("session_id", ColType::StringNull),
|
||||
("referrer", ColType::StringNull),
|
||||
("user_agent", ColType::TextNull),
|
||||
("duration_ms", ColType::IntegerNull),
|
||||
("progress_percent", ColType::IntegerNull),
|
||||
("metadata", ColType::JsonBinaryNull),
|
||||
],
|
||||
&[],
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_content_events_event_type_created_at")
|
||||
.table(Alias::new("content_events"))
|
||||
.col(Alias::new("event_type"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_content_events_post_slug_created_at")
|
||||
.table(Alias::new("content_events"))
|
||||
.col(Alias::new("post_slug"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_content_events_referrer")
|
||||
.table(Alias::new("content_events"))
|
||||
.col(Alias::new("referrer"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
for index_name in [
|
||||
"idx_content_events_referrer",
|
||||
"idx_content_events_post_slug_created_at",
|
||||
"idx_content_events_event_type_created_at",
|
||||
] {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name(index_name)
|
||||
.table(Alias::new("content_events"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
drop_table(manager, "content_events").await
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
use loco_rs::schema::*;
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
create_table(
|
||||
manager,
|
||||
"admin_audit_logs",
|
||||
&[
|
||||
("id", ColType::PkAuto),
|
||||
("actor_username", ColType::StringNull),
|
||||
("actor_email", ColType::StringNull),
|
||||
("actor_source", ColType::StringNull),
|
||||
("action", ColType::String),
|
||||
("target_type", ColType::String),
|
||||
("target_id", ColType::StringNull),
|
||||
("target_label", ColType::StringNull),
|
||||
("metadata", ColType::JsonBinaryNull),
|
||||
],
|
||||
&[],
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_admin_audit_logs_action_created_at")
|
||||
.table(Alias::new("admin_audit_logs"))
|
||||
.col(Alias::new("action"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_admin_audit_logs_target_type_created_at")
|
||||
.table(Alias::new("admin_audit_logs"))
|
||||
.col(Alias::new("target_type"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
for index_name in [
|
||||
"idx_admin_audit_logs_target_type_created_at",
|
||||
"idx_admin_audit_logs_action_created_at",
|
||||
] {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name(index_name)
|
||||
.table(Alias::new("admin_audit_logs"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
drop_table(manager, "admin_audit_logs").await
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
use loco_rs::schema::*;
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
create_table(
|
||||
manager,
|
||||
"post_revisions",
|
||||
&[
|
||||
("id", ColType::PkAuto),
|
||||
("post_slug", ColType::String),
|
||||
("post_title", ColType::StringNull),
|
||||
("operation", ColType::String),
|
||||
("revision_reason", ColType::TextNull),
|
||||
("actor_username", ColType::StringNull),
|
||||
("actor_email", ColType::StringNull),
|
||||
("actor_source", ColType::StringNull),
|
||||
("markdown", ColType::TextNull),
|
||||
("metadata", ColType::JsonBinaryNull),
|
||||
],
|
||||
&[],
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_post_revisions_post_slug_created_at")
|
||||
.table(Alias::new("post_revisions"))
|
||||
.col(Alias::new("post_slug"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_post_revisions_operation_created_at")
|
||||
.table(Alias::new("post_revisions"))
|
||||
.col(Alias::new("operation"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
for index_name in [
|
||||
"idx_post_revisions_operation_created_at",
|
||||
"idx_post_revisions_post_slug_created_at",
|
||||
] {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name(index_name)
|
||||
.table(Alias::new("post_revisions"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
drop_table(manager, "post_revisions").await
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
use loco_rs::schema::*;
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
create_table(
|
||||
manager,
|
||||
"subscriptions",
|
||||
&[
|
||||
("id", ColType::PkAuto),
|
||||
("channel_type", ColType::String),
|
||||
("target", ColType::String),
|
||||
("display_name", ColType::StringNull),
|
||||
("status", ColType::String),
|
||||
("filters", ColType::JsonBinaryNull),
|
||||
("secret", ColType::TextNull),
|
||||
("notes", ColType::TextNull),
|
||||
("verified_at", ColType::StringNull),
|
||||
("last_notified_at", ColType::StringNull),
|
||||
("failure_count", ColType::IntegerNull),
|
||||
("last_delivery_status", ColType::StringNull),
|
||||
],
|
||||
&[],
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_subscriptions_channel_status")
|
||||
.table(Alias::new("subscriptions"))
|
||||
.col(Alias::new("channel_type"))
|
||||
.col(Alias::new("status"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_subscriptions_channel_target_unique")
|
||||
.table(Alias::new("subscriptions"))
|
||||
.col(Alias::new("channel_type"))
|
||||
.col(Alias::new("target"))
|
||||
.unique()
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
for index_name in [
|
||||
"idx_subscriptions_channel_target_unique",
|
||||
"idx_subscriptions_channel_status",
|
||||
] {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name(index_name)
|
||||
.table(Alias::new("subscriptions"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
drop_table(manager, "subscriptions").await
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
use loco_rs::schema::*;
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
create_table(
|
||||
manager,
|
||||
"notification_deliveries",
|
||||
&[
|
||||
("id", ColType::PkAuto),
|
||||
("subscription_id", ColType::IntegerNull),
|
||||
("channel_type", ColType::String),
|
||||
("target", ColType::String),
|
||||
("event_type", ColType::String),
|
||||
("status", ColType::String),
|
||||
("provider", ColType::StringNull),
|
||||
("response_text", ColType::TextNull),
|
||||
("payload", ColType::JsonBinaryNull),
|
||||
("delivered_at", ColType::StringNull),
|
||||
],
|
||||
&[],
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_notification_deliveries_event_created_at")
|
||||
.table(Alias::new("notification_deliveries"))
|
||||
.col(Alias::new("event_type"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_notification_deliveries_subscription_created_at")
|
||||
.table(Alias::new("notification_deliveries"))
|
||||
.col(Alias::new("subscription_id"))
|
||||
.col(Alias::new("created_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
for index_name in [
|
||||
"idx_notification_deliveries_subscription_created_at",
|
||||
"idx_notification_deliveries_event_created_at",
|
||||
] {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name(index_name)
|
||||
.table(Alias::new("notification_deliveries"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
drop_table(manager, "notification_deliveries").await
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,144 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[derive(DeriveMigrationName)]
|
||||
pub struct Migration;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Alias::new("subscriptions"))
|
||||
.add_column_if_not_exists(
|
||||
ColumnDef::new(Alias::new("confirm_token"))
|
||||
.string()
|
||||
.null(),
|
||||
)
|
||||
.add_column_if_not_exists(
|
||||
ColumnDef::new(Alias::new("manage_token"))
|
||||
.string()
|
||||
.null(),
|
||||
)
|
||||
.add_column_if_not_exists(
|
||||
ColumnDef::new(Alias::new("metadata"))
|
||||
.json_binary()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_subscriptions_confirm_token_unique")
|
||||
.table(Alias::new("subscriptions"))
|
||||
.col(Alias::new("confirm_token"))
|
||||
.unique()
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_subscriptions_manage_token_unique")
|
||||
.table(Alias::new("subscriptions"))
|
||||
.col(Alias::new("manage_token"))
|
||||
.unique()
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Alias::new("notification_deliveries"))
|
||||
.add_column_if_not_exists(
|
||||
ColumnDef::new(Alias::new("attempts_count"))
|
||||
.integer()
|
||||
.not_null()
|
||||
.default(0),
|
||||
)
|
||||
.add_column_if_not_exists(
|
||||
ColumnDef::new(Alias::new("next_retry_at"))
|
||||
.string()
|
||||
.null(),
|
||||
)
|
||||
.add_column_if_not_exists(
|
||||
ColumnDef::new(Alias::new("last_attempt_at"))
|
||||
.string()
|
||||
.null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.create_index(
|
||||
Index::create()
|
||||
.name("idx_notification_deliveries_status_next_retry")
|
||||
.table(Alias::new("notification_deliveries"))
|
||||
.col(Alias::new("status"))
|
||||
.col(Alias::new("next_retry_at"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name("idx_notification_deliveries_status_next_retry")
|
||||
.table(Alias::new("notification_deliveries"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Alias::new("notification_deliveries"))
|
||||
.drop_column(Alias::new("last_attempt_at"))
|
||||
.drop_column(Alias::new("next_retry_at"))
|
||||
.drop_column(Alias::new("attempts_count"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name("idx_subscriptions_manage_token_unique")
|
||||
.table(Alias::new("subscriptions"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.drop_index(
|
||||
Index::drop()
|
||||
.name("idx_subscriptions_confirm_token_unique")
|
||||
.table(Alias::new("subscriptions"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
manager
|
||||
.alter_table(
|
||||
Table::alter()
|
||||
.table(Alias::new("subscriptions"))
|
||||
.drop_column(Alias::new("metadata"))
|
||||
.drop_column(Alias::new("manage_token"))
|
||||
.drop_column(Alias::new("confirm_token"))
|
||||
.to_owned(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -1,3 +0,0 @@
|
||||
Compiling termi-api v0.1.0 (D:\dev\frontend\svelte\termi-astro\backend)
|
||||
Finished `dev` profile [unoptimized + debuginfo] target(s) in 17.22s
|
||||
Running `target\debug\termi_api-cli.exe start`
|
||||
@@ -25,7 +25,7 @@ use crate::{
|
||||
ai_chunks, categories, comments, friend_links, posts, reviews, site_settings, tags, users,
|
||||
},
|
||||
tasks,
|
||||
workers::downloader::DownloadWorker,
|
||||
workers::{downloader::DownloadWorker, notification_delivery::NotificationDeliveryWorker},
|
||||
};
|
||||
|
||||
pub struct App;
|
||||
@@ -54,16 +54,14 @@ impl Hooks for App {
|
||||
}
|
||||
|
||||
async fn initializers(_ctx: &AppContext) -> Result<Vec<Box<dyn Initializer>>> {
|
||||
Ok(vec![
|
||||
Box::new(initializers::content_sync::ContentSyncInitializer),
|
||||
Box::new(initializers::view_engine::ViewEngineInitializer),
|
||||
])
|
||||
Ok(vec![Box::new(initializers::content_sync::ContentSyncInitializer)])
|
||||
}
|
||||
|
||||
fn routes(_ctx: &AppContext) -> AppRoutes {
|
||||
AppRoutes::with_default_routes() // controller routes below
|
||||
.add_route(controllers::admin::routes())
|
||||
.add_route(controllers::health::routes())
|
||||
.add_route(controllers::admin_api::routes())
|
||||
.add_route(controllers::admin_ops::routes())
|
||||
.add_route(controllers::review::routes())
|
||||
.add_route(controllers::category::routes())
|
||||
.add_route(controllers::friend_link::routes())
|
||||
@@ -71,9 +69,11 @@ impl Hooks for App {
|
||||
.add_route(controllers::comment::routes())
|
||||
.add_route(controllers::post::routes())
|
||||
.add_route(controllers::search::routes())
|
||||
.add_route(controllers::content_analytics::routes())
|
||||
.add_route(controllers::site_settings::routes())
|
||||
.add_route(controllers::ai::routes())
|
||||
.add_route(controllers::auth::routes())
|
||||
.add_route(controllers::subscription::routes())
|
||||
}
|
||||
async fn after_routes(router: AxumRouter, _ctx: &AppContext) -> Result<AxumRouter> {
|
||||
let cors = CorsLayer::new()
|
||||
@@ -91,11 +91,15 @@ impl Hooks for App {
|
||||
}
|
||||
async fn connect_workers(ctx: &AppContext, queue: &Queue) -> Result<()> {
|
||||
queue.register(DownloadWorker::build(ctx)).await?;
|
||||
queue.register(NotificationDeliveryWorker::build(ctx)).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(unused_variables)]
|
||||
fn register_tasks(tasks: &mut Tasks) {
|
||||
tasks.register(tasks::retry_deliveries::RetryDeliveries);
|
||||
tasks.register(tasks::send_weekly_digest::SendWeeklyDigest);
|
||||
tasks.register(tasks::send_monthly_digest::SendMonthlyDigest);
|
||||
// tasks-inject (do not remove)
|
||||
}
|
||||
async fn seed(ctx: &AppContext, base: &Path) -> Result<()> {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
455
backend/src/controllers/admin_ops.rs
Normal file
455
backend/src/controllers/admin_ops.rs
Normal file
@@ -0,0 +1,455 @@
|
||||
use axum::http::HeaderMap;
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, Order, QueryFilter, QueryOrder,
|
||||
QuerySelect, Set,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
controllers::admin::check_auth,
|
||||
models::_entities::{
|
||||
admin_audit_logs, notification_deliveries, post_revisions, subscriptions,
|
||||
},
|
||||
services::{admin_audit, post_revisions as revision_service, subscriptions as subscription_service},
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct AuditLogQuery {
|
||||
pub action: Option<String>,
|
||||
pub target_type: Option<String>,
|
||||
pub limit: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct RevisionQuery {
|
||||
pub slug: Option<String>,
|
||||
pub limit: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct DeliveriesQuery {
|
||||
pub limit: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct SubscriptionPayload {
|
||||
#[serde(alias = "channelType")]
|
||||
pub channel_type: String,
|
||||
pub target: String,
|
||||
#[serde(default, alias = "displayName")]
|
||||
pub display_name: Option<String>,
|
||||
#[serde(default)]
|
||||
pub status: Option<String>,
|
||||
#[serde(default)]
|
||||
pub filters: Option<serde_json::Value>,
|
||||
#[serde(default)]
|
||||
pub metadata: Option<serde_json::Value>,
|
||||
#[serde(default)]
|
||||
pub secret: Option<String>,
|
||||
#[serde(default)]
|
||||
pub notes: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct SubscriptionUpdatePayload {
|
||||
#[serde(default, alias = "channelType")]
|
||||
pub channel_type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub target: Option<String>,
|
||||
#[serde(default, alias = "displayName")]
|
||||
pub display_name: Option<String>,
|
||||
#[serde(default)]
|
||||
pub status: Option<String>,
|
||||
#[serde(default)]
|
||||
pub filters: Option<serde_json::Value>,
|
||||
#[serde(default)]
|
||||
pub metadata: Option<serde_json::Value>,
|
||||
#[serde(default)]
|
||||
pub secret: Option<String>,
|
||||
#[serde(default)]
|
||||
pub notes: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct RestoreRevisionRequest {
|
||||
#[serde(default)]
|
||||
pub mode: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct DigestDispatchRequest {
|
||||
pub period: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct PostRevisionListItem {
|
||||
pub id: i32,
|
||||
pub post_slug: String,
|
||||
pub post_title: Option<String>,
|
||||
pub operation: String,
|
||||
pub revision_reason: Option<String>,
|
||||
pub actor_username: Option<String>,
|
||||
pub actor_email: Option<String>,
|
||||
pub actor_source: Option<String>,
|
||||
pub created_at: String,
|
||||
pub has_markdown: bool,
|
||||
pub metadata: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct PostRevisionDetailResponse {
|
||||
#[serde(flatten)]
|
||||
pub item: PostRevisionListItem,
|
||||
pub markdown: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct RestoreRevisionResponse {
|
||||
pub restored: bool,
|
||||
pub revision_id: i32,
|
||||
pub post_slug: String,
|
||||
pub mode: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct SubscriptionListResponse {
|
||||
pub subscriptions: Vec<subscriptions::Model>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct DeliveryListResponse {
|
||||
pub deliveries: Vec<notification_deliveries::Model>,
|
||||
}
|
||||
|
||||
fn trim_to_option(value: Option<String>) -> Option<String> {
|
||||
value.and_then(|item| {
|
||||
let trimmed = item.trim().to_string();
|
||||
if trimmed.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(trimmed)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn format_revision(item: post_revisions::Model) -> PostRevisionListItem {
|
||||
PostRevisionListItem {
|
||||
id: item.id,
|
||||
post_slug: item.post_slug,
|
||||
post_title: item.post_title,
|
||||
operation: item.operation,
|
||||
revision_reason: item.revision_reason,
|
||||
actor_username: item.actor_username,
|
||||
actor_email: item.actor_email,
|
||||
actor_source: item.actor_source,
|
||||
created_at: item.created_at.format("%Y-%m-%d %H:%M:%S").to_string(),
|
||||
has_markdown: item.markdown.as_deref().map(str::trim).filter(|value| !value.is_empty()).is_some(),
|
||||
metadata: item.metadata,
|
||||
}
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn list_audit_logs(
|
||||
headers: HeaderMap,
|
||||
Query(query): Query<AuditLogQuery>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
|
||||
let mut db_query = admin_audit_logs::Entity::find().order_by(admin_audit_logs::Column::CreatedAt, Order::Desc);
|
||||
|
||||
if let Some(action) = query.action.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
|
||||
db_query = db_query.filter(admin_audit_logs::Column::Action.eq(action));
|
||||
}
|
||||
|
||||
if let Some(target_type) = query.target_type.map(|value| value.trim().to_string()).filter(|value| !value.is_empty()) {
|
||||
db_query = db_query.filter(admin_audit_logs::Column::TargetType.eq(target_type));
|
||||
}
|
||||
|
||||
format::json(db_query.limit(query.limit.unwrap_or(80)).all(&ctx.db).await?)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn list_post_revisions(
|
||||
headers: HeaderMap,
|
||||
Query(query): Query<RevisionQuery>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
let items = revision_service::list_revisions(&ctx, query.slug.as_deref(), query.limit.unwrap_or(120)).await?;
|
||||
format::json(items.into_iter().map(format_revision).collect::<Vec<_>>())
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn get_post_revision(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
let item = revision_service::get_revision(&ctx, id).await?;
|
||||
format::json(PostRevisionDetailResponse {
|
||||
item: format_revision(item.clone()),
|
||||
markdown: item.markdown,
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn restore_post_revision(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<RestoreRevisionRequest>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let mode = payload.mode.unwrap_or_else(|| "full".to_string());
|
||||
let restored =
|
||||
revision_service::restore_revision(&ctx, Some(&actor), id, &mode).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"post.revision.restore",
|
||||
"post_revision",
|
||||
Some(restored.id.to_string()),
|
||||
Some(restored.post_slug.clone()),
|
||||
Some(serde_json::json!({
|
||||
"post_slug": restored.post_slug,
|
||||
"source_revision_id": id,
|
||||
"mode": mode,
|
||||
})),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(RestoreRevisionResponse {
|
||||
restored: true,
|
||||
revision_id: id,
|
||||
post_slug: restored.post_slug,
|
||||
mode,
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn list_subscriptions(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
format::json(SubscriptionListResponse {
|
||||
subscriptions: subscription_service::list_subscriptions(&ctx, None, None).await?,
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn list_subscription_deliveries(
|
||||
headers: HeaderMap,
|
||||
Query(query): Query<DeliveriesQuery>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
format::json(DeliveryListResponse {
|
||||
deliveries: subscription_service::list_recent_deliveries(&ctx, query.limit.unwrap_or(80)).await?,
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn create_subscription(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<SubscriptionPayload>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
|
||||
let channel_type = subscription_service::normalize_channel_type(&payload.channel_type);
|
||||
let target = payload.target.trim().to_string();
|
||||
if target.is_empty() {
|
||||
return Err(Error::BadRequest("target 不能为空".to_string()));
|
||||
}
|
||||
|
||||
let created = subscriptions::ActiveModel {
|
||||
channel_type: Set(channel_type.clone()),
|
||||
target: Set(target.clone()),
|
||||
display_name: Set(trim_to_option(payload.display_name)),
|
||||
status: Set(subscription_service::normalize_status(payload.status.as_deref().unwrap_or("active"))),
|
||||
filters: Set(subscription_service::normalize_filters(payload.filters)),
|
||||
metadata: Set(payload.metadata),
|
||||
secret: Set(trim_to_option(payload.secret)),
|
||||
notes: Set(trim_to_option(payload.notes)),
|
||||
confirm_token: Set(None),
|
||||
manage_token: Set(Some(subscription_service::generate_subscription_token())),
|
||||
verified_at: Set(Some(chrono::Utc::now().to_rfc3339())),
|
||||
failure_count: Set(Some(0)),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&ctx.db)
|
||||
.await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"subscription.create",
|
||||
"subscription",
|
||||
Some(created.id.to_string()),
|
||||
Some(format!("{}:{}", created.channel_type, created.target)),
|
||||
Some(serde_json::json!({ "channel_type": created.channel_type, "target": created.target })),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(created)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn update_subscription(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<SubscriptionUpdatePayload>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
|
||||
let item = subscriptions::Entity::find_by_id(id)
|
||||
.one(&ctx.db)
|
||||
.await?
|
||||
.ok_or(Error::NotFound)?;
|
||||
let mut active = item.clone().into_active_model();
|
||||
|
||||
if let Some(channel_type) = payload.channel_type {
|
||||
active.channel_type = Set(subscription_service::normalize_channel_type(&channel_type));
|
||||
}
|
||||
if let Some(target) = payload.target {
|
||||
let normalized_target = target.trim().to_string();
|
||||
if normalized_target.is_empty() {
|
||||
return Err(Error::BadRequest("target 不能为空".to_string()));
|
||||
}
|
||||
active.target = Set(normalized_target);
|
||||
}
|
||||
if payload.display_name.is_some() {
|
||||
active.display_name = Set(trim_to_option(payload.display_name));
|
||||
}
|
||||
if let Some(status) = payload.status {
|
||||
active.status = Set(subscription_service::normalize_status(&status));
|
||||
}
|
||||
if payload.filters.is_some() {
|
||||
active.filters = Set(subscription_service::normalize_filters(payload.filters));
|
||||
}
|
||||
if payload.metadata.is_some() {
|
||||
active.metadata = Set(payload.metadata);
|
||||
}
|
||||
if payload.secret.is_some() {
|
||||
active.secret = Set(trim_to_option(payload.secret));
|
||||
}
|
||||
if payload.notes.is_some() {
|
||||
active.notes = Set(trim_to_option(payload.notes));
|
||||
}
|
||||
|
||||
let updated = active.update(&ctx.db).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"subscription.update",
|
||||
"subscription",
|
||||
Some(updated.id.to_string()),
|
||||
Some(format!("{}:{}", updated.channel_type, updated.target)),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(updated)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn delete_subscription(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let item = subscriptions::Entity::find_by_id(id)
|
||||
.one(&ctx.db)
|
||||
.await?
|
||||
.ok_or(Error::NotFound)?;
|
||||
let label = format!("{}:{}", item.channel_type, item.target);
|
||||
item.delete(&ctx.db).await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"subscription.delete",
|
||||
"subscription",
|
||||
Some(id.to_string()),
|
||||
Some(label),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::empty()
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn test_subscription(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let item = subscriptions::Entity::find_by_id(id)
|
||||
.one(&ctx.db)
|
||||
.await?
|
||||
.ok_or(Error::NotFound)?;
|
||||
|
||||
let delivery = subscription_service::send_test_notification(&ctx, &item).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"subscription.test",
|
||||
"subscription",
|
||||
Some(item.id.to_string()),
|
||||
Some(format!("{}:{}", item.channel_type, item.target)),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(serde_json::json!({ "queued": true, "id": item.id, "delivery_id": delivery.id }))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn send_subscription_digest(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<DigestDispatchRequest>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let summary = subscription_service::send_digest(&ctx, payload.period.as_deref().unwrap_or("weekly")).await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"subscription.digest.send",
|
||||
"subscription_digest",
|
||||
None,
|
||||
Some(summary.period.clone()),
|
||||
Some(serde_json::json!({
|
||||
"period": summary.period,
|
||||
"post_count": summary.post_count,
|
||||
"queued": summary.queued,
|
||||
"skipped": summary.skipped,
|
||||
})),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(summary)
|
||||
}
|
||||
|
||||
pub fn routes() -> Routes {
|
||||
Routes::new()
|
||||
.prefix("/api/admin")
|
||||
.add("/audit-logs", get(list_audit_logs))
|
||||
.add("/post-revisions", get(list_post_revisions))
|
||||
.add("/post-revisions/{id}", get(get_post_revision))
|
||||
.add("/post-revisions/{id}/restore", post(restore_post_revision))
|
||||
.add("/subscriptions", get(list_subscriptions).post(create_subscription))
|
||||
.add("/subscriptions/deliveries", get(list_subscription_deliveries))
|
||||
.add("/subscriptions/digest", post(send_subscription_digest))
|
||||
.add("/subscriptions/{id}", patch(update_subscription).delete(delete_subscription))
|
||||
.add("/subscriptions/{id}/test", post(test_subscription))
|
||||
}
|
||||
@@ -16,7 +16,7 @@ use std::time::Instant;
|
||||
|
||||
use crate::{
|
||||
controllers::{admin::check_auth, site_settings},
|
||||
services::{ai, analytics},
|
||||
services::{abuse_guard, ai, analytics},
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
@@ -212,6 +212,11 @@ pub async fn ask(
|
||||
let started_at = Instant::now();
|
||||
let question = payload.question.trim().to_string();
|
||||
let (provider, chat_model) = current_provider_metadata(&ctx).await;
|
||||
abuse_guard::enforce_public_scope(
|
||||
"ai_ask",
|
||||
abuse_guard::detect_client_ip(&headers).as_deref(),
|
||||
Some(&question),
|
||||
)?;
|
||||
|
||||
match ai::answer_question(&ctx, &payload.question).await {
|
||||
Ok(result) => {
|
||||
@@ -263,6 +268,11 @@ pub async fn ask_stream(
|
||||
let request_headers = headers.clone();
|
||||
let question = payload.question.trim().to_string();
|
||||
let (fallback_provider, fallback_chat_model) = current_provider_metadata(&ctx).await;
|
||||
abuse_guard::enforce_public_scope(
|
||||
"ai_stream",
|
||||
abuse_guard::detect_client_ip(&headers).as_deref(),
|
||||
Some(&question),
|
||||
)?;
|
||||
|
||||
let stream = stream! {
|
||||
let started_at = Instant::now();
|
||||
@@ -503,8 +513,8 @@ pub async fn ask_stream(
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn reindex(State(ctx): State<AppContext>) -> Result<Response> {
|
||||
check_auth()?;
|
||||
pub async fn reindex(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
let summary = ai::rebuild_index(&ctx).await?;
|
||||
|
||||
format::json(ReindexResponse {
|
||||
|
||||
@@ -5,11 +5,23 @@ use loco_rs::prelude::*;
|
||||
use sea_orm::{ColumnTrait, QueryFilter, QueryOrder};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::BTreeMap;
|
||||
use std::net::SocketAddr;
|
||||
|
||||
use axum::{
|
||||
extract::{rejection::ExtensionRejection, ConnectInfo},
|
||||
http::{header, HeaderMap},
|
||||
};
|
||||
|
||||
use crate::models::_entities::{
|
||||
comments::{ActiveModel, Column, Entity, Model},
|
||||
posts,
|
||||
};
|
||||
use crate::services::{
|
||||
admin_audit,
|
||||
comment_guard::{self, CommentGuardInput},
|
||||
notifications,
|
||||
};
|
||||
use crate::controllers::admin::check_auth;
|
||||
|
||||
const ARTICLE_SCOPE: &str = "article";
|
||||
const PARAGRAPH_SCOPE: &str = "paragraph";
|
||||
@@ -106,6 +118,12 @@ pub struct CreateCommentRequest {
|
||||
pub paragraph_excerpt: Option<String>,
|
||||
#[serde(default)]
|
||||
pub approved: Option<bool>,
|
||||
#[serde(default, alias = "captchaToken")]
|
||||
pub captcha_token: Option<String>,
|
||||
#[serde(default, alias = "captchaAnswer")]
|
||||
pub captcha_answer: Option<String>,
|
||||
#[serde(default)]
|
||||
pub website: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
@@ -125,6 +143,50 @@ fn normalize_optional_string(value: Option<String>) -> Option<String> {
|
||||
})
|
||||
}
|
||||
|
||||
fn normalize_with_limit(value: Option<&str>, max_chars: usize) -> Option<String> {
|
||||
value.and_then(|item| {
|
||||
let trimmed = item.trim();
|
||||
if trimmed.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(trimmed.chars().take(max_chars).collect::<String>())
|
||||
})
|
||||
}
|
||||
|
||||
fn header_value<'a>(headers: &'a HeaderMap, key: header::HeaderName) -> Option<&'a str> {
|
||||
headers.get(key).and_then(|value| value.to_str().ok())
|
||||
}
|
||||
|
||||
fn first_forwarded_ip(value: &str) -> Option<&str> {
|
||||
value
|
||||
.split(',')
|
||||
.map(str::trim)
|
||||
.find(|item| !item.is_empty())
|
||||
}
|
||||
|
||||
fn detect_client_ip(
|
||||
headers: &HeaderMap,
|
||||
connect_info: Option<&ConnectInfo<SocketAddr>>,
|
||||
) -> Option<String> {
|
||||
let forwarded = header_value(headers, header::HeaderName::from_static("x-forwarded-for"))
|
||||
.and_then(first_forwarded_ip);
|
||||
let real_ip = header_value(headers, header::HeaderName::from_static("x-real-ip"));
|
||||
let cf_connecting_ip =
|
||||
header_value(headers, header::HeaderName::from_static("cf-connecting-ip"));
|
||||
let true_client_ip = header_value(headers, header::HeaderName::from_static("true-client-ip"));
|
||||
let remote_addr = connect_info.map(|addr| addr.0.ip().to_string());
|
||||
|
||||
normalize_with_limit(
|
||||
forwarded
|
||||
.or(real_ip)
|
||||
.or(cf_connecting_ip)
|
||||
.or(true_client_ip)
|
||||
.or(remote_addr.as_deref()),
|
||||
96,
|
||||
)
|
||||
}
|
||||
|
||||
fn normalized_scope(value: Option<String>) -> Result<String> {
|
||||
match value
|
||||
.unwrap_or_else(|| ARTICLE_SCOPE.to_string())
|
||||
@@ -171,7 +233,12 @@ async fn resolve_post_slug(ctx: &AppContext, raw: &str) -> Result<Option<String>
|
||||
pub async fn list(
|
||||
Query(query): Query<ListQuery>,
|
||||
State(ctx): State<AppContext>,
|
||||
headers: HeaderMap,
|
||||
) -> Result<Response> {
|
||||
if query.approved != Some(true) {
|
||||
check_auth(&headers)?;
|
||||
}
|
||||
|
||||
let mut db_query = Entity::find().order_by_asc(Column::CreatedAt);
|
||||
|
||||
let post_slug = if let Some(post_slug) = query.post_slug {
|
||||
@@ -252,9 +319,22 @@ pub async fn paragraph_summary(
|
||||
format::json(summary)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn captcha_challenge(
|
||||
headers: HeaderMap,
|
||||
connect_info: Result<ConnectInfo<SocketAddr>, ExtensionRejection>,
|
||||
) -> Result<Response> {
|
||||
let ip_address = detect_client_ip(&headers, connect_info.as_ref().ok());
|
||||
format::json(comment_guard::create_captcha_challenge(
|
||||
ip_address.as_deref(),
|
||||
)?)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn add(
|
||||
State(ctx): State<AppContext>,
|
||||
headers: HeaderMap,
|
||||
connect_info: Result<ConnectInfo<SocketAddr>, ExtensionRejection>,
|
||||
Json(params): Json<CreateCommentRequest>,
|
||||
) -> Result<Response> {
|
||||
let scope = normalized_scope(params.scope.clone())?;
|
||||
@@ -271,6 +351,9 @@ pub async fn add(
|
||||
let email = normalize_optional_string(params.email);
|
||||
let avatar = normalize_optional_string(params.avatar);
|
||||
let content = normalize_optional_string(params.content);
|
||||
let ip_address = detect_client_ip(&headers, connect_info.as_ref().ok());
|
||||
let user_agent = normalize_with_limit(header_value(&headers, header::USER_AGENT), 512);
|
||||
let referer = normalize_with_limit(header_value(&headers, header::REFERER), 1024);
|
||||
let paragraph_key = normalize_optional_string(params.paragraph_key);
|
||||
let paragraph_excerpt = normalize_optional_string(params.paragraph_excerpt)
|
||||
.or_else(|| content.as_deref().and_then(preview_excerpt));
|
||||
@@ -291,6 +374,21 @@ pub async fn add(
|
||||
return Err(Error::BadRequest("paragraph_key is required".to_string()));
|
||||
}
|
||||
|
||||
comment_guard::enforce_comment_guard(
|
||||
&ctx,
|
||||
&CommentGuardInput {
|
||||
ip_address: ip_address.as_deref(),
|
||||
email: email.as_deref(),
|
||||
user_agent: user_agent.as_deref(),
|
||||
author: author.as_deref(),
|
||||
content: content.as_deref(),
|
||||
honeypot_website: params.website.as_deref(),
|
||||
captcha_token: params.captcha_token.as_deref(),
|
||||
captcha_answer: params.captcha_answer.as_deref(),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut item = ActiveModel {
|
||||
..Default::default()
|
||||
};
|
||||
@@ -302,6 +400,9 @@ pub async fn add(
|
||||
item.author = Set(author);
|
||||
item.email = Set(email);
|
||||
item.avatar = Set(avatar);
|
||||
item.ip_address = Set(ip_address);
|
||||
item.user_agent = Set(user_agent);
|
||||
item.referer = Set(referer);
|
||||
item.content = Set(content);
|
||||
item.scope = Set(scope);
|
||||
item.paragraph_key = Set(paragraph_key);
|
||||
@@ -313,36 +414,72 @@ pub async fn add(
|
||||
item.reply_to_comment_id = Set(params.reply_to_comment_id);
|
||||
item.approved = Set(Some(params.approved.unwrap_or(false)));
|
||||
let item = item.insert(&ctx.db).await?;
|
||||
notifications::notify_new_comment(&ctx, &item).await;
|
||||
format::json(item)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn update(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(params): Json<Params>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let item = load_item(&ctx, id).await?;
|
||||
let mut item = item.into_active_model();
|
||||
params.update(&mut item);
|
||||
let item = item.update(&ctx.db).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"comment.update",
|
||||
"comment",
|
||||
Some(item.id.to_string()),
|
||||
item.post_slug.clone(),
|
||||
Some(serde_json::json!({ "approved": item.approved })),
|
||||
)
|
||||
.await?;
|
||||
format::json(item)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
|
||||
load_item(&ctx, id).await?.delete(&ctx.db).await?;
|
||||
pub async fn remove(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let item = load_item(&ctx, id).await?;
|
||||
let label = item.post_slug.clone();
|
||||
item.delete(&ctx.db).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"comment.delete",
|
||||
"comment",
|
||||
Some(id.to_string()),
|
||||
label,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
format::empty()
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
|
||||
pub async fn get_one(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
format::json(load_item(&ctx, id).await?)
|
||||
}
|
||||
|
||||
pub fn routes() -> Routes {
|
||||
Routes::new()
|
||||
.prefix("api/comments/")
|
||||
.add("captcha", get(captcha_challenge))
|
||||
.add("/", get(list))
|
||||
.add("paragraphs/summary", get(paragraph_summary))
|
||||
.add("/", post(add))
|
||||
|
||||
68
backend/src/controllers/content_analytics.rs
Normal file
68
backend/src/controllers/content_analytics.rs
Normal file
@@ -0,0 +1,68 @@
|
||||
#![allow(clippy::missing_errors_doc)]
|
||||
#![allow(clippy::unnecessary_struct_initialization)]
|
||||
#![allow(clippy::unused_async)]
|
||||
|
||||
use axum::http::HeaderMap;
|
||||
use loco_rs::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::services::analytics;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct ContentAnalyticsEventPayload {
|
||||
pub event_type: String,
|
||||
pub path: String,
|
||||
#[serde(default)]
|
||||
pub post_slug: Option<String>,
|
||||
#[serde(default)]
|
||||
pub session_id: Option<String>,
|
||||
#[serde(default)]
|
||||
pub duration_ms: Option<i32>,
|
||||
#[serde(default)]
|
||||
pub progress_percent: Option<i32>,
|
||||
#[serde(default)]
|
||||
pub metadata: Option<Value>,
|
||||
#[serde(default)]
|
||||
pub referrer: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct ContentAnalyticsEventResponse {
|
||||
pub recorded: bool,
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn record(
|
||||
State(ctx): State<AppContext>,
|
||||
headers: HeaderMap,
|
||||
Json(payload): Json<ContentAnalyticsEventPayload>,
|
||||
) -> Result<Response> {
|
||||
let mut request_context = analytics::content_request_context_from_headers(&payload.path, &headers);
|
||||
if payload.referrer.as_deref().map(str::trim).filter(|value| !value.is_empty()).is_some() {
|
||||
request_context.referrer = payload.referrer;
|
||||
}
|
||||
|
||||
analytics::record_content_event(
|
||||
&ctx,
|
||||
analytics::ContentEventDraft {
|
||||
event_type: payload.event_type,
|
||||
path: payload.path,
|
||||
post_slug: payload.post_slug,
|
||||
session_id: payload.session_id,
|
||||
request_context,
|
||||
duration_ms: payload.duration_ms,
|
||||
progress_percent: payload.progress_percent,
|
||||
metadata: payload.metadata,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
format::json(ContentAnalyticsEventResponse { recorded: true })
|
||||
}
|
||||
|
||||
pub fn routes() -> Routes {
|
||||
Routes::new()
|
||||
.prefix("api/analytics/")
|
||||
.add("content", post(record))
|
||||
}
|
||||
@@ -1,11 +1,14 @@
|
||||
#![allow(clippy::missing_errors_doc)]
|
||||
#![allow(clippy::unnecessary_struct_initialization)]
|
||||
#![allow(clippy::unused_async)]
|
||||
use axum::http::HeaderMap;
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{ColumnTrait, QueryFilter, QueryOrder};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::controllers::admin::check_auth;
|
||||
use crate::models::_entities::friend_links::{ActiveModel, Column, Entity, Model};
|
||||
use crate::services::{admin_audit, notifications};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct Params {
|
||||
@@ -69,11 +72,15 @@ async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
|
||||
pub async fn list(
|
||||
Query(query): Query<ListQuery>,
|
||||
State(ctx): State<AppContext>,
|
||||
headers: HeaderMap,
|
||||
) -> Result<Response> {
|
||||
let authenticated = check_auth(&headers).ok();
|
||||
let mut db_query = Entity::find().order_by_desc(Column::CreatedAt);
|
||||
|
||||
if let Some(status) = query.status {
|
||||
db_query = db_query.filter(Column::Status.eq(status));
|
||||
} else if authenticated.is_none() {
|
||||
db_query = db_query.filter(Column::Status.eq("approved"));
|
||||
}
|
||||
|
||||
if let Some(category) = query.category {
|
||||
@@ -98,30 +105,65 @@ pub async fn add(
|
||||
item.category = Set(params.category);
|
||||
item.status = Set(Some(params.status.unwrap_or_else(|| "pending".to_string())));
|
||||
let item = item.insert(&ctx.db).await?;
|
||||
notifications::notify_new_friend_link(&ctx, &item).await;
|
||||
format::json(item)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn update(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(params): Json<Params>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let item = load_item(&ctx, id).await?;
|
||||
let mut item = item.into_active_model();
|
||||
params.update(&mut item);
|
||||
let item = item.update(&ctx.db).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"friend_link.update",
|
||||
"friend_link",
|
||||
Some(item.id.to_string()),
|
||||
item.site_name.clone().or_else(|| Some(item.site_url.clone())),
|
||||
Some(serde_json::json!({ "status": item.status })),
|
||||
)
|
||||
.await?;
|
||||
format::json(item)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
|
||||
load_item(&ctx, id).await?.delete(&ctx.db).await?;
|
||||
pub async fn remove(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let item = load_item(&ctx, id).await?;
|
||||
let label = item.site_name.clone().or_else(|| Some(item.site_url.clone()));
|
||||
item.delete(&ctx.db).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"friend_link.delete",
|
||||
"friend_link",
|
||||
Some(id.to_string()),
|
||||
label,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
format::empty()
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
|
||||
pub async fn get_one(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
format::json(load_item(&ctx, id).await?)
|
||||
}
|
||||
|
||||
|
||||
13
backend/src/controllers/health.rs
Normal file
13
backend/src/controllers/health.rs
Normal file
@@ -0,0 +1,13 @@
|
||||
use loco_rs::prelude::*;
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn healthz() -> Result<Response> {
|
||||
format::json(serde_json::json!({
|
||||
"ok": true,
|
||||
"service": "backend",
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn routes() -> Routes {
|
||||
Routes::new().add("/healthz", get(healthz))
|
||||
}
|
||||
@@ -1,12 +1,16 @@
|
||||
pub mod admin;
|
||||
pub mod admin_api;
|
||||
pub mod admin_ops;
|
||||
pub mod ai;
|
||||
pub mod auth;
|
||||
pub mod content_analytics;
|
||||
pub mod category;
|
||||
pub mod comment;
|
||||
pub mod friend_link;
|
||||
pub mod health;
|
||||
pub mod post;
|
||||
pub mod review;
|
||||
pub mod search;
|
||||
pub mod site_settings;
|
||||
pub mod subscription;
|
||||
pub mod tag;
|
||||
|
||||
@@ -1,13 +1,312 @@
|
||||
#![allow(clippy::missing_errors_doc)]
|
||||
#![allow(clippy::unnecessary_struct_initialization)]
|
||||
#![allow(clippy::unused_async)]
|
||||
use axum::extract::Multipart;
|
||||
|
||||
use std::collections::HashSet;
|
||||
|
||||
use axum::{extract::Multipart, http::HeaderMap};
|
||||
use chrono::{TimeZone, Utc};
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::QueryOrder;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
|
||||
use crate::models::_entities::posts::{ActiveModel, Column, Entity, Model};
|
||||
use crate::services::content;
|
||||
use crate::{
|
||||
controllers::admin::check_auth,
|
||||
services::{admin_audit, content, post_revisions, subscriptions},
|
||||
};
|
||||
|
||||
fn deserialize_boolish_option<'de, D>(
|
||||
deserializer: D,
|
||||
) -> std::result::Result<Option<bool>, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let raw = Option::<String>::deserialize(deserializer)?;
|
||||
|
||||
raw.map(|value| match value.trim().to_ascii_lowercase().as_str() {
|
||||
"1" | "true" | "yes" | "on" => Ok(true),
|
||||
"0" | "false" | "no" | "off" => Ok(false),
|
||||
other => Err(serde::de::Error::custom(format!(
|
||||
"invalid boolean value `{other}`"
|
||||
))),
|
||||
})
|
||||
.transpose()
|
||||
}
|
||||
|
||||
fn normalize_slug_key(value: &str) -> String {
|
||||
value.trim().trim_matches('/').to_string()
|
||||
}
|
||||
|
||||
fn request_preview_mode(preview: Option<bool>, headers: &HeaderMap) -> bool {
|
||||
preview.unwrap_or(false)
|
||||
|| headers
|
||||
.get("x-termi-post-mode")
|
||||
.and_then(|value| value.to_str().ok())
|
||||
.map(|value| value.eq_ignore_ascii_case("preview"))
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn requested_status(status: Option<String>, published: Option<bool>) -> String {
|
||||
if let Some(status) = status.as_deref() {
|
||||
return content::normalize_post_status(Some(status));
|
||||
}
|
||||
|
||||
if published == Some(false) {
|
||||
content::POST_STATUS_DRAFT.to_string()
|
||||
} else {
|
||||
content::POST_STATUS_PUBLISHED.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
fn normalize_visibility(value: Option<String>) -> String {
|
||||
content::normalize_post_visibility(value.as_deref())
|
||||
}
|
||||
|
||||
fn post_has_tag(post: &Model, wanted_tag: &str) -> bool {
|
||||
let wanted = wanted_tag.trim().to_lowercase();
|
||||
|
||||
post.tags
|
||||
.as_ref()
|
||||
.and_then(|value| value.as_array())
|
||||
.map(|tags| {
|
||||
tags.iter().filter_map(|tag| tag.as_str()).any(|tag| {
|
||||
let normalized = tag.trim().to_lowercase();
|
||||
normalized == wanted
|
||||
})
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn effective_status(post: &Model) -> String {
|
||||
content::effective_post_state(
|
||||
post.status.as_deref().unwrap_or(content::POST_STATUS_PUBLISHED),
|
||||
post.publish_at,
|
||||
post.unpublish_at,
|
||||
Utc::now().fixed_offset(),
|
||||
)
|
||||
}
|
||||
|
||||
fn listed_publicly(post: &Model) -> bool {
|
||||
content::is_post_listed_publicly(post, Utc::now().fixed_offset())
|
||||
}
|
||||
|
||||
fn publicly_accessible(post: &Model) -> bool {
|
||||
content::is_post_publicly_accessible(post, Utc::now().fixed_offset())
|
||||
}
|
||||
|
||||
fn parse_optional_markdown_datetime(
|
||||
value: Option<&str>,
|
||||
) -> Option<chrono::DateTime<chrono::FixedOffset>> {
|
||||
let value = value?.trim();
|
||||
if value.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
chrono::DateTime::parse_from_rfc3339(value).ok().or_else(|| {
|
||||
chrono::NaiveDate::parse_from_str(value, "%Y-%m-%d")
|
||||
.ok()
|
||||
.and_then(|date| date.and_hms_opt(0, 0, 0))
|
||||
.and_then(|naive| {
|
||||
chrono::FixedOffset::east_opt(0)?
|
||||
.from_local_datetime(&naive)
|
||||
.single()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn markdown_post_listed_publicly(post: &content::MarkdownPost) -> bool {
|
||||
content::effective_post_state(
|
||||
&post.status,
|
||||
parse_optional_markdown_datetime(post.publish_at.as_deref()),
|
||||
parse_optional_markdown_datetime(post.unpublish_at.as_deref()),
|
||||
Utc::now().fixed_offset(),
|
||||
) == content::POST_STATUS_PUBLISHED
|
||||
&& post.visibility == content::POST_VISIBILITY_PUBLIC
|
||||
}
|
||||
|
||||
fn should_include_post(
|
||||
post: &Model,
|
||||
query: &ListQuery,
|
||||
preview: bool,
|
||||
include_private: bool,
|
||||
include_redirects: bool,
|
||||
) -> bool {
|
||||
if !preview && !listed_publicly(post) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if query.listed_only.unwrap_or(!preview) && !listed_publicly(post) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if !include_private
|
||||
&& content::normalize_post_visibility(post.visibility.as_deref())
|
||||
== content::POST_VISIBILITY_PRIVATE
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if !include_redirects
|
||||
&& post
|
||||
.redirect_to
|
||||
.as_deref()
|
||||
.map(str::trim)
|
||||
.filter(|value| !value.is_empty())
|
||||
.is_some()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if let Some(slug) = &query.slug {
|
||||
if post.slug != *slug {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(category) = &query.category {
|
||||
if post
|
||||
.category
|
||||
.as_deref()
|
||||
.map(|value| !value.eq_ignore_ascii_case(category))
|
||||
.unwrap_or(true)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(post_type) = &query.post_type {
|
||||
if post
|
||||
.post_type
|
||||
.as_deref()
|
||||
.map(|value| !value.eq_ignore_ascii_case(post_type))
|
||||
.unwrap_or(true)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(pinned) = query.pinned {
|
||||
if post.pinned.unwrap_or(false) != pinned {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(tag) = &query.tag {
|
||||
if !post_has_tag(post, tag) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(status) = &query.status {
|
||||
if effective_status(post) != content::normalize_post_status(Some(status)) && effective_status(post) != status.trim().to_ascii_lowercase() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(visibility) = &query.visibility {
|
||||
if content::normalize_post_visibility(post.visibility.as_deref())
|
||||
!= content::normalize_post_visibility(Some(visibility))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(search) = &query.search {
|
||||
let wanted = search.trim().to_lowercase();
|
||||
let haystack = [
|
||||
post.title.as_deref().unwrap_or_default(),
|
||||
post.description.as_deref().unwrap_or_default(),
|
||||
post.content.as_deref().unwrap_or_default(),
|
||||
post.category.as_deref().unwrap_or_default(),
|
||||
&post.slug,
|
||||
]
|
||||
.join("\n")
|
||||
.to_lowercase();
|
||||
|
||||
if !haystack.contains(&wanted)
|
||||
&& !post
|
||||
.tags
|
||||
.as_ref()
|
||||
.and_then(|value| value.as_array())
|
||||
.map(|tags| {
|
||||
tags.iter()
|
||||
.filter_map(|tag| tag.as_str())
|
||||
.any(|tag| tag.to_lowercase().contains(&wanted))
|
||||
})
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
|
||||
let item = Entity::find_by_id(id).one(&ctx.db).await?;
|
||||
item.ok_or(Error::NotFound)
|
||||
}
|
||||
|
||||
async fn load_item_by_slug_once(ctx: &AppContext, slug: &str) -> Result<Option<Model>> {
|
||||
Entity::find()
|
||||
.filter(Column::Slug.eq(slug))
|
||||
.one(&ctx.db)
|
||||
.await
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
async fn resolve_post_by_slug(ctx: &AppContext, slug: &str) -> Result<Model> {
|
||||
let mut current_slug = normalize_slug_key(slug);
|
||||
if current_slug.is_empty() {
|
||||
return Err(Error::NotFound);
|
||||
}
|
||||
|
||||
let mut visited = HashSet::new();
|
||||
|
||||
loop {
|
||||
if !visited.insert(current_slug.clone()) {
|
||||
return Err(Error::NotFound);
|
||||
}
|
||||
|
||||
if let Some(post) = load_item_by_slug_once(ctx, ¤t_slug).await? {
|
||||
let next_slug = post
|
||||
.redirect_to
|
||||
.as_deref()
|
||||
.map(normalize_slug_key)
|
||||
.filter(|value| !value.is_empty() && *value != post.slug);
|
||||
|
||||
if let Some(next_slug) = next_slug {
|
||||
current_slug = next_slug;
|
||||
continue;
|
||||
}
|
||||
|
||||
return Ok(post);
|
||||
}
|
||||
|
||||
let candidates = Entity::find().all(&ctx.db).await?;
|
||||
let Some(candidate) = candidates.into_iter().find(|item| {
|
||||
content::post_redirects_from_json(&item.redirect_from)
|
||||
.into_iter()
|
||||
.any(|redirect| redirect.eq_ignore_ascii_case(¤t_slug))
|
||||
}) else {
|
||||
return Err(Error::NotFound);
|
||||
};
|
||||
|
||||
let next_slug = candidate
|
||||
.redirect_to
|
||||
.as_deref()
|
||||
.map(normalize_slug_key)
|
||||
.filter(|value| !value.is_empty())
|
||||
.unwrap_or_else(|| candidate.slug.clone());
|
||||
|
||||
if next_slug == candidate.slug {
|
||||
return Ok(candidate);
|
||||
}
|
||||
|
||||
current_slug = next_slug;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct Params {
|
||||
@@ -21,6 +320,15 @@ pub struct Params {
|
||||
pub image: Option<String>,
|
||||
pub images: Option<serde_json::Value>,
|
||||
pub pinned: Option<bool>,
|
||||
pub status: Option<String>,
|
||||
pub visibility: Option<String>,
|
||||
pub publish_at: Option<String>,
|
||||
pub unpublish_at: Option<String>,
|
||||
pub canonical_url: Option<String>,
|
||||
pub noindex: Option<bool>,
|
||||
pub og_image: Option<String>,
|
||||
pub redirect_from: Option<serde_json::Value>,
|
||||
pub redirect_to: Option<String>,
|
||||
}
|
||||
|
||||
impl Params {
|
||||
@@ -35,6 +343,27 @@ impl Params {
|
||||
item.image = Set(self.image.clone());
|
||||
item.images = Set(self.images.clone());
|
||||
item.pinned = Set(self.pinned);
|
||||
item.status = Set(self.status.clone().map(|value| requested_status(Some(value), None)));
|
||||
item.visibility = Set(
|
||||
self.visibility
|
||||
.clone()
|
||||
.map(|value| normalize_visibility(Some(value))),
|
||||
);
|
||||
item.publish_at = Set(
|
||||
self.publish_at
|
||||
.clone()
|
||||
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()),
|
||||
);
|
||||
item.unpublish_at = Set(
|
||||
self.unpublish_at
|
||||
.clone()
|
||||
.and_then(|value| chrono::DateTime::parse_from_rfc3339(value.trim()).ok()),
|
||||
);
|
||||
item.canonical_url = Set(self.canonical_url.clone());
|
||||
item.noindex = Set(self.noindex);
|
||||
item.og_image = Set(self.og_image.clone());
|
||||
item.redirect_from = Set(self.redirect_from.clone());
|
||||
item.redirect_to = Set(self.redirect_to.clone());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,6 +376,24 @@ pub struct ListQuery {
|
||||
#[serde(alias = "type")]
|
||||
pub post_type: Option<String>,
|
||||
pub pinned: Option<bool>,
|
||||
pub status: Option<String>,
|
||||
pub visibility: Option<String>,
|
||||
#[serde(default, deserialize_with = "deserialize_boolish_option")]
|
||||
pub listed_only: Option<bool>,
|
||||
#[serde(default, deserialize_with = "deserialize_boolish_option")]
|
||||
pub include_private: Option<bool>,
|
||||
#[serde(default, deserialize_with = "deserialize_boolish_option")]
|
||||
pub include_redirects: Option<bool>,
|
||||
#[serde(default, deserialize_with = "deserialize_boolish_option")]
|
||||
pub preview: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Deserialize)]
|
||||
pub struct LookupQuery {
|
||||
#[serde(default, deserialize_with = "deserialize_boolish_option")]
|
||||
pub preview: Option<bool>,
|
||||
#[serde(default, deserialize_with = "deserialize_boolish_option")]
|
||||
pub include_private: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
@@ -66,6 +413,15 @@ pub struct MarkdownCreateParams {
|
||||
pub image: Option<String>,
|
||||
pub images: Option<Vec<String>>,
|
||||
pub pinned: Option<bool>,
|
||||
pub status: Option<String>,
|
||||
pub visibility: Option<String>,
|
||||
pub publish_at: Option<String>,
|
||||
pub unpublish_at: Option<String>,
|
||||
pub canonical_url: Option<String>,
|
||||
pub noindex: Option<bool>,
|
||||
pub og_image: Option<String>,
|
||||
pub redirect_from: Option<Vec<String>>,
|
||||
pub redirect_to: Option<String>,
|
||||
pub published: Option<bool>,
|
||||
}
|
||||
|
||||
@@ -88,174 +444,211 @@ pub struct MarkdownImportResponse {
|
||||
pub slugs: Vec<String>,
|
||||
}
|
||||
|
||||
async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
|
||||
let item = Entity::find_by_id(id).one(&ctx.db).await?;
|
||||
item.ok_or_else(|| Error::NotFound)
|
||||
}
|
||||
|
||||
async fn load_item_by_slug(ctx: &AppContext, slug: &str) -> Result<Model> {
|
||||
let item = Entity::find()
|
||||
.filter(Column::Slug.eq(slug))
|
||||
.one(&ctx.db)
|
||||
.await?;
|
||||
|
||||
item.ok_or_else(|| Error::NotFound)
|
||||
}
|
||||
|
||||
fn post_has_tag(post: &Model, wanted_tag: &str) -> bool {
|
||||
let wanted = wanted_tag.trim().to_lowercase();
|
||||
|
||||
post.tags
|
||||
.as_ref()
|
||||
.and_then(|value| value.as_array())
|
||||
.map(|tags| {
|
||||
tags.iter().filter_map(|tag| tag.as_str()).any(|tag| {
|
||||
let normalized = tag.trim().to_lowercase();
|
||||
normalized == wanted
|
||||
})
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn list(
|
||||
Query(query): Query<ListQuery>,
|
||||
State(ctx): State<AppContext>,
|
||||
headers: HeaderMap,
|
||||
) -> Result<Response> {
|
||||
content::sync_markdown_posts(&ctx).await?;
|
||||
|
||||
let preview = request_preview_mode(query.preview, &headers);
|
||||
let include_private = preview && query.include_private.unwrap_or(true);
|
||||
let include_redirects = query.include_redirects.unwrap_or(preview);
|
||||
|
||||
let posts = Entity::find()
|
||||
.order_by_desc(Column::CreatedAt)
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
|
||||
let filtered: Vec<Model> = posts
|
||||
let filtered = posts
|
||||
.into_iter()
|
||||
.filter(|post| {
|
||||
if let Some(slug) = &query.slug {
|
||||
if post.slug != *slug {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(category) = &query.category {
|
||||
if post
|
||||
.category
|
||||
.as_deref()
|
||||
.map(|value| !value.eq_ignore_ascii_case(category))
|
||||
.unwrap_or(true)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(post_type) = &query.post_type {
|
||||
if post
|
||||
.post_type
|
||||
.as_deref()
|
||||
.map(|value| !value.eq_ignore_ascii_case(post_type))
|
||||
.unwrap_or(true)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(pinned) = query.pinned {
|
||||
if post.pinned.unwrap_or(false) != pinned {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(tag) = &query.tag {
|
||||
if !post_has_tag(post, tag) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(search) = &query.search {
|
||||
let wanted = search.trim().to_lowercase();
|
||||
let haystack = [
|
||||
post.title.as_deref().unwrap_or_default(),
|
||||
post.description.as_deref().unwrap_or_default(),
|
||||
post.content.as_deref().unwrap_or_default(),
|
||||
post.category.as_deref().unwrap_or_default(),
|
||||
&post.slug,
|
||||
]
|
||||
.join("\n")
|
||||
.to_lowercase();
|
||||
|
||||
if !haystack.contains(&wanted)
|
||||
&& !post
|
||||
.tags
|
||||
.as_ref()
|
||||
.and_then(|value| value.as_array())
|
||||
.map(|tags| {
|
||||
tags.iter()
|
||||
.filter_map(|tag| tag.as_str())
|
||||
.any(|tag| tag.to_lowercase().contains(&wanted))
|
||||
})
|
||||
.unwrap_or(false)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
})
|
||||
.collect();
|
||||
.filter(|post| should_include_post(post, &query, preview, include_private, include_redirects))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
format::json(filtered)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn add(State(ctx): State<AppContext>, Json(params): Json<Params>) -> Result<Response> {
|
||||
pub async fn add(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(params): Json<Params>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let mut item = ActiveModel {
|
||||
..Default::default()
|
||||
};
|
||||
params.update(&mut item);
|
||||
let item = item.insert(&ctx.db).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"post.create",
|
||||
"post",
|
||||
Some(item.id.to_string()),
|
||||
Some(item.slug.clone()),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
format::json(item)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn update(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(params): Json<Params>,
|
||||
) -> Result<Response> {
|
||||
let item = load_item(&ctx, id).await?;
|
||||
let mut item = item.into_active_model();
|
||||
let actor = check_auth(&headers)?;
|
||||
let previous = load_item(&ctx, id).await?;
|
||||
let was_public = content::is_post_listed_publicly(&previous, Utc::now().fixed_offset());
|
||||
let previous_slug = previous.slug.clone();
|
||||
|
||||
let mut item = previous.into_active_model();
|
||||
params.update(&mut item);
|
||||
let item = item.update(&ctx.db).await?;
|
||||
let is_public = content::is_post_listed_publicly(&item, Utc::now().fixed_offset());
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"post.update",
|
||||
"post",
|
||||
Some(item.id.to_string()),
|
||||
Some(item.slug.clone()),
|
||||
Some(serde_json::json!({
|
||||
"previous_slug": previous_slug,
|
||||
"published": is_public,
|
||||
})),
|
||||
)
|
||||
.await?;
|
||||
|
||||
if is_public && !was_public {
|
||||
let post = content::MarkdownPost {
|
||||
title: item.title.clone().unwrap_or_else(|| item.slug.clone()),
|
||||
slug: item.slug.clone(),
|
||||
description: item.description.clone(),
|
||||
content: item.content.clone().unwrap_or_default(),
|
||||
category: item.category.clone(),
|
||||
tags: item
|
||||
.tags
|
||||
.as_ref()
|
||||
.and_then(|value| value.as_array())
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.filter_map(|tag| tag.as_str().map(ToString::to_string))
|
||||
.collect(),
|
||||
post_type: item.post_type.clone().unwrap_or_else(|| "article".to_string()),
|
||||
image: item.image.clone(),
|
||||
images: item
|
||||
.images
|
||||
.as_ref()
|
||||
.and_then(|value| value.as_array())
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.filter_map(|tag| tag.as_str().map(ToString::to_string))
|
||||
.collect(),
|
||||
pinned: item.pinned.unwrap_or(false),
|
||||
status: item.status.clone().unwrap_or_else(|| content::POST_STATUS_PUBLISHED.to_string()),
|
||||
visibility: item
|
||||
.visibility
|
||||
.clone()
|
||||
.unwrap_or_else(|| content::POST_VISIBILITY_PUBLIC.to_string()),
|
||||
publish_at: item.publish_at.map(|value| value.to_rfc3339()),
|
||||
unpublish_at: item.unpublish_at.map(|value| value.to_rfc3339()),
|
||||
canonical_url: item.canonical_url.clone(),
|
||||
noindex: item.noindex.unwrap_or(false),
|
||||
og_image: item.og_image.clone(),
|
||||
redirect_from: content::post_redirects_from_json(&item.redirect_from),
|
||||
redirect_to: item.redirect_to.clone(),
|
||||
file_path: content::markdown_post_path(&item.slug)
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
};
|
||||
let _ = subscriptions::notify_post_published(&ctx, &post).await;
|
||||
}
|
||||
|
||||
format::json(item)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
|
||||
load_item(&ctx, id).await?.delete(&ctx.db).await?;
|
||||
pub async fn remove(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let item = load_item(&ctx, id).await?;
|
||||
let slug = item.slug.clone();
|
||||
item.delete(&ctx.db).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"post.delete",
|
||||
"post",
|
||||
Some(id.to_string()),
|
||||
Some(slug),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
format::empty()
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
|
||||
pub async fn get_one(
|
||||
Path(id): Path<i32>,
|
||||
Query(query): Query<LookupQuery>,
|
||||
State(ctx): State<AppContext>,
|
||||
headers: HeaderMap,
|
||||
) -> Result<Response> {
|
||||
content::sync_markdown_posts(&ctx).await?;
|
||||
format::json(load_item(&ctx, id).await?)
|
||||
let preview = request_preview_mode(query.preview, &headers);
|
||||
let post = load_item(&ctx, id).await?;
|
||||
|
||||
if !preview && !publicly_accessible(&post) {
|
||||
return Err(Error::NotFound);
|
||||
}
|
||||
|
||||
format::json(post)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn get_by_slug(
|
||||
Path(slug): Path<String>,
|
||||
Query(query): Query<LookupQuery>,
|
||||
State(ctx): State<AppContext>,
|
||||
headers: HeaderMap,
|
||||
) -> Result<Response> {
|
||||
content::sync_markdown_posts(&ctx).await?;
|
||||
format::json(load_item_by_slug(&ctx, &slug).await?)
|
||||
let preview = request_preview_mode(query.preview, &headers);
|
||||
let include_private = preview && query.include_private.unwrap_or(true);
|
||||
let post = resolve_post_by_slug(&ctx, &slug).await?;
|
||||
|
||||
if !preview && !publicly_accessible(&post) {
|
||||
return Err(Error::NotFound);
|
||||
}
|
||||
|
||||
if !include_private
|
||||
&& content::normalize_post_visibility(post.visibility.as_deref())
|
||||
== content::POST_VISIBILITY_PRIVATE
|
||||
{
|
||||
return Err(Error::NotFound);
|
||||
}
|
||||
|
||||
format::json(post)
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn get_markdown_by_slug(
|
||||
headers: HeaderMap,
|
||||
Path(slug): Path<String>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
check_auth(&headers)?;
|
||||
content::sync_markdown_posts(&ctx).await?;
|
||||
let (path, markdown) = content::read_markdown_document(&slug)?;
|
||||
format::json(MarkdownDocumentResponse {
|
||||
@@ -267,12 +660,43 @@ pub async fn get_markdown_by_slug(
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn update_markdown_by_slug(
|
||||
headers: HeaderMap,
|
||||
Path(slug): Path<String>,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(params): Json<MarkdownUpdateParams>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let _ = post_revisions::capture_current_snapshot(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
&slug,
|
||||
"update",
|
||||
Some("保存文章前的自动快照"),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
let updated = content::write_markdown_document(&ctx, &slug, ¶ms.markdown).await?;
|
||||
let (path, markdown) = content::read_markdown_document(&updated.slug)?;
|
||||
let _ = post_revisions::capture_snapshot_from_markdown(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
&updated.slug,
|
||||
&markdown,
|
||||
"saved",
|
||||
Some("保存后的当前版本"),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"post.markdown.update",
|
||||
"post",
|
||||
None,
|
||||
Some(updated.slug.clone()),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(MarkdownDocumentResponse {
|
||||
slug: updated.slug,
|
||||
@@ -283,9 +707,11 @@ pub async fn update_markdown_by_slug(
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn create_markdown(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(params): Json<MarkdownCreateParams>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let title = params.title.trim();
|
||||
if title.is_empty() {
|
||||
return Err(Error::BadRequest("title is required".to_string()));
|
||||
@@ -305,11 +731,42 @@ pub async fn create_markdown(
|
||||
image: params.image,
|
||||
images: params.images.unwrap_or_default(),
|
||||
pinned: params.pinned.unwrap_or(false),
|
||||
published: params.published.unwrap_or(true),
|
||||
status: requested_status(params.status, params.published),
|
||||
visibility: normalize_visibility(params.visibility),
|
||||
publish_at: params.publish_at,
|
||||
unpublish_at: params.unpublish_at,
|
||||
canonical_url: params.canonical_url,
|
||||
noindex: params.noindex.unwrap_or(false),
|
||||
og_image: params.og_image,
|
||||
redirect_from: params.redirect_from.unwrap_or_default(),
|
||||
redirect_to: params.redirect_to,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
let (path, markdown) = content::read_markdown_document(&created.slug)?;
|
||||
let _ = post_revisions::capture_snapshot_from_markdown(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
&created.slug,
|
||||
&markdown,
|
||||
"create",
|
||||
Some("新建文章"),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"post.markdown.create",
|
||||
"post",
|
||||
None,
|
||||
Some(created.slug.clone()),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
if markdown_post_listed_publicly(&created) {
|
||||
let _ = subscriptions::notify_post_published(&ctx, &created).await;
|
||||
}
|
||||
|
||||
format::json(MarkdownDocumentResponse {
|
||||
slug: created.slug,
|
||||
@@ -320,9 +777,11 @@ pub async fn create_markdown(
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn import_markdown(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
mut multipart: Multipart,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let mut files = Vec::new();
|
||||
|
||||
while let Some(field) = multipart
|
||||
@@ -345,6 +804,35 @@ pub async fn import_markdown(
|
||||
}
|
||||
|
||||
let imported = content::import_markdown_documents(&ctx, files).await?;
|
||||
for item in &imported {
|
||||
if let Ok((_path, markdown)) = content::read_markdown_document(&item.slug) {
|
||||
let _ = post_revisions::capture_snapshot_from_markdown(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
&item.slug,
|
||||
&markdown,
|
||||
"import",
|
||||
Some("批量导入 Markdown"),
|
||||
None,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
if markdown_post_listed_publicly(item) {
|
||||
let _ = subscriptions::notify_post_published(&ctx, item).await;
|
||||
}
|
||||
}
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"post.markdown.import",
|
||||
"post_import",
|
||||
None,
|
||||
Some(format!("{} files", imported.len())),
|
||||
Some(serde_json::json!({
|
||||
"slugs": imported.iter().map(|item| item.slug.clone()).collect::<Vec<_>>(),
|
||||
})),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(MarkdownImportResponse {
|
||||
count: imported.len(),
|
||||
@@ -354,10 +842,31 @@ pub async fn import_markdown(
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn delete_markdown_by_slug(
|
||||
headers: HeaderMap,
|
||||
Path(slug): Path<String>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<Response> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let _ = post_revisions::capture_current_snapshot(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
&slug,
|
||||
"delete",
|
||||
Some("删除前自动快照"),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
content::delete_markdown_post(&ctx, &slug).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"post.markdown.delete",
|
||||
"post",
|
||||
None,
|
||||
Some(slug.clone()),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
format::json(MarkdownDeleteResponse {
|
||||
slug,
|
||||
deleted: true,
|
||||
|
||||
@@ -1,11 +1,15 @@
|
||||
use axum::extract::{Path, State};
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::HeaderMap,
|
||||
};
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{EntityTrait, QueryOrder, Set};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
controllers::admin::check_auth,
|
||||
models::_entities::reviews::{self, Entity as ReviewEntity},
|
||||
services::storage,
|
||||
services::{admin_audit, storage},
|
||||
};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
@@ -56,9 +60,11 @@ pub async fn get_one(
|
||||
}
|
||||
|
||||
pub async fn create(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(req): Json<CreateReviewRequest>,
|
||||
) -> Result<impl IntoResponse> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let new_review = reviews::ActiveModel {
|
||||
title: Set(Some(req.title)),
|
||||
review_type: Set(Some(req.review_type)),
|
||||
@@ -76,14 +82,26 @@ pub async fn create(
|
||||
};
|
||||
|
||||
let review = new_review.insert(&ctx.db).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"review.create",
|
||||
"review",
|
||||
Some(review.id.to_string()),
|
||||
review.title.clone(),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
format::json(review)
|
||||
}
|
||||
|
||||
pub async fn update(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(req): Json<UpdateReviewRequest>,
|
||||
) -> Result<impl IntoResponse> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let review = ReviewEntity::find_by_id(id).one(&ctx.db).await?;
|
||||
|
||||
let Some(existing_review) = review else {
|
||||
@@ -132,24 +150,47 @@ pub async fn update(
|
||||
tracing::warn!("failed to cleanup replaced review cover: {error}");
|
||||
}
|
||||
}
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"review.update",
|
||||
"review",
|
||||
Some(review.id.to_string()),
|
||||
review.title.clone(),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
format::json(review)
|
||||
}
|
||||
|
||||
pub async fn remove(
|
||||
headers: HeaderMap,
|
||||
Path(id): Path<i32>,
|
||||
State(ctx): State<AppContext>,
|
||||
) -> Result<impl IntoResponse> {
|
||||
let actor = check_auth(&headers)?;
|
||||
let review = ReviewEntity::find_by_id(id).one(&ctx.db).await?;
|
||||
|
||||
match review {
|
||||
Some(r) => {
|
||||
let cover = r.cover.clone();
|
||||
let title = r.title.clone();
|
||||
r.delete(&ctx.db).await?;
|
||||
if let Some(cover) = cover.filter(|value| !value.trim().is_empty()) {
|
||||
if let Err(error) = storage::delete_managed_url(&ctx, &cover).await {
|
||||
tracing::warn!("failed to cleanup deleted review cover: {error}");
|
||||
}
|
||||
}
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
Some(&actor),
|
||||
"review.delete",
|
||||
"review",
|
||||
Some(id.to_string()),
|
||||
title,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
format::empty()
|
||||
}
|
||||
None => Err(Error::NotFound),
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
use axum::http::HeaderMap;
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{ConnectionTrait, DatabaseBackend, DbBackend, FromQueryResult, Statement};
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::time::Instant;
|
||||
use std::{collections::HashSet, time::Instant};
|
||||
|
||||
use crate::models::_entities::posts;
|
||||
use crate::services::{analytics, content};
|
||||
use crate::{
|
||||
controllers::site_settings,
|
||||
models::_entities::posts,
|
||||
services::{abuse_guard, analytics, content},
|
||||
};
|
||||
|
||||
fn deserialize_boolish_option<'de, D>(
|
||||
deserializer: D,
|
||||
@@ -26,6 +28,243 @@ where
|
||||
.transpose()
|
||||
}
|
||||
|
||||
fn normalize_text(value: &str) -> String {
|
||||
value
|
||||
.split_whitespace()
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ")
|
||||
.trim()
|
||||
.to_ascii_lowercase()
|
||||
}
|
||||
|
||||
fn tokenize(value: &str) -> Vec<String> {
|
||||
value
|
||||
.split(|ch: char| !ch.is_alphanumeric() && ch != '-' && ch != '_')
|
||||
.map(normalize_text)
|
||||
.filter(|item| !item.is_empty())
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn levenshtein_distance(left: &str, right: &str) -> usize {
|
||||
if left == right {
|
||||
return 0;
|
||||
}
|
||||
if left.is_empty() {
|
||||
return right.chars().count();
|
||||
}
|
||||
if right.is_empty() {
|
||||
return left.chars().count();
|
||||
}
|
||||
|
||||
let right_chars = right.chars().collect::<Vec<_>>();
|
||||
let mut prev = (0..=right_chars.len()).collect::<Vec<_>>();
|
||||
|
||||
for (i, left_ch) in left.chars().enumerate() {
|
||||
let mut curr = vec![i + 1; right_chars.len() + 1];
|
||||
for (j, right_ch) in right_chars.iter().enumerate() {
|
||||
let cost = usize::from(left_ch != *right_ch);
|
||||
curr[j + 1] = (curr[j] + 1)
|
||||
.min(prev[j + 1] + 1)
|
||||
.min(prev[j] + cost);
|
||||
}
|
||||
prev = curr;
|
||||
}
|
||||
|
||||
prev[right_chars.len()]
|
||||
}
|
||||
|
||||
fn parse_synonym_groups(value: &Option<Value>) -> Vec<Vec<String>> {
|
||||
value
|
||||
.as_ref()
|
||||
.and_then(Value::as_array)
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.filter_map(|item| item.as_str().map(ToString::to_string))
|
||||
.map(|item| {
|
||||
let normalized = item.replace("=>", ",").replace('|', ",");
|
||||
normalized
|
||||
.split([',', ','])
|
||||
.map(normalize_text)
|
||||
.filter(|token| !token.is_empty())
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.filter(|group| !group.is_empty())
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn expand_search_terms(query: &str, synonym_groups: &[Vec<String>]) -> Vec<String> {
|
||||
let normalized_query = normalize_text(query);
|
||||
let query_tokens = tokenize(query);
|
||||
let mut expanded = Vec::new();
|
||||
let mut seen = HashSet::new();
|
||||
|
||||
if !normalized_query.is_empty() && seen.insert(normalized_query.clone()) {
|
||||
expanded.push(normalized_query.clone());
|
||||
}
|
||||
|
||||
for token in &query_tokens {
|
||||
if seen.insert(token.clone()) {
|
||||
expanded.push(token.clone());
|
||||
}
|
||||
}
|
||||
|
||||
for group in synonym_groups {
|
||||
let matched = group.iter().any(|item| {
|
||||
*item == normalized_query
|
||||
|| query_tokens.iter().any(|token| token == item)
|
||||
|| normalized_query.contains(item)
|
||||
});
|
||||
|
||||
if matched {
|
||||
for token in group {
|
||||
if seen.insert(token.clone()) {
|
||||
expanded.push(token.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expanded
|
||||
}
|
||||
|
||||
fn candidate_terms(posts: &[posts::Model]) -> Vec<String> {
|
||||
let mut seen = HashSet::new();
|
||||
let mut candidates = Vec::new();
|
||||
|
||||
for post in posts {
|
||||
for source in [
|
||||
post.title.as_deref().unwrap_or_default(),
|
||||
post.category.as_deref().unwrap_or_default(),
|
||||
&post.slug,
|
||||
] {
|
||||
for token in tokenize(source) {
|
||||
if token.len() >= 3 && seen.insert(token.clone()) {
|
||||
candidates.push(token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(tags) = post.tags.as_ref().and_then(Value::as_array) {
|
||||
for token in tags.iter().filter_map(Value::as_str).flat_map(tokenize) {
|
||||
if token.len() >= 2 && seen.insert(token.clone()) {
|
||||
candidates.push(token);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
candidates
|
||||
}
|
||||
|
||||
fn find_spelling_fallback(query: &str, posts: &[posts::Model], synonym_groups: &[Vec<String>]) -> Vec<String> {
|
||||
let primary_token = tokenize(query).into_iter().next().unwrap_or_default();
|
||||
if primary_token.len() < 3 {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let mut nearest = candidate_terms(posts)
|
||||
.into_iter()
|
||||
.map(|candidate| {
|
||||
let distance = levenshtein_distance(&primary_token, &candidate);
|
||||
(candidate, distance)
|
||||
})
|
||||
.filter(|(_, distance)| *distance <= 2)
|
||||
.collect::<Vec<_>>();
|
||||
nearest.sort_by(|left, right| left.1.cmp(&right.1).then_with(|| left.0.cmp(&right.0)));
|
||||
|
||||
nearest
|
||||
.into_iter()
|
||||
.take(3)
|
||||
.flat_map(|(candidate, _)| expand_search_terms(&candidate, synonym_groups))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn post_has_tag(post: &posts::Model, wanted_tag: &str) -> bool {
|
||||
let wanted = normalize_text(wanted_tag);
|
||||
|
||||
post.tags
|
||||
.as_ref()
|
||||
.and_then(Value::as_array)
|
||||
.map(|tags| {
|
||||
tags.iter()
|
||||
.filter_map(Value::as_str)
|
||||
.map(normalize_text)
|
||||
.any(|tag| tag == wanted)
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn score_post(post: &posts::Model, query: &str, terms: &[String]) -> f64 {
|
||||
let normalized_query = normalize_text(query);
|
||||
let title = normalize_text(post.title.as_deref().unwrap_or_default());
|
||||
let description = normalize_text(post.description.as_deref().unwrap_or_default());
|
||||
let content_text = normalize_text(post.content.as_deref().unwrap_or_default());
|
||||
let category = normalize_text(post.category.as_deref().unwrap_or_default());
|
||||
let slug = normalize_text(&post.slug);
|
||||
let tags = post
|
||||
.tags
|
||||
.as_ref()
|
||||
.and_then(Value::as_array)
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.filter_map(|item| item.as_str().map(normalize_text))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut score = 0.0;
|
||||
|
||||
if !normalized_query.is_empty() {
|
||||
if title.contains(&normalized_query) {
|
||||
score += 6.0;
|
||||
}
|
||||
if description.contains(&normalized_query) {
|
||||
score += 4.0;
|
||||
}
|
||||
if slug.contains(&normalized_query) {
|
||||
score += 4.0;
|
||||
}
|
||||
if category.contains(&normalized_query) {
|
||||
score += 3.0;
|
||||
}
|
||||
if tags.iter().any(|tag| tag.contains(&normalized_query)) {
|
||||
score += 4.0;
|
||||
}
|
||||
if content_text.contains(&normalized_query) {
|
||||
score += 2.0;
|
||||
}
|
||||
}
|
||||
|
||||
for term in terms {
|
||||
if term.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if title.contains(term) {
|
||||
score += 3.5;
|
||||
}
|
||||
if description.contains(term) {
|
||||
score += 2.2;
|
||||
}
|
||||
if slug.contains(term) {
|
||||
score += 2.0;
|
||||
}
|
||||
if category.contains(term) {
|
||||
score += 1.8;
|
||||
}
|
||||
if tags.iter().any(|tag| tag == term) {
|
||||
score += 2.5;
|
||||
} else if tags.iter().any(|tag| tag.contains(term)) {
|
||||
score += 1.5;
|
||||
}
|
||||
if content_text.contains(term) {
|
||||
score += 0.8;
|
||||
}
|
||||
}
|
||||
|
||||
score
|
||||
}
|
||||
|
||||
fn is_preview_search(query: &SearchQuery, headers: &HeaderMap) -> bool {
|
||||
query.preview.unwrap_or(false)
|
||||
|| headers
|
||||
@@ -39,11 +278,15 @@ fn is_preview_search(query: &SearchQuery, headers: &HeaderMap) -> bool {
|
||||
pub struct SearchQuery {
|
||||
pub q: Option<String>,
|
||||
pub limit: Option<u64>,
|
||||
pub category: Option<String>,
|
||||
pub tag: Option<String>,
|
||||
#[serde(alias = "type")]
|
||||
pub post_type: Option<String>,
|
||||
#[serde(default, deserialize_with = "deserialize_boolish_option")]
|
||||
pub preview: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, FromQueryResult)]
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct SearchResult {
|
||||
pub id: i32,
|
||||
pub title: Option<String>,
|
||||
@@ -59,131 +302,6 @@ pub struct SearchResult {
|
||||
pub rank: f64,
|
||||
}
|
||||
|
||||
fn search_sql() -> &'static str {
|
||||
r#"
|
||||
SELECT
|
||||
p.id,
|
||||
p.title,
|
||||
p.slug,
|
||||
p.description,
|
||||
p.content,
|
||||
p.category,
|
||||
p.tags,
|
||||
p.post_type,
|
||||
p.pinned,
|
||||
p.created_at,
|
||||
p.updated_at,
|
||||
ts_rank_cd(
|
||||
setweight(to_tsvector('simple', coalesce(p.title, '')), 'A') ||
|
||||
setweight(to_tsvector('simple', coalesce(p.description, '')), 'B') ||
|
||||
setweight(to_tsvector('simple', coalesce(p.category, '')), 'C') ||
|
||||
setweight(to_tsvector('simple', coalesce(p.tags::text, '')), 'C') ||
|
||||
setweight(to_tsvector('simple', coalesce(p.content, '')), 'D'),
|
||||
plainto_tsquery('simple', $1)
|
||||
)::float8 AS rank
|
||||
FROM posts p
|
||||
WHERE (
|
||||
setweight(to_tsvector('simple', coalesce(p.title, '')), 'A') ||
|
||||
setweight(to_tsvector('simple', coalesce(p.description, '')), 'B') ||
|
||||
setweight(to_tsvector('simple', coalesce(p.category, '')), 'C') ||
|
||||
setweight(to_tsvector('simple', coalesce(p.tags::text, '')), 'C') ||
|
||||
setweight(to_tsvector('simple', coalesce(p.content, '')), 'D')
|
||||
) @@ plainto_tsquery('simple', $1)
|
||||
ORDER BY rank DESC, p.created_at DESC
|
||||
LIMIT $2
|
||||
"#
|
||||
}
|
||||
|
||||
fn app_level_rank(post: &posts::Model, wanted: &str) -> f64 {
|
||||
let wanted_lower = wanted.to_lowercase();
|
||||
let mut rank = 0.0;
|
||||
|
||||
if post
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or_default()
|
||||
.to_lowercase()
|
||||
.contains(&wanted_lower)
|
||||
{
|
||||
rank += 4.0;
|
||||
}
|
||||
|
||||
if post
|
||||
.description
|
||||
.as_deref()
|
||||
.unwrap_or_default()
|
||||
.to_lowercase()
|
||||
.contains(&wanted_lower)
|
||||
{
|
||||
rank += 2.5;
|
||||
}
|
||||
|
||||
if post
|
||||
.content
|
||||
.as_deref()
|
||||
.unwrap_or_default()
|
||||
.to_lowercase()
|
||||
.contains(&wanted_lower)
|
||||
{
|
||||
rank += 1.0;
|
||||
}
|
||||
|
||||
if post
|
||||
.category
|
||||
.as_deref()
|
||||
.unwrap_or_default()
|
||||
.to_lowercase()
|
||||
.contains(&wanted_lower)
|
||||
{
|
||||
rank += 1.5;
|
||||
}
|
||||
|
||||
if post
|
||||
.tags
|
||||
.as_ref()
|
||||
.and_then(Value::as_array)
|
||||
.map(|tags| {
|
||||
tags.iter()
|
||||
.filter_map(Value::as_str)
|
||||
.any(|tag| tag.to_lowercase().contains(&wanted_lower))
|
||||
})
|
||||
.unwrap_or(false)
|
||||
{
|
||||
rank += 2.0;
|
||||
}
|
||||
|
||||
rank
|
||||
}
|
||||
|
||||
async fn fallback_search(ctx: &AppContext, q: &str, limit: u64) -> Result<Vec<SearchResult>> {
|
||||
let mut results = posts::Entity::find().all(&ctx.db).await?;
|
||||
results.sort_by(|left, right| right.created_at.cmp(&left.created_at));
|
||||
|
||||
Ok(results
|
||||
.into_iter()
|
||||
.map(|post| {
|
||||
let rank = app_level_rank(&post, q);
|
||||
(post, rank)
|
||||
})
|
||||
.filter(|(_, rank)| *rank > 0.0)
|
||||
.take(limit as usize)
|
||||
.map(|(post, rank)| SearchResult {
|
||||
id: post.id,
|
||||
title: post.title,
|
||||
slug: post.slug,
|
||||
description: post.description,
|
||||
content: post.content,
|
||||
category: post.category,
|
||||
tags: post.tags,
|
||||
post_type: post.post_type,
|
||||
pinned: post.pinned,
|
||||
created_at: post.created_at.into(),
|
||||
updated_at: post.updated_at.into(),
|
||||
rank,
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn search(
|
||||
Query(query): Query<SearchQuery>,
|
||||
@@ -199,26 +317,107 @@ pub async fn search(
|
||||
return format::json(Vec::<SearchResult>::new());
|
||||
}
|
||||
|
||||
let limit = query.limit.unwrap_or(20).clamp(1, 100);
|
||||
if !preview_search {
|
||||
abuse_guard::enforce_public_scope(
|
||||
"search",
|
||||
abuse_guard::detect_client_ip(&headers).as_deref(),
|
||||
Some(&q),
|
||||
)?;
|
||||
}
|
||||
|
||||
let results = if ctx.db.get_database_backend() == DatabaseBackend::Postgres {
|
||||
let statement = Statement::from_sql_and_values(
|
||||
DbBackend::Postgres,
|
||||
search_sql(),
|
||||
[q.clone().into(), (limit as i64).into()],
|
||||
);
|
||||
let limit = query.limit.unwrap_or(20).clamp(1, 100) as usize;
|
||||
let settings = site_settings::load_current(&ctx).await.ok();
|
||||
let synonym_groups = settings
|
||||
.as_ref()
|
||||
.map(|item| parse_synonym_groups(&item.search_synonyms))
|
||||
.unwrap_or_default();
|
||||
|
||||
match SearchResult::find_by_statement(statement)
|
||||
.all(&ctx.db)
|
||||
.await
|
||||
{
|
||||
Ok(rows) if !rows.is_empty() => rows,
|
||||
Ok(_) => fallback_search(&ctx, &q, limit).await?,
|
||||
Err(_) => fallback_search(&ctx, &q, limit).await?,
|
||||
let mut all_posts = posts::Entity::find()
|
||||
.all(&ctx.db)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|post| {
|
||||
preview_search
|
||||
|| content::is_post_listed_publicly(post, chrono::Utc::now().fixed_offset())
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if let Some(category) = query.category.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
|
||||
all_posts.retain(|post| {
|
||||
post.category
|
||||
.as_deref()
|
||||
.map(|value| value.eq_ignore_ascii_case(category))
|
||||
.unwrap_or(false)
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(tag) = query.tag.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
|
||||
all_posts.retain(|post| post_has_tag(post, tag));
|
||||
}
|
||||
|
||||
if let Some(post_type) = query.post_type.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
|
||||
all_posts.retain(|post| {
|
||||
post.post_type
|
||||
.as_deref()
|
||||
.map(|value| value.eq_ignore_ascii_case(post_type))
|
||||
.unwrap_or(false)
|
||||
});
|
||||
}
|
||||
|
||||
let mut expanded_terms = expand_search_terms(&q, &synonym_groups);
|
||||
let mut results = all_posts
|
||||
.iter()
|
||||
.map(|post| (post, score_post(post, &q, &expanded_terms)))
|
||||
.filter(|(_, rank)| *rank > 0.0)
|
||||
.map(|(post, rank)| SearchResult {
|
||||
id: post.id,
|
||||
title: post.title.clone(),
|
||||
slug: post.slug.clone(),
|
||||
description: post.description.clone(),
|
||||
content: post.content.clone(),
|
||||
category: post.category.clone(),
|
||||
tags: post.tags.clone(),
|
||||
post_type: post.post_type.clone(),
|
||||
pinned: post.pinned,
|
||||
created_at: post.created_at.into(),
|
||||
updated_at: post.updated_at.into(),
|
||||
rank,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if results.is_empty() {
|
||||
expanded_terms = find_spelling_fallback(&q, &all_posts, &synonym_groups);
|
||||
if !expanded_terms.is_empty() {
|
||||
results = all_posts
|
||||
.iter()
|
||||
.map(|post| (post, score_post(post, &q, &expanded_terms)))
|
||||
.filter(|(_, rank)| *rank > 0.0)
|
||||
.map(|(post, rank)| SearchResult {
|
||||
id: post.id,
|
||||
title: post.title.clone(),
|
||||
slug: post.slug.clone(),
|
||||
description: post.description.clone(),
|
||||
content: post.content.clone(),
|
||||
category: post.category.clone(),
|
||||
tags: post.tags.clone(),
|
||||
post_type: post.post_type.clone(),
|
||||
pinned: post.pinned,
|
||||
created_at: post.created_at.into(),
|
||||
updated_at: post.updated_at.into(),
|
||||
rank,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
}
|
||||
} else {
|
||||
fallback_search(&ctx, &q, limit).await?
|
||||
};
|
||||
}
|
||||
|
||||
results.sort_by(|left, right| {
|
||||
right
|
||||
.rank
|
||||
.partial_cmp(&left.rank)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
.then_with(|| right.created_at.cmp(&left.created_at))
|
||||
});
|
||||
results.truncate(limit);
|
||||
|
||||
if !preview_search {
|
||||
analytics::record_search_event(
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
#![allow(clippy::unnecessary_struct_initialization)]
|
||||
#![allow(clippy::unused_async)]
|
||||
|
||||
use axum::http::HeaderMap;
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{ActiveModelTrait, EntityTrait, IntoActiveModel, QueryOrder, Set};
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -11,7 +12,9 @@ use uuid::Uuid;
|
||||
use crate::{
|
||||
controllers::admin::check_auth,
|
||||
models::_entities::{
|
||||
categories, friend_links, posts, site_settings::{self, ActiveModel, Entity, Model}, tags,
|
||||
categories, friend_links, posts,
|
||||
site_settings::{self, ActiveModel, Entity, Model},
|
||||
tags,
|
||||
},
|
||||
services::{ai, content},
|
||||
};
|
||||
@@ -130,6 +133,18 @@ pub struct SiteSettingsPayload {
|
||||
pub media_r2_access_key_id: Option<String>,
|
||||
#[serde(default, alias = "mediaR2SecretAccessKey")]
|
||||
pub media_r2_secret_access_key: Option<String>,
|
||||
#[serde(default, alias = "seoDefaultOgImage")]
|
||||
pub seo_default_og_image: Option<String>,
|
||||
#[serde(default, alias = "seoDefaultTwitterHandle")]
|
||||
pub seo_default_twitter_handle: Option<String>,
|
||||
#[serde(default, alias = "notificationWebhookUrl")]
|
||||
pub notification_webhook_url: Option<String>,
|
||||
#[serde(default, alias = "notificationCommentEnabled")]
|
||||
pub notification_comment_enabled: Option<bool>,
|
||||
#[serde(default, alias = "notificationFriendLinkEnabled")]
|
||||
pub notification_friend_link_enabled: Option<bool>,
|
||||
#[serde(default, alias = "searchSynonyms")]
|
||||
pub search_synonyms: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
@@ -154,6 +169,8 @@ pub struct PublicSiteSettingsResponse {
|
||||
pub music_playlist: Option<serde_json::Value>,
|
||||
pub ai_enabled: bool,
|
||||
pub paragraph_comments_enabled: bool,
|
||||
pub seo_default_og_image: Option<String>,
|
||||
pub seo_default_twitter_handle: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
@@ -171,6 +188,9 @@ pub struct HomePageResponse {
|
||||
pub tags: Vec<tags::Model>,
|
||||
pub friend_links: Vec<friend_links::Model>,
|
||||
pub categories: Vec<HomeCategorySummary>,
|
||||
pub content_overview: crate::services::analytics::ContentAnalyticsOverview,
|
||||
pub popular_posts: Vec<crate::services::analytics::AnalyticsPopularPost>,
|
||||
pub content_ranges: Vec<crate::services::analytics::PublicContentWindowHighlights>,
|
||||
}
|
||||
|
||||
fn normalize_optional_string(value: Option<String>) -> Option<String> {
|
||||
@@ -188,6 +208,13 @@ fn normalize_optional_int(value: Option<i32>, min: i32, max: i32) -> Option<i32>
|
||||
value.map(|item| item.clamp(min, max))
|
||||
}
|
||||
|
||||
fn normalize_string_list(values: Vec<String>) -> Vec<String> {
|
||||
values
|
||||
.into_iter()
|
||||
.filter_map(|item| normalize_optional_string(Some(item)))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn create_ai_provider_id() -> String {
|
||||
format!("provider-{}", Uuid::new_v4().simple())
|
||||
}
|
||||
@@ -525,6 +552,27 @@ impl SiteSettingsPayload {
|
||||
item.media_r2_secret_access_key =
|
||||
normalize_optional_string(Some(media_r2_secret_access_key));
|
||||
}
|
||||
if let Some(seo_default_og_image) = self.seo_default_og_image {
|
||||
item.seo_default_og_image = normalize_optional_string(Some(seo_default_og_image));
|
||||
}
|
||||
if let Some(seo_default_twitter_handle) = self.seo_default_twitter_handle {
|
||||
item.seo_default_twitter_handle =
|
||||
normalize_optional_string(Some(seo_default_twitter_handle));
|
||||
}
|
||||
if let Some(notification_webhook_url) = self.notification_webhook_url {
|
||||
item.notification_webhook_url =
|
||||
normalize_optional_string(Some(notification_webhook_url));
|
||||
}
|
||||
if let Some(notification_comment_enabled) = self.notification_comment_enabled {
|
||||
item.notification_comment_enabled = Some(notification_comment_enabled);
|
||||
}
|
||||
if let Some(notification_friend_link_enabled) = self.notification_friend_link_enabled {
|
||||
item.notification_friend_link_enabled = Some(notification_friend_link_enabled);
|
||||
}
|
||||
if let Some(search_synonyms) = self.search_synonyms {
|
||||
let normalized = normalize_string_list(search_synonyms);
|
||||
item.search_synonyms = (!normalized.is_empty()).then(|| serde_json::json!(normalized));
|
||||
}
|
||||
|
||||
if provider_list_supplied {
|
||||
write_ai_provider_state(
|
||||
@@ -631,6 +679,12 @@ fn default_payload() -> SiteSettingsPayload {
|
||||
media_r2_public_base_url: None,
|
||||
media_r2_access_key_id: None,
|
||||
media_r2_secret_access_key: None,
|
||||
seo_default_og_image: None,
|
||||
seo_default_twitter_handle: None,
|
||||
notification_webhook_url: None,
|
||||
notification_comment_enabled: Some(false),
|
||||
notification_friend_link_enabled: Some(false),
|
||||
search_synonyms: Some(Vec::new()),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -680,6 +734,8 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
|
||||
music_playlist: model.music_playlist,
|
||||
ai_enabled: model.ai_enabled.unwrap_or(false),
|
||||
paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true),
|
||||
seo_default_og_image: model.seo_default_og_image,
|
||||
seo_default_twitter_handle: model.seo_default_twitter_handle,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -691,9 +747,13 @@ pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
|
||||
let posts = posts::Entity::find()
|
||||
.order_by_desc(posts::Column::CreatedAt)
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|post| content::is_post_listed_publicly(post, chrono::Utc::now().fixed_offset()))
|
||||
.collect::<Vec<_>>();
|
||||
let tags = tags::Entity::find().all(&ctx.db).await?;
|
||||
let friend_links = friend_links::Entity::find()
|
||||
.filter(friend_links::Column::Status.eq("approved"))
|
||||
.order_by_desc(friend_links::Column::CreatedAt)
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
@@ -722,6 +782,9 @@ pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let content_highlights =
|
||||
crate::services::analytics::build_public_content_highlights(&ctx, &posts).await?;
|
||||
let content_ranges = crate::services::analytics::build_public_content_windows(&ctx, &posts).await?;
|
||||
|
||||
format::json(HomePageResponse {
|
||||
site_settings,
|
||||
@@ -729,6 +792,9 @@ pub async fn home(State(ctx): State<AppContext>) -> Result<Response> {
|
||||
tags,
|
||||
friend_links,
|
||||
categories,
|
||||
content_overview: content_highlights.overview,
|
||||
popular_posts: content_highlights.popular_posts,
|
||||
content_ranges,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -739,10 +805,11 @@ pub async fn show(State(ctx): State<AppContext>) -> Result<Response> {
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn update(
|
||||
headers: HeaderMap,
|
||||
State(ctx): State<AppContext>,
|
||||
Json(params): Json<SiteSettingsPayload>,
|
||||
) -> Result<Response> {
|
||||
check_auth()?;
|
||||
check_auth(&headers)?;
|
||||
|
||||
let current = load_current(&ctx).await?;
|
||||
let mut item = current;
|
||||
|
||||
202
backend/src/controllers/subscription.rs
Normal file
202
backend/src/controllers/subscription.rs
Normal file
@@ -0,0 +1,202 @@
|
||||
use loco_rs::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::services::{abuse_guard, admin_audit, subscriptions};
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct PublicSubscriptionPayload {
|
||||
pub email: String,
|
||||
#[serde(default, alias = "displayName")]
|
||||
pub display_name: Option<String>,
|
||||
#[serde(default)]
|
||||
pub source: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct SubscriptionTokenPayload {
|
||||
pub token: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct SubscriptionManageQuery {
|
||||
pub token: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
pub struct SubscriptionManageUpdatePayload {
|
||||
pub token: String,
|
||||
#[serde(default, alias = "displayName")]
|
||||
pub display_name: Option<String>,
|
||||
#[serde(default)]
|
||||
pub status: Option<String>,
|
||||
#[serde(default)]
|
||||
pub filters: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct PublicSubscriptionResponse {
|
||||
pub ok: bool,
|
||||
pub subscription_id: i32,
|
||||
pub status: String,
|
||||
pub requires_confirmation: bool,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct SubscriptionManageResponse {
|
||||
pub ok: bool,
|
||||
pub subscription: subscriptions::PublicSubscriptionView,
|
||||
}
|
||||
|
||||
fn public_subscription_metadata(source: Option<String>) -> serde_json::Value {
|
||||
serde_json::json!({
|
||||
"source": source,
|
||||
"kind": "public-form",
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn subscribe(
|
||||
State(ctx): State<AppContext>,
|
||||
headers: axum::http::HeaderMap,
|
||||
Json(payload): Json<PublicSubscriptionPayload>,
|
||||
) -> Result<Response> {
|
||||
let email = payload.email.trim().to_ascii_lowercase();
|
||||
abuse_guard::enforce_public_scope(
|
||||
"subscription",
|
||||
abuse_guard::detect_client_ip(&headers).as_deref(),
|
||||
Some(&email),
|
||||
)?;
|
||||
|
||||
let result = subscriptions::create_public_email_subscription(
|
||||
&ctx,
|
||||
&email,
|
||||
payload.display_name,
|
||||
Some(public_subscription_metadata(payload.source)),
|
||||
)
|
||||
.await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
None,
|
||||
if result.requires_confirmation {
|
||||
"subscription.public.pending"
|
||||
} else {
|
||||
"subscription.public.active"
|
||||
},
|
||||
"subscription",
|
||||
Some(result.subscription.id.to_string()),
|
||||
Some(result.subscription.target.clone()),
|
||||
Some(serde_json::json!({
|
||||
"channel_type": result.subscription.channel_type,
|
||||
"status": result.subscription.status,
|
||||
})),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(PublicSubscriptionResponse {
|
||||
ok: true,
|
||||
subscription_id: result.subscription.id,
|
||||
status: result.subscription.status,
|
||||
requires_confirmation: result.requires_confirmation,
|
||||
message: result.message,
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn confirm(
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<SubscriptionTokenPayload>,
|
||||
) -> Result<Response> {
|
||||
let item = subscriptions::confirm_subscription(&ctx, &payload.token).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
None,
|
||||
"subscription.public.confirm",
|
||||
"subscription",
|
||||
Some(item.id.to_string()),
|
||||
Some(item.target.clone()),
|
||||
Some(serde_json::json!({ "channel_type": item.channel_type })),
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(SubscriptionManageResponse {
|
||||
ok: true,
|
||||
subscription: subscriptions::to_public_subscription_view(&item),
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn manage(
|
||||
State(ctx): State<AppContext>,
|
||||
Query(query): Query<SubscriptionManageQuery>,
|
||||
) -> Result<Response> {
|
||||
let item = subscriptions::get_subscription_by_manage_token(&ctx, &query.token).await?;
|
||||
format::json(SubscriptionManageResponse {
|
||||
ok: true,
|
||||
subscription: subscriptions::to_public_subscription_view(&item),
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn update_manage(
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<SubscriptionManageUpdatePayload>,
|
||||
) -> Result<Response> {
|
||||
let item = subscriptions::update_subscription_preferences(
|
||||
&ctx,
|
||||
&payload.token,
|
||||
payload.display_name,
|
||||
payload.status,
|
||||
payload.filters,
|
||||
)
|
||||
.await?;
|
||||
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
None,
|
||||
"subscription.public.update",
|
||||
"subscription",
|
||||
Some(item.id.to_string()),
|
||||
Some(item.target.clone()),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(SubscriptionManageResponse {
|
||||
ok: true,
|
||||
subscription: subscriptions::to_public_subscription_view(&item),
|
||||
})
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn unsubscribe(
|
||||
State(ctx): State<AppContext>,
|
||||
Json(payload): Json<SubscriptionTokenPayload>,
|
||||
) -> Result<Response> {
|
||||
let item = subscriptions::unsubscribe_subscription(&ctx, &payload.token).await?;
|
||||
admin_audit::log_event(
|
||||
&ctx,
|
||||
None,
|
||||
"subscription.public.unsubscribe",
|
||||
"subscription",
|
||||
Some(item.id.to_string()),
|
||||
Some(item.target.clone()),
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
format::json(SubscriptionManageResponse {
|
||||
ok: true,
|
||||
subscription: subscriptions::to_public_subscription_view(&item),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn routes() -> Routes {
|
||||
Routes::new()
|
||||
.prefix("/api/subscriptions")
|
||||
.add("/", post(subscribe))
|
||||
.add("/confirm", post(confirm))
|
||||
.add("/manage", get(manage).patch(update_manage))
|
||||
.add("/unsubscribe", post(unsubscribe))
|
||||
}
|
||||
@@ -1,2 +1 @@
|
||||
pub mod content_sync;
|
||||
pub mod view_engine;
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
use async_trait::async_trait;
|
||||
use axum::{Extension, Router as AxumRouter};
|
||||
use fluent_templates::{ArcLoader, FluentLoader};
|
||||
use loco_rs::{
|
||||
app::{AppContext, Initializer},
|
||||
controller::views::{engines, ViewEngine},
|
||||
Error, Result,
|
||||
};
|
||||
use tracing::info;
|
||||
|
||||
const I18N_DIR: &str = "assets/i18n";
|
||||
const I18N_SHARED: &str = "assets/i18n/shared.ftl";
|
||||
#[allow(clippy::module_name_repetitions)]
|
||||
pub struct ViewEngineInitializer;
|
||||
|
||||
#[async_trait]
|
||||
impl Initializer for ViewEngineInitializer {
|
||||
fn name(&self) -> String {
|
||||
"view-engine".to_string()
|
||||
}
|
||||
|
||||
async fn after_routes(&self, router: AxumRouter, _ctx: &AppContext) -> Result<AxumRouter> {
|
||||
let tera_engine = if std::path::Path::new(I18N_DIR).exists() {
|
||||
let arc = std::sync::Arc::new(
|
||||
ArcLoader::builder(&I18N_DIR, unic_langid::langid!("en-US"))
|
||||
.shared_resources(Some(&[I18N_SHARED.into()]))
|
||||
.customize(|bundle| bundle.set_use_isolating(false))
|
||||
.build()
|
||||
.map_err(|e| Error::string(&e.to_string()))?,
|
||||
);
|
||||
info!("locales loaded");
|
||||
|
||||
engines::TeraView::build()?.post_process(move |tera| {
|
||||
tera.register_function("t", FluentLoader::new(arc.clone()));
|
||||
Ok(())
|
||||
})?
|
||||
} else {
|
||||
engines::TeraView::build()?
|
||||
};
|
||||
|
||||
Ok(router.layer(Extension(ViewEngine::from(tera_engine))))
|
||||
}
|
||||
}
|
||||
@@ -1 +1,2 @@
|
||||
pub mod auth;
|
||||
pub mod subscription;
|
||||
|
||||
77
backend/src/mailers/subscription.rs
Normal file
77
backend/src/mailers/subscription.rs
Normal file
@@ -0,0 +1,77 @@
|
||||
#![allow(non_upper_case_globals)]
|
||||
|
||||
use loco_rs::prelude::*;
|
||||
use serde_json::json;
|
||||
|
||||
static confirm: Dir<'_> = include_dir!("src/mailers/subscription/confirm");
|
||||
static notification: Dir<'_> = include_dir!("src/mailers/subscription/notification");
|
||||
|
||||
pub struct SubscriptionMailer {}
|
||||
impl Mailer for SubscriptionMailer {}
|
||||
|
||||
impl SubscriptionMailer {
|
||||
pub async fn send_confirmation(
|
||||
ctx: &AppContext,
|
||||
to: &str,
|
||||
site_name: Option<&str>,
|
||||
site_url: Option<&str>,
|
||||
confirm_url: &str,
|
||||
manage_url: Option<&str>,
|
||||
) -> Result<()> {
|
||||
Self::mail_template(
|
||||
ctx,
|
||||
&confirm,
|
||||
mailer::Args {
|
||||
to: to.to_string(),
|
||||
locals: json!({
|
||||
"subject": "请确认你的订阅",
|
||||
"siteName": site_name.unwrap_or("Termi"),
|
||||
"siteUrl": site_url
|
||||
.map(ToString::to_string)
|
||||
.unwrap_or_else(|| ctx.config.server.full_url()),
|
||||
"confirmUrl": confirm_url,
|
||||
"manageUrl": manage_url,
|
||||
}),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn send_notification(
|
||||
ctx: &AppContext,
|
||||
to: &str,
|
||||
subject: &str,
|
||||
headline: &str,
|
||||
body: &str,
|
||||
site_name: Option<&str>,
|
||||
site_url: Option<&str>,
|
||||
manage_url: Option<&str>,
|
||||
unsubscribe_url: Option<&str>,
|
||||
) -> Result<()> {
|
||||
Self::mail_template(
|
||||
ctx,
|
||||
¬ification,
|
||||
mailer::Args {
|
||||
to: to.to_string(),
|
||||
locals: json!({
|
||||
"subject": subject,
|
||||
"headline": headline,
|
||||
"body": body,
|
||||
"siteName": site_name.unwrap_or("Termi"),
|
||||
"siteUrl": site_url
|
||||
.map(ToString::to_string)
|
||||
.unwrap_or_else(|| ctx.config.server.full_url()),
|
||||
"manageUrl": manage_url,
|
||||
"unsubscribeUrl": unsubscribe_url,
|
||||
}),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
25
backend/src/mailers/subscription/confirm/html.t
Normal file
25
backend/src/mailers/subscription/confirm/html.t
Normal file
@@ -0,0 +1,25 @@
|
||||
<html>
|
||||
<body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; color: #0f172a; line-height: 1.7;">
|
||||
<div style="max-width: 640px; margin: 0 auto; padding: 24px;">
|
||||
<p style="font-size: 12px; letter-spacing: 0.18em; text-transform: uppercase; color: #64748b;">{{ siteName }}</p>
|
||||
<h1 style="margin-top: 8px; font-size: 24px;">请确认你的订阅</h1>
|
||||
<p style="margin-top: 20px;">为了确认这是你本人提交的邮箱,请点击下面的确认按钮。</p>
|
||||
<p style="margin-top: 24px;">
|
||||
<a href="{{ confirmUrl }}" style="display: inline-block; padding: 12px 18px; border-radius: 9999px; background: #0f172a; color: #ffffff; text-decoration: none;">确认订阅</a>
|
||||
</p>
|
||||
<p style="margin-top: 20px; font-size: 14px; color: #475569; word-break: break-all;">
|
||||
如果按钮无法点击,请直接打开:<br />
|
||||
<a href="{{ confirmUrl }}">{{ confirmUrl }}</a>
|
||||
</p>
|
||||
{% if manageUrl %}
|
||||
<p style="margin-top: 20px; font-size: 14px; color: #475569;">
|
||||
确认完成后,你可以在这里管理偏好:<br />
|
||||
<a href="{{ manageUrl }}">{{ manageUrl }}</a>
|
||||
</p>
|
||||
{% endif %}
|
||||
<p style="margin-top: 28px; font-size: 13px; color: #64748b;">
|
||||
来自 {{ siteName }} · <a href="{{ siteUrl }}">{{ siteUrl }}</a>
|
||||
</p>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
1
backend/src/mailers/subscription/confirm/subject.t
Normal file
1
backend/src/mailers/subscription/confirm/subject.t
Normal file
@@ -0,0 +1 @@
|
||||
请确认你的订阅
|
||||
13
backend/src/mailers/subscription/confirm/text.t
Normal file
13
backend/src/mailers/subscription/confirm/text.t
Normal file
@@ -0,0 +1,13 @@
|
||||
你好,
|
||||
|
||||
请点击下面的链接确认你的订阅:
|
||||
{{ confirmUrl }}
|
||||
|
||||
{% if manageUrl %}
|
||||
确认完成后,你也可以通过这个链接管理偏好:
|
||||
{{ manageUrl }}
|
||||
{% endif %}
|
||||
|
||||
--
|
||||
{{ siteName }}
|
||||
{{ siteUrl }}
|
||||
22
backend/src/mailers/subscription/notification/html.t
Normal file
22
backend/src/mailers/subscription/notification/html.t
Normal file
@@ -0,0 +1,22 @@
|
||||
<html>
|
||||
<body style="font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; color: #0f172a; line-height: 1.7;">
|
||||
<div style="max-width: 640px; margin: 0 auto; padding: 24px;">
|
||||
<p style="font-size: 12px; letter-spacing: 0.18em; text-transform: uppercase; color: #64748b;">{{ siteName }}</p>
|
||||
<h1 style="margin-top: 8px; font-size: 24px;">{{ headline }}</h1>
|
||||
<div style="margin-top: 20px; white-space: pre-wrap;">{{ body }}</div>
|
||||
{% if manageUrl or unsubscribeUrl %}
|
||||
<div style="margin-top: 24px; display: flex; flex-wrap: wrap; gap: 12px;">
|
||||
{% if manageUrl %}
|
||||
<a href="{{ manageUrl }}" style="display: inline-block; padding: 10px 16px; border-radius: 9999px; background: #0f172a; color: #ffffff; text-decoration: none;">管理订阅</a>
|
||||
{% endif %}
|
||||
{% if unsubscribeUrl %}
|
||||
<a href="{{ unsubscribeUrl }}" style="display: inline-block; padding: 10px 16px; border-radius: 9999px; border: 1px solid #cbd5e1; color: #334155; text-decoration: none;">取消订阅</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
<p style="margin-top: 28px; font-size: 13px; color: #64748b;">
|
||||
来自 {{ siteName }} · <a href="{{ siteUrl }}">{{ siteUrl }}</a>
|
||||
</p>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
1
backend/src/mailers/subscription/notification/subject.t
Normal file
1
backend/src/mailers/subscription/notification/subject.t
Normal file
@@ -0,0 +1 @@
|
||||
{{ subject }}
|
||||
14
backend/src/mailers/subscription/notification/text.t
Normal file
14
backend/src/mailers/subscription/notification/text.t
Normal file
@@ -0,0 +1,14 @@
|
||||
{{ headline }}
|
||||
|
||||
{{ body }}
|
||||
|
||||
{% if manageUrl %}
|
||||
管理订阅:{{ manageUrl }}
|
||||
{% endif %}
|
||||
{% if unsubscribeUrl %}
|
||||
取消订阅:{{ unsubscribeUrl }}
|
||||
{% endif %}
|
||||
|
||||
--
|
||||
{{ siteName }}
|
||||
{{ siteUrl }}
|
||||
27
backend/src/models/_entities/admin_audit_logs.rs
Normal file
27
backend/src/models/_entities/admin_audit_logs.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
//! `SeaORM` Entity, manually maintained
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "admin_audit_logs")]
|
||||
pub struct Model {
|
||||
pub created_at: DateTimeWithTimeZone,
|
||||
pub updated_at: DateTimeWithTimeZone,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub actor_username: Option<String>,
|
||||
pub actor_email: Option<String>,
|
||||
pub actor_source: Option<String>,
|
||||
pub action: String,
|
||||
pub target_type: String,
|
||||
pub target_id: Option<String>,
|
||||
pub target_label: Option<String>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub metadata: Option<Json>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
24
backend/src/models/_entities/comment_blacklist.rs
Normal file
24
backend/src/models/_entities/comment_blacklist.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
//! `SeaORM` Entity, manually maintained
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "comment_blacklist")]
|
||||
pub struct Model {
|
||||
pub created_at: DateTimeWithTimeZone,
|
||||
pub updated_at: DateTimeWithTimeZone,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub matcher_type: String,
|
||||
pub matcher_value: String,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub reason: Option<String>,
|
||||
pub active: Option<bool>,
|
||||
pub expires_at: Option<DateTimeWithTimeZone>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
@@ -0,0 +1,28 @@
|
||||
//! `SeaORM` Entity, manually maintained
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "comment_persona_analysis_logs")]
|
||||
pub struct Model {
|
||||
pub created_at: DateTimeWithTimeZone,
|
||||
pub updated_at: DateTimeWithTimeZone,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub matcher_type: String,
|
||||
pub matcher_value: String,
|
||||
pub from_at: Option<DateTimeWithTimeZone>,
|
||||
pub to_at: Option<DateTimeWithTimeZone>,
|
||||
pub total_comments: i32,
|
||||
pub pending_comments: i32,
|
||||
pub distinct_posts: i32,
|
||||
#[sea_orm(column_type = "Text")]
|
||||
pub analysis_text: String,
|
||||
pub sample_json: Option<Json>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
@@ -15,6 +15,9 @@ pub struct Model {
|
||||
pub author: Option<String>,
|
||||
pub email: Option<String>,
|
||||
pub avatar: Option<String>,
|
||||
pub ip_address: Option<String>,
|
||||
pub user_agent: Option<String>,
|
||||
pub referer: Option<String>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub content: Option<String>,
|
||||
pub scope: String,
|
||||
|
||||
29
backend/src/models/_entities/content_events.rs
Normal file
29
backend/src/models/_entities/content_events.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
//! `SeaORM` Entity, manually maintained
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "content_events")]
|
||||
pub struct Model {
|
||||
pub created_at: DateTimeWithTimeZone,
|
||||
pub updated_at: DateTimeWithTimeZone,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub event_type: String,
|
||||
pub path: String,
|
||||
pub post_slug: Option<String>,
|
||||
pub session_id: Option<String>,
|
||||
pub referrer: Option<String>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub user_agent: Option<String>,
|
||||
pub duration_ms: Option<i32>,
|
||||
pub progress_percent: Option<i32>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub metadata: Option<Json>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
@@ -3,12 +3,19 @@
|
||||
pub mod ai_chunks;
|
||||
pub mod prelude;
|
||||
|
||||
pub mod admin_audit_logs;
|
||||
pub mod categories;
|
||||
pub mod comment_blacklist;
|
||||
pub mod comment_persona_analysis_logs;
|
||||
pub mod comments;
|
||||
pub mod content_events;
|
||||
pub mod friend_links;
|
||||
pub mod notification_deliveries;
|
||||
pub mod post_revisions;
|
||||
pub mod posts;
|
||||
pub mod query_events;
|
||||
pub mod reviews;
|
||||
pub mod site_settings;
|
||||
pub mod subscriptions;
|
||||
pub mod tags;
|
||||
pub mod users;
|
||||
|
||||
32
backend/src/models/_entities/notification_deliveries.rs
Normal file
32
backend/src/models/_entities/notification_deliveries.rs
Normal file
@@ -0,0 +1,32 @@
|
||||
//! `SeaORM` Entity, manually maintained
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "notification_deliveries")]
|
||||
pub struct Model {
|
||||
pub created_at: DateTimeWithTimeZone,
|
||||
pub updated_at: DateTimeWithTimeZone,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub subscription_id: Option<i32>,
|
||||
pub channel_type: String,
|
||||
pub target: String,
|
||||
pub event_type: String,
|
||||
pub status: String,
|
||||
pub provider: Option<String>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub response_text: Option<String>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub payload: Option<Json>,
|
||||
pub attempts_count: i32,
|
||||
pub next_retry_at: Option<String>,
|
||||
pub last_attempt_at: Option<String>,
|
||||
pub delivered_at: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
30
backend/src/models/_entities/post_revisions.rs
Normal file
30
backend/src/models/_entities/post_revisions.rs
Normal file
@@ -0,0 +1,30 @@
|
||||
//! `SeaORM` Entity, manually maintained
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "post_revisions")]
|
||||
pub struct Model {
|
||||
pub created_at: DateTimeWithTimeZone,
|
||||
pub updated_at: DateTimeWithTimeZone,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub post_slug: String,
|
||||
pub post_title: Option<String>,
|
||||
pub operation: String,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub revision_reason: Option<String>,
|
||||
pub actor_username: Option<String>,
|
||||
pub actor_email: Option<String>,
|
||||
pub actor_source: Option<String>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub markdown: Option<String>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub metadata: Option<Json>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
@@ -1,4 +1,4 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.10
|
||||
//! `SeaORM` Entity, manually maintained
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -23,6 +23,19 @@ pub struct Model {
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub images: Option<Json>,
|
||||
pub pinned: Option<bool>,
|
||||
pub status: Option<String>,
|
||||
pub visibility: Option<String>,
|
||||
pub publish_at: Option<DateTimeWithTimeZone>,
|
||||
pub unpublish_at: Option<DateTimeWithTimeZone>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub canonical_url: Option<String>,
|
||||
pub noindex: Option<bool>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub og_image: Option<String>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub redirect_from: Option<Json>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub redirect_to: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
@@ -1,12 +1,19 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.10
|
||||
|
||||
pub use super::ai_chunks::Entity as AiChunks;
|
||||
pub use super::admin_audit_logs::Entity as AdminAuditLogs;
|
||||
pub use super::categories::Entity as Categories;
|
||||
pub use super::comment_blacklist::Entity as CommentBlacklist;
|
||||
pub use super::comment_persona_analysis_logs::Entity as CommentPersonaAnalysisLogs;
|
||||
pub use super::comments::Entity as Comments;
|
||||
pub use super::content_events::Entity as ContentEvents;
|
||||
pub use super::friend_links::Entity as FriendLinks;
|
||||
pub use super::notification_deliveries::Entity as NotificationDeliveries;
|
||||
pub use super::post_revisions::Entity as PostRevisions;
|
||||
pub use super::posts::Entity as Posts;
|
||||
pub use super::query_events::Entity as QueryEvents;
|
||||
pub use super::reviews::Entity as Reviews;
|
||||
pub use super::site_settings::Entity as SiteSettings;
|
||||
pub use super::subscriptions::Entity as Subscriptions;
|
||||
pub use super::tags::Entity as Tags;
|
||||
pub use super::users::Entity as Users;
|
||||
|
||||
@@ -58,6 +58,15 @@ pub struct Model {
|
||||
pub media_r2_access_key_id: Option<String>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub media_r2_secret_access_key: Option<String>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub seo_default_og_image: Option<String>,
|
||||
pub seo_default_twitter_handle: Option<String>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub notification_webhook_url: Option<String>,
|
||||
pub notification_comment_enabled: Option<bool>,
|
||||
pub notification_friend_link_enabled: Option<bool>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub search_synonyms: Option<Json>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
|
||||
36
backend/src/models/_entities/subscriptions.rs
Normal file
36
backend/src/models/_entities/subscriptions.rs
Normal file
@@ -0,0 +1,36 @@
|
||||
//! `SeaORM` Entity, manually maintained
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "subscriptions")]
|
||||
pub struct Model {
|
||||
pub created_at: DateTimeWithTimeZone,
|
||||
pub updated_at: DateTimeWithTimeZone,
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i32,
|
||||
pub channel_type: String,
|
||||
pub target: String,
|
||||
pub display_name: Option<String>,
|
||||
pub status: String,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub filters: Option<Json>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub secret: Option<String>,
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub notes: Option<String>,
|
||||
pub confirm_token: Option<String>,
|
||||
pub manage_token: Option<String>,
|
||||
#[sea_orm(column_type = "JsonBinary", nullable)]
|
||||
pub metadata: Option<Json>,
|
||||
pub verified_at: Option<String>,
|
||||
pub last_notified_at: Option<String>,
|
||||
pub failure_count: Option<i32>,
|
||||
pub last_delivery_status: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
210
backend/src/services/abuse_guard.rs
Normal file
210
backend/src/services/abuse_guard.rs
Normal file
@@ -0,0 +1,210 @@
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
sync::{Mutex, OnceLock},
|
||||
};
|
||||
|
||||
use axum::http::{header, HeaderMap, StatusCode};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use loco_rs::{
|
||||
controller::ErrorDetail,
|
||||
prelude::*,
|
||||
};
|
||||
|
||||
const DEFAULT_WINDOW_SECONDS: i64 = 5 * 60;
|
||||
const DEFAULT_MAX_REQUESTS_PER_WINDOW: u32 = 45;
|
||||
const DEFAULT_BAN_MINUTES: i64 = 30;
|
||||
const DEFAULT_BURST_LIMIT: u32 = 8;
|
||||
const DEFAULT_BURST_WINDOW_SECONDS: i64 = 30;
|
||||
|
||||
const ENV_WINDOW_SECONDS: &str = "TERMI_PUBLIC_RATE_LIMIT_WINDOW_SECONDS";
|
||||
const ENV_MAX_REQUESTS_PER_WINDOW: &str = "TERMI_PUBLIC_RATE_LIMIT_MAX";
|
||||
const ENV_BAN_MINUTES: &str = "TERMI_PUBLIC_RATE_LIMIT_BAN_MINUTES";
|
||||
const ENV_BURST_LIMIT: &str = "TERMI_PUBLIC_RATE_LIMIT_BURST_MAX";
|
||||
const ENV_BURST_WINDOW_SECONDS: &str = "TERMI_PUBLIC_RATE_LIMIT_BURST_WINDOW_SECONDS";
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct AbuseGuardConfig {
|
||||
window_seconds: i64,
|
||||
max_requests_per_window: u32,
|
||||
ban_minutes: i64,
|
||||
burst_limit: u32,
|
||||
burst_window_seconds: i64,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct AbuseGuardEntry {
|
||||
window_started_at: DateTime<Utc>,
|
||||
request_count: u32,
|
||||
burst_window_started_at: DateTime<Utc>,
|
||||
burst_count: u32,
|
||||
banned_until: Option<DateTime<Utc>>,
|
||||
last_reason: Option<String>,
|
||||
}
|
||||
|
||||
fn parse_env_i64(name: &str, fallback: i64, min: i64, max: i64) -> i64 {
|
||||
std::env::var(name)
|
||||
.ok()
|
||||
.and_then(|value| value.trim().parse::<i64>().ok())
|
||||
.map(|value| value.clamp(min, max))
|
||||
.unwrap_or(fallback)
|
||||
}
|
||||
|
||||
fn parse_env_u32(name: &str, fallback: u32, min: u32, max: u32) -> u32 {
|
||||
std::env::var(name)
|
||||
.ok()
|
||||
.and_then(|value| value.trim().parse::<u32>().ok())
|
||||
.map(|value| value.clamp(min, max))
|
||||
.unwrap_or(fallback)
|
||||
}
|
||||
|
||||
fn load_config() -> AbuseGuardConfig {
|
||||
AbuseGuardConfig {
|
||||
window_seconds: parse_env_i64(ENV_WINDOW_SECONDS, DEFAULT_WINDOW_SECONDS, 10, 24 * 60 * 60),
|
||||
max_requests_per_window: parse_env_u32(
|
||||
ENV_MAX_REQUESTS_PER_WINDOW,
|
||||
DEFAULT_MAX_REQUESTS_PER_WINDOW,
|
||||
1,
|
||||
50_000,
|
||||
),
|
||||
ban_minutes: parse_env_i64(ENV_BAN_MINUTES, DEFAULT_BAN_MINUTES, 1, 7 * 24 * 60),
|
||||
burst_limit: parse_env_u32(ENV_BURST_LIMIT, DEFAULT_BURST_LIMIT, 1, 1_000),
|
||||
burst_window_seconds: parse_env_i64(
|
||||
ENV_BURST_WINDOW_SECONDS,
|
||||
DEFAULT_BURST_WINDOW_SECONDS,
|
||||
5,
|
||||
60 * 60,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
fn normalize_token(value: Option<&str>, max_chars: usize) -> Option<String> {
|
||||
value.and_then(|item| {
|
||||
let trimmed = item.trim();
|
||||
if trimmed.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(trimmed.chars().take(max_chars).collect::<String>())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn normalize_ip(value: Option<&str>) -> Option<String> {
|
||||
normalize_token(value, 96)
|
||||
}
|
||||
|
||||
pub fn header_value<'a>(headers: &'a HeaderMap, key: header::HeaderName) -> Option<&'a str> {
|
||||
headers.get(key).and_then(|value| value.to_str().ok())
|
||||
}
|
||||
|
||||
fn first_forwarded_ip(value: &str) -> Option<&str> {
|
||||
value
|
||||
.split(',')
|
||||
.map(str::trim)
|
||||
.find(|item| !item.is_empty())
|
||||
}
|
||||
|
||||
pub fn detect_client_ip(headers: &HeaderMap) -> Option<String> {
|
||||
let forwarded = header_value(headers, header::HeaderName::from_static("x-forwarded-for"))
|
||||
.and_then(first_forwarded_ip);
|
||||
let real_ip = header_value(headers, header::HeaderName::from_static("x-real-ip"));
|
||||
let cf_connecting_ip =
|
||||
header_value(headers, header::HeaderName::from_static("cf-connecting-ip"));
|
||||
let true_client_ip = header_value(headers, header::HeaderName::from_static("true-client-ip"));
|
||||
|
||||
normalize_ip(
|
||||
forwarded
|
||||
.or(real_ip)
|
||||
.or(cf_connecting_ip)
|
||||
.or(true_client_ip),
|
||||
)
|
||||
}
|
||||
|
||||
fn abuse_store() -> &'static Mutex<HashMap<String, AbuseGuardEntry>> {
|
||||
static STORE: OnceLock<Mutex<HashMap<String, AbuseGuardEntry>>> = OnceLock::new();
|
||||
STORE.get_or_init(|| Mutex::new(HashMap::new()))
|
||||
}
|
||||
|
||||
fn make_key(scope: &str, client_ip: Option<&str>, fingerprint: Option<&str>) -> String {
|
||||
let normalized_scope = scope.trim().to_ascii_lowercase();
|
||||
let normalized_ip = normalize_ip(client_ip).unwrap_or_else(|| "unknown".to_string());
|
||||
let normalized_fingerprint = normalize_token(fingerprint, 160).unwrap_or_default();
|
||||
if normalized_fingerprint.is_empty() {
|
||||
format!("{normalized_scope}:{normalized_ip}")
|
||||
} else {
|
||||
format!("{normalized_scope}:{normalized_ip}:{normalized_fingerprint}")
|
||||
}
|
||||
}
|
||||
|
||||
fn too_many_requests(message: impl Into<String>) -> Error {
|
||||
let message = message.into();
|
||||
Error::CustomError(
|
||||
StatusCode::TOO_MANY_REQUESTS,
|
||||
ErrorDetail::new("rate_limited".to_string(), message),
|
||||
)
|
||||
}
|
||||
|
||||
/// In-memory rate-limit gate for public endpoints.
///
/// Tracks two rolling counters per key (`scope:ip[:fingerprint]`): a slow
/// "window" counter and a fast "burst" counter. Exceeding either bans the
/// key for `config.ban_minutes` and returns a 429.
///
/// # Errors
/// - `too_many_requests(...)` (429) while banned or when a limit is hit.
/// - `Error::InternalServerError` if the store mutex is poisoned.
pub fn enforce_public_scope(
    scope: &str,
    client_ip: Option<&str>,
    fingerprint: Option<&str>,
) -> Result<()> {
    let config = load_config();
    let key = make_key(scope, client_ip, fingerprint);
    let now = Utc::now();
    // A poisoned lock (panic while held) is surfaced as a 500 rather than
    // propagating the poison.
    let mut store = abuse_store()
        .lock()
        .map_err(|_| Error::InternalServerError)?;

    // Garbage-collect entries idle for over a day. NOTE(review): when
    // `banned_until` is Some but expired more than a day ago, the entry is
    // dropped even if `window_started_at` is recent — confirm that is the
    // intended retention rule.
    store.retain(|_, entry| {
        entry
            .banned_until
            .map(|until| until > now - Duration::days(1))
            .unwrap_or_else(|| entry.window_started_at > now - Duration::days(1))
    });

    // First sight of this key starts both windows at `now` with zero counts.
    let entry = store.entry(key).or_insert_with(|| AbuseGuardEntry {
        window_started_at: now,
        request_count: 0,
        burst_window_started_at: now,
        burst_count: 0,
        banned_until: None,
        last_reason: None,
    });

    // Active ban: reject with the remaining wait (at least 1 minute so the
    // message never says "0"). An expired ban is cleared in place.
    if let Some(banned_until) = entry.banned_until {
        if banned_until > now {
            let retry_after = (banned_until - now).num_minutes().max(1);
            return Err(too_many_requests(format!(
                "请求过于频繁,请在 {retry_after} 分钟后重试"
            )));
        }
        entry.banned_until = None;
    }

    // Roll over the slow window when it has fully elapsed.
    if entry.window_started_at + Duration::seconds(config.window_seconds) <= now {
        entry.window_started_at = now;
        entry.request_count = 0;
    }

    // Roll over the burst window independently of the slow one.
    if entry.burst_window_started_at + Duration::seconds(config.burst_window_seconds) <= now {
        entry.burst_window_started_at = now;
        entry.burst_count = 0;
    }

    // Count this request against both windows before checking limits, so
    // the request that crosses a limit is itself rejected.
    entry.request_count += 1;
    entry.burst_count += 1;

    // Burst limit is checked first; its ban reason is recorded for audit.
    if entry.burst_count > config.burst_limit {
        entry.banned_until = Some(now + Duration::minutes(config.ban_minutes));
        entry.last_reason = Some("burst_limit".to_string());
        return Err(too_many_requests("短时间请求过多,已临时封禁,请稍后再试"));
    }

    if entry.request_count > config.max_requests_per_window {
        entry.banned_until = Some(now + Duration::minutes(config.ban_minutes));
        entry.last_reason = Some("window_limit".to_string());
        return Err(too_many_requests("请求过于频繁,已临时封禁,请稍后再试"));
    }

    Ok(())
}
|
||||
33
backend/src/services/admin_audit.rs
Normal file
33
backend/src/services/admin_audit.rs
Normal file
@@ -0,0 +1,33 @@
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{ActiveModelTrait, Set};
|
||||
|
||||
use crate::{
|
||||
controllers::admin::AdminIdentity,
|
||||
models::_entities::admin_audit_logs,
|
||||
};
|
||||
|
||||
pub async fn log_event(
|
||||
ctx: &AppContext,
|
||||
actor: Option<&AdminIdentity>,
|
||||
action: &str,
|
||||
target_type: &str,
|
||||
target_id: Option<String>,
|
||||
target_label: Option<String>,
|
||||
metadata: Option<serde_json::Value>,
|
||||
) -> Result<()> {
|
||||
admin_audit_logs::ActiveModel {
|
||||
actor_username: Set(actor.map(|item| item.username.clone())),
|
||||
actor_email: Set(actor.and_then(|item| item.email.clone())),
|
||||
actor_source: Set(actor.map(|item| item.source.clone())),
|
||||
action: Set(action.to_string()),
|
||||
target_type: Set(target_type.to_string()),
|
||||
target_id: Set(target_id),
|
||||
target_label: Set(target_label),
|
||||
metadata: Set(metadata),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&ctx.db)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -552,8 +552,22 @@ fn build_source_chunks(
|
||||
|
||||
fn build_chunks(posts: &[content::MarkdownPost], chunk_size: usize) -> Vec<ChunkDraft> {
|
||||
let mut chunks = Vec::new();
|
||||
let now = chrono::Utc::now().fixed_offset();
|
||||
|
||||
for post in posts.iter().filter(|post| post.published) {
|
||||
for post in posts.iter().filter(|post| {
|
||||
content::effective_post_state(
|
||||
&post.status,
|
||||
post.publish_at
|
||||
.clone()
|
||||
.and_then(|value| chrono::DateTime::parse_from_rfc3339(&value).ok()),
|
||||
post.unpublish_at
|
||||
.clone()
|
||||
.and_then(|value| chrono::DateTime::parse_from_rfc3339(&value).ok()),
|
||||
now,
|
||||
) == content::POST_STATUS_PUBLISHED
|
||||
&& content::normalize_post_visibility(Some(&post.visibility))
|
||||
!= content::POST_VISIBILITY_PRIVATE
|
||||
}) {
|
||||
let mut sections = Vec::new();
|
||||
sections.push(format!("# {}", post.title));
|
||||
if let Some(description) = post
|
||||
@@ -2714,6 +2728,30 @@ pub async fn answer_question(ctx: &AppContext, question: &str) -> Result<AiAnswe
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn admin_chat_completion(
|
||||
ctx: &AppContext,
|
||||
system_prompt: &str,
|
||||
prompt: &str,
|
||||
) -> Result<String> {
|
||||
let settings = load_runtime_settings(ctx, false).await?;
|
||||
let api_base = settings
|
||||
.api_base
|
||||
.ok_or_else(|| Error::BadRequest("请先在后台配置 AI API Base".to_string()))?;
|
||||
let api_key = settings
|
||||
.api_key
|
||||
.ok_or_else(|| Error::BadRequest("请先在后台配置 AI API Key".to_string()))?;
|
||||
|
||||
request_chat_answer(&AiProviderRequest {
|
||||
provider: settings.provider,
|
||||
api_base,
|
||||
api_key,
|
||||
chat_model: settings.chat_model,
|
||||
system_prompt: system_prompt.trim().to_string(),
|
||||
prompt: prompt.trim().to_string(),
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn provider_name(value: Option<&str>) -> String {
|
||||
trim_to_option(value.map(ToString::to_string))
|
||||
.unwrap_or_else(|| DEFAULT_AI_PROVIDER.to_string())
|
||||
|
||||
@@ -9,10 +9,13 @@ use sea_orm::{
|
||||
};
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::models::_entities::query_events;
|
||||
use crate::models::_entities::{content_events, posts, query_events};
|
||||
|
||||
const EVENT_TYPE_SEARCH: &str = "search";
|
||||
const EVENT_TYPE_AI_QUESTION: &str = "ai_question";
|
||||
pub const CONTENT_EVENT_PAGE_VIEW: &str = "page_view";
|
||||
pub const CONTENT_EVENT_READ_PROGRESS: &str = "read_progress";
|
||||
pub const CONTENT_EVENT_READ_COMPLETE: &str = "read_complete";
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct QueryEventRequestContext {
|
||||
@@ -34,6 +37,25 @@ pub struct QueryEventDraft {
|
||||
pub latency_ms: Option<i32>,
|
||||
}
|
||||
|
||||
/// Request-scoped metadata captured from HTTP headers for a content
/// analytics event. All fields are `None` when blank or absent.
#[derive(Clone, Debug, Default)]
pub struct ContentEventRequestContext {
    /// Normalized request path.
    pub path: Option<String>,
    /// Raw `Referer` header value.
    pub referrer: Option<String>,
    /// Raw `User-Agent` header value.
    pub user_agent: Option<String>,
}
|
||||
|
||||
/// An unvalidated content analytics event as submitted by a client;
/// `record_content_event` normalizes and persists it.
#[derive(Clone, Debug)]
pub struct ContentEventDraft {
    /// One of the `CONTENT_EVENT_*` constants (validated on insert).
    pub event_type: String,
    /// Page path the event occurred on; blank drafts are dropped.
    pub path: String,
    /// Slug of the post involved, if any.
    pub post_slug: Option<String>,
    /// Opaque client session identifier, if provided.
    pub session_id: Option<String>,
    /// Header-derived request metadata.
    pub request_context: ContentEventRequestContext,
    /// Reading duration in milliseconds; negatives are clamped to 0.
    pub duration_ms: Option<i32>,
    /// Read progress; clamped into 0..=100 on insert.
    pub progress_percent: Option<i32>,
    /// Free-form extra payload stored as JSON.
    pub metadata: Option<serde_json::Value>,
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct AnalyticsOverview {
|
||||
pub total_searches: u64,
|
||||
@@ -48,6 +70,17 @@ pub struct AnalyticsOverview {
|
||||
pub avg_ai_latency_ms_last_7d: Option<f64>,
|
||||
}
|
||||
|
||||
/// Aggregate page-view / read-completion statistics for the admin
/// dashboard and public highlights payloads.
#[derive(Clone, Debug, Serialize)]
pub struct ContentAnalyticsOverview {
    /// All-time page-view count.
    pub total_page_views: u64,
    /// Page views in the trailing 24 hours.
    pub page_views_last_24h: u64,
    /// Page views in the trailing 7 days.
    pub page_views_last_7d: u64,
    /// All-time read-complete count.
    pub total_read_completes: u64,
    /// Read completes in the trailing 7 days.
    pub read_completes_last_7d: u64,
    /// Mean progress percentage over the trailing 7 days (0.0 when no data).
    pub avg_read_progress_last_7d: f64,
    /// Mean read duration (ms) over the trailing 7 days; `None` when no data.
    pub avg_read_duration_ms_last_7d: Option<f64>,
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct AnalyticsTopQuery {
|
||||
pub query: String,
|
||||
@@ -75,6 +108,22 @@ pub struct AnalyticsProviderBucket {
|
||||
pub count: u64,
|
||||
}
|
||||
|
||||
/// One referrer source (host or `"direct"`) and its page-view count.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsReferrerBucket {
    /// Referrer bucket label produced by `normalize_referrer_source`.
    pub referrer: String,
    /// Number of page views attributed to this referrer.
    pub count: u64,
}
|
||||
|
||||
/// Per-post engagement summary used in "popular posts" rankings.
#[derive(Clone, Debug, Serialize)]
pub struct AnalyticsPopularPost {
    /// Post slug the stats belong to.
    pub slug: String,
    /// Display title (falls back to the slug when unknown).
    pub title: String,
    /// Page views within the reporting window.
    pub page_views: u64,
    /// Read completions within the reporting window.
    pub read_completes: u64,
    /// Mean reported progress percentage (0.0 when no progress events).
    pub avg_progress_percent: f64,
    /// Mean read duration in ms; `None` when no duration events.
    pub avg_duration_ms: Option<f64>,
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct AnalyticsDailyBucket {
|
||||
pub date: String,
|
||||
@@ -85,13 +134,39 @@ pub struct AnalyticsDailyBucket {
|
||||
/// Full analytics payload for the admin dashboard: search/AI activity
/// plus content-engagement aggregates.
#[derive(Clone, Debug, Serialize)]
pub struct AdminAnalyticsResponse {
    /// Search and AI question statistics.
    pub overview: AnalyticsOverview,
    /// Page-view and read statistics.
    pub content_overview: ContentAnalyticsOverview,
    /// Most frequent search queries.
    pub top_search_terms: Vec<AnalyticsTopQuery>,
    /// Most frequent AI questions.
    pub top_ai_questions: Vec<AnalyticsTopQuery>,
    /// Latest query events for the activity feed.
    pub recent_events: Vec<AnalyticsRecentEvent>,
    /// AI provider usage over the trailing 7 days.
    pub providers_last_7d: Vec<AnalyticsProviderBucket>,
    /// Referrer breakdown over the trailing 7 days.
    pub top_referrers: Vec<AnalyticsReferrerBucket>,
    /// Posts ranked by engagement.
    pub popular_posts: Vec<AnalyticsPopularPost>,
    /// Per-day search/AI activity buckets.
    pub daily_activity: Vec<AnalyticsDailyBucket>,
}
|
||||
|
||||
/// Publicly exposable engagement summary, restricted to public posts.
#[derive(Clone, Debug, Serialize)]
pub struct PublicContentHighlights {
    /// Aggregate stats across all public posts.
    pub overview: ContentAnalyticsOverview,
    /// Top public posts by engagement.
    pub popular_posts: Vec<AnalyticsPopularPost>,
}
|
||||
|
||||
/// Aggregate stats for a single public reporting window (e.g. 24h/7d/30d).
#[derive(Clone, Debug, Serialize)]
pub struct PublicContentWindowOverview {
    /// Page views inside the window.
    pub page_views: u64,
    /// Read completions inside the window.
    pub read_completes: u64,
    /// Mean progress percentage (0.0 when no data).
    pub avg_read_progress: f64,
    /// Mean read duration in ms; `None` when no data.
    pub avg_read_duration_ms: Option<f64>,
}
|
||||
|
||||
/// One labelled reporting window (key/label/span) with its aggregate
/// stats and popular-post ranking.
#[derive(Clone, Debug, Serialize)]
pub struct PublicContentWindowHighlights {
    /// Stable machine key, e.g. "24h".
    pub key: String,
    /// Human-readable label shown in the UI.
    pub label: String,
    /// Window length in days.
    pub days: i32,
    /// Aggregate stats for the window.
    pub overview: PublicContentWindowOverview,
    /// Top posts within the window.
    pub popular_posts: Vec<AnalyticsPopularPost>,
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct QueryAggregate {
|
||||
query: String,
|
||||
@@ -122,6 +197,18 @@ fn format_timestamp(value: DateTime<Utc>) -> String {
|
||||
value.format("%Y-%m-%d %H:%M").to_string()
|
||||
}
|
||||
|
||||
fn normalize_referrer_source(value: Option<String>) -> String {
|
||||
let Some(value) = trim_to_option(value) else {
|
||||
return "direct".to_string();
|
||||
};
|
||||
|
||||
reqwest::Url::parse(&value)
|
||||
.ok()
|
||||
.and_then(|url| url.host_str().map(ToString::to_string))
|
||||
.filter(|item| !item.trim().is_empty())
|
||||
.unwrap_or(value)
|
||||
}
|
||||
|
||||
fn header_value(headers: &HeaderMap, key: &str) -> Option<String> {
|
||||
headers
|
||||
.get(key)
|
||||
@@ -134,6 +221,10 @@ fn clamp_latency(latency_ms: i64) -> i32 {
|
||||
latency_ms.clamp(0, i64::from(i32::MAX)) as i32
|
||||
}
|
||||
|
||||
/// Clamps a reported read-progress value into the valid `0..=100` range.
fn clamp_percentage(value: i32) -> i32 {
    value.max(0).min(100)
}
|
||||
|
||||
fn build_query_aggregates(
|
||||
events: &[query_events::Model],
|
||||
wanted_type: &str,
|
||||
@@ -199,6 +290,17 @@ pub fn request_context_from_headers(path: &str, headers: &HeaderMap) -> QueryEve
|
||||
}
|
||||
}
|
||||
|
||||
pub fn content_request_context_from_headers(
|
||||
path: &str,
|
||||
headers: &HeaderMap,
|
||||
) -> ContentEventRequestContext {
|
||||
ContentEventRequestContext {
|
||||
path: trim_to_option(Some(path.to_string())),
|
||||
referrer: header_value(headers, "referer"),
|
||||
user_agent: header_value(headers, "user-agent"),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn record_event(ctx: &AppContext, draft: QueryEventDraft) {
|
||||
let query_text = draft.query_text.trim().to_string();
|
||||
if query_text.is_empty() {
|
||||
@@ -226,6 +328,38 @@ pub async fn record_event(ctx: &AppContext, draft: QueryEventDraft) {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn record_content_event(ctx: &AppContext, draft: ContentEventDraft) {
|
||||
let path = draft.path.trim().to_string();
|
||||
if path.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let event_type = draft.event_type.trim().to_ascii_lowercase();
|
||||
if !matches!(
|
||||
event_type.as_str(),
|
||||
CONTENT_EVENT_PAGE_VIEW | CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
let active_model = content_events::ActiveModel {
|
||||
event_type: Set(event_type),
|
||||
path: Set(path),
|
||||
post_slug: Set(trim_to_option(draft.post_slug)),
|
||||
session_id: Set(trim_to_option(draft.session_id)),
|
||||
referrer: Set(trim_to_option(draft.request_context.referrer)),
|
||||
user_agent: Set(trim_to_option(draft.request_context.user_agent)),
|
||||
duration_ms: Set(draft.duration_ms.map(|value| value.max(0))),
|
||||
progress_percent: Set(draft.progress_percent.map(clamp_percentage)),
|
||||
metadata: Set(draft.metadata),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if let Err(error) = active_model.insert(&ctx.db).await {
|
||||
tracing::warn!("failed to record content analytics event: {error}");
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn record_search_event(
|
||||
ctx: &AppContext,
|
||||
query_text: &str,
|
||||
@@ -309,12 +443,25 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
.filter(query_events::Column::CreatedAt.gte(since_24h))
|
||||
.count(&ctx.db)
|
||||
.await?;
|
||||
let total_page_views = content_events::Entity::find()
|
||||
.filter(content_events::Column::EventType.eq(CONTENT_EVENT_PAGE_VIEW))
|
||||
.count(&ctx.db)
|
||||
.await?;
|
||||
let total_read_completes = content_events::Entity::find()
|
||||
.filter(content_events::Column::EventType.eq(CONTENT_EVENT_READ_COMPLETE))
|
||||
.count(&ctx.db)
|
||||
.await?;
|
||||
|
||||
let last_7d_events = query_events::Entity::find()
|
||||
.filter(query_events::Column::CreatedAt.gte(since_7d))
|
||||
.order_by_desc(query_events::Column::CreatedAt)
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
let last_7d_content_events = content_events::Entity::find()
|
||||
.filter(content_events::Column::CreatedAt.gte(since_7d))
|
||||
.order_by_desc(content_events::Column::CreatedAt)
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
|
||||
let searches_last_7d = last_7d_events
|
||||
.iter()
|
||||
@@ -336,6 +483,14 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
let mut counted_search_results = 0_u64;
|
||||
let mut total_ai_latency = 0.0_f64;
|
||||
let mut counted_ai_latency = 0_u64;
|
||||
let mut referrer_breakdown: HashMap<String, u64> = HashMap::new();
|
||||
let mut total_read_progress = 0.0_f64;
|
||||
let mut counted_read_progress = 0_u64;
|
||||
let mut total_read_duration = 0.0_f64;
|
||||
let mut counted_read_duration = 0_u64;
|
||||
let mut page_views_last_24h = 0_u64;
|
||||
let mut page_views_last_7d = 0_u64;
|
||||
let mut read_completes_last_7d = 0_u64;
|
||||
|
||||
for offset in 0..7 {
|
||||
let date = (now - Duration::days(offset)).date_naive();
|
||||
@@ -372,6 +527,104 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
}
|
||||
}
|
||||
|
||||
let post_titles = posts::Entity::find()
|
||||
.all(&ctx.db)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|post| {
|
||||
(
|
||||
post.slug,
|
||||
post.title.unwrap_or_else(|| "Untitled post".to_string()),
|
||||
)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let mut post_breakdown: HashMap<String, (u64, u64, f64, u64, f64, u64)> = HashMap::new();
|
||||
|
||||
for event in &last_7d_content_events {
|
||||
let created_at = DateTime::<Utc>::from(event.created_at);
|
||||
|
||||
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
|
||||
page_views_last_7d += 1;
|
||||
if created_at >= since_24h {
|
||||
page_views_last_24h += 1;
|
||||
}
|
||||
|
||||
let referrer = normalize_referrer_source(event.referrer.clone());
|
||||
*referrer_breakdown.entry(referrer).or_insert(0) += 1;
|
||||
}
|
||||
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
read_completes_last_7d += 1;
|
||||
}
|
||||
|
||||
if matches!(
|
||||
event.event_type.as_str(),
|
||||
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
|
||||
) {
|
||||
let progress = event.progress_percent.unwrap_or({
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
100
|
||||
} else {
|
||||
0
|
||||
}
|
||||
});
|
||||
if progress > 0 {
|
||||
total_read_progress += f64::from(progress);
|
||||
counted_read_progress += 1;
|
||||
}
|
||||
|
||||
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
|
||||
total_read_duration += f64::from(duration_ms);
|
||||
counted_read_duration += 1;
|
||||
}
|
||||
}
|
||||
|
||||
let Some(post_slug) = event
|
||||
.post_slug
|
||||
.as_deref()
|
||||
.map(str::trim)
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(ToString::to_string)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let entry = post_breakdown
|
||||
.entry(post_slug)
|
||||
.or_insert((0, 0, 0.0, 0, 0.0, 0));
|
||||
|
||||
if event.event_type == CONTENT_EVENT_PAGE_VIEW {
|
||||
entry.0 += 1;
|
||||
}
|
||||
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
entry.1 += 1;
|
||||
}
|
||||
|
||||
if matches!(
|
||||
event.event_type.as_str(),
|
||||
CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
|
||||
) {
|
||||
let progress = event.progress_percent.unwrap_or({
|
||||
if event.event_type == CONTENT_EVENT_READ_COMPLETE {
|
||||
100
|
||||
} else {
|
||||
0
|
||||
}
|
||||
});
|
||||
if progress > 0 {
|
||||
entry.2 += f64::from(progress);
|
||||
entry.3 += 1;
|
||||
}
|
||||
|
||||
if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
|
||||
entry.4 += f64::from(duration_ms);
|
||||
entry.5 += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut providers_last_7d = provider_breakdown
|
||||
.into_iter()
|
||||
.map(|(provider, count)| AnalyticsProviderBucket { provider, count })
|
||||
@@ -384,6 +637,50 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
});
|
||||
providers_last_7d.truncate(6);
|
||||
|
||||
let mut top_referrers = referrer_breakdown
|
||||
.into_iter()
|
||||
.map(|(referrer, count)| AnalyticsReferrerBucket { referrer, count })
|
||||
.collect::<Vec<_>>();
|
||||
top_referrers.sort_by(|left, right| {
|
||||
right
|
||||
.count
|
||||
.cmp(&left.count)
|
||||
.then_with(|| left.referrer.cmp(&right.referrer))
|
||||
});
|
||||
top_referrers.truncate(8);
|
||||
|
||||
let mut popular_posts = post_breakdown
|
||||
.into_iter()
|
||||
.map(
|
||||
|(slug, (page_views, read_completes, total_progress, progress_count, total_duration, duration_count))| {
|
||||
AnalyticsPopularPost {
|
||||
title: post_titles
|
||||
.get(&slug)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| slug.clone()),
|
||||
slug,
|
||||
page_views,
|
||||
read_completes,
|
||||
avg_progress_percent: if progress_count > 0 {
|
||||
total_progress / progress_count as f64
|
||||
} else {
|
||||
0.0
|
||||
},
|
||||
avg_duration_ms: (duration_count > 0)
|
||||
.then(|| total_duration / duration_count as f64),
|
||||
}
|
||||
},
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
popular_posts.sort_by(|left, right| {
|
||||
right
|
||||
.page_views
|
||||
.cmp(&left.page_views)
|
||||
.then_with(|| right.read_completes.cmp(&left.read_completes))
|
||||
.then_with(|| left.slug.cmp(&right.slug))
|
||||
});
|
||||
popular_posts.truncate(10);
|
||||
|
||||
let mut daily_activity = daily_map
|
||||
.into_iter()
|
||||
.map(|(date, (searches, ai_questions))| AnalyticsDailyBucket {
|
||||
@@ -432,10 +729,448 @@ pub async fn build_admin_analytics(ctx: &AppContext) -> Result<AdminAnalyticsRes
|
||||
avg_ai_latency_ms_last_7d: (counted_ai_latency > 0)
|
||||
.then(|| total_ai_latency / counted_ai_latency as f64),
|
||||
},
|
||||
content_overview: ContentAnalyticsOverview {
|
||||
total_page_views,
|
||||
page_views_last_24h,
|
||||
page_views_last_7d,
|
||||
total_read_completes,
|
||||
read_completes_last_7d,
|
||||
avg_read_progress_last_7d: if counted_read_progress > 0 {
|
||||
total_read_progress / counted_read_progress as f64
|
||||
} else {
|
||||
0.0
|
||||
},
|
||||
avg_read_duration_ms_last_7d: (counted_read_duration > 0)
|
||||
.then(|| total_read_duration / counted_read_duration as f64),
|
||||
},
|
||||
top_search_terms,
|
||||
top_ai_questions,
|
||||
recent_events,
|
||||
providers_last_7d,
|
||||
top_referrers,
|
||||
popular_posts,
|
||||
daily_activity,
|
||||
})
|
||||
}
|
||||
|
||||
/// Builds the public-facing engagement summary, restricted to the posts
/// in `public_posts`. All-time totals come from COUNT queries; the 24h/7d
/// figures and popular-post ranking are aggregated in memory from the
/// trailing-7-day events.
///
/// # Errors
/// Propagates database errors from the count/list queries.
pub async fn build_public_content_highlights(
    ctx: &AppContext,
    public_posts: &[posts::Model],
) -> Result<PublicContentHighlights> {
    // No public posts → zeroed payload without touching the database.
    if public_posts.is_empty() {
        return Ok(PublicContentHighlights {
            overview: ContentAnalyticsOverview {
                total_page_views: 0,
                page_views_last_24h: 0,
                page_views_last_7d: 0,
                total_read_completes: 0,
                read_completes_last_7d: 0,
                avg_read_progress_last_7d: 0.0,
                avg_read_duration_ms_last_7d: None,
            },
            popular_posts: Vec::new(),
        });
    }

    let now = Utc::now();
    let since_24h = now - Duration::hours(24);
    let since_7d = now - Duration::days(7);
    let public_slugs = public_posts
        .iter()
        .map(|post| post.slug.clone())
        .collect::<Vec<_>>();
    // slug → display title; blank titles fall back to the slug.
    let post_titles = public_posts
        .iter()
        .map(|post| {
            (
                post.slug.clone(),
                trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone()),
            )
        })
        .collect::<HashMap<_, _>>();

    // All-time totals are counted in the database, scoped to public slugs.
    let total_page_views = content_events::Entity::find()
        .filter(content_events::Column::EventType.eq(CONTENT_EVENT_PAGE_VIEW))
        .filter(content_events::Column::PostSlug.is_in(public_slugs.clone()))
        .count(&ctx.db)
        .await?;
    let total_read_completes = content_events::Entity::find()
        .filter(content_events::Column::EventType.eq(CONTENT_EVENT_READ_COMPLETE))
        .filter(content_events::Column::PostSlug.is_in(public_slugs.clone()))
        .count(&ctx.db)
        .await?;

    // Recent events are loaded once and aggregated in memory below.
    let last_7d_content_events = content_events::Entity::find()
        .filter(content_events::Column::CreatedAt.gte(since_7d))
        .filter(content_events::Column::PostSlug.is_in(public_slugs))
        .all(&ctx.db)
        .await?;

    let mut page_views_last_24h = 0_u64;
    let mut page_views_last_7d = 0_u64;
    let mut read_completes_last_7d = 0_u64;
    let mut total_read_progress = 0.0_f64;
    let mut counted_read_progress = 0_u64;
    let mut total_read_duration = 0.0_f64;
    let mut counted_read_duration = 0_u64;
    // slug → (page_views, read_completes, progress_sum, progress_count,
    //          duration_sum, duration_count)
    let mut post_breakdown = HashMap::<String, (u64, u64, f64, u64, f64, u64)>::new();

    for event in &last_7d_content_events {
        let created_at = DateTime::<Utc>::from(event.created_at);
        // Events without a usable slug are skipped entirely (including the
        // global counters), unlike the admin aggregation.
        let Some(post_slug) = event
            .post_slug
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .map(ToString::to_string)
        else {
            continue;
        };

        if event.event_type == CONTENT_EVENT_PAGE_VIEW {
            page_views_last_7d += 1;
            if created_at >= since_24h {
                page_views_last_24h += 1;
            }
        }

        if event.event_type == CONTENT_EVENT_READ_COMPLETE {
            read_completes_last_7d += 1;
        }

        // Progress/duration stats include both progress and complete
        // events; a complete event with no explicit progress counts as 100.
        if matches!(
            event.event_type.as_str(),
            CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
        ) {
            let progress = event.progress_percent.unwrap_or({
                if event.event_type == CONTENT_EVENT_READ_COMPLETE {
                    100
                } else {
                    0
                }
            });
            // Zero-progress events are excluded from the average.
            if progress > 0 {
                total_read_progress += f64::from(progress);
                counted_read_progress += 1;
            }

            if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
                total_read_duration += f64::from(duration_ms);
                counted_read_duration += 1;
            }
        }

        // Mirror the same counters into the per-post breakdown.
        let entry = post_breakdown
            .entry(post_slug)
            .or_insert((0, 0, 0.0, 0, 0.0, 0));

        if event.event_type == CONTENT_EVENT_PAGE_VIEW {
            entry.0 += 1;
        }

        if event.event_type == CONTENT_EVENT_READ_COMPLETE {
            entry.1 += 1;
        }

        if matches!(
            event.event_type.as_str(),
            CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
        ) {
            let progress = event.progress_percent.unwrap_or({
                if event.event_type == CONTENT_EVENT_READ_COMPLETE {
                    100
                } else {
                    0
                }
            });
            if progress > 0 {
                entry.2 += f64::from(progress);
                entry.3 += 1;
            }

            if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
                entry.4 += f64::from(duration_ms);
                entry.5 += 1;
            }
        }
    }

    // Convert per-post sums into averages and attach display titles.
    let mut popular_posts = post_breakdown
        .into_iter()
        .map(
            |(
                slug,
                (
                    page_views,
                    read_completes,
                    total_progress,
                    progress_count,
                    total_duration,
                    duration_count,
                ),
            )| AnalyticsPopularPost {
                title: post_titles
                    .get(&slug)
                    .cloned()
                    .unwrap_or_else(|| slug.clone()),
                slug,
                page_views,
                read_completes,
                avg_progress_percent: if progress_count > 0 {
                    total_progress / progress_count as f64
                } else {
                    0.0
                },
                avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
            },
        )
        .collect::<Vec<_>>();
    // Rank: views desc, then completes desc, then slug for determinism.
    popular_posts.sort_by(|left, right| {
        right
            .page_views
            .cmp(&left.page_views)
            .then_with(|| right.read_completes.cmp(&left.read_completes))
            .then_with(|| left.slug.cmp(&right.slug))
    });
    popular_posts.truncate(6);

    Ok(PublicContentHighlights {
        overview: ContentAnalyticsOverview {
            total_page_views,
            page_views_last_24h,
            page_views_last_7d,
            total_read_completes,
            read_completes_last_7d,
            avg_read_progress_last_7d: if counted_read_progress > 0 {
                total_read_progress / counted_read_progress as f64
            } else {
                0.0
            },
            avg_read_duration_ms_last_7d: (counted_read_duration > 0)
                .then(|| total_read_duration / counted_read_duration as f64),
        },
        popular_posts,
    })
}
|
||||
|
||||
pub async fn build_public_content_windows(
|
||||
ctx: &AppContext,
|
||||
public_posts: &[posts::Model],
|
||||
) -> Result<Vec<PublicContentWindowHighlights>> {
|
||||
if public_posts.is_empty() {
|
||||
return Ok(vec![
|
||||
build_empty_public_content_window("24h", "24h", 1),
|
||||
build_empty_public_content_window("7d", "7d", 7),
|
||||
build_empty_public_content_window("30d", "30d", 30),
|
||||
]);
|
||||
}
|
||||
|
||||
let now = Utc::now();
|
||||
let since_30d = now - Duration::days(30);
|
||||
let public_slugs = public_posts
|
||||
.iter()
|
||||
.map(|post| post.slug.clone())
|
||||
.collect::<Vec<_>>();
|
||||
let post_titles = public_posts
|
||||
.iter()
|
||||
.map(|post| {
|
||||
(
|
||||
post.slug.clone(),
|
||||
trim_to_option(post.title.clone()).unwrap_or_else(|| post.slug.clone()),
|
||||
)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
let events = content_events::Entity::find()
|
||||
.filter(content_events::Column::CreatedAt.gte(since_30d))
|
||||
.filter(content_events::Column::PostSlug.is_in(public_slugs))
|
||||
.all(&ctx.db)
|
||||
.await?;
|
||||
|
||||
Ok(vec![
|
||||
summarize_public_content_window(&events, &post_titles, now - Duration::hours(24), "24h", "24h", 1),
|
||||
summarize_public_content_window(&events, &post_titles, now - Duration::days(7), "7d", "7d", 7),
|
||||
summarize_public_content_window(&events, &post_titles, since_30d, "30d", "30d", 30),
|
||||
])
|
||||
}
|
||||
|
||||
fn build_empty_public_content_window(
|
||||
key: &str,
|
||||
label: &str,
|
||||
days: i32,
|
||||
) -> PublicContentWindowHighlights {
|
||||
PublicContentWindowHighlights {
|
||||
key: key.to_string(),
|
||||
label: label.to_string(),
|
||||
days,
|
||||
overview: PublicContentWindowOverview {
|
||||
page_views: 0,
|
||||
read_completes: 0,
|
||||
avg_read_progress: 0.0,
|
||||
avg_read_duration_ms: None,
|
||||
},
|
||||
popular_posts: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Aggregates `events` newer than `since` into one reporting window.
///
/// Events with no usable post slug are ignored entirely. Progress and
/// duration averages include both read-progress and read-complete events
/// (a complete event without explicit progress counts as 100; zero
/// progress is excluded from the average).
fn summarize_public_content_window(
    events: &[content_events::Model],
    post_titles: &HashMap<String, String>,
    since: DateTime<Utc>,
    key: &str,
    label: &str,
    days: i32,
) -> PublicContentWindowHighlights {
    let mut page_views = 0_u64;
    let mut read_completes = 0_u64;
    let mut total_read_progress = 0.0_f64;
    let mut counted_read_progress = 0_u64;
    let mut total_read_duration = 0.0_f64;
    let mut counted_read_duration = 0_u64;
    // slug → (page_views, read_completes, progress_sum, progress_count,
    //          duration_sum, duration_count)
    let mut post_breakdown = HashMap::<String, (u64, u64, f64, u64, f64, u64)>::new();

    for event in events {
        let created_at = DateTime::<Utc>::from(event.created_at);
        // Callers pass a superset of events; filter to this window here.
        if created_at < since {
            continue;
        }

        let Some(post_slug) = event
            .post_slug
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .map(ToString::to_string)
        else {
            continue;
        };

        if event.event_type == CONTENT_EVENT_PAGE_VIEW {
            page_views += 1;
        }

        if event.event_type == CONTENT_EVENT_READ_COMPLETE {
            read_completes += 1;
        }

        if matches!(
            event.event_type.as_str(),
            CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
        ) {
            let progress = event.progress_percent.unwrap_or({
                if event.event_type == CONTENT_EVENT_READ_COMPLETE {
                    100
                } else {
                    0
                }
            });
            if progress > 0 {
                total_read_progress += f64::from(progress);
                counted_read_progress += 1;
            }

            if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
                total_read_duration += f64::from(duration_ms);
                counted_read_duration += 1;
            }
        }

        // Mirror the same counters into the per-post breakdown.
        let entry = post_breakdown
            .entry(post_slug)
            .or_insert((0, 0, 0.0, 0, 0.0, 0));

        if event.event_type == CONTENT_EVENT_PAGE_VIEW {
            entry.0 += 1;
        }

        if event.event_type == CONTENT_EVENT_READ_COMPLETE {
            entry.1 += 1;
        }

        if matches!(
            event.event_type.as_str(),
            CONTENT_EVENT_READ_PROGRESS | CONTENT_EVENT_READ_COMPLETE
        ) {
            let progress = event.progress_percent.unwrap_or({
                if event.event_type == CONTENT_EVENT_READ_COMPLETE {
                    100
                } else {
                    0
                }
            });
            if progress > 0 {
                entry.2 += f64::from(progress);
                entry.3 += 1;
            }

            if let Some(duration_ms) = event.duration_ms.filter(|value| *value >= 0) {
                entry.4 += f64::from(duration_ms);
                entry.5 += 1;
            }
        }
    }

    // Convert per-post sums into averages and attach display titles.
    let mut popular_posts = post_breakdown
        .into_iter()
        .map(
            |(
                slug,
                (
                    item_page_views,
                    item_read_completes,
                    total_progress,
                    progress_count,
                    total_duration,
                    duration_count,
                ),
            )| AnalyticsPopularPost {
                title: post_titles
                    .get(&slug)
                    .cloned()
                    .unwrap_or_else(|| slug.clone()),
                slug,
                page_views: item_page_views,
                read_completes: item_read_completes,
                avg_progress_percent: if progress_count > 0 {
                    total_progress / progress_count as f64
                } else {
                    0.0
                },
                avg_duration_ms: (duration_count > 0).then(|| total_duration / duration_count as f64),
            },
        )
        .collect::<Vec<_>>();

    // Rank: views desc, completes desc, avg progress desc (NaN-safe via
    // partial_cmp fallback), then slug for determinism.
    popular_posts.sort_by(|left, right| {
        right
            .page_views
            .cmp(&left.page_views)
            .then_with(|| right.read_completes.cmp(&left.read_completes))
            .then_with(|| {
                right
                    .avg_progress_percent
                    .partial_cmp(&left.avg_progress_percent)
                    .unwrap_or(std::cmp::Ordering::Equal)
            })
            .then_with(|| left.slug.cmp(&right.slug))
    });
    popular_posts.truncate(6);

    PublicContentWindowHighlights {
        key: key.to_string(),
        label: label.to_string(),
        days,
        overview: PublicContentWindowOverview {
            page_views,
            read_completes,
            avg_read_progress: if counted_read_progress > 0 {
                total_read_progress / counted_read_progress as f64
            } else {
                0.0
            },
            avg_read_duration_ms: (counted_read_duration > 0)
                .then(|| total_read_duration / counted_read_duration as f64),
        },
        popular_posts,
    }
}
|
||||
|
||||
375
backend/src/services/comment_guard.rs
Normal file
375
backend/src/services/comment_guard.rs
Normal file
@@ -0,0 +1,375 @@
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{Mutex, OnceLock};
|
||||
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{ColumnTrait, Condition, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
|
||||
use serde::Serialize;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::models::_entities::{comment_blacklist, comments};
|
||||
|
||||
const DEFAULT_RATE_LIMIT_WINDOW_SECONDS: i64 = 10 * 60;
|
||||
const DEFAULT_RATE_LIMIT_MAX_PER_WINDOW: u64 = 8;
|
||||
const DEFAULT_MIN_INTERVAL_SECONDS: i64 = 12;
|
||||
const DEFAULT_CAPTCHA_TTL_SECONDS: i64 = 10 * 60;
|
||||
|
||||
const ENV_RATE_LIMIT_WINDOW_SECONDS: &str = "TERMI_COMMENT_RATE_LIMIT_WINDOW_SECONDS";
|
||||
const ENV_RATE_LIMIT_MAX_PER_WINDOW: &str = "TERMI_COMMENT_RATE_LIMIT_MAX_PER_WINDOW";
|
||||
const ENV_MIN_INTERVAL_SECONDS: &str = "TERMI_COMMENT_MIN_INTERVAL_SECONDS";
|
||||
const ENV_BLOCK_KEYWORDS: &str = "TERMI_COMMENT_BLOCK_KEYWORDS";
|
||||
const ENV_CAPTCHA_TTL_SECONDS: &str = "TERMI_COMMENT_CAPTCHA_TTL_SECONDS";
|
||||
|
||||
pub const MATCHER_TYPE_IP: &str = "ip";
|
||||
pub const MATCHER_TYPE_EMAIL: &str = "email";
|
||||
pub const MATCHER_TYPE_USER_AGENT: &str = "user_agent";
|
||||
|
||||
/// Math-captcha challenge handed to the client.
///
/// The `token` identifies the answer stored server-side; the client echoes it
/// back together with the user's answer when submitting a comment.
#[derive(Clone, Debug, Serialize)]
pub struct CommentCaptchaChallenge {
    /// Opaque one-time token keyed to the stored answer (UUID v4 string).
    pub token: String,
    /// Human-readable arithmetic question, e.g. "3 + 4 = ?".
    pub question: String,
    /// Seconds until the challenge expires (mirrors the configured TTL).
    pub expires_in_seconds: i64,
}
|
||||
|
||||
/// Borrowed view of a comment submission consumed by the anti-abuse checks.
///
/// All fields are optional; each guard skips checks whose inputs are absent.
#[derive(Clone, Debug)]
pub struct CommentGuardInput<'a> {
    /// Client IP as seen by the HTTP layer (rate limiting, blacklist, captcha binding).
    pub ip_address: Option<&'a str>,
    /// Commenter e-mail (blacklist matching, keyword scan).
    pub email: Option<&'a str>,
    /// Raw User-Agent header (blacklist matching).
    pub user_agent: Option<&'a str>,
    /// Display name of the author (keyword scan).
    pub author: Option<&'a str>,
    /// Comment body (keyword scan).
    pub content: Option<&'a str>,
    /// Hidden honeypot form field — any non-blank value marks the request as a bot.
    pub honeypot_website: Option<&'a str>,
    /// Token previously issued by `create_captcha_challenge`.
    pub captcha_token: Option<&'a str>,
    /// User-supplied answer to the captcha question.
    pub captcha_answer: Option<&'a str>,
}
|
||||
|
||||
/// Runtime guard configuration, resolved from environment variables with
/// clamped defaults (see `load_config`).
#[derive(Clone, Debug)]
struct GuardConfig {
    /// Sliding-window length for per-IP rate limiting, in seconds.
    rate_limit_window_seconds: i64,
    /// Maximum comments allowed per IP within one window.
    rate_limit_max_per_window: u64,
    /// Minimum pause required between two comments from the same IP.
    min_interval_seconds: i64,
    /// Lowercased keywords that block a submission when found in author/email/content.
    blocked_keywords: Vec<String>,
    /// Lifetime of an issued captcha challenge, in seconds.
    captcha_ttl_seconds: i64,
}
|
||||
|
||||
/// Server-side record of an outstanding captcha challenge.
#[derive(Clone, Debug)]
struct CaptchaEntry {
    /// Expected answer, stored as a decimal string.
    answer: String,
    /// Absolute expiry instant; entries at or past this point are purged.
    expires_at: DateTime<Utc>,
    /// IP the challenge was issued to, if known — compared on solve when both sides are present.
    ip_address: Option<String>,
}
|
||||
|
||||
/// Reads an `i64` from the environment variable `name`, clamped to `[min, max]`.
/// Falls back to `fallback` when the variable is unset or unparsable.
fn parse_env_i64(name: &str, fallback: i64, min: i64, max: i64) -> i64 {
    match std::env::var(name) {
        Ok(raw) => match raw.trim().parse::<i64>() {
            Ok(parsed) => parsed.clamp(min, max),
            Err(_) => fallback,
        },
        Err(_) => fallback,
    }
}
|
||||
|
||||
/// Trims `value` and returns the owned result, or `None` when the input is
/// absent or blank.
fn trim_to_option(value: Option<&str>) -> Option<String> {
    value
        .map(str::trim)
        .filter(|trimmed| !trimmed.is_empty())
        .map(str::to_string)
}
|
||||
|
||||
fn normalize_email(value: Option<&str>) -> Option<String> {
|
||||
trim_to_option(value).map(|item| item.to_lowercase())
|
||||
}
|
||||
|
||||
fn normalize_user_agent(value: Option<&str>) -> Option<String> {
|
||||
trim_to_option(value).map(|item| item.chars().take(512).collect::<String>())
|
||||
}
|
||||
|
||||
fn normalize_ip(value: Option<&str>) -> Option<String> {
|
||||
trim_to_option(value).map(|item| item.chars().take(96).collect::<String>())
|
||||
}
|
||||
|
||||
fn parse_keywords() -> Vec<String> {
|
||||
std::env::var(ENV_BLOCK_KEYWORDS)
|
||||
.ok()
|
||||
.map(|value| {
|
||||
value
|
||||
.split([',', '\n', '\r'])
|
||||
.map(str::trim)
|
||||
.filter(|item| !item.is_empty())
|
||||
.map(|item| item.to_lowercase())
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn load_config() -> GuardConfig {
|
||||
GuardConfig {
|
||||
rate_limit_window_seconds: parse_env_i64(
|
||||
ENV_RATE_LIMIT_WINDOW_SECONDS,
|
||||
DEFAULT_RATE_LIMIT_WINDOW_SECONDS,
|
||||
10,
|
||||
24 * 60 * 60,
|
||||
),
|
||||
rate_limit_max_per_window: parse_env_i64(
|
||||
ENV_RATE_LIMIT_MAX_PER_WINDOW,
|
||||
DEFAULT_RATE_LIMIT_MAX_PER_WINDOW as i64,
|
||||
1,
|
||||
500,
|
||||
) as u64,
|
||||
min_interval_seconds: parse_env_i64(
|
||||
ENV_MIN_INTERVAL_SECONDS,
|
||||
DEFAULT_MIN_INTERVAL_SECONDS,
|
||||
0,
|
||||
6 * 60 * 60,
|
||||
),
|
||||
blocked_keywords: parse_keywords(),
|
||||
captcha_ttl_seconds: parse_env_i64(
|
||||
ENV_CAPTCHA_TTL_SECONDS,
|
||||
DEFAULT_CAPTCHA_TTL_SECONDS,
|
||||
30,
|
||||
24 * 60 * 60,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
fn captcha_store() -> &'static Mutex<HashMap<String, CaptchaEntry>> {
|
||||
static STORE: OnceLock<Mutex<HashMap<String, CaptchaEntry>>> = OnceLock::new();
|
||||
STORE.get_or_init(|| Mutex::new(HashMap::new()))
|
||||
}
|
||||
|
||||
/// Drops every captcha entry whose expiry is at or before `now`.
///
/// Uses `HashMap::retain` for a single in-place pass instead of collecting
/// expired tokens into a temporary `Vec` and removing them one by one
/// (the clippy `manual_retain` pattern).
fn cleanup_expired_captcha_entries(store: &mut HashMap<String, CaptchaEntry>, now: DateTime<Utc>) {
    store.retain(|_, entry| entry.expires_at > now);
}
|
||||
|
||||
pub fn normalize_matcher_type(value: &str) -> Option<&'static str> {
|
||||
match value.trim().to_ascii_lowercase().as_str() {
|
||||
MATCHER_TYPE_IP => Some(MATCHER_TYPE_IP),
|
||||
MATCHER_TYPE_EMAIL => Some(MATCHER_TYPE_EMAIL),
|
||||
MATCHER_TYPE_USER_AGENT | "ua" | "useragent" => Some(MATCHER_TYPE_USER_AGENT),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn normalize_matcher_value(matcher_type: &str, raw_value: &str) -> Option<String> {
|
||||
let normalized_type = normalize_matcher_type(matcher_type)?;
|
||||
|
||||
match normalized_type {
|
||||
MATCHER_TYPE_IP => normalize_ip(Some(raw_value)),
|
||||
MATCHER_TYPE_EMAIL => normalize_email(Some(raw_value)),
|
||||
MATCHER_TYPE_USER_AGENT => normalize_user_agent(Some(raw_value)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_captcha_challenge(client_ip: Option<&str>) -> Result<CommentCaptchaChallenge> {
|
||||
let config = load_config();
|
||||
let seed = Uuid::new_v4().as_u128();
|
||||
let left = ((seed % 9) + 1) as i64;
|
||||
let right = (((seed / 11) % 9) + 1) as i64;
|
||||
let use_subtract = seed % 2 == 0 && left > right;
|
||||
let (question, answer) = if use_subtract {
|
||||
(
|
||||
format!("{} - {} = ?", left, right),
|
||||
(left - right).to_string(),
|
||||
)
|
||||
} else {
|
||||
(
|
||||
format!("{} + {} = ?", left, right),
|
||||
(left + right).to_string(),
|
||||
)
|
||||
};
|
||||
|
||||
let token = Uuid::new_v4().to_string();
|
||||
let now = Utc::now();
|
||||
let expires_at = now + Duration::seconds(config.captcha_ttl_seconds);
|
||||
let ip_address = normalize_ip(client_ip);
|
||||
|
||||
let mut store = captcha_store()
|
||||
.lock()
|
||||
.map_err(|_| Error::InternalServerError)?;
|
||||
cleanup_expired_captcha_entries(&mut store, now);
|
||||
store.insert(
|
||||
token.clone(),
|
||||
CaptchaEntry {
|
||||
answer,
|
||||
expires_at,
|
||||
ip_address,
|
||||
},
|
||||
);
|
||||
|
||||
Ok(CommentCaptchaChallenge {
|
||||
token,
|
||||
question,
|
||||
expires_in_seconds: config.captcha_ttl_seconds,
|
||||
})
|
||||
}
|
||||
|
||||
/// Validates a captcha solution against the in-memory store.
///
/// The token is consumed (removed) on first lookup regardless of outcome, so
/// a challenge can only be attempted once. When both the stored entry and the
/// current request carry an IP, they must match; if either side is missing the
/// IP check is skipped. All failures are reported as `BadRequest` with a
/// user-facing (Chinese) message.
pub fn verify_captcha_solution(
    captcha_token: Option<&str>,
    captcha_answer: Option<&str>,
    client_ip: Option<&str>,
) -> Result<()> {
    let token = trim_to_option(captcha_token)
        .ok_or_else(|| Error::BadRequest("请先完成验证码".to_string()))?;
    let answer = trim_to_option(captcha_answer)
        .ok_or_else(|| Error::BadRequest("请填写验证码答案".to_string()))?;

    let now = Utc::now();
    let normalized_ip = normalize_ip(client_ip);

    // A poisoned lock is unrecoverable; surface it as a 500.
    let mut store = captcha_store()
        .lock()
        .map_err(|_| Error::InternalServerError)?;
    cleanup_expired_captcha_entries(&mut store, now);

    // `remove` makes the token single-use: wrong answers burn the challenge.
    let Some(entry) = store.remove(&token) else {
        return Err(Error::BadRequest("验证码已失效,请刷新后重试".to_string()));
    };

    // Defensive re-check; cleanup above should already have purged expired entries.
    if entry.expires_at <= now {
        return Err(Error::BadRequest("验证码已过期,请刷新后重试".to_string()));
    }

    // IP binding: only enforced when both issue-time and solve-time IPs exist.
    if entry
        .ip_address
        .as_deref()
        .zip(normalized_ip.as_deref())
        .is_some_and(|(expected, current)| expected != current)
    {
        return Err(Error::BadRequest(
            "验证码校验失败,请刷新后重试".to_string(),
        ));
    }

    if entry.answer.trim() != answer.trim() {
        return Err(Error::BadRequest("验证码答案错误".to_string()));
    }

    Ok(())
}
|
||||
|
||||
/// Rejects the submission when any identifying attribute (IP, e-mail, or
/// user-agent) matches an active, unexpired blacklist entry.
///
/// Runs at most one DB query per supplied attribute.
async fn check_blacklist(ctx: &AppContext, input: &CommentGuardInput<'_>) -> Result<()> {
    let now = Utc::now();
    // Normalize each candidate the same way blacklist entries are stored.
    let candidates = [
        (MATCHER_TYPE_IP, normalize_ip(input.ip_address)),
        (MATCHER_TYPE_EMAIL, normalize_email(input.email)),
        (
            MATCHER_TYPE_USER_AGENT,
            normalize_user_agent(input.user_agent),
        ),
    ];

    for (matcher_type, matcher_value) in candidates {
        // Skip attributes the request did not supply.
        let Some(matcher_value) = matcher_value else {
            continue;
        };

        let matched = comment_blacklist::Entity::find()
            .filter(comment_blacklist::Column::MatcherType.eq(matcher_type))
            .filter(comment_blacklist::Column::MatcherValue.eq(&matcher_value))
            // A NULL `active` flag is treated as active.
            .filter(
                Condition::any()
                    .add(comment_blacklist::Column::Active.is_null())
                    .add(comment_blacklist::Column::Active.eq(true)),
            )
            // Entries with no expiry never expire.
            .filter(
                Condition::any()
                    .add(comment_blacklist::Column::ExpiresAt.is_null())
                    .add(comment_blacklist::Column::ExpiresAt.gt(now)),
            )
            .one(&ctx.db)
            .await?;

        if matched.is_some() {
            return Err(Error::BadRequest("评论请求已被拦截".to_string()));
        }
    }

    Ok(())
}
|
||||
|
||||
/// Enforces the per-IP rate limit: at most `rate_limit_max_per_window`
/// comments inside the sliding window, plus a minimum pause between two
/// consecutive comments. Requests without a usable client IP are not limited.
async fn check_rate_limit(ctx: &AppContext, input: &CommentGuardInput<'_>) -> Result<()> {
    let config = load_config();
    let Some(ip_address) = normalize_ip(input.ip_address) else {
        return Ok(());
    };

    let now = Utc::now();
    let since = now - Duration::seconds(config.rate_limit_window_seconds);

    // Window check: count this IP's comments since the window start.
    let count = comments::Entity::find()
        .filter(comments::Column::IpAddress.eq(&ip_address))
        .filter(comments::Column::CreatedAt.gte(since))
        .count(&ctx.db)
        .await?;

    if count >= config.rate_limit_max_per_window {
        return Err(Error::BadRequest("评论过于频繁,请稍后再试".to_string()));
    }

    // The minimum-interval check is optional; non-positive config disables it.
    if config.min_interval_seconds <= 0 {
        return Ok(());
    }

    if let Some(last_comment) = comments::Entity::find()
        .filter(comments::Column::IpAddress.eq(&ip_address))
        .order_by_desc(comments::Column::CreatedAt)
        .one(&ctx.db)
        .await?
    {
        let last_created_at = DateTime::<Utc>::from(last_comment.created_at);
        let elapsed = now.signed_duration_since(last_created_at).num_seconds();
        if elapsed < config.min_interval_seconds {
            return Err(Error::BadRequest("提交过快,请稍后再试".to_string()));
        }
    }

    Ok(())
}
|
||||
|
||||
fn contains_blocked_keyword(input: &CommentGuardInput<'_>) -> Option<String> {
|
||||
let config = load_config();
|
||||
if config.blocked_keywords.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut merged = String::new();
|
||||
for value in [input.author, input.email, input.content] {
|
||||
if let Some(value) = value {
|
||||
merged.push_str(value);
|
||||
merged.push('\n');
|
||||
}
|
||||
}
|
||||
let lower = merged.to_lowercase();
|
||||
|
||||
config
|
||||
.blocked_keywords
|
||||
.into_iter()
|
||||
.find(|keyword| lower.contains(keyword))
|
||||
}
|
||||
|
||||
/// Runs the full anti-abuse pipeline for a comment submission, in order of
/// increasing cost: honeypot, captcha, keyword scan, then the DB-backed
/// blacklist and rate-limit checks. The first failing check aborts with a
/// `BadRequest`.
pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'_>) -> Result<()> {
    // Honeypot: the hidden field must stay empty for human submissions.
    if trim_to_option(input.honeypot_website).is_some() {
        return Err(Error::BadRequest("提交未通过校验".to_string()));
    }

    verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;

    if contains_blocked_keyword(input).is_some() {
        return Err(Error::BadRequest("评论内容包含敏感关键词".to_string()));
    }

    check_blacklist(ctx, input).await?;
    check_rate_limit(ctx, input).await?;

    Ok(())
}
|
||||
@@ -1,3 +1,4 @@
|
||||
use chrono::{DateTime, FixedOffset, NaiveDate, TimeZone, Utc};
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, QueryFilter,
|
||||
@@ -12,6 +13,12 @@ use crate::models::_entities::{categories, comments, posts, tags};
|
||||
|
||||
pub const MARKDOWN_POSTS_DIR: &str = "content/posts";
|
||||
const FIXTURE_POSTS_FILE: &str = "src/fixtures/posts.yaml";
|
||||
pub const POST_STATUS_DRAFT: &str = "draft";
|
||||
pub const POST_STATUS_PUBLISHED: &str = "published";
|
||||
pub const POST_STATUS_OFFLINE: &str = "offline";
|
||||
pub const POST_VISIBILITY_PUBLIC: &str = "public";
|
||||
pub const POST_VISIBILITY_UNLISTED: &str = "unlisted";
|
||||
pub const POST_VISIBILITY_PRIVATE: &str = "private";
|
||||
|
||||
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
|
||||
struct MarkdownFrontmatter {
|
||||
@@ -33,6 +40,16 @@ struct MarkdownFrontmatter {
|
||||
pinned: Option<bool>,
|
||||
published: Option<bool>,
|
||||
draft: Option<bool>,
|
||||
status: Option<String>,
|
||||
visibility: Option<String>,
|
||||
publish_at: Option<String>,
|
||||
unpublish_at: Option<String>,
|
||||
canonical_url: Option<String>,
|
||||
noindex: Option<bool>,
|
||||
og_image: Option<String>,
|
||||
#[serde(default, deserialize_with = "deserialize_optional_string_list")]
|
||||
redirect_from: Option<Vec<String>>,
|
||||
redirect_to: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
@@ -47,7 +64,15 @@ pub struct MarkdownPost {
|
||||
pub image: Option<String>,
|
||||
pub images: Vec<String>,
|
||||
pub pinned: bool,
|
||||
pub published: bool,
|
||||
pub status: String,
|
||||
pub visibility: String,
|
||||
pub publish_at: Option<String>,
|
||||
pub unpublish_at: Option<String>,
|
||||
pub canonical_url: Option<String>,
|
||||
pub noindex: bool,
|
||||
pub og_image: Option<String>,
|
||||
pub redirect_from: Vec<String>,
|
||||
pub redirect_to: Option<String>,
|
||||
pub file_path: String,
|
||||
}
|
||||
|
||||
@@ -63,7 +88,15 @@ pub struct MarkdownPostDraft {
|
||||
pub image: Option<String>,
|
||||
pub images: Vec<String>,
|
||||
pub pinned: bool,
|
||||
pub published: bool,
|
||||
pub status: String,
|
||||
pub visibility: String,
|
||||
pub publish_at: Option<String>,
|
||||
pub unpublish_at: Option<String>,
|
||||
pub canonical_url: Option<String>,
|
||||
pub noindex: bool,
|
||||
pub og_image: Option<String>,
|
||||
pub redirect_from: Vec<String>,
|
||||
pub redirect_to: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -124,6 +157,147 @@ fn normalize_string_list(values: Option<Vec<String>>) -> Vec<String> {
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn yaml_scalar(value: &str) -> String {
|
||||
serde_yaml::to_string(value)
|
||||
.unwrap_or_else(|_| format!("{value:?}"))
|
||||
.trim()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
fn normalize_redirect_list(values: Option<Vec<String>>) -> Vec<String> {
|
||||
let mut seen = std::collections::HashSet::new();
|
||||
|
||||
normalize_string_list(values)
|
||||
.into_iter()
|
||||
.map(|item| item.trim_matches('/').to_string())
|
||||
.filter(|item| !item.is_empty())
|
||||
.filter(|item| seen.insert(item.to_lowercase()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn normalize_url_like(value: Option<String>) -> Option<String> {
|
||||
trim_to_option(value).map(|item| item.trim_end_matches('/').to_string())
|
||||
}
|
||||
|
||||
pub fn normalize_post_status(value: Option<&str>) -> String {
|
||||
match value
|
||||
.map(str::trim)
|
||||
.unwrap_or_default()
|
||||
.to_ascii_lowercase()
|
||||
.as_str()
|
||||
{
|
||||
POST_STATUS_DRAFT => POST_STATUS_DRAFT.to_string(),
|
||||
POST_STATUS_OFFLINE => POST_STATUS_OFFLINE.to_string(),
|
||||
_ => POST_STATUS_PUBLISHED.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn normalize_post_visibility(value: Option<&str>) -> String {
|
||||
match value
|
||||
.map(str::trim)
|
||||
.unwrap_or_default()
|
||||
.to_ascii_lowercase()
|
||||
.as_str()
|
||||
{
|
||||
POST_VISIBILITY_UNLISTED => POST_VISIBILITY_UNLISTED.to_string(),
|
||||
POST_VISIBILITY_PRIVATE => POST_VISIBILITY_PRIVATE.to_string(),
|
||||
_ => POST_VISIBILITY_PUBLIC.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_frontmatter_datetime(value: Option<String>) -> Option<DateTime<FixedOffset>> {
|
||||
let raw = trim_to_option(value)?;
|
||||
|
||||
if let Ok(parsed) = DateTime::parse_from_rfc3339(&raw) {
|
||||
return Some(parsed);
|
||||
}
|
||||
|
||||
if let Ok(date_only) = NaiveDate::parse_from_str(&raw, "%Y-%m-%d") {
|
||||
let naive = date_only.and_hms_opt(0, 0, 0)?;
|
||||
return FixedOffset::east_opt(0)?.from_local_datetime(&naive).single();
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn format_frontmatter_datetime(value: Option<DateTime<FixedOffset>>) -> Option<String> {
|
||||
value.map(|item| item.with_timezone(&Utc).to_rfc3339())
|
||||
}
|
||||
|
||||
fn resolve_post_status(frontmatter: &MarkdownFrontmatter) -> String {
|
||||
if let Some(status) = trim_to_option(frontmatter.status.clone()) {
|
||||
return normalize_post_status(Some(&status));
|
||||
}
|
||||
|
||||
if frontmatter.draft.unwrap_or(false) {
|
||||
POST_STATUS_DRAFT.to_string()
|
||||
} else if frontmatter.published.unwrap_or(true) {
|
||||
POST_STATUS_PUBLISHED.to_string()
|
||||
} else {
|
||||
POST_STATUS_DRAFT.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn effective_post_state(
|
||||
status: &str,
|
||||
publish_at: Option<DateTime<FixedOffset>>,
|
||||
unpublish_at: Option<DateTime<FixedOffset>>,
|
||||
now: DateTime<FixedOffset>,
|
||||
) -> String {
|
||||
let normalized_status = normalize_post_status(Some(status));
|
||||
|
||||
if normalized_status == POST_STATUS_DRAFT {
|
||||
return POST_STATUS_DRAFT.to_string();
|
||||
}
|
||||
|
||||
if normalized_status == POST_STATUS_OFFLINE {
|
||||
return POST_STATUS_OFFLINE.to_string();
|
||||
}
|
||||
|
||||
if publish_at.map(|value| value > now).unwrap_or(false) {
|
||||
return "scheduled".to_string();
|
||||
}
|
||||
|
||||
if unpublish_at.map(|value| value <= now).unwrap_or(false) {
|
||||
return "expired".to_string();
|
||||
}
|
||||
|
||||
POST_STATUS_PUBLISHED.to_string()
|
||||
}
|
||||
|
||||
pub fn post_redirects_from_json(value: &Option<Value>) -> Vec<String> {
|
||||
value
|
||||
.as_ref()
|
||||
.and_then(Value::as_array)
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.filter_map(|item| item.as_str().map(ToString::to_string))
|
||||
.map(|item| item.trim_matches('/').to_string())
|
||||
.filter(|item| !item.is_empty())
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn is_post_listed_publicly(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
|
||||
effective_post_state(
|
||||
post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
|
||||
post.publish_at,
|
||||
post.unpublish_at,
|
||||
now,
|
||||
) == POST_STATUS_PUBLISHED
|
||||
&& normalize_post_visibility(post.visibility.as_deref()) == POST_VISIBILITY_PUBLIC
|
||||
}
|
||||
|
||||
pub fn is_post_publicly_accessible(post: &posts::Model, now: DateTime<FixedOffset>) -> bool {
|
||||
effective_post_state(
|
||||
post.status.as_deref().unwrap_or(POST_STATUS_PUBLISHED),
|
||||
post.publish_at,
|
||||
post.unpublish_at,
|
||||
now,
|
||||
) == POST_STATUS_PUBLISHED
|
||||
&& normalize_post_visibility(post.visibility.as_deref()) != POST_VISIBILITY_PRIVATE
|
||||
}
|
||||
|
||||
fn split_inline_list(value: &str) -> Vec<String> {
|
||||
value
|
||||
.split([',', ','])
|
||||
@@ -268,7 +442,7 @@ fn parse_markdown_post(path: &Path) -> Result<MarkdownPost> {
|
||||
parse_markdown_source(&file_stem, &raw, &path.to_string_lossy())
|
||||
}
|
||||
|
||||
fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
|
||||
pub fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
|
||||
let (frontmatter, content) = split_frontmatter(raw)?;
|
||||
|
||||
let slug = trim_to_option(frontmatter.slug.clone()).unwrap_or_else(|| file_stem.to_string());
|
||||
@@ -282,6 +456,7 @@ fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<
|
||||
.next();
|
||||
let tags = frontmatter
|
||||
.tags
|
||||
.clone()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.map(|item| item.trim().to_string())
|
||||
@@ -300,60 +475,90 @@ fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<
|
||||
image: trim_to_option(frontmatter.image.clone()),
|
||||
images: normalize_string_list(frontmatter.images.clone()),
|
||||
pinned: frontmatter.pinned.unwrap_or(false),
|
||||
published: frontmatter
|
||||
.published
|
||||
.unwrap_or(!frontmatter.draft.unwrap_or(false)),
|
||||
status: resolve_post_status(&frontmatter),
|
||||
visibility: normalize_post_visibility(frontmatter.visibility.as_deref()),
|
||||
publish_at: format_frontmatter_datetime(parse_frontmatter_datetime(
|
||||
frontmatter.publish_at.clone(),
|
||||
)),
|
||||
unpublish_at: format_frontmatter_datetime(parse_frontmatter_datetime(
|
||||
frontmatter.unpublish_at.clone(),
|
||||
)),
|
||||
canonical_url: normalize_url_like(frontmatter.canonical_url.clone()),
|
||||
noindex: frontmatter.noindex.unwrap_or(false),
|
||||
og_image: normalize_url_like(frontmatter.og_image.clone()),
|
||||
redirect_from: normalize_redirect_list(frontmatter.redirect_from.clone()),
|
||||
redirect_to: trim_to_option(frontmatter.redirect_to.clone())
|
||||
.map(|item| item.trim_matches('/').to_string()),
|
||||
file_path: file_path.to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
fn build_markdown_document(post: &MarkdownPost) -> String {
|
||||
pub fn build_markdown_document(post: &MarkdownPost) -> String {
|
||||
let mut lines = vec![
|
||||
"---".to_string(),
|
||||
format!(
|
||||
"title: {}",
|
||||
serde_yaml::to_string(&post.title)
|
||||
.unwrap_or_else(|_| format!("{:?}", post.title))
|
||||
.trim()
|
||||
),
|
||||
format!("slug: {}", post.slug),
|
||||
format!("title: {}", yaml_scalar(&post.title)),
|
||||
format!("slug: {}", yaml_scalar(&post.slug)),
|
||||
];
|
||||
|
||||
if let Some(description) = &post.description {
|
||||
lines.push(format!(
|
||||
"description: {}",
|
||||
serde_yaml::to_string(description)
|
||||
.unwrap_or_else(|_| format!("{description:?}"))
|
||||
.trim()
|
||||
));
|
||||
lines.push(format!("description: {}", yaml_scalar(description)));
|
||||
}
|
||||
|
||||
if let Some(category) = &post.category {
|
||||
lines.push(format!("category: {}", category));
|
||||
lines.push(format!("category: {}", yaml_scalar(category)));
|
||||
}
|
||||
|
||||
lines.push(format!("post_type: {}", post.post_type));
|
||||
lines.push(format!("post_type: {}", yaml_scalar(&post.post_type)));
|
||||
lines.push(format!("pinned: {}", post.pinned));
|
||||
lines.push(format!("published: {}", post.published));
|
||||
lines.push(format!("status: {}", yaml_scalar(&post.status)));
|
||||
lines.push(format!("visibility: {}", yaml_scalar(&post.visibility)));
|
||||
lines.push(format!("noindex: {}", post.noindex));
|
||||
|
||||
if let Some(publish_at) = &post.publish_at {
|
||||
lines.push(format!("publish_at: {}", yaml_scalar(publish_at)));
|
||||
}
|
||||
|
||||
if let Some(unpublish_at) = &post.unpublish_at {
|
||||
lines.push(format!("unpublish_at: {}", yaml_scalar(unpublish_at)));
|
||||
}
|
||||
|
||||
if let Some(image) = &post.image {
|
||||
lines.push(format!("image: {}", image));
|
||||
lines.push(format!("image: {}", yaml_scalar(image)));
|
||||
}
|
||||
|
||||
if !post.images.is_empty() {
|
||||
lines.push("images:".to_string());
|
||||
for image in &post.images {
|
||||
lines.push(format!(" - {}", image));
|
||||
lines.push(format!(" - {}", yaml_scalar(image)));
|
||||
}
|
||||
}
|
||||
|
||||
if !post.tags.is_empty() {
|
||||
lines.push("tags:".to_string());
|
||||
for tag in &post.tags {
|
||||
lines.push(format!(" - {}", tag));
|
||||
lines.push(format!(" - {}", yaml_scalar(tag)));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(canonical_url) = &post.canonical_url {
|
||||
lines.push(format!("canonical_url: {}", yaml_scalar(canonical_url)));
|
||||
}
|
||||
|
||||
if let Some(og_image) = &post.og_image {
|
||||
lines.push(format!("og_image: {}", yaml_scalar(og_image)));
|
||||
}
|
||||
|
||||
if !post.redirect_from.is_empty() {
|
||||
lines.push("redirect_from:".to_string());
|
||||
for redirect in &post.redirect_from {
|
||||
lines.push(format!(" - {}", yaml_scalar(redirect)));
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(redirect_to) = &post.redirect_to {
|
||||
lines.push(format!("redirect_to: {}", yaml_scalar(redirect_to)));
|
||||
}
|
||||
|
||||
lines.push("---".to_string());
|
||||
lines.push(String::new());
|
||||
lines.push(post.content.trim().to_string());
|
||||
@@ -390,7 +595,19 @@ fn ensure_markdown_posts_bootstrapped() -> Result<()> {
|
||||
image: None,
|
||||
images: Vec::new(),
|
||||
pinned: fixture.pinned.unwrap_or(false),
|
||||
published: fixture.published.unwrap_or(true),
|
||||
status: if fixture.published.unwrap_or(true) {
|
||||
POST_STATUS_PUBLISHED.to_string()
|
||||
} else {
|
||||
POST_STATUS_DRAFT.to_string()
|
||||
},
|
||||
visibility: POST_VISIBILITY_PUBLIC.to_string(),
|
||||
publish_at: None,
|
||||
unpublish_at: None,
|
||||
canonical_url: None,
|
||||
noindex: false,
|
||||
og_image: None,
|
||||
redirect_from: Vec::new(),
|
||||
redirect_to: None,
|
||||
file_path: markdown_post_path(&fixture.slug)
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
@@ -799,6 +1016,27 @@ pub async fn sync_markdown_posts(ctx: &AppContext) -> Result<Vec<MarkdownPost>>
|
||||
))
|
||||
});
|
||||
model.pinned = Set(Some(post.pinned));
|
||||
model.status = Set(Some(normalize_post_status(Some(&post.status))));
|
||||
model.visibility = Set(Some(normalize_post_visibility(Some(&post.visibility))));
|
||||
model.publish_at = Set(parse_frontmatter_datetime(post.publish_at.clone()));
|
||||
model.unpublish_at = Set(parse_frontmatter_datetime(post.unpublish_at.clone()));
|
||||
model.canonical_url = Set(normalize_url_like(post.canonical_url.clone()));
|
||||
model.noindex = Set(Some(post.noindex));
|
||||
model.og_image = Set(normalize_url_like(post.og_image.clone()));
|
||||
model.redirect_from = Set(if post.redirect_from.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(Value::Array(
|
||||
post.redirect_from
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(Value::String)
|
||||
.collect::<Vec<_>>(),
|
||||
))
|
||||
});
|
||||
model.redirect_to = Set(
|
||||
trim_to_option(post.redirect_to.clone()).map(|item| item.trim_matches('/').to_string()),
|
||||
);
|
||||
|
||||
if has_existing {
|
||||
let _ = model.update(&ctx.db).await;
|
||||
@@ -895,7 +1133,16 @@ pub async fn create_markdown_post(
|
||||
image: trim_to_option(draft.image),
|
||||
images: normalize_string_list(Some(draft.images)),
|
||||
pinned: draft.pinned,
|
||||
published: draft.published,
|
||||
status: normalize_post_status(Some(&draft.status)),
|
||||
visibility: normalize_post_visibility(Some(&draft.visibility)),
|
||||
publish_at: format_frontmatter_datetime(parse_frontmatter_datetime(draft.publish_at)),
|
||||
unpublish_at: format_frontmatter_datetime(parse_frontmatter_datetime(draft.unpublish_at)),
|
||||
canonical_url: normalize_url_like(draft.canonical_url),
|
||||
noindex: draft.noindex,
|
||||
og_image: normalize_url_like(draft.og_image),
|
||||
redirect_from: normalize_redirect_list(Some(draft.redirect_from)),
|
||||
redirect_to: trim_to_option(draft.redirect_to)
|
||||
.map(|item| item.trim_matches('/').to_string()),
|
||||
file_path: markdown_post_path(&slug).to_string_lossy().to_string(),
|
||||
};
|
||||
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
pub mod admin_audit;
|
||||
pub mod abuse_guard;
|
||||
pub mod ai;
|
||||
pub mod analytics;
|
||||
pub mod comment_guard;
|
||||
pub mod content;
|
||||
pub mod notifications;
|
||||
pub mod post_revisions;
|
||||
pub mod storage;
|
||||
pub mod subscriptions;
|
||||
|
||||
164
backend/src/services/notifications.rs
Normal file
164
backend/src/services/notifications.rs
Normal file
@@ -0,0 +1,164 @@
|
||||
use loco_rs::prelude::*;
|
||||
use crate::{
|
||||
controllers::site_settings,
|
||||
models::_entities::{comments, friend_links},
|
||||
services::subscriptions,
|
||||
};
|
||||
|
||||
/// Trims the contained string, returning `None` for absent or blank values.
///
/// Checks emptiness on the borrowed `trim()` result *before* allocating,
/// instead of building an owned `String` only to discard it for blank input.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value.and_then(|item| {
        let trimmed = item.trim();
        if trimmed.is_empty() {
            None
        } else {
            Some(trimmed.to_string())
        }
    })
}
|
||||
|
||||
/// Collapses all whitespace in `value` to single spaces and truncates the
/// result to `limit` characters, appending "..." when anything was cut.
/// Returns `None` for absent or whitespace-only input.
fn excerpt(value: Option<&str>, limit: usize) -> Option<String> {
    let raw = value?;

    // Normalize runs of whitespace (including newlines) to single spaces.
    let mut flattened = String::new();
    for word in raw.split_whitespace() {
        if !flattened.is_empty() {
            flattened.push(' ');
        }
        flattened.push_str(word);
    }

    if flattened.is_empty() {
        return None;
    }

    let total_chars = flattened.chars().count();
    let mut shortened: String = flattened.chars().take(limit).collect();
    if total_chars > limit {
        shortened.push_str("...");
    }
    Some(shortened)
}
|
||||
|
||||
/// Fans out notifications for a newly created comment.
///
/// Best-effort: every failure (settings load, queueing) is logged via
/// `tracing::warn!` and never propagated to the caller. Two delivery paths:
/// 1. active event subscriptions (always attempted);
/// 2. a legacy direct webhook, only when enabled in site settings and a
///    webhook URL is configured.
pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
    let settings = match site_settings::load_current(ctx).await {
        Ok(settings) => settings,
        Err(error) => {
            tracing::warn!("failed to load site settings before comment notification: {error}");
            return;
        }
    };

    // Structured payload for machine consumers (webhooks/subscriptions).
    let payload = serde_json::json!({
        "event_type": subscriptions::EVENT_COMMENT_CREATED,
        "id": item.id,
        "post_slug": item.post_slug,
        "author": item.author,
        "email": item.email,
        "scope": item.scope,
        "paragraph_key": item.paragraph_key,
        "approved": item.approved.unwrap_or(false),
        "excerpt": excerpt(item.content.as_deref(), 200),
        "created_at": item.created_at.to_rfc3339(),
    });
    // Human-readable (Chinese) summary used as the notification body.
    let text = format!(
        "收到一条新的评论。\n\n文章:{}\n作者:{}\n范围:{}\n状态:{}\n摘要:{}",
        item.post_slug.clone().unwrap_or_else(|| "未知文章".to_string()),
        item.author.clone().unwrap_or_else(|| "匿名".to_string()),
        item.scope,
        if item.approved.unwrap_or(false) { "已通过" } else { "待审核" },
        excerpt(item.content.as_deref(), 200).unwrap_or_else(|| "无".to_string()),
    );

    // Path 1: queue for all active event subscriptions.
    if let Err(error) = subscriptions::queue_event_for_active_subscriptions(
        ctx,
        subscriptions::EVENT_COMMENT_CREATED,
        "新评论通知",
        &text,
        payload.clone(),
        trim_to_option(settings.site_name.clone()),
        trim_to_option(settings.site_url.clone()),
    )
    .await
    {
        tracing::warn!("failed to queue comment subscription notification: {error}");
    }

    // Path 2: legacy direct webhook, gated on the site-settings toggle.
    if settings.notification_comment_enabled.unwrap_or(false) {
        if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
            if let Err(error) = subscriptions::queue_direct_notification(
                ctx,
                subscriptions::CHANNEL_WEBHOOK,
                &target,
                subscriptions::EVENT_COMMENT_CREATED,
                "新评论通知",
                &text,
                payload,
                trim_to_option(settings.site_name),
                trim_to_option(settings.site_url),
            )
            .await
            {
                tracing::warn!("failed to queue legacy comment webhook notification: {error}");
            }
        }
    }
}
|
||||
|
||||
/// Fans out notifications for a newly submitted friend-link request.
///
/// Best-effort: every failure is logged with `tracing::warn!` and swallowed,
/// so the friend-link submission itself never fails because of notifications.
///
/// Two delivery paths are fed:
/// 1. all active subscriptions for `EVENT_FRIEND_LINK_CREATED`, and
/// 2. the legacy single-webhook target from the site settings — only when
///    `notification_friend_link_enabled` is set and a webhook URL is
///    configured.
pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model) {
    // Site settings supply the sender identity and the legacy webhook toggle;
    // without them we cannot notify, so bail out with a warning only.
    let settings = match site_settings::load_current(ctx).await {
        Ok(settings) => settings,
        Err(error) => {
            tracing::warn!("failed to load site settings before friend-link notification: {error}");
            return;
        }
    };

    // Structured payload delivered to webhook-style channels.
    let payload = serde_json::json!({
        "event_type": subscriptions::EVENT_FRIEND_LINK_CREATED,
        "id": item.id,
        "site_name": item.site_name,
        "site_url": item.site_url,
        "category": item.category,
        "status": item.status,
        "description": item.description,
        "created_at": item.created_at.to_rfc3339(),
    });
    // Human-readable body (Chinese) with sensible fallbacks for blank fields.
    let text = format!(
        "收到新的友链申请。\n\n站点:{}\n链接:{}\n分类:{}\n状态:{}\n描述:{}",
        item.site_name.clone().unwrap_or_else(|| "未命名站点".to_string()),
        item.site_url,
        item.category.clone().unwrap_or_else(|| "未分类".to_string()),
        item.status.clone().unwrap_or_else(|| "pending".to_string()),
        item.description.clone().unwrap_or_else(|| "无".to_string()),
    );

    // Path 1: all active subscriptions for this event type.
    if let Err(error) = subscriptions::queue_event_for_active_subscriptions(
        ctx,
        subscriptions::EVENT_FRIEND_LINK_CREATED,
        "新友链申请通知",
        &text,
        payload.clone(),
        trim_to_option(settings.site_name.clone()),
        trim_to_option(settings.site_url.clone()),
    )
    .await
    {
        tracing::warn!("failed to queue friend-link subscription notification: {error}");
    }

    // Path 2: legacy single-webhook target, gated on the settings toggle and
    // a non-blank webhook URL.
    if settings.notification_friend_link_enabled.unwrap_or(false) {
        if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
            if let Err(error) = subscriptions::queue_direct_notification(
                ctx,
                subscriptions::CHANNEL_WEBHOOK,
                &target,
                subscriptions::EVENT_FRIEND_LINK_CREATED,
                "新友链申请通知",
                &text,
                payload,
                trim_to_option(settings.site_name),
                trim_to_option(settings.site_url),
            )
            .await
            {
                tracing::warn!("failed to queue legacy friend-link webhook notification: {error}");
            }
        }
    }
}
|
||||
247
backend/src/services/post_revisions.rs
Normal file
247
backend/src/services/post_revisions.rs
Normal file
@@ -0,0 +1,247 @@
|
||||
use loco_rs::prelude::*;
|
||||
use sea_orm::{
|
||||
ActiveModelTrait, ColumnTrait, EntityTrait, Order, QueryFilter, QueryOrder, QuerySelect, Set,
|
||||
};
|
||||
use std::fs;
|
||||
|
||||
use crate::{
|
||||
controllers::admin::AdminIdentity,
|
||||
models::_entities::{post_revisions, posts},
|
||||
services::content,
|
||||
};
|
||||
|
||||
/// How much of a revision snapshot should be restored.
#[derive(Clone, Copy, Debug)]
pub enum RestoreMode {
    /// Replace the whole document (frontmatter + body).
    Full,
    /// Replace only the markdown body, keeping current metadata.
    Markdown,
    /// Replace only the frontmatter metadata, keeping the current body.
    Metadata,
}

impl RestoreMode {
    /// Parses a user-supplied mode string (case- and whitespace-insensitive).
    /// Unknown values fall back to `Full`.
    pub fn parse(value: &str) -> Self {
        let normalized = value.trim().to_ascii_lowercase();
        if matches!(normalized.as_str(), "markdown" | "content" | "body") {
            Self::Markdown
        } else if matches!(normalized.as_str(), "metadata" | "frontmatter") {
            Self::Metadata
        } else {
            Self::Full
        }
    }

    /// Canonical string form, e.g. for logging and stored metadata.
    pub fn as_str(self) -> &'static str {
        match self {
            Self::Full => "full",
            Self::Markdown => "markdown",
            Self::Metadata => "metadata",
        }
    }
}
|
||||
|
||||
/// Trims the wrapped string; blank or absent input becomes `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let item = value?;
    let trimmed = item.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
|
||||
|
||||
fn title_from_markdown(markdown: &str, slug: &str) -> Option<String> {
|
||||
let normalized = markdown.replace("\r\n", "\n");
|
||||
if let Some(frontmatter) = normalized
|
||||
.strip_prefix("---\n")
|
||||
.and_then(|rest| rest.split_once("\n---\n").map(|(frontmatter, _)| frontmatter))
|
||||
{
|
||||
for line in frontmatter.lines() {
|
||||
let trimmed = line.trim();
|
||||
if let Some(raw) = trimmed.strip_prefix("title:") {
|
||||
let title = raw.trim().trim_matches('"').trim_matches('\'').trim();
|
||||
if !title.is_empty() {
|
||||
return Some(title.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
normalized.lines().find_map(|line| {
|
||||
line.trim()
|
||||
.strip_prefix("# ")
|
||||
.map(str::trim)
|
||||
.filter(|value| !value.is_empty())
|
||||
.map(ToString::to_string)
|
||||
})
|
||||
.or_else(|| trim_to_option(Some(slug.to_string())))
|
||||
}
|
||||
|
||||
async fn lookup_post_title(ctx: &AppContext, slug: &str) -> Option<String> {
|
||||
posts::Entity::find()
|
||||
.filter(posts::Column::Slug.eq(slug))
|
||||
.one(&ctx.db)
|
||||
.await
|
||||
.ok()
|
||||
.flatten()
|
||||
.and_then(|item| item.title)
|
||||
.and_then(|value| trim_to_option(Some(value)))
|
||||
}
|
||||
|
||||
pub async fn capture_snapshot_from_markdown(
|
||||
ctx: &AppContext,
|
||||
actor: Option<&AdminIdentity>,
|
||||
slug: &str,
|
||||
markdown: &str,
|
||||
operation: &str,
|
||||
reason: Option<&str>,
|
||||
metadata: Option<serde_json::Value>,
|
||||
) -> Result<post_revisions::Model> {
|
||||
let post_title = lookup_post_title(ctx, slug)
|
||||
.await
|
||||
.or_else(|| title_from_markdown(markdown, slug));
|
||||
|
||||
post_revisions::ActiveModel {
|
||||
post_slug: Set(slug.to_string()),
|
||||
post_title: Set(post_title),
|
||||
operation: Set(operation.to_string()),
|
||||
revision_reason: Set(reason.map(ToString::to_string)),
|
||||
actor_username: Set(actor.map(|item| item.username.clone())),
|
||||
actor_email: Set(actor.and_then(|item| item.email.clone())),
|
||||
actor_source: Set(actor.map(|item| item.source.clone())),
|
||||
markdown: Set(Some(markdown.replace("\r\n", "\n"))),
|
||||
metadata: Set(metadata),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&ctx.db)
|
||||
.await
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
pub async fn capture_current_snapshot(
|
||||
ctx: &AppContext,
|
||||
actor: Option<&AdminIdentity>,
|
||||
slug: &str,
|
||||
operation: &str,
|
||||
reason: Option<&str>,
|
||||
metadata: Option<serde_json::Value>,
|
||||
) -> Result<Option<post_revisions::Model>> {
|
||||
let Ok((_path, markdown)) = content::read_markdown_document(slug) else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
capture_snapshot_from_markdown(ctx, actor, slug, &markdown, operation, reason, metadata)
|
||||
.await
|
||||
.map(Some)
|
||||
}
|
||||
|
||||
pub async fn list_revisions(
|
||||
ctx: &AppContext,
|
||||
slug: Option<&str>,
|
||||
limit: u64,
|
||||
) -> Result<Vec<post_revisions::Model>> {
|
||||
let mut query = post_revisions::Entity::find().order_by(post_revisions::Column::CreatedAt, Order::Desc);
|
||||
|
||||
if let Some(slug) = slug.map(str::trim).filter(|value| !value.is_empty()) {
|
||||
query = query.filter(post_revisions::Column::PostSlug.eq(slug));
|
||||
}
|
||||
|
||||
query
|
||||
.limit(limit)
|
||||
.all(&ctx.db)
|
||||
.await
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
pub async fn get_revision(ctx: &AppContext, id: i32) -> Result<post_revisions::Model> {
|
||||
post_revisions::Entity::find_by_id(id)
|
||||
.one(&ctx.db)
|
||||
.await?
|
||||
.ok_or(Error::NotFound)
|
||||
}
|
||||
|
||||
/// Restores a post's on-disk markdown from a stored revision snapshot.
///
/// `mode` selects how much is restored (see [`RestoreMode::parse`]): the full
/// document, the markdown body only, or the frontmatter metadata only.
/// Before overwriting anything, the current on-disk state is captured as a
/// `restore_backup` revision; after writing, the restored state is captured
/// again as a `restore` revision — so the operation is reversible from the
/// revision history.
///
/// Returns the source revision that was restored.
///
/// # Errors
/// `Error::BadRequest` when the revision has no usable markdown snapshot,
/// when a partial restore is requested but the current document is missing,
/// or when a filesystem operation fails; also propagates snapshot/sync
/// errors.
pub async fn restore_revision(
    ctx: &AppContext,
    actor: Option<&AdminIdentity>,
    revision_id: i32,
    mode: &str,
) -> Result<post_revisions::Model> {
    let revision = get_revision(ctx, revision_id).await?;
    let slug = revision.post_slug.clone();
    // A revision without a non-blank markdown snapshot cannot be restored.
    let revision_markdown = revision
        .markdown
        .clone()
        .filter(|value| !value.trim().is_empty())
        .ok_or_else(|| Error::BadRequest("该版本没有可恢复的 Markdown 快照".to_string()))?;
    let restore_mode = RestoreMode::parse(mode);

    // Safety net: snapshot the current state before touching the file.
    // `let _ =` discards the returned model, but `?` still propagates errors.
    let _ = capture_current_snapshot(
        ctx,
        actor,
        &slug,
        "restore_backup",
        Some("恢复前自动备份"),
        Some(serde_json::json!({
            "source_revision_id": revision_id,
            "mode": restore_mode.as_str(),
        })),
    )
    .await?;

    // Build the markdown to write: either the snapshot verbatim (Full), or a
    // merge of the snapshot with the current document (partial modes).
    let markdown = match restore_mode {
        RestoreMode::Full => revision_markdown.clone(),
        RestoreMode::Markdown | RestoreMode::Metadata => {
            // Partial restore needs the current document as the merge base.
            let (_path, current_markdown) = content::read_markdown_document(&slug).map_err(|_| {
                Error::BadRequest("当前文章不存在,无法执行局部恢复,请改用完整恢复".to_string())
            })?;
            let revision_post =
                content::parse_markdown_source(&slug, &revision_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
            let current_post =
                content::parse_markdown_source(&slug, &current_markdown, &content::markdown_post_path(&slug).to_string_lossy())?;
            // Start from the current document and overwrite only the part the
            // selected mode restores.
            let mut merged = current_post.clone();
            match restore_mode {
                RestoreMode::Markdown => {
                    // Body only: keep current metadata, take the old content.
                    merged.content = revision_post.content;
                }
                RestoreMode::Metadata => {
                    // Metadata only: keep current body, take all frontmatter
                    // fields from the revision.
                    merged.title = revision_post.title;
                    merged.description = revision_post.description;
                    merged.category = revision_post.category;
                    merged.tags = revision_post.tags;
                    merged.post_type = revision_post.post_type;
                    merged.image = revision_post.image;
                    merged.images = revision_post.images;
                    merged.pinned = revision_post.pinned;
                    merged.status = revision_post.status;
                    merged.visibility = revision_post.visibility;
                    merged.publish_at = revision_post.publish_at;
                    merged.unpublish_at = revision_post.unpublish_at;
                    merged.canonical_url = revision_post.canonical_url;
                    merged.noindex = revision_post.noindex;
                    merged.og_image = revision_post.og_image;
                    merged.redirect_from = revision_post.redirect_from;
                    merged.redirect_to = revision_post.redirect_to;
                }
                // Full was handled by the outer match arm.
                RestoreMode::Full => unreachable!(),
            }
            content::build_markdown_document(&merged)
        }
    };

    // Write the restored document (LF-normalized) and re-sync the content DB.
    fs::create_dir_all(content::MARKDOWN_POSTS_DIR).map_err(|error| Error::BadRequest(error.to_string()))?;
    fs::write(content::markdown_post_path(&slug), markdown.replace("\r\n", "\n"))
        .map_err(|error| Error::BadRequest(error.to_string()))?;
    content::sync_markdown_posts(ctx).await?;

    // Record the restored state itself as a new `restore` revision.
    let _ = capture_snapshot_from_markdown(
        ctx,
        actor,
        &slug,
        &markdown,
        "restore",
        Some("通过版本历史恢复"),
        Some(serde_json::json!({
            "source_revision_id": revision_id,
            "mode": restore_mode.as_str(),
        })),
    )
    .await?;

    Ok(revision)
}
|
||||
1216
backend/src/services/subscriptions.rs
Normal file
1216
backend/src/services/subscriptions.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1 +1,3 @@
|
||||
|
||||
pub mod retry_deliveries;
|
||||
pub mod send_monthly_digest;
|
||||
pub mod send_weekly_digest;
|
||||
|
||||
26
backend/src/tasks/retry_deliveries.rs
Normal file
26
backend/src/tasks/retry_deliveries.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
use loco_rs::prelude::*;
|
||||
|
||||
use crate::services::subscriptions;
|
||||
|
||||
pub struct RetryDeliveries;
|
||||
|
||||
#[async_trait]
|
||||
impl Task for RetryDeliveries {
|
||||
fn task(&self) -> TaskInfo {
|
||||
TaskInfo {
|
||||
name: "retry_deliveries".to_string(),
|
||||
detail: "enqueue due notification deliveries for retry".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn run(&self, app_context: &AppContext, vars: &task::Vars) -> Result<()> {
|
||||
let limit = vars
|
||||
.cli
|
||||
.get("limit")
|
||||
.and_then(|value| value.parse::<u64>().ok())
|
||||
.unwrap_or(200);
|
||||
let queued = subscriptions::retry_due_deliveries(app_context, limit).await?;
|
||||
tracing::info!("retry_deliveries queued {queued} jobs");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
26
backend/src/tasks/send_monthly_digest.rs
Normal file
26
backend/src/tasks/send_monthly_digest.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
use loco_rs::prelude::*;
|
||||
|
||||
use crate::services::subscriptions;
|
||||
|
||||
pub struct SendMonthlyDigest;
|
||||
|
||||
#[async_trait]
|
||||
impl Task for SendMonthlyDigest {
|
||||
fn task(&self) -> TaskInfo {
|
||||
TaskInfo {
|
||||
name: "send_monthly_digest".to_string(),
|
||||
detail: "queue monthly digest notifications".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn run(&self, app_context: &AppContext, _vars: &task::Vars) -> Result<()> {
|
||||
let summary = subscriptions::send_digest(app_context, "monthly").await?;
|
||||
tracing::info!(
|
||||
"send_monthly_digest queued={} skipped={} posts={}",
|
||||
summary.queued,
|
||||
summary.skipped,
|
||||
summary.post_count
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
26
backend/src/tasks/send_weekly_digest.rs
Normal file
26
backend/src/tasks/send_weekly_digest.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
use loco_rs::prelude::*;
|
||||
|
||||
use crate::services::subscriptions;
|
||||
|
||||
pub struct SendWeeklyDigest;
|
||||
|
||||
#[async_trait]
|
||||
impl Task for SendWeeklyDigest {
|
||||
fn task(&self) -> TaskInfo {
|
||||
TaskInfo {
|
||||
name: "send_weekly_digest".to_string(),
|
||||
detail: "queue weekly digest notifications".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn run(&self, app_context: &AppContext, _vars: &task::Vars) -> Result<()> {
|
||||
let summary = subscriptions::send_digest(app_context, "weekly").await?;
|
||||
tracing::info!(
|
||||
"send_weekly_digest queued={} skipped={} posts={}",
|
||||
summary.queued,
|
||||
summary.skipped,
|
||||
summary.post_count
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -1 +1,2 @@
|
||||
pub mod downloader;
|
||||
pub mod notification_delivery;
|
||||
|
||||
28
backend/src/workers/notification_delivery.rs
Normal file
28
backend/src/workers/notification_delivery.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
use loco_rs::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::services::subscriptions;
|
||||
|
||||
/// Background worker that performs a single queued notification delivery.
pub struct NotificationDeliveryWorker {
    // Cloned application context; gives the worker access to the database
    // and other app services.
    pub ctx: AppContext,
}

/// Serialized job arguments for [`NotificationDeliveryWorker`].
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct NotificationDeliveryWorkerArgs {
    // Id of the delivery record to process; forwarded unchanged to
    // `subscriptions::process_delivery`.
    pub delivery_id: i32,
}

#[async_trait]
impl BackgroundWorker<NotificationDeliveryWorkerArgs> for NotificationDeliveryWorker {
    fn build(ctx: &AppContext) -> Self {
        Self { ctx: ctx.clone() }
    }

    // Queue tag; lets the runner group/route notification jobs together.
    fn tags() -> Vec<String> {
        vec!["notifications".to_string()]
    }

    // Delegates the actual delivery (and its error handling/retry semantics)
    // to the subscriptions service.
    async fn perform(&self, args: NotificationDeliveryWorkerArgs) -> Result<()> {
        subscriptions::process_delivery(&self.ctx, args.delivery_id).await
    }
}
|
||||
Reference in New Issue
Block a user