feat: ship public ops features and cache docker builds
Some checks failed
docker-images / build-and-push (admin, admin, termi-astro-admin, admin/Dockerfile) (push) Failing after 13s
docker-images / build-and-push (frontend, frontend, termi-astro-frontend, frontend/Dockerfile) (push) Has been cancelled
docker-images / build-and-push (backend, backend, termi-astro-backend, backend/Dockerfile) (push) Has been cancelled

This commit is contained in:
2026-04-01 13:22:19 +08:00
parent 669b79cc95
commit 497a9d713d
75 changed files with 6985 additions and 668 deletions

View File

@@ -1,4 +1,5 @@
target
target-*
.git
.github
.gitea
@@ -6,3 +7,4 @@ node_modules
*.log
*.out
*.err
storage

3
backend/.gitignore vendored
View File

@@ -5,6 +5,7 @@
# will have compiled files and executables
debug/
target/
target-*/
# include cargo lock
!Cargo.lock
@@ -16,4 +17,4 @@ target/
*.pdb
*.sqlite
*.sqlite-*
*.sqlite-*

589
backend/Cargo.lock generated
View File

@@ -8,6 +8,62 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "aead"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0"
dependencies = [
"crypto-common 0.1.7",
"generic-array",
]
[[package]]
name = "aes"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
dependencies = [
"cfg-if",
"cipher 0.4.4",
"cpufeatures 0.2.17",
]
[[package]]
name = "aes"
version = "0.9.0-rc.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04097e08a47d9ad181c2e1f4a5fabc9ae06ce8839a333ba9a949bcb0d31fd2a3"
dependencies = [
"cipher 0.5.1",
"cpubits",
"cpufeatures 0.2.17",
]
[[package]]
name = "aes-gcm"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1"
dependencies = [
"aead",
"aes 0.8.4",
"cipher 0.4.4",
"ctr",
"ghash",
"subtle",
]
[[package]]
name = "aes-keywrap"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10b6f24a1f796bc46415a1d0d18dc0a8203ccba088acf5def3291c4f61225522"
dependencies = [
"aes 0.9.0-rc.4",
"byteorder",
]
[[package]]
name = "ahash"
version = "0.7.8"
@@ -190,6 +246,12 @@ dependencies = [
"password-hash",
]
[[package]]
name = "arrayref"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"
[[package]]
name = "arrayvec"
version = "0.7.6"
@@ -547,7 +609,7 @@ dependencies = [
"hmac",
"http 0.2.12",
"http 1.4.0",
"p256",
"p256 0.11.1",
"percent-encoding",
"ring",
"sha2",
@@ -950,12 +1012,24 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce"
[[package]]
name = "base16ct"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
[[package]]
name = "base64"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64"
version = "0.22.1"
@@ -992,6 +1066,12 @@ dependencies = [
"serde",
]
[[package]]
name = "binstring"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0669d5a35b64fdb5ab7fb19cae13148b6b5cbdf4b8247faf54ece47f699c8cef"
[[package]]
name = "bit_field"
version = "0.10.3"
@@ -1043,6 +1123,17 @@ dependencies = [
"digest",
]
[[package]]
name = "blake2b_simd"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b79834656f71332577234b50bfc009996f7449e0c056884e6a02492ded0ca2f3"
dependencies = [
"arrayref",
"arrayvec",
"constant_time_eq",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
@@ -1287,6 +1378,26 @@ dependencies = [
"stacker",
]
[[package]]
name = "cipher"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
dependencies = [
"crypto-common 0.1.7",
"inout 0.1.4",
]
[[package]]
name = "cipher"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e34d8227fe1ba289043aeb13792056ff80fd6de1a9f49137a5f499de8e8c78ea"
dependencies = [
"crypto-common 0.2.1",
"inout 0.2.2",
]
[[package]]
name = "clap"
version = "4.6.0"
@@ -1336,6 +1447,17 @@ dependencies = [
"cc",
]
[[package]]
name = "coarsetime"
version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e58eb270476aa4fc7843849f8a35063e8743b4dbcdf6dd0f8ea0886980c204c2"
dependencies = [
"libc",
"wasix",
"wasm-bindgen",
]
[[package]]
name = "color_quant"
version = "1.1.0"
@@ -1449,12 +1571,24 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "const-oid"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d6f2aa4d0537bcc1c74df8755072bd31c1ef1a3a1b85a68e8404a8c353b7b8b"
[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "constant_time_eq"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b"
[[package]]
name = "cookie"
version = "0.18.1"
@@ -1501,6 +1635,12 @@ dependencies = [
"memchr",
]
[[package]]
name = "cpubits"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ef0c543070d296ea414df2dd7625d1b24866ce206709d8a4a424f28377f5861"
[[package]]
name = "cpufeatures"
version = "0.2.17"
@@ -1654,8 +1794,10 @@ version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76"
dependencies = [
"generic-array",
"rand_core 0.6.4",
"subtle",
"zeroize",
]
[[package]]
@@ -1665,9 +1807,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
"generic-array",
"rand_core 0.6.4",
"typenum",
]
[[package]]
name = "crypto-common"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77727bb15fa921304124b128af125e7e3b968275d1b108b379190264f4423710"
dependencies = [
"hybrid-array",
]
[[package]]
name = "cssparser"
version = "0.34.0"
@@ -1691,6 +1843,21 @@ dependencies = [
"syn 2.0.117",
]
[[package]]
name = "ct-codecs"
version = "1.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b10589d1a5e400d61f9f38f12f884cfd080ff345de8f17efda36fe0e4a02aa8"
[[package]]
name = "ctr"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835"
dependencies = [
"cipher 0.4.4",
]
[[package]]
name = "darling"
version = "0.20.11"
@@ -1749,13 +1916,23 @@ dependencies = [
"parking_lot_core",
]
[[package]]
name = "der"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79b71cca7d95d7681a4b3b9cdf63c8dbc3730d0584c2c74e31416d64a90493f4"
dependencies = [
"const-oid 0.6.2",
"der_derive",
]
[[package]]
name = "der"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de"
dependencies = [
"const-oid",
"const-oid 0.9.6",
"zeroize",
]
@@ -1765,7 +1942,7 @@ version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
dependencies = [
"const-oid",
"const-oid 0.9.6",
"pem-rfc7468 0.7.0",
"zeroize",
]
@@ -1780,6 +1957,18 @@ dependencies = [
"zeroize",
]
[[package]]
name = "der_derive"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8aed3b3c608dc56cf36c45fe979d04eda51242e6703d8d0bb03426ef7c41db6a"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"synstructure 0.12.6",
]
[[package]]
name = "deranged"
version = "0.5.8"
@@ -1873,8 +2062,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"const-oid",
"crypto-common",
"const-oid 0.9.6",
"crypto-common 0.1.7",
"subtle",
]
@@ -1965,11 +2154,53 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c"
dependencies = [
"der 0.6.1",
"elliptic-curve",
"rfc6979",
"elliptic-curve 0.12.3",
"rfc6979 0.3.1",
"signature 1.6.4",
]
[[package]]
name = "ecdsa"
version = "0.16.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca"
dependencies = [
"der 0.7.10",
"digest",
"elliptic-curve 0.13.8",
"rfc6979 0.4.0",
"signature 2.2.0",
"spki 0.7.3",
]
[[package]]
name = "ece"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2ea1d2f2cc974957a4e2575d8e5bb494549bab66338d6320c2789abcfff5746"
dependencies = [
"base64 0.21.7",
"byteorder",
"hex",
"hkdf",
"lazy_static",
"once_cell",
"openssl",
"serde",
"sha2",
"thiserror 1.0.69",
]
[[package]]
name = "ed25519-compact"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33ce99a9e19c84beb4cc35ece85374335ccc398240712114c85038319ed709bd"
dependencies = [
"ct-codecs",
"getrandom 0.3.4",
]
[[package]]
name = "ego-tree"
version = "0.9.0"
@@ -1991,16 +2222,37 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3"
dependencies = [
"base16ct",
"base16ct 0.1.1",
"crypto-bigint 0.4.9",
"der 0.6.1",
"digest",
"ff",
"ff 0.12.1",
"generic-array",
"group",
"group 0.12.1",
"pkcs8 0.9.0",
"rand_core 0.6.4",
"sec1",
"sec1 0.3.0",
"subtle",
"zeroize",
]
[[package]]
name = "elliptic-curve"
version = "0.13.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47"
dependencies = [
"base16ct 0.2.0",
"crypto-bigint 0.5.5",
"digest",
"ff 0.13.1",
"generic-array",
"group 0.13.0",
"hkdf",
"pem-rfc7468 0.7.0",
"pkcs8 0.10.2",
"rand_core 0.6.4",
"sec1 0.7.3",
"subtle",
"zeroize",
]
@@ -2186,6 +2438,16 @@ dependencies = [
"subtle",
]
[[package]]
name = "ff"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393"
dependencies = [
"rand_core 0.6.4",
"subtle",
]
[[package]]
name = "find-msvc-tools"
version = "0.1.9"
@@ -2416,6 +2678,7 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
"zeroize",
]
[[package]]
@@ -2468,6 +2731,16 @@ dependencies = [
"wasip3",
]
[[package]]
name = "ghash"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1"
dependencies = [
"opaque-debug",
"polyval",
]
[[package]]
name = "gif"
version = "0.14.1"
@@ -2526,7 +2799,18 @@ version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7"
dependencies = [
"ff",
"ff 0.12.1",
"rand_core 0.6.4",
"subtle",
]
[[package]]
name = "group"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63"
dependencies = [
"ff 0.13.1",
"rand_core 0.6.4",
"subtle",
]
@@ -2689,11 +2973,29 @@ dependencies = [
"digest",
]
[[package]]
name = "hmac-sha1-compact"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0b3ba31f6dc772cc8221ce81dbbbd64fa1e668255a6737d95eeace59b5a8823"
[[package]]
name = "hmac-sha256"
version = "1.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec9d92d097f4749b64e8cc33d924d9f40a2d4eb91402b458014b781f5733d60f"
dependencies = [
"digest",
]
[[package]]
name = "hmac-sha512"
version = "1.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "019ece39bbefc17f13f677a690328cb978dbf6790e141a3c24e66372cb38588b"
dependencies = [
"digest",
]
[[package]]
name = "home"
@@ -2809,6 +3111,15 @@ dependencies = [
"libm",
]
[[package]]
name = "hybrid-array"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3944cf8cf766b40e2a1a333ee5e9b563f854d5fa49d6a8ca2764e97c6eddb214"
dependencies = [
"typenum",
]
[[package]]
name = "hyper"
version = "0.14.32"
@@ -2889,6 +3200,19 @@ dependencies = [
"webpki-roots 1.0.6",
]
[[package]]
name = "hyper-tls"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
dependencies = [
"bytes",
"hyper 0.14.32",
"native-tls",
"tokio",
"tokio-native-tls",
]
[[package]]
name = "hyper-tls"
version = "0.6.0"
@@ -3199,6 +3523,24 @@ dependencies = [
"libc",
]
[[package]]
name = "inout"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
dependencies = [
"generic-array",
]
[[package]]
name = "inout"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4250ce6452e92010fdf7268ccc5d14faa80bb12fc741938534c58f16804e03c7"
dependencies = [
"hybrid-array",
]
[[package]]
name = "insta"
version = "1.47.0"
@@ -3300,13 +3642,53 @@ checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde"
dependencies = [
"base64 0.22.1",
"js-sys",
"pem",
"pem 3.0.6",
"ring",
"serde",
"serde_json",
"simple_asn1",
]
[[package]]
name = "jwt-simple"
version = "0.12.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3991f54af4b009bb6efe01aa5a4fcce9ca52f3de7a104a3f6b6e2ad36c852c48"
dependencies = [
"anyhow",
"binstring",
"blake2b_simd",
"coarsetime",
"ct-codecs",
"ed25519-compact",
"hmac-sha1-compact",
"hmac-sha256",
"hmac-sha512",
"k256",
"p256 0.13.2",
"p384",
"rand 0.8.5",
"serde",
"serde_json",
"superboring",
"thiserror 2.0.18",
"zeroize",
]
[[package]]
name = "k256"
version = "0.13.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b"
dependencies = [
"cfg-if",
"ecdsa 0.16.9",
"elliptic-curve 0.13.8",
"once_cell",
"sha2",
"signature 2.2.0",
]
[[package]]
name = "kqueue"
version = "1.1.1"
@@ -4019,6 +4401,12 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "opaque-debug"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
[[package]]
name = "opendal"
version = "0.54.1"
@@ -4174,8 +4562,32 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594"
dependencies = [
"ecdsa",
"elliptic-curve",
"ecdsa 0.14.8",
"elliptic-curve 0.12.3",
"sha2",
]
[[package]]
name = "p256"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b"
dependencies = [
"ecdsa 0.16.9",
"elliptic-curve 0.13.8",
"primeorder",
"sha2",
]
[[package]]
name = "p384"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe42f1670a52a47d448f14b6a5c61dd78fce51856e68edaa38f7ae3a46b8d6b6"
dependencies = [
"ecdsa 0.16.9",
"elliptic-curve 0.13.8",
"primeorder",
"sha2",
]
@@ -4240,6 +4652,17 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35fb2e5f958ec131621fdd531e9fc186ed768cbe395337403ae56c17a74c68ec"
[[package]]
name = "pem"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb"
dependencies = [
"base64 0.13.1",
"once_cell",
"regex",
]
[[package]]
name = "pem"
version = "3.0.6"
@@ -4446,6 +4869,18 @@ dependencies = [
"miniz_oxide",
]
[[package]]
name = "polyval"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25"
dependencies = [
"cfg-if",
"cpufeatures 0.2.17",
"opaque-debug",
"universal-hash",
]
[[package]]
name = "portable-atomic"
version = "1.13.1"
@@ -4511,6 +4946,15 @@ dependencies = [
"syn 2.0.117",
]
[[package]]
name = "primeorder"
version = "0.13.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6"
dependencies = [
"elliptic-curve 0.13.8",
]
[[package]]
name = "proc-macro-crate"
version = "3.5.0"
@@ -5014,7 +5458,7 @@ dependencies = [
"http-body-util",
"hyper 1.8.1",
"hyper-rustls 0.27.7",
"hyper-tls",
"hyper-tls 0.6.0",
"hyper-util",
"js-sys",
"log",
@@ -5065,6 +5509,16 @@ dependencies = [
"zeroize",
]
[[package]]
name = "rfc6979"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2"
dependencies = [
"hmac",
"subtle",
]
[[package]]
name = "rgb"
version = "0.8.53"
@@ -5139,7 +5593,7 @@ version = "0.9.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d"
dependencies = [
"const-oid",
"const-oid 0.9.6",
"digest",
"num-bigint-dig",
"num-integer",
@@ -5147,6 +5601,7 @@ dependencies = [
"pkcs1",
"pkcs8 0.10.2",
"rand_core 0.6.4",
"sha2",
"signature 2.2.0",
"spki 0.7.3",
"subtle",
@@ -5579,7 +6034,7 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928"
dependencies = [
"base16ct",
"base16ct 0.1.1",
"der 0.6.1",
"generic-array",
"pkcs8 0.9.0",
@@ -5587,6 +6042,31 @@ dependencies = [
"zeroize",
]
[[package]]
name = "sec1"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc"
dependencies = [
"base16ct 0.2.0",
"der 0.7.10",
"generic-array",
"pkcs8 0.10.2",
"subtle",
"zeroize",
]
[[package]]
name = "sec1_decode"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6326ddc956378a0739200b2c30892dccaf198992dfd7323274690b9e188af23"
dependencies = [
"der 0.4.5",
"pem 0.8.3",
"thiserror 1.0.69",
]
[[package]]
name = "security-framework"
version = "3.7.0"
@@ -6331,6 +6811,21 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "superboring"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af44d8b60bc4ffb966f80d1582d579c84f559419e7abafb948d706fc6f95b3d4"
dependencies = [
"aes-gcm",
"aes-keywrap",
"getrandom 0.2.17",
"hmac-sha256",
"hmac-sha512",
"rand 0.8.5",
"rsa",
]
[[package]]
name = "syn"
version = "1.0.109"
@@ -6362,6 +6857,18 @@ dependencies = [
"futures-core",
]
[[package]]
name = "synstructure"
version = "0.12.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"unicode-xid",
]
[[package]]
name = "synstructure"
version = "0.13.2"
@@ -6483,6 +6990,7 @@ dependencies = [
"tracing-subscriber",
"uuid",
"validator",
"web-push",
]
[[package]]
@@ -7063,6 +7571,16 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
[[package]]
name = "universal-hash"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea"
dependencies = [
"crypto-common 0.1.7",
"subtle",
]
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
@@ -7300,6 +7818,15 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"
[[package]]
name = "wasix"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1757e0d1f8456693c7e5c6c629bdb54884e032aa0bb53c155f6a39f94440d332"
dependencies = [
"wasi",
]
[[package]]
name = "wasm-bindgen"
version = "0.2.114"
@@ -7406,6 +7933,28 @@ dependencies = [
"semver",
]
[[package]]
name = "web-push"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5c305b9ee2993ab68b7744b13ef32231d83600dd879ac8183b4c76ae31d28ac"
dependencies = [
"async-trait",
"chrono",
"ct-codecs",
"ece",
"http 0.2.12",
"hyper 0.14.32",
"hyper-tls 0.5.0",
"jwt-simple",
"log",
"pem 3.0.6",
"sec1_decode",
"serde",
"serde_derive",
"serde_json",
]
[[package]]
name = "web-sys"
version = "0.3.91"
@@ -7960,7 +8509,7 @@ dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
"synstructure",
"synstructure 0.13.2",
]
[[package]]
@@ -8001,7 +8550,7 @@ dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
"synstructure",
"synstructure 0.13.2",
]
[[package]]

View File

@@ -45,6 +45,7 @@ async-stream = "0.3"
base64 = "0.22"
aws-config = "1"
aws-sdk-s3 = "1"
web-push = { version = "0.11.0", default-features = false, features = ["hyper-client"] }
[[bin]]
name = "termi_api-cli"

View File

@@ -1,13 +1,18 @@
FROM rust:1.94-trixie AS builder
# syntax=docker/dockerfile:1.7
FROM rust:1.94-trixie AS chef
RUN cargo install cargo-chef --locked
WORKDIR /app
COPY Cargo.toml Cargo.lock ./
COPY migration/Cargo.toml migration/Cargo.toml
COPY src src
COPY migration/src migration/src
COPY config config
COPY assets assets
FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release --locked --recipe-path recipe.json
COPY . .
RUN cargo build --release --locked --bin termi_api-cli
FROM debian:trixie-slim AS runtime

View File

@@ -0,0 +1,40 @@
<!doctype html>
<html lang="zh-CN">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>404 Not Found</title>
<style>
body {
margin: 0;
min-height: 100vh;
display: grid;
place-items: center;
background: #0f172a;
color: #e2e8f0;
font: 16px/1.6 -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
}
main {
padding: 24px;
text-align: center;
}
h1 {
margin: 0 0 8px;
font-size: 28px;
}
p {
margin: 0;
color: #94a3b8;
}
</style>
</head>
<body>
<main>
<h1>404</h1>
<p>Not Found</p>
</main>
</body>
</html>

View File

@@ -37,6 +37,10 @@ mod m20260331_000026_create_subscriptions;
mod m20260331_000027_create_notification_deliveries;
mod m20260331_000028_expand_subscriptions_and_deliveries;
mod m20260331_000029_add_subscription_popup_settings_to_site_settings;
mod m20260401_000030_add_public_security_and_web_push_to_site_settings;
mod m20260401_000031_add_notification_channel_type_to_site_settings;
mod m20260401_000032_add_runtime_security_keys_to_site_settings;
mod m20260401_000033_add_taxonomy_metadata_and_media_assets;
pub struct Migrator;
#[async_trait::async_trait]
@@ -78,6 +82,10 @@ impl MigratorTrait for Migrator {
Box::new(m20260331_000027_create_notification_deliveries::Migration),
Box::new(m20260331_000028_expand_subscriptions_and_deliveries::Migration),
Box::new(m20260331_000029_add_subscription_popup_settings_to_site_settings::Migration),
Box::new(m20260401_000030_add_public_security_and_web_push_to_site_settings::Migration),
Box::new(m20260401_000031_add_notification_channel_type_to_site_settings::Migration),
Box::new(m20260401_000032_add_runtime_security_keys_to_site_settings::Migration),
Box::new(m20260401_000033_add_taxonomy_metadata_and_media_assets::Migration),
// inject-above (do not remove this comment)
]
}

View File

@@ -0,0 +1,59 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let table = Alias::new("site_settings");
manager
.alter_table(
Table::alter()
.table(table.clone())
.add_column_if_not_exists(
ColumnDef::new(Alias::new("comment_turnstile_enabled"))
.boolean()
.null(),
)
.add_column_if_not_exists(
ColumnDef::new(Alias::new("subscription_turnstile_enabled"))
.boolean()
.null(),
)
.add_column_if_not_exists(
ColumnDef::new(Alias::new("web_push_enabled"))
.boolean()
.null(),
)
.to_owned(),
)
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let table = Alias::new("site_settings");
for column in [
"web_push_enabled",
"subscription_turnstile_enabled",
"comment_turnstile_enabled",
] {
if manager.has_column("site_settings", column).await? {
manager
.alter_table(
Table::alter()
.table(table.clone())
.drop_column(Alias::new(column))
.to_owned(),
)
.await?;
}
}
Ok(())
}
}

View File

@@ -0,0 +1,51 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let table = Alias::new("site_settings");
if !manager
.has_column("site_settings", "notification_channel_type")
.await?
{
manager
.alter_table(
Table::alter()
.table(table.clone())
.add_column(
ColumnDef::new(Alias::new("notification_channel_type"))
.string()
.null(),
)
.to_owned(),
)
.await?;
}
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let table = Alias::new("site_settings");
if manager
.has_column("site_settings", "notification_channel_type")
.await?
{
manager
.alter_table(
Table::alter()
.table(table)
.drop_column(Alias::new("notification_channel_type"))
.to_owned(),
)
.await?;
}
Ok(())
}
}

View File

@@ -0,0 +1,71 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let table = Alias::new("site_settings");
manager
.alter_table(
Table::alter()
.table(table)
.add_column_if_not_exists(
ColumnDef::new(Alias::new("turnstile_site_key"))
.text()
.null(),
)
.add_column_if_not_exists(
ColumnDef::new(Alias::new("turnstile_secret_key"))
.text()
.null(),
)
.add_column_if_not_exists(
ColumnDef::new(Alias::new("web_push_vapid_public_key"))
.text()
.null(),
)
.add_column_if_not_exists(
ColumnDef::new(Alias::new("web_push_vapid_private_key"))
.text()
.null(),
)
.add_column_if_not_exists(
ColumnDef::new(Alias::new("web_push_vapid_subject"))
.text()
.null(),
)
.to_owned(),
)
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let table = Alias::new("site_settings");
for column in [
"web_push_vapid_subject",
"web_push_vapid_private_key",
"web_push_vapid_public_key",
"turnstile_secret_key",
"turnstile_site_key",
] {
if manager.has_column("site_settings", column).await? {
manager
.alter_table(
Table::alter()
.table(table.clone())
.drop_column(Alias::new(column))
.to_owned(),
)
.await?;
}
}
Ok(())
}
}

View File

@@ -0,0 +1,161 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
for table_name in ["categories", "tags"] {
if !manager.has_column(table_name, "description").await? {
manager
.alter_table(
Table::alter()
.table(Alias::new(table_name))
.add_column(ColumnDef::new(Alias::new("description")).text().null())
.to_owned(),
)
.await?;
}
if !manager.has_column(table_name, "cover_image").await? {
manager
.alter_table(
Table::alter()
.table(Alias::new(table_name))
.add_column(ColumnDef::new(Alias::new("cover_image")).string().null())
.to_owned(),
)
.await?;
}
if !manager.has_column(table_name, "accent_color").await? {
manager
.alter_table(
Table::alter()
.table(Alias::new(table_name))
.add_column(ColumnDef::new(Alias::new("accent_color")).string().null())
.to_owned(),
)
.await?;
}
if !manager.has_column(table_name, "seo_title").await? {
manager
.alter_table(
Table::alter()
.table(Alias::new(table_name))
.add_column(ColumnDef::new(Alias::new("seo_title")).string().null())
.to_owned(),
)
.await?;
}
if !manager.has_column(table_name, "seo_description").await? {
manager
.alter_table(
Table::alter()
.table(Alias::new(table_name))
.add_column(ColumnDef::new(Alias::new("seo_description")).text().null())
.to_owned(),
)
.await?;
}
}
if !manager.has_table("media_assets").await? {
manager
.create_table(
Table::create()
.table(Alias::new("media_assets"))
.if_not_exists()
.col(
ColumnDef::new(Alias::new("created_at"))
.timestamp_with_time_zone()
.not_null()
.default(Expr::current_timestamp()),
)
.col(
ColumnDef::new(Alias::new("updated_at"))
.timestamp_with_time_zone()
.not_null()
.default(Expr::current_timestamp()),
)
.col(
ColumnDef::new(Alias::new("id"))
.integer()
.not_null()
.auto_increment()
.primary_key(),
)
.col(ColumnDef::new(Alias::new("object_key")).string().not_null())
.col(ColumnDef::new(Alias::new("title")).string().null())
.col(ColumnDef::new(Alias::new("alt_text")).string().null())
.col(ColumnDef::new(Alias::new("caption")).text().null())
.col(ColumnDef::new(Alias::new("tags")).json_binary().null())
.col(ColumnDef::new(Alias::new("notes")).text().null())
.to_owned(),
)
.await?;
}
manager
.create_index(
Index::create()
.name("idx_media_assets_object_key_unique")
.table(Alias::new("media_assets"))
.col(Alias::new("object_key"))
.unique()
.if_not_exists()
.to_owned(),
)
.await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
if manager
.has_index("media_assets", "idx_media_assets_object_key_unique")
.await?
{
manager
.drop_index(
Index::drop()
.name("idx_media_assets_object_key_unique")
.table(Alias::new("media_assets"))
.to_owned(),
)
.await?;
}
if manager.has_table("media_assets").await? {
manager
.drop_table(Table::drop().table(Alias::new("media_assets")).to_owned())
.await?;
}
for table_name in ["categories", "tags"] {
for column in [
"seo_description",
"seo_title",
"accent_color",
"cover_image",
"description",
] {
if manager.has_column(table_name, column).await? {
manager
.alter_table(
Table::alter()
.table(Alias::new(table_name))
.drop_column(Alias::new(column))
.to_owned(),
)
.await?;
}
}
}
Ok(())
}
}

View File

@@ -106,6 +106,7 @@ impl Hooks for App {
AppRoutes::with_default_routes() // controller routes below
.add_route(controllers::health::routes())
.add_route(controllers::admin_api::routes())
.add_route(controllers::admin_taxonomy::routes())
.add_route(controllers::admin_ops::routes())
.add_route(controllers::review::routes())
.add_route(controllers::category::routes())

View File

@@ -22,7 +22,7 @@ use crate::{
ai_chunks, comment_blacklist, comment_persona_analysis_logs, comments, friend_links, posts,
reviews,
},
services::{admin_audit, ai, analytics, comment_guard, content, storage},
services::{admin_audit, ai, analytics, comment_guard, content, media_assets, storage},
};
#[derive(Clone, Debug, Deserialize)]
@@ -170,6 +170,14 @@ pub struct AdminSiteSettingsResponse {
pub music_playlist: Vec<site_settings::MusicTrackPayload>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_turnstile_enabled: bool,
pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>,
pub turnstile_secret_key: Option<String>,
pub web_push_vapid_public_key: Option<String>,
pub web_push_vapid_private_key: Option<String>,
pub web_push_vapid_subject: Option<String>,
pub ai_provider: Option<String>,
pub ai_api_base: Option<String>,
pub ai_api_key: Option<String>,
@@ -196,6 +204,7 @@ pub struct AdminSiteSettingsResponse {
pub seo_default_og_image: Option<String>,
pub seo_default_twitter_handle: Option<String>,
pub notification_webhook_url: Option<String>,
pub notification_channel_type: String,
pub notification_comment_enabled: bool,
pub notification_friend_link_enabled: bool,
pub subscription_popup_enabled: bool,
@@ -258,6 +267,11 @@ pub struct AdminMediaObjectResponse {
pub url: String,
pub size_bytes: i64,
pub last_modified: Option<String>,
pub title: Option<String>,
pub alt_text: Option<String>,
pub caption: Option<String>,
pub tags: Vec<String>,
pub notes: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
@@ -304,6 +318,32 @@ pub struct AdminMediaReplaceResponse {
pub url: String,
}
/// Request body for the media metadata endpoint.
///
/// `key` identifies the stored object; all other fields are optional and
/// absent fields default to `None` via `#[serde(default)]`.
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaMetadataPayload {
    pub key: String,
    #[serde(default)]
    pub title: Option<String>,
    #[serde(default)]
    pub alt_text: Option<String>,
    #[serde(default)]
    pub caption: Option<String>,
    #[serde(default)]
    pub tags: Option<Vec<String>>,
    #[serde(default)]
    pub notes: Option<String>,
}

/// Response for the media metadata endpoint, echoing the persisted values
/// so the client can refresh its UI state without another fetch.
#[derive(Clone, Debug, Serialize)]
pub struct AdminMediaMetadataResponse {
    pub saved: bool,
    pub key: String,
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    pub tags: Vec<String>,
    pub notes: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct AdminMediaListQuery {
pub prefix: Option<String>,
@@ -634,6 +674,25 @@ fn normalize_media_key(value: Option<String>) -> Option<String> {
})
}
/// Merges a storage listing entry with its optional metadata row into the
/// admin API shape. A missing metadata row yields `None` fields and an
/// empty tag list.
fn build_media_object_response(
    item: storage::StoredObjectSummary,
    metadata: Option<&crate::models::_entities::media_assets::Model>,
) -> AdminMediaObjectResponse {
    AdminMediaObjectResponse {
        key: item.key,
        url: item.url,
        size_bytes: item.size_bytes,
        last_modified: item.last_modified,
        title: metadata.and_then(|entry| entry.title.clone()),
        alt_text: metadata.and_then(|entry| entry.alt_text.clone()),
        caption: metadata.and_then(|entry| entry.caption.clone()),
        // tag_list converts the stored tags value into Vec<String>;
        // no row means no tags.
        tags: metadata
            .map(media_assets::tag_list)
            .unwrap_or_default(),
        notes: metadata.and_then(|entry| entry.notes.clone()),
    }
}
fn tech_stack_values(value: &Option<serde_json::Value>) -> Vec<String> {
value
.as_ref()
@@ -665,6 +724,11 @@ fn build_settings_response(
) -> AdminSiteSettingsResponse {
let ai_providers = site_settings::ai_provider_configs(&item);
let ai_active_provider_id = site_settings::active_ai_provider_id(&item);
let turnstile_site_key = crate::services::turnstile::site_key(&item);
let turnstile_secret_key = crate::services::turnstile::secret_key(&item);
let web_push_vapid_public_key = crate::services::web_push::public_key(&item);
let web_push_vapid_private_key = crate::services::web_push::private_key(&item);
let web_push_vapid_subject = crate::services::web_push::vapid_subject(&item);
AdminSiteSettingsResponse {
id: item.id,
@@ -687,6 +751,14 @@ fn build_settings_response(
music_playlist: music_playlist_values(&item.music_playlist),
ai_enabled: item.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: item.paragraph_comments_enabled.unwrap_or(true),
comment_turnstile_enabled: item.comment_turnstile_enabled.unwrap_or(false),
subscription_turnstile_enabled: item.subscription_turnstile_enabled.unwrap_or(false),
web_push_enabled: item.web_push_enabled.unwrap_or(false),
turnstile_site_key,
turnstile_secret_key,
web_push_vapid_public_key,
web_push_vapid_private_key,
web_push_vapid_subject,
ai_provider: item.ai_provider,
ai_api_base: item.ai_api_base,
ai_api_key: item.ai_api_key,
@@ -713,6 +785,9 @@ fn build_settings_response(
seo_default_og_image: item.seo_default_og_image,
seo_default_twitter_handle: item.seo_default_twitter_handle,
notification_webhook_url: item.notification_webhook_url,
notification_channel_type: item
.notification_channel_type
.unwrap_or_else(|| "webhook".to_string()),
notification_comment_enabled: item.notification_comment_enabled.unwrap_or(false),
notification_friend_link_enabled: item.notification_friend_link_enabled.unwrap_or(false),
subscription_popup_enabled: item
@@ -1115,14 +1190,18 @@ pub async fn list_media_objects(
check_auth(&headers)?;
let settings = storage::require_r2_settings(&ctx).await?;
let items = storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200))
.await?
let objects = storage::list_objects(&ctx, query.prefix.as_deref(), query.limit.unwrap_or(200))
.await?;
let keys = objects
.iter()
.map(|item| item.key.clone())
.collect::<Vec<_>>();
let metadata_map = media_assets::list_by_keys(&ctx, &keys).await?;
let items = objects
.into_iter()
.map(|item| AdminMediaObjectResponse {
key: item.key,
url: item.url,
size_bytes: item.size_bytes,
last_modified: item.last_modified,
.map(|item| {
let metadata = metadata_map.get(&item.key);
build_media_object_response(item, metadata)
})
.collect::<Vec<_>>();
@@ -1148,6 +1227,9 @@ pub async fn delete_media_object(
}
storage::delete_object(&ctx, key).await?;
if let Err(error) = media_assets::delete_by_key(&ctx, key).await {
tracing::warn!(?error, key, "failed to delete media metadata after object deletion");
}
format::json(AdminMediaDeleteResponse {
deleted: true,
@@ -1241,7 +1323,12 @@ pub async fn batch_delete_media_objects(
for key in keys {
match storage::delete_object(&ctx, &key).await {
Ok(()) => deleted.push(key),
Ok(()) => {
if let Err(error) = media_assets::delete_by_key(&ctx, &key).await {
tracing::warn!(?error, key, "failed to delete media metadata after batch removal");
}
deleted.push(key)
}
Err(_) => failed.push(key),
}
}
@@ -1249,6 +1336,43 @@ pub async fn batch_delete_media_objects(
format::json(AdminMediaBatchDeleteResponse { deleted, failed })
}
/// `PATCH /storage/media/metadata` — upserts editable metadata (title,
/// alt text, caption, tags, notes) for a stored media object.
///
/// Requires admin auth (`check_auth`). Rejects a blank `key` with 400
/// before touching the database, and returns the persisted values.
#[debug_handler]
pub async fn update_media_object_metadata(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<AdminMediaMetadataPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let key = payload.key.trim();
    if key.is_empty() {
        return Err(Error::BadRequest("缺少对象 key".to_string()));
    }
    // Upsert: creates the metadata row on first write, updates it afterwards.
    let metadata = media_assets::upsert_by_key(
        &ctx,
        key,
        media_assets::MediaAssetMetadataInput {
            title: payload.title,
            alt_text: payload.alt_text,
            caption: payload.caption,
            tags: payload.tags,
            notes: payload.notes,
        },
    )
    .await?;
    format::json(AdminMediaMetadataResponse {
        saved: true,
        key: metadata.object_key.clone(),
        title: metadata.title.clone(),
        alt_text: metadata.alt_text.clone(),
        caption: metadata.caption.clone(),
        tags: media_assets::tag_list(&metadata),
        notes: metadata.notes.clone(),
    })
}
#[debug_handler]
pub async fn replace_media_object(
headers: HeaderMap,
@@ -1831,6 +1955,7 @@ pub fn routes() -> Routes {
"/storage/media/batch-delete",
post(batch_delete_media_objects),
)
.add("/storage/media/metadata", patch(update_media_object_metadata))
.add("/storage/media/replace", post(replace_media_object))
.add(
"/comments/blacklist",

View File

@@ -11,7 +11,10 @@ use crate::{
models::_entities::{
admin_audit_logs, notification_deliveries, post_revisions, subscriptions,
},
services::{admin_audit, post_revisions as revision_service, subscriptions as subscription_service},
services::{
admin_audit, backups, post_revisions as revision_service,
subscriptions as subscription_service,
},
};
#[derive(Clone, Debug, Default, Deserialize)]
@@ -82,6 +85,13 @@ pub struct DigestDispatchRequest {
pub period: Option<String>,
}
/// Request body for the site-backup import endpoint: the backup document
/// plus an optional `mode` string forwarded verbatim to
/// `backups::import_site_backup`.
#[derive(Clone, Debug, Deserialize)]
pub struct SiteBackupImportRequest {
    pub backup: backups::SiteBackupDocument,
    #[serde(default)]
    pub mode: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
pub struct PostRevisionListItem {
pub id: i32,
@@ -440,6 +450,25 @@ pub async fn send_subscription_digest(
format::json(summary)
}
/// `GET /site-backup/export` — admin-only; returns the full site backup
/// document produced by the backups service.
#[debug_handler]
pub async fn export_site_backup(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    format::json(backups::export_site_backup(&ctx).await?)
}

/// `POST /site-backup/import` — admin-only; restores site state from an
/// uploaded backup document. The optional `mode` is passed through to the
/// backups service unchanged.
#[debug_handler]
pub async fn import_site_backup(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<SiteBackupImportRequest>,
) -> Result<Response> {
    check_auth(&headers)?;
    format::json(backups::import_site_backup(&ctx, payload.backup, payload.mode.as_deref()).await?)
}
pub fn routes() -> Routes {
Routes::new()
.prefix("/api/admin")
@@ -452,4 +481,6 @@ pub fn routes() -> Routes {
.add("/subscriptions/digest", post(send_subscription_digest))
.add("/subscriptions/{id}", patch(update_subscription).delete(delete_subscription))
.add("/subscriptions/{id}/test", post(test_subscription))
.add("/site-backup/export", get(export_site_backup))
.add("/site-backup/import", post(import_site_backup))
}

View File

@@ -0,0 +1,465 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use axum::http::HeaderMap;
use loco_rs::prelude::*;
use sea_orm::{ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use crate::{
controllers::admin::check_auth,
models::_entities::{categories, posts, tags},
services::content,
};
/// Shared create/update payload for categories and tags. `name` is
/// validated later by `normalized_name`; every other field is optional
/// and blank values are collapsed to `None` by the handlers.
#[derive(Clone, Debug, Deserialize)]
pub struct TaxonomyPayload {
    pub name: Option<String>,
    #[serde(default)]
    pub slug: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub cover_image: Option<String>,
    #[serde(default)]
    pub accent_color: Option<String>,
    #[serde(default)]
    pub seo_title: Option<String>,
    #[serde(default)]
    pub seo_description: Option<String>,
}

/// Admin view of one category: DB fields plus the number of posts that
/// reference it and RFC 3339 timestamps.
#[derive(Clone, Debug, Serialize)]
pub struct AdminCategoryRecord {
    pub id: i32,
    pub name: String,
    pub slug: String,
    pub count: usize,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    pub created_at: String,
    pub updated_at: String,
}

/// Admin view of one tag; field-for-field mirror of `AdminCategoryRecord`.
#[derive(Clone, Debug, Serialize)]
pub struct AdminTagRecord {
    pub id: i32,
    pub name: String,
    pub slug: String,
    pub count: usize,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    pub created_at: String,
    pub updated_at: String,
}
/// Builds a URL slug from a display name.
///
/// ASCII letters and digits are kept (lowercased); any run of whitespace,
/// `-`, or `_` collapses into a single dash; every other character is
/// dropped. The result never starts or ends with a dash, so purely
/// non-ASCII input (e.g. CJK names) yields an empty slug — callers must
/// handle that case.
fn slugify(value: &str) -> String {
    let mut slug = String::with_capacity(value.len());
    let mut pending_separator = false;
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            // Emit at most one dash between alphanumeric runs, never at the front.
            if pending_separator && !slug.is_empty() {
                slug.push('-');
            }
            pending_separator = false;
            slug.push(ch.to_ascii_lowercase());
        } else if ch.is_whitespace() || ch == '-' || ch == '_' {
            pending_separator = true;
        }
        // All other characters are silently dropped.
    }
    slug
}
fn normalized_name(params: &TaxonomyPayload, label: &str) -> Result<String> {
params
.name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
.ok_or_else(|| Error::BadRequest(format!("{label}名称不能为空")))
}
fn normalized_slug(value: Option<&str>, fallback: &str, label: &str) -> Result<String> {
let slug = value
.map(str::trim)
.filter(|item| !item.is_empty())
.map(ToString::to_string)
.unwrap_or_else(|| slugify(fallback));
if slug.is_empty() {
return Err(Error::BadRequest(format!(
"{label} slug 不能为空,请填写英文字母 / 数字 / 连字符"
)));
}
Ok(slug)
}
/// Lowercases and trims a comparison token, used for case-insensitive
/// name/slug matching.
fn normalized_token(value: &str) -> String {
    value
        .trim()
        .chars()
        .map(|ch| ch.to_ascii_lowercase())
        .collect()
}

/// Trims an optional string; blank or absent input becomes `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    match value {
        Some(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        None => None,
    }
}
/// Reads a post's `tags` JSON value as a list of normalized tokens.
/// Malformed or missing JSON yields an empty list; blank tags are dropped.
fn post_tag_values(post: &posts::Model) -> Vec<String> {
    let raw = post
        .tags
        .as_ref()
        .and_then(|value| serde_json::from_value::<Vec<String>>(value.clone()).ok())
        .unwrap_or_default();
    let mut values = Vec::with_capacity(raw.len());
    for item in raw {
        let token = normalized_token(&item);
        if !token.is_empty() {
            values.push(token);
        }
    }
    values
}
/// Display name for a category; legacy rows without a name fall back to the slug.
fn category_name(item: &categories::Model) -> String {
    match &item.name {
        Some(name) => name.clone(),
        None => item.slug.clone(),
    }
}

/// Display name for a tag; legacy rows without a name fall back to the slug.
fn tag_name(item: &tags::Model) -> String {
    match &item.name {
        Some(name) => name.clone(),
        None => item.slug.clone(),
    }
}
/// Assembles the admin-facing category payload. `count` is the number of
/// posts whose `category` field matches either the display name or the
/// slug (case-insensitive, trimmed via `normalized_token`).
fn build_category_record(item: &categories::Model, post_items: &[posts::Model]) -> AdminCategoryRecord {
    let display_name = category_name(item);
    let aliases = [normalized_token(&display_name), normalized_token(&item.slug)];
    let count = post_items
        .iter()
        .filter_map(|post| post.category.as_deref().map(normalized_token))
        .filter(|value| aliases.contains(value))
        .count();
    AdminCategoryRecord {
        id: item.id,
        name: display_name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}

/// Assembles the admin-facing tag payload. A post counts once when any of
/// its normalized tags matches the tag's display name or slug.
fn build_tag_record(item: &tags::Model, post_items: &[posts::Model]) -> AdminTagRecord {
    let display_name = tag_name(item);
    let aliases = [normalized_token(&display_name), normalized_token(&item.slug)];
    let count = post_items
        .iter()
        .filter(|post| {
            post_tag_values(post)
                .iter()
                .any(|value| aliases.contains(value))
        })
        .count();
    AdminTagRecord {
        id: item.id,
        name: display_name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}
async fn load_category(ctx: &AppContext, id: i32) -> Result<categories::Model> {
categories::Entity::find_by_id(id)
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)
}
async fn load_tag(ctx: &AppContext, id: i32) -> Result<tags::Model> {
tags::Entity::find_by_id(id)
.one(&ctx.db)
.await?
.ok_or(Error::NotFound)
}
async fn ensure_category_slug_unique(
ctx: &AppContext,
slug: &str,
exclude_id: Option<i32>,
) -> Result<()> {
if let Some(existing) = categories::Entity::find()
.filter(categories::Column::Slug.eq(slug))
.one(&ctx.db)
.await?
{
if Some(existing.id) != exclude_id {
return Err(Error::BadRequest("分类 slug 已存在".to_string()));
}
}
Ok(())
}
async fn ensure_tag_slug_unique(ctx: &AppContext, slug: &str, exclude_id: Option<i32>) -> Result<()> {
if let Some(existing) = tags::Entity::find()
.filter(tags::Column::Slug.eq(slug))
.one(&ctx.db)
.await?
{
if Some(existing.id) != exclude_id {
return Err(Error::BadRequest("标签 slug 已存在".to_string()));
}
}
Ok(())
}
/// Loads every post; used to compute per-taxonomy usage counts in memory.
async fn load_posts(ctx: &AppContext) -> Result<Vec<posts::Model>> {
    let items = posts::Entity::find().all(&ctx.db).await?;
    Ok(items)
}
/// `GET /api/admin/categories` — all categories ordered by slug, each with
/// a post count. Markdown posts are synced into the DB first so counts
/// reflect on-disk content.
#[debug_handler]
pub async fn list_categories(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
    check_auth(&headers)?;
    content::sync_markdown_posts(&ctx).await?;
    let items = categories::Entity::find()
        .order_by_asc(categories::Column::Slug)
        .all(&ctx.db)
        .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(
        items.into_iter()
            .map(|item| build_category_record(&item, &post_items))
            .collect::<Vec<_>>(),
    )
}

/// `POST /api/admin/categories` — validates name/slug, enforces slug
/// uniqueness, inserts the row, and returns it with its (initially zero)
/// post count.
#[debug_handler]
pub async fn create_category(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "分类")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "分类")?;
    ensure_category_slug_unique(&ctx, &slug, None).await?;
    let item = categories::ActiveModel {
        name: Set(Some(name)),
        slug: Set(slug),
        description: Set(trim_to_option(payload.description)),
        cover_image: Set(trim_to_option(payload.cover_image)),
        accent_color: Set(trim_to_option(payload.accent_color)),
        seo_title: Set(trim_to_option(payload.seo_title)),
        seo_description: Set(trim_to_option(payload.seo_description)),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_category_record(&item, &post_items))
}

/// `PATCH /api/admin/categories/{id}` — updates a category.
///
/// When the effective display name changes, markdown front-matter
/// referencing the old name/slug is rewritten on disk *before* the DB row
/// is updated, then posts are re-synced.
/// NOTE(review): if the DB update fails after the file rewrite, disk and
/// DB diverge until the next successful sync — confirm this is acceptable.
#[debug_handler]
pub async fn update_category(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "分类")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "分类")?;
    ensure_category_slug_unique(&ctx, &slug, Some(id)).await?;
    let item = load_category(&ctx, id).await?;
    let previous_name = item.name.clone();
    let previous_slug = item.slug.clone();
    // Only rewrite markdown references when the trimmed previous name
    // differs from the new name (a blank/absent old name always rewrites).
    if previous_name
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
        != Some(name.as_str())
    {
        content::rewrite_category_references(previous_name.as_deref(), &previous_slug, Some(&name))?;
    }
    let mut active = item.into_active_model();
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(payload.description));
    active.cover_image = Set(trim_to_option(payload.cover_image));
    active.accent_color = Set(trim_to_option(payload.accent_color));
    active.seo_title = Set(trim_to_option(payload.seo_title));
    active.seo_description = Set(trim_to_option(payload.seo_description));
    let updated = active.update(&ctx.db).await?;
    content::sync_markdown_posts(&ctx).await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_category_record(&updated, &post_items))
}

/// `DELETE /api/admin/categories/{id}` — strips references from markdown
/// files (new value `None`), deletes the row, and re-syncs posts.
#[debug_handler]
pub async fn delete_category(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let item = load_category(&ctx, id).await?;
    content::rewrite_category_references(item.name.as_deref(), &item.slug, None)?;
    item.delete(&ctx.db).await?;
    content::sync_markdown_posts(&ctx).await?;
    format::empty()
}
/// `GET /api/admin/tags` — all tags ordered by slug, each with a post
/// count, after syncing markdown posts into the DB.
#[debug_handler]
pub async fn list_tags(headers: HeaderMap, State(ctx): State<AppContext>) -> Result<Response> {
    check_auth(&headers)?;
    content::sync_markdown_posts(&ctx).await?;
    let items = tags::Entity::find()
        .order_by_asc(tags::Column::Slug)
        .all(&ctx.db)
        .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(
        items.into_iter()
            .map(|item| build_tag_record(&item, &post_items))
            .collect::<Vec<_>>(),
    )
}

/// `POST /api/admin/tags` — validates name/slug, enforces slug
/// uniqueness, inserts the row, and returns it with its post count.
#[debug_handler]
pub async fn create_tag(
    headers: HeaderMap,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "标签")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "标签")?;
    ensure_tag_slug_unique(&ctx, &slug, None).await?;
    let item = tags::ActiveModel {
        name: Set(Some(name)),
        slug: Set(slug),
        description: Set(trim_to_option(payload.description)),
        cover_image: Set(trim_to_option(payload.cover_image)),
        accent_color: Set(trim_to_option(payload.accent_color)),
        seo_title: Set(trim_to_option(payload.seo_title)),
        seo_description: Set(trim_to_option(payload.seo_description)),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_tag_record(&item, &post_items))
}

/// `PATCH /api/admin/tags/{id}` — updates a tag; when the effective
/// display name changes, markdown references are rewritten on disk before
/// the DB update (same ordering caveat as `update_category`).
#[debug_handler]
pub async fn update_tag(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
    Json(payload): Json<TaxonomyPayload>,
) -> Result<Response> {
    check_auth(&headers)?;
    let name = normalized_name(&payload, "标签")?;
    let slug = normalized_slug(payload.slug.as_deref(), &name, "标签")?;
    ensure_tag_slug_unique(&ctx, &slug, Some(id)).await?;
    let item = load_tag(&ctx, id).await?;
    let previous_name = item.name.clone();
    let previous_slug = item.slug.clone();
    // Only rewrite markdown references when the trimmed previous name
    // differs from the new name.
    if previous_name
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
        != Some(name.as_str())
    {
        content::rewrite_tag_references(previous_name.as_deref(), &previous_slug, Some(&name))?;
    }
    let mut active = item.into_active_model();
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(payload.description));
    active.cover_image = Set(trim_to_option(payload.cover_image));
    active.accent_color = Set(trim_to_option(payload.accent_color));
    active.seo_title = Set(trim_to_option(payload.seo_title));
    active.seo_description = Set(trim_to_option(payload.seo_description));
    let updated = active.update(&ctx.db).await?;
    content::sync_markdown_posts(&ctx).await?;
    let post_items = load_posts(&ctx).await?;
    format::json(build_tag_record(&updated, &post_items))
}

/// `DELETE /api/admin/tags/{id}` — strips references from markdown files,
/// deletes the row, and re-syncs posts.
#[debug_handler]
pub async fn delete_tag(
    headers: HeaderMap,
    Path(id): Path<i32>,
    State(ctx): State<AppContext>,
) -> Result<Response> {
    check_auth(&headers)?;
    let item = load_tag(&ctx, id).await?;
    content::rewrite_tag_references(item.name.as_deref(), &item.slug, None)?;
    item.delete(&ctx.db).await?;
    content::sync_markdown_posts(&ctx).await?;
    format::empty()
}
/// Admin taxonomy routes: CRUD for categories and tags. Every handler
/// enforces admin auth via `check_auth`.
pub fn routes() -> Routes {
    Routes::new()
        .add(
            "/api/admin/categories",
            get(list_categories).post(create_category),
        )
        .add(
            "/api/admin/categories/{id}",
            patch(update_category).delete(delete_category),
        )
        .add("/api/admin/tags", get(list_tags).post(create_tag))
        .add("/api/admin/tags/{id}", patch(update_tag).delete(delete_tag))
}

View File

@@ -14,12 +14,41 @@ pub struct CategorySummary {
pub name: String,
pub slug: String,
pub count: usize,
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
pub seo_description: Option<String>,
}
/// Public API shape for a single category row; timestamps are RFC 3339.
#[derive(Clone, Debug, Serialize)]
pub struct CategoryRecord {
    pub id: i32,
    // `None` is possible for rows that only carry a slug (display code
    // falls back to the slug in that case).
    pub name: Option<String>,
    pub slug: String,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    pub created_at: String,
    pub updated_at: String,
}

/// Create/update payload for categories. Blank optional fields are
/// normalized to `None` via `trim_to_option` in the handlers.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Params {
    pub name: Option<String>,
    pub slug: Option<String>,
    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub cover_image: Option<String>,
    #[serde(default)]
    pub accent_color: Option<String>,
    #[serde(default)]
    pub seo_title: Option<String>,
    #[serde(default)]
    pub seo_description: Option<String>,
}
fn slugify(value: &str) -> String {
@@ -39,6 +68,17 @@ fn slugify(value: &str) -> String {
slug.trim_matches('-').to_string()
}
/// Normalizes optional user input: trims surrounding whitespace and maps
/// blank or absent strings to `None`.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value
        .map(|item| item.trim().to_string())
        .filter(|item| !item.is_empty())
}
fn normalized_name(params: &Params) -> Result<String> {
let name = params
.name
@@ -60,6 +100,50 @@ fn normalized_slug(params: &Params, fallback: &str) -> String {
.unwrap_or_else(|| slugify(fallback))
}
/// Display name for a category; rows without a name fall back to the slug.
fn category_name(item: &categories::Model) -> String {
    if let Some(name) = &item.name {
        name.clone()
    } else {
        item.slug.clone()
    }
}
/// Builds the public category summary. `count` tallies posts whose
/// `category` field matches the category's display name OR slug,
/// trimmed and ASCII-case-insensitively — a looser rule than the old
/// exact-name match, so slug-referencing posts are counted too.
fn build_summary(item: &categories::Model, post_items: &[posts::Model]) -> CategorySummary {
    let name = category_name(item);
    let count = post_items
        .iter()
        .filter(|post| {
            post.category
                .as_deref()
                .map(str::trim)
                .is_some_and(|value| value.eq_ignore_ascii_case(&name) || value.eq_ignore_ascii_case(&item.slug))
        })
        .count();
    CategorySummary {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
    }
}
/// Converts a category row into its API record, formatting both
/// timestamps as RFC 3339 strings.
fn build_record(item: categories::Model) -> CategoryRecord {
    let categories::Model {
        id,
        name,
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
        created_at,
        updated_at,
        ..
    } = item;
    CategoryRecord {
        id,
        name,
        slug,
        description,
        cover_image,
        accent_color,
        seo_title,
        seo_description,
        created_at: created_at.to_rfc3339(),
        updated_at: updated_at.to_rfc3339(),
    }
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<categories::Model> {
let item = categories::Entity::find_by_id(id).one(&ctx.db).await?;
item.ok_or(Error::NotFound)
@@ -77,23 +161,7 @@ pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
let categories = category_items
.into_iter()
.map(|category| {
let name = category
.name
.clone()
.unwrap_or_else(|| category.slug.clone());
let count = post_items
.iter()
.filter(|post| post.category.as_deref().map(str::trim) == Some(name.as_str()))
.count();
CategorySummary {
id: category.id,
name,
slug: category.slug,
count,
}
})
.map(|category| build_summary(&category, &post_items))
.collect::<Vec<_>>();
format::json(categories)
@@ -113,18 +181,28 @@ pub async fn add(State(ctx): State<AppContext>, Json(params): Json<Params>) -> R
let mut model = existing_category.into_active_model();
model.name = Set(Some(name));
model.slug = Set(slug);
model.description = Set(trim_to_option(params.description));
model.cover_image = Set(trim_to_option(params.cover_image));
model.accent_color = Set(trim_to_option(params.accent_color));
model.seo_title = Set(trim_to_option(params.seo_title));
model.seo_description = Set(trim_to_option(params.seo_description));
model.update(&ctx.db).await?
} else {
categories::ActiveModel {
name: Set(Some(name)),
slug: Set(slug),
description: Set(trim_to_option(params.description)),
cover_image: Set(trim_to_option(params.cover_image)),
accent_color: Set(trim_to_option(params.accent_color)),
seo_title: Set(trim_to_option(params.seo_title)),
seo_description: Set(trim_to_option(params.seo_description)),
..Default::default()
}
.insert(&ctx.db)
.await?
};
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -155,9 +233,14 @@ pub async fn update(
let mut item = item.into_active_model();
item.name = Set(Some(name));
item.slug = Set(slug);
item.description = Set(trim_to_option(params.description));
item.cover_image = Set(trim_to_option(params.cover_image));
item.accent_color = Set(trim_to_option(params.accent_color));
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -171,7 +254,7 @@ pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Resul
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
format::json(load_item(&ctx, id).await?)
format::json(build_record(load_item(&ctx, id).await?))
}
pub fn routes() -> Routes {

View File

@@ -122,6 +122,8 @@ pub struct CreateCommentRequest {
pub captcha_token: Option<String>,
#[serde(default, alias = "captchaAnswer")]
pub captcha_answer: Option<String>,
#[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>,
#[serde(default)]
pub website: Option<String>,
}
@@ -383,6 +385,7 @@ pub async fn add(
author: author.as_deref(),
content: content.as_deref(),
honeypot_website: params.website.as_deref(),
turnstile_token: params.turnstile_token.as_deref(),
captcha_token: params.captcha_token.as_deref(),
captcha_answer: params.captcha_answer.as_deref(),
},

View File

@@ -1,5 +1,6 @@
pub mod admin;
pub mod admin_api;
pub mod admin_taxonomy;
pub mod admin_ops;
pub mod ai;
pub mod auth;

View File

@@ -95,6 +95,60 @@ fn publicly_accessible(post: &Model) -> bool {
content::is_post_publicly_accessible(post, Utc::now().fixed_offset())
}
/// Maps a user-supplied sort key onto a supported post column; anything
/// unrecognized (including absence) falls back to `created_at`.
fn normalize_post_sort_by(value: Option<&str>) -> String {
    let key = value.unwrap_or("").trim().to_ascii_lowercase();
    if key == "updated_at" || key == "updated" {
        "updated_at".to_string()
    } else if key == "title" {
        "title".to_string()
    } else {
        "created_at".to_string()
    }
}

/// Sanitizes the sort direction: only `asc` is accepted; everything else
/// (including absence) means `desc`.
fn normalize_sort_order(value: Option<&str>) -> String {
    let direction = value.unwrap_or("").trim().to_ascii_lowercase();
    if direction == "asc" {
        direction
    } else {
        "desc".to_string()
    }
}
/// Sorts posts in place by the normalized `sort_by` key and direction.
/// Titles compare case-insensitively, falling back to the slug when a
/// post has no title; `id` is the final tie-breaker so the order is
/// deterministic regardless of direction.
fn sort_posts(items: &mut [Model], sort_by: &str, sort_order: &str) {
    let ascending = sort_order == "asc";
    items.sort_by(|a, b| {
        let base = match sort_by {
            "updated_at" => a.updated_at.cmp(&b.updated_at),
            "title" => {
                let lhs = a.title.as_deref().unwrap_or(&a.slug).to_ascii_lowercase();
                let rhs = b.title.as_deref().unwrap_or(&b.slug).to_ascii_lowercase();
                lhs.cmp(&rhs)
            }
            _ => a.created_at.cmp(&b.created_at),
        };
        let directed = if ascending { base } else { base.reverse() };
        directed.then_with(|| a.id.cmp(&b.id))
    });
}
fn parse_optional_markdown_datetime(
value: Option<&str>,
) -> Option<chrono::DateTime<chrono::FixedOffset>> {
@@ -388,6 +442,28 @@ pub struct ListQuery {
pub preview: Option<bool>,
}
/// Query string for the paginated post listing: the shared `ListQuery`
/// filters (flattened) plus pagination and sort controls.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct PagedPostsQuery {
    #[serde(flatten)]
    pub filters: ListQuery,
    pub page: Option<u64>,
    // Alias lets clients send snake_case `page_size` — NOTE(review):
    // presumably the container renames fields elsewhere; confirm.
    #[serde(alias = "page_size")]
    pub page_size: Option<u64>,
    pub sort_by: Option<String>,
    pub sort_order: Option<String>,
}

/// Response envelope for the paginated post listing; echoes the effective
/// (clamped/normalized) pagination and sort settings.
#[derive(Clone, Debug, Serialize)]
pub struct PagedPostsResponse {
    pub items: Vec<Model>,
    pub page: u64,
    pub page_size: u64,
    pub total: usize,
    pub total_pages: u64,
    pub sort_by: String,
    pub sort_order: String,
}
#[derive(Clone, Debug, Default, Deserialize)]
pub struct LookupQuery {
#[serde(default, deserialize_with = "deserialize_boolish_option")]
@@ -469,6 +545,61 @@ pub async fn list(
format::json(filtered)
}
/// `GET api/posts/page` — server-side paginated post listing.
///
/// Applies the same filter rules as `list` (preview mode derived from the
/// query/headers; private posts only in preview; redirects default to
/// preview), then sorts the filtered set in memory and slices one page.
/// `page_size` is clamped to 1..=100 and `page` to the valid range, so
/// out-of-range requests return the nearest existing page.
#[debug_handler]
pub async fn list_page(
    Query(query): Query<PagedPostsQuery>,
    State(ctx): State<AppContext>,
    headers: HeaderMap,
) -> Result<Response> {
    content::sync_markdown_posts(&ctx).await?;
    let preview = request_preview_mode(query.filters.preview, &headers);
    // Private posts are visible only in preview mode, and even then the
    // caller may opt out (defaults to included).
    let include_private = preview && query.filters.include_private.unwrap_or(true);
    let include_redirects = query.filters.include_redirects.unwrap_or(preview);
    let page_size = query.page_size.unwrap_or(20).clamp(1, 100);
    let sort_by = normalize_post_sort_by(query.sort_by.as_deref());
    let sort_order = normalize_sort_order(query.sort_order.as_deref());
    let mut filtered = Entity::find()
        .order_by_desc(Column::CreatedAt)
        .all(&ctx.db)
        .await?
        .into_iter()
        .filter(|post| {
            should_include_post(
                post,
                &query.filters,
                preview,
                include_private,
                include_redirects,
            )
        })
        .collect::<Vec<_>>();
    sort_posts(&mut filtered, &sort_by, &sort_order);
    let total = filtered.len();
    // Ceiling division; at least one page even for an empty result set.
    let total_pages = std::cmp::max(1, ((total as u64) + page_size - 1) / page_size);
    let page = query.page.unwrap_or(1).clamp(1, total_pages);
    let start = ((page - 1) * page_size) as usize;
    let end = std::cmp::min(start + page_size as usize, total);
    let items = if start >= total {
        Vec::new()
    } else {
        filtered[start..end].to_vec()
    };
    format::json(PagedPostsResponse {
        items,
        page,
        page_size,
        total,
        total_pages,
        sort_by,
        sort_order,
    })
}
#[debug_handler]
pub async fn add(
headers: HeaderMap,
@@ -876,6 +1007,7 @@ pub async fn delete_markdown_by_slug(
pub fn routes() -> Routes {
Routes::new()
.prefix("api/posts/")
.add("page", get(list_page))
.add("/", get(list))
.add("/", post(add))
.add("markdown", post(create_markdown))

View File

@@ -274,6 +274,71 @@ fn is_preview_search(query: &SearchQuery, headers: &HeaderMap) -> bool {
.unwrap_or(false)
}
/// Maps the requested search ordering onto a canonical key
/// (`newest`/`oldest`/`title`); unknown values fall back to `relevance`.
fn normalize_search_sort_by(value: Option<&str>) -> String {
    let key = value.unwrap_or("").trim().to_ascii_lowercase();
    let canonical = if key == "newest" || key == "created_at" {
        "newest"
    } else if key == "oldest" {
        "oldest"
    } else if key == "title" {
        "title"
    } else {
        "relevance"
    };
    canonical.to_string()
}

/// Resolves the sort direction. An explicit `asc`/`desc` always wins;
/// otherwise `title` defaults to ascending while every other key
/// defaults to descending.
fn normalize_sort_order(value: Option<&str>, sort_by: &str) -> String {
    let requested = value.unwrap_or("").trim().to_ascii_lowercase();
    if requested == "asc" || requested == "desc" {
        return requested;
    }
    if sort_by == "title" {
        "asc".to_string()
    } else {
        "desc".to_string()
    }
}
/// Orders search hits in place.
///
/// `relevance`, `newest`, and `oldest` encode their direction in the
/// comparison key itself, so `sort_order` is deliberately ignored for
/// them (the early return). Only `title` honors asc/desc and adds slug
/// as a deterministic tie-breaker; the other keys rely on the sort being
/// stable for ties.
fn sort_search_results(items: &mut [SearchResult], sort_by: &str, sort_order: &str) {
    items.sort_by(|left, right| {
        let ordering = match sort_by {
            "newest" => right.created_at.cmp(&left.created_at),
            "oldest" => left.created_at.cmp(&right.created_at),
            // Case-insensitive title, falling back to slug when absent.
            "title" => left
                .title
                .as_deref()
                .unwrap_or(&left.slug)
                .to_ascii_lowercase()
                .cmp(
                    &right
                        .title
                        .as_deref()
                        .unwrap_or(&right.slug)
                        .to_ascii_lowercase(),
                ),
            // relevance: highest rank first; newest first among equal ranks.
            // NaN ranks compare as Equal rather than poisoning the sort.
            _ => right
                .rank
                .partial_cmp(&left.rank)
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| right.created_at.cmp(&left.created_at)),
        };
        if sort_by == "relevance" || sort_by == "newest" || sort_by == "oldest" {
            return ordering;
        }
        let ordering = if sort_order == "asc" {
            ordering
        } else {
            ordering.reverse()
        };
        ordering.then_with(|| left.slug.cmp(&right.slug))
    });
}
#[derive(Clone, Debug, Default, Deserialize)]
pub struct SearchQuery {
pub q: Option<String>,
@@ -286,6 +351,17 @@ pub struct SearchQuery {
pub preview: Option<bool>,
}
/// Query parameters for the paginated search endpoint: the regular search
/// filters (flattened) plus paging and sorting controls.
///
/// Aliases accept the camelCase spellings this API uses elsewhere
/// (e.g. "turnstileToken"). The previous alias on `page_size` repeated the
/// field's own serde name ("page_size") and therefore accepted nothing
/// extra; it now maps the intended "pageSize", while the snake_case form
/// still matches via the field name itself.
#[derive(Clone, Debug, Default, Deserialize)]
pub struct SearchPageQuery {
    #[serde(flatten)]
    pub search: SearchQuery,
    /// 1-based page number; clamped by the handler.
    pub page: Option<u64>,
    /// Items per page; clamped to 1..=100 by the handler.
    #[serde(alias = "pageSize")]
    pub page_size: Option<u64>,
    #[serde(alias = "sortBy")]
    pub sort_by: Option<String>,
    #[serde(alias = "sortOrder")]
    pub sort_order: Option<String>,
}
#[derive(Clone, Debug, Serialize)]
pub struct SearchResult {
pub id: i32,
@@ -296,37 +372,47 @@ pub struct SearchResult {
pub category: Option<String>,
pub tags: Option<Value>,
pub post_type: Option<String>,
pub image: Option<String>,
pub pinned: Option<bool>,
pub created_at: chrono::DateTime<chrono::Utc>,
pub updated_at: chrono::DateTime<chrono::Utc>,
pub rank: f64,
}
#[debug_handler]
pub async fn search(
Query(query): Query<SearchQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
let started_at = Instant::now();
let preview_search = is_preview_search(&query, &headers);
content::sync_markdown_posts(&ctx).await?;
/// JSON body returned by the paginated search endpoint.
#[derive(Clone, Debug, Serialize)]
pub struct PagedSearchResponse {
    /// The (trimmed) query string that was executed.
    pub query: String,
    /// Results for the current page only.
    pub items: Vec<SearchResult>,
    /// 1-based page number actually served (after clamping).
    pub page: u64,
    /// Page size actually used (after clamping to 1..=100).
    pub page_size: u64,
    /// Total number of matching results across all pages.
    pub total: usize,
    /// Total page count; at least 1 even when there are no results.
    pub total_pages: u64,
    /// Normalized sort key applied to the result set.
    pub sort_by: String,
    /// Normalized sort direction applied to the result set.
    pub sort_order: String,
}
let q = query.q.unwrap_or_default().trim().to_string();
async fn build_search_results(
ctx: &AppContext,
query: &SearchQuery,
headers: &HeaderMap,
) -> Result<(String, bool, Vec<SearchResult>)> {
let preview_search = is_preview_search(query, headers);
content::sync_markdown_posts(ctx).await?;
let q = query.q.clone().unwrap_or_default().trim().to_string();
if q.is_empty() {
return format::json(Vec::<SearchResult>::new());
return Ok((q, preview_search, Vec::new()));
}
if !preview_search {
abuse_guard::enforce_public_scope(
"search",
abuse_guard::detect_client_ip(&headers).as_deref(),
abuse_guard::detect_client_ip(headers).as_deref(),
Some(&q),
)?;
}
let limit = query.limit.unwrap_or(20).clamp(1, 100) as usize;
let settings = site_settings::load_current(&ctx).await.ok();
let settings = site_settings::load_current(ctx).await.ok();
let synonym_groups = settings
.as_ref()
.map(|item| parse_synonym_groups(&item.search_synonyms))
@@ -342,7 +428,12 @@ pub async fn search(
})
.collect::<Vec<_>>();
if let Some(category) = query.category.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
if let Some(category) = query
.category
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
all_posts.retain(|post| {
post.category
.as_deref()
@@ -355,7 +446,12 @@ pub async fn search(
all_posts.retain(|post| post_has_tag(post, tag));
}
if let Some(post_type) = query.post_type.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
if let Some(post_type) = query
.post_type
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
all_posts.retain(|post| {
post.post_type
.as_deref()
@@ -378,6 +474,7 @@ pub async fn search(
category: post.category.clone(),
tags: post.tags.clone(),
post_type: post.post_type.clone(),
image: post.image.clone(),
pinned: post.pinned,
created_at: post.created_at.into(),
updated_at: post.updated_at.into(),
@@ -401,6 +498,7 @@ pub async fn search(
category: post.category.clone(),
tags: post.tags.clone(),
post_type: post.post_type.clone(),
image: post.image.clone(),
pinned: post.pinned,
created_at: post.created_at.into(),
updated_at: post.updated_at.into(),
@@ -410,13 +508,22 @@ pub async fn search(
}
}
results.sort_by(|left, right| {
right
.rank
.partial_cmp(&left.rank)
.unwrap_or(std::cmp::Ordering::Equal)
.then_with(|| right.created_at.cmp(&left.created_at))
});
sort_search_results(&mut results, "relevance", "desc");
Ok((q, preview_search, results))
}
#[debug_handler]
pub async fn search(
Query(query): Query<SearchQuery>,
State(ctx): State<AppContext>,
headers: HeaderMap,
) -> Result<Response> {
let started_at = Instant::now();
let limit = query.limit.unwrap_or(20).clamp(1, 100) as usize;
let (q, preview_search, mut results) = build_search_results(&ctx, &query, &headers).await?;
if q.is_empty() {
return format::json(Vec::<SearchResult>::new());
}
results.truncate(limit);
if !preview_search {
@@ -433,6 +540,70 @@ pub async fn search(
format::json(results)
}
pub fn routes() -> Routes {
Routes::new().prefix("api/search/").add("/", get(search))
/// `GET /api/search/page` — paginated, sortable variant of the search
/// endpoint.
///
/// Normalizes paging/sorting inputs, builds the full result set via
/// `build_search_results`, sorts it, slices out the requested page, and
/// (for non-preview requests) records a search analytics event.
#[debug_handler]
pub async fn search_page(
    Query(query): Query<SearchPageQuery>,
    State(ctx): State<AppContext>,
    headers: HeaderMap,
) -> Result<Response> {
    // Timed from the very start so the analytics event includes
    // normalization and result-building time.
    let started_at = Instant::now();
    let page_size = query.page_size.unwrap_or(20).clamp(1, 100);
    let sort_by = normalize_search_sort_by(query.sort_by.as_deref());
    let sort_order = normalize_sort_order(query.sort_order.as_deref(), &sort_by);
    let (q, preview_search, mut results) = build_search_results(&ctx, &query.search, &headers).await?;
    // An empty query short-circuits with an empty (but well-formed) page
    // and is never recorded in analytics.
    if q.is_empty() {
        return format::json(PagedSearchResponse {
            query: q,
            items: Vec::new(),
            page: 1,
            page_size,
            total: 0,
            total_pages: 1,
            sort_by,
            sort_order,
        });
    }
    sort_search_results(&mut results, &sort_by, &sort_order);
    let total = results.len();
    // Ceiling division; floored at 1 so `page` can always be clamped.
    let total_pages = std::cmp::max(1, ((total as u64) + page_size - 1) / page_size);
    let page = query.page.unwrap_or(1).clamp(1, total_pages);
    let start = ((page - 1) * page_size) as usize;
    let end = std::cmp::min(start + page_size as usize, total);
    let items = if start >= total {
        Vec::new()
    } else {
        results[start..end].to_vec()
    };
    // Preview requests are excluded from search analytics.
    if !preview_search {
        analytics::record_search_event(
            &ctx,
            &q,
            total,
            &headers,
            started_at.elapsed().as_millis() as i64,
        )
        .await;
    }
    format::json(PagedSearchResponse {
        query: q,
        items,
        page,
        page_size,
        total,
        total_pages,
        sort_by,
        sort_order,
    })
}
/// Mount the public search endpoints under `api/search/`: the paginated
/// `page` route plus the original flat search at the prefix root.
pub fn routes() -> Routes {
    Routes::new()
        .prefix("api/search/")
        .add("page", get(search_page))
        .add("/", get(search))
}

View File

@@ -93,6 +93,22 @@ pub struct SiteSettingsPayload {
pub ai_enabled: Option<bool>,
#[serde(default, alias = "paragraphCommentsEnabled")]
pub paragraph_comments_enabled: Option<bool>,
#[serde(default, alias = "commentTurnstileEnabled")]
pub comment_turnstile_enabled: Option<bool>,
#[serde(default, alias = "subscriptionTurnstileEnabled")]
pub subscription_turnstile_enabled: Option<bool>,
#[serde(default, alias = "webPushEnabled")]
pub web_push_enabled: Option<bool>,
#[serde(default, alias = "turnstileSiteKey")]
pub turnstile_site_key: Option<String>,
#[serde(default, alias = "turnstileSecretKey")]
pub turnstile_secret_key: Option<String>,
#[serde(default, alias = "webPushVapidPublicKey")]
pub web_push_vapid_public_key: Option<String>,
#[serde(default, alias = "webPushVapidPrivateKey")]
pub web_push_vapid_private_key: Option<String>,
#[serde(default, alias = "webPushVapidSubject")]
pub web_push_vapid_subject: Option<String>,
#[serde(default, alias = "aiProvider")]
pub ai_provider: Option<String>,
#[serde(default, alias = "aiApiBase")]
@@ -139,6 +155,8 @@ pub struct SiteSettingsPayload {
pub seo_default_twitter_handle: Option<String>,
#[serde(default, alias = "notificationWebhookUrl")]
pub notification_webhook_url: Option<String>,
#[serde(default, alias = "notificationChannelType")]
pub notification_channel_type: Option<String>,
#[serde(default, alias = "notificationCommentEnabled")]
pub notification_comment_enabled: Option<bool>,
#[serde(default, alias = "notificationFriendLinkEnabled")]
@@ -177,6 +195,11 @@ pub struct PublicSiteSettingsResponse {
pub music_playlist: Option<serde_json::Value>,
pub ai_enabled: bool,
pub paragraph_comments_enabled: bool,
pub comment_turnstile_enabled: bool,
pub subscription_turnstile_enabled: bool,
pub web_push_enabled: bool,
pub turnstile_site_key: Option<String>,
pub web_push_vapid_public_key: Option<String>,
pub subscription_popup_enabled: bool,
pub subscription_popup_title: String,
pub subscription_popup_description: String,
@@ -220,6 +243,17 @@ fn normalize_optional_int(value: Option<i32>, min: i32, max: i32) -> Option<i32>
value.map(|item| item.clamp(min, max))
}
/// Validate a requested notification channel type.
///
/// Accepts "ntfy" or "webhook" (case-insensitively, ignoring surrounding
/// whitespace) and returns the canonical lowercase form; anything else —
/// including `None` — yields `None`.
fn normalize_notification_channel_type(value: Option<String>) -> Option<String> {
    let candidate = value?.trim().to_ascii_lowercase();
    if candidate == "ntfy" || candidate == "webhook" {
        Some(candidate)
    } else {
        None
    }
}
/// Default for the subscription popup toggle: enabled unless explicitly
/// turned off in site settings.
pub(crate) fn default_subscription_popup_enabled() -> bool {
    true
}
@@ -515,6 +549,32 @@ impl SiteSettingsPayload {
if let Some(paragraph_comments_enabled) = self.paragraph_comments_enabled {
item.paragraph_comments_enabled = Some(paragraph_comments_enabled);
}
if let Some(comment_turnstile_enabled) = self.comment_turnstile_enabled {
item.comment_turnstile_enabled = Some(comment_turnstile_enabled);
}
if let Some(subscription_turnstile_enabled) = self.subscription_turnstile_enabled {
item.subscription_turnstile_enabled = Some(subscription_turnstile_enabled);
}
if let Some(web_push_enabled) = self.web_push_enabled {
item.web_push_enabled = Some(web_push_enabled);
}
if let Some(turnstile_site_key) = self.turnstile_site_key {
item.turnstile_site_key = normalize_optional_string(Some(turnstile_site_key));
}
if let Some(turnstile_secret_key) = self.turnstile_secret_key {
item.turnstile_secret_key = normalize_optional_string(Some(turnstile_secret_key));
}
if let Some(web_push_vapid_public_key) = self.web_push_vapid_public_key {
item.web_push_vapid_public_key =
normalize_optional_string(Some(web_push_vapid_public_key));
}
if let Some(web_push_vapid_private_key) = self.web_push_vapid_private_key {
item.web_push_vapid_private_key =
normalize_optional_string(Some(web_push_vapid_private_key));
}
if let Some(web_push_vapid_subject) = self.web_push_vapid_subject {
item.web_push_vapid_subject = normalize_optional_string(Some(web_push_vapid_subject));
}
let provider_list_supplied = self.ai_providers.is_some();
let provided_ai_providers = self.ai_providers.map(normalize_ai_provider_configs);
let requested_active_provider_id = self
@@ -591,6 +651,10 @@ impl SiteSettingsPayload {
item.notification_webhook_url =
normalize_optional_string(Some(notification_webhook_url));
}
if self.notification_channel_type.is_some() {
item.notification_channel_type =
normalize_notification_channel_type(self.notification_channel_type);
}
if let Some(notification_comment_enabled) = self.notification_comment_enabled {
item.notification_comment_enabled = Some(notification_comment_enabled);
}
@@ -699,6 +763,14 @@ fn default_payload() -> SiteSettingsPayload {
]),
ai_enabled: Some(false),
paragraph_comments_enabled: Some(true),
comment_turnstile_enabled: Some(false),
subscription_turnstile_enabled: Some(false),
web_push_enabled: Some(false),
turnstile_site_key: None,
turnstile_secret_key: None,
web_push_vapid_public_key: None,
web_push_vapid_private_key: None,
web_push_vapid_subject: None,
ai_provider: Some(ai::provider_name(None)),
ai_api_base: Some(ai::default_api_base().to_string()),
ai_api_key: Some(ai::default_api_key().to_string()),
@@ -725,6 +797,7 @@ fn default_payload() -> SiteSettingsPayload {
seo_default_og_image: None,
seo_default_twitter_handle: None,
notification_webhook_url: None,
notification_channel_type: Some("webhook".to_string()),
notification_comment_enabled: Some(false),
notification_friend_link_enabled: Some(false),
subscription_popup_enabled: Some(default_subscription_popup_enabled()),
@@ -760,6 +833,18 @@ pub(crate) async fn load_current(ctx: &AppContext) -> Result<Model> {
}
fn public_response(model: Model) -> PublicSiteSettingsResponse {
let turnstile_site_key = crate::services::turnstile::site_key(&model);
let web_push_vapid_public_key = crate::services::web_push::public_key(&model);
let comment_turnstile_enabled = crate::services::turnstile::is_enabled(
&model,
crate::services::turnstile::TurnstileScope::Comment,
);
let subscription_turnstile_enabled = crate::services::turnstile::is_enabled(
&model,
crate::services::turnstile::TurnstileScope::Subscription,
);
let web_push_enabled = crate::services::web_push::is_enabled(&model);
PublicSiteSettingsResponse {
id: model.id,
site_name: model.site_name,
@@ -781,6 +866,11 @@ fn public_response(model: Model) -> PublicSiteSettingsResponse {
music_playlist: model.music_playlist,
ai_enabled: model.ai_enabled.unwrap_or(false),
paragraph_comments_enabled: model.paragraph_comments_enabled.unwrap_or(true),
comment_turnstile_enabled,
subscription_turnstile_enabled,
web_push_enabled,
turnstile_site_key,
web_push_vapid_public_key,
subscription_popup_enabled: model
.subscription_popup_enabled
.unwrap_or_else(default_subscription_popup_enabled),

View File

@@ -1,7 +1,9 @@
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use crate::services::{abuse_guard, admin_audit, subscriptions};
use axum::http::header;
use crate::services::{abuse_guard, admin_audit, subscriptions, turnstile};
#[derive(Clone, Debug, Deserialize)]
pub struct PublicSubscriptionPayload {
@@ -10,6 +12,17 @@ pub struct PublicSubscriptionPayload {
pub display_name: Option<String>,
#[serde(default)]
pub source: Option<String>,
#[serde(default, alias = "turnstileToken")]
pub turnstile_token: Option<String>,
}
/// Request body for the public browser (Web Push) subscription endpoint.
#[derive(Clone, Debug, Deserialize)]
pub struct PublicBrowserPushSubscriptionPayload {
    /// Raw PushSubscription JSON as produced by the browser; must contain
    /// a non-empty `endpoint` (validated in the handler).
    pub subscription: serde_json::Value,
    /// Optional free-form origin marker (e.g. which UI surface subscribed).
    #[serde(default)]
    pub source: Option<String>,
    /// Turnstile token; checked when Turnstile is enabled for the
    /// subscription scope. Accepts the camelCase alias from the frontend.
    #[serde(default, alias = "turnstileToken")]
    pub turnstile_token: Option<String>,
}
#[derive(Clone, Debug, Deserialize)]
@@ -55,6 +68,19 @@ fn public_subscription_metadata(source: Option<String>) -> serde_json::Value {
})
}
/// Build the metadata JSON stored alongside a public browser-push
/// subscription: origin marker, the raw subscription object, and the
/// subscriber's user agent (when available).
fn public_browser_push_metadata(
    source: Option<String>,
    subscription: serde_json::Value,
    user_agent: Option<String>,
) -> serde_json::Value {
    serde_json::json!({
        "source": source,
        // Distinguishes browser-push entries from e-mail subscriptions.
        "kind": "browser-push",
        "subscription": subscription,
        "user_agent": user_agent,
    })
}
#[debug_handler]
pub async fn subscribe(
State(ctx): State<AppContext>,
@@ -62,11 +88,19 @@ pub async fn subscribe(
Json(payload): Json<PublicSubscriptionPayload>,
) -> Result<Response> {
let email = payload.email.trim().to_ascii_lowercase();
let client_ip = abuse_guard::detect_client_ip(&headers);
abuse_guard::enforce_public_scope(
"subscription",
abuse_guard::detect_client_ip(&headers).as_deref(),
client_ip.as_deref(),
Some(&email),
)?;
let _ = turnstile::verify_if_enabled(
&ctx,
turnstile::TurnstileScope::Subscription,
payload.turnstile_token.as_deref(),
client_ip.as_deref(),
)
.await?;
let result = subscriptions::create_public_email_subscription(
&ctx,
@@ -103,6 +137,76 @@ pub async fn subscribe(
})
}
/// `POST /api/subscriptions/browser-push` — register a public Web Push
/// subscription.
///
/// Flow: reject if web push is disabled in site settings → validate the
/// subscription's `endpoint` → apply abuse limits keyed on the endpoint →
/// verify Turnstile (when enabled for the subscription scope) → persist →
/// write an admin audit event → return the subscription status.
#[debug_handler]
pub async fn subscribe_browser_push(
    State(ctx): State<AppContext>,
    headers: axum::http::HeaderMap,
    Json(payload): Json<PublicBrowserPushSubscriptionPayload>,
) -> Result<Response> {
    let settings = crate::controllers::site_settings::load_current(&ctx).await?;
    if !crate::services::web_push::is_enabled(&settings) {
        return Err(Error::BadRequest("浏览器推送未启用".to_string()));
    }
    // The endpoint both identifies the subscription and keys the abuse
    // limiter, so it must be present and non-empty.
    let endpoint = payload
        .subscription
        .get("endpoint")
        .and_then(serde_json::Value::as_str)
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .ok_or_else(|| Error::BadRequest("browser push subscription.endpoint 不能为空".to_string()))?
        .to_string();
    let client_ip = abuse_guard::detect_client_ip(&headers);
    // Normalized User-Agent, blank values dropped; stored in metadata only.
    let user_agent = headers
        .get(header::USER_AGENT)
        .and_then(|value| value.to_str().ok())
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .map(ToString::to_string);
    abuse_guard::enforce_public_scope("browser-push-subscription", client_ip.as_deref(), Some(&endpoint))?;
    // Only the Ok/Err outcome matters here; the verification payload
    // itself is discarded.
    let _ = turnstile::verify_if_enabled(
        &ctx,
        turnstile::TurnstileScope::Subscription,
        payload.turnstile_token.as_deref(),
        client_ip.as_deref(),
    )
    .await?;
    let result = subscriptions::create_public_web_push_subscription(
        &ctx,
        payload.subscription.clone(),
        Some(public_browser_push_metadata(
            payload.source,
            payload.subscription,
            user_agent,
        )),
    )
    .await?;
    // Audit trail: public endpoint, so there is no acting admin user (None).
    admin_audit::log_event(
        &ctx,
        None,
        "subscription.public.web_push.active",
        "subscription",
        Some(result.subscription.id.to_string()),
        Some(result.subscription.target.clone()),
        Some(serde_json::json!({
            "channel_type": result.subscription.channel_type,
            "status": result.subscription.status,
        })),
    )
    .await?;
    // Browser push needs no e-mail confirmation round-trip.
    format::json(PublicSubscriptionResponse {
        ok: true,
        subscription_id: result.subscription.id,
        status: result.subscription.status,
        requires_confirmation: false,
        message: result.message,
    })
}
#[debug_handler]
pub async fn confirm(
State(ctx): State<AppContext>,
@@ -196,6 +300,7 @@ pub fn routes() -> Routes {
Routes::new()
.prefix("/api/subscriptions")
.add("/", post(subscribe))
.add("/browser-push", post(subscribe_browser_push))
.add("/confirm", post(confirm))
.add("/manage", get(manage).patch(update_manage))
.add("/unsubscribe", post(unsubscribe))

View File

@@ -2,43 +2,217 @@
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use loco_rs::prelude::*;
use sea_orm::{ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::models::_entities::tags::{ActiveModel, Entity, Model};
use crate::models::_entities::{posts, tags};
use crate::services::content;
/// Public view of a tag, enriched with the number of posts that use it.
#[derive(Clone, Debug, Serialize)]
pub struct TagSummary {
    pub id: i32,
    /// Display name; falls back to the slug when the tag has no name.
    pub name: String,
    pub slug: String,
    /// Number of posts whose tag list matches this tag's name or slug
    /// (compared case-insensitively).
    pub count: usize,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
}
/// Admin-facing serialization of a tag row, with RFC 3339 timestamps.
#[derive(Clone, Debug, Serialize)]
pub struct TagRecord {
    pub id: i32,
    /// Optional name as stored; may be `None` (unlike `TagSummary`).
    pub name: Option<String>,
    pub slug: String,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
    /// Creation time, RFC 3339 formatted.
    pub created_at: String,
    /// Last update time, RFC 3339 formatted.
    pub updated_at: String,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Params {
pub name: Option<String>,
pub slug: String,
pub slug: Option<String>,
#[serde(default)]
pub description: Option<String>,
#[serde(default)]
pub cover_image: Option<String>,
#[serde(default)]
pub accent_color: Option<String>,
#[serde(default)]
pub seo_title: Option<String>,
#[serde(default)]
pub seo_description: Option<String>,
}
impl Params {
fn update(&self, item: &mut ActiveModel) {
item.name = Set(self.name.clone());
item.slug = Set(self.slug.clone());
/// Trim an optional string, collapsing blank or whitespace-only values
/// to `None` so they are stored as NULL rather than empty strings.
fn trim_to_option(value: Option<String>) -> Option<String> {
    match value {
        Some(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        None => None,
    }
}
/// Lowercase-ASCII slug builder: alphanumerics are kept (lowercased), runs
/// of whitespace / '-' / '_' collapse to a single '-', and every other
/// character is dropped. Leading/trailing dashes are stripped.
///
/// NOTE(review): input with no ASCII alphanumerics (e.g. a fully CJK tag
/// name) yields an empty slug — callers relying on the fallback should
/// confirm that is acceptable.
fn slugify(value: &str) -> String {
    let mut slug = String::new();
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            slug.push(ch.to_ascii_lowercase());
        } else if ch.is_whitespace() || ch == '-' || ch == '_' {
            // Collapse separator runs into one dash.
            if !slug.ends_with('-') {
                slug.push('-');
            }
        }
        // Any other character (punctuation, non-ASCII) is ignored.
    }
    slug.trim_matches('-').to_string()
}
fn normalized_name(params: &Params) -> Result<String> {
params
.name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
.ok_or_else(|| Error::BadRequest("tag name is required".to_string()))
}
/// Use the explicitly supplied slug (trimmed) when present and non-blank;
/// otherwise derive one from `fallback` (typically the tag name) via
/// `slugify`.
fn normalized_slug(params: &Params, fallback: &str) -> String {
    if let Some(raw) = params.slug.as_deref() {
        let candidate = raw.trim();
        if !candidate.is_empty() {
            return candidate.to_string();
        }
    }
    slugify(fallback)
}
/// Display name for a tag: the stored name when set, otherwise the slug.
fn tag_name(item: &tags::Model) -> String {
    match &item.name {
        Some(name) => name.clone(),
        None => item.slug.clone(),
    }
}
/// Collect a post's tag strings, trimmed and lowercased (ASCII), dropping
/// non-string entries and blanks. Returns an empty vec when the post has
/// no `tags` JSON array.
///
/// Iterates the JSON array by reference instead of cloning it wholesale:
/// the original cloned every `Value` (including non-string entries) on
/// each call, and this helper runs once per post per tag summary.
fn tag_values(post: &posts::Model) -> Vec<String> {
    match post.tags.as_ref().and_then(Value::as_array) {
        Some(values) => values
            .iter()
            .filter_map(Value::as_str)
            .map(|value| value.trim().to_ascii_lowercase())
            .filter(|value| !value.is_empty())
            .collect(),
        None => Vec::new(),
    }
}
/// Assemble the public summary for one tag, counting how many of the given
/// posts reference it.
///
/// A post matches when any of its normalized tag values equals either the
/// tag's display name or its slug (both lowercased and trimmed), so posts
/// may reference a tag by either spelling.
fn build_summary(item: &tags::Model, post_items: &[posts::Model]) -> TagSummary {
    let name = tag_name(item);
    // Both accepted spellings of this tag, normalized the same way
    // `tag_values` normalizes post tags.
    let aliases = [name.trim().to_ascii_lowercase(), item.slug.trim().to_ascii_lowercase()];
    let count = post_items
        .iter()
        .filter(|post| {
            tag_values(post)
                .into_iter()
                .any(|value| aliases.iter().any(|alias| alias == &value))
        })
        .count();
    TagSummary {
        id: item.id,
        name,
        slug: item.slug.clone(),
        count,
        description: item.description.clone(),
        cover_image: item.cover_image.clone(),
        accent_color: item.accent_color.clone(),
        seo_title: item.seo_title.clone(),
        seo_description: item.seo_description.clone(),
    }
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
let item = Entity::find_by_id(id).one(&ctx.db).await?;
/// Convert a tag row into its admin-facing JSON shape, formatting the
/// timestamps as RFC 3339 strings.
fn build_record(item: tags::Model) -> TagRecord {
    TagRecord {
        id: item.id,
        name: item.name,
        slug: item.slug,
        description: item.description,
        cover_image: item.cover_image,
        accent_color: item.accent_color,
        seo_title: item.seo_title,
        seo_description: item.seo_description,
        created_at: item.created_at.to_rfc3339(),
        updated_at: item.updated_at.to_rfc3339(),
    }
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<tags::Model> {
let item = tags::Entity::find_by_id(id).one(&ctx.db).await?;
item.ok_or_else(|| Error::NotFound)
}
#[debug_handler]
pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
content::sync_markdown_posts(&ctx).await?;
format::json(Entity::find().all(&ctx.db).await?)
let tag_items = tags::Entity::find()
.order_by_asc(tags::Column::Slug)
.all(&ctx.db)
.await?;
let post_items = posts::Entity::find().all(&ctx.db).await?;
format::json(
tag_items
.into_iter()
.map(|item| build_summary(&item, &post_items))
.collect::<Vec<_>>(),
)
}
#[debug_handler]
pub async fn add(State(ctx): State<AppContext>, Json(params): Json<Params>) -> Result<Response> {
let mut item = ActiveModel {
..Default::default()
let name = normalized_name(&params)?;
let slug = normalized_slug(&params, &name);
let existing = tags::Entity::find()
.filter(tags::Column::Slug.eq(&slug))
.one(&ctx.db)
.await?;
let item = if let Some(existing_tag) = existing {
let mut item = existing_tag.into_active_model();
item.name = Set(Some(name));
item.slug = Set(slug);
item.description = Set(trim_to_option(params.description));
item.cover_image = Set(trim_to_option(params.cover_image));
item.accent_color = Set(trim_to_option(params.accent_color));
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
item.update(&ctx.db).await?
} else {
tags::ActiveModel {
name: Set(Some(name)),
slug: Set(slug),
description: Set(trim_to_option(params.description)),
cover_image: Set(trim_to_option(params.cover_image)),
accent_color: Set(trim_to_option(params.accent_color)),
seo_title: Set(trim_to_option(params.seo_title)),
seo_description: Set(trim_to_option(params.seo_description)),
..Default::default()
}
.insert(&ctx.db)
.await?
};
params.update(&mut item);
let item = item.insert(&ctx.db).await?;
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -47,35 +221,36 @@ pub async fn update(
State(ctx): State<AppContext>,
Json(params): Json<Params>,
) -> Result<Response> {
let name = normalized_name(&params)?;
let slug = normalized_slug(&params, &name);
let item = load_item(&ctx, id).await?;
let previous_name = item.name.clone();
let previous_slug = item.slug.clone();
let next_name = params
.name
if previous_name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty());
if let Some(next_name) = next_name {
if previous_name
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
!= Some(next_name)
{
content::rewrite_tag_references(
previous_name.as_deref(),
&previous_slug,
Some(next_name),
)?;
}
.filter(|value| !value.is_empty())
!= Some(name.as_str())
{
content::rewrite_tag_references(
previous_name.as_deref(),
&previous_slug,
Some(&name),
)?;
}
let mut item = item.into_active_model();
params.update(&mut item);
item.name = Set(Some(name));
item.slug = Set(slug);
item.description = Set(trim_to_option(params.description));
item.cover_image = Set(trim_to_option(params.cover_image));
item.accent_color = Set(trim_to_option(params.accent_color));
item.seo_title = Set(trim_to_option(params.seo_title));
item.seo_description = Set(trim_to_option(params.seo_description));
let item = item.update(&ctx.db).await?;
content::sync_markdown_posts(&ctx).await?;
format::json(item)
format::json(build_record(item))
}
#[debug_handler]
@@ -89,7 +264,7 @@ pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Resul
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
format::json(load_item(&ctx, id).await?)
format::json(build_record(load_item(&ctx, id).await?))
}
pub fn routes() -> Routes {

View File

@@ -10,6 +10,13 @@ pub struct Model {
pub id: i32,
pub name: Option<String>,
pub slug: String,
#[sea_orm(column_type = "Text", nullable)]
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub seo_description: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -0,0 +1,25 @@
//! `SeaORM` Entity, manually maintained
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// Editable metadata attached to an object-storage file (`media_assets`
/// table). The binary itself lives in object storage; this row only
/// carries descriptive fields keyed by `object_key`.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "media_assets")]
pub struct Model {
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    #[sea_orm(primary_key)]
    pub id: i32,
    /// Key of the underlying object in storage.
    pub object_key: String,
    pub title: Option<String>,
    /// Alternative text for accessibility.
    pub alt_text: Option<String>,
    #[sea_orm(column_type = "Text", nullable)]
    pub caption: Option<String>,
    /// Free-form tag list stored as JSONB.
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub tags: Option<Json>,
    #[sea_orm(column_type = "Text", nullable)]
    pub notes: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

View File

@@ -10,6 +10,7 @@ pub mod comment_persona_analysis_logs;
pub mod comments;
pub mod content_events;
pub mod friend_links;
pub mod media_assets;
pub mod notification_deliveries;
pub mod post_revisions;
pub mod posts;

View File

@@ -8,6 +8,7 @@ pub use super::comment_persona_analysis_logs::Entity as CommentPersonaAnalysisLo
pub use super::comments::Entity as Comments;
pub use super::content_events::Entity as ContentEvents;
pub use super::friend_links::Entity as FriendLinks;
pub use super::media_assets::Entity as MediaAssets;
pub use super::notification_deliveries::Entity as NotificationDeliveries;
pub use super::post_revisions::Entity as PostRevisions;
pub use super::posts::Entity as Posts;

View File

@@ -32,6 +32,19 @@ pub struct Model {
pub music_playlist: Option<Json>,
pub ai_enabled: Option<bool>,
pub paragraph_comments_enabled: Option<bool>,
pub comment_turnstile_enabled: Option<bool>,
pub subscription_turnstile_enabled: Option<bool>,
pub web_push_enabled: Option<bool>,
#[sea_orm(column_type = "Text", nullable)]
pub turnstile_site_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub turnstile_secret_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub web_push_vapid_public_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub web_push_vapid_private_key: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub web_push_vapid_subject: Option<String>,
pub ai_provider: Option<String>,
pub ai_api_base: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
@@ -63,6 +76,7 @@ pub struct Model {
pub seo_default_twitter_handle: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub notification_webhook_url: Option<String>,
pub notification_channel_type: Option<String>,
pub notification_comment_enabled: Option<bool>,
pub notification_friend_link_enabled: Option<bool>,
pub subscription_popup_enabled: Option<bool>,

View File

@@ -12,6 +12,13 @@ pub struct Model {
pub id: i32,
pub name: Option<String>,
pub slug: String,
#[sea_orm(column_type = "Text", nullable)]
pub description: Option<String>,
pub cover_image: Option<String>,
pub accent_color: Option<String>,
pub seo_title: Option<String>,
#[sea_orm(column_type = "Text", nullable)]
pub seo_description: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]

View File

@@ -0,0 +1,23 @@
pub use super::_entities::media_assets::{ActiveModel, Entity, Model};
use sea_orm::entity::prelude::*;
pub type MediaAssets = Entity;
/// Lifecycle hooks for the media-assets active model.
#[async_trait::async_trait]
impl ActiveModelBehavior for ActiveModel {
    /// Before persisting: on an UPDATE where the caller has not touched
    /// `updated_at`, stamp it with the current UTC time. Inserts and rows
    /// with an explicitly set timestamp pass through unchanged.
    async fn before_save<C>(mut self, _db: &C, insert: bool) -> std::result::Result<Self, DbErr>
    where
        C: ConnectionTrait,
    {
        let needs_touch = !insert && self.updated_at.is_unchanged();
        if needs_touch {
            self.updated_at = sea_orm::ActiveValue::Set(chrono::Utc::now().into());
        }
        Ok(self)
    }
}
// Extension points for domain logic on the media-assets entity; empty for
// now but kept so future methods have an obvious home.
impl Model {}
impl ActiveModel {}
impl Entity {}

View File

@@ -3,6 +3,7 @@ pub mod ai_chunks;
pub mod categories;
pub mod comments;
pub mod friend_links;
pub mod media_assets;
pub mod posts;
pub mod site_settings;
pub mod tags;

View File

@@ -0,0 +1,640 @@
use std::{fs, path::Path, path::PathBuf};
use chrono::Utc;
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, Condition, EntityTrait, IntoActiveModel, QueryFilter,
QueryOrder, Set,
};
use serde::{Deserialize, Serialize};
use crate::{
controllers::site_settings,
models::_entities::{
categories, friend_links, media_assets, posts, reviews, site_settings as site_settings_entity,
tags,
},
services::{content, media_assets as media_assets_service, storage},
};
// Schema version stamped into every exported backup document.
const BACKUP_VERSION: &str = "2026-04-01";
// User-facing caveat (zh-CN) embedded in backup documents: exports carry
// content, settings and an object manifest, but NOT the binary files held
// in object storage.
const WARNING_STORAGE_BINARIES: &str =
    "当前备份只包含内容、配置与对象清单,不包含对象存储二进制文件本身。恢复后如需图片等资源,仍需保留原对象存储桶或手动回传文件。";
/// Category or tag row as serialized into a backup (both taxonomies share
/// this shape).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupTaxonomyRecord {
    pub name: String,
    pub slug: String,
    pub description: Option<String>,
    pub cover_image: Option<String>,
    pub accent_color: Option<String>,
    pub seo_title: Option<String>,
    pub seo_description: Option<String>,
}
/// Review row snapshot; every field is optional so partial rows survive
/// export/import round-trips.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupReviewRecord {
    pub title: Option<String>,
    pub review_type: Option<String>,
    pub rating: Option<i32>,
    /// Review date serialized as a string.
    pub review_date: Option<String>,
    pub status: Option<String>,
    pub description: Option<String>,
    pub tags: Option<String>,
    pub cover: Option<String>,
    pub link_url: Option<String>,
}
/// Friend-link row snapshot; `site_url` is the only required field.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupFriendLinkRecord {
    pub site_name: Option<String>,
    pub site_url: String,
    pub avatar_url: Option<String>,
    pub description: Option<String>,
    pub category: Option<String>,
    pub status: Option<String>,
}
/// Media-asset metadata keyed by its object-storage key. Only metadata is
/// backed up — the binary object itself is not included.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupMediaAssetRecord {
    pub object_key: String,
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    pub tags: Vec<String>,
    pub notes: Option<String>,
}
/// One entry of the object-storage manifest: a listing of what exists in
/// the bucket (key, URL, size, mtime) — not the bytes themselves.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupStorageObjectRecord {
    pub key: String,
    pub url: String,
    pub size_bytes: i64,
    pub last_modified: Option<String>,
}
/// A markdown post captured verbatim: its slug, on-disk file name, and the
/// raw markdown source.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BackupPostDocument {
    pub slug: String,
    pub file_name: String,
    pub markdown: String,
}
/// Top-level backup payload: a versioned envelope around site settings,
/// taxonomies, reviews, friend links, media metadata, an optional storage
/// manifest and the raw markdown posts.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SiteBackupDocument {
    /// Schema version of this document (see `BACKUP_VERSION`).
    pub version: String,
    pub exported_at: String,
    /// Advisory flag — see `WARNING_STORAGE_BINARIES` for the caveat.
    pub includes_storage_binaries: bool,
    /// Human-readable warning text embedded with the export.
    pub warning: String,
    pub site_settings: site_settings_entity::Model,
    pub categories: Vec<BackupTaxonomyRecord>,
    pub tags: Vec<BackupTaxonomyRecord>,
    pub reviews: Vec<BackupReviewRecord>,
    pub friend_links: Vec<BackupFriendLinkRecord>,
    pub media_assets: Vec<BackupMediaAssetRecord>,
    /// Listing of object-storage contents at export time, when captured.
    pub storage_manifest: Option<Vec<BackupStorageObjectRecord>>,
    pub posts: Vec<BackupPostDocument>,
}
/// Result report returned after importing a backup: per-section upsert
/// counts plus the mode ("merge"/"replace") that was applied.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SiteBackupImportSummary {
    pub imported: bool,
    /// Normalized import mode that was executed.
    pub mode: String,
    pub site_settings_restored: bool,
    /// Number of markdown documents written to disk.
    pub posts_written: usize,
    pub categories_upserted: usize,
    pub tags_upserted: usize,
    pub reviews_upserted: usize,
    pub friend_links_upserted: usize,
    pub media_assets_upserted: usize,
    /// Size of the storage manifest processed (listing entries only).
    pub storage_manifest_items: usize,
    pub includes_storage_binaries: bool,
    /// Echo of the storage-binaries caveat for the caller to display.
    pub warning: String,
}
/// Normalize an optional string: `None` or whitespace-only input yields
/// `None`; anything else comes back trimmed.
fn trim_to_option(value: Option<String>) -> Option<String> {
    let raw = value?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Build a lowercase ASCII slug: alphanumerics are kept, runs of
/// whitespace/'-'/'_' collapse to a single '-', and every other character
/// (including non-ASCII letters) is dropped. Leading/trailing dashes are
/// stripped from the result.
fn slugify(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
        } else if (ch.is_whitespace() || ch == '-' || ch == '_') && !out.ends_with('-') {
            out.push('-');
        }
    }
    out.trim_matches('-').to_string()
}
/// Canonicalize the requested import mode. Only "replace" (any case,
/// surrounding whitespace ignored) is honored; every other or missing value
/// falls back to the non-destructive default, "merge".
fn normalize_backup_mode(value: Option<&str>) -> String {
    let normalized = value.unwrap_or("merge").trim().to_ascii_lowercase();
    if normalized == "replace" {
        "replace".to_string()
    } else {
        "merge".to_string()
    }
}
fn markdown_posts_dir() -> PathBuf {
PathBuf::from(content::MARKDOWN_POSTS_DIR)
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
}
/// Delete every `*.md` / `*.markdown` file directly inside the posts
/// directory and return how many were removed. The directory is created
/// first so a fresh deployment does not error; unreadable directory entries
/// are silently skipped (best effort).
fn remove_existing_markdown_documents() -> Result<usize> {
    let dir = markdown_posts_dir();
    fs::create_dir_all(&dir).map_err(io_error)?;
    let mut removed = 0_usize;
    for entry in fs::read_dir(&dir).map_err(io_error)?.filter_map(std::result::Result::ok) {
        let path = entry.path();
        let is_markdown = path
            .extension()
            .and_then(|ext| ext.to_str())
            .map_or(false, |ext| {
                ext.eq_ignore_ascii_case("md") || ext.eq_ignore_ascii_case("markdown")
            });
        if is_markdown {
            fs::remove_file(&path).map_err(io_error)?;
            removed += 1;
        }
    }
    Ok(removed)
}
/// Normalize line endings to Unix style so backups hash and diff consistently.
fn normalize_markdown(value: &str) -> String {
    value.split("\r\n").collect::<Vec<_>>().join("\n")
}
/// Resolve the final (slug, markdown) pair for one backed-up post.
///
/// Precedence for the slug: whatever `content::parse_markdown_source`
/// extracts from the document wins; the record's own `slug` field is only a
/// fallback. The file name falls back to `<slug>.md` (or `post.md` when the
/// slug is blank too). Returns `BadRequest` when no non-empty slug can be
/// determined from either source.
fn normalized_backup_post(document: &BackupPostDocument) -> Result<(String, String)> {
    let candidate_slug = trim_to_option(Some(document.slug.clone())).unwrap_or_default();
    let file_name = trim_to_option(Some(document.file_name.clone()))
        .unwrap_or_else(|| format!("{}.md", if candidate_slug.is_empty() { "post" } else { &candidate_slug }));
    // The file stem seeds the parser's default slug when the front matter has none.
    let file_stem = Path::new(&file_name)
        .file_stem()
        .and_then(|value| value.to_str())
        .unwrap_or("post");
    let markdown = normalize_markdown(&document.markdown);
    let parsed = content::parse_markdown_source(file_stem, &markdown, &file_name)?;
    let slug = if parsed.slug.trim().is_empty() {
        candidate_slug
    } else {
        parsed.slug
    };
    if slug.trim().is_empty() {
        return Err(Error::BadRequest("备份中的文章 slug 不能为空".to_string()));
    }
    Ok((slug, markdown))
}
/// Build the optional storage manifest for a backup.
///
/// Returns `Ok(None)` when no R2 settings are configured (local-only sites),
/// otherwise a listing of remote objects.
/// NOTE(review): `list_objects(ctx, None, 1000)` caps the listing at 1000
/// entries — buckets larger than that would produce a silently truncated
/// manifest. TODO confirm whether pagination is needed here.
async fn export_storage_manifest(
    ctx: &AppContext,
) -> Result<Option<Vec<BackupStorageObjectRecord>>> {
    if storage::optional_r2_settings(ctx).await?.is_none() {
        return Ok(None);
    }
    Ok(Some(
        storage::list_objects(ctx, None, 1000)
            .await?
            .into_iter()
            .map(|item| BackupStorageObjectRecord {
                key: item.key,
                url: item.url,
                size_bytes: item.size_bytes,
                last_modified: item.last_modified,
            })
            .collect(),
    ))
}
/// Snapshot one category row into the backup's plain taxonomy shape.
/// A row without a display name falls back to its slug.
fn export_category_record(item: categories::Model) -> BackupTaxonomyRecord {
    let name = item.name.unwrap_or_else(|| item.slug.clone());
    BackupTaxonomyRecord {
        name,
        slug: item.slug,
        description: item.description,
        cover_image: item.cover_image,
        accent_color: item.accent_color,
        seo_title: item.seo_title,
        seo_description: item.seo_description,
    }
}
/// Snapshot one tag row into the backup's plain taxonomy shape.
/// A row without a display name falls back to its slug.
fn export_tag_record(item: tags::Model) -> BackupTaxonomyRecord {
    let name = item.name.unwrap_or_else(|| item.slug.clone());
    BackupTaxonomyRecord {
        name,
        slug: item.slug,
        description: item.description,
        cover_image: item.cover_image,
        accent_color: item.accent_color,
        seo_title: item.seo_title,
        seo_description: item.seo_description,
    }
}
/// Copy a review row field-for-field into its backup representation.
fn export_review_record(item: reviews::Model) -> BackupReviewRecord {
    BackupReviewRecord {
        title: item.title,
        review_type: item.review_type,
        rating: item.rating,
        review_date: item.review_date,
        status: item.status,
        description: item.description,
        tags: item.tags,
        cover: item.cover,
        link_url: item.link_url,
    }
}
/// Copy a friend-link row field-for-field into its backup representation.
fn export_friend_link_record(item: friend_links::Model) -> BackupFriendLinkRecord {
    BackupFriendLinkRecord {
        site_name: item.site_name,
        site_url: item.site_url,
        avatar_url: item.avatar_url,
        description: item.description,
        category: item.category,
        status: item.status,
    }
}
/// Snapshot a media-asset row, flattening the JSON tags column into a plain
/// list of strings via the shared service helper.
fn export_media_asset_record(item: media_assets::Model) -> BackupMediaAssetRecord {
    let flattened_tags = media_assets_service::tag_list(&item);
    BackupMediaAssetRecord {
        object_key: item.object_key,
        title: item.title,
        alt_text: item.alt_text,
        caption: item.caption,
        tags: flattened_tags,
        notes: item.notes,
    }
}
/// Assemble a full `SiteBackupDocument`: current site settings, all taxonomy
/// rows, reviews, friend links, media metadata, the raw markdown of every
/// post, and (best effort) a remote-storage manifest.
///
/// Markdown posts are re-synced from disk first so the export reflects the
/// files, not a stale database view. A failing storage-manifest export is
/// logged and degraded to `None` rather than failing the whole backup.
pub async fn export_site_backup(ctx: &AppContext) -> Result<SiteBackupDocument> {
    let site_settings_row = site_settings::load_current(ctx).await?;
    // Sync first so the post list below matches what is actually on disk.
    let markdown_posts = content::sync_markdown_posts(ctx).await?;
    let categories = categories::Entity::find()
        .order_by_asc(categories::Column::Slug)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_category_record)
        .collect::<Vec<_>>();
    let tags = tags::Entity::find()
        .order_by_asc(tags::Column::Slug)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_tag_record)
        .collect::<Vec<_>>();
    let reviews = reviews::Entity::find()
        .order_by_desc(reviews::Column::UpdatedAt)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_review_record)
        .collect::<Vec<_>>();
    let friend_links = friend_links::Entity::find()
        .order_by_asc(friend_links::Column::SiteUrl)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_friend_link_record)
        .collect::<Vec<_>>();
    let media_assets = media_assets::Entity::find()
        .order_by_asc(media_assets::Column::ObjectKey)
        .all(&ctx.db)
        .await?
        .into_iter()
        .map(export_media_asset_record)
        .collect::<Vec<_>>();
    // Embed each post's raw on-disk markdown; any unreadable file aborts the export.
    let posts = markdown_posts
        .into_iter()
        .map(|post| {
            let (_, markdown) = content::read_markdown_document(&post.slug)?;
            Ok(BackupPostDocument {
                slug: post.slug.clone(),
                file_name: format!("{}.md", post.slug),
                markdown,
            })
        })
        .collect::<Result<Vec<_>>>()?;
    // The manifest is optional metadata: a storage outage must not block backups.
    let storage_manifest = match export_storage_manifest(ctx).await {
        Ok(items) => items,
        Err(error) => {
            tracing::warn!(?error, "failed to export storage manifest, continuing without it");
            None
        }
    };
    Ok(SiteBackupDocument {
        version: BACKUP_VERSION.to_string(),
        exported_at: Utc::now().to_rfc3339(),
        includes_storage_binaries: false,
        warning: WARNING_STORAGE_BINARIES.to_string(),
        site_settings: site_settings_row,
        categories,
        tags,
        reviews,
        friend_links,
        media_assets,
        storage_manifest,
        posts,
    })
}
/// Overwrite the current site-settings row with the values from a backup,
/// keeping the existing row's primary key and creation timestamp intact.
///
/// `reset_all()` marks every attribute as changed so the UPDATE writes all
/// columns from the backup — presumably intentional so that fields cleared
/// in the backup are also cleared here (NOTE(review): confirm against
/// sea_orm's `reset_all` semantics).
async fn restore_site_settings(
    ctx: &AppContext,
    value: &site_settings_entity::Model,
) -> Result<()> {
    let current = site_settings::load_current(ctx).await?;
    let mut active = value.clone().into_active_model();
    active.id = Set(current.id);
    active.created_at = Set(current.created_at);
    active.updated_at = Set(Utc::now().into());
    active.reset_all().update(&ctx.db).await?;
    Ok(())
}
/// Insert or update one category from a backup record.
///
/// Matching is by slug OR display name (either hit counts as "existing"),
/// so a record that renames a slug still updates the old row instead of
/// duplicating it. Blank names fall back to the slug and vice versa; an
/// empty resolved slug is rejected.
async fn upsert_category(ctx: &AppContext, item: &BackupTaxonomyRecord) -> Result<()> {
    let name = trim_to_option(Some(item.name.clone())).unwrap_or_else(|| item.slug.clone());
    let slug = trim_to_option(Some(item.slug.clone())).unwrap_or_else(|| slugify(&name));
    if slug.is_empty() {
        return Err(Error::BadRequest("分类 slug 不能为空".to_string()));
    }
    let existing = categories::Entity::find()
        .filter(
            Condition::any()
                .add(categories::Column::Slug.eq(&slug))
                .add(categories::Column::Name.eq(name.clone())),
        )
        .one(&ctx.db)
        .await?;
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    // All optional fields are overwritten (trimmed; blanks become NULL).
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(item.description.clone()));
    active.cover_image = Set(trim_to_option(item.cover_image.clone()));
    active.accent_color = Set(trim_to_option(item.accent_color.clone()));
    active.seo_title = Set(trim_to_option(item.seo_title.clone()));
    active.seo_description = Set(trim_to_option(item.seo_description.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Insert or update one tag from a backup record.
///
/// Mirrors `upsert_category` exactly, just against the `tags` entity:
/// match by slug OR name, overwrite all optional fields, reject an empty
/// resolved slug. Keep the two functions in sync when changing either.
async fn upsert_tag(ctx: &AppContext, item: &BackupTaxonomyRecord) -> Result<()> {
    let name = trim_to_option(Some(item.name.clone())).unwrap_or_else(|| item.slug.clone());
    let slug = trim_to_option(Some(item.slug.clone())).unwrap_or_else(|| slugify(&name));
    if slug.is_empty() {
        return Err(Error::BadRequest("标签 slug 不能为空".to_string()));
    }
    let existing = tags::Entity::find()
        .filter(
            Condition::any()
                .add(tags::Column::Slug.eq(&slug))
                .add(tags::Column::Name.eq(name.clone())),
        )
        .one(&ctx.db)
        .await?;
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    active.name = Set(Some(name));
    active.slug = Set(slug);
    active.description = Set(trim_to_option(item.description.clone()));
    active.cover_image = Set(trim_to_option(item.cover_image.clone()));
    active.accent_color = Set(trim_to_option(item.accent_color.clone()));
    active.seo_title = Set(trim_to_option(item.seo_title.clone()));
    active.seo_description = Set(trim_to_option(item.seo_description.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Insert or update one friend link from a backup record, keyed by its
/// (trimmed) `site_url`. A blank URL is rejected; all other fields are
/// overwritten with the trimmed backup values (blanks become NULL).
async fn upsert_friend_link(ctx: &AppContext, item: &BackupFriendLinkRecord) -> Result<()> {
    let site_url = trim_to_option(Some(item.site_url.clone()))
        .ok_or_else(|| Error::BadRequest("友链 site_url 不能为空".to_string()))?;
    let existing = friend_links::Entity::find()
        .filter(friend_links::Column::SiteUrl.eq(&site_url))
        .one(&ctx.db)
        .await?;
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    active.site_name = Set(trim_to_option(item.site_name.clone()));
    active.site_url = Set(site_url);
    active.avatar_url = Set(trim_to_option(item.avatar_url.clone()));
    active.description = Set(trim_to_option(item.description.clone()));
    active.category = Set(trim_to_option(item.category.clone()));
    active.status = Set(trim_to_option(item.status.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Insert or update one review from a backup record.
///
/// Reviews have no natural key, so the best-effort match filters on whichever
/// of (title, review_type, review_date) are present; the oldest matching row
/// (lowest id) is updated.
/// NOTE(review): when all three identifying fields are blank, no match is
/// attempted and a fresh row is inserted each time — repeated imports of such
/// a backup will accumulate duplicate rows. Confirm whether that is intended.
async fn upsert_review(ctx: &AppContext, item: &BackupReviewRecord) -> Result<()> {
    let title = trim_to_option(item.title.clone());
    let review_type = trim_to_option(item.review_type.clone());
    let review_date = trim_to_option(item.review_date.clone());
    let mut query = reviews::Entity::find();
    if let Some(value) = title.clone() {
        query = query.filter(reviews::Column::Title.eq(value));
    }
    if let Some(value) = review_type.clone() {
        query = query.filter(reviews::Column::ReviewType.eq(value));
    }
    if let Some(value) = review_date.clone() {
        query = query.filter(reviews::Column::ReviewDate.eq(value));
    }
    let existing = if title.is_some() || review_type.is_some() || review_date.is_some() {
        query.order_by_asc(reviews::Column::Id).one(&ctx.db).await?
    } else {
        None
    };
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|model| model.into_active_model())
        .unwrap_or_default();
    active.title = Set(title);
    active.review_type = Set(review_type);
    active.rating = Set(item.rating);
    active.review_date = Set(review_date);
    active.status = Set(trim_to_option(item.status.clone()));
    active.description = Set(trim_to_option(item.description.clone()));
    active.tags = Set(trim_to_option(item.tags.clone()));
    active.cover = Set(trim_to_option(item.cover.clone()));
    active.link_url = Set(trim_to_option(item.link_url.clone()));
    if has_existing {
        active.update(&ctx.db).await?;
    } else {
        active.insert(&ctx.db).await?;
    }
    Ok(())
}
/// Re-apply one backed-up media metadata record through the shared
/// service-level upsert (which also normalizes the tag list).
async fn upsert_media_asset(ctx: &AppContext, item: &BackupMediaAssetRecord) -> Result<()> {
    let payload = media_assets_service::MediaAssetMetadataInput {
        title: item.title.clone(),
        alt_text: item.alt_text.clone(),
        caption: item.caption.clone(),
        tags: Some(item.tags.clone()),
        notes: item.notes.clone(),
    };
    media_assets_service::upsert_by_key(ctx, &item.object_key, payload).await?;
    Ok(())
}
/// Write the backup's markdown posts to disk and re-sync the posts table.
///
/// In replace mode all existing markdown files are removed first; when the
/// backup carries zero posts in replace mode, the posts table is cleared and
/// no sync runs. Returns the number of distinct slugs written (a backup with
/// duplicate slugs writes the later document over the earlier one).
async fn write_backup_posts(
    ctx: &AppContext,
    documents: &[BackupPostDocument],
    replace_existing: bool,
) -> Result<usize> {
    let dir = markdown_posts_dir();
    fs::create_dir_all(&dir).map_err(io_error)?;
    if replace_existing {
        remove_existing_markdown_documents()?;
    }
    if documents.is_empty() {
        if replace_existing {
            posts::Entity::delete_many().exec(&ctx.db).await?;
        }
        return Ok(0);
    }
    // Track distinct slugs so the count is not inflated by duplicates.
    let mut written = std::collections::HashSet::new();
    for document in documents {
        let (slug, markdown) = normalized_backup_post(document)?;
        fs::write(content::markdown_post_path(&slug), markdown).map_err(io_error)?;
        written.insert(slug);
    }
    content::sync_markdown_posts(ctx).await?;
    Ok(written.len())
}
/// Restore a `SiteBackupDocument` into the running site.
///
/// Mode "merge" (default) upserts on top of existing data; "replace" first
/// deletes all friend links, reviews, media metadata, categories and tags,
/// then restores from the backup. Storage binaries are never touched — only
/// the manifest count is echoed back.
/// NOTE(review): the deletes and subsequent upserts do not run inside a
/// single transaction, so a mid-import failure in replace mode can leave the
/// site partially restored. Confirm whether wrapping this in a transaction
/// is feasible with the file writes involved.
pub async fn import_site_backup(
    ctx: &AppContext,
    backup: SiteBackupDocument,
    mode: Option<&str>,
) -> Result<SiteBackupImportSummary> {
    let mode = normalize_backup_mode(mode);
    let replace_existing = mode == "replace";
    if replace_existing {
        friend_links::Entity::delete_many().exec(&ctx.db).await?;
        reviews::Entity::delete_many().exec(&ctx.db).await?;
        media_assets::Entity::delete_many().exec(&ctx.db).await?;
        categories::Entity::delete_many().exec(&ctx.db).await?;
        tags::Entity::delete_many().exec(&ctx.db).await?;
    }
    restore_site_settings(ctx, &backup.site_settings).await?;
    let posts_written = write_backup_posts(ctx, &backup.posts, replace_existing).await?;
    let mut categories_upserted = 0_usize;
    for item in &backup.categories {
        upsert_category(ctx, item).await?;
        categories_upserted += 1;
    }
    let mut tags_upserted = 0_usize;
    for item in &backup.tags {
        upsert_tag(ctx, item).await?;
        tags_upserted += 1;
    }
    let mut reviews_upserted = 0_usize;
    for item in &backup.reviews {
        upsert_review(ctx, item).await?;
        reviews_upserted += 1;
    }
    let mut friend_links_upserted = 0_usize;
    for item in &backup.friend_links {
        upsert_friend_link(ctx, item).await?;
        friend_links_upserted += 1;
    }
    let mut media_assets_upserted = 0_usize;
    for item in &backup.media_assets {
        upsert_media_asset(ctx, item).await?;
        media_assets_upserted += 1;
    }
    Ok(SiteBackupImportSummary {
        imported: true,
        mode,
        site_settings_restored: true,
        posts_written,
        categories_upserted,
        tags_upserted,
        reviews_upserted,
        friend_links_upserted,
        media_assets_upserted,
        storage_manifest_items: backup.storage_manifest.as_ref().map(Vec::len).unwrap_or(0),
        includes_storage_binaries: backup.includes_storage_binaries,
        warning: WARNING_STORAGE_BINARIES.to_string(),
    })
}

View File

@@ -39,6 +39,7 @@ pub struct CommentGuardInput<'a> {
pub author: Option<&'a str>,
pub content: Option<&'a str>,
pub honeypot_website: Option<&'a str>,
pub turnstile_token: Option<&'a str>,
pub captcha_token: Option<&'a str>,
pub captcha_answer: Option<&'a str>,
}
@@ -362,7 +363,16 @@ pub async fn enforce_comment_guard(ctx: &AppContext, input: &CommentGuardInput<'
return Err(Error::BadRequest("提交未通过校验".to_string()));
}
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
if !crate::services::turnstile::verify_if_enabled(
ctx,
crate::services::turnstile::TurnstileScope::Comment,
input.turnstile_token,
input.ip_address,
)
.await?
{
verify_captcha_solution(input.captcha_token, input.captcha_answer, input.ip_address)?;
}
if contains_blocked_keyword(input).is_some() {
return Err(Error::BadRequest("评论内容包含敏感关键词".to_string()));

View File

@@ -0,0 +1,125 @@
use std::collections::HashMap;
use loco_rs::prelude::*;
use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, Set};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::models::_entities::media_assets;
/// Caller-supplied metadata for one media asset; every field is optional
/// and a missing/blank field is stored as NULL by `upsert_by_key`.
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct MediaAssetMetadataInput {
    pub title: Option<String>,
    pub alt_text: Option<String>,
    pub caption: Option<String>,
    // Normalized (trimmed, ASCII-lowercased, de-duplicated) before storage.
    pub tags: Option<Vec<String>>,
    pub notes: Option<String>,
}
/// Collapse missing or whitespace-only strings to `None`, trimming the rest.
fn trim_to_option(value: Option<String>) -> Option<String> {
    value
        .map(|item| item.trim().to_string())
        .filter(|item| !item.is_empty())
}
/// Trim, ASCII-lowercase, and de-duplicate the tag list while keeping
/// first-seen order; returns `None` when nothing survives so the JSON column
/// stays NULL instead of storing an empty array.
fn normalize_tag_list(values: Option<Vec<String>>) -> Option<Value> {
    let mut seen = std::collections::HashSet::new();
    let mut tags = Vec::new();
    for raw in values.unwrap_or_default() {
        let normalized = raw.trim().to_ascii_lowercase();
        if normalized.is_empty() || !seen.insert(normalized.clone()) {
            continue;
        }
        tags.push(Value::String(normalized));
    }
    if tags.is_empty() {
        None
    } else {
        Some(Value::Array(tags))
    }
}
/// Decode the JSON `tags` column into owned strings. A missing column or a
/// non-array value yields an empty list; non-string array items are dropped.
pub fn tag_list(model: &media_assets::Model) -> Vec<String> {
    match model.tags.as_ref().and_then(Value::as_array) {
        Some(items) => items
            .iter()
            .filter_map(Value::as_str)
            .map(str::to_string)
            .collect(),
        None => Vec::new(),
    }
}
/// Batch-fetch media-asset rows for the given object keys, returned as a map
/// indexed by key. An empty key list short-circuits without querying.
pub async fn list_by_keys(
    ctx: &AppContext,
    keys: &[String],
) -> Result<HashMap<String, media_assets::Model>> {
    if keys.is_empty() {
        return Ok(HashMap::new());
    }
    let rows = media_assets::Entity::find()
        .filter(media_assets::Column::ObjectKey.is_in(keys.to_vec()))
        .all(&ctx.db)
        .await?;
    let mut indexed = HashMap::with_capacity(rows.len());
    for row in rows {
        indexed.insert(row.object_key.clone(), row);
    }
    Ok(indexed)
}
/// Look up a single media-asset row by its exact object key.
pub async fn get_by_key(ctx: &AppContext, object_key: &str) -> Result<Option<media_assets::Model>> {
    let found = media_assets::Entity::find()
        .filter(media_assets::Column::ObjectKey.eq(object_key))
        .one(&ctx.db)
        .await?;
    Ok(found)
}
/// Create or fully overwrite the metadata row for one storage object.
///
/// The key is trimmed and must be non-empty. Every metadata field is
/// replaced (not merged): blank strings become NULL and the tag list is
/// normalized via `normalize_tag_list`. Returns the stored row.
pub async fn upsert_by_key(
    ctx: &AppContext,
    object_key: &str,
    payload: MediaAssetMetadataInput,
) -> Result<media_assets::Model> {
    let normalized_key = object_key.trim();
    if normalized_key.is_empty() {
        return Err(Error::BadRequest("object key 不能为空".to_string()));
    }
    let existing = get_by_key(ctx, normalized_key).await?;
    let has_existing = existing.is_some();
    let mut active = existing
        .map(|item| item.into_active_model())
        .unwrap_or_else(|| media_assets::ActiveModel {
            object_key: Set(normalized_key.to_string()),
            ..Default::default()
        });
    active.title = Set(trim_to_option(payload.title));
    active.alt_text = Set(trim_to_option(payload.alt_text));
    active.caption = Set(trim_to_option(payload.caption));
    active.tags = Set(normalize_tag_list(payload.tags));
    active.notes = Set(trim_to_option(payload.notes));
    if has_existing {
        active.update(&ctx.db).await.map_err(Into::into)
    } else {
        active.insert(&ctx.db).await.map_err(Into::into)
    }
}
/// Remove the metadata row for `object_key`; a missing row is a no-op.
pub async fn delete_by_key(ctx: &AppContext, object_key: &str) -> Result<()> {
    let Some(existing) = get_by_key(ctx, object_key).await? else {
        return Ok(());
    };
    existing.delete(&ctx.db).await?;
    Ok(())
}
/// Sequentially remove metadata rows for every key; unknown keys are skipped.
pub async fn delete_by_keys(ctx: &AppContext, object_keys: &[String]) -> Result<()> {
    for key in object_keys.iter() {
        delete_by_key(ctx, key.as_str()).await?;
    }
    Ok(())
}

View File

@@ -2,9 +2,13 @@ pub mod admin_audit;
pub mod abuse_guard;
pub mod ai;
pub mod analytics;
pub mod backups;
pub mod comment_guard;
pub mod content;
pub mod media_assets;
pub mod notifications;
pub mod post_revisions;
pub mod storage;
pub mod subscriptions;
pub mod turnstile;
pub mod web_push;

View File

@@ -1,10 +1,23 @@
use loco_rs::prelude::*;
use crate::{
controllers::site_settings,
models::_entities::{comments, friend_links},
models::_entities::{comments, friend_links, site_settings as site_settings_model},
services::subscriptions,
};
/// Pick the admin-notification channel from site settings: "ntfy"
/// (case-insensitive, whitespace ignored) routes to the ntfy channel;
/// any other or missing value falls back to the webhook channel.
fn notification_channel_type(settings: &site_settings_model::Model) -> &'static str {
    let configured = settings
        .notification_channel_type
        .as_deref()
        .map(|value| value.trim().to_ascii_lowercase());
    if configured.as_deref() == Some("ntfy") {
        subscriptions::CHANNEL_NTFY
    } else {
        subscriptions::CHANNEL_WEBHOOK
    }
}
fn trim_to_option(value: Option<String>) -> Option<String> {
value.and_then(|item| {
let trimmed = item.trim().to_string();
@@ -81,9 +94,10 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
if settings.notification_comment_enabled.unwrap_or(false) {
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
let channel_type = notification_channel_type(&settings);
if let Err(error) = subscriptions::queue_direct_notification(
ctx,
subscriptions::CHANNEL_WEBHOOK,
channel_type,
&target,
subscriptions::EVENT_COMMENT_CREATED,
"新评论通知",
@@ -94,7 +108,7 @@ pub async fn notify_new_comment(ctx: &AppContext, item: &comments::Model) {
)
.await
{
tracing::warn!("failed to queue legacy comment webhook notification: {error}");
tracing::warn!("failed to queue comment admin notification: {error}");
}
}
}
@@ -144,9 +158,10 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
if settings.notification_friend_link_enabled.unwrap_or(false) {
if let Some(target) = trim_to_option(settings.notification_webhook_url.clone()) {
let channel_type = notification_channel_type(&settings);
if let Err(error) = subscriptions::queue_direct_notification(
ctx,
subscriptions::CHANNEL_WEBHOOK,
channel_type,
&target,
subscriptions::EVENT_FRIEND_LINK_CREATED,
"新友链申请通知",
@@ -157,7 +172,7 @@ pub async fn notify_new_friend_link(ctx: &AppContext, item: &friend_links::Model
)
.await
{
tracing::warn!("failed to queue legacy friend-link webhook notification: {error}");
tracing::warn!("failed to queue friend-link admin notification: {error}");
}
}
}

View File

@@ -15,7 +15,7 @@ use uuid::Uuid;
use crate::{
mailers::subscription::SubscriptionMailer,
models::_entities::{notification_deliveries, posts, subscriptions},
services::content,
services::{content, web_push as web_push_service},
workers::notification_delivery::{
NotificationDeliveryWorker, NotificationDeliveryWorkerArgs,
},
@@ -26,6 +26,7 @@ pub const CHANNEL_WEBHOOK: &str = "webhook";
pub const CHANNEL_DISCORD: &str = "discord";
pub const CHANNEL_TELEGRAM: &str = "telegram";
pub const CHANNEL_NTFY: &str = "ntfy";
pub const CHANNEL_WEB_PUSH: &str = "web_push";
pub const STATUS_PENDING: &str = "pending";
pub const STATUS_ACTIVE: &str = "active";
@@ -139,6 +140,9 @@ pub fn normalize_channel_type(value: &str) -> String {
CHANNEL_DISCORD => CHANNEL_DISCORD.to_string(),
CHANNEL_TELEGRAM => CHANNEL_TELEGRAM.to_string(),
CHANNEL_NTFY => CHANNEL_NTFY.to_string(),
CHANNEL_WEB_PUSH | "browser_push" | "browser-push" | "webpush" => {
CHANNEL_WEB_PUSH.to_string()
}
_ => CHANNEL_EMAIL.to_string(),
}
}
@@ -225,6 +229,35 @@ fn merge_metadata(existing: Option<&Value>, incoming: Option<Value>) -> Option<V
}
}
/// Validate and canonicalize a raw browser-push subscription JSON blob.
///
/// Deserializes into `web_push::SubscriptionInfo`, trims the endpoint and
/// both keys, and rejects the subscription if any of the three ends up
/// empty. Returns the re-serialized, normalized JSON value.
fn normalize_browser_push_subscription(raw: Value) -> Result<Value> {
    let mut subscription = serde_json::from_value::<web_push::SubscriptionInfo>(raw)
        .map_err(|_| Error::BadRequest("browser push subscription 非法".to_string()))?;
    subscription.endpoint = subscription.endpoint.trim().to_string();
    subscription.keys.p256dh = subscription.keys.p256dh.trim().to_string();
    subscription.keys.auth = subscription.keys.auth.trim().to_string();
    if subscription.endpoint.is_empty()
        || subscription.keys.p256dh.is_empty()
        || subscription.keys.auth.is_empty()
    {
        return Err(Error::BadRequest(
            "browser push subscription 缺少 endpoint / keys".to_string(),
        ));
    }
    serde_json::to_value(subscription).map_err(Into::into)
}
/// Merge stored and incoming metadata, then stamp the browser-push markers
/// ("kind" and the normalized "subscription") on top so they always win over
/// any caller-supplied values.
fn merge_browser_push_metadata(existing: Option<&Value>, incoming: Option<Value>, subscription: Value) -> Value {
    let merged = merge_metadata(existing, incoming);
    let mut object = match merged.as_ref().and_then(Value::as_object) {
        Some(map) => map.clone(),
        None => Default::default(),
    };
    object.insert("kind".to_string(), Value::String("browser-push".to_string()));
    object.insert("subscription".to_string(), subscription);
    Value::Object(object)
}
fn json_string_list(value: Option<&Value>, key: &str) -> Vec<String> {
value
.and_then(Value::as_object)
@@ -592,6 +625,88 @@ pub async fn create_public_email_subscription(
})
}
/// Register (or refresh) a browser-push subscription from the public site.
///
/// The subscription endpoint is the identity: an existing row with the same
/// endpoint is reactivated in place (status forced to active, confirm token
/// cleared, metadata re-merged, manage token preserved or minted); otherwise
/// a new active row is created. Web-push never requires email-style
/// confirmation, so `requires_confirmation` is always false.
pub async fn create_public_web_push_subscription(
    ctx: &AppContext,
    subscription: Value,
    metadata: Option<Value>,
) -> Result<PublicSubscriptionResult> {
    let normalized_subscription = normalize_browser_push_subscription(subscription)?;
    let endpoint = normalized_subscription
        .get("endpoint")
        .and_then(Value::as_str)
        .ok_or_else(|| Error::BadRequest("browser push endpoint 非法".to_string()))?
        .to_string();
    let existing = subscriptions::Entity::find()
        .filter(subscriptions::Column::ChannelType.eq(CHANNEL_WEB_PUSH))
        .filter(subscriptions::Column::Target.eq(&endpoint))
        .one(&ctx.db)
        .await?;
    if let Some(existing) = existing {
        let mut active = existing.clone().into_active_model();
        // Keep the existing manage token so old management links stay valid;
        // mint one only if the row somehow has none.
        let manage_token = existing
            .manage_token
            .clone()
            .filter(|value| !value.trim().is_empty())
            .unwrap_or_else(generate_subscription_token);
        active.manage_token = Set(Some(manage_token));
        active.status = Set(STATUS_ACTIVE.to_string());
        active.confirm_token = Set(None);
        active.verified_at = Set(Some(Utc::now().to_rfc3339()));
        active.metadata = Set(Some(merge_browser_push_metadata(
            existing.metadata.as_ref(),
            metadata,
            normalized_subscription,
        )));
        // Only backfill the display name when the row has none.
        if existing
            .display_name
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
            .is_none()
        {
            active.display_name = Set(Some("Browser Push".to_string()));
        }
        let updated = active.update(&ctx.db).await?;
        return Ok(PublicSubscriptionResult {
            subscription: to_public_subscription_view(&updated),
            requires_confirmation: false,
            message: "浏览器推送已更新,后续有新内容时会直接提醒。".to_string(),
        });
    }
    let created = subscriptions::ActiveModel {
        channel_type: Set(CHANNEL_WEB_PUSH.to_string()),
        target: Set(endpoint),
        display_name: Set(Some("Browser Push".to_string())),
        status: Set(STATUS_ACTIVE.to_string()),
        filters: Set(Some(default_public_filters())),
        secret: Set(None),
        notes: Set(None),
        confirm_token: Set(None),
        manage_token: Set(Some(generate_subscription_token())),
        metadata: Set(Some(merge_browser_push_metadata(
            None,
            metadata,
            normalized_subscription,
        ))),
        verified_at: Set(Some(Utc::now().to_rfc3339())),
        last_notified_at: Set(None),
        failure_count: Set(Some(0)),
        last_delivery_status: Set(None),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    Ok(PublicSubscriptionResult {
        subscription: to_public_subscription_view(&created),
        requires_confirmation: false,
        message: "浏览器推送已开启,后续有新内容时会直接提醒。".to_string(),
    })
}
pub async fn confirm_subscription(ctx: &AppContext, token: &str) -> Result<subscriptions::Model> {
let token = token.trim();
if token.is_empty() {
@@ -869,6 +984,7 @@ fn provider_name(channel_type: &str) -> &'static str {
CHANNEL_DISCORD => "discord-webhook",
CHANNEL_TELEGRAM => "telegram-bot-api",
CHANNEL_NTFY => "ntfy",
CHANNEL_WEB_PUSH => "web-push",
_ => "webhook",
}
}
@@ -882,10 +998,65 @@ fn resolve_ntfy_target(target: &str) -> String {
}
}
/// Squash every run of whitespace (including newlines/tabs) into one space
/// and drop leading/trailing whitespace.
fn collapse_whitespace(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    for word in value.split_whitespace() {
        if !out.is_empty() {
            out.push(' ');
        }
        out.push_str(word);
    }
    out
}
/// Truncate `value` to at most `max_chars` characters, appending '…' when
/// anything was cut (so a truncated result is `max_chars + 1` chars long).
///
/// Single pass over the string: `char_indices().nth(max_chars)` both detects
/// "longer than max" and yields the byte offset to slice at, instead of the
/// previous `chars().count()` + `chars().take()` double iteration.
fn truncate_chars(value: &str, max_chars: usize) -> String {
    match value.char_indices().nth(max_chars) {
        // Fewer than or exactly `max_chars` characters: return unchanged.
        None => value.to_string(),
        // `byte_idx` is the start of the first character past the limit,
        // so slicing at it is always on a char boundary.
        Some((byte_idx, _)) => {
            let mut truncated = value[..byte_idx].to_string();
            truncated.push('…');
            truncated
        }
    }
}
/// Join `path` onto the configured site origin. Trailing slashes on the base
/// are stripped; returns `None` when no usable (non-blank) site URL exists.
fn site_asset_url(site_url: Option<&str>, path: &str) -> Option<String> {
    let trimmed = site_url?.trim();
    let base = trimmed.trim_end_matches('/');
    if base.is_empty() {
        return None;
    }
    Some(format!("{base}{path}"))
}
/// Resolve the click-through URL for a web-push notification: prefer an
/// explicit "url" in the event payload, otherwise fall back to the site URL.
fn web_push_target_url(message: &QueuedDeliveryPayload) -> Option<String> {
    if let Some(url) = message.payload.get("url").and_then(Value::as_str) {
        return Some(url.to_string());
    }
    message.site_url.clone()
}
/// Build the JSON document handed to the service worker for display.
///
/// The body is whitespace-collapsed and capped at ~220 characters; icon and
/// badge point at the site's favicon assets when a site URL is configured.
/// "tag" reuses the event type so repeated events of the same kind coalesce
/// in the browser notification tray (presumably — behavior is up to the UA).
fn build_web_push_payload(message: &QueuedDeliveryPayload) -> Value {
    let body = truncate_chars(&collapse_whitespace(&message.text), 220);
    serde_json::json!({
        "title": message.subject,
        "body": body,
        "icon": site_asset_url(message.site_url.as_deref(), "/favicon.svg"),
        "badge": site_asset_url(message.site_url.as_deref(), "/favicon.ico"),
        "url": web_push_target_url(message),
        "tag": message
            .payload
            .get("event_type")
            .and_then(Value::as_str)
            .unwrap_or("subscription"),
        "data": {
            "event_type": message.payload.get("event_type").cloned().unwrap_or(Value::Null),
            "payload": message.payload,
        }
    })
}
async fn deliver_via_channel(
ctx: &AppContext,
channel_type: &str,
target: &str,
message: &QueuedDeliveryPayload,
metadata: Option<&Value>,
) -> Result<Option<String>> {
match channel_type {
CHANNEL_EMAIL => Err(Error::BadRequest(
@@ -923,6 +1094,21 @@ async fn deliver_via_channel(
.map(|_| None)
.map_err(|error| Error::BadRequest(error.to_string()))
}
CHANNEL_WEB_PUSH => {
let settings = crate::controllers::site_settings::load_current(ctx).await?;
let subscription_info = web_push_service::subscription_info_from_metadata(metadata)?;
let payload = serde_json::to_vec(&build_web_push_payload(message))?;
web_push_service::send_payload(
&settings,
&subscription_info,
&payload,
Some(web_push::Urgency::Normal),
24 * 60 * 60,
message.site_url.as_deref(),
)
.await?;
Ok(None)
}
_ => {
let envelope = DeliveryEnvelope {
event: message
@@ -1010,10 +1196,17 @@ pub async fn process_delivery(ctx: &AppContext, delivery_id: i32) -> Result<()>
.await
.map(|_| None)
} else {
deliver_via_channel(&subscription.channel_type, &subscription.target, &message).await
deliver_via_channel(
ctx,
&subscription.channel_type,
&subscription.target,
&message,
subscription.metadata.as_ref(),
)
.await
}
} else {
deliver_via_channel(&delivery.channel_type, &delivery.target, &message).await
deliver_via_channel(ctx, &delivery.channel_type, &delivery.target, &message, None).await
};
let subscription_id = delivery.subscription_id;
let delivery_channel_type = delivery.channel_type.clone();

View File

@@ -0,0 +1,182 @@
use std::sync::OnceLock;
use loco_rs::prelude::*;
use reqwest::Client;
use serde::Deserialize;
use crate::models::_entities::site_settings;
const DEFAULT_TURNSTILE_VERIFY_URL: &str =
"https://challenges.cloudflare.com/turnstile/v0/siteverify";
const ENV_TURNSTILE_SECRET_KEY: &str = "TERMI_TURNSTILE_SECRET_KEY";
const ENV_LEGACY_TURNSTILE_SECRET_KEY: &str = "TERMI_COMMENT_TURNSTILE_SECRET_KEY";
const ENV_TURNSTILE_SITE_KEY: &str = "PUBLIC_COMMENT_TURNSTILE_SITE_KEY";
const ENV_TURNSTILE_VERIFY_URL: &str = "TERMI_TURNSTILE_VERIFY_URL";
/// Which public surface a Turnstile check protects; each scope has its own
/// opt-in flag in site settings (see `scope_enabled`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum TurnstileScope {
    // Comment submission form.
    Comment,
    // Subscription sign-up form.
    Subscription,
}
/// Subset of Cloudflare's siteverify response we care about: the overall
/// verdict plus the hyphenated "error-codes" array (logged on rejection).
#[derive(Clone, Debug, Deserialize)]
struct TurnstileVerifyResponse {
    success: bool,
    #[serde(default, rename = "error-codes")]
    error_codes: Vec<String>,
}
/// Missing or whitespace-only input becomes `None`; otherwise the trimmed
/// text as an owned `String`.
fn trim_to_option(value: Option<&str>) -> Option<String> {
    value
        .map(str::trim)
        .filter(|item| !item.is_empty())
        .map(ToString::to_string)
}
/// Read an environment variable, treating unset, non-UTF-8, and
/// whitespace-only values as absent.
fn env_value(name: &str) -> Option<String> {
    match std::env::var(name) {
        Ok(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        Err(_) => None,
    }
}
/// DB-backed settings may store empty or whitespace-only strings; normalize
/// those to `None`, returning the trimmed value otherwise.
fn configured_value(value: Option<&String>) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Trim the caller-supplied client address and cap it at 96 characters so an
/// oversized (possibly hostile) header value is never forwarded verbatim.
fn normalize_ip(value: Option<&str>) -> Option<String> {
    let trimmed = trim_to_option(value)?;
    Some(trimmed.chars().take(96).collect())
}
/// The siteverify endpoint: an env override if set (useful for tests or
/// self-hosted mocks), otherwise Cloudflare's production URL.
fn verify_url() -> String {
    match env_value(ENV_TURNSTILE_VERIFY_URL) {
        Some(custom) => custom,
        None => DEFAULT_TURNSTILE_VERIFY_URL.to_string(),
    }
}
/// Process-wide HTTP client, built lazily exactly once; reusing it keeps the
/// underlying connection pool alive across verification requests.
fn client() -> &'static Client {
    static CLIENT: OnceLock<Client> = OnceLock::new();
    CLIENT.get_or_init(Client::new)
}
/// Resolve the Turnstile secret key. Resolution order: DB-configured value,
/// then the current env var, then the legacy comment-specific env var kept
/// for backward compatibility.
pub fn secret_key(settings: &site_settings::Model) -> Option<String> {
    if let Some(configured) = configured_value(settings.turnstile_secret_key.as_ref()) {
        return Some(configured);
    }
    env_value(ENV_TURNSTILE_SECRET_KEY).or_else(|| env_value(ENV_LEGACY_TURNSTILE_SECRET_KEY))
}
/// Resolve the public Turnstile site key: DB-configured value first, falling
/// back to the public env var.
pub fn site_key(settings: &site_settings::Model) -> Option<String> {
    match configured_value(settings.turnstile_site_key.as_ref()) {
        Some(configured) => Some(configured),
        None => env_value(ENV_TURNSTILE_SITE_KEY),
    }
}
/// True when a non-blank site key can be resolved from settings or env.
fn site_key_configured(settings: &site_settings::Model) -> bool {
    matches!(site_key(settings), Some(_))
}
/// True when a non-blank secret key can be resolved from settings or env.
pub fn secret_key_configured(settings: &site_settings::Model) -> bool {
    matches!(secret_key(settings), Some(_))
}
/// Per-surface opt-in flag from site settings; a missing flag means off.
fn scope_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
    let flag = match scope {
        TurnstileScope::Comment => settings.comment_turnstile_enabled,
        TurnstileScope::Subscription => settings.subscription_turnstile_enabled,
    };
    flag.unwrap_or(false)
}
/// Turnstile is active for a scope only when it is opted in AND both the
/// site key and the secret key resolve; a half-configured setup silently
/// behaves as disabled so the caller can fall back to other checks.
pub fn is_enabled(settings: &site_settings::Model, scope: TurnstileScope) -> bool {
    if !scope_enabled(settings, scope) {
        return false;
    }
    site_key_configured(settings) && secret_key_configured(settings)
}
/// Convenience wrapper around `is_enabled` that loads the current site
/// settings first.
pub async fn is_enabled_for_ctx(ctx: &AppContext, scope: TurnstileScope) -> Result<bool> {
    let settings = crate::controllers::site_settings::load_current(ctx).await?;
    let enabled = is_enabled(&settings, scope);
    Ok(enabled)
}
/// Call Cloudflare's siteverify endpoint and map every failure mode to a
/// user-facing `BadRequest` (all messages are intentionally in Chinese for
/// the public site). A missing secret or missing token fails before any
/// network call; transport errors, non-2xx responses, and undecodable bodies
/// are logged and reported as "service unavailable"; a decoded `success:
/// false` is reported as a failed verification.
async fn verify_token(
    settings: &site_settings::Model,
    token: Option<&str>,
    client_ip: Option<&str>,
) -> Result<()> {
    let secret = secret_key(settings).ok_or_else(|| {
        Error::BadRequest("人机验证尚未配置完成,请稍后重试".to_string())
    })?;
    let response_token = trim_to_option(token)
        .ok_or_else(|| Error::BadRequest("请先完成人机验证".to_string()))?;
    let mut form_data = vec![
        ("secret".to_string(), secret),
        ("response".to_string(), response_token),
    ];
    // remoteip is optional in the siteverify API; only send it when present.
    if let Some(remote_ip) = normalize_ip(client_ip) {
        form_data.push(("remoteip".to_string(), remote_ip));
    }
    let response = client()
        .post(verify_url())
        .form(&form_data)
        .send()
        .await
        .map_err(|error| {
            tracing::warn!("turnstile verify request failed: {error}");
            Error::BadRequest("人机验证服务暂时不可用,请稍后重试".to_string())
        })?;
    if !response.status().is_success() {
        tracing::warn!(
            "turnstile verify returned unexpected status: {}",
            response.status()
        );
        return Err(Error::BadRequest(
            "人机验证服务暂时不可用,请稍后重试".to_string(),
        ));
    }
    let payload = response
        .json::<TurnstileVerifyResponse>()
        .await
        .map_err(|error| {
            tracing::warn!("turnstile verify decode failed: {error}");
            Error::BadRequest("人机验证服务暂时不可用,请稍后重试".to_string())
        })?;
    if !payload.success {
        // error_codes come straight from Cloudflare; useful for diagnosing
        // expired/duplicated tokens without exposing details to the user.
        tracing::warn!(
            error_codes = ?payload.error_codes,
            "turnstile verify rejected request"
        );
        return Err(Error::BadRequest("人机验证未通过,请重试".to_string()));
    }
    Ok(())
}
/// Runs Turnstile verification only when the feature is enabled for `scope`.
///
/// Returns `Ok(false)` when verification is disabled (nothing checked),
/// `Ok(true)` when the token was verified, and an error when verification
/// was required but failed.
pub async fn verify_if_enabled(
    ctx: &AppContext,
    scope: TurnstileScope,
    token: Option<&str>,
    client_ip: Option<&str>,
) -> Result<bool> {
    let settings = crate::controllers::site_settings::load_current(ctx).await?;
    if is_enabled(&settings, scope) {
        verify_token(&settings, token, client_ip).await?;
        Ok(true)
    } else {
        Ok(false)
    }
}

View File

@@ -0,0 +1,122 @@
use loco_rs::prelude::*;
use serde_json::Value;
use web_push::{
ContentEncoding, HyperWebPushClient, SubscriptionInfo, Urgency, VapidSignatureBuilder,
WebPushClient, WebPushMessageBuilder,
};
use crate::models::_entities::site_settings;
// Environment-variable fallbacks for VAPID configuration, consulted when the
// corresponding site-settings fields are empty. The public key has both a
// current (PUBLIC_*) and a legacy (TERMI_*) name; see `public_key` below.
const ENV_PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY: &str = "PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY";
const ENV_LEGACY_WEB_PUSH_VAPID_PUBLIC_KEY: &str = "TERMI_WEB_PUSH_VAPID_PUBLIC_KEY";
const ENV_WEB_PUSH_VAPID_PRIVATE_KEY: &str = "TERMI_WEB_PUSH_VAPID_PRIVATE_KEY";
const ENV_WEB_PUSH_VAPID_SUBJECT: &str = "TERMI_WEB_PUSH_VAPID_SUBJECT";
/// Reads `name` from the process environment, returning the trimmed value,
/// or `None` when the variable is unset or blank.
fn env_value(name: &str) -> Option<String> {
    let raw = std::env::var(name).ok()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Normalizes an optional settings field: trims it and treats blank strings
/// as absent.
fn configured_value(value: Option<&String>) -> Option<String> {
    value
        .map(|item| item.trim())
        .filter(|trimmed| !trimmed.is_empty())
        .map(str::to_string)
}
/// Resolves the VAPID public key: site settings first, then the current env
/// var, then the legacy env var.
pub fn public_key(settings: &site_settings::Model) -> Option<String> {
    if let Some(key) = configured_value(settings.web_push_vapid_public_key.as_ref()) {
        return Some(key);
    }
    env_value(ENV_PUBLIC_WEB_PUSH_VAPID_PUBLIC_KEY)
        .or_else(|| env_value(ENV_LEGACY_WEB_PUSH_VAPID_PUBLIC_KEY))
}
/// Resolves the VAPID private key from settings, falling back to the env var.
pub fn private_key(settings: &site_settings::Model) -> Option<String> {
    match configured_value(settings.web_push_vapid_private_key.as_ref()) {
        Some(key) => Some(key),
        None => env_value(ENV_WEB_PUSH_VAPID_PRIVATE_KEY),
    }
}
/// Resolves the VAPID subject from settings, falling back to the env var.
pub fn vapid_subject(settings: &site_settings::Model) -> Option<String> {
    match configured_value(settings.web_push_vapid_subject.as_ref()) {
        Some(subject) => Some(subject),
        None => env_value(ENV_WEB_PUSH_VAPID_SUBJECT),
    }
}
/// Picks the VAPID `sub` claim: the configured subject, else the site URL
/// when it is an http(s) URL, else a placeholder mailto address.
fn effective_vapid_subject(settings: &site_settings::Model, site_url: Option<&str>) -> String {
    if let Some(subject) = vapid_subject(settings) {
        return subject;
    }
    if let Some(url) = site_url {
        let trimmed = url.trim();
        if trimmed.starts_with("http://") || trimmed.starts_with("https://") {
            return trimmed.to_string();
        }
    }
    "mailto:noreply@example.com".to_string()
}
/// True when a VAPID public key is resolvable from settings or the environment.
pub fn public_key_configured(settings: &site_settings::Model) -> bool {
    matches!(public_key(settings), Some(_))
}
/// True when a VAPID private key is resolvable from settings or the environment.
pub fn private_key_configured(settings: &site_settings::Model) -> bool {
    matches!(private_key(settings), Some(_))
}
/// Web push is active only when the feature flag is on AND both VAPID keys
/// are configured.
pub fn is_enabled(settings: &site_settings::Model) -> bool {
    if !settings.web_push_enabled.unwrap_or(false) {
        return false;
    }
    public_key_configured(settings) && private_key_configured(settings)
}
pub fn subscription_info_from_metadata(metadata: Option<&Value>) -> Result<SubscriptionInfo> {
let subscription = metadata
.and_then(Value::as_object)
.and_then(|object| object.get("subscription"))
.cloned()
.ok_or_else(|| Error::BadRequest("browser push metadata 缺少 subscription".to_string()))?;
serde_json::from_value::<SubscriptionInfo>(subscription)
.map_err(|_| Error::BadRequest("browser push metadata 非法".to_string()))
}
/// Sends an encrypted Web Push message to a single subscriber.
///
/// # Arguments
/// * `settings` - site settings holding the VAPID key material.
/// * `subscription_info` - the subscriber's push endpoint and keys.
/// * `payload` - raw bytes to encrypt (aes128gcm) and deliver.
/// * `urgency` - optional delivery-urgency hint passed to the push service.
/// * `ttl` - seconds the push service may retain the undelivered message.
/// * `site_url` - fallback source for the VAPID `sub` claim when no explicit
///   subject is configured.
///
/// # Errors
/// Returns `BadRequest` when the VAPID private key is missing, signature
/// building/signing fails, the message fails to build, or delivery fails.
pub async fn send_payload(
    settings: &site_settings::Model,
    subscription_info: &SubscriptionInfo,
    payload: &[u8],
    urgency: Option<Urgency>,
    ttl: u32,
    site_url: Option<&str>,
) -> Result<()> {
    let private_key = private_key(settings)
        .ok_or_else(|| Error::BadRequest("web push VAPID private key 未配置".to_string()))?;
    // Sign with VAPID: the signature binds this sender to the subscriber's
    // push endpoint and carries the `sub` contact claim.
    let mut signature_builder = VapidSignatureBuilder::from_base64(&private_key, subscription_info)
        .map_err(|error| Error::BadRequest(format!("web push vapid build failed: {error}")))?;
    signature_builder.add_claim("sub", effective_vapid_subject(settings, site_url));
    let signature = signature_builder
        .build()
        .map_err(|error| Error::BadRequest(format!("web push vapid sign failed: {error}")))?;
    let mut builder = WebPushMessageBuilder::new(subscription_info);
    builder.set_ttl(ttl);
    if let Some(urgency) = urgency {
        builder.set_urgency(urgency);
    }
    builder.set_payload(ContentEncoding::Aes128Gcm, payload);
    builder.set_vapid_signature(signature);
    // NOTE(review): a new client per call — presumably acceptable for current
    // volume; consider a shared client if send rate grows. TODO confirm.
    let client = HyperWebPushClient::new();
    let message = builder
        .build()
        .map_err(|error| Error::BadRequest(format!("web push message build failed: {error}")))?;
    client
        .send(message)
        .await
        .map_err(|error| Error::BadRequest(format!("web push send failed: {error}")))?;
    Ok(())
}

View File

@@ -1 +0,0 @@
{"rustc_fingerprint":10734737548331824535,"outputs":{"17747080675513052775":{"success":true,"status":"","code":0,"stdout":"rustc 1.92.0 (ded5c06cf 2025-12-08)\nbinary: rustc\ncommit-hash: ded5c06cf21d2b93bffd5d884aa6e96934ee4234\ncommit-date: 2025-12-08\nhost: x86_64-pc-windows-msvc\nrelease: 1.92.0\nLLVM version: 21.1.3\n","stderr":""},"7971740275564407648":{"success":true,"status":"","code":0,"stdout":"___.exe\nlib___.rlib\n___.dll\n___.dll\n___.lib\n___.dll\nC:\\Users\\Andorid\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\npacked\n___\ndebug_assertions\npanic=\"unwind\"\nproc_macro\ntarget_abi=\"\"\ntarget_arch=\"x86_64\"\ntarget_endian=\"little\"\ntarget_env=\"msvc\"\ntarget_family=\"windows\"\ntarget_feature=\"cmpxchg16b\"\ntarget_feature=\"fxsr\"\ntarget_feature=\"sse\"\ntarget_feature=\"sse2\"\ntarget_feature=\"sse3\"\ntarget_has_atomic=\"128\"\ntarget_has_atomic=\"16\"\ntarget_has_atomic=\"32\"\ntarget_has_atomic=\"64\"\ntarget_has_atomic=\"8\"\ntarget_has_atomic=\"ptr\"\ntarget_os=\"windows\"\ntarget_pointer_width=\"64\"\ntarget_vendor=\"pc\"\nwindows\n","stderr":""}},"successes":{}}

View File

@@ -1,3 +0,0 @@
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by cargo.
# For information about cache directory tags see https://bford.info/cachedir/