chore: reorganize project into monorepo

This commit is contained in:
2026-03-28 10:40:22 +08:00
parent 60367a5f51
commit 1455d93246
201 changed files with 30081 additions and 93 deletions

View File

@@ -0,0 +1,648 @@
use loco_rs::prelude::*;
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, IntoActiveModel, QueryFilter, QueryOrder, Set,
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::fs;
use std::path::{Path, PathBuf};
use crate::models::_entities::{categories, posts, tags};
/// Directory (relative to the process working directory) where markdown
/// posts are stored on disk, one `<slug>.md` file per post.
pub const MARKDOWN_POSTS_DIR: &str = "content/posts";
/// Legacy YAML fixture used to seed `MARKDOWN_POSTS_DIR` the first time the
/// directory contains no markdown files.
const FIXTURE_POSTS_FILE: &str = "src/fixtures/posts.yaml";
/// YAML frontmatter fields parsed from the top of a markdown file.
///
/// Every field is optional; `parse_markdown_source` supplies the defaults
/// (slug from file stem, title from first heading, etc.).
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
struct MarkdownFrontmatter {
    title: Option<String>,
    slug: Option<String>,
    description: Option<String>,
    category: Option<String>,
    tags: Option<Vec<String>>,
    post_type: Option<String>,
    image: Option<String>,
    pinned: Option<bool>,
    published: Option<bool>,
}
/// A fully-resolved markdown post after parsing: all frontmatter defaults
/// applied and the body separated from the frontmatter.
#[derive(Debug, Clone, Serialize)]
pub struct MarkdownPost {
    pub title: String,
    pub slug: String,
    pub description: Option<String>,
    /// Markdown body with the frontmatter block removed.
    pub content: String,
    pub category: Option<String>,
    pub tags: Vec<String>,
    /// Defaults to "article" when the frontmatter omits it.
    pub post_type: String,
    pub image: Option<String>,
    pub pinned: bool,
    /// Defaults to `true` when the frontmatter omits it.
    pub published: bool,
    /// Path of the source file this post was parsed from (lossy UTF-8).
    pub file_path: String,
}
/// Input payload for creating a new post via `create_markdown_post`.
///
/// `slug` is optional: when absent (or blank) it is derived from `title`
/// via `slugify`.
#[derive(Debug, Clone)]
pub struct MarkdownPostDraft {
    pub title: String,
    pub slug: Option<String>,
    pub description: Option<String>,
    pub content: String,
    pub category: Option<String>,
    pub tags: Vec<String>,
    pub post_type: String,
    pub image: Option<String>,
    pub pinned: bool,
    pub published: bool,
}
/// One uploaded file handed to `import_markdown_documents`: the original
/// file name (used for extension filtering and slug fallback) plus its raw
/// text content.
#[derive(Debug, Clone)]
pub struct MarkdownImportFile {
    pub file_name: String,
    pub content: String,
}
/// Shape of one entry in the legacy `src/fixtures/posts.yaml` seed file,
/// used only when bootstrapping an empty posts directory.
#[derive(Debug, Clone, Deserialize)]
struct LegacyFixturePost {
    title: String,
    slug: String,
    content: String,
    // Maps onto `MarkdownPost::description`.
    excerpt: Option<String>,
    category: Option<String>,
    tags: Option<Vec<String>>,
    pinned: Option<bool>,
    published: Option<bool>,
}
fn io_error(err: std::io::Error) -> Error {
Error::string(&err.to_string())
}
fn yaml_error(err: serde_yaml::Error) -> Error {
Error::string(&err.to_string())
}
fn posts_dir() -> PathBuf {
PathBuf::from(MARKDOWN_POSTS_DIR)
}
/// Path of the markdown file backing the post with the given `slug`.
pub fn markdown_post_path(slug: &str) -> PathBuf {
    let file_name = format!("{slug}.md");
    posts_dir().join(file_name)
}
/// Convert Windows-style CRLF line endings to plain LF.
fn normalize_newlines(input: &str) -> String {
    input.split("\r\n").collect::<Vec<_>>().join("\n")
}
/// Trim an optional string, collapsing blank/whitespace-only values to `None`.
fn trim_to_option(input: Option<String>) -> Option<String> {
    match input {
        Some(value) => {
            let trimmed = value.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        None => None,
    }
}
/// Build a URL-safe slug: ASCII alphanumerics are lowercased, runs of
/// whitespace/`-`/`_` collapse to a single dash, and every other character
/// is dropped without inserting a separator. Leading/trailing dashes are
/// stripped from the result.
fn slugify(value: &str) -> String {
    let mut result = String::new();
    for ch in value.trim().chars() {
        if ch.is_ascii_alphanumeric() {
            result.push(ch.to_ascii_lowercase());
        } else if ch.is_whitespace() || ch == '-' || ch == '_' {
            // Collapse consecutive separators into one dash.
            if !result.ends_with('-') {
                result.push('-');
            }
        }
        // All other characters (punctuation, non-ASCII) are silently dropped.
    }
    result.trim_matches('-').to_string()
}
/// Derive a short excerpt (first 180 characters of the first prose line)
/// from a markdown body. Lines inside fenced code blocks, blank lines, and
/// heading lines (`#...`) are skipped.
fn excerpt_from_content(content: &str) -> Option<String> {
    let mut inside_fence = false;
    for raw_line in content.lines() {
        let line = raw_line.trim();
        if line.starts_with("```") {
            // Toggle fence state on every ``` marker (open or close).
            inside_fence = !inside_fence;
        } else if !inside_fence && !line.is_empty() && !line.starts_with('#') {
            let snippet: String = line.chars().take(180).collect();
            return if snippet.is_empty() { None } else { Some(snippet) };
        }
    }
    None
}
/// Extract the first non-empty level-1 heading (`# ...`) from a markdown
/// body, if any.
fn title_from_content(content: &str) -> Option<String> {
    for line in content.lines() {
        if let Some(heading) = line.trim().strip_prefix("# ") {
            let heading = heading.trim();
            if !heading.is_empty() {
                return Some(heading.to_string());
            }
        }
    }
    None
}
/// Split a raw markdown document into its YAML frontmatter and body.
///
/// Documents that do not begin with a `---` line are returned unchanged
/// with a default (all-`None`) frontmatter. A frontmatter block that is
/// opened but never closed is an error.
///
/// # Errors
/// Returns an error when the opening `---` has no closing delimiter, or
/// when the frontmatter is not valid YAML for [`MarkdownFrontmatter`].
fn split_frontmatter(raw: &str) -> Result<(MarkdownFrontmatter, String)> {
    let normalized = normalize_newlines(raw);
    if !normalized.starts_with("---\n") {
        return Ok((MarkdownFrontmatter::default(), normalized));
    }
    // Skip the opening `---\n`.
    let rest = &normalized[4..];
    // Accept either a closing delimiter followed by body content, or a
    // closing delimiter at end-of-file with no trailing newline (the body
    // is then empty). The second case previously produced a spurious
    // "not closed" error.
    let (frontmatter, content) = if let Some(end_index) = rest.find("\n---\n") {
        (&rest[..end_index], rest[end_index + 5..].to_string())
    } else if let Some(frontmatter) = rest.strip_suffix("\n---") {
        (frontmatter, String::new())
    } else {
        return Err(Error::string("Markdown frontmatter is not closed"));
    };
    let parsed = serde_yaml::from_str::<MarkdownFrontmatter>(frontmatter).map_err(yaml_error)?;
    Ok((parsed, content))
}
/// Read a markdown file from disk and parse it into a [`MarkdownPost`].
fn parse_markdown_post(path: &Path) -> Result<MarkdownPost> {
    let raw = fs::read_to_string(path).map_err(io_error)?;
    // The file stem seeds the slug when the frontmatter does not provide one.
    let stem = path
        .file_stem()
        .and_then(|value| value.to_str())
        .unwrap_or("post");
    parse_markdown_source(stem, &raw, &path.to_string_lossy())
}
/// Parse raw markdown text (frontmatter + body) into a [`MarkdownPost`],
/// applying fallbacks for every missing frontmatter field.
///
/// `file_stem` seeds the slug, `file_path` is recorded verbatim on the
/// resulting post.
///
/// # Errors
/// Propagates frontmatter parse errors from [`split_frontmatter`].
fn parse_markdown_source(file_stem: &str, raw: &str, file_path: &str) -> Result<MarkdownPost> {
    let (frontmatter, content) = split_frontmatter(raw)?;
    // Move fields out of `frontmatter` directly — the previous `.clone()`
    // on every field was redundant (the struct is owned here).
    // Slug preference: frontmatter, then the file stem.
    let slug = trim_to_option(frontmatter.slug).unwrap_or_else(|| file_stem.to_string());
    // Title preference: frontmatter, then first `# ` heading, then the slug.
    let title = trim_to_option(frontmatter.title)
        .or_else(|| title_from_content(&content))
        .unwrap_or_else(|| slug.clone());
    // Description preference: frontmatter, then the first body paragraph.
    let description =
        trim_to_option(frontmatter.description).or_else(|| excerpt_from_content(&content));
    let category = trim_to_option(frontmatter.category);
    let tags = frontmatter
        .tags
        .unwrap_or_default()
        .into_iter()
        .map(|item| item.trim().to_string())
        .filter(|item| !item.is_empty())
        .collect::<Vec<_>>();
    Ok(MarkdownPost {
        title,
        slug,
        description,
        // Drop the blank line(s) left between the frontmatter and the body.
        content: content.trim_start_matches('\n').to_string(),
        category,
        tags,
        post_type: trim_to_option(frontmatter.post_type).unwrap_or_else(|| "article".to_string()),
        image: trim_to_option(frontmatter.image),
        pinned: frontmatter.pinned.unwrap_or(false),
        published: frontmatter.published.unwrap_or(true),
        file_path: file_path.to_string(),
    })
}
/// Render a [`MarkdownPost`] back into a markdown document with YAML
/// frontmatter.
///
/// Every scalar value is serialized through `serde_yaml` so that values
/// containing YAML metacharacters (colons, quotes, leading symbols) stay
/// parseable by [`split_frontmatter`]. Previously only `title` and
/// `description` were escaped; a category like `"C: stuff"` produced
/// unparseable frontmatter.
fn build_markdown_document(post: &MarkdownPost) -> String {
    // Serialize one scalar as a single YAML value with the trailing newline
    // stripped. NOTE(review): serde_yaml may emit block scalars for
    // multi-line input; frontmatter values here are expected to be
    // single-line — confirm for multi-line titles/descriptions.
    fn yaml_scalar(value: &str) -> String {
        serde_yaml::to_string(value)
            .map(|rendered| rendered.trim().to_string())
            .unwrap_or_else(|_| format!("{value:?}"))
    }
    let mut lines = vec![
        "---".to_string(),
        format!("title: {}", yaml_scalar(&post.title)),
        format!("slug: {}", yaml_scalar(&post.slug)),
    ];
    if let Some(description) = &post.description {
        lines.push(format!("description: {}", yaml_scalar(description)));
    }
    if let Some(category) = &post.category {
        lines.push(format!("category: {}", yaml_scalar(category)));
    }
    lines.push(format!("post_type: {}", yaml_scalar(&post.post_type)));
    lines.push(format!("pinned: {}", post.pinned));
    lines.push(format!("published: {}", post.published));
    if let Some(image) = &post.image {
        lines.push(format!("image: {}", yaml_scalar(image)));
    }
    if !post.tags.is_empty() {
        lines.push("tags:".to_string());
        for tag in &post.tags {
            lines.push(format!(" - {}", yaml_scalar(tag)));
        }
    }
    lines.push("---".to_string());
    // Blank line between the frontmatter and the body, and a trailing
    // newline at end of file.
    lines.push(String::new());
    lines.push(post.content.trim().to_string());
    lines.push(String::new());
    lines.join("\n")
}
/// Guarantee the posts directory exists and contains at least one markdown
/// file, seeding it from the legacy YAML fixtures when it is empty.
fn ensure_markdown_posts_bootstrapped() -> Result<()> {
    let dir = posts_dir();
    fs::create_dir_all(&dir).map_err(io_error)?;
    // Any readable entry with an `md` extension means we are already seeded.
    let already_seeded = fs::read_dir(&dir).map_err(io_error)?.any(|entry| {
        entry
            .map(|entry| entry.path().extension().and_then(|ext| ext.to_str()) == Some("md"))
            .unwrap_or(false)
    });
    if already_seeded {
        return Ok(());
    }
    let raw = fs::read_to_string(FIXTURE_POSTS_FILE).map_err(io_error)?;
    let fixtures: Vec<LegacyFixturePost> = serde_yaml::from_str(&raw).map_err(yaml_error)?;
    for fixture in fixtures {
        let target = markdown_post_path(&fixture.slug);
        let post = MarkdownPost {
            title: fixture.title,
            slug: fixture.slug,
            description: trim_to_option(fixture.excerpt),
            content: fixture.content,
            category: trim_to_option(fixture.category),
            tags: fixture.tags.unwrap_or_default(),
            post_type: "article".to_string(),
            image: None,
            pinned: fixture.pinned.unwrap_or(false),
            published: fixture.published.unwrap_or(true),
            file_path: target.to_string_lossy().into_owned(),
        };
        fs::write(&target, build_markdown_document(&post)).map_err(io_error)?;
    }
    Ok(())
}
/// Parse every `*.md` file in the posts directory (bootstrapping it first
/// when empty) and return the posts sorted by slug.
fn load_markdown_posts_from_disk() -> Result<Vec<MarkdownPost>> {
    ensure_markdown_posts_bootstrapped()?;
    let mut collected = Vec::new();
    for entry in fs::read_dir(posts_dir()).map_err(io_error)? {
        // Unreadable directory entries are skipped, matching read_dir's
        // best-effort semantics here.
        let Ok(entry) = entry else { continue };
        let path = entry.path();
        if path.extension().and_then(|ext| ext.to_str()) != Some("md") {
            continue;
        }
        collected.push(parse_markdown_post(&path)?);
    }
    collected.sort_by(|a, b| a.slug.cmp(&b.slug));
    Ok(collected)
}
/// Ensure every tag referenced by the markdown posts has a row in the
/// `tags` table.
///
/// Each distinct slug is looked up (and inserted at most once) per call —
/// previously the same slug was re-queried for every occurrence across all
/// posts. Empty slugs are skipped, matching `canonicalize_tags` and
/// `dedupe_tags`. Insert failures are deliberately ignored (best effort,
/// e.g. a concurrent insert racing on the slug).
async fn sync_tags_from_posts(ctx: &AppContext, posts: &[MarkdownPost]) -> Result<()> {
    // Slugs already checked in this call.
    let mut checked_slugs = std::collections::HashSet::new();
    for post in posts {
        for tag_name in &post.tags {
            let slug = slugify(tag_name);
            if slug.is_empty() || !checked_slugs.insert(slug.clone()) {
                continue;
            }
            let existing = tags::Entity::find()
                .filter(tags::Column::Slug.eq(&slug))
                .one(&ctx.db)
                .await?;
            if existing.is_none() {
                let item = tags::ActiveModel {
                    name: Set(Some(tag_name.clone())),
                    slug: Set(slug),
                    ..Default::default()
                };
                let _ = item.insert(&ctx.db).await;
            }
        }
    }
    Ok(())
}
/// Resolve `raw_name` to the canonical category name, creating or
/// backfilling the `categories` row as needed.
///
/// Returns `None` for blank input. When a row already exists for the slug
/// with a non-blank name, that stored name wins (keeps casing stable);
/// a blank stored name is backfilled with the incoming one.
///
/// # Errors
/// Propagates database errors from the find/update/insert calls.
async fn ensure_category(ctx: &AppContext, raw_name: &str) -> Result<Option<String>> {
    let name = raw_name.trim();
    if name.is_empty() {
        return Ok(None);
    }
    let slug = slugify(name);
    let existing = categories::Entity::find()
        .filter(categories::Column::Slug.eq(&slug))
        .one(&ctx.db)
        .await?;
    if let Some(category) = existing {
        // Prefer the name already stored for this slug.
        if let Some(existing_name) = category
            .name
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
        {
            return Ok(Some(existing_name.to_string()));
        }
        // Row exists but has no usable name: backfill it.
        let mut category_model = category.into_active_model();
        category_model.name = Set(Some(name.to_string()));
        let updated = category_model.update(&ctx.db).await?;
        // `unwrap_or_else` instead of the previous `.or_else(|| Some(...))`
        // — same result, clearer intent.
        return Ok(Some(updated.name.unwrap_or_else(|| name.to_string())));
    }
    let created = categories::ActiveModel {
        name: Set(Some(name.to_string())),
        slug: Set(slug),
        ..Default::default()
    }
    .insert(&ctx.db)
    .await?;
    Ok(Some(created.name.unwrap_or_else(|| name.to_string())))
}
/// Resolve a list of raw tag names to their canonical display names,
/// creating missing `tags` rows along the way.
///
/// - Blank names and names that slugify to an empty string are dropped.
/// - Duplicate slugs within `raw_tags` are collapsed (first occurrence wins).
/// - When a row already exists with a non-blank name, that stored name is
///   used; a blank stored name is backfilled with the incoming one.
async fn canonicalize_tags(ctx: &AppContext, raw_tags: &[String]) -> Result<Vec<String>> {
    let mut canonical_tags = Vec::new();
    // Slugs already handled in this call, to dedupe the input list.
    let mut seen = std::collections::HashSet::new();
    for tag_name in raw_tags {
        let trimmed = tag_name.trim();
        if trimmed.is_empty() {
            continue;
        }
        let slug = slugify(trimmed);
        // `insert` returns false when the slug was already seen.
        if slug.is_empty() || !seen.insert(slug.clone()) {
            continue;
        }
        let existing = tags::Entity::find()
            .filter(tags::Column::Slug.eq(&slug))
            .one(&ctx.db)
            .await?;
        let canonical_name = if let Some(tag) = existing {
            if let Some(existing_name) = tag.name.as_deref().map(str::trim).filter(|value| !value.is_empty()) {
                // Keep the casing/spelling already stored in the database.
                existing_name.to_string()
            } else {
                // Row exists but has no usable name: backfill it with the
                // incoming name and use whatever the update returns.
                let mut tag_model = tag.into_active_model();
                tag_model.name = Set(Some(trimmed.to_string()));
                tag_model
                    .update(&ctx.db)
                    .await?
                    .name
                    .unwrap_or_else(|| trimmed.to_string())
            }
        } else {
            // No row yet: insert one and use the name the insert returns.
            tags::ActiveModel {
                name: Set(Some(trimmed.to_string())),
                slug: Set(slug),
                ..Default::default()
            }
            .insert(&ctx.db)
            .await?
            .name
            .unwrap_or_else(|| trimmed.to_string())
        };
        canonical_tags.push(canonical_name);
    }
    Ok(canonical_tags)
}
/// Delete tag rows whose normalized slug duplicates an earlier row
/// (ordered by id, so the oldest row survives).
async fn dedupe_tags(ctx: &AppContext) -> Result<()> {
    let existing_tags = tags::Entity::find()
        .order_by_asc(tags::Column::Id)
        .all(&ctx.db)
        .await?;
    let mut seen_keys = std::collections::HashSet::new();
    for tag in existing_tags {
        // Fall back to slugifying the name when the stored slug is blank.
        let key = if tag.slug.trim().is_empty() {
            tag.name.as_deref().map(slugify).unwrap_or_default()
        } else {
            slugify(&tag.slug)
        };
        // Rows with no derivable key are kept; so is the first row per key.
        let duplicate = !key.is_empty() && !seen_keys.insert(key);
        if duplicate {
            // Best-effort delete: a failure here should not abort the sync.
            let _ = tag.delete(&ctx.db).await;
        }
    }
    Ok(())
}
/// Delete category rows whose normalized slug duplicates an earlier row
/// (ordered by id, so the oldest row survives).
async fn dedupe_categories(ctx: &AppContext) -> Result<()> {
    let existing_categories = categories::Entity::find()
        .order_by_asc(categories::Column::Id)
        .all(&ctx.db)
        .await?;
    let mut seen_keys = std::collections::HashSet::new();
    for category in existing_categories {
        // Fall back to slugifying the name when the stored slug is blank.
        let key = if category.slug.trim().is_empty() {
            category.name.as_deref().map(slugify).unwrap_or_default()
        } else {
            slugify(&category.slug)
        };
        // Rows with no derivable key are kept; so is the first row per key.
        let duplicate = !key.is_empty() && !seen_keys.insert(key);
        if duplicate {
            // Best-effort delete: a failure here should not abort the sync.
            let _ = category.delete(&ctx.db).await;
        }
    }
    Ok(())
}
/// Load all markdown posts from disk and upsert them into the `posts` table
/// (matched by slug), then reconcile the `tags` and `categories` tables.
///
/// Returns the parsed markdown posts in slug order.
/// NOTE(review): `published` is parsed from frontmatter but never written to
/// the database model here — confirm whether the posts table tracks it.
pub async fn sync_markdown_posts(ctx: &AppContext) -> Result<Vec<MarkdownPost>> {
    let markdown_posts = load_markdown_posts_from_disk()?;
    for post in &markdown_posts {
        // Canonicalize category/tags first so the post row stores names as
        // they appear in their own tables.
        let canonical_category = match post.category.as_deref() {
            Some(category) => ensure_category(ctx, category).await?,
            None => None,
        };
        let canonical_tags = canonicalize_tags(ctx, &post.tags).await?;
        let existing = posts::Entity::find()
            .filter(posts::Column::Slug.eq(&post.slug))
            .one(&ctx.db)
            .await?;
        let has_existing = existing.is_some();
        // Reuse the existing row's active model for updates; a default
        // (empty) model becomes an insert.
        let mut model = existing
            .map(|item| item.into_active_model())
            .unwrap_or_default();
        model.title = Set(Some(post.title.clone()));
        model.slug = Set(post.slug.clone());
        model.description = Set(post.description.clone());
        model.content = Set(Some(post.content.clone()));
        model.category = Set(canonical_category);
        // Tags are stored as a JSON array of strings, or NULL when empty.
        model.tags = Set(if canonical_tags.is_empty() {
            None
        } else {
            Some(Value::Array(
                canonical_tags.into_iter().map(Value::String).collect(),
            ))
        });
        model.post_type = Set(Some(post.post_type.clone()));
        model.image = Set(post.image.clone());
        model.pinned = Set(Some(post.pinned));
        // Write errors are intentionally ignored so one bad row does not
        // abort the whole sync.
        if has_existing {
            let _ = model.update(&ctx.db).await;
        } else {
            let _ = model.insert(&ctx.db).await;
        }
    }
    // Ensure tag rows exist for every referenced tag, then drop duplicate
    // tag/category rows sharing a slug (lowest id wins).
    sync_tags_from_posts(ctx, &markdown_posts).await?;
    dedupe_tags(ctx).await?;
    dedupe_categories(ctx).await?;
    Ok(markdown_posts)
}
pub fn read_markdown_document(slug: &str) -> Result<(String, String)> {
let path = markdown_post_path(slug);
if !path.exists() {
return Err(Error::NotFound);
}
let raw = fs::read_to_string(&path).map_err(io_error)?;
Ok((path.to_string_lossy().to_string(), raw))
}
/// Overwrite the markdown file for `slug` with `markdown` (line endings
/// normalized to LF), then re-sync all posts into the database.
///
/// Returns the freshly parsed post for the written file.
pub async fn write_markdown_document(
    ctx: &AppContext,
    slug: &str,
    markdown: &str,
) -> Result<MarkdownPost> {
    ensure_markdown_posts_bootstrapped()?;
    let target = markdown_post_path(slug);
    let normalized = normalize_newlines(markdown);
    fs::write(&target, normalized).map_err(io_error)?;
    let parsed = parse_markdown_post(&target)?;
    sync_markdown_posts(ctx).await?;
    Ok(parsed)
}
pub async fn create_markdown_post(
ctx: &AppContext,
draft: MarkdownPostDraft,
) -> Result<MarkdownPost> {
ensure_markdown_posts_bootstrapped()?;
let title = draft.title.trim().to_string();
if title.is_empty() {
return Err(Error::BadRequest("title is required".to_string()));
}
let slug = draft
.slug
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
.unwrap_or_else(|| slugify(&title));
if slug.is_empty() {
return Err(Error::BadRequest("slug is required".to_string()));
}
let post = MarkdownPost {
title,
slug: slug.clone(),
description: trim_to_option(draft.description),
content: draft.content.trim().to_string(),
category: trim_to_option(draft.category),
tags: draft
.tags
.into_iter()
.map(|tag| tag.trim().to_string())
.filter(|tag| !tag.is_empty())
.collect(),
post_type: {
let normalized = draft.post_type.trim();
if normalized.is_empty() {
"article".to_string()
} else {
normalized.to_string()
}
},
image: trim_to_option(draft.image),
pinned: draft.pinned,
published: draft.published,
file_path: markdown_post_path(&slug).to_string_lossy().to_string(),
};
fs::write(markdown_post_path(&slug), build_markdown_document(&post)).map_err(io_error)?;
sync_markdown_posts(ctx).await?;
parse_markdown_post(&markdown_post_path(&slug))
}
/// Import uploaded markdown files into the posts directory, then re-sync
/// the database.
///
/// Files whose extension is not `md`/`markdown`, or whose derived slug is
/// empty or unsafe as a file name, are silently skipped. Returns the
/// parsed post for every file that was written.
///
/// # Errors
/// Propagates frontmatter parse errors, filesystem errors, and sync errors.
pub async fn import_markdown_documents(
    ctx: &AppContext,
    files: Vec<MarkdownImportFile>,
) -> Result<Vec<MarkdownPost>> {
    ensure_markdown_posts_bootstrapped()?;
    let mut imported_slugs = Vec::new();
    for file in files {
        let path = Path::new(&file.file_name);
        let extension = path
            .extension()
            .and_then(|value| value.to_str())
            .unwrap_or_default()
            .to_ascii_lowercase();
        if extension != "md" && extension != "markdown" {
            continue;
        }
        let file_stem = path
            .file_stem()
            .and_then(|value| value.to_str())
            .unwrap_or("imported-post")
            .to_string();
        let parsed = parse_markdown_source(&file_stem, &file.content, &file.file_name)?;
        // Slug preference: frontmatter slug, then slugified file stem.
        let slug = if parsed.slug.trim().is_empty() {
            slugify(&file_stem)
        } else {
            parsed.slug.clone()
        };
        // The slug becomes a file name under `content/posts`; skip anything
        // empty or that could traverse outside that directory (a frontmatter
        // slug is untrusted input here).
        if slug.is_empty() || slug.contains('/') || slug.contains('\\') || slug.contains("..") {
            continue;
        }
        fs::write(markdown_post_path(&slug), normalize_newlines(&file.content))
            .map_err(io_error)?;
        imported_slugs.push(slug);
    }
    sync_markdown_posts(ctx).await?;
    // Re-parse each written file so results reflect on-disk state.
    imported_slugs
        .into_iter()
        .map(|slug| parse_markdown_post(&markdown_post_path(&slug)))
        .collect()
}

View File

@@ -0,0 +1 @@
/// Markdown content pipeline: parsing, syncing, and importing posts.
pub mod content;