
I should commit more often

Adrian Hedqvist 2025-04-02 22:48:45 +02:00
parent 20d95079b5
commit ef0ddaef72
Signed by: tollyx
SSH key fingerprint: SHA256:NqZilNUilqR38F1LQMrz2E65ZsA621eT3lO+FqHS48Y
23 changed files with 345 additions and 220 deletions

@@ -18,7 +18,7 @@ serde_derive = "1.0.219"
serde_json = "1.0.140"
syntect = "5.2.0"
tera = { version = "1.20.0", features = ["builtins"] }
-time = { version = "0.3.40", features = ["serde"] }
+time = { version = "0.3.40", features = ["serde", "macros"] }
tokio = { version = "1.44.1", features = ["full", "tracing"] }
toml = "0.8.20"
tower = { version = "0.5.2", features = ["full"] }

@@ -8,8 +8,7 @@ services:
depends_on:
- otel-collector
environment:
-TLX_OTLP_ENABLED: true
-TLX_LOG: debug
+TLX_LOGGING: debug
otel-collector:
image: otel/opentelemetry-collector:latest
restart: unless-stopped

@@ -1,7 +1,7 @@
title = "tollyx.se"
base_url = "http://localhost:8080/"
bind_address = "0.0.0.0:8080"
-logging = "website=debug"
-log_format = "compact"
+logging = "website=debug,warn"
+log_format = "pretty"
drafts = true
watch = true

@@ -7,12 +7,12 @@ app = "cool-glade-6208"
primary_region = "arn"
[http_service]
-internal_port = 8080
-force_https = true
-auto_stop_machines = true
-auto_start_machines = true
-min_machines_running = 0
+internal_port = 8080
+force_https = true
+auto_stop_machines = "stop"
+auto_start_machines = true
+min_machines_running = 0
[metrics]
-port = 8180
-path = "/metrics"
+port = 8180
+path = "/metrics"

@@ -29,3 +29,5 @@ anyway here's a new todo list:
- [ ] other pages (now I've got it set up so I can write any page in markdown!!!)
- [ ] graphviz (or something else) to svg rendering (want it to be serverside)
- [ ] image processing (resizing, conversion to jpgxl, avif, others?)
+- [ ] Obsidian-style wiki-links
+- [ ] YAML-frontmatter for even more obsidian compat (YAML is a pain, though...)

@@ -0,0 +1,7 @@
+++
title = "draft test copy 2"
draft = true
date = 2025-04-02T20:59:21+02:00
+++
wow look it's a hidden post because it's marked as a draft

@@ -0,0 +1,7 @@
+++
title = "draft test copy 3"
draft = true
date = 2025-04-02T20:59:28+02:00
+++
wow look it's a hidden post because it's marked as a draft

@@ -0,0 +1,7 @@
+++
title = "draft test copy 4"
draft = true
date = 2025-04-02T20:59:31+02:00
+++
wow look it's a hidden post because it's marked as a draft

@@ -0,0 +1,7 @@
+++
title = "draft test copy 5"
draft = true
date = 2025-04-02T20:59:35+02:00
+++
wow look it's a hidden post because it's marked as a draft

@@ -0,0 +1,7 @@
+++
title = "draft test copy 6"
draft = true
date = 2025-04-02T20:59:38+02:00
+++
wow look it's a hidden post because it's marked as a draft

@@ -0,0 +1,7 @@
+++
title = "draft test copy 7"
draft = true
date = 2025-04-02T20:59:42+02:00
+++
wow look it's a hidden post because it's marked as a draft

@@ -0,0 +1,7 @@
+++
title = "draft test copy"
draft = true
date = 2025-04-02T20:59:17+02:00
+++
wow look it's a hidden post because it's marked as a draft

@@ -8,48 +8,91 @@ use crate::{AppState, page::Page, tag::Tag};
#[derive(Serialize, Debug)]
struct FeedContext<'a> {
-feed_url: &'a str,
-base_url: &'a str,
-next_url: Option<&'a str>,
-previous_url: Option<&'a str>,
-first_url: Option<&'a str>,
-last_url: Option<&'a str>,
+feed_url: String,
+base_url: String,
+next_url: Option<String>,
+previous_url: Option<String>,
+first_url: Option<String>,
+last_url: Option<String>,
site_title: &'a str,
-last_updated: &'a str,
+last_updated: String,
tag: Option<&'a Tag>,
-posts: &'a [&'a Page],
+posts: Vec<&'a Page>,
}
+impl FeedContext<'_> {
+fn new<'a>(state: &'a AppState, tag: Option<&'a Tag>, page: usize) -> FeedContext<'a> {
+let page = page.max(1);
+let mut posts: Vec<&_> = if let Some(tag) = tag {
+state
+.published_pages()
+.filter(|p| p.tags.contains(&tag.slug))
+.collect()
+} else {
+state.published_pages().collect()
+};
+posts.sort_by_key(|p| p.last_modified());
+posts.reverse();
+let page_count = posts.chunks(10).count();
+let posts = posts.chunks(10).nth(page - 1).unwrap_or_default().to_vec();
+let updated = posts
+.iter()
+.filter_map(|p| p.last_modified())
+.max()
+.unwrap_or(state.startup_time);
+let base_feed_url = if let Some(tag) = tag {
+format!("{}tags/{}/atom.xml", state.base_url, &tag.slug)
+} else {
+format!("{}atom.xml", state.base_url)
+};
+let (first_url, last_url) = if page_count > 1 {
+let first_url = base_feed_url.clone();
+let last_url = format!("{base_feed_url}?page={page_count}");
+(Some(first_url), Some(last_url))
+} else {
+(None, None)
+};
+let next_url = if page < page_count {
+Some(format!("{}?page={}", base_feed_url, page + 1))
+} else {
+None
+};
+let (feed_url, previous_url) = if page > 1 {
+(
+format!("{base_feed_url}?page={page}"),
+Some(format!("{}?page={}", base_feed_url, page - 1)),
+)
+} else {
+(base_feed_url.clone(), None)
+};
+FeedContext {
+feed_url,
+base_url: state.base_url.to_string(),
+next_url,
+previous_url,
+first_url,
+last_url,
+site_title: &state.settings.title,
+last_updated: updated.format(&Rfc3339).unwrap(),
+tag,
+posts,
+}
+}
+}
#[instrument(skip(state))]
-pub fn render_atom_feed(state: &AppState) -> Result<String> {
-let mut posts: Vec<_> = state
-.pages
-.values()
-.filter(|p| p.date.is_some() && p.is_published())
-.collect();
-posts.sort_by_key(|p| p.last_modified());
-posts.reverse();
-posts.truncate(10);
-let updated = posts
-.iter()
-.map(|p| p.last_modified())
-.max()
-.flatten()
-.unwrap();
-let feed = FeedContext {
-feed_url: &format!("{}atom.xml", state.base_url),
-base_url: &state.base_url.to_string(),
-next_url: None,
-previous_url: None,
-first_url: None,
-last_url: None,
-site_title: &state.settings.title,
-last_updated: &updated.format(&Rfc3339).unwrap(),
-tag: None,
-posts: &posts,
-};
+pub fn render_atom_feed(state: &AppState, page: usize) -> Result<String> {
+let feed = FeedContext::new(state, None, page);
let ctx = tera::Context::from_serialize(feed)?;
@@ -57,66 +100,33 @@ pub fn render_atom_feed(state: &AppState) -> Result<String> {
}
#[instrument(skip(tag, state))]
-pub fn render_atom_tag_feed(tag: &Tag, state: &AppState) -> Result<String> {
-let mut posts: Vec<_> = state
-.pages
-.values()
-.filter(|p| p.is_published() && p.tags.contains(&tag.slug))
-.collect();
-posts.sort_by_key(|p| &p.date);
-posts.reverse();
-posts.truncate(10);
-let updated = posts.iter().map(|p| p.last_modified()).max().flatten();
-let slug = &tag.slug;
-let feed = FeedContext {
-feed_url: &format!("{}tags/{}/atom.xml", state.base_url, slug),
-base_url: &state.base_url.to_string(),
-next_url: None,
-previous_url: None,
-first_url: None,
-last_url: None,
-site_title: &state.settings.title,
-last_updated: &updated.map_or_else(String::default, |d| d.format(&Rfc3339).unwrap()),
-tag: Some(tag),
-posts: &posts,
-};
+pub fn render_atom_tag_feed(tag: &Tag, state: &AppState, page: usize) -> Result<String> {
+let feed = FeedContext::new(state, Some(tag), page);
let ctx = tera::Context::from_serialize(feed)?;
Ok(state.tera.render("atom.xml", &ctx)?)
}
-struct _JsonFeed<'a> {
-version: &'a str,
-title: &'a str,
-home_page_url: &'a str,
-feed_url: &'a str,
-items: Vec<_JsonFeedItem<'a>>,
-}
-struct _JsonFeedItem<'a> {
-id: &'a str,
-}
#[cfg(test)]
mod tests {
-use crate::{AppState, settings::Settings};
+use crate::AppState;
#[test]
fn render_atom_feed() {
-let state = AppState::load(Settings::test_config()).unwrap();
+let state = AppState::load_test_state();
-super::render_atom_feed(&state).unwrap();
+super::render_atom_feed(&state, 1).unwrap();
+super::render_atom_feed(&state, 2).unwrap();
}
#[test]
fn render_atom_tag_feeds() {
-let state = AppState::load(Settings::test_config()).unwrap();
+let state = AppState::load_test_state();
for tag in state.tags.values() {
-super::render_atom_tag_feed(tag, &state).unwrap();
+super::render_atom_tag_feed(tag, &state, 1).unwrap();
+super::render_atom_tag_feed(tag, &state, 2).unwrap();
}
}
}
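
For context on the refactor above: `FeedContext::new` derives everything from the requested page number, and the paging itself is plain slice chunking. A minimal standalone sketch of that behaviour (the `paginate` helper is illustrative, not part of the crate):

```rust
/// Illustrative helper mirroring FeedContext::new's chunking logic:
/// `chunks(n).count()` gives the page count, `chunks(n).nth(page - 1)`
/// gives the requested page, or an empty slice when out of range.
fn paginate<T: Clone>(items: &[T], page: usize, per_page: usize) -> (Vec<T>, usize) {
    let page = page.max(1); // page numbers are 1-based, clamped like the real code
    let page_count = items.chunks(per_page).count();
    let page_items = items
        .chunks(per_page)
        .nth(page - 1)
        .unwrap_or_default() // Option<&[T]> -> empty slice when out of range
        .to_vec();
    (page_items, page_count)
}

fn main() {
    let posts: Vec<u32> = (1..=23).collect();
    let (items, pages) = paginate(&posts, 3, 10);
    assert_eq!(pages, 3); // 23 posts -> pages of 10, 10 and 3
    assert_eq!(items.len(), 3); // the last page holds the remainder
    let (empty, _) = paginate(&posts, 4, 10);
    assert!(empty.is_empty()); // out-of-range pages come back empty
}
```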

@@ -6,7 +6,7 @@ use axum::{
routing::get,
};
use std::sync::Arc;
-use time::{OffsetDateTime, format_description::well_known::Rfc2822};
+use time::{OffsetDateTime, format_description::well_known::Rfc2822, macros::format_description};
use tokio::sync::RwLock;
use tower_http::services::ServeDir;
use tracing::log::error;
@@ -16,6 +16,10 @@ use crate::{AppState, error::WebsiteError};
pub mod pages;
pub mod tags;
+const LAST_MODIFIED_FORMAT: &[time::format_description::BorrowedFormatItem<'_>] = format_description!(
+"[weekday repr:short], [day] [month repr:short] [year] [hour]:[minute]:[second] GMT"
+);
pub fn routes() -> Router<Arc<RwLock<AppState>>> {
Router::new()
.merge(pages::router())
@@ -29,20 +33,23 @@ fn should_return_304(
headers: &HeaderMap,
last_changed: Option<OffsetDateTime>,
) -> Option<Response> {
-let date = last_changed?;
let since = headers.get(header::IF_MODIFIED_SINCE)?;
let Ok(parsed) = OffsetDateTime::parse(since.to_str().unwrap(), &Rfc2822) else {
return None;
};
-if date >= parsed {
+if last_changed? >= parsed {
Some(Response::builder().status(304).body(Body::empty()).unwrap())
} else {
None
}
}
+fn format_last_modified(datetime: OffsetDateTime) -> String {
+datetime.to_utc().format(&LAST_MODIFIED_FORMAT).unwrap()
+}
impl IntoResponse for WebsiteError {
fn into_response(self) -> Response {
match self {
@@ -64,7 +71,7 @@ impl IntoResponse for WebsiteError {
#[cfg(test)]
mod tests {
-use std::{path::PathBuf, sync::Arc};
+use std::sync::Arc;
use tokio::sync::RwLock;
@@ -72,18 +79,9 @@ mod tests {
#[tokio::test]
async fn setup_routes() {
-let mut state = AppState {
-base_url: "http://localhost:8180".parse().unwrap(),
-tera: tera::Tera::new("templates/**/*").unwrap(),
-..Default::default()
-};
-// Load the actual posts, just to make this test fail if
-// aliases overlap with themselves or other routes
-let root = PathBuf::from("pages/");
-let posts = crate::page::load_recursive(&state, &root, &root, None).unwrap();
-state.tags = crate::tag::get_tags(posts.values());
-state.pages = posts;
-let state = Arc::new(RwLock::new(state));
+let state = Arc::new(RwLock::new(AppState::load_test_state()));
super::routes().with_state(state).into_make_service();
}
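
The new `LAST_MODIFIED_FORMAT` emits HTTP-date (IMF-fixdate) strings, the shape clients echo back in `If-Modified-Since`, instead of the RFC 3339 strings used before. A quick standalone check of the output shape (the sample input is this commit's own timestamp; this is also why the commit enables time's `macros` feature in Cargo.toml):

```rust
use time::macros::{datetime, format_description};

fn main() {
    // Same format string as LAST_MODIFIED_FORMAT above.
    let fmt = format_description!(
        "[weekday repr:short], [day] [month repr:short] [year] [hour]:[minute]:[second] GMT"
    );
    // 2025-04-02 22:48:45 +02:00, normalized to UTC before formatting.
    let dt = datetime!(2025-04-02 22:48:45 +2).to_utc();
    assert_eq!(dt.format(&fmt).unwrap(), "Wed, 02 Apr 2025 20:48:45 GMT");
}
```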

@@ -4,21 +4,22 @@ use anyhow::anyhow;
use axum::{
Router,
body::Body,
-extract::{OriginalUri, Request, State},
+extract::{OriginalUri, Query, Request, State},
http::{self, HeaderMap, StatusCode, Uri, header},
response::{Html, IntoResponse, Redirect, Response},
routing::get,
};
-use time::format_description::well_known::Rfc3339;
+use serde::Deserialize;
use tokio::sync::RwLock;
use tower::ServiceExt;
use tower_http::services::ServeDir;
-use tracing::instrument;
+use tracing::{info, instrument};
use crate::{
AppState,
error::WebsiteError,
+handlers::format_last_modified,
page::{Page, render_page},
};
@@ -93,13 +94,21 @@
.into_response())
}
-#[instrument(skip(state))]
+#[derive(Deserialize)]
+struct Pagination {
+page: Option<usize>,
+}
+#[instrument(skip(state, pagination))]
pub async fn feed(
State(state): State<Arc<RwLock<AppState>>>,
headers: HeaderMap,
+Query(pagination): Query<Pagination>,
) -> Result<Response, WebsiteError> {
let state = state.read().await;
-let mut posts: Vec<&Page> = state.pages.values().filter(|p| p.is_published()).collect();
+let page = pagination.page.unwrap_or(1).max(1);
+let mut posts: Vec<&Page> = state.published_pages().collect();
let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();
@@ -107,13 +116,26 @@ pub async fn feed(
return Ok(res);
}
-posts.sort_by_key(|p| &p.date);
+posts.sort_by_key(|p| p.last_modified());
posts.reverse();
-posts.truncate(10);
+let total = posts.len() / 10 + 1;
+if page > total {
+return Ok(WebsiteError::NotFound.into_response());
+}
+let start = 10 * (page - 1);
+let end = (start + 10).min(posts.len()) as usize;
+info!("start: {start}, end: {end}, total: {total}");
if posts.is_empty() {
return Ok(WebsiteError::NotFound.into_response());
}
let last_modified = last_changed.map_or_else(
-|| state.startup_time.format(&Rfc3339).unwrap(),
-|d| d.format(&Rfc3339).unwrap(),
+|| format_last_modified(state.startup_time),
+format_last_modified,
);
Ok((
@@ -122,7 +144,7 @@ pub async fn feed(
(header::CONTENT_TYPE, "application/atom+xml"),
(header::LAST_MODIFIED, &last_modified),
],
-crate::feed::render_atom_feed(&state)?,
+crate::feed::render_atom_feed(&state, page)?,
)
.into_response())
}
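
Both feed handlers now take the page number from the query string. A small sketch of how `Query<Pagination>` should deserialize, using serde_urlencoded directly (assuming, per axum's documentation, that `Query` is backed by the same serde deserialization):

```rust
use serde::Deserialize;

// Mirrors the handler's Pagination struct above.
#[derive(Deserialize)]
struct Pagination {
    page: Option<usize>,
}

fn main() {
    // "?page=2" -> Some(2)
    let p: Pagination = serde_urlencoded::from_str("page=2").unwrap();
    assert_eq!(p.page, Some(2));

    // A missing parameter -> None; the handler then falls back to
    // page 1 via `pagination.page.unwrap_or(1).max(1)`.
    let p: Pagination = serde_urlencoded::from_str("").unwrap();
    assert_eq!(p.page, None);
}
```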

@@ -2,20 +2,20 @@ use std::sync::Arc;
use axum::{
Router,
-extract::{Path, State},
+extract::{Path, Query, State},
http::{HeaderMap, StatusCode, header},
response::{Html, IntoResponse, Redirect, Response},
routing::get,
};
+use serde::Deserialize;
use serde_derive::Serialize;
-use time::format_description::well_known::Rfc3339;
use tokio::sync::RwLock;
use tracing::instrument;
use crate::{AppState, error::WebsiteError, page::Page};
-use super::should_return_304;
+use super::{format_last_modified, should_return_304};
pub fn router() -> Router<Arc<RwLock<AppState>>> {
Router::new()
@@ -47,7 +47,7 @@ pub async fn index(State(state): State<Arc<RwLock<AppState>>>) -> Result<Respons
StatusCode::OK,
[(
header::LAST_MODIFIED,
-state.startup_time.format(&Rfc3339).unwrap(),
+format_last_modified(state.startup_time),
)],
Html(res),
)
@@ -62,9 +62,8 @@ pub async fn view(
) -> Result<Response, WebsiteError> {
let state = state.read().await;
let mut posts: Vec<&Page> = state
-.pages
-.values()
-.filter(|p| p.is_published() && p.tags.contains(&tag))
+.published_pages()
+.filter(|p| p.tags.contains(&tag))
.collect();
let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();
@@ -94,8 +93,8 @@
[(
header::LAST_MODIFIED,
&last_changed.map_or_else(
-|| state.startup_time.format(&Rfc3339).unwrap(),
-|d| d.format(&Rfc3339).unwrap(),
+|| format_last_modified(state.startup_time),
+format_last_modified,
),
)],
Html(res),
@@ -103,19 +102,25 @@
.into_response())
}
-#[instrument(skip(state))]
+#[derive(Deserialize)]
+struct Pagination {
+page: Option<usize>,
+}
+#[instrument(skip(state, pagination))]
pub async fn feed(
Path(slug): Path<String>,
State(state): State<Arc<RwLock<AppState>>>,
+Query(pagination): Query<Pagination>,
headers: HeaderMap,
) -> Result<Response, WebsiteError> {
let state = state.read().await;
let tag = state.tags.get(&slug).ok_or(WebsiteError::NotFound)?;
+let page = pagination.page.unwrap_or(1).max(1);
let mut posts: Vec<&Page> = state
-.pages
-.values()
-.filter(|p| p.is_published() && p.tags.contains(&slug))
+.published_pages()
+.filter(|p| p.tags.contains(&slug))
.collect();
let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();
@@ -135,12 +140,12 @@ pub async fn feed(
(
header::LAST_MODIFIED,
&last_changed.map_or_else(
-|| state.startup_time.format(&Rfc3339).unwrap(),
-|d| d.format(&Rfc3339).unwrap(),
+|| format_last_modified(state.startup_time),
+format_last_modified,
),
],
-crate::feed::render_atom_tag_feed(tag, &state)?,
+crate::feed::render_atom_tag_feed(tag, &state, page)?,
)
.into_response())
}

@@ -1,19 +1,27 @@
#![warn(clippy::pedantic)]
use anyhow::Result;
use notify::Watcher;
-use std::{path::Path, sync::Arc};
-use tokio::{net::TcpListener, signal, sync::RwLock};
+use std::{
+path::Path,
+sync::Arc,
+time::{Duration, Instant},
+};
+use time::OffsetDateTime;
+use tokio::{
+net::TcpListener,
+signal,
+sync::{RwLock, mpsc::Receiver},
+};
use tower_http::{compression::CompressionLayer, cors::CorsLayer, trace::TraceLayer};
-use tracing::{debug, error, instrument, log::info};
+use tracing::{debug, error, instrument, level_filters::LevelFilter, log::info, warn};
use tracing_subscriber::EnvFilter;
mod error;
mod feed;
mod handlers;
mod helpers;
-mod hilighting;
-mod markdown;
mod page;
+mod rendering;
mod settings;
mod state;
mod tag;
@@ -43,38 +51,22 @@ async fn main() -> Result<()> {
}
fn setup_tracing(cfg: &Settings) {
+let env = std::env::var("RUST_LOG");
+let filter = EnvFilter::builder()
+.with_default_directive(LevelFilter::WARN.into())
+.parse_lossy(if let Ok(log) = env.as_deref() {
+log
+} else {
+&cfg.logging
+});
+let subs = tracing_subscriber::fmt().with_env_filter(filter);
match cfg.log_format.as_str() {
-"pretty" => tracing_subscriber::fmt()
-.pretty()
-.with_env_filter(
-EnvFilter::builder()
-.with_default_directive(cfg.logging.parse().unwrap_or_default())
-.from_env_lossy(),
-)
-.init(),
-"compact" => tracing_subscriber::fmt()
-.compact()
-.with_env_filter(
-EnvFilter::builder()
-.with_default_directive(cfg.logging.parse().unwrap_or_default())
-.from_env_lossy(),
-)
-.init(),
-"json" => tracing_subscriber::fmt()
-.json()
-.with_env_filter(
-EnvFilter::builder()
-.with_default_directive(cfg.logging.parse().unwrap_or_default())
-.from_env_lossy(),
-)
-.init(),
-_ => tracing_subscriber::fmt()
-.with_env_filter(
-EnvFilter::builder()
-.with_default_directive(cfg.logging.parse().unwrap_or_default())
-.from_env_lossy(),
-)
-.init(),
+"pretty" => subs.pretty().init(),
+"compact" => subs.compact().init(),
+"json" => subs.json().init(),
+_ => subs.init(),
}
}
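
With the duplication gone, the filter precedence is easier to see: `RUST_LOG` wins when set, otherwise the config's `logging` string is used, and anything that fails to parse degrades to `warn`. A hedged sketch of that precedence (the `build_filter` helper is illustrative, not from the crate):

```rust
use tracing::level_filters::LevelFilter;
use tracing_subscriber::EnvFilter;

// Illustrative: RUST_LOG overrides the config value, like setup_tracing above.
fn build_filter(cfg_logging: &str) -> EnvFilter {
    let env = std::env::var("RUST_LOG");
    EnvFilter::builder()
        // Unparseable directives fall back to WARN instead of panicking.
        .with_default_directive(LevelFilter::WARN.into())
        .parse_lossy(env.as_deref().unwrap_or(cfg_logging))
}

fn main() {
    // With the config above: `website` targets log at DEBUG,
    // everything else is capped at WARN.
    let filter = build_filter("website=debug,warn");
    println!("{filter}");
}
```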
@@ -96,13 +88,20 @@ async fn init_app(cfg: Settings) -> Result<axum::routing::Router> {
}
async fn start_file_watcher(state: Arc<RwLock<AppState>>) {
-let (page_tx, mut page_rx) = tokio::sync::mpsc::channel::<notify::Event>(1);
+fn event_filter(event: &notify::Event) -> bool {
+event.kind.is_modify() || event.kind.is_remove()
+}
+let (page_tx, page_rx) = tokio::sync::mpsc::channel::<notify::Event>(1);
let mut page_watcher =
notify::recommended_watcher(move |event: Result<notify::Event, notify::Error>| {
let Ok(event) = event.inspect_err(|e| error!("File watcher error: {}", e)) else {
return;
};
+if !event_filter(&event) {
+return;
+}
_ = page_tx
.blocking_send(event)
.inspect_err(|e| error!("Failed to add watch event to channel: {}", e));
@@ -113,35 +112,16 @@ async fn start_file_watcher(state: Arc<RwLock<AppState>>) {
.watch(Path::new("pages/"), notify::RecursiveMode::Recursive)
.expect("add pages dir to watcher");
-let page_fut = async {
-while let Some(event) = page_rx.recv().await {
-if !(event.kind.is_create() || event.kind.is_remove() || event.kind.is_modify()) {
-continue;
-}
-if !event.paths.iter().any(|p| p.is_file()) {
-continue;
-}
-debug!("{:?}", event);
-let mut state = state.write().await;
-info!("Reloading pages");
-let root_path = Path::new("pages/");
-if let Ok(pages) = page::load_recursive(&state, root_path, root_path, None)
-.inspect_err(|err| error!("Error reloading pages: {}", err))
-{
-state.pages = pages;
-}
-}
-};
-let (template_tx, mut template_rx) = tokio::sync::mpsc::channel::<notify::Event>(1);
+let (template_tx, template_rx) = tokio::sync::mpsc::channel::<notify::Event>(1);
let mut template_watcher =
notify::recommended_watcher(move |event: Result<notify::Event, notify::Error>| {
let Ok(event) = event.inspect_err(|e| error!("File watcher error: {}", e)) else {
return;
};
+if !event_filter(&event) {
+return;
+}
_ = template_tx
.blocking_send(event)
.inspect_err(|e| error!("Failed to add watch event to channel: {}", e));
@@ -152,28 +132,61 @@
.watch(Path::new("templates/"), notify::RecursiveMode::Recursive)
.expect("add templates dir to watcher");
-let template_fut = async {
-while let Some(event) = template_rx.recv().await {
-if !(event.kind.is_create() || event.kind.is_remove() || event.kind.is_modify()) {
-continue;
-}
-if !event.paths.iter().any(|p| p.is_file()) {
-continue;
-}
-debug!("{:?}", event);
+tokio::join!(
+page_watch_loop(state.clone(), page_rx),
+template_watch_loop(state.clone(), template_rx)
+);
+}
-let mut state = state.write().await;
+const WATCHER_DEBOUNCE_MILLIS: u64 = 100;
-info!("Reloading templates");
-_ = state
-.tera
-.full_reload()
-.inspect_err(|err| error!("Error reloading templates: {}", err));
+async fn page_watch_loop(state: Arc<RwLock<AppState>>, mut rx: Receiver<notify::Event>) {
+let mut last_reload = Instant::now();
+debug!("Now watching pages");
+while let Some(_event) = rx.recv().await {
+if last_reload.elapsed() < Duration::from_millis(WATCHER_DEBOUNCE_MILLIS) {
+continue;
+}
-};
-info!("file watchers initialized");
-tokio::join!(page_fut, template_fut);
+let pages = {
+let state = state.read().await;
+info!("Reloading pages");
+let root_path = Path::new("pages/");
+page::load_all(&state, root_path, root_path)
+.inspect_err(|err| error!("Error reloading pages: {}", err))
+.ok()
+};
+if let Some(pages) = pages {
+let mut state = state.write().await;
+state.pages = pages;
+state.last_modified = OffsetDateTime::now_utc();
+last_reload = Instant::now();
+}
+}
+warn!("Page watch loop stopped");
+}
+async fn template_watch_loop(state: Arc<RwLock<AppState>>, mut rx: Receiver<notify::Event>) {
+let mut last_reload = Instant::now();
+debug!("Now watching templates");
+while let Some(_event) = rx.recv().await {
+if last_reload.elapsed() < Duration::from_millis(WATCHER_DEBOUNCE_MILLIS) {
+continue;
+}
+let mut state = state.write().await;
+info!("Reloading templates");
+_ = state
+.tera
+.full_reload()
+.inspect_err(|err| error!("Error reloading templates: {}", err));
+state.last_modified = OffsetDateTime::now_utc();
+last_reload = Instant::now();
+}
+warn!("Template watch loop stopped");
+}
async fn shutdown_signal() {

@@ -14,7 +14,7 @@ use time::{OffsetDateTime, format_description::well_known::Rfc3339};
use tracing::{debug, info, instrument};
-use crate::{AppState, error::WebsiteError, helpers, markdown};
+use crate::{AppState, error::WebsiteError, helpers, rendering::markdown};
#[derive(Deserialize, Debug, Default)]
pub struct FrontMatter {
@@ -116,8 +116,17 @@ impl Page {
}
}
+#[instrument(skip(state))]
+pub fn load_all(state: &AppState, root: &Path, folder: &Path) -> Result<HashMap<String, Page>> {
+let pages = load_recursive(state, root, folder, None)?;
+info!("{} pages loaded", pages.len());
+Ok(pages)
+}
#[instrument(skip(state, parent))]
-pub fn load_recursive(
+fn load_recursive(
state: &AppState,
root: &Path,
folder: &Path,
@@ -174,6 +183,7 @@ pub fn load_recursive(
debug!("{path} has {children} child pages");
pages.insert(page.absolute_path.clone(), page);
}
Ok(pages)
}
@@ -199,7 +209,7 @@ pub fn load_page(state: &AppState, path: &Path, root_folder: &Path) -> Result<Op
let base_uri = helpers::uri_with_path(&state.base_url, base_path);
-let content = markdown::render_markdown_to_html(Some(&base_uri), &content);
+let content = markdown::render_to_html(Some(&base_uri), &content);
let frontmatter = match content.meta_kind {
Some(MetadataBlockKind::PlusesStyle) => toml::from_str(&content.metadata)?,
Some(MetadataBlockKind::YamlStyle) => unimplemented!("YAML frontmatter is not implemented"),

@@ -1,7 +1,8 @@
use std::sync::LazyLock;
+use super::hilighting;
use crate::helpers;
-use crate::hilighting;
use axum::http::Uri;
use pulldown_cmark::CodeBlockKind;
use pulldown_cmark::Event;
@@ -23,7 +24,7 @@ pub struct RenderResult {
}
#[instrument(skip(markdown))]
-pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> RenderResult {
+pub fn render_to_html(base_uri: Option<&Uri>, markdown: &str) -> RenderResult {
let mut opt = Options::empty();
opt.insert(Options::ENABLE_FOOTNOTES);

src/rendering/mod.rs Normal file

@@ -0,0 +1,2 @@
pub mod hilighting;
pub mod markdown;

@@ -4,7 +4,6 @@ use anyhow::Result;
use axum::http::Uri;
use tera::Tera;
use time::OffsetDateTime;
-use tracing::info;
use crate::{
page::{self, Page},
@@ -16,6 +15,7 @@ pub struct AppState {
pub startup_time: OffsetDateTime,
pub base_url: Uri,
pub pages: HashMap<String, Page>,
+pub last_modified: OffsetDateTime,
pub aliases: HashMap<String, String>,
pub settings: Settings,
pub tags: HashMap<String, Tag>,
@@ -32,6 +32,7 @@ impl Default for AppState {
aliases: HashMap::default(),
pages: HashMap::default(),
tags: HashMap::default(),
+last_modified: OffsetDateTime::now_utc(),
}
}
}
@@ -49,8 +50,7 @@ };
let root_path = Path::new("pages/");
-let pages = page::load_recursive(&state, root_path, root_path, None)?;
-info!("{} pages loaded", pages.len());
+let pages = page::load_all(&state, root_path, root_path)?;
let tags = tag::get_tags(pages.values());
state.aliases = pages
@@ -62,18 +62,32 @@ })
.collect();
+let last_modified = pages.values().filter_map(Page::last_modified).max();
+state.last_modified = last_modified.unwrap_or(state.last_modified);
state.pages = pages;
state.tags = tags;
Ok(state)
}
+pub fn published_pages(&self) -> impl Iterator<Item = &Page> {
+self.pages
+.values()
+.filter(|p| p.date.is_some() && p.is_published())
+}
+#[cfg(test)]
+pub fn load_test_state() -> AppState {
+AppState::load(Settings::test_config()).unwrap()
+}
}
#[cfg(test)]
mod tests {
-use crate::{AppState, settings::Settings};
+use crate::AppState;
#[test]
fn appstate_load() {
-_ = AppState::load(Settings::test_config()).unwrap();
+_ = AppState::load_test_state();
}
}

@@ -28,7 +28,7 @@
<author>
<name>tollyx</name>
</author>
-<link rel="alternate" href="{{ base_url | trim_end_matches(pat='/') | safe }}{{ post.absolute_path | safe }}" type="text/html"/>
+<link href="{{ base_url | trim_end_matches(pat='/') | safe }}{{ post.absolute_path | safe }}" type="text/html"/>
<id>{{ base_url | trim_end_matches(pat='/') | safe }}{{ post.absolute_path | safe }}</id>
<content type="html">
{{ post.content }}