1
0
Fork 0

improvements

This commit is contained in:
Adrian Hedqvist 2025-03-20 22:20:12 +01:00
parent 5fd4d1d8a2
commit 382fff5be3
Signed by: tollyx
SSH key fingerprint: SHA256:NqZilNUilqR38F1LQMrz2E65ZsA621eT3lO+FqHS48Y
8 changed files with 49 additions and 113 deletions

View file

@ -18,10 +18,9 @@ anyway here's a new todo list:
- [x] docker from-scratch image (it's small!)
- [x] opentelemetry (metrics, traces)
- [ ] opentelemetry logs? (don't know if I'm gonna need it? can probably just make the collector grab them from the docker logs?)
- [ ] sections (currently the posts page is hardcoded, should be able to turn any page-subfolder into its own section) __👈 NEXT__
- [x] sections (currently the posts page is hardcoded, should be able to turn any page-subfolder into its own section)
- [ ] file-watching (rebuild pages when they're changed, not only on startup)
- [ ] live-reload (I guess it's done by some js and a websocket that sends a message to the browser to reload?)
- [ ] custom
- [ ] ~~sass/less compilation~~ (don't think I need it, will skip for now)
- [ ] fancy css (but nothing too fancy, I like it [Simple And Clean](https://youtu.be/0nKizH5TV_g?t=42))
- [ ] other pages (now I've got it set up so I can write any page in markdown!!!)

View file

@ -10,6 +10,10 @@ use crate::{AppState, page::Page, tag::Tag};
struct FeedContext<'a> {
feed_url: &'a str,
base_url: &'a str,
next_url: Option<&'a str>,
previous_url: Option<&'a str>,
first_url: Option<&'a str>,
last_url: Option<&'a str>,
site_title: &'a str,
last_updated: &'a str,
tag: Option<&'a Tag>,
@ -32,6 +36,10 @@ pub fn render_atom_feed(state: &AppState) -> Result<String> {
let feed = FeedContext {
feed_url: &format!("{}atom.xml", state.base_url),
base_url: &state.base_url.to_string(),
next_url: None,
previous_url: None,
first_url: None,
last_url: None,
site_title: &state.settings.title,
last_updated: &updated.map_or_else(String::new, |d| d.format(&Rfc3339).unwrap()),
tag: None,
@ -60,6 +68,10 @@ pub fn render_atom_tag_feed(tag: &Tag, state: &AppState) -> Result<String> {
let feed = FeedContext {
feed_url: &format!("{}tags/{}/atom.xml", state.base_url, slug),
base_url: &state.base_url.to_string(),
next_url: None,
previous_url: None,
first_url: None,
last_url: None,
site_title: &state.settings.title,
last_updated: &updated.map_or_else(String::default, |d| d.format(&Rfc3339).unwrap()),
tag: Some(tag),

View file

@ -63,7 +63,7 @@ impl IntoResponse for WebsiteError {
#[cfg(test)]
mod tests {
use std::sync::Arc;
use std::{path::PathBuf, sync::Arc};
use crate::AppState;
@ -76,7 +76,8 @@ mod tests {
};
// Load the actual posts, just to make this test fail if
// aliases overlap with themselves or other routes
let posts = crate::page::load_all(&state, "pages/".into()).unwrap();
let root = PathBuf::from("pages/");
let posts = crate::page::load_recursive(&state, &root, &root, None).unwrap();
state.tags = crate::tag::get_tags(posts.values());
state.pages = posts.into();
let state = Arc::new(state);

View file

@ -5,7 +5,6 @@ use axum::http::Uri;
use page::Page;
use serde::Serialize;
use settings::Settings;
use tag::Tag;
use tera::Tera;
@ -106,21 +105,6 @@ async fn init_app(cfg: Settings) -> Result<axum::routing::Router> {
state.pages.clone_from(&pages);
state.tags = tags;
state
.tera
.register_function("get_page", move |args: &HashMap<String, tera::Value>| {
let pages = pages.clone();
if let Some(page) = args
.get("path")
.and_then(|p| p.as_str())
.and_then(|p| pages.get(p))
{
Ok(tera::to_value(page)?)
} else {
Err(tera::Error::msg("page not found"))
}
});
Ok(handlers::routes()
.layer(CorsLayer::permissive())
.layer(CompressionLayer::new())

View file

@ -1,10 +1,9 @@
use std::{
collections::HashMap,
ffi::OsStr,
fmt::Debug,
fs,
hash::{Hash, Hasher},
path::{Path, PathBuf},
path::Path,
};
use anyhow::Result;
@ -12,7 +11,7 @@ use anyhow::Result;
use serde_derive::{Deserialize, Serialize};
use time::{OffsetDateTime, format_description::well_known::Rfc3339};
use tracing::{info, instrument, log::debug};
use tracing::{debug, info, instrument};
use crate::{AppState, WebsiteError, helpers, markdown};
@ -35,16 +34,23 @@ pub struct Page {
pub updated: Option<String>,
pub aliases: Vec<String>,
pub tags: Vec<String>,
pub children: Vec<String>,
pub parent: Option<String>,
pub children: Vec<PageSummary>,
pub parent: Option<PageSummary>,
pub content: String,
pub template: String,
pub slug: String,
pub absolute_path: String,
pub section: Option<String>,
pub etag: String,
}
#[derive(Serialize, Clone, Debug, Default)]
/// Lightweight, serializable snapshot of a [`Page`], used for
/// parent/child cross-references so pages don't embed full copies
/// of each other (see `Page::children` / `Page::parent`).
pub struct PageSummary {
    // Title from front matter; None when the page declares no title.
    pub title: Option<String>,
    // Draft flag — templates filter on this to hide unpublished pages.
    pub draft: bool,
    // Publication date as a string; templates sort/format it.
    // NOTE(review): appears to be RFC 3339 elsewhere in this file — confirm.
    pub date: Option<String>,
    // Site-absolute URL path used as the page's key and link target.
    pub absolute_path: String,
}
impl Page {
pub fn new(slug: String, content: String, fm: TomlFrontMatter) -> Page {
let mut hasher = std::hash::DefaultHasher::default();
@ -62,7 +68,6 @@ impl Page {
},
slug,
etag,
section: None,
content,
children: vec![],
template: fm.template.unwrap_or_else(|| "page.html".to_string()),
@ -99,6 +104,15 @@ impl Page {
pub fn last_modified(&self) -> Option<OffsetDateTime> {
self.parsed_updated().or(self.parsed_date())
}
pub fn summary(&self) -> PageSummary {
PageSummary {
title: self.title.clone(),
draft: self.draft,
date: self.date.clone(),
absolute_path: self.absolute_path.clone(),
}
}
}
#[instrument(skip(state, parent))]
@ -116,8 +130,8 @@ pub fn load_recursive(
let mut i = load_page(state, &index_file, root)?;
if let Some(ref mut par) = parent {
i.parent = Some(par.absolute_path.clone());
par.children.push(i.absolute_path.clone());
i.parent = Some(par.summary());
par.children.push(i.summary());
}
index = Some(i);
@ -146,8 +160,8 @@ pub fn load_recursive(
let mut p = load_page(state, &path, root)?;
if let Some(i) = index.as_mut() {
p.parent = Some(i.absolute_path.clone());
i.children.push(p.absolute_path.clone());
p.parent = Some(i.summary());
i.children.push(p.summary());
}
pages.insert(p.absolute_path.clone(), p);
}
@ -155,71 +169,15 @@ pub fn load_recursive(
if let Some(page) = index {
let path = &page.absolute_path;
let children = page.children.len();
info!("{path} has {children} child pages");
debug!("{path} has {children} child pages");
pages.insert(page.absolute_path.clone(), page);
}
Ok(pages)
}
#[instrument(skip(state))]
pub fn load_all(state: &AppState, folder: PathBuf) -> Result<HashMap<String, Page>> {
let mut pages = HashMap::<String, Page>::new();
let mut dirs: Vec<PathBuf> = vec![folder.clone()];
let mut section_stack: Vec<String> = vec![];
while let Some(dir) = dirs.pop() {
let mut section_index = dir.clone();
section_index.push("index.md");
let is_index = section_index.exists();
let current_section = if is_index {
let mut page = load_page(state, &section_index, &folder)?;
page.section = section_stack.last().cloned();
section_stack.push(page.absolute_path.clone());
pages.insert(page.absolute_path.clone(), page);
section_stack.last()
} else {
section_stack.last()
};
println!("current section: {current_section:?}");
let mut child_pages = vec![];
for entry in fs::read_dir(&dir)? {
let path = entry?.path();
if path.is_dir() {
dirs.push(path);
} else if let Some(ext) = path.extension() {
if ext == "md" || ext == "markdown" {
println!("markdown page!");
// it's a page to load
if path.file_name() == Some(OsStr::new("index.md")) {
println!("index.md!");
// we've already loaded the section index page
continue;
}
println!("child page!");
let mut page = load_page(state, &path, &folder)?;
page.section = current_section.cloned();
child_pages.push(page.absolute_path.clone());
pages.insert(page.absolute_path.clone(), page);
}
}
}
if let Some(section) = current_section {
if let Some(section) = pages.get_mut(section) {
section.children.append(&mut child_pages);
}
}
dbg!(child_pages);
}
Ok(pages)
}
#[instrument(skip(state))]
pub fn load_page(state: &AppState, path: &Path, root_folder: &Path) -> Result<Page> {
info!("loading page: {path:?}");
debug!("loading page: {path:?}");
let content = std::fs::read_to_string(path)?;
@ -277,20 +235,6 @@ mod tests {
use crate::AppState;
#[tokio::test]
async fn render_all_posts() {
let mut state = AppState {
base_url: "http://localhost:8180".parse().unwrap(),
tera: Tera::new("templates/**/*").unwrap(),
..Default::default()
};
state.pages = super::load_all(&state, "pages/".into()).unwrap().into();
for post in state.pages.values() {
super::render_page(&state, post).await.unwrap();
}
}
#[tokio::test]
async fn render_all_posts_recursive() {
let mut state = AppState {

View file

@ -4,11 +4,6 @@
{%- if tag %} - #{{ tag.slug }}{% endif -%}
</title>
<link href="{{ feed_url | safe }}" rel="self" type="application/atom+xml"/>
{% if tag -%}
<link href="{{ base_url | safe }}tags/{{ tag.slug }}/"/>
{%- else -%}
<link href="{{ base_url | safe }}"/>
{%- endif %}
<generator uri="{{ base_url | safe }}">tlxite</generator>
<updated>{{ last_updated | date(format="%+") }}</updated>
<id>{{ feed_url | safe }}</id>

View file

@ -1,5 +1,6 @@
<header>
<nav>
<img src="{{base_url | safe}}static/avatar.png" class="avatar"/> <b><a href="{{base_url | safe}}">{{ site_title }}</a></b> - <a href="{{base_url | safe}}posts/">posts</a>
<a href="{{base_url | safe}}"><img src="{{base_url | safe}}static/avatar.png" class="avatar" /></a>
<b><a href="{{base_url | safe}}">{{ site_title }}</a></b> - <a href="{{base_url | safe}}posts/">posts</a>
</nav>
</header>
</header>

View file

@ -3,11 +3,11 @@
<h1>{{ page.title }}</h1>
{{ page.content | safe }}
<ul>
{% for child_path in page.children -%}
{% set child = get_page(path=child_path) -%}
{% for child in page.children | filter(attribute="draft", value=false) | sort(attribute="date") | reverse -%}
{% if child.title -%}
<li><a href="{{base_url | trim_end_matches(pat='/') | safe}}{{child.absolute_path | safe}}">{% if child.date -%}
<time datetime="{{ child.date }}">{{ child.date }}</time> &ndash; {{ child.title -}}
<time datetime="{{ child.date }}">{{ child.date | date(format="%Y-%m-%d") }}</time> &ndash; {{ child.title
-}}
{% else -%}
{{ child.title -}}
{% endif -%}