progress on sections

commit 93f734bdf0 (parent 74753ad4b4)
15 changed files with 178 additions and 116 deletions

@@ -1,3 +1,4 @@
+title = "tollyx.se"
 base_url = "http://localhost:8080/"
 bind_address = "0.0.0.0:8080"
 logging = "info,website=debug"

pages/index.md (new file)
@@ -0,0 +1,26 @@
+why hello there this is a new index page if everything is setup correctly
+
+whoah this time it's actually generated from markdown too unlike the other one
+
+anyway here's a new todo list:
+
+## todo
+
+- [x] static content
+- [x] template rendering (tera)
+- [x] markdown rendering (pulldown_cmark)
+- [x] post metadata (wow now pulldown_cmark can get it for me instead of hacking it in with regex, nice)
+- [x] page aliases (gotta keep all those old links working)
+- [x] rss/atom (tera is useful here too)
+- [x] code hilighting (syntact)
+- [x] cache headers (pages uses etags, some others timestamps. it works)
+- [x] docker from-scratch image (it's small!)
+- [x] opentelemetry (metrics, traces)
+- [ ] opentelemetry logs? (don't know if I'm gonna need it? can probably just make the collector grab them from the docker logs?)
+- [ ] file-watching (rebuild pages when they're changed, not only on startup)
+- [ ] ~~sass/less compilation~~ (don't think I need it, will skip for now)
+- [ ] fancy css (but nothing too fancy, I like it [Simple And Clean](https://youtu.be/0nKizH5TV_g?t=42))
+- [ ] other pages (now I've got it set up so I can write any page in markdown!!!)
+- [ ] graphviz (or something else) to svg rendering (want it to be serverside)
+- [ ] image processing (resizing, conversion to jpgxl, avif, others?)

pages/posts/index.md (new file)
@@ -0,0 +1 @@
+test

@@ -1,4 +1,5 @@
 use axum::{
+    body::Body,
     extract::{Request, State},
     http::{header, HeaderMap, StatusCode},
     middleware::Next,
@@ -30,11 +31,12 @@ async fn record_hit(method: String, path: String) {

 pub fn routes() -> Router<Arc<AppState>> {
     Router::new()
-        .route("/", get(index))
+        // .route("/", get(index))
         .merge(pages::router())
         .merge(tags::router())
-        .route("/healthcheck", get(healthcheck))
-        .route_service("/static/*path", ServeDir::new("./"))
+        .route("/healthcheck", get(|| async { "OK" }))
+        .nest_service("/static", ServeDir::new("static"))
+        .fallback(|| async { WebsiteError::NotFound })
         .layer(axum::middleware::from_fn(metrics_middleware))
 }

@@ -43,8 +45,8 @@ pub async fn index(
     State(state): State<Arc<AppState>>,
     headers: HeaderMap,
 ) -> std::result::Result<Response, WebsiteError> {
-    if should_return_304(&headers, Some(state.startup_time.into())) {
-        return Ok(StatusCode::NOT_MODIFIED.into_response());
+    if let Some(res) = should_return_304(&headers, Some(state.startup_time.into())) {
+        return Ok(res);
     }
     let mut ctx = tera::Context::new();
     ctx.insert("base_url", &state.base_url.to_string());
@@ -66,14 +68,6 @@ pub async fn index(
         .into_response())
 }

-async fn healthcheck() -> &'static str {
-    "OK"
-}
-
-pub async fn not_found() -> impl IntoResponse {
-    (StatusCode::NOT_FOUND, ())
-}
-
 #[instrument(skip(request, next))]
 pub async fn metrics_middleware(request: Request, next: Next) -> Response {
     let path = request.uri().path().to_string();
@@ -88,32 +82,39 @@ pub async fn metrics_middleware(request: Request, next: Next) -> Response {
     response
 }

-fn should_return_304(headers: &HeaderMap, last_changed: Option<DateTime<FixedOffset>>) -> bool {
+fn should_return_304(
+    headers: &HeaderMap,
+    last_changed: Option<DateTime<FixedOffset>>,
+) -> Option<Response> {
     let Some(date) = last_changed else {
-        return false;
+        return None;
     };
     let Some(since) = headers.get(header::IF_MODIFIED_SINCE) else {
-        return false;
+        return None;
     };

     let Ok(parsed) = DateTime::<FixedOffset>::parse_from_rfc2822(since.to_str().unwrap()) else {
-        return false;
+        return None;
     };

-    date >= parsed
+    if date >= parsed {
+        Some(Response::builder().status(304).body(Body::empty()).unwrap())
+    } else {
+        None
+    }
 }

 impl IntoResponse for WebsiteError {
     fn into_response(self) -> Response {
         match self {
-            WebsiteError::NotFound => (StatusCode::NOT_FOUND, ()).into_response(),
+            WebsiteError::NotFound => (StatusCode::NOT_FOUND, "not found").into_response(),
             WebsiteError::InternalError(e) => {
                 if let Some(s) = e.source() {
                     error!("internal error: {}: {}", e, s);
                 } else {
                     error!("internal error: {}", e);
                 }
-                (StatusCode::INTERNAL_SERVER_ERROR, ()).into_response()
+                (StatusCode::INTERNAL_SERVER_ERROR, "internal error").into_response()
             }
         }
     }
 }
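
Pulled out of the diff for reference, the reworked conditional-GET helper and a call site look roughly like this. This is a condensed sketch assuming axum 0.7 and chrono, not the literal code from the commit; the `index` function here is a hypothetical stand-in for the real handlers.

```rust
use axum::{
    body::Body,
    http::{header, HeaderMap, StatusCode},
    response::{IntoResponse, Response},
};
use chrono::{DateTime, FixedOffset};

// Returning Some(304 response) lets the caller short-circuit;
// None means "render the page as usual".
fn should_return_304(
    headers: &HeaderMap,
    last_changed: Option<DateTime<FixedOffset>>,
) -> Option<Response> {
    let date = last_changed?;
    let since = headers.get(header::IF_MODIFIED_SINCE)?;
    let parsed = DateTime::<FixedOffset>::parse_from_rfc2822(since.to_str().ok()?).ok()?;
    (date >= parsed).then(|| {
        Response::builder()
            .status(StatusCode::NOT_MODIFIED)
            .body(Body::empty())
            .unwrap()
    })
}

// Hypothetical call site mirroring the handlers touched by the commit.
fn index(headers: &HeaderMap, startup_time: DateTime<FixedOffset>) -> Response {
    if let Some(res) = should_return_304(headers, Some(startup_time)) {
        return res;
    }
    (StatusCode::OK, "rendered page").into_response()
}
```

Returning a whole `Response` instead of a `bool` is what lets every call site shrink to the `if let Some(res) = ... { return Ok(res); }` pattern seen throughout the diff.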

@@ -1,16 +1,18 @@
 use std::sync::Arc;

+use anyhow::anyhow;
 use axum::{
-    extract::{OriginalUri, Path, Request, State},
-    http::{self, header, request, HeaderMap, StatusCode},
+    body::Body,
+    extract::{OriginalUri, Request, State},
+    http::{self, header, HeaderMap, StatusCode, Uri},
     response::{Html, IntoResponse, Redirect, Response},
     routing::get,
     Router,
 };

 use serde_derive::Serialize;
-use tower::{Service, ServiceExt};
-use tower_http::{follow_redirect::policy::PolicyExt, services::ServeDir};
+use tower::ServiceExt;
+use tower_http::services::ServeDir;
 use tracing::{debug, instrument};

 use crate::{
@@ -24,7 +26,8 @@ pub fn router() -> Router<Arc<AppState>> {
     Router::new()
         .route("/atom.xml", get(feed))
         .route("/posts/", get(index))
-        .route("/posts", get(|| async { Redirect::permanent("/") }))
+        .route("/posts", get(|| async { Redirect::permanent("/posts/") }))
+        .route("/", get(view))
         .route("/*path", get(view))
 }

@@ -46,8 +49,8 @@ async fn index(

     let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();

-    if should_return_304(&headers, last_changed) {
-        return Ok(StatusCode::NOT_MODIFIED.into_response());
+    if let Some(res) = should_return_304(&headers, last_changed) {
+        return Ok(res);
     }

     posts.sort_by_key(|p| &p.date);
@@ -60,7 +63,7 @@ async fn index(
     c.insert("posts", &posts);
     c.insert("base_url", &state.base_url.to_string());

-    let res = state.tera.render("posts_index.html", &c)?;
+    let res = state.tera.render("section_index.html", &c)?;

     Ok((
         StatusCode::OK,
@@ -78,13 +81,12 @@ async fn index(
         .into_response())
 }

-#[instrument(skip(state, uri, method, headers, request))]
+#[instrument(skip(state, uri, method, headers))]
 async fn view(
-    uri: OriginalUri,
+    OriginalUri(uri): OriginalUri,
     State(state): State<Arc<AppState>>,
     method: http::method::Method,
     headers: HeaderMap,
-    request: Request,
 ) -> Result<Response, WebsiteError> {
     // Fetch post
     let Some(post) = state.pages.get(uri.path()) else {
@@ -101,12 +103,10 @@ async fn view(
            }
        }

+        debug!("page not found for '{uri}', fallback to static files");
+
         // TODO: I don't like how we create a new oneshot for every 404 request, but I don't know if there's a better way
-        return Ok(ServeDir::new("pages")
-            .oneshot(request)
-            .await
-            .unwrap()
-            .into_response());
+        return get_static_file("pages", uri).await;
     };

     if !post.is_published() {
@@ -159,8 +159,8 @@ pub async fn feed(

     let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();

-    if should_return_304(&headers, last_changed) {
-        return Ok(StatusCode::NOT_MODIFIED.into_response());
+    if let Some(res) = should_return_304(&headers, last_changed) {
+        return Ok(res);
     }

     posts.sort_by_key(|p| &p.date);
@@ -195,6 +195,16 @@ pub async fn redirect(
     }
 }

+async fn get_static_file(base_dir: &str, uri: Uri) -> Result<Response, WebsiteError> {
+    let req = Request::builder().uri(uri).body(Body::empty()).unwrap();
+
+    ServeDir::new(base_dir)
+        .oneshot(req)
+        .await
+        .map(IntoResponse::into_response)
+        .map_err(|e| anyhow!(e).into())
+}
+
 #[cfg(test)]
 mod tests {
     use chrono::DateTime;
@@ -207,14 +217,14 @@ mod tests {
     fn render_index() {
         let posts = vec![
             Page {
-                title: "test".into(),
+                title: Some("test".into()),
                 slug: "test".into(),
                 tags: vec!["abc".into(), "def".into()],
                 date: Some(DateTime::parse_from_rfc3339("2023-03-26T13:04:01+02:00").unwrap()),
                 ..Default::default()
             },
             Page {
-                title: "test2".into(),
+                title: Some("test2".into()),
                 slug: "test2".into(),
                 date: None,
                 ..Default::default()
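
The new `get_static_file` fallback drives a `tower_http::services::ServeDir` once per request through `tower::ServiceExt::oneshot`. In isolation the pattern looks roughly like this; a sketch assuming axum 0.7 and tower-http 0.5, with `anyhow` for the error conversion as in the commit.

```rust
use anyhow::anyhow;
use axum::{
    body::Body,
    http::{Request, Uri},
    response::{IntoResponse, Response},
};
use tower::ServiceExt;
use tower_http::services::ServeDir;

// Build a fresh empty-body request for `uri` and run the ServeDir service
// exactly once for it, converting whatever comes back into an axum Response.
async fn get_static_file(base_dir: &str, uri: Uri) -> Result<Response, anyhow::Error> {
    let req = Request::builder().uri(uri).body(Body::empty()).unwrap();

    ServeDir::new(base_dir)
        .oneshot(req)
        .await
        .map(IntoResponse::into_response)
        .map_err(|e| anyhow!(e))
}
```

Constructing a service per miss is what the handler's TODO comment is about; routing static files through a shared fallback service would be one way around it, at the cost of restructuring how markdown pages and static files interleave.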

@@ -62,8 +62,8 @@ pub async fn view(

     let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();

-    if should_return_304(&headers, last_changed) {
-        return Ok(StatusCode::NOT_MODIFIED.into_response());
+    if let Some(res) = should_return_304(&headers, last_changed) {
+        return Ok(res);
     }

     posts.sort_by_key(|p| &p.date);
@@ -108,8 +108,8 @@ pub async fn feed(

     let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();

-    if should_return_304(&headers, last_changed) {
-        return Ok(StatusCode::NOT_MODIFIED.into_response());
+    if let Some(res) = should_return_304(&headers, last_changed) {
+        return Ok(res);
     }

     posts.sort_by_key(|p| &p.date);

src/main.rs
@@ -32,6 +32,7 @@ pub struct AppState {
     base_url: Uri,
     pages: HashMap<String, Page>,
     aliases: HashMap<String, String>,
+    settings: Settings,
     tags: HashMap<String, Tag>,
     tera: Tera,
 }
@@ -39,13 +40,15 @@ pub struct AppState {
 #[tokio::main]
 async fn main() -> Result<()> {
     let cfg = settings::get()?;
-    println!("{cfg:?}");

     observability::init(&cfg)?;
-    info!("Starting server...");
-    let app = init_app(&cfg).await?;
-    let listener = TcpListener::bind(&cfg.bind_address).await.unwrap();
-
+    info!("Starting server...");
+    let listener = TcpListener::bind(&cfg.bind_address).await.unwrap();
+    info!(
+        "Bind address: {:?} Base Url: {:?}",
+        cfg.bind_address, cfg.base_url
+    );
+    let app = init_app(cfg).await?;
     axum::serve(listener, app.into_make_service())
         .with_graceful_shutdown(shutdown_signal())
         .await?;
@@ -56,7 +59,7 @@ async fn main() -> Result<()> {
 }

 #[instrument(skip(cfg))]
-async fn init_app(cfg: &Settings) -> Result<axum::routing::Router> {
+async fn init_app(cfg: Settings) -> Result<axum::routing::Router> {
     let base_url: Uri = cfg.base_url.parse().unwrap();

     let tera = Tera::new("templates/**/*")?;
@@ -64,13 +67,17 @@ async fn init_app(cfg: &Settings) -> Result<axum::routing::Router> {
         startup_time: chrono::offset::Utc::now(),
         base_url,
         tera,
+        settings: cfg,
         ..Default::default()
     };

     let pages = page::load_all(&state, "pages/".into()).await?;
     info!("{} pages loaded", pages.len());
     for page in pages.values() {
-        debug!("slug: {}, path: {}", page.slug, page.absolute_path);
+        debug!(
+            "slug: {:?}, path: {:?}, children: {:?}",
+            page.slug, page.absolute_path, page.child_pages
+        );
     }

     let tags = tag::get_tags(pages.values());
@@ -82,11 +89,10 @@ async fn init_app(cfg: &Settings) -> Result<axum::routing::Router> {
             .map(|a| (a.clone(), p.absolute_path.clone()))
         })
         .collect();

     state.pages = pages;
     state.tags = tags;
-    let state = Arc::new(state);

-    info!("Listening at {}", state.base_url);
     Ok(handlers::routes()
         .layer(CorsLayer::permissive())
         .layer(CompressionLayer::new())
@@ -95,7 +101,7 @@ async fn init_app(cfg: &Settings) -> Result<axum::routing::Router> {
                 .make_span_with(observability::make_span)
                 .on_response(observability::on_response),
         )
-        .with_state(state.clone()))
+        .with_state(Arc::new(state)))
 }

 async fn shutdown_signal() {
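
The `axum::serve(...).with_graceful_shutdown(shutdown_signal())` call is untouched by this commit; for context, the future it expects can be as small as the sketch below. This is only an illustration of the usual pattern, not the repository's actual `shutdown_signal`.

```rust
use tokio::signal;

// Resolve once Ctrl+C is received; with_graceful_shutdown then stops
// accepting new connections and lets in-flight requests finish.
async fn shutdown_signal() {
    signal::ctrl_c()
        .await
        .expect("failed to install Ctrl+C handler");
    // On Unix, a SIGTERM listener via tokio::signal::unix::signal is a
    // common addition for containerised deployments.
}
```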

@@ -30,21 +30,19 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> Render
     opt.insert(Options::ENABLE_SMART_PUNCTUATION);
     opt.insert(Options::ENABLE_PLUSES_DELIMITED_METADATA_BLOCKS);

-    let mut content_html = String::new();
+    let mut content_html = String::with_capacity(markdown.len());
     let parser = Parser::new_ext(markdown, opt);

+    let mut accumulated_block = String::new();
     let mut code_lang = None;
-    let mut code_accumulator = String::new();
     let mut meta_kind = None;
-    let mut meta_accumulator = String::new();
     let mut events = Vec::new();
+    let mut metadata = String::new();
     for event in parser {
         match event {
             Event::Text(text) => {
-                if code_lang.is_some() {
-                    code_accumulator.push_str(&text);
-                } else if meta_kind.is_some() {
-                    meta_accumulator.push_str(&text);
+                if code_lang.is_some() || meta_kind.is_some() {
+                    accumulated_block.push_str(&text);
                 } else {
                     events.push(Event::Text(text));
                 }
@@ -54,6 +52,8 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> Render
             }
             Event::End(TagEnd::MetadataBlock(_)) => {
                 meta_kind = None;
+                metadata.push_str(&accumulated_block);
+                accumulated_block.clear();
             }
             Event::Start(Tag::Link {
                 mut dest_url,
@@ -103,12 +103,11 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> Render
             }
             Event::End(TagEnd::CodeBlock) => {
                 let lang = code_lang.take().unwrap_or("".into());
-                let res = hilighting::hilight(&code_accumulator, &lang, Some("base16-ocean.dark"))
+                let res = hilighting::hilight(&accumulated_block, &lang, Some("base16-ocean.dark"))
                     .unwrap();

                 events.push(Event::Html(res.into()));
-                code_accumulator.clear();
+                accumulated_block.clear();
             }
             _ => events.push(event),
         }
@@ -123,6 +122,6 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> Render

     RenderResult {
         content_html,
-        metadata: meta_accumulator,
+        metadata,
     }
 }
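
The markdown change folds the separate code and metadata buffers into one `accumulated_block`, copied into `metadata` when the metadata block closes. Reduced to just the front-matter part, the event loop looks roughly like this; a sketch assuming a pulldown-cmark version with metadata-block support, which the diff already relies on.

```rust
use pulldown_cmark::{Event, Options, Parser, Tag, TagEnd};

// Collect the raw text of a `+++ ... +++` front-matter block and ignore
// everything else in the document.
fn extract_front_matter(markdown: &str) -> String {
    let mut opt = Options::empty();
    opt.insert(Options::ENABLE_PLUSES_DELIMITED_METADATA_BLOCKS);

    let mut in_metadata = false;
    let mut accumulated_block = String::new();
    let mut metadata = String::new();

    for event in Parser::new_ext(markdown, opt) {
        match event {
            Event::Start(Tag::MetadataBlock(_)) => in_metadata = true,
            Event::End(TagEnd::MetadataBlock(_)) => {
                in_metadata = false;
                metadata.push_str(&accumulated_block);
                accumulated_block.clear();
            }
            Event::Text(text) if in_metadata => accumulated_block.push_str(&text),
            _ => {}
        }
    }
    metadata
}
```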

src/page.rs
@@ -1,5 +1,6 @@
 use std::{
     collections::HashMap,
+    ffi::OsStr,
     fmt::Debug,
     hash::{Hash, Hasher},
     path::{Path, PathBuf},
@@ -18,39 +19,53 @@ use crate::{helpers, markdown, AppState, WebsiteError};

 #[derive(Deserialize, Debug, Default)]
 pub struct TomlFrontMatter {
-    pub title: String,
+    pub title: Option<String>,
     pub date: Option<toml::value::Datetime>,
     pub updated: Option<toml::value::Datetime>,
     pub draft: Option<bool>,
+    pub template: Option<String>,
     pub aliases: Option<Vec<String>>,
     pub tags: Option<Vec<String>>,
 }

 #[derive(Serialize, Clone, Debug, Default)]
 pub struct Page {
-    pub title: String,
+    pub title: Option<String>,
     pub draft: bool,
     pub date: Option<DateTime<FixedOffset>>,
     pub updated: Option<DateTime<FixedOffset>>,
     pub aliases: Vec<String>,
     pub tags: Vec<String>,
+    pub child_pages: Vec<String>,
     pub content: String,
+    pub template: String,
     pub slug: String,
     pub absolute_path: String,
+    pub section: Option<String>,
     pub etag: String,
 }

 impl Page {
     pub fn new(slug: String, content: String, fm: TomlFrontMatter) -> Page {
         let mut hasher = std::hash::DefaultHasher::default();
+        fm.title.hash(&mut hasher);
+        fm.draft.hash(&mut hasher);
+        fm.tags.hash(&mut hasher);
         content.hash(&mut hasher);
         let etag = format!("W/\"{:x}\"", hasher.finish());

         Page {
-            absolute_path: format!("/{slug}/"),
+            absolute_path: if slug.is_empty() {
+                String::from("/")
+            } else {
+                format!("/{slug}/")
+            },
             slug,
             etag,
+            section: None,
             content,
+            child_pages: vec![],
+            template: fm.template.unwrap_or_else(|| "page.html".to_string()),
             title: fm.title,
             draft: fm.draft.unwrap_or(false),
             date: fm
@@ -81,10 +96,27 @@ impl Page {
 pub async fn load_all(state: &AppState, folder: PathBuf) -> Result<HashMap<String, Page>> {
     let mut pages = HashMap::<String, Page>::new();
     let mut dirs: Vec<PathBuf> = vec![folder.clone()];
+    let mut section_stack: Vec<String> = vec![];

     while let Some(dir) = dirs.pop() {
         let mut read_dir = fs::read_dir(&dir).await?;

+        let mut section_index = dir.clone();
+        section_index.push("index.md");
+        let is_section = section_index.exists();
+        let current_section = if is_section {
+            let mut page = load_page(state, &section_index, &folder).await?;
+            page.section = section_stack.last().cloned();
+            section_stack.push(page.absolute_path.clone());
+            pages.insert(page.absolute_path.clone(), page);
+            section_stack.last()
+        } else {
+            section_stack.last()
+        };
+        debug!("current section: {current_section:?}");
+
+        let mut child_pages = vec![];
+
         while let Some(entry) = read_dir.next_entry().await? {
             let path = entry.path();
             if path.is_dir() {
@@ -92,11 +124,23 @@ pub async fn load_all(state: &AppState, folder: PathBuf) -> Result<HashMap<Strin
             } else if let Some(ext) = path.extension() {
                 if ext == "md" {
                     // it's a page to load
-                    let page = load_page(state, &path, &folder).await?;
+                    if path.file_name() == Some(OsStr::new("index.md")) {
+                        // we've already loaded the section index page
+                        continue;
+                    }
+                    let mut page = load_page(state, &path, &folder).await?;
+                    page.section = current_section.cloned();
+                    child_pages.push(page.absolute_path.clone());
                     pages.insert(page.absolute_path.clone(), page);
                 }
             }
         }

+        if let Some(section) = current_section {
+            if let Some(section) = pages.get_mut(section) {
+                section.child_pages.append(&mut child_pages);
+            }
+        }
     }

     Ok(pages)
@@ -122,8 +166,7 @@ pub async fn load_page(state: &AppState, path: &Path, root_folder: &Path) -> Res
         &path_str[root.len()..=i]
     } else {
         &path_str[root.len()..]
-    }
-    .trim_start_matches("pages/");
+    };

     let base_uri = helpers::uri_with_path(&state.base_url, base_path);

@@ -140,11 +183,13 @@ pub async fn load_page(state: &AppState, path: &Path, root_folder: &Path) -> Res
 pub async fn render_page(state: &AppState, page: &Page) -> Result<String, WebsiteError> {
     let mut ctx = tera::Context::new();
     ctx.insert("page", &page);
+    ctx.insert("all_pages", &state.pages);
+    ctx.insert("site_title", &state.settings.title);
     ctx.insert("base_url", &state.base_url.to_string());

     state
         .tera
-        .render("post.html", &ctx)
+        .render(&page.template, &ctx)
         .map_err(std::convert::Into::into)
 }
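
The constructor now special-cases the empty slug (so that page gets `/` rather than `//` as its path) and lets front matter pick the template, defaulting to `page.html`. A hypothetical unit test inside this module could pin that behaviour down like this; these tests are not part of the commit.

```rust
#[cfg(test)]
mod section_defaults_tests {
    use super::{Page, TomlFrontMatter};

    #[test]
    fn empty_slug_maps_to_root_and_template_defaults() {
        // Empty slug: the absolute path collapses to "/" instead of "//".
        let root = Page::new(String::new(), String::new(), TomlFrontMatter::default());
        assert_eq!(root.absolute_path, "/");
        // No `template` in front matter: fall back to "page.html".
        assert_eq!(root.template, "page.html");

        // Non-empty slugs keep the old "/<slug>/" shape.
        let post = Page::new("posts/foo".into(), String::new(), TomlFrontMatter::default());
        assert_eq!(post.absolute_path, "/posts/foo/");
    }
}
```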

@@ -2,15 +2,16 @@ use anyhow::{Error, Result};
 use config::Config;
 use serde::Deserialize;

-#[derive(Deserialize, Debug)]
+#[derive(Deserialize, Debug, Default)]
 pub struct Settings {
+    pub title: String,
     pub base_url: String,
     pub bind_address: String,
     pub logging: String,
     pub otlp: Otlp,
 }

-#[derive(Deserialize, Debug)]
+#[derive(Deserialize, Debug, Default)]
 pub struct Otlp {
     pub enabled: bool,
     pub endpoint: String,
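
`Settings` gains the `title` field that the new `title = "tollyx.se"` config line and the `site_title` template variable feed from, plus `Default`, which the `..Default::default()` in `init_app` needs now that `AppState` holds a `Settings`. The loader itself is not part of this diff; one plausible shape for a `settings::get()` built on the `config` crate is sketched below, with the file name and separator purely assumptions.

```rust
use anyhow::Result;
use config::Config;
use serde::Deserialize;

#[derive(Deserialize, Debug, Default)]
pub struct Otlp {
    pub enabled: bool,
    pub endpoint: String,
}

#[derive(Deserialize, Debug, Default)]
pub struct Settings {
    pub title: String,
    pub base_url: String,
    pub bind_address: String,
    pub logging: String,
    pub otlp: Otlp,
}

// Layer an optional config file under environment-variable overrides and
// deserialize straight into Settings.
pub fn get() -> Result<Settings> {
    let settings = Config::builder()
        .add_source(config::File::with_name("config").required(false))
        .add_source(config::Environment::default().separator("__"))
        .build()?
        .try_deserialize()?;
    Ok(settings)
}
```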

@@ -1,26 +0,0 @@
-{% extends "base.html" %}
-
-{% block main %}
-<h1>tollyx.net</h1>
-<p>hi hello welcome to my website it's pretty wip right now yeah ok bye</p>
-<h2>todo</h2>
-<ul>
-  <li>✅ static content</li>
-  <li>✅ template rendering (tera)</li>
-  <li>✅ markdown rendering (pulldown_cmark)</li>
-  <li>✅ post metadata (frontmatter, toml)</li>
-  <li>✅ app metrics (page hits, etc)</li>
-  <li>✅ tests</li>
-  <li>✅ page aliases (redirects, for back-compat with old routes)</li>
-  <li>✅ rss/atom/jsonfeed (atom is good enough for now)</li>
-  <li>✅ proper error handling (i guess??)</li>
-  <li>✅ code hilighting? (good enough for now, gotta figure out themes n' stuff later)</li>
-  <li>⬜ cache headers (etag, last-modified, returning 304, other related headers)
-  <li>⬜ sass compilation (using rsass? grass?)</li>
-  <li>⬜ fancy styling</li>
-  <li>⬜ other pages???</li>
-  <li>⬜ graphviz to svg rendering??</li>
-  <li>⬜ image processing?? (resizing, conversion)</li>
-  <li>⬜ opentelemetry?</li>
-</ul>
-{% endblock main %}

@@ -14,7 +14,7 @@
   {%- endif -%}
   </small>
 {%- endif %}
-{{ page.content | safe -}}
+{{ page.content | safe }}
 {% if page.tags -%}
 <small>
   <ul class="tags">

@@ -2,29 +2,27 @@
 <meta charset="UTF-8">
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=1.0">
-<link rel="alternate" type="application/rss+xml" href="{{base_url | safe}}atom.xml" title="tollyx.net">
+<link rel="alternate" type="application/rss+xml" href="{{base_url | safe}}atom.xml" title="{{ site_title }}">
 {% if tag_slug -%}
-<link rel="alternate" type="application/rss+xml" href="{{base_url | safe}}tags/{{ tag_slug }}/atom.xml" title="tollyx.net: Posts tagged #{{ tag_slug }}">
+<link rel="alternate" type="application/rss+xml" href="{{base_url | safe}}tags/{{ tag_slug }}/atom.xml" title="{{ site_title }}: Posts tagged #{{ tag_slug }}">
 {%- endif %}
 <link rel="stylesheet" href="{{base_url | safe}}static/site.css">
 <link rel="icon" type="image/png" href="{{base_url | safe}}static/avatar.png" />
-<meta property="og:type" content="website">
-<meta property="og:site_name" content="tollyx.net" />
-{% if page -%}
-<meta property="og:title" content="{{ page.title }} - tollyx.net" />
-{%- elif tag -%}
-<meta property="og:title" content="#{{ tag.slug }} - tollyx.net" />
-{%- else -%}
-<meta property="og:title" content="tollyx.net" />
-{%- endif %}
-<meta property="og:image" content="{{base_url | safe}}avatar.png" />
 <meta name="twitter:card" content="summary" />
 <meta name="twitter:site" content="@tollyx" />
 <meta name="twitter:creator" content="@tollyx" />
 <meta name="twitter:dnt" content="on">
+<meta property="og:type" content="website">
+<meta property="og:site_name" content="{{ site_title }}" />
+<meta property="og:image" content="{{base_url | safe}}avatar.png" />
 {% if page.title -%}
-<title>{{ page.title }} | tollyx.net</title>
+<meta property="og:title" content="{{ page.title }} - {{ site_title }}" />
+<title>{{ page.title }} | {{ site_title }}</title>
+{%- elif tag -%}
+<meta property="og:title" content="#{{ tag.slug }} - {{ site_title }}" />
+<title>#{{ tag.slug }} | {{ site_title }}</title>
 {%- else -%}
-<title>tollyx.net</title>
+<meta property="og:title" content="{{ site_title }}" />
+<title>{{ site_title }}</title>
 {%- endif %}
 </head>

@@ -1,5 +1,5 @@
 <header>
   <nav>
-    <img src="{{base_url | safe}}static/avatar.png" class="avatar"/> <a href="{{base_url | safe}}">tollyx</a> - <a href="{{base_url | safe}}posts/">posts</a>
+    <img src="{{base_url | safe}}static/avatar.png" class="avatar"/> <b><a href="{{base_url | safe}}">{{ site_title }}</a></b> - <a href="{{base_url | safe}}posts/">posts</a>
   </nav>
 </header>