diff --git a/.continerignore b/.continerignore
new file mode 100644
index 0000000..6579a10
--- /dev/null
+++ b/.continerignore
@@ -0,0 +1,3 @@
+.*
+/target
+/data
diff --git a/.dockerignore b/.dockerignore
deleted file mode 100644
index eb205fe..0000000
--- a/.dockerignore
+++ /dev/null
@@ -1,3 +0,0 @@
-.git
-.vscode
-target
diff --git a/Dockerfile b/Containerfile
similarity index 96%
rename from Dockerfile
rename to Containerfile
index 2b494ac..4cec677 100644
--- a/Dockerfile
+++ b/Containerfile
@@ -71,5 +71,7 @@ EXPOSE 8080
 USER website:website
 
 ENV RUST_LOG="debug"
+ENV TLX_WATCH="false"
+ENV TLX_LOG_FORMAT="json"
 
 ENTRYPOINT ["/app/website"]
\ No newline at end of file
diff --git a/config.toml b/config.toml
index 41a65d8..3dcebd9 100644
--- a/config.toml
+++ b/config.toml
@@ -1,13 +1,7 @@
 title = "tollyx.se"
 base_url = "http://localhost:8080/"
 bind_address = "0.0.0.0:8080"
-logging = "website=debug,axum=debug,info"
+logging = "website=debug"
+log_format = "compact"
 drafts = true
-
-[otlp]
-enabled = true
-endpoint = "http://otel-collector:4317"
-authorization = "Basic YWRyaWFuQHRvbGx5eC5uZXQ6N3VHVDU1NGpudGdxVE5LMg=="
-organization = "default"
-stream_name = "default"
-tls_insecure = true
+watch = true
diff --git a/pages/index.md b/pages/index.md
index d99e1c9..07796a5 100644
--- a/pages/index.md
+++ b/pages/index.md
@@ -8,7 +8,6 @@ whoah this time it's actually generated from markdown too unlike the other one
 
 anyway here's a new todo list:
 
-
 ## todo
 
 - [x] static content
@@ -20,10 +19,10 @@ anyway here's a new todo list:
 - [x] code hilighting (syntact)
 - [x] cache headers (pages uses etags, some others timestamps. it works)
 - [x] docker from-scratch image (it's small!)
-- [x] opentelemetry (metrics, traces)
+- [x] ~~opentelemetry (metrics, traces)~~ (ripped it out for now)
 - [ ] opentelemetry logs? (don't know if I'm gonna need it? can probably just make the collector grab them from the docker logs?)
 - [x] sections (currently the posts page is hardcoded, should be able to turn any page-subfolder into its own section)
-- [ ] file-watching (rebuild pages when they're changed, not only on startup)
+- [x] file-watching (rebuild pages when they're changed, not only on startup)
 - [ ] live-reload (I guess it's done by some js and a websocket that sends a message to the browser to reload?)
 - [ ] ~~sass/less compilation~~ (don't think I need it, will skip for now)
 - [ ] fancy css (but nothing too fancy, I like it [Simple And Clean](https://youtu.be/0nKizH5TV_g?t=42))
diff --git a/pages/posts/draft-test.md b/pages/posts/draft-test.md
index 1c29ae7..fc6283c 100644
--- a/pages/posts/draft-test.md
+++ b/pages/posts/draft-test.md
@@ -1,7 +1,7 @@
 +++
-title="draft test"
-draft=true
-date=2023-07-29T17:25:20+02:00
+title = "draft test"
+draft = true
+date = 2023-07-29T17:25:20+02:00
 +++
 
 wow look it's a hidden post because it's marked as a draft
diff --git a/src/handlers/mod.rs b/src/handlers/mod.rs
index 0d5afcf..18fb560 100644
--- a/src/handlers/mod.rs
+++ b/src/handlers/mod.rs
@@ -7,6 +7,7 @@ use axum::{
 };
 use std::sync::Arc;
 use time::{OffsetDateTime, format_description::well_known::Rfc2822};
+use tokio::sync::RwLock;
 use tower_http::services::ServeDir;
 use tracing::log::error;
 
@@ -15,7 +16,7 @@ use crate::{AppState, WebsiteError};
 pub mod pages;
 pub mod tags;
 
-pub fn routes() -> Router<Arc<AppState>> {
+pub fn routes() -> Router<Arc<RwLock<AppState>>> {
     Router::new()
         .merge(pages::router())
         .merge(tags::router())
@@ -65,6 +66,8 @@ impl IntoResponse for WebsiteError {
 mod tests {
     use std::{path::PathBuf, sync::Arc};
 
+    use tokio::sync::RwLock;
+
     use crate::AppState;
 
     #[tokio::test]
@@ -79,8 +82,8 @@ mod tests {
         let root = PathBuf::from("pages/");
         let posts = crate::page::load_recursive(&state, &root, &root, None).unwrap();
         state.tags = crate::tag::get_tags(posts.values());
-        state.pages = posts.into();
-        let state = Arc::new(state);
+        state.pages = posts;
+        let state = Arc::new(RwLock::new(state));
 
         super::routes().with_state(state).into_make_service();
     }
diff --git a/src/handlers/pages.rs b/src/handlers/pages.rs
index 97d2da1..e391562 100644
--- a/src/handlers/pages.rs
+++ b/src/handlers/pages.rs
@@ -11,6 +11,7 @@ use axum::{
 };
 
 use time::format_description::well_known::Rfc3339;
+use tokio::sync::RwLock;
 use tower::ServiceExt;
 use tower_http::services::ServeDir;
 use tracing::instrument;
@@ -22,7 +23,7 @@ use crate::{
 
 use super::should_return_304;
 
-pub fn router() -> Router<Arc<AppState>> {
+pub fn router() -> Router<Arc<RwLock<AppState>>> {
     Router::new()
         .route("/atom.xml", get(feed))
         .route("/", get(view))
@@ -32,10 +33,11 @@
 #[instrument(skip(state, uri, method, headers))]
 async fn view(
     OriginalUri(uri): OriginalUri,
-    State(state): State<Arc<AppState>>,
+    State(state): State<Arc<RwLock<AppState>>>,
     method: http::method::Method,
     headers: HeaderMap,
 ) -> Result<Response, WebsiteError> {
+    let state = state.read().await;
     // Fetch post
     let Some(post) = state.pages.get(uri.path()) else {
         // Invalid path for a post, check aliases
@@ -92,9 +94,10 @@ async fn view(
 
 #[instrument(skip(state))]
 pub async fn feed(
-    State(state): State<Arc<AppState>>,
+    State(state): State<Arc<RwLock<AppState>>>,
     headers: HeaderMap,
 ) -> Result<Response, WebsiteError> {
+    let state = state.read().await;
     let mut posts: Vec<&Page> = state.pages.values().filter(|p| p.is_published()).collect();
 
     let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();
diff --git a/src/handlers/tags.rs b/src/handlers/tags.rs
index 2c5d1af..37463db 100644
--- a/src/handlers/tags.rs
+++ b/src/handlers/tags.rs
@@ -10,13 +10,14 @@ use axum::{
 
 use serde_derive::Serialize;
 use time::format_description::well_known::Rfc3339;
+use tokio::sync::RwLock;
 use tracing::instrument;
 
 use crate::{AppState, WebsiteError, page::Page};
 
 use super::should_return_304;
 
-pub fn router() -> Router<Arc<AppState>> {
+pub fn router() -> Router<Arc<RwLock<AppState>>> {
     Router::new()
         .route("/tags", get(|| async { Redirect::permanent("/") }))
         .route("/tags/", get(index))
@@ -31,7 +32,8 @@ struct TagContext<'a> {
 }
 
 #[instrument(skip(state))]
-pub async fn index(State(state): State<Arc<AppState>>) -> Result<Response, WebsiteError> {
+pub async fn index(State(state): State<Arc<RwLock<AppState>>>) -> Result<Response, WebsiteError> {
+    let state = state.read().await;
     let tags: Vec<_> = state.tags.values().collect();
 
     let ctx = TagContext { title: "Tags" };
@@ -55,9 +57,10 @@ pub async fn index(State(state): State<Arc<AppState>>) -> Result<Response, Websi
 #[instrument(skip(state))]
 pub async fn view(
     Path(tag): Path<String>,
-    State(state): State<Arc<AppState>>,
+    State(state): State<Arc<RwLock<AppState>>>,
     headers: HeaderMap,
 ) -> Result<Response, WebsiteError> {
+    let state = state.read().await;
     let mut posts: Vec<&Page> = state
         .pages
         .values()
@@ -103,9 +106,10 @@ pub async fn view(
 #[instrument(skip(state))]
 pub async fn feed(
     Path(slug): Path<String>,
-    State(state): State<Arc<AppState>>,
+    State(state): State<Arc<RwLock<AppState>>>,
     headers: HeaderMap,
 ) -> Result<Response, WebsiteError> {
+    let state = state.read().await;
     let tag = state.tags.get(&slug).ok_or(WebsiteError::NotFound)?;
 
     let mut posts: Vec<&Page> = state
@@ -144,8 +148,9 @@ pub async fn feed(
 #[instrument(skip(state))]
 pub async fn redirect(
     Path(slug): Path<String>,
-    State(state): State<Arc<AppState>>,
+    State(state): State<Arc<RwLock<AppState>>>,
 ) -> Result<Redirect, WebsiteError> {
+    let state = state.read().await;
     if state.tags.contains_key(&slug) {
         Ok(Redirect::permanent(&format!("/tags/{slug}/")))
     } else {
diff --git a/src/main.rs b/src/main.rs
index 5939ac0..39940f7 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,8 +1,9 @@
 #![warn(clippy::pedantic)]
 
-use std::{collections::HashMap, fmt::Display, path::PathBuf, sync::Arc};
+use std::{collections::HashMap, fmt::Display, path::Path, sync::Arc};
 
 use axum::http::Uri;
+use notify::Watcher;
 
 use page::Page;
 use settings::Settings;
@@ -10,11 +11,12 @@ use tag::Tag;
 use tera::Tera;
 use time::OffsetDateTime;
 
-use tokio::{net::TcpListener, signal};
+use tokio::{net::TcpListener, signal, sync::RwLock};
 use tower_http::{compression::CompressionLayer, cors::CorsLayer, trace::TraceLayer};
-use tracing::{instrument, log::info};
+use tracing::{debug, error, instrument, log::info};
 
 use anyhow::{Error, Result};
+use tracing_subscriber::EnvFilter;
 
 mod feed;
 mod handlers;
@@ -28,7 +30,7 @@ mod tag;
 pub struct AppState {
     startup_time: OffsetDateTime,
     base_url: Uri,
-    pages: Arc<HashMap<String, Page>>,
+    pages: HashMap<String, Page>,
     aliases: HashMap<String, String>,
     settings: Settings,
     tags: HashMap<String, Tag>,
@@ -43,7 +45,7 @@ impl Default for AppState {
             tera: Tera::default(),
             settings: Settings::default(),
             aliases: HashMap::default(),
-            pages: Arc::new(HashMap::default()),
+            pages: HashMap::default(),
             tags: HashMap::default(),
         }
     }
@@ -61,8 +63,8 @@ impl AppState {
             ..Default::default()
         };
 
-        let root_path: PathBuf = "pages/".into();
-        let pages = page::load_recursive(&state, &root_path, &root_path, None)?;
+        let root_path = Path::new("pages/");
+        let pages = page::load_recursive(&state, root_path, root_path, None)?;
 
         info!("{} pages loaded", pages.len());
         let tags = tag::get_tags(pages.values());
@@ -75,9 +77,7 @@ impl AppState {
             })
             .collect();
 
-        let pages = Arc::new(pages);
-
-        state.pages.clone_from(&pages);
+        state.pages = pages;
         state.tags = tags;
         Ok(state)
     }
@@ -86,10 +86,9 @@ impl AppState {
 
 #[tokio::main]
 async fn main() -> Result<()> {
     let cfg = settings::get()?;
-    tracing_subscriber::fmt()
-        .pretty()
-        .with_env_filter(&cfg.logging)
-        .init();
+
+    setup_tracing(&cfg);
+
     let addr = cfg.bind_address.clone();
     let url = cfg.base_url.clone();
@@ -105,15 +104,138 @@ async fn main() -> Result<()> {
     Ok(())
 }
 
+fn setup_tracing(cfg: &Settings) {
+    match cfg.log_format.as_str() {
+        "pretty" => tracing_subscriber::fmt()
+            .pretty()
+            .with_env_filter(
+                EnvFilter::builder()
+                    .with_default_directive(cfg.logging.parse().unwrap_or_default())
+                    .from_env_lossy(),
+            )
+            .init(),
+        "compact" => tracing_subscriber::fmt()
+            .compact()
+            .with_env_filter(
+                EnvFilter::builder()
+                    .with_default_directive(cfg.logging.parse().unwrap_or_default())
+                    .from_env_lossy(),
+            )
+            .init(),
+        "json" => tracing_subscriber::fmt()
+            .json()
+            .with_env_filter(
+                EnvFilter::builder()
+                    .with_default_directive(cfg.logging.parse().unwrap_or_default())
+                    .from_env_lossy(),
+            )
+            .init(),
+        _ => tracing_subscriber::fmt()
+            .with_env_filter(
+                EnvFilter::builder()
+                    .with_default_directive(cfg.logging.parse().unwrap_or_default())
+                    .from_env_lossy(),
+            )
+            .init(),
+    }
+}
+
 #[instrument(skip(cfg))]
 async fn init_app(cfg: Settings) -> Result<axum::routing::Router> {
+    let watch = cfg.watch;
     let state = AppState::load(cfg)?;
+    let state = Arc::new(RwLock::new(state));
+
+    if watch {
+        tokio::spawn(start_file_watcher(state.clone()));
+    }
 
     Ok(handlers::routes()
         .layer(CorsLayer::permissive())
         .layer(CompressionLayer::new())
         .layer(TraceLayer::new_for_http())
-        .with_state(Arc::new(state)))
+        .with_state(state))
+}
+
+async fn start_file_watcher(state: Arc<RwLock<AppState>>) {
+    let (page_tx, mut page_rx) = tokio::sync::mpsc::channel::<notify::Event>(1);
+
+    let mut page_watcher =
+        notify::recommended_watcher(move |event: Result<notify::Event, notify::Error>| {
+            let Ok(event) = event.inspect_err(|e| error!("File watcher error: {}", e)) else {
+                return;
+            };
+            _ = page_tx
+                .blocking_send(event)
+                .inspect_err(|e| error!("Failed to add watch event to channel: {}", e));
+        })
+        .expect("create page file watcher");
+
+    page_watcher
+        .watch(Path::new("pages/"), notify::RecursiveMode::Recursive)
+        .expect("add pages dir to watcher");
+
+    let page_fut = async {
+        while let Some(event) = page_rx.recv().await {
+            if !(event.kind.is_create() || event.kind.is_remove() || event.kind.is_modify()) {
+                continue;
+            }
+            if !event.paths.iter().any(|p| p.is_file()) {
+                continue;
+            }
+            debug!("{:?}", event);
+
+            let mut state = state.write().await;
+
+            info!("Reloading pages");
+            let root_path = Path::new("pages/");
+            if let Ok(pages) = page::load_recursive(&state, root_path, root_path, None)
+                .inspect_err(|err| error!("Error reloading pages: {}", err))
+            {
+                state.pages = pages;
+            }
+        }
+    };
+
+    let (template_tx, mut template_rx) = tokio::sync::mpsc::channel::<notify::Event>(1);
+
+    let mut template_watcher =
+        notify::recommended_watcher(move |event: Result<notify::Event, notify::Error>| {
+            let Ok(event) = event.inspect_err(|e| error!("File watcher error: {}", e)) else {
+                return;
+            };
+            _ = template_tx
+                .blocking_send(event)
+                .inspect_err(|e| error!("Failed to add watch event to channel: {}", e));
+        })
+        .expect("create template file watcher");
+
+    template_watcher
+        .watch(Path::new("templates/"), notify::RecursiveMode::Recursive)
+        .expect("add templates dir to watcher");
+
+    let template_fut = async {
+        while let Some(event) = template_rx.recv().await {
+            if !(event.kind.is_create() || event.kind.is_remove() || event.kind.is_modify()) {
+                continue;
+            }
+            if !event.paths.iter().any(|p| p.is_file()) {
+                continue;
+            }
+            debug!("{:?}", event);
+
+            let mut state = state.write().await;
+
+            info!("Reloading templates");
+            _ = state
+                .tera
+                .full_reload()
+                .inspect_err(|err| error!("Error reloading templates: {}", err));
+        }
+    };
+
+    info!("file watchers initialized");
+    tokio::join!(page_fut, template_fut);
 }
 
 async fn shutdown_signal() {
diff --git a/src/markdown.rs b/src/markdown.rs
index f600acf..46ea892 100644
--- a/src/markdown.rs
+++ b/src/markdown.rs
@@ -1,27 +1,31 @@
+use std::sync::LazyLock;
+
 use crate::helpers;
 use crate::hilighting;
 
 use axum::http::Uri;
-use cached::once_cell::sync::Lazy;
 use pulldown_cmark::CodeBlockKind;
 use pulldown_cmark::Event;
 use pulldown_cmark::LinkType;
+use pulldown_cmark::MetadataBlockKind;
 use pulldown_cmark::Tag;
 use pulldown_cmark::TagEnd;
 use pulldown_cmark::{Options, Parser};
 use regex::Regex;
 use tracing::instrument;
 
-static STARTS_WITH_SCHEMA_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\w+:").unwrap());
-static EMAIL_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^.+?@\w+(\.\w+)*$").unwrap());
+static STARTS_WITH_SCHEMA_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"^\w+:").unwrap());
+static EMAIL_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"^.+?@\w+(\.\w+)*$").unwrap());
 
 pub struct RenderResult {
     pub content_html: String,
     pub metadata: String,
+    pub meta_kind: Option<MetadataBlockKind>,
 }
 
 #[instrument(skip(markdown))]
 pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> RenderResult {
     let mut opt = Options::empty();
+    opt.insert(Options::ENABLE_FOOTNOTES);
     opt.insert(Options::ENABLE_HEADING_ATTRIBUTES);
     opt.insert(Options::ENABLE_STRIKETHROUGH);
@@ -36,12 +40,13 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> Render
     let mut accumulated_block = String::new();
     let mut code_lang = None;
     let mut meta_kind = None;
+    let mut block_in_progress = false;
     let mut events = Vec::new();
     let mut metadata = String::new();
     for event in parser {
         match event {
             Event::Text(text) => {
-                if code_lang.is_some() || meta_kind.is_some() {
+                if block_in_progress {
                     accumulated_block.push_str(&text);
                 } else {
                     events.push(Event::Text(text));
@@ -49,9 +54,10 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> Render
             }
             Event::Start(Tag::MetadataBlock(kind)) => {
                 meta_kind = Some(kind);
+                block_in_progress = true;
             }
             Event::End(TagEnd::MetadataBlock(_)) => {
-                meta_kind = None;
+                block_in_progress = false;
                 metadata.push_str(&accumulated_block);
                 accumulated_block.clear();
             }
@@ -99,6 +105,7 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> Render
             Event::Start(Tag::CodeBlock(kind)) => {
                 if let CodeBlockKind::Fenced(lang) = kind {
                     code_lang = Some(lang);
+                    block_in_progress = true;
                 }
             }
             Event::End(TagEnd::CodeBlock) => {
@@ -106,6 +113,7 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> Render
                     let res =
                         hilighting::hilight(&accumulated_block, &lang, Some("base16-ocean.dark"))
                             .unwrap();
+                    block_in_progress = false;
                     events.push(Event::Html(res.into()));
                     accumulated_block.clear();
                 }
@@ -123,5 +131,6 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> Render
     RenderResult {
         content_html,
         metadata,
+        meta_kind,
     }
 }
diff --git a/src/page.rs b/src/page.rs
index df65c00..67c2a4e 100644
--- a/src/page.rs
+++ b/src/page.rs
@@ -8,6 +8,7 @@ use std::{
 
 use anyhow::Result;
 
+use pulldown_cmark::MetadataBlockKind;
 use serde_derive::{Deserialize, Serialize};
 use time::{OffsetDateTime, format_description::well_known::Rfc3339};
 
@@ -16,7 +17,7 @@ use tracing::{debug, info, instrument};
 use crate::{AppState, WebsiteError, helpers, markdown};
 
 #[derive(Deserialize, Debug, Default)]
-pub struct TomlFrontMatter {
+pub struct FrontMatter {
     pub title: Option<String>,
     pub date: Option<toml::value::Datetime>,
     pub updated: Option<toml::value::Datetime>,
@@ -52,7 +53,7 @@ pub struct PageSummary {
 }
 
 impl Page {
-    pub fn new(slug: String, content: String, fm: TomlFrontMatter) -> Page {
+    pub fn new(slug: String, content: String, fm: FrontMatter) -> Page {
         let mut hasher = std::hash::DefaultHasher::default();
         fm.title.hash(&mut hasher);
         fm.draft.hash(&mut hasher);
@@ -199,12 +200,13 @@ pub fn load_page(state: &AppState, path: &Path, root_folder: &Path) -> Result<Op
     let base_uri = helpers::uri_with_path(&state.base_url, base_path);
 
     let content = markdown::render_markdown_to_html(Some(&base_uri), &content);
+    let frontmatter = match content.meta_kind {
+        Some(MetadataBlockKind::PlusesStyle) => toml::from_str(&content.metadata)?,
+        Some(MetadataBlockKind::YamlStyle) => unimplemented!("YAML frontmatter is not implemented"),
+        None => FrontMatter::default(),
+    };
 
-    let page = Page::new(
-        slug.to_string(),
-        content.content_html,
-        toml::from_str(&content.metadata)?,
-    );
+    let page = Page::new(slug.to_string(), content.content_html, frontmatter);
 
     Ok(if state.settings.drafts || !page.draft {
         debug!("Loaded: {}", &path_str);
@@ -218,9 +220,9 @@ pub fn load_page(state: &AppState, path: &Path, root_folder: &Path) -> Result<Op
 pub async fn render_page(state: &AppState, page: &Page) -> Result<String, WebsiteError> {
     let mut ctx = tera::Context::new();
     ctx.insert("page", &page);
-    ctx.insert("all_pages", state.pages.as_ref());
     ctx.insert("site_title", &state.settings.title);
     ctx.insert("base_url", &state.base_url.to_string());
+    ctx.insert("drafts", &state.settings.drafts);
 
     info!(
         "Rendering page {} with template: {}",
@@ -249,9 +251,7 @@ mod tests {
             ..Default::default()
         };
         let root: PathBuf = "pages/".into();
-        state.pages = super::load_recursive(&state, &root, &root, None)
-            .unwrap()
-            .into();
+        state.pages = super::load_recursive(&state, &root, &root, None).unwrap();
         for post in state.pages.values() {
             super::render_page(&state, post).await.unwrap();
         }
diff --git a/src/settings.rs b/src/settings.rs
index 339c83c..11b675f 100644
--- a/src/settings.rs
+++ b/src/settings.rs
@@ -8,7 +8,9 @@ pub struct Settings {
     pub base_url: String,
     pub bind_address: String,
     pub logging: String,
+    pub log_format: String,
     pub drafts: bool,
+    pub watch: bool,
 }
 
 impl Settings {
@@ -18,8 +20,10 @@ impl Settings {
             title: "Test".to_owned(),
             base_url: "http://localhost".to_owned(),
             bind_address: "0.0.0.0:8080".to_owned(),
-            logging: "trace".to_owned(),
+            logging: "info".to_owned(),
+            log_format: "json".to_owned(),
             drafts: true,
+            watch: false,
         }
     }
 }
diff --git a/templates/base.html b/templates/base.html
index 2628661..7c392ac 100644
--- a/templates/base.html
+++ b/templates/base.html
@@ -1,11 +1,13 @@
 <!DOCTYPE html>
 <html lang="en">
 {% include "partials/head.html" %}
+
 <body>
-{% include "partials/header.html" -%}
-<main>
-  {% block main %}{% endblock main -%}
-</main>
-{% include "partials/footer.html" -%}
+  {% include "partials/header.html" -%}
+  <main>
+    {% block main %}{% endblock main -%}
+  </main>
+  {% include "partials/footer.html" -%}
 </body>
-</html>
+
+</html>
\ No newline at end of file
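
Aside, not part of the patch: the four arms of the new setup_tracing each repeat the same EnvFilter construction, because the pretty/compact/json formatter builders have different types and so each arm has to call .init() itself. A minimal sketch of one way to keep the filter logic in a single place, assuming the same tracing-subscriber crate with the "env-filter" and "json" features the diff already relies on:

// Sketch only; intended to behave the same as the setup_tracing added above.
use tracing_subscriber::EnvFilter;

fn setup_tracing(cfg: &Settings) {
    // EnvFilter is not Clone, so build a fresh one per arm via a closure.
    let filter = || {
        EnvFilter::builder()
            .with_default_directive(cfg.logging.parse().unwrap_or_default())
            .from_env_lossy()
    };

    match cfg.log_format.as_str() {
        "pretty" => tracing_subscriber::fmt().pretty().with_env_filter(filter()).init(),
        "compact" => tracing_subscriber::fmt().compact().with_env_filter(filter()).init(),
        "json" => tracing_subscriber::fmt().json().with_env_filter(filter()).init(),
        _ => tracing_subscriber::fmt().with_env_filter(filter()).init(),
    }
}

Whether that is worth it over the explicit arms in the patch is a style call; the resulting subscriber is the same either way.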