1
0
Fork 0

too much stuff sorry

This commit is contained in:
Adrian Hedqvist 2024-04-19 19:36:25 +02:00
parent d8050d2e89
commit bdc291b539
19 changed files with 710 additions and 581 deletions

1
.gitignore vendored
View file

@ -1 +1,2 @@
/target
/data

718
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -13,9 +13,8 @@ chrono = { version = "0.4.31", features = ["serde"] }
config = "0.14.0"
glob = "0.3.0"
opentelemetry = { version = "0.22.0", features = ["trace", "metrics"] }
opentelemetry-otlp = { version = "0.15.0", features = ["trace", "metrics"] }
opentelemetry-otlp = { version = "0.15.0", features = ["trace", "metrics", "logs"] }
opentelemetry_sdk = { version = "0.22.1", features = ["rt-tokio", "trace", "metrics"] }
prometheus = { version = "0.13.3", features = ["process"] }
pulldown-cmark = "0.10.2"
regex = "1.7.2"
serde = "1.0.144"
@ -25,6 +24,7 @@ syntect = "5.0.0"
tera = { version = "1.19.1", features = ["builtins"] }
tokio = { version = "1.34.0", features = ["full", "tracing"] }
toml = "0.8.8"
tonic = "0.11.0"
tower = { version = "0.4.13", features = ["full"] }
tower-http = { version = "0.5.2", features = ["full"] }
tracing = "0.1.35"

View file

@ -63,6 +63,7 @@ COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/website ./
COPY ./static ./static
COPY ./templates ./templates
COPY ./posts ./posts
COPY ./config.toml ./config.toml
EXPOSE 8180
@ -71,4 +72,4 @@ USER website:website
ENV RUST_LOG="debug"
CMD ["./website"]
ENTRYPOINT ["/app/website"]

35
compose.yaml Normal file
View file

@ -0,0 +1,35 @@
# Local dev stack: the website, an OpenTelemetry collector, and OpenObserve
# as the telemetry backend. (Indentation restored from the flattened render.)
name: "tlxite"
services:
  web:
    build: .
    ports:
      - "8080:8080"
    depends_on:
      - otel-collector
  otel-collector:
    image: otel/opentelemetry-collector:latest
    restart: always
    # ${OTELCOL_ARGS} allows extra collector flags via the host environment.
    command: ["--config=/etc/otel-collector-config.yaml", "${OTELCOL_ARGS}"]
    volumes:
      - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
    ports:
      - "1888:1888" # pprof extension
      - "8888:8888" # Prometheus metrics exposed by the collector
      - "8889:8889" # Prometheus exporter metrics
      - "13133:13133" # health_check extension
      - "4317:4317" # OTLP gRPC receiver
      - "55679:55679" # zpages extension
    depends_on:
      - openobserve
  openobserve:
    image: public.ecr.aws/zinclabs/openobserve:latest
    restart: unless-stopped
    environment:
      ZO_ROOT_USER_EMAIL: "adrian@tollyx.net"
      # NOTE(review): plaintext credential committed to the repo — move to an
      # env file or secret before this leaves local development.
      ZO_ROOT_USER_PASSWORD: "Planka"
    ports:
      - "5080:5080"
    volumes:
      - ./data:/data
volumes:
  data:

View file

@ -1,3 +1,11 @@
base_url = "http://localhost:8080/"
bind_address = "0.0.0.0:8080"
logging = "info,website=debug"
[otlp]
enabled = false
endpoint = "http://otel-collector:4317"
authorization = "Basic YWRyaWFuQHRvbGx5eC5uZXQ6N3VHVDU1NGpudGdxVE5LMg=="
organization = "default"
stream_name = "default"
tls_insecure = true

View file

@ -0,0 +1,39 @@
# OpenTelemetry Collector pipeline: receive OTLP over gRPC, batch, and export
# to both the debug exporter and OpenObserve. (Indentation restored from the
# flattened render.)
receivers:
  otlp:
    protocols:
      grpc:
exporters:
  debug:
  otlp/openobserve:
    endpoint: openobserve:5081
    headers:
      # NOTE(review): basic-auth credential committed in plaintext — inject via
      # environment substitution instead of hard-coding it here.
      Authorization: "Basic YWRyaWFuQHRvbGx5eC5uZXQ6bDVVV21IVHlSd0lmSTJ4Qg=="
      organization: default
      stream-name: default
    tls:
      insecure: true
processors:
  batch:
extensions:
  health_check:
  pprof:
    endpoint: :1888
  zpages:
    endpoint: :55679
service:
  extensions: [pprof, zpages, health_check]
  pipelines:
    traces:
      receivers: [otlp]
      processors: [batch]
      exporters: [debug, otlp/openobserve]
    metrics:
      receivers: [otlp]
      processors: [batch]
      exporters: [debug, otlp/openobserve]

View file

@ -15,6 +15,8 @@ modified post test, see if docker skips build using
testing "smart" punctuation --- I don't know if I want it. 'it should' do some fancy stuff.
Here's a footnote for testing[^footnote]
code highlighting test:
```rs
@ -23,10 +25,14 @@ fn main() {
}
```
uh oh, here comes a screenshot from a different post!
uh oh, here comes a screenshot from a different post![^2]
![dungeon screenshot](../dungeon/screenshot.png)
and here it is again, except it should 404!
![missing dungeon screenshot](../dungeon/screenshot.jpeg)
[^footnote]: Who is this anyway!
[^2]: a second footnote oh my!

View file

@ -3,7 +3,7 @@ use serde_derive::Serialize;
use tracing::instrument;
use crate::{post::Post, tag::Tag, AppState};
use crate::{page::Page, tag::Tag, AppState};
#[derive(Serialize)]
struct FeedContext<'a> {
@ -11,13 +11,13 @@ struct FeedContext<'a> {
base_url: &'a str,
last_updated: &'a str,
tag: Option<&'a Tag>,
posts: &'a [&'a Post],
posts: &'a [&'a Page],
}
#[instrument(skip(state))]
pub fn render_atom_feed(state: &AppState) -> Result<String> {
let mut posts: Vec<_> = state
.posts
.pages
.values()
.filter(|p| !p.draft && p.is_published())
.collect();
@ -43,7 +43,7 @@ pub fn render_atom_feed(state: &AppState) -> Result<String> {
#[instrument(skip(tag, state))]
pub fn render_atom_tag_feed(tag: &Tag, state: &AppState) -> Result<String> {
let mut posts: Vec<_> = state
.posts
.pages
.values()
.filter(|p| !p.draft && p.is_published() && p.tags.contains(&tag.slug))
.collect();

View file

@ -1,5 +1,4 @@
use axum::{
body::Body,
extract::{Request, State},
http::{header, HeaderMap, StatusCode},
middleware::Next,
@ -7,9 +6,9 @@ use axum::{
routing::get,
Router,
};
use cached::once_cell::sync::Lazy;
use chrono::{DateTime, FixedOffset};
use prometheus::{opts, Encoder, IntCounterVec, TextEncoder};
use opentelemetry::{global, metrics::Counter, KeyValue};
use tokio::sync::OnceCell;
use std::sync::Arc;
use tower_http::services::ServeDir;
use tracing::{
@ -19,31 +18,29 @@ use tracing::{
use crate::{AppState, WebsiteError};
pub mod posts;
pub mod pages;
pub mod tags;
pub static HIT_COUNTER: Lazy<IntCounterVec> = Lazy::new(|| {
prometheus::register_int_counter_vec!(
opts!(
"http_requests_total",
"Total amount of http requests received"
),
&["route", "method", "status"]
)
.unwrap()
});
pub static HIT_COUNTER: OnceCell<Counter<u64>> = OnceCell::const_new();
async fn record_hit(method: String, path: String) {
let counter = HIT_COUNTER.get_or_init(|| async {
global::meter("tlxite").u64_counter("page_hit_count").init()
}).await;
counter.add(1, &[KeyValue::new("path", format!("{method} {path}"))]);
}
pub fn routes(state: &Arc<AppState>) -> Router<Arc<AppState>> {
Router::new()
.route("/", get(index))
.merge(posts::router())
.merge(pages::router())
.merge(tags::router())
.merge(posts::alias_router(state.posts.values()))
.merge(pages::alias_router(state.pages.values()))
.route("/healthcheck", get(healthcheck))
.route_service("/posts/:slug/*path", ServeDir::new("./"))
.route_service("/static/*path", ServeDir::new("./"))
.layer(axum::middleware::from_fn(metrics_middleware))
.route("/metrics", get(metrics))
}
#[instrument(skip(state))]
@ -72,20 +69,6 @@ async fn healthcheck() -> &'static str {
"OK"
}
#[instrument]
async fn metrics() -> impl IntoResponse {
let encoder = TextEncoder::new();
let metric_families = prometheus::gather();
let mut buffer = vec![];
encoder.encode(&metric_families, &mut buffer).unwrap();
Response::builder()
.status(200)
.header(header::CONTENT_TYPE, encoder.format_type())
.body(Body::from(buffer))
.unwrap()
}
pub async fn not_found() -> impl IntoResponse {
(StatusCode::NOT_FOUND, ())
}
@ -98,13 +81,7 @@ pub async fn metrics_middleware(request: Request, next: Next) -> Response {
let response = next.run(request).await;
if !response.status().is_client_error() {
HIT_COUNTER
.with_label_values(&[&path, method.as_str(), response.status().as_str()])
.inc();
} else if response.status() == StatusCode::NOT_FOUND {
HIT_COUNTER
.with_label_values(&["not found", method.as_str(), response.status().as_str()])
.inc();
record_hit(method.to_string(), path).await;
}
response
@ -171,9 +148,9 @@ mod tests {
};
// Load the actual posts, just to make this test fail if
// aliases overlap with themselves or other routes
let posts = crate::post::load_all(&state).await.unwrap();
let posts = crate::page::load_all(&state, "posts/".into()).await.unwrap();
state.tags = crate::tag::get_tags(posts.values());
state.posts = posts;
state.pages = posts;
let state = Arc::new(state);
super::routes(&state).with_state(state).into_make_service();

View file

@ -12,7 +12,7 @@ use serde_derive::Serialize;
use tracing::{instrument, log::warn};
use crate::{
post::{render_post, Post},
page::{render_post, Page},
AppState, WebsiteError,
};
@ -28,7 +28,7 @@ pub fn router() -> Router<Arc<AppState>> {
.route("/posts/:slug/index.md", get(super::not_found))
}
pub fn alias_router<'a>(posts: impl IntoIterator<Item = &'a Post>) -> Router<Arc<AppState>> {
pub fn alias_router<'a>(posts: impl IntoIterator<Item = &'a Page>) -> Router<Arc<AppState>> {
let mut router = Router::new();
for post in posts {
@ -52,12 +52,12 @@ struct PageContext<'a> {
}
#[instrument(skip(state))]
pub async fn index(
async fn index(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
) -> Result<Response, WebsiteError> {
let mut posts: Vec<&Post> = state
.posts
let mut posts: Vec<&Page> = state
.pages
.values()
.filter(|p| !p.draft && p.is_published())
.collect();
@ -92,12 +92,12 @@ pub async fn index(
}
#[instrument(skip(state))]
pub async fn view(
async fn view(
Path(slug): Path<String>,
State(state): State<Arc<AppState>>,
headers: HeaderMap,
) -> Result<axum::response::Response, WebsiteError> {
let post = state.posts.get(&slug).ok_or(WebsiteError::NotFound)?;
let post = state.pages.get(&slug).ok_or(WebsiteError::NotFound)?;
let last_changed = post.last_modified();
@ -128,8 +128,8 @@ pub async fn feed(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
) -> Result<Response, WebsiteError> {
let mut posts: Vec<&Post> = state
.posts
let mut posts: Vec<&Page> = state
.pages
.values()
.filter(|p| !p.draft && p.is_published())
.collect();
@ -163,7 +163,7 @@ pub async fn redirect(
Path(slug): Path<String>,
State(state): State<Arc<AppState>>,
) -> Result<Redirect, WebsiteError> {
if state.posts.contains_key(&slug) {
if state.pages.contains_key(&slug) {
Ok(Redirect::permanent(&format!("/posts/{slug}/")))
} else {
Err(WebsiteError::NotFound)
@ -174,21 +174,21 @@ pub async fn redirect(
mod tests {
use chrono::DateTime;
use crate::post::Post;
use crate::page::Page;
use super::PageContext;
#[test]
fn render_index() {
let posts = vec![
Post {
Page {
title: "test".into(),
slug: "test".into(),
tags: vec!["abc".into(), "def".into()],
date: Some(DateTime::parse_from_rfc3339("2023-03-26T13:04:01+02:00").unwrap()),
..Default::default()
},
Post {
Page {
title: "test2".into(),
slug: "test2".into(),
date: None,

View file

@ -11,7 +11,7 @@ use axum::{
use serde_derive::Serialize;
use tracing::instrument;
use crate::{post::Post, AppState, WebsiteError};
use crate::{page::Page, AppState, WebsiteError};
use super::should_return_304;
@ -54,8 +54,8 @@ pub async fn view(
State(state): State<Arc<AppState>>,
headers: HeaderMap,
) -> Result<Response, WebsiteError> {
let mut posts: Vec<&Post> = state
.posts
let mut posts: Vec<&Page> = state
.pages
.values()
.filter(|p| !p.draft && p.is_published() && p.tags.contains(&tag))
.collect();
@ -100,8 +100,8 @@ pub async fn feed(
) -> Result<Response, WebsiteError> {
let tag = state.tags.get(&slug).ok_or(WebsiteError::NotFound)?;
let mut posts: Vec<&Post> = state
.posts
let mut posts: Vec<&Page> = state
.pages
.values()
.filter(|p| p.is_published() && p.tags.contains(&slug))
.collect();

View file

@ -4,12 +4,13 @@ use std::{collections::HashMap, fmt::Display, sync::Arc};
use axum::http::Uri;
use chrono::DateTime;
use config::Config;
use post::Post;
use page::Page;
use settings::Settings;
use tag::Tag;
use tera::Tera;
use tokio::net::TcpListener;
use tower_http::{compression::CompressionLayer, cors::CorsLayer};
use tracing::{instrument, log::info};
@ -21,38 +22,38 @@ mod helpers;
mod hilighting;
mod markdown;
mod observability;
mod post;
mod page;
mod settings;
mod tag;
#[derive(Default)]
pub struct AppState {
startup_time: DateTime<chrono::offset::Utc>,
base_url: Uri,
posts: HashMap<String, Post>,
pages: HashMap<String, Page>,
tags: HashMap<String, Tag>,
tera: Tera,
}
#[tokio::main]
async fn main() -> Result<()> {
let cfg = Config::builder()
.add_source(config::File::with_name("config.toml"))
.add_source(config::Environment::with_prefix("WEBSITE"))
.build()?;
let cfg = settings::get()?;
println!("{cfg:?}");
observability::init_tracing(&cfg);
observability::init(&cfg)?;
info!("Starting server...");
let addr = cfg.get_string("bind_address")?;
let app = init_app(&cfg).await?;
let listener = tokio::net::TcpListener::bind(addr).await.unwrap();
let listener = TcpListener::bind(&cfg.bind_address).await.unwrap();
axum::serve(listener, app.into_make_service()).await?;
opentelemetry::global::shutdown_tracer_provider();
Ok(())
}
#[instrument]
async fn init_app(cfg: &Config) -> Result<axum::routing::Router> {
let base_url: Uri = cfg.get_string("base_url")?.parse().unwrap();
#[instrument(skip(cfg))]
async fn init_app(cfg: &Settings) -> Result<axum::routing::Router> {
let base_url: Uri = cfg.base_url.parse().unwrap();
let tera = Tera::new("templates/**/*")?;
let mut state = AppState {
@ -62,9 +63,9 @@ async fn init_app(cfg: &Config) -> Result<axum::routing::Router> {
..Default::default()
};
let posts = post::load_all(&state).await?;
let posts = page::load_all(&state, "pages/".into()).await?;
let tags = tag::get_tags(posts.values());
state.posts = posts;
state.pages = posts;
state.tags = tags;
let state = Arc::new(state);

View file

@ -4,6 +4,7 @@ use axum::http::Uri;
use cached::once_cell::sync::Lazy;
use pulldown_cmark::CodeBlockKind;
use pulldown_cmark::Event;
use pulldown_cmark::LinkType;
use pulldown_cmark::Tag;
use pulldown_cmark::TagEnd;
use pulldown_cmark::{Options, Parser};
@ -13,8 +14,13 @@ use tracing::instrument;
static STARTS_WITH_SCHEMA_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\w+:").unwrap());
static EMAIL_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^.+?@\w+(\.\w+)*$").unwrap());
pub struct RenderResult {
pub content_html: String,
pub metadata: String
}
#[instrument(skip(markdown))]
pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> String {
pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> RenderResult {
let mut opt = Options::empty();
opt.insert(Options::ENABLE_FOOTNOTES);
opt.insert(Options::ENABLE_HEADING_ATTRIBUTES);
@ -22,22 +28,33 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> String
opt.insert(Options::ENABLE_TABLES);
opt.insert(Options::ENABLE_TASKLISTS);
opt.insert(Options::ENABLE_SMART_PUNCTUATION);
opt.insert(Options::ENABLE_PLUSES_DELIMITED_METADATA_BLOCKS);
let mut content_html = String::new();
let parser = Parser::new_ext(markdown, opt);
let mut code_lang = None;
let mut code_accumulator = String::new();
let mut meta_kind = None;
let mut meta_accumulator = String::new();
let mut events = Vec::new();
for event in parser {
match event {
Event::Text(text) => {
if code_lang.is_some() {
code_accumulator.push_str(&text);
} else if meta_kind.is_some() {
meta_accumulator.push_str(&text);
} else {
events.push(Event::Text(text));
}
}
Event::Start(Tag::MetadataBlock(kind)) => {
meta_kind = Some(kind);
}
Event::End(TagEnd::MetadataBlock(_)) => {
meta_kind = None;
}
Event::Start(Tag::Link {
mut dest_url,
link_type,
@ -45,7 +62,7 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> String
id,
}) => {
if let Some(uri) = base_uri {
if !dest_url.starts_with('#')
if link_type != LinkType::Email
&& !STARTS_WITH_SCHEMA_RE.is_match(&dest_url)
&& !EMAIL_RE.is_match(&dest_url)
{
@ -104,5 +121,8 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> String
pulldown_cmark::html::push_html(&mut content_html, events.into_iter());
content_html
RenderResult {
content_html,
metadata: meta_accumulator,
}
}

View file

@ -1,70 +1,158 @@
use std::time::Duration;
use std::{borrow::Cow, net::SocketAddr, time::Duration};
use anyhow::{Error, Result};
use axum::{
extract::{MatchedPath, OriginalUri, Request},
http::uri::PathAndQuery,
extract::{ConnectInfo, MatchedPath, OriginalUri, Request},
http::{header, uri::PathAndQuery, HeaderMap},
response::Response,
};
use config::Config;
use opentelemetry::global;
use opentelemetry_sdk::propagation::TraceContextPropagator;
use opentelemetry::{global, KeyValue};
use opentelemetry_otlp::WithExportConfig;
use opentelemetry_sdk::{
metrics::reader::{DefaultAggregationSelector, DefaultTemporalitySelector}, propagation::TraceContextPropagator, trace::{RandomIdGenerator, Sampler}, Resource
};
use tracing::{field::Empty, info_span, Span};
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
pub fn init_tracing(cfg: &Config) {
let filter = if let Ok(filter) = cfg.get_string("logging") {
EnvFilter::builder()
.with_default_directive("info".parse().unwrap())
.parse_lossy(filter)
} else {
EnvFilter::builder()
.with_default_directive("info".parse().unwrap())
.from_env_lossy()
};
use crate::settings::Settings;
global::set_text_map_propagator(TraceContextPropagator::new());
pub fn init(cfg: &Settings) -> Result<(), Error> {
let filter = EnvFilter::builder()
.with_default_directive("info".parse()?)
.parse_lossy(&cfg.logging);
let tracer = opentelemetry_otlp::new_pipeline()
.tracing()
.with_exporter(opentelemetry_otlp::new_exporter().tonic())
.install_batch(opentelemetry_sdk::runtime::Tokio)
.unwrap();
let otel = tracing_opentelemetry::layer().with_tracer(tracer);
tracing_subscriber::registry()
.with(filter)
.with(otel)
.with(tracing_subscriber::fmt::layer())
.init();
if cfg.otlp.enabled {
let tracer = opentelemetry_otlp::new_pipeline()
.tracing()
.with_trace_config(
opentelemetry_sdk::trace::config()
.with_sampler(Sampler::AlwaysOn)
.with_id_generator(RandomIdGenerator::default())
.with_resource(Resource::new(vec![KeyValue::new("service.name", "tlxite")])),
)
.with_exporter(
opentelemetry_otlp::new_exporter()
.tonic()
.with_endpoint(&cfg.otlp.endpoint),
)
.install_batch(opentelemetry_sdk::runtime::Tokio)?;
global::set_text_map_propagator(TraceContextPropagator::new());
let otel_tracer = tracing_opentelemetry::layer().with_tracer(tracer);
let meter = opentelemetry_otlp::new_pipeline()
.metrics(opentelemetry_sdk::runtime::Tokio)
.with_exporter(
opentelemetry_otlp::new_exporter()
.tonic()
.with_endpoint(&cfg.otlp.endpoint),
)
.with_resource(Resource::new(vec![KeyValue::new("service.name", "tlxite")]))
.with_period(Duration::from_secs(3))
.with_timeout(Duration::from_secs(10))
.with_aggregation_selector(DefaultAggregationSelector::new())
.with_temporality_selector(DefaultTemporalitySelector::new())
.build()?;
global::set_meter_provider(meter);
// let logger = opentelemetry_otlp::new_pipeline()
// .logging()
// .with_exporter(
// opentelemetry_otlp::new_exporter()
// .tonic()
// .with_endpoint(&cfg.otlp.endpoint),
// )
// .install_batch(opentelemetry_sdk::runtime::Tokio)?;
tracing_subscriber::registry()
.with(filter)
.with(otel_tracer)
.with(tracing_subscriber::fmt::layer().compact())
.init();
}
else {
tracing_subscriber::registry()
.with(filter)
.with(tracing_subscriber::fmt::layer().compact())
.init();
}
Ok(())
}
pub fn make_span(request: &Request) -> Span {
let uri = if let Some(OriginalUri(uri)) = request.extensions().get::<OriginalUri>() {
pub fn make_span(req: &Request) -> Span {
let uri = if let Some(OriginalUri(uri)) = req.extensions().get::<OriginalUri>() {
uri
} else {
request.uri()
req.uri()
};
let route = request
let route = req
.extensions()
.get::<MatchedPath>()
.map_or(uri.path(), axum::extract::MatchedPath::as_str);
let method = request.method().as_str();
let method = req.method().as_str();
let scheme = req.uri().scheme().map_or("HTTP", |s| s.as_str());
let target = uri
.path_and_query()
.map_or(uri.path(), PathAndQuery::as_str);
let user_agent = req
.headers()
.get(header::USER_AGENT)
.map_or("", |h| h.to_str().unwrap_or(""));
let name = format!("{method} {route}");
let client_ip = parse_x_forwarded_for(req.headers())
.or_else(|| {
req.extensions()
.get::<ConnectInfo<SocketAddr>>()
.map(|ConnectInfo(client_ip)| Cow::from(client_ip.to_string()))
})
.unwrap_or_default();
info_span!(
"request",
otel.name = %name,
otel.kind = &"server",
http.client_ip = %client_ip,
http.route = %route,
http.method = %method,
http.target = %target,
http.status_code = Empty
http.scheme = %scheme,
http.user_agent = %user_agent,
http.status_code = Empty,
otel.status_code = Empty,
)
}
fn parse_x_forwarded_for(headers: &HeaderMap) -> Option<Cow<'_, str>> {
let value = headers.get("x-forwarded-for")?;
let value = value.to_str().ok()?;
let mut ips = value.split(',');
Some(ips.next()?.trim().into())
}
pub fn on_response(response: &Response, _latency: Duration, span: &Span) {
span.record("http.status_code", response.status().as_str());
if response.status().is_server_error() {
span.record(
"otel.status_code",
if response.status().is_server_error() {
"ERROR"
} else {
"OK"
},
);
}
}

View file

@ -1,26 +1,19 @@
use std::{collections::HashMap, path::Path};
use std::{collections::HashMap, fmt::Debug, path::{Path, PathBuf}};
use anyhow::Result;
use cached::once_cell::sync::Lazy;
use chrono::{DateTime, FixedOffset};
use glob::glob;
use regex::Regex;
use serde_derive::{Deserialize, Serialize};
use tokio::fs;
use tracing::{
instrument,
log::{debug, warn},
log::debug,
};
use crate::{helpers, markdown, AppState, WebsiteError};
static FRONTMATTER_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"^[\s]*\+{3}(\r?\n(?s).*?(?-s))\+{3}[\s]*(?:$|(?:\r?\n((?s).*(?-s))$))").unwrap()
});
#[derive(Deserialize, Debug, Default)]
pub struct TomlFrontMatter {
pub title: String,
@ -32,7 +25,7 @@ pub struct TomlFrontMatter {
}
#[derive(Serialize, Clone, Debug, Default)]
pub struct Post {
pub struct Page {
pub title: String,
pub draft: bool,
pub date: Option<DateTime<FixedOffset>>,
@ -44,9 +37,9 @@ pub struct Post {
pub absolute_path: String,
}
impl Post {
pub fn new(slug: String, content: String, fm: TomlFrontMatter) -> Post {
Post {
impl Page {
pub fn new(slug: String, content: String, fm: TomlFrontMatter) -> Page {
Page {
absolute_path: format!("/posts/{slug}/"),
slug,
content,
@ -77,70 +70,70 @@ impl Post {
}
#[instrument(skip(state))]
pub async fn load_all(state: &AppState) -> Result<HashMap<String, Post>> {
let mut res = HashMap::<String, Post>::new();
for path in glob("posts/**/*.md")? {
let path = path.unwrap();
debug!("found page: {}", path.display());
pub async fn load_all(state: &AppState, folder: PathBuf) -> Result<HashMap<String, Page>> {
let mut pages = HashMap::<String, Page>::new();
let mut dirs: Vec<PathBuf> = vec![folder];
let path = path.to_string_lossy().replace('\\', "/");
let slug = path
.trim_start_matches("posts")
.trim_start_matches('/')
.trim_start_matches('\\')
.trim_end_matches(".html")
.trim_end_matches(".md")
.trim_end_matches("index")
.trim_end_matches('\\')
.trim_end_matches('/');
while let Some(dir) = dirs.pop() {
let mut read_dir = fs::read_dir(dbg!(dir)).await?;
let post = load_post(state, slug).await?;
res.insert(slug.to_string(), post);
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
if path.is_dir() {
dirs.push(path);
}
else if let Some(ext) = path.extension() {
if ext == "md" {
// it's a page to load
let page = load_page(state, &path).await?;
pages.insert(page.slug.clone(), page);
}
}
}
}
Ok(res)
// for path in --- {
// let path = path.unwrap();
// debug!("found page: {}", path.display());
// let post = load_post(state, &path).await?;
// res.insert(post.slug.clone(), post);
// }
Ok(pages)
}
#[instrument(skip(state))]
pub async fn load_post(state: &AppState, slug: &str) -> Result<Post> {
debug!("loading post: {slug}");
pub async fn load_page(state: &AppState, path: &Path) -> Result<Page> {
debug!("loading post: {path:?}");
let file_path = Path::new("posts").join(slug);
let content = fs::read_to_string(path).await?;
let content = if let Ok(content) = fs::read_to_string(file_path.with_extension("md")).await {
content
} else {
fs::read_to_string(file_path.join("index.md")).await?
};
let (tomlfm, content) = parse_frontmatter(content)?;
let tomlfm = tomlfm.expect("Missing frontmatter");
let base_uri = helpers::uri_with_path(&state.base_url, &format!("/posts/{slug}/"));
let path_str = path.to_string_lossy().replace('\\', "/");
let content = content.map(|c| markdown::render_markdown_to_html(Some(&base_uri), &c));
let slug = path_str
.trim_start_matches("posts/")
.trim_start_matches('/')
.trim_end_matches(".html")
.trim_end_matches(".md")
.trim_end_matches("index")
.trim_end_matches('/');
Ok(Post::new(
let base_uri = helpers::uri_with_path(&state.base_url, &format!("/{slug}/"));
let content = markdown::render_markdown_to_html(Some(&base_uri), &content);
Ok(Page::new(
slug.to_string(),
content.unwrap_or_default(),
tomlfm,
content.content_html,
toml::from_str(&content.metadata)?,
))
}
#[instrument(skip(src))]
fn parse_frontmatter(src: String) -> Result<(Option<TomlFrontMatter>, Option<String>)> {
Ok(if let Some(captures) = FRONTMATTER_REGEX.captures(&src) {
(
Some(toml::from_str(captures.get(1).unwrap().as_str())?),
captures.get(2).map(|m| m.as_str().to_owned()),
)
} else {
(None, Some(src))
})
}
#[instrument(skip(state, post))]
pub async fn render_post(state: &AppState, post: &Post) -> Result<String, WebsiteError> {
pub async fn render_post(state: &AppState, post: &Page) -> Result<String, WebsiteError> {
let mut ctx = tera::Context::new();
ctx.insert("page", &post);
ctx.insert("base_url", &state.base_url.to_string());
@ -165,8 +158,8 @@ mod tests {
..Default::default()
};
state.posts = super::load_all(&state).await.unwrap();
for post in state.posts.values() {
state.pages = super::load_all(&state, "posts/".into()).await.unwrap();
for post in state.pages.values() {
super::render_post(&state, post).await.unwrap();
}
}

30
src/settings.rs Normal file
View file

@ -0,0 +1,30 @@
use anyhow::{Error, Result};
use config::Config;
use serde::Deserialize;
/// Top-level application configuration, deserialized from `config.toml` with
/// `TLX_*` environment-variable overrides (see [`get`]).
#[derive(Deserialize, Debug)]
pub struct Settings {
    /// Public base URL of the site, used when building absolute links.
    pub base_url: String,
    /// Socket address the HTTP server binds to, e.g. `"0.0.0.0:8080"`.
    pub bind_address: String,
    /// Tracing `EnvFilter` directives, e.g. `"info,website=debug"`.
    pub logging: String,
    /// OpenTelemetry (OTLP) export settings.
    pub otlp: Otlp,
}
/// OTLP telemetry export configuration.
#[derive(Deserialize, Debug)]
pub struct Otlp {
    /// When `false`, OTLP trace/metric export is disabled entirely.
    pub enabled: bool,
    /// gRPC endpoint of the collector, e.g. `"http://otel-collector:4317"`.
    pub endpoint: String,
    /// `Authorization` header value for the collector (e.g. `"Basic …"`).
    // NOTE(review): only `enabled` and `endpoint` are read by the exporter
    // setup visible here — presumably the remaining fields are intended for
    // per-request headers/TLS config; confirm they are actually wired up.
    pub authorization: String,
    pub organization: String,
    pub stream_name: String,
    pub tls_insecure: bool,
}
/// Load [`Settings`] from `config.toml`, letting `TLX_*` environment
/// variables override values from the file.
///
/// # Errors
/// Returns an error if the file cannot be read/parsed or if the merged
/// configuration does not deserialize into [`Settings`].
pub fn get() -> Result<Settings, Error> {
    let file = config::File::with_name("config.toml");
    let env = config::Environment::with_prefix("TLX");
    let merged = Config::builder()
        .add_source(file)
        .add_source(env)
        .build()?;
    Ok(merged.try_deserialize()?)
}

View file

@ -3,7 +3,7 @@ use std::collections::HashMap;
use serde_derive::Serialize;
use tracing::instrument;
use crate::post::Post;
use crate::page::Page;
#[derive(Serialize, Debug)]
pub struct Tag {
@ -13,7 +13,7 @@ pub struct Tag {
}
#[instrument(skip(posts))]
pub fn get_tags<'a>(posts: impl IntoIterator<Item = &'a Post>) -> HashMap<String, Tag> {
pub fn get_tags<'a>(posts: impl IntoIterator<Item = &'a Page>) -> HashMap<String, Tag> {
let mut tags: HashMap<String, Tag> = HashMap::new();
for post in posts {

View file

@ -29,3 +29,11 @@ pre {
margin-bottom: -0.5em;
}
/* Footnote blocks rendered by the markdown pipeline: space them out
   vertically, and keep the generated <p> inline so the footnote text sits
   on the same line as its back-reference marker. */
.footnote-definition {
margin: 1em 0;
}
.footnote-definition p {
display: inline-block;
margin: 0;
}