too much stuff sorry
This commit is contained in:
parent
d8050d2e89
commit
bdc291b539
19 changed files with 710 additions and 581 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -1 +1,2 @@
|
||||||
/target
|
/target
|
||||||
|
/data
|
||||||
|
|
718
Cargo.lock
generated
718
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
|
@ -13,9 +13,8 @@ chrono = { version = "0.4.31", features = ["serde"] }
|
||||||
config = "0.14.0"
|
config = "0.14.0"
|
||||||
glob = "0.3.0"
|
glob = "0.3.0"
|
||||||
opentelemetry = { version = "0.22.0", features = ["trace", "metrics"] }
|
opentelemetry = { version = "0.22.0", features = ["trace", "metrics"] }
|
||||||
opentelemetry-otlp = { version = "0.15.0", features = ["trace", "metrics"] }
|
opentelemetry-otlp = { version = "0.15.0", features = ["trace", "metrics", "logs"] }
|
||||||
opentelemetry_sdk = { version = "0.22.1", features = ["rt-tokio", "trace", "metrics"] }
|
opentelemetry_sdk = { version = "0.22.1", features = ["rt-tokio", "trace", "metrics"] }
|
||||||
prometheus = { version = "0.13.3", features = ["process"] }
|
|
||||||
pulldown-cmark = "0.10.2"
|
pulldown-cmark = "0.10.2"
|
||||||
regex = "1.7.2"
|
regex = "1.7.2"
|
||||||
serde = "1.0.144"
|
serde = "1.0.144"
|
||||||
|
@ -25,6 +24,7 @@ syntect = "5.0.0"
|
||||||
tera = { version = "1.19.1", features = ["builtins"] }
|
tera = { version = "1.19.1", features = ["builtins"] }
|
||||||
tokio = { version = "1.34.0", features = ["full", "tracing"] }
|
tokio = { version = "1.34.0", features = ["full", "tracing"] }
|
||||||
toml = "0.8.8"
|
toml = "0.8.8"
|
||||||
|
tonic = "0.11.0"
|
||||||
tower = { version = "0.4.13", features = ["full"] }
|
tower = { version = "0.4.13", features = ["full"] }
|
||||||
tower-http = { version = "0.5.2", features = ["full"] }
|
tower-http = { version = "0.5.2", features = ["full"] }
|
||||||
tracing = "0.1.35"
|
tracing = "0.1.35"
|
||||||
|
|
|
@ -63,6 +63,7 @@ COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/website ./
|
||||||
COPY ./static ./static
|
COPY ./static ./static
|
||||||
COPY ./templates ./templates
|
COPY ./templates ./templates
|
||||||
COPY ./posts ./posts
|
COPY ./posts ./posts
|
||||||
|
COPY ./config.toml ./config.toml
|
||||||
|
|
||||||
EXPOSE 8180
|
EXPOSE 8180
|
||||||
|
|
||||||
|
@ -71,4 +72,4 @@ USER website:website
|
||||||
|
|
||||||
ENV RUST_LOG="debug"
|
ENV RUST_LOG="debug"
|
||||||
|
|
||||||
CMD ["./website"]
|
ENTRYPOINT ["/app/website"]
|
35
compose.yaml
Normal file
35
compose.yaml
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
name: "tlxite"
|
||||||
|
services:
|
||||||
|
web:
|
||||||
|
build: .
|
||||||
|
ports:
|
||||||
|
- "8080:8080"
|
||||||
|
depends_on:
|
||||||
|
- otel-collector
|
||||||
|
otel-collector:
|
||||||
|
image: otel/opentelemetry-collector:latest
|
||||||
|
restart: always
|
||||||
|
command: ["--config=/etc/otel-collector-config.yaml", "${OTELCOL_ARGS}"]
|
||||||
|
volumes:
|
||||||
|
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
|
||||||
|
ports:
|
||||||
|
- "1888:1888" # pprof extension
|
||||||
|
- "8888:8888" # Prometheus metrics exposed by the collector
|
||||||
|
- "8889:8889" # Prometheus exporter metrics
|
||||||
|
- "13133:13133" # health_check extension
|
||||||
|
- "4317:4317" # OTLP gRPC receiver
|
||||||
|
- "55679:55679" # zpages extension
|
||||||
|
depends_on:
|
||||||
|
- openobserve
|
||||||
|
openobserve:
|
||||||
|
image: public.ecr.aws/zinclabs/openobserve:latest
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
ZO_ROOT_USER_EMAIL: "adrian@tollyx.net"
|
||||||
|
ZO_ROOT_USER_PASSWORD: "Planka"
|
||||||
|
ports:
|
||||||
|
- "5080:5080"
|
||||||
|
volumes:
|
||||||
|
- ./data:/data
|
||||||
|
volumes:
|
||||||
|
data:
|
|
@ -1,3 +1,11 @@
|
||||||
base_url = "http://localhost:8080/"
|
base_url = "http://localhost:8080/"
|
||||||
bind_address = "0.0.0.0:8080"
|
bind_address = "0.0.0.0:8080"
|
||||||
logging = "info,website=debug"
|
logging = "info,website=debug"
|
||||||
|
|
||||||
|
[otlp]
|
||||||
|
enabled = false
|
||||||
|
endpoint = "http://otel-collector:4317"
|
||||||
|
authorization = "Basic YWRyaWFuQHRvbGx5eC5uZXQ6N3VHVDU1NGpudGdxVE5LMg=="
|
||||||
|
organization = "default"
|
||||||
|
stream_name = "default"
|
||||||
|
tls_insecure = true
|
||||||
|
|
39
otel-collector-config.yaml
Normal file
39
otel-collector-config.yaml
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
receivers:
|
||||||
|
otlp:
|
||||||
|
protocols:
|
||||||
|
grpc:
|
||||||
|
|
||||||
|
exporters:
|
||||||
|
|
||||||
|
debug:
|
||||||
|
|
||||||
|
otlp/openobserve:
|
||||||
|
endpoint: openobserve:5081
|
||||||
|
headers:
|
||||||
|
Authorization: "Basic YWRyaWFuQHRvbGx5eC5uZXQ6bDVVV21IVHlSd0lmSTJ4Qg=="
|
||||||
|
organization: default
|
||||||
|
stream-name: default
|
||||||
|
tls:
|
||||||
|
insecure: true
|
||||||
|
|
||||||
|
processors:
|
||||||
|
batch:
|
||||||
|
|
||||||
|
extensions:
|
||||||
|
health_check:
|
||||||
|
pprof:
|
||||||
|
endpoint: :1888
|
||||||
|
zpages:
|
||||||
|
endpoint: :55679
|
||||||
|
|
||||||
|
service:
|
||||||
|
extensions: [pprof, zpages, health_check]
|
||||||
|
pipelines:
|
||||||
|
traces:
|
||||||
|
receivers: [otlp]
|
||||||
|
processors: [batch]
|
||||||
|
exporters: [debug, otlp/openobserve]
|
||||||
|
metrics:
|
||||||
|
receivers: [otlp]
|
||||||
|
processors: [batch]
|
||||||
|
exporters: [debug, otlp/openobserve]
|
|
@ -15,6 +15,8 @@ modified post test, see if docker skips build using
|
||||||
|
|
||||||
testing "smart" punctuation --- I don't know if I want it. 'it should' do some fancy stuff.
|
testing "smart" punctuation --- I don't know if I want it. 'it should' do some fancy stuff.
|
||||||
|
|
||||||
|
Here's a foornote for testing[^footnote]
|
||||||
|
|
||||||
code hilighting test:
|
code hilighting test:
|
||||||
|
|
||||||
```rs
|
```rs
|
||||||
|
@ -23,10 +25,14 @@ fn main() {
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
uh oh, here comes a screenshot from a different post!
|
uh oh, here comes a screenshot from a different post![^2]
|
||||||
|
|
||||||
![dungeon screenshot](../dungeon/screenshot.png)
|
![dungeon screenshot](../dungeon/screenshot.png)
|
||||||
|
|
||||||
and here it is again, except it should 404!
|
and here it is again, except it should 404!
|
||||||
|
|
||||||
![missing dungeon screenshot](../dungeon/screenshot.jpeg)
|
![missing dungeon screenshot](../dungeon/screenshot.jpeg)
|
||||||
|
|
||||||
|
[^footnote]: Who is this anyway!
|
||||||
|
|
||||||
|
[^2]: a second footnote oh my!
|
||||||
|
|
|
@ -3,7 +3,7 @@ use serde_derive::Serialize;
|
||||||
|
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
use crate::{post::Post, tag::Tag, AppState};
|
use crate::{page::Page, tag::Tag, AppState};
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
struct FeedContext<'a> {
|
struct FeedContext<'a> {
|
||||||
|
@ -11,13 +11,13 @@ struct FeedContext<'a> {
|
||||||
base_url: &'a str,
|
base_url: &'a str,
|
||||||
last_updated: &'a str,
|
last_updated: &'a str,
|
||||||
tag: Option<&'a Tag>,
|
tag: Option<&'a Tag>,
|
||||||
posts: &'a [&'a Post],
|
posts: &'a [&'a Page],
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(state))]
|
#[instrument(skip(state))]
|
||||||
pub fn render_atom_feed(state: &AppState) -> Result<String> {
|
pub fn render_atom_feed(state: &AppState) -> Result<String> {
|
||||||
let mut posts: Vec<_> = state
|
let mut posts: Vec<_> = state
|
||||||
.posts
|
.pages
|
||||||
.values()
|
.values()
|
||||||
.filter(|p| !p.draft && p.is_published())
|
.filter(|p| !p.draft && p.is_published())
|
||||||
.collect();
|
.collect();
|
||||||
|
@ -43,7 +43,7 @@ pub fn render_atom_feed(state: &AppState) -> Result<String> {
|
||||||
#[instrument(skip(tag, state))]
|
#[instrument(skip(tag, state))]
|
||||||
pub fn render_atom_tag_feed(tag: &Tag, state: &AppState) -> Result<String> {
|
pub fn render_atom_tag_feed(tag: &Tag, state: &AppState) -> Result<String> {
|
||||||
let mut posts: Vec<_> = state
|
let mut posts: Vec<_> = state
|
||||||
.posts
|
.pages
|
||||||
.values()
|
.values()
|
||||||
.filter(|p| !p.draft && p.is_published() && p.tags.contains(&tag.slug))
|
.filter(|p| !p.draft && p.is_published() && p.tags.contains(&tag.slug))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
use axum::{
|
use axum::{
|
||||||
body::Body,
|
|
||||||
extract::{Request, State},
|
extract::{Request, State},
|
||||||
http::{header, HeaderMap, StatusCode},
|
http::{header, HeaderMap, StatusCode},
|
||||||
middleware::Next,
|
middleware::Next,
|
||||||
|
@ -7,9 +6,9 @@ use axum::{
|
||||||
routing::get,
|
routing::get,
|
||||||
Router,
|
Router,
|
||||||
};
|
};
|
||||||
use cached::once_cell::sync::Lazy;
|
|
||||||
use chrono::{DateTime, FixedOffset};
|
use chrono::{DateTime, FixedOffset};
|
||||||
use prometheus::{opts, Encoder, IntCounterVec, TextEncoder};
|
use opentelemetry::{global, metrics::Counter, KeyValue};
|
||||||
|
use tokio::sync::OnceCell;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tower_http::services::ServeDir;
|
use tower_http::services::ServeDir;
|
||||||
use tracing::{
|
use tracing::{
|
||||||
|
@ -19,31 +18,29 @@ use tracing::{
|
||||||
|
|
||||||
use crate::{AppState, WebsiteError};
|
use crate::{AppState, WebsiteError};
|
||||||
|
|
||||||
pub mod posts;
|
pub mod pages;
|
||||||
pub mod tags;
|
pub mod tags;
|
||||||
|
|
||||||
pub static HIT_COUNTER: Lazy<IntCounterVec> = Lazy::new(|| {
|
pub static HIT_COUNTER: OnceCell<Counter<u64>> = OnceCell::const_new();
|
||||||
prometheus::register_int_counter_vec!(
|
|
||||||
opts!(
|
async fn record_hit(method: String, path: String) {
|
||||||
"http_requests_total",
|
let counter = HIT_COUNTER.get_or_init(|| async {
|
||||||
"Total amount of http requests received"
|
global::meter("tlxite").u64_counter("page_hit_count").init()
|
||||||
),
|
}).await;
|
||||||
&["route", "method", "status"]
|
|
||||||
)
|
counter.add(1, &[KeyValue::new("path", format!("{method} {path}"))]);
|
||||||
.unwrap()
|
}
|
||||||
});
|
|
||||||
|
|
||||||
pub fn routes(state: &Arc<AppState>) -> Router<Arc<AppState>> {
|
pub fn routes(state: &Arc<AppState>) -> Router<Arc<AppState>> {
|
||||||
Router::new()
|
Router::new()
|
||||||
.route("/", get(index))
|
.route("/", get(index))
|
||||||
.merge(posts::router())
|
.merge(pages::router())
|
||||||
.merge(tags::router())
|
.merge(tags::router())
|
||||||
.merge(posts::alias_router(state.posts.values()))
|
.merge(pages::alias_router(state.pages.values()))
|
||||||
.route("/healthcheck", get(healthcheck))
|
.route("/healthcheck", get(healthcheck))
|
||||||
.route_service("/posts/:slug/*path", ServeDir::new("./"))
|
.route_service("/posts/:slug/*path", ServeDir::new("./"))
|
||||||
.route_service("/static/*path", ServeDir::new("./"))
|
.route_service("/static/*path", ServeDir::new("./"))
|
||||||
.layer(axum::middleware::from_fn(metrics_middleware))
|
.layer(axum::middleware::from_fn(metrics_middleware))
|
||||||
.route("/metrics", get(metrics))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(state))]
|
#[instrument(skip(state))]
|
||||||
|
@ -72,20 +69,6 @@ async fn healthcheck() -> &'static str {
|
||||||
"OK"
|
"OK"
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument]
|
|
||||||
async fn metrics() -> impl IntoResponse {
|
|
||||||
let encoder = TextEncoder::new();
|
|
||||||
let metric_families = prometheus::gather();
|
|
||||||
let mut buffer = vec![];
|
|
||||||
encoder.encode(&metric_families, &mut buffer).unwrap();
|
|
||||||
|
|
||||||
Response::builder()
|
|
||||||
.status(200)
|
|
||||||
.header(header::CONTENT_TYPE, encoder.format_type())
|
|
||||||
.body(Body::from(buffer))
|
|
||||||
.unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn not_found() -> impl IntoResponse {
|
pub async fn not_found() -> impl IntoResponse {
|
||||||
(StatusCode::NOT_FOUND, ())
|
(StatusCode::NOT_FOUND, ())
|
||||||
}
|
}
|
||||||
|
@ -98,13 +81,7 @@ pub async fn metrics_middleware(request: Request, next: Next) -> Response {
|
||||||
let response = next.run(request).await;
|
let response = next.run(request).await;
|
||||||
|
|
||||||
if !response.status().is_client_error() {
|
if !response.status().is_client_error() {
|
||||||
HIT_COUNTER
|
record_hit(method.to_string(), path).await;
|
||||||
.with_label_values(&[&path, method.as_str(), response.status().as_str()])
|
|
||||||
.inc();
|
|
||||||
} else if response.status() == StatusCode::NOT_FOUND {
|
|
||||||
HIT_COUNTER
|
|
||||||
.with_label_values(&["not found", method.as_str(), response.status().as_str()])
|
|
||||||
.inc();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
response
|
response
|
||||||
|
@ -171,9 +148,9 @@ mod tests {
|
||||||
};
|
};
|
||||||
// Load the actual posts, just to make this test fail if
|
// Load the actual posts, just to make this test fail if
|
||||||
// aliases overlap with themselves or other routes
|
// aliases overlap with themselves or other routes
|
||||||
let posts = crate::post::load_all(&state).await.unwrap();
|
let posts = crate::page::load_all(&state, "posts/".into()).await.unwrap();
|
||||||
state.tags = crate::tag::get_tags(posts.values());
|
state.tags = crate::tag::get_tags(posts.values());
|
||||||
state.posts = posts;
|
state.pages = posts;
|
||||||
let state = Arc::new(state);
|
let state = Arc::new(state);
|
||||||
|
|
||||||
super::routes(&state).with_state(state).into_make_service();
|
super::routes(&state).with_state(state).into_make_service();
|
||||||
|
|
|
@ -12,7 +12,7 @@ use serde_derive::Serialize;
|
||||||
use tracing::{instrument, log::warn};
|
use tracing::{instrument, log::warn};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
post::{render_post, Post},
|
page::{render_post, Page},
|
||||||
AppState, WebsiteError,
|
AppState, WebsiteError,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -28,7 +28,7 @@ pub fn router() -> Router<Arc<AppState>> {
|
||||||
.route("/posts/:slug/index.md", get(super::not_found))
|
.route("/posts/:slug/index.md", get(super::not_found))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn alias_router<'a>(posts: impl IntoIterator<Item = &'a Post>) -> Router<Arc<AppState>> {
|
pub fn alias_router<'a>(posts: impl IntoIterator<Item = &'a Page>) -> Router<Arc<AppState>> {
|
||||||
let mut router = Router::new();
|
let mut router = Router::new();
|
||||||
|
|
||||||
for post in posts {
|
for post in posts {
|
||||||
|
@ -52,12 +52,12 @@ struct PageContext<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(state))]
|
#[instrument(skip(state))]
|
||||||
pub async fn index(
|
async fn index(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
headers: HeaderMap,
|
headers: HeaderMap,
|
||||||
) -> Result<Response, WebsiteError> {
|
) -> Result<Response, WebsiteError> {
|
||||||
let mut posts: Vec<&Post> = state
|
let mut posts: Vec<&Page> = state
|
||||||
.posts
|
.pages
|
||||||
.values()
|
.values()
|
||||||
.filter(|p| !p.draft && p.is_published())
|
.filter(|p| !p.draft && p.is_published())
|
||||||
.collect();
|
.collect();
|
||||||
|
@ -92,12 +92,12 @@ pub async fn index(
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(state))]
|
#[instrument(skip(state))]
|
||||||
pub async fn view(
|
async fn view(
|
||||||
Path(slug): Path<String>,
|
Path(slug): Path<String>,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
headers: HeaderMap,
|
headers: HeaderMap,
|
||||||
) -> Result<axum::response::Response, WebsiteError> {
|
) -> Result<axum::response::Response, WebsiteError> {
|
||||||
let post = state.posts.get(&slug).ok_or(WebsiteError::NotFound)?;
|
let post = state.pages.get(&slug).ok_or(WebsiteError::NotFound)?;
|
||||||
|
|
||||||
let last_changed = post.last_modified();
|
let last_changed = post.last_modified();
|
||||||
|
|
||||||
|
@ -128,8 +128,8 @@ pub async fn feed(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
headers: HeaderMap,
|
headers: HeaderMap,
|
||||||
) -> Result<Response, WebsiteError> {
|
) -> Result<Response, WebsiteError> {
|
||||||
let mut posts: Vec<&Post> = state
|
let mut posts: Vec<&Page> = state
|
||||||
.posts
|
.pages
|
||||||
.values()
|
.values()
|
||||||
.filter(|p| !p.draft && p.is_published())
|
.filter(|p| !p.draft && p.is_published())
|
||||||
.collect();
|
.collect();
|
||||||
|
@ -163,7 +163,7 @@ pub async fn redirect(
|
||||||
Path(slug): Path<String>,
|
Path(slug): Path<String>,
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
) -> Result<Redirect, WebsiteError> {
|
) -> Result<Redirect, WebsiteError> {
|
||||||
if state.posts.contains_key(&slug) {
|
if state.pages.contains_key(&slug) {
|
||||||
Ok(Redirect::permanent(&format!("/posts/{slug}/")))
|
Ok(Redirect::permanent(&format!("/posts/{slug}/")))
|
||||||
} else {
|
} else {
|
||||||
Err(WebsiteError::NotFound)
|
Err(WebsiteError::NotFound)
|
||||||
|
@ -174,21 +174,21 @@ pub async fn redirect(
|
||||||
mod tests {
|
mod tests {
|
||||||
use chrono::DateTime;
|
use chrono::DateTime;
|
||||||
|
|
||||||
use crate::post::Post;
|
use crate::page::Page;
|
||||||
|
|
||||||
use super::PageContext;
|
use super::PageContext;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn render_index() {
|
fn render_index() {
|
||||||
let posts = vec![
|
let posts = vec![
|
||||||
Post {
|
Page {
|
||||||
title: "test".into(),
|
title: "test".into(),
|
||||||
slug: "test".into(),
|
slug: "test".into(),
|
||||||
tags: vec!["abc".into(), "def".into()],
|
tags: vec!["abc".into(), "def".into()],
|
||||||
date: Some(DateTime::parse_from_rfc3339("2023-03-26T13:04:01+02:00").unwrap()),
|
date: Some(DateTime::parse_from_rfc3339("2023-03-26T13:04:01+02:00").unwrap()),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
},
|
},
|
||||||
Post {
|
Page {
|
||||||
title: "test2".into(),
|
title: "test2".into(),
|
||||||
slug: "test2".into(),
|
slug: "test2".into(),
|
||||||
date: None,
|
date: None,
|
|
@ -11,7 +11,7 @@ use axum::{
|
||||||
use serde_derive::Serialize;
|
use serde_derive::Serialize;
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
use crate::{post::Post, AppState, WebsiteError};
|
use crate::{page::Page, AppState, WebsiteError};
|
||||||
|
|
||||||
use super::should_return_304;
|
use super::should_return_304;
|
||||||
|
|
||||||
|
@ -54,8 +54,8 @@ pub async fn view(
|
||||||
State(state): State<Arc<AppState>>,
|
State(state): State<Arc<AppState>>,
|
||||||
headers: HeaderMap,
|
headers: HeaderMap,
|
||||||
) -> Result<Response, WebsiteError> {
|
) -> Result<Response, WebsiteError> {
|
||||||
let mut posts: Vec<&Post> = state
|
let mut posts: Vec<&Page> = state
|
||||||
.posts
|
.pages
|
||||||
.values()
|
.values()
|
||||||
.filter(|p| !p.draft && p.is_published() && p.tags.contains(&tag))
|
.filter(|p| !p.draft && p.is_published() && p.tags.contains(&tag))
|
||||||
.collect();
|
.collect();
|
||||||
|
@ -100,8 +100,8 @@ pub async fn feed(
|
||||||
) -> Result<Response, WebsiteError> {
|
) -> Result<Response, WebsiteError> {
|
||||||
let tag = state.tags.get(&slug).ok_or(WebsiteError::NotFound)?;
|
let tag = state.tags.get(&slug).ok_or(WebsiteError::NotFound)?;
|
||||||
|
|
||||||
let mut posts: Vec<&Post> = state
|
let mut posts: Vec<&Page> = state
|
||||||
.posts
|
.pages
|
||||||
.values()
|
.values()
|
||||||
.filter(|p| p.is_published() && p.tags.contains(&slug))
|
.filter(|p| p.is_published() && p.tags.contains(&slug))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
33
src/main.rs
33
src/main.rs
|
@ -4,12 +4,13 @@ use std::{collections::HashMap, fmt::Display, sync::Arc};
|
||||||
use axum::http::Uri;
|
use axum::http::Uri;
|
||||||
use chrono::DateTime;
|
use chrono::DateTime;
|
||||||
|
|
||||||
use config::Config;
|
use page::Page;
|
||||||
use post::Post;
|
|
||||||
|
|
||||||
|
use settings::Settings;
|
||||||
use tag::Tag;
|
use tag::Tag;
|
||||||
use tera::Tera;
|
use tera::Tera;
|
||||||
|
|
||||||
|
use tokio::net::TcpListener;
|
||||||
use tower_http::{compression::CompressionLayer, cors::CorsLayer};
|
use tower_http::{compression::CompressionLayer, cors::CorsLayer};
|
||||||
use tracing::{instrument, log::info};
|
use tracing::{instrument, log::info};
|
||||||
|
|
||||||
|
@ -21,38 +22,38 @@ mod helpers;
|
||||||
mod hilighting;
|
mod hilighting;
|
||||||
mod markdown;
|
mod markdown;
|
||||||
mod observability;
|
mod observability;
|
||||||
mod post;
|
mod page;
|
||||||
|
mod settings;
|
||||||
mod tag;
|
mod tag;
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct AppState {
|
pub struct AppState {
|
||||||
startup_time: DateTime<chrono::offset::Utc>,
|
startup_time: DateTime<chrono::offset::Utc>,
|
||||||
base_url: Uri,
|
base_url: Uri,
|
||||||
posts: HashMap<String, Post>,
|
pages: HashMap<String, Page>,
|
||||||
tags: HashMap<String, Tag>,
|
tags: HashMap<String, Tag>,
|
||||||
tera: Tera,
|
tera: Tera,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() -> Result<()> {
|
async fn main() -> Result<()> {
|
||||||
let cfg = Config::builder()
|
let cfg = settings::get()?;
|
||||||
.add_source(config::File::with_name("config.toml"))
|
println!("{cfg:?}");
|
||||||
.add_source(config::Environment::with_prefix("WEBSITE"))
|
|
||||||
.build()?;
|
|
||||||
|
|
||||||
observability::init_tracing(&cfg);
|
observability::init(&cfg)?;
|
||||||
info!("Starting server...");
|
info!("Starting server...");
|
||||||
let addr = cfg.get_string("bind_address")?;
|
|
||||||
let app = init_app(&cfg).await?;
|
let app = init_app(&cfg).await?;
|
||||||
let listener = tokio::net::TcpListener::bind(addr).await.unwrap();
|
let listener = TcpListener::bind(&cfg.bind_address).await.unwrap();
|
||||||
axum::serve(listener, app.into_make_service()).await?;
|
axum::serve(listener, app.into_make_service()).await?;
|
||||||
|
|
||||||
|
opentelemetry::global::shutdown_tracer_provider();
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument]
|
#[instrument(skip(cfg))]
|
||||||
async fn init_app(cfg: &Config) -> Result<axum::routing::Router> {
|
async fn init_app(cfg: &Settings) -> Result<axum::routing::Router> {
|
||||||
let base_url: Uri = cfg.get_string("base_url")?.parse().unwrap();
|
let base_url: Uri = cfg.base_url.parse().unwrap();
|
||||||
|
|
||||||
let tera = Tera::new("templates/**/*")?;
|
let tera = Tera::new("templates/**/*")?;
|
||||||
let mut state = AppState {
|
let mut state = AppState {
|
||||||
|
@ -62,9 +63,9 @@ async fn init_app(cfg: &Config) -> Result<axum::routing::Router> {
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
let posts = post::load_all(&state).await?;
|
let posts = page::load_all(&state, "pages/".into()).await?;
|
||||||
let tags = tag::get_tags(posts.values());
|
let tags = tag::get_tags(posts.values());
|
||||||
state.posts = posts;
|
state.pages = posts;
|
||||||
state.tags = tags;
|
state.tags = tags;
|
||||||
let state = Arc::new(state);
|
let state = Arc::new(state);
|
||||||
|
|
||||||
|
|
|
@ -4,6 +4,7 @@ use axum::http::Uri;
|
||||||
use cached::once_cell::sync::Lazy;
|
use cached::once_cell::sync::Lazy;
|
||||||
use pulldown_cmark::CodeBlockKind;
|
use pulldown_cmark::CodeBlockKind;
|
||||||
use pulldown_cmark::Event;
|
use pulldown_cmark::Event;
|
||||||
|
use pulldown_cmark::LinkType;
|
||||||
use pulldown_cmark::Tag;
|
use pulldown_cmark::Tag;
|
||||||
use pulldown_cmark::TagEnd;
|
use pulldown_cmark::TagEnd;
|
||||||
use pulldown_cmark::{Options, Parser};
|
use pulldown_cmark::{Options, Parser};
|
||||||
|
@ -13,8 +14,13 @@ use tracing::instrument;
|
||||||
static STARTS_WITH_SCHEMA_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\w+:").unwrap());
|
static STARTS_WITH_SCHEMA_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^\w+:").unwrap());
|
||||||
static EMAIL_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^.+?@\w+(\.\w+)*$").unwrap());
|
static EMAIL_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"^.+?@\w+(\.\w+)*$").unwrap());
|
||||||
|
|
||||||
|
pub struct RenderResult {
|
||||||
|
pub content_html: String,
|
||||||
|
pub metadata: String
|
||||||
|
}
|
||||||
|
|
||||||
#[instrument(skip(markdown))]
|
#[instrument(skip(markdown))]
|
||||||
pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> String {
|
pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> RenderResult {
|
||||||
let mut opt = Options::empty();
|
let mut opt = Options::empty();
|
||||||
opt.insert(Options::ENABLE_FOOTNOTES);
|
opt.insert(Options::ENABLE_FOOTNOTES);
|
||||||
opt.insert(Options::ENABLE_HEADING_ATTRIBUTES);
|
opt.insert(Options::ENABLE_HEADING_ATTRIBUTES);
|
||||||
|
@ -22,22 +28,33 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> String
|
||||||
opt.insert(Options::ENABLE_TABLES);
|
opt.insert(Options::ENABLE_TABLES);
|
||||||
opt.insert(Options::ENABLE_TASKLISTS);
|
opt.insert(Options::ENABLE_TASKLISTS);
|
||||||
opt.insert(Options::ENABLE_SMART_PUNCTUATION);
|
opt.insert(Options::ENABLE_SMART_PUNCTUATION);
|
||||||
|
opt.insert(Options::ENABLE_PLUSES_DELIMITED_METADATA_BLOCKS);
|
||||||
|
|
||||||
let mut content_html = String::new();
|
let mut content_html = String::new();
|
||||||
let parser = Parser::new_ext(markdown, opt);
|
let parser = Parser::new_ext(markdown, opt);
|
||||||
|
|
||||||
let mut code_lang = None;
|
let mut code_lang = None;
|
||||||
let mut code_accumulator = String::new();
|
let mut code_accumulator = String::new();
|
||||||
|
let mut meta_kind = None;
|
||||||
|
let mut meta_accumulator = String::new();
|
||||||
let mut events = Vec::new();
|
let mut events = Vec::new();
|
||||||
for event in parser {
|
for event in parser {
|
||||||
match event {
|
match event {
|
||||||
Event::Text(text) => {
|
Event::Text(text) => {
|
||||||
if code_lang.is_some() {
|
if code_lang.is_some() {
|
||||||
code_accumulator.push_str(&text);
|
code_accumulator.push_str(&text);
|
||||||
|
} else if meta_kind.is_some() {
|
||||||
|
meta_accumulator.push_str(&text);
|
||||||
} else {
|
} else {
|
||||||
events.push(Event::Text(text));
|
events.push(Event::Text(text));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Event::Start(Tag::MetadataBlock(kind)) => {
|
||||||
|
meta_kind = Some(kind);
|
||||||
|
}
|
||||||
|
Event::End(TagEnd::MetadataBlock(_)) => {
|
||||||
|
meta_kind = None;
|
||||||
|
}
|
||||||
Event::Start(Tag::Link {
|
Event::Start(Tag::Link {
|
||||||
mut dest_url,
|
mut dest_url,
|
||||||
link_type,
|
link_type,
|
||||||
|
@ -45,7 +62,7 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> String
|
||||||
id,
|
id,
|
||||||
}) => {
|
}) => {
|
||||||
if let Some(uri) = base_uri {
|
if let Some(uri) = base_uri {
|
||||||
if !dest_url.starts_with('#')
|
if link_type != LinkType::Email
|
||||||
&& !STARTS_WITH_SCHEMA_RE.is_match(&dest_url)
|
&& !STARTS_WITH_SCHEMA_RE.is_match(&dest_url)
|
||||||
&& !EMAIL_RE.is_match(&dest_url)
|
&& !EMAIL_RE.is_match(&dest_url)
|
||||||
{
|
{
|
||||||
|
@ -104,5 +121,8 @@ pub fn render_markdown_to_html(base_uri: Option<&Uri>, markdown: &str) -> String
|
||||||
|
|
||||||
pulldown_cmark::html::push_html(&mut content_html, events.into_iter());
|
pulldown_cmark::html::push_html(&mut content_html, events.into_iter());
|
||||||
|
|
||||||
content_html
|
RenderResult {
|
||||||
|
content_html,
|
||||||
|
metadata: meta_accumulator,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,70 +1,158 @@
|
||||||
use std::time::Duration;
|
use std::{borrow::Cow, net::SocketAddr, time::Duration};
|
||||||
|
|
||||||
|
use anyhow::{Error, Result};
|
||||||
use axum::{
|
use axum::{
|
||||||
extract::{MatchedPath, OriginalUri, Request},
|
extract::{ConnectInfo, MatchedPath, OriginalUri, Request},
|
||||||
http::uri::PathAndQuery,
|
http::{header, uri::PathAndQuery, HeaderMap},
|
||||||
response::Response,
|
response::Response,
|
||||||
};
|
};
|
||||||
use config::Config;
|
use opentelemetry::{global, KeyValue};
|
||||||
use opentelemetry::global;
|
use opentelemetry_otlp::WithExportConfig;
|
||||||
use opentelemetry_sdk::propagation::TraceContextPropagator;
|
use opentelemetry_sdk::{
|
||||||
|
metrics::reader::{DefaultAggregationSelector, DefaultTemporalitySelector}, propagation::TraceContextPropagator, trace::{RandomIdGenerator, Sampler}, Resource
|
||||||
|
};
|
||||||
use tracing::{field::Empty, info_span, Span};
|
use tracing::{field::Empty, info_span, Span};
|
||||||
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
|
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};
|
||||||
|
|
||||||
pub fn init_tracing(cfg: &Config) {
|
use crate::settings::Settings;
|
||||||
let filter = if let Ok(filter) = cfg.get_string("logging") {
|
|
||||||
EnvFilter::builder()
|
|
||||||
.with_default_directive("info".parse().unwrap())
|
|
||||||
.parse_lossy(filter)
|
|
||||||
} else {
|
|
||||||
EnvFilter::builder()
|
|
||||||
.with_default_directive("info".parse().unwrap())
|
|
||||||
.from_env_lossy()
|
|
||||||
};
|
|
||||||
|
|
||||||
global::set_text_map_propagator(TraceContextPropagator::new());
|
pub fn init(cfg: &Settings) -> Result<(), Error> {
|
||||||
|
let filter = EnvFilter::builder()
|
||||||
|
.with_default_directive("info".parse()?)
|
||||||
|
.parse_lossy(&cfg.logging);
|
||||||
|
|
||||||
let tracer = opentelemetry_otlp::new_pipeline()
|
|
||||||
.tracing()
|
|
||||||
.with_exporter(opentelemetry_otlp::new_exporter().tonic())
|
|
||||||
.install_batch(opentelemetry_sdk::runtime::Tokio)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let otel = tracing_opentelemetry::layer().with_tracer(tracer);
|
|
||||||
|
|
||||||
tracing_subscriber::registry()
|
if cfg.otlp.enabled {
|
||||||
.with(filter)
|
let tracer = opentelemetry_otlp::new_pipeline()
|
||||||
.with(otel)
|
.tracing()
|
||||||
.with(tracing_subscriber::fmt::layer())
|
.with_trace_config(
|
||||||
.init();
|
opentelemetry_sdk::trace::config()
|
||||||
|
.with_sampler(Sampler::AlwaysOn)
|
||||||
|
.with_id_generator(RandomIdGenerator::default())
|
||||||
|
.with_resource(Resource::new(vec![KeyValue::new("service.name", "tlxite")])),
|
||||||
|
)
|
||||||
|
.with_exporter(
|
||||||
|
opentelemetry_otlp::new_exporter()
|
||||||
|
.tonic()
|
||||||
|
.with_endpoint(&cfg.otlp.endpoint),
|
||||||
|
)
|
||||||
|
.install_batch(opentelemetry_sdk::runtime::Tokio)?;
|
||||||
|
|
||||||
|
global::set_text_map_propagator(TraceContextPropagator::new());
|
||||||
|
let otel_tracer = tracing_opentelemetry::layer().with_tracer(tracer);
|
||||||
|
|
||||||
|
|
||||||
|
let meter = opentelemetry_otlp::new_pipeline()
|
||||||
|
.metrics(opentelemetry_sdk::runtime::Tokio)
|
||||||
|
.with_exporter(
|
||||||
|
opentelemetry_otlp::new_exporter()
|
||||||
|
.tonic()
|
||||||
|
.with_endpoint(&cfg.otlp.endpoint),
|
||||||
|
)
|
||||||
|
.with_resource(Resource::new(vec![KeyValue::new("service.name", "tlxite")]))
|
||||||
|
.with_period(Duration::from_secs(3))
|
||||||
|
.with_timeout(Duration::from_secs(10))
|
||||||
|
.with_aggregation_selector(DefaultAggregationSelector::new())
|
||||||
|
.with_temporality_selector(DefaultTemporalitySelector::new())
|
||||||
|
.build()?;
|
||||||
|
|
||||||
|
global::set_meter_provider(meter);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// let logger = opentelemetry_otlp::new_pipeline()
|
||||||
|
// .logging()
|
||||||
|
// .with_exporter(
|
||||||
|
// opentelemetry_otlp::new_exporter()
|
||||||
|
// .tonic()
|
||||||
|
// .with_endpoint(&cfg.otlp.endpoint),
|
||||||
|
|
||||||
|
// )
|
||||||
|
// .install_batch(opentelemetry_sdk::runtime::Tokio)?;
|
||||||
|
|
||||||
|
tracing_subscriber::registry()
|
||||||
|
.with(filter)
|
||||||
|
.with(otel_tracer)
|
||||||
|
.with(tracing_subscriber::fmt::layer().compact())
|
||||||
|
.init();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
tracing_subscriber::registry()
|
||||||
|
.with(filter)
|
||||||
|
.with(tracing_subscriber::fmt::layer().compact())
|
||||||
|
.init();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Builds the tracing span for an incoming HTTP request, populating the
/// OpenTelemetry HTTP semantic-convention fields (`http.*` / `otel.*`).
pub fn make_span(req: &Request) -> Span {
    // Prefer the pre-rewrite URI (set by nested routers via the OriginalUri
    // extension) when available; fall back to the request's own URI.
    let uri = if let Some(OriginalUri(uri)) = req.extensions().get::<OriginalUri>() {
        uri
    } else {
        req.uri()
    };
    // Matched route pattern (low cardinality), or the raw path when no
    // route matched.
    let route = req
        .extensions()
        .get::<MatchedPath>()
        .map_or(uri.path(), axum::extract::MatchedPath::as_str);
    let method = req.method().as_str();
    // Server-side request URIs usually carry no scheme; default to "HTTP".
    let scheme = req.uri().scheme().map_or("HTTP", |s| s.as_str());
    // Full path + query string, falling back to the bare path.
    let target = uri
        .path_and_query()
        .map_or(uri.path(), PathAndQuery::as_str);
    // Empty string when the header is absent or not valid UTF-8.
    let user_agent = req
        .headers()
        .get(header::USER_AGENT)
        .map_or("", |h| h.to_str().unwrap_or(""));
    // OTel-conventional span name: "<method> <route>".
    let name = format!("{method} {route}");
    // Client IP: first x-forwarded-for entry if present, otherwise the peer
    // address from the connection info; empty when neither is known.
    let client_ip = parse_x_forwarded_for(req.headers())
        .or_else(|| {
            req.extensions()
                .get::<ConnectInfo<SocketAddr>>()
                .map(|ConnectInfo(client_ip)| Cow::from(client_ip.to_string()))
        })
        .unwrap_or_default();
    // Status fields start Empty and are recorded later by `on_response`.
    // NOTE(review): `otel.kind = &"server"` records a &&str — a plain
    // "server" literal would be the usual form; confirm before changing.
    info_span!(
        "request",
        otel.name = %name,
        otel.kind = &"server",
        http.client_ip = %client_ip,
        http.route = %route,
        http.method = %method,
        http.target = %target,
        http.scheme = %scheme,
        http.user_agent = %user_agent,
        http.status_code = Empty,
        otel.status_code = Empty,
    )
}
|
||||||
|
|
||||||
|
fn parse_x_forwarded_for(headers: &HeaderMap) -> Option<Cow<'_, str>> {
|
||||||
|
let value = headers.get("x-forwarded-for")?;
|
||||||
|
let value = value.to_str().ok()?;
|
||||||
|
let mut ips = value.split(',');
|
||||||
|
Some(ips.next()?.trim().into())
|
||||||
|
}
|
||||||
|
|
||||||
/// Records response metadata on the request span once the response is ready.
///
/// Always records `http.status_code`; additionally marks `otel.status_code`
/// as "ERROR" for 5xx responses. Non-5xx responses leave `otel.status_code`
/// unset (an unset OTel status defaults to "unset"/OK downstream).
pub fn on_response(response: &Response, _latency: Duration, span: &Span) {
    span.record("http.status_code", response.status().as_str());
    if response.status().is_server_error() {
        // The original re-tested `is_server_error()` inside this branch,
        // which made its "OK" arm unreachable — only "ERROR" was ever
        // recorded. Simplified to the reachable behavior.
        span.record("otel.status_code", "ERROR");
    }
}
|
||||||
|
|
|
@ -1,26 +1,19 @@
|
||||||
use std::{collections::HashMap, path::Path};
|
use std::{collections::HashMap, fmt::Debug, path::{Path, PathBuf}};
|
||||||
|
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
|
|
||||||
use cached::once_cell::sync::Lazy;
|
|
||||||
use chrono::{DateTime, FixedOffset};
|
use chrono::{DateTime, FixedOffset};
|
||||||
use glob::glob;
|
|
||||||
|
|
||||||
use regex::Regex;
|
|
||||||
use serde_derive::{Deserialize, Serialize};
|
use serde_derive::{Deserialize, Serialize};
|
||||||
use tokio::fs;
|
use tokio::fs;
|
||||||
|
|
||||||
use tracing::{
|
use tracing::{
|
||||||
instrument,
|
instrument,
|
||||||
log::{debug, warn},
|
log::debug,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{helpers, markdown, AppState, WebsiteError};
|
use crate::{helpers, markdown, AppState, WebsiteError};
|
||||||
|
|
||||||
static FRONTMATTER_REGEX: Lazy<Regex> = Lazy::new(|| {
|
|
||||||
Regex::new(r"^[\s]*\+{3}(\r?\n(?s).*?(?-s))\+{3}[\s]*(?:$|(?:\r?\n((?s).*(?-s))$))").unwrap()
|
|
||||||
});
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug, Default)]
|
#[derive(Deserialize, Debug, Default)]
|
||||||
pub struct TomlFrontMatter {
|
pub struct TomlFrontMatter {
|
||||||
pub title: String,
|
pub title: String,
|
||||||
|
@ -32,7 +25,7 @@ pub struct TomlFrontMatter {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Clone, Debug, Default)]
|
#[derive(Serialize, Clone, Debug, Default)]
|
||||||
pub struct Post {
|
pub struct Page {
|
||||||
pub title: String,
|
pub title: String,
|
||||||
pub draft: bool,
|
pub draft: bool,
|
||||||
pub date: Option<DateTime<FixedOffset>>,
|
pub date: Option<DateTime<FixedOffset>>,
|
||||||
|
@ -44,9 +37,9 @@ pub struct Post {
|
||||||
pub absolute_path: String,
|
pub absolute_path: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Post {
|
impl Page {
|
||||||
pub fn new(slug: String, content: String, fm: TomlFrontMatter) -> Post {
|
pub fn new(slug: String, content: String, fm: TomlFrontMatter) -> Page {
|
||||||
Post {
|
Page {
|
||||||
absolute_path: format!("/posts/{slug}/"),
|
absolute_path: format!("/posts/{slug}/"),
|
||||||
slug,
|
slug,
|
||||||
content,
|
content,
|
||||||
|
@ -77,70 +70,70 @@ impl Post {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(state))]
|
#[instrument(skip(state))]
|
||||||
pub async fn load_all(state: &AppState) -> Result<HashMap<String, Post>> {
|
pub async fn load_all(state: &AppState, folder: PathBuf) -> Result<HashMap<String, Page>> {
|
||||||
let mut res = HashMap::<String, Post>::new();
|
let mut pages = HashMap::<String, Page>::new();
|
||||||
for path in glob("posts/**/*.md")? {
|
let mut dirs: Vec<PathBuf> = vec![folder];
|
||||||
let path = path.unwrap();
|
|
||||||
debug!("found page: {}", path.display());
|
|
||||||
|
|
||||||
let path = path.to_string_lossy().replace('\\', "/");
|
while let Some(dir) = dirs.pop() {
|
||||||
let slug = path
|
let mut read_dir = fs::read_dir(dbg!(dir)).await?;
|
||||||
.trim_start_matches("posts")
|
|
||||||
.trim_start_matches('/')
|
|
||||||
.trim_start_matches('\\')
|
|
||||||
.trim_end_matches(".html")
|
|
||||||
.trim_end_matches(".md")
|
|
||||||
.trim_end_matches("index")
|
|
||||||
.trim_end_matches('\\')
|
|
||||||
.trim_end_matches('/');
|
|
||||||
|
|
||||||
let post = load_post(state, slug).await?;
|
while let Some(entry) = read_dir.next_entry().await? {
|
||||||
|
let path = entry.path();
|
||||||
res.insert(slug.to_string(), post);
|
if path.is_dir() {
|
||||||
|
dirs.push(path);
|
||||||
|
}
|
||||||
|
else if let Some(ext) = path.extension() {
|
||||||
|
if ext == "md" {
|
||||||
|
// it's a page to load
|
||||||
|
let page = load_page(state, &path).await?;
|
||||||
|
pages.insert(page.slug.clone(), page);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
Ok(res)
|
|
||||||
|
// for path in --- {
|
||||||
|
// let path = path.unwrap();
|
||||||
|
// debug!("found page: {}", path.display());
|
||||||
|
|
||||||
|
// let post = load_post(state, &path).await?;
|
||||||
|
|
||||||
|
// res.insert(post.slug.clone(), post);
|
||||||
|
// }
|
||||||
|
Ok(pages)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Loads and renders a single markdown page from `path`.
///
/// The slug is derived from the file path: the leading `posts/` prefix and
/// the `.html` / `.md` / `index` suffixes are stripped, so `posts/foo.md`
/// and `posts/foo/index.md` both map to slug `foo`.
///
/// # Errors
/// Fails if the file cannot be read or its front matter is not valid TOML.
#[instrument(skip(state))]
pub async fn load_page(state: &AppState, path: &Path) -> Result<Page> {
    // NOTE(review): message still says "post" although this loads any page —
    // consider updating the log text.
    debug!("loading post: {path:?}");
    let content = fs::read_to_string(path).await?;
    // Normalize Windows path separators so slug derivation is cross-platform.
    let path_str = path.to_string_lossy().replace('\\', "/");
    let slug = path_str
        .trim_start_matches("posts/")
        .trim_start_matches('/')
        .trim_end_matches(".html")
        .trim_end_matches(".md")
        .trim_end_matches("index")
        .trim_end_matches('/');
    // Base URI used to absolutize relative links in the rendered markdown.
    let base_uri = helpers::uri_with_path(&state.base_url, &format!("/{slug}/"));
    let content = markdown::render_markdown_to_html(Some(&base_uri), &content);
    // The renderer returns the HTML body plus the raw front-matter TOML,
    // which is deserialized here into the page's metadata.
    Ok(Page::new(
        slug.to_string(),
        content.content_html,
        toml::from_str(&content.metadata)?,
    ))
}
|
||||||
|
|
||||||
#[instrument(skip(src))]
|
|
||||||
fn parse_frontmatter(src: String) -> Result<(Option<TomlFrontMatter>, Option<String>)> {
|
|
||||||
Ok(if let Some(captures) = FRONTMATTER_REGEX.captures(&src) {
|
|
||||||
(
|
|
||||||
Some(toml::from_str(captures.get(1).unwrap().as_str())?),
|
|
||||||
captures.get(2).map(|m| m.as_str().to_owned()),
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
(None, Some(src))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip(state, post))]
|
#[instrument(skip(state, post))]
|
||||||
pub async fn render_post(state: &AppState, post: &Post) -> Result<String, WebsiteError> {
|
pub async fn render_post(state: &AppState, post: &Page) -> Result<String, WebsiteError> {
|
||||||
let mut ctx = tera::Context::new();
|
let mut ctx = tera::Context::new();
|
||||||
ctx.insert("page", &post);
|
ctx.insert("page", &post);
|
||||||
ctx.insert("base_url", &state.base_url.to_string());
|
ctx.insert("base_url", &state.base_url.to_string());
|
||||||
|
@ -165,8 +158,8 @@ mod tests {
|
||||||
..Default::default()
|
..Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
state.posts = super::load_all(&state).await.unwrap();
|
state.pages = super::load_all(&state, "posts/".into()).await.unwrap();
|
||||||
for post in state.posts.values() {
|
for post in state.pages.values() {
|
||||||
super::render_post(&state, post).await.unwrap();
|
super::render_post(&state, post).await.unwrap();
|
||||||
}
|
}
|
||||||
}
|
}
|
30
src/settings.rs
Normal file
30
src/settings.rs
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
use anyhow::{Error, Result};
|
||||||
|
use config::Config;
|
||||||
|
use serde::Deserialize;
|
||||||
|
|
||||||
|
/// Top-level application configuration, deserialized from `config.toml`
/// with `TLX_*` environment-variable overrides (see `get`).
#[derive(Deserialize, Debug)]
pub struct Settings {
    /// Public base URL of the site, used to build absolute links.
    pub base_url: String,
    /// Address the HTTP server binds to.
    pub bind_address: String,
    /// Logging configuration string — presumably a tracing filter
    /// directive (e.g. "info"); confirm against where it is consumed.
    pub logging: String,
    /// OpenTelemetry (OTLP) exporter settings.
    pub otlp: Otlp,
}
|
||||||
|
|
||||||
|
/// OTLP exporter configuration for telemetry export.
#[derive(Deserialize, Debug)]
pub struct Otlp {
    /// Whether telemetry export is enabled — presumably gates the OTLP
    /// pipeline setup; confirm against the init code.
    pub enabled: bool,
    /// OTLP collector endpoint.
    pub endpoint: String,
    /// Authorization credential sent to the collector — TODO confirm the
    /// exact header/metadata it is applied to.
    pub authorization: String,
    /// Organization identifier forwarded to the collector — verify against
    /// the collector's expected metadata keys.
    pub organization: String,
    /// Target stream/dataset name at the collector — verify likewise.
    pub stream_name: String,
    /// Skip TLS certificate verification when true — presumably; confirm
    /// against the exporter TLS setup.
    pub tls_insecure: bool,
}
|
||||||
|
|
||||||
|
pub fn get() -> Result<Settings, Error> {
|
||||||
|
let settings = Config::builder()
|
||||||
|
.add_source(config::File::with_name("config.toml"))
|
||||||
|
.add_source(config::Environment::with_prefix("TLX"))
|
||||||
|
.build()?
|
||||||
|
.try_deserialize()?;
|
||||||
|
Ok(settings)
|
||||||
|
}
|
|
@ -3,7 +3,7 @@ use std::collections::HashMap;
|
||||||
use serde_derive::Serialize;
|
use serde_derive::Serialize;
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
use crate::post::Post;
|
use crate::page::Page;
|
||||||
|
|
||||||
#[derive(Serialize, Debug)]
|
#[derive(Serialize, Debug)]
|
||||||
pub struct Tag {
|
pub struct Tag {
|
||||||
|
@ -13,7 +13,7 @@ pub struct Tag {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(posts))]
|
#[instrument(skip(posts))]
|
||||||
pub fn get_tags<'a>(posts: impl IntoIterator<Item = &'a Post>) -> HashMap<String, Tag> {
|
pub fn get_tags<'a>(posts: impl IntoIterator<Item = &'a Page>) -> HashMap<String, Tag> {
|
||||||
let mut tags: HashMap<String, Tag> = HashMap::new();
|
let mut tags: HashMap<String, Tag> = HashMap::new();
|
||||||
|
|
||||||
for post in posts {
|
for post in posts {
|
||||||
|
|
|
@ -29,3 +29,11 @@ pre {
|
||||||
margin-bottom: -0.5em;
|
margin-bottom: -0.5em;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* Footnote definition blocks — presumably emitted by the markdown
   renderer; confirm the class name against its output. */
.footnote-definition {
    margin: 1em 0;
}

.footnote-definition p {
    /* Inline-block with zero margin keeps the footnote text on the same
       line as its marker instead of starting a new paragraph. */
    display: inline-block;
    margin: 0;
}
|
||||||
|
|
Loading…
Reference in a new issue