1
0
Fork 0

cargo clippy -- -W clippy::pedantic

This commit is contained in:
Adrian Hedqvist 2023-07-29 12:04:37 +02:00
parent 28a2b3ca43
commit eea0cc764d
7 changed files with 34 additions and 44 deletions

View file

@@ -15,7 +15,7 @@ use lazy_static::lazy_static;
use prometheus::{opts, Encoder, IntCounterVec, TextEncoder}; use prometheus::{opts, Encoder, IntCounterVec, TextEncoder};
use std::sync::Arc; use std::sync::Arc;
use tower_http::services::ServeFile; use tower_http::services::ServeFile;
use tracing::{instrument, log::*}; use tracing::{instrument, log::{error, info}};
use crate::{AppState, WebsiteError}; use crate::{AppState, WebsiteError};
@@ -111,7 +111,7 @@ pub async fn metrics_middleware<B>(request: Request<B>, next: Next<B>) -> Respon
response response
} }
fn should_return_304(headers: HeaderMap, last_changed: Option<DateTime<FixedOffset>>) -> bool { fn should_return_304(headers: &HeaderMap, last_changed: Option<DateTime<FixedOffset>>) -> bool {
let Some(date) = last_changed else { let Some(date) = last_changed else {
info!("no last modified date"); info!("no last modified date");
return false; return false;

View file

@@ -1,4 +1,4 @@
use std::{sync::Arc}; use std::sync::Arc;
use axum::{ use axum::{
extract::{Path, State}, extract::{Path, State},
@@ -11,7 +11,7 @@ use hyper::{
HeaderMap, StatusCode, HeaderMap, StatusCode,
}; };
use serde_derive::Serialize; use serde_derive::Serialize;
use tracing::{instrument, log::*}; use tracing::{instrument, log::warn};
use crate::{ use crate::{
post::{render_post, Post}, post::{render_post, Post},
@@ -35,7 +35,7 @@ pub fn alias_router<'a>(posts: impl IntoIterator<Item = &'a Post>) -> Router<Arc
for post in posts { for post in posts {
for alias in &post.aliases { for alias in &post.aliases {
let path = post.absolute_path.to_owned(); let path = post.absolute_path.clone();
router = router.route( router = router.route(
alias, alias,
get(move || async { get(move || async {
@@ -60,9 +60,9 @@ pub async fn index(
) -> Result<Response, WebsiteError> { ) -> Result<Response, WebsiteError> {
let mut posts: Vec<&Post> = state.posts.values().filter(|p| p.is_published()).collect(); let mut posts: Vec<&Post> = state.posts.values().filter(|p| p.is_published()).collect();
let last_changed = posts.iter().flat_map(|p| p.last_modified()).max(); let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();
if should_return_304(headers, last_changed) { if should_return_304(&headers, last_changed) {
return Ok(StatusCode::NOT_MODIFIED.into_response()); return Ok(StatusCode::NOT_MODIFIED.into_response());
} }
@@ -81,16 +81,14 @@ pub async fn index(
let mut headers = vec![]; let mut headers = vec![];
if let Some(date) = last_changed { if let Some(date) = last_changed {
headers.push((header::LAST_MODIFIED, date.to_rfc2822())) headers.push((header::LAST_MODIFIED, date.to_rfc2822()));
} }
Ok(( Ok((
StatusCode::OK, StatusCode::OK,
[( [(
header::LAST_MODIFIED, header::LAST_MODIFIED,
last_changed last_changed.map_or_else(|| chrono::offset::Utc::now().to_rfc2822(), |d| d.to_rfc2822()),
.map(|d| d.to_rfc2822())
.unwrap_or_else(|| chrono::offset::Utc::now().to_rfc2822()),
)], )],
Html(res), Html(res),
) )
@@ -107,7 +105,7 @@ pub async fn view(
let last_changed = post.last_modified(); let last_changed = post.last_modified();
if should_return_304(headers, last_changed) { if should_return_304(&headers, last_changed) {
return Ok(StatusCode::NOT_MODIFIED.into_response()); return Ok(StatusCode::NOT_MODIFIED.into_response());
} }
@@ -122,9 +120,7 @@ pub async fn view(
StatusCode::OK, StatusCode::OK,
[( [(
header::LAST_MODIFIED, header::LAST_MODIFIED,
last_changed last_changed.map_or_else(|| chrono::offset::Utc::now().to_rfc2822(), |d| d.to_rfc2822()),
.map(|d| d.to_rfc2822())
.unwrap_or_else(|| chrono::offset::Utc::now().to_rfc2822()),
)], )],
Html(res), Html(res),
) )
@@ -137,9 +133,9 @@ pub async fn feed(
) -> Result<Response, WebsiteError> { ) -> Result<Response, WebsiteError> {
let mut posts: Vec<&Post> = state.posts.values().filter(|p| p.is_published()).collect(); let mut posts: Vec<&Post> = state.posts.values().filter(|p| p.is_published()).collect();
let last_changed = posts.iter().flat_map(|p| p.last_modified()).max(); let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();
if should_return_304(headers, last_changed) { if should_return_304(&headers, last_changed) {
return Ok(StatusCode::NOT_MODIFIED.into_response()); return Ok(StatusCode::NOT_MODIFIED.into_response());
} }
@@ -153,9 +149,7 @@ pub async fn feed(
(CONTENT_TYPE, "application/atom+xml"), (CONTENT_TYPE, "application/atom+xml"),
( (
header::LAST_MODIFIED, header::LAST_MODIFIED,
&last_changed &last_changed.map_or_else(|| chrono::offset::Utc::now().to_rfc2822(), |d| d.to_rfc2822()),
.map(|d| d.to_rfc2822())
.unwrap_or_else(|| chrono::offset::Utc::now().to_rfc2822()),
), ),
], ],
crate::feed::render_atom_feed(&state)?, crate::feed::render_atom_feed(&state)?,

View file

@@ -58,9 +58,9 @@ pub async fn view(
.filter(|p| p.is_published() && p.tags.contains(&tag)) .filter(|p| p.is_published() && p.tags.contains(&tag))
.collect(); .collect();
let last_changed = posts.iter().flat_map(|p| p.last_modified()).max(); let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();
if should_return_304(headers, last_changed) { if should_return_304(&headers, last_changed) {
return Ok(StatusCode::NOT_MODIFIED.into_response()); return Ok(StatusCode::NOT_MODIFIED.into_response());
} }
@@ -84,9 +84,7 @@ pub async fn view(
[ [
( (
header::LAST_MODIFIED, header::LAST_MODIFIED,
&last_changed &last_changed.map_or_else(|| chrono::offset::Utc::now().to_rfc2822(), |d| d.to_rfc2822()),
.map(|d| d.to_rfc2822())
.unwrap_or_else(|| chrono::offset::Utc::now().to_rfc2822()),
) )
], ],
Html(res) Html(res)
@@ -107,9 +105,9 @@ pub async fn feed(
.filter(|p| p.is_published() && p.tags.contains(&slug)) .filter(|p| p.is_published() && p.tags.contains(&slug))
.collect(); .collect();
let last_changed = posts.iter().flat_map(|p| p.last_modified()).max(); let last_changed = posts.iter().filter_map(|p| p.last_modified()).max();
if should_return_304(headers, last_changed) { if should_return_304(&headers, last_changed) {
return Ok(StatusCode::NOT_MODIFIED.into_response()); return Ok(StatusCode::NOT_MODIFIED.into_response());
} }
@@ -122,9 +120,7 @@ pub async fn feed(
[(CONTENT_TYPE, "application/atom+xml"), [(CONTENT_TYPE, "application/atom+xml"),
( (
header::LAST_MODIFIED, header::LAST_MODIFIED,
&last_changed &last_changed.map_or_else(|| chrono::offset::Utc::now().to_rfc2822(), |d| d.to_rfc2822()),
.map(|d| d.to_rfc2822())
.unwrap_or_else(|| chrono::offset::Utc::now().to_rfc2822()),
) )
], ],
crate::feed::render_atom_tag_feed(tag, &state)?, crate::feed::render_atom_tag_feed(tag, &state)?,

View file

@@ -1,3 +1,5 @@
#![warn(clippy::pedantic)]
#![allow(clippy::unused_async)] // axum handlers needs async, even if no awaiting happens
use std::{collections::HashMap, fmt::Display, sync::Arc, time::Duration}; use std::{collections::HashMap, fmt::Display, sync::Arc, time::Duration};
use axum::extract::MatchedPath; use axum::extract::MatchedPath;
@@ -10,7 +12,8 @@ use post::Post;
use tag::Tag; use tag::Tag;
use tera::Tera; use tera::Tera;
use tower_http::{compression::CompressionLayer, cors::CorsLayer}; use tower_http::{compression::CompressionLayer, cors::CorsLayer};
use tracing::{info_span, log::*, Span}; use tracing::{field::Empty, info_span, log::info, Span};
use tracing_subscriber::{prelude::*, EnvFilter}; use tracing_subscriber::{prelude::*, EnvFilter};
mod feed; mod feed;
@@ -85,14 +88,13 @@ fn make_span(request: &Request<Body>) -> Span {
let route = request let route = request
.extensions() .extensions()
.get::<MatchedPath>() .get::<MatchedPath>()
.map(|mp| mp.as_str()) .map(axum::extract::MatchedPath::as_str)
.unwrap_or_default(); .unwrap_or_default();
let method = request.method().as_str(); let method = request.method().as_str();
let target = uri.path_and_query().map(|p| p.as_str()).unwrap_or_default(); let target = uri.path_and_query().map(axum::http::uri::PathAndQuery::as_str).unwrap_or_default();
let name = format!("{method} {route}"); let name = format!("{method} {route}");
use tracing::field::Empty;
info_span!( info_span!(
"request", "request",
otel.name = %name, otel.name = %name,
@@ -126,7 +128,7 @@ impl Display for WebsiteError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self { match self {
WebsiteError::NotFound => write!(f, "Not found"), WebsiteError::NotFound => write!(f, "Not found"),
_ => write!(f, "Internal error"), WebsiteError::InternalError(e) => write!(f, "Internal error: {e}"),
} }
} }
} }

View file

@@ -1,11 +1,10 @@
use color_eyre::Result;
use pulldown_cmark::Event; use pulldown_cmark::Event;
use pulldown_cmark::Tag; use pulldown_cmark::Tag;
use pulldown_cmark::{Options, Parser}; use pulldown_cmark::{Options, Parser};
use crate::hilighting; use crate::hilighting;
pub fn render_markdown_to_html(markdown: &str) -> Result<String> { pub fn render_markdown_to_html(markdown: &str) -> String {
let options = Options::all(); let options = Options::all();
let mut content_html = String::new(); let mut content_html = String::new();
let parser = Parser::new_ext(markdown, options); let parser = Parser::new_ext(markdown, options);
@@ -51,5 +50,5 @@ pub fn render_markdown_to_html(markdown: &str) -> Result<String> {
pulldown_cmark::html::push_html(&mut content_html, events.into_iter()); pulldown_cmark::html::push_html(&mut content_html, events.into_iter());
Ok(content_html) content_html
} }

View file

@@ -9,7 +9,7 @@ use regex::Regex;
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use tokio::fs; use tokio::fs;
use tracing::{instrument, log::*}; use tracing::{instrument, log::{debug, warn}};
use crate::{markdown, AppState, WebsiteError}; use crate::{markdown, AppState, WebsiteError};
@@ -38,7 +38,7 @@ pub struct Post {
impl Post { impl Post {
pub fn new(slug: String, content: String, fm: TomlFrontMatter) -> Post { pub fn new(slug: String, content: String, fm: TomlFrontMatter) -> Post {
Post { Post {
absolute_path: format!("/posts/{}/", slug), absolute_path: format!("/posts/{slug}/"),
slug, slug,
content, content,
title: fm.title, title: fm.title,
@@ -107,8 +107,7 @@ pub async fn load_post(slug: &str) -> color_eyre::eyre::Result<Post> {
let tomlfm = tomlfm.expect("Missing frontmatter"); let tomlfm = tomlfm.expect("Missing frontmatter");
let content = content let content = content
.map(|c| markdown::render_markdown_to_html(&c)) .map(|c| markdown::render_markdown_to_html(&c));
.transpose()?;
Ok(Post::new( Ok(Post::new(
slug.to_string(), slug.to_string(),
@@ -144,7 +143,7 @@ pub async fn render_post(state: &AppState, post: &Post) -> Result<String, Websit
ctx.insert("page", &post); ctx.insert("page", &post);
ctx.insert("base_url", &state.base_url); ctx.insert("base_url", &state.base_url);
state.tera.render("post.html", &ctx).map_err(|e| e.into()) state.tera.render("post.html", &ctx).map_err(std::convert::Into::into)
} }
#[cfg(test)] #[cfg(test)]

View file

@@ -14,7 +14,7 @@ pub struct Tag {
pub fn get_tags<'a>(posts: impl IntoIterator<Item = &'a Post>) -> HashMap<String, Tag> { pub fn get_tags<'a>(posts: impl IntoIterator<Item = &'a Post>) -> HashMap<String, Tag> {
let mut tags: HashMap<String, Tag> = HashMap::new(); let mut tags: HashMap<String, Tag> = HashMap::new();
for post in posts.into_iter() { for post in posts {
for key in &post.tags { for key in &post.tags {
if let Some(tag) = tags.get_mut(key) { if let Some(tag) = tags.get_mut(key) {
tag.posts.push(post.slug.clone()); tag.posts.push(post.slug.clone());