♻️ Fix some clippy warnings
parent 47c59447f3
commit c3ff3deb8e
@@ -1,5 +1,9 @@
 #![warn (clippy::pedantic)]
 
+// I don't see the point of writing the type twice if I'm initializing a struct
+// and the type is already in the struct definition.
+#![allow (clippy::default_trait_access)]
+
 // I'm not sure if I like this one
 #![allow (clippy::enum_glob_use)]
 
@@ -7,15 +11,11 @@
 // error type is defined.
 #![allow (clippy::missing_errors_doc)]
 
-// I don't see the point of writing the type twice if I'm initializing a struct
-// and the type is already in the struct definition.
-#![allow (clippy::default_trait_access)]
-
 // False positive on futures::select! macro
 #![allow (clippy::mut_mut)]
 
 use std::{
 	borrow::Cow,
 	collections::HashMap,
 	convert::TryFrom,
 	iter::FromIterator,
@@ -40,14 +40,16 @@ struct ServerState <'a> {
 }
 
 fn status_reply <B: Into <Body>> (status: StatusCode, b: B)
--> Response <Body>
+-> Result <Response <Body>, hyper::http::Error>
 {
-	Response::builder ().status (status).body (b.into ()).unwrap ()
+	Response::builder ().status (status).body (b.into ())
 }
 
 async fn handle_all (req: Request <Body>, state: Arc <ServerState <'static>>)
--> Result <Response <Body>, String>
+-> Result <Response <Body>, hyper::http::Error>
 {
+	use std::str::FromStr;
+	
 	debug! ("req.uri () = {:?}", req.uri ());
 	
 	let path_and_query = req.uri ().path_and_query ().map (|x| x.as_str ()).unwrap_or_else (|| req.uri ().path ());
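A self-contained sketch (not from this repo) of why the widened error type works: both the response builder and HeaderName parsing produce errors that convert into hyper::http::Error, so the question-mark operator can replace the .unwrap () calls. The helper name reply_with_header and the values are made up for illustration.

use std::str::FromStr;
use hyper::{Body, Response, StatusCode};
use hyper::header::HeaderName;

// Hypothetical helper: builder errors and invalid header names both
// convert into hyper::http::Error, so `?` propagates them to the caller.
fn reply_with_header (k: &str, v: &str) -> Result <Response <Body>, hyper::http::Error> {
	Response::builder ()
	.status (StatusCode::OK)
	.header (HeaderName::from_str (k)?, v)
	.body (Body::from (v.to_string ()))
}
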
@@ -58,7 +60,7 @@ async fn handle_all (req: Request <Body>, state: Arc <ServerState <'static>>)
 
 	let ptth_req = match RequestParts::from_hyper (parts.method, path_and_query, parts.headers) {
 		Ok (x) => x,
-		_ => return Ok (status_reply (StatusCode::BAD_REQUEST, "Bad request")),
+		_ => return Ok (status_reply (StatusCode::BAD_REQUEST, "Bad request")?),
 	};
 
 	let default_root = PathBuf::from ("./");
@@ -79,10 +81,8 @@ async fn handle_all (req: Request <Body>, state: Arc <ServerState <'static>>)
 	let mut resp = Response::builder ()
 	.status (StatusCode::from (ptth_resp.parts.status_code));
 	
-	use std::str::FromStr;
-	
 	for (k, v) in ptth_resp.parts.headers.into_iter () {
-		resp = resp.header (hyper::header::HeaderName::from_str (&k).unwrap (), v);
+		resp = resp.header (hyper::header::HeaderName::from_str (&k)?, v);
 	}
 	
 	let body = ptth_resp.body
@@ -90,9 +90,7 @@ async fn handle_all (req: Request <Body>, state: Arc <ServerState <'static>>)
 	.unwrap_or_else (Body::empty)
 	;
 	
-	let resp = resp.body (body).unwrap ();
-	
-	Ok (resp)
+	resp.body (body)
 }
 
 #[derive (Deserialize)]
@@ -1,9 +1,12 @@
 // Static file server that can plug into the PTTH reverse server
 
+// I'm not sure if I like this one
+#![allow (clippy::enum_glob_use)]
+
 use std::{
 	borrow::Cow,
 	cmp::min,
-	collections::*,
+	collections::HashMap,
 	convert::{Infallible, TryInto},
 	error::Error,
 	fmt::Debug,
@@ -12,7 +15,9 @@ use std::{
 };
 
 use handlebars::Handlebars;
-use percent_encoding::*;
+use percent_encoding::{
+	percent_decode,
+};
 use serde::Serialize;
 use tokio::{
 	fs::{
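For contrast, a standalone sketch of the narrowed import: only percent_decode is needed at module scope here, while CONTROLS and utf8_percent_encode are pulled in locally where they are used. The example input is made up.

// Only the item this module actually uses at the top level.
use percent_encoding::percent_decode;

fn main () {
	// percent_decode undoes %-escapes; "%20" decodes back to a space.
	let decoded = percent_decode (b"a%20b").decode_utf8_lossy ();
	assert_eq! (decoded, "a b");
}
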
@@ -84,7 +89,7 @@ struct TemplateDirPage <'a> {
 }
 
 fn parse_range_header (range_str: &str) -> (Option <u64>, Option <u64>) {
-	use lazy_static::*;
+	use lazy_static::lazy_static;
 	
 	lazy_static! {
 		static ref RE: Regex = Regex::new (r"^bytes=(\d*)-(\d*)$").expect ("Couldn't compile regex for Range header");
@@ -94,17 +99,17 @@ fn parse_range_header (range_str: &str) -> (Option <u64>, Option <u64>) {
 
 	let caps = match RE.captures (range_str) {
 		Some (x) => x,
-		_ => return (None, None),
+		None => return (None, None),
 	};
 	let start = caps.get (1).map (|x| x.as_str ());
 	let end = caps.get (2).map (|x| x.as_str ());
 	
-	let start = start.map (|x| u64::from_str_radix (x, 10).ok ()).flatten ();
+	let start = start.and_then (|x| u64::from_str_radix (x, 10).ok ());
 	
 	// HTTP specifies ranges as [start inclusive, end inclusive]
 	// But that's dumb and [start inclusive, end exclusive) is better
 	
-	let end = end.map (|x| u64::from_str_radix (x, 10).ok ().map (|x| x + 1)).flatten ();
+	let end = end.and_then (|x| u64::from_str_radix (x, 10).ok ().map (|x| x + 1));
 	
 	(start, end)
 }
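As a reference point, Option::and_then is exactly map followed by flatten, which is what clippy's map_flatten lint suggests collapsing. A tiny self-contained check with a made-up value:

fn main () {
	let start: Option <&str> = Some ("512");
	
	// Old shape: map gives Option <Option <u64>>, flatten collapses it.
	let a = start.map (|x| u64::from_str_radix (x, 10).ok ()).flatten ();
	// New shape: and_then does both steps in one call.
	let b = start.and_then (|x| u64::from_str_radix (x, 10).ok ());
	
	assert_eq! (a, b);
	assert_eq! (b, Some (512));
}
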
@@ -151,9 +156,9 @@ fn check_range (range_str: Option <&str>, file_len: u64)
 fn get_icon (file_name: &str) -> &'static str {
 	// Because my editor actually doesn't render these
 	
-	let video = "🎞️";
-	let picture = "📷";
-	let file = "📄";
+	let video = "\u{1f39e}\u{fe0f}";
+	let picture = "\u{1f4f7}";
+	let file = "\u{1f4c4}";
 	
 	if
 		file_name.ends_with (".mp4") ||
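The \u{...} strings are the same text as the emoji they replace, just spelled with escapes so they survive editors and fonts that don't render them. A quick sanity check, folder icon only:

fn main () {
	// "\u{1f4c1}" is U+1F4C1 FILE FOLDER, i.e. the folder emoji.
	let icon_folder = "\u{1f4c1}";
	assert_eq! (icon_folder.chars ().next (), Some ('\u{1f4c1}'));
	assert_eq! (icon_folder.chars ().next ().map (u32::from), Some (0x1F4C1));
}
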
@@ -178,10 +183,15 @@ fn get_icon (file_name: &str) -> &'static str {
 
 async fn read_dir_entry (entry: DirEntry) -> TemplateDirEntry
 {
+	use percent_encoding::{
+		CONTROLS,
+		utf8_percent_encode,
+	};
+	
 	let file_name = match entry.file_name ().into_string () {
 		Ok (x) => x,
 		Err (_) => return TemplateDirEntry {
-			icon: "⚠️",
+			icon: "\u{26a0}\u{fe0f}",
 			trailing_slash: "",
 			file_name: "File / directory name is not UTF-8".into (),
 			encoded_file_name: "".into (),
@@ -193,7 +203,7 @@ async fn read_dir_entry (entry: DirEntry) -> TemplateDirEntry
 	let metadata = match entry.metadata ().await {
 		Ok (x) => x,
 		Err (_) => return TemplateDirEntry {
-			icon: "⚠️",
+			icon: "\u{26a0}\u{fe0f}",
 			trailing_slash: "",
 			file_name: "Could not fetch metadata".into (),
 			encoded_file_name: "".into (),
@@ -204,7 +214,7 @@ async fn read_dir_entry (entry: DirEntry) -> TemplateDirEntry
 
 	let (trailing_slash, icon, size) = {
 		let t = metadata.file_type ();
-		let icon_folder = "📁";
+		let icon_folder = "\u{1f4c1}";
 		
 		if t.is_dir () {
 			("/", icon_folder, "".into ())
@@ -214,8 +224,6 @@ async fn read_dir_entry (entry: DirEntry) -> TemplateDirEntry
 		}
 	};
 	
-	use percent_encoding::*;
-	
 	let encoded_file_name = utf8_percent_encode (&file_name, CONTROLS).to_string ();
 	
 	TemplateDirEntry {
@@ -307,7 +315,7 @@ async fn serve_file (
 	let mut next_mark = mark_interval;
 	
 	loop {
-		let mut buffer = vec! [0u8; 65_536];
+		let mut buffer = vec! [0_u8; 65_536];
 		let bytes_read: u64 = f.read (&mut buffer).await.unwrap ().try_into ().unwrap ();
 		
 		let bytes_read = min (bytes_left, bytes_read);
@@ -353,11 +361,11 @@ async fn serve_file (
 		response.header (String::from ("content-length"), range.end.to_string ().into_bytes ());
 	}
 	
-	if ! should_send_body {
-		response.status_code (StatusCode::NoContent);
+	if should_send_body {
+		response.content_length = Some (content_length);
 	}
 	else {
-		response.content_length = Some (content_length);
+		response.status_code (StatusCode::NoContent);
 	}
 	
 	if let Some (body) = body {
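The branch swap above only removes the negation (clippy's if_not_else lint); the two outcomes still pair up the same way. A condition-only sketch, with placeholder strings standing in for the real response mutations:

fn describe (should_send_body: bool) -> &'static str {
	// Before: if ! should_send_body { no-content } else { send-body }
	// After: positive branch first, same behavior.
	if should_send_body { "send body" } else { "204 No Content" }
}

fn main () {
	assert_eq! (describe (true), "send body");
	assert_eq! (describe (false), "204 No Content");
}
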
@@ -546,7 +554,7 @@ async fn internal_serve_all (
 
 	let file_len = file_md.len ();
 	
-	let range_header = headers.get ("range").map (|v| std::str::from_utf8 (v).ok ()).flatten ();
+	let range_header = headers.get ("range").and_then (|v| std::str::from_utf8 (v).ok ());
 	
 	match check_range (range_header, file_len) {
 		ParsedRange::RangeNotSatisfiable (file_len) => RangeNotSatisfiable (file_len),
@@ -557,7 +565,7 @@ async fn internal_serve_all (
 			MarkdownErr (MarkdownError::TooBig)
 		}
 		else {
-			let mut buffer = vec! [0u8; MAX_BUF_SIZE.try_into ().unwrap ()];
+			let mut buffer = vec! [0_u8; MAX_BUF_SIZE.try_into ().unwrap ()];
 			let bytes_read = file.read (&mut buffer).await.unwrap ();
 			buffer.truncate (bytes_read);
 			
@@ -656,10 +664,10 @@ pub fn load_templates (
 
 	let asset_root = asset_root.join ("handlebars/server");
 	
-	for (k, v) in vec! [
+	for (k, v) in &[
 		("file_server_dir", "file_server_dir.html"),
 		("file_server_root", "file_server_root.html"),
-	].into_iter () {
+	] {
 		handlebars.register_template_file (k, asset_root.join (v))?;
 	}
 	
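Borrowing a fixed array instead of building a Vec just to iterate it avoids the allocation (clippy's useless_vec lint flags the old form). A standalone sketch using the same template pairs, with println! standing in for the real registration call:

fn main () {
	// `&[...]` iterates by reference; no heap allocation needed.
	for (k, v) in &[
		("file_server_dir", "file_server_dir.html"),
		("file_server_root", "file_server_root.html"),
	] {
		println! ("register template {} from {}", k, v);
	}
}
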
@@ -1,3 +1,12 @@
+#![warn (clippy::pedantic)]
+
+// I don't see the point in documenting the errors outside of where the
+// error type is defined.
+#![allow (clippy::missing_errors_doc)]
+
+// False positive on futures::select! macro
+#![allow (clippy::mut_mut)]
+
 use std::{
 	error::Error,
 	path::PathBuf,
@@ -30,6 +39,7 @@ pub mod load_toml;
 
 const BAD_PASSWORDS: &[u8] = include_bytes! ("bad_passwords.txt");
 
+#[must_use]
 pub fn password_is_bad (mut password: String) -> bool {
 	password.make_ascii_lowercase ();
 	
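A small sketch of what #[must_use] adds: callers that silently drop the boolean now get a compiler warning (clippy's must_use_candidate lint suggests the attribute). The body below is a stand-in; the real function checks against bad_passwords.txt.

#[must_use]
pub fn password_is_bad (mut password: String) -> bool {
	password.make_ascii_lowercase ();
	password == "hunter2" // stand-in check only
}

fn main () {
	// Calling this without using the result would warn:
	// "unused return value ... that must be used".
	if password_is_bad (String::from ("hunter2")) {
		eprintln! ("API key is too weak");
	}
}
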
@@ -66,7 +76,7 @@ async fn handle_req_resp <'a> (
 
 	debug! ("Unwrapped {} requests", wrapped_reqs.len ());
 	
-	for wrapped_req in wrapped_reqs.into_iter () {
+	for wrapped_req in wrapped_reqs {
 		let state = state.clone ();
 		
 		tokio::spawn (async move {
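Dropping the explicit .into_iter () is clippy's explicit_into_iter_loop lint: a for loop already calls IntoIterator::into_iter on whatever it is given, so the two spellings desugar identically. A trivial check with placeholder items:

fn main () {
	let wrapped_reqs = vec! ["req-1", "req-2"];
	// Same as `for wrapped_req in wrapped_reqs.into_iter ()`.
	for wrapped_req in wrapped_reqs {
		println! ("{}", wrapped_req);
	}
}
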
@@ -136,6 +146,7 @@ pub struct ConfigFile {
 }
 
 impl ConfigFile {
+	#[must_use]
 	pub fn tripcode (&self) -> String {
 		base64::encode (blake3::hash (self.api_key.as_bytes ()).as_bytes ())
 	}
@@ -155,10 +166,10 @@ pub async fn run_server (
 )
 -> Result <(), Box <dyn Error>>
 {
-	let asset_root = asset_root.unwrap_or_else (PathBuf::new);
-	
 	use std::convert::TryInto;
 	
+	let asset_root = asset_root.unwrap_or_else (PathBuf::new);
+	
 	if password_is_bad (config_file.api_key.clone ()) {
 		panic! ("API key is too weak, server can't use it");
 	}
@@ -14,7 +14,7 @@ fn load_inner <
 > (
 	mut f: File
 ) -> T {
-	let mut buffer = vec! [0u8; 4096];
+	let mut buffer = vec! [0_u8; 4096];
 	let bytes_read = f.read (&mut buffer).unwrap_or_else (|_| panic! ("Can't read config"));
 	buffer.truncate (bytes_read);
 	
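0_u8 and 0u8 are the same literal; the underscore only separates the type suffix visually, which is what clippy's unseparated_literal_suffix lint prefers. A quick check:

fn main () {
	assert_eq! (vec! [0u8; 4], vec! [0_u8; 4]);
}
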