🚧 wip: add server list API endpoint

main
2020-12-14 01:07:13 -06:00
parent e865ac56c7
commit fa5aa8b05a
6 changed files with 112 additions and 54 deletions

1
Cargo.lock generated
View File

@@ -1237,6 +1237,7 @@ dependencies = [
"ptth_core", "ptth_core",
"rmp-serde", "rmp-serde",
"serde", "serde",
"serde_json",
"thiserror", "thiserror",
"tokio", "tokio",
"toml", "toml",

View File

@@ -19,6 +19,7 @@ hyper = "0.13.8"
itertools = "0.9.0" itertools = "0.9.0"
rmp-serde = "0.14.4" rmp-serde = "0.14.4"
serde = {version = "1.0.117", features = ["derive"]} serde = {version = "1.0.117", features = ["derive"]}
serde_json = "1.0.60"
thiserror = "1.0.22" thiserror = "1.0.22"
tokio = { version = "0.2.22", features = ["full"] } tokio = { version = "0.2.22", features = ["full"] }
toml = "0.5.7" toml = "0.5.7"

View File

@@ -36,7 +36,7 @@ pub mod file {
#[derive (Default, Deserialize)] #[derive (Default, Deserialize)]
pub struct Isomorphic { pub struct Isomorphic {
#[serde (default)] #[serde (default)]
pub enable_scraper_auth: bool, pub enable_scraper_api: bool,
// If any of these fields are used, we are in dev mode and have to // If any of these fields are used, we are in dev mode and have to
// show extra warnings, since some auth may be weakened // show extra warnings, since some auth may be weakened

View File

@@ -16,8 +16,6 @@
use std::{ use std::{
borrow::Cow, borrow::Cow,
collections::HashMap,
iter::FromIterator,
net::SocketAddr, net::SocketAddr,
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
@@ -69,7 +67,6 @@ pub use errors::*;
pub use relay_state::RelayState; pub use relay_state::RelayState;
use relay_state::*; use relay_state::*;
use scraper_api::*;
fn ok_reply <B: Into <Body>> (b: B) fn ok_reply <B: Into <Body>> (b: B)
-> Result <Response <Body>, http::Error> -> Result <Response <Body>, http::Error>
@@ -241,7 +238,7 @@ fn pretty_print_last_seen (
#[derive (Serialize)] #[derive (Serialize)]
struct ServerEntry <'a> { struct ServerEntry <'a> {
id: String, name: String,
display_name: String, display_name: String,
last_seen: Cow <'a, str>, last_seen: Cow <'a, str>,
} }
@@ -256,62 +253,40 @@ struct ServerListPage <'a> {
async fn handle_server_list_internal (state: &Arc <RelayState>) async fn handle_server_list_internal (state: &Arc <RelayState>)
-> ServerListPage <'static> -> ServerListPage <'static>
{ {
let dev_mode; use LastSeen::*;
let display_names: HashMap <String, String> = {
let dev_mode = {
let guard = state.config.read ().await; let guard = state.config.read ().await;
guard.iso.dev_mode.is_some ()
dev_mode = guard.iso.dev_mode.is_some ();
let servers = (*guard).servers.iter ()
.map (|(k, v)| {
let display_name = v.display_name
.clone ()
.unwrap_or_else (|| k.clone ());
(k.clone (), display_name)
});
HashMap::from_iter (servers)
}; };
let git_version = git_version::read_git_version ().await;
let server_statuses = { let server_list = scraper_api::v1_server_list (&state).await;
let guard = state.server_status.lock ().await;
(*guard).clone ()
};
let now = Utc::now (); let now = Utc::now ();
let mut servers: Vec <_> = display_names.into_iter () let servers = server_list.servers.into_iter ()
.map (|(id, display_name)| { .map (|x| {
use LastSeen::*; let last_seen = match x.last_seen {
None => "Never".into (),
let status = match server_statuses.get (&id) { Some (x) => match pretty_print_last_seen (now, x) {
None => return ServerEntry {
display_name,
id,
last_seen: "Never".into (),
},
Some (x) => x,
};
let last_seen = match pretty_print_last_seen (now, status.last_seen) {
Negative => "Error (negative time)".into (), Negative => "Error (negative time)".into (),
Connected => "Connected".into (), Connected => "Connected".into (),
Description (s) => s.into (), Description (s) => s.into (),
},
}; };
ServerEntry { ServerEntry {
display_name, name: x.name,
id, display_name: x.display_name,
last_seen, last_seen,
} }
}) })
.collect (); .collect ();
servers.sort_by (|a, b| a.display_name.cmp (&b.display_name));
ServerListPage { ServerListPage {
dev_mode, dev_mode,
git_version: git_version::read_git_version ().await, git_version,
servers, servers,
} }
} }
@@ -388,7 +363,7 @@ async fn handle_all (
Err (RequestError::Mysterious) Err (RequestError::Mysterious)
} }
else if let Some (rest) = prefix_match ("/scraper/", &path) { else if let Some (rest) = prefix_match ("/scraper/", &path) {
handle_scraper_api (req, state, rest).await scraper_api::handle (req, state, rest).await
} }
else { else {
Ok (error_reply (StatusCode::OK, "Hi")?) Ok (error_reply (StatusCode::OK, "Hi")?)
@@ -423,7 +398,7 @@ async fn reload_config (
(*config) = new_config; (*config) = new_config;
debug! ("Loaded {} server configs", config.servers.len ()); debug! ("Loaded {} server configs", config.servers.len ());
debug! ("enable_scraper_auth: {}", config.iso.enable_scraper_auth); debug! ("enable_scraper_api: {}", config.iso.enable_scraper_api);
if config.iso.dev_mode.is_some () { if config.iso.dev_mode.is_some () {
error! ("Dev mode is enabled! This might turn off some security features. If you see this in production, escalate it to someone!"); error! ("Dev mode is enabled! This might turn off some security features. If you see this in production, escalate it to someone!");

View File

@@ -1,13 +1,20 @@
use std::{ use std::{
collections::HashMap,
iter::FromIterator,
sync::Arc, sync::Arc,
}; };
use chrono::{DateTime, Utc};
use hyper::{ use hyper::{
Body, Body,
Request, Request,
Response, Response,
StatusCode, StatusCode,
}; };
use serde::{
Serialize,
Serializer,
};
use tracing::{ use tracing::{
error, error,
instrument, instrument,
@@ -21,8 +28,78 @@ use crate::{
relay_state::RelayState, relay_state::RelayState,
}; };
// JSON is probably Good Enough For Now, so I'll just make everything
// a struct and lazily serialize it right before leaving the
// top-level handle () fn.
fn serialize_last_seen <S: Serializer> (x: &Option <DateTime <Utc>>, s: S)
-> Result <S::Ok, S::Error>
{
match x {
None => s.serialize_none (),
Some (x) => s.serialize_str (&x.to_rfc3339 ()),
}
}
#[derive (Serialize)]
pub struct Server {
pub name: String,
pub display_name: String,
#[serde (serialize_with = "serialize_last_seen")]
pub last_seen: Option <DateTime <Utc>>,
}
#[derive (Serialize)]
pub struct ServerList {
pub servers: Vec <Server>,
}
pub async fn v1_server_list (state: &Arc <RelayState>)
-> ServerList
{
// name --> display_name
let display_names: HashMap <String, String> = {
let guard = state.config.read ().await;
let servers = (*guard).servers.iter ()
.map (|(k, v)| {
let display_name = v.display_name
.clone ()
.unwrap_or_else (|| k.clone ());
(k.clone (), display_name)
});
HashMap::from_iter (servers)
};
// name --> status
let server_statuses = {
let guard = state.server_status.lock ().await;
(*guard).clone ()
};
let mut servers: Vec <_> = display_names.into_iter ()
.map (|(name, display_name)| {
let last_seen = server_statuses.get (&name).map (|x| x.last_seen);
Server {
display_name,
name,
last_seen,
}
})
.collect ();
servers.sort_by (|a, b| a.display_name.cmp (&b.display_name));
ServerList {
servers,
}
}
#[instrument (level = "trace", skip (req, state))] #[instrument (level = "trace", skip (req, state))]
pub async fn handle_scraper_api_v1 ( async fn api_v1 (
req: Request <Body>, req: Request <Body>,
state: Arc <RelayState>, state: Arc <RelayState>,
path_rest: &str path_rest: &str
@@ -69,13 +146,17 @@ pub async fn handle_scraper_api_v1 (
if path_rest == "test" { if path_rest == "test" {
Ok (error_reply (StatusCode::OK, "You're valid!")?) Ok (error_reply (StatusCode::OK, "You're valid!")?)
} }
else if path_rest == "server_list" {
let x = v1_server_list (&state).await;
Ok (error_reply (StatusCode::OK, &serde_json::to_string (&x).unwrap ())?)
}
else { else {
Ok (error_reply (StatusCode::NOT_FOUND, "Unknown API endpoint")?) Ok (error_reply (StatusCode::NOT_FOUND, "Unknown API endpoint")?)
} }
} }
#[instrument (level = "trace", skip (req, state))] #[instrument (level = "trace", skip (req, state))]
pub async fn handle_scraper_api ( pub async fn handle (
req: Request <Body>, req: Request <Body>,
state: Arc <RelayState>, state: Arc <RelayState>,
path_rest: &str path_rest: &str
@@ -83,16 +164,16 @@ pub async fn handle_scraper_api (
-> Result <Response <Body>, RequestError> -> Result <Response <Body>, RequestError>
{ {
{ {
if ! state.config.read ().await.iso.enable_scraper_auth { if ! state.config.read ().await.iso.enable_scraper_api {
return Ok (error_reply (StatusCode::FORBIDDEN, "Scraper API disabled")?); return Ok (error_reply (StatusCode::FORBIDDEN, "Scraper API disabled")?);
} }
} }
if let Some (rest) = prefix_match ("v1/", path_rest) { if let Some (rest) = prefix_match ("v1/", path_rest) {
handle_scraper_api_v1 (req, state, rest).await api_v1 (req, state, rest).await
} }
else if let Some (rest) = prefix_match ("api/", path_rest) { else if let Some (rest) = prefix_match ("api/", path_rest) {
handle_scraper_api_v1 (req, state, rest).await api_v1 (req, state, rest).await
} }
else { else {
Ok (error_reply (StatusCode::NOT_FOUND, "Unknown scraper API version")?) Ok (error_reply (StatusCode::NOT_FOUND, "Unknown scraper API version")?)

View File

@@ -51,7 +51,7 @@
{{#each servers}} {{#each servers}}
<tr> <tr>
<td><a class="entry" href="{{this.id}}/files/">{{this.display_name}}</a></td> <td><a class="entry" href="{{this.name}}/files/">{{this.display_name}}</a></td>
<td><span class="grey">{{this.last_seen}}</span></td> <td><span class="grey">{{this.last_seen}}</span></td>
</tr> </tr>
{{/each}} {{/each}}