🚧 wip: add server list API endpoint
parent e865ac56c7
commit fa5aa8b05a

@@ -1237,6 +1237,7 @@ dependencies = [
  "ptth_core",
+ "rmp-serde",
  "serde",
  "serde_json",
  "thiserror",
  "tokio",
  "toml",

@@ -19,6 +19,7 @@ hyper = "0.13.8"
 itertools = "0.9.0"
+rmp-serde = "0.14.4"
 serde = {version = "1.0.117", features = ["derive"]}
 serde_json = "1.0.60"
 thiserror = "1.0.22"
 tokio = { version = "0.2.22", features = ["full"] }
 toml = "0.5.7"

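Note: rmp-serde is added to the manifest here but nothing in this diff uses it yet (the new endpoint later in the diff serializes with serde_json). A minimal round-trip sketch of what the dependency enables, assuming rmp-serde 0.14's to_vec/from_read entry points; the Point struct is hypothetical:

use serde::{Deserialize, Serialize};

// Hypothetical example type; not part of this commit.
#[derive (Debug, PartialEq, Deserialize, Serialize)]
struct Point {
	x: i32,
	y: i32,
}

fn main () {
	let p = Point { x: 1, y: 2 };
	// Encode to MessagePack bytes, then decode back.
	let bytes = rmp_serde::to_vec (&p).unwrap ();
	let q: Point = rmp_serde::from_read (bytes.as_slice ()).unwrap ();
	assert_eq! (p, q);
}
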
@@ -36,7 +36,7 @@ pub mod file {
 	#[derive (Default, Deserialize)]
 	pub struct Isomorphic {
 		#[serde (default)]
-		pub enable_scraper_auth: bool,
+		pub enable_scraper_api: bool,
 		
 		// If any of these fields are used, we are in dev mode and have to
 		// show extra warnings, since some auth may be weakened

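Note: a hedged sketch of how the renamed flag would parse from relay config TOML, using only the serde and toml versions pinned above; the struct is trimmed to the one field this hunk shows:

use serde::Deserialize;

#[derive (Default, Deserialize)]
struct Isomorphic {
	// Absent from the TOML means false, via #[serde (default)].
	#[serde (default)]
	enable_scraper_api: bool,
}

fn main () {
	let on: Isomorphic = toml::from_str ("enable_scraper_api = true").unwrap ();
	let off: Isomorphic = toml::from_str ("").unwrap ();
	assert! (on.enable_scraper_api);
	assert! (! off.enable_scraper_api);
}
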
@@ -16,8 +16,6 @@
 use std::{
 	borrow::Cow,
-	collections::HashMap,
-	iter::FromIterator,
 	net::SocketAddr,
 	path::{Path, PathBuf},
 	sync::Arc,
 };

@@ -69,7 +67,6 @@ pub use errors::*;
 pub use relay_state::RelayState;
 
 use relay_state::*;
-use scraper_api::*;
 
 fn ok_reply <B: Into <Body>> (b: B)
 -> Result <Response <Body>, http::Error>

@@ -241,7 +238,7 @@ fn pretty_print_last_seen (
 
 #[derive (Serialize)]
 struct ServerEntry <'a> {
-	id: String,
+	name: String,
 	display_name: String,
 	last_seen: Cow <'a, str>,
 }

@@ -256,62 +253,40 @@ struct ServerListPage <'a> {
 async fn handle_server_list_internal (state: &Arc <RelayState>)
 -> ServerListPage <'static>
 {
-	let dev_mode;
-	let display_names: HashMap <String, String> = {
+	use LastSeen::*;
+	
+	let dev_mode = {
 		let guard = state.config.read ().await;
-		
-		dev_mode = guard.iso.dev_mode.is_some ();
-		let servers = (*guard).servers.iter ()
-		.map (|(k, v)| {
-			let display_name = v.display_name
-			.clone ()
-			.unwrap_or_else (|| k.clone ());
-			
-			(k.clone (), display_name)
-		});
-		
-		HashMap::from_iter (servers)
+		guard.iso.dev_mode.is_some ()
 	};
+	let git_version = git_version::read_git_version ().await;
 	
-	let server_statuses = {
-		let guard = state.server_status.lock ().await;
-		(*guard).clone ()
-	};
+	let server_list = scraper_api::v1_server_list (&state).await;
 	
 	let now = Utc::now ();
 	
-	let mut servers: Vec <_> = display_names.into_iter ()
-	.map (|(id, display_name)| {
-		use LastSeen::*;
-		
-		let status = match server_statuses.get (&id) {
-			None => return ServerEntry {
-				display_name,
-				id,
-				last_seen: "Never".into (),
-			},
-			Some (x) => x,
-		};
-		
-		let last_seen = match pretty_print_last_seen (now, status.last_seen) {
+	let servers = server_list.servers.into_iter ()
+	.map (|x| {
+		let last_seen = match x.last_seen {
+			None => "Never".into (),
+			Some (x) => match pretty_print_last_seen (now, x) {
			Negative => "Error (negative time)".into (),
			Connected => "Connected".into (),
			Description (s) => s.into (),
+			},
 		};
 		
 		ServerEntry {
-			display_name,
-			id,
+			name: x.name,
+			display_name: x.display_name,
 			last_seen,
 		}
 	})
 	.collect ();
 	
-	servers.sort_by (|a, b| a.display_name.cmp (&b.display_name));
-	
 	ServerListPage {
 		dev_mode,
-		git_version: git_version::read_git_version ().await,
+		git_version,
 		servers,
 	}
 }

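Note: the match above pins down the shape of LastSeen without showing its definition. As a reading aid, this is the minimal enum that would satisfy those arms; the real definition lives elsewhere in ptth_relay and may carry more:

// Hedged reconstruction from the match arms only; not the actual definition.
enum LastSeen {
	Negative,
	Connected,
	Description (String),
}
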
@@ -388,7 +363,7 @@ async fn handle_all (
 		Err (RequestError::Mysterious)
 	}
 	else if let Some (rest) = prefix_match ("/scraper/", &path) {
-		handle_scraper_api (req, state, rest).await
+		scraper_api::handle (req, state, rest).await
 	}
 	else {
 		Ok (error_reply (StatusCode::OK, "Hi")?)

@@ -423,7 +398,7 @@ async fn reload_config (
 	(*config) = new_config;
 	
 	debug! ("Loaded {} server configs", config.servers.len ());
-	debug! ("enable_scraper_auth: {}", config.iso.enable_scraper_auth);
+	debug! ("enable_scraper_api: {}", config.iso.enable_scraper_api);
 	
 	if config.iso.dev_mode.is_some () {
 		error! ("Dev mode is enabled! This might turn off some security features. If you see this in production, escalate it to someone!");

@@ -1,13 +1,20 @@
 use std::{
+	collections::HashMap,
+	iter::FromIterator,
 	sync::Arc,
 };
 
+use chrono::{DateTime, Utc};
 use hyper::{
 	Body,
 	Request,
 	Response,
 	StatusCode,
 };
+use serde::{
+	Serialize,
+	Serializer,
+};
 use tracing::{
 	error,
 	instrument,

@@ -21,8 +28,78 @@ use crate::{
 	relay_state::RelayState,
 };
 
+// JSON is probably Good Enough For Now, so I'll just make everything
+// a struct and lazily serialize it right before leaving the
+// top-level handle () fn.
+
+fn serialize_last_seen <S: Serializer> (x: &Option <DateTime <Utc>>, s: S)
+-> Result <S::Ok, S::Error>
+{
+	match x {
+		None => s.serialize_none (),
+		Some (x) => s.serialize_str (&x.to_rfc3339 ()),
+	}
+}
+
+#[derive (Serialize)]
+pub struct Server {
+	pub name: String,
+	pub display_name: String,
+	#[serde (serialize_with = "serialize_last_seen")]
+	pub last_seen: Option <DateTime <Utc>>,
+}
+
+#[derive (Serialize)]
+pub struct ServerList {
+	pub servers: Vec <Server>,
+}
+
+pub async fn v1_server_list (state: &Arc <RelayState>)
+-> ServerList
+{
+	// name --> display_name
+	let display_names: HashMap <String, String> = {
+		let guard = state.config.read ().await;
+		
+		let servers = (*guard).servers.iter ()
+		.map (|(k, v)| {
+			let display_name = v.display_name
+			.clone ()
+			.unwrap_or_else (|| k.clone ());
+			
+			(k.clone (), display_name)
+		});
+		
+		HashMap::from_iter (servers)
+	};
+	
+	// name --> status
+	let server_statuses = {
+		let guard = state.server_status.lock ().await;
+		(*guard).clone ()
+	};
+	
+	let mut servers: Vec <_> = display_names.into_iter ()
+	.map (|(name, display_name)| {
+		let last_seen = server_statuses.get (&name).map (|x| x.last_seen);
+		
+		Server {
+			display_name,
+			name,
+			last_seen,
+		}
+	})
+	.collect ();
+	
+	servers.sort_by (|a, b| a.display_name.cmp (&b.display_name));
+	
+	ServerList {
+		servers,
+	}
+}
+
 #[instrument (level = "trace", skip (req, state))]
-pub async fn handle_scraper_api_v1 (
+async fn api_v1 (
 	req: Request <Body>,
 	state: Arc <RelayState>,
 	path_rest: &str

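Note: to make the wire format concrete, here is a standalone sketch of what the structs above serialize to; the server name and timestamp are invented, everything else is copied from this hunk. With enable_scraper_api on, GET /scraper/v1/server_list (or /scraper/api/server_list, per the routing below) should return the ServerList shape:

use chrono::{DateTime, TimeZone, Utc};
use serde::{Serialize, Serializer};

fn serialize_last_seen <S: Serializer> (x: &Option <DateTime <Utc>>, s: S)
-> Result <S::Ok, S::Error>
{
	match x {
		None => s.serialize_none (),
		Some (x) => s.serialize_str (&x.to_rfc3339 ()),
	}
}

#[derive (Serialize)]
struct Server {
	name: String,
	display_name: String,
	#[serde (serialize_with = "serialize_last_seen")]
	last_seen: Option <DateTime <Utc>>,
}

#[derive (Serialize)]
struct ServerList {
	servers: Vec <Server>,
}

fn main () {
	let list = ServerList {
		servers: vec! [
			Server {
				// Invented sample data, for illustration only.
				name: "alpha".into (),
				display_name: "Alpha".into (),
				last_seen: Some (Utc.ymd (2020, 12, 4).and_hms (12, 0, 0)),
			},
		],
	};
	// {"servers":[{"name":"alpha","display_name":"Alpha","last_seen":"2020-12-04T12:00:00+00:00"}]}
	println! ("{}", serde_json::to_string (&list).unwrap ());
}
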
@@ -69,13 +146,17 @@ pub async fn handle_scraper_api_v1 (
 	if path_rest == "test" {
 		Ok (error_reply (StatusCode::OK, "You're valid!")?)
 	}
+	else if path_rest == "server_list" {
+		let x = v1_server_list (&state).await;
+		Ok (error_reply (StatusCode::OK, &serde_json::to_string (&x).unwrap ())?)
+	}
 	else {
 		Ok (error_reply (StatusCode::NOT_FOUND, "Unknown API endpoint")?)
 	}
 }
 
 #[instrument (level = "trace", skip (req, state))]
-pub async fn handle_scraper_api (
+pub async fn handle (
 	req: Request <Body>,
 	state: Arc <RelayState>,
 	path_rest: &str

@@ -83,16 +164,16 @@
 -> Result <Response <Body>, RequestError>
 {
 	{
-		if ! state.config.read ().await.iso.enable_scraper_auth {
+		if ! state.config.read ().await.iso.enable_scraper_api {
 			return Ok (error_reply (StatusCode::FORBIDDEN, "Scraper API disabled")?);
 		}
 	}
 	
 	if let Some (rest) = prefix_match ("v1/", path_rest) {
-		handle_scraper_api_v1 (req, state, rest).await
+		api_v1 (req, state, rest).await
 	}
 	else if let Some (rest) = prefix_match ("api/", path_rest) {
-		handle_scraper_api_v1 (req, state, rest).await
+		api_v1 (req, state, rest).await
 	}
 	else {
 		Ok (error_reply (StatusCode::NOT_FOUND, "Unknown scraper API version")?)

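Note: prefix_match is imported from elsewhere in the crate and never shown in this diff. Judging by the call sites above, it behaves like this hedged stand-in (strip a literal prefix, hand back the remainder); the real helper may differ:

// Inferred from usage; not the actual ptth_relay helper.
fn prefix_match <'a> (prefix: &str, path: &'a str) -> Option <&'a str> {
	if path.starts_with (prefix) {
		Some (&path [prefix.len ()..])
	}
	else {
		None
	}
}

fn main () {
	assert_eq! (prefix_match ("v1/", "v1/server_list"), Some ("server_list"));
	assert_eq! (prefix_match ("v1/", "api/server_list"), None);
}
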
@@ -51,7 +51,7 @@
 
 {{#each servers}}
 <tr>
-	<td><a class="entry" href="{{this.id}}/files/">{{this.display_name}}</a></td>
+	<td><a class="entry" href="{{this.name}}/files/">{{this.display_name}}</a></td>
 	<td><span class="grey">{{this.last_seen}}</span></td>
 </tr>
 {{/each}}