⭐ new: finish MVP for scraper auth.
Adding a SQLite DB to properly track the keys is going to take a while. For now I'll just keep them in the config file and give them 30-day expirations.
branch main
parent cda627fa4b
commit 9ac44cfeb7
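The gist of the change: the single dev-mode key is replaced by named scraper keys with a 30-day validity window, stored directly in the relay config. A minimal usage sketch, not part of the diff below; it assumes ptth_relay's key_validity module is reachable from outside the crate, as the tests at the bottom suggest:

use chrono::{Duration, Utc};
use ptth_relay::key_validity::{KeyValidity, ScraperKey, Valid30Days};

fn sketch () {
	// The relay stores a name, a validity window, and only the hash of the secret.
	let key: ScraperKey <Valid30Days> = ScraperKey::new_30_day ("my-scraper", b"not-a-real-secret");
	
	// At request time the relay passes the current time and the presented secret.
	let now = Utc::now () + Duration::days (1);
	match key.is_valid (now, b"not-a-real-secret") {
		KeyValidity::Valid => println! ("key accepted"),
		_ => println! ("key rejected"),
	}
}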
@@ -8,7 +8,10 @@ use std::{
 	path::Path,
 };

-use crate::errors::ConfigError;
+use crate::{
+	errors::ConfigError,
+	key_validity::*,
+};

 // Stuff we need to load from the config file and use to
 // set up the HTTP server
@@ -28,7 +31,7 @@ pub mod file {

 	#[derive (Deserialize)]
 	pub struct DevMode {
-		pub scraper_key: Option <ScraperKey <Valid7Days>>,
+
 	}

 	// Stuff that's identical between the file and the runtime structures
@@ -45,19 +48,25 @@ pub mod file {

 	#[derive (Deserialize)]
 	pub struct Config {
-		pub port: Option <u16>,
-		pub servers: Vec <Server>,
 		#[serde (flatten)]
 		pub iso: Isomorphic,
+
+		pub port: Option <u16>,
+		pub servers: Vec <Server>,
+
+		// Adding a DB will take a while, so I'm moving these out of dev mode.
+		pub scraper_keys: Vec <ScraperKey <Valid30Days>>,
 	}
 }

 // Stuff we actually need at runtime

 pub struct Config {
+	pub iso: file::Isomorphic,
+
 	pub port: Option <u16>,
 	pub servers: HashMap <String, file::Server>,
-	pub iso: file::Isomorphic,
+	pub scraper_keys: HashMap <String, ScraperKey <Valid30Days>>,
 }

 impl TryFrom <file::Config> for Config {
@@ -69,10 +78,18 @@ impl TryFrom <file::Config> for Config {

 		let servers = itertools::process_results (servers, |i| HashMap::from_iter (i))?;

+		let scraper_keys = if f.iso.enable_scraper_api {
+			HashMap::from_iter (f.scraper_keys.into_iter ().map (|key| (key.hash.encode_base64 (), key)))
+		}
+		else {
+			Default::default ()
+		};
+
 		Ok (Self {
+			iso: f.iso,
 			port: f.port,
 			servers,
-			iso: f.iso,
+			scraper_keys,
 		})
 	}
 }
@@ -69,7 +69,7 @@ impl <'de> Deserialize <'de> for BlakeHashWrapper {
 }

 pub struct Valid7Days;
-//pub struct Valid30Days;
+pub struct Valid30Days;
 //pub struct Valid90Days;

 pub trait MaxValidDuration {
@@ -82,11 +82,19 @@ impl MaxValidDuration for Valid7Days {
 	}
 }

+impl MaxValidDuration for Valid30Days {
+	fn dur () -> Duration {
+		Duration::days (30)
+	}
+}
+
 #[derive (Deserialize)]
 pub struct ScraperKey <V: MaxValidDuration> {
+	name: String,
+
 	not_before: DateTime <Utc>,
 	not_after: DateTime <Utc>,
-	hash: BlakeHashWrapper,
+	pub hash: BlakeHashWrapper,

 	#[serde (default)]
 	_phantom: std::marker::PhantomData <V>,
@@ -103,11 +111,12 @@ pub enum KeyValidity {
 	DurationNegative,
 }

-impl ScraperKey <Valid7Days> {
-	pub fn new (input: &[u8]) -> Self {
+impl ScraperKey <Valid30Days> {
+	pub fn new_30_day <S: Into <String>> (name: S, input: &[u8]) -> Self {
 		let now = Utc::now ();

 		Self {
+			name: name.into (),
 			not_before: now,
 			not_after: now + Duration::days (7),
 			hash: BlakeHashWrapper::from_key (input),
@@ -161,7 +170,8 @@ mod tests {
 	fn duration_negative () {
 		let zero_time = Utc::now ();

-		let key = ScraperKey::<Valid7Days> {
+		let key = ScraperKey::<Valid30Days> {
+			name: "automated testing".to_string (),
 			not_before: zero_time + Duration::days (1 + 2),
 			not_after: zero_time + Duration::days (1),
 			hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()),
@@ -183,14 +193,15 @@ mod tests {
 	fn key_valid_too_long () {
 		let zero_time = Utc::now ();

-		let key = ScraperKey::<Valid7Days> {
+		let key = ScraperKey::<Valid30Days> {
+			name: "automated testing".to_string (),
 			not_before: zero_time + Duration::days (1),
-			not_after: zero_time + Duration::days (1 + 8),
+			not_after: zero_time + Duration::days (1 + 31),
 			hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()),
 			_phantom: Default::default (),
 		};

-		let err = DurationTooLong (Duration::days (7));
+		let err = DurationTooLong (Duration::days (30));

 		for (input, expected) in &[
 			(zero_time + Duration::days (0), err),
@@ -205,9 +216,10 @@ mod tests {
 	fn normal_key () {
 		let zero_time = Utc::now ();

-		let key = ScraperKey::<Valid7Days> {
+		let key = ScraperKey::<Valid30Days> {
+			name: "automated testing".to_string (),
 			not_before: zero_time + Duration::days (1),
-			not_after: zero_time + Duration::days (1 + 7),
+			not_after: zero_time + Duration::days (1 + 30),
 			hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()),
 			_phantom: Default::default (),
 		};
@@ -215,7 +227,7 @@ mod tests {
 		for (input, expected) in &[
 			(zero_time + Duration::days (0), ClockIsBehind),
 			(zero_time + Duration::days (2), Valid),
-			(zero_time + Duration::days (1 + 7), Expired),
+			(zero_time + Duration::days (1 + 30), Expired),
 			(zero_time + Duration::days (100), Expired),
 		] {
 			assert_eq! (key.is_valid (*input, "bad_password".as_bytes ()), *expected);
@@ -226,9 +238,10 @@ mod tests {
 	fn wrong_key () {
 		let zero_time = Utc::now ();

-		let key = ScraperKey::<Valid7Days> {
+		let key = ScraperKey::<Valid30Days> {
+			name: "automated testing".to_string (),
 			not_before: zero_time + Duration::days (1),
-			not_after: zero_time + Duration::days (1 + 7),
+			not_after: zero_time + Duration::days (1 + 30),
 			hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()),
 			_phantom: Default::default (),
 		};
@@ -236,7 +249,7 @@ mod tests {
 		for input in &[
 			zero_time + Duration::days (0),
 			zero_time + Duration::days (2),
-			zero_time + Duration::days (1 + 7),
+			zero_time + Duration::days (1 + 30),
 			zero_time + Duration::days (100),
 		] {
 			let validity = key.is_valid (*input, "badder_password".as_bytes ());
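The Valid7Days / Valid30Days markers are zero-sized types: the duration limit lives in the type parameter, so a 7-day key and a 30-day key are different types and can't be mixed up at compile time. A stripped-down sketch of that pattern with made-up names, not the project's actual code:

use std::marker::PhantomData;

// Marker types carry the policy...
struct Days7;
struct Days30;

trait MaxDays { fn max_days () -> i64; }
impl MaxDays for Days7 { fn max_days () -> i64 { 7 } }
impl MaxDays for Days30 { fn max_days () -> i64 { 30 } }

// ...and the key only stores a zero-sized PhantomData to remember which
// policy it was built with.
struct Key <V: MaxDays> {
	age_days: i64,
	_phantom: PhantomData <V>,
}

impl <V: MaxDays> Key <V> {
	fn is_expired (&self) -> bool {
		self.age_days > V::max_days ()
	}
}

fn main () {
	// A Key<Days7> is a different type; it can't be passed where a Key<Days30> is expected.
	let key = Key::<Days30> { age_days: 10, _phantom: PhantomData };
	assert! (! key.is_expired ());
}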
@@ -338,6 +338,8 @@ async fn handle_all (
 		server_endpoint::handle_listen (state, listen_code.into (), api_key.as_bytes ()).await
 	}
 	else if let Some (rest) = prefix_match ("/frontend/servers/", &path) {
+		// DRY T4H76LB3
+
 		if rest == "" {
 			Ok (handle_server_list (state, handlebars).await?)
 		}
@@ -23,11 +23,21 @@ use tracing::{
 use crate::{
 	RequestError,
 	error_reply,
-	key_validity::KeyValidity,
+	key_validity::*,
 	prefix_match,
 	relay_state::RelayState,
 };

+// Not sure if this is the best way to do a hard-coded string table, but
+// it'll keep the tests up to date
+
+mod strings {
+	pub const FORBIDDEN: &str = "403 Forbidden";
+	pub const NO_API_KEY: &str = "Can't auth as scraper without API key";
+	pub const UNKNOWN_API_VERSION: &str = "Unknown scraper API version";
+	pub const UNKNOWN_API_ENDPOINT: &str = "Unknown scraper API endpoint";
+}
+
 // JSON is probably Good Enough For Now, so I'll just make everything
 // a struct and lazily serialize it right before leaving the
 // top-level handle () fn.
@@ -109,27 +119,25 @@ async fn api_v1 (
 	let api_key = req.headers ().get ("X-ApiKey");

 	let api_key = match api_key {
-		None => return Ok (error_reply (StatusCode::FORBIDDEN, "Can't run scraper without an API key")?),
+		None => return Ok (error_reply (StatusCode::FORBIDDEN, strings::NO_API_KEY)?),
 		Some (x) => x,
 	};

-	let bad_key = || error_reply (StatusCode::FORBIDDEN, "403 Forbidden");
+	let actual_hash = BlakeHashWrapper::from_key (api_key.as_bytes ());

+	let bad_key = || error_reply (StatusCode::FORBIDDEN, strings::FORBIDDEN);
+
 	{
 		let config = state.config.read ().await;

-		let dev_mode = match &config.iso.dev_mode {
-			None => return Ok (bad_key ()?),
-			Some (x) => x,
-		};
-
-		let expected_key = match &dev_mode.scraper_key {
+		let expected_key = match config.scraper_keys.get (&actual_hash.encode_base64 ()) {
 			None => return Ok (bad_key ()?),
 			Some (x) => x,
 		};

 		let now = chrono::Utc::now ();

+		// The hash in is_valid is redundant
 		match expected_key.is_valid (now, api_key.as_bytes ()) {
 			KeyValidity::Valid => (),
 			KeyValidity::WrongKey (bad_hash) => {
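In short, the relay now authenticates a scraper by hashing the presented key, looking that hash up in the config, and checking the validity window. A condensed sketch of that flow, not the actual handler; it assumes BlakeHashWrapper and the runtime Config are importable as the tests suggest:

use ptth_relay::config::Config;
use ptth_relay::key_validity::{BlakeHashWrapper, KeyValidity};

// Returns true if the presented X-ApiKey value matches a configured scraper key
// that is currently inside its not_before / not_after window.
fn scraper_key_ok (config: &Config, presented: &[u8]) -> bool {
	let hash = BlakeHashWrapper::from_key (presented);
	match config.scraper_keys.get (&hash.encode_base64 ()) {
		Some (expected) => matches! (expected.is_valid (chrono::Utc::now (), presented), KeyValidity::Valid),
		None => false,
	}
}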
@@ -150,8 +158,25 @@ async fn api_v1 (
 		let x = v1_server_list (&state).await;
 		Ok (error_reply (StatusCode::OK, &serde_json::to_string (&x).unwrap ())?)
 	}
+	else if let Some (rest) = prefix_match ("server/", path_rest) {
+		// DRY T4H76LB3
+
+		if let Some (idx) = rest.find ('/') {
+			let listen_code = String::from (&rest [0..idx]);
+			let path = String::from (&rest [idx..]);
+			let (parts, _) = req.into_parts ();
+
+			// This is ugly. I don't like having scraper_api know about the
+			// crate root.
+
+			Ok (crate::handle_http_request (parts, path, state, listen_code).await?)
+		}
 		else {
-			Ok (error_reply (StatusCode::NOT_FOUND, "Unknown API endpoint")?)
+			Ok (error_reply (StatusCode::BAD_REQUEST, "Bad URI format")?)
+		}
+	}
+	else {
+		Ok (error_reply (StatusCode::NOT_FOUND, strings::UNKNOWN_API_ENDPOINT)?)
 	}
 }

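So a request like `/scraper/v1/server/aliens_wildland/api/v1/dir/` (see the curl examples further down) splits at the first `/` after the server name: the listen code becomes "aliens_wildland" and everything from that slash onward is forwarded as the inner path. A tiny standalone sketch mirroring the split above:

fn split_server_path (rest: &str) -> Option <(String, String)> {
	// rest is what follows "server/", e.g. "aliens_wildland/api/v1/dir/"
	let idx = rest.find ('/')?;
	Some ((String::from (&rest [0..idx]), String::from (&rest [idx..])))
}

fn main () {
	let (listen_code, path) = split_server_path ("aliens_wildland/api/v1/dir/").unwrap ();
	assert_eq! (listen_code, "aliens_wildland");
	assert_eq! (path, "/api/v1/dir/");
}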
@@ -176,7 +201,7 @@ pub async fn handle (
 		api_v1 (req, state, rest).await
 	}
 	else {
-		Ok (error_reply (StatusCode::NOT_FOUND, "Unknown scraper API version")?)
+		Ok (error_reply (StatusCode::NOT_FOUND, strings::UNKNOWN_API_VERSION)?)
 	}
 }

@@ -203,7 +228,7 @@ mod tests {
 		// Expected
 		expected_status: StatusCode,
 		expected_headers: Vec <(&'static str, &'static str)>,
-		expected_body: &'static str,
+		expected_body: String,
 	}

 	impl TestCase {
@@ -237,16 +262,16 @@ mod tests {
 			x
 		}

-		fn expected_body (&self, v: &'static str) -> Self {
+		fn expected_body (&self, v: String) -> Self {
 			let mut x = self.clone ();
 			x.expected_body = v;
 			x
 		}

-		fn expected (&self, sc: StatusCode, body: &'static str) -> Self {
+		fn expected (&self, sc: StatusCode, body: &str) -> Self {
 			self
 			.expected_status (sc)
-			.expected_body (body)
+			.expected_body (format! ("{}\n", body))
 		}

 		async fn test (&self) {
@@ -260,14 +285,15 @@ mod tests {
 			let input = input.body (Body::empty ()).unwrap ();

 			let config_file = config::file::Config {
-				port: Some (4000),
-				servers: vec! [],
 				iso: config::file::Isomorphic {
 					enable_scraper_api: true,
-					dev_mode: self.valid_key.map (|key| config::file::DevMode {
-						scraper_key: Some (key_validity::ScraperKey::new (key.as_bytes ())),
-					}),
+					dev_mode: Default::default (),
 				},
+				port: Some (4000),
+				servers: vec! [],
+				scraper_keys: vec! [
+					self.valid_key.map (|x| key_validity::ScraperKey::new_30_day ("automated test", x.as_bytes ())),
+				].into_iter ().filter_map (|x| x).collect (),
 			};

 			let config = config::Config::try_from (config_file).expect ("Can't load config");
@@ -292,7 +318,7 @@ mod tests {
 			let actual_body = actual_body.to_vec ();
 			let actual_body = String::from_utf8 (actual_body).expect ("Body should be UTF-8");

-			assert_eq! (&actual_body, self.expected_body);
+			assert_eq! (actual_body, self.expected_body);
 		}
 	}

@@ -309,21 +335,21 @@ mod tests {
 			expected_headers: vec! [
 				("content-type", "text/plain"),
 			],
-			expected_body: "You're valid!\n",
+			expected_body: "You're valid!\n".to_string (),
 		};

 		for case in &[
 			base_case.clone (),
 			base_case.path_rest ("v9999/test")
-			.expected (StatusCode::NOT_FOUND, "Unknown scraper API version\n"),
+			.expected (StatusCode::NOT_FOUND, strings::UNKNOWN_API_VERSION),
 			base_case.valid_key (None)
-			.expected (StatusCode::FORBIDDEN, "403 Forbidden\n"),
+			.expected (StatusCode::FORBIDDEN, strings::FORBIDDEN),
 			base_case.input_key (Some ("borgus"))
-			.expected (StatusCode::FORBIDDEN, "403 Forbidden\n"),
+			.expected (StatusCode::FORBIDDEN, strings::FORBIDDEN),
 			base_case.path_rest ("v1/toast")
-			.expected (StatusCode::NOT_FOUND, "Unknown API endpoint\n"),
+			.expected (StatusCode::NOT_FOUND, strings::UNKNOWN_API_ENDPOINT),
 			base_case.input_key (None)
-			.expected (StatusCode::FORBIDDEN, "Can't run scraper without an API key\n"),
+			.expected (StatusCode::FORBIDDEN, strings::NO_API_KEY),
 		] {
 			case.test ().await;
 		}
@@ -2,6 +2,52 @@

 (Find this issue with `git grep YNQAQKJS`)

+## Test curl commands
+
+(In production the API key should be loaded from a file. Putting it in the
+Bash command is bad, because it will be saved to Bash's history file. Putting
+it in environment variables is slightly better)
+
+`curl -H "X-ApiKey: $API_KEY" 127.0.0.1:4000/scraper/api/test`
+
+Should return "You're valid!"
+
+`curl -H "X-ApiKey: $API_KEY" 127.0.0.1:4000/scraper/v1/server_list`
+
+Should return a JSON object listing all the servers.
+
+`curl -H "X-ApiKey: $API_KEY" 127.0.0.1:4000/scraper/v1/server/aliens_wildland/api/v1/dir/`
+
+Proxies into the "aliens_wildland" server and retrieves a JSON object listing
+the file server root. (The server must be running a new version of ptth_server
+which can serve the JSON API)
+
+`curl -H "X-ApiKey: $API_KEY" 127.0.0.1:4000/scraper/v1/server/aliens_wildland/api/v1/dir/src/`
+
+Same, but retrieves the listing for "/src".
+
+`curl -H "X-ApiKey: $API_KEY" 127.0.0.1:4000/scraper/v1/server/aliens_wildland/files/src/tests.rs`
+
+There is no special API for retrieving files yet - but the existing server
+API is proxied through the new scraper API on the relay.
+
+`curl --head -H "X-ApiKey: $API_KEY" 127.0.0.1:4000/scraper/v1/server/aliens_wildland/files/src/tests.rs`
+
+PTTH supports HEAD requests. This request will yield a "204 No Content", with
+the "content-length" header.
+
+`curl -H "range: bytes=100-199" -H "X-ApiKey: $API_KEY" 127.0.0.1:4000/scraper/v1/server/aliens_wildland/files/src/tests.rs`
+
+PTTH supports byte range requests. This request will skip 100 bytes into the
+file, and read 100 bytes.
+
+To avoid fence-post errors, most programming languages use half-open ranges,
+e.g. `0..3` means "0, 1, 2". However, HTTP byte ranges are closed ranges,
+e.g. `0-3` means "0, 1, 2, 3". So 100-199 means 199 is the last byte retrieved.
+
+By polling with HEAD and byte range requests, a scraper client can approximate
+`tail -f` behavior of a server-side file.
+
 ## Problem statement

 PTTH has 2 auth routes:
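A sketch of that polling loop from the scraper side. Illustrative only: it assumes the reqwest crate with its blocking feature, which this repo does not necessarily use, and the function and URL names are mine.

use std::{thread::sleep, time::Duration};

// Poll a file behind the scraper API and print any newly appended bytes,
// approximating `tail -f` with HEAD requests plus closed byte ranges.
// url is e.g. "http://127.0.0.1:4000/scraper/v1/server/aliens_wildland/files/log.txt".
fn tail_follow (url: &str, api_key: &str) -> reqwest::Result <()> {
	let client = reqwest::blocking::Client::new ();
	let mut offset: u64 = 0;
	
	loop {
		// HEAD tells us the current length without downloading the body.
		let head = client.head (url).header ("X-ApiKey", api_key).send ()?;
		let len: u64 = head.headers ().get ("content-length")
			.and_then (|v| v.to_str ().ok ())
			.and_then (|v| v.parse ().ok ())
			.unwrap_or (0);
		
		if len > offset {
			// HTTP ranges are closed, so the last byte we want is len - 1.
			let range = format! ("bytes={}-{}", offset, len - 1);
			let body = client.get (url)
				.header ("X-ApiKey", api_key)
				.header ("range", range)
				.send ()?
				.text ()?;
			print! ("{}", body);
			offset = len;
		}
		
		sleep (Duration::from_secs (2));
	}
}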
@@ -38,7 +84,7 @@ stronger is ready.
 - (X) (POC) Test with curl
 - (X) Clean up scraper endpoint
 - (X) Add (almost) end-to-end tests for test scraper endpoint
-- ( ) Thread server endpoints through relay scraper auth
+- (X) Thread server endpoints through relay scraper auth
 - ( ) Add tests for other scraper endpoints
 - (don't care) Factor v1 API into v1 module
 - (X) Add real scraper endpoints
@@ -80,7 +126,7 @@ These will all be JSON for now since Python, Rust, C++, C#, etc. can handle it.
 For compatibility with wget spidering, I _might_ do XML or HTML that's
 machine-readable. We'll see.

-## Open questions
+## Decision journal

 **Who generates the API key? The scraper client, or the PTTH relay server?**

src/tests.rs
@@ -36,6 +36,7 @@ fn end_to_end () {
 	let tripcode = BlakeHashWrapper::from_key (api_key.as_bytes ());
 	debug! ("Relay is expecting tripcode {}", tripcode.encode_base64 ());
 	let config_file = ptth_relay::config::file::Config {
+		iso: Default::default (),
 		port: None,
 		servers: vec! [
 			ptth_relay::config::file::Server {
@@ -44,7 +45,7 @@ fn end_to_end () {
 				display_name: None,
 			},
 		],
-		iso: Default::default (),
+		scraper_keys: vec! [],
 	};

 	let config = ptth_relay::config::Config::try_from (config_file).expect ("Can't load config");
@@ -143,16 +144,15 @@ fn scraper_endpoints () {
 	use ptth_relay::*;

 	let config_file = config::file::Config {
-		port: Some (4001),
-		servers: vec! [
-
-		],
 		iso: config::file::Isomorphic {
 			enable_scraper_api: true,
-			dev_mode: Some (config::file::DevMode {
-				scraper_key: Some (key_validity::ScraperKey::new (b"bogus")),
-			}),
+			dev_mode: Default::default (),
 		},
+		port: Some (4001),
+		servers: vec! [],
+		scraper_keys: vec! [
+			key_validity::ScraperKey::new_30_day ("automated testing", b"bogus")
+		],
 	};

 	let config = config::Config::try_from (config_file).expect ("Can't load config");