new (ptth_relay): add test endpoint for scrapers

Scrapers can authenticate using a shared (but hashed) API key.
The hash of the key is specified in ptth_relay.toml; setting it forces dev mode on.
main
2020-12-12 17:50:40 +00:00
parent 6961fde7dc
commit 6d68a77364
3 changed files with 67 additions and 13 deletions
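
To illustrate the commit message: an operator hashes the shared key and puts the hash, not the key itself, into ptth_relay.toml. A minimal sketch of generating that hash, assuming BLAKE3 plus the base64 encoding that encode_base64 () in the diff below implies; the key literal is made up:

// Requires the blake3 and base64 crates (base64::encode was the current API circa 2020).
fn main () {
    // Hypothetical shared key; only its hash belongs in ptth_relay.toml.
    let api_key = "bad_password";
    let hash = blake3::hash (api_key.as_bytes ());
    println! ("{}", base64::encode (hash.as_bytes ()));
}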


@ -1,6 +1,6 @@
use std::{
convert::TryInto,
fmt,
fmt::{self, Debug, Formatter},
ops::Deref,
};
@ -14,8 +14,15 @@ use serde::{
Deserializer,
};
#[derive (Copy, Clone, PartialEq, Eq)]
pub struct BlakeHashWrapper (blake3::Hash);
impl Debug for BlakeHashWrapper {
fn fmt (&self, f: &mut Formatter <'_>) -> Result <(), fmt::Error> {
write! (f, "{}", self.encode_base64 ())
}
}
impl BlakeHashWrapper {
pub fn from_key (bytes: &[u8]) -> Self {
Self (blake3::hash (bytes))
@ -89,7 +96,7 @@ pub struct ScraperKey <V: MaxValidDuration> {
pub enum KeyValidity {
Valid,
WrongKey,
WrongKey (BlakeHashWrapper),
ClockIsBehind,
Expired,
DurationTooLong (Duration),
@ -103,8 +110,9 @@ impl <V: MaxValidDuration> ScraperKey <V> {
// I put this first because I think the constant-time check should run
// before anything else. But I'm not a crypto expert, so it's just
// guesswork.
if blake3::hash (input) != *self.hash {
return WrongKey;
let input_hash = BlakeHashWrapper::from_key (input);
if input_hash != self.hash {
return WrongKey (input_hash);
}
if self.not_after < self.not_before {
@ -212,13 +220,18 @@ mod tests {
_phantom: Default::default (),
};
for (input, expected) in &[
(zero_time + Duration::days (0), WrongKey),
(zero_time + Duration::days (2), WrongKey),
(zero_time + Duration::days (1 + 7), WrongKey),
(zero_time + Duration::days (100), WrongKey),
for input in &[
zero_time + Duration::days (0),
zero_time + Duration::days (2),
zero_time + Duration::days (1 + 7),
zero_time + Duration::days (100),
] {
assert_eq! (key.is_valid (*input, "badder_password".as_bytes ()), *expected);
let validity = key.is_valid (*input, "badder_password".as_bytes ());
match validity {
WrongKey (_) => (),
_ => panic! ("Expected WrongKey here"),
}
}
}
}


@ -434,7 +434,7 @@ async fn handle_all (req: Request <Body>, state: Arc <RelayState>)
if let Some (listen_code) = prefix_match ("/7ZSFUKGV/http_listen/", path) {
let api_key = match api_key {
None => return Ok (error_reply (StatusCode::UNAUTHORIZED, "Can't register as server without an API key")?),
None => return Ok (error_reply (StatusCode::FORBIDDEN, "Can't run server without an API key")?),
Some (x) => x,
};
server_endpoint::handle_listen (state, listen_code.into (), api_key.as_bytes ()).await
@ -464,6 +464,46 @@ async fn handle_all (req: Request <Body>, state: Arc <RelayState>)
else if path == "/frontend/test_mysterious_error" {
Err (RequestError::Mysterious)
}
else if path == "/scraper/v1/test" || path == "/scraper/api/test" {
use key_validity::KeyValidity;
let api_key = match api_key {
None => return Ok (error_reply (StatusCode::FORBIDDEN, "Can't run scraper without an API key")?),
Some (x) => x,
};
let bad_key = || error_reply (StatusCode::FORBIDDEN, "403 Forbidden");
{
let config = state.config.read ().await;
let dev_mode = match &config.iso.dev_mode {
None => return Ok (bad_key ()?),
Some (x) => x,
};
let expected_key = match &dev_mode.scraper_key {
None => return Ok (bad_key ()?),
Some (x) => x,
};
let now = chrono::Utc::now ();
match expected_key.is_valid (now, api_key.as_bytes ()) {
KeyValidity::Valid => (),
KeyValidity::WrongKey (bad_hash) => {
error! ("Bad scraper key with hash {:?}", bad_hash);
return Ok (bad_key ()?);
}
err => {
error! ("Bad scraper key {:?}", err);
return Ok (bad_key ()?);
},
}
}
Ok (error_reply (StatusCode::OK, "You're valid!")?)
}
else {
Ok (error_reply (StatusCode::OK, "Hi")?)
}
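
For the "(POC) Test with curl" item tracked in the to-do list below, the request boils down to one authenticated GET. A sketch of the same check as a Rust client, assuming the relay listens on 127.0.0.1:4000 and reads the key from an X-ApiKey header; neither detail appears in this hunk, so both are guesses:

// Requires the reqwest and tokio crates.
#[tokio::main]
async fn main () -> Result <(), reqwest::Error> {
    let resp = reqwest::Client::new ()
        .get ("http://127.0.0.1:4000/scraper/v1/test")
        .header ("X-ApiKey", "bad_password")
        .send ().await?;
    // Expect 200 "You're valid!" for a good key, 403 Forbidden otherwise.
    println! ("{}: {}", resp.status (), resp.text ().await?);
    Ok (())
}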


@ -34,8 +34,9 @@ stronger is ready.
- (X) Add feature flags to ptth_relay.toml for dev mode and scrapers
- (X) Make sure Docker release CAN build
- (X) Add hash of 1 scraper key to ptth_relay.toml, with 1 week expiration
- ( ) Accept scraper key for some testing endpoint
- ( ) (POC) Test with curl
- (X) Accept scraper key for some testing endpoint
- (X) (POC) Test with curl
- ( ) Clean up scraper endpoint
- ( ) Manually create SQLite DB for scraper keys, add 1 hash
- ( ) Impl DB reads (see the sketch after this list)
- ( ) Remove scraper key from config file
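
The last three open items move scraper keys out of the config file and into SQLite. One possible shape for the DB read, using the rusqlite crate and a hypothetical scraper_keys table that stores base64 hashes; the schema is an assumption, not part of this commit:

use rusqlite::{Connection, params};

// Hypothetical schema: CREATE TABLE scraper_keys (hash_b64 TEXT PRIMARY KEY);
fn key_hash_known (conn: &Connection, hash_b64: &str) -> rusqlite::Result <bool> {
    let count: i64 = conn.query_row (
        "SELECT COUNT (*) FROM scraper_keys WHERE hash_b64 = ?1",
        params! [hash_b64],
        |row| row.get (0),
    )?;
    Ok (count > 0)
}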