Compare commits

..

3 Commits

Author SHA1 Message Date
_ 228a9b4262 🔊 scraper GETs have their own type of audit event now 2021-08-27 20:28:34 -05:00
_ a454585d9c 📝 document how scraper keys work 2021-08-27 20:17:59 -05:00
_ caaed8a5e1 remove 30-day limit on scraper keys 2021-08-27 18:34:38 -05:00
6 changed files with 70 additions and 77 deletions

View File

@ -15,7 +15,6 @@ use crate::{
errors::ConfigError, errors::ConfigError,
key_validity::{ key_validity::{
ScraperKey, ScraperKey,
Valid30Days,
}, },
}; };
@ -99,7 +98,6 @@ pub mod file {
use crate::key_validity::{ use crate::key_validity::{
BlakeHashWrapper, BlakeHashWrapper,
ScraperKey, ScraperKey,
Valid30Days,
}; };
#[derive (Clone, Debug, Deserialize, Serialize)] #[derive (Clone, Debug, Deserialize, Serialize)]
@ -142,7 +140,7 @@ pub mod file {
pub servers: Option <Vec <Server>>, pub servers: Option <Vec <Server>>,
// Adding a DB will take a while, so I'm moving these out of dev mode. // Adding a DB will take a while, so I'm moving these out of dev mode.
pub scraper_keys: Option <Vec <ScraperKey <Valid30Days>>>, pub scraper_keys: Option <Vec <ScraperKey>>,
pub news_url: Option <String>, pub news_url: Option <String>,
} }
@ -156,7 +154,7 @@ pub struct Config {
pub address: IpAddr, pub address: IpAddr,
pub port: Option <u16>, pub port: Option <u16>,
pub servers: HashMap <String, file::Server>, pub servers: HashMap <String, file::Server>,
pub scraper_keys: HashMap <String, ScraperKey <Valid30Days>>, pub scraper_keys: HashMap <String, ScraperKey>,
pub news_url: Option <String>, pub news_url: Option <String>,
} }

View File

@ -78,36 +78,17 @@ impl Serialize for BlakeHashWrapper {
} }
} }
pub struct Valid7Days;
pub struct Valid30Days;
//pub struct Valid90Days;
pub trait MaxValidDuration { pub trait MaxValidDuration {
fn dur () -> Duration; fn dur () -> Duration;
} }
impl MaxValidDuration for Valid7Days {
fn dur () -> Duration {
Duration::days (7)
}
}
impl MaxValidDuration for Valid30Days {
fn dur () -> Duration {
Duration::days (30)
}
}
#[derive (Deserialize)] #[derive (Deserialize)]
pub struct ScraperKey <V: MaxValidDuration> { pub struct ScraperKey {
name: String, pub name: String,
not_before: DateTime <Utc>, not_before: DateTime <Utc>,
not_after: DateTime <Utc>, not_after: DateTime <Utc>,
pub hash: BlakeHashWrapper, pub hash: BlakeHashWrapper,
#[serde (default)]
_phantom: std::marker::PhantomData <V>,
} }
#[derive (Copy, Clone, Debug, PartialEq)] #[derive (Copy, Clone, Debug, PartialEq)]
@ -121,21 +102,20 @@ pub enum KeyValidity {
DurationNegative, DurationNegative,
} }
impl <V: MaxValidDuration> ScraperKey <V> { impl ScraperKey {
pub fn new_30_day <S: Into <String>> (name: S, input: &[u8]) -> Self { pub fn new_30_day <S: Into <String>> (name: S, input: &[u8]) -> Self {
let now = Utc::now (); let now = Utc::now ();
Self { Self {
name: name.into (), name: name.into (),
not_before: now, not_before: now,
not_after: now + V::dur (), not_after: now + Duration::days (30),
hash: BlakeHashWrapper::from_key (input), hash: BlakeHashWrapper::from_key (input),
_phantom: Default::default (),
} }
} }
} }
impl <V: MaxValidDuration> ScraperKey <V> { impl ScraperKey {
#[must_use] #[must_use]
pub fn is_valid (&self, now: DateTime <Utc>, input: &[u8]) -> KeyValidity { pub fn is_valid (&self, now: DateTime <Utc>, input: &[u8]) -> KeyValidity {
use KeyValidity::*; use KeyValidity::*;
@ -152,13 +132,6 @@ impl <V: MaxValidDuration> ScraperKey <V> {
return DurationNegative; return DurationNegative;
} }
let max_dur = V::dur ();
let actual_dur = self.not_after - self.not_before;
if actual_dur > max_dur {
return DurationTooLong (max_dur);
}
if now >= self.not_after { if now >= self.not_after {
return Expired; return Expired;
} }
@ -196,12 +169,11 @@ mod tests {
fn duration_negative () { fn duration_negative () {
let zero_time = Utc::now (); let zero_time = Utc::now ();
let key = ScraperKey::<Valid30Days> { let key = ScraperKey {
name: "automated testing".to_string (), name: "automated testing".to_string (),
not_before: zero_time + Duration::days (1 + 2), not_before: zero_time + Duration::days (1 + 2),
not_after: zero_time + Duration::days (1), not_after: zero_time + Duration::days (1),
hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()), hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()),
_phantom: Default::default (),
}; };
let err = DurationNegative; let err = DurationNegative;
@ -215,46 +187,22 @@ mod tests {
} }
} }
#[test]
fn key_valid_too_long () {
let zero_time = Utc::now ();
let key = ScraperKey::<Valid30Days> {
name: "automated testing".to_string (),
not_before: zero_time + Duration::days (1),
not_after: zero_time + Duration::days (1 + 31),
hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()),
_phantom: Default::default (),
};
let err = DurationTooLong (Duration::days (30));
for (input, expected) in &[
(zero_time + Duration::days (0), err),
(zero_time + Duration::days (2), err),
(zero_time + Duration::days (100), err),
] {
assert_eq! (key.is_valid (*input, "bad_password".as_bytes ()), *expected);
}
}
#[test] #[test]
fn normal_key () { fn normal_key () {
let zero_time = Utc::now (); let zero_time = Utc::now ();
let key = ScraperKey::<Valid30Days> { let key = ScraperKey {
name: "automated testing".to_string (), name: "automated testing".to_string (),
not_before: zero_time + Duration::days (1), not_before: zero_time + Duration::days (1),
not_after: zero_time + Duration::days (1 + 30), not_after: zero_time + Duration::days (1 + 60),
hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()), hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()),
_phantom: Default::default (),
}; };
for (input, expected) in &[ for (input, expected) in &[
(zero_time + Duration::days (0), ClockIsBehind), (zero_time + Duration::days (0), ClockIsBehind),
(zero_time + Duration::days (2), Valid), (zero_time + Duration::days (2), Valid),
(zero_time + Duration::days (29), Valid), (zero_time + Duration::days (60 - 1), Valid),
(zero_time + Duration::days (1 + 30), Expired), (zero_time + Duration::days (60 + 1), Expired),
(zero_time + Duration::days (100), Expired), (zero_time + Duration::days (100), Expired),
] { ] {
assert_eq! (key.is_valid (*input, "bad_password".as_bytes ()), *expected); assert_eq! (key.is_valid (*input, "bad_password".as_bytes ()), *expected);
@ -265,12 +213,11 @@ mod tests {
fn wrong_key () { fn wrong_key () {
let zero_time = Utc::now (); let zero_time = Utc::now ();
let key = ScraperKey::<Valid30Days> { let key = ScraperKey {
name: "automated testing".to_string (), name: "automated testing".to_string (),
not_before: zero_time + Duration::days (1), not_before: zero_time + Duration::days (1),
not_after: zero_time + Duration::days (1 + 30), not_after: zero_time + Duration::days (1 + 30),
hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()), hash: BlakeHashWrapper::from_key ("bad_password".as_bytes ()),
_phantom: Default::default (),
}; };
for input in &[ for input in &[

View File

@ -127,8 +127,6 @@ async fn handle_http_request (
return Err (UnknownServer); return Err (UnknownServer);
} }
let user = get_user_name (&req);
let req = http_serde::RequestParts::from_hyper (req.method, uri.clone (), req.headers) let req = http_serde::RequestParts::from_hyper (req.method, uri.clone (), req.headers)
.map_err (|_| BadRequest)?; .map_err (|_| BadRequest)?;
@ -136,11 +134,6 @@ async fn handle_http_request (
let req_id = rusty_ulid::generate_ulid_string (); let req_id = rusty_ulid::generate_ulid_string ();
state.audit_log.push (AuditEvent::new (AuditData::WebClientGet {
user,
server_name: server_name.to_string (),
uri,
})).await;
trace! ("Created request {}", req_id); trace! ("Created request {}", req_id);
{ {
@ -610,6 +603,13 @@ async fn handle_all (
} => { } => {
let (parts, _) = req.into_parts (); let (parts, _) = req.into_parts ();
let user = get_user_name (&parts);
state.audit_log.push (AuditEvent::new (AuditData::WebClientGet {
user,
server_name: listen_code.to_string (),
uri: path.to_string (),
})).await;
handle_http_request (parts, path.to_string (), &state, listen_code).await? handle_http_request (parts, path.to_string (), &state, listen_code).await?
}, },
ClientServerList => handle_server_list (state, handlebars).await?, ClientServerList => handle_server_list (state, handlebars).await?,

View File

@ -121,6 +121,10 @@ pub enum AuditData {
server: crate::config::file::Server, server: crate::config::file::Server,
}, },
RelayStart, RelayStart,
ScraperGet {
key_name: String,
path: String,
},
WebClientGet { WebClientGet {
user: Option <String>, user: Option <String>,
server_name: String, server_name: String,
@ -312,7 +316,7 @@ impl Builder {
self self
} }
pub fn scraper_key (mut self, key: crate::key_validity::ScraperKey <crate::key_validity::Valid30Days>) pub fn scraper_key (mut self, key: crate::key_validity::ScraperKey)
-> Self -> Self
{ {
self.config.scraper_keys.insert (key.hash.encode_base64 (), key); self.config.scraper_keys.insert (key.hash.encode_base64 (), key);

View File

@ -127,6 +127,11 @@ async fn api_v1 (
) )
-> Result <Response <Body>, RequestError> -> Result <Response <Body>, RequestError>
{ {
use crate::{
AuditData,
AuditEvent,
};
let api_key = req.headers ().get ("X-ApiKey"); let api_key = req.headers ().get ("X-ApiKey");
let api_key = match api_key { let api_key = match api_key {
@ -138,6 +143,8 @@ async fn api_v1 (
let bad_key = || error_reply (StatusCode::FORBIDDEN, strings::FORBIDDEN); let bad_key = || error_reply (StatusCode::FORBIDDEN, strings::FORBIDDEN);
let key_name;
{ {
let config = state.config.read ().await; let config = state.config.read ().await;
@ -160,8 +167,15 @@ async fn api_v1 (
return Ok (bad_key ()?); return Ok (bad_key ()?);
}, },
} }
key_name = expected_key.name.to_string ();
} }
state.audit_log.push (AuditEvent::new (AuditData::ScraperGet {
key_name,
path: path_rest.to_string (),
})).await;
if path_rest == "test" { if path_rest == "test" {
Ok (error_reply (StatusCode::OK, "You're valid!")?) Ok (error_reply (StatusCode::OK, "You're valid!")?)
} }
@ -224,7 +238,6 @@ mod tests {
use tokio::runtime::Runtime; use tokio::runtime::Runtime;
use crate::{ use crate::{
config,
key_validity, key_validity,
}; };
use super::*; use super::*;

View File

@ -0,0 +1,31 @@
# How scraper keys work
Come up with a random passphrase:
`not this, this is a bogus passphrase for documentation`
Run that through the `hash-api-key` subcommand of any `ptth_relay` instance:
`ptth_relay hash-api-key`
You'll get a hash like this:
`RUWt1hQQuHIRjftOdgeZf0PG/DtAmIaMqot/nwBAZXQ=`
Make sure that gets into the relay's config file, `ptth_relay.toml`:
```
[[scraper_keys]]
name = "shudder_mummy"
not_before = "2021-08-27T19:20:25-05:00"
not_after = "2031-08-27T19:20:25-05:00"
hash = "RUWt1hQQuHIRjftOdgeZf0PG/DtAmIaMqot/nwBAZXQ="
```
Use curl to try it out:
```
curl \
--header "X-ApiKey: not this, this is a bogus passphrase for documentation" \
http://localhost:4000/scraper/v1/test
```