diff --git a/crates/ptth_relay/src/key_validity.rs b/crates/ptth_relay/src/key_validity.rs index d0d5498..67dc43a 100644 --- a/crates/ptth_relay/src/key_validity.rs +++ b/crates/ptth_relay/src/key_validity.rs @@ -84,7 +84,7 @@ pub trait MaxValidDuration { #[derive (Deserialize)] pub struct ScraperKey { - name: String, + pub name: String, not_before: DateTime <Utc>, not_after: DateTime <Utc>, diff --git a/crates/ptth_relay/src/lib.rs b/crates/ptth_relay/src/lib.rs index e5e8a73..72c5f9d 100644 --- a/crates/ptth_relay/src/lib.rs +++ b/crates/ptth_relay/src/lib.rs @@ -127,8 +127,6 @@ async fn handle_http_request ( return Err (UnknownServer); } - let user = get_user_name (&req); - let req = http_serde::RequestParts::from_hyper (req.method, uri.clone (), req.headers) .map_err (|_| BadRequest)?; @@ -136,11 +134,6 @@ async fn handle_http_request ( let req_id = rusty_ulid::generate_ulid_string (); - state.audit_log.push (AuditEvent::new (AuditData::WebClientGet { - user, - server_name: server_name.to_string (), - uri, - })).await; trace! ("Created request {}", req_id); { @@ -610,6 +603,13 @@ async fn handle_all ( } => { let (parts, _) = req.into_parts (); + let user = get_user_name (&parts); + state.audit_log.push (AuditEvent::new (AuditData::WebClientGet { + user, + server_name: listen_code.to_string (), + uri: path.to_string (), + })).await; + handle_http_request (parts, path.to_string (), &state, listen_code).await? 
}, ClientServerList => handle_server_list (state, handlebars).await?, diff --git a/crates/ptth_relay/src/relay_state.rs b/crates/ptth_relay/src/relay_state.rs index f9dfbf0..9f66ffa 100644 --- a/crates/ptth_relay/src/relay_state.rs +++ b/crates/ptth_relay/src/relay_state.rs @@ -123,8 +123,7 @@ pub enum AuditData { RelayStart, ScraperGet { key_name: String, - server_name: String, - uri: String, + path: String, }, WebClientGet { user: Option <String>, diff --git a/crates/ptth_relay/src/scraper_api.rs b/crates/ptth_relay/src/scraper_api.rs index d85e1e7..ddb2e93 100644 --- a/crates/ptth_relay/src/scraper_api.rs +++ b/crates/ptth_relay/src/scraper_api.rs @@ -127,6 +127,11 @@ async fn api_v1 ( ) -> Result <Response <Body>, RequestError> { + use crate::{ + AuditData, + AuditEvent, + }; + let api_key = req.headers ().get ("X-ApiKey"); let api_key = match api_key { @@ -138,6 +143,8 @@ async fn api_v1 ( let bad_key = || error_reply (StatusCode::FORBIDDEN, strings::FORBIDDEN); + let key_name; + { let config = state.config.read ().await; @@ -160,8 +167,15 @@ async fn api_v1 ( return Ok (bad_key ()?); }, } + + key_name = expected_key.name.to_string (); } + state.audit_log.push (AuditEvent::new (AuditData::ScraperGet { + key_name, + path: path_rest.to_string (), + })).await; + if path_rest == "test" { Ok (error_reply (StatusCode::OK, "You're valid!")?) } diff --git a/docs/reference/scraper-keys.md b/docs/reference/scraper-keys.md index 38dbad3..8e2b155 100644 --- a/docs/reference/scraper-keys.md +++ b/docs/reference/scraper-keys.md @@ -27,7 +27,5 @@ Use curl to like, try it out: ``` curl \ --header "X-ApiKey: not this, this is a bogus passphrase for documentation" \ -http://localhost:4000/scraper/v1/server/$SERVER_NAME/files/ +http://localhost:4000/scraper/v1/test ``` - -(Replace `$SERVER_NAME` with the name of the server you want to reach. And change the URL so it's not going to localhost.)