test: add end-to-end test for scraper API

main
2020-12-13 01:54:54 +00:00
parent 1e81421444
commit 670ce30667
7 changed files with 87 additions and 14 deletions

Cargo.lock (generated)

@@ -1182,6 +1182,7 @@ version = "0.1.0"
 dependencies = [
  "base64 0.12.3",
  "blake3",
+ "chrono",
  "ptth_relay",
  "ptth_server",
  "reqwest",


@@ -12,6 +12,7 @@ license = "AGPL-3.0"
 base64 = "0.12.3"
 blake3 = "0.3.7"
+chrono = {version = "0.4.19", features = ["serde"]}
 reqwest = { version = "0.10.8", features = ["stream"] }
 tokio = { version = "0.2.22", features = ["full"] }
 tracing = "0.1.21"
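
The `serde` feature on chrono is what lets `DateTime<Utc>` fields, like those on `ScraperKey` below, derive `Deserialize`. A minimal sketch with a hypothetical `Window` struct:

```rust
// Minimal sketch (hypothetical struct): chrono's "serde" feature adds
// Serialize/Deserialize impls for DateTime<Utc>, so a struct holding
// timestamps can simply derive Deserialize.
use chrono::{DateTime, Utc};
use serde::Deserialize;

#[derive (Deserialize)]
struct Window {
	not_before: DateTime <Utc>,
	not_after: DateTime <Utc>,
}
```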


@@ -55,6 +55,7 @@ pub mod file {
 	// Stuff we actually need at runtime
 	pub struct Config {
+		pub port: Option <u16>,
 		pub servers: HashMap <String, file::Server>,
 		pub iso: file::Isomorphic,
 	}
@@ -69,6 +70,7 @@ impl TryFrom <file::Config> for Config {
 		let servers = itertools::process_results (servers, |i| HashMap::from_iter (i))?;
 		Ok (Self {
+			port: f.port,
 			servers,
 			iso: f.iso,
 		})
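
A minimal sketch of the resulting behavior, using a hypothetical `effective_port` helper: the configured port wins, and the relay otherwise keeps its old default of 4000 (as `run_relay` does below):

```rust
// Sketch (hypothetical helper): the relay listens on the configured
// port when one is given, and falls back to 4000 otherwise.
fn effective_port (configured: Option <u16>) -> u16 {
	configured.unwrap_or (4000)
}

fn main () {
	assert_eq! (effective_port (Some (4001)), 4001); // as in the new test
	assert_eq! (effective_port (None), 4000); // the old hard-coded default
}
```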


@@ -84,9 +84,9 @@ impl MaxValidDuration for Valid7Days {
 #[derive (Deserialize)]
 pub struct ScraperKey <V: MaxValidDuration> {
-	pub not_before: DateTime <Utc>,
-	pub not_after: DateTime <Utc>,
-	pub hash: BlakeHashWrapper,
+	not_before: DateTime <Utc>,
+	not_after: DateTime <Utc>,
+	hash: BlakeHashWrapper,
 	#[serde (default)]
 	_phantom: std::marker::PhantomData <V>,
@@ -103,6 +103,19 @@ pub enum KeyValidity {
 	DurationNegative,
 }
+
+impl ScraperKey <Valid7Days> {
+	pub fn new (input: &[u8]) -> Self {
+		let now = Utc::now ();
+		
+		Self {
+			not_before: now,
+			not_after: now + Duration::days (7),
+			hash: BlakeHashWrapper::from_key (input),
+			_phantom: Default::default (),
+		}
+	}
+}
+
 impl <V: MaxValidDuration> ScraperKey <V> {
 	pub fn is_valid (&self, now: DateTime <Utc>, input: &[u8]) -> KeyValidity {
 		use KeyValidity::*;
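
For context, a hypothetical usage sketch pairing the new constructor with the existing `is_valid` check. `KeyValidity::Valid` is an assumed variant name; only `DurationNegative` is visible in this hunk:

```rust
// Hypothetical sketch: mint a 7-day key for some secret bytes, then
// validate the same bytes against it. KeyValidity::Valid is assumed;
// this diff only shows the DurationNegative variant.
use chrono::Utc;
use ptth_relay::key_validity::{KeyValidity, ScraperKey, Valid7Days};

fn key_accepts (input: &[u8]) -> bool {
	let key: ScraperKey <Valid7Days> = ScraperKey::new (input);
	matches! (key.is_valid (Utc::now (), input), KeyValidity::Valid)
}
```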


@@ -471,6 +471,12 @@ async fn handle_scraper_api (
 )
 -> Result <Response <Body>, RequestError>
 {
+	{
+		if ! state.config.read ().await.iso.enable_scraper_auth {
+			return Ok (error_reply (StatusCode::FORBIDDEN, "Scraper API disabled")?);
+		}
+	}
+	
 	if let Some (rest) = prefix_match ("v1/", path_rest) {
 		handle_scraper_api_v1 (req, state, rest).await
 	}
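
A side note on the doubled braces above: they appear to scope the `RwLock` read guard so it is dropped as soon as the check finishes, rather than being held across the rest of the handler. A minimal illustration of the pattern, with a hypothetical `flag` lock:

```rust
// Minimal illustration (hypothetical names): scoping a tokio RwLock
// read so the guard is released before any later await points.
async fn gate (flag: &tokio::sync::RwLock <bool>) -> &'static str {
	{
		if ! *flag.read ().await {
			return "disabled";
		}
	} // the read guard is dropped here
	// ... later awaits run without holding the lock ...
	"enabled"
}
```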
@@ -590,11 +596,6 @@ pub async fn run_relay (
 )
 -> Result <(), RelayError>
 {
-	let addr = SocketAddr::from ((
-		[0, 0, 0, 0],
-		4000,
-	));
-	
 	if let Some (config_reload_path) = config_reload_path {
 		let state_2 = state.clone ();
 		tokio::spawn (async move {
@@ -619,6 +620,11 @@ pub async fn run_relay (
 		}
 	});
+	let addr = SocketAddr::from ((
+		[0, 0, 0, 0],
+		state.config.read ().await.port.unwrap_or (4000),
+	));
+	
 	let server = Server::bind (&addr)
 	.serve (make_svc);


@@ -36,7 +36,8 @@ stronger is ready.
 - (X) Add hash of 1 scraper key to ptth_relay.toml, with 1 week expiration
 - (X) Accept scraper key for some testing endpoint
 - (X) (POC) Test with curl
-- ( ) Clean up scraper endpoint
+- (X) Clean up scraper endpoint
+- ( ) Add end-to-end tests for scraper endpoint
 - ( ) Manually create SQLite DB for scraper keys, add 1 hash
 - ( ) Impl DB reads
 - ( ) Remove scraper key from config file
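
For the checked-off ptth_relay.toml item above, a hypothetical sketch of how a key's stored hash might be derived. The base64-of-blake3 encoding is an assumption, not something this commit confirms:

```rust
// Hypothetical sketch: compute the hash that ptth_relay.toml would
// store for a scraper key. The base64-of-blake3 encoding is assumed,
// not confirmed by this commit.
fn key_hash_for_config (key: &[u8]) -> String {
	let hash = blake3::hash (key);
	base64::encode (hash.as_bytes ())
}
```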


@@ -1,5 +1,5 @@
 use std::{
-	convert::TryFrom,
+	convert::{TryFrom, TryInto},
 	sync::{
 		Arc,
 	},
@@ -13,17 +13,17 @@ use tokio::{
 	time::delay_for,
 };
 
-#[test]
-fn end_to_end () {
 use reqwest::Client;
 use tracing::{debug, info};
 
+#[test]
+fn end_to_end () {
 	use ptth_relay::key_validity::BlakeHashWrapper;
 	
 	// Prefer this form for tests, since all tests share one process
 	// and we don't care if another test already installed a subscriber.
-	tracing_subscriber::fmt ().try_init ().ok ();
+	//tracing_subscriber::fmt ().try_init ().ok ();
 	
 	let mut rt = Runtime::new ().expect ("Can't create runtime for testing");
 	
 	// Spawn the root task
@@ -126,3 +126,52 @@ fn end_to_end () {
 		info! ("Server stopped");
 	});
 }
+
+#[test]
+fn scraper_endpoints () {
+	let mut rt = Runtime::new ().expect ("Can't create runtime for testing");
+	
+	rt.block_on (async {
+		use ptth_relay::*;
+		
+		let config_file = config::file::Config {
+			port: Some (4001),
+			servers: vec! [
+			],
+			iso: config::file::Isomorphic {
+				enable_scraper_auth: true,
+				dev_mode: Some (config::file::DevMode {
+					scraper_key: Some (key_validity::ScraperKey::new (b"bogus")),
+				}),
+			},
+		};
+		let config = config::Config::try_from (config_file).expect ("Can't load config");
+		
+		let relay_state = Arc::new (RelayState::try_from (config).expect ("Can't create relay state"));
+		let relay_state_2 = relay_state.clone ();
+		let (stop_relay_tx, stop_relay_rx) = oneshot::channel ();
+		let task_relay = spawn (async move {
+			run_relay (relay_state_2, stop_relay_rx, None).await
+		});
+		
+		let relay_url = "http://127.0.0.1:4001";
+		
+		let mut headers = reqwest::header::HeaderMap::new ();
+		headers.insert ("X-ApiKey", "bogus".try_into ().unwrap ());
+		let client = Client::builder ()
+			.default_headers (headers)
+			.timeout (Duration::from_secs (2))
+			.build ().expect ("Couldn't build HTTP client");
+		
+		let resp = client.get (&format! ("{}/scraper/api/test", relay_url))
+			.send ().await.expect ("Couldn't check if relay is up")
+			.bytes ().await.expect ("Couldn't read response body");
+		assert_eq! (resp, "You're valid!\n");
+		
+		stop_relay_tx.send (()).expect ("Couldn't shut down relay");
+		task_relay.await.expect ("Couldn't join relay").expect ("Relay error");
+	});
+}
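
One possible follow-up, not part of this commit: exercising the negative path of the `enable_scraper_auth` gate added in `handle_scraper_api`. A hedged sketch, assuming the relay is started with scraper auth disabled:

```rust
// Hypothetical helper (not in this commit): with enable_scraper_auth
// set to false, the gate in handle_scraper_api should answer 403
// FORBIDDEN with the "Scraper API disabled" body.
async fn assert_scraper_api_disabled (client: &reqwest::Client, relay_url: &str) {
	let resp = client.get (&format! ("{}/scraper/api/test", relay_url))
		.send ().await.expect ("Couldn't reach relay");
	assert_eq! (resp.status (), reqwest::StatusCode::FORBIDDEN);
}
```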