
Commit ea8d4d8

feat(http): [#191] add route and extractor for scrape req in Axum HTTP tracker
with only one infohash in the URL: http://localhost:7070/scrape?info_hash=%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0 It does not allow more than one infohash yet.
1 parent b826f59 commit ea8d4d8
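
Note: the info_hash value in that URL is the standard BitTorrent percent-encoding of a 20-byte SHA-1 info-hash. A minimal decoding sketch (not part of the commit), assuming the percent-encoding crate; the tracker itself uses its own percent_decode_info_hash helper, shown further down:

use percent_encoding::percent_decode_str;

fn main() {
    // The raw query value from the commit message above.
    let raw = "%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0";

    // Percent-decoding yields the 20 raw bytes of the info-hash.
    let bytes: Vec<u8> = percent_decode_str(raw).collect();
    assert_eq!(bytes.len(), 20);

    // Hex-encoded, it matches the value asserted in this commit's tests.
    let hex: String = bytes.iter().map(|b| format!("{b:02x}")).collect();
    assert_eq!(hex, "3b245504cf5f11bbdbe1201cea6a6bf45aee1bc0");
}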

File tree

8 files changed: +211 -5 lines changed

src/http/axum_implementation/extractors/mod.rs

Lines changed: 1 addition & 0 deletions

@@ -1,3 +1,4 @@
 pub mod announce_request;
 pub mod peer_ip;
 pub mod remote_client_ip;
+pub mod scrape_request;

src/http/axum_implementation/extractors/scrape_request.rs

Lines changed: 45 additions & 0 deletions

@@ -0,0 +1,45 @@
use std::panic::Location;

use axum::async_trait;
use axum::extract::FromRequestParts;
use axum::http::request::Parts;
use axum::response::{IntoResponse, Response};

use crate::http::axum_implementation::query::Query;
use crate::http::axum_implementation::requests::scrape::{ParseScrapeQueryError, Scrape};
use crate::http::axum_implementation::responses;

pub struct ExtractRequest(pub Scrape);

#[async_trait]
impl<S> FromRequestParts<S> for ExtractRequest
where
    S: Send + Sync,
{
    type Rejection = Response;

    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        let raw_query = parts.uri.query();

        if raw_query.is_none() {
            return Err(responses::error::Error::from(ParseScrapeQueryError::MissingParams {
                location: Location::caller(),
            })
            .into_response());
        }

        let query = raw_query.unwrap().parse::<Query>();

        if let Err(error) = query {
            return Err(responses::error::Error::from(error).into_response());
        }

        let scrape_request = Scrape::try_from(query.unwrap());

        if let Err(error) = scrape_request {
            return Err(responses::error::Error::from(error).into_response());
        }

        Ok(ExtractRequest(scrape_request.unwrap()))
    }
}
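
As a hedged aside (not part of the commit): the same extraction flow can be written with ok_or_else/map_err and the ? operator instead of is_none() checks followed by unwrap(). Behaviour is equivalent; this is only a drop-in body for the from_request_parts method above, assuming the same imports and impl context:

    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        // Missing query string: same MissingParams rejection as above.
        let raw_query = parts.uri.query().ok_or_else(|| {
            responses::error::Error::from(ParseScrapeQueryError::MissingParams {
                location: Location::caller(),
            })
            .into_response()
        })?;

        // Parse the raw query string, converting the error into a rejection response.
        let query = raw_query
            .parse::<Query>()
            .map_err(|error| responses::error::Error::from(error).into_response())?;

        // Build the scrape request, again converting the error into a rejection.
        let scrape_request =
            Scrape::try_from(query).map_err(|error| responses::error::Error::from(error).into_response())?;

        Ok(ExtractRequest(scrape_request))
    }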

src/http/axum_implementation/handlers/mod.rs

Lines changed: 1 addition & 0 deletions

@@ -1,2 +1,3 @@
 pub mod announce;
+pub mod scrape;
 pub mod status;

src/http/axum_implementation/handlers/scrape.rs

Lines changed: 19 additions & 0 deletions

@@ -0,0 +1,19 @@
use std::sync::Arc;

use axum::extract::State;
use log::debug;

use crate::http::axum_implementation::extractors::remote_client_ip::RemoteClientIp;
use crate::http::axum_implementation::extractors::scrape_request::ExtractRequest;
use crate::tracker::Tracker;

#[allow(clippy::unused_async)]
pub async fn handle(
    State(_tracker): State<Arc<Tracker>>,
    ExtractRequest(scrape_request): ExtractRequest,
    _remote_client_ip: RemoteClientIp,
) -> String {
    debug!("http scrape request: {:#?}", &scrape_request);

    format!("{:#?}", &scrape_request)
}
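
For the sample URL in the commit message, the new handler simply Debug-formats the parsed request; a real bencoded scrape response is not implemented yet. A minimal illustration (not part of the commit) of what handle returns, reusing the Scrape type and the info-hash asserted in the tests below:

use crate::http::axum_implementation::requests::scrape::Scrape;
use crate::protocol::info_hash::InfoHash;

fn sample_response_body() -> String {
    let scrape_request = Scrape {
        info_hashes: vec!["3b245504cf5f11bbdbe1201cea6a6bf45aee1bc0".parse::<InfoHash>().unwrap()],
    };

    // Same formatting expression as `handle` above.
    format!("{:#?}", &scrape_request)
}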

src/http/axum_implementation/handlers/status.rs

Lines changed: 1 addition & 1 deletion

@@ -7,6 +7,6 @@ use crate::http::axum_implementation::resources::ok::Ok;
 use crate::http::axum_implementation::responses::ok;

 #[allow(clippy::unused_async)]
-pub async fn get_status_handler(remote_client_ip: RemoteClientIp) -> Json<Ok> {
+pub async fn handle(remote_client_ip: RemoteClientIp) -> Json<Ok> {
     ok::response(&remote_client_ip)
 }

src/http/axum_implementation/requests/mod.rs

Lines changed: 1 addition & 0 deletions

@@ -1 +1,2 @@
 pub mod announce;
+pub mod scrape;

src/http/axum_implementation/requests/scrape.rs

Lines changed: 137 additions & 0 deletions

@@ -0,0 +1,137 @@
use std::panic::Location;

use thiserror::Error;

use crate::http::axum_implementation::query::Query;
use crate::http::axum_implementation::responses;
use crate::http::percent_encoding::percent_decode_info_hash;
use crate::located_error::{Located, LocatedError};
use crate::protocol::info_hash::{ConversionError, InfoHash};

pub type NumberOfBytes = i64;

// Query param name
const INFO_HASH_SCRAPE_PARAM: &str = "info_hash";

#[derive(Debug, PartialEq)]
pub struct Scrape {
    pub info_hashes: Vec<InfoHash>,
}

#[derive(Error, Debug)]
pub enum ParseScrapeQueryError {
    #[error("missing query params for scrape request in {location}")]
    MissingParams { location: &'static Location<'static> },
    #[error("missing param {param_name} in {location}")]
    MissingParam {
        location: &'static Location<'static>,
        param_name: String,
    },
    #[error("invalid param value {param_value} for {param_name} in {location}")]
    InvalidParam {
        param_name: String,
        param_value: String,
        location: &'static Location<'static>,
    },
    #[error("invalid param value {param_value} for {param_name} in {source}")]
    InvalidInfoHashParam {
        param_name: String,
        param_value: String,
        source: LocatedError<'static, ConversionError>,
    },
}

impl From<ParseScrapeQueryError> for responses::error::Error {
    fn from(err: ParseScrapeQueryError) -> Self {
        responses::error::Error {
            failure_reason: format!("Cannot parse query params for scrape request: {err}"),
        }
    }
}

impl TryFrom<Query> for Scrape {
    type Error = ParseScrapeQueryError;

    fn try_from(query: Query) -> Result<Self, Self::Error> {
        Ok(Self {
            info_hashes: extract_info_hashes(&query)?,
        })
    }
}

fn extract_info_hashes(query: &Query) -> Result<Vec<InfoHash>, ParseScrapeQueryError> {
    match query.get_param(INFO_HASH_SCRAPE_PARAM) {
        Some(raw_param) => {
            let mut info_hashes = vec![];

            // todo: multiple infohashes

            let info_hash = percent_decode_info_hash(&raw_param).map_err(|err| ParseScrapeQueryError::InvalidInfoHashParam {
                param_name: INFO_HASH_SCRAPE_PARAM.to_owned(),
                param_value: raw_param.clone(),
                source: Located(err).into(),
            })?;

            info_hashes.push(info_hash);

            Ok(info_hashes)
        }
        None => {
            return Err(ParseScrapeQueryError::MissingParam {
                location: Location::caller(),
                param_name: INFO_HASH_SCRAPE_PARAM.to_owned(),
            })
        }
    }
}

#[cfg(test)]
mod tests {

    mod scrape_request {

        use crate::http::axum_implementation::query::Query;
        use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH_SCRAPE_PARAM};
        use crate::protocol::info_hash::InfoHash;

        #[test]
        fn should_be_instantiated_from_the_url_query_with_only_one_infohash() {
            let raw_query = Query::from(vec![(
                INFO_HASH_SCRAPE_PARAM,
                "%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0",
            )])
            .to_string();

            let query = raw_query.parse::<Query>().unwrap();

            let scrape_request = Scrape::try_from(query).unwrap();

            assert_eq!(
                scrape_request,
                Scrape {
                    info_hashes: vec!["3b245504cf5f11bbdbe1201cea6a6bf45aee1bc0".parse::<InfoHash>().unwrap()],
                }
            );
        }

        mod when_it_is_instantiated_from_the_url_query_params {

            use crate::http::axum_implementation::query::Query;
            use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH_SCRAPE_PARAM};

            #[test]
            fn it_should_fail_if_the_query_does_not_include_the_info_hash_param() {
                let raw_query_without_info_hash = "another_param=NOT_RELEVANT";

                assert!(Scrape::try_from(raw_query_without_info_hash.parse::<Query>().unwrap()).is_err());
            }

            #[test]
            fn it_should_fail_if_the_info_hash_param_is_invalid() {
                let raw_query = Query::from(vec![(INFO_HASH_SCRAPE_PARAM, "INVALID_INFO_HASH_VALUE")]).to_string();

                assert!(Scrape::try_from(raw_query.parse::<Query>().unwrap()).is_err());
            }
        }
    }
}
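
A hedged sketch of the pending todo above (multiple infohashes). The BitTorrent scrape convention repeats the param, e.g. /scrape?info_hash=...&info_hash=.... This assumes a hypothetical Query::get_param_vec that returns every occurrence of a repeated param; the Query type in this commit only exposes get_param, so the sketch is not implementable as written:

fn extract_info_hashes_multi(query: &Query) -> Result<Vec<InfoHash>, ParseScrapeQueryError> {
    // Hypothetical helper: all values for repeated `info_hash` params.
    let raw_params = query.get_param_vec(INFO_HASH_SCRAPE_PARAM);

    if raw_params.is_empty() {
        return Err(ParseScrapeQueryError::MissingParam {
            location: Location::caller(),
            param_name: INFO_HASH_SCRAPE_PARAM.to_owned(),
        });
    }

    let mut info_hashes = vec![];

    for raw_param in raw_params {
        // Same per-value decoding and error wrapping as `extract_info_hashes`.
        let info_hash = percent_decode_info_hash(&raw_param).map_err(|err| ParseScrapeQueryError::InvalidInfoHashParam {
            param_name: INFO_HASH_SCRAPE_PARAM.to_owned(),
            param_value: raw_param.clone(),
            source: Located(err).into(),
        })?;

        info_hashes.push(info_hash);
    }

    Ok(info_hashes)
}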

src/http/axum_implementation/routes.rs

Lines changed: 6 additions & 4 deletions

@@ -4,15 +4,17 @@ use axum::routing::get;
 use axum::Router;
 use axum_client_ip::SecureClientIpSource;

-use super::handlers::announce::handle;
-use super::handlers::status::get_status_handler;
+use super::handlers::{announce, scrape, status};
 use crate::tracker::Tracker;

 pub fn router(tracker: &Arc<Tracker>) -> Router {
     Router::new()
         // Status
-        .route("/status", get(get_status_handler))
+        .route("/status", get(status::handle))
         // Announce request
-        .route("/announce", get(handle).with_state(tracker.clone()))
+        .route("/announce", get(announce::handle).with_state(tracker.clone()))
+        // Scrape request
+        .route("/scrape", get(scrape::handle).with_state(tracker.clone()))
+        // Add extension to get the client IP from the connection info
         .layer(SecureClientIpSource::ConnectInfo.into_extension())
 }
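
A hedged serving sketch (not in this commit): because the final layer reads the client IP via SecureClientIpSource::ConnectInfo, the server must be started with into_make_service_with_connect_info, which injects each connection's remote address. Assumes axum 0.6-style serving, consistent with the axum::async_trait import used elsewhere in this commit:

use std::net::SocketAddr;
use std::sync::Arc;

use crate::tracker::Tracker;

pub async fn serve(tracker: Arc<Tracker>) {
    // Port 7070 matches the example URL in the commit message.
    let addr = SocketAddr::from(([0, 0, 0, 0], 7070));

    axum::Server::bind(&addr)
        .serve(router(&tracker).into_make_service_with_connect_info::<SocketAddr>())
        .await
        .expect("HTTP tracker server should start");
}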
