
Commit 86ce93c

feat(http): [#192] scrape request for Axum HTTP tracker
1 parent ae1a076 commit 86ce93c

7 files changed: +162 -64 lines changed

Lines changed: 2 additions & 11 deletions
@@ -1,14 +1,13 @@
 use std::sync::Arc;
 
 use axum::extract::State;
-use axum::http::StatusCode;
 use axum::response::{IntoResponse, Response};
 use log::debug;
 
 use crate::http::axum_implementation::extractors::peer_ip;
 use crate::http::axum_implementation::extractors::remote_client_ip::RemoteClientIp;
 use crate::http::axum_implementation::extractors::scrape_request::ExtractRequest;
-use crate::http::axum_implementation::services;
+use crate::http::axum_implementation::{responses, services};
 use crate::tracker::Tracker;
 
 #[allow(clippy::unused_async)]
@@ -19,20 +18,12 @@ pub async fn handle(
 ) -> Response {
     debug!("http scrape request: {:#?}", &scrape_request);
 
-    /*
-        todo:
-        - [x] Add the service that sends the event for statistics.
-        - [ ] Build the HTTP bencoded response.
-    */
-
     let peer_ip = match peer_ip::resolve(tracker.config.on_reverse_proxy, &remote_client_ip) {
         Ok(peer_ip) => peer_ip,
         Err(err) => return err,
     };
 
     let scrape_data = services::scrape::invoke(tracker.clone(), &scrape_request.info_hashes, &peer_ip).await;
 
-    debug!("scrape data: {:#?}", &scrape_data);
-
-    (StatusCode::OK, "todo").into_response()
+    responses::scrape::Bencoded::from(scrape_data).into_response()
 }
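
The handler no longer returns the "todo" placeholder; it builds the body with the new `responses::scrape::Bencoded` type. For orientation, here is a sketch of the whole handler after this change, assembled from the hunks above. The extractor arguments in the signature are assumptions (the diff only shows the tail of the signature), so treat this as illustrative rather than the literal file contents:

    #[allow(clippy::unused_async)]
    pub async fn handle(
        State(tracker): State<Arc<Tracker>>,            // assumed, not shown in the diff
        ExtractRequest(scrape_request): ExtractRequest, // assumed, not shown in the diff
        remote_client_ip: RemoteClientIp,               // assumed, not shown in the diff
    ) -> Response {
        debug!("http scrape request: {:#?}", &scrape_request);

        // Resolve the peer IP, honouring the reverse-proxy configuration.
        let peer_ip = match peer_ip::resolve(tracker.config.on_reverse_proxy, &remote_client_ip) {
            Ok(peer_ip) => peer_ip,
            Err(err) => return err,
        };

        // Collect swarm metadata for every requested info-hash.
        let scrape_data = services::scrape::invoke(tracker.clone(), &scrape_request.info_hashes, &peer_ip).await;

        // New in this commit: bencode the scrape data as the HTTP response body.
        responses::scrape::Bencoded::from(scrape_data).into_response()
    }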

src/http/axum_implementation/requests/scrape.rs

Lines changed: 9 additions & 19 deletions
@@ -10,8 +10,8 @@ use crate::protocol::info_hash::{ConversionError, InfoHash};
 
 pub type NumberOfBytes = i64;
 
-// Query param name
-const INFO_HASH_SCRAPE_PARAM: &str = "info_hash";
+// Query param names
+const INFO_HASH: &str = "info_hash";
 
 #[derive(Debug, PartialEq)]
 pub struct Scrape {
@@ -27,12 +27,6 @@ pub enum ParseScrapeQueryError {
         location: &'static Location<'static>,
         param_name: String,
     },
-    #[error("invalid param value {param_value} for {param_name} in {location}")]
-    InvalidParam {
-        param_name: String,
-        param_value: String,
-        location: &'static Location<'static>,
-    },
     #[error("invalid param value {param_value} for {param_name} in {source}")]
     InvalidInfoHashParam {
         param_name: String,
@@ -60,14 +54,14 @@ impl TryFrom<Query> for Scrape {
 }
 
 fn extract_info_hashes(query: &Query) -> Result<Vec<InfoHash>, ParseScrapeQueryError> {
-    match query.get_param_vec(INFO_HASH_SCRAPE_PARAM) {
+    match query.get_param_vec(INFO_HASH) {
         Some(raw_params) => {
            let mut info_hashes = vec![];
 
            for raw_param in raw_params {
                let info_hash =
                    percent_decode_info_hash(&raw_param).map_err(|err| ParseScrapeQueryError::InvalidInfoHashParam {
-                        param_name: INFO_HASH_SCRAPE_PARAM.to_owned(),
+                        param_name: INFO_HASH.to_owned(),
                        param_value: raw_param.clone(),
                        source: Located(err).into(),
                    })?;
@@ -80,7 +74,7 @@ fn extract_info_hashes(query: &Query) -> Result<Vec<InfoHash>, ParseScrapeQueryE
         None => {
            return Err(ParseScrapeQueryError::MissingParam {
                location: Location::caller(),
-                param_name: INFO_HASH_SCRAPE_PARAM.to_owned(),
+                param_name: INFO_HASH.to_owned(),
            })
        }
    }
@@ -92,16 +86,12 @@ mod tests {
     mod scrape_request {
 
         use crate::http::axum_implementation::query::Query;
-        use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH_SCRAPE_PARAM};
+        use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH};
         use crate::protocol::info_hash::InfoHash;
 
         #[test]
         fn should_be_instantiated_from_the_url_query_with_only_one_infohash() {
-            let raw_query = Query::from(vec![(
-                INFO_HASH_SCRAPE_PARAM,
-                "%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0",
-            )])
-            .to_string();
+            let raw_query = Query::from(vec![(INFO_HASH, "%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0")]).to_string();
 
             let query = raw_query.parse::<Query>().unwrap();
 
@@ -118,7 +108,7 @@ mod tests {
     mod when_it_is_instantiated_from_the_url_query_params {
 
         use crate::http::axum_implementation::query::Query;
-        use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH_SCRAPE_PARAM};
+        use crate::http::axum_implementation::requests::scrape::{Scrape, INFO_HASH};
 
         #[test]
         fn it_should_fail_if_the_query_does_not_include_the_info_hash_param() {
@@ -129,7 +119,7 @@ mod tests {
 
         #[test]
         fn it_should_fail_if_the_info_hash_param_is_invalid() {
-            let raw_query = Query::from(vec![(INFO_HASH_SCRAPE_PARAM, "INVALID_INFO_HASH_VALUE")]).to_string();
+            let raw_query = Query::from(vec![(INFO_HASH, "INVALID_INFO_HASH_VALUE")]).to_string();
 
             assert!(Scrape::try_from(raw_query.parse::<Query>().unwrap()).is_err());
         }
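
Because `extract_info_hashes` uses `get_param_vec`, every occurrence of the `info_hash` key is collected, so one request can scrape several torrents. A minimal usage sketch, assuming `Query`'s `FromStr` implementation accepts a standard `&`-separated query string (the second percent-encoded hash is made up for illustration):

    use crate::http::axum_implementation::query::Query;
    use crate::http::axum_implementation::requests::scrape::Scrape;

    fn parse_scrape_with_two_torrents() {
        // Two percent-encoded 20-byte info-hashes repeated under the same param name.
        let raw_query = "info_hash=%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0&info_hash=%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C1";

        let query = raw_query.parse::<Query>().unwrap();
        let scrape = Scrape::try_from(query).unwrap();

        assert_eq!(scrape.info_hashes.len(), 2);
    }
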
Lines changed: 1 addition & 0 deletions
@@ -1,3 +1,4 @@
 pub mod announce;
 pub mod error;
 pub mod ok;
+pub mod scrape;
Lines changed: 106 additions & 0 deletions
@@ -0,0 +1,106 @@
+use std::borrow::Cow;
+
+use axum::http::StatusCode;
+use axum::response::{IntoResponse, Response};
+use bip_bencode::{ben_int, ben_map, BMutAccess};
+
+use crate::tracker::ScrapeData;
+
+#[derive(Debug, PartialEq, Default)]
+pub struct Bencoded {
+    scrape_data: ScrapeData,
+}
+
+impl Bencoded {
+    /// # Panics
+    ///
+    /// Will return an error if it can't access the bencode as a mutable `BDictAccess`.
+    #[must_use]
+    pub fn body(&self) -> Vec<u8> {
+        let mut scrape_list = ben_map!();
+
+        let scrape_list_mut = scrape_list.dict_mut().unwrap();
+
+        for (info_hash, value) in &self.scrape_data.files {
+            scrape_list_mut.insert(
+                Cow::from(info_hash.bytes().to_vec()),
+                ben_map! {
+                    "complete" => ben_int!(i64::from(value.complete)),
+                    "downloaded" => ben_int!(i64::from(value.downloaded)),
+                    "incomplete" => ben_int!(i64::from(value.incomplete))
+                },
+            );
+        }
+
+        (ben_map! {
+            "files" => scrape_list
+        })
+        .encode()
+    }
+}
+
+impl From<ScrapeData> for Bencoded {
+    fn from(scrape_data: ScrapeData) -> Self {
+        Self { scrape_data }
+    }
+}
+
+impl IntoResponse for Bencoded {
+    fn into_response(self) -> Response {
+        (StatusCode::OK, self.body()).into_response()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+
+    mod scrape_response {
+        use crate::http::axum_implementation::responses::scrape::Bencoded;
+        use crate::protocol::info_hash::InfoHash;
+        use crate::tracker::torrent::SwarmMetadata;
+        use crate::tracker::ScrapeData;
+
+        fn sample_scrape_data() -> ScrapeData {
+            let info_hash = InfoHash([0x69; 20]);
+            let mut scrape_data = ScrapeData::empty();
+            scrape_data.add_file(
+                &info_hash,
+                SwarmMetadata {
+                    complete: 1,
+                    downloaded: 2,
+                    incomplete: 3,
+                },
+            );
+            scrape_data
+        }
+
+        #[test]
+        fn should_be_converted_from_scrape_data() {
+            let response = Bencoded::from(sample_scrape_data());
+
+            assert_eq!(
+                response,
+                Bencoded {
+                    scrape_data: sample_scrape_data()
+                }
+            );
+        }
+
+        #[test]
+        fn should_be_bencoded() {
+            let response = Bencoded {
+                scrape_data: sample_scrape_data(),
+            };
+
+            let bytes = response.body();
+
+            // cspell:disable-next-line
+            let expected_bytes = b"d5:filesd20:iiiiiiiiiiiiiiiiiiiid8:completei1e10:downloadedi2e10:incompletei3eeee";
+
+            assert_eq!(
+                String::from_utf8(bytes).unwrap(),
+                String::from_utf8(expected_bytes.to_vec()).unwrap()
+            );
+        }
+    }
+}
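
The `should_be_bencoded` test pins down the wire format: a top-level dictionary with a single `files` key, mapping each raw 20-byte info-hash to its `complete`, `downloaded` and `incomplete` counters. The expected body from that test is easier to read with whitespace added (real bencode has none; `[0x69; 20]` is twenty ASCII 'i' bytes):

    d
      5:files
      d
        20:iiiiiiiiiiiiiiiiiiii
        d
          8:complete i1e
          10:downloaded i2e
          10:incomplete i3e
        e
      e
    e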

src/tracker/mod.rs

Lines changed: 1 addition & 1 deletion
@@ -52,7 +52,7 @@ pub struct AnnounceData
 
 #[derive(Debug, PartialEq, Default)]
 pub struct ScrapeData {
-    files: HashMap<InfoHash, SwarmMetadata>,
+    pub files: HashMap<InfoHash, SwarmMetadata>,
 }
 
 impl ScrapeData {
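
Making `files` public lets code outside the `tracker` module, such as the new Axum bencoded response, walk the per-torrent swarm metadata directly. A minimal sketch of that access pattern; `total_seeders` is a hypothetical helper, and the `i64::from` conversion mirrors the one used in the response body above:

    use crate::tracker::ScrapeData;

    // Hypothetical helper, only to illustrate what the `pub` field allows.
    fn total_seeders(scrape_data: &ScrapeData) -> i64 {
        scrape_data
            .files
            .values()
            .map(|file| i64::from(file.complete))
            .sum()
    }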

tests/http/asserts.rs

Lines changed: 30 additions & 16 deletions
@@ -78,6 +78,36 @@ pub async fn assert_is_announce_response(response: Response) {
 
 // Error responses
 
+// Specific errors for announce request
+
+pub async fn assert_missing_query_params_for_announce_request_error_response(response: Response) {
+    assert_eq!(response.status(), 200);
+
+    assert_bencoded_error(
+        &response.text().await.unwrap(),
+        "missing query params for announce request",
+        Location::caller(),
+    );
+}
+
+pub async fn assert_bad_announce_request_error_response(response: Response, failure: &str) {
+    assert_cannot_parse_query_params_error_response(response, &format!(" for announce request: {failure}")).await;
+}
+
+// Specific errors for scrape request
+
+pub async fn assert_missing_query_params_for_scrape_request_error_response(response: Response) {
+    assert_eq!(response.status(), 200);
+
+    assert_bencoded_error(
+        &response.text().await.unwrap(),
+        "missing query params for scrape request",
+        Location::caller(),
+    );
+}
+
+// Other errors
+
 pub async fn assert_internal_server_error_response(response: Response) {
     assert_eq!(response.status(), 200);
 
@@ -156,22 +186,6 @@ pub async fn assert_invalid_remote_address_on_xff_header_error_response(response
     );
 }
 
-// Specific errors for announce request
-
-pub async fn assert_missing_query_params_for_announce_request_error_response(response: Response) {
-    assert_eq!(response.status(), 200);
-
-    assert_bencoded_error(
-        &response.text().await.unwrap(),
-        "missing query params for announce request",
-        Location::caller(),
-    );
-}
-
-pub async fn assert_bad_announce_request_error_response(response: Response, failure: &str) {
-    assert_cannot_parse_query_params_error_response(response, &format!(" for announce request: {failure}")).await;
-}
-
 pub async fn assert_cannot_parse_query_param_error_response(response: Response, failure: &str) {
     assert_cannot_parse_query_params_error_response(response, &format!(": {failure}")).await;
 }

tests/http_tracker.rs

Lines changed: 13 additions & 17 deletions
@@ -2198,24 +2198,25 @@ mod axum_http_tracker_server {
         use torrust_tracker::tracker::peer;
 
         use crate::common::fixtures::{invalid_info_hashes, PeerBuilder};
-        use crate::http::asserts::{assert_internal_server_error_response, assert_scrape_response};
+        use crate::http::asserts::{
+            assert_cannot_parse_query_params_error_response, assert_missing_query_params_for_scrape_request_error_response,
+            assert_scrape_response,
+        };
         use crate::http::client::Client;
         use crate::http::requests;
         use crate::http::requests::scrape::QueryBuilder;
         use crate::http::responses::scrape::{self, File, ResponseBuilder};
         use crate::http::server::{start_ipv6_http_tracker, start_public_http_tracker};
 
-        //#[tokio::test]
-        #[allow(dead_code)]
-        async fn should_fail_when_the_request_is_empty() {
+        #[tokio::test]
+        async fn should_fail_when_the_url_query_component_is_empty() {
            let http_tracker_server = start_public_http_tracker(Version::Axum).await;
            let response = Client::new(http_tracker_server.get_connection_info()).get("scrape").await;
 
-            assert_internal_server_error_response(response).await;
+            assert_missing_query_params_for_scrape_request_error_response(response).await;
        }
 
-        //#[tokio::test]
-        #[allow(dead_code)]
+        #[tokio::test]
         async fn should_fail_when_the_info_hash_param_is_invalid() {
             let http_tracker_server = start_public_http_tracker(Version::Axum).await;
 
@@ -2228,13 +2229,11 @@ mod axum_http_tracker_server {
                     .get(&format!("announce?{params}"))
                     .await;
 
-                // code-review: it's not returning the invalid info hash error
-                assert_internal_server_error_response(response).await;
+                assert_cannot_parse_query_params_error_response(response, "").await;
             }
         }
 
-        //#[tokio::test]
-        #[allow(dead_code)]
+        #[tokio::test]
         async fn should_return_the_file_with_the_incomplete_peer_when_there_is_one_peer_with_bytes_pending_to_download() {
             let http_tracker = start_public_http_tracker(Version::Axum).await;
 
@@ -2272,8 +2271,7 @@ mod axum_http_tracker_server {
             assert_scrape_response(response, &expected_scrape_response).await;
         }
 
-        //#[tokio::test]
-        #[allow(dead_code)]
+        #[tokio::test]
         async fn should_return_the_file_with_the_complete_peer_when_there_is_one_peer_with_no_bytes_pending_to_download() {
             let http_tracker = start_public_http_tracker(Version::Axum).await;
 
@@ -2311,8 +2309,7 @@ mod axum_http_tracker_server {
             assert_scrape_response(response, &expected_scrape_response).await;
         }
 
-        //#[tokio::test]
-        #[allow(dead_code)]
+        #[tokio::test]
         async fn should_return_a_file_with_zeroed_values_when_there_are_no_peers() {
             let http_tracker = start_public_http_tracker(Version::Axum).await;
 
@@ -2329,8 +2326,7 @@ mod axum_http_tracker_server {
             assert_scrape_response(response, &scrape::Response::with_one_file(info_hash.bytes(), File::zeroed())).await;
         }
 
-        //#[tokio::test]
-        #[allow(dead_code)]
+        #[tokio::test]
         async fn should_accept_multiple_infohashes() {
             let http_tracker = start_public_http_tracker(Version::Axum).await;
 