diff --git a/src/http/axum_implementation/handlers/announce.rs b/src/http/axum_implementation/handlers/announce.rs
index 3ad11df51..b9b964605 100644
--- a/src/http/axum_implementation/handlers/announce.rs
+++ b/src/http/axum_implementation/handlers/announce.rs
@@ -7,6 +7,7 @@ use axum::extract::{Path, State};
 use axum::response::{IntoResponse, Response};
 use log::debug;

+use super::auth::KeyIdParam;
 use crate::http::axum_implementation::extractors::announce_request::ExtractRequest;
 use crate::http::axum_implementation::extractors::peer_ip;
 use crate::http::axum_implementation::extractors::remote_client_ip::RemoteClientIp;
@@ -41,11 +42,19 @@ pub async fn handle_without_key(
 pub async fn handle_with_key(
     State(tracker): State<Arc<Tracker>>,
     ExtractRequest(announce_request): ExtractRequest,
-    Path(key_id): Path<KeyId>,
+    Path(key_id_param): Path<KeyIdParam>,
     remote_client_ip: RemoteClientIp,
 ) -> Response {
     debug!("http announce request: {:#?}", announce_request);

+    let Ok(key_id) = key_id_param.value().parse::<KeyId>() else {
+        return responses::error::Error::from(
+            auth::Error::InvalidKeyFormat {
+                location: Location::caller()
+            })
+        .into_response()
+    };
+
     match auth::authenticate(&key_id, &tracker).await {
         Ok(_) => (),
         Err(error) => return responses::error::Error::from(error).into_response(),
diff --git a/src/http/axum_implementation/handlers/auth.rs b/src/http/axum_implementation/handlers/auth.rs
index 13f5b27e6..3b9aebc23 100644
--- a/src/http/axum_implementation/handlers/auth.rs
+++ b/src/http/axum_implementation/handlers/auth.rs
@@ -1,16 +1,29 @@
 use std::panic::Location;
 use std::sync::Arc;

+use serde::Deserialize;
 use thiserror::Error;

 use crate::http::axum_implementation::responses;
 use crate::tracker::auth::{self, KeyId};
 use crate::tracker::Tracker;

+#[derive(Deserialize)]
+pub struct KeyIdParam(String);
+
+impl KeyIdParam {
+    #[must_use]
+    pub fn value(&self) -> String {
+        self.0.clone()
+    }
+}
+
 #[derive(Debug, Error)]
 pub enum Error {
     #[error("Missing authentication key for private tracker. Error in {location}")]
     MissingAuthKey { location: &'static Location<'static> },
+    #[error("Invalid authentication key format. Error in {location}")]
+    InvalidKeyFormat { location: &'static Location<'static> },
 }

 /// # Errors
diff --git a/src/http/axum_implementation/handlers/scrape.rs b/src/http/axum_implementation/handlers/scrape.rs
index 51b6fa84d..814cdbfa4 100644
--- a/src/http/axum_implementation/handlers/scrape.rs
+++ b/src/http/axum_implementation/handlers/scrape.rs
@@ -1,29 +1,79 @@
+use std::panic::Location;
 use std::sync::Arc;

-use axum::extract::State;
+use axum::extract::{Path, State};
 use axum::response::{IntoResponse, Response};
 use log::debug;

+use super::auth::KeyIdParam;
 use crate::http::axum_implementation::extractors::peer_ip;
 use crate::http::axum_implementation::extractors::remote_client_ip::RemoteClientIp;
 use crate::http::axum_implementation::extractors::scrape_request::ExtractRequest;
+use crate::http::axum_implementation::handlers::auth;
+use crate::http::axum_implementation::requests::scrape::Scrape;
 use crate::http::axum_implementation::{responses, services};
+use crate::tracker::auth::KeyId;
 use crate::tracker::Tracker;

 #[allow(clippy::unused_async)]
-pub async fn handle(
+pub async fn handle_without_key(
     State(tracker): State<Arc<Tracker>>,
     ExtractRequest(scrape_request): ExtractRequest,
     remote_client_ip: RemoteClientIp,
 ) -> Response {
     debug!("http scrape request: {:#?}", &scrape_request);

-    let peer_ip = match peer_ip::resolve(tracker.config.on_reverse_proxy, &remote_client_ip) {
+    if tracker.is_private() {
+        return handle_fake_scrape(&tracker, &scrape_request, &remote_client_ip).await;
+    }
+
+    handle_real_scrape(&tracker, &scrape_request, &remote_client_ip).await
+}
+
+#[allow(clippy::unused_async)]
+pub async fn handle_with_key(
+    State(tracker): State<Arc<Tracker>>,
+    ExtractRequest(scrape_request): ExtractRequest,
+    Path(key_id_param): Path<KeyIdParam>,
+    remote_client_ip: RemoteClientIp,
+) -> Response {
+    debug!("http scrape request: {:#?}", &scrape_request);
+
+    let Ok(key_id) = key_id_param.value().parse::<KeyId>() else {
+        return responses::error::Error::from(
+            auth::Error::InvalidKeyFormat {
+                location: Location::caller()
+            })
+        .into_response()
+    };
+
+    match auth::authenticate(&key_id, &tracker).await {
+        Ok(_) => (),
+        Err(_) => return handle_fake_scrape(&tracker, &scrape_request, &remote_client_ip).await,
+    }
+
+    handle_real_scrape(&tracker, &scrape_request, &remote_client_ip).await
+}
+
+async fn handle_real_scrape(tracker: &Arc<Tracker>, scrape_request: &Scrape, remote_client_ip: &RemoteClientIp) -> Response {
+    let peer_ip = match peer_ip::resolve(tracker.config.on_reverse_proxy, remote_client_ip) {
+        Ok(peer_ip) => peer_ip,
+        Err(err) => return err,
+    };
+
+    let scrape_data = services::scrape::invoke(tracker, &scrape_request.info_hashes, &peer_ip).await;
+
+    responses::scrape::Bencoded::from(scrape_data).into_response()
+}
+
+/// When authentication fails in `private` mode, the tracker returns empty swarm metadata for all the requested infohashes.
+async fn handle_fake_scrape(tracker: &Arc<Tracker>, scrape_request: &Scrape, remote_client_ip: &RemoteClientIp) -> Response {
+    let peer_ip = match peer_ip::resolve(tracker.config.on_reverse_proxy, remote_client_ip) {
         Ok(peer_ip) => peer_ip,
         Err(err) => return err,
     };

-    let scrape_data = services::scrape::invoke(tracker.clone(), &scrape_request.info_hashes, &peer_ip).await;
+    let scrape_data = services::scrape::fake_invoke(tracker, &scrape_request.info_hashes, &peer_ip).await;

     responses::scrape::Bencoded::from(scrape_data).into_response()
 }
diff --git a/src/http/axum_implementation/routes.rs b/src/http/axum_implementation/routes.rs
index 646dd0aa3..21b7260ae 100644
--- a/src/http/axum_implementation/routes.rs
+++ b/src/http/axum_implementation/routes.rs
@@ -15,7 +15,8 @@ pub fn router(tracker: &Arc<Tracker>) -> Router {
         .route("/announce", get(announce::handle_without_key).with_state(tracker.clone()))
         .route("/announce/:key", get(announce::handle_with_key).with_state(tracker.clone()))
         // Scrape request
-        .route("/scrape", get(scrape::handle).with_state(tracker.clone()))
+        .route("/scrape", get(scrape::handle_without_key).with_state(tracker.clone()))
+        .route("/scrape/:key", get(scrape::handle_with_key).with_state(tracker.clone()))
         // Add extension to get the client IP from the connection info
         .layer(SecureClientIpSource::ConnectInfo.into_extension())
 }
diff --git a/src/http/axum_implementation/services/scrape.rs b/src/http/axum_implementation/services/scrape.rs
index f40b8f999..30f00a47b 100644
--- a/src/http/axum_implementation/services/scrape.rs
+++ b/src/http/axum_implementation/services/scrape.rs
@@ -4,9 +4,25 @@ use std::sync::Arc;
 use crate::protocol::info_hash::InfoHash;
 use crate::tracker::{statistics, ScrapeData, Tracker};

-pub async fn invoke(tracker: Arc<Tracker>, info_hashes: &Vec<InfoHash>, original_peer_ip: &IpAddr) -> ScrapeData {
+pub async fn invoke(tracker: &Arc<Tracker>, info_hashes: &Vec<InfoHash>, original_peer_ip: &IpAddr) -> ScrapeData {
     let scrape_data = tracker.scrape(info_hashes).await;

+    send_scrape_event(original_peer_ip, tracker).await;
+
+    scrape_data
+}
+
+/// When the peer is not authenticated and the tracker is running in `private` mode,
+/// the tracker returns empty stats for all the torrents.
+pub async fn fake_invoke(tracker: &Arc<Tracker>, info_hashes: &Vec<InfoHash>, original_peer_ip: &IpAddr) -> ScrapeData {
+    let scrape_data = tracker.empty_scrape_for(info_hashes);
+
+    send_scrape_event(original_peer_ip, tracker).await;
+
+    scrape_data
+}
+
+async fn send_scrape_event(original_peer_ip: &IpAddr, tracker: &Arc<Tracker>) {
     match original_peer_ip {
         IpAddr::V4(_) => {
             tracker.send_stats_event(statistics::Event::Tcp4Scrape).await;
@@ -15,6 +31,4 @@ pub async fn invoke(tracker: Arc<Tracker>, info_hashes: &Vec<InfoHash>, original_peer_ip: &IpAddr) -> ScrapeData {
         IpAddr::V6(_) => {
             tracker.send_stats_event(statistics::Event::Tcp6Scrape).await;
         }
     }
-
-    scrape_data
 }
diff --git a/src/tracker/mod.rs b/src/tracker/mod.rs
index 0fb434aea..2604c5045 100644
--- a/src/tracker/mod.rs
+++ b/src/tracker/mod.rs
@@ -142,6 +142,17 @@ impl Tracker {
         scrape_data
     }

+    // It returns empty swarm metadata for all the infohashes.
+    pub fn empty_scrape_for(&self, info_hashes: &Vec<InfoHash>) -> ScrapeData {
+        let mut scrape_data = ScrapeData::empty();
+
+        for info_hash in info_hashes {
+            scrape_data.add_file(info_hash, SwarmMetadata::default());
+        }
+
+        scrape_data
+    }
+
     async fn get_swarm_metadata(&self, info_hash: &InfoHash) -> SwarmMetadata {
         let torrents = self.get_torrents().await;
         match torrents.get(info_hash) {
diff --git a/tests/http_tracker.rs b/tests/http_tracker.rs
index 28ed252e9..0536ab0b7 100644
--- a/tests/http_tracker.rs
+++ b/tests/http_tracker.rs
@@ -2575,7 +2575,22 @@ mod axum_http_tracker_server {
         }

         #[tokio::test]
-        async fn should_fail_if_the_peer_authentication_key_is_not_valid() {
+        async fn should_fail_if_the_key_query_param_cannot_be_parsed() {
+            let http_tracker_server = start_private_http_tracker(Version::Axum).await;
+
+            let invalid_key_id = "INVALID_KEY_ID";
+
+            let response = Client::new(http_tracker_server.get_connection_info())
+                .get(&format!(
+                    "announce/{invalid_key_id}?info_hash=%81%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00%00&peer_addr=2.137.87.41&downloaded=0&uploaded=0&peer_id=-qB00000000000000001&port=17548&left=0&event=completed&compact=0"
+                ))
+                .await;
+
+            assert_authentication_error_response(response).await;
+        }
+
+        #[tokio::test]
+        async fn should_fail_if_the_peer_cannot_be_authenticated_with_the_provided_key() {
             let http_tracker_server = start_private_http_tracker(Version::Axum).await;

             // The tracker does not have this key
@@ -2600,14 +2615,28 @@ mod axum_http_tracker_server {
         use torrust_tracker::tracker::peer;

         use crate::common::fixtures::PeerBuilder;
-        use crate::http::asserts::assert_scrape_response;
+        use crate::http::asserts::{assert_authentication_error_response, assert_scrape_response};
         use crate::http::client::Client;
         use crate::http::requests;
         use crate::http::responses::scrape::{File, ResponseBuilder};
         use crate::http::server::start_private_http_tracker;

-        //#[tokio::test]
-        #[allow(dead_code)]
+        #[tokio::test]
+        async fn should_fail_if_the_key_query_param_cannot_be_parsed() {
+            let http_tracker_server = start_private_http_tracker(Version::Axum).await;
+
+            let invalid_key_id = "INVALID_KEY_ID";
+
+            let response = Client::new(http_tracker_server.get_connection_info())
+                .get(&format!(
+                    "scrape/{invalid_key_id}?info_hash=%3B%24U%04%CF%5F%11%BB%DB%E1%20%1C%EAjk%F4Z%EE%1B%C0"
+                ))
+                .await;
+
+            assert_authentication_error_response(response).await;
+        }
+
+        #[tokio::test]
         async fn should_return_the_zeroed_file_when_the_client_is_not_authenticated() {
             let http_tracker = start_private_http_tracker(Version::Axum).await;

@@ -2636,8 +2665,7 @@ mod axum_http_tracker_server {
             assert_scrape_response(response, &expected_scrape_response).await;
         }

-        //#[tokio::test]
-        #[allow(dead_code)]
+        #[tokio::test]
         async fn should_return_the_real_file_stats_when_the_client_is_authenticated() {
             let http_tracker = start_private_http_tracker(Version::Axum).await;

@@ -2677,10 +2705,10 @@ mod axum_http_tracker_server {
             assert_scrape_response(response, &expected_scrape_response).await;
         }

-        //#[tokio::test]
-        #[allow(dead_code)]
+        #[tokio::test]
         async fn should_return_the_zeroed_file_when_the_authentication_key_provided_by_the_client_is_invalid() {
             // There is not authentication error
+            // code-review: should this really be this way?
             let http_tracker = start_private_http_tracker(Version::Axum).await;
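
Note for reviewers: below is a minimal, standalone sketch of the key-parsing step that both `handle_with_key` handlers now rely on. The `KeyId` type in the sketch is only a stand-in for `crate::tracker::auth::KeyId`, and the 32-character alphanumeric check (plus the sample key string) is an assumption made for illustration; the real `FromStr` implementation in the tracker crate is authoritative. It only shows how an unparsable `:key` path segment short-circuits into the `InvalidKeyFormat` error before `auth::authenticate` is reached.

use std::str::FromStr;

// Stand-in for `crate::tracker::auth::KeyId` (illustration only, not part of the patch).
struct KeyId(String);

impl FromStr for KeyId {
    type Err = ();

    fn from_str(raw: &str) -> Result<Self, Self::Err> {
        // Assumed format for illustration: 32 ASCII-alphanumeric characters.
        // The real parser in the tracker crate defines the actual rules.
        if raw.len() == 32 && raw.chars().all(|c| c.is_ascii_alphanumeric()) {
            Ok(KeyId(raw.to_string()))
        } else {
            Err(())
        }
    }
}

fn main() {
    // Mirrors the `let Ok(key_id) = ... else { ... }` guard in the handlers:
    // a key that cannot be parsed is rejected before authentication is attempted.
    // "INVALID_KEY_ID" comes from the tests above; the second value is a sample
    // key of the assumed format.
    for raw in ["INVALID_KEY_ID", "YZSl4lMZupRuOpSRC3krIKR5BPB14nrJ"] {
        match raw.parse::<KeyId>() {
            Ok(KeyId(key)) => println!("{key}: parsed, would continue with auth::authenticate"),
            Err(()) => println!("{raw}: InvalidKeyFormat -> error response"),
        }
    }
}

For scrape requests, a key that fails to parse is reported as an authentication error, while a key that parses but fails `auth::authenticate` falls back to `handle_fake_scrape` and returns zeroed swarm metadata, as the updated tests above show.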