
Commit c202db7

dev: tracker client error enums

1 parent 988f1c7 commit c202db7

File tree

20 files changed: +770 -559 lines changed

Cargo.lock

Lines changed: 1 addition & 0 deletions

Cargo.toml

Lines changed: 1 addition & 1 deletion
@@ -80,7 +80,7 @@ tower-http = { version = "0", features = ["compression-full", "cors", "propagate
 trace = "0"
 tracing = "0"
 tracing-subscriber = { version = "0.3.18", features = ["json"] }
-url = "2"
+url = {version = "2", features = ["serde"] }
 uuid = { version = "1", features = ["v4"] }
 zerocopy = "0.7.33"
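
Note: the `serde` feature added to `url` above is what lets the checker structs introduced later in this commit derive `Serialize` on their `Url` fields. A minimal sketch of that compile-time requirement; the `Checks` struct here is a stripped-down stand-in and `serde_json` is pulled in only to show the output, neither comes from this diff:

    use serde::Serialize;
    use url::Url;

    // `Url` only implements `Serialize` when the `url` crate is built with
    // `features = ["serde"]`; without that feature this derive fails to compile.
    #[derive(Debug, Serialize)]
    struct Checks {
        url: Url,
    }

    fn main() {
        let checks = Checks {
            url: Url::parse("https://tracker.example.com/health_check").expect("a valid URL"),
        };

        // The URL serializes as a plain string:
        // {"url":"https://tracker.example.com/health_check"}
        println!("{}", serde_json::to_string(&checks).expect("a serializable struct"));
    }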

packages/configuration/src/lib.rs

Lines changed: 5 additions & 0 deletions
@@ -9,6 +9,7 @@ pub mod v1;
 use std::collections::HashMap;
 use std::env;
 use std::sync::Arc;
+use std::time::Duration;
 
 use camino::Utf8PathBuf;
 use derive_more::Constructor;
@@ -20,6 +21,10 @@ use torrust_tracker_located_error::{DynError, LocatedError};
 /// The maximum number of returned peers for a torrent.
 pub const TORRENT_PEERS_LIMIT: usize = 74;
 
+/// Default timeout for sending and receiving packets. And waiting for sockets
+/// to be readable and writable.
+pub const DEFAULT_TIMEOUT: Duration = Duration::from_secs(5);
+
 // Environment variables
 
 /// The whole `tracker.toml` file content. It has priority over the config file.
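
Note: `DEFAULT_TIMEOUT` replaces the hard-coded five-second timeout that the health check client builds further down in this commit. A sketch of a consumer, assuming the package is exposed as the `torrust_tracker_configuration` crate:

    use torrust_tracker_configuration::DEFAULT_TIMEOUT;

    // Build an HTTP client with the shared default instead of repeating
    // `Duration::from_secs(5)` at every call site.
    fn build_client() -> Result<reqwest::Client, reqwest::Error> {
        reqwest::Client::builder().timeout(DEFAULT_TIMEOUT).build()
    }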

src/console/clients/checker/app.rs

Lines changed: 1 addition & 1 deletion
@@ -98,7 +98,7 @@ pub async fn run() -> Result<Vec<CheckResult>> {
         console: console_printer,
     };
 
-    Ok(service.run_checks().await)
+    service.run_checks().await.context("it should run the check tasks")
 }
 
 fn tracing_stdout_init(filter: LevelFilter) {
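
Note: the replacement line uses anyhow's `Context` extension trait, so any error from `run_checks()` is wrapped with a readable message instead of being returned bare. A generic illustration of the same pattern; the file-reading helper is invented for the example:

    use anyhow::{Context, Result};

    // `.context(...)` (or `.with_context(...)` for lazily built messages)
    // attaches a description to whatever error bubbles up, as the diff does
    // for `service.run_checks().await`.
    fn read_config(path: &str) -> Result<String> {
        std::fs::read_to_string(path).with_context(|| format!("it should read the config file at {path}"))
    }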
Lines changed: 64 additions & 36 deletions
@@ -1,49 +1,77 @@
+use std::sync::Arc;
 use std::time::Duration;
 
-use reqwest::{Client as HttpClient, Url, Url as ServiceUrl};
+use anyhow::Result;
+use hyper::StatusCode;
+use reqwest::{Client as HttpClient, Response};
+use serde::Serialize;
+use thiserror::Error;
+use url::Url;
 
-use super::structs::{CheckerOutput, Status};
-use crate::console::clients::checker::service::{CheckError, CheckResult};
+#[derive(Debug, Clone, Error, Serialize)]
+#[serde(into = "String")]
+pub enum Error {
+    #[error("Failed to Build a Http Client: {err:?}")]
+    ClientBuildingError { err: Arc<reqwest::Error> },
+    #[error("Health check failed to get a response: {err:?}")]
+    ResponseError { err: Arc<reqwest::Error> },
+    #[error("Http check returned a non-success code: \"{code}\" with the response: \"{response:?}\"")]
+    UnsuccessfulResponse { code: StatusCode, response: Arc<Response> },
+}
+
+impl From<Error> for String {
+    fn from(value: Error) -> Self {
+        value.to_string()
+    }
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct Checks {
+    url: Url,
+    result: Result<String, Error>,
+}
 
-#[allow(clippy::missing_panics_doc)]
-pub async fn run(health_checks: &Vec<ServiceUrl>, check_results: &mut Vec<CheckResult>) -> Vec<CheckerOutput> {
-    let mut health_checkers: Vec<CheckerOutput> = Vec::new();
+pub async fn run(health_checks: Vec<Url>, timeout: Duration) -> Vec<Result<Checks, Checks>> {
+    let mut results = Vec::default();
 
-    for health_check_url in health_checks {
-        let mut health_checker = CheckerOutput {
-            url: health_check_url.to_string(),
-            status: Status {
-                code: String::new(),
-                message: String::new(),
-            },
+    tracing::debug!("Health checks ...");
+
+    for url in health_checks {
+        let result = match run_health_check(url.clone(), timeout).await {
+            Ok(response) => Ok(response.status().to_string()),
+            Err(err) => Err(err),
         };
-        match run_health_check(health_check_url.clone()).await {
-            Ok(()) => {
-                check_results.push(Ok(()));
-                health_checker.status.code = "ok".to_string();
-            }
-            Err(err) => {
-                check_results.push(Err(err));
-                health_checker.status.code = "error".to_string();
-                health_checker.status.message = "Health API is failing.".to_string();
-            }
+
+        let check = Checks { url, result };
+
+        if check.result.is_err() {
+            results.push(Err(check));
+        } else {
+            results.push(Ok(check));
         }
-        health_checkers.push(health_checker);
     }
-    health_checkers
+
+    results
 }
 
-async fn run_health_check(url: Url) -> Result<(), CheckError> {
-    let client = HttpClient::builder().timeout(Duration::from_secs(5)).build().unwrap();
+async fn run_health_check(url: Url, timeout: Duration) -> Result<Response, Error> {
+    let client = HttpClient::builder()
+        .timeout(timeout)
+        .build()
+        .map_err(|e| Error::ClientBuildingError { err: e.into() })?;
 
-    match client.get(url.clone()).send().await {
-        Ok(response) => {
-            if response.status().is_success() {
-                Ok(())
-            } else {
-                Err(CheckError::HealthCheckError { url })
-            }
-        }
-        Err(_) => Err(CheckError::HealthCheckError { url }),
+    let response = client
+        .get(url.clone())
+        .send()
+        .await
+        .map_err(|e| Error::ResponseError { err: e.into() })?;
+
+    if response.status().is_success() {
+        Ok(response)
+    } else {
+        Err(Error::UnsuccessfulResponse {
+            code: response.status(),
+            response: response.into(),
+        })
    }
 }
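
Note: `#[serde(into = "String")]` on the new `Error` enum makes serde convert the value through `From<Error> for String` before serializing, so JSON reports carry the `thiserror` display message rather than a structured variant. A self-contained sketch of the same pattern; the `Demo` enum and the `serde_json` call are illustrative only:

    use serde::Serialize;
    use thiserror::Error;

    // `into = "String"` requires `Clone` plus a `From<Demo> for String` impl:
    // serde clones the value, converts it to a string, and serializes that.
    #[derive(Debug, Clone, Error, Serialize)]
    #[serde(into = "String")]
    enum Demo {
        #[error("timed out after {secs}s")]
        Timeout { secs: u64 },
    }

    impl From<Demo> for String {
        fn from(value: Demo) -> Self {
            value.to_string()
        }
    }

    fn main() {
        let err = Demo::Timeout { secs: 5 };

        // Prints the display message as a JSON string: "timed out after 5s"
        println!("{}", serde_json::to_string(&err).expect("a serializable error"));
    }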
Lines changed: 75 additions & 94 deletions
@@ -1,120 +1,101 @@
-use std::str::FromStr;
+use std::str::FromStr as _;
+use std::time::Duration;
 
-use reqwest::Url as ServiceUrl;
+use serde::Serialize;
 use torrust_tracker_primitives::info_hash::InfoHash;
-use tracing::debug;
 use url::Url;
 
-use super::structs::{CheckerOutput, Status};
-use crate::console::clients::checker::service::{CheckError, CheckResult};
-use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder;
+use crate::console::clients::http::Error;
 use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce;
 use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
 use crate::shared::bit_torrent::tracker::http::client::{requests, Client};
 
-#[allow(clippy::missing_panics_doc)]
-pub async fn run(http_trackers: &Vec<ServiceUrl>, check_results: &mut Vec<CheckResult>) -> Vec<CheckerOutput> {
-    let mut http_checkers: Vec<CheckerOutput> = Vec::new();
-
-    for http_tracker in http_trackers {
-        let mut http_checker = CheckerOutput {
-            url: http_tracker.to_string(),
-            status: Status {
-                code: String::new(),
-                message: String::new(),
-            },
+#[derive(Debug, Clone, Serialize)]
+pub struct Checks {
+    url: Url,
+    results: Vec<(Check, Result<(), Error>)>,
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub enum Check {
+    Announce,
+    Scrape,
+}
+
+pub async fn run(http_trackers: Vec<Url>, timeout: Duration) -> Vec<Result<Checks, Checks>> {
+    let mut results = Vec::default();
+
+    tracing::debug!("HTTP trackers ...");
+
+    for ref url in http_trackers {
+        let mut checks = Checks {
+            url: url.clone(),
+            results: Vec::default(),
         };
 
-        match check_http_announce(http_tracker).await {
-            Ok(()) => {
-                check_results.push(Ok(()));
-                http_checker.status.code = "ok".to_string();
-            }
-            Err(err) => {
-                check_results.push(Err(err));
-                http_checker.status.code = "error".to_string();
-                http_checker.status.message = "Announce is failing.".to_string();
-            }
+        // Announce
+        {
+            let check = check_http_announce(url, timeout).await.map(|_| ());
+
+            checks.results.push((Check::Announce, check));
        }
 
-        match check_http_scrape(http_tracker).await {
-            Ok(()) => {
-                check_results.push(Ok(()));
-                http_checker.status.code = "ok".to_string();
-            }
-            Err(err) => {
-                check_results.push(Err(err));
-                http_checker.status.code = "error".to_string();
-                http_checker.status.message = "Scrape is failing.".to_string();
-            }
+        // Scrape
+        {
+            let check = check_http_scrape(url, timeout).await.map(|_| ());
+
+            checks.results.push((Check::Scrape, check));
+        }
+
+        if checks.results.iter().any(|f| f.1.is_err()) {
+            results.push(Err(checks));
+        } else {
+            results.push(Ok(checks));
         }
-        http_checkers.push(http_checker);
     }
-    http_checkers
+
+    results
 }
 
-async fn check_http_announce(tracker_url: &Url) -> Result<(), CheckError> {
+async fn check_http_announce(url: &Url, timeout: Duration) -> Result<Announce, Error> {
     let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237
     let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required");
 
-    // todo: HTTP request could panic. For example, if the server is not accessible.
-    // We should change the client to catch that error and return a `CheckError`.
-    // Otherwise the checking process will stop. The idea is to process all checks
-    // and return a final report.
-    let Ok(client) = Client::new(tracker_url.clone()) else {
-        return Err(CheckError::HttpError {
-            url: (tracker_url.to_owned()),
-        });
-    };
-    let Ok(response) = client
-        .announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
+    let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;
+
+    let response = client
+        .announce(
+            &requests::announce::QueryBuilder::with_default_values()
+                .with_info_hash(&info_hash)
+                .query(),
+        )
         .await
-    else {
-        return Err(CheckError::HttpError {
-            url: (tracker_url.to_owned()),
-        });
-    };
-
-    if let Ok(body) = response.bytes().await {
-        if let Ok(_announce_response) = serde_bencode::from_bytes::<Announce>(&body) {
-            Ok(())
-        } else {
-            debug!("announce body {:#?}", body);
-            Err(CheckError::HttpError {
-                url: tracker_url.clone(),
-            })
-        }
-    } else {
-        Err(CheckError::HttpError {
-            url: tracker_url.clone(),
-        })
-    }
+        .map_err(|err| Error::HttpClientError { err })?;
+
+    let response = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;
+
+    let response = serde_bencode::from_bytes::<Announce>(&response).map_err(|e| Error::ParseBencodeError {
+        data: response,
+        err: e.into(),
+    })?;
+
+    Ok(response)
 }
 
-async fn check_http_scrape(url: &Url) -> Result<(), CheckError> {
+async fn check_http_scrape(url: &Url, timeout: Duration) -> Result<scrape::Response, Error> {
     let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237
     let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required");
 
-    // todo: HTTP request could panic. For example, if the server is not accessible.
-    // We should change the client to catch that error and return a `CheckError`.
-    // Otherwise the checking process will stop. The idea is to process all checks
-    // and return a final report.
-
-    let Ok(client) = Client::new(url.clone()) else {
-        return Err(CheckError::HttpError { url: (url.to_owned()) });
-    };
-    let Ok(response) = client.scrape(&query).await else {
-        return Err(CheckError::HttpError { url: (url.to_owned()) });
-    };
-
-    if let Ok(body) = response.bytes().await {
-        if let Ok(_scrape_response) = scrape::Response::try_from_bencoded(&body) {
-            Ok(())
-        } else {
-            debug!("scrape body {:#?}", body);
-            Err(CheckError::HttpError { url: url.clone() })
-        }
-    } else {
-        Err(CheckError::HttpError { url: url.clone() })
-    }
+    let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?;
+
+    let response = client.scrape(&query).await.map_err(|err| Error::HttpClientError { err })?;
+
+    let response = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?;
+
+    let response = scrape::Response::try_from_bencoded(&response).map_err(|e| Error::BencodeParseError {
+        data: response,
+        err: e.into(),
+    })?;
+
+    Ok(response)
 }
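
Note: both `run` functions now return `Vec<Result<Checks, Checks>>`, so a caller can separate passing trackers from failing ones by the `Ok`/`Err` wrapper alone, without re-inspecting the inner results. A hypothetical consumer, with `Checks` reduced to its URL to keep the sketch self-contained:

    // Split the checker output into passing and failing trackers.
    #[derive(Debug)]
    struct Checks {
        url: String,
    }

    fn summarize(results: Vec<Result<Checks, Checks>>) {
        let (passed, failed): (Vec<_>, Vec<_>) = results.into_iter().partition(Result::is_ok);

        println!("{} trackers passed, {} failed", passed.len(), failed.len());

        for check in failed.into_iter().map(Result::unwrap_err) {
            println!("failed: {}", check.url);
        }
    }

    fn main() {
        summarize(vec![
            Ok(Checks { url: "https://ok.example.com".into() }),
            Err(Checks { url: "https://down.example.com".into() }),
        ]);
    }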
