1 | | -use std::str::FromStr;
| 1 | +use std::str::FromStr as _; |
| 2 | +use std::time::Duration; |
2 | 3 |
3 | | -use reqwest::Url as ServiceUrl; |
| 4 | +use serde::Serialize; |
4 | 5 | use torrust_tracker_primitives::info_hash::InfoHash; |
5 | | -use tracing::debug; |
6 | 6 | use url::Url; |
7 | 7 |
8 | | -use super::structs::{CheckerOutput, Status}; |
9 | | -use crate::console::clients::checker::service::{CheckError, CheckResult}; |
10 | | -use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder; |
| 8 | +use crate::console::clients::http::Error; |
11 | 9 | use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce; |
12 | 10 | use crate::shared::bit_torrent::tracker::http::client::responses::scrape; |
13 | 11 | use crate::shared::bit_torrent::tracker::http::client::{requests, Client}; |
14 | 12 |
15 | | -#[allow(clippy::missing_panics_doc)] |
16 | | -pub async fn run(http_trackers: &Vec<ServiceUrl>, check_results: &mut Vec<CheckResult>) -> Vec<CheckerOutput> { |
17 | | - let mut http_checkers: Vec<CheckerOutput> = Vec::new(); |
18 | | - |
19 | | - for http_tracker in http_trackers { |
20 | | - let mut http_checker = CheckerOutput { |
21 | | - url: http_tracker.to_string(), |
22 | | - status: Status { |
23 | | - code: String::new(), |
24 | | - message: String::new(), |
25 | | - }, |
| 13 | +#[derive(Debug, Clone, Serialize)] |
| 14 | +pub struct Checks { |
| 15 | + url: Url, |
| 16 | + results: Vec<(Check, Result<(), Error>)>, |
| 17 | +} |
| 18 | + |
| 19 | +#[derive(Debug, Clone, Serialize)] |
| 20 | +pub enum Check { |
| 21 | + Announce, |
| 22 | + Scrape, |
| 23 | +} |
| 24 | + |
| 25 | +pub async fn run(http_trackers: Vec<Url>, timeout: Duration) -> Vec<Result<Checks, Checks>> { |
| 26 | + let mut results = Vec::default(); |
| 27 | + |
| 28 | + tracing::debug!("HTTP trackers ..."); |
| 29 | + |
| 30 | + for ref url in http_trackers { |
| 31 | + let mut checks = Checks { |
| 32 | + url: url.clone(), |
| 33 | + results: Vec::default(), |
26 | 34 | }; |
27 | 35 |
28 | | - match check_http_announce(http_tracker).await { |
29 | | - Ok(()) => { |
30 | | - check_results.push(Ok(())); |
31 | | - http_checker.status.code = "ok".to_string(); |
32 | | - } |
33 | | - Err(err) => { |
34 | | - check_results.push(Err(err)); |
35 | | - http_checker.status.code = "error".to_string(); |
36 | | - http_checker.status.message = "Announce is failing.".to_string(); |
37 | | - } |
| 36 | + // Announce |
| 37 | + { |
| 38 | + let check = check_http_announce(url, timeout).await.map(|_| ()); |
| 39 | + |
| 40 | + checks.results.push((Check::Announce, check)); |
38 | 41 | } |
39 | 42 |
40 | | - match check_http_scrape(http_tracker).await { |
41 | | - Ok(()) => { |
42 | | - check_results.push(Ok(())); |
43 | | - http_checker.status.code = "ok".to_string(); |
44 | | - } |
45 | | - Err(err) => { |
46 | | - check_results.push(Err(err)); |
47 | | - http_checker.status.code = "error".to_string(); |
48 | | - http_checker.status.message = "Scrape is failing.".to_string(); |
49 | | - } |
| 43 | + // Scrape |
| 44 | + { |
| 45 | + let check = check_http_scrape(url, timeout).await.map(|_| ()); |
| 46 | + |
| 47 | + checks.results.push((Check::Scrape, check)); |
| 48 | + } |
| 49 | + |
| 50 | + if checks.results.iter().any(|f| f.1.is_err()) { |
| 51 | + results.push(Err(checks)); |
| 52 | + } else { |
| 53 | + results.push(Ok(checks)); |
50 | 54 | } |
51 | | - http_checkers.push(http_checker); |
52 | 55 | } |
53 | | - http_checkers |
| 56 | + |
| 57 | + results |
54 | 58 | } |
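
Note, not part of the diff: `run` now returns `Vec<Result<Checks, Checks>>`, and both variants carry the full `Checks` payload. A failed sub-check is recorded in `checks.results` rather than aborting the loop, so every tracker is still processed, while callers can count failing trackers at a glance. A minimal consumer sketch, assuming the `run` above is in scope; `report` and the five-second timeout are made up for illustration:

```rust
use std::time::Duration;
use url::Url;

// Hypothetical driver, for illustration only.
async fn report(trackers: Vec<Url>) {
    let outcomes = run(trackers, Duration::from_secs(5)).await;
    let failed = outcomes.iter().filter(|outcome| outcome.is_err()).count();
    // Err(checks) still holds every (Check, Result) pair, so no detail is lost.
    println!("{failed} of {} trackers failed at least one check", outcomes.len());
}
```
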
55 | 59 |
56 | | -async fn check_http_announce(tracker_url: &Url) -> Result<(), CheckError> { |
| 60 | +async fn check_http_announce(url: &Url, timeout: Duration) -> Result<Announce, Error> { |
57 | 61 | let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237 |
58 | 62 | let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required"); |
59 | 63 |
60 | | - // todo: HTTP request could panic. For example, if the server is not accessible.
61 | | - // We should change the client to catch that error and return a `CheckError`. |
62 | | - // Otherwise the checking process will stop. The idea is to process all checks |
63 | | - // and return a final report. |
64 | | - let Ok(client) = Client::new(tracker_url.clone()) else { |
65 | | - return Err(CheckError::HttpError { |
66 | | - url: (tracker_url.to_owned()), |
67 | | - }); |
68 | | - }; |
69 | | - let Ok(response) = client |
70 | | - .announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query()) |
| 64 | + let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?; |
| 65 | + |
| 66 | + let response = client |
| 67 | + .announce( |
| 68 | + &requests::announce::QueryBuilder::with_default_values() |
| 69 | + .with_info_hash(&info_hash) |
| 70 | + .query(), |
| 71 | + ) |
71 | 72 | .await |
72 | | - else { |
73 | | - return Err(CheckError::HttpError { |
74 | | - url: (tracker_url.to_owned()), |
75 | | - }); |
76 | | - }; |
77 | | - |
78 | | - if let Ok(body) = response.bytes().await { |
79 | | - if let Ok(_announce_response) = serde_bencode::from_bytes::<Announce>(&body) { |
80 | | - Ok(()) |
81 | | - } else { |
82 | | - debug!("announce body {:#?}", body); |
83 | | - Err(CheckError::HttpError { |
84 | | - url: tracker_url.clone(), |
85 | | - }) |
86 | | - } |
87 | | - } else { |
88 | | - Err(CheckError::HttpError { |
89 | | - url: tracker_url.clone(), |
90 | | - }) |
91 | | - } |
| 73 | + .map_err(|err| Error::HttpClientError { err })?; |
| 74 | + |
| 75 | + let response = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?; |
| 76 | + |
| 77 | + let response = serde_bencode::from_bytes::<Announce>(&response).map_err(|e| Error::ParseBencodeError { |
| 78 | + data: response, |
| 79 | + err: e.into(), |
| 80 | + })?; |
| 81 | + |
| 82 | + Ok(response) |
92 | 83 | } |
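
For orientation (not shown in this diff): the `Error` variants used above come from `crate::console::clients::http`. A rough sketch of their shape, reconstructed only from the call sites in this file; the payload types below are placeholders, not the crate's real definitions:

```rust
use serde::Serialize;

// Hypothetical reconstruction: variant and field names match the call
// sites in the diff, but every payload type is an assumed placeholder.
#[derive(Debug, Clone, Serialize)]
pub enum Error {
    HttpClientError { err: String },
    ResponseError { err: String },
    ParseBencodeError { data: Vec<u8>, err: String }, // announce body that failed to decode
    BencodeParseError { data: Vec<u8>, err: String }, // scrape body that failed to decode
}
```
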
93 | 84 |
94 | | -async fn check_http_scrape(url: &Url) -> Result<(), CheckError> { |
| 85 | +async fn check_http_scrape(url: &Url, timeout: Duration) -> Result<scrape::Response, Error> { |
95 | 86 | let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237 |
96 | 87 | let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required"); |
97 | 88 |
98 | | - // todo: HTTP request could panic. For example, if the server is not accessible.
99 | | - // We should change the client to catch that error and return a `CheckError`. |
100 | | - // Otherwise the checking process will stop. The idea is to process all checks |
101 | | - // and return a final report. |
102 | | - |
103 | | - let Ok(client) = Client::new(url.clone()) else { |
104 | | - return Err(CheckError::HttpError { url: (url.to_owned()) }); |
105 | | - }; |
106 | | - let Ok(response) = client.scrape(&query).await else { |
107 | | - return Err(CheckError::HttpError { url: (url.to_owned()) }); |
108 | | - }; |
109 | | - |
110 | | - if let Ok(body) = response.bytes().await { |
111 | | - if let Ok(_scrape_response) = scrape::Response::try_from_bencoded(&body) { |
112 | | - Ok(()) |
113 | | - } else { |
114 | | - debug!("scrape body {:#?}", body); |
115 | | - Err(CheckError::HttpError { url: url.clone() }) |
116 | | - } |
117 | | - } else { |
118 | | - Err(CheckError::HttpError { url: url.clone() }) |
119 | | - } |
| 89 | + let client = Client::new(url.clone(), timeout).map_err(|err| Error::HttpClientError { err })?; |
| 90 | + |
| 91 | + let response = client.scrape(&query).await.map_err(|err| Error::HttpClientError { err })?; |
| 92 | + |
| 93 | + let response = response.bytes().await.map_err(|e| Error::ResponseError { err: e.into() })?; |
| 94 | + |
| 95 | + let response = scrape::Response::try_from_bencoded(&response).map_err(|e| Error::BencodeParseError { |
| 96 | + data: response, |
| 97 | + err: e.into(), |
| 98 | + })?; |
| 99 | + |
| 100 | + Ok(response) |
120 | 101 | } |
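
Both checks now follow the same shape: each fallible step is mapped into the crate's `Error` with `map_err` and short-circuited with `?`, which addresses the deleted `todo` comments about a failing request halting the whole checking run. The pattern in miniature, with toy types standing in for the real client and error enum:

```rust
// Toy illustration of the map_err + `?` chain used in both checks above.
#[derive(Debug)]
enum Error {
    Fetch(String),
    Parse(String),
}

fn fetch_body() -> Result<Vec<u8>, String> {
    Ok(b"d8:intervali1800ee".to_vec()) // stand-in for an HTTP response body
}

fn check() -> Result<(), Error> {
    let body = fetch_body().map_err(Error::Fetch)?;
    let _decoded: serde_bencode::value::Value =
        serde_bencode::from_bytes(&body).map_err(|e| Error::Parse(e.to_string()))?;
    Ok(())
}
```
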