Commit 70924ed

refactor: [#639] Tracker Checker: extract mod for HTTP checks

1 parent 77c32a1 · commit 70924ed

2 files changed: +96 −92 lines
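The commit pulls the HTTP checks out of `service.rs` into their own submodule, mirroring the UDP extraction already referenced by `checks::udp::run`. The capture below doesn't include the `checks` module declaration itself, but for `service.rs` to call `checks::http::run` it presumably contains something like the following sketch (the path and file are assumptions; they are not part of this diff):

// Hypothetical src/console/clients/checker/checks/mod.rs — not shown in this
// commit's capture. It only needs to declare the per-protocol submodules
// that service.rs drives via `checks::udp::run` and `checks::http::run`.
pub mod http;
pub mod udp;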
New file — the extracted `checks::http` module

Lines changed: 95 additions & 0 deletions
@@ -0,0 +1,95 @@
use std::str::FromStr;

use colored::Colorize;
use log::debug;
use reqwest::Url as ServiceUrl;
use url::Url;

use crate::console::clients::checker::console::Console;
use crate::console::clients::checker::printer::Printer;
use crate::console::clients::checker::service::{CheckError, CheckResult};
use crate::shared::bit_torrent::info_hash::InfoHash;
use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder;
use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce;
use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
use crate::shared::bit_torrent::tracker::http::client::{requests, Client};

pub async fn run(http_trackers: &Vec<ServiceUrl>, console: &Console, check_results: &mut Vec<CheckResult>) {
    console.println("HTTP trackers ...");

    for http_tracker in http_trackers {
        let colored_tracker_url = http_tracker.to_string().yellow();

        match check_http_announce(http_tracker).await {
            Ok(()) => {
                check_results.push(Ok(()));
                console.println(&format!("{} - Announce at {} is OK", "✓".green(), colored_tracker_url));
            }
            Err(err) => {
                check_results.push(Err(err));
                console.println(&format!("{} - Announce at {} is failing", "✗".red(), colored_tracker_url));
            }
        }

        match check_http_scrape(http_tracker).await {
            Ok(()) => {
                check_results.push(Ok(()));
                console.println(&format!("{} - Scrape at {} is OK", "✓".green(), colored_tracker_url));
            }
            Err(err) => {
                check_results.push(Err(err));
                console.println(&format!("{} - Scrape at {} is failing", "✗".red(), colored_tracker_url));
            }
        }
    }
}

async fn check_http_announce(tracker_url: &Url) -> Result<(), CheckError> {
    let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237
    let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required");

    // todo: the HTTP request could panic. For example, if the server is not accessible.
    // We should change the client to catch that error and return a `CheckError`.
    // Otherwise the checking process will stop. The idea is to process all checks
    // and return a final report.
    let response = Client::new(tracker_url.clone())
        .announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
        .await;

    if let Ok(body) = response.bytes().await {
        if let Ok(_announce_response) = serde_bencode::from_bytes::<Announce>(&body) {
            Ok(())
        } else {
            debug!("announce body {:#?}", body);
            Err(CheckError::HttpError {
                url: tracker_url.clone(),
            })
        }
    } else {
        Err(CheckError::HttpError {
            url: tracker_url.clone(),
        })
    }
}

async fn check_http_scrape(url: &Url) -> Result<(), CheckError> {
    let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237
    let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required");

    // todo: the HTTP request could panic. For example, if the server is not accessible.
    // We should change the client to catch that error and return a `CheckError`.
    // Otherwise the checking process will stop. The idea is to process all checks
    // and return a final report.
    let response = Client::new(url.clone()).scrape(&query).await;

    if let Ok(body) = response.bytes().await {
        if let Ok(_scrape_response) = scrape::Response::try_from_bencoded(&body) {
            Ok(())
        } else {
            debug!("scrape body {:#?}", body);
            Err(CheckError::HttpError { url: url.clone() })
        }
    } else {
        Err(CheckError::HttpError { url: url.clone() })
    }
}
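The `todo` comments above flag a real weakness: `Client::announce` and `Client::scrape` can panic when the tracker is unreachable, aborting the whole run instead of producing an entry in the final report. Below is a minimal sketch of the error handling those comments call for, assuming the request layer is changed to surface transport failures as errors. It uses `reqwest` directly rather than the crate's `Client` wrapper, and `fetch_body` plus this trimmed-down `CheckError` are illustrative names, not this commit's API:

use reqwest::Url;

// Trimmed stand-in for the crate's real `CheckError`; illustrative only.
#[derive(Debug)]
pub enum CheckError {
    HttpError { url: Url },
}

// Hypothetical helper: turns transport failures into `CheckError` values so
// the checker can record the failure and continue with the other trackers.
pub async fn fetch_body(url: &Url) -> Result<Vec<u8>, CheckError> {
    // An unreachable server becomes an `Err`, not a panic.
    let response = reqwest::get(url.clone())
        .await
        .map_err(|_| CheckError::HttpError { url: url.clone() })?;

    let body = response
        .bytes()
        .await
        .map_err(|_| CheckError::HttpError { url: url.clone() })?;

    Ok(body.to_vec())
}

With that shape, both `check_http_announce` and `check_http_scrape` could `?` the transport step and keep their bencode-parsing branches unchanged.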

src/console/clients/checker/service.rs

Lines changed: 1 addition & 92 deletions
@@ -1,21 +1,14 @@
 use std::net::SocketAddr;
-use std::str::FromStr;
 use std::sync::Arc;
 use std::time::Duration;

 use colored::Colorize;
-use log::debug;
 use reqwest::{Client as HttpClient, Url};

 use super::checks;
 use super::config::Configuration;
 use super::console::Console;
 use crate::console::clients::checker::printer::Printer;
-use crate::shared::bit_torrent::info_hash::InfoHash;
-use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder;
-use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce;
-use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
-use crate::shared::bit_torrent::tracker::http::client::{requests, Client};

 pub struct Service {
     pub(crate) config: Arc<Configuration>,
@@ -42,47 +35,13 @@ impl Service {

         checks::udp::run(&self.config.udp_trackers, &self.console, &mut check_results).await;

-        self.check_http_trackers(&mut check_results).await;
+        checks::http::run(&self.config.http_trackers, &self.console, &mut check_results).await;

         self.run_health_checks(&mut check_results).await;

         check_results
     }

-    async fn check_http_trackers(&self, check_results: &mut Vec<CheckResult>) {
-        self.console.println("HTTP trackers ...");
-
-        for http_tracker in &self.config.http_trackers {
-            let colored_tracker_url = http_tracker.to_string().yellow();
-
-            match self.check_http_announce(http_tracker).await {
-                Ok(()) => {
-                    check_results.push(Ok(()));
-                    self.console
-                        .println(&format!("{} - Announce at {} is OK", "✓".green(), colored_tracker_url));
-                }
-                Err(err) => {
-                    check_results.push(Err(err));
-                    self.console
-                        .println(&format!("{} - Announce at {} is failing", "✗".red(), colored_tracker_url));
-                }
-            }
-
-            match self.check_http_scrape(http_tracker).await {
-                Ok(()) => {
-                    check_results.push(Ok(()));
-                    self.console
-                        .println(&format!("{} - Scrape at {} is OK", "✓".green(), colored_tracker_url));
-                }
-                Err(err) => {
-                    check_results.push(Err(err));
-                    self.console
-                        .println(&format!("{} - Scrape at {} is failing", "✗".red(), colored_tracker_url));
-                }
-            }
-        }
-    }
-
     async fn run_health_checks(&self, check_results: &mut Vec<CheckResult>) {
         self.console.println("Health checks ...");

@@ -94,56 +53,6 @@
         }
     }

-    async fn check_http_announce(&self, tracker_url: &Url) -> Result<(), CheckError> {
-        let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237
-        let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required");
-
-        // todo: the HTTP request could panic. For example, if the server is not accessible.
-        // We should change the client to catch that error and return a `CheckError`.
-        // Otherwise the checking process will stop. The idea is to process all checks
-        // and return a final report.
-        let response = Client::new(tracker_url.clone())
-            .announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
-            .await;
-
-        if let Ok(body) = response.bytes().await {
-            if let Ok(_announce_response) = serde_bencode::from_bytes::<Announce>(&body) {
-                Ok(())
-            } else {
-                debug!("announce body {:#?}", body);
-                Err(CheckError::HttpError {
-                    url: tracker_url.clone(),
-                })
-            }
-        } else {
-            Err(CheckError::HttpError {
-                url: tracker_url.clone(),
-            })
-        }
-    }
-
-    async fn check_http_scrape(&self, url: &Url) -> Result<(), CheckError> {
-        let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237
-        let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required");
-
-        // todo: the HTTP request could panic. For example, if the server is not accessible.
-        // We should change the client to catch that error and return a `CheckError`.
-        // Otherwise the checking process will stop. The idea is to process all checks
-        // and return a final report.
-        let response = Client::new(url.clone()).scrape(&query).await;
-
-        if let Ok(body) = response.bytes().await {
-            if let Ok(_scrape_response) = scrape::Response::try_from_bencoded(&body) {
-                Ok(())
-            } else {
-                debug!("scrape body {:#?}", body);
-                Err(CheckError::HttpError { url: url.clone() })
-            }
-        } else {
-            Err(CheckError::HttpError { url: url.clone() })
-        }
-    }
-
     async fn run_health_check(&self, url: Url) -> Result<(), CheckError> {
         let client = HttpClient::builder().timeout(Duration::from_secs(5)).build().unwrap();
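Read together, the context lines show the payoff of the extraction: the driving method becomes a flat pipeline over the per-protocol modules. Assembled from the hunks above, it presumably reads roughly as follows (the signature and the `Vec::default()` initialization sit outside the hunks, so `run_checks` and that line are assumptions):

// Sketch of the checker's driving method after the refactor, assembled
// from the context lines of the diff above.
pub async fn run_checks(&self) -> Vec<CheckResult> {
    let mut check_results = Vec::default();

    checks::udp::run(&self.config.udp_trackers, &self.console, &mut check_results).await;
    checks::http::run(&self.config.http_trackers, &self.console, &mut check_results).await;

    self.run_health_checks(&mut check_results).await;

    check_results
}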