
Commit 4456203

feat: [torrust#640] Tracker Checker: scrape check
1 parent cb5bb68 commit 4456203

File tree

  src/console/clients/checker/service.rs
  src/shared/bit_torrent/tracker/http/client/requests/scrape.rs

2 files changed: +85 -20 lines changed

src/console/clients/checker/service.rs (+68 -20)

@@ -12,7 +12,8 @@ use crate::console::clients::checker::printer::Printer;
 use crate::shared::bit_torrent::info_hash::InfoHash;
 use crate::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder;
 use crate::shared::bit_torrent::tracker::http::client::responses::announce::Announce;
-use crate::shared::bit_torrent::tracker::http::client::Client;
+use crate::shared::bit_torrent::tracker::http::client::responses::scrape;
+use crate::shared::bit_torrent::tracker::http::client::{requests, Client};
 
 pub struct Service {
     pub(crate) config: Arc<Configuration>,
@@ -58,9 +59,32 @@ impl Service {
         self.console.println("HTTP trackers ...");
 
         for http_tracker in &self.config.http_trackers {
-            match self.check_http_tracker(http_tracker).await {
-                Ok(()) => check_results.push(Ok(())),
-                Err(err) => check_results.push(Err(err)),
+            let colored_tracker_url = http_tracker.to_string().yellow();
+
+            match self.check_http_announce(http_tracker).await {
+                Ok(()) => {
+                    check_results.push(Ok(()));
+                    self.console
+                        .println(&format!("{} - Announce at {} is OK", "✓".green(), colored_tracker_url));
+                }
+                Err(err) => {
+                    check_results.push(Err(err));
+                    self.console
+                        .println(&format!("{} - Announce at {} is failing", "✗".red(), colored_tracker_url));
+                }
+            }
+
+            match self.check_http_scrape(http_tracker).await {
+                Ok(()) => {
+                    check_results.push(Ok(()));
+                    self.console
+                        .println(&format!("{} - Scrape at {} is OK", "✓".green(), colored_tracker_url));
+                }
+                Err(err) => {
+                    check_results.push(Err(err));
+                    self.console
+                        .println(&format!("{} - Scrape at {} is failing", "✗".red(), colored_tracker_url));
+                }
             }
         }
     }
@@ -80,57 +104,81 @@ impl Service {
         // todo:
         // - Make announce request
         // - Make scrape request
-        self.console
-            .println(&format!("{} - UDP tracker at udp://{:?} is OK (TODO)", "✓".green(), address));
+
+        let colored_address = address.to_string().yellow();
+
+        self.console.println(&format!(
+            "{} - UDP tracker at udp://{} is OK ({})",
+            "✓".green(),
+            colored_address,
+            "TODO".red(),
+        ));
     }
 
-    async fn check_http_tracker(&self, url: &Url) -> Result<(), CheckError> {
+    async fn check_http_announce(&self, url: &Url) -> Result<(), CheckError> {
         let info_hash_str = "9c38422213e30bff212b30c360d26f9a02136422".to_string(); // # DevSkim: ignore DS173237
         let info_hash = InfoHash::from_str(&info_hash_str).expect("a valid info-hash is required");
 
-        // Announce request
-
         let response = Client::new(url.clone())
             .announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
             .await;
 
         if let Ok(body) = response.bytes().await {
             if let Ok(_announce_response) = serde_bencode::from_bytes::<Announce>(&body) {
-                self.console.println(&format!("{} - Announce at {} is OK", "✓".green(), url));
-
                 Ok(())
             } else {
-                self.console.println(&format!("{} - Announce at {} failing", "✗".red(), url));
                 Err(CheckError::HttpError { url: url.clone() })
             }
         } else {
-            self.console.println(&format!("{} - Announce at {} failing", "✗".red(), url));
             Err(CheckError::HttpError { url: url.clone() })
         }
+    }
+
+    async fn check_http_scrape(&self, url: &Url) -> Result<(), CheckError> {
+        let info_hashes: Vec<String> = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()]; // # DevSkim: ignore DS173237
+        let query = requests::scrape::Query::try_from(info_hashes).expect("a valid array of info-hashes is required");
 
-        // Scrape request
+        let response = Client::new(url.clone()).scrape(&query).await;
 
-        // todo
+        if let Ok(body) = response.bytes().await {
+            if let Ok(_scrape_response) = scrape::Response::try_from_bencoded(&body) {
+                Ok(())
+            } else {
+                Err(CheckError::HttpError { url: url.clone() })
+            }
+        } else {
+            Err(CheckError::HttpError { url: url.clone() })
+        }
     }
 
     async fn run_health_check(&self, url: Url) -> Result<(), CheckError> {
         let client = HttpClient::builder().timeout(Duration::from_secs(5)).build().unwrap();
 
+        let colored_url = url.to_string().yellow();
+
         match client.get(url.clone()).send().await {
             Ok(response) => {
                 if response.status().is_success() {
                     self.console
-                        .println(&format!("{} - Health API at {} is OK", "✓".green(), url));
+                        .println(&format!("{} - Health API at {} is OK", "✓".green(), colored_url));
                     Ok(())
                 } else {
-                    self.console
-                        .eprintln(&format!("{} - Health API at {} failing: {:?}", "✗".red(), url, response));
+                    self.console.eprintln(&format!(
+                        "{} - Health API at {} is failing: {:?}",
+                        "✗".red(),
+                        colored_url,
+                        response
+                    ));
                    Err(CheckError::HealthCheckError { url })
                }
            }
            Err(err) => {
-                self.console
-                    .eprintln(&format!("{} - Health API at {} failing: {:?}", "✗".red(), url, err));
+                self.console.eprintln(&format!(
+                    "{} - Health API at {} is failing: {:?}",
+                    "✗".red(),
+                    colored_url,
+                    err
+                ));
                Err(CheckError::HealthCheckError { url })
            }
        }
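
With this change, each HTTP tracker yields two entries in the check results, one for the announce check and one for the scrape check, and the per-check output is printed from the loop instead of from inside the check functions. Using the format strings above, a run against a healthy tracker would print something like the following (the tracker URL is illustrative):

    ✓ - Announce at http://127.0.0.1:7070/ is OK
    ✓ - Scrape at http://127.0.0.1:7070/ is OK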

src/shared/bit_torrent/tracker/http/client/requests/scrape.rs (+17)

@@ -45,6 +45,23 @@ impl TryFrom<&[String]> for Query {
     }
 }
 
+impl TryFrom<Vec<String>> for Query {
+    type Error = ConversionError;
+
+    fn try_from(info_hashes: Vec<String>) -> Result<Self, Self::Error> {
+        let mut validated_info_hashes: Vec<ByteArray20> = Vec::new();
+
+        for info_hash in info_hashes {
+            let validated_info_hash = InfoHash::from_str(&info_hash).map_err(|_| ConversionError(info_hash.clone()))?;
+            validated_info_hashes.push(validated_info_hash.0);
+        }
+
+        Ok(Self {
+            info_hash: validated_info_hashes,
+        })
+    }
+}
+
 /// HTTP Tracker Scrape Request:
 ///
 /// <https://www.bittorrent.org/beps/bep_0048.html>
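
The new TryFrom<Vec<String>> implementation is what lets check_http_scrape build its query directly from owned strings. A minimal usage sketch, assuming it is called from inside the crate; the invalid-input case relies on InfoHash::from_str rejecting strings that are not valid info-hashes, as the map_err above implies:

    use crate::shared::bit_torrent::tracker::http::client::requests::scrape::Query;

    // A valid 40-character hex info-hash converts into a scrape query.
    let valid = vec!["9c38422213e30bff212b30c360d26f9a02136422".to_string()];
    assert!(Query::try_from(valid).is_ok());

    // A malformed string is rejected with a ConversionError instead of panicking.
    let invalid = vec!["not-an-info-hash".to_string()];
    assert!(Query::try_from(invalid).is_err());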
