Commit df4f782

refactor: [torrust#1318] return error instead of response from http scrape handler
1 parent 0c2be36 commit df4f782

3 files changed: +85 -9 lines

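The refactor follows a common layering pattern: the core scrape service returns a typed domain error (`HttpScrapeError`) instead of a ready-made protocol response, and the axum handler converts that error into the error response at the HTTP boundary. A minimal, self-contained sketch of the pattern, using simplified stand-in types rather than the real crate API:

```rust
// Sketch only: simplified stand-ins for HttpScrapeError and
// responses::error::Error, to illustrate the layering this commit moves to.
use std::fmt;

// Domain error returned by the service layer; it knows nothing about HTTP.
#[derive(Debug)]
enum HttpScrapeError {
    PeerIpResolution(String),
}

impl fmt::Display for HttpScrapeError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::PeerIpResolution(reason) => write!(f, "Error resolving peer IP: {reason}"),
        }
    }
}

// Protocol-level error response, built only at the HTTP boundary.
struct ErrorResponse {
    failure_reason: String,
}

// Service layer: returns the domain error.
fn handle_scrape() -> Result<(), HttpScrapeError> {
    Err(HttpScrapeError::PeerIpResolution(
        "missing or invalid the right most X-Forwarded-For IP".to_string(),
    ))
}

// HTTP layer: maps the domain error to the response type.
fn handle() -> ErrorResponse {
    match handle_scrape() {
        Ok(()) => ErrorResponse { failure_reason: String::new() }, // success path elided
        Err(error) => ErrorResponse { failure_reason: error.to_string() },
    }
}

fn main() {
    println!("{}", handle().failure_reason);
}
```

The usual payoff of this split is that the service stays independent of the HTTP layer and the response encoding lives in exactly one place: the handler.
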
packages/axum-http-tracker-server/src/v1/handlers/scrape.rs (+20 -4)

```diff
@@ -6,6 +6,7 @@ use std::sync::Arc;
 
 use axum::extract::State;
 use axum::response::{IntoResponse, Response};
+use bittorrent_http_tracker_core::services::scrape::HttpScrapeError;
 use bittorrent_http_tracker_protocol::v1::requests::scrape::Scrape;
 use bittorrent_http_tracker_protocol::v1::responses;
 use bittorrent_http_tracker_protocol::v1::services::peer_ip_resolver::ClientIpSources;
@@ -101,7 +102,12 @@ async fn handle(
         .await
     {
         Ok(scrape_data) => scrape_data,
-        Err(error) => return (StatusCode::OK, error.write()).into_response(),
+        Err(error) => {
+            let error_response = responses::error::Error {
+                failure_reason: error.to_string(),
+            };
+            return (StatusCode::OK, error_response.write()).into_response();
+        }
     };
 
     build_response(scrape_data)
@@ -116,7 +122,7 @@ async fn handle_scrape(
     scrape_request: &Scrape,
     client_ip_sources: &ClientIpSources,
     maybe_key: Option<Key>,
-) -> Result<ScrapeData, responses::error::Error> {
+) -> Result<ScrapeData, HttpScrapeError> {
     bittorrent_http_tracker_core::services::scrape::handle_scrape(
         core_config,
         scrape_handler,
@@ -316,6 +322,7 @@ mod tests {
 
     mod with_tracker_on_reverse_proxy {
 
+        use bittorrent_http_tracker_protocol::v1::responses;
         use bittorrent_http_tracker_protocol::v1::services::peer_ip_resolver::ClientIpSources;
 
         use super::{initialize_tracker_on_reverse_proxy, sample_scrape_request};
@@ -343,15 +350,20 @@ mod tests {
                 .await
                 .unwrap_err();
 
+            let error_response = responses::error::Error {
+                failure_reason: response.to_string(),
+            };
+
             assert_error_response(
-                &response,
+                &error_response,
                 "Error resolving peer IP: missing or invalid the right most X-Forwarded-For IP",
             );
         }
    }
 
    mod with_tracker_not_on_reverse_proxy {
 
+        use bittorrent_http_tracker_protocol::v1::responses;
         use bittorrent_http_tracker_protocol::v1::services::peer_ip_resolver::ClientIpSources;
 
         use super::{initialize_tracker_not_on_reverse_proxy, sample_scrape_request};
@@ -379,8 +391,12 @@ mod tests {
                 .await
                 .unwrap_err();
 
+            let error_response = responses::error::Error {
+                failure_reason: response.to_string(),
+            };
+
             assert_error_response(
-                &response,
+                &error_response,
                 "Error resolving peer IP: cannot get the client IP from the connection info",
             );
         }
```

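Note that the handler still replies with `StatusCode::OK`: BitTorrent trackers conventionally report failures inside the bencoded body, under a `failure reason` key, rather than via the HTTP status code. The exact bytes are whatever `responses::error::Error::write()` produces; assuming the conventional single-key dictionary, the body for one of the messages asserted in the tests would look roughly like this hand-rolled sketch:

```rust
// Illustrative only: a hand-rolled bencoding of the conventional
// "failure reason" dictionary. The real serialization is done by the
// bittorrent_http_tracker_protocol error response type, not this helper.
fn bencoded_failure_reason(reason: &str) -> String {
    // d <key length>:<key> <value length>:<value> e
    format!("d14:failure reason{}:{}e", reason.len(), reason)
}

fn main() {
    let body = bencoded_failure_reason(
        "Error resolving peer IP: cannot get the client IP from the connection info",
    );
    // Prints something like: d14:failure reason<len>:Error resolving peer IP: ...e
    println!("{body}");
}
```
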
packages/http-tracker-core/src/services/scrape.rs (+55 -5)

```diff
@@ -11,17 +11,67 @@ use std::net::IpAddr;
 use std::sync::Arc;
 
 use bittorrent_http_tracker_protocol::v1::requests::scrape::Scrape;
-use bittorrent_http_tracker_protocol::v1::responses;
-use bittorrent_http_tracker_protocol::v1::services::peer_ip_resolver::{self, ClientIpSources};
+use bittorrent_http_tracker_protocol::v1::services::peer_ip_resolver::{self, ClientIpSources, PeerIpResolutionError};
 use bittorrent_primitives::info_hash::InfoHash;
 use bittorrent_tracker_core::authentication::service::AuthenticationService;
-use bittorrent_tracker_core::authentication::Key;
+use bittorrent_tracker_core::authentication::{self, Key};
+use bittorrent_tracker_core::error::{ScrapeError, TrackerCoreError, WhitelistError};
 use bittorrent_tracker_core::scrape_handler::ScrapeHandler;
 use torrust_tracker_configuration::Core;
 use torrust_tracker_primitives::core::ScrapeData;
 
 use crate::statistics;
 
+/// Errors related to scrape requests.
+#[derive(thiserror::Error, Debug, Clone)]
+pub enum HttpScrapeError {
+    #[error("Error resolving peer IP: {source}")]
+    PeerIpResolutionError { source: PeerIpResolutionError },
+
+    #[error("Tracker core error: {source}")]
+    TrackerCoreError { source: TrackerCoreError },
+}
+
+impl From<PeerIpResolutionError> for HttpScrapeError {
+    fn from(peer_ip_resolution_error: PeerIpResolutionError) -> Self {
+        Self::PeerIpResolutionError {
+            source: peer_ip_resolution_error,
+        }
+    }
+}
+
+impl From<TrackerCoreError> for HttpScrapeError {
+    fn from(tracker_core_error: TrackerCoreError) -> Self {
+        Self::TrackerCoreError {
+            source: tracker_core_error,
+        }
+    }
+}
+
+impl From<ScrapeError> for HttpScrapeError {
+    fn from(scrape_error: ScrapeError) -> Self {
+        Self::TrackerCoreError {
+            source: scrape_error.into(),
+        }
+    }
+}
+
+impl From<WhitelistError> for HttpScrapeError {
+    fn from(whitelist_error: WhitelistError) -> Self {
+        Self::TrackerCoreError {
+            source: whitelist_error.into(),
+        }
+    }
+}
+
+impl From<authentication::key::Error> for HttpScrapeError {
+    fn from(key_error: authentication::key::Error) -> Self {
+        Self::TrackerCoreError {
+            source: key_error.into(),
+        }
+    }
+}
+
 /// The HTTP tracker `scrape` service.
 ///
 /// The service sends an statistics event that increments:
@@ -47,7 +97,7 @@ pub async fn handle_scrape(
     scrape_request: &Scrape,
     client_ip_sources: &ClientIpSources,
     maybe_key: Option<Key>,
-) -> Result<ScrapeData, responses::error::Error> {
+) -> Result<ScrapeData, HttpScrapeError> {
     // Authentication
     let return_fake_scrape_data = if core_config.private {
         match maybe_key {
@@ -66,7 +116,7 @@ pub async fn handle_scrape(
 
     let peer_ip = match peer_ip_resolver::invoke(core_config.net.on_reverse_proxy, client_ip_sources) {
         Ok(peer_ip) => peer_ip,
-        Err(error) => return Err(responses::error::Error::from(error)),
+        Err(error) => return Err(error.into()),
     };
 
     if return_fake_scrape_data {
```

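The `#[error("Error resolving peer IP: {source}")]` attribute on the new enum is what lets `error.to_string()` in the handler (and in the updated tests) produce the full messages being asserted: `thiserror` interpolates the source error's `Display` output into the outer message. A standalone sketch of that chaining, with made-up stand-in types and assuming the `thiserror` crate is available:

```rust
// Stand-in types only, to show how thiserror chains Display output.
use thiserror::Error;

#[derive(Error, Debug)]
#[error("cannot get the client IP from the connection info")]
struct MissingClientIp;

#[derive(Error, Debug)]
enum ScrapeFailure {
    // `{source}` is replaced with the Display output of the `source` field.
    #[error("Error resolving peer IP: {source}")]
    PeerIpResolution { source: MissingClientIp },
}

fn main() {
    let error = ScrapeFailure::PeerIpResolution { source: MissingClientIp };
    assert_eq!(
        error.to_string(),
        "Error resolving peer IP: cannot get the client IP from the connection info"
    );
}
```

That chaining is all the handler needs to fill `failure_reason` without matching on the error variants.
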
packages/tracker-core/src/error.rs (+10)

```diff
@@ -23,6 +23,10 @@ pub enum TrackerCoreError {
     #[error("Tracker core announce error: {source}")]
     AnnounceError { source: AnnounceError },
 
+    /// Error returned when there was an error with the tracker core scrape handler.
+    #[error("Tracker core scrape error: {source}")]
+    ScrapeError { source: ScrapeError },
+
     /// Error returned when there was an error with the tracker core whitelist.
     #[error("Tracker core whitelist error: {source}")]
     WhitelistError { source: WhitelistError },
@@ -38,6 +42,12 @@ impl From<AnnounceError> for TrackerCoreError {
     }
 }
 
+impl From<ScrapeError> for TrackerCoreError {
+    fn from(scrape_error: ScrapeError) -> Self {
+        Self::ScrapeError { source: scrape_error }
+    }
+}
+
 impl From<WhitelistError> for TrackerCoreError {
     fn from(whitelist_error: WhitelistError) -> Self {
         Self::WhitelistError { source: whitelist_error }
```

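Combined with the `From<ScrapeError> for HttpScrapeError` impl added in the HTTP core, these additions let a low-level scrape error bubble up through both layers with the `?` operator and no manual mapping. A self-contained sketch of the conversion chain, using simplified stand-ins for the real error types:

```rust
// Simplified stand-ins for ScrapeError, TrackerCoreError and HttpScrapeError,
// showing the conversion chain the new From impls enable.
#[derive(Debug)]
struct ScrapeError;

#[derive(Debug)]
enum TrackerCoreError {
    Scrape { source: ScrapeError },
}

#[derive(Debug)]
enum HttpScrapeError {
    TrackerCore { source: TrackerCoreError },
}

impl From<ScrapeError> for TrackerCoreError {
    fn from(source: ScrapeError) -> Self {
        Self::Scrape { source }
    }
}

impl From<TrackerCoreError> for HttpScrapeError {
    fn from(source: TrackerCoreError) -> Self {
        Self::TrackerCore { source }
    }
}

// The direct impl lets `?` cross both layers in a single hop.
impl From<ScrapeError> for HttpScrapeError {
    fn from(source: ScrapeError) -> Self {
        Self::TrackerCore { source: source.into() }
    }
}

fn scrape() -> Result<(), ScrapeError> {
    Err(ScrapeError)
}

fn handle_scrape() -> Result<(), HttpScrapeError> {
    scrape()?; // converted via From<ScrapeError> for HttpScrapeError
    Ok(())
}

fn main() {
    println!("{:?}", handle_scrape());
}
```

In the real `handle_scrape` this is what allows `Err(error) => return Err(error.into())` to replace the manual `responses::error::Error::from(...)` conversion.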