Skip to content

Commit 37a142e

Browse files
committed
refactor: [torrust#1268] move scrape logic from axum to http_tracker_core package
1 parent 74815ab commit 37a142e

File tree

2 files changed

+55
-27
lines changed

2 files changed

+55
-27
lines changed

src/packages/http_tracker_core/services/scrape.rs

+42
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,13 @@
1010
use std::net::IpAddr;
1111
use std::sync::Arc;
1212

13+
use bittorrent_http_protocol::v1::requests::scrape::Scrape;
14+
use bittorrent_http_protocol::v1::responses;
15+
use bittorrent_http_protocol::v1::services::peer_ip_resolver::{self, ClientIpSources};
1316
use bittorrent_primitives::info_hash::InfoHash;
17+
use bittorrent_tracker_core::authentication::service::AuthenticationService;
1418
use bittorrent_tracker_core::scrape_handler::ScrapeHandler;
19+
use torrust_tracker_configuration::Core;
1520
use torrust_tracker_primitives::core::ScrapeData;
1621

1722
use crate::packages::http_tracker_core;
@@ -26,6 +31,43 @@ use crate::packages::http_tracker_core;
2631
/// > **NOTICE**: as the HTTP tracker does not require a connection request
2732
/// > like the UDP tracker, the number of TCP connections is incremented for
2833
/// > each `scrape` request.
34+
///
35+
/// # Errors
36+
///
37+
/// This function will return an error if:
38+
///
39+
/// - There is an error when resolving the client IP address.
40+
#[allow(clippy::too_many_arguments)]
41+
pub async fn handle_scrape(
42+
core_config: &Arc<Core>,
43+
scrape_handler: &Arc<ScrapeHandler>,
44+
_authentication_service: &Arc<AuthenticationService>,
45+
opt_http_stats_event_sender: &Arc<Option<Box<dyn http_tracker_core::statistics::event::sender::Sender>>>,
46+
scrape_request: &Scrape,
47+
client_ip_sources: &ClientIpSources,
48+
return_real_scrape_data: bool,
49+
) -> Result<ScrapeData, responses::error::Error> {
50+
// Authorization for scrape requests is handled at the `http_tracker_core`
51+
// level for each torrent.
52+
53+
let peer_ip = match peer_ip_resolver::invoke(core_config.net.on_reverse_proxy, client_ip_sources) {
54+
Ok(peer_ip) => peer_ip,
55+
Err(error) => return Err(responses::error::Error::from(error)),
56+
};
57+
58+
if return_real_scrape_data {
59+
Ok(invoke(
60+
scrape_handler,
61+
opt_http_stats_event_sender,
62+
&scrape_request.info_hashes,
63+
&peer_ip,
64+
)
65+
.await)
66+
} else {
67+
Ok(http_tracker_core::services::scrape::fake(opt_http_stats_event_sender, &scrape_request.info_hashes, &peer_ip).await)
68+
}
69+
}
70+
2971
pub async fn invoke(
3072
scrape_handler: &Arc<ScrapeHandler>,
3173
opt_http_stats_event_sender: &Arc<Option<Box<dyn http_tracker_core::statistics::event::sender::Sender>>>,

src/servers/http/v1/handlers/scrape.rs

+13-27
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ use axum::extract::State;
1111
use axum::response::{IntoResponse, Response};
1212
use bittorrent_http_protocol::v1::requests::scrape::Scrape;
1313
use bittorrent_http_protocol::v1::responses;
14-
use bittorrent_http_protocol::v1::services::peer_ip_resolver::{self, ClientIpSources};
14+
use bittorrent_http_protocol::v1::services::peer_ip_resolver::ClientIpSources;
1515
use bittorrent_tracker_core::authentication::service::AuthenticationService;
1616
use bittorrent_tracker_core::authentication::Key;
1717
use bittorrent_tracker_core::scrape_handler::ScrapeHandler;
@@ -20,7 +20,6 @@ use torrust_tracker_configuration::Core;
2020
use torrust_tracker_primitives::core::ScrapeData;
2121

2222
use crate::packages::http_tracker_core;
23-
use crate::packages::http_tracker_core::services;
2423
use crate::servers::http::v1::extractors::authentication_key::Extract as ExtractKey;
2524
use crate::servers::http::v1::extractors::client_ip_sources::Extract as ExtractClientIpSources;
2625
use crate::servers::http::v1::extractors::scrape_request::ExtractRequest;
@@ -111,12 +110,6 @@ async fn handle(
111110
build_response(scrape_data)
112111
}
113112

114-
/* code-review: authentication, authorization and peer IP resolution could be moved
115-
from the handler (Axum) layer into the app layer `services::announce::invoke`.
116-
That would make the handler even simpler and the code more reusable and decoupled from Axum.
117-
See https://github.com/torrust/torrust-tracker/discussions/240.
118-
*/
119-
120113
#[allow(clippy::too_many_arguments)]
121114
async fn handle_scrape(
122115
core_config: &Arc<Core>,
@@ -127,6 +120,8 @@ async fn handle_scrape(
127120
client_ip_sources: &ClientIpSources,
128121
maybe_key: Option<Key>,
129122
) -> Result<ScrapeData, responses::error::Error> {
123+
// todo: move authentication inside `http_tracker_core::services::scrape::handle_scrape`
124+
130125
// Authentication
131126
let return_real_scrape_data = if core_config.private {
132127
match maybe_key {
@@ -140,25 +135,16 @@ async fn handle_scrape(
140135
true
141136
};
142137

143-
// Authorization for scrape requests is handled at the `Tracker` level
144-
// for each torrent.
145-
146-
let peer_ip = match peer_ip_resolver::invoke(core_config.net.on_reverse_proxy, client_ip_sources) {
147-
Ok(peer_ip) => peer_ip,
148-
Err(error) => return Err(responses::error::Error::from(error)),
149-
};
150-
151-
if return_real_scrape_data {
152-
Ok(services::scrape::invoke(
153-
scrape_handler,
154-
opt_http_stats_event_sender,
155-
&scrape_request.info_hashes,
156-
&peer_ip,
157-
)
158-
.await)
159-
} else {
160-
Ok(services::scrape::fake(opt_http_stats_event_sender, &scrape_request.info_hashes, &peer_ip).await)
161-
}
138+
http_tracker_core::services::scrape::handle_scrape(
139+
core_config,
140+
scrape_handler,
141+
authentication_service,
142+
opt_http_stats_event_sender,
143+
scrape_request,
144+
client_ip_sources,
145+
return_real_scrape_data,
146+
)
147+
.await
162148
}
163149

164150
fn build_response(scrape_data: ScrapeData) -> Response {

0 commit comments

Comments
 (0)