@@ -11,17 +11,67 @@ use std::net::IpAddr;
 use std::sync::Arc;
 
 use bittorrent_http_tracker_protocol::v1::requests::scrape::Scrape;
-use bittorrent_http_tracker_protocol::v1::responses;
-use bittorrent_http_tracker_protocol::v1::services::peer_ip_resolver::{self, ClientIpSources};
+use bittorrent_http_tracker_protocol::v1::services::peer_ip_resolver::{self, ClientIpSources, PeerIpResolutionError};
 use bittorrent_primitives::info_hash::InfoHash;
 use bittorrent_tracker_core::authentication::service::AuthenticationService;
-use bittorrent_tracker_core::authentication::Key;
+use bittorrent_tracker_core::authentication::{self, Key};
+use bittorrent_tracker_core::error::{ScrapeError, TrackerCoreError, WhitelistError};
 use bittorrent_tracker_core::scrape_handler::ScrapeHandler;
 use torrust_tracker_configuration::Core;
 use torrust_tracker_primitives::core::ScrapeData;
 
 use crate::statistics;
 
+/// Errors related to scrape requests.
+#[derive(thiserror::Error, Debug, Clone)]
+pub enum HttpScrapeError {
+    #[error("Error resolving peer IP: {source}")]
+    PeerIpResolutionError { source: PeerIpResolutionError },
+
+    #[error("Tracker core error: {source}")]
+    TrackerCoreError { source: TrackerCoreError },
+}
+
+impl From<PeerIpResolutionError> for HttpScrapeError {
+    fn from(peer_ip_resolution_error: PeerIpResolutionError) -> Self {
+        Self::PeerIpResolutionError {
+            source: peer_ip_resolution_error,
+        }
+    }
+}
+
+impl From<TrackerCoreError> for HttpScrapeError {
+    fn from(tracker_core_error: TrackerCoreError) -> Self {
+        Self::TrackerCoreError {
+            source: tracker_core_error,
+        }
+    }
+}
+
+impl From<ScrapeError> for HttpScrapeError {
+    fn from(scrape_error: ScrapeError) -> Self {
+        Self::TrackerCoreError {
+            source: scrape_error.into(),
+        }
+    }
+}
+
+impl From<WhitelistError> for HttpScrapeError {
+    fn from(whitelist_error: WhitelistError) -> Self {
+        Self::TrackerCoreError {
+            source: whitelist_error.into(),
+        }
+    }
+}
+
+impl From<authentication::key::Error> for HttpScrapeError {
+    fn from(key_error: authentication::key::Error) -> Self {
+        Self::TrackerCoreError {
+            source: key_error.into(),
+        }
+    }
+}
+
 /// The HTTP tracker `scrape` service.
 ///
 /// The service sends a statistics event that increments:
@@ -47,7 +97,7 @@ pub async fn handle_scrape(
     scrape_request: &Scrape,
     client_ip_sources: &ClientIpSources,
     maybe_key: Option<Key>,
-) -> Result<ScrapeData, responses::error::Error> {
+) -> Result<ScrapeData, HttpScrapeError> {
     // Authentication
     let return_fake_scrape_data = if core_config.private {
         match maybe_key {
@@ -66,7 +116,7 @@ pub async fn handle_scrape(
 
     let peer_ip = match peer_ip_resolver::invoke(core_config.net.on_reverse_proxy, client_ip_sources) {
         Ok(peer_ip) => peer_ip,
-        Err(error) => return Err(responses::error::Error::from(error)),
+        Err(error) => return Err(error.into()),
     };
 
     if return_fake_scrape_data {
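
The diff above introduces a service-level error type (`HttpScrapeError`) so that `handle_scrape` no longer builds a `responses::error::Error` directly: fallible calls can propagate errors with `?` / `.into()` through the `From` impls, and mapping to an HTTP response can happen in the caller. Below is a minimal, self-contained sketch of that pattern; the names (`CoreError`, `HttpError`, `lookup`, `handler`) are illustrative placeholders, not part of the torrust-tracker crates.

```rust
// Sketch of the error-wrapping pattern used by HttpScrapeError (assumed
// names, not the crate's actual types).
use thiserror::Error;

#[derive(Error, Debug, Clone)]
#[error("core failure: {0}")]
struct CoreError(String);

#[derive(Error, Debug, Clone)]
enum HttpError {
    #[error("Tracker core error: {source}")]
    Core { source: CoreError },
}

// The `From` impl is what lets callers write `?` or `.into()` instead of
// constructing an HTTP error response at every call site.
impl From<CoreError> for HttpError {
    fn from(core_error: CoreError) -> Self {
        Self::Core { source: core_error }
    }
}

fn lookup(fail: bool) -> Result<u32, CoreError> {
    if fail {
        Err(CoreError("not found".into()))
    } else {
        Ok(42)
    }
}

// Mirrors the new `handle_scrape` signature: the handler returns its own
// error enum and relies on the `From` conversion for the lower layer.
fn handler(fail: bool) -> Result<u32, HttpError> {
    let value = lookup(fail)?; // CoreError is converted via `From`
    Ok(value)
}

fn main() {
    println!("{:?}", handler(false)); // Ok(42)
    println!("{}", handler(true).unwrap_err()); // Tracker core error: core failure: not found
}
```

The visible effect in the diff is the same: `handle_scrape` drops its dependency on `responses::error::Error`, and the peer-IP resolution failure is returned with `error.into()` instead of being mapped to a response inside the service.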