From c7a593463c0632d4875b1b54d4ce98379386421f Mon Sep 17 00:00:00 2001 From: Power2All Date: Tue, 2 Aug 2022 14:15:02 +0200 Subject: [PATCH 1/6] Pushing update versions --- Cargo.lock | 92 +++++++++++++++++++++++++++++++++++++++--------------- Cargo.toml | 10 +++--- 2 files changed, 71 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 163fd9d..67b6cf4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -30,15 +30,15 @@ dependencies = [ [[package]] name = "arc-swap" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5d78ce20460b82d3fa150275ed9d55e21064fc7951177baacf86a145c4a4b1f" +checksum = "983cd8b9d4b02a6dc6ffa557262eb5858a27a0038ffffe21a0f133eaa819a164" [[package]] name = "async-trait" -version = "0.1.56" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96cf8829f67d2eab0b2dfa42c5d0ef737e0724e4a82b01b3e292456202b19716" +checksum = "76464446b8bc32758d7e88ee1a804d9914cd9b1cb264c029899680b0be29826f" dependencies = [ "proc-macro2", "quote", @@ -73,12 +73,13 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "axum" -version = "0.5.14" +version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c943a505c17b494638a38a9af129067f760c9c06794b9f57d499266909be8e72" +checksum = "6b9496f0c1d1afb7a2af4338bbe1d969cddfead41d87a9fb3aaa6d0bbc7af648" dependencies = [ "async-trait", "axum-core", + "base64", "bitflags", "bytes", "futures-util", @@ -94,8 +95,10 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", + "sha-1", "sync_wrapper", "tokio", + "tokio-tungstenite", "tower", "tower-http", "tower-layer", @@ -128,9 +131,9 @@ dependencies = [ [[package]] name = "axum-server" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abf18303ef7e23b045301555bf8a0dfbc1444ea1a37b3c81757a32680ace4d7d" +checksum = "2b26e2731cf84d9c54b3768a2faebe97626cb0568babf04bad68077e8b60c555" dependencies = [ "arc-swap", "bytes", @@ -217,9 +220,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0b3de4a0c5e67e16066a0715723abd91edc2f9001d09c46e1dca929351e130e" +checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" [[package]] name = "cc" @@ -392,9 +395,9 @@ dependencies = [ [[package]] name = "event-listener" -version = "2.5.2" +version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "fern" @@ -785,9 +788,9 @@ checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" [[package]] name = "libm" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33a33a362ce288760ec6a508b94caaec573ae7d3bbbd91b87aa0bad4456839db" +checksum = "da83a57f3f5ba3680950aa3cbc806fc297bc0b289d42e8942ed528ace71b8145" [[package]] name = "libsqlite3-sys" @@ -1116,9 +1119,9 @@ checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" [[package]] name = "proc-macro2" -version = "1.0.41" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cdcc2916cde080c1876ff40292a396541241fe0072ef928cd76582e9ea5d60d2" +checksum = "c278e965f1d8cf32d6e0e96de3d3e79712178ae67986d9cf9151f51e95aac89b" dependencies = [ "unicode-ident", ] @@ -1164,9 +1167,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534cfe58d6a18cc17120fbf4635d53d14691c1fe4d951064df9bd326178d7d5a" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ "bitflags", ] @@ -1277,18 +1280,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.140" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc855a42c7967b7c369eb5860f7164ef1f6f81c20c7cc1141f2a604e18723b03" +checksum = "7af873f2c95b99fcb0bd0fe622a43e29514658873c8ceba88c4cb88833a22500" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.140" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f2122636b9fe3b81f1cb25099fcf2d3f542cdb1d45940d56c713158884a05da" +checksum = "75743a150d003dd863b51dc809bcad0d73f2102c53632f1e954e738192a3413f" dependencies = [ "proc-macro2", "quote", @@ -1642,6 +1645,18 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-tungstenite" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f714dd15bead90401d77e04243611caec13726c2408afd5b31901dfcdcb3b181" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite", +] + [[package]] name = "tokio-util" version = "0.7.3" @@ -1744,9 +1759,9 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.35" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a400e31aa60b9d44a52a8ee0343b5b18566b03a8321e0d321f695cf56e940160" +checksum = "2fce9567bd60a67d08a16488756721ba392f24f29006402881e43b19aac64307" dependencies = [ "cfg-if", "log", @@ -1756,9 +1771,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.28" +version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7" +checksum = "5aeea4303076558a00714b823f9ad67d58a3bbda1df83d8827d21193156e22f7" dependencies = [ "once_cell", ] @@ -1769,6 +1784,25 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" +[[package]] +name = "tungstenite" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" +dependencies = [ + "base64", + "byteorder", + "bytes", + "http", + "httparse", + "log", + "rand", + "sha-1", + "thiserror", + "url", + "utf-8", +] + [[package]] name = "typenum" version = "1.15.0" @@ -1826,6 +1860,12 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "vcpkg" version = "0.2.15" diff --git a/Cargo.toml b/Cargo.toml index d6bb1ce..a50aa5b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,10 +15,10 @@ codegen-units = 1 # Enable this to optimize filesize panic = "abort" # Enable this to optimize filesize 
 [dependencies]
-async-trait = "0.1.56"
-axum = "0.5.13"
+async-trait = "0.1.57"
+axum = { version = "0.5.13", features = ["ws"] }
 axum-client-ip = "0.2.0"
-axum-server = { version = "0.4.0", features = ["tls-rustls"] }
+axum-server = { version = "0.4.1", features = ["tls-rustls"] }
 binascii = "0.1.4"
 bip_bencode = "0.4.4"
 byteorder = "1.4.3"
@@ -32,10 +32,10 @@ log = "0.4.17"
 percent-encoding = "2.1.0"
 rustls = "0.20.6"
 scc = "0.8.3"
-serde = { version = "1.0.140", features = ["derive"] }
+serde = { version = "1.0.141", features = ["derive"] }
 serde_json = "1.0.82"
 sqlx = { version = "0.6.0", features = ["mysql", "postgres", "sqlite", "runtime-tokio-rustls"] }
 thiserror = "1.0.31"
-tokio = { version = "1.20.0", features = ["full"] }
+tokio = { version = "1.20.1", features = ["full"] }
 toml = "0.5.9"
 tower = "0.4.13"

From 36ca4089ad26117d22045e9b26a9c291ef36b6cc Mon Sep 17 00:00:00 2001
From: Power2All
Date: Tue, 2 Aug 2022 14:17:15 +0200
Subject: [PATCH 2/6] Adding SSL support for API

---
 src/config.rs   | 15 +++++++++++++--
 src/http_api.rs | 18 ++++++++++++++++++
 2 files changed, 31 insertions(+), 2 deletions(-)

diff --git a/src/config.rs b/src/config.rs
index f906f20..663a3a8 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -34,6 +34,16 @@ pub struct HttpTrackersConfig {
     pub ssl_cert: String
 }
 
+#[derive(Serialize, Deserialize, Debug, Clone)]
+pub struct ApiTrackersConfig {
+    pub enabled: bool,
+    pub bind_address: String,
+    pub key: String,
+    pub ssl: bool,
+    pub ssl_key: String,
+    pub ssl_cert: String
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct Configuration {
     pub log_level: String,
@@ -53,7 +63,7 @@ pub struct Configuration {
     pub udp_server: Vec<UdpTrackersConfig>,
     pub http_server: Vec<HttpTrackersConfig>,
-    pub api_server: Vec<HttpTrackersConfig>
+    pub api_server: Vec<ApiTrackersConfig>
 }
 
 impl Configuration {
     pub fn default() -> Configuration {
@@ -73,9 +83,10 @@ impl Configuration {
             }
         );
         let api_server = vec!(
-            HttpTrackersConfig {
+            ApiTrackersConfig {
                 enabled: false,
                 bind_address: String::from("0.0.0.0:8080"),
+                key: String::from(""),
                 ssl: false,
                 ssl_key: String::from(""),
                 ssl_cert: String::from("")
diff --git a/src/http_api.rs b/src/http_api.rs
index 0388bad..194dfc4 100644
--- a/src/http_api.rs
+++ b/src/http_api.rs
@@ -4,6 +4,7 @@ use axum::{Extension, Router};
 use axum_client_ip::ClientIp;
 use axum::routing::get;
 use axum_server::{Handle, Server};
+use axum_server::tls_rustls::RustlsConfig;
 use log::info;
 use scc::ebr::Arc;
 use crate::tracker::{StatsEvent, TorrentTracker};
@@ -20,6 +21,23 @@ pub fn http_api(handle: Handle, addr: SocketAddr, data: Arc<TorrentTracker>) ->
     )
 }
 
+pub fn https_api(handle: Handle, addr: SocketAddr, data: Arc<TorrentTracker>, ssl_key: String, ssl_cert: String) -> impl Future<Output = Result<(), std::io::Error>>
+{
+    let ssl_config = RustlsConfig::from_pem_file(
+        ssl_cert.clone(),
+        ssl_key.clone()
+    ).await.unwrap();
+
+    info!("[API] Starting server listener with SSL on {}", addr);
+    axum_server::bind_rustls(addr, ssl_config)
+        .handle(handle)
+        .serve(Router::new()
+            .route("/stats", get(http_api_stats))
+            .layer(Extension(data))
+            .into_make_service_with_connect_info::<SocketAddr>()
+        )
+}
+
 pub async fn http_api_stats(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension<Arc<TorrentTracker>>) -> String
 {
     http_api_stats_log(ip, state.clone()).await;

From e710bffd460c0916515e5f4083f9c9e741213768 Mon Sep 17 00:00:00 2001
From: Power2All
Date: Tue, 2 Aug 2022 14:42:24 +0200
Subject: [PATCH 3/6] Bump version

---
 Cargo.lock | 2 +-
 Cargo.toml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 67b6cf4..17cf9e7 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1682,7 +1682,7 @@ dependencies = [
 
 [[package]]
 name = "torrust-axum"
-version = "3.0.1"
+version = "3.1.0"
 dependencies = [
  "async-trait",
  "axum",
diff --git a/Cargo.toml b/Cargo.toml
index a50aa5b..f057199 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "torrust-axum"
-version = "3.0.1"
+version = "3.1.0"
 edition = "2021"
 license = "AGPL-3.0"
 authors = [

From 44bcd6fb8c4e0288cc810ec8b58a708a23654e18 Mon Sep 17 00:00:00 2001
From: Power2All
Date: Thu, 4 Aug 2022 12:47:13 +0200
Subject: [PATCH 4/6] Adding first part of the API upgrade, following documentation at https://torrust.com/torrust-tracker/api/

---
 src/http_api.rs |  92 ++++++++++++++++++++++++++++++++++++---
 src/main.rs     |  15 ++++++-
 src/tracker.rs  | 113 ++++++++++++++++++++++++++++++++++++++----------
 3 files changed, 189 insertions(+), 31 deletions(-)

diff --git a/src/http_api.rs b/src/http_api.rs
index 194dfc4..584871b 100644
--- a/src/http_api.rs
+++ b/src/http_api.rs
@@ -2,26 +2,31 @@ use std::future::Future;
 use std::net::{IpAddr, SocketAddr};
 use axum::{Extension, Router};
 use axum_client_ip::ClientIp;
-use axum::routing::get;
+use axum::routing::{get, post, delete};
 use axum_server::{Handle, Server};
 use axum_server::tls_rustls::RustlsConfig;
 use log::info;
 use scc::ebr::Arc;
 use crate::tracker::{StatsEvent, TorrentTracker};
 
-pub fn http_api(handle: Handle, addr: SocketAddr, data: Arc<TorrentTracker>) -> impl Future<Output = Result<(), std::io::Error>>
+pub async fn http_api(handle: Handle, addr: SocketAddr, data: Arc<TorrentTracker>) -> impl Future<Output = Result<(), std::io::Error>>
 {
     info!("[API] Starting server listener on {}", addr);
     Server::bind(addr)
         .handle(handle)
         .serve(Router::new()
-            .route("/stats", get(http_api_stats))
+            .route("/api/stats", get(http_api_stats_get))
+            .route("/api/torrents", get(http_api_torrents_get))
+            .route("/api/torrent/:info_hash", get(http_api_torrent_get))
+            .route("/api/whitelist/:info_hash", get(http_api_whitelist_get).post(http_api_whitelist_post).delete(http_api_whitelist_delete))
+            .route("/api/blacklist/:info_hash", get(http_api_blacklist_get).post(http_api_blacklist_post).delete(http_api_blacklist_delete))
+            .route("/api/key/:seconds_valid", post(http_api_key_post).delete(http_api_key_delete))
             .layer(Extension(data))
             .into_make_service_with_connect_info::<SocketAddr>()
         )
 }
 
-pub fn https_api(handle: Handle, addr: SocketAddr, data: Arc<TorrentTracker>, ssl_key: String, ssl_cert: String) -> impl Future<Output = Result<(), std::io::Error>>
+pub async fn https_api(handle: Handle, addr: SocketAddr, data: Arc<TorrentTracker>, ssl_key: String, ssl_cert: String) -> impl Future<Output = Result<(), std::io::Error>>
 {
     let ssl_config = RustlsConfig::from_pem_file(
         ssl_cert.clone(),
@@ -32,19 +37,94 @@ pub fn https_api(handle: Handle, addr: SocketAddr, data: Arc<TorrentTracker>, ss
     axum_server::bind_rustls(addr, ssl_config)
         .handle(handle)
         .serve(Router::new()
-            .route("/stats", get(http_api_stats))
+            .route("/api/stats", get(http_api_stats_get))
+            .route("/api/torrents", get(http_api_torrents_get))
+            .route("/api/torrent/:info_hash", get(http_api_torrent_get))
+            .route("/api/whitelist/:info_hash", get(http_api_whitelist_get).post(http_api_whitelist_post).delete(http_api_whitelist_delete))
+            .route("/api/blacklist/:info_hash", get(http_api_blacklist_get).post(http_api_blacklist_post).delete(http_api_blacklist_delete))
+            .route("/api/key/:seconds_valid", post(http_api_key_post).delete(http_api_key_delete))
             .layer(Extension(data))
             .into_make_service_with_connect_info::<SocketAddr>()
         )
 }
 
-pub async fn http_api_stats(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension<Arc<TorrentTracker>>) -> String
+pub async fn 
http_api_stats_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String { http_api_stats_log(ip, state.clone()).await; let stats = state.get_stats().await; serde_json::to_string(&stats).unwrap() } +pub async fn http_api_torrents_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + +pub async fn http_api_torrent_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + +pub async fn http_api_whitelist_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + +pub async fn http_api_whitelist_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + +pub async fn http_api_whitelist_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + +pub async fn http_api_blacklist_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + +pub async fn http_api_blacklist_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + +pub async fn http_api_blacklist_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + +pub async fn http_api_key_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + +pub async fn http_api_key_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +{ + http_api_stats_log(ip, state.clone()).await; + let return_data: Vec = vec![]; + serde_json::to_string(&return_data).unwrap() +} + pub async fn http_api_stats_log(ip: IpAddr, tracker: Arc) { if ip.is_ipv4() { diff --git a/src/main.rs b/src/main.rs index 1792720..88040cd 100644 --- a/src/main.rs +++ b/src/main.rs @@ -7,7 +7,7 @@ use log::{error, info}; use scc::ebr::Arc; use torrust_axum::common::{tcp_check_host_and_port_used, udp_check_host_and_port_used}; use 
torrust_axum::config; -use torrust_axum::http_api::http_api; +use torrust_axum::http_api::{http_api, https_api}; use torrust_axum::http_service::{http_service, https_service}; use torrust_axum::logging::setup_logging; use torrust_axum::tracker::{StatsEvent, TorrentTracker}; @@ -36,13 +36,18 @@ async fn main() -> std::io::Result<()> let handle = Handle::new(); let mut api_futures = Vec::new(); + let mut apis_futures = Vec::new(); for api_server_object in &config.api_server { if api_server_object.enabled { tcp_check_host_and_port_used(api_server_object.bind_address.clone()); let address: SocketAddr = api_server_object.bind_address.parse().unwrap(); let handle = handle.clone(); let tracker_clone = tracker.clone(); - api_futures.push(http_api(handle.clone(), address.clone(), tracker_clone)); + if api_server_object.ssl { + apis_futures.push(https_api(handle.clone(), address.clone(), tracker_clone, api_server_object.ssl_key.clone(), api_server_object.ssl_cert.clone()).await); + } else { + api_futures.push(http_api(handle.clone(), address.clone(), tracker_clone).await); + } } } @@ -79,6 +84,12 @@ async fn main() -> std::io::Result<()> }); } + if apis_futures.len() != 0 { + tokio::spawn(async move { + let _ = try_join_all(apis_futures).await; + }); + } + if http_futures.len() != 0 { tokio::spawn(async move { let _ = try_join_all(http_futures).await; diff --git a/src/tracker.rs b/src/tracker.rs index 89cec81..a2784e1 100644 --- a/src/tracker.rs +++ b/src/tracker.rs @@ -92,7 +92,9 @@ pub struct Torrents { pub map: BTreeMap, pub updates: HashMap, pub shadow: HashMap, - pub stats: Stats + pub stats: Stats, + pub whitelist: HashMap, + pub blacklist: HashMap } pub struct TorrentTracker { @@ -135,12 +137,15 @@ impl TorrentTracker { udp6_connections_handled: 0, udp6_announces_handled: 0, udp6_scrapes_handled: 0 - } + }, + whitelist: HashMap::new(), + blacklist: HashMap::new() })), sqlx: DatabaseConnector::new(config.clone()).await } } + /* === Statistics === */ pub async fn get_stats(&self) -> Stats { let torrents_arc = self.torrents.clone(); @@ -220,6 +225,7 @@ impl TorrentTracker { stats } + /* === Torrents === */ pub async fn load_torrents(&self) { if let Ok(torrents) = self.sqlx.load_torrents().await { @@ -242,6 +248,15 @@ impl TorrentTracker { } } + pub async fn save_torrents(&self) -> bool + { + let shadow = self.get_shadow().await.clone(); + if self.sqlx.save_torrents(shadow).await.is_ok() { + return true; + } + false + } + pub async fn add_torrent(&self, info_hash: InfoHash, torrent_entry: TorrentEntry, persistent: bool) { let torrents_arc = self.torrents.clone(); @@ -293,6 +308,7 @@ impl TorrentTracker { if remove_leechers > 0 { self.update_stats(StatsEvent::Peers, (0 - remove_leechers) as i64).await; } } + /* === Peers === */ pub async fn add_peer(&self, info_hash: InfoHash, peer_id: PeerId, peer_entry: TorrentPeer, completed: bool, persistent: bool) -> TorrentEntry { let mut added_seeder = false; @@ -395,6 +411,33 @@ impl TorrentTracker { torrent_entry } + pub async fn clean_peers(&self, peer_timeout: Duration) + { + let torrents_arc = self.torrents.clone(); + let torrents_lock = torrents_arc.write().await; + + let mut torrent_index = vec![]; + for (info_hash, _torrent_entry) in torrents_lock.map.iter() { + torrent_index.push(*info_hash); + } + drop(torrents_lock); + + for info_hash in torrent_index.iter() { + let torrent_option = self.get_torrent(*info_hash).await.clone(); + if torrent_option.is_some() { + let torrent = torrent_option.unwrap().clone(); + for (peer_id, torrent_peer) in 
torrent.peers.iter() { + if torrent_peer.updated.elapsed() > peer_timeout { + let _ = self.remove_peer(*info_hash, *peer_id, self.config.clone().persistency).await; + } + } + } else { + continue; + } + } + } + + /* === Updates === */ pub async fn add_update(&self, info_hash: InfoHash, completed: i64) { let torrents_arc = self.torrents.clone(); @@ -437,6 +480,7 @@ impl TorrentTracker { self.set_stats(StatsEvent::TorrentsUpdates, 0).await; } + /* === Shadow === */ pub async fn add_shadow(&self, info_hash: InfoHash, completed: i64) { let torrents_arc = self.torrents.clone(); @@ -475,38 +519,61 @@ impl TorrentTracker { self.set_stats(StatsEvent::TorrentsShadow, 0).await; } - pub async fn clean_peers(&self, peer_timeout: Duration) + /* === Whitelist === */ + pub async fn add_whitelist(&self, info_hash: InfoHash) { let torrents_arc = self.torrents.clone(); - let torrents_lock = torrents_arc.write().await; + let mut torrents_lock = torrents_arc.write().await; + torrents_lock.whitelist.insert(info_hash, 0i64); + drop(torrents_lock); + } - let mut torrent_index = vec![]; - for (info_hash, _torrent_entry) in torrents_lock.map.iter() { - torrent_index.push(*info_hash); + pub async fn check_whitelist(&self, info_hash: InfoHash) -> bool + { + let torrents_arc = self.torrents.clone(); + let mut torrents_lock = torrents_arc.write().await; + let whitelist = torrents_lock.whitelist.get(&info_hash).cloned(); + drop(torrents_lock); + if whitelist.is_some() { + return true; } + false + } + + pub async fn clear_whitelist(&self) + { + let torrents_arc = self.torrents.clone(); + let mut torrents_lock = torrents_arc.write().await; + torrents_lock.whitelist = HashMap::new(); drop(torrents_lock); + } - for info_hash in torrent_index.iter() { - let torrent_option = self.get_torrent(*info_hash).await.clone(); - if torrent_option.is_some() { - let torrent = torrent_option.unwrap().clone(); - for (peer_id, torrent_peer) in torrent.peers.iter() { - if torrent_peer.updated.elapsed() > peer_timeout { - let _ = self.remove_peer(*info_hash, *peer_id, self.config.clone().persistency).await; - } - } - } else { - continue; - } - } + /* === Blacklist === */ + pub async fn add_blacklist(&self, info_hash: InfoHash) + { + let torrents_arc = self.torrents.clone(); + let mut torrents_lock = torrents_arc.write().await; + torrents_lock.blacklist.insert(info_hash, 0i64); + drop(torrents_lock); } - pub async fn save_torrents(&self) -> bool + pub async fn check_blacklist(&self, info_hash: InfoHash) -> bool { - let shadow = self.get_shadow().await.clone(); - if self.sqlx.save_torrents(shadow).await.is_ok() { + let torrents_arc = self.torrents.clone(); + let mut torrents_lock = torrents_arc.write().await; + let blacklist = torrents_lock.blacklist.get(&info_hash).cloned(); + drop(torrents_lock); + if blacklist.is_some() { return true; } false } + + pub async fn clear_blacklist(&self) + { + let torrents_arc = self.torrents.clone(); + let mut torrents_lock = torrents_arc.write().await; + torrents_lock.whitelist = HashMap::new(); + drop(torrents_lock); + } } From 4e4f1346e3ebfd62973597763fd2ddc53ea213c8 Mon Sep 17 00:00:00 2001 From: Power2All Date: Fri, 5 Aug 2022 17:04:03 +0200 Subject: [PATCH 5/6] Adding White and Black list support, as well as adding API functions --- src/config.rs | 14 +- src/http_api.rs | 382 +++++++++++++++++++++++++++++++++++++++++++----- src/main.rs | 1 + src/tracker.rs | 83 +++++++++++ 4 files changed, 445 insertions(+), 35 deletions(-) diff --git a/src/config.rs b/src/config.rs index 663a3a8..b2f7ac8 100644 --- 
a/src/config.rs +++ b/src/config.rs @@ -38,7 +38,6 @@ pub struct HttpTrackersConfig { pub struct ApiTrackersConfig { pub enabled: bool, pub bind_address: String, - pub key: String, pub ssl: bool, pub ssl_key: String, pub ssl_cert: String @@ -55,6 +54,12 @@ pub struct Configuration { pub persistency: bool, pub persistency_interval: Option, + pub api_key: String, + + pub whitelist: bool, + pub whitelist_from_persistency: bool, + pub blacklist: bool, + pub interval: Option, pub interval_minimum: Option, pub interval_cleanup: Option, @@ -86,7 +91,6 @@ impl Configuration { ApiTrackersConfig { enabled: false, bind_address: String::from("0.0.0.0:8080"), - key: String::from(""), ssl: false, ssl_key: String::from(""), ssl_cert: String::from("") @@ -102,6 +106,12 @@ impl Configuration { persistency: false, persistency_interval: Some(60), + api_key: String::from("MyAccessToken"), + + whitelist: false, + whitelist_from_persistency: false, + blacklist: false, + interval: Some(1800), interval_minimum: Some(1800), interval_cleanup: Some(900), diff --git a/src/http_api.rs b/src/http_api.rs index 584871b..af97d7e 100644 --- a/src/http_api.rs +++ b/src/http_api.rs @@ -1,13 +1,20 @@ +use std::collections::HashMap; use std::future::Future; use std::net::{IpAddr, SocketAddr}; use axum::{Extension, Router}; +use axum::http::{HeaderMap, HeaderValue, StatusCode}; +use axum::http::header::HeaderName; use axum_client_ip::ClientIp; use axum::routing::{get, post, delete}; use axum_server::{Handle, Server}; use axum_server::tls_rustls::RustlsConfig; use log::info; use scc::ebr::Arc; -use crate::tracker::{StatsEvent, TorrentTracker}; +use scc::HashIndex; +use serde_json::json; +use crate::common::{AnnounceEvent, InfoHash, parse_query}; +use crate::config::Configuration; +use crate::tracker::{GetTorrentApi, StatsEvent, TorrentTracker}; pub async fn http_api(handle: Handle, addr: SocketAddr, data: Arc) -> impl Future> { @@ -48,81 +55,373 @@ pub async fn https_api(handle: Handle, addr: SocketAddr, data: Arc>) -> String +pub async fn http_api_stats_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + let stats = state.get_stats().await; - serde_json::to_string(&stats).unwrap() + return (StatusCode::OK, headers, serde_json::to_string(&stats).unwrap()); } -pub async fn http_api_torrents_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_torrents_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, 
state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let torrents = state.get_torrents_api().await; + return (StatusCode::OK, headers, serde_json::to_string(&torrents).unwrap()); } -pub async fn http_api_torrent_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_torrent_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, axum::extract::Path(path_params): axum::extract::Path>, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map.clone()).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let info_hash: InfoHash = match path_params.get("info_hash") { + None => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "unknown info_hash"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + Some(result) => { + let infohash_decoded = hex::decode(result).unwrap(); + let infohash = <[u8; 20]>::try_from(infohash_decoded[0 .. 
20].as_ref()).unwrap(); + InfoHash(infohash) + } + }; + + let torrent = state.get_torrent(info_hash).await; + if torrent.is_some() { + let mut return_data = GetTorrentApi{ + info_hash: info_hash.to_string(), + completed: torrent.clone().unwrap().completed, + seeders: torrent.clone().unwrap().seeders, + leechers: torrent.clone().unwrap().leechers, + peers: vec![] + }; + let mut peer_block = vec![]; + for (peer_id, torrent_peer) in torrent.unwrap().peers.iter() { + peer_block.push(json!([ + { + "id": peer_id.to_string(), + "client": "".to_string() + }, + { + "ip": torrent_peer.peer_addr.to_string(), + "updated": torrent_peer.updated.elapsed().as_secs() as i64, + "uploaded": torrent_peer.uploaded.0, + "downloaded": torrent_peer.downloaded.0, + "left": torrent_peer.left.0, + "event": match torrent_peer.event { + AnnounceEvent::Started => { "Started".to_string() } + AnnounceEvent::Stopped => { "Stopped".to_string() } + AnnounceEvent::Completed => { "Completed".to_string() } + AnnounceEvent::None => { "None".to_string() } + } + } + ])); + } + return_data.peers = peer_block; + + return (StatusCode::OK, headers, serde_json::to_string(&return_data).unwrap()); + } + + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "unknown torrent"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); } -pub async fn http_api_whitelist_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_whitelist_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let stats = state.get_stats().await; + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap()); } -pub async fn http_api_whitelist_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_whitelist_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = 
HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let stats = state.get_stats().await; + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap()); } -pub async fn http_api_whitelist_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_whitelist_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let stats = state.get_stats().await; + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap()); } -pub async fn http_api_blacklist_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_blacklist_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let stats = state.get_stats().await; + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap()); } -pub async fn http_api_blacklist_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_blacklist_post(ClientIp(ip): ClientIp, 
axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let stats = state.get_stats().await; + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap()); } -pub async fn http_api_blacklist_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_blacklist_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let stats = state.get_stats().await; + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap()); } -pub async fn http_api_key_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_key_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut 
return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let stats = state.get_stats().await; + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap()); } -pub async fn http_api_key_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(_params): axum::extract::RawQuery, Extension(state): Extension>) -> String +pub async fn http_api_key_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension>) -> (StatusCode, HeaderMap, String) { http_api_stats_log(ip, state.clone()).await; - let return_data: Vec = vec![]; - serde_json::to_string(&return_data).unwrap() + + let mut headers = HeaderMap::new(); + headers.insert(HeaderName::from_static("content-type"), HeaderValue::from_static("text/plain")); + + let query_map_result = parse_query(params); + let query_map: HashIndex>> = match query_map_result { + Ok(e) => { + e + } + Err(e) => { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "error"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + }; + + if !validate_api_token(state.clone().config.clone(), ip, query_map).await { + let mut return_data: HashMap<&str, &str> = HashMap::new(); + return_data.insert("status", "invalid token"); + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); + } + + let stats = state.get_stats().await; + return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap()); } pub async fn http_api_stats_log(ip: IpAddr, tracker: Arc) @@ -134,4 +433,21 @@ pub async fn http_api_stats_log(ip: IpAddr, tracker: Arc) tracker.update_stats(StatsEvent::Tcp6ConnectionsHandled, 1).await; tracker.update_stats(StatsEvent::Tcp6ApiHandled, 1).await; } +} + +pub async fn validate_api_token(config: Arc, _remote_addr: IpAddr, query: HashIndex>>) -> bool +{ + let token = match query.read("token", |_, v| v.clone()) { + None => { return false; } + Some(result) => { + let token = match String::from_utf8(result[0].to_vec()) { Ok(v) => v, Err(_) => return false }; + match token.parse::() { Ok(v) => v, Err(_) => return false } + } + }; + + if token != config.api_key { + return false; + } + + true } \ No newline at end of file diff --git a/src/main.rs b/src/main.rs index 88040cd..bbce4b3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -151,6 +151,7 @@ async fn main() -> std::io::Result<()> interval.tick().await; let stats = tracker_clone.clone().get_stats().await; info!("[STATS] Torrents: {} - Updates: {} - Shadow {}: - Seeds: {} - Peers: {} - Completed: {}", stats.torrents, stats.torrents_updates, stats.torrents_shadow, stats.seeds, stats.peers, stats.completed); + info!("[STATS] Whitelists: {} - Blacklists: {}", stats.whitelist, stats.blacklist); info!("[STATS TCP IPv4] Connect: {} - API: {} - Announce: {} - Scrape: {}", stats.tcp4_connections_handled, stats.tcp4_api_handled, stats.tcp4_announces_handled, stats.tcp4_scrapes_handled); info!("[STATS TCP IPv6] Connect: {} - API: {} - Announce: {} - Scrape: {}", stats.tcp6_connections_handled, stats.tcp6_api_handled, stats.tcp6_announces_handled, stats.tcp6_scrapes_handled); info!("[STATS UDP IPv4] Connect: {} - Announce: {} - Scrape: {}", stats.udp4_connections_handled, stats.udp4_announces_handled, stats.udp4_scrapes_handled); diff --git a/src/tracker.rs b/src/tracker.rs index 
a2784e1..0e6001f 100644 --- a/src/tracker.rs +++ b/src/tracker.rs @@ -6,6 +6,7 @@ use tokio::sync::RwLock; use crate::common::{InfoHash, NumberOfBytes, PeerId, TorrentPeer}; use crate::config::Configuration; use serde::{Deserialize, Serialize}; +use serde_json::Value; use crate::databases::DatabaseConnector; pub enum StatsEvent { @@ -18,6 +19,8 @@ pub enum StatsEvent { Seeds, Peers, Completed, + Whitelist, + Blacklist, Tcp4ConnectionsHandled, Tcp4ApiHandled, Tcp4AnnouncesHandled, @@ -46,6 +49,8 @@ pub struct Stats { pub seeds: i64, pub peers: i64, pub completed: i64, + pub whitelist: i64, + pub blacklist: i64, pub tcp4_connections_handled: i64, pub tcp4_api_handled: i64, pub tcp4_announces_handled: i64, @@ -97,6 +102,23 @@ pub struct Torrents { pub blacklist: HashMap } +#[derive(Serialize, Deserialize, Clone)] +pub struct GetTorrentsApi { + pub info_hash: String, + pub completed: i64, + pub seeders: i64, + pub leechers: i64 +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct GetTorrentApi { + pub info_hash: String, + pub completed: i64, + pub seeders: i64, + pub leechers: i64, + pub peers: Vec +} + pub struct TorrentTracker { pub config: Arc, pub torrents: Arc>, @@ -123,6 +145,8 @@ impl TorrentTracker { seeds: 0, peers: 0, completed: 0, + whitelist: 0, + blacklist: 0, tcp4_connections_handled: 0, tcp4_api_handled: 0, tcp4_announces_handled: 0, @@ -170,6 +194,8 @@ impl TorrentTracker { StatsEvent::Seeds => { stats.seeds += value; } StatsEvent::Peers => { stats.peers += value; } StatsEvent::Completed => { stats.completed += value; } + StatsEvent::Whitelist => { stats.whitelist += value; } + StatsEvent::Blacklist => { stats.blacklist += value; } StatsEvent::Tcp4ConnectionsHandled => { stats.tcp4_connections_handled += value; } StatsEvent::Tcp4ApiHandled => { stats.tcp4_api_handled += value; } StatsEvent::Tcp4AnnouncesHandled => { stats.tcp4_announces_handled += value; } @@ -205,6 +231,8 @@ impl TorrentTracker { StatsEvent::Seeds => { stats.seeds = value; } StatsEvent::Peers => { stats.peers = value; } StatsEvent::Completed => { stats.completed = value; } + StatsEvent::Whitelist => { stats.whitelist = value; } + StatsEvent::Blacklist => { stats.blacklist = value; } StatsEvent::Tcp4ConnectionsHandled => { stats.tcp4_connections_handled = value; } StatsEvent::Tcp4ApiHandled => { stats.tcp4_api_handled = value; } StatsEvent::Tcp4AnnouncesHandled => { stats.tcp4_announces_handled = value; } @@ -281,6 +309,37 @@ impl TorrentTracker { torrent } + pub async fn get_torrents_api(&self) -> Vec + { + let mut return_data: Vec = vec![]; + + let torrents_arc = self.torrents.clone(); + let torrents_lock = torrents_arc.write().await; + + let mut torrent_index = vec![]; + for (info_hash, _torrent_entry) in torrents_lock.map.iter() { + torrent_index.push(*info_hash); + } + drop(torrents_lock); + + for info_hash in torrent_index.iter() { + let torrent_option = self.get_torrent(*info_hash).await.clone(); + if torrent_option.is_some() { + let torrent = torrent_option.unwrap().clone(); + return_data.push(GetTorrentsApi{ + info_hash: info_hash.to_string(), + completed: torrent.completed, + seeders: torrent.seeders, + leechers: torrent.leechers + }); + } else { + continue; + } + } + + return_data + } + pub async fn remove_torrent(&self, info_hash: InfoHash, persistent: bool) { let mut removed_torrent = false; @@ -526,6 +585,17 @@ impl TorrentTracker { let mut torrents_lock = torrents_arc.write().await; torrents_lock.whitelist.insert(info_hash, 0i64); drop(torrents_lock); + 
self.update_stats(StatsEvent::Whitelist, 1).await; + } + + pub async fn remove_whitelist(&self, info_hash: InfoHash) + { + let torrents_arc = self.torrents.clone(); + let mut torrents_lock = torrents_arc.write().await; + torrents_lock.whitelist.remove(&info_hash); + let whitelist_count = torrents_lock.whitelist.len(); + drop(torrents_lock); + self.set_stats(StatsEvent::Whitelist, whitelist_count as i64).await; } pub async fn check_whitelist(&self, info_hash: InfoHash) -> bool @@ -546,6 +616,7 @@ impl TorrentTracker { let mut torrents_lock = torrents_arc.write().await; torrents_lock.whitelist = HashMap::new(); drop(torrents_lock); + self.set_stats(StatsEvent::Whitelist, 0).await; } /* === Blacklist === */ @@ -555,6 +626,17 @@ impl TorrentTracker { let mut torrents_lock = torrents_arc.write().await; torrents_lock.blacklist.insert(info_hash, 0i64); drop(torrents_lock); + self.update_stats(StatsEvent::Blacklist, 1).await; + } + + pub async fn remove_blacklist(&self, info_hash: InfoHash) + { + let torrents_arc = self.torrents.clone(); + let mut torrents_lock = torrents_arc.write().await; + torrents_lock.blacklist.remove(&info_hash); + let blacklist_count = torrents_lock.blacklist.len(); + drop(torrents_lock); + self.set_stats(StatsEvent::Blacklist, blacklist_count as i64).await; } pub async fn check_blacklist(&self, info_hash: InfoHash) -> bool @@ -575,5 +657,6 @@ impl TorrentTracker { let mut torrents_lock = torrents_arc.write().await; torrents_lock.whitelist = HashMap::new(); drop(torrents_lock); + self.set_stats(StatsEvent::Blacklist, 1).await; } } From 60fa6640a9e92123d1efbdcd06aa479e21ac7405 Mon Sep 17 00:00:00 2001 From: Power2All Date: Mon, 8 Aug 2022 16:25:38 +0200 Subject: [PATCH 6/6] Adding Whitelist mechanic --- src/http_api.rs | 106 ++++++++++++++++++++++++++++++++++---------- src/http_service.rs | 12 +++++ src/tracker.rs | 7 ++- src/udp_common.rs | 3 ++ src/udp_service.rs | 10 +++++ 5 files changed, 113 insertions(+), 25 deletions(-) diff --git a/src/http_api.rs b/src/http_api.rs index af97d7e..711a5f9 100644 --- a/src/http_api.rs +++ b/src/http_api.rs @@ -5,7 +5,7 @@ use axum::{Extension, Router}; use axum::http::{HeaderMap, HeaderValue, StatusCode}; use axum::http::header::HeaderName; use axum_client_ip::ClientIp; -use axum::routing::{get, post, delete}; +use axum::routing::{get, post}; use axum_server::{Handle, Server}; use axum_server::tls_rustls::RustlsConfig; use log::info; @@ -67,7 +67,7 @@ pub async fn http_api_stats_get(ClientIp(ip): ClientIp, axum::extract::RawQuery( Ok(e) => { e } - Err(e) => { + Err(_) => { let mut return_data: HashMap<&str, &str> = HashMap::new(); return_data.insert("status", "error"); return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); @@ -96,7 +96,7 @@ pub async fn http_api_torrents_get(ClientIp(ip): ClientIp, axum::extract::RawQue Ok(e) => { e } - Err(e) => { + Err(_) => { let mut return_data: HashMap<&str, &str> = HashMap::new(); return_data.insert("status", "error"); return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); @@ -125,7 +125,7 @@ pub async fn http_api_torrent_get(ClientIp(ip): ClientIp, axum::extract::RawQuer Ok(e) => { e } - Err(e) => { + Err(_) => { let mut return_data: HashMap<&str, &str> = HashMap::new(); return_data.insert("status", "error"); return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap()); @@ -142,7 +142,7 @@ pub async fn http_api_torrent_get(ClientIp(ip): ClientIp, axum::extract::RawQuer None => { let mut return_data: 
HashMap<&str, &str> = HashMap::new();
             return_data.insert("status", "unknown info_hash");
-            return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
+            return (StatusCode::NOT_FOUND, headers, serde_json::to_string(&return_data).unwrap());
         }
         Some(result) => {
             let infohash_decoded = hex::decode(result).unwrap();
@@ -189,10 +189,10 @@ pub async fn http_api_torrent_get(ClientIp(ip): ClientIp, axum::extract::RawQuer
     let mut return_data: HashMap<&str, &str> = HashMap::new();
     return_data.insert("status", "unknown torrent");
-    return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
+    return (StatusCode::NOT_FOUND, headers, serde_json::to_string(&return_data).unwrap());
 }
 
-pub async fn http_api_whitelist_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension<Arc<TorrentTracker>>) -> (StatusCode, HeaderMap, String)
+pub async fn http_api_whitelist_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, axum::extract::Path(path_params): axum::extract::Path<HashMap<String, String>>, Extension(state): Extension<Arc<TorrentTracker>>) -> (StatusCode, HeaderMap, String)
 {
     http_api_stats_log(ip, state.clone()).await;
 
@@ -204,7 +204,7 @@ pub async fn http_api_whitelist_get(ClientIp(ip): ClientIp, axum::extract::RawQu
         Ok(e) => { e }
-        Err(e) => {
+        Err(_) => {
             let mut return_data: HashMap<&str, &str> = HashMap::new();
             return_data.insert("status", "error");
             return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
@@ -217,11 +217,33 @@ pub async fn http_api_whitelist_get(ClientIp(ip): ClientIp, axum::extract::RawQu
         return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
     }
 
-    let stats = state.get_stats().await;
-    return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap());
+    let info_hash: InfoHash = match path_params.get("info_hash") {
+        None => {
+            let mut return_data: HashMap<&str, &str> = HashMap::new();
+            return_data.insert("status", "unknown info_hash");
+            return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
+        }
+        Some(result) => {
+            let infohash_decoded = hex::decode(result);
+            if infohash_decoded.is_err() || infohash_decoded.clone().unwrap().len() != 20 {
+                let return_data = json!({ "status": "invalid info_hash" });
+                return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
+            }
+            let infohash = <[u8; 20]>::try_from(infohash_decoded.clone().unwrap()[0 .. 20].as_ref()).unwrap();
+            InfoHash(infohash)
+        }
+    };
+
+    if state.check_whitelist(info_hash).await {
+        let return_data = json!({ "status": "ok" });
+        return (StatusCode::OK, headers, serde_json::to_string(&return_data).unwrap());
+    }
+
+    let return_data = json!({ "status": "not found"});
+    return (StatusCode::NOT_FOUND, headers, serde_json::to_string(&return_data).unwrap());
 }
 
-pub async fn http_api_whitelist_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension<Arc<TorrentTracker>>) -> (StatusCode, HeaderMap, String)
+pub async fn http_api_whitelist_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, axum::extract::Path(path_params): axum::extract::Path<HashMap<String, String>>, Extension(state): Extension<Arc<TorrentTracker>>) -> (StatusCode, HeaderMap, String)
 {
     http_api_stats_log(ip, state.clone()).await;
 
@@ -233,7 +255,7 @@ pub async fn http_api_whitelist_post(ClientIp(ip): ClientIp, axum::extract::RawQ
         Ok(e) => { e }
-        Err(e) => {
+        Err(_) => {
             let mut return_data: HashMap<&str, &str> = HashMap::new();
             return_data.insert("status", "error");
             return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
@@ -246,11 +268,30 @@ pub async fn http_api_whitelist_post(ClientIp(ip): ClientIp, axum::extract::RawQ
         return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
     }
 
-    let stats = state.get_stats().await;
-    return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap());
+    let info_hash: InfoHash = match path_params.get("info_hash") {
+        None => {
+            let mut return_data: HashMap<&str, &str> = HashMap::new();
+            return_data.insert("status", "unknown info_hash");
+            return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
+        }
+        Some(result) => {
+            let infohash_decoded = hex::decode(result);
+            if infohash_decoded.is_err() || infohash_decoded.clone().unwrap().len() != 20 {
+                let return_data = json!({ "status": "invalid info_hash" });
+                return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
+            }
+            let infohash = <[u8; 20]>::try_from(infohash_decoded.clone().unwrap()[0 .. 20].as_ref()).unwrap();
+            InfoHash(infohash)
+        }
+    };
+
+    state.add_whitelist(info_hash).await;
+
+    let return_data = json!({ "status": "ok"});
+    return (StatusCode::OK, headers, serde_json::to_string(&return_data).unwrap());
 }
 
-pub async fn http_api_whitelist_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension<Arc<TorrentTracker>>) -> (StatusCode, HeaderMap, String)
+pub async fn http_api_whitelist_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, axum::extract::Path(path_params): axum::extract::Path<HashMap<String, String>>, Extension(state): Extension<Arc<TorrentTracker>>) -> (StatusCode, HeaderMap, String)
 {
     http_api_stats_log(ip, state.clone()).await;
 
@@ -262,7 +303,7 @@ pub async fn http_api_whitelist_delete(ClientIp(ip): ClientIp, axum::extract::Ra
         Ok(e) => { e }
-        Err(e) => {
+        Err(_) => {
             let mut return_data: HashMap<&str, &str> = HashMap::new();
             return_data.insert("status", "error");
             return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
@@ -275,8 +316,27 @@ pub async fn http_api_whitelist_delete(ClientIp(ip): ClientIp, axum::extract::Ra
         return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
     }
 
-    let stats = state.get_stats().await;
-    return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&stats).unwrap());
+    let info_hash: InfoHash = match path_params.get("info_hash") {
+        None => {
+            let mut return_data: HashMap<&str, &str> = HashMap::new();
+            return_data.insert("status", "unknown info_hash");
+            return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
+        }
+        Some(result) => {
+            let infohash_decoded = hex::decode(result);
+            if infohash_decoded.is_err() || infohash_decoded.clone().unwrap().len() != 20 {
+                let return_data = json!({ "status": "invalid info_hash" });
+                return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
+            }
+            let infohash = <[u8; 20]>::try_from(infohash_decoded.clone().unwrap()[0 .. 20].as_ref()).unwrap();
+            InfoHash(infohash)
+        }
+    };
+
+    state.remove_whitelist(info_hash).await;
+
+    let return_data = json!({ "status": "ok"});
+    return (StatusCode::OK, headers, serde_json::to_string(&return_data).unwrap());
 }
 
 pub async fn http_api_blacklist_get(ClientIp(ip): ClientIp, axum::extract::RawQuery(params): axum::extract::RawQuery, Extension(state): Extension<Arc<TorrentTracker>>) -> (StatusCode, HeaderMap, String)
@@ -291,7 +351,7 @@ pub async fn http_api_blacklist_get(ClientIp(ip): ClientIp, axum::extract::RawQu
         Ok(e) => { e }
-        Err(e) => {
+        Err(_) => {
             let mut return_data: HashMap<&str, &str> = HashMap::new();
             return_data.insert("status", "error");
             return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
@@ -320,7 +380,7 @@ pub async fn http_api_blacklist_post(ClientIp(ip): ClientIp, axum::extract::RawQ
         Ok(e) => { e }
-        Err(e) => {
+        Err(_) => {
             let mut return_data: HashMap<&str, &str> = HashMap::new();
             return_data.insert("status", "error");
             return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
@@ -349,7 +409,7 @@ pub async fn http_api_blacklist_delete(ClientIp(ip): ClientIp, axum::extract::Ra
         Ok(e) => { e }
-        Err(e) => {
+        Err(_) => {
             let mut return_data: HashMap<&str, &str> = HashMap::new();
             return_data.insert("status", "error");
             return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
@@ -378,7 +438,7 @@ pub async fn http_api_key_post(ClientIp(ip): ClientIp, axum::extract::RawQuery(p
         Ok(e) => { e }
-        Err(e) => {
+        Err(_) => {
             let mut return_data: HashMap<&str, &str> = HashMap::new();
             return_data.insert("status", "error");
             return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
@@ -407,7 +467,7 @@ pub async fn http_api_key_delete(ClientIp(ip): ClientIp, axum::extract::RawQuery
         Ok(e) => { e }
-        Err(e) => {
+        Err(_) => {
             let mut return_data: HashMap<&str, &str> = HashMap::new();
             return_data.insert("status", "error");
             return (StatusCode::BAD_REQUEST, headers, serde_json::to_string(&return_data).unwrap());
diff --git a/src/http_service.rs b/src/http_service.rs
index f7fc9f1..340afd9 100644
--- a/src/http_service.rs
+++ b/src/http_service.rs
@@ -78,6 +78,18 @@ pub async fn http_service_announce(ClientIp(ip): ClientIp, axum::extract::RawQue
         }
     };
 
+    // Check if whitelist is enabled, and if so, check if the torrent hash is known, and if not, show error.
+    if state.config.whitelist && !state.check_whitelist(announce_unwrapped.info_hash).await {
+        let return_string = (ben_map! {"failure reason" => ben_bytes!("unknown info_hash")}).encode();
+        return (StatusCode::OK, headers, return_string);
+    }
+
+    // Check if blacklist is enabled, and if so, check if the torrent hash is known, and if so, show error.
+    if state.config.blacklist && state.check_blacklist(announce_unwrapped.info_hash).await {
+        let return_string = (ben_map! {"failure reason" => ben_bytes!("forbidden info_hash")}).encode();
+        return (StatusCode::OK, headers, return_string);
+    }
+
     let (_torrent_peer, torrent_entry) = match handle_announce(state.clone(), announce_unwrapped.clone()).await {
         Ok(result) => { result }
         Err(e) => {
diff --git a/src/tracker.rs b/src/tracker.rs
index 0e6001f..9ee09eb 100644
--- a/src/tracker.rs
+++ b/src/tracker.rs
@@ -267,6 +267,9 @@ impl TorrentTracker {
                 seeders: 0,
                 leechers: 0
             }, false).await;
+            if self.config.whitelist && self.config.whitelist_from_persistency {
+                self.add_whitelist(*info_hash).await;
+            }
             torrent_count += 1;
             completed_count += *completed;
         }
@@ -601,7 +604,7 @@ impl TorrentTracker {
     pub async fn check_whitelist(&self, info_hash: InfoHash) -> bool
     {
         let torrents_arc = self.torrents.clone();
-        let mut torrents_lock = torrents_arc.write().await;
+        let torrents_lock = torrents_arc.write().await;
         let whitelist = torrents_lock.whitelist.get(&info_hash).cloned();
         drop(torrents_lock);
         if whitelist.is_some() {
@@ -642,7 +645,7 @@ impl TorrentTracker {
     pub async fn check_blacklist(&self, info_hash: InfoHash) -> bool
     {
         let torrents_arc = self.torrents.clone();
-        let mut torrents_lock = torrents_arc.write().await;
+        let torrents_lock = torrents_arc.write().await;
         let blacklist = torrents_lock.blacklist.get(&info_hash).cloned();
         drop(torrents_lock);
         if blacklist.is_some() {
diff --git a/src/udp_common.rs b/src/udp_common.rs
index 47fe8d5..e66c07b 100644
--- a/src/udp_common.rs
+++ b/src/udp_common.rs
@@ -622,6 +622,9 @@ pub enum ServerError {
     #[error("torrent not on whitelist")]
     TorrentNotWhitelisted,
 
+    #[error("torrent blacklist")]
+    TorrentBlacklisted,
+
     #[error("peer not authenticated")]
     PeerNotAuthenticated,
 
diff --git a/src/udp_service.rs b/src/udp_service.rs
index 46f8c17..cfc75be 100644
--- a/src/udp_service.rs
+++ b/src/udp_service.rs
@@ -169,6 +169,16 @@ pub async fn handle_udp_announce(remote_addr: SocketAddr, request: &AnnounceRequ
         Some(result) => { result }
     };
 
+    // Check if whitelist is enabled, and if so, check if the torrent hash is known, and if not, show error.
+    if tracker.config.whitelist && !tracker.check_whitelist(InfoHash(request.info_hash.0)).await {
+        return Err(ServerError::TorrentNotWhitelisted);
+    }
+
+    // Check if blacklist is enabled, and if so, check if the torrent hash is known, and if so, show error.
+    if tracker.config.blacklist && tracker.check_blacklist(InfoHash(request.info_hash.0)).await {
+        return Err(ServerError::TorrentBlacklisted);
+    }
+
     // Handle the request data.
     match handle_announce(tracker.clone(), AnnounceQueryRequest {
         info_hash: InfoHash(request.info_hash.0),