Feat: add a new info_hash URL query array to the torrent list endpoint #728

Merged · 2 commits · Mar 11, 2024
36 changes: 36 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -34,6 +34,7 @@ aquatic_udp_protocol = "0"
async-trait = "0"
axum = { version = "0", features = ["macros"] }
axum-client-ip = "0"
axum-extra = { version = "0.9.2", features = ["query"] }
axum-server = { version = "0", features = ["tls-rustls"] }
binascii = "0"
chrono = { version = "0", default-features = false, features = ["clock"] }
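
The new axum-extra dependency (with its "query" feature) provides the `Query` extractor used below in handlers.rs: unlike `axum::extract::Query`, it can collect a repeated query parameter such as `?info_hash=a&info_hash=b` into a `Vec`. Below is a minimal standalone sketch of that behaviour, not taken from this PR; it assumes axum 0.7, tokio (macros + runtime), and serde with the "derive" feature, and the route path and struct names are illustrative only.

// Minimal sketch, not from this PR: assumes axum 0.7, axum-extra 0.9 with the
// "query" feature, tokio (macros + runtime), and serde with "derive".
use axum::{routing::get, Router};
use axum_extra::extract::Query; // collects repeated query params into a Vec
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Params {
    // `?info_hash=a&info_hash=b` deserializes into vec!["a", "b"];
    // plain `axum::extract::Query` cannot deserialize repeated keys into a Vec.
    #[serde(default)]
    info_hash: Vec<String>,
}

async fn list(Query(params): Query<Params>) -> String {
    format!("info_hashes: {:?}", params.info_hash)
}

#[tokio::main]
async fn main() {
    let app = Router::new().route("/torrents", get(list));
    let listener = tokio::net::TcpListener::bind("127.0.0.1:3000").await.unwrap();
    axum::serve(listener, app).await.unwrap();
}
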
36 changes: 29 additions & 7 deletions src/core/services/torrent.rs
@@ -115,7 +115,7 @@ pub async fn get_torrent_info(tracker: Arc<Tracker>, info_hash: &InfoHash) -> Op
}

/// It returns all the information the tracker has about multiple torrents in a [`BasicInfo`] struct, excluding the peer list.
pub async fn get_torrents(tracker: Arc<Tracker>, pagination: &Pagination) -> Vec<BasicInfo> {
pub async fn get_torrents_page(tracker: Arc<Tracker>, pagination: &Pagination) -> Vec<BasicInfo> {
let db = tracker.torrents.get_torrents().await;

let mut basic_infos: Vec<BasicInfo> = vec![];
@@ -134,6 +134,28 @@ pub async fn get_torrents(tracker: Arc<Tracker>, pagination: &Pagination) -> Vec
basic_infos
}

/// It returns the information the tracker has about the torrents with the given infohashes in a [`BasicInfo`] struct, excluding the peer list.
pub async fn get_torrents(tracker: Arc<Tracker>, info_hashes: &[InfoHash]) -> Vec<BasicInfo> {
let db = tracker.torrents.get_torrents().await;

let mut basic_infos: Vec<BasicInfo> = vec![];

for info_hash in info_hashes {
if let Some(entry) = db.get(info_hash) {
let (seeders, completed, leechers) = entry.get_stats();

basic_infos.push(BasicInfo {
info_hash: *info_hash,
seeders: u64::from(seeders),
completed: u64::from(completed),
leechers: u64::from(leechers),
});
}
}

basic_infos
}
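
With this change the service layer exposes two listing functions: the renamed `get_torrents_page` for offset/limit pagination and the new `get_torrents` for a specific set of infohashes. A rough usage sketch follows; it is illustrative only, not part of the PR, and reuses the helpers from the test module below (`tracker_factory`, `tracker_configuration`).

// Illustrative only; mirrors the setup used by the test module below and
// assumes its helpers (`tracker_factory`, `tracker_configuration`) plus
// `std::str::FromStr` are in scope.
#[tokio::test]
async fn listing_by_page_and_by_infohash() {
    let tracker = Arc::new(tracker_factory(&tracker_configuration()));

    // Offset/limit listing, as before (first page, up to 10 items).
    let page = get_torrents_page(tracker.clone(), &Pagination::new(0, 10)).await;

    // New targeted listing: only the torrents whose infohashes are given.
    let info_hash = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap(); // DevSkim: ignore DS173237
    let selected = get_torrents(tracker.clone(), &[info_hash]).await;

    // With no announced peers, both lookups come back empty.
    assert!(page.is_empty());
    assert!(selected.is_empty());
}
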

#[cfg(test)]
mod tests {
use std::net::{IpAddr, Ipv4Addr, SocketAddr};
@@ -219,7 +241,7 @@ mod tests {
use torrust_tracker_test_helpers::configuration;

use crate::core::services::torrent::tests::sample_peer;
use crate::core::services::torrent::{get_torrents, BasicInfo, Pagination};
use crate::core::services::torrent::{get_torrents_page, BasicInfo, Pagination};
use crate::core::services::tracker_factory;
use crate::shared::bit_torrent::info_hash::InfoHash;

@@ -231,7 +253,7 @@ mod tests {
async fn should_return_an_empty_result_if_the_tracker_does_not_have_any_torrent() {
let tracker = Arc::new(tracker_factory(&tracker_configuration()));

let torrents = get_torrents(tracker.clone(), &Pagination::default()).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::default()).await;

assert_eq!(torrents, vec![]);
}
@@ -247,7 +269,7 @@ mod tests {
.update_torrent_with_peer_and_get_stats(&info_hash, &sample_peer())
.await;

let torrents = get_torrents(tracker.clone(), &Pagination::default()).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::default()).await;

assert_eq!(
torrents,
@@ -279,7 +301,7 @@ mod tests {
let offset = 0;
let limit = 1;

let torrents = get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::new(offset, limit)).await;

assert_eq!(torrents.len(), 1);
}
@@ -303,7 +325,7 @@ mod tests {
let offset = 1;
let limit = 4000;

let torrents = get_torrents(tracker.clone(), &Pagination::new(offset, limit)).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::new(offset, limit)).await;

assert_eq!(torrents.len(), 1);
assert_eq!(
@@ -333,7 +355,7 @@ mod tests {
.update_torrent_with_peer_and_get_stats(&info_hash2, &sample_peer())
.await;

let torrents = get_torrents(tracker.clone(), &Pagination::default()).await;
let torrents = get_torrents_page(tracker.clone(), &Pagination::default()).await;

assert_eq!(
torrents,
91 changes: 70 additions & 21 deletions src/servers/apis/v1/context/torrent/handlers.rs
@@ -4,14 +4,15 @@
use std::str::FromStr;
use std::sync::Arc;

use axum::extract::{Path, Query, State};
use axum::response::{IntoResponse, Json, Response};
use axum::extract::{Path, State};
use axum::response::{IntoResponse, Response};
use axum_extra::extract::Query;
use log::debug;
use serde::{de, Deserialize, Deserializer};
use thiserror::Error;

use super::resources::torrent::ListItem;
use super::responses::{torrent_info_response, torrent_list_response, torrent_not_known_response};
use crate::core::services::torrent::{get_torrent_info, get_torrents, Pagination};
use crate::core::services::torrent::{get_torrent_info, get_torrents, get_torrents_page, Pagination};
use crate::core::Tracker;
use crate::servers::apis::v1::responses::invalid_info_hash_param_response;
use crate::servers::apis::InfoHashParam;
@@ -36,39 +37,87 @@
}
}

/// A container for the optional URL query pagination parameters:
/// `offset` and `limit`.
/// A container for the URL query parameters.
///
/// Pagination: `offset` and `limit`.
/// Array of infohashes: `info_hash`.
///
/// You can either get all torrents with pagination or get a list of torrents
/// by providing a list of infohashes. For example:
///
/// First page of torrents:
///
/// <http://127.0.0.1:1212/api/v1/torrents?token=MyAccessToken>
///
///
/// Only two torrents:
///
/// <http://127.0.0.1:1212/api/v1/torrents?token=MyAccessToken&info_hash=9c38422213e30bff212b30c360d26f9a02136422&info_hash=2b66980093bc11806fab50cb3cb41835b95a0362>
///
///
/// NOTICE: Pagination is ignored if an array of infohashes is provided.
#[derive(Deserialize, Debug)]
pub struct PaginationParams {
pub struct QueryParams {
/// The offset of the first page to return. Starts at 0.
#[serde(default, deserialize_with = "empty_string_as_none")]
pub offset: Option<u32>,
/// The maximum number of items to return per page
/// The maximum number of items to return per page.
#[serde(default, deserialize_with = "empty_string_as_none")]
pub limit: Option<u32>,
/// A list of infohashes to retrieve.
#[serde(default, rename = "info_hash")]
pub info_hashes: Vec<String>,
}

/// It handles the request to get a list of torrents.
///
/// It returns a `200` response with a json array with
/// [`ListItem`]
/// resources.
/// It returns a `200` response with a json array with [`crate::servers::apis::v1::context::torrent::resources::torrent::ListItem`] resources.
///
/// Refer to the [API endpoint documentation](crate::servers::apis::v1::context::torrent#list-torrents)
/// for more information about this endpoint.
pub async fn get_torrents_handler(
State(tracker): State<Arc<Tracker>>,
pagination: Query<PaginationParams>,
) -> Json<Vec<ListItem>> {
pub async fn get_torrents_handler(State(tracker): State<Arc<Tracker>>, pagination: Query<QueryParams>) -> Response {
debug!("pagination: {:?}", pagination);

torrent_list_response(
&get_torrents(
tracker.clone(),
&Pagination::new_with_options(pagination.0.offset, pagination.0.limit),
if pagination.0.info_hashes.is_empty() {
torrent_list_response(
&get_torrents_page(
tracker.clone(),
&Pagination::new_with_options(pagination.0.offset, pagination.0.limit),
)
.await,
)
.await,
)
.into_response()
} else {
match parse_info_hashes(pagination.0.info_hashes) {
Ok(info_hashes) => torrent_list_response(&get_torrents(tracker.clone(), &info_hashes).await).into_response(),
Err(err) => match err {
QueryParamError::InvalidInfoHash { info_hash } => invalid_info_hash_param_response(&info_hash),
},
}
}
}

#[derive(Error, Debug)]
pub enum QueryParamError {
#[error("invalid infohash {info_hash}")]
InvalidInfoHash { info_hash: String },
}

fn parse_info_hashes(info_hashes_str: Vec<String>) -> Result<Vec<InfoHash>, QueryParamError> {
let mut info_hashes: Vec<InfoHash> = Vec::new();

for info_hash_str in info_hashes_str {
match InfoHash::from_str(&info_hash_str) {
Ok(info_hash) => info_hashes.push(info_hash),
Err(_err) => {
return Err(QueryParamError::InvalidInfoHash {
info_hash: info_hash_str,
})
}
}
}

Ok(info_hashes)
}

/// Serde deserialization decorator to map empty Strings to None,
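
A possible unit test for the new `parse_info_hashes` helper, sketched here and not part of this diff; it assumes a `#[cfg(test)]` module inside handlers.rs so that `parse_info_hashes` and `QueryParamError` are in scope.

// Sketch only: assumes this sits in a `#[cfg(test)] mod tests` next to the
// helper, with `super::{parse_info_hashes, QueryParamError}` imported.
#[test]
fn it_should_parse_valid_infohashes_and_reject_invalid_ones() {
    // A valid 40-character hex infohash is parsed successfully.
    let valid = vec!["9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_string()]; // DevSkim: ignore DS173237
    assert_eq!(parse_info_hashes(valid).unwrap().len(), 1);

    // Anything else is rejected with the `InvalidInfoHash` variant.
    let invalid = vec!["INVALID INFO_HASH".to_string()];
    assert!(matches!(
        parse_info_hashes(invalid),
        Err(QueryParamError::InvalidInfoHash { .. })
    ));
}
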
65 changes: 64 additions & 1 deletion tests/servers/api/v1/contract/context/torrent.rs
@@ -19,7 +19,7 @@ use crate::servers::api::v1::contract::fixtures::{
use crate::servers::api::Started;

#[tokio::test]
async fn should_allow_getting_torrents() {
async fn should_allow_getting_all_torrents() {
let env = Started::new(&configuration::ephemeral().into()).await;

let info_hash = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap();
@@ -100,6 +100,48 @@ async fn should_allow_the_torrents_result_pagination() {
env.stop().await;
}

#[tokio::test]
async fn should_allow_getting_a_list_of_torrents_providing_infohashes() {
let env = Started::new(&configuration::ephemeral().into()).await;

let info_hash_1 = InfoHash::from_str("9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d").unwrap(); // DevSkim: ignore DS173237
let info_hash_2 = InfoHash::from_str("0b3aea4adc213ce32295be85d3883a63bca25446").unwrap(); // DevSkim: ignore DS173237

env.add_torrent_peer(&info_hash_1, &PeerBuilder::default().into()).await;
env.add_torrent_peer(&info_hash_2, &PeerBuilder::default().into()).await;

let response = Client::new(env.get_connection_info())
.get_torrents(Query::params(
[
QueryParam::new("info_hash", "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d"), // DevSkim: ignore DS173237
QueryParam::new("info_hash", "0b3aea4adc213ce32295be85d3883a63bca25446"), // DevSkim: ignore DS173237
]
.to_vec(),
))
.await;

assert_torrent_list(
response,
vec![
torrent::ListItem {
info_hash: "9e0217d0fa71c87332cd8bf9dbeabcb2c2cf3c4d".to_string(), // DevSkim: ignore DS173237
seeders: 1,
completed: 0,
leechers: 0,
},
torrent::ListItem {
info_hash: "0b3aea4adc213ce32295be85d3883a63bca25446".to_string(), // DevSkim: ignore DS173237
seeders: 1,
completed: 0,
leechers: 0,
},
],
)
.await;

env.stop().await;
}

#[tokio::test]
async fn should_fail_getting_torrents_when_the_offset_query_parameter_cannot_be_parsed() {
let env = Started::new(&configuration::ephemeral().into()).await;
@@ -134,6 +176,27 @@ async fn should_fail_getting_torrents_when_the_limit_query_parameter_cannot_be_p
env.stop().await;
}

#[tokio::test]
async fn should_fail_getting_torrents_when_the_info_hash_parameter_is_invalid() {
let env = Started::new(&configuration::ephemeral().into()).await;

let invalid_info_hashes = [" ", "-1", "1.1", "INVALID INFO_HASH"];

for invalid_info_hash in &invalid_info_hashes {
let response = Client::new(env.get_connection_info())
.get_torrents(Query::params([QueryParam::new("info_hash", invalid_info_hash)].to_vec()))
.await;

assert_bad_request(
response,
&format!("Invalid URL: invalid infohash param: string \"{invalid_info_hash}\", expected a 40 character long string"),
)
.await;
}

env.stop().await;
}

#[tokio::test]
async fn should_not_allow_getting_torrents_for_unauthenticated_users() {
let env = Started::new(&configuration::ephemeral().into()).await;