Commit 1b36562

dev: more torrent repo testing

1 parent f7f4f37 commit 1b36562

5 files changed (+169 -19 lines changed)

packages/primitives/src/lib.rs (+2 -1)

@@ -4,6 +4,7 @@
 //! which is a `BitTorrent` tracker server. These structures are used not only
 //! by the tracker server crate, but also by other crates in the Torrust
 //! ecosystem.
+use std::collections::BTreeMap;
 use std::time::Duration;
 
 use info_hash::InfoHash;
@@ -58,7 +59,7 @@ pub enum DatabaseDriver {
     MySQL,
 }
 
-pub type PersistentTorrents = Vec<(InfoHash, u32)>;
+pub type PersistentTorrents = BTreeMap<InfoHash, u32>;
 
 /// The mode the tracker will run in.
 ///
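
Not part of the diff: switching `PersistentTorrents` from `Vec<(InfoHash, u32)>` to `BTreeMap<InfoHash, u32>` means persisted torrents are deduplicated by info-hash, kept in key order, and looked up via the map instead of a linear scan. A minimal sketch of those collection semantics, using a stand-in key type because `InfoHash` lives in the real crate:

// Sketch only: `StubInfoHash` stands in for the crate's `InfoHash`.
use std::collections::BTreeMap;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct StubInfoHash([u8; 20]);

type PersistentTorrents = BTreeMap<StubInfoHash, u32>;

fn main() {
    let a = StubInfoHash([1; 20]);
    let b = StubInfoHash([2; 20]);

    // Collecting pairs with a duplicate key keeps one entry (the last value wins),
    // whereas the old `Vec<(InfoHash, u32)>` would have kept both tuples.
    let torrents: PersistentTorrents = [(a, 1_u32), (a, 2), (b, 3)].into_iter().collect();

    assert_eq!(torrents.len(), 2);
    assert_eq!(torrents.get(&a), Some(&2));
}
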

packages/torrent-repository/src/repository/rw_lock_std.rs (+2 -2)

@@ -75,15 +75,15 @@ where
     fn import_persistent(&self, persistent_torrents: &PersistentTorrents) {
         let mut torrents = self.get_torrents_mut();
 
-        for (info_hash, completed) in persistent_torrents {
+        for (info_hash, downloaded) in persistent_torrents {
             // Skip if torrent entry already exists
             if torrents.contains_key(info_hash) {
                 continue;
             }
 
             let entry = EntrySingle {
                 peers: BTreeMap::default(),
-                downloaded: *completed,
+                downloaded: *downloaded,
             };
 
             torrents.insert(*info_hash, entry);
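
For reference, a standalone sketch of the import logic above. The `Hash` alias and `Entry` struct are simplified stand-ins for the crate's `InfoHash` and `EntrySingle`; only the skip-existing behaviour is the point here:

use std::collections::BTreeMap;

// Stand-ins for the crate's types (assumed shapes, not the real definitions).
type Hash = [u8; 20];

#[derive(Debug, PartialEq)]
struct Entry {
    downloaded: u32,
}

fn import_persistent(torrents: &mut BTreeMap<Hash, Entry>, persistent: &BTreeMap<Hash, u32>) {
    for (info_hash, downloaded) in persistent {
        // Skip if the torrent entry already exists, as in the diff above.
        if torrents.contains_key(info_hash) {
            continue;
        }

        torrents.insert(*info_hash, Entry { downloaded: *downloaded });
    }
}

fn main() {
    let mut torrents = BTreeMap::from([([1_u8; 20], Entry { downloaded: 9 })]);
    let persistent = BTreeMap::from([([1_u8; 20], 5_u32), ([2_u8; 20], 7_u32)]);

    import_persistent(&mut torrents, &persistent);

    // The existing entry keeps its in-memory count; only the new hash is added.
    assert_eq!(torrents[&[1_u8; 20]].downloaded, 9);
    assert_eq!(torrents[&[2_u8; 20]].downloaded, 7);
}
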

packages/torrent-repository/tests/repository/mod.rs (+159 -5)

@@ -1,9 +1,9 @@
-use std::hash::{DefaultHasher, Hash};
+use std::hash::{DefaultHasher, Hash, Hasher};
 
 use rstest::{fixture, rstest};
 use torrust_tracker_primitives::announce_event::AnnounceEvent;
 use torrust_tracker_primitives::info_hash::InfoHash;
-use torrust_tracker_primitives::NumberOfBytes;
+use torrust_tracker_primitives::{NumberOfBytes, PersistentTorrents};
 use torrust_tracker_torrent_repository::entry::Entry as _;
 use torrust_tracker_torrent_repository::repository::{RwLockStd, RwLockTokio};
 use torrust_tracker_torrent_repository::EntrySingle;
@@ -104,6 +104,37 @@ fn three() -> Entries {
     ]
 }
 
+#[fixture]
+fn persistent_empty() -> PersistentTorrents {
+    PersistentTorrents::default()
+}
+
+#[fixture]
+fn persistent_single() -> PersistentTorrents {
+    let hash = &mut DefaultHasher::default();
+
+    hash.write_u8(1);
+    let t = [(InfoHash::from(&hash.clone()), 0_u32)];
+
+    t.iter().copied().collect()
+}
+
+#[fixture]
+fn persistent_three() -> PersistentTorrents {
+    let hash = &mut DefaultHasher::default();
+
+    hash.write_u8(1);
+    let info_1 = InfoHash::from(&hash.clone());
+    hash.write_u8(2);
+    let info_2 = InfoHash::from(&hash.clone());
+    hash.write_u8(3);
+    let info_3 = InfoHash::from(&hash.clone());
+
+    let t = [(info_1, 1_u32), (info_2, 2_u32), (info_3, 3_u32)];
+
+    t.iter().copied().collect()
+}
+
 async fn make(repo: &Repo, entries: &Entries) {
     for (info_hash, entry) in entries {
         repo.insert(info_hash, entry.clone()).await;
@@ -116,7 +147,7 @@ async fn make(repo: &Repo, entries: &Entries) {
 #[case::started(started())]
 #[case::completed(completed())]
 #[case::downloaded(downloaded())]
-#[case::four(three())]
+#[case::three(three())]
 #[tokio::test]
 async fn it_should_get_a_torrent_entry(
     #[values(standard(), standard_mutex(), standard_tokio(), tokio_std(), tokio_mutex(), tokio_tokio())] repo: Repo,
@@ -137,7 +168,7 @@ async fn it_should_get_a_torrent_entry(
 #[case::started(started())]
 #[case::completed(completed())]
 #[case::downloaded(downloaded())]
-#[case::four(three())]
+#[case::three(three())]
 #[tokio::test]
 async fn it_should_get_entries(
     #[values(standard(), standard_mutex(), standard_tokio(), tokio_std(), tokio_mutex(), tokio_tokio())] repo: Repo,
@@ -158,7 +189,7 @@ async fn it_should_get_entries(
 #[case::started(started())]
 #[case::completed(completed())]
 #[case::downloaded(downloaded())]
-#[case::four(three())]
+#[case::three(three())]
 #[tokio::test]
 async fn it_should_get_metrics(
     #[values(standard(), standard_mutex(), standard_tokio(), tokio_std(), tokio_mutex(), tokio_tokio())] repo: Repo,
@@ -181,3 +212,126 @@ async fn it_should_get_metrics(
 
     assert_eq!(repo.get_metrics().await, metrics);
 }
+
+#[rstest]
+#[case::empty(empty())]
+#[case::default(default())]
+#[case::started(started())]
+#[case::completed(completed())]
+#[case::downloaded(downloaded())]
+#[case::three(three())]
+#[tokio::test]
+async fn it_should_import_persistent_torrents(
+    #[values(standard(), standard_mutex(), standard_tokio(), tokio_std(), tokio_mutex(), tokio_tokio())] repo: Repo,
+    #[case] entries: Entries,
+    #[values(persistent_empty(), persistent_single(), persistent_three())] persistent_torrents: PersistentTorrents,
+) {
+    make(&repo, &entries).await;
+
+    let mut downloaded = repo.get_metrics().await.downloaded;
+    persistent_torrents.iter().for_each(|(_, d)| downloaded += u64::from(*d));
+
+    repo.import_persistent(&persistent_torrents).await;
+
+    assert_eq!(repo.get_metrics().await.downloaded, downloaded);
+
+    for (entry, _) in persistent_torrents {
+        assert!(repo.get(&entry).await.is_some());
+    }
+}
+
+#[rstest]
+#[case::empty(empty())]
+#[case::default(default())]
+#[case::started(started())]
+#[case::completed(completed())]
+#[case::downloaded(downloaded())]
+#[case::three(three())]
+#[tokio::test]
+async fn it_should_remove_an_entry(
+    #[values(standard(), standard_mutex(), standard_tokio(), tokio_std(), tokio_mutex(), tokio_tokio())] repo: Repo,
+    #[case] entries: Entries,
+) {
+    make(&repo, &entries).await;
+
+    for (info_hash, torrent) in entries {
+        assert_eq!(repo.get(&info_hash).await, Some(torrent.clone()));
+        assert_eq!(repo.remove(&info_hash).await, Some(torrent));
+
+        assert_eq!(repo.get(&info_hash).await, None);
+        assert_eq!(repo.remove(&info_hash).await, None);
+    }
+
+    assert_eq!(repo.get_metrics().await.torrents, 0);
+}
+
+#[rstest]
+#[case::empty(empty())]
+#[case::default(default())]
+#[case::started(started())]
+#[case::completed(completed())]
+#[case::downloaded(downloaded())]
+#[case::three(three())]
+#[tokio::test]
+async fn it_should_remove_inactive_peers(
+    #[values(standard(), standard_mutex(), standard_tokio(), tokio_std(), tokio_mutex(), tokio_tokio())] repo: Repo,
+    #[case] entries: Entries,
+) {
+    use std::ops::Sub as _;
+    use std::time::Duration;
+
+    use torrust_tracker_clock::clock::stopped::Stopped as _;
+    use torrust_tracker_clock::clock::{self, Time as _};
+    use torrust_tracker_primitives::peer;
+
+    use crate::CurrentClock;
+
+    const TIMEOUT: Duration = Duration::from_secs(120);
+    const EXPIRE: Duration = Duration::from_secs(121);
+
+    make(&repo, &entries).await;
+
+    let info_hash: InfoHash;
+    let mut peer: peer::Peer;
+
+    // Generate a new infohash and peer.
+    {
+        let hash = &mut DefaultHasher::default();
+        hash.write_u8(255);
+        info_hash = InfoHash::from(&hash.clone());
+        peer = a_completed_peer(-1);
+    }
+
+    // Set the last updated time of the peer to be 121 seconds ago.
+    {
+        let now = clock::Working::now();
+        clock::Stopped::local_set(&now);
+
+        peer.updated = now.sub(EXPIRE);
+    }
+
+    // Insert the infohash and peer into the repository
+    // and verify there is an extra torrent entry.
+    {
+        repo.update_torrent_with_peer_and_get_stats(&info_hash, &peer).await;
+        assert_eq!(repo.get_metrics().await.torrents, entries.len() as u64 + 1);
+    }
+
+    // Verify that this new peer was inserted into the repository.
+    {
+        let entry = repo.get(&info_hash).await.expect("it_should_get_some");
+        assert!(entry.get_peers(None).contains(&peer.into()));
+    }
+
+    // Remove peers that have not been updated since the timeout (120 seconds ago).
+    {
+        repo.remove_inactive_peers(CurrentClock::now_sub(&TIMEOUT).expect("it should get a time passed"))
+            .await;
+    }
+
+    // Verify that this peer was removed from the repository.
+    {
+        let entry = repo.get(&info_hash).await.expect("it_should_get_some");
+        assert!(!entry.get_peers(None).contains(&peer.into()));
+    }
+}
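
A note on the test shape (mine, not the commit's): rstest expands every `#[case]` against every `#[values]` argument, so a test like `it_should_import_persistent_torrents` runs once per repository flavour × entries fixture × persistent fixture. A self-contained sketch of that expansion, with purely illustrative names:

use rstest::rstest;

#[rstest]
#[case::empty(0)]
#[case::three(3)]
fn it_expands_cases_and_values(#[case] entry_count: usize, #[values(1_u32, 2, 3)] seed: u32) {
    // rstest generates 2 cases x 3 values = 6 test functions from this one body.
    assert!(seed >= 1);
    assert!(entry_count <= 3);
}
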

src/core/databases/mysql.rs (+3 -3)

@@ -9,7 +9,7 @@ use r2d2_mysql::mysql::prelude::Queryable;
 use r2d2_mysql::mysql::{params, Opts, OptsBuilder};
 use r2d2_mysql::MySqlConnectionManager;
 use torrust_tracker_primitives::info_hash::InfoHash;
-use torrust_tracker_primitives::DatabaseDriver;
+use torrust_tracker_primitives::{DatabaseDriver, PersistentTorrents};
 
 use super::{Database, Error};
 use crate::core::auth::{self, Key};
@@ -105,7 +105,7 @@ impl Database for Mysql {
     }
 
     /// Refer to [`databases::Database::load_persistent_torrents`](crate::core::databases::Database::load_persistent_torrents).
-    async fn load_persistent_torrents(&self) -> Result<Vec<(InfoHash, u32)>, Error> {
+    async fn load_persistent_torrents(&self) -> Result<PersistentTorrents, Error> {
         let mut conn = self.pool.get().map_err(|e| (e, DRIVER))?;
 
         let torrents = conn.query_map(
@@ -116,7 +116,7 @@ impl Database for Mysql {
             },
         )?;
 
-        Ok(torrents)
+        Ok(torrents.iter().copied().collect())
     }
 
     /// Refer to [`databases::Database::load_keys`](crate::core::databases::Database::load_keys).
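
The MySQL driver still collects the `query_map` rows into a `Vec<(InfoHash, u32)>`, so only the final step changes: the vector is re-collected into the new map type. A rough sketch of that conversion, with `u64` standing in for `InfoHash`:

use std::collections::BTreeMap;

fn main() {
    // Rows as they might come back from the driver: (info_hash, completed) pairs.
    let rows: Vec<(u64, u32)> = vec![(0xAAAA, 5), (0xBBBB, 7)];

    // The `.iter().copied().collect()` from the diff, turning the row vector into
    // the `PersistentTorrents` map shape.
    let torrents: BTreeMap<u64, u32> = rows.iter().copied().collect();

    assert_eq!(torrents.get(&0xAAAA), Some(&5));
    assert_eq!(torrents.len(), 2);
}
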

src/core/databases/sqlite.rs (+3 -8)

@@ -6,7 +6,7 @@ use async_trait::async_trait;
 use r2d2::Pool;
 use r2d2_sqlite::SqliteConnectionManager;
 use torrust_tracker_primitives::info_hash::InfoHash;
-use torrust_tracker_primitives::{DatabaseDriver, DurationSinceUnixEpoch};
+use torrust_tracker_primitives::{DatabaseDriver, DurationSinceUnixEpoch, PersistentTorrents};
 
 use super::{Database, Error};
 use crate::core::auth::{self, Key};
@@ -89,7 +89,7 @@ impl Database for Sqlite {
     }
 
     /// Refer to [`databases::Database::load_persistent_torrents`](crate::core::databases::Database::load_persistent_torrents).
-    async fn load_persistent_torrents(&self) -> Result<Vec<(InfoHash, u32)>, Error> {
+    async fn load_persistent_torrents(&self) -> Result<PersistentTorrents, Error> {
         let conn = self.pool.get().map_err(|e| (e, DRIVER))?;
 
         let mut stmt = conn.prepare("SELECT info_hash, completed FROM torrents")?;
@@ -101,12 +101,7 @@ impl Database for Sqlite {
             Ok((info_hash, completed))
         })?;
 
-        //torrent_iter?;
-        //let torrent_iter = torrent_iter.unwrap();
-
-        let torrents: Vec<(InfoHash, u32)> = torrent_iter.filter_map(std::result::Result::ok).collect();
-
-        Ok(torrents)
+        Ok(torrent_iter.filter_map(std::result::Result::ok).collect())
     }
 
     /// Refer to [`databases::Database::load_keys`](crate::core::databases::Database::load_keys).
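
The SQLite path maps each row to a `Result` and drops failed rows with `filter_map(Result::ok)` before collecting straight into the map, which is what lets the intermediate `Vec` and the commented-out lines go away. A small sketch of that pattern with plain types and a string error (assumptions, not the crate's actual row handling):

use std::collections::BTreeMap;

fn main() {
    // Simulated row results: the malformed row is silently skipped.
    let torrent_iter = vec![
        Ok((1_u64, 10_u32)),
        Err("bad info_hash"),
        Ok((2, 20)),
    ]
    .into_iter();

    let torrents: BTreeMap<u64, u32> = torrent_iter.filter_map(std::result::Result::ok).collect();

    assert_eq!(torrents.len(), 2);
    assert_eq!(torrents.get(&2), Some(&20));
}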
