Antoine Langlois 2024-04-26 09:37:22 +02:00
parent 2b40641de3
commit c5b91bd02a
Signed by: DataHearth
GPG Key ID: 946E2D0C410C7B3D
27 changed files with 407 additions and 320 deletions

Cargo.lock generated
View File

@ -163,6 +163,7 @@ dependencies = [
"brs",
"clap",
"clap_complete",
"hex",
"rand",
"thiserror",
"tokio",

View File

@ -1,6 +1,6 @@
[workspace]
members = [
"brs",
"lib",
"cli",
"gui"
]

README.md Normal file
View File

@ -0,0 +1,32 @@
# BitTorrent-Rust
[BitTorrent-Rust](https://gitea.antoine-langlois.net/DataHearth/BitTorrent-Rust) is a fully-fledged [BitTorrent](https://www.bittorrent.org) tool suite: a `library` supporting all [BEPs](https://www.bittorrent.org/beps/bep_0000.html) (WIP), a `GUI` (TODO) for desktop usage, a `CLI`/`TUI` (WIP), and a `web ui` for containerised environments.
## Why another `GUI`/`CLI`/`TUI`/`WebUI`/`library`?
Why not 😁. Jokes aside, I feel like some users might want an `AIO` (all-in-one) [BitTorrent](https://www.bittorrent.org) environment. My goal is to create one with a modern stack.
## Features
### Library
Everything covered by a [BEP](https://www.bittorrent.org/beps/bep_0000.html) `MUST` be supported. Non-standard fields found in `.torrent` files are also mapped when they are encountered and need special treatment (like dates).
To start developing with it:
```bash
cargo add bittorrent-rs
```
> [!WARNING]
> The library is a work in progress. Please refer to the implemented section below.
> The library's documentation is also a work in progress.
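A minimal parsing sketch, mirroring how the CLI in this repository drives the library (assumptions: the crate is imported as `brs` as in this workspace — the published name may differ — and `./example.torrent` is a placeholder path):
```rust
use std::fs;

use brs::torrent::v1;

fn main() {
    // Read a .torrent file and hand the raw bytes to the v1 parser.
    let bytes = fs::read("./example.torrent").expect("failed to read torrent file");

    match v1::Torrent::parse_bytes(&bytes) {
        // The Display implementation prints the general, torrent and file sections.
        Ok(torrent) => println!("{torrent}"),
        Err(e) => eprintln!("failed to parse torrent: {e}"),
    }
}
```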
#### Implemented
- [ ] [BEP 0003](https://www.bittorrent.org/beps/bep_0003.html) - V1 specification
- [x] `.torrent` parsing
- [ ] Tracker request
- [ ] Peer download
- [ ] [BEP 0004](https://www.bittorrent.org/beps/bep_0004.html) - Reserved bit allocation (tested & verified)
- [ ] [BEP 0005](https://www.bittorrent.org/beps/bep_0005.html) - DHT protocol
- [ ] [BEP 0020](https://www.bittorrent.org/beps/bep_0020.html) - Peer ID convention
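Tracker announce support is still being reworked; as a reference, the sketch below follows the CLI's `peers` subcommand from this repository (hedged: the hard-coded torrent path and peer id are placeholders, and the API may still change):
```rust
use std::fs;

use brs::{
    torrent::v1,
    tracker::{announce::AnnounceReq, Tracker},
};

#[tokio::main]
async fn main() {
    let bytes = fs::read("./example.torrent").expect("failed to read torrent file");
    let torrent = v1::Torrent::parse_bytes(&bytes).expect("failed to parse torrent");

    let mut tracker = Tracker::new(torrent.announce.clone());

    // `announce` returns the raw bencoded body; `convert_bytes` decodes it into peers.
    let raw = tracker
        .announce(AnnounceReq {
            info_hash: torrent.calc_hash().expect("failed to hash the info dictionary"),
            peer_id: "-BR010-0000000000000".to_string(), // placeholder 20-character peer id
            downloaded: "0".to_string(),
            left: torrent.calc_download_lenght().to_string(),
            uploaded: "0".to_string(),
            compact: true,
            ..Default::default()
        })
        .await
        .expect("announce request failed");

    let rsp = tracker.convert_bytes(&raw).await.expect("failed to decode tracker response");
    for p in rsp.peers {
        println!("{}:{}", p.ip, p.port);
    }
}
```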

View File

@ -1,21 +0,0 @@
use std::net::AddrParseError;
use thiserror::Error;
#[derive(Debug, Error)]
pub enum TrackerError {
#[error("Failed to execute announce request: {0}")]
AnnounceRequest(#[from] reqwest::Error),
#[error("Failed to decode response body: {0}")]
BencodeDecode(#[from] bendy::serde::Error),
#[error("Tracker responded with an invalid status code: {0}")]
InvalidStatus(u16),
#[error("Announce request failed: {0}")]
AnnounceFailed(String),
#[error("Failed to convert IP string to IpAddr: {0}")]
IpParse(#[from] AddrParseError),
#[error("Invalid compact peers list. Expected a list of {0}*n bytes, found: {1}")]
InvalidPeersCompactList(u8, u64),
#[error("Failed to parse tracker URL: {0}")]
ParseURL(String),
}

View File

@ -1,113 +0,0 @@
//! Tracker operations
//!
//! Start by creating an instance of a tracker
//! ```rust
//! use brs::torrent::v1::Torrent;
//! use brs::torrent::Parse;
//! use brs::tracker::Tracker;
//!
//! fn main() {
//! let torrent = match Torrent::parse("./file.torrent") {
//! Ok(v) => v,
//! Err(e) => return eprintln!("{e}"),
//! };
//!
//! let tracker = Tracker::new(&torrent.url);
//! }
//! ```
mod tracker;
use std::{collections::HashMap, net::IpAddr};
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, skip_serializing_none, BoolFromInt};
#[derive(Default, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum TrackerEvent {
Started,
Completed,
Stopped,
#[default]
Empty,
}
/// Tracker query parameters.
#[serde_as]
#[skip_serializing_none]
#[derive(Default, Serialize)]
pub struct TrackerRequest {
/// SHA1 hash of the bencode form. Must be 20 bytes long.
pub info_hash: String,
/// 20-character ID generated before a download request.
pub peer_id: String,
/// Client's IP address.
pub ip: Option<IpAddr>,
/// Client's listening port.
/// Usually, downloaders will try the common range `6881` to `6889`.
pub port: Option<u16>,
/// Total amount of bytes uploaded encoded in base 10 `ASCII`.
pub uploaded: String,
/// Total amount of bytes downloaded encoded in base 10 `ASCII`.
pub downloaded: String,
/// Total amount of bytes left to download encoded in base 10 `ASCII`.
pub left: String,
/// Announcement event.
pub event: TrackerEvent,
/// Should the tracker respond with a compact peers list
#[serde_as(as = "BoolFromInt")]
pub compact: bool,
}
#[derive(Deserialize)]
pub enum TrackerResponse {
/// Tracker responded with an error
Error {
#[serde(rename = "failure reason")]
failure_reason: String,
},
/// Tracker successfully computed the query - Normal response
Success {
/// Interval in seconds to query the tracker
interval: u64,
/// List of peers
peers: Vec<Peer>,
},
/// Tracker successfully computed the query - Compact response
SuccessCompact {
/// Interval in seconds to query the tracker
interval: u64,
/// List of peers in BigEndian order.
/// 4 bytes allocated for the IPv4 address and 2 bytes for the port.
peers: Option<Vec<u8>>,
peers6: Option<Vec<u8>>
},
}
#[derive(Deserialize)]
pub struct Peer {
/// Unique identifier for the peer
#[serde(rename = "peer id")]
pub peer_id: String,
/// Peer IP address. IPv4 or IPv6
pub ip: String,
/// Peer listening port
pub port: u16,
}
pub struct Tracker {
/// Tracker URL
pub url: String,
/// Interval in seconds to query the tracker once the transfer has started.
/// /!\ Populated with the first announce query.
pub interval: Option<u64>,
/// List of peers.
/// /!\ Populated with the first announce query.
pub peers: Option<HashMap<String, TrackerPeer>>,
}
pub struct TrackerPeer {
pub ip: IpAddr,
pub port: u16,
}

View File

@ -1,102 +0,0 @@
use std::{collections::HashMap, net::IpAddr};
use bendy::serde::from_bytes;
use crate::error::TrackerError;
use super::{Tracker, TrackerPeer, TrackerRequest, TrackerResponse};
const IPV4_PEER_LEN: u8 = 6;
const IPV6_PEER_LEN: u8 = 18;
impl Tracker {
/// Create a new instance of `Tracker`
pub fn new(url: String) -> Self {
Self {
url,
interval: None,
peers: None,
}
}
pub async fn announce(&mut self, req: TrackerRequest) -> Result<(), TrackerError> {
let req = reqwest::Client::new()
.get(&self.url)
.query(&req)
.send()
.await?;
if !req.status().is_success() {
return Err(TrackerError::InvalidStatus(req.status().as_u16()));
}
let rsp: TrackerResponse = from_bytes(&req.bytes().await?)?;
match rsp {
TrackerResponse::Error { failure_reason } => {
return Err(TrackerError::AnnounceFailed(failure_reason))
}
TrackerResponse::Success { interval, peers } => {
let mut hashmap_peers = HashMap::new();
for p in peers {
hashmap_peers.insert(
p.peer_id,
TrackerPeer {
ip: p.ip.parse()?,
port: p.port,
},
);
}
self.interval = Some(interval);
self.peers = Some(hashmap_peers);
}
TrackerResponse::SuccessCompact {
interval,
peers,
peers6,
} => {
let mut hashmap_peers = HashMap::new();
if let Some(p) = peers {
if (p.len() % IPV4_PEER_LEN as usize) != 0 {
return Err(TrackerError::InvalidPeersCompactList(IPV4_PEER_LEN, p.len() as u64));
}
for (i, peer) in p.chunks(IPV4_PEER_LEN as usize).enumerate() {
let (ip, port) = peer.split_at(4);
hashmap_peers.insert(
i.to_string(),
TrackerPeer {
ip: IpAddr::from(TryInto::<[u8; 4]>::try_into(ip).expect(
"cannot convert &[u8] to &[u8; 4] where chunks is already of lenght 4",
)),
port: u16::from_be_bytes(port.try_into().expect(
"cannot convert &[u8] to &[u8; 2] where chunks is already of lenght 2",
)),
},
);
}
}
if let Some(p6) = peers6 {
if (p6.len() % IPV6_PEER_LEN as usize) != 0 {
return Err(TrackerError::InvalidPeersCompactList(IPV6_PEER_LEN, p6.len() as u64));
}
for (i, peer) in p6.chunks(IPV6_PEER_LEN as usize).enumerate() {
let (ip, port) = peer.split_at(14);
hashmap_peers.insert(
i.to_string(),
TrackerPeer {
ip: IpAddr::from(TryInto::<[u8; 16]>::try_into(ip).expect(
"cannot convert &[u8] to &[u8; 16] where chunks is already of lenght 16",
)),
port: u16::from_be_bytes(port.try_into().expect(
"cannot convert &[u8] to &[u8; 2] where chunks is already of lenght 2",
)),
},
);
}
}
self.interval = Some(interval);
self.peers = Some(hashmap_peers);
}
}
Ok(())
}
}

View File

@ -2,14 +2,15 @@
name = "brs-cli"
version = "0.1.0"
edition = "2021"
description = "BRS desktop GUI and WebUI"
description = "BitTorrent-Rust cli"
authors = [ "Antoine Langlois <dev@antoine-langlois.net>" ]
[dependencies]
brs = { path = "../brs" }
brs = { path = "../lib" }
clap = { version = "4.5", features = ["derive"] }
clap_complete = "4.5"
thiserror = "1.0"
tokio = { version = "1.37", features = ["full"] }
rand = "0.8"
bendy = { version = "0.3", features = ["std", "serde"] }
hex = "0.4"

View File

@ -6,7 +6,7 @@ use std::io;
use clap::{Command, CommandFactory, Parser, Subcommand, ValueHint};
use clap_complete::{generate, Generator, Shell};
use torrent::{create, metadata, raw};
use tracker::check;
use tracker::peers;
#[derive(Parser)]
#[command(version, about)]
@ -44,6 +44,7 @@ enum TorrentCmds {
#[arg(long)]
v2: bool,
},
/// Print data and types found inside a ".torrent" file
Raw {
/// Path to an existing torrent file
#[arg(value_hint = ValueHint::FilePath)]
@ -59,7 +60,7 @@ enum TorrentCmds {
#[derive(Subcommand)]
enum TrackerCmds {
Check {
Peers {
#[arg(value_hint = ValueHint::FilePath)]
path: String,
},
@ -85,7 +86,7 @@ async fn main() {
TorrentCmds::Raw { path } => raw(path),
},
Cmds::Tracker { commands } => match commands {
TrackerCmds::Check { path } => check(path).await,
TrackerCmds::Peers { path } => peers(path).await,
},
}
}

View File

@ -2,12 +2,12 @@ use std::fs;
use brs::{
torrent::v1,
tracker::{Tracker, TrackerRequest},
tracker::{announce::AnnounceReq, Tracker},
};
use rand::distributions::Alphanumeric;
use rand::Rng;
pub(crate) async fn check(path: String) {
pub(crate) async fn peers(path: String) {
let bytes = fs::read(path).unwrap();
let torrent = v1::Torrent::parse_bytes(&bytes);
if let Err(e) = &torrent {
@ -26,8 +26,8 @@ pub(crate) async fn check(path: String) {
};
let mut tracker = Tracker::new(torrent.announce.clone());
let rsp = tracker
.announce(TrackerRequest {
peer_id: format!("-BRS010-{peer_id}"),
.announce(AnnounceReq {
peer_id: format!("-BR010-{peer_id}"),
downloaded: "0".to_string(),
left: torrent.calc_download_lenght().to_string(),
uploaded: "0".to_string(),
@ -35,9 +35,21 @@ pub(crate) async fn check(path: String) {
compact: true,
..Default::default()
})
.await;
.await
.unwrap();
let rsp = tracker.convert_bytes(&rsp).await;
if let Err(e) = rsp {
eprintln!("{e}")
match rsp {
Ok(v) => {
for p in v.peers {
println!("- ip: {}", p.ip);
println!(" port: {}", p.port);
if let Some(id) = p.id {
println!(" id: {}", id);
}
println!("");
}
}
Err(e) => eprintln!("Failed to get peers: {e}"),
}
}

View File

@ -0,0 +1,18 @@
pub(crate) mod skip_empty {
use chrono::{DateTime, Utc};
#[inline(always)]
pub(crate) fn i64(v: &i64) -> bool {
*v == 0
}
#[inline(always)]
pub(crate) fn bool(v: &bool) -> bool {
*v == false
}
#[inline(always)]
pub(crate) fn date(v: &DateTime<Utc>) -> bool {
*v == DateTime::<Utc>::default()
}
}

View File

@ -2,5 +2,5 @@ pub mod torrent;
pub mod tracker;
pub mod peer;
mod error;
mod macros;
mod extension_parsing;

View File

@ -2,7 +2,7 @@
macro_rules! write_optional {
($f:expr, $k:expr, $v:expr, $empty:expr) => {{
if !$empty($v) {
write!($f, "{}: {}\n", $k, $v)?;
writeln!($f, "{}: {}", $k, $v)?;
}
}};
}

View File

@ -10,6 +10,9 @@ pub enum TorrentError {
EncodeInfo(bendy::serde::Error),
#[error("Failed to read torrent file: {0}")]
ReadTorrent(#[from] io::Error),
#[error("Failed to convert bytes array to UTF-8 string")]
Utf8Conversion(#[from] std::string::FromUtf8Error),
}
#[derive(Error, Debug)]
pub enum TcpError {
}

View File

@ -1,88 +1,92 @@
use human_bytes::human_bytes;
use crate::{torrent::v1::ext_parsing::skip_empty, write_optional};
use crate::{extension_parsing, write_optional};
use super::Torrent;
impl std::fmt::Display for Torrent<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "GENERAL\n\n")?;
write!(f, " Tracker: {}\n", self.announce)?;
writeln!(f, "GENERAL\n")?;
writeln!(f, " Tracker: {}", self.announce)?;
if !self.announce_list.is_empty() {
writeln!(f, " Additional trackers:")?;
for i in &self.announce_list {
writeln!(f, " - {i}")?;
}
}
write_optional!(
f,
" Created by",
&self.additional_fields.created_by,
skip_empty::string
String::is_empty
);
write_optional!(
f,
" Creation date",
&self.additional_fields.creation_date,
skip_empty::date
extension_parsing::skip_empty::date
);
write_optional!(
f,
" Comment",
&self.additional_fields.comment,
skip_empty::string
String::is_empty
);
write_optional!(
f,
" Encoding",
&self.additional_fields.encoding,
skip_empty::string
String::is_empty
);
if !self.additional_fields.url_list.is_empty() {
write!(f, " Additional resources:\n")?;
writeln!(f, " Additional resources:")?;
for ar in &self.additional_fields.url_list {
write!(f, " - {}\n", ar)?;
writeln!(f, " - {}", ar)?;
}
}
if !self.additional_fields.extra_fields.is_empty() {
for (k, v) in &self.additional_fields.extra_fields {
write!(f, " {}: {:#?}\n", k, v,)?;
writeln!(f, " {}: {:#?}", k, v,)?;
}
}
write!(f, "\n")?;
write!(f, "TORRENT INFORMATION\n\n")?;
write!(f, " Name: {}\n", self.info.name)?;
write!(f, " Hash: {}\n", self.calc_hash().unwrap())?;
writeln!(f, "\nTORRENT INFORMATION\n")?;
writeln!(f, " Name: {}", self.info.name)?;
let hash = match self.calc_hash() {
Ok(v) => hex::encode(v),
Err(e) => format!("Failed to calculate hash for torrent: {e}"),
};
writeln!(f, " Hash: {hash}",)?;
write_optional!(
f,
" Private",
&self.info.additional_fields.private,
skip_empty::bool
extension_parsing::skip_empty::bool
);
write!(f, " Pieces: {:?}\n", self.info.pieces.len())?;
write!(
writeln!(f, " Pieces: {:?}", self.info.pieces.len())?;
writeln!(
f,
" Piece size: {}\n",
" Piece size: {}",
human_bytes(self.info.piece_length as f64)
)?;
write!(
writeln!(
f,
" Total size: {}\n",
" Total size: {}",
human_bytes(self.calc_download_lenght() as f64)
)?;
if !self.info.additional_fields.extra_fields.is_empty() {
for (k, v) in &self.info.additional_fields.extra_fields {
write!(f, " {}: {:#?}\n", k, v,)?;
writeln!(f, " {}: {:#?}", k, v,)?;
}
}
write!(f, "\nFILES\n\n")?;
writeln!(f, "\nFILES\n")?;
if !self.info.files.is_empty() {
for files in &self.info.files {
write!(
f,
" {} ({})\n",
files.path,
human_bytes(files.length as f64)
)?;
writeln!(f, " {} ({})", files.path, human_bytes(files.length as f64))?;
}
} else {
write!(
writeln!(
f,
" {} ({})",
self.info.name,

View File

@ -1,28 +1,29 @@
use bendy::serde::{from_bytes, to_bytes};
use sha1::{Digest, Sha1};
use crate::torrent::errors::TorrentError;
use crate::{extension_parsing, torrent::errors::TorrentError};
use super::{ext_parsing, Torrent};
use super::Torrent;
impl Torrent<'_> {
pub fn parse_bytes<'a>(bytes: &'a [u8]) -> Result<Torrent<'_>, TorrentError> {
from_bytes::<'a>(bytes).map_err(|e| TorrentError::ParseTorrent(e))
from_bytes::<'_>(bytes).map_err(|e| TorrentError::ParseTorrent(e))
}
pub fn calc_download_lenght(&self) -> i64 {
if !ext_parsing::skip_empty::i64(&self.info.length) {
if !extension_parsing::skip_empty::i64(&self.info.length) {
return self.info.length;
}
self.info.files.iter().map(|f| f.length).sum()
}
pub fn calc_hash(&self) -> Result<String, TorrentError> {
pub fn calc_hash(&self) -> Result<Vec<u8>, TorrentError> {
let mut hasher = Sha1::new();
let encoded = to_bytes(&self.info).map_err(|e| TorrentError::EncodeInfo(e))?;
hasher.update(&encoded);
let hash = hasher.finalize();
Ok(hex::encode(hasher.finalize()))
Ok(hash.to_vec())
}
}

View File

@ -1,6 +1,7 @@
mod display;
mod ext_parsing;
mod main;
mod parsing_modules;
mod tcp;
use std::collections::HashMap;
@ -8,12 +9,21 @@ use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, BoolFromInt, TimestampSeconds};
use crate::extension_parsing;
#[derive(Debug, Deserialize, Serialize)]
pub struct Torrent<'a> {
/// Announcer URL
pub announce: String,
/// Torrent information
pub info: TorrentInfo<'a>,
/// List of additional trackers
#[serde(
default,
rename = "announce-list",
skip_serializing_if = "Vec::is_empty"
)]
pub announce_list: Vec<String>,
/// Non official fields
#[serde(flatten, borrow)]
pub additional_fields: RootAdditionalFields<'a>,
@ -32,17 +42,17 @@ pub struct TorrentInfo<'a> {
pub piece_length: i64,
/// SHA1 hashes of each piece, concatenated. Each hash is 20 bytes long.
/// REQUIRED
#[serde(with = "ext_parsing::pieces")]
#[serde(with = "parsing_modules::pieces")]
pub pieces: Vec<String>,
/// In case of a single file, represents the file size.
/// REQUIRED - If `TorrentInfo.files` is empty
#[serde(default, skip_serializing_if = "ext_parsing::skip_empty::i64")]
#[serde(default, skip_serializing_if = "extension_parsing::skip_empty::i64")]
pub length: i64,
/// In case of multiple files/directories, represents all files/directories available
/// REQUIRED - If `TorrentInfo.length` is empty
#[serde(
default,
with = "ext_parsing::files",
with = "parsing_modules::files",
skip_serializing_if = "Vec::is_empty"
)]
pub files: Vec<TorrentFile>,
@ -71,7 +81,7 @@ pub struct RootAdditionalFields<'a> {
#[serde(
default,
rename = "created by",
skip_serializing_if = "ext_parsing::skip_empty::string"
skip_serializing_if = "String::is_empty"
)]
pub created_by: String,
/// Torrent creation date
@ -79,11 +89,11 @@ pub struct RootAdditionalFields<'a> {
#[serde(
default,
rename = "creation date",
skip_serializing_if = "ext_parsing::skip_empty::date"
skip_serializing_if = "extension_parsing::skip_empty::date"
)]
pub creation_date: DateTime<Utc>,
/// Comment about the torrent
#[serde(default, skip_serializing_if = "ext_parsing::skip_empty::string")]
#[serde(default, skip_serializing_if = "String::is_empty")]
pub comment: String,
/// List of resources available
#[serde(default, rename = "url-list", skip_serializing_if = "Vec::is_empty")]
@ -103,7 +113,7 @@ pub struct RootAdditionalFields<'a> {
pub struct TorrentInfoAdditionalFields<'a> {
/// Is the torrent private
#[serde_as(as = "BoolFromInt")]
#[serde(default, skip_serializing_if = "ext_parsing::skip_empty::bool")]
#[serde(default, skip_serializing_if = "extension_parsing::skip_empty::bool")]
pub private: bool,
/// Extra fields not explicitly covered by the struct
#[serde(flatten, borrow)]

View File

@ -1,27 +1,3 @@
pub(super) mod skip_empty {
use chrono::{DateTime, Utc};
#[inline(always)]
pub(crate) fn i64(v: &i64) -> bool {
*v == 0
}
#[inline(always)]
pub(crate) fn bool(v: &bool) -> bool {
*v == false
}
#[inline(always)]
pub(crate) fn string(v: &String) -> bool {
v.len() == 0
}
#[inline(always)]
pub(crate) fn date(v: &DateTime<Utc>) -> bool {
*v == DateTime::<Utc>::default()
}
}
pub(super) mod pieces {
use serde::{de, ser, Deserializer, Serializer};
use serde_with::{Bytes, DeserializeAs, SerializeAs};

View File

@ -0,0 +1,5 @@
use crate::torrent::errors::TcpError;
pub async fn start(_ip: String) -> Result<(), TcpError> {
todo!()
}

lib/src/tracker/announce.rs Normal file
View File

@ -0,0 +1,146 @@
use std::collections::HashMap;
use bendy::{serde::from_bytes, value::Value};
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, BoolFromInt};
use super::{errors::TrackerError, parsing_modules, Peer, Tracker};
#[derive(Debug, Clone)]
pub struct AnnounceRsp<'a> {
pub interval: u64,
pub peers: Vec<Peer>,
pub additional_fields: HashMap<String, Value<'a>>,
}
/// Possible events sent when doing the announce request
#[derive(Default, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum AnnounceEv {
Started,
Completed,
Stopped,
#[default]
Empty,
}
/// Tracker query parameters.
#[serde_as]
#[derive(Default, Serialize)]
pub struct AnnounceReq {
/// SHA1 hash of the bencode form. Must be 20 bytes long.
/// REQUIRED
#[serde(serialize_with = "parsing_modules::serialize_bytes_urlencoded")]
pub info_hash: Vec<u8>,
/// 20-character ID generated before a download request.
/// REQUIRED
pub peer_id: String,
/// Client's IP address.
#[serde(skip_serializing_if = "String::is_empty")]
pub ip: String,
/// Client's listening port.
/// Usually, downloaders will try the common range `6881` to `6889`.
/// REQUIRED
pub port: u16,
/// Total amount of bytes uploaded encoded in base 10 `ASCII`.
/// REQUIRED
pub uploaded: String,
/// Total amount of bytes downloaded encoded in base 10 `ASCII`.
/// REQUIRED
pub downloaded: String,
/// Total amount of bytes left to download encoded in base 10 `ASCII`.
/// REQUIRED
pub left: String,
/// Announcement event.
#[serde(default)]
pub event: AnnounceEv,
/// Should the tracker respond with a compact peers list
#[serde_as(as = "BoolFromInt")]
#[serde(default)]
pub compact: bool,
}
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum Body<'a> {
/// Tracker responded with an error
Error {
/// REQUIRED
#[serde(rename = "failure reason")]
failure_reason: String,
},
/// Tracker responded with a peer list - Normal response
Success {
/// Interval in seconds to query the tracker
/// REQUIRED
interval: u64,
/// List of peers
/// REQUIRED
peers: Vec<Peer>,
},
/// Tracker responded with a peer list - Compact response
SuccessCompact {
/// Interval in seconds to query the tracker
/// REQUIRED
interval: u64,
/// List of peers in BigEndian order.
/// 4 bytes allocated for the IPv4 address and 2 bytes for the port.
/// REQUIRED - if `peers6` key is not present. Both can be present
#[serde(default, deserialize_with = "parsing_modules::deserialize_ipv4")]
peers: Vec<Peer>,
/// List of peers in BigEndian order.
/// 16 bytes allocated for the IPv6 address and 2 bytes for the port.
/// REQUIRED - if `peers` key is not present. Both can be present
#[serde(default, deserialize_with = "parsing_modules::deserialize_ipv6")]
peers6: Vec<Peer>,
#[serde(flatten, borrow)]
additional_fields: HashMap<String, Value<'a>>,
},
}
impl Tracker {
/// Trigger an announce request to the torrent's tracker.
/// The response will contain a peer list.
pub async fn announce(&mut self, req: AnnounceReq) -> Result<Vec<u8>, TrackerError> {
let req = reqwest::Client::new()
.get(&self.url)
.query(&req)
.send()
.await
.unwrap();
if !req.status().is_success() {
return Err(TrackerError::InvalidStatus(req.status().as_u16()));
}
Ok(req.bytes().await?.to_vec())
}
pub async fn convert_bytes(&self, bytes: &'_ [u8]) -> Result<AnnounceRsp<'_>, TrackerError> {
match from_bytes::<'_, Body>(bytes).map_err(|e| TrackerError::BencodeDecode(e))? {
Body::Error { failure_reason } => {
return Err(TrackerError::AnnounceFailed(failure_reason))
}
Body::Success { interval, peers } => Ok(AnnounceRsp {
interval,
peers,
additional_fields: HashMap::new(),
}),
Body::SuccessCompact {
interval,
mut peers,
peers6,
additional_fields,
} => {
peers.extend(peers6);
Ok(AnnounceRsp {
interval,
peers,
additional_fields: additional_fields
.into_iter()
.map(|(k, v)| (k, v.into_owned()))
.collect(),
})
}
}
}
}

lib/src/tracker/errors.rs Normal file
View File

@ -0,0 +1,13 @@
use thiserror::Error;
#[derive(Debug, Error)]
pub enum TrackerError {
#[error("Failed to decode response body: {0}")]
BencodeDecode(bendy::serde::Error),
#[error("Tracker responded with an invalid status code: {0}")]
InvalidStatus(u16),
#[error("Announce request failed: {0}")]
AnnounceFailed(String),
#[error("Failed to get response body: {0}")]
BodyDecode(#[from] reqwest::Error),
}

lib/src/tracker/mod.rs Normal file
View File

@ -0,0 +1,27 @@
pub mod announce;
mod errors;
mod parsing_modules;
pub mod tracker;
use std::net::IpAddr;
use serde::Deserialize;
#[derive(Debug, Clone, Deserialize)]
pub struct Peer {
/// Unique identifier for the peer.
/// It may be absent when the tracker's response is in compact form.
#[serde(rename = "peer id")]
pub id: Option<String>,
/// Peer IP address. IPv4 or IPv6.
// #[serde(deserialize_with = "parsing_modules::deserialize_ipaddr")]
pub ip: IpAddr,
/// Peer listening port.
pub port: u16,
}
#[derive(Default, Debug)]
pub struct Tracker {
/// Tracker URL
pub url: String,
}

View File

@ -0,0 +1,62 @@
use std::net::IpAddr;
use serde::{Deserializer, Serialize, Serializer};
use serde_with::{Bytes, DeserializeAs};
use super::Peer;
const PEER_IPV4_CHUNK_LEN: u8 = 6;
const PEER_IPV6_CHUNK_LEN: u8 = 18;
pub fn deserialize_ipv4<'de, D>(deserializer: D) -> Result<Vec<Peer>, D::Error>
where
D: Deserializer<'de>,
{
let bytes: &[u8] = Bytes::deserialize_as(deserializer)?;
let mut peers = vec![];
for c in bytes.chunks(PEER_IPV4_CHUNK_LEN as usize) {
let (ip_c, port_c) = c.split_at(4);
peers.push(Peer {
id: None,
ip: IpAddr::from(
TryInto::<[u8; 4]>::try_into(ip_c).expect("ipv4 chunk should be of length 4"),
),
port: u16::from_be_bytes(
TryInto::<[u8; 2]>::try_into(port_c).expect("port chunk should be of length 2"),
),
});
}
Ok(peers)
}
pub fn deserialize_ipv6<'de, D>(deserializer: D) -> Result<Vec<Peer>, D::Error>
where
D: Deserializer<'de>,
{
let bytes: &[u8] = Bytes::deserialize_as(deserializer)?;
let mut peers = vec![];
for c in bytes.chunks(PEER_IPV6_CHUNK_LEN as usize) {
let (ip_c, port_c) = c.split_at(16);
peers.push(Peer {
id: None,
ip: IpAddr::from(
TryInto::<[u8; 16]>::try_into(ip_c).expect("ipv6 chunk should be of length 16"),
),
port: u16::from_be_bytes(
TryInto::<[u8; 2]>::try_into(port_c).expect("port chunk should be of length 2"),
),
});
}
Ok(peers)
}
pub fn serialize_bytes_urlencoded<S>(bytes: &Vec<u8>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
unsafe { std::str::from_utf8_unchecked(bytes) }.serialize(serializer)
}
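The compact form handled by these helpers packs each IPv4 peer into 6 bytes (4 address bytes followed by a 2-byte big-endian port) and each IPv6 peer into 18 bytes. A self-contained illustration of that layout, independent of the crate and using made-up values:
```rust
use std::net::Ipv4Addr;

fn main() {
    // One compact IPv4 peer entry: 192.0.2.1 on port 6881 (0x1AE1).
    let chunk: [u8; 6] = [192, 0, 2, 1, 0x1A, 0xE1];

    let ip = Ipv4Addr::new(chunk[0], chunk[1], chunk[2], chunk[3]);
    let port = u16::from_be_bytes([chunk[4], chunk[5]]);

    assert_eq!((ip, port), (Ipv4Addr::new(192, 0, 2, 1), 6881));
    println!("{ip}:{port}");
}
```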

View File

@ -0,0 +1,11 @@
use super::Tracker;
impl Tracker {
/// Create a new instance of `Tracker`
pub fn new(url: String) -> Self {
Self {
url,
..Default::default()
}
}
}