This commit is contained in:
Antoine Langlois 2024-04-01 14:20:05 +02:00
parent beaec9242f
commit db725b4bec
Signed by: DataHearth
GPG Key ID: 946E2D0C410C7B3D
19 changed files with 1693 additions and 136 deletions

1237
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -2,7 +2,7 @@
name = "brs"
version = "0.1.0"
edition = "2021"
description = "BRS stands for \"BitTorrent Rust\". It a library for the BitTorrent protocol."
description = "BRS stands for \"BitTorrent Rust\". It's a library for the BitTorrent protocol."
authors = [ "Antoine Langlois <dev@antoine-langlois.net>" ]
[dependencies]
@ -10,8 +10,11 @@ bendy = { version = "0.3", features = ["serde"] }
chrono = { version = "0.4", features = ["serde"] }
hex = "0.4"
human_bytes = "0.4"
reqwest = "0.12"
serde = { version = "1.0", features = ["derive"] }
serde_bencode = "0.2"
serde_bytes = "0.11"
serde_json = "1.0"
serde_with = "3.7"
thiserror = "1.0"
sha1 = "0.10"

View File

@ -1,11 +1,29 @@
use std::io;
use std::{io, net::AddrParseError};
use thiserror::Error;
#[derive(Error, Debug)]
pub enum TorrentError {
#[error("Failed to parse torrent file: {0}")]
ParseTorrent(String),
ParseTorrent(#[from] serde_bencode::Error),
#[error("Failed to read torrent file: {0}")]
ReadTorrent(#[from] io::Error)
ReadTorrent(#[from] io::Error),
}
/// Errors that can occur while announcing to / decoding replies from a
/// BitTorrent tracker.
#[derive(Debug, Error)]
pub enum TrackerError {
    /// The HTTP announce request itself failed (connection, TLS, timeout, ...).
    #[error("Failed to execute announce request: {0}")]
    AnnounceRequest(#[from] reqwest::Error),
    /// The tracker's response body was not valid bencode.
    #[error("Failed to decode response body: {0}")]
    BencodeDecode(#[from] serde_bencode::Error),
    /// The tracker answered with a non-success HTTP status code.
    #[error("Tracker responded with an invalid status code: {0}")]
    InvalidStatus(u16),
    /// The tracker returned a bencoded "failure reason" message.
    #[error("Announce request failed: {0}")]
    AnnounceFailed(String),
    /// A peer's IP string could not be parsed into an `IpAddr`.
    #[error("Failed to convert IP string to IpAddr: {0}")]
    IpParse(#[from] AddrParseError),
    /// A compact peers list whose byte length is not a multiple of 6
    /// (each compact entry is 4 IP bytes + 2 port bytes).
    #[error("Invalid compact peers list. Expected a list of 6*n bytes, found: {0}")]
    InvalidPeersCompactList(u64),
    /// The tracker URL could not be parsed.
    #[error("Failed to parse tracker URL: {0}")]
    ParseURL(String)
}

View File

@ -1,4 +1,5 @@
pub mod torrent;
// pub mod torrent;
pub mod tracker;
mod error;
mod macros;

View File

@ -1,91 +1,18 @@
mod de;
mod display;
use crate::error::TorrentError;
use std::{collections::HashMap, fs};
pub mod v1;
pub mod v2;
use chrono::{DateTime, Utc};
use serde::Deserialize;
use crate::{error::TorrentError, torrent::de::*};
#[derive(Debug, Deserialize)]
pub struct Torrent {
/// Announcer URL
pub announce: String,
/// Torrent information
pub info: TorrentInfo,
/// Non official fields
#[serde(flatten)]
pub additional_fields: RootAdditionalFields,
pub trait Parse<T> {
fn parse(path: String) -> Result<T, TorrentError>;
fn parse_bytes(bytes: &[u8]) -> Result<T, TorrentError>;
}
/// TorrentInfo is a struct that contains all the information about the torrent file.
#[derive(Debug, Deserialize)]
pub struct TorrentInfo {
/// Recommanded output file or root directory
pub name: String,
/// Size of each data piece
#[serde(rename = "piece length")]
pub piece_length: u64,
/// SHA1 hashes of each pieces
#[serde(deserialize_with = "from_bytes_to_vec")]
pub pieces: Vec<String>,
/// In case of a single file, represents the file size
pub length: Option<u64>,
#[serde(default, deserialize_with = "from_files_list_to_struct")]
/// In case of multiple files/directories, represents all files/directories available
pub files: Option<Vec<TorrentFiles>>,
// Additional fields available that are not part of the original specification
#[serde(flatten)]
pub additional_fields: TorrentInfoAdditionalFields,
pub trait Hash<T> {
fn calc_hash(&self) -> Result<String, TorrentError>;
}
#[derive(Debug, Deserialize)]
pub struct TorrentFiles {
/// Output file path
pub path: String,
/// File size
pub length: u64,
}
/// RootAdditionalFields is a struct that contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
#[derive(Debug, Deserialize)]
pub struct RootAdditionalFields {
/// Torrent creator or software name
#[serde(rename = "created by")]
pub created_by: Option<String>,
/// Torrent creation date
#[serde(
default,
rename = "creation date",
deserialize_with = "from_i64_to_datetime"
)]
pub creation_date: Option<DateTime<Utc>>,
/// Comment about the torrent
pub comment: Option<String>,
// #[serde(rename = "url-list")]
// /// List of resources available
// pub url_list: Option<Vec<String>>,
#[serde(flatten)]
/// Extra fields not explicitly covered by the struct
pub extra_fields: HashMap<String, serde_bencode::value::Value>,
}
/// TorrentInfoAdditionalFields is a struct that contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
#[derive(Debug, Deserialize)]
pub struct TorrentInfoAdditionalFields {
/// Is the torrent private
#[serde(default, deserialize_with = "from_bool_to_int")]
pub private: bool,
/// Extra fields not explicitly covered by the struct
#[serde(flatten)]
pub extra_fields: HashMap<String, serde_bencode::value::Value>,
}
pub fn parse(path: String) -> Result<Torrent, TorrentError> {
let torrent_file = fs::read(path).map_err(|e| TorrentError::ReadTorrent(e))?;
serde_bencode::from_bytes(&torrent_file).map_err(|e| TorrentError::ParseTorrent(e.to_string()))
pub trait Create<D> {
fn create(data: D, out: String) -> Result<(), TorrentError>;
}

View File

View File

@ -4,23 +4,9 @@ use serde::{
Deserialize, Deserializer,
};
use crate::{match_bytes, match_dict, match_int, match_list, torrent::TorrentFiles};
use crate::{match_bytes, match_dict, match_int, match_list, torrent::v1::TorrentFiles};
pub fn from_bool_to_int<'de, D>(deserializer: D) -> Result<bool, D::Error>
where
D: Deserializer<'de>,
{
match u8::deserialize(deserializer)? {
0 => Ok(false),
1 => Ok(true),
other => Err(Error::invalid_value(
Unexpected::Unsigned(other as u64),
&"zero or one",
)),
}
}
pub fn from_i64_to_datetime<'de, D>(deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error>
pub(super) fn from_i64_to_datetime<'de, D>(deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error>
where
D: Deserializer<'de>,
{
@ -32,7 +18,7 @@ where
Ok(None)
}
pub fn from_bytes_to_vec<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
pub(super) fn from_bytes_to_vec<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
where
D: Deserializer<'de>,
{
@ -43,7 +29,7 @@ where
.collect::<Vec<String>>())
}
pub fn from_files_list_to_struct<'de, D>(
pub(super) fn from_files_list_to_struct<'de, D>(
deserializer: D,
) -> Result<Option<Vec<TorrentFiles>>, D::Error>
where

View File

@ -45,13 +45,15 @@ impl std::fmt::Display for Torrent {
"\tpiece size: {}\n",
human_bytes(self.info.piece_length as f64)
)?;
if std::env::var("BRS_PRINT_TORRENT_FILES").is_ok() {
if let Some(v) = &self.info.files {
if let Some(v) = &self.info.files {
if std::env::var("BRS_PRINT_TORRENT_FILES").is_ok() {
write!(f, "\tfiles:\n")?;
for file in v {
write!(f, "\t - {}\n", file.path)?;
write!(f, "\t size: {}\n", human_bytes(file.length as f64))?;
}
} else {
write!(f, "\tfiles: {}\n", v.len())?;
}
}
if self.info.additional_fields.extra_fields.len() > 0 {

91
brs/src/torrent/v1/mod.rs Normal file
View File

@ -0,0 +1,91 @@
mod de;
mod display;
pub mod create;
pub mod parse;
pub mod sha1;
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde_with::{serde_as, BoolFromInt};
use super::v1::de::*;
/// In-memory representation of a BitTorrent v1 ".torrent" (metainfo) file,
/// as deserialized from its bencoded form.
#[derive(Debug, Deserialize)]
pub struct Torrent {
    /// Announcer URL
    pub announce: String,
    /// Torrent information
    pub info: TorrentInfo,
    /// Non official fields
    #[serde(flatten)]
    pub additional_fields: RootAdditionalFields,
}
/// TorrentInfo is a struct that contains all the information about the torrent file.
#[derive(Debug, Deserialize)]
pub struct TorrentInfo {
    /// Recommended output file or root directory
    pub name: String,
    /// Size of each data piece
    #[serde(rename = "piece length")]
    pub piece_length: u64,
    /// SHA1 hashes of each piece, split out of the concatenated `pieces`
    /// byte string by `from_bytes_to_vec`.
    #[serde(deserialize_with = "from_bytes_to_vec")]
    pub pieces: Vec<String>,
    /// In case of a single file, represents the file size
    pub length: Option<u64>,
    #[serde(default, deserialize_with = "from_files_list_to_struct")]
    /// In case of multiple files/directories, represents all files/directories available
    pub files: Option<Vec<TorrentFiles>>,
    // Additional fields available that are not part of the original specification
    #[serde(flatten)]
    pub additional_fields: TorrentInfoAdditionalFields,
}
/// A single entry of a multi-file torrent's `files` list.
#[derive(Debug, Deserialize)]
pub struct TorrentFiles {
    /// Output file path
    pub path: String,
    /// File size
    pub length: u64,
}
/// RootAdditionalFields is a struct that contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
#[derive(Debug, Deserialize)]
pub struct RootAdditionalFields {
    /// Torrent creator or software name
    #[serde(rename = "created by")]
    pub created_by: Option<String>,
    /// Torrent creation date (bencoded as a unix timestamp, converted by
    /// `from_i64_to_datetime`)
    #[serde(
        default,
        rename = "creation date",
        deserialize_with = "from_i64_to_datetime"
    )]
    pub creation_date: Option<DateTime<Utc>>,
    /// Comment about the torrent
    pub comment: Option<String>,
    // #[serde(rename = "url-list")]
    // /// List of resources available
    // pub url_list: Option<Vec<String>>,
    #[serde(flatten)]
    /// Extra fields not explicitly covered by the struct
    pub extra_fields: HashMap<String, serde_bencode::value::Value>,
}
/// TorrentInfoAdditionalFields is a struct that contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
#[serde_as]
#[derive(Debug, Deserialize)]
pub struct TorrentInfoAdditionalFields {
    /// Is the torrent private (bencoded as the integer 0/1, mapped to bool
    /// via `BoolFromInt`; absent means `false`)
    #[serde_as(as = "BoolFromInt")]
    #[serde(default)]
    pub private: bool,
    /// Extra fields not explicitly covered by the struct
    #[serde(flatten)]
    pub extra_fields: HashMap<String, serde_bencode::value::Value>,
}

View File

@ -0,0 +1,17 @@
use std::fs;
use crate::{error::TorrentError, torrent::Parse};
use super::Torrent;
impl Parse<Torrent> for Torrent {
    /// Read the file at `path` from disk and deserialize its bencoded
    /// contents into a [`Torrent`].
    fn parse(path: String) -> Result<Torrent, TorrentError> {
        let contents = fs::read(path)?;
        Self::parse_bytes(&contents)
    }

    /// Deserialize a [`Torrent`] from an in-memory bencoded byte slice.
    fn parse_bytes(bytes: &[u8]) -> Result<Torrent, TorrentError> {
        let torrent = serde_bencode::from_bytes(bytes)?;
        Ok(torrent)
    }
}

View File

@ -0,0 +1,11 @@
use crate::{error::TorrentError, torrent::Hash};
use super::Torrent;
impl Hash<Torrent> for Torrent {
    /// Compute the torrent's info-hash.
    ///
    /// Not implemented yet: this will require re-bencoding `self.info` and
    /// SHA1-hashing the resulting bytes.
    fn calc_hash(&self) -> Result<String, TorrentError> {
        // BUG FIX: the previous `let x = self.info;` tried to move the
        // non-Copy `TorrentInfo` out of `&self`, which is a compile error
        // (E0507). Borrow it instead.
        let _info = &self.info;
        todo!()
    }
}

View File

View File

@ -0,0 +1,65 @@
use std::{collections::HashMap, net::IpAddr};
use crate::error::TrackerError;
use super::{Announce, Tracker, TrackerPeer, TrackerResponse};
impl Announce for Tracker {
    /// Send one announce request to `self.url` and update `self.interval`
    /// and `self.peers` from the tracker's reply.
    ///
    /// Handles both response formats: the normal one (a list of peer
    /// dictionaries) and the compact one (a byte string of packed 6-byte
    /// entries: 4 IPv4 bytes followed by a big-endian u16 port).
    async fn announce(&mut self, req: super::TrackerRequest) -> Result<(), TrackerError> {
        let rsp = reqwest::Client::new()
            .get(&self.url)
            .query(&req)
            .send()
            .await?;

        if !rsp.status().is_success() {
            return Err(TrackerError::InvalidStatus(rsp.status().as_u16()));
        }

        let rsp: TrackerResponse = serde_bencode::from_bytes(&rsp.bytes().await?)?;

        match rsp {
            TrackerResponse::Error { failure_reason } => {
                return Err(TrackerError::AnnounceFailed(failure_reason))
            }
            TrackerResponse::Success { interval, peers } => {
                let mut hashmap_peers = HashMap::new();
                for p in peers {
                    hashmap_peers.insert(
                        p.peer_id,
                        TrackerPeer {
                            ip: p.ip.parse()?,
                            port: p.port,
                        },
                    );
                }
                self.interval = Some(interval);
                self.peers = Some(hashmap_peers);
            }
            TrackerResponse::SuccessCompact { interval, peers } => {
                let mut hashmap_peers = HashMap::new();
                if (peers.len() % 6) != 0 {
                    return Err(TrackerError::InvalidPeersCompactList(peers.len() as u64));
                }
                for (i, peer) in peers.chunks(6).enumerate() {
                    // BUG FIX: each 6-byte chunk must be split after the 4
                    // address bytes. The previous `split_at(6)` produced a
                    // 6-byte "IP" slice (which can never convert to
                    // `[u8; 4]`, guaranteeing a panic) and an empty port
                    // slice.
                    let (ip, port) = peer.split_at(4);
                    let ip: [u8; 4] = ip
                        .try_into()
                        .expect("4-byte prefix of a 6-byte chunk always converts to [u8; 4]");
                    let port = u16::from_be_bytes(
                        port.try_into()
                            .expect("2-byte suffix of a 6-byte chunk always converts to [u8; 2]"),
                    );
                    // Compact entries carry no peer id; key by list index.
                    hashmap_peers.insert(
                        i.to_string(),
                        TrackerPeer {
                            ip: IpAddr::from(ip),
                            port,
                        },
                    );
                }
                self.interval = Some(interval);
                self.peers = Some(hashmap_peers);
            }
        }

        Ok(())
    }
}

102
brs/src/tracker/mod.rs Normal file
View File

@ -0,0 +1,102 @@
mod announce;
use std::{collections::HashMap, net::IpAddr};
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, skip_serializing_none, BoolFromInt};
use crate::error::TrackerError;
#[derive(Serialize)]
pub enum TrackerEvent {
Started,
Completed,
Stopped,
Empty,
}
/// Query parameters serialized into the tracker announce URL.
#[serde_as]
#[skip_serializing_none]
#[derive(Serialize)]
pub struct TrackerRequest {
    /// SHA1 hash of the bencode form. Must be 20 bytes long
    pub info_hash: String,
    /// 20 characters ID generate before a download request
    pub peer_id: String,
    /// Client's IP address
    pub ip: Option<IpAddr>,
    /// Client's listening port
    /// Usually, downloader will try common range: 6881 - 6889
    pub port: Option<u16>,
    /// Total amount of bytes uploaded encoded in base 10 ASCII
    pub uploaded: String,
    /// Total amount of bytes downloaded encoded in base 10 ASCII
    pub downloaded: String,
    /// Total amount of bytes left to download encoded in base 10 ASCII
    pub left: String,
    /// Announcement event done regularly.
    ///
    /// BUG FIX: removed `#[serde(default = "TrackerEvent::Started")]` —
    /// `default` only applies to `Deserialize` (this struct is
    /// `Serialize`-only, so it was dead), and its string form must name a
    /// function, which the unit variant `TrackerEvent::Started` is not.
    pub event: Option<TrackerEvent>,
    /// Should the tracker respond with a compact peers list
    #[serde_as(as = "BoolFromInt")]
    #[serde(default)]
    pub compact: bool,
}
/// Possible tracker replies to an announce request.
///
/// BUG FIX: a tracker reply is a plain bencoded dictionary — there is no
/// enum tag on the wire — so the externally-tagged default (expecting e.g.
/// `{"Error": {...}}`) could never match. `untagged` makes serde try the
/// variants in declaration order: a "failure reason" key first, then a
/// peer-dictionary list, then a compact byte-string peers list.
#[derive(Deserialize)]
#[serde(untagged)]
pub enum TrackerResponse {
    /// Tracker responded with an error
    Error {
        #[serde(rename = "failure reason")]
        failure_reason: String,
    },
    /// Tracker successfully computed the query - Normal response
    Success {
        /// Interval in seconds to query the tracker
        interval: u64,
        /// List of peers
        peers: Vec<Peer>,
    },
    /// Tracker successfully computed the query - Compact response
    SuccessCompact {
        /// Interval in seconds to query the tracker
        interval: u64,
        /// List of peers in BigEndian order.
        /// 4 bytes allocated for the IPv4 address and 2 bytes for the port.
        peers: Vec<u8>,
    },
}
/// A single peer entry from a non-compact tracker response.
#[derive(Deserialize)]
pub struct Peer {
    /// Unique identifier for the peer
    #[serde(rename = "peer id")]
    pub peer_id: String,
    /// Peer IP address. IPv4 or IPv6
    pub ip: String,
    /// Peer listening port
    pub port: u16,
}
/// Client-side handle for a single tracker: the announce URL plus the
/// state returned by the most recent announce.
pub struct Tracker {
    /// Tracker URL
    pub url: String,
    /// Interval in seconds to query the tracker once the transfer has started.
    /// /!\ Populated with the first announce query.
    pub interval: Option<u64>,
    /// List of peers.
    /// /!\ Populated with the first announce query.
    pub peers: Option<HashMap<String, TrackerPeer>>,
}
/// Network address of a peer, as stored after a successful announce.
pub struct TrackerPeer {
    // Peer IP address (IPv4 or IPv6).
    pub ip: IpAddr,
    // Peer listening port.
    pub port: u16,
}
/// Ability to announce to a tracker.
pub trait Announce {
    /// Send an announce request and update the implementor's state from the
    /// tracker's response. Returns a `Send` future explicitly (instead of
    /// `async fn`) so the bound is part of the trait contract.
    fn announce(
        &mut self,
        req: TrackerRequest,
    ) -> impl std::future::Future<Output = Result<(), TrackerError>> + Send;
}

View File

@ -8,3 +8,7 @@ authors = [ "Antoine Langlois <dev@antoine-langlois.net>" ]
[dependencies]
brs = { path = "../brs" }
clap = { version = "4.5", features = ["derive"] }
clap_complete = "4.5"
thiserror = "1.0"
tokio = { version = "1.37", features = ["full"] }
rand = "0.8"

View File

@ -1,28 +1,86 @@
use clap::{self, Parser};
mod torrent;
mod tracker;
#[derive(clap::Parser)]
use std::io;
use clap::{Command, CommandFactory, Parser, Subcommand, ValueHint};
use clap_complete::{generate, Generator, Shell};
use torrent::{create, metadata};
use tracker::check;
#[derive(Parser)]
#[command(version, about)]
struct Cli {
#[command(subcommand)]
commands: Commands,
commands: Option<Cmds>,
#[arg(short, long, value_name = "SHELL", value_enum)]
complete: Option<Shell>,
}
#[derive(clap::Subcommand)]
enum Commands {
Info {
#[derive(Subcommand)]
enum Cmds {
/// Torrent tooling
Torrent {
#[command(subcommand)]
commands: TorrentCmds,
},
Tracker {
#[command(subcommand)]
commands: TrackerCmds,
},
}
#[derive(Subcommand)]
enum TorrentCmds {
/// Retrieve metadata from a ".torrent" file
Metadata {
/// Path to an existing torrent file
#[arg(value_hint = ValueHint::FilePath)]
path: String,
}
/// BitTorrent specification V1
#[arg(long, default_value_t = true)]
v1: bool,
/// BitTorrent specification V2
#[arg(long)]
v2: bool,
},
/// Create a torrent file
Create {
/// Path to an existing torrent file
#[arg(value_hint = ValueHint::FilePath)]
path: String,
},
}
fn main() {
// Tracker-related subcommands ("brs tracker ...").
// NOTE: plain `//` comments on purpose — clap derives CLI help text from
// `///` doc comments, so adding those would change the program's output.
#[derive(Subcommand)]
enum TrackerCmds {
    Check {
        // Path to an existing torrent file on disk.
        #[arg(value_hint = ValueHint::FilePath)]
        path: String,
    },
}
/// Write shell-completion definitions for `cmd` to stdout.
fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
    let bin_name = cmd.get_name().to_string();
    generate(gen, cmd, bin_name, &mut io::stdout());
}
#[tokio::main]
async fn main() {
let cli = Cli::parse();
match cli.commands {
Commands::Info { path } => {
match brs::torrent::parse(path) {
Ok(v) => println!("{v}"),
Err(e) => println!("{e}"),
}
if let Some(sh) = cli.complete {
print_completions(sh, &mut Cli::command())
}
if let Some(cmds) = cli.commands {
match cmds {
Cmds::Torrent { commands } => match commands {
TorrentCmds::Metadata { path, v1, v2 } => metadata(v1, v2, path),
TorrentCmds::Create { path } => create(path, String::new()),
},
Cmds::Tracker { commands } => match commands {
TrackerCmds::Check { path } => check(path).await,
},
}
}
}

17
cli/src/torrent.rs Normal file
View File

@ -0,0 +1,17 @@
use brs::torrent::v1;
/// Print the metadata of the torrent file at `path`.
///
/// Only BitTorrent v1 (`v1 == true`) is supported; anything else panics
/// with `unimplemented!`.
pub(crate) fn metadata(v1: bool, _v2: bool, path: String) {
    if v1 {
        // BUG FIX: the previous version printed the parse error and then
        // unconditionally called `torrent.unwrap()`, panicking right after
        // reporting the failure. Report to stderr and stop instead.
        match v1::Torrent::parse(path) {
            Ok(torrent) => println!("{torrent}"),
            Err(e) => eprintln!("Failed to parse torrent: {e}"),
        }
    } else {
        unimplemented!()
    }
}
/// Create a torrent file from `_data` at `_path` — not implemented yet.
pub(crate) fn create(_path: String, _data: String) {
    unimplemented!()
}

42
cli/src/tracker.rs Normal file
View File

@ -0,0 +1,42 @@
use brs::{
torrent::v1,
tracker::{Tracker, TrackerRequest, Announce},
};
use rand::distributions::Alphanumeric;
use rand::Rng;
/// Parse the torrent at `path` and perform a single announce against its
/// tracker, printing any error to stderr.
pub(crate) async fn check(path: String) {
    // BUG FIX: the previous version printed the parse error and then
    // called `torrent.unwrap()` anyway, panicking right after reporting
    // the failure. Bail out instead.
    let torrent = match v1::Torrent::parse(path) {
        Ok(t) => t,
        Err(e) => {
            eprintln!("Failed to parse torrent: {e}");
            return;
        }
    };

    let mut tracker = Tracker {
        url: torrent.announce,
        peers: None,
        interval: None,
    };

    // Azureus-style peer id: "-BRS010-" + 12 random alphanumeric chars = 20 chars.
    let peer_id: String = rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(12)
        .map(char::from)
        .collect();

    let rsp = tracker
        .announce(TrackerRequest {
            ip: None,
            port: None,
            peer_id: format!("-BRS010-{peer_id}"),
            downloaded: "".to_string(),
            left: "".to_string(),
            uploaded: "".to_string(),
            event: None,
            // TODO(review): info_hash is still empty — real trackers will
            // reject this announce until the v1 info-hash is computed.
            info_hash: "".to_string(),
            compact: true,
        })
        .await;

    if let Err(e) = rsp {
        eprintln!("{e}")
    }
}

View File

@ -17,6 +17,8 @@
devShells.default = pkgs.mkShell {
buildInputs = with pkgs; [
rust-bin.stable.latest.default
openssl
pkg-config
];
};
}