This commit is contained in:
Antoine Langlois 2024-04-02 23:06:17 +02:00
parent 61fb0934d1
commit f02eac170e
Signed by: DataHearth
GPG Key ID: 946E2D0C410C7B3D
22 changed files with 557 additions and 462 deletions

14
Cargo.lock generated
View File

@ -147,11 +147,10 @@ dependencies = [
"chrono",
"hex",
"human_bytes",
"rand",
"reqwest",
"serde",
"serde_bencode",
"serde_bytes",
"serde_json",
"serde_with",
"sha1",
"thiserror",
@ -161,6 +160,7 @@ dependencies = [
name = "brs-cli"
version = "0.1.0"
dependencies = [
"bendy",
"brs",
"clap",
"clap_complete",
@ -1169,16 +1169,6 @@ dependencies = [
"serde_derive",
]
[[package]]
name = "serde_bencode"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a70dfc7b7438b99896e7f8992363ab8e2c4ba26aa5ec675d32d1c3c2c33d413e"
dependencies = [
"serde",
"serde_bytes",
]
[[package]]
name = "serde_bytes"
version = "0.11.14"

View File

@ -6,15 +6,14 @@ description = "BRS stands for \"BitTorrent Rust\". It's a library for the BitTor
authors = [ "Antoine Langlois <dev@antoine-langlois.net>" ]
[dependencies]
bendy = { version = "0.3", features = ["serde"] }
bendy = { version = "0.3", features = ["std", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
hex = "0.4"
human_bytes = "0.4"
rand = "0.8"
reqwest = "0.12"
serde = { version = "1.0", features = ["derive"] }
serde_bencode = "0.2"
serde_bytes = "0.11"
serde_json = "1.0"
serde_with = "3.7"
thiserror = "1.0"
serde_with = { version = "3.7", features = ["chrono"] }
sha1 = "0.10"
thiserror = "1.0"

View File

@ -1,29 +1,21 @@
use std::{io, net::AddrParseError};
use std::net::AddrParseError;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum TorrentError {
#[error("Failed to parse torrent file: {0}")]
ParseTorrent(#[from] serde_bencode::Error),
#[error("Failed to read torrent file: {0}")]
ReadTorrent(#[from] io::Error),
}
#[derive(Debug, Error)]
pub enum TrackerError {
#[error("Failed to execute announce request: {0}")]
AnnounceRequest(#[from] reqwest::Error),
#[error("Failed to decode response body: {0}")]
BencodeDecode(#[from] serde_bencode::Error),
BencodeDecode(#[from] bendy::serde::Error),
#[error("Tracker responded with an invalid status code: {0}")]
InvalidStatus(u16),
#[error("Announce request failed: {0}")]
AnnounceFailed(String),
#[error("Failed to convert IP string to IpAddr: {0}")]
IpParse(#[from] AddrParseError),
#[error("Invalid compact peers list. Expected a list of 6*n bytes, found: {0}")]
InvalidPeersCompactList(u64),
#[error("Invalid compact peers list. Expected a list of {0}*n bytes, found: {1}")]
InvalidPeersCompactList(u8, u64),
#[error("Failed to parse tracker URL: {0}")]
ParseURL(String)
ParseURL(String),
}

View File

@ -1,5 +1,6 @@
pub mod torrent;
pub mod tracker;
pub mod peer;
mod error;
mod macros;

View File

@ -6,87 +6,3 @@ macro_rules! write_option {
}
}};
}
#[macro_export]
macro_rules! match_dict {
($value:expr, $expected:expr) => {{
match $value {
serde_bencode::value::Value::Dict(v) => Ok(v),
serde_bencode::value::Value::List(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Seq,
&$expected,
)),
serde_bencode::value::Value::Int(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Signed(v),
&$expected,
)),
serde_bencode::value::Value::Bytes(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Bytes(&v),
&$expected,
)),
}
}};
}
#[macro_export]
macro_rules! match_list {
($value:expr, $expected:expr) => {{
match $value {
serde_bencode::value::Value::List(v) => Ok(v),
serde_bencode::value::Value::Dict(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Map,
&$expected,
)),
serde_bencode::value::Value::Int(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Signed(*v),
&$expected,
)),
serde_bencode::value::Value::Bytes(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Bytes(&v),
&$expected,
)),
}
}};
}
#[macro_export]
macro_rules! match_bytes {
($value:expr, $expected:expr) => {{
match $value {
serde_bencode::value::Value::Bytes(v) => Ok(v),
serde_bencode::value::Value::Dict(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Map,
&$expected,
)),
serde_bencode::value::Value::Int(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Signed(*v),
&$expected,
)),
serde_bencode::value::Value::List(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Seq,
&$expected,
)),
}
}};
}
#[macro_export]
macro_rules! match_int {
($value:expr, $expected:expr) => {{
match $value {
serde_bencode::value::Value::Int(v) => Ok(v),
serde_bencode::value::Value::Dict(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Map,
&$expected,
)),
serde_bencode::value::Value::Bytes(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Bytes(&v),
&$expected,
)),
serde_bencode::value::Value::List(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Seq,
&$expected,
)),
}
}};
}

31
brs/src/peer.rs Normal file
View File

@ -0,0 +1,31 @@
use rand::{distributions::Alphanumeric, Rng};
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Generate a peer ID matching the specification `BEP 0020` (Azureus-style).
///
/// The ID is the prefix followed by random alphanumeric characters, padded to
/// 20 characters total. When `prefix` is `None`, a `-B<major><minor><patch>-`
/// prefix is derived from this crate's version.
///
/// A caller-supplied prefix longer than 20 bytes simply gets no random
/// suffix; it is returned as-is rather than panicking.
pub fn gen_peer_id(prefix: Option<String>) -> String {
    let prefix = if let Some(v) = prefix {
        v
    } else {
        let mut iter = VERSION.splitn(3, ".");
        let major = iter
            .next()
            .expect("version should contain 3 elements, not 0");
        let minor = iter
            .next()
            .expect("version should contain 3 elements, not 1");
        let patch = iter
            .next()
            .expect("version should contain 3 elements, not 2");
        format!("-B{}{}{}-", major, minor, patch)
    };

    // `20 - prefix.len()` would underflow (and panic in debug builds) for a
    // prefix longer than 20 bytes; saturate to 0 instead.
    let pad = 20usize.saturating_sub(prefix.len());
    let random_alphanum: String = rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(pad)
        .map(char::from)
        .collect();

    format!("{prefix}{random_alphanum}")
}

15
brs/src/torrent/errors.rs Normal file
View File

@ -0,0 +1,15 @@
use std::io;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum TorrentError {
#[error("Failed to parse torrent file: {0}")]
ParseTorrent(bendy::serde::Error),
#[error("Failed to encode info dictionnary: {0}")]
EncodeInfo(bendy::serde::Error),
#[error("Failed to read torrent file: {0}")]
ReadTorrent(#[from] io::Error),
#[error("Failed to convert bytes array to UTF-8 string")]
Utf8Conversion(#[from] std::string::FromUtf8Error),
}

View File

@ -1,18 +1,13 @@
use crate::error::TorrentError;
use self::errors::TorrentError;
pub mod errors;
pub mod v1;
pub mod v2;
pub trait Parse<T> {
fn parse(path: String) -> Result<T, TorrentError>;
fn parse_bytes(bytes: &[u8]) -> Result<T, TorrentError>;
}
pub trait Hash<T> {
pub trait Hash {
fn calc_hash(&self) -> Result<String, TorrentError>;
}
pub trait Create<D> {
fn create(data: D, out: String) -> Result<(), TorrentError>;
pub trait Length {
fn calc_download_lenght(&self) -> u64;
}

View File

@ -1,80 +0,0 @@
use chrono::{DateTime, Utc};
use serde::{
de::{Error, Unexpected},
Deserialize, Deserializer,
};
use crate::{match_bytes, match_dict, match_int, match_list, torrent::v1::TorrentFiles};
pub(super) fn from_i64_to_datetime<'de, D>(deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error>
where
D: Deserializer<'de>,
{
let timestamp: Option<i64> = Option::deserialize(deserializer)?;
if let Some(v) = timestamp {
return Ok(DateTime::from_timestamp(v, 0));
}
Ok(None)
}
pub(super) fn from_bytes_to_vec<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
where
D: Deserializer<'de>,
{
let bytes = serde_bytes::ByteBuf::deserialize(deserializer)?;
Ok(bytes
.chunks(20)
.map(|v| hex::encode(v))
.collect::<Vec<String>>())
}
pub(super) fn from_files_list_to_struct<'de, D>(
deserializer: D,
) -> Result<Option<Vec<TorrentFiles>>, D::Error>
where
D: Deserializer<'de>,
{
let list: Vec<serde_bencode::value::Value> = match Option::deserialize(deserializer)? {
Some(v) => v,
None => return Ok(None),
};
let mut torrent_files = vec![];
for v in list {
let file_dict = match_dict!(v, "map with keys \"path\" & \"length\"")?;
if file_dict.len() > 2 {
return Err(Error::invalid_length(
file_dict.len(),
&"path and length only",
));
}
let path = file_dict.get(b"path".as_ref());
let length = file_dict.get(b"length".as_ref());
if path.is_none() {
return Err(Error::missing_field(
"\"path\" is mandatory in a files list",
));
} else if length.is_none() {
return Err(Error::missing_field(
"\"length\" is mandatory in a files list",
));
}
let path = {
let mut str_path = String::new();
for chunks in match_list!(path.unwrap(), "list of bytes")? {
let chunks = match_bytes!(chunks, "sequence of bytes")?;
str_path.push_str(std::str::from_utf8(chunks).map_err(|_| {
Error::invalid_value(Unexpected::Bytes(&chunks), &"Invalid bytes string")
})?);
}
str_path
};
let length = match_int!(length.unwrap(), "integer")?;
torrent_files.push(TorrentFiles {
path,
length: *length as u64,
})
}
Ok(Some(torrent_files))
}

View File

@ -1,118 +1,82 @@
use std::collections::HashMap;
use human_bytes::human_bytes;
use crate::write_option;
use super::Torrent;
impl std::fmt::Display for Torrent {
impl std::fmt::Display for Torrent<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "tracker: {}\n", self.announce)?;
write_option!(f, "created by", &self.additional_fields.created_by);
write_option!(f, "creation date", &self.additional_fields.creation_date);
write_option!(f, "comment", &self.additional_fields.comment);
write!(f, "GENERAL\n\n")?;
write!(f, " Tracker: {}\n", self.announce)?;
write_option!(f, " Created by", &self.additional_fields.created_by);
write_option!(f, " Creation date", &self.additional_fields.creation_date);
write_option!(f, " Comment", &self.additional_fields.comment);
write_option!(f, " Encoding", &self.additional_fields.encoding);
if let Some(ars) = &self.additional_fields.url_list {
write!(f, " Additional resources:\n")?;
for ar in ars {
write!(f, " - {}\n", ar)?;
}
}
if self.additional_fields.extra_fields.len() > 0 {
for (k, v) in self.additional_fields.extra_fields.clone().into_iter() {
let value = match v {
serde_bencode::value::Value::Bytes(v) => std::str::from_utf8(&v)
.map_err(|_| std::fmt::Error)?
.to_string(),
serde_bencode::value::Value::Int(v) => v.to_string(),
serde_bencode::value::Value::List(v) => {
display_list(f, v)?;
continue;
}
serde_bencode::value::Value::Dict(v) => {
display_dict(f, v)?;
continue;
}
};
write!(f, "{k}: {value}\n")?;
for (k, v) in &self.additional_fields.extra_fields {
write!(
f,
" {}: {:#?} <= {}\n",
k,
v,
std::any::type_name_of_val(v)
)?;
}
}
write!(f, "\n")?;
write!(f, "Torrent information:\n")?;
write!(f, "\tname: {}\n", self.info.name)?;
write!(f, "\tprivate: {}\n", self.info.additional_fields.private)?;
if let Some(v) = self.info.length {
write!(f, "\tfile size: {}\n", human_bytes(v as f64))?;
}
write!(f, "\tpieces: {:?}\n", self.info.pieces.len())?;
write!(f, "TORRENT INFORMATION\n\n")?;
write!(f, " Hash: {}\n", self.calc_hash().unwrap())?;
write!(f, " Name: {}\n", self.info.name)?;
// write_option!(f, " Private", &self.info.additional_fields.private);
write!(f, " Pieces: {:?}\n", self.info.pieces.len())?;
write!(
f,
"\tpiece size: {}\n",
" Piece size: {}\n",
human_bytes(self.info.piece_length as f64)
)?;
if let Some(v) = &self.info.files {
if std::env::var("BRS_PRINT_TORRENT_FILES").is_ok() {
write!(f, "\tfiles:\n")?;
for file in v {
write!(f, "\t - {}\n", file.path)?;
write!(f, "\t size: {}\n", human_bytes(file.length as f64))?;
}
} else {
write!(f, "\tfiles: {}\n", v.len())?;
}
}
write!(
f,
" Total size: {}\n",
human_bytes(self.calc_download_lenght() as f64)
)?;
if self.info.additional_fields.extra_fields.len() > 0 {
for (k, v) in &self.info.additional_fields.extra_fields {
write!(f, "\t{}: {:#?}\n", k, v)?;
write!(
f,
" {}: {:#?} <= {}\n",
k,
v,
std::any::type_name_of_val(v)
)?;
}
}
write!(f, "\nFILES\n\n")?;
if self.info.files.len() > 0 {
for files in &self.info.files {
write!(
f,
" {} ({})\n",
files.path,
human_bytes(files.length as f64)
)?;
}
} else {
write!(
f,
" {} ({})",
self.info.name,
human_bytes(self.calc_download_lenght() as f64)
)?;
}
Ok(())
}
}
fn display_dict(
f: &mut std::fmt::Formatter,
dict: HashMap<Vec<u8>, serde_bencode::value::Value>,
) -> std::fmt::Result {
for (k, v) in dict {
let key = std::str::from_utf8(&k).map_err(|_| std::fmt::Error)?;
let value = match v {
serde_bencode::value::Value::Bytes(v) => std::str::from_utf8(&v)
.map_err(|_| std::fmt::Error)?
.to_string(),
serde_bencode::value::Value::Int(v) => v.to_string(),
serde_bencode::value::Value::List(v) => {
display_list(f, v)?;
continue;
}
serde_bencode::value::Value::Dict(v) => {
display_dict(f, v)?;
continue;
}
};
write!(f, "{key}: {value}\n").map_err(|_| std::fmt::Error)?
}
Ok(())
}
fn display_list(
f: &mut std::fmt::Formatter,
list: Vec<serde_bencode::value::Value>,
) -> std::fmt::Result {
for element in list {
let value = match element {
serde_bencode::value::Value::Bytes(v) => std::str::from_utf8(&v)
.map_err(|_| std::fmt::Error)?
.to_string(),
serde_bencode::value::Value::Int(v) => v.to_string(),
serde_bencode::value::Value::List(v) => {
display_list(f, v)?;
continue;
}
serde_bencode::value::Value::Dict(v) => {
display_dict(f, v)?;
continue;
}
};
write!(f, " - {value}\n")?;
}
Ok(())
}

View File

@ -0,0 +1,199 @@
/// serde `skip_serializing_if` predicate: omit a `u64` field when it is 0
/// (0 stands for "absent" on the single-file `length` field).
#[inline(always)]
pub(super) fn skip_0(v: &u64) -> bool {
    *v == 0
}

/// serde `skip_serializing_if` predicate: omit a `bool` field when it is false.
#[inline(always)]
pub(crate) fn skip_bool(v: &bool) -> bool {
    !*v
}
pub(super) mod pieces {
use serde::{Deserializer, Serializer};
pub fn serialize<S>(pieces: &Vec<String>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut buf: Vec<u8> = vec![];
for v in pieces.iter() {
buf.append(&mut hex::decode(v).unwrap());
}
serde_bytes::serialize(&buf, serializer)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
where
D: Deserializer<'de>,
{
let pieces: Vec<u8> = serde_bytes::deserialize(deserializer)?;
if (pieces.len() % 20) != 0 {
return Err(serde::de::Error::custom("Invalid SHA1 pieces"));
}
Ok(pieces.chunks(20).map(|c| hex::encode(c)).collect())
}
}
pub(super) mod files {
    //! Custom serde (de)serialization for the torrent `files` list.
    //! On the wire each file is a bencode dict `{"length": int, "path": [bytes, ...]}`;
    //! in memory it is a `TorrentFile` whose path segments are joined with `/`.

    use std::{borrow::Cow, collections::BTreeMap};

    use bendy::value::Value;
    use serde::{de, Deserialize, Deserializer, Serialize, Serializer};

    use crate::torrent::v1::TorrentFile;

    /// Serialize `files` as a bencode list of `{length, path}` dicts,
    /// splitting each joined path back into its list-of-segments form.
    pub fn serialize<S>(files: &Vec<TorrentFile>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut dict_list = vec![];
        let length_key: Cow<'_, [u8]> = Cow::Owned(String::from("length").into_bytes());
        let path_key: Cow<'_, [u8]> = Cow::Owned(String::from("path").into_bytes());
        for f in files {
            let mut dict: BTreeMap<Cow<'_, [u8]>, Value> = BTreeMap::new();
            dict.insert(length_key.clone(), Value::Integer(f.length as i64));

            // "a/b/c" -> ["a", "b", "c"] as bencode byte strings.
            let mut path_partial = vec![];
            for i in f.path.split("/") {
                path_partial.push(Value::Bytes(i.as_bytes().into()));
            }
            dict.insert(
                path_key.clone(),
                Value::List(path_partial),
            );

            dict_list.push(Value::Dict(dict));
        }

        dict_list.serialize(serializer)
    }

    /// Deserialize the bencode `files` list into `Vec<TorrentFile>`.
    ///
    /// Any unexpected bencode shape (non-list outer value, non-dict entry,
    /// non-integer `length`, non-list `path`, non-UTF-8 path segment) is
    /// reported as a descriptive serde type error; `length` and `path` are
    /// both mandatory in every entry.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<TorrentFile>, D::Error>
    where
        D: Deserializer<'de>,
    {
        // The outer value must be a bencode list.
        let values = match Value::deserialize(deserializer)? {
            Value::List(v) => v,
            Value::Bytes(v) => {
                return Err(de::Error::invalid_type(
                    de::Unexpected::Bytes(&v),
                    &"list of dict",
                ))
            }
            Value::Dict(_) => {
                return Err(de::Error::invalid_type(
                    de::Unexpected::Map,
                    &"list of dict",
                ))
            }
            Value::Integer(v) => {
                return Err(de::Error::invalid_type(
                    de::Unexpected::Signed(v),
                    &"list of dict",
                ))
            }
        };

        let mut torrent_files: Vec<TorrentFile> = vec![];
        let length_key = Cow::Owned(String::from("length").into_bytes());
        let path_key = Cow::Owned(String::from("path").into_bytes());
        for v in values {
            // Each list entry must be a dict holding the two expected keys.
            let file = match v {
                Value::Dict(v) => v,
                Value::Bytes(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Bytes(&v),
                        &"dict with keys length and path",
                    ))
                }
                Value::Integer(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Signed(v),
                        &"dict with keys length and path",
                    ))
                }
                Value::List(_) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Seq,
                        &"dict with keys length and path",
                    ))
                }
            };

            let length = file.get(&length_key);
            let path = file.get(&path_key);
            if length.is_none() {
                return Err(de::Error::missing_field("length"));
            }
            if path.is_none() {
                return Err(de::Error::missing_field("path"));
            }

            // `length` must be a bencode integer (file size in bytes).
            let length = match length.unwrap() {
                Value::Integer(v) => *v as u64,
                Value::Bytes(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Bytes(&v),
                        &"integer",
                    ))
                }
                Value::Dict(_) => {
                    return Err(de::Error::invalid_type(de::Unexpected::Map, &"integer"))
                }
                Value::List(_) => {
                    return Err(de::Error::invalid_type(de::Unexpected::Seq, &"integer"))
                }
            };

            // `path` must be a list of UTF-8 byte strings; segments are
            // joined into one string separated by `/`.
            let path = match path.unwrap() {
                Value::List(v) => {
                    let mut initial = true;
                    let mut final_str = String::new();
                    for i in v {
                        let partial = match i {
                            Value::Bytes(v) => std::str::from_utf8(v).map_err(|_| {
                                de::Error::invalid_value(
                                    de::Unexpected::Bytes(v),
                                    &"a valid UTF-8 string",
                                )
                            })?,
                            Value::Dict(_) => {
                                return Err(de::Error::invalid_type(de::Unexpected::Map, &"string"))
                            }
                            Value::Integer(v) => {
                                return Err(de::Error::invalid_type(
                                    de::Unexpected::Signed(*v),
                                    &"string",
                                ))
                            }
                            Value::List(_) => {
                                return Err(de::Error::invalid_type(de::Unexpected::Seq, &"string"))
                            }
                        };

                        // Only prepend the separator after the first segment.
                        if initial {
                            initial = false;
                            final_str.push_str(partial);
                        } else {
                            final_str.push_str(&format!("/{partial}"));
                        }
                    }

                    final_str
                }
                Value::Bytes(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Bytes(&v),
                        &"string",
                    ))
                }
                Value::Dict(_) => {
                    return Err(de::Error::invalid_type(de::Unexpected::Map, &"string"))
                }
                Value::Integer(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Signed(*v),
                        &"string",
                    ))
                }
            };

            torrent_files.push(TorrentFile { length, path })
        }

        Ok(torrent_files)
    }
}

View File

@ -0,0 +1,37 @@
use std::fs;
use bendy::serde::{from_bytes, to_bytes};
use sha1::{Digest, Sha1};
use crate::torrent::errors::TorrentError;
use super::Torrent;
impl Torrent<'_> {
    /// Deserialize a torrent from raw bencoded bytes.
    ///
    /// # Errors
    /// Returns [`TorrentError::ParseTorrent`] when the bytes are not a valid
    /// bencoded torrent.
    pub fn parse_bytes<'a>(bytes: &'a [u8]) -> Result<Torrent<'_>, TorrentError> {
        from_bytes::<'a>(bytes).map_err(TorrentError::ParseTorrent)
    }

    /// Total download size in bytes: the single-file `length` when non-zero,
    /// otherwise the sum of every entry of the multi-file `files` list.
    pub fn calc_download_lenght(&self) -> u64 {
        if self.info.length != 0 {
            return self.info.length;
        }

        self.info.files.iter().map(|f| f.length).sum()
    }

    /// Hex-encoded SHA1 of the bencoded `info` dictionary — the torrent's
    /// "info hash" used to identify it to trackers and peers.
    ///
    /// # Errors
    /// Returns [`TorrentError::EncodeInfo`] when the `info` dictionary cannot
    /// be re-encoded to bencode.
    pub fn calc_hash(&self) -> Result<String, TorrentError> {
        // NOTE(review): the previous version also dumped the encoded data to
        // hard-coded paths under /home/datahearth with `.unwrap()` — leftover
        // debug code, removed.
        let encoded = to_bytes(&self.info).map_err(TorrentError::EncodeInfo)?;

        let mut hasher = Sha1::new();
        hasher.update(&encoded);

        Ok(hex::encode(hasher.finalize()))
    }
}

View File

@ -1,51 +1,53 @@
mod de;
mod ext_parsing;
mod display;
pub mod create;
pub mod parse;
pub mod sha1;
mod main;
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde_with::{serde_as, BoolFromInt};
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, skip_serializing_none, BoolFromInt, TimestampSeconds};
use super::v1::de::*;
#[derive(Debug, Deserialize)]
pub struct Torrent {
#[derive(Debug, Deserialize, Serialize)]
pub struct Torrent<'a> {
/// Announcer URL
pub announce: String,
/// Torrent information
pub info: TorrentInfo,
pub info: TorrentInfo<'a>,
/// Non official fields
#[serde(flatten)]
pub additional_fields: RootAdditionalFields,
#[serde(flatten, borrow)]
pub additional_fields: RootAdditionalFields<'a>,
}
/// TorrentInfo is a struct that contains all the information about the torrent file.
#[derive(Debug, Deserialize)]
pub struct TorrentInfo {
/// Recommanded output file or root directory
#[skip_serializing_none]
#[serde_as]
#[derive(Debug, Deserialize, Serialize)]
pub struct TorrentInfo<'a> {
/// Recommended output file or root directory.
pub name: String,
/// Size of each data piece
/// Size of each data piece.
#[serde(rename = "piece length")]
pub piece_length: u64,
/// SHA1 hashes of each pieces
#[serde(deserialize_with = "from_bytes_to_vec")]
/// SHA1 hashes of each pieces concatenated.
/// Each hash is 20 bytes long.
#[serde(default, with = "ext_parsing::pieces")]
pub pieces: Vec<String>,
/// In case of a single file, represents the file size
pub length: Option<u64>,
#[serde(default, deserialize_with = "from_files_list_to_struct")]
/// In case of a single file, represents the file size.
/// NOTE: `Option<u64>` would be the natural type here, but bendy's serde
/// support appears unable to round-trip it, so 0 is used to mean "absent".
#[serde(default, skip_serializing_if = "ext_parsing::skip_0")]
pub length: u64,
/// In case of multiple files/directories, represents all files/directories available
pub files: Option<Vec<TorrentFiles>>,
#[serde(default, with = "ext_parsing::files", skip_serializing_if = "Vec::is_empty")]
pub files: Vec<TorrentFile>,
// Additional fields available that are not part of the original specification
#[serde(flatten)]
pub additional_fields: TorrentInfoAdditionalFields,
#[serde(flatten, borrow)]
pub additional_fields: TorrentInfoAdditionalFields<'a>,
}
#[derive(Debug, Deserialize)]
pub struct TorrentFiles {
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
pub struct TorrentFile {
/// Output file path
pub path: String,
/// File size
@ -54,38 +56,39 @@ pub struct TorrentFiles {
/// RootAdditionalFields is a struct that contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
#[derive(Debug, Deserialize)]
pub struct RootAdditionalFields {
#[skip_serializing_none]
#[serde_as]
#[derive(Debug, Deserialize, Serialize)]
pub struct RootAdditionalFields<'a> {
/// Torrent creator or software name
#[serde(rename = "created by")]
pub created_by: Option<String>,
/// Torrent creation date
#[serde(
default,
rename = "creation date",
deserialize_with = "from_i64_to_datetime"
)]
#[serde_as(as = "Option<TimestampSeconds<i64>>")]
#[serde(default, rename = "creation date")]
pub creation_date: Option<DateTime<Utc>>,
/// Comment about the torrent
pub comment: Option<String>,
// #[serde(rename = "url-list")]
// /// List of resources available
// pub url_list: Option<Vec<String>>,
#[serde(flatten)]
/// List of resources available
#[serde(rename = "url-list")]
pub url_list: Option<Vec<String>>,
pub encoding: Option<String>,
/// Extra fields not explicitly covered by the struct
pub extra_fields: HashMap<String, serde_bencode::value::Value>,
#[serde(flatten, borrow)]
pub extra_fields: HashMap<String, bendy::value::Value<'a>>,
}
/// TorrentInfoAdditionalFields is a struct that contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
#[skip_serializing_none]
#[serde_as]
#[derive(Debug, Deserialize)]
pub struct TorrentInfoAdditionalFields {
#[derive(Debug, Deserialize, Serialize)]
pub struct TorrentInfoAdditionalFields<'a> {
/// Is the torrent private
#[serde_as(as = "BoolFromInt")]
#[serde(default)]
#[serde(default, skip_serializing_if = "ext_parsing::skip_bool")]
pub private: bool,
/// Extra fields not explicitly covered by the struct
#[serde(flatten)]
pub extra_fields: HashMap<String, serde_bencode::value::Value>,
#[serde(flatten, borrow)]
pub extra_fields: HashMap<String, bendy::value::Value<'a>>,
}

View File

@ -1,17 +0,0 @@
use std::fs;
use crate::{error::TorrentError, torrent::Parse};
use super::Torrent;
impl Parse<Torrent> for Torrent {
fn parse(path: String) -> Result<Torrent, TorrentError> {
let torrent_file = fs::read(path)?;
Ok(serde_bencode::from_bytes(&torrent_file)?)
}
fn parse_bytes(bytes: &[u8]) -> Result<Torrent, TorrentError> {
Ok(serde_bencode::from_bytes(bytes)?)
}
}

View File

@ -1,11 +0,0 @@
use crate::{error::TorrentError, torrent::Hash};
use super::Torrent;
impl Hash<Torrent> for Torrent {
fn calc_hash(&self) -> Result<String, TorrentError> {
let x = self.info;
todo!()
}
}

View File

@ -1,65 +0,0 @@
use std::{collections::HashMap, net::IpAddr};
use crate::error::TrackerError;
use super::{Announce, Tracker, TrackerPeer, TrackerResponse};
impl Announce for Tracker {
async fn announce(&mut self, req: super::TrackerRequest) -> Result<(), TrackerError> {
let req = reqwest::Client::new()
.get(&self.url)
.query(&req)
.send()
.await?;
if !req.status().is_success() {
return Err(TrackerError::InvalidStatus(req.status().as_u16()));
}
let rsp: TrackerResponse = serde_bencode::from_bytes(&req.bytes().await?)?;
match rsp {
TrackerResponse::Error { failure_reason } => {
return Err(TrackerError::AnnounceFailed(failure_reason))
}
TrackerResponse::Success { interval, peers } => {
let mut hashmap_peers = HashMap::new();
for p in peers {
hashmap_peers.insert(
p.peer_id,
TrackerPeer {
ip: p.ip.parse()?,
port: p.port,
},
);
}
self.interval = Some(interval);
self.peers = Some(hashmap_peers);
}
TrackerResponse::SuccessCompact { interval, peers } => {
let mut hashmap_peers = HashMap::new();
if (peers.len() % 6) != 0 {
return Err(TrackerError::InvalidPeersCompactList(peers.len() as u64));
}
for (i, peer) in peers.chunks(6).enumerate() {
let (ip, port) = peer.split_at(6);
let ip: [u8; 4] = ip.try_into().expect(
"cannot convert &[u8] to &[u8; 4] where chunks is already of lenght 4",
);
let port = u16::from_be_bytes(port.try_into().expect(
"cannot convert &[u8] to &[u8; 2] where chunks is already of lenght 2",
));
hashmap_peers.insert(
i.to_string(),
TrackerPeer {
ip: IpAddr::from(ip),
port,
},
);
}
self.interval = Some(interval);
self.peers = Some(hashmap_peers);
}
}
Ok(())
}
}

View File

@ -1,45 +1,62 @@
mod announce;
//! Tracker operations
//!
//! Start by creating an instance of a tracker
//! ```rust
//! use brs::torrent::v1::Torrent;
//! use brs::torrent::Parse;
//! use brs::tracker::Tracker;
//!
//! fn main() {
//! let torrent = match Torrent::parse("./file.torrent") {
//! Ok(v) => v,
//! Err(e) => return eprintln!("{e}"),
//! };
//!
//! let tracker = Tracker::new(&torrent.url);
//! }
//! ```
mod tracker;
use std::{collections::HashMap, net::IpAddr};
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, skip_serializing_none, BoolFromInt};
use crate::error::TrackerError;
#[derive(Serialize)]
#[derive(Default, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum TrackerEvent {
Started,
Completed,
Stopped,
#[default]
Empty,
}
/// Tracker query parameters.
#[serde_as]
#[skip_serializing_none]
#[derive(Serialize)]
#[derive(Default, Serialize)]
pub struct TrackerRequest {
/// SHA1 hash of the bencode form. Must be 20 bytes long
/// SHA1 hash of the bencode form. Must be 20 bytes long.
pub info_hash: String,
/// 20 characters ID generate before a download request
/// 20-character ID generated before a download request.
pub peer_id: String,
/// Client's IP address
/// Client's IP address.
pub ip: Option<IpAddr>,
/// Client's listening port
/// Usually, downloader will try common range: 6881 - 6889
/// Client's listening port.
/// Usually, downloader will try common range: `6881` to `6889`.
pub port: Option<u16>,
/// Total amount of bytes uploaded encoded in base 10 ASCII
/// Total amount of bytes uploaded encoded in base 10 `ASCII`.
pub uploaded: String,
/// Total amount of bytes downloaded encoded in base 10 ASCII
/// Total amount of bytes downloaded encoded in base 10 `ASCII`.
pub downloaded: String,
/// Total amount of bytes left to download encoded in base 10 ASCII
/// Total amount of bytes left to download encoded in base 10 `ASCII`.
pub left: String,
/// Annoucement event done regularly
#[serde(default = "TrackerEvent::Started")]
pub event: Option<TrackerEvent>,
/// Announcement event.
pub event: TrackerEvent,
/// Should the tracker respond with a compact peers list
#[serde_as(as = "BoolFromInt")]
#[serde(default)]
pub compact: bool,
}
@ -63,7 +80,8 @@ pub enum TrackerResponse {
interval: u64,
/// List of peers in BigEndian order.
/// 4 bytes allocated for the IPv4 address and 2 bytes for the port.
peers: Vec<u8>,
peers: Option<Vec<u8>>,
peers6: Option<Vec<u8>>
},
}
@ -93,10 +111,3 @@ pub struct TrackerPeer {
pub ip: IpAddr,
pub port: u16,
}
pub trait Announce {
fn announce(
&mut self,
req: TrackerRequest,
) -> impl std::future::Future<Output = Result<(), TrackerError>> + Send;
}

102
brs/src/tracker/tracker.rs Normal file
View File

@ -0,0 +1,102 @@
use std::{collections::HashMap, net::IpAddr};
use bendy::serde::from_bytes;
use crate::error::TrackerError;
use super::{Tracker, TrackerPeer, TrackerRequest, TrackerResponse};
const IPV4_PEER_LEN: u8 = 6;
const IPV6_PEER_LEN: u8 = 18;
impl Tracker {
    /// Create a new `Tracker` pointing at `url`, with no announce data yet.
    pub fn new(url: String) -> Self {
        Self {
            url,
            interval: None,
            peers: None,
        }
    }

    /// Perform an announce request and store the returned interval and peer
    /// list on `self`.
    ///
    /// Compact responses use fixed-size records: 4 address bytes + 2 port
    /// bytes for IPv4 (`IPV4_PEER_LEN` = 6), 16 + 2 for IPv6
    /// (`IPV6_PEER_LEN` = 18), ports in network (big-endian) order.
    ///
    /// # Errors
    /// - [`TrackerError::AnnounceRequest`] when the HTTP request fails.
    /// - [`TrackerError::InvalidStatus`] on a non-2xx response.
    /// - [`TrackerError::BencodeDecode`] when the body is not valid bencode.
    /// - [`TrackerError::AnnounceFailed`] when the tracker reports a failure.
    /// - [`TrackerError::InvalidPeersCompactList`] when a compact list is not
    ///   a multiple of the per-peer record size.
    pub async fn announce(&mut self, req: TrackerRequest) -> Result<(), TrackerError> {
        let req = reqwest::Client::new()
            .get(&self.url)
            .query(&req)
            .send()
            .await?;

        if !req.status().is_success() {
            return Err(TrackerError::InvalidStatus(req.status().as_u16()));
        }

        let rsp: TrackerResponse = from_bytes(&req.bytes().await?)?;
        match rsp {
            TrackerResponse::Error { failure_reason } => {
                return Err(TrackerError::AnnounceFailed(failure_reason))
            }
            TrackerResponse::Success { interval, peers } => {
                let mut hashmap_peers = HashMap::new();
                for p in peers {
                    hashmap_peers.insert(
                        p.peer_id,
                        TrackerPeer {
                            ip: p.ip.parse()?,
                            port: p.port,
                        },
                    );
                }

                self.interval = Some(interval);
                self.peers = Some(hashmap_peers);
            }
            TrackerResponse::SuccessCompact {
                interval,
                peers,
                peers6,
            } => {
                let mut hashmap_peers = HashMap::new();

                if let Some(p) = peers {
                    if (p.len() % IPV4_PEER_LEN as usize) != 0 {
                        return Err(TrackerError::InvalidPeersCompactList(
                            IPV4_PEER_LEN,
                            p.len() as u64,
                        ));
                    }

                    for peer in p.chunks(IPV4_PEER_LEN as usize) {
                        // 4 address bytes, then 2 big-endian port bytes.
                        let (ip, port) = peer.split_at(4);
                        // Key by insertion order so IPv4 and IPv6 entries
                        // never overwrite each other (a shared enumerate
                        // index would collide between the two lists).
                        let key = hashmap_peers.len().to_string();
                        hashmap_peers.insert(
                            key,
                            TrackerPeer {
                                ip: IpAddr::from(TryInto::<[u8; 4]>::try_into(ip).expect(
                                    "a 6-byte chunk always splits into a 4-byte address",
                                )),
                                port: u16::from_be_bytes(port.try_into().expect(
                                    "a 6-byte chunk always leaves a 2-byte port",
                                )),
                            },
                        );
                    }
                }

                if let Some(p6) = peers6 {
                    if (p6.len() % IPV6_PEER_LEN as usize) != 0 {
                        return Err(TrackerError::InvalidPeersCompactList(
                            IPV6_PEER_LEN,
                            p6.len() as u64,
                        ));
                    }

                    for peer in p6.chunks(IPV6_PEER_LEN as usize) {
                        // An IPv6 address is 16 bytes (split_at(14) here was a
                        // bug: it left a 14-byte "address" and 4-byte "port",
                        // making both conversions below panic).
                        let (ip, port) = peer.split_at(16);
                        let key = hashmap_peers.len().to_string();
                        hashmap_peers.insert(
                            key,
                            TrackerPeer {
                                ip: IpAddr::from(TryInto::<[u8; 16]>::try_into(ip).expect(
                                    "an 18-byte chunk always splits into a 16-byte address",
                                )),
                                port: u16::from_be_bytes(port.try_into().expect(
                                    "an 18-byte chunk always leaves a 2-byte port",
                                )),
                            },
                        );
                    }
                }

                self.interval = Some(interval);
                self.peers = Some(hashmap_peers);
            }
        }

        Ok(())
    }
}

View File

@ -12,3 +12,4 @@ clap_complete = "4.5"
thiserror = "1.0"
tokio = { version = "1.37", features = ["full"] }
rand = "0.8"
bendy = { version = "0.3", features = ["std", "serde"] }

View File

@ -5,7 +5,7 @@ use std::io;
use clap::{Command, CommandFactory, Parser, Subcommand, ValueHint};
use clap_complete::{generate, Generator, Shell};
use torrent::{create, metadata};
use torrent::{create, metadata, raw};
use tracker::check;
#[derive(Parser)]
@ -44,6 +44,11 @@ enum TorrentCmds {
#[arg(long)]
v2: bool,
},
Raw {
/// Path to an existing torrent file
#[arg(value_hint = ValueHint::FilePath)]
path: String,
},
/// Create a torrent file
Create {
/// Path to an existing torrent file
@ -77,6 +82,7 @@ async fn main() {
Cmds::Torrent { commands } => match commands {
TorrentCmds::Metadata { path, v1, v2 } => metadata(v1, v2, path),
TorrentCmds::Create { path } => create(path, String::new()),
TorrentCmds::Raw { path } => raw(path),
},
Cmds::Tracker { commands } => match commands {
TrackerCmds::Check { path } => check(path).await,

View File

@ -1,4 +1,4 @@
use std::fs;
use std::{collections::HashMap, fs};
use brs::torrent::v1;
@ -15,6 +15,12 @@ pub(crate) fn metadata(v1: bool, _v2: bool, path: String) {
}
}
/// Dump a torrent file as a raw bencode value map for debugging.
///
/// Reports read/decode failures on stderr instead of panicking, since both
/// the path and the file content come from the user.
pub(crate) fn raw(path: String) {
    let bytes = match fs::read(&path) {
        Ok(v) => v,
        Err(e) => return eprintln!("failed to read {path}: {e}"),
    };

    match bendy::serde::from_bytes::<HashMap<String, bendy::value::Value>>(&bytes) {
        Ok(out) => println!("{:?}", out),
        Err(e) => eprintln!("failed to decode {path}: {e}"),
    }
}
// TODO: torrent creation is not implemented yet; the signature is kept so the
// CLI command surface stays stable.
pub(crate) fn create(_path: String, _data: String) {
    unimplemented!()
}