feat: add v1 torrent parsing

This commit is contained in:
Antoine Langlois 2024-04-01 14:20:05 +02:00
parent beaec9242f
commit 715bdcc275
Signed by: DataHearth
GPG Key ID: 946E2D0C410C7B3D
24 changed files with 2127 additions and 3505 deletions

1251
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -2,16 +2,18 @@
name = "brs"
version = "0.1.0"
edition = "2021"
description = "BRS stands for \"BitTorrent Rust\". It a library for the BitTorrent protocol."
description = "BRS stands for \"BitTorrent Rust\". It's a library for the BitTorrent protocol."
authors = [ "Antoine Langlois <dev@antoine-langlois.net>" ]
[dependencies]
bendy = { version = "0.3", features = ["serde"] }
bendy = { version = "0.3", features = ["std", "serde"] }
chrono = { version = "0.4", features = ["serde"] }
hex = "0.4"
human_bytes = "0.4"
rand = "0.8"
reqwest = "0.12"
serde = { version = "1.0", features = ["derive"] }
serde_bencode = "0.2"
serde_bytes = "0.11"
serde_json = "1.0"
serde_with = { version = "3.7", features = ["chrono"] }
sha1 = "0.10"
thiserror = "1.0"

View File

@ -1,11 +1,21 @@
use std::io;
use std::net::AddrParseError;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum TorrentError {
#[error("Failed to parse torrent file: {0}")]
ParseTorrent(String),
#[error("Failed to read torrent file: {0}")]
ReadTorrent(#[from] io::Error)
/// Errors produced while announcing to, or decoding responses from, a
/// BitTorrent tracker.
#[derive(Debug, Error)]
pub enum TrackerError {
    /// The HTTP announce request itself failed (connection, TLS, timeout...).
    #[error("Failed to execute announce request: {0}")]
    AnnounceRequest(#[from] reqwest::Error),
    /// The response body was not valid bencode.
    #[error("Failed to decode response body: {0}")]
    BencodeDecode(#[from] bendy::serde::Error),
    /// The tracker answered with a non-success HTTP status.
    #[error("Tracker responded with an invalid status code: {0}")]
    InvalidStatus(u16),
    /// The tracker returned a bencoded "failure reason" message.
    #[error("Announce request failed: {0}")]
    AnnounceFailed(String),
    /// A peer's textual IP address could not be parsed.
    #[error("Failed to convert IP string to IpAddr: {0}")]
    IpParse(#[from] AddrParseError),
    /// A compact peers blob is not a whole multiple of the per-peer entry
    /// size (first field: expected entry size, second: actual blob length).
    #[error("Invalid compact peers list. Expected a list of {0}*n bytes, found: {1}")]
    InvalidPeersCompactList(u8, u64),
    /// The tracker URL could not be parsed.
    #[error("Failed to parse tracker URL: {0}")]
    ParseURL(String),
}

View File

@ -1,4 +1,6 @@
pub mod torrent;
// pub mod torrent;
pub mod tracker;
pub mod peer;
mod error;
mod macros;

View File

@ -1,92 +1,8 @@
#[macro_export]
macro_rules! write_option {
($f:expr, $k:expr, $v:expr) => {{
if let Some(v) = $v {
write!($f, "{}: {}\n", $k, v)?;
}
}};
}
#[macro_export]
macro_rules! match_dict {
($value:expr, $expected:expr) => {{
match $value {
serde_bencode::value::Value::Dict(v) => Ok(v),
serde_bencode::value::Value::List(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Seq,
&$expected,
)),
serde_bencode::value::Value::Int(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Signed(v),
&$expected,
)),
serde_bencode::value::Value::Bytes(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Bytes(&v),
&$expected,
)),
}
}};
}
#[macro_export]
macro_rules! match_list {
($value:expr, $expected:expr) => {{
match $value {
serde_bencode::value::Value::List(v) => Ok(v),
serde_bencode::value::Value::Dict(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Map,
&$expected,
)),
serde_bencode::value::Value::Int(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Signed(*v),
&$expected,
)),
serde_bencode::value::Value::Bytes(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Bytes(&v),
&$expected,
)),
}
}};
}
#[macro_export]
macro_rules! match_bytes {
($value:expr, $expected:expr) => {{
match $value {
serde_bencode::value::Value::Bytes(v) => Ok(v),
serde_bencode::value::Value::Dict(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Map,
&$expected,
)),
serde_bencode::value::Value::Int(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Signed(*v),
&$expected,
)),
serde_bencode::value::Value::List(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Seq,
&$expected,
)),
}
}};
}
#[macro_export]
macro_rules! match_int {
($value:expr, $expected:expr) => {{
match $value {
serde_bencode::value::Value::Int(v) => Ok(v),
serde_bencode::value::Value::Dict(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Map,
&$expected,
)),
serde_bencode::value::Value::Bytes(v) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Bytes(&v),
&$expected,
)),
serde_bencode::value::Value::List(_) => Err(serde::de::Error::invalid_type(
serde::de::Unexpected::Seq,
&$expected,
)),
/// Write `"<key>: <value>\n"` to formatter `$f` unless the predicate
/// `$empty` reports `$v` as its type's "empty"/default sentinel
/// (the `skip_empty::*` helpers are passed here by callers).
macro_rules! write_optional {
    ($f:expr, $k:expr, $v:expr, $empty:expr) => {{
        if !$empty($v) {
            write!($f, "{}: {}\n", $k, $v)?;
        }
    }};
}

31
brs/src/peer.rs Normal file
View File

@ -0,0 +1,31 @@
use rand::{distributions::Alphanumeric, Rng};
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Generate a peer ID matching the specification `BEP 0020`
/// (Azureus-style: a `-XXYYYY-` client prefix followed by random
/// alphanumeric characters, 20 bytes total).
///
/// * `prefix` - optional custom prefix; when `None`, a `-B<maj><min><patch>-`
///   prefix is derived from this crate's version.
///
/// The random tail length uses `saturating_sub` so a prefix of 20+
/// characters yields an empty tail instead of panicking in debug builds
/// (or producing an over-long ID in release builds, where the previous
/// `20 - prefix.len()` would wrap).
pub fn gen_peer_id(prefix: Option<String>) -> String {
    let prefix = if let Some(v) = prefix {
        v
    } else {
        // CARGO_PKG_VERSION is always "major.minor.patch".
        let mut iter = VERSION.splitn(3, ".");
        let major = iter
            .next()
            .expect("version should contains 3 elements, not 0");
        let minor = iter
            .next()
            .expect("version should contains 3 elements, not 1");
        let patch = iter
            .next()
            .expect("version should contains 3 elements, not 2");
        format!("-B{}{}{}-", major, minor, patch)
    };
    let random_alphanum: String = rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(20usize.saturating_sub(prefix.len()))
        .map(char::from)
        .collect();
    format!("{prefix}{random_alphanum}")
}

View File

@ -1,94 +0,0 @@
use chrono::{DateTime, Utc};
use serde::{
de::{Error, Unexpected},
Deserialize, Deserializer,
};
use crate::{match_bytes, match_dict, match_int, match_list, torrent::TorrentFiles};
/// Serde helper: deserialize a bencode integer (0/1) into a `bool`.
/// Any other value is rejected as malformed.
/// NOTE(review): despite the name ("bool to int") this converts int -> bool.
pub fn from_bool_to_int<'de, D>(deserializer: D) -> Result<bool, D::Error>
where
    D: Deserializer<'de>,
{
    match u8::deserialize(deserializer)? {
        0 => Ok(false),
        1 => Ok(true),
        other => Err(Error::invalid_value(
            Unexpected::Unsigned(other as u64),
            &"zero or one",
        )),
    }
}
/// Serde helper: deserialize an optional Unix timestamp (seconds) into an
/// optional `DateTime<Utc>`.
///
/// Returns `Ok(None)` when the field is absent; an out-of-range timestamp
/// also yields `None` (that is `DateTime::from_timestamp`'s behavior),
/// not an error.
pub fn from_i64_to_datetime<'de, D>(deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error>
where
    D: Deserializer<'de>,
{
    let timestamp: Option<i64> = Option::deserialize(deserializer)?;
    if let Some(v) = timestamp {
        return Ok(DateTime::from_timestamp(v, 0));
    }
    Ok(None)
}
/// Serde helper: deserialize the concatenated SHA1 `pieces` blob into a
/// vec of hex strings, one per 20-byte hash.
/// NOTE(review): a trailing chunk shorter than 20 bytes is not rejected
/// here — confirm whether malformed blobs should be an error.
pub fn from_bytes_to_vec<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
where
    D: Deserializer<'de>,
{
    let bytes = serde_bytes::ByteBuf::deserialize(deserializer)?;
    Ok(bytes
        .chunks(20)
        .map(|v| hex::encode(v))
        .collect::<Vec<String>>())
}
pub fn from_files_list_to_struct<'de, D>(
deserializer: D,
) -> Result<Option<Vec<TorrentFiles>>, D::Error>
where
D: Deserializer<'de>,
{
let list: Vec<serde_bencode::value::Value> = match Option::deserialize(deserializer)? {
Some(v) => v,
None => return Ok(None),
};
let mut torrent_files = vec![];
for v in list {
let file_dict = match_dict!(v, "map with keys \"path\" & \"length\"")?;
if file_dict.len() > 2 {
return Err(Error::invalid_length(
file_dict.len(),
&"path and length only",
));
}
let path = file_dict.get(b"path".as_ref());
let length = file_dict.get(b"length".as_ref());
if path.is_none() {
return Err(Error::missing_field(
"\"path\" is mandatory in a files list",
));
} else if length.is_none() {
return Err(Error::missing_field(
"\"length\" is mandatory in a files list",
));
}
let path = {
let mut str_path = String::new();
for chunks in match_list!(path.unwrap(), "list of bytes")? {
let chunks = match_bytes!(chunks, "sequence of bytes")?;
str_path.push_str(std::str::from_utf8(chunks).map_err(|_| {
Error::invalid_value(Unexpected::Bytes(&chunks), &"Invalid bytes string")
})?);
}
str_path
};
let length = match_int!(length.unwrap(), "integer")?;
torrent_files.push(TorrentFiles {
path,
length: *length as u64,
})
}
Ok(Some(torrent_files))
}

View File

@ -1,116 +0,0 @@
use std::collections::HashMap;
use human_bytes::human_bytes;
use crate::write_option;
use super::Torrent;
impl std::fmt::Display for Torrent {
    /// Render a human-readable summary: tracker/root metadata first,
    /// then the torrent's `info` section, then (optionally) its files.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "tracker: {}\n", self.announce)?;
        write_option!(f, "created by", &self.additional_fields.created_by);
        write_option!(f, "creation date", &self.additional_fields.creation_date);
        write_option!(f, "comment", &self.additional_fields.comment);
        // Unknown root-level keys are printed generically; lists and dicts
        // recurse through the helpers below.
        if self.additional_fields.extra_fields.len() > 0 {
            for (k, v) in self.additional_fields.extra_fields.clone().into_iter() {
                let value = match v {
                    serde_bencode::value::Value::Bytes(v) => std::str::from_utf8(&v)
                        .map_err(|_| std::fmt::Error)?
                        .to_string(),
                    serde_bencode::value::Value::Int(v) => v.to_string(),
                    serde_bencode::value::Value::List(v) => {
                        display_list(f, v)?;
                        continue;
                    }
                    serde_bencode::value::Value::Dict(v) => {
                        display_dict(f, v)?;
                        continue;
                    }
                };
                write!(f, "{k}: {value}\n")?;
            }
        }
        write!(f, "\n")?;
        write!(f, "Torrent information:\n")?;
        write!(f, "\tname: {}\n", self.info.name)?;
        write!(f, "\tprivate: {}\n", self.info.additional_fields.private)?;
        if let Some(v) = self.info.length {
            write!(f, "\tfile size: {}\n", human_bytes(v as f64))?;
        }
        write!(f, "\tpieces: {:?}\n", self.info.pieces.len())?;
        write!(
            f,
            "\tpiece size: {}\n",
            human_bytes(self.info.piece_length as f64)
        )?;
        // Per-file listing is opt-in via an environment variable because it
        // can be very long.
        if std::env::var("BRS_PRINT_TORRENT_FILES").is_ok() {
            if let Some(v) = &self.info.files {
                write!(f, "\tfiles:\n")?;
                for file in v {
                    write!(f, "\t - {}\n", file.path)?;
                    write!(f, "\t   size: {}\n", human_bytes(file.length as f64))?;
                }
            }
        }
        if self.info.additional_fields.extra_fields.len() > 0 {
            for (k, v) in &self.info.additional_fields.extra_fields {
                write!(f, "\t{}: {:#?}\n", k, v)?;
            }
        }
        Ok(())
    }
}
/// Recursively print a bencode dict as `key: value` lines.
/// Keys must be valid UTF-8; nested lists/dicts recurse, scalar values
/// are stringified.
fn display_dict(
    f: &mut std::fmt::Formatter,
    dict: HashMap<Vec<u8>, serde_bencode::value::Value>,
) -> std::fmt::Result {
    for (k, v) in dict {
        let key = std::str::from_utf8(&k).map_err(|_| std::fmt::Error)?;
        let value = match v {
            serde_bencode::value::Value::Bytes(v) => std::str::from_utf8(&v)
                .map_err(|_| std::fmt::Error)?
                .to_string(),
            serde_bencode::value::Value::Int(v) => v.to_string(),
            serde_bencode::value::Value::List(v) => {
                display_list(f, v)?;
                continue;
            }
            serde_bencode::value::Value::Dict(v) => {
                display_dict(f, v)?;
                continue;
            }
        };
        write!(f, "{key}: {value}\n").map_err(|_| std::fmt::Error)?
    }
    Ok(())
}
/// Recursively print a bencode list as ` - value` bullet lines.
/// Nested lists/dicts recurse through the matching helper.
fn display_list(
    f: &mut std::fmt::Formatter,
    list: Vec<serde_bencode::value::Value>,
) -> std::fmt::Result {
    for element in list {
        let value = match element {
            serde_bencode::value::Value::Bytes(v) => std::str::from_utf8(&v)
                .map_err(|_| std::fmt::Error)?
                .to_string(),
            serde_bencode::value::Value::Int(v) => v.to_string(),
            serde_bencode::value::Value::List(v) => {
                display_list(f, v)?;
                continue;
            }
            serde_bencode::value::Value::Dict(v) => {
                display_dict(f, v)?;
                continue;
            }
        };
        write!(f, " - {value}\n")?;
    }
    Ok(())
}

15
brs/src/torrent/errors.rs Normal file
View File

@ -0,0 +1,15 @@
use std::io;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum TorrentError {
#[error("Failed to parse torrent file: {0}")]
ParseTorrent(bendy::serde::Error),
#[error("Failed to encode info dictionnary: {0}")]
EncodeInfo(bendy::serde::Error),
#[error("Failed to read torrent file: {0}")]
ReadTorrent(#[from] io::Error),
#[error("Failed to convert bytes array to UTF-8 string")]
Utf8Conversion(#[from] std::string::FromUtf8Error),
}

View File

@ -1,91 +1,3 @@
mod de;
mod display;
use std::{collections::HashMap, fs};
use chrono::{DateTime, Utc};
use serde::Deserialize;
use crate::{error::TorrentError, torrent::de::*};
#[derive(Debug, Deserialize)]
pub struct Torrent {
/// Announcer URL
pub announce: String,
/// Torrent information
pub info: TorrentInfo,
/// Non official fields
#[serde(flatten)]
pub additional_fields: RootAdditionalFields,
}
/// TorrentInfo is a struct that contains all the information about the torrent file.
#[derive(Debug, Deserialize)]
pub struct TorrentInfo {
/// Recommanded output file or root directory
pub name: String,
/// Size of each data piece
#[serde(rename = "piece length")]
pub piece_length: u64,
/// SHA1 hashes of each pieces
#[serde(deserialize_with = "from_bytes_to_vec")]
pub pieces: Vec<String>,
/// In case of a single file, represents the file size
pub length: Option<u64>,
#[serde(default, deserialize_with = "from_files_list_to_struct")]
/// In case of multiple files/directories, represents all files/directories available
pub files: Option<Vec<TorrentFiles>>,
// Additional fields available that are not part of the original specification
#[serde(flatten)]
pub additional_fields: TorrentInfoAdditionalFields,
}
#[derive(Debug, Deserialize)]
pub struct TorrentFiles {
/// Output file path
pub path: String,
/// File size
pub length: u64,
}
/// RootAdditionalFields is a struct that contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
#[derive(Debug, Deserialize)]
pub struct RootAdditionalFields {
/// Torrent creator or software name
#[serde(rename = "created by")]
pub created_by: Option<String>,
/// Torrent creation date
#[serde(
default,
rename = "creation date",
deserialize_with = "from_i64_to_datetime"
)]
pub creation_date: Option<DateTime<Utc>>,
/// Comment about the torrent
pub comment: Option<String>,
// #[serde(rename = "url-list")]
// /// List of resources available
// pub url_list: Option<Vec<String>>,
#[serde(flatten)]
/// Extra fields not explicitly covered by the struct
pub extra_fields: HashMap<String, serde_bencode::value::Value>,
}
/// TorrentInfoAdditionalFields is a struct that contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
#[derive(Debug, Deserialize)]
pub struct TorrentInfoAdditionalFields {
/// Is the torrent private
#[serde(default, deserialize_with = "from_bool_to_int")]
pub private: bool,
/// Extra fields not explicitly covered by the struct
#[serde(flatten)]
pub extra_fields: HashMap<String, serde_bencode::value::Value>,
}
/// Read the file at `path` and bencode-decode it into a [`Torrent`].
///
/// # Errors
/// `TorrentError::ReadTorrent` on I/O failure,
/// `TorrentError::ParseTorrent` when the bytes are not valid bencode.
pub fn parse(path: String) -> Result<Torrent, TorrentError> {
    // `ReadTorrent` carries `#[from] io::Error`, so `?` converts directly.
    let torrent_file = fs::read(path)?;
    serde_bencode::from_bytes(&torrent_file).map_err(|e| TorrentError::ParseTorrent(e.to_string()))
}
pub mod errors;
pub mod v1;
pub mod v2;

View File

@ -0,0 +1,95 @@
use human_bytes::human_bytes;
use crate::{torrent::v1::ext_parsing::skip_empty, write_optional};
use super::Torrent;
impl std::fmt::Display for Torrent<'_> {
    /// Render a human-readable report in three sections: GENERAL,
    /// TORRENT INFORMATION, FILES.
    ///
    /// Fix: computing the info hash used `.unwrap()`, so an encoding
    /// failure panicked inside `Display`. It now maps to `fmt::Error`,
    /// which is the contract `fmt` callers expect.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "GENERAL\n\n")?;
        write!(f, " Tracker: {}\n", self.announce)?;
        write_optional!(
            f,
            " Created by",
            &self.additional_fields.created_by,
            skip_empty::string
        );
        write_optional!(
            f,
            " Creation date",
            &self.additional_fields.creation_date,
            skip_empty::date
        );
        write_optional!(
            f,
            " Comment",
            &self.additional_fields.comment,
            skip_empty::string
        );
        write_optional!(
            f,
            " Encoding",
            &self.additional_fields.encoding,
            skip_empty::string
        );
        if !self.additional_fields.url_list.is_empty() {
            write!(f, " Additional resources:\n")?;
            for ar in &self.additional_fields.url_list {
                write!(f, " - {}\n", ar)?;
            }
        }
        // Unknown root-level keys, printed with Debug formatting.
        if !self.additional_fields.extra_fields.is_empty() {
            for (k, v) in &self.additional_fields.extra_fields {
                write!(f, " {}: {:#?}\n", k, v,)?;
            }
        }
        write!(f, "\n")?;
        write!(f, "TORRENT INFORMATION\n\n")?;
        write!(f, " Name: {}\n", self.info.name)?;
        // Was `self.calc_hash().unwrap()` — a Display impl must not panic.
        write!(
            f,
            " Hash: {}\n",
            self.calc_hash().map_err(|_| std::fmt::Error)?
        )?;
        write_optional!(
            f,
            " Private",
            &self.info.additional_fields.private,
            skip_empty::bool
        );
        write!(f, " Pieces: {:?}\n", self.info.pieces.len())?;
        write!(
            f,
            " Piece size: {}\n",
            human_bytes(self.info.piece_length as f64)
        )?;
        write!(
            f,
            " Total size: {}\n",
            human_bytes(self.calc_download_lenght() as f64)
        )?;
        if !self.info.additional_fields.extra_fields.is_empty() {
            for (k, v) in &self.info.additional_fields.extra_fields {
                write!(f, " {}: {:#?}\n", k, v,)?;
            }
        }
        write!(f, "\nFILES\n\n")?;
        if !self.info.files.is_empty() {
            // Multi-file torrent: list every file with its size.
            for files in &self.info.files {
                write!(
                    f,
                    " {} ({})\n",
                    files.path,
                    human_bytes(files.length as f64)
                )?;
            }
        } else {
            // Single-file torrent: the name is the file.
            write!(
                f,
                " {} ({})",
                self.info.name,
                human_bytes(self.calc_download_lenght() as f64)
            )?;
        }
        Ok(())
    }
}

View File

@ -0,0 +1,213 @@
/// Predicates used with `#[serde(skip_serializing_if = ...)]` (and by the
/// `write_optional!` macro) to treat a field's `Default` value as
/// "absent". Signatures take references because serde passes `&Field`.
pub(super) mod skip_empty {
    use chrono::{DateTime, Utc};

    /// `true` when the integer is 0 (the default).
    #[inline(always)]
    pub(crate) fn i64(v: &i64) -> bool {
        *v == 0
    }

    /// `true` when the flag is unset (idiomatic `!*v` over `== false`).
    #[inline(always)]
    pub(crate) fn bool(v: &bool) -> bool {
        !*v
    }

    /// `true` for the empty string (idiomatic `is_empty()` over `len() == 0`).
    #[inline(always)]
    pub(crate) fn string(v: &String) -> bool {
        v.is_empty()
    }

    /// `true` when the timestamp equals `DateTime::<Utc>::default()`
    /// (the Unix epoch), i.e. the field was never set.
    #[inline(always)]
    pub(crate) fn date(v: &DateTime<Utc>) -> bool {
        *v == DateTime::<Utc>::default()
    }
}
/// Custom (de)serialization for the `pieces` field: on the wire it is a
/// single byte string of concatenated 20-byte SHA1 digests; in memory it
/// is a `Vec` of hex strings (one per digest).
pub(super) mod pieces {
    use serde::{de, ser, Deserializer, Serializer};
    use serde_with::{Bytes, DeserializeAs, SerializeAs};

    /// Hex-decode every piece and concatenate back into one byte string.
    pub fn serialize<S>(pieces: &Vec<String>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut buf: Vec<u8> = vec![];
        for v in pieces {
            buf.append(&mut hex::decode(v).map_err(|e| {
                ser::Error::custom(format!(
                    "Every pieces must be serializable into hexadecimal: {e}"
                ))
            })?);
        }
        Bytes::serialize_as(&buf, serializer)
    }

    /// Split the raw blob into 20-byte chunks, hex-encoding each.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes: Vec<u8> = Bytes::deserialize_as(deserializer)?;
        // A valid blob is a whole number of SHA1 digests.
        if (bytes.len() % 20) != 0 {
            return Err(de::Error::custom("Invalid SHA1 pieces"));
        }
        Ok(bytes.chunks(20).map(|c| hex::encode(c)).collect())
    }
}
/// Custom (de)serialization for the `files` list of a multi-file torrent.
/// On the wire each file is a dict `{length: int, path: [bytes, ...]}`;
/// in memory it is a [`TorrentFile`] whose `path` joins the components
/// with "/".
pub(super) mod files {
    use std::{borrow::Cow, collections::BTreeMap};

    use bendy::value::Value;
    use serde::{de, Deserialize, Deserializer, Serialize, Serializer};

    use crate::torrent::v1::TorrentFile;

    /// Re-encode `TorrentFile`s into bencode dicts, splitting `path`
    /// back into its "/"-separated components.
    pub fn serialize<S>(files: &Vec<TorrentFile>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut dict_list = vec![];
        let length_key: Cow<'_, [u8]> = Cow::Owned(String::from("length").into_bytes());
        let path_key: Cow<'_, [u8]> = Cow::Owned(String::from("path").into_bytes());
        for f in files {
            let mut dict: BTreeMap<Cow<'_, [u8]>, Value> = BTreeMap::new();
            dict.insert(length_key.clone(), Value::Integer(f.length as i64));

            let mut path_partial = vec![];
            for i in f.path.split("/") {
                path_partial.push(Value::Bytes(i.as_bytes().into()));
            }
            dict.insert(path_key.clone(), Value::List(path_partial));

            dict_list.push(Value::Dict(dict));
        }
        dict_list.serialize(serializer)
    }

    /// Decode the bencode `files` list, validating each entry's shape and
    /// joining path components with "/".
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<TorrentFile>, D::Error>
    where
        D: Deserializer<'de>,
    {
        // The top-level value must be a list; every other bencode type is
        // rejected with a type error.
        let values = match Value::deserialize(deserializer)? {
            Value::List(v) => v,
            Value::Bytes(v) => {
                return Err(de::Error::invalid_type(
                    de::Unexpected::Bytes(&v),
                    &"list of dict",
                ))
            }
            Value::Dict(_) => {
                return Err(de::Error::invalid_type(
                    de::Unexpected::Map,
                    &"list of dict",
                ))
            }
            Value::Integer(v) => {
                return Err(de::Error::invalid_type(
                    de::Unexpected::Signed(v),
                    &"list of dict",
                ))
            }
        };

        let mut torrent_files: Vec<TorrentFile> = vec![];
        let length_key = Cow::Owned(String::from("length").into_bytes());
        let path_key = Cow::Owned(String::from("path").into_bytes());
        for v in values {
            // Each element must itself be a dict.
            let file = match v {
                Value::Dict(v) => v,
                Value::Bytes(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Bytes(&v),
                        &"dict with keys length and path",
                    ))
                }
                Value::Integer(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Signed(v),
                        &"dict with keys length and path",
                    ))
                }
                Value::List(_) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Seq,
                        &"dict with keys length and path",
                    ))
                }
            };

            // Both keys are mandatory.
            let length = file.get(&length_key);
            let path = file.get(&path_key);
            if length.is_none() {
                return Err(de::Error::missing_field("length"));
            }
            if path.is_none() {
                return Err(de::Error::missing_field("path"));
            }

            let length = match length.unwrap() {
                Value::Integer(v) => *v,
                Value::Bytes(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Bytes(&v),
                        &"integer",
                    ))
                }
                Value::Dict(_) => {
                    return Err(de::Error::invalid_type(de::Unexpected::Map, &"integer"))
                }
                Value::List(_) => {
                    return Err(de::Error::invalid_type(de::Unexpected::Seq, &"integer"))
                }
            };

            // "path" is a list of UTF-8 byte strings; components are joined
            // with "/" (no leading separator on the first one).
            let path = match path.unwrap() {
                Value::List(v) => {
                    let mut final_str = String::new();
                    for (i, item) in v.iter().enumerate() {
                        let partial = match item {
                            Value::Bytes(v) => std::str::from_utf8(v).map_err(|_| {
                                de::Error::invalid_value(
                                    de::Unexpected::Bytes(v),
                                    &"valid UTF-8 string",
                                )
                            })?,
                            Value::Dict(_) => {
                                return Err(de::Error::invalid_type(de::Unexpected::Map, &"string"))
                            }
                            Value::Integer(v) => {
                                return Err(de::Error::invalid_type(
                                    de::Unexpected::Signed(*v),
                                    &"string",
                                ))
                            }
                            Value::List(_) => {
                                return Err(de::Error::invalid_type(de::Unexpected::Seq, &"string"))
                            }
                        };
                        if i == 0 {
                            final_str.push_str(partial);
                            continue;
                        }
                        final_str.push_str(&format!("/{partial}"));
                    }
                    final_str
                }
                Value::Bytes(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Bytes(&v),
                        &"list of bytes",
                    ))
                }
                Value::Dict(_) => {
                    return Err(de::Error::invalid_type(de::Unexpected::Map, &"list of bytes"))
                }
                Value::Integer(v) => {
                    return Err(de::Error::invalid_type(
                        de::Unexpected::Signed(*v),
                        &"list of bytes",
                    ))
                }
            };

            torrent_files.push(TorrentFile { length, path })
        }
        Ok(torrent_files)
    }
}

View File

@ -0,0 +1,28 @@
use bendy::serde::{from_bytes, to_bytes};
use sha1::{Digest, Sha1};
use crate::torrent::errors::TorrentError;
use super::{ext_parsing, Torrent};
impl Torrent<'_> {
    /// Deserialize a torrent from raw bencode bytes. The returned value
    /// borrows from `bytes` (zero-copy extra fields).
    pub fn parse_bytes<'a>(bytes: &'a [u8]) -> Result<Torrent<'_>, TorrentError> {
        from_bytes::<'a>(bytes).map_err(|e| TorrentError::ParseTorrent(e))
    }

    /// Total download size in bytes: `info.length` for a single-file
    /// torrent, otherwise the sum of every entry in `info.files`.
    // NOTE(review): "lenght" is a typo, kept because it is public API.
    pub fn calc_download_lenght(&self) -> i64 {
        if !ext_parsing::skip_empty::i64(&self.info.length) {
            return self.info.length;
        }
        self.info.files.iter().map(|f| f.length).sum()
    }

    /// SHA1 info hash: bencode-encode the `info` dictionary and hash it.
    /// Returns the digest as a lowercase hex string.
    pub fn calc_hash(&self) -> Result<String, TorrentError> {
        let mut hasher = Sha1::new();
        let encoded = to_bytes(&self.info).map_err(|e| TorrentError::EncodeInfo(e))?;
        hasher.update(&encoded);
        Ok(hex::encode(hasher.finalize()))
    }
}

111
brs/src/torrent/v1/mod.rs Normal file
View File

@ -0,0 +1,111 @@
mod display;
mod ext_parsing;
mod main;
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, BoolFromInt, TimestampSeconds};
/// Root dictionary of a v1 (BEP 3) torrent file.
#[derive(Debug, Deserialize, Serialize)]
pub struct Torrent<'a> {
    /// Announcer URL
    pub announce: String,
    /// Torrent information
    pub info: TorrentInfo<'a>,
    /// Non official fields (flattened into the root dict)
    #[serde(flatten, borrow)]
    pub additional_fields: RootAdditionalFields<'a>,
}
/// TorrentInfo is a struct that contains all the information about the torrent file.
/// TorrentInfo is a struct that contains all the information about the torrent file.
#[serde_as]
#[derive(Debug, Deserialize, Serialize)]
pub struct TorrentInfo<'a> {
    /// Recommended output file or root directory.
    /// REQUIRED
    pub name: String,
    /// Size of each data piece.
    /// REQUIRED
    #[serde(rename = "piece length")]
    pub piece_length: i64,
    /// SHA1 hashes of each pieces concatenated. Each hash is 20 bytes long.
    /// REQUIRED
    #[serde(with = "ext_parsing::pieces")]
    pub pieces: Vec<String>,
    /// In case of a single file, represents the file size.
    /// REQUIRED - If `TorrentInfo.files` is empty
    #[serde(default, skip_serializing_if = "ext_parsing::skip_empty::i64")]
    pub length: i64,
    /// In case of multiple files/directories, represents all files/directories available
    /// REQUIRED - If `TorrentInfo.length` is empty
    #[serde(
        default,
        with = "ext_parsing::files",
        skip_serializing_if = "Vec::is_empty"
    )]
    pub files: Vec<TorrentFile>,
    // Additional fields available that are not part of the original specification
    #[serde(flatten, borrow)]
    pub additional_fields: TorrentInfoAdditionalFields<'a>,
}
/// One entry of a multi-file torrent's `files` list.
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
pub struct TorrentFile {
    /// Output file path ("/"-joined components).
    /// REQUIRED
    pub path: String,
    /// File size in bytes.
    /// REQUIRED
    pub length: i64,
}
/// RootAdditionalFields contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
/// Those who are well known are mapped directly with default values.
/// RootAdditionalFields contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
/// Those who are well known are mapped directly with default values.
#[serde_as]
#[derive(Debug, Deserialize, Serialize)]
pub struct RootAdditionalFields<'a> {
    /// Torrent creator or software name
    #[serde(
        default,
        rename = "created by",
        skip_serializing_if = "ext_parsing::skip_empty::string"
    )]
    pub created_by: String,
    /// Torrent creation date (stored as a Unix timestamp in seconds)
    #[serde_as(as = "TimestampSeconds<i64>")]
    #[serde(
        default,
        rename = "creation date",
        skip_serializing_if = "ext_parsing::skip_empty::date"
    )]
    pub creation_date: DateTime<Utc>,
    /// Comment about the torrent
    #[serde(default, skip_serializing_if = "ext_parsing::skip_empty::string")]
    pub comment: String,
    /// List of resources available
    #[serde(default, rename = "url-list", skip_serializing_if = "Vec::is_empty")]
    pub url_list: Vec<String>,
    // "encoding" key — presumably the character encoding of the torrent's
    // strings (e.g. "UTF-8"); not validated here. TODO confirm semantics.
    #[serde(default)]
    pub encoding: String,
    /// Extra fields not explicitly covered by the struct
    #[serde(flatten, borrow)]
    pub extra_fields: HashMap<String, bendy::value::Value<'a>>,
}
/// TorrentInfoAdditionalFields contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
/// Those who are well known are mapped directly with default values.
/// TorrentInfoAdditionalFields contains all the additional fields that are not part of the
/// original [BitTorrent](https://www.bittorrent.org/beps/bep_0003.html) specification.
/// Those who are well known are mapped directly with default values.
#[serde_as]
#[derive(Debug, Deserialize, Serialize)]
pub struct TorrentInfoAdditionalFields<'a> {
    /// Is the torrent private (bencoded as 0/1)
    #[serde_as(as = "BoolFromInt")]
    #[serde(default, skip_serializing_if = "ext_parsing::skip_empty::bool")]
    pub private: bool,
    /// Extra fields not explicitly covered by the struct
    #[serde(flatten, borrow)]
    pub extra_fields: HashMap<String, bendy::value::Value<'a>>,
}

View File

113
brs/src/tracker/mod.rs Normal file
View File

@ -0,0 +1,113 @@
//! Tracker operations
//!
//! Start by creating an instance of a tracker
//! ```rust
//! use brs::torrent::v1::Torrent;
//! use brs::torrent::Parse;
//! use brs::tracker::Tracker;
//!
//! fn main() {
//! let torrent = match Torrent::parse("./file.torrent") {
//! Ok(v) => v,
//! Err(e) => return eprintln!("{e}"),
//! };
//!
//! let tracker = Tracker::new(torrent.announce.clone());
//! }
//! ```
mod tracker;
use std::{collections::HashMap, net::IpAddr};
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, skip_serializing_none, BoolFromInt};
/// Value of the `event` announce query parameter
/// (serialized lowercase: "started", "completed", "stopped", "empty").
#[derive(Default, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum TrackerEvent {
    /// First announce of a download.
    Started,
    /// The download completed.
    Completed,
    /// The client is stopping.
    Stopped,
    /// Regular periodic announce (the default).
    #[default]
    Empty,
}
/// Tracker query parameters.
#[serde_as]
#[skip_serializing_none]
#[derive(Default, Serialize)]
pub struct TrackerRequest {
    /// SHA1 hash of the bencode form. Must be 20 bytes long.
    pub info_hash: String,
    /// 20-character ID generated before a download request.
    pub peer_id: String,
    /// Client's IP address.
    pub ip: Option<IpAddr>,
    /// Client's listening port.
    /// Usually, downloader will try common range: `6881` to `6889`.
    pub port: Option<u16>,
    /// Total amount of bytes uploaded encoded in base 10 `ASCII`.
    pub uploaded: String,
    /// Total amount of bytes downloaded encoded in base 10 `ASCII`.
    pub downloaded: String,
    /// Total amount of bytes left to download encoded in base 10 `ASCII`.
    pub left: String,
    /// Announcement event.
    pub event: TrackerEvent,
    /// Should the tracker respond with a compact peers list
    /// (serialized as 0/1 via `BoolFromInt`).
    #[serde_as(as = "BoolFromInt")]
    pub compact: bool,
}
/// Possible shapes of a tracker's announce response.
// NOTE(review): there is no `#[serde(untagged)]` attribute here — confirm
// that bencode responses actually deserialize into these variants.
#[derive(Deserialize)]
pub enum TrackerResponse {
    /// Tracker responded with an error
    Error {
        #[serde(rename = "failure reason")]
        failure_reason: String,
    },
    /// Tracker successfully computed the query - Normal response
    Success {
        /// Interval in seconds to query the tracker
        interval: u64,
        /// List of peers
        peers: Vec<Peer>,
    },
    /// Tracker successfully computed the query - Compact response
    SuccessCompact {
        /// Interval in seconds to query the tracker
        interval: u64,
        /// List of peers in BigEndian order.
        /// 4 bytes allocated for the IPv4 address and 2 bytes for the port.
        peers: Option<Vec<u8>>,
        /// Same layout for IPv6: 18-byte entries
        /// (16-byte address + 2-byte big-endian port).
        peers6: Option<Vec<u8>>
    },
}
/// One peer entry of a non-compact tracker response.
#[derive(Deserialize)]
pub struct Peer {
    /// Unique identifier for the peer
    #[serde(rename = "peer id")]
    pub peer_id: String,
    /// Peer IP address. IPv4 or IPv6
    pub ip: String,
    /// Peer listening port
    pub port: u16,
}
/// Stateful tracker client; announce results are cached on the instance.
pub struct Tracker {
    /// Tracker URL
    pub url: String,
    /// Interval in seconds to query the tracker once the transfer has started.
    /// /!\ Populated with the first announce query.
    pub interval: Option<u64>,
    /// List of peers, keyed by peer id (or list index for compact responses).
    /// /!\ Populated with the first announce query.
    pub peers: Option<HashMap<String, TrackerPeer>>,
}
/// Resolved peer address stored in [`Tracker::peers`].
pub struct TrackerPeer {
    /// Peer IP address (IPv4 or IPv6).
    pub ip: IpAddr,
    /// Peer listening port.
    pub port: u16,
}

102
brs/src/tracker/tracker.rs Normal file
View File

@ -0,0 +1,102 @@
use std::{collections::HashMap, net::IpAddr};
use bendy::serde::from_bytes;
use crate::error::TrackerError;
use super::{Tracker, TrackerPeer, TrackerRequest, TrackerResponse};
const IPV4_PEER_LEN: u8 = 6;
const IPV6_PEER_LEN: u8 = 18;
impl Tracker {
    /// Create a new instance of `Tracker`
    pub fn new(url: String) -> Self {
        Self {
            url,
            interval: None,
            peers: None,
        }
    }

    /// Send an announce request and store the returned `interval` and
    /// peer list on `self`.
    ///
    /// # Errors
    /// Network/HTTP failures, non-success status codes, bencode decode
    /// errors, tracker-reported failures, and malformed peer lists all
    /// map to a [`TrackerError`] variant.
    pub async fn announce(&mut self, req: TrackerRequest) -> Result<(), TrackerError> {
        let req = reqwest::Client::new()
            .get(&self.url)
            .query(&req)
            .send()
            .await?;
        if !req.status().is_success() {
            return Err(TrackerError::InvalidStatus(req.status().as_u16()));
        }

        let rsp: TrackerResponse = from_bytes(&req.bytes().await?)?;
        match rsp {
            TrackerResponse::Error { failure_reason } => {
                return Err(TrackerError::AnnounceFailed(failure_reason))
            }
            TrackerResponse::Success { interval, peers } => {
                // Non-compact response: peers are keyed by their peer id.
                let mut hashmap_peers = HashMap::new();
                for p in peers {
                    hashmap_peers.insert(
                        p.peer_id,
                        TrackerPeer {
                            ip: p.ip.parse()?,
                            port: p.port,
                        },
                    );
                }
                self.interval = Some(interval);
                self.peers = Some(hashmap_peers);
            }
            TrackerResponse::SuccessCompact {
                interval,
                peers,
                peers6,
            } => {
                // Compact responses have no peer id; the chunk index is
                // used as the map key instead.
                let mut hashmap_peers = HashMap::new();
                if let Some(p) = peers {
                    if (p.len() % IPV4_PEER_LEN as usize) != 0 {
                        return Err(TrackerError::InvalidPeersCompactList(IPV4_PEER_LEN, p.len() as u64));
                    }
                    for (i, peer) in p.chunks(IPV4_PEER_LEN as usize).enumerate() {
                        // 6-byte entry: 4 bytes IPv4 address + 2 bytes BE port.
                        let (ip, port) = peer.split_at(4);
                        hashmap_peers.insert(
                            i.to_string(),
                            TrackerPeer {
                                ip: IpAddr::from(TryInto::<[u8; 4]>::try_into(ip).expect(
                                    "cannot convert &[u8] to &[u8; 4] where chunks is already of lenght 4",
                                )),
                                port: u16::from_be_bytes(port.try_into().expect(
                                    "cannot convert &[u8] to &[u8; 2] where chunks is already of lenght 2",
                                )),
                            },
                        );
                    }
                }
                if let Some(p6) = peers6 {
                    if (p6.len() % IPV6_PEER_LEN as usize) != 0 {
                        return Err(TrackerError::InvalidPeersCompactList(IPV6_PEER_LEN, p6.len() as u64));
                    }
                    for (i, peer) in p6.chunks(IPV6_PEER_LEN as usize).enumerate() {
                        // BUG FIX: an 18-byte entry is 16 bytes of IPv6 address
                        // + 2 bytes of port. The previous `split_at(14)` left a
                        // 14-byte ip slice and a 4-byte port slice, so both
                        // `try_into` conversions failed and the `expect`s below
                        // panicked on every IPv6 compact response.
                        let (ip, port) = peer.split_at(16);
                        hashmap_peers.insert(
                            i.to_string(),
                            TrackerPeer {
                                ip: IpAddr::from(TryInto::<[u8; 16]>::try_into(ip).expect(
                                    "cannot convert &[u8] to &[u8; 16] where chunks is already of lenght 16",
                                )),
                                port: u16::from_be_bytes(port.try_into().expect(
                                    "cannot convert &[u8] to &[u8; 2] where chunks is already of lenght 2",
                                )),
                            },
                        );
                    }
                }
                self.interval = Some(interval);
                self.peers = Some(hashmap_peers);
            }
        }
        Ok(())
    }
}

View File

@ -8,3 +8,8 @@ authors = [ "Antoine Langlois <dev@antoine-langlois.net>" ]
[dependencies]
brs = { path = "../brs" }
clap = { version = "4.5", features = ["derive"] }
clap_complete = "4.5"
thiserror = "1.0"
tokio = { version = "1.37", features = ["full"] }
rand = "0.8"
bendy = { version = "0.3", features = ["std", "serde"] }

3059
cli/out

File diff suppressed because it is too large Load Diff

View File

@ -1,28 +1,92 @@
use clap::{self, Parser};
mod torrent;
mod tracker;
#[derive(clap::Parser)]
use std::io;
use clap::{Command, CommandFactory, Parser, Subcommand, ValueHint};
use clap_complete::{generate, Generator, Shell};
use torrent::{create, metadata, raw};
use tracker::check;
#[derive(Parser)]
#[command(version, about)]
struct Cli {
#[command(subcommand)]
commands: Commands,
commands: Option<Cmds>,
#[arg(short, long, value_name = "SHELL", value_enum)]
complete: Option<Shell>,
}
#[derive(clap::Subcommand)]
enum Commands {
Info {
#[derive(Subcommand)]
enum Cmds {
/// Torrent tooling
Torrent {
#[command(subcommand)]
commands: TorrentCmds,
},
Tracker {
#[command(subcommand)]
commands: TrackerCmds,
},
}
#[derive(Subcommand)]
enum TorrentCmds {
/// Retrieve metadata from a ".torrent" file
Metadata {
/// Path to an existing torrent file
#[arg(value_hint = ValueHint::FilePath)]
path: String,
}
/// BitTorrent specification V1
#[arg(long, default_value_t = true)]
v1: bool,
/// BitTorrent specification V2
#[arg(long)]
v2: bool,
},
Raw {
/// Path to an existing torrent file
#[arg(value_hint = ValueHint::FilePath)]
path: String,
},
/// Create a torrent file
Create {
/// Path to an existing torrent file
#[arg(value_hint = ValueHint::FilePath)]
path: String,
},
}
fn main() {
#[derive(Subcommand)]
enum TrackerCmds {
Check {
#[arg(value_hint = ValueHint::FilePath)]
path: String,
},
}
fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
generate(gen, cmd, cmd.get_name().to_string(), &mut io::stdout());
}
#[tokio::main]
async fn main() {
let cli = Cli::parse();
match cli.commands {
Commands::Info { path } => {
match brs::torrent::parse(path) {
Ok(v) => println!("{v}"),
Err(e) => println!("{e}"),
}
if let Some(sh) = cli.complete {
print_completions(sh, &mut Cli::command())
}
if let Some(cmds) = cli.commands {
match cmds {
Cmds::Torrent { commands } => match commands {
TorrentCmds::Metadata { path, v1, v2 } => metadata(v1, v2, path),
TorrentCmds::Create { path } => create(path, String::new()),
TorrentCmds::Raw { path } => raw(path),
},
Cmds::Tracker { commands } => match commands {
TrackerCmds::Check { path } => check(path).await,
},
}
}
}

26
cli/src/torrent.rs Normal file
View File

@ -0,0 +1,26 @@
use std::{collections::HashMap, fs};
use brs::torrent::v1;
/// CLI handler: parse a torrent file at `path` and print its metadata.
/// Only the v1 format is implemented; `_v2` is reserved.
pub(crate) fn metadata(v1: bool, _v2: bool, path: String) {
    if v1 {
        let bytes = fs::read(path).unwrap();
        // Fix: the previous code printed the parse error and then called
        // `torrent.unwrap()` anyway, panicking on the very error it had
        // just reported. Stop after reporting instead.
        match v1::Torrent::parse_bytes(&bytes) {
            Ok(torrent) => println!("{torrent}"),
            Err(e) => eprintln!("{e}"),
        }
    } else {
        unimplemented!()
    }
}
/// CLI handler: dump the raw bencode structure of the file at `path`
/// as a Debug-formatted map, without interpreting the torrent schema.
pub(crate) fn raw(path: String) {
    let bytes = fs::read(path).unwrap();
    let out: HashMap<String, bendy::value::Value> = bendy::serde::from_bytes(&bytes).unwrap();
    println!("{:?}", out)
}
/// CLI handler: create a torrent file. Not implemented yet — the
/// parameters are placeholders for the future implementation.
pub(crate) fn create(_path: String, _data: String) {
    unimplemented!()
}

43
cli/src/tracker.rs Normal file
View File

@ -0,0 +1,43 @@
use std::fs;
use brs::{
torrent::v1,
tracker::{Tracker, TrackerRequest},
};
use rand::distributions::Alphanumeric;
use rand::Rng;
/// CLI handler: parse the torrent at `path`, then send a single compact
/// announce request to its tracker and report any failure.
pub(crate) async fn check(path: String) {
    let bytes = fs::read(path).unwrap();
    // Fix: the previous code printed the parse error and then called
    // `torrent.unwrap()` anyway, panicking on the very error it had just
    // reported. Bail out instead.
    let torrent = match v1::Torrent::parse_bytes(&bytes) {
        Ok(v) => v,
        Err(e) => return eprintln!("Failed to parse torrent: {e}"),
    };

    // Azureus-style peer id: "-BRS010-" prefix + 12 random chars = 20 bytes.
    let peer_id: String = rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(12)
        .map(char::from)
        .collect();

    let info_hash = match torrent.calc_hash() {
        Ok(v) => v,
        Err(e) => return eprintln!("Failed to calculate info hash: {e}"),
    };

    let mut tracker = Tracker::new(torrent.announce.clone());
    let rsp = tracker
        .announce(TrackerRequest {
            peer_id: format!("-BRS010-{peer_id}"),
            downloaded: "0".to_string(),
            left: torrent.calc_download_lenght().to_string(),
            uploaded: "0".to_string(),
            info_hash,
            compact: true,
            ..Default::default()
        })
        .await;
    if let Err(e) = rsp {
        eprintln!("{e}")
    }
}

View File

@ -38,11 +38,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1711523803,
"narHash": "sha256-UKcYiHWHQynzj6CN/vTcix4yd1eCu1uFdsuarupdCQQ=",
"lastModified": 1711703276,
"narHash": "sha256-iMUFArF0WCatKK6RzfUJknjem0H9m4KgorO/p3Dopkk=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2726f127c15a4cc9810843b96cad73c7eb39e443",
"rev": "d8fe5e6c92d0d190646fb9f1056741a229980089",
"type": "github"
},
"original": {
@ -81,11 +81,11 @@
"nixpkgs": "nixpkgs_2"
},
"locked": {
"lastModified": 1711592024,
"narHash": "sha256-oD4OJ3TRmVrbAuKZWxElRCyCagNCDuhfw2exBmNOy48=",
"lastModified": 1711937855,
"narHash": "sha256-jlfDBRtsLoqRNFxtQtG47wsrwVsQSV4AqoMgWG6Bvng=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "aa858717377db2ed8ffd2d44147d907baee656e5",
"rev": "3f46a51b47f56c24b4d7e8db8fb8e73118923f96",
"type": "github"
},
"original": {

View File

@ -17,6 +17,8 @@
devShells.default = pkgs.mkShell {
buildInputs = with pkgs; [
rust-bin.stable.latest.default
openssl
pkg-config
];
};
}