Compare commits

...

9 commits

42 changed files with 2482 additions and 1933 deletions

.gitignore vendored

@@ -1,4 +1,5 @@
/target
/Cargo.lock
rusty-tube.json
rustypipe_reports
rustypipe_cache.json

Cargo.toml

@@ -3,10 +3,19 @@ name = "rustypipe"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[workspace]
members = [".", "cli"]
members = [".", "codegen", "cli"]
[features]
default = ["default-tls"]
# Reqwest TLS
default-tls = ["reqwest/default-tls"]
rustls-tls-webpki-roots = ["reqwest/rustls-tls-webpki-roots"]
rustls-tls-native-roots = ["reqwest/rustls-tls-native-roots"]
# Error reports in yaml format
report-yaml = ["serde_yaml"]
[dependencies]
# quick-js = "0.4.1"
@@ -17,13 +26,13 @@ anyhow = "1.0"
thiserror = "1.0.31"
url = "2.2.2"
log = "0.4.17"
reqwest = {version = "0.11.11", default-features = false, features = ["json", "gzip", "brotli", "stream", "rustls-tls-native-roots"]}
tokio = {version = "1.20.0", features = ["macros", "fs", "process"]}
reqwest = {version = "0.11.11", default-features = false, features = ["json", "gzip", "brotli", "stream"]}
tokio = {version = "1.20.0", features = ["macros", "time", "fs", "process"]}
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.82"
serde_yaml = {version = "0.9.11", optional = true}
serde_with = {version = "2.0.0", features = ["json"] }
rand = "0.8.5"
async-trait = "0.1.56"
chrono = {version = "0.4.19", features = ["serde"]}
chronoutil = "0.2.3"
futures = "0.3.21"
@@ -37,6 +46,5 @@ env_logger = "0.9.0"
test-log = "0.2.11"
rstest = "0.15.0"
temp_testdir = "0.2.3"
insta = {version = "1.17.1", features = ["redactions"]}
insta = {version = "1.17.1", features = ["yaml", "redactions"]}
velcro = "0.5.3"
phf_codegen = "0.11.1"

cli/Cargo.toml

@@ -3,12 +3,10 @@ name = "rustypipe-cli"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
rustypipe = {path = "../"}
reqwest = {version = "0.11.11", default_features = false, features = ["gzip", "brotli", "rustls-tls-native-roots"]}
tokio = {version = "1.20.0", features = ["rt-multi-thread"]}
rustypipe = {path = "../", default_features = false, features = ["rustls-tls-native-roots"]}
reqwest = {version = "0.11.11", default_features = false}
tokio = {version = "1.20.0", features = ["macros", "rt-multi-thread"]}
indicatif = "0.17.0"
futures = "0.3.21"
anyhow = "1.0"

cli/src/main.rs

@@ -6,7 +6,7 @@ use futures::stream::{self, StreamExt};
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use reqwest::{Client, ClientBuilder};
use rustypipe::{
client::{ClientType, RustyTube},
client::{ClientType, RustyPipe},
model::stream_filter::Filter,
};
@@ -46,7 +46,7 @@ async fn download_single_video(
output_fname: Option<String>,
resolution: Option<u32>,
ffmpeg: &str,
rt: &RustyTube,
rp: &RustyPipe,
http: Client,
multi: MultiProgress,
main: Option<ProgressBar>,
@@ -58,7 +58,8 @@ async fn download_single_video(
pb.set_message(format!("Fetching player data for {}", video_title));
let res = async {
let player_data = rt
let player_data = rp
.query()
.get_player(video_id.as_str(), ClientType::TvHtml5Embed)
.await
.context(format!(
@@ -112,7 +113,7 @@ async fn download_video(
.build()
.expect("unable to build the HTTP client");
let rt = RustyTube::new();
let rp = RustyPipe::default();
// Indicatif setup
let multi = MultiProgress::new();
@@ -124,7 +125,7 @@ async fn download_video(
output_fname,
resolution,
"ffmpeg",
&rt,
&rp,
http,
multi,
None,
@@ -147,8 +148,8 @@ async fn download_playlist(
.build()
.expect("unable to build the HTTP client");
let rt = RustyTube::new();
let playlist = rt.get_playlist(id).await.unwrap();
let rp = RustyPipe::default();
let playlist = rp.query().get_playlist(id).await.unwrap();
// Indicatif setup
let multi = MultiProgress::new();
@@ -173,7 +174,7 @@ async fn download_playlist(
output_fname.to_owned(),
resolution,
"ffmpeg",
&rt,
&rp,
http.clone(),
multi.clone(),
Some(main.clone()),
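For orientation, here is a minimal sketch of what a CLI call looks like after the RustyTube to RustyPipe rename, assuming only the `RustyPipe::default()`/`query()` API surface visible in this diff (video ID reused from the test files):

```rust
// Sketch only: assumes the rustypipe API shown in this diff.
use rustypipe::client::{ClientType, RustyPipe};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let rp = RustyPipe::default();
    // Queries now go through the query() builder instead of
    // methods implemented directly on the client.
    let player = rp
        .query()
        .get_player("pPvd8UxmSbQ", ClientType::TvHtml5Embed)
        .await?;
    println!("{}", player.info.title);
    Ok(())
}
```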

codegen/Cargo.toml Normal file

@@ -0,0 +1,20 @@
[package]
name = "rustypipe-codegen"
version = "0.1.0"
edition = "2021"
[dependencies]
rustypipe = {path = "../"}
reqwest = "0.11.11"
tokio = {version = "1.20.0", features = ["macros", "rt-multi-thread"]}
futures = "0.3.21"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.82"
serde_with = "2.0.0"
anyhow = "1.0"
log = "0.4.17"
env_logger = "0.9.0"
clap = { version = "3.2.16", features = ["derive"] }
phf_codegen = "0.11.1"
once_cell = "1.12.0"
fancy-regex = "0.10.0"

codegen/src/collect_playlist_dates.rs

@@ -1,5 +1,3 @@
#![cfg(test)]
use std::{
collections::{BTreeMap, HashMap},
fs::File,
@@ -8,14 +6,15 @@ use std::{
path::Path,
};
use futures::{stream, StreamExt};
use rustypipe::{
client::RustyPipe,
model::{locale::LANGUAGES, Language},
timeago::{self, TimeAgo},
};
use serde::{Deserialize, Serialize};
use crate::{
client::RustyTube,
model::{locale::LANGUAGES, Country, Language},
timeago::{self, TimeAgo},
util,
};
use crate::util;
type CollectedDates = BTreeMap<Language, BTreeMap<DateCase, String>>;
@@ -38,20 +37,40 @@ enum DateCase {
Dec,
}
// #[test_log::test(tokio::test)]
async fn collect_dates() {
let json_path = Path::new("testfiles/date/playlist_samples.json").to_path_buf();
if json_path.exists() {
return;
}
/// Collect 'Playlist updated' dates in every supported language
/// and write them to `testfiles/date/playlist_samples.json`.
///
/// YouTube's API outputs the update date of playlists only in a
/// textual format (e.g. *Last updated on Jan 3, 2020*), which varies
/// by language.
///
/// For recently updated playlists YouTube shows 'today', 'yesterday'
/// and 'x<=7 days ago' instead of the literal date.
///
/// To parse these dates correctly we need to collect a sample set
/// in every language.
///
/// This set includes
/// - one playlist updated today
/// - one playlist updated yesterday
/// - one playlist updated 2-7 days ago
/// - one playlist from every month. Note that there should not
/// be any dates which include the same number twice (e.g. 01.01.2020).
///
/// Because the relative dates change over time, the first three playlists
/// should be checked and, if necessary, replaced before running the program.
pub async fn collect_dates(project_root: &Path, concurrency: usize) {
let mut json_path = project_root.to_path_buf();
json_path.push("testfiles/date/playlist_samples.json");
// These are the sample playlists
let cases = [
(
DateCase::Today,
"RDCLAK5uy_kj3rhiar1LINmyDcuFnXihEO0K1NQa2jI",
),
(DateCase::Yesterday, "PLmB6td997u3kUOrfFwkULZ910ho44oQSy"),
(DateCase::Ago, "PL7zsB-C3aNu2yRY2869T0zj1FhtRIu5am"),
(DateCase::Yesterday, "PL7zsB-C3aNu2yRY2869T0zj1FhtRIu5am"),
(DateCase::Ago, "PLmB6td997u3kUOrfFwkULZ910ho44oQSy"),
(DateCase::Jan, "PL1J-6JOckZtFjcni6Xj1pLYglJp6JCpKD"),
(DateCase::Feb, "PL1J-6JOckZtETrbzwZE7mRIIK6BzWNLAs"),
(DateCase::Mar, "PL1J-6JOckZtG3AVdvBXhMO64mB2k3BtKi"),
@@ -66,31 +85,42 @@ async fn collect_dates() {
(DateCase::Dec, "PL1J-6JOckZtHo91uApeb10Qlf2XhkfM-9"),
];
let mut collected_dates = CollectedDates::new();
for lang in LANGUAGES {
let rp = RustyTube::new_with_ua(lang, Country::Us, None);
let rp = RustyPipe::new();
let collected_dates = stream::iter(LANGUAGES)
.map(|lang| {
let rp = rp.clone();
async move {
let mut map: BTreeMap<DateCase, String> = BTreeMap::new();
for (case, pl_id) in cases {
let playlist = rp.get_playlist(pl_id).await.unwrap();
let playlist = rp.query().lang(lang).get_playlist(pl_id).await.unwrap();
map.insert(case, playlist.last_update_txt.unwrap());
}
collected_dates.insert(lang, map);
(lang, map)
}
})
.buffer_unordered(concurrency)
.collect::<BTreeMap<_, _>>()
.await;
let file = File::create(json_path).unwrap();
serde_json::to_writer_pretty(file, &collected_dates).unwrap();
}
// #[test]
fn write_samples_to_dict() {
let json_path = Path::new("testfiles/date/playlist_samples.json").to_path_buf();
/// Attempt to parse the dates collected by `collect-playlist-dates`
/// and write the results to `dictionary.json`.
///
/// The ND (no digit) tokens (today, tomorrow) of some languages cannot be
/// parsed automatically and require manual work.
pub fn write_samples_to_dict(project_root: &Path) {
let mut json_path = project_root.to_path_buf();
json_path.push("testfiles/date/playlist_samples.json");
let json_file = File::open(json_path).unwrap();
let collected_dates: CollectedDates =
serde_json::from_reader(BufReader::new(json_file)).unwrap();
let mut dict = super::read_dict();
let mut dict = util::read_dict(project_root);
let langs = dict.keys().map(|k| k.to_owned()).collect::<Vec<_>>();
let months = [
@@ -134,7 +164,9 @@ fn write_samples_to_dict() {
let dict_entry = dict.entry(lang).or_default();
let mut num_order = "".to_owned();
let collect_nd_tokens = match lang {
let collect_nd_tokens = !matches!(
lang,
// ND tokens of these languages must be edited manually
Language::Ja
| Language::ZhCn
| Language::ZhHk
@@ -146,10 +178,9 @@ fn write_samples_to_dict() {
| Language::Uz
| Language::Te
| Language::PtPt
// Singhalese YT translation is broken (today == tomorrow)
| Language::Si => false,
_ => true,
};
// Singhalese YT translation has an error (today == tomorrow)
| Language::Si
);
dict_entry.months = BTreeMap::new();
@@ -164,7 +195,7 @@ fn write_samples_to_dict() {
// Today/Yesterday
{
let mut parse = |string: &str, n: i8| {
timeago::filter_str(string)
util::filter_datestr(string)
.split_whitespace()
.for_each(|word| {
td_words
@@ -183,7 +214,7 @@ fn write_samples_to_dict() {
// n days ago
{
let datestr = datestr_table.get(&DateCase::Ago).unwrap();
let tago = timeago::parse_timeago(lang, &datestr);
let tago = timeago::parse_timeago(lang, datestr);
assert_eq!(
tago,
Some(TimeAgo {
@@ -201,7 +232,7 @@ fn write_samples_to_dict() {
let datestr = datestr_table.get(m).unwrap();
// Get order of numbers
let nums = util::parse_numeric_vec::<u32>(&datestr);
let nums = util::parse_numeric_vec::<u32>(datestr);
let date = dates[n];
let this_num_order = nums
@@ -219,14 +250,14 @@ fn write_samples_to_dict() {
})
.collect::<String>();
if num_order == "" {
if num_order.is_empty() {
num_order = this_num_order;
} else {
assert_eq!(this_num_order, num_order, "lang: {}", lang);
}
// Insert words into the map
timeago::filter_str(&datestr)
util::filter_datestr(datestr)
.split_whitespace()
.for_each(|word| {
month_words
@@ -275,5 +306,5 @@ fn write_samples_to_dict() {
dict_entry.date_order = num_order;
}
super::write_dict(&dict);
util::write_dict(project_root, &dict);
}
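The collector now fetches the languages concurrently through `buffer_unordered` instead of a sequential loop. A self-contained sketch of that pattern; `fetch` is a hypothetical stand-in for the playlist request:

```rust
use std::collections::BTreeMap;

use futures::{stream, StreamExt};

// Hypothetical stand-in for the per-language network call.
async fn fetch(lang: &'static str) -> (&'static str, usize) {
    (lang, lang.len())
}

#[tokio::main]
async fn main() {
    let results = stream::iter(["en", "de", "fr"])
        .map(fetch)
        // Poll up to 8 requests at once; results arrive in completion order.
        .buffer_unordered(8)
        .collect::<BTreeMap<_, _>>()
        .await;
    assert_eq!(results.len(), 3);
}
```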

codegen/src/download_testfiles.rs Normal file

@@ -0,0 +1,120 @@
use std::{
fs::File,
path::{Path, PathBuf},
};
use rustypipe::{
client::{ClientType, RustyPipe},
report::{Report, Reporter},
};
const CLIENT_TYPES: [ClientType; 5] = [
ClientType::Desktop,
ClientType::DesktopMusic,
ClientType::TvHtml5Embed,
ClientType::Android,
ClientType::Ios,
];
/// Store pretty-printed response json
pub struct TestFileReporter {
path: PathBuf,
}
impl TestFileReporter {
pub fn new<P: AsRef<Path>>(path: P) -> Self {
Self {
path: path.as_ref().to_path_buf(),
}
}
}
impl Reporter for TestFileReporter {
fn report(&self, report: &Report) {
let data =
serde_json::from_str::<serde_json::Value>(&report.http_request.resp_body).unwrap();
let file = File::create(&self.path).unwrap();
serde_json::to_writer_pretty(file, &data).unwrap();
println!("Downloaded {}", self.path.display());
}
}
fn rp_testfile(json_path: &Path) -> RustyPipe {
let reporter = TestFileReporter::new(json_path);
RustyPipe::builder()
.reporter(Box::new(reporter))
.report()
.strict()
.build()
}
pub async fn download_testfiles(project_root: &Path) {
let mut testfiles = project_root.to_path_buf();
testfiles.push("testfiles");
tokio::join!(
player(&testfiles),
player_model(&testfiles),
playlist(&testfiles)
);
}
async fn player(testfiles: &Path) {
let video_id = "pPvd8UxmSbQ";
for client_type in CLIENT_TYPES {
let mut json_path = testfiles.to_path_buf();
json_path.push("player");
json_path.push(format!("{:?}_video.json", client_type).to_lowercase());
if json_path.exists() {
continue;
}
let rp = rp_testfile(&json_path);
rp.query().get_player(video_id, client_type).await.unwrap();
}
}
async fn player_model(testfiles: &Path) {
let rp = RustyPipe::builder().strict().build();
for (name, id) in [("multilanguage", "tVWWp1PqDus"), ("hdr", "LXb3EKWsInQ")] {
let mut json_path = testfiles.to_path_buf();
json_path.push("player_model");
json_path.push(format!("{}.json", name).to_lowercase());
if json_path.exists() {
continue;
}
let player_data = rp
.query()
.get_player(id, ClientType::Desktop)
.await
.unwrap();
let file = File::create(&json_path).unwrap();
serde_json::to_writer_pretty(file, &player_data).unwrap();
println!("Downloaded {}", json_path.display());
}
}
async fn playlist(testfiles: &Path) {
for (name, id) in [
("short", "RDCLAK5uy_kFQXdnqMaQCVx2wpUM4ZfbsGCDibZtkJk"),
("long", "PL5dDx681T4bR7ZF1IuWzOv1omlRbE7PiJ"),
("nomusic", "PL1J-6JOckZtE_P9Xx8D3b2O6w0idhuKBe"),
] {
let mut json_path = testfiles.to_path_buf();
json_path.push("playlist");
json_path.push(format!("playlist_{}.json", name));
if json_path.exists() {
continue;
}
let rp = rp_testfile(&json_path);
rp.query().get_playlist(id).await.unwrap();
}
}
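Since the downloader is wired up through the `Reporter` trait, other sinks are easy to plug in. A hedged sketch (hypothetical `SizeReporter`; only the trait shape and the `Report` field used by `TestFileReporter` above are assumed):

```rust
use rustypipe::report::{Report, Reporter};

/// Hypothetical reporter that only logs the raw response size.
struct SizeReporter;

impl Reporter for SizeReporter {
    fn report(&self, report: &Report) {
        // http_request.resp_body is the same field TestFileReporter serializes.
        println!("response body: {} bytes", report.http_request.resp_body.len());
    }
}
```

It would be registered the same way as above, e.g. `RustyPipe::builder().reporter(Box::new(SizeReporter)).report().build()`.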

codegen/src/gen_dictionary.rs

@@ -1,10 +1,13 @@
#![cfg(test)]
use std::fmt::Write;
use std::path::Path;
use crate::{timeago::TimeUnit};
use fancy_regex::Regex;
use once_cell::sync::Lazy;
use rustypipe::timeago::TimeUnit;
const TARGET_FILE: &str = "src/dictionary.rs";
use crate::util;
const TARGET_PATH: &str = "src/dictionary.rs";
fn parse_tu(tu: &str) -> (u8, Option<TimeUnit>) {
static TU_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r"^(\d*)(\w?)$").unwrap());
@@ -27,14 +30,13 @@ fn parse_tu(tu: &str) -> (u8, Option<TimeUnit>) {
}
}
// #[test]
fn generate_dictionary() {
let dict = super::read_dict();
pub fn generate_dictionary(project_root: &Path) {
let dict = util::read_dict(project_root);
let code_head = r#"// This file is automatically generated. DO NOT EDIT.
use crate::{
model::Language,
timeago::{TaToken, TimeUnit, DateCmp},
timeago::{DateCmp, TaToken, TimeUnit},
};
pub struct Entry {
@@ -56,45 +58,45 @@ pub fn entry(lang: Language) -> Entry {
// Match selector
let mut selector = format!("Language::{:?}", lang);
entry.equivalent.iter().for_each(|eq| {
selector += &format!(" | Language::{:?}", eq);
let _ = write!(selector, " | Language::{:?}", eq);
});
// Timeago tokens
let mut ta_tokens = phf_codegen::Map::<&str>::new();
entry.timeago_tokens.iter().for_each(|(txt, tu_str)| {
let (n, unit) = parse_tu(&tu_str);
let (n, unit) = parse_tu(tu_str);
match unit {
Some(unit) => ta_tokens.entry(
&txt,
txt,
&format!("TaToken {{ n: {}, unit: Some(TimeUnit::{:?}) }}", n, unit),
),
None => ta_tokens.entry(&txt, &format!("TaToken {{ n: {}, unit: None }}", n)),
None => ta_tokens.entry(txt, &format!("TaToken {{ n: {}, unit: None }}", n)),
};
});
// Months
let mut months = phf_codegen::Map::<&str>::new();
entry.months.iter().for_each(|(txt, n_mon)| {
months.entry(&txt, &n_mon.to_string());
months.entry(txt, &n_mon.to_string());
});
// Timeago(ND) tokens
let mut ta_nd_tokens = phf_codegen::Map::<&str>::new();
entry.timeago_nd_tokens.iter().for_each(|(txt, tu_str)| {
let (n, unit) = parse_tu(&tu_str);
let (n, unit) = parse_tu(tu_str);
match unit {
Some(unit) => ta_nd_tokens.entry(
&txt,
txt,
&format!("TaToken {{ n: {}, unit: Some(TimeUnit::{:?}) }}", n, unit),
),
None => ta_nd_tokens.entry(&txt, &format!("TaToken {{ n: {}, unit: None }}", n)),
None => ta_nd_tokens.entry(txt, &format!("TaToken {{ n: {}, unit: None }}", n)),
};
});
// Date order
let mut date_order = "&[".to_owned();
entry.date_order.chars().for_each(|c| {
date_order += &format!("DateCmp::{}, ", c);
let _ = write!(date_order, "DateCmp::{}, ", c);
});
date_order = date_order.trim_end_matches([' ', ',']).to_owned() + "]";
@@ -102,15 +104,15 @@ pub fn entry(lang: Language) -> Entry {
let code_ta_nd_tokens = &ta_nd_tokens.build().to_string().replace('\n', "\n ");
let code_months = &months.build().to_string().replace('\n', "\n ");
code_timeago_tokens += &format!(
"{} => Entry {{\n by_char: {:?},\n timeago_tokens: {},\n date_order: {},\n months: {},\n timeago_nd_tokens: {},\n }},\n ",
selector, entry.by_char, code_ta_tokens, date_order, code_months, code_ta_nd_tokens
);
let _ = write!(code_timeago_tokens, "{} => Entry {{\n by_char: {:?},\n timeago_tokens: {},\n date_order: {},\n months: {},\n timeago_nd_tokens: {},\n }},\n ",
selector, entry.by_char, code_ta_tokens, date_order, code_months, code_ta_nd_tokens);
});
code_timeago_tokens = code_timeago_tokens.trim_end().to_owned() + "\n }\n}\n";
let code = format!("{}\n{}", code_head, code_timeago_tokens);
std::fs::write(TARGET_FILE, code).unwrap();
let mut target_path = project_root.to_path_buf();
target_path.push(TARGET_PATH);
std::fs::write(target_path, code).unwrap();
}
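A recurring change in this file replaces `s += &format!(...)` with `write!` into the existing `String`, which avoids one temporary allocation per append (hence the new `use std::fmt::Write`). A standalone illustration with made-up values:

```rust
use std::fmt::Write;

fn main() {
    let mut selector = String::from("Language::En");
    for eq in ["EnGb", "EnIn"] {
        // Writing into a String cannot fail, so the Result is ignored.
        let _ = write!(selector, " | Language::{}", eq);
    }
    assert_eq!(selector, "Language::En | Language::EnGb | Language::EnIn");
}
```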

codegen/src/gen_locales.rs

@@ -1,20 +1,13 @@
#![cfg(test)]
use std::collections::BTreeMap;
use std::fmt::Write;
use std::path::Path;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use reqwest::header;
use reqwest::Client;
use serde::Deserialize;
use serde_with::serde_as;
use serde_with::VecSkipError;
use crate::client::{ClientType, ContextYT, RustyTube};
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct QLanguageMenu {
context: ContextYT,
}
#[serde_as]
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -122,12 +115,10 @@ struct LanguageItemWrap {
compact_link_renderer: LanguageItem,
}
#[serde_as]
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct LanguageItem {
#[serde_as(as = "crate::serializer::text::Text")]
title: String,
title: Text,
service_endpoint: ServiceEndpoint<LanguageCountryAction>,
}
@@ -144,9 +135,13 @@ struct LanguageCountryCommand {
hl: String,
}
// #[test_log::test(tokio::test)]
#[allow(dead_code)]
async fn generate_locales() {
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Text {
simple_text: String,
}
pub async fn generate_locales(project_root: &Path) {
let (languages, countries) = get_locales().await;
let code_head = r#"// This file is automatically generated. DO NOT EDIT.
@@ -186,18 +181,21 @@ impl FromStr for Country {
}
"#;
let mut code_langs = r#"#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
let mut code_langs =
r#"#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[serde(rename_all = "lowercase")]
pub enum Language {
"#.to_owned();
let mut code_countries = r#"#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
let mut code_countries =
r#"#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[serde(rename_all = "UPPERCASE")]
pub enum Country {
"#.to_owned();
let mut code_lang_array = format!("pub const LANGUAGES: [Language; {}] = [\n", languages.len());
let mut code_country_array = format!("pub const COUNTRIES: [Country; {}] = [\n", countries.len());
let mut code_country_array =
format!("pub const COUNTRIES: [Country; {}] = [\n", countries.len());
let mut code_lang_names = r#"impl Language {
pub fn name(&self) -> &str {
@@ -223,18 +221,22 @@ pub enum Country {
.collect::<String>();
// Language enum
code_langs += &format!(" /// {}\n ", n);
let _ = write!(code_langs, " /// {}\n ", n);
if c.contains('-') {
code_langs += &format!("#[serde(rename = \"{}\")]\n ", c);
let _ = write!(code_langs, "#[serde(rename = \"{}\")]\n ", c);
}
code_langs += &enum_name;
code_langs += ",\n";
// Language array
code_lang_array += &format!(" Language::{},\n", enum_name);
let _ = writeln!(code_lang_array, " Language::{},", enum_name);
// Language names
code_lang_names += &format!(" Language::{} => \"{}\",\n", enum_name, n);
let _ = writeln!(
code_lang_names,
" Language::{} => \"{}\",",
enum_name, n
);
});
code_langs += "}\n";
@@ -242,14 +244,18 @@ pub enum Country {
let enum_name = c[0..1].to_owned().to_uppercase() + &c[1..].to_owned().to_lowercase();
// Country enum
code_countries += &format!(" /// {}\n", n);
code_countries += &format!(" {},\n", enum_name);
let _ = writeln!(code_countries, " /// {}", n);
let _ = writeln!(code_countries, " {},", enum_name);
// Country array
code_country_array += &format!(" Country::{},\n", enum_name);
let _ = writeln!(code_country_array, " Country::{},", enum_name);
// Country names
code_country_names += &format!(" Country::{} => \"{}\",\n", enum_name, n);
let _ = writeln!(
code_country_names,
" Country::{} => \"{}\",",
enum_name, n
);
});
code_countries += "}\n";
@@ -267,26 +273,23 @@ pub enum Country {
code_country_array,
code_lang_names,
code_country_names,
code_foot,
code_foot
);
let locale_path = Path::new("src/model/locale.rs");
std::fs::write(locale_path, code).unwrap();
let mut target_path = project_root.to_path_buf();
target_path.push("src/model/locale.rs");
std::fs::write(target_path, code).unwrap();
}
async fn get_locales() -> (BTreeMap<String, String>, BTreeMap<String, String>) {
let rt = RustyTube::new();
let client = rt.get_ytclient(ClientType::Desktop);
let context = client.get_context(true).await;
let request_body = QLanguageMenu { context };
let client = Client::new();
let resp = client
.request_builder(Method::POST, "account/account_menu")
.await
.json(&request_body)
.send()
.await
.post("https://www.youtube.com/youtubei/v1/account/account_menu?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8")
.header(header::CONTENT_TYPE, "application/json")
.body(
r##"{"context":{"client":{"clientName":"WEB","clientVersion":"2.20220914.06.00","platform":"DESKTOP","originalUrl":"https://www.youtube.com/","hl":"en","gl":"US"},"request":{"internalExperimentFlags":[],"useSsl":true},"user":{"lockedSafetyMode":false}}}"##
)
.send().await
.unwrap()
.error_for_status()
.unwrap();
@@ -344,8 +347,8 @@ fn map_language_section(section: &CompactLinkRendererWrap) -> BTreeMap<String, S
.select_language_command
.hl
.to_owned(),
i.compact_link_renderer.title.to_owned(),
i.compact_link_renderer.title.simple_text.to_owned(),
)
})
.collect::<BTreeMap<_, _>>()
.collect()
}

codegen/src/main.rs Normal file

@@ -0,0 +1,50 @@
mod collect_playlist_dates;
mod download_testfiles;
mod gen_dictionary;
mod gen_locales;
mod util;
use std::path::PathBuf;
use clap::{Parser, Subcommand};
#[derive(Parser)]
struct Cli {
#[clap(subcommand)]
command: Commands,
#[clap(short = 'd', default_value = "..")]
project_root: PathBuf,
#[clap(short, default_value = "8")]
concurrency: usize,
}
#[derive(Subcommand)]
enum Commands {
CollectPlaylistDates,
WritePlaylistDates,
GenLocales,
GenDict,
DownloadTestfiles,
}
#[tokio::main]
async fn main() {
env_logger::init();
let cli = Cli::parse();
match cli.command {
Commands::CollectPlaylistDates => {
collect_playlist_dates::collect_dates(&cli.project_root, cli.concurrency).await;
}
Commands::WritePlaylistDates => {
collect_playlist_dates::write_samples_to_dict(&cli.project_root);
}
Commands::GenLocales => {
gen_locales::generate_locales(&cli.project_root).await;
}
Commands::GenDict => gen_dictionary::generate_dictionary(&cli.project_root),
Commands::DownloadTestfiles => {
download_testfiles::download_testfiles(&cli.project_root).await
}
};
}

codegen/src/util.rs Normal file

@@ -0,0 +1,72 @@
use std::{collections::BTreeMap, fs::File, io::BufReader, path::Path, str::FromStr};
use rustypipe::model::Language;
use serde::{Deserialize, Serialize};
const DICT_PATH: &str = "testfiles/date/dictionary.json";
type Dictionary = BTreeMap<Language, DictEntry>;
#[derive(Debug, Default, Serialize, Deserialize)]
#[serde(default)]
pub struct DictEntry {
pub equivalent: Vec<Language>,
pub by_char: bool,
pub timeago_tokens: BTreeMap<String, String>,
pub date_order: String,
pub months: BTreeMap<String, u8>,
pub timeago_nd_tokens: BTreeMap<String, String>,
}
pub fn read_dict(project_root: &Path) -> Dictionary {
let mut json_path = project_root.to_path_buf();
json_path.push(DICT_PATH);
let json_file = File::open(json_path).unwrap();
serde_json::from_reader(BufReader::new(json_file)).unwrap()
}
pub fn write_dict(project_root: &Path, dict: &Dictionary) {
let mut json_path = project_root.to_path_buf();
json_path.push(DICT_PATH);
let json_file = File::create(json_path).unwrap();
serde_json::to_writer_pretty(json_file, dict).unwrap();
}
pub fn filter_datestr(string: &str) -> String {
string
.to_lowercase()
.chars()
.filter_map(|c| {
if c == '\u{200b}' || c.is_ascii_digit() {
None
} else if c == '-' {
Some(' ')
} else {
Some(c)
}
})
.collect()
}
/// Parse all numbers occurring in a string and return them as a vec
pub fn parse_numeric_vec<F>(string: &str) -> Vec<F>
where
F: FromStr,
{
let mut numbers = vec![];
let mut buf = String::new();
for c in string.chars() {
if c.is_ascii_digit() {
buf.push(c);
} else if !buf.is_empty() {
buf.parse::<F>().map_or((), |n| numbers.push(n));
buf.clear();
}
}
if !buf.is_empty() {
buf.parse::<F>().map_or((), |n| numbers.push(n));
}
numbers
}
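To make the two string helpers above concrete, a small illustrative test (assumed to sit next to them in `codegen/src/util.rs`; the expected values follow directly from the definitions):

```rust
#[cfg(test)]
mod tests {
    use super::{filter_datestr, parse_numeric_vec};

    #[test]
    fn datestr_helpers() {
        // Digits and zero-width spaces are dropped, '-' becomes a space,
        // and everything is lowercased.
        assert_eq!(filter_datestr("3-Jan-2020"), " jan ");
        // All numbers in the string, in order of appearance.
        assert_eq!(parse_numeric_vec::<u32>("Jan 3, 2020"), vec![3, 2020]);
    }
}
```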

src/cache.rs

@@ -1,364 +1,57 @@
use std::{
fs::File,
future::Future,
io::BufReader,
fs,
path::{Path, PathBuf},
sync::Arc,
};
use anyhow::Result;
use chrono::{DateTime, Duration, Utc};
use log::{error, info};
use serde::{Deserialize, Serialize};
use tokio::sync::Mutex;
use log::error;
#[derive(Default, Debug, Clone)]
pub struct Cache {
file: Option<PathBuf>,
data: Arc<Mutex<CacheData>>,
pub trait CacheStorage {
fn write(&self, data: &str);
fn read(&self) -> Option<String>;
}
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
struct CacheData {
desktop_client: Option<CacheEntry<ClientData>>,
music_client: Option<CacheEntry<ClientData>>,
deobf: Option<CacheEntry<DeobfData>>,
pub struct FileStorage {
path: PathBuf,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
struct CacheEntry<T> {
last_update: DateTime<Utc>,
data: T,
}
impl<T> From<T> for CacheEntry<T> {
fn from(f: T) -> Self {
impl FileStorage {
pub fn new<P: AsRef<Path>>(path: P) -> Self {
Self {
last_update: Utc::now(),
data: f,
path: path.as_ref().to_path_buf(),
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ClientData {
pub version: String,
impl Default for FileStorage {
fn default() -> Self {
Self {
path: Path::new("rustypipe_cache.json").into(),
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct DeobfData {
pub js_url: String,
pub sig_fn: String,
pub nsig_fn: String,
pub sts: String,
}
impl Cache {
pub async fn get_desktop_client_data<F>(&self, updater: F) -> Result<ClientData>
where
F: Future<Output = Result<ClientData>> + Send + 'static,
{
let mut cache = self.data.lock().await;
if cache.desktop_client.is_none()
|| cache.desktop_client.as_ref().unwrap().last_update < Utc::now() - Duration::hours(24)
{
let cdata = updater.await?;
cache.desktop_client = Some(CacheEntry::from(cdata.clone()));
self.save(&cache);
Ok(cdata)
} else {
Ok(cache.desktop_client.as_ref().unwrap().data.clone())
}
}
pub async fn get_music_client_data<F>(&self, updater: F) -> Result<ClientData>
where
F: Future<Output = Result<ClientData>> + Send + 'static,
{
let mut cache = self.data.lock().await;
if cache.music_client.is_none()
|| cache.music_client.as_ref().unwrap().last_update < Utc::now() - Duration::hours(24)
{
let cdata = updater.await?;
cache.music_client = Some(CacheEntry::from(cdata.clone()));
self.save(&cache);
Ok(cdata)
} else {
Ok(cache.music_client.as_ref().unwrap().data.clone())
}
}
pub async fn get_deobf_data<F>(&self, updater: F) -> Result<DeobfData>
where
F: Future<Output = Result<DeobfData>> + Send + 'static,
{
let mut cache = self.data.lock().await;
if cache.deobf.is_none()
|| cache.deobf.as_ref().unwrap().last_update < Utc::now() - Duration::hours(24)
{
let deobf_data = updater.await?;
cache.deobf = Some(CacheEntry::from(deobf_data.clone()));
self.save(&cache);
Ok(deobf_data)
} else {
Ok(cache.deobf.as_ref().unwrap().data.clone())
}
}
pub async fn to_json(&self) -> Result<String> {
let cache = self.data.lock().await;
Ok(serde_json::to_string(&cache.clone())?)
}
pub async fn to_json_file<P: AsRef<Path>>(&self, path: P) -> Result<()> {
let cache = self.data.lock().await;
Ok(serde_json::to_writer(&File::create(path)?, &cache.clone())?)
}
pub fn from_json(json: &str) -> Self {
let data: CacheData = match serde_json::from_str(json) {
Ok(cd) => cd,
Err(e) => {
impl CacheStorage for FileStorage {
fn write(&self, data: &str) {
fs::write(&self.path, data).unwrap_or_else(|e| {
error!(
"Could not load cache from json, falling back to default. Error: {}",
"Could not write cache to file `{}`. Error: {}",
self.path.to_string_lossy(),
e
);
CacheData::default()
}
};
Cache {
data: Arc::new(Mutex::new(data)),
file: None,
}
});
}
pub fn from_json_file<P: AsRef<Path>>(path: P) -> Self {
let file = match File::open(path.as_ref()) {
Ok(file) => file,
Err(e) => {
if e.kind() == std::io::ErrorKind::NotFound {
info!(
"Cache json file at {} not found, will be created",
path.as_ref().to_string_lossy()
)
} else {
error!(
"Could not open cache json file, falling back to default. Error: {}",
e
);
}
return Cache {
file: Some(path.as_ref().to_path_buf()),
..Default::default()
};
}
};
let data: CacheData = match serde_json::from_reader(BufReader::new(file)) {
Ok(data) => data,
fn read(&self) -> Option<String> {
match fs::read_to_string(&self.path) {
Ok(data) => Some(data),
Err(e) => {
error!(
"Could not load cache from json, falling back to default. Error: {}",
"Could not load cache from file `{}`. Error: {}",
self.path.to_string_lossy(),
e
);
return Cache {
file: Some(path.as_ref().to_path_buf()),
..Default::default()
};
}
};
Cache {
data: Arc::new(Mutex::new(data)),
file: Some(path.as_ref().to_path_buf()),
}
}
fn save(&self, cache: &CacheData) {
match self.file.as_ref() {
Some(file) => match File::create(file) {
Ok(file) => match serde_json::to_writer(file, cache) {
Ok(_) => {}
Err(e) => error!("Could not write cache to json. Error: {}", e),
},
Err(e) => error!("Could not open cache json file. Error: {}", e),
},
None => {}
None
}
}
}
#[cfg(test)]
mod tests {
use temp_testdir::TempDir;
use super::*;
#[tokio::test]
async fn test() {
let cache = Cache::default();
let desktop_c = cache
.get_desktop_client_data(async {
Ok(ClientData {
version: "1.2.3".to_owned(),
})
})
.await
.unwrap();
assert_eq!(
desktop_c,
ClientData {
version: "1.2.3".to_owned()
}
);
let music_c = cache
.get_music_client_data(async {
Ok(ClientData {
version: "4.5.6".to_owned(),
})
})
.await
.unwrap();
assert_eq!(
music_c,
ClientData {
version: "4.5.6".to_owned()
}
);
let deobf_data = cache
.get_deobf_data(async {
Ok(DeobfData {
js_url:
"https://www.youtube.com/s/player/011af516/player_ias.vflset/en_US/base.js"
.to_owned(),
sig_fn: "t_sig_fn".to_owned(),
nsig_fn: "t_nsig_fn".to_owned(),
sts: "t_sts".to_owned(),
})
})
.await
.unwrap();
assert_eq!(
deobf_data,
DeobfData {
js_url: "https://www.youtube.com/s/player/011af516/player_ias.vflset/en_US/base.js"
.to_owned(),
sig_fn: "t_sig_fn".to_owned(),
nsig_fn: "t_nsig_fn".to_owned(),
sts: "t_sts".to_owned(),
}
);
// Create a new cache from the first one's json
// and check if it returns the same cached data
let json = cache.to_json().await.unwrap();
let new_cache = Cache::from_json(&json);
assert_eq!(
new_cache
.get_desktop_client_data(async {
Ok(ClientData {
version: "".to_owned(),
})
})
.await
.unwrap(),
desktop_c
);
assert_eq!(
new_cache
.get_music_client_data(async {
Ok(ClientData {
version: "".to_owned(),
})
})
.await
.unwrap(),
music_c
);
assert_eq!(
new_cache
.get_deobf_data(async {
Ok(DeobfData {
js_url: "".to_owned(),
nsig_fn: "".to_owned(),
sig_fn: "".to_owned(),
sts: "".to_owned(),
})
})
.await
.unwrap(),
deobf_data
);
}
#[tokio::test]
async fn test_file() {
let temp = TempDir::default();
let mut file_path = PathBuf::from(temp.as_ref());
file_path.push("cache.json");
let cache = Cache::from_json_file(file_path.clone());
let cdata = cache
.get_desktop_client_data(async {
Ok(ClientData {
version: "1.2.3".to_owned(),
})
})
.await
.unwrap();
let deobf_data = cache
.get_deobf_data(async {
Ok(DeobfData {
js_url:
"https://www.youtube.com/s/player/011af516/player_ias.vflset/en_US/base.js"
.to_owned(),
sig_fn: "t_sig_fn".to_owned(),
nsig_fn: "t_nsig_fn".to_owned(),
sts: "t_sts".to_owned(),
})
})
.await
.unwrap();
assert!(file_path.exists());
let new_cache = Cache::from_json_file(file_path.clone());
assert_eq!(
new_cache
.get_desktop_client_data(async {
Ok(ClientData {
version: "".to_owned(),
})
})
.await
.unwrap(),
cdata
);
assert_eq!(
new_cache
.get_deobf_data(async {
Ok(DeobfData {
js_url: "".to_owned(),
nsig_fn: "".to_owned(),
sig_fn: "".to_owned(),
sts: "".to_owned(),
})
})
.await
.unwrap(),
deobf_data
);
}
}
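The rewritten cache module reduces persistence to the `CacheStorage` trait, so backends other than `FileStorage` become trivial. A hedged sketch of an in-memory variant (hypothetical `MemStorage`, e.g. for tests):

```rust
use std::sync::Mutex;

// CacheStorage is the trait introduced in this diff.
struct MemStorage(Mutex<Option<String>>);

impl CacheStorage for MemStorage {
    fn write(&self, data: &str) {
        *self.0.lock().unwrap() = Some(data.to_owned());
    }

    fn read(&self) -> Option<String> {
        self.0.lock().unwrap().clone()
    }
}
```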

src/client/channel.rs

@@ -1,41 +0,0 @@
use anyhow::Result;
use reqwest::Method;
use serde::Serialize;
use super::{response, ClientType, ContextYT, RustyTube};
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct QChannel {
context: ContextYT,
browse_id: String,
params: String,
}
impl RustyTube {
async fn get_channel_response(&self, channel_id: &str) -> Result<response::Channel> {
let client = self.get_ytclient(ClientType::Desktop);
let context = client.get_context(true).await;
let request_body = QChannel {
context,
browse_id: channel_id.to_owned(),
params: "EgZ2aWRlb3PyBgQKAjoA".to_owned(),
};
let resp = client
.request_builder(Method::POST, "browse")
.await
.json(&request_body)
.send()
.await?
.error_for_status()?;
Ok(resp.json::<response::Channel>().await?)
}
}
#[cfg(test)]
mod tests {
}

File diff suppressed because it is too large.

src/client/player.rs

@@ -1,25 +1,33 @@
use std::{
borrow::Cow,
collections::{BTreeMap, HashMap},
sync::Arc,
};
use anyhow::{anyhow, bail, Result};
use chrono::{Local, NaiveDateTime, NaiveTime, TimeZone};
use fancy_regex::Regex;
use log::{error, warn};
use once_cell::sync::Lazy;
use reqwest::Method;
use reqwest::{Method, Url};
use serde::Serialize;
use url::Url;
use super::{response, ClientType, ContextYT, RustyTube, YTClient};
use crate::{client::response::player, deobfuscate::Deobfuscator, model::*, util};
use crate::{
deobfuscate::Deobfuscator,
model::{
AudioCodec, AudioFormat, AudioStream, AudioTrack, Channel, Language, Subtitle, VideoCodec,
VideoFormat, VideoInfo, VideoPlayer, VideoStream,
},
util,
};
use super::{
response::{self, player},
ClientType, MapResponse, MapResult, RustyPipeQuery, YTContext,
};
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct QPlayer {
context: ContextYT,
context: YTContext,
/// Website playback context
#[serde(skip_serializing_if = "Option::is_none")]
playback_context: Option<QPlaybackContext>,
@@ -49,36 +57,18 @@ struct QContentPlaybackContext {
referer: String,
}
impl RustyTube {
pub async fn get_player(&self, video_id: &str, client_type: ClientType) -> Result<VideoPlayer> {
let client = self.get_ytclient(client_type);
let (context, deobf) = tokio::join!(
client.get_context(false),
Deobfuscator::from_fetched_info(client.http_client(), self.cache.clone())
);
let deobf = deobf?;
let request_body = build_request_body(client.clone(), &deobf, context, video_id);
impl RustyPipeQuery {
pub async fn get_player(self, video_id: &str, client_type: ClientType) -> Result<VideoPlayer> {
let q1 = self.clone();
let t_context = tokio::spawn(async move { q1.get_context(client_type, false).await });
let q2 = self.client.clone();
let t_deobf = tokio::spawn(async move { q2.get_deobf().await });
let resp = client
.request_builder(Method::POST, "player")
.await
.json(&request_body)
.send()
.await?
.error_for_status()?;
let (context, deobf) = tokio::join!(t_context, t_deobf);
let context = context.unwrap();
let deobf = deobf.unwrap()?;
let player_response = resp.json::<response::Player>().await?;
map_player_data(player_response, &deobf)
}
}
fn build_request_body(
client: Arc<dyn YTClient>,
deobf: &Deobfuscator,
context: ContextYT,
video_id: &str,
) -> QPlayer {
if client.get_type().is_web() {
let request_body = if client_type.is_web() {
QPlayer {
context,
playback_context: Some(QPlaybackContext {
@@ -101,6 +91,177 @@ fn build_request_body(
content_check_ok: true,
racy_check_ok: true,
}
};
self.execute_request_deobf::<response::Player, _, _>(
client_type,
"get_player",
video_id,
Method::POST,
"player",
&request_body,
Some(&deobf),
)
.await
}
}
impl MapResponse<VideoPlayer> for response::Player {
fn map_response(
self,
id: &str,
_lang: Language,
deobf: Option<&Deobfuscator>,
) -> Result<super::MapResult<VideoPlayer>> {
let deobf = deobf.unwrap();
let mut warnings = vec![];
// Check playability status
match self.playability_status {
response::player::PlayabilityStatus::Ok { live_streamability } => {
if live_streamability.is_some() {
bail!("Active livestreams are not supported")
}
}
response::player::PlayabilityStatus::Unplayable { reason } => {
bail!("Video is unplayable. Reason: {}", reason)
}
response::player::PlayabilityStatus::LoginRequired { reason } => {
bail!("Playback requires login. Reason: {}", reason)
}
response::player::PlayabilityStatus::LiveStreamOffline { reason } => {
bail!("Livestream is offline. Reason: {}", reason)
}
response::player::PlayabilityStatus::Error { reason } => {
bail!("Video was deleted. Reason: {}", reason)
}
};
let mut streaming_data = some_or_bail!(
self.streaming_data,
Err(anyhow!("No streaming data was returned"))
);
let video_details = some_or_bail!(
self.video_details,
Err(anyhow!("No video details were returned"))
);
let microformat = self.microformat.map(|m| m.player_microformat_renderer);
let (publish_date, category, tags, is_family_safe) =
microformat.map_or((None, None, None, None), |m| {
(
Local
.from_local_datetime(&NaiveDateTime::new(
m.publish_date,
NaiveTime::from_hms(0, 0, 0),
))
.single(),
Some(m.category),
m.tags,
Some(m.is_family_safe),
)
});
if video_details.video_id != id {
bail!(
"got wrong video id {}, expected {}",
video_details.video_id,
id
);
}
let video_info = VideoInfo {
id: video_details.video_id,
title: video_details.title,
description: video_details.short_description,
length: video_details.length_seconds,
thumbnails: video_details.thumbnail.unwrap_or_default().into(),
channel: Channel {
id: video_details.channel_id,
name: video_details.author,
},
publish_date,
view_count: video_details.view_count,
keywords: match video_details.keywords {
Some(keywords) => keywords,
None => tags.unwrap_or_default(),
},
category,
is_live_content: video_details.is_live_content,
is_family_safe,
};
let mut formats = streaming_data.formats.c;
formats.append(&mut streaming_data.adaptive_formats.c);
warnings.append(&mut streaming_data.formats.warnings);
warnings.append(&mut streaming_data.adaptive_formats.warnings);
let mut last_nsig: [String; 2] = ["".to_owned(), "".to_owned()];
let mut video_streams: Vec<VideoStream> = Vec::new();
let mut video_only_streams: Vec<VideoStream> = Vec::new();
let mut audio_streams: Vec<AudioStream> = Vec::new();
for f in formats {
if f.format_type == player::FormatType::FormatStreamTypeOtf {
continue;
}
match (f.is_video(), f.is_audio()) {
(true, true) => {
let mut map_res = map_video_stream(f, deobf, &mut last_nsig);
warnings.append(&mut map_res.warnings);
if let Some(c) = map_res.c {
video_streams.push(c);
};
}
(true, false) => {
let mut map_res = map_video_stream(f, deobf, &mut last_nsig);
warnings.append(&mut map_res.warnings);
if let Some(c) = map_res.c {
video_only_streams.push(c);
};
}
(false, true) => {
let mut map_res = map_audio_stream(f, deobf, &mut last_nsig);
warnings.append(&mut map_res.warnings);
if let Some(c) = map_res.c {
audio_streams.push(c);
};
}
(false, false) => warnings.push(format!("invalid stream: itag {}", f.itag)),
}
}
video_streams.sort();
video_only_streams.sort();
audio_streams.sort();
let mut subtitles = vec![];
if let Some(captions) = self.captions {
for c in captions.player_captions_tracklist_renderer.caption_tracks {
let lang_auto = c.name.strip_suffix(" (auto-generated)");
subtitles.push(Subtitle {
url: c.base_url,
lang: c.language_code,
lang_name: lang_auto.unwrap_or(&c.name).to_owned(),
auto_generated: lang_auto.is_some(),
})
}
}
Ok(MapResult {
c: VideoPlayer {
info: video_info,
video_streams,
video_only_streams,
audio_streams,
subtitles,
expires_in_seconds: streaming_data.expires_in_seconds,
},
warnings,
})
}
}
@@ -136,7 +297,7 @@ fn deobf_nsig(
let nsig: String;
match url_params.get("n") {
Some(n) => {
nsig = if n.to_owned() == last_nsig[0] {
nsig = if n == &last_nsig[0] {
last_nsig[1].to_owned()
} else {
let nsig = deobf.deobfuscate_nsig(n)?;
@@ -157,108 +318,192 @@ fn map_url(
signature_cipher: &Option<String>,
deobf: &Deobfuscator,
last_nsig: &mut [String; 2],
) -> Option<(String, bool)> {
) -> MapResult<Option<(String, bool)>> {
let (url_base, mut url_params) = match url {
Some(url) => ok_or_bail!(util::url_to_params(url), None),
Some(url) => ok_or_bail!(
util::url_to_params(url),
MapResult {
c: None,
warnings: vec![format!("Could not parse url `{}`", url)]
}
),
None => match signature_cipher {
Some(signature_cipher) => match cipher_to_url_params(signature_cipher, deobf) {
Ok(res) => res,
Err(e) => {
error!("Could not deobfuscate signatureCipher: {}", e);
return None;
return MapResult {
c: None,
warnings: vec![format!(
"Could not deobfuscate signatureCipher `{}`: {}",
signature_cipher, e
)],
};
}
},
None => return None,
None => {
return MapResult {
c: None,
warnings: vec!["stream contained neither url nor cipher".to_owned()],
}
}
},
};
let mut warnings = vec![];
let mut throttled = false;
deobf_nsig(&mut url_params, deobf, last_nsig).unwrap_or_else(|e| {
warn!("Could not deobfuscate nsig: {}", e);
warnings.push(format!(
"Could not deobfuscate nsig (params: {:?}): {}",
url_params, e
));
throttled = true;
});
Some((
MapResult {
c: Some((
ok_or_bail!(
Url::parse_with_params(url_base.as_str(), url_params.iter()),
None
MapResult {
c: None,
warnings: vec![format!(
"url could not be joined. url: `{}` params: {:?}",
url_base, url_params
)],
}
)
.to_string(),
throttled,
))
)),
warnings,
}
}
fn map_video_stream(
f: &player::Format,
f: player::Format,
deobf: &Deobfuscator,
last_nsig: &mut [String; 2],
) -> Option<VideoStream> {
let (mtype, codecs) = some_or_bail!(parse_mime(&f.mime_type), None);
let (url, throttled) =
some_or_bail!(map_url(&f.url, &f.signature_cipher, deobf, last_nsig), None);
) -> MapResult<Option<VideoStream>> {
let (mtype, codecs) = some_or_bail!(
parse_mime(&f.mime_type),
MapResult {
c: None,
warnings: vec![format!(
"Invalid mime type `{}` in video format {:?}",
&f.mime_type, &f
)]
}
);
let map_res = map_url(&f.url, &f.signature_cipher, deobf, last_nsig);
Some(VideoStream {
match map_res.c {
Some((url, throttled)) => MapResult {
c: Some(VideoStream {
url,
itag: f.itag,
bitrate: f.bitrate,
average_bitrate: f.average_bitrate,
average_bitrate: f.average_bitrate.unwrap_or(f.bitrate),
size: f.content_length,
index_range: f.index_range.clone(),
init_range: f.init_range.clone(),
width: some_or_bail!(f.width, None),
height: some_or_bail!(f.height, None),
fps: some_or_bail!(f.fps, None),
quality: some_or_bail!(f.quality_label.clone(), None),
hdr: f.color_info.clone().unwrap_or_default().primaries
index_range: f.index_range,
init_range: f.init_range,
// Note that the format has already been verified using
// is_video(), so these unwraps are safe
width: f.width.unwrap(),
height: f.height.unwrap(),
fps: f.fps.unwrap(),
quality: f.quality_label.unwrap(),
hdr: f.color_info.unwrap_or_default().primaries
== player::Primaries::ColorPrimariesBt2020,
mime: f.mime_type.to_owned(),
format: some_or_bail!(get_video_format(mtype), None),
format: some_or_bail!(
get_video_format(mtype),
MapResult {
c: None,
warnings: vec![format!("invalid video format. itag: {}", f.itag)]
}
),
codec: get_video_codec(codecs),
throttled,
})
}),
warnings: map_res.warnings,
},
None => MapResult {
c: None,
warnings: map_res.warnings,
},
}
}
fn map_audio_stream(
f: &player::Format,
f: player::Format,
deobf: &Deobfuscator,
last_nsig: &mut [String; 2],
) -> Option<AudioStream> {
) -> MapResult<Option<AudioStream>> {
static LANG_PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r#"^([a-z]{2})\."#).unwrap());
let (mtype, codecs) = some_or_bail!(parse_mime(&f.mime_type), None);
let (url, throttled) =
some_or_bail!(map_url(&f.url, &f.signature_cipher, deobf, last_nsig), None);
let (mtype, codecs) = some_or_bail!(
parse_mime(&f.mime_type),
MapResult {
c: None,
warnings: vec![format!(
"Invalid mime type `{}` in video format {:?}",
&f.mime_type, &f
)]
}
);
let map_res = map_url(&f.url, &f.signature_cipher, deobf, last_nsig);
Some(AudioStream {
match map_res.c {
Some((url, throttled)) => MapResult {
c: Some(AudioStream {
url,
itag: f.itag,
bitrate: f.bitrate,
average_bitrate: f.average_bitrate,
size: f.content_length,
index_range: f.index_range.to_owned(),
init_range: f.init_range.to_owned(),
average_bitrate: f.average_bitrate.unwrap_or(f.bitrate),
size: f.content_length.unwrap(),
index_range: f.index_range,
init_range: f.init_range,
mime: f.mime_type.to_owned(),
format: some_or_bail!(get_audio_format(mtype), None),
format: some_or_bail!(
get_audio_format(mtype),
MapResult {
c: None,
warnings: vec![format!("invalid audio format. itag: {}", f.itag)]
}
),
codec: get_audio_codec(codecs),
throttled,
track: f.audio_track.as_ref().map(|t| AudioTrack {
id: t.id.to_owned(),
lang: LANG_PATTERN
track: match f.audio_track {
Some(t) => {
let lang = LANG_PATTERN
.captures(&t.id)
.ok()
.flatten()
.map(|m| m.get(1).unwrap().as_str().to_owned()),
lang_name: t.display_name.to_owned(),
.map(|m| m.get(1).unwrap().as_str().to_owned());
Some(AudioTrack {
id: t.id,
lang,
lang_name: t.display_name,
is_default: t.audio_is_default,
}),
})
}
None => None,
},
}),
warnings: map_res.warnings,
},
None => MapResult {
c: None,
warnings: map_res.warnings,
},
}
}
fn parse_mime(mime: &str) -> Option<(&str, Vec<&str>)> {
static PATTERN: Lazy<Regex> =
Lazy::new(|| Regex::new(r#"(\w+/\w+);\scodecs="([a-zA-Z-0-9.,\s]*)""#).unwrap());
let captures = some_or_bail!(PATTERN.captures(&mime).ok().flatten(), None);
let captures = some_or_bail!(PATTERN.captures(mime).ok().flatten(), None);
Some((
captures.get(1).unwrap().as_str(),
captures
@@ -313,140 +558,11 @@ fn get_audio_codec(codecs: Vec<&str>) -> AudioCodec {
AudioCodec::Unknown
}
fn map_player_data(response: response::Player, deobf: &Deobfuscator) -> Result<VideoPlayer> {
// Check playability status
match response.playability_status {
response::player::PlayabilityStatus::Ok { live_streamability } => {
if live_streamability.is_some() {
bail!("Active livestreams are not supported")
}
}
response::player::PlayabilityStatus::Unplayable { reason } => {
bail!("Video is unplayable. Reason: {}", reason)
}
response::player::PlayabilityStatus::LoginRequired { reason } => {
bail!("Playback requires login. Reason: {}", reason)
}
response::player::PlayabilityStatus::LiveStreamOffline { reason } => {
bail!("Livestream is offline. Reason: {}", reason)
}
response::player::PlayabilityStatus::Error { reason } => {
bail!("Video was deleted. Reason: {}", reason)
}
};
let streaming_data = some_or_bail!(
response.streaming_data,
Err(anyhow!("No streaming data was returned"))
);
let video_details = some_or_bail!(
response.video_details,
Err(anyhow!("No video details were returned"))
);
let microformat = response.microformat.map(|m| m.player_microformat_renderer);
let video_info = VideoInfo {
id: video_details.video_id,
title: video_details.title,
description: video_details.short_description,
length: video_details.length_seconds,
thumbnails: video_details
.thumbnail
.unwrap_or_default()
.thumbnails
.iter()
.map(|t| Thumbnail {
url: t.url.to_owned(),
height: t.height,
width: t.width,
})
.collect(),
channel: Channel {
id: video_details.channel_id,
name: video_details.author,
},
publish_date: microformat.as_ref().map(|m| {
let ndt = NaiveDateTime::new(m.publish_date, NaiveTime::from_hms(0, 0, 0));
Local.from_local_datetime(&ndt).unwrap()
}),
view_count: video_details.view_count,
keywords: video_details
.keywords
.or_else(|| microformat.as_ref().map_or(None, |mf| mf.tags.clone()))
.unwrap_or_default(),
category: microformat.as_ref().map(|m| m.category.to_owned()),
is_live_content: video_details.is_live_content,
is_family_safe: microformat.as_ref().map(|m| m.is_family_safe),
};
let mut formats = streaming_data.formats.clone();
formats.append(&mut streaming_data.adaptive_formats.clone());
let mut last_nsig: [String; 2] = ["".to_owned(), "".to_owned()];
let mut video_streams: Vec<VideoStream> = Vec::new();
let mut video_only_streams: Vec<VideoStream> = Vec::new();
let mut audio_streams: Vec<AudioStream> = Vec::new();
for f in formats {
if f.format_type == player::FormatType::FormatStreamTypeOtf {
continue;
}
match (f.is_video(), f.is_audio()) {
(true, true) => match map_video_stream(&f, deobf, &mut last_nsig) {
Some(stream) => video_streams.push(stream),
None => {}
},
(true, false) => match map_video_stream(&f, deobf, &mut last_nsig) {
Some(stream) => video_only_streams.push(stream),
None => {}
},
(false, true) => match map_audio_stream(&f, deobf, &mut last_nsig) {
Some(stream) => audio_streams.push(stream),
None => {}
},
(false, false) => {}
}
}
video_streams.sort();
video_only_streams.sort();
audio_streams.sort();
let subtitles = response.captions.map_or(vec![], |captions| {
captions
.player_captions_tracklist_renderer
.caption_tracks
.iter()
.map(|caption| {
let lang_auto = caption.name.strip_suffix(" (auto-generated)");
Subtitle {
url: caption.base_url.to_owned(),
lang: caption.language_code.to_owned(),
lang_name: lang_auto.unwrap_or(&caption.name).to_owned(),
auto_generated: lang_auto.is_some(),
}
})
.collect()
});
Ok(VideoPlayer {
info: video_info,
video_streams,
video_only_streams,
audio_streams,
subtitles,
expires_in_seconds: streaming_data.expires_in_seconds,
})
}
#[cfg(test)]
mod tests {
use std::{fs::File, io::BufReader, path::Path};
use crate::{cache::DeobfData, client::CLIENT_TYPES};
use crate::{client::RustyPipe, deobfuscate::DeobfData};
use super::*;
use rstest::rstest;
@@ -460,59 +576,6 @@ mod tests {
})
});
#[test_log::test(tokio::test)]
async fn download_response_testfiles() {
let tf_dir = Path::new("testfiles/player");
let video_id = "pPvd8UxmSbQ";
let rt = RustyTube::new();
for client_type in CLIENT_TYPES {
let mut json_path = tf_dir.to_path_buf();
json_path.push(format!("{:?}_video.json", client_type).to_lowercase());
if json_path.exists() {
continue;
}
let client = rt.get_ytclient(client_type);
let context = client.get_context(false).await;
let request_body = build_request_body(client.clone(), &DEOBFUSCATOR, context, video_id);
let resp = client
.request_builder(Method::POST, "player")
.await
.json(&request_body)
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let mut file = File::create(json_path).unwrap();
let mut content = std::io::Cursor::new(resp.bytes().await.unwrap());
std::io::copy(&mut content, &mut file).unwrap();
}
}
#[test_log::test(tokio::test)]
async fn download_model_testfiles() {
let tf_dir = Path::new("testfiles/player_model");
let rt = RustyTube::new();
for (name, id) in [("multilanguage", "tVWWp1PqDus"), ("hdr", "LXb3EKWsInQ")] {
let mut json_path = tf_dir.to_path_buf();
json_path.push(format!("{}.json", name).to_lowercase());
if json_path.exists() {
continue;
}
let player_data = rt.get_player(id, ClientType::Desktop).await.unwrap();
let file = File::create(json_path).unwrap();
serde_json::to_writer_pretty(file, &player_data).unwrap();
}
}
#[rstest]
#[case::desktop("desktop")]
#[case::desktop_music("desktopmusic")]
@@ -525,10 +588,17 @@ mod tests {
let json_file = File::open(json_path).unwrap();
let resp: response::Player = serde_json::from_reader(BufReader::new(json_file)).unwrap();
let player_data = map_player_data(resp, &DEOBFUSCATOR).unwrap();
let map_res = resp
.map_response("pPvd8UxmSbQ", Language::En, Some(&DEOBFUSCATOR))
.unwrap();
assert!(
map_res.warnings.is_empty(),
"deserialization/mapping warnings: {:?}",
map_res.warnings
);
let is_desktop = name == "desktop" || name == "desktopmusic";
insta::assert_yaml_snapshot!(format!("map_player_data_{}", name), player_data, {
insta::assert_yaml_snapshot!(format!("map_player_data_{}", name), map_res.c, {
".info.publish_date" => insta::dynamic_redaction(move |value, _path| {
if is_desktop {
assert!(value.as_str().unwrap().starts_with("2019-05-30T00:00:00"));
@@ -561,8 +631,12 @@ mod tests {
#[case::ios(ClientType::Ios)]
#[test_log::test(tokio::test)]
async fn t_get_player(#[case] client_type: ClientType) {
let rt = RustyTube::new();
let player_data = rt.get_player("n4tK7LYFxI0", client_type).await.unwrap();
let rp = RustyPipe::builder().strict().build();
let player_data = rp
.query()
.get_player("n4tK7LYFxI0", client_type)
.await
.unwrap();
// dbg!(&player_data);
@@ -584,7 +658,12 @@ mod tests {
assert_eq!(player_data.info.is_live_content, false);
if client_type == ClientType::Desktop || client_type == ClientType::DesktopMusic {
assert!(player_data.info.publish_date.unwrap().to_string().starts_with("2013-05-05 00:00:00"));
assert!(player_data
.info
.publish_date
.unwrap()
.to_string()
.starts_with("2013-05-05 00:00:00"));
assert_eq!(player_data.info.category.unwrap(), "Music");
assert_eq!(player_data.info.is_family_safe.unwrap(), true);
}
@@ -604,7 +683,7 @@ mod tests {
// Bitrates may change between requests
assert_approx(video.bitrate as f64, 1507068.0);
assert_eq!(video.average_bitrate, 1345149);
assert_eq!(video.size, 43553412);
assert_eq!(video.size.unwrap(), 43553412);
assert_eq!(video.width, 1280);
assert_eq!(video.height, 720);
assert_eq!(video.fps, 30);
@@ -634,7 +713,7 @@ mod tests {
assert_approx(video.bitrate as f64, 1340829.0);
assert_approx(video.average_bitrate as f64, 1233444.0);
assert_approx(video.size as f64, 39936630.0);
assert_approx(video.size.unwrap() as f64, 39936630.0);
assert_eq!(video.width, 1280);
assert_eq!(video.height, 720);
assert_eq!(video.fps, 30);
@@ -661,15 +740,20 @@ mod tests {
fn t_cipher_to_url() {
let signature_cipher = "s=w%3DAe%3DA6aDNQLkViKS7LOm9QtxZJHKwb53riq9qEFw-ecBWJCAiA%3DcEg0tn3dty9jEHszfzh4Ud__bg9CEHVx4ix-7dKsIPAhIQRw8JQ0qOA&sp=sig&url=https://rr5---sn-h0jelnez.googlevideo.com/videoplayback%3Fexpire%3D1659376413%26ei%3Dvb7nYvH5BMK8gAfBj7ToBQ%26ip%3D2003%253Ade%253Aaf06%253A6300%253Ac750%253A1b77%253Ac74a%253A80e3%26id%3Do-AB_BABwrXZJN428ZwDxq5ScPn2AbcGODnRlTVhCQ3mj2%26itag%3D251%26source%3Dyoutube%26requiressl%3Dyes%26mh%3DhH%26mm%3D31%252C26%26mn%3Dsn-h0jelnez%252Csn-4g5ednsl%26ms%3Dau%252Conr%26mv%3Dm%26mvi%3D5%26pl%3D37%26initcwndbps%3D1588750%26spc%3DlT-Khi831z8dTejFIRCvCEwx_6romtM%26vprv%3D1%26mime%3Daudio%252Fwebm%26ns%3Db_Mq_qlTFcSGlG9RpwpM9xQH%26gir%3Dyes%26clen%3D3781277%26dur%3D229.301%26lmt%3D1655510291473933%26mt%3D1659354538%26fvip%3D5%26keepalive%3Dyes%26fexp%3D24001373%252C24007246%26c%3DWEB%26rbqsm%3Dfr%26txp%3D4532434%26n%3Dd2g6G2hVqWIXxedQ%26sparams%3Dexpire%252Cei%252Cip%252Cid%252Citag%252Csource%252Crequiressl%252Cspc%252Cvprv%252Cmime%252Cns%252Cgir%252Cclen%252Cdur%252Clmt%26lsparams%3Dmh%252Cmm%252Cmn%252Cms%252Cmv%252Cmvi%252Cpl%252Cinitcwndbps%26lsig%3DAG3C_xAwRQIgCKCGJ1iu4wlaGXy3jcJyU3inh9dr1FIfqYOZEG_MdmACIQCbungkQYFk7EhD6K2YvLaHFMjKOFWjw001_tLb0lPDtg%253D%253D";
let mut last_nsig: [String; 2] = ["".to_owned(), "".to_owned()];
let (url, throttled) = map_url(
let map_res = map_url(
&None,
&Some(signature_cipher.to_owned()),
&DEOBFUSCATOR,
&mut last_nsig,
)
.unwrap();
);
let (url, throttled) = map_res.c.unwrap();
assert_eq!(url, "https://rr5---sn-h0jelnez.googlevideo.com/videoplayback?c=WEB&clen=3781277&dur=229.301&ei=vb7nYvH5BMK8gAfBj7ToBQ&expire=1659376413&fexp=24001373%2C24007246&fvip=5&gir=yes&id=o-AB_BABwrXZJN428ZwDxq5ScPn2AbcGODnRlTVhCQ3mj2&initcwndbps=1588750&ip=2003%3Ade%3Aaf06%3A6300%3Ac750%3A1b77%3Ac74a%3A80e3&itag=251&keepalive=yes&lmt=1655510291473933&lsig=AG3C_xAwRQIgCKCGJ1iu4wlaGXy3jcJyU3inh9dr1FIfqYOZEG_MdmACIQCbungkQYFk7EhD6K2YvLaHFMjKOFWjw001_tLb0lPDtg%3D%3D&lsparams=mh%2Cmm%2Cmn%2Cms%2Cmv%2Cmvi%2Cpl%2Cinitcwndbps&mh=hH&mime=audio%2Fwebm&mm=31%2C26&mn=sn-h0jelnez%2Csn-4g5ednsl&ms=au%2Conr&mt=1659354538&mv=m&mvi=5&n=XzXGSfGusw6OCQ&ns=b_Mq_qlTFcSGlG9RpwpM9xQH&pl=37&rbqsm=fr&requiressl=yes&sig=AOq0QJ8wRQIhAPIsKd7-xi4xVHEC9gb__dU4hzfzsHEj9ytd3nt0gEceAiACJWBcw-wFEq9qir35bwKHJZxtQ9mOL7SKiVkLQNDa6A%3D%3D&source=youtube&sparams=expire%2Cei%2Cip%2Cid%2Citag%2Csource%2Crequiressl%2Cspc%2Cvprv%2Cmime%2Cns%2Cgir%2Cclen%2Cdur%2Clmt&spc=lT-Khi831z8dTejFIRCvCEwx_6romtM&txp=4532434&vprv=1");
assert_eq!(throttled, false);
assert!(
map_res.warnings.is_empty(),
"deserialization/mapping warnings: {:?}",
map_res.warnings
);
}
}
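The mapper rewrite above threads a `MapResult { c, warnings }` through every stream conversion, so one malformed format no longer fails the whole player response; warnings are collected and asserted empty in the tests. A minimal self-contained stand-in illustrating the idea (the real struct is defined elsewhere in the crate; this one is assumed):

```rust
// Assumed stand-in for the crate's MapResult type.
struct MapResult<T> {
    c: T,
    warnings: Vec<String>,
}

fn map_streams(raw: Vec<Result<u32, String>>) -> MapResult<Vec<u32>> {
    let mut warnings = Vec::new();
    let mut streams = Vec::new();
    for item in raw {
        match item {
            Ok(s) => streams.push(s),
            // Record the problem and keep going instead of bailing out.
            Err(e) => warnings.push(format!("skipped stream: {}", e)),
        }
    }
    MapResult { c: streams, warnings }
}

fn main() {
    let res = map_streams(vec![Ok(18), Err("bad itag".into()), Ok(22)]);
    assert_eq!(res.c, vec![18, 22]);
    assert_eq!(res.warnings.len(), 1);
}
```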

src/client/playlist.rs

@@ -1,85 +1,68 @@
use anyhow::{anyhow, Context, Result};
use anyhow::{anyhow, bail, Result};
use reqwest::Method;
use serde::Serialize;
use crate::{
deobfuscate::Deobfuscator,
model::{Channel, Language, Playlist, Thumbnail, Video},
serializer::text::{PageType, TextLink},
timeago, util,
};
use super::{response, ClientType, ContextYT, RustyTube};
use super::{response, ClientType, MapResponse, MapResult, RustyPipeQuery, YTContext};
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct QPlaylist {
context: ContextYT,
context: YTContext,
browse_id: String,
}
#[derive(Clone, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct QPlaylistCont {
context: ContextYT,
context: YTContext,
continuation: String,
}
impl RustyTube {
pub async fn get_playlist(&self, playlist_id: &str) -> Result<Playlist> {
let client = self.get_ytclient(ClientType::Desktop);
let context = client.get_context(true).await;
impl RustyPipeQuery {
pub async fn get_playlist(self, playlist_id: &str) -> Result<Playlist> {
let context = self.get_context(ClientType::Desktop, true).await;
let request_body = QPlaylist {
context,
browse_id: "VL".to_owned() + playlist_id,
};
let resp = client
.request_builder(Method::POST, "browse")
self.execute_request::<response::Playlist, _, _>(
ClientType::Desktop,
"get_playlist",
playlist_id,
Method::POST,
"browse",
&request_body,
)
.await
.json(&request_body)
.send()
.await?
.error_for_status()?;
let resp_body = resp.text().await?;
let playlist_response =
serde_json::from_str::<response::Playlist>(&resp_body).context(resp_body)?;
map_playlist(&playlist_response, self.localization.language)
}
pub async fn get_playlist_cont(&self, playlist: &mut Playlist) -> Result<()> {
pub async fn get_playlist_cont(self, playlist: &mut Playlist) -> Result<()> {
match &playlist.ctoken {
Some(ctoken) => {
let client = self.get_ytclient(ClientType::Desktop);
let context = client.get_context(true).await;
let context = self.get_context(ClientType::Desktop, true).await;
let request_body = QPlaylistCont {
context,
continuation: ctoken.to_owned(),
};
let resp = client
.request_builder(Method::POST, "browse")
.await
.json(&request_body)
.send()
.await?
.error_for_status()?;
let cont_response = resp.json::<response::playlist::PlaylistCont>().await?;
let action = some_or_bail!(
cont_response
.on_response_received_actions
.iter()
.find(|a| a.append_continuation_items_action.target_id == playlist.id),
Err(anyhow!("no continuation action"))
);
let (mut videos, ctoken) =
map_playlist_items(&action.append_continuation_items_action.continuation_items);
let (mut videos, ctoken) = self
.execute_request::<response::PlaylistCont, _, _>(
ClientType::Desktop,
"get_playlist_cont",
&playlist.id,
Method::POST,
"browse",
&request_body,
)
.await?;
playlist.videos.append(&mut videos);
playlist.ctoken = ctoken;
@ -95,12 +78,17 @@ impl RustyTube {
}
}
fn map_playlist(response: &response::Playlist, lang: Language) -> Result<Playlist> {
impl MapResponse<Playlist> for response::Playlist {
fn map_response(
self,
id: &str,
lang: Language,
_deobf: Option<&Deobfuscator>,
) -> Result<MapResult<Playlist>> {
let video_items = &some_or_bail!(
some_or_bail!(
some_or_bail!(
response
.contents
self.contents
.two_column_browse_results_renderer
.contents
.get(0),
@ -121,9 +109,9 @@ fn map_playlist(response: &response::Playlist, lang: Language) -> Result<Playlis
.playlist_video_list_renderer
.contents;
let (videos, ctoken) = map_playlist_items(video_items);
let (videos, ctoken) = map_playlist_items(&video_items.c);
let (thumbnails, last_update_txt) = match &response.sidebar {
let (thumbnails, last_update_txt) = match &self.sidebar {
Some(sidebar) => {
let primary = some_or_bail!(
sidebar.playlist_sidebar_renderer.items.get(0),
@ -146,14 +134,11 @@ fn map_playlist(response: &response::Playlist, lang: Language) -> Result<Playlis
}
None => {
let header_banner = some_or_bail!(
&response
.header
.playlist_header_renderer
.playlist_header_banner,
&self.header.playlist_header_renderer.playlist_header_banner,
Err(anyhow!("no thumbnail found"))
);
let last_update_txt = response
let last_update_txt = self
.header
.playlist_header_renderer
.byline
@ -182,45 +167,48 @@ fn map_playlist(response: &response::Playlist, lang: Language) -> Result<Playlis
let n_videos = match ctoken {
Some(_) => {
ok_or_bail!(
util::parse_numeric(&response.header.playlist_header_renderer.num_videos_text),
util::parse_numeric(&self.header.playlist_header_renderer.num_videos_text),
Err(anyhow!("no video count"))
)
}
None => videos.len() as u32,
};
let id = response
.header
.playlist_header_renderer
.playlist_id
.to_owned();
let name = response.header.playlist_header_renderer.title.to_owned();
let description = response
.header
.playlist_header_renderer
.description_text
.to_owned();
let playlist_id = self.header.playlist_header_renderer.playlist_id;
if playlist_id != id {
bail!("got wrong playlist id {}, expected {}", playlist_id, id);
}
let channel = match &response.header.playlist_header_renderer.owner_text {
Some(owner_text) => match owner_text {
TextLink::Browse {
let name = self.header.playlist_header_renderer.title;
let description = self.header.playlist_header_renderer.description_text;
let channel = match self.header.playlist_header_renderer.owner_text {
Some(TextLink::Browse {
text,
page_type,
page_type: PageType::Channel,
browse_id,
} => match page_type {
PageType::Channel => Some(Channel {
id: browse_id.to_owned(),
name: text.to_owned(),
}) => Some(Channel {
id: browse_id,
name: text,
}),
_ => None,
},
_ => None,
},
};
let mut warnings = video_items.warnings.to_owned();
let last_update = match &last_update_txt {
Some(textual_date) => {
let parsed = timeago::parse_textual_date_to_dt(lang, textual_date);
if parsed.is_none() {
warnings.push(format!("could not parse textual date `{}`", textual_date));
}
parsed
}
None => None,
};
Ok(Playlist {
id,
Ok(MapResult {
c: Playlist {
id: playlist_id,
name,
videos,
n_videos,
@ -228,16 +216,41 @@ fn map_playlist(response: &response::Playlist, lang: Language) -> Result<Playlis
thumbnails,
description,
channel,
last_update: match &last_update_txt {
Some(textual_date) => timeago::parse_textual_date_to_dt(lang, textual_date),
None => None,
},
last_update,
last_update_txt,
},
warnings,
})
}
}
impl MapResponse<(Vec<Video>, Option<String>)> for response::PlaylistCont {
fn map_response(
self,
id: &str,
_lang: Language,
_deobf: Option<&Deobfuscator>,
) -> Result<MapResult<(Vec<Video>, Option<String>)>> {
let action = some_or_bail!(
self.on_response_received_actions
.iter()
.find(|a| a.append_continuation_items_action.target_id == id),
Err(anyhow!("no continuation action"))
);
Ok(MapResult {
c: map_playlist_items(&action.append_continuation_items_action.continuation_items.c),
warnings: action
.append_continuation_items_action
.continuation_items
.warnings
.to_owned(),
})
}
}
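// For orientation: the `MapResponse` trait implemented above has roughly
// this shape, inferred from the impls in this file (the actual definition
// lives in the client module and may carry extra bounds):
//
// trait MapResponse<T> {
//     fn map_response(
//         self,
//         id: &str,
//         lang: Language,
//         deobf: Option<&Deobfuscator>,
//     ) -> Result<MapResult<T>>;
// }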
fn map_playlist_items(
items: &Vec<response::VideoListItem<response::playlist::PlaylistVideo>>,
items: &[response::VideoListItem<response::playlist::PlaylistVideo>],
) -> (Vec<Video>, Option<String>) {
let mut ctoken: Option<String> = None;
let videos = items
@ -246,10 +259,9 @@ fn map_playlist_items(
response::VideoListItem::GridVideoRenderer { video } => match &video.channel {
TextLink::Browse {
text,
page_type,
page_type: PageType::Channel,
browse_id,
} => match page_type {
PageType::Channel => Some(Video {
} => Some(Video {
id: video.video_id.to_owned(),
title: video.title.to_owned(),
length: video.length_seconds,
@ -270,8 +282,6 @@ fn map_playlist_items(
}),
_ => None,
},
_ => None,
},
response::VideoListItem::ContinuationItemRenderer {
continuation_endpoint,
} => {
@ -289,49 +299,10 @@ mod tests {
use rstest::rstest;
use crate::client::RustyPipe;
use super::*;
#[test_log::test(tokio::test)]
async fn download_testfiles() {
let tf_dir = Path::new("testfiles/playlist");
let rt = RustyTube::new();
for (name, id) in [
("short", "RDCLAK5uy_kFQXdnqMaQCVx2wpUM4ZfbsGCDibZtkJk"),
("long", "PL5dDx681T4bR7ZF1IuWzOv1omlRbE7PiJ"),
("nomusic", "PL1J-6JOckZtE_P9Xx8D3b2O6w0idhuKBe"),
] {
let mut json_path = tf_dir.to_path_buf();
json_path.push(format!("playlist_{}.json", name));
if json_path.exists() {
continue;
}
let client = rt.get_ytclient(ClientType::Desktop);
let context = client.get_context(false).await;
let request_body = QPlaylist {
context,
browse_id: "VL".to_owned() + id,
};
let resp = client
.request_builder(Method::POST, "browse")
.await
.json(&request_body)
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let mut file = std::fs::File::create(json_path).unwrap();
let mut content = std::io::Cursor::new(resp.bytes().await.unwrap());
std::io::copy(&mut content, &mut file).unwrap();
}
}
#[rstest]
#[case::long(
"PL5dDx681T4bR7ZF1IuWzOv1omlRbE7PiJ",
@ -368,8 +339,8 @@ mod tests {
#[case] description: Option<String>,
#[case] channel: Option<Channel>,
) {
let rt = RustyTube::new();
let playlist = rt.get_playlist(id).await.unwrap();
let rp = RustyPipe::builder().strict().build();
let playlist = rp.query().get_playlist(id).await.unwrap();
assert_eq!(playlist.id, id);
assert_eq!(playlist.name, name);
@ -378,37 +349,46 @@ mod tests {
assert!(playlist.n_videos > 10);
assert_eq!(playlist.n_videos > 100, is_long);
assert_eq!(playlist.description, description);
if channel.is_some() {
assert_eq!(playlist.channel, channel);
}
assert!(!playlist.thumbnails.is_empty());
}
#[rstest]
#[case::long("long")]
#[case::short("short")]
#[case::nomusic("nomusic")]
fn t_map_playlist_data(#[case] name: &str) {
#[case::short("short", "RDCLAK5uy_kFQXdnqMaQCVx2wpUM4ZfbsGCDibZtkJk")]
#[case::long("long", "PL5dDx681T4bR7ZF1IuWzOv1omlRbE7PiJ")]
#[case::nomusic("nomusic", "PL1J-6JOckZtE_P9Xx8D3b2O6w0idhuKBe")]
fn t_map_playlist_data(#[case] name: &str, #[case] id: &str) {
let filename = format!("testfiles/playlist/playlist_{}.json", name);
let json_path = Path::new(&filename);
let json_file = File::open(json_path).unwrap();
let playlist: response::Playlist =
serde_json::from_reader(BufReader::new(json_file)).unwrap();
let playlist_data = map_playlist(&playlist, Language::En).unwrap();
insta::assert_yaml_snapshot!(format!("map_playlist_data_{}", name), playlist_data, {
let map_res = playlist.map_response(id, Language::En, None).unwrap();
assert!(
map_res.warnings.is_empty(),
"deserialization/mapping warnings: {:?}",
map_res.warnings
);
insta::assert_yaml_snapshot!(format!("map_playlist_data_{}", name), map_res.c, {
".last_update" => "[date]"
});
}
#[test_log::test(tokio::test)]
async fn t_playlist_cont() {
let rt = RustyTube::new();
let mut playlist = rt
let rp = RustyPipe::builder().strict().build();
let mut playlist = rp
.query()
.get_playlist("PLbZIPy20-1pN7mqjckepWF78ndb6ci_qi")
.await
.unwrap();
while playlist.ctoken.is_some() {
rt.get_playlist_cont(&mut playlist).await.unwrap();
rp.query().get_playlist_cont(&mut playlist).await.unwrap();
}
assert!(playlist.videos.len() > 100);

View file

@ -4,6 +4,7 @@ use serde_with::VecSkipError;
use super::TimeOverlay;
use super::{ContentRenderer, ContentsRenderer, Thumbnails, VideoListItem};
use crate::serializer::text::Text;
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
@ -63,11 +64,11 @@ pub struct GridRenderer {
pub struct ChannelVideo {
pub video_id: String,
pub thumbnail: Thumbnails,
#[serde_as(as = "crate::serializer::text::Text")]
#[serde_as(as = "Text")]
pub title: String,
#[serde_as(as = "Option<crate::serializer::text::Text>")]
#[serde_as(as = "Option<Text>")]
pub published_time_text: Option<String>,
#[serde_as(as = "crate::serializer::text::Text")]
#[serde_as(as = "Text")]
pub view_count_text: String,
#[serde_as(as = "VecSkipError<_>")]
pub thumbnail_overlays: Vec<TimeOverlay>,

View file

@ -7,6 +7,7 @@ pub mod video;
pub use channel::Channel;
pub use player::Player;
pub use playlist::Playlist;
pub use playlist::PlaylistCont;
pub use playlist_music::PlaylistMusic;
pub use video::Video;
pub use video::VideoComments;
@ -15,7 +16,7 @@ pub use video::VideoRecommendations;
use serde::Deserialize;
use serde_with::{serde_as, DefaultOnError, VecSkipError};
use crate::serializer::text::TextLink;
use crate::serializer::text::{Text, TextLink, TextLinks};
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
@ -93,10 +94,10 @@ pub struct VideoOwner {
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct VideoOwnerRenderer {
#[serde_as(as = "crate::serializer::text::TextLink")]
#[serde_as(as = "TextLink")]
pub title: TextLink,
pub thumbnail: Thumbnails,
#[serde_as(as = "Option<crate::serializer::text::Text>")]
#[serde_as(as = "Option<Text>")]
pub subscriber_count_text: Option<String>,
#[serde(default)]
#[serde_as(as = "VecSkipError<_>")]
@ -132,7 +133,7 @@ pub struct TimeOverlay {
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TimeOverlayRenderer {
#[serde_as(as = "crate::serializer::text::Text")]
#[serde_as(as = "Text")]
pub text: String,
#[serde(default)]
#[serde_as(deserialize_as = "DefaultOnError")]
@ -198,7 +199,7 @@ pub struct MusicColumn {
#[serde_as]
#[derive(Clone, Debug, Deserialize)]
pub struct MusicColumnRenderer {
#[serde_as(as = "crate::serializer::text::TextLinks")]
#[serde_as(as = "TextLinks")]
pub text: Vec<TextLink>,
}
@ -213,3 +214,23 @@ pub struct MusicContinuation {
pub struct MusicContinuationData {
pub continuation: String,
}
impl From<Thumbnail> for crate::model::Thumbnail {
fn from(tn: Thumbnail) -> Self {
crate::model::Thumbnail {
url: tn.url,
width: tn.width,
height: tn.height,
}
}
}
impl From<Thumbnails> for Vec<crate::model::Thumbnail> {
    fn from(ts: Thumbnails) -> Self {
        ts.thumbnails.into_iter().map(crate::model::Thumbnail::from).collect()
    }
}
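// Usage sketch for the conversions above: a deserialized `Thumbnails`
// value converts straight into the public model type.
fn _thumbnail_conversion_example(tns: Thumbnails) -> Vec<crate::model::Thumbnail> {
    tns.into()
}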

View file

@ -3,9 +3,10 @@ use std::ops::Range;
use chrono::NaiveDate;
use serde::Deserialize;
use serde_with::serde_as;
use serde_with::{json::JsonString, DefaultOnError, VecSkipError};
use serde_with::{json::JsonString, DefaultOnError};
use super::Thumbnails;
use crate::serializer::{text::Text, MapResult, VecLogError};
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
@ -45,11 +46,11 @@ pub struct StreamingData {
#[serde_as(as = "JsonString")]
pub expires_in_seconds: u32,
#[serde(default)]
#[serde_as(as = "VecSkipError<_>")]
pub formats: Vec<Format>,
#[serde_as(as = "VecLogError<_>")]
pub formats: MapResult<Vec<Format>>,
#[serde(default)]
#[serde_as(as = "VecSkipError<_>")]
pub adaptive_formats: Vec<Format>,
#[serde_as(as = "VecLogError<_>")]
pub adaptive_formats: MapResult<Vec<Format>>,
/// Only on livestreams
pub dash_manifest_url: Option<String>,
/// Only on livestreams
@ -73,20 +74,20 @@ pub struct Format {
pub width: Option<u32>,
pub height: Option<u32>,
#[serde_as(as = "Option<crate::serializer::range::Range>")]
#[serde_as(as = "Option<crate::serializer::Range>")]
pub index_range: Option<Range<u32>>,
#[serde_as(as = "Option<crate::serializer::range::Range>")]
#[serde_as(as = "Option<crate::serializer::Range>")]
pub init_range: Option<Range<u32>>,
#[serde_as(as = "JsonString")]
pub content_length: u64,
#[serde_as(as = "Option<JsonString>")]
pub content_length: Option<u64>,
#[serde(default)]
#[serde_as(deserialize_as = "DefaultOnError")]
pub quality: Option<Quality>,
pub fps: Option<u8>,
pub quality_label: Option<String>,
pub average_bitrate: u32,
pub average_bitrate: Option<u32>,
pub color_info: Option<ColorInfo>,
// Audio only
@ -104,7 +105,9 @@ pub struct Format {
impl Format {
pub fn is_audio(&self) -> bool {
self.audio_quality.is_some() && self.audio_sample_rate.is_some()
self.content_length.is_some()
&& self.audio_quality.is_some()
&& self.audio_sample_rate.is_some()
}
pub fn is_video(&self) -> bool {
@ -188,7 +191,7 @@ pub struct PlayerCaptionsTracklistRenderer {
#[serde(rename_all = "camelCase")]
pub struct CaptionTrack {
pub base_url: String,
#[serde_as(as = "crate::serializer::text::Text")]
#[serde_as(as = "Text")]
pub name: String,
pub language_code: String,
}

View file

@ -2,7 +2,8 @@ use serde::Deserialize;
use serde_with::serde_as;
use serde_with::{json::JsonString, DefaultOnError, VecSkipError};
use crate::serializer::text::TextLink;
use crate::serializer::text::{Text, TextLink};
use crate::serializer::{MapResult, VecLogError};
use super::{ContentRenderer, ContentsRenderer, Thumbnails, ThumbnailsWrap, VideoListItem};
@ -56,8 +57,8 @@ pub struct PlaylistVideoListRenderer {
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PlaylistVideoList {
#[serde_as(as = "VecSkipError<_>")]
pub contents: Vec<VideoListItem<PlaylistVideo>>,
#[serde_as(as = "VecLogError<_>")]
pub contents: MapResult<Vec<VideoListItem<PlaylistVideo>>>,
}
#[serde_as]
@ -66,10 +67,10 @@ pub struct PlaylistVideoList {
pub struct PlaylistVideo {
pub video_id: String,
pub thumbnail: Thumbnails,
#[serde_as(as = "crate::serializer::text::Text")]
#[serde_as(as = "Text")]
pub title: String,
#[serde(rename = "shortBylineText")]
#[serde_as(as = "crate::serializer::text::TextLink")]
#[serde_as(as = "TextLink")]
pub channel: TextLink,
#[serde_as(as = "JsonString")]
pub length_seconds: u32,
@ -86,14 +87,14 @@ pub struct Header {
#[serde(rename_all = "camelCase")]
pub struct HeaderRenderer {
pub playlist_id: String,
#[serde_as(as = "crate::serializer::text::Text")]
#[serde_as(as = "Text")]
pub title: String,
#[serde(default)]
#[serde_as(as = "DefaultOnError<Option<crate::serializer::text::Text>>")]
#[serde_as(as = "DefaultOnError<Option<Text>>")]
pub description_text: Option<String>,
#[serde_as(as = "crate::serializer::text::Text")]
#[serde_as(as = "Text")]
pub num_videos_text: String,
#[serde_as(as = "Option<crate::serializer::text::TextLink>")]
#[serde_as(as = "Option<TextLink>")]
pub owner_text: Option<TextLink>,
// Alternative layout
@ -118,7 +119,7 @@ pub struct Byline {
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BylineRenderer {
#[serde_as(as = "crate::serializer::text::Text")]
#[serde_as(as = "Text")]
pub text: String,
}
@ -150,7 +151,7 @@ pub struct SidebarPrimaryInfoRenderer {
// - `"495", " videos"`
// - `"3,310,996 views"`
// - `"Last updated on ", "Aug 7, 2022"`
#[serde_as(as = "Vec<crate::serializer::text::Text>")]
#[serde_as(as = "Vec<Text>")]
pub stats: Vec<String>,
}
@ -172,7 +173,7 @@ pub struct OnResponseReceivedAction {
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AppendAction {
#[serde_as(as = "VecSkipError<_>")]
pub continuation_items: Vec<VideoListItem<PlaylistVideo>>,
#[serde_as(as = "VecLogError<_>")]
pub continuation_items: MapResult<Vec<VideoListItem<PlaylistVideo>>>,
pub target_id: String,
}

View file

@ -1,3 +1,5 @@
#![allow(clippy::enum_variant_names)]
use serde::Deserialize;
use serde_with::serde_as;
use serde_with::{DefaultOnError, VecSkipError};

View file

@ -1,6 +1,6 @@
---
source: src/client/player.rs
expression: player_data
expression: map_res.c
---
info:
id: pPvd8UxmSbQ
@ -184,6 +184,22 @@ video_only_streams:
format: mp4
codec: av01
throttled: false
- url: "https://rr5---sn-h0jeenek.googlevideo.com/videoplayback?c=ANDROID&dur=163.096&ei=q1jpYtOPEYSBgQeHmqbwAQ&expire=1659481355&fexp=24001373%2C24007246&fvip=4&id=o-AEDMTCojVtwpIKOdhBaxEHE5s322qnAJHGqa2r1F46BM&initcwndbps=1527500&ip=2003%3Ade%3Aaf0e%3A2f00%3Ade47%3A297%3Aa6db%3A774e&itag=22&lmt=1580005750956837&lsig=AG3C_xAwRgIhAOiL-qJ04sA8FSOkEJfOYl3gFe4SzwYu_rAf3DMLHYigAiEA0Upi1HqqIu7NH_LTDL0jT1R5TTozQypL5FiSP9RoqtU%3D&lsparams=mh%2Cmm%2Cmn%2Cms%2Cmv%2Cmvi%2Cpl%2Cinitcwndbps&mh=mQ&mime=video%2Fmp4&mm=31%2C29&mn=sn-h0jeenek%2Csn-h0jelnez&ms=au%2Crdu&mt=1659459429&mv=m&mvi=5&pl=37&ratebypass=yes&rbqsm=fr&requiressl=yes&sig=AOq0QJ8wRAIgFlQZgR63Yz9UgY9gVqiyGDVkZmSmACRP3-MmKN7CRzQCIAMHAwZbHmWL1qNH4Nu3A0pXZwErXMVPzMIt-PyxeZqa&source=youtube&sparams=expire%2Cei%2Cip%2Cid%2Citag%2Csource%2Crequiressl%2Cvprv%2Cmime%2Cratebypass%2Cdur%2Clmt&txp=2211222&vprv=1"
itag: 22
bitrate: 1574434
average_bitrate: 1574434
size: ~
index_range: ~
init_range: ~
width: 1280
height: 720
fps: 30
quality: 720p
hdr: false
mime: "video/mp4; codecs=\"avc1.64001F, mp4a.40.2\""
format: mp4
codec: avc1
throttled: false
- url: "https://rr5---sn-h0jeenek.googlevideo.com/videoplayback?c=ANDROID&clen=22365208&dur=163.046&ei=q1jpYtOPEYSBgQeHmqbwAQ&expire=1659481355&fexp=24001373%2C24007246&fvip=4&gir=yes&id=o-AEDMTCojVtwpIKOdhBaxEHE5s322qnAJHGqa2r1F46BM&initcwndbps=1527500&ip=2003%3Ade%3Aaf0e%3A2f00%3Ade47%3A297%3Aa6db%3A774e&itag=398&keepalive=yes&lmt=1608048380553749&lsig=AG3C_xAwRgIhAOiL-qJ04sA8FSOkEJfOYl3gFe4SzwYu_rAf3DMLHYigAiEA0Upi1HqqIu7NH_LTDL0jT1R5TTozQypL5FiSP9RoqtU%3D&lsparams=mh%2Cmm%2Cmn%2Cms%2Cmv%2Cmvi%2Cpl%2Cinitcwndbps&mh=mQ&mime=video%2Fmp4&mm=31%2C29&mn=sn-h0jeenek%2Csn-h0jelnez&ms=au%2Crdu&mt=1659459429&mv=m&mvi=5&otfp=1&pl=37&rbqsm=fr&requiressl=yes&sig=AOq0QJ8wRAIgR6KqCOoig_FMl2tWKa7qHSmCjIZa9S7ABzEI16qdO2sCIFXccwql4bqV9CHlqXY4tgxyMFUsp7vW4XUjxs3AyG6H&source=youtube&sparams=expire%2Cei%2Cip%2Cid%2Citag%2Csource%2Crequiressl%2Cvprv%2Cmime%2Cgir%2Cclen%2Cotfp%2Cdur%2Clmt&txp=1311222&vprv=1"
itag: 398
bitrate: 1348419

View file

@ -1,6 +1,6 @@
---
source: src/client/player.rs
expression: player_data
expression: map_res.c
---
info:
id: pPvd8UxmSbQ

View file

@ -1,6 +1,6 @@
---
source: src/client/player.rs
expression: player_data
expression: map_res.c
---
info:
id: pPvd8UxmSbQ

View file

@ -1,6 +1,6 @@
---
source: src/client/player.rs
expression: player_data
expression: map_res.c
---
info:
id: pPvd8UxmSbQ

View file

@ -1,6 +1,6 @@
---
source: src/client/player.rs
expression: player_data
expression: map_res.c
---
info:
id: pPvd8UxmSbQ

View file

@ -1,6 +1,6 @@
---
source: src/client/playlist.rs
expression: playlist_data
expression: map_res.c
---
id: PL5dDx681T4bR7ZF1IuWzOv1omlRbE7PiJ
name: Die schönsten deutschen Lieder | Beliebteste Lieder | Beste Deutsche Musik 2022

View file

@ -1,6 +1,6 @@
---
source: src/client/playlist.rs
expression: playlist_data
expression: map_res.c
---
id: PL1J-6JOckZtE_P9Xx8D3b2O6w0idhuKBe
name: Minecraft SHINE

View file

@ -1,6 +1,6 @@
---
source: src/client/playlist.rs
expression: playlist_data
expression: map_res.c
---
id: RDCLAK5uy_kFQXdnqMaQCVx2wpUM4ZfbsGCDibZtkJk
name: Easy Pop

View file

@ -1,111 +0,0 @@
use anyhow::Result;
use reqwest::Method;
use serde::Serialize;
use super::{response, ClientType, ContextYT, RustyTube};
#[derive(Clone, Debug, Serialize)]
struct QVideo {
context: ContextYT,
/// YouTube video ID
video_id: String,
/// Set to true to allow extraction of streams with sensitive content
content_check_ok: bool,
/// Probably refers to allowing sensitive content, too
racy_check_ok: bool,
}
#[derive(Clone, Debug, Serialize)]
struct QVideoCont {
context: ContextYT,
continuation: String,
}
impl RustyTube {
async fn get_video_response(&self, video_id: &str) -> Result<response::Video> {
let client = self.get_ytclient(ClientType::Desktop);
let context = client.get_context(true).await;
let request_body = QVideo {
context,
video_id: video_id.to_owned(),
content_check_ok: true,
racy_check_ok: true,
};
let resp = client
.request_builder(Method::POST, "next")
.await
.json(&request_body)
.send()
.await?
.error_for_status()?;
Ok(resp.json::<response::Video>().await?)
}
async fn get_comments_response(&self, ctoken: &str) -> Result<response::VideoComments> {
let client = self.get_ytclient(ClientType::Desktop);
let context = client.get_context(true).await;
let request_body = QVideoCont {
context,
continuation: ctoken.to_owned(),
};
let resp = client
.request_builder(Method::POST, "next")
.await
.json(&request_body)
.send()
.await?
.error_for_status()?;
Ok(resp.json::<response::VideoComments>().await?)
}
async fn get_recommendations_response(
&self,
ctoken: &str,
) -> Result<response::VideoRecommendations> {
let client = self.get_ytclient(ClientType::Desktop);
let context = client.get_context(true).await;
let request_body = QVideoCont {
context,
continuation: ctoken.to_owned(),
};
let resp = client
.request_builder(Method::POST, "next")
.await
.json(&request_body)
.send()
.await?
.error_for_status()?;
Ok(resp.json::<response::VideoRecommendations>().await?)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[tokio::test]
async fn t_get_video_response() {
let rt = RustyTube::new();
// rt.get_video("ZeerrnuLi5E").await.unwrap();
dbg!(rt.get_video_response("iQfSvIgIs_M").await.unwrap());
}
#[tokio::test]
async fn t_get_comments_response() {
let rt = RustyTube::new();
// rt.get_comments("Eg0SC2lRZlN2SWdJc19NGAYyJSIRIgtpUWZTdklnSXNfTTAAeAJCEGNvbW1lbnRzLXNlY3Rpb24%3D").await.unwrap();
dbg!(rt.get_comments_response("Eg0SC2lRZlN2SWdJc19NGAYychpFEhRVZ2lnVGJVTEZ6Qk5FWGdDb0FFQyICCAAqGFVDWFgwUldPSUJqdDRvM3ppSHUtNmE1QTILaVFmU3ZJZ0lzX01AAUgKQiljb21tZW50LXJlcGxpZXMtaXRlbS1VZ2lnVGJVTEZ6Qk5FWGdDb0FFQw%3D%3D").await.unwrap());
}
#[tokio::test]
async fn t_get_recommendations_response() {
let rt = RustyTube::new();
dbg!(rt.get_recommendations_response("CBQSExILaVFmU3ZJZ0lzX03AAQHIAQEYACqkBjJzNkw2d3pVQkFyUkJBb0Q4ajRBQ2c3Q1Bnc0lvWXlRejhLZnRZUGNBUW9EOGo0QUNnN0NQZ3NJeElEX2w0YjFtNnUtQVFvRDhqNEFDZzNDUGdvSXg5Ykx3WUNKenFwX0NnUHlQZ0FLRGNJLUNnaW83T2pqZzVPTHZEOEtBX0ktQUFvTndqNEtDTE9venZmQThybVhXd29EOGo0QUNnM0NQZ29JdzZETV9vSFk0cHRCQ2dQeVBnQUtEc0ktQ3dqbW9QbURpcHVPel80QkNnUHlQZ0FLRGNJLUNnalY4THpEazlfOTRCWUtBX0ktQUFvT3dqNExDTXVZNU9YZzE3ejV2d0VLQV9JLUFBb053ajRLQ1A3eHZiSGswTnVuYWdvRDhqNEFDZzdDUGdzSXFQYVU5ZGp2Ml96S0FRb0Q4ajRBQ2c3Q1Bnc0lfSW1acUtQOTlfQ09BUW9EOGo0QUNnM0NQZ29JeGRtNzlZS3prcUFqQ2dQeVBnQUtEY0ktQ2dpZ3FJMkg0UENRX2s0S0FfSS1BQW9Pd2o0TENQV0V5NV9ZeDhERl9nRUtBX0ktQUFvT3dqNExDTzJid3VuV3BPX3ppd0VLQV9JLUFBb2gwajRlQ2h4U1JFTk5WVU5ZV0RCU1YwOUpRbXAwTkc4emVtbElkUzAyWVRWQkNnUHlQZ0FLRGNJLUNnaXpqcXZwcDh5MWwwMEtBX0ktQUFvTndqNEtDTFhWbl83dHhfWDJOUW9EOGo0QUNnN0NQZ3NJNWR5ZWc1NjZyUGUwQVJJVUFBSUVCZ2dLREE0UUVoUVdHQm9jSGlBaUpDWWFCQWdBRUFFYUJBZ0NFQU1hQkFnRUVBVWFCQWdHRUFjYUJBZ0lFQWthQkFnS0VBc2FCQWdNRUEwYUJBZ09FQThhQkFnUUVCRWFCQWdTRUJNYUJBZ1VFQlVhQkFnV0VCY2FCQWdZRUJrYUJBZ2FFQnNhQkFnY0VCMGFCQWdlRUI4YUJBZ2dFQ0VhQkFnaUVDTWFCQWdrRUNVYUJBZ21FQ2NxRkFBQ0JBWUlDZ3dPRUJJVUZoZ2FIQjRnSWlRbWoPd2F0Y2gtbmV4dC1mZWVk").await.unwrap());
}
}

View file

@ -1,35 +0,0 @@
#![cfg(test)]
use std::{collections::BTreeMap, fs::File, io::BufReader};
use serde::{Serialize, Deserialize};
use crate::model::Language;
mod collect_playlist_dates;
mod gen_dictionary;
mod gen_locales;
const DICT_PATH: &str = "testfiles/date/dictionary.json";
type Dictionary = BTreeMap<Language, DictEntry>;
#[derive(Debug, Default, Serialize, Deserialize)]
#[serde(default)]
struct DictEntry {
equivalent: Vec<Language>,
by_char: bool,
timeago_tokens: BTreeMap<String, String>,
date_order: String,
months: BTreeMap<String, u8>,
timeago_nd_tokens: BTreeMap<String, String>,
}
fn read_dict() -> Dictionary {
let json_file = File::open(DICT_PATH).unwrap();
serde_json::from_reader(BufReader::new(json_file)).unwrap()
}
fn write_dict(dict: &Dictionary) {
let json_file = File::create(DICT_PATH).unwrap();
serde_json::to_writer_pretty(json_file, dict).unwrap();
}

View file

@ -3,19 +3,25 @@ use fancy_regex::Regex;
use log::debug;
use once_cell::sync::Lazy;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use std::result::Result::Ok;
use crate::cache::{Cache, DeobfData};
use crate::util;
pub struct Deobfuscator {
data: DeobfData,
}
#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct DeobfData {
pub js_url: String,
pub sig_fn: String,
pub nsig_fn: String,
pub sts: String,
}
impl Deobfuscator {
pub async fn from_fetched_info(http: Client, cache: Cache) -> Result<Self> {
let data = cache
.get_deobf_data(async move {
pub async fn new(http: Client) -> Result<Self> {
let js_url = get_player_js_url(&http)
.await
.context("Failed to retrieve player.js URL")?;
@ -30,16 +36,14 @@ impl Deobfuscator {
let nsig_fn = get_nsig_fn(&player_js)?;
let sts = get_sts(&player_js)?;
Ok(DeobfData {
Ok(Self {
data: DeobfData {
js_url,
nsig_fn,
sig_fn,
sts,
},
})
})
.await?;
Ok(Self { data })
}
pub fn deobfuscate_sig(&self, sig: &str) -> Result<String> {
@ -53,6 +57,10 @@ impl Deobfuscator {
pub fn get_sts(&self) -> String {
self.data.sts.to_owned()
}
pub fn get_data(&self) -> DeobfData {
self.data.to_owned()
}
}
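// Minimal usage sketch (requires network access; the signature string is a
// placeholder, not a real obfuscated signature):
async fn _deobfuscator_example() -> Result<()> {
    let deobf = Deobfuscator::new(Client::new()).await?;
    let _sig = deobf.deobfuscate_sig("<obfuscated signature>")?;
    let _sts = deobf.get_sts();
    Ok(())
}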
impl From<DeobfData> for Deobfuscator {
@ -80,7 +88,7 @@ fn get_sig_fn_name(player_js: &str) -> Result<String> {
}
fn caller_function(fn_name: &str) -> String {
"var ".to_owned() + DEOBFUSCATION_FUNC_NAME + "=" + &fn_name + ";"
format!("var {}={};", DEOBFUSCATION_FUNC_NAME, fn_name)
}
fn get_sig_fn(player_js: &str) -> Result<String> {
@ -345,7 +353,7 @@ fn get_sts(player_js: &str) -> Result<String> {
Lazy::new(|| Regex::new("signatureTimestamp[=:](\\d+)").unwrap());
Ok(some_or_bail!(
STS_PATTERN.captures(&player_js)?,
STS_PATTERN.captures(player_js)?,
Err(anyhow!("could not find sts"))
)
.get(1)
@ -472,10 +480,7 @@ c[36](c[8],c[32]),c[20](c[25],c[10]),c[2](c[22],c[8]),c[32](c[20],c[16]),c[32](c
#[test(tokio::test)]
async fn t_update() {
let client = Client::new();
let cache = Cache::default();
let deobf = Deobfuscator::from_fetched_info(client, cache)
.await
.unwrap();
let deobf = Deobfuscator::new(client).await.unwrap();
let deobf_sig = deobf.deobfuscate_sig("GOqGOqGOq0QJ8wRAIgaryQHfplJ9xJSKFywyaSMHuuwZYsoMTAvRvfm51qIGECIA5061zWeyfMPX9hEl_U6f9J0tr7GTJMKyPf5XNrJb5fb5i").unwrap();
println!("{}", deobf_sig);

View file

@ -1,7 +1,7 @@
// This file is automatically generated. DO NOT EDIT.
use crate::{
model::Language,
timeago::{TaToken, TimeUnit, DateCmp},
timeago::{DateCmp, TaToken, TimeUnit},
};
pub struct Entry {

View file

@ -27,8 +27,8 @@ fn get_download_range(offset: u64, size: Option<u64>) -> Range<u64> {
let chunk_size = rng.gen_range(CHUNK_SIZE_MIN..CHUNK_SIZE_MAX);
let mut chunk_end = offset + chunk_size;
if size.is_some() {
chunk_end = chunk_end.min(size.unwrap() - 1)
if let Some(size) = size {
chunk_end = chunk_end.min(size - 1)
}
Range {
@ -41,7 +41,7 @@ fn parse_cr_header(cr_header: &str) -> Result<(u64, u64)> {
static PATTERN: Lazy<Regex> = Lazy::new(|| Regex::new(r#"bytes (\d+)-(\d+)/(\d+)"#).unwrap());
let captures = some_or_bail!(
PATTERN.captures(&cr_header).ok().flatten(),
PATTERN.captures(cr_header).ok().flatten(),
Err(anyhow!(
"Content-Range header '{}' does not match pattern.",
cr_header
@ -77,10 +77,7 @@ async fn download_single_file<P: Into<PathBuf>>(
let (url_base, url_params) = util::url_to_params(url)?;
let is_gvideo = url_base.ends_with(".googlevideo.com/videoplayback");
if is_gvideo {
size = url_params
.get("clen")
.map(|s| s.parse::<u64>().ok())
.flatten();
size = url_params.get("clen").and_then(|s| s.parse::<u64>().ok());
}
// Check if file is partially downloaded
@ -257,6 +254,7 @@ struct StreamDownload {
video_codec: Option<VideoCodec>,
}
#[allow(clippy::too_many_arguments)]
pub async fn download_video(
player_data: &VideoPlayer,
output_dir: &str,
@ -327,7 +325,7 @@ pub async fn download_video(
_ => {
let mut downloads: Vec<StreamDownload> = Vec::new();
video.map(|v| {
if let Some(v) = video {
downloads.push(StreamDownload {
file: download_dir.join(format!(
"{}.video{}",
@ -338,8 +336,8 @@ pub async fn download_video(
video_codec: Some(v.codec),
audio_codec: None,
});
});
audio.map(|a| {
}
if let Some(a) = audio {
downloads.push(StreamDownload {
file: download_dir.join(format!(
"{}.audio{}",
@ -350,7 +348,7 @@ pub async fn download_video(
video_codec: None,
audio_codec: Some(a.codec),
})
});
}
pb.set_message(format!("Downloading {}", title));
download_streams(&downloads, http, pb.clone()).await?;

View file

@ -1,18 +1,17 @@
#![allow(dead_code)]
#![warn(clippy::todo)]
#[macro_use]
mod macros;
#[cfg(test)]
mod codegen;
mod cache;
mod deobfuscate;
mod dictionary;
mod serializer;
mod timeago;
mod util;
pub mod cache;
pub mod client;
pub mod download;
pub mod model;
pub mod report;
pub mod timeago;

View file

@ -59,7 +59,7 @@ pub struct VideoStream {
pub itag: u32,
pub bitrate: u32,
pub average_bitrate: u32,
pub size: u64,
pub size: Option<u64>,
pub index_range: Option<Range<u32>>,
pub init_range: Option<Range<u32>>,
pub width: u32,

130
src/report.rs Normal file
View file

@ -0,0 +1,130 @@
use std::{
collections::BTreeMap,
fs::File,
path::{Path, PathBuf},
};
use anyhow::Result;
use chrono::{DateTime, Local};
use log::error;
use serde::{Deserialize, Serialize};
use crate::deobfuscate::DeobfData;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Report {
/// Rust package name (`rustypipe`)
pub package: String,
/// Package version (`0.1.0`)
pub version: String,
/// Date/Time when the event occurred
pub date: DateTime<Local>,
/// Report level
pub level: Level,
/// RustyPipe operation (e.g. `get_player`)
pub operation: String,
/// Error (if occurred)
pub error: Option<String>,
/// Detailed error/warning messages
pub msgs: Vec<String>,
/// Deobfuscation data (only for player requests)
#[serde(skip_serializing_if = "Option::is_none")]
pub deobf_data: Option<DeobfData>,
/// HTTP request data
pub http_request: HTTPRequest,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HTTPRequest {
/// Request URL
pub url: String,
/// HTTP method
pub method: String,
/// HTTP request header
pub req_header: BTreeMap<String, String>,
/// HTTP request body
pub req_body: String,
/// HTTP response status code
pub status: u16,
/// HTTP response body
pub resp_body: String,
}
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Level {
/// **Debug**: Operation successful, report generation was forced by setting
/// ``.report(true)``
DBG,
/// **Warning**: Operation successful, but some parts could not be deserialized
WRN,
/// **Error**: Operation failed
ERR,
}
pub trait Reporter {
fn report(&self, report: &Report);
}
pub struct FileReporter {
path: PathBuf,
}
impl FileReporter {
pub fn new<P: AsRef<Path>>(path: P) -> Self {
Self {
path: path.as_ref().to_path_buf(),
}
}
fn _report(&self, report: &Report) -> Result<()> {
#[cfg(not(feature = "report-yaml"))]
{
let report_path = get_report_path(&self.path, report, "json")?;
serde_json::to_writer_pretty(&File::create(report_path)?, &report)?;
}
#[cfg(feature = "report-yaml")]
{
let report_path = get_report_path(&self.path, report, "yaml")?;
serde_yaml::to_writer(&File::create(report_path)?, &report)?;
}
Ok(())
}
}
impl Default for FileReporter {
fn default() -> Self {
Self {
path: Path::new("rustypipe_reports").to_path_buf(),
}
}
}
impl Reporter for FileReporter {
fn report(&self, report: &Report) {
self._report(report)
.unwrap_or_else(|e| error!("Could not store report file. Err: {}", e));
}
}
fn get_report_path(root: &Path, report: &Report, ext: &str) -> Result<PathBuf> {
if !root.is_dir() {
std::fs::create_dir_all(root)?;
}
let filename_prefix = format!("{}_{:?}", report.date.format("%F_%H-%M-%S"), report.level);
let mut report_path = root.to_path_buf();
report_path.push(format!("{}.{}", filename_prefix, ext));
// ensure unique filename
for i in 1..u32::MAX {
if report_path.exists() {
report_path = root.to_path_buf();
report_path.push(format!("{}_{}.{}", filename_prefix, i, ext));
} else {
break;
}
}
Ok(report_path)
}
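// Minimal usage sketch (all values are placeholders, including the URL):
// store a single warning report in the default `rustypipe_reports` directory.
fn _report_example() {
    let reporter = FileReporter::default();
    reporter.report(&Report {
        package: "rustypipe".to_owned(),
        version: "0.1.0".to_owned(),
        date: Local::now(),
        level: Level::WRN,
        operation: "get_playlist".to_owned(),
        error: None,
        msgs: vec!["error deserializing item: ...".to_owned()],
        deobf_data: None,
        http_request: HTTPRequest {
            url: "https://www.youtube.com/youtubei/v1/browse".to_owned(),
            method: "POST".to_owned(),
            req_header: BTreeMap::new(),
            req_body: String::new(),
            status: 200,
            resp_body: String::new(),
        },
    });
    // Resulting file: rustypipe_reports/<date>_WRN.json
    // (or .yaml when the `report-yaml` feature is enabled).
}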

View file

@ -1,2 +1,40 @@
pub mod range;
pub mod text;
mod range;
mod vec_log_err;
pub use range::Range;
pub use vec_log_err::VecLogError;
use std::fmt::Debug;
/// The result of a deserialization/mapping operation.
///
/// It holds the mapped content (`c`) together with the warning messages
/// collected when minor errors occurred during deserialization or mapping
/// (e.g. individual list items that could not be deserialized).
#[derive(Clone)]
pub struct MapResult<T> {
pub c: T,
pub warnings: Vec<String>,
}
impl<T> Debug for MapResult<T>
where
T: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.c.fmt(f)
}
}
impl<T> Default for MapResult<T>
where
T: Default,
{
fn default() -> Self {
Self {
c: Default::default(),
warnings: Vec::new(),
}
}
}
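// Usage sketch: a typical consumer logs the collected warnings and keeps
// the content (`log` is already a dependency of the crate).
fn _unpack_example<T>(res: MapResult<T>) -> T {
    for w in &res.warnings {
        log::warn!("mapping warning: {}", w);
    }
    res.c
}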

View file

@ -223,11 +223,7 @@ impl<'de> DeserializeAs<'de, Vec<TextLink>> for TextLinks {
D: Deserializer<'de>,
{
let link = TextLinkInternal::deserialize(deserializer)?;
Ok(link
.runs
.iter()
.filter_map(|r| map_text_linkrun(r))
.collect())
Ok(link.runs.iter().filter_map(map_text_linkrun).collect())
}
}

View file

@ -0,0 +1,137 @@
use std::{fmt, marker::PhantomData};
use serde::{
de::{SeqAccess, Visitor},
Deserialize,
};
use serde_with::{de::DeserializeAsWrap, DeserializeAs};
use super::MapResult;
/// Deserializes a list of arbitrary items into a `MapResult`,
/// creating warnings for items that could not be deserialized.
///
/// This is similar to `VecSkipError`, but it does not silently ignore
/// faulty items.
pub struct VecLogError<T>(PhantomData<T>);
impl<'de, T, U> DeserializeAs<'de, MapResult<Vec<T>>> for VecLogError<U>
where
U: DeserializeAs<'de, T>,
{
fn deserialize_as<D>(deserializer: D) -> Result<MapResult<Vec<T>>, D::Error>
where
D: serde::Deserializer<'de>,
{
#[derive(serde::Deserialize)]
#[serde(
untagged,
bound(deserialize = "DeserializeAsWrap<T, TAs>: Deserialize<'de>")
)]
enum GoodOrError<'a, T, TAs>
where
TAs: DeserializeAs<'a, T>,
{
Good(DeserializeAsWrap<T, TAs>),
Error(serde_json::value::Value),
#[serde(skip)]
_JustAMarkerForTheLifetime(PhantomData<&'a u32>),
}
struct SeqVisitor<T, U> {
marker: PhantomData<T>,
marker2: PhantomData<U>,
}
impl<'de, T, U> Visitor<'de> for SeqVisitor<T, U>
where
U: DeserializeAs<'de, T>,
{
type Value = MapResult<Vec<T>>;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a sequence")
}
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'de>,
{
let mut values = Vec::with_capacity(seq.size_hint().unwrap_or_default());
let mut warnings = Vec::new();
while let Some(value) = seq.next_element()? {
match value {
GoodOrError::<T, U>::Good(value) => {
values.push(value.into_inner());
}
GoodOrError::<T, U>::Error(value) => {
warnings.push(format!(
"error deserializing item: {}",
serde_json::to_string(&value).unwrap_or_default()
));
}
_ => {}
}
}
Ok(MapResult {
c: values,
warnings,
})
}
}
let visitor = SeqVisitor::<T, U> {
marker: PhantomData,
marker2: PhantomData,
};
deserializer.deserialize_seq(visitor)
}
}
#[cfg(test)]
mod tests {
use serde::Deserialize;
use serde_with::serde_as;
use crate::serializer::MapResult;
#[serde_as]
#[derive(Debug, Deserialize)]
struct S {
#[serde_as(as = "crate::serializer::VecLogError<_>")]
items: MapResult<Vec<Item>>,
}
#[derive(Debug, Deserialize)]
struct Item {
name: String,
}
#[test]
fn test() {
let json = r#"{"items": [{"name": "i1"}, {"xyz": "i2"}, {"name": "i3"}, {"namra": "i4"}]}"#;
let res = serde_json::from_str::<S>(json).unwrap();
insta::assert_debug_snapshot!(res, @r###"
S {
items: [
Item {
name: "i1",
},
Item {
name: "i3",
},
],
}
"###);
insta::assert_debug_snapshot!(res.items.warnings, @r###"
[
"error deserializing item: {\"xyz\":\"i2\"}",
"error deserializing item: {\"namra\":\"i4\"}",
]
"###);
}
}

View file

@ -52,24 +52,24 @@ impl Mul<u8> for TimeAgo {
}
}
impl Into<DateTime<Local>> for TimeAgo {
fn into(self) -> DateTime<Local> {
impl From<TimeAgo> for DateTime<Local> {
fn from(ta: TimeAgo) -> Self {
let ts = Local::now();
match self.unit {
TimeUnit::Second => ts - Duration::seconds(self.n as i64),
TimeUnit::Minute => ts - Duration::minutes(self.n as i64),
TimeUnit::Hour => ts - Duration::hours(self.n as i64),
TimeUnit::Day => ts - Duration::days(self.n as i64),
TimeUnit::Week => ts - Duration::weeks(self.n as i64),
TimeUnit::Month => chronoutil::shift_months(ts, -(self.n as i32)),
TimeUnit::Year => chronoutil::shift_years(ts, -(self.n as i32)),
match ta.unit {
TimeUnit::Second => ts - Duration::seconds(ta.n as i64),
TimeUnit::Minute => ts - Duration::minutes(ta.n as i64),
TimeUnit::Hour => ts - Duration::hours(ta.n as i64),
TimeUnit::Day => ts - Duration::days(ta.n as i64),
TimeUnit::Week => ts - Duration::weeks(ta.n as i64),
TimeUnit::Month => chronoutil::shift_months(ts, -(ta.n as i32)),
TimeUnit::Year => chronoutil::shift_years(ts, -(ta.n as i32)),
}
}
}
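// Usage sketch: a parsed "3 weeks ago" resolved against the current time.
fn _timeago_example() -> DateTime<Local> {
    TimeAgo {
        n: 3,
        unit: TimeUnit::Week,
    }
    .into()
}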
impl Into<DateTime<Local>> for ParsedDate {
fn into(self) -> DateTime<Local> {
match self {
impl From<ParsedDate> for DateTime<Local> {
fn from(date: ParsedDate) -> Self {
match date {
ParsedDate::Absolute(date) => Local
.from_local_datetime(&NaiveDateTime::new(date, NaiveTime::from_hms(0, 0, 0)))
.unwrap(),
@ -78,7 +78,7 @@ impl Into<DateTime<Local>> for ParsedDate {
}
}
pub fn filter_str(string: &str) -> String {
fn filter_str(string: &str) -> String {
string
.to_lowercase()
.chars()
@ -103,29 +103,23 @@ fn parse_ta_token(entry: &dictionary::Entry, nd: bool, filtered_str: &str) -> Op
if entry.by_char {
filtered_str.chars().find_map(|word| {
tokens
.get(&word.to_string())
.map(|t| match t.unit {
tokens.get(&word.to_string()).and_then(|t| match t.unit {
Some(unit) => Some(TimeAgo { n: t.n * qu, unit }),
None => {
qu = t.n;
None
}
})
.flatten()
})
} else {
filtered_str.split_whitespace().find_map(|word| {
tokens
.get(word)
.map(|t| match t.unit {
tokens.get(word).and_then(|t| match t.unit {
Some(unit) => Some(TimeAgo { n: t.n * qu, unit }),
None => {
qu = t.n;
None
}
})
.flatten()
})
}
}
@ -137,7 +131,7 @@ fn parse_textual_month(entry: &dictionary::Entry, filtered_str: &str) -> Option<
} else {
filtered_str
.split_whitespace()
.find_map(|word| entry.months.get(word).map(|n| *n))
.find_map(|word| entry.months.get(word).copied())
}
}
@ -145,7 +139,7 @@ pub fn parse_timeago(lang: Language, textual_date: &str) -> Option<TimeAgo> {
let entry = dictionary::entry(lang);
let filtered_str = filter_str(textual_date);
let qu: u8 = util::parse_numeric(&textual_date).unwrap_or(1);
let qu: u8 = util::parse_numeric(textual_date).unwrap_or(1);
parse_ta_token(&entry, false, &filtered_str).map(|ta| ta * qu)
}
@ -163,8 +157,7 @@ pub fn parse_textual_date(lang: Language, textual_date: &str) -> Option<ParsedDa
match nums.len() {
0 => match parse_ta_token(&entry, true, &filtered_str) {
Some(timeago) => Some(ParsedDate::Relative(timeago)),
None => parse_ta_token(&entry, false, &filtered_str)
.map(|timeago| ParsedDate::Relative(timeago)),
None => parse_ta_token(&entry, false, &filtered_str).map(ParsedDate::Relative),
},
1 => parse_ta_token(&entry, false, &filtered_str)
.map(|timeago| ParsedDate::Relative(timeago * nums[0] as u8)),
@ -189,7 +182,7 @@ pub fn parse_textual_date(lang: Language, textual_date: &str) -> Option<ParsedDa
match (y, m, d) {
(Some(y), Some(m), Some(d)) => {
NaiveDate::from_ymd_opt(y.into(), m.into(), d.into())
.map(|d| ParsedDate::Absolute(d))
.map(ParsedDate::Absolute)
}
_ => None,
}

View file

@ -41,16 +41,15 @@ pub fn generate_content_playback_nonce() -> String {
///
/// `example.com/api?k1=v1&k2=v2 => example.com/api; {k1: v1, k2: v2}`
pub fn url_to_params(url: &str) -> Result<(String, BTreeMap<String, String>)> {
let parsed_url = Url::parse(url)?;
let mut parsed_url = Url::parse(url)?;
let url_params: BTreeMap<String, String> = parsed_url
.query_pairs()
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect();
let mut url_base = parsed_url.clone();
url_base.set_query(None);
parsed_url.set_query(None);
Ok((url_base.to_string(), url_params))
Ok((parsed_url.to_string(), url_params))
}
/// Parse a string after removing all non-numeric characters
@ -90,6 +89,21 @@ where
numbers
}
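/// Calculate a randomized retry delay: exponential backoff
/// (`backoff_base ^ n_past_retries`) scaled by a random jitter factor and
/// clamped between `min_retry_interval` and `max_retry_interval`
/// (all intervals in milliseconds, judging by the tests below).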
pub fn retry_delay(
n_past_retries: u32,
min_retry_interval: u32,
max_retry_interval: u32,
backoff_base: u32,
) -> u32 {
let unjittered_delay = backoff_base.checked_pow(n_past_retries).unwrap_or(u32::MAX);
let jitter_factor = rand::thread_rng().gen_range(800..1500);
let jittered_delay = unjittered_delay
.checked_mul(jitter_factor)
.unwrap_or(u32::MAX);
min_retry_interval.max(jittered_delay.min(max_retry_interval))
}
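// Usage sketch: wait before the next retry attempt (delay taken as ms;
// tokio's `time` feature is enabled in the crate's dependencies).
async fn _backoff_example(attempt: u32) {
    let delay = retry_delay(attempt, 1000, 60000, 3);
    tokio::time::sleep(std::time::Duration::from_millis(delay as u64)).await;
}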
#[cfg(test)]
mod tests {
use rstest::rstest;
@ -112,4 +126,20 @@ mod tests {
let n = parse_numeric_vec::<u32>(string);
assert_eq!(n, expect);
}
#[rstest]
#[case(0, 800, 1500)]
#[case(1, 2400, 4500)]
#[case(2, 7200, 13500)]
#[case(100, 60000, 60000)]
fn t_retry_delay(#[case] n: u32, #[case] expect_min: u32, #[case] expect_max: u32) {
let res = retry_delay(n, 1000, 60000, 3);
assert!(
res >= expect_min && res <= expect_max,
"res: {} not within {} and {}",
res,
expect_min,
expect_max
);
}
}