Compare commits
No commits in common. "cbeb14f3fd235cd01ed118c4237e3581a24d7541" and "a2bbc850a735afa29943146d90410ba1ebe8fa6a" have entirely different histories.
cbeb14f3fd ... a2bbc850a7
47 changed files with 493 additions and 862 deletions
@@ -6,7 +6,6 @@ authors = ["ThetaDev <t.testboy@gmail.com>"]
license = "GPL-3.0"
description = "Client for the public YouTube / YouTube Music API (Innertube), inspired by NewPipe"
keywords = ["youtube", "video", "music"]
categories = ["api-bindings", "multimedia"]

include = ["/src", "README.md", "LICENSE", "!snapshots"]

129 README.md
@@ -2,16 +2,16 @@

[![CI status](https://ci.thetadev.de/api/badges/ThetaDev/rustypipe/status.svg)](https://ci.thetadev.de/ThetaDev/rustypipe)

Client for the public YouTube / YouTube Music API (Innertube), inspired by
[NewPipe](https://github.com/TeamNewPipe/NewPipeExtractor).
Client for the public YouTube / YouTube Music API (Innertube),
inspired by [NewPipe](https://github.com/TeamNewPipe/NewPipeExtractor).

## Features

### YouTube

- **Player** (video/audio streams, subtitles)
- **VideoDetails** (metadata, comments, recommended videos)
- **Playlist**
- **VideoDetails** (metadata, comments, recommended videos)
- **Channel** (videos, shorts, livestreams, playlists, info, search)
- **ChannelRSS**
- **Search** (with filters)
@@ -31,126 +31,3 @@ Client for the public YouTube / YouTube Music API (Innertube), inspired by
- **Moods/Genres**
- **Charts**
- **New** (albums, music videos)

## Getting started

### Cargo.toml

```toml
[dependencies]
rustypipe = "0.1.0"
tokio = { version = "1.20.0", features = ["macros", "rt-multi-thread"] }
```

### Watch a video

```rust ignore
use std::process::Command;

use rustypipe::{client::RustyPipe, param::StreamFilter};

#[tokio::main]
async fn main() {
    // Create a client
    let rp = RustyPipe::new();
    // Fetch the player
    let player = rp.query().player("pPvd8UxmSbQ").await.unwrap();
    // Select the best streams
    let (video, audio) = player.select_video_audio_stream(&StreamFilter::default());

    // Open mpv player
    let mut args = vec![video.expect("no video stream").url.to_owned()];
    if let Some(audio) = audio {
        args.push(format!("--audio-file={}", audio.url));
    }
    Command::new("mpv").args(args).output().unwrap();
}
```

### Get a playlist

```rust ignore
use rustypipe::client::RustyPipe;

#[tokio::main]
async fn main() {
    // Create a client
    let rp = RustyPipe::new();
    // Get the playlist
    let mut playlist = rp
        .query()
        .playlist("PL2_OBreMn7FrsiSW0VDZjdq0xqUKkZYHT")
        .await
        .unwrap();
    // Get all items (maximum: 1000)
    playlist.videos.extend_limit(rp.query(), 1000).await.unwrap();

    println!("Name: {}", playlist.name);
    println!("Author: {}", playlist.channel.unwrap().name);
    println!("Last update: {}", playlist.last_update.unwrap());

    playlist
        .videos
        .items
        .iter()
        .for_each(|v| println!("[{}] {} ({}s)", v.id, v.name, v.length));
}
```

**Output:**

```txt
Name: Homelab
Author: Jeff Geerling
Last update: 2023-05-04
[cVWF3u-y-Zg] I put a computer in my computer (720s)
[ecdm3oA-QdQ] 6-in-1: Build a 6-node Ceph cluster on this Mini ITX Motherboard (783s)
[xvE4HNJZeIg] Scrapyard Server: Fastest all-SSD NAS! (733s)
[RvnG-ywF6_s] Nanosecond clock sync with a Raspberry Pi (836s)
[R2S2RMNv7OU] I made the Petabyte Raspberry Pi even faster! (572s)
[FG--PtrDmw4] Hiding Macs in my Rack! (515s)
...
```

### Get a channel

```rust ignore
use rustypipe::client::RustyPipe;

#[tokio::main]
async fn main() {
    // Create a client
    let rp = RustyPipe::new();
    // Get the channel
    let channel = rp
        .query()
        .channel_videos("UCl2mFZoRqjw_ELax4Yisf6w")
        .await
        .unwrap();

    println!("Name: {}", channel.name);
    println!("Description: {}", channel.description);
    println!("Subscribers: {}", channel.subscriber_count.unwrap());

    channel
        .content
        .items
        .iter()
        .for_each(|v| println!("[{}] {} ({}s)", v.id, v.name, v.length.unwrap()));
}
```

**Output:**

```txt
Name: Louis Rossmann
Description: I discuss random things of interest to me. (...)
Subscribers: 1780000
[qBHgJx_rb8E] Introducing Rossmann senior, a genuine fossil 😃 (122s)
[TmV8eAtXc3s] Am I wrong about CompTIA? (592s)
[CjOJJc1qzdY] How FUTO projects loosen Google's grip on your life! (588s)
[0A10JtkkL9A] a private moment between a man and his kitten (522s)
[zbHq5_1Cd5U] Is Texas mandating auto repair shops use OEM parts? SB1083 analysis & breakdown; tldr, no. (645s)
[6Fv8bd9ICb4] Who owns this? (199s)
...
```
@@ -2,11 +2,6 @@
name = "rustypipe-cli"
version = "0.1.0"
edition = "2021"
authors = ["ThetaDev <t.testboy@gmail.com>"]
license = "GPL-3.0"
description = "CLI for RustyPipe - download videos and extract data from YouTube / YouTube Music"
keywords = ["youtube", "video", "music"]
categories = ["multimedia"]

[features]
default = ["rustls-tls-native-roots"]
@ -1,5 +1,3 @@
|
|||
#![warn(clippy::todo, clippy::dbg_macro)]
|
||||
|
||||
use std::{path::PathBuf, time::Duration};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
|
@ -283,9 +281,9 @@ fn print_data<T: Serialize>(data: &T, format: Format, pretty: bool) {
|
|||
match format {
|
||||
Format::Json => {
|
||||
if pretty {
|
||||
serde_json::to_writer_pretty(stdout, data).unwrap();
|
||||
serde_json::to_writer_pretty(stdout, data).unwrap()
|
||||
} else {
|
||||
serde_json::to_writer(stdout, data).unwrap();
|
||||
serde_json::to_writer(stdout, data).unwrap()
|
||||
}
|
||||
}
|
||||
Format::Yaml => serde_yaml::to_writer(stdout, data).unwrap(),
|
||||
|
@ -362,7 +360,7 @@ async fn download_videos(
|
|||
&video.id,
|
||||
&video.name,
|
||||
output_dir,
|
||||
output_fname.clone(),
|
||||
output_fname.to_owned(),
|
||||
resolution,
|
||||
"ffmpeg",
|
||||
rp,
|
||||
|
@ -634,7 +632,9 @@ async fn main() {
|
|||
} => match music {
|
||||
None => match channel {
|
||||
Some(channel) => {
|
||||
rustypipe::validate::channel_id(&channel).unwrap();
|
||||
if !rustypipe::validate::channel_id(&channel) {
|
||||
panic!("invalid channel id")
|
||||
}
|
||||
let res = rp.query().channel_search(&channel, &query).await.unwrap();
|
||||
print_data(&res, format, pretty);
|
||||
}
|
||||
|
|
|
@ -22,7 +22,7 @@ pub enum ABTest {
|
|||
TrendsPageHeaderRenderer = 5,
|
||||
}
|
||||
|
||||
const TESTS_TO_RUN: [ABTest; 2] = [ABTest::TrendsVideoTab, ABTest::TrendsPageHeaderRenderer];
|
||||
const TESTS_TO_RUN: [ABTest; 1] = [ABTest::TrendsVideoTab];
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ABTestRes {
|
||||
|
@ -102,10 +102,10 @@ pub async fn run_test(
|
|||
let count = results.iter().filter(|(p, _)| *p).count();
|
||||
let vd_present = results
|
||||
.iter()
|
||||
.find_map(|(p, vd)| if *p { Some(vd.clone()) } else { None });
|
||||
.find_map(|(p, vd)| if *p { Some(vd.to_owned()) } else { None });
|
||||
let vd_absent = results
|
||||
.iter()
|
||||
.find_map(|(p, vd)| if *p { None } else { Some(vd.clone()) });
|
||||
.find_map(|(p, vd)| if !*p { Some(vd.to_owned()) } else { None });
|
||||
|
||||
(count, vd_present, vd_absent)
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@ use path_macro::path;
|
|||
use rustypipe::{
|
||||
client::{ClientType, RustyPipe, RustyPipeQuery},
|
||||
model::AlbumType,
|
||||
param::{Language, LANGUAGES},
|
||||
param::{locale::LANGUAGES, Language},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
|
||||
|
@ -58,7 +58,7 @@ pub fn write_samples_to_dict() {
|
|||
let collected: BTreeMap<Language, BTreeMap<AlbumType, String>> =
|
||||
serde_json::from_reader(BufReader::new(json_file)).unwrap();
|
||||
let mut dict = util::read_dict();
|
||||
let langs = dict.keys().copied().collect::<Vec<_>>();
|
||||
let langs = dict.keys().map(|k| k.to_owned()).collect::<Vec<_>>();
|
||||
|
||||
for lang in langs {
|
||||
let dict_entry = dict.entry(lang).or_default();
|
||||
|
@ -66,13 +66,13 @@ pub fn write_samples_to_dict() {
|
|||
let mut e_langs = dict_entry.equivalent.clone();
|
||||
e_langs.push(lang);
|
||||
|
||||
for lang in &e_langs {
|
||||
e_langs.iter().for_each(|lang| {
|
||||
collected.get(lang).unwrap().iter().for_each(|(t, v)| {
|
||||
dict_entry
|
||||
.album_types
|
||||
.insert(v.to_lowercase().trim().to_owned(), *t);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
util::write_dict(dict);
|
||||
|
|
|
@ -11,7 +11,7 @@ use once_cell::sync::Lazy;
|
|||
use path_macro::path;
|
||||
use regex::Regex;
|
||||
use rustypipe::client::{ClientType, RustyPipe, RustyPipeQuery};
|
||||
use rustypipe::param::{Language, LANGUAGES};
|
||||
use rustypipe::param::{locale::LANGUAGES, Language};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::model::{Channel, ContinuationResponse};
|
||||
|
@ -111,7 +111,7 @@ pub async fn collect_large_numbers(concurrency: usize) {
|
|||
.unwrap();
|
||||
|
||||
channel.view_counts.iter().for_each(|(num, txt)| {
|
||||
entry.insert(txt.clone(), *num);
|
||||
entry.insert(txt.to_owned(), *num);
|
||||
});
|
||||
entry.insert(channel.subscriber_count, subscriber_counts[*ch_id]);
|
||||
|
||||
|
@ -147,7 +147,7 @@ pub fn write_samples_to_dict() {
|
|||
let collected_nums: CollectedNumbers =
|
||||
serde_json::from_reader(BufReader::new(json_file)).unwrap();
|
||||
let mut dict = util::read_dict();
|
||||
let langs = dict.keys().copied().collect::<Vec<_>>();
|
||||
let langs = dict.keys().map(|k| k.to_owned()).collect::<Vec<_>>();
|
||||
|
||||
static POINT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"\d(\.|,)\d{1,3}(?:\D|$)").unwrap());
|
||||
|
||||
|
@ -176,7 +176,10 @@ pub fn write_samples_to_dict() {
|
|||
})
|
||||
.unwrap();
|
||||
|
||||
let decimal_point = if comma_decimal { "," } else { "." };
|
||||
let decimal_point = match comma_decimal {
|
||||
true => ",",
|
||||
false => ".",
|
||||
};
|
||||
|
||||
// Search for tokens
|
||||
|
||||
|
@ -214,17 +217,13 @@ pub fn write_samples_to_dict() {
|
|||
for lang in e_langs {
|
||||
let entry = collected_nums.get(&lang).unwrap();
|
||||
|
||||
for (txt, val) in entry.iter() {
|
||||
entry.iter().for_each(|(txt, val)| {
|
||||
let filtered = util::filter_largenumstr(txt);
|
||||
let mag = get_mag(*val);
|
||||
|
||||
let tokens: Vec<String> = if dict_entry.by_char || lang == Language::Ko {
|
||||
filtered.chars().map(|c| c.to_string()).collect()
|
||||
} else {
|
||||
filtered
|
||||
.split_whitespace()
|
||||
.map(std::string::ToString::to_string)
|
||||
.collect()
|
||||
let tokens: Vec<String> = match dict_entry.by_char || lang == Language::Ko {
|
||||
true => filtered.chars().map(|c| c.to_string()).collect(),
|
||||
false => filtered.split_whitespace().map(|c| c.to_string()).collect(),
|
||||
};
|
||||
|
||||
match util::parse_numeric::<u64>(txt.split(decimal_point).next().unwrap()) {
|
||||
|
@ -232,7 +231,7 @@ pub fn write_samples_to_dict() {
|
|||
let mag_before_point = get_mag(num_before_point);
|
||||
let mut mag_remaining = mag - mag_before_point;
|
||||
|
||||
for t in &tokens {
|
||||
tokens.iter().for_each(|t| {
|
||||
// These tokens are correct in all languages
|
||||
// and are used to parse combined prefixes like `1.1K crore` (en-IN)
|
||||
let known_tmag: u8 = if t.len() == 1 {
|
||||
|
@ -252,26 +251,26 @@ pub fn write_samples_to_dict() {
|
|||
.checked_sub(known_tmag)
|
||||
.expect("known magnitude incorrect");
|
||||
} else {
|
||||
insert_token(t.clone(), mag_remaining);
|
||||
}
|
||||
insert_nd_token(t.clone(), None);
|
||||
insert_token(t.to_owned(), mag_remaining);
|
||||
}
|
||||
insert_nd_token(t.to_owned(), None);
|
||||
});
|
||||
}
|
||||
Err(e) => {
|
||||
if matches!(e.kind(), std::num::IntErrorKind::Empty) {
|
||||
// Text does not contain any digits, search for nd_tokens
|
||||
for t in &tokens {
|
||||
tokens.iter().for_each(|t| {
|
||||
insert_nd_token(
|
||||
t.clone(),
|
||||
t.to_owned(),
|
||||
Some((*val).try_into().expect("nd_token value too large")),
|
||||
);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
panic!("{e}, txt: {txt}")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Insert collected data into dictionary
|
||||
|
@ -370,7 +369,7 @@ async fn get_channel(query: &RustyPipeQuery, channel_id: &str) -> Result<Channel
|
|||
.navigation_endpoint
|
||||
.continuation_command
|
||||
.token
|
||||
.clone()
|
||||
.to_owned()
|
||||
})
|
||||
});
|
||||
|
||||
|
@ -381,7 +380,7 @@ async fn get_channel(query: &RustyPipeQuery, channel_id: &str) -> Result<Channel
|
|||
let v = &itm.rich_item_renderer.content.video_renderer;
|
||||
(
|
||||
util::parse_numeric(&v.view_count_text.text).unwrap_or_default(),
|
||||
v.short_view_count_text.text.clone(),
|
||||
v.short_view_count_text.text.to_owned(),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
@ -400,20 +399,22 @@ async fn get_channel(query: &RustyPipeQuery, channel_id: &str) -> Result<Channel
|
|||
|
||||
let continuation = serde_json::from_str::<ContinuationResponse>(&resp)?;
|
||||
|
||||
for action in &continuation.on_response_received_actions {
|
||||
action
|
||||
.reload_continuation_items_command
|
||||
continuation
|
||||
.on_response_received_actions
|
||||
.iter()
|
||||
.for_each(|a| {
|
||||
a.reload_continuation_items_command
|
||||
.continuation_items
|
||||
.iter()
|
||||
.for_each(|itm| {
|
||||
let v = &itm.rich_item_renderer.content.video_renderer;
|
||||
view_counts.insert(
|
||||
util::parse_numeric(&v.view_count_text.text).unwrap(),
|
||||
v.short_view_count_text.text.clone(),
|
||||
v.short_view_count_text.text.to_owned(),
|
||||
);
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok(ChannelData {
|
||||
view_counts,
|
||||
|
|
|
@ -9,7 +9,7 @@ use futures::{stream, StreamExt};
|
|||
use path_macro::path;
|
||||
use rustypipe::{
|
||||
client::RustyPipe,
|
||||
param::{Language, LANGUAGES},
|
||||
param::{locale::LANGUAGES, Language},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
|
@ -118,7 +118,7 @@ pub fn write_samples_to_dict() {
|
|||
let collected_dates: CollectedDates =
|
||||
serde_json::from_reader(BufReader::new(json_file)).unwrap();
|
||||
let mut dict = util::read_dict();
|
||||
let langs = dict.keys().copied().collect::<Vec<_>>();
|
||||
let langs = dict.keys().map(|k| k.to_owned()).collect::<Vec<_>>();
|
||||
|
||||
let months = [
|
||||
DateCase::Jan,
|
||||
|
@ -159,7 +159,7 @@ pub fn write_samples_to_dict() {
|
|||
.for_each(|l| datestr_tables.push(collected_dates.get(l).unwrap()));
|
||||
|
||||
let dict_entry = dict.entry(lang).or_default();
|
||||
let mut num_order = String::new();
|
||||
let mut num_order = "".to_owned();
|
||||
|
||||
let collect_nd_tokens = !matches!(
|
||||
lang,
|
||||
|
@ -236,30 +236,30 @@ pub fn write_samples_to_dict() {
|
|||
});
|
||||
});
|
||||
|
||||
for (word, m) in &month_words {
|
||||
month_words.iter().for_each(|(word, m)| {
|
||||
if *m != 0 {
|
||||
dict_entry.months.insert(word.clone(), *m as u8);
|
||||
dict_entry.months.insert(word.to_owned(), *m as u8);
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
if collect_nd_tokens {
|
||||
for (word, n) in &td_words {
|
||||
td_words.iter().for_each(|(word, n)| {
|
||||
match n {
|
||||
// Today
|
||||
1 => {
|
||||
dict_entry
|
||||
.timeago_nd_tokens
|
||||
.insert(word.clone(), "0D".to_owned());
|
||||
.insert(word.to_owned(), "0D".to_owned());
|
||||
}
|
||||
// Yesterday
|
||||
2 => {
|
||||
dict_entry
|
||||
.timeago_nd_tokens
|
||||
.insert(word.clone(), "1D".to_owned());
|
||||
.insert(word.to_owned(), "1D".to_owned());
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
if datestr_tables.len() == 1 && dict_entry.timeago_nd_tokens.len() > 2 {
|
||||
println!(
|
||||
|
|
|
@ -9,7 +9,7 @@ use futures::{stream, StreamExt};
|
|||
use path_macro::path;
|
||||
use rustypipe::{
|
||||
client::{ClientType, RustyPipe, RustyPipeQuery},
|
||||
param::{Language, LANGUAGES},
|
||||
param::{locale::LANGUAGES, Language},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
|
@ -67,7 +67,7 @@ pub fn parse_video_durations() {
|
|||
let durations: CollectedDurations = serde_json::from_reader(BufReader::new(json_file)).unwrap();
|
||||
|
||||
let mut dict = util::read_dict();
|
||||
let langs = dict.keys().copied().collect::<Vec<_>>();
|
||||
let langs = dict.keys().map(|k| k.to_owned()).collect::<Vec<_>>();
|
||||
|
||||
for lang in langs {
|
||||
let dict_entry = dict.entry(lang).or_default();
|
||||
|
@ -83,7 +83,7 @@ pub fn parse_video_durations() {
|
|||
by_char: bool,
|
||||
val: u32,
|
||||
expect: u32,
|
||||
w: &str,
|
||||
w: String,
|
||||
unit: TimeUnit,
|
||||
) -> bool {
|
||||
let ok = val == expect || val * 2 == expect;
|
||||
|
@ -168,23 +168,23 @@ pub fn parse_video_durations() {
|
|||
let p2_n = p2.digits.parse::<u32>().unwrap_or(1);
|
||||
|
||||
assert!(
|
||||
check_add_word(words, by_char, p1_n, m, &p1.word, TimeUnit::Minute),
|
||||
check_add_word(words, by_char, p1_n, m, p1.word, TimeUnit::Minute),
|
||||
"{txt}: min parse error"
|
||||
);
|
||||
assert!(
|
||||
check_add_word(words, by_char, p2_n, s, &p2.word, TimeUnit::Second),
|
||||
check_add_word(words, by_char, p2_n, s, p2.word, TimeUnit::Second),
|
||||
"{txt}: sec parse error"
|
||||
);
|
||||
}
|
||||
None => {
|
||||
if s == 0 {
|
||||
assert!(
|
||||
check_add_word(words, by_char, p1_n, m, &p1.word, TimeUnit::Minute),
|
||||
check_add_word(words, by_char, p1_n, m, p1.word, TimeUnit::Minute),
|
||||
"{txt}: min parse error"
|
||||
);
|
||||
} else if m == 0 {
|
||||
assert!(
|
||||
check_add_word(words, by_char, p1_n, s, &p1.word, TimeUnit::Second),
|
||||
check_add_word(words, by_char, p1_n, s, p1.word, TimeUnit::Second),
|
||||
"{txt}: sec parse error"
|
||||
);
|
||||
} else {
|
||||
|
@ -206,11 +206,11 @@ pub fn parse_video_durations() {
|
|||
|
||||
// dbg!(&words);
|
||||
|
||||
for (k, v) in words {
|
||||
words.into_iter().for_each(|(k, v)| {
|
||||
if let Some(v) = v {
|
||||
dict_entry.timeago_tokens.insert(k, v.to_string());
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -345,8 +345,7 @@ mod tests {
|
|||
let ul: LanguageIdentifier =
|
||||
lang.to_string().split('-').next().unwrap().parse().unwrap();
|
||||
|
||||
let pr = PluralRules::create(ul, PluralRuleType::CARDINAL)
|
||||
.unwrap_or_else(|_| panic!("{}", lang.to_string()));
|
||||
let pr = PluralRules::create(ul, PluralRuleType::CARDINAL).expect(&lang.to_string());
|
||||
|
||||
let mut plurals_m: HashSet<PluralCategory> = HashSet::new();
|
||||
for n in 1..60 {
|
||||
|
@ -354,11 +353,11 @@ mod tests {
|
|||
}
|
||||
let mut plurals_s = plurals_m.clone();
|
||||
|
||||
for v in durations.values() {
|
||||
durations.values().for_each(|v| {
|
||||
let (m, s) = split_duration(*v);
|
||||
plurals_m.remove(&pr.select(m).unwrap().into());
|
||||
plurals_s.remove(&pr.select(s).unwrap().into());
|
||||
}
|
||||
});
|
||||
|
||||
if !plurals_m.is_empty() {
|
||||
println!("{lang}: missing minutes {plurals_m:?}");
|
||||
|
|
|
@ -35,18 +35,14 @@ pub fn generate_dictionary() {
|
|||
|
||||
let code_head = r#"// This file is automatically generated. DO NOT EDIT.
|
||||
// See codegen/gen_dictionary.rs for the generation code.
|
||||
#![allow(clippy::unreadable_literal)]
|
||||
|
||||
//! The dictionary contains the information required to parse dates and numbers
|
||||
//! in all supported languages.
|
||||
|
||||
use crate::{
|
||||
model::AlbumType,
|
||||
param::Language,
|
||||
util::timeago::{DateCmp, TaToken, TimeUnit},
|
||||
};
|
||||
|
||||
/// Dictionary entry containing language-specific parsing information
|
||||
/// The dictionary contains the information required to parse dates and numbers
|
||||
/// in all supported languages.
|
||||
pub(crate) struct Entry {
|
||||
/// Tokens for parsing timeago strings.
|
||||
///
|
||||
|
@ -94,11 +90,11 @@ pub(crate) fn entry(lang: Language) -> Entry {
|
|||
"#
|
||||
.to_owned();
|
||||
|
||||
for (lang, entry) in &dict {
|
||||
dict.iter().for_each(|(lang, entry)| {
|
||||
// Match selector
|
||||
let mut selector = format!("Language::{lang:?}");
|
||||
entry.equivalent.iter().for_each(|eq| {
|
||||
write!(selector, " | Language::{eq:?}").unwrap();
|
||||
let _ = write!(selector, " | Language::{eq:?}");
|
||||
});
|
||||
|
||||
// Timeago tokens
|
||||
|
@ -136,7 +132,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
|
|||
// Date order
|
||||
let mut date_order = "&[".to_owned();
|
||||
entry.date_order.chars().for_each(|c| {
|
||||
write!(date_order, "DateCmp::{c}, ").unwrap();
|
||||
let _ = write!(date_order, "DateCmp::{c}, ");
|
||||
});
|
||||
date_order = date_order.trim_end_matches([' ', ',']).to_owned() + "]";
|
||||
|
||||
|
@ -158,31 +154,16 @@ pub(crate) fn entry(lang: Language) -> Entry {
|
|||
album_types.entry(txt, &format!("AlbumType::{album_type:?}"));
|
||||
});
|
||||
|
||||
let code_ta_tokens = &ta_tokens
|
||||
.build()
|
||||
.to_string()
|
||||
.replace('\n', "\n ");
|
||||
let code_ta_nd_tokens = &ta_nd_tokens
|
||||
.build()
|
||||
.to_string()
|
||||
.replace('\n', "\n ");
|
||||
let code_ta_tokens = &ta_tokens.build().to_string().replace('\n', "\n ");
|
||||
let code_ta_nd_tokens = &ta_nd_tokens.build().to_string().replace('\n', "\n ");
|
||||
let code_months = &months.build().to_string().replace('\n', "\n ");
|
||||
let code_number_tokens = &number_tokens
|
||||
.build()
|
||||
.to_string()
|
||||
.replace('\n', "\n ");
|
||||
let code_number_nd_tokens = &number_nd_tokens
|
||||
.build()
|
||||
.to_string()
|
||||
.replace('\n', "\n ");
|
||||
let code_album_types = &album_types
|
||||
.build()
|
||||
.to_string()
|
||||
.replace('\n', "\n ");
|
||||
let code_number_tokens = &number_tokens.build().to_string().replace('\n', "\n ");
|
||||
let code_number_nd_tokens = &number_nd_tokens.build().to_string().replace('\n', "\n ");
|
||||
let code_album_types = &album_types.build().to_string().replace('\n', "\n ");
|
||||
|
||||
write!(code_timeago_tokens, "{} => Entry {{\n timeago_tokens: {},\n date_order: {},\n months: {},\n timeago_nd_tokens: {},\n comma_decimal: {:?},\n number_tokens: {},\n number_nd_tokens: {},\n album_types: {},\n }},\n ",
|
||||
selector, code_ta_tokens, date_order, code_months, code_ta_nd_tokens, entry.comma_decimal, code_number_tokens, code_number_nd_tokens, code_album_types).unwrap();
|
||||
}
|
||||
});
|
||||
|
||||
code_timeago_tokens = code_timeago_tokens.trim_end().to_owned() + "\n }\n}\n";
|
||||
|
||||
|
|
|
@ -227,7 +227,7 @@ pub enum Country {
|
|||
"#
|
||||
.to_owned();
|
||||
|
||||
for (code, native_name) in &languages {
|
||||
languages.iter().for_each(|(code, native_name)| {
|
||||
let enum_name = code
|
||||
.split('-')
|
||||
.map(|c| {
|
||||
|
@ -262,10 +262,10 @@ pub enum Country {
|
|||
" Language::{enum_name} => \"{native_name}\","
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
});
|
||||
code_langs += "}\n";
|
||||
|
||||
for (c, n) in &countries {
|
||||
countries.iter().for_each(|(c, n)| {
|
||||
let enum_name = c[0..1].to_owned().to_uppercase() + &c[1..].to_owned().to_lowercase();
|
||||
|
||||
// Country enum
|
||||
|
@ -281,7 +281,7 @@ pub enum Country {
|
|||
" Country::{enum_name} => \"{n}\","
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
});
|
||||
|
||||
// Add Country::Zz / Global
|
||||
code_countries += " /// Global (can only be used for music charts)\n";
|
||||
|
@ -368,8 +368,8 @@ fn map_language_section(section: &CompactLinkRendererWrap) -> BTreeMap<String, S
|
|||
.actions[0]
|
||||
.select_language_command
|
||||
.hl
|
||||
.clone(),
|
||||
i.compact_link_renderer.title.text.clone(),
|
||||
.to_owned(),
|
||||
i.compact_link_renderer.title.text.to_owned(),
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
#![warn(clippy::todo)]
|
||||
|
||||
mod abtest;
|
||||
mod collect_album_types;
|
||||
mod collect_large_numbers;
|
||||
|
@ -92,7 +90,7 @@ async fn main() {
|
|||
}
|
||||
None => {
|
||||
let res = abtest::run_all_tests(n, cli.concurrency).await;
|
||||
println!("{}", serde_json::to_string_pretty(&res).unwrap());
|
||||
println!("{}", serde_json::to_string_pretty(&res).unwrap())
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@@ -2,11 +2,6 @@
name = "rustypipe-downloader"
version = "0.1.0"
edition = "2021"
authors = ["ThetaDev <t.testboy@gmail.com>"]
license = "GPL-3.0"
description = "Downloader extension for RustyPipe"
keywords = ["youtube", "video", "music"]
categories = ["multimedia"]

[features]
default = ["default-tls"]
@ -1,5 +1,3 @@
|
|||
#![warn(clippy::todo, clippy::dbg_macro)]
|
||||
|
||||
//! # YouTube audio/video downloader
|
||||
|
||||
mod util;
|
||||
|
@ -27,8 +25,8 @@ use util::DownloadError;
|
|||
|
||||
type Result<T> = core::result::Result<T, DownloadError>;
|
||||
|
||||
const CHUNK_SIZE_MIN: u64 = 9_000_000;
|
||||
const CHUNK_SIZE_MAX: u64 = 10_000_000;
|
||||
const CHUNK_SIZE_MIN: u64 = 9000000;
|
||||
const CHUNK_SIZE_MAX: u64 = 10000000;
|
||||
|
||||
fn get_download_range(offset: u64, size: Option<u64>) -> Range<u64> {
|
||||
let mut rng = rand::thread_rng();
|
||||
|
@ -36,7 +34,7 @@ fn get_download_range(offset: u64, size: Option<u64>) -> Range<u64> {
|
|||
let mut chunk_end = offset + chunk_size;
|
||||
|
||||
if let Some(size) = size {
|
||||
chunk_end = chunk_end.min(size - 1);
|
||||
chunk_end = chunk_end.min(size - 1)
|
||||
}
|
||||
|
||||
Range {
|
||||
|
@ -298,7 +296,7 @@ pub async fn download_video(
|
|||
) -> Result<()> {
|
||||
// Download filepath
|
||||
let download_dir = PathBuf::from(output_dir);
|
||||
let title = player_data.details.name.clone();
|
||||
let title = player_data.details.name.to_owned();
|
||||
let output_fname_set = output_fname.is_some();
|
||||
let output_fname = output_fname.unwrap_or_else(|| {
|
||||
filenamify::filenamify(format!("{} [{}]", title, player_data.details.id))
|
||||
|
@ -334,13 +332,14 @@ pub async fn download_video(
|
|||
return Err(DownloadError::Input(
|
||||
format!("File {} already exists", output_path.to_string_lossy()).into(),
|
||||
))?;
|
||||
}
|
||||
} else {
|
||||
info!(
|
||||
"Downloaded video {} already exists",
|
||||
output_path.to_string_lossy()
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
match (video, audio) {
|
||||
// Downloading combined video/audio stream (no conversion)
|
||||
|
@ -365,7 +364,7 @@ pub async fn download_video(
|
|||
output_fname,
|
||||
v.format.extension()
|
||||
)),
|
||||
url: v.url.clone(),
|
||||
url: v.url.to_owned(),
|
||||
video_codec: Some(v.codec),
|
||||
audio_codec: None,
|
||||
});
|
||||
|
@ -377,10 +376,10 @@ pub async fn download_video(
|
|||
output_fname,
|
||||
a.format.extension()
|
||||
)),
|
||||
url: a.url.clone(),
|
||||
url: a.url.to_owned(),
|
||||
video_codec: None,
|
||||
audio_codec: Some(a.codec),
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
pb.set_message(format!("Downloading {title}"));
|
||||
|
@ -397,7 +396,7 @@ pub async fn download_video(
|
|||
|
||||
// Delete original files
|
||||
stream::iter(&downloads)
|
||||
.map(|d| fs::remove_file(d.file.clone()))
|
||||
.map(|d| fs::remove_file(d.file.to_owned()))
|
||||
.buffer_unordered(downloads.len())
|
||||
.collect::<Vec<_>>()
|
||||
.await
|
||||
|
@ -418,7 +417,7 @@ async fn download_streams(
|
|||
let n = downloads.len();
|
||||
|
||||
stream::iter(downloads)
|
||||
.map(|d| download_single_file(&d.url, d.file.clone(), http.clone(), pb.clone()))
|
||||
.map(|d| download_single_file(&d.url, d.file.to_owned(), http.clone(), pb.clone()))
|
||||
.buffer_unordered(n)
|
||||
.collect::<Vec<_>>()
|
||||
.await
|
||||
|
@ -440,7 +439,7 @@ async fn convert_streams<P: Into<PathBuf>>(
|
|||
|
||||
downloads.iter().enumerate().for_each(|(i, d)| {
|
||||
args.push("-i".into());
|
||||
args.push(d.file.clone().into());
|
||||
args.push(d.file.to_owned().into());
|
||||
|
||||
mapping_args.push("-map".into());
|
||||
mapping_args.push(i.to_string().into());
|
||||
|
|
|
@@ -1,18 +0,0 @@
Source: https://github.com/TeamNewPipe/NewPipe/pull/9182#issuecomment-1508938841

Note: we recently discovered that YouTube system playlists exist for regular videos of channels, for livestreams, and shorts as chronological ones (the shorts one was already known) and popular ones.
They basically correspond to the results of the sort filters available on the channel's streams tab in YouTube's interface.

So, basically shortcuts for the lazy/incurious?

Same procedure as the one described in the 0.24.1 changelog, except that you need to change the prefix UU (all user uploads) to:

UULF for regular videos only,
UULV for livestreams only,
UUSH for shorts only,
UULP for popular regular videos,
UUPS for popular shorts,
UUPV for popular livestreams,
UUMF for members-only regular videos,
UUMV for members-only livestreams.
UUMS is probably for members-only shorts; we need to find a channel making shorts restricted to channel members.
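For illustration, the prefix substitution described in this note can be sketched in a few lines of Rust. This is not part of RustyPipe's API: `channel_system_playlists` is a hypothetical helper, and it assumes the usual `UC…` channel ID format with the uploads playlist sharing the channel ID's suffix (the procedure referenced from the 0.24.1 changelog). The channel ID below is the one used in the README example.

```rust
/// Hypothetical helper: derive the system playlist IDs listed above by
/// swapping the channel ID's "UC" prefix for the playlist prefixes.
fn channel_system_playlists(channel_id: &str) -> Option<Vec<String>> {
    let suffix = channel_id.strip_prefix("UC")?;
    let prefixes = ["UU", "UULF", "UULV", "UUSH", "UULP", "UUPS", "UUPV", "UUMF", "UUMV"];
    Some(prefixes.iter().map(|p| format!("{p}{suffix}")).collect())
}

fn main() {
    // Channel ID taken from the README example above.
    for id in channel_system_playlists("UCl2mFZoRqjw_ELax4Yisf6w").unwrap_or_default() {
        println!("{id}");
    }
}
```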
23 src/cache.rs
@@ -1,19 +1,4 @@
//! # Persistent cache storage
//!
//! RustyPipe caches some information fetched from YouTube: specifically
//! the client versions and the JavaScript code used to deobfuscate the stream URLs.
//!
//! Without a persistent cache storage, this information would have to be re-fetched
//! with every new instantiation of the client. This would make operation a lot slower,
//! especially with CLI applications. For this reason, persisting the cache between
//! program executions is recommended.
//!
//! Since there are many different ways to store this data (text file, SQL, Redis, etc.),
//! RustyPipe allows you to plug in your own cache storage by implementing the
//! [`CacheStorage`] trait.
//!
//! RustyPipe already comes with the [`FileStorage`] implementation which stores
//! the cache as a JSON file.
//! Persistent cache storage

use std::{
    fs,

@@ -24,16 +9,14 @@ use log::error;

pub(crate) const DEFAULT_CACHE_FILE: &str = "rustypipe_cache.json";

/// Cache storage trait
///
/// RustyPipe has to cache some information fetched from YouTube: specifically
/// the client versions and the JavaScript code used to deobfuscate the stream URLs.
///
/// This trait is used to abstract the cache storage behavior so you can store
/// cache data in your preferred way (File, SQL, Redis, etc).
///
/// The cache is read when building the [`RustyPipe`](crate::client::RustyPipe)
/// client and updated whenever additional data is fetched.
/// The cache is read when building the [`crate::client::RustyPipe`] client and updated
/// whenever additional data is fetched.
pub trait CacheStorage: Sync + Send {
    /// Write the given string to the cache
    fn write(&self, data: &str);
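As a rough illustration of plugging in a custom backend, the sketch below keeps the cache in memory. Treat it as a shape rather than the exact trait: only `write` is visible in this diff, so the `read` method and the `rustypipe::cache` module path are assumptions.

```rust
use std::sync::Mutex;

use rustypipe::cache::CacheStorage; // assumed module path (src/cache.rs)

/// Minimal in-memory cache backend (sketch).
struct MemStorage(Mutex<Option<String>>);

impl CacheStorage for MemStorage {
    /// Write the given string to the cache
    fn write(&self, data: &str) {
        *self.0.lock().unwrap() = Some(data.to_owned());
    }

    /// Read the cached string back (assumed counterpart to `write`,
    /// not visible in this diff)
    fn read(&self) -> Option<String> {
        self.0.lock().unwrap().clone()
    }
}
```

A backend like this would be handed to the client via `RustyPipeBuilder::storage(Box::new(...))`, the builder method shown further down in this diff.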
@ -98,7 +98,7 @@ impl RustyPipeQuery {
|
|||
.await
|
||||
}
|
||||
|
||||
/// Get the videos of the given tab (Shorts, Livestreams) from a YouTube channel
|
||||
/// Get the specified video tab from a YouTube channel
|
||||
pub async fn channel_videos_tab<S: AsRef<str>>(
|
||||
&self,
|
||||
channel_id: S,
|
||||
|
@ -108,7 +108,7 @@ impl RustyPipeQuery {
|
|||
.await
|
||||
}
|
||||
|
||||
/// Get a ordered list of videos from the given tab (Shorts, Livestreams) of a YouTube channel
|
||||
/// Get a ordered list of videos from the specified tab of a YouTube channel
|
||||
///
|
||||
/// This function does not return channel metadata.
|
||||
pub async fn channel_videos_tab_order<S: AsRef<str>>(
|
||||
|
@ -322,7 +322,7 @@ fn map_vanity_url(url: &str, id: &str) -> Option<String> {
|
|||
|
||||
Url::parse(url).ok().map(|mut parsed_url| {
|
||||
// The vanity URL from YouTube is http for some reason
|
||||
_ = parsed_url.set_scheme("https");
|
||||
let _ = parsed_url.set_scheme("https");
|
||||
parsed_url.to_string()
|
||||
})
|
||||
}
|
||||
|
@ -392,7 +392,10 @@ fn map_channel(
|
|||
content: (),
|
||||
},
|
||||
response::channel::Header::CarouselHeaderRenderer(carousel) => {
|
||||
let hdata = carousel.contents.into_iter().find_map(|item| {
|
||||
let hdata = carousel
|
||||
.contents
|
||||
.into_iter()
|
||||
.filter_map(|item| {
|
||||
match item {
|
||||
response::channel::CarouselHeaderRendererItem::TopicChannelDetailsRenderer {
|
||||
subscriber_count_text,
|
||||
|
@ -401,7 +404,8 @@ fn map_channel(
|
|||
} => Some((subscriber_count_text.or(subtitle), avatar)),
|
||||
response::channel::CarouselHeaderRendererItem::None => None,
|
||||
}
|
||||
});
|
||||
})
|
||||
.next();
|
||||
|
||||
Channel {
|
||||
id: metadata.external_id,
|
||||
|
@ -564,7 +568,7 @@ fn _order_ctoken(
|
|||
pb_80226972.string(3, &pbi.to_base64());
|
||||
|
||||
let mut pb = ProtoBuilder::new();
|
||||
pb.embedded(80_226_972, pb_80226972);
|
||||
pb.embedded(80226972, pb_80226972);
|
||||
|
||||
pb.to_base64()
|
||||
}
|
||||
|
|
|
@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
use crate::{
    error::{Error, ExtractionError},
    model::ChannelRss,
    report::{Report, RustyPipeInfo},
    report::Report,
};

use super::{response, RustyPipeQuery};

@@ -15,11 +15,12 @@ impl RustyPipeQuery {
    ///
    /// Fetching RSS feeds is a lot faster than querying the InnerTube API, so this method is great
    /// for checking a lot of channels or implementing a subscription feed.
    ///
    /// The downside of using the RSS feed is that it does not provide video durations.
    pub async fn channel_rss<S: AsRef<str>>(&self, channel_id: S) -> Result<ChannelRss, Error> {
        let channel_id = channel_id.as_ref();
        let url = format!("https://www.youtube.com/feeds/videos.xml?channel_id={channel_id}");
        let url = format!(
            "https://www.youtube.com/feeds/videos.xml?channel_id={}",
            channel_id,
        );
        let xml = self
            .client
            .http_request_txt(&self.client.inner.http.get(&url).build()?)
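The hedged sketch below shows how `channel_rss` would be called from application code, mirroring the README examples above. The `Debug` print is an assumption, since the fields of `ChannelRss` are not visible in this diff, and the method is gated behind the `rss` feature.

```rust
use rustypipe::client::RustyPipe;

#[tokio::main]
async fn main() {
    let rp = RustyPipe::new();
    // Channel ID taken from the README example above; `channel_rss` requires
    // the `rss` feature to be enabled.
    let feed = rp
        .query()
        .channel_rss("UCl2mFZoRqjw_ELax4Yisf6w")
        .await
        .unwrap();
    // The exact fields of `ChannelRss` are not shown in this diff, so the
    // Debug representation is printed here as a placeholder.
    println!("{feed:?}");
}
```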
@ -37,15 +38,15 @@ impl RustyPipeQuery {
|
|||
Err(e) => {
|
||||
if let Some(reporter) = &self.client.inner.reporter {
|
||||
let report = Report {
|
||||
info: RustyPipeInfo::default(),
|
||||
info: Default::default(),
|
||||
level: crate::report::Level::ERR,
|
||||
operation: "channel_rss",
|
||||
operation: "channel_rss".to_owned(),
|
||||
error: Some(e.to_string()),
|
||||
msgs: Vec::new(),
|
||||
deobf_data: None,
|
||||
http_request: crate::report::HTTPRequest {
|
||||
url: &url,
|
||||
method: "GET",
|
||||
url,
|
||||
method: "GET".to_owned(),
|
||||
req_header: BTreeMap::new(),
|
||||
req_body: String::new(),
|
||||
status: 200,
|
||||
|
|
|
@ -39,7 +39,7 @@ use crate::{
|
|||
deobfuscate::DeobfData,
|
||||
error::{Error, ExtractionError},
|
||||
param::{Country, Language},
|
||||
report::{FileReporter, Level, Report, Reporter, RustyPipeInfo, DEFAULT_REPORT_DIR},
|
||||
report::{FileReporter, Level, Report, Reporter, DEFAULT_REPORT_DIR},
|
||||
serializer::MapResult,
|
||||
util,
|
||||
};
|
||||
|
@ -73,7 +73,7 @@ pub enum ClientType {
|
|||
}
|
||||
|
||||
impl ClientType {
|
||||
fn is_web(self) -> bool {
|
||||
fn is_web(&self) -> bool {
|
||||
match self {
|
||||
ClientType::Desktop | ClientType::DesktopMusic | ClientType::TvHtml5Embed => true,
|
||||
ClientType::Android | ClientType::Ios => false,
|
||||
|
@ -118,11 +118,11 @@ struct ClientInfo<'a> {
|
|||
impl Default for ClientInfo<'_> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
client_name: "",
|
||||
client_version: Cow::default(),
|
||||
client_name: Default::default(),
|
||||
client_version: Default::default(),
|
||||
client_screen: None,
|
||||
device_model: None,
|
||||
platform: "",
|
||||
platform: Default::default(),
|
||||
original_url: None,
|
||||
visitor_data: None,
|
||||
hl: Language::En,
|
||||
|
@ -214,9 +214,9 @@ static CLIENT_VERSION_REGEXES: Lazy<[Regex; 1]> =
|
|||
|
||||
/// The RustyPipe client used to access YouTube's API
|
||||
///
|
||||
/// RustyPipe uses an [`Arc`] internally, so if you are using the client
|
||||
/// at multiple locations, you can just clone it. Note that query options
|
||||
/// (lang/country/report/visitor data) are not shared between clones.
|
||||
/// RustyPipe includes an `Arc` internally, so if you are using the client
|
||||
/// at multiple locations, you can just clone it. Note that options (lang/country/report)
|
||||
/// are not shared between clones.
|
||||
#[derive(Clone)]
|
||||
pub struct RustyPipe {
|
||||
inner: Arc<RustyPipeRef>,
|
||||
|
@@ -268,78 +268,10 @@ impl<T> DefaultOpt<T> {
    }
}

/// # RustyPipe query
/// RustyPipe query object
///
/// ## Queries
///
/// ### YouTube
///
/// - **Video**
///   - [`player`](RustyPipeQuery::player)
///   - [`video_details`](RustyPipeQuery::video_details)
///   - [`video_comments`](RustyPipeQuery::video_comments)
/// - **Channel**
///   - [`channel_videos`](RustyPipeQuery::channel_videos)
///   - [`channel_videos_order`](RustyPipeQuery::channel_videos_order)
///   - [`channel_videos_tab`](RustyPipeQuery::channel_videos_tab)
///   - [`channel_videos_tab_order`](RustyPipeQuery::channel_videos_tab_order)
///   - [`channel_playlists`](RustyPipeQuery::channel_playlists)
///   - [`channel_search`](RustyPipeQuery::channel_search)
///   - [`channel_info`](RustyPipeQuery::channel_info)
///   - [`channel_rss`](RustyPipeQuery::channel_rss) (🔒 Feature `rss`)
/// - **Playlist** [`playlist`](RustyPipeQuery::playlist)
/// - **Search**
///   - [`search`](RustyPipeQuery::search)
///   - [`search_filter`](RustyPipeQuery::search_filter)
///   - [`search_suggestion`](RustyPipeQuery::search_suggestion)
/// - **Trending** [`trending`](RustyPipeQuery::trending)
/// - **Resolver** (convert URLs and strings to YouTube IDs)
///   - [`resolve_url`](RustyPipeQuery::resolve_url)
///   - [`resolve_string`](RustyPipeQuery::resolve_string)
///
/// ### YouTube Music
///
/// - **Playlist** [`music_playlist`](RustyPipeQuery::music_playlist)
/// - **Album** [`music_album`](RustyPipeQuery::music_album)
/// - **Artist** [`music_artist`](RustyPipeQuery::music_artist)
/// - **Search**
///   - [`music_search`](RustyPipeQuery::music_search)
///   - [`music_search_tracks`](RustyPipeQuery::music_search_tracks)
///   - [`music_search_videos`](RustyPipeQuery::music_search_videos)
///   - [`music_search_albums`](RustyPipeQuery::music_search_albums)
///   - [`music_search_artists`](RustyPipeQuery::music_search_artists)
///   - [`music_search_playlists`](RustyPipeQuery::music_search_playlists)
///   - [`music_search_playlists_filter`](RustyPipeQuery::music_search_playlists_filter)
///   - [`music_search_suggestion`](RustyPipeQuery::music_search_suggestion)
/// - **Radio**
///   - [`music_radio`](RustyPipeQuery::music_radio)
///   - [`music_radio_playlist`](RustyPipeQuery::music_radio_playlist)
///   - [`music_radio_track`](RustyPipeQuery::music_radio_track)
/// - **Track details**
///   - [`music_details`](RustyPipeQuery::music_details)
///   - [`music_lyrics`](RustyPipeQuery::music_lyrics)
///   - [`music_related`](RustyPipeQuery::music_related)
/// - **Moods/Genres**
///   - [`music_genres`](RustyPipeQuery::music_genres)
///   - [`music_genre`](RustyPipeQuery::music_genre)
/// - **Charts** [`music_charts`](RustyPipeQuery::music_charts)
/// - **New**
///   - [`music_new_albums`](RustyPipeQuery::music_new_albums)
///   - [`music_new_videos`](RustyPipeQuery::music_new_videos)
///
/// ## Options
///
/// You can set the language, country and visitor data cookie for individual requests.
///
/// ```
/// # use rustypipe::client::RustyPipe;
/// let rp = RustyPipe::new();
/// rp.query()
///     .country(rustypipe::param::Country::De)
///     .lang(rustypipe::param::Language::De)
///     .visitor_data("CgthZVRCd1dkbTlRWSj3v_miBg%3D%3D")
///     .player("ZeerrnuLi5E");
/// ```
/// Contains a reference to the RustyPipe client as well as query-specific
/// options (e.g. language preference).
#[derive(Clone)]
pub struct RustyPipeQuery {
    client: RustyPipe,
@ -429,10 +361,9 @@ impl Default for RustyPipeBuilder {
|
|||
}
|
||||
|
||||
impl RustyPipeBuilder {
|
||||
/// Return a new `RustyPipeBuilder`.
|
||||
/// Constructs a new `RustyPipeBuilder`.
|
||||
///
|
||||
/// This is the same as [`RustyPipe::builder`]
|
||||
#[must_use]
|
||||
/// This is the same as `RustyPipe::builder()`
|
||||
pub fn new() -> Self {
|
||||
RustyPipeBuilder {
|
||||
default_opts: RustyPipeOpts::default(),
|
||||
|
@ -445,8 +376,7 @@ impl RustyPipeBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
/// Return a new, configured RustyPipe instance.
|
||||
#[must_use]
|
||||
/// Returns a new, configured RustyPipe instance.
|
||||
pub fn build(self) -> RustyPipe {
|
||||
let mut client_builder = ClientBuilder::new()
|
||||
.user_agent(self.user_agent.unwrap_or_else(|| DEFAULT_UA.to_owned()))
|
||||
|
@ -511,7 +441,6 @@ impl RustyPipeBuilder {
|
|||
/// This option has no effect if the storage backend or reporter are manually set or disabled.
|
||||
///
|
||||
/// **Default value**: current working directory
|
||||
#[must_use]
|
||||
pub fn storage_dir<P: Into<PathBuf>>(mut self, path: P) -> Self {
|
||||
self.storage_dir = Some(path.into());
|
||||
self
|
||||
|
@ -522,14 +451,12 @@ impl RustyPipeBuilder {
|
|||
/// program executions.
|
||||
///
|
||||
/// **Default value**: [`FileStorage`] in `rustypipe_cache.json`
|
||||
#[must_use]
|
||||
pub fn storage(mut self, storage: Box<dyn CacheStorage>) -> Self {
|
||||
self.storage = DefaultOpt::Some(storage);
|
||||
self
|
||||
}
|
||||
|
||||
/// Disable cache storage
|
||||
#[must_use]
|
||||
pub fn no_storage(mut self) -> Self {
|
||||
self.storage = DefaultOpt::None;
|
||||
self
|
||||
|
@ -538,14 +465,12 @@ impl RustyPipeBuilder {
|
|||
/// Add a `Reporter` to collect error details
|
||||
///
|
||||
/// **Default value**: [`FileReporter`] creating reports in `./rustypipe_reports`
|
||||
#[must_use]
|
||||
pub fn reporter(mut self, reporter: Box<dyn Reporter>) -> Self {
|
||||
self.reporter = DefaultOpt::Some(reporter);
|
||||
self
|
||||
}
|
||||
|
||||
/// Disable the creation of report files in case of errors and warnings.
|
||||
#[must_use]
|
||||
pub fn no_reporter(mut self) -> Self {
|
||||
self.reporter = DefaultOpt::None;
|
||||
self
|
||||
|
@ -557,14 +482,12 @@ impl RustyPipeBuilder {
|
|||
/// response body has finished.
|
||||
///
|
||||
/// **Default value**: 10s
|
||||
#[must_use]
|
||||
pub fn timeout(mut self, timeout: Duration) -> Self {
|
||||
self.timeout = DefaultOpt::Some(timeout);
|
||||
self
|
||||
}
|
||||
|
||||
/// Disable the HTTP request timeout.
|
||||
#[must_use]
|
||||
pub fn no_timeout(mut self) -> Self {
|
||||
self.timeout = DefaultOpt::None;
|
||||
self
|
||||
|
@ -579,7 +502,6 @@ impl RustyPipeBuilder {
|
|||
/// random jitter to be less predictable).
|
||||
///
|
||||
/// **Default value**: 2
|
||||
#[must_use]
|
||||
pub fn n_http_retries(mut self, n_retries: u32) -> Self {
|
||||
self.n_http_retries = n_retries;
|
||||
self
|
||||
|
@ -589,44 +511,37 @@ impl RustyPipeBuilder {
|
|||
///
|
||||
/// **Default value**: `Mozilla/5.0 (X11; Linux x86_64; rv:102.0) Gecko/20100101 Firefox/102.0`
|
||||
/// (Firefox ESR on Debian)
|
||||
#[must_use]
|
||||
pub fn user_agent<S: Into<String>>(mut self, user_agent: S) -> Self {
|
||||
self.user_agent = Some(user_agent.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the language parameter used when accessing the YouTube API.
|
||||
///
|
||||
/// This will change multilanguage video titles, descriptions and textual dates
|
||||
///
|
||||
/// **Default value**: `Language::En` (English)
|
||||
///
|
||||
/// **Info**: you can set this option for individual queries, too
|
||||
#[must_use]
|
||||
pub fn lang(mut self, lang: Language) -> Self {
|
||||
self.default_opts.lang = lang;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the country parameter used when accessing the YouTube API.
|
||||
///
|
||||
/// This will change trends and recommended content.
|
||||
///
|
||||
/// **Default value**: `Country::Us` (USA)
|
||||
///
|
||||
/// **Info**: you can set this option for individual queries, too
|
||||
#[must_use]
|
||||
pub fn country(mut self, country: Country) -> Self {
|
||||
self.default_opts.country = validate_country(country);
|
||||
self
|
||||
}
|
||||
|
||||
/// Generate a report on every operation.
|
||||
///
|
||||
/// This should only be used for debugging.
|
||||
///
|
||||
/// **Info**: you can set this option for individual queries, too
|
||||
#[must_use]
|
||||
pub fn report(mut self) -> Self {
|
||||
self.default_opts.report = true;
|
||||
self
|
||||
|
@ -634,44 +549,23 @@ impl RustyPipeBuilder {
|
|||
|
||||
/// Enable strict mode, causing operations to fail if there
|
||||
/// are warnings during deserialization (e.g. invalid items).
|
||||
///
|
||||
/// This should only be used for testing.
|
||||
///
|
||||
/// **Info**: you can set this option for individual queries, too
|
||||
#[must_use]
|
||||
pub fn strict(mut self) -> Self {
|
||||
self.default_opts.strict = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the YouTube visitor data cookie
|
||||
///
|
||||
/// YouTube assigns a session cookie to each user which is used for personalized
|
||||
/// recommendations. By default, RustyPipe does not send this cookie to preserve
|
||||
/// user privacy. For requests that mandatate the cookie, a new one is requested
|
||||
/// for every query.
|
||||
///
|
||||
/// This option allows you to manually set the visitor data cookie of your client,
|
||||
/// allowing you to get personalized recommendations or reproduce A/B tests.
|
||||
///
|
||||
/// Note that YouTube has a rate limit on the number of requests from a single
|
||||
/// visitor, so you should not use the same vistor data cookie for batch operations.
|
||||
///
|
||||
/// **Info**: you can set this option for individual queries, too
|
||||
#[must_use]
|
||||
/// Set the default YouTube visitor data cookie
|
||||
pub fn visitor_data<S: Into<String>>(mut self, visitor_data: S) -> Self {
|
||||
self.default_opts.visitor_data = Some(visitor_data.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the YouTube visitor data cookie to an optional value
|
||||
///
|
||||
/// see also [`RustyPipeBuilder::visitor_data`]
|
||||
///
|
||||
/// **Info**: you can set this option for individual queries, too
|
||||
#[must_use]
|
||||
pub fn visitor_data_opt<S: Into<String>>(mut self, visitor_data: Option<S>) -> Self {
|
||||
self.default_opts.visitor_data = visitor_data.map(S::into);
|
||||
/// Set the default YouTube visitor data cookie to an optional value
|
||||
pub fn visitor_data_opt(mut self, visitor_data: Option<String>) -> Self {
|
||||
self.default_opts.visitor_data = visitor_data;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
@ -685,22 +579,19 @@ impl Default for RustyPipe {
|
|||
impl RustyPipe {
|
||||
/// Create a new RustyPipe instance with default settings.
|
||||
///
|
||||
/// To create an instance with custom options, use [`RustyPipeBuilder`] instead.
|
||||
#[must_use]
|
||||
/// To create an instance with custom options, use `RustyPipeBuilder` instead.
|
||||
pub fn new() -> Self {
|
||||
RustyPipeBuilder::new().build()
|
||||
}
|
||||
|
||||
/// Create a new [`RustyPipeBuilder`]
|
||||
/// Constructs a new `RustyPipeBuilder`.
|
||||
///
|
||||
/// This is the same as [`RustyPipeBuilder::new`]
|
||||
#[must_use]
|
||||
/// This is the same as `RustyPipeBuilder::new()`
|
||||
pub fn builder() -> RustyPipeBuilder {
|
||||
RustyPipeBuilder::new()
|
||||
}
|
||||
|
||||
/// Create a new [`RustyPipeQuery`] to run an API request
|
||||
#[must_use]
|
||||
/// Constructs a new `RustyPipeQuery`.
|
||||
pub fn query(&self) -> RustyPipeQuery {
|
||||
RustyPipeQuery {
|
||||
client: self.clone(),
|
||||
|
@ -799,7 +690,7 @@ impl RustyPipe {
|
|||
.get(sw_url)
|
||||
.header(header::ORIGIN, origin)
|
||||
.header(header::REFERER, origin)
|
||||
.header(header::COOKIE, self.inner.consent_cookie.clone())
|
||||
.header(header::COOKIE, self.inner.consent_cookie.to_owned())
|
||||
.build()
|
||||
.unwrap(),
|
||||
)
|
||||
|
@ -848,13 +739,13 @@ impl RustyPipe {
|
|||
let mut desktop_client = self.inner.cache.desktop_client.write().await;
|
||||
|
||||
match desktop_client.get() {
|
||||
Some(cdata) => cdata.version.clone(),
|
||||
Some(cdata) => cdata.version.to_owned(),
|
||||
None => {
|
||||
log::debug!("getting desktop client version");
|
||||
match self.extract_desktop_client_version().await {
|
||||
Ok(version) => {
|
||||
*desktop_client = CacheEntry::from(ClientData {
|
||||
version: version.clone(),
|
||||
version: version.to_owned(),
|
||||
});
|
||||
drop(desktop_client);
|
||||
self.store_cache().await;
|
||||
|
@ -880,13 +771,13 @@ impl RustyPipe {
|
|||
let mut music_client = self.inner.cache.music_client.write().await;
|
||||
|
||||
match music_client.get() {
|
||||
Some(cdata) => cdata.version.clone(),
|
||||
Some(cdata) => cdata.version.to_owned(),
|
||||
None => {
|
||||
log::debug!("getting music client version");
|
||||
match self.extract_music_client_version().await {
|
||||
Ok(version) => {
|
||||
*music_client = CacheEntry::from(ClientData {
|
||||
version: version.clone(),
|
||||
version: version.to_owned(),
|
||||
});
|
||||
drop(music_client);
|
||||
self.store_cache().await;
|
||||
|
@ -935,12 +826,8 @@ impl RustyPipe {
|
|||
}
|
||||
}
|
||||
|
||||
/// Request a new visitor data cookie from YouTube
|
||||
///
|
||||
/// Since the cookie is shared between YT and YTM and the YTM page loads faster,
|
||||
/// we request that.
|
||||
async fn get_visitor_data(&self) -> Result<String, Error> {
|
||||
log::debug!("getting YT visitor data");
|
||||
log::debug!("getting YTM visitor data");
|
||||
let resp = self.inner.http.get(YOUTUBE_MUSIC_HOME_URL).send().await?;
|
||||
|
||||
resp.headers()
|
||||
|
@ -962,27 +849,21 @@ impl RustyPipe {
|
|||
|
||||
impl RustyPipeQuery {
|
||||
/// Set the language parameter used when accessing the YouTube API
|
||||
///
|
||||
/// This will change multilanguage video titles, descriptions and textual dates
|
||||
#[must_use]
|
||||
pub fn lang(mut self, lang: Language) -> Self {
|
||||
self.opts.lang = lang;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the country parameter used when accessing the YouTube API.
|
||||
///
|
||||
/// This will change trends and recommended content.
|
||||
#[must_use]
|
||||
pub fn country(mut self, country: Country) -> Self {
|
||||
self.opts.country = validate_country(country);
|
||||
self
|
||||
}
|
||||
|
||||
/// Generate a report on every operation.
|
||||
///
|
||||
/// This should only be used for debugging.
|
||||
#[must_use]
|
||||
pub fn report(mut self) -> Self {
|
||||
self.opts.report = true;
|
||||
self
|
||||
|
@ -990,38 +871,21 @@ impl RustyPipeQuery {
|
|||
|
||||
/// Enable strict mode, causing operations to fail if there
|
||||
/// are warnings during deserialization (e.g. invalid items).
|
||||
///
|
||||
/// This should only be used for testing.
|
||||
#[must_use]
|
||||
pub fn strict(mut self) -> Self {
|
||||
self.opts.strict = true;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the YouTube visitor data cookie
|
||||
///
|
||||
/// YouTube assigns a session cookie to each user which is used for personalized
|
||||
/// recommendations. By default, RustyPipe does not send this cookie to preserve
|
||||
/// user privacy. For requests that mandatate the cookie, a new one is requested
|
||||
/// for every query.
|
||||
///
|
||||
/// This option allows you to manually set the visitor data cookie of your query,
|
||||
/// allowing you to get personalized recommendations or reproduce A/B tests.
|
||||
///
|
||||
/// Note that YouTube has a rate limit on the number of requests from a single
|
||||
/// visitor, so you should not use the same vistor data cookie for batch operations.
|
||||
#[must_use]
|
||||
pub fn visitor_data<S: Into<String>>(mut self, visitor_data: S) -> Self {
|
||||
self.opts.visitor_data = Some(visitor_data.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the YouTube visitor data cookie to an optional value
|
||||
///
|
||||
/// see also [`RustyPipeQuery::visitor_data`]
|
||||
#[must_use]
|
||||
pub fn visitor_data_opt<S: Into<String>>(mut self, visitor_data: Option<S>) -> Self {
|
||||
self.opts.visitor_data = visitor_data.map(S::into);
|
||||
pub fn visitor_data_opt(mut self, visitor_data: Option<String>) -> Self {
|
||||
self.opts.visitor_data = visitor_data;
|
||||
self
|
||||
}
|
||||
|
||||
|
@ -1037,10 +901,13 @@ impl RustyPipeQuery {
|
|||
localized: bool,
|
||||
visitor_data: Option<&'a str>,
|
||||
) -> YTContext {
|
||||
let (hl, gl) = if localized {
|
||||
(self.opts.lang, self.opts.country)
|
||||
} else {
|
||||
(Language::En, Country::Us)
|
||||
let hl = match localized {
|
||||
true => self.opts.lang,
|
||||
false => Language::En,
|
||||
};
|
||||
let gl = match localized {
|
||||
true => self.opts.country,
|
||||
false => Country::Us,
|
||||
};
|
||||
let visitor_data = self.opts.visitor_data.as_deref().or(visitor_data);
|
||||
|
||||
|
@ -1142,7 +1009,7 @@ impl RustyPipeQuery {
|
|||
))
|
||||
.header(header::ORIGIN, YOUTUBE_HOME_URL)
|
||||
.header(header::REFERER, YOUTUBE_HOME_URL)
|
||||
.header(header::COOKIE, self.client.inner.consent_cookie.clone())
|
||||
.header(header::COOKIE, self.client.inner.consent_cookie.to_owned())
|
||||
.header("X-YouTube-Client-Name", "1")
|
||||
.header(
|
||||
"X-YouTube-Client-Version",
|
||||
|
@ -1157,7 +1024,7 @@ impl RustyPipeQuery {
|
|||
))
|
||||
.header(header::ORIGIN, YOUTUBE_MUSIC_HOME_URL)
|
||||
.header(header::REFERER, YOUTUBE_MUSIC_HOME_URL)
|
||||
.header(header::COOKIE, self.client.inner.consent_cookie.clone())
|
||||
.header(header::COOKIE, self.client.inner.consent_cookie.to_owned())
|
||||
.header("X-YouTube-Client-Name", "67")
|
||||
.header(
|
||||
"X-YouTube-Client-Version",
|
||||
|
@ -1210,7 +1077,7 @@ impl RustyPipeQuery {
|
|||
/// Get a YouTube visitor data cookie, which is necessary for certain requests
|
||||
async fn get_visitor_data(&self) -> Result<String, Error> {
|
||||
match &self.opts.visitor_data {
|
||||
Some(vd) => Ok(vd.clone()),
|
||||
Some(vd) => Ok(vd.to_owned()),
|
||||
None => self.client.get_visitor_data().await,
|
||||
}
|
||||
}
|
||||
|
@ -1356,19 +1223,21 @@ impl RustyPipeQuery {
|
|||
if level > Level::DBG || self.opts.report {
|
||||
if let Some(reporter) = &self.client.inner.reporter {
|
||||
let report = Report {
|
||||
info: RustyPipeInfo::default(),
|
||||
info: Default::default(),
|
||||
level,
|
||||
operation: &format!("{operation}({id})"),
|
||||
operation: format!("{operation}({id})"),
|
||||
error,
|
||||
msgs,
|
||||
deobf_data: deobf.cloned(),
|
||||
http_request: crate::report::HTTPRequest {
|
||||
url: request.url().as_str(),
|
||||
method: request.method().as_str(),
|
||||
url: request.url().to_string(),
|
||||
method: "POST".to_string(),
|
||||
req_header: request
|
||||
.headers()
|
||||
.iter()
|
||||
.map(|(k, v)| (k.as_str(), v.to_str().unwrap_or_default().to_owned()))
|
||||
.map(|(k, v)| {
|
||||
(k.to_string(), v.to_str().unwrap_or_default().to_owned())
|
||||
})
|
||||
.collect(),
|
||||
req_body: serde_json::to_string(body).unwrap_or_default(),
|
||||
status: req_res.status.into(),
|
||||
|
|
|
@ -26,10 +26,9 @@ impl RustyPipeQuery {
|
|||
all_albums: bool,
|
||||
) -> Result<MusicArtist, Error> {
|
||||
let artist_id = artist_id.as_ref();
|
||||
let visitor_data = if all_albums {
|
||||
Some(self.get_visitor_data().await?)
|
||||
} else {
|
||||
None
|
||||
let visitor_data = match all_albums {
|
||||
true => Some(self.get_visitor_data().await?),
|
||||
false => None,
|
||||
};
|
||||
|
||||
let res = self._music_artist(artist_id, visitor_data.as_deref()).await;
|
||||
|
@ -197,7 +196,7 @@ fn map_artist_page(
|
|||
lang,
|
||||
ArtistId {
|
||||
id: Some(id.to_owned()),
|
||||
name: header.title.clone(),
|
||||
name: header.title.to_owned(),
|
||||
},
|
||||
);
|
||||
|
||||
|
|
|
@ -60,7 +60,7 @@ impl RustyPipeQuery {
        // In rare cases, albums may have track numbers =0 (example: MPREb_RM0QfZ0eSKL)
        // They should be replaced with the track number derived from the previous track.
        let mut n_prev = 0;
        for track in &mut album.tracks {
        for track in album.tracks.iter_mut() {
            let tn = track.track_nr.unwrap_or_default();
            if tn == 0 {
                n_prev += 1;
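The comment above describes the normalization rule; a standalone sketch of that logic (on a simplified `Track` type, not the crate's real model) looks roughly like this:

```rust
/// Minimal sketch of the track-number fix described above, using a simplified
/// `Track` type; the real album model and field types may differ.
struct Track {
    track_nr: Option<u16>,
}

fn fix_track_numbers(tracks: &mut [Track]) {
    // Tracks reported with number 0 get a number derived from the previous track.
    let mut n_prev = 0;
    for track in tracks.iter_mut() {
        let tn = track.track_nr.unwrap_or_default();
        if tn == 0 {
            n_prev += 1;
            track.track_nr = Some(n_prev);
        } else {
            n_prev = tn;
        }
    }
}

fn main() {
    let mut tracks = vec![
        Track { track_nr: Some(1) },
        Track { track_nr: Some(0) }, // becomes 2
        Track { track_nr: Some(0) }, // becomes 3
        Track { track_nr: Some(5) },
    ];
    fix_track_numbers(&mut tracks);
    assert_eq!(tracks[1].track_nr, Some(2));
    assert_eq!(tracks[2].track_nr, Some(3));
}
```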
@ -80,7 +80,7 @@ impl RustyPipeQuery {
|
|||
.enumerate()
|
||||
.filter_map(|(i, track)| {
|
||||
if track.is_video {
|
||||
Some((i, track.name.clone()))
|
||||
Some((i, track.name.to_owned()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -97,7 +97,7 @@ impl RustyPipeQuery {
|
|||
for (i, title) in to_replace {
|
||||
let found_track = playlist.tracks.items.iter().find_map(|track| {
|
||||
if track.name == title && !track.is_video {
|
||||
Some((track.id.clone(), track.duration))
|
||||
Some((track.id.to_owned(), track.duration))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -173,7 +173,7 @@ impl MapResponse<MusicPlaylist> for response::MusicPlaylist {
|
|||
.split(|p| p == DOT_SEPARATOR)
|
||||
.collect::<Vec<_>>();
|
||||
parts
|
||||
.get(usize::from(parts.len() > 2))
|
||||
.get(if parts.len() > 2 { 1 } else { 0 })
|
||||
.and_then(|txt| util::parse_numeric::<u64>(&txt[0]).ok())
|
||||
})
|
||||
} else {
|
||||
|
@ -293,7 +293,7 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
|
|||
match section {
|
||||
response::music_item::ItemSection::MusicShelfRenderer(sh) => shelf = Some(sh),
|
||||
response::music_item::ItemSection::MusicCarouselShelfRenderer(sh) => {
|
||||
album_variants = Some(sh.contents);
|
||||
album_variants = Some(sh.contents)
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
@ -355,7 +355,7 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
|
|||
)
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let artist_id = artist_id.or_else(|| artists.first().and_then(|a| a.id.clone()));
|
||||
let artist_id = artist_id.or_else(|| artists.first().and_then(|a| a.id.to_owned()));
|
||||
|
||||
let mut mapper = MusicListMapper::with_album(
|
||||
lang,
|
||||
|
@ -363,7 +363,7 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
|
|||
by_va,
|
||||
AlbumId {
|
||||
id: id.to_owned(),
|
||||
name: header.title.clone(),
|
||||
name: header.title.to_owned(),
|
||||
},
|
||||
);
|
||||
mapper.map_response(shelf.contents);
|
||||
|
|
|
@ -170,10 +170,9 @@ impl RustyPipeQuery {
|
|||
) -> Result<MusicSearchFiltered<MusicPlaylistItem>, Error> {
|
||||
self._music_search_playlists(
|
||||
query,
|
||||
if community {
|
||||
Params::CommunityPlaylists
|
||||
} else {
|
||||
Params::YtmPlaylists
|
||||
match community {
|
||||
true => Params::CommunityPlaylists,
|
||||
false => Params::YtmPlaylists,
|
||||
},
|
||||
)
|
||||
.await
|
||||
|
@ -267,7 +266,7 @@ impl MapResponse<MusicSearchResult> for response::MusicSearch {
|
|||
}
|
||||
response::music_search::ItemSection::ItemSectionRenderer { contents } => {
|
||||
if let Some(corrected) = contents.into_iter().next() {
|
||||
corrected_query = Some(corrected.showing_results_for_renderer.corrected_query);
|
||||
corrected_query = Some(corrected.showing_results_for_renderer.corrected_query)
|
||||
}
|
||||
}
|
||||
response::music_search::ItemSection::None => {}
|
||||
|
@ -325,7 +324,7 @@ impl<T: FromYtItem> MapResponse<MusicSearchFiltered<T>> for response::MusicSearc
|
|||
}
|
||||
response::music_search::ItemSection::ItemSectionRenderer { contents } => {
|
||||
if let Some(corrected) = contents.into_iter().next() {
|
||||
corrected_query = Some(corrected.showing_results_for_renderer.corrected_query);
|
||||
corrected_query = Some(corrected.showing_results_for_renderer.corrected_query)
|
||||
}
|
||||
}
|
||||
response::music_search::ItemSection::None => {}
|
||||
|
|
|
@ -177,12 +177,12 @@ impl MapResponse<VideoPlayer> for response::Player {
|
|||
}
|
||||
response::player::PlayabilityStatus::LoginRequired { reason, messages } => {
|
||||
let mut msg = reason;
|
||||
for m in &messages {
|
||||
messages.iter().for_each(|m| {
|
||||
if !msg.is_empty() {
|
||||
msg.push(' ');
|
||||
}
|
||||
msg.push_str(m);
|
||||
}
|
||||
});
|
||||
|
||||
// reason (age restriction): "Sign in to confirm your age"
|
||||
// or: "This video may be inappropriate for some users."
|
||||
|
@ -341,8 +341,8 @@ impl MapResponse<VideoPlayer> for response::Player {
                + "&sigh="
                + sigh;

            let sprite_count = (f64::from(total_count)
                / f64::from(frames_per_page_x * frames_per_page_y))
            let sprite_count = ((total_count as f64)
                / (frames_per_page_x * frames_per_page_y) as f64)
            .ceil() as u32;

            Some(Frameset {

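The storyboard page count above is a plain ceiling division; a small worked example with made-up frame counts:

```rust
fn main() {
    // Hypothetical storyboard metadata: 100 frames on 5x5 sprite sheets.
    let total_count: u32 = 100;
    let frames_per_page_x: u32 = 5;
    let frames_per_page_y: u32 = 5;

    // Number of sprite sheets = ceil(total frames / frames per sheet)
    let sprite_count =
        (f64::from(total_count) / f64::from(frames_per_page_x * frames_per_page_y)).ceil() as u32;
    assert_eq!(sprite_count, 4);

    // 101 frames would spill onto a fifth sheet.
    let sprite_count2 = (101.0_f64 / 25.0).ceil() as u32;
    assert_eq!(sprite_count2, 5);
}
```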
@ -413,11 +413,11 @@ fn deobf_nsig(
|
|||
let nsig: String;
|
||||
if let Some(n) = url_params.get("n") {
|
||||
nsig = if n == &last_nsig[0] {
|
||||
last_nsig[1].clone()
|
||||
last_nsig[1].to_owned()
|
||||
} else {
|
||||
let nsig = deobf.deobfuscate_nsig(n)?;
|
||||
last_nsig[0] = n.to_string();
|
||||
last_nsig[1] = nsig.clone();
|
||||
last_nsig[1] = nsig.to_owned();
|
||||
nsig
|
||||
};
|
||||
|
||||
|
@ -490,7 +490,9 @@ fn map_video_stream(
|
|||
deobf: &Deobfuscator,
|
||||
last_nsig: &mut [String; 2],
|
||||
) -> MapResult<Option<VideoStream>> {
|
||||
let Some((mtype, codecs)) = parse_mime(&f.mime_type) else {
|
||||
let (mtype, codecs) = match parse_mime(&f.mime_type) {
|
||||
Some(x) => x,
|
||||
None => {
|
||||
return MapResult {
|
||||
c: None,
|
||||
warnings: vec![format!(
|
||||
|
@ -498,12 +500,16 @@ fn map_video_stream(
|
|||
&f.mime_type, &f
|
||||
)],
|
||||
}
|
||||
}
|
||||
};
|
||||
let Some(format) = get_video_format(mtype) else {
|
||||
let format = match get_video_format(mtype) {
|
||||
Some(f) => f,
|
||||
None => {
|
||||
return MapResult {
|
||||
c: None,
|
||||
warnings: vec![format!("invalid video format. itag: {}", f.itag)],
|
||||
}
|
||||
}
|
||||
};
|
||||
let map_res = map_url(&f.url, &f.signature_cipher, deobf, last_nsig);
|
||||
|
||||
|
@ -526,9 +532,9 @@ fn map_video_stream(
|
|||
quality: f.quality_label.unwrap(),
|
||||
hdr: f.color_info.unwrap_or_default().primaries
|
||||
== player::Primaries::ColorPrimariesBt2020,
|
||||
mime: f.mime_type.to_owned(),
|
||||
format,
|
||||
codec: get_video_codec(codecs),
|
||||
mime: f.mime_type,
|
||||
throttled: url.throttled,
|
||||
}),
|
||||
warnings: map_res.warnings,
|
||||
|
@ -545,7 +551,9 @@ fn map_audio_stream(
|
|||
deobf: &Deobfuscator,
|
||||
last_nsig: &mut [String; 2],
|
||||
) -> MapResult<Option<AudioStream>> {
|
||||
let Some((mtype, codecs)) = parse_mime(&f.mime_type) else {
|
||||
let (mtype, codecs) = match parse_mime(&f.mime_type) {
|
||||
Some(x) => x,
|
||||
None => {
|
||||
return MapResult {
|
||||
c: None,
|
||||
warnings: vec![format!(
|
||||
|
@ -553,12 +561,16 @@ fn map_audio_stream(
|
|||
&f.mime_type, &f
|
||||
)],
|
||||
}
|
||||
}
|
||||
};
|
||||
let Some(format) = get_audio_format(mtype) else {
|
||||
let format = match get_audio_format(mtype) {
|
||||
Some(f) => f,
|
||||
None => {
|
||||
return MapResult {
|
||||
c: None,
|
||||
warnings: vec![format!("invalid audio format. itag: {}", f.itag)],
|
||||
}
|
||||
}
|
||||
};
|
||||
let map_res = map_url(&f.url, &f.signature_cipher, deobf, last_nsig);
|
||||
let mut warnings = map_res.warnings;
|
||||
|
@ -574,9 +586,9 @@ fn map_audio_stream(
|
|||
index_range: f.index_range,
|
||||
init_range: f.init_range,
|
||||
duration_ms: f.approx_duration_ms,
|
||||
mime: f.mime_type.to_owned(),
|
||||
format,
|
||||
codec: get_audio_codec(codecs),
|
||||
mime: f.mime_type,
|
||||
channels: f.audio_channels,
|
||||
loudness_db: f.loudness_db,
|
||||
throttled: url.throttled,
|
||||
|
@ -674,7 +686,7 @@ fn map_audio_track(
|
|||
}
|
||||
},
|
||||
_ => {}
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
AudioTrack {
|
||||
|
|
|
@ -60,8 +60,9 @@ impl MapResponse<Playlist> for response::Playlist {
|
|||
lang: crate::param::Language,
|
||||
_deobf: Option<&crate::deobfuscate::DeobfData>,
|
||||
) -> Result<MapResult<Playlist>, ExtractionError> {
|
||||
let (Some(contents), Some(header)) = (self.contents, self.header) else {
|
||||
return Err(response::alerts_to_err(id, self.alerts));
|
||||
let (contents, header) = match (self.contents, self.header) {
|
||||
(Some(contents), Some(header)) => (contents, header),
|
||||
_ => return Err(response::alerts_to_err(id, self.alerts)),
|
||||
};
|
||||
|
||||
let video_items = contents
|
||||
|
|
|
@ -87,9 +87,11 @@ impl From<ChannelRss> for crate::model::ChannelRss {
|
|||
feed.entry
|
||||
.iter()
|
||||
.find_map(|entry| {
|
||||
Some(entry.channel_id.as_str())
|
||||
.filter(|id| id.is_empty())
|
||||
.map(str::to_owned)
|
||||
if !entry.channel_id.is_empty() {
|
||||
Some(entry.channel_id.to_owned())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.or_else(|| {
|
||||
feed.author
|
||||
|
|
|
@ -349,7 +349,7 @@ impl From<Icon> for crate::model::Verification {
|
|||
match icon.icon_type {
|
||||
IconType::Check => Self::Verified,
|
||||
IconType::OfficialArtistBadge => Self::Artist,
|
||||
IconType::Like => Self::None,
|
||||
_ => Self::None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -500,7 +500,7 @@ impl MusicListMapper {
|
|||
|
||||
let pt_id = item
|
||||
.navigation_endpoint
|
||||
.and_then(NavigationEndpoint::music_page)
|
||||
.and_then(|ne| ne.music_page())
|
||||
.or_else(|| {
|
||||
c1.and_then(|c1| {
|
||||
c1.renderer.text.0.into_iter().next().and_then(|t| match t {
|
||||
|
@ -796,7 +796,7 @@ impl MusicListMapper {
|
|||
name: item.title,
|
||||
duration: None,
|
||||
cover: item.thumbnail_renderer.into(),
|
||||
artist_id: artists.first().and_then(|a| a.id.clone()),
|
||||
artist_id: artists.first().and_then(|a| a.id.to_owned()),
|
||||
artists,
|
||||
album: None,
|
||||
view_count: subtitle_p2.and_then(|c| {
|
||||
|
@ -872,7 +872,7 @@ impl MusicListMapper {
|
|||
id,
|
||||
name: item.title,
|
||||
cover: item.thumbnail_renderer.into(),
|
||||
artist_id: artists.first().and_then(|a| a.id.clone()),
|
||||
artist_id: artists.first().and_then(|a| a.id.to_owned()),
|
||||
artists,
|
||||
album_type,
|
||||
year,
|
||||
|
@ -886,7 +886,8 @@ impl MusicListMapper {
|
|||
let from_ytm = subtitle_p2
|
||||
.as_ref()
|
||||
.and_then(|p| p.0.first())
|
||||
.map_or(true, util::is_ytm);
|
||||
.map(util::is_ytm)
|
||||
.unwrap_or(true);
|
||||
let channel = subtitle_p2.and_then(|p| {
|
||||
p.0.into_iter().find_map(|c| ChannelId::try_from(c).ok())
|
||||
});
|
||||
|
@ -972,7 +973,7 @@ impl MusicListMapper {
|
|||
id,
|
||||
name: card.title,
|
||||
cover: card.thumbnail.into(),
|
||||
artist_id: artists.first().and_then(|a| a.id.clone()),
|
||||
artist_id: artists.first().and_then(|a| a.id.to_owned()),
|
||||
artists,
|
||||
album_type,
|
||||
year: subtitle_p3.and_then(|y| util::parse_numeric(y.first_str()).ok()),
|
||||
|
@ -1009,7 +1010,7 @@ impl MusicListMapper {
|
|||
name: card.title,
|
||||
duration,
|
||||
cover: card.thumbnail.into(),
|
||||
artist_id: artists.first().and_then(|a| a.id.clone()),
|
||||
artist_id: artists.first().and_then(|a| a.id.to_owned()),
|
||||
artists,
|
||||
album,
|
||||
view_count,
|
||||
|
@ -1023,7 +1024,8 @@ impl MusicListMapper {
|
|||
let from_ytm = subtitle_p2
|
||||
.as_ref()
|
||||
.and_then(|p| p.0.first())
|
||||
.map_or(true, util::is_ytm);
|
||||
.map(util::is_ytm)
|
||||
.unwrap_or(true);
|
||||
let channel = subtitle_p2
|
||||
.and_then(|p| p.0.into_iter().find_map(|c| ChannelId::try_from(c).ok()));
|
||||
let track_count =
|
||||
|
@ -1119,17 +1121,10 @@ impl MusicListMapper {
        }
    }

    /// Sometimes the YT Music API returns responses containing unknown items.
    ///
    /// In this case, the response data is likely missing some fields, which leads to
    /// parsing errors and wrong data being extracted.
    ///
    /// Therefore it is safest to discard such responses and retry the request.
    pub fn check_unknown(&self) -> Result<(), ExtractionError> {
        if self.has_unknown {
            Err(ExtractionError::InvalidData("unknown YTM items".into()))
        } else {
            Ok(())
        match self.has_unknown {
            true => Err(ExtractionError::InvalidData("unknown YTM items".into())),
            false => Ok(()),
        }
    }
}

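A possible caller-side retry loop following the advice in the doc comment; the query method name and module paths are assumptions, only the `InvalidData("unknown YTM items")` error produced by `check_unknown` comes from the code above.

```rust ignore
use rustypipe::{
    client::RustyPipe,
    error::{Error, ExtractionError},
    model::MusicAlbum,
};

// Hypothetical retry wrapper for a YTM query that may return unknown items.
async fn album_with_retry(rp: &RustyPipe) -> Result<MusicAlbum, Error> {
    let mut last_err = None;
    for _ in 0..3 {
        match rp.query().music_album("MPREb_GyH43gCvdM5").await {
            Ok(album) => return Ok(album),
            // A response discarded because of unknown items is worth retrying
            Err(e @ Error::Extraction(ExtractionError::InvalidData(_))) => last_err = Some(e),
            Err(e) => return Err(e),
        }
    }
    Err(last_err.unwrap())
}
```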
@ -1166,7 +1161,7 @@ fn map_artist_id_fallback(
|
|||
fallback_artist: Option<&ArtistId>,
|
||||
) -> Option<String> {
|
||||
menu.and_then(|m| map_artist_id(m.menu_renderer.contents))
|
||||
.or_else(|| fallback_artist.and_then(|a| a.id.clone()))
|
||||
.or_else(|| fallback_artist.and_then(|a| a.id.to_owned()))
|
||||
}
|
||||
|
||||
pub(crate) fn map_artist_id(entries: Vec<MusicItemMenuEntry>) -> Option<String> {
|
||||
|
|
|
@ -69,7 +69,6 @@ impl<'de> Deserialize<'de> for BrowseEndpoint {
|
|||
let bep = BEp::deserialize(deserializer)?;
|
||||
|
||||
// Remove the VL prefix from the playlist id
|
||||
#[allow(clippy::map_unwrap_or)]
|
||||
let browse_id = bep
|
||||
.browse_endpoint_context_supported_configs
|
||||
.as_ref()
|
||||
|
@ -168,8 +167,9 @@ pub(crate) enum PageType {
impl PageType {
    pub(crate) fn to_url_target(self, id: String) -> Option<UrlTarget> {
        match self {
            PageType::Artist | PageType::Channel => Some(UrlTarget::Channel { id }),
            PageType::Artist => Some(UrlTarget::Channel { id }),
            PageType::Album => Some(UrlTarget::Album { id }),
            PageType::Channel => Some(UrlTarget::Channel { id }),
            PageType::Playlist => Some(UrlTarget::Playlist { id }),
            PageType::Unknown => None,
        }
|
|
|
@ -419,8 +419,8 @@ impl<T> YouTubeListMapper<T> {
|
|||
Self {
|
||||
lang,
|
||||
channel: Some(ChannelTag {
|
||||
id: channel.id.clone(),
|
||||
name: channel.name.clone(),
|
||||
id: channel.id.to_owned(),
|
||||
name: channel.name.to_owned(),
|
||||
avatar: Vec::new(),
|
||||
verification: channel.verification,
|
||||
subscriber_count: channel.subscriber_count,
|
||||
|
@ -572,15 +572,14 @@ impl<T> YouTubeListMapper<T> {
|
|||
|
||||
fn map_channel(&mut self, channel: ChannelRenderer) -> ChannelItem {
|
||||
// channel handle instead of subscriber count (A/B test 3)
|
||||
let (sc_txt, vc_text) = if channel
|
||||
let (sc_txt, vc_text) = match channel
|
||||
.subscriber_count_text
|
||||
.as_ref()
|
||||
.map(|txt| txt.starts_with('@'))
|
||||
.unwrap_or_default()
|
||||
{
|
||||
(channel.video_count_text, None)
|
||||
} else {
|
||||
(channel.subscriber_count_text, channel.video_count_text)
|
||||
true => (channel.video_count_text, None),
|
||||
false => (channel.subscriber_count_text, channel.video_count_text),
|
||||
};
|
||||
|
||||
ChannelItem {
|
||||
|
@ -644,7 +643,7 @@ impl YouTubeListMapper<YouTubeItem> {
|
|||
.map(|url| (l.title, util::sanitize_yt_url(&url.url)))
|
||||
})
|
||||
.collect(),
|
||||
});
|
||||
})
|
||||
}
|
||||
YouTubeListItem::RichItemRenderer { content } => {
|
||||
self.map_item(*content);
|
||||
|
@ -702,7 +701,7 @@ impl YouTubeListMapper<PlaylistItem> {
|
|||
match item {
|
||||
YouTubeListItem::PlaylistRenderer(playlist) => {
|
||||
let mapped = self.map_playlist(playlist);
|
||||
self.items.push(mapped);
|
||||
self.items.push(mapped)
|
||||
}
|
||||
YouTubeListItem::ContinuationItemRenderer {
|
||||
continuation_endpoint,
|
||||
|
|
|
@ -26,7 +26,7 @@ impl RustyPipeQuery {
|
|||
/// from alternative YouTube frontends like Piped or Invidious.
|
||||
///
|
||||
/// The `resolve_albums` flag enables resolving YTM album URLs (e.g.
|
||||
/// `OLAK5uy_k0yFrZlFRgCf3rLPza-lkRmCrtLPbK9pE`) to their short album ids (`MPREb_GyH43gCvdM5`).
|
||||
/// `OLAK5uy_k0yFrZlFRgCf3rLPza-lkRmCrtLPbK9pE`) to their short album id (`MPREb_GyH43gCvdM5`).
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
|
@ -168,13 +168,12 @@ impl RustyPipeQuery {
|
|||
e,
|
||||
Error::Extraction(ExtractionError::NotFound { .. })
|
||||
) {
|
||||
if util::VIDEO_ID_REGEX.is_match(id) {
|
||||
Ok(UrlTarget::Video {
|
||||
match util::VIDEO_ID_REGEX.is_match(id) {
|
||||
true => Ok(UrlTarget::Video {
|
||||
id: id.to_owned(),
|
||||
start_time: get_start_time(),
|
||||
})
|
||||
} else {
|
||||
Err(e)
|
||||
}),
|
||||
false => Err(e),
|
||||
}
|
||||
} else {
|
||||
Err(e)
|
||||
|
@ -218,7 +217,7 @@ impl RustyPipeQuery {
|
|||
/// rp.query().resolve_string("LinusTechTips", true).await.unwrap(),
|
||||
/// UrlTarget::Channel {id: "UCXuqSBlHAE6Xw-yeJA0Tunw".to_owned()}
|
||||
/// );
|
||||
/// // Playlist
|
||||
/// //
|
||||
/// assert_eq!(
|
||||
/// rp.query().resolve_string("PL4lEESSgxM_5O81EvKCmBIm_JT5Q7JeaI", true).await.unwrap(),
|
||||
/// UrlTarget::Playlist {id: "PL4lEESSgxM_5O81EvKCmBIm_JT5Q7JeaI".to_owned()}
|
||||
|
|
|
@ -393,7 +393,7 @@ impl MapResponse<Paginator<Comment>> for response::VideoComments {
|
|||
lang,
|
||||
);
|
||||
comments.push(res.c);
|
||||
warnings.append(&mut res.warnings);
|
||||
warnings.append(&mut res.warnings)
|
||||
}
|
||||
response::video_details::CommentListItem::CommentRenderer(comment) => {
|
||||
let mut res = map_comment(
|
||||
|
@ -403,7 +403,7 @@ impl MapResponse<Paginator<Comment>> for response::VideoComments {
|
|||
lang,
|
||||
);
|
||||
comments.push(res.c);
|
||||
warnings.append(&mut res.warnings);
|
||||
warnings.append(&mut res.warnings)
|
||||
}
|
||||
response::video_details::CommentListItem::ContinuationItemRenderer {
|
||||
continuation_endpoint,
|
||||
|
@ -433,11 +433,11 @@ fn map_recommendations(
|
|||
let mut mapper = response::YouTubeListMapper::<VideoItem>::new(lang);
|
||||
mapper.map_response(r);
|
||||
|
||||
mapper.ctoken = mapper.ctoken.or_else(|| {
|
||||
continuations
|
||||
.and_then(|c| c.into_iter().next())
|
||||
.map(|c| c.next_continuation_data.continuation)
|
||||
});
|
||||
if let Some(continuations) = continuations {
|
||||
continuations.into_iter().for_each(|c| {
|
||||
mapper.ctoken = Some(c.next_continuation_data.continuation);
|
||||
})
|
||||
};
|
||||
|
||||
MapResult {
|
||||
c: Paginator::new_ext(
|
||||
|
|
|
@ -238,7 +238,7 @@ fn extract_js_fn(js: &str, name: &str) -> Result<String, DeobfError> {
|
|||
|
||||
fn get_nsig_fn(player_js: &str) -> Result<String, DeobfError> {
|
||||
let function_name = get_nsig_fn_name(player_js)?;
|
||||
let function_base = function_name.clone() + "=function";
|
||||
let function_base = function_name.to_owned() + "=function";
|
||||
let offset = player_js.find(&function_base).unwrap_or_default();
|
||||
|
||||
extract_js_fn(&player_js[offset..], &function_name)
|
||||
|
|
12 src/error.rs
|
@ -81,8 +81,7 @@ pub enum ExtractionError {
|
|||
pub enum UnavailabilityReason {
|
||||
/// Video is age restricted.
|
||||
///
|
||||
/// Age restriction may be circumvented with the
|
||||
/// [`ClientType::TvHtml5Embed`](crate::client::ClientType::TvHtml5Embed) client.
|
||||
/// Age restriction may be circumvented with the [`crate::client::ClientType::TvHtml5Embed`] client.
|
||||
AgeRestricted,
|
||||
/// Video was deleted or censored
|
||||
Deleted,
|
||||
|
@ -124,7 +123,7 @@ impl Display for UnavailabilityReason {
|
|||
}
|
||||
|
||||
pub(crate) mod internal {
|
||||
use super::{Error, ExtractionError};
|
||||
use super::*;
|
||||
|
||||
/// Error that occurred during the initialization
|
||||
/// or use of the YouTube URL signature deobfuscator.
|
||||
|
@ -167,7 +166,7 @@ impl From<reqwest::Error> for Error {
|
|||
fn from(value: reqwest::Error) -> Self {
|
||||
if value.is_status() {
|
||||
if let Some(status) = value.status() {
|
||||
return Self::HttpStatus(status.as_u16(), Cow::default());
|
||||
return Self::HttpStatus(status.as_u16(), Default::default());
|
||||
}
|
||||
}
|
||||
Self::Http(value.to_string().into())
|
||||
|
@ -186,9 +185,8 @@ impl Error {
|
|||
matches!(
|
||||
self,
|
||||
Self::HttpStatus(_, _)
|
||||
| Self::Extraction(
|
||||
ExtractionError::InvalidData(_) | ExtractionError::WrongResult(_)
|
||||
)
|
||||
| Self::Extraction(ExtractionError::InvalidData(_))
|
||||
| Self::Extraction(ExtractionError::WrongResult(_))
|
||||
)
|
||||
}
|
||||
|
||||
|
|
21 src/lib.rs
|
@ -1,24 +1,5 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
#![warn(missing_docs, clippy::todo, clippy::dbg_macro, clippy::pedantic)]
|
||||
#![allow(
|
||||
clippy::doc_markdown,
|
||||
clippy::similar_names,
|
||||
clippy::items_after_statements,
|
||||
clippy::too_many_lines,
|
||||
clippy::module_name_repetitions,
|
||||
clippy::must_use_candidate,
|
||||
clippy::cast_possible_truncation,
|
||||
clippy::cast_sign_loss,
|
||||
clippy::cast_precision_loss,
|
||||
clippy::single_match_else,
|
||||
clippy::missing_errors_doc,
|
||||
clippy::missing_panics_doc
|
||||
)]
|
||||
|
||||
//! ## Go to
|
||||
//!
|
||||
//! - Client ([`rustypipe::client::Rustypipe`](crate::client::RustyPipe))
|
||||
//! - Query ([`rustypipe::client::RustypipeQuery`](crate::client::RustyPipeQuery))
|
||||
#![warn(missing_docs, clippy::todo, clippy::dbg_macro)]
|
||||
|
||||
mod deobfuscate;
|
||||
mod serializer;
|
||||
|
|
|
@ -16,7 +16,7 @@ use serde_with::serde_as;
|
|||
use time::{Date, OffsetDateTime};
|
||||
|
||||
use self::{paginator::Paginator, richtext::RichText};
|
||||
use crate::{error::Error, param::Country, serializer::DateYmd, validate};
|
||||
use crate::{error::Error, param::Country, serializer::DateYmd, util};
|
||||
|
||||
/*
|
||||
#COMMON
|
||||
|
@ -110,10 +110,22 @@ impl UrlTarget {
|
|||
/// Validate the YouTube ID from the URL target
|
||||
pub(crate) fn validate(&self) -> Result<(), Error> {
|
||||
match self {
|
||||
UrlTarget::Video { id, .. } => validate::video_id(id),
|
||||
UrlTarget::Channel { id } => validate::channel_id(id),
|
||||
UrlTarget::Playlist { id } => validate::playlist_id(id),
|
||||
UrlTarget::Album { id } => validate::album_id(id),
|
||||
UrlTarget::Video { id, .. } => match util::VIDEO_ID_REGEX.is_match(id) {
|
||||
true => Ok(()),
|
||||
false => Err(Error::Other("invalid video id".into())),
|
||||
},
|
||||
UrlTarget::Channel { id } => match util::CHANNEL_ID_REGEX.is_match(id) {
|
||||
true => Ok(()),
|
||||
false => Err(Error::Other("invalid channel id".into())),
|
||||
},
|
||||
UrlTarget::Playlist { id } => match util::PLAYLIST_ID_REGEX.is_match(id) {
|
||||
true => Ok(()),
|
||||
false => Err(Error::Other("invalid playlist id".into())),
|
||||
},
|
||||
UrlTarget::Album { id } => match util::ALBUM_ID_REGEX.is_match(id) {
|
||||
true => Ok(()),
|
||||
false => Err(Error::Other("invalid album id".into())),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -245,15 +257,15 @@ pub struct AudioStream {
    pub codec: AudioCodec,
    /// Number of audio channels
    pub channels: Option<u8>,
    /// Audio loudness for volume normalization
    /// Audio loudness for ReplayGain correction
    ///
    /// The track volume correction factor (0-1) can be calculated using this formula
    ///
    /// `10^(-loudness_db/20)`
    ///
    /// Note that the `loudness_db` value is the inverse of the usual ReplayGain track gain
    /// parameter, i.e. a value of 6 means the volume should be reduced by 6dB and the
    /// track gain parameter would be -6.
    /// Note that the value is the inverse of the usual track gain parameter, i.e. a
    /// value of 6 means the volume should be reduced by 6dB and the ReplayGain track gain
    /// parameter would be -6.
    ///
    /// More information about ReplayGain and how to apply this information to audio files
    /// can be found here: <https://wiki.hydrogenaud.io/index.php?title=ReplayGain_1.0_specification>.

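A worked example of the loudness formula from the doc comment above (the value is illustrative):

```rust
fn main() {
    // Volume correction factor = 10^(-loudness_db / 20)
    let loudness_db: f64 = 6.0; // track reported as 6 dB too loud
    let factor = 10f64.powf(-loudness_db / 20.0);

    // 10^(-0.3) ≈ 0.501 → play the track at roughly half volume,
    // equivalent to a ReplayGain track gain of -6 dB.
    assert!((factor - 0.501).abs() < 0.001);
}
```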
@ -61,9 +61,9 @@ impl TextComponent {
|
|||
/// Get the text from the component
|
||||
pub fn get_text(&self) -> &str {
|
||||
match self {
|
||||
TextComponent::Text(text)
|
||||
| TextComponent::Web { text, .. }
|
||||
| TextComponent::YouTube { text, .. } => text,
|
||||
TextComponent::Text(text) => text,
|
||||
TextComponent::Web { text, .. } => text,
|
||||
TextComponent::YouTube { text, .. } => text,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -73,7 +73,7 @@ impl TextComponent {
|
|||
pub fn get_url(&self, yt_host: &str) -> String {
|
||||
match self {
|
||||
TextComponent::Text(_) => String::new(),
|
||||
TextComponent::Web { url, .. } => url.clone(),
|
||||
TextComponent::Web { url, .. } => url.to_owned(),
|
||||
TextComponent::YouTube { target, .. } => target.to_url_yt_host(yt_host),
|
||||
}
|
||||
}
|
||||
|
@ -82,7 +82,7 @@ impl TextComponent {
|
|||
impl ToPlaintext for TextComponent {
|
||||
fn to_plaintext_yt_host(&self, yt_host: &str) -> String {
|
||||
match self {
|
||||
TextComponent::Text(text) => text.clone(),
|
||||
TextComponent::Text(text) => text.to_owned(),
|
||||
_ => self.get_url(yt_host),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,14 +1,11 @@
|
|||
//! # Query parameters
|
||||
//!
|
||||
//! This module contains structs and enums used as input parameters
|
||||
//! for the functions in RustyPipe.
|
||||
//! Query parameters
|
||||
|
||||
mod locale;
|
||||
mod stream_filter;
|
||||
|
||||
pub mod locale;
|
||||
pub mod search_filter;
|
||||
|
||||
pub use locale::{Country, Language, COUNTRIES, LANGUAGES};
|
||||
pub use locale::{Country, Language};
|
||||
pub use stream_filter::StreamFilter;
|
||||
|
||||
/// Channel video tab
|
||||
|
@ -33,7 +30,7 @@ pub enum ChannelOrder {
|
|||
|
||||
impl ChannelVideoTab {
|
||||
/// Get the tab ID used to create ordered continuation tokens
|
||||
pub(crate) const fn order_ctoken_id(self) -> u32 {
|
||||
pub(crate) const fn order_ctoken_id(&self) -> u32 {
|
||||
match self {
|
||||
ChannelVideoTab::Videos => 15,
|
||||
ChannelVideoTab::Shorts => 10,
|
||||
|
|
|
@ -93,90 +93,77 @@ pub enum Length {
|
|||
|
||||
impl SearchFilter {
|
||||
/// Get a new [`SearchFilter`]
|
||||
#[must_use]
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
/// Sort the search results
|
||||
#[must_use]
|
||||
pub fn sort(mut self, sort: Order) -> Self {
|
||||
self.sort = Some(sort);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sort the search results
|
||||
#[must_use]
|
||||
pub fn sort_opt(mut self, sort: Option<Order>) -> Self {
|
||||
self.sort = sort;
|
||||
self
|
||||
}
|
||||
|
||||
/// Filter videos with specific features
|
||||
#[must_use]
|
||||
pub fn feature(mut self, feature: Feature) -> Self {
|
||||
self.features.insert(feature);
|
||||
self
|
||||
}
|
||||
|
||||
/// Filter videos with specific features
|
||||
#[must_use]
|
||||
pub fn features(mut self, features: BTreeSet<Feature>) -> Self {
|
||||
self.features = features;
|
||||
self
|
||||
}
|
||||
|
||||
/// Filter videos by upload date range
|
||||
#[must_use]
|
||||
pub fn date(mut self, date: UploadDate) -> Self {
|
||||
self.date = Some(date);
|
||||
self
|
||||
}
|
||||
|
||||
/// Filter videos by upload date range
|
||||
#[must_use]
|
||||
pub fn date_opt(mut self, date: Option<UploadDate>) -> Self {
|
||||
self.date = date;
|
||||
self
|
||||
}
|
||||
|
||||
/// Filter videos by item type
|
||||
#[must_use]
|
||||
pub fn item_type(mut self, item_type: ItemType) -> Self {
|
||||
self.item_type = Some(item_type);
|
||||
self
|
||||
}
|
||||
|
||||
/// Filter videos by item type
|
||||
#[must_use]
|
||||
pub fn item_type_opt(mut self, item_type: Option<ItemType>) -> Self {
|
||||
self.item_type = item_type;
|
||||
self
|
||||
}
|
||||
|
||||
/// Filter videos by length range
|
||||
#[must_use]
|
||||
pub fn length(mut self, length: Length) -> Self {
|
||||
self.length = Some(length);
|
||||
self
|
||||
}
|
||||
|
||||
/// Filter videos by length range
|
||||
#[must_use]
|
||||
pub fn length_opt(mut self, length: Option<Length>) -> Self {
|
||||
self.length = length;
|
||||
self
|
||||
}
|
||||
|
||||
    /// Disable the automatic correction of mistyped search terms
    #[must_use]
    pub fn verbatim(mut self) -> Self {
        self.verbatim = true;
        self
    }

    /// Disable the automatic correction of mistyped search terms
    #[must_use]
    pub fn verbatim_set(mut self, verbatim: bool) -> Self {
        self.verbatim = verbatim;
        self

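A minimal sketch of using the `SearchFilter` builder; `ItemType::Video` and the commented-out search call are assumptions, only the builder methods come from this file.

```rust ignore
use rustypipe::param::search_filter::{ItemType, SearchFilter};

// Only videos, with autocorrection of the search term disabled
let filter = SearchFilter::new().item_type(ItemType::Video).verbatim();

// let results = rp.query().search_filtered("lofi beats", &filter).await?;
```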
@ -210,7 +197,7 @@ impl SearchFilter {
|
|||
if self.verbatim {
|
||||
let mut extras = ProtoBuilder::new();
|
||||
extras.varint(1, 1);
|
||||
pb.embedded(8, extras);
|
||||
pb.embedded(8, extras)
|
||||
}
|
||||
|
||||
pb.to_base64()
|
||||
|
|
|
@ -32,41 +32,36 @@ enum FilterResult {
|
|||
|
||||
impl FilterResult {
|
||||
fn hard(val: bool) -> Self {
|
||||
if val {
|
||||
Self::Match
|
||||
} else {
|
||||
Self::Deny
|
||||
match val {
|
||||
true => Self::Match,
|
||||
false => Self::Deny,
|
||||
}
|
||||
}
|
||||
|
||||
fn soft(val: bool) -> Self {
|
||||
if val {
|
||||
Self::Match
|
||||
} else {
|
||||
Self::AllowLowest
|
||||
match val {
|
||||
true => Self::Match,
|
||||
false => Self::AllowLowest,
|
||||
}
|
||||
}
|
||||
|
||||
fn allow(val: bool) -> Self {
|
||||
if val {
|
||||
Self::Allow
|
||||
} else {
|
||||
Self::Deny
|
||||
match val {
|
||||
true => Self::Allow,
|
||||
false => Self::Deny,
|
||||
}
|
||||
}
|
||||
|
||||
fn join(self, other: Self) -> Self {
|
||||
if self == Self::Deny {
|
||||
Self::Deny
|
||||
} else {
|
||||
self.min(other)
|
||||
match self == Self::Deny {
|
||||
true => Self::Deny,
|
||||
false => self.min(other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> StreamFilter<'a> {
|
||||
/// Create a new [`StreamFilter`]
|
||||
#[must_use]
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
@ -75,7 +70,6 @@ impl<'a> StreamFilter<'a> {
|
|||
///
|
||||
/// This is a soft filter, so if there is no stream with a bitrate
|
||||
/// <= the limit, the stream with the next higher bitrate is returned.
|
||||
#[must_use]
|
||||
pub fn audio_max_bitrate(mut self, max_bitrate: u32) -> Self {
|
||||
self.audio_max_bitrate = Some(max_bitrate);
|
||||
self
|
||||
|
@ -89,7 +83,6 @@ impl<'a> StreamFilter<'a> {
|
|||
}
|
||||
|
||||
/// Set the supported audio container formats
|
||||
#[must_use]
|
||||
pub fn audio_formats(mut self, formats: &'a [AudioFormat]) -> Self {
|
||||
self.audio_formats = Some(formats);
|
||||
self
|
||||
|
@ -103,7 +96,6 @@ impl<'a> StreamFilter<'a> {
|
|||
}
|
||||
|
||||
/// Set the supported audio codecs
|
||||
#[must_use]
|
||||
pub fn audio_codecs(mut self, codecs: &'a [AudioCodec]) -> Self {
|
||||
self.audio_codecs = Some(codecs);
|
||||
self
|
||||
|
@ -122,7 +114,6 @@ impl<'a> StreamFilter<'a> {
|
|||
///
|
||||
/// If this filter is unset or no stream matches,
|
||||
/// the filter returns the default audio stream.
|
||||
#[must_use]
|
||||
pub fn audio_language(mut self, language: &'a str) -> Self {
|
||||
self.audio_language = Some(language);
|
||||
self
|
||||
|
@ -132,13 +123,10 @@ impl<'a> StreamFilter<'a> {
|
|||
match &self.audio_language {
|
||||
Some(language) => match &stream.track {
|
||||
Some(track) => match &track.lang {
|
||||
Some(track_lang) => {
|
||||
if track_lang == language {
|
||||
FilterResult::Match
|
||||
} else {
|
||||
FilterResult::allow(track.is_default)
|
||||
}
|
||||
}
|
||||
Some(track_lang) => match track_lang == language {
|
||||
true => FilterResult::Match,
|
||||
false => FilterResult::allow(track.is_default),
|
||||
},
|
||||
None => FilterResult::allow(track.is_default),
|
||||
},
|
||||
None => FilterResult::Match,
|
||||
|
@ -152,7 +140,6 @@ impl<'a> StreamFilter<'a> {
|
|||
///
|
||||
/// This is a soft filter, so if there is no stream with a resolution
|
||||
/// <= the limit, the stream with the next higher resolution is returned.
|
||||
#[must_use]
|
||||
pub fn video_max_res(mut self, max_res: u32) -> Self {
|
||||
self.video_max_res = Some(max_res);
|
||||
self
|
||||
|
@ -169,7 +156,6 @@ impl<'a> StreamFilter<'a> {
|
|||
///
|
||||
/// This is a soft filter, so if there is no stream with a framerate
|
||||
/// <= the limit, the stream with the next higher framerate is returned.
|
||||
#[must_use]
|
||||
pub fn video_max_fps(mut self, max_fps: u8) -> Self {
|
||||
self.video_max_fps = Some(max_fps);
|
||||
self
|
||||
|
@ -183,7 +169,6 @@ impl<'a> StreamFilter<'a> {
|
|||
}
|
||||
|
||||
/// Set the supported video container formats
|
||||
#[must_use]
|
||||
pub fn video_formats(mut self, formats: &'a [VideoFormat]) -> Self {
|
||||
self.video_formats = Some(formats);
|
||||
self
|
||||
|
@ -197,7 +182,6 @@ impl<'a> StreamFilter<'a> {
|
|||
}
|
||||
|
||||
/// Set the supported video codecs
|
||||
#[must_use]
|
||||
pub fn video_codecs(mut self, codecs: &'a [VideoCodec]) -> Self {
|
||||
self.video_codecs = Some(codecs);
|
||||
self
|
||||
|
@ -211,7 +195,6 @@ impl<'a> StreamFilter<'a> {
|
|||
}
|
||||
|
||||
/// Allow HDR videos
|
||||
#[must_use]
|
||||
pub fn video_hdr(mut self) -> Self {
|
||||
self.video_hdr = true;
|
||||
self
|
||||
|
@ -225,7 +208,6 @@ impl<'a> StreamFilter<'a> {
|
|||
}
|
||||
|
||||
/// Output no video stream (audio only)
|
||||
#[must_use]
|
||||
pub fn no_video(mut self) -> Self {
|
||||
self.video_none = true;
|
||||
self
|
||||
|
@ -254,7 +236,6 @@ impl<'a> StreamFilter<'a> {

impl VideoPlayer {
    /// Select the audio stream which is the best match for the given [`StreamFilter`]
    #[must_use]
    pub fn select_audio_stream(&self, filter: &StreamFilter) -> Option<&AudioStream> {
        let mut fallback: Option<&AudioStream> = None;

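A usage sketch for `select_audio_stream` together with the `StreamFilter` builder from this file; the video ID is just an example.

```rust ignore
use rustypipe::{client::RustyPipe, param::StreamFilter};

#[tokio::main]
async fn main() {
    let rp = RustyPipe::new();
    let player = rp.query().player("pPvd8UxmSbQ").await.unwrap();

    // Cap the audio bitrate at roughly 160 kbps; this is a soft filter,
    // so the next higher bitrate is returned if nothing matches.
    let filter = StreamFilter::new().audio_max_bitrate(160_000);

    if let Some(audio) = player.select_audio_stream(&filter) {
        println!("{} ({} bps)", audio.url, audio.bitrate);
    }
}
```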
@ -37,13 +37,13 @@ const FILENAME_FORMAT: &[time::format_description::FormatItem] =
|
|||
/// RustyPipe error report
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct Report<'a> {
|
||||
pub struct Report {
|
||||
/// Information about the RustyPipe client
|
||||
pub info: RustyPipeInfo<'a>,
|
||||
pub info: RustyPipeInfo,
|
||||
/// Severity of the report
|
||||
pub level: Level,
|
||||
/// RustyPipe operation (e.g. `get_player`)
|
||||
pub operation: &'a str,
|
||||
pub operation: String,
|
||||
/// Error (if occurred)
|
||||
pub error: Option<String>,
|
||||
/// Detailed error/warning messages
|
||||
|
@ -52,17 +52,17 @@ pub struct Report<'a> {
|
|||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub deobf_data: Option<DeobfData>,
|
||||
/// HTTP request data
|
||||
pub http_request: HTTPRequest<'a>,
|
||||
pub http_request: HTTPRequest,
|
||||
}
|
||||
|
||||
/// Information about the RustyPipe client
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct RustyPipeInfo<'a> {
|
||||
pub struct RustyPipeInfo {
|
||||
/// Rust package name (`rustypipe`)
|
||||
pub package: &'a str,
|
||||
pub package: String,
|
||||
/// Package version (`0.1.0`)
|
||||
pub version: &'a str,
|
||||
pub version: String,
|
||||
/// Date/Time when the event occurred
|
||||
#[serde(with = "time::serde::rfc3339")]
|
||||
pub date: OffsetDateTime,
|
||||
|
@ -71,13 +71,13 @@ pub struct RustyPipeInfo<'a> {
|
|||
/// Reported HTTP request data
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct HTTPRequest<'a> {
|
||||
pub struct HTTPRequest {
|
||||
/// Request URL
|
||||
pub url: &'a str,
|
||||
pub url: String,
|
||||
/// HTTP method
|
||||
pub method: &'a str,
|
||||
pub method: String,
|
||||
/// HTTP request header
|
||||
pub req_header: BTreeMap<&'a str, String>,
|
||||
pub req_header: BTreeMap<String, String>,
|
||||
/// HTTP request body
|
||||
pub req_body: String,
|
||||
/// HTTP response status code
|
||||
|
@ -98,11 +98,11 @@ pub enum Level {
|
|||
ERR,
|
||||
}
|
||||
|
||||
impl Default for RustyPipeInfo<'_> {
|
||||
impl Default for RustyPipeInfo {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
package: env!("CARGO_PKG_NAME"),
|
||||
version: env!("CARGO_PKG_VERSION"),
|
||||
package: "rustypipe".to_owned(),
|
||||
version: "0.1.0".to_owned(),
|
||||
date: util::now_sec(),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -349,9 +349,15 @@ impl From<TextComponent> for crate::model::ArtistId {
|
|||
name: text,
|
||||
},
|
||||
},
|
||||
TextComponent::Video { text, .. }
|
||||
| TextComponent::Web { text, .. }
|
||||
| TextComponent::Text { text } => Self {
|
||||
TextComponent::Video { text, .. } => Self {
|
||||
id: None,
|
||||
name: text,
|
||||
},
|
||||
TextComponent::Web { text, .. } => Self {
|
||||
id: None,
|
||||
name: text,
|
||||
},
|
||||
TextComponent::Text { text } => Self {
|
||||
id: None,
|
||||
name: text,
|
||||
},
|
||||
|
@ -400,10 +406,10 @@ impl From<TextComponents> for crate::model::richtext::RichText {
|
|||
impl TextComponent {
|
||||
pub fn as_str(&self) -> &str {
|
||||
match self {
|
||||
TextComponent::Video { text, .. }
|
||||
| TextComponent::Browse { text, .. }
|
||||
| TextComponent::Web { text, .. }
|
||||
| TextComponent::Text { text } => text,
|
||||
TextComponent::Video { text, .. } => text,
|
||||
TextComponent::Browse { text, .. } => text,
|
||||
TextComponent::Web { text, .. } => text,
|
||||
TextComponent::Text { text } => text,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -411,10 +417,7 @@ impl TextComponent {
|
|||
impl TextComponents {
|
||||
/// Return the string representation of the first text component
|
||||
pub fn first_str(&self) -> &str {
|
||||
self.0
|
||||
.first()
|
||||
.map(TextComponent::as_str)
|
||||
.unwrap_or_default()
|
||||
self.0.first().map(|t| t.as_str()).unwrap_or_default()
|
||||
}
|
||||
|
||||
/// Split the text components using the given separation string.
|
||||
|
@ -437,7 +440,7 @@ impl TextComponents {
|
|||
}
|
||||
|
||||
if !inner.is_empty() {
|
||||
buf.push(TextComponents(inner));
|
||||
buf.push(TextComponents(inner))
|
||||
}
|
||||
|
||||
buf
|
||||
|
@ -446,7 +449,7 @@ impl TextComponents {
|
|||
|
||||
impl ToString for TextComponents {
|
||||
fn to_string(&self) -> String {
|
||||
self.0.iter().map(TextComponent::as_str).collect::<String>()
|
||||
self.0.iter().map(|x| x.as_str()).collect::<String>()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,17 +1,13 @@
|
|||
// This file is automatically generated. DO NOT EDIT.
|
||||
// See codegen/gen_dictionary.rs for the generation code.
|
||||
#![allow(clippy::unreadable_literal)]
|
||||
|
||||
//! The dictionary contains the information required to parse dates and numbers
|
||||
//! in all supported languages.
|
||||
|
||||
use crate::{
|
||||
model::AlbumType,
|
||||
param::Language,
|
||||
util::timeago::{DateCmp, TaToken, TimeUnit},
|
||||
};
|
||||
|
||||
/// Dictionary entry containing language-specific parsing information
|
||||
/// The dictionary contains the information required to parse dates and numbers
|
||||
/// in all supported languages.
|
||||
pub(crate) struct Entry {
|
||||
/// Tokens for parsing timeago strings.
|
||||
///
|
||||
|
|
|
@ -26,7 +26,7 @@ pub static VIDEO_ID_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^[A-Za-z0-9_-
|
|||
pub static CHANNEL_ID_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"^UC[A-Za-z0-9_-]{22}$").unwrap());
|
||||
pub static PLAYLIST_ID_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"^(?:PL|RDCLAK|OLAK|UU)[A-Za-z0-9_-]{16,50}$").unwrap());
|
||||
Lazy::new(|| Regex::new(r"^(?:PL|RDCLAK|OLAK)[A-Za-z0-9_-]{16,50}$").unwrap());
|
||||
pub static ALBUM_ID_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"^MPREb_[A-Za-z0-9_-]{11}$").unwrap());
|
||||
pub static VANITY_PATH_REGEX: Lazy<Regex> = Lazy::new(|| {
|
||||
|
@ -91,7 +91,7 @@ pub fn random_uuid() -> String {
|
|||
rng.gen::<u16>(),
|
||||
rng.gen::<u16>(),
|
||||
rng.gen::<u16>(),
|
||||
rng.gen::<u64>() & 0xffff_ffff_ffff,
|
||||
rng.gen::<u64>() & 0xffffffffffff,
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -315,7 +315,10 @@

    let dict_entry = dictionary::entry(lang);
    let by_char = lang_by_char(lang) || lang == Language::Ko;
    let decimal_point = if dict_entry.comma_decimal { ',' } else { '.' };
    let decimal_point = match dict_entry.comma_decimal {
        true => ',',
        false => '.',
    };

    let mut digits = String::new();
    let mut filtered = String::new();

@ -342,14 +345,14 @@
    if digits.is_empty() {
        SplitTokens::new(&filtered, by_char)
            .find_map(|token| dict_entry.number_nd_tokens.get(token))
            .and_then(|n| (u64::from(*n)).try_into().ok())
            .and_then(|n| (*n as u64).try_into().ok())
    } else {
        let num = digits.parse::<u64>().ok()?;

        exp += SplitTokens::new(&filtered, by_char)
            .filter_map(|token| match token {
                "k" => Some(3),
                _ => dict_entry.number_tokens.get(token).map(|t| i32::from(*t)),
                _ => dict_entry.number_tokens.get(token).map(|t| *t as i32),
            })
            .sum::<i32>();

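A simplified illustration of the abbreviated-number parsing above; the real implementation looks up suffix tokens per language in the generated dictionary, the mapping used here is made up:

```rust
fn main() {
    // Digits are collected separately, suffix tokens contribute a decimal exponent.
    let input = "2.4M views";

    let digits: String = input.chars().filter(|c| c.is_ascii_digit()).collect(); // "24"
    let num: u64 = digits.parse().unwrap();

    // Assume "M" maps to exponent 6; one digit after the decimal point shifts it down by 1.
    let exp: u32 = 6 - 1;
    let value = num * 10u64.pow(exp);
    assert_eq!(value, 2_400_000);
}
```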
@ -444,10 +447,9 @@ pub enum SplitTokens<'a> {
|
|||
|
||||
impl<'a> SplitTokens<'a> {
|
||||
pub fn new(s: &'a str, by_char: bool) -> Self {
|
||||
if by_char {
|
||||
Self::Char(SplitChar::from(s))
|
||||
} else {
|
||||
Self::Word(s.split_whitespace())
|
||||
match by_char {
|
||||
true => Self::Char(SplitChar::from(s)),
|
||||
false => Self::Word(s.split_whitespace()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,8 +33,8 @@ impl ProtoBuilder {
    ///
    /// Reference: <https://developers.google.com/protocol-buffers/docs/encoding?hl=en#structure>
    fn _field(&mut self, field: u32, wire: u8) {
        let fbits = u64::from(field) << 3;
        let wbits = u64::from(wire) & 0x07;
        let fbits: u64 = (field as u64) << 3;
        let wbits = wire as u64 & 0x07;
        let val: u64 = fbits | wbits;
        self._varint(val);
    }

@ -74,7 +74,7 @@ fn parse_varint<P: Iterator<Item = u8>>(pb: &mut P) -> Option<u64> {

    for b in pb.by_ref() {
        let value = b & 0x7f;
        result |= u64::from(value) << (7 * num_read);
        result |= (value as u64) << (7 * num_read);
        num_read += 1;

        if b & 0x80 == 0 {

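A standalone illustration of the wire format these two functions implement (field header and base-128 varint), independent of `ProtoBuilder`:

```rust
fn main() {
    // Protobuf tag: (field number << 3) | wire type, itself encoded as a varint.
    let field: u32 = 8;
    let wire: u8 = 0; // 0 = varint
    let tag = (u64::from(field) << 3) | (u64::from(wire) & 0x07);
    assert_eq!(tag, 0x40);

    // Varint encoding of 300: little-endian 7-bit groups with a continuation bit.
    let mut val: u64 = 300;
    let mut out = Vec::new();
    loop {
        let mut b = (val & 0x7f) as u8;
        val >>= 7;
        if val != 0 {
            b |= 0x80; // more bytes follow
        }
        out.push(b);
        if val == 0 {
            break;
        }
    }
    assert_eq!(out, vec![0xAC, 0x02]);
}
```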
@ -118,9 +118,10 @@ pub fn string_from_pb<P: IntoIterator<Item = u8>>(pb: P, field: u32) -> Option<S
|
|||
buf.push(pb.next()?);
|
||||
}
|
||||
return String::from_utf8(buf).ok();
|
||||
}
|
||||
} else {
|
||||
len
|
||||
}
|
||||
}
|
||||
_ => return None,
|
||||
};
|
||||
for _ in 0..to_skip {
|
||||
|
|
|
@ -77,7 +77,7 @@ pub enum DateCmp {
}

impl TimeUnit {
    pub fn secs(self) -> i64 {
    pub fn secs(&self) -> i64 {
        match self {
            TimeUnit::Second => 1,
            TimeUnit::Minute => 60,

@ -91,7 +91,7 @@ impl TimeUnit {
}

impl TimeAgo {
    fn secs(self) -> i64 {
    fn secs(&self) -> i64 {
        i64::from(self.n) * self.unit.secs()
    }
}

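A simplified, self-contained version of the "time ago" arithmetic above; the real `TimeUnit`/`TimeAgo` types have more variants and shift months and years via the `time` crate:

```rust
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum TimeUnit {
    Second,
    Minute,
    Hour,
    Day,
    Week,
}

impl TimeUnit {
    fn secs(self) -> i64 {
        match self {
            TimeUnit::Second => 1,
            TimeUnit::Minute => 60,
            TimeUnit::Hour => 3600,
            TimeUnit::Day => 86_400,
            TimeUnit::Week => 604_800,
        }
    }
}

fn main() {
    // "3 weeks ago" -> subtract 3 * 604800 seconds from the current timestamp.
    let (n, unit) = (3_i64, TimeUnit::Week);
    assert_eq!(n * unit.secs(), 1_814_400);
}
```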
@ -117,8 +117,8 @@ impl From<TimeAgo> for OffsetDateTime {
|
|||
fn from(ta: TimeAgo) -> Self {
|
||||
let ts = util::now_sec();
|
||||
match ta.unit {
|
||||
TimeUnit::Month => ts.replace_date(util::shift_months(ts.date(), -i32::from(ta.n))),
|
||||
TimeUnit::Year => ts.replace_date(util::shift_years(ts.date(), -i32::from(ta.n))),
|
||||
TimeUnit::Month => ts.replace_date(util::shift_months(ts.date(), -(ta.n as i32))),
|
||||
TimeUnit::Year => ts.replace_date(util::shift_years(ts.date(), -(ta.n as i32))),
|
||||
_ => ts - Duration::from(ta),
|
||||
}
|
||||
}
|
||||
|
@ -156,10 +156,9 @@ struct TaTokenParser<'a> {
|
|||
|
||||
impl<'a> TaTokenParser<'a> {
|
||||
fn new(entry: &'a dictionary::Entry, by_char: bool, nd: bool, filtered_str: &'a str) -> Self {
|
||||
let tokens = if nd {
|
||||
&entry.timeago_nd_tokens
|
||||
} else {
|
||||
&entry.timeago_tokens
|
||||
let tokens = match nd {
|
||||
true => &entry.timeago_nd_tokens,
|
||||
false => &entry.timeago_tokens,
|
||||
};
|
||||
Self {
|
||||
iter: SplitTokens::new(filtered_str, by_char),
|
||||
|
@ -210,7 +209,7 @@ pub fn parse_timeago(lang: Language, textual_date: &str) -> Option<TimeAgo> {
|
|||
///
|
||||
/// Returns [`None`] if the date could not be parsed.
|
||||
pub fn parse_timeago_dt(lang: Language, textual_date: &str) -> Option<OffsetDateTime> {
|
||||
parse_timeago(lang, textual_date).map(OffsetDateTime::from)
|
||||
parse_timeago(lang, textual_date).map(|ta| ta.into())
|
||||
}
|
||||
|
||||
pub fn parse_timeago_dt_or_warn(
|
||||
|
@ -261,7 +260,7 @@ pub fn parse_textual_date(lang: Language, textual_date: &str) -> Option<ParsedDa
|
|||
|
||||
// Chinese/Japanese dont use textual months
|
||||
if m.is_none() && !by_char {
|
||||
m = parse_textual_month(&entry, &filtered_str).map(u16::from);
|
||||
m = parse_textual_month(&entry, &filtered_str).map(|n| n as u16);
|
||||
}
|
||||
|
||||
match (y, m, d) {
|
||||
|
@ -283,7 +282,7 @@ pub fn parse_textual_date(lang: Language, textual_date: &str) -> Option<ParsedDa
|
|||
///
|
||||
/// Returns None if the date could not be parsed.
|
||||
pub fn parse_textual_date_to_dt(lang: Language, textual_date: &str) -> Option<OffsetDateTime> {
|
||||
parse_textual_date(lang, textual_date).map(OffsetDateTime::from)
|
||||
parse_textual_date(lang, textual_date).map(|ta| ta.into())
|
||||
}
|
||||
|
||||
pub fn parse_textual_date_or_warn(
|
||||
|
|
118 src/validate.rs
|
@ -8,10 +8,10 @@
//! [string resolver](crate::client::RustyPipeQuery::resolve_string) is great for handling
//! arbitrary input and returns a [`UrlTarget`](crate::model::UrlTarget) enum that tells you
//! whether the given URL points to a video, channel, playlist, etc.
//! - The validation functions of this module are meant for validating specific data (video IDs,
//! - The validation functions of this module are meant for validating concrete data (video IDs,
//!   channel IDs, playlist IDs) and return [`true`] if the given input is valid

use crate::{error::Error, util};
use crate::util;
use once_cell::sync::Lazy;
use regex::Regex;

|
@ -22,15 +22,12 @@ use regex::Regex;
|
|||
/// # Examples
|
||||
/// ```
|
||||
/// # use rustypipe::validate;
|
||||
/// assert!(validate::video_id("dQw4w9WgXcQ").is_ok());
|
||||
/// assert!(validate::video_id("Abcd").is_err());
|
||||
/// assert!(validate::video_id("dQw4w9WgXc@").is_err());
|
||||
/// assert!(validate::video_id("dQw4w9WgXcQ"));
|
||||
/// assert!(!validate::video_id("Abcd"));
|
||||
/// assert!(!validate::video_id("dQw4w9WgXc@"));
|
||||
/// ```
|
||||
pub fn video_id<S: AsRef<str>>(video_id: S) -> Result<(), Error> {
|
||||
check(
|
||||
util::VIDEO_ID_REGEX.is_match(video_id.as_ref()),
|
||||
"invalid video id",
|
||||
)
|
||||
pub fn video_id<S: AsRef<str>>(video_id: S) -> bool {
|
||||
util::VIDEO_ID_REGEX.is_match(video_id.as_ref())
|
||||
}
|
||||
|
||||
/// Validate the given channel ID
|
||||
|
@ -41,15 +38,12 @@ pub fn video_id<S: AsRef<str>>(video_id: S) -> Result<(), Error> {
|
|||
/// # Examples
|
||||
/// ```
|
||||
/// # use rustypipe::validate;
|
||||
/// assert!(validate::channel_id("UC2DjFE7Xf11URZqWBigcVOQ").is_ok());
|
||||
/// assert!(validate::channel_id("Abcd").is_err());
|
||||
/// assert!(validate::channel_id("XY2DjFE7Xf11URZqWBigcVOQ").is_err());
|
||||
/// assert!(validate::channel_id("UC2DjFE7Xf11URZqWBigcVOQ"));
|
||||
/// assert!(!validate::channel_id("Abcd"));
|
||||
/// assert!(!validate::channel_id("XY2DjFE7Xf11URZqWBigcVOQ"));
|
||||
/// ```
|
||||
pub fn channel_id<S: AsRef<str>>(channel_id: S) -> Result<(), Error> {
|
||||
check(
|
||||
util::CHANNEL_ID_REGEX.is_match(channel_id.as_ref()),
|
||||
"invalid channel id",
|
||||
)
|
||||
pub fn channel_id<S: AsRef<str>>(channel_id: S) -> bool {
|
||||
util::CHANNEL_ID_REGEX.is_match(channel_id.as_ref())
|
||||
}
|
||||
|
||||
/// Validate the given playlist ID
|
||||
|
@ -61,17 +55,14 @@ pub fn channel_id<S: AsRef<str>>(channel_id: S) -> Result<(), Error> {
|
|||
/// # Examples
|
||||
/// ```
|
||||
/// # use rustypipe::validate;
|
||||
/// assert!(validate::playlist_id("PL4lEESSgxM_5O81EvKCmBIm_JT5Q7JeaI").is_ok());
|
||||
/// assert!(validate::playlist_id("RDCLAK5uy_kFQXdnqMaQCVx2wpUM4ZfbsGCDibZtkJk").is_ok());
|
||||
/// assert!(validate::playlist_id("OLAK5uy_k0yFrZlFRgCf3rLPza-lkRmCrtLPbK9pE").is_ok());
|
||||
/// assert!(validate::playlist_id("PL4lEESSgxM_5O81EvKCmBIm_JT5Q7JeaI"));
|
||||
/// assert!(validate::playlist_id("RDCLAK5uy_kFQXdnqMaQCVx2wpUM4ZfbsGCDibZtkJk"));
|
||||
/// assert!(validate::playlist_id("OLAK5uy_k0yFrZlFRgCf3rLPza-lkRmCrtLPbK9pE"));
|
||||
///
|
||||
/// assert!(validate::playlist_id("Abcd").is_err());
|
||||
/// assert!(!validate::playlist_id("Abcd"));
|
||||
/// ```
|
||||
pub fn playlist_id<S: AsRef<str>>(playlist_id: S) -> Result<(), Error> {
|
||||
check(
|
||||
util::PLAYLIST_ID_REGEX.is_match(playlist_id.as_ref()),
|
||||
"invalid playlist id",
|
||||
)
|
||||
pub fn playlist_id<S: AsRef<str>>(playlist_id: S) -> bool {
|
||||
util::PLAYLIST_ID_REGEX.is_match(playlist_id.as_ref())
|
||||
}
|
||||
|
||||
/// Validate the given album ID
|
||||
|
@ -82,8 +73,8 @@ pub fn playlist_id<S: AsRef<str>>(playlist_id: S) -> Result<(), Error> {
|
|||
/// # Examples
|
||||
/// ```
|
||||
/// # use rustypipe::validate;
|
||||
/// assert!(validate::album_id("MPREb_GyH43gCvdM5").is_ok());
|
||||
/// assert!(validate::album_id("Abcd_GyH43gCvdM5").is_err());
|
||||
/// assert!(validate::album_id("MPREb_GyH43gCvdM5"));
|
||||
/// assert!(!validate::album_id("Abcd_GyH43gCvdM5"));
|
||||
/// ```
|
||||
///
|
||||
/// # Note
|
||||
|
@ -95,11 +86,8 @@ pub fn playlist_id<S: AsRef<str>>(playlist_id: S) -> Result<(), Error> {
|
|||
/// If you have the playlist ID of an album and need the album ID, you can use the
|
||||
/// [string resolver](crate::client::RustyPipeQuery::resolve_string) with the `resolve_albums`
|
||||
/// option enabled.
|
||||
pub fn album_id<S: AsRef<str>>(album_id: S) -> Result<(), Error> {
|
||||
check(
|
||||
util::ALBUM_ID_REGEX.is_match(album_id.as_ref()),
|
||||
"invalid album id",
|
||||
)
|
||||
pub fn album_id<S: AsRef<str>>(album_id: S) -> bool {
|
||||
util::ALBUM_ID_REGEX.is_match(album_id.as_ref())
|
||||
}
|
||||
|
||||
/// Validate the given radio ID
|
||||
|
@ -119,18 +107,15 @@ pub fn album_id<S: AsRef<str>>(album_id: S) -> Result<(), Error> {
|
|||
///
|
||||
/// ```
|
||||
/// # use rustypipe::validate;
|
||||
/// assert!(validate::radio_id("RDEMSuoM_jxfse1_g8uCO7MCtg").is_ok());
|
||||
/// assert!(validate::radio_id("Abcd").is_err());
|
||||
/// assert!(validate::radio_id("XYEMSuoM_jxfse1_g8uCO7MCtg").is_err());
|
||||
/// assert!(validate::radio_id("RDEMSuoM_jxfse1_g8uCO7MCtg"));
|
||||
/// assert!(!validate::radio_id("Abcd"));
|
||||
/// assert!(!validate::radio_id("XYEMSuoM_jxfse1_g8uCO7MCtg"));
|
||||
/// ```
|
||||
pub fn radio_id<S: AsRef<str>>(radio_id: S) -> Result<(), Error> {
|
||||
pub fn radio_id<S: AsRef<str>>(radio_id: S) -> bool {
|
||||
static RADIO_ID_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"^RD[A-Za-z0-9_-]{22,50}$").unwrap());
|
||||
|
||||
check(
|
||||
RADIO_ID_REGEX.is_match(radio_id.as_ref()),
|
||||
"invalid radio id",
|
||||
)
|
||||
RADIO_ID_REGEX.is_match(radio_id.as_ref())
|
||||
}
|
||||
|
||||
/// Validate the given genre ID
|
||||
|
@ -142,21 +127,18 @@ pub fn radio_id<S: AsRef<str>>(radio_id: S) -> Result<(), Error> {
|
|||
///
|
||||
/// ```
|
||||
/// # use rustypipe::validate;
|
||||
/// assert!(validate::genre_id("ggMPOg1uX1JOQWZFeDByc2Jm").is_ok());
|
||||
/// assert!(validate::genre_id("Abcd").is_err());
|
||||
/// assert!(validate::genre_id("ggAbcg1uX1JOQWZFeDByc2Jm").is_err());
|
||||
/// assert!(validate::genre_id("ggMPOg1uX1JOQWZFeDByc2Jm"));
|
||||
/// assert!(!validate::genre_id("Abcd"));
|
||||
/// assert!(!validate::genre_id("ggAbcg1uX1JOQWZFeDByc2Jm"));
|
||||
/// ```
|
||||
pub fn genre_id<S: AsRef<str>>(genre_id: S) -> Result<(), Error> {
|
||||
pub fn genre_id<S: AsRef<str>>(genre_id: S) -> bool {
|
||||
static GENRE_ID_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"^ggMPO[A-Za-z0-9_-]{19}$").unwrap());
|
||||
|
||||
check(
|
||||
GENRE_ID_REGEX.is_match(genre_id.as_ref()),
|
||||
"invalid genre id",
|
||||
)
|
||||
GENRE_ID_REGEX.is_match(genre_id.as_ref())
|
||||
}
|
||||
|
||||
/// Validate the given related tracks ID
|
||||
/// Validate the given related ID
|
||||
///
|
||||
/// YouTube related IDs are exactly 17 characters long, start with the characters `MPTRt_`,
|
||||
/// followed by 11 of these characters: `A-Za-z0-9_-`.
|
||||
|
@ -165,18 +147,15 @@ pub fn genre_id<S: AsRef<str>>(genre_id: S) -> Result<(), Error> {
|
|||
///
|
||||
/// ```
|
||||
/// # use rustypipe::validate;
|
||||
/// assert!(validate::track_related_id("MPTRt_wrKjTn9hmry").is_ok());
|
||||
/// assert!(validate::track_related_id("Abcd").is_err());
|
||||
/// assert!(validate::track_related_id("Abcdt_wrKjTn9hmry").is_err());
|
||||
/// assert!(validate::track_related_id("MPTRt_wrKjTn9hmry"));
|
||||
/// assert!(!validate::track_related_id("Abcd"));
|
||||
/// assert!(!validate::track_related_id("Abcdt_wrKjTn9hmry"));
|
||||
/// ```
|
||||
pub fn track_related_id<S: AsRef<str>>(related_id: S) -> Result<(), Error> {
|
||||
pub fn track_related_id<S: AsRef<str>>(related_id: S) -> bool {
|
||||
static RELATED_ID_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"^MPTRt_[A-Za-z0-9_-]{11}$").unwrap());
|
||||
|
||||
check(
|
||||
RELATED_ID_REGEX.is_match(related_id.as_ref()),
|
||||
"invalid related track id",
|
||||
)
|
||||
RELATED_ID_REGEX.is_match(related_id.as_ref())
|
||||
}
|
||||
|
||||
/// Validate the given lyrics ID
|
||||
|
@ -188,24 +167,13 @@ pub fn track_related_id<S: AsRef<str>>(related_id: S) -> Result<(), Error> {
|
|||
///
|
||||
/// ```
|
||||
/// # use rustypipe::validate;
|
||||
/// assert!(validate::track_lyrics_id("MPLYt_wrKjTn9hmry").is_ok());
|
||||
/// assert!(validate::track_lyrics_id("Abcd").is_err());
|
||||
/// assert!(validate::track_lyrics_id("Abcdt_wrKjTn9hmry").is_err());
|
||||
/// assert!(validate::track_lyrics_id("MPLYt_wrKjTn9hmry"));
|
||||
/// assert!(!validate::track_lyrics_id("Abcd"));
|
||||
/// assert!(!validate::track_lyrics_id("Abcdt_wrKjTn9hmry"));
|
||||
/// ```
|
||||
pub fn track_lyrics_id<S: AsRef<str>>(lyrics_id: S) -> Result<(), Error> {
|
||||
pub fn track_lyrics_id<S: AsRef<str>>(lyrics_id: S) -> bool {
|
||||
static LYRICS_ID_REGEX: Lazy<Regex> =
|
||||
Lazy::new(|| Regex::new(r"^MPLYt_[A-Za-z0-9_-]{11}$").unwrap());
|
||||
|
||||
check(
|
||||
LYRICS_ID_REGEX.is_match(lyrics_id.as_ref()),
|
||||
"invalid lyrics id",
|
||||
)
|
||||
}
|
||||
|
||||
fn check(res: bool, msg: &'static str) -> Result<(), Error> {
|
||||
if res {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(Error::Other(msg.into()))
|
||||
}
|
||||
LYRICS_ID_REGEX.is_match(lyrics_id.as_ref())
|
||||
}
|
||||
|
|
|
@ -53,7 +53,7 @@ fn get_player_from_client(#[case] client_type: ClientType, rp: RustyPipe) {
|
|||
assert_eq!(player_data.details.channel.name, "NoCopyrightSounds");
|
||||
assert_gte(player_data.details.view_count, 146_818_808, "view count");
|
||||
assert_eq!(player_data.details.keywords[0], "spektrem");
|
||||
assert!(!player_data.details.is_live_content);
|
||||
assert_eq!(player_data.details.is_live_content, false);
|
||||
|
||||
if client_type == ClientType::Ios {
|
||||
let video = player_data
|
||||
|
@ -68,21 +68,21 @@ fn get_player_from_client(#[case] client_type: ClientType, rp: RustyPipe) {
|
|||
.unwrap();
|
||||
|
||||
// Bitrates may change between requests
|
||||
assert_approx(f64::from(video.bitrate), 1_507_068.0);
|
||||
assert_eq!(video.average_bitrate, 1_345_149);
|
||||
assert_eq!(video.size.unwrap(), 43_553_412);
|
||||
assert_approx(video.bitrate as f64, 1507068.0);
|
||||
assert_eq!(video.average_bitrate, 1345149);
|
||||
assert_eq!(video.size.unwrap(), 43553412);
|
||||
assert_eq!(video.width, 1280);
|
||||
assert_eq!(video.height, 720);
|
||||
assert_eq!(video.fps, 30);
|
||||
assert_eq!(video.quality, "720p");
|
||||
assert!(!video.hdr);
|
||||
assert_eq!(video.hdr, false);
|
||||
assert_eq!(video.mime, "video/webm; codecs=\"vp09.00.31.08\"");
|
||||
assert_eq!(video.format, VideoFormat::Webm);
|
||||
assert_eq!(video.codec, VideoCodec::Vp9);
|
||||
|
||||
assert_approx(f64::from(audio.bitrate), 130_685.0);
|
||||
assert_approx(f64::from(audio.average_bitrate), 129_496.0);
|
||||
assert_approx(audio.size as f64, 4_193_863.0);
|
||||
assert_approx(audio.bitrate as f64, 130685.0);
|
||||
assert_approx(audio.average_bitrate as f64, 129496.0);
|
||||
assert_approx(audio.size as f64, 4193863.0);
|
||||
assert_eq!(audio.mime, "audio/mp4; codecs=\"mp4a.40.2\"");
|
||||
assert_eq!(audio.format, AudioFormat::M4a);
|
||||
assert_eq!(audio.codec, AudioCodec::Mp4a);
|
||||
|
@ -101,26 +101,26 @@ fn get_player_from_client(#[case] client_type: ClientType, rp: RustyPipe) {
.find(|s| s.itag == 251)
.expect("audio stream not found");

- assert_approx(f64::from(video.bitrate), 1_340_829.0);
- assert_approx(f64::from(video.average_bitrate), 1_233_444.0);
- assert_approx(video.size.unwrap() as f64, 39_936_630.0);
+ assert_approx(video.bitrate as f64, 1340829.0);
+ assert_approx(video.average_bitrate as f64, 1233444.0);
+ assert_approx(video.size.unwrap() as f64, 39936630.0);
assert_eq!(video.width, 1280);
assert_eq!(video.height, 720);
assert_eq!(video.fps, 30);
assert_eq!(video.quality, "720p");
- assert!(!video.hdr);
+ assert_eq!(video.hdr, false);
assert_eq!(video.mime, "video/mp4; codecs=\"av01.0.05M.08\"");
assert_eq!(video.format, VideoFormat::Mp4);
assert_eq!(video.codec, VideoCodec::Av01);
- assert!(!video.throttled);
+ assert_eq!(video.throttled, false);

- assert_approx(f64::from(audio.bitrate), 142_718.0);
- assert_approx(f64::from(audio.average_bitrate), 130_708.0);
- assert_approx(audio.size as f64, 4_232_344.0);
+ assert_approx(audio.bitrate as f64, 142718.0);
+ assert_approx(audio.average_bitrate as f64, 130708.0);
+ assert_approx(audio.size as f64, 4232344.0);
assert_eq!(audio.mime, "audio/webm; codecs=\"opus\"");
assert_eq!(audio.format, AudioFormat::Webm);
assert_eq!(audio.codec, AudioCodec::Opus);
- assert!(!audio.throttled);
+ assert_eq!(audio.throttled, false);

check_video_stream(video);
check_video_stream(audio);
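`assert_approx` itself is not part of this comparison; the `// Bitrates may change between requests` comment suggests it checks the recorded value with some slack rather than exactly. A rough sketch of a helper with the same two-argument shape, where the 10 % relative tolerance is an assumption for illustration, not the test suite's actual value:

```rust ignore
/// Assert that `actual` is within a relative tolerance of `expected`.
/// Hypothetical stand-in for the `assert_approx` used in the tests above.
fn assert_approx(actual: f64, expected: f64) {
    let tolerance = 0.1; // assumed value, not taken from the crate
    let diff = (actual - expected).abs();
    assert!(
        diff <= expected.abs() * tolerance,
        "value {actual} deviates from expected {expected} by more than {:.0}%",
        tolerance * 100.0
    );
}

fn main() {
    // A bitrate that drifted a little between requests still passes
    assert_approx(1_520_000.0, 1_507_068.0);
}
```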
@ -151,7 +151,7 @@ fn check_video_stream(s: impl YtStream) {
260,
"UC2llNlEM62gU-_fXPHfgbDg",
"Oonagh",
- 830_900,
+ 830900,
false,
false
)]
@ -873,7 +873,7 @@ fn channel_info(rp: RustyPipe) {

assert_gte(
channel.content.view_count.unwrap(),
- 186_854_340,
+ 186854340,
"channel views",
);
@ -1467,7 +1467,7 @@ fn music_artist(
.for_each(|t| assert!(!t.avatar.is_empty()));

// Sort albums to ensure consistent order
- artist.albums.sort_by_key(|a| a.id.clone());
+ artist.albums.sort_by_key(|a| a.id.to_owned());

if unlocalized {
insta::assert_ron_snapshot!(format!("music_artist_{name}"), artist, {
@ -1944,19 +1944,19 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {
let mut track_albums = 0;

for track in related.tracks {
- validate::video_id(&track.id).unwrap();
+ assert_video_id(&track.id);
assert!(!track.name.is_empty());
assert!(!track.cover.is_empty(), "got no cover");

if let Some(artist_id) = track.artist_id {
- validate::channel_id(&artist_id).unwrap();
+ assert_channel_id(&artist_id);
track_artist_ids += 1;
}

let artist = track.artists.first().unwrap();
assert!(!artist.name.is_empty());
if let Some(artist_id) = &artist.id {
- validate::channel_id(&artist_id).unwrap();
+ assert_channel_id(artist_id);
track_artists += 1;
}
@ -1968,7 +1968,7 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {

assert!(track.view_count.is_none());
if let Some(album) = track.album {
- validate::album_id(&album.id).unwrap();
+ assert_album_id(&album.id);
assert!(!album.name.is_empty());
track_albums += 1;
}
@ -1985,18 +1985,18 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {
if full {
assert_gte(related.albums.len(), 10, "albums");
for album in related.albums {
- validate::album_id(&album.id).unwrap();
+ assert_album_id(&album.id);
assert!(!album.name.is_empty());
assert!(!album.cover.is_empty(), "got no cover");

let artist = album.artists.first().unwrap();
- validate::channel_id(artist.id.as_ref().unwrap()).unwrap();
+ assert_channel_id(artist.id.as_ref().unwrap());
assert!(!artist.name.is_empty());
}

assert_gte(related.artists.len(), 10, "artists");
for artist in related.artists {
- validate::channel_id(&artist.id).unwrap();
+ assert_channel_id(&artist.id);
assert!(!artist.name.is_empty());
assert!(!artist.avatar.is_empty(), "got no avatar");
assert_gte(artist.subscriber_count.unwrap(), 5000, "subscribers")
@ -2004,7 +2004,7 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {

assert_gte(related.playlists.len(), 10, "playlists");
for playlist in related.playlists {
- validate::playlist_id(&playlist.id).unwrap();
+ assert_playlist_id(&playlist.id);
assert!(!playlist.name.is_empty());
assert!(
!playlist.thumbnail.is_empty(),
@ -2018,7 +2018,7 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {
playlist.id
);
let channel = playlist.channel.unwrap();
- validate::channel_id(&channel.id).unwrap();
+ assert_channel_id(&channel.id);
assert!(!channel.name.is_empty());
} else {
assert!(playlist.channel.is_none());
@ -2134,7 +2134,7 @@ fn music_new_albums(rp: RustyPipe) {
assert_gte(albums.len(), 10, "albums");

for album in albums {
- validate::album_id(&album.id).unwrap();
+ assert_album_id(&album.id);
assert!(!album.name.is_empty());
assert!(!album.cover.is_empty(), "got no cover");
}
@ -2146,7 +2146,7 @@ fn music_new_videos(rp: RustyPipe) {
assert_gte(videos.len(), 5, "videos");

for video in videos {
- validate::video_id(&video.id).unwrap();
+ assert_video_id(&video.id);
assert!(!video.name.is_empty());
assert!(!video.cover.is_empty(), "got no cover");
assert_gte(video.view_count.unwrap(), 1000, "views");
@ -2174,10 +2174,10 @@ fn music_genres(rp: RustyPipe, unlocalized: bool) {
assert_eq!(pop.name, "Pop");
assert!(!pop.is_mood);

- for g in &genres {
- validate::genre_id(&g.id).unwrap();
- assert_gte(g.color, 0xff00_0000, "color");
- }
+ genres.iter().for_each(|g| {
+ assert!(validate::genre_id(&g.id));
+ assert_gte(g.color, 0xff000000, "color");
+ });
}

#[rstest]
@ -2202,7 +2202,7 @@ fn music_genre(#[case] id: &str, #[case] name: &str, rp: RustyPipe, unlocalized:
genre.sections.iter().for_each(|section| {
assert!(!section.name.is_empty());
section.playlists.iter().for_each(|playlist| {
- validate::playlist_id(&playlist.id).unwrap();
+ assert_playlist_id(&playlist.id);
assert!(!playlist.name.is_empty());
assert!(!playlist.thumbnail.is_empty(), "got no cover");
@ -2213,14 +2213,14 @@ fn music_genre(#[case] id: &str, #[case] name: &str, rp: RustyPipe, unlocalized:
playlist.id
);
let channel = playlist.channel.as_ref().unwrap();
- validate::channel_id(&channel.id).unwrap();
+ assert_channel_id(&channel.id);
assert!(!channel.name.is_empty());
} else {
assert!(playlist.channel.is_none());
}
});
if let Some(subgenre_id) = &section.subgenre_id {
- subgenres.push((subgenre_id.clone(), section.name.clone()));
+ subgenres.push((subgenre_id.to_owned(), section.name.to_owned()));
}
});
subgenres
@ -2290,7 +2290,8 @@ fn invalid_ctoken(#[case] ep: ContinuationEndpoint, rp: RustyPipe) {
fn lang() -> Language {
std::env::var("YT_LANG")
.ok()
- .map_or(Language::En, |l| Language::from_str(&l).unwrap())
+ .map(|l| Language::from_str(&l).unwrap())
+ .unwrap_or(Language::En)
}

/// Get a new RustyPipe instance
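The two spellings of `lang()` behave the same: `Option::map_or(default, f)` is just the fused form of `.map(f).unwrap_or(default)`, so both read `YT_LANG` when it is set and fall back to English otherwise. A small stand-alone check of that equivalence; the `rustypipe::param::Language` import path and its `PartialEq`/`Debug` derives are assumptions here, since the test file's imports are not part of this excerpt:

```rust ignore
use std::str::FromStr;

use rustypipe::param::Language; // assumed path; the diff only shows `Language` unqualified

fn lang_fused() -> Language {
    std::env::var("YT_LANG")
        .ok()
        .map_or(Language::En, |l| Language::from_str(&l).unwrap())
}

fn lang_chained() -> Language {
    std::env::var("YT_LANG")
        .ok()
        .map(|l| Language::from_str(&l).unwrap())
        .unwrap_or(Language::En)
}

fn main() {
    // Whatever YT_LANG is (or isn't) set to, both spellings agree
    assert_eq!(lang_fused(), lang_chained());
}
```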
@ -2361,6 +2362,22 @@ fn assert_next_items<T: FromYtItem, Q: AsRef<RustyPipeQuery>>(
assert_gte(p.items.len(), n_items, "items");
}

+ fn assert_video_id(id: &str) {
+ assert!(validate::video_id(id), "invalid video id: `{id}`")
+ }
+
+ fn assert_channel_id(id: &str) {
+ assert!(validate::channel_id(id), "invalid channel id: `{id}`");
+ }
+
+ fn assert_album_id(id: &str) {
+ assert!(validate::album_id(id), "invalid album id: `{id}`");
+ }
+
+ fn assert_playlist_id(id: &str) {
+ assert!(validate::playlist_id(id), "invalid playlist id: `{id}`");
+ }
+
fn assert_frameset(frameset: &Frameset) {
assert_gte(frameset.frame_height, 20, "frame height");
assert_gte(frameset.frame_height, 20, "frame width");
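These added `assert_*` wrappers pair the bool-returning validators from earlier in this comparison with a readable panic message, so the test call sites stay one-liners. A usage sketch reusing the channel ID from the player test case above; the failing call is only shown commented out:

```rust ignore
use rustypipe::validate;

fn assert_channel_id(id: &str) {
    assert!(validate::channel_id(id), "invalid channel id: `{id}`");
}

fn main() {
    // Passes: channel ID taken from the test data above
    assert_channel_id("UC2llNlEM62gU-_fXPHfgbDg");

    // Would panic with: invalid channel id: `not-a-channel`
    // assert_channel_id("not-a-channel");
}
```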