Compare commits: a2bbc850a7...cbeb14f3fd (3 commits: cbeb14f3fd, 81280200f7, a6bf9359b9)

47 changed files with 862 additions and 493 deletions
@@ -6,6 +6,7 @@ authors = ["ThetaDev <t.testboy@gmail.com>"]
 license = "GPL-3.0"
 description = "Client for the public YouTube / YouTube Music API (Innertube), inspired by NewPipe"
 keywords = ["youtube", "video", "music"]
+categories = ["api-bindings", "multimedia"]
 include = ["/src", "README.md", "LICENSE", "!snapshots"]
README.md (129 lines changed)

@@ -2,16 +2,16 @@
 [![CI status](https://ci.thetadev.de/api/badges/ThetaDev/rustypipe/status.svg)](https://ci.thetadev.de/ThetaDev/rustypipe)

-Client for the public YouTube / YouTube Music API (Innertube),
-inspired by [NewPipe](https://github.com/TeamNewPipe/NewPipeExtractor).
+Client for the public YouTube / YouTube Music API (Innertube), inspired by
+[NewPipe](https://github.com/TeamNewPipe/NewPipeExtractor).

 ## Features

 ### YouTube

 - **Player** (video/audio streams, subtitles)
-- **Playlist**
 - **VideoDetails** (metadata, comments, recommended videos)
+- **Playlist**
 - **Channel** (videos, shorts, livestreams, playlists, info, search)
 - **ChannelRSS**
 - **Search** (with filters)

@@ -31,3 +31,126 @@ inspired by [NewPipe](https://github.com/TeamNewPipe/NewPipeExtractor).
 - **Moods/Genres**
 - **Charts**
 - **New** (albums, music videos)
+
+## Getting started
+
+```toml
+[dependencies]
+rustypipe = "0.1.0"
+tokio = { version = "1.20.0", features = ["macros", "rt-multi-thread"] }
+```
+
+### Watch a video
+
+```rust ignore
+use std::process::Command;
+
+use rustypipe::{client::RustyPipe, param::StreamFilter};
+
+#[tokio::main]
+async fn main() {
+    // Create a client
+    let rp = RustyPipe::new();
+    // Fetch the player
+    let player = rp.query().player("pPvd8UxmSbQ").await.unwrap();
+    // Select the best streams
+    let (video, audio) = player.select_video_audio_stream(&StreamFilter::default());
+
+    // Open mpv player
+    let mut args = vec![video.expect("no video stream").url.to_owned()];
+    if let Some(audio) = audio {
+        args.push(format!("--audio-file={}", audio.url));
+    }
+    Command::new("mpv").args(args).output().unwrap();
+}
+```
+
+### Get a playlist
+
+```rust ignore
+use rustypipe::client::RustyPipe;
+
+#[tokio::main]
+async fn main() {
+    // Create a client
+    let rp = RustyPipe::new();
+    // Get the playlist
+    let mut playlist = rp
+        .query()
+        .playlist("PL2_OBreMn7FrsiSW0VDZjdq0xqUKkZYHT")
+        .await
+        .unwrap();
+    // Get all items (maximum: 1000)
+    playlist.videos.extend_limit(rp.query(), 1000).await.unwrap();
+
+    println!("Name: {}", playlist.name);
+    println!("Author: {}", playlist.channel.unwrap().name);
+    println!("Last update: {}", playlist.last_update.unwrap());
+
+    playlist
+        .videos
+        .items
+        .iter()
+        .for_each(|v| println!("[{}] {} ({}s)", v.id, v.name, v.length));
+}
+```
+
+**Output:**
+
+```txt
+Name: Homelab
+Author: Jeff Geerling
+Last update: 2023-05-04
+[cVWF3u-y-Zg] I put a computer in my computer (720s)
+[ecdm3oA-QdQ] 6-in-1: Build a 6-node Ceph cluster on this Mini ITX Motherboard (783s)
+[xvE4HNJZeIg] Scrapyard Server: Fastest all-SSD NAS! (733s)
+[RvnG-ywF6_s] Nanosecond clock sync with a Raspberry Pi (836s)
+[R2S2RMNv7OU] I made the Petabyte Raspberry Pi even faster! (572s)
+[FG--PtrDmw4] Hiding Macs in my Rack! (515s)
+...
+```
+
+### Get a channel
+
+```rust ignore
+use rustypipe::client::RustyPipe;
+
+#[tokio::main]
+async fn main() {
+    // Create a client
+    let rp = RustyPipe::new();
+    // Get the channel
+    let channel = rp
+        .query()
+        .channel_videos("UCl2mFZoRqjw_ELax4Yisf6w")
+        .await
+        .unwrap();
+
+    println!("Name: {}", channel.name);
+    println!("Description: {}", channel.description);
+    println!("Subscribers: {}", channel.subscriber_count.unwrap());
+
+    channel
+        .content
+        .items
+        .iter()
+        .for_each(|v| println!("[{}] {} ({}s)", v.id, v.name, v.length.unwrap()));
+}
+```
+
+**Output:**
+
+```txt
+Name: Louis Rossmann
+Description: I discuss random things of interest to me. (...)
+Subscribers: 1780000
+[qBHgJx_rb8E] Introducing Rossmann senior, a genuine fossil 😃 (122s)
+[TmV8eAtXc3s] Am I wrong about CompTIA? (592s)
+[CjOJJc1qzdY] How FUTO projects loosen Google's grip on your life! (588s)
+[0A10JtkkL9A] a private moment between a man and his kitten (522s)
+[zbHq5_1Cd5U] Is Texas mandating auto repair shops use OEM parts? SB1083 analysis & breakdown; tldr, no. (645s)
+[6Fv8bd9ICb4] Who owns this? (199s)
+...
+```
@@ -2,6 +2,11 @@
 name = "rustypipe-cli"
 version = "0.1.0"
 edition = "2021"
+authors = ["ThetaDev <t.testboy@gmail.com>"]
+license = "GPL-3.0"
+description = "CLI for RustyPipe - download videos and extract data from YouTube / YouTube Music"
+keywords = ["youtube", "video", "music"]
+categories = ["multimedia"]

 [features]
 default = ["rustls-tls-native-roots"]
@@ -1,3 +1,5 @@
+#![warn(clippy::todo, clippy::dbg_macro)]
+
 use std::{path::PathBuf, time::Duration};

 use anyhow::{Context, Result};

@@ -281,9 +283,9 @@ fn print_data<T: Serialize>(data: &T, format: Format, pretty: bool) {
     match format {
         Format::Json => {
             if pretty {
-                serde_json::to_writer_pretty(stdout, data).unwrap()
+                serde_json::to_writer_pretty(stdout, data).unwrap();
             } else {
-                serde_json::to_writer(stdout, data).unwrap()
+                serde_json::to_writer(stdout, data).unwrap();
             }
         }
         Format::Yaml => serde_yaml::to_writer(stdout, data).unwrap(),

@@ -360,7 +362,7 @@ async fn download_videos(
         &video.id,
         &video.name,
         output_dir,
-        output_fname.to_owned(),
+        output_fname.clone(),
         resolution,
         "ffmpeg",
         rp,

@@ -632,9 +634,7 @@ async fn main() {
         } => match music {
             None => match channel {
                 Some(channel) => {
-                    if !rustypipe::validate::channel_id(&channel) {
-                        panic!("invalid channel id")
-                    }
+                    rustypipe::validate::channel_id(&channel).unwrap();
                     let res = rp.query().channel_search(&channel, &query).await.unwrap();
                     print_data(&res, format, pretty);
                 }
@@ -22,7 +22,7 @@ pub enum ABTest {
     TrendsPageHeaderRenderer = 5,
 }

-const TESTS_TO_RUN: [ABTest; 1] = [ABTest::TrendsVideoTab];
+const TESTS_TO_RUN: [ABTest; 2] = [ABTest::TrendsVideoTab, ABTest::TrendsPageHeaderRenderer];

 #[derive(Debug, Serialize, Deserialize)]
 pub struct ABTestRes {

@@ -102,10 +102,10 @@ pub async fn run_test(
     let count = results.iter().filter(|(p, _)| *p).count();
     let vd_present = results
         .iter()
-        .find_map(|(p, vd)| if *p { Some(vd.to_owned()) } else { None });
+        .find_map(|(p, vd)| if *p { Some(vd.clone()) } else { None });
     let vd_absent = results
         .iter()
-        .find_map(|(p, vd)| if !*p { Some(vd.to_owned()) } else { None });
+        .find_map(|(p, vd)| if *p { None } else { Some(vd.clone()) });

     (count, vd_present, vd_absent)
 }
@@ -5,7 +5,7 @@ use path_macro::path;
 use rustypipe::{
     client::{ClientType, RustyPipe, RustyPipeQuery},
     model::AlbumType,
-    param::{locale::LANGUAGES, Language},
+    param::{Language, LANGUAGES},
 };
 use serde::Deserialize;

@@ -58,7 +58,7 @@ pub fn write_samples_to_dict() {
     let collected: BTreeMap<Language, BTreeMap<AlbumType, String>> =
         serde_json::from_reader(BufReader::new(json_file)).unwrap();
     let mut dict = util::read_dict();
-    let langs = dict.keys().map(|k| k.to_owned()).collect::<Vec<_>>();
+    let langs = dict.keys().copied().collect::<Vec<_>>();

     for lang in langs {
         let dict_entry = dict.entry(lang).or_default();

@@ -66,13 +66,13 @@ pub fn write_samples_to_dict() {
         let mut e_langs = dict_entry.equivalent.clone();
         e_langs.push(lang);

-        e_langs.iter().for_each(|lang| {
+        for lang in &e_langs {
             collected.get(lang).unwrap().iter().for_each(|(t, v)| {
                 dict_entry
                     .album_types
                     .insert(v.to_lowercase().trim().to_owned(), *t);
             });
-        });
+        }
     }

     util::write_dict(dict);
@@ -11,7 +11,7 @@ use once_cell::sync::Lazy;
 use path_macro::path;
 use regex::Regex;
 use rustypipe::client::{ClientType, RustyPipe, RustyPipeQuery};
-use rustypipe::param::{locale::LANGUAGES, Language};
+use rustypipe::param::{Language, LANGUAGES};
 use serde::Deserialize;

 use crate::model::{Channel, ContinuationResponse};

@@ -111,7 +111,7 @@ pub async fn collect_large_numbers(concurrency: usize) {
         .unwrap();

     channel.view_counts.iter().for_each(|(num, txt)| {
-        entry.insert(txt.to_owned(), *num);
+        entry.insert(txt.clone(), *num);
     });
     entry.insert(channel.subscriber_count, subscriber_counts[*ch_id]);

@@ -147,7 +147,7 @@ pub fn write_samples_to_dict() {
     let collected_nums: CollectedNumbers =
         serde_json::from_reader(BufReader::new(json_file)).unwrap();
     let mut dict = util::read_dict();
-    let langs = dict.keys().map(|k| k.to_owned()).collect::<Vec<_>>();
+    let langs = dict.keys().copied().collect::<Vec<_>>();

 static POINT_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"\d(\.|,)\d{1,3}(?:\D|$)").unwrap());

@@ -176,10 +176,7 @@ pub fn write_samples_to_dict() {
         })
         .unwrap();

-    let decimal_point = match comma_decimal {
-        true => ",",
-        false => ".",
-    };
+    let decimal_point = if comma_decimal { "," } else { "." };

     // Search for tokens

@@ -217,13 +214,17 @@ pub fn write_samples_to_dict() {
     for lang in e_langs {
         let entry = collected_nums.get(&lang).unwrap();

-        entry.iter().for_each(|(txt, val)| {
+        for (txt, val) in entry.iter() {
             let filtered = util::filter_largenumstr(txt);
             let mag = get_mag(*val);

-            let tokens: Vec<String> = match dict_entry.by_char || lang == Language::Ko {
-                true => filtered.chars().map(|c| c.to_string()).collect(),
-                false => filtered.split_whitespace().map(|c| c.to_string()).collect(),
+            let tokens: Vec<String> = if dict_entry.by_char || lang == Language::Ko {
+                filtered.chars().map(|c| c.to_string()).collect()
+            } else {
+                filtered
+                    .split_whitespace()
+                    .map(std::string::ToString::to_string)
+                    .collect()
             };

             match util::parse_numeric::<u64>(txt.split(decimal_point).next().unwrap()) {

@@ -231,7 +232,7 @@ pub fn write_samples_to_dict() {
                 let mag_before_point = get_mag(num_before_point);
                 let mut mag_remaining = mag - mag_before_point;

-                tokens.iter().for_each(|t| {
+                for t in &tokens {
                     // These tokens are correct in all languages
                     // and are used to parse combined prefixes like `1.1K crore` (en-IN)
                     let known_tmag: u8 = if t.len() == 1 {

@@ -251,26 +252,26 @@ pub fn write_samples_to_dict() {
                             .checked_sub(known_tmag)
                             .expect("known magnitude incorrect");
                     } else {
-                        insert_token(t.to_owned(), mag_remaining);
+                        insert_token(t.clone(), mag_remaining);
+                    }
+                    insert_nd_token(t.clone(), None);
                     }
-                    insert_nd_token(t.to_owned(), None);
-                });
                 }
                 Err(e) => {
                     if matches!(e.kind(), std::num::IntErrorKind::Empty) {
                         // Text does not contain any digits, search for nd_tokens
-                        tokens.iter().for_each(|t| {
+                        for t in &tokens {
                             insert_nd_token(
-                                t.to_owned(),
+                                t.clone(),
                                 Some((*val).try_into().expect("nd_token value too large")),
                             );
-                        });
+                        }
                     } else {
                         panic!("{e}, txt: {txt}")
                     }
                 }
             }
-        });
+        }
     }

     // Insert collected data into dictionary

@@ -369,7 +370,7 @@ async fn get_channel(query: &RustyPipeQuery, channel_id: &str) -> Result<Channel
             .navigation_endpoint
             .continuation_command
             .token
-            .to_owned()
+            .clone()
         })
     });

@@ -380,7 +381,7 @@ async fn get_channel(query: &RustyPipeQuery, channel_id: &str) -> Result<Channel
             let v = &itm.rich_item_renderer.content.video_renderer;
             (
                 util::parse_numeric(&v.view_count_text.text).unwrap_or_default(),
-                v.short_view_count_text.text.to_owned(),
+                v.short_view_count_text.text.clone(),
             )
         })
         .collect();

@@ -399,22 +400,20 @@ async fn get_channel(query: &RustyPipeQuery, channel_id: &str) -> Result<Channel

     let continuation = serde_json::from_str::<ContinuationResponse>(&resp)?;

-    continuation
-        .on_response_received_actions
-        .iter()
-        .for_each(|a| {
-            a.reload_continuation_items_command
+    for action in &continuation.on_response_received_actions {
+        action
+            .reload_continuation_items_command
             .continuation_items
             .iter()
             .for_each(|itm| {
                 let v = &itm.rich_item_renderer.content.video_renderer;
                 view_counts.insert(
                     util::parse_numeric(&v.view_count_text.text).unwrap(),
-                    v.short_view_count_text.text.to_owned(),
+                    v.short_view_count_text.text.clone(),
                 );
-            })
             });
         }
+    }

     Ok(ChannelData {
         view_counts,
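For context on the "known magnitude" tokens handled above: a string such as `1.1K crore` (en-IN) combines two magnitude tokens, and the collector subtracts the already-known magnitude from the remaining one. A minimal, self-contained sketch of that arithmetic (the helper names and token table are mine for illustration, not the crate's internal API):

```rust
/// Illustrative only: map a few well-known magnitude tokens to powers of ten.
fn token_magnitude(token: &str) -> Option<u8> {
    match token {
        "K" => Some(3),
        "M" => Some(6),
        "B" => Some(9),
        "lakh" => Some(5),
        "crore" => Some(7),
        _ => None,
    }
}

fn main() {
    // "1.1K crore" = 1.1 * 10^3 * 10^7 = 1.1 * 10^10
    let num = 1.1_f64;
    let mag: u8 = ["K", "crore"].into_iter().filter_map(token_magnitude).sum();
    let value = num * 10_f64.powi(i32::from(mag));
    assert_eq!(mag, 10);
    println!("1.1K crore ≈ {value}"); // 11000000000
}
```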
@@ -9,7 +9,7 @@ use futures::{stream, StreamExt};
 use path_macro::path;
 use rustypipe::{
     client::RustyPipe,
-    param::{locale::LANGUAGES, Language},
+    param::{Language, LANGUAGES},
 };
 use serde::{Deserialize, Serialize};

@@ -118,7 +118,7 @@ pub fn write_samples_to_dict() {
     let collected_dates: CollectedDates =
         serde_json::from_reader(BufReader::new(json_file)).unwrap();
     let mut dict = util::read_dict();
-    let langs = dict.keys().map(|k| k.to_owned()).collect::<Vec<_>>();
+    let langs = dict.keys().copied().collect::<Vec<_>>();

     let months = [
         DateCase::Jan,

@@ -159,7 +159,7 @@ pub fn write_samples_to_dict() {
         .for_each(|l| datestr_tables.push(collected_dates.get(l).unwrap()));

     let dict_entry = dict.entry(lang).or_default();
-    let mut num_order = "".to_owned();
+    let mut num_order = String::new();

     let collect_nd_tokens = !matches!(
         lang,

@@ -236,30 +236,30 @@ pub fn write_samples_to_dict() {
         });
     });

-    month_words.iter().for_each(|(word, m)| {
+    for (word, m) in &month_words {
         if *m != 0 {
-            dict_entry.months.insert(word.to_owned(), *m as u8);
+            dict_entry.months.insert(word.clone(), *m as u8);
         };
-    });
+    }

     if collect_nd_tokens {
-        td_words.iter().for_each(|(word, n)| {
+        for (word, n) in &td_words {
             match n {
                 // Today
                 1 => {
                     dict_entry
                         .timeago_nd_tokens
-                        .insert(word.to_owned(), "0D".to_owned());
+                        .insert(word.clone(), "0D".to_owned());
                 }
                 // Yesterday
                 2 => {
                     dict_entry
                         .timeago_nd_tokens
-                        .insert(word.to_owned(), "1D".to_owned());
+                        .insert(word.clone(), "1D".to_owned());
                 }
                 _ => {}
             };
-        });
+        }

     if datestr_tables.len() == 1 && dict_entry.timeago_nd_tokens.len() > 2 {
         println!(
@@ -9,7 +9,7 @@ use futures::{stream, StreamExt};
 use path_macro::path;
 use rustypipe::{
     client::{ClientType, RustyPipe, RustyPipeQuery},
-    param::{locale::LANGUAGES, Language},
+    param::{Language, LANGUAGES},
 };

 use crate::{

@@ -67,7 +67,7 @@ pub fn parse_video_durations() {
     let durations: CollectedDurations = serde_json::from_reader(BufReader::new(json_file)).unwrap();

     let mut dict = util::read_dict();
-    let langs = dict.keys().map(|k| k.to_owned()).collect::<Vec<_>>();
+    let langs = dict.keys().copied().collect::<Vec<_>>();

     for lang in langs {
         let dict_entry = dict.entry(lang).or_default();

@@ -83,7 +83,7 @@ pub fn parse_video_durations() {
         by_char: bool,
         val: u32,
         expect: u32,
-        w: String,
+        w: &str,
         unit: TimeUnit,
     ) -> bool {
         let ok = val == expect || val * 2 == expect;

@@ -168,23 +168,23 @@ pub fn parse_video_durations() {
         let p2_n = p2.digits.parse::<u32>().unwrap_or(1);

         assert!(
-            check_add_word(words, by_char, p1_n, m, p1.word, TimeUnit::Minute),
+            check_add_word(words, by_char, p1_n, m, &p1.word, TimeUnit::Minute),
             "{txt}: min parse error"
         );
         assert!(
-            check_add_word(words, by_char, p2_n, s, p2.word, TimeUnit::Second),
+            check_add_word(words, by_char, p2_n, s, &p2.word, TimeUnit::Second),
             "{txt}: sec parse error"
         );
     }
     None => {
         if s == 0 {
             assert!(
-                check_add_word(words, by_char, p1_n, m, p1.word, TimeUnit::Minute),
+                check_add_word(words, by_char, p1_n, m, &p1.word, TimeUnit::Minute),
                 "{txt}: min parse error"
             );
         } else if m == 0 {
             assert!(
-                check_add_word(words, by_char, p1_n, s, p1.word, TimeUnit::Second),
+                check_add_word(words, by_char, p1_n, s, &p1.word, TimeUnit::Second),
                 "{txt}: sec parse error"
             );
         } else {

@@ -206,11 +206,11 @@ pub fn parse_video_durations() {

         // dbg!(&words);

-        words.into_iter().for_each(|(k, v)| {
+        for (k, v) in words {
             if let Some(v) = v {
                 dict_entry.timeago_tokens.insert(k, v.to_string());
             }
-        });
+        }
     }
 }

@@ -345,7 +345,8 @@ mod tests {
         let ul: LanguageIdentifier =
             lang.to_string().split('-').next().unwrap().parse().unwrap();

-        let pr = PluralRules::create(ul, PluralRuleType::CARDINAL).expect(&lang.to_string());
+        let pr = PluralRules::create(ul, PluralRuleType::CARDINAL)
+            .unwrap_or_else(|_| panic!("{}", lang.to_string()));

         let mut plurals_m: HashSet<PluralCategory> = HashSet::new();
         for n in 1..60 {

@@ -353,11 +354,11 @@ mod tests {
         }
         let mut plurals_s = plurals_m.clone();

-        durations.values().for_each(|v| {
+        for v in durations.values() {
             let (m, s) = split_duration(*v);
             plurals_m.remove(&pr.select(m).unwrap().into());
             plurals_s.remove(&pr.select(s).unwrap().into());
-        });
+        }

         if !plurals_m.is_empty() {
             println!("{lang}: missing minutes {plurals_m:?}");
@@ -35,14 +35,18 @@ pub fn generate_dictionary() {

     let code_head = r#"// This file is automatically generated. DO NOT EDIT.
 // See codegen/gen_dictionary.rs for the generation code.
+#![allow(clippy::unreadable_literal)]
+
+//! The dictionary contains the information required to parse dates and numbers
+//! in all supported languages.
+
 use crate::{
     model::AlbumType,
     param::Language,
     util::timeago::{DateCmp, TaToken, TimeUnit},
 };

-/// The dictionary contains the information required to parse dates and numbers
-/// in all supported languages.
+/// Dictionary entry containing language-specific parsing information
 pub(crate) struct Entry {
     /// Tokens for parsing timeago strings.
     ///

@@ -90,11 +94,11 @@ pub(crate) fn entry(lang: Language) -> Entry {
 "#
     .to_owned();

-    dict.iter().for_each(|(lang, entry)| {
+    for (lang, entry) in &dict {
         // Match selector
         let mut selector = format!("Language::{lang:?}");
         entry.equivalent.iter().for_each(|eq| {
-            let _ = write!(selector, " | Language::{eq:?}");
+            write!(selector, " | Language::{eq:?}").unwrap();
         });

         // Timeago tokens

@@ -132,7 +136,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
         // Date order
         let mut date_order = "&[".to_owned();
         entry.date_order.chars().for_each(|c| {
-            let _ = write!(date_order, "DateCmp::{c}, ");
+            write!(date_order, "DateCmp::{c}, ").unwrap();
         });
         date_order = date_order.trim_end_matches([' ', ',']).to_owned() + "]";

@@ -154,16 +158,31 @@ pub(crate) fn entry(lang: Language) -> Entry {
             album_types.entry(txt, &format!("AlbumType::{album_type:?}"));
         });

-        let code_ta_tokens = &ta_tokens.build().to_string().replace('\n', "\n ");
-        let code_ta_nd_tokens = &ta_nd_tokens.build().to_string().replace('\n', "\n ");
+        let code_ta_tokens = &ta_tokens
+            .build()
+            .to_string()
+            .replace('\n', "\n ");
+        let code_ta_nd_tokens = &ta_nd_tokens
+            .build()
+            .to_string()
+            .replace('\n', "\n ");
         let code_months = &months.build().to_string().replace('\n', "\n ");
-        let code_number_tokens = &number_tokens.build().to_string().replace('\n', "\n ");
-        let code_number_nd_tokens = &number_nd_tokens.build().to_string().replace('\n', "\n ");
-        let code_album_types = &album_types.build().to_string().replace('\n', "\n ");
+        let code_number_tokens = &number_tokens
+            .build()
+            .to_string()
+            .replace('\n', "\n ");
+        let code_number_nd_tokens = &number_nd_tokens
+            .build()
+            .to_string()
+            .replace('\n', "\n ");
+        let code_album_types = &album_types
+            .build()
+            .to_string()
+            .replace('\n', "\n ");

         write!(code_timeago_tokens, "{} => Entry {{\n timeago_tokens: {},\n date_order: {},\n months: {},\n timeago_nd_tokens: {},\n comma_decimal: {:?},\n number_tokens: {},\n number_nd_tokens: {},\n album_types: {},\n }},\n ",
             selector, code_ta_tokens, date_order, code_months, code_ta_nd_tokens, entry.comma_decimal, code_number_tokens, code_number_nd_tokens, code_album_types).unwrap();
-    });
+    }

     code_timeago_tokens = code_timeago_tokens.trim_end().to_owned() + "\n }\n}\n";
@@ -227,7 +227,7 @@ pub enum Country {
 "#
     .to_owned();

-    languages.iter().for_each(|(code, native_name)| {
+    for (code, native_name) in &languages {
         let enum_name = code
             .split('-')
             .map(|c| {

@@ -262,10 +262,10 @@ pub enum Country {
             " Language::{enum_name} => \"{native_name}\","
         )
         .unwrap();
-    });
+    }
     code_langs += "}\n";

-    countries.iter().for_each(|(c, n)| {
+    for (c, n) in &countries {
         let enum_name = c[0..1].to_owned().to_uppercase() + &c[1..].to_owned().to_lowercase();

         // Country enum

@@ -281,7 +281,7 @@ pub enum Country {
             " Country::{enum_name} => \"{n}\","
         )
         .unwrap();
-    });
+    }

     // Add Country::Zz / Global
     code_countries += " /// Global (can only be used for music charts)\n";

@@ -368,8 +368,8 @@ fn map_language_section(section: &CompactLinkRendererWrap) -> BTreeMap<String, S
                 .actions[0]
                 .select_language_command
                 .hl
-                .to_owned(),
-            i.compact_link_renderer.title.text.to_owned(),
+                .clone(),
+            i.compact_link_renderer.title.text.clone(),
             )
         })
         .collect()
@@ -1,3 +1,5 @@
+#![warn(clippy::todo)]
+
 mod abtest;
 mod collect_album_types;
 mod collect_large_numbers;

@@ -90,7 +92,7 @@ async fn main() {
         }
         None => {
             let res = abtest::run_all_tests(n, cli.concurrency).await;
-            println!("{}", serde_json::to_string_pretty(&res).unwrap())
+            println!("{}", serde_json::to_string_pretty(&res).unwrap());
         }
     };
 }
@@ -2,6 +2,11 @@
 name = "rustypipe-downloader"
 version = "0.1.0"
 edition = "2021"
+authors = ["ThetaDev <t.testboy@gmail.com>"]
+license = "GPL-3.0"
+description = "Downloader extension for RustyPipe"
+keywords = ["youtube", "video", "music"]
+categories = ["multimedia"]

 [features]
 default = ["default-tls"]
@@ -1,3 +1,5 @@
+#![warn(clippy::todo, clippy::dbg_macro)]
+
 //! # YouTube audio/video downloader

 mod util;

@@ -25,8 +27,8 @@ use util::DownloadError;

 type Result<T> = core::result::Result<T, DownloadError>;

-const CHUNK_SIZE_MIN: u64 = 9000000;
-const CHUNK_SIZE_MAX: u64 = 10000000;
+const CHUNK_SIZE_MIN: u64 = 9_000_000;
+const CHUNK_SIZE_MAX: u64 = 10_000_000;

 fn get_download_range(offset: u64, size: Option<u64>) -> Range<u64> {
     let mut rng = rand::thread_rng();

@@ -34,7 +36,7 @@ fn get_download_range(offset: u64, size: Option<u64>) -> Range<u64> {
     let mut chunk_end = offset + chunk_size;

     if let Some(size) = size {
-        chunk_end = chunk_end.min(size - 1)
+        chunk_end = chunk_end.min(size - 1);
     }

     Range {

@@ -296,7 +298,7 @@ pub async fn download_video(
 ) -> Result<()> {
     // Download filepath
     let download_dir = PathBuf::from(output_dir);
-    let title = player_data.details.name.to_owned();
+    let title = player_data.details.name.clone();
     let output_fname_set = output_fname.is_some();
     let output_fname = output_fname.unwrap_or_else(|| {
         filenamify::filenamify(format!("{} [{}]", title, player_data.details.id))

@@ -332,14 +334,13 @@ pub async fn download_video(
             return Err(DownloadError::Input(
                 format!("File {} already exists", output_path.to_string_lossy()).into(),
             ))?;
-        } else {
+        }
         info!(
             "Downloaded video {} already exists",
             output_path.to_string_lossy()
         );
         return Ok(());
     }
-    }

     match (video, audio) {
         // Downloading combined video/audio stream (no conversion)

@@ -364,7 +365,7 @@ pub async fn download_video(
                 output_fname,
                 v.format.extension()
             )),
-            url: v.url.to_owned(),
+            url: v.url.clone(),
             video_codec: Some(v.codec),
             audio_codec: None,
         });

@@ -376,10 +377,10 @@ pub async fn download_video(
                 output_fname,
                 a.format.extension()
             )),
-            url: a.url.to_owned(),
+            url: a.url.clone(),
             video_codec: None,
             audio_codec: Some(a.codec),
-        })
+        });
     }

     pb.set_message(format!("Downloading {title}"));

@@ -396,7 +397,7 @@ pub async fn download_video(

     // Delete original files
     stream::iter(&downloads)
-        .map(|d| fs::remove_file(d.file.to_owned()))
+        .map(|d| fs::remove_file(d.file.clone()))
         .buffer_unordered(downloads.len())
         .collect::<Vec<_>>()
         .await

@@ -417,7 +418,7 @@ async fn download_streams(
     let n = downloads.len();

     stream::iter(downloads)
-        .map(|d| download_single_file(&d.url, d.file.to_owned(), http.clone(), pb.clone()))
+        .map(|d| download_single_file(&d.url, d.file.clone(), http.clone(), pb.clone()))
         .buffer_unordered(n)
         .collect::<Vec<_>>()
         .await

@@ -439,7 +440,7 @@ async fn convert_streams<P: Into<PathBuf>>(

     downloads.iter().enumerate().for_each(|(i, d)| {
         args.push("-i".into());
-        args.push(d.file.to_owned().into());
+        args.push(d.file.clone().into());

         mapping_args.push("-map".into());
         mapping_args.push(i.to_string().into());
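The constants above cap each ranged request at roughly 9–10 MB. As a rough, self-contained sketch of how such chunk ranges cover a file (not the crate's actual implementation, which randomizes the chunk size between the two constants and uses HTTP Range semantics):

```rust
use std::ops::Range;

// Hypothetical fixed chunk size; the real code picks a random size
// between CHUNK_SIZE_MIN and CHUNK_SIZE_MAX for every request.
const CHUNK_SIZE: u64 = 9_500_000;

/// Compute half-open byte ranges that together cover `size` bytes.
fn chunk_ranges(size: u64) -> Vec<Range<u64>> {
    let mut ranges = Vec::new();
    let mut offset = 0;
    while offset < size {
        let end = (offset + CHUNK_SIZE).min(size);
        ranges.push(offset..end);
        offset = end;
    }
    ranges
}

fn main() {
    // A 25 MB stream needs three ranged requests at this chunk size.
    let ranges = chunk_ranges(25_000_000);
    assert_eq!(ranges.len(), 3);
    println!("{ranges:?}");
}
```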
notes/channel_playlist.txt (new file, 18 lines)

@@ -0,0 +1,18 @@
+Source: https://github.com/TeamNewPipe/NewPipe/pull/9182#issuecomment-1508938841
+
+Note: we recently discovered that YouTube system playlists exist for regular videos of channels, for livestreams, and shorts as chronological ones (the shorts one was already known) and popular ones.
+They correspond basically to the results of the sort filters available on the channel's streams tab on YouTube's interface.
+
+So, basically shortcuts for the lazy/incurious?
+
+Same procedure as the one described in the 0.24.1 changelog, except that you need to change the prefix UU (all user uploads) to:
+
+UULF for regular videos only,
+UULV for livestreams only,
+UUSH for shorts only,
+UULP for popular regular videos,
+UUPS for popular shorts,
+UUPV for popular livestreams
+UUMF: members-only regular videos
+UUMV: members-only livestreams
+UUMS is probably for members-only shorts; we need to find a channel making shorts restricted to channel members.
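The prefixes listed in this note can be applied programmatically: YouTube channel IDs start with `UC`, and the system playlist IDs are obtained by swapping that prefix. A hedged sketch (the helper name and the `UC` assumption are mine, not part of the diff):

```rust
/// Derive a system playlist ID from a channel ID by replacing the
/// leading "UC" with one of the documented playlist prefixes
/// (e.g. "UULF" for regular videos, "UUSH" for shorts).
fn system_playlist_id(channel_id: &str, prefix: &str) -> Option<String> {
    channel_id
        .strip_prefix("UC")
        .map(|rest| format!("{prefix}{rest}"))
}

fn main() {
    let channel = "UCl2mFZoRqjw_ELax4Yisf6w";
    assert_eq!(
        system_playlist_id(channel, "UULF").as_deref(),
        Some("UULFl2mFZoRqjw_ELax4Yisf6w")
    );
}
```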
src/cache.rs (23 lines changed)

@@ -1,4 +1,19 @@
-//! Persistent cache storage
+//! # Persistent cache storage
+//!
+//! RustyPipe caches some information fetched from YouTube: specifically
+//! the client versions and the JavaScript code used to deobfuscate the stream URLs.
+//!
+//! Without a persistent cache storage, this information would have to be re-fetched
+//! with every new instantiation of the client. This would make operation a lot slower,
+//! especially with CLI applications. For this reason, persisting the cache between
+//! program executions is recommended.
+//!
+//! Since there are many different ways to store this data (text file, SQL, Redis, etc),
+//! RustyPipe allows you to plug in your own cache storage by implementing the
+//! [`CacheStorage`] trait.
+//!
+//! RustyPipe already comes with the [`FileStorage`] implementation which stores
+//! the cache as a JSON file.

 use std::{
     fs,

@@ -9,14 +24,16 @@ use log::error;

 pub(crate) const DEFAULT_CACHE_FILE: &str = "rustypipe_cache.json";

+/// Cache storage trait
+///
 /// RustyPipe has to cache some information fetched from YouTube: specifically
 /// the client versions and the JavaScript code used to deobfuscate the stream URLs.
 ///
 /// This trait is used to abstract the cache storage behavior so you can store
 /// cache data in your preferred way (File, SQL, Redis, etc).
 ///
-/// The cache is read when building the [`crate::client::RustyPipe`] client and updated
-/// whenever additional data is fetched.
+/// The cache is read when building the [`RustyPipe`](crate::client::RustyPipe)
+/// client and updated whenever additional data is fetched.
 pub trait CacheStorage: Sync + Send {
     /// Write the given string to the cache
     fn write(&self, data: &str);
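To illustrate the plug-in storage idea described in the module docs, here is a minimal in-memory backend. It is written against a stand-in trait with the same shape as the `write` method visible in this hunk; the real `CacheStorage` trait presumably also has a read method that does not appear in the diff:

```rust
use std::sync::Mutex;

// Stand-in mirroring the trait above; only `write` is visible in this diff,
// the `read` counterpart is assumed here so the example is complete.
trait CacheStorage: Sync + Send {
    fn write(&self, data: &str);
    fn read(&self) -> Option<String>;
}

/// A volatile cache backend, useful for tests or short-lived processes.
struct MemStorage {
    data: Mutex<Option<String>>,
}

impl CacheStorage for MemStorage {
    fn write(&self, data: &str) {
        *self.data.lock().unwrap() = Some(data.to_owned());
    }

    fn read(&self) -> Option<String> {
        self.data.lock().unwrap().clone()
    }
}

fn main() {
    let storage = MemStorage { data: Mutex::new(None) };
    storage.write("{\"client_version\":\"2.0\"}");
    assert_eq!(storage.read().as_deref(), Some("{\"client_version\":\"2.0\"}"));
}
```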
@@ -98,7 +98,7 @@ impl RustyPipeQuery {
         .await
     }

-    /// Get the specified video tab from a YouTube channel
+    /// Get the videos of the given tab (Shorts, Livestreams) from a YouTube channel
     pub async fn channel_videos_tab<S: AsRef<str>>(
         &self,
         channel_id: S,

@@ -108,7 +108,7 @@ impl RustyPipeQuery {
         .await
     }

-    /// Get a ordered list of videos from the specified tab of a YouTube channel
+    /// Get an ordered list of videos from the given tab (Shorts, Livestreams) of a YouTube channel
     ///
     /// This function does not return channel metadata.
     pub async fn channel_videos_tab_order<S: AsRef<str>>(

@@ -322,7 +322,7 @@ fn map_vanity_url(url: &str, id: &str) -> Option<String> {

     Url::parse(url).ok().map(|mut parsed_url| {
         // The vanity URL from YouTube is http for some reason
-        let _ = parsed_url.set_scheme("https");
+        _ = parsed_url.set_scheme("https");
         parsed_url.to_string()
     })
 }

@@ -392,10 +392,7 @@ fn map_channel(
             content: (),
         },
         response::channel::Header::CarouselHeaderRenderer(carousel) => {
-            let hdata = carousel
-                .contents
-                .into_iter()
-                .filter_map(|item| {
+            let hdata = carousel.contents.into_iter().find_map(|item| {
                 match item {
                     response::channel::CarouselHeaderRendererItem::TopicChannelDetailsRenderer {
                         subscriber_count_text,

@@ -404,8 +401,7 @@ fn map_channel(
                     } => Some((subscriber_count_text.or(subtitle), avatar)),
                     response::channel::CarouselHeaderRendererItem::None => None,
                 }
-            })
-            .next();
+            });

             Channel {
                 id: metadata.external_id,

@@ -568,7 +564,7 @@ fn _order_ctoken(
     pb_80226972.string(3, &pbi.to_base64());

     let mut pb = ProtoBuilder::new();
-    pb.embedded(80226972, pb_80226972);
+    pb.embedded(80_226_972, pb_80226972);

     pb.to_base64()
 }
@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
 use crate::{
     error::{Error, ExtractionError},
     model::ChannelRss,
-    report::Report,
+    report::{Report, RustyPipeInfo},
 };

 use super::{response, RustyPipeQuery};

@@ -15,12 +15,11 @@ impl RustyPipeQuery {
     ///
     /// Fetching RSS feeds is a lot faster than querying the InnerTube API, so this method is great
     /// for checking a lot of channels or implementing a subscription feed.
+    ///
+    /// The downside of using the RSS feed is that it does not provide video durations.
     pub async fn channel_rss<S: AsRef<str>>(&self, channel_id: S) -> Result<ChannelRss, Error> {
         let channel_id = channel_id.as_ref();
-        let url = format!(
-            "https://www.youtube.com/feeds/videos.xml?channel_id={}",
-            channel_id,
-        );
+        let url = format!("https://www.youtube.com/feeds/videos.xml?channel_id={channel_id}");
         let xml = self
             .client
             .http_request_txt(&self.client.inner.http.get(&url).build()?)

@@ -38,15 +37,15 @@ impl RustyPipeQuery {
             Err(e) => {
                 if let Some(reporter) = &self.client.inner.reporter {
                     let report = Report {
-                        info: Default::default(),
+                        info: RustyPipeInfo::default(),
                         level: crate::report::Level::ERR,
-                        operation: "channel_rss".to_owned(),
+                        operation: "channel_rss",
                         error: Some(e.to_string()),
                         msgs: Vec::new(),
                         deobf_data: None,
                         http_request: crate::report::HTTPRequest {
-                            url,
-                            method: "GET".to_owned(),
+                            url: &url,
+                            method: "GET",
                             req_header: BTreeMap::new(),
                             req_body: String::new(),
                             status: 200,
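As a usage note for the method above, a typical call might look like this. This is illustrative only: the call shape follows the `channel_rss` signature and the `RustyPipe::new()`/`query()` pattern shown elsewhere in this changeset, and the `rss` feature flag mentioned in the query docs is assumed to be enabled.

```rust
use rustypipe::client::RustyPipe;

#[tokio::main]
async fn main() {
    let rp = RustyPipe::new();
    // Cheap compared to a full InnerTube query, but the feed carries no video durations.
    let feed = rp.query().channel_rss("UCl2mFZoRqjw_ELax4Yisf6w").await.unwrap();
    // Debug-printing assumes ChannelRss derives Debug (not shown in this diff).
    println!("{feed:?}");
}
```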
@ -39,7 +39,7 @@ use crate::{
|
||||||
deobfuscate::DeobfData,
|
deobfuscate::DeobfData,
|
||||||
error::{Error, ExtractionError},
|
error::{Error, ExtractionError},
|
||||||
param::{Country, Language},
|
param::{Country, Language},
|
||||||
report::{FileReporter, Level, Report, Reporter, DEFAULT_REPORT_DIR},
|
report::{FileReporter, Level, Report, Reporter, RustyPipeInfo, DEFAULT_REPORT_DIR},
|
||||||
serializer::MapResult,
|
serializer::MapResult,
|
||||||
util,
|
util,
|
||||||
};
|
};
|
||||||
|
@ -73,7 +73,7 @@ pub enum ClientType {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ClientType {
|
impl ClientType {
|
||||||
fn is_web(&self) -> bool {
|
fn is_web(self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
ClientType::Desktop | ClientType::DesktopMusic | ClientType::TvHtml5Embed => true,
|
ClientType::Desktop | ClientType::DesktopMusic | ClientType::TvHtml5Embed => true,
|
||||||
ClientType::Android | ClientType::Ios => false,
|
ClientType::Android | ClientType::Ios => false,
|
||||||
|
@ -118,11 +118,11 @@ struct ClientInfo<'a> {
|
||||||
impl Default for ClientInfo<'_> {
|
impl Default for ClientInfo<'_> {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
client_name: Default::default(),
|
client_name: "",
|
||||||
client_version: Default::default(),
|
client_version: Cow::default(),
|
||||||
client_screen: None,
|
client_screen: None,
|
||||||
device_model: None,
|
device_model: None,
|
||||||
platform: Default::default(),
|
platform: "",
|
||||||
original_url: None,
|
original_url: None,
|
||||||
visitor_data: None,
|
visitor_data: None,
|
||||||
hl: Language::En,
|
hl: Language::En,
|
||||||
|
@ -214,9 +214,9 @@ static CLIENT_VERSION_REGEXES: Lazy<[Regex; 1]> =
|
||||||
|
|
||||||
/// The RustyPipe client used to access YouTube's API
|
/// The RustyPipe client used to access YouTube's API
|
||||||
///
|
///
|
||||||
/// RustyPipe includes an `Arc` internally, so if you are using the client
|
/// RustyPipe uses an [`Arc`] internally, so if you are using the client
|
||||||
/// at multiple locations, you can just clone it. Note that options (lang/country/report)
|
/// at multiple locations, you can just clone it. Note that query options
|
||||||
/// are not shared between clones.
|
/// (lang/country/report/visitor data) are not shared between clones.
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct RustyPipe {
|
pub struct RustyPipe {
|
||||||
inner: Arc<RustyPipeRef>,
|
inner: Arc<RustyPipeRef>,
|
||||||
|
@ -268,10 +268,78 @@ impl<T> DefaultOpt<T> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// RustyPipe query object
|
/// # RustyPipe query
|
||||||
///
|
///
|
||||||
/// Contains a reference to the RustyPipe client as well as query-specific
|
/// ## Queries
|
||||||
/// options (e.g. language preference).
|
///
|
||||||
|
/// ### YouTube
|
||||||
|
///
|
||||||
|
/// - **Video**
|
||||||
|
/// - [`player`](RustyPipeQuery::player)
|
||||||
|
/// - [`video_details`](RustyPipeQuery::video_details)
|
||||||
|
/// - [`video_comments`](RustyPipeQuery::video_comments)
|
||||||
|
/// - **Channel**
|
||||||
|
/// - [`channel_videos`](RustyPipeQuery::channel_videos)
|
||||||
|
/// - [`channel_videos_order`](RustyPipeQuery::channel_videos_order)
|
||||||
|
/// - [`channel_videos_tab`](RustyPipeQuery::channel_videos_tab)
|
||||||
|
/// - [`channel_videos_tab_order`](RustyPipeQuery::channel_videos_tab_order)
|
||||||
|
/// - [`channel_playlists`](RustyPipeQuery::channel_playlists)
|
||||||
|
/// - [`channel_search`](RustyPipeQuery::channel_search)
|
||||||
|
/// - [`channel_info`](RustyPipeQuery::channel_info)
|
||||||
|
/// - [`channel_rss`](RustyPipeQuery::channel_rss) (🔒 Feature `rss`)
|
||||||
|
/// - **Playlist** [`playlist`](RustyPipeQuery::playlist)
|
||||||
|
/// - **Search**
|
||||||
|
/// - [`search`](RustyPipeQuery::search)
|
||||||
|
/// - [`search_filter`](RustyPipeQuery::search_filter)
|
||||||
|
/// - [`search_suggestion`](RustyPipeQuery::search_suggestion)
|
||||||
|
/// - **Trending** [`trending`](RustyPipeQuery::trending)
|
||||||
|
/// - **Resolver** (convert URLs and strings to YouTube IDs)
|
||||||
|
/// - [`resolve_url`](RustyPipeQuery::resolve_url)
|
||||||
|
/// - [`resolve_string`](RustyPipeQuery::resolve_string)
|
||||||
|
///
|
||||||
|
/// ### YouTube Music
|
||||||
|
///
|
||||||
|
/// - **Playlist** [`music_playlist`](RustyPipeQuery::music_playlist)
|
||||||
|
/// - **Album** [`music_album`](RustyPipeQuery::music_album)
|
||||||
|
/// - **Artist** [`music_artist`](RustyPipeQuery::music_artist)
|
||||||
|
/// - **Search**
|
||||||
|
/// - [`music_search`](RustyPipeQuery::music_search)
|
||||||
|
/// - [`music_search_tracks`](RustyPipeQuery::music_search_tracks)
|
||||||
|
/// - [`music_search_videos`](RustyPipeQuery::music_search_videos)
|
||||||
|
/// - [`music_search_albums`](RustyPipeQuery::music_search_albums)
|
||||||
|
/// - [`music_search_artists`](RustyPipeQuery::music_search_artists)
|
||||||
|
/// - [`music_search_playlists`](RustyPipeQuery::music_search_playlists)
|
||||||
|
/// - [`music_search_playlists_filter`](RustyPipeQuery::music_search_playlists_filter)
|
||||||
|
/// - [`music_search_suggestion`](RustyPipeQuery::music_search_suggestion)
|
||||||
|
/// - **Radio**
|
||||||
|
/// - [`music_radio`](RustyPipeQuery::music_radio)
|
||||||
|
/// - [`music_radio_playlist`](RustyPipeQuery::music_radio_playlist)
|
||||||
|
/// - [`music_radio_track`](RustyPipeQuery::music_radio_track)
|
||||||
|
/// - **Track details**
|
||||||
|
/// - [`music_details`](RustyPipeQuery::music_details)
|
||||||
|
/// - [`music_lyrics`](RustyPipeQuery::music_lyrics)
|
||||||
|
/// - [`music_related`](RustyPipeQuery::music_related)
|
||||||
|
/// - **Moods/Genres**
|
||||||
|
/// - [`music_genres`](RustyPipeQuery::music_genres)
|
||||||
|
/// - [`music_genre`](RustyPipeQuery::music_genre)
|
||||||
|
/// - **Charts** [`music_charts`](RustyPipeQuery::music_charts)
|
||||||
|
/// - **New**
|
||||||
|
/// - [`music_new_albums`](RustyPipeQuery::music_new_albums)
|
||||||
|
/// - [`music_new_videos`](RustyPipeQuery::music_new_videos)
|
||||||
|
///
|
||||||
|
/// ## Options
|
||||||
|
///
|
||||||
|
/// You can set the language, country and visitor data cookie for individual requests.
|
||||||
|
///
|
||||||
|
/// ```
|
||||||
|
/// # use rustypipe::client::RustyPipe;
|
||||||
|
/// let rp = RustyPipe::new();
|
||||||
|
/// rp.query()
|
||||||
|
/// .country(rustypipe::param::Country::De)
|
||||||
|
/// .lang(rustypipe::param::Language::De)
|
||||||
|
/// .visitor_data("CgthZVRCd1dkbTlRWSj3v_miBg%3D%3D")
|
||||||
|
/// .player("ZeerrnuLi5E");
|
||||||
|
/// ```
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct RustyPipeQuery {
|
pub struct RustyPipeQuery {
|
||||||
client: RustyPipe,
|
client: RustyPipe,
|
||||||
|
@@ -361,9 +429,10 @@ impl Default for RustyPipeBuilder {
 }
 
 impl RustyPipeBuilder {
-    /// Constructs a new `RustyPipeBuilder`.
+    /// Return a new `RustyPipeBuilder`.
     ///
-    /// This is the same as `RustyPipe::builder()`
+    /// This is the same as [`RustyPipe::builder`]
+    #[must_use]
     pub fn new() -> Self {
         RustyPipeBuilder {
             default_opts: RustyPipeOpts::default(),

@@ -376,7 +445,8 @@ impl RustyPipeBuilder {
         }
     }
 
-    /// Returns a new, configured RustyPipe instance.
+    /// Return a new, configured RustyPipe instance.
+    #[must_use]
     pub fn build(self) -> RustyPipe {
         let mut client_builder = ClientBuilder::new()
             .user_agent(self.user_agent.unwrap_or_else(|| DEFAULT_UA.to_owned()))

@@ -441,6 +511,7 @@ impl RustyPipeBuilder {
     /// This option has no effect if the storage backend or reporter are manually set or disabled.
     ///
     /// **Default value**: current working directory
+    #[must_use]
     pub fn storage_dir<P: Into<PathBuf>>(mut self, path: P) -> Self {
         self.storage_dir = Some(path.into());
         self

@@ -451,12 +522,14 @@ impl RustyPipeBuilder {
     /// program executions.
     ///
     /// **Default value**: [`FileStorage`] in `rustypipe_cache.json`
+    #[must_use]
     pub fn storage(mut self, storage: Box<dyn CacheStorage>) -> Self {
         self.storage = DefaultOpt::Some(storage);
         self
     }
 
     /// Disable cache storage
+    #[must_use]
     pub fn no_storage(mut self) -> Self {
         self.storage = DefaultOpt::None;
         self

@@ -465,12 +538,14 @@ impl RustyPipeBuilder {
     /// Add a `Reporter` to collect error details
     ///
     /// **Default value**: [`FileReporter`] creating reports in `./rustypipe_reports`
+    #[must_use]
     pub fn reporter(mut self, reporter: Box<dyn Reporter>) -> Self {
         self.reporter = DefaultOpt::Some(reporter);
         self
     }
 
     /// Disable the creation of report files in case of errors and warnings.
+    #[must_use]
     pub fn no_reporter(mut self) -> Self {
         self.reporter = DefaultOpt::None;
         self

@@ -482,12 +557,14 @@ impl RustyPipeBuilder {
     /// response body has finished.
     ///
     /// **Default value**: 10s
+    #[must_use]
     pub fn timeout(mut self, timeout: Duration) -> Self {
         self.timeout = DefaultOpt::Some(timeout);
         self
     }
 
     /// Disable the HTTP request timeout.
+    #[must_use]
     pub fn no_timeout(mut self) -> Self {
         self.timeout = DefaultOpt::None;
         self

@@ -502,6 +579,7 @@ impl RustyPipeBuilder {
     /// random jitter to be less predictable).
     ///
     /// **Default value**: 2
+    #[must_use]
     pub fn n_http_retries(mut self, n_retries: u32) -> Self {
         self.n_http_retries = n_retries;
         self

@@ -511,37 +589,44 @@ impl RustyPipeBuilder {
     ///
     /// **Default value**: `Mozilla/5.0 (X11; Linux x86_64; rv:102.0) Gecko/20100101 Firefox/102.0`
     /// (Firefox ESR on Debian)
+    #[must_use]
     pub fn user_agent<S: Into<String>>(mut self, user_agent: S) -> Self {
         self.user_agent = Some(user_agent.into());
         self
     }
 
     /// Set the language parameter used when accessing the YouTube API.
+    ///
     /// This will change multilanguage video titles, descriptions and textual dates
     ///
     /// **Default value**: `Language::En` (English)
     ///
     /// **Info**: you can set this option for individual queries, too
+    #[must_use]
     pub fn lang(mut self, lang: Language) -> Self {
         self.default_opts.lang = lang;
         self
     }
 
     /// Set the country parameter used when accessing the YouTube API.
+    ///
     /// This will change trends and recommended content.
     ///
     /// **Default value**: `Country::Us` (USA)
     ///
     /// **Info**: you can set this option for individual queries, too
+    #[must_use]
     pub fn country(mut self, country: Country) -> Self {
         self.default_opts.country = validate_country(country);
         self
     }
 
     /// Generate a report on every operation.
+    ///
     /// This should only be used for debugging.
     ///
     /// **Info**: you can set this option for individual queries, too
+    #[must_use]
     pub fn report(mut self) -> Self {
         self.default_opts.report = true;
         self
@@ -549,23 +634,44 @@ impl RustyPipeBuilder {
 
     /// Enable strict mode, causing operations to fail if there
     /// are warnings during deserialization (e.g. invalid items).
+    ///
     /// This should only be used for testing.
     ///
     /// **Info**: you can set this option for individual queries, too
+    #[must_use]
     pub fn strict(mut self) -> Self {
         self.default_opts.strict = true;
         self
     }
 
-    /// Set the default YouTube visitor data cookie
+    /// Set the YouTube visitor data cookie
+    ///
+    /// YouTube assigns a session cookie to each user which is used for personalized
+    /// recommendations. By default, RustyPipe does not send this cookie to preserve
+    /// user privacy. For requests that mandate the cookie, a new one is requested
+    /// for every query.
+    ///
+    /// This option allows you to manually set the visitor data cookie of your client,
+    /// allowing you to get personalized recommendations or reproduce A/B tests.
+    ///
+    /// Note that YouTube has a rate limit on the number of requests from a single
+    /// visitor, so you should not use the same visitor data cookie for batch operations.
+    ///
+    /// **Info**: you can set this option for individual queries, too
+    #[must_use]
     pub fn visitor_data<S: Into<String>>(mut self, visitor_data: S) -> Self {
         self.default_opts.visitor_data = Some(visitor_data.into());
         self
     }
 
-    /// Set the default YouTube visitor data cookie to an optional value
-    pub fn visitor_data_opt(mut self, visitor_data: Option<String>) -> Self {
-        self.default_opts.visitor_data = visitor_data;
+    /// Set the YouTube visitor data cookie to an optional value
+    ///
+    /// see also [`RustyPipeBuilder::visitor_data`]
+    ///
+    /// **Info**: you can set this option for individual queries, too
+    #[must_use]
+    pub fn visitor_data_opt<S: Into<String>>(mut self, visitor_data: Option<S>) -> Self {
+        self.default_opts.visitor_data = visitor_data.map(S::into);
         self
     }
 }
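A brief usage sketch of the reworked visitor data options (builder-level default plus per-query override). The method signatures come from the diff above and the crate's README; the `RP_VISITOR_DATA` environment variable is only an illustrative stand-in for wherever a saved cookie might be stored.

```rust ignore
use rustypipe::client::RustyPipe;

#[tokio::main]
async fn main() {
    // Optional saved cookie (None = let RustyPipe fetch a fresh one on demand);
    // the env var name is hypothetical.
    let saved_cookie: Option<String> = std::env::var("RP_VISITOR_DATA").ok();

    // Builder-level default, applied to every query made with this client
    let rp = RustyPipe::builder().visitor_data_opt(saved_cookie).build();

    // Per-query override, matching the doc example above
    let _player = rp
        .query()
        .visitor_data("CgthZVRCd1dkbTlRWSj3v_miBg%3D%3D")
        .player("ZeerrnuLi5E")
        .await
        .unwrap();
}
```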
@@ -579,19 +685,22 @@ impl Default for RustyPipe {
 impl RustyPipe {
     /// Create a new RustyPipe instance with default settings.
     ///
-    /// To create an instance with custom options, use `RustyPipeBuilder` instead.
+    /// To create an instance with custom options, use [`RustyPipeBuilder`] instead.
+    #[must_use]
     pub fn new() -> Self {
         RustyPipeBuilder::new().build()
     }
 
-    /// Constructs a new `RustyPipeBuilder`.
+    /// Create a new [`RustyPipeBuilder`]
     ///
-    /// This is the same as `RustyPipeBuilder::new()`
+    /// This is the same as [`RustyPipeBuilder::new`]
+    #[must_use]
     pub fn builder() -> RustyPipeBuilder {
         RustyPipeBuilder::new()
     }
 
-    /// Constructs a new `RustyPipeQuery`.
+    /// Create a new [`RustyPipeQuery`] to run an API request
+    #[must_use]
     pub fn query(&self) -> RustyPipeQuery {
         RustyPipeQuery {
             client: self.clone(),

@@ -690,7 +799,7 @@ impl RustyPipe {
                 .get(sw_url)
                 .header(header::ORIGIN, origin)
                 .header(header::REFERER, origin)
-                .header(header::COOKIE, self.inner.consent_cookie.to_owned())
+                .header(header::COOKIE, self.inner.consent_cookie.clone())
                 .build()
                 .unwrap(),
         )
@@ -739,13 +848,13 @@ impl RustyPipe {
         let mut desktop_client = self.inner.cache.desktop_client.write().await;
 
         match desktop_client.get() {
-            Some(cdata) => cdata.version.to_owned(),
+            Some(cdata) => cdata.version.clone(),
             None => {
                 log::debug!("getting desktop client version");
                 match self.extract_desktop_client_version().await {
                     Ok(version) => {
                         *desktop_client = CacheEntry::from(ClientData {
-                            version: version.to_owned(),
+                            version: version.clone(),
                         });
                         drop(desktop_client);
                         self.store_cache().await;

@@ -771,13 +880,13 @@ impl RustyPipe {
         let mut music_client = self.inner.cache.music_client.write().await;
 
         match music_client.get() {
-            Some(cdata) => cdata.version.to_owned(),
+            Some(cdata) => cdata.version.clone(),
             None => {
                 log::debug!("getting music client version");
                 match self.extract_music_client_version().await {
                     Ok(version) => {
                         *music_client = CacheEntry::from(ClientData {
-                            version: version.to_owned(),
+                            version: version.clone(),
                         });
                         drop(music_client);
                         self.store_cache().await;

@@ -826,8 +935,12 @@ impl RustyPipe {
         }
     }
 
+    /// Request a new visitor data cookie from YouTube
+    ///
+    /// Since the cookie is shared between YT and YTM and the YTM page loads faster,
+    /// we request that.
     async fn get_visitor_data(&self) -> Result<String, Error> {
-        log::debug!("getting YTM visitor data");
+        log::debug!("getting YT visitor data");
         let resp = self.inner.http.get(YOUTUBE_MUSIC_HOME_URL).send().await?;
 
         resp.headers()
@@ -849,21 +962,27 @@ impl RustyPipe {
 
 impl RustyPipeQuery {
     /// Set the language parameter used when accessing the YouTube API
+    ///
     /// This will change multilanguage video titles, descriptions and textual dates
+    #[must_use]
     pub fn lang(mut self, lang: Language) -> Self {
         self.opts.lang = lang;
         self
     }
 
     /// Set the country parameter used when accessing the YouTube API.
+    ///
     /// This will change trends and recommended content.
+    #[must_use]
     pub fn country(mut self, country: Country) -> Self {
         self.opts.country = validate_country(country);
         self
     }
 
     /// Generate a report on every operation.
+    ///
     /// This should only be used for debugging.
+    #[must_use]
     pub fn report(mut self) -> Self {
         self.opts.report = true;
         self

@@ -871,21 +990,38 @@ impl RustyPipeQuery {
 
     /// Enable strict mode, causing operations to fail if there
     /// are warnings during deserialization (e.g. invalid items).
+    ///
     /// This should only be used for testing.
+    #[must_use]
     pub fn strict(mut self) -> Self {
         self.opts.strict = true;
         self
     }
 
     /// Set the YouTube visitor data cookie
+    ///
+    /// YouTube assigns a session cookie to each user which is used for personalized
+    /// recommendations. By default, RustyPipe does not send this cookie to preserve
+    /// user privacy. For requests that mandate the cookie, a new one is requested
+    /// for every query.
+    ///
+    /// This option allows you to manually set the visitor data cookie of your query,
+    /// allowing you to get personalized recommendations or reproduce A/B tests.
+    ///
+    /// Note that YouTube has a rate limit on the number of requests from a single
+    /// visitor, so you should not use the same visitor data cookie for batch operations.
+    #[must_use]
     pub fn visitor_data<S: Into<String>>(mut self, visitor_data: S) -> Self {
         self.opts.visitor_data = Some(visitor_data.into());
         self
     }
 
     /// Set the YouTube visitor data cookie to an optional value
-    pub fn visitor_data_opt(mut self, visitor_data: Option<String>) -> Self {
-        self.opts.visitor_data = visitor_data;
+    ///
+    /// see also [`RustyPipeQuery::visitor_data`]
+    #[must_use]
+    pub fn visitor_data_opt<S: Into<String>>(mut self, visitor_data: Option<S>) -> Self {
+        self.opts.visitor_data = visitor_data.map(S::into);
         self
     }
 
@@ -901,13 +1037,10 @@ impl RustyPipeQuery {
         localized: bool,
         visitor_data: Option<&'a str>,
     ) -> YTContext {
-        let hl = match localized {
-            true => self.opts.lang,
-            false => Language::En,
-        };
-        let gl = match localized {
-            true => self.opts.country,
-            false => Country::Us,
+        let (hl, gl) = if localized {
+            (self.opts.lang, self.opts.country)
+        } else {
+            (Language::En, Country::Us)
         };
         let visitor_data = self.opts.visitor_data.as_deref().or(visitor_data);
 
@@ -1009,7 +1142,7 @@ impl RustyPipeQuery {
             ))
             .header(header::ORIGIN, YOUTUBE_HOME_URL)
             .header(header::REFERER, YOUTUBE_HOME_URL)
-            .header(header::COOKIE, self.client.inner.consent_cookie.to_owned())
+            .header(header::COOKIE, self.client.inner.consent_cookie.clone())
             .header("X-YouTube-Client-Name", "1")
             .header(
                 "X-YouTube-Client-Version",

@@ -1024,7 +1157,7 @@ impl RustyPipeQuery {
             ))
             .header(header::ORIGIN, YOUTUBE_MUSIC_HOME_URL)
             .header(header::REFERER, YOUTUBE_MUSIC_HOME_URL)
-            .header(header::COOKIE, self.client.inner.consent_cookie.to_owned())
+            .header(header::COOKIE, self.client.inner.consent_cookie.clone())
            .header("X-YouTube-Client-Name", "67")
             .header(
                 "X-YouTube-Client-Version",

@@ -1077,7 +1210,7 @@ impl RustyPipeQuery {
     /// Get a YouTube visitor data cookie, which is necessary for certain requests
     async fn get_visitor_data(&self) -> Result<String, Error> {
         match &self.opts.visitor_data {
-            Some(vd) => Ok(vd.to_owned()),
+            Some(vd) => Ok(vd.clone()),
             None => self.client.get_visitor_data().await,
         }
     }

@@ -1223,21 +1356,19 @@ impl RustyPipeQuery {
         if level > Level::DBG || self.opts.report {
             if let Some(reporter) = &self.client.inner.reporter {
                 let report = Report {
-                    info: Default::default(),
+                    info: RustyPipeInfo::default(),
                     level,
-                    operation: format!("{operation}({id})"),
+                    operation: &format!("{operation}({id})"),
                     error,
                     msgs,
                     deobf_data: deobf.cloned(),
                     http_request: crate::report::HTTPRequest {
-                        url: request.url().to_string(),
-                        method: "POST".to_string(),
+                        url: request.url().as_str(),
+                        method: request.method().as_str(),
                         req_header: request
                             .headers()
                             .iter()
-                            .map(|(k, v)| {
-                                (k.to_string(), v.to_str().unwrap_or_default().to_owned())
-                            })
+                            .map(|(k, v)| (k.as_str(), v.to_str().unwrap_or_default().to_owned()))
                             .collect(),
                         req_body: serde_json::to_string(body).unwrap_or_default(),
                         status: req_res.status.into(),
|
||||||
all_albums: bool,
|
all_albums: bool,
|
||||||
) -> Result<MusicArtist, Error> {
|
) -> Result<MusicArtist, Error> {
|
||||||
let artist_id = artist_id.as_ref();
|
let artist_id = artist_id.as_ref();
|
||||||
let visitor_data = match all_albums {
|
let visitor_data = if all_albums {
|
||||||
true => Some(self.get_visitor_data().await?),
|
Some(self.get_visitor_data().await?)
|
||||||
false => None,
|
} else {
|
||||||
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
let res = self._music_artist(artist_id, visitor_data.as_deref()).await;
|
let res = self._music_artist(artist_id, visitor_data.as_deref()).await;
|
||||||
|
@ -196,7 +197,7 @@ fn map_artist_page(
|
||||||
lang,
|
lang,
|
||||||
ArtistId {
|
ArtistId {
|
||||||
id: Some(id.to_owned()),
|
id: Some(id.to_owned()),
|
||||||
name: header.title.to_owned(),
|
name: header.title.clone(),
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
|
@ -60,7 +60,7 @@ impl RustyPipeQuery {
|
||||||
// In rare cases, albums may have track numbers =0 (example: MPREb_RM0QfZ0eSKL)
|
// In rare cases, albums may have track numbers =0 (example: MPREb_RM0QfZ0eSKL)
|
||||||
// They should be replaced with the track number derived from the previous track.
|
// They should be replaced with the track number derived from the previous track.
|
||||||
let mut n_prev = 0;
|
let mut n_prev = 0;
|
||||||
for track in album.tracks.iter_mut() {
|
for track in &mut album.tracks {
|
||||||
let tn = track.track_nr.unwrap_or_default();
|
let tn = track.track_nr.unwrap_or_default();
|
||||||
if tn == 0 {
|
if tn == 0 {
|
||||||
n_prev += 1;
|
n_prev += 1;
|
||||||
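The hunk above is cut off after `n_prev += 1;`, so here is a standalone sketch of the track-number fixup the comment describes: a zero track number is replaced by counting on from the previous track. The exact continuation (writing the number back, resetting `n_prev` on valid numbers) is an assumption; the real code operates on `album.tracks` rather than a plain slice.

```rust
/// Replace track numbers of 0 by continuing from the previous track (sketch).
fn fix_track_numbers(track_numbers: &mut [Option<u32>]) {
    let mut n_prev = 0;
    for nr in track_numbers.iter_mut() {
        let tn = nr.unwrap_or_default();
        if tn == 0 {
            // assumed continuation: count up from the last seen number
            n_prev += 1;
            *nr = Some(n_prev);
        } else {
            n_prev = tn;
        }
    }
}

fn main() {
    let mut nrs = vec![Some(1), Some(2), Some(0), Some(0), Some(5)];
    fix_track_numbers(&mut nrs);
    assert_eq!(nrs, vec![Some(1), Some(2), Some(3), Some(4), Some(5)]);
}
```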
@@ -80,7 +80,7 @@ impl RustyPipeQuery {
             .enumerate()
             .filter_map(|(i, track)| {
                 if track.is_video {
-                    Some((i, track.name.to_owned()))
+                    Some((i, track.name.clone()))
                 } else {
                     None
                 }

@@ -97,7 +97,7 @@ impl RustyPipeQuery {
         for (i, title) in to_replace {
             let found_track = playlist.tracks.items.iter().find_map(|track| {
                 if track.name == title && !track.is_video {
-                    Some((track.id.to_owned(), track.duration))
+                    Some((track.id.clone(), track.duration))
                 } else {
                     None
                 }

@@ -173,7 +173,7 @@ impl MapResponse<MusicPlaylist> for response::MusicPlaylist {
                     .split(|p| p == DOT_SEPARATOR)
                     .collect::<Vec<_>>();
                 parts
-                    .get(if parts.len() > 2 { 1 } else { 0 })
+                    .get(usize::from(parts.len() > 2))
                     .and_then(|txt| util::parse_numeric::<u64>(&txt[0]).ok())
             })
         } else {

@@ -293,7 +293,7 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
             match section {
                 response::music_item::ItemSection::MusicShelfRenderer(sh) => shelf = Some(sh),
                 response::music_item::ItemSection::MusicCarouselShelfRenderer(sh) => {
-                    album_variants = Some(sh.contents)
+                    album_variants = Some(sh.contents);
                 }
                 _ => (),
             }

@@ -355,7 +355,7 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
                 )
             })
             .unwrap_or_default();
-        let artist_id = artist_id.or_else(|| artists.first().and_then(|a| a.id.to_owned()));
+        let artist_id = artist_id.or_else(|| artists.first().and_then(|a| a.id.clone()));
 
         let mut mapper = MusicListMapper::with_album(
             lang,

@@ -363,7 +363,7 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
             by_va,
             AlbumId {
                 id: id.to_owned(),
-                name: header.title.to_owned(),
+                name: header.title.clone(),
             },
         );
         mapper.map_response(shelf.contents);

@@ -170,9 +170,10 @@ impl RustyPipeQuery {
     ) -> Result<MusicSearchFiltered<MusicPlaylistItem>, Error> {
         self._music_search_playlists(
             query,
-            match community {
-                true => Params::CommunityPlaylists,
-                false => Params::YtmPlaylists,
+            if community {
+                Params::CommunityPlaylists
+            } else {
+                Params::YtmPlaylists
             },
         )
         .await

@@ -266,7 +267,7 @@ impl MapResponse<MusicSearchResult> for response::MusicSearch {
                 }
                 response::music_search::ItemSection::ItemSectionRenderer { contents } => {
                     if let Some(corrected) = contents.into_iter().next() {
-                        corrected_query = Some(corrected.showing_results_for_renderer.corrected_query)
+                        corrected_query = Some(corrected.showing_results_for_renderer.corrected_query);
                     }
                 }
                 response::music_search::ItemSection::None => {}

@@ -324,7 +325,7 @@ impl<T: FromYtItem> MapResponse<MusicSearchFiltered<T>> for response::MusicSearc
                 }
                 response::music_search::ItemSection::ItemSectionRenderer { contents } => {
                     if let Some(corrected) = contents.into_iter().next() {
-                        corrected_query = Some(corrected.showing_results_for_renderer.corrected_query)
+                        corrected_query = Some(corrected.showing_results_for_renderer.corrected_query);
                     }
                 }
                 response::music_search::ItemSection::None => {}
@@ -177,12 +177,12 @@ impl MapResponse<VideoPlayer> for response::Player {
             }
             response::player::PlayabilityStatus::LoginRequired { reason, messages } => {
                 let mut msg = reason;
-                messages.iter().for_each(|m| {
+                for m in &messages {
                     if !msg.is_empty() {
                         msg.push(' ');
                     }
                     msg.push_str(m);
-                });
+                }
 
                 // reason (age restriction): "Sign in to confirm your age"
                 // or: "This video may be inappropriate for some users."

@@ -341,8 +341,8 @@ impl MapResponse<VideoPlayer> for response::Player {
                 + "&sigh="
                 + sigh;
 
-            let sprite_count = ((total_count as f64)
-                / (frames_per_page_x * frames_per_page_y) as f64)
+            let sprite_count = (f64::from(total_count)
+                / f64::from(frames_per_page_x * frames_per_page_y))
                 .ceil() as u32;
 
             Some(Frameset {
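A small worked example of the sprite-sheet calculation in the hunk above: the number of storyboard sprite pages is the total frame count divided by the frames per page, rounded up. The concrete numbers are illustrative.

```rust
fn main() {
    // Illustrative storyboard values: 100 preview frames, 5x5 frames per sprite page
    let total_count: u32 = 100;
    let (frames_per_page_x, frames_per_page_y): (u32, u32) = (5, 5);

    // Same formula as in the diff: ceil(total / (x * y))
    let sprite_count =
        (f64::from(total_count) / f64::from(frames_per_page_x * frames_per_page_y)).ceil() as u32;

    assert_eq!(sprite_count, 4);
}
```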
@@ -413,11 +413,11 @@ fn deobf_nsig(
     let nsig: String;
     if let Some(n) = url_params.get("n") {
         nsig = if n == &last_nsig[0] {
-            last_nsig[1].to_owned()
+            last_nsig[1].clone()
         } else {
             let nsig = deobf.deobfuscate_nsig(n)?;
             last_nsig[0] = n.to_string();
-            last_nsig[1] = nsig.to_owned();
+            last_nsig[1] = nsig.clone();
             nsig
         };
 
@@ -490,9 +490,7 @@ fn map_video_stream(
     deobf: &Deobfuscator,
     last_nsig: &mut [String; 2],
 ) -> MapResult<Option<VideoStream>> {
-    let (mtype, codecs) = match parse_mime(&f.mime_type) {
-        Some(x) => x,
-        None => {
+    let Some((mtype, codecs)) = parse_mime(&f.mime_type) else {
         return MapResult {
             c: None,
             warnings: vec![format!(

@@ -500,16 +498,12 @@ fn map_video_stream(
                 &f.mime_type, &f
             )],
         }
-        }
     };
-    let format = match get_video_format(mtype) {
-        Some(f) => f,
-        None => {
+    let Some(format) = get_video_format(mtype) else {
         return MapResult {
             c: None,
             warnings: vec![format!("invalid video format. itag: {}", f.itag)],
         }
-        }
     };
     let map_res = map_url(&f.url, &f.signature_cipher, deobf, last_nsig);
 
@@ -532,9 +526,9 @@ fn map_video_stream(
             quality: f.quality_label.unwrap(),
             hdr: f.color_info.unwrap_or_default().primaries
                 == player::Primaries::ColorPrimariesBt2020,
-            mime: f.mime_type.to_owned(),
             format,
             codec: get_video_codec(codecs),
+            mime: f.mime_type,
             throttled: url.throttled,
         }),
         warnings: map_res.warnings,

@@ -551,9 +545,7 @@ fn map_audio_stream(
     deobf: &Deobfuscator,
     last_nsig: &mut [String; 2],
 ) -> MapResult<Option<AudioStream>> {
-    let (mtype, codecs) = match parse_mime(&f.mime_type) {
-        Some(x) => x,
-        None => {
+    let Some((mtype, codecs)) = parse_mime(&f.mime_type) else {
         return MapResult {
             c: None,
             warnings: vec![format!(

@@ -561,16 +553,12 @@ fn map_audio_stream(
                 &f.mime_type, &f
             )],
         }
-        }
     };
-    let format = match get_audio_format(mtype) {
-        Some(f) => f,
-        None => {
+    let Some(format) = get_audio_format(mtype) else {
         return MapResult {
             c: None,
             warnings: vec![format!("invalid audio format. itag: {}", f.itag)],
         }
-        }
     };
     let map_res = map_url(&f.url, &f.signature_cipher, deobf, last_nsig);
     let mut warnings = map_res.warnings;

@@ -586,9 +574,9 @@ fn map_audio_stream(
             index_range: f.index_range,
             init_range: f.init_range,
             duration_ms: f.approx_duration_ms,
-            mime: f.mime_type.to_owned(),
             format,
             codec: get_audio_codec(codecs),
+            mime: f.mime_type,
             channels: f.audio_channels,
             loudness_db: f.loudness_db,
             throttled: url.throttled,

@@ -686,7 +674,7 @@ fn map_audio_track(
                 }
             },
             _ => {}
-        })
+        });
     }
 
     AudioTrack {
@@ -60,9 +60,8 @@ impl MapResponse<Playlist> for response::Playlist {
         lang: crate::param::Language,
         _deobf: Option<&crate::deobfuscate::DeobfData>,
     ) -> Result<MapResult<Playlist>, ExtractionError> {
-        let (contents, header) = match (self.contents, self.header) {
-            (Some(contents), Some(header)) => (contents, header),
-            _ => return Err(response::alerts_to_err(id, self.alerts)),
+        let (Some(contents), Some(header)) = (self.contents, self.header) else {
+            return Err(response::alerts_to_err(id, self.alerts));
         };
 
         let video_items = contents

@@ -87,11 +87,9 @@ impl From<ChannelRss> for crate::model::ChannelRss {
             feed.entry
                 .iter()
                 .find_map(|entry| {
-                    if !entry.channel_id.is_empty() {
-                        Some(entry.channel_id.to_owned())
-                    } else {
-                        None
-                    }
+                    Some(entry.channel_id.as_str())
+                        .filter(|id| !id.is_empty())
+                        .map(str::to_owned)
                 })
                 .or_else(|| {
                     feed.author

@@ -349,7 +349,7 @@ impl From<Icon> for crate::model::Verification {
         match icon.icon_type {
             IconType::Check => Self::Verified,
             IconType::OfficialArtistBadge => Self::Artist,
-            _ => Self::None,
+            IconType::Like => Self::None,
         }
     }
 }
@@ -500,7 +500,7 @@ impl MusicListMapper {
 
         let pt_id = item
             .navigation_endpoint
-            .and_then(|ne| ne.music_page())
+            .and_then(NavigationEndpoint::music_page)
             .or_else(|| {
                 c1.and_then(|c1| {
                     c1.renderer.text.0.into_iter().next().and_then(|t| match t {

@@ -796,7 +796,7 @@ impl MusicListMapper {
             name: item.title,
             duration: None,
             cover: item.thumbnail_renderer.into(),
-            artist_id: artists.first().and_then(|a| a.id.to_owned()),
+            artist_id: artists.first().and_then(|a| a.id.clone()),
             artists,
             album: None,
             view_count: subtitle_p2.and_then(|c| {

@@ -872,7 +872,7 @@ impl MusicListMapper {
             id,
             name: item.title,
             cover: item.thumbnail_renderer.into(),
-            artist_id: artists.first().and_then(|a| a.id.to_owned()),
+            artist_id: artists.first().and_then(|a| a.id.clone()),
             artists,
             album_type,
             year,

@@ -886,8 +886,7 @@ impl MusicListMapper {
         let from_ytm = subtitle_p2
             .as_ref()
             .and_then(|p| p.0.first())
-            .map(util::is_ytm)
-            .unwrap_or(true);
+            .map_or(true, util::is_ytm);
         let channel = subtitle_p2.and_then(|p| {
             p.0.into_iter().find_map(|c| ChannelId::try_from(c).ok())
         });

@@ -973,7 +972,7 @@ impl MusicListMapper {
             id,
             name: card.title,
             cover: card.thumbnail.into(),
-            artist_id: artists.first().and_then(|a| a.id.to_owned()),
+            artist_id: artists.first().and_then(|a| a.id.clone()),
             artists,
             album_type,
             year: subtitle_p3.and_then(|y| util::parse_numeric(y.first_str()).ok()),

@@ -1010,7 +1009,7 @@ impl MusicListMapper {
             name: card.title,
             duration,
             cover: card.thumbnail.into(),
-            artist_id: artists.first().and_then(|a| a.id.to_owned()),
+            artist_id: artists.first().and_then(|a| a.id.clone()),
             artists,
             album,
             view_count,

@@ -1024,8 +1023,7 @@ impl MusicListMapper {
         let from_ytm = subtitle_p2
             .as_ref()
             .and_then(|p| p.0.first())
-            .map(util::is_ytm)
-            .unwrap_or(true);
+            .map_or(true, util::is_ytm);
         let channel = subtitle_p2
             .and_then(|p| p.0.into_iter().find_map(|c| ChannelId::try_from(c).ok()));
         let track_count =

@@ -1121,10 +1119,17 @@ impl MusicListMapper {
         }
     }
 
+    /// Sometimes the YT Music API returns responses containing unknown items.
+    ///
+    /// In this case, the response data is likely missing some fields, which leads to
+    /// parsing errors and wrong data being extracted.
+    ///
+    /// Therefore it is safest to discard such responses and retry the request.
     pub fn check_unknown(&self) -> Result<(), ExtractionError> {
-        match self.has_unknown {
-            true => Err(ExtractionError::InvalidData("unknown YTM items".into())),
-            false => Ok(()),
+        if self.has_unknown {
+            Err(ExtractionError::InvalidData("unknown YTM items".into()))
+        } else {
+            Ok(())
         }
     }
 }
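The doc comment added above recommends discarding responses with unknown items and retrying. A hedged sketch of what such a retry loop might look like on the caller's side; the `fetch_album` function, its error type and the attempt limit are illustrative stand-ins, not part of RustyPipe's API.

```rust
/// Hypothetical stand-in for a query that may return invalid data
/// (RustyPipe signals this case with `ExtractionError::InvalidData`).
fn fetch_album(attempt: u32) -> Result<Vec<String>, String> {
    if attempt == 0 {
        Err("unknown YTM items".to_owned())
    } else {
        Ok(vec!["Track 1".to_owned(), "Track 2".to_owned()])
    }
}

fn main() {
    const MAX_ATTEMPTS: u32 = 3;
    let mut tracks = None;
    for attempt in 0..MAX_ATTEMPTS {
        match fetch_album(attempt) {
            Ok(t) => {
                tracks = Some(t);
                break;
            }
            // Discard the bad response and try again, as the doc comment suggests
            Err(msg) => eprintln!("attempt {attempt} failed: {msg}, retrying"),
        }
    }
    println!("{tracks:?}");
}
```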
@@ -1161,7 +1166,7 @@ fn map_artist_id_fallback(
     fallback_artist: Option<&ArtistId>,
 ) -> Option<String> {
     menu.and_then(|m| map_artist_id(m.menu_renderer.contents))
-        .or_else(|| fallback_artist.and_then(|a| a.id.to_owned()))
+        .or_else(|| fallback_artist.and_then(|a| a.id.clone()))
 }
 
 pub(crate) fn map_artist_id(entries: Vec<MusicItemMenuEntry>) -> Option<String> {

@@ -69,6 +69,7 @@ impl<'de> Deserialize<'de> for BrowseEndpoint {
         let bep = BEp::deserialize(deserializer)?;
 
         // Remove the VL prefix from the playlist id
+        #[allow(clippy::map_unwrap_or)]
         let browse_id = bep
             .browse_endpoint_context_supported_configs
             .as_ref()

@@ -167,9 +168,8 @@ pub(crate) enum PageType {
 impl PageType {
     pub(crate) fn to_url_target(self, id: String) -> Option<UrlTarget> {
         match self {
-            PageType::Artist => Some(UrlTarget::Channel { id }),
+            PageType::Artist | PageType::Channel => Some(UrlTarget::Channel { id }),
             PageType::Album => Some(UrlTarget::Album { id }),
-            PageType::Channel => Some(UrlTarget::Channel { id }),
             PageType::Playlist => Some(UrlTarget::Playlist { id }),
             PageType::Unknown => None,
         }

@@ -419,8 +419,8 @@ impl<T> YouTubeListMapper<T> {
         Self {
             lang,
             channel: Some(ChannelTag {
-                id: channel.id.to_owned(),
-                name: channel.name.to_owned(),
+                id: channel.id.clone(),
+                name: channel.name.clone(),
                 avatar: Vec::new(),
                 verification: channel.verification,
                 subscriber_count: channel.subscriber_count,

@@ -572,14 +572,15 @@ impl<T> YouTubeListMapper<T> {
 
     fn map_channel(&mut self, channel: ChannelRenderer) -> ChannelItem {
         // channel handle instead of subscriber count (A/B test 3)
-        let (sc_txt, vc_text) = match channel
+        let (sc_txt, vc_text) = if channel
             .subscriber_count_text
             .as_ref()
             .map(|txt| txt.starts_with('@'))
             .unwrap_or_default()
         {
-            true => (channel.video_count_text, None),
-            false => (channel.subscriber_count_text, channel.video_count_text),
+            (channel.video_count_text, None)
+        } else {
+            (channel.subscriber_count_text, channel.video_count_text)
         };
 
         ChannelItem {

@@ -643,7 +644,7 @@ impl YouTubeListMapper<YouTubeItem> {
                         .map(|url| (l.title, util::sanitize_yt_url(&url.url)))
                 })
                 .collect(),
-            })
+            });
         }
         YouTubeListItem::RichItemRenderer { content } => {
             self.map_item(*content);

@@ -701,7 +702,7 @@ impl YouTubeListMapper<PlaylistItem> {
         match item {
             YouTubeListItem::PlaylistRenderer(playlist) => {
                 let mapped = self.map_playlist(playlist);
-                self.items.push(mapped)
+                self.items.push(mapped);
             }
             YouTubeListItem::ContinuationItemRenderer {
                 continuation_endpoint,
@@ -26,7 +26,7 @@ impl RustyPipeQuery {
     /// from alternative YouTube frontends like Piped or Invidious.
     ///
     /// The `resolve_albums` flag enables resolving YTM album URLs (e.g.
-    /// `OLAK5uy_k0yFrZlFRgCf3rLPza-lkRmCrtLPbK9pE`) to their short album id (`MPREb_GyH43gCvdM5`).
+    /// `OLAK5uy_k0yFrZlFRgCf3rLPza-lkRmCrtLPbK9pE`) to their short album ids (`MPREb_GyH43gCvdM5`).
     ///
     /// # Examples
     /// ```

@@ -168,12 +168,13 @@ impl RustyPipeQuery {
                 e,
                 Error::Extraction(ExtractionError::NotFound { .. })
             ) {
-                match util::VIDEO_ID_REGEX.is_match(id) {
-                    true => Ok(UrlTarget::Video {
+                if util::VIDEO_ID_REGEX.is_match(id) {
+                    Ok(UrlTarget::Video {
                         id: id.to_owned(),
                         start_time: get_start_time(),
-                    }),
-                    false => Err(e),
+                    })
+                } else {
+                    Err(e)
                 }
             } else {
                 Err(e)

@@ -217,7 +218,7 @@ impl RustyPipeQuery {
     ///     rp.query().resolve_string("LinusTechTips", true).await.unwrap(),
     ///     UrlTarget::Channel {id: "UCXuqSBlHAE6Xw-yeJA0Tunw".to_owned()}
     /// );
-    /// //
+    /// // Playlist
     /// assert_eq!(
     ///     rp.query().resolve_string("PL4lEESSgxM_5O81EvKCmBIm_JT5Q7JeaI", true).await.unwrap(),
     ///     UrlTarget::Playlist {id: "PL4lEESSgxM_5O81EvKCmBIm_JT5Q7JeaI".to_owned()}

@@ -393,7 +393,7 @@ impl MapResponse<Paginator<Comment>> for response::VideoComments {
                         lang,
                     );
                     comments.push(res.c);
-                    warnings.append(&mut res.warnings)
+                    warnings.append(&mut res.warnings);
                 }
                 response::video_details::CommentListItem::CommentRenderer(comment) => {
                     let mut res = map_comment(

@@ -403,7 +403,7 @@ impl MapResponse<Paginator<Comment>> for response::VideoComments {
                         lang,
                     );
                     comments.push(res.c);
-                    warnings.append(&mut res.warnings)
+                    warnings.append(&mut res.warnings);
                 }
                 response::video_details::CommentListItem::ContinuationItemRenderer {
                     continuation_endpoint,

@@ -433,11 +433,11 @@ fn map_recommendations(
     let mut mapper = response::YouTubeListMapper::<VideoItem>::new(lang);
     mapper.map_response(r);
 
-    if let Some(continuations) = continuations {
-        continuations.into_iter().for_each(|c| {
-            mapper.ctoken = Some(c.next_continuation_data.continuation);
-        })
-    };
+    mapper.ctoken = mapper.ctoken.or_else(|| {
+        continuations
+            .and_then(|c| c.into_iter().next())
+            .map(|c| c.next_continuation_data.continuation)
+    });
 
     MapResult {
         c: Paginator::new_ext(

@@ -238,7 +238,7 @@ fn extract_js_fn(js: &str, name: &str) -> Result<String, DeobfError> {
 
 fn get_nsig_fn(player_js: &str) -> Result<String, DeobfError> {
     let function_name = get_nsig_fn_name(player_js)?;
-    let function_base = function_name.to_owned() + "=function";
+    let function_base = function_name.clone() + "=function";
     let offset = player_js.find(&function_base).unwrap_or_default();
 
     extract_js_fn(&player_js[offset..], &function_name)
src/error.rs (12 changes)

@@ -81,7 +81,8 @@ pub enum ExtractionError {
 pub enum UnavailabilityReason {
     /// Video is age restricted.
     ///
-    /// Age restriction may be circumvented with the [`crate::client::ClientType::TvHtml5Embed`] client.
+    /// Age restriction may be circumvented with the
+    /// [`ClientType::TvHtml5Embed`](crate::client::ClientType::TvHtml5Embed) client.
     AgeRestricted,
     /// Video was deleted or censored
     Deleted,

@@ -123,7 +124,7 @@ impl Display for UnavailabilityReason {
 }
 
 pub(crate) mod internal {
-    use super::*;
+    use super::{Error, ExtractionError};
 
     /// Error that occurred during the initialization
     /// or use of the YouTube URL signature deobfuscator.

@@ -166,7 +167,7 @@ impl From<reqwest::Error> for Error {
     fn from(value: reqwest::Error) -> Self {
         if value.is_status() {
             if let Some(status) = value.status() {
-                return Self::HttpStatus(status.as_u16(), Default::default());
+                return Self::HttpStatus(status.as_u16(), Cow::default());
             }
         }
         Self::Http(value.to_string().into())

@@ -185,8 +186,9 @@ impl Error {
         matches!(
             self,
             Self::HttpStatus(_, _)
-                | Self::Extraction(ExtractionError::InvalidData(_))
-                | Self::Extraction(ExtractionError::WrongResult(_))
+                | Self::Extraction(
+                    ExtractionError::InvalidData(_) | ExtractionError::WrongResult(_)
+                )
         )
     }
 
src/lib.rs (21 changes)

@@ -1,5 +1,24 @@
 #![doc = include_str!("../README.md")]
-#![warn(missing_docs, clippy::todo, clippy::dbg_macro)]
+#![warn(missing_docs, clippy::todo, clippy::dbg_macro, clippy::pedantic)]
+#![allow(
+    clippy::doc_markdown,
+    clippy::similar_names,
+    clippy::items_after_statements,
+    clippy::too_many_lines,
+    clippy::module_name_repetitions,
+    clippy::must_use_candidate,
+    clippy::cast_possible_truncation,
+    clippy::cast_sign_loss,
+    clippy::cast_precision_loss,
+    clippy::single_match_else,
+    clippy::missing_errors_doc,
+    clippy::missing_panics_doc
+)]
+
+//! ## Go to
+//!
+//! - Client ([`rustypipe::client::Rustypipe`](crate::client::RustyPipe))
+//! - Query ([`rustypipe::client::RustypipeQuery`](crate::client::RustyPipeQuery))
 
 mod deobfuscate;
 mod serializer;

@@ -16,7 +16,7 @@ use serde_with::serde_as;
 use time::{Date, OffsetDateTime};
 
 use self::{paginator::Paginator, richtext::RichText};
-use crate::{error::Error, param::Country, serializer::DateYmd, util};
+use crate::{error::Error, param::Country, serializer::DateYmd, validate};
 
 /*
 #COMMON

@@ -110,22 +110,10 @@ impl UrlTarget {
     /// Validate the YouTube ID from the URL target
     pub(crate) fn validate(&self) -> Result<(), Error> {
         match self {
-            UrlTarget::Video { id, .. } => match util::VIDEO_ID_REGEX.is_match(id) {
-                true => Ok(()),
-                false => Err(Error::Other("invalid video id".into())),
-            },
-            UrlTarget::Channel { id } => match util::CHANNEL_ID_REGEX.is_match(id) {
-                true => Ok(()),
-                false => Err(Error::Other("invalid channel id".into())),
-            },
-            UrlTarget::Playlist { id } => match util::PLAYLIST_ID_REGEX.is_match(id) {
-                true => Ok(()),
-                false => Err(Error::Other("invalid playlist id".into())),
-            },
-            UrlTarget::Album { id } => match util::ALBUM_ID_REGEX.is_match(id) {
-                true => Ok(()),
-                false => Err(Error::Other("invalid album id".into())),
-            },
+            UrlTarget::Video { id, .. } => validate::video_id(id),
+            UrlTarget::Channel { id } => validate::channel_id(id),
+            UrlTarget::Playlist { id } => validate::playlist_id(id),
+            UrlTarget::Album { id } => validate::album_id(id),
         }
     }
 }

@@ -257,15 +245,15 @@ pub struct AudioStream {
     pub codec: AudioCodec,
     /// Number of audio channels
     pub channels: Option<u8>,
-    /// Audio loudness for ReplayGain correction
+    /// Audio loudness for volume normalization
     ///
     /// The track volume correction factor (0-1) can be calculated using this formula
     ///
     /// `10^(-loudness_db/20)`
     ///
-    /// Note that the value is the inverse of the usual track gain parameter, i.e. a
-    /// value of 6 means the volume should be reduced by 6dB and the ReplayGain track gain
-    /// parameter would be -6.
+    /// Note that the `loudness_db` value is the inverse of the usual ReplayGain track gain
+    /// parameter, i.e. a value of 6 means the volume should be reduced by 6dB and the
+    /// track gain parameter would be -6.
     ///
     /// More information about ReplayGain and how to apply this information to audio files
     /// can be found here: <https://wiki.hydrogenaud.io/index.php?title=ReplayGain_1.0_specification>.
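A small worked example of the loudness formula documented above: converting `loudness_db` into a linear volume factor and into the equivalent ReplayGain track gain tag. The input value is illustrative.

```rust
fn main() {
    // Loudness value as reported for a track (illustrative)
    let loudness_db: f64 = 6.0;

    // Formula from the docs above: factor = 10^(-loudness_db / 20)
    let volume_factor = 10f64.powf(-loudness_db / 20.0);

    // The ReplayGain track gain tag is simply the negated loudness
    let replaygain_track_gain = -loudness_db;

    // 6 dB louder than reference -> scale the volume down to about 0.5
    println!("volume factor = {volume_factor:.3}"); // 0.501
    println!("track gain = {replaygain_track_gain} dB"); // -6 dB
}
```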
@@ -61,9 +61,9 @@ impl TextComponent {
     /// Get the text from the component
     pub fn get_text(&self) -> &str {
         match self {
-            TextComponent::Text(text) => text,
-            TextComponent::Web { text, .. } => text,
-            TextComponent::YouTube { text, .. } => text,
+            TextComponent::Text(text)
+            | TextComponent::Web { text, .. }
+            | TextComponent::YouTube { text, .. } => text,
         }
     }

@@ -73,7 +73,7 @@ impl TextComponent {
     pub fn get_url(&self, yt_host: &str) -> String {
         match self {
             TextComponent::Text(_) => String::new(),
-            TextComponent::Web { url, .. } => url.to_owned(),
+            TextComponent::Web { url, .. } => url.clone(),
             TextComponent::YouTube { target, .. } => target.to_url_yt_host(yt_host),
         }
     }

@@ -82,7 +82,7 @@ impl TextComponent {
 impl ToPlaintext for TextComponent {
     fn to_plaintext_yt_host(&self, yt_host: &str) -> String {
         match self {
-            TextComponent::Text(text) => text.to_owned(),
+            TextComponent::Text(text) => text.clone(),
             _ => self.get_url(yt_host),
         }
     }
@@ -1,11 +1,14 @@
-//! Query parameters
+//! # Query parameters
+//!
+//! This module contains structs and enums used as input parameters
+//! for the functions in RustyPipe.

+mod locale;
 mod stream_filter;

-pub mod locale;
 pub mod search_filter;

-pub use locale::{Country, Language};
+pub use locale::{Country, Language, COUNTRIES, LANGUAGES};
 pub use stream_filter::StreamFilter;

 /// Channel video tab
@@ -30,7 +33,7 @@ pub enum ChannelOrder {

 impl ChannelVideoTab {
     /// Get the tab ID used to create ordered continuation tokens
-    pub(crate) const fn order_ctoken_id(&self) -> u32 {
+    pub(crate) const fn order_ctoken_id(self) -> u32 {
         match self {
             ChannelVideoTab::Videos => 15,
             ChannelVideoTab::Shorts => 10,
@@ -93,77 +93,90 @@ pub enum Length {

 impl SearchFilter {
     /// Get a new [`SearchFilter`]
+    #[must_use]
     pub fn new() -> Self {
         Self::default()
     }

     /// Sort the search results
+    #[must_use]
     pub fn sort(mut self, sort: Order) -> Self {
         self.sort = Some(sort);
         self
     }

     /// Sort the search results
+    #[must_use]
     pub fn sort_opt(mut self, sort: Option<Order>) -> Self {
         self.sort = sort;
         self
     }

     /// Filter videos with specific features
+    #[must_use]
     pub fn feature(mut self, feature: Feature) -> Self {
         self.features.insert(feature);
         self
     }

     /// Filter videos with specific features
+    #[must_use]
     pub fn features(mut self, features: BTreeSet<Feature>) -> Self {
         self.features = features;
         self
     }

     /// Filter videos by upload date range
+    #[must_use]
     pub fn date(mut self, date: UploadDate) -> Self {
         self.date = Some(date);
         self
     }

     /// Filter videos by upload date range
+    #[must_use]
     pub fn date_opt(mut self, date: Option<UploadDate>) -> Self {
         self.date = date;
         self
     }

     /// Filter videos by item type
+    #[must_use]
     pub fn item_type(mut self, item_type: ItemType) -> Self {
         self.item_type = Some(item_type);
         self
     }

     /// Filter videos by item type
+    #[must_use]
     pub fn item_type_opt(mut self, item_type: Option<ItemType>) -> Self {
         self.item_type = item_type;
         self
     }

     /// Filter videos by length range
+    #[must_use]
     pub fn length(mut self, length: Length) -> Self {
         self.length = Some(length);
         self
     }

     /// Filter videos by length range
+    #[must_use]
     pub fn length_opt(mut self, length: Option<Length>) -> Self {
         self.length = length;
         self
     }

     /// Disable the automatic correction of mistyped search terms
+    #[must_use]
     pub fn verbatim(mut self) -> Self {
         self.verbatim = true;
         self
     }

     /// Disable the automatic correction of mistyped search terms
+    #[must_use]
     pub fn verbatim_set(mut self, verbatim: bool) -> Self {
         self.verbatim = verbatim;
         self
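All of these builder methods take `self` by value and return the updated filter, so with the new `#[must_use]` attributes an accidentally discarded result now triggers a compiler warning. A minimal usage sketch (the `rustypipe::param::search_filter` path is assumed from the module layout above; only methods visible in this hunk are called):

```rust
use rustypipe::param::search_filter::SearchFilter;

fn main() {
    // Chained builder style: each call consumes the filter and returns it,
    // so the #[must_use] result has to be bound or passed on.
    let _filter = SearchFilter::new()
        // .sort(..), .feature(..), .date(..), .length(..) chain the same way;
        // their enum arguments are defined elsewhere in this module.
        .verbatim();
}
```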
@@ -197,7 +210,7 @@ impl SearchFilter {
         if self.verbatim {
             let mut extras = ProtoBuilder::new();
             extras.varint(1, 1);
-            pb.embedded(8, extras)
+            pb.embedded(8, extras);
         }

         pb.to_base64()
@@ -32,36 +32,41 @@ enum FilterResult {

 impl FilterResult {
     fn hard(val: bool) -> Self {
-        match val {
-            true => Self::Match,
-            false => Self::Deny,
+        if val {
+            Self::Match
+        } else {
+            Self::Deny
         }
     }

     fn soft(val: bool) -> Self {
-        match val {
-            true => Self::Match,
-            false => Self::AllowLowest,
+        if val {
+            Self::Match
+        } else {
+            Self::AllowLowest
         }
     }

     fn allow(val: bool) -> Self {
-        match val {
-            true => Self::Allow,
-            false => Self::Deny,
+        if val {
+            Self::Allow
+        } else {
+            Self::Deny
         }
     }

     fn join(self, other: Self) -> Self {
-        match self == Self::Deny {
-            true => Self::Deny,
-            false => self.min(other),
+        if self == Self::Deny {
+            Self::Deny
+        } else {
+            self.min(other)
         }
     }
 }

 impl<'a> StreamFilter<'a> {
     /// Create a new [`StreamFilter`]
+    #[must_use]
     pub fn new() -> Self {
         Self::default()
     }
@@ -70,6 +75,7 @@ impl<'a> StreamFilter<'a> {
     ///
     /// This is a soft filter, so if there is no stream with a bitrate
     /// <= the limit, the stream with the next higher bitrate is returned.
+    #[must_use]
     pub fn audio_max_bitrate(mut self, max_bitrate: u32) -> Self {
         self.audio_max_bitrate = Some(max_bitrate);
         self

@@ -83,6 +89,7 @@ impl<'a> StreamFilter<'a> {
     }

     /// Set the supported audio container formats
+    #[must_use]
     pub fn audio_formats(mut self, formats: &'a [AudioFormat]) -> Self {
         self.audio_formats = Some(formats);
         self

@@ -96,6 +103,7 @@ impl<'a> StreamFilter<'a> {
     }

     /// Set the supported audio codecs
+    #[must_use]
     pub fn audio_codecs(mut self, codecs: &'a [AudioCodec]) -> Self {
         self.audio_codecs = Some(codecs);
         self

@@ -114,6 +122,7 @@ impl<'a> StreamFilter<'a> {
     ///
     /// If this filter is unset or no stream matches,
     /// the filter returns the default audio stream.
+    #[must_use]
     pub fn audio_language(mut self, language: &'a str) -> Self {
         self.audio_language = Some(language);
         self

@@ -123,10 +132,13 @@ impl<'a> StreamFilter<'a> {
         match &self.audio_language {
             Some(language) => match &stream.track {
                 Some(track) => match &track.lang {
-                    Some(track_lang) => match track_lang == language {
-                        true => FilterResult::Match,
-                        false => FilterResult::allow(track.is_default),
-                    },
+                    Some(track_lang) => {
+                        if track_lang == language {
+                            FilterResult::Match
+                        } else {
+                            FilterResult::allow(track.is_default)
+                        }
+                    }
                     None => FilterResult::allow(track.is_default),
                 },
                 None => FilterResult::Match,

@@ -140,6 +152,7 @@ impl<'a> StreamFilter<'a> {
     ///
     /// This is a soft filter, so if there is no stream with a resolution
     /// <= the limit, the stream with the next higher resolution is returned.
+    #[must_use]
     pub fn video_max_res(mut self, max_res: u32) -> Self {
         self.video_max_res = Some(max_res);
         self

@@ -156,6 +169,7 @@ impl<'a> StreamFilter<'a> {
     ///
     /// This is a soft filter, so if there is no stream with a framerate
     /// <= the limit, the stream with the next higher framerate is returned.
+    #[must_use]
     pub fn video_max_fps(mut self, max_fps: u8) -> Self {
         self.video_max_fps = Some(max_fps);
         self

@@ -169,6 +183,7 @@ impl<'a> StreamFilter<'a> {
     }

     /// Set the supported video container formats
+    #[must_use]
     pub fn video_formats(mut self, formats: &'a [VideoFormat]) -> Self {
         self.video_formats = Some(formats);
         self

@@ -182,6 +197,7 @@ impl<'a> StreamFilter<'a> {
     }

     /// Set the supported video codecs
+    #[must_use]
     pub fn video_codecs(mut self, codecs: &'a [VideoCodec]) -> Self {
         self.video_codecs = Some(codecs);
         self

@@ -195,6 +211,7 @@ impl<'a> StreamFilter<'a> {
     }

     /// Allow HDR videos
+    #[must_use]
     pub fn video_hdr(mut self) -> Self {
         self.video_hdr = true;
         self

@@ -208,6 +225,7 @@ impl<'a> StreamFilter<'a> {
     }

     /// Output no video stream (audio only)
+    #[must_use]
     pub fn no_video(mut self) -> Self {
         self.video_none = true;
         self

@@ -236,6 +254,7 @@ impl<'a> StreamFilter<'a> {

 impl VideoPlayer {
     /// Select the audio stream which is the best match for the given [`StreamFilter`]
+    #[must_use]
     pub fn select_audio_stream(&self, filter: &StreamFilter) -> Option<&AudioStream> {
         let mut fallback: Option<&AudioStream> = None;
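The `FilterResult` variants above encode the difference between hard filters (which reject a stream outright) and soft filters (which only deprioritize it). A hedged usage sketch combining two methods from this file — the bitrate unit is assumed to be bit/s, and `player` stands for a previously fetched `VideoPlayer`:

```rust
use rustypipe::param::StreamFilter;

fn main() {
    // Audio-only selection with a soft bitrate cap: if no stream is at or
    // below the limit, the next higher bitrate is returned instead.
    let filter = StreamFilter::new().no_video().audio_max_bitrate(128_000);
    // With a fetched player: let audio = player.select_audio_stream(&filter);
    let _ = filter;
}
```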
@@ -37,13 +37,13 @@ const FILENAME_FORMAT: &[time::format_description::FormatItem] =
 /// RustyPipe error report
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[non_exhaustive]
-pub struct Report {
+pub struct Report<'a> {
     /// Information about the RustyPipe client
-    pub info: RustyPipeInfo,
+    pub info: RustyPipeInfo<'a>,
     /// Severity of the report
     pub level: Level,
     /// RustyPipe operation (e.g. `get_player`)
-    pub operation: String,
+    pub operation: &'a str,
     /// Error (if occurred)
     pub error: Option<String>,
     /// Detailed error/warning messages

@@ -52,17 +52,17 @@ pub struct Report {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub deobf_data: Option<DeobfData>,
     /// HTTP request data
-    pub http_request: HTTPRequest,
+    pub http_request: HTTPRequest<'a>,
 }

 /// Information about the RustyPipe client
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[non_exhaustive]
-pub struct RustyPipeInfo {
+pub struct RustyPipeInfo<'a> {
     /// Rust package name (`rustypipe`)
-    pub package: String,
+    pub package: &'a str,
     /// Package version (`0.1.0`)
-    pub version: String,
+    pub version: &'a str,
     /// Date/Time when the event occurred
     #[serde(with = "time::serde::rfc3339")]
     pub date: OffsetDateTime,

@@ -71,13 +71,13 @@ pub struct RustyPipeInfo {
 /// Reported HTTP request data
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[non_exhaustive]
-pub struct HTTPRequest {
+pub struct HTTPRequest<'a> {
     /// Request URL
-    pub url: String,
+    pub url: &'a str,
     /// HTTP method
-    pub method: String,
+    pub method: &'a str,
     /// HTTP request header
-    pub req_header: BTreeMap<String, String>,
+    pub req_header: BTreeMap<&'a str, String>,
     /// HTTP request body
     pub req_body: String,
     /// HTTP response status code

@@ -98,11 +98,11 @@ pub enum Level {
     ERR,
 }

-impl Default for RustyPipeInfo {
+impl Default for RustyPipeInfo<'_> {
     fn default() -> Self {
         Self {
-            package: "rustypipe".to_owned(),
-            version: "0.1.0".to_owned(),
+            package: env!("CARGO_PKG_NAME"),
+            version: env!("CARGO_PKG_VERSION"),
             date: util::now_sec(),
         }
     }
 }
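Switching the package/version fields from owned `String`s to borrowed strings works because `env!` is expanded at compile time to a `&'static str`, which satisfies any `&'a str` field without allocating. A tiny standalone sketch of the same pattern (independent of the `Report` types above):

```rust
// `env!` reads the Cargo-provided variable at build time; the result is a
// `&'static str`, so no allocation happens at runtime.
const PACKAGE: &str = env!("CARGO_PKG_NAME");
const VERSION: &str = env!("CARGO_PKG_VERSION");

fn main() {
    println!("{PACKAGE} v{VERSION}");
}
```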
@@ -349,15 +349,9 @@ impl From<TextComponent> for crate::model::ArtistId {
                     name: text,
                 },
             },
-            TextComponent::Video { text, .. } => Self {
-                id: None,
-                name: text,
-            },
-            TextComponent::Web { text, .. } => Self {
-                id: None,
-                name: text,
-            },
-            TextComponent::Text { text } => Self {
+            TextComponent::Video { text, .. }
+            | TextComponent::Web { text, .. }
+            | TextComponent::Text { text } => Self {
                 id: None,
                 name: text,
             },

@@ -406,10 +400,10 @@ impl From<TextComponents> for crate::model::richtext::RichText {
 impl TextComponent {
     pub fn as_str(&self) -> &str {
         match self {
-            TextComponent::Video { text, .. } => text,
-            TextComponent::Browse { text, .. } => text,
-            TextComponent::Web { text, .. } => text,
-            TextComponent::Text { text } => text,
+            TextComponent::Video { text, .. }
+            | TextComponent::Browse { text, .. }
+            | TextComponent::Web { text, .. }
+            | TextComponent::Text { text } => text,
         }
     }
 }

@@ -417,7 +411,10 @@ impl TextComponent {
 impl TextComponents {
     /// Return the string representation of the first text component
     pub fn first_str(&self) -> &str {
-        self.0.first().map(|t| t.as_str()).unwrap_or_default()
+        self.0
+            .first()
+            .map(TextComponent::as_str)
+            .unwrap_or_default()
     }

     /// Split the text components using the given separation string.

@@ -440,7 +437,7 @@ impl TextComponents {
         }

         if !inner.is_empty() {
-            buf.push(TextComponents(inner))
+            buf.push(TextComponents(inner));
         }

         buf

@@ -449,7 +446,7 @@ impl TextComponents {

 impl ToString for TextComponents {
     fn to_string(&self) -> String {
-        self.0.iter().map(|x| x.as_str()).collect::<String>()
+        self.0.iter().map(TextComponent::as_str).collect::<String>()
     }
 }
@@ -1,13 +1,17 @@
 // This file is automatically generated. DO NOT EDIT.
 // See codegen/gen_dictionary.rs for the generation code.
+#![allow(clippy::unreadable_literal)]
+
+//! The dictionary contains the information required to parse dates and numbers
+//! in all supported languages.
+
 use crate::{
     model::AlbumType,
     param::Language,
     util::timeago::{DateCmp, TaToken, TimeUnit},
 };

-/// The dictionary contains the information required to parse dates and numbers
-/// in all supported languages.
+/// Dictionary entry containing language-specific parsing information
 pub(crate) struct Entry {
     /// Tokens for parsing timeago strings.
     ///
@@ -26,7 +26,7 @@ pub static VIDEO_ID_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"^[A-Za-z0-9_-
 pub static CHANNEL_ID_REGEX: Lazy<Regex> =
     Lazy::new(|| Regex::new(r"^UC[A-Za-z0-9_-]{22}$").unwrap());
 pub static PLAYLIST_ID_REGEX: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r"^(?:PL|RDCLAK|OLAK)[A-Za-z0-9_-]{16,50}$").unwrap());
+    Lazy::new(|| Regex::new(r"^(?:PL|RDCLAK|OLAK|UU)[A-Za-z0-9_-]{16,50}$").unwrap());
 pub static ALBUM_ID_REGEX: Lazy<Regex> =
     Lazy::new(|| Regex::new(r"^MPREb_[A-Za-z0-9_-]{11}$").unwrap());
 pub static VANITY_PATH_REGEX: Lazy<Regex> = Lazy::new(|| {
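The added `UU` branch appears to cover auto-generated channel upload playlists, whose IDs are the channel ID with the `UC` prefix swapped for `UU`. A hedged check against the new pattern — the example ID below is derived from the channel ID used in the validation doc tests further down, not taken from this changeset:

```rust
use rustypipe::validate;

fn main() {
    // "UU" + the 22-character channel suffix (uploads playlist of
    // UC2DjFE7Xf11URZqWBigcVOQ) now matches PLAYLIST_ID_REGEX.
    assert!(validate::playlist_id("UU2DjFE7Xf11URZqWBigcVOQ").is_ok());
}
```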
@@ -91,7 +91,7 @@ pub fn random_uuid() -> String {
         rng.gen::<u16>(),
         rng.gen::<u16>(),
         rng.gen::<u16>(),
-        rng.gen::<u64>() & 0xffffffffffff,
+        rng.gen::<u64>() & 0xffff_ffff_ffff,
     )
 }

@@ -315,10 +315,7 @@ where

     let dict_entry = dictionary::entry(lang);
     let by_char = lang_by_char(lang) || lang == Language::Ko;
-    let decimal_point = match dict_entry.comma_decimal {
-        true => ',',
-        false => '.',
-    };
+    let decimal_point = if dict_entry.comma_decimal { ',' } else { '.' };

     let mut digits = String::new();
     let mut filtered = String::new();

@@ -345,14 +342,14 @@ where
     if digits.is_empty() {
         SplitTokens::new(&filtered, by_char)
             .find_map(|token| dict_entry.number_nd_tokens.get(token))
-            .and_then(|n| (*n as u64).try_into().ok())
+            .and_then(|n| (u64::from(*n)).try_into().ok())
     } else {
         let num = digits.parse::<u64>().ok()?;

         exp += SplitTokens::new(&filtered, by_char)
             .filter_map(|token| match token {
                 "k" => Some(3),
-                _ => dict_entry.number_tokens.get(token).map(|t| *t as i32),
+                _ => dict_entry.number_tokens.get(token).map(|t| i32::from(*t)),
             })
             .sum::<i32>();

@@ -447,9 +444,10 @@ pub enum SplitTokens<'a> {

 impl<'a> SplitTokens<'a> {
     pub fn new(s: &'a str, by_char: bool) -> Self {
-        match by_char {
-            true => Self::Char(SplitChar::from(s)),
-            false => Self::Word(s.split_whitespace()),
+        if by_char {
+            Self::Char(SplitChar::from(s))
+        } else {
+            Self::Word(s.split_whitespace())
         }
     }
 }
@@ -33,8 +33,8 @@ impl ProtoBuilder {
     ///
     /// Reference: <https://developers.google.com/protocol-buffers/docs/encoding?hl=en#structure>
     fn _field(&mut self, field: u32, wire: u8) {
-        let fbits: u64 = (field as u64) << 3;
-        let wbits = wire as u64 & 0x07;
+        let fbits = u64::from(field) << 3;
+        let wbits = u64::from(wire) & 0x07;
         let val: u64 = fbits | wbits;
         self._varint(val);
     }

@@ -74,7 +74,7 @@ fn parse_varint<P: Iterator<Item = u8>>(pb: &mut P) -> Option<u64> {

     for b in pb.by_ref() {
         let value = b & 0x7f;
-        result |= (value as u64) << (7 * num_read);
+        result |= u64::from(value) << (7 * num_read);
         num_read += 1;

         if b & 0x80 == 0 {

@@ -118,9 +118,8 @@ pub fn string_from_pb<P: IntoIterator<Item = u8>>(pb: P, field: u32) -> Option<S
                 buf.push(pb.next()?);
             }
             return String::from_utf8(buf).ok();
-            } else {
-                len
             }
+            len
         }
         _ => return None,
     };
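`_field` packs the protobuf tag as `(field_number << 3) | wire_type` and then varint-encodes it, following the linked wire-format reference. A standalone sketch of that encoding (not the crate's internal `ProtoBuilder` API):

```rust
// Varint encoding: 7 data bits per byte, MSB set on all but the last byte.
fn encode_varint(mut val: u64, out: &mut Vec<u8>) {
    loop {
        let byte = (val & 0x7f) as u8;
        val >>= 7;
        if val == 0 {
            out.push(byte);
            break;
        }
        out.push(byte | 0x80);
    }
}

// Tag = field number in the upper bits, wire type (0-5) in the lowest three bits.
fn encode_tag(field: u32, wire: u8, out: &mut Vec<u8>) {
    let tag = (u64::from(field) << 3) | (u64::from(wire) & 0x07);
    encode_varint(tag, out);
}

fn main() {
    let mut buf = Vec::new();
    // Field 8 with wire type 2 (length-delimited) encodes as the single byte 0x42.
    encode_tag(8, 2, &mut buf);
    assert_eq!(buf, vec![0x42]);
}
```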
@@ -77,7 +77,7 @@ pub enum DateCmp {
 }

 impl TimeUnit {
-    pub fn secs(&self) -> i64 {
+    pub fn secs(self) -> i64 {
         match self {
             TimeUnit::Second => 1,
             TimeUnit::Minute => 60,

@@ -91,7 +91,7 @@ impl TimeUnit {
 }

 impl TimeAgo {
-    fn secs(&self) -> i64 {
+    fn secs(self) -> i64 {
         i64::from(self.n) * self.unit.secs()
     }
 }

@@ -117,8 +117,8 @@ impl From<TimeAgo> for OffsetDateTime {
     fn from(ta: TimeAgo) -> Self {
         let ts = util::now_sec();
         match ta.unit {
-            TimeUnit::Month => ts.replace_date(util::shift_months(ts.date(), -(ta.n as i32))),
-            TimeUnit::Year => ts.replace_date(util::shift_years(ts.date(), -(ta.n as i32))),
+            TimeUnit::Month => ts.replace_date(util::shift_months(ts.date(), -i32::from(ta.n))),
+            TimeUnit::Year => ts.replace_date(util::shift_years(ts.date(), -i32::from(ta.n))),
             _ => ts - Duration::from(ta),
         }
     }

@@ -156,9 +156,10 @@ struct TaTokenParser<'a> {

 impl<'a> TaTokenParser<'a> {
     fn new(entry: &'a dictionary::Entry, by_char: bool, nd: bool, filtered_str: &'a str) -> Self {
-        let tokens = match nd {
-            true => &entry.timeago_nd_tokens,
-            false => &entry.timeago_tokens,
+        let tokens = if nd {
+            &entry.timeago_nd_tokens
+        } else {
+            &entry.timeago_tokens
         };
         Self {
             iter: SplitTokens::new(filtered_str, by_char),

@@ -209,7 +210,7 @@ pub fn parse_timeago(lang: Language, textual_date: &str) -> Option<TimeAgo> {
 ///
 /// Returns [`None`] if the date could not be parsed.
 pub fn parse_timeago_dt(lang: Language, textual_date: &str) -> Option<OffsetDateTime> {
-    parse_timeago(lang, textual_date).map(|ta| ta.into())
+    parse_timeago(lang, textual_date).map(OffsetDateTime::from)
 }

 pub fn parse_timeago_dt_or_warn(

@@ -260,7 +261,7 @@ pub fn parse_textual_date(lang: Language, textual_date: &str) -> Option<ParsedDa

     // Chinese/Japanese dont use textual months
     if m.is_none() && !by_char {
-        m = parse_textual_month(&entry, &filtered_str).map(|n| n as u16);
+        m = parse_textual_month(&entry, &filtered_str).map(u16::from);
     }

     match (y, m, d) {

@@ -282,7 +283,7 @@ pub fn parse_textual_date(lang: Language, textual_date: &str) -> Option<ParsedDa
 ///
 /// Returns None if the date could not be parsed.
 pub fn parse_textual_date_to_dt(lang: Language, textual_date: &str) -> Option<OffsetDateTime> {
-    parse_textual_date(lang, textual_date).map(|ta| ta.into())
+    parse_textual_date(lang, textual_date).map(OffsetDateTime::from)
 }

 pub fn parse_textual_date_or_warn(
src/validate.rs (118 changed lines)

@@ -8,10 +8,10 @@
 //! [string resolver](crate::client::RustyPipeQuery::resolve_string) is great for handling
 //! arbitrary input and returns a [`UrlTarget`](crate::model::UrlTarget) enum that tells you
 //! whether the given URL points to a video, channel, playlist, etc.
-//! - The validation functions of this module are meant vor validating concrete data (video IDs,
+//! - The validation functions of this module are meant for validating specific data (video IDs,
 //! channel IDs, playlist IDs) and return [`true`] if the given input is valid

-use crate::util;
+use crate::{error::Error, util};
 use once_cell::sync::Lazy;
 use regex::Regex;

@@ -22,12 +22,15 @@ use regex::Regex;
 /// # Examples
 /// ```
 /// # use rustypipe::validate;
-/// assert!(validate::video_id("dQw4w9WgXcQ"));
-/// assert!(!validate::video_id("Abcd"));
-/// assert!(!validate::video_id("dQw4w9WgXc@"));
+/// assert!(validate::video_id("dQw4w9WgXcQ").is_ok());
+/// assert!(validate::video_id("Abcd").is_err());
+/// assert!(validate::video_id("dQw4w9WgXc@").is_err());
 /// ```
-pub fn video_id<S: AsRef<str>>(video_id: S) -> bool {
-    util::VIDEO_ID_REGEX.is_match(video_id.as_ref())
+pub fn video_id<S: AsRef<str>>(video_id: S) -> Result<(), Error> {
+    check(
+        util::VIDEO_ID_REGEX.is_match(video_id.as_ref()),
+        "invalid video id",
+    )
 }

 /// Validate the given channel ID

@@ -38,12 +41,15 @@ pub fn video_id<S: AsRef<str>>(video_id: S) -> bool {
 /// # Examples
 /// ```
 /// # use rustypipe::validate;
-/// assert!(validate::channel_id("UC2DjFE7Xf11URZqWBigcVOQ"));
-/// assert!(!validate::channel_id("Abcd"));
-/// assert!(!validate::channel_id("XY2DjFE7Xf11URZqWBigcVOQ"));
+/// assert!(validate::channel_id("UC2DjFE7Xf11URZqWBigcVOQ").is_ok());
+/// assert!(validate::channel_id("Abcd").is_err());
+/// assert!(validate::channel_id("XY2DjFE7Xf11URZqWBigcVOQ").is_err());
 /// ```
-pub fn channel_id<S: AsRef<str>>(channel_id: S) -> bool {
-    util::CHANNEL_ID_REGEX.is_match(channel_id.as_ref())
+pub fn channel_id<S: AsRef<str>>(channel_id: S) -> Result<(), Error> {
+    check(
+        util::CHANNEL_ID_REGEX.is_match(channel_id.as_ref()),
+        "invalid channel id",
+    )
 }

 /// Validate the given playlist ID

@@ -55,14 +61,17 @@ pub fn channel_id<S: AsRef<str>>(channel_id: S) -> bool {
 /// # Examples
 /// ```
 /// # use rustypipe::validate;
-/// assert!(validate::playlist_id("PL4lEESSgxM_5O81EvKCmBIm_JT5Q7JeaI"));
-/// assert!(validate::playlist_id("RDCLAK5uy_kFQXdnqMaQCVx2wpUM4ZfbsGCDibZtkJk"));
-/// assert!(validate::playlist_id("OLAK5uy_k0yFrZlFRgCf3rLPza-lkRmCrtLPbK9pE"));
+/// assert!(validate::playlist_id("PL4lEESSgxM_5O81EvKCmBIm_JT5Q7JeaI").is_ok());
+/// assert!(validate::playlist_id("RDCLAK5uy_kFQXdnqMaQCVx2wpUM4ZfbsGCDibZtkJk").is_ok());
+/// assert!(validate::playlist_id("OLAK5uy_k0yFrZlFRgCf3rLPza-lkRmCrtLPbK9pE").is_ok());
 ///
-/// assert!(!validate::playlist_id("Abcd"));
+/// assert!(validate::playlist_id("Abcd").is_err());
 /// ```
-pub fn playlist_id<S: AsRef<str>>(playlist_id: S) -> bool {
-    util::PLAYLIST_ID_REGEX.is_match(playlist_id.as_ref())
+pub fn playlist_id<S: AsRef<str>>(playlist_id: S) -> Result<(), Error> {
+    check(
+        util::PLAYLIST_ID_REGEX.is_match(playlist_id.as_ref()),
+        "invalid playlist id",
+    )
 }

 /// Validate the given album ID

@@ -73,8 +82,8 @@ pub fn playlist_id<S: AsRef<str>>(playlist_id: S) -> bool {
 /// # Examples
 /// ```
 /// # use rustypipe::validate;
-/// assert!(validate::album_id("MPREb_GyH43gCvdM5"));
-/// assert!(!validate::album_id("Abcd_GyH43gCvdM5"));
+/// assert!(validate::album_id("MPREb_GyH43gCvdM5").is_ok());
+/// assert!(validate::album_id("Abcd_GyH43gCvdM5").is_err());
 /// ```
 ///
 /// # Note

@@ -86,8 +95,11 @@ pub fn playlist_id<S: AsRef<str>>(playlist_id: S) -> bool {
 /// If you have the playlist ID of an album and need the album ID, you can use the
 /// [string resolver](crate::client::RustyPipeQuery::resolve_string) with the `resolve_albums`
 /// option enabled.
-pub fn album_id<S: AsRef<str>>(album_id: S) -> bool {
-    util::ALBUM_ID_REGEX.is_match(album_id.as_ref())
+pub fn album_id<S: AsRef<str>>(album_id: S) -> Result<(), Error> {
+    check(
+        util::ALBUM_ID_REGEX.is_match(album_id.as_ref()),
+        "invalid album id",
+    )
 }

 /// Validate the given radio ID

@@ -107,15 +119,18 @@ pub fn album_id<S: AsRef<str>>(album_id: S) -> bool {
 ///
 /// ```
 /// # use rustypipe::validate;
-/// assert!(validate::radio_id("RDEMSuoM_jxfse1_g8uCO7MCtg"));
-/// assert!(!validate::radio_id("Abcd"));
-/// assert!(!validate::radio_id("XYEMSuoM_jxfse1_g8uCO7MCtg"));
+/// assert!(validate::radio_id("RDEMSuoM_jxfse1_g8uCO7MCtg").is_ok());
+/// assert!(validate::radio_id("Abcd").is_err());
+/// assert!(validate::radio_id("XYEMSuoM_jxfse1_g8uCO7MCtg").is_err());
 /// ```
-pub fn radio_id<S: AsRef<str>>(radio_id: S) -> bool {
+pub fn radio_id<S: AsRef<str>>(radio_id: S) -> Result<(), Error> {
     static RADIO_ID_REGEX: Lazy<Regex> =
         Lazy::new(|| Regex::new(r"^RD[A-Za-z0-9_-]{22,50}$").unwrap());

-    RADIO_ID_REGEX.is_match(radio_id.as_ref())
+    check(
+        RADIO_ID_REGEX.is_match(radio_id.as_ref()),
+        "invalid radio id",
+    )
 }

 /// Validate the given genre ID

@@ -127,18 +142,21 @@ pub fn radio_id<S: AsRef<str>>(radio_id: S) -> bool {
 ///
 /// ```
 /// # use rustypipe::validate;
-/// assert!(validate::genre_id("ggMPOg1uX1JOQWZFeDByc2Jm"));
-/// assert!(!validate::genre_id("Abcd"));
-/// assert!(!validate::genre_id("ggAbcg1uX1JOQWZFeDByc2Jm"));
+/// assert!(validate::genre_id("ggMPOg1uX1JOQWZFeDByc2Jm").is_ok());
+/// assert!(validate::genre_id("Abcd").is_err());
+/// assert!(validate::genre_id("ggAbcg1uX1JOQWZFeDByc2Jm").is_err());
 /// ```
-pub fn genre_id<S: AsRef<str>>(genre_id: S) -> bool {
+pub fn genre_id<S: AsRef<str>>(genre_id: S) -> Result<(), Error> {
     static GENRE_ID_REGEX: Lazy<Regex> =
         Lazy::new(|| Regex::new(r"^ggMPO[A-Za-z0-9_-]{19}$").unwrap());

-    GENRE_ID_REGEX.is_match(genre_id.as_ref())
+    check(
+        GENRE_ID_REGEX.is_match(genre_id.as_ref()),
+        "invalid genre id",
+    )
 }

-/// Validate the given related ID
+/// Validate the given related tracks ID
 ///
 /// YouTube related IDs are exactly 17 characters long, start with the characters `MPTRt_`,
 /// followed by 11 of these characters: `A-Za-z0-9_-`.

@@ -147,15 +165,18 @@ pub fn genre_id<S: AsRef<str>>(genre_id: S) -> bool {
 ///
 /// ```
 /// # use rustypipe::validate;
-/// assert!(validate::track_related_id("MPTRt_wrKjTn9hmry"));
-/// assert!(!validate::track_related_id("Abcd"));
-/// assert!(!validate::track_related_id("Abcdt_wrKjTn9hmry"));
+/// assert!(validate::track_related_id("MPTRt_wrKjTn9hmry").is_ok());
+/// assert!(validate::track_related_id("Abcd").is_err());
+/// assert!(validate::track_related_id("Abcdt_wrKjTn9hmry").is_err());
 /// ```
-pub fn track_related_id<S: AsRef<str>>(related_id: S) -> bool {
+pub fn track_related_id<S: AsRef<str>>(related_id: S) -> Result<(), Error> {
     static RELATED_ID_REGEX: Lazy<Regex> =
         Lazy::new(|| Regex::new(r"^MPTRt_[A-Za-z0-9_-]{11}$").unwrap());

-    RELATED_ID_REGEX.is_match(related_id.as_ref())
+    check(
+        RELATED_ID_REGEX.is_match(related_id.as_ref()),
+        "invalid related track id",
+    )
 }

 /// Validate the given lyrics ID

@@ -167,13 +188,24 @@ pub fn track_related_id<S: AsRef<str>>(related_id: S) -> bool {
 ///
 /// ```
 /// # use rustypipe::validate;
-/// assert!(validate::track_lyrics_id("MPLYt_wrKjTn9hmry"));
-/// assert!(!validate::track_lyrics_id("Abcd"));
-/// assert!(!validate::track_lyrics_id("Abcdt_wrKjTn9hmry"));
+/// assert!(validate::track_lyrics_id("MPLYt_wrKjTn9hmry").is_ok());
+/// assert!(validate::track_lyrics_id("Abcd").is_err());
+/// assert!(validate::track_lyrics_id("Abcdt_wrKjTn9hmry").is_err());
 /// ```
-pub fn track_lyrics_id<S: AsRef<str>>(lyrics_id: S) -> bool {
+pub fn track_lyrics_id<S: AsRef<str>>(lyrics_id: S) -> Result<(), Error> {
     static LYRICS_ID_REGEX: Lazy<Regex> =
         Lazy::new(|| Regex::new(r"^MPLYt_[A-Za-z0-9_-]{11}$").unwrap());

-    LYRICS_ID_REGEX.is_match(lyrics_id.as_ref())
+    check(
+        LYRICS_ID_REGEX.is_match(lyrics_id.as_ref()),
+        "invalid lyrics id",
+    )
+}
+
+fn check(res: bool, msg: &'static str) -> Result<(), Error> {
+    if res {
+        Ok(())
+    } else {
+        Err(Error::Other(msg.into()))
+    }
 }
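With the validators now returning `Result<(), Error>`, callers can bubble validation failures up with `?` instead of wrapping a boolean in their own error. A short sketch (the `prepare_video_query` helper is hypothetical, and the `Error` type is assumed to be exported as `rustypipe::error::Error`, matching the `crate::error::Error` import above):

```rust
use rustypipe::{error::Error, validate};

// Hypothetical helper: validate the ID before doing any network work.
fn prepare_video_query(id: &str) -> Result<(), Error> {
    validate::video_id(id)?;
    // ... build and send the actual query here ...
    Ok(())
}

fn main() {
    assert!(prepare_video_query("dQw4w9WgXcQ").is_ok());
    assert!(prepare_video_query("not a video id").is_err());
}
```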
@@ -53,7 +53,7 @@ fn get_player_from_client(#[case] client_type: ClientType, rp: RustyPipe) {
     assert_eq!(player_data.details.channel.name, "NoCopyrightSounds");
     assert_gte(player_data.details.view_count, 146_818_808, "view count");
     assert_eq!(player_data.details.keywords[0], "spektrem");
-    assert_eq!(player_data.details.is_live_content, false);
+    assert!(!player_data.details.is_live_content);

     if client_type == ClientType::Ios {
         let video = player_data

@@ -68,21 +68,21 @@ fn get_player_from_client(#[case] client_type: ClientType, rp: RustyPipe) {
             .unwrap();

         // Bitrates may change between requests
-        assert_approx(video.bitrate as f64, 1507068.0);
-        assert_eq!(video.average_bitrate, 1345149);
-        assert_eq!(video.size.unwrap(), 43553412);
+        assert_approx(f64::from(video.bitrate), 1_507_068.0);
+        assert_eq!(video.average_bitrate, 1_345_149);
+        assert_eq!(video.size.unwrap(), 43_553_412);
         assert_eq!(video.width, 1280);
         assert_eq!(video.height, 720);
         assert_eq!(video.fps, 30);
         assert_eq!(video.quality, "720p");
-        assert_eq!(video.hdr, false);
+        assert!(!video.hdr);
         assert_eq!(video.mime, "video/webm; codecs=\"vp09.00.31.08\"");
         assert_eq!(video.format, VideoFormat::Webm);
         assert_eq!(video.codec, VideoCodec::Vp9);

-        assert_approx(audio.bitrate as f64, 130685.0);
-        assert_approx(audio.average_bitrate as f64, 129496.0);
-        assert_approx(audio.size as f64, 4193863.0);
+        assert_approx(f64::from(audio.bitrate), 130_685.0);
+        assert_approx(f64::from(audio.average_bitrate), 129_496.0);
+        assert_approx(audio.size as f64, 4_193_863.0);
         assert_eq!(audio.mime, "audio/mp4; codecs=\"mp4a.40.2\"");
         assert_eq!(audio.format, AudioFormat::M4a);
         assert_eq!(audio.codec, AudioCodec::Mp4a);

@@ -101,26 +101,26 @@ fn get_player_from_client(#[case] client_type: ClientType, rp: RustyPipe) {
             .find(|s| s.itag == 251)
             .expect("audio stream not found");

-        assert_approx(video.bitrate as f64, 1340829.0);
-        assert_approx(video.average_bitrate as f64, 1233444.0);
-        assert_approx(video.size.unwrap() as f64, 39936630.0);
+        assert_approx(f64::from(video.bitrate), 1_340_829.0);
+        assert_approx(f64::from(video.average_bitrate), 1_233_444.0);
+        assert_approx(video.size.unwrap() as f64, 39_936_630.0);
         assert_eq!(video.width, 1280);
         assert_eq!(video.height, 720);
         assert_eq!(video.fps, 30);
         assert_eq!(video.quality, "720p");
-        assert_eq!(video.hdr, false);
+        assert!(!video.hdr);
         assert_eq!(video.mime, "video/mp4; codecs=\"av01.0.05M.08\"");
         assert_eq!(video.format, VideoFormat::Mp4);
         assert_eq!(video.codec, VideoCodec::Av01);
-        assert_eq!(video.throttled, false);
+        assert!(!video.throttled);

-        assert_approx(audio.bitrate as f64, 142718.0);
-        assert_approx(audio.average_bitrate as f64, 130708.0);
-        assert_approx(audio.size as f64, 4232344.0);
+        assert_approx(f64::from(audio.bitrate), 142_718.0);
+        assert_approx(f64::from(audio.average_bitrate), 130_708.0);
+        assert_approx(audio.size as f64, 4_232_344.0);
         assert_eq!(audio.mime, "audio/webm; codecs=\"opus\"");
         assert_eq!(audio.format, AudioFormat::Webm);
         assert_eq!(audio.codec, AudioCodec::Opus);
-        assert_eq!(audio.throttled, false);
+        assert!(!audio.throttled);

         check_video_stream(video);
         check_video_stream(audio);

@@ -151,7 +151,7 @@ fn check_video_stream(s: impl YtStream) {
     260,
     "UC2llNlEM62gU-_fXPHfgbDg",
     "Oonagh",
-    830900,
+    830_900,
     false,
     false
 )]

@@ -873,7 +873,7 @@ fn channel_info(rp: RustyPipe) {

     assert_gte(
         channel.content.view_count.unwrap(),
-        186854340,
+        186_854_340,
         "channel views",
     );

@@ -1467,7 +1467,7 @@ fn music_artist(
         .for_each(|t| assert!(!t.avatar.is_empty()));

     // Sort albums to ensure consistent order
-    artist.albums.sort_by_key(|a| a.id.to_owned());
+    artist.albums.sort_by_key(|a| a.id.clone());

     if unlocalized {
         insta::assert_ron_snapshot!(format!("music_artist_{name}"), artist, {

@@ -1944,19 +1944,19 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {
     let mut track_albums = 0;

     for track in related.tracks {
-        assert_video_id(&track.id);
+        validate::video_id(&track.id).unwrap();
         assert!(!track.name.is_empty());
         assert!(!track.cover.is_empty(), "got no cover");

         if let Some(artist_id) = track.artist_id {
-            assert_channel_id(&artist_id);
+            validate::channel_id(&artist_id).unwrap();
             track_artist_ids += 1;
         }

         let artist = track.artists.first().unwrap();
         assert!(!artist.name.is_empty());
         if let Some(artist_id) = &artist.id {
-            assert_channel_id(artist_id);
+            validate::channel_id(&artist_id).unwrap();
             track_artists += 1;
         }

@@ -1968,7 +1968,7 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {

         assert!(track.view_count.is_none());
         if let Some(album) = track.album {
-            assert_album_id(&album.id);
+            validate::album_id(&album.id).unwrap();
             assert!(!album.name.is_empty());
             track_albums += 1;
         }

@@ -1985,18 +1985,18 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {
     if full {
         assert_gte(related.albums.len(), 10, "albums");
         for album in related.albums {
-            assert_album_id(&album.id);
+            validate::album_id(&album.id).unwrap();
             assert!(!album.name.is_empty());
             assert!(!album.cover.is_empty(), "got no cover");

             let artist = album.artists.first().unwrap();
-            assert_channel_id(artist.id.as_ref().unwrap());
+            validate::channel_id(artist.id.as_ref().unwrap()).unwrap();
             assert!(!artist.name.is_empty());
         }

         assert_gte(related.artists.len(), 10, "artists");
         for artist in related.artists {
-            assert_channel_id(&artist.id);
+            validate::channel_id(&artist.id).unwrap();
             assert!(!artist.name.is_empty());
             assert!(!artist.avatar.is_empty(), "got no avatar");
             assert_gte(artist.subscriber_count.unwrap(), 5000, "subscribers")

@@ -2004,7 +2004,7 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {

         assert_gte(related.playlists.len(), 10, "playlists");
         for playlist in related.playlists {
-            assert_playlist_id(&playlist.id);
+            validate::playlist_id(&playlist.id).unwrap();
             assert!(!playlist.name.is_empty());
             assert!(
                 !playlist.thumbnail.is_empty(),

@@ -2018,7 +2018,7 @@ fn music_related(#[case] id: &str, #[case] full: bool, rp: RustyPipe) {
                 playlist.id
             );
             let channel = playlist.channel.unwrap();
-            assert_channel_id(&channel.id);
+            validate::channel_id(&channel.id).unwrap();
             assert!(!channel.name.is_empty());
         } else {
             assert!(playlist.channel.is_none());

@@ -2134,7 +2134,7 @@ fn music_new_albums(rp: RustyPipe) {
     assert_gte(albums.len(), 10, "albums");

     for album in albums {
-        assert_album_id(&album.id);
+        validate::album_id(&album.id).unwrap();
         assert!(!album.name.is_empty());
         assert!(!album.cover.is_empty(), "got no cover");
     }

@@ -2146,7 +2146,7 @@ fn music_new_videos(rp: RustyPipe) {
     assert_gte(videos.len(), 5, "videos");

     for video in videos {
-        assert_video_id(&video.id);
+        validate::video_id(&video.id).unwrap();
         assert!(!video.name.is_empty());
         assert!(!video.cover.is_empty(), "got no cover");
         assert_gte(video.view_count.unwrap(), 1000, "views");

@@ -2174,10 +2174,10 @@ fn music_genres(rp: RustyPipe, unlocalized: bool) {
     assert_eq!(pop.name, "Pop");
     assert!(!pop.is_mood);

-    genres.iter().for_each(|g| {
-        assert!(validate::genre_id(&g.id));
-        assert_gte(g.color, 0xff000000, "color");
-    });
+    for g in &genres {
+        validate::genre_id(&g.id).unwrap();
+        assert_gte(g.color, 0xff00_0000, "color");
+    }
 }

 #[rstest]

@@ -2202,7 +2202,7 @@ fn music_genre(#[case] id: &str, #[case] name: &str, rp: RustyPipe, unlocalized:
     genre.sections.iter().for_each(|section| {
         assert!(!section.name.is_empty());
         section.playlists.iter().for_each(|playlist| {
-            assert_playlist_id(&playlist.id);
+            validate::playlist_id(&playlist.id).unwrap();
             assert!(!playlist.name.is_empty());
             assert!(!playlist.thumbnail.is_empty(), "got no cover");

@@ -2213,14 +2213,14 @@ fn music_genre(#[case] id: &str, #[case] name: &str, rp: RustyPipe, unlocalized:
                 playlist.id
             );
             let channel = playlist.channel.as_ref().unwrap();
-            assert_channel_id(&channel.id);
+            validate::channel_id(&channel.id).unwrap();
             assert!(!channel.name.is_empty());
         } else {
             assert!(playlist.channel.is_none());
         }
         });
         if let Some(subgenre_id) = &section.subgenre_id {
-            subgenres.push((subgenre_id.to_owned(), section.name.to_owned()));
+            subgenres.push((subgenre_id.clone(), section.name.clone()));
         }
     });
     subgenres

@@ -2290,8 +2290,7 @@ fn invalid_ctoken(#[case] ep: ContinuationEndpoint, rp: RustyPipe) {
 fn lang() -> Language {
     std::env::var("YT_LANG")
         .ok()
-        .map(|l| Language::from_str(&l).unwrap())
-        .unwrap_or(Language::En)
+        .map_or(Language::En, |l| Language::from_str(&l).unwrap())
 }

 /// Get a new RustyPipe instance

@@ -2362,22 +2361,6 @@ fn assert_next_items<T: FromYtItem, Q: AsRef<RustyPipeQuery>>(
     assert_gte(p.items.len(), n_items, "items");
 }

-fn assert_video_id(id: &str) {
-    assert!(validate::video_id(id), "invalid video id: `{id}`")
-}
-
-fn assert_channel_id(id: &str) {
-    assert!(validate::channel_id(id), "invalid channel id: `{id}`");
-}
-
-fn assert_album_id(id: &str) {
-    assert!(validate::album_id(id), "invalid album id: `{id}`");
-}
-
-fn assert_playlist_id(id: &str) {
-    assert!(validate::playlist_id(id), "invalid playlist id: `{id}`");
-}
-
 fn assert_frameset(frameset: &Frameset) {
     assert_gte(frameset.frame_height, 20, "frame height");
     assert_gte(frameset.frame_height, 20, "frame width");