Compare commits

...

38 commits

SHA1 Message Date
c6abd89087 test: fix tests 2025-04-18 16:38:44 +02:00
703f350b6b chore(release): release rustypipe v0.11.3 2025-04-03 13:39:28 +02:00
af415ddf8f chore(deps): update rust crate rand to 0.9.0 2025-04-03 11:08:18 +00:00
daf3d035be fix: handle music artist not found 2025-03-31 18:11:14 +02:00
187bf1c9a0 fix: switch client if no adaptive stream URLs were returned 2025-03-26 02:44:08 +01:00
ea80717f69 fix: handle music playlist/album not found 2025-03-26 02:35:03 +01:00
939a7aea61 fix: deobfuscator: handle global functions as well 2025-03-26 02:12:18 +01:00
47bea4eed2 test: update music_artist_basic snapshot 2025-03-26 01:38:35 +01:00
189ba81a42 fix: extractor: small simplification 2025-03-26 01:38:12 +01:00
ac44e95a88 fix: extractor: global variable extraction fixed 2025-03-26 01:20:35 +01:00
23c8775326 chore(release): release rustypipe v0.11.2 2025-03-24 01:50:53 +01:00
07db7b1166 fix: handle player returning no adaptive stream URLs 2025-03-24 01:28:07 +01:00
4ce6746be5 fix: extract deobf data with global strings variable 2025-03-24 01:12:01 +01:00
e8acbfbbcf fix: A/B test 22: commandExecutorCommand for playlist continuations 2025-03-16 19:45:14 +01:00
fcf27aa3b2 chore(release): release rustypipe-cli v0.7.2 2025-03-16 18:20:32 +01:00
64ed3b14e3 chore(release): release rustypipe v0.11.1 2025-03-16 18:13:55 +01:00
63a6f50a8b fix: always skip failed clients 2025-03-16 16:51:43 +01:00
8342caeb0f fix: desktop client: generate PO token from user_syncid when authenticated 2025-03-16 01:56:29 +01:00
c04b60604d fix: simplify get_player_from_clients logic 2025-03-16 01:24:54 +01:00
2f18efa1cf fix: log download URL 2025-03-16 01:21:29 +01:00
b8f61c9bae test: skip android client test 2025-03-04 22:50:33 +01:00
9ed1306f3a chore(deps): update rust crate rstest to 0.25.0 2025-03-04 22:48:10 +01:00
6d481c16d0 update smartcrop2 to v0.4.0, remove black borders from album covers 2025-03-04 22:38:01 +01:00
144a670da1 chore(release): release rustypipe-cli v0.7.1 2025-02-26 19:48:12 +01:00
035c07f170 chore(deps): update rustypipe to 0.11.0 2025-02-26 19:47:42 +01:00
9bfd3ee1ba chore(release): release rustypipe-downloader v0.3.1 2025-02-26 19:45:43 +01:00
1adcb12932 chore(release): release rustypipe v0.11.0 2025-02-26 19:41:36 +01:00
e7ef067f43 small doc fix 2025-02-26 19:40:10 +01:00
f3057b4d63 chore: remove commented-out debug statements 2025-02-26 19:32:46 +01:00
6737512f5f fix: A/B test 21: music album recommendations 2025-02-26 15:21:47 +01:00
544782f8de feat: add original album track count, fix fetching albums with more than 200 tracks 2025-02-26 15:21:47 +01:00
83f8652776 ci: disable renovate 2025-02-22 23:02:15 +00:00
739eac4d1f test: fix tests 2025-02-18 00:16:09 +01:00
4d60e64f2c ci: remove workflow_dispatch trigger 2025-02-09 04:35:30 +01:00
45d3a9cd33 ci: add CLI release files 2025-02-09 03:57:13 +01:00
f8a0a253cc change line in downloader changelog 2025-02-09 03:15:30 +01:00
8933c6fa2a chore(release): release rustypipe-cli v0.7.0 2025-02-09 03:14:30 +01:00
629b5905da feat: add verbose flag 2025-02-09 03:09:47 +01:00
65 changed files with 46901 additions and 486 deletions

View file

@@ -66,3 +66,4 @@ jobs:
with:
  title: "${{ env.CRATE }} ${{ env.CRATE_VERSION }}"
  body: "${{ env.CHANGELOG }}"
+  files: dist/*

View file

@@ -3,6 +3,55 @@
All notable changes to this project will be documented in this file.
## [v0.11.3](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe/v0.11.2..rustypipe/v0.11.3) - 2025-04-03
### 🐛 Bug Fixes
- Deobfuscator: global variable extraction fixed - ([ac44e95](https://codeberg.org/ThetaDev/rustypipe/commit/ac44e95a88d95f9d2d1ec672f86ca9d31d6991b9))
- Deobfuscator: small simplification - ([189ba81](https://codeberg.org/ThetaDev/rustypipe/commit/189ba81a42e6c09f6af4d2768c449c22b864101e))
- Deobfuscator: handle global functions as well - ([939a7ae](https://codeberg.org/ThetaDev/rustypipe/commit/939a7aea61a3eee4c1e67bfbfc835f0ce3934171))
- Handle music playlist/album not found - ([ea80717](https://codeberg.org/ThetaDev/rustypipe/commit/ea80717f692b2c45b5063c362c9fa8ebca5a3471))
- Switch client if no adaptive stream URLs were returned - ([187bf1c](https://codeberg.org/ThetaDev/rustypipe/commit/187bf1c9a0e846bff205e0d71a19c5a1ce7b1943))
- Handle music artist not found - ([daf3d03](https://codeberg.org/ThetaDev/rustypipe/commit/daf3d035be38b59aef1ae205ac91c2bbdda2fe66))
### ⚙️ Miscellaneous Tasks
- *(deps)* Update rust crate rand to 0.9.0 - ([af415dd](https://codeberg.org/ThetaDev/rustypipe/commit/af415ddf8f94f00edb918f271d8e6336503e9faf))
## [v0.11.2](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe/v0.11.1..rustypipe/v0.11.2) - 2025-03-24
### 🐛 Bug Fixes
- A/B test 22: commandExecutorCommand for playlist continuations - ([e8acbfb](https://codeberg.org/ThetaDev/rustypipe/commit/e8acbfbbcf5d31b5ac34410ddf334e5534e3762f))
- Extract deobf data with global strings variable - ([4ce6746](https://codeberg.org/ThetaDev/rustypipe/commit/4ce6746be538564e79f7e3c67d7a91aaa53f48ea))
- Handle player returning no adaptive stream URLs - ([07db7b1](https://codeberg.org/ThetaDev/rustypipe/commit/07db7b1166e912e1554f98f2ae20c2c356fed38f))
## [v0.11.1](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe/v0.11.0..rustypipe/v0.11.1) - 2025-03-16
### 🐛 Bug Fixes
- Simplify get_player_from_clients logic - ([c04b606](https://codeberg.org/ThetaDev/rustypipe/commit/c04b60604d2628bf8f0e3de453c243adbb966e57))
- Desktop client: generate PO token from user_syncid when authenticated - ([8342cae](https://codeberg.org/ThetaDev/rustypipe/commit/8342caeb0f566a38060a6ec69f3ca65b9a2afcd6))
- Always skip failed clients - ([63a6f50](https://codeberg.org/ThetaDev/rustypipe/commit/63a6f50a8b5ad6bb984282335c1481ae3cd2fe83))
### ⚙️ Miscellaneous Tasks
- *(deps)* Update rust crate rstest to 0.25.0 - ([9ed1306](https://codeberg.org/ThetaDev/rustypipe/commit/9ed1306f3aaeb993c409997ddfbc47499e4f4d22))
## [v0.11.0](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe/v0.10.0..rustypipe/v0.11.0) - 2025-02-26
### 🚀 Features
- Add original album track count, fix fetching albums with more than 200 tracks - ([544782f](https://codeberg.org/ThetaDev/rustypipe/commit/544782f8de728cda0aca9a1cb95837cdfbd001f1))
### 🐛 Bug Fixes
- A/B test 21: music album recommendations - ([6737512](https://codeberg.org/ThetaDev/rustypipe/commit/6737512f5f67c8cd05d4552dd0e0f24381035b35))
## [v0.10.0](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe/v0.9.0..rustypipe/v0.10.0) - 2025-02-09
### 🚀 Features

View file

@@ -1,6 +1,6 @@
[package]
name = "rustypipe"
-version = "0.10.0"
+version = "0.11.3"
rust-version = "1.67.1"
edition.workspace = true
authors.workspace = true
@@ -40,7 +40,7 @@ serde_with = { version = "3.0.0", default-features = false, features = [
] }
serde_plain = "1.0.0"
sha1 = "0.10.0"
-rand = "0.8.0"
+rand = "0.9.0"
time = { version = "0.3.37", features = [
    "macros",
    "serde-human-readable",
@@ -67,15 +67,15 @@ dirs = "6.0.0"
filenamify = "0.1.0"
# Testing
-rstest = "0.24.0"
+rstest = "0.25.0"
tokio-test = "0.4.2"
insta = { version = "1.17.1", features = ["ron", "redactions"] }
path_macro = "1.0.0"
tracing-test = "0.2.5"
# Included crates
-rustypipe = { path = ".", version = "0.10.0", default-features = false }
+rustypipe = { path = ".", version = "0.11.3", default-features = false }
-rustypipe-downloader = { path = "./downloader", version = "0.3.0", default-features = false, features = [
+rustypipe-downloader = { path = "./downloader", version = "0.3.1", default-features = false, features = [
    "indicatif",
    "audiotag",
] }

View file

@@ -3,6 +3,52 @@
All notable changes to this project will be documented in this file.
## [v0.7.2](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe-cli/v0.7.1..rustypipe-cli/v0.7.2) - 2025-03-16
### ⚙️ Miscellaneous Tasks
- *(deps)* Update rustypipe to 0.11.1
- *(deps)* Update rustypipe-downloader to 0.3.1
- *(deps)* Update rust crate rstest to 0.25.0 - ([9ed1306](https://codeberg.org/ThetaDev/rustypipe/commit/9ed1306f3aaeb993c409997ddfbc47499e4f4d22))
## [v0.7.1](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe-cli/v0.7.0..rustypipe-cli/v0.7.1) - 2025-02-26
### ⚙️ Miscellaneous Tasks
- *(deps)* Update rustypipe to 0.11.0 - ([035c07f](https://codeberg.org/ThetaDev/rustypipe/commit/035c07f170aa293bcc626f27998c2b2b28660881))
## [v0.7.0](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe-cli/v0.6.0..rustypipe-cli/v0.7.0) - 2025-02-09
### 🚀 Features
- Add support for rustypipe-botguard to get PO tokens - ([b90a252](https://codeberg.org/ThetaDev/rustypipe/commit/b90a252a5e1bf05a5294168b0ec16a73cbb88f42))
- [**breaking**] Remove manual PO token options from downloader/cli, add new rustypipe-botguard options - ([cddb32f](https://codeberg.org/ThetaDev/rustypipe/commit/cddb32f190276265258c6ab45b3d43a8891c4b39))
- Add session po token cache - ([b72b501](https://codeberg.org/ThetaDev/rustypipe/commit/b72b501b6dbcf4333b24cd80e7c8c61b0c21ec91))
- Add timezone query option - ([3a2370b](https://codeberg.org/ThetaDev/rustypipe/commit/3a2370b97ca3d0f40d72d66a23295557317d29fb))
- Add --timezone-local CLI option - ([4f2bb47](https://codeberg.org/ThetaDev/rustypipe/commit/4f2bb47ab42ae0c68a64f3b3c2831fa7850b6f56))
- Add verbose flag - ([629b590](https://codeberg.org/ThetaDev/rustypipe/commit/629b5905da653c6fe0f3c6b5814dd2f49030e7ed))
### 🐛 Bug Fixes
- Parsing mixed-case language codes like zh-CN - ([9c73ed4](https://codeberg.org/ThetaDev/rustypipe/commit/9c73ed4b3008cb093c0fa7fd94fd9f1ba8cd3627))
### 🚜 Refactor
- [**breaking**] Add client_type field to DownloadError, rename cli option po-token-cache to pot-cache - ([594e675](https://codeberg.org/ThetaDev/rustypipe/commit/594e675b39efc5fbcdbd5e920a4d2cdee64f718e))
- Rename rustypipe-cli binary to rustypipe - ([c1a872e](https://codeberg.org/ThetaDev/rustypipe/commit/c1a872e1c14ea0956053bd7c65f6875b1cb3bc55))
### 📚 Documentation
- Add Botguard info to README - ([9957add](https://codeberg.org/ThetaDev/rustypipe/commit/9957add2b5d6391b2c1869d2019fd7dd91b8cd41))
### ⚙️ Miscellaneous Tasks
- *(deps)* Update rustypipe to 0.10.0
- *(deps)* Update rust crate rquickjs to 0.9.0 (#33) - ([2c8ac41](https://codeberg.org/ThetaDev/rustypipe/commit/2c8ac410aa535d83f8bcc7181f81914b13bceb77))
## [v0.6.0](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe-cli/v0.5.0..rustypipe-cli/v0.6.0) - 2025-01-16
### 🚀 Features

View file

@@ -1,6 +1,6 @@
[package]
name = "rustypipe-cli"
-version = "0.6.0"
+version = "0.7.2"
rust-version = "1.70.0"
edition.workspace = true
authors.workspace = true

View file

@@ -139,8 +139,8 @@ Fetch a list of all the items saved in your YouTube/YouTube Music profile.
- **Proxy:** RustyPipe respects the environment variables `HTTP_PROXY`, `HTTPS_PROXY`
  and `ALL_PROXY`
-- **Logging:** You can change the log level with the `RUST_LOG` environment variable, it
-  is set to `info` by default
+- **Logging:** Enable debug logging with the `-v` (verbose) flag. If you want more
+  fine-grained control, use the `RUST_LOG` environment variable.
- **Visitor data:** A custom visitor data ID can be used with the `--vdata` flag
- **Authentication:** Use the commands `rustypipe login` and `rustypipe login --cookie`
  to log into your Google account using either OAuth or YouTube cookies. With the

View file

@@ -80,6 +80,9 @@ struct Cli {
    /// Enable caching for session-bound PO tokens
    #[clap(long, global = true)]
    pot_cache: bool,
+    /// Enable debug logging
+    #[clap(short, long, global = true)]
+    verbose: bool,
}
#[derive(Parser)]
@@ -878,12 +881,15 @@ async fn run() -> anyhow::Result<()> {
    let cli = Cli::parse();
    let multi = MultiProgress::new();
-    tracing_subscriber::fmt::SubscriberBuilder::default()
-        .with_env_filter(
-            EnvFilter::builder()
-                .with_default_directive(LevelFilter::INFO.into())
-                .from_env_lossy(),
-        )
+    let mut env_filter = EnvFilter::builder()
+        .with_default_directive(LevelFilter::INFO.into())
+        .from_env_lossy();
+    if cli.verbose {
+        env_filter = env_filter.add_directive("rustypipe=debug".parse().unwrap());
+    }
+    tracing_subscriber::fmt::SubscriberBuilder::default()
+        .with_env_filter(env_filter)
        .with_writer(ProgWriter(multi.clone()))
        .init();

View file

@@ -40,12 +40,15 @@ pub enum ABTest {
    MusicPlaylistFacepile = 18,
    MusicAlbumGroupsReordered = 19,
    MusicContinuationItemRenderer = 20,
+    AlbumRecommends = 21,
+    CommandExecutorCommand = 22,
}
/// List of active A/B tests that are run when none is manually specified
const TESTS_TO_RUN: &[ABTest] = &[
    ABTest::MusicAlbumGroupsReordered,
-    ABTest::MusicContinuationItemRenderer,
+    ABTest::AlbumRecommends,
+    ABTest::CommandExecutorCommand,
];
#[derive(Debug, Serialize, Deserialize)]
@@ -121,6 +124,8 @@ pub async fn run_test(
        ABTest::MusicContinuationItemRenderer => {
            music_continuation_item_renderer(&query).await
        }
+        ABTest::AlbumRecommends => album_recommends(&query).await,
+        ABTest::CommandExecutorCommand => command_executor_command(&query).await,
    }
    .unwrap();
    pb.inc(1);
@@ -443,3 +448,33 @@ pub async fn music_continuation_item_renderer(rp: &RustyPipeQuery) -> Result<boo
        .await?;
    Ok(res.contains("\"continuationItemRenderer\""))
}
pub async fn album_recommends(rp: &RustyPipeQuery) -> Result<bool> {
let id = "MPREb_u1I69lSAe5v";
let res = rp
.raw(
ClientType::DesktopMusic,
"browse",
&QBrowse {
browse_id: id,
params: None,
},
)
.await?;
Ok(res.contains("\"musicCarouselShelfRenderer\""))
}
pub async fn command_executor_command(rp: &RustyPipeQuery) -> Result<bool> {
let id = "VLPLbZIPy20-1pN7mqjckepWF78ndb6ci_qi";
let res = rp
.raw(
ClientType::Desktop,
"browse",
&QBrowse {
browse_id: id,
params: None,
},
)
.await?;
Ok(res.contains("\"commandExecutorCommand\""))
}

View file

@@ -0,0 +1,130 @@
use std::{collections::BTreeMap, fs::File, io::BufReader};
use path_macro::path;
use rustypipe::{
client::{ClientType, RustyPipe},
param::{Language, LANGUAGES},
};
use serde::Deserialize;
use serde_with::rust::deserialize_ignore_any;
use crate::{
model::{QBrowse, SectionList, TextRuns},
util::{self, DICT_DIR},
};
pub async fn collect_album_versions_titles() {
let json_path = path!(*DICT_DIR / "other_versions_titles.json");
let mut res = BTreeMap::new();
let rp = RustyPipe::new();
for lang in LANGUAGES {
let query = QBrowse {
browse_id: "MPREb_nlBWQROfvjo",
params: None,
};
let raw_resp = rp
.query()
.lang(lang)
.raw(ClientType::DesktopMusic, "browse", &query)
.await
.unwrap();
let data = serde_json::from_str::<AlbumData>(&raw_resp).unwrap();
let title = data
.contents
.two_column_browse_results_renderer
.secondary_contents
.section_list_renderer
.contents
.into_iter()
.find_map(|x| match x {
ItemSection::MusicCarouselShelfRenderer(music_carousel_shelf) => {
Some(music_carousel_shelf)
}
ItemSection::None => None,
})
.expect("other versions")
.header
.expect("header")
.music_carousel_shelf_basic_header_renderer
.title
.runs
.into_iter()
.next()
.unwrap()
.text;
println!("{lang}: {title}");
res.insert(lang, title);
}
let file = File::create(json_path).unwrap();
serde_json::to_writer_pretty(file, &res).unwrap();
}
pub fn write_samples_to_dict() {
let json_path = path!(*DICT_DIR / "other_versions_titles.json");
let json_file = File::open(json_path).unwrap();
let collected: BTreeMap<Language, String> =
serde_json::from_reader(BufReader::new(json_file)).unwrap();
let mut dict = util::read_dict();
let langs = dict.keys().copied().collect::<Vec<_>>();
for lang in langs {
let dict_entry = dict.entry(lang).or_default();
let e = collected.get(&lang).unwrap();
assert_eq!(e, e.trim());
dict_entry.album_versions_title = e.to_owned();
for lang in &dict_entry.equivalent {
let ee = collected.get(lang).unwrap();
if ee != e {
panic!("equivalent lang conflict, lang: {lang}");
}
}
}
util::write_dict(dict);
}
#[derive(Debug, Deserialize)]
struct AlbumData {
contents: AlbumDataContents,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct AlbumDataContents {
two_column_browse_results_renderer: X1,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct X1 {
secondary_contents: SectionList<ItemSection>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
enum ItemSection {
MusicCarouselShelfRenderer(MusicCarouselShelf),
#[serde(other, deserialize_with = "deserialize_ignore_any")]
None,
}
#[derive(Debug, Deserialize)]
struct MusicCarouselShelf {
header: Option<MusicCarouselShelfHeader>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct MusicCarouselShelfHeader {
music_carousel_shelf_basic_header_renderer: MusicCarouselShelfHeaderRenderer,
}
#[derive(Debug, Deserialize)]
struct MusicCarouselShelfHeaderRenderer {
title: TextRuns,
}

View file

@@ -204,8 +204,6 @@ pub fn parse_video_durations() {
            parse(&mut words, lang, dict_entry.by_char, txt, *d);
        }
-        // dbg!(&words);
        for (k, v) in words {
            if let Some(v) = v {
                dict_entry.timeago_tokens.insert(k, v.to_string());

View file

@@ -90,6 +90,8 @@ pub(crate) struct Entry {
    pub chan_prefix: &'static str,
    /// Channel name suffix on playlist pages
    pub chan_suffix: &'static str,
+    /// "Other versions" title on album pages
+    pub album_versions_title: &'static str,
}
"#;
@@ -178,8 +180,8 @@ pub(crate) fn entry(lang: Language) -> Entry {
        .to_string()
        .replace('\n', "\n ");
-    write!(code_timeago_tokens, "{} => Entry {{\n timeago_tokens: {},\n month_before_day: {:?},\n months: {},\n timeago_nd_tokens: {},\n comma_decimal: {:?},\n number_tokens: {},\n number_nd_tokens: {},\n album_types: {},\n chan_prefix: {:?},\n chan_suffix: {:?},\n }},\n ",
-    selector, code_ta_tokens, entry.month_before_day, code_months, code_ta_nd_tokens, entry.comma_decimal, code_number_tokens, code_number_nd_tokens, code_album_types, entry.chan_prefix, entry.chan_suffix).unwrap();
+    write!(code_timeago_tokens, "{} => Entry {{\n timeago_tokens: {},\n month_before_day: {:?},\n months: {},\n timeago_nd_tokens: {},\n comma_decimal: {:?},\n number_tokens: {},\n number_nd_tokens: {},\n album_types: {},\n chan_prefix: {:?},\n chan_suffix: {:?},\n album_versions_title: {:?},\n }},\n ",
+    selector, code_ta_tokens, entry.month_before_day, code_months, code_ta_nd_tokens, entry.comma_decimal, code_number_tokens, code_number_nd_tokens, code_album_types, entry.chan_prefix, entry.chan_suffix, entry.album_versions_title).unwrap();
}
code_timeago_tokens = code_timeago_tokens.trim_end().to_owned() + "\n }\n}\n";

View file

@@ -2,6 +2,7 @@
mod abtest;
mod collect_album_types;
+mod collect_album_versions_titles;
mod collect_chan_prefixes;
mod collect_history_dates;
mod collect_large_numbers;
@@ -34,12 +35,14 @@ enum Commands {
    CollectHistoryDates,
    CollectMusicHistoryDates,
    CollectChanPrefixes,
+    CollectAlbumVersionsTitles,
    ParsePlaylistDates,
    ParseHistoryDates,
    ParseLargeNumbers,
    ParseAlbumTypes,
    ParseVideoDurations,
    ParseChanPrefixes,
+    ParseAlbumVersionsTitles,
    GenLocales,
    GenDict,
    DownloadTestfiles,
@@ -58,28 +61,25 @@ async fn main() {
    match cli.command {
        Commands::CollectPlaylistDates => {
-            collect_playlist_dates::collect_dates(cli.concurrency).await;
+            collect_playlist_dates::collect_dates(cli.concurrency).await
        }
        Commands::CollectLargeNumbers => {
-            collect_large_numbers::collect_large_numbers(cli.concurrency).await;
+            collect_large_numbers::collect_large_numbers(cli.concurrency).await
        }
        Commands::CollectAlbumTypes => {
-            collect_album_types::collect_album_types(cli.concurrency).await;
+            collect_album_types::collect_album_types(cli.concurrency).await
        }
        Commands::CollectVideoDurations => {
-            collect_video_durations::collect_video_durations(cli.concurrency).await;
+            collect_video_durations::collect_video_durations(cli.concurrency).await
        }
        Commands::CollectVideoDates => {
-            collect_video_dates::collect_video_dates(cli.concurrency).await;
+            collect_video_dates::collect_video_dates(cli.concurrency).await
        }
-        Commands::CollectHistoryDates => {
-            collect_history_dates::collect_dates().await;
-        }
-        Commands::CollectMusicHistoryDates => {
-            collect_history_dates::collect_dates_music().await;
-        }
-        Commands::CollectChanPrefixes => {
-            collect_chan_prefixes::collect_chan_prefixes().await;
+        Commands::CollectHistoryDates => collect_history_dates::collect_dates().await,
+        Commands::CollectMusicHistoryDates => collect_history_dates::collect_dates_music().await,
+        Commands::CollectChanPrefixes => collect_chan_prefixes::collect_chan_prefixes().await,
+        Commands::CollectAlbumVersionsTitles => {
+            collect_album_versions_titles::collect_album_versions_titles().await
        }
        Commands::ParsePlaylistDates => collect_playlist_dates::write_samples_to_dict(),
        Commands::ParseHistoryDates => collect_history_dates::write_samples_to_dict(),
@@ -87,9 +87,10 @@ async fn main() {
        Commands::ParseAlbumTypes => collect_album_types::write_samples_to_dict(),
        Commands::ParseVideoDurations => collect_video_durations::parse_video_durations(),
        Commands::ParseChanPrefixes => collect_chan_prefixes::write_samples_to_dict(),
-        Commands::GenLocales => {
-            gen_locales::generate_locales().await;
+        Commands::ParseAlbumVersionsTitles => {
+            collect_album_versions_titles::write_samples_to_dict()
        }
+        Commands::GenLocales => gen_locales::generate_locales().await,
        Commands::GenDict => gen_dictionary::generate_dictionary(),
        Commands::DownloadTestfiles => download_testfiles::download_testfiles().await,
        Commands::AbTest { id, n } => {

View file

@@ -61,6 +61,8 @@ pub struct DictEntry {
    pub chan_prefix: String,
    /// Channel name suffix on playlist pages
    pub chan_suffix: String,
+    /// "Other versions" title on album pages
+    pub album_versions_title: String,
}
/// Parsed TimeAgo string, contains amount and time unit.

View file

@@ -3,11 +3,18 @@
All notable changes to this project will be documented in this file.
+## [v0.3.1](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe-downloader/v0.3.0..rustypipe-downloader/v0.3.1) - 2024-12-20
+### ⚙️ Miscellaneous Tasks
+- *(deps)* Update rustypipe to 0.11.0
## [v0.3.0](https://codeberg.org/ThetaDev/rustypipe/compare/rustypipe-downloader/v0.2.7..rustypipe-downloader/v0.3.0) - 2025-02-09
### 🚀 Features
-- [**breaking**] Add userdata feature for all personal data queries (playback history, subscriptions) - ([65cb424](https://codeberg.org/ThetaDev/rustypipe/commit/65cb4244c6ab547f53d0cb12af802c4189188c86))
+- [**breaking**] Remove manual PO token options from downloader in favor of rustypipe-botguard - ([cddb32f](https://codeberg.org/ThetaDev/rustypipe/commit/cddb32f190276265258c6ab45b3d43a8891c4b39))
### 🐛 Bug Fixes

View file

@@ -1,6 +1,6 @@
[package]
name = "rustypipe-downloader"
-version = "0.3.0"
+version = "0.3.1"
rust-version = "1.67.1"
edition.workspace = true
authors.workspace = true
@@ -51,7 +51,7 @@ image = { version = "0.25.0", optional = true, default-features = false, feature
    "jpeg",
    "webp",
] }
-smartcrop2 = { version = "0.3.1", optional = true }
+smartcrop2 = { version = "0.4.0", optional = true }
[dev-dependencies]
path_macro.workspace = true

View file

@@ -1033,7 +1033,11 @@ impl DownloadQuery {
                image::load_from_memory(&img_bts)?
            };
-            let crop = smartcrop::find_best_crop(&img, NonZeroU32::MIN, NonZeroU32::MIN)
+            let crop = smartcrop::find_best_crop_no_borders(
+                &img,
+                NonZeroU32::MIN,
+                NonZeroU32::MIN,
+            )
                .map_err(|e| DownloadError::AudioTag(format!("image crop: {e}").into()))?
                .crop;
            img = img.crop_imm(crop.x, crop.y, crop.width, crop.height);
@@ -1063,8 +1067,8 @@
}
fn get_download_range(offset: u64, size: Option<u64>) -> Range<u64> {
-    let mut rng = rand::thread_rng();
-    let chunk_size = rng.gen_range(CHUNK_SIZE_MIN..CHUNK_SIZE_MAX);
+    let mut rng = rand::rng();
+    let chunk_size = rng.random_range(CHUNK_SIZE_MIN..CHUNK_SIZE_MAX);
    let mut chunk_end = offset + chunk_size;
    if let Some(size) = size {
@@ -1197,6 +1201,8 @@ async fn download_single_file(
        }
    }
+    tracing::debug!("downloading {} to {}", url, output.to_string_lossy());
    let mut file = fs::OpenOptions::new()
        .append(true)
        .create(true)

View file

@@ -3,13 +3,13 @@
When YouTube introduces a new feature, it does so gradually. When a user creates a new
session, YouTube decided randomly which new features should be enabled.
-YouTube sessions are identified by the visitor data ID. This cookie is sent with
-every API request using the `context.client.visitor_data` JSON parameter. It is also
-returned in the `responseContext.visitorData` response parameter and stored as the
-`__SECURE-YEC` cookie.
+YouTube sessions are identified by the visitor data ID. This cookie is sent with every
+API request using the `context.client.visitor_data` JSON parameter. It is also returned
+in the `responseContext.visitorData` response parameter and stored as the `__SECURE-YEC`
+cookie.
-By sending the same visitor data ID, A/B tests can be reproduced, which is important
-for testing alternative YouTube clients.
+By sending the same visitor data ID, A/B tests can be reproduced, which is important for
+testing alternative YouTube clients.
This page lists all A/B tests that were encountered while maintaining the RustyPipe
client.
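As a rough illustration only (the types below are a sketch based on the field names mentioned above, not RustyPipe's actual models), the visitor data ID travels with every request and response roughly like this:

```rust
use serde::{Deserialize, Serialize};

/// Sketch of the request side: the session ID is sent as
/// `context.client.visitorData` with every API request.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ClientContext {
    /// Reusing the same value reproduces the same A/B test assignment.
    visitor_data: String,
}

/// Sketch of the response side: the server echoes the ID back in
/// `responseContext.visitorData` (and also sets the `__SECURE-YEC` cookie).
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ResponseContext {
    visitor_data: Option<String>,
}
```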
@@ -1030,7 +1030,7 @@ commandContext missing).
- **Encountered on:** 13.01.2025
- **Impact:** 🟢 Low
- **Endpoint:** browse (YTM)
-- **Status:** Common (10%)
+- **Status:** Frequent (59%)
YouTube Music used to group artist albums into 2 rows: "Albums" and "Singles".
@@ -1042,7 +1042,7 @@ omitted for albums in their group, while singles and EPs have a label with their
- **Encountered on:** 25.01.2025
- **Impact:** 🟢 Low
- **Endpoint:** browse (YTM)
-- **Status:** Common (4%)
+- **Status:** Stabilized
YouTube Music now uses a `continuationItemRenderer` for music playlists instead of
putting the continuations in a separate attribute of the MusicShelf.
@@ -1052,3 +1052,52 @@ items.
YouTube Music now also sends a random 16-character string as a `clientScreenNonce` in
the request context. This is not mandatory though.
## [21] Music album recommendations
- **Encountered on:** 26.02.2025
- **Impact:** 🟢 Low
- **Endpoint:** browse (YTM)
- **Status:** Common (15%)
![A/B test 21 screenshot](./_img/ab_21.png)
YouTube Music has added "Recommended" and "More from \<Artist\>" carousels to album
pages. The difficulty is distinguishing them reliably for parsing the album variants.
The current solution is adding the "Other versions" title in all languages to the
dictionary and comparing it.
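A minimal sketch of that comparison (simplified, made-up types; the actual change lives in the music playlist mapper further down in this compare):

```rust
/// Simplified stand-in for a parsed `musicCarouselShelfRenderer`.
struct Carousel {
    header_title: Option<String>,
}

/// The carousel is treated as the album variants list only if its header title
/// matches the localized "Other versions" string from the generated dictionary.
fn is_album_versions_carousel(carousel: &Carousel, dict_title: &str) -> bool {
    carousel
        .header_title
        .as_deref()
        .map(|title| title == dict_title)
        .unwrap_or(false)
}
```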
## [22] commandExecutorCommand for continuations
- **Encountered on:** 16.03.2025
- **Impact:** 🟢 Low
- **Endpoint:** browse (YTM)
- **Status:** Experimental (1%)
YouTube playlists may use a commandExecutorCommand which holds a list of commands: the
`continuationCommand` that needs to be extracted as well as a `playlistVotingRefreshPopupCommand`.
```json
{
"continuationItemRenderer": {
"continuationEndpoint": {
"commandExecutorCommand": {
"commands": [
{
"playlistVotingRefreshPopupCommand": {
"command": {}
}
},
{
"continuationCommand": {
"request": "CONTINUATION_REQUEST_TYPE_BROWSE",
"token": "4qmFsgKBARIkVkxQTGJaSVB5MjAtMXBON21xamNrZXBXRjc4bmRiNmNpX3FpGjRDQUY2SGxCVU9rTklTV2xGUkVreVVtdEZOVTVFU1hsU2FrWkRVa1JKZWs1NldRJTNEJTNEmgIiUExiWklQeTIwLTFwTjdtcWpja2VwV0Y3OG5kYjZjaV9xaQ%3D%3D"
}
}
]
}
}
}
}
```
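A self-contained sketch of how such an endpoint can be handled (simplified types and names; RustyPipe's actual implementation is the `ContinuationEndpoint::into_token` change further down in this compare):

```rust
use serde::Deserialize;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ContinuationCommandWrap {
    continuation_command: ContinuationCommand,
}

#[derive(Deserialize)]
struct ContinuationCommand {
    token: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct CommandExecutorCommandWrap {
    command_executor_command: CommandExecutorCommand,
}

#[derive(Deserialize)]
struct CommandExecutorCommand {
    // Unknown command types (e.g. playlistVotingRefreshPopupCommand) are kept
    // as raw JSON and skipped when looking for the continuation token.
    commands: Vec<serde_json::Value>,
}

/// Either a plain `continuationCommand` or the new `commandExecutorCommand` wrapper.
#[derive(Deserialize)]
#[serde(untagged)]
enum ContinuationEndpoint {
    Plain(ContinuationCommandWrap),
    Executor(CommandExecutorCommandWrap),
}

impl ContinuationEndpoint {
    fn into_token(self) -> Option<String> {
        match self {
            Self::Plain(c) => Some(c.continuation_command.token),
            Self::Executor(c) => c
                .command_executor_command
                .commands
                .into_iter()
                .find_map(|cmd| serde_json::from_value::<ContinuationCommandWrap>(cmd).ok())
                .map(|c| c.continuation_command.token),
        }
    }
}
```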

notes/_img/ab_21.png — new binary file (290 KiB), content not shown

View file

@@ -293,8 +293,10 @@ struct OauthToken {
#[derive(Debug, Clone, Serialize, Deserialize)]
struct AuthCookie {
    cookie: String,
+    #[serde(alias = "account_syncid", skip_serializing_if = "Option::is_none")]
+    channel_syncid: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
-    account_syncid: Option<String>,
+    user_syncid: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    session_index: Option<String>,
}
@@ -319,8 +321,9 @@ impl AuthCookie {
    fn new(cookie: String) -> Self {
        Self {
            cookie,
-            account_syncid: None,
+            channel_syncid: None,
            session_index: None,
+            user_syncid: None,
        }
    }
}
@@ -1591,6 +1594,17 @@ impl RustyPipe {
            .ok_or(Error::Auth(AuthError::NoLogin))
    }
+    fn user_auth_datasync_id(&self) -> Result<String, Error> {
+        self.inner
+            .cache
+            .auth_cookie
+            .read()
+            .unwrap()
+            .as_ref()
+            .and_then(|c| c.user_syncid.as_ref().map(|id| id.to_owned()))
+            .ok_or(Error::Auth(AuthError::NoLogin))
+    }
    /// Set the user authentication cookie
    ///
    /// The cookie is used for authenticated requests with browser-based clients
@@ -1685,17 +1699,17 @@
        ))?;
        // datasyncid is of the form "channel_syncid||user_syncid" for secondary channel
-        // and just "user_syncid||" for primary channel. We only want the channel_syncid
-        let (channel_syncid, user_syncid) =
+        // and just "user_syncid||" for primary channel.
+        let (p1, p2) =
            datasync_id
                .split_once("||")
                .ok_or(Error::Extraction(ExtractionError::InvalidData(
                    "datasyncId does not contain || seperator".into(),
                )))?;
-        auth_cookie.account_syncid = if user_syncid.is_empty() {
-            None
+        (auth_cookie.channel_syncid, auth_cookie.user_syncid) = if p2.is_empty() {
+            (None, Some(p1.to_owned()))
        } else {
-            Some(channel_syncid.to_owned())
+            (Some(p1.to_owned()), Some(p2.to_owned()))
        };
        auth_cookie.session_index = Some(
@@ -2129,7 +2143,7 @@ impl RustyPipeQuery {
        if let Some(session_index) = auth_cookie.session_index {
            r = r.header("X-Goog-AuthUser", session_index);
        }
-        if let Some(account_syncid) = auth_cookie.account_syncid {
+        if let Some(account_syncid) = auth_cookie.channel_syncid {
            r = r.header("X-Goog-PageId", account_syncid);
        }
        cookie = Some(auth_cookie.cookie);

View file

@@ -154,9 +154,24 @@ fn map_artist_page(
    ctx: &MapRespCtx<'_>,
    skip_extendables: bool,
) -> Result<MapResult<(MusicArtist, bool)>, ExtractionError> {
-    // dbg!(&res);
+    let contents = match res.contents {
+        Some(c) => c,
+        None => {
+            if res.microformat.microformat_data_renderer.noindex {
+                return Err(ExtractionError::NotFound {
+                    id: ctx.id.to_owned(),
+                    msg: "no contents".into(),
+                });
+            } else {
+                return Err(ExtractionError::InvalidData("no contents".into()));
+            }
+        }
+    };
-    let header = res.header.music_immersive_header_renderer;
+    let header = res
+        .header
+        .ok_or(ExtractionError::InvalidData("no header".into()))?
+        .music_immersive_header_renderer;
    if let Some(share) = header.share_endpoint {
        let pb = share.share_entity_endpoint.serialized_share_entity;
@@ -173,8 +188,7 @@ fn map_artist_page(
        }
    }
-    let sections = res
-        .contents
+    let sections = contents
        .single_column_browse_results_renderer
        .contents
        .into_iter()
@@ -338,8 +352,6 @@ impl MapResponse<FirstAlbumPage> for response::MusicArtistAlbums {
        self,
        ctx: &MapRespCtx<'_>,
    ) -> Result<MapResult<FirstAlbumPage>, ExtractionError> {
-        // dbg!(&self);
        let Some(header) = self.header else {
            return Err(ExtractionError::NotFound {
                id: ctx.id.into(),

View file

@@ -105,8 +105,6 @@ impl MapResponse<Vec<MusicGenreItem>> for response::MusicGenres {
impl MapResponse<MusicGenre> for response::MusicGenre {
    fn map_response(self, ctx: &MapRespCtx<'_>) -> Result<MapResult<MusicGenre>, ExtractionError> {
-        // dbg!(&self);
        let content = self
            .contents
            .single_column_browse_results_renderer

View file

@@ -9,7 +9,7 @@ use crate::{
        AlbumId, ChannelId, MusicAlbum, MusicPlaylist, TrackItem, TrackType,
    },
    serializer::{text::TextComponents, MapResult},
-    util::{self, TryRemove, DOT_SEPARATOR},
+    util::{self, dictionary, TryRemove, DOT_SEPARATOR},
};
use self::response::url_endpoint::MusicPageType;
@@ -95,11 +95,21 @@ impl RustyPipeQuery {
                })
                .collect::<Vec<_>>();
-            if !to_replace.is_empty() {
+            let last_tn = album
+                .tracks
+                .last()
+                .and_then(|t| t.track_nr)
+                .unwrap_or_default();
+            if !to_replace.is_empty() || last_tn < album.track_count {
+                tracing::debug!(
+                    "fetching album playlist ({} tracks, {} to replace)",
+                    album.track_count,
+                    to_replace.len()
+                );
                let mut playlist = self.music_playlist(playlist_id).await?;
                playlist
                    .tracks
-                    .extend_limit(&self, album.tracks.len())
+                    .extend_limit(&self, album.track_count.into())
                    .await?;
                for (i, title) in to_replace {
@@ -118,6 +128,18 @@ impl RustyPipeQuery {
                    album.tracks[i].track_type = TrackType::Track;
                }
            }
+            // Extend the list of album tracks with the ones from the playlist if the playlist returned more tracks
+            // This is the case for albums with more than 200 tracks (e.g. audiobooks)
+            if album.tracks.len() < playlist.tracks.items.len() {
+                let mut tn = last_tn;
+                for mut t in playlist.tracks.items.into_iter().skip(album.tracks.len()) {
+                    tn += 1;
+                    t.album = album.tracks.first().and_then(|t| t.album.clone());
+                    t.track_nr = Some(tn);
+                    album.tracks.push(t);
+                }
+            }
        }
    }
    Ok(album)
@@ -129,9 +151,21 @@ impl MapResponse<MusicPlaylist> for response::MusicPlaylist {
        self,
        ctx: &MapRespCtx<'_>,
    ) -> Result<MapResult<MusicPlaylist>, ExtractionError> {
-        // dbg!(&self);
+        let contents = match self.contents {
+            Some(c) => c,
+            None => {
+                if self.microformat.microformat_data_renderer.noindex {
+                    return Err(ExtractionError::NotFound {
+                        id: ctx.id.to_owned(),
+                        msg: "no contents".into(),
+                    });
+                } else {
+                    return Err(ExtractionError::InvalidData("no contents".into()));
+                }
+            }
+        };
-        let (header, music_contents) = match self.contents {
+        let (header, music_contents) = match contents {
            response::music_playlist::Contents::SingleColumnBrowseResultsRenderer(c) => (
                self.header,
                c.contents
@@ -318,9 +352,21 @@ impl MapResponse<MusicPlaylist> for response::MusicPlaylist {
impl MapResponse<MusicAlbum> for response::MusicPlaylist {
    fn map_response(self, ctx: &MapRespCtx<'_>) -> Result<MapResult<MusicAlbum>, ExtractionError> {
-        // dbg!(&self);
+        let contents = match self.contents {
+            Some(c) => c,
+            None => {
+                if self.microformat.microformat_data_renderer.noindex {
+                    return Err(ExtractionError::NotFound {
+                        id: ctx.id.to_owned(),
+                        msg: "no contents".into(),
+                    });
+                } else {
+                    return Err(ExtractionError::InvalidData("no contents".into()));
+                }
+            }
+        };
-        let (header, sections) = match self.contents {
+        let (header, sections) = match contents {
            response::music_playlist::Contents::SingleColumnBrowseResultsRenderer(c) => (
                self.header,
                c.contents
@@ -360,8 +406,19 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
            match section {
                response::music_item::ItemSection::MusicShelfRenderer(sh) => shelf = Some(sh),
                response::music_item::ItemSection::MusicCarouselShelfRenderer(sh) => {
+                    if sh
+                        .header
+                        .map(|h| {
+                            h.music_carousel_shelf_basic_header_renderer
+                                .title
+                                .first_str()
+                                == dictionary::entry(ctx.lang).album_versions_title
+                        })
+                        .unwrap_or_default()
+                    {
                        album_variants = Some(sh.contents);
                    }
+                }
                _ => (),
            }
        }
@@ -425,10 +482,12 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
            }
        }
-        let playlist_id = self.microformat.and_then(|mf| {
-            mf.microformat_data_renderer
-                .url_canonical
-                .strip_prefix("https://music.youtube.com/playlist?list=")
-                .map(str::to_owned)
-        });
+        let playlist_id = self
+            .microformat
+            .microformat_data_renderer
+            .url_canonical
+            .and_then(|x| {
+                x.strip_prefix("https://music.youtube.com/playlist?list=")
+                    .map(str::to_owned)
+            });
        let (playlist_id, artist_id) = header
@@ -457,6 +516,14 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
            .unwrap_or_default();
        let artist_id = artist_id.or_else(|| artists.first().and_then(|a| a.id.clone()));
+        let second_subtitle_parts = header
+            .second_subtitle
+            .split(|p| p == DOT_SEPARATOR)
+            .collect::<Vec<_>>();
+        let track_count = second_subtitle_parts
+            .get(usize::from(second_subtitle_parts.len() > 2))
+            .and_then(|txt| util::parse_numeric::<u16>(&txt[0]).ok());
        let mut mapper = MusicListMapper::with_album(
            ctx.lang,
            artists.clone(),
@@ -491,6 +558,7 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
                album_type,
                year,
                by_va,
+                track_count: track_count.unwrap_or(tracks_res.c.len() as u16),
                tracks: tracks_res.c,
                variants: variants_res.c,
            },
@@ -541,8 +609,8 @@ mod tests {
    #[case::single("single", "MPREb_bHfHGoy7vuv")]
    #[case::description("description", "MPREb_PiyfuVl6aYd")]
    #[case::unavailable("unavailable", "MPREb_AzuWg8qAVVl")]
-    #[case::unavailable("unavailable", "MPREb_AzuWg8qAVVl")]
    #[case::two_columns("20240228_twoColumns", "MPREb_bHfHGoy7vuv")]
+    #[case::recommends("20250225_recommends", "MPREb_u1I69lSAe5v")]
    fn map_music_album(#[case] name: &str, #[case] id: &str) {
        let json_path = path!(*TESTFILES / "music_playlist" / format!("album_{name}.json"));
        let json_file = File::open(json_path).unwrap();

View file

@@ -155,8 +155,6 @@ impl<T: FromYtItem> MapResponse<MusicSearchResult<T>> for response::MusicSearch
        self,
        ctx: &MapRespCtx<'_>,
    ) -> Result<MapResult<MusicSearchResult<T>>, ExtractionError> {
-        // dbg!(&self);
        let tabs = self.contents.tabbed_search_results_renderer.contents;
        let sections = tabs
            .into_iter()

View file

@@ -249,11 +249,9 @@ impl MapResponse<Paginator<HistoryItem<VideoItem>>> for response::Continuation {
                        &mut map_res,
                    );
                }
-                response::YouTubeListItem::ContinuationItemRenderer {
-                    continuation_endpoint,
-                } => {
+                response::YouTubeListItem::ContinuationItemRenderer(ep) => {
                    if ctoken.is_none() {
-                        ctoken = Some(continuation_endpoint.continuation_command.token);
+                        ctoken = ep.continuation_endpoint.into_token();
                    }
                }
                _ => {}

View file

@@ -1,6 +1,6 @@
use std::{
    borrow::Cow,
-    collections::{BTreeMap, HashMap},
+    collections::{BTreeMap, HashMap, HashSet},
    fmt::Debug,
};
@@ -104,42 +104,29 @@
    ) -> Result<VideoPlayer, Error> {
        let video_id = video_id.as_ref();
        let mut last_e = None;
+        let mut query = Cow::Borrowed(self);
        let mut clients_iter = clients.iter().peekable();
+        let mut failed_clients = HashSet::new();
        while let Some(client) = clients_iter.next() {
-            if self.opts.auth == Some(true) && !self.auth_enabled(*client) {
+            if query.opts.auth == Some(true) && !self.auth_enabled(*client) {
                // If no client has auth enabled, return NoLogin error instead of "no clients"
                if last_e.is_none() {
                    last_e = Some(Error::Auth(AuthError::NoLogin));
                }
                continue;
            }
+            if failed_clients.contains(client) {
+                continue;
+            }
-            let res = self.player_from_client(video_id, *client).await;
+            let res = query.player_from_client(video_id, *client).await;
            match res {
                Ok(res) => return Ok(res),
                Err(Error::Extraction(e)) => {
-                    if e.use_login() {
-                        if let Some(c) = self.auth_enabled_client(clients) {
-                            tracing::info!("{e}; fetching player with login");
-                            match self
-                                .clone()
-                                .authenticated()
-                                .player_from_client(video_id, c)
-                                .await
-                            {
-                                Ok(res) => return Ok(res),
-                                Err(Error::Extraction(e)) => {
-                                    if !e.switch_client() {
-                                        return Err(Error::Extraction(e));
-                                    }
-                                }
-                                Err(e) => return Err(e),
-                            }
-                        } else {
-                            return Err(Error::Extraction(e));
-                        }
+                    if e.use_login() && query.opts.auth.is_none() {
+                        clients_iter = clients.iter().peekable();
+                        query = Cow::Owned(self.clone().authenticated());
                    } else if !e.switch_client() {
                        return Err(Error::Extraction(e));
                    }
@@ -147,6 +134,7 @@
                        tracing::warn!("error fetching player with {client:?} client: {e}; retrying with {next_client:?} client");
                    }
                    last_e = Some(Error::Extraction(e));
+                    failed_clients.insert(*client);
                }
                Err(e) => return Err(e),
            }
@@ -156,22 +144,27 @@
    async fn get_player_po_token(&self, video_id: &str) -> Result<PlayerPoToken, Error> {
        if let Some(bg) = &self.client.inner.botguard {
+            let (ident, visitor_data) = if self.opts.auth == Some(true) {
+                (self.client.user_auth_datasync_id()?, None)
+            } else {
                let visitor_data = self.get_visitor_data(false).await?;
+                (visitor_data.to_owned(), Some(visitor_data))
+            };
            if bg.po_token_cache {
-                let session_token = self.get_session_po_token(&visitor_data).await?;
+                let session_token = self.get_session_po_token(&ident).await?;
                Ok(PlayerPoToken {
-                    visitor_data: Some(visitor_data),
+                    visitor_data,
                    session_po_token: Some(session_token),
                    content_po_token: None,
                })
            } else {
-                let (po_tokens, valid_until) =
-                    self.get_po_tokens(&[video_id, &visitor_data]).await?;
+                let (po_tokens, valid_until) = self.get_po_tokens(&[video_id, &ident]).await?;
                let mut po_tokens = po_tokens.into_iter();
                let po_token = po_tokens.next().unwrap();
                let session_po_token = po_tokens.next().unwrap();
                Ok(PlayerPoToken {
-                    visitor_data: Some(visitor_data),
+                    visitor_data,
                    session_po_token: Some(PoToken {
                        po_token: session_po_token,
                        valid_until,
@@ -191,6 +184,11 @@
        video_id: S,
        client_type: ClientType,
    ) -> Result<VideoPlayer, Error> {
+        if self.opts.auth == Some(true) {
+            tracing::info!("fetching {client_type:?} player with login");
+        } else {
+            tracing::debug!("fetching {client_type:?} player");
+        }
        let video_id = video_id.as_ref();
        let (deobf, player_po) = tokio::try_join!(
@@ -385,6 +383,21 @@ impl MapResponse<VideoPlayer> for response::Player {
                video_details.video_id, ctx.id
            )));
        }
+        // Sometimes YouTube Desktop does not output any URLs for adaptive streams.
+        // Since this is currently rare, it is best to retry the request in this case.
+        if !is_live
+            && !streaming_data.adaptive_formats.c.is_empty()
+            && streaming_data
+                .adaptive_formats
+                .c
+                .iter()
+                .all(|f| f.url.is_none() && f.signature_cipher.is_none())
+        {
+            return Err(ExtractionError::Unavailable {
+                reason: UnavailabilityReason::TryAgain,
+                msg: "no adaptive stream URLs".to_owned(),
+            });
+        }
        let video_info = VideoPlayerDetails {
            id: video_details.video_id,
@@ -627,7 +640,7 @@ impl<'a> StreamsMapper<'a> {
    fn deobf(&self) -> Result<&Deobfuscator, DeobfError> {
        self.deobf
            .as_ref()
-            .ok_or(DeobfError::Other("no deobfuscator"))
+            .ok_or(DeobfError::Other("no deobfuscator".into()))
    }
    fn cipher_to_url_params(

View file

@@ -257,6 +257,7 @@ mod tests {
    #[case::nomusic("nomusic", "PL1J-6JOckZtE_P9Xx8D3b2O6w0idhuKBe")]
    #[case::live("live", "UULVvqRdlKsE5Q8mf8YXbdIJLw")]
    #[case::pageheader("20241011_pageheader", "PLT2w2oBf1TZKyvY_M6JsASs73m-wjLzH5")]
+    #[case::cmdexecutor("20250316_cmdexecutor", "PLbZIPy20-1pN7mqjckepWF78ndb6ci_qi")]
    fn map_playlist_data(#[case] name: &str, #[case] id: &str) {
        let json_path = path!(*TESTFILES / "playlist" / format!("playlist_{name}.json"));
        let json_file = File::open(json_path).unwrap();

View file

@@ -152,9 +152,16 @@ pub(crate) struct ContinuationItemRenderer {
    pub continuation_endpoint: ContinuationEndpoint,
}
+#[derive(Debug, Deserialize)]
+#[serde(untagged)]
+pub(crate) enum ContinuationEndpoint {
+    ContinuationCommand(ContinuationCommandWrap),
+    CommandExecutorCommand(CommandExecutorCommandWrap),
+}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
-pub(crate) struct ContinuationEndpoint {
+pub(crate) struct ContinuationCommandWrap {
    pub continuation_command: ContinuationCommand,
}
@@ -164,7 +171,34 @@ pub(crate) struct ContinuationCommand {
    pub token: String,
}
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct CommandExecutorCommandWrap {
+    pub command_executor_command: CommandExecutorCommand,
+}
#[serde_as]
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct CommandExecutorCommand {
+    #[serde_as(as = "VecSkipError<_>")]
+    commands: Vec<ContinuationCommandWrap>,
+}
+impl ContinuationEndpoint {
+    pub fn into_token(self) -> Option<String> {
+        match self {
+            Self::ContinuationCommand(cmd) => Some(cmd.continuation_command.token),
+            Self::CommandExecutorCommand(cmd) => cmd
+                .command_executor_command
+                .commands
+                .into_iter()
+                .next()
+                .map(|c| c.continuation_command.token),
+        }
+    }
+}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct Icon {

View file

@@ -5,7 +5,8 @@ use crate::serializer::text::Text;
use super::{
    music_item::{
-        Button, Grid, ItemSection, MusicThumbnailRenderer, SimpleHeader, SingleColumnBrowseResult,
+        Button, Grid, ItemSection, MusicMicroformat, MusicThumbnailRenderer, SimpleHeader,
+        SingleColumnBrowseResult,
    },
    SectionList, Tab,
};
@@ -14,8 +15,10 @@ use super::{
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct MusicArtist {
-    pub contents: SingleColumnBrowseResult<Tab<SectionList<ItemSection>>>,
-    pub header: Header,
+    pub contents: Option<SingleColumnBrowseResult<Tab<SectionList<ItemSection>>>>,
+    pub header: Option<Header>,
+    #[serde(default)]
+    pub microformat: MusicMicroformat,
}
#[derive(Debug, Deserialize)]

View file

@@ -433,6 +433,22 @@ pub(crate) enum TrackBadge {
    LiveBadgeRenderer {},
}
+#[serde_as]
+#[derive(Default, Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct MusicMicroformat {
+    #[serde_as(as = "DefaultOnError")]
+    pub microformat_data_renderer: MicroformatData,
+}
+#[derive(Default, Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub(crate) struct MicroformatData {
+    pub url_canonical: Option<String>,
+    #[serde(default)]
+    pub noindex: bool,
+}
/*
#MAPPER
*/
@@ -530,7 +546,9 @@ impl MusicListMapper {
            MusicResponseItem::ContinuationItemRenderer {
                continuation_endpoint,
            } => {
-                self.ctoken = Some(continuation_endpoint.continuation_command.token);
+                if self.ctoken.is_none() {
+                    self.ctoken = continuation_endpoint.into_token();
+                }
                Ok(None)
            }
        }

View file

@@ -5,22 +5,21 @@ use crate::serializer::text::{AttributedText, Text, TextComponents};
use super::{
    music_item::{
-        Button, ItemSection, MusicContentsRenderer, MusicItemMenuEntry, MusicThumbnailRenderer,
+        Button, ItemSection, MusicContentsRenderer, MusicItemMenuEntry, MusicMicroformat,
+        MusicThumbnailRenderer,
    },
    url_endpoint::OnTapWrap,
    ContentsRenderer, SectionList, Tab,
};
/// Response model for YouTube Music playlists and albums
-#[serde_as]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct MusicPlaylist {
-    pub contents: Contents,
+    pub contents: Option<Contents>,
    pub header: Option<Header>,
    #[serde(default)]
-    #[serde_as(as = "DefaultOnError")]
-    pub microformat: Option<Microformat>,
+    pub microformat: MusicMicroformat,
}
#[serde_as]
@@ -162,15 +161,3 @@ pub(crate) struct AvatarStackViewModel {
pub(crate) struct AvatarStackRendererContext {
    pub command_context: Option<OnTapWrap>,
}
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct Microformat {
-    pub microformat_data_renderer: MicroformatData,
-}
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub(crate) struct MicroformatData {
-    pub url_canonical: String,
-}

View file

@ -530,15 +530,14 @@ pub(crate) enum ContinuationItemVariants {
} }
impl ContinuationItemVariants { impl ContinuationItemVariants {
pub fn token(self) -> String { pub fn into_token(self) -> Option<String> {
match self { match self {
ContinuationItemVariants::Ep { ContinuationItemVariants::Ep {
continuation_endpoint, continuation_endpoint,
} => continuation_endpoint, } => continuation_endpoint,
ContinuationItemVariants::Btn { button } => button.button_renderer.command, ContinuationItemVariants::Btn { button } => button.button_renderer.command,
} }
.continuation_command .into_token()
.token
} }
} }
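Renaming `token()` to `into_token()` and returning `Option<String>` lets callers treat an endpoint without a continuation command as "no further pages" rather than a hard error. A reduced sketch of the pattern with simplified stand-in types (not the actual response models):

```rust
// Simplified stand-ins for the response models.
struct ContinuationCommand {
    token: String,
}

struct ContinuationEndpoint {
    continuation_command: Option<ContinuationCommand>,
}

impl ContinuationEndpoint {
    // Consumes the endpoint; yields None when the response omitted the command.
    fn into_token(self) -> Option<String> {
        self.continuation_command.map(|c| c.token)
    }
}

fn main() {
    let some_ep = ContinuationEndpoint {
        continuation_command: Some(ContinuationCommand { token: "token123".into() }),
    };
    let empty_ep = ContinuationEndpoint { continuation_command: None };
    assert_eq!(some_ep.into_token().as_deref(), Some("token123"));
    assert_eq!(empty_ep.into_token(), None);
}
```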

View file

@ -4,7 +4,7 @@ use serde_with::{
}; };
use time::OffsetDateTime; use time::OffsetDateTime;
use super::{ChannelBadge, ContentImage, ContinuationEndpoint, PhMetadataView, Thumbnails}; use super::{ChannelBadge, ContentImage, ContinuationItemRenderer, PhMetadataView, Thumbnails};
use crate::{ use crate::{
model::{Channel, ChannelItem, ChannelTag, PlaylistItem, VideoItem, YouTubeItem}, model::{Channel, ChannelItem, ChannelTag, PlaylistItem, VideoItem, YouTubeItem},
param::Language, param::Language,
@ -37,12 +37,9 @@ pub(crate) enum YouTubeListItem {
LockupViewModel(LockupViewModel), LockupViewModel(LockupViewModel),
/// Continauation items are located at the end of a list /// Continuation items are located at the end of a list
/// and contain the continuation token for progressive loading /// and contain the continuation token for progressive loading
#[serde(rename_all = "camelCase")] ContinuationItemRenderer(ContinuationItemRenderer),
ContinuationItemRenderer {
continuation_endpoint: ContinuationEndpoint,
},
/// Corrected search query /// Corrected search query
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@ -838,9 +835,11 @@ impl YouTubeListMapper<YouTubeItem> {
self.items.push(mapped); self.items.push(mapped);
} }
} }
YouTubeListItem::ContinuationItemRenderer { YouTubeListItem::ContinuationItemRenderer(r) => {
continuation_endpoint, if self.ctoken.is_none() {
} => self.ctoken = Some(continuation_endpoint.continuation_command.token), self.ctoken = r.continuation_endpoint.into_token();
}
}
YouTubeListItem::ShowingResultsForRenderer { corrected_query } => { YouTubeListItem::ShowingResultsForRenderer { corrected_query } => {
self.corrected_query = Some(corrected_query); self.corrected_query = Some(corrected_query);
} }
@ -886,9 +885,11 @@ impl YouTubeListMapper<VideoItem> {
self.items.push(mapped); self.items.push(mapped);
} }
} }
YouTubeListItem::ContinuationItemRenderer { YouTubeListItem::ContinuationItemRenderer(r) => {
continuation_endpoint, if self.ctoken.is_none() {
} => self.ctoken = Some(continuation_endpoint.continuation_command.token), self.ctoken = r.continuation_endpoint.into_token();
}
}
YouTubeListItem::ShowingResultsForRenderer { corrected_query } => { YouTubeListItem::ShowingResultsForRenderer { corrected_query } => {
self.corrected_query = Some(corrected_query); self.corrected_query = Some(corrected_query);
} }
@ -938,9 +939,11 @@ impl YouTubeListMapper<PlaylistItem> {
self.items.push(mapped); self.items.push(mapped);
} }
} }
YouTubeListItem::ContinuationItemRenderer { YouTubeListItem::ContinuationItemRenderer(r) => {
continuation_endpoint, if self.ctoken.is_none() {
} => self.ctoken = Some(continuation_endpoint.continuation_command.token), self.ctoken = r.continuation_endpoint.into_token();
}
}
YouTubeListItem::ShowingResultsForRenderer { corrected_query } => { YouTubeListItem::ShowingResultsForRenderer { corrected_query } => {
self.corrected_query = Some(corrected_query); self.corrected_query = Some(corrected_query);
} }
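All three mapper impls above now guard the assignment with `if self.ctoken.is_none()`, so the first continuation item in a list wins and later or duplicate continuation renderers cannot overwrite it. The same "first token wins" idea isolated as a tiny, purely illustrative helper:

```rust
#[derive(Default)]
struct Mapper {
    ctoken: Option<String>,
}

impl Mapper {
    // Keep the first continuation token we see; ignore any later ones.
    fn set_ctoken_once(&mut self, token: Option<String>) {
        if self.ctoken.is_none() {
            self.ctoken = token;
        }
    }
}

fn main() {
    let mut m = Mapper::default();
    m.set_ctoken_once(Some("first".into()));
    m.set_ctoken_once(Some("second".into()));
    assert_eq!(m.ctoken.as_deref(), Some("first"));
}
```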

View file

@ -43,6 +43,7 @@ MusicAlbum(
album_type: single, album_type: single,
year: Some(2020), year: Some(2020),
by_va: false, by_va: false,
track_count: 1,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "XX0epju-YvY", id: "XX0epju-YvY",

View file

@ -0,0 +1,151 @@
---
source: src/client/music_playlist.rs
expression: map_res.c
---
MusicAlbum(
id: "MPREb_u1I69lSAe5v",
playlist_id: Some("OLAK5uy_lGP_zv0vJDUlecQDzugUJmjcF7pvyVNyY"),
name: "Waldbrand",
cover: [
Thumbnail(
url: "https://lh3.googleusercontent.com/IYxE8yTIpFUu0OayA5SaxFEn6zQ7T21hpkvI8CODY9NEH1XIhyoUhGohkZuaK-xSu22BC4wjp6srNjIW=w60-h60-l90-rj",
width: 60,
height: 60,
),
Thumbnail(
url: "https://lh3.googleusercontent.com/IYxE8yTIpFUu0OayA5SaxFEn6zQ7T21hpkvI8CODY9NEH1XIhyoUhGohkZuaK-xSu22BC4wjp6srNjIW=w120-h120-l90-rj",
width: 120,
height: 120,
),
Thumbnail(
url: "https://lh3.googleusercontent.com/IYxE8yTIpFUu0OayA5SaxFEn6zQ7T21hpkvI8CODY9NEH1XIhyoUhGohkZuaK-xSu22BC4wjp6srNjIW=w226-h226-l90-rj",
width: 226,
height: 226,
),
Thumbnail(
url: "https://lh3.googleusercontent.com/IYxE8yTIpFUu0OayA5SaxFEn6zQ7T21hpkvI8CODY9NEH1XIhyoUhGohkZuaK-xSu22BC4wjp6srNjIW=w544-h544-l90-rj",
width: 544,
height: 544,
),
],
artists: [
ArtistId(
id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
name: "Madeline Juno",
),
],
artist_id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
description: None,
album_type: ep,
year: Some(2016),
by_va: false,
track_count: 5,
tracks: [
TrackItem(
id: "aGd3VKSOTxY",
name: "Ich wache auf",
duration: Some(222),
cover: [],
artists: [
ArtistId(
id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
name: "Madeline Juno",
),
],
artist_id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
album: Some(AlbumId(
id: "MPREb_u1I69lSAe5v",
name: "Waldbrand",
)),
view_count: Some(208000),
track_type: track,
track_nr: Some(1),
by_va: false,
),
TrackItem(
id: "lhPOMUjV4rE",
name: "Waldbrand",
duration: Some(209),
cover: [],
artists: [
ArtistId(
id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
name: "Madeline Juno",
),
],
artist_id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
album: Some(AlbumId(
id: "MPREb_u1I69lSAe5v",
name: "Waldbrand",
)),
view_count: Some(6000000),
track_type: video,
track_nr: Some(2),
by_va: false,
),
TrackItem(
id: "Bu26uFtpt58",
name: "Verlernt",
duration: Some(224),
cover: [],
artists: [
ArtistId(
id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
name: "Madeline Juno",
),
],
artist_id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
album: Some(AlbumId(
id: "MPREb_u1I69lSAe5v",
name: "Waldbrand",
)),
view_count: Some(418000),
track_type: track,
track_nr: Some(3),
by_va: false,
),
TrackItem(
id: "RgwNqqiVqdY",
name: "In Farbe",
duration: Some(222),
cover: [],
artists: [
ArtistId(
id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
name: "Madeline Juno",
),
],
artist_id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
album: Some(AlbumId(
id: "MPREb_u1I69lSAe5v",
name: "Waldbrand",
)),
view_count: Some(127000),
track_type: track,
track_nr: Some(4),
by_va: false,
),
TrackItem(
id: "2TuOh30XbCI",
name: "Stadt im Hinterland",
duration: Some(198),
cover: [],
artists: [
ArtistId(
id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
name: "Madeline Juno",
),
],
artist_id: Some("UCpJyCbFbdTrx0M90HCNBHFQ"),
album: Some(AlbumId(
id: "MPREb_u1I69lSAe5v",
name: "Waldbrand",
)),
view_count: Some(79000),
track_type: track,
track_nr: Some(5),
by_va: false,
),
],
variants: [],
)
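The new file above looks like an insta snapshot (RON output with the `source:`/`expression:` header insta writes), recording the mapped `MusicAlbum` for the Waldbrand EP including the new `track_count` field. Assuming insta is what produces these, a snapshot test typically looks like the following sketch; the `Mapped` struct here is only a stand-in for the real mapped value:

```rust
// Sketch only: stand-in for the mapped album value behind `expression: map_res.c`.
#[derive(serde::Serialize)]
struct Mapped {
    name: String,
    track_count: u16,
}

#[test]
fn album_snapshot() {
    let map_res_c = Mapped { name: "Waldbrand".into(), track_count: 5 };
    // Requires insta with the "ron" feature; snapshot files are written next to
    // the test source and reviewed with `cargo insta review` after changes such
    // as adding `track_count`.
    insta::assert_ron_snapshot!(map_res_c);
}
```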

View file

@ -43,6 +43,7 @@ MusicAlbum(
album_type: album, album_type: album,
year: Some(2015), year: Some(2015),
by_va: false, by_va: false,
track_count: 11,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "YQHsXMglC9A", id: "YQHsXMglC9A",

View file

@ -39,6 +39,7 @@ MusicAlbum(
album_type: album, album_type: album,
year: Some(2016), year: Some(2016),
by_va: false, by_va: false,
track_count: 18,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "g0iRiJ_ck48", id: "g0iRiJ_ck48",

View file

@ -43,6 +43,7 @@ MusicAlbum(
album_type: single, album_type: single,
year: Some(2020), year: Some(2020),
by_va: false, by_va: false,
track_count: 1,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "XX0epju-YvY", id: "XX0epju-YvY",

View file

@ -34,6 +34,7 @@ MusicAlbum(
album_type: album, album_type: album,
year: Some(2019), year: Some(2019),
by_va: true, by_va: true,
track_count: 18,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "JWeJHN5P-E8", id: "JWeJHN5P-E8",

View file

@ -34,6 +34,7 @@ MusicAlbum(
album_type: single, album_type: single,
year: Some(2022), year: Some(2022),
by_va: true, by_va: true,
track_count: 6,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "8IqLxg0GqXc", id: "8IqLxg0GqXc",

View file

@ -207,11 +207,9 @@ impl MapResponse<Paginator<HistoryItem<VideoItem>>> for response::History {
&mut map_res, &mut map_res,
); );
} }
response::YouTubeListItem::ContinuationItemRenderer { response::YouTubeListItem::ContinuationItemRenderer(ep) => {
continuation_endpoint,
} => {
if ctoken.is_none() { if ctoken.is_none() {
ctoken = Some(continuation_endpoint.continuation_command.token); ctoken = ep.continuation_endpoint.into_token();
} }
} }
_ => {} _ => {}

View file

@ -208,11 +208,10 @@ impl MapResponse<VideoDetails> for response::VideoDetails {
) )
}); });
let comment_ctoken = comment_ctoken_section.map(|s| { let comment_ctoken = comment_ctoken_section.and_then(|s| {
s.continuation_item_renderer s.continuation_item_renderer
.continuation_endpoint .continuation_endpoint
.continuation_command .into_token()
.token
}); });
let (owner, description, is_ccommons) = match secondary_info { let (owner, description, is_ccommons) = match secondary_info {
@ -333,7 +332,7 @@ impl MapResponse<VideoDetails> for response::VideoDetails {
.sub_menu_items; .sub_menu_items;
items items
.try_swap_remove(1) .try_swap_remove(1)
.map(|c| c.service_endpoint.continuation_command.token) .and_then(|c| c.service_endpoint.into_token())
}); });
Ok(MapResult { Ok(MapResult {
@ -453,7 +452,9 @@ impl MapResponse<Paginator<Comment>> for response::VideoComments {
} }
} }
response::video_details::CommentListItem::ContinuationItemRenderer(cont) => { response::video_details::CommentListItem::ContinuationItemRenderer(cont) => {
ctoken = Some(cont.token()); if ctoken.is_none() {
ctoken = cont.into_token();
}
} }
response::video_details::CommentListItem::CommentsHeaderRenderer { count_text } => { response::video_details::CommentListItem::CommentsHeaderRenderer { count_text } => {
comment_count = count_text comment_count = count_text
@ -520,7 +521,9 @@ fn map_replies(
)) ))
} }
response::video_details::CommentListItem::ContinuationItemRenderer(cont) => { response::video_details::CommentListItem::ContinuationItemRenderer(cont) => {
reply_ctoken = Some(cont.token()); if reply_ctoken.is_none() {
reply_ctoken = cont.into_token();
}
None None
} }
_ => None, _ => None,
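Because `into_token()` now returns `Option<String>`, the comment-token extraction above switches from `Option::map` to `Option::and_then`; otherwise the result would be a nested `Option<Option<String>>`. The distinction in isolation:

```rust
fn main() {
    let sections: Vec<Option<&str>> = vec![None, Some("ctoken-abc")];

    // `map` with a closure that itself returns Option yields Option<Option<_>>.
    let nested: Option<Option<&str>> = sections.get(1).map(|s| *s);
    // `and_then` flattens it to a single Option.
    let flat: Option<&str> = sections.get(1).and_then(|s| *s);

    assert_eq!(nested, Some(Some("ctoken-abc")));
    assert_eq!(flat, Some("ctoken-abc"));
}
```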

View file

@ -3,7 +3,7 @@ use std::collections::HashMap;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use reqwest::Client; use reqwest::Client;
use ress::tokens::Token; use ress::tokens::{Keyword, Punct, Token};
use rquickjs::{Context, Runtime}; use rquickjs::{Context, Runtime};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -106,7 +106,7 @@ impl Deobfuscator {
.with(|ctx| call_fn(&ctx, DEOBF_NSIG_FUNC_NAME, nsig))?; .with(|ctx| call_fn(&ctx, DEOBF_NSIG_FUNC_NAME, nsig))?;
tracing::trace!("deobf nsig: {nsig} -> {res}"); tracing::trace!("deobf nsig: {nsig} -> {res}");
if res.starts_with("enhanced_except_") || res.ends_with(nsig) { if res.starts_with("enhanced_except_") || res.ends_with(nsig) {
return Err(DeobfError::Other("nsig fn returned an exception")); return Err(DeobfError::Other("nsig fn returned an exception".into()));
} }
Ok(res) Ok(res)
} }
@ -134,55 +134,21 @@ fn caller_function(mapped_name: &str, fn_name: &str) -> String {
} }
fn get_sig_fn(player_js: &str) -> Result<String, DeobfError> { fn get_sig_fn(player_js: &str) -> Result<String, DeobfError> {
let dfunc_name = get_sig_fn_name(player_js)?; let name = get_sig_fn_name(player_js)?;
let code = extract_js_fn(player_js, &name)?;
let js_fn = format!("{}{}", code, caller_function(DEOBF_SIG_FUNC_NAME, &name));
let function_pattern_str = format!(
r#"({}=function\([\w]+\)\{{.+?\}})"#,
dfunc_name.replace('$', "\\$")
);
let function_pattern = Regex::new(&function_pattern_str)
.map_err(|_| DeobfError::Other("could not parse sig fn pattern regex"))?;
let deobfuscate_function = format!(
"var {};",
&function_pattern
.captures(player_js)
.ok_or(DeobfError::Extraction("sig fn"))?[1]
);
let helper_object_name_pattern = Regex::new(r";([\w\$]{2,3})\...\(").unwrap();
let helper_object_name = helper_object_name_pattern
.captures(&deobfuscate_function)
.ok_or(DeobfError::Extraction("sig fn helper object name"))?
.get(1)
.unwrap()
.as_str();
let helper_pattern_str = format!(
r#"(var {}=\{{.+?\}}\}};)"#,
helper_object_name.replace('$', "\\$")
);
let helper_pattern = Regex::new(&helper_pattern_str)
.map_err(|_| DeobfError::Other("could not parse helper pattern regex"))?;
let player_js_nonl = player_js.replace('\n', "");
let helper_object = &helper_pattern
.captures(&player_js_nonl)
.ok_or(DeobfError::Extraction("sig fn helper object"))?[1];
let js_fn = helper_object.to_owned()
+ &deobfuscate_function
+ &caller_function(DEOBF_SIG_FUNC_NAME, &dfunc_name);
tracing::trace!("sig_fn: {js_fn}"); tracing::trace!("sig_fn: {js_fn}");
verify_fn(&js_fn, DEOBF_SIG_FUNC_NAME)?; verify_fn(&js_fn, DEOBF_SIG_FUNC_NAME)?;
tracing::debug!("successfully extracted sig fn `{dfunc_name}`"); tracing::debug!("successfully extracted sig fn `{name}`");
Ok(js_fn) Ok(js_fn)
} }
fn get_nsig_fn_names(player_js: &str) -> impl Iterator<Item = String> + '_ { fn get_nsig_fn_names(player_js: &str) -> impl Iterator<Item = String> + '_ {
static FUNCTION_NAME_REGEX: Lazy<Regex> = Lazy::new(|| { static FUNCTION_NAME_REGEX: Lazy<Regex> = Lazy::new(|| {
// x.get( .. y=functionName[array_num](z) .. x.set( // x.get( OR index.m3u8 OR delete x.y.file .. y=functionName[array_num](z) .. x.set(
Regex::new(r#"(?:[\w$]\.get\(|index\.m3u8).+[a-zA-Z]=([\w$]{2,})(?:\[(\d+)\])?\([a-zA-Z0-9]\).+[a-zA-Z0-9]\.set\("#) Regex::new(r#"(?:[\w$]\.get\(|index\.m3u8|delete [\w$]+\.[\w$]+\.file).+[a-zA-Z]=([\w$]{2,})(?:\[(\d+)\])?\([a-zA-Z0-9]\).+[a-zA-Z0-9]\.set\("#)
.unwrap() .unwrap()
}); });
@ -206,26 +172,71 @@ fn get_nsig_fn_names(player_js: &str) -> impl Iterator<Item = String> + '_ {
}) })
} }
fn extract_js_fn(js: &str, offset: usize, name: &str) -> Result<String, DeobfError> { fn extract_js_fn(js: &str, name: &str) -> Result<String, DeobfError> {
let function_base_re = Regex::new(&format!(r#"{}\s*=\s*function\("#, regex::escape(name)))
.map_err(|e| DeobfError::Other(format!("parsing regex for {name}: {e}").into()))?;
let offset = function_base_re
.find(js)
.ok_or(DeobfError::Extraction("could not find function base"))?
.start();
let scan = ress::Scanner::new(&js[offset..]); let scan = ress::Scanner::new(&js[offset..]);
let mut state = 0; let mut state = 0;
let mut level = 0;
let mut start = 0; #[derive(Default, Clone, PartialEq, Eq)]
let mut end = 0; struct Level {
brace: isize,
paren: isize,
bracket: isize,
}
let mut level = Level::default();
let mut start = 0usize;
let mut end = 0usize;
let mut period_before = false; let mut period_before = false;
let mut last_ident = None; let mut function_before = false;
let mut idents: HashMap<String, usize> = HashMap::new(); let mut idents: HashMap<String, bool> = HashMap::new();
// Set if the current statement is a variable/function param definition
// First value is the brace level, second is true if we are on the right hand side of an assignment
let mut var_def_stmt: Option<(Level, bool)> = None;
let global_objects = [ let global_objects = [
"NaN", "Infinity", "Object", "Function", "Boolean", "Symbol", "Error", "Number", "BigInt", "globalThis",
"Math", "Date", "String", "RegExp", "Array", "Map", "Set", "NaN",
"undefined",
"Infinity",
"Object",
"Function",
"Boolean",
"Symbol",
"Error",
"Number",
"BigInt",
"Math",
"Date",
"String",
"RegExp",
"Array",
"Map",
"Set",
"eval",
"isFinite",
"isNaN",
"parseFloat",
"parseInt",
"decodeURI",
"decodeURIComponent",
"encodeURI",
"encodeURIComponent",
"escape",
"unescape",
]; ];
for item in scan { for item in scan {
let it = item?; let it = item?;
let token = it.token; let token = it.token;
match state { match state {
// Looking for fn name // Looking for fn name
0 => { 0 => {
@ -236,47 +247,116 @@ fn extract_js_fn(js: &str, offset: usize, name: &str) -> Result<String, DeobfErr
} }
// Looking for equals // Looking for equals
1 => { 1 => {
if token.matches_punct(ress::tokens::Punct::Equal) { if token.matches_punct(Punct::Equal) {
state = 2; state = 2;
} else { } else {
state = 0; state = 0;
} }
} }
2 => { 2 => {
// Looking for begin/end braces match &token {
if token.matches_punct(ress::tokens::Punct::OpenBrace) { Token::Punct(punct) => {
level += 1; let var_def_this_lvl = || {
} else if token.matches_punct(ress::tokens::Punct::CloseBrace) { var_def_stmt
level -= 1; .as_ref()
.map(|(x, _)| x == &level)
.unwrap_or_default()
};
if level == 0 { match punct {
Punct::OpenBrace => {
level.brace += 1;
}
Punct::CloseBrace => {
if var_def_this_lvl() {
var_def_stmt = None;
}
level.brace -= 1;
if level.brace == 0 {
end = it.span.end; end = it.span.end;
state = 3; state = 3;
break; break;
} }
} }
Punct::OpenParen => {
// Looking for variable names level.paren += 1;
if let Token::Ident(id) = &token { }
if !period_before { Punct::CloseParen => {
let id_str = id.to_string(); if var_def_this_lvl() {
if !global_objects.contains(&id_str.as_str()) { var_def_stmt = None;
last_ident = Some(id.to_string()); }
level.paren -= 1;
}
Punct::OpenBracket => {
level.bracket += 1;
}
Punct::CloseBracket => {
if var_def_this_lvl() {
var_def_stmt = None;
}
level.bracket -= 1;
}
Punct::SemiColon => {
if var_def_this_lvl() {
var_def_stmt = None;
} }
} }
} else if last_ident.is_some() Punct::Comma => {
&& !token.matches_punct(ress::tokens::Punct::OpenParen) if let Some((lvl, rhs)) = &mut var_def_stmt {
if lvl == &level {
*rhs = false;
}
}
}
Punct::Equal => {
if let Some((lvl, rhs)) = &mut var_def_stmt {
if lvl == &level {
*rhs = true;
}
}
}
_ => {}
}
}
Token::Keyword(kw) => match kw {
Keyword::Var(_) | Keyword::Let(_) | Keyword::Const(_) => {
var_def_stmt = Some((level.clone(), false));
}
Keyword::Function(_) => {
let mut l = level.clone();
l.paren += 1;
var_def_stmt = Some((l, false));
}
_ => {}
},
Token::Ident(id) => {
// Ignore object attributes and 1char long local vars
if !period_before
&& id.as_ref().len() > 1
&& !global_objects.contains(&id.as_ref())
{ {
let n = idents.entry(last_ident.unwrap()).or_default(); // If we are on the left hand side of a variable definition statement
*n += 1; // or after "function", mark the variable name as defined
last_ident = None; if var_def_stmt
.as_ref()
.map(|(lvl, rhs)| lvl == &level && !rhs)
.unwrap_or_default()
|| function_before
{
idents.insert(id.to_string(), true);
} else { } else {
last_ident = None; idents.entry(id.to_string()).or_default();
}
}
}
_ => {}
} }
} }
_ => break, _ => break,
}; };
period_before = token.matches_punct(ress::tokens::Punct::Period); period_before = token.matches_punct(Punct::Period);
function_before = matches!(&token, Token::Keyword(Keyword::Function(_)));
} }
if state != 3 { if state != 3 {
@ -287,9 +367,10 @@ fn extract_js_fn(js: &str, offset: usize, name: &str) -> Result<String, DeobfErr
let mut code = format!("var {};", &js[fn_range.clone()]); let mut code = format!("var {};", &js[fn_range.clone()]);
let rt = rquickjs::Runtime::new()?; let rt = rquickjs::Runtime::new()?;
for (ident, _) in idents.into_iter().filter(|(_, v)| *v == 1) { for (ident, _) in idents.into_iter().filter(|(_, v)| !v) {
let var_pattern_str = format!(r#"(^|[^\w$]){}\s*=[^=]"#, regex::escape(&ident)); let var_pattern_str = format!(r#"(^|[^\w$\.]){}\s*=[^=]"#, regex::escape(&ident));
let re = Regex::new(&var_pattern_str).unwrap(); let re = Regex::new(&var_pattern_str)
.map_err(|e| DeobfError::Other(format!("parsing regex for {ident}: {e}").into()))?;
let found_variable = re let found_variable = re
.captures_iter(js) .captures_iter(js)
.filter(|cap| { .filter(|cap| {
@ -347,13 +428,13 @@ fn extract_js_var(js: &str) -> Option<&str> {
if let Token::Punct(p) = &token { if let Token::Punct(p) = &token {
match p { match p {
ress::tokens::Punct::OpenBrace => braces.push(b'{'), Punct::OpenBrace => braces.push(b'{'),
ress::tokens::Punct::OpenBracket => braces.push(b'['), Punct::OpenBracket => braces.push(b'['),
ress::tokens::Punct::OpenParen => braces.push(b'('), Punct::OpenParen => braces.push(b'('),
ress::tokens::Punct::CloseBrace => close_brace(&mut braces, b'{')?, Punct::CloseBrace => close_brace(&mut braces, b'{')?,
ress::tokens::Punct::CloseBracket => close_brace(&mut braces, b'[')?, Punct::CloseBracket => close_brace(&mut braces, b'[')?,
ress::tokens::Punct::CloseParen => close_brace(&mut braces, b'(')?, Punct::CloseParen => close_brace(&mut braces, b'(')?,
ress::tokens::Punct::Comma | ress::tokens::Punct::SemiColon => { Punct::Comma | Punct::SemiColon => {
if braces.is_empty() { if braces.is_empty() {
end = it.span.start; end = it.span.start;
break; break;
@ -388,23 +469,19 @@ fn verify_fn(js_fn: &str, fn_name: &str) -> Result<(), DeobfError> {
})?; })?;
if res.is_empty() { if res.is_empty() {
return Err(DeobfError::Other("deobfuscation fn returned empty string")); return Err(DeobfError::Other(
"deobfuscation fn returned empty string".into(),
));
} }
if res.starts_with("enhanced_except_") || res.ends_with(&testinp) { if res.starts_with("enhanced_except_") || res.ends_with(&testinp) {
return Err(DeobfError::Other("nsig fn returned an exception")); return Err(DeobfError::Other("nsig fn returned an exception".into()));
} }
Ok(()) Ok(())
} }
fn get_nsig_fn(player_js: &str) -> Result<String, DeobfError> { fn get_nsig_fn(player_js: &str) -> Result<String, DeobfError> {
let extract_fn = |name: &str| -> Result<String, DeobfError> { let extract_fn = |name: &str| -> Result<String, DeobfError> {
let function_base = format!("{name}=function"); let code = extract_js_fn(player_js, name)?;
let offset = player_js
.find(&function_base)
.ok_or(DeobfError::Extraction("could not find function base"))?;
let code = extract_js_fn(player_js, offset, name)?;
let js_fn = format!("{}{}", code, caller_function(DEOBF_NSIG_FUNC_NAME, name)); let js_fn = format!("{}{}", code, caller_function(DEOBF_NSIG_FUNC_NAME, name));
tracing::trace!("nsig_fn: {js_fn}"); tracing::trace!("nsig_fn: {js_fn}");
verify_fn(&js_fn, DEOBF_NSIG_FUNC_NAME)?; verify_fn(&js_fn, DEOBF_NSIG_FUNC_NAME)?;
@ -472,7 +549,9 @@ mod tests {
std::fs::read_to_string(js_path).unwrap() std::fs::read_to_string(js_path).unwrap()
}); });
const SIG_DEOBF_FUNC: &str = r#"var qB={w8:function(a){a.reverse()},EC:function(a,b){var c=a[0];a[0]=a[b%a.length];a[b%a.length]=c},Np:function(a,b){a.splice(0,b)}};var Rva=function(a){a=a.split("");qB.Np(a,3);qB.w8(a,41);qB.EC(a,55);qB.Np(a,3);qB.w8(a,33);qB.Np(a,3);qB.EC(a,48);qB.EC(a,17);qB.EC(a,43);return a.join("")};var deobf_sig=Rva;"#; const SIG_DEOBF_FUNC: &str = r#"var qB={w8:function(a){a.reverse()},
EC:function(a,b){var c=a[0];a[0]=a[b%a.length];a[b%a.length]=c},
Np:function(a,b){a.splice(0,b)}}; var Rva=function(a){a=a.split("");qB.Np(a,3);qB.w8(a,41);qB.EC(a,55);qB.Np(a,3);qB.w8(a,33);qB.Np(a,3);qB.EC(a,48);qB.EC(a,17);qB.EC(a,43);return a.join("")};var deobf_sig=Rva;"#;
const NSIG_DEOBF_FUNC: &str = r#"var Vo=function(a){var b=a.split(""),c=[function(d,e,f){var h=f.length;d.forEach(function(l,m,n){this.push(n[m]=f[(f.indexOf(l)-f.indexOf(this[m])+m+h--)%f.length])},e.split(""))}, const NSIG_DEOBF_FUNC: &str = r#"var Vo=function(a){var b=a.split(""),c=[function(d,e,f){var h=f.length;d.forEach(function(l,m,n){this.push(n[m]=f[(f.indexOf(l)-f.indexOf(this[m])+m+h--)%f.length])},e.split(""))},
928409064,-595856984,1403221911,653089124,-168714481,-1883008765,158931990,1346921902,361518508,1403221911,-362174697,-233641452,function(){for(var d=64,e=[];++d-e.length-32;){switch(d){case 91:d=44;continue;case 123:d=65;break;case 65:d-=18;continue;case 58:d=96;continue;case 46:d=95}e.push(String.fromCharCode(d))}return e}, 928409064,-595856984,1403221911,653089124,-168714481,-1883008765,158931990,1346921902,361518508,1403221911,-362174697,-233641452,function(){for(var d=64,e=[];++d-e.length-32;){switch(d){case 91:d=44;continue;case 123:d=65;break;case 65:d-=18;continue;case 58:d=96;continue;case 46:d=95}e.push(String.fromCharCode(d))}return e},
b,158931990,791141857,-907319795,-1776185924,1595027902,-829736173,function(d,e){e=(e%d.length+d.length)%d.length;d.splice(0,1,d.splice(e,1,d[0])[0])}, b,158931990,791141857,-907319795,-1776185924,1595027902,-829736173,function(d,e){e=(e%d.length+d.length)%d.length;d.splice(0,1,d.splice(e,1,d[0])[0])},
@ -525,7 +604,7 @@ c[36](c[8],c[32]),c[20](c[25],c[10]),c[2](c[22],c[8]),c[32](c[20],c[16]),c[32](c
#[test] #[test]
fn t_extract_js_fn() { fn t_extract_js_fn() {
let base_js = "Wka = function(d){let x=10/2;return /,,[/,913,/](,)}/}let a = 42;"; let base_js = "Wka = function(d){let x=10/2;return /,,[/,913,/](,)}/}let a = 42;";
let res = extract_js_fn(base_js, 0, "Wka").unwrap(); let res = extract_js_fn(base_js, "Wka").unwrap();
assert_eq!( assert_eq!(
res, res,
"var Wka = function(d){let x=10/2;return /,,[/,913,/](,)}/};" "var Wka = function(d){let x=10/2;return /,,[/,913,/](,)}/};"
@ -536,7 +615,7 @@ c[36](c[8],c[32]),c[20](c[25],c[10]),c[2](c[22],c[8]),c[32](c[20],c[16]),c[32](c
fn t_extract_js_fn_eviljs() { fn t_extract_js_fn_eviljs() {
// Evil JavaScript code containing braces within strings and regular expressions // Evil JavaScript code containing braces within strings and regular expressions
let base_js = "Wka = function(d){var x = [/,,/,913,/(,)}/,\"abcdef}\\\"\",];var y = 10/2/1;return x[1][y];}//some={}random-padding+;"; let base_js = "Wka = function(d){var x = [/,,/,913,/(,)}/,\"abcdef}\\\"\",];var y = 10/2/1;return x[1][y];}//some={}random-padding+;";
let res = extract_js_fn(base_js, 0, "Wka").unwrap(); let res = extract_js_fn(base_js, "Wka").unwrap();
assert_eq!( assert_eq!(
res, res,
"var Wka = function(d){var x = [/,,/,913,/(,)}/,\"abcdef}\\\"\",];var y = 10/2/1;return x[1][y];};" "var Wka = function(d){var x = [/,,/,913,/(,)}/,\"abcdef}\\\"\",];var y = 10/2/1;return x[1][y];};"
@ -545,33 +624,43 @@ c[36](c[8],c[32]),c[20](c[25],c[10]),c[2](c[22],c[8]),c[32](c[20],c[16]),c[32](c
#[test] #[test]
fn t_extract_js_fn_outside_vars() { fn t_extract_js_fn_outside_vars() {
let base_js = "let a = 42;foo();var b=11;bar();Wka = function(d){var x=1+2+a*b;return x;}"; let base_js = "let a1 = 42;foo();var b1=11;var da=77;bar();Wka = function(da){var xy=1+2+a1*b1;return xy;}";
let res = extract_js_fn(base_js, 0, "Wka").unwrap(); let res = extract_js_fn(base_js, "Wka").unwrap();
// order of variables is non-reproducible // order of variables is non-reproducible
assert!( assert!(
res == "var a = 42; var b=11; var Wka = function(d){var x=1+2+a*b;return x;};" res == "var a1 = 42; var b1=11; var Wka = function(da){var xy=1+2+a1*b1;return xy;};"
|| res == "var b=11; var a = 42; var Wka = function(d){var x=1+2+a*b;return x;};", || res == "var b1=11; var a1 = 42; var Wka = function(da){var xy=1+2+a1*b1;return xy;};",
"got {res}" "got {res}"
); );
} }
#[test] #[test]
fn t_extract_js_fn_outside_vars2() { fn t_extract_js_fn_outside_vars2() {
let base_js = "{let a = {v1:1,v2:2}}foo();Wka = function(d){var x=1+2+a.v1;return x;}"; let base_js = "{let a1 = {v1:1,v2:2}}foo();Wka = function(d){var x=1+2+a1.v1;return x;}";
let res = extract_js_fn(base_js, 0, "Wka").unwrap(); let res = extract_js_fn(base_js, "Wka").unwrap();
assert_eq!( assert_eq!(
res, res,
"var a = {v1:1,v2:2}; var Wka = function(d){var x=1+2+a.v1;return x;};" "var a1 = {v1:1,v2:2}; var Wka = function(d){var x=1+2+a1.v1;return x;};"
); );
} }
#[test] #[test]
fn t_extract_js_fn_outside_vars3() { fn t_extract_js_fn_outside_vars3() {
let base_js = "Wka = function(d){var x=1+2+a[0];return x;};let a=[1,2,3]"; let base_js = "Wka = function(d){var x=1+2+a1[0];return x;};let a1=[1,2,3]";
let res = extract_js_fn(base_js, 0, "Wka").unwrap(); let res = extract_js_fn(base_js, "Wka").unwrap();
assert_eq!( assert_eq!(
res, res,
"var a=[1,2,3]; var Wka = function(d){var x=1+2+a[0];return x;};" "var a1=[1,2,3]; var Wka = function(d){var x=1+2+a1[0];return x;};"
);
}
#[test]
fn t_extract_js_fn_outside_vars4() {
let base_js = "let a0=123456;let a1=function(a){return a};let Wka = function(d){var x=1+2+a1();return x;}";
let res = extract_js_fn(base_js, "Wka").unwrap();
assert_eq!(
res,
"var a1=function(a){return a}; var Wka = function(d){var x=1+2+a1();return x;};"
); );
} }
@ -625,65 +714,86 @@ c[36](c[8],c[32]),c[20](c[25],c[10]),c[2](c[22],c[8]),c[32](c[20],c[16]),c[32](c
} }
// Test cases from https://github.com/yt-dlp/yt-dlp/blob/master/test/test_youtube_signature.py // Test cases from https://github.com/yt-dlp/yt-dlp/blob/master/test/test_youtube_signature.py
#[rstest]
#[case("6ed0d907", "AOq0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xx8j7v1pDL2QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0")]
#[case("3bb1f723", "MyOSJXtKI3m-uME_jv7-pT12gOFC02RFkGoqWpzE0Cs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA")]
#[case("2f1832d2", "0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xxAj7v1pDL0QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJ2OySqa0q")]
#[tokio::test] #[tokio::test]
#[traced_test] #[traced_test]
async fn sig_tests(#[case] js_hash: &str, #[case] exp_sig: &str) { async fn sig_tests() {
let cases = [
("6ed0d907", "AOq0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xx8j7v1pDL2QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJoOySqa0"),
("3bb1f723", "MyOSJXtKI3m-uME_jv7-pT12gOFC02RFkGoqWpzE0Cs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA"),
("2f1832d2", "0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xxAj7v1pDL0QwbdV96sCIEzpWqMGkFR20CFOg51Tp-7vj_EMu-m37KtXJ2OySqa0q"),
("643afba4", "AAOAOq0QJ8wRAIgXmPlOPSBkkUs1bYFYlJCfe29xx8j7vgpDL0QwbdV06sCIEzpWqMGkFR20CFOS21Tp-7vj_EMu-m37KtXJoOy1"),
("363db69b", "0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpz2ICs6EVdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA"),
];
for (js_hash, exp_sig) in cases {
let span = tracing::span!(tracing::Level::ERROR, "sig_test", js_hash);
let _enter = span.enter();
let (js_url, js_path) = player_js_file(js_hash).await; let (js_url, js_path) = player_js_file(js_hash).await;
let player_js = std::fs::read_to_string(js_path).unwrap(); let player_js = std::fs::read_to_string(js_path).unwrap();
let deobf_data = DeobfData::extract_fns(&js_url, &player_js).unwrap(); let deobf_data = DeobfData::extract_fns(&js_url, &player_js).unwrap();
let deobf = Deobfuscator::new(&deobf_data).unwrap(); let deobf = Deobfuscator::new(&deobf_data).unwrap();
let deobf_sig = deobf.deobfuscate_sig("2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA").unwrap(); let deobf_sig = deobf.deobfuscate_sig("2aq0aqSyOoJXtK73m-uME_jv7-pT15gOFC02RFkGMqWpzEICs69VdbwQ0LDp1v7j8xx92efCJlYFYb1sUkkBSPOlPmXgIARw8JQ0qOAOAA").unwrap();
assert_eq!(deobf_sig, exp_sig, "js: {js_hash}"); assert_eq!(deobf_sig, exp_sig, "[{js_hash}]");
}
} }
#[rstest]
#[case("7862ca1f", "X_LCxVDjAavgE5t", "yxJ1dM6iz5ogUg")]
#[case("9216d1f7", "SLp9F5bwjAdhE9F-", "gWnb9IK2DJ8Q1w")]
#[case("f8cb7a3b", "oBo2h5euWy6osrUt", "ivXHpm7qJjJN")]
#[case("2dfe380c", "oBo2h5euWy6osrUt", "3DIBbn3qdQ")]
#[case("f1ca6900", "cu3wyu6LQn2hse", "jvxetvmlI9AN9Q")]
#[case("8040e515", "wvOFaY-yjgDuIEg5", "HkfBFDHmgw4rsw")]
#[case("e06dea74", "AiuodmaDDYw8d3y4bf", "ankd8eza2T6Qmw")]
#[case("5dd88d1d", "kSxKFLeqzv_ZyHSAt", "n8gS8oRlHOxPFA")]
#[case("324f67b9", "xdftNy7dh9QGnhW", "22qLGxrmX8F1rA")]
#[case("4c3f79c5", "TDCstCG66tEAO5pR9o", "dbxNtZ14c-yWyw")]
#[case("c81bbb4a", "gre3EcLurNY2vqp94", "Z9DfGxWP115WTg")]
#[case("1f7d5369", "batNX7sYqIJdkJ", "IhOkL_zxbkOZBw")]
#[case("009f1d77", "5dwFHw8aFWQUQtffRq", "audescmLUzI3jw")]
#[case("dc0c6770", "5EHDMgYLV6HPGk_Mu-kk", "n9lUJLHbxUI0GQ")]
#[case("113ca41c", "cgYl-tlYkhjT7A", "hI7BBr2zUgcmMg")]
#[case("c57c113c", "M92UUMHa8PdvPd3wyM", "3hPqLJsiNZx7yA")]
#[case("5a3b6271", "B2j7f_UPT4rfje85Lu_e", "m5DmNymaGQ5RdQ")]
#[case("7a062b77", "NRcE3y3mVtm_cV-W", "VbsCYUATvqlt5w")]
#[case("dac945fd", "o8BkRxXhuYsBCWi6RplPdP", "3Lx32v_hmzTm6A")]
#[case("6f20102c", "lE8DhoDmKqnmJJ", "pJTTX6XyJP2BYw")]
#[case("cfa9e7cb", "aCi3iElgd2kq0bxVbQ", "QX1y8jGb2IbZ0w")]
#[case("8c7583ff", "1wWCVpRR96eAmMI87L", "KSkWAVv1ZQxC3A")]
#[case("b7910ca8", "_hXMCwMt9qE310D", "LoZMgkkofRMCZQ")]
#[case("590f65a6", "1tm7-g_A9zsI8_Lay_", "xI4Vem4Put_rOg")]
#[case("b22ef6e7", "b6HcntHGkvBLk_FRf", "kNPW6A7FyP2l8A")]
#[case("3400486c", "lL46g3XifCKUZn1Xfw", "z767lhet6V2Skl")]
#[case("20dfca59", "-fLCxedkAk4LUTK2", "O8kfRq1y1eyHGw")]
#[case("b12cc44b", "keLa5R2U00sR9SQK", "N1OGyujjEwMnLw")]
#[case("3bb1f723", "gK15nzVyaXE9RsMP3z", "ZFFWFLPWx9DEgQ")]
#[case("2f1832d2", "YWt1qdbe8SAfkoPHW5d", "RrRjWQOJmBiP")]
#[case("19d2ae9d", "YWt1qdbe8SAfkoPHW5d", "CS6dVTYzpZrAZ5TD")]
#[tokio::test] #[tokio::test]
#[traced_test] #[traced_test]
async fn nsig_tests(#[case] js_hash: &str, #[case] nsig_in: &str, #[case] expect: &str) { async fn nsig_tests() {
let cases = [
("7862ca1f", "X_LCxVDjAavgE5t", "yxJ1dM6iz5ogUg"),
("9216d1f7", "SLp9F5bwjAdhE9F-", "gWnb9IK2DJ8Q1w"),
("f8cb7a3b", "oBo2h5euWy6osrUt", "ivXHpm7qJjJN"),
("2dfe380c", "oBo2h5euWy6osrUt", "3DIBbn3qdQ"),
("f1ca6900", "cu3wyu6LQn2hse", "jvxetvmlI9AN9Q"),
("8040e515", "wvOFaY-yjgDuIEg5", "HkfBFDHmgw4rsw"),
("e06dea74", "AiuodmaDDYw8d3y4bf", "ankd8eza2T6Qmw"),
("5dd88d1d", "kSxKFLeqzv_ZyHSAt", "n8gS8oRlHOxPFA"),
("324f67b9", "xdftNy7dh9QGnhW", "22qLGxrmX8F1rA"),
("4c3f79c5", "TDCstCG66tEAO5pR9o", "dbxNtZ14c-yWyw"),
("c81bbb4a", "gre3EcLurNY2vqp94", "Z9DfGxWP115WTg"),
("1f7d5369", "batNX7sYqIJdkJ", "IhOkL_zxbkOZBw"),
("009f1d77", "5dwFHw8aFWQUQtffRq", "audescmLUzI3jw"),
("dc0c6770", "5EHDMgYLV6HPGk_Mu-kk", "n9lUJLHbxUI0GQ"),
("113ca41c", "cgYl-tlYkhjT7A", "hI7BBr2zUgcmMg"),
("c57c113c", "M92UUMHa8PdvPd3wyM", "3hPqLJsiNZx7yA"),
("5a3b6271", "B2j7f_UPT4rfje85Lu_e", "m5DmNymaGQ5RdQ"),
("7a062b77", "NRcE3y3mVtm_cV-W", "VbsCYUATvqlt5w"),
("dac945fd", "o8BkRxXhuYsBCWi6RplPdP", "3Lx32v_hmzTm6A"),
("6f20102c", "lE8DhoDmKqnmJJ", "pJTTX6XyJP2BYw"),
("cfa9e7cb", "aCi3iElgd2kq0bxVbQ", "QX1y8jGb2IbZ0w"),
("8c7583ff", "1wWCVpRR96eAmMI87L", "KSkWAVv1ZQxC3A"),
("b7910ca8", "_hXMCwMt9qE310D", "LoZMgkkofRMCZQ"),
("590f65a6", "1tm7-g_A9zsI8_Lay_", "xI4Vem4Put_rOg"),
("b22ef6e7", "b6HcntHGkvBLk_FRf", "kNPW6A7FyP2l8A"),
("3400486c", "lL46g3XifCKUZn1Xfw", "z767lhet6V2Skl"),
("20dfca59", "-fLCxedkAk4LUTK2", "O8kfRq1y1eyHGw"),
("b12cc44b", "keLa5R2U00sR9SQK", "N1OGyujjEwMnLw"),
("3bb1f723", "gK15nzVyaXE9RsMP3z", "ZFFWFLPWx9DEgQ"),
("2f1832d2", "YWt1qdbe8SAfkoPHW5d", "RrRjWQOJmBiP"),
("19d2ae9d", "YWt1qdbe8SAfkoPHW5d", "CS6dVTYzpZrAZ5TD"),
("e7567ecf", "Sy4aDGc0VpYRR9ew_", "5UPOT1VhoZxNLQ"),
("d50f54ef", "Ha7507LzRmH3Utygtj", "XFTb2HoeOE5MHg"),
("074a8365", "Ha7507LzRmH3Utygtj", "ufTsrE0IVYrkl8v"),
("643afba4", "N5uAlLqm0eg1GyHO", "dCBQOejdq5s-ww"),
("69f581a5", "-qIP447rVlTTwaZjY", "KNcGOksBAvwqQg"),
("363db69b", "eWYu5d5YeY_4LyEDc", "XJQqf-N7Xra3gg"),
];
for (js_hash, nsig_in, exp_nsig) in cases {
let span = tracing::span!(tracing::Level::ERROR, "nsig_test", js_hash);
let _enter = span.enter();
let (js_url, js_path) = player_js_file(js_hash).await; let (js_url, js_path) = player_js_file(js_hash).await;
let player_js = std::fs::read_to_string(js_path).unwrap(); let player_js = std::fs::read_to_string(js_path).unwrap();
let deobf_data = DeobfData::extract_fns(&js_url, &player_js).unwrap(); let deobf_data = DeobfData::extract_fns(&js_url, &player_js).expect(js_hash);
let deobf = Deobfuscator::new(&deobf_data).unwrap(); let deobf = Deobfuscator::new(&deobf_data).expect(js_hash);
let deobf_nsig = deobf.deobfuscate_nsig(nsig_in).unwrap(); let deobf_nsig = deobf.deobfuscate_nsig(nsig_in).expect(js_hash);
assert_eq!(deobf_nsig, expect, "js: {js_hash}"); assert_eq!(deobf_nsig, exp_nsig, "[{js_hash}]");
}
} }
#[tokio::test] #[tokio::test]
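The rewritten `extract_js_fn` above locates the `name = function(...)` assignment with a regex, then walks the token stream while tracking separate brace/paren/bracket depths and recording which identifiers are defined inside the function, so that only genuinely external variables are pulled in afterwards. The core "find the matching closing brace" idea, reduced to a character-level sketch: the real code uses the `ress` tokenizer precisely so that braces inside strings and regex literals (as exercised by the tests above) do not break the count, which this simplified version would not handle.

```rust
/// Return the byte range of `name=function(...){...}` in `js`, by balancing
/// `{`/`}` from the first opening brace after the match.
/// Character-level sketch only: unlike the tokenizer-based version, this is
/// fooled by braces inside string or regex literals.
fn find_fn_span(js: &str, name: &str) -> Option<std::ops::Range<usize>> {
    let start = js.find(&format!("{name}=function"))?;
    let body_open = start + js[start..].find('{')?;

    let mut depth = 0usize;
    for (i, c) in js[body_open..].char_indices() {
        match c {
            '{' => depth += 1,
            '}' => {
                depth -= 1;
                if depth == 0 {
                    return Some(start..body_open + i + 1);
                }
            }
            _ => {}
        }
    }
    None
}

fn main() {
    let js = "foo();Wka=function(d){if(d){return d+1}return 0};bar();";
    let span = find_fn_span(js, "Wka").unwrap();
    assert_eq!(&js[span], "Wka=function(d){if(d){return d+1}return 0}");
}
```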

View file

@ -151,6 +151,8 @@ pub enum AuthError {
} }
pub(crate) mod internal { pub(crate) mod internal {
use std::borrow::Cow;
use super::{Error, ExtractionError}; use super::{Error, ExtractionError};
/// Error that occurred during the initialization /// Error that occurred during the initialization
@ -168,7 +170,7 @@ pub(crate) mod internal {
Extraction(&'static str), Extraction(&'static str),
/// Unspecified error /// Unspecified error
#[error("error: {0}")] #[error("error: {0}")]
Other(&'static str), Other(Cow<'static, str>),
} }
impl From<DeobfError> for Error { impl From<DeobfError> for Error {
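Switching `DeobfError::Other` from `&'static str` to `Cow<'static, str>` keeps the existing static messages allocation-free while also allowing formatted messages (such as the regex-parse errors in the deobfuscator above) via `.into()`. A minimal sketch of the same pattern, assuming the `thiserror` crate as used in the error definitions above:

```rust
use std::borrow::Cow;

#[derive(Debug, thiserror::Error)]
enum DemoError {
    #[error("error: {0}")]
    Other(Cow<'static, str>),
}

fn main() {
    // Static message: borrowed, no allocation.
    let a = DemoError::Other("nsig fn returned an exception".into());
    // Formatted message: an owned String, carried by the same variant.
    let name = "Wka";
    let b = DemoError::Other(format!("parsing regex for {name}: bad pattern").into());
    println!("{a}\n{b}");
}
```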

View file

@ -38,7 +38,7 @@ pub struct Thumbnail {
pub enum UrlTarget { pub enum UrlTarget {
/// YouTube video /// YouTube video
/// ///
/// Example: <youtube.com/watch?v=ZeerrnuLi5E> /// Example: <https://youtube.com/watch?v=ZeerrnuLi5E>
Video { Video {
/// Unique YouTube video ID /// Unique YouTube video ID
id: String, id: String,
@ -1234,6 +1234,8 @@ pub struct MusicAlbum {
pub year: Option<u16>, pub year: Option<u16>,
/// Is the album by 'Various artists'? /// Is the album by 'Various artists'?
pub by_va: bool, pub by_va: bool,
/// Number of album tracks
pub track_count: u16,
/// Album tracks /// Album tracks
pub tracks: Vec<TrackItem>, pub tracks: Vec<TrackItem>,
/// Album variants /// Album variants

View file

@ -53,6 +53,8 @@ pub(crate) struct Entry {
pub chan_prefix: &'static str, pub chan_prefix: &'static str,
/// Channel name suffix on playlist pages /// Channel name suffix on playlist pages
pub chan_suffix: &'static str, pub chan_suffix: &'static str,
/// "Other versions" title on album pages
pub album_versions_title: &'static str,
} }
#[rustfmt::skip] #[rustfmt::skip]
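Each locale entry below now carries the translated "Other versions" heading, which presumably lets the album extractor recognise the album-variants shelf regardless of UI language. A hedged sketch of how such a lookup might be used; the `Entry` here is a pared-down stand-in, not the generated struct, and the German string is taken from the entry further down:

```rust
// Pared-down stand-in for the generated dictionary entry.
struct Entry {
    album_versions_title: &'static str,
}

fn entry_de() -> Entry {
    Entry { album_versions_title: "Weitere Versionen" }
}

/// Does a shelf title on an album page mark the "other versions" section?
fn is_album_versions_shelf(entry: &Entry, shelf_title: &str) -> bool {
    shelf_title.trim() == entry.album_versions_title
}

fn main() {
    let e = entry_de();
    assert!(is_album_versions_shelf(&e, "Weitere Versionen"));
    assert!(!is_album_versions_shelf(&e, "Titel"));
}
```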
@ -183,6 +185,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "deur", chan_prefix: "deur",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Ander weergawes",
}, },
Language::Am => Entry { Language::Am => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -310,6 +313,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "", chan_suffix: "",
album_versions_title: "ሌሎች ስሪቶች",
}, },
Language::Ar => Entry { Language::Ar => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -445,6 +449,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "بواسطة", chan_prefix: "بواسطة",
chan_suffix: "", chan_suffix: "",
album_versions_title: "إصدارات أخرى",
}, },
Language::As => Entry { Language::As => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -567,6 +572,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "ৰ দ\u{9cd}\u{9be}\u{9be}", chan_suffix: "ৰ দ\u{9cd}\u{9be}\u{9be}",
album_versions_title: "অন\u{9cd}য সংস\u{9cd}কৰণ",
}, },
Language::Az => Entry { Language::Az => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -682,6 +688,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "by", chan_prefix: "by",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Digər versiyalar",
}, },
Language::Be => Entry { Language::Be => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -829,6 +836,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "ад", chan_prefix: "ад",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Іншыя версіі",
}, },
Language::Bg => Entry { Language::Bg => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -945,6 +953,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "от", chan_prefix: "от",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Други версии",
}, },
Language::Bn => Entry { Language::Bn => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -1062,6 +1071,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: ",", chan_prefix: ",",
chan_suffix: "\u{9cd}\u{9be}\u{9be}", chan_suffix: "\u{9cd}\u{9be}\u{9be}",
album_versions_title: "অন\u{9cd}য সংস\u{9cd}করণগ\u{9c1}লি",
}, },
Language::Bs => Entry { Language::Bs => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -1201,6 +1211,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "od", chan_prefix: "od",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Druge verzije",
}, },
Language::Ca => Entry { Language::Ca => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -1325,6 +1336,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "de:", chan_prefix: "de:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Altres versions",
}, },
Language::Cs => Entry { Language::Cs => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -1455,6 +1467,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "autor:", chan_prefix: "autor:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Jiné verze",
}, },
Language::Da => Entry { Language::Da => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -1579,6 +1592,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "af", chan_prefix: "af",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Andre versioner",
}, },
Language::De => Entry { Language::De => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -1700,6 +1714,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "von", chan_prefix: "von",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Weitere Versionen",
}, },
Language::El => Entry { Language::El => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -1830,6 +1845,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "από το χρήστη", chan_prefix: "από το χρήστη",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Άλλες εκτελέσεις",
}, },
Language::En | Language::EnGb | Language::EnIn => Entry { Language::En | Language::EnGb | Language::EnIn => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -1971,6 +1987,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "by", chan_prefix: "by",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Other versions",
}, },
Language::Es => Entry { Language::Es => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -2098,6 +2115,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "de", chan_prefix: "de",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Otras versiones",
}, },
Language::EsUs | Language::Es419 => Entry { Language::EsUs | Language::Es419 => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -2226,6 +2244,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "de", chan_prefix: "de",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Otras versiones",
}, },
Language::Et => Entry { Language::Et => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -2351,6 +2370,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "kanalilt", chan_prefix: "kanalilt",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Teised versioonid",
}, },
Language::Eu => Entry { Language::Eu => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -2467,6 +2487,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "egilea:", chan_prefix: "egilea:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Beste bertsio batzuk",
}, },
Language::Fa => Entry { Language::Fa => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -2574,6 +2595,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "توسط", chan_prefix: "توسط",
chan_suffix: "", chan_suffix: "",
album_versions_title: "نسخه\u{200c}های دیگر",
}, },
Language::Fi => Entry { Language::Fi => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -2693,6 +2715,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "tekijä:", chan_prefix: "tekijä:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Muut versiot",
}, },
Language::Fil => Entry { Language::Fil => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -2810,6 +2833,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "ni/ng", chan_prefix: "ni/ng",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Iba pang bersyon",
}, },
Language::Fr | Language::FrCa => Entry { Language::Fr | Language::FrCa => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -2941,6 +2965,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "de", chan_prefix: "de",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Autres versions",
}, },
Language::Gl => Entry { Language::Gl => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -3065,6 +3090,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "de", chan_prefix: "de",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Outras versións",
}, },
Language::Gu => Entry { Language::Gu => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -3170,6 +3196,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{acd}વારા", chan_suffix: "\u{acd}વારા",
album_versions_title: "અન\u{acd}ય વર\u{acd}ઝન",
}, },
Language::Hi => Entry { Language::Hi => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -3286,6 +3313,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{947}\u{93c}रिए", chan_suffix: "\u{947}\u{93c}रिए",
album_versions_title: "अन\u{94d}य वर\u{94d}शन",
}, },
Language::Hr => Entry { Language::Hr => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -3425,6 +3453,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "omogućio kanal", chan_prefix: "omogućio kanal",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Druge verzije",
}, },
Language::Hu => Entry { Language::Hu => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -3554,6 +3583,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "csatornától", chan_suffix: "csatornától",
album_versions_title: "Más verziók",
}, },
Language::Hy => Entry { Language::Hy => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -3676,6 +3706,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "հեղինակ՝", chan_prefix: "հեղինակ՝",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Այլ տարբերակներ",
}, },
Language::Id => Entry { Language::Id => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -3794,6 +3825,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "oleh", chan_prefix: "oleh",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Versi lainnya",
}, },
Language::Is => Entry { Language::Is => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -3928,6 +3960,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "eftir", chan_prefix: "eftir",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Aðrar útgáfur",
}, },
Language::It => Entry { Language::It => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -4060,6 +4093,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "di", chan_prefix: "di",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Altre versioni",
}, },
Language::Iw => Entry { Language::Iw => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -4198,6 +4232,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "מאת", chan_prefix: "מאת",
chan_suffix: "", chan_suffix: "",
album_versions_title: "גרסאות אחרות",
}, },
Language::Ja => Entry { Language::Ja => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -4278,6 +4313,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "作成者:", chan_prefix: "作成者:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "他のバージョン",
}, },
Language::Ka => Entry { Language::Ka => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -4400,6 +4436,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "-ის მიერ", chan_suffix: "-ის მიერ",
album_versions_title: "სხვა ვერსიები",
}, },
Language::Kk => Entry { Language::Kk => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -4523,6 +4560,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "қосқан", chan_prefix: "қосқан",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Басқа нұсқалары",
}, },
Language::Km => Entry { Language::Km => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -4623,6 +4661,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "ដោយ", chan_prefix: "ដោយ",
chan_suffix: "", chan_suffix: "",
album_versions_title: "\u{17d2}រភេទផ\u{17d2}សេងៗ",
}, },
Language::Kn => Entry { Language::Kn => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -4749,6 +4788,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "ಚಾನಲ\u{ccd}\u{200c}\u{cbf}ಂದ", chan_suffix: "ಚಾನಲ\u{ccd}\u{200c}\u{cbf}ಂದ",
album_versions_title: "ಇತರ ಆವೃತ\u{ccd}\u{cbf}ಗಳು",
}, },
Language::Ko => Entry { Language::Ko => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -4832,6 +4872,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "게시자:", chan_prefix: "게시자:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "다른 버전",
}, },
Language::Ky => Entry { Language::Ky => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -4950,6 +4991,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "каналы аркылуу", chan_suffix: "каналы аркылуу",
album_versions_title: "Башка версиялар",
}, },
Language::Lo => Entry { Language::Lo => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -5076,6 +5118,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "ໂດຍ", chan_prefix: "ໂດຍ",
chan_suffix: "", chan_suffix: "",
album_versions_title: "ເວ\u{eb5}\u{eb1}ນອ\u{eb7}\u{ec8}ນໆ",
}, },
Language::Lt => Entry { Language::Lt => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -5210,6 +5253,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "pridėjo", chan_prefix: "pridėjo",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Kitos versijos",
}, },
Language::Lv => Entry { Language::Lv => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -5344,6 +5388,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "autors:", chan_prefix: "autors:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Citas versijas",
}, },
Language::Mk => Entry { Language::Mk => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -5471,6 +5516,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "од", chan_prefix: "од",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Други верзии",
}, },
Language::Ml => Entry { Language::Ml => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -5585,6 +5631,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{d41}ഖേന", chan_suffix: "\u{d41}ഖേന",
album_versions_title: "മറ\u{d4d}\u{d4d} പതിപ\u{d4d}\u{d41}കൾ",
}, },
Language::Mn => Entry { Language::Mn => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -5689,6 +5736,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "сувгийн нэр:", chan_prefix: "сувгийн нэр:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Бусад хувилбар",
}, },
Language::Mr => Entry { Language::Mr => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -5813,6 +5861,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{94d}वार\u{947}", chan_suffix: "\u{94d}वार\u{947}",
album_versions_title: "इतर आव\u{943}\u{94d}\u{94d}या",
}, },
Language::Ms => Entry { Language::Ms => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -5926,6 +5975,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "oleh", chan_prefix: "oleh",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Versi lain",
}, },
Language::My => Entry { Language::My => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -6046,6 +6096,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{103e}", chan_suffix: "\u{103e}",
album_versions_title: "အခြား ဗားရ\u{103e}\u{103a}းများ",
}, },
Language::Ne => Entry { Language::Ne => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -6149,6 +6200,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{94d}वारा", chan_suffix: "\u{94d}वारा",
album_versions_title: "अन\u{94d}य स\u{902}\u{94d}करणहर\u{942}",
}, },
Language::Nl => Entry { Language::Nl => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -6271,6 +6323,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "door", chan_prefix: "door",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Andere versies",
}, },
Language::No => Entry { Language::No => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -6399,6 +6452,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "av", chan_prefix: "av",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Andre versjoner",
}, },
Language::Or => Entry { Language::Or => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -6514,6 +6568,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{b4d}\u{b3e}\u{b3e}", chan_suffix: "\u{b4d}\u{b3e}\u{b3e}",
album_versions_title: "ଅନ\u{b4d}ୟ ସଂସ\u{b4d}କରଣଗ\u{b41}\u{b3c}\u{b3f}",
}, },
Language::Pa => Entry { Language::Pa => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -6629,6 +6684,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{a71}\u{a4b}\u{a02}", chan_suffix: "\u{a71}\u{a4b}\u{a02}",
album_versions_title: "\u{a4b}ਰ ਵਰਜਨ",
}, },
Language::Pl => Entry { Language::Pl => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -6774,6 +6830,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "autor:", chan_prefix: "autor:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Inne wersje",
}, },
Language::Pt => Entry { Language::Pt => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -6903,6 +6960,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "por", chan_prefix: "por",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Outras versões",
}, },
Language::PtPt => Entry { Language::PtPt => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -7015,6 +7073,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "de", chan_prefix: "de",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Outras versões",
}, },
Language::Ro => Entry { Language::Ro => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -7143,6 +7202,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "de", chan_prefix: "de",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Alte versiuni",
}, },
Language::Ru => Entry { Language::Ru => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -7286,6 +7346,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Другие версии",
}, },
Language::Si => Entry { Language::Si => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -7397,6 +7458,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{dd2}\u{dd2}\u{dca}", chan_suffix: "\u{dd2}\u{dd2}\u{dca}",
album_versions_title: "අනෙක\u{dd4}\u{dca} අන\u{dd4}\u{dcf}දයන\u{dca}",
}, },
Language::Sk => Entry { Language::Sk => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -7527,6 +7589,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "Autori:", chan_prefix: "Autori:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Ďalšie verzie",
}, },
Language::Sl => Entry { Language::Sl => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -7676,6 +7739,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "kanal", chan_prefix: "kanal",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Druge različice",
}, },
Language::Sq => Entry { Language::Sq => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -7796,6 +7860,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "nga", chan_prefix: "nga",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Versione të tjera",
}, },
Language::Sr => Entry { Language::Sr => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -7926,6 +7991,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "са канала", chan_prefix: "са канала",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Друге верзије",
}, },
Language::SrLatn => Entry { Language::SrLatn => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -8056,6 +8122,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "sa kanala", chan_prefix: "sa kanala",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Druge verzije",
}, },
Language::Sv => Entry { Language::Sv => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -8178,6 +8245,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "från", chan_prefix: "från",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Andra versioner",
}, },
Language::Sw => Entry { Language::Sw => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -8291,6 +8359,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "kutoka", chan_prefix: "kutoka",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Matoleo mengine",
}, },
Language::Ta => Entry { Language::Ta => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -8421,6 +8490,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "வழங\u{bcd}கியவர\u{bcd}:", chan_prefix: "வழங\u{bcd}கியவர\u{bcd}:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "பிற பதிப\u{bcd}புகள\u{bcd}",
}, },
Language::Te => Entry { Language::Te => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -8547,6 +8617,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "\u{c3e}\u{c46}\u{c4d}\u{c4d}\u{c3e}\u{c3e}", chan_suffix: "\u{c3e}\u{c46}\u{c4d}\u{c4d}\u{c3e}\u{c3e}",
album_versions_title: "ఇతర వ\u{c46}\u{c4d}షన\u{c4d}\u{200c}లు",
}, },
Language::Th => Entry { Language::Th => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -8677,6 +8748,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "โดย", chan_prefix: "โดย",
chan_suffix: "", chan_suffix: "",
album_versions_title: "เวอร\u{e4c}\u{e31}นอ\u{e37}\u{e48}นๆ",
}, },
Language::Tr => Entry { Language::Tr => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -8797,6 +8869,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "tarafından", chan_suffix: "tarafından",
album_versions_title: "Diğer versiyonlar",
}, },
Language::Uk => Entry { Language::Uk => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -8945,6 +9018,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "власник:", chan_prefix: "власник:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Інші версії",
}, },
Language::Ur => Entry { Language::Ur => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -9070,6 +9144,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "منجانب", chan_prefix: "منجانب",
chan_suffix: "", chan_suffix: "",
album_versions_title: "دیگر ورژنز",
}, },
Language::Uz => Entry { Language::Uz => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -9184,6 +9259,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "muallif:", chan_prefix: "muallif:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Boshqa versiyalari",
}, },
Language::Vi => Entry { Language::Vi => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -9265,6 +9341,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "của", chan_prefix: "của",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Các phiên bản khác",
}, },
Language::ZhCn => Entry { Language::ZhCn => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -9362,6 +9439,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "创建者:", chan_prefix: "创建者:",
chan_suffix: "", chan_suffix: "",
album_versions_title: "其他版本",
}, },
Language::ZhHk => Entry { Language::ZhHk => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -9443,6 +9521,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "來自", chan_prefix: "來自",
chan_suffix: "", chan_suffix: "",
album_versions_title: "其他版本",
}, },
Language::ZhTw => Entry { Language::ZhTw => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -9523,6 +9602,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "", chan_prefix: "",
chan_suffix: "建立", chan_suffix: "建立",
album_versions_title: "其他版本",
}, },
Language::Zu => Entry { Language::Zu => Entry {
timeago_tokens: ::phf::Map { timeago_tokens: ::phf::Map {
@ -9658,6 +9738,7 @@ pub(crate) fn entry(lang: Language) -> Entry {
}, },
chan_prefix: "ka-", chan_prefix: "ka-",
chan_suffix: "", chan_suffix: "",
album_versions_title: "Ezinye izinguqulo",
}, },
} }
} }

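The hunks above add a localized `album_versions_title` string to every generated language `Entry`. A minimal, hedged sketch of how such a field could be consumed when matching a shelf title on an album page (the `Entry` struct here is reduced to the new field; the real generated type has many more, and the matching logic is an assumption, not the extractor's actual code):

```rust
// `Entry` is reduced to the new field for this sketch; the generated struct has many more.
struct Entry {
    album_versions_title: &'static str,
}

// Hypothetical helper: a shelf is treated as the "Other versions" section when its title
// matches the localized string for the active language.
fn is_album_versions_shelf(entry: &Entry, shelf_title: &str) -> bool {
    shelf_title.trim() == entry.album_versions_title
}

fn main() {
    let no = Entry { album_versions_title: "Andre versjoner" };
    assert!(is_album_versions_shelf(&no, "Andre versjoner"));
}
```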
View file

@ -75,10 +75,10 @@ pub fn get_cg_from_fancy_regexes(regexes: &[&str], text: &str, cg_name: &str) ->
/// Generate a random string with given length and byte charset. /// Generate a random string with given length and byte charset.
fn random_string(charset: &[u8], length: usize) -> String { fn random_string(charset: &[u8], length: usize) -> String {
let mut result = String::with_capacity(length); let mut result = String::with_capacity(length);
let mut rng = rand::thread_rng(); let mut rng = rand::rng();
for _ in 0..length { for _ in 0..length {
result.push(char::from(charset[rng.gen_range(0..charset.len())])); result.push(char::from(charset[rng.random_range(0..charset.len())]));
} }
result result
@ -90,14 +90,14 @@ pub fn generate_content_playback_nonce() -> String {
} }
pub fn random_uuid() -> String { pub fn random_uuid() -> String {
let mut rng = rand::thread_rng(); let mut rng = rand::rng();
format!( format!(
"{:08x}-{:04x}-{:04x}-{:04x}-{:012x}", "{:08x}-{:04x}-{:04x}-{:04x}-{:012x}",
rng.gen::<u32>(), rng.random::<u32>(),
rng.gen::<u16>(), rng.random::<u16>(),
rng.gen::<u16>(), rng.random::<u16>(),
rng.gen::<u16>(), rng.random::<u16>(),
rng.gen::<u64>() & 0xffff_ffff_ffff, rng.random::<u64>() & 0xffff_ffff_ffff,
) )
} }
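The two hunks above migrate the helpers to the rand 0.9 API: `rand::rng()` replaces `rand::thread_rng()`, and `random_range`/`random` replace `gen_range`/`gen`. A minimal sketch of the new API, mirroring the shape of the `random_string` helper (the function name and the example charset are illustrative only):

```rust
use rand::Rng;

// Same shape as the `random_string` helper above, written against the rand 0.9 names.
fn sample_string(charset: &[u8], length: usize) -> String {
    let mut rng = rand::rng();
    (0..length)
        .map(|_| char::from(charset[rng.random_range(0..charset.len())]))
        .collect()
}

fn main() {
    // Charset chosen only for the example.
    println!("{}", sample_string(b"abcdefghijklmnopqrstuvwxyz0123456789", 16));
}
```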
@ -229,7 +229,7 @@ pub fn retry_delay(
backoff_base: u32, backoff_base: u32,
) -> u32 { ) -> u32 {
let unjittered_delay = backoff_base.checked_pow(n_past_retries).unwrap_or(u32::MAX); let unjittered_delay = backoff_base.checked_pow(n_past_retries).unwrap_or(u32::MAX);
let jitter_factor = rand::thread_rng().gen_range(800..1500); let jitter_factor = rand::rng().random_range(800..1500);
let jittered_delay = unjittered_delay let jittered_delay = unjittered_delay
.checked_mul(jitter_factor) .checked_mul(jitter_factor)
.unwrap_or(u32::MAX); .unwrap_or(u32::MAX);

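The last hunk applies the same rand 0.9 rename inside the retry-delay calculation: the base delay grows as `backoff_base^n_past_retries` and is scaled by a random per-mille jitter factor in `800..1500`. A hedged sketch of that computation; the final division by 1000 is an assumption, since the hunk ends before the value is returned:

```rust
use rand::Rng;

fn retry_delay_sketch(n_past_retries: u32, backoff_base: u32) -> u32 {
    // Exponential backoff, saturating instead of overflowing.
    let unjittered_delay = backoff_base.checked_pow(n_past_retries).unwrap_or(u32::MAX);
    // Random per-mille jitter factor, as in the hunk above.
    let jitter_factor = rand::rng().random_range(800..1500);
    let jittered_delay = unjittered_delay
        .checked_mul(jitter_factor)
        .unwrap_or(u32::MAX);
    // Assumed normalization back to the base unit; not visible in the diff excerpt.
    jittered_delay / 1000
}

fn main() {
    println!("{}", retry_delay_sketch(2, 3));
}
```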
View file

@ -148,8 +148,8 @@ impl VisitorDataCache {
{ {
let vds = self.inner.visitor_data.read().unwrap(); let vds = self.inner.visitor_data.read().unwrap();
if !vds.is_empty() { if !vds.is_empty() {
let mut rng = rand::thread_rng(); let mut rng = rand::rng();
let vd = vds[rng.gen_range(0..vds.len())].to_owned(); let vd = vds[rng.random_range(0..vds.len())].to_owned();
tracing::debug!("visitor data {vd} picked from cache"); tracing::debug!("visitor data {vd} picked from cache");
return Ok(vd); return Ok(vd);
} }
@ -245,12 +245,21 @@ mod tests {
for _ in 0..4 { for _ in 0..4 {
cache.get().await.unwrap(); cache.get().await.unwrap();
} }
tokio::time::sleep(Duration::from_millis(1000)).await;
for _ in 0..3 {
tokio::time::sleep(Duration::from_millis(1000)).await;
{
let vd = cache.inner.visitor_data.read().unwrap();
if !vd.contains(&v1) {
break;
}
}
}
{ {
let vd = cache.inner.visitor_data.read().unwrap(); let vd = cache.inner.visitor_data.read().unwrap();
assert!(!vd.contains(&v1), "first token still present"); assert!(!vd.contains(&v1), "first token still present");
} }
assert_eq!(cache.get_pot(&v1), None); assert_eq!(cache.get_pot(&v1), None);
} }
} }

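The test change above replaces a single fixed sleep with a bounded polling loop that stops as soon as the first visitor-data token has been evicted. A hedged sketch of that pattern as a standalone helper (`state_reached` is a hypothetical closure standing in for the cache lookup in the real test):

```rust
use std::time::Duration;

// Poll up to `max_attempts` times, sleeping between checks, and stop early on success.
async fn wait_for(mut state_reached: impl FnMut() -> bool, max_attempts: u32) -> bool {
    for _ in 0..max_attempts {
        tokio::time::sleep(Duration::from_millis(1000)).await;
        if state_reached() {
            return true;
        }
    }
    false
}

#[tokio::main]
async fn main() {
    // Trivial usage: a condition that is immediately true returns on the first attempt.
    assert!(wait_for(|| true, 3).await);
}
```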
File diff suppressed because it is too large

View file

@ -0,0 +1,85 @@
{
"af": "Ander weergawes",
"am": "ሌሎች ስሪቶች",
"ar": "إصدارات أخرى",
"as": "অন্য সংস্কৰণ",
"az": "Digər versiyalar",
"be": "Іншыя версіі",
"bg": "Други версии",
"bn": "অন্য সংস্করণগুলি",
"bs": "Druge verzije",
"ca": "Altres versions",
"cs": "Jiné verze",
"da": "Andre versioner",
"de": "Weitere Versionen",
"el": "Άλλες εκτελέσεις",
"en": "Other versions",
"en-GB": "Other versions",
"en-IN": "Other versions",
"es": "Otras versiones",
"es-419": "Otras versiones",
"es-US": "Otras versiones",
"et": "Teised versioonid",
"eu": "Beste bertsio batzuk",
"fa": "نسخه‌های دیگر",
"fi": "Muut versiot",
"fil": "Iba pang bersyon",
"fr": "Autres versions",
"fr-CA": "Autres versions",
"gl": "Outras versións",
"gu": "અન્ય વર્ઝન",
"hi": "अन्य वर्शन",
"hr": "Druge verzije",
"hu": "Más verziók",
"hy": "Այլ տարբերակներ",
"id": "Versi lainnya",
"is": "Aðrar útgáfur",
"it": "Altre versioni",
"iw": "גרסאות אחרות",
"ja": "他のバージョン",
"ka": "სხვა ვერსიები",
"kk": "Басқа нұсқалары",
"km": "ប្រភេទផ្សេងៗ",
"kn": "ಇತರ ಆವೃತ್ತಿಗಳು",
"ko": "다른 버전",
"ky": "Башка версиялар",
"lo": "ເວີຊັນອື່ນໆ",
"lt": "Kitos versijos",
"lv": "Citas versijas",
"mk": "Други верзии",
"ml": "മറ്റ് പതിപ്പുകൾ",
"mn": "Бусад хувилбар",
"mr": "इतर आवृत्त्या",
"ms": "Versi lain",
"my": "အခြား ဗားရှင်းများ",
"ne": "अन्य संस्करणहरू",
"nl": "Andere versies",
"no": "Andre versjoner",
"or": "ଅନ୍ୟ ସଂସ୍କରଣଗୁଡ଼ିକ",
"pa": "ਹੋਰ ਵਰਜਨ",
"pl": "Inne wersje",
"pt": "Outras versões",
"pt-PT": "Outras versões",
"ro": "Alte versiuni",
"ru": "Другие версии",
"si": "අනෙකුත් අනුවාදයන්",
"sk": "Ďalšie verzie",
"sl": "Druge različice",
"sq": "Versione të tjera",
"sr": "Друге верзије",
"sr-Latn": "Druge verzije",
"sv": "Andra versioner",
"sw": "Matoleo mengine",
"ta": "பிற பதிப்புகள்",
"te": "ఇతర వెర్షన్‌లు",
"th": "เวอร์ชันอื่นๆ",
"tr": "Diğer versiyonlar",
"uk": "Інші версії",
"ur": "دیگر ورژنز",
"uz": "Boshqa versiyalari",
"vi": "Các phiên bản khác",
"zh-CN": "其他版本",
"zh-HK": "其他版本",
"zh-TW": "其他版本",
"zu": "Ezinye izinguqulo"
}

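The new JSON file above maps each language code to the localized "Other versions" title that also appears in the generated dictionary entries. A hedged sketch, not the project's actual codegen, of how such a table could be deserialized and queried (the use of `serde_json` and a `BTreeMap` is an assumption for illustration):

```rust
use std::collections::BTreeMap;

// Look up the localized title for a language code such as "de" or "pt-PT".
fn album_versions_title(dict_json: &str, lang: &str) -> Option<String> {
    let map: BTreeMap<String, String> = serde_json::from_str(dict_json).ok()?;
    map.get(lang).cloned()
}

fn main() {
    let json = r#"{"de": "Weitere Versionen", "en": "Other versions"}"#;
    assert_eq!(
        album_versions_title(json, "de").as_deref(),
        Some("Weitere Versionen")
    );
}
```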
File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -18,6 +18,7 @@ MusicAlbum(
album_type: ep, album_type: ep,
year: Some(2016), year: Some(2016),
by_va: false, by_va: false,
track_count: 5,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "aGd3VKSOTxY", id: "aGd3VKSOTxY",

View file

@ -13,6 +13,7 @@ MusicAlbum(
album_type: album, album_type: album,
year: Some(2024), year: Some(2024),
by_va: true, by_va: true,
track_count: 14,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "ilNEztApdjI", id: "ilNEztApdjI",

View file

@ -26,6 +26,7 @@ MusicAlbum(
album_type: single, album_type: single,
year: None, year: None,
by_va: false, by_va: false,
track_count: 1,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "1Sz3lUVGBSM", id: "1Sz3lUVGBSM",

View file

@ -36,6 +36,7 @@ MusicAlbum(
album_type: album, album_type: album,
year: Some(2011), year: Some(2011),
by_va: false, by_va: false,
track_count: 15,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "js0moD0CIRQ", id: "js0moD0CIRQ",

View file

@ -22,6 +22,7 @@ MusicAlbum(
album_type: show, album_type: show,
year: Some(2015), year: Some(2015),
by_va: false, by_va: false,
track_count: 27,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "ZIjGPc6vG0Y", id: "ZIjGPc6vG0Y",

View file

@ -22,6 +22,7 @@ MusicAlbum(
album_type: single, album_type: single,
year: Some(2020), year: Some(2020),
by_va: false, by_va: false,
track_count: 1,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "VU6lEv0PKAo", id: "VU6lEv0PKAo",

View file

@ -26,6 +26,7 @@ MusicAlbum(
album_type: album, album_type: album,
year: Some(2019), year: Some(2019),
by_va: false, by_va: false,
track_count: 18,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "R3VIKRtzAdE", id: "R3VIKRtzAdE",

View file

@ -13,6 +13,7 @@ MusicAlbum(
album_type: single, album_type: single,
year: Some(2022), year: Some(2022),
by_va: true, by_va: true,
track_count: 6,
tracks: [ tracks: [
TrackItem( TrackItem(
id: "Tzai7JXo45w", id: "Tzai7JXo45w",

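The album snapshots above gain a `track_count` field alongside the track list. A hedged sketch of the kind of completeness check such a field makes possible (the struct and field types here are assumptions, not the rustypipe model):

```rust
// Simplified stand-ins; field names follow the snapshot output, types are assumptions.
struct AlbumSketch {
    track_count: usize,
    tracks: Vec<String>,
}

// An album is considered fully fetched once at least `track_count` tracks are present.
fn is_fully_fetched(album: &AlbumSketch) -> bool {
    album.tracks.len() >= album.track_count
}

fn main() {
    let album = AlbumSketch {
        track_count: 2,
        tracks: vec!["a".into(), "b".into()],
    };
    assert!(is_fully_fetched(&album));
}
```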
View file

@ -42,8 +42,8 @@ MusicArtist(
by_va: false, by_va: false,
), ),
AlbumItem( AlbumItem(
id: "MPREb_6PEkIQE7sWY", id: "MPREb_HrCgErOdgCv",
name: "An deiner Seite (Online Version)", name: "Freiheit",
cover: "[cover]", cover: "[cover]",
artists: [ artists: [
ArtistId( ArtistId(
@ -52,8 +52,8 @@ MusicArtist(
), ),
], ],
artist_id: Some("UC7cl4MmM6ZZ2TcFyMk_b4pg"), artist_id: Some("UC7cl4MmM6ZZ2TcFyMk_b4pg"),
album_type: ep, album_type: album,
year: Some(2008), year: Some(2004),
by_va: false, by_va: false,
), ),
AlbumItem( AlbumItem(
@ -87,8 +87,8 @@ MusicArtist(
by_va: false, by_va: false,
), ),
AlbumItem( AlbumItem(
id: "MPREb_QEClJsuO9xM", id: "MPREb_Oq0WKqNwSVY",
name: "So wie Du warst", name: "Das 2. Gebot",
cover: "[cover]", cover: "[cover]",
artists: [ artists: [
ArtistId( ArtistId(
@ -97,8 +97,8 @@ MusicArtist(
), ),
], ],
artist_id: Some("UC7cl4MmM6ZZ2TcFyMk_b4pg"), artist_id: Some("UC7cl4MmM6ZZ2TcFyMk_b4pg"),
album_type: single, album_type: album,
year: Some(2012), year: Some(2003),
by_va: false, by_va: false,
), ),
AlbumItem( AlbumItem(
@ -251,6 +251,21 @@ MusicArtist(
year: Some(2015), year: Some(2015),
by_va: false, by_va: false,
), ),
AlbumItem(
id: "MPREb_ohcGTZrqKPZ",
name: "Zelluloid",
cover: "[cover]",
artists: [
ArtistId(
id: Some("UC7cl4MmM6ZZ2TcFyMk_b4pg"),
name: "Unheilig",
),
],
artist_id: Some("UC7cl4MmM6ZZ2TcFyMk_b4pg"),
album_type: album,
year: Some(2004),
by_va: false,
),
AlbumItem( AlbumItem(
id: "MPREb_pWpeXxATZYb", id: "MPREb_pWpeXxATZYb",
name: "Wir sind alle wie eins", name: "Wir sind alle wie eins",
@ -266,21 +281,6 @@ MusicArtist(
year: Some(2014), year: Some(2014),
by_va: false, by_va: false,
), ),
AlbumItem(
id: "MPREb_rHhaDLqalbT",
name: "Winter (EP)",
cover: "[cover]",
artists: [
ArtistId(
id: Some("UC7cl4MmM6ZZ2TcFyMk_b4pg"),
name: "Unheilig",
),
],
artist_id: Some("UC7cl4MmM6ZZ2TcFyMk_b4pg"),
album_type: ep,
year: Some(2010),
by_va: false,
),
AlbumItem( AlbumItem(
id: "MPREb_saXgTKNPaSu", id: "MPREb_saXgTKNPaSu",
name: "Zeit zu gehen", name: "Zeit zu gehen",

View file

@ -146,21 +146,6 @@ MusicArtist(
year: Some(2015), year: Some(2015),
by_va: false, by_va: false,
), ),
AlbumItem(
id: "MPREb_ghrNI6BJSM8",
name: "Friends And Family",
cover: "[cover]",
artists: [
ArtistId(
id: Some("UCFKUUtHjT4iq3p0JJA13SOA"),
name: "Every Time I Die",
),
],
artist_id: Some("UCFKUUtHjT4iq3p0JJA13SOA"),
album_type: album,
year: Some(2017),
by_va: false,
),
AlbumItem( AlbumItem(
id: "MPREb_h0UZr2ALQXf", id: "MPREb_h0UZr2ALQXf",
name: "From Parts Unknown (Deluxe Edition)", name: "From Parts Unknown (Deluxe Edition)",

View file

@ -30,7 +30,7 @@ use rustypipe::validate;
#[case::desktop(ClientType::Desktop)] #[case::desktop(ClientType::Desktop)]
#[case::tv(ClientType::Tv)] #[case::tv(ClientType::Tv)]
#[case::mobile(ClientType::Mobile)] #[case::mobile(ClientType::Mobile)]
#[case::android(ClientType::Android)] // #[case::android(ClientType::Android)] Removed since it requires Android device attestation
#[case::ios(ClientType::Ios)] #[case::ios(ClientType::Ios)]
#[tokio::test] #[tokio::test]
async fn get_player_from_client(#[case] client_type: ClientType, rp: RustyPipe) { async fn get_player_from_client(#[case] client_type: ClientType, rp: RustyPipe) {
@ -40,8 +40,6 @@ async fn get_player_from_client(#[case] client_type: ClientType, rp: RustyPipe)
.await .await
.unwrap(); .unwrap();
// dbg!(&player_data);
assert_eq!(player_data.details.id, "n4tK7LYFxI0"); assert_eq!(player_data.details.id, "n4tK7LYFxI0");
assert_eq!(player_data.details.duration, 259); assert_eq!(player_data.details.duration, 259);
assert!(!player_data.details.thumbnail.is_empty()); assert!(!player_data.details.thumbnail.is_empty());
@ -482,8 +480,6 @@ async fn playlist_not_found(rp: RustyPipe) {
async fn get_video_details(rp: RustyPipe) { async fn get_video_details(rp: RustyPipe) {
let details = rp.query().video_details("ZeerrnuLi5E").await.unwrap(); let details = rp.query().video_details("ZeerrnuLi5E").await.unwrap();
// dbg!(&details);
assert_eq!(details.id, "ZeerrnuLi5E"); assert_eq!(details.id, "ZeerrnuLi5E");
assert_eq!(details.name, "aespa 에스파 'Black Mamba' MV"); assert_eq!(details.name, "aespa 에스파 'Black Mamba' MV");
let desc = details.description.to_plaintext(); let desc = details.description.to_plaintext();
@ -519,8 +515,6 @@ async fn get_video_details(rp: RustyPipe) {
async fn get_video_details_music(rp: RustyPipe) { async fn get_video_details_music(rp: RustyPipe) {
let details = rp.query().video_details("XuM2onMGvTI").await.unwrap(); let details = rp.query().video_details("XuM2onMGvTI").await.unwrap();
// dbg!(&details);
assert_eq!(details.id, "XuM2onMGvTI"); assert_eq!(details.id, "XuM2onMGvTI");
assert_eq!(details.name, "Gäa"); assert_eq!(details.name, "Gäa");
let desc = details.description.to_plaintext(); let desc = details.description.to_plaintext();
@ -557,8 +551,6 @@ async fn get_video_details_music(rp: RustyPipe) {
async fn get_video_details_ccommons(rp: RustyPipe) { async fn get_video_details_ccommons(rp: RustyPipe) {
let details = rp.query().video_details("0rb9CfOvojk").await.unwrap(); let details = rp.query().video_details("0rb9CfOvojk").await.unwrap();
// dbg!(&details);
assert_eq!(details.id, "0rb9CfOvojk"); assert_eq!(details.id, "0rb9CfOvojk");
assert_eq!( assert_eq!(
details.name, details.name,
@ -597,8 +589,6 @@ async fn get_video_details_ccommons(rp: RustyPipe) {
async fn get_video_details_chapters(rp: RustyPipe) { async fn get_video_details_chapters(rp: RustyPipe) {
let details = rp.query().video_details("nFDBxBUfE74").await.unwrap(); let details = rp.query().video_details("nFDBxBUfE74").await.unwrap();
// dbg!(&details);
assert_eq!(details.id, "nFDBxBUfE74"); assert_eq!(details.id, "nFDBxBUfE74");
assert_eq!(details.name, "The Prepper PC"); assert_eq!(details.name, "The Prepper PC");
let desc = details.description.to_plaintext(); let desc = details.description.to_plaintext();
@ -717,8 +707,6 @@ async fn get_video_details_chapters(rp: RustyPipe) {
async fn get_video_details_live(rp: RustyPipe) { async fn get_video_details_live(rp: RustyPipe) {
let details = rp.query().video_details("jfKfPfyJRdk").await.unwrap(); let details = rp.query().video_details("jfKfPfyJRdk").await.unwrap();
// dbg!(&details);
assert_eq!(details.id, "jfKfPfyJRdk"); assert_eq!(details.id, "jfKfPfyJRdk");
assert_eq!( assert_eq!(
details.name, details.name,
@ -759,8 +747,6 @@ async fn get_video_details_live(rp: RustyPipe) {
async fn get_video_details_agelimit(rp: RustyPipe) { async fn get_video_details_agelimit(rp: RustyPipe) {
let details = rp.query().video_details("ZDKQmBWTRnw").await.unwrap(); let details = rp.query().video_details("ZDKQmBWTRnw").await.unwrap();
// dbg!(&details);
assert_eq!(details.id, "ZDKQmBWTRnw"); assert_eq!(details.id, "ZDKQmBWTRnw");
assert_eq!( assert_eq!(
details.name, details.name,
@ -864,7 +850,6 @@ async fn channel_videos(rp: RustyPipe) {
.await .await
.unwrap(); .unwrap();
// dbg!(&channel);
assert_channel_eevblog(&channel); assert_channel_eevblog(&channel);
assert!( assert!(
@ -890,7 +875,6 @@ async fn channel_shorts(rp: RustyPipe) {
.await .await
.unwrap(); .unwrap();
// dbg!(&channel);
assert_eq!(channel.id, "UCh8gHdtzO2tXd593_bjErWg"); assert_eq!(channel.id, "UCh8gHdtzO2tXd593_bjErWg");
assert_eq!(channel.name, "Doobydobap"); assert_eq!(channel.name, "Doobydobap");
assert_eq!(channel.handle.as_deref(), Some("@Doobydobap")); assert_eq!(channel.handle.as_deref(), Some("@Doobydobap"));
@ -919,7 +903,6 @@ async fn channel_livestreams(rp: RustyPipe) {
.await .await
.unwrap(); .unwrap();
// dbg!(&channel);
assert_channel_eevblog(&channel); assert_channel_eevblog(&channel);
assert!( assert!(
@ -2136,10 +2119,12 @@ async fn music_search_artists(rp: RustyPipe, unlocalized: bool) {
#[rstest] #[rstest]
#[tokio::test] #[tokio::test]
async fn music_search_artists_cont(rp: RustyPipe) { async fn music_search_artists_cont(rp: RustyPipe) {
let res = rp.query().music_search_artists("boys").await.unwrap(); let res = rp.query().music_search_artists("girls").await.unwrap();
assert_eq!(res.corrected_query, None); assert_eq!(res.corrected_query, None);
if !res.items.is_exhausted() {
assert_next(res.items, rp.query(), 15, 2, true).await; assert_next(res.items, rp.query(), 15, 2, true).await;
}
} }
#[rstest] #[rstest]
@ -2611,7 +2596,7 @@ async fn music_genres(rp: RustyPipe, unlocalized: bool) {
} }
#[rstest] #[rstest]
#[case::chill("ggMPOg1uX1JOQWZFeDByc2Jm", "Chill")] #[case::party("ggMPOg1uX2w1aW1CRDFTSUNo", "Party")]
#[case::pop("ggMPOg1uX1lMbVZmbzl6NlJ3", "Pop")] #[case::pop("ggMPOg1uX1lMbVZmbzl6NlJ3", "Pop")]
#[tokio::test] #[tokio::test]
async fn music_genre(#[case] id: &str, #[case] name: &str, rp: RustyPipe, unlocalized: bool) { async fn music_genre(#[case] id: &str, #[case] name: &str, rp: RustyPipe, unlocalized: bool) {
@ -2655,7 +2640,7 @@ async fn music_genre(#[case] id: &str, #[case] name: &str, rp: RustyPipe, unloca
let subgenres = check_music_genre(genre, id, name, unlocalized); let subgenres = check_music_genre(genre, id, name, unlocalized);
if name == "Chill" { if name == "Party" {
assert_gte(subgenres.len(), 2, "subgenres"); assert_gte(subgenres.len(), 2, "subgenres");
} }
@ -2946,7 +2931,11 @@ async fn assert_next<T: FromYtItem, Q: AsRef<RustyPipeQuery>>(
} }
for i in 0..n_pages { for i in 0..n_pages {
p = p.next(query).await.unwrap().expect("paginator exhausted"); match p.next(query).await.unwrap() {
Some(np) => p = np,
None => panic!("paginator exhausted after {i} pages"),
}
assert_gte( assert_gte(
p.items.len(), p.items.len(),
min_items, min_items,
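The updated helper above advances the paginator with a `match` so an early exhaustion fails with the page index instead of a bare `expect`. A hedged sketch of that pattern with a simplified stand-in type (not the rustypipe paginator):

```rust
// Simplified stand-in: `next()` yields `Option<Self>` like the paginator in the test.
struct Paginator {
    page: usize,
    last_page: usize,
}

impl Paginator {
    fn next(&self) -> Option<Paginator> {
        (self.page < self.last_page).then(|| Paginator {
            page: self.page + 1,
            last_page: self.last_page,
        })
    }
}

// Replace the paginator in place and fail with context if it runs out early.
fn walk(mut p: Paginator, n_pages: usize) {
    for i in 0..n_pages {
        match p.next() {
            Some(next_page) => p = next_page,
            None => panic!("paginator exhausted after {i} pages"),
        }
    }
}

fn main() {
    walk(Paginator { page: 0, last_page: 3 }, 2);
}
```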