Compare commits
No commits in common. "c3f82f765bf1e957e8efad10acd0d050f4f0cf17" and "b3331b36a7a1331f8a7834424cebce006e9bffff" have entirely different histories.
c3f82f765b ... b3331b36a7
16 changed files with 192 additions and 264 deletions
40  README.md
@@ -1,7 +1,5 @@
 # RustyPipe
 
-[![CI status](https://ci.thetadev.de/api/badges/ThetaDev/rustypipe/status.svg)](https://ci.thetadev.de/ThetaDev/rustypipe)
-
 Client for the public YouTube / YouTube Music API (Innertube),
 inspired by [NewPipe](https://github.com/TeamNewPipe/NewPipeExtractor).
 
@@ -9,25 +7,25 @@ inspired by [NewPipe](https://github.com/TeamNewPipe/NewPipeExtractor).
 
 ### YouTube
 
-- **Player** (video/audio streams, subtitles)
-- **Playlist**
-- **VideoDetails** (metadata, comments, recommended videos)
-- **Channel** (videos, shorts, livestreams, playlists, info, search)
-- **ChannelRSS**
-- **Search** (with filters)
-- **Search suggestions**
-- **Trending**
-- **URL resolver**
+- [X] **Player** (video/audio streams, subtitles)
+- [X] **Playlist**
+- [X] **VideoDetails** (metadata, comments, recommended videos)
+- [X] **Channel** (videos, shorts, livestreams, playlists, info, search)
+- [X] **ChannelRSS**
+- [X] **Search** (with filters)
+- [X] **Search suggestions**
+- [X] **Trending**
+- [X] **URL resolver**
 
 ### YouTube Music
 
-- **Playlist**
-- **Album**
-- **Artist**
-- **Search**
-- **Search suggestions**
-- **Radio**
-- **Track details** (lyrics, recommendations)
-- **Moods/Genres**
-- **Charts**
-- **New** (albums, music videos)
+- [X] **Playlist**
+- [X] **Album**
+- [X] **Artist**
+- [X] **Search**
+- [X] **Search suggestions**
+- [X] **Radio**
+- [X] **Track details** (lyrics, recommendations)
+- [X] **Moods/Genres**
+- [X] **Charts**
+- [X] **New**
@@ -8,7 +8,7 @@ use crate::{
     error::{Error, ExtractionError},
     model::{AlbumItem, ArtistId, MusicArtist},
     serializer::MapResult,
-    util,
+    util::{self, TryRemove},
 };
 
 use super::{
@@ -331,12 +331,9 @@ impl MapResponse<Vec<AlbumItem>> for response::MusicArtistAlbums {
     ) -> Result<MapResult<Vec<AlbumItem>>, ExtractionError> {
         // dbg!(&self);
 
-        let grids = self
-            .contents
-            .single_column_browse_results_renderer
-            .contents
-            .into_iter()
-            .next()
+        let mut content = self.contents.single_column_browse_results_renderer.contents;
+        let grids = content
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed("no content")))?
             .tab_renderer
             .content
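The hunks in this compare replace the repeated `.into_iter().next()` chains with `.try_swap_remove(0)` through the new `util::TryRemove` import, but the trait definition itself is not among the hunks shown here. A minimal sketch of what such an extension trait could look like, assuming it is just a bounds-checked wrapper around `Vec::swap_remove`:

pub trait TryRemove<T> {
    /// Removes and returns the element at `index`, or `None` if it is out of bounds.
    /// Like `Vec::swap_remove`, this is O(1) and does not preserve element order.
    fn try_swap_remove(&mut self, index: usize) -> Option<T>;
}

impl<T> TryRemove<T> for Vec<T> {
    fn try_swap_remove(&mut self, index: usize) -> Option<T> {
        if index < self.len() {
            Some(self.swap_remove(index))
        } else {
            None
        }
    }
}

For index 0 this returns the same element as the old `.into_iter().next()`, without having to consume the whole vector.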
@@ -125,17 +125,14 @@ impl MapResponse<MusicPlaylist> for response::MusicPlaylist {
     ) -> Result<MapResult<MusicPlaylist>, ExtractionError> {
         // dbg!(&self);
 
-        let music_contents = self
-            .contents
-            .single_column_browse_results_renderer
-            .contents
-            .into_iter()
-            .next()
+        let mut content = self.contents.single_column_browse_results_renderer.contents;
+        let mut music_contents = content
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed("no content")))?
             .tab_renderer
             .content
             .section_list_renderer;
-        let shelf = music_contents
+        let mut shelf = music_contents
             .contents
             .into_iter()
             .find_map(|section| match section {
@@ -160,8 +157,7 @@ impl MapResponse<MusicPlaylist> for response::MusicPlaylist {
 
         let ctoken = shelf
             .continuations
-            .into_iter()
-            .next()
+            .try_swap_remove(0)
             .map(|cont| cont.next_continuation_data.continuation);
 
         let track_count = if ctoken.is_some() {
@@ -181,8 +177,7 @@ impl MapResponse<MusicPlaylist> for response::MusicPlaylist {
 
         let related_ctoken = music_contents
            .continuations
-            .into_iter()
-            .next()
+            .try_swap_remove(0)
            .map(|c| c.next_continuation_data.continuation);
 
         let (from_ytm, channel, name, thumbnail, description) = match self.header {
@@ -274,12 +269,9 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed("no header")))?
             .music_detail_header_renderer;
 
-        let sections = self
-            .contents
-            .single_column_browse_results_renderer
-            .contents
-            .into_iter()
-            .next()
+        let mut content = self.contents.single_column_browse_results_renderer.contents;
+        let sections = content
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed("no content")))?
             .tab_renderer
             .content
@@ -328,8 +320,7 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
 
         let (artists, by_va) = map_artists(artists_p);
         let album_type_txt = subtitle_split
-            .into_iter()
-            .next()
+            .try_swap_remove(0)
             .map(|part| part.to_string())
             .unwrap_or_default();
 
@@ -338,13 +329,12 @@ impl MapResponse<MusicAlbum> for response::MusicPlaylist {
 
         let (artist_id, playlist_id) = header
             .menu
-            .map(|menu| {
+            .map(|mut menu| {
                 (
                     map_artist_id(menu.menu_renderer.items),
                     menu.menu_renderer
                         .top_level_buttons
-                        .into_iter()
-                        .next()
+                        .try_swap_remove(0)
                         .map(|btn| {
                             btn.button_renderer
                                 .navigation_endpoint
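Because `try_swap_remove` takes the vector by mutable reference instead of consuming it, the bindings and closure arguments it is called on gain `mut` throughout these hunks (`let mut content`, `.map(|mut menu| ...)`, and so on). A small illustrative contrast, reusing the `TryRemove` sketch above; the function names are illustrative, not from the crate:

fn first_by_value(items: Vec<String>) -> Option<String> {
    // old pattern: consumes the Vec to take its first element
    items.into_iter().next()
}

fn first_by_swap(mut items: Vec<String>) -> Option<String> {
    // new pattern: needs a mutable binding, hence the added `mut`s in the diff
    items.try_swap_remove(0)
}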
@@ -10,6 +10,7 @@ use crate::{
         MusicSearchFiltered, MusicSearchResult, MusicSearchSuggestion, TrackItem,
     },
     serializer::MapResult,
+    util::TryRemove,
 };
 
 use super::{response, ClientType, MapResponse, RustyPipeQuery, YTContext};
@@ -233,12 +234,9 @@ impl MapResponse<MusicSearchResult> for response::MusicSearch {
     ) -> Result<MapResult<MusicSearchResult>, crate::error::ExtractionError> {
         // dbg!(&self);
 
-        let sections = self
-            .contents
-            .tabbed_search_results_renderer
-            .contents
-            .into_iter()
-            .next()
+        let mut tabs = self.contents.tabbed_search_results_renderer.contents;
+        let sections = tabs
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed("no tab")))?
             .tab_renderer
             .content
@@ -264,8 +262,8 @@ impl MapResponse<MusicSearchResult> for response::MusicSearch {
                     }
                 }
             }
-            response::music_search::ItemSection::ItemSectionRenderer { contents } => {
-                if let Some(corrected) = contents.into_iter().next() {
+            response::music_search::ItemSection::ItemSectionRenderer { mut contents } => {
+                if let Some(corrected) = contents.try_swap_remove(0) {
                     corrected_query = Some(corrected.showing_results_for_renderer.corrected_query)
                 }
             }
@@ -297,10 +295,9 @@ impl<T: FromYtItem> MapResponse<MusicSearchFiltered<T>> for response::MusicSearc
     ) -> Result<MapResult<MusicSearchFiltered<T>>, ExtractionError> {
         // dbg!(&self);
 
-        let tabs = self.contents.tabbed_search_results_renderer.contents;
+        let mut tabs = self.contents.tabbed_search_results_renderer.contents;
         let sections = tabs
-            .into_iter()
-            .next()
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed("no tab")))?
             .tab_renderer
             .content
@@ -312,17 +309,17 @@ impl<T: FromYtItem> MapResponse<MusicSearchFiltered<T>> for response::MusicSearc
         let mut mapper = MusicListMapper::new(lang);
 
         sections.into_iter().for_each(|section| match section {
-            response::music_search::ItemSection::MusicShelfRenderer(shelf) => {
+            response::music_search::ItemSection::MusicShelfRenderer(mut shelf) => {
                 mapper.map_response(shelf.contents);
-                if let Some(cont) = shelf.continuations.into_iter().next() {
+                if let Some(cont) = shelf.continuations.try_swap_remove(0) {
                     ctoken = Some(cont.next_continuation_data.continuation);
                 }
             }
             response::music_search::ItemSection::MusicCardShelfRenderer(card) => {
                 mapper.map_card(card);
             }
-            response::music_search::ItemSection::ItemSectionRenderer { contents } => {
-                if let Some(corrected) = contents.into_iter().next() {
+            response::music_search::ItemSection::ItemSectionRenderer { mut contents } => {
+                if let Some(corrected) = contents.try_swap_remove(0) {
                     corrected_query = Some(corrected.showing_results_for_renderer.corrected_query)
                 }
             }
@@ -407,7 +404,7 @@ mod tests {
     #[case::default("default")]
     #[case::typo("typo")]
     #[case::radio("radio")]
-    #[case::artist("artist")]
+    #[case::radio("artist")]
     fn map_music_search_main(#[case] name: &str) {
         let json_path = path!(*TESTFILES / "music_search" / format!("main_{name}.json"));
         let json_file = File::open(json_path).unwrap();
@@ -5,6 +5,7 @@ use crate::model::{
     Comment, MusicItem, PlaylistVideo, YouTubeItem,
 };
 use crate::serializer::MapResult;
+use crate::util::TryRemove;
 
 use super::response::music_item::{map_queue_item, MusicListMapper, PlaylistPanelVideo};
 use super::{response, ClientType, MapResponse, QContinuation, RustyPipeQuery};
@@ -99,10 +100,9 @@ impl MapResponse<Paginator<YouTubeItem>> for response::Continuation {
     ) -> Result<MapResult<Paginator<YouTubeItem>>, ExtractionError> {
         let items = self
             .on_response_received_actions
-            .and_then(|actions| {
+            .and_then(|mut actions| {
                 actions
-                    .into_iter()
-                    .next()
+                    .try_swap_remove(0)
                     .map(|action| action.append_continuation_items_action.continuation_items)
             })
             .or_else(|| {
|
@ -168,8 +168,7 @@ impl MapResponse<Paginator<MusicItem>> for response::MusicContinuation {
|
||||||
|
|
||||||
let map_res = mapper.items();
|
let map_res = mapper.items();
|
||||||
let ctoken = continuations
|
let ctoken = continuations
|
||||||
.into_iter()
|
.try_swap_remove(0)
|
||||||
.next()
|
|
||||||
.map(|cont| cont.next_continuation_data.continuation);
|
.map(|cont| cont.next_continuation_data.continuation);
|
||||||
|
|
||||||
Ok(MapResult {
|
Ok(MapResult {
|
||||||
|
|
|
@@ -65,11 +65,10 @@ impl MapResponse<Playlist> for response::Playlist {
             _ => return Err(response::alerts_to_err(self.alerts)),
         };
 
-        let video_items = contents
-            .two_column_browse_results_renderer
-            .contents
-            .into_iter()
-            .next()
+        let mut tcbr_contents = contents.two_column_browse_results_renderer.contents;
+
+        let video_items = tcbr_contents
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed(
                 "twoColumnBrowseResultsRenderer empty",
             )))?
@@ -77,15 +76,13 @@ impl MapResponse<Playlist> for response::Playlist {
             .content
             .section_list_renderer
             .contents
-            .into_iter()
-            .next()
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed(
                 "sectionListRenderer empty",
             )))?
             .item_section_renderer
             .contents
-            .into_iter()
-            .next()
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed(
                 "itemSectionRenderer empty",
             )))?
@@ -96,11 +93,10 @@ impl MapResponse<Playlist> for response::Playlist {
 
         let (thumbnails, last_update_txt) = match self.sidebar {
             Some(sidebar) => {
-                let sidebar_items = sidebar.playlist_sidebar_renderer.contents;
+                let mut sidebar_items = sidebar.playlist_sidebar_renderer.contents;
                 let mut primary =
                     sidebar_items
-                        .into_iter()
-                        .next()
+                        .try_swap_remove(0)
                         .ok_or(ExtractionError::InvalidData(Cow::Borrowed(
                             "no primary sidebar",
                         )))?;
@@ -11,7 +11,7 @@ use crate::{
         text::{Text, TextComponents},
         MapResult,
     },
-    util::{self, dictionary},
+    util::{self, dictionary, TryRemove},
 };
 
 use super::{
|
@ -587,14 +587,14 @@ impl MusicListMapper {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Playlist item
|
// Playlist item
|
||||||
FlexColumnDisplayStyle::Default => (
|
FlexColumnDisplayStyle::Default => {
|
||||||
c2.map(TextComponents::from),
|
let mut fixed_columns = item.fixed_columns;
|
||||||
c3.map(TextComponents::from),
|
(
|
||||||
item.fixed_columns
|
c2.map(TextComponents::from),
|
||||||
.into_iter()
|
c3.map(TextComponents::from),
|
||||||
.next()
|
fixed_columns.try_swap_remove(0).map(TextComponents::from),
|
||||||
.map(TextComponents::from),
|
)
|
||||||
),
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let duration =
|
let duration =
|
||||||
|
|
|
@@ -477,7 +477,7 @@ impl<T> YouTubeListMapper<T> {
             is_upcoming: video.upcoming_event_data.is_some(),
             short_description: video
                 .detailed_metadata_snippets
-                .and_then(|snippets| snippets.into_iter().next().map(|s| s.snippet_text))
+                .and_then(|mut snippets| snippets.try_swap_remove(0).map(|s| s.snippet_text))
                 .or(video.description_snippet),
         }
     }
@@ -5,6 +5,7 @@ use crate::{
     model::{paginator::Paginator, VideoItem},
     param::Language,
     serializer::MapResult,
+    util::TryRemove,
 };
 
 use super::{response, ClientType, MapResponse, QBrowse, QBrowseParams, RustyPipeQuery};
@@ -55,12 +56,9 @@ impl MapResponse<Paginator<VideoItem>> for response::Startpage {
         lang: crate::param::Language,
         _deobf: Option<&crate::deobfuscate::DeobfData>,
     ) -> Result<MapResult<Paginator<VideoItem>>, ExtractionError> {
-        let grid = self
-            .contents
-            .two_column_browse_results_renderer
-            .contents
-            .into_iter()
-            .next()
+        let mut contents = self.contents.two_column_browse_results_renderer.contents;
+        let grid = contents
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed("no contents")))?
             .tab_renderer
             .content
@@ -82,12 +80,9 @@ impl MapResponse<Vec<VideoItem>> for response::Trending {
         lang: crate::param::Language,
         _deobf: Option<&crate::deobfuscate::DeobfData>,
     ) -> Result<MapResult<Vec<VideoItem>>, ExtractionError> {
-        let items = self
-            .contents
-            .two_column_browse_results_renderer
-            .contents
-            .into_iter()
-            .next()
+        let mut contents = self.contents.two_column_browse_results_renderer.contents;
+        let items = contents
+            .try_swap_remove(0)
             .ok_or(ExtractionError::InvalidData(Cow::Borrowed("no contents")))?
             .tab_renderer
             .content
@@ -129,11 +129,11 @@ impl MapResponse<VideoDetails> for response::VideoDetails {
             }
             response::video_details::VideoResultsItem::ItemSectionRenderer(section) => {
                 match section {
-                    response::video_details::ItemSection::CommentsEntryPoint { contents } => {
-                        comment_count_section = contents.into_iter().next();
+                    response::video_details::ItemSection::CommentsEntryPoint { mut contents } => {
+                        comment_count_section = contents.try_swap_remove(0);
                     }
-                    response::video_details::ItemSection::CommentItemSection { contents } => {
-                        comment_ctoken_section = contents.into_iter().next();
+                    response::video_details::ItemSection::CommentItemSection { mut contents } => {
+                        comment_ctoken_section = contents.try_swap_remove(0);
                     }
                     response::video_details::ItemSection::None => {}
                 }
@@ -393,14 +393,13 @@ pub(crate) fn entry(lang: Language) -> Entry {
             ],
         },
         number_nd_tokens: ::phf::Map {
-            key: 12913932095322966823,
+            key: 15467950696543387533,
            disps: &[
                 (0, 0),
             ],
             entries: &[
-                ("১", 1),
                 ("ন\u{9be}ই", 0),
-                ("১ট\u{9be}", 1),
+                ("১", 1),
             ],
         },
         album_types: ::phf::Map {
@@ -4660,7 +4659,6 @@ pub(crate) fn entry(lang: Language) -> Entry {
             ],
             entries: &[
                 ("ingen", 0),
-                ("én", 1),
             ],
         },
         album_types: ::phf::Map {
@@ -5034,10 +5032,8 @@ pub(crate) fn entry(lang: Language) -> Entry {
         number_nd_tokens: ::phf::Map {
             key: 12913932095322966823,
             disps: &[
-                (0, 0),
             ],
             entries: &[
-                ("um", 1),
             ],
         },
         album_types: ::phf::Map {
112  src/util/mod.rs
@@ -10,7 +10,7 @@ pub use protobuf::{string_from_pb, ProtoBuilder};
 use std::{
     borrow::{Borrow, Cow},
     collections::BTreeMap,
-    str::{FromStr, SplitWhitespace},
+    str::FromStr,
 };
 
 use base64::Engine;
@@ -331,18 +331,36 @@
     }
 
     if digits.is_empty() {
-        SplitTokens::new(&filtered, by_char)
-            .find_map(|token| dict_entry.number_nd_tokens.get(token))
-            .and_then(|n| (*n as u64).try_into().ok())
+        if by_char {
+            filtered
+                .chars()
+                .find_map(|c| dict_entry.number_nd_tokens.get(&c.to_string()))
+                .and_then(|n| (*n as u64).try_into().ok())
+        } else {
+            filtered
+                .split_whitespace()
+                .find_map(|token| dict_entry.number_nd_tokens.get(token))
+                .and_then(|n| (*n as u64).try_into().ok())
+        }
     } else {
         let num = digits.parse::<u64>().ok()?;
 
-        exp += SplitTokens::new(&filtered, by_char)
-            .filter_map(|token| match token {
-                "k" => Some(3),
-                _ => dict_entry.number_tokens.get(token).map(|t| *t as i32),
-            })
-            .sum::<i32>();
+        let lookup_token = |token: &str| match token {
+            "k" => Some(3),
+            _ => dict_entry.number_tokens.get(token).map(|t| *t as i32),
+        };
+
+        if by_char {
+            exp += filtered
+                .chars()
+                .filter_map(|token| lookup_token(&token.to_string()))
+                .sum::<i32>();
+        } else {
+            exp += filtered
+                .split_whitespace()
+                .filter_map(lookup_token)
+                .sum::<i32>();
+        }
 
         F::try_from(num.checked_mul((10_u64).checked_pow(exp.try_into().ok()?)?)?).ok()
     }
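With `SplitTokens` removed (see the hunk further down), the number parser branches explicitly on `by_char`, scanning per Unicode character in one case and per whitespace-separated word in the other. An illustrative, self-contained sketch of the two lookup modes; the `HashMap` here merely stands in for the generated phf dictionaries used by the real code:

use std::collections::HashMap;

fn find_number_token(filtered: &str, by_char: bool, tokens: &HashMap<String, u8>) -> Option<u8> {
    if by_char {
        // look up each Unicode character as its own token
        filtered.chars().find_map(|c| tokens.get(&c.to_string()).copied())
    } else {
        // look up each whitespace-separated word
        filtered.split_whitespace().find_map(|token| tokens.get(token).copied())
    }
}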
@@ -397,62 +415,6 @@ pub fn b64_decode<T: AsRef<[u8]>>(input: T) -> Result<Vec<u8>, base64::DecodeErr
     base64::engine::general_purpose::STANDARD.decode(input)
 }
 
-/// An iterator over the chars in a string (in str format)
-pub struct SplitChar<'a> {
-    txt: &'a str,
-    index: usize,
-}
-
-impl<'a> From<&'a str> for SplitChar<'a> {
-    fn from(value: &'a str) -> Self {
-        Self {
-            txt: value,
-            index: 0,
-        }
-    }
-}
-
-impl<'a> Iterator for SplitChar<'a> {
-    type Item = &'a str;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        self.txt
-            .get(self.index..)
-            .and_then(|txt| txt.chars().next())
-            .map(|c| {
-                let start = self.index;
-                self.index += c.len_utf8();
-                &self.txt[start..self.index]
-            })
-    }
-}
-
-/// An iterator for parsing strings. It can either iterate over words or characters.
-pub enum SplitTokens<'a> {
-    Word(SplitWhitespace<'a>),
-    Char(SplitChar<'a>),
-}
-
-impl<'a> SplitTokens<'a> {
-    pub fn new(s: &'a str, by_char: bool) -> Self {
-        match by_char {
-            true => Self::Char(SplitChar::from(s)),
-            false => Self::Word(s.split_whitespace()),
-        }
-    }
-}
-
-impl<'a> Iterator for SplitTokens<'a> {
-    type Item = &'a str;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        match self {
-            SplitTokens::Word(iter) => iter.next(),
-            SplitTokens::Char(iter) => iter.next(),
-        }
-    }
-}
-
 #[cfg(test)]
 pub(crate) mod tests {
     use std::{fs::File, io::BufReader, path::PathBuf};
@@ -588,22 +550,4 @@ pub(crate) mod tests {
         let res = parse_large_numstr::<u64>(string, lang).expect(&emsg);
         assert_eq!(res, rounded, "{emsg}");
     }
-
-    #[test]
-    fn split_char() {
-        let teststr = "abc今天更新def";
-        let res = SplitTokens::new(teststr, true).collect::<Vec<_>>();
-        assert_eq!(res.len(), 10);
-        let res_str = res.into_iter().collect::<String>();
-        assert_eq!(res_str, teststr)
-    }
-
-    #[test]
-    fn split_words() {
-        let teststr = "abc 今天更新 ghi";
-        let res = SplitTokens::new(teststr, false).collect::<Vec<_>>();
-        assert_eq!(res.len(), 3);
-        let res_str = res.join(" ");
-        assert_eq!(res_str, teststr)
-    }
 }
@@ -17,7 +17,7 @@ use time::{Date, Duration, Month, OffsetDateTime};
 
 use crate::{
     param::Language,
-    util::{self, dictionary, SplitTokens},
+    util::{self, dictionary},
 };
 
 /// Parsed TimeAgo string, contains amount and time unit.
@@ -149,32 +149,21 @@ fn filter_str(string: &str) -> String {
         .collect()
 }
 
-struct TaTokenParser<'a> {
-    iter: SplitTokens<'a>,
-    tokens: &'a phf::Map<&'static str, TaToken>,
-}
-
-impl<'a> TaTokenParser<'a> {
-    fn new(entry: &'a dictionary::Entry, by_char: bool, nd: bool, filtered_str: &'a str) -> Self {
-        let tokens = match nd {
-            true => &entry.timeago_nd_tokens,
-            false => &entry.timeago_tokens,
-        };
-        Self {
-            iter: SplitTokens::new(filtered_str, by_char),
-            tokens,
-        }
-    }
-}
-
-impl<'a> Iterator for TaTokenParser<'a> {
-    type Item = TimeAgo;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        // Quantity for parsing separate quantity + unit tokens
-        let mut qu = 1;
-        self.iter.find_map(|word| {
-            self.tokens.get(word).and_then(|t| match t.unit {
+fn parse_ta_token(
+    entry: &dictionary::Entry,
+    by_char: bool,
+    nd: bool,
+    filtered_str: &str,
+) -> Option<TimeAgo> {
+    let tokens = match nd {
+        true => &entry.timeago_nd_tokens,
+        false => &entry.timeago_tokens,
+    };
+    let mut qu = 1;
+
+    if by_char {
+        filtered_str.chars().find_map(|word| {
+            tokens.get(&word.to_string()).and_then(|t| match t.unit {
                 Some(unit) => Some(TimeAgo { n: t.n * qu, unit }),
                 None => {
                     qu = t.n;
@@ -182,6 +171,57 @@ impl<'a> Iterator for TaTokenParser<'a> {
                 }
             })
         })
+    } else {
+        filtered_str.split_whitespace().find_map(|word| {
+            tokens.get(word).and_then(|t| match t.unit {
+                Some(unit) => Some(TimeAgo { n: t.n * qu, unit }),
+                None => {
+                    qu = t.n;
+                    None
+                }
+            })
+        })
+    }
+}
+
+fn parse_ta_tokens(
+    entry: &dictionary::Entry,
+    by_char: bool,
+    nd: bool,
+    filtered_str: &str,
+) -> Vec<TimeAgo> {
+    let tokens = match nd {
+        true => &entry.timeago_nd_tokens,
+        false => &entry.timeago_tokens,
+    };
+    let mut qu = 1;
+
+    if by_char {
+        filtered_str
+            .chars()
+            .filter_map(|word| {
+                tokens.get(&word.to_string()).and_then(|t| match t.unit {
+                    Some(unit) => Some(TimeAgo { n: t.n * qu, unit }),
+                    None => {
+                        qu = t.n;
+                        None
+                    }
+                })
+            })
+            .collect()
+    } else {
+        filtered_str
+            .split_whitespace()
+            .filter_map(|word| {
+                tokens.get(word).and_then(|t| match t.unit {
+                    Some(unit) => Some(TimeAgo { n: t.n * qu, unit }),
+                    None => {
+                        qu = t.n;
+                        None
+                    }
+                })
+            })
+            .collect()
+    }
+}
+
     }
 }
 
@@ -200,9 +240,7 @@ pub fn parse_timeago(lang: Language, textual_date: &str) -> Option<TimeAgo> {
 
     let qu: u8 = util::parse_numeric(textual_date).unwrap_or(1);
 
-    TaTokenParser::new(&entry, util::lang_by_char(lang), false, &filtered_str)
-        .next()
-        .map(|ta| ta * qu)
+    parse_ta_token(&entry, util::lang_by_char(lang), false, &filtered_str).map(|ta| ta * qu)
 }
 
 /// Parse a TimeAgo string (e.g. "29 minutes ago") into a Chrono DateTime object.
|
@ -235,14 +273,11 @@ pub fn parse_textual_date(lang: Language, textual_date: &str) -> Option<ParsedDa
|
||||||
let nums = util::parse_numeric_vec::<u16>(textual_date);
|
let nums = util::parse_numeric_vec::<u16>(textual_date);
|
||||||
|
|
||||||
match nums.len() {
|
match nums.len() {
|
||||||
0 => match TaTokenParser::new(&entry, by_char, true, &filtered_str).next() {
|
0 => match parse_ta_token(&entry, by_char, true, &filtered_str) {
|
||||||
Some(timeago) => Some(ParsedDate::Relative(timeago)),
|
Some(timeago) => Some(ParsedDate::Relative(timeago)),
|
||||||
None => TaTokenParser::new(&entry, by_char, false, &filtered_str)
|
None => parse_ta_token(&entry, by_char, false, &filtered_str).map(ParsedDate::Relative),
|
||||||
.next()
|
|
||||||
.map(ParsedDate::Relative),
|
|
||||||
},
|
},
|
||||||
1 => TaTokenParser::new(&entry, by_char, false, &filtered_str)
|
1 => parse_ta_token(&entry, by_char, false, &filtered_str)
|
||||||
.next()
|
|
||||||
.map(|timeago| ParsedDate::Relative(timeago * nums[0] as u8)),
|
.map(|timeago| ParsedDate::Relative(timeago * nums[0] as u8)),
|
||||||
2..=3 => {
|
2..=3 => {
|
||||||
if nums.len() == entry.date_order.len() {
|
if nums.len() == entry.date_order.len() {
|
||||||
|
@@ -313,10 +348,12 @@ pub fn parse_video_duration(lang: Language, video_duration: &str) -> Option<u32>
         } else {
             part.digits.parse::<u32>().ok()?
         };
-        let mut tokens = TaTokenParser::new(&entry, by_char, false, &part.word).peekable();
-        tokens.peek()?;
+        let tokens = parse_ta_tokens(&entry, by_char, false, &part.word);
+        if tokens.is_empty() {
+            return None;
+        }
 
-        tokens.for_each(|ta| {
+        tokens.iter().for_each(|ta| {
             secs += n * ta.secs() as u32;
             n = 1;
         });
@@ -768,12 +805,4 @@ mod tests {
         let now = OffsetDateTime::now_utc();
         assert_eq!(date.year(), now.year() - 1);
     }
-
-    #[test]
-    fn tx() {
-        let s = "Abcdef";
-        let lc: (usize, char) = s.char_indices().last().unwrap();
-        let t = &s[(lc.0 + lc.1.len_utf8())..];
-        dbg!(&t);
-    }
 }
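`parse_ta_token` and `parse_ta_tokens` keep the quantity/unit accumulation that the removed `TaTokenParser` implemented: a token with no unit only updates the multiplier `qu`, and the next token that does carry a unit yields `TimeAgo { n: t.n * qu, unit }`. A simplified, illustrative sketch of that accumulation with stand-in types (not the crate's real `TaToken`/`TimeAgo` definitions):

#[derive(Clone, Copy)]
struct Token {
    n: u8,
    unit: Option<&'static str>,
}

fn first_timeago(words: &[&str], lookup: impl Fn(&str) -> Option<Token>) -> Option<(u8, &'static str)> {
    let mut qu = 1;
    words.iter().find_map(|&word| {
        lookup(word).and_then(|t| match t.unit {
            // a unit token yields a result, scaled by any quantity token seen before it
            Some(unit) => Some((t.n * qu, unit)),
            // a bare quantity token only updates the multiplier and keeps searching
            None => {
                qu = t.n;
                None
            }
        })
    })
}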
@@ -201,8 +201,7 @@
     },
     "number_nd_tokens": {
       "নাই": 0,
-      "১": 1,
-      "১টা": 1
+      "১": 1
     },
     "album_types": {
       "ep": "Ep",
@@ -2663,8 +2662,7 @@
       "mrd": 9
     },
     "number_nd_tokens": {
-      "ingen": 0,
-      "én": 1
+      "ingen": 0
     },
     "album_types": {
       "album": "Album",
@@ -2887,9 +2885,7 @@
       "mi": 6,
       "mil": 3
     },
-    "number_nd_tokens": {
-      "um": 1
-    },
+    "number_nd_tokens": {},
     "album_types": {
       "audiolivro": "Audiobook",
       "ep": "Ep",
@@ -16,9 +16,7 @@
       "শঃ": null
     },
     "number_nd_tokens": {
-      "কোনো": null,
-      "ভিডিঅ’": null,
-      "১টা": 1
+      "কোনো": null
     }
   },
   "bn": {
|
@ -113,8 +111,7 @@
|
||||||
},
|
},
|
||||||
"no": {
|
"no": {
|
||||||
"number_nd_tokens": {
|
"number_nd_tokens": {
|
||||||
"avspillinger": null,
|
"avspillinger": null
|
||||||
"én": 1
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"or": {
|
"or": {
|
||||||
|
@@ -132,11 +129,6 @@
       "ਨੇ": null
     }
   },
-  "pt": {
-    "number_nd_tokens": {
-      "um": 1
-    }
-  },
   "ro": {
     "number_nd_tokens": {
       "abonat": null,
|
@ -1108,8 +1108,7 @@ fn search_empty(rp: RustyPipe) {
|
||||||
fn search_suggestion(rp: RustyPipe) {
|
fn search_suggestion(rp: RustyPipe) {
|
||||||
let result = tokio_test::block_on(rp.query().search_suggestion("hunger ga")).unwrap();
|
let result = tokio_test::block_on(rp.query().search_suggestion("hunger ga")).unwrap();
|
||||||
|
|
||||||
assert!(result.iter().any(|s| s.starts_with("hunger games ")));
|
assert!(result.contains(&"hunger games".to_owned()));
|
||||||
assert_gte(result.len(), 10, "search suggestions");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[rstest]
|
#[rstest]
|
||||||
|
|