Compare commits


39 commits
v0.1.0 ... main

Author SHA1 Message Date
256442abda finished docs, updated compose
2022-07-09 20:29:59 +02:00
17ece4401d Bump version: 0.4.5 → 0.4.6 2022-07-09 02:08:48 +02:00
7d6477425e write docs
improve docker compose
2022-07-09 02:08:39 +02:00
283e3d50c7 fix error deleting cache folder while running job 2022-07-08 22:38:58 +02:00
95cad578d0 dont count filtered videos as pending 2022-07-06 09:50:42 +02:00
7df25baa5c Bump version: 0.4.4 → 0.4.5
2022-07-05 22:25:21 +02:00
e5570c8160 finally fixed cropping thumbnails 2022-07-05 22:18:10 +02:00
0d2efcd3e3 Bump version: 0.4.3 → 0.4.4
2022-07-05 21:05:23 +02:00
f9db556a41 enable cropping video thumbnails 2022-07-05 21:04:56 +02:00
28e48220c2 Bump version: 0.4.2 → 0.4.3
2022-07-05 21:01:39 +02:00
f50fa96dc4 fix downloading items display 2022-07-05 21:01:18 +02:00
8127242487 Bump version: 0.4.1 → 0.4.2
2022-07-05 20:35:17 +02:00
9d41e6d5c3 add thumbnail resizing 2022-07-05 20:34:58 +02:00
fb6d830897 Bump version: 0.4.0 → 0.4.1
2022-07-05 14:55:08 +02:00
0e93d36191 update dependencies 2022-07-05 14:54:53 +02:00
bbd2e7f73e Merge branch 'docs' 2022-07-05 14:53:08 +02:00
1e59cd70ae add global docker urls
2022-07-05 14:19:14 +02:00
e4e3c890b1 remove docker build step
2022-07-05 14:10:45 +02:00
86cbe478a6 Bump version: 0.3.1 → 0.4.0 2022-07-05 13:57:07 +02:00
3479365c52 publish docker images with ci
2022-07-05 13:56:07 +02:00
9d53d79f95 show number of pending videos 2022-07-05 13:33:20 +02:00
83e1d9a406 add deletion of jobs
handle unavailable videos
2022-07-05 13:17:10 +02:00
877f48b7d7 longer sqlite timeout 2022-07-04 12:10:40 +02:00
8e1572000a add buildah script
2022-06-29 23:56:19 +02:00
991fe3df30 Bump version: 0.3.1 → 0.3.2
2022-06-29 23:26:04 +02:00
ca5062cced add daily cache cleanup 2022-06-29 23:25:43 +02:00
1f7140131f move db folder 2022-06-29 22:46:42 +02:00
fff882894f add sphinx project
2022-06-29 22:41:57 +02:00
297daaa51b Bump version: 0.3.0 → 0.3.1
2022-06-27 23:41:06 +02:00
5a76eb0559 show number of running tasks
show channel name in search items
2022-06-27 23:40:48 +02:00
7c741a476f Bump version: 0.2.0 → 0.3.0
2022-06-27 23:09:56 +02:00
e5c1fbdfb4 add search 2022-06-27 23:09:42 +02:00
df90e42729 allow variable number of feed items 2022-06-27 22:44:16 +02:00
f21387a23c add opml download 2022-06-27 22:38:12 +02:00
ded1895adb limit downloads view to 100 items 2022-06-27 21:56:18 +02:00
ede37ebe17 Bump version: 0.1.1 → 0.2.0
2022-06-27 12:43:06 +02:00
0d34a96227 add downloads view
fix yt-dlp cache not writable
fix video count
2022-06-27 12:42:59 +02:00
caefa4dd37 Bump version: 0.1.0 → 0.1.1
2022-06-27 02:14:31 +02:00
580075e140 add cache folder 2022-06-27 02:14:19 +02:00
57 changed files with 1578 additions and 374 deletions

View file

@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.1.0
current_version = 0.4.6
commit = True
tag = True

View file

@@ -36,6 +36,27 @@ steps:
depends_on:
- install dependencies
# - name: build container
# image: quay.io/buildah/stable
# when:
# event:
# - tag
# commands:
# - buildah login -u $DOCKER_USER -p $DOCKER_PASS -- $DOCKER_REGISTRY
# - buildah manifest create ucast
# - buildah bud --tag code.thetadev.de/hsa/ucast:latest --manifest ucast --arch amd64 --build-arg TARGETPLATFORM=linux/amd64 -f deploy/Dockerfile .
# - buildah bud --tag code.thetadev.de/hsa/ucast:latest --manifest ucast --arch arm64 --build-arg TARGETPLATFORM=linux/arm64 -f deploy/Dockerfile .
# - buildah manifest push --all ucast docker://code.thetadev.de/hsa/ucast:latest
# environment:
# DOCKER_REGISTRY:
# from_secret: docker_registry
# DOCKER_USER:
# from_secret: docker_username
# DOCKER_PASS:
# from_secret: docker_password
# depends_on:
# - test
volumes:
- name: cache
temp: { }

View file

@@ -19,16 +19,16 @@
grid-column: auto
@include tablet
grid-template-columns: repeat(3,minmax(0,1fr))
grid-template-columns: repeat(3, minmax(0, 1fr))
@include desktop
grid-template-columns: repeat(4,minmax(0,1fr))
grid-template-columns: repeat(4, minmax(0, 1fr))
@include widescreen
grid-template-columns: repeat(5,minmax(0,1fr))
grid-template-columns: repeat(5, minmax(0, 1fr))
@include fullhd
grid-template-columns: repeat(6,minmax(0,1fr))
grid-template-columns: repeat(6, minmax(0, 1fr))
.video-card
display: flex
@@ -43,3 +43,6 @@
// Fix almost invisible navbar items on mobile
.navbar-item
color: #fff
.overflow-x
overflow-x: auto

View file

@@ -1,5 +1,5 @@
# This has to be built with docker buildx to set the TARGETPLATFORM argument
FROM python:3.10
FROM registry.hub.docker.com/library/python:3.10
ARG TARGETPLATFORM

View file

@@ -1,16 +1,13 @@
FROM thetadev256/ucast-dev
FROM registry.hub.docker.com/thetadev256/ucast-dev
COPY . /build
WORKDIR /build
RUN poetry build -f wheel
FROM python:3.10
FROM registry.hub.docker.com/library/python:3.10
ARG TARGETPLATFORM
COPY --from=0 /build/dist /install
RUN pip install -- /install/*.whl gunicorn honcho
# ffmpeg static source (https://johnvansickle.com/ffmpeg/)
RUN set -e; \
mkdir /build_ffmpeg; \
@@ -41,6 +38,10 @@ COPY ./deploy/nginx.conf /etc/nginx/nginx.conf
COPY ./deploy/nginx /etc/nginx/conf.d
COPY ./deploy/entrypoint.py /entrypoint.py
COPY --from=0 /build/dist /install
RUN pip install -- /install/*.whl gunicorn honcho && \
rm -rf ~/.cache/pip
ENV UCAST_WORKDIR=/ucast
EXPOSE 8001

44
deploy/build.sh Executable file
View file

@@ -0,0 +1,44 @@
#!/bin/sh
set -e
# Source: https://danmanners.com/posts/2022-01-buildah-multi-arch/
# Set your manifest name
export MANIFEST_NAME="ucast"
# Set the required variables
export BUILD_PATH="."
export DOCKERFILE="deploy/Dockerfile"
export REGISTRY="registry.hub.docker.com"
export USER="thetadev256"
export IMAGE_NAME="ucast"
export IMAGE_TAG="v0.3.2"
# Create a multi-architecture manifest
buildah manifest create ${MANIFEST_NAME}
# Build your amd64 architecture container
buildah bud \
--tag "${REGISTRY}/${USER}/${IMAGE_NAME}:${IMAGE_TAG}" \
--manifest ${MANIFEST_NAME} \
--arch amd64 \
--build-arg TARGETPLATFORM=linux/amd64 \
-f ${DOCKERFILE} \
${BUILD_PATH}
# Build your arm64 architecture container
buildah bud \
--tag "${REGISTRY}/${USER}/${IMAGE_NAME}:${IMAGE_TAG}" \
--manifest ${MANIFEST_NAME} \
--arch arm64 \
--build-arg TARGETPLATFORM=linux/arm64 \
-f ${DOCKERFILE} \
${BUILD_PATH}
# Push the full manifest, with both CPU Architectures
buildah manifest push --all \
${MANIFEST_NAME} \
"docker://${REGISTRY}/${USER}/${IMAGE_NAME}:${IMAGE_TAG}"
buildah manifest push --all \
${MANIFEST_NAME} \
"docker://${REGISTRY}/${USER}/${IMAGE_NAME}"

View file

@@ -3,6 +3,7 @@ services:
ucast:
image: thetadev256/ucast
user: 1000:1000
restart: unless-stopped
ports:
- "8001:8001"
volumes:
@@ -10,7 +11,11 @@ services:
environment:
UCAST_REDIS_URL: "redis://redis:6379"
UCAST_SECRET_KEY: "django-insecure-Es/+plApGxNBy8+ewB+74zMlmfV2H3whw6gu7i0ESwGrEWAUYRP3HM2EX0PLr3UJ"
UCAST_ALLOWED_HOSTS: ".localhost,127.0.0.1"
UCAST_N_WORKERS: 2
UCAST_TZ: "Europe/Berlin"
redis:
container_name: redis
image: redis:alpine
restart: unless-stopped

3
docs/.gitignore vendored Normal file
View file

@@ -0,0 +1,3 @@
/.tox
/build
/venv

20
docs/Makefile Normal file
View file

@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

108
docs/_latex/hsastyle.sty Normal file
View file

@@ -0,0 +1,108 @@
\usepackage[absolute]{textpos}
\usepackage{setspace}
\newcommand{\hsamaketitle}{%
\let\sphinxrestorepageanchorsetting\relax
\ifHy@pageanchor\def\sphinxrestorepageanchorsetting{\Hy@pageanchortrue}\fi
\hypersetup{pdfauthor={\@author},
pdftitle={\@title},
pdfsubject={\subtitle},
pdfkeywords={Forschung, Entwicklung, Informatik},
}
\hypersetup{pageanchor=false}% avoid duplicate destination warnings
\begin{titlepage}
% Title page - Hochschule Augsburg
\thispagestyle{empty}\null
% Logo - Hochschule Augsburg - Informatik
\begin{textblock}{10}(8.0,1.1)
\begin{figure}[h]
\centering
\includegraphics[width=0.45\textwidth]{hsa_informatik_logo_lq.pdf}
\end{figure}
\end{textblock}
% Text below the logo
\begin{textblock}{15}(12.43,2.4)
\LARGE
\textsf{
\textbf{\textcolor[rgb]{1,0.41,0.13}{\\
\begin{flushleft}
Fakultät für\\
Informatik\\
\end{flushleft}
}
}
}
\end{textblock}
% Left text box - information
\begin{textblock}{15}(2,2)
%\LARGE
\begin{flushleft}
\begin{spacing} {1.2}
\huge
\textbf{\@title}
\vspace{30pt}
\textcolor[rgb]{1,0.41,0.13}{\\
\textbf{\subtitle}}\\
\vspace{60pt}
\LARGE
Studienrichtung\\
\hscourse\\
\vspace{30pt}
\@author\\
\vspace{60pt}
\LARGE
Prüfer: \examiner\\
\vspace{10pt}
Abgabedatum: \deadline\\
\end{spacing}
\end{flushleft}
\end{textblock}
% Right text box - university
\begin{textblock}{5}(12.45,8.0)
\textcolor[rgb]{1,0,0}{\\
\footnotesize
\begin{flushleft}
\begin{spacing} {1.3}
Hochschule f\"ur angewandte\\
Wissenschaften Augsburg\\
\vspace{4pt}
An der Hochschule 1\\
D-86161 Augsburg\\
\vspace{4pt}
Telefon +49 821 55 86-0\\
Fax +49 821 55 86-3222\\
www.hs-augsburg.de\\
info(at)hs-augsburg-de
\end{spacing}
\end{flushleft}
}
\end{textblock}
% Right middle text box - faculty
\begin{textblock}{5}(12.45,11.4)
\footnotesize
\begin{flushleft}
\begin{spacing} {1.3}
Fakult\"at f\"ur Informatik\\
Telefon +49 821 55 86-3450\\
Fax \hspace{10pt} +49 821 55 86-3499\\
\end{spacing}
\end{flushleft}
\end{textblock}
\end{titlepage}%
\setcounter{footnote}{0}%
\let\thanks\relax\let\maketitle\relax
%\gdef\@thanks{}\gdef\@author{}\gdef\@title{}
\clearpage
\ifdefined\sphinxbackoftitlepage\sphinxbackoftitlepage\fi
\if@openright\cleardoublepage\else\clearpage\fi
\sphinxrestorepageanchorsetting
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

92
docs/conf.py Normal file
View file

@@ -0,0 +1,92 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('../code'))
# -- Project information -----------------------------------------------------
project = "Ucast"
subtitle = "Projektarbeit Webtechnologien"
author = "Thomas Hampp"
copyright = "2022 " + author
examiner = "Fabian Ziegler"
deadline = "09.07.2022"
course = "Master Informatik"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinxcontrib.cairosvgconverter",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "de"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [".tox"]
# Pygments-Styling used for code syntax highlighting.
# See this page for an overview of all styles including live demo:
# https://pygments.org/demo/
pygments_style = "vs"
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# -- Options for PDF output -------------------------------------------------
latex_engine = "xelatex"
# latex_theme = 'hsathesis'
latex_elements = {
"extraclassoptions": "openany,oneside",
"preamble": r"""
\usepackage{hsastyle}
\newcommand\subtitle{%s}
\newcommand\deadline{%s}
\newcommand\examiner{%s}
\newcommand\hscourse{%s}
"""
% (subtitle, deadline, examiner, course),
"maketitle": r"\hsamaketitle",
}
latex_additional_files = [
"_latex/logos/hsa_informatik_logo_lq.pdf",
"_latex/hsastyle.sty",
]

9
docs/index.rst Normal file
View file

@@ -0,0 +1,9 @@
Ucast
#####
.. toctree::
:maxdepth: 2
:caption: Inhalt:
:glob:
src/*

4
docs/requirements.txt Normal file
View file

@@ -0,0 +1,4 @@
Sphinx==4.4.0
sphinx-autobuild
sphinx-rtd-theme
sphinxcontrib-svg2pdfconverter[CairoSVG]

245
docs/src/0_intro.rst Normal file
View file

@@ -0,0 +1,245 @@
Introduction
############

Most of the YouTube videos I watch are news or commentary videos. Since these
videos are very text-heavy, I often play them in the background and work on my
projects in the meantime. On the go, however, I have no way to play YouTube
videos in the background, because the YouTube app pauses playback as soon as it
is backgrounded. It is possible to download YouTube videos with the appropriate
web services, but that is fairly inconvenient. This is why I more often listen
to podcasts on the go, which can be streamed as well as downloaded to the phone
with a suitable app (I use AntennaPod).

I then wondered whether it would be possible to convert YouTube channels into
podcasts automatically. This led to the idea of building a server that
automatically downloads YouTube videos as MP3 files and publishes them in
podcast format. This way the audio content from YouTube can be listened to both
at the PC and on the go with a podcast app.

Technology
##########

Web framework
*************

I developed ucast with the Django web framework. Django has the advantage that
basic web application functionality such as a login system is already
implemented. This allowed me to concentrate on the actual features of my
application more quickly.

YouTube downloading
*******************

Videos are downloaded with the Python library
`yt-dlp <https://github.com/yt-dlp/yt-dlp>`_.
This library can download videos from YouTube and various other video platforms
and convert them to MP3 format using ffmpeg.

yt-dlp needs the link or the YouTube ID of a video to be able to download it.
Therefore an additional way is needed to fetch the latest videos of a channel
and its metadata (profile picture, description).
There are two options for this:
first, scraping the YouTube website, and second, YouTube's own RSS feeds.

YouTube provides an RSS feed for every channel at
``https://www.youtube.com/feeds/videos.xml?channel_id=<channel ID>``.
However, the feed only lists the latest 15 videos of a channel.
To be able to fetch older videos as well as a channel's metadata, the YouTube
website has to be requested and parsed. For this I used the ``scrapetube``
library as a basis and extended it with a method for fetching channel
information.
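The pipeline can be sketched in a few lines. The channel ID, the use of
``feedparser`` and the yt-dlp options below are illustrative assumptions, not
Ucast's actual code:

.. code-block:: python

   import feedparser
   from yt_dlp import YoutubeDL

   # Example channel ID; any channel ID works here.
   channel_id = "UCGiJh0NZ52wRhYKYnuZI08Q"
   feed_url = f"https://www.youtube.com/feeds/videos.xml?channel_id={channel_id}"

   # The channel feed only contains the latest 15 videos.
   feed = feedparser.parse(feed_url)
   latest = [(entry.title, entry.link) for entry in feed.entries]

   # Download the newest video as MP3 (requires ffmpeg on the PATH).
   ydl_opts = {
       "format": "bestaudio/best",
       "postprocessors": [
           {"key": "FFmpegExtractAudio", "preferredcodec": "mp3"},
       ],
   }
   with YoutubeDL(ydl_opts) as ydl:
       ydl.download([latest[0][1]])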
Task queue
**********

Ucast has to poll the subscribed channels regularly and download videos.
This is where a `task queue <https://python-rq.org>`_ comes in.
The web application can enqueue new tasks, which are then executed in the
background by workers.
With a scheduler it is also possible to run tasks periodically
(e.g. every 15 minutes).

The queue needs a way to exchange data between the application and the workers.
A Redis database is used for this.
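A minimal sketch of this setup with plain RQ (whether Ucast uses
``rq-scheduler`` for the periodic jobs is an assumption; the task function here
is only a placeholder):

.. code-block:: python

   from datetime import datetime

   from redis import Redis
   from rq import Queue
   from rq_scheduler import Scheduler

   redis = Redis.from_url("redis://localhost:6379")
   queue = Queue(connection=redis)

   def update_channels():
       """Placeholder task: poll the subscribed channels for new videos."""

   # One-off job, picked up by a worker process in the background.
   queue.enqueue(update_channels)

   # Recurring job, e.g. every 15 minutes.
   scheduler = Scheduler(queue=queue, connection=redis)
   scheduler.schedule(
       scheduled_time=datetime.utcnow(),
       func=update_channels,
       interval=900,  # seconds
       repeat=None,   # repeat forever
   )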
Frontend
********

Since Ucast does not have to provide any complex functionality on the client
side, the frontend is rendered server-side using Django templates and no
frontend framework is used. As the CSS framework I used Bulma, which provides a
library of components. Bulma is written in Sass, which makes it easy to adapt
to a desired design theme.

I did not do without JavaScript entirely, though.
For example, I used ``clipboard.js`` to copy the feed URLs with the click of a
button.
The endless scrolling on the video pages is implemented with ``htmx``, a JS
library that can load web content dynamically without requiring custom JS code.

Deployment
##########

Docker Compose
**************

Ucast is available as a Docker image named
`thetadev256/ucast <https://hub.docker.com/r/thetadev256/ucast>`_.
A docker-compose file with a basic configuration is located in the project
folder at ``deploy/docker-compose.yml``. To start Ucast, run the following
commands.

.. code-block:: sh

   mkdir _run                                                  # create the working directory
   docker-compose -f deploy/docker-compose.yml up -d           # start the application
   docker exec -it ucast-ucast-1 ucast-manage createsuperuser  # create a user account

The web interface is available at http://127.0.0.1:8001.

Configuration
*************

Configuration is done via environment variables. All environment variables have
to be prefixed with ``UCAST_`` (e.g. ``UCAST_DEBUG``).

**DEBUG**
  Enable Django's `debug mode <https://docs.djangoproject.com/en/4.0/ref/settings/#debug>`_.
  Default: ``false``

**ALLOWED_HOSTS**
  Allowed `hosts/domains <https://docs.djangoproject.com/en/4.0/ref/settings/#allowed-hosts>`_.
  Example: ``"ucast.thetadev.de"``

**DB_ENGINE**
  Database backend to use (``sqlite`` / ``mysql`` / ``postgresql``).
  Default: ``sqlite``

**DB_NAME**
  Name of the database. Default: ``db``

**DB_HOST**
  Address of the database. Default: ``127.0.0.1``

**DB_PORT**
  Port of the database. Default: 3306 (mysql), 5432 (postgresql)

**DB_USER**, **DB_PASS**
  Username/password for the database

**WORKDIR**
  Main directory for Ucast (see directory structure).
  Default: current working directory

**STATIC_ROOT**
  Folder for static files (``WORKDIR/static``)

**DOWNLOAD_ROOT**
  Folder for downloaded images and audio files (``WORKDIR/data``)

**CACHE_ROOT**
  Folder for temporary files (``{WORKDIR}/cache``)

**DB_DIR**
  Folder for the SQLite database file (``{WORKDIR}/db``)

**TZ**
  Time zone. Default: system setting

**REDIS_URL**
  Redis address. Default: ``redis://localhost:6379``

**REDIS_QUEUE_TIMEOUT**
  Timeout for started jobs [s]. Default: 600

**REDIS_QUEUE_RESULT_TTL**
  Retention time for completed tasks [s]. Default: 600

**YT_UPDATE_INTERVAL**
  Interval at which the YouTube channels are polled [s].
  Default: 900

**FEED_MAX_ITEMS**
  Maximum number of videos contained in the feeds.
  Default: 50

**N_WORKERS**
  Number of worker processes to start
  (only available in the Docker container).
  Default: 1
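As an illustration of the ``UCAST_`` prefix convention, a settings module could
read these variables roughly as follows (the helper below is a hypothetical
sketch, not Ucast's actual settings code):

.. code-block:: python

   import os

   def env(name, default=None):
       # All Ucast settings are read from UCAST_-prefixed variables.
       return os.environ.get("UCAST_" + name, default)

   DEBUG = env("DEBUG", "false").lower() == "true"
   ALLOWED_HOSTS = [h for h in env("ALLOWED_HOSTS", "").split(",") if h]
   REDIS_URL = env("REDIS_URL", "redis://localhost:6379")
   YT_UPDATE_INTERVAL = int(env("YT_UPDATE_INTERVAL", "900"))
   FEED_MAX_ITEMS = int(env("FEED_MAX_ITEMS", "50"))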
Directory structure
*******************

Ucast creates four subfolders in its working directory where the application's
data is stored.

.. code-block:: txt

   - workdir
     |_ cache    temporary files
     |_ data     downloaded media
     |_ db       SQLite database
     |_ static   static website files

Usage
#####

After logging in you land on the overview page, which lists all subscribed
channels. To subscribe to a new channel, copy its YouTube URL
(e.g. https://youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q)
into the input field.

Once a new channel has been added, ucast starts downloading its latest 15
videos. To check which videos are currently being downloaded, you can go to the
*Downloads* page. This page also lists failed download tasks, which can be
retried manually (e.g. after a loss of internet connectivity). There is also a
search function that lets you look for a video with a specific title.

To add the subscribed channels to your podcast client, simply copy and paste
the feed URL from the overview page.
Most podcast clients also offer an OPML import feature. In that case you can
just click the *Download OPML* link at the bottom of the page and import the
downloaded file. This way all subscribed channels are quickly added to your
podcast client.

Conclusion
##########

I have been running Ucast on my NAS for a week and use it to listen to videos
both at my computer and on the go. During the first days I still found a few
bugs that had to be fixed. For example, not all YouTube thumbnails come in 16:9
format, which is why they have to be cropped so they do not shift the page
layout.

Initially I planned to integrate `SponsorBlock <https://sponsor.ajay.app>`_
into Ucast to remove promotional content from the videos. yt-dlp already has
this feature built in. However, SponsorBlock relies on a community-maintained
database, which means that, depending on the popularity of a video, it takes
between half an hour and several hours after release until segment markers are
available. For SponsorBlock to work reliably, Ucast would have to poll the
database regularly after a video's release and re-download and re-cut the video
whenever the markers change. This was too complex for me for now, and I decided
to implement the feature at a later point.

Another feature I am going to implement in the future is support for
alternative video platforms such as Peertube, Odysee and Bitchute.

20
docs/tox.ini Normal file
View file

@@ -0,0 +1,20 @@
[tox]
skipsdist = True
envlist =
html
pdf
[testenv]
description = Dokumentation bauen
deps = -r{toxinidir}/requirements.txt
[testenv:html]
commands = sphinx-build -b html -d build/doctrees . build/html
[testenv:pdf]
allowlist_externals = make
commands = make latexpdf
[testenv:live]
description = Live update mit sphinx-autobuild
commands = sphinx-autobuild . build/html --open-browser

379
poetry.lock generated
View file

@@ -68,7 +68,7 @@ python-versions = ">=3.5"
[[package]]
name = "certifi"
version = "2022.5.18.1"
version = "2022.6.15"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
@@ -76,7 +76,7 @@ python-versions = ">=3.6"
[[package]]
name = "cffi"
version = "1.15.0"
version = "1.15.1"
description = "Foreign Function Interface for Python calling C code."
category = "main"
optional = false
@@ -95,11 +95,11 @@ python-versions = ">=3.6.1"
[[package]]
name = "charset-normalizer"
version = "2.0.12"
version = "2.1.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.5.0"
python-versions = ">=3.6.0"
[package.extras]
unicode_backport = ["unicodedata2"]
@@ -117,7 +117,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "colorama"
version = "0.4.4"
version = "0.4.5"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
@@ -183,7 +183,7 @@ python-versions = "*"
[[package]]
name = "django"
version = "4.0.4"
version = "4.0.6"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
category = "main"
optional = false
@@ -222,14 +222,14 @@ Django = ">=3.2"
[[package]]
name = "fakeredis"
version = "1.8"
version = "1.8.1"
description = "Fake implementation of redis API for testing purposes."
category = "dev"
optional = false
python-versions = ">=3.7,<4.0"
[package.dependencies]
redis = "<=4.3.1"
redis = "<4.4"
six = ">=1.16.0,<2.0.0"
sortedcontainers = ">=2.4.0,<3.0.0"
@@ -250,7 +250,7 @@ sgmllib3k = "*"
[[package]]
name = "filelock"
version = "3.7.0"
version = "3.7.1"
description = "A platform independent file lock."
category = "dev"
optional = false
@@ -349,7 +349,7 @@ python-versions = ">=3.5, <4"
[[package]]
name = "mysqlclient"
version = "2.1.0"
version = "2.1.1"
description = "Python interface to MySQL"
category = "main"
optional = false
@@ -357,11 +357,11 @@ python-versions = ">=3.5"
[[package]]
name = "nodeenv"
version = "1.6.0"
version = "1.7.0"
description = "Node.js virtual environment builder"
category = "dev"
optional = false
python-versions = "*"
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
[[package]]
name = "packaging"
@@ -376,14 +376,14 @@
[[package]]
name = "pillow"
version = "9.1.1"
version = "9.2.0"
description = "Python Imaging Library (Fork)"
category = "main"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinx-rtd-theme (>=1.0)", "sphinxext-opengraph"]
docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"]
tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
@@ -452,7 +452,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pycryptodomex"
version = "3.14.1"
version = "3.15.0"
description = "Cryptographic library for Python"
category = "main"
optional = false
@@ -522,7 +522,7 @@ testing = ["django", "django-configurations (>=2.0)"]
[[package]]
name = "pytest-mock"
version = "3.7.0"
version = "3.8.2"
description = "Thin-wrapper around the mock package for easier use with pytest"
category = "dev"
optional = false
@@ -580,7 +580,7 @@ python-versions = ">=3.6"
[[package]]
name = "redis"
version = "4.3.1"
version = "4.3.4"
description = "Python client for Redis database and key-value store"
category = "main"
optional = false
@@ -597,21 +597,21 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"
[[package]]
name = "requests"
version = "2.27.1"
version = "2.28.1"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
python-versions = ">=3.7, <4"
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""}
idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""}
charset-normalizer = ">=2,<3"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "rq"
@@ -717,7 +717,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "virtualenv"
version = "20.14.1"
version = "20.15.1"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
@@ -759,7 +759,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
[[package]]
name = "yt-dlp"
version = "2022.5.18"
version = "2022.6.29"
description = "A youtube-dl fork with additional features and patches"
category = "main"
optional = false
@@ -776,7 +776,7 @@ websockets = "*"
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "41be11c588a5d47c4ddc2e06b5699ee9db0c3888e3576c0134d55e065f53cc0d"
content-hash = "f582f6abb81e647dd6874bb00768645e574c3c4c85ee366d9cd74b675717e216"
[metadata.files]
asgiref = [
@@ -896,76 +896,90 @@ bump2version = [
{file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"},
]
certifi = [
{file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"},
{file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"},
{file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
{file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
]
cffi = [
{file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
{file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"},
{file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"},
{file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"},
{file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"},
{file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"},
{file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"},
{file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"},
{file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"},
{file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"},
{file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"},
{file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"},
{file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"},
{file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"},
{file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"},
{file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"},
{file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"},
{file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"},
{file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"},
{file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"},
{file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"},
{file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"},
{file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"},
{file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"},
{file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"},
{file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"},
{file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"},
{file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"},
{file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"},
{file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"},
{file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"},
{file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"},
{file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"},
{file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"},
{file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"},
{file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"},
{file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"},
{file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"},
{file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"},
{file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"},
{file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"},
{file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"},
{file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"},
{file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"},
{file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"},
{file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"},
{file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"},
{file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"},
{file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"},
{file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"},
{file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
{file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
{file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
{file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
{file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
{file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
{file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
{file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
{file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
{file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
{file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
{file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
{file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
{file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
{file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
{file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
{file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
{file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
{file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
{file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
{file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
{file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
{file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
{file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
{file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
]
cfgv = [
{file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
{file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
]
charset-normalizer = [
{file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
{file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
{file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"},
{file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"},
]
click = [
{file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
{file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
]
colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
{file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
{file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
]
colorthief = [
{file = "colorthief-0.2.1-py2.py3-none-any.whl", hash = "sha256:b04fc8ce5cf9c888768745e29cb19b7b688d5711af6fba26e8057debabec56b9"},
@@ -1027,8 +1041,8 @@ distlib = [
{file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"},
]
django = [
{file = "Django-4.0.4-py3-none-any.whl", hash = "sha256:07c8638e7a7f548dc0acaaa7825d84b7bd42b10e8d22268b3d572946f1e9b687"},
{file = "Django-4.0.4.tar.gz", hash = "sha256:4e8177858524417563cc0430f29ea249946d831eacb0068a1455686587df40b5"},
{file = "Django-4.0.6-py3-none-any.whl", hash = "sha256:ca54ebedfcbc60d191391efbf02ba68fb52165b8bf6ccd6fe71f098cac1fe59e"},
{file = "Django-4.0.6.tar.gz", hash = "sha256:a67a793ff6827fd373555537dca0da293a63a316fe34cb7f367f898ccca3c3ae"},
]
django-bulma = [
{file = "django-bulma-0.8.3.tar.gz", hash = "sha256:b794b4e64f482de77f376451f7cd8b3c8448eb68e5a24c51b9190625a08b0b30"},
@@ -1039,16 +1053,16 @@ django-htmx = [
{file = "django_htmx-1.12.0-py3-none-any.whl", hash = "sha256:e8351b9251642a5a550a18c6958727ea9b33574bb412b1900fa5ab0d5dd9db40"},
]
fakeredis = [
{file = "fakeredis-1.8-py3-none-any.whl", hash = "sha256:65dcd78c0cd29d17daccce9f58698f6ab61ad7a404eab373fcad2b76fe8db03d"},
{file = "fakeredis-1.8.tar.gz", hash = "sha256:cbf8d74ae06672d40b2fa88b9ee4f1d6efd56b06b2e7f0be2c639647f00643f1"},
{file = "fakeredis-1.8.1-py3-none-any.whl", hash = "sha256:4a0f8fe0d5c18147864db50ae2e86f667420ea06653bec08b3a5fccfd3fbde6f"},
{file = "fakeredis-1.8.1.tar.gz", hash = "sha256:ca516f86181f85615cd8210854b43acbe7b1f37ed8a082c5557749c73f2f0dd3"},
]
feedparser = [
{file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"},
{file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"},
]
filelock = [
{file = "filelock-3.7.0-py3-none-any.whl", hash = "sha256:c7b5fdb219b398a5b28c8e4c1893ef5f98ece6a38c6ab2c22e26ec161556fed6"},
{file = "filelock-3.7.0.tar.gz", hash = "sha256:b795f1b42a61bbf8ec7113c341dad679d772567b936fbd1bf43c9a238e673e20"},
{file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"},
{file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"},
]
font-source-sans-pro = [
{file = "font-source-sans-pro-0.0.1.tar.gz", hash = "sha256:3f81d8e52b0d7e930e2c867c0d3ee549312d03f97b71b664a8361006311f72e5"},
@@ -1089,59 +1103,81 @@ mutagen = [
{file = "mutagen-1.45.1.tar.gz", hash = "sha256:6397602efb3c2d7baebd2166ed85731ae1c1d475abca22090b7141ff5034b3e1"},
]
mysqlclient = [
{file = "mysqlclient-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:02c8826e6add9b20f4cb12dcf016485f7b1d6e30356a1204d05431867a1b3947"},
{file = "mysqlclient-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b62d23c11c516cedb887377c8807628c1c65d57593b57853186a6ee18b0c6a5b"},
{file = "mysqlclient-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2c8410f54492a3d2488a6a53e2d85b7e016751a1e7d116e7aea9c763f59f5e8c"},
{file = "mysqlclient-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6279263d5a9feca3e0edbc2b2a52c057375bf301d47da2089c075ff76331d14"},
{file = "mysqlclient-2.1.0.tar.gz", hash = "sha256:973235686f1b720536d417bf0a0d39b4ab3d5086b2b6ad5e6752393428c02b12"},
{file = "mysqlclient-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1ed71bd6244993b526113cca3df66428609f90e4652f37eb51c33496d478b37"},
{file = "mysqlclient-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:c812b67e90082a840efb82a8978369e6e69fc62ce1bda4ca8f3084a9d862308b"},
{file = "mysqlclient-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:0d1cd3a5a4d28c222fa199002810e8146cffd821410b67851af4cc80aeccd97c"},
{file = "mysqlclient-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:b355c8b5a7d58f2e909acdbb050858390ee1b0e13672ae759e5e784110022994"},
{file = "mysqlclient-2.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:996924f3483fd36a34a5812210c69e71dea5a3d5978d01199b78b7f6d485c855"},
{file = "mysqlclient-2.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:dea88c8d3f5a5d9293dfe7f087c16dd350ceb175f2f6631c9cf4caf3e19b7a96"},
{file = "mysqlclient-2.1.1.tar.gz", hash = "sha256:828757e419fb11dd6c5ed2576ec92c3efaa93a0f7c39e263586d1ee779c3d782"},
]
nodeenv = [
{file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"},
{file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"},
{file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
{file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
]
packaging = [
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pillow = [
{file = "Pillow-9.1.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe"},
{file = "Pillow-9.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e"},
{file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c857532c719fb30fafabd2371ce9b7031812ff3889d75273827633bca0c4602"},
{file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59789a7d06c742e9d13b883d5e3569188c16acb02eeed2510fd3bfdbc1bd1530"},
{file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d45dbe4b21a9679c3e8b3f7f4f42a45a7d3ddff8a4a16109dff0e1da30a35b2"},
{file = "Pillow-9.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e9ed59d1b6ee837f4515b9584f3d26cf0388b742a11ecdae0d9237a94505d03a"},
{file = "Pillow-9.1.1-cp310-cp310-win32.whl", hash = "sha256:b3fe2ff1e1715d4475d7e2c3e8dabd7c025f4410f79513b4ff2de3d51ce0fa9c"},
{file = "Pillow-9.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5b650dbbc0969a4e226d98a0b440c2f07a850896aed9266b6fedc0f7e7834108"},
{file = "Pillow-9.1.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:0b4d5ad2cd3a1f0d1df882d926b37dbb2ab6c823ae21d041b46910c8f8cd844b"},
{file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9370d6744d379f2de5d7fa95cdbd3a4d92f0b0ef29609b4b1687f16bc197063d"},
{file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b761727ed7d593e49671d1827044b942dd2f4caae6e51bab144d4accf8244a84"},
{file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a66fe50386162df2da701b3722781cbe90ce043e7d53c1fd6bd801bca6b48d4"},
{file = "Pillow-9.1.1-cp37-cp37m-win32.whl", hash = "sha256:2b291cab8a888658d72b575a03e340509b6b050b62db1f5539dd5cd18fd50578"},
{file = "Pillow-9.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1d4331aeb12f6b3791911a6da82de72257a99ad99726ed6b63f481c0184b6fb9"},
{file = "Pillow-9.1.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8844217cdf66eabe39567118f229e275f0727e9195635a15e0e4b9227458daaf"},
{file = "Pillow-9.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b6617221ff08fbd3b7a811950b5c3f9367f6e941b86259843eab77c8e3d2b56b"},
{file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20d514c989fa28e73a5adbddd7a171afa5824710d0ab06d4e1234195d2a2e546"},
{file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088df396b047477dd1bbc7de6e22f58400dae2f21310d9e2ec2933b2ef7dfa4f"},
{file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53c27bd452e0f1bc4bfed07ceb235663a1df7c74df08e37fd6b03eb89454946a"},
{file = "Pillow-9.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3f6c1716c473ebd1649663bf3b42702d0d53e27af8b64642be0dd3598c761fb1"},
{file = "Pillow-9.1.1-cp38-cp38-win32.whl", hash = "sha256:c67db410508b9de9c4694c57ed754b65a460e4812126e87f5052ecf23a011a54"},
{file = "Pillow-9.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:f054b020c4d7e9786ae0404278ea318768eb123403b18453e28e47cdb7a0a4bf"},
{file = "Pillow-9.1.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:c17770a62a71718a74b7548098a74cd6880be16bcfff5f937f900ead90ca8e92"},
{file = "Pillow-9.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3f6a6034140e9e17e9abc175fc7a266a6e63652028e157750bd98e804a8ed9a"},
{file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f372d0f08eff1475ef426344efe42493f71f377ec52237bf153c5713de987251"},
{file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09e67ef6e430f90caa093528bd758b0616f8165e57ed8d8ce014ae32df6a831d"},
{file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66daa16952d5bf0c9d5389c5e9df562922a59bd16d77e2a276e575d32e38afd1"},
{file = "Pillow-9.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d78ca526a559fb84faaaf84da2dd4addef5edb109db8b81677c0bb1aad342601"},
{file = "Pillow-9.1.1-cp39-cp39-win32.whl", hash = "sha256:55e74faf8359ddda43fee01bffbc5bd99d96ea508d8a08c527099e84eb708f45"},
{file = "Pillow-9.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c150dbbb4a94ea4825d1e5f2c5501af7141ea95825fadd7829f9b11c97aaf6c"},
{file = "Pillow-9.1.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:769a7f131a2f43752455cc72f9f7a093c3ff3856bf976c5fb53a59d0ccc704f6"},
{file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:488f3383cf5159907d48d32957ac6f9ea85ccdcc296c14eca1a4e396ecc32098"},
{file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b525a356680022b0af53385944026d3486fc8c013638cf9900eb87c866afb4c"},
{file = "Pillow-9.1.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6e760cf01259a1c0a50f3c845f9cad1af30577fd8b670339b1659c6d0e7a41dd"},
{file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4165205a13b16a29e1ac57efeee6be2dfd5b5408122d59ef2145bc3239fa340"},
{file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937a54e5694684f74dcbf6e24cc453bfc5b33940216ddd8f4cd8f0f79167f765"},
{file = "Pillow-9.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8"},
{file = "Pillow-9.1.1.tar.gz", hash = "sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0"},
{file = "Pillow-9.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a9c9bc489f8ab30906d7a85afac4b4944a572a7432e00698a7239f44a44e6efb"},
{file = "Pillow-9.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:510cef4a3f401c246cfd8227b300828715dd055463cdca6176c2e4036df8bd4f"},
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7888310f6214f19ab2b6df90f3f06afa3df7ef7355fc025e78a3044737fab1f5"},
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831e648102c82f152e14c1a0938689dbb22480c548c8d4b8b248b3e50967b88c"},
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cc1d2451e8a3b4bfdb9caf745b58e6c7a77d2e469159b0d527a4554d73694d1"},
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:136659638f61a251e8ed3b331fc6ccd124590eeff539de57c5f80ef3a9594e58"},
{file = "Pillow-9.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6e8c66f70fb539301e064f6478d7453e820d8a2c631da948a23384865cd95544"},
{file = "Pillow-9.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:37ff6b522a26d0538b753f0b4e8e164fdada12db6c6f00f62145d732d8a3152e"},
{file = "Pillow-9.2.0-cp310-cp310-win32.whl", hash = "sha256:c79698d4cd9318d9481d89a77e2d3fcaeff5486be641e60a4b49f3d2ecca4e28"},
{file = "Pillow-9.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:254164c57bab4b459f14c64e93df11eff5ded575192c294a0c49270f22c5d93d"},
{file = "Pillow-9.2.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:408673ed75594933714482501fe97e055a42996087eeca7e5d06e33218d05aa8"},
{file = "Pillow-9.2.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:727dd1389bc5cb9827cbd1f9d40d2c2a1a0c9b32dd2261db522d22a604a6eec9"},
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50dff9cc21826d2977ef2d2a205504034e3a4563ca6f5db739b0d1026658e004"},
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb6259196a589123d755380b65127ddc60f4c64b21fc3bb46ce3a6ea663659b0"},
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0554af24df2bf96618dac71ddada02420f946be943b181108cac55a7a2dcd4"},
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:15928f824870535c85dbf949c09d6ae7d3d6ac2d6efec80f3227f73eefba741c"},
{file = "Pillow-9.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:bdd0de2d64688ecae88dd8935012c4a72681e5df632af903a1dca8c5e7aa871a"},
{file = "Pillow-9.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5b87da55a08acb586bad5c3aa3b86505f559b84f39035b233d5bf844b0834b1"},
{file = "Pillow-9.2.0-cp311-cp311-win32.whl", hash = "sha256:b6d5e92df2b77665e07ddb2e4dbd6d644b78e4c0d2e9272a852627cdba0d75cf"},
{file = "Pillow-9.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6bf088c1ce160f50ea40764f825ec9b72ed9da25346216b91361eef8ad1b8f8c"},
{file = "Pillow-9.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:2c58b24e3a63efd22554c676d81b0e57f80e0a7d3a5874a7e14ce90ec40d3069"},
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef7592281f7c174d3d6cbfbb7ee5984a671fcd77e3fc78e973d492e9bf0eb3f"},
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd7b9c7139dc8258d164b55696ecd16c04607f1cc33ba7af86613881ffe4ac8"},
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a138441e95562b3c078746a22f8fca8ff1c22c014f856278bdbdd89ca36cff1b"},
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:93689632949aff41199090eff5474f3990b6823404e45d66a5d44304e9cdc467"},
{file = "Pillow-9.2.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:f3fac744f9b540148fa7715a435d2283b71f68bfb6d4aae24482a890aed18b59"},
{file = "Pillow-9.2.0-cp37-cp37m-win32.whl", hash = "sha256:fa768eff5f9f958270b081bb33581b4b569faabf8774726b283edb06617101dc"},
{file = "Pillow-9.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:69bd1a15d7ba3694631e00df8de65a8cb031911ca11f44929c97fe05eb9b6c1d"},
{file = "Pillow-9.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:030e3460861488e249731c3e7ab59b07c7853838ff3b8e16aac9561bb345da14"},
{file = "Pillow-9.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:74a04183e6e64930b667d321524e3c5361094bb4af9083db5c301db64cd341f3"},
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d33a11f601213dcd5718109c09a52c2a1c893e7461f0be2d6febc2879ec2402"},
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fd6f5e3c0e4697fa7eb45b6e93996299f3feee73a3175fa451f49a74d092b9f"},
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a647c0d4478b995c5e54615a2e5360ccedd2f85e70ab57fbe817ca613d5e63b8"},
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4134d3f1ba5f15027ff5c04296f13328fecd46921424084516bdb1b2548e66ff"},
{file = "Pillow-9.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:bc431b065722a5ad1dfb4df354fb9333b7a582a5ee39a90e6ffff688d72f27a1"},
{file = "Pillow-9.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1536ad017a9f789430fb6b8be8bf99d2f214c76502becc196c6f2d9a75b01b76"},
{file = "Pillow-9.2.0-cp38-cp38-win32.whl", hash = "sha256:2ad0d4df0f5ef2247e27fc790d5c9b5a0af8ade9ba340db4a73bb1a4a3e5fb4f"},
{file = "Pillow-9.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:ec52c351b35ca269cb1f8069d610fc45c5bd38c3e91f9ab4cbbf0aebc136d9c8"},
{file = "Pillow-9.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ed2c4ef2451de908c90436d6e8092e13a43992f1860275b4d8082667fbb2ffc"},
{file = "Pillow-9.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ad2f835e0ad81d1689f1b7e3fbac7b01bb8777d5a985c8962bedee0cc6d43da"},
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea98f633d45f7e815db648fd7ff0f19e328302ac36427343e4432c84432e7ff4"},
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7761afe0126d046974a01e030ae7529ed0ca6a196de3ec6937c11df0df1bc91c"},
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a54614049a18a2d6fe156e68e188da02a046a4a93cf24f373bffd977e943421"},
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:5aed7dde98403cd91d86a1115c78d8145c83078e864c1de1064f52e6feb61b20"},
{file = "Pillow-9.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13b725463f32df1bfeacbf3dd197fb358ae8ebcd8c5548faa75126ea425ccb60"},
{file = "Pillow-9.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:808add66ea764ed97d44dda1ac4f2cfec4c1867d9efb16a33d158be79f32b8a4"},
{file = "Pillow-9.2.0-cp39-cp39-win32.whl", hash = "sha256:337a74fd2f291c607d220c793a8135273c4c2ab001b03e601c36766005f36885"},
{file = "Pillow-9.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:fac2d65901fb0fdf20363fbd345c01958a742f2dc62a8dd4495af66e3ff502a4"},
{file = "Pillow-9.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ad2277b185ebce47a63f4dc6302e30f05762b688f8dc3de55dbae4651872cdf3"},
{file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c7b502bc34f6e32ba022b4a209638f9e097d7a9098104ae420eb8186217ebbb"},
{file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d1f14f5f691f55e1b47f824ca4fdcb4b19b4323fe43cc7bb105988cad7496be"},
{file = "Pillow-9.2.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:dfe4c1fedfde4e2fbc009d5ad420647f7730d719786388b7de0999bf32c0d9fd"},
{file = "Pillow-9.2.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:f07f1f00e22b231dd3d9b9208692042e29792d6bd4f6639415d2f23158a80013"},
{file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1802f34298f5ba11d55e5bb09c31997dc0c6aed919658dfdf0198a2fe75d5490"},
{file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17d4cafe22f050b46d983b71c707162d63d796a1235cdf8b9d7a112e97b15bac"},
{file = "Pillow-9.2.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96b5e6874431df16aee0c1ba237574cb6dff1dcb173798faa6a9d8b399a05d0e"},
{file = "Pillow-9.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0030fdbd926fb85844b8b92e2f9449ba89607231d3dd597a21ae72dc7fe26927"},
{file = "Pillow-9.2.0.tar.gz", hash = "sha256:75e636fd3e0fb872693f23ccb8a5ff2cd578801251f3a4f6854c6a5d437d3c04"},
]
platformdirs = [
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
@ -1177,33 +1213,36 @@ pycparser = [
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
pycryptodomex = [
{file = "pycryptodomex-3.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca88f2f7020002638276439a01ffbb0355634907d1aa5ca91f3dc0c2e44e8f3b"},
{file = "pycryptodomex-3.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:8536bc08d130cae6dcba1ea689f2913dfd332d06113904d171f2f56da6228e89"},
{file = "pycryptodomex-3.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:406ec8cfe0c098fadb18d597dc2ee6de4428d640c0ccafa453f3d9b2e58d29e2"},
{file = "pycryptodomex-3.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:da8db8374295fb532b4b0c467e66800ef17d100e4d5faa2bbbd6df35502da125"},
{file = "pycryptodomex-3.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:d709572d64825d8d59ea112e11cc7faf6007f294e9951324b7574af4251e4de8"},
{file = "pycryptodomex-3.14.1-cp27-cp27m-win32.whl", hash = "sha256:3da13c2535b7aea94cc2a6d1b1b37746814c74b6e80790daddd55ca5c120a489"},
{file = "pycryptodomex-3.14.1-cp27-cp27m-win_amd64.whl", hash = "sha256:298c00ea41a81a491d5b244d295d18369e5aac4b61b77b2de5b249ca61cd6659"},
{file = "pycryptodomex-3.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:77931df40bb5ce5e13f4de2bfc982b2ddc0198971fbd947776c8bb5050896eb2"},
{file = "pycryptodomex-3.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:c5dd3ffa663c982d7f1be9eb494a8924f6d40e2e2f7d1d27384cfab1b2ac0662"},
{file = "pycryptodomex-3.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2aa887683eee493e015545bd69d3d21ac8d5ad582674ec98f4af84511e353e45"},
{file = "pycryptodomex-3.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:8085bd0ad2034352eee4d4f3e2da985c2749cb7344b939f4d95ead38c2520859"},
{file = "pycryptodomex-3.14.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:e95a4a6c54d27a84a4624d2af8bb9ee178111604653194ca6880c98dcad92f48"},
{file = "pycryptodomex-3.14.1-cp35-abi3-manylinux1_i686.whl", hash = "sha256:a4d412eba5679ede84b41dbe48b1bed8f33131ab9db06c238a235334733acc5e"},
{file = "pycryptodomex-3.14.1-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:d2cce1c82a7845d7e2e8a0956c6b7ed3f1661c9acf18eb120fc71e098ab5c6fe"},
{file = "pycryptodomex-3.14.1-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:f75009715dcf4a3d680c2338ab19dac5498f8121173a929872950f4fb3a48fbf"},
{file = "pycryptodomex-3.14.1-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:1ca8e1b4c62038bb2da55451385246f51f412c5f5eabd64812c01766a5989b4a"},
{file = "pycryptodomex-3.14.1-cp35-abi3-win32.whl", hash = "sha256:ee835def05622e0c8b1435a906491760a43d0c462f065ec9143ec4b8d79f8bff"},
{file = "pycryptodomex-3.14.1-cp35-abi3-win_amd64.whl", hash = "sha256:b5a185ae79f899b01ca49f365bdf15a45d78d9856f09b0de1a41b92afce1a07f"},
{file = "pycryptodomex-3.14.1-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:797a36bd1f69df9e2798e33edb4bd04e5a30478efc08f9428c087f17f65a7045"},
{file = "pycryptodomex-3.14.1-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:aebecde2adc4a6847094d3bd6a8a9538ef3438a5ea84ac1983fcb167db614461"},
{file = "pycryptodomex-3.14.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:f8524b8bc89470cec7ac51734907818d3620fb1637f8f8b542d650ebec42a126"},
{file = "pycryptodomex-3.14.1-pp27-pypy_73-win32.whl", hash = "sha256:4d0db8df9ffae36f416897ad184608d9d7a8c2b46c4612c6bc759b26c073f750"},
{file = "pycryptodomex-3.14.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b276cc4deb4a80f9dfd47a41ebb464b1fe91efd8b1b8620cf5ccf8b824b850d6"},
{file = "pycryptodomex-3.14.1-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:e36c7e3b5382cd5669cf199c4a04a0279a43b2a3bdd77627e9b89778ac9ec08c"},
{file = "pycryptodomex-3.14.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:c4d8977ccda886d88dc3ca789de2f1adc714df912ff3934b3d0a3f3d777deafb"},
{file = "pycryptodomex-3.14.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:530756d2faa40af4c1f74123e1d889bd07feae45bac2fd32f259a35f7aa74151"},
{file = "pycryptodomex-3.14.1.tar.gz", hash = "sha256:2ce76ed0081fd6ac8c74edc75b9d14eca2064173af79843c24fa62573263c1f2"},
{file = "pycryptodomex-3.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6f5b6ba8aefd624834bc177a2ac292734996bb030f9d1b388e7504103b6fcddf"},
{file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:4540904c09704b6f831059c0dfb38584acb82cb97b0125cd52688c1f1e3fffa6"},
{file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:0fadb9f7fa3150577800eef35f62a8a24b9ddf1563ff060d9bd3af22d3952c8c"},
{file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:fc9bc7a9b79fe5c750fc81a307052f8daabb709bdaabb0fb18fb136b66b653b5"},
{file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f8be976cec59b11f011f790b88aca67b4ea2bd286578d0bd3e31bcd19afcd3e4"},
{file = "pycryptodomex-3.15.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:78d9621cf0ea35abf2d38fa2ca6d0634eab6c991a78373498ab149953787e5e5"},
{file = "pycryptodomex-3.15.0-cp27-cp27m-win32.whl", hash = "sha256:b6306403228edde6e289f626a3908a2f7f67c344e712cf7c0a508bab3ad9e381"},
{file = "pycryptodomex-3.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:48697790203909fab02a33226fda546604f4e2653f9d47bc5d3eb40879fa7c64"},
{file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:18e2ab4813883ae63396c0ffe50b13554b32bb69ec56f0afaf052e7a7ae0d55b"},
{file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3709f13ca3852b0b07fc04a2c03b379189232b24007c466be0f605dd4723e9d4"},
{file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:191e73bc84a8064ad1874dba0ebadedd7cce4dedee998549518f2c74a003b2e1"},
{file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e3164a18348bd53c69b4435ebfb4ac8a4076291ffa2a70b54f0c4b80c7834b1d"},
{file = "pycryptodomex-3.15.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:5676a132169a1c1a3712edf25250722ebc8c9102aa9abd814df063ca8362454f"},
{file = "pycryptodomex-3.15.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:e2b12968522a0358b8917fc7b28865acac002f02f4c4c6020fcb264d76bfd06d"},
{file = "pycryptodomex-3.15.0-cp35-abi3-manylinux1_i686.whl", hash = "sha256:e47bf8776a7e15576887f04314f5228c6527b99946e6638cf2f16da56d260cab"},
{file = "pycryptodomex-3.15.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:996e1ba717077ce1e6d4849af7a1426f38b07b3d173b879e27d5e26d2e958beb"},
{file = "pycryptodomex-3.15.0-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:65204412d0c6a8e3c41e21e93a5e6054a74fea501afa03046a388cf042e3377a"},
{file = "pycryptodomex-3.15.0-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:dd452a5af7014e866206d41751886c9b4bf379a339fdf2dbfc7dd16c0fb4f8e0"},
{file = "pycryptodomex-3.15.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:b9279adc16e4b0f590ceff581f53a80179b02cba9056010d733eb4196134a870"},
{file = "pycryptodomex-3.15.0-cp35-abi3-win32.whl", hash = "sha256:46b3f05f2f7ac7841053da4e0f69616929ca3c42f238c405f6c3df7759ad2780"},
{file = "pycryptodomex-3.15.0-cp35-abi3-win_amd64.whl", hash = "sha256:8eecdf9cdc7343001d047f951b9cc805cd68cb6cd77b20ea46af5bffc5bd3dfb"},
{file = "pycryptodomex-3.15.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:67e1e6a92151023ccdfcfbc0afb3314ad30080793b4c27956ea06ab1fb9bcd8a"},
{file = "pycryptodomex-3.15.0-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:c4cb9cb492ea7dcdf222a8d19a1d09002798ea516aeae8877245206d27326d86"},
{file = "pycryptodomex-3.15.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:94c7b60e1f52e1a87715571327baea0733708ab4723346598beca4a3b6879794"},
{file = "pycryptodomex-3.15.0-pp27-pypy_73-win32.whl", hash = "sha256:04cc393045a8f19dd110c975e30f38ed7ab3faf21ede415ea67afebd95a22380"},
{file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0776bfaf2c48154ab54ea45392847c1283d2fcf64e232e85565f858baedfc1fa"},
{file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:463119d7d22d0fc04a0f9122e9d3e6121c6648bcb12a052b51bd1eed1b996aa2"},
{file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a07a64709e366c2041cd5cfbca592b43998bf4df88f7b0ca73dca37071ccf1bd"},
{file = "pycryptodomex-3.15.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:35a8f7afe1867118330e2e0e0bf759c409e28557fb1fc2fbb1c6c937297dbe9a"},
{file = "pycryptodomex-3.15.0.tar.gz", hash = "sha256:7341f1bb2dadb0d1a0047f34c3a58208a92423cdbd3244d998e4b28df5eac0ed"},
]
pyparsing = [
{file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
@ -1222,8 +1261,8 @@ pytest-django = [
{file = "pytest_django-4.5.2-py3-none-any.whl", hash = "sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e"},
]
pytest-mock = [
{file = "pytest-mock-3.7.0.tar.gz", hash = "sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534"},
{file = "pytest_mock-3.7.0-py3-none-any.whl", hash = "sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231"},
{file = "pytest-mock-3.8.2.tar.gz", hash = "sha256:77f03f4554392558700295e05aed0b1096a20d4a60a4f3ddcde58b0c31c8fca2"},
{file = "pytest_mock-3.8.2-py3-none-any.whl", hash = "sha256:8a9e226d6c0ef09fcf20c94eb3405c388af438a90f3e39687f84166da82d5948"},
]
python-dateutil = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
@ -1273,12 +1312,12 @@ pyyaml = [
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
redis = [
{file = "redis-4.3.1-py3-none-any.whl", hash = "sha256:84316970995a7adb907a56754d2b92d88fc2d252963dc5ac34c88f0f1a22c25d"},
{file = "redis-4.3.1.tar.gz", hash = "sha256:94b617b4cd296e94991146f66fc5559756fbefe9493604f0312e4d3298ac63e9"},
{file = "redis-4.3.4-py3-none-any.whl", hash = "sha256:a52d5694c9eb4292770084fa8c863f79367ca19884b329ab574d5cb2036b3e54"},
{file = "redis-4.3.4.tar.gz", hash = "sha256:ddf27071df4adf3821c4f2ca59d67525c3a82e5f268bed97b813cb4fabf87880"},
]
requests = [
{file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"},
{file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"},
{file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
{file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
]
rq = [
{file = "rq-1.10.1-py2.py3-none-any.whl", hash = "sha256:92f4cf38b2364c1697b541e77c0fe62b7e5242fa864324f262be126ee2a07e3a"},
@ -1324,8 +1363,8 @@ urllib3 = [
{file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"},
]
virtualenv = [
{file = "virtualenv-20.14.1-py2.py3-none-any.whl", hash = "sha256:e617f16e25b42eb4f6e74096b9c9e37713cf10bf30168fb4a739f3fa8f898a3a"},
{file = "virtualenv-20.14.1.tar.gz", hash = "sha256:ef589a79795589aada0c1c5b319486797c03b67ac3984c48c669c0e4f50df3a5"},
{file = "virtualenv-20.15.1-py2.py3-none-any.whl", hash = "sha256:b30aefac647e86af6d82bfc944c556f8f1a9c90427b2fb4e3bfbf338cb82becf"},
{file = "virtualenv-20.15.1.tar.gz", hash = "sha256:288171134a2ff3bfb1a2f54f119e77cd1b81c29fc1265a2356f3e8d14c7d58c4"},
]
wcag-contrast-ratio = [
{file = "wcag-contrast-ratio-0.9.tar.gz", hash = "sha256:69192b8e5c0a7d0dc5ff1187eeb3e398141633a4bde51c69c87f58fe87ed361c"},
@ -1447,6 +1486,6 @@ wrapt = [
{file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
]
yt-dlp = [
{file = "yt-dlp-2022.5.18.tar.gz", hash = "sha256:3a7b59d2fb4b39ce8ba8e0b9c5a37fe20e5624f46a2346b4ae66ab1320e35134"},
{file = "yt_dlp-2022.5.18-py2.py3-none-any.whl", hash = "sha256:deec1009442312c1e2ee5298966842194d0e950b433f0d4fc844ef464b9c32a7"},
{file = "yt-dlp-2022.6.29.tar.gz", hash = "sha256:5fbfac72fd035d11bc2693e5d1cd6933b1bc0712f742f5082a261703810bb5c9"},
{file = "yt_dlp-2022.6.29-py2.py3-none-any.whl", hash = "sha256:a688f5cbc4a824456983774ccdd4a12befd379f6c92e25074fa85e7b8ce31704"},
]

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "ucast"
version = "0.1.0"
version = "0.4.6"
description = "YouTube to Podcast converter"
authors = ["Theta-Dev <t.testboy@gmail.com>"]
packages = [
@ -11,8 +11,8 @@ packages = [
[tool.poetry.dependencies]
python = "^3.10"
Django = "^4.0.4"
yt-dlp = "^2022.3.8"
requests = "^2.27.1"
yt-dlp = "^2022.6.29"
requests = "^2.28.1"
feedparser = "^6.0.8"
Pillow = "^9.1.0"
colorthief = "^0.2.1"
@ -22,7 +22,7 @@ fonts = "^0.0.3"
django-bulma = "^0.8.3"
python-dotenv = "^0.20.0"
psycopg2 = "^2.9.3"
mysqlclient = "^2.1.0"
mysqlclient = "^2.1.1"
python-slugify = "^6.1.2"
mutagen = "^1.45.1"
rq = "^1.10.1"

View file

@ -1,4 +1,4 @@
__version__ = "0.1.0"
__version__ = "0.4.6"
def template_context(request):

View file

@ -147,6 +147,12 @@ class UcastFeed(Feed):
return Channel.objects.get(slug=channel_slug)
def get_feed(self, channel: Channel, request: http.HttpRequest):
max_items = settings.FEED_MAX_ITEMS
try:
max_items = int(request.GET.get("items"))
except (TypeError, ValueError):
pass
feed = self.feed_type(
title=channel.name,
link=channel.get_absolute_url(),
@ -158,7 +164,7 @@ class UcastFeed(Feed):
for video in channel.video_set.filter(downloaded__isnull=False).order_by(
"-published"
)[: settings.FEED_MAX_ITEMS]:
)[:max_items]:
feed.add_item(
title=video.title,
link=video.get_absolute_url(),
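Note on the hunk above: the feed view now honors an "items" query parameter and falls back to FEED_MAX_ITEMS when it is missing or not an integer. A minimal client-side sketch of requesting a shorter feed; the host name, channel slug and key value are placeholders, only the URL shape and the key/items parameters come from this diff:

import requests

BASE_URL = "https://ucast.example.com"   # placeholder deployment URL
CHANNEL_SLUG = "thetadev"                # placeholder channel slug
FEED_KEY = "secret"                      # placeholder feed key

# Ask for at most 20 feed items instead of the configured default
r = requests.get(
    f"{BASE_URL}/feed/{CHANNEL_SLUG}",
    params={"key": FEED_KEY, "items": 20},
)
r.raise_for_status()
print(r.headers.get("Content-Type"), len(r.text))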

View file

@ -70,6 +70,16 @@ class Channel(models.Model):
"download_size__sum"
)
def vfilter_args(self) -> dict:
filter_args = {}
if self.skip_livestreams:
filter_args["is_livestream"] = False
if self.skip_shorts:
filter_args["is_short"] = False
return filter_args
def __str__(self):
return self.name
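The new vfilter_args helper just turns the channel's skip flags into ORM filter kwargs. A sketch of how it combines with a queryset, mirroring the pending-video count added to the videos view later in this diff (the slug value is a placeholder):

from ucast.models import Channel, Video

channel = Channel.objects.get(slug="thetadev")  # placeholder slug

# Videos still waiting for download, honoring skip_livestreams / skip_shorts
n_pending = Video.objects.filter(
    channel=channel,
    downloaded__isnull=True,
    is_deleted=False,
    **channel.vfilter_args(),
).count()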

View file

@ -2,8 +2,10 @@ import redis
import rq
import rq_scheduler
from django.conf import settings
from django.db.models import ObjectDoesNotExist
from rq import registry
from ucast.models import Video
from ucast.service import util
@ -33,8 +35,7 @@ def get_worker(**kwargs) -> rq.Worker:
def enqueue(f, *args, **kwargs) -> rq.job.Job:
queue = get_queue()
# return queue.enqueue(f, *args, **kwargs)
return queue.enqueue_call(f, args, kwargs)
return queue.enqueue(f, *args, **kwargs)
def get_statistics() -> dict:
@ -90,3 +91,25 @@ def get_statistics() -> dict:
def get_failed_job_registry():
queue = get_queue()
return registry.FailedJobRegistry(queue.name, queue.connection)
def get_downloading_videos(offset=0, limit=-1):
queue = get_queue()
v_ids = set()
for job in queue.get_jobs(offset, limit):
if (
job.func_name == "ucast.tasks.download.download_video"
and job.args
and job.args[0] > 0
):
v_ids.add(job.args[0])
videos = []
for v_id in v_ids:
try:
videos.append(Video.objects.get(id=v_id))
except ObjectDoesNotExist:
pass
return videos
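get_downloading_videos resolves the video IDs of queued download_video jobs back to Video rows, silently dropping IDs whose rows were deleted in the meantime. A rough usage sketch for a status page; the cap of 100 is an arbitrary example, not necessarily what the downloads view passes:

from ucast import queue

downloading = queue.get_downloading_videos(limit=100)
for video in downloading:
    print(video.video_id, video.title, video.channel.name)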

View file

@ -1,7 +1,7 @@
import shutil
from ucast.models import Channel, Video
from ucast.service import storage, util, youtube
from ucast.service import storage, util, videoutil, youtube
class ChannelAlreadyExistsException(Exception):
@ -12,8 +12,10 @@ class ChannelAlreadyExistsException(Exception):
def download_channel_avatar(channel: Channel):
store = storage.Storage()
channel_folder = store.get_or_create_channel_folder(channel.slug)
util.download_image_file(channel.avatar_url, channel_folder.file_avatar)
util.resize_avatar(channel_folder.file_avatar, channel_folder.file_avatar_sm)
util.download_image_file(
channel.avatar_url, channel_folder.file_avatar, videoutil.AVATAR_SIZE
)
videoutil.resize_avatar(channel_folder.file_avatar, channel_folder.file_avatar_sm)
def create_channel(channel_str: str) -> Channel:

ucast/service/opml.py (new file, 40 lines)
View file

@ -0,0 +1,40 @@
from dataclasses import dataclass
from typing import Iterable
from django.utils.xmlutils import SimplerXMLGenerator
from ucast.models import Channel
@dataclass
class FeedElement:
url: str
title: str
def __add_feed_element(handler: SimplerXMLGenerator, element: FeedElement):
handler.addQuickElement(
"outline", attrs={"xmlUrl": element.url, "title": element.title}
)
def write_opml(elements: Iterable[FeedElement], outfile):
handler = SimplerXMLGenerator(outfile, "utf-8", short_empty_elements=True)
handler.startDocument()
handler.startElement("opml", {})
handler.addQuickElement("head")
handler.startElement("body", {"version": "1.0"})
for element in elements:
__add_feed_element(handler, element)
handler.endElement("body")
handler.endElement("opml")
handler.endDocument()
def write_channels_opml(channels: Iterable[Channel], site_url: str, key: str, outfile):
elements = [
FeedElement(f"{site_url}/feed/{c.slug}?key={key}", c.name) for c in channels
]
write_opml(elements, outfile)
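The OPML writer emits one flat outline element per channel, so podcast apps can import every feed at once. A self-contained sketch that runs it against stand-in channel objects and an in-memory buffer (only the slug and name attributes are used, so a namedtuple is enough here):

import io
from collections import namedtuple

from ucast.service import opml

FakeChannel = namedtuple("FakeChannel", ["slug", "name"])  # stand-in for the Channel model
channels = [FakeChannel("thetadev", "ThetaDev"), FakeChannel("foo", "Foo Channel")]

buf = io.StringIO()
opml.write_channels_opml(channels, "https://ucast.example.com", "secret-key", buf)

# Prints something like:
# <?xml version="1.0" encoding="utf-8"?>
# <opml><head/><body version="1.0">
#   <outline xmlUrl="https://ucast.example.com/feed/thetadev?key=secret-key" title="ThetaDev"/>...
print(buf.getvalue())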

View file

@ -1,4 +1,7 @@
import os
import shutil
import tempfile
from datetime import datetime, timedelta
from pathlib import Path
from django.conf import settings
@ -62,3 +65,32 @@ class Storage:
if not cf.does_exist():
cf.create()
return cf
class Cache:
def __init__(self):
self.dir_cache = settings.CACHE_ROOT
self.dir_ytdlp_cache = self.dir_cache / "yt_dlp"
os.makedirs(self.dir_ytdlp_cache, exist_ok=True)
def create_tmpdir(self, prefix="dld") -> tempfile.TemporaryDirectory:
return tempfile.TemporaryDirectory(prefix=prefix + "_", dir=self.dir_cache)
def cleanup(self):
"""
Delete temporary directories that are older than 24h and are most likely left
over after unexpected shutdowns.
"""
for dirname in os.listdir(self.dir_cache):
if dirname == "yt_dlp":
continue
try:
ctime = os.path.getctime(self.dir_cache / dirname)
# Cache folders may get removed by concurrent jobs
except FileNotFoundError:
continue
age = datetime.now() - datetime.fromtimestamp(ctime)
if age > timedelta(days=1):
shutil.rmtree(self.dir_cache / dirname, ignore_errors=True)
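The Cache class centralizes the download scratch space: yt-dlp gets a persistent yt_dlp subfolder, per-job temp directories are created next to it, and anything older than a day is swept up later. A short usage sketch, assuming CACHE_ROOT points at a writable directory in the Django settings:

from ucast.service import storage

cache = storage.Cache()         # ensures CACHE_ROOT/yt_dlp exists

tmpdir = cache.create_tmpdir()  # per-download scratch dir, e.g. CACHE_ROOT/dld_xxxxxx
print(tmpdir.name)

cache.cleanup()                 # removes leftover dld_* dirs older than 24 h
tmpdir.cleanup()                # normal per-job cleanup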

View file

@ -4,7 +4,7 @@ import json
import os
import re
from pathlib import Path
from typing import Any, Union
from typing import Any, Optional, Tuple, Union
from urllib import parse
import requests
@ -12,9 +12,6 @@ import slugify
from django.utils import timezone
from PIL import Image
AVATAR_SM_WIDTH = 100
THUMBNAIL_SM_WIDTH = 360
EMOJI_PATTERN = re.compile(
"["
"\U0001F1E0-\U0001F1FF" # flags (iOS)
@ -39,13 +36,38 @@ def download_file(url: str, download_path: Path):
open(download_path, "wb").write(r.content)
def download_image_file(url: str, download_path: Path):
def resize_image(img: Image, resize: Tuple[int, int]):
if img.size == resize:
return img
w_ratio = resize[0] / img.width
h_ratio = resize[1] / img.height
box = None
# Too tall
if h_ratio < w_ratio:
crop_height = int(img.width / resize[0] * resize[1])
border = int((img.height - crop_height) / 2)
box = (0, border, img.width, img.height - border)
# Too wide
elif w_ratio < h_ratio:
crop_width = int(img.height / resize[1] * resize[0])
border = int((img.width - crop_width) / 2)
box = (border, 0, img.width - border, img.height)
return img.resize(resize, Image.Resampling.LANCZOS, box)
def download_image_file(
url: str, download_path: Path, resize: Optional[Tuple[int, int]] = None
):
"""
Download an image and convert it to the type given
by the path.
:param url: Image URL
:param download_path: Download path
:param resize: target image size (set to None for no resizing)
"""
r = requests.get(url, allow_redirects=True)
r.raise_for_status()
@ -55,30 +77,16 @@ def download_image_file(url: str, download_path: Path):
if img_ext == "jpeg":
img_ext = "jpg"
if "." + img_ext == download_path.suffix:
do_resize = resize and img.size != resize
if do_resize:
img = resize_image(img, resize)
if not do_resize and "." + img_ext == download_path.suffix:
open(download_path, "wb").write(r.content)
else:
img.save(download_path)
def resize_avatar(original_file: Path, new_file: Path):
avatar = Image.open(original_file)
avatar_new_height = int(AVATAR_SM_WIDTH / avatar.width * avatar.height)
avatar = avatar.resize(
(AVATAR_SM_WIDTH, avatar_new_height), Image.Resampling.LANCZOS
)
avatar.save(new_file)
def resize_thumbnail(original_file: Path, new_file: Path):
thumbnail = Image.open(original_file)
tn_new_height = int(THUMBNAIL_SM_WIDTH / thumbnail.width * thumbnail.height)
thumbnail = thumbnail.resize(
(THUMBNAIL_SM_WIDTH, tn_new_height), Image.Resampling.LANCZOS
)
thumbnail.save(new_file)
def get_slug(text: str) -> str:
return slugify.slugify(text, lowercase=False, separator="_")
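resize_image center-crops before scaling, so covers and thumbnails of arbitrary aspect ratio come out at exactly the requested size without distortion. A quick check of the crop math on a too-tall source:

from PIL import Image

from ucast.service.util import resize_image

# 1280x1000 is "too tall" for a 1280x720 target:
# crop_height = 1280 / 1280 * 720 = 720, border = (1000 - 720) / 2 = 140,
# so rows 140..860 are kept and then scaled to the target size.
src = Image.new("RGB", (1280, 1000), "white")
out = resize_image(src, (1280, 720))
assert out.size == (1280, 720)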

View file

@ -2,6 +2,12 @@ from datetime import date
from pathlib import Path
from mutagen import id3
from PIL import Image
AVATAR_SM_WIDTH = 100
THUMBNAIL_SM_WIDTH = 360
THUMBNAIL_SIZE = (1280, 720)
AVATAR_SIZE = (900, 900)
def tag_audio(
@ -26,3 +32,21 @@ def tag_audio(
encoding=3, mime="image/png", type=3, desc="Cover", data=albumart.read()
)
tag.save()
def resize_avatar(original_file: Path, new_file: Path):
avatar = Image.open(original_file)
avatar_new_height = int(AVATAR_SM_WIDTH / avatar.width * avatar.height)
avatar = avatar.resize(
(AVATAR_SM_WIDTH, avatar_new_height), Image.Resampling.LANCZOS
)
avatar.save(new_file)
def resize_thumbnail(original_file: Path, new_file: Path):
thumbnail = Image.open(original_file)
tn_new_height = int(THUMBNAIL_SM_WIDTH / thumbnail.width * thumbnail.height)
thumbnail = thumbnail.resize(
(THUMBNAIL_SM_WIDTH, tn_new_height), Image.Resampling.LANCZOS
)
thumbnail.save(new_file)

View file

@ -2,7 +2,6 @@ import datetime
import logging
import re
import shutil
import tempfile
from dataclasses import dataclass
from operator import itemgetter
from pathlib import Path
@ -12,7 +11,7 @@ import feedparser
import requests
from yt_dlp import YoutubeDL
from ucast.service import scrapetube, util
from ucast.service import scrapetube, storage, util, videoutil
CHANID_REGEX = re.compile(r"""[-_a-zA-Z\d]{24}""")
@ -116,7 +115,7 @@ def download_thumbnail(vinfo: VideoDetails, download_path: Path):
logging.info(f"downloading thumbnail {url}...")
try:
util.download_image_file(url, download_path)
util.download_image_file(url, download_path, videoutil.THUMBNAIL_SIZE)
return
except requests.HTTPError:
logging.warning(f"downloading thumbnail {url} failed")
@ -126,7 +125,19 @@ def download_thumbnail(vinfo: VideoDetails, download_path: Path):
def get_video_details(video_id: str) -> VideoDetails:
with YoutubeDL() as ydl:
"""
Get the details of a YouTube video without downloading it.
:param video_id: YouTube video ID
:return: VideoDetails
"""
cache = storage.Cache()
ydl_params = {
"cachedir": str(cache.dir_ytdlp_cache),
}
with YoutubeDL(ydl_params) as ydl:
info = ydl.extract_info(video_id, download=False)
return VideoDetails.from_vinfo(info)
@ -142,7 +153,8 @@ def download_audio(
:param sponsorblock: Enable Sponsorblock
:return: VideoDetails
"""
tmpdir = tempfile.TemporaryDirectory(prefix="ucast_")
cache = storage.Cache()
tmpdir = cache.create_tmpdir()
tmp_dld_file = Path(tmpdir.name) / "audio.mp3"
ydl_params = {
@ -151,6 +163,7 @@ def download_audio(
{"key": "FFmpegExtractAudio", "preferredcodec": "mp3"},
],
"outtmpl": str(tmp_dld_file),
"cachedir": str(cache.dir_ytdlp_cache),
}
if sponsorblock:
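Both yt-dlp calls now share the persistent yt_dlp cache folder, and download_audio builds its work area through Cache.create_tmpdir instead of a bare tempfile. Reduced to the options visible in this hunk (format selection and the SponsorBlock postprocessors are elided here), the download call looks roughly like this sketch:

from pathlib import Path

from yt_dlp import YoutubeDL

from ucast.service import storage

cache = storage.Cache()
tmpdir = cache.create_tmpdir()
tmp_dld_file = Path(tmpdir.name) / "audio.mp3"

ydl_params = {
    "postprocessors": [
        {"key": "FFmpegExtractAudio", "preferredcodec": "mp3"},
    ],
    "outtmpl": str(tmp_dld_file),
    "cachedir": str(cache.dir_ytdlp_cache),
}

with YoutubeDL(ydl_params) as ydl:
    # Video ID borrowed from the test suite below; download=False would
    # only fetch metadata, as get_video_details does.
    ydl.extract_info("I0RRENheeTo", download=True)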

View file

@ -10363,4 +10363,8 @@ a.has-text-danger-dark:hover, a.has-text-danger-dark:focus {
color: #fff;
}
.overflow-x {
overflow-x: auto;
}
/*# sourceMappingURL=style.css.map */

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -3,21 +3,48 @@ import os
from django.db.models import ObjectDoesNotExist
from django.utils import timezone
from yt_dlp.utils import DownloadError
from ucast import queue
from ucast.models import Channel, Video
from ucast.service import controller, cover, storage, util, videoutil, youtube
from ucast.service import controller, cover, storage, videoutil, youtube
def _load_scraped_video(vid: youtube.VideoScraped, channel: Channel):
# Use Redis to ensure the same video is not processed multiple times
redis = queue.get_redis_connection()
lock_key = f"ucast:lock_load_video:{vid.id}"
if not redis.set(lock_key, "1", 120, nx=True):
return
# Create video object if it does not exist
try:
video = Video.objects.get(video_id=vid.id)
except ObjectDoesNotExist:
details = youtube.get_video_details(vid.id)
try:
details = youtube.get_video_details(vid.id)
except DownloadError as e:
if "available" in e.msg:
# Create dummy video to prevent further download attempts
# of unavailable videos
video = Video(
video_id=vid.id,
title="",
slug="",
channel=channel,
published=timezone.datetime(2000, 1, 1, tzinfo=timezone.utc),
description="",
duration=0,
is_deleted=True,
)
video.save()
return
raise e
# Don't load active livestreams
if details.is_currently_live:
redis.delete(lock_key)
return
slug = Video.get_new_slug(
@ -42,18 +69,23 @@ def _load_scraped_video(vid: youtube.VideoScraped, channel: Channel):
and video.is_deleted is False
and channel.should_download(video)
):
queue.enqueue(download_video, video)
queue.enqueue(download_video, video.id)
redis.delete(lock_key)
def download_video(video: Video):
def download_video(v_id: int):
"""
Download a video including its thumbnail, create a cover image
and store everything in the channel folder.
:param video: Video object
:param v_id: Video ID
"""
# Return if the video was already downloaded by a previous task
video.refresh_from_db()
try:
video = Video.objects.get(id=v_id)
except ObjectDoesNotExist:
return
if video.downloaded:
return
@ -61,12 +93,19 @@ def download_video(video: Video):
channel_folder = store.get_or_create_channel_folder(video.channel.slug)
audio_file = channel_folder.get_audio(video.slug)
details = youtube.download_audio(video.video_id, audio_file)
try:
details = youtube.download_audio(video.video_id, audio_file)
except DownloadError as e:
if "available" in e.msg:
video.is_deleted = True
video.save()
return
raise e
# Download/convert thumbnails
tn_path = channel_folder.get_thumbnail(video.slug)
youtube.download_thumbnail(details, tn_path)
util.resize_thumbnail(tn_path, channel_folder.get_thumbnail(video.slug, True))
videoutil.resize_thumbnail(tn_path, channel_folder.get_thumbnail(video.slug, True))
cover_file = channel_folder.get_cover(video.slug)
if not os.path.isfile(channel_folder.file_avatar):
@ -96,8 +135,12 @@ def download_video(video: Video):
video.save()
def update_channel(channel: Channel):
def update_channel(c_id: int):
"""Update a single channel from its RSS feed"""
try:
channel = Channel.objects.get(id=c_id)
except ObjectDoesNotExist:
return
videos = youtube.get_channel_videos_from_feed(channel.channel_id)
for vid in videos:
@ -113,18 +156,23 @@ def update_channels():
This task is scheduled a regular intervals.
"""
for channel in Channel.objects.filter(active=True):
queue.enqueue(update_channel, channel)
queue.enqueue(update_channel, channel.id)
def download_channel(channel: Channel, limit: int):
def download_channel(c_id: int, limit: int):
"""
Download up to a maximum number of videos from a channel.
:param channel: Channel object
:param c_id: Channel ID (Database)
:param limit: Max number of videos
"""
if limit < 1:
return
try:
channel = Channel.objects.get(id=c_id)
except ObjectDoesNotExist:
return
for vid in youtube.get_channel_videos_from_scraper(channel.channel_id, limit):
_load_scraped_video(vid, channel)
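The theme of this file: tasks are now enqueued with primary keys instead of model instances (stale rows are re-fetched or skipped), and _load_scraped_video guards against two workers importing the same video with a short-lived Redis lock. The lock pattern in isolation, with the key name and 120 s TTL taken from the hunk above (the try/finally is a simplification of the early returns in the real task):

from ucast import queue

redis = queue.get_redis_connection()
video_id = "I0RRENheeTo"  # example YouTube video ID
lock_key = f"ucast:lock_load_video:{video_id}"

# SET .. NX EX 120: only the first worker acquires the lock; it expires on
# its own if the worker dies before releasing it.
if redis.set(lock_key, "1", 120, nx=True):
    try:
        ...  # look up / create the Video row, enqueue the download
    finally:
        redis.delete(lock_key)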

View file

@ -1,13 +1,20 @@
import os
from django.db.models import ObjectDoesNotExist
from django.utils import timezone
from PIL import Image
from ucast import queue
from ucast.models import Channel, Video
from ucast.service import cover, storage, util, videoutil, youtube
def recreate_cover(video: Video):
def recreate_cover(v_id: int):
try:
video = Video.objects.get(id=v_id)
except ObjectDoesNotExist:
return
store = storage.Storage()
cf = store.get_channel_folder(video.channel.slug)
@ -42,7 +49,33 @@ def recreate_cover(video: Video):
def recreate_covers():
for video in Video.objects.filter(downloaded__isnull=False):
queue.enqueue(recreate_cover, video)
queue.enqueue(recreate_cover, video.id)
def resize_thumbnail(v_id: int):
try:
video = Video.objects.get(id=v_id)
except ObjectDoesNotExist:
return
store = storage.Storage()
cf = store.get_channel_folder(video.channel.slug)
tn_path = cf.get_thumbnail(video.slug)
tn_img = Image.open(tn_path)
if tn_img.size != videoutil.THUMBNAIL_SIZE:
tn_img = util.resize_image(tn_img, videoutil.THUMBNAIL_SIZE)
tn_img.save(tn_path)
videoutil.resize_thumbnail(tn_path, cf.get_thumbnail(video.slug, True))
def resize_thumbnails():
"""
Used to unify thumbnail sizes for the existing collection before v0.4.2.
Needs to be triggered manually: ``manage.py rqenqueue ucast.tasks.library.resize_thumbnails``.
"""
for video in Video.objects.filter(downloaded__isnull=False):
queue.enqueue(resize_thumbnail, video.id)
def update_file_storage():
@ -69,7 +102,7 @@ def update_file_storage():
return
if not os.path.isfile(tn_file_sm):
util.resize_thumbnail(tn_file, tn_file_sm)
videoutil.resize_thumbnail(tn_file, tn_file_sm)
if not os.path.isfile(cover_file):
recreate_cover(video)
@ -81,7 +114,12 @@ def update_file_storage():
video.save()
def update_channel_info(channel: Channel):
def update_channel_info(ch_id: int):
try:
channel = Channel.objects.get(id=ch_id)
except ObjectDoesNotExist:
return
channel_data = youtube.get_channel_metadata(
youtube.channel_url_from_id(channel.channel_id)
)
@ -90,8 +128,12 @@ def update_channel_info(channel: Channel):
store = storage.Storage()
channel_folder = store.get_or_create_channel_folder(channel.slug)
util.download_image_file(channel_data.avatar_url, channel_folder.file_avatar)
util.resize_avatar(channel_folder.file_avatar, channel_folder.file_avatar_sm)
util.download_image_file(
channel_data.avatar_url, channel_folder.file_avatar, videoutil.AVATAR_SIZE
)
videoutil.resize_avatar(
channel_folder.file_avatar, channel_folder.file_avatar_sm
)
channel.avatar_url = channel_data.avatar_url
@ -104,4 +146,9 @@ def update_channel_info(channel: Channel):
def update_channel_infos():
for channel in Channel.objects.filter(active=True):
queue.enqueue(update_channel_info, channel)
queue.enqueue(update_channel_info, channel.id)
def clean_cache():
cache = storage.Cache()
cache.cleanup()
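Unlike the scheduled jobs, the one-off resize_thumbnails migration has to be triggered by hand, either with the manage.py rqenqueue command quoted in its docstring or from a Django shell along these lines:

from ucast import queue
from ucast.tasks import library

# Fans out one resize_thumbnail job per already-downloaded video
queue.enqueue(library.resize_thumbnails)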

View file

@ -1,5 +1,5 @@
import logging
from datetime import datetime
from datetime import datetime, timedelta
from django.conf import settings
@ -28,8 +28,15 @@ def register_scheduled_jobs():
)
scheduler.schedule(
datetime.utcnow(),
datetime.utcnow() + timedelta(days=1),
library.update_channel_infos,
id="schedule_update_channel_infos",
interval=24 * 3600,
)
scheduler.schedule(
datetime.utcnow() + timedelta(days=1),
library.clean_cache,
id="schedule_clean_cache",
interval=24 * 3600,
)

View file

@ -27,8 +27,11 @@
</div>
<div class="navbar-end">
<a class="navbar-item" href="{% url 'download_errors' %}">
Errors
<a class="navbar-item" href="{% url 'search' %}">
Search
</a>
<a class="navbar-item" href="{% url 'downloads' %}">
Downloads
</a>
{% url 'login' as login_url %}
{% url 'logout' as logout_url %}

View file

@ -68,6 +68,10 @@
</div>
</div>
{% endfor %}
<div>
<a href="{% url 'channels_opml' %}">Download OPML</a>
</div>
{% endblock content %}
{% block javascript %}

View file

@ -1,50 +0,0 @@
{% extends 'base.html' %}
{% block title %}ucast - Errors{% endblock %}
{% block content %}
<div class="mb-4">
<div>
<span class="title">Download errors</span>
</div>
</div>
{% if jobs %}
<div class="mb-4">
<form method="post" action="{% url 'download_errors_requeue_all' %}">
{% csrf_token %}
<button class="button is-primary">Requeue all</button>
</form>
</div>
<table class="table">
<thead>
<tr>
<th>ID</th>
<th>Function</th>
<th>Details</th>
<th>Requeue</th>
</tr>
</thead>
<tbody>
{% for job in jobs %}
<tr>
<td>{{ job.id }}</td>
<td>{{ job.func_name }}</td>
<td><a href="{% url 'error_details' job.id %}">Details</a></td>
<td>
<form method="post" action="{% url 'download_errors_requeue' %}">
{% csrf_token %}
<input type="hidden" name="id" value="{{ job.id }}">
<button class="button is-small">Requeue</button>
</form>
</td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No download errors</p>
{% endif %}
{% endblock content %}

View file

@ -0,0 +1,80 @@
{% extends 'base.html' %}
{% block title %}ucast - Downloads{% endblock %}
{% block content %}
<div class="mb-4">
<div>
<span class="title">Downloading</span>
</div>
</div>
{% if downloading_videos %}
<div class="mb-4" hx-get="{% url 'downloads' %}" hx-trigger="every 5s">
{% include "ucast/downloads_items.html" %}
</div>
{% else %}
<div class="mb-4">
<p>Not downloading any videos</p>
</div>
{% endif %}
<div class="mb-4">
<div>
<span class="title">Download errors</span>
</div>
</div>
<div class="mb-4">
{% if failed_jobs %}
<div class="level mb-4">
<form method="post" action="{% url 'download_errors_requeue_all' %}">
{% csrf_token %}
<button class="button is-primary">Requeue all</button>
</form>
<form method="post" action="{% url 'download_errors_delete_all' %}">
{% csrf_token %}
<button class="button is-danger">Delete all</button>
</form>
</div>
<table class="table">
<thead>
<tr>
<th>ID</th>
<th>Function</th>
<th>Details</th>
<th>Requeue</th>
<th>Delete</th>
</tr>
</thead>
<tbody>
{% for job in failed_jobs %}
<tr>
<td>{{ job.id }}</td>
<td>{{ job.func_name }}</td>
<td><a href="{% url 'error_details' job.id %}">Details</a></td>
<td>
<form method="post" action="{% url 'download_errors_requeue' %}">
{% csrf_token %}
<input type="hidden" name="id" value="{{ job.id }}">
<button class="button is-small">Requeue</button>
</form>
</td>
<td>
<form method="post" action="{% url 'download_errors_delete' %}">
{% csrf_token %}
<input type="hidden" name="id" value="{{ job.id }}">
<button class="button is-small is-danger">Delete</button>
</form>
</td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No download errors</p>
{% endif %}
</div>
{% endblock content %}

View file

@ -0,0 +1,26 @@
<div class="mb-4">
<a class="subtitle">{{ n_tasks }} Tasks</a>
</div>
<div class="mb-4 overflow-x">
<table class="table">
<thead>
<tr>
<th>Video-ID</th>
<th>Title</th>
<th>Channel</th>
<th>Published</th>
</tr>
</thead>
<tbody>
{% for video in downloading_videos %}
<tr>
<td><a href="{{ video.get_absolute_url }}">{{ video.video_id }}</a></td>
<td>{{ video.title }}</td>
<td><a href="{% url 'videos' video.channel.slug %}">{{ video.channel.name }}</a>
</td>
<td>{{ video.published|date:"SHORT_DATE_FORMAT" }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</div>

View file

@ -16,12 +16,17 @@
{{ job.exc_info }}
</pre>
<div>
<div class="level">
<form method="post" action="{% url 'download_errors_requeue' %}">
{% csrf_token %}
<input type="hidden" name="id" value="{{ job.id }}">
<button class="button is-primary">Requeue</button>
</form>
<form method="post" action="{% url 'download_errors_delete' %}">
{% csrf_token %}
<input type="hidden" name="id" value="{{ job.id }}">
<button class="button is-danger">Delete</button>
</form>
</div>
{% endblock content %}

View file

@ -0,0 +1,67 @@
{% extends 'base.html' %}
{% block title %}ucast - Search{% endblock %}
{% block content %}
<div class="box">
<form method="get">
<div class="field has-addons">
<div class="control is-flex-grow-1">
<input name="q" required class="input" type="text"
placeholder="Search" {% if query %}value="{{ query }}{% endif %}">
</div>
<div class="control">
<button type="submit" class="button is-primary">
<i class="fas fa-search"></i>
</button>
</div>
</div>
</form>
</div>
<div class="video-grid">
{% if videos %}
{% for video in videos %}
<div class="card video-card">
<a href="{{ video.get_absolute_url }}" target="_blank">
<img class="video-thumbnail"
src="/files/thumbnail/{{ video.channel.slug }}/{{ video.slug }}.webp?sm">
</a>
<div class="video-card-content is-flex-grow-1">
<a href="{{ video.get_absolute_url }}">{{ video.title }}</a>
</div>
<div class="video-card-content">
<div class="level">
<div style="max-width: 80%; overflow: hidden">
<span class="tag">
<i
class="fas fa-user"></i>&nbsp; <a href="{% url 'videos' video.channel.slug %}">{{ video.channel.name }}</a>
</span>
<span class="tag">
<i
class="fas fa-calendar"></i>&nbsp; {{ video.published|date:"SHORT_DATE_FORMAT" }}
</span>
</div>
<div class="field has-addons">
<div class="control">
<a class="button is-small is-success"
href="/files/audio/{{ video.channel.slug }}/{{ video.slug }}.mp3"
target="_blank">
<i class="fas fa-play"></i>
</a>
</div>
</div>
</div>
</div>
</div>
{% endfor %}
{% elif query %}
<p>No videos</p>
{% endif %}
</div>
{% endblock content %}

View file

@ -13,7 +13,11 @@
<span class="tag"><i
class="fas fa-user-group"></i>&nbsp; {{ channel.subscribers }}</span>
<span class="tag"><i
class="fas fa-video"></i>&nbsp; {{ videos|length }}</span>
class="fas fa-video"></i>&nbsp; {{ videos.paginator.count }}
{% if n_pending %}
({{ n_pending }})
{% endif %}
</span>
<span class="tag"><i
class="fas fa-database"></i>&nbsp; {{ channel.download_size|filesizeformat }}</span>
<a class="tag" href="{{ channel.get_absolute_url }}" target="_blank"><i
@ -44,13 +48,14 @@
<i class="fas fa-edit"></i>
</a>
</div>
<div class="control">
<div class="control">
<a class="button is-info" href="{% url 'channel_download' channel.slug %}">
<i class="fas fa-download"></i>
</a>
</div>
<div class="control">
<button type="submit" name="delete_channel" class="button is-danger dialog-confirm"
<button type="submit" name="delete_channel"
class="button is-danger dialog-confirm"
confirm-msg="Do you want to delete the channel '{{ channel.name }}' including {{ videos|length }} videos?">
<i class="fas fa-trash"></i>
</button>
@ -60,28 +65,38 @@
</form>
</div>
<div class="video-grid">
{% if not videos %}
<p>No videos</p>
{% if not videos %}
{% if n_pending %}
<p>There are {{ n_pending }} videos waiting to be downloaded.
Please wait a few minutes and refresh this page.
You can see the current status in the <i>Downloads</i> tab.
</p>
{% else %}
<p>No videos. If you have just added this channel,
you have to wait a minute for ucast to start looking for videos.</p>
{% endif %}
{% include "ucast/videos_items.html" %}
</div>
{% else %}
<div class="video-grid">
{% include "ucast/videos_items.html" %}
</div>
{% endif %}
{% if videos.has_previous or videos.has_next %}
<noscript>
<nav class="pagination is-centered mt-4" role="navigation" aria-label="pagination">
{% if videos.has_previous %}
<a class="pagination-previous" href="?page={{ videos.previous_page_number }}">Previous</a>
{% else %}
<a class="pagination-previous" disabled>Previous</a>
{% endif %}
{% if videos.has_next %}
<a class="pagination-next" href="?page={{ videos.next_page_number }}">Next
page</a>
{% else %}
<a class="pagination-previous" disabled>Previous</a>
{% endif %}
</nav>
</noscript>
<noscript>
<nav class="pagination is-centered mt-4" role="navigation"
aria-label="pagination">
{% if videos.has_previous %}
<a class="pagination-previous" href="?page={{ videos.previous_page_number }}">Previous</a>
{% else %}
<a class="pagination-previous" disabled>Previous</a>
{% endif %}
{% if videos.has_next %}
<a class="pagination-next" href="?page={{ videos.next_page_number }}">Next
page</a>
{% else %}
<a class="pagination-previous" disabled>Previous</a>
{% endif %}
</nav>
</noscript>
{% endif %}
{% endblock content %}

View file

@ -7,7 +7,7 @@
{% endif %}>
<a href="{{ video.get_absolute_url }}" target="_blank">
<img class="video-thumbnail"
src="/files/thumbnail/{{ channel.slug }}/{{ video.slug }}.webp?sm">
src="/files/thumbnail/{{ video.channel.slug }}/{{ video.slug }}.webp?sm">
</a>
<div class="video-card-content is-flex-grow-1">
@ -25,7 +25,7 @@
<div class="field has-addons">
<div class="control">
<a class="button is-small is-success"
href="/files/audio/{{ channel.slug }}/{{ video.slug }}.mp3"
href="/files/audio/{{ video.channel.slug }}/{{ video.slug }}.mp3"
target="_blank">
<i class="fas fa-play"></i>
</a>

Binary files not shown: three new test images added (196 KiB, 197 KiB and 199 KiB).

View file

@ -48,7 +48,7 @@ def _create_download_dir() -> Tuple[Path, TemporaryDirectory]:
shutil.copyfile(
tests.DIR_TESTFILES / "avatar" / f"{avatar}.jpg", cf.file_avatar
)
util.resize_avatar(cf.file_avatar, cf.file_avatar_sm)
videoutil.resize_avatar(cf.file_avatar, cf.file_avatar_sm)
return tmpdir, tmpdir_o
@ -75,7 +75,7 @@ def _add_download_dir_content():
shutil.copyfile(tests.DIR_TESTFILES / "audio" / "audio1.mp3", file_audio)
shutil.copyfile(tests.DIR_TESTFILES / "thumbnail" / f"{vid}.webp", file_tn)
util.resize_thumbnail(file_tn, cf.get_thumbnail(video_slug, True))
videoutil.resize_thumbnail(file_tn, cf.get_thumbnail(video_slug, True))
cover.create_cover_file(
file_tn,
cf.file_avatar,
@ -116,6 +116,14 @@ def download_dir_content_mut() -> Path:
yield tmpdir
@pytest.fixture
def mock_redis(mocker) -> FakeRedis:
redis = FakeRedis()
mocker.patch.object(queue, "get_redis_connection")
queue.get_redis_connection.return_value = redis
return redis
@pytest.fixture
def rq_queue(mocker) -> rq.Queue:
test_queue = rq.Queue(is_async=False, connection=FakeRedis())

View file

@ -1,5 +1,6 @@
import os
import tempfile
from datetime import datetime, timedelta
from pathlib import Path
from ucast.service import storage
@ -54,3 +55,30 @@ def test_channel_folder():
== ucast_dir / "thumbnails" / "my_video_title_sm.webp"
)
assert cf.get_audio("my_video_title") == tmpdir / "my_video_title.mp3"
def test_clean_cache(settings, mocker):
tmpdir_o = tempfile.TemporaryDirectory()
tmpdir = Path(tmpdir_o.name)
os.mkdir(tmpdir / "yt_dlp")
os.mkdir(tmpdir / "dld_old")
os.mkdir(tmpdir / "dld_new")
def mock_ctime(path):
if path == "dld_new":
return datetime.now().timestamp()
if path == "dld_old":
return (datetime.now() - timedelta(days=1, minutes=1)).timestamp()
raise Exception("invalid path")
mocker.patch.object(os.path, "getctime", mock_ctime)
settings.CACHE_ROOT = tmpdir
cache = storage.Cache()
cache.cleanup()
assert os.path.isdir(tmpdir / "yt_dlp")
assert os.path.isdir(tmpdir / "dld_new")
assert not os.path.exists(tmpdir / "dld_old")

View file

@ -55,28 +55,22 @@ def test_download_image_file_conv():
assert diff.getbbox() is None
def test_resize_avatar():
tmpdir_o = tempfile.TemporaryDirectory()
tmpdir = Path(tmpdir_o.name)
source_file = tests.DIR_TESTFILES / "avatar" / "a1.jpg"
resized_file = tmpdir / "avatar.webp"
@pytest.mark.parametrize(
"src_file",
[
"normal",
"tall",
"wide",
],
)
def test_resize_image(src_file: str):
src_path = tests.DIR_TESTFILES / "img" / f"{src_file}.png"
src_img = Image.open(src_path)
resized = util.resize_image(src_img, (500, 250))
util.resize_avatar(source_file, resized_file)
resized_avatar = Image.open(resized_file)
assert resized_avatar.size == (100, 100)
def test_resize_thumbnail():
tmpdir_o = tempfile.TemporaryDirectory()
tmpdir = Path(tmpdir_o.name)
source_file = tests.DIR_TESTFILES / "thumbnail" / "t1.webp"
resized_file = tmpdir / "thumbnail.webp"
util.resize_thumbnail(source_file, resized_file)
resized_thumbnail = Image.open(resized_file)
assert resized_thumbnail.size == (360, 202)
normal_img = Image.open(tests.DIR_TESTFILES / "img" / "normal.png")
diff = ImageChops.difference(resized, normal_img)
assert diff.getbbox() is None
@pytest.mark.parametrize(

View file

@ -57,3 +57,27 @@ https://youtu.be/ZPxEr4YdWt8"""
expected_cover_img = Image.open(cover_file)
diff = ImageChops.difference(tag_cover_img, expected_cover_img)
assert diff.getbbox() is None
def test_resize_avatar():
tmpdir_o = tempfile.TemporaryDirectory()
tmpdir = Path(tmpdir_o.name)
source_file = tests.DIR_TESTFILES / "avatar" / "a1.jpg"
resized_file = tmpdir / "avatar.webp"
videoutil.resize_avatar(source_file, resized_file)
resized_avatar = Image.open(resized_file)
assert resized_avatar.size == (100, 100)
def test_resize_thumbnail():
tmpdir_o = tempfile.TemporaryDirectory()
tmpdir = Path(tmpdir_o.name)
source_file = tests.DIR_TESTFILES / "thumbnail" / "t1.webp"
resized_file = tmpdir / "thumbnail.webp"
videoutil.resize_thumbnail(source_file, resized_file)
resized_thumbnail = Image.open(resized_file)
assert resized_thumbnail.size == (360, 202)

View file

@ -1,21 +1,24 @@
import os
import pytest
from django.utils import timezone
from ucast import queue, tests
from ucast.models import Channel, Video
from ucast.service import storage
from ucast.service.youtube import VideoScraped
from ucast.tasks import download
CHANNEL_ID_THETADEV = "UCGiJh0NZ52wRhYKYnuZI08Q"
VIDEO_ID_INTRO = "I0RRENheeTo"
VIDEO_SLUG_INTRO = "20211010_No_copyright_intro_free_fire_intro_no_text_free_copy_right_free_templates_free_download"
VIDEO_ID_UNAVAILABLE = "K6CBuTy09CE"
@pytest.mark.django_db
def test_download_video(download_dir, rq_queue):
video = Video.objects.get(video_id=VIDEO_ID_INTRO)
job = queue.enqueue(download.download_video, video)
job = queue.enqueue(download.download_video, video.id)
store = storage.Storage()
cf = store.get_or_create_channel_folder(video.channel.slug)
@ -28,16 +31,45 @@ def test_download_video(download_dir, rq_queue):
assert os.path.isfile(cf.get_thumbnail(VIDEO_SLUG_INTRO, True))
@pytest.mark.django_db
def test_load_unavailable_video(download_dir, rq_queue, mock_redis):
channel = Channel.objects.get(channel_id=CHANNEL_ID_THETADEV)
download._load_scraped_video(VideoScraped(VIDEO_ID_UNAVAILABLE, None), channel)
video = Video.objects.get(video_id=VIDEO_ID_UNAVAILABLE)
assert video.is_deleted is True
@pytest.mark.django_db
def test_download_unavailable_video(download_dir, rq_queue):
channel = Channel.objects.get(channel_id=CHANNEL_ID_THETADEV)
video = Video(
video_id=VIDEO_ID_UNAVAILABLE,
title="",
slug="",
channel=channel,
published=timezone.datetime(2000, 1, 1, tzinfo=timezone.utc),
description="",
duration=0,
)
video.save()
job = queue.enqueue(download.download_video, video.id)
video.refresh_from_db()
assert job.is_finished
assert video.is_deleted
@pytest.mark.django_db
def test_update_channel(
download_dir, rq_queue, mock_get_video_details, mock_download_audio
download_dir, rq_queue, mock_redis, mock_get_video_details, mock_download_audio
):
# Remove 2 videos from the database so they can be imported
Video.objects.get(video_id="ZPxEr4YdWt8").delete()
Video.objects.get(video_id="_I5IFObm_-k").delete()
channel = Channel.objects.get(channel_id=CHANNEL_ID_THETADEV)
job = rq_queue.enqueue(download.update_channel, channel)
job = rq_queue.enqueue(download.update_channel, channel.id)
assert job.is_finished
mock_download_audio.assert_any_call(

View file

@ -19,7 +19,7 @@ def test_recreate_cover(download_dir_content_mut, rq_queue, mocker):
store = storage.Storage()
cf = store.get_or_create_channel_folder(video.channel.slug)
job = rq_queue.enqueue(library.recreate_cover, video)
job = rq_queue.enqueue(library.recreate_cover, video.id)
assert job.is_finished
create_cover_mock.assert_called_once_with(
@ -53,7 +53,7 @@ def test_update_channel_info(rq_queue, mock_get_channel_metadata):
channel.avatar_url = "Old avatar url"
channel.save()
job = rq_queue.enqueue(library.update_channel_info, channel)
job = rq_queue.enqueue(library.update_channel_info, channel.id)
assert job.is_finished
channel.refresh_from_db()
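A pattern running through both task test files above: jobs are now enqueued with primary keys (video.id, channel.id) rather than model instances. RQ serializes job arguments into Redis, so passing the id keeps the payload small and lets the worker re-load the row when the job actually runs. The task bodies are not shown in this diff; the sketch below only illustrates the assumed id-based pattern:

from ucast.models import Video

def download_video(video_id: int):
    # Assumed pattern: re-fetch inside the worker so the task always sees
    # the current database state instead of a pickled snapshot.
    video = Video.objects.get(id=video_id)
    ...  # download and post-processing would happen here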

View file

@ -17,17 +17,31 @@ urlpatterns = [
views.channel_download,
name="channel_download",
),
path("errors", views.download_errors, name="download_errors"),
path("downloads", views.downloads, name="downloads"),
path(
"errors/requeue", views.download_errors_requeue, name="download_errors_requeue"
"downloads/requeue",
views.download_errors_requeue,
name="download_errors_requeue",
),
path(
"errors/requeue_all",
"downloads/requeue_all",
views.download_errors_requeue_all,
name="download_errors_requeue_all",
),
path("errors/<str:job_id>", views.error_details, name="error_details"),
path(
"downloads/delete",
views.download_errors_delete,
name="download_errors_delete",
),
path(
"downloads/delete_all",
views.download_errors_delete_all,
name="download_errors_delete_all",
),
path("downloads/error/<str:job_id>", views.error_details, name="error_details"),
path("feed/<str:channel>", views.podcast_feed, name="feed"),
path("opml", views.channels_opml, name="channels_opml"),
path("search", views.search, name="search"),
path("files/audio/<str:channel>/<str:video>", views.audio),
path("files/cover/<str:channel>/<str:video>", views.cover),
path("files/thumbnail/<str:channel>/<str:video>", views.thumbnail),

View file

@ -16,7 +16,7 @@ from django.utils.decorators import decorator_from_middleware
from ucast import feed, forms, queue
from ucast.models import Channel, User, Video
from ucast.service import controller, storage
from ucast.service import controller, opml, storage
from ucast.tasks import download
@ -33,7 +33,7 @@ def home(request: http.HttpRequest):
channel_str = form.cleaned_data["channel_str"]
try:
channel = controller.create_channel(channel_str)
queue.enqueue(download.update_channel, channel)
queue.enqueue(download.update_channel, channel.id)
except ValueError:
form.add_error("channel_str", "Channel URL invalid")
except controller.ChannelAlreadyExistsException:
@ -91,6 +91,13 @@ def videos(request: http.HttpRequest, channel: str):
if request.htmx:
template_name = "ucast/videos_items.html"
n_pending = Video.objects.filter(
channel=chan,
downloaded__isnull=True,
is_deleted=False,
**chan.vfilter_args(),
).count()
return render(
request,
template_name,
@ -98,6 +105,7 @@ def videos(request: http.HttpRequest, channel: str):
"videos": videos_p.get_page(page_number),
"channel": chan,
"site_url": site_url,
"n_pending": n_pending,
},
)
@ -139,7 +147,7 @@ def channel_download(request: http.HttpRequest, channel: str):
form = forms.DownloadChannelForm(request.POST)
if form.is_valid():
queue.enqueue(
download.download_channel, chan, form.cleaned_data["n_videos"]
download.download_channel, chan.id, form.cleaned_data["n_videos"]
)
return http.HttpResponseRedirect(reverse(videos, args=[channel]))
@ -154,12 +162,26 @@ def channel_download(request: http.HttpRequest, channel: str):
@login_required
def download_errors(request: http.HttpRequest):
def downloads(request: http.HttpRequest):
freg = queue.get_failed_job_registry()
ids = freg.get_job_ids(0, 50)
jobs = freg.job_class.fetch_many(ids, freg.connection, freg.serializer)
failed_jobs = freg.job_class.fetch_many(ids, freg.connection, freg.serializer)
return render(request, "ucast/download_errors.html", {"jobs": jobs})
downloading_videos = queue.get_downloading_videos(limit=100)
template_name = "ucast/downloads.html"
if request.htmx:
template_name = "ucast/downloads_items.html"
return render(
request,
template_name,
{
"failed_jobs": failed_jobs,
"downloading_videos": downloading_videos,
"n_tasks": queue.get_queue().count,
},
)
@login_required
@ -178,7 +200,7 @@ def download_errors_requeue(request: http.HttpRequest):
freg = queue.get_failed_job_registry()
freg.requeue(str(form.cleaned_data["id"]))
return http.HttpResponseRedirect(reverse(download_errors))
return http.HttpResponseRedirect(reverse(downloads))
@login_required
@ -187,7 +209,52 @@ def download_errors_requeue_all(request: http.HttpRequest):
for job_id in freg.get_job_ids():
freg.requeue(job_id)
return http.HttpResponseRedirect(reverse(download_errors))
return http.HttpResponseRedirect(reverse(downloads))
@login_required
def download_errors_delete(request: http.HttpRequest):
form = forms.RequeueForm(request.POST)
if form.is_valid():
freg = queue.get_failed_job_registry()
freg.remove(str(form.cleaned_data["id"]), delete_job=True)
return http.HttpResponseRedirect(reverse(downloads))
@login_required
def download_errors_delete_all(request: http.HttpRequest):
freg = queue.get_failed_job_registry()
for job_id in freg.get_job_ids():
freg.remove(job_id, delete_job=True)
return http.HttpResponseRedirect(reverse(downloads))
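The two delete views lean on rq's FailedJobRegistry: remove(..., delete_job=True) drops the registry entry and deletes the stored job data, whereas requeue() puts the job back onto the queue for another attempt. A standalone sketch of the same calls outside Django, with the connection details assumed:

from redis import Redis
from rq import Queue
from rq.registry import FailedJobRegistry

redis = Redis()  # assumed local Redis instance
freg = FailedJobRegistry(queue=Queue(connection=redis))

for job_id in freg.get_job_ids():
    # Removes the registry entry and deletes the job hash from Redis entirely.
    freg.remove(job_id, delete_job=True)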
@login_required
def channels_opml(request: http.HttpRequest):
response = http.HttpResponse(
content_type="application/xml",
headers={"Content-Disposition": "attachment; filename=ucast_channels.opml"},
)
site_url = add_domain(get_current_site(request).domain, "", request.is_secure())
opml.write_channels_opml(
Channel.objects.all(), site_url, request.user.get_feed_key(), response
)
return response
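channels_opml streams an OPML document listing every channel's podcast feed. The ucast.service.opml module is not included in this diff, so the sketch below is only a guess at its shape; the function signature, the channel attribute names and the feed URL format are all assumptions:

from xml.etree import ElementTree as ET

def write_channels_opml(channels, site_url, feed_key, stream):
    # Hypothetical: one <outline> per channel, pointing at its authenticated feed URL.
    root = ET.Element("opml", version="2.0")
    body = ET.SubElement(root, "body")
    for channel in channels:
        ET.SubElement(
            body,
            "outline",
            type="rss",
            text=channel.name,  # attribute name assumed
            xmlUrl=f"{site_url}/feed/{channel.slug}?key={feed_key}",  # URL scheme assumed
        )
    ET.ElementTree(root).write(stream, encoding="unicode")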
@login_required
def search(request: http.HttpRequest):
query = request.GET.get("q")
vids = []
if query:
vids = Video.objects.filter(downloaded__isnull=False, title__icontains=query)[
:30
]
return render(request, "ucast/search.html", {"query": query, "videos": vids})
def _channel_file(channel: str, get_file: Callable[[storage.ChannelFolder], Path]):

View file

@ -32,8 +32,11 @@ def get_env(name, default=None):
def get_env_path(name, default=None):
raw_env = get_env(name)
if not raw_env:
return default
return Path(raw_env).absolute()
folder = default
else:
folder = Path(raw_env).absolute()
os.makedirs(folder, exist_ok=True)
return folder
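get_env_path now creates whatever directory it resolves, so locations such as DOWNLOAD_ROOT, CACHE_ROOT and DB_DIR are guaranteed to exist before anything tries to write into them. A small illustration; the variable name and default path here are made up:

from pathlib import Path

# With no environment variable set, the default path is used and created on the spot.
cache_dir = get_env_path("UNSET_EXAMPLE_VAR", Path("/tmp/ucast-example-cache"))
assert cache_dir == Path("/tmp/ucast-example-cache")
assert cache_dir.is_dir()  # ensured by os.makedirs(..., exist_ok=True)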
def get_env_list(name):
@ -142,7 +145,10 @@ def _get_db_config() -> dict:
if db_engine == "sqlite":
return {
"ENGINE": "django.db.backends.sqlite3",
"NAME": BASE_DIR / f"{db_name}.sqlite",
"NAME": DB_DIR / f"{db_name}.sqlite",
"OPTIONS": {
"timeout": 20,
},
}
db_port = get_env("DB_PORT")
@ -162,6 +168,18 @@ def _get_db_config() -> dict:
}
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/
STATIC_URL = "static/"
STATIC_ROOT = get_env_path("STATIC_ROOT", BASE_DIR / "static")
DOWNLOAD_ROOT = get_env_path("DOWNLOAD_ROOT", BASE_DIR / "data")
CACHE_ROOT = get_env_path("CACHE_ROOT", BASE_DIR / "cache")
DB_DIR = get_env_path("DB_DIR", BASE_DIR / "db")
STATICFILES_DIRS = [resources.path("ucast", "static")]
# Database
# https://docs.djangoproject.com/en/4.0/ref/settings/#databases
DATABASES = {
@ -202,15 +220,6 @@ USE_I18N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/
STATIC_URL = "static/"
STATIC_ROOT = get_env_path("STATIC_ROOT", BASE_DIR / "static")
DOWNLOAD_ROOT = get_env_path("DOWNLOAD_ROOT", BASE_DIR / "data")
STATICFILES_DIRS = [resources.path("ucast", "static")]
# Default primary key field type
# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field