Compare commits
10 commits
a3c7be3ae3
...
936a412caf
Author | SHA1 | Date | |
---|---|---|---|
936a412caf | |||
0fa6e5c07d | |||
5d49098004 | |||
4fd6239974 | |||
4b6733b9b6 | |||
28cb58356e | |||
8af98a44ae | |||
12e64e6c72 | |||
60250dd637 | |||
21552e6453 |
24
.drone.yml
|
@ -7,9 +7,31 @@ platform:
|
||||||
arch: ''
|
arch: ''
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Test
|
- name: install dependencies
|
||||||
image: thetadev256/ucast-dev
|
image: thetadev256/ucast-dev
|
||||||
|
volumes:
|
||||||
|
- name: cache
|
||||||
|
path: /root/.cache
|
||||||
commands:
|
commands:
|
||||||
- poetry install
|
- poetry install
|
||||||
|
- poetry run invoke reset
|
||||||
|
|
||||||
|
- name: lint
|
||||||
|
image: thetadev256/ucast-dev
|
||||||
|
volumes:
|
||||||
|
- name: cache
|
||||||
|
path: /root/.cache
|
||||||
|
commands:
|
||||||
- poetry run invoke lint
|
- poetry run invoke lint
|
||||||
|
|
||||||
|
- name: test
|
||||||
|
image: thetadev256/ucast-dev
|
||||||
|
volumes:
|
||||||
|
- name: cache
|
||||||
|
path: /root/.cache
|
||||||
|
commands:
|
||||||
- poetry run invoke test
|
- poetry run invoke test
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
- name: cache
|
||||||
|
temp: { }
|
||||||
|
|
14
.editorconfig
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
[*]
|
||||||
|
charset = utf-8
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 4
|
||||||
|
end_of_line = lf
|
||||||
|
trim_trailing_whitespace = true
|
||||||
|
insert_final_newline = true
|
||||||
|
max_line_length = 88
|
||||||
|
|
||||||
|
[{Makefile,*.go}]
|
||||||
|
indent_style = tab
|
||||||
|
|
||||||
|
[*.{json,md,rst,ini,yml,yaml}]
|
||||||
|
indent_size = 2
|
5
.gitignore
vendored
|
@ -14,11 +14,6 @@ node_modules
|
||||||
# Jupyter
|
# Jupyter
|
||||||
.ipynb_checkpoints
|
.ipynb_checkpoints
|
||||||
|
|
||||||
# Media files
|
|
||||||
*.webm
|
|
||||||
*.mp4
|
|
||||||
*.mp3
|
|
||||||
|
|
||||||
# Application data
|
# Application data
|
||||||
/_run*
|
/_run*
|
||||||
*.sqlite3
|
*.sqlite3
|
||||||
|
|
|
@ -1,7 +1,14 @@
|
||||||
version: "3"
|
version: "3"
|
||||||
services:
|
services:
|
||||||
redis:
|
redis:
|
||||||
container_name: ucast-redis
|
container_name: redis
|
||||||
image: redis:alpine
|
image: redis:alpine
|
||||||
ports:
|
ports:
|
||||||
- "127.0.0.1:6379:6379"
|
- "127.0.0.1:6379:6379"
|
||||||
|
|
||||||
|
rq-dashboard:
|
||||||
|
image: eoranged/rq-dashboard
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:9181:9181"
|
||||||
|
environment:
|
||||||
|
RQ_DASHBOARD_REDIS_URL: "redis://redis:6379"
|
||||||
|
|
371
poetry.lock
generated
|
@ -1,6 +1,6 @@
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "asgiref"
|
name = "asgiref"
|
||||||
version = "3.5.1"
|
version = "3.5.2"
|
||||||
description = "ASGI specs, helper code, and adapters"
|
description = "ASGI specs, helper code, and adapters"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
@ -60,11 +60,11 @@ cffi = ">=1.0.0"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "certifi"
|
name = "certifi"
|
||||||
version = "2021.10.8"
|
version = "2022.5.18.1"
|
||||||
description = "Python package for providing Mozilla's CA Bundle."
|
description = "Python package for providing Mozilla's CA Bundle."
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cffi"
|
name = "cffi"
|
||||||
|
@ -128,18 +128,29 @@ Pillow = "*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "coverage"
|
name = "coverage"
|
||||||
version = "6.3.3"
|
version = "6.4"
|
||||||
description = "Code coverage measurement for Python"
|
description = "Code coverage measurement for Python"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
tomli = {version = "*", optional = true, markers = "extra == \"toml\""}
|
tomli = {version = "*", optional = true, markers = "python_version < \"3.11\" and extra == \"toml\""}
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
toml = ["tomli"]
|
toml = ["tomli"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "croniter"
|
||||||
|
version = "1.3.5"
|
||||||
|
description = "croniter provides iteration for datetime object with cron like format"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
python-dateutil = "*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "deprecated"
|
name = "deprecated"
|
||||||
version = "1.2.13"
|
version = "1.2.13"
|
||||||
|
@ -190,9 +201,27 @@ python-versions = ">=3.7"
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
django = ">=2.2"
|
django = ">=2.2"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fakeredis"
|
||||||
|
version = "1.7.5"
|
||||||
|
description = "Fake implementation of redis API for testing purposes."
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
packaging = "*"
|
||||||
|
redis = "<=4.3.1"
|
||||||
|
six = ">=1.12"
|
||||||
|
sortedcontainers = "*"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
aioredis = ["aioredis"]
|
||||||
|
lua = ["lupa"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "feedparser"
|
name = "feedparser"
|
||||||
version = "6.0.8"
|
version = "6.0.10"
|
||||||
description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds"
|
description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
@ -203,7 +232,7 @@ sgmllib3k = "*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "filelock"
|
name = "filelock"
|
||||||
version = "3.6.0"
|
version = "3.7.0"
|
||||||
description = "A platform independent file lock."
|
description = "A platform independent file lock."
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
|
@ -229,9 +258,23 @@ category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "honcho"
|
||||||
|
version = "1.1.0"
|
||||||
|
description = "Honcho: a Python clone of Foreman. For managing Procfile-based applications."
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
export = ["jinja2 (>=2.7,<3)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "identify"
|
name = "identify"
|
||||||
version = "2.5.0"
|
version = "2.5.1"
|
||||||
description = "File identification library for Python"
|
description = "File identification library for Python"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
|
@ -264,6 +307,19 @@ category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "mock"
|
||||||
|
version = "4.0.3"
|
||||||
|
description = "Rolling backport of unittest.mock for all Pythons"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
build = ["twine", "wheel", "blurb"]
|
||||||
|
docs = ["sphinx"]
|
||||||
|
test = ["pytest (<5.4)", "pytest-cov"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mutagen"
|
name = "mutagen"
|
||||||
version = "1.45.1"
|
version = "1.45.1"
|
||||||
|
@ -301,7 +357,7 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pillow"
|
name = "pillow"
|
||||||
version = "9.1.0"
|
version = "9.1.1"
|
||||||
description = "Python Imaging Library (Fork)"
|
description = "Python Imaging Library (Fork)"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
@ -445,6 +501,31 @@ pytest = ">=5.4.0"
|
||||||
docs = ["sphinx", "sphinx-rtd-theme"]
|
docs = ["sphinx", "sphinx-rtd-theme"]
|
||||||
testing = ["django", "django-configurations (>=2.0)"]
|
testing = ["django", "django-configurations (>=2.0)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest-mock"
|
||||||
|
version = "3.7.0"
|
||||||
|
description = "Thin-wrapper around the mock package for easier use with pytest"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
pytest = ">=5.0"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
dev = ["pre-commit", "tox", "pytest-asyncio"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "python-dateutil"
|
||||||
|
version = "2.8.2"
|
||||||
|
description = "Extensions to the standard Python datetime module"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
six = ">=1.5"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "python-dotenv"
|
name = "python-dotenv"
|
||||||
version = "0.20.0"
|
version = "0.20.0"
|
||||||
|
@ -456,6 +537,20 @@ python-versions = ">=3.5"
|
||||||
[package.extras]
|
[package.extras]
|
||||||
cli = ["click (>=5.0)"]
|
cli = ["click (>=5.0)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "python-slugify"
|
||||||
|
version = "6.1.2"
|
||||||
|
description = "A Python slugify application that also handles Unicode"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
text-unidecode = ">=1.3"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
unidecode = ["Unidecode (>=1.1.1)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyyaml"
|
name = "pyyaml"
|
||||||
version = "6.0"
|
version = "6.0"
|
||||||
|
@ -519,6 +614,19 @@ python-versions = ">=3.5"
|
||||||
click = ">=5.0.0"
|
click = ">=5.0.0"
|
||||||
redis = ">=3.5.0"
|
redis = ">=3.5.0"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "rq-scheduler"
|
||||||
|
version = "0.11.0"
|
||||||
|
description = "Provides job scheduling capabilities to RQ (Redis Queue)"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
croniter = ">=0.3.9"
|
||||||
|
python-dateutil = "*"
|
||||||
|
rq = ">=0.13"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "sgmllib3k"
|
name = "sgmllib3k"
|
||||||
version = "1.0.0"
|
version = "1.0.0"
|
||||||
|
@ -531,15 +639,15 @@ python-versions = "*"
|
||||||
name = "six"
|
name = "six"
|
||||||
version = "1.16.0"
|
version = "1.16.0"
|
||||||
description = "Python 2 and 3 compatibility utilities"
|
description = "Python 2 and 3 compatibility utilities"
|
||||||
category = "dev"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "slugify"
|
name = "sortedcontainers"
|
||||||
version = "0.0.1"
|
version = "2.4.0"
|
||||||
description = "A generic slugifier."
|
description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
|
||||||
category = "main"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = "*"
|
||||||
|
|
||||||
|
@ -551,6 +659,14 @@ category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.5"
|
python-versions = ">=3.5"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "text-unidecode"
|
||||||
|
version = "1.3"
|
||||||
|
description = "The most basic Text::Unidecode port"
|
||||||
|
category = "main"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "toml"
|
name = "toml"
|
||||||
version = "0.10.2"
|
version = "0.10.2"
|
||||||
|
@ -632,7 +748,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "yt-dlp"
|
name = "yt-dlp"
|
||||||
version = "2022.4.8"
|
version = "2022.5.18"
|
||||||
description = "A youtube-dl fork with additional features and patches"
|
description = "A youtube-dl fork with additional features and patches"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
|
@ -649,12 +765,12 @@ websockets = "*"
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "1.1"
|
lock-version = "1.1"
|
||||||
python-versions = "^3.10"
|
python-versions = "^3.10"
|
||||||
content-hash = "2d9aa9c628676b6c9981964a7e01a8d0b0a291025b695c5d98441d29720bced0"
|
content-hash = "ad3a5ecd6fc1152dfdfda51ed1e401ec11a048661a04f42985c15bc28e8eda9f"
|
||||||
|
|
||||||
[metadata.files]
|
[metadata.files]
|
||||||
asgiref = [
|
asgiref = [
|
||||||
{file = "asgiref-3.5.1-py3-none-any.whl", hash = "sha256:45a429524fba18aba9d512498b19d220c4d628e75b40cf5c627524dbaebc5cc1"},
|
{file = "asgiref-3.5.2-py3-none-any.whl", hash = "sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4"},
|
||||||
{file = "asgiref-3.5.1.tar.gz", hash = "sha256:fddeea3c53fa99d0cdb613c3941cc6e52d822491fc2753fba25768fb5bf4e865"},
|
{file = "asgiref-3.5.2.tar.gz", hash = "sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"},
|
||||||
]
|
]
|
||||||
async-timeout = [
|
async-timeout = [
|
||||||
{file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
|
{file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
|
||||||
|
@ -765,8 +881,8 @@ brotlicffi = [
|
||||||
{file = "brotlicffi-1.0.9.2.tar.gz", hash = "sha256:0c248a68129d8fc6a217767406c731e498c3e19a7be05ea0a90c3c86637b7d96"},
|
{file = "brotlicffi-1.0.9.2.tar.gz", hash = "sha256:0c248a68129d8fc6a217767406c731e498c3e19a7be05ea0a90c3c86637b7d96"},
|
||||||
]
|
]
|
||||||
certifi = [
|
certifi = [
|
||||||
{file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
|
{file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"},
|
||||||
{file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
|
{file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"},
|
||||||
]
|
]
|
||||||
cffi = [
|
cffi = [
|
||||||
{file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
|
{file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"},
|
||||||
|
@ -841,47 +957,51 @@ colorthief = [
|
||||||
{file = "colorthief-0.2.1.tar.gz", hash = "sha256:079cb0c95bdd669c4643e2f7494de13b0b6029d5cdbe2d74d5d3c3386bd57221"},
|
{file = "colorthief-0.2.1.tar.gz", hash = "sha256:079cb0c95bdd669c4643e2f7494de13b0b6029d5cdbe2d74d5d3c3386bd57221"},
|
||||||
]
|
]
|
||||||
coverage = [
|
coverage = [
|
||||||
{file = "coverage-6.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df32ee0f4935a101e4b9a5f07b617d884a531ed5666671ff6ac66d2e8e8246d8"},
|
{file = "coverage-6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50ed480b798febce113709846b11f5d5ed1e529c88d8ae92f707806c50297abf"},
|
||||||
{file = "coverage-6.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75b5dbffc334e0beb4f6c503fb95e6d422770fd2d1b40a64898ea26d6c02742d"},
|
{file = "coverage-6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:26f8f92699756cb7af2b30720de0c5bb8d028e923a95b6d0c891088025a1ac8f"},
|
||||||
{file = "coverage-6.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:114944e6061b68a801c5da5427b9173a0dd9d32cd5fcc18a13de90352843737d"},
|
{file = "coverage-6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60c2147921da7f4d2d04f570e1838db32b95c5509d248f3fe6417e91437eaf41"},
|
||||||
{file = "coverage-6.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab88a01cd180b5640ccc9c47232e31924d5f9967ab7edd7e5c91c68eee47a69"},
|
{file = "coverage-6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:750e13834b597eeb8ae6e72aa58d1d831b96beec5ad1d04479ae3772373a8088"},
|
||||||
{file = "coverage-6.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad8f9068f5972a46d50fe5f32c09d6ee11da69c560fcb1b4c3baea246ca4109b"},
|
{file = "coverage-6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af5b9ee0fc146e907aa0f5fb858c3b3da9199d78b7bb2c9973d95550bd40f701"},
|
||||||
{file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4cd696aa712e6cd16898d63cf66139dc70d998f8121ab558f0e1936396dbc579"},
|
{file = "coverage-6.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a022394996419142b33a0cf7274cb444c01d2bb123727c4bb0b9acabcb515dea"},
|
||||||
{file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c1a9942e282cc9d3ed522cd3e3cab081149b27ea3bda72d6f61f84eaf88c1a63"},
|
{file = "coverage-6.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5a78cf2c43b13aa6b56003707c5203f28585944c277c1f3f109c7b041b16bd39"},
|
||||||
{file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c06455121a089252b5943ea682187a4e0a5cf0a3fb980eb8e7ce394b144430a9"},
|
{file = "coverage-6.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9229d074e097f21dfe0643d9d0140ee7433814b3f0fc3706b4abffd1e3038632"},
|
||||||
{file = "coverage-6.3.3-cp310-cp310-win32.whl", hash = "sha256:cb5311d6ccbd22578c80028c5e292a7ab9adb91bd62c1982087fad75abe2e63d"},
|
{file = "coverage-6.4-cp310-cp310-win32.whl", hash = "sha256:fb45fe08e1abc64eb836d187b20a59172053999823f7f6ef4f18a819c44ba16f"},
|
||||||
{file = "coverage-6.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d4a6f30f611e657495cc81a07ff7aa8cd949144e7667c5d3e680d73ba7a70e4"},
|
{file = "coverage-6.4-cp310-cp310-win_amd64.whl", hash = "sha256:3cfd07c5889ddb96a401449109a8b97a165be9d67077df6802f59708bfb07720"},
|
||||||
{file = "coverage-6.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:79bf405432428e989cad7b8bc60581963238f7645ae8a404f5dce90236cc0293"},
|
{file = "coverage-6.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:03014a74023abaf5a591eeeaf1ac66a73d54eba178ff4cb1fa0c0a44aae70383"},
|
||||||
{file = "coverage-6.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:338c417613f15596af9eb7a39353b60abec9d8ce1080aedba5ecee6a5d85f8d3"},
|
{file = "coverage-6.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c82f2cd69c71698152e943f4a5a6b83a3ab1db73b88f6e769fabc86074c3b08"},
|
||||||
{file = "coverage-6.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db094a6a4ae6329ed322a8973f83630b12715654c197dd392410400a5bfa1a73"},
|
{file = "coverage-6.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b546cf2b1974ddc2cb222a109b37c6ed1778b9be7e6b0c0bc0cf0438d9e45a6"},
|
||||||
{file = "coverage-6.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1414e8b124611bf4df8d77215bd32cba6e3425da8ce9c1f1046149615e3a9a31"},
|
{file = "coverage-6.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc173f1ce9ffb16b299f51c9ce53f66a62f4d975abe5640e976904066f3c835d"},
|
||||||
{file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:93b16b08f94c92cab88073ffd185070cdcb29f1b98df8b28e6649145b7f2c90d"},
|
{file = "coverage-6.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c53ad261dfc8695062fc8811ac7c162bd6096a05a19f26097f411bdf5747aee7"},
|
||||||
{file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fbc86ae8cc129c801e7baaafe3addf3c8d49c9c1597c44bdf2d78139707c3c62"},
|
{file = "coverage-6.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:eef5292b60b6de753d6e7f2d128d5841c7915fb1e3321c3a1fe6acfe76c38052"},
|
||||||
{file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b5ba058610e8289a07db2a57bce45a1793ec0d3d11db28c047aae2aa1a832572"},
|
{file = "coverage-6.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:543e172ce4c0de533fa892034cce260467b213c0ea8e39da2f65f9a477425211"},
|
||||||
{file = "coverage-6.3.3-cp37-cp37m-win32.whl", hash = "sha256:8329635c0781927a2c6ae068461e19674c564e05b86736ab8eb29c420ee7dc20"},
|
{file = "coverage-6.4-cp37-cp37m-win32.whl", hash = "sha256:00c8544510f3c98476bbd58201ac2b150ffbcce46a8c3e4fb89ebf01998f806a"},
|
||||||
{file = "coverage-6.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:e5af1feee71099ae2e3b086ec04f57f9950e1be9ecf6c420696fea7977b84738"},
|
{file = "coverage-6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:b84ab65444dcc68d761e95d4d70f3cfd347ceca5a029f2ffec37d4f124f61311"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e814a4a5a1d95223b08cdb0f4f57029e8eab22ffdbae2f97107aeef28554517e"},
|
{file = "coverage-6.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d548edacbf16a8276af13063a2b0669d58bbcfca7c55a255f84aac2870786a61"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61f4fbf3633cb0713437291b8848634ea97f89c7e849c2be17a665611e433f53"},
|
{file = "coverage-6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:033ebec282793bd9eb988d0271c211e58442c31077976c19c442e24d827d356f"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3401b0d2ed9f726fadbfa35102e00d1b3547b73772a1de5508ef3bdbcb36afe7"},
|
{file = "coverage-6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:742fb8b43835078dd7496c3c25a1ec8d15351df49fb0037bffb4754291ef30ce"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8586b177b4407f988731eb7f41967415b2197f35e2a6ee1a9b9b561f6323c8e9"},
|
{file = "coverage-6.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55fae115ef9f67934e9f1103c9ba826b4c690e4c5bcf94482b8b2398311bf9c"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:892e7fe32191960da559a14536768a62e83e87bbb867e1b9c643e7e0fbce2579"},
|
{file = "coverage-6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd698341626f3c77784858427bad0cdd54a713115b423d22ac83a28303d1d95"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:afb03f981fadb5aed1ac6e3dd34f0488e1a0875623d557b6fad09b97a942b38a"},
|
{file = "coverage-6.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:62d382f7d77eeeaff14b30516b17bcbe80f645f5cf02bb755baac376591c653c"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cbe91bc84be4e5ef0b1480d15c7b18e29c73bdfa33e07d3725da7d18e1b0aff2"},
|
{file = "coverage-6.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:016d7f5cf1c8c84f533a3c1f8f36126fbe00b2ec0ccca47cc5731c3723d327c6"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:91502bf27cbd5c83c95cfea291ef387469f2387508645602e1ca0fd8a4ba7548"},
|
{file = "coverage-6.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:69432946f154c6add0e9ede03cc43b96e2ef2733110a77444823c053b1ff5166"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-win32.whl", hash = "sha256:c488db059848702aff30aa1d90ef87928d4e72e4f00717343800546fdbff0a94"},
|
{file = "coverage-6.4-cp38-cp38-win32.whl", hash = "sha256:83bd142cdec5e4a5c4ca1d4ff6fa807d28460f9db919f9f6a31babaaa8b88426"},
|
||||||
{file = "coverage-6.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6534fcdfb5c503affb6b1130db7b5bfc8a0f77fa34880146f7a5c117987d0"},
|
{file = "coverage-6.4-cp38-cp38-win_amd64.whl", hash = "sha256:4002f9e8c1f286e986fe96ec58742b93484195defc01d5cc7809b8f7acb5ece3"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cc692c9ee18f0dd3214843779ba6b275ee4bb9b9a5745ba64265bce911aefd1a"},
|
{file = "coverage-6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e4f52c272fdc82e7c65ff3f17a7179bc5f710ebc8ce8a5cadac81215e8326740"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:462105283de203df8de58a68c1bb4ba2a8a164097c2379f664fa81d6baf94b81"},
|
{file = "coverage-6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5578efe4038be02d76c344007b13119b2b20acd009a88dde8adec2de4f630b5"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc972d829ad5ef4d4c5fcabd2bbe2add84ce8236f64ba1c0c72185da3a273130"},
|
{file = "coverage-6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8099ea680201c2221f8468c372198ceba9338a5fec0e940111962b03b3f716a"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06f54765cdbce99901871d50fe9f41d58213f18e98b170a30ca34f47de7dd5e8"},
|
{file = "coverage-6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a00441f5ea4504f5abbc047589d09e0dc33eb447dc45a1a527c8b74bfdd32c65"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7835f76a081787f0ca62a53504361b3869840a1620049b56d803a8cb3a9eeea3"},
|
{file = "coverage-6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e76bd16f0e31bc2b07e0fb1379551fcd40daf8cdf7e24f31a29e442878a827c"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6f5fee77ec3384b934797f1873758f796dfb4f167e1296dc00f8b2e023ce6ee9"},
|
{file = "coverage-6.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8d2e80dd3438e93b19e1223a9850fa65425e77f2607a364b6fd134fcd52dc9df"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:baa8be8aba3dd1e976e68677be68a960a633a6d44c325757aefaa4d66175050f"},
|
{file = "coverage-6.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:341e9c2008c481c5c72d0e0dbf64980a4b2238631a7f9780b0fe2e95755fb018"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d06380e777dd6b35ee936f333d55b53dc4a8271036ff884c909cf6e94be8b6c"},
|
{file = "coverage-6.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:21e6686a95025927775ac501e74f5940cdf6fe052292f3a3f7349b0abae6d00f"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-win32.whl", hash = "sha256:f8cabc5fd0091976ab7b020f5708335033e422de25e20ddf9416bdce2b7e07d8"},
|
{file = "coverage-6.4-cp39-cp39-win32.whl", hash = "sha256:968ed5407f9460bd5a591cefd1388cc00a8f5099de9e76234655ae48cfdbe2c3"},
|
||||||
{file = "coverage-6.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c9441d57b0963cf8340268ad62fc83de61f1613034b79c2b1053046af0c5284"},
|
{file = "coverage-6.4-cp39-cp39-win_amd64.whl", hash = "sha256:e35217031e4b534b09f9b9a5841b9344a30a6357627761d4218818b865d45055"},
|
||||||
{file = "coverage-6.3.3-pp36.pp37.pp38-none-any.whl", hash = "sha256:d522f1dc49127eab0bfbba4e90fa068ecff0899bbf61bf4065c790ddd6c177fe"},
|
{file = "coverage-6.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:e637ae0b7b481905358624ef2e81d7fb0b1af55f5ff99f9ba05442a444b11e45"},
|
||||||
{file = "coverage-6.3.3.tar.gz", hash = "sha256:2781c43bffbbec2b8867376d4d61916f5e9c4cc168232528562a61d1b4b01879"},
|
{file = "coverage-6.4.tar.gz", hash = "sha256:727dafd7f67a6e1cad808dc884bd9c5a2f6ef1f8f6d2f22b37b96cb0080d4f49"},
|
||||||
|
]
|
||||||
|
croniter = [
|
||||||
|
{file = "croniter-1.3.5-py2.py3-none-any.whl", hash = "sha256:4f72faca42c00beb6e30907f1315145f43dfbe5ec0ad4ada24b4c0d57b86a33a"},
|
||||||
|
{file = "croniter-1.3.5.tar.gz", hash = "sha256:7592fc0e8a00d82af98dfa2768b75983b6fb4c2adc8f6d0d7c931a715b7cefee"},
|
||||||
]
|
]
|
||||||
deprecated = [
|
deprecated = [
|
||||||
{file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"},
|
{file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"},
|
||||||
|
@ -899,13 +1019,17 @@ django-bulma = [
|
||||||
{file = "django-bulma-0.8.3.tar.gz", hash = "sha256:b794b4e64f482de77f376451f7cd8b3c8448eb68e5a24c51b9190625a08b0b30"},
|
{file = "django-bulma-0.8.3.tar.gz", hash = "sha256:b794b4e64f482de77f376451f7cd8b3c8448eb68e5a24c51b9190625a08b0b30"},
|
||||||
{file = "django_bulma-0.8.3-py3-none-any.whl", hash = "sha256:0ef6e5c171c2a32010e724a8be61ba6cd0e55ebbd242cf6780560518483c4d00"},
|
{file = "django_bulma-0.8.3-py3-none-any.whl", hash = "sha256:0ef6e5c171c2a32010e724a8be61ba6cd0e55ebbd242cf6780560518483c4d00"},
|
||||||
]
|
]
|
||||||
|
fakeredis = [
|
||||||
|
{file = "fakeredis-1.7.5-py3-none-any.whl", hash = "sha256:c4ca2be686e7e7637756ccc7dcad8472a5e4866b065431107d7a4b7a250d4e6f"},
|
||||||
|
{file = "fakeredis-1.7.5.tar.gz", hash = "sha256:49375c630981dd4045d9a92e2709fcd4476c91f927e0228493eefa625e705133"},
|
||||||
|
]
|
||||||
feedparser = [
|
feedparser = [
|
||||||
{file = "feedparser-6.0.8-py3-none-any.whl", hash = "sha256:1b7f57841d9cf85074deb316ed2c795091a238adb79846bc46dccdaf80f9c59a"},
|
{file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"},
|
||||||
{file = "feedparser-6.0.8.tar.gz", hash = "sha256:5ce0410a05ab248c8c7cfca3a0ea2203968ee9ff4486067379af4827a59f9661"},
|
{file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"},
|
||||||
]
|
]
|
||||||
filelock = [
|
filelock = [
|
||||||
{file = "filelock-3.6.0-py3-none-any.whl", hash = "sha256:f8314284bfffbdcfa0ff3d7992b023d4c628ced6feb957351d4c48d059f56bc0"},
|
{file = "filelock-3.7.0-py3-none-any.whl", hash = "sha256:c7b5fdb219b398a5b28c8e4c1893ef5f98ece6a38c6ab2c22e26ec161556fed6"},
|
||||||
{file = "filelock-3.6.0.tar.gz", hash = "sha256:9cd540a9352e432c7246a48fe4e8712b10acb1df2ad1f30e8c070b82ae1fed85"},
|
{file = "filelock-3.7.0.tar.gz", hash = "sha256:b795f1b42a61bbf8ec7113c341dad679d772567b936fbd1bf43c9a238e673e20"},
|
||||||
]
|
]
|
||||||
font-source-sans-pro = [
|
font-source-sans-pro = [
|
||||||
{file = "font-source-sans-pro-0.0.1.tar.gz", hash = "sha256:3f81d8e52b0d7e930e2c867c0d3ee549312d03f97b71b664a8361006311f72e5"},
|
{file = "font-source-sans-pro-0.0.1.tar.gz", hash = "sha256:3f81d8e52b0d7e930e2c867c0d3ee549312d03f97b71b664a8361006311f72e5"},
|
||||||
|
@ -917,9 +1041,13 @@ fonts = [
|
||||||
{file = "fonts-0.0.3-py3-none-any.whl", hash = "sha256:e5f551379088ab260c2537980c3ccdff8af93408d9d4fa3319388d2ee25b7b6d"},
|
{file = "fonts-0.0.3-py3-none-any.whl", hash = "sha256:e5f551379088ab260c2537980c3ccdff8af93408d9d4fa3319388d2ee25b7b6d"},
|
||||||
{file = "fonts-0.0.3.tar.gz", hash = "sha256:c626655b75a60715e118e44e270656fd22fd8f54252901ff6ebf1308ad01c405"},
|
{file = "fonts-0.0.3.tar.gz", hash = "sha256:c626655b75a60715e118e44e270656fd22fd8f54252901ff6ebf1308ad01c405"},
|
||||||
]
|
]
|
||||||
|
honcho = [
|
||||||
|
{file = "honcho-1.1.0-py2.py3-none-any.whl", hash = "sha256:a4d6e3a88a7b51b66351ecfc6e9d79d8f4b87351db9ad7e923f5632cc498122f"},
|
||||||
|
{file = "honcho-1.1.0.tar.gz", hash = "sha256:c5eca0bded4bef6697a23aec0422fd4f6508ea3581979a3485fc4b89357eb2a9"},
|
||||||
|
]
|
||||||
identify = [
|
identify = [
|
||||||
{file = "identify-2.5.0-py2.py3-none-any.whl", hash = "sha256:3acfe15a96e4272b4ec5662ee3e231ceba976ef63fd9980ed2ce9cc415df393f"},
|
{file = "identify-2.5.1-py2.py3-none-any.whl", hash = "sha256:0dca2ea3e4381c435ef9c33ba100a78a9b40c0bab11189c7cf121f75815efeaa"},
|
||||||
{file = "identify-2.5.0.tar.gz", hash = "sha256:c83af514ea50bf2be2c4a3f2fb349442b59dc87284558ae9ff54191bff3541d2"},
|
{file = "identify-2.5.1.tar.gz", hash = "sha256:3d11b16f3fe19f52039fb7e39c9c884b21cb1b586988114fbe42671f03de3e82"},
|
||||||
]
|
]
|
||||||
idna = [
|
idna = [
|
||||||
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
|
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
|
||||||
|
@ -933,6 +1061,10 @@ invoke = [
|
||||||
{file = "invoke-1.7.1-py3-none-any.whl", hash = "sha256:2dc975b4f92be0c0a174ad2d063010c8a1fdb5e9389d69871001118b4fcac4fb"},
|
{file = "invoke-1.7.1-py3-none-any.whl", hash = "sha256:2dc975b4f92be0c0a174ad2d063010c8a1fdb5e9389d69871001118b4fcac4fb"},
|
||||||
{file = "invoke-1.7.1.tar.gz", hash = "sha256:7b6deaf585eee0a848205d0b8c0014b9bf6f287a8eb798818a642dff1df14b19"},
|
{file = "invoke-1.7.1.tar.gz", hash = "sha256:7b6deaf585eee0a848205d0b8c0014b9bf6f287a8eb798818a642dff1df14b19"},
|
||||||
]
|
]
|
||||||
|
mock = [
|
||||||
|
{file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"},
|
||||||
|
{file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"},
|
||||||
|
]
|
||||||
mutagen = [
|
mutagen = [
|
||||||
{file = "mutagen-1.45.1-py3-none-any.whl", hash = "sha256:9c9f243fcec7f410f138cb12c21c84c64fde4195481a30c9bfb05b5f003adfed"},
|
{file = "mutagen-1.45.1-py3-none-any.whl", hash = "sha256:9c9f243fcec7f410f138cb12c21c84c64fde4195481a30c9bfb05b5f003adfed"},
|
||||||
{file = "mutagen-1.45.1.tar.gz", hash = "sha256:6397602efb3c2d7baebd2166ed85731ae1c1d475abca22090b7141ff5034b3e1"},
|
{file = "mutagen-1.45.1.tar.gz", hash = "sha256:6397602efb3c2d7baebd2166ed85731ae1c1d475abca22090b7141ff5034b3e1"},
|
||||||
|
@ -953,44 +1085,44 @@ packaging = [
|
||||||
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
|
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
|
||||||
]
|
]
|
||||||
pillow = [
|
pillow = [
|
||||||
{file = "Pillow-9.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:af79d3fde1fc2e33561166d62e3b63f0cc3e47b5a3a2e5fea40d4917754734ea"},
|
{file = "Pillow-9.1.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:42dfefbef90eb67c10c45a73a9bc1599d4dac920f7dfcbf4ec6b80cb620757fe"},
|
||||||
{file = "Pillow-9.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:55dd1cf09a1fd7c7b78425967aacae9b0d70125f7d3ab973fadc7b5abc3de652"},
|
{file = "Pillow-9.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffde4c6fabb52891d81606411cbfaf77756e3b561b566efd270b3ed3791fde4e"},
|
||||||
{file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66822d01e82506a19407d1afc104c3fcea3b81d5eb11485e593ad6b8492f995a"},
|
{file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c857532c719fb30fafabd2371ce9b7031812ff3889d75273827633bca0c4602"},
|
||||||
{file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5eaf3b42df2bcda61c53a742ee2c6e63f777d0e085bbc6b2ab7ed57deb13db7"},
|
{file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59789a7d06c742e9d13b883d5e3569188c16acb02eeed2510fd3bfdbc1bd1530"},
|
||||||
{file = "Pillow-9.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01ce45deec9df310cbbee11104bae1a2a43308dd9c317f99235b6d3080ddd66e"},
|
{file = "Pillow-9.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d45dbe4b21a9679c3e8b3f7f4f42a45a7d3ddff8a4a16109dff0e1da30a35b2"},
|
||||||
{file = "Pillow-9.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aea7ce61328e15943d7b9eaca87e81f7c62ff90f669116f857262e9da4057ba3"},
|
{file = "Pillow-9.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e9ed59d1b6ee837f4515b9584f3d26cf0388b742a11ecdae0d9237a94505d03a"},
|
||||||
{file = "Pillow-9.1.0-cp310-cp310-win32.whl", hash = "sha256:7a053bd4d65a3294b153bdd7724dce864a1d548416a5ef61f6d03bf149205160"},
|
{file = "Pillow-9.1.1-cp310-cp310-win32.whl", hash = "sha256:b3fe2ff1e1715d4475d7e2c3e8dabd7c025f4410f79513b4ff2de3d51ce0fa9c"},
|
||||||
{file = "Pillow-9.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:97bda660702a856c2c9e12ec26fc6d187631ddfd896ff685814ab21ef0597033"},
|
{file = "Pillow-9.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5b650dbbc0969a4e226d98a0b440c2f07a850896aed9266b6fedc0f7e7834108"},
|
||||||
{file = "Pillow-9.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21dee8466b42912335151d24c1665fcf44dc2ee47e021d233a40c3ca5adae59c"},
|
{file = "Pillow-9.1.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:0b4d5ad2cd3a1f0d1df882d926b37dbb2ab6c823ae21d041b46910c8f8cd844b"},
|
||||||
{file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b6d4050b208c8ff886fd3db6690bf04f9a48749d78b41b7a5bf24c236ab0165"},
|
{file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9370d6744d379f2de5d7fa95cdbd3a4d92f0b0ef29609b4b1687f16bc197063d"},
|
||||||
{file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5cfca31ab4c13552a0f354c87fbd7f162a4fafd25e6b521bba93a57fe6a3700a"},
|
{file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b761727ed7d593e49671d1827044b942dd2f4caae6e51bab144d4accf8244a84"},
|
||||||
{file = "Pillow-9.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed742214068efa95e9844c2d9129e209ed63f61baa4d54dbf4cf8b5e2d30ccf2"},
|
{file = "Pillow-9.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a66fe50386162df2da701b3722781cbe90ce043e7d53c1fd6bd801bca6b48d4"},
|
||||||
{file = "Pillow-9.1.0-cp37-cp37m-win32.whl", hash = "sha256:c9efef876c21788366ea1f50ecb39d5d6f65febe25ad1d4c0b8dff98843ac244"},
|
{file = "Pillow-9.1.1-cp37-cp37m-win32.whl", hash = "sha256:2b291cab8a888658d72b575a03e340509b6b050b62db1f5539dd5cd18fd50578"},
|
||||||
{file = "Pillow-9.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:de344bcf6e2463bb25179d74d6e7989e375f906bcec8cb86edb8b12acbc7dfef"},
|
{file = "Pillow-9.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1d4331aeb12f6b3791911a6da82de72257a99ad99726ed6b63f481c0184b6fb9"},
|
||||||
{file = "Pillow-9.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:17869489de2fce6c36690a0c721bd3db176194af5f39249c1ac56d0bb0fcc512"},
|
{file = "Pillow-9.1.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8844217cdf66eabe39567118f229e275f0727e9195635a15e0e4b9227458daaf"},
|
||||||
{file = "Pillow-9.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:25023a6209a4d7c42154073144608c9a71d3512b648a2f5d4465182cb93d3477"},
|
{file = "Pillow-9.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b6617221ff08fbd3b7a811950b5c3f9367f6e941b86259843eab77c8e3d2b56b"},
|
||||||
{file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8782189c796eff29dbb37dd87afa4ad4d40fc90b2742704f94812851b725964b"},
|
{file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20d514c989fa28e73a5adbddd7a171afa5824710d0ab06d4e1234195d2a2e546"},
|
||||||
{file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:463acf531f5d0925ca55904fa668bb3461c3ef6bc779e1d6d8a488092bdee378"},
|
{file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:088df396b047477dd1bbc7de6e22f58400dae2f21310d9e2ec2933b2ef7dfa4f"},
|
||||||
{file = "Pillow-9.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f42364485bfdab19c1373b5cd62f7c5ab7cc052e19644862ec8f15bb8af289e"},
|
{file = "Pillow-9.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53c27bd452e0f1bc4bfed07ceb235663a1df7c74df08e37fd6b03eb89454946a"},
|
||||||
{file = "Pillow-9.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3fddcdb619ba04491e8f771636583a7cc5a5051cd193ff1aa1ee8616d2a692c5"},
|
{file = "Pillow-9.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3f6c1716c473ebd1649663bf3b42702d0d53e27af8b64642be0dd3598c761fb1"},
|
||||||
{file = "Pillow-9.1.0-cp38-cp38-win32.whl", hash = "sha256:4fe29a070de394e449fd88ebe1624d1e2d7ddeed4c12e0b31624561b58948d9a"},
|
{file = "Pillow-9.1.1-cp38-cp38-win32.whl", hash = "sha256:c67db410508b9de9c4694c57ed754b65a460e4812126e87f5052ecf23a011a54"},
|
||||||
{file = "Pillow-9.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c24f718f9dd73bb2b31a6201e6db5ea4a61fdd1d1c200f43ee585fc6dcd21b34"},
|
{file = "Pillow-9.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:f054b020c4d7e9786ae0404278ea318768eb123403b18453e28e47cdb7a0a4bf"},
|
||||||
{file = "Pillow-9.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fb89397013cf302f282f0fc998bb7abf11d49dcff72c8ecb320f76ea6e2c5717"},
|
{file = "Pillow-9.1.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:c17770a62a71718a74b7548098a74cd6880be16bcfff5f937f900ead90ca8e92"},
|
||||||
{file = "Pillow-9.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c870193cce4b76713a2b29be5d8327c8ccbe0d4a49bc22968aa1e680930f5581"},
|
{file = "Pillow-9.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3f6a6034140e9e17e9abc175fc7a266a6e63652028e157750bd98e804a8ed9a"},
|
||||||
{file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69e5ddc609230d4408277af135c5b5c8fe7a54b2bdb8ad7c5100b86b3aab04c6"},
|
{file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f372d0f08eff1475ef426344efe42493f71f377ec52237bf153c5713de987251"},
|
||||||
{file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35be4a9f65441d9982240e6966c1eaa1c654c4e5e931eaf580130409e31804d4"},
|
{file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09e67ef6e430f90caa093528bd758b0616f8165e57ed8d8ce014ae32df6a831d"},
|
||||||
{file = "Pillow-9.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82283af99c1c3a5ba1da44c67296d5aad19f11c535b551a5ae55328a317ce331"},
|
{file = "Pillow-9.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66daa16952d5bf0c9d5389c5e9df562922a59bd16d77e2a276e575d32e38afd1"},
|
||||||
{file = "Pillow-9.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a325ac71914c5c043fa50441b36606e64a10cd262de12f7a179620f579752ff8"},
|
{file = "Pillow-9.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d78ca526a559fb84faaaf84da2dd4addef5edb109db8b81677c0bb1aad342601"},
|
||||||
{file = "Pillow-9.1.0-cp39-cp39-win32.whl", hash = "sha256:a598d8830f6ef5501002ae85c7dbfcd9c27cc4efc02a1989369303ba85573e58"},
|
{file = "Pillow-9.1.1-cp39-cp39-win32.whl", hash = "sha256:55e74faf8359ddda43fee01bffbc5bd99d96ea508d8a08c527099e84eb708f45"},
|
||||||
{file = "Pillow-9.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0c51cb9edac8a5abd069fd0758ac0a8bfe52c261ee0e330f363548aca6893595"},
|
{file = "Pillow-9.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c150dbbb4a94ea4825d1e5f2c5501af7141ea95825fadd7829f9b11c97aaf6c"},
|
||||||
{file = "Pillow-9.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a336a4f74baf67e26f3acc4d61c913e378e931817cd1e2ef4dfb79d3e051b481"},
|
{file = "Pillow-9.1.1-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:769a7f131a2f43752455cc72f9f7a093c3ff3856bf976c5fb53a59d0ccc704f6"},
|
||||||
{file = "Pillow-9.1.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb1b89b11256b5b6cad5e7593f9061ac4624f7651f7a8eb4dfa37caa1dfaa4d0"},
|
{file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:488f3383cf5159907d48d32957ac6f9ea85ccdcc296c14eca1a4e396ecc32098"},
|
||||||
{file = "Pillow-9.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:255c9d69754a4c90b0ee484967fc8818c7ff8311c6dddcc43a4340e10cd1636a"},
|
{file = "Pillow-9.1.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b525a356680022b0af53385944026d3486fc8c013638cf9900eb87c866afb4c"},
|
||||||
{file = "Pillow-9.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5a3ecc026ea0e14d0ad7cd990ea7f48bfcb3eb4271034657dc9d06933c6629a7"},
|
{file = "Pillow-9.1.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6e760cf01259a1c0a50f3c845f9cad1af30577fd8b670339b1659c6d0e7a41dd"},
|
||||||
{file = "Pillow-9.1.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5b0ff59785d93b3437c3703e3c64c178aabada51dea2a7f2c5eccf1bcf565a3"},
|
{file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4165205a13b16a29e1ac57efeee6be2dfd5b5408122d59ef2145bc3239fa340"},
|
||||||
{file = "Pillow-9.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7110ec1701b0bf8df569a7592a196c9d07c764a0a74f65471ea56816f10e2c8"},
|
{file = "Pillow-9.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937a54e5694684f74dcbf6e24cc453bfc5b33940216ddd8f4cd8f0f79167f765"},
|
||||||
{file = "Pillow-9.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8d79c6f468215d1a8415aa53d9868a6b40c4682165b8cb62a221b1baa47db458"},
|
{file = "Pillow-9.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:baf3be0b9446a4083cc0c5bb9f9c964034be5374b5bc09757be89f5d2fa247b8"},
|
||||||
{file = "Pillow-9.1.0.tar.gz", hash = "sha256:f401ed2bbb155e1ade150ccc63db1a4f6c1909d3d378f7d1235a44e90d75fb97"},
|
{file = "Pillow-9.1.1.tar.gz", hash = "sha256:7502539939b53d7565f3d11d87c78e7ec900d3c72945d4ee0e2f250d598309a0"},
|
||||||
]
|
]
|
||||||
platformdirs = [
|
platformdirs = [
|
||||||
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
|
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
|
||||||
|
@ -1070,10 +1202,22 @@ pytest-django = [
|
||||||
{file = "pytest-django-4.5.2.tar.gz", hash = "sha256:d9076f759bb7c36939dbdd5ae6633c18edfc2902d1a69fdbefd2426b970ce6c2"},
|
{file = "pytest-django-4.5.2.tar.gz", hash = "sha256:d9076f759bb7c36939dbdd5ae6633c18edfc2902d1a69fdbefd2426b970ce6c2"},
|
||||||
{file = "pytest_django-4.5.2-py3-none-any.whl", hash = "sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e"},
|
{file = "pytest_django-4.5.2-py3-none-any.whl", hash = "sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e"},
|
||||||
]
|
]
|
||||||
|
pytest-mock = [
|
||||||
|
{file = "pytest-mock-3.7.0.tar.gz", hash = "sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534"},
|
||||||
|
{file = "pytest_mock-3.7.0-py3-none-any.whl", hash = "sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231"},
|
||||||
|
]
|
||||||
|
python-dateutil = [
|
||||||
|
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
|
||||||
|
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
|
||||||
|
]
|
||||||
python-dotenv = [
|
python-dotenv = [
|
||||||
{file = "python-dotenv-0.20.0.tar.gz", hash = "sha256:b7e3b04a59693c42c36f9ab1cc2acc46fa5df8c78e178fc33a8d4cd05c8d498f"},
|
{file = "python-dotenv-0.20.0.tar.gz", hash = "sha256:b7e3b04a59693c42c36f9ab1cc2acc46fa5df8c78e178fc33a8d4cd05c8d498f"},
|
||||||
{file = "python_dotenv-0.20.0-py3-none-any.whl", hash = "sha256:d92a187be61fe482e4fd675b6d52200e7be63a12b724abbf931a40ce4fa92938"},
|
{file = "python_dotenv-0.20.0-py3-none-any.whl", hash = "sha256:d92a187be61fe482e4fd675b6d52200e7be63a12b724abbf931a40ce4fa92938"},
|
||||||
]
|
]
|
||||||
|
python-slugify = [
|
||||||
|
{file = "python-slugify-6.1.2.tar.gz", hash = "sha256:272d106cb31ab99b3496ba085e3fea0e9e76dcde967b5e9992500d1f785ce4e1"},
|
||||||
|
{file = "python_slugify-6.1.2-py2.py3-none-any.whl", hash = "sha256:7b2c274c308b62f4269a9ba701aa69a797e9bca41aeee5b3a9e79e36b6656927"},
|
||||||
|
]
|
||||||
pyyaml = [
|
pyyaml = [
|
||||||
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
|
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
|
||||||
{file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
|
{file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
|
||||||
|
@ -1124,6 +1268,10 @@ rq = [
|
||||||
{file = "rq-1.10.1-py2.py3-none-any.whl", hash = "sha256:92f4cf38b2364c1697b541e77c0fe62b7e5242fa864324f262be126ee2a07e3a"},
|
{file = "rq-1.10.1-py2.py3-none-any.whl", hash = "sha256:92f4cf38b2364c1697b541e77c0fe62b7e5242fa864324f262be126ee2a07e3a"},
|
||||||
{file = "rq-1.10.1.tar.gz", hash = "sha256:62d06b44c3acfa5d1933c5a4ec3fbc2484144a8af60e318d0b8447c5236271e2"},
|
{file = "rq-1.10.1.tar.gz", hash = "sha256:62d06b44c3acfa5d1933c5a4ec3fbc2484144a8af60e318d0b8447c5236271e2"},
|
||||||
]
|
]
|
||||||
|
rq-scheduler = [
|
||||||
|
{file = "rq-scheduler-0.11.0.tar.gz", hash = "sha256:db79bb56cdbc4f7ffdd8bd659e389e91aa0db9c1abf002dc46f5dd6f0dbd2910"},
|
||||||
|
{file = "rq_scheduler-0.11.0-py2.py3-none-any.whl", hash = "sha256:da94e9b6badf112995ff38fe16192e4f4c43c412b3c9614684ed8c8f7ca517d2"},
|
||||||
|
]
|
||||||
sgmllib3k = [
|
sgmllib3k = [
|
||||||
{file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"},
|
{file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"},
|
||||||
]
|
]
|
||||||
|
@ -1131,13 +1279,18 @@ six = [
|
||||||
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
|
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
|
||||||
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
||||||
]
|
]
|
||||||
slugify = [
|
sortedcontainers = [
|
||||||
{file = "slugify-0.0.1.tar.gz", hash = "sha256:c5703cc11c1a6947536f3ce8bb306766b8bb5a84a53717f5a703ce0f18235e4c"},
|
{file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
|
||||||
|
{file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
|
||||||
]
|
]
|
||||||
sqlparse = [
|
sqlparse = [
|
||||||
{file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"},
|
{file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"},
|
||||||
{file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"},
|
{file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"},
|
||||||
]
|
]
|
||||||
|
text-unidecode = [
|
||||||
|
{file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"},
|
||||||
|
{file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"},
|
||||||
|
]
|
||||||
toml = [
|
toml = [
|
||||||
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
|
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
|
||||||
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
|
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
|
||||||
|
@ -1278,6 +1431,6 @@ wrapt = [
|
||||||
{file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
|
{file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"},
|
||||||
]
|
]
|
||||||
yt-dlp = [
|
yt-dlp = [
|
||||||
{file = "yt-dlp-2022.4.8.tar.gz", hash = "sha256:8758d016509d4574b90fbde975aa70adaef71ed5e7a195141588f6d6945205ba"},
|
{file = "yt-dlp-2022.5.18.tar.gz", hash = "sha256:3a7b59d2fb4b39ce8ba8e0b9c5a37fe20e5624f46a2346b4ae66ab1320e35134"},
|
||||||
{file = "yt_dlp-2022.4.8-py2.py3-none-any.whl", hash = "sha256:6edefe326b1e1478fdbe627a66203e5248a6b0dd50c101e682cf700ab70cdf72"},
|
{file = "yt_dlp-2022.5.18-py2.py3-none-any.whl", hash = "sha256:deec1009442312c1e2ee5298966842194d0e950b433f0d4fc844ef464b9c32a7"},
|
||||||
]
|
]
|
||||||
|
|
|
@ -24,9 +24,10 @@ django-bulma = "^0.8.3"
|
||||||
python-dotenv = "^0.20.0"
|
python-dotenv = "^0.20.0"
|
||||||
psycopg2 = "^2.9.3"
|
psycopg2 = "^2.9.3"
|
||||||
mysqlclient = "^2.1.0"
|
mysqlclient = "^2.1.0"
|
||||||
slugify = "^0.0.1"
|
python-slugify = "^6.1.2"
|
||||||
rq = "^1.10.1"
|
|
||||||
mutagen = "^1.45.1"
|
mutagen = "^1.45.1"
|
||||||
|
rq = "^1.10.1"
|
||||||
|
rq-scheduler = "^0.11.0"
|
||||||
|
|
||||||
[tool.poetry.dev-dependencies]
|
[tool.poetry.dev-dependencies]
|
||||||
pytest = "^7.1.1"
|
pytest = "^7.1.1"
|
||||||
|
@ -34,6 +35,9 @@ pytest-cov = "^3.0.0"
|
||||||
invoke = "^1.7.0"
|
invoke = "^1.7.0"
|
||||||
pytest-django = "^4.5.2"
|
pytest-django = "^4.5.2"
|
||||||
pre-commit = "^2.19.0"
|
pre-commit = "^2.19.0"
|
||||||
|
honcho = "^1.1.0"
|
||||||
|
pytest-mock = "^3.7.0"
|
||||||
|
fakeredis = "^1.7.5"
|
||||||
|
|
||||||
[tool.poetry.scripts]
|
[tool.poetry.scripts]
|
||||||
"ucast-manage" = "ucast_project.manage:main"
|
"ucast-manage" = "ucast_project.manage:main"
|
||||||
|
@ -42,6 +46,9 @@ pre-commit = "^2.19.0"
|
||||||
requires = ["poetry-core>=1.0.0"]
|
requires = ["poetry-core>=1.0.0"]
|
||||||
build-backend = "poetry.core.masonry.api"
|
build-backend = "poetry.core.masonry.api"
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
DJANGO_SETTINGS_MODULE = "ucast_project.settings"
|
||||||
|
|
||||||
[tool.flake8]
|
[tool.flake8]
|
||||||
extend-ignore = "E501"
|
extend-ignore = "E501"
|
||||||
|
|
||||||
|
|
19
tasks.py
|
@ -1,7 +1,9 @@
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
|
import sys
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
from honcho import manager
|
||||||
from invoke import Responder, task
|
from invoke import Responder, task
|
||||||
|
|
||||||
from ucast import tests
|
from ucast import tests
|
||||||
|
@ -89,8 +91,8 @@ def get_cover(c, vid=""):
|
||||||
cv_file = tests.DIR_TESTFILES / "cover" / f"c{ti}_gradient.png"
|
cv_file = tests.DIR_TESTFILES / "cover" / f"c{ti}_gradient.png"
|
||||||
cv_blur_file = tests.DIR_TESTFILES / "cover" / f"c{ti}_blur.png"
|
cv_blur_file = tests.DIR_TESTFILES / "cover" / f"c{ti}_blur.png"
|
||||||
|
|
||||||
tn_file = youtube.download_thumbnail(vinfo, tn_file)
|
youtube.download_thumbnail(vinfo, tn_file)
|
||||||
util.download_file(channel_metadata.avatar_url, av_file)
|
util.download_image_file(channel_metadata.avatar_url, av_file)
|
||||||
|
|
||||||
cover.create_cover_file(
|
cover.create_cover_file(
|
||||||
tn_file, av_file, title, channel_name, cover.COVER_STYLE_GRADIENT, cv_file
|
tn_file, av_file, title, channel_name, cover.COVER_STYLE_GRADIENT, cv_file
|
||||||
|
@ -115,3 +117,16 @@ def reset(c):
|
||||||
os.remove(FILE_DB)
|
os.remove(FILE_DB)
|
||||||
migrate(c)
|
migrate(c)
|
||||||
create_testuser(c)
|
create_testuser(c)
|
||||||
|
|
||||||
|
|
||||||
|
@task
|
||||||
|
def worker(c, n=2):
|
||||||
|
m = manager.Manager()
|
||||||
|
|
||||||
|
for i in range(n):
|
||||||
|
m.add_process(f"worker_{i}", "python manage.py rqworker")
|
||||||
|
|
||||||
|
m.add_process("scheduler", "python manage.py rqscheduler")
|
||||||
|
|
||||||
|
m.loop()
|
||||||
|
sys.exit(m.returncode)
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
__version__ = "0.0.1"
|
|
@ -1,3 +1,16 @@
|
||||||
from django.contrib import admin # noqa: F401
|
from django.contrib import admin
|
||||||
|
|
||||||
# Register your models here.
|
from ucast.models import Channel, Video
|
||||||
|
|
||||||
|
|
||||||
|
class ChannelAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ["name", "id"]
|
||||||
|
|
||||||
|
|
||||||
|
class VideoAdmin(admin.ModelAdmin):
|
||||||
|
list_display = ["title", "published"]
|
||||||
|
ordering = ("-published",)
|
||||||
|
|
||||||
|
|
||||||
|
admin.site.register(Channel, ChannelAdmin)
|
||||||
|
admin.site.register(Video, VideoAdmin)
|
||||||
|
|
0
ucast/management/__init__.py
Normal file
0
ucast/management/commands/__init__.py
Normal file
34
ucast/management/commands/rqenqueue.py
Normal file
|
@ -0,0 +1,34 @@
|
||||||
|
"""
|
||||||
|
Based on the django-rq package by Selwin Ong (MIT License)
|
||||||
|
https://github.com/rq/django-rq
|
||||||
|
"""
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from ucast import queue
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
"""Queue a function with the given arguments."""
|
||||||
|
|
||||||
|
help = __doc__
|
||||||
|
args = "<function arg arg ...>"
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument(
|
||||||
|
"--timeout", "-t", type=int, dest="timeout", help="A timeout in seconds"
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument("args", nargs="*")
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
"""
|
||||||
|
Queues the function given with the first argument with the
|
||||||
|
parameters given with the rest of the argument list.
|
||||||
|
"""
|
||||||
|
verbosity = int(options.get("verbosity", 1))
|
||||||
|
timeout = options.get("timeout")
|
||||||
|
q = queue.get_queue()
|
||||||
|
job = q.enqueue_call(args[0], args=args[1:], timeout=timeout)
|
||||||
|
if verbosity:
|
||||||
|
print("Job %s created" % job.id)
|
58
ucast/management/commands/rqscheduler.py
Normal file
|
@ -0,0 +1,58 @@
|
||||||
|
"""
|
||||||
|
Based on the django-rq package by Selwin Ong (MIT License)
|
||||||
|
https://github.com/rq/django-rq
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from rq_scheduler.utils import setup_loghandlers
|
||||||
|
|
||||||
|
from ucast import queue
|
||||||
|
from ucast.tasks import schedule
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
"""Runs RQ Scheduler"""
|
||||||
|
|
||||||
|
help = __doc__
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument(
|
||||||
|
"--pid",
|
||||||
|
action="store",
|
||||||
|
dest="pid",
|
||||||
|
default=None,
|
||||||
|
help="PID file to write the scheduler`s pid into",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--interval",
|
||||||
|
"-i",
|
||||||
|
type=int,
|
||||||
|
dest="interval",
|
||||||
|
default=60,
|
||||||
|
help="""How often the scheduler checks for new jobs to add to the
|
||||||
|
queue (in seconds).""",
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
schedule.clear_scheduled_jobs()
|
||||||
|
schedule.register_scheduled_jobs()
|
||||||
|
|
||||||
|
pid = options.get("pid")
|
||||||
|
if pid:
|
||||||
|
with open(os.path.expanduser(pid), "w") as fp:
|
||||||
|
fp.write(str(os.getpid()))
|
||||||
|
|
||||||
|
# Verbosity is defined by default in BaseCommand for all commands
|
||||||
|
verbosity = options.get("verbosity")
|
||||||
|
if verbosity >= 2:
|
||||||
|
level = "DEBUG"
|
||||||
|
elif verbosity == 0:
|
||||||
|
level = "WARNING"
|
||||||
|
else:
|
||||||
|
level = "INFO"
|
||||||
|
setup_loghandlers(level)
|
||||||
|
|
||||||
|
scheduler = queue.get_scheduler(options.get("interval"))
|
||||||
|
scheduler.run()
|
121
ucast/management/commands/rqstats.py
Normal file
|
@ -0,0 +1,121 @@
|
||||||
|
"""
|
||||||
|
Based on the django-rq package by Selwin Ong (MIT License)
|
||||||
|
https://github.com/rq/django-rq
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
|
||||||
|
import click
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
from ucast import queue
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
"""Print RQ statistics"""
|
||||||
|
|
||||||
|
help = __doc__
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument(
|
||||||
|
"-j",
|
||||||
|
"--json",
|
||||||
|
action="store_true",
|
||||||
|
dest="json",
|
||||||
|
help="Output statistics as JSON",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"-y",
|
||||||
|
"--yaml",
|
||||||
|
action="store_true",
|
||||||
|
dest="yaml",
|
||||||
|
help="Output statistics as YAML",
|
||||||
|
)
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"-i",
|
||||||
|
"--interval",
|
||||||
|
dest="interval",
|
||||||
|
type=float,
|
||||||
|
help="Poll statistics every N seconds",
|
||||||
|
)
|
||||||
|
|
||||||
|
def _print_separator(self):
|
||||||
|
try:
|
||||||
|
click.echo(self._separator)
|
||||||
|
except AttributeError:
|
||||||
|
self._separator = "-" * self.table_width
|
||||||
|
click.echo(self._separator)
|
||||||
|
|
||||||
|
def _print_stats_dashboard(self, statistics):
|
||||||
|
if self.interval:
|
||||||
|
click.clear()
|
||||||
|
|
||||||
|
click.echo()
|
||||||
|
click.echo("Django RQ CLI Dashboard")
|
||||||
|
click.echo()
|
||||||
|
self._print_separator()
|
||||||
|
|
||||||
|
# Header
|
||||||
|
click.echo(
|
||||||
|
"""| %-15s|%10s |%10s |%10s |%10s |%10s |"""
|
||||||
|
% ("Name", "Queued", "Active", "Deferred", "Finished", "Workers")
|
||||||
|
)
|
||||||
|
|
||||||
|
self._print_separator()
|
||||||
|
|
||||||
|
click.echo(
|
||||||
|
"""| %-15s|%10s |%10s |%10s |%10s |%10s |"""
|
||||||
|
% (
|
||||||
|
statistics["name"],
|
||||||
|
statistics["jobs"],
|
||||||
|
statistics["started_jobs"],
|
||||||
|
statistics["deferred_jobs"],
|
||||||
|
statistics["finished_jobs"],
|
||||||
|
statistics["workers"],
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
self._print_separator()
|
||||||
|
|
||||||
|
if self.interval:
|
||||||
|
click.echo()
|
||||||
|
click.echo("Press 'Ctrl+c' to quit")
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
|
||||||
|
if options.get("json"):
|
||||||
|
import json
|
||||||
|
|
||||||
|
click.echo(json.dumps(queue.get_statistics()))
|
||||||
|
return
|
||||||
|
|
||||||
|
if options.get("yaml"):
|
||||||
|
try:
|
||||||
|
import yaml
|
||||||
|
except ImportError:
|
||||||
|
click.echo("Aborting. LibYAML is not installed.")
|
||||||
|
return
|
||||||
|
# Disable YAML alias
|
||||||
|
yaml.Dumper.ignore_aliases = lambda *args: True
|
||||||
|
click.echo(yaml.dump(queue.get_statistics(), default_flow_style=False))
|
||||||
|
return
|
||||||
|
|
||||||
|
self.interval = options.get("interval")
|
||||||
|
|
||||||
|
# Arbitrary
|
||||||
|
self.table_width = 78
|
||||||
|
|
||||||
|
# Do not continuously poll
|
||||||
|
if not self.interval:
|
||||||
|
self._print_stats_dashboard(queue.get_statistics())
|
||||||
|
return
|
||||||
|
|
||||||
|
# Abuse clicks to 'live' render CLI dashboard
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
self._print_stats_dashboard(queue.get_statistics())
|
||||||
|
time.sleep(self.interval)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
pass
|
103
ucast/management/commands/rqworker.py
Normal file
|
@ -0,0 +1,103 @@
|
||||||
|
"""
|
||||||
|
Based on the django-rq package by Selwin Ong (MIT License)
|
||||||
|
https://github.com/rq/django-rq
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.db import connections
|
||||||
|
from redis.exceptions import ConnectionError
|
||||||
|
from rq import use_connection
|
||||||
|
from rq.logutils import setup_loghandlers
|
||||||
|
|
||||||
|
from ucast import queue
|
||||||
|
|
||||||
|
|
||||||
|
def reset_db_connections():
|
||||||
|
for c in connections.all():
|
||||||
|
c.close()
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
"""Runs RQ worker"""
|
||||||
|
|
||||||
|
help = __doc__
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
parser.add_argument(
|
||||||
|
"--pid",
|
||||||
|
action="store",
|
||||||
|
dest="pid",
|
||||||
|
default=None,
|
||||||
|
help="PID file to write the worker`s pid into",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--burst",
|
||||||
|
action="store_true",
|
||||||
|
dest="burst",
|
||||||
|
default=False,
|
||||||
|
help="Run worker in burst mode",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--with-scheduler",
|
||||||
|
action="store_true",
|
||||||
|
dest="with_scheduler",
|
||||||
|
default=False,
|
||||||
|
help="Run worker with scheduler enabled",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--name",
|
||||||
|
action="store",
|
||||||
|
dest="name",
|
||||||
|
default=None,
|
||||||
|
help="Name of the worker",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--worker-ttl",
|
||||||
|
action="store",
|
||||||
|
type=int,
|
||||||
|
dest="worker_ttl",
|
||||||
|
default=420,
|
||||||
|
help="Default worker timeout to be used",
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
pid = options.get("pid")
|
||||||
|
if pid:
|
||||||
|
with open(os.path.expanduser(pid), "w") as fp:
|
||||||
|
fp.write(str(os.getpid()))
|
||||||
|
|
||||||
|
# Verbosity is defined by default in BaseCommand for all commands
|
||||||
|
verbosity = options.get("verbosity")
|
||||||
|
if verbosity >= 2:
|
||||||
|
level = "DEBUG"
|
||||||
|
elif verbosity == 0:
|
||||||
|
level = "WARNING"
|
||||||
|
else:
|
||||||
|
level = "INFO"
|
||||||
|
setup_loghandlers(level)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Instantiate a worker
|
||||||
|
worker_kwargs = {
|
||||||
|
"name": options["name"],
|
||||||
|
"default_worker_ttl": options["worker_ttl"],
|
||||||
|
}
|
||||||
|
w = queue.get_worker(**worker_kwargs)
|
||||||
|
|
||||||
|
# Call use_connection to push the redis connection into LocalStack
|
||||||
|
# without this, jobs using RQ's get_current_job() will fail
|
||||||
|
use_connection(w.connection)
|
||||||
|
# Close any opened DB connection before any fork
|
||||||
|
reset_db_connections()
|
||||||
|
|
||||||
|
w.work(
|
||||||
|
burst=options.get("burst", False),
|
||||||
|
with_scheduler=options.get("with_scheduler", False),
|
||||||
|
logging_level=level,
|
||||||
|
)
|
||||||
|
except ConnectionError as e:
|
||||||
|
self.stderr.write(str(e))
|
||||||
|
sys.exit(1)
|
|
@ -25,6 +25,7 @@ class Migration(migrations.Migration):
|
||||||
("skip_livestreams", models.BooleanField(default=True)),
|
("skip_livestreams", models.BooleanField(default=True)),
|
||||||
("skip_shorts", models.BooleanField(default=True)),
|
("skip_shorts", models.BooleanField(default=True)),
|
||||||
("keep_videos", models.IntegerField(default=None, null=True)),
|
("keep_videos", models.IntegerField(default=None, null=True)),
|
||||||
|
("avatar_url", models.CharField(max_length=250, null=True)),
|
||||||
],
|
],
|
||||||
),
|
),
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
|
@ -48,6 +49,7 @@ class Migration(migrations.Migration):
|
||||||
("duration", models.IntegerField()),
|
("duration", models.IntegerField()),
|
||||||
("is_livestream", models.BooleanField(default=False)),
|
("is_livestream", models.BooleanField(default=False)),
|
||||||
("is_short", models.BooleanField(default=False)),
|
("is_short", models.BooleanField(default=False)),
|
||||||
|
("download_size", models.IntegerField(null=True)),
|
||||||
],
|
],
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|
|
@ -36,11 +36,21 @@ class Channel(models.Model):
|
||||||
skip_livestreams = models.BooleanField(default=True)
|
skip_livestreams = models.BooleanField(default=True)
|
||||||
skip_shorts = models.BooleanField(default=True)
|
skip_shorts = models.BooleanField(default=True)
|
||||||
keep_videos = models.IntegerField(null=True, default=None)
|
keep_videos = models.IntegerField(null=True, default=None)
|
||||||
|
avatar_url = models.CharField(max_length=250, null=True)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_new_slug(cls, name: str) -> str:
|
def get_new_slug(cls, name: str) -> str:
|
||||||
return _get_unique_slug(name, cls.objects, "channel")
|
return _get_unique_slug(name, cls.objects, "channel")
|
||||||
|
|
||||||
|
def get_full_description(self) -> str:
|
||||||
|
desc = f"https://www.youtube.com/channel/{self.id}"
|
||||||
|
if self.description:
|
||||||
|
desc = f"{self.description}\n\n{desc}"
|
||||||
|
return desc
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.name
|
||||||
|
|
||||||
|
|
||||||
class Video(models.Model):
|
class Video(models.Model):
|
||||||
id = models.CharField(max_length=30, primary_key=True)
|
id = models.CharField(max_length=30, primary_key=True)
|
||||||
|
@ -53,6 +63,7 @@ class Video(models.Model):
|
||||||
duration = models.IntegerField()
|
duration = models.IntegerField()
|
||||||
is_livestream = models.BooleanField(default=False)
|
is_livestream = models.BooleanField(default=False)
|
||||||
is_short = models.BooleanField(default=False)
|
is_short = models.BooleanField(default=False)
|
||||||
|
download_size = models.IntegerField(null=True)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_new_slug(cls, title: str, date: datetime.date, channel_id: str) -> str:
|
def get_new_slug(cls, title: str, date: datetime.date, channel_id: str) -> str:
|
||||||
|
@ -61,3 +72,12 @@ class Video(models.Model):
|
||||||
return _get_unique_slug(
|
return _get_unique_slug(
|
||||||
title_w_date, cls.objects.filter(channel_id=channel_id), "video"
|
title_w_date, cls.objects.filter(channel_id=channel_id), "video"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def get_full_description(self) -> str:
|
||||||
|
desc = f"https://youtu.be/{self.id}"
|
||||||
|
if self.description:
|
||||||
|
desc = f"{self.description}\n\n{desc}"
|
||||||
|
return desc
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.title
|
||||||
|
|
87
ucast/queue.py
Normal file
|
@ -0,0 +1,87 @@
|
||||||
|
import redis
|
||||||
|
import rq
|
||||||
|
import rq_scheduler
|
||||||
|
from django.conf import settings
|
||||||
|
from rq import registry
|
||||||
|
|
||||||
|
from ucast.service import util
|
||||||
|
|
||||||
|
|
||||||
|
def get_redis_connection() -> redis.client.Redis:
|
||||||
|
return redis.Redis.from_url(settings.REDIS_URL)
|
||||||
|
|
||||||
|
|
||||||
|
def get_queue() -> rq.Queue:
|
||||||
|
redis_conn = get_redis_connection()
|
||||||
|
return rq.Queue(default_timeout=settings.REDIS_QUEUE_TIMEOUT, connection=redis_conn)
|
||||||
|
|
||||||
|
|
||||||
|
def get_scheduler(interval=60) -> rq_scheduler.Scheduler:
|
||||||
|
redis_conn = get_redis_connection()
|
||||||
|
return rq_scheduler.Scheduler(connection=redis_conn, interval=interval)
|
||||||
|
|
||||||
|
|
||||||
|
def get_worker(**kwargs) -> rq.Worker:
|
||||||
|
queue = get_queue()
|
||||||
|
return rq.Worker(
|
||||||
|
queue,
|
||||||
|
connection=queue.connection,
|
||||||
|
default_result_ttl=settings.REDIS_QUEUE_RESULT_TTL,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def enqueue(f, *args, **kwargs) -> rq.job.Job:
|
||||||
|
queue = get_queue()
|
||||||
|
# return queue.enqueue(f, *args, **kwargs)
|
||||||
|
return queue.enqueue_call(f, args, kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def get_statistics() -> dict:
|
||||||
|
"""
|
||||||
|
Return statistics from the RQ Queue.
|
||||||
|
|
||||||
|
Taken from the django-rq package by Selwin Ong (MIT License)
|
||||||
|
https://github.com/rq/django-rq
|
||||||
|
|
||||||
|
:return: RQ statistics
|
||||||
|
"""
|
||||||
|
queue = get_queue()
|
||||||
|
connection = queue.connection
|
||||||
|
connection_kwargs = connection.connection_pool.connection_kwargs
|
||||||
|
|
||||||
|
# Raw access to the first item from left of the redis list.
|
||||||
|
# This might not be accurate since new job can be added from the left
|
||||||
|
# with `at_front` parameters.
|
||||||
|
# Ideally rq should supports Queue.oldest_job
|
||||||
|
last_job_id = connection.lindex(queue.key, 0)
|
||||||
|
last_job = queue.fetch_job(last_job_id.decode("utf-8")) if last_job_id else None
|
||||||
|
if last_job:
|
||||||
|
oldest_job_timestamp = util.to_localtime(last_job.enqueued_at).strftime(
|
||||||
|
"%Y-%m-%d, %H:%M:%S"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
oldest_job_timestamp = "-"
|
||||||
|
|
||||||
|
# parse_class and connection_pool are not needed and not JSON serializable
|
||||||
|
connection_kwargs.pop("parser_class", None)
|
||||||
|
connection_kwargs.pop("connection_pool", None)
|
||||||
|
|
||||||
|
finished_job_registry = registry.FinishedJobRegistry(queue.name, queue.connection)
|
||||||
|
started_job_registry = registry.StartedJobRegistry(queue.name, queue.connection)
|
||||||
|
deferred_job_registry = registry.DeferredJobRegistry(queue.name, queue.connection)
|
||||||
|
failed_job_registry = registry.FailedJobRegistry(queue.name, queue.connection)
|
||||||
|
scheduled_job_registry = registry.ScheduledJobRegistry(queue.name, queue.connection)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"name": queue.name,
|
||||||
|
"jobs": queue.count,
|
||||||
|
"oldest_job_timestamp": oldest_job_timestamp,
|
||||||
|
"connection_kwargs": connection_kwargs,
|
||||||
|
"workers": rq.Worker.count(queue=queue),
|
||||||
|
"finished_jobs": len(finished_job_registry),
|
||||||
|
"started_jobs": len(started_job_registry),
|
||||||
|
"deferred_jobs": len(deferred_job_registry),
|
||||||
|
"failed_jobs": len(failed_job_registry),
|
||||||
|
"scheduled_jobs": len(scheduled_job_registry),
|
||||||
|
}
|
|
@ -1,4 +1,5 @@
|
||||||
import math
|
import math
|
||||||
|
import random
|
||||||
from importlib import resources
|
from importlib import resources
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import List, Literal, Optional, Tuple
|
from typing import List, Literal, Optional, Tuple
|
||||||
|
@ -6,7 +7,7 @@ from typing import List, Literal, Optional, Tuple
|
||||||
import wcag_contrast_ratio
|
import wcag_contrast_ratio
|
||||||
from colorthief import ColorThief
|
from colorthief import ColorThief
|
||||||
from fonts.ttf import SourceSansPro
|
from fonts.ttf import SourceSansPro
|
||||||
from PIL import Image, ImageDraw, ImageFilter, ImageFont
|
from PIL import Image, ImageDraw, ImageEnhance, ImageFilter, ImageFont
|
||||||
|
|
||||||
from ucast.service import typ
|
from ucast.service import typ
|
||||||
|
|
||||||
|
@ -16,6 +17,7 @@ CoverStyle = Literal["blur", "gradient"]
|
||||||
|
|
||||||
CHAR_ELLIPSIS = "…"
|
CHAR_ELLIPSIS = "…"
|
||||||
COVER_WIDTH = 500
|
COVER_WIDTH = 500
|
||||||
|
MIN_CONTRAST = 4.5
|
||||||
|
|
||||||
|
|
||||||
def _split_text(
|
def _split_text(
|
||||||
|
@ -30,7 +32,7 @@ def _split_text(
|
||||||
:param text: Input text
|
:param text: Input text
|
||||||
:param font: Pillow ImageFont
|
:param font: Pillow ImageFont
|
||||||
:param line_spacing: Line spacing [px]
|
:param line_spacing: Line spacing [px]
|
||||||
:return:
|
:return: List of lines
|
||||||
"""
|
"""
|
||||||
if height < font.size:
|
if height < font.size:
|
||||||
return []
|
return []
|
||||||
|
@ -99,7 +101,6 @@ def _draw_text_box(
|
||||||
:param color: Text color
|
:param color: Text color
|
||||||
:param line_spacing: Line spacing [px]
|
:param line_spacing: Line spacing [px]
|
||||||
:param vertical_center: Center text vertically in the box
|
:param vertical_center: Center text vertically in the box
|
||||||
:return:
|
|
||||||
"""
|
"""
|
||||||
x_tl, y_tl, x_br, y_br = box
|
x_tl, y_tl, x_br, y_br = box
|
||||||
height = y_br - y_tl
|
height = y_br - y_tl
|
||||||
|
@ -144,7 +145,11 @@ def _interpolate_color(color_from: typ.Color, color_to: typ.Color, steps: int):
|
||||||
yield [round(f + det * i) for f, det in zip(color_from, det_co)]
|
yield [round(f + det * i) for f, det in zip(color_from, det_co)]
|
||||||
|
|
||||||
|
|
||||||
def _get_text_color(bg_color) -> typ.Color:
|
def _color_to_float(color: typ.Color) -> tuple[float, ...]:
|
||||||
|
return tuple(c / 255 for c in color)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_text_color(bg_color: typ.Color) -> typ.Color:
|
||||||
"""
|
"""
|
||||||
Return the text color (black or white) with the largest contrast
|
Return the text color (black or white) with the largest contrast
|
||||||
to a given background color.
|
to a given background color.
|
||||||
|
@ -152,26 +157,19 @@ def _get_text_color(bg_color) -> typ.Color:
|
||||||
:param bg_color: Background color
|
:param bg_color: Background color
|
||||||
:return: Text color
|
:return: Text color
|
||||||
"""
|
"""
|
||||||
color_decimal = tuple([c / 255 for c in bg_color])
|
color_float = _color_to_float(bg_color)
|
||||||
c_blk = wcag_contrast_ratio.rgb((0, 0, 0), color_decimal)
|
c_blk = wcag_contrast_ratio.rgb((0, 0, 0), color_float)
|
||||||
c_wht = wcag_contrast_ratio.rgb((1, 1, 1), color_decimal)
|
c_wht = wcag_contrast_ratio.rgb((1, 1, 1), color_float)
|
||||||
if c_wht > c_blk:
|
if c_wht > c_blk:
|
||||||
return 255, 255, 255
|
return 255, 255, 255
|
||||||
return 0, 0, 0
|
return 0, 0, 0
|
||||||
|
|
||||||
|
|
||||||
def _get_baseimage(
|
def _get_baseimage(thumbnail: Image.Image, style: CoverStyle):
|
||||||
thumbnail: Image.Image,
|
|
||||||
top_color: typ.Color,
|
|
||||||
bottom_color: typ.Color,
|
|
||||||
style: CoverStyle,
|
|
||||||
):
|
|
||||||
"""
|
"""
|
||||||
Return the background image for the cover.
|
Return the background image for the cover.
|
||||||
|
|
||||||
:param thumbnail: Thumbnail image object
|
:param thumbnail: Thumbnail image object
|
||||||
:param top_color: Top color of the thumbnail image
|
|
||||||
:param bottom_color: Bottom color of the thumbnail image
|
|
||||||
:param style: Style of the cover image
|
:param style: Style of the cover image
|
||||||
:return: Base image
|
:return: Base image
|
||||||
"""
|
"""
|
||||||
|
@ -179,6 +177,15 @@ def _get_baseimage(
|
||||||
|
|
||||||
if style == COVER_STYLE_GRADIENT:
|
if style == COVER_STYLE_GRADIENT:
|
||||||
# Thumbnail with color gradient background
|
# Thumbnail with color gradient background
|
||||||
|
|
||||||
|
# Get dominant colors from the top and bottom 20% of the thumbnail image
|
||||||
|
top_part = thumbnail.crop((0, 0, COVER_WIDTH, int(thumbnail.height * 0.2)))
|
||||||
|
bottom_part = thumbnail.crop(
|
||||||
|
(0, int(thumbnail.height * 0.8), COVER_WIDTH, thumbnail.height)
|
||||||
|
)
|
||||||
|
top_color = _get_dominant_color(top_part)
|
||||||
|
bottom_color = _get_dominant_color(bottom_part)
|
||||||
|
|
||||||
cover_draw = ImageDraw.Draw(cover)
|
cover_draw = ImageDraw.Draw(cover)
|
||||||
|
|
||||||
for i, color in enumerate(
|
for i, color in enumerate(
|
||||||
|
@ -190,9 +197,9 @@ def _get_baseimage(
|
||||||
ctn_width = int(COVER_WIDTH / thumbnail.height * thumbnail.width)
|
ctn_width = int(COVER_WIDTH / thumbnail.height * thumbnail.width)
|
||||||
ctn_x_left = int((ctn_width - COVER_WIDTH) / 2)
|
ctn_x_left = int((ctn_width - COVER_WIDTH) / 2)
|
||||||
|
|
||||||
ctn = thumbnail.resize((ctn_width, COVER_WIDTH), Image.LANCZOS).filter(
|
ctn = thumbnail.resize(
|
||||||
ImageFilter.GaussianBlur(20)
|
(ctn_width, COVER_WIDTH), Image.Resampling.LANCZOS
|
||||||
)
|
).filter(ImageFilter.GaussianBlur(20))
|
||||||
cover.paste(ctn, (-ctn_x_left, 0))
|
cover.paste(ctn, (-ctn_x_left, 0))
|
||||||
|
|
||||||
return cover
|
return cover
|
||||||
|
@ -212,9 +219,84 @@ def _resize_thumbnail(thumbnail: Image.Image) -> Image.Image:
|
||||||
tn_crop_y_top = int((tn_resize_height - tn_height) / 2)
|
tn_crop_y_top = int((tn_resize_height - tn_height) / 2)
|
||||||
tn_crop_y_bottom = tn_resize_height - tn_crop_y_top
|
tn_crop_y_bottom = tn_resize_height - tn_crop_y_top
|
||||||
|
|
||||||
return thumbnail.resize((COVER_WIDTH, tn_resize_height), Image.LANCZOS).crop(
|
return thumbnail.resize(
|
||||||
(0, tn_crop_y_top, COVER_WIDTH, tn_crop_y_bottom)
|
(COVER_WIDTH, tn_resize_height), Image.Resampling.LANCZOS
|
||||||
)
|
).crop((0, tn_crop_y_top, COVER_WIDTH, tn_crop_y_bottom))
|
||||||
|
|
||||||
|
|
||||||
|
def _prepare_text_background(
|
||||||
|
base_img: Image.Image, bboxes: List[Tuple[int, int, int, int]]
|
||||||
|
) -> Tuple[Image.Image, typ.Color]:
|
||||||
|
"""
|
||||||
|
Return the preferred text color (black or white) and darken
|
||||||
|
the image if necessary
|
||||||
|
|
||||||
|
:param base_img: Image object
|
||||||
|
:param bboxes: Text boxes
|
||||||
|
:return: Updated image, text color
|
||||||
|
"""
|
||||||
|
rng = random.Random()
|
||||||
|
rng.seed(0x9B38D30461B7F0E6)
|
||||||
|
|
||||||
|
min_contrast_bk = 22
|
||||||
|
min_contrast_wt = 22
|
||||||
|
worst_color_wt = None
|
||||||
|
|
||||||
|
def corr_x(x: int) -> int:
|
||||||
|
return min(max(0, x), base_img.width)
|
||||||
|
|
||||||
|
def corr_y(y: int) -> int:
|
||||||
|
return min(max(0, y), base_img.height)
|
||||||
|
|
||||||
|
for bbox in bboxes:
|
||||||
|
x_tl, y_tl, x_br, y_br = bbox
|
||||||
|
x_tl = corr_x(x_tl)
|
||||||
|
y_tl = corr_y(y_tl)
|
||||||
|
x_br = corr_x(x_br)
|
||||||
|
y_br = corr_y(y_br)
|
||||||
|
|
||||||
|
height = y_br - y_tl
|
||||||
|
width = x_br - x_tl
|
||||||
|
|
||||||
|
for _ in range(math.ceil(width * height * 0.01)):
|
||||||
|
target_pos = (rng.randint(x_tl, x_br - 1), rng.randint(y_tl, y_br - 1))
|
||||||
|
img_color = base_img.getpixel(target_pos)
|
||||||
|
img_color_float = _color_to_float(img_color)
|
||||||
|
|
||||||
|
ct_bk = wcag_contrast_ratio.rgb((0, 0, 0), img_color_float)
|
||||||
|
ct_wt = wcag_contrast_ratio.rgb((1, 1, 1), img_color_float)
|
||||||
|
|
||||||
|
if ct_bk < min_contrast_bk:
|
||||||
|
min_contrast_bk = ct_bk
|
||||||
|
|
||||||
|
if ct_wt < min_contrast_wt:
|
||||||
|
worst_color_wt = img_color
|
||||||
|
min_contrast_wt = ct_wt
|
||||||
|
|
||||||
|
if min_contrast_bk >= MIN_CONTRAST:
|
||||||
|
return base_img, (0, 0, 0)
|
||||||
|
if min_contrast_wt >= MIN_CONTRAST:
|
||||||
|
return base_img, (255, 255, 255)
|
||||||
|
|
||||||
|
pixel = Image.new("RGB", (1, 1), worst_color_wt)
|
||||||
|
|
||||||
|
for i in range(1, 100):
|
||||||
|
brightness_f = 1 - i / 100
|
||||||
|
contrast_f = 1 - i / 1000
|
||||||
|
|
||||||
|
pixel_c = ImageEnhance.Brightness(pixel).enhance(brightness_f)
|
||||||
|
pixel_c = ImageEnhance.Contrast(pixel_c).enhance(contrast_f)
|
||||||
|
new_color = pixel_c.getpixel((0, 0))
|
||||||
|
|
||||||
|
if (
|
||||||
|
wcag_contrast_ratio.rgb((1, 1, 1), _color_to_float(new_color))
|
||||||
|
>= MIN_CONTRAST
|
||||||
|
):
|
||||||
|
new_img = ImageEnhance.Brightness(base_img).enhance(brightness_f)
|
||||||
|
new_img = ImageEnhance.Contrast(new_img).enhance(contrast_f)
|
||||||
|
return new_img, (255, 255, 255)
|
||||||
|
|
||||||
|
return base_img, (255, 255, 255)
|
||||||
|
|
||||||
|
|
||||||
def _draw_text_avatar(
|
def _draw_text_avatar(
|
||||||
|
@ -222,23 +304,60 @@ def _draw_text_avatar(
|
||||||
avatar: Optional[Image.Image],
|
avatar: Optional[Image.Image],
|
||||||
title: str,
|
title: str,
|
||||||
channel: str,
|
channel: str,
|
||||||
top_color: typ.Color,
|
) -> Image.Image:
|
||||||
bottom_color: typ.Color,
|
|
||||||
):
|
|
||||||
cover_draw = ImageDraw.Draw(cover)
|
|
||||||
|
|
||||||
# Add channel avatar
|
# Add channel avatar
|
||||||
avt_margin = 0
|
avt_margin = 0
|
||||||
avt_size = 0
|
avt_size = 0
|
||||||
|
|
||||||
tn_16_9_height = int(COVER_WIDTH / 16 * 9)
|
tn_16_9_height = int(COVER_WIDTH / 16 * 9) # typical: 281
|
||||||
tn_16_9_margin = int((COVER_WIDTH - tn_16_9_height) / 2)
|
tn_16_9_margin = int((COVER_WIDTH - tn_16_9_height) / 2) # typical: 110
|
||||||
|
|
||||||
if avatar:
|
if avatar:
|
||||||
avt_margin = int(tn_16_9_margin * 0.05)
|
avt_margin = int(tn_16_9_margin * 0.05) # typical: 14
|
||||||
avt_size = tn_16_9_margin - 2 * avt_margin
|
avt_size = tn_16_9_margin - 2 * avt_margin # typical: 82
|
||||||
|
|
||||||
avt = avatar.resize((avt_size, avt_size), Image.LANCZOS)
|
# Add text
|
||||||
|
text_margin_x = 16
|
||||||
|
text_margin_topleft = avt_margin + avt_size + text_margin_x # typical: 112
|
||||||
|
text_vertical_offset = -17
|
||||||
|
text_line_space = -4
|
||||||
|
|
||||||
|
fnt = ImageFont.truetype(SourceSansPro, 50)
|
||||||
|
top_text_box = ( # typical: (112, -17, 484, 110)
|
||||||
|
text_margin_topleft,
|
||||||
|
text_vertical_offset,
|
||||||
|
COVER_WIDTH - text_margin_x,
|
||||||
|
tn_16_9_margin,
|
||||||
|
)
|
||||||
|
bottom_text_box = ( # typical: (16, 373, 484, 500)
|
||||||
|
text_margin_x,
|
||||||
|
COVER_WIDTH - tn_16_9_margin + text_vertical_offset,
|
||||||
|
COVER_WIDTH - text_margin_x,
|
||||||
|
COVER_WIDTH,
|
||||||
|
)
|
||||||
|
|
||||||
|
cover, text_color = _prepare_text_background(cover, [top_text_box, bottom_text_box])
|
||||||
|
cover_draw = ImageDraw.Draw(cover)
|
||||||
|
|
||||||
|
_draw_text_box(
|
||||||
|
cover_draw,
|
||||||
|
top_text_box,
|
||||||
|
channel,
|
||||||
|
fnt,
|
||||||
|
text_color,
|
||||||
|
text_line_space,
|
||||||
|
)
|
||||||
|
_draw_text_box(
|
||||||
|
cover_draw,
|
||||||
|
bottom_text_box,
|
||||||
|
title,
|
||||||
|
fnt,
|
||||||
|
text_color,
|
||||||
|
text_line_space,
|
||||||
|
)
|
||||||
|
|
||||||
|
if avatar:
|
||||||
|
avt = avatar.resize((avt_size, avt_size), Image.Resampling.LANCZOS)
|
||||||
|
|
||||||
circle_mask = Image.new("L", (avt_size, avt_size))
|
circle_mask = Image.new("L", (avt_size, avt_size))
|
||||||
circle_mask_draw = ImageDraw.Draw(circle_mask)
|
circle_mask_draw = ImageDraw.Draw(circle_mask)
|
||||||
|
@ -246,42 +365,7 @@ def _draw_text_avatar(
|
||||||
|
|
||||||
cover.paste(avt, (avt_margin, avt_margin), circle_mask)
|
cover.paste(avt, (avt_margin, avt_margin), circle_mask)
|
||||||
|
|
||||||
# Add text
|
return cover
|
||||||
text_margin_x = 16
|
|
||||||
text_margin_topleft = avt_margin + avt_size + text_margin_x
|
|
||||||
text_vertical_offset = -17
|
|
||||||
text_line_space = -4
|
|
||||||
|
|
||||||
fnt = ImageFont.truetype(SourceSansPro, 50)
|
|
||||||
top_text_color = _get_text_color(top_color)
|
|
||||||
bottom_text_color = _get_text_color(bottom_color)
|
|
||||||
|
|
||||||
_draw_text_box(
|
|
||||||
cover_draw,
|
|
||||||
(
|
|
||||||
text_margin_topleft,
|
|
||||||
text_vertical_offset,
|
|
||||||
COVER_WIDTH - text_margin_x,
|
|
||||||
tn_16_9_margin,
|
|
||||||
),
|
|
||||||
channel,
|
|
||||||
fnt,
|
|
||||||
top_text_color,
|
|
||||||
text_line_space,
|
|
||||||
)
|
|
||||||
_draw_text_box(
|
|
||||||
cover_draw,
|
|
||||||
(
|
|
||||||
text_margin_x,
|
|
||||||
COVER_WIDTH - tn_16_9_margin + text_vertical_offset,
|
|
||||||
COVER_WIDTH - text_margin_x,
|
|
||||||
COVER_WIDTH,
|
|
||||||
),
|
|
||||||
title,
|
|
||||||
fnt,
|
|
||||||
bottom_text_color,
|
|
||||||
text_line_space,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _create_cover_image(
|
def _create_cover_image(
|
||||||
|
@ -303,20 +387,14 @@ def _create_cover_image(
|
||||||
"""
|
"""
|
||||||
tn = _resize_thumbnail(thumbnail)
|
tn = _resize_thumbnail(thumbnail)
|
||||||
|
|
||||||
# Get dominant colors from the top and bottom 20% of the thumbnail image
|
cover = _get_baseimage(tn, style)
|
||||||
top_part = tn.crop((0, 0, COVER_WIDTH, int(tn.height * 0.2)))
|
|
||||||
bottom_part = tn.crop((0, int(tn.height * 0.8), COVER_WIDTH, tn.height))
|
|
||||||
top_color = _get_dominant_color(top_part)
|
|
||||||
bottom_color = _get_dominant_color(bottom_part)
|
|
||||||
|
|
||||||
cover = _get_baseimage(tn, top_color, bottom_color, style)
|
cover = _draw_text_avatar(cover, avatar, title, channel)
|
||||||
|
|
||||||
# Insert thumbnail image in the middle
|
# Insert thumbnail image in the middle
|
||||||
tn_margin = int((COVER_WIDTH - tn.height) / 2)
|
tn_margin = int((COVER_WIDTH - tn.height) / 2)
|
||||||
cover.paste(tn, (0, tn_margin))
|
cover.paste(tn, (0, tn_margin))
|
||||||
|
|
||||||
_draw_text_avatar(cover, avatar, title, channel, top_color, bottom_color)
|
|
||||||
|
|
||||||
return cover
|
return cover
|
||||||
|
|
||||||
|
|
||||||
|
@ -332,7 +410,7 @@ def _create_blank_cover_image(
|
||||||
yt_icon_y_top = int((COVER_WIDTH - yt_icon.height) / 2)
|
yt_icon_y_top = int((COVER_WIDTH - yt_icon.height) / 2)
|
||||||
cover.paste(yt_icon, (yt_icon_x_left, yt_icon_y_top))
|
cover.paste(yt_icon, (yt_icon_x_left, yt_icon_y_top))
|
||||||
|
|
||||||
_draw_text_avatar(cover, avatar, title, channel, bg_color, bg_color)
|
_draw_text_avatar(cover, avatar, title, channel)
|
||||||
|
|
||||||
return cover
|
return cover
|
||||||
|
|
||||||
|
|
|
@ -1,32 +1,11 @@
|
||||||
import os
|
import os
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Tuple
|
|
||||||
|
|
||||||
import slugify
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
|
||||||
UCAST_DIRNAME = "_ucast"
|
UCAST_DIRNAME = "_ucast"
|
||||||
|
|
||||||
|
|
||||||
def _get_slug(str_in: str) -> str:
|
|
||||||
return slugify.slugify(str_in, lowercase=False, separator="_")
|
|
||||||
|
|
||||||
|
|
||||||
def _get_unique_slug(str_in: str, root_dir: Path, extension="") -> Tuple[Path, str]:
|
|
||||||
original_slug = _get_slug(str_in)
|
|
||||||
slug = original_slug
|
|
||||||
i = 0
|
|
||||||
|
|
||||||
while True:
|
|
||||||
testfile = root_dir / (slug + extension)
|
|
||||||
|
|
||||||
if not testfile.exists():
|
|
||||||
return testfile, slug
|
|
||||||
|
|
||||||
i += 1
|
|
||||||
slug = f"{original_slug}_{i}"
|
|
||||||
|
|
||||||
|
|
||||||
class ChannelFolder:
|
class ChannelFolder:
|
||||||
def __init__(self, dir_root: Path):
|
def __init__(self, dir_root: Path):
|
||||||
self.dir_root = dir_root
|
self.dir_root = dir_root
|
||||||
|
|
|
@ -1,8 +1,12 @@
|
||||||
import shutil
|
import datetime
|
||||||
|
import io
|
||||||
|
import json
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
import slugify
|
import slugify
|
||||||
|
from django.utils import timezone
|
||||||
from PIL import Image
|
from PIL import Image
|
||||||
|
|
||||||
AVATAR_SM_WIDTH = 100
|
AVATAR_SM_WIDTH = 100
|
||||||
|
@ -15,33 +19,92 @@ def download_file(url: str, download_path: Path):
|
||||||
open(download_path, "wb").write(r.content)
|
open(download_path, "wb").write(r.content)
|
||||||
|
|
||||||
|
|
||||||
def download_image_file(url: str, download_path: Path) -> Path:
|
def download_image_file(url: str, download_path: Path):
|
||||||
download_file(url, download_path)
|
"""
|
||||||
img = Image.open(download_path)
|
Download an image and convert it to the type given
|
||||||
img_ext = img.format.lower()
|
by the path.
|
||||||
img.close()
|
|
||||||
|
|
||||||
|
:param url: Image URL
|
||||||
|
:param download_path: Download path
|
||||||
|
"""
|
||||||
|
r = requests.get(url, allow_redirects=True)
|
||||||
|
r.raise_for_status()
|
||||||
|
|
||||||
|
img = Image.open(io.BytesIO(r.content))
|
||||||
|
img_ext = img.format.lower()
|
||||||
if img_ext == "jpeg":
|
if img_ext == "jpeg":
|
||||||
img_ext = "jpg"
|
img_ext = "jpg"
|
||||||
|
|
||||||
new_path = download_path.with_suffix("." + img_ext)
|
if "." + img_ext == download_path.suffix:
|
||||||
shutil.move(download_path, new_path)
|
open(download_path, "wb").write(r.content)
|
||||||
return new_path
|
else:
|
||||||
|
img.save(download_path)
|
||||||
|
|
||||||
|
|
||||||
def resize_avatar(original_file: Path, new_file: Path):
|
def resize_avatar(original_file: Path, new_file: Path):
|
||||||
avatar = Image.open(original_file)
|
avatar = Image.open(original_file)
|
||||||
avatar_new_height = int(AVATAR_SM_WIDTH / avatar.width * avatar.height)
|
avatar_new_height = int(AVATAR_SM_WIDTH / avatar.width * avatar.height)
|
||||||
avatar = avatar.resize((AVATAR_SM_WIDTH, avatar_new_height), Image.LANCZOS)
|
avatar = avatar.resize(
|
||||||
|
(AVATAR_SM_WIDTH, avatar_new_height), Image.Resampling.LANCZOS
|
||||||
|
)
|
||||||
avatar.save(new_file)
|
avatar.save(new_file)
|
||||||
|
|
||||||
|
|
||||||
def resize_thumbnail(original_file: Path, new_file: Path):
|
def resize_thumbnail(original_file: Path, new_file: Path):
|
||||||
thumbnail = Image.open(original_file)
|
thumbnail = Image.open(original_file)
|
||||||
tn_new_height = int(THUMBNAIL_SM_WIDTH / thumbnail.width * thumbnail.height)
|
tn_new_height = int(THUMBNAIL_SM_WIDTH / thumbnail.width * thumbnail.height)
|
||||||
thumbnail = thumbnail.resize((THUMBNAIL_SM_WIDTH, tn_new_height), Image.LANCZOS)
|
thumbnail = thumbnail.resize(
|
||||||
|
(THUMBNAIL_SM_WIDTH, tn_new_height), Image.Resampling.LANCZOS
|
||||||
|
)
|
||||||
thumbnail.save(new_file)
|
thumbnail.save(new_file)
|
||||||
|
|
||||||
|
|
||||||
def get_slug(str_in: str) -> str:
|
def get_slug(text: str) -> str:
|
||||||
return slugify.slugify(str_in, lowercase=False, separator="_")
|
return slugify.slugify(text, lowercase=False, separator="_")
|
||||||
|
|
||||||
|
|
||||||
|
def to_localtime(time: datetime.datetime):
|
||||||
|
"""Converts naive datetime to localtime based on settings"""
|
||||||
|
|
||||||
|
utc_time = time.replace(tzinfo=datetime.timezone.utc)
|
||||||
|
to_zone = timezone.get_default_timezone()
|
||||||
|
return utc_time.astimezone(to_zone)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_np_attrs(o) -> dict:
|
||||||
|
"""
|
||||||
|
Return all non-protected attributes of the given object.
|
||||||
|
:param o: Object
|
||||||
|
:return: Dict of attributes
|
||||||
|
"""
|
||||||
|
return {k: v for k, v in o.__dict__.items() if not k.startswith("_")}
|
||||||
|
|
||||||
|
|
||||||
|
def serializer(o: Any) -> Union[str, dict, int, float, bool]:
|
||||||
|
"""
|
||||||
|
Serialize object to json-storable format
|
||||||
|
:param o: Object to serialize
|
||||||
|
:return: Serialized output data
|
||||||
|
"""
|
||||||
|
if hasattr(o, "serialize"):
|
||||||
|
return o.serialize()
|
||||||
|
if isinstance(o, (datetime.datetime, datetime.date)):
|
||||||
|
return o.isoformat()
|
||||||
|
if isinstance(o, (bool, float, int)):
|
||||||
|
return o
|
||||||
|
if hasattr(o, "__dict__"):
|
||||||
|
return _get_np_attrs(o)
|
||||||
|
return str(o)
|
||||||
|
|
||||||
|
|
||||||
|
def to_json(o, pretty=False) -> str:
|
||||||
|
"""
|
||||||
|
Convert object to json.
|
||||||
|
Uses the ``serialize()`` method of the target object if available.
|
||||||
|
:param o: Object to serialize
|
||||||
|
:param pretty: Prettify with indents
|
||||||
|
:return: JSON string
|
||||||
|
"""
|
||||||
|
return json.dumps(
|
||||||
|
o, default=serializer, indent=2 if pretty else None, ensure_ascii=False
|
||||||
|
)
|
||||||
|
|
22
ucast/service/videoutil.py
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from mutagen import id3
|
||||||
|
|
||||||
|
from ucast.models import Video
|
||||||
|
|
||||||
|
|
||||||
|
def tag_audio(audio_path: Path, video: Video, cover_path: Path):
|
||||||
|
title_text = f"{video.published.date().isoformat()} {video.title}"
|
||||||
|
|
||||||
|
tag = id3.ID3(audio_path)
|
||||||
|
tag["TPE1"] = id3.TPE1(encoding=3, text=video.channel.name) # Artist
|
||||||
|
tag["TALB"] = id3.TALB(encoding=3, text=video.channel.name) # Album
|
||||||
|
tag["TIT2"] = id3.TIT2(encoding=3, text=title_text) # Title
|
||||||
|
tag["TDRC"] = id3.TDRC(encoding=3, text=video.published.date().isoformat()) # Date
|
||||||
|
tag["COMM"] = id3.COMM(encoding=3, text=video.get_full_description()) # Comment
|
||||||
|
|
||||||
|
with open(cover_path, "rb") as albumart:
|
||||||
|
tag["APIC"] = id3.APIC(
|
||||||
|
encoding=3, mime="image/png", type=3, desc="Cover", data=albumart.read()
|
||||||
|
)
|
||||||
|
tag.save()
|
|
@ -9,12 +9,12 @@ from typing import List, Optional
|
||||||
|
|
||||||
import feedparser
|
import feedparser
|
||||||
import requests
|
import requests
|
||||||
from django.conf import settings
|
|
||||||
from mutagen import id3
|
|
||||||
from yt_dlp import YoutubeDL
|
from yt_dlp import YoutubeDL
|
||||||
|
|
||||||
from ucast.service import scrapetube, util
|
from ucast.service import scrapetube, util
|
||||||
|
|
||||||
|
CHANID_REGEX = re.compile(r"""[-_a-zA-Z\d]{24}""")
|
||||||
|
|
||||||
|
|
||||||
class ItemNotFoundError(Exception):
|
class ItemNotFoundError(Exception):
|
||||||
pass
|
pass
|
||||||
|
@ -24,6 +24,10 @@ class ThumbnailNotFoundError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidMetadataError(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class VideoScraped:
|
class VideoScraped:
|
||||||
"""
|
"""
|
||||||
|
@ -72,7 +76,8 @@ class VideoDetails:
|
||||||
thumbnails=info["thumbnails"],
|
thumbnails=info["thumbnails"],
|
||||||
is_currently_live=bool(info.get("is_live")),
|
is_currently_live=bool(info.get("is_live")),
|
||||||
is_livestream=info.get("is_live") or info.get("was_live"),
|
is_livestream=info.get("is_live") or info.get("was_live"),
|
||||||
is_short=info["duration"] <= 60 and info["width"] < info["height"],
|
is_short=info["duration"] <= 60
|
||||||
|
and (info["width"] or 0) < (info["height"] or 0),
|
||||||
)
|
)
|
||||||
|
|
||||||
def add_scraped_data(self, scraped: VideoScraped):
|
def add_scraped_data(self, scraped: VideoScraped):
|
||||||
|
@ -93,7 +98,7 @@ class ChannelMetadata:
|
||||||
avatar_url: str
|
avatar_url: str
|
||||||
|
|
||||||
|
|
||||||
def download_thumbnail(vinfo: VideoDetails, download_path: Path) -> Path:
|
def download_thumbnail(vinfo: VideoDetails, download_path: Path):
|
||||||
"""
|
"""
|
||||||
Download the thumbnail image of a YouTube video and save it at the given filepath.
|
Download the thumbnail image of a YouTube video and save it at the given filepath.
|
||||||
The thumbnail file ending is added to the path.
|
The thumbnail file ending is added to the path.
|
||||||
|
@ -109,7 +114,8 @@ def download_thumbnail(vinfo: VideoDetails, download_path: Path) -> Path:
|
||||||
logging.info(f"downloading thumbnail {url}...")
|
logging.info(f"downloading thumbnail {url}...")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return util.download_image_file(url, download_path)
|
util.download_image_file(url, download_path)
|
||||||
|
return
|
||||||
except requests.HTTPError:
|
except requests.HTTPError:
|
||||||
logging.warning(f"downloading thumbnail {url} failed")
|
logging.warning(f"downloading thumbnail {url} failed")
|
||||||
pass
|
pass
|
||||||
|
@ -157,24 +163,6 @@ def download_audio(
|
||||||
return VideoDetails.from_vinfo(info)
|
return VideoDetails.from_vinfo(info)
|
||||||
|
|
||||||
|
|
||||||
def tag_audio(audio_path: Path, vinfo: VideoDetails, cover_path: Path):
|
|
||||||
title_text = f"{vinfo.published.date().isoformat()} {vinfo.title}"
|
|
||||||
|
|
||||||
audio = id3.ID3(audio_path)
|
|
||||||
audio["TPE1"] = id3.TPE1(encoding=3, text=vinfo.channel_name) # Artist
|
|
||||||
audio["TALB"] = id3.TALB(encoding=3, text=vinfo.channel_name) # Album
|
|
||||||
audio["TIT2"] = id3.TIT2(encoding=3, text=title_text) # Title
|
|
||||||
audio["TYER"] = id3.TYER(encoding=3, text=str(vinfo.published.year)) # Year
|
|
||||||
audio["TDAT"] = id3.TDAT(encoding=3, text=vinfo.published.strftime("%d%m")) # Date
|
|
||||||
audio["COMM"] = id3.COMM(encoding=3, text=f"YT-ID: {vinfo.id}") # Comment
|
|
||||||
|
|
||||||
with open(cover_path, "rb") as albumart:
|
|
||||||
audio["APIC"] = id3.APIC(
|
|
||||||
encoding=3, mime="image/png", type=3, desc="Cover", data=albumart.read()
|
|
||||||
)
|
|
||||||
audio.save()
|
|
||||||
|
|
||||||
|
|
||||||
def channel_url_from_id(channel_id: str) -> str:
|
def channel_url_from_id(channel_id: str) -> str:
|
||||||
return "https://www.youtube.com/channel/" + channel_id
|
return "https://www.youtube.com/channel/" + channel_id
|
||||||
|
|
||||||
|
@ -207,8 +195,7 @@ def channel_url_from_str(channel_str: str) -> str:
|
||||||
# Channel ID
|
# Channel ID
|
||||||
return "https://www.youtube.com/channel/" + match[2]
|
return "https://www.youtube.com/channel/" + match[2]
|
||||||
|
|
||||||
chanid_regex = re.compile(r"""[-_a-zA-Z\d]{24}""")
|
if CHANID_REGEX.match(channel_str):
|
||||||
if chanid_regex.match(channel_str):
|
|
||||||
return "https://www.youtube.com/channel/" + channel_str
|
return "https://www.youtube.com/channel/" + channel_str
|
||||||
|
|
||||||
raise ValueError("invalid channel string")
|
raise ValueError("invalid channel string")
|
||||||
|
@ -226,28 +213,23 @@ def get_channel_metadata(channel_url: str) -> ChannelMetadata:
|
||||||
|
|
||||||
channel_id = metadata["externalId"]
|
channel_id = metadata["externalId"]
|
||||||
name = metadata["title"]
|
name = metadata["title"]
|
||||||
description = metadata["description"]
|
description = metadata["description"].strip()
|
||||||
avatar = metadata["avatar"]["thumbnails"][0]["url"]
|
avatar = metadata["avatar"]["thumbnails"][0]["url"]
|
||||||
|
|
||||||
|
if not CHANID_REGEX.match(channel_id):
|
||||||
|
raise InvalidMetadataError(f"got invalid channel id {repr(channel_id)}")
|
||||||
|
|
||||||
|
if not name:
|
||||||
|
raise InvalidMetadataError(f"no channel name found for channel {channel_id}")
|
||||||
|
|
||||||
|
if not avatar.startswith("https://"):
|
||||||
|
raise InvalidMetadataError(
|
||||||
|
f"got invalid avatar url for channel {channel_id}: {avatar}"
|
||||||
|
)
|
||||||
|
|
||||||
return ChannelMetadata(channel_id, name, description, avatar)
|
return ChannelMetadata(channel_id, name, description, avatar)
|
||||||
|
|
||||||
|
|
||||||
def download_avatar(avatar_url: str, download_path: Path) -> Path:
|
|
||||||
"""
|
|
||||||
Download the avatar image of a channel. The .jpg file ending
|
|
||||||
is added to the path.
|
|
||||||
|
|
||||||
:param avatar_url: Channel avatar URL
|
|
||||||
:param download_path: Download path
|
|
||||||
:return: Path with file ending
|
|
||||||
"""
|
|
||||||
logging.info(f"downloading avatar {avatar_url}...")
|
|
||||||
|
|
||||||
download_path = download_path.with_suffix(".jpg")
|
|
||||||
util.download_file(avatar_url, download_path)
|
|
||||||
return download_path
|
|
||||||
|
|
||||||
|
|
||||||
def get_channel_videos_from_feed(channel_id: str) -> List[VideoScraped]:
|
def get_channel_videos_from_feed(channel_id: str) -> List[VideoScraped]:
|
||||||
"""
|
"""
|
||||||
Return videos of a channel using YouTube's RSS feed. Using the feed is fast,
|
Return videos of a channel using YouTube's RSS feed. Using the feed is fast,
|
||||||
|
@ -291,9 +273,7 @@ def get_channel_videos_from_scraper(
|
||||||
"""
|
"""
|
||||||
videos = []
|
videos = []
|
||||||
|
|
||||||
for item in scrapetube.get_channel(
|
for item in scrapetube.get_channel(channel_url_from_id(channel_id), limit):
|
||||||
channel_url_from_id(channel_id), limit, settings.YOUTUBE_SCRAPE_DELAY
|
|
||||||
):
|
|
||||||
video_id = item.get("videoId")
|
video_id = item.get("videoId")
|
||||||
if not video_id:
|
if not video_id:
|
||||||
logging.warning(
|
logging.warning(
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
|
import os
|
||||||
|
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
|
|
||||||
|
from ucast import queue
|
||||||
from ucast.models import Channel, Video
|
from ucast.models import Channel, Video
|
||||||
from ucast.service import cover, storage, util, youtube
|
from ucast.service import cover, storage, util, videoutil, youtube
|
||||||
|
|
||||||
store = storage.Storage()
|
|
||||||
|
|
||||||
|
|
||||||
def _get_or_create_channel(channel_id: str) -> Channel:
|
def _get_or_create_channel(channel_id: str) -> Channel:
|
||||||
|
@ -14,12 +15,11 @@ def _get_or_create_channel(channel_id: str) -> Channel:
|
||||||
youtube.channel_url_from_id(channel_id)
|
youtube.channel_url_from_id(channel_id)
|
||||||
)
|
)
|
||||||
channel_slug = Channel.get_new_slug(channel_data.name)
|
channel_slug = Channel.get_new_slug(channel_data.name)
|
||||||
|
store = storage.Storage()
|
||||||
channel_folder = store.get_channel_folder(channel_slug)
|
channel_folder = store.get_channel_folder(channel_slug)
|
||||||
|
|
||||||
avatar_file = youtube.download_avatar(
|
util.download_image_file(channel_data.avatar_url, channel_folder.file_avatar)
|
||||||
channel_data.avatar_url, channel_folder.file_avatar
|
util.resize_avatar(channel_folder.file_avatar, channel_folder.file_avatar_sm)
|
||||||
)
|
|
||||||
util.resize_avatar(avatar_file, channel_folder.file_avatar_sm)
|
|
||||||
|
|
||||||
channel = Channel(
|
channel = Channel(
|
||||||
id=channel_id,
|
id=channel_id,
|
||||||
|
@ -60,32 +60,50 @@ def _load_scraped_video(vid: youtube.VideoScraped, channel: Channel):
|
||||||
)
|
)
|
||||||
video.save()
|
video.save()
|
||||||
|
|
||||||
|
queue.enqueue(download_video, video)
|
||||||
|
|
||||||
|
|
||||||
def download_video(video: Video):
|
def download_video(video: Video):
|
||||||
|
"""
|
||||||
|
Download a video including its thumbnail, create a cover image
|
||||||
|
and store everything in the channel folder.
|
||||||
|
|
||||||
|
:param video: Video object
|
||||||
|
"""
|
||||||
|
store = storage.Storage()
|
||||||
channel_folder = store.get_channel_folder(video.channel.slug)
|
channel_folder = store.get_channel_folder(video.channel.slug)
|
||||||
|
|
||||||
audio_file = channel_folder.get_audio(video.slug)
|
audio_file = channel_folder.get_audio(video.slug)
|
||||||
details = youtube.download_audio(video.id, audio_file)
|
details = youtube.download_audio(video.id, audio_file)
|
||||||
|
|
||||||
# Download/convert thumbnails
|
# Download/convert thumbnails
|
||||||
tn_path = youtube.download_thumbnail(
|
tn_path = channel_folder.get_thumbnail(video.slug)
|
||||||
details, channel_folder.get_thumbnail(video.slug)
|
youtube.download_thumbnail(details, tn_path)
|
||||||
)
|
|
||||||
util.resize_thumbnail(tn_path, channel_folder.get_thumbnail(video.slug, True))
|
util.resize_thumbnail(tn_path, channel_folder.get_thumbnail(video.slug, True))
|
||||||
cover_file = channel_folder.get_cover(video.slug)
|
cover_file = channel_folder.get_cover(video.slug)
|
||||||
cover.create_cover_file(
|
cover.create_cover_file(
|
||||||
tn_path,
|
tn_path,
|
||||||
channel_folder.file_avatar,
|
channel_folder.file_avatar,
|
||||||
details.title,
|
video.title,
|
||||||
video.channel.name,
|
video.channel.name,
|
||||||
cover.COVER_STYLE_BLUR,
|
cover.COVER_STYLE_BLUR,
|
||||||
cover_file,
|
cover_file,
|
||||||
)
|
)
|
||||||
|
|
||||||
youtube.tag_audio(audio_file, details, cover_file)
|
videoutil.tag_audio(audio_file, video, cover_file)
|
||||||
|
|
||||||
|
video.downloaded = timezone.now()
|
||||||
|
video.download_size = os.path.getsize(audio_file)
|
||||||
|
video.save()
|
||||||
|
|
||||||
|
|
||||||
def fetch_channel(channel_id: str, limit: int = None):
|
def import_channel(channel_id: str, limit: int = None):
|
||||||
|
"""
|
||||||
|
Add a new channel to ucast and download all existing videos.
|
||||||
|
|
||||||
|
:param channel_id: YT-Channel-ID
|
||||||
|
:param limit: Maximum number of videos to download
|
||||||
|
"""
|
||||||
channel = _get_or_create_channel(channel_id)
|
channel = _get_or_create_channel(channel_id)
|
||||||
|
|
||||||
if limit == 0:
|
if limit == 0:
|
||||||
|
@ -97,17 +115,18 @@ def fetch_channel(channel_id: str, limit: int = None):
|
||||||
_load_scraped_video(vid, channel)
|
_load_scraped_video(vid, channel)
|
||||||
|
|
||||||
|
|
||||||
|
def update_channel(channel: Channel):
|
||||||
|
"""Update a single channel from its RSS feed"""
|
||||||
|
videos = youtube.get_channel_videos_from_feed(channel.id)
|
||||||
|
|
||||||
|
for vid in videos:
|
||||||
|
_load_scraped_video(vid, channel)
|
||||||
|
|
||||||
|
|
||||||
def update_channels():
|
def update_channels():
|
||||||
|
"""
|
||||||
|
Update all channels from their RSS feeds and download new videos.
|
||||||
|
This task is scheduled a regular intervals.
|
||||||
|
"""
|
||||||
for channel in Channel.objects.filter(active=True):
|
for channel in Channel.objects.filter(active=True):
|
||||||
videos = youtube.get_channel_videos_from_feed(channel.id)
|
queue.enqueue(update_channel, channel)
|
||||||
|
|
||||||
for vid in videos:
|
|
||||||
_load_scraped_video(vid, channel)
|
|
||||||
|
|
||||||
|
|
||||||
def download_videos():
|
|
||||||
for video in Video.objects.filter(downloaded=None):
|
|
||||||
download_video(video)
|
|
||||||
|
|
||||||
video.downloaded = timezone.now()
|
|
||||||
video.save()
|
|
||||||
|
|
88
ucast/tasks/library.py
Normal file
|
@ -0,0 +1,88 @@
|
||||||
|
import os
|
||||||
|
|
||||||
|
from django.utils import timezone
|
||||||
|
|
||||||
|
from ucast import queue
|
||||||
|
from ucast.models import Channel, Video
|
||||||
|
from ucast.service import cover, storage, util, youtube
|
||||||
|
|
||||||
|
|
||||||
|
def recreate_cover(video: Video):
|
||||||
|
store = storage.Storage()
|
||||||
|
cf = store.get_channel_folder(video.channel.slug)
|
||||||
|
|
||||||
|
thumbnail_file = cf.get_thumbnail(video.slug)
|
||||||
|
cover_file = cf.get_cover(video.slug)
|
||||||
|
|
||||||
|
if not os.path.isfile(cf.file_avatar):
|
||||||
|
raise FileNotFoundError(f"could not find avatar for channel {video.channel_id}")
|
||||||
|
|
||||||
|
if not os.path.isfile(thumbnail_file):
|
||||||
|
raise FileNotFoundError(f"could not find thumbnail for video {video.id}")
|
||||||
|
|
||||||
|
cover.create_cover_file(
|
||||||
|
thumbnail_file,
|
||||||
|
cf.file_avatar,
|
||||||
|
video.title,
|
||||||
|
video.channel.name,
|
||||||
|
cover.COVER_STYLE_BLUR,
|
||||||
|
cover_file,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def recreate_covers():
|
||||||
|
for video in Video.objects.filter(downloaded__isnull=False):
|
||||||
|
queue.enqueue(recreate_cover, video)
|
||||||
|
|
||||||
|
|
||||||
|
def update_file_storage():
|
||||||
|
store = storage.Storage()
|
||||||
|
|
||||||
|
for video in Video.objects.all():
|
||||||
|
cf = store.get_channel_folder(video.channel.slug)
|
||||||
|
|
||||||
|
audio_file = cf.get_audio(video.slug)
|
||||||
|
cover_file = cf.get_cover(video.slug)
|
||||||
|
tn_file = cf.get_thumbnail(video.slug)
|
||||||
|
tn_file_sm = cf.get_thumbnail(video.slug, True)
|
||||||
|
|
||||||
|
if not os.path.isfile(audio_file) or not os.path.isfile(tn_file):
|
||||||
|
video.downloaded = None
|
||||||
|
video.download_size = None
|
||||||
|
video.save()
|
||||||
|
return
|
||||||
|
|
||||||
|
if not os.path.isfile(tn_file_sm):
|
||||||
|
util.resize_thumbnail(tn_file, tn_file_sm)
|
||||||
|
|
||||||
|
if not os.path.isfile(cover_file):
|
||||||
|
recreate_cover(video)
|
||||||
|
|
||||||
|
if video.downloaded is None:
|
||||||
|
video.downloaded = timezone.now()
|
||||||
|
|
||||||
|
video.download_size = os.path.getsize(audio_file)
|
||||||
|
video.save()
|
||||||
|
|
||||||
|
|
||||||
|
def update_channel_info(channel: Channel):
|
||||||
|
channel_data = youtube.get_channel_metadata(youtube.channel_url_from_id(channel.id))
|
||||||
|
|
||||||
|
if channel_data.avatar_url != channel.avatar_url:
|
||||||
|
store = storage.Storage()
|
||||||
|
channel_folder = store.get_channel_folder(channel.slug)
|
||||||
|
|
||||||
|
util.download_image_file(channel_data.avatar_url, channel_folder.file_avatar)
|
||||||
|
util.resize_avatar(channel_folder.file_avatar, channel_folder.file_avatar_sm)
|
||||||
|
|
||||||
|
channel.avatar_url = channel_data.avatar_url
|
||||||
|
|
||||||
|
channel.name = channel_data.name
|
||||||
|
channel.description = channel_data.description
|
||||||
|
|
||||||
|
channel.save()
|
||||||
|
|
||||||
|
|
||||||
|
def update_channel_infos():
|
||||||
|
for channel in Channel.objects.filter(active=True):
|
||||||
|
queue.enqueue(update_channel_info, channel)
|
28
ucast/tasks/schedule.py
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
from ucast import queue
|
||||||
|
from ucast.tasks import download
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def clear_scheduled_jobs():
|
||||||
|
"""Delete all scheduled jobs to prevent duplicates"""
|
||||||
|
scheduler = queue.get_scheduler()
|
||||||
|
for job in scheduler.get_jobs():
|
||||||
|
log.debug("Deleting scheduled job %s", job)
|
||||||
|
job.delete()
|
||||||
|
|
||||||
|
|
||||||
|
def register_scheduled_jobs():
|
||||||
|
"""Register all scheduled jobs"""
|
||||||
|
scheduler = queue.get_scheduler()
|
||||||
|
scheduler.schedule(
|
||||||
|
datetime.utcnow(),
|
||||||
|
download.update_channels,
|
||||||
|
id="schedule_update_channels",
|
||||||
|
interval=settings.YT_UPDATE_INTERVAL,
|
||||||
|
)
|
11
ucast/templates/ucast/main.html
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>Ucast</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<h1>Ucast</h1>
|
||||||
|
Hello World!
|
||||||
|
</body>
|
||||||
|
</html>
|
|
@ -1,3 +1,80 @@
|
||||||
from importlib.resources import files
|
import json
|
||||||
|
import uuid
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime
|
||||||
|
from importlib import resources
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
DIR_TESTFILES = files("ucast.tests.testfiles")
|
from ucast.service import youtube
|
||||||
|
|
||||||
|
DIR_TESTFILES = resources.path("ucast.tests", "_testfiles")
|
||||||
|
|
||||||
|
|
||||||
|
def get_video_details(video_id: str):
|
||||||
|
with open(DIR_TESTFILES / "fixture" / "videodetails.json") as f:
|
||||||
|
videodetails = json.load(f)
|
||||||
|
|
||||||
|
vd_raw = videodetails[video_id]
|
||||||
|
vd_raw["published"] = datetime.fromisoformat(vd_raw["published"])
|
||||||
|
|
||||||
|
return youtube.VideoDetails(**vd_raw)
|
||||||
|
|
||||||
|
|
||||||
|
def get_channel_metadata(channel_url: str):
|
||||||
|
with open(DIR_TESTFILES / "fixture" / "channelmeta.json") as f:
|
||||||
|
channelmeta = json.load(f)
|
||||||
|
|
||||||
|
return youtube.ChannelMetadata(**channelmeta[channel_url])
|
||||||
|
|
||||||
|
|
||||||
|
_global_mock_calls: Dict[str, List["_GlobalMockCall"]] = {}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class _GlobalMockCall:
|
||||||
|
args: list
|
||||||
|
kwargs: dict
|
||||||
|
|
||||||
|
|
||||||
|
class GlobalMock:
|
||||||
|
def __init__(self):
|
||||||
|
self.uuid = str(uuid.uuid4())
|
||||||
|
|
||||||
|
@property
|
||||||
|
def calls(self) -> List[_GlobalMockCall]:
|
||||||
|
global _global_mock_calls
|
||||||
|
|
||||||
|
if self.uuid not in _global_mock_calls:
|
||||||
|
_global_mock_calls[self.uuid] = []
|
||||||
|
|
||||||
|
return _global_mock_calls[self.uuid]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def n_calls(self) -> int:
|
||||||
|
return len(self.calls)
|
||||||
|
|
||||||
|
def __call__(self, *args, **kwargs):
|
||||||
|
call = _GlobalMockCall(args, kwargs)
|
||||||
|
self.calls.append(call)
|
||||||
|
|
||||||
|
def assert_called(self):
|
||||||
|
if not self.calls:
|
||||||
|
raise AssertionError("Mock has never been called")
|
||||||
|
|
||||||
|
def assert_any_call(self, *args, **kwargs):
|
||||||
|
self.assert_called()
|
||||||
|
|
||||||
|
for call in self.calls:
|
||||||
|
if call.args == args and call.kwargs == kwargs:
|
||||||
|
return
|
||||||
|
|
||||||
|
raise AssertionError(
|
||||||
|
f"Call with args: {args}, kwargs: {kwargs} not found.\
|
||||||
|
Registered calls: {self.calls}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def assert_called_with(self, *args, **kwargs):
|
||||||
|
self.assert_called()
|
||||||
|
|
||||||
|
call = self.calls[-1]
|
||||||
|
assert call.args == args and call.kwargs == kwargs
|
||||||
|
|
BIN
ucast/tests/_testfiles/audio/audio1.mp3
Normal file
Before Width: | Height: | Size: 186 KiB After Width: | Height: | Size: 186 KiB |
Before Width: | Height: | Size: 32 KiB After Width: | Height: | Size: 32 KiB |
Before Width: | Height: | Size: 53 KiB After Width: | Height: | Size: 53 KiB |
BIN
ucast/tests/_testfiles/avatar/a4.jpg
Normal file
After Width: | Height: | Size: 91 KiB |
Before Width: | Height: | Size: 26 KiB After Width: | Height: | Size: 26 KiB |
BIN
ucast/tests/_testfiles/cover/c1_blur.png
Normal file
After Width: | Height: | Size: 268 KiB |
Before Width: | Height: | Size: 234 KiB After Width: | Height: | Size: 234 KiB |
Before Width: | Height: | Size: 218 KiB After Width: | Height: | Size: 218 KiB |
Before Width: | Height: | Size: 215 KiB After Width: | Height: | Size: 215 KiB |
Before Width: | Height: | Size: 183 KiB After Width: | Height: | Size: 183 KiB |
Before Width: | Height: | Size: 216 KiB After Width: | Height: | Size: 216 KiB |
Before Width: | Height: | Size: 173 KiB After Width: | Height: | Size: 173 KiB |
20
ucast/tests/_testfiles/fixture/channelmeta.json
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
{
|
||||||
|
"https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q": {
|
||||||
|
"id": "UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
"name": "ThetaDev",
|
||||||
|
"description": "I'm ThetaDev. I love creating cool projects using electronics, 3D printers and other awesome tech-based stuff.",
|
||||||
|
"avatar_url": "https://yt3.ggpht.com/ytc/AKedOLSnFfmpibLLoqyaYdsF6bJ-zaLPzomII__FrJve1w=s900-c-k-c0x00ffffff-no-rj"
|
||||||
|
},
|
||||||
|
"https://www.youtube.com/channel/UC2TXq_t06Hjdr2g_KdKpHQg": {
|
||||||
|
"id": "UC2TXq_t06Hjdr2g_KdKpHQg",
|
||||||
|
"name": "media.ccc.de",
|
||||||
|
"description": "The real official channel of the chaos computer club, operated by the CCC VOC (https://c3voc.de)",
|
||||||
|
"avatar_url": "https://yt3.ggpht.com/c1jcNSbPuOMDUieixkWIlXc82kMNJ8pCDmq5KtL8hjt74rAXLobsT9Y078-w5DK7ymKyDaqr=s900-c-k-c0x00ffffff-no-rj"
|
||||||
|
},
|
||||||
|
"https://www.youtube.com/channel/UCmLTTbctUZobNQrr8RtX8uQ": {
|
||||||
|
"id": "UCmLTTbctUZobNQrr8RtX8uQ",
|
||||||
|
"name": "Creative Commons",
|
||||||
|
"description": "Hello friends,\nWelcome to my channel CREATIVE COMMONS.\nOn this channel you will get all the videos absolutely free copyright and no matter how many videos you download there is no copyright claim you can download them and upload them to your channel and all the music is young Is on the channel they can also download and use in their videos on this channel you will find different videos in which OUTRO Videos, INTRO Videos, FREE MUSIC, FREE SOUND EFFECTS, LOWER THIRDS, and more.",
|
||||||
|
"avatar_url": "https://yt3.ggpht.com/-ybcsEHc8YCmKUZMr2bf4DZoDv7SKrutgKIh8kSxXugj296QkqtBZQXVzpuZ1Izs8kNUz35B=s900-c-k-c0x00ffffff-no-rj"
|
||||||
|
}
|
||||||
|
}
|
2524
ucast/tests/_testfiles/fixture/videodetails.json
Normal file
141
ucast/tests/_testfiles/fixture/videos.json
Normal file
|
@ -0,0 +1,141 @@
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"model": "ucast.channel",
|
||||||
|
"pk": "UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
"fields": {
|
||||||
|
"name": "ThetaDev",
|
||||||
|
"slug": "ThetaDev",
|
||||||
|
"description": "I'm ThetaDev. I love creating cool projects using electronics, 3D printers and other awesome tech-based stuff.",
|
||||||
|
"active": true,
|
||||||
|
"skip_livestreams": true,
|
||||||
|
"skip_shorts": true,
|
||||||
|
"keep_videos": null,
|
||||||
|
"avatar_url": "https://yt3.ggpht.com/ytc/AKedOLSnFfmpibLLoqyaYdsF6bJ-zaLPzomII__FrJve1w=s900-c-k-c0x00ffffff-no-rj"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model": "ucast.channel",
|
||||||
|
"pk": "UC2TXq_t06Hjdr2g_KdKpHQg",
|
||||||
|
"fields": {
|
||||||
|
"name": "media.ccc.de",
|
||||||
|
"slug": "media_ccc_de",
|
||||||
|
"description": "The real official channel of the chaos computer club, operated by the CCC VOC (https://c3voc.de)",
|
||||||
|
"active": true,
|
||||||
|
"skip_livestreams": true,
|
||||||
|
"skip_shorts": true,
|
||||||
|
"keep_videos": null,
|
||||||
|
"avatar_url": "https://yt3.ggpht.com/c1jcNSbPuOMDUieixkWIlXc82kMNJ8pCDmq5KtL8hjt74rAXLobsT9Y078-w5DK7ymKyDaqr=s900-c-k-c0x00ffffff-no-rj"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model": "ucast.channel",
|
||||||
|
"pk": "UCmLTTbctUZobNQrr8RtX8uQ",
|
||||||
|
"fields": {
|
||||||
|
"name": "Creative Commons",
|
||||||
|
"slug": "Creative_Commons",
|
||||||
|
"description": "Hello friends,\nWelcome to my channel CREATIVE COMMONS.\nOn this channel you will get all the videos absolutely free copyright and no matter how many videos you download there is no copyright claim you can download them and upload them to your channel and all the music is young Is on the channel they can also download and use in their videos on this channel you will find different videos in which OUTRO Videos, INTRO Videos, FREE MUSIC, FREE SOUND EFFECTS, LOWER THIRDS, and more.",
|
||||||
|
"active": true,
|
||||||
|
"skip_livestreams": true,
|
||||||
|
"skip_shorts": true,
|
||||||
|
"keep_videos": null,
|
||||||
|
"avatar_url": "https://yt3.ggpht.com/-ybcsEHc8YCmKUZMr2bf4DZoDv7SKrutgKIh8kSxXugj296QkqtBZQXVzpuZ1Izs8kNUz35B=s900-c-k-c0x00ffffff-no-rj"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
"model": "ucast.video",
|
||||||
|
"pk": "ZPxEr4YdWt8",
|
||||||
|
"fields": {
|
||||||
|
"title": "ThetaDev @ Embedded World 2019",
|
||||||
|
"slug": "20190602_ThetaDev_Embedded_World_2019",
|
||||||
|
"channel": "UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
"published": "2019-06-02T00:00:00Z",
|
||||||
|
"downloaded": "2022-05-15T22:16:03.096Z",
|
||||||
|
"description": "This february I spent one day at the Embedded World in Nuremberg. They showed tons of interesting electronics stuff, so I had to take some pictures and videos for you to see ;-)\n\nSorry for the late upload, I just didn't have time to edit my footage.\n\nEmbedded World: https://www.embedded-world.de/\n\nMy website: https://thdev.org\nTwitter: https://twitter.com/Theta_Dev",
|
||||||
|
"duration": 267,
|
||||||
|
"is_livestream": false,
|
||||||
|
"is_short": false,
|
||||||
|
"download_size": 4558477
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model": "ucast.video",
|
||||||
|
"pk": "_I5IFObm_-k",
|
||||||
|
"fields": {
|
||||||
|
"title": "Easter special: 3D printed Bunny",
|
||||||
|
"slug": "20180331_Easter_special_3D_printed_Bunny",
|
||||||
|
"channel": "UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
"published": "2018-03-31T00:00:00Z",
|
||||||
|
"downloaded": "2022-05-15T22:16:12.514Z",
|
||||||
|
"description": "Happy Easter 2018!\nThis is just a special video where I print a little bunny as an Easter gift for friends or relatives. I hope you like the model, too.\n\nSadly my camera doesn't support timelapses, so I had to record the whole 4h printing process in real time, resulting in 30GB of footage. But I think it was worth it ;-)\n\n__PROJECT_LINKS___________________________\nBunny: https://www.thingiverse.com/thing:287884\n\n__COMPONENT_SUPPLIERS__________________\n3D printer: https://www.prusa3d.com/\n3D printing filament: https://www.dasfilament.de/\n______________________________________________\nMy website: https://thdev.org\nTwitter: https://twitter.com/Theta_Dev",
|
||||||
|
"duration": 511,
|
||||||
|
"is_livestream": false,
|
||||||
|
"is_short": false,
|
||||||
|
"download_size": 8444518
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model": "ucast.video",
|
||||||
|
"pk": "mmEDPbbSnaY",
|
||||||
|
"fields": {
|
||||||
|
"title": "ThetaDevlog#2 - MySensors singleLED",
|
||||||
|
"slug": "20180326_ThetaDevlog_2_MySensors_singleLED",
|
||||||
|
"channel": "UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
"published": "2018-03-26T00:00:00Z",
|
||||||
|
"downloaded": "2022-05-15T22:16:20.280Z",
|
||||||
|
"description": "The PCBs and components for the MySensors smart home devices arrived!\nIn this video I'll show you how to build the singleLED controller to switch/dim your 12V led lights. Detailed building instructions can be found on OpenHardware or GitHub.\n\n__PROJECT_LINKS___________________________\nOpenHardware: https://www.openhardware.io/view/563\nGitHub: https://github.com/Theta-Dev/MySensors-singleLED\n\nProgramming adapter: https://thdev.org/?Projects___misc___micro_JST\nBoard definitions: http://files.thdev.org/arduino/atmega.zip\n\n__COMPONENT_SUPPLIERS__________________\nElectronic components: https://www.aliexpress.com/\nPCBs: http://www.allpcb.com/\n3D printing filament: https://www.dasfilament.de/\n______________________________________________\nMy website: https://thdev.org\nTwitter: https://twitter.com/Theta_Dev\n______________________________________________\nMusic by Bartlebeats: https://bartlebeats.bandcamp.com",
|
||||||
|
"duration": 463,
|
||||||
|
"is_livestream": false,
|
||||||
|
"is_short": false,
|
||||||
|
"download_size": 7648860
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model": "ucast.video",
|
||||||
|
"pk": "Cda4zS-1j-k",
|
||||||
|
"fields": {
|
||||||
|
"title": "ThetaDevlog#1 - MySensors Smart Home!",
|
||||||
|
"slug": "20180217_ThetaDevlog_1_MySensors_Smart_Home",
|
||||||
|
"channel": "UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
"published": "2018-02-17T00:00:00Z",
|
||||||
|
"downloaded": "2022-05-15T22:16:25.237Z",
|
||||||
|
"description": "Smart Home devices have been around for some time and can really make your life easier. But most of them are quite pricey and not always worth the money.\n\nHow about a sytem that costs only 5€ per device and has all the benefits of the expensive solutions? The open source project MySensors claims to do that. In this series I'll try this and find out whether it works!\n\n______________________________________________\nMy website: https://thdev.org\nTwitter: https://twitter.com/Theta_Dev",
|
||||||
|
"duration": 303,
|
||||||
|
"is_livestream": false,
|
||||||
|
"is_short": false,
|
||||||
|
"download_size": 5091124
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model": "ucast.video",
|
||||||
|
"pk": "2xfXsqyd8YA",
|
||||||
|
"fields": {
|
||||||
|
"title": "cy: Log4Shell - Bug oder Feature",
|
||||||
|
"slug": "20220521_cy_Log4Shell_Bug_oder_Feature",
|
||||||
|
"channel": "UC2TXq_t06Hjdr2g_KdKpHQg",
|
||||||
|
"published": "2022-05-21T00:00:00Z",
|
||||||
|
"downloaded": null,
|
||||||
|
"description": "https://media.ccc.de/v/gpn20-60-log4shell-bug-oder-feature\n\n\n\nUm den Jahreswechsel ging ein Aufschrei durch die IT-Abteilungen der Welt, der es bis in die Mainstream-Medien geschafft hat. Noch Wochen später zeigen sich Folgeprobleme in weit verbreiteter Software.\n \nIn Log4j, einer weit verbreiteten Java-Bibliothek wurde eine massive Sicherheitslücke gefunden, die die Ausführung von Schadcode auf einem entfernten System erlaubt.\nIn diesem Vortrag soll rekapitulierend erklärt werden, warum und wann es zu dem Problem kam und welche Auswirkungen bisher erkennbar sind. Ausserdem werden die technischen Details der Schwachstelle erklärt und in einer Live-Demo gezeigt, wie die Schwachstelle ausgenutzt werden kann.\n\n\n\ncy\n\nhttps://cfp.gulas.ch/gpn20/talk/77BCXN/\n\n#gpn20 #Security",
|
||||||
|
"duration": 3547,
|
||||||
|
"is_livestream": false,
|
||||||
|
"is_short": false,
|
||||||
|
"download_size": null
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"model": "ucast.video",
|
||||||
|
"pk": "I0RRENheeTo",
|
||||||
|
"fields": {
|
||||||
|
"title": "No copyright intro free fire intro | no text | free copy right | free templates | free download",
|
||||||
|
"slug": "20211010_No_copyright_intro_free_fire_intro_no_text_free_copy_right_free_templates_free_download",
|
||||||
|
"channel": "UCmLTTbctUZobNQrr8RtX8uQ",
|
||||||
|
"published": "2021-10-10T00:00:00Z",
|
||||||
|
"downloaded": null,
|
||||||
|
"description": "Like Video▬▬▬▬▬❤\uD83D\uDC4D❤\n▬▬\uD83D\uDC47SUBSCRIBE OUR CHANNEL FOR LATEST UPDATES\uD83D\uDC46▬▬\nThis Channel: https://www.youtube.com/channel/UCmLTTbctUZobNQrr8RtX8uQ?sub_confirmation=1\nOther Channel: https://www.youtube.com/channel/UCKtfYFXi5A4KLIUdjgvfmHg?sub_confirmation=1\n▬▬▬▬▬▬▬▬/Subscription Free\\▬▬▬▬▬▬▬▬▬\n▬▬▬▬▬\uD83C\uDF81...Share Video To Friends...\uD83C\uDF81▬▬▬▬▬▬▬\n▬▬▬▬\uD83E\uDD14...Comment Any Questions....\uD83E\uDD14▬▬▬▬▬▬\nHello friends, \n Shahzaib Hassan and you are watching Creative Commons YouTube channel. On this channel, you will find all the videos absolutely free copyright which you can download and use in any project.\n It is copyright free so you won't have any problem using end screen for YouTube. if you use it or download and reupload it to your channel. By doing this you can use it for YouTube its use is absolutely free.\n ►I hope you'll like the video.◄\n ►Thanks For Watching◄ \nIf you really like this video then please don't forget to...\n\n\n▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬\n▬▬▬▬▬▬▬▬▬▬Tags\uD83D\uDC47▬▬▬▬▬▬▬▬▬▬\n#Creativecommons #commoncreative #free #freecopyright #nocopyright #nowatermark #freetouse #intro #notext #fireefire #channelintro",
|
||||||
|
"duration": 8,
|
||||||
|
"is_livestream": false,
|
||||||
|
"is_short": false,
|
||||||
|
"download_size": null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
|
@ -1,8 +1,9 @@
|
||||||
### Quellen der Thumbnails/Avatarbilder zum Testen
|
### Quellen der Thumbnails/Avatarbilder/Audiodateien zum Testen
|
||||||
|
|
||||||
- a1/t1: [ThetaDev @ Embedded World 2019](https://www.youtube.com/watch?v=ZPxEr4YdWt8), by [ThetaDev](https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q) (CC-BY)
|
- a1/t1: [ThetaDev @ Embedded World 2019](https://www.youtube.com/watch?v=ZPxEr4YdWt8), by [ThetaDev](https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q) (CC-BY)
|
||||||
- a2/t2: [Sintel - Open Movie by Blender Foundation](https://www.youtube.com/watch?v=eRsGyueVLvQ), by [Blender](https://www.youtube.com/c/BlenderFoundation) (CC-BY)
|
- a2/t2: [Sintel - Open Movie by Blender Foundation](https://www.youtube.com/watch?v=eRsGyueVLvQ), by [Blender](https://www.youtube.com/c/BlenderFoundation) (CC-BY)
|
||||||
- a3/t3: [Systemabsturz Teaser zur DiVOC bb3](https://www.youtube.com/watch?v=uFqgQ35wyYY), by [media.ccc.de](https://www.youtube.com/channel/UC2TXq_t06Hjdr2g_KdKpHQg) (CC-BY)
|
- a3/t3: [Systemabsturz Teaser zur DiVOC bb3](https://www.youtube.com/watch?v=uFqgQ35wyYY), by [media.ccc.de](https://www.youtube.com/channel/UC2TXq_t06Hjdr2g_KdKpHQg) (CC-BY)
|
||||||
|
- audio1: [No copyright intro free fire intro](https://www.youtube.com/watch?v=I0RRENheeTo), by [Shahzaib Hassan](https://www.youtube.com/channel/UCmLTTbctUZobNQrr8RtX8uQ), (CC-BY)
|
||||||
|
|
||||||
### Weitere Testvideos
|
### Weitere Testvideos
|
||||||
|
|
BIN
ucast/tests/_testfiles/thumbnail/Cda4zS-1j-k.webp
Normal file
After Width: | Height: | Size: 96 KiB |
Before Width: | Height: | Size: 92 KiB After Width: | Height: | Size: 92 KiB |
BIN
ucast/tests/_testfiles/thumbnail/_I5IFObm_-k.webp
Normal file
After Width: | Height: | Size: 67 KiB |
BIN
ucast/tests/_testfiles/thumbnail/mmEDPbbSnaY.webp
Normal file
After Width: | Height: | Size: 60 KiB |
BIN
ucast/tests/_testfiles/thumbnail/t1.webp
Normal file
After Width: | Height: | Size: 92 KiB |
Before Width: | Height: | Size: 20 KiB After Width: | Height: | Size: 20 KiB |
Before Width: | Height: | Size: 28 KiB After Width: | Height: | Size: 28 KiB |
106
ucast/tests/conftest.py
Normal file
|
@ -0,0 +1,106 @@
|
||||||
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import rq
|
||||||
|
from django.conf import settings
|
||||||
|
from django.core.management import call_command
|
||||||
|
from fakeredis import FakeRedis
|
||||||
|
|
||||||
|
from ucast import queue, tests
|
||||||
|
from ucast.models import Video
|
||||||
|
from ucast.service import cover, storage, util, videoutil, youtube
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def django_db_setup(django_db_setup, django_db_blocker):
|
||||||
|
with django_db_blocker.unblock():
|
||||||
|
fixture_path = tests.DIR_TESTFILES / "fixture" / "videos.json"
|
||||||
|
call_command("loaddata", fixture_path)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def download_dir() -> Path:
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
settings.DOWNLOAD_ROOT = tmpdir
|
||||||
|
|
||||||
|
# Copy channel avatars
|
||||||
|
store = storage.Storage()
|
||||||
|
|
||||||
|
for slug, avatar in (
|
||||||
|
("ThetaDev", "a1"),
|
||||||
|
("media_ccc_de", "a3"),
|
||||||
|
("Creative_Commons", "a4"),
|
||||||
|
):
|
||||||
|
cf = store.get_channel_folder(slug)
|
||||||
|
shutil.copyfile(
|
||||||
|
tests.DIR_TESTFILES / "avatar" / f"{avatar}.jpg", cf.file_avatar
|
||||||
|
)
|
||||||
|
util.resize_avatar(cf.file_avatar, cf.file_avatar_sm)
|
||||||
|
|
||||||
|
yield tmpdir
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def download_dir_content(download_dir) -> Path:
|
||||||
|
store = storage.Storage()
|
||||||
|
|
||||||
|
for video in Video.objects.filter(downloaded__isnull=False):
|
||||||
|
cf = store.get_channel_folder(video.channel.slug)
|
||||||
|
file_audio = cf.get_audio(video.slug)
|
||||||
|
file_tn = cf.get_thumbnail(video.slug)
|
||||||
|
file_cover = cf.get_cover(video.slug)
|
||||||
|
|
||||||
|
shutil.copyfile(tests.DIR_TESTFILES / "audio" / "audio1.mp3", file_audio)
|
||||||
|
shutil.copyfile(tests.DIR_TESTFILES / "thumbnail" / f"{video.id}.webp", file_tn)
|
||||||
|
util.resize_thumbnail(file_tn, cf.get_thumbnail(video.slug, True))
|
||||||
|
cover.create_cover_file(
|
||||||
|
file_tn,
|
||||||
|
cf.file_avatar,
|
||||||
|
video.title,
|
||||||
|
video.channel.name,
|
||||||
|
cover.COVER_STYLE_BLUR,
|
||||||
|
file_cover,
|
||||||
|
)
|
||||||
|
videoutil.tag_audio(file_audio, video, file_cover)
|
||||||
|
|
||||||
|
yield download_dir
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def rq_queue(mocker) -> rq.Queue:
|
||||||
|
test_queue = rq.Queue(is_async=False, connection=FakeRedis())
|
||||||
|
mocker.patch.object(queue, "get_queue")
|
||||||
|
queue.get_queue.return_value = test_queue
|
||||||
|
return test_queue
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_download_audio(mocker) -> mock.Mock:
|
||||||
|
def mockfn_download_audio(
|
||||||
|
video_id: str, download_path: Path, sponsorblock=False
|
||||||
|
) -> youtube.VideoDetails:
|
||||||
|
shutil.copyfile(tests.DIR_TESTFILES / "audio" / "audio1.mp3", download_path)
|
||||||
|
return tests.get_video_details(video_id)
|
||||||
|
|
||||||
|
download_mock: mock.Mock = mocker.patch.object(youtube, "download_audio")
|
||||||
|
download_mock.side_effect = mockfn_download_audio
|
||||||
|
return download_mock
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_get_video_details(mocker) -> mock.Mock:
|
||||||
|
video_details_mock: mock.Mock = mocker.patch.object(youtube, "get_video_details")
|
||||||
|
video_details_mock.side_effect = tests.get_video_details
|
||||||
|
return video_details_mock
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_get_channel_metadata(mocker) -> mock.Mock:
|
||||||
|
channel_meta_mock: mock.Mock = mocker.patch.object(youtube, "get_channel_metadata")
|
||||||
|
channel_meta_mock.side_effect = tests.get_channel_metadata
|
||||||
|
return channel_meta_mock
|
0
ucast/tests/service/__init__.py
Normal file
56
ucast/tests/service/test_storage.py
Normal file
|
@ -0,0 +1,56 @@
|
||||||
|
import os
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from ucast.service import storage
|
||||||
|
|
||||||
|
|
||||||
|
def test_create_channel_folders(settings):
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
settings.DOWNLOAD_ROOT = tmpdir
|
||||||
|
|
||||||
|
store = storage.Storage()
|
||||||
|
cf1 = store.get_channel_folder("ThetaDev")
|
||||||
|
cf2 = store.get_channel_folder("Jeff_Geerling")
|
||||||
|
cf1b = store.get_channel_folder("ThetaDev")
|
||||||
|
|
||||||
|
cf1_path = tmpdir / "ThetaDev"
|
||||||
|
cf2_path = tmpdir / "Jeff_Geerling"
|
||||||
|
|
||||||
|
assert cf1.dir_root == cf1_path
|
||||||
|
assert cf1b.dir_root == cf1_path
|
||||||
|
assert cf2.dir_root == cf2_path
|
||||||
|
|
||||||
|
assert os.path.isdir(cf1_path)
|
||||||
|
assert os.path.isdir(cf2_path)
|
||||||
|
|
||||||
|
|
||||||
|
def test_channel_folder():
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
ucast_dir = tmpdir / "_ucast"
|
||||||
|
|
||||||
|
cf = storage.ChannelFolder(tmpdir)
|
||||||
|
|
||||||
|
# Verify internal paths
|
||||||
|
assert cf.file_avatar == ucast_dir / "avatar.jpg"
|
||||||
|
assert cf.file_avatar_sm == ucast_dir / "avatar_sm.webp"
|
||||||
|
assert cf.dir_covers == ucast_dir / "covers"
|
||||||
|
assert cf.dir_thumbnails == ucast_dir / "thumbnails"
|
||||||
|
|
||||||
|
# Create the folder
|
||||||
|
assert not cf.does_exist()
|
||||||
|
cf.create()
|
||||||
|
assert cf.does_exist()
|
||||||
|
|
||||||
|
assert cf.get_cover("my_video_title") == ucast_dir / "covers" / "my_video_title.png"
|
||||||
|
assert (
|
||||||
|
cf.get_thumbnail("my_video_title")
|
||||||
|
== ucast_dir / "thumbnails" / "my_video_title.webp"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
cf.get_thumbnail("my_video_title", True)
|
||||||
|
== ucast_dir / "thumbnails" / "my_video_title_sm.webp"
|
||||||
|
)
|
||||||
|
assert cf.get_audio("my_video_title") == tmpdir / "my_video_title.mp3"
|
92
ucast/tests/service/test_util.py
Normal file
|
@ -0,0 +1,92 @@
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from PIL import Image, ImageChops
|
||||||
|
|
||||||
|
from ucast import tests
|
||||||
|
from ucast.service import util
|
||||||
|
|
||||||
|
TEST_FILE_URL = "https://yt3.ggpht.com/ytc/AKedOLSnFfmpibLLoqyaYdsF6bJ-zaLPzomII__FrJve1w=s900-c-k-c0x00ffffff-no-rj"
|
||||||
|
|
||||||
|
|
||||||
|
def test_download_file():
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
download_file = tmpdir / "download.jpg"
|
||||||
|
expected_tn_file = tests.DIR_TESTFILES / "avatar" / "a1.jpg"
|
||||||
|
|
||||||
|
util.download_file(TEST_FILE_URL, download_file)
|
||||||
|
|
||||||
|
downloaded_avatar = Image.open(download_file)
|
||||||
|
expected_avatar = Image.open(expected_tn_file)
|
||||||
|
|
||||||
|
diff = ImageChops.difference(downloaded_avatar, expected_avatar)
|
||||||
|
assert diff.getbbox() is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_download_image_file():
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
download_file = tmpdir / "download.jpg"
|
||||||
|
expected_tn_file = tests.DIR_TESTFILES / "avatar" / "a1.jpg"
|
||||||
|
|
||||||
|
util.download_image_file(TEST_FILE_URL, download_file)
|
||||||
|
|
||||||
|
downloaded_avatar = Image.open(download_file)
|
||||||
|
expected_avatar = Image.open(expected_tn_file)
|
||||||
|
|
||||||
|
diff = ImageChops.difference(downloaded_avatar, expected_avatar)
|
||||||
|
assert diff.getbbox() is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_download_image_file_conv():
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
download_file = tmpdir / "download.png"
|
||||||
|
expected_tn_file = tests.DIR_TESTFILES / "avatar" / "a1.jpg"
|
||||||
|
|
||||||
|
util.download_image_file(TEST_FILE_URL, download_file)
|
||||||
|
|
||||||
|
downloaded_avatar = Image.open(download_file)
|
||||||
|
expected_avatar = Image.open(expected_tn_file)
|
||||||
|
|
||||||
|
diff = ImageChops.difference(downloaded_avatar, expected_avatar)
|
||||||
|
assert diff.getbbox() is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_resize_avatar():
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
source_file = tests.DIR_TESTFILES / "avatar" / "a1.jpg"
|
||||||
|
resized_file = tmpdir / "avatar.webp"
|
||||||
|
|
||||||
|
util.resize_avatar(source_file, resized_file)
|
||||||
|
|
||||||
|
resized_avatar = Image.open(resized_file)
|
||||||
|
assert resized_avatar.size == (100, 100)
|
||||||
|
|
||||||
|
|
||||||
|
def test_resize_thumbnail():
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
source_file = tests.DIR_TESTFILES / "thumbnail" / "t1.webp"
|
||||||
|
resized_file = tmpdir / "thumbnail.webp"
|
||||||
|
|
||||||
|
util.resize_thumbnail(source_file, resized_file)
|
||||||
|
|
||||||
|
resized_thumbnail = Image.open(resized_file)
|
||||||
|
assert resized_thumbnail.size == (360, 202)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"text,expected_slug",
|
||||||
|
[
|
||||||
|
("Hello World 👋", "Hello_World"),
|
||||||
|
("ÄäÖöÜüß", "AaOoUuss"),
|
||||||
|
("오징어 게임", "ojingeo_geim"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_slug(text: str, expected_slug: str):
|
||||||
|
slug = util.get_slug(text)
|
||||||
|
assert slug == expected_slug
|
52
ucast/tests/service/test_videoutil.py
Normal file
|
@ -0,0 +1,52 @@
|
||||||
|
import io
|
||||||
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from mutagen import id3
|
||||||
|
from PIL import Image, ImageChops
|
||||||
|
|
||||||
|
from ucast import tests
|
||||||
|
from ucast.models import Video
|
||||||
|
from ucast.service import videoutil
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_tag_audio():
|
||||||
|
video = Video.objects.get(id="ZPxEr4YdWt8")
|
||||||
|
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
audio_file = tmpdir / "audio.mp3"
|
||||||
|
cover_file = tests.DIR_TESTFILES / "cover" / "c1_blur.png"
|
||||||
|
shutil.copyfile(tests.DIR_TESTFILES / "audio" / "audio1.mp3", audio_file)
|
||||||
|
|
||||||
|
videoutil.tag_audio(audio_file, video, cover_file)
|
||||||
|
|
||||||
|
tag = id3.ID3(audio_file)
|
||||||
|
assert tag["TPE1"].text[0] == "ThetaDev"
|
||||||
|
assert tag["TALB"].text[0] == "ThetaDev"
|
||||||
|
assert tag["TIT2"].text[0] == "2019-06-02 ThetaDev @ Embedded World 2019"
|
||||||
|
assert tag["TDRC"].text[0].text == "2019-06-02"
|
||||||
|
assert (
|
||||||
|
tag["COMM::XXX"].text[0]
|
||||||
|
== """This february I spent one day at the Embedded World in Nuremberg. They showed tons of interesting electronics stuff, so I had to take some pictures and videos for you to see ;-)
|
||||||
|
|
||||||
|
Sorry for the late upload, I just didn't have time to edit my footage.
|
||||||
|
|
||||||
|
Embedded World: https://www.embedded-world.de/
|
||||||
|
|
||||||
|
My website: https://thdev.org
|
||||||
|
Twitter: https://twitter.com/Theta_Dev
|
||||||
|
|
||||||
|
https://youtu.be/ZPxEr4YdWt8"""
|
||||||
|
)
|
||||||
|
|
||||||
|
tag_cover = tag["APIC:Cover"]
|
||||||
|
assert tag_cover.mime == "image/png"
|
||||||
|
|
||||||
|
tag_cover_img = Image.open(io.BytesIO(tag_cover.data))
|
||||||
|
expected_cover_img = Image.open(cover_file)
|
||||||
|
diff = ImageChops.difference(tag_cover_img, expected_cover_img)
|
||||||
|
assert diff.getbbox() is None
|
174
ucast/tests/service/test_youtube.py
Normal file
|
@ -0,0 +1,174 @@
|
||||||
|
import datetime
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from PIL import Image, ImageChops
|
||||||
|
|
||||||
|
from ucast import tests
|
||||||
|
from ucast.service import youtube
|
||||||
|
|
||||||
|
VIDEO_ID_THETADEV = "ZPxEr4YdWt8"
|
||||||
|
VIDEO_ID_SHORT = "lcQZ6YwQHiw"
|
||||||
|
VIDEO_ID_PERSUASION = "DWjFW7Yq1fA"
|
||||||
|
|
||||||
|
CHANNEL_ID_THETADEV = "UCGiJh0NZ52wRhYKYnuZI08Q"
|
||||||
|
CHANNEL_ID_BLENDER = "UCSMOQeBJ2RAnuFungnQOxLg"
|
||||||
|
CHANNEL_URL_BLENDER = "https://www.youtube.com/c/BlenderFoundation"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
|
||||||
|
def video_details() -> youtube.VideoDetails:
|
||||||
|
return youtube.get_video_details(VIDEO_ID_THETADEV)
|
||||||
|
|
||||||
|
|
||||||
|
def test_download_thumbnail(video_details):
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
tn_file = tmpdir / "thumbnail.webp"
|
||||||
|
expected_tn_file = tests.DIR_TESTFILES / "thumbnail" / "t1.webp"
|
||||||
|
|
||||||
|
youtube.download_thumbnail(video_details, tn_file)
|
||||||
|
|
||||||
|
tn = Image.open(tn_file)
|
||||||
|
expected_tn = Image.open(expected_tn_file)
|
||||||
|
|
||||||
|
diff = ImageChops.difference(tn, expected_tn)
|
||||||
|
assert diff.getbbox() is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_video_details(video_details):
|
||||||
|
assert video_details.id == VIDEO_ID_THETADEV
|
||||||
|
assert video_details.title == "ThetaDev @ Embedded World 2019"
|
||||||
|
assert video_details.channel_id == "UCGiJh0NZ52wRhYKYnuZI08Q"
|
||||||
|
assert (
|
||||||
|
video_details.description
|
||||||
|
== """This february I spent one day at the Embedded World in Nuremberg. They showed tons of interesting electronics stuff, so I had to take some pictures and videos for you to see ;-)
|
||||||
|
|
||||||
|
Sorry for the late upload, I just didn't have time to edit my footage.
|
||||||
|
|
||||||
|
Embedded World: https://www.embedded-world.de/
|
||||||
|
|
||||||
|
My website: https://thdev.org
|
||||||
|
Twitter: https://twitter.com/Theta_Dev"""
|
||||||
|
)
|
||||||
|
assert video_details.duration == 267
|
||||||
|
assert not video_details.is_currently_live
|
||||||
|
assert not video_details.is_livestream
|
||||||
|
assert not video_details.is_short
|
||||||
|
assert video_details.published == datetime.datetime(
|
||||||
|
2019, 6, 2, tzinfo=datetime.timezone.utc
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_video_details_short():
|
||||||
|
vinfo = youtube.get_video_details(VIDEO_ID_SHORT)
|
||||||
|
assert vinfo.id == VIDEO_ID_SHORT
|
||||||
|
assert (
|
||||||
|
vinfo.title
|
||||||
|
== "Small pink flowers | #shorts | Free Stock Video | \
|
||||||
|
creative commons short videos | creative #short"
|
||||||
|
)
|
||||||
|
assert not vinfo.is_currently_live
|
||||||
|
assert not vinfo.is_livestream
|
||||||
|
assert vinfo.is_short
|
||||||
|
|
||||||
|
|
||||||
|
def test_download_audio():
|
||||||
|
tmpdir_o = tempfile.TemporaryDirectory()
|
||||||
|
tmpdir = Path(tmpdir_o.name)
|
||||||
|
download_file = tmpdir / "download.mp3"
|
||||||
|
|
||||||
|
vinfo = youtube.download_audio(VIDEO_ID_PERSUASION, download_file)
|
||||||
|
assert vinfo.id == VIDEO_ID_PERSUASION
|
||||||
|
assert vinfo.title == "Persuasion (Instrumental) – RYYZN (No Copyright Music)"
|
||||||
|
assert vinfo.duration == 100
|
||||||
|
|
||||||
|
# Check with ffmpeg if the audio file is valid
|
||||||
|
res = subprocess.run(
|
||||||
|
["ffmpeg", "-i", str(download_file)],
|
||||||
|
capture_output=True,
|
||||||
|
universal_newlines=True,
|
||||||
|
)
|
||||||
|
assert "Stream #0:0: Audio: mp3" in res.stderr
|
||||||
|
|
||||||
|
match = re.search(r"Duration: (\d{2}:\d{2}:\d{2})", res.stderr)
|
||||||
|
assert match[1] == "00:01:40"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"channel_str,channel_url",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
"https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
"https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"https://www.youtube.com/c/MrBeast6000",
|
||||||
|
"https://www.youtube.com/c/MrBeast6000",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"https://www.youtube.com/user/LinusTechTips",
|
||||||
|
"https://www.youtube.com/user/LinusTechTips",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
"https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"https://piped.mha.fi/user/LinusTechTips",
|
||||||
|
"https://www.youtube.com/user/LinusTechTips",
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_channel_url_from_str(channel_str: str, channel_url: str):
|
||||||
|
url = youtube.channel_url_from_str(channel_str)
|
||||||
|
assert url == channel_url
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"channel_url,channel_id,name,avatar_url",
|
||||||
|
[
|
||||||
|
(
|
||||||
|
youtube.channel_url_from_id(CHANNEL_ID_THETADEV),
|
||||||
|
CHANNEL_ID_THETADEV,
|
||||||
|
"ThetaDev",
|
||||||
|
"https://yt3.ggpht.com/ytc/AKedOLSnFfmpibLLoqyaYdsF6bJ-zaLPzomII__FrJve1w=s900-c-k-c0x00ffffff-no-rj",
|
||||||
|
),
|
||||||
|
(
|
||||||
|
CHANNEL_URL_BLENDER,
|
||||||
|
CHANNEL_ID_BLENDER,
|
||||||
|
"Blender",
|
||||||
|
"https://yt3.ggpht.com/ytc/AKedOLT_31fFSD3FWEBnHZnyZeJx-GPHJwYCQKcEpaq8NQ=s900-c-k-c0x00ffffff-no-rj",
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_channel_metadata(
|
||||||
|
channel_url: str, channel_id: str, name: str, avatar_url: str
|
||||||
|
):
|
||||||
|
metadata = youtube.get_channel_metadata(channel_url)
|
||||||
|
assert metadata.id == channel_id
|
||||||
|
assert metadata.name == name
|
||||||
|
assert metadata.avatar_url == avatar_url
|
||||||
|
assert metadata.description
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_channel_videos_from_feed():
|
||||||
|
videos = youtube.get_channel_videos_from_feed(CHANNEL_ID_THETADEV)
|
||||||
|
assert videos
|
||||||
|
|
||||||
|
v1 = videos[0]
|
||||||
|
assert len(v1.id) == 11
|
||||||
|
assert v1.published.tzinfo == datetime.timezone.utc
|
||||||
|
assert v1.published.second > 0 or v1.published.minute > 0 or v1.published.hour > 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_channel_videos_from_scraper():
|
||||||
|
videos = youtube.get_channel_videos_from_scraper(CHANNEL_ID_THETADEV)
|
||||||
|
assert videos
|
||||||
|
|
||||||
|
v1 = videos[0]
|
||||||
|
assert len(v1.id) == 11
|
||||||
|
assert v1.published is None
|
0
ucast/tests/tasks/__init__.py
Normal file
81
ucast/tests/tasks/test_download.py
Normal file
|
@ -0,0 +1,81 @@
|
||||||
|
import os
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from ucast import queue, tests
|
||||||
|
from ucast.models import Channel, Video
|
||||||
|
from ucast.service import storage
|
||||||
|
from ucast.tasks import download
|
||||||
|
|
||||||
|
CHANNEL_ID_THETADEV = "UCGiJh0NZ52wRhYKYnuZI08Q"
|
||||||
|
VIDEO_ID_INTRO = "I0RRENheeTo"
|
||||||
|
VIDEO_SLUG_INTRO = "20211010_No_copyright_intro_free_fire_intro_no_text_free_copy_right_free_templates_free_download"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_download_video(download_dir, rq_queue):
|
||||||
|
video = Video.objects.get(id=VIDEO_ID_INTRO)
|
||||||
|
job = queue.enqueue(download.download_video, video)
|
||||||
|
|
||||||
|
store = storage.Storage()
|
||||||
|
cf = store.get_channel_folder(video.channel.slug)
|
||||||
|
|
||||||
|
assert job.is_finished
|
||||||
|
|
||||||
|
assert os.path.isfile(cf.get_audio(VIDEO_SLUG_INTRO))
|
||||||
|
assert os.path.isfile(cf.get_cover(VIDEO_SLUG_INTRO))
|
||||||
|
assert os.path.isfile(cf.get_thumbnail(VIDEO_SLUG_INTRO))
|
||||||
|
assert os.path.isfile(cf.get_thumbnail(VIDEO_SLUG_INTRO, True))
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_import_channel(
|
||||||
|
download_dir, rq_queue, mock_get_video_details, mock_download_audio
|
||||||
|
):
|
||||||
|
# Remove 2 videos from the database so they can be imported
|
||||||
|
Video.objects.get(id="ZPxEr4YdWt8").delete()
|
||||||
|
Video.objects.get(id="_I5IFObm_-k").delete()
|
||||||
|
|
||||||
|
job = rq_queue.enqueue(download.import_channel, CHANNEL_ID_THETADEV)
|
||||||
|
assert job.is_finished
|
||||||
|
|
||||||
|
mock_download_audio.assert_any_call(
|
||||||
|
"_I5IFObm_-k",
|
||||||
|
download_dir / "ThetaDev" / "20180331_Easter_special_3D_printed_Bunny.mp3",
|
||||||
|
)
|
||||||
|
mock_download_audio.assert_any_call(
|
||||||
|
"ZPxEr4YdWt8",
|
||||||
|
download_dir / "ThetaDev" / "20190602_ThetaDev_Embedded_World_2019.mp3",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_update_channel(
|
||||||
|
download_dir, rq_queue, mock_get_video_details, mock_download_audio
|
||||||
|
):
|
||||||
|
# Remove 2 videos from the database so they can be imported
|
||||||
|
Video.objects.get(id="ZPxEr4YdWt8").delete()
|
||||||
|
Video.objects.get(id="_I5IFObm_-k").delete()
|
||||||
|
|
||||||
|
channel = Channel.objects.get(id=CHANNEL_ID_THETADEV)
|
||||||
|
job = rq_queue.enqueue(download.update_channel, channel)
|
||||||
|
assert job.is_finished
|
||||||
|
|
||||||
|
mock_download_audio.assert_any_call(
|
||||||
|
"_I5IFObm_-k",
|
||||||
|
download_dir / "ThetaDev" / "20180331_Easter_special_3D_printed_Bunny.mp3",
|
||||||
|
)
|
||||||
|
mock_download_audio.assert_any_call(
|
||||||
|
"ZPxEr4YdWt8",
|
||||||
|
download_dir / "ThetaDev" / "20190602_ThetaDev_Embedded_World_2019.mp3",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_update_channels(rq_queue, mocker):
|
||||||
|
update_channel_mock = tests.GlobalMock()
|
||||||
|
mocker.patch.object(download, "update_channel", update_channel_mock)
|
||||||
|
job = rq_queue.enqueue(download.update_channels)
|
||||||
|
assert job.is_finished
|
||||||
|
|
||||||
|
assert update_channel_mock.n_calls == 3
|
72
ucast/tests/tasks/test_library.py
Normal file
|
@ -0,0 +1,72 @@
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from ucast import tests
|
||||||
|
from ucast.models import Channel, Video
|
||||||
|
from ucast.service import cover, storage
|
||||||
|
from ucast.tasks import library
|
||||||
|
|
||||||
|
CHANNEL_ID_THETADEV = "UCGiJh0NZ52wRhYKYnuZI08Q"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_recreate_cover(download_dir_content, rq_queue, mocker):
|
||||||
|
create_cover_mock: mock.Mock = mocker.patch.object(cover, "create_cover_file")
|
||||||
|
|
||||||
|
video = Video.objects.get(id="ZPxEr4YdWt8")
|
||||||
|
|
||||||
|
store = storage.Storage()
|
||||||
|
cf = store.get_channel_folder(video.channel.slug)
|
||||||
|
|
||||||
|
job = rq_queue.enqueue(library.recreate_cover, video)
|
||||||
|
assert job.is_finished
|
||||||
|
|
||||||
|
create_cover_mock.assert_called_once_with(
|
||||||
|
cf.get_thumbnail(video.slug),
|
||||||
|
cf.file_avatar,
|
||||||
|
video.title,
|
||||||
|
video.channel.name,
|
||||||
|
cover.COVER_STYLE_BLUR,
|
||||||
|
cf.get_cover(video.slug),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_recreate_covers(rq_queue, mocker):
|
||||||
|
recreate_cover_mock = tests.GlobalMock()
|
||||||
|
mocker.patch.object(library, "recreate_cover", recreate_cover_mock)
|
||||||
|
|
||||||
|
job = rq_queue.enqueue(library.recreate_covers)
|
||||||
|
assert job.is_finished
|
||||||
|
|
||||||
|
assert recreate_cover_mock.n_calls == 4
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_update_channel_info(rq_queue, mock_get_channel_metadata):
|
||||||
|
channel = Channel.objects.get(id=CHANNEL_ID_THETADEV)
|
||||||
|
|
||||||
|
channel.description = "Old description"
|
||||||
|
channel.save()
|
||||||
|
|
||||||
|
job = rq_queue.enqueue(library.update_channel_info, channel)
|
||||||
|
assert job.is_finished
|
||||||
|
|
||||||
|
channel.refresh_from_db()
|
||||||
|
assert (
|
||||||
|
channel.description
|
||||||
|
== "I'm ThetaDev. I love creating cool projects \
|
||||||
|
using electronics, 3D printers and other awesome tech-based stuff."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.django_db
|
||||||
|
def test_update_channel_infos(rq_queue, mocker):
|
||||||
|
update_channel_mock = tests.GlobalMock()
|
||||||
|
mocker.patch.object(library, "update_channel_info", update_channel_mock)
|
||||||
|
|
||||||
|
job = rq_queue.enqueue(library.update_channel_infos)
|
||||||
|
assert job.is_finished
|
||||||
|
|
||||||
|
assert update_channel_mock.n_calls == 3
|
|
@ -1,24 +0,0 @@
|
||||||
import tempfile
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
from PIL import Image, ImageChops
|
|
||||||
|
|
||||||
from ucast import tests
|
|
||||||
from ucast.service import util
|
|
||||||
|
|
||||||
TEST_FILE_URL = "https://yt3.ggpht.com/ytc/AKedOLSnFfmpibLLoqyaYdsF6bJ-zaLPzomII__FrJve1w=s900-c-k-c0x00ffffff-no-rj"
|
|
||||||
|
|
||||||
|
|
||||||
def test_download_file():
|
|
||||||
tmpdir_o = tempfile.TemporaryDirectory()
|
|
||||||
tmpdir = Path(tmpdir_o.name)
|
|
||||||
download_file = tmpdir / "download.jpg"
|
|
||||||
expected_tn_file = tests.DIR_TESTFILES / "avatar" / "a1.jpg"
|
|
||||||
|
|
||||||
util.download_file(TEST_FILE_URL, download_file)
|
|
||||||
|
|
||||||
downloaded_avatar = Image.open(download_file)
|
|
||||||
expected_avatar = Image.open(expected_tn_file)
|
|
||||||
|
|
||||||
diff = ImageChops.difference(downloaded_avatar, expected_avatar)
|
|
||||||
assert diff.getbbox() is None
|
|
|
@ -1,132 +0,0 @@
|
||||||
import datetime
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import tempfile
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from PIL import Image, ImageChops
|
|
||||||
|
|
||||||
from ucast import tests
|
|
||||||
from ucast.service import youtube
|
|
||||||
|
|
||||||
VIDEO_ID_SINTEL = "eRsGyueVLvQ"
|
|
||||||
VIDEO_ID_SHORT = "lcQZ6YwQHiw"
|
|
||||||
VIDEO_ID_PERSUASION = "DWjFW7Yq1fA"
|
|
||||||
|
|
||||||
CHANNEL_ID_THETADEV = "UCGiJh0NZ52wRhYKYnuZI08Q"
|
|
||||||
CHANNEL_ID_BLENDER = "UCSMOQeBJ2RAnuFungnQOxLg"
|
|
||||||
CHANNEL_URL_BLENDER = "https://www.youtube.com/c/BlenderFoundation"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="module")
|
|
||||||
def video_info() -> youtube.VideoDetails:
|
|
||||||
return youtube.get_video_details(VIDEO_ID_SINTEL)
|
|
||||||
|
|
||||||
|
|
||||||
def test_download_thumbnail(video_info):
|
|
||||||
tmpdir_o = tempfile.TemporaryDirectory()
|
|
||||||
tmpdir = Path(tmpdir_o.name)
|
|
||||||
tn_file = tmpdir / "thumbnail"
|
|
||||||
expected_tn_file = tests.DIR_TESTFILES / "thumbnail" / "t2.webp"
|
|
||||||
|
|
||||||
tn_file = youtube.download_thumbnail(video_info, tn_file)
|
|
||||||
assert tn_file.suffix == ".webp"
|
|
||||||
|
|
||||||
tn = Image.open(tn_file)
|
|
||||||
expected_tn = Image.open(expected_tn_file)
|
|
||||||
|
|
||||||
diff = ImageChops.difference(tn, expected_tn)
|
|
||||||
assert diff.getbbox() is None
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_video_info(video_info):
|
|
||||||
assert video_info.id == VIDEO_ID_SINTEL
|
|
||||||
assert video_info.title == "Sintel - Open Movie by Blender Foundation"
|
|
||||||
assert video_info.channel_id == "UCSMOQeBJ2RAnuFungnQOxLg"
|
|
||||||
assert (
|
|
||||||
video_info.description
|
|
||||||
== """Help us making Free/Open Movies: https://cloud.blender.org/join
|
|
||||||
|
|
||||||
"Sintel" is an independently produced short film, initiated by the Blender Foundation \
|
|
||||||
as a means to further improve and validate the free/open source 3D creation suite \
|
|
||||||
Blender. With initial funding provided by 1000s of donations via the internet \
|
|
||||||
community, it has \
|
|
||||||
again proven to be a viable development model for both open 3D technology as for \
|
|
||||||
independent animation film.
|
|
||||||
This 15 minute film has been realized in the studio of the Amsterdam Blender \
|
|
||||||
Institute, by an international team of artists and developers. In addition to \
|
|
||||||
that, several crucial technical and creative targets have been realized online, \
|
|
||||||
by developers and artists and teams all over the world.
|
|
||||||
|
|
||||||
www.sintel.org"""
|
|
||||||
)
|
|
||||||
assert video_info.duration == 888
|
|
||||||
assert not video_info.is_currently_live
|
|
||||||
assert not video_info.is_livestream
|
|
||||||
assert not video_info.is_short
|
|
||||||
assert video_info.published == datetime.datetime(
|
|
||||||
2010, 9, 30, tzinfo=datetime.timezone.utc
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_video_info_short():
|
|
||||||
vinfo = youtube.get_video_details(VIDEO_ID_SHORT)
|
|
||||||
assert vinfo.id == VIDEO_ID_SHORT
|
|
||||||
assert (
|
|
||||||
vinfo.title
|
|
||||||
== "Small pink flowers | #shorts | Free Stock Video | \
|
|
||||||
creative commons short videos | creative #short"
|
|
||||||
)
|
|
||||||
assert not vinfo.is_currently_live
|
|
||||||
assert not vinfo.is_livestream
|
|
||||||
assert vinfo.is_short
|
|
||||||
|
|
||||||
|
|
||||||
def test_download_video():
    """Download a video as MP3 and verify the resulting file with ffmpeg.

    Fix: the temporary directory is now managed by a ``with`` block so it is
    removed deterministically instead of whenever the GC finalizes the
    ``TemporaryDirectory`` object; the duration regex match is asserted
    non-None before indexing so a failure reports cleanly instead of raising
    ``TypeError``.
    """
    with tempfile.TemporaryDirectory() as tmpdir_name:
        download_file = Path(tmpdir_name) / "download.mp3"

        vinfo = youtube.download_audio(VIDEO_ID_PERSUASION, download_file)
        assert vinfo.id == VIDEO_ID_PERSUASION
        assert vinfo.title == "Persuasion (Instrumental) – RYYZN (No Copyright Music)"
        assert vinfo.duration == 100

        # Check with ffmpeg if the audio file is valid. ffmpeg prints stream
        # info on stderr; with no output file it exits non-zero, so only the
        # stderr content is checked, not the return code.
        res = subprocess.run(
            ["ffmpeg", "-i", str(download_file)],
            capture_output=True,
            universal_newlines=True,
        )
        assert "Stream #0:0: Audio: mp3" in res.stderr

        match = re.search(r"Duration: (\d{2}:\d{2}:\d{2})", res.stderr)
        assert match is not None
        assert match[1] == "00:01:40"
|
||||||
@pytest.mark.parametrize(
    "channel_url,channel_id,name,avatar_url",
    [
        (
            youtube.channel_url_from_id(CHANNEL_ID_THETADEV),
            CHANNEL_ID_THETADEV,
            "ThetaDev",
            "https://yt3.ggpht.com/ytc/AKedOLSnFfmpibLLoqyaYdsF6bJ-zaLPzomII__FrJve1w=s900-c-k-c0x00ffffff-no-rj",
        ),
        (
            CHANNEL_URL_BLENDER,
            CHANNEL_ID_BLENDER,
            "Blender",
            "https://yt3.ggpht.com/ytc/AKedOLT_31fFSD3FWEBnHZnyZeJx-GPHJwYCQKcEpaq8NQ=s900-c-k-c0x00ffffff-no-rj",
        ),
    ],
)
def test_channel_metadata(
    channel_url: str, channel_id: str, name: str, avatar_url: str
):
    """Scrape a channel page and compare its metadata with known values."""
    meta = youtube.get_channel_metadata(channel_url)

    assert (meta.id, meta.name, meta.avatar_url) == (channel_id, name, avatar_url)
    # The exact description text is not pinned, only that one exists.
    assert meta.description
|
|
Before Width: | Height: | Size: 275 KiB |
5
ucast/urls.py
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
from django.urls import path
|
||||||
|
|
||||||
|
from ucast import views
|
||||||
|
|
||||||
|
urlpatterns = [path("", views.home)]
|
|
@ -1,3 +1,6 @@
|
||||||
from django.shortcuts import render # noqa: F401
|
from django import http
|
||||||
|
from django.shortcuts import render
|
||||||
|
|
||||||
def home(request: http.HttpRequest) -> http.HttpResponse:
    """Render the ucast start page."""
    return render(request, "ucast/main.html")
|
|
|
@ -21,7 +21,12 @@ VAR_PREFIX = "UCAST_"
|
||||||
|
|
||||||
|
|
||||||
def get_env(name, default=None):
    """Read a prefixed environment variable, coerced to the default's type.

    Looks up ``UCAST_<name>`` (via ``VAR_PREFIX``). If the variable is unset,
    the default is returned unchanged. If it is set and a non-None default is
    given, the raw string is converted with ``type(default)`` (e.g. ``int``),
    otherwise the raw string is returned.

    Fix: previously an unset variable caused ``type(default)(default)`` — a
    redundant and potentially lossy round-trip conversion of the default
    value itself. The default is now returned as-is.

    NOTE(review): ``type(default)`` mis-handles ``bool`` defaults (any
    non-empty string, including "0"/"false", becomes True). The visible
    callers only use int/str defaults — confirm before adding bool settings.
    """
    val_raw = os.environ.get(VAR_PREFIX + name)
    if val_raw is None:
        return default
    if default is not None:
        return type(default)(val_raw)
    return val_raw
||||||
def get_env_path(name, default=None):
|
def get_env_path(name, default=None):
|
||||||
|
@ -47,7 +52,6 @@ def _load_dotenv() -> Path:
|
||||||
|
|
||||||
if dotenv_path:
|
if dotenv_path:
|
||||||
dotenv.load_dotenv(dotenv_path)
|
dotenv.load_dotenv(dotenv_path)
|
||||||
print(f"Loaded config from envfile at {dotenv_path}")
|
|
||||||
default_workdir = Path(dotenv_path).resolve().parent
|
default_workdir = Path(dotenv_path).resolve().parent
|
||||||
|
|
||||||
os.chdir(default_workdir)
|
os.chdir(default_workdir)
|
||||||
|
@ -79,7 +83,7 @@ ALLOWED_HOSTS = []
|
||||||
# Application definition
|
# Application definition
|
||||||
|
|
||||||
INSTALLED_APPS = [
|
INSTALLED_APPS = [
|
||||||
"ucast.apps.UcastConfig",
|
"ucast",
|
||||||
"django.contrib.admin",
|
"django.contrib.admin",
|
||||||
"django.contrib.auth",
|
"django.contrib.auth",
|
||||||
"django.contrib.contenttypes",
|
"django.contrib.contenttypes",
|
||||||
|
@ -92,6 +96,7 @@ INSTALLED_APPS = [
|
||||||
MIDDLEWARE = [
|
MIDDLEWARE = [
|
||||||
"django.middleware.security.SecurityMiddleware",
|
"django.middleware.security.SecurityMiddleware",
|
||||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||||
|
"django.middleware.locale.LocaleMiddleware",
|
||||||
"django.middleware.common.CommonMiddleware",
|
"django.middleware.common.CommonMiddleware",
|
||||||
"django.middleware.csrf.CsrfViewMiddleware",
|
"django.middleware.csrf.CsrfViewMiddleware",
|
||||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||||
|
@ -196,5 +201,8 @@ STATICFILES_DIRS = [resources.path("ucast", "static")]
|
||||||
|
|
||||||
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
|
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
|
||||||
|
|
||||||
# Delay between YouTube API calls
|
REDIS_URL = get_env("REDIS_URL", "redis://localhost:6379")
|
||||||
YOUTUBE_SCRAPE_DELAY = 1
|
REDIS_QUEUE_TIMEOUT = get_env("REDIS_QUEUE_TIMEOUT", 600)
|
||||||
|
REDIS_QUEUE_RESULT_TTL = 600
|
||||||
|
|
||||||
|
YT_UPDATE_INTERVAL = get_env("YT_UPDATE_INTERVAL", 900)
|
||||||
|
|
|
@ -14,8 +14,9 @@ Including another URLconf
|
||||||
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
|
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
|
||||||
"""
|
"""
|
||||||
from django.contrib import admin
|
from django.contrib import admin
|
||||||
from django.urls import path
|
from django.urls import include, path
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path("admin/", admin.site.urls),
|
path("admin/", admin.site.urls),
|
||||||
|
path("", include("ucast.urls")),
|
||||||
]
|
]
|
||||||
|
|