Compare commits

...

3 commits

SHA1        Message                      Date
936a412caf  add tests for tasks          2022-05-24 01:11:19 +02:00
            (all checks were successful: continuous-integration/drone/push, build is passing)
0fa6e5c07d  migrated to vanilla rq       2022-05-21 18:36:29 +02:00
5d49098004  update ci environment vars   2022-05-21 11:55:45 +02:00
36 changed files with 3794 additions and 191 deletions

.drone.yml

@@ -24,31 +24,14 @@ steps:
     commands:
       - poetry run invoke lint
 
-  - name: start worker
-    image: thetadev256/ucast-dev
-    volumes:
-      - name: cache
-        path: /root/.cache
-    environment:
-      UCAST_REDIS_HOST: redis
-    commands:
-      - poetry run invoke worker
-    detach: true
-
   - name: test
     image: thetadev256/ucast-dev
     volumes:
       - name: cache
         path: /root/.cache
-    environment:
-      UCAST_REDIS_HOST: redis
     commands:
       - poetry run invoke test
 
-services:
-  - name: redis
-    image: redis:alpine
-
 volumes:
   - name: cache
     temp: { }

.editorconfig Normal file

@@ -0,0 +1,14 @@
[*]
charset = utf-8
indent_style = space
indent_size = 4
end_of_line = lf
trim_trailing_whitespace = true
insert_final_newline = true
max_line_length = 88

[{Makefile,*.go}]
indent_style = tab

[*.{json,md,rst,ini,yml,yaml}]
indent_size = 2

docker-compose.yml

@@ -1,7 +1,14 @@
 version: "3"
 services:
   redis:
-    container_name: ucast-redis
+    container_name: redis
     image: redis:alpine
     ports:
       - "127.0.0.1:6379:6379"
+  rq-dashboard:
+    image: eoranged/rq-dashboard
+    ports:
+      - "127.0.0.1:9181:9181"
+    environment:
+      RQ_DASHBOARD_REDIS_URL: "redis://redis:6379"

poetry.lock generated

@@ -128,14 +128,14 @@ Pillow = "*"
 [[package]]
 name = "coverage"
-version = "6.3.3"
+version = "6.4"
 description = "Code coverage measurement for Python"
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 
 [package.dependencies]
-tomli = {version = "*", optional = true, markers = "extra == \"toml\""}
+tomli = {version = "*", optional = true, markers = "python_version < \"3.11\" and extra == \"toml\""}
 
 [package.extras]
 toml = ["tomli"]
@@ -202,25 +202,26 @@ python-versions = ">=3.7"
 django = ">=2.2"
 
 [[package]]
-name = "django-rq"
-version = "2.5.1"
-description = "An app that provides django integration for RQ (Redis Queue)"
-category = "main"
+name = "fakeredis"
+version = "1.7.5"
+description = "Fake implementation of redis API for testing purposes."
+category = "dev"
 optional = false
-python-versions = "*"
+python-versions = ">=3.7"
 
 [package.dependencies]
-django = ">=2.0"
-redis = ">=3"
-rq = ">=1.2"
+packaging = "*"
+redis = "<=4.3.1"
+six = ">=1.12"
+sortedcontainers = "*"
 
 [package.extras]
-sentry = ["raven (>=6.1.0)"]
-testing = ["mock (>=2.0.0)"]
+aioredis = ["aioredis"]
+lua = ["lupa"]
 
 [[package]]
 name = "feedparser"
-version = "6.0.9"
+version = "6.0.10"
 description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds"
 category = "main"
 optional = false
@@ -306,6 +307,19 @@ category = "dev"
 optional = false
 python-versions = "*"
 
+[[package]]
+name = "mock"
+version = "4.0.3"
+description = "Rolling backport of unittest.mock for all Pythons"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+build = ["twine", "wheel", "blurb"]
+docs = ["sphinx"]
+test = ["pytest (<5.4)", "pytest-cov"]
+
 [[package]]
 name = "mutagen"
 version = "1.45.1"
@@ -487,6 +501,20 @@ pytest = ">=5.4.0"
 docs = ["sphinx", "sphinx-rtd-theme"]
 testing = ["django", "django-configurations (>=2.0)"]
 
+[[package]]
+name = "pytest-mock"
+version = "3.7.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
+[package.dependencies]
+pytest = ">=5.0"
+
+[package.extras]
+dev = ["pre-commit", "tox", "pytest-asyncio"]
+
 [[package]]
 name = "python-dateutil"
 version = "2.8.2"
@@ -615,6 +643,14 @@ category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
 
+[[package]]
+name = "sortedcontainers"
+version = "2.4.0"
+description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
+category = "dev"
+optional = false
+python-versions = "*"
+
 [[package]]
 name = "sqlparse"
 version = "0.4.2"
@@ -729,7 +765,7 @@ websockets = "*"
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.10"
-content-hash = "312ee264a4f1ed8ef9160046b18f3b76a23af638be5effb9f9feb78b25d05aae"
+content-hash = "ad3a5ecd6fc1152dfdfda51ed1e401ec11a048661a04f42985c15bc28e8eda9f"
 
 [metadata.files]
 asgiref = [
@@ -921,47 +957,47 @@ colorthief = [
     {file = "colorthief-0.2.1.tar.gz", hash = "sha256:079cb0c95bdd669c4643e2f7494de13b0b6029d5cdbe2d74d5d3c3386bd57221"},
 ]
 coverage = [
-    {file = "coverage-6.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df32ee0f4935a101e4b9a5f07b617d884a531ed5666671ff6ac66d2e8e8246d8"},
-    {file = "coverage-6.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75b5dbffc334e0beb4f6c503fb95e6d422770fd2d1b40a64898ea26d6c02742d"},
-    {file = "coverage-6.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:114944e6061b68a801c5da5427b9173a0dd9d32cd5fcc18a13de90352843737d"},
-    {file = "coverage-6.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab88a01cd180b5640ccc9c47232e31924d5f9967ab7edd7e5c91c68eee47a69"},
-    {file = "coverage-6.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad8f9068f5972a46d50fe5f32c09d6ee11da69c560fcb1b4c3baea246ca4109b"},
-    {file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4cd696aa712e6cd16898d63cf66139dc70d998f8121ab558f0e1936396dbc579"},
-    {file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c1a9942e282cc9d3ed522cd3e3cab081149b27ea3bda72d6f61f84eaf88c1a63"},
-    {file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c06455121a089252b5943ea682187a4e0a5cf0a3fb980eb8e7ce394b144430a9"},
-    {file = "coverage-6.3.3-cp310-cp310-win32.whl", hash = "sha256:cb5311d6ccbd22578c80028c5e292a7ab9adb91bd62c1982087fad75abe2e63d"},
-    {file = "coverage-6.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d4a6f30f611e657495cc81a07ff7aa8cd949144e7667c5d3e680d73ba7a70e4"},
-    {file = "coverage-6.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:79bf405432428e989cad7b8bc60581963238f7645ae8a404f5dce90236cc0293"},
-    {file = "coverage-6.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:338c417613f15596af9eb7a39353b60abec9d8ce1080aedba5ecee6a5d85f8d3"},
-    {file = "coverage-6.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db094a6a4ae6329ed322a8973f83630b12715654c197dd392410400a5bfa1a73"},
-    {file = "coverage-6.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1414e8b124611bf4df8d77215bd32cba6e3425da8ce9c1f1046149615e3a9a31"},
-    {file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:93b16b08f94c92cab88073ffd185070cdcb29f1b98df8b28e6649145b7f2c90d"},
-    {file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fbc86ae8cc129c801e7baaafe3addf3c8d49c9c1597c44bdf2d78139707c3c62"},
-    {file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b5ba058610e8289a07db2a57bce45a1793ec0d3d11db28c047aae2aa1a832572"},
-    {file = "coverage-6.3.3-cp37-cp37m-win32.whl", hash = "sha256:8329635c0781927a2c6ae068461e19674c564e05b86736ab8eb29c420ee7dc20"},
-    {file = "coverage-6.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:e5af1feee71099ae2e3b086ec04f57f9950e1be9ecf6c420696fea7977b84738"},
-    {file = "coverage-6.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e814a4a5a1d95223b08cdb0f4f57029e8eab22ffdbae2f97107aeef28554517e"},
-    {file = "coverage-6.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61f4fbf3633cb0713437291b8848634ea97f89c7e849c2be17a665611e433f53"},
-    {file = "coverage-6.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3401b0d2ed9f726fadbfa35102e00d1b3547b73772a1de5508ef3bdbcb36afe7"},
-    {file = "coverage-6.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8586b177b4407f988731eb7f41967415b2197f35e2a6ee1a9b9b561f6323c8e9"},
-    {file = "coverage-6.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:892e7fe32191960da559a14536768a62e83e87bbb867e1b9c643e7e0fbce2579"},
-    {file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:afb03f981fadb5aed1ac6e3dd34f0488e1a0875623d557b6fad09b97a942b38a"},
-    {file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cbe91bc84be4e5ef0b1480d15c7b18e29c73bdfa33e07d3725da7d18e1b0aff2"},
-    {file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:91502bf27cbd5c83c95cfea291ef387469f2387508645602e1ca0fd8a4ba7548"},
-    {file = "coverage-6.3.3-cp38-cp38-win32.whl", hash = "sha256:c488db059848702aff30aa1d90ef87928d4e72e4f00717343800546fdbff0a94"},
-    {file = "coverage-6.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6534fcdfb5c503affb6b1130db7b5bfc8a0f77fa34880146f7a5c117987d0"},
-    {file = "coverage-6.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cc692c9ee18f0dd3214843779ba6b275ee4bb9b9a5745ba64265bce911aefd1a"},
-    {file = "coverage-6.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:462105283de203df8de58a68c1bb4ba2a8a164097c2379f664fa81d6baf94b81"},
-    {file = "coverage-6.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc972d829ad5ef4d4c5fcabd2bbe2add84ce8236f64ba1c0c72185da3a273130"},
-    {file = "coverage-6.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06f54765cdbce99901871d50fe9f41d58213f18e98b170a30ca34f47de7dd5e8"},
-    {file = "coverage-6.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7835f76a081787f0ca62a53504361b3869840a1620049b56d803a8cb3a9eeea3"},
-    {file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6f5fee77ec3384b934797f1873758f796dfb4f167e1296dc00f8b2e023ce6ee9"},
-    {file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:baa8be8aba3dd1e976e68677be68a960a633a6d44c325757aefaa4d66175050f"},
-    {file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d06380e777dd6b35ee936f333d55b53dc4a8271036ff884c909cf6e94be8b6c"},
-    {file = "coverage-6.3.3-cp39-cp39-win32.whl", hash = "sha256:f8cabc5fd0091976ab7b020f5708335033e422de25e20ddf9416bdce2b7e07d8"},
-    {file = "coverage-6.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c9441d57b0963cf8340268ad62fc83de61f1613034b79c2b1053046af0c5284"},
-    {file = "coverage-6.3.3-pp36.pp37.pp38-none-any.whl", hash = "sha256:d522f1dc49127eab0bfbba4e90fa068ecff0899bbf61bf4065c790ddd6c177fe"},
-    {file = "coverage-6.3.3.tar.gz", hash = "sha256:2781c43bffbbec2b8867376d4d61916f5e9c4cc168232528562a61d1b4b01879"},
+    {file = "coverage-6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50ed480b798febce113709846b11f5d5ed1e529c88d8ae92f707806c50297abf"},
+    {file = "coverage-6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:26f8f92699756cb7af2b30720de0c5bb8d028e923a95b6d0c891088025a1ac8f"},
+    {file = "coverage-6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60c2147921da7f4d2d04f570e1838db32b95c5509d248f3fe6417e91437eaf41"},
+    {file = "coverage-6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:750e13834b597eeb8ae6e72aa58d1d831b96beec5ad1d04479ae3772373a8088"},
+    {file = "coverage-6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af5b9ee0fc146e907aa0f5fb858c3b3da9199d78b7bb2c9973d95550bd40f701"},
+    {file = "coverage-6.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a022394996419142b33a0cf7274cb444c01d2bb123727c4bb0b9acabcb515dea"},
+    {file = "coverage-6.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5a78cf2c43b13aa6b56003707c5203f28585944c277c1f3f109c7b041b16bd39"},
+    {file = "coverage-6.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9229d074e097f21dfe0643d9d0140ee7433814b3f0fc3706b4abffd1e3038632"},
+    {file = "coverage-6.4-cp310-cp310-win32.whl", hash = "sha256:fb45fe08e1abc64eb836d187b20a59172053999823f7f6ef4f18a819c44ba16f"},
+    {file = "coverage-6.4-cp310-cp310-win_amd64.whl", hash = "sha256:3cfd07c5889ddb96a401449109a8b97a165be9d67077df6802f59708bfb07720"},
+    {file = "coverage-6.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:03014a74023abaf5a591eeeaf1ac66a73d54eba178ff4cb1fa0c0a44aae70383"},
+    {file = "coverage-6.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c82f2cd69c71698152e943f4a5a6b83a3ab1db73b88f6e769fabc86074c3b08"},
+    {file = "coverage-6.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b546cf2b1974ddc2cb222a109b37c6ed1778b9be7e6b0c0bc0cf0438d9e45a6"},
+    {file = "coverage-6.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc173f1ce9ffb16b299f51c9ce53f66a62f4d975abe5640e976904066f3c835d"},
+    {file = "coverage-6.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c53ad261dfc8695062fc8811ac7c162bd6096a05a19f26097f411bdf5747aee7"},
+    {file = "coverage-6.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:eef5292b60b6de753d6e7f2d128d5841c7915fb1e3321c3a1fe6acfe76c38052"},
+    {file = "coverage-6.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:543e172ce4c0de533fa892034cce260467b213c0ea8e39da2f65f9a477425211"},
+    {file = "coverage-6.4-cp37-cp37m-win32.whl", hash = "sha256:00c8544510f3c98476bbd58201ac2b150ffbcce46a8c3e4fb89ebf01998f806a"},
+    {file = "coverage-6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:b84ab65444dcc68d761e95d4d70f3cfd347ceca5a029f2ffec37d4f124f61311"},
+    {file = "coverage-6.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d548edacbf16a8276af13063a2b0669d58bbcfca7c55a255f84aac2870786a61"},
+    {file = "coverage-6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:033ebec282793bd9eb988d0271c211e58442c31077976c19c442e24d827d356f"},
+    {file = "coverage-6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:742fb8b43835078dd7496c3c25a1ec8d15351df49fb0037bffb4754291ef30ce"},
+    {file = "coverage-6.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55fae115ef9f67934e9f1103c9ba826b4c690e4c5bcf94482b8b2398311bf9c"},
+    {file = "coverage-6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd698341626f3c77784858427bad0cdd54a713115b423d22ac83a28303d1d95"},
+    {file = "coverage-6.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:62d382f7d77eeeaff14b30516b17bcbe80f645f5cf02bb755baac376591c653c"},
+    {file = "coverage-6.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:016d7f5cf1c8c84f533a3c1f8f36126fbe00b2ec0ccca47cc5731c3723d327c6"},
+    {file = "coverage-6.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:69432946f154c6add0e9ede03cc43b96e2ef2733110a77444823c053b1ff5166"},
+    {file = "coverage-6.4-cp38-cp38-win32.whl", hash = "sha256:83bd142cdec5e4a5c4ca1d4ff6fa807d28460f9db919f9f6a31babaaa8b88426"},
+    {file = "coverage-6.4-cp38-cp38-win_amd64.whl", hash = "sha256:4002f9e8c1f286e986fe96ec58742b93484195defc01d5cc7809b8f7acb5ece3"},
+    {file = "coverage-6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e4f52c272fdc82e7c65ff3f17a7179bc5f710ebc8ce8a5cadac81215e8326740"},
+    {file = "coverage-6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5578efe4038be02d76c344007b13119b2b20acd009a88dde8adec2de4f630b5"},
+    {file = "coverage-6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8099ea680201c2221f8468c372198ceba9338a5fec0e940111962b03b3f716a"},
+    {file = "coverage-6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a00441f5ea4504f5abbc047589d09e0dc33eb447dc45a1a527c8b74bfdd32c65"},
+    {file = "coverage-6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e76bd16f0e31bc2b07e0fb1379551fcd40daf8cdf7e24f31a29e442878a827c"},
+    {file = "coverage-6.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8d2e80dd3438e93b19e1223a9850fa65425e77f2607a364b6fd134fcd52dc9df"},
+    {file = "coverage-6.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:341e9c2008c481c5c72d0e0dbf64980a4b2238631a7f9780b0fe2e95755fb018"},
+    {file = "coverage-6.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:21e6686a95025927775ac501e74f5940cdf6fe052292f3a3f7349b0abae6d00f"},
+    {file = "coverage-6.4-cp39-cp39-win32.whl", hash = "sha256:968ed5407f9460bd5a591cefd1388cc00a8f5099de9e76234655ae48cfdbe2c3"},
+    {file = "coverage-6.4-cp39-cp39-win_amd64.whl", hash = "sha256:e35217031e4b534b09f9b9a5841b9344a30a6357627761d4218818b865d45055"},
+    {file = "coverage-6.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:e637ae0b7b481905358624ef2e81d7fb0b1af55f5ff99f9ba05442a444b11e45"},
+    {file = "coverage-6.4.tar.gz", hash = "sha256:727dafd7f67a6e1cad808dc884bd9c5a2f6ef1f8f6d2f22b37b96cb0080d4f49"},
 ]
 croniter = [
     {file = "croniter-1.3.5-py2.py3-none-any.whl", hash = "sha256:4f72faca42c00beb6e30907f1315145f43dfbe5ec0ad4ada24b4c0d57b86a33a"},
@@ -983,13 +1019,13 @@ django-bulma = [
     {file = "django-bulma-0.8.3.tar.gz", hash = "sha256:b794b4e64f482de77f376451f7cd8b3c8448eb68e5a24c51b9190625a08b0b30"},
     {file = "django_bulma-0.8.3-py3-none-any.whl", hash = "sha256:0ef6e5c171c2a32010e724a8be61ba6cd0e55ebbd242cf6780560518483c4d00"},
 ]
-django-rq = [
-    {file = "django-rq-2.5.1.tar.gz", hash = "sha256:f08486602664d73a6e335872c868d79663e380247e6307496d01b8fa770fefd8"},
-    {file = "django_rq-2.5.1-py2.py3-none-any.whl", hash = "sha256:7be1e10e7091555f9f36edf100b0dbb205ea2b98683d74443d2bdf3c6649a03f"},
+fakeredis = [
+    {file = "fakeredis-1.7.5-py3-none-any.whl", hash = "sha256:c4ca2be686e7e7637756ccc7dcad8472a5e4866b065431107d7a4b7a250d4e6f"},
+    {file = "fakeredis-1.7.5.tar.gz", hash = "sha256:49375c630981dd4045d9a92e2709fcd4476c91f927e0228493eefa625e705133"},
 ]
 feedparser = [
-    {file = "feedparser-6.0.9-py3-none-any.whl", hash = "sha256:a522b2b81f3914a74ae44161a341940f74811bd29be5b4c2a689e6e6be51cd39"},
-    {file = "feedparser-6.0.9.tar.gz", hash = "sha256:dad42e7beaec55f99c08b2b0cf7288bc7cfd24b6f72c8ef85478bcb55648cd42"},
+    {file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"},
+    {file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"},
 ]
 filelock = [
     {file = "filelock-3.7.0-py3-none-any.whl", hash = "sha256:c7b5fdb219b398a5b28c8e4c1893ef5f98ece6a38c6ab2c22e26ec161556fed6"},
@@ -1025,6 +1061,10 @@ invoke = [
     {file = "invoke-1.7.1-py3-none-any.whl", hash = "sha256:2dc975b4f92be0c0a174ad2d063010c8a1fdb5e9389d69871001118b4fcac4fb"},
     {file = "invoke-1.7.1.tar.gz", hash = "sha256:7b6deaf585eee0a848205d0b8c0014b9bf6f287a8eb798818a642dff1df14b19"},
 ]
+mock = [
+    {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"},
+    {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"},
+]
 mutagen = [
     {file = "mutagen-1.45.1-py3-none-any.whl", hash = "sha256:9c9f243fcec7f410f138cb12c21c84c64fde4195481a30c9bfb05b5f003adfed"},
     {file = "mutagen-1.45.1.tar.gz", hash = "sha256:6397602efb3c2d7baebd2166ed85731ae1c1d475abca22090b7141ff5034b3e1"},
@@ -1162,6 +1202,10 @@ pytest-django = [
     {file = "pytest-django-4.5.2.tar.gz", hash = "sha256:d9076f759bb7c36939dbdd5ae6633c18edfc2902d1a69fdbefd2426b970ce6c2"},
     {file = "pytest_django-4.5.2-py3-none-any.whl", hash = "sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e"},
 ]
+pytest-mock = [
+    {file = "pytest-mock-3.7.0.tar.gz", hash = "sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534"},
+    {file = "pytest_mock-3.7.0-py3-none-any.whl", hash = "sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231"},
+]
 python-dateutil = [
     {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
     {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
@@ -1235,6 +1279,10 @@ six = [
     {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
     {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
 ]
+sortedcontainers = [
+    {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
+    {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
+]
 sqlparse = [
     {file = "sqlparse-0.4.2-py3-none-any.whl", hash = "sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"},
     {file = "sqlparse-0.4.2.tar.gz", hash = "sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae"},

pyproject.toml

@@ -26,7 +26,7 @@ psycopg2 = "^2.9.3"
 mysqlclient = "^2.1.0"
 python-slugify = "^6.1.2"
 mutagen = "^1.45.1"
-django-rq = "^2.5.1"
+rq = "^1.10.1"
 rq-scheduler = "^0.11.0"
 
 [tool.poetry.dev-dependencies]
@@ -36,6 +36,8 @@ invoke = "^1.7.0"
 pytest-django = "^4.5.2"
 pre-commit = "^2.19.0"
 honcho = "^1.1.0"
+pytest-mock = "^3.7.0"
+fakeredis = "^1.7.5"
 
 [tool.poetry.scripts]
 "ucast-manage" = "ucast_project.manage:main"

ucast/__init__.py

@@ -1,3 +1 @@
 __version__ = "0.0.1"
-
-default_app_config = "ucast.apps.UcastConfig"


@@ -0,0 +1,34 @@
"""
Based on the django-rq package by Selwin Ong (MIT License)
https://github.com/rq/django-rq
"""
from django.core.management.base import BaseCommand

from ucast import queue


class Command(BaseCommand):
    """Queue a function with the given arguments."""

    help = __doc__
    args = "<function arg arg ...>"

    def add_arguments(self, parser):
        parser.add_argument(
            "--timeout", "-t", type=int, dest="timeout", help="A timeout in seconds"
        )
        parser.add_argument("args", nargs="*")

    def handle(self, *args, **options):
        """
        Queues the function given with the first argument with the
        parameters given with the rest of the argument list.
        """
        verbosity = int(options.get("verbosity", 1))
        timeout = options.get("timeout")

        q = queue.get_queue()
        job = q.enqueue_call(args[0], args=args[1:], timeout=timeout)

        if verbosity:
            print("Job %s created" % job.id)


@@ -1,11 +1,58 @@
-from django_rq.management.commands import rqscheduler
+"""
+Based on the django-rq package by Selwin Ong (MIT License)
+https://github.com/rq/django-rq
+"""
+import os
+
+from django.core.management.base import BaseCommand
+from rq_scheduler.utils import setup_loghandlers
 
+from ucast import queue
 from ucast.tasks import schedule
 
 
-class Command(rqscheduler.Command):
-    def handle(self, *args, **kwargs):
-        print("Starting ucast scheduler")
+class Command(BaseCommand):
+    """Runs RQ Scheduler"""
+
+    help = __doc__
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "--pid",
+            action="store",
+            dest="pid",
+            default=None,
+            help="PID file to write the scheduler`s pid into",
+        )
+        parser.add_argument(
+            "--interval",
+            "-i",
+            type=int,
+            dest="interval",
+            default=60,
+            help="""How often the scheduler checks for new jobs to add to the
+            queue (in seconds).""",
+        )
+
+    def handle(self, *args, **options):
         schedule.clear_scheduled_jobs()
         schedule.register_scheduled_jobs()
-        super(Command, self).handle(*args, **kwargs)
+
+        pid = options.get("pid")
+        if pid:
+            with open(os.path.expanduser(pid), "w") as fp:
+                fp.write(str(os.getpid()))
+
+        # Verbosity is defined by default in BaseCommand for all commands
+        verbosity = options.get("verbosity")
+        if verbosity >= 2:
+            level = "DEBUG"
+        elif verbosity == 0:
+            level = "WARNING"
+        else:
+            level = "INFO"
+        setup_loghandlers(level)
+
+        scheduler = queue.get_scheduler(options.get("interval"))
+        scheduler.run()


@@ -0,0 +1,121 @@
"""
Based on the django-rq package by Selwin Ong (MIT License)
https://github.com/rq/django-rq
"""
import time

import click
from django.core.management.base import BaseCommand

from ucast import queue


class Command(BaseCommand):
    """Print RQ statistics"""

    help = __doc__

    def add_arguments(self, parser):
        parser.add_argument(
            "-j",
            "--json",
            action="store_true",
            dest="json",
            help="Output statistics as JSON",
        )

        parser.add_argument(
            "-y",
            "--yaml",
            action="store_true",
            dest="yaml",
            help="Output statistics as YAML",
        )

        parser.add_argument(
            "-i",
            "--interval",
            dest="interval",
            type=float,
            help="Poll statistics every N seconds",
        )

    def _print_separator(self):
        try:
            click.echo(self._separator)
        except AttributeError:
            self._separator = "-" * self.table_width
            click.echo(self._separator)

    def _print_stats_dashboard(self, statistics):
        if self.interval:
            click.clear()

        click.echo()
        click.echo("Django RQ CLI Dashboard")
        click.echo()
        self._print_separator()

        # Header
        click.echo(
            """| %-15s|%10s |%10s |%10s |%10s |%10s |"""
            % ("Name", "Queued", "Active", "Deferred", "Finished", "Workers")
        )

        self._print_separator()

        click.echo(
            """| %-15s|%10s |%10s |%10s |%10s |%10s |"""
            % (
                statistics["name"],
                statistics["jobs"],
                statistics["started_jobs"],
                statistics["deferred_jobs"],
                statistics["finished_jobs"],
                statistics["workers"],
            )
        )

        self._print_separator()

        if self.interval:
            click.echo()
            click.echo("Press 'Ctrl+c' to quit")

    def handle(self, *args, **options):
        if options.get("json"):
            import json

            click.echo(json.dumps(queue.get_statistics()))
            return

        if options.get("yaml"):
            try:
                import yaml
            except ImportError:
                click.echo("Aborting. LibYAML is not installed.")
                return

            # Disable YAML alias
            yaml.Dumper.ignore_aliases = lambda *args: True
            click.echo(yaml.dump(queue.get_statistics(), default_flow_style=False))
            return

        self.interval = options.get("interval")

        # Arbitrary
        self.table_width = 78

        # Do not continuously poll
        if not self.interval:
            self._print_stats_dashboard(queue.get_statistics())
            return

        # Abuse clicks to 'live' render CLI dashboard
        try:
            while True:
                self._print_stats_dashboard(queue.get_statistics())
                time.sleep(self.interval)
        except KeyboardInterrupt:
            pass


@@ -0,0 +1,103 @@
"""
Based on the django-rq package by Selwin Ong (MIT License)
https://github.com/rq/django-rq
"""
import os
import sys

from django.core.management.base import BaseCommand
from django.db import connections
from redis.exceptions import ConnectionError
from rq import use_connection
from rq.logutils import setup_loghandlers

from ucast import queue


def reset_db_connections():
    for c in connections.all():
        c.close()


class Command(BaseCommand):
    """Runs RQ worker"""

    help = __doc__

    def add_arguments(self, parser):
        parser.add_argument(
            "--pid",
            action="store",
            dest="pid",
            default=None,
            help="PID file to write the worker`s pid into",
        )
        parser.add_argument(
            "--burst",
            action="store_true",
            dest="burst",
            default=False,
            help="Run worker in burst mode",
        )
        parser.add_argument(
            "--with-scheduler",
            action="store_true",
            dest="with_scheduler",
            default=False,
            help="Run worker with scheduler enabled",
        )
        parser.add_argument(
            "--name",
            action="store",
            dest="name",
            default=None,
            help="Name of the worker",
        )
        parser.add_argument(
            "--worker-ttl",
            action="store",
            type=int,
            dest="worker_ttl",
            default=420,
            help="Default worker timeout to be used",
        )

    def handle(self, *args, **options):
        pid = options.get("pid")
        if pid:
            with open(os.path.expanduser(pid), "w") as fp:
                fp.write(str(os.getpid()))

        # Verbosity is defined by default in BaseCommand for all commands
        verbosity = options.get("verbosity")
        if verbosity >= 2:
            level = "DEBUG"
        elif verbosity == 0:
            level = "WARNING"
        else:
            level = "INFO"
        setup_loghandlers(level)

        try:
            # Instantiate a worker
            worker_kwargs = {
                "name": options["name"],
                "default_worker_ttl": options["worker_ttl"],
            }
            w = queue.get_worker(**worker_kwargs)

            # Call use_connection to push the redis connection into LocalStack
            # without this, jobs using RQ's get_current_job() will fail
            use_connection(w.connection)

            # Close any opened DB connection before any fork
            reset_db_connections()

            w.work(
                burst=options.get("burst", False),
                with_scheduler=options.get("with_scheduler", False),
                logging_level=level,
            )
        except ConnectionError as e:
            self.stderr.write(str(e))
            sys.exit(1)


@@ -25,6 +25,7 @@ class Migration(migrations.Migration):
                 ("skip_livestreams", models.BooleanField(default=True)),
                 ("skip_shorts", models.BooleanField(default=True)),
                 ("keep_videos", models.IntegerField(default=None, null=True)),
+                ("avatar_url", models.CharField(max_length=250, null=True)),
             ],
         ),
         migrations.CreateModel(

ucast/models.py

@@ -36,11 +36,18 @@ class Channel(models.Model):
     skip_livestreams = models.BooleanField(default=True)
     skip_shorts = models.BooleanField(default=True)
     keep_videos = models.IntegerField(null=True, default=None)
+    avatar_url = models.CharField(max_length=250, null=True)
 
     @classmethod
     def get_new_slug(cls, name: str) -> str:
         return _get_unique_slug(name, cls.objects, "channel")
 
+    def get_full_description(self) -> str:
+        desc = f"https://www.youtube.com/channel/{self.id}"
+        if self.description:
+            desc = f"{self.description}\n\n{desc}"
+        return desc
+
     def __str__(self):
         return self.name
@@ -66,5 +73,11 @@ class Video(models.Model):
             title_w_date, cls.objects.filter(channel_id=channel_id), "video"
         )
 
+    def get_full_description(self) -> str:
+        desc = f"https://youtu.be/{self.id}"
+        if self.description:
+            desc = f"{self.description}\n\n{desc}"
+        return desc
+
     def __str__(self):
         return self.title

ucast/queue.py Normal file

@@ -0,0 +1,87 @@
import redis
import rq
import rq_scheduler
from django.conf import settings
from rq import registry

from ucast.service import util


def get_redis_connection() -> redis.client.Redis:
    return redis.Redis.from_url(settings.REDIS_URL)


def get_queue() -> rq.Queue:
    redis_conn = get_redis_connection()
    return rq.Queue(default_timeout=settings.REDIS_QUEUE_TIMEOUT, connection=redis_conn)


def get_scheduler(interval=60) -> rq_scheduler.Scheduler:
    redis_conn = get_redis_connection()
    return rq_scheduler.Scheduler(connection=redis_conn, interval=interval)


def get_worker(**kwargs) -> rq.Worker:
    queue = get_queue()
    return rq.Worker(
        queue,
        connection=queue.connection,
        default_result_ttl=settings.REDIS_QUEUE_RESULT_TTL,
        **kwargs,
    )


def enqueue(f, *args, **kwargs) -> rq.job.Job:
    queue = get_queue()
    # return queue.enqueue(f, *args, **kwargs)
    return queue.enqueue_call(f, args, kwargs)


def get_statistics() -> dict:
    """
    Return statistics from the RQ Queue.

    Taken from the django-rq package by Selwin Ong (MIT License)
    https://github.com/rq/django-rq

    :return: RQ statistics
    """
    queue = get_queue()
    connection = queue.connection
    connection_kwargs = connection.connection_pool.connection_kwargs

    # Raw access to the first item from left of the redis list.
    # This might not be accurate since new job can be added from the left
    # with `at_front` parameters.
    # Ideally rq should supports Queue.oldest_job
    last_job_id = connection.lindex(queue.key, 0)
    last_job = queue.fetch_job(last_job_id.decode("utf-8")) if last_job_id else None
    if last_job:
        oldest_job_timestamp = util.to_localtime(last_job.enqueued_at).strftime(
            "%Y-%m-%d, %H:%M:%S"
        )
    else:
        oldest_job_timestamp = "-"

    # parse_class and connection_pool are not needed and not JSON serializable
    connection_kwargs.pop("parser_class", None)
    connection_kwargs.pop("connection_pool", None)

    finished_job_registry = registry.FinishedJobRegistry(queue.name, queue.connection)
    started_job_registry = registry.StartedJobRegistry(queue.name, queue.connection)
    deferred_job_registry = registry.DeferredJobRegistry(queue.name, queue.connection)
    failed_job_registry = registry.FailedJobRegistry(queue.name, queue.connection)
    scheduled_job_registry = registry.ScheduledJobRegistry(queue.name, queue.connection)

    return {
        "name": queue.name,
        "jobs": queue.count,
        "oldest_job_timestamp": oldest_job_timestamp,
        "connection_kwargs": connection_kwargs,
        "workers": rq.Worker.count(queue=queue),
        "finished_jobs": len(finished_job_registry),
        "started_jobs": len(started_job_registry),
        "deferred_jobs": len(deferred_job_registry),
        "failed_jobs": len(failed_job_registry),
        "scheduled_jobs": len(scheduled_job_registry),
    }
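
Note: these helpers replace django-rq's module-level configuration with
explicit constructors. A short usage sketch (the worker name is chosen
arbitrarily for the example):

from ucast import queue
from ucast.tasks import download

# Enqueue a job; this is what the call sites in ucast.tasks now do
job = queue.enqueue(download.update_channels)
print(job.id)

# Run a worker against the same settings-derived Redis connection;
# burst=True makes it exit once the queue is empty.
queue.get_worker(name="worker-1").work(burst=True)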

ucast/service/util.py

@@ -1,8 +1,12 @@
+import datetime
 import io
+import json
 from pathlib import Path
+from typing import Any, Union
 
 import requests
 import slugify
+from django.utils import timezone
 from PIL import Image
 
 AVATAR_SM_WIDTH = 100
@@ -57,3 +61,50 @@ def resize_thumbnail(original_file: Path, new_file: Path):
 
 def get_slug(text: str) -> str:
     return slugify.slugify(text, lowercase=False, separator="_")
+
+
+def to_localtime(time: datetime.datetime):
+    """Converts naive datetime to localtime based on settings"""
+    utc_time = time.replace(tzinfo=datetime.timezone.utc)
+    to_zone = timezone.get_default_timezone()
+    return utc_time.astimezone(to_zone)
+
+
+def _get_np_attrs(o) -> dict:
+    """
+    Return all non-protected attributes of the given object.
+
+    :param o: Object
+    :return: Dict of attributes
+    """
+    return {k: v for k, v in o.__dict__.items() if not k.startswith("_")}
+
+
+def serializer(o: Any) -> Union[str, dict, int, float, bool]:
+    """
+    Serialize object to json-storable format
+
+    :param o: Object to serialize
+    :return: Serialized output data
+    """
+    if hasattr(o, "serialize"):
+        return o.serialize()
+    if isinstance(o, (datetime.datetime, datetime.date)):
+        return o.isoformat()
+    if isinstance(o, (bool, float, int)):
+        return o
+    if hasattr(o, "__dict__"):
+        return _get_np_attrs(o)
+    return str(o)
+
+
+def to_json(o, pretty=False) -> str:
+    """
+    Convert object to json.
+
+    Uses the ``serialize()`` method of the target object if available.
+
+    :param o: Object to serialize
+    :param pretty: Prettify with indents
+    :return: JSON string
+    """
+    return json.dumps(
+        o, default=serializer, indent=2 if pretty else None, ensure_ascii=False
+    )
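
Note: a short illustration of the serializer() fallback chain (the dataclass is
invented for this example): objects without a serialize() method fall through
to their __dict__, and nested dates are rendered via isoformat().

import datetime
from dataclasses import dataclass

from ucast.service import util


@dataclass
class Episode:  # example type, not part of ucast
    title: str
    published: datetime.date


print(util.to_json(Episode("Intro", datetime.date(2022, 5, 21)), pretty=True))
# {
#   "title": "Intro",
#   "published": "2022-05-21"
# }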

ucast/service/videoutil.py Normal file

@@ -0,0 +1,22 @@
from pathlib import Path

from mutagen import id3

from ucast.models import Video


def tag_audio(audio_path: Path, video: Video, cover_path: Path):
    title_text = f"{video.published.date().isoformat()} {video.title}"

    tag = id3.ID3(audio_path)
    tag["TPE1"] = id3.TPE1(encoding=3, text=video.channel.name)  # Artist
    tag["TALB"] = id3.TALB(encoding=3, text=video.channel.name)  # Album
    tag["TIT2"] = id3.TIT2(encoding=3, text=title_text)  # Title
    tag["TDRC"] = id3.TDRC(encoding=3, text=video.published.date().isoformat())  # Date
    tag["COMM"] = id3.COMM(encoding=3, text=video.get_full_description())  # Comment

    with open(cover_path, "rb") as albumart:
        tag["APIC"] = id3.APIC(
            encoding=3, mime="image/png", type=3, desc="Cover", data=albumart.read()
        )

    tag.save()

ucast/service/youtube.py

@@ -9,11 +9,12 @@ from typing import List, Optional
 
 import feedparser
 import requests
-from mutagen import id3
 from yt_dlp import YoutubeDL
 
 from ucast.service import scrapetube, util
 
+CHANID_REGEX = re.compile(r"""[-_a-zA-Z\d]{24}""")
+
 
 class ItemNotFoundError(Exception):
     pass
@@ -23,6 +24,10 @@
     pass
 
 
+class InvalidMetadataError(Exception):
+    pass
+
+
 @dataclass
 class VideoScraped:
     """
@@ -71,7 +76,8 @@ class VideoDetails:
             thumbnails=info["thumbnails"],
             is_currently_live=bool(info.get("is_live")),
             is_livestream=info.get("is_live") or info.get("was_live"),
-            is_short=info["duration"] <= 60 and info["width"] < info["height"],
+            is_short=info["duration"] <= 60
+            and (info["width"] or 0) < (info["height"] or 0),
         )
 
     def add_scraped_data(self, scraped: VideoScraped):
@@ -157,24 +163,6 @@ def download_audio(
     return VideoDetails.from_vinfo(info)
 
 
-def tag_audio(audio_path: Path, vinfo: VideoDetails, cover_path: Path):
-    title_text = f"{vinfo.published.date().isoformat()} {vinfo.title}"
-    comment = f"https://youtu.be/{vinfo.id}\n\n{vinfo.description}"
-
-    tag = id3.ID3(audio_path)
-    tag["TPE1"] = id3.TPE1(encoding=3, text=vinfo.channel_name)  # Artist
-    tag["TALB"] = id3.TALB(encoding=3, text=vinfo.channel_name)  # Album
-    tag["TIT2"] = id3.TIT2(encoding=3, text=title_text)  # Title
-    tag["TDRC"] = id3.TDRC(encoding=3, text=vinfo.published.date().isoformat())  # Date
-    tag["COMM"] = id3.COMM(encoding=3, text=comment)  # Comment
-
-    with open(cover_path, "rb") as albumart:
-        tag["APIC"] = id3.APIC(
-            encoding=3, mime="image/png", type=3, desc="Cover", data=albumart.read()
-        )
-
-    tag.save()
-
-
 def channel_url_from_id(channel_id: str) -> str:
     return "https://www.youtube.com/channel/" + channel_id
@@ -207,8 +195,7 @@ def channel_url_from_str(channel_str: str) -> str:
         # Channel ID
         return "https://www.youtube.com/channel/" + match[2]
 
-    chanid_regex = re.compile(r"""[-_a-zA-Z\d]{24}""")
-    if chanid_regex.match(channel_str):
+    if CHANID_REGEX.match(channel_str):
         return "https://www.youtube.com/channel/" + channel_str
 
     raise ValueError("invalid channel string")
@@ -226,9 +213,20 @@ def get_channel_metadata(channel_url: str) -> ChannelMetadata:
 
     channel_id = metadata["externalId"]
     name = metadata["title"]
-    description = metadata["description"]
+    description = metadata["description"].strip()
     avatar = metadata["avatar"]["thumbnails"][0]["url"]
 
+    if not CHANID_REGEX.match(channel_id):
+        raise InvalidMetadataError(f"got invalid channel id {repr(channel_id)}")
+
+    if not name:
+        raise InvalidMetadataError(f"no channel name found for channel {channel_id}")
+
+    if not avatar.startswith("https://"):
+        raise InvalidMetadataError(
+            f"got invalid avatar url for channel {channel_id}: {avatar}"
+        )
+
     return ChannelMetadata(channel_id, name, description, avatar)

ucast/tasks/download.py

@@ -1,12 +1,10 @@
 import os
 
-import django_rq
 from django.utils import timezone
 
+from ucast import queue
 from ucast.models import Channel, Video
-from ucast.service import cover, storage, util, youtube
-
-store = storage.Storage()
+from ucast.service import cover, storage, util, videoutil, youtube
 
 
 def _get_or_create_channel(channel_id: str) -> Channel:
@@ -17,6 +15,7 @@ def _get_or_create_channel(channel_id: str) -> Channel:
         youtube.channel_url_from_id(channel_id)
     )
     channel_slug = Channel.get_new_slug(channel_data.name)
+    store = storage.Storage()
    channel_folder = store.get_channel_folder(channel_slug)
 
     util.download_image_file(channel_data.avatar_url, channel_folder.file_avatar)
@@ -61,7 +60,7 @@ def _load_scraped_video(vid: youtube.VideoScraped, channel: Channel):
     )
     video.save()
 
-    django_rq.enqueue(download_video, video)
+    queue.enqueue(download_video, video)
 
 
 def download_video(video: Video):
@@ -71,6 +70,7 @@ def download_video(video: Video):
 
     :param video: Video object
     """
+    store = storage.Storage()
     channel_folder = store.get_channel_folder(video.channel.slug)
 
     audio_file = channel_folder.get_audio(video.slug)
@@ -84,13 +84,13 @@ def download_video(video: Video):
     cover.create_cover_file(
         tn_path,
         channel_folder.file_avatar,
-        details.title,
+        video.title,
         video.channel.name,
         cover.COVER_STYLE_BLUR,
         cover_file,
     )
 
-    youtube.tag_audio(audio_file, details, cover_file)
+    videoutil.tag_audio(audio_file, video, cover_file)
 
     video.downloaded = timezone.now()
     video.download_size = os.path.getsize(audio_file)
@@ -115,13 +115,18 @@ def import_channel(channel_id: str, limit: int = None):
         _load_scraped_video(vid, channel)
 
 
+def update_channel(channel: Channel):
+    """Update a single channel from its RSS feed"""
+    videos = youtube.get_channel_videos_from_feed(channel.id)
+    for vid in videos:
+        _load_scraped_video(vid, channel)
+
+
 def update_channels():
     """
     Update all channels from their RSS feeds and download new videos.
     This task is scheduled a regular intervals.
     """
     for channel in Channel.objects.filter(active=True):
-        videos = youtube.get_channel_videos_from_feed(channel.id)
-        for vid in videos:
-            _load_scraped_video(vid, channel)
+        queue.enqueue(update_channel, channel)


@@ -1,7 +1,10 @@
 import os
 
-from ucast.models import Video
-from ucast.service import cover, storage
+from django.utils import timezone
+
+from ucast import queue
+from ucast.models import Channel, Video
+from ucast.service import cover, storage, util, youtube
 
 
 def recreate_cover(video: Video):
@@ -12,12 +15,10 @@ def recreate_cover(video: Video):
     cover_file = cf.get_cover(video.slug)
 
     if not os.path.isfile(cf.file_avatar):
-        print(f"could not find avatar for channel {video.channel_id}")
-        return
+        raise FileNotFoundError(f"could not find avatar for channel {video.channel_id}")
 
     if not os.path.isfile(thumbnail_file):
-        print(f"could not find thumbnail for video {video.id}")
-        return
+        raise FileNotFoundError(f"could not find thumbnail for video {video.id}")
 
     cover.create_cover_file(
         thumbnail_file,
@@ -30,5 +31,58 @@ def recreate_cover(video: Video):
 
 def recreate_covers():
+    for video in Video.objects.filter(downloaded__isnull=False):
+        queue.enqueue(recreate_cover, video)
+
+
+def update_file_storage():
+    store = storage.Storage()
     for video in Video.objects.all():
-        recreate_cover(video)
+        cf = store.get_channel_folder(video.channel.slug)
+        audio_file = cf.get_audio(video.slug)
+        cover_file = cf.get_cover(video.slug)
+        tn_file = cf.get_thumbnail(video.slug)
+        tn_file_sm = cf.get_thumbnail(video.slug, True)
+
+        if not os.path.isfile(audio_file) or not os.path.isfile(tn_file):
+            video.downloaded = None
+            video.download_size = None
+            video.save()
+            return
+
+        if not os.path.isfile(tn_file_sm):
+            util.resize_thumbnail(tn_file, tn_file_sm)
+
+        if not os.path.isfile(cover_file):
+            recreate_cover(video)
+
+        if video.downloaded is None:
+            video.downloaded = timezone.now()
+            video.download_size = os.path.getsize(audio_file)
+            video.save()
+
+
+def update_channel_info(channel: Channel):
+    channel_data = youtube.get_channel_metadata(youtube.channel_url_from_id(channel.id))
+
+    if channel_data.avatar_url != channel.avatar_url:
+        store = storage.Storage()
+        channel_folder = store.get_channel_folder(channel.slug)
+        util.download_image_file(channel_data.avatar_url, channel_folder.file_avatar)
+        util.resize_avatar(channel_folder.file_avatar, channel_folder.file_avatar_sm)
+        channel.avatar_url = channel_data.avatar_url
+
+    channel.name = channel_data.name
+    channel.description = channel_data.description
+    channel.save()
+
+
+def update_channel_infos():
+    for channel in Channel.objects.filter(active=True):
+        queue.enqueue(update_channel_info, channel)

ucast/tasks/schedule.py

@@ -1,17 +1,17 @@
 import logging
 from datetime import datetime
 
-import django_rq
 from django.conf import settings
 
+from ucast import queue
 from ucast.tasks import download
 
-scheduler = django_rq.get_scheduler()
 log = logging.getLogger(__name__)
 
 
 def clear_scheduled_jobs():
     """Delete all scheduled jobs to prevent duplicates"""
+    scheduler = queue.get_scheduler()
     for job in scheduler.get_jobs():
         log.debug("Deleting scheduled job %s", job)
         job.delete()
@@ -19,6 +19,7 @@ def clear_scheduled_jobs():
 
 def register_scheduled_jobs():
     """Register all scheduled jobs"""
+    scheduler = queue.get_scheduler()
     scheduler.schedule(
         datetime.utcnow(),
         download.update_channels,
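
Note: the hunk's context window ends inside the scheduler.schedule() call. For
reference, rq-scheduler takes the recurrence as keyword arguments, roughly as
in the sketch below; the interval value is an assumption, it is not visible in
this diff.

scheduler.schedule(
    datetime.utcnow(),         # scheduled_time: first run immediately
    download.update_channels,  # func to enqueue
    interval=600,              # seconds between runs (assumed value)
    repeat=None,               # None = repeat indefinitely
)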


@@ -1,3 +1,80 @@
-from importlib.resources import files
+import json
+import uuid
+from dataclasses import dataclass
+from datetime import datetime
+from importlib import resources
+from typing import Dict, List
 
-DIR_TESTFILES = files("ucast.tests._testfiles")
+from ucast.service import youtube
+
+DIR_TESTFILES = resources.path("ucast.tests", "_testfiles")
+
+
+def get_video_details(video_id: str):
+    with open(DIR_TESTFILES / "fixture" / "videodetails.json") as f:
+        videodetails = json.load(f)
+
+    vd_raw = videodetails[video_id]
+    vd_raw["published"] = datetime.fromisoformat(vd_raw["published"])
+    return youtube.VideoDetails(**vd_raw)
+
+
+def get_channel_metadata(channel_url: str):
+    with open(DIR_TESTFILES / "fixture" / "channelmeta.json") as f:
+        channelmeta = json.load(f)
+
+    return youtube.ChannelMetadata(**channelmeta[channel_url])
+
+
+_global_mock_calls: Dict[str, List["_GlobalMockCall"]] = {}
+
+
+@dataclass
+class _GlobalMockCall:
+    args: list
+    kwargs: dict
+
+
+class GlobalMock:
+    def __init__(self):
+        self.uuid = str(uuid.uuid4())
+
+    @property
+    def calls(self) -> List[_GlobalMockCall]:
+        global _global_mock_calls
+
+        if self.uuid not in _global_mock_calls:
+            _global_mock_calls[self.uuid] = []
+
+        return _global_mock_calls[self.uuid]
+
+    @property
+    def n_calls(self) -> int:
+        return len(self.calls)
+
+    def __call__(self, *args, **kwargs):
+        call = _GlobalMockCall(args, kwargs)
+        self.calls.append(call)
+
+    def assert_called(self):
+        if not self.calls:
+            raise AssertionError("Mock has never been called")
+
+    def assert_any_call(self, *args, **kwargs):
+        self.assert_called()
+
+        for call in self.calls:
+            if call.args == args and call.kwargs == kwargs:
+                return
+
+        raise AssertionError(
+            f"Call with args: {args}, kwargs: {kwargs} not found.\
+            Registered calls: {self.calls}"
+        )
+
+    def assert_called_with(self, *args, **kwargs):
+        self.assert_called()
+
+        call = self.calls[-1]
+        assert call.args == args and call.kwargs == kwargs
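
Note: a sketch of how GlobalMock pairs with the new pytest-mock dependency (the
patch target is real, the test itself is illustrative). Because calls are
recorded in a process-global dict keyed by the mock's UUID, any copy of the
instance reports into the same call list, which plain unittest.mock objects do
not guarantee once an object has been copied.

from ucast import queue
from ucast.tests import GlobalMock  # assumed import path for this helper


def test_enqueue_is_called(mocker):
    mock_enqueue = GlobalMock()
    # swap the real enqueue helper for the recording mock
    mocker.patch("ucast.queue.enqueue", mock_enqueue)

    queue.enqueue(print, "hello")

    mock_enqueue.assert_called()
    mock_enqueue.assert_any_call(print, "hello")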

Binary file not shown (new image, 91 KiB).

ucast/tests/_testfiles/fixture/channelmeta.json Normal file

@@ -0,0 +1,20 @@
{
"https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q": {
"id": "UCGiJh0NZ52wRhYKYnuZI08Q",
"name": "ThetaDev",
"description": "I'm ThetaDev. I love creating cool projects using electronics, 3D printers and other awesome tech-based stuff.",
"avatar_url": "https://yt3.ggpht.com/ytc/AKedOLSnFfmpibLLoqyaYdsF6bJ-zaLPzomII__FrJve1w=s900-c-k-c0x00ffffff-no-rj"
},
"https://www.youtube.com/channel/UC2TXq_t06Hjdr2g_KdKpHQg": {
"id": "UC2TXq_t06Hjdr2g_KdKpHQg",
"name": "media.ccc.de",
"description": "The real official channel of the chaos computer club, operated by the CCC VOC (https://c3voc.de)",
"avatar_url": "https://yt3.ggpht.com/c1jcNSbPuOMDUieixkWIlXc82kMNJ8pCDmq5KtL8hjt74rAXLobsT9Y078-w5DK7ymKyDaqr=s900-c-k-c0x00ffffff-no-rj"
},
"https://www.youtube.com/channel/UCmLTTbctUZobNQrr8RtX8uQ": {
"id": "UCmLTTbctUZobNQrr8RtX8uQ",
"name": "Creative Commons",
"description": "Hello friends,\nWelcome to my channel CREATIVE COMMONS.\nOn this channel you will get all the videos absolutely free copyright and no matter how many videos you download there is no copyright claim you can download them and upload them to your channel and all the music is young Is on the channel they can also download and use in their videos on this channel you will find different videos in which OUTRO Videos, INTRO Videos, FREE MUSIC, FREE SOUND EFFECTS, LOWER THIRDS, and more.",
"avatar_url": "https://yt3.ggpht.com/-ybcsEHc8YCmKUZMr2bf4DZoDv7SKrutgKIh8kSxXugj296QkqtBZQXVzpuZ1Izs8kNUz35B=s900-c-k-c0x00ffffff-no-rj"
}
}

File diff suppressed because it is too large.


@@ -0,0 +1,141 @@
[
{
"model": "ucast.channel",
"pk": "UCGiJh0NZ52wRhYKYnuZI08Q",
"fields": {
"name": "ThetaDev",
"slug": "ThetaDev",
"description": "I'm ThetaDev. I love creating cool projects using electronics, 3D printers and other awesome tech-based stuff.",
"active": true,
"skip_livestreams": true,
"skip_shorts": true,
"keep_videos": null,
"avatar_url": "https://yt3.ggpht.com/ytc/AKedOLSnFfmpibLLoqyaYdsF6bJ-zaLPzomII__FrJve1w=s900-c-k-c0x00ffffff-no-rj"
}
},
{
"model": "ucast.channel",
"pk": "UC2TXq_t06Hjdr2g_KdKpHQg",
"fields": {
"name": "media.ccc.de",
"slug": "media_ccc_de",
"description": "The real official channel of the chaos computer club, operated by the CCC VOC (https://c3voc.de)",
"active": true,
"skip_livestreams": true,
"skip_shorts": true,
"keep_videos": null,
"avatar_url": "https://yt3.ggpht.com/c1jcNSbPuOMDUieixkWIlXc82kMNJ8pCDmq5KtL8hjt74rAXLobsT9Y078-w5DK7ymKyDaqr=s900-c-k-c0x00ffffff-no-rj"
}
},
{
"model": "ucast.channel",
"pk": "UCmLTTbctUZobNQrr8RtX8uQ",
"fields": {
"name": "Creative Commons",
"slug": "Creative_Commons",
"description": "Hello friends,\nWelcome to my channel CREATIVE COMMONS.\nOn this channel you will get all the videos absolutely free copyright and no matter how many videos you download there is no copyright claim you can download them and upload them to your channel and all the music is young Is on the channel they can also download and use in their videos on this channel you will find different videos in which OUTRO Videos, INTRO Videos, FREE MUSIC, FREE SOUND EFFECTS, LOWER THIRDS, and more.",
"active": true,
"skip_livestreams": true,
"skip_shorts": true,
"keep_videos": null,
"avatar_url": "https://yt3.ggpht.com/-ybcsEHc8YCmKUZMr2bf4DZoDv7SKrutgKIh8kSxXugj296QkqtBZQXVzpuZ1Izs8kNUz35B=s900-c-k-c0x00ffffff-no-rj"
}
},
{
"model": "ucast.video",
"pk": "ZPxEr4YdWt8",
"fields": {
"title": "ThetaDev @ Embedded World 2019",
"slug": "20190602_ThetaDev_Embedded_World_2019",
"channel": "UCGiJh0NZ52wRhYKYnuZI08Q",
"published": "2019-06-02T00:00:00Z",
"downloaded": "2022-05-15T22:16:03.096Z",
"description": "This february I spent one day at the Embedded World in Nuremberg. They showed tons of interesting electronics stuff, so I had to take some pictures and videos for you to see ;-)\n\nSorry for the late upload, I just didn't have time to edit my footage.\n\nEmbedded World: https://www.embedded-world.de/\n\nMy website: https://thdev.org\nTwitter: https://twitter.com/Theta_Dev",
"duration": 267,
"is_livestream": false,
"is_short": false,
"download_size": 4558477
}
},
{
"model": "ucast.video",
"pk": "_I5IFObm_-k",
"fields": {
"title": "Easter special: 3D printed Bunny",
"slug": "20180331_Easter_special_3D_printed_Bunny",
"channel": "UCGiJh0NZ52wRhYKYnuZI08Q",
"published": "2018-03-31T00:00:00Z",
"downloaded": "2022-05-15T22:16:12.514Z",
"description": "Happy Easter 2018!\nThis is just a special video where I print a little bunny as an Easter gift for friends or relatives. I hope you like the model, too.\n\nSadly my camera doesn't support timelapses, so I had to record the whole 4h printing process in real time, resulting in 30GB of footage. But I think it was worth it ;-)\n\n__PROJECT_LINKS___________________________\nBunny: https://www.thingiverse.com/thing:287884\n\n__COMPONENT_SUPPLIERS__________________\n3D printer: https://www.prusa3d.com/\n3D printing filament: https://www.dasfilament.de/\n______________________________________________\nMy website: https://thdev.org\nTwitter: https://twitter.com/Theta_Dev",
"duration": 511,
"is_livestream": false,
"is_short": false,
"download_size": 8444518
}
},
{
"model": "ucast.video",
"pk": "mmEDPbbSnaY",
"fields": {
"title": "ThetaDevlog#2 - MySensors singleLED",
"slug": "20180326_ThetaDevlog_2_MySensors_singleLED",
"channel": "UCGiJh0NZ52wRhYKYnuZI08Q",
"published": "2018-03-26T00:00:00Z",
"downloaded": "2022-05-15T22:16:20.280Z",
"description": "The PCBs and components for the MySensors smart home devices arrived!\nIn this video I'll show you how to build the singleLED controller to switch/dim your 12V led lights. Detailed building instructions can be found on OpenHardware or GitHub.\n\n__PROJECT_LINKS___________________________\nOpenHardware: https://www.openhardware.io/view/563\nGitHub: https://github.com/Theta-Dev/MySensors-singleLED\n\nProgramming adapter: https://thdev.org/?Projects___misc___micro_JST\nBoard definitions: http://files.thdev.org/arduino/atmega.zip\n\n__COMPONENT_SUPPLIERS__________________\nElectronic components: https://www.aliexpress.com/\nPCBs: http://www.allpcb.com/\n3D printing filament: https://www.dasfilament.de/\n______________________________________________\nMy website: https://thdev.org\nTwitter: https://twitter.com/Theta_Dev\n______________________________________________\nMusic by Bartlebeats: https://bartlebeats.bandcamp.com",
"duration": 463,
"is_livestream": false,
"is_short": false,
"download_size": 7648860
}
},
{
"model": "ucast.video",
"pk": "Cda4zS-1j-k",
"fields": {
"title": "ThetaDevlog#1 - MySensors Smart Home!",
"slug": "20180217_ThetaDevlog_1_MySensors_Smart_Home",
"channel": "UCGiJh0NZ52wRhYKYnuZI08Q",
"published": "2018-02-17T00:00:00Z",
"downloaded": "2022-05-15T22:16:25.237Z",
"description": "Smart Home devices have been around for some time and can really make your life easier. But most of them are quite pricey and not always worth the money.\n\nHow about a sytem that costs only 5€ per device and has all the benefits of the expensive solutions? The open source project MySensors claims to do that. In this series I'll try this and find out whether it works!\n\n______________________________________________\nMy website: https://thdev.org\nTwitter: https://twitter.com/Theta_Dev",
"duration": 303,
"is_livestream": false,
"is_short": false,
"download_size": 5091124
}
},
{
"model": "ucast.video",
"pk": "2xfXsqyd8YA",
"fields": {
"title": "cy: Log4Shell - Bug oder Feature",
"slug": "20220521_cy_Log4Shell_Bug_oder_Feature",
"channel": "UC2TXq_t06Hjdr2g_KdKpHQg",
"published": "2022-05-21T00:00:00Z",
"downloaded": null,
"description": "https://media.ccc.de/v/gpn20-60-log4shell-bug-oder-feature\n\n\n\nUm den Jahreswechsel ging ein Aufschrei durch die IT-Abteilungen der Welt, der es bis in die Mainstream-Medien geschafft hat. Noch Wochen später zeigen sich Folgeprobleme in weit verbreiteter Software.\n \nIn Log4j, einer weit verbreiteten Java-Bibliothek wurde eine massive Sicherheitslücke gefunden, die die Ausführung von Schadcode auf einem entfernten System erlaubt.\nIn diesem Vortrag soll rekapitulierend erklärt werden, warum und wann es zu dem Problem kam und welche Auswirkungen bisher erkennbar sind. Ausserdem werden die technischen Details der Schwachstelle erklärt und in einer Live-Demo gezeigt, wie die Schwachstelle ausgenutzt werden kann.\n\n\n\ncy\n\nhttps://cfp.gulas.ch/gpn20/talk/77BCXN/\n\n#gpn20 #Security",
"duration": 3547,
"is_livestream": false,
"is_short": false,
"download_size": null
}
},
{
"model": "ucast.video",
"pk": "I0RRENheeTo",
"fields": {
"title": "No copyright intro free fire intro | no text | free copy right | free templates | free download",
"slug": "20211010_No_copyright_intro_free_fire_intro_no_text_free_copy_right_free_templates_free_download",
"channel": "UCmLTTbctUZobNQrr8RtX8uQ",
"published": "2021-10-10T00:00:00Z",
"downloaded": null,
"description": "Like Video▬▬▬▬▬❤\uD83D\uDC4D❤\n▬▬\uD83D\uDC47SUBSCRIBE OUR CHANNEL FOR LATEST UPDATES\uD83D\uDC46▬▬\nThis Channel: https://www.youtube.com/channel/UCmLTTbctUZobNQrr8RtX8uQ?sub_confirmation=1\nOther Channel: https://www.youtube.com/channel/UCKtfYFXi5A4KLIUdjgvfmHg?sub_confirmation=1\n▬▬▬▬▬▬▬▬/Subscription Free\\▬▬▬▬▬▬▬▬▬\n▬▬▬▬▬\uD83C\uDF81...Share Video To Friends...\uD83C\uDF81▬▬▬▬▬▬▬\n▬▬▬▬\uD83E\uDD14...Comment Any Questions....\uD83E\uDD14▬▬▬▬▬▬\nHello friends, \n Shahzaib Hassan and you are watching Creative Commons YouTube channel. On this channel, you will find all the videos absolutely free copyright which you can download and use in any project.\n It is copyright free so you won't have any problem using end screen for YouTube. if you use it or download and reupload it to your channel. By doing this you can use it for YouTube its use is absolutely free.\n ►I hope you'll like the video.◄\n ►Thanks For Watching◄ \nIf you really like this video then please don't forget to...\n\n\n▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬▬\n▬▬▬▬▬▬▬▬▬▬Tags\uD83D\uDC47▬▬▬▬▬▬▬▬▬▬\n#Creativecommons #commoncreative #free #freecopyright #nocopyright #nowatermark #freetouse #intro #notext #fireefire #channelintro",
"duration": 8,
"is_livestream": false,
"is_short": false,
"download_size": null
}
}
]

Binary file not shown. (new image: 96 KiB)

Binary file not shown. (new image: 92 KiB)

Binary file not shown. (new image: 67 KiB)

Binary file not shown. (new image: 60 KiB)

106
ucast/tests/conftest.py Normal file
View file

@ -0,0 +1,106 @@
import shutil
import tempfile
from pathlib import Path
from unittest import mock

import pytest
import rq
from django.conf import settings
from django.core.management import call_command
from fakeredis import FakeRedis

from ucast import queue, tests
from ucast.models import Video
from ucast.service import cover, storage, util, videoutil, youtube


@pytest.fixture(scope="session")
def django_db_setup(django_db_setup, django_db_blocker):
    with django_db_blocker.unblock():
        fixture_path = tests.DIR_TESTFILES / "fixture" / "videos.json"
        call_command("loaddata", fixture_path)


@pytest.fixture
def download_dir() -> Path:
    tmpdir_o = tempfile.TemporaryDirectory()
    tmpdir = Path(tmpdir_o.name)
    settings.DOWNLOAD_ROOT = tmpdir

    # Copy channel avatars
    store = storage.Storage()
    for slug, avatar in (
        ("ThetaDev", "a1"),
        ("media_ccc_de", "a3"),
        ("Creative_Commons", "a4"),
    ):
        cf = store.get_channel_folder(slug)
        shutil.copyfile(
            tests.DIR_TESTFILES / "avatar" / f"{avatar}.jpg", cf.file_avatar
        )
        util.resize_avatar(cf.file_avatar, cf.file_avatar_sm)

    yield tmpdir


@pytest.fixture
@pytest.mark.django_db
def download_dir_content(download_dir) -> Path:
    store = storage.Storage()

    for video in Video.objects.filter(downloaded__isnull=False):
        cf = store.get_channel_folder(video.channel.slug)
        file_audio = cf.get_audio(video.slug)
        file_tn = cf.get_thumbnail(video.slug)
        file_cover = cf.get_cover(video.slug)

        shutil.copyfile(tests.DIR_TESTFILES / "audio" / "audio1.mp3", file_audio)
        shutil.copyfile(tests.DIR_TESTFILES / "thumbnail" / f"{video.id}.webp", file_tn)
        util.resize_thumbnail(file_tn, cf.get_thumbnail(video.slug, True))
        cover.create_cover_file(
            file_tn,
            cf.file_avatar,
            video.title,
            video.channel.name,
            cover.COVER_STYLE_BLUR,
            file_cover,
        )
        videoutil.tag_audio(file_audio, video, file_cover)

    yield download_dir


@pytest.fixture
def rq_queue(mocker) -> rq.Queue:
    test_queue = rq.Queue(is_async=False, connection=FakeRedis())
    mocker.patch.object(queue, "get_queue")
    queue.get_queue.return_value = test_queue
    return test_queue


@pytest.fixture
def mock_download_audio(mocker) -> mock.Mock:
    def mockfn_download_audio(
        video_id: str, download_path: Path, sponsorblock=False
    ) -> youtube.VideoDetails:
        shutil.copyfile(tests.DIR_TESTFILES / "audio" / "audio1.mp3", download_path)
        return tests.get_video_details(video_id)

    download_mock: mock.Mock = mocker.patch.object(youtube, "download_audio")
    download_mock.side_effect = mockfn_download_audio
    return download_mock


@pytest.fixture
def mock_get_video_details(mocker) -> mock.Mock:
    video_details_mock: mock.Mock = mocker.patch.object(youtube, "get_video_details")
    video_details_mock.side_effect = tests.get_video_details
    return video_details_mock


@pytest.fixture
def mock_get_channel_metadata(mocker) -> mock.Mock:
    channel_meta_mock: mock.Mock = mocker.patch.object(youtube, "get_channel_metadata")
    channel_meta_mock.side_effect = tests.get_channel_metadata
    return channel_meta_mock

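A note on the queue fixture above: because rq_queue is built with is_async=False on a fakeredis connection, every enqueued job runs synchronously inside the test process, so the tests need neither a Redis server nor a separate worker. A minimal standalone sketch of that mechanism (illustration only, not part of this diff):

# Standalone sketch: a synchronous rq queue backed by fakeredis.
# With is_async=False, enqueue() performs the job immediately in-process,
# which is why the tests can assert job.is_finished right after enqueueing.
import rq
from fakeredis import FakeRedis

q = rq.Queue(is_async=False, connection=FakeRedis())
job = q.enqueue(sum, [1, 2, 3])
assert job.is_finished
assert job.result == 6
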
View file

@ -0,0 +1,52 @@
import io
import shutil
import tempfile
from pathlib import Path

import pytest
from mutagen import id3
from PIL import Image, ImageChops

from ucast import tests
from ucast.models import Video
from ucast.service import videoutil


@pytest.mark.django_db
def test_tag_audio():
    video = Video.objects.get(id="ZPxEr4YdWt8")
    tmpdir_o = tempfile.TemporaryDirectory()
    tmpdir = Path(tmpdir_o.name)
    audio_file = tmpdir / "audio.mp3"
    cover_file = tests.DIR_TESTFILES / "cover" / "c1_blur.png"

    shutil.copyfile(tests.DIR_TESTFILES / "audio" / "audio1.mp3", audio_file)
    videoutil.tag_audio(audio_file, video, cover_file)

    tag = id3.ID3(audio_file)
    assert tag["TPE1"].text[0] == "ThetaDev"
    assert tag["TALB"].text[0] == "ThetaDev"
    assert tag["TIT2"].text[0] == "2019-06-02 ThetaDev @ Embedded World 2019"
    assert tag["TDRC"].text[0].text == "2019-06-02"
    assert (
        tag["COMM::XXX"].text[0]
        == """This february I spent one day at the Embedded World in Nuremberg. They showed tons of interesting electronics stuff, so I had to take some pictures and videos for you to see ;-)

Sorry for the late upload, I just didn't have time to edit my footage.

Embedded World: https://www.embedded-world.de/

My website: https://thdev.org
Twitter: https://twitter.com/Theta_Dev

https://youtu.be/ZPxEr4YdWt8"""
    )

    tag_cover = tag["APIC:Cover"]
    assert tag_cover.mime == "image/png"

    tag_cover_img = Image.open(io.BytesIO(tag_cover.data))
    expected_cover_img = Image.open(cover_file)
    diff = ImageChops.difference(tag_cover_img, expected_cover_img)
    assert diff.getbbox() is None

View file

@ -1,13 +1,10 @@
 import datetime
-import io
 import re
-import shutil
 import subprocess
 import tempfile
 from pathlib import Path

 import pytest
-from mutagen import id3
 from PIL import Image, ImageChops

 from ucast import tests
@ -101,43 +98,6 @@ def test_download_audio():
     assert match[1] == "00:01:40"


-def test_tag_audio(video_details):
-    tmpdir_o = tempfile.TemporaryDirectory()
-    tmpdir = Path(tmpdir_o.name)
-    audio_file = tmpdir / "audio.mp3"
-    cover_file = tests.DIR_TESTFILES / "cover" / "c1_blur.png"
-
-    shutil.copyfile(tests.DIR_TESTFILES / "audio" / "audio1.mp3", audio_file)
-    youtube.tag_audio(audio_file, video_details, cover_file)
-
-    tag = id3.ID3(audio_file)
-    assert tag["TPE1"].text[0] == "ThetaDev"
-    assert tag["TALB"].text[0] == "ThetaDev"
-    assert tag["TIT2"].text[0] == "2019-06-02 ThetaDev @ Embedded World 2019"
-    assert tag["TDRC"].text[0].text == "2019-06-02"
-    assert (
-        tag["COMM::XXX"].text[0]
-        == """https://youtu.be/ZPxEr4YdWt8
-
-This february I spent one day at the Embedded World in Nuremberg. They showed tons of interesting electronics stuff, so I had to take some pictures and videos for you to see ;-)
-
-Sorry for the late upload, I just didn't have time to edit my footage.
-
-Embedded World: https://www.embedded-world.de/
-
-My website: https://thdev.org
-Twitter: https://twitter.com/Theta_Dev"""
-    )
-
-    tag_cover = tag["APIC:Cover"]
-    assert tag_cover.mime == "image/png"
-
-    tag_cover_img = Image.open(io.BytesIO(tag_cover.data))
-    expected_cover_img = Image.open(cover_file)
-    diff = ImageChops.difference(tag_cover_img, expected_cover_img)
-    assert diff.getbbox() is None


 @pytest.mark.parametrize(
     "channel_str,channel_url",
     [

View file

@ -0,0 +1,81 @@
import os

import pytest

from ucast import queue, tests
from ucast.models import Channel, Video
from ucast.service import storage
from ucast.tasks import download

CHANNEL_ID_THETADEV = "UCGiJh0NZ52wRhYKYnuZI08Q"
VIDEO_ID_INTRO = "I0RRENheeTo"
VIDEO_SLUG_INTRO = "20211010_No_copyright_intro_free_fire_intro_no_text_free_copy_right_free_templates_free_download"


@pytest.mark.django_db
def test_download_video(download_dir, rq_queue):
    video = Video.objects.get(id=VIDEO_ID_INTRO)
    job = queue.enqueue(download.download_video, video)

    store = storage.Storage()
    cf = store.get_channel_folder(video.channel.slug)

    assert job.is_finished
    assert os.path.isfile(cf.get_audio(VIDEO_SLUG_INTRO))
    assert os.path.isfile(cf.get_cover(VIDEO_SLUG_INTRO))
    assert os.path.isfile(cf.get_thumbnail(VIDEO_SLUG_INTRO))
    assert os.path.isfile(cf.get_thumbnail(VIDEO_SLUG_INTRO, True))


@pytest.mark.django_db
def test_import_channel(
    download_dir, rq_queue, mock_get_video_details, mock_download_audio
):
    # Remove 2 videos from the database so they can be imported
    Video.objects.get(id="ZPxEr4YdWt8").delete()
    Video.objects.get(id="_I5IFObm_-k").delete()

    job = rq_queue.enqueue(download.import_channel, CHANNEL_ID_THETADEV)
    assert job.is_finished

    mock_download_audio.assert_any_call(
        "_I5IFObm_-k",
        download_dir / "ThetaDev" / "20180331_Easter_special_3D_printed_Bunny.mp3",
    )
    mock_download_audio.assert_any_call(
        "ZPxEr4YdWt8",
        download_dir / "ThetaDev" / "20190602_ThetaDev_Embedded_World_2019.mp3",
    )


@pytest.mark.django_db
def test_update_channel(
    download_dir, rq_queue, mock_get_video_details, mock_download_audio
):
    # Remove 2 videos from the database so they can be imported
    Video.objects.get(id="ZPxEr4YdWt8").delete()
    Video.objects.get(id="_I5IFObm_-k").delete()

    channel = Channel.objects.get(id=CHANNEL_ID_THETADEV)
    job = rq_queue.enqueue(download.update_channel, channel)
    assert job.is_finished

    mock_download_audio.assert_any_call(
        "_I5IFObm_-k",
        download_dir / "ThetaDev" / "20180331_Easter_special_3D_printed_Bunny.mp3",
    )
    mock_download_audio.assert_any_call(
        "ZPxEr4YdWt8",
        download_dir / "ThetaDev" / "20190602_ThetaDev_Embedded_World_2019.mp3",
    )


@pytest.mark.django_db
def test_update_channels(rq_queue, mocker):
    update_channel_mock = tests.GlobalMock()
    mocker.patch.object(download, "update_channel", update_channel_mock)

    job = rq_queue.enqueue(download.update_channels)
    assert job.is_finished
    assert update_channel_mock.n_calls == 3

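tests.GlobalMock comes from the test package's __init__, which this compare view does not show. Judging by its use here (patched in place of a task function, then checked via n_calls), it is presumably a small call-counting callable. A hypothetical reconstruction, for orientation only:

# Hypothetical reconstruction of ucast.tests.GlobalMock (the real definition
# is not part of this diff): a callable that only counts its invocations.
class GlobalMock:
    def __init__(self):
        self.n_calls = 0

    def __call__(self, *args, **kwargs):
        self.n_calls += 1
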
View file

@ -0,0 +1,72 @@
from unittest import mock

import pytest

from ucast import tests
from ucast.models import Channel, Video
from ucast.service import cover, storage
from ucast.tasks import library

CHANNEL_ID_THETADEV = "UCGiJh0NZ52wRhYKYnuZI08Q"


@pytest.mark.django_db
def test_recreate_cover(download_dir_content, rq_queue, mocker):
    create_cover_mock: mock.Mock = mocker.patch.object(cover, "create_cover_file")
    video = Video.objects.get(id="ZPxEr4YdWt8")

    store = storage.Storage()
    cf = store.get_channel_folder(video.channel.slug)

    job = rq_queue.enqueue(library.recreate_cover, video)
    assert job.is_finished

    create_cover_mock.assert_called_once_with(
        cf.get_thumbnail(video.slug),
        cf.file_avatar,
        video.title,
        video.channel.name,
        cover.COVER_STYLE_BLUR,
        cf.get_cover(video.slug),
    )


@pytest.mark.django_db
def test_recreate_covers(rq_queue, mocker):
    recreate_cover_mock = tests.GlobalMock()
    mocker.patch.object(library, "recreate_cover", recreate_cover_mock)

    job = rq_queue.enqueue(library.recreate_covers)
    assert job.is_finished
    assert recreate_cover_mock.n_calls == 4


@pytest.mark.django_db
def test_update_channel_info(rq_queue, mock_get_channel_metadata):
    channel = Channel.objects.get(id=CHANNEL_ID_THETADEV)
    channel.description = "Old description"
    channel.save()

    job = rq_queue.enqueue(library.update_channel_info, channel)
    assert job.is_finished

    channel.refresh_from_db()
    assert (
        channel.description
        == "I'm ThetaDev. I love creating cool projects \
using electronics, 3D printers and other awesome tech-based stuff."
    )


@pytest.mark.django_db
def test_update_channel_infos(rq_queue, mocker):
    update_channel_mock = tests.GlobalMock()
    mocker.patch.object(library, "update_channel_info", update_channel_mock)

    job = rq_queue.enqueue(library.update_channel_infos)
    assert job.is_finished
    assert update_channel_mock.n_calls == 3

View file

@ -52,7 +52,6 @@ def _load_dotenv() -> Path:
     if dotenv_path:
         dotenv.load_dotenv(dotenv_path)
-        print(f"Loaded config from envfile at {dotenv_path}")

         default_workdir = Path(dotenv_path).resolve().parent
         os.chdir(default_workdir)
@ -92,7 +91,6 @@ INSTALLED_APPS = [
"django.contrib.messages", "django.contrib.messages",
"django.contrib.staticfiles", "django.contrib.staticfiles",
"bulma", "bulma",
"django_rq",
] ]
MIDDLEWARE = [ MIDDLEWARE = [
@ -203,24 +201,8 @@ STATICFILES_DIRS = [resources.path("ucast", "static")]
 DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"

-REDIS_HOST = get_env("REDIS_HOST", "localhost")
-REDIS_PORT = get_env("REDIS_PORT", 6379)
-REDIS_PASSWORD = get_env("REDIS_PASSWORD", "")
-REDIS_DB = get_env("REDIS_DB", 0)
+REDIS_URL = get_env("REDIS_URL", "redis://localhost:6379")
 REDIS_QUEUE_TIMEOUT = get_env("REDIS_QUEUE_TIMEOUT", 600)
 REDIS_QUEUE_RESULT_TTL = 600

-RQ_QUEUES = {
-    "default": {
-        "HOST": REDIS_HOST,
-        "PORT": REDIS_PORT,
-        "DB": REDIS_DB,
-        "PASSWORD": REDIS_PASSWORD,
-        "DEFAULT_TIMEOUT": REDIS_QUEUE_TIMEOUT,
-        "DEFAULT_RESULT_TTL": REDIS_QUEUE_RESULT_TTL,
-    }
-}
-RQ_SHOW_ADMIN_LINK = True

 YT_UPDATE_INTERVAL = get_env("YT_UPDATE_INTERVAL", 900)

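With the django-rq RQ_QUEUES dict gone, the application has to build its queue itself from the single REDIS_URL setting. The queue.get_queue and queue.enqueue helpers referenced by the tests above are not shown in this view; a hedged sketch of what they might look like (the helper names appear in the diff, the bodies are assumptions):

# Hypothetical sketch of ucast/queue.py after the migration to vanilla rq;
# only the names get_queue/enqueue are confirmed by this diff.
import redis
import rq
from django.conf import settings

def get_queue() -> rq.Queue:
    # Build the connection from the single REDIS_URL setting introduced above.
    connection = redis.Redis.from_url(settings.REDIS_URL)
    return rq.Queue(
        connection=connection,
        default_timeout=settings.REDIS_QUEUE_TIMEOUT,
    )

def enqueue(f, *args, **kwargs) -> rq.job.Job:
    # Thin wrapper so application code never touches the connection directly.
    return get_queue().enqueue(f, *args, **kwargs)
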
View file

@ -17,7 +17,6 @@ from django.contrib import admin
 from django.urls import include, path

 urlpatterns = [
-    path("admin/django-rq/", include("django_rq.urls")),
     path("admin/", admin.site.urls),
     path("", include("ucast.urls")),
 ]