Compare commits

9 commits

| Author | SHA1 | Date |
|---|---|---|
|  | 89a190ad4a |  |
|  | 11c679975b |  |
|  | 3eacc0ad8d |  |
|  | 27eeac66f0 |  |
|  | ebe4ccf926 |  |
|  | 2fc63c0cb1 |  |
|  | 824ba9e101 |  |
|  | a4cb344091 |  |
|  | e131574393 |  |
(deleted bump2version configuration, 12 lines)

@@ -1,12 +0,0 @@
[bumpversion]
current_version = 0.4.6
commit = True
tag = True

[bumpversion:file:pyproject.toml]
search = version = "{current_version}"
replace = version = "{new_version}"

[bumpversion:file:ucast/__init__.py]
search = __version__ = "{current_version}"
replace = __version__ = "{new_version}"
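For context, this is bump2version's config: on each release it substitutes the version string in the listed files and, with `commit`/`tag` enabled, commits and tags the result. A minimal sketch of the substitution it automates (the target version 0.4.7 is made up for illustration):

```python
from pathlib import Path

def bump_file(path: str, search: str, replace: str) -> None:
    """Apply one [bumpversion:file:...] search/replace rule."""
    f = Path(path)
    f.write_text(f.read_text().replace(search, replace))

# Roughly what `bumpversion patch` would do for 0.4.6 -> 0.4.7,
# minus the git commit and tag:
bump_file("pyproject.toml", 'version = "0.4.6"', 'version = "0.4.7"')
bump_file("ucast/__init__.py", '__version__ = "0.4.6"', '__version__ = "0.4.7"')
```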
(deleted ignore file, 23 lines)

@@ -1,23 +0,0 @@
.idea

# Python
venv
dist
.tox
__pycache__
*.egg-info
.pytest_cache

# JS
node_modules

# Jupyter
.ipynb_checkpoints

# Application data
/.env
/_run*
*.sqlite3

assets
notes
.drone.yml (62 lines deleted)

@@ -1,62 +0,0 @@
kind: pipeline
name: default
type: docker

platform:
  os: linux
  arch: ''

steps:
  - name: install dependencies
    image: thetadev256/ucast-dev
    volumes:
      - name: cache
        path: /root/.cache
    commands:
      - poetry install
      - poetry run invoke reset

  - name: lint
    image: thetadev256/ucast-dev
    volumes:
      - name: cache
        path: /root/.cache
    commands:
      - poetry run invoke lint
    depends_on:
      - install dependencies

  - name: test
    image: thetadev256/ucast-dev
    volumes:
      - name: cache
        path: /root/.cache
    commands:
      - poetry run invoke test
    depends_on:
      - install dependencies

# - name: build container
#   image: quay.io/buildah/stable
#   when:
#     event:
#       - tag
#   commands:
#     - buildah login -u $DOCKER_USER -p $DOCKER_PASS -- $DOCKER_REGISTRY
#     - buildah manifest create ucast
#     - buildah bud --tag code.thetadev.de/hsa/ucast:latest --manifest ucast --arch amd64 --build-arg TARGETPLATFORM=linux/amd64 -f deploy/Dockerfile .
#     - buildah bud --tag code.thetadev.de/hsa/ucast:latest --manifest ucast --arch arm64 --build-arg TARGETPLATFORM=linux/arm64 -f deploy/Dockerfile .
#     - buildah manifest push --all ucast docker://code.thetadev.de/hsa/ucast:latest
#   environment:
#     DOCKER_REGISTRY:
#       from_secret: docker_registry
#     DOCKER_USER:
#       from_secret: docker_username
#     DOCKER_PASS:
#       from_secret: docker_password
#   depends_on:
#     - test

volumes:
  - name: cache
    temp: { }
(deleted .editorconfig-style file, 14 lines)

@@ -1,14 +0,0 @@
[*]
charset = utf-8
indent_style = space
indent_size = 4
end_of_line = lf
trim_trailing_whitespace = true
insert_final_newline = true
max_line_length = 88

[{Makefile,*.go}]
indent_style = tab

[*.{json,md,rst,ini,yml,yaml,html,js,jsx,ts,tsx,vue}]
indent_size = 2
(deleted env file, 3 lines)

@@ -1,3 +0,0 @@
UCAST_DEBUG=True
UCAST_WORKDIR=_run
UCAST_ALLOWED_HOSTS=localhost,127.0.0.1
.gitignore (14 lines changed)

@@ -2,19 +2,19 @@
 
 # Python
 venv
-dist
 .tox
 __pycache__
 *.egg-info
 .pytest_cache
 
-# JS
-node_modules
-
 # Jupyter
 .ipynb_checkpoints
 
+# Media files
+*.webm
+*.mp4
+*.mp3
+
 # Application data
-/.env
-/_run*
-*.sqlite3
+/_run
+.env
README.md (33 lines changed)

@@ -14,9 +14,11 @@
 
 ## Technology
 
-The server is to be built with the [Django](https://djangoproject.com/)
+The server is to be built with the [Flask](https://flask.palletsprojects.com/)
 web framework.
 
+Data should be stored either in an SQLite database or in JSON files.
+
 The web interface is rendered with Jinja templates; a JS framework can be omitted for now.
 Bootstrap takes care of a presentable appearance.

@@ -24,30 +26,5 @@ Bootstrap takes care of a presentable appearance.
 
 ### Setting up the project
 
-1. Install Python3 + Node.js + the [Poetry](https://python-poetry.org/) dependency manager +
-   [pnpm](https://pnpm.io/)
-2. Install the Python dependencies with ``poetry install``
-3. Install the Node dependencies with ``pnpm i``
-
-### Tasks (Python)
-
-Run: `invoke <taskname>`
-
-`test` run the unit tests
-
-`lint` check code quality/formatting
-
-`format` format the code with black
-
-`makemigrations` create a database migration
-
-`get-cover --vid <YouTube-Video-ID>` download a YouTube thumbnail
-and generate cover images for testing (stored under `ucast/tests/testfiles`)
-
-### Tasks (Node.js)
-
-Run: `npm run <taskname>`
-
-`start` compile the Sass stylesheets automatically on changes
-
-`build` compile and optimize the Sass stylesheets
+1. Install Python3 + the [Poetry](https://python-poetry.org/) dependency manager
+2. Install the dependencies with ``poetry install``
(deleted SVG icon, 2 lines)

@@ -1,2 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="12.084mm" height="12.084mm" version="1.1" viewBox="0 0 12.084 12.084" xmlns="http://www.w3.org/2000/svg"><g transform="translate(-1.75 -1.9565)"><path d="m2 2.2065v7.3223l4.2617 4.2617h3.0605l4.2617-4.2617v-7.3223h-1v6.9082l-3.6758 3.6758h-2.2324l-3.6758-3.6758v-6.9082z" color="#000000" fill="#e00" stroke="#fff" stroke-linecap="square" stroke-width=".5"/></g><g transform="translate(-3.2188 -20.416)"><path d="m3.4688 20.666v7.3223l4.2617 4.2617h3.0605l4.2617-4.2617v-7.3223h-1v6.9082l-3.6758 3.6758h-2.2324l-3.6758-3.6758v-6.9082z" color="#000000" fill="#e00"/></g></svg>

Before: 626 B
@@ -1,78 +0,0 @@
[deleted image: logo.svg - Inkscape SVG, 68.5 x 15.79 mm; the ucast wordmark drawn as six stroked paths, the "U" in red #ee0000 and the remaining letters in #282828. Before: 3.1 KiB]
@@ -1,107 +0,0 @@
[deleted image: logo_border.svg - Inkscape SVG, 68.5 x 15.79 mm; the same ucast wordmark in two layers ("Border" and "Main"), with filled paths outlined in white for use on arbitrary backgrounds. Before: 5.7 KiB]
@@ -1,78 +0,0 @@
[deleted image: logo_dark.svg - Inkscape SVG, 68.5 x 15.79 mm; the dark-mode variant of the wordmark with letters stroked in white #ffffff and the "U" in red #ee0000. Before: 3.2 KiB]
@@ -1,43 +0,0 @@
[deleted image: YOUTUBE_icon-icons.com_65487.svg - 508.15 x 357.11 px YouTube play-button logo, single white-filled path. Before: 2 KiB]
(deleted Sass stylesheet, 48 lines; indentation restored)

@@ -1,48 +0,0 @@
@import "../../node_modules/bulma/sass/utilities/initial-variables"
@import "../../node_modules/bulma/bulma"

.channel-icon
  max-height: 64px

.video-thumbnail
  width: 100%

.video-grid
  $spacing: 0.5vw

  display: grid
  grid-row-gap: $spacing
  row-gap: $spacing
  grid-column-gap: $spacing
  column-gap: $spacing
  grid-template-columns: repeat(2, minmax(0, 1fr))
  grid-column: auto

  @include tablet
    grid-template-columns: repeat(3, minmax(0, 1fr))

  @include desktop
    grid-template-columns: repeat(4, minmax(0, 1fr))

  @include widescreen
    grid-template-columns: repeat(5, minmax(0, 1fr))

  @include fullhd
    grid-template-columns: repeat(6, minmax(0, 1fr))

.video-card
  display: flex
  flex-direction: column

.video-card-content
  padding: 0 0.5vw

  &:last-child
    padding-bottom: 0.5vw

// Fix almost invisible navbar items on mobile
.navbar-item
  color: #fff

.overflow-x
  overflow-x: auto
(deleted Dockerfile for the dev image, 32 lines)

@@ -1,32 +0,0 @@
# This has to be built with docker buildx to set the TARGETPLATFORM argument
FROM registry.hub.docker.com/library/python:3.10

ARG TARGETPLATFORM

# ffmpeg static source (https://johnvansickle.com/ffmpeg/)
RUN set -e; \
    mkdir /build_ffmpeg; \
    cd /build_ffmpeg; \
    case "$TARGETPLATFORM" in \
        "linux/amd64") ffmpeg_arch="amd64";; \
        "linux/arm64") ffmpeg_arch="arm64";; \
        "linux/arm/v7") ffmpeg_arch="armhf";; \
        *) echo "TARGETPLATFORM $TARGETPLATFORM not found"; exit 1 ;;\
    esac; \
    wget "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-${ffmpeg_arch}-static.tar.xz"; \
    wget "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-${ffmpeg_arch}-static.tar.xz.md5"; \
    md5sum -c "ffmpeg-release-${ffmpeg_arch}-static.tar.xz.md5"; \
    tar Jxf "ffmpeg-release-${ffmpeg_arch}-static.tar.xz"; \
    mv "ffmpeg-5.0.1-${ffmpeg_arch}-static/ffmpeg" /usr/bin; \
    cd /; \
    rm -rf /build_ffmpeg;

# The cryptography package is written in Rust and not available as a built wheel for armv7.
# That's why we need Rust to compile it from source.
RUN set -e; \
    if [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then \
        curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y; \
        . $HOME/.cargo/env; \
    fi; \
    pip install --upgrade pip setuptools poetry; \
    rm -rf $HOME/.cargo $HOME/.rustup;
(deleted multi-stage Dockerfile for the release image, 48 lines)

@@ -1,48 +0,0 @@
FROM registry.hub.docker.com/thetadev256/ucast-dev

COPY . /build
WORKDIR /build

RUN poetry build -f wheel

FROM registry.hub.docker.com/library/python:3.10
ARG TARGETPLATFORM

# ffmpeg static source (https://johnvansickle.com/ffmpeg/)
RUN set -e; \
    mkdir /build_ffmpeg; \
    cd /build_ffmpeg; \
    case "$TARGETPLATFORM" in \
        "linux/amd64") ffmpeg_arch="amd64";; \
        "linux/arm64") ffmpeg_arch="arm64";; \
        "linux/arm/v7") ffmpeg_arch="armhf";; \
        *) echo "TARGETPLATFORM $TARGETPLATFORM not found"; exit 1 ;;\
    esac; \
    wget "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-${ffmpeg_arch}-static.tar.xz"; \
    wget "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-${ffmpeg_arch}-static.tar.xz.md5"; \
    md5sum -c "ffmpeg-release-${ffmpeg_arch}-static.tar.xz.md5"; \
    tar Jxf "ffmpeg-release-${ffmpeg_arch}-static.tar.xz"; \
    mv "ffmpeg-5.0.1-${ffmpeg_arch}-static/ffmpeg" /usr/bin; \
    cd /; \
    rm -rf /build_ffmpeg;

# nginx
RUN apt-get update && \
    apt-get install -y nginx && \
    apt-get clean && \
    mkdir /ucast && \
    chown 1000:1000 /ucast && \
    chown -R 1000:1000 /var/lib/nginx /var/log/nginx

COPY ./deploy/nginx.conf /etc/nginx/nginx.conf
COPY ./deploy/nginx /etc/nginx/conf.d
COPY ./deploy/entrypoint.py /entrypoint.py

COPY --from=0 /build/dist /install
RUN pip install -- /install/*.whl gunicorn honcho && \
    rm -rf ~/.cache/pip

ENV UCAST_WORKDIR=/ucast

EXPOSE 8001
ENTRYPOINT /entrypoint.py
(deleted multi-arch container build script, 44 lines)

@@ -1,44 +0,0 @@
#!/bin/sh
set -e
# Source: https://danmanners.com/posts/2022-01-buildah-multi-arch/

# Set your manifest name
export MANIFEST_NAME="ucast"

# Set the required variables
export BUILD_PATH="."
export DOCKERFILE="deploy/Dockerfile"
export REGISTRY="registry.hub.docker.com"
export USER="thetadev256"
export IMAGE_NAME="ucast"
export IMAGE_TAG="v0.3.2"

# Create a multi-architecture manifest
buildah manifest create ${MANIFEST_NAME}

# Build your amd64 architecture container
buildah bud \
    --tag "${REGISTRY}/${USER}/${IMAGE_NAME}:${IMAGE_TAG}" \
    --manifest ${MANIFEST_NAME} \
    --arch amd64 \
    --build-arg TARGETPLATFORM=linux/amd64 \
    -f ${DOCKERFILE} \
    ${BUILD_PATH}

# Build your arm64 architecture container
buildah bud \
    --tag "${REGISTRY}/${USER}/${IMAGE_NAME}:${IMAGE_TAG}" \
    --manifest ${MANIFEST_NAME} \
    --arch arm64 \
    --build-arg TARGETPLATFORM=linux/arm64 \
    -f ${DOCKERFILE} \
    ${BUILD_PATH}

# Push the full manifest, with both CPU architectures
buildah manifest push --all \
    ${MANIFEST_NAME} \
    "docker://${REGISTRY}/${USER}/${IMAGE_NAME}:${IMAGE_TAG}"

buildah manifest push --all \
    ${MANIFEST_NAME} \
    "docker://${REGISTRY}/${USER}/${IMAGE_NAME}"
(changed docker-compose file)

@@ -1,21 +1,7 @@
 version: "3"
 services:
-  ucast:
-    image: thetadev256/ucast
-    user: 1000:1000
-    restart: unless-stopped
-    ports:
-      - "8001:8001"
-    volumes:
-      - "../_run:/ucast"
-    environment:
-      UCAST_REDIS_URL: "redis://redis:6379"
-      UCAST_SECRET_KEY: "django-insecure-Es/+plApGxNBy8+ewB+74zMlmfV2H3whw6gu7i0ESwGrEWAUYRP3HM2EX0PLr3UJ"
-      UCAST_ALLOWED_HOSTS: ".localhost,127.0.0.1"
-      UCAST_N_WORKERS: 2
-      UCAST_TZ: "Europe/Berlin"
-
   redis:
-    container_name: redis
+    container_name: ucast-redis
     image: redis:alpine
-    restart: unless-stopped
+    ports:
+      - "127.0.0.1:6379:6379"
(deleted docker-compose file for local development, 14 lines)

@@ -1,14 +0,0 @@
version: "3"
services:
  redis:
    container_name: redis
    image: redis:alpine
    ports:
      - "127.0.0.1:6379:6379"

  nginx:
    image: nginx:1
    network_mode: "host"
    volumes:
      - "./nginx:/etc/nginx/conf.d:ro"
      - "../_run:/ucast:ro"
(deleted container entrypoint script, 30 lines)

@@ -1,30 +0,0 @@
#!/usr/bin/env python
import os
import subprocess
import sys

from honcho import manager


def run_cmd(cmd):
    returncode = subprocess.call(cmd)
    if returncode != 0:
        sys.exit(returncode)


n_workers = int(os.environ.get("UCAST_N_WORKERS", "1"))

run_cmd(["ucast-manage", "collectstatic", "--noinput"])
run_cmd(["ucast-manage", "migrate"])

m = manager.Manager()
m.add_process("ucast", "gunicorn ucast_project.wsgi")
m.add_process("nginx", "nginx")

for i in range(n_workers):
    m.add_process(f"worker_{i}", "ucast-manage rqworker")

m.add_process("scheduler", "ucast-manage rqscheduler")

m.loop()
sys.exit(m.returncode)
(deleted nginx main configuration, 61 lines)

@@ -1,61 +0,0 @@
worker_processes auto;
daemon off;
pid /tmp/nginx.pid;
include /etc/nginx/modules-enabled/*.conf;

events {
    worker_connections 768;
    # multi_accept on;
}

http {

    ##
    # Basic Settings
    ##

    sendfile on;
    tcp_nopush on;
    types_hash_max_size 2048;
    # server_tokens off;

    # server_names_hash_bucket_size 64;
    # server_name_in_redirect off;

    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    ##
    # SSL Settings
    ##

    ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3; # Dropping SSLv3, ref: POODLE
    ssl_prefer_server_ciphers on;

    ##
    # Logging Settings
    ##

    access_log off;
    error_log stderr;

    ##
    # Gzip Settings
    ##

    gzip on;

    # gzip_vary on;
    # gzip_proxied any;
    # gzip_comp_level 6;
    # gzip_buffers 16 8k;
    # gzip_http_version 1.1;
    # gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript;

    ##
    # Virtual Host Configs
    ##

    include /etc/nginx/conf.d/*.conf;
    include /etc/nginx/sites-enabled/*;
}
(deleted nginx site configuration, 26 lines)

@@ -1,26 +0,0 @@
server {
    listen 8001;
    server_name localhost;

    client_max_body_size 1M;

    # serve media files
    location /static/ {
        alias /ucast/static/;
    }

    location /internal_files/ {
        internal;
        alias /ucast/data/;
    }

    location / {
        proxy_set_header Host $http_host;
        proxy_pass http://127.0.0.1:8000;
    }

    # location /errors/ {
    #     alias /etc/nginx/conf.d/errorpages/;
    #     internal;
    # }
}
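The `internal` marker on `/internal_files/` is the nginx half of an X-Accel-Redirect setup: the application authorizes a request, then hands the actual file transfer off to nginx. A sketch of what the corresponding Django view could look like (view name and URL layout are assumptions, not taken from this diff):

```python
from django.http import HttpResponse

def serve_audio(request, filename: str) -> HttpResponse:
    # Authorization/object lookup would happen here; nginx then streams
    # the file from /ucast/data/ (aliased above) instead of Django.
    response = HttpResponse(content_type="audio/mpeg")
    response["X-Accel-Redirect"] = f"/internal_files/{filename}"
    return response
```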
docs/.gitignore (3 lines deleted)

@@ -1,3 +0,0 @@
/.tox
/build
/venv
(deleted Sphinx Makefile, 20 lines)

@@ -1,20 +0,0 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS    ?=
SPHINXBUILD   ?= sphinx-build
SOURCEDIR     = .
BUILDDIR      = build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
(deleted LaTeX title-page style for the thesis PDF, 108 lines; comments translated from German)

@@ -1,108 +0,0 @@
\usepackage[absolute]{textpos}
\usepackage{setspace}

\newcommand{\hsamaketitle}{%
  \let\sphinxrestorepageanchorsetting\relax
  \ifHy@pageanchor\def\sphinxrestorepageanchorsetting{\Hy@pageanchortrue}\fi
  \hypersetup{pdfauthor={\@author},
    pdftitle={\@title},
    pdfsubject={\subtitle},
    pdfkeywords={Forschung, Entwicklung, Informatik},
  }
  \hypersetup{pageanchor=false}% avoid duplicate destination warnings
  \begin{titlepage}
    % Cover page - Hochschule Augsburg
    \thispagestyle{empty}\null
    % Logo - Hochschule Augsburg - Informatik
    \begin{textblock}{10}(8.0,1.1)
      \begin{figure}[h]
        \centering
        \includegraphics[width=0.45\textwidth]{hsa_informatik_logo_lq.pdf}
      \end{figure}
    \end{textblock}

    % Text below the logo
    \begin{textblock}{15}(12.43,2.4)
      \LARGE
      \textsf{
        \textbf{\textcolor[rgb]{1,0.41,0.13}{\\
          \begin{flushleft}
            Fakultät für\\
            Informatik\\
          \end{flushleft}
        }
      }
    }
    \end{textblock}

    % Left text box - information
    \begin{textblock}{15}(2,2)
      %\LARGE
      \begin{flushleft}
        \begin{spacing} {1.2}
          \huge
          \textbf{\@title}
          \vspace{30pt}
          \textcolor[rgb]{1,0.41,0.13}{\\
          \textbf{\subtitle}}\\
          \vspace{60pt}
          \LARGE
          Studienrichtung\\
          \hscourse\\
          \vspace{30pt}
          \@author\\
          \vspace{60pt}
          \LARGE
          Prüfer: \examiner\\
          \vspace{10pt}
          Abgabedatum: \deadline\\
        \end{spacing}
      \end{flushleft}
    \end{textblock}

    % Right text box - university
    \begin{textblock}{5}(12.45,8.0)
      \textcolor[rgb]{1,0,0}{\\
      \footnotesize
      \begin{flushleft}
        \begin{spacing} {1.3}
          Hochschule f\"ur angewandte\\
          Wissenschaften Augsburg\\
          \vspace{4pt}
          An der Hochschule 1\\
          D-86161 Augsburg\\
          \vspace{4pt}
          Telefon +49 821 55 86-0\\
          Fax +49 821 55 86-3222\\
          www.hs-augsburg.de\\
          info(at)hs-augsburg-de
        \end{spacing}
      \end{flushleft}
      }
    \end{textblock}

    % Right middle text box - faculty
    \begin{textblock}{5}(12.45,11.4)
      \footnotesize
      \begin{flushleft}
        \begin{spacing} {1.3}
          Fakult\"at f\"ur Informatik\\
          Telefon +49 821 55 86-3450\\
          Fax \hspace{10pt} +49 821 55 86-3499\\
        \end{spacing}
      \end{flushleft}
    \end{textblock}
  \end{titlepage}%
  \setcounter{footnote}{0}%
  \let\thanks\relax\let\maketitle\relax
  %\gdef\@thanks{}\gdef\@author{}\gdef\@title{}
  \clearpage
  \ifdefined\sphinxbackoftitlepage\sphinxbackoftitlepage\fi
  \if@openright\cleardoublepage\else\clearpage\fi
  \sphinxrestorepageanchorsetting
}
docs/conf.py (92 lines deleted)

@@ -1,92 +0,0 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('../code'))


# -- Project information -----------------------------------------------------

project = "Ucast"
subtitle = "Projektarbeit Webtechnologien"
author = "Thomas Hampp"
copyright = "2022 " + author

examiner = "Fabian Ziegler"
deadline = "09.07.2022"
course = "Master Informatik"


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinxcontrib.cairosvgconverter",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "de"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [".tox"]

# Pygments styling used for code syntax highlighting.
# See this page for an overview of all styles including live demo:
# https://pygments.org/demo/
pygments_style = "vs"


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

# -- Options for PDF output -------------------------------------------------
latex_engine = "xelatex"
# latex_theme = 'hsathesis'
latex_elements = {
    "extraclassoptions": "openany,oneside",
    "preamble": r"""
\usepackage{hsastyle}

\newcommand\subtitle{%s}
\newcommand\deadline{%s}
\newcommand\examiner{%s}
\newcommand\hscourse{%s}
"""
    % (subtitle, deadline, examiner, course),
    "maketitle": r"\hsamaketitle",
}

latex_additional_files = [
    "_latex/logos/hsa_informatik_logo_lq.pdf",
    "_latex/hsastyle.sty",
]
(deleted documentation index, 9 lines)

@@ -1,9 +0,0 @@
Ucast
#####

.. toctree::
   :maxdepth: 2
   :caption: Contents:
   :glob:

   src/*
(deleted documentation requirements, 4 lines)

@@ -1,4 +0,0 @@
Sphinx==4.4.0
sphinx-autobuild
sphinx-rtd-theme
sphinxcontrib-svg2pdfconverter[CairoSVG]
(deleted documentation source, 245 lines; translated from German)

@@ -1,245 +0,0 @@
Introduction
############

Most of the YouTube videos I watch are news or commentary videos. Since these
videos are very text-heavy, I often play them in the background while working
on my projects.

On the go, however, I have no way to play YouTube videos in the background,
because the YouTube app pauses playback when it is backgrounded. It is
possible to download YouTube videos with suitable web services, but that is
fairly inconvenient.

That is why I more often listen to podcasts on the go; podcast apps (I use
AntennaPod) can both stream episodes and download them to the phone for
offline listening.

I then wondered whether it would be possible to turn YouTube channels into
podcasts automatically. This led to the idea of a server that automatically
downloads YouTube videos as MP3 files and serves them in podcast format.
This way, the audio content from YouTube can be listened to both at the PC
and on the go with a podcast app.

Technology
##########

Web framework
*************

I built ucast with the Django web framework. Django has the advantage that
basic web-application features such as a login system are already
implemented. This let me focus more quickly on the actual features of my
application.


YouTube downloading
*******************

Videos are downloaded with the Python library
`yt-dlp <https://github.com/yt-dlp/yt-dlp>`_.
This library can download videos from YouTube and various other video
platforms and convert them to MP3 format using ffmpeg.
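As a rough sketch of the download-and-convert step just described (these options are illustrative defaults, not ucast's actual settings, and the URL is a stand-in):

```python
from yt_dlp import YoutubeDL

video_url = "https://www.youtube.com/watch?v=dQw4w9WgXcQ"  # any video URL or bare ID

opts = {
    "format": "bestaudio/best",
    "outtmpl": "%(id)s.%(ext)s",
    # Let yt-dlp invoke ffmpeg to transcode the downloaded audio to MP3
    "postprocessors": [
        {"key": "FFmpegExtractAudio", "preferredcodec": "mp3"},
    ],
}

with YoutubeDL(opts) as ydl:
    ydl.download([video_url])
```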
yt-dlp needs the link or the YouTube ID of a video in order to download it.
An additional mechanism is therefore needed to fetch a channel's current
videos and its metadata (profile image, description).

There are two options for this:
first, scraping the YouTube website, and second, YouTube's own RSS feeds.

YouTube provides an RSS feed for every channel at
``https://www.youtube.com/feeds/videos.xml?channel_id=<channel ID>``.
However, the feed only lists a channel's latest 15 videos.
To fetch older videos as well as a channel's metadata, the YouTube website
has to be fetched and parsed. For this I used the ``scrapetube`` library as
a base and extended it with a method for fetching channel information.
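A sketch of both lookup paths; feedparser and the stock scrapetube API are used directly here, since ucast's extended wrapper is not part of this diff, and the channel ID is the example from the docs:

```python
import feedparser
import scrapetube

CHANNEL_ID = "UCGiJh0NZ52wRhYKYnuZI08Q"

# Fast path: YouTube's per-channel RSS feed (limited to the last 15 videos).
feed = feedparser.parse(
    f"https://www.youtube.com/feeds/videos.xml?channel_id={CHANNEL_ID}"
)
latest_ids = [entry.yt_videoid for entry in feed.entries]

# Fallback: scrape the channel page to enumerate the full video list.
all_ids = [video["videoId"] for video in scrapetube.get_channel(CHANNEL_ID)]
```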
Task queue
**********

Ucast has to poll the subscribed channels regularly and download videos.
This is where a `task queue <https://python-rq.org>`_ comes in. The web
application can enqueue new tasks, which are then executed in the background
by workers. With a scheduler it is also possible to run tasks periodically
(e.g. every 15 minutes).

The queue needs a way to exchange data between the application and the
workers. This is where a Redis database comes in.
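A minimal sketch of this wiring with python-rq; the task function, Redis URL and timeout are placeholders:

```python
from redis import Redis
from rq import Queue

redis = Redis.from_url("redis://localhost:6379")
queue = Queue(connection=redis)

def download_video(video_id: str) -> None:
    ...  # runs in a worker process started with `rq worker`, not in the web app

# The web application only enqueues; a worker picks the job up from Redis.
job = queue.enqueue(download_video, "dQw4w9WgXcQ", job_timeout=600)
```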
Frontend
********

Since ucast does not have to provide complex client-side functionality, the
frontend is rendered server-side with Django templates and no frontend
framework is used. As the CSS framework I used Bulma, which provides a
library of components. Bulma is written in Sass, which makes it easy to
adapt to a desired design theme.

I did not do without JavaScript entirely, though. For example, I used
``clipboard.js`` to make the feed URLs copyable at the click of a button.

The endless scrolling on the video pages is implemented with ``htmx``, a JS
library for dynamically loading web content without writing custom JS code.


Getting started
###############

Docker Compose
**************

Ucast is available as a Docker image named
`thetadev256/ucast <https://hub.docker.com/r/thetadev256/ucast>`_.
A docker-compose file with a base configuration is located in the project
folder at ``deploy/docker-compose.yml``. To start ucast, run the following
commands.

.. code-block:: sh

   mkdir _run  # create the working directory
   docker-compose -f deploy/docker-compose.yml up -d  # start the application
   docker exec -it ucast-ucast-1 ucast-manage createsuperuser  # create a user account

The web interface is reachable at http://127.0.0.1:8001.
Configuration
*************

Configuration is done via environment variables. All environment variables
must be prefixed with ``UCAST_`` (e.g. ``UCAST_DEBUG``).

**DEBUG**
Enable Django's `debug mode <https://docs.djangoproject.com/en/4.0/ref/settings/#debug>`_.
Default: ``false``

**ALLOWED_HOSTS**
Allowed `hosts/domains <https://docs.djangoproject.com/en/4.0/ref/settings/#allowed-hosts>`_.
Example: ``"ucast.thetadev.de"``

**DB_ENGINE**
Database software to use (``sqlite`` / ``mysql`` / ``postgresql``).
Default: ``sqlite``

**DB_NAME**
Name of the database. Default: ``db``

**DB_HOST**
Address of the database. Default: ``127.0.0.1``

**DB_PORT**
Port of the database. Default: 3306 (mysql), 5432 (postgresql)

**DB_USER**, **DB_PASS**
Username/password for the database

**WORKDIR**
Main directory for ucast (see directory structure).
Default: current working directory

**STATIC_ROOT**
Folder for static files (``WORKDIR/static``)

**DOWNLOAD_ROOT**
Folder for downloaded images and audio files (``WORKDIR/data``)

**CACHE_ROOT**
Folder for temporary files (``{WORKDIR}/cache``)

**DB_DIR**
Folder for the SQLite database file (``{WORKDIR}/db``)

**TZ**
Time zone. Default: system setting

**REDIS_URL**
Redis address. Default: ``redis://localhost:6379``

**REDIS_QUEUE_TIMEOUT**
Timeout for started jobs [s]. Default: 600

**REDIS_QUEUE_RESULT_TTL**
Retention time for completed tasks [s]. Default: 600

**YT_UPDATE_INTERVAL**
Interval at which the YouTube channels are polled [s].
Default: 900

**FEED_MAX_ITEMS**
Maximum number of videos contained in the feeds.
Default: 50

**N_WORKERS**
Number of worker processes to start
(only available in the Docker container).
Default: 1
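A sketch of how such ``UCAST_``-prefixed variables are typically mapped to Django settings; ucast's real settings module is not included in this diff, so names and defaults below are illustrative:

```python
import os

def env(name: str, default: str = "") -> str:
    """Read one UCAST_-prefixed configuration variable."""
    return os.environ.get(f"UCAST_{name}", default)

DEBUG = env("DEBUG", "false").lower() == "true"
ALLOWED_HOSTS = [h for h in env("ALLOWED_HOSTS").split(",") if h]
REDIS_URL = env("REDIS_URL", "redis://localhost:6379")
N_WORKERS = int(env("N_WORKERS", "1"))
```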
Directory structure
*******************

Ucast creates four subfolders in its working directory where the
application's data is stored.

.. code-block:: txt

   - workdir
     |_ cache    temporary files
     |_ data     downloaded media
     |_ db       SQLite database
     |_ static   static website assets
Usage
#####

After logging in you land on the overview page, which lists all subscribed
channels. To subscribe to a new channel, copy its YouTube URL
(e.g. https://youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q)
into the input field.

Once a new channel has been added, ucast starts downloading its latest 15
videos. To check which videos are currently being downloaded, go to the
*Downloads* page. This page also lists failed download tasks, which can be
retried manually (e.g. after a loss of internet connectivity). There is
also a search function to look for a video with a specific title.

To add the subscribed channels to a podcast client, the feed URL on the
overview page can simply be copied and pasted.

Most podcast clients additionally offer an OPML import. In that case, just
click the *Download OPML* link at the bottom of the page and import the
downloaded file. This quickly adds all subscribed channels to the podcast
client.
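The OPML export mentioned above boils down to one ``<outline>`` element per channel feed. A sketch following the common podcast-OPML shape (ucast's actual view code is not shown in this diff):

```python
import xml.etree.ElementTree as ET

def build_opml(feeds: dict[str, str]) -> bytes:
    """Build an OPML document; `feeds` maps channel title -> feed URL."""
    opml = ET.Element("opml", version="2.0")
    body = ET.SubElement(opml, "body")
    for title, url in feeds.items():
        ET.SubElement(body, "outline", type="rss", text=title, xmlUrl=url)
    return ET.tostring(opml, encoding="utf-8", xml_declaration=True)

# Example with a hypothetical feed URL:
print(build_opml({"LinusTechTips": "https://ucast.example.com/feed/1.xml"}))
```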
Conclusion
##########

I have been running ucast on my NAS for a week, using it to listen to videos
both at my computer and on the go.

During the first days I still found a few bugs that had to be fixed. For
example, not all YouTube thumbnails come in 16:9 format, so they have to be
cropped to keep the page layout from shifting.

Initially I planned to integrate `SponsorBlock <https://sponsor.ajay.app>`_
into ucast to cut sponsored segments out of the videos. Yt-dlp already has
this feature built in. However, SponsorBlock is based on a
community-maintained database, i.e. depending on a video's popularity it
takes between half an hour and several hours after release until markers
are available. For SponsorBlock to work reliably, ucast would have to poll
the database regularly after a video's release and re-download and re-cut
the video whenever the markers change. This was too complex for me at first,
and I decided to postpone the feature.

Another feature I plan to implement in the future is support for alternative
video platforms such as PeerTube, Odysee and BitChute.
docs/tox.ini (20 lines deleted)

@@ -1,20 +0,0 @@
[tox]
skipsdist = True
envlist =
    html
    pdf

[testenv]
description = build the documentation
deps = -r{toxinidir}/requirements.txt

[testenv:html]
commands = sphinx-build -b html -d build/doctrees . build/html

[testenv:pdf]
allowlist_externals = make
commands = make latexpdf

[testenv:live]
description = live update with sphinx-autobuild
commands = sphinx-autobuild . build/html --open-browser
(deleted symlink to the Django manage script)

@@ -1 +0,0 @@
ucast_project/manage.py
(changed notes file on cover images; translated from German)

@@ -1,14 +1,6 @@
 # Cover images
 
-Podcast covers are square, while YT thumbnails have a 16:9 aspect ratio.
-Since thumbnails often contain text elements, simply cropping the
-thumbnail to a square is not a good option.
-
-Instead, ucast should extend the thumbnail upwards and downwards in a
-matching color and insert the video title and channel name.
-
-![](../tests/testfiles/thumbnail/t2.webp)
-![](../tests/testfiles/cover/c2.png)
+Podcast covers are square.
 
 - Compute the average color of the top and bottom 20% of the image
 - Use a gradient between these colors as the background
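A sketch of those two bullet points with Pillow; the 20% crop comes from the note, while the output size and function names are illustrative:

```python
from PIL import Image, ImageStat

def edge_color(img: Image.Image, top: bool) -> tuple[int, int, int]:
    """Average RGB color of the top or bottom fifth of the image."""
    h = img.height
    box = (0, 0, img.width, h // 5) if top else (0, h - h // 5, img.width, h)
    return tuple(int(c) for c in ImageStat.Stat(img.crop(box)).mean[:3])

def gradient_background(thumb: Image.Image, size: int = 1000) -> Image.Image:
    """Square background blending from the thumbnail's top to bottom color."""
    c_top, c_bottom = edge_color(thumb, True), edge_color(thumb, False)
    bg = Image.new("RGB", (size, size))
    for y in range(size):
        t = y / (size - 1)
        row = tuple(int(a + (b - a) * t) for a, b in zip(c_top, c_bottom))
        bg.paste(row, (0, y, size, y + 1))  # fill one row with the blended color
    return bg
```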
@ -1,34 +0,0 @@
|
||||||
Django-Klasse: `django.utils.feedgenerator.Rss201rev2Feed`
|
|
||||||
|
|
||||||
### Channel-Attribute
|
|
||||||
|
|
||||||
| Tag | Beschreibung | Django-Attribut |
|
|
||||||
|--------------------------------------|-----------------------------------------|----------------------|
|
|
||||||
| `\<atom:link href="" rel="self">` | Feed-URL | `feed_url` |
|
|
||||||
| `\<title>` | Kanalname | `title` |
|
|
||||||
| `\<language>` | Sprache | `language` |
|
|
||||||
| `\<lastBuildDate>` | Datum der letzten Veränderung des Feeds | `latest_post_date()` |
|
|
||||||
| `\<description>` | Kanalbeschreibung | `description` |
|
|
||||||
| `\<link>` | Link zum Kanal | `link` |
|
|
||||||
| `\<copyright>` | Autor | `feed_copyright` |
|
|
||||||
| `\<image><url><title><link></image>` | Cover-URL / Kanalname / Link | - |
|
|
||||||
| `\<itunes:image href="">` | Cover-URL | - |
|
|
||||||
| `\<itunes:author>` | Autor | - |
|
|
||||||
| `\<itunes:summary>` | Kanalbeschreibung | - |
|
|
||||||
|
|
||||||
|
|
||||||
### Item-Attribute
|
|
||||||
|
|
||||||
| Tag | Beschreibung | Django-Attribut |
|
|
||||||
|--------------------------------------------------|------------------------|-----------------|
|
|
||||||
| `\<title>` | Titel | `title` |
|
|
||||||
| `\<itunes:title>` | Titel | - |
|
|
||||||
| `\<description>` | Beschreibung | `description` |
|
|
||||||
| `\<pubDate>` | Veröffentlichungsdatum | `pubdate` |
|
|
||||||
| `\<link>` | Link | `link` |
|
|
||||||
| `\<guid>` | Eindeutige ID/ | `unique_id` |
|
|
||||||
| `\<itunes:summary>` | Bechreibung | - |
|
|
||||||
| `\<itunes:author>` | Autor | - |
|
|
||||||
| `\<enclosure url="" type="audio/mpeg" length=1>` | Audiodatei | `enclosures ` |
|
|
||||||
| `\<itunes:duration>00:40:35</itunes:duration>` | Dauer | - |
|
|
||||||
| `\<itunes:image href="">` | Cover-URL | - |
|
|
|
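
How these attributes come together is easiest to see with Django's stock feed
generator. A minimal sketch using the standard `django.utils.feedgenerator`
API; the URLs and values are placeholders, and the `itunes:*` tags require the
custom feed subclass that ucast/feed.py below provides:

```python
from datetime import datetime, timezone

from django.utils import feedgenerator

feed = feedgenerator.Rss201rev2Feed(
    title="LinusTechTips",                    # <title>
    link="https://example.com/channel/ltt",   # <link>
    description="Channel description",        # <description>
    language="en",                            # <language>
    feed_url="https://example.com/feed/ltt",  # <atom:link rel="self">
)
feed.add_item(
    title="Example video",
    link="https://example.com/video/abc",
    description="Video description",
    unique_id="https://example.com/video/abc",  # <guid>
    pubdate=datetime(2022, 4, 9, tzinfo=timezone.utc),
    enclosures=[
        feedgenerator.Enclosure(
            url="https://example.com/audio/abc.mp3",
            length="12345",
            mime_type="audio/mpeg",
        )
    ],
)
print(feed.writeString("utf-8"))
```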

@@ -3,54 +3,71 @@
 ## Directory structure
 
 ```txt
+_ config
+|_ config.py
 _ data
 |_ LinusTechTips
-  |_ _ucast
-    |_ avatar.jpg    # Channel profile image
-    |_ avatar_sm.webp
-    |_ covers        # Cover images
-      |_ 220409_Building_a_1_000_000_Computer.png
-      |_ 220410_Apple_makes_GREAT_Gaming_Computers.png
-    |_ thumbnails
-      |_ 220409_Building_a_1_000_000_Computer.webp
-      |_ 220409_Building_a_1_000_000_Computer_sm.webp
-      |_ 220410_Apple_makes_GREAT_Gaming_Computers.webp
-      |_ 220410_Apple_makes_GREAT_Gaming_Computers_sm.webp
-  |_ 220409_Building_a_1_000_000_Computer.mp3
-  |_ 220410_Apple_makes_GREAT_Gaming_Computers.mp3
+| |_ .ucast
+| | |_ videos.json    # IDs and metadata of all downloaded videos
+| | |_ options.json   # Channel-specific options (ID, enabled)
+| | |_ avatar.png     # Channel profile image
+| | |_ feed.xml       # RSS feed
+| | |_ covers         # Cover images
+| | |_ 220409_Building a _1_000_000 Computer.png
+| | |_ 220410_Apple makes GREAT Gaming Computers.png
+| |_ 220409_Building a _1_000_000 Computer.mp3
+| |_ 220410_Apple makes GREAT Gaming Computers.mp3
 
 |_ Andreas Spiess
 |_ ...
 ```
+
+## Directory structure (with database)
+
+```txt
+_ config
+|_ config.py
+_ data
+|_ ucast.db
+
+|_ LinusTechTips
+| |_ .ucast
+| | |_ avatar.png     # Channel profile image
+| | |_ feed.xml       # RSS feed
+| | |_ covers         # Cover images
+| | |_ 220409_Building a _1_000_000 Computer.png
+| | |_ 220410_Apple makes GREAT Gaming Computers.png
+| |_ 220409_Building a _1_000_000 Computer.mp3
+| |_ 220410_Apple makes GREAT Gaming Computers.mp3
+
+|_ Andreas Spiess
+|_ ...
+```
 
 ## Data models
 
-### LastScan
+### Channel
 
-- LastScan: datetime
-
-### ChannelOptions
-
-- ID: `str, max_length=30`
-- Active: `bool = True`
-- LastScan: `datetime`
-- SkipLivestreams: `bool = True`
-- SkipShorts: `bool = True`
-- KeepVideos: `int, nullable`
-- Videos: `-> Video (1->n)`
+- ID: str, VARCHAR(30), PKEY
+- Name: str, VARCHAR(100)
+- Active: bool = True
+- SkipLivestreams: bool = True
+- SkipShorts: bool = True
+- KeepVideos: int = -1
 
 ### Video
 
-- ID: `str, max_length=30`
-- Title: `str, max_length=200`
-- Slug: `str, max_length=209` (YYYYMMDD_Title, used as filename)
-- Published: `datetime`
-- Downloaded: `datetime, nullable`
-- Description: `text`
+- ID: str, VARCHAR(30), PKEY
+- Channel: -> Channel.ID
+- Title: str, VARCHAR(200)
+- Slug: str (YYYYMMDD_Title, used as filename), VARCHAR(209)
+- Published: datetime
+- Downloaded: datetime
+- Description: str, VARCHAR(1000)
 
 ### Config
 
 - RedisURL: str
 - ScanInterval: 1h
+- DefaultChannelOptions: ChannelOptions
 - AppriseUrl: str (for notifications, https://github.com/caronc/apprise/wiki)
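
For orientation, the Channel/Video lists above map almost one-to-one onto
SQLAlchemy declarative models. This is a sketch under the assumption of plain
declarative SQLAlchemy (the actual implementation builds on starlette-core and
alembic migrations, see ucast/db.py below); table and column names are chosen
to mirror the lists:

```python
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Channel(Base):
    __tablename__ = "channel"

    id = Column(String(30), primary_key=True)
    name = Column(String(100), nullable=False)
    active = Column(Boolean, default=True)
    skip_livestreams = Column(Boolean, default=True)
    skip_shorts = Column(Boolean, default=True)
    keep_videos = Column(Integer, default=-1)

    videos = relationship("Video", back_populates="channel")


class Video(Base):
    __tablename__ = "video"

    id = Column(String(30), primary_key=True)
    channel_id = Column(String(30), ForeignKey("channel.id"), nullable=False)
    title = Column(String(200), nullable=False)
    slug = Column(String(209))  # YYYYMMDD_Title, used as filename
    published = Column(DateTime)
    downloaded = Column(DateTime)
    description = Column(String(1000))

    channel = relationship("Channel", back_populates="videos")
```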

package-lock.json (1899 lines, generated)

package.json (29 lines, deleted)
@@ -1,29 +0,0 @@
{
  "name": "ucast",
  "license": "MIT",
  "author": {
    "name": "ThetaDev",
    "email": "t.testboy@gmail.com"
  },
  "description": "YouTube to Podcast converter",
  "private": true,
  "dependencies": {
    "bulma": "^0.9.4"
  },
  "devDependencies": {
    "autoprefixer": "^10.4.7",
    "clean-css-cli": "^5.6.0",
    "postcss": "^8.4.13",
    "postcss-cli": "^9.1.0",
    "rimraf": "^3.0.2",
    "sass": "^1.51.0"
  },
  "scripts": {
    "build": "npm run build-clean && npm run build-sass && npm run build-autoprefix && npm run build-cleancss",
    "build-autoprefix": "postcss --use autoprefixer --map false --output ucast/static/bulma/css/style.css ucast/static/bulma/css/style.css",
    "build-cleancss": "cleancss -o ucast/static/bulma/css/style.min.css ucast/static/bulma/css/style.css",
    "build-clean": "rimraf ucast/static/bulma/css",
    "build-sass": "sass --style expanded --source-map assets/sass/style.sass ucast/static/bulma/css/style.css",
    "start": "sass --style expanded --watch assets/sass/style.sass _run/static/bulma/css/style.min.css"
  }
}

poetry.lock (1269 lines, generated)

pyproject.toml
@@ -1,59 +1,46 @@
 [tool.poetry]
 name = "ucast"
-version = "0.4.6"
+version = "0.0.1"
 description = "YouTube to Podcast converter"
 authors = ["Theta-Dev <t.testboy@gmail.com>"]
-packages = [
-    { include = "ucast" },
-    { include = "ucast_project" },
-]
 
 [tool.poetry.dependencies]
 python = "^3.10"
-Django = "^4.0.4"
-yt-dlp = "^2022.6.29"
-requests = "^2.28.1"
+starlette = {extras = ["full"], version = "^0.19.1"}
+uvicorn = "^0.17.6"
+yt-dlp = "^2022.3.8"
+scrapetube = "^2.2.2"
+rfeed = "^1.1.1"
 feedparser = "^6.0.8"
 Pillow = "^9.1.0"
 colorthief = "^0.2.1"
 wcag-contrast-ratio = "^0.9"
 font-source-sans-pro = "^0.0.1"
 fonts = "^0.0.3"
-django-bulma = "^0.8.3"
-python-dotenv = "^0.20.0"
-psycopg2 = "^2.9.3"
-mysqlclient = "^2.1.1"
+alembic = "^1.7.7"
 python-slugify = "^6.1.2"
-mutagen = "^1.45.1"
-rq = "^1.10.1"
-rq-scheduler = "^0.11.0"
-pycryptodomex = "^3.14.1"
-django-htmx = "^1.12.0"
+starlette-core = "^0.0.1"
+click = "^8.1.3"
+python-dotenv = "^0.20.0"
+mysqlclient = "^2.1.0"
+psycopg2 = "^2.9.3"
 
 [tool.poetry.dev-dependencies]
-pytest = "^7.1.1"
+pytest = "^7.1.2"
 pytest-cov = "^3.0.0"
 invoke = "^1.7.0"
-pytest-django = "^4.5.2"
-pre-commit = "^2.19.0"
-honcho = "^1.1.0"
-pytest-mock = "^3.7.0"
-fakeredis = "^1.7.5"
-gunicorn = "^20.1.0"
-bump2version = "^1.0.1"
+pre-commit = "^2.18.1"
+virtualenv = "20.14.1"
 
 [tool.poetry.scripts]
-"ucast-manage" = "ucast_project.manage:main"
+ucast = "ucast.__main__:cli"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
 
-[tool.pytest.ini_options]
-DJANGO_SETTINGS_MODULE = "ucast_project.settings"
-
 [tool.flake8]
-extend-ignore = "E501"
+max-line-length = 88
 
 [tool.black]
 line-length = 88

tasks.py (152 lines)
@@ -1,150 +1,68 @@
 import os
-import shutil
-import sys
 from pathlib import Path
+from tempfile import TemporaryDirectory
 
-from honcho import manager
-from invoke import Responder, task
+from invoke import task
 
-from ucast import tests
-from ucast.service import cover, util, youtube
+import tests
+from ucast import cover, util, youtube
 
 os.chdir(Path(__file__).absolute().parent)
 
-DIR_RUN = Path("_run").absolute()
-DIR_STATIC = DIR_RUN / "static"
-DIR_DOWNLOAD = DIR_RUN / "data"
-FILE_DB = DIR_RUN / "db.sqlite"
+db_file = Path("_run/ucast.db").absolute()
+
+# Configure application
+os.environ["DEBUG"] = "true"
+os.environ["SECRET_KEY"] = "1234"
+os.environ["DATABASE_URL"] = f"sqlite:///{db_file}"
 
 
 @task
 def test(c):
-    """Run unit tests"""
-    c.run("pytest", pty=True)
+    c.run("pytest tests", pty=True)
 
 
 @task
-def lint(c):
-    """Check for code quality and formatting"""
-    c.run("pre-commit run -a", pty=True)
-
-
-@task
-def format(c):
-    """Format the code with black"""
-    c.run("pre-commit run black -a", pty=True)
-
-
-@task
-def makemigrations(c):
-    """Create a new migration that applies the changes made to the data model"""
-    c.run("python manage.py makemigrations ucast")
-
-
-@task
-def collectstatic(c):
-    """Copy static files into a common folder"""
-    c.run("python manage.py collectstatic --noinput")
-
-
-@task
-def migrate(c):
-    """Migrate the database"""
-    c.run("python manage.py migrate")
-
-
-@task
-def create_testuser(c):
-    """Create a test user with the credentials admin:pass"""
-    responder_pwd = Responder(pattern=r"Password.*: ", response="pass\n")
-    responder_yes = Responder(pattern=r"Bypass password validation", response="y\n")
-
-    c.run(
-        "python manage.py createsuperuser --username admin --email admin@example.com",
-        pty=True,
-        watchers=[responder_pwd, responder_yes],
-    )
+def run(c):
+    os.chdir("ucast")
+    c.run("alembic upgrade head")
+    c.run("python app.py")
 
 
 @task
 def get_cover(c, vid=""):
-    """
-    Download thumbnail image of the YouTube video with the id
-    from the ``--vid`` parameter and create cover images from it.
-
-    The images are stored in the ``ucast/tests/testfiles`` directory.
-    """
-    vinfo = youtube.get_video_details(vid)
-    title = vinfo.title
-    channel_name = vinfo.channel_name
-    channel_id = vinfo.channel_id
-    channel_metadata = youtube.get_channel_metadata(
-        youtube.channel_url_from_id(channel_id)
-    )
+    vinfo = youtube.get_video_info(vid)
+    title = vinfo["fulltitle"]
+    channel_name = vinfo["uploader"]
+    thumbnail_url = youtube.get_thumbnail_url(vinfo)
+    channel_url = vinfo["channel_url"]
+    channel_metadata = youtube.get_channel_metadata(channel_url)
 
     ti = 1
-    while os.path.exists(tests.DIR_TESTFILES / "avatar" / f"a{ti}.jpg"):
+    while os.path.exists(tests.DIR_TESTFILES / "cover" / f"c{ti}.png"):
         ti += 1
 
     tn_file = tests.DIR_TESTFILES / "thumbnail" / f"t{ti}.webp"
     av_file = tests.DIR_TESTFILES / "avatar" / f"a{ti}.jpg"
-    cv_file = tests.DIR_TESTFILES / "cover" / f"c{ti}_gradient.png"
-    cv_blur_file = tests.DIR_TESTFILES / "cover" / f"c{ti}_blur.png"
+    cv_file = tests.DIR_TESTFILES / "cover" / f"c{ti}.png"
 
-    youtube.download_thumbnail(vinfo, tn_file)
-    util.download_image_file(channel_metadata.avatar_url, av_file)
+    util.download_file(thumbnail_url, tn_file)
+    util.download_file(channel_metadata.avatar_url, av_file)
 
-    cover.create_cover_file(
-        tn_file, av_file, title, channel_name, cover.COVER_STYLE_GRADIENT, cv_file
-    )
-    cover.create_cover_file(
-        tn_file, av_file, title, channel_name, cover.COVER_STYLE_BLUR, cv_blur_file
-    )
+    cover.create_cover_file(tn_file, av_file, title, channel_name, cv_file)
 
 
 @task
-def build_devcontainer(c):
-    c.run(
-        "docker buildx build -t thetadev256/ucast-dev --push --platform amd64,arm64,armhf -f deploy/Devcontainer.Dockerfile deploy"
-    )
-
-
-@task
-def reset(c):
-    if DIR_DOWNLOAD.exists():
-        shutil.rmtree(DIR_DOWNLOAD)
-    if FILE_DB.exists():
-        os.remove(FILE_DB)
-    os.makedirs(DIR_DOWNLOAD, exist_ok=True)
-    migrate(c)
-    create_testuser(c)
-    collectstatic(c)
-
-
-@task
-def worker(c, n=2):
-    m = manager.Manager()
-
-    for i in range(n):
-        m.add_process(f"worker_{i}", "python manage.py rqworker")
-
-    m.add_process("scheduler", "python manage.py rqscheduler")
-
-    m.loop()
-    sys.exit(m.returncode)
-
-
-@task
-def optimize_svg(c):
-    out_dir = Path("ucast/static/ucast")
-
-    for icon in (Path("assets/icons/logo.svg"), Path("assets/icons/logo_dark.svg")):
-        c.run(
-            f"scour --indent=none --no-line-breaks --enable-comment-stripping {icon} {out_dir / icon.name}"
-        )
-
-
-@task
-def build_sass(c):
-    c.run("npm run build")
-    collectstatic(c)
+def add_migration(c, m=""):
+    if not m:
+        raise Exception("please input migration name")
+
+    tmpdir_o = TemporaryDirectory()
+    tmpdir = Path(tmpdir_o.name)
+    db_file = tmpdir / "migrate.db"
+
+    os.environ["DATABASE_URL"] = f"sqlite:///{db_file}"
+
+    os.chdir("ucast")
+    c.run("alembic upgrade head")
+    c.run(f"alembic revision --autogenerate -m '{m}'")

tests/__init__.py (4 lines, new file)
@@ -0,0 +1,4 @@
# coding=utf-8
from importlib.resources import files

DIR_TESTFILES = files("tests.testfiles")

@@ -1,3 +1,4 @@
+# coding=utf-8
 import tempfile
 from pathlib import Path
 from typing import List

@@ -6,8 +7,8 @@ import pytest
 from fonts.ttf import SourceSansPro
 from PIL import Image, ImageChops, ImageFont
 
-from ucast import tests
-from ucast.service import cover, typ
+import tests
+from ucast import cover, typ
 
 
 @pytest.mark.parametrize(

@@ -25,7 +26,8 @@
         (
             1000,
             300,
-            "Ha! du wärst Obrigkeit von Gott? Gott spendet Segen aus; du raubst! Du nicht von Gott, Tyrann!",
+            "Ha! du wärst Obrigkeit von Gott? Gott spendet Segen aus; du raubst! \
+Du nicht von Gott, Tyrann!",
             [
                 "Ha! du wärst",
                 "Obrigkeit von",

@@ -48,7 +50,7 @@ def test_split_text(height: int, width: int, text: str, expect: List[str]):
     "file_name,color",
     [
         ("t1.webp", (63, 63, 62)),
-        ("t2.webp", (22, 20, 20)),
+        ("t2.webp", (74, 45, 37)),
         ("t3.webp", (54, 24, 28)),
     ],
 )

@@ -71,89 +73,23 @@ def test_get_text_color(bg_color: typ.Color, text_color: typ.Color):
 
 
 @pytest.mark.parametrize(
-    "n_image,title,channel,style",
+    "n_image,title,channel",
     [
-        (1, "ThetaDev @ Embedded World 2019", "ThetaDev", cover.COVER_STYLE_GRADIENT),
-        (1, "ThetaDev @ Embedded World 2019", "ThetaDev", cover.COVER_STYLE_BLUR),
-        (
-            2,
-            "Sintel - Open Movie by Blender Foundation",
-            "Blender",
-            cover.COVER_STYLE_GRADIENT,
-        ),
-        (
-            2,
-            "Sintel - Open Movie by Blender Foundation",
-            "Blender",
-            cover.COVER_STYLE_BLUR,
-        ),
-        (
-            3,
-            "Systemabsturz Teaser zur DiVOC bb3",
-            "media.ccc.de",
-            cover.COVER_STYLE_GRADIENT,
-        ),
-        (
-            3,
-            "Systemabsturz Teaser zur DiVOC bb3",
-            "media.ccc.de",
-            cover.COVER_STYLE_BLUR,
-        ),
+        (1, "ThetaDev @ Embedded World 2019", "ThetaDev"),
+        (2, "Sintel - Open Movie by Blender Foundation", "Blender"),
+        (3, "Systemabsturz Teaser zur DiVOC bb3", "media.ccc.de"),
     ],
 )
-def test_create_cover_image(
-    n_image: int, title: str, channel: str, style: cover.CoverStyle
-):
+def test_create_cover_image(n_image: int, title: str, channel: str):
     tn_file = tests.DIR_TESTFILES / "thumbnail" / f"t{n_image}.webp"
     av_file = tests.DIR_TESTFILES / "avatar" / f"a{n_image}.jpg"
-    expected_cv_file = tests.DIR_TESTFILES / "cover" / f"c{n_image}_{style}.png"
+    expected_cv_file = tests.DIR_TESTFILES / "cover" / f"c{n_image}.png"
 
     tn_image = Image.open(tn_file)
     av_image = Image.open(av_file)
     expected_cv_image = Image.open(expected_cv_file)
 
-    cv_image = cover._create_cover_image(tn_image, av_image, title, channel, style)
-
-    assert cv_image.width == cover.COVER_WIDTH
-    assert cv_image.height == cover.COVER_WIDTH
-
-    diff = ImageChops.difference(cv_image, expected_cv_image)
-    assert diff.getbbox() is None
-
-
-def test_create_cover_image_noavatar():
-    tn_file = tests.DIR_TESTFILES / "thumbnail" / "t1.webp"
-    expected_cv_file = tests.DIR_TESTFILES / "cover" / "c1_noavatar.png"
-
-    tn_image = Image.open(tn_file)
-    expected_cv_image = Image.open(expected_cv_file)
-
-    cv_image = cover._create_cover_image(
-        tn_image,
-        None,
-        "ThetaDev @ Embedded World 2019",
-        "ThetaDev",
-        cover.COVER_STYLE_GRADIENT,
-    )
-
-    assert cv_image.width == cover.COVER_WIDTH
-    assert cv_image.height == cover.COVER_WIDTH
-
-    diff = ImageChops.difference(cv_image, expected_cv_image)
-    assert diff.getbbox() is None
-
-
-def test_create_blank_cover_image():
-    av_file = tests.DIR_TESTFILES / "avatar" / "a1.jpg"
-    expected_cv_file = tests.DIR_TESTFILES / "cover" / "blank.png"
-
-    av_image = Image.open(av_file)
-    expected_cv_image = Image.open(expected_cv_file)
-
-    cv_image = cover._create_blank_cover_image(av_image, "missingno", "ThetaDev")
-
-    assert cv_image.width == cover.COVER_WIDTH
-    assert cv_image.height == cover.COVER_WIDTH
+    cv_image = cover._create_cover_image(tn_image, av_image, title, channel)
 
     diff = ImageChops.difference(cv_image, expected_cv_image)
     assert diff.getbbox() is None

@@ -162,19 +98,14 @@ def test_create_blank_cover_image():
 def test_create_cover_file():
     tn_file = tests.DIR_TESTFILES / "thumbnail" / "t1.webp"
     av_file = tests.DIR_TESTFILES / "avatar" / "a1.jpg"
-    expected_cv_file = tests.DIR_TESTFILES / "cover" / "c1_gradient.png"
+    expected_cv_file = tests.DIR_TESTFILES / "cover" / "c1.png"
 
     tmpdir_o = tempfile.TemporaryDirectory()
     tmpdir = Path(tmpdir_o.name)
     cv_file = tmpdir / "cover.png"
 
-    cover.create_cover_file(
-        tn_file,
-        av_file,
-        "ThetaDev @ Embedded World 2019",
-        "ThetaDev",
-        "gradient",
-        cv_file,
-    )
+    cover.create_cover_file(
+        tn_file, av_file, "ThetaDev @ Embedded World 2019", "ThetaDev", cv_file
+    )
 
     cv_image = Image.open(cv_file)

tests/test_database.py (86 lines, new file)
@@ -0,0 +1,86 @@
# coding=utf-8
import os
from datetime import datetime

import pytest
import sqlalchemy
from sqlalchemy import orm

# from ucast import models
from ucast import db


@pytest.fixture
def testdb():
    # In-memory engine with the model tables created via db.metadata
    engine = sqlalchemy.create_engine("sqlite:///:memory:")
    db.metadata.create_all(engine)
    return engine


def test_insert_channel(testdb):
    # Create a session bound to the test engine
    session = orm.sessionmaker(bind=testdb)()

    c1 = db.models.Channel(id="UCE1PLliRk3urTjDG6kGByiA", name="Natalie Gold")
    session.add(c1)
    session.commit()

    c2 = db.models.Channel(id="UCGiJh0NZ52wRhYKYnuZI08Q", name="ThetaDev")

    session.add(c2)
    session.commit()

    # stmt = sqlalchemy.select(models.Channel).where(models.Channel.name == "LinusTechTips")
    # stmt = sqlalchemy.select(models.Channel)
    # res = testdb.execute(stmt)

    res = session.query(db.models.Channel).all()
    assert len(res) == 2
    assert res[0].name == "Natalie Gold"
    assert res[1].name == "ThetaDev"


"""
@pytest.fixture(scope="session", autouse=True)
def testdb():
    try:
        os.remove("test.db")
    except:
        pass
    # url = "sqlite:///:memory:"
    url = "sqlite:///test.db"
    engine = sqlalchemy.create_engine(url)
    models.metadata.create_all(engine)  # Create the tables.
    return engine


def test_insert_channel(testdb):
    session_maker = orm.sessionmaker(bind=testdb)
    session = session_maker()
    c1 = models.Channel(id="UCE1PLliRk3urTjDG6kGByiA", name="Natalie Gold")
    session.add(c1)
    session.commit()

    c2 = models.Channel(id="UCGiJh0NZ52wRhYKYnuZI08Q", name="ThetaDev")

    session.add(c2)
    session.commit()

    # stmt = sqlalchemy.select(models.Channel).where(models.Channel.name == "LinusTechTips")
    # stmt = sqlalchemy.select(models.Channel)
    # res = testdb.execute(stmt)

    res = session.query(models.Channel).all()
    assert len(res) == 2
    assert res[0].name == "Natalie Gold"
    assert res[1].name == "ThetaDev"


def test_insert_video(testdb):
    session_maker = orm.sessionmaker(bind=testdb)
    session = session_maker()

    c1 = models.Channel(id="UC0QEucPrn0-Ddi3JBTcs5Kw", name="Saria Delaney")
    session.add(c1)
    session.commit()

    v1 = models.Video(
        id="Bxhxzj8R_i0",
        channel=c1,
        title="Verschwiegen. Verraten. Verstummt. [Reupload: 10.10.2018]",
        published=datetime(2020, 7, 4, 12, 21, 30),
        description="",
    )
    v1.slug = v1.get_slug()

    session.add(v1)
    session.commit()
"""
tests/testfiles/cover/c2.png (binary, new file, 229 KiB)

tests/testfiles/sources.md (5 lines, new file)
@@ -0,0 +1,5 @@
### Sources of the thumbnails/avatar images used for testing

- a1/t1: [ThetaDev](https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q) (CC-BY)
- a2/t2: [Blender](https://www.youtube.com/c/BlenderFoundation) (CC-BY)
- a3/t3: [media.ccc.de](https://www.youtube.com/channel/UC2TXq_t06Hjdr2g_KdKpHQg) (CC-BY)

tests/testfiles/thumbnail/t2.webp (binary, new file, 101 KiB)

ucast/__init__.py
@@ -1,5 +1,7 @@
-__version__ = "0.4.6"
-
-
-def template_context(request):
-    return {"version": __version__}
+# coding=utf-8
+__version__ = "0.0.1"
+
+UCAST_BANNER = """\
+┬ ┬┌─┐┌─┐┌─┐┌┬┐
+│ ││ ├─┤└─┐ │
+└─┘└─┘┴ ┴└─┘ ┴ """

ucast/__main__.py (77 lines, new file)
@@ -0,0 +1,77 @@
import os
import sys
from importlib import resources
from pathlib import Path

import dotenv
import uvicorn
from alembic import config as alembic_cmd

import ucast


def load_dotenv():
    dotenv_path = dotenv.find_dotenv()
    if dotenv_path:
        dotenv.load_dotenv(dotenv_path)
        os.chdir(Path(dotenv_path).absolute().parent)
        print(f"Loaded config from envfile at {dotenv_path}")


def print_banner():
    print(ucast.UCAST_BANNER + ucast.__version__)


def print_help():
    print_banner()
    print(
        """
Available commands:
run: start the server
migrate: apply database migrations
alembic: run the alembic migrator

Configuration is read from the .env file or environment variables.
Refer to the project page for more information: https://code.thetadev.de/HSA/Ucast"""
    )


def run():
    print_banner()
    load_dotenv()
    from ucast import config

    uvicorn.run(
        "ucast.app:create_app",
        host="0.0.0.0",
        port=config.HTTP_PORT,
        factory=True,
        reload=config.DEBUG,
    )


def alembic(args):
    load_dotenv()
    alembic_ini_path = resources.path("ucast", "alembic.ini")
    os.environ["ALEMBIC_CONFIG"] = str(alembic_ini_path)

    alembic_cmd.main(args, f"{sys.argv[0]} alembic")


def cli():
    if len(sys.argv) < 2:
        sys.exit(print_help())

    cmd = sys.argv[1]
    args = sys.argv[2:]

    if cmd == "run":
        sys.exit(run())
    elif cmd == "alembic":
        sys.exit(alembic(args))
    else:
        sys.exit(print_help())


if __name__ == "__main__":
    cli()

@@ -1,17 +0,0 @@
from django.contrib import admin

from ucast.models import Channel, User, Video


class ChannelAdmin(admin.ModelAdmin):
    list_display = ["name", "id"]


class VideoAdmin(admin.ModelAdmin):
    list_display = ["title", "published"]
    ordering = ("-published",)


admin.site.register(Channel, ChannelAdmin)
admin.site.register(Video, VideoAdmin)
admin.site.register(User)

ucast/alembic.ini (100 lines, new file)
@@ -0,0 +1,100 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = ucast:migrations

# template used to generate migration files
file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

ucast/app.py (20 lines, new file)
@@ -0,0 +1,20 @@
# coding=utf-8
from starlette.applications import Starlette
from starlette.routing import Route

from ucast import config, views


def create_app():
    app = Starlette(
        config.DEBUG,
        routes=[
            Route("/", views.homepage),
            Route("/err", views.error),
        ],
    )

    if app.debug:
        print("Debug mode enabled.")

    return app

@@ -1,6 +0,0 @@
from django.apps import AppConfig


class UcastConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "ucast"

ucast/config.py (12 lines, new file)
@@ -0,0 +1,12 @@
# coding=utf-8
from starlette.config import Config
from starlette.datastructures import Secret
from starlette_core.database import DatabaseURL

config = Config()

# Basic configuration
DEBUG = config("DEBUG", cast=bool, default=False)
DATABASE_URL = config("DATABASE_URL", cast=DatabaseURL)
SECRET_KEY = config("SECRET_KEY", cast=Secret)
HTTP_PORT = config("HTTP_PORT", cast=int, default=8000)
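
The settings above are read from the environment at startup (ucast/__main__.py
also loads a `.env` file if present). A sample `.env` with hypothetical values;
every key maps 1:1 to one of the `config(...)` calls above:

```ini
# sample .env (hypothetical values)
DEBUG=true
SECRET_KEY=change-me
DATABASE_URL=sqlite:///_run/ucast.db
HTTP_PORT=8000
```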

ucast/cover.py (210 lines, new file)
@@ -0,0 +1,210 @@
# coding=utf-8
import math
from pathlib import Path
from typing import List, Optional, Tuple

import wcag_contrast_ratio
from colorthief import ColorThief
from fonts.ttf import SourceSansPro
from PIL import Image, ImageDraw, ImageFont

from ucast import typ

CHAR_ELLIPSIS = "…"
COVER_WIDTH = 500


def _split_text(
    height: int, width: int, text: str, font: ImageFont.FreeTypeFont, line_spacing=0
) -> List[str]:
    if height < font.size:
        return []

    max_lines = math.floor((height - font.size) / (font.size + line_spacing)) + 1

    lines = []
    line = ""

    for word in text.split(" "):
        if len(lines) >= max_lines:
            line = word
            break

        if line == "":
            nline = word
        else:
            nline = line + " " + word

        if font.getsize(nline)[0] <= width:
            line = nline
        elif line != "":
            lines.append(line)
            line = word
        else:
            # try to trim current word
            while nline:
                nline = nline[:-1]
                nline_e = nline + CHAR_ELLIPSIS
                if font.getsize(nline_e)[0] <= width:
                    lines.append(nline_e)
                    break

    if line != "":
        if len(lines) >= max_lines:
            # Drop the last line and add ... to the end
            lastline = lines[-1] + CHAR_ELLIPSIS
            if font.getsize(lastline)[0] <= width:
                lines[-1] = lastline
            else:
                i_last_space = lines[-1].rfind(" ")
                lines[-1] = lines[-1][:i_last_space] + CHAR_ELLIPSIS
        else:
            lines.append(line)

    return lines


def _draw_text_box(
    draw: ImageDraw.ImageDraw,
    box: Tuple[int, int, int, int],
    text: str,
    font: ImageFont.FreeTypeFont,
    color: typ.Color = (0, 0, 0),
    line_spacing=0,
    vertical_center=True,
):
    x_tl, y_tl, x_br, y_br = box
    height = y_br - y_tl
    width = x_br - x_tl

    lines = _split_text(height, width, text, font, line_spacing)

    y_start = y_tl
    if vertical_center:
        text_height = len(lines) * (font.size + line_spacing) - line_spacing
        y_start += int((height - text_height) / 2)

    for i, line in enumerate(lines):
        y_pos = y_start + i * (font.size + line_spacing)
        draw.text((x_tl, y_pos), line, color, font)


def _get_dominant_color(img: Image.Image):
    thief = ColorThief.__new__(ColorThief)
    thief.image = img
    return thief.get_color()


def _interpolate_color(color_from: typ.Color, color_to: typ.Color, interval: int):
    det_co = [(t - f) / interval for f, t in zip(color_from, color_to)]
    for i in range(interval):
        yield [round(f + det * i) for f, det in zip(color_from, det_co)]


def _get_text_color(bg_color) -> typ.Color:
    color_decimal = tuple([c / 255 for c in bg_color])
    c_blk = wcag_contrast_ratio.rgb((0, 0, 0), color_decimal)
    c_wht = wcag_contrast_ratio.rgb((1, 1, 1), color_decimal)
    if c_wht > c_blk:
        return 255, 255, 255
    return 0, 0, 0


def _create_cover_image(
    thumbnail: Image.Image, avatar: Optional[Image.Image], title: str, channel: str
) -> Image.Image:
    # Scale the thumbnail image down to cover size
    tn_height = int(COVER_WIDTH / thumbnail.width * thumbnail.height)
    tn = thumbnail.resize((COVER_WIDTH, tn_height), Image.Resampling.LANCZOS)

    # Get dominant colors from the top and bottom 20% of the thumbnail image
    top_part = tn.crop((0, 0, COVER_WIDTH, int(tn_height * 0.2)))
    bottom_part = tn.crop((0, int(tn_height * 0.8), COVER_WIDTH, tn_height))
    top_color = _get_dominant_color(top_part)
    bottom_color = _get_dominant_color(bottom_part)

    # Create new cover image
    cover = Image.new("RGB", (COVER_WIDTH, COVER_WIDTH))
    cover_draw = ImageDraw.Draw(cover)

    # Draw background gradient
    for i, color in enumerate(
        _interpolate_color(top_color, bottom_color, cover.height)
    ):
        cover_draw.line(((0, i), (cover.width, i)), tuple(color), 1)

    # Insert thumbnail image in the middle
    tn_margin = int((COVER_WIDTH - tn_height) / 2)
    cover.paste(tn, (0, tn_margin))

    # Add channel avatar
    avt_margin = 0
    avt_size = 0

    if avatar:
        avt_margin = int(tn_margin * 0.05)
        avt_size = tn_margin - 2 * avt_margin

        avt = avatar.resize((avt_size, avt_size), Image.Resampling.LANCZOS)

        circle_mask = Image.new("L", (avt_size, avt_size))
        circle_mask_draw = ImageDraw.Draw(circle_mask)
        circle_mask_draw.ellipse((0, 0, avt_size, avt_size), 255)

        cover.paste(avt, (avt_margin, avt_margin), circle_mask)

    # Add text
    text_margin_x = 16
    text_margin_topleft = avt_margin + avt_size + text_margin_x
    text_vertical_offset = -17
    text_line_space = -4

    fnt = ImageFont.truetype(SourceSansPro, 50)
    top_text_color = _get_text_color(top_color)
    bottom_text_color = _get_text_color(bottom_color)

    _draw_text_box(
        cover_draw,
        (
            text_margin_topleft,
            text_vertical_offset,
            COVER_WIDTH - text_margin_x,
            tn_margin,
        ),
        channel,
        fnt,
        top_text_color,
        text_line_space,
    )
    _draw_text_box(
        cover_draw,
        (
            text_margin_x,
            COVER_WIDTH - tn_margin + text_vertical_offset,
            COVER_WIDTH - text_margin_x,
            COVER_WIDTH,
        ),
        title,
        fnt,
        bottom_text_color,
        text_line_space,
    )

    return cover


def create_cover_file(
    thumbnail_path: Path,
    avatar_path: Optional[Path],
    title: str,
    channel: str,
    cover_path: Path,
):
    thumbnail = Image.open(thumbnail_path)

    avatar = None
    if avatar_path:
        avatar = Image.open(avatar_path)

    cvr = _create_cover_image(thumbnail, avatar, title, channel)
    cvr.save(cover_path)
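
Putting the module to use is a single call. The signature is the one defined
in `create_cover_file` above; the file paths are placeholders (see the
`get_cover` task in tasks.py for how the inputs are downloaded):

```python
from pathlib import Path

from ucast import cover

cover.create_cover_file(
    Path("thumbnail.webp"),  # 16:9 video thumbnail
    Path("avatar.jpg"),      # channel avatar, may also be None
    "ThetaDev @ Embedded World 2019",
    "ThetaDev",
    Path("cover.png"),       # 500x500 output cover
)
```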

ucast/db.py (14 lines, new file)
@@ -0,0 +1,14 @@
# coding=utf-8
from starlette_core.database import Database, metadata  # noqa: F401

from ucast import models  # noqa: F401
from ucast.config import DATABASE_URL

# set db config options
if DATABASE_URL.driver == "psycopg2":
    engine_kwargs = {"pool_size": 20, "max_overflow": 0}
else:
    engine_kwargs = {}

# setup database url
db = Database(DATABASE_URL, engine_kwargs=engine_kwargs)

ucast/feed.py (201 lines, deleted)
@@ -1,201 +0,0 @@
import re
from xml.sax import saxutils

from django import http
from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.contrib.syndication.views import Feed, add_domain
from django.utils import feedgenerator
from django.utils.feedgenerator import Rss201rev2Feed, rfc2822_date
from django.utils.xmlutils import SimplerXMLGenerator

from ucast.models import Channel, Video
from ucast.service import util

URL_REGEX = r"""http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"""


class PodcastFeedType(Rss201rev2Feed):
    content_type = "application/xml; charset=utf-8"

    def rss_attributes(self):
        attrs = super().rss_attributes()
        attrs["xmlns:itunes"] = "http://www.itunes.com/dtds/podcast-1.0.dtd"
        return attrs

    @staticmethod
    def _xml_escape(text: str) -> str:
        text = saxutils.escape(text)
        text = re.sub(URL_REGEX, lambda m: f'<a href="{m[0]}">{m[0]}</a>', text)
        text = text.replace("\n", "<br>")
        return text

    @staticmethod
    def _add_text_element(handler: SimplerXMLGenerator, text: str):
        handler.startElement("description", {})
        handler.ignorableWhitespace(f"<![CDATA[{PodcastFeedType._xml_escape(text)}]]>")
        handler.endElement("description")

    @staticmethod
    def _format_secs(secs: int) -> str:
        mm, ss = divmod(secs, 60)
        hh, mm = divmod(mm, 60)
        s = "%02d:%02d:%02d" % (hh, mm, ss)
        return s

    def add_root_elements(self, handler: SimplerXMLGenerator):
        handler.addQuickElement("title", self.feed["title"])
        handler.addQuickElement("link", self.feed["link"])
        self._add_text_element(handler, self.feed["description"])
        if self.feed["feed_url"] is not None:
            handler.addQuickElement(
                "atom:link", None, {"rel": "self", "href": self.feed["feed_url"]}
            )
        if self.feed["language"] is not None:
            handler.addQuickElement("language", self.feed["language"])
        for cat in self.feed["categories"]:
            handler.addQuickElement("category", cat)
        if self.feed["feed_copyright"] is not None:
            handler.addQuickElement("copyright", self.feed["feed_copyright"])
        handler.addQuickElement("lastBuildDate", rfc2822_date(self.latest_post_date()))
        if self.feed["ttl"] is not None:
            handler.addQuickElement("ttl", self.feed["ttl"])

        if self.feed.get("image_url") is not None:
            handler.startElement("image", {})
            handler.addQuickElement("url", self.feed["image_url"])
            handler.addQuickElement("title", self.feed["title"])
            handler.addQuickElement("link", self.feed["link"])
            handler.endElement("image")

            handler.addQuickElement(
                "itunes:image", None, {"href": self.feed["image_url"]}
            )

    def add_item_elements(self, handler: SimplerXMLGenerator, item):
        handler.addQuickElement("title", item["title"])
        handler.addQuickElement("link", item["link"])

        if item["description"] is not None:
            self._add_text_element(handler, item["description"])

        # Author information.
        if item["author_name"] and item["author_email"]:
            handler.addQuickElement(
                "author", "%s (%s)" % (item["author_email"], item["author_name"])
            )
        elif item["author_email"]:
            handler.addQuickElement("author", item["author_email"])
        elif item["author_name"]:
            handler.addQuickElement(
                "dc:creator",
                item["author_name"],
                {"xmlns:dc": "http://purl.org/dc/elements/1.1/"},
            )

        if item["pubdate"] is not None:
            handler.addQuickElement("pubDate", rfc2822_date(item["pubdate"]))
        if item["comments"] is not None:
            handler.addQuickElement("comments", item["comments"])
        if item["unique_id"] is not None:
            guid_attrs = {}
            if isinstance(item.get("unique_id_is_permalink"), bool):
                guid_attrs["isPermaLink"] = str(item["unique_id_is_permalink"]).lower()
            handler.addQuickElement("guid", item["unique_id"], guid_attrs)
        if item["ttl"] is not None:
            handler.addQuickElement("ttl", item["ttl"])

        # Enclosure.
        if item["enclosures"]:
            enclosures = list(item["enclosures"])
            if len(enclosures) > 1:
                raise ValueError(
                    "RSS feed items may only have one enclosure, see "
                    "http://www.rssboard.org/rss-profile#element-channel-item-enclosure"
                )
            enclosure = enclosures[0]
            handler.addQuickElement(
                "enclosure",
                "",
                {
                    "url": enclosure.url,
                    "length": enclosure.length,
                    "type": enclosure.mime_type,
                },
            )

        # Categories.
        for cat in item["categories"]:
            handler.addQuickElement("category", cat)

        # Cover image
        if item.get("image_url"):
            handler.addQuickElement("itunes:image", None, {"href": item["image_url"]})

        # Duration
        if item.get("duration"):
            handler.addQuickElement(
                "itunes:duration", self._format_secs(item["duration"])
            )


class UcastFeed(Feed):
    feed_type = PodcastFeedType

    def get_object(self, request, *args, **kwargs):
        channel_slug = kwargs["channel"]
        return Channel.objects.get(slug=channel_slug)

    def get_feed(self, channel: Channel, request: http.HttpRequest):
        max_items = settings.FEED_MAX_ITEMS
        try:
            max_items = int(request.GET.get("items"))
        except (TypeError, ValueError):
            pass

        feed = self.feed_type(
            title=channel.name,
            link=channel.get_absolute_url(),
            description=channel.description,
            language=self.language,
            feed_url=self.full_link_url(request, f"/feed/{channel.slug}"),
            image_url=self.full_link_url(request, f"/files/avatar/{channel.slug}.jpg"),
        )

        for video in channel.video_set.filter(downloaded__isnull=False).order_by(
            "-published"
        )[:max_items]:
            feed.add_item(
                title=video.title,
                link=video.get_absolute_url(),
                description=video.description,
                unique_id=video.get_absolute_url(),
                unique_id_is_permalink=True,
                enclosures=self.item_enclosures_domain(video, request),
                pubdate=video.published,
                updateddate=video.downloaded,
                image_url=self.full_link_url(
                    request, f"/files/cover/{channel.slug}/{video.slug}.png"
                ),
                duration=video.duration,
            )
        return feed

    @staticmethod
    def full_link_url(request: http.HttpRequest, page_url: str) -> str:
        anon_url = add_domain(
            get_current_site(request).domain,
            page_url,
            request.is_secure(),
        )
        return util.add_key_to_url(anon_url, request.user.get_feed_key())

    def item_enclosures_domain(self, item: Video, request: http.HttpRequest):
        enc = feedgenerator.Enclosure(
            url=self.full_link_url(
                request, f"/files/audio/{item.channel.slug}/{item.slug}.mp3"
            ),
            length=str(item.download_size),
            mime_type="audio/mpeg",
        )
        return [enc]
|
||||||
from django import forms
|
|
||||||
|
|
||||||
|
|
||||||
class AddChannelForm(forms.Form):
|
|
||||||
channel_str = forms.CharField(label="Channel-ID / URL")
|
|
||||||
|
|
||||||
|
|
||||||
class DeleteVideoForm(forms.Form):
|
|
||||||
id = forms.IntegerField()
|
|
||||||
|
|
||||||
|
|
||||||
class EditChannelForm(forms.Form):
|
|
||||||
skip_shorts = forms.BooleanField(
|
|
||||||
label="Skip shorts (vertical videos < 1m)", required=False
|
|
||||||
)
|
|
||||||
skip_livestreams = forms.BooleanField(label="Skip livestreams", required=False)
|
|
||||||
|
|
||||||
|
|
||||||
class DownloadChannelForm(forms.Form):
|
|
||||||
n_videos = forms.IntegerField(
|
|
||||||
label="Number of videos (counting from most recent)", initial=50, min_value=1
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class RequeueForm(forms.Form):
|
|
||||||
id = forms.UUIDField()
|
|
|

@@ -1,34 +0,0 @@
"""
Based on the django-rq package by Selwin Ong (MIT License)
https://github.com/rq/django-rq
"""

from django.core.management.base import BaseCommand

from ucast import queue


class Command(BaseCommand):
    """Queue a function with the given arguments."""

    help = __doc__
    args = "<function arg arg ...>"

    def add_arguments(self, parser):
        parser.add_argument(
            "--timeout", "-t", type=int, dest="timeout", help="A timeout in seconds"
        )

        parser.add_argument("args", nargs="*")

    def handle(self, *args, **options):
        """
        Queues the function given with the first argument with the
        parameters given with the rest of the argument list.
        """
        verbosity = int(options.get("verbosity", 1))
        timeout = options.get("timeout")
        q = queue.get_queue()
        job = q.enqueue_call(args[0], args=args[1:], timeout=timeout)
        if verbosity:
            print("Job %s created" % job.id)

@@ -1,58 +0,0 @@
"""
Based on the django-rq package by Selwin Ong (MIT License)
https://github.com/rq/django-rq
"""

import os

from django.core.management.base import BaseCommand
from rq_scheduler.utils import setup_loghandlers

from ucast import queue
from ucast.tasks import schedule


class Command(BaseCommand):
    """Runs RQ Scheduler"""

    help = __doc__

    def add_arguments(self, parser):
        parser.add_argument(
            "--pid",
            action="store",
            dest="pid",
            default=None,
            help="PID file to write the scheduler`s pid into",
        )
        parser.add_argument(
            "--interval",
            "-i",
            type=int,
            dest="interval",
            default=60,
            help="""How often the scheduler checks for new jobs to add to the
            queue (in seconds).""",
        )

    def handle(self, *args, **options):
        schedule.clear_scheduled_jobs()
        schedule.register_scheduled_jobs()

        pid = options.get("pid")
        if pid:
            with open(os.path.expanduser(pid), "w") as fp:
                fp.write(str(os.getpid()))

        # Verbosity is defined by default in BaseCommand for all commands
        verbosity = options.get("verbosity")
        if verbosity >= 2:
            level = "DEBUG"
        elif verbosity == 0:
            level = "WARNING"
        else:
            level = "INFO"
        setup_loghandlers(level)

        scheduler = queue.get_scheduler(options.get("interval"))
        scheduler.run()
@ -1,122 +0,0 @@
"""
Based on the django-rq package by Selwin Ong (MIT License)
https://github.com/rq/django-rq
"""

import time

import click
from django.core.management.base import BaseCommand

from ucast import queue


class Command(BaseCommand):
    """Print RQ statistics"""

    help = __doc__

    def add_arguments(self, parser):
        parser.add_argument(
            "-j",
            "--json",
            action="store_true",
            dest="json",
            help="Output statistics as JSON",
        )

        parser.add_argument(
            "-y",
            "--yaml",
            action="store_true",
            dest="yaml",
            help="Output statistics as YAML",
        )

        parser.add_argument(
            "-i",
            "--interval",
            dest="interval",
            type=float,
            help="Poll statistics every N seconds",
        )

    def _print_separator(self):
        try:
            click.echo(self._separator)
        except AttributeError:
            self._separator = "-" * self.table_width
            click.echo(self._separator)

    def _print_stats_dashboard(self, statistics):
        if self.interval:
            click.clear()

        click.echo()
        click.echo("Django RQ CLI Dashboard")
        click.echo()
        self._print_separator()

        # Header
        click.echo(
            """| %-15s|%10s |%10s |%10s |%10s |%10s |%10s |"""
            % ("Name", "Queued", "Active", "Deferred", "Finished", "Failed", "Workers")
        )

        self._print_separator()

        click.echo(
            """| %-15s|%10s |%10s |%10s |%10s |%10s |%10s |"""
            % (
                statistics["name"],
                statistics["jobs"],
                statistics["started_jobs"],
                statistics["deferred_jobs"],
                statistics["finished_jobs"],
                statistics["failed_jobs"],
                statistics["workers"],
            )
        )

        self._print_separator()

        if self.interval:
            click.echo()
            click.echo("Press 'Ctrl+c' to quit")

    def handle(self, *args, **options):

        if options.get("json"):
            import json

            click.echo(json.dumps(queue.get_statistics()))
            return

        if options.get("yaml"):
            try:
                import yaml
            except ImportError:
                click.echo("Aborting. LibYAML is not installed.")
                return
            # Disable YAML alias
            yaml.Dumper.ignore_aliases = lambda *args: True
            click.echo(yaml.dump(queue.get_statistics(), default_flow_style=False))
            return

        self.interval = options.get("interval")

        # Arbitrary
        self.table_width = 90

        # Do not continuously poll
        if not self.interval:
            self._print_stats_dashboard(queue.get_statistics())
            return

        # Abuse click to 'live'-render the CLI dashboard
        try:
            while True:
                self._print_stats_dashboard(queue.get_statistics())
                time.sleep(self.interval)
        except KeyboardInterrupt:
            pass

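As a hedged sketch of the JSON output path: the keys below come from the get_statistics() helper defined in ucast/queue.py further down in this diff; the values shown are illustrative only.

# python manage.py rqstats --json   (command name assumed)
# could print something like:
# {"name": "default", "jobs": 3, "oldest_job_timestamp": "2022-06-21, 23:07:00",
#  "connection_kwargs": {"host": "localhost", "port": 6379}, "workers": 1,
#  "finished_jobs": 10, "started_jobs": 1, "deferred_jobs": 0,
#  "failed_jobs": 2, "scheduled_jobs": 4}
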
@ -1,103 +0,0 @@
"""
Based on the django-rq package by Selwin Ong (MIT License)
https://github.com/rq/django-rq
"""

import os
import sys

from django.core.management.base import BaseCommand
from django.db import connections
from redis.exceptions import ConnectionError
from rq import use_connection
from rq.logutils import setup_loghandlers

from ucast import queue


def reset_db_connections():
    for c in connections.all():
        c.close()


class Command(BaseCommand):
    """Runs RQ worker"""

    help = __doc__

    def add_arguments(self, parser):
        parser.add_argument(
            "--pid",
            action="store",
            dest="pid",
            default=None,
            help="PID file to write the worker's pid into",
        )
        parser.add_argument(
            "--burst",
            action="store_true",
            dest="burst",
            default=False,
            help="Run worker in burst mode",
        )
        parser.add_argument(
            "--with-scheduler",
            action="store_true",
            dest="with_scheduler",
            default=False,
            help="Run worker with scheduler enabled",
        )
        parser.add_argument(
            "--name",
            action="store",
            dest="name",
            default=None,
            help="Name of the worker",
        )
        parser.add_argument(
            "--worker-ttl",
            action="store",
            type=int,
            dest="worker_ttl",
            default=420,
            help="Default worker timeout to be used",
        )

    def handle(self, *args, **options):
        pid = options.get("pid")
        if pid:
            with open(os.path.expanduser(pid), "w") as fp:
                fp.write(str(os.getpid()))

        # Verbosity is defined by default in BaseCommand for all commands
        verbosity = options.get("verbosity")
        if verbosity >= 2:
            level = "DEBUG"
        elif verbosity == 0:
            level = "WARNING"
        else:
            level = "INFO"
        setup_loghandlers(level)

        try:
            # Instantiate a worker
            worker_kwargs = {
                "name": options["name"],
                "default_worker_ttl": options["worker_ttl"],
            }
            w = queue.get_worker(**worker_kwargs)

            # Call use_connection to push the redis connection into LocalStack;
            # without this, jobs using RQ's get_current_job() will fail
            use_connection(w.connection)
            # Close any opened DB connection before any fork
            reset_db_connections()

            w.work(
                burst=options.get("burst", False),
                with_scheduler=options.get("with_scheduler", False),
                logging_level=level,
            )
        except ConnectionError as e:
            self.stderr.write(str(e))
            sys.exit(1)

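A short usage sketch for the worker command; the command name is assumed, while the flags match the arguments defined above.

# Hypothetical invocations:
#   python manage.py rqworker --burst                                 # drain the queue once, then exit
#   python manage.py rqworker --with-scheduler --pid /tmp/worker.pid  # long-running worker with RQ scheduler
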
@ -1,192 +0,0 @@
# Generated by Django 4.0.4 on 2022-06-21 23:07

import django.contrib.auth.models
import django.contrib.auth.validators
import django.db.models.deletion
import django.utils.timezone
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("auth", "0012_alter_user_first_name_max_length"),
    ]

    operations = [
        migrations.CreateModel(
            name="Channel",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("channel_id", models.CharField(db_index=True, max_length=30)),
                ("name", models.CharField(max_length=100)),
                ("slug", models.CharField(db_index=True, max_length=100)),
                ("description", models.TextField()),
                ("subscribers", models.CharField(max_length=20, null=True)),
                ("active", models.BooleanField(default=True)),
                ("skip_livestreams", models.BooleanField(default=True)),
                ("skip_shorts", models.BooleanField(default=True)),
                ("avatar_url", models.CharField(max_length=250, null=True)),
                (
                    "last_update",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
            ],
        ),
        migrations.CreateModel(
            name="Video",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("video_id", models.CharField(db_index=True, max_length=30)),
                ("title", models.CharField(max_length=200)),
                ("slug", models.CharField(db_index=True, max_length=209)),
                ("published", models.DateTimeField()),
                ("downloaded", models.DateTimeField(null=True)),
                ("description", models.TextField()),
                ("duration", models.IntegerField()),
                ("is_livestream", models.BooleanField(default=False)),
                ("is_short", models.BooleanField(default=False)),
                ("download_size", models.IntegerField(null=True)),
                ("is_deleted", models.BooleanField(default=False)),
                (
                    "channel",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="ucast.channel"
                    ),
                ),
            ],
        ),
        migrations.CreateModel(
            name="User",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("password", models.CharField(max_length=128, verbose_name="password")),
                (
                    "last_login",
                    models.DateTimeField(
                        blank=True, null=True, verbose_name="last login"
                    ),
                ),
                (
                    "is_superuser",
                    models.BooleanField(
                        default=False,
                        help_text="Designates that this user has all permissions without explicitly assigning them.",
                        verbose_name="superuser status",
                    ),
                ),
                (
                    "username",
                    models.CharField(
                        error_messages={
                            "unique": "A user with that username already exists."
                        },
                        help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
                        max_length=150,
                        unique=True,
                        validators=[
                            django.contrib.auth.validators.UnicodeUsernameValidator()
                        ],
                        verbose_name="username",
                    ),
                ),
                (
                    "first_name",
                    models.CharField(
                        blank=True, max_length=150, verbose_name="first name"
                    ),
                ),
                (
                    "last_name",
                    models.CharField(
                        blank=True, max_length=150, verbose_name="last name"
                    ),
                ),
                (
                    "email",
                    models.EmailField(
                        blank=True, max_length=254, verbose_name="email address"
                    ),
                ),
                (
                    "is_staff",
                    models.BooleanField(
                        default=False,
                        help_text="Designates whether the user can log into this admin site.",
                        verbose_name="staff status",
                    ),
                ),
                (
                    "is_active",
                    models.BooleanField(
                        default=True,
                        help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
                        verbose_name="active",
                    ),
                ),
                (
                    "date_joined",
                    models.DateTimeField(
                        default=django.utils.timezone.now, verbose_name="date joined"
                    ),
                ),
                ("feed_key", models.CharField(default=None, max_length=50, null=True)),
                (
                    "groups",
                    models.ManyToManyField(
                        blank=True,
                        help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                        related_name="user_set",
                        related_query_name="user",
                        to="auth.group",
                        verbose_name="groups",
                    ),
                ),
                (
                    "user_permissions",
                    models.ManyToManyField(
                        blank=True,
                        help_text="Specific permissions for this user.",
                        related_name="user_set",
                        related_query_name="user",
                        to="auth.permission",
                        verbose_name="user permissions",
                    ),
                ),
            ],
            options={
                "verbose_name": "user",
                "verbose_name_plural": "users",
                "abstract": False,
            },
            managers=[
                ("objects", django.contrib.auth.models.UserManager()),
            ],
        ),
    ]

@ -0,0 +1 @@
# coding=utf-8
74
ucast/migrations/env.py
Normal file
@ -0,0 +1,74 @@
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from ucast import db

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

config.set_main_option("sqlalchemy.url", str(db.DATABASE_URL))
target_metadata = db.metadata

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)


# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

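With this env.py in place, the standard Alembic workflow applies. The alembic.ini script location is an assumption; the commands themselves are stock Alembic CLI.

# alembic revision --autogenerate -m "describe change"   # diff ucast.db metadata against the DB
# alembic upgrade head                                   # apply all pending migrations
# alembic downgrade -1                                   # revert the most recent migration
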
24
ucast/migrations/script.py.mako
Normal file
@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

@ -0,0 +1,52 @@
"""Initial revision

Revision ID: 0ae786127cd8
Revises:
Create Date: 2022-05-03 10:03:42.224721

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "0ae786127cd8"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "channels",
        sa.Column("id", sa.String(length=30), nullable=False),
        sa.Column("name", sa.Unicode(length=100), nullable=False),
        sa.Column("active", sa.Boolean(), nullable=False),
        sa.Column("skip_livestreams", sa.Boolean(), nullable=False),
        sa.Column("skip_shorts", sa.Boolean(), nullable=False),
        sa.Column("keep_videos", sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "videos",
        sa.Column("id", sa.String(length=30), nullable=False),
        sa.Column("channel_id", sa.String(length=30), nullable=False),
        sa.Column("title", sa.Unicode(length=200), nullable=False),
        sa.Column("slug", sa.String(length=209), nullable=False),
        sa.Column("published", sa.DateTime(), nullable=False),
        sa.Column("downloaded", sa.DateTime(), nullable=True),
        sa.Column("description", sa.UnicodeText(), nullable=False),
        sa.ForeignKeyConstraint(
            ["channel_id"],
            ["channels.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("videos")
    op.drop_table("channels")
    # ### end Alembic commands ###

161
ucast/models.py
@ -1,139 +1,38 @@
-import base64
-import datetime
-
-from Cryptodome import Random
-from django.contrib.auth.models import AbstractUser
-from django.db import models
-from django.utils import timezone
-
-from ucast.service import util
-
-
-def _get_unique_slug(
-    str_in: str, objects: models.query.QuerySet, model_name: str
-) -> str:
-    """
-    Get a new, unique slug for a database item
-
-    :param str_in: Input string to slugify
-    :param objects: Django query set
-    :return: Slug
-    """
-    original_slug = util.get_slug(str_in)
-    slug = original_slug
-
-    for i in range(1, objects.count() + 2):
-        if not objects.filter(slug=slug).exists():
-            return slug
-
-        slug = f"{original_slug}_{i}"
-
-    raise Exception(f"unique {model_name} slug for {original_slug} could not be found")
-
-
-class Channel(models.Model):
-    channel_id = models.CharField(max_length=30, db_index=True)
-    name = models.CharField(max_length=100)
-    slug = models.CharField(max_length=100, db_index=True)
-    description = models.TextField()
-    subscribers = models.CharField(max_length=20, null=True)
-    active = models.BooleanField(default=True)
-    skip_livestreams = models.BooleanField(default=True)
-    skip_shorts = models.BooleanField(default=True)
-    avatar_url = models.CharField(max_length=250, null=True)
-    last_update = models.DateTimeField(default=timezone.now)
-
-    @classmethod
-    def get_new_slug(cls, name: str) -> str:
-        return _get_unique_slug(name, cls.objects, "channel")
-
-    def get_full_description(self) -> str:
-        desc = f"https://www.youtube.com/channel/{self.channel_id}"
-        if self.description:
-            desc = f"{self.description}\n\n{desc}"
-        return desc
-
-    def get_absolute_url(self) -> str:
-        return "https://www.youtube.com/channel/" + self.channel_id
-
-    def should_download(self, video: "Video") -> bool:
-        if self.skip_livestreams and video.is_livestream:
-            return False
-
-        if self.skip_shorts and video.is_short:
-            return False
-
-        return True
-
-    def download_size(self) -> int:
-        return self.video_set.aggregate(models.Sum("download_size")).get(
-            "download_size__sum"
-        )
-
-    def vfilter_args(self) -> dict:
-        filter_args = {}
-        if self.skip_livestreams:
-            filter_args["is_livestream"] = False
-
-        if self.skip_shorts:
-            filter_args["is_short"] = False
-
-        return filter_args
-
-    def __str__(self):
-        return self.name
-
-
-class Video(models.Model):
-    video_id = models.CharField(max_length=30, db_index=True)
-    title = models.CharField(max_length=200)
-    slug = models.CharField(max_length=209, db_index=True)
-    channel = models.ForeignKey(Channel, on_delete=models.CASCADE)
-    published = models.DateTimeField()
-    downloaded = models.DateTimeField(null=True)
-    description = models.TextField()
-    duration = models.IntegerField()
-    is_livestream = models.BooleanField(default=False)
-    is_short = models.BooleanField(default=False)
-    download_size = models.IntegerField(null=True)
-    is_deleted = models.BooleanField(default=False)
-
-    @classmethod
-    def get_new_slug(cls, title: str, date: datetime.date, channel_id: str) -> str:
-        title_w_date = f"{date.strftime('%Y%m%d')}_{title}"
-
-        return _get_unique_slug(
-            title_w_date, cls.objects.filter(channel__channel_id=channel_id), "video"
-        )
-
-    def get_full_description(self) -> str:
-        desc = f"https://youtu.be/{self.video_id}"
-        if self.description:
-            desc = f"{self.description}\n\n{desc}"
-        return desc
-
-    def get_absolute_url(self) -> str:
-        return f"https://www.youtube.com/watch?v={self.video_id}"
-
-    def __str__(self):
-        return self.title
-
-
-class User(AbstractUser):
-    feed_key = models.CharField(max_length=50, null=True, default=None)
-
-    def generate_feed_key(self):
-        for _ in range(0, User.objects.count()):
-            key = base64.urlsafe_b64encode(Random.get_random_bytes(18)).decode()
-
-            if not User.objects.filter(feed_key=key).exists():
-                self.feed_key = key
-                self.save()
-                return
-
-        raise Exception("unique feed key could not be found")
-
-    def get_feed_key(self) -> str:
-        if self.feed_key is None:
-            self.generate_feed_key()
-        return self.feed_key
+# coding=utf-8
+import slugify
+import sqlalchemy as sa
+from sqlalchemy import orm
+from starlette_core.database import Base
+
+# metadata = sa.MetaData()
+# Base = declarative_base(metadata=metadata)
+
+
+class Channel(Base):
+    __tablename__ = "channels"
+
+    id = sa.Column(sa.String(30), primary_key=True)
+    name = sa.Column(sa.Unicode(100), nullable=False)
+    videos = orm.relationship("Video", cascade="all, delete")
+    active = sa.Column(sa.Boolean, nullable=False, default=True)
+    skip_livestreams = sa.Column(sa.Boolean, nullable=False, default=True)
+    skip_shorts = sa.Column(sa.Boolean, nullable=False, default=True)
+    keep_videos = sa.Column(sa.Integer, nullable=True, default=None)
+
+
+class Video(Base):
+    __tablename__ = "videos"
+
+    id = sa.Column(sa.String(30), primary_key=True)
+    channel_id = sa.Column(sa.String(30), sa.ForeignKey("channels.id"), nullable=False)
+    channel = orm.relationship("Channel", back_populates="videos")
+    title = sa.Column(sa.Unicode(200), nullable=False)
+    slug = sa.Column(sa.String(209), nullable=False)
+    published = sa.Column(sa.DateTime, nullable=False)
+    downloaded = sa.Column(sa.DateTime, nullable=True)
+    description = sa.Column(sa.UnicodeText(), nullable=False, default="")
+
+    def get_slug(self) -> str:
+        title_slug = slugify.slugify(self.title, separator="_", lowercase=False)
+        date_slug = self.published.strftime("%Y%m%d")
+        return f"{date_slug}_{title_slug}"

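A small sketch of the new Video.get_slug() behavior, assuming the SQLAlchemy models above; the instance is built in memory (no session required) and the values are illustrative.

import datetime

video = Video(
    id="dQw4w9WgXcQ",
    title="Never Gonna Give You Up",
    published=datetime.datetime(2009, 10, 25),
)
# slugify with separator="_" and lowercase=False keeps the original casing
print(video.get_slug())  # -> 20091025_Never_Gonna_Give_You_Up
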
115
ucast/queue.py
@ -1,115 +0,0 @@
import redis
import rq
import rq_scheduler
from django.conf import settings
from django.db.models import ObjectDoesNotExist
from rq import registry

from ucast.models import Video
from ucast.service import util


def get_redis_connection() -> redis.client.Redis:
    return redis.Redis.from_url(settings.REDIS_URL)


def get_queue() -> rq.Queue:
    redis_conn = get_redis_connection()
    return rq.Queue(default_timeout=settings.REDIS_QUEUE_TIMEOUT, connection=redis_conn)


def get_scheduler(interval=60) -> rq_scheduler.Scheduler:
    redis_conn = get_redis_connection()
    return rq_scheduler.Scheduler(connection=redis_conn, interval=interval)


def get_worker(**kwargs) -> rq.Worker:
    queue = get_queue()
    return rq.Worker(
        queue,
        connection=queue.connection,
        default_result_ttl=settings.REDIS_QUEUE_RESULT_TTL,
        **kwargs,
    )


def enqueue(f, *args, **kwargs) -> rq.job.Job:
    queue = get_queue()
    return queue.enqueue(f, *args, **kwargs)


def get_statistics() -> dict:
    """
    Return statistics from the RQ Queue.

    Taken from the django-rq package by Selwin Ong (MIT License)
    https://github.com/rq/django-rq

    :return: RQ statistics
    """
    queue = get_queue()
    connection = queue.connection
    connection_kwargs = connection.connection_pool.connection_kwargs

    # Raw access to the first item from left of the redis list.
    # This might not be accurate since new jobs can be added from the left
    # with the `at_front` parameter.
    # Ideally rq should support Queue.oldest_job
    last_job_id = connection.lindex(queue.key, 0)
    last_job = queue.fetch_job(last_job_id.decode("utf-8")) if last_job_id else None
    if last_job:
        oldest_job_timestamp = util.to_localtime(last_job.enqueued_at).strftime(
            "%Y-%m-%d, %H:%M:%S"
        )
    else:
        oldest_job_timestamp = "-"

    # parser_class and connection_pool are not needed and not JSON serializable
    connection_kwargs.pop("parser_class", None)
    connection_kwargs.pop("connection_pool", None)

    finished_job_registry = registry.FinishedJobRegistry(queue.name, queue.connection)
    started_job_registry = registry.StartedJobRegistry(queue.name, queue.connection)
    deferred_job_registry = registry.DeferredJobRegistry(queue.name, queue.connection)
    failed_job_registry = registry.FailedJobRegistry(queue.name, queue.connection)
    scheduled_job_registry = registry.ScheduledJobRegistry(queue.name, queue.connection)

    return {
        "name": queue.name,
        "jobs": queue.count,
        "oldest_job_timestamp": oldest_job_timestamp,
        "connection_kwargs": connection_kwargs,
        "workers": rq.Worker.count(queue=queue),
        "finished_jobs": len(finished_job_registry),
        "started_jobs": len(started_job_registry),
        "deferred_jobs": len(deferred_job_registry),
        "failed_jobs": len(failed_job_registry),
        "scheduled_jobs": len(scheduled_job_registry),
    }


def get_failed_job_registry():
    queue = get_queue()
    return registry.FailedJobRegistry(queue.name, queue.connection)


def get_downloading_videos(offset=0, limit=-1):
    queue = get_queue()
    v_ids = set()

    for job in queue.get_jobs(offset, limit):
        if (
            job.func_name == "ucast.tasks.download.download_video"
            and job.args
            and job.args[0] > 0
        ):
            v_ids.add(job.args[0])

    videos = []
    for v_id in v_ids:
        try:
            videos.append(Video.objects.get(id=v_id))
        except ObjectDoesNotExist:
            pass

    return videos

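For context, a hedged sketch of how these helpers were called; the ucast.tasks.download module path is inferred from the func_name string in get_downloading_videos() above.

from ucast import queue
from ucast.tasks import download  # module path assumed from the func_name above

job = queue.enqueue(download.download_video, 42)
stats = queue.get_statistics()
print(stats["jobs"], "job(s) queued,", stats["workers"], "worker(s) online")
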
(deleted binary image file, 1.4 KiB)
@ -1,74 +0,0 @@
import shutil

from ucast.models import Channel, Video
from ucast.service import storage, util, videoutil, youtube


class ChannelAlreadyExistsException(Exception):
    def __init__(self, *args: object) -> None:
        super().__init__("channel already exists", *args)


def download_channel_avatar(channel: Channel):
    store = storage.Storage()
    channel_folder = store.get_or_create_channel_folder(channel.slug)
    util.download_image_file(
        channel.avatar_url, channel_folder.file_avatar, videoutil.AVATAR_SIZE
    )
    videoutil.resize_avatar(channel_folder.file_avatar, channel_folder.file_avatar_sm)


def create_channel(channel_str: str) -> Channel:
    if youtube.CHANID_REGEX.match(channel_str):
        if Channel.objects.filter(channel_id=channel_str).exists():
            raise ChannelAlreadyExistsException()

    channel_url = youtube.channel_url_from_str(channel_str)
    channel_data = youtube.get_channel_metadata(channel_url)

    if Channel.objects.filter(channel_id=channel_data.id).exists():
        raise ChannelAlreadyExistsException()

    channel_slug = Channel.get_new_slug(channel_data.name)

    channel = Channel(
        channel_id=channel_data.id,
        name=channel_data.name,
        slug=channel_slug,
        description=channel_data.description,
        subscribers=channel_data.subscribers,
        avatar_url=channel_data.avatar_url,
    )

    download_channel_avatar(channel)

    channel.save()
    return channel


def delete_video(id: int):
    video = Video.objects.get(id=id)

    store = storage.Storage()
    channel_folder = store.get_channel_folder(video.channel.slug)

    util.remove_if_exists(channel_folder.get_audio(video.slug))
    util.remove_if_exists(channel_folder.get_cover(video.slug))
    util.remove_if_exists(channel_folder.get_thumbnail(video.slug))
    util.remove_if_exists(channel_folder.get_thumbnail(video.slug, True))

    video.is_deleted = True
    video.downloaded = None
    video.download_size = None
    video.save()


def delete_channel(id: int):
    channel = Channel.objects.get(id=id)

    store = storage.Storage()
    channel_folder = store.get_channel_folder(channel.slug)

    shutil.rmtree(channel_folder.dir_root)

    channel.delete()

@ -1,451 +0,0 @@
import math
import random
from importlib import resources
from pathlib import Path
from typing import List, Literal, Optional, Tuple

import wcag_contrast_ratio
from colorthief import ColorThief
from fonts.ttf import SourceSansPro
from PIL import Image, ImageDraw, ImageEnhance, ImageFilter, ImageFont

from ucast.service import typ, util

COVER_STYLE_BLUR = "blur"
COVER_STYLE_GRADIENT = "gradient"
CoverStyle = Literal["blur", "gradient"]

CHAR_ELLIPSIS = "…"
COVER_WIDTH = 500
MIN_CONTRAST = 4.5


def _split_text(
    height: int, width: int, text: str, font: ImageFont.FreeTypeFont, line_spacing=0
) -> List[str]:
    """
    Split and trim the input text so it can be printed to a certain
    area of an image.

    :param height: Image area height [px]
    :param width: Image area width [px]
    :param text: Input text
    :param font: Pillow ImageFont
    :param line_spacing: Line spacing [px]
    :return: List of lines
    """
    if height < font.size:
        return []

    max_lines = math.floor((height - font.size) / (font.size + line_spacing)) + 1

    lines = []
    line = ""

    for word in text.split(" "):
        if len(lines) >= max_lines:
            line = word
            break

        if line == "":
            nline = word
        else:
            nline = line + " " + word

        if font.getsize(nline)[0] <= width:
            line = nline
        elif line != "":
            lines.append(line)
            line = word
        else:
            # try to trim current word
            while nline:
                nline = nline[:-1]
                nline_e = nline + CHAR_ELLIPSIS
                if font.getsize(nline_e)[0] <= width:
                    lines.append(nline_e)
                    break

    if line != "":
        if len(lines) >= max_lines:
            # Drop the last line and add ... to the end
            lastline = lines[-1] + CHAR_ELLIPSIS
            if font.getsize(lastline)[0] <= width:
                lines[-1] = lastline
            else:
                i_last_space = lines[-1].rfind(" ")
                lines[-1] = lines[-1][:i_last_space] + CHAR_ELLIPSIS
        else:
            lines.append(line)

    return lines


def _draw_text_box(
    draw: ImageDraw.ImageDraw,
    box: Tuple[int, int, int, int],
    text: str,
    font: ImageFont.FreeTypeFont,
    color: typ.Color = (0, 0, 0),
    line_spacing=0,
    vertical_center=True,
):
    """
    Draw a text box to an image. The text gets automatically
    wrapped and trimmed to fit.

    :param draw: Pillow ImageDraw object
    :param box: Coordinates of the text box ``(x_tl, y_tl, x_br, y_br)``
    :param text: Text to be printed
    :param font: Pillow ImageFont
    :param color: Text color
    :param line_spacing: Line spacing [px]
    :param vertical_center: Center text vertically in the box
    """
    x_tl, y_tl, x_br, y_br = box
    height = y_br - y_tl
    width = x_br - x_tl
    sanitized_text = util.strip_emoji(text)

    lines = _split_text(height, width, sanitized_text, font, line_spacing)

    y_start = y_tl
    if vertical_center:
        text_height = len(lines) * (font.size + line_spacing) - line_spacing
        y_start += int((height - text_height) / 2)

    for i, line in enumerate(lines):
        y_pos = y_start + i * (font.size + line_spacing)
        draw.text((x_tl, y_pos), line, color, font)


def _get_dominant_color(img: Image.Image) -> typ.Color:
    """
    Return the dominant color of an image using the ColorThief library.

    :param img: Pillow Image object
    :return: dominant color
    """
    thief = ColorThief.__new__(ColorThief)
    thief.image = img
    return thief.get_color()


def _interpolate_color(color_from: typ.Color, color_to: typ.Color, steps: int):
    """
    Return a generator providing colors within the given range. Useful to create
    gradients.

    :param color_from: Starting color
    :param color_to: Ending color
    :param steps: Number of steps
    :return: Generator providing the colors
    """
    det_co = [(t - f) / steps for f, t in zip(color_from, color_to)]
    for i in range(steps):
        yield [round(f + det * i) for f, det in zip(color_from, det_co)]


def _color_to_float(color: typ.Color) -> tuple[float, ...]:
    return tuple(c / 255 for c in color)


def _get_text_color(bg_color: typ.Color) -> typ.Color:
    """
    Return the text color (black or white) with the largest contrast
    to a given background color.

    :param bg_color: Background color
    :return: Text color
    """
    color_float = _color_to_float(bg_color)
    c_blk = wcag_contrast_ratio.rgb((0, 0, 0), color_float)
    c_wht = wcag_contrast_ratio.rgb((1, 1, 1), color_float)
    if c_wht > c_blk:
        return 255, 255, 255
    return 0, 0, 0


def _get_baseimage(thumbnail: Image.Image, style: CoverStyle):
    """
    Return the background image for the cover.

    :param thumbnail: Thumbnail image object
    :param style: Style of the cover image
    :return: Base image
    """
    cover = Image.new("RGB", (COVER_WIDTH, COVER_WIDTH))

    if style == COVER_STYLE_GRADIENT:
        # Thumbnail with color gradient background

        # Get dominant colors from the top and bottom 20% of the thumbnail image
        top_part = thumbnail.crop((0, 0, COVER_WIDTH, int(thumbnail.height * 0.2)))
        bottom_part = thumbnail.crop(
            (0, int(thumbnail.height * 0.8), COVER_WIDTH, thumbnail.height)
        )
        top_color = _get_dominant_color(top_part)
        bottom_color = _get_dominant_color(bottom_part)

        cover_draw = ImageDraw.Draw(cover)

        for i, color in enumerate(
            _interpolate_color(top_color, bottom_color, cover.height)
        ):
            cover_draw.line(((0, i), (cover.width, i)), tuple(color), 1)
    else:
        # Thumbnail with blurred background
        ctn_width = int(COVER_WIDTH / thumbnail.height * thumbnail.width)
        ctn_x_left = int((ctn_width - COVER_WIDTH) / 2)

        ctn = thumbnail.resize(
            (ctn_width, COVER_WIDTH), Image.Resampling.LANCZOS
        ).filter(ImageFilter.GaussianBlur(20))
        cover.paste(ctn, (-ctn_x_left, 0))

    return cover


def _resize_thumbnail(thumbnail: Image.Image) -> Image.Image:
    """
    Scale the thumbnail image down to cover size and remove black bars.

    :param thumbnail: Thumbnail image object
    :return: Resized thumbnail image object
    """
    # Scale the thumbnail image down to cover size
    tn_resize_height = int(COVER_WIDTH / thumbnail.width * thumbnail.height)
    tn_16_9_height = int(COVER_WIDTH / 16 * 9)
    tn_height = min(tn_resize_height, tn_16_9_height)
    tn_crop_y_top = int((tn_resize_height - tn_height) / 2)
    tn_crop_y_bottom = tn_resize_height - tn_crop_y_top

    return thumbnail.resize(
        (COVER_WIDTH, tn_resize_height), Image.Resampling.LANCZOS
    ).crop((0, tn_crop_y_top, COVER_WIDTH, tn_crop_y_bottom))


def _prepare_text_background(
    base_img: Image.Image, bboxes: List[Tuple[int, int, int, int]]
) -> Tuple[Image.Image, typ.Color]:
    """
    Return the preferred text color (black or white) and darken
    the image if necessary.

    :param base_img: Image object
    :param bboxes: Text boxes
    :return: Updated image, text color
    """
    rng = random.Random()
    rng.seed(0x9B38D30461B7F0E6)

    min_contrast_bk = 22
    min_contrast_wt = 22
    worst_color_wt = None

    def corr_x(x: int) -> int:
        return min(max(0, x), base_img.width)

    def corr_y(y: int) -> int:
        return min(max(0, y), base_img.height)

    for bbox in bboxes:
        x_tl, y_tl, x_br, y_br = bbox
        x_tl = corr_x(x_tl)
        y_tl = corr_y(y_tl)
        x_br = corr_x(x_br)
        y_br = corr_y(y_br)

        height = y_br - y_tl
        width = x_br - x_tl

        for _ in range(math.ceil(width * height * 0.01)):
            target_pos = (rng.randint(x_tl, x_br - 1), rng.randint(y_tl, y_br - 1))
            img_color = base_img.getpixel(target_pos)
            img_color_float = _color_to_float(img_color)

            ct_bk = wcag_contrast_ratio.rgb((0, 0, 0), img_color_float)
            ct_wt = wcag_contrast_ratio.rgb((1, 1, 1), img_color_float)

            if ct_bk < min_contrast_bk:
                min_contrast_bk = ct_bk

            if ct_wt < min_contrast_wt:
                worst_color_wt = img_color
                min_contrast_wt = ct_wt

    if min_contrast_bk >= MIN_CONTRAST:
        return base_img, (0, 0, 0)
    if min_contrast_wt >= MIN_CONTRAST:
        return base_img, (255, 255, 255)

    pixel = Image.new("RGB", (1, 1), worst_color_wt)

    for i in range(1, 100):
        brightness_f = 1 - i / 100
        contrast_f = 1 - i / 1000

        pixel_c = ImageEnhance.Brightness(pixel).enhance(brightness_f)
        pixel_c = ImageEnhance.Contrast(pixel_c).enhance(contrast_f)
        new_color = pixel_c.getpixel((0, 0))

        if (
            wcag_contrast_ratio.rgb((1, 1, 1), _color_to_float(new_color))
            >= MIN_CONTRAST
        ):
            new_img = ImageEnhance.Brightness(base_img).enhance(brightness_f)
            new_img = ImageEnhance.Contrast(new_img).enhance(contrast_f)
            return new_img, (255, 255, 255)

    return base_img, (255, 255, 255)


def _draw_text_avatar(
    cover: Image.Image,
    avatar: Optional[Image.Image],
    title: str,
    channel: str,
) -> Image.Image:
    # Add channel avatar
    avt_margin = 0
    avt_size = 0

    tn_16_9_height = int(COVER_WIDTH / 16 * 9)  # typical: 281
    tn_16_9_margin = int((COVER_WIDTH - tn_16_9_height) / 2)  # typical: 110

    if avatar:
        avt_margin = int(tn_16_9_margin * 0.05)  # typical: 14
        avt_size = tn_16_9_margin - 2 * avt_margin  # typical: 82

    # Add text
    text_margin_x = 16
    text_margin_topleft = avt_margin + avt_size + text_margin_x  # typical: 112
    text_vertical_offset = -17
    text_line_space = -4

    fnt = ImageFont.truetype(SourceSansPro, 50)
    top_text_box = (  # typical: (112, -17, 484, 110)
        text_margin_topleft,
        text_vertical_offset,
        COVER_WIDTH - text_margin_x,
        tn_16_9_margin,
    )
    bottom_text_box = (  # typical: (16, 373, 484, 500)
        text_margin_x,
        COVER_WIDTH - tn_16_9_margin + text_vertical_offset,
        COVER_WIDTH - text_margin_x,
        COVER_WIDTH,
    )

    cover, text_color = _prepare_text_background(cover, [top_text_box, bottom_text_box])
    cover_draw = ImageDraw.Draw(cover)

    _draw_text_box(
        cover_draw,
        top_text_box,
        channel,
        fnt,
        text_color,
        text_line_space,
    )
    _draw_text_box(
        cover_draw,
        bottom_text_box,
        title,
        fnt,
        text_color,
        text_line_space,
    )

    if avatar:
        avt = avatar.resize((avt_size, avt_size), Image.Resampling.LANCZOS)

        circle_mask = Image.new("L", (avt_size, avt_size))
        circle_mask_draw = ImageDraw.Draw(circle_mask)
        circle_mask_draw.ellipse((0, 0, avt_size, avt_size), 255)

        cover.paste(avt, (avt_margin, avt_margin), circle_mask)

    return cover


def _create_cover_image(
    thumbnail: Image.Image,
    avatar: Optional[Image.Image],
    title: str,
    channel: str,
    style: CoverStyle,
) -> Image.Image:
    """
    Create a cover image from video metadata and thumbnail.

    :param thumbnail: Thumbnail image object
    :param avatar: Creator avatar image object
    :param title: Video title
    :param channel: Channel name
    :param style: Style of cover image
    :return: Cover image
    """
    tn = _resize_thumbnail(thumbnail)

    cover = _get_baseimage(tn, style)

    cover = _draw_text_avatar(cover, avatar, title, channel)

    # Insert thumbnail image in the middle
    tn_margin = int((COVER_WIDTH - tn.height) / 2)
    cover.paste(tn, (0, tn_margin))

    return cover


def _create_blank_cover_image(
    avatar: Optional[Image.Image], title: str, channel: str
) -> Image.Image:
    bg_color = (16, 16, 16)
    cover = Image.new("RGB", (COVER_WIDTH, COVER_WIDTH), bg_color)

    yt_icon_path = resources.path("ucast.resources", "yt_icon.png")
    yt_icon = Image.open(yt_icon_path)
    yt_icon_x_left = int((COVER_WIDTH - yt_icon.width) / 2)
    yt_icon_y_top = int((COVER_WIDTH - yt_icon.height) / 2)
    cover.paste(yt_icon, (yt_icon_x_left, yt_icon_y_top))

    _draw_text_avatar(cover, avatar, title, channel)

    return cover


def create_cover_file(
    thumbnail_path: Optional[Path],
    avatar_path: Optional[Path],
    title: str,
    channel: str,
    style: CoverStyle,
    cover_path: Path,
):
    """
    Create a cover image from video metadata and thumbnail
    and save it to disk.

    :param thumbnail_path: Path of thumbnail image
    :param avatar_path: Path of avatar image
    :param title: Video title
    :param channel: Channel name
    :param style: Style of cover image
    :param cover_path: Save path of cover image
    """
    thumbnail = None
    if thumbnail_path:
        thumbnail = Image.open(thumbnail_path)

    avatar = None
    if avatar_path:
        avatar = Image.open(avatar_path)

    if thumbnail:
        cvr = _create_cover_image(thumbnail, avatar, title, channel, style)
    else:
        cvr = _create_blank_cover_image(avatar, title, channel)

    cvr.save(cover_path)

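A minimal sketch of calling the public entry point above; all paths are placeholders.

from pathlib import Path

create_cover_file(
    thumbnail_path=Path("thumbnail.webp"),
    avatar_path=Path("avatar.jpg"),
    title="My Video Title",
    channel="My Channel",
    style=COVER_STYLE_BLUR,
    cover_path=Path("cover.png"),
)
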
@ -1,40 +0,0 @@
from dataclasses import dataclass
from typing import Iterable

from django.utils.xmlutils import SimplerXMLGenerator

from ucast.models import Channel


@dataclass
class FeedElement:
    url: str
    title: str


def __add_feed_element(handler: SimplerXMLGenerator, element: FeedElement):
    handler.addQuickElement(
        "outline", attrs={"xmlUrl": element.url, "title": element.title}
    )


def write_opml(elements: Iterable[FeedElement], outfile):
    handler = SimplerXMLGenerator(outfile, "utf-8", short_empty_elements=True)
    handler.startDocument()
    handler.startElement("opml", {})
    handler.addQuickElement("head")
    handler.startElement("body", {"version": "1.0"})

    for element in elements:
        __add_feed_element(handler, element)

    handler.endElement("body")
    handler.endElement("opml")
    handler.endDocument()


def write_channels_opml(channels: Iterable[Channel], site_url: str, key: str, outfile):
    elements = [
        FeedElement(f"{site_url}/feed/{c.slug}?key={key}", c.name) for c in channels
    ]
    write_opml(elements, outfile)

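For reference, a sketch of the document write_opml() emits; the URL and key are placeholders, and whitespace is added here for readability (the generator writes everything on one line).

import io

buf = io.StringIO()
write_opml(
    [FeedElement("https://example.com/feed/my_channel?key=abc123", "My Channel")], buf
)
print(buf.getvalue())
# <?xml version="1.0" encoding="utf-8"?>
# <opml>
#   <head/>
#   <body version="1.0">
#     <outline xmlUrl="https://example.com/feed/my_channel?key=abc123" title="My Channel"/>
#   </body>
# </opml>
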
@ -1,244 +0,0 @@
|
||||||
"""
|
|
||||||
Based on the scrapetube package from dermasmid (MIT License)
|
|
||||||
https://github.com/dermasmid/scrapetube
|
|
||||||
"""
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
from typing import Generator, Literal, Optional
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
|
|
||||||
def get_channel(
|
|
||||||
channel_url: str,
|
|
||||||
limit: int = None,
|
|
||||||
sleep: int = 1,
|
|
||||||
sort_by: Literal["newest", "oldest", "popular"] = "newest",
|
|
||||||
) -> Generator[dict, None, None]:
|
|
||||||
"""
|
|
||||||
Get videos for a channel.
|
|
||||||
|
|
||||||
:param channel_url: The url of the channel you want to get the videos for.
|
|
||||||
:param limit: Limit the number of videos you want to get.
|
|
||||||
:param sleep: Seconds to sleep between API calls to youtube, in order to prevent
|
|
||||||
getting blocked. Defaults to ``1``.
|
|
||||||
:param sort_by: In what order to retrive to videos. Pass one of the following values.
|
|
||||||
``"newest"``: Get the new videos first.
|
|
||||||
``"oldest"``: Get the old videos first.
|
|
||||||
``"popular"``: Get the popular videos first.
|
|
||||||
Defaults to ``"newest"``.
|
|
||||||
:return: Generator providing the videos
|
|
||||||
"""
|
|
||||||
|
|
||||||
sort_by_map = {"newest": "dd", "oldest": "da", "popular": "p"}
|
|
||||||
url = "{url}/videos?view=0&sort={sort_by}&flow=grid".format(
|
|
||||||
url=channel_url,
|
|
||||||
sort_by=sort_by_map[sort_by],
|
|
||||||
)
|
|
||||||
api_endpoint = "https://www.youtube.com/youtubei/v1/browse"
|
|
||||||
videos = _get_videos(url, api_endpoint, "gridVideoRenderer", limit, sleep)
|
|
||||||
for video in videos:
|
|
||||||
yield video
|
|
||||||
|
|
||||||
|
|
||||||
def get_channel_metadata(channel_url: str) -> dict:
|
|
||||||
"""
|
|
||||||
Get metadata of a channel.
|
|
||||||
|
|
||||||
:param channel_url: Channel URL
|
|
||||||
:return: Raw channel metadata
|
|
||||||
"""
|
|
||||||
session = _new_session()
|
|
||||||
|
|
||||||
url = f"{channel_url}/videos?view=0&flow=grid"
|
|
||||||
|
|
||||||
html = _get_initial_data(session, url)
|
|
||||||
return json.loads(_get_json_from_html(html, "var ytInitialData = ", 0, "};") + "}")
|
|
||||||
|
|
||||||
|
|
||||||
def get_playlist(
|
|
||||||
playlist_id: str, limit: int = None, sleep: int = 1
|
|
||||||
) -> Generator[dict, None, None]:
|
|
||||||
"""
|
|
||||||
Get videos for a playlist.
|
|
||||||
|
|
||||||
:param playlist_id: The playlist id from the playlist you want to get the videos for.
|
|
||||||
:param limit: Limit the number of videos you want to get.
|
|
||||||
:param sleep: Seconds to sleep between API calls to youtube, in order to prevent
|
|
||||||
getting blocked. Defaults to ``1``.
|
|
||||||
:return: Generator providing the videos
|
|
||||||
"""
|
|
||||||
|
|
||||||
url = f"https://www.youtube.com/playlist?list={playlist_id}"
|
|
||||||
api_endpoint = "https://www.youtube.com/youtubei/v1/browse"
|
|
||||||
videos = _get_videos(url, api_endpoint, "playlistVideoRenderer", limit, sleep)
|
|
||||||
for video in videos:
|
|
||||||
yield video
|
|
||||||
|
|
||||||
|
|
||||||
def get_search(
|
|
||||||
query: str,
|
|
||||||
limit: int = None,
|
|
||||||
sleep: int = 1,
|
|
||||||
sort_by: Literal["relevance", "upload_date", "view_count", "rating"] = "relevance",
|
|
||||||
results_type: Literal["video", "channel", "playlist", "movie"] = "video",
|
|
||||||
) -> Generator[dict, None, None]:
|
|
||||||
"""
|
|
||||||
Search youtube and get videos.
|
|
||||||
|
|
||||||
:param query: The term you want to search for.
|
|
||||||
:param limit: Limit the number of videos you want to get.
|
|
||||||
:param sleep: Seconds to sleep between API calls to youtube, in order to prevent
|
|
||||||
getting blocked. Defaults to ``1``.
|
|
||||||
:param sort_by: In what order to retrive to videos. Pass one of the following values.
|
|
||||||
``"relevance"``: Get the new videos in order of relevance.
|
|
||||||
``"upload_date"``: Get the new videos first.
|
|
||||||
``"view_count"``: Get the popular videos first.
|
|
||||||
``"rating"``: Get videos with more likes first.
|
|
||||||
Defaults to ``"relevance"``.
|
|
||||||
:param results_type: What type you want to search for.
|
|
||||||
Pass one of the following values: ``"video"|"channel"|
|
|
||||||
"playlist"|"movie"``. Defaults to ``"video"``.
|
|
||||||
:return: Generator providing the videos
|
|
||||||
"""
|
|
||||||
|
|
||||||
sort_by_map = {
|
|
||||||
"relevance": "A",
|
|
||||||
"upload_date": "I",
|
|
||||||
"view_count": "M",
|
|
||||||
"rating": "E",
|
|
||||||
}
|
|
||||||
|
|
||||||
results_type_map = {
|
|
||||||
"video": ["B", "videoRenderer"],
|
|
||||||
"channel": ["C", "channelRenderer"],
|
|
||||||
"playlist": ["D", "playlistRenderer"],
|
|
||||||
"movie": ["E", "videoRenderer"],
|
|
||||||
}
|
|
||||||
|
|
||||||
param_string = f"CA{sort_by_map[sort_by]}SAhA{results_type_map[results_type][0]}"
|
|
||||||
url = f"https://www.youtube.com/results?search_query={query}&sp={param_string}"
|
|
||||||
api_endpoint = "https://www.youtube.com/youtubei/v1/search"
|
|
||||||
videos = _get_videos(
|
|
||||||
url, api_endpoint, results_type_map[results_type][1], limit, sleep
|
|
||||||
)
|
|
||||||
for video in videos:
|
|
||||||
yield video
|
|
||||||
|
|
||||||
|
|
||||||
def _get_videos(
    url: str, api_endpoint: str, selector: str, limit: int, sleep: int
) -> Generator[dict, None, None]:
    session = _new_session()
    is_first = True
    done = False  # renamed from ``quit`` to avoid shadowing the builtin
    count = 0
    while True:
        if is_first:
            html = _get_initial_data(session, url)
            client = json.loads(
                _get_json_from_html(html, "INNERTUBE_CONTEXT", 2, '"}},') + '"}}'
            )["client"]
            api_key = _get_json_from_html(html, "innertubeApiKey", 3)
            session.headers["X-YouTube-Client-Name"] = "1"
            session.headers["X-YouTube-Client-Version"] = client["clientVersion"]
            data = json.loads(
                _get_json_from_html(html, "var ytInitialData = ", 0, "};") + "}"
            )
            next_data = _get_next_data(data)
            is_first = False
        else:
            data = _get_ajax_data(session, api_endpoint, api_key, next_data, client)
            next_data = _get_next_data(data)

        for result in _get_videos_items(data, selector):
            try:
                count += 1
                yield result
                if count == limit:
                    done = True
                    break
            except GeneratorExit:
                done = True
                break

        if not next_data or done:
            break

        time.sleep(sleep)

    session.close()


def _new_session() -> requests.Session:
    session = requests.Session()
    session.headers["User-Agent"] = (
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
        " (KHTML, like Gecko) Chrome/91.0.4472.101 Safari/537.36"
    )
    session.headers["Accept-Language"] = "en"
    return session


def _get_initial_data(session: requests.Session, url: str) -> str:
    response = session.get(url)
    response.raise_for_status()

    # "uxe=" in the redirect target indicates YouTube's cookie consent page;
    # set the consent cookie and retry
    if "uxe=" in response.request.url:
        session.cookies.set("CONSENT", "YES+cb", domain=".youtube.com")
        response = session.get(url)

    return response.text


def _get_ajax_data(
    session: requests.Session,
    api_endpoint: str,
    api_key: str,
    next_data: dict,
    client: dict,
) -> dict:
    data = {
        "context": {"clickTracking": next_data["click_params"], "client": client},
        "continuation": next_data["token"],
    }
    response = session.post(api_endpoint, params={"key": api_key}, json=data)
    return response.json()

def _get_json_from_html(
    html: str, key: str, num_chars: int = 2, stop: str = '"'
) -> str:
    pos_begin = html.find(key) + len(key) + num_chars
    pos_end = html.find(stop, pos_begin)
    return html[pos_begin:pos_end]

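# Example (illustrative addition, not original code): the helper is a plain
# string slice, not a JSON parser. With num_chars=3 it skips the quote, colon
# and quote that follow the key in the page source; the sample HTML and key
# value below are made up.
_SAMPLE_HTML = '...{"innertubeApiKey":"AIzaExampleKey123","other":1}...'
assert _get_json_from_html(_SAMPLE_HTML, "innertubeApiKey", 3) == "AIzaExampleKey123"
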
def _get_next_data(data: dict) -> Optional[dict]:
    raw_next_data = next(_search_dict(data, "continuationEndpoint"), None)
    if not raw_next_data:
        return None
    next_data = {
        "token": raw_next_data["continuationCommand"]["token"],
        "click_params": {"clickTrackingParams": raw_next_data["clickTrackingParams"]},
    }

    return next_data

def _search_dict(partial: dict, search_key: str) -> Generator[dict, None, None]:
    """Breadth-first scan of a nested dict, yielding all values under ``search_key``."""
    stack = [partial]
    while stack:
        current_item = stack.pop(0)
        if isinstance(current_item, dict):
            for key, value in current_item.items():
                if key == search_key:
                    yield value
                else:
                    stack.append(value)
        elif isinstance(current_item, list):
            stack.extend(current_item)

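# Example (illustrative addition, not original code): _search_dict walks the
# nested response breadth-first and yields every value stored under the given
# key, at any depth.
_SAMPLE_DATA = {
    "contents": {"videoRenderer": {"videoId": "a1"}},
    "more": [{"videoRenderer": {"videoId": "b2"}}],
}
assert [v["videoId"] for v in _search_dict(_SAMPLE_DATA, "videoRenderer")] == [
    "a1",
    "b2",
]
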
def _get_videos_items(data: dict, selector: str) -> Generator[dict, None, None]:
    return _search_dict(data, selector)

@ -1,96 +0,0 @@
import os
import shutil
import tempfile
from datetime import datetime, timedelta
from pathlib import Path

from django.conf import settings

UCAST_DIRNAME = "_ucast"

class ChannelFolder:
    def __init__(self, dir_root: Path):
        self.dir_root = dir_root
        dir_ucast = self.dir_root / UCAST_DIRNAME

        self.file_avatar = dir_ucast / "avatar.jpg"
        self.file_avatar_sm = dir_ucast / "avatar_sm.webp"

        self.dir_covers = dir_ucast / "covers"
        self.dir_thumbnails = dir_ucast / "thumbnails"

    @staticmethod
    def _glob_file(parent_dir: Path, glob: str, default_filename: str = None) -> Path:
        try:
            return next(parent_dir.glob(glob))
        except StopIteration:
            if default_filename:
                return parent_dir / default_filename
            raise FileNotFoundError(f"file {str(parent_dir)}/{glob} not found")

    def does_exist(self) -> bool:
        return os.path.isdir(self.dir_covers)

    def create(self):
        os.makedirs(self.dir_covers, exist_ok=True)
        os.makedirs(self.dir_thumbnails, exist_ok=True)

    def get_cover(self, title_slug: str) -> Path:
        return self.dir_covers / f"{title_slug}.png"

    def get_thumbnail(self, title_slug: str, sm=False) -> Path:
        filename = title_slug
        if sm:
            filename += "_sm"

        return self._glob_file(self.dir_thumbnails, f"{filename}.*", f"{filename}.webp")

    def get_audio(self, title_slug: str) -> Path:
        return self.dir_root / f"{title_slug}.mp3"

class Storage:
    def __init__(self):
        self.dir_data = settings.DOWNLOAD_ROOT

    def get_channel_folder(self, channel_slug: str) -> ChannelFolder:
        cf = ChannelFolder(self.dir_data / channel_slug)
        if not cf.does_exist():
            raise FileNotFoundError(f"channel folder {channel_slug} does not exist")
        return cf

    def get_or_create_channel_folder(self, channel_slug: str) -> ChannelFolder:
        cf = ChannelFolder(self.dir_data / channel_slug)
        if not cf.does_exist():
            cf.create()
        return cf

class Cache:
    def __init__(self):
        self.dir_cache = settings.CACHE_ROOT
        self.dir_ytdlp_cache = self.dir_cache / "yt_dlp"
        os.makedirs(self.dir_ytdlp_cache, exist_ok=True)

    def create_tmpdir(self, prefix="dld") -> tempfile.TemporaryDirectory:
        return tempfile.TemporaryDirectory(prefix=prefix + "_", dir=self.dir_cache)

    def cleanup(self):
        """
        Delete temporary directories that are older than 24h and are most likely
        left over after unexpected shutdowns.
        """
        for dirname in os.listdir(self.dir_cache):
            if dirname == "yt_dlp":
                continue

            try:
                # getctime needs the full path, not just the directory name
                ctime = os.path.getctime(self.dir_cache / dirname)
            # Cache folders may get removed by concurrent jobs
            except FileNotFoundError:
                continue
            age = datetime.now() - datetime.fromtimestamp(ctime)

            if age > timedelta(days=1):
                shutil.rmtree(self.dir_cache / dirname, ignore_errors=True)

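# Usage sketch (illustrative, not original code): typical flow tying the three
# classes together. Requires configured Django settings providing
# DOWNLOAD_ROOT and CACHE_ROOT; the slug values are hypothetical.
#
#   st = Storage()
#   cf = st.get_or_create_channel_folder("LinusTechTips")
#   cf.get_audio("2022-06-01_some_video")
#   # -> <DOWNLOAD_ROOT>/LinusTechTips/2022-06-01_some_video.mp3
#   Cache().cleanup()  # prune temp dirs older than 24h
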
@ -1,202 +0,0 @@
import datetime
import io
import json
import os
import re
from pathlib import Path
from typing import Any, Optional, Tuple, Union
from urllib import parse

import requests
import slugify
from django.utils import timezone
from PIL import Image

EMOJI_PATTERN = re.compile(
    "["
    "\U0001F1E0-\U0001F1FF"  # flags (iOS)
    "\U0001F300-\U0001F5FF"  # symbols & pictographs
    "\U0001F600-\U0001F64F"  # emoticons
    "\U0001F680-\U0001F6FF"  # transport & map symbols
    "\U0001F700-\U0001F77F"  # alchemical symbols
    "\U0001F780-\U0001F7FF"  # Geometric Shapes Extended
    "\U0001F800-\U0001F8FF"  # Supplemental Arrows-C
    "\U0001F900-\U0001F9FF"  # Supplemental Symbols and Pictographs
    "\U0001FA00-\U0001FA6F"  # Chess Symbols
    "\U0001FA70-\U0001FAFF"  # Symbols and Pictographs Extended-A
    "\U00002702-\U000027B0"  # Dingbats
    "\U000024C2-\U0001F251"
    "]+"
)

def download_file(url: str, download_path: Path):
    r = requests.get(url, allow_redirects=True)
    r.raise_for_status()
    with open(download_path, "wb") as f:
        f.write(r.content)

def resize_image(img: Image.Image, resize: Tuple[int, int]) -> Image.Image:
    if img.size == resize:
        return img

    w_ratio = resize[0] / img.width
    h_ratio = resize[1] / img.height
    box = None

    # Too tall: crop equal borders off the top and bottom
    if h_ratio < w_ratio:
        crop_height = int(img.width / resize[0] * resize[1])
        border = int((img.height - crop_height) / 2)
        box = (0, border, img.width, img.height - border)
    # Too wide: crop equal borders off the left and right
    elif w_ratio < h_ratio:
        crop_width = int(img.height / resize[1] * resize[0])
        border = int((img.width - crop_width) / 2)
        box = (border, 0, img.width - border, img.height)

    return img.resize(resize, Image.Resampling.LANCZOS, box)

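# Example (illustrative addition, not original code): a 1920x1200 source is
# too tall for 1280x720, so 60px are cropped from the top and bottom edges
# before scaling.
_SAMPLE_IMG = Image.new("RGB", (1920, 1200))
assert resize_image(_SAMPLE_IMG, (1280, 720)).size == (1280, 720)
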
def download_image_file(
    url: str, download_path: Path, resize: Optional[Tuple[int, int]] = None
):
    """
    Download an image and convert it to the type given by the path suffix.

    :param url: Image URL
    :param download_path: Download path
    :param resize: Target image size (set to None for no resizing)
    """
    r = requests.get(url, allow_redirects=True)
    r.raise_for_status()

    img = Image.open(io.BytesIO(r.content))
    img_ext = img.format.lower()
    if img_ext == "jpeg":
        img_ext = "jpg"

    do_resize = resize and img.size != resize
    if do_resize:
        img = resize_image(img, resize)

    # If the image already has the right size and format, store it unchanged
    if not do_resize and "." + img_ext == download_path.suffix:
        with open(download_path, "wb") as f:
            f.write(r.content)
    else:
        img.save(download_path)

def get_slug(text: str) -> str:
    return slugify.slugify(text, lowercase=False, separator="_")

def to_localtime(time: datetime.datetime):
    """Convert a naive UTC datetime to local time based on the Django settings."""
    utc_time = time.replace(tzinfo=datetime.timezone.utc)
    to_zone = timezone.get_default_timezone()
    return utc_time.astimezone(to_zone)

def _get_np_attrs(o) -> dict:
    """
    Return all non-protected attributes of the given object.

    :param o: Object
    :return: Dict of attributes
    """
    return {k: v for k, v in o.__dict__.items() if not k.startswith("_")}

def serializer(o: Any) -> Union[str, dict, int, float, bool]:
    """
    Serialize an object to a JSON-storable format.

    :param o: Object to serialize
    :return: Serialized output data
    """
    if hasattr(o, "serialize"):
        return o.serialize()
    if isinstance(o, (datetime.datetime, datetime.date)):
        return o.isoformat()
    if isinstance(o, (bool, float, int)):
        return o
    if hasattr(o, "__dict__"):
        return _get_np_attrs(o)
    return str(o)

def to_json(o, pretty=False) -> str:
    """
    Convert an object to JSON.
    Uses the ``serialize()`` method of the target object if available.

    :param o: Object to serialize
    :param pretty: Prettify with indents
    :return: JSON string
    """
    return json.dumps(
        o, default=serializer, indent=2 if pretty else None, ensure_ascii=False
    )

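# Example (illustrative addition, not original code): plain objects fall back
# to their public attributes, dates to ISO format.
class _Example:
    def __init__(self):
        self.x = 1
        self._hidden = 2  # protected attributes are dropped by _get_np_attrs

assert to_json(_Example()) == '{"x": 1}'
assert to_json(datetime.date(2022, 6, 1)) == '"2022-06-01"'
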
def _urlencode(query, safe="", encoding=None, errors=None, quote_via=parse.quote_plus):
    """
    Same as the urllib.parse.urlencode function, but does not add an
    equals sign to no-value flags.
    """
    if hasattr(query, "items"):
        query = query.items()
    else:
        # It's a bother at times that strings and string-like objects are
        # sequences.
        try:
            # Non-sequence items should not work with len();
            # non-empty strings will fail this check.
            if len(query) and not isinstance(query[0], tuple):
                raise TypeError
            # Zero-length sequences of all types will get here and succeed,
            # but that's a minor nit. Since the original implementation
            # allowed empty dicts, that type of behavior probably should be
            # preserved for consistency.
        except TypeError:
            raise TypeError("not a valid non-string sequence or mapping object")

    lst = []

    for k, v in query:
        if isinstance(k, bytes):
            k = quote_via(k, safe)
        else:
            k = quote_via(str(k), safe, encoding, errors)

        if isinstance(v, bytes):
            v = quote_via(v, safe)
        else:
            v = quote_via(str(v), safe, encoding, errors)

        # No-value flags are emitted without an equals sign
        if v:
            lst.append(k + "=" + v)
        else:
            lst.append(k)

    return "&".join(lst)

def add_key_to_url(url: str, key: str) -> str:
    if not key:
        return url
    url_parts = list(parse.urlparse(url))
    query = dict(parse.parse_qsl(url_parts[4], keep_blank_values=True))
    query["key"] = key
    url_parts[4] = _urlencode(query)
    return parse.urlunparse(url_parts)

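# Example (illustrative addition, not original code): thanks to _urlencode,
# no-value flags in the query survive without a trailing equals sign.
assert (
    add_key_to_url("https://example.com/feed?flag&a=1", "s3cret")
    == "https://example.com/feed?flag&a=1&key=s3cret"
)
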
def remove_if_exists(file: Path):
    if os.path.isfile(file):
        os.remove(file)

def strip_emoji(str_in: str) -> str:
    """Remove emoji from the string and collapse the resulting double spaces."""
    stripped = EMOJI_PATTERN.sub("", str_in)
    return re.sub(" +", " ", stripped)

@ -1,52 +0,0 @@
from datetime import date
from pathlib import Path

from mutagen import id3
from PIL import Image

AVATAR_SM_WIDTH = 100
THUMBNAIL_SM_WIDTH = 360
THUMBNAIL_SIZE = (1280, 720)
AVATAR_SIZE = (900, 900)

def tag_audio(
    audio_path: Path,
    title: str,
    channel: str,
    published: date,
    description: str,
    cover_path: Path,
):
    title_text = f"{published.isoformat()} {title}"

    tag = id3.ID3(audio_path)
    tag["TPE1"] = id3.TPE1(encoding=3, text=channel)  # Artist
    tag["TALB"] = id3.TALB(encoding=3, text=channel)  # Album
    tag["TIT2"] = id3.TIT2(encoding=3, text=title_text)  # Title
    tag["TDRC"] = id3.TDRC(encoding=3, text=published.isoformat())  # Date
    tag["COMM"] = id3.COMM(encoding=3, text=description)  # Comment

    with open(cover_path, "rb") as albumart:
        tag["APIC"] = id3.APIC(
            encoding=3, mime="image/png", type=3, desc="Cover", data=albumart.read()
        )
    tag.save()

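# Usage sketch (illustrative, not original code; the paths are hypothetical
# and the MP3 must already carry an ID3 header for ``id3.ID3()`` to open it):
#
#   tag_audio(Path("audio.mp3"), title="My Video", channel="My Channel",
#             published=date(2022, 6, 1), description="...",
#             cover_path=Path("cover.png"))
#   # -> TIT2 becomes "2022-06-01 My Video", APIC embeds cover.png
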
def resize_avatar(original_file: Path, new_file: Path):
    avatar = Image.open(original_file)
    avatar_new_height = int(AVATAR_SM_WIDTH / avatar.width * avatar.height)
    avatar = avatar.resize(
        (AVATAR_SM_WIDTH, avatar_new_height), Image.Resampling.LANCZOS
    )
    avatar.save(new_file)

def resize_thumbnail(original_file: Path, new_file: Path):
    thumbnail = Image.open(original_file)
    tn_new_height = int(THUMBNAIL_SM_WIDTH / thumbnail.width * thumbnail.height)
    thumbnail = thumbnail.resize(
        (THUMBNAIL_SM_WIDTH, tn_new_height), Image.Resampling.LANCZOS
    )
    thumbnail.save(new_file)

@ -1,316 +0,0 @@
import datetime
import logging
import re
import shutil
from dataclasses import dataclass
from operator import itemgetter
from pathlib import Path
from typing import Generator, List, Optional

import feedparser
import requests
from yt_dlp import YoutubeDL

from ucast.service import scrapetube, storage, util, videoutil

CHANID_REGEX = re.compile(r"""[-_a-zA-Z\d]{24}""")

class ItemNotFoundError(Exception):
    pass


class ThumbnailNotFoundError(Exception):
    pass


class InvalidMetadataError(Exception):
    pass

@dataclass
class VideoScraped:
    """
    Video object, as it is scraped from the website/RSS feed.
    RSS feeds contain the second-accurate publishing date, which cannot
    be scraped from the video info and is therefore included in this object.
    """

    id: str
    published: Optional[datetime.datetime]

    def __str__(self):
        return self.id

@dataclass
class VideoDetails:
    """Mapping of YoutubeDL's video information"""

    id: str
    title: str
    description: str
    channel_id: str
    channel_name: str
    duration: int
    published: datetime.datetime
    thumbnails: List[dict]
    is_currently_live: bool
    is_livestream: bool
    is_short: bool

    @classmethod
    def from_vinfo(cls, info: dict):
        published_date = datetime.datetime.strptime(
            info["upload_date"], "%Y%m%d"
        ).replace(tzinfo=datetime.timezone.utc)

        return VideoDetails(
            id=info["id"],
            title=info["title"],
            description=info["description"],
            channel_id=info["channel_id"],
            channel_name=info["uploader"],
            duration=info["duration"],
            published=published_date,
            thumbnails=info["thumbnails"],
            is_currently_live=bool(info.get("is_live")),
            is_livestream=bool(info.get("is_live") or info.get("was_live")),
            # Shorts are portrait-mode videos of at most 60 seconds
            is_short=info["duration"] <= 60
            and (info["width"] or 0) < (info["height"] or 0),
        )

    def add_scraped_data(self, scraped: VideoScraped):
        if scraped.id != self.id:
            raise ValueError("scraped data does not belong to video")

        if scraped.published:
            self.published = scraped.published

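# Example (illustrative addition, not original code): the minimal set of
# yt-dlp info fields consumed by from_vinfo; all values are made up. A
# 45-second portrait video is classified as a short.
_SAMPLE_INFO = {
    "id": "dQw4w9WgXcQ",
    "title": "t",
    "description": "d",
    "channel_id": "UC" + 22 * "x",
    "uploader": "chan",
    "duration": 45,
    "upload_date": "20220601",
    "thumbnails": [],
    "is_live": False,
    "was_live": False,
    "width": 1080,
    "height": 1920,
}
assert VideoDetails.from_vinfo(_SAMPLE_INFO).is_short
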
@dataclass
class ChannelMetadata:
    """Channel information"""

    id: str
    name: str
    description: str
    avatar_url: str
    subscribers: Optional[str]

def download_thumbnail(vinfo: VideoDetails, download_path: Path):
    """
    Download the thumbnail image of a YouTube video and save it at the given
    filepath. Thumbnail candidates are tried in order of decreasing preference.

    :param vinfo: Video info (from ``get_video_details()``)
    :param download_path: Path of the thumbnail file
    :raise ThumbnailNotFoundError: if no thumbnail could be found (YT returned 404)
    """
    for tn in sorted(vinfo.thumbnails, key=itemgetter("preference"), reverse=True):
        url = tn["url"]
        logging.info(f"downloading thumbnail {url}...")

        try:
            util.download_image_file(url, download_path, videoutil.THUMBNAIL_SIZE)
            return
        except requests.HTTPError:
            logging.warning(f"downloading thumbnail {url} failed")

    raise ThumbnailNotFoundError(f"could not find thumbnail for video {vinfo}")

def get_video_details(video_id: str) -> VideoDetails:
    """
    Get the details of a YouTube video without downloading it.

    :param video_id: YouTube video ID
    :return: VideoDetails
    """
    cache = storage.Cache()

    ydl_params = {
        "cachedir": str(cache.dir_ytdlp_cache),
    }

    with YoutubeDL(ydl_params) as ydl:
        info = ydl.extract_info(video_id, download=False)
    return VideoDetails.from_vinfo(info)

def download_audio(
    video_id: str, download_path: Path, sponsorblock=False
) -> VideoDetails:
    """
    Download the audio track from a YouTube video and save it at the given filepath.

    :param video_id: YouTube video ID
    :param download_path: Download path
    :param sponsorblock: Enable SponsorBlock (cut out sponsored segments)
    :return: VideoDetails
    """
    cache = storage.Cache()
    tmpdir = cache.create_tmpdir()
    tmp_dld_file = Path(tmpdir.name) / "audio.mp3"

    ydl_params = {
        "format": "bestaudio",
        "postprocessors": [
            {"key": "FFmpegExtractAudio", "preferredcodec": "mp3"},
        ],
        "outtmpl": str(tmp_dld_file),
        "cachedir": str(cache.dir_ytdlp_cache),
    }

    if sponsorblock:
        # noinspection PyTypeChecker
        ydl_params["postprocessors"].extend(
            [
                {
                    "key": "SponsorBlock",
                    "categories": ["sponsor"],
                    "when": "after_filter",
                },
                {"key": "ModifyChapters", "remove_sponsor_segments": ["sponsor"]},
            ]
        )

    with YoutubeDL(ydl_params) as ydl:
        # extract_info downloads the video and returns its metadata
        info = ydl.extract_info(video_id)

    downloaded_file = info["requested_downloads"][0]["filepath"]
    shutil.move(downloaded_file, download_path)
    return VideoDetails.from_vinfo(info)

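# Usage sketch (illustrative, not original code; performs a real yt-dlp
# download when executed, the video ID and target path are examples only):
#
#   details = download_audio("dQw4w9WgXcQ", Path("/tmp/audio.mp3"),
#                            sponsorblock=True)
#   print(details.title, details.duration)
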
def channel_url_from_id(channel_id: str) -> str:
    return "https://www.youtube.com/channel/" + channel_id

def channel_url_from_str(channel_str: str) -> str:
    """
    Get the channel URL from user input. The following types are accepted:

    - Channel ID URL: https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q
    - Vanity URL: https://www.youtube.com/c/MrBeast6000
    - User URL: https://www.youtube.com/user/LinusTechTips
    - Channel ID: ``UCGiJh0NZ52wRhYKYnuZI08Q``

    :param channel_str: Channel string
    :return: Channel URL
    """
    channel_url_regex = re.compile(
        r"""(?:https?://)?[-a-zA-Z\d@:%._+~#=]+\.[a-zA-Z\d]{1,6}/(?:(channel|c|user)/)?([-_a-zA-Z\d]*)"""
    )

    match = channel_url_regex.match(channel_str)
    if match:
        url_type = match[1]
        # Vanity URL
        if not url_type or url_type == "c":
            return "https://www.youtube.com/c/" + match[2]
        # Username
        if url_type == "user":
            return "https://www.youtube.com/user/" + match[2]
        # Channel ID
        return "https://www.youtube.com/channel/" + match[2]

    if CHANID_REGEX.match(channel_str):
        return "https://www.youtube.com/channel/" + channel_str

    raise ValueError("invalid channel string")

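# Examples (illustrative addition, not original code): the accepted input
# styles and their normalized URLs.
assert (
    channel_url_from_str("UCGiJh0NZ52wRhYKYnuZI08Q")
    == "https://www.youtube.com/channel/UCGiJh0NZ52wRhYKYnuZI08Q"
)
assert (
    channel_url_from_str("https://www.youtube.com/user/LinusTechTips")
    == "https://www.youtube.com/user/LinusTechTips"
)
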
def get_channel_metadata(channel_url: str) -> ChannelMetadata:
    """
    Get the metadata of a channel.

    :param channel_url: Channel URL
    :return: Channel metadata
    """
    data = scrapetube.get_channel_metadata(channel_url)
    metadata = data["metadata"]["channelMetadataRenderer"]

    channel_id = metadata["externalId"]
    name = metadata["title"]
    description = metadata["description"].strip()
    avatar = metadata["avatar"]["thumbnails"][0]["url"]
    subscribers = None
    # The subscriber count is not always visible
    try:
        raw_subscribers = data["header"]["c4TabbedHeaderRenderer"][
            "subscriberCountText"
        ]["simpleText"]
        subscribers = raw_subscribers.split(" ", 1)[0]
    except KeyError:
        pass

    if not CHANID_REGEX.match(channel_id):
        raise InvalidMetadataError(f"got invalid channel id {repr(channel_id)}")

    if not name:
        raise InvalidMetadataError(f"no channel name found for channel {channel_id}")

    if not avatar.startswith("https://"):
        raise InvalidMetadataError(
            f"got invalid avatar url for channel {channel_id}: {avatar}"
        )

    return ChannelMetadata(channel_id, name, description, avatar, subscribers)

def get_channel_videos_from_feed(channel_id: str) -> List[VideoScraped]:
    """
    Return videos of a channel using YouTube's RSS feed. Using the feed is fast,
    but it only contains the 15 latest videos.

    :param channel_id: YouTube channel id
    :return: List of VideoScraped objects
    """
    feed_url = f"https://www.youtube.com/feeds/videos.xml?channel_id={channel_id}"
    feed = feedparser.parse(feed_url)
    videos = []

    for item in feed["entries"]:
        video_id = item.get("yt_videoid")
        if not video_id:
            logging.warning(
                f"found invalid item in rss feed of channel {channel_id}: {item}"
            )
            continue

        publish_date_str = item.get("published")
        publish_date = None
        if publish_date_str:
            publish_date = datetime.datetime.fromisoformat(publish_date_str)

        videos.append(VideoScraped(video_id, publish_date))

    return videos

def get_channel_videos_from_scraper(
    channel_id: str, limit: int = None
) -> Generator[VideoScraped, None, None]:
    """
    Return all videos of a channel by scraping the YouTube website.

    :param channel_id: YouTube channel id
    :param limit: Limit the number of scraped videos
    :return: Generator of VideoScraped objects
    """
    for item in scrapetube.get_channel(channel_url_from_id(channel_id), limit):
        video_id = item.get("videoId")
        if not video_id:
            logging.warning(
                f"found invalid item in scraped feed of channel {channel_id}: {item}"
            )
            continue

        yield VideoScraped(video_id, None)

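# Usage sketch (illustrative, not original code): the RSS feed is the cheap
# path for the ~15 newest uploads; the scraper is the slow path for a full
# backfill. ``backfill_needed`` is a hypothetical condition.
#
#   videos = get_channel_videos_from_feed("UCGiJh0NZ52wRhYKYnuZI08Q")
#   if backfill_needed:
#       videos = list(get_channel_videos_from_scraper("UCGiJh0NZ52wRhYKYnuZI08Q"))
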
ucast/static/bulma/css/style.min.css (vendored)
ucast/static/ucast/css/fontawesome.css (vendored)
(deleted image: 4.2 KiB)
ucast/static/ucast/js/clipboard.min.js (vendored)
ucast/static/ucast/js/htmx.min.js (vendored)
@ -1,9 +0,0 @@
const confirmButtons = document.getElementsByClassName("dialog-confirm")

for(let btn of confirmButtons) {
    btn.addEventListener("click", function(e) {
        const result = window.confirm(btn.getAttribute("confirm-msg"));
        if(!result) {
            e.preventDefault();
        }
    });
}
@ -1,2 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg id="svg5" width="68.5mm" height="15.79mm" version="1.1" viewBox="0 0 68.5 15.79" xmlns="http://www.w3.org/2000/svg"><g id="layer1" transform="translate(-1.4688 -18.46)" fill="none" stroke-linecap="square"><path id="path3041" d="m67.469 21.167h-10.583" stroke="#282828"/><path id="path3043" d="m62.177 21.445v10.305" stroke="#282828" stroke-width=".98677"/><path id="path3572" d="m3.9688 21.167v6.6146l3.9687 3.9688h2.6458l3.9688-3.9688v-6.6146" stroke="#e00"/><path id="path3687" d="m27.781 21.167h-6.6146l-3.9688 3.9688v2.6458l3.9688 3.9688h6.6146" stroke="#282828"/><path id="path3802" d="m30.427 31.75v-5.2917l5.2917-5.2917 5.2917 5.2917v5.2917" stroke="#282828"/><path id="path3954" d="m54.24 21.167h-7.9375l-2.6458 2.6458 2.6458 2.6458h5.2917l2.6458 2.6458-2.6458 2.6458h-7.9375" stroke="#282828"/></g></svg>
(deleted image: 858 B)
@ -1,2 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg id="svg5" width="68.5mm" height="15.79mm" version="1.1" viewBox="0 0 68.5 15.79" xmlns="http://www.w3.org/2000/svg"><g id="layer1" transform="translate(-1.4688 -18.46)" fill="none" stroke-linecap="square"><path id="path3041" d="m67.469 21.167h-10.583" stroke="#fff"/><path id="path3043" d="m62.177 21.445v10.305" stroke="#fff" stroke-width=".98677"/><path id="path3572" d="m3.9688 21.167v6.6146l3.9687 3.9688h2.6458l3.9688-3.9688v-6.6146" stroke="#e00"/><path id="path3687" d="m27.781 21.167h-6.6146l-3.9688 3.9688v2.6458l3.9688 3.9688h6.6146" stroke="#fff"/><path id="path3802" d="m30.427 31.75v-5.2917l5.2917-5.2917 5.2917 5.2917v5.2917" stroke="#fff"/><path id="path3954" d="m54.24 21.167h-7.9375l-2.6458 2.6458 2.6458 2.6458h5.2917l2.6458 2.6458-2.6458 2.6458h-7.9375" stroke="#fff"/></g></svg>
(deleted image: 843 B)