├── .python-version
├── frontend
│   ├── src
│   │   ├── boot
│   │   │   ├── .gitkeep
│   │   │   └── axios.js
│   │   ├── css
│   │   │   ├── app.scss
│   │   │   └── quasar.variables.scss
│   │   ├── assets
│   │   │   ├── icon.png
│   │   │   └── hd-icon.png
│   │   ├── App.vue
│   │   ├── stores
│   │   │   ├── store-flag.d.ts
│   │   │   ├── index.js
│   │   │   └── auth.js
│   │   ├── pages
│   │   │   ├── ErrorNotFound.vue
│   │   │   ├── LoginPage.vue
│   │   │   └── GeneralPage.vue
│   │   ├── components
│   │   │   ├── EssentialLink.vue
│   │   │   └── EpgInfoDialog.vue
│   │   ├── index.template.html
│   │   ├── router
│   │   │   ├── index.js
│   │   │   └── routes.js
│   │   └── mixins
│   │       ├── aioFunctionsMixin.js
│   │       └── backgroundTasksMixin.js
│   ├── public
│   │   ├── favicon.ico
│   │   └── icons
│   │       ├── favicon-16x16.png
│   │       ├── favicon-32x32.png
│   │       ├── favicon-96x96.png
│   │       ├── favicon-128x128.png
│   │       ├── TVH-IPTV-Config-Logo.png
│   │       └── tvh-icon.svg
│   ├── nodemon.json
│   ├── .eslintignore
│   ├── .editorconfig
│   ├── .npmrc
│   ├── postcss.config.cjs
│   ├── babel.config.cjs
│   ├── .gitignore
│   ├── README.md
│   ├── jsconfig.json
│   ├── package.json
│   ├── .eslintrc.js
│   └── quasar.config.js
├── backend
│   ├── tvheadend
│   │   └── __init__.py
│   ├── api
│   │   ├── __init__.py
│   │   ├── routes_epgs.py
│   │   ├── routes_channels.py
│   │   ├── routes_playlists.py
│   │   ├── tasks.py
│   │   ├── routes.py
│   │   └── routes_playlist_proxy.py
│   ├── ffmpeg.py
│   ├── auth.py
│   ├── __init__.py
│   ├── models.py
│   └── config.py
├── requirements-dev.txt
├── docker
│   ├── overlay
│   │   ├── version.txt
│   │   ├── defaults
│   │   │   ├── tvheadend
│   │   │   │   ├── admin_auth
│   │   │   │   ├── admin_accesscontrol
│   │   │   │   └── config
│   │   │   └── nginx
│   │   │       └── nginx.conf.template
│   │   └── usr
│   │       └── bin
│   │           └── tv_grab_url
│   ├── docker-compose.dev-aio.yml
│   ├── docker-compose.dev-side-tvh.yml
│   ├── Dockerfile
│   └── entrypoint.sh
├── logo.png
├── .editorconfig
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── config.yml
│   │   ├── DOCUMENTATION.yml
│   │   ├── FEATURE-REQUEST.yml
│   │   └── BUG-REPORT.yml
│   ├── FUNDING.yml
│   ├── label-actions.yml
│   └── workflows
│       └── build_docker_ci.yml
├── db-migrate.sh
├── migrations
│   ├── README
│   ├── script.py.mako
│   ├── versions
│   │   ├── 20222080e13e_.py
│   │   ├── e0b4b10647d5_.py
│   │   ├── e829b6fba2dc_.py
│   │   ├── 4d27771deb98_.py
│   │   ├── 044b003faaaa_.py
│   │   ├── 62e3f7ef0ad2_.py
│   │   ├── f3d254922d25_.py
│   │   └── 46f0f37aab7b_.py
│   └── env.py
├── .gitignore
├── .dockerignore
├── devops
│   ├── arm_build_test.sh
│   ├── frontend_install.sh
│   ├── setup_local_dev_env.sh
│   └── run_local_dev_env.sh
├── alembic.ini
├── docs
│   ├── compose-files
│   │   ├── docker-compose.aio.yml
│   │   ├── docker-compose.side-tvh.yml
│   │   └── docker-compose.aio-with-proxies.yml
│   ├── run-from-source.md
│   └── run-with-docker-compose.md
├── requirements.txt
├── requirements.in
├── run.py
└── README.md
/.python-version:
--------------------------------------------------------------------------------
1 | 3.12
2 |
--------------------------------------------------------------------------------
/frontend/src/boot/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/backend/tvheadend/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/frontend/src/css/app.scss:
--------------------------------------------------------------------------------
1 | // app global css in SCSS form
2 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | pip-tools>=7.4.1
2 | pip-audit>=2.7.2
3 |
--------------------------------------------------------------------------------
/docker/overlay/version.txt:
--------------------------------------------------------------------------------
1 | Build: [2024-06-29 12:42:07] [local] []
2 |
--------------------------------------------------------------------------------
/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Josh5/TVH-IPTV-Config/HEAD/logo.png
--------------------------------------------------------------------------------
/frontend/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Josh5/TVH-IPTV-Config/HEAD/frontend/public/favicon.ico
--------------------------------------------------------------------------------
/frontend/src/assets/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Josh5/TVH-IPTV-Config/HEAD/frontend/src/assets/icon.png
--------------------------------------------------------------------------------
/frontend/src/assets/hd-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Josh5/TVH-IPTV-Config/HEAD/frontend/src/assets/hd-icon.png
--------------------------------------------------------------------------------
/frontend/nodemon.json:
--------------------------------------------------------------------------------
1 | {
2 | "watch": ["src"],
3 | "ext": "js,vue,css,html",
4 | "exec": "quasar build --develop"
5 | }
6 |
--------------------------------------------------------------------------------
/frontend/public/icons/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Josh5/TVH-IPTV-Config/HEAD/frontend/public/icons/favicon-16x16.png
--------------------------------------------------------------------------------
/frontend/public/icons/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Josh5/TVH-IPTV-Config/HEAD/frontend/public/icons/favicon-32x32.png
--------------------------------------------------------------------------------
/frontend/public/icons/favicon-96x96.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Josh5/TVH-IPTV-Config/HEAD/frontend/public/icons/favicon-96x96.png
--------------------------------------------------------------------------------
/frontend/public/icons/favicon-128x128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Josh5/TVH-IPTV-Config/HEAD/frontend/public/icons/favicon-128x128.png
--------------------------------------------------------------------------------
/frontend/public/icons/TVH-IPTV-Config-Logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Josh5/TVH-IPTV-Config/HEAD/frontend/public/icons/TVH-IPTV-Config-Logo.png
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset = utf-8
5 | indent_style = space
6 | end_of_line = lf
7 | insert_final_newline = true
8 | trim_trailing_whitespace = true
9 |
--------------------------------------------------------------------------------
/frontend/.eslintignore:
--------------------------------------------------------------------------------
1 | /dist
2 | /src-bex/www
3 | /src-capacitor
4 | /src-cordova
5 | /.quasar
6 | /node_modules
7 | .eslintrc.cjs
8 | babel.config.cjs
9 | /quasar.config.*.temporary.compiled*
10 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | blank_issues_enabled: false
4 | contact_links:
5 | - name: Discord support
6 | url: https://unmanic.app/discord
7 | about: Ask questions in Discord.
8 |
--------------------------------------------------------------------------------
/db-migrate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | __script_path=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
5 | 
6 | cd "${__script_path}/"
7 | ./venv-docker/bin/flask db upgrade
8 | echo "Successfully ran migrations"
9 |
--------------------------------------------------------------------------------
/frontend/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset = utf-8
5 | indent_style = space
6 | indent_size = 2
7 | end_of_line = lf
8 | insert_final_newline = true
9 | trim_trailing_whitespace = true
10 |
--------------------------------------------------------------------------------
/backend/api/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 |
4 | from quart import Blueprint
5 |
6 | blueprint = Blueprint(
7 | 'api_blueprint',
8 | __name__,
9 | url_prefix=''
10 | )
11 |
--------------------------------------------------------------------------------
/frontend/src/App.vue:
--------------------------------------------------------------------------------
1 | <template>
2 |   <router-view />
3 | </template>
4 | 
5 | <script>
6 | import { defineComponent } from 'vue'
7 | 
8 | export default defineComponent({
9 |   name: 'App'
10 | })
11 | </script>
12 | 
--------------------------------------------------------------------------------
/frontend/.npmrc:
--------------------------------------------------------------------------------
1 | # pnpm-related options
2 | shamefully-hoist=true
3 | strict-peer-dependencies=false
4 | # to get the latest compatible packages when creating the project https://github.com/pnpm/pnpm/issues/6463
5 | resolution-mode=highest
6 |
--------------------------------------------------------------------------------
/migrations/README:
--------------------------------------------------------------------------------
1 | # Create migrations
2 | ```
3 | sudo docker compose \
4 | -f ./docker/docker-compose.dev-aio.yml run \
5 | --user $(id -u) \
6 | --entrypoint='' tic \
7 | /var/venv-docker/bin/python -m alembic revision --autogenerate
8 | ```
9 |
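10 | # Apply migrations
11 | The same pattern should work for applying the generated migrations inside the dev container (a sketch mirroring the command above):
12 | ```
13 | sudo docker compose \
14 |   -f ./docker/docker-compose.dev-aio.yml run \
15 |   --user $(id -u) \
16 |   --entrypoint='' tic \
17 |   /var/venv-docker/bin/python -m alembic upgrade head
18 | ```
19 | 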
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # byte-compiled / optimized files
2 | __pycache__/
3 | *.py[cod]
4 |
5 | # dev / testing directories
6 | /dev_env*
7 | /frontend/test_js/**
8 |
9 | # venv
10 | env
11 | venv*
12 |
13 | # editor directories and files
14 | .vscode
15 | .idea
16 |
--------------------------------------------------------------------------------
/frontend/postcss.config.cjs:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | // https://github.com/michael-ciniawsky/postcss-load-config
3 |
4 | module.exports = {
5 | plugins: [
6 | // to edit target browsers: use "browserslist" field in package.json
7 | require('autoprefixer')
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .git
2 | __pycache__
3 | *.pyc
4 | *.pyo
5 | *.pyd
6 |
7 | # dev / testing directories
8 | /dev_env*
9 | /frontend/test_js/**
10 | /frontend/dist/**
11 | /frontend/node_modules/**
12 |
13 | # venv
14 | env
15 | venv*
16 |
17 | # editor directories and files
18 | .vscode
19 | .idea
20 |
--------------------------------------------------------------------------------
/docker/overlay/defaults/tvheadend/admin_auth:
--------------------------------------------------------------------------------
1 | {
2 | "enabled": true,
3 | "username": "admin",
4 | "password2": "VFZIZWFkZW5kLUhpZGUtYWRtaW4=",
5 | "auth": [
6 | "enable"
7 | ],
8 | "authcode": "PBx9zTIBrAgObrCpEq5iS7pR-jYo",
9 | "comment": "TVH IPTV Config admin password entry",
10 | "wizard": true
11 | }
12 |
--------------------------------------------------------------------------------
/devops/arm_build_test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | docker buildx build \
4 | --build-arg VERSION="UNKNOWN" \
5 | --build-arg BUILD_DATE="NOW" \
6 | --build-arg BASE_IMAGE="ghcr.io/tvheadend/tvheadend:edge-debian" \
7 | --file docker/Dockerfile \
8 | --platform linux/arm64 \
9 | -t ghcr.io/josh5/tvh-iptv:latest \
10 | .
11 |
--------------------------------------------------------------------------------
/frontend/src/stores/store-flag.d.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 | // THIS FEATURE-FLAG FILE IS AUTOGENERATED,
3 | // REMOVAL OR CHANGES WILL CAUSE RELATED TYPES TO STOP WORKING
4 | import "quasar/dist/types/feature-flag";
5 |
6 | declare module "quasar/dist/types/feature-flag" {
7 | interface QuasarFeatureFlags {
8 | store: true;
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/frontend/babel.config.cjs:
--------------------------------------------------------------------------------
1 | /* eslint-disable */
2 |
3 | module.exports = api => {
4 | return {
5 | presets: [
6 | [
7 | '@quasar/babel-preset-app',
8 | api.caller(caller => caller && caller.target === 'node')
9 | ? {targets: {node: 'current'}}
10 | : {},
11 | ],
12 | ],
13 | };
14 | };
15 |
16 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/DOCUMENTATION.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Documentation Improvement
4 | description: Suggest an update to documentation.
5 | labels: ['status:awaiting-triage', 'type:documentation']
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Thanks for taking the time to help improve!
11 | - type: textarea
12 | id: description
13 | attributes:
14 | label: Description
15 | description: Provide a description of the proposed changes.
16 | validations:
17 | required: true
18 |
--------------------------------------------------------------------------------
/frontend/src/stores/index.js:
--------------------------------------------------------------------------------
1 | import { store } from 'quasar/wrappers'
2 | import { createPinia } from 'pinia'
3 |
4 | /*
5 | * If not building with SSR mode, you can
6 | * directly export the Store instantiation;
7 | *
8 | * The function below can be async too; either use
9 | * async/await or return a Promise which resolves
10 | * with the Store instance.
11 | */
12 |
13 | export default store((/* { ssrContext } */) => {
14 | const pinia = createPinia()
15 |
16 | // You can add Pinia plugins here
17 | // pinia.use(SomePiniaPlugin)
18 |
19 | return pinia
20 | })
21 |
--------------------------------------------------------------------------------
/devops/frontend_install.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | ###
3 | # File: frontend_install.sh
4 | # Project: devops
5 | # File Created: Tuesday, 11th April 2023 3:16:40 pm
6 | # Author: Josh.5 (jsunnex@gmail.com)
7 | # -----
8 | # Last Modified: Monday, 24th April 2023 7:14:51 pm
9 | # Modified By: Josh.5 (jsunnex@gmail.com)
10 | ###
11 |
12 | script_path=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
13 | project_root=$(readlink -e "${script_path}/..")
14 |
15 |
16 | pushd "${project_root}/frontend" || exit 1
17 |
18 |
19 | # Build frontend package
20 | npm ci
21 | npm run build
22 |
23 |
24 | popd || exit 1
25 |
--------------------------------------------------------------------------------
/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision | comma,n}
5 | Create Date: ${create_date}
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 | ${imports if imports else ""}
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = ${repr(up_revision)}
14 | down_revision = ${repr(down_revision)}
15 | branch_labels = ${repr(branch_labels)}
16 | depends_on = ${repr(depends_on)}
17 |
18 |
19 | def upgrade():
20 | ${upgrades if upgrades else "pass"}
21 |
22 |
23 | def downgrade():
24 | ${downgrades if downgrades else "pass"}
25 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: Josh5
4 | patreon: Josh5
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: josh5coffee
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
13 |
--------------------------------------------------------------------------------
/frontend/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .thumbs.db
3 | node_modules
4 |
5 | # Quasar core related directories
6 | .quasar
7 | /dist
8 |
9 | # Cordova related directories and files
10 | /src-cordova/node_modules
11 | /src-cordova/platforms
12 | /src-cordova/plugins
13 | /src-cordova/www
14 |
15 | # Capacitor related directories and files
16 | /src-capacitor/www
17 | /src-capacitor/node_modules
18 |
19 | # BEX related directories and files
20 | /src-bex/www
21 | /src-bex/js/core
22 |
23 | # Log files
24 | npm-debug.log*
25 | yarn-debug.log*
26 | yarn-error.log*
27 |
28 | # Editor directories and files
29 | .vscode
30 | .idea
31 | *.suo
32 | *.ntvs*
33 | *.njsproj
34 | *.sln
35 |
--------------------------------------------------------------------------------
/frontend/src/pages/ErrorNotFound.vue:
--------------------------------------------------------------------------------
1 | <template>
2 |   <div class="fullscreen bg-blue text-white text-center q-pa-md flex flex-center">
3 |     <div>
4 |       <div style="font-size: 30vh">
5 |         404
6 |       </div>
7 | 
8 |       <div class="text-h2" style="opacity:.4">
9 |         Oops. Nothing here...
10 |       </div>
11 | 
12 |       <q-btn
13 |         class="q-mt-xl"
14 |         color="white"
15 |         text-color="blue"
16 |         unelevated
17 |         to="/"
18 |         label="Go Home"
19 |         no-caps
20 |       />
21 |     </div>
22 |   </div>
23 | </template>
24 | 
25 | <script>
26 | import { defineComponent } from 'vue'
27 | 
28 | export default defineComponent({
29 |   name: 'ErrorNotFound'
30 | })
31 | </script>
32 | 
--------------------------------------------------------------------------------
/frontend/README.md:
--------------------------------------------------------------------------------
1 | # TVH IPTV Config (tvh_iptv_config)
2 |
3 | A tool for simplifying IPTV configuration in TVheadend
4 |
5 | ## Install the dependencies
6 | ```bash
7 | yarn
8 | # or
9 | npm install
10 | ```
11 |
12 | ### Start the app in development mode (hot-code reloading, error reporting, etc.)
13 | ```bash
14 | quasar dev
15 | ```
16 |
17 |
18 | ### Lint the files
19 | ```bash
20 | yarn lint
21 | # or
22 | npm run lint
23 | ```
24 |
25 |
26 | ### Format the files
27 | ```bash
28 | yarn format
29 | # or
30 | npm run format
31 | ```
32 |
33 |
34 |
35 | ### Build the app for production
36 | ```bash
37 | quasar build
38 | ```
39 |
40 | ### Customize the configuration
41 | See [Configuring quasar.config.js](https://v2.quasar.dev/quasar-cli-webpack/quasar-config-js).
42 |
--------------------------------------------------------------------------------
/frontend/jsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "baseUrl": ".",
4 | "paths": {
5 | "src/*": [
6 | "src/*"
7 | ],
8 | "app/*": [
9 | "*"
10 | ],
11 | "components/*": [
12 | "src/components/*"
13 | ],
14 | "layouts/*": [
15 | "src/layouts/*"
16 | ],
17 | "pages/*": [
18 | "src/pages/*"
19 | ],
20 | "assets/*": [
21 | "src/assets/*"
22 | ],
23 | "boot/*": [
24 | "src/boot/*"
25 | ],
26 | "stores/*": [
27 | "src/stores/*"
28 | ],
29 | "vue$": [
30 | "node_modules/vue/dist/vue.runtime.esm-bundler.js"
31 | ]
32 | }
33 | },
34 | "exclude": [
35 | "dist",
36 | ".quasar",
37 | "node_modules"
38 | ]
39 | }
--------------------------------------------------------------------------------
/docker/overlay/defaults/tvheadend/admin_accesscontrol:
--------------------------------------------------------------------------------
1 | {
2 | "index": 1,
3 | "enabled": true,
4 | "username": "admin",
5 | "prefix": "0.0.0.0/0,::/0",
6 | "change": [
7 | "change_rights"
8 | ],
9 | "uilevel": -1,
10 | "uilevel_nochange": -1,
11 | "streaming": [
12 | "basic",
13 | "advanced",
14 | "htsp"
15 | ],
16 | "profile": [
17 | ],
18 | "dvr": [
19 | "basic",
20 | "htsp",
21 | "all",
22 | "all_rw",
23 | "failed"
24 | ],
25 | "htsp_anonymize": false,
26 | "dvr_config": [
27 | ],
28 | "webui": true,
29 | "admin": true,
30 | "conn_limit_type": 0,
31 | "conn_limit": 0,
32 | "channel_min": 0,
33 | "channel_max": 0,
34 | "channel_tag_exclude": false,
35 | "channel_tag": [
36 | ],
37 | "xmltv_output_format": 0,
38 | "htsp_output_format": 0,
39 | "comment": "Default access entry",
40 | "wizard": false
41 | }
42 |
--------------------------------------------------------------------------------
/devops/setup_local_dev_env.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | ###
3 | # File: setup_local_dev_env.sh
4 | # Project: devops
5 | # File Created: Tuesday, 11th April 2023 3:14:41 pm
6 | # Author: Josh.5 (jsunnex@gmail.com)
7 | # -----
8 | # Last Modified: Saturday, 20th January 2024 4:23:22 pm
9 | # Modified By: Josh.5 (jsunnex@gmail.com)
10 | ###
11 |
12 | script_path=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
13 | project_root=$(readlink -e "${script_path}/..")
14 |
15 |
16 | pushd "${project_root}" || exit 1
17 |
18 |
19 | # Ensure we have created a venv
20 | if [[ ! -e venv-local/bin/activate ]]; then
21 | python3 -m venv venv-local
22 | fi
23 |
24 | # Activate the venv
25 | source venv-local/bin/activate
26 |
27 | # Install all requirements
28 | python3 -m pip install -r requirements.txt
29 |
30 | # Build the frontend package
31 | ./devops/frontend_install.sh
32 |
33 |
34 | popd || exit 1
35 |
--------------------------------------------------------------------------------
/frontend/src/css/quasar.variables.scss:
--------------------------------------------------------------------------------
1 | // Quasar SCSS (& Sass) Variables
2 | // --------------------------------------------------
3 | // To customize the look and feel of this app, you can override
4 | // the Sass/SCSS variables found in Quasar's source Sass/SCSS files.
5 |
6 | // Check documentation for full list of Quasar variables
7 |
8 | // Your own variables (that are declared here) and Quasar's own
9 | // ones will be available out of the box in your .vue/.scss/.sass files
10 |
11 | // It's highly recommended to change the default colors
12 | // to match your app's branding.
13 | // Tip: Use the "Theme Builder" on Quasar's documentation website.
14 |
15 | $primary : #201335;
16 | $secondary : #4F4789;
17 | $accent : #FFB17A;
18 |
19 | $dark : #1D1D1D;
20 | $dark-page : #121212;
21 |
22 | $positive : #21BA45;
23 | $negative : #C10015;
24 | $info : #31CCEC;
25 | $warning : #F2C037;
26 |
--------------------------------------------------------------------------------
/frontend/src/components/EssentialLink.vue:
--------------------------------------------------------------------------------
1 |
2 |
6 |
10 |
11 |
12 |
13 |
14 | {{ title }}
15 | {{ caption }}
16 |
17 |
18 |
19 |
20 |
48 |
--------------------------------------------------------------------------------
/devops/run_local_dev_env.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | ###
3 | # File: run_local_dev_env.sh
4 | # Project: devops
5 | # File Created: Tuesday, 11th April 2023 3:31:39 pm
6 | # Author: Josh.5 (jsunnex@gmail.com)
7 | # -----
8 | # Last Modified: Sunday, 3rd November 2024 2:39:21 pm
9 | # Modified By: Josh5 (jsunnex@gmail.com)
10 | ###
11 |
12 | script_path=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
13 | project_root=$(readlink -e "${script_path}/..")
14 |
15 | pushd "${project_root}" || exit 1
16 |
17 | # Ensure we have created a venv
18 | if [[ ! -e venv-local/bin/activate ]]; then
19 | python3 -m venv venv-local
20 | fi
21 | # Activate the venv
22 | source venv-local/bin/activate
23 |
24 | # Configure env
25 | export PYTHONUNBUFFERED=1;
26 | export ENABLE_APP_DEBUGGING=true;
27 | export HOME_DIR="${PWD}/dev_env/config/"
28 |
29 | mkdir -p "${HOME_DIR}"
30 |
31 | # Setup database
32 | alembic upgrade head
33 |
34 | # Run main process (FLASK_APP defaults to run.py if not exported)
35 | python3 "${FLASK_APP:-run.py}"
36 |
37 | popd || exit 1
38 |
--------------------------------------------------------------------------------
/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # path to migration scripts
5 | script_location = migrations
6 | prepend_sys_path = .
7 |
8 | # template used to generate migration files
9 | # file_template = %%(rev)s_%%(slug)s
10 |
11 | # set to 'true' to run the environment during
12 | # the 'revision' command, regardless of autogenerate
13 | # revision_environment = false
14 |
15 |
16 | # Logging configuration
17 | [loggers]
18 | keys = root,sqlalchemy,alembic
19 |
20 | [handlers]
21 | keys = console
22 |
23 | [formatters]
24 | keys = generic
25 |
26 | [logger_root]
27 | level = WARN
28 | handlers = console
29 | qualname =
30 |
31 | [logger_sqlalchemy]
32 | level = WARN
33 | handlers =
34 | qualname = sqlalchemy.engine
35 |
36 | [logger_alembic]
37 | level = INFO
38 | handlers =
39 | qualname = alembic
40 |
41 | [handler_console]
42 | class = StreamHandler
43 | args = (sys.stderr,)
44 | level = NOTSET
45 | formatter = generic
46 |
47 | [formatter_generic]
48 | format = %(levelname)-5.5s [%(name)s] %(message)s
49 | datefmt = %H:%M:%S
50 |
--------------------------------------------------------------------------------
/frontend/src/boot/axios.js:
--------------------------------------------------------------------------------
1 | import { boot } from 'quasar/wrappers'
2 | import axios from 'axios'
3 |
4 | // Be careful when using SSR for cross-request state pollution
5 | // due to creating a Singleton instance here;
6 | // If any client changes this (global) instance, it might be a
7 | // good idea to move this instance creation inside of the
8 | // "export default () => {}" function below (which runs individually
9 | // for each client)
10 | const api = axios.create({ baseURL: 'https://api.example.com' })
11 |
12 | export default boot(({ app }) => {
13 | // for use inside Vue files (Options API) through this.$axios and this.$api
14 |
15 | app.config.globalProperties.$axios = axios
16 | // ^ ^ ^ this will allow you to use this.$axios (for Vue Options API form)
17 | // so you won't necessarily have to import axios in each vue file
18 |
19 | app.config.globalProperties.$api = api
20 | // ^ ^ ^ this will allow you to use this.$api (for Vue Options API form)
21 | // so you can easily perform requests against your app's API
22 | })
23 |
24 | export { api }
25 |
--------------------------------------------------------------------------------
/frontend/src/index.template.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | <%= productName %>
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/migrations/versions/20222080e13e_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 20222080e13e
4 | Revises: e829b6fba2dc
5 | Create Date: 2024-01-22 21:01:59.559757
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '20222080e13e'
14 | down_revision = 'e829b6fba2dc'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | with op.batch_alter_table('channels', schema=None) as batch_op:
22 | batch_op.add_column(sa.Column('logo_base64', sa.String(length=500), nullable=True))
23 | batch_op.drop_index('ix_channels_logo_url')
24 |
25 | # ### end Alembic commands ###
26 |
27 |
28 | def downgrade():
29 | # ### commands auto generated by Alembic - please adjust! ###
30 | with op.batch_alter_table('channels', schema=None) as batch_op:
31 | batch_op.create_index('ix_channels_logo_url', ['logo_url'], unique=False)
32 | batch_op.drop_column('logo_base64')
33 |
34 | # ### end Alembic commands ###
35 |
--------------------------------------------------------------------------------
/migrations/versions/e0b4b10647d5_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: e0b4b10647d5
4 | Revises: 46f0f37aab7b
5 | Create Date: 2023-04-29 01:56:56.935675
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'e0b4b10647d5'
14 | down_revision = '46f0f37aab7b'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | with op.batch_alter_table('channels', schema=None) as batch_op:
22 | batch_op.add_column(sa.Column('tvh_uuid', sa.String(length=500), nullable=True))
23 | batch_op.create_index(batch_op.f('ix_channels_tvh_uuid'), ['tvh_uuid'], unique=False)
24 |
25 | # ### end Alembic commands ###
26 |
27 |
28 | def downgrade():
29 | # ### commands auto generated by Alembic - please adjust! ###
30 | with op.batch_alter_table('channels', schema=None) as batch_op:
31 | batch_op.drop_index(batch_op.f('ix_channels_tvh_uuid'))
32 | batch_op.drop_column('tvh_uuid')
33 |
34 | # ### end Alembic commands ###
35 |
--------------------------------------------------------------------------------
/frontend/src/stores/auth.js:
--------------------------------------------------------------------------------
1 | import {defineStore} from 'pinia';
2 | import axios from 'axios';
3 |
4 | export const useAuthStore = defineStore('auth', {
5 | state: () => ({
6 | isAuthenticated: false,
7 | appRuntimeKey: null,
8 | loading: false,
9 | }),
10 | actions: {
11 | async checkAuthentication() {
12 | this.loading = true;
13 | try {
14 | const response = await axios.get('/tic-api/check-auth', {
15 |         headers: {'Cache-Control': 'no-store'},
16 |         withCredentials: true,
17 | });
18 | this.isAuthenticated = response.status === 200;
19 | if (this.isAuthenticated) {
20 |           const payload = response.data;
21 | if (this.appRuntimeKey === null) {
22 | this.appRuntimeKey = payload.runtime_key;
23 | } else if (this.appRuntimeKey !== payload.runtime_key) {
24 |             console.log('Reloading window as the backend was restarted');
25 | location.reload();
26 | }
27 | }
28 | } catch (error) {
29 | console.error(error);
30 | this.isAuthenticated = false;
31 | } finally {
32 | this.loading = false;
33 | }
34 | },
35 | },
36 | });
37 |
--------------------------------------------------------------------------------
/migrations/versions/e829b6fba2dc_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: e829b6fba2dc
4 | Revises: e0b4b10647d5
5 | Create Date: 2023-05-08 09:43:54.479498
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'e829b6fba2dc'
14 | down_revision = 'e0b4b10647d5'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | with op.batch_alter_table('epg_channel_programmes', schema=None) as batch_op:
22 | batch_op.add_column(sa.Column('categories', sa.String(length=256), nullable=True))
23 | batch_op.create_index(batch_op.f('ix_epg_channel_programmes_categories'), ['categories'], unique=False)
24 |
25 | # ### end Alembic commands ###
26 |
27 |
28 | def downgrade():
29 | # ### commands auto generated by Alembic - please adjust! ###
30 | with op.batch_alter_table('epg_channel_programmes', schema=None) as batch_op:
31 | batch_op.drop_index(batch_op.f('ix_epg_channel_programmes_categories'))
32 | batch_op.drop_column('categories')
33 |
34 | # ### end Alembic commands ###
35 |
--------------------------------------------------------------------------------
/docs/compose-files/docker-compose.aio.yml:
--------------------------------------------------------------------------------
1 | ---
2 | networks:
3 | private-net:
4 |
5 | services:
6 | # -- TVH IPTV CONFIG --
7 | tic:
8 | image: ghcr.io/josh5/tvh-iptv:latest
9 |
10 | # NETWORK:
11 | networks:
12 | - private-net
13 | ports:
14 | # App Port (9985)
15 | - 9985:9985
16 | # Proxy Port (9987)
17 | - 9987:9987
18 | # TVH Webui
19 | - 9981:9981
20 | # TVH HTSP
21 | - 9982:9982
22 |
23 | # ENVIRONMENT:
24 | environment:
25 | # Process user ID
26 | - PUID=1000
27 | # Process group ID
28 | - PGID=1000
29 | # Timezone
30 | - TZ=Pacific/Auckland
31 | # Skips the DB migrate command execution on container startup
32 | - SKIP_MIGRATIONS=false
33 | # Executes a pip install on container startup (might be required to generate the venv)
34 | - RUN_PIP_INSTALL=false
35 | # Enables debug logging for main application
36 | - ENABLE_APP_DEBUGGING=false
37 | # Enables debug logging for DB queries
38 | - ENABLE_SQLALCHEMY_DEBUGGING=false
39 |
40 | # VOLUMES:
41 | volumes:
42 | # Configuration files
43 | - /data/containers/tvh-iptv/tic-config:/config
44 |
--------------------------------------------------------------------------------
/migrations/versions/4d27771deb98_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 4d27771deb98
4 | Revises: 20222080e13e
5 | Create Date: 2024-06-07 01:11:11.277594
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '4d27771deb98'
14 | down_revision = '20222080e13e'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | with op.batch_alter_table('playlists', schema=None) as batch_op:
22 | batch_op.add_column(sa.Column('use_hls_proxy', sa.Boolean(), nullable=True))
23 |
24 | # Set default value of use_hls_proxy to False for existing records
25 | with op.batch_alter_table('playlists', schema=None) as batch_op:
26 | batch_op.execute('UPDATE playlists SET use_hls_proxy = FALSE WHERE use_hls_proxy IS NULL')
27 |
28 | # ### end Alembic commands ###
29 |
30 |
31 | def downgrade():
32 | # ### commands auto generated by Alembic - please adjust! ###
33 | with op.batch_alter_table('playlists', schema=None) as batch_op:
34 | batch_op.drop_column('use_hls_proxy')
35 |
36 | # ### end Alembic commands ###
37 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Feature Request
4 | description: Suggest a new feature.
5 | labels: ['status:awaiting-triage', 'type:enhancement']
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Thanks for taking the time to help improve!
11 | - type: textarea
12 | id: problem
13 | attributes:
14 | label: Is your feature request related to a problem?
15 |       description: If so, please provide a clear and concise description of the problem.
16 |       placeholder: e.g. I'm always frustrated when '...'
17 | - type: textarea
18 | id: feature
19 | attributes:
20 | label: What is your feature request?
21 | description: A clear and concise description of the feature.
22 | validations:
23 | required: true
24 | - type: textarea
25 | id: workaround
26 | attributes:
27 | label: Are there any workarounds?
28 | description: A clear and concise description of any alternative solutions or features you've considered.
29 | - type: textarea
30 | id: additional
31 | attributes:
32 | label: Additional Context
33 | description: Add any other context or screenshots about the feature request here.
34 |
--------------------------------------------------------------------------------
/docker/overlay/usr/bin/tv_grab_url:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | dflag=
4 | vflag=
5 | cflag=
6 |
7 | if [ $# -lt 1 ]; then
8 | exit 0
9 | fi
10 |
11 | OPTARG=""
12 | URL=$1
13 |
14 | args=""
15 | for arg in "$@"; do
16 | delim=""
17 | case "$arg" in
18 | --description) args="${args}-d ";;
19 | --version) args="${args}-v ";;
20 | --capabilities) args="${args}-c ";;
21 | *)
22 | if [ "${arg#-}" != "$arg" ]; then
23 | args="${args}${delim}${arg}${delim} "
24 | else
25 | OPTARG=${arg}
26 | fi
27 | ;;
28 | esac
29 | done
30 |
31 | # Reset the positional parameters to the short options
32 | set -- $args
33 |
34 | while getopts "dvc" option; do
35 | case $option in
36 | d) dflag=1;;
37 | v) vflag=1;;
38 | c) cflag=1;;
39 | \?) printf "unknown option: -%s\n" "$OPTARG"
40 | printf "Usage: %s: [--description] [--version] [--capabilities] \n" "$(basename "$0")"
41 | exit 2
42 | ;;
43 | esac
44 | done
45 |
46 | if [ "$dflag" ]; then
47 | printf "XMLTV URL grabber\n"
48 | exit 0
49 | fi
50 | if [ "$vflag" ]; then
51 | printf "0.1\n"
52 | exit 0
53 | fi
54 | if [ "$cflag" ]; then
55 | printf "baseline\n"
56 | exit 0
57 | fi
58 |
59 | curl -s "$URL"
60 |
61 | exit 0
62 |
--------------------------------------------------------------------------------
/frontend/src/pages/LoginPage.vue:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
48 |
--------------------------------------------------------------------------------
/migrations/versions/044b003faaaa_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 044b003faaaa
4 | Revises: 62e3f7ef0ad2
5 | Create Date: 2024-06-17 13:45:34.336229
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '044b003faaaa'
14 | down_revision = '62e3f7ef0ad2'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | with op.batch_alter_table('playlists', schema=None) as batch_op:
22 | batch_op.add_column(sa.Column('use_custom_hls_proxy', sa.Boolean(), nullable=True))
23 | batch_op.add_column(sa.Column('hls_proxy_path', sa.String(length=256), nullable=True))
24 |
25 | # Set all existing values for 'use_custom_hls_proxy' to False
26 | op.execute('UPDATE playlists SET use_custom_hls_proxy = False WHERE use_custom_hls_proxy IS NULL')
27 |
28 | # Alter the column to be non-nullable
29 | with op.batch_alter_table('playlists', schema=None) as batch_op:
30 | batch_op.alter_column('use_custom_hls_proxy', existing_type=sa.Boolean(), nullable=False)
31 | # ### end Alembic commands ###
32 |
33 |
34 | def downgrade():
35 | # ### commands auto generated by Alembic - please adjust! ###
36 | op.drop_column('playlists', 'hls_proxy_path')
37 | op.drop_column('playlists', 'use_custom_hls_proxy')
38 | # ### end Alembic commands ###
39 |
--------------------------------------------------------------------------------
/docker/docker-compose.dev-aio.yml:
--------------------------------------------------------------------------------
1 | ---
2 | networks:
3 | private-net:
4 |
5 | services:
6 | # -- TVH IPTV CONFIG --
7 | tic:
8 | image: ghcr.io/josh5/tvh-iptv:latest
9 | build:
10 | context: ..
11 | dockerfile: docker/Dockerfile
12 | args:
13 | VERSION: "UNKNOWN"
14 | BUILD_DATE: "NOW"
15 | BASE_IMAGE: "ghcr.io/tvheadend/tvheadend:edge-debian"
16 |
17 | # NETWORK:
18 | networks:
19 | - private-net
20 | ports:
21 | # App Port (9985)
22 | - 9985:9985
23 | # Proxy Port (9987)
24 | - 9987:9987
25 | # TVH Webui
26 | - 9981:9981
27 | # TVH HTSP
28 | - 9982:9982
29 |
30 | # ENVIRONMENT:
31 | environment:
32 | # Process user ID
33 | - PUID=1000
34 | # Process group ID
35 | - PGID=1000
36 | # Timezone
37 | - TZ=Pacific/Auckland
38 | # Skips the DB migrate command execution on container startup
39 | - SKIP_MIGRATIONS=false
40 | # Executes a pip install on container startup (might be required to generate the venv)
41 | - RUN_PIP_INSTALL=false
42 | # Enables debug logging for main application
43 | - ENABLE_APP_DEBUGGING=true
44 | # Enables debug logging for DB queries
45 | - ENABLE_SQLALCHEMY_DEBUGGING=false
46 |
47 | # VOLUMES:
48 | volumes:
49 | # Configuration files
50 | - ../dev_env/config:/config
51 | # Application source passthrough
52 | - ../:/app
53 |
--------------------------------------------------------------------------------
/frontend/public/icons/tvh-icon.svg:
--------------------------------------------------------------------------------
1 |
2 |
4 |
7 |
8 |
10 |
13 |
16 |
18 |
21 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/frontend/src/router/index.js:
--------------------------------------------------------------------------------
1 | import { route } from 'quasar/wrappers'
2 | import { createRouter, createMemoryHistory, createWebHistory, createWebHashHistory } from 'vue-router'
3 | import { useAuthStore } from 'stores/auth'
4 | import routes from './routes'
5 |
6 | /*
7 | * If not building with SSR mode, you can
8 | * directly export the Router instantiation;
9 | *
10 | * The function below can be async too; either use
11 | * async/await or return a Promise which resolves
12 | * with the Router instance.
13 | */
14 |
15 | export default route(function (/* { store, ssrContext } */) {
16 | const createHistory = process.env.SERVER
17 | ? createMemoryHistory
18 | : (process.env.VUE_ROUTER_MODE === 'history' ? createWebHistory : createWebHashHistory)
19 |
20 | const Router = createRouter({
21 | scrollBehavior: () => ({ left: 0, top: 0 }),
22 | routes,
23 |
24 |     // Leave this as is and make changes in quasar.config.js instead!
25 |     // quasar.config.js -> build -> vueRouterMode
26 |     // quasar.config.js -> build -> publicPath
27 | history: createHistory(process.env.MODE === 'ssr' ? void 0 : process.env.VUE_ROUTER_BASE)
28 | })
29 |
30 | // Add navigation guard
31 | Router.beforeEach(async (to, from, next) => {
32 | const authStore = useAuthStore();
33 | if (to.meta.requiresAuth) {
34 | await authStore.checkAuthentication();
35 | if (authStore.isAuthenticated) {
36 | next();
37 | } else {
38 | next('/login');
39 | }
40 | } else {
41 | next();
42 | }
43 | });
44 |
45 | return Router
46 | })
47 |
--------------------------------------------------------------------------------
/docs/run-from-source.md:
--------------------------------------------------------------------------------
1 | # Run from source
2 |
3 | > [!WARNING]
4 | > This is not the recommended way (or even an actively supported way) to run TIC. Please consider using Docker.
5 |
6 | ## Clone project
7 |
8 | Clone this project somewhere
9 | ```
10 | git clone https://github.com/Josh5/TVH-IPTV-Config.git
11 | cd TVH-IPTV-Config
12 | ```
13 |
14 | ## Run the build scripts
15 |
16 | 1) Run the setup script. This will create a local environment, installing a Python virtual environment and all dependencies listed in the requirements.txt file, along with building the frontend. Re-run this script whenever you pull updates from GitHub.
17 | ```
18 | ./devops/setup_local_dev_env.sh
19 | ```
20 | 2) Run the project.
21 | ```
22 | source venv-local/bin/activate
23 |
24 | # Create a directory for your config files and export it to HOME_DIR
25 | export HOME_DIR="${PWD}/dev_env/config/"
26 | mkdir -p "${HOME_DIR}"
27 |
28 | # Migrate database
29 | alembic upgrade head
30 |
31 | # Run app
32 | python3 ./run.py
33 | ```
34 |
35 | > [!NOTE]
36 | > These above commands will create a directory within this project root called `./dev_env` which contains all configuration and cache data.
37 | > If you want this config path to be somewhere else, set `HOME_DIR` to a different path. You will need to ensure you export this before you run `./run.py` each time (see the example at the end of this page).
38 |
39 | > [!IMPORTANT]
40 | > If you are running it like this, you will need to configure all TVH stuff yourself.
41 |
42 | ## Update project
43 |
44 | Pull updates
45 | ```
46 | git pull origin master
47 | ```
48 |
49 | Rebuild
50 | ```
51 | ./devops/setup_local_dev_env.sh
52 | ./devops/run_local_dev_env.sh
53 | ```
54 |
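55 | ## Example: custom config location
56 | 
57 | A minimal sketch of running against a config directory outside the project root (the `/opt/tic-config` path here is just an example):
58 | ```
59 | source venv-local/bin/activate
60 | 
61 | # Point HOME_DIR at your chosen config directory and create it
62 | export HOME_DIR="/opt/tic-config/"
63 | mkdir -p "${HOME_DIR}"
64 | 
65 | # Migrate the database and run the app against that config path
66 | alembic upgrade head
67 | python3 ./run.py
68 | ```
69 | 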
--------------------------------------------------------------------------------
/.github/label-actions.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | # Configuration for Label Actions - https://github.com/dessant/label-actions
4 |
5 | added:
6 | comment: >
7 | This feature has been added and will be available in the next release.
8 | This issue will be automatically closed once the update is available.
9 |
10 | fixed:
11 | comment: >
12 | This bug has been fixed and will be available in the next release.
13 | This issue will be automatically closed once the update is available.
14 |
15 | invalid:duplicate:
16 | comment: >
17 | :wave: @{issue-author}, this appears to be a duplicate of a pre-existing issue.
18 | close: true
19 | lock: true
20 | unlabel: 'status:awaiting-triage'
21 |
22 | -invalid:duplicate:
23 | reopen: true
24 | unlock: true
25 |
26 | invalid:support:
27 | comment: >
28 | :wave: @{issue-author}, we use the issue tracker exclusively for bug reports.
29 | However, this issue appears to be a support request. Please use our
30 | [Discord Server](https://unmanic.app/discord) to get help. Thanks.
31 | close: true
32 | lock: true
33 | lock-reason: 'off-topic'
34 | unlabel: 'status:awaiting-triage'
35 |
36 | -invalid:support:
37 | reopen: true
38 | unlock: true
39 |
40 | invalid:template-incomplete:
41 | issues:
42 | comment: >
43 | :wave: @{issue-author}, please edit your issue to complete the template with
44 | all the required info. Your issue will be automatically closed in 5 days if
45 | the template is not completed. Thanks.
46 | prs:
47 | comment: >
48 | :wave: @{issue-author}, please edit your PR to complete the template with
49 | all the required info. Your PR will be automatically closed in 5 days if
50 | the template is not completed. Thanks.
51 |
--------------------------------------------------------------------------------
/docs/run-with-docker-compose.md:
--------------------------------------------------------------------------------
1 | # Docker Compose
2 |
3 | Follow these instructions to configure a docker-compose.yml for your system.
4 |
5 | > __Note__
6 | >
7 | > These instructions assume that you have docker and docker-compose installed for your system.
8 | >
9 | > Depending on how you have installed this, the commands to execute docker compose may vary.
10 |
11 |
12 | ## PREPARE DIRECTORIES:
13 |
14 | > __Warning__
15 | >
16 | > These commands are meant to be run as your user. Do not run them as root.
17 | >
18 | > If you do run these commands as root, you may need to manually fix the permissions and ownership after.
19 |
20 | Create a directory for your service:
21 | ```shell
22 | sudo mkdir -p /data/containers/tvh-iptv
23 | sudo chown -R $(id -u):$(id -g) /data/containers/tvh-iptv
24 | ```
25 |
26 | If you modify the path `/data/containers/tvh-iptv`, ensure you also modify the path in your docker-compose.yml file below (see the example at the end of this page).
27 |
28 |
29 | Create a Docker Compose file `/data/containers/tvh-iptv/docker-compose.yml`.
30 |
31 | Populate this file with the contents of one of these Docker Compose templates:
32 | - [AIO Stack with TIC & TVH in a single container (Recommended)](./compose-files/docker-compose.aio.yml).
33 | - [Side-cart Stack with TIC & TVH in separate containers (Requires that you do some initial setup for TVH)](./compose-files/docker-compose.side-tvh.yml).
34 | - [AIO Stack with multiple HLS Proxy side-carts](./compose-files/docker-compose.aio-with-proxies.yml).
35 |
36 | ## EXECUTE:
37 |
38 | Navigate to your compose location and execute it.
39 | ```shell
40 | cd /data/containers/tvh-iptv
41 | sudo docker-compose up -d --force-recreate --pull always
42 | ```
43 |
44 | After the container starts successfully, navigate to your docker host URL in your browser on port 9985 and click connect:
45 | `http://<docker-host>:9985/`
46 |
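47 | ## CUSTOM PATH EXAMPLE:
48 | 
49 | A sketch of the volume mapping to update if you created a different directory (the `/srv/tvh-iptv` path here is hypothetical):
50 | ```yaml
51 |     volumes:
52 |       # Configuration files (host path must match the directory you created)
53 |       - /srv/tvh-iptv/tic-config:/config
54 | ```
55 | 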
--------------------------------------------------------------------------------
/migrations/versions/62e3f7ef0ad2_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 62e3f7ef0ad2
4 | Revises: 4d27771deb98
5 | Create Date: 2024-06-09 11:56:56.425793
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '62e3f7ef0ad2'
14 | down_revision = '4d27771deb98'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | with op.batch_alter_table('epg_channel_programmes', schema=None) as batch_op:
22 | batch_op.add_column(sa.Column('sub_title', sa.String(length=500), nullable=True))
23 | batch_op.add_column(sa.Column('series_desc', sa.String(length=500), nullable=True))
24 | batch_op.add_column(sa.Column('icon_url', sa.String(length=500), nullable=True))
25 | batch_op.add_column(sa.Column('country', sa.String(length=500), nullable=True))
26 |
27 | with op.batch_alter_table('playlists', schema=None) as batch_op:
28 | batch_op.alter_column('use_hls_proxy',
29 | existing_type=sa.BOOLEAN(),
30 | nullable=False)
31 |
32 | # ### end Alembic commands ###
33 |
34 |
35 | def downgrade():
36 | # ### commands auto generated by Alembic - please adjust! ###
37 | with op.batch_alter_table('playlists', schema=None) as batch_op:
38 | batch_op.alter_column('use_hls_proxy',
39 | existing_type=sa.BOOLEAN(),
40 | nullable=True)
41 |
42 | with op.batch_alter_table('epg_channel_programmes', schema=None) as batch_op:
43 | batch_op.drop_column('country')
44 | batch_op.drop_column('icon_url')
45 | batch_op.drop_column('series_desc')
46 | batch_op.drop_column('sub_title')
47 |
48 | # ### end Alembic commands ###
49 |
--------------------------------------------------------------------------------
/docker/overlay/defaults/nginx/nginx.conf.template:
--------------------------------------------------------------------------------
1 | worker_processes 1;
2 | error_log stderr;
3 | daemon off;
4 | pid nginx.pid;
5 |
6 | events { worker_connections 1024; }
7 |
8 | http {
9 | include /etc/nginx/mime.types;
10 | default_type application/octet-stream;
11 | sendfile on;
12 | keepalive_timeout 65;
13 |
14 | # Logging
15 | access_log /dev/null;
16 |
17 | # Temporary files
18 | client_body_temp_path /tmp/nginx/client_body;
19 | proxy_temp_path /tmp/nginx/proxy;
20 | fastcgi_temp_path /tmp/nginx/fastcgi;
21 | uwsgi_temp_path /tmp/nginx/uwsgi;
22 | scgi_temp_path /tmp/nginx/scgi;
23 |
24 | server {
25 | listen 9985;
26 |
27 | location /tic-tvh/ {
28 | proxy_pass http://127.0.0.1:9981;
29 |
30 | # Set headers to handle WebSocket connections
31 | proxy_http_version 1.1;
32 | proxy_set_header Upgrade $http_upgrade;
33 | proxy_set_header Connection "upgrade";
34 |
35 | proxy_set_header Host $host;
36 | proxy_set_header X-Real-IP $remote_addr;
37 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
38 | proxy_set_header X-Forwarded-Proto $scheme;
39 |
40 | # Pass the Authorization header
41 | proxy_set_header Authorization $http_authorization;
42 | }
43 |
44 | location / {
45 | proxy_pass http://127.0.0.1:9984;
46 | proxy_set_header Host $host;
47 | proxy_set_header X-Real-IP $remote_addr;
48 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
49 | proxy_set_header X-Forwarded-Proto $scheme;
50 |
51 | # Pass the Authorization header
52 | proxy_set_header Authorization $http_authorization;
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "tvh_iptv_config",
3 | "version": "0.0.1",
4 | "description": "A tool for simplifying IPTV configuration in TVheadend",
5 | "productName": "TVH IPTV Config",
6 | "author": "Josh.5 ",
7 | "private": true,
8 | "scripts": {
9 | "serve": "quasar dev --debug",
10 | "build": "quasar build",
11 | "build:watch": "nodemon",
12 | "build:publish": "quasar build --publish",
13 | "lint": "eslint --ext .js,.vue ./",
14 | "format": "prettier --write \"**/*.{js,vue,scss,html,md,json}\" --ignore-path .gitignore",
15 | "test": "echo \"No test specified\" && exit 0",
16 | "upgrade-packages": "npx npm-check-updates -u"
17 | },
18 | "dependencies": {
19 | "@quasar/extras": "^1.16.12",
20 | "axios": "^1.2.1",
21 | "core-js": "^3.39.0",
22 | "pinia": "^2.2.5",
23 | "quasar": "^2.17.1",
24 | "stream": "^0.0.3",
25 | "timers": "^0.1.1",
26 | "vue": "^3.2.29",
27 | "vue-router": "^4.0.12",
28 | "vuedraggable": "^4.1.0",
29 | "xml2js": "^0.6.2"
30 | },
31 | "devDependencies": {
32 | "@babel/eslint-parser": "^7.13.14",
33 | "@quasar/app-webpack": "^3.9.2",
34 | "@vue/devtools": "^7.6.2",
35 | "eslint": "^9.13.0",
36 | "eslint-config-prettier": "^9.1.0",
37 | "eslint-plugin-vue": "^9.30.0",
38 | "eslint-webpack-plugin": "^4.2.0",
39 | "nodemon": "^3.1.7",
40 | "prettier": "^3.3.3",
41 | "sass-embedded": "^1.80.6"
42 | },
43 | "browserslist": [
44 | "last 10 Chrome versions",
45 | "last 10 Firefox versions",
46 | "last 4 Edge versions",
47 | "last 7 Safari versions",
48 | "last 8 Android versions",
49 | "last 8 ChromeAndroid versions",
50 | "last 8 FirefoxAndroid versions",
51 | "last 10 iOS versions",
52 | "last 5 Opera versions"
53 | ],
54 | "engines": {
55 | "node": "^20 || ^18 || ^16",
56 | "npm": ">= 6.13.4",
57 | "yarn": ">= 1.21.1"
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/docker/overlay/defaults/tvheadend/config:
--------------------------------------------------------------------------------
1 | {
2 | "server_name": "TVH-IPTV",
3 | "version": 24,
4 | "full_version": "4.3-2336~g366e56290",
5 | "theme_ui": "blue",
6 | "ui_quicktips": true,
7 | "uilevel": 2,
8 | "uilevel_nochange": false,
9 | "caclient_ui": false,
10 | "info_area": [
11 | "login",
12 | "storage",
13 | "time"
14 | ],
15 | "chname_num": true,
16 | "chname_src": false,
17 | "date_mask": "",
18 | "label_formatting": false,
19 | "language": [
20 | ],
21 | "epg_compress": true,
22 | "epg_cutwindow": 300,
23 | "epg_window": 86400,
24 | "prefer_picon": false,
25 | "chiconscheme": 0,
26 | "piconscheme": 0,
27 | "http_server_name": "HTS/tvheadend",
28 | "http_realm_name": "tvheadend",
29 | "digest": 2,
30 | "digest_algo": 0,
31 | "cookie_expires": 7,
32 | "ticket_expires": 300,
33 | "proxy": false,
34 | "hdhomerun_ip": "",
35 | "local_ip": "",
36 | "local_port": 0,
37 | "hdhomerun_server_tuner_count": 0,
38 | "hdhomerun_server_enable": false,
39 | "http_user_agent": "TVHeadend/4.3-2336~g366e56290",
40 | "iptv_tpool": 2,
41 | "dscp": -1,
42 | "descrambler_buffer": 9000,
43 | "parser_backlog": true,
44 | "hbbtv": false,
45 | "tvhtime_update_enabled": false,
46 | "tvhtime_ntp_enabled": false,
47 | "tvhtime_tolerance": 5000,
48 | "satip_rtsp": 0,
49 | "satip_anonymize": false,
50 | "satip_noupnp": false,
51 | "satip_weight": 100,
52 | "satip_remote_weight": true,
53 | "satip_descramble": 1,
54 | "satip_muxcnf": 0,
55 | "satip_rtptcpsize": 42,
56 | "satip_nat_rtsp": 0,
57 | "satip_nat_name_force": false,
58 | "satip_iptv_sig_level": 220,
59 | "force_sig_level": 0,
60 | "satip_dvbs": 0,
61 | "satip_dvbs2": 0,
62 | "satip_dvbt": 0,
63 | "satip_dvbt2": 0,
64 | "satip_dvbc": 0,
65 | "satip_dvbc2": 0,
66 | "satip_atsct": 0,
67 | "satip_atscc": 0,
68 | "satip_isdbt": 0,
69 | "satip_max_sessions": 0,
70 | "satip_max_user_connections": 0,
71 | "satip_rewrite_pmt": false,
72 | "satip_nom3u": false,
73 | "satip_notcp_mode": false,
74 | "satip_restrict_pids_all": false,
75 | "satip_drop_fe": false
76 | }
77 |
--------------------------------------------------------------------------------
/docs/compose-files/docker-compose.side-tvh.yml:
--------------------------------------------------------------------------------
1 | ---
2 | networks:
3 | private-net:
4 |
5 | services:
6 | # -- TVH IPTV CONFIG --
7 | tic:
8 | image: josh5/tvh-iptv-config:latest
9 |
10 | # NETWORK:
11 | networks:
12 | - private-net
13 | ports:
14 | # App Port (9985)
15 | - 9985:9985
16 | # Proxy Port (9987)
17 | - 9987:9987
18 |
19 | # ENVIRONMENT:
20 | environment:
21 | # Process user ID
22 | - PUID=1000
23 | # Process group ID
24 | - PGID=1000
25 | # Timezone
26 | - TZ=Pacific/Auckland
27 | # Skips the DB migrate command execution on container startup
28 | - SKIP_MIGRATIONS=false
29 | # Executes a pip install on container startup (might be required to generate the venv)
30 | - RUN_PIP_INSTALL=false
31 | # Enables debug logging for main application
32 | - ENABLE_APP_DEBUGGING=false
33 | # Enables debug logging for DB queries
34 | - ENABLE_SQLALCHEMY_DEBUGGING=false
35 |
36 | # VOLUMES:
37 | volumes:
38 | # Configuration files
39 | - /data/containers/tvh-iptv/tic-config:/config
40 |
41 | depends_on:
42 | - tvheadend
43 |
44 | # -- TVHEADEND --
45 | tvheadend:
46 | image: lscr.io/linuxserver/tvheadend:latest
47 |
48 | # NETWORK:
49 | networks:
50 | - private-net
51 | ports:
52 | - "9981:9981/tcp" # Webui
53 | - "9982:9982/tcp" # HTSP
54 |
55 | # ENVIRONMENT:
56 | environment:
57 | # Process user ID
58 | - PUID=1000
59 | # Process group ID
60 | - PGID=1000
61 | # Timezone
62 | - TZ=Pacific/Auckland
63 | # Additional arguments to be passed to Tvheadend (Optional)
64 | # - RUN_OPTS= #optional
65 |
66 | # VOLUMES:
67 | volumes:
68 | - /data/containers/tvh-iptv/tvh-config:/config
69 | - /data/containers/tvh-iptv/tvh-recordings:/recordings
70 | - /data/containers/tvh-iptv/tvh-timeshift:/timeshift
71 |
72 | # DEVICES: - Uncomment if required
73 | # devices:
74 | # - /dev/dri #optional
75 |
--------------------------------------------------------------------------------
/frontend/src/router/routes.js:
--------------------------------------------------------------------------------
1 | const routes = [
2 | {
3 | path: '/',
4 | component: () => import('layouts/MainLayout.vue'),
5 | children: [
6 | {
7 | path: '',
8 | component: () => import('pages/GeneralPage.vue'),
9 | meta: {requiresAuth: true},
10 | },
11 | ],
12 | },
13 | {
14 | name: 'general',
15 | path: '/general',
16 | component: () => import('layouts/MainLayout.vue'),
17 | children: [
18 | {
19 | path: '',
20 | component: () => import('pages/GeneralPage.vue'),
21 | meta: {requiresAuth: true},
22 | },
23 | ],
24 | },
25 | {
26 | name: 'tvheadend',
27 | path: '/tvheadend',
28 | component: () => import('layouts/MainLayout.vue'),
29 | children: [
30 | {
31 | path: '',
32 | component: () => import('pages/TvheadendPage.vue'),
33 | meta: {requiresAuth: true},
34 | },
35 | ],
36 | },
37 | {
38 | name: 'playlists',
39 | path: '/playlists',
40 | component: () => import('layouts/MainLayout.vue'),
41 | children: [
42 | {
43 | path: '',
44 | component: () => import('pages/PlaylistsPage.vue'),
45 | meta: {requiresAuth: true},
46 | },
47 | ],
48 | },
49 | {
50 | name: 'epgs',
51 | path: '/epgs',
52 | component: () => import('layouts/MainLayout.vue'),
53 | children: [
54 | {
55 | path: '',
56 | component: () => import('pages/EpgsPage.vue'),
57 | meta: {requiresAuth: true},
58 | },
59 | ],
60 | },
61 | {
62 | name: 'channels',
63 | path: '/channels',
64 | component: () => import('layouts/MainLayout.vue'),
65 | children: [
66 | {
67 | path: '',
68 | component: () => import('pages/ChannelsPage.vue'),
69 | meta: {requiresAuth: true},
70 | },
71 | ],
72 | },
73 | {
74 | name: 'login',
75 | path: '/login',
76 | component: () => import('pages/LoginPage.vue'),
77 | },
78 |
79 |   // Always leave this as the last one,
80 | // but you can also remove it
81 | {
82 | path: '/:catchAll(.*)*',
83 | component: () => import('pages/ErrorNotFound.vue'),
84 | },
85 | ];
86 |
87 | export default routes;
88 |
--------------------------------------------------------------------------------
/docker/docker-compose.dev-side-tvh.yml:
--------------------------------------------------------------------------------
1 | ---
2 | networks:
3 | private-net:
4 |
5 | services:
6 | # -- TVH IPTV CONFIG --
7 | tic:
8 | image: josh5/tvh-iptv-config:latest
9 | build:
10 | context: ..
11 | dockerfile: docker/Dockerfile
12 | args:
13 | VERSION: "UNKNOWN"
14 | BUILD_DATE: "NOW"
15 | BASE_IMAGE: "python:3.12-bookworm"
16 |
17 | # NETWORK:
18 | networks:
19 | - private-net
20 | ports:
21 | # App Port (9985)
22 | - 9985:9985
23 | # Proxy Port (9987)
24 | - 9987:9987
25 |
26 | # ENVIRONMENT:
27 | environment:
28 | # Process user ID
29 | - PUID=1000
30 | # Process group ID
31 | - PGID=1000
32 | # Timezone
33 | - TZ=Pacific/Auckland
34 | # Skips the DB migrate command execution on container startup
35 | - SKIP_MIGRATIONS=false
36 | # Executes a pip install on container startup (might be required to generate the venv)
37 | - RUN_PIP_INSTALL=false
38 | # Enables debug logging for main application
39 | - ENABLE_APP_DEBUGGING=true
40 | # Enables debug logging for DB queries
41 | - ENABLE_SQLALCHEMY_DEBUGGING=false
42 |
43 | # VOLUMES:
44 | volumes:
45 | # Configuration files
46 | - ../dev_env/config:/config
47 | # Application source passthrough
48 | - ../:/app
49 |
50 | depends_on:
51 | - tvheadend
52 |
53 | # -- TVHEADEND --
54 | tvheadend:
55 | image: lscr.io/linuxserver/tvheadend:latest
56 |
57 | # NETWORK:
58 | networks:
59 | - private-net
60 | ports:
61 | - "9981:9981/tcp" # Webui
62 | - "9982:9982/tcp" # HTSP
63 |
64 | # ENVIRONMENT:
65 | environment:
66 | # Process user ID
67 | - PUID=1000
68 | # Process group ID
69 | - PGID=1000
70 | # Timezone
71 | - TZ=Pacific/Auckland
72 | # Additional arguments to be passed to Tvheadend (Optional)
73 | # - RUN_OPTS= #optional
74 |
75 | # VOLUMES:
76 | volumes:
77 | - ../dev_env/tvheadend:/config
78 | - ../dev_env/tvh-recordings:/recordings
79 | - ../dev_env/tvh-timeshift:/timeshift
80 |
81 | ## # DEVICES:
82 | ## devices:
83 | ## - /dev/dri #optional
84 |
--------------------------------------------------------------------------------
/migrations/env.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from logging.config import fileConfig
3 | from alembic import context
4 | from sqlalchemy import create_engine, pool
5 | from backend.config import sqlalchemy_database_uri
6 | from backend.models import Base
7 |
8 | # Interpret the config file for Python logging.
9 | fileConfig(context.config.config_file_name)
10 | logger = logging.getLogger('alembic.env')
11 |
12 | # Add your model's MetaData object here
13 | # for 'autogenerate' support
14 | target_metadata = Base.metadata
15 |
16 |
17 | def run_migrations_offline():
18 | """Run migrations in 'offline' mode.
19 |
20 | This configures the context with just a URL
21 | and not an Engine, though an Engine is acceptable
22 | here as well. By skipping the Engine creation
23 | we don't even need a DBAPI to be available.
24 |
25 | Calls to context.execute() here emit the given string to the
26 | script output.
27 | """
28 | context.configure(
29 | url=sqlalchemy_database_uri, target_metadata=target_metadata, literal_binds=True
30 | )
31 |
32 | with context.begin_transaction():
33 | context.run_migrations()
34 |
35 |
36 | def run_migrations_online():
37 | """Run migrations in 'online' mode.
38 |
39 | In this scenario we need to create an Engine
40 | and associate a connection with the context.
41 | """
42 |
43 | # This callback is used to prevent an auto-migration from being generated
44 | # when there are no changes to the schema
45 |     # Reference: https://alembic.sqlalchemy.org/en/latest/cookbook.html
46 | def process_revision_directives(context, revision, directives):
47 | if getattr(context.config.cmd_opts, 'autogenerate', False):
48 | script = directives[0]
49 | if script.upgrade_ops.is_empty():
50 | directives[:] = []
51 | logger.info('No changes in schema detected.')
52 |
53 | connectable = create_engine(
54 | sqlalchemy_database_uri,
55 | poolclass=pool.NullPool,
56 | )
57 |
58 | with connectable.connect() as connection:
59 | context.configure(
60 | connection=connection,
61 | target_metadata=target_metadata,
62 | process_revision_directives=process_revision_directives,
63 | )
64 |
65 | with context.begin_transaction():
66 | context.run_migrations()
67 |
68 |
69 | if context.is_offline_mode():
70 | run_migrations_offline()
71 | else:
72 | run_migrations_online()
73 |
--------------------------------------------------------------------------------
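This env.py runs whenever migrations are invoked through the alembic CLI (or the project's `db-migrate.sh` wrapper). A minimal sketch of driving the same flow programmatically, assuming the project-root `alembic.ini` (which points `script_location` at `./migrations`):

```python
# Minimal sketch: run the migrations defined above via the alembic Python API.
# Equivalent to `alembic upgrade head`; the ini path is an assumption based on
# the repository layout (alembic.ini in the project root).
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")   # loads script_location and logging config
command.upgrade(cfg, "head")  # invokes run_migrations_online() from env.py
```
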
/frontend/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | // https://eslint.org/docs/user-guide/configuring#configuration-cascading-and-hierarchy
3 | // This option interrupts the configuration hierarchy at this file
4 | // Remove this if you have a higher-level ESLint config file (as usually happens in monorepos)
5 | root: true,
6 |
7 | parserOptions: {
8 | parser: '@babel/eslint-parser',
9 | ecmaVersion: 2021, // Allows for the parsing of modern ECMAScript features
10 | sourceType: 'module', // Allows for the use of imports
11 | },
12 |
13 | env: {
14 | browser: true,
15 | 'vue/setup-compiler-macros': true,
16 | },
17 |
18 | // Rules order is important, please avoid shuffling them
19 | extends: [
20 | // Base ESLint recommended rules
21 | // 'eslint:recommended',
22 |
23 | // Uncomment any of the lines below to choose desired strictness,
24 | // but leave only one uncommented!
25 | // See https://eslint.vuejs.org/rules/#available-rules
26 | 'plugin:vue/vue3-essential', // Priority A: Essential (Error Prevention)
27 | // 'plugin:vue/vue3-strongly-recommended', // Priority B: Strongly Recommended (Improving Readability)
28 | // 'plugin:vue/vue3-recommended', // Priority C: Recommended (Minimizing Arbitrary Choices and Cognitive Overhead)
29 |
30 | // https://github.com/prettier/eslint-config-prettier#installation
31 | // usage with Prettier, provided by 'eslint-config-prettier'.
32 | 'prettier',
33 | ],
34 |
35 | plugins: [
36 | // https://eslint.vuejs.org/user-guide/#why-doesn-t-it-work-on-vue-files
37 | // required to lint *.vue files
38 | 'vue',
39 |
40 | // https://github.com/typescript-eslint/typescript-eslint/issues/389#issuecomment-509292674
41 | // Prettier has not been included as plugin to avoid performance impact
42 | // add it as an extension for your IDE
43 |
44 | ],
45 |
46 | globals: {
47 | ga: 'readonly', // Google Analytics
48 | cordova: 'readonly',
49 | __statics: 'readonly',
50 | __QUASAR_SSR__: 'readonly',
51 | __QUASAR_SSR_SERVER__: 'readonly',
52 | __QUASAR_SSR_CLIENT__: 'readonly',
53 | __QUASAR_SSR_PWA__: 'readonly',
54 | process: 'readonly',
55 | Capacitor: 'readonly',
56 | chrome: 'readonly',
57 | },
58 |
59 | // add your custom rules here
60 | rules: {
61 |
62 | 'prefer-promise-reject-errors': 'off',
63 |
64 | // allow debugger during development only
65 | 'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off',
66 | },
67 | };
68 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/BUG-REPORT.yml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | name: Bug Report
4 | description: Please do not use bug reports for support issues.
5 | title: "[Bug]: "
6 | labels: ['status:awaiting-triage', 'type:bug']
7 | body:
8 | - type: markdown
9 | attributes:
10 | value: |
11 | **THIS IS NOT THE PLACE TO ASK FOR SUPPORT!** Please use [Discord](https://unmanic.app/discord) for support issues.
12 | - type: textarea
13 | id: description
14 | attributes:
15 | label: Describe the Bug
16 | description: A clear and concise description of the bug.
17 | validations:
18 | required: true
19 | - type: textarea
20 | id: steps
21 | attributes:
22 | label: Steps to Reproduce
23 | description: List each action required in order to reproduce the issue.
24 | placeholder: |
25 | 1. First Step '...'
26 | 2. Second Step '...'
27 | 3. So on '...'
28 | 4. See error
29 | - type: textarea
30 | id: expected
31 | attributes:
32 | label: Expected Behavior
33 | description: A clear and concise description of what you expected to happen.
34 | - type: textarea
35 | id: screenshots
36 | attributes:
37 | label: Screenshots
38 | description: Provide screenshots to help explain your problem.
39 | - type: textarea
40 | id: relevant
41 | attributes:
42 | label: Relevant Settings
43 | description: Include all settings/configuration that are relevant to your setup.
44 | placeholder: |
45 | - eg. Configuration of your Docker container.
46 | - eg. Configurations within the application/container.
47 | - type: input
48 | id: version
49 | attributes:
50 | label: Version
51 | description: The version is the first line printed in the Docker log.
52 | placeholder: 'Build: [10/08/22 03:09:26] [master] [477e25f82c2612b6345ca0c9777345e6b5129965]'
53 | validations:
54 | required: true
55 | - type: textarea
56 | id: logs
57 | attributes:
58 | label: Relevant log output
59 | description: |
60 | Please copy and paste any relevant log output.
61 | This will be automatically formatted into code, so no need for backticks.
62 | Note: Most relevant logs are found either in the docker logs or inside the container in /home/default/.cache/log/
63 | render: Shell
64 | - type: markdown
65 | attributes:
66 | value: |
67 | Make sure to close your issue when it's solved! If you found the solution yourself please comment so that others benefit from it.
68 |
--------------------------------------------------------------------------------
/frontend/src/mixins/aioFunctionsMixin.js:
--------------------------------------------------------------------------------
1 | import {ref, onBeforeUnmount} from 'vue';
2 | import {useQuasar} from 'quasar';
3 | import axios from 'axios';
4 |
5 | let instance;
6 |
7 | function createAioStartupTasks() {
8 | const $q = useQuasar();
9 | const firstRun = ref(null);
10 | const aioMode = ref(false);
11 |
12 | let pingInterval;
13 | const pingBackend = () => {
14 | axios({
15 | method: 'GET',
16 | url: '/tic-tvh/ping',
17 | timeout: 4000,
18 | }).then((response) => {
19 | if (response.status === 200 && response.data.includes('PONG')) {
20 | if (firstRun.value) {
21 | setTimeout(saveFirstRunSettings, 10000);
22 | } else {
23 | $q.loading.hide();
24 | clearInterval(pingInterval);
25 | }
26 | } else {
27 | $q.loading.show({
28 | message: `Tvheadend backend returned status code ${response.status}...`,
29 | });
30 | }
31 | }).catch(() => {
32 | $q.loading.show({
33 | message: 'Waiting for Tvheadend backend to start...',
34 | });
35 | });
36 | };
37 |
38 | const saveFirstRunSettings = () => {
39 | let postData = {
40 | settings: {
41 | 'first_run': true,
42 | 'app_url': window.location.origin,
43 | },
44 | };
45 | axios({
46 | method: 'POST',
47 | url: '/tic-api/save-settings',
48 | data: postData,
49 | }).then(() => {
50 | // Reload page to properly trigger the auth refresh
51 | location.reload();
52 | });
53 | };
54 |
55 | const checkTvhStatus = () => {
56 | // Fetch current settings
57 | axios({
58 | method: 'get',
59 | url: '/tic-api/get-settings',
60 | }).then((response) => {
61 | firstRun.value = response.data.data.first_run;
62 | axios({
63 | method: 'get',
64 | url: '/tic-api/tvh-running',
65 | }).then((response) => {
66 | aioMode.value = response.data.data.running;
67 | if (response.data.data.running) {
68 |           // Show a loading message while pinging the Tvheadend backend
69 | $q.loading.show({
70 | message: 'Checking status of Tvheadend backend...',
71 | });
72 | pingBackend();
73 | pingInterval = setInterval(pingBackend, 5000);
74 | }
75 | }).catch(() => {
76 | });
77 | }).catch(() => {
78 | });
79 | };
80 |
81 | checkTvhStatus();
82 |
83 | onBeforeUnmount(() => {
84 | clearInterval(pingInterval);
85 | });
86 |
87 | return {
88 | firstRun,
89 | aioMode,
90 | };
91 | }
92 |
93 | export default function getAioStartupTasks() {
94 | if (!instance) {
95 | instance = createAioStartupTasks();
96 | }
97 | return instance;
98 | }
99 |
--------------------------------------------------------------------------------
/frontend/src/mixins/backgroundTasksMixin.js:
--------------------------------------------------------------------------------
1 | import {ref, onBeforeUnmount} from 'vue';
2 | import {Notify} from 'quasar';
3 |
4 | export default function pollForBackgroundTasks() {
5 | const pendingTasks = ref([]);
6 | const notifications = ref({});
7 | const pendingTasksStatus = ref('running');
8 | let timerId = null;
9 |
10 | const displayCurrentTask = (messageId, taskName) => {
11 | if (!(messageId in notifications.value)) {
12 | notifications.value[messageId] = Notify.create({
13 | group: false,
14 | type: 'ongoing',
15 | position: 'bottom-left',
16 | message: `Executing background task: ${taskName}`,
17 | html: true,
18 | });
19 | } else {
20 | // Update the current status message
21 | notifications.value[messageId]({
22 | message: `Executing background task: ${taskName}`,
23 | html: true,
24 | });
25 | }
26 | };
27 | const dismissMessages = (messageId) => {
28 | if (typeof notifications.value === 'undefined') {
29 | return;
30 | }
31 | if (typeof notifications.value[messageId] === 'function') {
32 | notifications.value[messageId]();
33 | }
34 | if (typeof notifications.value[messageId] !== 'undefined') {
35 | delete notifications.value[messageId];
36 | }
37 | };
38 |
39 | async function fetchData() {
40 | const response = await fetch('/tic-api/get-background-tasks');
41 |     // Bail out if auth is required (401) or the gateway is unavailable (502/504)
42 | if ([401, 502, 504].includes(response.status)) {
43 | // Stop polling
44 | return;
45 | }
46 | if (response.ok) {
47 | let payload = await response.json();
48 | let tasks = [];
49 | if (payload.data['current_task']) {
50 | tasks.push({
51 | 'icon': 'pending',
52 | 'name': payload.data['current_task'],
53 | });
54 | }
55 | for (let i in payload.data['pending_tasks']) {
56 | tasks.push({
57 | 'icon': 'radio_button_unchecked',
58 | 'name': payload.data['pending_tasks'][i],
59 | });
60 | }
61 | pendingTasks.value = tasks;
62 | pendingTasksStatus.value = payload.data['task_queue_status'];
63 | if (payload.data['current_task']) {
64 | displayCurrentTask('currentTask', payload.data['current_task']);
65 | } else {
66 | dismissMessages('currentTask');
67 | }
68 | }
69 | startTimer();
70 | }
71 |
72 | function startTimer() {
73 | timerId = setTimeout(fetchData, 1000);
74 | }
75 |
76 | function stopTimer() {
77 | clearTimeout(timerId);
78 | dismissMessages('currentTask');
79 | }
80 |
81 | fetchData();
82 |
83 | onBeforeUnmount(() => {
84 | stopTimer();
85 | });
86 |
87 | return {
88 | pendingTasks,
89 | pendingTasksStatus,
90 | };
91 | }
92 |
--------------------------------------------------------------------------------
/backend/ffmpeg.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | import asyncio
4 | import json
5 | import re
6 | import subprocess
7 |
8 |
9 | class FFProbeError(Exception):
10 | """
11 | FFProbeError
12 | Custom exception for errors encountered while executing the ffprobe command.
13 | """
14 |
15 |     def __init__(self, path, info):
16 | Exception.__init__(self, "Unable to fetch data from file {}. {}".format(path, info))
17 | self.path = path
18 | self.info = info
19 |
20 |
21 | async def ffprobe_cmd(params):
22 | """
23 | Execute a ffprobe command subprocess and read the output asynchronously
24 | :param params:
25 | :return:
26 | """
27 | command = ["ffprobe"] + params
28 |
29 | print(" ".join(command))
30 |
31 | process = await asyncio.create_subprocess_exec(
32 | *command,
33 | stdout=asyncio.subprocess.PIPE,
34 | stderr=asyncio.subprocess.STDOUT
35 | )
36 |
37 | out, _ = await process.communicate()
38 |
39 | # Check for results
40 | try:
41 | raw_output = out.decode("utf-8")
42 | except Exception as e:
43 | raise FFProbeError(command, str(e))
44 |
45 |     if process.returncode != 0 or 'error' in raw_output.lower():
46 | raise FFProbeError(command, raw_output)
47 | if not raw_output:
48 | raise FFProbeError(command, 'No info found')
49 |
50 | return raw_output
51 |
52 |
53 | async def ffprobe_file(vid_file_path):
54 | """
55 | Returns a dictionary result from ffprobe command line probe of a file
56 | :param vid_file_path: The absolute (full) path of the video file, string.
57 | :return:
58 | """
59 |     if not isinstance(vid_file_path, str):
60 | raise Exception('Give ffprobe a full file path of the video')
61 |
62 | params = [
63 | "-loglevel", "quiet",
64 | "-print_format", "json",
65 | "-show_format",
66 | "-show_streams",
67 | "-show_error",
68 | "-show_chapters",
69 | vid_file_path
70 | ]
71 |
72 | # Check result
73 | results = await ffprobe_cmd(params)
74 | try:
75 | info = json.loads(results)
76 | except Exception as e:
77 | raise FFProbeError(vid_file_path, str(e))
78 |
79 | return info
80 |
81 |
82 | def generate_iptv_url(config, url='', service_name=''):
83 | if not url.startswith('pipe://'):
84 | settings = config.read_settings()
85 | if settings['settings']['enable_stream_buffer']:
86 | ffmpeg_args = settings['settings']['default_ffmpeg_pipe_args']
87 | ffmpeg_args = ffmpeg_args.replace("[URL]", url)
88 | service_name = re.sub(r'[^a-zA-Z0-9 \n\.]', '', service_name)
89 | service_name = re.sub(r'\s', '-', service_name)
90 | ffmpeg_args = ffmpeg_args.replace("[SERVICE_NAME]", service_name.lower())
91 | url = f"pipe://ffmpeg {ffmpeg_args}"
92 | return url
93 |
--------------------------------------------------------------------------------
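A minimal sketch of how `generate_iptv_url()` above rewrites a stream URL when the stream buffer is enabled. The settings shape mirrors what the function reads via `config.read_settings()`, but the pipe-args template here is illustrative only, not the project's shipped default:

```python
# Hypothetical stub config to exercise generate_iptv_url(); the template
# below is an example, not the application's real default_ffmpeg_pipe_args.
from backend.ffmpeg import generate_iptv_url


class StubConfig:
    def read_settings(self):
        return {
            'settings': {
                'enable_stream_buffer': True,
                'default_ffmpeg_pipe_args': '-re -i [URL] -c copy '
                                            '-metadata service_name=[SERVICE_NAME] '
                                            '-f mpegts pipe:1',
            }
        }


url = generate_iptv_url(StubConfig(), url='http://example.com/stream.m3u8', service_name='My Channel 1!')
print(url)
# pipe://ffmpeg -re -i http://example.com/stream.m3u8 -c copy -metadata service_name=my-channel-1 -f mpegts pipe:1
```

Note how the service name is sanitised (punctuation stripped, whitespace replaced with hyphens, lower-cased) before being substituted into the template.
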
/requirements.txt:
--------------------------------------------------------------------------------
1 | #
2 | # This file is autogenerated by pip-compile with Python 3.12
3 | # by the following command:
4 | #
5 | # pip-compile ./requirements.in
6 | #
7 | aiofiles==23.2.1
8 | # via quart
9 | aiohttp==3.9.5
10 | # via -r ./requirements.in
11 | aiosignal==1.3.1
12 | # via aiohttp
13 | aiosqlite==0.20.0
14 | # via -r ./requirements.in
15 | alembic==1.16.5
16 | # via -r ./requirements.in
17 | apscheduler==3.10.4
18 | # via -r ./requirements.in
19 | attrs==23.2.0
20 | # via aiohttp
21 | beautifulsoup4==4.12.3
22 | # via -r ./requirements.in
23 | blinker==1.8.2
24 | # via
25 | # flask
26 | # quart
27 | certifi==2024.6.2
28 | # via requests
29 | charset-normalizer==3.3.2
30 | # via requests
31 | click==8.1.7
32 | # via
33 | # flask
34 | # quart
35 | flask==3.0.3
36 | # via
37 | # flask-sqlalchemy
38 | # quart
39 | flask-sqlalchemy==3.1.1
40 | # via -r ./requirements.in
41 | frozenlist==1.4.1
42 | # via
43 | # aiohttp
44 | # aiosignal
45 | greenlet==3.2.4
46 | # via sqlalchemy
47 | h11==0.14.0
48 | # via
49 | # hypercorn
50 | # wsproto
51 | h2==4.1.0
52 | # via hypercorn
53 | hpack==4.0.0
54 | # via h2
55 | hypercorn==0.17.3
56 | # via quart
57 | hyperframe==6.0.1
58 | # via h2
59 | idna==3.7
60 | # via
61 | # requests
62 | # yarl
63 | itsdangerous==2.2.0
64 | # via
65 | # flask
66 | # quart
67 | jinja2==3.1.4
68 | # via
69 | # flask
70 | # quart
71 | m3u-ipytv==0.2.8
72 | # via -r ./requirements.in
73 | mako==1.3.5
74 | # via alembic
75 | markupsafe==2.1.5
76 | # via
77 | # jinja2
78 | # mako
79 | # quart
80 | # werkzeug
81 | mergedeep==1.3.4
82 | # via -r ./requirements.in
83 | multidict==6.0.5
84 | # via
85 | # aiohttp
86 | # yarl
87 | priority==2.0.0
88 | # via hypercorn
89 | psutil==7.0.0
90 | # via -r ./requirements.in
91 | pytz==2024.1
92 | # via apscheduler
93 | pyyaml==6.0.1
94 | # via -r ./requirements.in
95 | quart==0.19.6
96 | # via
97 | # -r ./requirements.in
98 | # quart-flask-patch
99 | quart-flask-patch==0.3.0
100 | # via -r ./requirements.in
101 | requests==2.32.3
102 | # via
103 | # -r ./requirements.in
104 | # m3u-ipytv
105 | six==1.16.0
106 | # via apscheduler
107 | soupsieve==2.5
108 | # via beautifulsoup4
109 | sqlalchemy==2.0.43
110 | # via
111 | # -r ./requirements.in
112 | # alembic
113 | # flask-sqlalchemy
114 | typing-extensions==4.12.2
115 | # via
116 | # aiosqlite
117 | # alembic
118 | # sqlalchemy
119 | tzlocal==5.2
120 | # via apscheduler
121 | urllib3==2.2.1
122 | # via requests
123 | werkzeug==3.0.3
124 | # via
125 | # flask
126 | # quart
127 | wsproto==1.2.0
128 | # via hypercorn
129 | yarl==1.9.4
130 | # via aiohttp
131 |
--------------------------------------------------------------------------------
/backend/api/routes_epgs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 | from backend.api.tasks import TaskQueueBroker
4 | from backend.auth import admin_auth_required
5 | from backend.epgs import read_config_all_epgs, add_new_epg, read_config_one_epg, update_epg, delete_epg, \
6 | import_epg_data, read_channels_from_all_epgs
7 | from backend.api import blueprint
8 | from quart import request, jsonify, current_app
9 |
10 |
11 | @blueprint.route('/tic-api/epgs/get', methods=['GET'])
12 | @admin_auth_required
13 | async def api_get_epgs_list():
14 | all_epg_configs = await read_config_all_epgs()
15 | return jsonify(
16 | {
17 | "success": True,
18 | "data": all_epg_configs
19 | }
20 | )
21 |
22 |
23 | @blueprint.route('/tic-api/epgs/settings/new', methods=['POST'])
24 | @admin_auth_required
25 | async def api_add_new_epg():
26 | json_data = await request.get_json()
27 | await add_new_epg(json_data)
28 | return jsonify(
29 | {
30 | "success": True
31 | }
32 | )
33 |
34 |
35 | @blueprint.route('/tic-api/epgs/settings/<epg_id>', methods=['GET'])
36 | @admin_auth_required
37 | async def api_get_epg_config(epg_id):
38 | epg_config = await read_config_one_epg(epg_id)
39 | return jsonify(
40 | {
41 | "success": True,
42 | "data": epg_config
43 | }
44 | )
45 |
46 |
47 | @blueprint.route('/tic-api/epgs/settings/<epg_id>/save', methods=['POST'])
48 | @admin_auth_required
49 | async def api_set_epg_config(epg_id):
50 | json_data = await request.get_json()
51 | await update_epg(epg_id, json_data)
52 | # TODO: Trigger an update of the cached EPG config
53 | return jsonify(
54 | {
55 | "success": True
56 | }
57 | )
58 |
59 |
60 | @blueprint.route('/tic-api/epgs/settings/<epg_id>/delete', methods=['DELETE'])
61 | @admin_auth_required
62 | async def api_delete_epg(epg_id):
63 | config = current_app.config['APP_CONFIG']
64 | await delete_epg(config, epg_id)
65 | # TODO: Trigger an update of the cached EPG config
66 | return jsonify(
67 | {
68 | "success": True
69 | }
70 | )
71 |
72 |
73 | @blueprint.route('/tic-api/epgs/update/<epg_id>', methods=['POST'])
74 | @admin_auth_required
75 | async def api_update_epg(epg_id):
76 | config = current_app.config['APP_CONFIG']
77 | task_broker = await TaskQueueBroker.get_instance()
78 | await task_broker.add_task({
79 | 'name': f'Update EPG - ID: {epg_id}',
80 | 'function': import_epg_data,
81 | 'args': [config, epg_id],
82 | }, priority=20)
83 | return jsonify(
84 | {
85 | "success": True,
86 | }
87 | )
88 |
89 |
90 | @blueprint.route('/tic-api/epgs/channels', methods=['GET'])
91 | @admin_auth_required
92 | async def api_get_all_epg_channels():
93 | config = current_app.config['APP_CONFIG']
94 | epgs_channels = await read_channels_from_all_epgs(config)
95 | return jsonify(
96 | {
97 | "success": True,
98 | "data": epgs_channels
99 | }
100 | )
101 |
--------------------------------------------------------------------------------
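A hypothetical client call for the EPG update endpoint above, assuming a local install listening on port 9985 (the app port used in the compose files), EPG id 1, and the default `admin`/`admin` credentials from backend/auth.py:

```python
# Hypothetical client-side call; host, port, EPG id, and credentials are
# all assumptions for illustration.
import requests

resp = requests.post(
    "http://localhost:9985/tic-api/epgs/update/1",
    auth=("admin", "admin"),  # admin_auth_required also accepts Basic auth
)
print(resp.json())  # {"success": True} once the update task is queued
```
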
/backend/auth.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 | import hashlib
4 | import base64
5 | import time
6 | from functools import wraps
7 |
8 | from quart import Response, request, current_app
9 |
10 | digest_auth_realm = "tvheadend"
11 |
12 |
13 | def unauthorized_response(auth_type="Basic"):
14 | if auth_type == "Basic":
15 | auth_header = 'Basic realm="{}"'.format(digest_auth_realm)
16 | else:
17 | nonce = hashlib.md5(str(time.time()).encode()).hexdigest()
18 | auth_header = (
19 | f'Digest realm="{digest_auth_realm}", '
20 | f'qop="auth", nonce="{nonce}", opaque="abcdef"'
21 | )
22 |
23 | response = Response(status=401)
24 | response.headers["WWW-Authenticate"] = auth_header
25 | return response
26 |
27 |
28 | def validate_digest_auth(auth_info, admin_user):
29 | username = auth_info.get("username")
30 | if username != admin_user.get('username'):
31 | return False
32 |
33 | ha1 = hashlib.md5(f"{username}:{digest_auth_realm}:{admin_user.get('password')}".encode()).hexdigest()
34 | ha2 = hashlib.md5(f'{request.method}:{auth_info["uri"]}'.encode()).hexdigest()
35 | response = hashlib.md5(
36 | f'{ha1}:{auth_info["nonce"]}:{auth_info["nc"]}:'
37 | f'{auth_info["cnonce"]}:{auth_info["qop"]}:{ha2}'.encode()
38 | ).hexdigest()
39 |
40 | return response == auth_info.get("response")
41 |
42 |
43 | def validate_basic_auth(auth_info, admin_user):
44 |     username, password = base64.b64decode(auth_info).decode().split(':', 1)
45 | return username == admin_user['username'] and password == admin_user['password']
46 |
47 |
48 | async def check_auth():
49 | config = current_app.config['APP_CONFIG']
50 | settings = config.read_settings()
51 | if not settings.get('settings', {}).get('enable_admin_user', True):
52 | return True
53 |
54 | # Check if Authorization header is present
55 | auth = request.headers.get("Authorization")
56 | if not auth:
57 | return False
58 |
59 | # Check if auth is "Digest" type
60 | if auth.startswith("Digest "):
61 | # Validate provided auth
62 | auth_type = "Digest"
63 | auth_info = {}
64 | for item in auth[len("Digest "):].split(","):
65 | key, value = item.split("=", 1)
66 | auth_info[key.strip()] = value.strip().replace('"', '')
67 | # If not "Digest" auth, then it must be Basic auth
68 | elif auth.startswith("Basic "):
69 | auth_type = "Basic"
70 | auth_info = auth[len("Basic "):].strip()
71 | else:
72 | return False
73 |
74 | admin_user = {
75 | 'username': 'admin',
76 | 'password': settings['settings'].get('admin_password', 'admin'),
77 | }
78 |
79 | if auth_type == "Digest" and not validate_digest_auth(auth_info, admin_user):
80 | return False
81 | elif auth_type == "Basic" and not validate_basic_auth(auth_info, admin_user):
82 | return False
83 |
84 | return True
85 |
86 |
87 | def admin_auth_required(func):
88 | @wraps(func)
89 | async def decorated_function(*args, **kwargs):
90 | if await check_auth():
91 | return await func(*args, **kwargs)
92 | return unauthorized_response()
93 |
94 | return decorated_function
95 |
--------------------------------------------------------------------------------
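For reference, `validate_digest_auth()` above verifies the standard RFC 2617 `qop="auth"` calculation. A self-contained sketch of the same hash chain a client would perform; every input value below is an illustrative placeholder:

```python
# Sketch of the RFC 2617 digest calculation that validate_digest_auth()
# verifies. All values here are illustrative placeholders.
import hashlib


def md5_hex(s: str) -> str:
    return hashlib.md5(s.encode()).hexdigest()


realm, username, password = "tvheadend", "admin", "admin"
method, uri = "GET", "/tic-api/get-settings"
nonce, nc, cnonce, qop = "abc123", "00000001", "deadbeef", "auth"

ha1 = md5_hex(f"{username}:{realm}:{password}")
ha2 = md5_hex(f"{method}:{uri}")
response = md5_hex(f"{ha1}:{nonce}:{nc}:{cnonce}:{qop}:{ha2}")
print(response)  # the value the client sends back in the Authorization header
```
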
/requirements.in:
--------------------------------------------------------------------------------
1 | #
2 | # This requirements.in file manages the requirements.txt file using the pip-tools package.
3 | #
4 | # To update the requirements.txt file, run these commands:
5 | # 1) Install pip-tools
6 | # > pip install pip-tools
7 | # 2) Use pip-compile to build the requirements.txt file using one of these command examples
8 | #       > pip-compile ./requirements.in                            # Build a requirements.txt file from the requirements.in file without upgrading existing pinned dependencies
9 | # > pip-compile ./requirements.in --upgrade # Try to upgrade all dependencies to their latest versions
10 | # > pip-compile ./requirements.in --upgrade-package flask # Only update the flask package
11 | # 3) Test that all local build dependencies now install without conflict. Update requirements-dev.txt dependencies as required.
12 | # > python -m pip install -r ./requirements.txt -r ./requirements-dev.txt
13 | #
14 |
15 |
16 |
17 | # ----------------------------- Core Packages ---------------------------- #
18 | #
19 | # Description: For core packages used by the project.
20 | #
21 |
22 | # -- Server or stand-alone executables
23 | Quart~=0.19
24 | # Reason: This is the web server framework
25 | # Import example: N/A
26 |
27 | # -- Extensions
28 | quart-flask-patch~=0.3
29 | # Reason: Quart-Flask-Patch is a Quart extension that patches Quart to work with Flask extensions.
30 | # Import example: import quart_flask_patch
31 | Flask-SQLAlchemy~=3.1
32 | # Reason: Needed for legacy support of some endpoints that have yet to be converted to plain SQLAlchemy.
33 | # Import example: from flask_sqlalchemy import SQLAlchemy
34 | SQLAlchemy>=2.0.32,<3.0
35 | # Reason: Database Abstraction Library. >=2.0.32 brings fixes for Python 3.13 runtime (e.g., generic TypeError/AttributeError bugs) – conservative upper bound <3.0.
36 | # Import example: from sqlalchemy import
37 | alembic~=1.13
38 | # Reason: SQLAlchemy database migrations for the application
39 | # Import example: from alembic import context
40 | APScheduler~=3.10
41 | # Reason: Adds APScheduler for running scheduled tasks
42 | # Import example: from apscheduler.schedulers.asyncio import AsyncIOScheduler
43 |
44 | # -- Support libraries
45 | aiosqlite>=0.20
46 | # Reason: Async SQLite driver.
47 | # Import example: sqlite+aiosqlite:///
48 | aiohttp>=3.9
49 | # Reason: Async http client/server framework (asyncio). Required for the proxy server.
50 | # Import example: import aiohttp
51 | m3u-ipytv~=0.2.7
52 | # Reason: A library for handling M3U playlists for IPTV (AKA m3u_plus)
53 | # Import example: from ipytv import playlist
54 | mergedeep>=1.3.4
55 | # Reason: Used to merge 2 dictionaries when updating the YAML config file
56 | # Import example: from mergedeep import merge
57 | PyYAML~=6.0
58 | # Reason: YAML parser and emitter for Python
59 | # Import example: import yaml
60 | requests>=2.31.0
61 | # Reason: HTTP requests
62 | # Import example: import requests
63 | beautifulsoup4>=4.12.3
64 | # Reason: HTTP parsing
65 | # Import example: from bs4 import BeautifulSoup
66 | psutil>=5.9.8
67 | # Reason: Optional process/memory metrics used by HLS proxy cache cleanup task
68 | # Import example: import psutil
69 |
--------------------------------------------------------------------------------
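To illustrate the mergedeep usage described above (deep-merging a settings patch into an existing config dict), a tiny sketch with made-up keys:

```python
# Tiny demo of the mergedeep pattern noted above; the keys are illustrative,
# not the application's actual YAML schema.
from mergedeep import merge

current = {'settings': {'first_run': True, 'admin_password': 'admin'}}
patch = {'settings': {'first_run': False}}
merge(current, patch)  # merges in place, recursing into nested dicts
print(current)  # {'settings': {'first_run': False, 'admin_password': 'admin'}}
```
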
/migrations/versions/f3d254922d25_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: f3d254922d25
4 | Revises: 044b003faaaa
5 | Create Date: 2025-09-02 13:16:18.716341
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = 'f3d254922d25'
14 | down_revision = '044b003faaaa'
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.add_column('epg_channel_programmes', sa.Column('summary', sa.String(length=1000), nullable=True))
22 | op.add_column('epg_channel_programmes', sa.Column('keywords', sa.String(length=1000), nullable=True))
23 | op.add_column('epg_channel_programmes', sa.Column('credits_json', sa.String(length=4000), nullable=True))
24 | op.add_column('epg_channel_programmes', sa.Column('video_colour', sa.String(length=10), nullable=True))
25 | op.add_column('epg_channel_programmes', sa.Column('video_aspect', sa.String(length=32), nullable=True))
26 | op.add_column('epg_channel_programmes', sa.Column('video_quality', sa.String(length=16), nullable=True))
27 | op.add_column('epg_channel_programmes', sa.Column('subtitles_type', sa.String(length=32), nullable=True))
28 | op.add_column('epg_channel_programmes', sa.Column('audio_described', sa.Boolean(), nullable=True))
29 | op.add_column('epg_channel_programmes', sa.Column('previously_shown_date', sa.String(length=32), nullable=True))
30 | op.add_column('epg_channel_programmes', sa.Column('premiere', sa.Boolean(), nullable=True))
31 | op.add_column('epg_channel_programmes', sa.Column('is_new', sa.Boolean(), nullable=True))
32 | op.add_column('epg_channel_programmes', sa.Column('epnum_onscreen', sa.String(length=64), nullable=True))
33 | op.add_column('epg_channel_programmes', sa.Column('epnum_xmltv_ns', sa.String(length=64), nullable=True))
34 | op.add_column('epg_channel_programmes', sa.Column('epnum_dd_progid', sa.String(length=64), nullable=True))
35 | op.add_column('epg_channel_programmes', sa.Column('star_rating', sa.String(length=16), nullable=True))
36 | op.add_column('epg_channel_programmes', sa.Column('production_year', sa.String(length=8), nullable=True))
37 | op.add_column('epg_channel_programmes', sa.Column('rating_system', sa.String(length=32), nullable=True))
38 | op.add_column('epg_channel_programmes', sa.Column('rating_value', sa.String(length=64), nullable=True))
39 | # ### end Alembic commands ###
40 |
41 |
42 | def downgrade():
43 | # ### commands auto generated by Alembic - please adjust! ###
44 | op.drop_column('epg_channel_programmes', 'rating_value')
45 | op.drop_column('epg_channel_programmes', 'rating_system')
46 | op.drop_column('epg_channel_programmes', 'production_year')
47 | op.drop_column('epg_channel_programmes', 'star_rating')
48 | op.drop_column('epg_channel_programmes', 'epnum_dd_progid')
49 | op.drop_column('epg_channel_programmes', 'epnum_xmltv_ns')
50 | op.drop_column('epg_channel_programmes', 'epnum_onscreen')
51 | op.drop_column('epg_channel_programmes', 'is_new')
52 | op.drop_column('epg_channel_programmes', 'premiere')
53 | op.drop_column('epg_channel_programmes', 'previously_shown_date')
54 | op.drop_column('epg_channel_programmes', 'audio_described')
55 | op.drop_column('epg_channel_programmes', 'subtitles_type')
56 | op.drop_column('epg_channel_programmes', 'video_quality')
57 | op.drop_column('epg_channel_programmes', 'video_aspect')
58 | op.drop_column('epg_channel_programmes', 'video_colour')
59 | op.drop_column('epg_channel_programmes', 'credits_json')
60 | op.drop_column('epg_channel_programmes', 'keywords')
61 | op.drop_column('epg_channel_programmes', 'summary')
62 | # ### end Alembic commands ###
63 |
--------------------------------------------------------------------------------
/backend/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 | import logging
4 | import time
5 | from importlib import import_module
6 | from logging.config import dictConfig
7 |
8 | import quart_flask_patch
9 | from quart import Quart
10 |
11 | from backend import config
12 | from backend.api import tasks
13 |
14 | dictConfig({
15 | 'version': 1,
16 | 'formatters': {
17 | 'default': {
18 | 'format': '%(asctime)s:%(levelname)s:%(name)s: - %(message)s',
19 | }
20 | },
21 | 'loggers': {
22 | 'quart.app': {
23 | 'level': 'ERROR',
24 | },
25 | },
26 | 'handlers': {
27 | 'wsgi': {
28 | 'class': 'logging.StreamHandler',
29 | 'stream': 'ext://sys.stderr',
30 | 'formatter': 'default'
31 | }
32 | },
33 | 'root': {
34 | 'level': 'INFO',
35 | 'handlers': ['wsgi']
36 | }
37 | })
38 |
39 |
40 | # Custom logging filter that ignores log messages for specific endpoints
41 | class IgnoreLoggingRoutesFilter(logging.Filter):
42 | def filter(self, record):
43 | if "/tic-api/get-background-tasks" in record.getMessage():
44 | return False
45 | return True
46 |
47 |
48 | def init_db(app):
49 | from backend.models import db
50 | app.config["SQLALCHEMY_DATABASE_URI"] = config.sqlalchemy_database_uri
51 | app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = config.sqlalchemy_track_modifications
52 | # Increase SQLite timeout to reduce 'database is locked' errors under concurrent access
53 | app.config.setdefault("SQLALCHEMY_ENGINE_OPTIONS", {})
54 | engine_opts = app.config["SQLALCHEMY_ENGINE_OPTIONS"]
55 | connect_args = engine_opts.get("connect_args", {})
56 | # Only set timeout if not already user-defined
57 | connect_args.setdefault("timeout", 30) # seconds
58 | engine_opts["connect_args"] = connect_args
59 | app.config["SQLALCHEMY_ENGINE_OPTIONS"] = engine_opts
60 | db.init_app(app)
61 |
62 | # Enable WAL + relaxed synchronous for better concurrent write characteristics on SQLite
63 | try:
64 | from sqlalchemy import text
65 | with app.app_context():
66 | db.session.execute(text("PRAGMA journal_mode=WAL"))
67 | db.session.execute(text("PRAGMA synchronous=NORMAL"))
68 | db.session.commit()
69 | except Exception:
70 | # Ignore if not SQLite or already configured
71 | pass
72 |
73 | @app.teardown_appcontext
74 | def shutdown_session(exception=None):
75 | db.session.remove()
76 |
77 | return db
78 |
79 |
80 | def register_blueprints(app):
81 | module = import_module('backend.api.routes')
82 | import_module('backend.api.routes_playlists')
83 | import_module('backend.api.routes_epgs')
84 | import_module('backend.api.routes_channels')
85 | import_module('backend.api.routes_playlist_proxy')
86 | import_module('backend.api.routes_hls_proxy')
87 | app.register_blueprint(module.blueprint)
88 |
89 |
90 | def create_app():
91 | # Fetch app config
92 | app_config = config.Config()
93 | app_config.runtime_key = int(time.time())
94 | # Create app
95 | app = Quart(__name__, instance_relative_config=True)
96 | app.config["SECRET_KEY"] = config.secret_key
97 | app.config["SCHEDULER_API_ENABLED"] = config.scheduler_api_enabled
98 | app.config["APP_CONFIG"] = app_config
99 | app.config["ASSETS_ROOT"] = config.assets_root
100 |
101 | # Init the DB connection
102 | db = init_db(app)
103 |
104 | # Register the route blueprints
105 | register_blueprints(app)
106 |
107 | access_logger = logging.getLogger('hypercorn.access')
108 | app.logger.setLevel(logging.INFO)
109 | access_logger.setLevel(logging.INFO)
110 | if config.enable_app_debugging:
111 | app.logger.setLevel(logging.DEBUG)
112 | access_logger.setLevel(logging.DEBUG)
113 | access_logger.addFilter(IgnoreLoggingRoutesFilter())
114 |
115 | return app
116 |
--------------------------------------------------------------------------------
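The PRAGMA statements in `init_db()` only apply when the backing store is SQLite. A quick way to confirm they took effect, assuming an SQLite database file (the path below is an illustration, not the configured location):

```python
# Quick sanity check for the WAL/synchronous settings applied in init_db().
# The database path is an assumption for illustration.
import sqlite3

con = sqlite3.connect("dev_env/config/app.db")
print(con.execute("PRAGMA journal_mode").fetchone())  # ('wal',) once enabled
print(con.execute("PRAGMA synchronous").fetchone())   # (1,) == NORMAL
con.close()
```

WAL mode persists in the database file, so this check works even from a separate process.
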
/docs/compose-files/docker-compose.aio-with-proxies.yml:
--------------------------------------------------------------------------------
1 | ---
2 | # https://gist.github.com/Josh5/b8ad8cc8c2c945f3c270fe0d1c1a3172#file-docker-wg-net-sh
3 | # docker-wg-net.sh --network-name vpn-uk-net
4 | networks:
5 |   # Create a private 'internal' network for communication between containers (no external access)
6 | private-net:
7 | driver: bridge
8 | internal: true
9 | # Create a bridge for the main service
10 | tic-net:
11 | driver: bridge
12 | # Use the HLS proxy to manage the traffic that requires the VPN
13 | # Use my script here to manage the VPN Docker networks:
14 | # - https://gist.github.com/Josh5/b8ad8cc8c2c945f3c270fe0d1c1a3172#file-docker-wg-net-sh
15 | docker-wg0:
16 | driver: bridge
17 | # Configure as external. This requires that you have first set up the VPN docker network named 'docker-wg0'
18 | external: true
19 |
20 | services:
21 | # -- TVH IPTV CONFIG --
22 | tic:
23 | image: ghcr.io/josh5/tvh-iptv:latest
24 |
25 | # NETWORK:
26 | networks:
27 | - private-net
28 | - tic-net
29 | ports:
30 | # App Port (9985)
31 | - 9985:9985
32 | # TVH Webui
33 | - 9981:9981
34 | # TVH HTSP
35 | - 9982:9982
36 |
37 | # ENVIRONMENT:
38 | environment:
39 | # Process user ID
40 | - PUID=1000
41 | # Process group ID
42 | - PGID=1000
43 | # Timezone
44 | - TZ=Pacific/Auckland
45 | # Skips the DB migrate command execution on container startup
46 | - SKIP_MIGRATIONS=false
47 | # Executes a pip install on container startup (might be required to generate the venv)
48 | - RUN_PIP_INSTALL=false
49 | # Enables debug logging for main application
50 | - ENABLE_APP_DEBUGGING=false
51 | # Enables debug logging for DB queries
52 | - ENABLE_SQLALCHEMY_DEBUGGING=false
53 |
54 | # VOLUMES:
55 | volumes:
56 | # Configuration files
57 | - /data/containers/tvh-iptv/tic-config:/config
58 |
59 | # -- HLS Proxy (option #1) --
60 | #
61 |   # Run Josh5/HLS-Proxy - A simple HLS proxy that uses ffmpeg to buffer video streams. Written by Josh.5.
62 | # REF: https://github.com/Josh5/HLS-Proxy
63 | #
64 | # To use this with TVH-IPTV-Config, configure the playlist with a custom HLS proxy using this URL:
65 | # > http://hls-proxy-1:9987/[B64_URL].m3u8
66 | #
67 | hls-proxy-1:
68 | image: ghcr.io/josh5/hls-proxy:latest
69 |
70 | # NETWORK:
71 | networks:
72 | - private-net
73 | - docker-wg0
74 | dns:
75 | - 1.1.1.1
76 | - 8.8.4.4
77 |
78 | # ENVIRONMENT:
79 | environment:
80 | # Container:
81 | HLS_PROXY_LOG_LEVEL: ${HLS_PROXY_LOG_LEVEL:-1}
82 | HLS_PROXY_HOST_IP: hls-proxy-1
83 |
84 | # -- HLS Proxy (option #2) --
85 | #
86 | # Run node 'HLS-Proxy' - A feature rich HTTP Live Streaming Proxy
87 | # REF: https://github.com/warren-bank/HLS-Proxy
88 | #
89 | # To use this with TVH-IPTV-Config, configure the playlist with a custom HLS proxy using this URL:
90 | # > http://hls-proxy-2:9987/[B64_URL].m3u8
91 | #
92 | hls-proxy-2:
93 | image: node:latest
94 | entrypoint: "sh"
95 | command: |
96 | -c '
97 | set -e
98 |
99 | cd $$(mktemp -d)
100 | npm install "@warren-bank/hls-proxy"
101 |
102 | npx hlsd --version
103 |
104 | # https://github.com/warren-bank/HLS-Proxy?tab=readme-ov-file#options
105 | npx hlsd \
106 |         -v $${HLS_PROXY_LOG_LEVEL} \
107 | --host $${HLS_PROXY_HOST_IP}:9987 \
108 | --port 9987 \
109 | --prefetch \
110 | --max-segments 100 \
111 | --useragent "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36 Edg/96.0.1054.62"
112 | '
113 |
114 | # NETWORK:
115 | networks:
116 | - private-net
117 | - docker-wg0
118 |
119 | # ENVIRONMENT:
120 | environment:
121 | # Container:
122 | HLS_PROXY_LOG_LEVEL: ${LOG_LEVEL:-1}
123 |       HLS_PROXY_HOST_IP: hls-proxy-2 # Must be the same name as the service
124 |
--------------------------------------------------------------------------------
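Both proxy options above take the upstream stream URL base64-encoded in the path (the `[B64_URL]` placeholder). A sketch of building such a proxied URL; whether the proxy expects standard or URL-safe base64 depends on the specific proxy build, so treat the variant below as an assumption and check your proxy's README:

```python
# Hypothetical helper for filling the [B64_URL] placeholder shown above.
# URL-safe base64 is assumed here; confirm the exact encoding variant your
# HLS proxy expects.
import base64

upstream = "http://example.com/live/stream.m3u8"
b64 = base64.urlsafe_b64encode(upstream.encode()).decode()
print(f"http://hls-proxy-1:9987/{b64}.m3u8")
```
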
/run.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 |
4 | from backend.api.tasks import scheduler, update_playlists, map_new_tvh_services, update_epgs, rebuild_custom_epg, \
5 | update_tvh_muxes, configure_tvh_with_defaults, update_tvh_channels, update_tvh_networks, update_tvh_epg, \
6 | TaskQueueBroker
7 | from backend import create_app, config
8 | import asyncio
9 |
10 | # Create app
11 | app = create_app()
12 | if config.enable_app_debugging:
13 | app.logger.info(' DEBUGGING = ' + str(config.enable_app_debugging))
14 | app.logger.debug('DBMS = ' + config.sqlalchemy_database_uri)
15 | app.logger.debug('ASSETS_ROOT = ' + config.assets_root)
16 |
17 | task_logger = app.logger.getChild('tasks')
18 | TaskQueueBroker.initialize(task_logger)
19 |
20 |
21 | @scheduler.scheduled_job('interval', id='background_tasks', seconds=10)
22 | async def background_tasks():
23 | async with app.app_context():
24 | task_broker = await TaskQueueBroker.get_instance()
25 | await task_broker.execute_tasks()
26 |
27 |
28 | @scheduler.scheduled_job('interval', id='do_5_mins', minutes=5, misfire_grace_time=60)
29 | async def every_5_mins():
30 | async with app.app_context():
31 | task_broker = await TaskQueueBroker.get_instance()
32 | await task_broker.add_task({
33 | 'name': 'Mapping all TVH services',
34 | 'function': map_new_tvh_services,
35 | 'args': [app],
36 | }, priority=10)
37 |
38 |
39 | @scheduler.scheduled_job('interval', id='do_60_mins', minutes=60, misfire_grace_time=300)
40 | async def every_60_mins():
41 | async with app.app_context():
42 | task_broker = await TaskQueueBroker.get_instance()
43 | await task_broker.add_task({
44 |             'name': 'Configuring TVH with global defaults',
45 | 'function': configure_tvh_with_defaults,
46 | 'args': [app],
47 | }, priority=11)
48 | await task_broker.add_task({
49 | 'name': 'Configuring TVH networks',
50 | 'function': update_tvh_networks,
51 | 'args': [app],
52 | }, priority=12)
53 | await task_broker.add_task({
54 | 'name': 'Configuring TVH channels',
55 | 'function': update_tvh_channels,
56 | 'args': [app],
57 | }, priority=13)
58 | await task_broker.add_task({
59 | 'name': 'Configuring TVH muxes',
60 | 'function': update_tvh_muxes,
61 | 'args': [app],
62 | }, priority=14)
63 | await task_broker.add_task({
64 | 'name': 'Triggering an update in TVH to fetch the latest XMLTV',
65 | 'function': update_tvh_epg,
66 | 'args': [app],
67 | }, priority=30)
68 |
69 |
70 | @scheduler.scheduled_job('cron', id='do_job_twice_a_day', hour='0/12', minute=1, misfire_grace_time=900)
71 | async def every_12_hours():
72 | async with app.app_context():
73 | task_broker = await TaskQueueBroker.get_instance()
74 | await task_broker.add_task({
75 |             'name': 'Updating all playlists',
76 | 'function': update_playlists,
77 | 'args': [app],
78 | }, priority=100)
79 | await task_broker.add_task({
80 |             'name': 'Updating all EPGs',
81 | 'function': update_epgs,
82 | 'args': [app],
83 | }, priority=100)
84 | await task_broker.add_task({
85 | 'name': 'Recreating static XMLTV file',
86 | 'function': rebuild_custom_epg,
87 | 'args': [app],
88 | }, priority=200)
89 |
90 |
91 | if __name__ == "__main__":
92 | # Create a custom loop
93 | loop = asyncio.get_event_loop()
94 |
95 | # Start scheduler
96 | app.logger.info("Starting scheduler...")
97 | scheduler.start()
98 | app.logger.info("Scheduler started.")
99 |
100 | # Start Quart server
101 | app.logger.info("Starting Quart server...")
102 | app.run(loop=loop, host=config.flask_run_host, port=config.flask_run_port,
103 | debug=config.enable_app_debugging, use_reloader=config.enable_app_debugging)
104 | app.logger.info("Quart server completed.")
105 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Tvheadend IPTV Config
2 | ===========================
3 |
4 | 
5 |
6 |
7 |
8 | [](https://github.com/Josh5/TVH-IPTV-Config/releases)
9 | [](https://github.com/Josh5/TVH-IPTV-Config/issues?q=is%3Aopen+is%3Aissue)
10 | [](https://github.com/Josh5/TVH-IPTV-Config/issues?q=is%3Aissue+is%3Aclosed)
11 | [](https://github.com/Josh5/TVH-IPTV-Config/pulls?q=is%3Aopen+is%3Apr)
12 | [](https://github.com/Josh5/TVH-IPTV-Config/pulls?q=is%3Apr+is%3Aclosed)
13 |
14 | [](https://hub.docker.com/r/josh5/tvh-iptv)
15 | [](https://hub.docker.com/r/josh5/tvh-iptv)
16 | [](https://hub.docker.com/r/josh5/tvh-iptv)
17 |
18 |
19 |
20 | 
21 |
22 | []()
23 | ---
24 |
25 | Tvheadend IPTV Config is a simple wrapper around Tvheadend with the singular goal of making it easy to configure Tvheadend for IPTV playlists.
26 |
27 | Tvheadend is an extremely powerful TV streaming server and recorder with excellent support for IPTV sources. However, for most people, setting this up can be difficult and time-consuming.
28 | The goal of this project is to wrap around Tvheadend and, using its API, configure most of the server for you.
29 |
30 | ### Project Status
31 |
32 | This project is currently in beta. I am publishing builds, but the application is very new and could be full of bugs, inefficient processes, or visual blemishes. That being said, if you would like to contribute to this project, feel free to provide a PR.
33 |
34 | I will probably not be acknowledging any issue reports or testing requests. However, feel free to reach out on [Discord](https://support-api.streamingtech.co.nz/discord) if you would like to contribute any suggestions there.
35 |
36 | ### Table Of Contents
37 |
38 | [Dependencies](#dependencies)
39 |
40 | [Install and Run](#install-and-run)
41 |
42 | [License](#license)
43 |
44 |
45 | ## Dependencies
46 |
47 | - NodeJS ([Install](https://nodejs.org/en/download)).
48 | - Python 3.x ([Install](https://www.python.org/downloads/)).
49 | - Various Python requirements listed in 'requirements.txt' in the project root.
50 | - A TVHeadend server
51 | - Only TVHeadend v4.3+ is tested, though 4.2.8 may work fine.
52 |
53 | ## Install and Run
54 |
55 | - [Run from source](./docs/run-from-source.md)
56 | - [Run with Docker Compose](./docs/run-with-docker-compose.md)
57 |
58 |
59 | ## Development
60 |
61 | ### Run from source with docker compose
62 |
63 | From the project root run:
64 | ```
65 | mkdir -p ./dev_env/config
66 | docker compose -f ./docker/docker-compose.dev-aio.yml up --build
67 | ```
68 |
69 | This will create a directory within this project root called `./dev_env` which contains all configuration and cache data.
70 |
71 | ### Run from source with a Python venv
72 |
73 | To set up a development environment, first complete the [Run from source](./docs/run-from-source.md) setup.
74 |
75 | Then run the project with debugging tools enabled using the script:
76 | ```
77 | ./devops/run_local_dev_env.sh
78 | ```
79 |
80 | ### Updating packages
81 | Activate the Python venv and, from inside it, run:
82 | ```
83 | python3 -m pip install -r ./requirements.txt -r ./requirements-dev.txt
84 | ```
85 | This will install all the current dev dependencies and tools.
86 |
87 | Now run the pip-audit check command. This will scan the pinned requirements for known vulnerabilities.
88 | ```
89 | pip-audit -r ./requirements.txt -r ./requirements-dev.txt
90 | ```
91 | This will print any packages with known vulnerabilities and the versions that fix them. From there you can select which packages you wish to upgrade.
92 |
93 | Once you have upgraded the packages, run this command to upgrade the frozen requirements.
94 | ```
95 | pip-compile ./requirements.in --upgrade
96 | ```
97 |
98 |
99 | ## License
100 |
101 | This project is licensed under the [Apache 2.0 Licence](./LICENSE).
102 |
103 | Copyright (C) Josh Sunnex - All Rights Reserved.
104 |
105 |
--------------------------------------------------------------------------------
/backend/api/routes_channels.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 | import io
4 |
5 | from backend.api import blueprint
6 | from quart import request, jsonify, current_app, send_file
7 |
8 | from backend.auth import admin_auth_required
9 | from backend.channels import read_config_all_channels, add_new_channel, read_config_one_channel, update_channel, \
10 | delete_channel, add_bulk_channels, queue_background_channel_update_tasks, read_channel_logo, add_channels_from_groups
11 |
12 |
13 | @blueprint.route('/tic-api/channels/get', methods=['GET'])
14 | @admin_auth_required
15 | async def api_get_channels():
16 | channels_config = await read_config_all_channels()
17 | return jsonify(
18 | {
19 | "success": True,
20 | "data": channels_config
21 | }
22 | )
23 |
24 |
25 | @blueprint.route('/tic-api/channels/new', methods=['POST'])
26 | @admin_auth_required
27 | async def api_add_new_channel():
28 | json_data = await request.get_json()
29 | config = current_app.config['APP_CONFIG']
30 | await add_new_channel(config, json_data)
31 | await queue_background_channel_update_tasks(config)
32 | return jsonify(
33 | {
34 | "success": True
35 | }
36 | )
37 |
38 |
39 | @blueprint.route('/tic-api/channels/settings/<channel_id>', methods=['GET'])
40 | @admin_auth_required
41 | async def api_get_channel_config(channel_id):
42 | channel_config = read_config_one_channel(channel_id)
43 | return jsonify(
44 | {
45 | "success": True,
46 | "data": channel_config
47 | }
48 | )
49 |
50 |
51 | @blueprint.route('/tic-api/channels/settings/<channel_id>/save', methods=['POST'])
52 | @admin_auth_required
53 | async def api_set_config_channels(channel_id):
54 | json_data = await request.get_json()
55 | config = current_app.config['APP_CONFIG']
56 | await update_channel(config, channel_id, json_data)
57 | await queue_background_channel_update_tasks(config)
58 | return jsonify(
59 | {
60 | "success": True
61 | }
62 | )
63 |
64 |
65 | @blueprint.route('/tic-api/channels/settings/multiple/save', methods=['POST'])
66 | @admin_auth_required
67 | async def api_set_config_multiple_channels():
68 | json_data = await request.get_json()
69 | config = current_app.config['APP_CONFIG']
70 | for channel_id in json_data.get('channels', {}):
71 | channel = json_data['channels'][channel_id]
72 | await update_channel(config, channel_id, channel)
73 | await queue_background_channel_update_tasks(config)
74 | return jsonify(
75 | {
76 | "success": True
77 | }
78 | )
79 |
80 |
81 | @blueprint.route('/tic-api/channels/settings/multiple/add', methods=['POST'])
82 | @admin_auth_required
83 | async def api_add_multiple_channels():
84 | json_data = await request.get_json()
85 | config = current_app.config['APP_CONFIG']
86 | await add_bulk_channels(config, json_data.get('channels', []))
87 | await queue_background_channel_update_tasks(config)
88 | return jsonify(
89 | {
90 | "success": True
91 | }
92 | )
93 |
94 |
95 | @blueprint.route('/tic-api/channels/settings/multiple/delete', methods=['POST'])
96 | @admin_auth_required
97 | async def api_delete_multiple_channels():
98 | json_data = await request.get_json()
99 | config = current_app.config['APP_CONFIG']
100 | current_app.logger.warning(json_data)
101 |
102 | for channel_id in json_data.get('channels', {}):
103 | await delete_channel(config, channel_id)
104 |
105 | # Queue background tasks to update TVHeadend
106 | await queue_background_channel_update_tasks(config)
107 |
108 | return jsonify({
109 | "success": True
110 | })
111 |
112 |
113 | @blueprint.route('/tic-api/channels/settings/<channel_id>/delete', methods=['DELETE'])
114 | @admin_auth_required
115 | async def api_delete_config_channels(channel_id):
116 | config = current_app.config['APP_CONFIG']
117 | await delete_channel(config, channel_id)
118 | return jsonify(
119 | {
120 | "success": True
121 | }
122 | )
123 |
124 |
125 | @blueprint.route('/tic-api/channels/<channel_id>/logo/<file_placeholder>', methods=['GET'])
126 | async def api_get_channel_logo(channel_id, file_placeholder):
127 | image_base64_string, mime_type = await read_channel_logo(channel_id)
128 | # Convert to a BytesIO object for sending file
129 | image_io = io.BytesIO(image_base64_string)
130 | image_io.seek(0)
131 | # Return file blob
132 | return await send_file(image_io, mimetype=mime_type)
133 |
134 | @blueprint.route('/tic-api/channels/settings/groups/add', methods=['POST'])
135 | @admin_auth_required
136 | async def api_add_channels_from_groups():
137 | json_data = await request.get_json()
138 | groups = json_data.get('groups', [])
139 |
140 | if not groups:
141 | return jsonify({
142 | "success": False,
143 | "message": "No groups provided"
144 | }), 400
145 |
146 | config = current_app.config['APP_CONFIG']
147 |
148 | # This function needs to be implemented in the channels module
149 | # It should add all channels from the specified groups
150 | added_count = await add_channels_from_groups(config, groups)
151 |
152 | await queue_background_channel_update_tasks(config)
153 |
154 | return jsonify({
155 | "success": True,
156 | "data": {
157 | "added_count": added_count
158 | }
159 | })
160 |
161 |
--------------------------------------------------------------------------------
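The route api_add_channels_from_groups above calls add_channels_from_groups, which the inline comment flags as not yet implemented in the channels module. A minimal sketch of one way it could look, assuming the Session and PlaylistStreams models from backend/models.py and the existing add_new_channel helper in backend/channels.py; the dict shape passed to add_new_channel is an illustrative assumption, not the project's actual schema:

    # Hypothetical sketch for backend/channels.py - not the project's actual code.
    from sqlalchemy import select

    from backend.models import PlaylistStreams, Session


    async def add_channels_from_groups(config, groups):
        """Create a channel for every playlist stream in the given group titles."""
        added_count = 0
        async with Session() as session:
            result = await session.execute(
                select(PlaylistStreams).where(PlaylistStreams.group_title.in_(groups))
            )
            for stream in result.scalars():
                # Assumed payload shape; align with whatever add_new_channel() expects.
                await add_new_channel(config, {
                    'enabled': True,
                    'name': stream.name,
                    'logo_url': stream.tvg_logo,
                    'number': stream.tvg_chno,
                    'sources': [{
                        'playlist_id': stream.playlist_id,
                        'playlist_stream_name': stream.name,
                        'playlist_stream_url': stream.url,
                    }],
                })
                added_count += 1
        return added_count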
/backend/api/routes_playlists.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 | import os
4 |
5 | from backend.api.tasks import TaskQueueBroker
6 | from backend.auth import admin_auth_required
7 | from backend.channels import queue_background_channel_update_tasks
8 | from backend.playlists import read_config_all_playlists, add_new_playlist, read_config_one_playlist, update_playlist, \
9 | delete_playlist, import_playlist_data, read_stream_details_from_all_playlists, probe_playlist_stream, \
10 | read_filtered_stream_details_from_all_playlists, get_playlist_groups
11 |
12 | from backend.api import blueprint
13 | from quart import request, jsonify, current_app
14 |
15 | frontend_dir = os.path.join(os.path.dirname(os.path.abspath(os.path.dirname(__file__))), 'frontend')
16 | static_assets = os.path.join(frontend_dir, 'dist', 'spa')
17 |
18 |
19 | @blueprint.route('/tic-api/playlists/get', methods=['GET'])
20 | @admin_auth_required
21 | async def api_get_playlists_list():
22 | config = current_app.config['APP_CONFIG']
23 | all_playlist_configs = await read_config_all_playlists(config)
24 | return jsonify(
25 | {
26 | "success": True,
27 | "data": all_playlist_configs
28 | }
29 | )
30 |
31 |
32 | @blueprint.route('/tic-api/playlists/new', methods=['POST'])
33 | @admin_auth_required
34 | async def api_add_new_playlist():
35 | json_data = await request.get_json()
36 | config = current_app.config['APP_CONFIG']
37 | await add_new_playlist(config, json_data)
38 | return jsonify(
39 | {
40 | "success": True
41 | }
42 | )
43 |
44 |
45 | @blueprint.route('/tic-api/playlists/settings/<int:playlist_id>', methods=['GET'])
46 | @admin_auth_required
47 | async def api_get_playlist_config(playlist_id):
48 | config = current_app.config['APP_CONFIG']
49 | playlist_config = await read_config_one_playlist(config, playlist_id)
50 | return jsonify(
51 | {
52 | "success": True,
53 | "data": playlist_config
54 | }
55 | )
56 |
57 |
58 | @blueprint.route('/tic-api/playlists/settings/<int:playlist_id>/save', methods=['POST'])
59 | @admin_auth_required
60 | async def api_set_config_playlists(playlist_id):
61 | json_data = await request.get_json()
62 | config = current_app.config['APP_CONFIG']
63 | await update_playlist(config, playlist_id, json_data)
64 | return jsonify(
65 | {
66 | "success": True
67 | }
68 | )
69 |
70 |
71 | @blueprint.route('/tic-api/playlists/<int:playlist_id>/delete', methods=['DELETE'])
72 | @admin_auth_required
73 | async def api_delete_playlist(playlist_id):
74 | config = current_app.config['APP_CONFIG']
75 | await delete_playlist(config, playlist_id)
76 | await queue_background_channel_update_tasks(config)
77 | return jsonify(
78 | {
79 | "success": True
80 | }
81 | )
82 |
83 |
84 | @blueprint.route('/tic-api/playlists/update/<int:playlist_id>', methods=['POST'])
85 | @admin_auth_required
86 | async def api_update_playlist(playlist_id):
87 | config = current_app.config['APP_CONFIG']
88 | task_broker = await TaskQueueBroker.get_instance()
89 | await task_broker.add_task({
90 | 'name': f'Update playlist - ID: {playlist_id}',
91 | 'function': import_playlist_data,
92 | 'args': [config, playlist_id],
93 | }, priority=20)
94 | return jsonify(
95 | {
96 | "success": True,
97 | }
98 | )
99 |
100 |
101 | @blueprint.route('/tic-api/playlists/streams', methods=['POST'])
102 | @admin_auth_required
103 | async def api_get_filtered_playlist_streams():
104 | json_data = await request.get_json()
105 | results = await read_filtered_stream_details_from_all_playlists(json_data)
106 | return jsonify(
107 | {
108 | "success": True,
109 | "data": results
110 | }
111 | )
112 |
113 |
114 | @blueprint.route('/tic-api/playlists/streams/all', methods=['GET'])
115 | @admin_auth_required
116 | async def api_get_all_playlist_streams():
117 | playlist_streams = await read_stream_details_from_all_playlists()
118 | return jsonify(
119 | {
120 | "success": True,
121 | "data": playlist_streams
122 | }
123 | )
124 |
125 |
126 | @blueprint.route('/tic-api/playlists/stream/probe/<int:playlist_stream_id>', methods=['GET'])
127 | @admin_auth_required
128 | async def api_probe_playlist_stream(playlist_stream_id):
129 | probe = await probe_playlist_stream(playlist_stream_id)
130 | return jsonify(
131 | {
132 | "success": True,
133 | "data": probe
134 | }
135 | )
136 |
137 | @blueprint.route('/tic-api/playlists/groups', methods=['POST'])
138 | @admin_auth_required
139 | async def api_get_playlist_groups():
140 | json_data = await request.get_json()
141 | playlist_id = json_data.get('playlist_id')
142 |
143 | if not playlist_id:
144 | return jsonify({
145 | "success": False,
146 | "message": "Playlist ID is required"
147 | }), 400
148 |
149 | config = current_app.config['APP_CONFIG']
150 |
151 | # Get search/filter parameters
152 | start = json_data.get('start', 0)
153 | length = json_data.get('length', 10)
154 | search_value = json_data.get('search_value', '')
155 | order_by = json_data.get('order_by', 'name')
156 | order_direction = json_data.get('order_direction', 'asc')
157 |
158 | # This function needs to be implemented in the playlists module
159 | # It should fetch all groups from a playlist with filtering/sorting/pagination
160 | groups_data = await get_playlist_groups(
161 | config,
162 | playlist_id,
163 | start=start,
164 | length=length,
165 | search_value=search_value,
166 | order_by=order_by,
167 | order_direction=order_direction
168 | )
169 |
170 | return jsonify({
171 | "success": True,
172 | "data": groups_data
173 | })
174 |
175 |
--------------------------------------------------------------------------------
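api_get_playlist_groups above passes pagination, search and ordering parameters into get_playlist_groups, which the inline comment flags as still to be implemented. A minimal sketch under the assumption that groups are derived from the group_title column of PlaylistStreams (see backend/models.py); only name ordering is handled and the return shape is illustrative:

    # Hypothetical sketch for backend/playlists.py - not the project's actual code.
    from sqlalchemy import func, select

    from backend.models import PlaylistStreams, Session


    async def get_playlist_groups(config, playlist_id, start=0, length=10,
                                  search_value='', order_by='name', order_direction='asc'):
        """Return one page of distinct group titles for a playlist, with stream counts."""
        async with Session() as session:
            query = (
                select(PlaylistStreams.group_title, func.count().label('stream_count'))
                .where(PlaylistStreams.playlist_id == playlist_id)
                .group_by(PlaylistStreams.group_title)
            )
            if search_value:
                query = query.where(PlaylistStreams.group_title.ilike(f'%{search_value}%'))
            # Only ordering by group name is supported in this sketch.
            order_col = PlaylistStreams.group_title
            query = query.order_by(order_col.desc() if order_direction == 'desc' else order_col.asc())
            total = (await session.execute(select(func.count()).select_from(query.subquery()))).scalar()
            rows = (await session.execute(query.offset(start).limit(length))).all()
            return {
                'groups': [{'name': name, 'stream_count': count} for name, count in rows],
                'records_total': total,
            }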
/docker/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG BASE_IMAGE=python:3.12-bookworm
2 | FROM ${BASE_IMAGE} AS base_image
3 |
4 | # _____ ____ ___ _ _ _____ _____ _ _ ____ ____ _____ _ ____ _____
5 | # | ___| _ \ / _ \| \ | |_ _| ____| \ | | _ \ / ___|_ _|/ \ / ___| ____|
6 | # | |_ | |_) | | | | \| | | | | _| | \| | | | | \___ \ | | / _ \| | _| _|
7 | # | _| | _ <| |_| | |\ | | | | |___| |\ | |_| | ___) || |/ ___ \ |_| | |___
8 | # |_| |_| \_\\___/|_| \_| |_| |_____|_| \_|____/ |____/ |_/_/ \_\____|_____|
9 | #
10 | FROM node:20-bookworm AS frontend_build_stage
11 |
12 | COPY ./frontend/package.json /build/
13 | COPY ./frontend/package-lock.json /build/
14 |
15 | WORKDIR /build
16 |
17 | RUN \
18 | echo "** Install node build dependencies **" \
19 | && apt-get update \
20 | && apt-get install -y --no-install-recommends \
21 | build-essential \
22 | libffi-dev \
23 | libssl-dev \
24 | python3-dev \
25 | python3-pip \
26 | python3-setuptools \
27 | python3-venv \
28 | python3-wheel \
29 | && \
30 | echo "** Install node dependencies **" \
31 | && npm ci --include dev \
32 | && \
33 | echo
34 |
35 | COPY ./frontend /build
36 | RUN \
37 | echo "** Build frontend **" \
38 | && npm run build:publish \
39 | && \
40 | echo
41 |
42 |
43 | # ____ _ ____ _ _______ _ _ ____ ____ _____ _ ____ _____
44 | # | __ ) / \ / ___| |/ / ____| \ | | _ \ / ___|_ _|/ \ / ___| ____|
45 | # | _ \ / _ \| | | ' /| _| | \| | | | | \___ \ | | / _ \| | _| _|
46 | # | |_) / ___ \ |___| . \| |___| |\ | |_| | ___) || |/ ___ \ |_| | |___
47 | # |____/_/ \_\____|_|\_\_____|_| \_|____/ |____/ |_/_/ \_\____|_____|
48 | #
49 | FROM base_image AS backend_build_stage
50 |
51 | USER root
52 | WORKDIR /var
53 |
54 | RUN \
55 | echo "** Install python build dependencies (only if base image is minimal python)**" \
56 | && apt-get update \
57 | && apt-get install -y --no-install-recommends \
58 | build-essential \
59 | libffi-dev \
60 | libssl-dev \
61 | python3-dev \
62 | python3-pip \
63 | python3-setuptools \
64 | python3-venv \
65 | python3-wheel \
66 | && \
67 | echo
68 |
69 | # Reduce pip noise & ensure deterministic, faster installs
70 | ENV PIP_NO_CACHE_DIR=1 \
71 | PIP_DISABLE_PIP_VERSION_CHECK=1 \
72 | PYTHONDONTWRITEBYTECODE=1 \
73 | PYTHONUNBUFFERED=1
74 |
75 | # Install python dependencies
76 | COPY requirements.txt /requirements.txt
77 | RUN --mount=type=cache,target=/root/.cache/pip \
78 | echo "**** Install python dependencies ****" \
79 | && python3 -m venv --symlinks /var/venv-docker \
80 | && . /var/venv-docker/bin/activate \
81 | && python3 -m pip install --upgrade pip \
82 | && python3 -m pip install -r /requirements.txt \
83 | && \
84 | echo
85 |
86 |
87 | # __ __ _ ___ _ _
88 | # | \/ | / \ |_ _| \ | |
89 | # | |\/| | / _ \ | || \| |
90 | # | | | |/ ___ \ | || |\ |
91 | # |_| |_/_/ \_\___|_| \_|
92 | #
93 | FROM base_image
94 |
95 | USER root
96 |
97 | # Runtime packages
98 | RUN set -eux; \
99 | echo "**** update apt repositories & install base runtime packages ****"; \
100 | apt-get update; \
101 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
102 | bash \
103 | ca-certificates \
104 | curl \
105 | sqlite3 \
106 | tini \
107 | tzdata \
108 | procps \
109 | ffmpeg; \
110 | echo "**** conditional tvheadend dependency libraries (only if tvheadend missing) ****"; \
111 | if ! command -v tvheadend >/dev/null 2>&1; then \
112 | # Pruned to runtime libraries widely available in Debian bookworm. Removed x264, x265, xmltv, pngquant, python3 (already present) and libssl3 (already in the base image) to prevent 'unable to locate package' failures. \
113 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
114 | libavahi-client3 \
115 | libdbus-1-3 \
116 | libdvbcsa1 \
117 | libopus0 \
118 | libpcre2-8-0 \
119 | liburiparser1 \
120 | libva2 \
121 | mesa-va-drivers \
122 | mesa-vdpau-drivers \
123 | zlib1g; \
124 | fi; \
125 | echo "**** optional nginx install (INSTALL_NGINX=1) ****"; \
126 | if [ "${INSTALL_NGINX:-0}" = "1" ]; then \
127 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends nginx; \
128 | fi; \
129 | echo "**** optional Intel media drivers for hardware accel (x86_64 only; requires tvheadend present) ****"; \
130 | if [ "$(uname -m)" = 'x86_64' ] && command -v tvheadend >/dev/null 2>&1; then \
131 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends intel-media-va-driver i965-va-driver || echo "(Intel media drivers not available – continuing)"; \
132 | fi; \
133 | echo "**** cleanup apt cache ****"; \
134 | apt-get clean; \
135 | rm -rf /var/lib/apt/lists/*; \
136 | echo
137 |
138 | # Install gosu
139 | COPY --from=tianon/gosu:bookworm /gosu /usr/local/bin/
140 |
141 | # Install project
142 | COPY --from=backend_build_stage /var/venv-docker /var/venv-docker
143 | COPY --from=frontend_build_stage /build/dist/spa /app/frontend/dist/spa
144 | COPY docker/overlay /
145 | COPY backend /app/backend
146 | COPY migrations /app/migrations
147 | COPY alembic.ini /app/alembic.ini
148 | COPY run.py /app/run.py
149 | COPY db-migrate.sh /app/db-migrate.sh
150 |
151 | # Set environment variables (add venv bin to PATH for implicit execution)
152 | ENV HOME="/config" \
153 | PATH="/var/venv-docker/bin:${PATH}"
154 | ENV FLASK_APP="/app/run.py"
155 | ENV FLASK_RUN_PORT="9985"
156 | ENV FLASK_RUN_HOST="0.0.0.0"
157 | ENV ENABLE_APP_DEBUGGING="false"
158 | ENV ENABLE_SQLALCHEMY_DEBUGGING="false"
159 | ENV SKIP_MIGRATIONS="false"
160 | ENV HLS_PROXY_PREFIX="tic-hls-proxy"
161 |
162 | # Set working directory
163 | WORKDIR /app/
164 |
165 | # Expose ports
166 | EXPOSE 9985/tcp
167 |
168 | # Set version label
169 | ARG VERSION
170 | ARG BUILD_DATE
171 | ARG BASE_IMAGE
172 | LABEL maintainer="Josh.5 <jsunnex@gmail.com>"
173 | LABEL build="Version:- ${VERSION} Build-date:- ${BUILD_DATE} Base:- ${BASE_IMAGE}"
174 |
175 | # Install entrypoint script
176 | COPY ./docker/entrypoint.sh /entrypoint.sh
177 | SHELL ["/bin/bash", "-o", "pipefail", "-c"]
178 | ENTRYPOINT [ "/usr/bin/tini", "--", "/entrypoint.sh" ]
179 |
--------------------------------------------------------------------------------
/backend/api/tasks.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 | import itertools
4 | import logging
5 | from asyncio import Lock, PriorityQueue
6 |
7 | from apscheduler.schedulers.asyncio import AsyncIOScheduler
8 |
9 | scheduler = AsyncIOScheduler()
10 |
11 | logger = logging.getLogger('tic.tasks')
12 |
13 |
14 |
15 | class TaskQueueBroker:
16 | __instance = None
17 | __lock = Lock()
18 | __logger = None
19 |
20 | def __init__(self, **kwargs):
21 | if TaskQueueBroker.__instance is not None:
22 | raise Exception("Singleton instance already exists!")
23 | else:
24 | # Create the singleton instance
25 | TaskQueueBroker.__instance = self
26 | # Create the queue
27 | self.__running_task = None
28 | self.__status = "running"
29 | self.__task_queue = PriorityQueue()
30 | self.__task_names = set()
31 | self.__priority_counter = itertools.count()
32 |
33 | @staticmethod
34 | def initialize(app_logger):
35 | TaskQueueBroker.__logger = app_logger
36 |
37 | @staticmethod
38 | async def get_instance():
39 | # Ensure no other coroutines can access this method at the same time
40 | async with TaskQueueBroker.__lock:
41 | # If the singleton instance has not been created yet, create it
42 | if TaskQueueBroker.__instance is None:
43 | TaskQueueBroker()
44 | return TaskQueueBroker.__instance
45 |
46 | def set_logger(self, app_logger):
47 | self.__logger = app_logger
48 |
49 | async def get_status(self):
50 | return self.__status
51 |
52 | async def toggle_status(self):
53 | if self.__status == "paused":
54 | self.__status = "running"
55 | else:
56 | self.__status = "paused"
57 | return self.__status
58 |
59 | async def add_task(self, task, priority=100):
60 | if task['name'] in self.__task_names:
61 | self.__logger.debug("Task already queued. Ignoring.")
62 | return
63 | await self.__task_queue.put((priority, next(self.__priority_counter), task))
64 | self.__task_names.add(task['name'])
65 |
66 | async def get_next_task(self):
67 | # Get the next task from the queue
68 | if not self.__task_queue.empty():
69 | priority, i, task = await self.__task_queue.get()
70 | self.__task_names.remove(task['name'])
71 | return task
72 | else:
73 | return None
74 |
75 | async def execute_tasks(self):
76 | if self.__running_task is not None:
77 | self.__logger.warning("Another process is already running scheduled tasks.")
78 | if self.__task_queue.empty():
79 | self.__logger.debug("No pending tasks found.")
80 | return
81 | if self.__status == "paused":
82 | self.__logger.debug("Pending tasks queue paused.")
83 | return
84 | while not self.__task_queue.empty():
85 | if self.__status == "paused":
86 | break
87 | priority, i, task = await self.__task_queue.get()
88 | self.__task_names.remove(task['name'])
89 | self.__running_task = task['name']
90 | # Execute task here
91 | try:
92 | self.__logger.info("Executing task - %s.", task['name'])
93 | await task['function'](*task['args'])
94 | except Exception as e:
95 | self.__logger.exception("Failed to run task %s - %s", task['name'], str(e))
96 | self.__running_task = None
97 |
98 | async def get_currently_running_task(self):
99 | return self.__running_task
100 |
101 | async def get_pending_tasks(self):
102 | results = []
103 | async with self.__lock:
104 | # Temporarily hold tasks to restore them later
105 | temp_tasks = []
106 | while not self.__task_queue.empty():
107 | task = await self.__task_queue.get()
108 | temp_tasks.append(task)
109 | priority, i, task_data = task
110 | results.append(task_data['name'])
111 | # Put tasks back into the queue
112 | for task in temp_tasks:
113 | await self.__task_queue.put(task)
114 | return results
115 |
116 |
117 | async def configure_tvh_with_defaults(app):
118 | logger.info("Configuring TVH")
119 | config = app.config['APP_CONFIG']
120 | from backend.tvheadend.tvh_requests import configure_tvh
121 | await configure_tvh(config)
122 |
123 |
124 | async def update_playlists(app):
125 | logger.info("Updating Playlists")
126 | config = app.config['APP_CONFIG']
127 | from backend.playlists import import_playlist_data_for_all_playlists
128 | await import_playlist_data_for_all_playlists(config)
129 |
130 |
131 | async def update_epgs(app):
132 | logger.info("Updating EPGs")
133 | config = app.config['APP_CONFIG']
134 | from backend.epgs import import_epg_data_for_all_epgs
135 | await import_epg_data_for_all_epgs(config)
136 |
137 |
138 | async def rebuild_custom_epg(app):
139 | logger.info("Rebuilding custom EPG")
140 | config = app.config['APP_CONFIG']
141 | from backend.epgs import update_channel_epg_with_online_data
142 | await update_channel_epg_with_online_data(config)
143 | from backend.epgs import build_custom_epg
144 | await build_custom_epg(config)
145 |
146 |
147 | async def update_tvh_epg(app):
148 | logger.info("Triggering update of TVH EPG")
149 | config = app.config['APP_CONFIG']
150 | from backend.epgs import run_tvh_epg_grabbers
151 | await run_tvh_epg_grabbers(config)
152 |
153 |
154 | async def update_tvh_networks(app):
155 | logger.info("Updating channels in TVH")
156 | config = app.config['APP_CONFIG']
157 | from backend.playlists import publish_playlist_networks
158 | await publish_playlist_networks(config)
159 |
160 |
161 | async def update_tvh_channels(app):
162 | logger.info("Updating channels in TVH")
163 | config = app.config['APP_CONFIG']
164 | from backend.channels import publish_bulk_channels_to_tvh_and_m3u
165 | await publish_bulk_channels_to_tvh_and_m3u(config)
166 |
167 |
168 | async def update_tvh_muxes(app):
169 | logger.info("Updating muxes in TVH")
170 | config = app.config['APP_CONFIG']
171 | from backend.channels import publish_channel_muxes
172 | await publish_channel_muxes(config)
173 |
174 |
175 | async def map_new_tvh_services(app):
176 | logger.info("Mapping new services in TVH")
177 | config = app.config['APP_CONFIG']
178 | # Map any new services
179 | from backend.channels import map_all_services, cleanup_old_channels
180 | await map_all_services(config)
181 | # Clear out old channels
182 | await cleanup_old_channels(config)
183 |
--------------------------------------------------------------------------------
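backend/api/tasks.py instantiates an AsyncIOScheduler, but the code that wires it to TaskQueueBroker.execute_tasks() is not part of this file. A sketch of how the two might be tied together at application start-up; the job intervals and the start_background_tasks entry point are illustrative assumptions:

    # Hypothetical start-up wiring - the project's real wiring lives elsewhere in the app.
    from backend.api.tasks import TaskQueueBroker, scheduler, update_playlists


    async def start_background_tasks(app):
        TaskQueueBroker.initialize(app.logger)
        broker = await TaskQueueBroker.get_instance()

        async def drain_queue():
            # Runs queued tasks in priority order; a no-op while paused or empty.
            await broker.execute_tasks()

        # Illustrative intervals: poll the queue frequently, refresh playlists daily.
        scheduler.add_job(drain_queue, 'interval', seconds=10)
        scheduler.add_job(update_playlists, 'interval', hours=24, args=[app])
        scheduler.start()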
/frontend/src/components/EpgInfoDialog.vue:
--------------------------------------------------------------------------------
1 | <!--
2 |   The original <template> and <script> markup of this component (an EPG info dialog
3 |   offering a "Playlist Settings" action and a "Close" button) was lost to tag
4 |   stripping during extraction and cannot be reliably reconstructed here.
5 | -->
--------------------------------------------------------------------------------
/docker/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | ###
3 | # File: entrypoint.sh
4 | # Project: docker
5 | # File Created: Monday, 13th May 2024 4:20:35 pm
6 | # Author: Josh.5 (jsunnex@gmail.com)
7 | # -----
8 | # Last Modified: Sunday, 3rd November 2024 2:39:11 pm
9 | # Modified By: Josh5 (jsunnex@gmail.com)
10 | ###
11 |
12 | set -e
13 |
14 | # Ensure HOME is always set to /config
15 | export HOME="/config"
16 |
17 | # All printed log lines from this script should be formatted with this function
18 | print_log() {
19 | local timestamp
20 | local pid
21 | local level
22 | local message
23 | timestamp="$(date +'%Y-%m-%d %H:%M:%S %z')"
24 | pid="$$"
25 | level="$1"
26 | message="${*:2}"
27 | echo "[${timestamp}] [${pid}] [${level^^}] ${message}"
28 | }
29 |
30 | # Catch term signal and terminate any child processes
31 | _term() {
32 | [ -n "$proxy_pid" ] && kill -TERM "$proxy_pid" 2>/dev/null
33 | if [ -n "$tvh_pid" ]; then
34 | kill -SIGINT "$tvh_pid" 2>/dev/null
35 | fi
36 | }
37 | trap _term SIGTERM SIGINT
38 |
39 | # If running as root, perform setup and re-run this script as the specified user
40 | if [ "$(id -u)" = "0" ]; then
41 | # Create required directories
42 | mkdir -p /config/.tvh_iptv_config
43 | chown "${PUID:-1000}:${PGID:-1000}" /config/.tvh_iptv_config
44 | mkdir -p /tmp/nginx
45 | chown "${PUID:-1000}:${PGID:-1000}" /tmp/nginx
46 | if command -v tvheadend >/dev/null 2>&1; then
47 | mkdir -p /config/.tvheadend
48 | chown "${PUID:-1000}:${PGID:-1000}" /config/.tvheadend
49 | mkdir -p /recordings
50 | chown -R "${PUID:-1000}:${PGID:-1000}" /recordings
51 | mkdir -p /timeshift
52 | chown -R "${PUID:-1000}:${PGID:-1000}" /timeshift
53 | else
54 | print_log warn "tvheadend binary NOT found during root setup phase (PATH=$PATH)"
55 | fi
56 | exec gosu "${PUID:-1000}:${PGID:-1000}" env HOME="/config" "$0" "$@"
57 | fi
58 |
59 | # Print the current version (if the file exists)
60 | if [[ -f /version.txt ]]; then
61 | cat /version.txt
62 | fi
63 |
64 | # Log the startup configuration environment variables
65 | print_log info "ENABLE_APP_DEBUGGING: ${ENABLE_APP_DEBUGGING:-ENABLE_APP_DEBUGGING variable has not been set}"
66 | print_log info "ENABLE_SQLALCHEMY_DEBUGGING: ${ENABLE_SQLALCHEMY_DEBUGGING:-ENABLE_SQLALCHEMY_DEBUGGING variable has not been set}"
67 | print_log info "SKIP_MIGRATIONS: ${SKIP_MIGRATIONS:-SKIP_MIGRATIONS variable has not been set}"
68 | print_log info "RUN_PIP_INSTALL: ${RUN_PIP_INSTALL:-RUN_PIP_INSTALL variable has not been set}"
69 |
70 | # Configure required directories
71 | mkdir -p /config/.tvh_iptv_config
72 |
73 | # Exec provided command
74 | if [ "X$*" != "X" ]; then
75 | print_log info "Running command '${*}'"
76 | exec "$*"
77 | else
78 | # Install packages (if requested)
79 | if [ "${RUN_PIP_INSTALL}" = "true" ]; then
80 | python3 -m venv --symlinks --clear /var/venv-docker
81 | source /var/venv-docker/bin/activate
82 | python3 -m pip install --no-cache-dir -r /app/requirements.txt
83 | else
84 | source /var/venv-docker/bin/activate
85 | fi
86 |
87 | # Execute migrations
88 | if [ "${SKIP_MIGRATIONS}" != "true" ]; then
89 | print_log info "Running TVH-IPTV-Config DB migrations"
90 | alembic upgrade head
91 | fi
92 |
93 | # If the 'nginx' binary exists in the path, start it
94 | if command -v nginx >/dev/null 2>&1; then
95 | mkdir -p /tmp/nginx/logs
96 | # Replace the listen port in the Nginx configuration to whatever is set in FLASK_RUN_PORT
97 | if [ -n "${FLASK_RUN_PORT}" ]; then
98 | sed "s/listen.*;/listen ${FLASK_RUN_PORT};/" /defaults/nginx/nginx.conf.template > /tmp/nginx/nginx.conf
99 | fi
100 | # Start Nginx
101 | print_log info "Starting Nginx service"
102 | nginx -c /tmp/nginx/nginx.conf -p /tmp/nginx &
103 | proxy_pid=$!
104 | print_log info "Started Nginx service with PID $proxy_pid"
105 | # Move the app to internal port 9984; Nginx now listens on the original FLASK_RUN_PORT and proxies to it
106 | export FLASK_RUN_PORT=9984
107 | fi
108 |
109 | # If the 'tvheadend' binary exists in the path, start it
110 | if command -v tvheadend >/dev/null 2>&1; then
111 | # Install default TVH config
112 | if [ ! -f /config/.tvheadend/accesscontrol/83e4a7e5712d79a97b570b54e8e0e781 ]; then
113 | print_log info "Installing admin tvheadend accesscontrol"
114 | mkdir -p /config/.tvheadend/accesscontrol
115 | cp -rf /defaults/tvheadend/admin_accesscontrol /config/.tvheadend/accesscontrol/83e4a7e5712d79a97b570b54e8e0e781
116 | fi
117 | if [ ! -f /config/.tvheadend/passwd/c0a8261ea68035cd447a29a57d12ff7c ]; then
118 | print_log info "Installing admin tvheadend passwd"
119 | mkdir -p /config/.tvheadend/passwd
120 | cp -rf /defaults/tvheadend/admin_auth /config/.tvheadend/passwd/c0a8261ea68035cd447a29a57d12ff7c
121 | fi
122 | if [ ! -f /config/.tvheadend/config ]; then
123 | print_log info "Installing default tvheadend config"
124 | mkdir -p /config/.tvheadend
125 | cp -rf /defaults/tvheadend/config /config/.tvheadend/config
126 | fi
127 | print_log info "Starting tvheadend service"
128 | set +e
129 | tvheadend --version 2>/dev/null || print_log warn "Unable to display tvheadend version (non-fatal)"
130 | set -e
131 | tvheadend --config /config/.tvheadend --http_root /tic-tvh --nobackup --nosatipcli \
132 | > /tmp/tvh_stdout.log 2> /tmp/tvh_stderr.log &
133 | tvh_pid=$!
134 | sleep 1
135 | if kill -0 "$tvh_pid" 2>/dev/null; then
136 | print_log info "Started tvheadend service with PID $tvh_pid"
137 | else
138 | print_log error "tvheadend failed to start";
139 | print_log error "Stdout:"; sed -e 's/^/[TVH-STDOUT] /' /tmp/tvh_stdout.log || true
140 | print_log error "Stderr:"; sed -e 's/^/[TVH-STDERR] /' /tmp/tvh_stderr.log || true
141 | fi
142 | else
143 | print_log warn "tvheadend binary not found at application start (PATH=$PATH). Skipping TVH launch."
144 | fi
145 |
146 | # Check if the database file exists
147 | if [[ -f "/config/.tvh_iptv_config/db.sqlite3" ]]; then
148 | print_log info "Starting VACUUM on /config/.tvh_iptv_config/db.sqlite3..."
149 | # Run VACUUM command on the database
150 | sqlite3 "/config/.tvh_iptv_config/db.sqlite3" "VACUUM;"
151 | print_log info "VACUUM completed for /config/.tvh_iptv_config/db.sqlite3."
152 | else
153 | print_log info "Database file not found at /config/.tvh_iptv_config/db.sqlite3. Skipping VACUUM."
154 | fi
155 |
156 | # Run TIC server
157 | print_log info "Starting TIC server"
158 | python3 "${FLASK_APP:?}"
159 |
160 | # Terminate TVH process if TIC service ends
161 | if [ -n "$tvh_pid" ]; then
162 | kill -SIGINT "$tvh_pid"
163 | fi
164 | fi
165 |
--------------------------------------------------------------------------------
/frontend/quasar.config.js:
--------------------------------------------------------------------------------
1 | /* eslint-env node */
2 |
3 | /*
4 | * This file runs in a Node context (it's NOT transpiled by Babel), so use only
5 | * the ES6 features that are supported by your Node version. https://node.green/
6 | */
7 |
8 | // Configuration for your app
9 | // https://v2.quasar.dev/quasar-cli-webpack/quasar-config-js
10 |
11 | const ESLintPlugin = require('eslint-webpack-plugin');
12 |
13 | const {configure} = require('quasar/wrappers');
14 |
15 | module.exports = configure(function(ctx) {
16 | return {
17 | // https://v2.quasar.dev/quasar-cli-webpack/supporting-ts
18 | supportTS: false,
19 |
20 | // https://v2.quasar.dev/quasar-cli-webpack/prefetch-feature
21 | // preFetch: true,
22 |
23 | // app boot file (/src/boot)
24 | // --> boot files are part of "main.js"
25 | // https://v2.quasar.dev/quasar-cli-webpack/boot-files
26 | boot: [
27 | 'axios',
28 | ],
29 |
30 | // https://v2.quasar.dev/quasar-cli-webpack/quasar-config-js#Property%3A-css
31 | css: [
32 | 'app.scss',
33 | ],
34 |
35 | // https://github.com/quasarframework/quasar/tree/dev/extras
36 | extras: [
37 | // 'ionicons-v4',
38 | // 'mdi-v7',
39 | 'fontawesome-v6',
40 | // 'eva-icons',
41 | // 'themify',
42 | // 'line-awesome',
43 | // 'roboto-font-latin-ext', // use either this or 'roboto-font', NEVER both!
44 |
45 | 'roboto-font', // optional, you are not bound to it
46 | 'material-icons', // optional, you are not bound to it
47 | ],
48 |
49 | // Full list of options: https://v2.quasar.dev/quasar-cli-webpack/quasar-config-js#Property%3A-build
50 | build: {
51 | vueRouterMode: 'hash', // available values: 'hash', 'history'
52 |
53 | // transpile: false,
54 | // Set the root public path to /tic-web/*
55 | publicPath: '/tic-web/',
56 |
57 | // Add dependencies for transpiling with Babel (Array of string/regex)
58 | // (from node_modules, which are by default not transpiled).
59 | // Applies only if "transpile" is set to true.
60 | // transpileDependencies: [],
61 |
62 | // rtl: true, // https://quasar.dev/options/rtl-support
63 | // preloadChunks: true,
64 | // showProgress: false,
65 | // gzip: true,
66 | // analyze: true,
67 |
68 | // Options below are automatically set depending on the env, set them if you want to override
69 | // extractCSS: false,
70 |
71 | // https://v2.quasar.dev/quasar-cli-webpack/handling-webpack
72 | // "chain" is a webpack-chain object https://github.com/neutrinojs/webpack-chain
73 |
74 | chainWebpack(chain) {
75 | chain.plugin('eslint-webpack-plugin').
76 | use(ESLintPlugin, [{extensions: ['js', 'vue']}]);
77 | },
78 |
79 | },
80 |
81 | // Full list of options: https://v2.quasar.dev/quasar-cli-webpack/quasar-config-js#Property%3A-devServer
82 | devServer: {
83 | server: {
84 | type: 'http',
85 | },
86 | port: 8080,
87 | proxy: {
88 | '/tic-api': 'http://localhost:9985',
89 | '/tic-web/epg.xml': 'http://localhost:9985',
90 | '/tic-tvh': {
91 | target: 'ws://localhost:9985',
92 | ws: true,
93 | },
94 | },
95 | open: false, // opens browser window automatically
96 | },
97 |
98 | // https://v2.quasar.dev/quasar-cli-webpack/quasar-config-js#Property%3A-framework
99 | framework: {
100 | config: {},
101 |
102 | // iconSet: 'material-icons', // Quasar icon set
103 | // lang: 'en-US', // Quasar language pack
104 |
105 | // For special cases outside of where the auto-import strategy can have an impact
106 | // (like functional components as one of the examples),
107 | // you can manually specify Quasar components/directives to be available everywhere:
108 | //
109 | // components: [],
110 | // directives: [],
111 |
112 | // Quasar plugins
113 | plugins: [
114 | 'Dialog',
115 | 'Loading',
116 | 'Notify',
117 | ],
118 | },
119 |
120 | // animations: 'all', // --- includes all animations
121 | // https://quasar.dev/options/animations
122 | animations: [],
123 |
124 | // https://v2.quasar.dev/quasar-cli-webpack/developing-ssr/configuring-ssr
125 | ssr: {
126 | pwa: false,
127 |
128 | // manualStoreHydration: true,
129 | // manualPostHydrationTrigger: true,
130 |
131 | prodPort: 3000, // The default port that the production server should use
132 | // (gets superseded if process.env.PORT is specified at runtime)
133 |
134 | maxAge: 1000 * 60 * 60 * 24 * 30,
135 | // Tell browser when a file from the server should expire from cache (in ms)
136 |
137 | chainWebpackWebserver(chain) {
138 | chain.plugin('eslint-webpack-plugin').
139 | use(ESLintPlugin, [{extensions: ['js']}]);
140 | },
141 |
142 | middlewares: [
143 | ctx.prod ? 'compression' : '',
144 | 'render', // keep this as last one
145 | ],
146 | },
147 |
148 | // https://v2.quasar.dev/quasar-cli-webpack/developing-pwa/configuring-pwa
149 | pwa: {
150 | workboxPluginMode: 'GenerateSW', // 'GenerateSW' or 'InjectManifest'
151 | workboxOptions: {}, // only for GenerateSW
152 |
153 | // for the custom service worker ONLY (/src-pwa/custom-service-worker.[js|ts])
154 | // if using workbox in InjectManifest mode
155 |
156 | chainWebpackCustomSW(chain) {
157 | chain.plugin('eslint-webpack-plugin').
158 | use(ESLintPlugin, [{extensions: ['js']}]);
159 | },
160 |
161 | manifest: {
162 | name: `TVH IPTV Config`,
163 | short_name: `TVH IPTV Config`,
164 | description: `A tool for simplifying IPTV configuration in TVheadend`,
165 | display: 'standalone',
166 | orientation: 'portrait',
167 | background_color: '#ffffff',
168 | theme_color: '#027be3',
169 | icons: [
170 | {
171 | src: 'icons/icon-128x128.png',
172 | sizes: '128x128',
173 | type: 'image/png',
174 | },
175 | {
176 | src: 'icons/icon-192x192.png',
177 | sizes: '192x192',
178 | type: 'image/png',
179 | },
180 | {
181 | src: 'icons/icon-256x256.png',
182 | sizes: '256x256',
183 | type: 'image/png',
184 | },
185 | {
186 | src: 'icons/icon-384x384.png',
187 | sizes: '384x384',
188 | type: 'image/png',
189 | },
190 | {
191 | src: 'icons/icon-512x512.png',
192 | sizes: '512x512',
193 | type: 'image/png',
194 | },
195 | ],
196 | },
197 | },
198 |
199 | // Full list of options: https://v2.quasar.dev/quasar-cli-webpack/developing-cordova-apps/configuring-cordova
200 | cordova: {
201 | // noIosLegacyBuildFlag: true, // uncomment only if you know what you are doing
202 | },
203 |
204 | // Full list of options: https://v2.quasar.dev/quasar-cli-webpack/developing-capacitor-apps/configuring-capacitor
205 | capacitor: {
206 | hideSplashscreen: true,
207 | },
208 |
209 | // Full list of options: https://v2.quasar.dev/quasar-cli-webpack/developing-electron-apps/configuring-electron
210 | electron: {
211 | bundler: 'packager', // 'packager' or 'builder'
212 |
213 | packager: {
214 | // https://github.com/electron-userland/electron-packager/blob/master/docs/api.md#options
215 |
216 | // OS X / Mac App Store
217 | // appBundleId: '',
218 | // appCategoryType: '',
219 | // osxSign: '',
220 | // protocol: 'myapp://path',
221 |
222 | // Windows only
223 | // win32metadata: { ... }
224 | },
225 |
226 | builder: {
227 | // https://www.electron.build/configuration/configuration
228 |
229 | appId: 'tvh_iptv_config',
230 | },
231 |
232 | // "chain" is a webpack-chain object https://github.com/neutrinojs/webpack-chain
233 |
234 | chainWebpackMain(chain) {
235 | chain.plugin('eslint-webpack-plugin').
236 | use(ESLintPlugin, [{extensions: ['js']}]);
237 | },
238 |
239 | chainWebpackPreload(chain) {
240 | chain.plugin('eslint-webpack-plugin').
241 | use(ESLintPlugin, [{extensions: ['js']}]);
242 | },
243 |
244 | },
245 | };
246 | });
247 |
--------------------------------------------------------------------------------
/.github/workflows/build_docker_ci.yml:
--------------------------------------------------------------------------------
1 | name: Build All Packages CI
2 |
3 | on:
4 | push:
5 | branches: ["dev-**", "pr-**", "staging", "master"]
6 | tags: ["**"]
7 | pull_request:
8 | branches: ["staging", "master"]
9 | schedule:
10 | # At 02:30 on Saturday
11 | - cron: "30 2 * * 6"
12 |
13 | jobs:
14 | build_docker:
15 | name: Build Docker Image
16 | runs-on: ubuntu-latest
17 | permissions:
18 | contents: read
19 | packages: write
20 | strategy:
21 | matrix:
22 | base-image:
23 | ["python:3.12-bookworm", "ghcr.io/tvheadend/tvheadend:edge-debian"]
24 |
25 | steps:
26 | # ---
27 | - name: Checkout repository
28 | uses: actions/checkout@v4
29 |
30 | # ---
31 | - name: Set up QEMU
32 | uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 #v3.6.0
33 |
34 | # ---
35 | - name: Set up Docker Buildx
36 | id: buildx
37 | uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 #v3.8.0
38 |
39 | # ---
40 | - name: Available platforms
41 | if: success()
42 | run: echo ${{ steps.buildx.outputs.platforms }}
43 |
44 | # ---
45 | - name: Prepare
46 | if: success()
47 | id: prepare
48 | run: |
49 | echo "GITHUB_REF:${GITHUB_REF}"
50 | echo "GITHUB_REPOSITORY:${GITHUB_REPOSITORY}"
51 | VERSION_TAG=${GITHUB_REF#refs/*/}
52 | SHA_SHORT="${GITHUB_SHA::7}"
53 | ORG=$(echo "${{ github.repository_owner }}" | tr '[:upper:]' '[:lower:]')
54 | BUILD_DATE="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"
55 |
56 | SERVICE_NAME=tvh-iptv-config
57 | if [[ "${{ matrix.base-image }}" == *"tvheadend"* ]]; then
58 | SERVICE_NAME=tvh-iptv
59 | fi
60 | DOCKER_HUB_IMAGE=docker.io/josh5/${SERVICE_NAME:?}
61 | GHCR_IMAGE=ghcr.io/${ORG:?}/${SERVICE_NAME:?}
62 |
63 | DOCKER_TAGS=""
64 | DOCKER_PUSH="false"
65 | if [[ ${GITHUB_REF} == refs/heads/master ]]; then
66 | DOCKER_PUSH="true"
67 | DOCKER_TAGS="${DOCKER_TAGS}${DOCKER_HUB_IMAGE}:latest,${GHCR_IMAGE}:latest,"
68 | elif [[ ${GITHUB_REF} == refs/heads/staging ]]; then
69 | DOCKER_PUSH="true"
70 | DOCKER_TAGS="${DOCKER_TAGS}${DOCKER_HUB_IMAGE}:staging,${GHCR_IMAGE}:staging,"
71 | elif [[ ${GITHUB_REF} == refs/heads/dev-* ]]; then
72 | DOCKER_PUSH="true"
73 | DOCKER_TAGS="${DOCKER_TAGS}${DOCKER_HUB_IMAGE}:${VERSION_TAG},${GHCR_IMAGE}:${VERSION_TAG},"
74 | elif [[ ${GITHUB_REF} == refs/tags/* ]]; then
75 | DOCKER_PUSH="true"
76 | VERSION=${GITHUB_REF#refs/tags/}
77 | if [[ ${VERSION} =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}(-[A-Za-z0-9._]+)?$ ]]; then
78 | DOCKER_TAGS="${DOCKER_TAGS}${DOCKER_HUB_IMAGE}:${VERSION},${GHCR_IMAGE}:${VERSION},"
79 | DOCKER_TAGS="${DOCKER_TAGS}${DOCKER_HUB_IMAGE}:latest,${GHCR_IMAGE}:latest,"
80 | fi
81 | elif [[ ${GITHUB_REF} == refs/pull/* ]]; then
82 | PR_NUMBER=$(echo ${GITHUB_REF} | cut -d'/' -f3)
83 | # For PR builds, only tag the GHCR image.
84 | DOCKER_TAGS="${GHCR_IMAGE}:pr-${PR_NUMBER},"
85 | fi
86 |
87 | echo "Build: [$(date +"%F %T")] [${GITHUB_REF_NAME}] [${GITHUB_SHA}]" > ./docker/overlay/version.txt
88 | cat ./docker/overlay/version.txt
89 |
90 | echo "docker_hub_image:${DOCKER_HUB_IMAGE:?}"
91 | echo "docker_hub_image=${DOCKER_HUB_IMAGE:?}" >> $GITHUB_OUTPUT
92 |
93 | echo "ghcr_image:${GHCR_IMAGE:?}"
94 | echo "ghcr_image=${GHCR_IMAGE:?}" >> $GITHUB_OUTPUT
95 |
96 | echo "service_name:${SERVICE_NAME:?}"
97 | echo "service_name=${SERVICE_NAME:?}" >> $GITHUB_OUTPUT
98 |
99 | echo "docker_image:${DOCKER_HUB_IMAGE:?}"
100 | echo "docker_image=${DOCKER_HUB_IMAGE:?}" >> $GITHUB_OUTPUT
101 |
102 | echo "docker_tags:$(echo ${DOCKER_TAGS} | sed 's/,$//')"
103 | echo "docker_tags=$(echo ${DOCKER_TAGS} | sed 's/,$//')" >> $GITHUB_OUTPUT
104 |
105 | echo "docker_push:${DOCKER_PUSH:?}"
106 | echo "docker_push=${DOCKER_PUSH:?}" >> $GITHUB_OUTPUT
107 |
108 | echo "docker_build_date:${BUILD_DATE:?}"
109 | echo "docker_build_date=${BUILD_DATE:?}" >> $GITHUB_OUTPUT
110 |
111 | # ---
112 | - name: Log into GHCR registry
113 | uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
114 | with:
115 | registry: ghcr.io
116 | username: ${{ github.actor }}
117 | password: ${{ secrets.GITHUB_TOKEN }}
118 |
119 | # ---
120 | - name: Log into Docker Hub registry
121 | if: success() && (startsWith(github.ref, 'refs/heads/') || startsWith(github.ref, 'refs/tags/'))
122 | uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 #v3.3.0
123 | with:
124 | username: ${{ secrets.DOCKER_USERNAME }}
125 | password: ${{ secrets.DOCKER_PASSWORD }}
126 |
127 | # ---
128 | - name: Docker meta
129 | if: success()
130 | id: meta
131 | uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96 #v5.6.1
132 | with:
133 | images: |
134 | ${{ steps.prepare.outputs.docker_image }}
135 | labels: |
136 | maintainer=Josh.5
137 | source.version=${{ github.sha }}
138 | source.project=TVH-IPTV-Config
139 | source.service=${{ steps.prepare.outputs.service_name }}
140 | org.opencontainers.image.title=${{ steps.prepare.outputs.service_name }}
141 | org.opencontainers.image.created=${{ steps.prepare.outputs.docker_build_date }}
142 |
143 | # ---
144 | - name: Build Frontend Cache
145 | if: success()
146 | uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc #v6.11.0
147 | with:
148 | context: .
149 | file: docker/Dockerfile
150 | target: frontend_build_stage
151 | build-args: |
152 | VERSION=${{ github.sha }}
153 | BUILD_DATE=${{ steps.prepare.outputs.docker_build_date }}
154 | BASE_IMAGE=${{ matrix.base-image }}
155 | platforms: linux/amd64,linux/arm64
156 | pull: "true"
157 | push: "false"
158 | tags: |
159 | ${{ steps.prepare.outputs.ghcr_image }}:build-cache-frontend-build-stage
160 | cache-from: type=gha,scope=${{ steps.prepare.outputs.service_name }}-frontend-build-stage
161 | cache-to: type=gha,scope=${{ steps.prepare.outputs.service_name }}-frontend-build-stage,mode=max
162 |
163 | # ---
164 | - name: Build Backend Cache
165 | if: success()
166 | uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc #v6.11.0
167 | with:
168 | context: .
169 | file: docker/Dockerfile
170 | target: backend_build_stage
171 | build-args: |
172 | VERSION=${{ github.sha }}
173 | BUILD_DATE=${{ steps.prepare.outputs.docker_build_date }}
174 | BASE_IMAGE=${{ matrix.base-image }}
175 | platforms: linux/amd64,linux/arm64
176 | pull: "true"
177 | push: "false"
178 | tags: |
179 | ${{ steps.prepare.outputs.ghcr_image }}:build-cache-backend-build-stage
180 | cache-from: type=gha,scope=${{ steps.prepare.outputs.service_name }}-backend-build-stage
181 | cache-to: type=gha,scope=${{ steps.prepare.outputs.service_name }}-backend-build-stage,mode=max
182 |
183 | # ---
184 | - name: Build Main Image
185 | if: success()
186 | uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc #v6.11.0
187 | with:
188 | context: .
189 | file: docker/Dockerfile
190 | build-args: |
191 | VERSION=${{ github.sha }}
192 | BUILD_DATE=${{ steps.prepare.outputs.docker_build_date }}
193 | BASE_IMAGE=${{ matrix.base-image }}
194 | platforms: linux/amd64,linux/arm64
195 | pull: "true"
196 | push: ${{ steps.prepare.outputs.docker_push }}
197 | tags: |
198 | ${{ steps.prepare.outputs.docker_tags }}
199 | labels: |
200 | ${{ steps.meta.outputs.labels }}
201 | cache-from: |
202 | type=gha,scope=${{ steps.prepare.outputs.service_name }}-main
203 | type=gha,scope=${{ steps.prepare.outputs.service_name }}-frontend-build-stage
204 | type=gha,scope=${{ steps.prepare.outputs.service_name }}-backend-build-stage
205 | cache-to: type=gha,scope=${{ steps.prepare.outputs.service_name }}-main,mode=max
206 |
--------------------------------------------------------------------------------
/backend/models.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 | from flask_sqlalchemy import SQLAlchemy
4 | from sqlalchemy import Column, Integer, String, ForeignKey, Boolean, Table, MetaData
5 | from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
6 | from sqlalchemy.orm import relationship, sessionmaker, declarative_base
7 |
8 | from backend import config
9 |
10 | metadata = MetaData()
11 | Base = declarative_base(metadata=metadata)
12 |
13 | engine = create_async_engine(config.sqlalchemy_database_async_uri, echo=config.enable_sqlalchemy_debugging)
14 | Session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
15 |
16 | # Use of 'db' in this project is now deprecated and will be removed in a future release. Use Session instead.
17 | db = SQLAlchemy()
18 |
19 |
20 | class Epg(Base):
21 | __tablename__ = "epgs"
22 | id = Column(Integer, primary_key=True)
23 |
24 | enabled = Column(Boolean, nullable=False, unique=False)
25 | name = Column(String(500), index=True, unique=False)
26 | url = Column(String(500), index=True, unique=False)
27 |
28 | # Backref to all associated linked channels
29 | epg_channels = relationship('EpgChannels', backref='guide', lazy=True, cascade="all, delete-orphan")
30 | channels = relationship('Channel', backref='guide', lazy=True, cascade="all, delete-orphan")
31 |
32 | def __repr__(self):
33 | return '<Epg {}>'.format(self.id)
34 |
35 |
36 | class EpgChannels(Base):
37 | __tablename__ = "epg_channels"
38 | id = Column(Integer, primary_key=True)
39 |
40 | channel_id = Column(String(256), index=True, unique=False)
41 | name = Column(String(500), index=True, unique=False)
42 | icon_url = Column(String(500), index=False, unique=False)
43 |
44 | # Link with an epg
45 | epg_id = Column(Integer, ForeignKey('epgs.id'), nullable=False)
46 |
47 | # Backref to all associated linked channels
48 | epg_channel_programmes = relationship('EpgChannelProgrammes', backref='channel', lazy=True,
49 | cascade="all, delete-orphan")
50 |
51 | def __repr__(self):
52 | return '<EpgChannels {}>'.format(self.id)
53 |
54 |
55 | class EpgChannelProgrammes(Base):
56 | """
57 |
58 | Programme Title
59 | Programme description.
60 |
61 | """
62 | __tablename__ = "epg_channel_programmes"
63 | id = Column(Integer, primary_key=True)
64 |
65 | channel_id = Column(String(256), index=True, unique=False)
66 | title = Column(String(500), index=True, unique=False)
67 | sub_title = Column(String(500), index=False, unique=False)
68 | desc = Column(String(500), index=False, unique=False)
69 | series_desc = Column(String(500), index=False, unique=False)
70 | country = Column(String(500), index=False, unique=False)
71 | icon_url = Column(String(500), index=False, unique=False)
72 | start = Column(String(256), index=False, unique=False)
73 | stop = Column(String(256), index=False, unique=False)
74 | start_timestamp = Column(String(256), index=False, unique=False)
75 | stop_timestamp = Column(String(256), index=False, unique=False)
76 | categories = Column(String(256), index=True, unique=False)
77 | # Extended optional XMLTV / TVHeadend supported metadata (all nullable / optional)
78 | summary = Column(String(1000), index=False, unique=False)
79 | keywords = Column(String(1000), index=False, unique=False) # JSON encoded list of keyword strings
80 | credits_json = Column(String(4000), index=False, unique=False) # JSON: {"actor":[],"director":[],...}
81 | video_colour = Column(String(10), index=False, unique=False)
82 | video_aspect = Column(String(32), index=False, unique=False)
83 | video_quality = Column(String(16), index=False, unique=False)
84 | subtitles_type = Column(String(32), index=False, unique=False)
85 | audio_described = Column(Boolean, nullable=True) # True ->
86 | previously_shown_date = Column(String(32), index=False, unique=False) # YYYY-MM-DD
87 | premiere = Column(Boolean, nullable=True)
88 | is_new = Column(Boolean, nullable=True)
89 | epnum_onscreen = Column(String(64), index=False, unique=False)
90 | epnum_xmltv_ns = Column(String(64), index=False, unique=False)
91 | epnum_dd_progid = Column(String(64), index=False, unique=False)
92 | star_rating = Column(String(16), index=False, unique=False) # e.g. "3/5"
93 | production_year = Column(String(8), index=False, unique=False) #
94 | rating_system = Column(String(32), index=False, unique=False)
95 | rating_value = Column(String(64), index=False, unique=False)
96 |
97 | # Link with an epg channel
98 | epg_channel_id = Column(Integer, ForeignKey('epg_channels.id'), nullable=False)
99 |
100 | def __repr__(self):
101 | return '<EpgChannelProgrammes {}>'.format(self.id)
102 |
103 |
104 | class Playlist(Base):
105 | __tablename__ = "playlists"
106 | id = Column(Integer, primary_key=True)
107 |
108 | enabled = Column(Boolean, nullable=False, unique=False)
109 | connections = Column(Integer, nullable=False, unique=False)
110 | name = Column(String(500), index=True, unique=False)
111 | tvh_uuid = Column(String(64), index=True, unique=True)
112 | url = Column(String(500), index=True, unique=False)
113 | use_hls_proxy = Column(Boolean, nullable=False, unique=False)
114 | use_custom_hls_proxy = Column(Boolean, nullable=False, unique=False)
115 | hls_proxy_path = Column(String(256), unique=False)
116 |
117 | # Backref to all associated linked sources
118 | channel_sources = relationship('ChannelSource', backref='playlist', lazy=True, cascade="all, delete-orphan")
119 | playlist_streams = relationship('PlaylistStreams', backref='playlist', lazy=True, cascade="all, delete-orphan")
120 |
121 | def __repr__(self):
122 | return '<Playlist {}>'.format(self.id)
123 |
124 |
125 | class PlaylistStreams(Base):
126 | __tablename__ = "playlist_streams"
127 | id = Column(Integer, primary_key=True)
128 |
129 | name = Column(String(500), index=True, unique=False)
130 | url = Column(String(500), index=True, unique=False)
131 | channel_id = Column(String(500), index=True, unique=False)
132 | group_title = Column(String(500), index=True, unique=False)
133 | tvg_chno = Column(Integer, index=False, unique=False)
134 | tvg_id = Column(String(500), index=True, unique=False)
135 | tvg_logo = Column(String(500), index=False, unique=False)
136 |
137 | # Link with a playlist
138 | playlist_id = Column(Integer, ForeignKey('playlists.id'), nullable=False)
139 |
140 | def __repr__(self):
141 | return '<PlaylistStreams {}>'.format(self.id)
142 |
143 |
144 | channels_tags_association_table = Table(
145 | 'channels_tags_group',
146 | Base.metadata,
147 | Column('channel_id', Integer, ForeignKey('channels.id')),
148 | Column('tag_id', Integer, ForeignKey('channel_tags.id'))
149 | )
150 |
151 |
152 | class Channel(Base):
153 | __tablename__ = "channels"
154 | id = Column(Integer, primary_key=True)
155 |
156 | enabled = Column(Boolean, nullable=False, unique=False)
157 | name = Column(String(500), index=True, unique=False)
158 | logo_url = Column(String(500), index=False, unique=False)
159 | logo_base64 = Column(String(500), index=False, unique=False)
160 | number = Column(Integer, index=True, unique=False)
161 | tvh_uuid = Column(String(500), index=True, unique=False)
162 |
163 | # Link with a guide
164 | guide_id = Column(Integer, ForeignKey('epgs.id'))
165 | guide_name = Column(String(256), index=False, unique=False)
166 | guide_channel_id = Column(String(64), index=False, unique=False)
167 |
168 | # Backref to all associated linked sources
169 | sources = relationship('ChannelSource', backref='channel', lazy=True, cascade="all, delete-orphan")
170 |
171 | # Specify many-to-many relationships
172 | tags = relationship("ChannelTag", secondary=channels_tags_association_table)
173 |
174 | def __repr__(self):
175 | return '<Channel {}>'.format(self.id)
176 |
177 |
178 | class ChannelTag(Base):
179 | __tablename__ = "channel_tags"
180 | id = Column(Integer, primary_key=True)
181 |
182 | name = Column(String(64), index=False, unique=True)
183 |
184 | def __repr__(self):
185 | return '<ChannelTag {}>'.format(self.id)
186 |
187 |
188 | class ChannelSource(Base):
189 | __tablename__ = "channel_sources"
190 | id = Column(Integer, primary_key=True)
191 |
192 | # Link with channel
193 | channel_id = Column(Integer, ForeignKey('channels.id'), nullable=False)
194 |
195 | # Link with a playlist
196 | playlist_id = Column(Integer, ForeignKey('playlists.id'), nullable=False)
197 | playlist_stream_name = Column(String(500), index=True, unique=False)
198 | playlist_stream_url = Column(String(500), index=True, unique=False)
199 | priority = Column(String(500), index=True, unique=False)
200 | tvh_uuid = Column(String(500), index=True, unique=False)
201 |
202 | def __repr__(self):
203 | return '<ChannelSource {}>'.format(self.id)
204 |
--------------------------------------------------------------------------------
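models.py deprecates the Flask-SQLAlchemy db handle in favour of the async Session factory, so queries elsewhere in the backend follow the "async with Session()" pattern. A short illustrative example (the query itself is not from the project):

    # Illustrative async query using the Session factory from backend/models.py.
    import asyncio

    from sqlalchemy import select

    from backend.models import Channel, Session


    async def list_enabled_channels():
        async with Session() as session:
            result = await session.execute(
                select(Channel).where(Channel.enabled.is_(True)).order_by(Channel.number)
            )
            return result.scalars().all()


    if __name__ == '__main__':
        for channel in asyncio.run(list_enabled_channels()):
            print(channel.id, channel.number, channel.name)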
/backend/api/routes.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 | import asyncio
4 | import os
5 |
6 | from quart import request, jsonify, redirect, send_from_directory, current_app
7 |
8 | from backend.api import blueprint
9 |
10 | from backend.api.tasks import TaskQueueBroker
11 | from backend.auth import admin_auth_required, check_auth
12 | from backend.config import is_tvh_process_running_locally, get_local_tvh_proc_admin_password
13 | from backend.tvheadend.tvh_requests import configure_tvh
14 |
15 |
16 | @blueprint.route('/')
17 | def index():
18 | return redirect('/tic-web/')
19 |
20 |
21 | @blueprint.route('/tic-web/')
22 | @admin_auth_required
23 | async def serve_index():
24 | response = await send_from_directory(current_app.config['ASSETS_ROOT'], 'index.html')
25 | response.headers['Cache-Control'] = 'no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0'
26 | response.headers['Pragma'] = 'no-cache'
27 | response.headers['Expires'] = '0'
28 | return response
29 |
30 |
31 | @blueprint.route('/tic-web/<path:path>')
32 | @admin_auth_required
33 | async def serve_static(path):
34 | response = await send_from_directory(current_app.config['ASSETS_ROOT'], path)
35 | response.headers['Cache-Control'] = 'no-store, no-cache, must-revalidate, proxy-revalidate, max-age=0'
36 | response.headers['Pragma'] = 'no-cache'
37 | response.headers['Expires'] = '0'
38 | return response
39 |
40 |
41 | @blueprint.route('/tic-web/epg.xml')
42 | def serve_epg_static():
43 | config = current_app.config['APP_CONFIG']
44 | return send_from_directory(os.path.join(config.config_path), 'epg.xml')
45 |
46 |
47 | @blueprint.route('/tic-web/playlist.m3u8')
48 | def serve_playlist_static():
49 | config = current_app.config['APP_CONFIG']
50 | response = send_from_directory(os.path.join(config.config_path), 'playlist.m3u8')
51 | return response
52 |
53 |
54 | @blueprint.route('/tic-api/ping')
55 | async def ping():
56 | # Frontend AIO mixin expects uppercase 'PONG' substring in plain response
57 | return 'PONG', 200, {'Content-Type': 'text/plain; charset=utf-8', 'Cache-Control': 'no-store'}
58 |
59 | # Convenience alias: some clients are probing /tic-tvh/ping (tvheadend http_root); return same pong
60 | @blueprint.route('/tic-tvh/ping')
61 | async def ping_tvh_alias():
62 | return await ping()
63 |
64 |
65 | # Fallback redirector for TVHeadend UI paths when nginx reverse proxy is not installed.
66 | # Without nginx, requests to /tic-tvh/... hit the Quart app and 404. We redirect the
67 | # browser to the real TVH port (9981) preserving the sub-path. This does not proxy
68 | # WebSockets (so log streaming etc may be limited) but restores basic UI access.
69 | @blueprint.route('/tic-tvh/')
70 | @admin_auth_required
71 | async def tvh_root_redirect():
72 | host_only = request.host.split(':')[0]
73 | target = f'http://{host_only}:9981/tic-tvh/'
74 | return redirect(target, 302)
75 |
76 |
77 | @blueprint.route('/tic-tvh/<path:subpath>')
78 | @admin_auth_required
79 | async def tvh_any_redirect(subpath: str):
80 | # Special case: keep existing ping handler (already defined above)
81 | if subpath == 'ping':
82 | return await ping()
83 | host_only = request.host.split(':')[0]
84 | target = f'http://{host_only}:9981/tic-tvh/{subpath}'
85 | return redirect(target, 302)
86 |
87 |
88 | @blueprint.route('/tic-api/check-auth')
89 | async def api_check_auth():
90 | config = current_app.config['APP_CONFIG']
91 | if await check_auth():
92 | return jsonify(
93 | {
94 | "success": True,
95 | "runtime_key": config.runtime_key
96 | }
97 | ), 200
98 | return jsonify(
99 | {
100 | "success": False,
101 | }
102 | ), 401
103 |
104 |
105 | @blueprint.route('/tic-api/require-auth')
106 | @admin_auth_required
107 | async def api_require_auth():
108 | return jsonify(
109 | {
110 | "success": True,
111 | }
112 | ), 200
113 |
114 |
115 | @blueprint.route('/tic-api/get-background-tasks', methods=['GET'])
116 | @admin_auth_required
117 | async def api_get_background_tasks():
118 | task_broker = await TaskQueueBroker.get_instance()
119 | # Pending task names are fetched below as part of the response payload.
120 | return jsonify(
121 | {
122 | "success": True,
123 | "data": {
124 | "task_queue_status": await task_broker.get_status(),
125 | "current_task": await task_broker.get_currently_running_task(),
126 | "pending_tasks": await task_broker.get_pending_tasks(),
127 | },
128 | }
129 | ), 200
130 |
131 |
132 | @blueprint.route('/tic-api/toggle-pause-background-tasks', methods=['GET'])
133 | @admin_auth_required
134 | async def api_toggle_background_tasks_status():
135 | task_broker = await TaskQueueBroker.get_instance()
136 | await task_broker.toggle_status()
137 | return jsonify(
138 | {
139 | "success": True
140 | }
141 | ), 200
142 |
143 |
144 | @blueprint.route('/tic-api/tvh-running', methods=['GET'])
145 | @admin_auth_required
146 | async def api_check_if_tvh_running_status():
147 | running = await is_tvh_process_running_locally()
148 | return jsonify(
149 | {
150 | "success": True,
151 | "data": {
152 | "running": running
153 | }
154 | }
155 | ), 200
156 |
157 |
158 | @blueprint.route('/tic-api/save-settings', methods=['POST'])
159 | @admin_auth_required
160 | async def api_save_config():
161 | json_data = await request.get_json()
162 | config = current_app.config['APP_CONFIG']
163 |
164 | # Update auth for AIO container
165 | if await is_tvh_process_running_locally():
166 | admin_username = 'admin'
167 | if json_data.get('settings', {}).get('first_run'):
168 | json_data['settings']['admin_password'] = admin_username
169 | # Force admin login
170 | json_data['settings']['enable_admin_user'] = True
171 | # Update TVH password also
172 | if json_data.get('settings', {}).get('admin_password'):
173 | if not json_data.get('settings', {}).get('tvheadend'):
174 | json_data['settings']['tvheadend'] = {}
175 | json_data['settings']['tvheadend']['username'] = admin_username
176 | json_data['settings']['tvheadend']['password'] = json_data['settings']['admin_password']
177 | # Force the creation of a client user
178 | json_data['settings']['create_client_user'] = True
179 | client_username = json_data.get('settings', {}).get('client_username')
180 | if not client_username or client_username == '':
181 | json_data['settings']['client_username'] = 'client'
182 | client_password = json_data.get('settings', {}).get('client_password')
183 | if not client_password or client_password == '':
184 | json_data['settings']['client_password'] = 'client'
185 |
186 | # Mark first run as complete
187 | json_data['settings']['first_run'] = False
188 |
189 | # Save the config
190 | config.update_settings(json_data)
191 | config.save_settings()
192 |
193 | # Store settings for TVH service
194 | if json_data.get('settings', {}).get('tvheadend'):
195 |         try:
196 |             await configure_tvh(config)
197 |         except Exception as e:
198 |             # Let the logging call do the formatting; don't mix an f-string with %s
199 |             current_app.logger.exception("Error while configuring TVH: %s", e)
200 | return jsonify(
201 | {
202 | "success": False
203 | }
204 | ), 400
205 | return jsonify(
206 | {
207 | "success": True
208 | }
209 | ), 200
210 |
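# Example request body for the save-settings endpoint above -- a sketch using
# only keys the handler actually reads; values are placeholders:
#
#   POST /tic-api/save-settings
#   {
#       "settings": {
#           "admin_password": "s3cret",
#           "enable_admin_user": true,
#           "tvheadend": {"host": "tvh.example.com", "port": "9981", "path": "/"},
#           "create_client_user": true,
#           "client_username": "client",
#           "client_password": "client"
#       }
#   }
#
# When TVH runs locally (the AIO container), the handler above rewrites the
# tvheadend credentials from the admin password before saving.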
211 |
212 | @blueprint.route('/tic-api/get-settings')
213 | @admin_auth_required
214 | async def api_get_config_tvheadend():
215 | config = current_app.config['APP_CONFIG']
216 | settings = config.read_settings()
217 | return_data = settings.get('settings', {})
218 | if await is_tvh_process_running_locally():
219 | tvh_password = await get_local_tvh_proc_admin_password()
220 | return_data['tvheadend']['username'] = 'admin'
221 | return_data['tvheadend']['password'] = tvh_password
222 | return jsonify(
223 | {
224 | "success": True,
225 | "data": return_data
226 | }
227 | ), 200
228 |
229 |
230 | @blueprint.route('/tic-api/export-config')
231 | @admin_auth_required
232 | async def api_export_config():
233 | config = current_app.config['APP_CONFIG']
234 | # Fetch all playlists
235 | from backend.playlists import read_config_all_playlists
236 | all_playlist_configs = await read_config_all_playlists(config, output_for_export=True)
237 | # Fetch all epgs
238 | from backend.epgs import read_config_all_epgs
239 | all_epg_configs = await read_config_all_epgs(output_for_export=True)
240 | # Fetch all channels
241 | from backend.channels import read_config_all_channels
242 | channels_config = await read_config_all_channels(output_for_export=True)
243 | return_data = {
244 | 'playlists': all_playlist_configs,
245 | 'epgs': all_epg_configs,
246 | 'channels': channels_config,
247 | }
248 | return jsonify(
249 | {
250 | "success": True,
251 | "data": return_data
252 | }
253 | ), 200
254 |
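# Shape of a successful export-config response (keys taken from return_data
# above; list contents depend on the playlist/EPG/channel helpers):
#
#   {"success": true, "data": {"playlists": [...], "epgs": [...], "channels": [...]}}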
--------------------------------------------------------------------------------
/frontend/src/pages/GeneralPage.vue:
--------------------------------------------------------------------------------
1 | <!--
2 | NOTE: The markup of this Vue single-file component was lost during text
3 | extraction; only the template's visible text survived. Recoverable content:
4 | 
5 | Authentication
6 | Enable authentication on TIC web interface
7 | 
8 | Connections
9 | 
10 | Setup Steps:
11 | 1. Configure the Admin username and password. This user should not be used for streaming clients.
12 | 2. Configure the connection details that clients should use to connect to TIC.
13 | This will be applied to the playlists and guide data supplied to these clients.
14 | 
15 | Notes:
16 | Authentication:
17 | Authentication is shared between TIC and the TVheadend Backend.
18 | Updating the admin user here will also update the admin user in TVheadend.
19 | -->
--------------------------------------------------------------------------------
/backend/api/routes_playlist_proxy.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding:utf-8 -*-
3 | import re
4 | import sqlalchemy.exc
5 |
6 | from quart import Response, current_app, jsonify, render_template_string, request
7 | 
8 | from backend.api import blueprint
9 | 
10 |
11 | from backend.config import is_tvh_process_running_locally
12 |
13 | device_xml_template = """<root xmlns="urn:schemas-upnp-org:device-1-0">
14 | <specVersion>
15 | <major>1</major>
16 | <minor>0</minor>
17 | </specVersion>
18 | <URLBase>{{ data.BaseURL }}</URLBase>
19 | <device>
20 | <deviceType>urn:schemas-upnp-org:device:MediaServer:1</deviceType>
21 | <friendlyName>{{ data.FriendlyName }}</friendlyName>
22 | <manufacturer>{{ data.Manufacturer }}</manufacturer>
23 | <modelName>{{ data.ModelNumber }}</modelName>
24 | <modelNumber>{{ data.ModelNumber }}</modelNumber>
25 | <UDN>uuid:{{ data.DeviceID }}</UDN>
26 | </device>
27 | </root>
28 | """
29 | 
30 |
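# The template above mirrors the device.xml advertised by real HDHomeRun
# network tuners; serving it (together with the discover.json and lineup.json
# routes below) is what lets clients such as Plex/Emby/Jellyfin treat each
# playlist as a network tuner device.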
31 |
32 | async def _get_tvh_settings(include_auth=True, stream_profile='pass'):
33 | config = current_app.config['APP_CONFIG']
34 | settings = config.read_settings()
35 | # Configure TVH-IPTV-Config base URL (proto/host/port)
36 | tic_base_url = settings['settings']['app_url']
37 | protocol_match = re.match(r'^(https?)://', settings['settings']['app_url'])
38 | tic_base_url_protocol = protocol_match.group(1) if protocol_match else 'http'
39 | # Create URLs for TVH
40 | # Note: This host needs to be the externally accessible host that third-party apps can then access TVH with
41 | tvh_host = settings['settings']['tvheadend']['host']
42 | tvh_port = settings['settings']['tvheadend']['port']
43 | tvh_path = settings['settings']['tvheadend']['path']
44 | tvh_base_url = f"{tvh_host}:{tvh_port}{tvh_path}"
45 | if await is_tvh_process_running_locally():
46 | tvh_path = '/tic-tvh'
47 | app_url = re.sub(r'^https?://', '', settings['settings']['app_url'])
48 | tvh_base_url = f"{app_url}{tvh_path}"
49 | # Configure some connection URLs
50 | tvh_api_url = f"{tic_base_url_protocol}://{tvh_base_url}/api"
51 | tvh_http_url = f"{tic_base_url_protocol}://{tvh_base_url}"
52 | if include_auth:
53 | client_username = settings['settings']['client_username']
54 | client_password = settings['settings']['client_password']
55 | if settings['settings']['create_client_user'] and client_username:
56 | tvh_http_url = f"{tic_base_url_protocol}://{client_username}:{client_password}@{tvh_base_url}"
57 | # Set stream configuration
58 | stream_priority = 300
59 | return {
60 | "tic_base_url": tic_base_url,
61 | "tvh_base_url": tvh_base_url,
62 | "tvh_path": tvh_path,
63 | "tvh_api_url": tvh_api_url,
64 | "tvh_http_url": tvh_http_url,
65 | "stream_profile": stream_profile,
66 | "stream_priority": stream_priority,
67 | }
68 |
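# Worked example for _get_tvh_settings() (assumed values, not shipped
# defaults): with app_url = "http://tic.example.com:9985", a locally running
# TVH process and a client user "client"/"client", the returned dict is
# roughly:
#
#   {
#       "tic_base_url": "http://tic.example.com:9985",
#       "tvh_base_url": "tic.example.com:9985/tic-tvh",
#       "tvh_path": "/tic-tvh",
#       "tvh_api_url": "http://tic.example.com:9985/tic-tvh/api",
#       "tvh_http_url": "http://client:client@tic.example.com:9985/tic-tvh",
#       "stream_profile": "pass",
#       "stream_priority": 300,
#   }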
69 |
70 | async def _get_channels(playlist_id):
71 | return_channels = []
72 | from backend.channels import read_config_all_channels
73 | channels_config = await read_config_all_channels(filter_playlist_ids=[int(playlist_id)])
74 | for channel in channels_config:
75 | if channel['enabled']:
76 | return_channels.append(channel)
77 | return return_channels
78 |
79 |
80 | async def _get_playlist_connection_count(config, playlist_id):
81 | from backend.playlists import read_config_one_playlist
82 | try:
83 | playlist_config = await read_config_one_playlist(config, playlist_id)
84 | return playlist_config.get('connections', 1)
85 | except sqlalchemy.exc.NoResultFound:
86 | # Playlist not found, return default value
87 | return 1
88 |
89 |
90 | async def _get_discover_data(playlist_id=0):
91 | config = current_app.config['APP_CONFIG']
92 | settings = config.read_settings()
93 | tvh_settings = await _get_tvh_settings(include_auth=True)
94 | device_name = f'TVH-IPTV-Config-{playlist_id}'
95 | tuner_count = await _get_playlist_connection_count(config, playlist_id)
96 | device_id = f'tic-12345678-{playlist_id}'
97 | device_auth = f'tic-{playlist_id}'
98 | base_url = f'{tvh_settings["tic_base_url"]}/tic-api/hdhr_device/{playlist_id}'
99 | return {
100 | 'FriendlyName': device_name,
101 | 'Manufacturer': 'Tvheadend',
102 | 'ModelNumber': 'HDTC-2US',
103 | 'FirmwareName': 'bin_2.2.0',
104 | 'TunerCount': tuner_count,
105 | 'FirmwareVersion': '2.2.0',
106 | 'DeviceID': device_id,
107 | 'DeviceAuth': device_auth,
108 | 'BaseURL': base_url,
109 | 'LineupURL': f'{base_url}/lineup.json',
110 | }
111 |
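# Example discover.json payload produced above for playlist_id=1 (BaseURL
# depends on the configured app_url; TunerCount comes from the playlist's
# connection count, defaulting to 1 when the playlist is not found):
#
#   {
#       "FriendlyName": "TVH-IPTV-Config-1",
#       "Manufacturer": "Tvheadend",
#       "ModelNumber": "HDTC-2US",
#       "FirmwareName": "bin_2.2.0",
#       "TunerCount": 1,
#       "FirmwareVersion": "2.2.0",
#       "DeviceID": "tic-12345678-1",
#       "DeviceAuth": "tic-1",
#       "BaseURL": "http://tic.example.com:9985/tic-api/hdhr_device/1",
#       "LineupURL": "http://tic.example.com:9985/tic-api/hdhr_device/1/lineup.json"
#   }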
112 |
113 | async def _get_lineup_list(playlist_id):
114 | use_tvh_source = True
115 | tvh_settings = await _get_tvh_settings(include_auth=True)
116 | lineup_list = []
117 | from backend.epgs import generate_epg_channel_id
118 | for channel_details in await _get_channels(playlist_id):
119 | channel_id = generate_epg_channel_id(channel_details["number"], channel_details["name"])
120 | # TODO: Add support for fetching a stream from this application without using TVH as a proxy
121 | if use_tvh_source and channel_details.get('tvh_uuid'):
122 | channel_url = f'{tvh_settings["tvh_http_url"]}/stream/channel/{channel_details["tvh_uuid"]}'
123 | path_args = f'?profile={tvh_settings["stream_profile"]}&weight={tvh_settings["stream_priority"]}'
124 | url = f'{channel_url}{path_args}'
125 | lineup_list.append(
126 | {
127 | 'GuideNumber': channel_id,
128 | 'GuideName': channel_details['name'],
129 | 'URL': url
130 | }
131 | )
132 | return lineup_list
133 |
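# Each lineup entry generated above follows the HDHomeRun lineup.json shape,
# e.g. (placeholder uuid; GuideNumber is whatever generate_epg_channel_id
# builds from the channel number and name):
#
#   {"GuideNumber": "<channel-id>", "GuideName": "Channel One",
#    "URL": "http://client:client@tic.example.com:9985/tic-tvh/stream/channel/<uuid>?profile=pass&weight=300"}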
134 |
135 | async def _get_playlist_channels(playlist_id, include_auth=False, stream_profile='pass'):
136 | use_tvh_source = True
137 | tvh_settings = await _get_tvh_settings(include_auth=include_auth, stream_profile=stream_profile)
138 | playlist = [f'#EXTM3U url-tvg="{tvh_settings["tic_base_url"]}/tic-web/epg.xml"']
139 | from backend.epgs import generate_epg_channel_id
140 | for channel_details in await _get_channels(playlist_id):
141 |         current_app.logger.debug(channel_details)
142 | channel_id = generate_epg_channel_id(channel_details["number"], channel_details["name"])
143 | channel_name = channel_details['name']
144 | channel_logo_url = channel_details['logo_url']
145 | channel_uuid = channel_details['tvh_uuid']
146 | line = f'#EXTINF:-1 tvg-name="{channel_name}" tvg-logo="{channel_logo_url}" tvg-id="{channel_uuid}" tvg-chno="{channel_id}"'
147 | if channel_details['tags']:
148 | group_title = channel_details['tags'][0]
149 | line += f' group-title="{group_title}"'
150 | playlist.append(line)
151 | # TODO: Add support for fetching a stream from this application without using TVH as a proxy
152 | if use_tvh_source and channel_details.get('tvh_uuid'):
153 | channel_url = f'{tvh_settings["tvh_http_url"]}/stream/channel/{channel_details["tvh_uuid"]}'
154 | path_args = f'?profile={tvh_settings["stream_profile"]}&weight={tvh_settings["stream_priority"]}'
155 | url = f'{channel_url}{path_args}'
156 | playlist.append(url)
157 | return playlist
158 |
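# Example of the generated M3U body (placeholder values; tvg-id carries the
# TVH channel uuid and tvg-chno the id built by generate_epg_channel_id):
#
#   #EXTM3U url-tvg="http://tic.example.com:9985/tic-web/epg.xml"
#   #EXTINF:-1 tvg-name="Channel One" tvg-logo="http://logo.example/one.png" tvg-id="<uuid>" tvg-chno="<channel-id>" group-title="News"
#   http://client:client@tic.example.com:9985/tic-tvh/stream/channel/<uuid>?profile=pass&weight=300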
159 |
160 | @blueprint.route('/tic-api/hdhr_device/<playlist_id>/discover.json', methods=['GET'])
161 | async def discover_json(playlist_id):
162 | discover_data = await _get_discover_data(playlist_id=playlist_id)
163 | return jsonify(discover_data)
164 |
165 |
166 | @blueprint.route('/tic-api/hdhr_device/<playlist_id>/lineup.json', methods=['GET'])
167 | async def lineup_json(playlist_id):
168 | lineup_list = await _get_lineup_list(playlist_id)
169 | return jsonify(lineup_list)
170 |
171 |
172 | @blueprint.route('/tic-api/hdhr_device/<playlist_id>/lineup_status.json', methods=['GET'])
173 | async def lineup_status_json(playlist_id=None):
174 | return jsonify(
175 | {
176 | 'ScanInProgress': 0,
177 | 'ScanPossible': 0,
178 | 'Source': "Cable",
179 | 'SourceList': ['Cable']
180 | }
181 | )
182 |
183 |
184 | @blueprint.route('/tic-api/hdhr_device/<playlist_id>/lineup.post', methods=['GET', 'POST'])
185 | async def lineup_post(playlist_id=None):
186 | return ''
187 |
188 |
189 | @blueprint.route('/tic-api/hdhr_device/<playlist_id>/device.xml', methods=['GET'])
190 | async def device_xml(playlist_id):
191 | discover_data = await _get_discover_data(playlist_id)
192 | xml_content = await render_template_string(device_xml_template, data=discover_data)
193 | return Response(xml_content, mimetype='application/xml')
194 |
195 |
196 | @blueprint.route('/tic-api/tvh_playlist/<playlist_id>/channels.m3u', methods=['GET'])
197 | async def tvh_playlist_m3u(playlist_id):
198 | # Check for 'include_auth' GET argument
199 | include_auth = request.args.get('include_auth') != 'false'
200 | stream_profile = request.args.get('profile', 'pass')
201 |
202 | # Get the playlist channels
203 | file_lines = await _get_playlist_channels(playlist_id, include_auth=include_auth, stream_profile=stream_profile)
204 | # Join the lines to form the m3u content
205 | m3u_content = "\n".join(file_lines)
206 | # Create a response object with appropriate headers
207 | response = Response(m3u_content, mimetype='application/vnd.apple.mpegurl')
208 | response.headers['Content-Disposition'] = f'attachment; filename="{playlist_id}_channels.m3u"'
209 | return response
210 |
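# Usage sketch (hypothetical host): fetch playlist 1 as M3U without embedded
# credentials and with an explicit stream profile:
#
#   $ curl 'http://localhost:9985/tic-api/tvh_playlist/1/channels.m3u?include_auth=false&profile=pass'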
--------------------------------------------------------------------------------
/backend/config.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import base64
3 | import json
4 | import os
5 | import subprocess
6 |
7 | import aiofiles
8 | import yaml
9 | from mergedeep import merge
10 |
11 |
12 | def get_home_dir():
13 | home_dir = os.environ.get('HOME_DIR')
14 | if home_dir is None:
15 | home_dir = os.path.expanduser("~")
16 | return home_dir
17 |
18 |
19 | async def is_tvh_process_running_locally():
20 | process_name = 'tvheadend'
21 | try:
22 | process = await asyncio.create_subprocess_exec(
23 | 'pgrep', '-x', process_name,
24 | stdout=asyncio.subprocess.PIPE,
25 | stderr=asyncio.subprocess.PIPE
26 | )
27 |         # Wait for pgrep to exit; only its return code is needed
28 |         await process.communicate()
29 | 
30 |         # pgrep exits 0 when at least one matching process is found
31 |         return process.returncode == 0
32 | 
33 | except Exception as e:
34 | print(f"An error occurred: {e}")
35 | return False
36 |
37 |
38 | def is_tvh_process_running_locally_sync():
39 | process_name = 'tvheadend'
40 | try:
41 | result = subprocess.run(
42 | ['pgrep', '-x', process_name],
43 | stdout=subprocess.PIPE,
44 | stderr=subprocess.PIPE
45 | )
46 | 
47 |         # pgrep exits 0 when at least one matching process is found
48 |         return result.returncode == 0
49 | 
50 | except Exception as e:
51 | print(f"An error occurred: {e}")
52 | return False
53 |
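# Both variants above shell out to pgrep, so they only detect a tvheadend
# process in the same PID namespace (e.g. inside the AIO Docker container).
# A quick REPL sketch of the async variant:
#
#   >>> import asyncio
#   >>> asyncio.run(is_tvh_process_running_locally())
#   False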
54 |
55 | async def get_admin_file(directory):
56 | if os.path.exists(directory) and os.listdir(directory):
57 | for filename in os.listdir(directory):
58 | file_path = os.path.join(directory, filename)
59 | if os.path.isfile(file_path):
60 | async with aiofiles.open(file_path, 'r') as file:
61 | try:
62 | contents = await file.read()
63 | data = json.loads(contents)
64 | if data.get('username') == 'admin':
65 | return file_path, data
66 | except (json.JSONDecodeError, IOError) as e:
67 | print(f"Error processing file {file_path}: {e}")
68 | return None, None
69 |
70 |
71 | async def update_accesscontrol_files():
72 | accesscontrol_path = os.path.join(get_home_dir(), '.tvheadend', 'accesscontrol')
73 | file_path, data = await get_admin_file(accesscontrol_path)
74 | if data:
75 | data['prefix'] = '0.0.0.0/0,::/0'
76 | async with aiofiles.open(file_path, 'w') as outfile:
77 | await outfile.write(json.dumps(data, indent=4))
78 |
79 |
80 | async def get_local_tvh_proc_admin_password():
81 | passwd_path = os.path.join(get_home_dir(), '.tvheadend', 'passwd')
82 | file_path, data = await get_admin_file(passwd_path)
83 | if data:
84 | encoded_password = data.get('password2')
85 | try:
86 | decoded_password = base64.b64decode(encoded_password).decode('utf-8')
87 |             # Split at most twice so passwords that contain '-' stay intact
88 |             return decoded_password.split('-', 2)[2]
89 | except Exception as e:
90 | print(f"Error decoding password: {e}")
91 | return None
92 |
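# The decode above assumes Tvheadend's on-disk credential format, where the
# 'password2' field is base64("TVHeadend-Hide-<password>"); splitting at most
# twice on '-' therefore yields the plain-text password:
#
#   >>> base64.b64decode('VFZIZWFkZW5kLUhpZGUtYWRtaW4=').decode('utf-8')
#   'TVHeadend-Hide-admin'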
93 |
94 | def write_yaml(file, data):
95 | if not os.path.exists(os.path.dirname(file)):
96 | os.makedirs(os.path.dirname(file))
97 | with open(file, "w") as outfile:
98 | yaml.dump(data, outfile, default_flow_style=False)
99 |
100 |
101 | def read_yaml(file):
102 | if not os.path.exists(file):
103 | return {}
104 | with open(file, "r") as stream:
105 | try:
106 |             # safe_load returns None for an empty file; normalise to a dict
107 |             return yaml.safe_load(stream) or {}
108 |         except yaml.YAMLError as exc:
109 |             print(exc)
110 |             return {}
110 |
111 | def update_yaml(file, new_data):
112 | if not os.path.exists(os.path.dirname(file)):
113 | os.makedirs(os.path.dirname(file))
114 | data = read_yaml(file)
115 | merge(data, new_data)
116 | with open(file, "w") as outfile:
117 | yaml.dump(data, outfile, default_flow_style=False)
118 |
119 |
120 | def recursive_dict_update(defaults, updates):
121 | for key, value in updates.items():
122 | if isinstance(value, dict) and key in defaults:
123 | recursive_dict_update(defaults[key], value)
124 | else:
125 | defaults[key] = value
126 | return defaults
127 |
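# A minimal illustration of the in-place merge performed above:
#
#   >>> recursive_dict_update({'a': {'b': 1, 'c': 2}}, {'a': {'b': 9}})
#   {'a': {'b': 9, 'c': 2}}
#
# Note that 'defaults' itself is mutated; Config.read_settings() below relies
# on this to layer the saved YAML over self.default_settings.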
128 |
129 | class Config:
130 | runtime_key = ''
131 |
132 | def __init__(self, **kwargs):
133 | # Set default directories
134 | self.config_path = os.path.join(get_home_dir(), '.tvh_iptv_config')
135 | self.config_file = os.path.join(self.config_path, 'settings.yml')
136 | # Set default settings
137 | self.settings = None
138 | self.tvh_local = is_tvh_process_running_locally_sync()
139 | self.default_settings = {
140 | "settings": {
141 | "first_run": True,
142 | "tvheadend": {
143 | "host": "",
144 | "port": "9981",
145 | "path": "/",
146 | "username": "",
147 | "password": "",
148 | },
149 | "app_url": None,
150 | "enable_admin_user": True if self.tvh_local else False,
151 | "admin_password": "admin",
152 | "enable_stream_buffer": True,
153 | "default_ffmpeg_pipe_args": "-hide_banner -loglevel error "
154 | "-probesize 10M -analyzeduration 0 -fpsprobesize 0 "
155 | "-i [URL] -c copy -metadata service_name=[SERVICE_NAME] "
156 | "-f mpegts pipe:1",
157 | "create_client_user": True,
158 | "client_username": "client",
159 | "client_password": "client",
160 | "epgs": {
161 | "enable_tmdb_metadata": False,
162 | "tmdb_api_key": "",
163 | "enable_google_image_search_metadata": False,
164 | }
165 |
166 | }
167 | }
168 |
169 | def create_default_settings_yaml(self):
170 | self.write_settings_yaml(self.default_settings)
171 |
172 | def write_settings_yaml(self, data):
173 | write_yaml(self.config_file, data)
174 |
175 | def read_config_yaml(self):
176 | if not os.path.exists(self.config_file):
177 | self.create_default_settings_yaml()
178 | return read_yaml(self.config_file)
179 |
180 | def read_settings(self):
181 | yaml_settings = {}
182 | if self.settings is None:
183 | yaml_settings = self.read_config_yaml()
184 | self.settings = recursive_dict_update(self.default_settings, yaml_settings)
185 | return self.settings
186 |
187 | def save_settings(self):
188 | if self.settings is None:
189 |             self.read_settings()
190 | self.write_settings_yaml(self.settings)
191 |
192 | def update_settings(self, updated_settings):
193 | if self.settings is None:
194 | self.read_settings()
195 | self.settings = recursive_dict_update(self.default_settings, updated_settings)
196 |
197 | async def tvh_connection_settings(self):
198 | settings = await asyncio.to_thread(self.read_settings)
199 | if await is_tvh_process_running_locally():
200 | # Note: Host can be localhost here because the app will publish to TVH from the backend
201 | tvh_host = '127.0.0.1'
202 | tvh_port = '9981'
203 | tvh_path = '/tic-tvh'
204 | tvh_username = 'admin'
205 | tvh_password = await get_local_tvh_proc_admin_password()
206 | return {
207 | 'tvh_local': True,
208 | 'tvh_host': tvh_host,
209 | 'tvh_port': tvh_port,
210 | 'tvh_path': tvh_path,
211 | 'tvh_username': tvh_username,
212 | 'tvh_password': tvh_password,
213 | }
214 | return {
215 | 'tvh_local': False,
216 | 'tvh_host': settings['settings']['tvheadend']['host'],
217 | 'tvh_port': settings['settings']['tvheadend']['port'],
218 | 'tvh_path': settings['settings']['tvheadend']['path'],
219 | 'tvh_username': settings['settings']['tvheadend']['username'],
220 | 'tvh_password': settings['settings']['tvheadend']['password'],
221 | }
222 |
223 |
224 | frontend_dir = os.path.join(os.path.dirname(os.path.abspath(os.path.dirname(__file__))), 'frontend')
225 |
226 | enable_app_debugging = False
227 | if os.environ.get('ENABLE_APP_DEBUGGING', 'false').lower() == 'true':
228 | enable_app_debugging = True
229 |
230 | enable_sqlalchemy_debugging = False
231 | if os.environ.get('ENABLE_SQLALCHEMY_DEBUGGING', 'false').lower() == 'true':
232 | enable_sqlalchemy_debugging = True
233 |
234 | flask_run_host = os.environ.get('FLASK_RUN_HOST', '0.0.0.0')
235 | flask_run_port = int(os.environ.get('FLASK_RUN_PORT', '9985'))
236 |
237 | app_basedir = os.path.abspath(os.path.dirname(__file__))
238 | config_path = os.path.join(get_home_dir(), '.tvh_iptv_config')
239 | if not os.path.exists(config_path):
240 | os.makedirs(config_path)
241 |
242 | # Configure SQLite DB
243 | sqlalchemy_database_path = os.path.join(config_path, 'db.sqlite3')
244 | sqlalchemy_database_uri = 'sqlite:///' + sqlalchemy_database_path
245 | sqlalchemy_database_async_uri = 'sqlite+aiosqlite:///' + sqlalchemy_database_path
246 | sqlalchemy_track_modifications = False
247 |
248 | # Configure scheduler
249 | scheduler_api_enabled = True
250 |
251 | # Set up the App SECRET_KEY
252 | # SECRET_KEY = config('SECRET_KEY' , default='S#perS3crEt_007')
253 | secret_key = os.getenv('SECRET_KEY', 'S#perS3crEt_007')
254 |
255 | # Assets Management
256 | assets_root = os.getenv('ASSETS_ROOT', os.path.join(frontend_dir, 'dist', 'spa'))
257 |
--------------------------------------------------------------------------------
/migrations/versions/46f0f37aab7b_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 46f0f37aab7b
4 | Revises:
5 | Create Date: 2023-04-24 11:38:57.368281
6 |
7 | """
8 | from alembic import op
9 | import sqlalchemy as sa
10 |
11 |
12 | # revision identifiers, used by Alembic.
13 | revision = '46f0f37aab7b'
14 | down_revision = None
15 | branch_labels = None
16 | depends_on = None
17 |
18 |
19 | def upgrade():
20 | # ### commands auto generated by Alembic - please adjust! ###
21 | op.create_table('channel_tags',
22 | sa.Column('id', sa.Integer(), nullable=False),
23 | sa.Column('name', sa.String(length=64), nullable=True),
24 | sa.PrimaryKeyConstraint('id'),
25 | sa.UniqueConstraint('name')
26 | )
27 | op.create_table('epgs',
28 | sa.Column('id', sa.Integer(), nullable=False),
29 | sa.Column('enabled', sa.Boolean(), nullable=False),
30 | sa.Column('name', sa.String(length=500), nullable=True),
31 | sa.Column('url', sa.String(length=500), nullable=True),
32 | sa.PrimaryKeyConstraint('id')
33 | )
34 | with op.batch_alter_table('epgs', schema=None) as batch_op:
35 | batch_op.create_index(batch_op.f('ix_epgs_name'), ['name'], unique=False)
36 | batch_op.create_index(batch_op.f('ix_epgs_url'), ['url'], unique=False)
37 |
38 | op.create_table('playlists',
39 | sa.Column('id', sa.Integer(), nullable=False),
40 | sa.Column('enabled', sa.Boolean(), nullable=False),
41 | sa.Column('connections', sa.Integer(), nullable=False),
42 | sa.Column('name', sa.String(length=500), nullable=True),
43 | sa.Column('tvh_uuid', sa.String(length=64), nullable=True),
44 | sa.Column('url', sa.String(length=500), nullable=True),
45 | sa.PrimaryKeyConstraint('id')
46 | )
47 | with op.batch_alter_table('playlists', schema=None) as batch_op:
48 | batch_op.create_index(batch_op.f('ix_playlists_name'), ['name'], unique=False)
49 | batch_op.create_index(batch_op.f('ix_playlists_tvh_uuid'), ['tvh_uuid'], unique=True)
50 | batch_op.create_index(batch_op.f('ix_playlists_url'), ['url'], unique=False)
51 |
52 | op.create_table('channels',
53 | sa.Column('id', sa.Integer(), nullable=False),
54 | sa.Column('enabled', sa.Boolean(), nullable=False),
55 | sa.Column('name', sa.String(length=500), nullable=True),
56 | sa.Column('logo_url', sa.String(length=500), nullable=True),
57 | sa.Column('number', sa.Integer(), nullable=True),
58 | sa.Column('guide_id', sa.Integer(), nullable=True),
59 | sa.Column('guide_name', sa.String(length=256), nullable=True),
60 | sa.Column('guide_channel_id', sa.String(length=64), nullable=True),
61 | sa.ForeignKeyConstraint(['guide_id'], ['epgs.id'], ),
62 | sa.PrimaryKeyConstraint('id')
63 | )
64 | with op.batch_alter_table('channels', schema=None) as batch_op:
65 | batch_op.create_index(batch_op.f('ix_channels_logo_url'), ['logo_url'], unique=False)
66 | batch_op.create_index(batch_op.f('ix_channels_name'), ['name'], unique=False)
67 | batch_op.create_index(batch_op.f('ix_channels_number'), ['number'], unique=False)
68 |
69 | op.create_table('epg_channels',
70 | sa.Column('id', sa.Integer(), nullable=False),
71 | sa.Column('channel_id', sa.String(length=256), nullable=True),
72 | sa.Column('name', sa.String(length=500), nullable=True),
73 | sa.Column('icon_url', sa.String(length=500), nullable=True),
74 | sa.Column('epg_id', sa.Integer(), nullable=False),
75 | sa.ForeignKeyConstraint(['epg_id'], ['epgs.id'], ),
76 | sa.PrimaryKeyConstraint('id')
77 | )
78 | with op.batch_alter_table('epg_channels', schema=None) as batch_op:
79 | batch_op.create_index(batch_op.f('ix_epg_channels_channel_id'), ['channel_id'], unique=False)
80 | batch_op.create_index(batch_op.f('ix_epg_channels_name'), ['name'], unique=False)
81 |
82 | op.create_table('playlist_streams',
83 | sa.Column('id', sa.Integer(), nullable=False),
84 | sa.Column('name', sa.String(length=500), nullable=True),
85 | sa.Column('url', sa.String(length=500), nullable=True),
86 | sa.Column('channel_id', sa.String(length=500), nullable=True),
87 | sa.Column('group_title', sa.String(length=500), nullable=True),
88 | sa.Column('tvg_chno', sa.Integer(), nullable=True),
89 | sa.Column('tvg_id', sa.String(length=500), nullable=True),
90 | sa.Column('tvg_logo', sa.String(length=500), nullable=True),
91 | sa.Column('playlist_id', sa.Integer(), nullable=False),
92 | sa.ForeignKeyConstraint(['playlist_id'], ['playlists.id'], ),
93 | sa.PrimaryKeyConstraint('id')
94 | )
95 | with op.batch_alter_table('playlist_streams', schema=None) as batch_op:
96 | batch_op.create_index(batch_op.f('ix_playlist_streams_channel_id'), ['channel_id'], unique=False)
97 | batch_op.create_index(batch_op.f('ix_playlist_streams_group_title'), ['group_title'], unique=False)
98 | batch_op.create_index(batch_op.f('ix_playlist_streams_name'), ['name'], unique=False)
99 | batch_op.create_index(batch_op.f('ix_playlist_streams_tvg_id'), ['tvg_id'], unique=False)
100 | batch_op.create_index(batch_op.f('ix_playlist_streams_url'), ['url'], unique=False)
101 |
102 | op.create_table('channel_sources',
103 | sa.Column('id', sa.Integer(), nullable=False),
104 | sa.Column('channel_id', sa.Integer(), nullable=False),
105 | sa.Column('playlist_id', sa.Integer(), nullable=False),
106 | sa.Column('playlist_stream_name', sa.String(length=500), nullable=True),
107 | sa.Column('playlist_stream_url', sa.String(length=500), nullable=True),
108 | sa.Column('priority', sa.String(length=500), nullable=True),
109 | sa.Column('tvh_uuid', sa.String(length=500), nullable=True),
110 | sa.ForeignKeyConstraint(['channel_id'], ['channels.id'], ),
111 | sa.ForeignKeyConstraint(['playlist_id'], ['playlists.id'], ),
112 | sa.PrimaryKeyConstraint('id')
113 | )
114 | with op.batch_alter_table('channel_sources', schema=None) as batch_op:
115 | batch_op.create_index(batch_op.f('ix_channel_sources_playlist_stream_name'), ['playlist_stream_name'], unique=False)
116 | batch_op.create_index(batch_op.f('ix_channel_sources_playlist_stream_url'), ['playlist_stream_url'], unique=False)
117 | batch_op.create_index(batch_op.f('ix_channel_sources_priority'), ['priority'], unique=False)
118 | batch_op.create_index(batch_op.f('ix_channel_sources_tvh_uuid'), ['tvh_uuid'], unique=False)
119 |
120 | op.create_table('channels_tags_group',
121 | sa.Column('channel_id', sa.Integer(), nullable=True),
122 | sa.Column('tag_id', sa.Integer(), nullable=True),
123 | sa.ForeignKeyConstraint(['channel_id'], ['channels.id'], ),
124 | sa.ForeignKeyConstraint(['tag_id'], ['channel_tags.id'], )
125 | )
126 | op.create_table('epg_channel_programmes',
127 | sa.Column('id', sa.Integer(), nullable=False),
128 | sa.Column('channel_id', sa.String(length=256), nullable=True),
129 | sa.Column('title', sa.String(length=500), nullable=True),
130 | sa.Column('desc', sa.String(length=500), nullable=True),
131 | sa.Column('start', sa.String(length=256), nullable=True),
132 | sa.Column('stop', sa.String(length=256), nullable=True),
133 | sa.Column('start_timestamp', sa.String(length=256), nullable=True),
134 | sa.Column('stop_timestamp', sa.String(length=256), nullable=True),
135 | sa.Column('epg_channel_id', sa.Integer(), nullable=False),
136 | sa.ForeignKeyConstraint(['epg_channel_id'], ['epg_channels.id'], ),
137 | sa.PrimaryKeyConstraint('id')
138 | )
139 | with op.batch_alter_table('epg_channel_programmes', schema=None) as batch_op:
140 | batch_op.create_index(batch_op.f('ix_epg_channel_programmes_channel_id'), ['channel_id'], unique=False)
141 | batch_op.create_index(batch_op.f('ix_epg_channel_programmes_title'), ['title'], unique=False)
142 |
143 | # ### end Alembic commands ###
144 |
145 |
146 | def downgrade():
147 | # ### commands auto generated by Alembic - please adjust! ###
148 | with op.batch_alter_table('epg_channel_programmes', schema=None) as batch_op:
149 | batch_op.drop_index(batch_op.f('ix_epg_channel_programmes_title'))
150 | batch_op.drop_index(batch_op.f('ix_epg_channel_programmes_channel_id'))
151 |
152 | op.drop_table('epg_channel_programmes')
153 | op.drop_table('channels_tags_group')
154 | with op.batch_alter_table('channel_sources', schema=None) as batch_op:
155 | batch_op.drop_index(batch_op.f('ix_channel_sources_tvh_uuid'))
156 | batch_op.drop_index(batch_op.f('ix_channel_sources_priority'))
157 | batch_op.drop_index(batch_op.f('ix_channel_sources_playlist_stream_url'))
158 | batch_op.drop_index(batch_op.f('ix_channel_sources_playlist_stream_name'))
159 |
160 | op.drop_table('channel_sources')
161 | with op.batch_alter_table('playlist_streams', schema=None) as batch_op:
162 | batch_op.drop_index(batch_op.f('ix_playlist_streams_url'))
163 | batch_op.drop_index(batch_op.f('ix_playlist_streams_tvg_id'))
164 | batch_op.drop_index(batch_op.f('ix_playlist_streams_name'))
165 | batch_op.drop_index(batch_op.f('ix_playlist_streams_group_title'))
166 | batch_op.drop_index(batch_op.f('ix_playlist_streams_channel_id'))
167 |
168 | op.drop_table('playlist_streams')
169 | with op.batch_alter_table('epg_channels', schema=None) as batch_op:
170 | batch_op.drop_index(batch_op.f('ix_epg_channels_name'))
171 | batch_op.drop_index(batch_op.f('ix_epg_channels_channel_id'))
172 |
173 | op.drop_table('epg_channels')
174 | with op.batch_alter_table('channels', schema=None) as batch_op:
175 | batch_op.drop_index(batch_op.f('ix_channels_number'))
176 | batch_op.drop_index(batch_op.f('ix_channels_name'))
177 | batch_op.drop_index(batch_op.f('ix_channels_logo_url'))
178 |
179 | op.drop_table('channels')
180 | with op.batch_alter_table('playlists', schema=None) as batch_op:
181 | batch_op.drop_index(batch_op.f('ix_playlists_url'))
182 | batch_op.drop_index(batch_op.f('ix_playlists_tvh_uuid'))
183 | batch_op.drop_index(batch_op.f('ix_playlists_name'))
184 |
185 | op.drop_table('playlists')
186 | with op.batch_alter_table('epgs', schema=None) as batch_op:
187 | batch_op.drop_index(batch_op.f('ix_epgs_url'))
188 | batch_op.drop_index(batch_op.f('ix_epgs_name'))
189 |
190 | op.drop_table('epgs')
191 | op.drop_table('channel_tags')
192 | # ### end Alembic commands ###
193 |
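# This is the project's initial schema revision (down_revision = None).
# Assuming the standard Alembic workflow configured by alembic.ini in the
# repo root, it is applied with `alembic upgrade head` (presumably what the
# db-migrate.sh helper wraps).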
--------------------------------------------------------------------------------