├── .dockerignore ├── .github └── workflows │ ├── docker.yml │ └── upload_unraid_template.yml ├── .gitignore ├── .schema └── config.schema.json ├── Dockerfile ├── LICENSE ├── README.md ├── api.py ├── assets ├── Bebas-Regular.otf ├── Bebas-Regular.ttf ├── Roboto-Light.ttf ├── fade_out.mov └── overlay.mov ├── config.yaml.example ├── consts.py ├── docker-compose-dev.yml ├── docker-compose.yml ├── documentation └── images │ ├── logo.png │ ├── mascot.jpeg │ └── recently-added-preroll-example.png ├── ecosystem.config.json ├── entrypoint.sh ├── modules ├── __init__.py ├── config_parser.py ├── errors.py ├── ffmpeg_utils.py ├── files.py ├── logs.py ├── models.py ├── plex_connector.py ├── renderers │ ├── __init__.py │ ├── base.py │ └── recently_added.py ├── schedule_manager.py ├── statics.py ├── utils.py ├── webhooks │ ├── __init__.py │ ├── last_run.py │ ├── plex.py │ └── webhook_processor.py └── youtube_downloader.py ├── pm2_keepalive.py ├── requirements.txt ├── run.py └── templates └── plex_prerolls.xml /.dockerignore: -------------------------------------------------------------------------------- 1 | *.md 2 | LICENSE 3 | Dockerfile 4 | .dockerignore 5 | .gitignore 6 | .github 7 | .git 8 | .idea 9 | docker-compose.yml 10 | venv 11 | .venv 12 | __pycache__ 13 | documentation 14 | templates 15 | -------------------------------------------------------------------------------- /.github/workflows/docker.yml: -------------------------------------------------------------------------------- 1 | name: Build & Publish Docker image 2 | on: 3 | release: 4 | types: [ created ] 5 | secrets: 6 | DOCKER_USERNAME: 7 | required: true 8 | DOCKER_TOKEN: 9 | required: true 10 | workflow_dispatch: 11 | inputs: 12 | version: 13 | type: string 14 | description: Version number 15 | required: true 16 | jobs: 17 | publish: 18 | name: Build & Publish to DockerHub and GitHub Packages 19 | runs-on: ubuntu-latest 20 | if: contains(github.event.head_commit.message, '[no build]') == false 21 | steps: 22 | - name: Checkout 23 | uses: actions/checkout@v3 24 | with: 25 | fetch-depth: 0 26 | 27 | - name: Establish variables 28 | id: vars 29 | run: | 30 | echo "app_name=plex_prerolls" >> "$GITHUB_OUTPUT" 31 | echo "version=${{ github.event.inputs.version || github.event.release.tag_name }}" >> "$GITHUB_OUTPUT" 32 | echo "major_version=$(echo ${{ github.event.inputs.version || github.event.release.tag_name }} | cut -d '.' -f 1)" >> "$GITHUB_OUTPUT" 33 | echo "today=$(date +'%Y-%m-%d')" >> "$GITHUB_OUTPUT" 34 | echo "year=$(date +'%Y')" >> "$GITHUB_OUTPUT" 35 | 36 | - name: Update version number 37 | uses: jacobtomlinson/gha-find-replace@2.0.0 38 | with: 39 | find: "VERSIONADDEDBYGITHUB" 40 | replace: "${{ steps.vars.outputs.version }}" 41 | regex: false 42 | 43 | - name: Update copyright year 44 | uses: jacobtomlinson/gha-find-replace@2.0.0 45 | with: 46 | find: "YEARADDEDBYGITHUB" 47 | replace: "${{ steps.vars.outputs.year }}" 48 | regex: false 49 | 50 | - name: Set up QEMU 51 | uses: docker/setup-qemu-action@v2 52 | 53 | - name: Set up Docker Buildx 54 | uses: docker/setup-buildx-action@v2 55 | id: docker-buildx 56 | 57 | - name: Login to DockerHub 58 | uses: docker/login-action@v2 59 | with: 60 | username: ${{ secrets.DOCKER_USERNAME }} 61 | password: ${{ secrets.DOCKER_TOKEN }} 62 | 63 | - name: Login to GitHub Container Registry 64 | uses: docker/login-action@v2 65 | with: 66 | registry: ghcr.io 67 | username: ${{ github.repository_owner }} 68 | password: ${{ secrets.GITHUB_TOKEN }} 69 | 70 | - name: Login to Gitea Container Registry 71 | uses: docker/login-action@v3 72 | with: 73 | registry: ${{ secrets.GITEA_REGISTRY }} 74 | username: ${{ secrets.GITEA_USERNAME }} 75 | password: ${{ secrets.GITEA_TOKEN }} 76 | 77 | - name: Build and push 78 | uses: docker/build-push-action@v3 79 | with: 80 | builder: ${{ steps.docker-buildx.outputs.name }} 81 | context: . 82 | file: ./Dockerfile 83 | push: true 84 | platforms: linux/amd64,linux/armhf,linux/arm64 85 | tags: | 86 | nwithan8/${{ steps.vars.outputs.app_name }}:latest 87 | nwithan8/${{ steps.vars.outputs.app_name }}:${{ steps.vars.outputs.version }} 88 | nwithan8/${{ steps.vars.outputs.app_name }}:${{ steps.vars.outputs.major_version }} 89 | ghcr.io/nwithan8/${{ steps.vars.outputs.app_name }}:latest 90 | ghcr.io/nwithan8/${{ steps.vars.outputs.app_name }}:${{ steps.vars.outputs.version }} 91 | ghcr.io/nwithan8/${{ steps.vars.outputs.app_name }}:${{ steps.vars.outputs.major_version }} 92 | ${{ secrets.GITEA_REGISTRY }}/nwithan8/${{ steps.vars.outputs.app_name }}:latest 93 | ${{ secrets.GITEA_REGISTRY }}/nwithan8/${{ steps.vars.outputs.app_name }}:${{ steps.vars.outputs.version }} 94 | ${{ secrets.GITEA_REGISTRY }}/nwithan8/${{ steps.vars.outputs.app_name }}:${{ steps.vars.outputs.major_version }} 95 | labels: | 96 | org.opencontainers.image.title=${{ steps.vars.outputs.app_name }} 97 | org.opencontainers.image.version=${{ steps.vars.outputs.version }} 98 | org.opencontainers.image.created=${{ steps.vars.outputs.today }} 99 | -------------------------------------------------------------------------------- /.github/workflows/upload_unraid_template.yml: -------------------------------------------------------------------------------- 1 | name: Copy Unraid Community Applications template(s) to templates repository 2 | 3 | on: 4 | release: 5 | types: [ created ] 6 | workflow_dispatch: ~ 7 | 8 | jobs: 9 | copy: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@v3 14 | with: 15 | fetch-depth: 0 16 | 17 | - name: Establish variables 18 | id: vars 19 | run: | 20 | VERSION=${{ github.event.inputs.version || github.ref_name }} 21 | echo ::set-output name=version::${VERSION} 22 | echo ::set-output name=today::$(date +'%Y-%m-%d') 23 | 24 | - name: Open PR with template changes to unraid_templates 25 | uses: nwithan8/action-pull-request-another-repo@v1.1.1 26 | env: 27 | API_TOKEN_GITHUB: ${{ secrets.PR_OPEN_GITHUB_TOKEN }} 28 | with: 29 | # Will mirror folder structure (copying "templates" folder to "templates" folder in destination repo) 30 | source_folder: 'templates' 31 | destination_repo: 'nwithan8/unraid_templates' 32 | destination_base_branch: 'main' 33 | destination_head_branch: prerolls-${{ steps.vars.outputs.version }} 34 | user_email: 'nwithan8@users.noreply.github.com' 35 | user_name: 'nwithan8' 36 | pull_request_assignees: 'nwithan8' 37 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Exclude local Config files 2 | config.ini 3 | schedules.yaml 4 | 5 | #Dev environmet 6 | .devcontainer 7 | .vscode 8 | .markdownlint.json 9 | 10 | # Byte-compiled / optimized / DLL files 11 | __pycache__/ 12 | *.py[cod] 13 | *$py.class 14 | 15 | # C extensions 16 | *.so 17 | 18 | # Distribution / packaging 19 | .Python 20 | build/ 21 | develop-eggs/ 22 | dist/ 23 | downloads/ 24 | eggs/ 25 | .eggs/ 26 | lib/ 27 | lib64/ 28 | parts/ 29 | sdist/ 30 | var/ 31 | wheels/ 32 | pip-wheel-metadata/ 33 | share/python-wheels/ 34 | *.egg-info/ 35 | .installed.cfg 36 | *.egg 37 | MANIFEST 38 | typings/ 39 | 40 | # PyInstaller 41 | # Usually these files are written by a python script from a template 42 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 43 | *.manifest 44 | *.spec 45 | 46 | # Installer logs 47 | pip-log.txt 48 | pip-delete-this-directory.txt 49 | 50 | # Unit test / coverage reports 51 | htmlcov/ 52 | .tox/ 53 | .nox/ 54 | .coverage 55 | .coverage.* 56 | .cache 57 | nosetests.xml 58 | coverage.xml 59 | *.cover 60 | *.py,cover 61 | .hypothesis/ 62 | .pytest_cache/ 63 | test/ 64 | 65 | # Translations 66 | *.mo 67 | *.pot 68 | 69 | # Logging items 70 | #/log/ 71 | *.log 72 | *.log.* 73 | 74 | # Django stuff: 75 | local_settings.py 76 | db.sqlite3 77 | db.sqlite3-journal 78 | 79 | # Flask stuff: 80 | instance/ 81 | .webassets-cache 82 | 83 | # Scrapy stuff: 84 | .scrapy 85 | 86 | # Sphinx documentation 87 | docs/_build/ 88 | 89 | # PyBuilder 90 | target/ 91 | 92 | # Jupyter Notebook 93 | .ipynb_checkpoints 94 | 95 | # IPython 96 | profile_default/ 97 | ipython_config.py 98 | 99 | # pyenv 100 | .python-version 101 | 102 | # pipenv 103 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 104 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 105 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 106 | # install all needed dependencies. 107 | #Pipfile.lock 108 | 109 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 110 | __pypackages__/ 111 | 112 | # Celery stuff 113 | celerybeat-schedule 114 | celerybeat.pid 115 | 116 | # SageMath parsed files 117 | *.sage.py 118 | 119 | # Environments 120 | .env 121 | .venv 122 | env/ 123 | venv/ 124 | ENV/ 125 | env.bak/ 126 | venv.bak/ 127 | 128 | # Spyder project settings 129 | .spyderproject 130 | .spyproject 131 | 132 | # Rope project settings 133 | .ropeproject 134 | 135 | # mkdocs documentation 136 | /site 137 | 138 | # mypy 139 | .mypy_cache/ 140 | .dmypy.json 141 | dmypy.json 142 | #mypy.ini 143 | 144 | # Pyre type checker 145 | .pyre/ 146 | 147 | # Custom 148 | /scratch.py 149 | on_host -------------------------------------------------------------------------------- /.schema/config.schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$id": "https://github.com/nwithan8/plex-prerolls/.schema/config.schema.json", 3 | "$schema": "http://json-schema.org/draft-07/schema#", 4 | "$comment": "https://github.com/nwithan8/plex-prerolls", 5 | "title": "Plex Prerolls Config Schema", 6 | "type": "object", 7 | "additionalProperties": true, 8 | "definitions": { 9 | "date": { 10 | "id": "#/definitions/date", 11 | "type": "string", 12 | "format": "date-time" 13 | }, 14 | "cron": { 15 | "id": "#/definitions/cron", 16 | "type": "string", 17 | "pattern": "^([0-5]?\\d) ([01]?\\d|2[0-3]) ([1-9]|[12]\\d|3[01]) ([1-9]|1[012]) ([0-6])$", 18 | "description": "Cron expression in the format: `minute hour day month weekday`" 19 | }, 20 | "positiveInteger": { 21 | "id": "#/definitions/positiveInteger", 22 | "type": "integer", 23 | "minimum": 1 24 | }, 25 | "emptyString": { 26 | "id": "#/definitions/emptyString", 27 | "type": "string", 28 | "pattern": "^$" 29 | }, 30 | "emptyableString": { 31 | "id": "#/definitions/emptyableString", 32 | "oneOf": [ 33 | { 34 | "$ref": "#/definitions/emptyString" 35 | }, 36 | { 37 | "type": "string" 38 | } 39 | ] 40 | }, 41 | "hostWithIpAndOptionalPort": { 42 | "id": "#/definitions/hostWithIpAndOptionalPort", 43 | "type": "string", 44 | "pattern": "^(https?://)?[a-zA-Z0-9.-]+(:[0-9]{1,5})?$" 45 | }, 46 | "plexToken": { 47 | "title": "Plex token", 48 | "id": "#/definitions/plexToken", 49 | "type": "string" 50 | }, 51 | "weekNumber": { 52 | "title": "Week of the year number", 53 | "description": "The week of the year number", 54 | "id": "#/definitions/weekNumber", 55 | "type": "integer", 56 | "minimum": 1, 57 | "maximum": 53 58 | }, 59 | "monthNumber": { 60 | "title": "Month number", 61 | "id": "#/definitions/monthNumber", 62 | "type": "integer", 63 | "minimum": 1, 64 | "maximum": 12 65 | }, 66 | "enabled": { 67 | "title": "Enabled", 68 | "id": "#/definitions/enabled", 69 | "description": "Whether the feature is enabled", 70 | "type": "boolean" 71 | }, 72 | "startDate": { 73 | "title": "Start date", 74 | "description": "The start date of the range", 75 | "$ref": "#/definitions/date" 76 | }, 77 | "endDate": { 78 | "title": "End date", 79 | "description": "The end date of the range", 80 | "$ref": "#/definitions/date" 81 | }, 82 | "disableAlways": { 83 | "title": "Disable always prerolls", 84 | "id": "#/definitions/disableAlways", 85 | "description": "Whether to disable always prerolls when this schedule is active", 86 | "type": "boolean" 87 | }, 88 | "weight": { 89 | "title": "Weight", 90 | "description": "The weight of the preroll for this range", 91 | "$ref": "#/definitions/positiveInteger" 92 | }, 93 | "videoFilePath": { 94 | "title": "Video file path", 95 | "id": "#/definitions/videoFilePath", 96 | "type": "string", 97 | "description": "A path to a media file", 98 | "pattern": "^.+\\.(mp4|mkv|avi|mov|wmv|flv|webm)$" 99 | }, 100 | "paths": { 101 | "title": "Video file paths", 102 | "id": "#/definitions/paths", 103 | "type": "array", 104 | "items": { 105 | "description": "A path to a video file", 106 | "$ref": "#/definitions/videoFilePath" 107 | } 108 | }, 109 | "globPattern": { 110 | "title": "Glob pattern", 111 | "id": "#/definitions/globPattern", 112 | "type": "string", 113 | "description": "A glob pattern to match files" 114 | }, 115 | "pathGlobbing": { 116 | "title": "Path globbing", 117 | "description": "Settings for path globbing", 118 | "id": "#/definitions/pathGlobbing", 119 | "type": "object", 120 | "properties": { 121 | "enabled": { 122 | "$ref": "#/definitions/enabled" 123 | }, 124 | "pairs": { 125 | "description": "The pairs of paths to match and replace", 126 | "type": "array", 127 | "items": { 128 | "type": "object", 129 | "properties": { 130 | "root_path": { 131 | "description": "The local root path to match", 132 | "type": "string" 133 | }, 134 | "plex_path": { 135 | "description": "The remote Plex path to replace", 136 | "type": "string" 137 | }, 138 | "patterns": { 139 | "description": "The glob patterns to match", 140 | "type": "array", 141 | "items": { 142 | "$ref": "#/definitions/globPattern" 143 | } 144 | } 145 | }, 146 | "required": [ 147 | "root_path", 148 | "plex_path", 149 | "patterns" 150 | ] 151 | } 152 | } 153 | } 154 | } 155 | }, 156 | "properties": { 157 | "run": { 158 | "title": "Run configuration", 159 | "description": "Settings for running the application", 160 | "type": "object", 161 | "properties": { 162 | "schedule": { 163 | "title": "Cron schedule", 164 | "description": "The cron schedule for running the application", 165 | "$ref": "#/definitions/cron", 166 | "default": "0 0 * * *" 167 | }, 168 | "dry_run": { 169 | "title": "Dry run", 170 | "description": "Whether to run in dry run mode (don't actually make changes to Plex)", 171 | "type": "boolean", 172 | "default": false 173 | } 174 | } 175 | }, 176 | "plex": { 177 | "title": "Plex configuration", 178 | "description": "Settings for the Plex server", 179 | "type": "object", 180 | "properties": { 181 | "url": { 182 | "title": "Plex URL", 183 | "description": "The URL of the Plex server", 184 | "$ref": "#/definitions/hostWithIpAndOptionalPort" 185 | }, 186 | "token": { 187 | "title": "Plex token", 188 | "description": "The token for the Plex server", 189 | "$ref": "#/definitions/plexToken" 190 | } 191 | }, 192 | "required": [ 193 | "url", 194 | "token" 195 | ] 196 | }, 197 | "always": { 198 | "title": "Always preroll configuration", 199 | "description": "Configuration for always-included prerolls", 200 | "type": "object", 201 | "properties": { 202 | "enabled": { 203 | "title": "Always preroll enabled", 204 | "description": "Whether the always preroll is enabled", 205 | "$ref": "#/definitions/enabled" 206 | }, 207 | "paths": { 208 | "title": "Always preroll paths", 209 | "description": "Paths to media files to always include as prerolls", 210 | "$ref": "#/definitions/paths" 211 | }, 212 | "path_globbing": { 213 | "$ref": "#/definitions/pathGlobbing" 214 | }, 215 | "weight": { 216 | "$ref": "#/definitions/weight" 217 | }, 218 | "count": { 219 | "title": "Always preroll count", 220 | "description": "The number of prerolls to include", 221 | "$ref": "#/definitions/positiveInteger" 222 | } 223 | }, 224 | "required": [ 225 | "enabled" 226 | ] 227 | }, 228 | "date_range": { 229 | "title": "Date range preroll configuration", 230 | "description": "Configuration for date range-based prerolls", 231 | "type": "object", 232 | "properties": { 233 | "enabled": { 234 | "title": "Date range preroll enabled", 235 | "description": "Whether the date range preroll is enabled", 236 | "$ref": "#/definitions/enabled" 237 | }, 238 | "ranges": { 239 | "title": "Date ranges", 240 | "description": "Date ranges for which to include prerolls", 241 | "type": "array", 242 | "items": { 243 | "type": "object", 244 | "properties": { 245 | "name": { 246 | "title": "Name", 247 | "description": "The name of the date range", 248 | "type": "string" 249 | }, 250 | "start_date": { 251 | "$ref": "#/definitions/startDate" 252 | }, 253 | "end_date": { 254 | "$ref": "#/definitions/endDate" 255 | }, 256 | "paths": { 257 | "title": "Date range preroll paths", 258 | "description": "Paths to media files to include as prerolls for this range", 259 | "$ref": "#/definitions/paths" 260 | }, 261 | "path_globbing": { 262 | "title": "Path globbing", 263 | "description": "Settings for path globbing", 264 | "$ref": "#/definitions/pathGlobbing" 265 | }, 266 | "weight": { 267 | "$ref": "#/definitions/weight" 268 | }, 269 | "disable_always": { 270 | "$ref": "#/definitions/disableAlways" 271 | } 272 | }, 273 | "required": [ 274 | "start_date", 275 | "end_date" 276 | ] 277 | } 278 | } 279 | }, 280 | "required": [ 281 | "enabled", 282 | "ranges" 283 | ] 284 | }, 285 | "weekly": { 286 | "title": "Weekly preroll configuration", 287 | "description": "Configuration for weekly-based prerolls", 288 | "type": "object", 289 | "properties": { 290 | "enabled": { 291 | "title": "Weekly preroll enabled", 292 | "description": "Whether the weekly preroll is enabled", 293 | "$ref": "#/definitions/enabled" 294 | }, 295 | "weeks": { 296 | "title": "Configuration for weekly-based prerolls", 297 | "description": "Configuration for weekly-based prerolls", 298 | "type": "array", 299 | "items": { 300 | "type": "object", 301 | "properties": { 302 | "number": { 303 | "$ref": "#/definitions/weekNumber" 304 | }, 305 | "paths": { 306 | "title": "Weekly preroll paths", 307 | "description": "Paths to media files to include as prerolls for this week", 308 | "$ref": "#/definitions/paths" 309 | }, 310 | "path_globbing": { 311 | "title": "Path globbing", 312 | "description": "Settings for path globbing", 313 | "$ref": "#/definitions/pathGlobbing" 314 | }, 315 | "weight": { 316 | "$ref": "#/definitions/weight" 317 | }, 318 | "disable_always": { 319 | "$ref": "#/definitions/disableAlways" 320 | } 321 | }, 322 | "required": [ 323 | "number" 324 | ] 325 | } 326 | } 327 | }, 328 | "required": [ 329 | "enabled", 330 | "weeks" 331 | ] 332 | }, 333 | "monthly": { 334 | "title": "Monthly preroll configuration", 335 | "description": "Configuration for monthly-based prerolls", 336 | "type": "object", 337 | "properties": { 338 | "enabled": { 339 | "title": "Monthly preroll enabled", 340 | "description": "Whether the monthly preroll is enabled", 341 | "$ref": "#/definitions/enabled" 342 | }, 343 | "months": { 344 | "title": "Configuration for monthly-based prerolls", 345 | "description": "Configuration for monthly-based prerolls", 346 | "type": "array", 347 | "items": { 348 | "type": "object", 349 | "properties": { 350 | "number": { 351 | "$ref": "#/definitions/monthNumber" 352 | }, 353 | "paths": { 354 | "title": "Monthly preroll paths", 355 | "description": "Paths to media files to include as prerolls for this month", 356 | "$ref": "#/definitions/paths" 357 | }, 358 | "path_globbing": { 359 | "title": "Path globbing", 360 | "description": "Settings for path globbing", 361 | "$ref": "#/definitions/pathGlobbing" 362 | }, 363 | "weight": { 364 | "$ref": "#/definitions/weight" 365 | }, 366 | "disable_always": { 367 | "$ref": "#/definitions/disableAlways" 368 | } 369 | }, 370 | "required": [ 371 | "number" 372 | ] 373 | } 374 | } 375 | }, 376 | "required": [ 377 | "enabled", 378 | "months" 379 | ] 380 | }, 381 | "advanced": { 382 | "title": "Advanced configuration", 383 | "description": "Advanced configuration options", 384 | "type": "object", 385 | "properties": { 386 | "auto_generation": { 387 | "title": "Auto generation", 388 | "description": "Settings for auto-generating prerolls", 389 | "type": "object", 390 | "properties": { 391 | "plex_path": { 392 | "title": "Plex path for auto generation", 393 | "description": "The path for the Plex Media Server to use to access auto-generated prerolls", 394 | "type": "string" 395 | }, 396 | "recently_added": { 397 | "title": "Recently added", 398 | "description": "Settings for auto-generating prerolls from recently added media", 399 | "type": "object", 400 | "properties": { 401 | "enabled": { 402 | "title": "Recently added auto generation enabled", 403 | "description": "Whether auto generation from recently added media is enabled", 404 | "$ref": "#/definitions/enabled" 405 | }, 406 | "count": { 407 | "title": "Recently added count", 408 | "description": "The number of recently added media to include as prerolls", 409 | "$ref": "#/definitions/positiveInteger" 410 | }, 411 | "trailer_cutoff_year": { 412 | "title": "Trailer cutoff year", 413 | "description": "The year to use as a cutoff for trailers. Default is 1980", 414 | "$ref": "#/definitions/positiveInteger" 415 | } 416 | }, 417 | "required": [ 418 | "enabled", 419 | "count" 420 | ] 421 | } 422 | }, 423 | "required": [ 424 | "plex_path", 425 | "recently_added" 426 | ] 427 | } 428 | } 429 | } 430 | }, 431 | "required": [ 432 | "plex", 433 | "advanced" 434 | ] 435 | } 436 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Node.js 18.19 and Python 3.11.x pre-installed on Alpine Linux 3.19 2 | FROM nwithan8/python-3.x-node-18.19.0-alpine3.19:latest 3 | WORKDIR / 4 | 5 | # Copy requirements.txt from build machine to WORKDIR (/app) folder (important we do this BEFORE copying the rest of the files to avoid re-running pip install on every code change) 6 | COPY requirements.txt requirements.txt 7 | 8 | # Python virtual environment already exists in base image as /app/venv 9 | 10 | # Install Python requirements 11 | # Ref: https://github.com/python-pillow/Pillow/issues/1763 12 | RUN LIBRARY_PATH=/lib:/usr/lib /bin/sh -c "/app/venv/bin/pip install --no-cache-dir setuptools_rust" # https://github.com/docker/compose/issues/8105#issuecomment-775931324 13 | RUN LIBRARY_PATH=/lib:/usr/lib /bin/sh -c "/app/venv/bin/pip install --no-cache-dir -r requirements.txt" 14 | 15 | # Make Docker /config volume for optional config file 16 | VOLUME /config 17 | 18 | # Copy config file from build machine to Docker /config folder 19 | COPY config.yaml* /config/ 20 | 21 | # Make Docker /logs volume for log file 22 | VOLUME /logs 23 | 24 | # Make Docker /render volume for rendered files 25 | VOLUME /renders 26 | 27 | # Make Docker /auto-rolls volume for completed auto-rolls files 28 | VOLUME /auto_rolls 29 | 30 | # Copy source code from build machine to WORKDIR (/app) folder 31 | COPY . . 32 | 33 | # Delete unnecessary files in WORKDIR (/app) folder (not caught by .dockerignore) 34 | RUN echo "**** removing unneeded files ****" 35 | # Remove all files except .py files and entrypoint.sh (keep all directories) 36 | # RUN find / -type f -maxdepth 1 ! -name '*.py' ! -name 'entrypoint.sh' -delete 37 | 38 | # Run entrypoint.sh script 39 | ENTRYPOINT ["sh", "/entrypoint.sh"] 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | 635 | Copyright (C) 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | Copyright (C) 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | logo 3 |

Plex Prerolls

4 |

A script to automate management of Plex pre-rolls.

5 |
6 | 7 | --- 8 | 9 | ## Installation and Usage 10 | 11 | ### Run Script Directly 12 | 13 | With the introduction of webhook ingestion and auto-generation of prerolls, it is no longer advised to run this 14 | application as a direct Python script. Please use the [Docker container](#run-as-docker-container) instead. 15 | 16 | ### Run as Docker Container 17 | 18 | #### Requirements 19 | 20 | - Docker 21 | 22 | #### Docker Compose 23 | 24 | Complete the provided `docker-compose.yml` file and run: 25 | 26 | ```sh 27 | docker-compose up -d 28 | ``` 29 | 30 | #### Docker CLI 31 | 32 | ```sh 33 | docker run -d \ 34 | --name=plex_prerolls \ 35 | -p 8283:8283 \ 36 | -e PUID=1000 \ 37 | -e PGID=1000 \ 38 | -e TZ=Etc/UTC \ 39 | -v /path/to/config:/ \ 40 | -v /path/to/logs:/logs \ 41 | -v /path/to/preroll/files:/files \ 42 | -v /path/to/auto-generated/rolls/temp:/renders \ 43 | -v /path/to/auto-generated/rolls/parent:/auto_rolls \ 44 | --restart unless-stopped \ 45 | nwithan8/plex_prerolls:latest 46 | ``` 47 | 48 | #### Paths and Environment Variables 49 | 50 | | Path | Description | 51 | |---------------|------------------------------------------------------------------------------------------------------------------------------| 52 | | `/config` | Path to config directory (`config.yaml` should be in this directory) | 53 | | `/logs` | Path to log directory (`Plex Prerolls.log` will be in this directory) | 54 | | `/files` | Path to the root directory of all preroll files (for [Path Globbing](#path-globbing) feature) | 55 | | `/auto_rolls` | Path to the root directory where all [auto-generated prerolls files](#auto-generation) will be stored | 56 | | `/renders` | Path to where [auto-generated prerolls](#auto-generation) and associated assets will be temporarily stored during generation | 57 | 58 | | Environment Variable | Description | 59 | |----------------------|-------------------------------------------------------------------| 60 | | `PUID` | UID of user to run as | 61 | | `PGID` | GID of user to run as | 62 | | `TZ` | Timezone to use for cron schedule | 63 | 64 | --- 65 | 66 | ## Schedule Rules 67 | 68 | Any entry whose schedule falls within the current date/time at the time of execution will be added to the preroll. 69 | 70 | You can define as many schedules as you want, in the following categories (order does not matter): 71 | 72 | 1. **always**: Items listed here will always be included (appended) to the preroll list 73 | - If you have a large set of prerolls, you can provide all paths and use `count` to randomly select a smaller 74 | subset of the list to use on each run. 75 | 76 | 2. **date_range**: Schedule based on a specific date/time range (including [wildcards](#date-range-section-scheduling)) 77 | 78 | 3. **weekly**: Schedule based on a specific week of the year 79 | 80 | 4. **monthly**: Schedule based on a specific month of the year 81 | 82 | ### Advanced Scheduling 83 | 84 | #### Weight 85 | 86 | All schedule entries accept an optional `weight` value that can be used to adjust the emphasis of this entry over 87 | others by adding the listed paths multiple times. Since Plex selects a random preroll from the list of paths, having the 88 | same path listed multiple times increases its chances of being selected over paths that only appear once. This allows 89 | you to combine, e.g. a `date_range` entry with an `always` entry, but place more weight/emphasis on the `date_range` 90 | entry. 91 | 92 | ```yaml 93 | date_range: 94 | enabled: true 95 | ranges: 96 | - start_date: 2020-01-01 # Jan 1st, 2020 97 | end_date: 2020-01-02 # Jan 2nd, 2020 98 | paths: 99 | - /path/to/video.mp4 100 | - /path/to/another/video.mp4 101 | weight: 2 # Add these paths to the list twice (make up greater percentage of prerolls - more likely to be selected) 102 | ``` 103 | 104 | #### Disable Always 105 | 106 | Any schedule entry (except for the `always` section) can disable the inclusion of the `always` section by setting the 107 | `disable_always` value to `true`. This can be useful if you want to make one specific, i.e. `date_range` entry for a 108 | holiday, 109 | and you don't want to include the `always` section for this specific holiday, but you still want to include the `always` 110 | section 111 | for other holidays. 112 | 113 | ```yaml 114 | date_range: 115 | enabled: true 116 | ranges: 117 | - start_date: 2020-01-01 # Jan 1st, 2020 118 | end_date: 2020-01-02 # Jan 2nd, 2020 119 | paths: 120 | - /path/to/video.mp4 121 | - /path/to/another/video.mp4 122 | disable_always: true # Disable the inclusion of the `always` section when this entry is active 123 | ``` 124 | 125 | #### Path Globbing 126 | 127 | **NOTE**: This feature will only work if you are running the Docker container on the same machine as your Plex 128 | server. 129 | 130 | Instead of listing out each individual preroll file, you can use glob (wildcard) patterns to match multiple files in a 131 | specific directory. 132 | 133 | The application will search for all files on your local filesystem that match the pattern(s) and automatically translate 134 | them to Plex-compatible remote paths. 135 | 136 | ##### Setup 137 | 138 | Enable the feature under the `path_globbing` section of each schedule. 139 | 140 | Each `pair` is a local (`root_path`) path and remote (`plex_path`) path that correspond to each other. 141 | The `patterns` list is a list of glob patterns that will be searched for in the `root_path` directory and translated to 142 | the `plex_path`-directory equivalent. 143 | 144 | You can provide multiple `pairs` to match multiple local-remote directory pairs and multiple subsequent glob patterns. 145 | 146 | ```yaml 147 | path_globbing: 148 | enabled: true 149 | pairs: 150 | - root_path: /files # The root folder to use for globbing 151 | plex_path: /path/to/prerolls/in/plex # The path to use for the Plex server 152 | patterns: 153 | - "local/path/to/prerolls/*.mp4" # The pattern to look for in the root_path 154 | - "local/path/to/prerolls/*.mkv" # The pattern to look for in the root_path 155 | - root_path: /other/files 156 | plex_path: /path/to/other/prerolls/in/plex 157 | patterns: 158 | - "local/path/to/prerolls/*.mp4" 159 | - "local/path/to/prerolls/*.mkv" 160 | ``` 161 | 162 | For example, if your prerolls on your file system are located at `/mnt/user/media/prerolls` and Plex sees them at 163 | `/media/prerolls`, you would set the `root_path` to `/mnt/user/media/prerolls` and the `plex_path` to `/media/prerolls`. 164 | 165 | If you are using the Docker container, you can mount the preroll directory to the container at any location you would 166 | prefer (recommended: `/files`) and set the `root_path` accordingly. Although you can define multiple roots, it is 167 | recommended to use a single all-encompassing root folder and rely on more-detailed glob patterns to match files in 168 | specific subdirectories. 169 | 170 | If you are using the Unraid version of this container, the "Files Path" path is mapped to `/files` by default; you 171 | should set `root_path` to `/files` and `plex_path` to the same directory as seen by Plex. 172 | 173 | #### Usage 174 | 175 | In any schedule section, you can use the `path_globbing` key to specify glob pattern rules to match files. 176 | 177 | ```yaml 178 | always: 179 | enabled: true 180 | paths: 181 | - /remote/path/1.mp4 182 | - /remote/path/2.mp4 183 | - /remote/path/3.mp4 184 | path_globbing: 185 | enabled: true 186 | pairs: 187 | - root_path: /files 188 | plex_path: /path/to/prerolls/in/plex 189 | patterns: 190 | - "*.mp4" 191 | ``` 192 | 193 | The above example will match all `.mp4` files in the `root_path` directory and append them to the list of prerolls. 194 | 195 | If you have organized your prerolls into subdirectories, you can specify specific subdirectories to match, or use `**` 196 | to match all subdirectories. 197 | 198 | ```yaml 199 | always: 200 | enabled: true 201 | paths: 202 | - /remote/path/1.mp4 203 | - /remote/path/2.mp4 204 | - /remote/path/3.mp4 205 | path_globbing: 206 | enabled: true 207 | pairs: 208 | - root_path: /files 209 | plex_path: /path/to/prerolls/in/plex 210 | patterns: 211 | - "subdir1/*.mp4" 212 | - "subdir2/*.mp4" 213 | - "subdir3/**/*.mp4" 214 | ``` 215 | 216 | You can use both `paths` and `path_globbing` in the same section, allowing you to mix and match specific files with glob 217 | patterns. Please note that `paths` entries must be fully-qualified **remote** paths (as seen by Plex), while `pattern` 218 | entries in `path_globbing` are relative to the **local** `root_path` directory. 219 | 220 | #### Date Range Section Scheduling 221 | 222 | `date_range` entries can accept both dates (`yyyy-mm-dd`) and datetimes (`yyyy-mm-dd hh:mm:ss`, 24-hour time). 223 | 224 | `date_range` entries can also accept wildcards for any of the date/time fields. This can be useful for scheduling 225 | recurring events, such as annual events, "first-of-the-month" events, or even hourly events. 226 | 227 | ```yaml 228 | date_range: 229 | enabled: true 230 | ranges: 231 | # Each entry requires start_date, end_date, path values 232 | - start_date: 2020-01-01 # Jan 1st, 2020 233 | end_date: 2020-01-02 # Jan 2nd, 2020 234 | paths: 235 | - /path/to/video.mp4 236 | - /path/to/another/video.mp4 237 | - start_date: xxxx-07-04 # Every year on July 4th 238 | end_date: xxxx-07-04 # Every year on July 4th 239 | paths: 240 | - /path/to/video.mp4 241 | - /path/to/another/video.mp4 242 | - name: "My Schedule" # Optional name for logging purposes 243 | start_date: xxxx-xx-02 # Every year on the 2nd of every month 244 | end_date: xxxx-xx-03 # Every year on the 3rd of every month 245 | paths: 246 | - /path/to/video.mp4 247 | - /path/to/another/video.mp4 248 | - start_date: xxxx-xx-xx 08:00:00 # Every day at 8am 249 | end_date: xxxx-xx-xx 09:30:00 # Every day at 9:30am 250 | paths: 251 | - /path/to/video.mp4 252 | - /path/to/another/video.mp4 253 | ``` 254 | 255 | You should [adjust your cron schedule](#scheduling-script) to run the script more frequently if you use this feature. 256 | 257 | `date_range` entries also accept an optional `name` value that can be used to identify the schedule in the logs. 258 | 259 | --- 260 | 261 | ## Advanced Configuration 262 | 263 | ### Auto-Generation 264 | 265 | **NOTE**: This feature will only work if you are running the Docker container on the same machine as your Plex 266 | server. 267 | 268 | **NOTE**: This feature relies on Plex webhooks, which require a Plex Pass subscription. 269 | 270 | Plex Prerolls can automatically generate prerolls, store the generated files in a specified directory and include them 271 | in the list of prerolls. 272 | 273 | #### "Recently Added Media" Pre-Rolls 274 | 275 | The application can generate trailer-like prerolls for each new media item added to your library (with a rolling total, 276 | defaults to 10 items). 277 | 278 | This is done by receiving a webhook from Plex when new media is added, retrieving a trailer and soundtrack (via YouTube) 279 | as well as poster and metadata for the media item, and generating a preroll from these assets. 280 | 281 | Example of a generated preroll: 282 | 283 | logo 284 | 285 | > :warning: This feature requires [extracting cookies for YouTube](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#how-do-i-pass-cookies-to-yt-dlp) and storing them in a file called `yt_dlp_cookies.txt` alongside your `config.yaml` file. 286 | 287 | ##### Setup 288 | 289 | [Set up a Plex webhook](https://support.plex.tv/articles/115002267687-webhooks/) to point to the application's 290 | `/recently-added` endpoint (e.g. `http://localhost:8283/recently-added`). 291 | 292 | Because this feature requires Plex Prerolls and Plex Media Server to be running on the same host machine, it is highly 293 | recommended to use internal networking (local IP addresses) rather than publicly exposing Plex Prerolls to the Internet. 294 | 295 | --- 296 | 297 | ## Shout out to places to get Pre-Roll 298 | 299 | - 300 | 301 | --- 302 | 303 | ## FAQ 304 | 305 | **Can this work with Jellyfin?** 306 | 307 | Jellyfin has an [Intros plugin](https://github.com/jellyfin/jellyfin-plugin-intros) that already replicates this 308 | functionality, in terms of setting rules (including based on schedule, as well as based on the about-to-play media item) 309 | for prerolls. I recommend using that plugin instead. 310 | 311 | **Can this work with Emby?** 312 | 313 | Emby has a [Cinema Intros plugin](https://emby.media/support/articles/Cinema-Intros.html) with a 314 | similar ["list of videos" option](https://emby.media/support/articles/Cinema-Intros.html#custom-intros). Currently, 315 | there is **no way** to update this setting via Emby's API, so there is no way to automate this process. I am in 316 | communication with the Emby development team to see if this feature can be added. 317 | 318 | 319 | ## Credit 320 | 321 | - [BrianLindner](https://github.com/BrianLindner) for the [original pre-roll scheduling concept](https://github.com/BrianLindner/plex-schedule-prerolls) 322 | - [AndrewHolmes060](https://github.com/AndrewHolmes060) for the [original trailer auto-generation feature](https://github.com/AndrewHolmes060/Plex-Preroll-Builder) -------------------------------------------------------------------------------- /api.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from flask import ( 3 | Flask, 4 | request as flask_request, 5 | ) 6 | 7 | import modules.logs as logging 8 | from consts import ( 9 | APP_NAME, 10 | APP_DESCRIPTION, 11 | DEFAULT_CONFIG_PATH, 12 | DEFAULT_LOG_DIR, 13 | DEFAULT_RENDERS_DIR, 14 | CONSOLE_LOG_LEVEL, 15 | FILE_LOG_LEVEL, 16 | FLASK_ADDRESS, 17 | FLASK_PORT, 18 | ) 19 | from modules.config_parser import Config 20 | from modules.errors import determine_exit_code 21 | from modules.webhooks.webhook_processor import WebhookProcessor 22 | 23 | parser = argparse.ArgumentParser(description=f"{APP_NAME} - {APP_DESCRIPTION}") 24 | 25 | parser.add_argument("-c", "--config", help=f"Path to config file. Defaults to '{DEFAULT_CONFIG_PATH}'", 26 | default=DEFAULT_CONFIG_PATH) 27 | parser.add_argument("-l", "--log", help=f"Log file directory. Defaults to '{DEFAULT_LOG_DIR}'", 28 | default=DEFAULT_LOG_DIR) # Should include trailing backslash 29 | parser.add_argument("-r", "--renders", help=f"Path to renders directory. Defaults to '{DEFAULT_RENDERS_DIR}'", 30 | default=DEFAULT_RENDERS_DIR) 31 | args = parser.parse_args() 32 | 33 | # Set up logging 34 | logging.init(app_name=APP_NAME, 35 | console_log_level=CONSOLE_LOG_LEVEL, 36 | log_to_file=True, 37 | log_file_dir=args.log, 38 | file_log_level=FILE_LOG_LEVEL) 39 | 40 | _config = Config(app_name=APP_NAME, config_path=f"{args.config}") 41 | 42 | 43 | def run_with_potential_exit_on_error(func): 44 | def wrapper(*args, **kwargs): 45 | try: 46 | return func(*args, **kwargs) 47 | except Exception as e: 48 | logging.fatal(f"Fatal error occurred. Shutting down: {e}") 49 | exit_code = determine_exit_code(exception=e) 50 | logging.fatal(f"Exiting with code {exit_code}") 51 | exit(exit_code) 52 | 53 | return wrapper 54 | 55 | @run_with_potential_exit_on_error 56 | def start_webhooks_server(config: Config) -> None: 57 | api = Flask(APP_NAME) 58 | 59 | @api.route('/ping', methods=['GET']) 60 | def ping(): 61 | return WebhookProcessor.process_ping(request=flask_request, config=config) 62 | 63 | @api.route('/recently-added', methods=['POST']) 64 | def recently_added(): 65 | if not config.advanced.auto_generation.recently_added.enabled: 66 | return 'Recently added preroll generation is disabled', 200 67 | return WebhookProcessor.process_recently_added(request=flask_request, config=config, output_dir=args.renders) 68 | 69 | @api.route('/last-run-within', methods=['GET']) 70 | def last_run_within(): 71 | return WebhookProcessor.process_last_run_within(request=flask_request, logs_folder=args.log) 72 | 73 | api.run(host=FLASK_ADDRESS, port=FLASK_PORT, debug=True, use_reloader=False) 74 | 75 | 76 | if __name__ == "__main__": 77 | start_webhooks_server(config=_config) 78 | -------------------------------------------------------------------------------- /assets/Bebas-Regular.otf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nwithan8/plex-prerolls/d832d0f409ae1fbbacc97cbfcc966d6f0c752e18/assets/Bebas-Regular.otf -------------------------------------------------------------------------------- /assets/Bebas-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nwithan8/plex-prerolls/d832d0f409ae1fbbacc97cbfcc966d6f0c752e18/assets/Bebas-Regular.ttf -------------------------------------------------------------------------------- /assets/Roboto-Light.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nwithan8/plex-prerolls/d832d0f409ae1fbbacc97cbfcc966d6f0c752e18/assets/Roboto-Light.ttf -------------------------------------------------------------------------------- /assets/fade_out.mov: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nwithan8/plex-prerolls/d832d0f409ae1fbbacc97cbfcc966d6f0c752e18/assets/fade_out.mov -------------------------------------------------------------------------------- /assets/overlay.mov: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nwithan8/plex-prerolls/d832d0f409ae1fbbacc97cbfcc966d6f0c752e18/assets/overlay.mov -------------------------------------------------------------------------------- /config.yaml.example: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/nwithan8/plex-prerolls/main/.schema/config.schema.json 2 | 3 | # All keys must be in lowercase 4 | # All paths will be case-sensitive based on your environment (Linux, Windows) 5 | 6 | run: 7 | schedule: 0 0 * * * 8 | dry_run: false 9 | 10 | plex: 11 | url: http://localhost:32400 # URL to your Plex server 12 | token: thisismyplextoken # Your Plex token 13 | 14 | # Always include these pre-rolls 15 | always: 16 | enabled: true 17 | paths: 18 | - "remote/path/to/video1.mp4" 19 | - "remote/path/to/video2.mp4" 20 | - "remote/path/to/video3.mp4" 21 | path_globbing: 22 | enabled: false # If true, use globbing to match paths 23 | pairs: 24 | - root_path: /files # The root folder to use for globbing 25 | plex_path: /path/to/prerolls/in/plex # The path to use for the Plex server 26 | patterns: 27 | - "local/path/to/prerolls/*.mp4" # The pattern to use for globbing 28 | - "local/path/to/prerolls/*.mkv" # The pattern to use for globbing 29 | - root_path: /other/files # The root folder to use for globbing 30 | plex_path: /path/to/other/prerolls/in/plex # The path to use for the Plex server 31 | patterns: 32 | - "local/path/to/prerolls/*.mp4" # The pattern to use for globbing 33 | - "local/path/to/prerolls/*.mkv" # The pattern to use for globbing 34 | 35 | count: 10 # Optional, randomly select X many videos from the list rather than all of them 36 | weight: 1 # Optional, how much to emphasize these pre-rolls over others (higher = more likely to play) 37 | 38 | # Schedule prerolls by date and time frames 39 | date_range: 40 | enabled: true 41 | ranges: 42 | - name: "New Years" # Optional name for logging purposes 43 | start_date: 2020-01-01 # Jan 1st, 2020 44 | end_date: 2020-01-02 # Jan 2nd, 2020 45 | paths: 46 | - "remote/path/to/video1.mp4" 47 | - "remote/path/to/video2.mp4" 48 | - "remote/path/to/video3.mp4" 49 | path_globbing: 50 | enabled: false # If true, use globbing to match paths 51 | pairs: 52 | - root_path: /files # The root folder to use for globbing 53 | plex_path: /path/to/prerolls/in/plex # The path to use for the Plex server 54 | patterns: 55 | - "local/path/to/prerolls/*.mp4" # The pattern to use for globbing 56 | - "local/path/to/prerolls/*.mkv" # The pattern to use for globbing 57 | - root_path: /other/files # The root folder to use for globbing 58 | plex_path: /path/to/other/prerolls/in/plex # The path to use for the Plex server 59 | patterns: 60 | - "local/path/to/prerolls/*.mp4" # The pattern to use for globbing 61 | - "local/path/to/prerolls/*.mkv" # The pattern to use for globbing 62 | weight: 2 # Optional, add these paths to the list twice (make up greater percentage of prerolls - more likely to be selected) 63 | disable_always: true # Optional, if present and true, disable the always prerolls when this schedule is active 64 | - start_date: xxxx-07-04 # Every year on July 4th 65 | end_date: xxxx-07-04 # Every year on July 4th 66 | paths: 67 | - "remote/path/to/video1.mp4" 68 | - "remote/path/to/video2.mp4" 69 | - "remote/path/to/video3.mp4" 70 | disable_always: false 71 | - start_date: xxxx-xx-02 # Every year on the 2nd of every month 72 | end_date: xxxx-xx-03 # Every year on the 3rd of every month 73 | paths: 74 | - "remote/path/to/video1.mp4" 75 | - "remote/path/to/video2.mp4" 76 | - "remote/path/to/video3.mp4" 77 | - start_date: xxxx-xx-xx 08:00:00 # Every day at 8am 78 | end_date: xxxx-xx-xx 09:30:00 # Every day at 9:30am 79 | paths: 80 | - "remote/path/to/video1.mp4" 81 | - "remote/path/to/video2.mp4" 82 | - "remote/path/to/video3.mp4" 83 | 84 | # Schedule prerolls by week of the year 85 | weekly: 86 | enabled: false 87 | weeks: 88 | - number: 1 # First week of the year 89 | paths: 90 | - "remote/path/to/video1.mp4" 91 | - "remote/path/to/video2.mp4" 92 | - "remote/path/to/video3.mp4" 93 | path_globbing: 94 | enabled: false # If true, use globbing to match paths 95 | pairs: 96 | - root_path: /files # The root folder to use for globbing 97 | plex_path: /path/to/prerolls/in/plex # The path to use for the Plex server 98 | patterns: 99 | - "local/path/to/prerolls/*.mp4" # The pattern to use for globbing 100 | - "local/path/to/prerolls/*.mkv" # The pattern to use for globbing 101 | - root_path: /other/files # The root folder to use for globbing 102 | plex_path: /path/to/other/prerolls/in/plex # The path to use for the Plex server 103 | patterns: 104 | - "local/path/to/prerolls/*.mp4" # The pattern to use for globbing 105 | - "local/path/to/prerolls/*.mkv" # The pattern to use for globbing 106 | weight: 1 # Optional, how much to emphasize these pre-rolls over others (higher = more likely to play) 107 | - number: 2 # Second week of the year 108 | paths: 109 | - "remote/path/to/video1.mp4" 110 | - "remote/path/to/video2.mp4" 111 | - "remote/path/to/video3.mp4" 112 | disable_always: true # If true, disable the always prerolls when this schedule is active 113 | 114 | # Schedule prerolls by month of the year 115 | monthly: 116 | enabled: false 117 | months: 118 | - number: 1 # January 119 | paths: 120 | - "remote/path/to/video1.mp4" 121 | - "remote/path/to/video2.mp4" 122 | - "remote/path/to/video3.mp4" 123 | weight: 1 # Optional, how much to emphasize these pre-rolls over others (higher = more likely to play) 124 | - number: 2 # February 125 | paths: 126 | - "remote/path/to/video1.mp4" 127 | - "remote/path/to/video2.mp4" 128 | - "remote/path/to/video3.mp4" 129 | path_globbing: 130 | enabled: false # If true, use globbing to match paths 131 | pairs: 132 | - root_path: /files # The root folder to use for globbing 133 | plex_path: /path/to/prerolls/in/plex # The path to use for the Plex server 134 | patterns: 135 | - "local/path/to/prerolls/*.mp4" # The pattern to use for globbing 136 | - "local/path/to/prerolls/*.mkv" # The pattern to use for globbing 137 | - root_path: /other/files # The root folder to use for globbing 138 | plex_path: /path/to/other/prerolls/in/plex # The path to use for the Plex server 139 | patterns: 140 | - "local/path/to/prerolls/*.mp4" # The pattern to use for globbing 141 | - "local/path/to/prerolls/*.mkv" # The pattern to use for globbing 142 | disable_always: false # If true, disable the always prerolls when this schedule is active 143 | 144 | advanced: 145 | auto_generation: 146 | plex_path: /path/to/auto-generated/prerolls/in/plex # The path for the Plex Media Server to use to access auto-generated prerolls 147 | recently_added: 148 | # If enabled, auto-generate prerolls for recently added items and add them as an always-on choice (files will be uploaded to the "Preroll Auto-Generated/Recently Added" folder) 149 | enabled: true # If enabled, auto-generate prerolls for recently added items and add them to the "always" list 150 | count: 2 # The number of most-recently added items to use for auto-generation 151 | trailer_cutoff_year: 1980 # Optional: Specify the earliest year for valid trailer searches (Defaults to 1980) 152 | 153 | 154 | 155 | -------------------------------------------------------------------------------- /consts.py: -------------------------------------------------------------------------------- 1 | APP_NAME = "Plex Prerolls" 2 | APP_DESCRIPTION = "A tool to manage prerolls for Plex" 3 | DEFAULT_CONFIG_PATH = "config.yaml" 4 | DEFAULT_LOG_DIR = "logs/" 5 | LAST_RUN_CHECK_FILE = "last_run.txt" # Should be in the logs directory 6 | DEFAULT_RENDERS_DIR = "renders" 7 | ASSETS_DIR = "assets" 8 | AUTO_GENERATED_PREROLLS_DIR = "/auto_rolls" 9 | AUTO_GENERATED_RECENTLY_ADDED_PREROLL_PREFIX = "recently-added-preroll" 10 | CONSOLE_LOG_LEVEL = "INFO" 11 | FILE_LOG_LEVEL = "DEBUG" 12 | FLASK_ADDRESS = "0.0.0.0" 13 | FLASK_PORT = 8283 14 | -------------------------------------------------------------------------------- /docker-compose-dev.yml: -------------------------------------------------------------------------------- 1 | version: "3.9" 2 | services: 3 | plex_prerolls: 4 | build: 5 | context: . 6 | dockerfile: Dockerfile 7 | volumes: 8 | - ./on_host/config:/config 9 | - ./on_host/logs:/logs 10 | - ./on_host/prerolls:/files 11 | - ./on_host/renders:/renders 12 | - ./on_host/auto_rolls:/auto_rolls 13 | ports: 14 | - "8283:8283" 15 | environment: 16 | TZ: America/New_York 17 | healthcheck: 18 | test: ["CMD", "curl", "-f", "http://localhost:8283/last-run-within?timeframe=24h"] # Adjust the timeframe as needed 19 | interval: 5m 20 | timeout: 10s 21 | retries: 3 22 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.9" 2 | services: 3 | plex_prerolls: 4 | image: nwithan8/plex_prerolls:latest 5 | volumes: 6 | - /path/to/config:/config 7 | - /path/to/logs:/logs 8 | - /path/to/prerolls:/files 9 | - /path/to/auto-generated/rolls/temp:/renders 10 | - /path/to/auto-generated/rolls/parent:/auto_rolls 11 | ports: 12 | - "8283:8283" 13 | environment: 14 | TZ: America/New_York 15 | healthcheck: 16 | test: [ "CMD", "curl", "-f", "http://localhost:8283/last-run-within?timeframe=24h" ] # Adjust the timeframe as needed 17 | interval: 5m 18 | timeout: 10s 19 | retries: 3 -------------------------------------------------------------------------------- /documentation/images/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nwithan8/plex-prerolls/d832d0f409ae1fbbacc97cbfcc966d6f0c752e18/documentation/images/logo.png -------------------------------------------------------------------------------- /documentation/images/mascot.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nwithan8/plex-prerolls/d832d0f409ae1fbbacc97cbfcc966d6f0c752e18/documentation/images/mascot.jpeg -------------------------------------------------------------------------------- /documentation/images/recently-added-preroll-example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nwithan8/plex-prerolls/d832d0f409ae1fbbacc97cbfcc966d6f0c752e18/documentation/images/recently-added-preroll-example.png -------------------------------------------------------------------------------- /ecosystem.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "apps": [ 3 | { 4 | "name": "keepalive", 5 | "interpreter": "/app/venv/bin/python", 6 | "script": "pm2_keepalive.py", 7 | "autorestart": true, 8 | "exec_mode": "fork", 9 | "instances": 1 10 | }, 11 | { 12 | "name": "api", 13 | "interpreter": "/app/venv/bin/python", 14 | "script": "api.py", 15 | "args": [ 16 | "-c", 17 | "/config/config.yaml", 18 | "-l", 19 | "/logs", 20 | "-r", 21 | "/renders" 22 | ], 23 | "autorestart": true, 24 | "exec_mode": "fork", 25 | "instances": 1, 26 | "stop_exit_codes": [ 27 | 302 28 | ] 29 | }, 30 | { 31 | "name": "app", 32 | "interpreter": "/app/venv/bin/python", 33 | "script": "run.py", 34 | "args": [ 35 | "-c", 36 | "/config/config.yaml", 37 | "-l", 38 | "/logs", 39 | "-r", 40 | "/renders" 41 | ], 42 | "autorestart": true, 43 | "exec_mode": "fork", 44 | "instances": 1, 45 | "stop_exit_codes": [ 46 | 302 47 | ] 48 | } 49 | ] 50 | } 51 | -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Start application 4 | pm2-runtime start ecosystem.config.json 5 | -------------------------------------------------------------------------------- /modules/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nwithan8/plex-prerolls/d832d0f409ae1fbbacc97cbfcc966d6f0c752e18/modules/__init__.py -------------------------------------------------------------------------------- /modules/config_parser.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | from typing import List, Union 4 | 5 | import confuse 6 | import yaml 7 | 8 | import modules.files as files 9 | import modules.logs as logging 10 | from consts import AUTO_GENERATED_PREROLLS_DIR, AUTO_GENERATED_RECENTLY_ADDED_PREROLL_PREFIX 11 | 12 | 13 | class YAMLElement: 14 | def __init__(self, data): 15 | self.data = data 16 | 17 | def _get_value(self, key: str, default=None): 18 | try: 19 | return self.data[key].get() 20 | except confuse.NotFoundError: 21 | return default 22 | except Exception: 23 | try: 24 | return self.data[key] 25 | except Exception: 26 | return default 27 | 28 | 29 | class ConfigSection(YAMLElement): 30 | def __init__(self, section_key: str, data, parent_key: str = None): 31 | self.section_key = section_key 32 | try: 33 | data = data[self.section_key] 34 | except confuse.NotFoundError: 35 | pass 36 | self._parent_key = parent_key 37 | super().__init__(data=data) 38 | 39 | @property 40 | def full_key(self): 41 | if self._parent_key is None: 42 | return self.section_key 43 | return f"{self._parent_key}_{self.section_key}".upper() 44 | 45 | def _get_subsection(self, key: str, default=None): 46 | try: 47 | return ConfigSection(section_key=key, parent_key=self.full_key, data=self.data) 48 | except confuse.NotFoundError: 49 | return default 50 | 51 | 52 | class PathGlobbingPairConfig(YAMLElement): 53 | def __init__(self, data): 54 | super().__init__(data=data) 55 | 56 | @property 57 | def local_root_folder(self) -> str: 58 | return self._get_value(key="root_path", default="/") 59 | 60 | @property 61 | def remote_root_folder(self) -> str: 62 | return self._get_value(key="plex_path", default="/") 63 | 64 | @property 65 | def patterns(self) -> List[str]: 66 | return self._get_value(key="patterns", default=[]) 67 | 68 | def __repr__(self): 69 | return (f"PathGlobbingPairConfig(local_root_folder={self.local_root_folder}, " 70 | f"remote_root_folder={self.remote_root_folder}, " 71 | f"patterns={self.patterns})") 72 | 73 | 74 | class PathGlobbingConfig(YAMLElement): 75 | def __init__(self, data): 76 | super().__init__(data=data) 77 | 78 | @property 79 | def enabled(self) -> bool: 80 | return self._get_value(key="enabled", default=False) 81 | 82 | @property 83 | def pairs(self) -> List[PathGlobbingPairConfig]: 84 | data = self._get_value(key="pairs", default=[]) 85 | return [PathGlobbingPairConfig(data=d) for d in data] 86 | 87 | def __repr__(self): 88 | return f"PathGlobbingConfig(enabled={self.enabled}, pairs={self.pairs})" 89 | 90 | 91 | class Entry(YAMLElement): 92 | def __init__(self, data): 93 | super().__init__(data) 94 | self.data = data 95 | 96 | def all_paths(self, advanced_settings: 'AdvancedConfig' = None) -> List[str]: 97 | paths = [] 98 | paths.extend(self.remote_paths) 99 | 100 | if not self.path_globbing or not self.path_globbing.enabled: 101 | return paths 102 | 103 | for pair in self.path_globbing.pairs: 104 | local_files_root = pair.local_root_folder 105 | remote_files_root = pair.remote_root_folder 106 | for pattern in pair.patterns: 107 | local_files = files.get_all_files_matching_glob_pattern(directory=local_files_root, pattern=pattern) 108 | for local_file in local_files: 109 | remote_file = files.translate_local_path_to_remote_path(local_path=local_file, 110 | local_root_folder=local_files_root, 111 | remote_root_folder=remote_files_root) 112 | paths.append(remote_file) 113 | 114 | return paths 115 | 116 | @property 117 | def remote_paths(self) -> List[str]: 118 | return self._get_value(key="paths", default=[]) 119 | 120 | @property 121 | def path_globbing(self) -> PathGlobbingConfig: 122 | data = self._get_value(key="path_globbing", default={}) 123 | return PathGlobbingConfig(data=data) 124 | 125 | @property 126 | def weight(self) -> int: 127 | return self._get_value(key="weight", default=1) 128 | 129 | @property 130 | def disable_always(self) -> bool: 131 | return self._get_value(key="disable_always", default=False) 132 | 133 | 134 | class NumericalEntry(Entry): 135 | def __init__(self, data): 136 | super().__init__(data) 137 | 138 | @property 139 | def number(self) -> int: 140 | return self._get_value(key="number", default=None) 141 | 142 | 143 | class DateRangeEntry(Entry): 144 | def __init__(self, data): 145 | super().__init__(data=data) 146 | 147 | @property 148 | def name(self) -> str: 149 | return self._get_value(key="name", default=None) 150 | 151 | @property 152 | def start_date(self) -> str: 153 | return self._get_value(key="start_date", default=None) 154 | 155 | @property 156 | def end_date(self) -> str: 157 | return self._get_value(key="end_date", default=None) 158 | 159 | def __repr__(self): 160 | return (f"DateRangeEntry(start_date={self.start_date}, end_date={self.end_date}, " 161 | f"remote_paths={self.remote_paths}, path_globbing={self.path_globbing}, weight={self.weight})") 162 | 163 | 164 | class WeekEntry(NumericalEntry): 165 | def __init__(self, data): 166 | super().__init__(data=data) 167 | 168 | def __repr__(self): 169 | return (f"WeekEntry(number={self.number}, remote_paths={self.remote_paths}, " 170 | f"path_globbing={self.path_globbing}, weight={self.weight})") 171 | 172 | 173 | class MonthEntry(NumericalEntry): 174 | def __init__(self, data): 175 | super().__init__(data=data) 176 | 177 | def __repr__(self): 178 | return (f"MonthEntry(number={self.number}, remote_paths={self.remote_paths}, " 179 | f"path_globbing={self.path_globbing}, weight={self.weight})") 180 | 181 | 182 | class RunConfig(ConfigSection): 183 | def __init__(self, data): 184 | super().__init__(section_key="run", data=data) 185 | 186 | @property 187 | def schedule(self) -> str: 188 | return self._get_value(key="schedule", default="0 0 * * *") 189 | 190 | @property 191 | def dry_run(self) -> bool: 192 | return self._get_value(key="dry_run", default=False) 193 | 194 | 195 | class PlexServerConfig(ConfigSection): 196 | def __init__(self, data): 197 | super().__init__(section_key="plex", data=data) 198 | 199 | @property 200 | def url(self) -> str: 201 | return self._get_value(key="url", default="") 202 | 203 | @property 204 | def token(self) -> str: 205 | return self._get_value(key="token", default="") 206 | 207 | @property 208 | def port(self) -> Union[int, None]: 209 | port = self._get_value(key="port", default=None) 210 | if not port: 211 | # Try to parse the port from the URL 212 | if self.url.startswith("http://"): 213 | port = 80 214 | elif self.url.startswith("https://"): 215 | port = 443 216 | 217 | return port 218 | 219 | 220 | class RecentlyAddedAutoGenerationConfig(ConfigSection): 221 | def __init__(self, data, parent: 'AutoGenerationConfig'): 222 | self._parent = parent 223 | super().__init__(section_key="recently_added", data=data) 224 | 225 | @property 226 | def enabled(self) -> bool: 227 | return self._get_value(key="enabled", default=False) 228 | 229 | @property 230 | def count(self) -> int: 231 | return self._get_value(key="count", default=10) 232 | 233 | @property 234 | def remote_files_root(self) -> str: 235 | # The Plex-aware equivalent of the local (internal) path where auto-generated prerolls will be stored 236 | return f"{self._parent.remote_path_root}/Recently Added" 237 | 238 | @property 239 | def local_files_root(self) -> str: 240 | # The local (internal) path where auto-generated prerolls will be stored 241 | return f"{self._parent.local_path_root}/Recently Added" 242 | 243 | # Double inheritance doesn't work well with conflicting "data" properties, just re-implement these two functions. 244 | def all_paths(self, advanced_settings: 'AdvancedConfig' = None) -> List[str]: 245 | paths = [] 246 | 247 | local_files = files.get_all_files_matching_glob_pattern(directory=self.local_files_root, 248 | pattern=f"{AUTO_GENERATED_RECENTLY_ADDED_PREROLL_PREFIX}*") 249 | for local_file in local_files: 250 | remote_file = files.translate_local_path_to_remote_path(local_path=local_file, 251 | local_root_folder=self.local_files_root, 252 | remote_root_folder=self.remote_files_root) 253 | paths.append(remote_file) 254 | 255 | return paths 256 | 257 | @property 258 | def trailer_cutoff_year(self) -> int: 259 | return self._get_value(key="trailer_cutoff_year", default=1980) 260 | 261 | 262 | class AutoGenerationConfig(ConfigSection): 263 | def __init__(self, data): 264 | super().__init__(section_key="auto_generation", data=data) 265 | 266 | @property 267 | def remote_path_root(self) -> str: 268 | # The Plex-aware equivalent of the local (internal) path where auto-generated prerolls will be stored 269 | return self._get_value(key="plex_path", default=self.local_path_root) 270 | 271 | @property 272 | def local_path_root(self) -> str: 273 | # The local (internal) path where auto-generated prerolls will be stored 274 | return AUTO_GENERATED_PREROLLS_DIR 275 | 276 | @property 277 | def cookies_file(self) -> str: 278 | cookies_file_path = "/config/yt_dlp_cookies.txt" 279 | return cookies_file_path if os.path.exists(cookies_file_path) else "" 280 | 281 | @property 282 | def recently_added(self) -> RecentlyAddedAutoGenerationConfig: 283 | return RecentlyAddedAutoGenerationConfig(data=self.data, parent=self) 284 | 285 | 286 | class AdvancedConfig(ConfigSection): 287 | def __init__(self, data): 288 | super().__init__(section_key="advanced", data=data) 289 | 290 | @property 291 | def auto_generation(self) -> AutoGenerationConfig: 292 | return AutoGenerationConfig(data=self.data) 293 | 294 | 295 | class ScheduleSection(ConfigSection): 296 | def __init__(self, section_key: str, data): 297 | super().__init__(section_key=section_key, data=data) 298 | 299 | @property 300 | def enabled(self) -> bool: 301 | return self._get_value(key="enabled", default=False) 302 | 303 | 304 | class AlwaysSection(ScheduleSection): 305 | def __init__(self, data): 306 | super(ScheduleSection, self).__init__(section_key="always", data=data) 307 | 308 | # Double inheritance doesn't work well with conflicting "data" properties, just re-implement these functions 309 | def all_paths(self, advanced_settings: 'AdvancedConfig' = None) -> List[str]: 310 | paths = [] 311 | paths.extend(self.remote_paths) 312 | 313 | if not self.path_globbing or not self.path_globbing.enabled: 314 | return paths 315 | 316 | for pair in self.path_globbing.pairs: 317 | local_files_root = pair.local_root_folder 318 | remote_files_root = pair.remote_root_folder 319 | for pattern in pair.patterns: 320 | local_files = files.get_all_files_matching_glob_pattern(directory=local_files_root, pattern=pattern) 321 | for local_file in local_files: 322 | remote_file = files.translate_local_path_to_remote_path(local_path=local_file, 323 | local_root_folder=local_files_root, 324 | remote_root_folder=remote_files_root) 325 | paths.append(remote_file) 326 | 327 | return paths 328 | 329 | @property 330 | def remote_paths(self) -> List[str]: 331 | return self._get_value(key="paths", default=[]) 332 | 333 | @property 334 | def path_globbing(self) -> PathGlobbingConfig: 335 | data = self._get_value(key="path_globbing", default={}) 336 | return PathGlobbingConfig(data=data) 337 | 338 | @property 339 | def weight(self) -> int: 340 | return self._get_value(key="weight", default=1) 341 | 342 | def random_count(self, advanced_settings: 'AdvancedConfig' = None) -> int: 343 | return self._get_value(key="count", default=len(self.all_paths(advanced_settings=advanced_settings))) 344 | 345 | def __repr__(self): 346 | return (f"AlwaysSection(remote_paths={self.remote_paths}, path_globbing={self.path_globbing}, " 347 | f"weight={self.weight}") 348 | 349 | 350 | class DateRangeSection(ScheduleSection): 351 | def __init__(self, data): 352 | super().__init__(section_key="date_range", data=data) 353 | 354 | @property 355 | def ranges(self) -> List[DateRangeEntry]: 356 | data = self._get_value(key="ranges", default=[]) 357 | return [DateRangeEntry(data=d) for d in data] 358 | 359 | @property 360 | def range_count(self) -> int: 361 | return len(self.ranges) 362 | 363 | 364 | class WeeklySection(ScheduleSection): 365 | def __init__(self, data): 366 | super().__init__(section_key="weekly", data=data) 367 | 368 | @property 369 | def weeks(self) -> List[WeekEntry]: 370 | data = self._get_value(key="weeks", default=[]) 371 | return [WeekEntry(data=d) for d in data] 372 | 373 | @property 374 | def week_count(self) -> int: 375 | return len(self.weeks) 376 | 377 | 378 | class MonthlySection(ScheduleSection): 379 | def __init__(self, data): 380 | super().__init__(section_key="monthly", data=data) 381 | 382 | @property 383 | def months(self) -> List[MonthEntry]: 384 | data = self._get_value(key="months", default=[]) 385 | return [MonthEntry(data=d) for d in data] 386 | 387 | @property 388 | def month_count(self) -> int: 389 | return len(self.months) 390 | 391 | 392 | class Config: 393 | def __init__(self, app_name: str, config_path: str): 394 | self.config = confuse.Configuration(app_name) 395 | 396 | # noinspection PyBroadException 397 | try: 398 | self.config.set_file(filename=config_path) 399 | logging.debug(f"Loaded config from {config_path}") 400 | except Exception: # pylint: disable=broad-except # not sure what confuse will throw 401 | raise FileNotFoundError(f"Config file not found: {config_path}") 402 | 403 | self.run = RunConfig(data=self.config) 404 | self.plex = PlexServerConfig(data=self.config) 405 | self.always = AlwaysSection(data=self.config) 406 | self.date_ranges = DateRangeSection(data=self.config) 407 | self.monthly = MonthlySection(data=self.config) 408 | self.weekly = WeeklySection(data=self.config) 409 | self.advanced = AdvancedConfig(data=self.config) 410 | 411 | logging.debug(f"Using configuration:\n{self.log()}") 412 | 413 | def __repr__(self) -> str: 414 | raw_yaml_data = self.config.dump() 415 | json_data = yaml.load(raw_yaml_data, Loader=yaml.FullLoader) 416 | return json.dumps(json_data, indent=4) 417 | 418 | @property 419 | def all(self) -> dict: 420 | return { 421 | "Run - Schedule": self.run.schedule, 422 | "Run - Dry Run": self.run.dry_run, 423 | "Plex - URL": self.plex.url, 424 | "Plex - Token": "Exists" if self.plex.token else "Not Set", 425 | "Always - Enabled": self.always.enabled, 426 | "Always - Config": self.always, 427 | "Date Range - Enabled": self.date_ranges.enabled, 428 | "Date Range - Ranges": self.date_ranges.ranges, 429 | "Monthly - Enabled": self.monthly.enabled, 430 | "Monthly - Months": self.monthly.months, 431 | "Weekly - Enabled": self.weekly.enabled, 432 | "Weekly - Weeks": self.weekly.weeks, 433 | "Advanced - Auto Generation - Remote Path Root": self.advanced.auto_generation.remote_path_root, 434 | "Advanced - Auto Generation - Recently Added - Enabled": self.advanced.auto_generation.recently_added.enabled, 435 | "Advanced - Auto Generation - Recently Added - Count": self.advanced.auto_generation.recently_added.count, 436 | "Advanced - Auto Generation - Recently Added - Trailer Cutoff Year": self.advanced.auto_generation.recently_added.trailer_cutoff_year, 437 | } 438 | 439 | def log(self) -> str: 440 | return "\n".join([f"{key}: {value}" for key, value in self.all.items()]) 441 | -------------------------------------------------------------------------------- /modules/errors.py: -------------------------------------------------------------------------------- 1 | def determine_exit_code(exception: Exception) -> int: 2 | """ 3 | Determine the exit code based on the exception that was thrown 4 | 5 | :param exception: The exception that was thrown 6 | :return: The exit code 7 | """ 8 | return 1 9 | -------------------------------------------------------------------------------- /modules/ffmpeg_utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import ffmpeg 4 | 5 | 6 | def convert_video_to_audio(video_file_path: str, audio_file_path: str, delete_original_file: bool = False) -> str: 7 | """ 8 | Convert a video file to an audio file. 9 | 10 | :param video_file_path: The path to the video file to convert. 11 | :type video_file_path: str 12 | :param audio_file_path: The path to the audio file to create. 13 | :type audio_file_path: str 14 | :param delete_original_file: Whether to delete the original video file after conversion. 15 | :type delete_original_file: bool 16 | :return: The path to the audio file. 17 | :rtype: str 18 | """ 19 | ffmpeg_command = ffmpeg.input(video_file_path) 20 | ffmpeg_command = ffmpeg.output(ffmpeg_command, audio_file_path) 21 | 22 | ffmpeg.run(ffmpeg_command, overwrite_output=True, quiet=True) 23 | 24 | if delete_original_file: 25 | os.remove(video_file_path) 26 | 27 | return audio_file_path 28 | 29 | 30 | def trim_audio_file_to_length(audio_file_path: str, length_seconds: float, fade_in: bool = False, 31 | fade_in_length: float = 0, fade_out: bool = False, fade_out_length: float = 0) -> str: 32 | """ 33 | Trim an audio file to a specific length. Replaces the original audio file in-place. 34 | 35 | :param audio_file_path: The path to the audio file to trim. 36 | :type audio_file_path: str 37 | :param length_seconds: The length in seconds to trim the audio file to. 38 | :type length_seconds: float 39 | :param fade_in: Whether to fade in the audio. 40 | :type fade_in: bool 41 | :param fade_in_length: The length of the fade in. 42 | :type fade_in_length: float 43 | :param fade_out: Whether to fade out the audio. 44 | :type fade_out: bool 45 | :param fade_out_length: The length of the fade out. 46 | :type fade_out_length: float 47 | :return: The path to the trimmed audio file (should be the same as the input path). 48 | :rtype: str 49 | """ 50 | ffmpeg_command = ffmpeg.input(audio_file_path, ss=0, t=length_seconds) 51 | if fade_in: 52 | ffmpeg_command = ffmpeg.filter(ffmpeg_command, "afade", t="in", st=0, d=fade_in_length) 53 | if fade_out: 54 | ffmpeg_command = ffmpeg.filter(ffmpeg_command, "afade", t="out", st=(length_seconds - fade_out_length), 55 | d=fade_out_length) 56 | temp_audio_file_path = audio_file_path.split(".")[0] + "_temp." + audio_file_path.split(".")[1] 57 | ffmpeg_command = ffmpeg.output(ffmpeg_command, temp_audio_file_path) 58 | 59 | ffmpeg.run(ffmpeg_command, overwrite_output=True, quiet=True) 60 | 61 | os.remove(audio_file_path) 62 | os.rename(temp_audio_file_path, audio_file_path) 63 | 64 | return audio_file_path 65 | -------------------------------------------------------------------------------- /modules/files.py: -------------------------------------------------------------------------------- 1 | import glob 2 | import os 3 | from typing import List 4 | 5 | 6 | def get_all_files_matching_glob_pattern(directory: str, pattern: str) -> List[str]: 7 | """ 8 | Get all files matching a glob pattern in a directory. 9 | 10 | Args: 11 | directory (str): The directory to search in. 12 | pattern (str): The glob pattern to search for. 13 | 14 | Returns: 15 | List[str]: A list of file paths that match the glob pattern. 16 | """ 17 | return [file for file in glob.glob(os.path.join(directory, pattern)) if os.path.isfile(file)] 18 | 19 | 20 | def translate_local_path_to_remote_path(local_path: str, local_root_folder: str, remote_root_folder: str) -> str: 21 | """ 22 | Translate a local path to a remote path. 23 | 24 | Args: 25 | local_path (str): The local path to translate. 26 | local_root_folder (str): The root folder of the local path. 27 | remote_root_folder (str): The root folder of the remote path. 28 | 29 | Returns: 30 | str: The translated remote path. 31 | """ 32 | return local_path.replace(local_root_folder, remote_root_folder, 1) 33 | -------------------------------------------------------------------------------- /modules/logs.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from datetime import datetime 3 | from typing import Optional 4 | 5 | _nameToLevel = { 6 | 'CRITICAL': logging.CRITICAL, 7 | 'FATAL': logging.FATAL, 8 | 'ERROR': logging.ERROR, 9 | 'WARN': logging.WARNING, 10 | 'WARNING': logging.WARNING, 11 | 'INFO': logging.INFO, 12 | 'DEBUG': logging.DEBUG, 13 | 'NOTSET': logging.NOTSET, 14 | } 15 | 16 | _DEFAULT_LOGGER_NAME = None 17 | 18 | 19 | def init(app_name: str, 20 | console_log_level: str, 21 | log_to_file: Optional[bool] = False, 22 | log_file_dir: Optional[str] = "", 23 | file_log_level: Optional[str] = None): 24 | global _DEFAULT_LOGGER_NAME 25 | _DEFAULT_LOGGER_NAME = app_name 26 | 27 | logger = logging.getLogger(app_name) 28 | 29 | # Default log to DEBUG 30 | logger.setLevel(logging.DEBUG) 31 | 32 | formatter = logging.Formatter('%(asctime)s - [%(levelname)s]: %(message)s') 33 | 34 | # Console logging 35 | console_logger = logging.StreamHandler() 36 | console_logger.setFormatter(formatter) 37 | console_logger.setLevel(level_name_to_level(console_log_level)) 38 | logger.addHandler(console_logger) 39 | 40 | # File logging 41 | if log_to_file: 42 | log_file_dir = log_file_dir if log_file_dir.endswith('/') else f'{log_file_dir}/' 43 | file_logger = logging.FileHandler(f'{log_file_dir}{app_name}.log') 44 | file_logger.setFormatter(formatter) 45 | file_logger.setLevel(level_name_to_level(file_log_level or console_log_level)) 46 | logger.addHandler(file_logger) 47 | 48 | 49 | def level_name_to_level(level_name: str): 50 | return _nameToLevel.get(level_name, _nameToLevel['NOTSET']) 51 | 52 | 53 | def info(message: str, specific_logger: Optional[str] = None): 54 | logging.getLogger(specific_logger if specific_logger else _DEFAULT_LOGGER_NAME).info(msg=message) 55 | 56 | 57 | def warning(message: str, specific_logger: Optional[str] = None): 58 | logging.getLogger(specific_logger if specific_logger else _DEFAULT_LOGGER_NAME).warning(msg=message) 59 | 60 | 61 | def debug(message: str, specific_logger: Optional[str] = None): 62 | logging.getLogger(specific_logger if specific_logger else _DEFAULT_LOGGER_NAME).debug(msg=message) 63 | 64 | 65 | def error(message: str, specific_logger: Optional[str] = None): 66 | logging.getLogger(specific_logger if specific_logger else _DEFAULT_LOGGER_NAME).error(msg=message) 67 | 68 | 69 | def critical(message: str, specific_logger: Optional[str] = None): 70 | logging.getLogger(specific_logger if specific_logger else _DEFAULT_LOGGER_NAME).critical(msg=message) 71 | 72 | 73 | def fatal(message: str, specific_logger: Optional[str] = None): 74 | logging.getLogger(specific_logger if specific_logger else _DEFAULT_LOGGER_NAME).critical(msg=message) 75 | 76 | 77 | def write_to_last_run_file(logs_folder: str, last_run_file: str): 78 | if logs_folder.endswith('/'): 79 | logs_folder = logs_folder[:-1] 80 | 81 | last_run_check_file = f"{logs_folder}/{last_run_file}" 82 | 83 | with open(last_run_check_file, 'w') as file: 84 | file.write(datetime.now().isoformat()) 85 | 86 | info(f"Last run time written to {last_run_check_file}") 87 | 88 | 89 | def read_last_run_file(logs_folder: str, last_run_file: str) -> Optional[datetime]: 90 | if logs_folder.endswith('/'): 91 | logs_folder = logs_folder[:-1] 92 | 93 | last_run_check_file = f"{logs_folder}/{last_run_file}" 94 | 95 | try: 96 | with open(last_run_check_file, 'r') as file: 97 | last_run_data = file.read().strip() 98 | if not last_run_data: 99 | warning("Last run data is empty.") 100 | return None 101 | return datetime.fromisoformat(last_run_data) 102 | except FileNotFoundError: 103 | error(f"Last run file not found: {last_run_check_file}") 104 | return None 105 | except ValueError as e: 106 | error(f"Error decoding last run data: {e}") 107 | return None 108 | -------------------------------------------------------------------------------- /modules/models.py: -------------------------------------------------------------------------------- 1 | import random 2 | from datetime import datetime 3 | from typing import NamedTuple, List, Union 4 | 5 | import modules.logs as logging 6 | from modules import utils 7 | from modules.statics import ScheduleType 8 | 9 | 10 | class ScheduleEntry(NamedTuple): 11 | type: str 12 | start_date: datetime 13 | end_date: datetime 14 | paths: List[str] 15 | weight: int 16 | name_prefix: str 17 | disable_always: bool = False 18 | 19 | @property 20 | def should_be_used(self) -> bool: 21 | now = datetime.now() 22 | return self.start_date <= now <= self.end_date 23 | 24 | @property 25 | def name(self) -> str: 26 | return f"{self.name_prefix} ({self.start_date} - {self.end_date})" 27 | 28 | 29 | def schedule_entry_from_always(paths: List[str], count: int, weight: int) -> ScheduleEntry: 30 | start_date = utils.make_midnight(utils.start_of_time()) 31 | end_date = utils.make_right_before_midnight(utils.end_of_time()) 32 | 33 | if count > len(paths): 34 | logging.warning(f"Always schedule has a count of {count} but only {len(paths)} paths were provided. " 35 | f"Setting count to {len(paths)}") 36 | count = len(paths) 37 | 38 | random_paths = random.sample(population=paths, k=count) 39 | 40 | return ScheduleEntry(type=ScheduleType.always.value, 41 | start_date=start_date, 42 | end_date=end_date, 43 | paths=random_paths, 44 | weight=weight, 45 | name_prefix="Always") 46 | 47 | 48 | def schedule_entry_from_auto_generated(name: str, paths: List[str], weight: int) -> ScheduleEntry: 49 | # Always schedule 50 | start_date = utils.make_midnight(utils.start_of_time()) 51 | end_date = utils.make_right_before_midnight(utils.end_of_time()) 52 | 53 | # Always use all paths available 54 | 55 | return ScheduleEntry(type=ScheduleType.always.value, 56 | start_date=start_date, 57 | end_date=end_date, 58 | paths=paths, 59 | weight=weight, 60 | name_prefix=f"Auto Generated - {name}") 61 | 62 | 63 | def schedule_entry_from_week_number(week_number: int, paths: List[str], weight: int, disable_always: bool = False) -> \ 64 | Union[ScheduleEntry, None]: 65 | start_date = utils.start_of_week_number(week_number=week_number) 66 | end_date = utils.end_of_week_number(week_number=week_number) 67 | 68 | return ScheduleEntry(type=ScheduleType.weekly.value, 69 | start_date=start_date, 70 | end_date=end_date, 71 | paths=paths, 72 | weight=weight, 73 | disable_always=disable_always, 74 | name_prefix=f"Week {week_number}") 75 | 76 | 77 | def schedule_entry_from_month_number(month_number: int, paths: List[str], weight: int, disable_always: bool = False) -> \ 78 | Union[ScheduleEntry, None]: 79 | start_date = utils.start_of_month(month_number=month_number) 80 | end_date = utils.end_of_month(month_number=month_number) 81 | 82 | return ScheduleEntry(type=ScheduleType.monthly.value, 83 | start_date=start_date, 84 | end_date=end_date, 85 | paths=paths, 86 | weight=weight, 87 | disable_always=disable_always, 88 | name_prefix=f"Month {month_number}") 89 | 90 | 91 | def schedule_entry_from_date_range(start_date_string: str, end_date_string: str, paths: List[str], weight: int, 92 | disable_always: bool = False, name: str = None) \ 93 | -> Union[ScheduleEntry, None]: 94 | if not name: 95 | name = "Date Range" 96 | 97 | start_date, end_date = utils.wildcard_strings_to_datetimes(start_date_string=start_date_string, 98 | end_date_string=end_date_string) 99 | 100 | if not start_date or not end_date: 101 | logging.error( 102 | f"{name} has invalid start or end date wildcard patterns. " 103 | f"Any wildcard elements must be in the same position in both the start and end date.\n" 104 | f"Start date: {start_date_string}\nEnd date: {end_date_string}") 105 | return None 106 | 107 | return ScheduleEntry(type=ScheduleType.date_range.value, 108 | start_date=start_date, 109 | end_date=end_date, 110 | paths=paths, 111 | weight=weight, 112 | disable_always=disable_always, 113 | name_prefix=name) 114 | -------------------------------------------------------------------------------- /modules/plex_connector.py: -------------------------------------------------------------------------------- 1 | from typing import List, Union, Tuple 2 | 3 | from plexapi.exceptions import BadRequest 4 | from plexapi.server import PlexServer 5 | from plexapi.video import Movie 6 | 7 | import modules.logs as logging 8 | 9 | 10 | def prepare_pre_roll_string(paths: List[str]) -> Tuple[Union[str, None], int]: 11 | if not paths: 12 | return None, 0 13 | 14 | # Filter out empty paths 15 | paths = [path for path in paths if path] 16 | 17 | return ";".join(paths), len(paths) 18 | 19 | 20 | class PlexConnector: 21 | def __init__(self, host: str, token: str): 22 | self._host = host 23 | self._token = token 24 | logging.info(f"Connecting to Plex server at {self._host}") 25 | self._plex_server = PlexServer(baseurl=self._host, token=self._token) 26 | 27 | def update_pre_roll_paths(self, paths: List[str], testing: bool = False) -> None: 28 | pre_roll_string, count = prepare_pre_roll_string(paths=paths) 29 | if not pre_roll_string: 30 | logging.info("No pre-roll paths to update") 31 | return 32 | 33 | logging.info(f"Using {count} pre-roll paths") 34 | 35 | if testing: 36 | logging.debug(f"Testing: Would have updated pre-roll to: {pre_roll_string}") 37 | return 38 | 39 | logging.info(f"Updating pre-roll to: {pre_roll_string}") 40 | 41 | self._plex_server.settings.get("cinemaTrailersPrerollID").set(pre_roll_string) # type: ignore 42 | 43 | try: 44 | self._plex_server.settings.save() # type: ignore 45 | except BadRequest as e: 46 | if "Too Large" in str(e): 47 | logging.error("Failed to update pre-roll: Too many paths") 48 | return 49 | except Exception as e: 50 | logging.error(f"Failed to save pre-roll: {e}") 51 | return 52 | 53 | logging.info("Successfully updated pre-roll") 54 | 55 | def get_movie(self, item_key: str) -> Union[None, Movie]: 56 | """ 57 | Get a movie from the Plex server 58 | 59 | :param item_key: The key of the movie, should start with "/library/metadata/" 60 | :return: The movie object or None 61 | """ 62 | try: 63 | return self._plex_server.fetchItem(ekey=item_key) 64 | except Exception as e: 65 | logging.error(f"Failed to get movie: {e}") 66 | return None 67 | -------------------------------------------------------------------------------- /modules/renderers/__init__.py: -------------------------------------------------------------------------------- 1 | from modules.renderers.recently_added import RecentlyAddedPrerollRenderer 2 | -------------------------------------------------------------------------------- /modules/renderers/base.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | 3 | from modules import youtube_downloader as ytd 4 | from modules.config_parser import Config 5 | 6 | 7 | class PrerollRenderer: 8 | def __init__(self): 9 | pass 10 | 11 | def render(self, config: Config): 12 | raise NotImplementedError 13 | -------------------------------------------------------------------------------- /modules/renderers/recently_added.py: -------------------------------------------------------------------------------- 1 | import textwrap 2 | from typing import Union, Tuple 3 | 4 | import ffmpeg 5 | import requests 6 | from plexapi.video import Movie 7 | 8 | import modules.logs as logging 9 | from consts import ASSETS_DIR, AUTO_GENERATED_RECENTLY_ADDED_PREROLL_PREFIX 10 | from modules import youtube_downloader as ytd, utils, ffmpeg_utils 11 | from modules.renderers.base import PrerollRenderer 12 | from modules.config_parser import Config 13 | 14 | LENGTH_SECONDS = 33.5 15 | 16 | 17 | def _trim_background_music(background_music_file_path: str) -> str: 18 | logging.info(f"Trimming {background_music_file_path} to {LENGTH_SECONDS} seconds") 19 | video_file_path = ffmpeg_utils.trim_audio_file_to_length(audio_file_path=background_music_file_path, 20 | length_seconds=LENGTH_SECONDS, 21 | fade_in=True, 22 | fade_in_length=0.5, 23 | fade_out=True, 24 | fade_out_length=2) 25 | logging.info(f"Converting {video_file_path} to MP3") 26 | audio_file_path = f"{video_file_path.split('.')[0]}.mp3" 27 | ffmpeg_utils.convert_video_to_audio(video_file_path=video_file_path, audio_file_path=audio_file_path) 28 | 29 | return audio_file_path 30 | 31 | 32 | class RecentlyAddedPrerollRenderer(PrerollRenderer): 33 | def __init__(self, render_folder: str, movie: Movie): 34 | super().__init__() 35 | self.download_folder = render_folder # Will be set in render() 36 | self._video_file_name = "video" 37 | self._audio_file_name = "audio" 38 | self._poster_file_name = "poster.jpg" 39 | # Needs to end with epoch timestamp to sort correctly during rclone sync 40 | self._output_file_name = f"{AUTO_GENERATED_RECENTLY_ADDED_PREROLL_PREFIX}-{utils.now_epoch()}.mp4" 41 | self.movie_title = movie.title 42 | self.movie_year = getattr(movie, "year", None) 43 | duration_milliseconds = getattr(movie, "duration", 0) 44 | self.movie_duration_human = utils.milliseconds_to_hours_minutes_seconds(milliseconds=duration_milliseconds) 45 | self.movie_tagline = getattr(movie, "tagline", "") 46 | self.movie_summary = getattr(movie, "summary", "") 47 | self.movie_studio = getattr(movie, "studio", "") 48 | self.movie_directors = getattr(movie, "directors", []) 49 | self.movie_actors = getattr(movie, "actors", []) 50 | self.movie_genres = getattr(movie, "genres", []) 51 | self.movie_critic_rating = getattr(movie, "rating", None) # 0.0 - 10.0 52 | self.movie_audience_rating = getattr(movie, "audienceRating", None) # 0.0 - 10.0 53 | self.movie_poster_url = getattr(movie, "posterUrl", None) 54 | 55 | @property 56 | def youtube_search_query_movie_title(self) -> str: 57 | return f'"{self.movie_title}" {self.movie_year or ""}'.strip() 58 | 59 | def _get_trailer(self, config: Config) -> str: 60 | search_query = f"{self.youtube_search_query_movie_title} Official Movie Theatrical Trailer" 61 | logging.info(f'Retrieving trailer for "{self.movie_title}", YouTube search query: "{search_query}"') 62 | video_id = ytd.run_youtube_search( 63 | query=f"{self.youtube_search_query_movie_title} Official Movie Theatrical Trailer", 64 | selector_function=ytd.SelectorPresets.select_first_video, 65 | results_limit=5) 66 | video_url = ytd.get_video_url(video_id=video_id) 67 | video_file_path = ytd.download_youtube_video(url=video_url, 68 | config=config, 69 | output_dir=self.download_folder, 70 | output_filename=self._video_file_name) 71 | logging.info("Trailer retrieved successfully") 72 | return video_file_path 73 | 74 | def _get_background_music(self, config: Config) -> str: 75 | search_query = f"{self.youtube_search_query_movie_title} movie soundtrack" 76 | logging.info(f'Retrieving background music for "{self.movie_title}", YouTube search query: "{search_query}"') 77 | video_id = ytd.run_youtube_search(query=f"{self.youtube_search_query_movie_title} movie soundtrack", 78 | selector_function=ytd.SelectorPresets.select_first_video, 79 | results_limit=5) 80 | video_url = ytd.get_video_url(video_id=video_id) 81 | video_file_path = ytd.download_youtube_video(url=video_url, 82 | config=config, 83 | output_dir=self.download_folder, 84 | output_filename=self._audio_file_name) 85 | logging.info("Background music retrieved successfully") 86 | return video_file_path 87 | 88 | def _get_movie_poster(self) -> Union[str, None]: 89 | logging.info(f'Retrieving poster for "{self.movie_title}"') 90 | if not self.movie_poster_url: 91 | logging.warning(f"No poster URL available for {self.movie_title}") 92 | return None 93 | res = requests.get(self.movie_poster_url) 94 | file_path = f"{self.download_folder}/{self._poster_file_name}" 95 | with open(file_path, "wb") as f: 96 | f.write(res.content) 97 | 98 | logging.info("Poster retrieved successfully") 99 | return file_path 100 | 101 | def render(self, config: Config) -> Tuple[Union[str, None], Union[str, None]]: 102 | if not self.movie_title: 103 | logging.warning("No movie title available, cannot build preroll") 104 | return None, None 105 | if not self.movie_year: 106 | logging.warning("No movie year available, not going to attempt to build preroll") 107 | return None, None 108 | trailer_cutoff_year = config.advanced.auto_generation.recently_added.trailer_cutoff_year 109 | if self.movie_year < trailer_cutoff_year: 110 | # Finding good trailers automatically for movies older than 1980 is difficult (year is arbitrary) 111 | logging.warning("Movie is too old, not going to attempt to build preroll") 112 | return None, None 113 | 114 | self.download_folder = utils.get_temporary_directory_path(parent_directory=self.download_folder) 115 | logging.info(f'Retrieving assets for preroll of "{self.movie_title}", saving to {self.download_folder}') 116 | video_path = self._get_trailer(config=config) 117 | audio_path = self._get_background_music(config=config) 118 | audio_path = _trim_background_music(background_music_file_path=audio_path) 119 | poster_path = self._get_movie_poster() 120 | 121 | logging.info(f'Rendering preroll for "{self.movie_title}"') 122 | title_position_offset = (len(self.movie_title) * 33) / 2 - 7 123 | if title_position_offset > 716: 124 | title = textwrap.fill(self.movie_title, width=40, break_long_words=False) 125 | title_newline = title.find("\n") 126 | title_position_offset = (title_newline * 33) / 2 - 7 127 | 128 | description = textwrap.fill(self.movie_summary, width=22, break_long_words=False) 129 | num_of_lines = description.count("\n") 130 | description_size = 580 / num_of_lines if num_of_lines > 22 else 26 131 | 132 | # Prepare elements for preroll video 133 | sidebar = ffmpeg.input(f"{ASSETS_DIR}/overlay.mov") 134 | poster = ffmpeg.filter(ffmpeg.input(poster_path, loop=1), "scale", 200, -1) 135 | fade_out = ffmpeg.input(f"{ASSETS_DIR}/fade_out.mov") 136 | title_font = f"{ASSETS_DIR}/Bebas-Regular.ttf" 137 | description_font = f"{ASSETS_DIR}/Roboto-Light.ttf" 138 | 139 | # Prepare preroll video 140 | ffmpeg_command = ffmpeg.input(video_path, ss=10, t=LENGTH_SECONDS) 141 | ffmpeg_command = ffmpeg.filter(ffmpeg_command, "scale", 1600, -1) 142 | ffmpeg_audio_command = ffmpeg.input(audio_path) 143 | ffmpeg_command = ffmpeg.overlay(sidebar, ffmpeg_command, x=300, y=125) 144 | ffmpeg_command = ffmpeg.overlay(ffmpeg_command, poster, x=40, y=195, enable="gte(t,1)") 145 | 146 | # Add ratings 147 | # If neither rating is available, show nothing 148 | if not self.movie_critic_rating and not self.movie_audience_rating: 149 | pass 150 | # If both ratings are available, show both 151 | elif self.movie_critic_rating and self.movie_audience_rating: 152 | ffmpeg_command = ffmpeg.drawtext( 153 | ffmpeg_command, 154 | text=f"Critic Rating: {self.movie_critic_rating}", 155 | fontfile=title_font, 156 | x=3, 157 | y=135, 158 | escape_text=True, 159 | fontcolor="0xFFFFFF@0xff", 160 | fontsize=32, 161 | enable="gte(t,1)", 162 | ) 163 | ffmpeg_command = ffmpeg.drawtext( 164 | ffmpeg_command, 165 | text=f"Audience Rating: {self.movie_audience_rating}", 166 | fontfile=title_font, 167 | x=3, 168 | y=165, 169 | escape_text=True, 170 | fontcolor="0xFFFFFF@0xff", 171 | fontsize=32, 172 | enable="gte(t,1)", 173 | ) 174 | # If only one rating is available, show that one 175 | else: 176 | rating = self.movie_critic_rating or self.movie_audience_rating 177 | rating_type = "Critic" if self.movie_critic_rating else "Audience" 178 | ffmpeg_command = ffmpeg.drawtext( 179 | ffmpeg_command, 180 | text=f"{rating_type} Rating: {rating}", 181 | fontfile=title_font, 182 | x=3, 183 | y=150, 184 | escape_text=True, 185 | fontcolor="0xFFFFFF@0xff", 186 | fontsize=36, 187 | enable="gte(t,1)", 188 | ) 189 | 190 | # Add title and description 191 | ffmpeg_command = ffmpeg.drawtext( 192 | ffmpeg_command, 193 | text=self.movie_title, 194 | fontfile=title_font, 195 | x=(1106 - title_position_offset), 196 | y=20, 197 | escape_text=True, 198 | fontcolor="0xFFFFFF@0xff", 199 | fontsize=76, 200 | enable="gte(t,1)", 201 | ) 202 | ffmpeg_command = ffmpeg.drawtext( 203 | ffmpeg_command, 204 | text=description, 205 | fontfile=description_font, 206 | x=3, 207 | y=500, 208 | escape_text=True, 209 | fontcolor="0xFFFFFF@0xff", 210 | fontsize=description_size, 211 | enable="gte(t,1)", 212 | ) 213 | 214 | # TODO: Add studio, directors, actors, genres, tagline 215 | 216 | # Add fade out 217 | ffmpeg_command = ffmpeg.overlay(ffmpeg_command, fade_out, eof_action="endall") 218 | 219 | # Combine video and audio 220 | file_path = f"{self.download_folder}/{self._output_file_name}" 221 | ffmpeg_command = ffmpeg.output(ffmpeg_audio_command, ffmpeg_command, 222 | file_path, ) 223 | 224 | # Run ffmpeg command 225 | ffmpeg.run(ffmpeg_command, overwrite_output=True, quiet=False) 226 | 227 | logging.info(f'Preroll for "{self.movie_title}" rendered successfully to {file_path}') 228 | 229 | return self.download_folder, file_path 230 | -------------------------------------------------------------------------------- /modules/schedule_manager.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | import modules.logs as logging 4 | from modules import models 5 | from modules.config_parser import ( 6 | Config, 7 | ) 8 | from modules.models import ScheduleEntry 9 | 10 | 11 | class ScheduleManager: 12 | def __init__(self, config: Config): 13 | self._config = config 14 | self.weekly_schedules: List[ScheduleEntry] = [] 15 | self.monthly_schedules: List[ScheduleEntry] = [] 16 | self.date_range_schedules: List[ScheduleEntry] = [] 17 | self.always_schedules: List[ScheduleEntry] = [] 18 | self.auto_generated_schedules: List[ScheduleEntry] = [] 19 | self._parse_schedules() # Only call this once, otherwise it will duplicate schedules 20 | 21 | def _parse_schedules(self): 22 | logging.info("Parsing schedules...") 23 | if self._config.weekly.enabled: 24 | for week in self._config.weekly.weeks: 25 | self.weekly_schedules.append(models.schedule_entry_from_week_number( 26 | week_number=week.number, 27 | paths=week.all_paths( 28 | advanced_settings=self._config.advanced), 29 | weight=week.weight, 30 | disable_always=week.disable_always)) 31 | 32 | if self._config.monthly.enabled: 33 | for month in self._config.monthly.months: 34 | self.monthly_schedules.append(models.schedule_entry_from_month_number( 35 | month_number=month.number, 36 | paths=month.all_paths( 37 | advanced_settings=self._config.advanced), 38 | weight=month.weight, 39 | disable_always=month.disable_always)) 40 | 41 | if self._config.date_ranges.enabled: 42 | for date_range in self._config.date_ranges.ranges: 43 | entry = models.schedule_entry_from_date_range( 44 | start_date_string=date_range.start_date, 45 | end_date_string=date_range.end_date, 46 | paths=date_range.all_paths( 47 | advanced_settings=self._config.advanced), 48 | weight=date_range.weight, 49 | name=date_range.name, 50 | disable_always=date_range.disable_always) 51 | if entry: 52 | self.date_range_schedules.append(entry) 53 | 54 | if self._config.always.enabled: 55 | self.always_schedules.append(models.schedule_entry_from_always( 56 | paths=self._config.always.all_paths( 57 | advanced_settings=self._config.advanced), 58 | count=self._config.always.random_count( 59 | advanced_settings=self._config.advanced), 60 | weight=self._config.always.weight)) 61 | 62 | if self._config.advanced.auto_generation.recently_added.enabled: 63 | self.auto_generated_schedules.append(models.schedule_entry_from_auto_generated( 64 | name="Recently Added", 65 | paths=self._config.advanced.auto_generation.recently_added.all_paths( 66 | advanced_settings=self._config.advanced), 67 | weight=1)) 68 | 69 | @property 70 | def valid_weekly_schedules(self) -> List[ScheduleEntry]: 71 | return [schedule for schedule in self.weekly_schedules if schedule.should_be_used] 72 | 73 | @property 74 | def valid_weekly_schedule_count(self) -> int: 75 | return len(self.valid_weekly_schedules) 76 | 77 | @property 78 | def valid_weekly_schedule_log_message(self) -> str: 79 | valid_schedules = "" 80 | for schedule in self.valid_weekly_schedules: 81 | valid_schedules += f"- {schedule.name}\n" 82 | return valid_schedules 83 | 84 | @property 85 | def valid_monthly_schedules(self) -> List[ScheduleEntry]: 86 | return [schedule for schedule in self.monthly_schedules if schedule.should_be_used] 87 | 88 | @property 89 | def valid_monthly_schedule_count(self) -> int: 90 | return len(self.valid_monthly_schedules) 91 | 92 | @property 93 | def valid_monthly_schedule_log_message(self) -> str: 94 | valid_schedules = "" 95 | for schedule in self.valid_monthly_schedules: 96 | valid_schedules += f"- {schedule.name}\n" 97 | return valid_schedules 98 | 99 | @property 100 | def valid_date_range_schedules(self) -> List[ScheduleEntry]: 101 | return [schedule for schedule in self.date_range_schedules if schedule.should_be_used] 102 | 103 | @property 104 | def valid_date_range_schedule_count(self) -> int: 105 | return len(self.valid_date_range_schedules) 106 | 107 | @property 108 | def valid_date_range_schedule_log_message(self) -> str: 109 | valid_schedules = "" 110 | for schedule in self.valid_date_range_schedules: 111 | valid_schedules += f"- {schedule.name}\n" 112 | return valid_schedules 113 | 114 | @property 115 | def valid_always_schedules(self) -> List[ScheduleEntry]: 116 | if self.disable_always: 117 | return [] 118 | 119 | return [schedule for schedule in self.always_schedules if schedule.should_be_used] 120 | 121 | @property 122 | def valid_always_schedule_count(self) -> int: 123 | return len(self.valid_always_schedules) 124 | 125 | @property 126 | def valid_always_schedule_log_message(self) -> str: 127 | valid_schedules = "" 128 | for schedule in self.valid_always_schedules: 129 | valid_schedules += f"- {schedule.name}\n" 130 | return valid_schedules 131 | 132 | @property 133 | def valid_auto_generated_schedules(self) -> List[ScheduleEntry]: 134 | return [schedule for schedule in self.auto_generated_schedules if schedule.should_be_used] 135 | 136 | @property 137 | def valid_auto_generated_schedule_count(self) -> int: 138 | return len(self.valid_auto_generated_schedules) 139 | 140 | @property 141 | def auto_generated_schedules_log_message(self) -> str: 142 | valid_schedules = "" 143 | for schedule in self.valid_auto_generated_schedules: 144 | valid_schedules += f"- {schedule.name}\n" 145 | return valid_schedules 146 | 147 | @property 148 | def all_schedules_except_always(self) -> List[ScheduleEntry]: 149 | # Auto generated schedules are not included, considered "Always" 150 | return self.weekly_schedules + self.monthly_schedules + self.date_range_schedules 151 | 152 | @property 153 | def all_valid_schedule_except_always(self) -> List[ScheduleEntry]: 154 | return [schedule for schedule in self.all_schedules_except_always if schedule.should_be_used] 155 | 156 | @property 157 | def disable_always(self) -> bool: 158 | return any([schedule.disable_always for schedule in self.all_valid_schedule_except_always]) 159 | 160 | @property 161 | def all_schedules(self) -> List[ScheduleEntry]: 162 | schedules = self.all_schedules_except_always 163 | 164 | if not self.disable_always: 165 | schedules += self.always_schedules 166 | schedules += self.auto_generated_schedules 167 | 168 | return schedules 169 | 170 | @property 171 | def all_valid_schedules(self) -> List[ScheduleEntry]: 172 | return [schedule for schedule in self.all_schedules if schedule.should_be_used] 173 | 174 | @property 175 | def all_valid_paths(self) -> List[str]: 176 | """ 177 | Returns a list of all valid paths from all valid schedules. Accounts for weight. 178 | """ 179 | paths = [] 180 | for schedule in self.all_valid_schedules: 181 | for _ in range(schedule.weight): 182 | paths.extend(schedule.paths) 183 | 184 | return paths 185 | 186 | @property 187 | def valid_schedule_count(self) -> int: 188 | return len(self.all_valid_schedules) 189 | 190 | @property 191 | def valid_schedule_count_log_message(self) -> str: 192 | return f""" 193 | Valid Schedule Count: 194 | Always - {"Disabled by other schedule(s)" if self.disable_always else self.valid_always_schedule_count} 195 | {self.valid_always_schedule_log_message} 196 | Weekly - {self.valid_weekly_schedule_count} 197 | {self.valid_weekly_schedule_log_message} 198 | Monthly - {self.valid_monthly_schedule_count} 199 | {self.valid_monthly_schedule_log_message} 200 | Date Ranges - {self.valid_date_range_schedule_count} 201 | {self.valid_date_range_schedule_log_message} 202 | Auto Generated - {"Disabled by other schedule(s)" if self.disable_always else self.valid_auto_generated_schedule_count} 203 | {self.auto_generated_schedules_log_message}""" 204 | -------------------------------------------------------------------------------- /modules/statics.py: -------------------------------------------------------------------------------- 1 | # Number 1-9, and A-Z 2 | import enum 3 | import subprocess 4 | import sys 5 | 6 | VERSION = "VERSIONADDEDBYGITHUB" 7 | COPYRIGHT = "Copyright © YEARADDEDBYGITHUB Nate Harris. All rights reserved." 8 | 9 | ASCII_ART = """ 10 | """ 11 | 12 | 13 | def splash_logo() -> str: 14 | version = VERSION 15 | if "GITHUB" in version: 16 | try: 17 | last_commit = subprocess.check_output(["git", "rev-parse", "HEAD"]).decode("utf-8").strip() 18 | version = f"git-{last_commit[:7]}" 19 | except subprocess.SubprocessError: 20 | version = "git-unknown-commit" 21 | return f""" 22 | {ASCII_ART} 23 | Version {version}, Python {sys.version} 24 | 25 | {COPYRIGHT} 26 | """ 27 | 28 | 29 | class ScheduleType(enum.Enum): 30 | monthly = "monthly" 31 | weekly = "weekly" 32 | date_range = "date_range" 33 | always = "always" 34 | 35 | 36 | def schedule_types() -> list[str]: 37 | """Return a list of Schedule Types 38 | 39 | Returns: 40 | List[ScheduleType]: List of Schedule Types 41 | """ 42 | return [_enum.value for _enum in ScheduleType] 43 | -------------------------------------------------------------------------------- /modules/utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | from datetime import datetime, timedelta, date 4 | from typing import Tuple, Union 5 | 6 | from pytz import timezone 7 | 8 | import modules.logs as logging 9 | 10 | 11 | def get_temporary_directory_path(sub_directory: str = None, parent_directory: str = None) -> str: 12 | """ 13 | Return a temporary directory path 14 | 15 | :param sub_directory: (Optional) subdirectory to use 16 | :type sub_directory: str, optional 17 | :param parent_directory: (Optional) parent directory to use 18 | :type parent_directory: str, optional 19 | :return: temporary directory path 20 | :rtype: str 21 | """ 22 | base = parent_directory or "/tmp" 23 | 24 | if sub_directory: 25 | base = os.path.join(base, sub_directory) 26 | 27 | path = os.path.join(base, os.urandom(24).hex()) 28 | 29 | os.makedirs(path, exist_ok=True) 30 | 31 | return path 32 | 33 | 34 | def get_temporary_file_path(sub_directory: str = None, parent_directory: str = None, file_extension: str = None) -> str: 35 | """ 36 | Return a temporary file path 37 | 38 | :param sub_directory: (Optional) subdirectory to use 39 | :type sub_directory: str, optional 40 | :param parent_directory: (Optional) parent directory to use 41 | :type parent_directory: str, optional 42 | :param file_extension: (Optional) file extension to use 43 | :type file_extension: str, optional 44 | :return: temporary file path 45 | :rtype: str 46 | """ 47 | base = parent_directory or "/tmp" 48 | 49 | if sub_directory: 50 | base = os.path.join(base, sub_directory) 51 | 52 | os.makedirs(base, exist_ok=True) 53 | 54 | return os.path.join(base, f"{os.urandom(24).hex()}{file_extension or '.tmp'}") 55 | 56 | 57 | def get_current_directory() -> str: 58 | return os.getcwd() 59 | 60 | 61 | def copy_file(source: str, destination: str): 62 | """ 63 | Copy a file 64 | 65 | :param source: source file to copy 66 | :type source: str 67 | :param destination: destination file to copy to 68 | :type destination: str 69 | """ 70 | shutil.copy(source, destination) 71 | 72 | 73 | def move_file(source: str, destination: str): 74 | """ 75 | Move a file 76 | 77 | :param source: source file to move 78 | :type source: str 79 | :param destination: destination file to move to 80 | :type destination: str 81 | """ 82 | shutil.move(source, destination) 83 | 84 | 85 | def create_directory(directory: str): 86 | """ 87 | Create a directory 88 | 89 | :param directory: directory to create 90 | :type directory: str 91 | """ 92 | os.makedirs(directory, exist_ok=True) 93 | 94 | 95 | def delete_directory(directory: str): 96 | """ 97 | Delete a directory 98 | 99 | :param directory: directory to delete 100 | :type directory: str 101 | """ 102 | if os.path.exists(directory): 103 | shutil.rmtree(directory) 104 | 105 | 106 | def delete_file(file: str): 107 | """ 108 | Delete a file 109 | 110 | :param file: file to delete 111 | :type file: str 112 | """ 113 | if os.path.exists(file): 114 | os.remove(file) 115 | 116 | 117 | def get_x_most_recent_files(directory: str, count: int) -> list: 118 | """ 119 | Get the most recent files in a directory 120 | 121 | :param directory: directory to search 122 | :type directory: str 123 | :param count: number of files to return 124 | :type count: int 125 | :return: list of files 126 | :rtype: list 127 | """ 128 | files = os.listdir(directory) 129 | files.sort(key=lambda x: os.path.getmtime(os.path.join(directory, x)), reverse=True) 130 | return files[:count] 131 | 132 | 133 | def get_all_files_in_directory_beyond_most_recent_x_count(directory: str, count: int) -> list: 134 | """ 135 | Get all files in a directory beyond the most recent x count 136 | 137 | :param directory: directory to search 138 | :type directory: str 139 | :param count: number of most recent files to keep 140 | :type count: int 141 | :return: list of files 142 | :rtype: list 143 | """ 144 | files = os.listdir(directory) 145 | files.sort(key=lambda x: os.path.getmtime(os.path.join(directory, x)), reverse=True) 146 | return files[count:] 147 | 148 | 149 | def make_plural(word, count: int, suffix_override: str = 's') -> str: 150 | if count > 1: 151 | return f"{word}{suffix_override}" 152 | return word 153 | 154 | 155 | def quote(string: str) -> str: 156 | return f"\"{string}\"" 157 | 158 | 159 | def status_code_is_success(status_code: int) -> bool: 160 | return 200 <= status_code < 300 161 | 162 | 163 | def milliseconds_to_minutes_seconds(milliseconds: int) -> str: 164 | seconds = int(milliseconds / 1000) 165 | minutes = int(seconds / 60) 166 | if minutes < 10: 167 | minutes = f"0{minutes}" 168 | seconds = int(seconds % 60) 169 | if seconds < 10: 170 | seconds = f"0{seconds}" 171 | return f"{minutes}:{seconds}" 172 | 173 | 174 | def milliseconds_to_hours_minutes_seconds(milliseconds: int) -> str: 175 | seconds = int(milliseconds / 1000) 176 | hours = int(seconds / 3600) 177 | if hours < 10: 178 | hours = f"0{hours}" 179 | seconds = int(seconds % 3600) 180 | minutes = int(seconds / 60) 181 | if minutes < 10: 182 | minutes = f"0{minutes}" 183 | seconds = int(seconds % 60) 184 | if seconds < 10: 185 | seconds = f"0{seconds}" 186 | return f"{hours}:{minutes}:{seconds}" 187 | 188 | 189 | def now(timezone_code: str = None) -> datetime: 190 | if timezone_code: 191 | return datetime.now(timezone(timezone_code)) # will raise exception if invalid timezone_code 192 | return datetime.now() 193 | 194 | 195 | def now_plus_milliseconds(milliseconds: int, timezone_code: str = None) -> datetime: 196 | if timezone_code: 197 | _now = datetime.now(timezone(timezone_code)) # will raise exception if invalid timezone_code 198 | else: 199 | _now = datetime.now() 200 | return _now + timedelta(milliseconds=milliseconds) 201 | 202 | 203 | def now_epoch() -> int: 204 | return int(datetime.now().timestamp()) 205 | 206 | 207 | def now_in_range(start: datetime, end: datetime) -> bool: 208 | _now = now() 209 | return start <= _now <= end 210 | 211 | 212 | def start_of_time() -> datetime: 213 | return datetime(1970, 1, 1) 214 | 215 | 216 | def end_of_time() -> datetime: 217 | return datetime(9999, 12, 31) 218 | 219 | 220 | def start_of_year(year: int = None) -> datetime: 221 | _now = now() 222 | 223 | if not year: 224 | year = _now.year 225 | 226 | return datetime(year, 1, 1) 227 | 228 | 229 | def end_of_year(year: int = None) -> datetime: 230 | _now = now() 231 | 232 | if not year: 233 | year = _now.year 234 | 235 | return datetime(year, 12, 31) 236 | 237 | 238 | def start_of_month(month_number: int = None) -> datetime: 239 | _now = now() 240 | 241 | if not month_number: 242 | month_number = _now.month 243 | 244 | return datetime(_now.year, month_number, 1) 245 | 246 | 247 | def end_of_month(month_number: int = None) -> datetime: 248 | _now = now() 249 | 250 | if not month_number: 251 | month_number = _now.month 252 | 253 | if month_number == 12: 254 | return end_of_year(year=_now.year) # If month is December, return end of year (shortcut) 255 | else: 256 | return start_of_month(month_number=month_number + 1) - timedelta( 257 | days=1) # Subtract one day from start of next month 258 | 259 | 260 | def start_of_week_number(week_number: int = None) -> datetime: 261 | _now = now() 262 | 263 | if not week_number: 264 | week_number = _now.strftime('%U') 265 | 266 | return datetime.strptime(f"{_now.year}-W{int(week_number)}-0", "%Y-W%W-%w") 267 | 268 | 269 | def end_of_week_number(week_number: int = None) -> datetime: 270 | _now = now() 271 | 272 | if not week_number: 273 | week_number = _now.strftime('%U') 274 | 275 | return datetime.strptime(f"{_now.year}-W{int(week_number)}-6", "%Y-W%W-%w") 276 | 277 | 278 | def make_midnight(date: datetime) -> datetime: 279 | return datetime(date.year, date.month, date.day) 280 | 281 | 282 | def make_right_before_midnight(date: datetime) -> datetime: 283 | return datetime(date.year, date.month, date.day, 23, 59, 59) 284 | 285 | 286 | def limit_text_length(text: str, limit: int, suffix: str = "...") -> str: 287 | if len(text) <= limit: 288 | return text 289 | 290 | suffix_length = len(suffix) 291 | return f"{text[:limit - suffix_length]}{suffix}" 292 | 293 | 294 | def string_to_datetime(date_string: str, template: str = "%Y-%m-%dT%H:%M:%S") -> datetime: 295 | """ 296 | Convert a datetime string to a datetime.datetime object 297 | 298 | :param date_string: datetime string to convert 299 | :type date_string: str 300 | :param template: (Optional) datetime template to use when parsing string 301 | :type template: str, optional 302 | :return: datetime.datetime object 303 | :rtype: datetime.datetime 304 | """ 305 | if date_string.endswith('Z'): 306 | date_string = date_string[:-5] 307 | return datetime.strptime(date_string, template) 308 | 309 | 310 | def datetime_to_string(datetime_object: datetime, template: str = "%Y-%m-%dT%H:%M:%S.000Z") -> str: 311 | """ 312 | Convert a datetime.datetime object to a string 313 | 314 | :param datetime_object: datetime.datetime object to convert 315 | :type datetime_object: datetime.datetime 316 | :param template: (Optional) datetime template to use when parsing string 317 | :type template: str, optional 318 | :return: str representation of datetime 319 | :rtype: str 320 | """ 321 | return datetime_object.strftime(template) 322 | 323 | 324 | def wildcard_strings_to_datetimes(start_date_string: str, end_date_string: str) -> \ 325 | Tuple[Union[datetime, None], Union[datetime, None]]: 326 | """ 327 | Convert date or datetime strings with wildcards to datetime.datetime objects 328 | 329 | :param start_date_string: start datetime string to convert 330 | :type start_date_string: str 331 | :param end_date_string: end datetime string to convert 332 | :type end_date_string: str 333 | :return: datetime.datetime object 334 | :rtype: datetime.datetime 335 | """ 336 | if isinstance(start_date_string, date): 337 | start_date_string = start_date_string.strftime("%Y-%m-%d") 338 | if isinstance(end_date_string, date): 339 | end_date_string = end_date_string.strftime("%Y-%m-%d") 340 | 341 | start_date_and_time = start_date_string.split(' ') 342 | end_date_and_time = end_date_string.split(' ') 343 | template = "%Y-%m-%d %H:%M:%S" 344 | _now = now() 345 | 346 | # Sample: xxxx-xx-xx 347 | # Sample: xxxx-xx-xx xx:xx:xx 348 | 349 | _start_date = start_date_and_time[0] # xxxx-xx-xx 350 | _end_date = end_date_and_time[0] # xxxx-xx-xx 351 | 352 | need_specific_datetime = False 353 | 354 | _start_time = start_date_and_time[1] if len(start_date_and_time) > 1 else "00:00:00" 355 | _end_time = end_date_and_time[1] if len(end_date_and_time) > 1 else "23:59:59" 356 | 357 | # Sample: xxxx-xx-xx xx:xx:xx 358 | 359 | start_time_parts = _start_time.split(':') 360 | end_time_parts = _end_time.split(':') 361 | 362 | start_second = start_time_parts[2] 363 | end_second = end_time_parts[2] 364 | 365 | # Can't have a wildcard in one and not the other 366 | if (start_second != 'xx' and end_second == 'xx') or (start_second == 'xx' and end_second != 'xx'): 367 | logging.error(message=f"Incompatible second comparison: {start_date_string} - {end_date_string}") 368 | return None, None 369 | 370 | # At this point, either they both have wildcards or neither do, we can assume based on start_second 371 | if start_second == 'xx': 372 | start_second = '00' 373 | end_second = '59' # Keep wide to ensure script running time doesn't interfere 374 | else: 375 | need_specific_datetime = True 376 | 377 | # Finalize the seconds 378 | start_second = int(start_second) 379 | end_second = int(end_second) 380 | 381 | start_minute = start_time_parts[1] 382 | end_minute = end_time_parts[1] 383 | 384 | # Can't have a wildcard in one and not the other 385 | if (start_minute != 'xx' and end_minute == 'xx') or (start_minute == 'xx' and end_minute != 'xx'): 386 | logging.error(message=f"Incompatible minute comparison: {start_date_string} - {end_date_string}") 387 | return None, None 388 | 389 | # At this point, either they both have wildcards or neither do, we can assume based on start_minute 390 | if start_minute == 'xx': 391 | if need_specific_datetime: 392 | start_minute = _now.minute 393 | end_minute = _now.minute + 3 # Give buffer for script running time 394 | else: 395 | start_minute = '00' 396 | end_minute = '59' # Keep wide to ensure script running time doesn't interfere 397 | else: 398 | need_specific_datetime = True 399 | 400 | # Finalize the minutes 401 | start_minute = int(start_minute) 402 | end_minute = int(end_minute) 403 | 404 | start_hour = start_time_parts[0] 405 | end_hour = end_time_parts[0] 406 | 407 | # Can't have a wildcard in one and not the other 408 | if (start_hour != 'xx' and end_hour == 'xx') or (start_hour == 'xx' and end_hour != 'xx'): 409 | logging.error(message=f"Incompatible hour comparison: {start_date_string} - {end_date_string}") 410 | return None, None 411 | 412 | # At this point, either they both have wildcards or neither do, we can assume based on start_hour 413 | if start_hour == 'xx': 414 | if need_specific_datetime: 415 | start_hour = _now.hour 416 | end_hour = _now.hour 417 | else: 418 | start_hour = '00' 419 | end_hour = '23' # Keep wide to ensure script running time doesn't interfere 420 | 421 | # Finalize the hours 422 | start_hour = int(start_hour) 423 | end_hour = int(end_hour) 424 | 425 | _start_time = f"{start_hour}:{start_minute}:{start_second}" 426 | _end_time = f"{end_hour}:{end_minute}:{end_second}" 427 | 428 | start_date_parts = _start_date.split('-') 429 | end_date_parts = _end_date.split('-') 430 | 431 | start_day = start_date_parts[2] 432 | end_day = end_date_parts[2] 433 | 434 | # Can't have a wildcard in one and not the other 435 | if (start_day != 'xx' and end_day == 'xx') or (start_day == 'xx' and end_day != 'xx'): 436 | logging.error(message=f"Incompatible day comparison: {start_date_string} - {end_date_string}") 437 | return None, None 438 | 439 | # At this point, either they both have wildcards or neither do, we can assume based on start_day 440 | if start_day == 'xx': 441 | start_day = _now.day 442 | end_day = _now.day 443 | else: 444 | need_specific_datetime = True 445 | 446 | # Finalize the days 447 | start_day = int(start_day) 448 | end_day = int(end_day) 449 | 450 | start_month = start_date_parts[1] 451 | end_month = end_date_parts[1] 452 | 453 | # Can't have a wildcard in one and not the other 454 | if (start_month != 'xx' and end_month == 'xx') or (start_month == 'xx' and end_month != 'xx'): 455 | logging.error(message=f"Incompatible month comparison: {start_date_string} - {end_date_string}") 456 | return None, None 457 | 458 | # At this point, either they both have wildcards or neither do, we can assume based on start_month 459 | if start_month == 'xx': 460 | if need_specific_datetime: 461 | start_month = _now.month 462 | end_month = _now.month 463 | 464 | # Account for crossing a month boundary 465 | if start_day > end_day: # e.g. Start on the 31st and end on the 1st 466 | if _now.day < start_day: # Current date is before start day (in the next month) 467 | start_month -= 1 # TODO: This could break if the start_month is January (1) -> 0 468 | else: 469 | end_month += 1 # TODO: This could break if the end_month is December (12) -> 13 470 | else: 471 | start_month = start_of_year().month 472 | end_month = end_of_year().month 473 | else: 474 | need_specific_datetime = True 475 | 476 | # Finalize the months 477 | start_month = int(start_month) 478 | end_month = int(end_month) 479 | 480 | start_year = start_date_parts[0] 481 | end_year = end_date_parts[0] 482 | 483 | # Can't have a wildcard in one and not the other 484 | if (start_year != 'xxxx' and end_year == 'xxxx') or (start_year == 'xxxx' and end_year != 'xxxx'): 485 | logging.error(message=f"Incompatible year comparison: {start_date_string} - {end_date_string}") 486 | return None, None 487 | 488 | # At this point, either they both have wildcards or neither do, we can assume based on start_year 489 | if start_year == 'xxxx': 490 | if need_specific_datetime: 491 | start_year = _now.year 492 | end_year = _now.year 493 | 494 | # Account for crossing a year boundary 495 | # At this point, the start_month and end_month are numerical strings or ints 496 | if start_month > end_month: # e.g. Start in December (12) and end in January (1) 497 | if _now.month < start_month: # Current date is before start month (in the next year) 498 | start_year -= 1 499 | else: 500 | end_year += 1 501 | else: 502 | start_year = start_of_time().year 503 | end_year = end_of_time().year 504 | 505 | # Finalize the years 506 | start_year = int(start_year) 507 | end_year = int(end_year) 508 | 509 | _start_date = f"{start_year}-{start_month}-{start_day}" 510 | _end_date = f"{end_year}-{end_month}-{end_day}" 511 | 512 | _start_datetime = f"{_start_date} {_start_time}" 513 | _end_datetime = f"{_end_date} {_end_time}" 514 | 515 | return string_to_datetime(date_string=_start_datetime, template=template), \ 516 | string_to_datetime(date_string=_end_datetime, template=template) 517 | -------------------------------------------------------------------------------- /modules/webhooks/__init__.py: -------------------------------------------------------------------------------- 1 | from modules.webhooks.plex import PlexWebhook 2 | from modules.webhooks.webhook_processor import WebhookProcessor 3 | -------------------------------------------------------------------------------- /modules/webhooks/last_run.py: -------------------------------------------------------------------------------- 1 | import enum 2 | from datetime import datetime 3 | 4 | from flask import ( 5 | request as flask_request, 6 | ) 7 | from pydantic import BaseModel, Field 8 | 9 | 10 | class Timeframe(enum.Enum): 11 | seconds = "s" 12 | minutes = "m" 13 | hours = "h" 14 | days = "d" 15 | 16 | 17 | def parse_timeframe(timeframe: str) -> tuple[int, Timeframe]: 18 | """ 19 | Parse the timeframe string into a tuple of an integer and a Timeframe enum. 20 | ex. 24h -> (24, Timeframe.hours), 5m -> (5, Timeframe.minutes) 21 | """ 22 | if not timeframe or len(timeframe) < 2: 23 | raise ValueError("Invalid timeframe format. Expected format: (e.g., 24h, 5m).") 24 | 25 | try: 26 | number = int(timeframe[:-1]) 27 | except ValueError: 28 | raise ValueError("Invalid number in timeframe.") 29 | 30 | unit = timeframe[-1].lower() 31 | if unit == 's': 32 | return number, Timeframe.seconds 33 | elif unit == 'm': 34 | return number, Timeframe.minutes 35 | elif unit == 'h': 36 | return number, Timeframe.hours 37 | elif unit == 'd': 38 | return number, Timeframe.days 39 | else: 40 | raise ValueError("Invalid timeframe unit. Use 's', 'm', 'h', or 'd'.") 41 | 42 | 43 | def seconds_between(start: datetime, end: datetime = None) -> int: 44 | """ 45 | Calculate the number of seconds between two datetime objects. 46 | :param start: The start datetime. 47 | :param end: The end datetime. 48 | :return: The number of seconds between the two datetimes. 49 | """ 50 | end = end or datetime.now() 51 | return int((end - start).total_seconds()) 52 | 53 | 54 | def minutes_between(start: datetime, end: datetime = None) -> int: 55 | """ 56 | Calculate the number of minutes between two datetime objects. 57 | :param start: The start datetime. 58 | :param end: The end datetime. 59 | :return: The number of minutes between the two datetimes. 60 | """ 61 | return seconds_between(start, end) // 60 62 | 63 | 64 | def hours_between(start: datetime, end: datetime = None) -> int: 65 | """ 66 | Calculate the number of hours between two datetime objects. 67 | :param start: The start datetime. 68 | :param end: The end datetime. 69 | :return: The number of hours between the two datetimes. 70 | """ 71 | return seconds_between(start, end) // 3600 72 | 73 | 74 | def days_between(start: datetime, end: datetime = None) -> int: 75 | """ 76 | Calculate the number of days between two datetime objects. 77 | :param start: The start datetime. 78 | :param end: The end datetime. 79 | :return: The number of days between the two datetimes. 80 | """ 81 | return seconds_between(start, end) // 86400 82 | 83 | 84 | class LastRunWithinTimeframeCheck(BaseModel): 85 | timeframe_number: int = Field(..., description="The numeric part of the timeframe (e.g., 24 in 24h).") 86 | timeframe_unit: Timeframe = Field(..., description="The unit of the timeframe (e.g., hours in 24h).") 87 | 88 | @classmethod 89 | def from_flask_request(cls, request: flask_request): 90 | """ 91 | Create an instance from a Flask request. 92 | """ 93 | timeframe = request.args.get('timeframe') 94 | 95 | if not timeframe: 96 | raise ValueError("Timeframe parameter is required.") 97 | 98 | try: 99 | number, unit = parse_timeframe(timeframe) 100 | return cls(timeframe_number=number, timeframe_unit=unit) 101 | except ValueError as e: 102 | raise ValueError(f"Could not parse timeframe: {e}") 103 | 104 | def is_within_timeframe(self, time: datetime) -> bool: 105 | """ 106 | Check if the given time is within the specified timeframe. 107 | :param time: The datetime to check against. 108 | :return: True if the time is within the timeframe, False otherwise. 109 | """ 110 | if self.timeframe_unit == Timeframe.seconds: 111 | return seconds_between(time) <= self.timeframe_number 112 | elif self.timeframe_unit == Timeframe.minutes: 113 | return minutes_between(time) <= self.timeframe_number 114 | elif self.timeframe_unit == Timeframe.hours: 115 | return hours_between(time) <= self.timeframe_number 116 | elif self.timeframe_unit == Timeframe.days: 117 | return days_between(time) <= self.timeframe_number 118 | else: 119 | raise ValueError("Invalid timeframe unit.") 120 | -------------------------------------------------------------------------------- /modules/webhooks/plex.py: -------------------------------------------------------------------------------- 1 | import enum 2 | from typing import Optional 3 | 4 | from pydantic import BaseModel, Field 5 | 6 | 7 | class PlexWebhookEventType(enum.Enum): 8 | MEDIA_ADDED = "library.new" 9 | ON_DECK = "library.on.deck" 10 | PLAY = "media.play" 11 | PAUSE = "media.pause" 12 | STOP = "media.stop" 13 | RESUME = "media.resume" 14 | SCROBBLE = "media.scrobble" 15 | RATE = "media.rate" 16 | DATABASE_BACKUP = "admin.database.backup" 17 | DATABASE_CORRUPTED = "admin.database.corrupted" 18 | NEW_ADMIN_DEVICE = "device.new" 19 | SHARED_PLAYBACK_STARTED = "playback.started" 20 | 21 | 22 | class PlexWebhookMetadataType(enum.Enum): 23 | MOVIE = "movie" 24 | 25 | 26 | class Account(BaseModel): 27 | id: Optional[int] = None 28 | thumb: Optional[str] = None 29 | title: Optional[str] = None 30 | 31 | 32 | class Server(BaseModel): 33 | title: Optional[str] = None 34 | uuid: Optional[str] = None 35 | 36 | 37 | class Player(BaseModel): 38 | local: bool 39 | publicAddress: Optional[str] = None 40 | title: Optional[str] = None 41 | uuid: Optional[str] = None 42 | 43 | 44 | class Metadata(BaseModel): 45 | librarySectionType: Optional[str] = None 46 | ratingKey: Optional[str] = None 47 | key: Optional[str] = None 48 | parentRatingKey: Optional[str] = None 49 | grandparentRatingKey: Optional[str] = None 50 | guid: Optional[str] = None 51 | librarySectionID: Optional[int] = None 52 | type: Optional[str] = None 53 | title: Optional[str] = None 54 | year: Optional[int] = None 55 | grandparentKey: Optional[str] = None 56 | parentKey: Optional[str] = None 57 | grandparentTitle: Optional[str] = None 58 | parentTitle: Optional[str] = None 59 | summary: Optional[str] = None 60 | index: Optional[int] = None 61 | parentIndex: Optional[int] = None 62 | ratingCount: Optional[int] = None 63 | thumb: Optional[str] = None 64 | art: Optional[str] = None 65 | parentThumb: Optional[str] = None 66 | grandparentThumb: Optional[str] = None 67 | grandparentArt: Optional[str] = None 68 | addedAt: Optional[int] = None 69 | updatedAt: Optional[int] = None 70 | 71 | 72 | class PlexWebhook(BaseModel): 73 | event: Optional[str] = None 74 | user: bool 75 | owner: bool 76 | account: Optional[Account] = Field(None, alias="Account") 77 | server: Optional[Server] = Field(None, alias="Server") 78 | player: Optional[Player] = Field(None, alias="Player") 79 | metadata: Optional[Metadata] = Field(None, alias="Metadata") 80 | 81 | @property 82 | def event_type(self) -> PlexWebhookEventType: 83 | return PlexWebhookEventType(self.event) 84 | -------------------------------------------------------------------------------- /modules/webhooks/webhook_processor.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | import threading 4 | from typing import Union 5 | 6 | import pydantic_core 7 | from flask import ( 8 | jsonify, 9 | request as flask_request, 10 | ) 11 | from plexapi.video import Movie 12 | 13 | import modules.logs as logging 14 | from consts import LAST_RUN_CHECK_FILE 15 | from modules import utils 16 | from modules.config_parser import Config 17 | from modules.plex_connector import PlexConnector 18 | from modules.renderers import RecentlyAddedPrerollRenderer 19 | from modules.webhooks.plex import PlexWebhook, PlexWebhookEventType, PlexWebhookMetadataType 20 | from modules.webhooks.last_run import LastRunWithinTimeframeCheck 21 | 22 | 23 | class WebhookProcessor: 24 | def __init__(self): 25 | pass 26 | 27 | @classmethod 28 | def _extract_body(cls, request: flask_request) -> dict: 29 | """ 30 | Extract the body from a Flask request. 31 | """ 32 | try: 33 | form_data = request.form.get('payload', '{}') 34 | return json.loads(form_data) 35 | except Exception as e: 36 | return {} 37 | 38 | @staticmethod 39 | def process_ping(request: flask_request, config: Config) -> [Union[str, None], int]: 40 | """ 41 | Process a ping request. 42 | Return a 'Pong!' response and a 200 status code. 43 | """ 44 | return 'Pong!', 200 45 | 46 | @staticmethod 47 | def process_last_run_within(request: flask_request, logs_folder: str) -> [Union[str, None], 48 | int]: 49 | """ 50 | Process a request to check if the last successful run was within a specified timeframe. 51 | :param request: Flask request object. 52 | :param logs_folder: Path to the logs folder (where the last run information is stored). 53 | :return: 200 if the last run was within the timeframe, 400 otherwise. 500 if there was an error processing the request. 54 | """ 55 | try: 56 | last_run_check = LastRunWithinTimeframeCheck.from_flask_request(request=request) 57 | return jsonify({}), 200 if WebhookProcessor._process_last_run_within_check(logs_folder=logs_folder, 58 | last_run_check=last_run_check) else 400 59 | except ValueError as e: 60 | logging.error(f"Error processing last run within request: {e}") 61 | return jsonify({"error": str(e)}), 500 62 | 63 | @staticmethod 64 | def _process_last_run_within_check(logs_folder: str, last_run_check: LastRunWithinTimeframeCheck) -> bool: 65 | """ 66 | Check if the last successful run was within the specified timeframe. 67 | :param logs_folder: Path to the logs folder. 68 | :param last_run_check: LastRunWithinTimeframeCheck instance containing the timeframe to check. 69 | :return: True if the last run was within the timeframe, False otherwise. 70 | """ 71 | try: 72 | last_run_time: datetime.datetime = logging.read_last_run_file(logs_folder=logs_folder, 73 | last_run_file=LAST_RUN_CHECK_FILE) 74 | if not last_run_time: 75 | logging.warning("Last run time is not available. Assuming it is not within the timeframe.") 76 | return False 77 | return last_run_check.is_within_timeframe(time=last_run_time) 78 | except Exception as e: 79 | logging.error(f"Error reading last run file: {e}") 80 | return False 81 | 82 | @staticmethod 83 | def process_recently_added(request: flask_request, config: Config, output_dir: str) -> [Union[str, None], int]: 84 | """ 85 | Process a recently added webhook from Tautulli. 86 | """ 87 | json_data = WebhookProcessor._extract_body(request=request) 88 | 89 | try: 90 | webhook = PlexWebhook(**json_data) 91 | except pydantic_core._pydantic_core.ValidationError as e: 92 | # If we receive a validation error (incoming webhook does not have the payload we expect), simply ignore it 93 | # This can happen, e.g. when we receive a playback start webhook for a cinema trailer, which does not have a librarySectionID 94 | return jsonify({}), 200 95 | 96 | match webhook.event_type: 97 | case PlexWebhookEventType.MEDIA_ADDED: 98 | if webhook.metadata.type == PlexWebhookMetadataType.MOVIE.value: # Skip if new content is not a movie 99 | thread = threading.Thread(target=WebhookProcessor._process_recently_added_preroll_render, 100 | args=(webhook, config, output_dir)) 101 | thread.start() 102 | case _: # pragma: no cover 103 | pass 104 | 105 | return jsonify({}), 200 106 | 107 | @staticmethod 108 | def _process_recently_added_preroll_render(webhook: PlexWebhook, config: Config, output_dir: str) -> None: 109 | """ 110 | Process the preroll render for a recently added webhook. 111 | """ 112 | plex_connector = PlexConnector(host=config.plex.url, token=config.plex.token) 113 | logging.info(f'Retrieving information from Plex for recently added movie: "{webhook.metadata.title}"') 114 | plex_movie: Movie = plex_connector.get_movie(item_key=webhook.metadata.key) 115 | if not plex_movie: 116 | logging.warning(f'Could not find movie in Plex: "{webhook.metadata.title}"') # Not an error, just a warning 117 | return 118 | 119 | renderer = RecentlyAddedPrerollRenderer(render_folder=output_dir, 120 | movie=plex_movie) 121 | asset_folder, local_file_path = renderer.render(config=config) 122 | 123 | if not local_file_path: # error has already been logged 124 | return 125 | 126 | destination_folder = f"{config.advanced.auto_generation.recently_added.local_files_root}" 127 | utils.create_directory(directory=destination_folder) 128 | utils.copy_file(source=local_file_path, destination=destination_folder) 129 | 130 | files_to_delete = utils.get_all_files_in_directory_beyond_most_recent_x_count(directory=destination_folder, 131 | count=config.advanced.auto_generation.recently_added.count) 132 | logging.info( 133 | f"Deleting {len(files_to_delete)} prerolls from {destination_folder} to maintain {config.advanced.auto_generation.recently_added.count} auto-generated prerolls limit") 134 | for remote_file in files_to_delete: 135 | utils.delete_file(remote_file) 136 | 137 | logging.info(f"Cleaning up local preroll assets folder: '{asset_folder}'") 138 | utils.delete_directory(directory=asset_folder) 139 | -------------------------------------------------------------------------------- /modules/youtube_downloader.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | 3 | import youtubesearchpython 4 | import yt_dlp 5 | import os 6 | 7 | import modules.logs as logging 8 | from modules.config_parser import Config 9 | 10 | 11 | class SelectorPresets: 12 | @staticmethod 13 | def select_first_video(videos: dict) -> str: 14 | """ 15 | Select the first video ID from a search result. 16 | 17 | :param videos: The search result dictionary. 18 | 19 | :return: The selected video ID. 20 | """ 21 | return videos[0]['id'] 22 | 23 | @staticmethod 24 | def select_last_video(videos: dict) -> str: 25 | """ 26 | Select the last video ID from a search result. 27 | 28 | :param videos: The search result dictionary. 29 | 30 | :return: The selected video ID. 31 | """ 32 | return videos[-1]['id'] 33 | 34 | @staticmethod 35 | def select_random_video(videos: dict) -> str: 36 | """ 37 | Select a random video ID from a search result. 38 | 39 | :param videos: The search result dictionary. 40 | 41 | :return: The selected video ID. 42 | """ 43 | import random 44 | return random.choice(videos)['id'] 45 | 46 | 47 | class YouTubeDownloaderLogger: 48 | def debug(self, msg): 49 | # For compatibility with youtube-dl, both debug and info are passed into debug 50 | # You can distinguish them by the prefix '[debug] ' 51 | if msg.startswith('[debug] '): 52 | pass # Suppress debug messages 53 | # logging.debug(msg) 54 | else: 55 | self.info(msg) 56 | 57 | def info(self, msg): 58 | pass # Suppress info messages 59 | # logging.info(msg) 60 | 61 | def warning(self, msg): 62 | logging.warning(msg) 63 | 64 | def error(self, msg): 65 | logging.error(msg) 66 | 67 | 68 | def _download_progress_hook(d): 69 | if d['status'] == 'finished': 70 | logging.info('Download complete') 71 | 72 | 73 | def get_video_url(video_id: str) -> str: 74 | """ 75 | Get a YouTube video URL from a video ID. 76 | 77 | :param video_id: The video ID. 78 | 79 | :return: The video URL. 80 | """ 81 | return f"https://www.youtube.com/watch?v={video_id}" 82 | 83 | 84 | def run_youtube_search(query: str, selector_function: Callable[[dict], str], results_limit: int = 20) -> str: 85 | """ 86 | Run a YouTube search and return a video ID. 87 | 88 | :param query: The search query. 89 | :param selector_function: A function that selects a video ID from the search results dictionary. 90 | :param results_limit: The number of results to return. 91 | 92 | :return: The selected video ID. 93 | """ 94 | search_results: dict = youtubesearchpython.CustomSearch(query=query, 95 | # sp parameter: Videos only, <4 minutes, sorted by relevance 96 | searchPreferences="EgQQARgB", 97 | limit=results_limit).result() 98 | videos = search_results['result'] 99 | return selector_function(videos) 100 | 101 | 102 | def download_youtube_video(url: str, config: Config, output_dir: str, output_filename: str = None) -> str: 103 | """ 104 | Download a YouTube video as a video file. 105 | 106 | :param url: The YouTube video URL. 107 | :param config: The configuration for Plex Prerolls. 108 | :param output_dir: The output directory. 109 | :param output_filename: The output filename. 110 | :return: The path to the downloaded file. 111 | """ 112 | cookies_file = config.advanced.auto_generation.cookies_file 113 | options = { 114 | "paths": {"home": output_dir}, 115 | 'logger': YouTubeDownloaderLogger(), 116 | # 'progress_hooks': [_download_progress_hook], 117 | "overwrites": True, 118 | } 119 | if output_filename: 120 | options['outtmpl'] = f"{output_filename}.%(ext)s" 121 | if cookies_file: 122 | options['cookiefile'] = cookies_file 123 | 124 | with yt_dlp.YoutubeDL(params=options) as ydl: 125 | # download the file and extract info 126 | info = ydl.extract_info(url, download=True) 127 | # return the file path 128 | return ydl.prepare_filename(info) 129 | 130 | 131 | def download_youtube_audio(url: str, config: Config, output_dir: str, output_filename: str = None) -> str: 132 | """ 133 | Download a YouTube video as an audio file. 134 | 135 | :param url: The YouTube video URL. 136 | :param config: The configuration for Plex Prerolls. 137 | :param output_dir: The output directory. 138 | :param output_filename: The output filename. 139 | :return: The path to the downloaded file. 140 | """ 141 | cookies_file = config.advanced.auto_generation.cookies_file 142 | options = { 143 | "paths": {"home": output_dir}, 144 | 'format': 'm4a/bestaudio/best', 145 | 'postprocessors': [{ # Extract audio using ffmpeg 146 | 'key': 'FFmpegExtractAudio', 147 | 'preferredcodec': 'm4a', 148 | }], 149 | 'logger': YouTubeDownloaderLogger(), 150 | # 'progress_hooks': [_download_progress_hook], 151 | "overwrites": True, 152 | } 153 | if output_filename: 154 | options['outtmpl'] = f"{output_filename}.%(ext)s" 155 | if cookies_file: 156 | options['cookiefile'] = cookies_file 157 | 158 | with yt_dlp.YoutubeDL(params=options) as ydl: 159 | # download the file and extract info 160 | info = ydl.extract_info(url, download=True) 161 | # return the file path 162 | return ydl.prepare_filename(info) 163 | -------------------------------------------------------------------------------- /pm2_keepalive.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | while True: 4 | time.sleep(1) 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | plexapi==4.13.* 2 | confuse~=1.7 3 | pytz~=2022.1 4 | PyYAML==6.0.* 5 | Flask~=3.0.2 6 | croniter==3.0.3 7 | ffmpeg-python==0.2.0 8 | youtube-search-python==1.6.6 9 | yt-dlp==2025.3.31 10 | pydantic>=2.10.0 11 | -------------------------------------------------------------------------------- /run.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import threading 3 | from datetime import datetime 4 | from time import sleep 5 | 6 | from croniter import croniter 7 | from flask import ( 8 | Flask, 9 | request as flask_request, 10 | ) 11 | 12 | import modules.logs as logging 13 | from consts import ( 14 | APP_NAME, 15 | APP_DESCRIPTION, 16 | DEFAULT_CONFIG_PATH, 17 | DEFAULT_LOG_DIR, 18 | DEFAULT_RENDERS_DIR, 19 | CONSOLE_LOG_LEVEL, 20 | FILE_LOG_LEVEL, 21 | FLASK_ADDRESS, 22 | FLASK_PORT, 23 | LAST_RUN_CHECK_FILE, 24 | ) 25 | from modules.config_parser import Config 26 | from modules.errors import determine_exit_code 27 | from modules.plex_connector import PlexConnector 28 | from modules.schedule_manager import ScheduleManager 29 | from modules.webhooks.webhook_processor import WebhookProcessor 30 | 31 | parser = argparse.ArgumentParser(description=f"{APP_NAME} - {APP_DESCRIPTION}") 32 | 33 | parser.add_argument("-c", "--config", help=f"Path to config file. Defaults to '{DEFAULT_CONFIG_PATH}'", 34 | default=DEFAULT_CONFIG_PATH) 35 | parser.add_argument("-l", "--log", help=f"Log file directory. Defaults to '{DEFAULT_LOG_DIR}'", 36 | default=DEFAULT_LOG_DIR) # Should include trailing backslash 37 | parser.add_argument("-r", "--renders", help=f"Path to renders directory. Defaults to '{DEFAULT_RENDERS_DIR}'", 38 | default=DEFAULT_RENDERS_DIR) 39 | 40 | args = parser.parse_args() 41 | 42 | # Set up logging 43 | logging.init(app_name=APP_NAME, 44 | console_log_level=CONSOLE_LOG_LEVEL, 45 | log_to_file=True, 46 | log_file_dir=args.log, 47 | file_log_level=FILE_LOG_LEVEL) 48 | 49 | _config = Config(app_name=APP_NAME, config_path=f"{args.config}") 50 | 51 | 52 | def run_with_potential_exit_on_error(func): 53 | def wrapper(*args, **kwargs): 54 | try: 55 | return func(*args, **kwargs) 56 | except Exception as e: 57 | logging.fatal(f"Fatal error occurred. Shutting down: {e}") 58 | exit_code = determine_exit_code(exception=e) 59 | logging.fatal(f"Exiting with code {exit_code}") 60 | exit(exit_code) 61 | 62 | return wrapper 63 | 64 | @run_with_potential_exit_on_error 65 | def pre_roll_update(config: Config): 66 | cron_pattern = config.run.schedule 67 | while True: 68 | now = datetime.now() 69 | if not croniter.match(cron_pattern, now): 70 | # Cron only goes to minutes, not seconds, so we don't need to recheck as often 71 | sleep(30) # Sleep/check every 30 seconds 72 | continue 73 | 74 | logging.info(f"Current time {now} matches cron pattern '{cron_pattern}'") 75 | logging.info(f"Running pre-roll update...") 76 | 77 | schedule_manager = ScheduleManager(config=config) 78 | 79 | logging.info(f"Found {schedule_manager.valid_schedule_count} valid schedules") 80 | logging.info(schedule_manager.valid_schedule_count_log_message) 81 | 82 | all_valid_paths = schedule_manager.all_valid_paths 83 | 84 | plex_connector = PlexConnector(host=config.plex.url, token=config.plex.token) 85 | plex_connector.update_pre_roll_paths(paths=all_valid_paths, testing=config.run.dry_run) 86 | 87 | logging.write_to_last_run_file(logs_folder=args.log, last_run_file=LAST_RUN_CHECK_FILE) 88 | 89 | sleep(60) # Sleep at least a minute to avoid running multiple times in the same minute 90 | 91 | 92 | if __name__ == '__main__': 93 | # logging.info(splash_logo()) 94 | logging.info(f"Starting {APP_NAME}...") 95 | 96 | pre_roll_update(config=_config) 97 | -------------------------------------------------------------------------------- /templates/plex_prerolls.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | Plex Prerolls 4 | nwithan8/plex_prerolls:latest 5 | https://hub.docker.com/r/nwithan8/plex_prerolls 6 | 7 | latest 8 | Latest stable release 9 | 10 | bridge 11 | bash 12 | false 13 | https://github.com/nwithan8/plex-prerolls/issues 14 | https://github.com/nwithan8/plex-prerolls 15 | Plex Prerolls is a tool to automate preroll scheduling for a Plex Media Server. 16 | Tools: MediaServer: Productivity: Other: Status:Stable 17 | https://raw.githubusercontent.com/nwithan8/plex-prerolls/main/documentation/images/logo.png 18 | https://raw.githubusercontent.com/nwithan8/unraid_templates/main/templates/plex_prerolls.xml 19 | 20 | https://github.com/nwithan8 21 | 22 | 8283 23 | /mnt/user/appdata/plex_prerolls/files 24 | 25 | UTC 26 | /mnt/user/appdata/plex_prerolls/config 27 | /mnt/user/appdata/plex_prerolls/logs 28 | /mnt/user/appdata/plex_prerolls/renders 29 | --------------------------------------------------------------------------------