├── .coveragerc ├── .dockerignore ├── .editorconfig ├── .flake8 ├── .gitignore ├── .isort.cfg ├── .mypy.ini ├── .pylintrc ├── .travis.yml ├── .yapf ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── Makefile ├── Pipfile ├── Pipfile.lock ├── README.md ├── README.rst ├── assets ├── dopplerr-16x16.jpg ├── dopplerr-32x32.jpg ├── dopplerr-64x64.jpg ├── dopplerr-banner.png └── dopplerr-square.psd ├── bootstrap-system.sh ├── cfgtree ├── README.rst ├── __init__.py ├── cfgtree.py ├── dictxpath.py ├── storages.py ├── tests │ ├── __init__.py │ └── test_dictxparth.py └── types.py ├── dockerfs └── etc │ ├── cont-init.d │ └── 30-install │ └── services.d │ └── dopplerr │ └── run ├── dopplerr ├── __init__.py ├── api │ ├── __init__.py │ ├── add_route.py │ └── v1 │ │ ├── __init__.py │ │ ├── config.py │ │ ├── events.py │ │ ├── medias.py │ │ ├── notify.py │ │ ├── series.py │ │ └── status.py ├── config.py ├── db.py ├── descriptors │ ├── __init__.py │ ├── media_types.py │ └── series.py ├── downloader.py ├── json.py ├── logging.py ├── main.py ├── main_pyannotate.py ├── notifications │ ├── __init__.py │ ├── _base.py │ └── pushover.py ├── notifications_types │ ├── __init__.py │ ├── _base.py │ ├── series_media_refreshed.py │ └── series_subtitles_fetched.py ├── plugins │ ├── __init__.py │ └── sonarr │ │ ├── __init__.py │ │ ├── filter.py │ │ ├── response.py │ │ └── task.py ├── request_filter.py ├── response.py ├── routes.py ├── singleton.py ├── status.py ├── tasks │ ├── __init__.py │ ├── base.py │ ├── disk_scanner.py │ ├── download_subtitles.py │ ├── manager.py │ ├── periodic.py │ ├── queued.py │ ├── subtasks │ │ ├── __init__.py │ │ └── subliminal.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_aps.py │ │ ├── test_download_subtitles.py │ │ ├── test_executors.py │ │ ├── test_queued.py │ │ └── vectors │ │ │ ├── a_subfolder │ │ │ ├── complex[name].mkv │ │ │ ├── prepended-videofile.mp4 │ │ │ ├── videofile-suffixed.mp4 │ │ │ └── videofile.mp4 │ │ │ ├── anothervideo.mkv │ │ │ ├── 
complex[name][withanothersuffix].mkv │ │ │ ├── notavideofile.mp3 │ │ │ └── videofile.mp4 │ └── threaded.py └── tests │ ├── __init__.py │ └── vectors │ ├── basedir │ └── Series │ │ └── A Series Title │ │ └── Season 1 │ │ └── The.Episode.Title.S01E09.1080p.WEB-DL.DD5.1.H264-AGROUP-Scrambled.mkv │ ├── radarr_on_grab-info.json │ ├── radarr_on_grab.json │ ├── sonarr_on_download-info.json │ ├── sonarr_on_download.json │ ├── sonarr_on_download2-info.json │ ├── sonarr_on_download2.json │ ├── sonarr_on_download_rename-info.json │ ├── sonarr_on_download_rename.json │ ├── sonarr_on_grab-info.json │ └── sonarr_on_grab.json ├── frontend ├── .babelrc ├── .editorconfig ├── .eslintignore ├── .eslintrc.js ├── .gitignore ├── .stylintrc ├── Makefile ├── README.md ├── build │ ├── css-utils.js │ ├── env-utils.js │ ├── hot-reload.js │ ├── script.build.js │ ├── script.clean.js │ ├── script.dev.js │ ├── webpack.base.conf.js │ ├── webpack.dev.conf.js │ └── webpack.prod.conf.js ├── config │ ├── dev.env.js │ ├── index.js │ └── prod.env.js ├── package-lock.json ├── package.json ├── src │ ├── App.vue │ ├── assets │ │ ├── Velocity0_70c.jpg │ │ ├── dopplerr-32x32.jpg │ │ ├── dopplerr-64x64.jpg │ │ ├── hubblecast43f-thin.jpg │ │ ├── hubblecast43f-thin.psd │ │ ├── hubblecast43f.jpg │ │ ├── hubblecast43f0.jpg │ │ ├── quasar-logo-full.svg │ │ └── quasar-logo.png │ ├── components │ │ ├── 404.vue │ │ ├── pages.vue │ │ └── pages │ │ │ ├── about.vue │ │ │ ├── events.vue │ │ │ ├── home.vue │ │ │ ├── logs.vue │ │ │ ├── movies.vue │ │ │ ├── series.vue │ │ │ └── status.vue │ ├── index.html │ ├── main.js │ ├── router.js │ ├── statics │ │ ├── apple-icon-152x152.png │ │ ├── favicon-16x16.png │ │ ├── favicon-32x32.png │ │ ├── hubblecast43f-thin.png │ │ ├── icon-192x192.png │ │ ├── icon-512x512.png │ │ └── ms-icon-144x144.png │ └── themes │ │ ├── app.ios.styl │ │ ├── app.mat.styl │ │ ├── app.variables.styl │ │ └── quasar.variables.styl └── templates │ ├── component.vue │ ├── layout.vue │ └── page.vue ├── 
requirements-dev.txt ├── requirements.txt ├── setup.cfg └── setup.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = dopplerr 4 | 5 | [report] 6 | ignore_errors = True 7 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .venv 2 | .travis.* 3 | assets 4 | frontend/node_modules 5 | frontend/test 6 | Pipfile.lock 7 | requirements.txt 8 | requirements-dev.txt 9 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig is awesome: http://EditorConfig.org 2 | 3 | # top-most EditorConfig file 4 | root = true 5 | 6 | [*] 7 | end_of_line = lf 8 | insert_final_newline = true 9 | indent_style = space 10 | indent_size = 4 11 | tab_width = 4 12 | charset = utf-8 13 | trim_trailing_whitespace = true 14 | 15 | [*.rst] 16 | tab_width = 4 17 | 18 | [*.md] 19 | tab_width = 2 20 | 21 | [*.yml] 22 | tab_width = 2 23 | 24 | [Makefile] 25 | indent_style = tab 26 | indent_size = 4 27 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | 2 | [flake8] 3 | ignore = 4 | D1, # Missing docstring in * 5 | D200, # One-line docstring should fit on one line with quotes 6 | D202, # No blank lines allowed after function docstring 7 | D211, # No blank lines allowed before class docstring 8 | E704, # multi statement on one line 9 | E741, # ambiguous variable name 10 | F401, # bad handling of typing 11 | 12 | enable-extensions= 13 | G, # enable all logging-format checks 14 | 15 | max-line-length = 100 16 | exclude = .env 17 | max-complexity = 9 18 | accept-encodings = utf-8 19 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | _trial_temp/ 3 | .cache 4 | .eggs/ 5 | .mypy_cache 6 | .venv 7 | .vscode 8 | *.egg-info/ 9 | *.log 10 | *.log.* 11 | *.pyc 12 | /.coverage 13 | /AUTHORS 14 | /build 15 | /ChangeLog 16 | /sqlite.db-shm 17 | /sqlite.db-wal 18 | /type_info.json 19 | AUTHORS 20 | cachefile.dbm 21 | ChangeLog 22 | config.json 23 | dist/ 24 | junit.xml 25 | sqlite.db 26 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | 2 | [isort] 3 | line_length = 100 4 | known_future_library = future 5 | known_third_party = 6 | asynctest, 7 | coverage, 8 | flask, 9 | sanic, 10 | setuptools, 11 | default_section = THIRDPARTY 12 | known_first_party = 13 | dopplerr 14 | force_single_line = 1 15 | sections = 16 | FUTURE, 17 | STDLIB, 18 | THIRDPARTY, 19 | FIRSTPARTY, 20 | LOCALFOLDER, 21 | min-similarity-lines=5 22 | ignore-comments=yes 23 | ignore-docstrings=yes 24 | ignore-imports=yes 25 | import_heading_stdlib: Standard Libraries 26 | import_heading_thirdparty: Third Party Libraries 27 | import_heading_firstparty: Dopplerr 28 | -------------------------------------------------------------------------------- /.mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports: True 3 | disallow_untyped_defs = False 4 | disallow_subclassing_any = False 5 | warn_no_return = True 6 | strict_optional = False 7 | no_implicit_optional = False 8 | disallow_any_generics = False 9 | disallow_any_unimported = False 10 | warn_redundant_casts = True 11 | warn_unused_ignores = True 12 | warn_unused_configs = False 13 | 14 | 15 | [mypy-dopplerr.api.*] 16 | ignore_errors = True 17 | 18 | [mypy-dopplerr.db.*] 19 | # peewee messes with Any 
inheritance 20 | disallow_subclassing_any = False 21 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | python: 4 | - "3.6" 5 | # - "pypy3.6" # not ready yet 6 | 7 | install: 8 | - sudo ./bootstrap-system.sh 9 | - sudo apt-get install -y libffi-dev libssl-dev pandoc nodejs 10 | - pip3 install -U 'pip>=9.0.1' 'pipenv>=9.0.1' 'setuptools>=36.6.0' 11 | # for coveralls and removing InsecurePlatformWarning 12 | - pipenv run pip install codecov coveralls pytest-cov requests[security] 13 | - make backend-dev 14 | - make frontend-dev 15 | 16 | script: 17 | - make backend-checks 18 | - make frontend-lint 19 | - make frontend-build 20 | - make release 21 | - make test-unit 22 | - make test-coverage 23 | # - ./test-docker.sh 24 | 25 | after_success: 26 | - coveralls 27 | - codecov 28 | 29 | # After you create the Github repo and add it to Travis, run the 30 | # travis_pypi_setup.py script to finish PyPI deployment setup 31 | deploy: 32 | provider: pypi 33 | user: stibbons 34 | distributions: sdist bdist_wheel 35 | password: 36 | secure: !!binary | 37 | TktFNXg2ZldIVHB6Y0FES0lwLzBLYU14NDJtM1B0eGF1NjJKMTBkR0h1dVoyU3BVUk9CVjNneVVx 38 | NFlBSnQyZEhBUXR2T21qTHJvQk5DK1JkOUlXVXk2S3FYS3RJR0RHU29xM2l3OG9BdHF2eVFTNEdX 39 | TERZUXBWOE9OUnVZRmphbXQrZnBYQWIzU0N0RldkaEEvaFBjV293Q3cvamZXcDVWbXpYaHJ4MHRt 40 | NlRlUFdZNGtJOXp3ZTJibWZDbHh0MnNpMEkwODBpZUllb1hxanVLcTFtVWhaUEFXZzFsWlpTby9l 41 | cWc2emp1K2lJMldnRklicHRCS1VIUWUvNjlSck5nNEVPMnErM01nSVJtZ1dZeENGblRURGtMcXVy 42 | ZVBZYjlxQ1lkWURDb0lrZUpEWGkvTFU4Nml5N2puMjYxMnlFOVUxazc1dUszM3FFUElkcithMTNx 43 | QXRlb1hxUDJPWnpEWnYxb2tIZno1SSt2UmtNOUU4NC8zTFdEa1RvRzBwekxXVWo5enBhbWpIeTFJ 44 | czRUbG1LVjZDTGZBOHFmREJQVU5ONzkxVmRia2pJTUp0OXpITDhmVy9VeHI1WGVRRjJMbFYvSDRD 45 | Q1hCYko2QXFzdm1YSWRYZnhpUFNzdWJYMURvb0s0QTZqczVJRlh5bTYybnhWbnVEQmM3SHpzejhn 46 | 
eGgwTDRteU9iQ0ZVQllBQ3ZWSWJHYmtlWXMveWZtV21QaVRDamZKV0JtbHFFa3dMdU5adnpGN0Mz 47 | VlFSN0NLK3hrWWs3OGJVaXhRQS9uVThpakZQRUp5K0d2UEhjb3lOYlhrZldvV3RGN0FTZmRXZDV1 48 | VUZQN3VnbnB0R05SZGJMZ3ZPVENEcGQrN2RqU2MzSDArT1oxVEhRWU1YWXArckNVZ3hjVVRSZDg9 49 | on: 50 | tags: true 51 | repo: Stibbons/dopplerr 52 | -------------------------------------------------------------------------------- /.yapf: -------------------------------------------------------------------------------- 1 | [style] 2 | based_on_style = pep8 3 | 4 | ; knobs 5 | align_closing_bracket_with_visual_indent = true 6 | blank_line_before_class_docstring= true 7 | blank_line_before_nested_class_or_def = true 8 | coalesce_brackets = false 9 | column_limit = 100 10 | dedent_closing_brackets = false 11 | indent_dictionary_value = true 12 | spaces_around_power_operator = false 13 | spaces_before_comment = 2 14 | split_before_first_argument= false 15 | split_before_logical_operator = false 16 | split_penalty_after_opening_bracket = 1000 17 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM stibbons31/alpine-s6-python3-twisted:py3.6-tx17.9 2 | MAINTAINER gaetan@xeberon.net 3 | 4 | # set environment variables 5 | ENV PYTHONIOENCODING="UTF-8" 6 | 7 | RUN apk add --no-cache --update \ 8 | curl \ 9 | gcc \ 10 | git \ 11 | linux-headers \ 12 | make \ 13 | musl-dev \ 14 | nodejs \ 15 | python2 \ 16 | python3-dev 17 | 18 | # Install frontend high level dependencies 19 | RUN apk add --no-cache --update \ 20 | nodejs \ 21 | nodejs-npm \ 22 | && npm install -g npm@5 23 | 24 | # Injecting files into containers 25 | RUN mkdir -p /app 26 | WORKDIR /app 27 | 28 | # Keep dependencies on its own Docker FS Layer 29 | # To avoid reinstall of all dependencies each time code changes 30 | COPY Pipfile* setup-pip.sh /app/ 31 | RUN ./setup-pip.sh \ 32 | && pipenv install --system --skip-lock 33 | 34 | # 
installing main Python module so that PBR finds the version 35 | # used in later 'make version' targets 36 | COPY . /app/ 37 | RUN cd /app \ 38 | && pip install . 39 | 40 | # Adding rest of the application in next docker layers 41 | COPY frontend /app/frontend/ 42 | 43 | RUN cd /app/frontend \ 44 | && make dev \ 45 | && make version \ 46 | && make build \ 47 | && mkdir -p /www \ 48 | && cp -rf dist/* /www/ \ 49 | && rm -rf /app/frontend 50 | 51 | RUN npm cache clear --force \ 52 | && apk del \ 53 | nodejs \ 54 | nodejs-npm 55 | 56 | # copy containers's startup files 57 | COPY dockerfs/ / 58 | RUN mkdir -p /media 59 | 60 | USER root 61 | # clean up 62 | # dopplerr is installed on the system, /www has the frontend 63 | RUN rm -rfv /app 64 | 65 | RUN apk del \ 66 | python3-dev \ 67 | make \ 68 | gcc \ 69 | curl \ 70 | linux-headers \ 71 | musl-dev \ 72 | nodejs \ 73 | && rm -rf \ 74 | /root/.cache \ 75 | /tmp/* 76 | 77 | # Docker configuration 78 | EXPOSE 8086 79 | VOLUME /config \ 80 | /animes \ 81 | /movies \ 82 | /tv 83 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Gaetan Semet 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # plase here the files and directory you do not want to place in you distribution packages 2 | # Note: this only impact source distribution package (`sdist`), not the binary packages 3 | # (`bdist` and `bdist_wheel`). So these files will appear in the wheel 4 | 5 | #Unit tests 6 | recursive-exclude dopplerr/tests * 7 | recursive-exclude dopplerr/tasks/tests * 8 | recursive-exclude cfgtree/tests * 9 | 10 | # Frontend source code 11 | recursive-exclude frontend/ * 12 | recursive-include frontend/dist/ * # force the dist/ folder 13 | 14 | # docker filesystem 15 | recursive-exclude dockerfs/ * 16 | 17 | # assets 18 | recursive-exclude assets * 19 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | 3 | verify_ssl = true 4 | url = "https://pypi.python.org/simple" 5 | 6 | 7 | [requires] 8 | 9 | python_version = "3.6" 10 | 11 | 12 | [dev-packages] 13 | 14 | pandoc = "*" 15 | pytest = "*" 16 | cryptography = "*" 17 | yapf = "*" 18 | tox = "*" 19 | isort = "*" 20 | pylint = "*" 21 | "autopep8" = "*" 22 | pyyaml = "*" 23 | "flake8" = "*" 24 | pipenv-to-requirements = ">=0.1.9" 25 | pytest-asyncio = "*" 26 | "flake8-coding" = "*" 27 | "flake8-comprehensions" = "*" 
28 | asynctest = "*" 29 | pytest-sugar = "*" 30 | pyannotate = "*" 31 | mypy = "*" 32 | "flake8-logging-format" = "*" 33 | "flake8-docstrings" = "*" 34 | sphinx = "*" 35 | 36 | 37 | [packages] 38 | 39 | sanic-transmute = "*" 40 | subliminal = { git = "https://github.com/stibbons/subliminal", edit = true } 41 | peewee = "*" 42 | pbr = "*" 43 | colorlog = "*" 44 | setuptools = "!=36.0.0" 45 | sanic = "*" 46 | aiohttp = "*" 47 | apsw = "*" 48 | apscheduler = "*" 49 | aiofiles = "*" 50 | -------------------------------------------------------------------------------- /assets/dopplerr-16x16.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/assets/dopplerr-16x16.jpg -------------------------------------------------------------------------------- /assets/dopplerr-32x32.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/assets/dopplerr-32x32.jpg -------------------------------------------------------------------------------- /assets/dopplerr-64x64.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/assets/dopplerr-64x64.jpg -------------------------------------------------------------------------------- /assets/dopplerr-banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/assets/dopplerr-banner.png -------------------------------------------------------------------------------- /assets/dopplerr-square.psd: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/assets/dopplerr-square.psd -------------------------------------------------------------------------------- /bootstrap-system.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Bootstrap system dependencies" 4 | echo "sudo it accodingly to your system" 5 | # git : For PBR 6 | # make : Developer's toolbox 7 | # pandoc : for README .md =>.rst 8 | # npm : for building frontend 9 | # libpython3.6-dev : for 'Python.h' (if Twisted wheel rebuild is needed) 10 | 11 | if [ -f /etc/debian_version ]; then 12 | apt-get -y install \ 13 | git \ 14 | make \ 15 | pandoc \ 16 | libpython3.6-dev \ 17 | nodejs 18 | elif [ -f /etc/redhat-release ]; then 19 | yum install git make pandoc 20 | elif [[ -f /etc/os-release && $(grep "alpine" /etc/os-release) != "" ]]; then 21 | echo "Alpine dependencies should be described in Dockerfile" 22 | # elif [ -f /etc/??? ]; then 23 | # brew install pip pipenv make git pandoc 24 | else 25 | echo "Please ensure 'git', 'make', and 'pandoc' and 'Python.h' headers are installed on your system" 26 | fi 27 | echo "Done" 28 | -------------------------------------------------------------------------------- /cfgtree/README.rst: -------------------------------------------------------------------------------- 1 | ================== 2 | Configuration Tree 3 | ================== 4 | 5 | This module provides an easy yet comprehensive way of defining a configuration tree 6 | for any application. 7 | 8 | It requires the following acknolegdment: 9 | 10 | - Application settings are organize in a hierarchical structure, dependend of the application 11 | itself. This structure is called in cfgtree: "bare config". 
12 | 13 | - Settings can come from different inputs: 14 | 15 | - environment variables (12 factors approach) 16 | - command line argument 17 | - configuration storage area such as json or configuration server 18 | 19 | Configuration Storage 20 | --------------------- 21 | 22 | The trivial storage is a simple json file. The complete settings are placed inside it, such as: 23 | 24 | { 25 | 'setting1': 'value1', 26 | 'setting2': 'value2', 27 | 'setting3': 'value3', 28 | } 29 | 30 | But developer may want to organize in a more hierarchical structure. 31 | 32 | Another typical file format for configuration is Yaml file, which is more human readable and allow 33 | inserting comments and so. 34 | 35 | But both are storing hierarchical configuration. 36 | 37 | Instead of one file, we can imagine a set of files where each individual file is gathered at the 38 | first level of the configuration hierarchy. 39 | 40 | Current Support: 41 | 42 | - single Json file 43 | 44 | Future support: 45 | 46 | - Yaml file (with inplace save keeping comments and overall organization) 47 | - Set of Yaml files 48 | - Configuration server 49 | 50 | Configuration Tree Description 51 | ------------------------------ 52 | 53 | Configuration hierarchy is to be described in a `cfgtree.EnvironmentConfig` inherited instance, 54 | inside the member `.cfgtree`, using helper classes such as `StringCfg`, 'IntCfg', 'UserCfg' or 55 | 'PasswordCfg'. Each setting can be set by environment variable, command line parameter or by 56 | the storage file(s) itself. 57 | 58 | Let's take an example of an item defined at the first level of the hierarchy. It is defined as a 59 | 'IntCfg' with name 'count'. 
# Pre-compiled pattern for list selectors such as "key[3]".  Hoisted to module
# level: the original recompiled it for every matching segment of every call.
_RE_LIST_SELECTOR = re.compile(r"(.*)\[(\d+)\]$")


def get_node_by_xpath(mapping, xpath, default=None, ignore_errors=False,
                      handle_list_selector=False):
    '''Return the node pointed to by xpath from mapping.

    Args:
        mapping: nested dictionary.
        xpath: dot-separated selector string (e.g. ``'level1.level2.level3'``).
        default: value returned when the selector doesn't resolve and errors
            are ignored.
        ignore_errors: if True, pass silently if the xpath is invalid and
            return ``default`` instead of raising.
        handle_list_selector: allow to support list element selectors such as
            ``'key[2].child'``.

    Raises:
        KeyError: when the selector does not resolve and ``ignore_errors``
            is False.

    Example:

        >>> tree = {'level1': {'level2': {'level3': 'bottom'}}}
        >>> get_node_by_xpath(tree, 'level1.level2.level3') == 'bottom'
        True

    '''
    if not isinstance(mapping, dict):
        if not ignore_errors:
            raise KeyError("Mapping is not dictionary: {!r}".format(mapping))
        return default
    for segment in xpath.split('.'):
        if not mapping:
            if not ignore_errors:
                raise KeyError("Empty mapping, but need to access to '{}'".format(xpath))
            return default
        if segment not in mapping:
            if handle_list_selector and '[' in segment:
                m = _RE_LIST_SELECTOR.match(segment)
                if m:
                    key = m.group(1)
                    index = int(m.group(2))
                    if key not in mapping:
                        if not ignore_errors:
                            raise KeyError("Invalid '{}' index selector: '{}' does not match "
                                           "anything. Available keys: {!r}".format(
                                               xpath, key, list(mapping.keys())))
                        return default
                    items = mapping[key]
                    if not isinstance(items, list):
                        if not ignore_errors:
                            raise KeyError("Invalid '{}' selector: '{}' is not a list, is: {}"
                                           .format(xpath, key, type(items)))
                        return default
                    if len(items) <= index:
                        if not ignore_errors:
                            raise KeyError("Invalid '{}' selector: item index '{}' of '{}' is "
                                           "outside of the list boundaries. Length is: {}".format(
                                               xpath, index, key, len(items)))
                        return default
                    # Descend into the selected list element, then move on to
                    # the next path segment.
                    mapping = items[index]
                    continue
                # NOTE: when the bracketed segment does not match the selector
                # pattern, the function falls through to `return default` even
                # with ignore_errors=False -- preserved from the original.
            elif not ignore_errors:
                raise KeyError("Invalid '{}' selector: '{}' doesn't match "
                               "anything. Available keys: {!r}".format(
                                   xpath, segment, list(mapping.keys())))
            return default
        mapping = mapping[segment]
    return mapping
Available keys: {!r}".format( 73 | xpath, segment, list(mapping.keys()))) 74 | return default 75 | mapping = mapping[segment] 76 | return mapping 77 | 78 | 79 | def set_node_by_xpath(mapping, xpath, value, extend=False, setter_attr=None): 80 | '''Set the node pointed to by xpath from mapping. 81 | 82 | Args: 83 | mapping: nested dictionary. 84 | xpath: string-like selector. 85 | value: value to set. 86 | extend: if True, create the nested structure if it doesn't exist, 87 | otherwise, raise an exception. 88 | setter_attr: use a special setter method attribute in mapping, instead of replacing 89 | the node by the new value (note: do not use a property setter attribute) 90 | 91 | Example: 92 | 93 | >>> tree = {'level1': {'level2': {'level3': 'bottom'}}} 94 | >>> set_node_by_xpath(tree, 'level1.level2.level3', 'bottom') 95 | 96 | ''' 97 | segments = xpath.split('.') 98 | attrname = segments.pop() 99 | for segment in segments: 100 | if segment not in mapping: 101 | if not extend: 102 | raise KeyError("Invalid '{}' selector: '{}' doesn't match " 103 | "anything.".format(xpath, segment)) 104 | mapping[segment] = {} 105 | mapping = mapping[segment] 106 | if setter_attr: 107 | # setter attribute defined, calling this setter 108 | setter = getattr(mapping[attrname], setter_attr) 109 | setter(value) 110 | else: 111 | mapping[attrname] = value 112 | 113 | 114 | def delete_node_by_xpath(mapping, xpath, ignore_errors=False): 115 | '''Delete the node pointed to by xpath from mapping. 116 | 117 | Args: 118 | mapping: nested dictionary. 119 | xpath: string-like selector. 120 | ignore_errors: if True, pass silently if the node doesn't exist, 121 | otherwise, raise an exception. 
def delete_node_by_xpath(mapping, xpath, ignore_errors=False):
    '''Remove and return the node addressed by ``xpath`` from ``mapping``.

    Args:
        mapping: nested dictionary to mutate.
        xpath: dot-separated selector string.
        ignore_errors: when True, silently return if an intermediate segment
            is missing; when False, raise ``KeyError`` instead.

    Returns:
        The removed node, or ``None`` when the leaf did not exist.

    Example:

        >>> tree = {'level1': {'level2': {'level3': 'bottom'}}}
        >>> delete_node_by_xpath(tree, 'level1.level2')
        {'level3': 'bottom'}
        >>> tree
        {'level1': {}}

    '''
    *branches, leaf = xpath.split('.')
    node = mapping
    for branch in branches:
        if branch not in node:
            if ignore_errors:
                return
            raise KeyError("Invalid '{}' selector: '{}' doesn't match "
                           "anything.".format(xpath, branch))
        node = node[branch]
    # pop() with a default: deleting an already-absent leaf is not an error.
    return node.pop(leaf, None)
# Module logger; matches cfgtree's LOGGER_NAME ("cfgtree").
log = logging.getLogger("cfgtree")


class _ConfigStorageBase(object):
    """Interface every configuration storage backend must implement."""

    def find_config_storage(self):
        raise NotImplementedError

    def get_bare_config_dict(self):
        raise NotImplementedError

    def save_bare_config_dict(self, bare_cfg):
        raise NotImplementedError


class JsonFileConfigStorage(_ConfigStorageBase):

    """Stores the bare configuration in a single JSON file.

    The file location is resolved, in increasing order of precedence, from a
    developer-provided default, an environment variable, or a command line
    argument.
    """

    # Developer-provided default path of the JSON configuration file
    json_configstorage_default_filename: str = None
    # Environment variable that may override the path
    json_configstorage_environ_var_name: str = None
    # Short command line flag (e.g. "-g") that may override the path
    json_configstorage_short_param_name: str = None
    # Long command line flag (e.g. "--config") that may override the path
    json_configstorage_long_param_name: str = None

    __resolved_config_file = None
    __bare_config_dict = None

    def find_config_storage(self):
        """Resolve the configuration file path and load its content.

        Precedence: command line argument > environment variable > default.

        Raises:
            Exception: if a configuration flag is given without a value.
        """
        configfile = self.json_configstorage_default_filename
        if self.json_configstorage_environ_var_name in os.environ:
            configfile = os.environ[self.json_configstorage_environ_var_name]
            log.debug("%s defined: %s", self.json_configstorage_environ_var_name, configfile)
        # Build the list of accepted flags once (the original rebuilt it on
        # every iteration of the argv loop).
        good = []
        if self.json_configstorage_short_param_name:
            good.append(self.json_configstorage_short_param_name)
        if self.json_configstorage_long_param_name:
            good.append(self.json_configstorage_long_param_name)
        for i, arg in enumerate(sys.argv):
            if arg in good:
                # Bug fix: the original tested `i == len(sys.argv)`, which can
                # never be true inside the loop, so a flag passed as the last
                # argument crashed with IndexError instead of this message.
                if i + 1 >= len(sys.argv):
                    raise Exception("No value given to {}".format(" or ".join(good)))
                configfile = sys.argv[i + 1]
                log.debug("%s defined: %s", " or ".join(good), configfile)
                break
        config_file_path = PosixPath(configfile)
        log.debug("Configuration file set to: %s", configfile)
        self.__resolved_config_file = config_file_path.resolve().as_posix()
        self._load_bare_config()

    def _load_bare_config(self):
        """Load the JSON file into memory; missing file yields an empty dict."""
        log.debug("Loading configuration file: %s", self.__resolved_config_file)
        config_file_path = PosixPath(self.__resolved_config_file)
        if config_file_path.exists():
            with config_file_path.open() as f:
                self.__bare_config_dict = json.load(f)
        else:
            self.__bare_config_dict = {}

    def save_bare_config_dict(self, bare_cfg):
        """Serialize ``bare_cfg`` to the resolved file as pretty-printed JSON."""
        with PosixPath(self.__resolved_config_file).open('w') as f:
            f.write(json.dumps(bare_cfg, sort_keys=True, indent=4, separators=(',', ': ')))

    def get_bare_config_dict(self):
        """Return the last loaded bare configuration dictionary."""
        return self.__bare_config_dict
DictXpathTests(TestCase): 13 | 14 | def test_get_node_by_path(self): 15 | mapping = {'level1': {'level2': {'level3': 42}}} 16 | expected = 42 17 | actual = get_node_by_xpath(mapping, 'level1.level2.level3') 18 | self.assertEqual(expected, actual) 19 | self.assertRaisesRegex( 20 | KeyError, (r"Invalid 'level1.unknown' selector: 'unknown' doesn't match anything. " 21 | r"Available keys: \['level2'\]"), get_node_by_xpath, mapping, 22 | 'level1.unknown') 23 | actual = get_node_by_xpath( 24 | mapping, 'level1.unknown', default="default value", ignore_errors=True) 25 | self.assertEqual(actual, "default value") 26 | 27 | def test_get_node_by_path_empty_mapping(self): 28 | empty_map = {} 29 | self.assertRaises(KeyError, get_node_by_xpath, empty_map, 'level1') 30 | self.assertRaises(KeyError, get_node_by_xpath, empty_map, 'level1.other') 31 | self.assertEqual( 32 | get_node_by_xpath(empty_map, 'level1', default="default value", ignore_errors=True), 33 | "default value") 34 | self.assertEqual( 35 | get_node_by_xpath(empty_map, '', default="default value", ignore_errors=True), 36 | "default value") 37 | 38 | def test_get_node_by_path_invalid_first_level(self): 39 | mapping = {"level1": {}} 40 | self.assertRaises(KeyError, get_node_by_xpath, mapping, 'invalid_level1') 41 | self.assertEqual( 42 | get_node_by_xpath( 43 | mapping, 'invalid_level1', default="default value", ignore_errors=True), 44 | "default value") 45 | 46 | def test_get_node_by_path_mapping_not_dict(self): 47 | mapping_no_a_dict = "simple string!" 
48 | self.assertEqual( 49 | get_node_by_xpath(mapping_no_a_dict, '', default="default value", ignore_errors=True), 50 | "default value") 51 | self.assertRaises( 52 | KeyError, 53 | get_node_by_xpath, 54 | mapping_no_a_dict, 55 | 'level1.unknown', 56 | ignore_errors=False, 57 | ) 58 | 59 | def test_get_node_by_path_incomplete_mapping(self): 60 | mapping = {'level1': {'level2': {}}} 61 | self.assertRaises(KeyError, get_node_by_xpath, mapping, 'level1.level2.level3') 62 | self.assertRaises(KeyError, get_node_by_xpath, mapping, 'level1.unknown') 63 | # test with tailing "." at the end of path 64 | self.assertRaises(KeyError, get_node_by_xpath, mapping, 'level1.') 65 | self.assertEqual( 66 | get_node_by_xpath( 67 | mapping, 'level1.level2', default="default value", ignore_errors=True), {}) 68 | self.assertEqual( 69 | get_node_by_xpath( 70 | mapping, 'level1.unknown', default="default value", ignore_errors=True), 71 | "default value") 72 | # test with tailing "." at the end of path 73 | self.assertEqual( 74 | get_node_by_xpath(mapping, 'level1.', default="default value", ignore_errors=True), 75 | "default value") 76 | 77 | def test_set_node_by_path(self): 78 | mapping = {'level1': {'level2': {'level3': None}}} 79 | expected = 42 80 | set_node_by_xpath(mapping, 'level1.level2.level3', expected) 81 | actual = get_node_by_xpath(mapping, 'level1.level2.level3') 82 | self.assertEqual(expected, actual) 83 | set_node_by_xpath(mapping, 'level1.unknown', expected) 84 | actual = get_node_by_xpath(mapping, 'level1.unknown') 85 | self.assertEqual(expected, actual) 86 | self.assertRaises(KeyError, set_node_by_xpath, mapping, 'level1.invalid.level3', '') 87 | expected = 'extended' 88 | set_node_by_xpath(mapping, 'level1.missing.level3', expected, extend=True) 89 | actual = get_node_by_xpath(mapping, 'level1.missing.level3') 90 | self.assertEqual(expected, actual) 91 | 92 | def test_delete_node_by_path(self): 93 | mapping = {'level1': {'level2': {'level3': 42}}} 94 | expected = 42 95 | 
actual = delete_node_by_xpath(mapping, 'level1.level2.level3') 96 | self.assertEqual(expected, actual) 97 | self.assertFalse('level3' in mapping['level1']['level2']) 98 | self.assertRaises(KeyError, delete_node_by_xpath, mapping, 'level1.invalid.level3') 99 | self.assertIsNone( 100 | delete_node_by_xpath(mapping, 'level1.invalid.level3', ignore_errors=True)) 101 | 102 | def test_get_node_by_path_with_list_selector(self): 103 | mapping = {'level1': {'level_2_is_a_list': ['item1', 'item2']}} 104 | actual = get_node_by_xpath( 105 | mapping, 'level1.level_2_is_a_list[1]', handle_list_selector=True) 106 | self.assertEqual('item2', actual) 107 | 108 | def test_get_node_w_lst_selctr_sub_list(self): 109 | mapping = { 110 | 'level1': { 111 | 'level_2_is_a_list': [ 112 | { 113 | 'item1': { 114 | 'k1': 'v1' 115 | } 116 | }, 117 | { 118 | 'item2': { 119 | 'k2': 'v2' 120 | } 121 | }, 122 | ] 123 | } 124 | } 125 | actual = get_node_by_xpath( 126 | mapping, 'level1.level_2_is_a_list[1].item2.k2', handle_list_selector=True) 127 | self.assertEqual('v2', actual) 128 | 129 | def test_get_w_bad_lst_selctr_n_default_val(self): 130 | mapping = {'level1': {'level_2_is_a_list': ['item1', 'item2']}} 131 | actual = get_node_by_xpath( 132 | mapping, 'level1.level_2_is_a_list[1]', handle_list_selector=True, default="N/A") 133 | self.assertEqual('item2', actual) 134 | self.assertRaisesRegex( 135 | KeyError, 136 | (r"Invalid \'level1.level_2_is_a_list\[99\]\' selector: " 137 | r"item index \'99\' of \'level_2_is_a_list\' is outside of the list boundaries. 
" 138 | r"Length is: 2"), 139 | get_node_by_xpath, 140 | mapping, 141 | 'level1.level_2_is_a_list[99]', 142 | handle_list_selector=True, 143 | default="N/A") 144 | -------------------------------------------------------------------------------- /cfgtree/types.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import argparse 5 | import logging 6 | import os 7 | from typing import Any 8 | 9 | # Third Party Libraries 10 | from cfgtree import LOGGER_NAME 11 | 12 | log = logging.getLogger(LOGGER_NAME) 13 | _UNDEFINED = object() 14 | 15 | 16 | class _CfgBase(object): 17 | 18 | default: Any = None 19 | name = None 20 | xpath = None 21 | arg_type = None 22 | environ_var_prefix = None 23 | ignore_in_cfg = False 24 | ignore_in_args = False 25 | ignore_in_envvars = False 26 | 27 | def __init__(self, l=None, s=None, h=None, r=False, d=_UNDEFINED): 28 | # Note: self.name should come later by EnvironmentConfig._inject_names() 29 | self.short_param = s 30 | self.help_str = h 31 | self.required = r 32 | self.forced_long_param = l 33 | if d != _UNDEFINED: 34 | self.default = d 35 | self._value = self.default 36 | 37 | def set_value(self, value): 38 | """ 39 | Setter method used in `set_node_by_xpath`. 
40 | """ 41 | self._value = value 42 | 43 | @property 44 | def value(self): 45 | return self._value 46 | 47 | @value.setter 48 | def value(self, value): 49 | self.set_value(value) 50 | 51 | @property 52 | def environ_var_name(self): 53 | return self.environ_var_prefix + self.cmd_line_name.upper() 54 | 55 | def get_cmd_line_params(self): 56 | a = [] 57 | if self.short_param: 58 | a.append(self.short_param) 59 | if self.name: 60 | a.append(self.long_param) 61 | return a 62 | 63 | @property 64 | def _environ_var_value(self): 65 | return os.environ.get(self.environ_var_name, _UNDEFINED) 66 | 67 | def read_environ_var(self): 68 | return str(self._environ_var_value) 69 | 70 | @property 71 | def long_param(self): 72 | if self.forced_long_param: 73 | return self.forced_long_param 74 | if not self.xpath: 75 | return "--" + self.name.lower().replace("_", "-") 76 | return "--" + self.xpath.replace('.', '-').replace('_', '-') 77 | 78 | @property 79 | def cmd_line_name(self): 80 | return self.xpath.lower().replace("-", "_").replace(".", "_") 81 | 82 | @property 83 | def action(self): 84 | return 'store' 85 | 86 | @property 87 | def n_args(self): 88 | return None 89 | 90 | @property 91 | def safe_value(self): 92 | """ 93 | Return value as a string without compromizing information. 94 | """ 95 | return self.value 96 | 97 | @property 98 | def cfgfile_value(self): 99 | """ 100 | Return value to save in config file. 101 | """ 102 | return self.value if self.value is not None else "" 103 | 104 | @property 105 | def metavar(self): 106 | return self.name.upper() 107 | 108 | 109 | class StringCfg(_CfgBase): 110 | default = "" 111 | 112 | def read_environ_var(self): 113 | return str(self._environ_var_value) 114 | 115 | 116 | class ListOfStringCfg(_CfgBase): 117 | 118 | """ 119 | Comma separated list of string (1 argument). 
120 | """ 121 | 122 | def __init__(self, *args, **kwargs): 123 | self.default = [] 124 | super(ListOfStringCfg, self).__init__(*args, **kwargs) 125 | 126 | def read_environ_var(self): 127 | ls = self._environ_var_value 128 | return ls.split(",") 129 | 130 | @property 131 | def cfgfile_value(self): 132 | """ 133 | Return value to save in config file. 134 | """ 135 | return ",".join(self.value) 136 | 137 | @staticmethod 138 | def arg_type(string): 139 | return string.split(",") 140 | 141 | 142 | class IntCfg(_CfgBase): 143 | default = 0 144 | 145 | def read_environ_var(self): 146 | return int(self._environ_var_value) 147 | 148 | 149 | class HardcodedCfg(_CfgBase): 150 | 151 | """ 152 | Placeholder only used to store application value. 153 | 154 | It does not present an environment variable nor a command line argument 155 | """ 156 | 157 | default = None 158 | ignore_in_args = True 159 | ignore_in_envvars = True 160 | 161 | def get_cmd_line_params(self): 162 | return [] 163 | 164 | def read_environ_var(self): 165 | return None 166 | 167 | @property 168 | def long_param(self): 169 | return None 170 | 171 | 172 | class UserCfg(StringCfg): 173 | 174 | @property 175 | def user(self): 176 | return self.value 177 | 178 | 179 | class PasswordCfg(StringCfg): 180 | 181 | @property 182 | def password(self): 183 | return self.value 184 | 185 | @property 186 | def safe_value(self): 187 | """ 188 | Hide password in logs. 
class MultiChoiceCfg(ListOfStringCfg):

    """
    Comma-separated list of strings restricted to a set of allowed choices.

    Behaves like ListOfStringCfg, but each item of the parsed list must be
    one of ``choices`` or argument parsing fails.
    """

    def __init__(self, choices=None, *args, **kwargs):
        super(MultiChoiceCfg, self).__init__(*args, **kwargs)
        # Allowed values; items outside this set are rejected by arg_type.
        self.choices = choices

    def arg_type(self, string):
        """
        Split *string* on commas and validate each item against the choices.

        Raises argparse.ArgumentTypeError for any item not in self.choices.
        """
        items = string.split(",")
        for item in items:
            if item not in self.choices:
                # Fixed typo in the user-facing error message:
                # "choise" -> "choices".
                raise argparse.ArgumentTypeError("{!r} not in available choices: {}".format(
                    item, ", ".join(self.choices)))
        return items


class SingleChoiceCfg(StringCfg):

    """
    Single string value restricted to a set of allowed choices.
    """

    def __init__(self, choices=None, *args, **kwargs):
        super(SingleChoiceCfg, self).__init__(*args, **kwargs)
        # Allowed values; anything else is rejected by arg_type.
        self.choices = choices

    def arg_type(self, string):
        """
        Validate that *string* is one of the allowed choices.

        Raises argparse.ArgumentTypeError otherwise.
        """
        if string not in self.choices:
            # Fixed typo in the user-facing error message: "choise" -> "choices".
            raise argparse.ArgumentTypeError("{!r} not in available choices: {}".format(
                string, ", ".join(self.choices)))
        return string
-------------------------------------------------------------------------------- 1 | #!/usr/bin/with-contenv bash 2 | 3 | umask 022 4 | 5 | cd /app 6 | 7 | exec \ 8 | s6-setuidgid abc dopplerr \ 9 | --configfile /config/config.json \ 10 | --output-type plain \ 11 | --general-port 8086 \ 12 | --general-no-color \ 13 | --general-basedir / \ 14 | --general-logfile /config/dopplerr.log \ 15 | --general-configdir /config \ 16 | --general-frontenddir /www 17 | -------------------------------------------------------------------------------- /dopplerr/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | import platform 4 | 5 | import pbr.version 6 | 7 | __version__ = pbr.version.VersionInfo('dopplerr').release_string() 8 | VERSION = __version__ 9 | DOPPLERR_VERSION = __version__ 10 | 11 | APSSCHEDULER_VERSION = pbr.version.VersionInfo('apscheduler').release_string() 12 | PYTHON_VERSION = platform.python_version() 13 | SANIC_VERSION = pbr.version.VersionInfo('sanic').release_string() 14 | 15 | __all__ = [ 16 | '__version__', 17 | 'APSSCHEDULER_VERSION', 18 | 'DOPPLERR_VERSION', 19 | 'PYTHON_VERSION', 20 | 'SANIC_VERSION', 21 | 'VERSION', 22 | ] 23 | -------------------------------------------------------------------------------- /dopplerr/api/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | -------------------------------------------------------------------------------- /dopplerr/api/add_route.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Third Party Libraries 4 | from sanic_transmute import add_route 5 | from transmute_core.compat import string_type 6 | from transmute_core.function import TransmuteAttributes 7 | 8 | 9 | def describe_add_route(blueprint, **kwargs): 10 | 11 | # if we have a single method, make it a list. 
12 | if isinstance(kwargs.get("paths"), string_type): 13 | kwargs["paths"] = [kwargs["paths"]] 14 | if isinstance(kwargs.get("methods"), string_type): 15 | kwargs["methods"] = [kwargs["methods"]] 16 | attrs = TransmuteAttributes(**kwargs) 17 | 18 | def decorator(fnc): 19 | if hasattr(fnc, "transmute"): 20 | fnc.transmute = fnc.transmute | attrs 21 | else: 22 | fnc.transmute = attrs 23 | add_route(blueprint, fnc) 24 | return fnc 25 | 26 | return decorator 27 | -------------------------------------------------------------------------------- /dopplerr/api/v1/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Dopplerr 4 | import dopplerr.api.v1.config 5 | import dopplerr.api.v1.events 6 | import dopplerr.api.v1.medias 7 | import dopplerr.api.v1.notify 8 | import dopplerr.api.v1.series 9 | import dopplerr.api.v1.status 10 | 11 | 12 | def add_api_blueprints(app): 13 | for modu in [ 14 | 'config', 15 | 'events', 16 | 'medias', 17 | 'notify', 18 | 'series', 19 | 'status', 20 | ]: 21 | modu = getattr(dopplerr.api.v1, modu) 22 | app.blueprint(modu.bp) 23 | -------------------------------------------------------------------------------- /dopplerr/api/v1/config.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | 6 | # Third Party Libraries 7 | from sanic import Blueprint 8 | from schematics.models import Model 9 | from schematics.types import ModelType 10 | from schematics.types import StringType 11 | from schematics.types import URLType 12 | 13 | # Dopplerr 14 | from dopplerr.api.add_route import describe_add_route 15 | from dopplerr.config import DopplerrConfig 16 | 17 | log = logging.getLogger(__name__) 18 | 19 | 20 | class Links(Model): 21 | _self = URLType() 22 | 23 | 24 | class ConfigDir(Model): 25 | configdir = StringType(required=True, metadata={"label": "dldl", "description": "descript"}) 26 | 
basedir = StringType(required=True) 27 | frontenddir = StringType(required=True) 28 | _links = ModelType(Links) 29 | 30 | 31 | bp = Blueprint('config', url_prefix="/api/v1") 32 | 33 | 34 | @describe_add_route(bp, paths="/config/general/dirs") 35 | async def config_directories(request) -> ConfigDir: 36 | """ 37 | Get all configuration directories. 38 | """ 39 | return { 40 | "configdir": DopplerrConfig().get_cfg_value("general.configdir"), 41 | "basedir": DopplerrConfig().get_cfg_value("general.basedir"), 42 | "frontenddir": DopplerrConfig().get_cfg_value("general.frontenddir"), 43 | "_links": { 44 | "_self": request.app.url_for("config.config_directories") 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /dopplerr/api/v1/events.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | 6 | # Third Party Libraries 7 | from sanic import Blueprint 8 | from schematics.models import Model 9 | from schematics.types import IntType 10 | from schematics.types import ListType 11 | from schematics.types import ModelType 12 | from schematics.types import StringType 13 | 14 | # Dopplerr 15 | from dopplerr.api.add_route import describe_add_route 16 | from dopplerr.db import DopplerrDb 17 | 18 | log = logging.getLogger(__name__) 19 | 20 | 21 | class Event(Model): 22 | timestamp = StringType() 23 | type = StringType() 24 | message = StringType() 25 | 26 | 27 | class Events(Model): 28 | events = ListType(ModelType(Event)) 29 | 30 | 31 | class RecentSerie(Model): 32 | added_timestamp = StringType() 33 | series_title = StringType() 34 | season_number = IntType() 35 | episode_number = IntType() 36 | episode_title = StringType() 37 | quality = StringType() 38 | video_languages = IntType(required=False) 39 | subtitle_language = StringType() 40 | 41 | 42 | class RecentSeries(Model): 43 | events = ListType(ModelType(RecentSerie)) 44 | 45 | 46 
| bp = Blueprint('events', url_prefix="/api/v1/recent") 47 | 48 | 49 | @describe_add_route(bp, paths="/events/{num}", methods="GET") 50 | async def recent_events_num(num: int = 10) -> Events: 51 | num = int(num) 52 | if num > 100: 53 | num = 100 54 | res = {"events": DopplerrDb().get_recent_events(num)} 55 | return res 56 | 57 | 58 | @describe_add_route(bp, paths="/events/", methods="GET") 59 | async def recent_events_10() -> Events: 60 | res = {"events": DopplerrDb().get_recent_events(10)} 61 | return res 62 | 63 | 64 | @describe_add_route(bp, paths="/series/{num}", methods="GET") 65 | async def recent_fetched_series_num(num: int = 10) -> RecentSeries: 66 | num = int(num) 67 | if num > 100: 68 | num = 100 69 | res = {"events": DopplerrDb().get_last_fetched_series(num)} 70 | return res 71 | -------------------------------------------------------------------------------- /dopplerr/api/v1/medias.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | 6 | # Third Party Libraries 7 | from sanic import Blueprint 8 | from schematics.models import Model 9 | 10 | # Dopplerr 11 | from dopplerr.api.add_route import describe_add_route 12 | from dopplerr.downloader import DopplerrDownloader 13 | 14 | log = logging.getLogger(__name__) 15 | 16 | 17 | class RequestAnswer(Model): 18 | pass 19 | 20 | 21 | bp = Blueprint('medias', url_prefix="/api/v1") 22 | 23 | 24 | @describe_add_route(bp, paths="/medias/", methods="GET") 25 | async def fullscan(request) -> RequestAnswer: 26 | content = request.json 27 | logging.debug("Fullscan request: %r", content) 28 | res = DopplerrDownloader().process_fullscan(content) 29 | res = "Unimplemented" 30 | return res 31 | -------------------------------------------------------------------------------- /dopplerr/api/v1/notify.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard 
Libraries 4 | import logging 5 | 6 | # Third Party Libraries 7 | from sanic import Blueprint 8 | from sanic_transmute import APIException 9 | 10 | # Dopplerr 11 | from dopplerr.api.add_route import describe_add_route 12 | from dopplerr.plugins.sonarr.task import TaskSonarrOnDownload 13 | 14 | log = logging.getLogger(__name__) 15 | 16 | bp = Blueprint('notify', url_prefix="/api/v1") 17 | 18 | 19 | @describe_add_route(bp, paths="/notify/sonarr", methods=['POST']) 20 | async def notify_sonarr(request): 21 | """ 22 | Process a sonarr notification. 23 | """ 24 | res = await TaskSonarrOnDownload().run(request.json) 25 | return res 26 | 27 | 28 | @describe_add_route(bp, paths="/notify", methods=['GET']) 29 | async def notify_not_allowed(): 30 | return APIException( 31 | "Method GET not allowed. " 32 | "Use POST with a JSON body with the right format", code=405) 33 | -------------------------------------------------------------------------------- /dopplerr/api/v1/series.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | 6 | # Third Party Libraries 7 | from sanic import Blueprint 8 | from schematics.models import Model 9 | from schematics.types import DateType 10 | from schematics.types import IntType 11 | from schematics.types import ListType 12 | from schematics.types import ModelType 13 | from schematics.types import StringType 14 | 15 | # Dopplerr 16 | from dopplerr.api.add_route import describe_add_route 17 | from dopplerr.db import DopplerrDb 18 | 19 | log = logging.getLogger(__name__) 20 | 21 | bp = Blueprint('series', url_prefix="/api/v1/medias/series") 22 | 23 | 24 | class Media(Model): 25 | timestamp = DateType() 26 | type = StringType() 27 | message = StringType() 28 | 29 | 30 | class Medias(Model): 31 | medias = ListType(ModelType(Media)) 32 | 33 | 34 | class Series(Model): 35 | id = IntType() # pylint: disable=invalid-name 36 | tv_db_id = IntType() 37 | 
@describe_add_route(bp, paths="/{seriesid}", methods="GET")
async def get_series(seriesid) -> Series:
    """
    Get a single series by its id.

    Fix: this handler was registered on path "/" with method GET, which is
    identical to list_series above it — the route was unreachable and its
    "seriesid" path parameter was never populated. It now follows the
    "{param}" template convention used elsewhere (e.g. "/events/{num}").
    """
    return DopplerrDb().get_series(seriesid)
subtitle_downloader = ModelType(SubtitleDownloader) 44 | disc_scanner = ModelType(DiscScanner) 45 | 46 | 47 | class Status(Model): 48 | healthy = BooleanType() 49 | languages = ListType(StringType()) 50 | mapping = ListType(StringType()) 51 | version = StringType() 52 | 53 | 54 | class Version(StringType): 55 | pass 56 | 57 | 58 | class Versions(Model): 59 | dopplerr = StringType() 60 | apscheduler = StringType() 61 | sanic = StringType() 62 | python = StringType() 63 | 64 | 65 | class Log(Model): 66 | timestamp = StringType() 67 | level = StringType() 68 | message = StringType() 69 | logger = StringType() 70 | 71 | 72 | OkKo = StringType(regex=r"(OK|KO)") 73 | 74 | Logs = ListType(ModelType(Log)) 75 | 76 | bp = Blueprint('status', url_prefix="/api/v1") 77 | 78 | 79 | @describe_add_route(bp, paths="/health") 80 | async def health() -> OkKo: 81 | """ 82 | Health check. 83 | 84 | If it returns KO, Dopplerr is dead and should be restarted. 85 | """ 86 | return "OK" if DopplerrStatus().healthy else "KO" 87 | 88 | 89 | @describe_add_route(bp, paths="/ready") 90 | async def ready() -> OkKo: 91 | """ 92 | Readiness check. 93 | 94 | Use this endpoint to test if Dopplerr can still process new requests. If it 95 | returns "KO", Dopplerr is either congestionned either starting, and cannot accept 96 | new requests. 
97 | """ 98 | return "OK" if DopplerrStatus().ready else "KO" 99 | 100 | 101 | @describe_add_route(bp, paths="/tasks/status") 102 | async def tasks_status() -> TaskStatus: 103 | res_health = DopplerrTasksManager().status() 104 | return res_health 105 | 106 | 107 | @describe_add_route(bp, paths="/tasks/scanner/start", methods=["POST"]) 108 | async def start_scanner() -> TaskStatus: 109 | await DiskScanner().force_start() 110 | return "OK" 111 | 112 | 113 | @describe_add_route(bp, paths="/version") 114 | async def api_version() -> Version: 115 | return DOPPLERR_VERSION 116 | 117 | 118 | @describe_add_route(bp, paths="/versions") 119 | async def api_versions() -> Versions: 120 | return { 121 | "dopplerr": DOPPLERR_VERSION, 122 | "apscheduler": APSSCHEDULER_VERSION, 123 | "sanic": SANIC_VERSION, 124 | "python": PYTHON_VERSION, 125 | } 126 | 127 | 128 | @describe_add_route(bp, paths="/logs", query_parameters=['limit']) 129 | async def api_log_100(limit: int = 100) -> Logs: 130 | return await DopplerrStatus().get_logs(limit) 131 | -------------------------------------------------------------------------------- /dopplerr/config.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | import os 6 | from pathlib import PosixPath 7 | 8 | # Third Party Libraries 9 | import pkg_resources 10 | from cfgtree.cfgtree import EnvironmentConfig 11 | from cfgtree.storages import JsonFileConfigStorage 12 | from cfgtree.types import BoolCfg 13 | from cfgtree.types import ConfigFileCfg 14 | from cfgtree.types import DirNameCfg 15 | from cfgtree.types import HardcodedCfg 16 | from cfgtree.types import IntCfg 17 | from cfgtree.types import ListOfStringCfg 18 | from cfgtree.types import MultiChoiceCfg 19 | from cfgtree.types import PasswordCfg 20 | from cfgtree.types import SingleChoiceCfg 21 | from cfgtree.types import StringCfg 22 | from cfgtree.types import UserCfg 23 | 24 | # Dopplerr 25 
| from dopplerr.singleton import singleton 26 | 27 | log = logging.getLogger(__name__) 28 | DEFAULT_PORT = 8086 29 | 30 | 31 | def _find_frontend_data(): 32 | installed_data_frontend = pkg_resources.resource_filename(__name__, 'frontend') 33 | if PosixPath(installed_data_frontend).exists(): 34 | log.debug("Found local frontend path: %s", installed_data_frontend) 35 | return installed_data_frontend 36 | setup_py = pkg_resources.resource_filename(__name__, "main.py") 37 | dev_env_frontend_dist = PosixPath(setup_py).parent.parent / "frontend" / "dist" 38 | if dev_env_frontend_dist.exists(): 39 | log.debug("Found dev local frontend path: %s", dev_env_frontend_dist) 40 | return str(dev_env_frontend_dist) 41 | return None 42 | 43 | 44 | class DopplerrJsonConfigFile(JsonFileConfigStorage): 45 | json_configstorage_environ_var_name = "DOPPLERR_COMMON_CONFIG_FILE" 46 | json_configstorage_long_param_name = "--configfile" 47 | json_configstorage_short_param_name = "-g" 48 | json_configstorage_default_filename = "config.json" 49 | 50 | 51 | @singleton 52 | class DopplerrConfig(EnvironmentConfig): 53 | 54 | environ_var_prefix = "DOPPLERR_" 55 | config_storage = DopplerrJsonConfigFile() 56 | 57 | cfgtree = { 58 | "configfile": ConfigFileCfg(l="--configfile", h="Config directory"), 59 | "debug_config": BoolCfg(l="--debug-config", h="Show logs before configuration load"), 60 | "general": { 61 | "basedir": 62 | DirNameCfg(s="-b", d=os.getcwd(), h='Base directory'), 63 | "configdir": 64 | DirNameCfg(s="-c", d=os.getcwd(), h="Config directory"), 65 | "frontenddir": 66 | DirNameCfg(s="-f", d=_find_frontend_data(), r=True, h="Frontend directory"), 67 | "verbose": 68 | BoolCfg(s='-v', l="--verbose", h='Enable verbose output logs'), 69 | "output_type": 70 | SingleChoiceCfg( 71 | l="--output-type", 72 | h="Output log type", 73 | choices=["quiet", "plain", "dev"], 74 | d="plain"), 75 | "logfile": 76 | StringCfg(s="-l", h='Output log to file'), 77 | "mapping": 78 | ListOfStringCfg( 79 | 
s='-m', 80 | h=("Map root folder of tv/anime/movie to another name.\n" 81 | "Ex: series are mounted on a docker image as /tv but \n" 82 | "on the other system it is under /video/Series. In this \n" 83 | "case use '--basedir /video --mapping tv=Series,movies=Movies'\n" 84 | "Please enter trivial mapping as well:\n" 85 | " '--mapping tv=tv,movies=movies'")), 86 | "port": 87 | IntCfg(s='-p', d=DEFAULT_PORT, h='The port to listen on'), 88 | "no_color": 89 | BoolCfg(h="Disable color in logs"), 90 | "version": 91 | HardcodedCfg(), 92 | }, 93 | "subliminal": { 94 | "languages": ListOfStringCfg(), 95 | "addic7ed": { 96 | "enabled": BoolCfg(h="Enable addic7ed"), 97 | "user": UserCfg(h="addic7ed username"), 98 | "password": PasswordCfg(h="addic7ed password"), 99 | }, 100 | "legendastv": { 101 | "enabled": BoolCfg(h="Enable legendastv"), 102 | "user": UserCfg(h="legendastv username"), 103 | "password": PasswordCfg(h="legendastv password"), 104 | }, 105 | "opensubtitles": { 106 | "enabled": BoolCfg(h="Enable opensubtitles"), 107 | "user": UserCfg(h="opensubtitles username"), 108 | "password": PasswordCfg(h="opensubtitles password"), 109 | }, 110 | "subscenter": { 111 | "enabled": BoolCfg(h="Enable subscenter"), 112 | "user": UserCfg(h="subscenter username"), 113 | "password": PasswordCfg(h="subscenter password"), 114 | }, 115 | }, 116 | "notifications": { 117 | "pushover": { 118 | "enabled": 119 | BoolCfg(h="Enable pushover"), 120 | "user": 121 | UserCfg(h="pushover username"), 122 | "token": 123 | PasswordCfg(h="pushover password"), 124 | "registered_notifications": 125 | MultiChoiceCfg(h="Notifications", choices=["fetched"], d=["fetched"]), 126 | } 127 | }, 128 | "scanner": { 129 | "enable": BoolCfg(h="Enable periodic disc scanner", d=False), 130 | "interval_hours": IntCfg(h="Refresh interval (in hours)", d=6), 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /dopplerr/descriptors/__init__.py: 
-------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | -------------------------------------------------------------------------------- /dopplerr/descriptors/media_types.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | VIDEO_FILES_EXT = [ 4 | 'asf', 5 | 'avc', 6 | 'avi', 7 | 'divx', 8 | 'm4v', 9 | 'mkv', 10 | 'mov', 11 | 'mp4', 12 | 'mpg', 13 | 'ogv', 14 | 'qt', 15 | 'viv', 16 | 'vp3', 17 | 'wmv', 18 | ] 19 | -------------------------------------------------------------------------------- /dopplerr/descriptors/series.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | from collections import namedtuple 6 | 7 | log = logging.getLogger(__name__) 8 | 9 | SeriesEpisodeUid = namedtuple('SeriesEpisodeUid', ['tv_db_id', 'season_number', 'episode_number']) 10 | 11 | SeriesEpisodeInfo = namedtuple('SeriesEpisodeInfo', [ 12 | 'series_episode_uid', 13 | 'series_title', 14 | 'episode_title', 15 | 'quality', 16 | 'video_languages', 17 | 'subtitles_languages', 18 | 'media_filename', 19 | 'dirty', 20 | ]) 21 | -------------------------------------------------------------------------------- /dopplerr/downloader.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | 6 | # Dopplerr 7 | from dopplerr.singleton import singleton 8 | 9 | log = logging.getLogger(__name__) 10 | 11 | 12 | @singleton 13 | class DopplerrDownloader(object): 14 | 15 | def process_fullscan(self, _request): 16 | log.debug("Processing full scan of missing subtitle files...") 17 | res = { 18 | 'status': 'unprocessed', 19 | 'message': 'not implemented yet!', 20 | } 21 | # TODO: inspiration 22 | # https://gist.github.com/alexsavio/9299716 23 | return res 24 | 
# --------------------------------------------------------------------------------
# /dopplerr/json.py:
# --------------------------------------------------------------------------------
# coding: utf-8

# Standard Libraries
from enum import Enum
from json import JSONEncoder
from json import dumps


def _pretty_kw():
    """Return the keyword arguments producing pretty-printed, stable JSON."""
    return {
        "sort_keys": True,
        "indent": 4,
        "separators": (',', ': '),
    }


class _EnumEncoder(JSONEncoder):

    """JSON encoder that serializes ``Enum`` members by their name."""

    def default(self, obj):  # pylint: disable=arguments-differ,method-hidden
        if isinstance(obj, Enum):
            return obj.name
        return JSONEncoder.default(self, obj)


def safe_dumps(data):
    """Dump ``data`` to pretty, sorted JSON, tolerating ``Enum`` values."""
    return dumps(data, cls=_EnumEncoder, **_pretty_kw())
# --------------------------------------------------------------------------------
# /dopplerr/main.py:
# --------------------------------------------------------------------------------
# coding: utf-8

# Standard Libraries
import logging
import os
import sys
from pathlib import Path

# Dopplerr
from dopplerr import DOPPLERR_VERSION
from dopplerr.config import DopplerrConfig
from dopplerr.db import DopplerrDb
from dopplerr.logging import OutputType
from dopplerr.logging import setup_logging
from dopplerr.routes import listen
from dopplerr.status import DopplerrStatus
from dopplerr.tasks.subtasks.subliminal import SubliminalSubDownloader

log = logging.getLogger(__name__)


def main():
    """
    Dopplerr entry point.

    Bootstraps logging, loads configuration, initializes the database and then
    hands control to the Sanic event loop (``listen``). Returns the process
    exit code (0 on clean shutdown).
    """
    # Pre-configuration logging: peek at argv before DopplerrConfig is loaded
    # so early debug output is available.
    outputtype = OutputType.PLAIN
    if "--debug-config" in sys.argv:
        default_level = logging.DEBUG
    else:
        default_level = logging.ERROR
    for i in range(len(sys.argv)):
        if sys.argv[i] == "--output-type":
            # Fix: guard the *next* index (i + 1), not the current one; the
            # original `i < len(sys.argv)` allowed an IndexError when
            # "--output-type" was the last argument.
            if i + 1 < len(sys.argv) and sys.argv[i + 1] == "dev":
                outputtype = OutputType.DEV
                break

    debug = default_level is logging.DEBUG
    setup_logging(debug=debug, outputtype=outputtype)
    log.debug("Initializing Dopplerr version %s...", DOPPLERR_VERSION)

    DopplerrConfig().find_configuration_values()
    log.debug("Current configuration: %s", DopplerrConfig().json(safe=True))

    # Real logging setup, now driven by the loaded configuration.
    debug = DopplerrConfig().get_cfg_value("general.verbose")
    output_type = DopplerrConfig().get_cfg_value("general.output_type")
    if output_type == 'dev':
        outputtype = OutputType.DEV
    elif output_type == 'plain':
        outputtype = OutputType.PLAIN
    else:
        raise NotImplementedError("Invalid output type: {!r}".format(output_type))

    log.debug("Applying configuration")
    # Quiet down chatty third-party loggers unless verbose mode is requested.
    custom_log_levels = [
        ("peewee", logging.DEBUG if debug else logging.ERROR),
        ("sanic", logging.INFO),
        ("cfgtree", logging.DEBUG if debug else logging.ERROR),
        ("apscheduler", logging.INFO),
        # Subliminal loggers
        ("chardet", logging.ERROR),
        ("dogpile", logging.ERROR),
        ("enzyme", logging.ERROR),
        ("rebulk.processors", logging.INFO),
        ("rebulk", logging.ERROR),
        ("subliminal.providers", logging.ERROR),
        ("subliminal.score", logging.ERROR),
        ("subliminal.subtitle", logging.ERROR),
        ("subliminal", logging.INFO),
        ("urllib3", logging.ERROR),
    ]
    setup_logging(
        outputtype=outputtype,
        debug=debug,
        logfile=DopplerrConfig().get_cfg_value("general.logfile"),
        custom_log_levels=custom_log_levels)
    log.info("Logging is set to %s", "verbose"
             if DopplerrConfig().get_cfg_value("general.verbose") else "not verbose")
    DopplerrStatus().refresh_from_cfg()
    # Backup configuration, now refresh_from_cfg has updated the version
    DopplerrConfig().save_configuration()

    log.info("Initializing Subtitle DopplerrDownloader Service")

    SubliminalSubDownloader.initialize_db()
    DopplerrStatus().sqlite_db_path = (
        Path(DopplerrConfig().get_cfg_value("general.configdir")) / "sqlite.db")
    reset_db = False
    if DopplerrStatus().has_minor_version_changed:
        # Fix: this branch triggers on a *minor or major* version change
        # (see DopplerrStatus.has_minor_version_changed); the old message
        # claimed "Major version change" only.
        log.warning("Minor or major version change, dropping all databases")
        reset_db = True
    else:
        log.info("Previous version was %s, now: %s",
                 DopplerrStatus().previous_version,
                 DopplerrConfig().get_cfg_value("general.version"))
    log.debug("SQLite DB: %s", DopplerrStatus().sqlite_db_path.as_posix())
    DopplerrDb().init(DopplerrStatus().sqlite_db_path, reset_db=reset_db)
    DopplerrDb().create_tables()
    if reset_db:
        DopplerrDb().insert_event("db reset",
                                  "Doppler major or minor version upgrade caused a DB reset ")

    # change current work dir for subliminal work files
    os.chdir(DopplerrConfig().get_cfg_value("general.configdir"))

    DopplerrDb().insert_event("start", "dopplerr started")

    # main event loop (Asyncio behind)
    listen()

    # Fix: use the module logger, not the root logger (`logging.info`).
    log.info("Clean stopping")
    DopplerrDb().insert_event("stop", "dopplerr stopped")

    return 0
# --------------------------------------------------------------------------------
# /dopplerr/main_pyannotate.py:
# --------------------------------------------------------------------------------
# coding: utf-8

# Standard Libraries
import logging

# Third Party Libraries
from pyannotate_runtime import collect_types

# Dopplerr
from dopplerr.main import main

log = logging.getLogger(__name__)


def main_pyannotate():
    """Run ``main`` under pyannotate type collection, dumping on exit."""
    try:
        collect_types.init_types_collection()
        collect_types.resume()
        return main()
    finally:
        log.info("dumping type_info")
        collect_types.pause()
        collect_types.dump_stats('type_info.json')


if __name__ == '__main__':
    main_pyannotate()
# --------------------------------------------------------------------------------
# /dopplerr/notifications/__init__.py:
# --------------------------------------------------------------------------------
# coding: utf-8

import logging

from dopplerr.notifications.pushover import NotificationPushOver
from dopplerr.config import DopplerrConfig

log = logging.getLogger(__name__)


async def emit_notifications(notification):
    """
    Send ``notification`` through every enabled notification backend.

    Currently only PushOver is supported; it is used when
    ``notifications.pushover.enabled`` is set in the configuration.
    """
    log.debug("Emiting notification: [%s] %s - %s", notification.notification_type,
              notification.notification_title, notification.one_liner)
    if DopplerrConfig().get_cfg_value("notifications.pushover.enabled"):
        log.debug("Emiting pushover with user %s",
                  DopplerrConfig().get_cfg_value("notifications.pushover.user"))
        po = NotificationPushOver(
            DopplerrConfig().get_cfg_value("notifications.pushover.token"),
            DopplerrConfig().get_cfg_value("notifications.pushover.user"),
            DopplerrConfig().get_cfg_value("notifications.pushover.registered_notifications"),
        )
        await po.emit(notification.notification_type, notification.notification_title,
                      notification.one_liner)
# --------------------------------------------------------------------------------
# /dopplerr/notifications/_base.py:
# --------------------------------------------------------------------------------
# coding: utf-8

# Standard Libraries
import logging

log = logging.getLogger(__name__)


class _NotificationBase(object):

    """Base for notification backends, filtering on registered event types."""

    NOTIFICATION_TYPES = ["fetched"]

    def __init__(self, registered_notification):
        # Collection of notification type names the user subscribed to.
        self._registered_notification = registered_notification

    def can_emit_notification_type(self, requested_notif_type):
        """Return True if the user registered for ``requested_notif_type``."""
        return requested_notif_type in self._registered_notification
# --------------------------------------------------------------------------------
# /dopplerr/notifications/pushover.py:
# --------------------------------------------------------------------------------
# coding: utf-8

# Standard Libraries
import logging

# Third Party Libraries
import aiohttp

# Dopplerr
from dopplerr.notifications._base import _NotificationBase

log = logging.getLogger(__name__)


class NotificationPushOver(_NotificationBase):

    """Notification backend posting messages to the PushOver REST API."""

    __api_url = "https://api.pushover.net/1/messages.json"

    def __init__(self, token, user, registered_notifications):
        self.token = token
        self.user = user
        super(NotificationPushOver, self).__init__(registered_notifications)

    async def emit(self, notification_type, title, message):
        """POST one message to PushOver, unless the type is filtered out."""
        if not self.can_emit_notification_type(notification_type):
            # Fix: the original call passed no argument for '%s' and logged
            # the literal format string; log the actual notification type.
            log.debug("notification %s is ignored for pushover", notification_type)
            return
        async with aiohttp.ClientSession() as session:
            d = {
                "token": self.token,
                "user": self.user,
                "message": message,
                "title": title,
            }
            async with session.post(self.__api_url, data=d) as result:
                response = await result.json()
                log.debug("PushOver response: %r", response)
# --------------------------------------------------------------------------------
# /dopplerr/notifications_types/__init__.py:
# --------------------------------------------------------------------------------
# coding: utf-8
# --------------------------------------------------------------------------------
# /dopplerr/notifications_types/_base.py:
# --------------------------------------------------------------------------------
# coding: utf-8

# Standard Libraries
import logging

log = logging.getLogger(__name__)


class _NotificationTypeBase(object):
    """Root of the notification-payload class hierarchy."""


class _SeriesNotificationBase(_NotificationTypeBase):
    """Notification payload about one series episode."""

    # Both set by concrete subclasses.
    notification_type: str = None
    notification_title: str = None
    series_episode_info = None

    def __init__(self, series_episode_info):
        self.series_episode_info = series_episode_info
# --------------------------------------------------------------------------------
# /dopplerr/notifications_types/series_media_refreshed.py:
# --------------------------------------------------------------------------------
# coding: utf-8
# Standard Libraries
import logging

# Dopplerr
from dopplerr.notifications_types._base import _SeriesNotificationBase

log = logging.getLogger(__name__)


class SeriesMediaRefreshedNotification(_SeriesNotificationBase):

    """Notification emitted when an episode's metadata has been refreshed."""

    notification_type = "refresh"
    notification_title = "Episode Information Refreshed"

    @property
    def one_liner(self):
        # NOTE(review): episode info is read with *attribute* access here,
        # while SubtitleFetchedNotification below uses mapping access —
        # callers must pass an object exposing these attributes; confirm.
        info = self.series_episode_info
        joined_langs = ",".join(info.video_languages)
        return ("{e.series_title} - {e.season_number}x{e.episode_number} - "
                "{e.episode_title} [{e.quality}] - Lang: {video_languages}".format(
                    e=info,
                    video_languages=joined_langs,
                ))
# --------------------------------------------------------------------------------
# /dopplerr/notifications_types/series_subtitles_fetched.py:
# --------------------------------------------------------------------------------
# coding: utf-8

# Standard Libraries
import logging

# Dopplerr
from dopplerr.notifications_types._base import _SeriesNotificationBase

log = logging.getLogger(__name__)


# pylint: disable=duplicate-code
class SubtitleFetchedNotification(_SeriesNotificationBase):

    """Notification emitted once subtitles have been downloaded for an episode."""

    notification_type = "fetched"
    notification_title = "Episode Subtitles Fetched"

    @property
    def one_liner(self):
        # Episode info is a mapping here (built by TaskSonarrOnDownload).
        info = self.series_episode_info
        joined_subs = ",".join(info['subtitles_languages'])
        return ("{e[series_title]} - {e[season_number]}x{e[episode_number]} - "
                "{e[episode_title]} [{e[quality]}] - Lang: {e[video_languages]} - "
                "Subtitles: {subtitles_languages}".format(
                    e=info,
                    subtitles_languages=joined_subs,
                ))
# --------------------------------------------------------------------------------
# /dopplerr/plugins/__init__.py:
# --------------------------------------------------------------------------------
# coding: utf-8
# --------------------------------------------------------------------------------
# /dopplerr/plugins/sonarr/__init__.py:
# --------------------------------------------------------------------------------
# coding: utf-8
# --------------------------------------------------------------------------------
# /dopplerr/plugins/sonarr/filter.py:
# --------------------------------------------------------------------------------
# coding: utf-8

# Standard Libraries
import logging

# Dopplerr
from dopplerr.config import DopplerrConfig
from dopplerr.plugins.sonarr.response import SonarrOnDownloadResponse
from dopplerr.request_filter import _FilterBase
from dopplerr.response import UnhandledResponse

log = logging.getLogger(__name__)


class SonarrFilter(_FilterBase):

    """Turn a raw Sonarr webhook payload into a Dopplerr response object."""

    async def filter(self, request):
        """Dispatch on Sonarr's EventType; only 'Download' is handled."""
        # probably request_event
        low_request = self.lowerize_dick_keys(request)
        eventtype = low_request.get("eventtype")
        if eventtype == "Download":
            res = await self.process_on_download(request)
            return res
        return UnhandledResponse("sonarr", "Ignoring Sonarr event type: {!r}".format(eventtype))

    async def process_on_download(self, request):
        """
        Build a SonarrOnDownloadResponse listing one candidate per episode
        of the 'on download' event, with paths remapped onto the local disk.
        """
        log.debug("Processing Sonarr's 'on downloaded' event")
        res = SonarrOnDownloadResponse()
        res.processing()
        low_request = self.lowerize_dick_keys(request)
        low_series = self.lowerize_dick_keys(low_request.get("series", {}))
        root_dir = low_series.get("path")
        series_title = low_series.get("title")
        tv_db_id = low_series.get("tvdbid")
        if not root_dir:
            res.failed("Empty Series Path")
            return res
        root_dir = self.appy_path_mapping(root_dir)
        log.debug("Root folder: %s", root_dir)
        log.debug("Reconstructing full media path with basedir '%s'",
                  DopplerrConfig().get_cfg_value("general.basedir"))

        def concat_path(str_a, str_b):
            # Join two path fragments with exactly one '/' between them.
            if not str_a.endswith('/'):
                str_a += '/'
            if str_b.startswith('/'):
                str_b = str_b[1:]
            str_a += str_b
            return str_a

        root_dir = concat_path(DopplerrConfig().get_cfg_value("general.basedir"), root_dir)
        # Fix: dropped the dead `basename = root_dir` initialisation — the
        # variable is only ever used inside the loop below, where it is
        # reassigned from the episode's scene name before any use.
        log.info("Searching episodes for serie '%s' in '%s'", series_title, root_dir)
        res.processing("listing candidates")
        for episode in low_request.get("episodes", []):
            low_episode = self.lowerize_dick_keys(episode)
            basename = low_episode.get("scenename", "")
            episode_title = low_episode.get("title", "")
            season_number = low_episode.get("seasonnumber", "")
            episode_number = low_episode.get("episodenumber", "")
            quality = low_episode.get("quality", "")
            log.debug("Candidate: episode '%s' with base filename '%s'", episode_title, basename)
            res.candidates.append({
                "series_title": series_title,
                "tv_db_id": tv_db_id,
                "episode_title": episode_title,
                "root_dir": root_dir,
                "scenename": basename,
                "season_number": season_number,
                "episode_number": episode_number,
                "quality": quality,
            })
        res.processing("candidates listed")
        return res
# --------------------------------------------------------------------------------
# /dopplerr/plugins/sonarr/response.py:
# --------------------------------------------------------------------------------
# coding: utf-8

# Standard Libraries
import logging

# Dopplerr
from dopplerr.response import Response

log = logging.getLogger(__name__)


class SonarrOnDownloadResponse(Response):

    """Response specialisation for Sonarr 'on download' events."""

    def __init__(self, *args, **kwargs):
        super(SonarrOnDownloadResponse, self).__init__(*args, **kwargs)
        self.request_type = "sonarr"
        self.request_event = "on download"

    @property
    def subtitles(self):
        return self.res.get("result", {}).get("subtitles", None)

    @subtitles.setter
    def subtitles(self, subtitles):
        self.res.setdefault("result", {})["subtitles"] = subtitles

    @property
    def candidates(self):
        # setdefault so callers can append to the list directly.
        return self.res.setdefault("candidates", [])
@candidates.setter 32 | def candidates(self, candidates): 33 | self.res.candidates = candidates 34 | 35 | @property 36 | def sonarr_episode_infos(self): 37 | candidate = self.candidates[0] 38 | subtitles = self.subtitles 39 | return [{ 40 | "series_title": candidate.get("series_title"), 41 | "tv_db_id": candidate.get("tv_db_id"), 42 | "season_number": candidate.get("season_number"), 43 | "episode_number": candidate.get("episode_number"), 44 | "episode_title": candidate.get("episode_title"), 45 | "quality": candidate.get("quality"), 46 | "video_languages": candidate.get("video_languages", "???"), 47 | "subtitles_languages": [s["language"] for s in subtitles], 48 | }] 49 | -------------------------------------------------------------------------------- /dopplerr/plugins/sonarr/task.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | 6 | # Dopplerr 7 | from dopplerr.db import DopplerrDb 8 | from dopplerr.descriptors.series import SeriesEpisodeUid 9 | from dopplerr.notifications import emit_notifications 10 | from dopplerr.notifications_types.series_subtitles_fetched import SubtitleFetchedNotification 11 | from dopplerr.plugins.sonarr.filter import SonarrFilter 12 | from dopplerr.tasks.download_subtitles import DownloadSubtitleTask 13 | from dopplerr.tasks.manager import DopplerrTasksManager 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | 18 | class DopplerrTask(object): 19 | 20 | def post_task(self, task): 21 | DopplerrTasksManager().post_task(task) 22 | 23 | 24 | class TaskSonarrOnDownload(DopplerrTask): 25 | 26 | async def run(self, task) -> dict: 27 | content = task 28 | 29 | log.debug("Sonarr notification received: %r", content) 30 | res = await SonarrFilter().filter(content) 31 | if res.is_unhandled: 32 | # event has been filtered out 33 | return res.to_dict() 34 | 35 | if res.candidates: 36 | for candidate in res.candidates: 37 | 
DopplerrDb().insert_event("media update", 38 | "Available TV episode: {} - {}x{} - {} [{}].".format( 39 | candidate.get("series_title"), 40 | candidate.get("season_number"), 41 | candidate.get("episode_number"), 42 | candidate.get("episode_title"), 43 | candidate.get("quality"), 44 | )) 45 | 46 | log.debug("Sonarr notification ok, posting background task") 47 | # processing ok, let's post our background task to the task queue 48 | self.post_task(self.task_sonarr_on_download_background(res)) 49 | # asyncio.ensure_future(self.task_sonarr_on_download_background(res)) 50 | res.successful("Request successfully posted") 51 | return res.to_dict() 52 | 53 | async def task_sonarr_on_download_background(self, res): 54 | log.debug("Starting task_sonarr_on_download_background") 55 | downloader = DownloadSubtitleTask() 56 | res = await downloader.run_and_wait(res) 57 | if not res.is_successful: 58 | log.debug("not successful, leaving background task") 59 | return res 60 | 61 | for episode_info in res.sonarr_episode_infos: 62 | await emit_notifications(SubtitleFetchedNotification(series_episode_info=episode_info)) 63 | DopplerrDb().update_fetched_series_subtitles( 64 | series_episode_uid=SeriesEpisodeUid( 65 | episode_info['tv_db_id'], 66 | episode_info['season_number'], 67 | episode_info['episode_number'], 68 | ), 69 | subtitles_languages=episode_info['subtitles_languages'], 70 | dirty=False) 71 | log.debug("Background task finished with result: %s", res.to_json()) 72 | return res 73 | -------------------------------------------------------------------------------- /dopplerr/request_filter.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | 6 | # Dopplerr 7 | from dopplerr.config import DopplerrConfig 8 | 9 | log = logging.getLogger(__name__) 10 | 11 | 12 | class _FilterBase(object): 13 | 14 | def appy_path_mapping(self, folder): 15 | if not 
DopplerrConfig().get_cfg_value("general.mapping"): 16 | return folder 17 | if folder.startswith("/"): 18 | absolute = True 19 | folder = folder[1:] 20 | for mapping in DopplerrConfig().get_cfg_value("general.mapping"): 21 | log.debug("Mapping: %s", mapping) 22 | k, _, v = mapping.partition("=") 23 | log.debug("Applying mapping %s => %s", k, v) 24 | if folder.startswith(k): 25 | folder = v + folder[len(k):] 26 | break 27 | if absolute: 28 | return "/" + folder 29 | return folder 30 | 31 | @staticmethod 32 | def lowerize_dick_keys(thedict): 33 | return {k.lower(): v for k, v in thedict.items()} 34 | 35 | 36 | # 37 | # class AutoDetectFilter(_FilterBase): 38 | # def filter(self, request, res): 39 | # low_request = self.lowerize_dick_keys(request) 40 | # if "series" in low_request: 41 | # return SonarrFilter().filter(request, res) 42 | # return res.failed("Unable to find request type") 43 | -------------------------------------------------------------------------------- /dopplerr/response.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | from enum import Enum 6 | 7 | # Dopplerr 8 | from dopplerr import json 9 | 10 | log = logging.getLogger(__name__) 11 | 12 | 13 | class RequestStatus(Enum): 14 | UNHANDLED = "unhandled" 15 | PROCESSING = "processing" 16 | SUCCESSFUL = "successful" 17 | FAILED = "failed" 18 | 19 | 20 | class Response(object): 21 | 22 | def __init__(self): 23 | self.res = {} 24 | self.__update_status(RequestStatus.UNHANDLED) 25 | 26 | def processing(self, message=None): 27 | self.__update_status(RequestStatus.PROCESSING, message=message) 28 | 29 | def failed(self, message): 30 | log.error(message) 31 | self.__update_status(RequestStatus.FAILED, message=message.lower()) 32 | 33 | def unhandled(self, message): 34 | log.info("Filtered out event: %s", message) 35 | self.res.setdefault("result", {})["status"] = RequestStatus.UNHANDLED 36 | 
self.res.setdefault("result", {})["message"] = message.lower() 37 | 38 | @property 39 | def is_unhandled(self): 40 | return self.res.get("result", {}).get("status") == RequestStatus.UNHANDLED 41 | 42 | @property 43 | def is_failed(self): 44 | return self.res.get("result", {}).get("status") == RequestStatus.FAILED 45 | 46 | def __update_status(self, status, message=None): 47 | self.res.setdefault("result", {})['status'] = status 48 | if message is not None: 49 | self.res['result']["message"] = message 50 | elif "message" in self.res: 51 | del self.res['result']["message"] 52 | 53 | def successful(self, message=None): 54 | return self.__update_status(RequestStatus.SUCCESSFUL, message=message) 55 | 56 | @property 57 | def is_successful(self): 58 | return self.res.get("result", {}).get("status") == RequestStatus.SUCCESSFUL 59 | 60 | def to_dict(self): 61 | """ 62 | Return json-able dictionary. 63 | """ 64 | return self._to_dict(self.res) 65 | 66 | def _to_dict(self, dat): 67 | r = {} 68 | for k, v in dat.items(): 69 | if isinstance(v, Enum): 70 | v = v.name 71 | elif isinstance(v, dict): 72 | v = self._to_dict(v) 73 | r[k] = v 74 | return r 75 | 76 | def to_json(self): 77 | return json.safe_dumps(self.res) 78 | 79 | @property 80 | def request_type(self): 81 | return self.res.get("request", {}).get("type", None) 82 | 83 | @request_type.setter 84 | def request_type(self, thetype): 85 | self.res.setdefault("request", {})["type"] = thetype 86 | 87 | @property 88 | def request_event(self): 89 | return self.res.get("request", {}).get("event", None) 90 | 91 | @request_event.setter 92 | def request_event(self, event): 93 | self.res.setdefault("request", {})["event"] = event 94 | 95 | @property 96 | def exception(self): 97 | return self.res.get("result", {}).get("exception", None) 98 | 99 | @exception.setter 100 | def exception(self, exception): 101 | self.res.setdefault("result", {})["exception"] = exception 102 | 103 | 104 | class UnhandledResponse(Response): 105 | 106 | def 
__init__(self, request_type, message, *args, **kwargs): 107 | super(UnhandledResponse, self).__init__(*args, **kwargs) 108 | self.request_type = request_type 109 | self.unhandled(message) 110 | -------------------------------------------------------------------------------- /dopplerr/routes.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import logging 5 | from pathlib import Path 6 | 7 | # Third Party Libraries 8 | from sanic import Sanic 9 | # from sanic_transmute import add_swagger 10 | from sanic_transmute import add_swagger_api_route, create_swagger_json_handler 11 | 12 | # Dopplerr 13 | from dopplerr.api.v1 import add_api_blueprints 14 | from dopplerr.config import DopplerrConfig 15 | from dopplerr.status import DopplerrStatus 16 | from dopplerr.tasks.manager import DopplerrTasksManager 17 | 18 | log = logging.getLogger(__name__) 19 | 20 | 21 | async def init_in_sanic_loop(): 22 | DopplerrTasksManager().start() 23 | 24 | 25 | async def deinit_in_sanic_loop(): 26 | DopplerrTasksManager().stop() 27 | 28 | 29 | def add_swagger(app, json_route, html_route, title="default", version="1.0", base_path=None): 30 | # TODO: remove when this PR is merged: https://github.com/yunstanford/sanic-transmute/pull/4 31 | app.add_route( 32 | create_swagger_json_handler(app, title=title, version=version, base_path=base_path), 33 | json_route, 34 | methods=["GET"]) 35 | add_swagger_api_route(app, html_route, json_route) 36 | 37 | 38 | def listen(): 39 | app = Sanic(__name__, log_config=None) 40 | add_api_blueprints(app) 41 | add_swagger(app, "/api/v1/swagger.json", "/api/v1/") 42 | 43 | DopplerrStatus().healthy = True 44 | for fi in Path(DopplerrConfig().get_cfg_value("general.frontenddir")).iterdir(): 45 | app.static('/' + fi.name if fi.name != "index.html" else '/', fi.resolve().as_posix()) 46 | 47 | @app.listener('before_server_start') 48 | async def before_start(_app, _loop): # pylint: 
disable=unused-variable 49 | await init_in_sanic_loop() 50 | 51 | @app.listener('after_server_stop') 52 | async def after_stop(_app, _loop): # pylint: disable=unused-variable 53 | await deinit_in_sanic_loop() 54 | 55 | app.run(host='0.0.0.0', port=int(DopplerrConfig().get_cfg_value("general.port"))) 56 | -------------------------------------------------------------------------------- /dopplerr/singleton.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | """ 3 | Singleton class definition. 4 | 5 | Usage: 6 | 7 | @singleton 8 | class MySingletonClass(object): 9 | def __init__(self): 10 | pass 11 | ... 12 | def a_function(self): 13 | pass 14 | 15 | Later in the code: 16 | 17 | from module.of.MySingleton import MySingletonClass 18 | def anyFunction(...): 19 | ... 20 | MySingletonClass().a_function(...) 21 | 22 | 23 | """ 24 | from __future__ import absolute_import 25 | from __future__ import division 26 | from __future__ import print_function 27 | from __future__ import unicode_literals 28 | 29 | # Standard Libraries 30 | import types 31 | 32 | # pylint: disable=invalid-name 33 | 34 | 35 | class __Singleton(object): 36 | 37 | """ 38 | A non-thread-safe helper class to ease implementing singletons. 39 | 40 | This should be used as a decorator -- not a metaclass -- to the 41 | class that should be a singleton. 42 | 43 | The decorated class can define one `__init__` function that 44 | takes only the `self` argument. Other than that, there are 45 | no restrictions that apply to the decorated class. 46 | 47 | To get the singleton instance, use the `instance` method. Trying 48 | to use `__call__` will result in a `TypeError` being raised. 49 | 50 | Limitations: The decorated class cannot be inherited from. 51 | """ 52 | 53 | def __init__(self, decorated): 54 | self._decorated = decorated 55 | 56 | def instance(self, *args, **kwargs): 57 | """ 58 | Return the singleton instance. 
59 | 60 | Upon its first call, it creates a new instance of the decorated class and calls its 61 | `__init__` method. 62 | 63 | On all subsequent calls, the already created instance is returned. 64 | """ 65 | # Do not use a test here for performance sake. The first call has the exception penalty 66 | try: 67 | return self._instance 68 | except AttributeError: 69 | # pylint: disable=attribute-defined-outside-init 70 | self._instance = self._decorated(*args, **kwargs) 71 | 72 | # pylint: enable=attribute-defined-outside-init 73 | 74 | def unload(inst): 75 | # pylint: disable=protected-access 76 | inst.__singleton.unload() 77 | # pylint: enable=protected-access 78 | 79 | # Magically bind "unload" as a method 80 | self._instance.unload = types.MethodType(unload, self._instance) 81 | # pylint: disable=protected-access 82 | self._instance.__singleton = self 83 | # pylint: enable=protected-access 84 | return self._instance 85 | 86 | def __call__(self, *args, **kwargs): 87 | return self.instance(*args, **kwargs) 88 | 89 | def unload(self): 90 | if hasattr(self, "_instance"): 91 | delattr(self, "_instance") 92 | 93 | 94 | singleton = __Singleton 95 | # pylint: enable=invalid-name 96 | -------------------------------------------------------------------------------- /dopplerr/status.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import io 5 | import logging 6 | 7 | # Third Party Libraries 8 | import aiofiles 9 | from babelfish import Language 10 | 11 | # Dopplerr 12 | from dopplerr import DOPPLERR_VERSION 13 | from dopplerr.config import DopplerrConfig 14 | from dopplerr.singleton import singleton 15 | 16 | log = logging.getLogger(__name__) 17 | 18 | 19 | @singleton 20 | class DopplerrStatus(object): 21 | 22 | """ 23 | Contain current status of the application and derived values from `DopplerrConfig`. 
24 | """ 25 | 26 | def __init__(self): 27 | self.healthy = False 28 | self.sqlite_db_path = None 29 | self.subliminal_provider_configs = None 30 | self.previous_version = None 31 | 32 | def refresh_from_cfg(self): 33 | """ 34 | Refresh derived values from cfg. 35 | """ 36 | cfg = DopplerrConfig() 37 | if not cfg.get_cfg_value("general.port"): 38 | log.fatal("No port defined !") 39 | raise Exception("No port defined") 40 | if not cfg.get_cfg_value("general.frontenddir"): 41 | log.fatal("No frontend dir defined") 42 | raise Exception("No frontend dir defined") 43 | self.subliminal_provider_configs = self._build_subliminal_provider_cfgs() 44 | 45 | languages = cfg.get_cfg_value("subliminal.languages") 46 | if not languages: 47 | raise Exception("No languages defined") 48 | if any(not x for x in languages): 49 | raise Exception("Bad languages: {!r}".format(languages)) 50 | 51 | if not self._check_languages(languages): 52 | raise Exception("Bad language defined") 53 | 54 | if self.previous_version is None: 55 | self.previous_version = cfg.get_cfg_value("general.version") 56 | cfg.set_cfg_value("general.version", DOPPLERR_VERSION) 57 | 58 | @property 59 | def has_minor_version_changed(self): 60 | if not self.previous_version: 61 | return True 62 | major1, _, minor_patch1 = self.previous_version.partition('.') 63 | major2, _, minor_patch2 = DOPPLERR_VERSION.partition('.') 64 | minor1, _, _patch1 = minor_patch1.partition('.') 65 | minor2, _, _patch2 = minor_patch2.partition('.') 66 | return major1 != major2 or minor1 != minor2 67 | 68 | def _build_subliminal_provider_cfgs(self): 69 | cfg = DopplerrConfig() 70 | provider_configs = {} 71 | provider_names = [ 72 | "addic7ed", 73 | "legendastv", 74 | "opensubtitles", 75 | "subscenter", 76 | ] 77 | for provider_name in provider_names: 78 | if cfg.get_cfg_value("subliminal.{}.enabled".format(provider_name)): 79 | provider_configs[provider_name] = { 80 | 'username': cfg.get_cfg_value("subliminal.{}.user".format(provider_name)), 
81 | 'password': cfg.get_cfg_value("subliminal.{}.password".format(provider_name)), 82 | } 83 | log.debug("Using %s username: %s", provider_name, 84 | provider_configs[provider_name]['username']) 85 | return provider_configs 86 | 87 | @staticmethod 88 | def _check_languages(languages): 89 | failed = False 90 | for l in languages: 91 | try: 92 | Language(l) 93 | except ValueError: 94 | failed = True 95 | logging.critical("Invalid language: %r", l) 96 | if failed: 97 | return False 98 | return True 99 | 100 | async def get_logs(self, limit=100): 101 | """ 102 | Get `limit` lines of logs in reverse order from the end of the file. 103 | """ 104 | logfile = DopplerrConfig().get_cfg_value("general.logfile") 105 | if not logfile: 106 | return 107 | logs = [] 108 | i = 0 109 | async with aiofiles.open(logfile) as fp: 110 | async for line in self._reverse_read_lines(fp): 111 | try: 112 | i += 1 113 | if i > limit: 114 | break 115 | if not line: 116 | continue 117 | splited_line = line.split("::") 118 | if len(splited_line) < 4: 119 | continue 120 | dat = splited_line[0].strip() 121 | level = splited_line[1].strip() 122 | logger = splited_line[2].strip() 123 | message = splited_line[3].strip() 124 | logs.append({ 125 | 'timestamp': dat, 126 | 'level': level, 127 | 'logger': logger, 128 | 'message': message, 129 | }) 130 | finally: 131 | pass 132 | return logs 133 | 134 | @staticmethod 135 | async def _reverse_read_lines(fp, buf_size=8192): # pylint: disable=invalid-name 136 | """ 137 | Async generator that returns the lines of a file in reverse order. 
138 | 139 | ref: https://stackoverflow.com/a/23646049/8776239 140 | and: https://stackoverflow.com/questions/2301789/read-a-file-in-reverse-order-using-python 141 | """ 142 | segment = None # holds possible incomplete segment at the beginning of the buffer 143 | offset = 0 144 | await fp.seek(0, io.SEEK_END) 145 | file_size = remaining_size = await fp.tell() 146 | while remaining_size > 0: 147 | offset = min(file_size, offset + buf_size) 148 | await fp.seek(file_size - offset) 149 | buffer = await fp.read(min(remaining_size, buf_size)) 150 | remaining_size -= buf_size 151 | lines = buffer.splitlines(True) 152 | # the first line of the buffer is probably not a complete line so 153 | # we'll save it and append it to the last line of the next buffer 154 | # we read 155 | if segment is not None: 156 | # if the previous chunk starts right from the beginning of line 157 | # do not concat the segment to the last line of new chunk 158 | # instead, yield the segment first 159 | if buffer[-1] == '\n': 160 | # print 'buffer ends with newline' 161 | yield segment 162 | else: 163 | lines[-1] += segment 164 | # print 'enlarged last line to >{}<, len {}'.format(lines[-1], len(lines)) 165 | segment = lines[0] 166 | for index in range(len(lines) - 1, 0, -1): 167 | l = lines[index] 168 | if l: 169 | yield l 170 | # Don't yield None if the file was empty 171 | if segment is not None: 172 | yield segment 173 | -------------------------------------------------------------------------------- /dopplerr/tasks/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | -------------------------------------------------------------------------------- /dopplerr/tasks/base.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import asyncio 5 | import logging 6 | 7 | log = logging.getLogger(__name__) 8 | 9 | 10 | class TaskBase(object): 11 | 
worker_threads_num = 1 12 | active = False 13 | 14 | async def run(self, task): 15 | self.active = True 16 | try: 17 | return await self._run(task) 18 | finally: 19 | self.active = False 20 | 21 | async def _run(self, task): 22 | raise NotImplementedError 23 | 24 | @staticmethod 25 | async def _run_command(*args): 26 | """ 27 | Asynchronous run command in subprocess. 28 | 29 | :param *args: command to execute 30 | :return: tuple (stdout, stderr, exit code) 31 | 32 | Example from: http://asyncio.readthedocs.io/en/latest/subprocess.html 33 | """ 34 | # Create subprocess 35 | log.debug("Executing subprocess: %s", " ".join([a for a in args])) 36 | # pylint: disable=no-value-for-parameter 37 | process = await asyncio.create_subprocess_exec( 38 | *args, stderr=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE) 39 | # pylint: enable=no-value-for-parameter 40 | 41 | # Status 42 | log.info('Started: %s (pid = %s)', args, process.pid) 43 | 44 | # Wait for the subprocess to finish 45 | stdout, stderr = await process.communicate() 46 | 47 | if process.returncode == 0: 48 | log.debug('Subprocess pid %s succeeded: %s', process.pid, args) 49 | else: 50 | log.debug('Subprocess pid %s failed: %s', process.pid, args) 51 | 52 | stdout_str = stdout.decode().strip() 53 | stderr_str = stderr.decode().strip() 54 | 55 | # Return stdout, stderr, exit code 56 | return stdout_str, stderr_str, process.returncode 57 | -------------------------------------------------------------------------------- /dopplerr/tasks/disk_scanner.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import asyncio 5 | import logging 6 | import os 7 | 8 | # Dopplerr 9 | from dopplerr.config import DopplerrConfig 10 | from dopplerr.db import DopplerrDb 11 | from dopplerr.descriptors.media_types import VIDEO_FILES_EXT 12 | from dopplerr.descriptors.series import SeriesEpisodeInfo 13 | from dopplerr.singleton import singleton 14 
from dopplerr.tasks.periodic import PeriodicTask
from dopplerr.tasks.subtasks.subliminal import RefineVideoFileTask

log = logging.getLogger(__name__)

# Scan throttling: after SPEED_LIMIT directory entries, sleep SPEED_WAIT_SEC
# so the asyncio event loop can service other tasks during a long scan.
SPEED_LIMIT = 10
SPEED_WAIT_SEC = 0.1


@singleton
class DiskScanner(PeriodicTask):
    """
    Periodic task that walks the configured base directory looking for video
    files and registers unknown ones in the database.
    """

    job_id = 'scan_disk'
    seconds = 0
    minutes = 0
    hours = None  # filled from configuration in init()
    enable_cfg = 'scanner.enable'

    def init(self):
        # scan interval (hours) comes from the "scanner.interval_hours" setting
        self.hours = DopplerrConfig().get_cfg_value('scanner.interval_hours')

    async def _run(self):
        """Scheduler entry point: scan every mapped media directory."""
        basedir = DopplerrConfig().get_cfg_value('general.basedir')
        mapping = DopplerrConfig().get_cfg_value('general.mapping')
        media_dirs = []
        # mapping entries look like "src=dst"; only the destination side is
        # used to select which top-level directories get scanned
        for mapp in mapping:
            _src, _, dst = mapp.partition('=')
            media_dirs.append(dst)
        log.debug("Scanning %s with media directories: %r", basedir, media_dirs)
        await self._scan(basedir, media_dirs=media_dirs)

    async def _scan(self, root, media_dirs=None):
        """
        Recursively scan *root* for video files.

        At the top level (media_dirs given) only directories whose name is in
        *media_dirs* are descended into; below that every non-hidden directory
        is scanned and video files are refreshed.
        """
        i = 0
        with os.scandir(root) as it:
            for entry in it:
                i += 1
                if i > SPEED_LIMIT:
                    # this allows the event loop to update
                    await asyncio.sleep(SPEED_WAIT_SEC)
                    i = 0
                if self.stopped or self.interrupted:
                    # bail out quickly on stop/interrupt requests
                    return
                if media_dirs:
                    if entry.name in media_dirs:
                        await self._scan(entry.path, media_dirs=None)
                elif not entry.name.startswith('.'):
                    if entry.is_dir(follow_symlinks=False):
                        await self._scan(entry.path, media_dirs=None)
                    elif entry.name.rpartition('.')[2] in VIDEO_FILES_EXT:
                        # NOTE(review): extension comparison is case-sensitive —
                        # confirm VIDEO_FILES_EXT covers upper-case variants
                        await self._refresh_video(entry.path)

    async def _refresh_video(self, filepath):
        """Refine an unknown video file and record it in the database."""
        if DopplerrDb().media_exists(filepath):
            log.info("Already existing video file found: %s", filepath)
            return
        log.info("Unknown Video file found: %s", filepath)
        refined_lst = await RefineVideoFileTask().refine_file(filepath)
        if not refined_lst:
            log.error("Cannot refine file: %s", filepath)
            return
        # refine_file may return a single item or a list (double episodes)
        if not isinstance(refined_lst, list):
            refined_lst = [refined_lst]
        for refined in refined_lst:
            if isinstance(refined, SeriesEpisodeInfo):
                DopplerrDb().update_series_media(
                    series_title=refined.series_title,
                    tv_db_id=refined.series_episode_uid.tv_db_id,
                    season_number=refined.series_episode_uid.season_number,
                    episode_number=refined.series_episode_uid.episode_number,
                    episode_title=refined.episode_title,
                    quality=refined.quality,
                    video_languages=refined.video_languages,
                    media_filename=refined.media_filename,
                    dirty=refined.dirty,
                )
                DopplerrDb().insert_event("media update",
                                          "Available TV episode: {} - {}x{} - {}.".format(
                                              refined.series_title,
                                              refined.series_episode_uid.season_number,
                                              refined.series_episode_uid.episode_number,
                                              refined.episode_title,
                                          ))
            else:
                # NOTE(review): stops processing remaining refined items on the
                # first unsupported type — confirm this early return is intended
                log.error("Unsupported refined video type: %r", refined)
                return

    @property
    def interval_hours(self):
        # exposed for the status endpoint
        return self.hours
@singleton
class DownloadSubtitleTask(QueuedTask):
    """
    Queued task that locates the media files referenced by a notification and
    downloads missing subtitles for them.
    """

    name = "Download Subtitle Task"

    async def _run(self, task):
        """Process one notification result object; returns it updated."""
        log.debug("Starting Download subtitle for task")
        res = task
        if not res.candidates:
            DopplerrDb().insert_event("error", "event handled but no candidate found")
            log.debug("event handled but no candidate found")
            res.failed("event handled but no candidate found")
            return res

        for candidate in res.candidates:
            await self._process_candidate(candidate, res)
        return res

    @staticmethod
    def search_file(root_dir, base_name):
        """
        Recursively glob *root_dir* for files whose name contains *base_name*.

        Glob metacharacters in *base_name* are escaped so bracketed names
        match literally.  Requires python >= 3.5 (recursive glob).

        :return: list of matching file paths
        """
        found = []
        base_name = glob.escape(base_name)
        beforext, _, ext = base_name.rpartition('.')
        if ext.lower() in VIDEO_FILES_EXT:
            # keep the video extension as a suffix constraint
            protected_path = os.path.join(root_dir, "**", "*" + beforext + "*" + ext)
        else:
            protected_path = os.path.join(root_dir, "**", "*" + beforext + "*")
        # (removed dead no-op: protected_path = protected_path)
        log.debug("Searching %r", protected_path)
        for filename in glob.iglob(protected_path, recursive=True):
            log.debug("Found: %s", filename)
            found.append(filename)
        return found

    async def _process_candidate(self, candidate, res):
        """Search disk, refresh DB and download subtitles for one candidate."""
        log.info(
            "Searching episode '%s' from series '%s'. Filename: %s",
            candidate.get("episode_title"),
            candidate.get("series_title"),
            candidate.get("scenename"),
        )

        candidate_files = self._search_candidate_files(candidate, res)
        if not candidate_files:
            return

        # the first match becomes the reference media file in the database
        self._refresh_db_media(candidate, candidate_files[0])

        videos = self.filter_video_files(candidate_files, res)
        if not videos:
            return

        subtitles_info = await self.download_sub(videos, res)
        res.subtitles = subtitles_info
        if subtitles_info:
            res.successful("download successful")
            DopplerrDb().insert_event("subtitles",
                                      "subtitles fetched: {}".format(", ".join([
                                          "{} (lang: {}, source: {})".format(
                                              s.get("filename"),
                                              s.get("language"),
                                              s.get("provider"),
                                          ) for s in subtitles_info
                                      ])))
        else:
            DopplerrDb().insert_event("subtitles", "no subtitle found for: {}".format(
                ", ".join([Path(f).name for f in candidate_files])))
            res.failed("no subtitle found")

    @staticmethod
    def _refresh_db_media(candidate, media_filename):
        """Record/update the series episode row for this candidate."""
        DopplerrDb().update_series_media(
            series_title=candidate.get("series_title"),
            tv_db_id=candidate.get("tv_db_id"),
            season_number=candidate.get("season_number"),
            episode_number=candidate.get("episode_number"),
            episode_title=candidate.get("episode_title"),
            quality=candidate.get("quality"),
            video_languages=None,
            media_filename=media_filename,
            dirty=True)

    def _search_candidate_files(self, candidate, res):
        """Find on-disk files for the candidate; mark *res* failed if none."""
        candidate_files = self.search_file(candidate['root_dir'], candidate['scenename'])
        log.debug("All found files: %r", candidate_files)
        if not candidate_files:
            res.failed("candidates defined in request but no video file found on disk")
            DopplerrDb().insert_event("subtitles", "No video file found on disk "
                                      "after sonarr notification")
            return []
        return candidate_files

    @staticmethod
    def filter_video_files(candidate_files, res):
        """Turn candidate paths into subliminal Video objects; None if empty."""
        log.info("Searching and downloading missing subtitles for: %r", candidate_files)
        res.processing("downloading missing subtitles")
        videos = SubliminalSubDownloader.filter_video_files(candidate_files)
        log.info("Video files: %r", videos)
        if not videos:
            log.debug("No subtitle to download")
            res.failed("no video file found")
            return
        return videos

    async def download_sub(self, videos, res):
        """
        Fetch the best subtitles for *videos*.

        :return: list of dicts describing downloaded subtitles; an empty list
                 when nothing was downloaded or when subliminal failed
        """
        res.processing("fetching best subtitles")
        log.info("fetching subtitles...")
        subtitles = []
        try:
            subliminal = SubliminalSubDownloader()
            provider_configs = DopplerrStatus().subliminal_provider_configs
            languages = DopplerrConfig().get_cfg_value("subliminal.languages")
            subtitles = await subliminal.download_sub(
                videos, languages, provider_configs=provider_configs)
        except Exception as e:
            log.exception("subliminal raised an exception")
            res.failed("subliminal exception")
            res.exception = repr(e)
            # bug fix: previously returned `res` itself, which is truthy and
            # made _process_candidate take the "download successful" path
            return []

        subtitles_info = []
        for vid in videos:
            log.info("Found subtitles for %s:", vid)
            for sub in subtitles[vid]:
                log.info("  %s from %s", sub.language, sub.provider_name)
                subtitles_info.append({
                    "language": str(sub.language),
                    "provider": sub.provider_name,
                    "filename": subliminal.get_subtitle_path(vid.name, language=sub.language)
                })
            subliminal.save_subtitles(vid, subtitles[vid])

        return subtitles_info
singleton 12 | from dopplerr.tasks.disk_scanner import DiskScanner 13 | from dopplerr.tasks.download_subtitles import DownloadSubtitleTask 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | 18 | @singleton 19 | class DopplerrTasksManager(object): 20 | background_tasks = 0 21 | apscheduler = None 22 | 23 | def post_task(self, task): 24 | """ 25 | TODO: transfor this simple wrapper arround `ensure_future ` into a real queue management. 26 | """ 27 | 28 | async def wrap_task(task): 29 | try: 30 | self.background_tasks += 1 31 | return await task 32 | finally: 33 | self.background_tasks -= 1 34 | 35 | asyncio.ensure_future(wrap_task(task)) 36 | 37 | def start(self): 38 | DownloadSubtitleTask().start() 39 | self.apscheduler = AsyncIOScheduler() 40 | DiskScanner().add_job(self.apscheduler) 41 | self.apscheduler.start() 42 | 43 | def stop(self): 44 | DownloadSubtitleTask().stop() 45 | self.apscheduler.shutdown(False) 46 | 47 | def status(self): 48 | return { 49 | 'background_tasks': self.background_tasks, 50 | 'subtitle_downloader': { 51 | 'started': 1 if DownloadSubtitleTask().started else 0, 52 | 'active': 1 if DownloadSubtitleTask().active else 0, 53 | }, 54 | 'disc_scanner': { 55 | 'started': 1 if DiskScanner().started else 0, 56 | 'active': 1 if DiskScanner().active else 0, 57 | 'interval_hours': DiskScanner().interval_hours, 58 | 'next_run_time': DiskScanner().next_run_time_iso, 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /dopplerr/tasks/periodic.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import asyncio 5 | import logging 6 | 7 | # Dopplerr 8 | from dopplerr.config import DopplerrConfig 9 | 10 | log = logging.getLogger(__name__) 11 | 12 | 13 | class PeriodicTask(object): 14 | job_id: str = None 15 | job_type = 'interval' 16 | job_default_kwargs = {'max_instances': 1} 17 | scheduler = None 18 | seconds: int = None 
    # Interval components; subclasses set whichever they need (falsy
    # components are omitted when building the scheduler kwargs).
    minutes: int = None
    hours: int = None
    # True while _run() is executing
    active = False
    _interrupted = False
    # dotted config key controlling whether the job is registered at all
    enable_cfg: str = None
    forced = False
    force_start_required = False

    def __init__(self):
        # subclasses customize themselves in init(), not by overriding __init__
        self.init()

    def init(self):
        """Hook for subclasses to configure the schedule (no-op by default)."""
        pass

    async def run(self):
        """
        Scheduler entry point: executes _run() with the `active` flag set.

        NOTE(review): `forced` / `force_start_required` are set here but never
        reset, so after a force_start() subsequent periodic runs are skipped —
        confirm this one-shot behavior is intended.
        """
        try:
            if self.force_start_required:
                self.forced = True
            elif self.forced:
                log.debug("Forced execution already started, skip this periodic schedule")
                return
            self.active = True
            return await self._run()
        finally:
            self.active = False

    async def _run(self):
        # to be implemented by subclasses
        raise NotImplementedError

    @property
    def _add_job_kwargs(self):
        # Build interval kwargs for apscheduler; zero/None components are
        # simply left out.
        kw = self.job_default_kwargs.copy()
        if self.seconds:
            kw['seconds'] = self.seconds
        if self.minutes:
            kw['minutes'] = self.minutes
        if self.hours:
            kw['hours'] = self.hours
        return kw

    @property
    def job(self):
        # the apscheduler Job object, or None when no scheduler is attached
        if self.scheduler:
            return self.scheduler.get_job(self.job_id)

    def add_job(self, scheduler):
        """Register this task on *scheduler*, unless disabled by configuration."""
        if self.enable_cfg is not None and not DopplerrConfig().get_cfg_value(self.enable_cfg):
            log.info("Do not enable job '%s', it is disabled by configuration '%s'", self.job_id,
                     self.enable_cfg)
            return
        self.scheduler = scheduler
        scheduler.add_job(
            self.run, self.job_type, id=self.job_id, replace_existing=True, **self._add_job_kwargs)

    @property
    def next_run_time(self):
        # next scheduled execution time, or None when not scheduled
        job = self.job
        if job:
            return self.job.next_run_time

    @property
    def next_run_time_iso(self):
        # ISO-8601 string for the status API, None when not scheduled
        t = self.next_run_time
        if t:
            return t.isoformat()

    @property
    def interval(self):
        """Total configured interval, expressed in seconds."""
        # yapf: disable
        return (
            (self.seconds if self.seconds else 0) +
            (self.minutes * 60 if self.minutes else 0) +
            (self.hours * 60 * 60 if self.hours else 0) +
            0)
        # yapf: enable

    @property
    def started(self):
        # truthy when a scheduler is attached (returns the scheduler itself)
        return self.scheduler

    def stop(self):
        # detaching the scheduler makes `stopped` true; _scan-style loops poll it
        self.scheduler = None

    @property
    def stopped(self):
        return not self.scheduler

    def interrupt(self):
        # cooperative interruption: long-running _run() implementations are
        # expected to poll self.interrupted
        self._interrupted = True

    @property
    def interrupted(self):
        return self._interrupted

    async def force_start(self):
        """Trigger an immediate, out-of-schedule execution of the task."""
        log.debug("Force start job: %s", self.job_id)
        self.force_start_required = True
        asyncio.ensure_future(self.run())
    # task queue and result queue; recreated per-instance in __init__
    _queue: asyncio.Queue = None
    _result: asyncio.Queue = None
    _started: bool = False
    _consumer: asyncio.Future = None
    name: str = "Unnamed queue"
    # job modes posted on the queue
    FIRE_AND_FORGET = 0
    WAIT_FOR_RESULT = 1

    def __init__(self):
        self._queue = asyncio.Queue()
        self._result = asyncio.Queue()

    async def _loop_queue(self):
        """Consumer coroutine: executes queued jobs strictly one at a time."""
        assert self._queue
        while True:
            fire_and_forget, inputs = await self._queue.get()

            try:
                self.active = True
                result = await self._run(inputs)
                if fire_and_forget == QueuedTask.WAIT_FOR_RESULT:
                    # a caller is blocked in _wait_next_result(): hand the
                    # result over via the result queue
                    await self._result.put(result)
            finally:
                self.active = False
                self._queue.task_done()

    async def _run(self, task):
        # to be implemented by subclasses
        raise NotImplementedError

    async def _wait_next_result(self):
        # NOTE(review): results are matched to callers purely by FIFO order —
        # confirm concurrent run_and_wait() callers cannot receive each
        # other's results
        assert self.started
        r = await self._result.get()
        self._result.task_done()
        return r

    def start(self):
        """Start the consumer coroutine (no-op while already running)."""
        assert self._queue
        if self.stopped:
            self._consumer = asyncio.ensure_future(self._loop_queue())

    def stop(self):
        """Cancel the consumer; queued but unprocessed jobs are dropped."""
        assert self._queue
        if self._consumer:
            self._consumer.cancel()
            self._consumer = None

    async def fire_and_forget(self, task):
        """Queue *task* without waiting for its result."""
        assert self._queue
        await self._queue.put((QueuedTask.FIRE_AND_FORGET, task))

    async def run(self, task):
        # alias kept for TaskBase compatibility
        return await self.run_and_wait(task)

    async def run_and_wait(self, task):
        """Queue *task* and block until the consumer returns its result."""
        assert self._queue
        await self._queue.put((QueuedTask.WAIT_FOR_RESULT, task))
        return await self._wait_next_result()

    async def join(self):
        """Wait until every queued job has been processed."""
        assert self._queue
        await self._queue.join()

    @property
    def stopped(self):
        return not self.started

    @property
    def started(self):
        # truthy when the consumer future exists (returns the future itself)
        return self._consumer
| 27 | log = logging.getLogger(__name__) 28 | 29 | 30 | class SubliminalSubDownloader(ThreadedTask): 31 | worker_threads_num = 1 32 | 33 | async def _run(self, res): 34 | raise NotImplementedError 35 | 36 | @staticmethod 37 | def initialize_db(): 38 | log.info("Initializing Subliminal cache...") 39 | region.configure( 40 | 'dogpile.cache.dbm', 41 | expiration_time=timedelta(days=30), 42 | arguments={ 43 | 'filename': 'cachefile.dbm', 44 | 'lock_factory': MutexLock 45 | }) 46 | 47 | # async def download_sub_by_subproc(self, videos, languages, provider_configs): 48 | # subl_cmd = ["subliminal"] 49 | # print(provider_configs) 50 | # for provider_name, provider_config in provider_configs.items(): 51 | # subl_cmd.extend([ 52 | # "--{}".format(provider_name), 53 | # provider_config['username'], 54 | # provider_config['password'], 55 | # ]) 56 | # subl_cmd.extend(["download"]) 57 | # for l in languages: 58 | # subl_cmd.extend(["--language", l]) 59 | # subl_cmd.extend(videos) 60 | # subl_cmd.extend(["-vvv"]) 61 | # stdout, stderr, code = await self._run_command(*subl_cmd) 62 | # log.debug(stdout) 63 | # log.error(stderr) 64 | # log.error(code) 65 | # if "Downloaded 0 subtitle" in stdout: 66 | # log.error("No subtitle downloaded") 67 | # raise NotImplementedError 68 | 69 | async def download_sub(self, videos, languages, provider_configs): 70 | return await self._run_in_thread( 71 | download_best_subtitles, 72 | videos, {Language(l) 73 | for l in languages}, 74 | provider_configs=provider_configs) 75 | 76 | @staticmethod 77 | def filter_video_files(files): 78 | videos = [] 79 | for fil in files: 80 | _, ext = os.path.splitext(fil) 81 | if ext in [".jpeg", ".jpg", ".nfo", ".srt", ".sub", ".nbz"]: 82 | log.debug("Ignoring %s because of extension: %s", fil, ext) 83 | continue 84 | videos.append(Video.fromname(fil)) 85 | return videos 86 | 87 | @staticmethod 88 | def get_subtitle_path(video_file, language): 89 | return get_subtitle_path(video_file, language=language) 90 | 91 | 
@staticmethod 92 | def save_subtitles(video, subtitle_info): 93 | return save_subtitles(video, subtitle_info) 94 | 95 | 96 | class RefineVideoFileTask(ThreadedTask): 97 | 98 | async def refine_file(self, video_file): 99 | return await self._run_in_thread(self._refine_file, video_file) 100 | 101 | @staticmethod 102 | def _refine_file(video_file) -> List[SeriesEpisodeInfo]: 103 | log.debug("Refining file %s", video_file) 104 | try: 105 | video = Video.fromname(video_file) 106 | except ValueError: 107 | log.error("Cannot guess video file type from: %s", video_file) 108 | return [] 109 | refiner = sorted(refiner_manager.names()) 110 | refine(video, episode_refiners=refiner, movie_refiners=refiner) 111 | log.debug("refine result: %r", video) 112 | if isinstance(video, Episode): 113 | log.debug("series: %s", video.series) 114 | log.debug("season: %s", video.season) 115 | if not video.season: 116 | log.error("No season defined !") 117 | return [] 118 | if isinstance(video.season, list): 119 | video.season = video.season[0] 120 | log.error("Multi season found, only using first one: %s", video.season) 121 | log.debug("episode: %s", video.episode) 122 | log.debug("title: %s", video.title) 123 | log.debug("series_tvdb_id: %s", video.series_tvdb_id) 124 | r = [] 125 | # Support for double episode 126 | if not isinstance(video.episode, list): 127 | video.episode = [video.episode] 128 | for video_episode in video.episode: 129 | r.append( 130 | SeriesEpisodeInfo( 131 | series_episode_uid=SeriesEpisodeUid( 132 | tv_db_id=video.series_tvdb_id, 133 | season_number=video.season, 134 | episode_number=video_episode, 135 | ), 136 | series_title=video.series, 137 | episode_title=video.title, 138 | quality=None, 139 | video_languages=None, 140 | subtitles_languages=None, 141 | media_filename=video_file, 142 | dirty=True, 143 | )) 144 | return r 145 | elif isinstance(video, Movie): 146 | log.debug("movie: %s", video.title) 147 | return [] 148 | 
-------------------------------------------------------------------------------- /dopplerr/tasks/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | -------------------------------------------------------------------------------- /dopplerr/tasks/tests/test_aps.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import asyncio 5 | import os 6 | from datetime import datetime 7 | 8 | # Third Party Libraries 9 | from apscheduler.schedulers.asyncio import AsyncIOScheduler 10 | 11 | 12 | def test_apscheduler(): 13 | 14 | def tick(): 15 | print('Tick! The time is: %s' % datetime.now()) 16 | 17 | async def leave(): 18 | asyncio.get_event_loop().stop() 19 | 20 | scheduler = AsyncIOScheduler() 21 | scheduler.add_job(tick, 'interval', seconds=1) 22 | scheduler.add_job(leave, 'interval', seconds=3) 23 | scheduler.start() 24 | print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C')) 25 | 26 | # Execution will block here until Ctrl+C (Ctrl+Break on Windows) is pressed. 
27 | try: 28 | asyncio.get_event_loop().run_forever() 29 | except (KeyboardInterrupt, SystemExit): 30 | pass 31 | -------------------------------------------------------------------------------- /dopplerr/tasks/tests/test_download_subtitles.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import unittest 5 | from pathlib import Path 6 | 7 | # Dopplerr 8 | from dopplerr.tasks.download_subtitles import DownloadSubtitleTask 9 | 10 | 11 | class TestGlob(unittest.TestCase): 12 | 13 | def assert_list_size(self, lst, size): 14 | if len(lst) != size: 15 | self.fail("list size should be {}, is {} : {}".format(size, len(lst), ", ".join(lst))) 16 | 17 | def test_glob_simple_filename(self): 18 | downloader = DownloadSubtitleTask() 19 | found = downloader.search_file(Path(__file__).parent / "vectors", "videofile.mp4") 20 | self.assert_list_size(found, 4) 21 | found = sorted(found) 22 | self.assertIn("/a_subfolder/prepended-videofile.mp4", found[0]) 23 | self.assertIn("/a_subfolder/videofile-suffixed.mp4", found[1]) 24 | self.assertIn("/a_subfolder/videofile.mp4", found[2]) 25 | self.assertIn("/videofile.mp4", found[3]) 26 | 27 | def test_glob_filename_with_bracket(self): 28 | downloader = DownloadSubtitleTask() 29 | found = downloader.search_file(Path(__file__).parent / "vectors", "complex[name].mkv") 30 | self.assert_list_size(found, 2) 31 | found = sorted(found) 32 | self.assertIn("vectors/a_subfolder/complex[name].mkv", found[0]) 33 | self.assertIn("vectors/complex[name][withanothersuffix].mkv", found[1]) 34 | 35 | # Todo: 36 | # glob test of "The.Series.Name.S07E06.720p.BluRay.DD5.1.x264-EbP-Obfuscated" 37 | -------------------------------------------------------------------------------- /dopplerr/tasks/tests/test_executors.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Standard Libraries 4 | import asyncio 5 | import 
logging 6 | import time 7 | 8 | # Third Party Libraries 9 | from asynctest import TestCase 10 | 11 | # Dopplerr 12 | from dopplerr.tasks.manager import DopplerrTasksManager 13 | from dopplerr.tasks.threaded import ThreadedTask 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | 18 | class SingleExecutor(ThreadedTask): 19 | worker_threads_num = 1 20 | 21 | async def execute(self, long_task, *args): 22 | return await self._run_in_thread(long_task, *args) 23 | 24 | async def _run(self, task): 25 | raise NotImplementedError 26 | 27 | 28 | class TestApscheduler(TestCase): 29 | 30 | async def test_executors(self): 31 | log.info("") 32 | de = DopplerrTasksManager() 33 | 34 | def mkprefix(i): 35 | return " " * 15 + " " * 45 * i 36 | 37 | def long_sync_task(i): 38 | # all tasks should run inside the SAME thread 39 | prefix = mkprefix(1) 40 | log.info("%s long sync task %s: begin", prefix, i) 41 | for _ in range(0, i * 10): 42 | log.info("%s long sync task %s: working", prefix, i) 43 | # note: time.sleep does NOT block current thread, so other "blocking" task 44 | # might actually overlap their executions 45 | time.sleep(0.1) 46 | log.info("%s long sync task %s: end", prefix, i) 47 | 48 | async def long_async_task(i): 49 | log.info("long async task %s: begin", i) 50 | await SingleExecutor().execute(long_sync_task, i) 51 | log.info("long async task %s: more work after results from run in thread task", i) 52 | await asyncio.sleep(1) 53 | log.info("long async task %s: end", i) 54 | 55 | log.info("event loop thread %s worker thread", " " * 40) 56 | de.post_task(long_async_task(1)) 57 | de.post_task(long_async_task(2)) 58 | de.post_task(long_async_task(3)) 59 | de.post_task(long_async_task(4)) 60 | log.info("waiting 5s") 61 | for _ in range(0, 60): 62 | log.info("-- running long async tasks: %s --", de.status()['background_tasks']) 63 | await asyncio.sleep(0.1) 64 | log.info("end of unittest") 65 | -------------------------------------------------------------------------------- 
class MyTask(QueuedTask):
    """Test double: records every processed input and every logged event."""

    name: str = "MyTask"

    def __init__(self) -> None:
        super(MyTask, self).__init__()
        self.input_processed: List[Any] = []
        self.event_sequence: List[Any] = []

    def add_event(self, event: str, *args) -> None:
        # log and record the formatted event so tests can assert exact ordering
        log.info(event, *args)
        self.event_sequence.append(event % args)

    async def _run(self, task) -> str:
        self.add_event("Task %s: work started", task)
        # simulate some asynchronous work
        await asyncio.sleep(0.2)
        self.input_processed.append(task)
        self.add_event("Task %s: work finished", task)
        return "finished task {}".format(task)


class TestTaskQueue(asynctest.TestCase):
    maxDiff: int = None

    async def test_fire_and_forget(self) -> None:
        """Queued jobs run sequentially, interleaved with event-loop work."""
        task = MyTask()
        task.start()
        await task.fire_and_forget("#1")
        await task.fire_and_forget("#2")
        task.add_event("EventLoop: doing other stuff...")
        await asyncio.sleep(1)
        await task.fire_and_forget("#3")
        await task.fire_and_forget("#4")
        task.add_event("EventLoop: doing other other stuff...")
        await asyncio.sleep(1)
        await task.fire_and_forget("maybe unprocessed task")
        task.add_event("EventLoop: stopping everything...")
        await asyncio.sleep(1)
        # await task.join()
        task.stop()
        self.assertIn('#1', task.input_processed)
        self.assertIn('#2', task.input_processed)
        self.assertIn('#3', task.input_processed)
        self.assertIn('#4', task.input_processed)
        # exact interleaving: queue drains in FIFO order between sleeps
        self.assertListEqual([
            'EventLoop: doing other stuff...',
            'Task #1: work started',
            'Task #1: work finished',
            'Task #2: work started',
            'Task #2: work finished',
            'EventLoop: doing other other stuff...',
            'Task #3: work started',
            'Task #3: work finished',
            'Task #4: work started',
            'Task #4: work finished',
            'EventLoop: stopping everything...',
            'Task maybe unprocessed task: work started',
            'Task maybe unprocessed task: work finished',
        ], task.event_sequence)

    async def test_run_with_additional_work(self) -> None:
        """run_and_wait serializes the queued part; follow-up work overlaps."""

        async def job_with_task_and_additional_work(task: MyTask, task_id: str) -> None:
            task.add_event("Job %s: asking to execute the sequential task", task_id)
            res = await task.run_and_wait(task_id)
            task.add_event("Job %s: result received '%s'", task_id, res)
            task.add_event("Job %s: additional work start (can be executed in //)", task_id)
            await asyncio.sleep(0.4)
            task.add_event("Job %s: additional work end", task_id)

        task = MyTask()
        task.start()
        task.add_event("EventLoop: launching 2 tasks that should be executed sequentially later...")
        asyncio.ensure_future(job_with_task_and_additional_work(task, "#1"))
        asyncio.ensure_future(job_with_task_and_additional_work(task, "#2"))
        task.add_event("EventLoop: doing other other stuff...")
        await asyncio.sleep(1)
        task.stop()
        self.assertListEqual([
            'EventLoop: launching 2 tasks that should be executed sequentially later...',
            'EventLoop: doing other other stuff...',
            'Job #1: asking to execute the sequential task',
            'Job #2: asking to execute the sequential task',
            'Task #1: work started',
            'Task #1: work finished',
            'Task #2: work started',
            "Job #1: result received 'finished task #1'",
            'Job #1: additional work start (can be executed in //)',
            'Task #2: work finished',
            "Job #2: result received 'finished task #2'",
            'Job #2: additional work start (can be executed in //)',
            'Job #1: additional work end',
            'Job #2: additional work end',
        ], task.event_sequence)
#1: additional work end', 109 | 'Job #2: additional work end', 110 | ], task.event_sequence) 111 | -------------------------------------------------------------------------------- /dopplerr/tasks/tests/vectors/a_subfolder/complex[name].mkv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/dopplerr/tasks/tests/vectors/a_subfolder/complex[name].mkv -------------------------------------------------------------------------------- /dopplerr/tasks/tests/vectors/a_subfolder/prepended-videofile.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/dopplerr/tasks/tests/vectors/a_subfolder/prepended-videofile.mp4 -------------------------------------------------------------------------------- /dopplerr/tasks/tests/vectors/a_subfolder/videofile-suffixed.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/dopplerr/tasks/tests/vectors/a_subfolder/videofile-suffixed.mp4 -------------------------------------------------------------------------------- /dopplerr/tasks/tests/vectors/a_subfolder/videofile.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/dopplerr/tasks/tests/vectors/a_subfolder/videofile.mp4 -------------------------------------------------------------------------------- /dopplerr/tasks/tests/vectors/anothervideo.mkv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/dopplerr/tasks/tests/vectors/anothervideo.mkv 
class ThreadedTask(object):
    """Mixin that owns a private thread pool for off-loading blocking calls.

    Subclasses may override ``worker_threads_num`` to widen the pool; the
    default of 1 serializes all off-loaded work.
    """

    # Number of workers in the dedicated pool (one by default).
    worker_threads_num = 1

    def __init__(self):
        # A per-instance pool, so blocking work never starves the event loop
        # nor competes with other tasks' executors.
        self.executors = concurrent.futures.ThreadPoolExecutor(
            max_workers=self.worker_threads_num)

    async def _run_in_thread(self, func, *args, **kwargs):
        """Run ``func(*args, **kwargs)`` in the pool and await its result."""
        call = functools.partial(func, *args, **kwargs)
        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(self.executors, call)
-------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | -------------------------------------------------------------------------------- /dopplerr/tests/vectors/basedir/Series/A Series Title/Season 1/The.Episode.Title.S01E09.1080p.WEB-DL.DD5.1.H264-AGROUP-Scrambled.mkv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/dopplerr/tests/vectors/basedir/Series/A Series Title/Season 1/The.Episode.Title.S01E09.1080p.WEB-DL.DD5.1.H264-AGROUP-Scrambled.mkv -------------------------------------------------------------------------------- /dopplerr/tests/vectors/radarr_on_grab-info.json: -------------------------------------------------------------------------------- 1 | { 2 | "filename_on_disk": "Movie.Name.avi" 3 | } 4 | -------------------------------------------------------------------------------- /dopplerr/tests/vectors/radarr_on_grab.json: -------------------------------------------------------------------------------- 1 | { 2 | "EventType": "Grab", 3 | "RemoteMovie": { 4 | "ImdbId": "tt123456", 5 | "Year": 2000, 6 | "TmdbId": 1234, 7 | "Title": "Movie Name" 8 | }, 9 | "Movie": { 10 | "FilePath": null, 11 | "Id": 123, 12 | "Title": "Movie Name" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /dopplerr/tests/vectors/sonarr_on_download-info.json: -------------------------------------------------------------------------------- 1 | { 2 | "filename_on_disk": "The.Series.Title.S01E09.1080p.WEB-DL.DD5.1.H264-AGROUP-Scrambled.mkv" 3 | } 4 | -------------------------------------------------------------------------------- /dopplerr/tests/vectors/sonarr_on_download.json: -------------------------------------------------------------------------------- 1 | { 2 | "EventType": "Download", 3 | "Series": { 4 | "Id": 12, 5 | "Title": "A Series Title", 6 | "Path": 
"/tv/A Series Title", 7 | "TvdbId": 123456 8 | }, 9 | "Episodes": [ 10 | { 11 | "Id": 1234, 12 | "EpisodeNumber": 2, 13 | "SeasonNumber": 1, 14 | "Title": "The Episode Title", 15 | "AirDate": "2017-03-01", 16 | "AirDateUtc": "2017-03-02T02:00:00Z", 17 | "Quality": "WEBDL-1080p", 18 | "QualityVersion": 1, 19 | "ReleaseGroup": "ReleaseGroupName", 20 | "SceneName": "The.Series.Title.S01E09.1080p.WEB-DL.DD5.1.H264-AGROUP-Scrambled" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /dopplerr/tests/vectors/sonarr_on_download2-info.json: -------------------------------------------------------------------------------- 1 | { 2 | "filename_on_disk": "The.Episode.Title.S01E09.1080p.WEB-DL.DD5.1.H264-AGROUP-Scrambled.mp4" 3 | } 4 | -------------------------------------------------------------------------------- /dopplerr/tests/vectors/sonarr_on_download2.json: -------------------------------------------------------------------------------- 1 | { 2 | "EventType": "Download", 3 | "Series": { 4 | "Id": 28, 5 | "Title": "A Series Title", 6 | "Path": "/tv/A Series Title", 7 | "TvdbId": 123456 8 | }, 9 | "Episodes": [ 10 | { 11 | "Id": 1234, 12 | "EpisodeNumber": 9, 13 | "SeasonNumber": 1, 14 | "Title": "DNR", 15 | "AirDate": "2017-04-26", 16 | "AirDateUtc": "2017-04-27T01:00:00Z", 17 | "Quality": "WEBDL-1080p", 18 | "QualityVersion": 1, 19 | "ReleaseGroup": "ReleaseGroupName", 20 | "SceneName": "The.Episode.Title.S01E09.1080p.WEB-DL.DD5.1.H264-AGROUP-Scrambled" 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /dopplerr/tests/vectors/sonarr_on_download_rename-info.json: -------------------------------------------------------------------------------- 1 | { 2 | "filename_on_disk": "The Series Name - S01E25 - Finale.mp4", 3 | "relative_path": "The Series Name/Season 1/" 4 | } 5 | -------------------------------------------------------------------------------- 
/dopplerr/tests/vectors/sonarr_on_download_rename.json: -------------------------------------------------------------------------------- 1 | { 2 | "episodes": [ 3 | { 4 | "id": 1234, 5 | "episode Number": 25, 6 | "seasonNumber": 1, 7 | "title": "Finale", 8 | "airDate": "2013-03-17", 9 | "airDateUtc": "2013-03-17T08:30:00Z", 10 | "quality": "SDTV", 11 | "qualityVersion": 1, 12 | "releaseGroup": "fqm", 13 | "sceneName": "The.Series.Name.S01E25.Finale.PDTV.x264-FQM" 14 | } 15 | ], 16 | "episodeFile": { 17 | "id": 1847, 18 | "relativePath": "Season 1/The Series Name - S01E25 - Finale.mp4", 19 | "path": "/Series/.downloaded/The.Series.Name.S01E25.Finale.PDTV.x264-FQM/the.series.name.s01e25.pdtv.x264-fqm.mp4", 20 | "quality": "SDTV", 21 | "qualityVersion": 1, 22 | "releaseGroup": "fqm", 23 | "sceneName": "The.Series.Name.S01E25.Finale.PDTV.x264-FQM" 24 | }, 25 | "isUpgrade": false, 26 | "eventType": "Download", 27 | "series": { 28 | "id": 12, 29 | "title": "The Series Name", 30 | "path": "/Series/The Series Name", 31 | "tvdbId": 12345 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /dopplerr/tests/vectors/sonarr_on_grab-info.json: -------------------------------------------------------------------------------- 1 | { 2 | "filename_on_disk": "/tv/A Series Title/Season 1/The.Episode.Name.S01E09.1080p.WEB-DL.DD5.1.H264-AGROUP-Scrambled.mkv" 3 | } 4 | -------------------------------------------------------------------------------- /dopplerr/tests/vectors/sonarr_on_grab.json: -------------------------------------------------------------------------------- 1 | { 2 | "EventType": "Grab", 3 | "Series": { 4 | "Id": 12, 5 | "Title": "A Series Title", 6 | "Path": "/tv/A Series Title", 7 | "TvdbId": 123456 8 | }, 9 | "Episodes": [ 10 | { 11 | "Id": 1234, 12 | "EpisodeNumber": 9, 13 | "SeasonNumber": 1, 14 | "Title": "DNR", 15 | "AirDate": "2017-04-26", 16 | "AirDateUtc": "2017-04-27T01:00:00Z", 17 | "Quality": "WEBDL-1080p", 18 | 
"QualityVersion": 1, 19 | "ReleaseGroup": "AGROUP", 20 | "SceneName": null 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /frontend/.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [["es2015", {"modules": false}], "stage-2"], 3 | "plugins": ["transform-runtime"], 4 | "comments": false 5 | } 6 | -------------------------------------------------------------------------------- /frontend/.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | indent_style = space 6 | indent_size = 2 7 | end_of_line = lf 8 | insert_final_newline = true 9 | trim_trailing_whitespace = true 10 | 11 | [Makefile] 12 | indent_style = tab 13 | indent_size = 4 14 | -------------------------------------------------------------------------------- /frontend/.eslintignore: -------------------------------------------------------------------------------- 1 | build/*.js 2 | config/*.js 3 | dist/*.js 4 | -------------------------------------------------------------------------------- /frontend/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | parser: 'babel-eslint', 4 | parserOptions: { 5 | sourceType: 'module' 6 | }, 7 | env: { 8 | browser: true 9 | }, 10 | // https://github.com/feross/standard/blob/master/RULES.md#javascript-standard-style 11 | extends: [ 12 | 'standard' 13 | ], 14 | // required to lint *.vue files 15 | plugins: [ 16 | 'html', 17 | 'import' 18 | ], 19 | globals: { 20 | 'cordova': true, 21 | 'DEV': true, 22 | 'PROD': true, 23 | '__THEME': true 24 | }, 25 | // add your custom rules here 26 | 'rules': { 27 | // allow paren-less arrow functions 28 | 'arrow-parens': 0, 29 | 'one-var': 0, 30 | 'comma-dangle': 0, 31 | 'import/first': 0, 32 | 'import/named': 2, 33 | 'import/namespace': 2, 34 | 
'import/default': 2, 35 | 'import/export': 2, 36 | // allow debugger during development 37 | 'no-debugger': process.env.NODE_ENV === 'production' ? 2 : 0, 38 | 'brace-style': [2, 'stroustrup', { 'allowSingleLine': true }] 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .thumbs.db 3 | node_modules/ 4 | dist/ 5 | npm-debug.log* 6 | cordova/platforms 7 | cordova/plugins 8 | -------------------------------------------------------------------------------- /frontend/.stylintrc: -------------------------------------------------------------------------------- 1 | { 2 | "blocks": "never", 3 | "brackets": "never", 4 | "colons": "never", 5 | "colors": "always", 6 | "commaSpace": "always", 7 | "commentSpace": "always", 8 | "cssLiteral": "never", 9 | "depthLimit": false, 10 | "duplicates": true, 11 | "efficient": "always", 12 | "extendPref": false, 13 | "globalDupe": true, 14 | "indentPref": 2, 15 | "leadingZero": "never", 16 | "maxErrors": false, 17 | "maxWarnings": false, 18 | "mixed": false, 19 | "namingConvention": false, 20 | "namingConventionStrict": false, 21 | "none": "never", 22 | "noImportant": false, 23 | "parenSpace": "never", 24 | "placeholder": false, 25 | "prefixVarsWithDollar": "always", 26 | "quotePref": "single", 27 | "semicolons": "never", 28 | "sortOrder": false, 29 | "stackedProperties": "never", 30 | "trailingWhitespace": "never", 31 | "universal": "never", 32 | "valid": true, 33 | "zeroUnits": "never", 34 | "zIndexNormalize": false 35 | } 36 | -------------------------------------------------------------------------------- /frontend/Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: build 2 | 3 | dev: 4 | npm install -g quasar-cli 5 | npm install 6 | 7 | run: 8 | quasar dev 9 | 10 | build: 11 | quasar build 12 | 13 | version: 14 | npm 
version $(pbr info dopplerr | cut -f2) 15 | 16 | release: version 17 | 18 | clean: 19 | rm -rf node_modules/ 20 | rm -rf dist/ 21 | 22 | style: check 23 | 24 | check: 25 | npm run lint 26 | 27 | update: 28 | npm update 29 | 30 | lint: check 31 | dist: build 32 | -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | # Dopplerr 2 | 3 | > Dopplerr Subtitle Downloader 4 | 5 | ## Build Setup 6 | 7 | ``` bash 8 | # install dependencies 9 | $ npm install 10 | 11 | # serve with hot reload at localhost:8080 12 | $ quasar dev 13 | 14 | # build for production with minification 15 | $ quasar build 16 | 17 | # lint code 18 | $ quasar lint 19 | ``` 20 | -------------------------------------------------------------------------------- /frontend/build/css-utils.js: -------------------------------------------------------------------------------- 1 | var 2 | ExtractTextPlugin = require('extract-text-webpack-plugin'), 3 | autoprefixer = require('autoprefixer'), 4 | purify = require('purify-css'), 5 | glob = require('glob'), 6 | path = require('path'), 7 | fs = require('fs') 8 | 9 | module.exports.postcss = [autoprefixer()] 10 | 11 | module.exports.styleLoaders = function (options) { 12 | options = options || {} 13 | 14 | function generateLoaders (loaders) { 15 | if (options.postcss) { 16 | loaders.splice(1, 0, 'postcss') 17 | } 18 | 19 | var sourceLoader = loaders.map(function (loader) { 20 | var extraParamChar 21 | if (/\?/.test(loader)) { 22 | loader = loader.replace(/\?/, '-loader?') 23 | extraParamChar = '&' 24 | } 25 | else { 26 | loader = loader + '-loader' 27 | extraParamChar = '?' 28 | } 29 | return loader + (options.sourceMap ? 
extraParamChar + 'sourceMap' : '') 30 | }).join('!') 31 | 32 | if (options.extract) { 33 | return ExtractTextPlugin.extract({ 34 | use: sourceLoader, 35 | fallback: 'vue-style-loader' 36 | }) 37 | } 38 | else { 39 | return ['vue-style-loader', sourceLoader].join('!') 40 | } 41 | } 42 | 43 | return { 44 | css: generateLoaders(['css']), 45 | less: generateLoaders(['css', 'less']), 46 | sass: generateLoaders(['css', 'sass?indentedSyntax']), 47 | scss: generateLoaders(['css', 'sass']), 48 | styl: generateLoaders(['css', 'stylus']), 49 | stylus: generateLoaders(['css', 'stylus']) 50 | } 51 | } 52 | 53 | module.exports.styleRules = function (options) { 54 | var output = [] 55 | var loaders = exports.styleLoaders(options) 56 | for (var extension in loaders) { 57 | var loader = loaders[extension] 58 | output.push({ 59 | test: new RegExp('\\.' + extension + '$'), 60 | loader: loader 61 | }) 62 | } 63 | return output 64 | } 65 | 66 | function getSize (size) { 67 | return (size / 1024).toFixed(2) + 'kb' 68 | } 69 | 70 | module.exports.purify = function(cb) { 71 | var css = glob.sync(path.join(__dirname, '../dist/**/*.css')) 72 | var js = glob.sync(path.join(__dirname, '../dist/**/*.js')) 73 | 74 | Promise.all(css.map(function (file) { 75 | return new Promise(function (resolve) { 76 | console.log('\n Purifying ' + path.relative(path.join(__dirname, '../dist'), file).bold + '...') 77 | purify(js, [file], {minify: true}, function (purified) { 78 | var oldSize = fs.statSync(file).size 79 | fs.writeFileSync(file, purified) 80 | var newSize = fs.statSync(file).size 81 | 82 | console.log( 83 | ' * Reduced size by ' + ((1 - newSize / oldSize) * 100).toFixed(2) + '%, from ' + 84 | getSize(oldSize) + ' to ' + getSize(newSize) + '.' 
// Resolve the build environment (dev/prod) and the target Quasar theme.
var
  config = require('../config'),
  // Theme comes from the CLI argument ("mat" or "ios"), falling back to
  // the project default declared in /config/index.js.
  theme = process.argv[2] || config.defaultTheme

module.exports = {
  dev: process.env.NODE_ENV === 'development',
  prod: process.env.NODE_ENV === 'production',

  platform: {
    theme: theme,
    // Cordova platform assets: 'mat' maps to android, anything else to ios.
    cordovaAssets: './cordova/platforms/' + (theme === 'mat' ? 'android' : 'ios') + '/platform_www'
  }
}
// Remove all previous build output from ../dist, including dotfiles
// (the two globs are needed because '*' does not match hidden entries).
var
  shell = require('shelljs'),
  path = require('path')

shell.rm('-rf', path.resolve(__dirname, '../dist/*'))
shell.rm('-rf', path.resolve(__dirname, '../dist/.*'))
console.log(' Cleaned build artifacts.\n')
(config.dev.openBrowser) { 21 | console.log(' Browser will open when build is ready.\n') 22 | } 23 | 24 | var compiler = webpack(webpackConfig) 25 | 26 | // Define HTTP proxies to your custom API backend 27 | // https://github.com/chimurai/http-proxy-middleware 28 | var proxyTable = config.dev.proxyTable 29 | 30 | var devMiddleware = require('webpack-dev-middleware')(compiler, { 31 | publicPath: webpackConfig.output.publicPath, 32 | quiet: true 33 | }) 34 | 35 | var hotMiddleware = require('webpack-hot-middleware')(compiler, { 36 | log: function () {} 37 | }) 38 | 39 | // force page reload when html-webpack-plugin template changes 40 | compiler.plugin('compilation', function (compilation) { 41 | compilation.plugin('html-webpack-plugin-after-emit', function (data, cb) { 42 | hotMiddleware.publish({ action: 'reload' }) 43 | cb() 44 | }) 45 | }) 46 | 47 | // proxy requests like API. See /config/index.js -> dev.proxyTable 48 | // https://github.com/chimurai/http-proxy-middleware 49 | Object.keys(proxyTable).forEach(function (context) { 50 | var options = proxyTable[context] 51 | if (typeof options === 'string') { 52 | options = { target: options } 53 | } 54 | app.use(proxyMiddleware(context, options)) 55 | }) 56 | 57 | // handle fallback for HTML5 history API 58 | app.use(require('connect-history-api-fallback')()) 59 | 60 | // serve webpack bundle output 61 | app.use(devMiddleware) 62 | 63 | // enable hot-reload and state-preserving 64 | // compilation error display 65 | app.use(hotMiddleware) 66 | 67 | // serve pure static assets 68 | var staticsPath = path.posix.join(webpackConfig.output.publicPath, 'statics/') 69 | app.use(staticsPath, express.static('./src/statics')) 70 | 71 | // try to serve Cordova statics for Play App 72 | app.use(express.static(env.platform.cordovaAssets)) 73 | 74 | module.exports = app.listen(port, function (err) { 75 | if (err) { 76 | console.log(err) 77 | process.exit(1) 78 | } 79 | 80 | // open browser if set so in /config/index.js 81 | if 
(config.dev.openBrowser) { 82 | devMiddleware.waitUntilValid(function () { 83 | opn(uri) 84 | }) 85 | } 86 | }) 87 | -------------------------------------------------------------------------------- /frontend/build/webpack.base.conf.js: -------------------------------------------------------------------------------- 1 | var 2 | path = require('path'), 3 | webpack = require('webpack'), 4 | config = require('../config'), 5 | cssUtils = require('./css-utils'), 6 | env = require('./env-utils'), 7 | merge = require('webpack-merge'), 8 | projectRoot = path.resolve(__dirname, '../'), 9 | ProgressBarPlugin = require('progress-bar-webpack-plugin'), 10 | useCssSourceMap = 11 | (env.dev && config.dev.cssSourceMap) || 12 | (env.prod && config.build.productionSourceMap) 13 | 14 | function resolve (dir) { 15 | return path.join(__dirname, '..', dir) 16 | } 17 | 18 | module.exports = { 19 | entry: { 20 | app: './src/main.js' 21 | }, 22 | output: { 23 | path: path.resolve(__dirname, '../dist'), 24 | publicPath: config[env.prod ? 
'build' : 'dev'].publicPath, 25 | filename: 'js/[name].js', 26 | chunkFilename: 'js/[id].[chunkhash].js' 27 | }, 28 | resolve: { 29 | extensions: ['.js', '.vue', '.json'], 30 | modules: [ 31 | resolve('src'), 32 | resolve('node_modules') 33 | ], 34 | alias: config.aliases 35 | }, 36 | module: { 37 | rules: [ 38 | { // eslint 39 | enforce: 'pre', 40 | test: /\.(vue|js)$/, 41 | loader: 'eslint-loader', 42 | include: projectRoot, 43 | exclude: /node_modules/, 44 | options: { 45 | formatter: require('eslint-friendly-formatter') 46 | } 47 | }, 48 | { 49 | test: /\.js$/, 50 | loader: 'babel-loader', 51 | include: projectRoot, 52 | exclude: /node_modules/ 53 | }, 54 | { 55 | test: /\.vue$/, 56 | loader: 'vue-loader', 57 | options: { 58 | postcss: cssUtils.postcss, 59 | loaders: merge({js: 'babel-loader'}, cssUtils.styleLoaders({ 60 | sourceMap: useCssSourceMap, 61 | extract: env.prod 62 | })) 63 | } 64 | }, 65 | { 66 | test: /\.json$/, 67 | loader: 'json-loader' 68 | }, 69 | { 70 | test: /\.(png|jpe?g|gif|svg)(\?.*)?$/, 71 | loader: 'url-loader', 72 | options: { 73 | limit: 10000, 74 | name: 'img/[name].[hash:7].[ext]' 75 | } 76 | }, 77 | { 78 | test: /\.(woff2?|eot|ttf|otf)(\?.*)?$/, 79 | loader: 'url-loader', 80 | options: { 81 | limit: 10000, 82 | name: 'fonts/[name].[hash:7].[ext]' 83 | } 84 | } 85 | ] 86 | }, 87 | plugins: [ 88 | new webpack.DefinePlugin({ 89 | 'process.env': config[env.prod ? 
'build' : 'dev'].env, 90 | 'DEV': env.dev, 91 | 'PROD': env.prod, 92 | '__THEME': '"' + env.platform.theme + '"' 93 | }), 94 | new webpack.LoaderOptionsPlugin({ 95 | minimize: env.prod, 96 | options: { 97 | context: path.resolve(__dirname, '../src'), 98 | postcss: cssUtils.postcss 99 | } 100 | }), 101 | new ProgressBarPlugin({ 102 | format: config.progressFormat 103 | }) 104 | ], 105 | performance: { 106 | hints: false 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /frontend/build/webpack.dev.conf.js: -------------------------------------------------------------------------------- 1 | var 2 | config = require('../config'), 3 | webpack = require('webpack'), 4 | merge = require('webpack-merge'), 5 | cssUtils = require('./css-utils'), 6 | baseWebpackConfig = require('./webpack.base.conf'), 7 | HtmlWebpackPlugin = require('html-webpack-plugin'), 8 | FriendlyErrorsPlugin = require('friendly-errors-webpack-plugin') 9 | 10 | // add hot-reload related code to entry chunks 11 | Object.keys(baseWebpackConfig.entry).forEach(function (name) { 12 | baseWebpackConfig.entry[name] = ['./build/hot-reload.js', baseWebpackConfig.entry[name]] 13 | }) 14 | 15 | module.exports = merge(baseWebpackConfig, { 16 | // eval-source-map is faster for development 17 | devtool: '#cheap-module-eval-source-map', 18 | devServer: { 19 | historyApiFallback: true, 20 | noInfo: true 21 | }, 22 | module: { 23 | rules: cssUtils.styleRules({ 24 | sourceMap: config.dev.cssSourceMap, 25 | postcss: true 26 | }) 27 | }, 28 | plugins: [ 29 | new webpack.HotModuleReplacementPlugin(), 30 | new webpack.NoEmitOnErrorsPlugin(), 31 | new HtmlWebpackPlugin({ 32 | filename: 'index.html', 33 | template: 'src/index.html', 34 | inject: true 35 | }), 36 | new FriendlyErrorsPlugin({ 37 | clearConsole: config.dev.clearConsoleOnRebuild 38 | }) 39 | ], 40 | performance: { 41 | hints: false 42 | } 43 | }) 44 | 
-------------------------------------------------------------------------------- /frontend/build/webpack.prod.conf.js: -------------------------------------------------------------------------------- 1 | var 2 | path = require('path'), 3 | config = require('../config'), 4 | cssUtils = require('./css-utils'), 5 | webpack = require('webpack'), 6 | merge = require('webpack-merge'), 7 | baseWebpackConfig = require('./webpack.base.conf'), 8 | ExtractTextPlugin = require('extract-text-webpack-plugin'), 9 | HtmlWebpackPlugin = require('html-webpack-plugin'), 10 | OptimizeCSSPlugin = require('optimize-css-assets-webpack-plugin') 11 | 12 | module.exports = merge(baseWebpackConfig, { 13 | module: { 14 | rules: cssUtils.styleRules({ 15 | sourceMap: config.build.productionSourceMap, 16 | extract: true, 17 | postcss: true 18 | }) 19 | }, 20 | devtool: config.build.productionSourceMap ? '#source-map' : false, 21 | plugins: [ 22 | new webpack.optimize.UglifyJsPlugin({ 23 | sourceMap: config.build.productionSourceMap, 24 | minimize: true, 25 | compress: { 26 | warnings: false 27 | } 28 | }), 29 | // Compress extracted CSS. We are using this plugin so that possible 30 | // duplicated CSS from different components can be deduped. 
31 | new OptimizeCSSPlugin({ 32 | cssProcessorOptions: { 33 | safe: true 34 | } 35 | }), 36 | // extract css into its own file 37 | new ExtractTextPlugin({ 38 | filename: '[name].[contenthash].css' 39 | }), 40 | new HtmlWebpackPlugin({ 41 | filename: path.resolve(__dirname, '../dist/index.html'), 42 | template: 'src/index.html', 43 | inject: true, 44 | minify: { 45 | removeComments: true, 46 | collapseWhitespace: true, 47 | removeAttributeQuotes: true 48 | // more options: 49 | // https://github.com/kangax/html-minifier#options-quick-reference 50 | }, 51 | // necessary to consistently work with multiple chunks via CommonsChunkPlugin 52 | chunksSortMode: 'dependency' 53 | }), 54 | // split vendor js into its own file 55 | new webpack.optimize.CommonsChunkPlugin({ 56 | name: 'vendor', 57 | minChunks: function (module, count) { 58 | // any required modules inside node_modules are extracted to vendor 59 | return ( 60 | module.resource && 61 | /\.js$/.test(module.resource) && 62 | ( 63 | module.resource.indexOf('quasar') > -1 || 64 | module.resource.indexOf( 65 | path.join(__dirname, '../node_modules') 66 | ) === 0 67 | ) 68 | ) 69 | } 70 | }), 71 | // extract webpack runtime and module manifest to its own file in order to 72 | // prevent vendor hash from being updated whenever app bundle is updated 73 | new webpack.optimize.CommonsChunkPlugin({ 74 | name: 'manifest', 75 | chunks: ['vendor'] 76 | }) 77 | ] 78 | }) 79 | -------------------------------------------------------------------------------- /frontend/config/dev.env.js: -------------------------------------------------------------------------------- 1 | var merge = require('webpack-merge') 2 | var prodEnv = require('./prod.env') 3 | 4 | module.exports = merge(prodEnv, { 5 | NODE_ENV: '"development"' 6 | }) 7 | -------------------------------------------------------------------------------- /frontend/config/index.js: -------------------------------------------------------------------------------- 1 | var path = 
require('path') 2 | 3 | module.exports = { 4 | // Webpack aliases 5 | aliases: { 6 | quasar: path.resolve(__dirname, '../node_modules/quasar-framework/'), 7 | src: path.resolve(__dirname, '../src'), 8 | assets: path.resolve(__dirname, '../src/assets'), 9 | '@': path.resolve(__dirname, '../src/components'), 10 | variables: path.resolve(__dirname, '../src/themes/quasar.variables.styl') 11 | }, 12 | 13 | // Progress Bar Webpack plugin format 14 | // https://github.com/clessg/progress-bar-webpack-plugin#options 15 | progressFormat: ' [:bar] ' + ':percent'.bold + ' (:msg)', 16 | 17 | // Default theme to build with ('ios' or 'mat') 18 | defaultTheme: 'mat', 19 | 20 | build: { 21 | env: require('./prod.env'), 22 | publicPath: '', 23 | productionSourceMap: false, 24 | 25 | // Remove unused CSS 26 | // Disable it if it has side-effects for your specific app 27 | purifyCSS: true 28 | }, 29 | dev: { 30 | env: require('./dev.env'), 31 | cssSourceMap: true, 32 | // auto open browser or not 33 | openBrowser: true, 34 | publicPath: '/', 35 | port: 8080, 36 | 37 | // If for example you are using Quasar Play 38 | // to generate a QR code then on each dev (re)compilation 39 | // you need to avoid clearing out the console, so set this 40 | // to "false", otherwise you can set it to "true" to always 41 | // have only the messages regarding your last (re)compilation. 42 | clearConsoleOnRebuild: false, 43 | 44 | // Proxy your API if using any. 
45 | // Also see /build/script.dev.js and search for "proxy api requests" 46 | // https://github.com/chimurai/http-proxy-middleware 47 | proxyTable: { 48 | // proxy all requests starting with /api 49 | '/api': { 50 | target: 'http://localhost:8086/api', 51 | changeOrigin: true, 52 | pathRewrite: { 53 | '^/api': '' 54 | } 55 | } 56 | } 57 | } 58 | } 59 | 60 | /* 61 | * proxyTable example: 62 | * 63 | proxyTable: { 64 | // proxy all requests starting with /api 65 | '/api': { 66 | target: 'https://some.address.com/api', 67 | changeOrigin: true, 68 | pathRewrite: { 69 | '^/api': '' 70 | } 71 | } 72 | } 73 | */ 74 | -------------------------------------------------------------------------------- /frontend/config/prod.env.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | NODE_ENV: '"production"' 3 | } 4 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "quasar-app", 3 | "productName": "Dopplerr", 4 | "version": "0.2.17", 5 | "private": true, 6 | "description": "Dopplerr", 7 | "author": "Your Name ", 8 | "scripts": { 9 | "clean": "node build/script.clean.js", 10 | "dev": "node build/script.dev.js", 11 | "build": "node build/script.build.js", 12 | "lint": "eslint --ext .js,.vue src" 13 | }, 14 | "dependencies": { 15 | "axios": "^0.17.1", 16 | "babel-runtime": "^6.25.0", 17 | "npm-auto-version": "^1.0.0", 18 | "quasar-extras": "0.x", 19 | "quasar-framework": "^0.14.7", 20 | "vue": "^2.5.3", 21 | "vue-axios": "^2.0.2", 22 | "vue-resource": "^1.3.4", 23 | "vue-router": "^3.0.1" 24 | }, 25 | "devDependencies": { 26 | "autoprefixer": "^6.4.0", 27 | "babel-core": "^6.0.0", 28 | "babel-eslint": "^7.0.0", 29 | "babel-loader": "^7.1.2", 30 | "babel-plugin-transform-runtime": "^6.0.0", 31 | "babel-preset-es2015": "^6.0.0", 32 | "babel-preset-stage-2": "^6.0.0", 33 | 
"colors": "^1.1.2", 34 | "connect-history-api-fallback": "^1.5.0", 35 | "css-loader": "^0.28.7", 36 | "es6-promise": "^4.1.1", 37 | "eslint": "^4.11.0", 38 | "eslint-config-standard": "^10.2.1", 39 | "eslint-friendly-formatter": "^3.0.0", 40 | "eslint-loader": "^1.9.0", 41 | "eslint-plugin-html": "^3.2.2", 42 | "eslint-plugin-import": "^2.7.0", 43 | "eslint-plugin-node": "^5.2.1", 44 | "eslint-plugin-promise": "^3.5.0", 45 | "eslint-plugin-standard": "^3.0.1", 46 | "eventsource-polyfill": "^0.9.6", 47 | "express": "^4.16.1", 48 | "extract-text-webpack-plugin": "^3.0.2", 49 | "file-loader": "^0.11.1", 50 | "friendly-errors-webpack-plugin": "^1.1.3", 51 | "glob": "^7.1.2", 52 | "html-webpack-plugin": "^2.30.1", 53 | "http-proxy-middleware": "^0.17.0", 54 | "json-loader": "^0.5.7", 55 | "opn": "^5.0.0", 56 | "optimize-css-assets-webpack-plugin": "^3.2.0", 57 | "postcss-loader": "^1.0.0", 58 | "progress-bar-webpack-plugin": "^1.10.0", 59 | "purify-css": "^1.2.6", 60 | "shelljs": "^0.7.0", 61 | "stylus": "^0.54.5", 62 | "stylus-loader": "^3.0.1", 63 | "url-loader": "^0.5.7", 64 | "vue-loader": "^13.5.0", 65 | "vue-style-loader": "^3.0.3", 66 | "vue-template-compiler": "^2.5.3", 67 | "webpack": "^3.6.0", 68 | "webpack-dev-middleware": "^1.12.0", 69 | "webpack-hot-middleware": "^2.19.1", 70 | "webpack-merge": "^4.1.1" 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /frontend/src/App.vue: -------------------------------------------------------------------------------- 1 | 7 | 8 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /frontend/src/assets/Velocity0_70c.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/assets/Velocity0_70c.jpg -------------------------------------------------------------------------------- 
/frontend/src/assets/dopplerr-32x32.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/assets/dopplerr-32x32.jpg -------------------------------------------------------------------------------- /frontend/src/assets/dopplerr-64x64.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/assets/dopplerr-64x64.jpg -------------------------------------------------------------------------------- /frontend/src/assets/hubblecast43f-thin.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/assets/hubblecast43f-thin.jpg -------------------------------------------------------------------------------- /frontend/src/assets/hubblecast43f-thin.psd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/assets/hubblecast43f-thin.psd -------------------------------------------------------------------------------- /frontend/src/assets/hubblecast43f.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/assets/hubblecast43f.jpg -------------------------------------------------------------------------------- /frontend/src/assets/hubblecast43f0.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/assets/hubblecast43f0.jpg 
-------------------------------------------------------------------------------- /frontend/src/assets/quasar-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/assets/quasar-logo.png -------------------------------------------------------------------------------- /frontend/src/components/404.vue: -------------------------------------------------------------------------------- 1 | 33 | 34 | 54 | 55 | 78 | -------------------------------------------------------------------------------- /frontend/src/components/pages.vue: -------------------------------------------------------------------------------- 1 | 110 | 111 | 174 | 175 | 177 | -------------------------------------------------------------------------------- /frontend/src/components/pages/about.vue: -------------------------------------------------------------------------------- 1 | 57 | 58 | 106 | 107 | 109 | -------------------------------------------------------------------------------- /frontend/src/components/pages/events.vue: -------------------------------------------------------------------------------- 1 | 25 | 26 | 115 | 116 | 121 | -------------------------------------------------------------------------------- /frontend/src/components/pages/home.vue: -------------------------------------------------------------------------------- 1 | 51 | 52 | 206 | 207 | 209 | -------------------------------------------------------------------------------- /frontend/src/components/pages/logs.vue: -------------------------------------------------------------------------------- 1 | 17 | 18 | 103 | 104 | 109 | -------------------------------------------------------------------------------- /frontend/src/components/pages/movies.vue: -------------------------------------------------------------------------------- 1 | 7 | 8 | 15 | 16 | 18 | 
-------------------------------------------------------------------------------- /frontend/src/components/pages/series.vue: -------------------------------------------------------------------------------- 1 | 11 | 12 | 113 | 114 | 116 | -------------------------------------------------------------------------------- /frontend/src/components/pages/status.vue: -------------------------------------------------------------------------------- 1 | 91 | 92 | 160 | 161 | 163 | -------------------------------------------------------------------------------- /frontend/src/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 10 | 14 | 18 | Dopplerr 19 | 20 | 27 | 34 | 35 | 39 | 43 | 47 | 54 | 61 | 65 | 66 | 67 | 73 | 79 | 87 | 95 | 96 | 100 | 104 | 105 | 106 |
107 | 108 | 109 | 110 | -------------------------------------------------------------------------------- /frontend/src/main.js: -------------------------------------------------------------------------------- 1 | // === DEFAULT / CUSTOM STYLE === 2 | // WARNING! always comment out ONE of the two require() calls below. 3 | // 1. use next line to activate CUSTOM STYLE (./src/themes) 4 | require(`./themes/app.${__THEME}.styl`) 5 | // 2. or, use next line to activate DEFAULT QUASAR STYLE 6 | // require(`quasar/dist/quasar.${__THEME}.css`) 7 | // ============================== 8 | 9 | // Uncomment the following lines if you need IE11/Edge support 10 | // require(`quasar/dist/quasar.ie`) 11 | // require(`quasar/dist/quasar.ie.${__THEME}.css`) 12 | 13 | import Vue from 'vue' 14 | import Quasar from 'quasar' 15 | import router from './router' 16 | import axios from 'axios' 17 | import VueAxios from 'vue-axios' 18 | 19 | Vue.config.productionTip = false 20 | Vue.use(Quasar) // Install Quasar Framework 21 | Vue.use(VueAxios, axios) // Install Axios HTTP Framework 22 | 23 | if (__THEME === 'mat') { 24 | require('quasar-extras/roboto-font') 25 | } 26 | import 'quasar-extras/material-icons' 27 | // import 'quasar-extras/ionicons' 28 | import 'quasar-extras/fontawesome' 29 | // import 'quasar-extras/animate' 30 | 31 | Quasar.start(() => { 32 | /* eslint-disable no-new */ 33 | new Vue({ 34 | el: '#q-app', 35 | router, 36 | render: h => h(require('./App').default) 37 | }) 38 | }) 39 | -------------------------------------------------------------------------------- /frontend/src/router.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue' 2 | import VueRouter from 'vue-router' 3 | 4 | Vue.use(VueRouter) 5 | 6 | function load (component) { 7 | // '@' is aliased to src/components 8 | return () => import(`@/${component}.vue`) 9 | } 10 | 11 | export default new VueRouter({ 12 | /* 13 | * NOTE! 
VueRouter "history" mode DOESN'T works for Cordova builds, 14 | * it is only to be used only for websites. 15 | * 16 | * If you decide to go with "history" mode, please also open /config/index.js 17 | * and set "build.publicPath" to something other than an empty string. 18 | * Example: '/' instead of current '' 19 | * 20 | * If switching back to default "hash" mode, don't forget to set the 21 | * build publicPath back to '' so Cordova builds work again. 22 | */ 23 | 24 | mode: 'hash', 25 | scrollBehavior: () => ({ y: 0 }), 26 | 27 | routes: [ 28 | { 29 | path: '/', 30 | redirect: '/home', 31 | }, 32 | { 33 | path: '/home', 34 | component: load('pages'), 35 | // sub-routes 36 | children: [ 37 | { path: '/home', component: load('pages/home') }, 38 | { path: '/events', component: load('pages/events') }, 39 | { path: '/movies', component: load('pages/movies') }, 40 | { path: '/series', component: load('pages/series') }, 41 | { path: '/status', component: load('pages/status') }, 42 | { path: '/logs', component: load('pages/logs') }, 43 | { path: '/about', component: load('pages/about') }, 44 | { path: '*', component: load('404') }, // Not found 45 | ] 46 | }, 47 | 48 | // Always leave this last one 49 | { path: '*', component: load('404') }, // Not found 50 | ] 51 | }) 52 | -------------------------------------------------------------------------------- /frontend/src/statics/apple-icon-152x152.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/statics/apple-icon-152x152.png -------------------------------------------------------------------------------- /frontend/src/statics/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/statics/favicon-16x16.png 
-------------------------------------------------------------------------------- /frontend/src/statics/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/statics/favicon-32x32.png -------------------------------------------------------------------------------- /frontend/src/statics/hubblecast43f-thin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/statics/hubblecast43f-thin.png -------------------------------------------------------------------------------- /frontend/src/statics/icon-192x192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/statics/icon-192x192.png -------------------------------------------------------------------------------- /frontend/src/statics/icon-512x512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/statics/icon-512x512.png -------------------------------------------------------------------------------- /frontend/src/statics/ms-icon-144x144.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gsemet/dopplerr/6a2124e1b8b41d0b2ec4845a42b3db9aa10b5702/frontend/src/statics/ms-icon-144x144.png -------------------------------------------------------------------------------- /frontend/src/themes/app.ios.styl: -------------------------------------------------------------------------------- 1 | // This file is included in the build if src/main.js imports it. 2 | // Otherwise the default iOS CSS file is bundled. 
3 | // Check "DEFAULT / CUSTOM STYLE" in src/main.js 4 | 5 | // App Shared Variables 6 | // -------------------------------------------------- 7 | // Shared Stylus variables go in the app.variables.styl file 8 | @import 'app.variables' 9 | 10 | // Quasar iOS Design Stylus 11 | // -------------------------------------------------- 12 | // Custom App variables must be declared before importing Quasar. 13 | // Quasar will use its default values when a custom variable isn't provided. 14 | @import '~quasar-framework/dist/quasar.ios.styl' 15 | -------------------------------------------------------------------------------- /frontend/src/themes/app.mat.styl: -------------------------------------------------------------------------------- 1 | // This file is included in the build if src/main.js imports it. 2 | // Otherwise the default Material CSS file is bundled. 3 | // Check "DEFAULT / CUSTOM STYLE" in src/main.js 4 | 5 | // App Shared Variables 6 | // -------------------------------------------------- 7 | // Shared Stylus variables go in the app.variables.styl file 8 | @import 'app.variables' 9 | 10 | // Quasar Material Design Stylus 11 | // -------------------------------------------------- 12 | // Custom App variables must be declared before importing Quasar. 13 | // Quasar will use its default values when a custom variable isn't provided. 14 | @import '~quasar-framework/dist/quasar.mat.styl' 15 | -------------------------------------------------------------------------------- /frontend/src/themes/app.variables.styl: -------------------------------------------------------------------------------- 1 | // This file is included in the build if src/main.js imports 2 | // either app.mat.styl or app.ios.styl. 
3 | // Check "DEFAULT / CUSTOM STYLE" in src/main.js 4 | 5 | // App Shared Variables 6 | // -------------------------------------------------- 7 | // To customize the look and feel of this app, you can override 8 | // the Stylus variables found in Quasar's source Stylus files. Setting 9 | // variables before Quasar's Stylus will use these variables rather than 10 | // Quasar's default Stylus variable values. Stylus variables specific 11 | // to the themes belong in either the app.ios.styl or app.mat.styl files. 12 | 13 | 14 | // App Shared Color Variables 15 | // -------------------------------------------------- 16 | // It's highly recommended to change the default colors 17 | // to match your app's branding. 18 | 19 | // color calculator: https://www.sessions.edu/color-calculator/ 20 | 21 | $primary = #026301 22 | $secondary = #713701 23 | $tertiary = #6f0171 24 | 25 | $neutral = #E0E1E2 26 | $positive = #21BA45 27 | $negative = #DB2828 28 | $info = #31CCEC 29 | $warning = #F2C037 30 | -------------------------------------------------------------------------------- /frontend/src/themes/quasar.variables.styl: -------------------------------------------------------------------------------- 1 | // 2 | // Webpack alias "variables" points to this file. 3 | // So you can import it in your app's *.vue files 4 | // inside the 12 | 13 | 14 | // First we load app's Stylus variables 15 | @import 'app.variables' 16 | 17 | // Then we load Quasar Stylus variables. 18 | // Any variables defined in "app.variables.styl" 19 | // will override Quasar's ones. 20 | // 21 | // NOTICE that we only import Core Quasar Variables 22 | // like colors, media breakpoints, and so. 23 | // No component variable will be included. 
24 | @import '~quasar/dist/core.variables' 25 | -------------------------------------------------------------------------------- /frontend/templates/component.vue: -------------------------------------------------------------------------------- 1 | 4 | 5 | 12 | 13 | 15 | -------------------------------------------------------------------------------- /frontend/templates/layout.vue: -------------------------------------------------------------------------------- 1 | 60 | 61 | 68 | 69 | 71 | -------------------------------------------------------------------------------- /frontend/templates/page.vue: -------------------------------------------------------------------------------- 1 | 7 | 8 | 15 | 16 | 18 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # This requirements files has been automatically generated from `Pipfile` with 3 | # `pipenv-to-requirements` 4 | # 5 | ## This has been done to maintain backward compatibility with tools and services 6 | # that does not support `Pipfile` yet. 7 | ## Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and 8 | # `Pipfile.lock` 9 | ################################################################################ 10 | 11 | -e . 
12 | asynctest 13 | autopep8 14 | cryptography 15 | flake8 16 | flake8-coding 17 | flake8-comprehensions 18 | flake8-docstrings 19 | flake8-logging-format 20 | isort 21 | mypy 22 | pandoc 23 | pipenv-to-requirements>=0.1.9 24 | pyannotate 25 | pylint 26 | pytest 27 | pytest-asyncio 28 | pytest-catchlog 29 | pytest-sugar 30 | pyyaml 31 | tox 32 | yapf -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | ################################################################################ 2 | # This requirements files has been automatically generated from `Pipfile` with 3 | # `pipenv-to-requirements` 4 | # 5 | ## This has been done to maintain backward compatibility with tools and services 6 | # that does not support `Pipfile` yet. 7 | ## Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and 8 | # `Pipfile.lock` 9 | ################################################################################ 10 | 11 | aiofiles 12 | aiohttp 13 | apscheduler 14 | apsw 15 | colorlog 16 | pbr 17 | peewee 18 | sanic 19 | sanic-transmute 20 | setuptools!=36.0.0 21 | subliminal -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = dopplerr 3 | summary = Subtitle Download Web Service for Sonarr 4 | description-file = README.rst 5 | author = Gaetan Semet 6 | author-email = gaetan@xeberon.net 7 | home-page = https://github.com/Stibbons/dopplerr 8 | classifier = 9 | Intended Audience :: Information Technology 10 | Intended Audience :: System Administrators 11 | License :: OSI Approved :: MIT License 12 | Operating System :: POSIX :: Linux 13 | Programming Language :: Python 14 | Programming Language :: Python :: 3.6 15 | Topic :: Multimedia :: Video 16 | 17 | [files] 18 | packages = 19 | dopplerr 20 | 
cfgtree 21 | data-files = frontend = frontend/dist/* 22 | 23 | [entry_points] 24 | console_scripts = 25 | dopplerr = dopplerr.main:main 26 | 27 | [build_sphinx] 28 | source-dir = doc/source 29 | build-dir = doc/build 30 | all_files = 1 31 | 32 | [upload_sphinx] 33 | upload-dir = doc/build/html 34 | 35 | [pbr] 36 | warnerrors = True 37 | 38 | [wheel] 39 | universal = 1 40 | 41 | [bdist_wheel] 42 | # This flag says that the code is written to work on both Python 2 and Python 43 | # 3. If at all possible, it is good practice to do this. If you cannot, you 44 | # will need to generate wheels for each Python version that you support. 45 | universal=1 46 | 47 | [pep8] 48 | max-line-length = 100 49 | 50 | [tool:pytest] 51 | junit_suite_name = dopplerr 52 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | 3 | # Third Party Libraries 4 | import setuptools 5 | 6 | # In python < 2.7.4, a lazy loading of package `pbr` will break 7 | # setuptools if some other modules registered functions in `atexit`. 8 | # solution from: http://bugs.python.org/issue15881#msg170215 9 | try: 10 | import multiprocessing # noqa 11 | except ImportError: 12 | pass 13 | 14 | setuptools.setup(setup_requires=['pbr'], 15 | pbr=True) 16 | --------------------------------------------------------------------------------