├── qlikflow
│   ├── version.py
│   ├── __init__.py
│   └── qlikflow.py
├── docker
│   ├── dags
│   │   ├── readme.md
│   │   └── simpledagfile.py
│   ├── logs
│   │   └── readme.md
│   ├── plugins
│   │   └── readme.md
│   ├── cert
│   │   └── readme.md
│   ├── dockerfile
│   ├── config
│   │   └── config.json
│   └── docker-compose.yaml
├── config.json
├── pyproject.toml
├── simpledagfile.py
├── CHANGELOG.md
├── .gitignore
├── README.md
├── config_generator.py
├── doc
│   └── readme.md
└── LICENSE

--------------------------------------------------------------------------------
/qlikflow/version.py:
--------------------------------------------------------------------------------
__version__ = '1.0.14'

--------------------------------------------------------------------------------
/docker/dags/readme.md:
--------------------------------------------------------------------------------
# Description

Folder for DAG-files

--------------------------------------------------------------------------------
/docker/logs/readme.md:
--------------------------------------------------------------------------------
# Description

Folder for Airflow logs

--------------------------------------------------------------------------------
/docker/plugins/readme.md:
--------------------------------------------------------------------------------
# Description

Folder for Airflow plugins

--------------------------------------------------------------------------------
/docker/cert/readme.md:
--------------------------------------------------------------------------------
# Description

Folder for Qlik Sense certificates

--------------------------------------------------------------------------------
/qlikflow/__init__.py:
--------------------------------------------------------------------------------
from .qlikflow import create_tasks, qs_run_task, qv_run_task, np_run_task
from .version import __version__

NAME = 'qlikflow'

--------------------------------------------------------------------------------
/docker/dockerfile:
--------------------------------------------------------------------------------
FROM apache/airflow:latest
RUN pip install apache-airflow-providers-telegram --user
RUN pip install --no-cache-dir qlikflow==1.0.14 -U --user

--------------------------------------------------------------------------------
/config.json:
--------------------------------------------------------------------------------
{"sleep": {"default_pool": "sensors"}, "np100": {"server": "https://server15.domain.com", "credential": "domain\\username", "password": "password", "default_pool": "np1_default_pool"}, "qv1asd": {"server": "http://server3.domain.com", "port": "4799", "extraurl": "/QMS/Service", "username": "username", "password": "password", "default_pool": "qv1_default_pool"}, "qs1": {"server": "https://server2.domain.com", "username": "domain\\username", "password": "password", "default_pool": "qs1_default_pool", "certificate": ["client.pem", "client_key.pem"]}, "telegram": {"token": ""}}

--------------------------------------------------------------------------------
/docker/config/config.json:
--------------------------------------------------------------------------------
{"sleep": {"default_pool": "sensors"}, "np100": {"server": "https://server15.domain.com", "credential": "domain\\username", "password": "password", "default_pool": "np1_default_pool"}, "qv1asd": {"server": "http://server3.domain.com", "port": "4799", "extraurl": "/QMS/Service", "username": "username", "password": "password", "default_pool": "qv1_default_pool"}, "qs1": {"server": "https://server2.domain.com",
"username": "domain\\username", "password": "password", "default_pool": "qs1_default_pool", "certificate": ["client.pem", "client_key.pem"]}, "telegram": {"token": ""}} -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "qlikflow" 3 | version = "1.0.14" 4 | description = "This module allows you to create simple Apache Airflow DAG files-constructors for QlikView, Qlik Sense and NPrinting." 5 | authors = ["bintocher "] 6 | license = "Apache-2.0" 7 | repository = "https://github.com/bintocher/qlikflow" 8 | documentation = "https://github.com/bintocher/qlikflow/blob/main/doc/readme.md" 9 | readme = "README.md" 10 | include = ["CHANGELOG.md"] 11 | 12 | [tool.poetry.dependencies] 13 | python = "^3.6" 14 | 15 | requests = "^2.25.1" 16 | requests-ntlm = "^1.1.0" 17 | zeep = "^4.0.0" 18 | apache-airflow-providers-telegram = "^1.0.2" 19 | 20 | [tool.poetry.dev-dependencies] 21 | 22 | [build-system] 23 | requires = ["poetry-core>=1.0.0"] 24 | build-backend = "poetry.core.masonry.api" 25 | -------------------------------------------------------------------------------- /simpledagfile.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Stanislav Chernov 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | """ 16 | 17 | from airflow import DAG 18 | from airflow.utils.dates import days_ago 19 | from qlikflow import qlikflow 20 | from datetime import datetime 21 | 22 | 23 | tasksDict = { 24 | u'qliksense. Test task': { 25 | 'Soft' : 'qs1', 26 | 'TaskId' : 'c5d80e71-f574-4655-8874-3a6e2aed6218', 27 | 'RandomStartDelay' : 10, 28 | }, 29 | u'np100. run nprinting tasks' : { 30 | 'Soft' : 'np100', 31 | 'TaskId' : [ 32 | 'taskid1', 33 | 'taskid2', 34 | 'taskid3', 35 | 'taskid4', 36 | ], 37 | 'Dep' : { 38 | u'qliksense. Test task', 39 | } 40 | } 41 | } 42 | 43 | default_args = { 44 | 'owner': 'test', 45 | 'depends_on_past': False, 46 | } 47 | 48 | dag = DAG( 49 | dag_id = '_my_test_dag', 50 | default_args = default_args , 51 | start_date = days_ago(1), 52 | schedule_interval = '@daily', 53 | description = 'Default test dag', 54 | tags = ['qliksense', 'testing'], 55 | catchup = False 56 | ) 57 | 58 | airflowTasksDict = {} 59 | qlikflow.create_tasks(tasksDict, airflowTasksDict, dag) -------------------------------------------------------------------------------- /docker/dags/simpledagfile.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Stanislav Chernov 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

from airflow import DAG
from airflow.utils.dates import days_ago
from qlikflow import qlikflow
from datetime import datetime


tasksDict = {
    u'qliksense. Test task': {
        'Soft' : 'qs1',
        'TaskId' : 'c5d80e71-f574-4655-8874-3a6e2aed6218',
        'RandomStartDelay' : 10,
    },
    u'np100. run nprinting tasks' : {
        'Soft' : 'np100',
        'TaskId' : [
            'taskid1',
            'taskid2',
            'taskid3',
            'taskid4',
        ],
        'Dep' : {
            u'qliksense. Test task',
        }
    }
}

default_args = {
    'owner': 'test',
    'depends_on_past': False,
}

dag = DAG(
    dag_id = '_my_test_dag',
    default_args = default_args ,
    start_date = days_ago(1),
    schedule_interval = '@daily',
    description = 'Default test dag',
    tags = ['qliksense', 'testing'],
    catchup = False
)

airflowTasksDict = {}
qlikflow.create_tasks(tasksDict, airflowTasksDict, dag)

--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
# Changelog

## version 1.0.14

### Fix

- fix an error when the 'delete_files' parameter is not set

## version 1.0.13

### Add

- new parameter "DeleteFiles", list-type, runs after the task completes; used in qv/qs/np (see the sketch below)

On a Linux system you can easily mount Windows shared folders or whole disks, [link](https://www.google.com/search?q=linux+mount+windows+share)
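A hedged sketch of how this parameter might look in a DAG's tasksDict entry (the key name follows this changelog entry; the file path is a placeholder):

``` python
tasksDict = {
    u'qs1. Reload and clean up': {
        'Soft' : 'qs1',
        'TaskId' : 'c5d80e71-f574-4655-8874-3a6e2aed6218',
        # files removed after the task completes (placeholder path)
        'DeleteFiles' : [
            '/mnt/share/export/old_report.csv',
        ],
    },
}
```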
## version 1.0.12

### Change

- the config.json file is now read when the task starts; previously it was read when the DAG started

### Fix

- the Airflow task completed without waiting for the task to finish on the Qlik Sense server

## version 1.0.11

- some fixes, disable debug messages

## version 1.0.10

testing version

### Fix

- remove unused modules

## version 1.0.9

### Add

- update documentation files; for config.json, the DAG-creation dict{} and the folders in the docker folder

## version 1.0.8

### Fix

- fix error `File "/home/airflow/.local/lib/python3.6/site-packages/qlikflow/qlikflow.py", line 459, in create_aftask`

### Add

- add `dockerfile` and update `docker-compose.yaml`

## version 1.0.7

### Add

- add dependency apache-airflow[telegram]

- in `readme.md`, add shields for the git repository and PyPI installs

### Change

- remove the "root" param in certs for Qlik Sense servers

### Fix

- fix the path for Qlik Sense certificates


## version 1.0.6

- fix config.json location to AIRFLOW_HOME/config/

- fix certs location to AIRFLOW_HOME/cert/

## version 1.0.5

- lowered versions for required dependencies

- exclude airflow deps from install

## version 1.0.4

- update the config.json folder from the AIRFLOW_HOME env

## version 1.0.3

- update documentation info in the pip package

## version 1.0.2

- add package dependencies

## version 1.0.1

- remove unused files

## version 1.0.0

- initial commit

- work with Qlik Sense, QlikView and NPrinting tasks

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
build.cmd

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
docker/logs/scheduler/latest

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
![GitHub stars](https://img.shields.io/github/stars/bintocher/qlikflow.svg)
![GitHub contributors](https://img.shields.io/github/contributors/bintocher/qlikflow.svg)
![GitHub license](https://img.shields.io/github/license/bintocher/qlikflow.svg)
![PyPI installs](https://img.shields.io/pypi/dm/qlikflow)
![Last commit](https://img.shields.io/github/last-commit/bintocher/qlikflow)
![Issues](https://img.shields.io/github/issues/bintocher/qlikflow)

# qlikflow

This module allows you to create simple, constructor-style Apache Airflow DAG files for QlikView, Qlik Sense and NPrinting.

## Information files

- Changelog : https://github.com/bintocher/qlikflow/blob/main/CHANGELOG.md

- Manual(en) : https://github.com/bintocher/qlikflow/blob/main/doc/readme.md

- This readme : https://github.com/bintocher/qlikflow/blob/main/README.md

## Install

``` bash
pip3 install qlikflow
```

## Upgrade

``` bash
pip3 install qlikflow -U
```

## Create config-file

Open ``config_generator.py`` in your IDE, adjust the settings, and save the script.

Then run the script to create the ``config.json`` file, as shown below.

Put this ``config.json`` file on your Apache Airflow server in the ``AIRFLOW_HOME/config/`` folder.
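A minimal sketch of these two steps, assuming the script is run locally and ``AIRFLOW_HOME`` is set on the target server:

``` bash
# generate config.json next to the script
python3 config_generator.py

# copy the generated file to the Airflow config folder
cp config.json $AIRFLOW_HOME/config/config.json
```
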
28 | "np100": { #nprinting server 29 | # Full FQDN server name with http[s] prefix 30 | "server": "https://server15.domain.com", 31 | # Your domain and account names 32 | "credential": "domain\\username", 33 | # Domain user password 34 | "password": "password", 35 | # the name of default pool, they are created at http://yourariflow.server.com:8080/pool/list/ 36 | "default_pool": "np1_default_pool" 37 | }, 38 | "qv1asd": { # qlikview server 39 | # Full FQDN server name with http[s] prefix 40 | "server" : "http://server3.domain.com", 41 | # QlikView Management Service API port 42 | "port" : "4799", 43 | # QlikView Management Service API url path 44 | "extraurl" : "/QMS/Service", 45 | # username without domain! 46 | "username" : "username", 47 | # user password 48 | "password" : "password", 49 | # the name of default pool, they are created at http://yourariflow.server.com:8080/pool/list/ 50 | "default_pool" : "qv1_default_pool", 51 | }, 52 | "qs1": { # qlik sense server 53 | "server" : "https://server2.domain.com", 54 | # your domain and username 55 | "username" : "domain\\username", 56 | # user password 57 | "password" : "password", 58 | # the name of default pool, they are created at http://yourariflow.server.com:8080/pool/list/ 59 | "default_pool" : "qs1_default_pool", 60 | # Qlik Sense exported certificates path's 61 | # You can get it from https://qliksense.server.name/qmc/certificates 62 | # Export in PEM-format and then put it to airflow server 63 | # put it in AIRFLOW_HOME/cert folder 64 | "certificate" : ('client.pem', 'client_key.pem'), 65 | # "root_cert" : 'root.pem', 66 | }, 67 | # Telegram parameter used for send messages to telegram channels 68 | # Create a new bot from https://t.me/botfather 69 | # And enter token here 70 | # Chat id's you can get from @userinfobot (https://github.com/nadam/userinfobot) 71 | # and use it in tasks dicts 72 | "telegram" : { 73 | "token" : "", # put here telegram bot api token 74 | }, 75 | } 76 | dir_path = os.path.dirname(os.path.realpath(__file__)) 77 | with open(dir_path + '\\config.json', 'w') as f: 78 | json.dump(config, f) 79 | 80 | if __name__ == "__main__": 81 | # config = read_config() 82 | save_config() 83 | config = read_config() 84 | print (config) 85 | -------------------------------------------------------------------------------- /doc/readme.md: -------------------------------------------------------------------------------- 1 | # Documentation 2 | 3 | In progress... 4 | 5 | 6 | ## How to configure tasksDict = {} in DAG files 7 | 8 | ``` python 9 | tasksDict = { 10 | # Name of Task 11 | u'NP. Monitoring': { 12 | # ----- 13 | # Pool for this task, if not set - will be used default from config.json 14 | 'Pool' : 'default_pool', 15 | # ----- 16 | # Soft name, 2 left char will select python_operator function for run task 17 | # Required and possible values: np , qs , qv, sleep 18 | # After the name - any characters, it is important that the names written here match the names in config.json 19 | 'Soft' : 'np1', 20 | # 'Soft' : 'qs-dev', 21 | # 'Soft' : 'qstesting', 22 | # ----- 23 | # GUID(s) of task to run 24 | # in qs - you can see it in taks page, in np - we use "App GUIDs", in qv - look at dictribution folder or qlikview in xml's 25 | 'TaskId' : 'bc58a89f-4401-4769-abbd-f515e13d386e', 26 | # you can use lists to create tasks with the same parameters. 
        # very convenient to use for NPrinting tasks
        # 'TaskId' : [
        #     'task1_guid',
        #     'task2_guid',
        #     'task3_guid',
        #     'task4_guid',
        #     'task5_guid',
        # ],
        # -----
        # start time: hour, minute - without leading zeros
        # this option creates a predecessor task that waits until the specified time
        # do not create too many tasks with this parameter, because it consumes slots in Airflow pools
        'StartTime' : [5,27] ,
        # -----
        # wait for the listed predecessor tasks - this task will not start running until they have completed
        'Dep' : [
            u'qs-dev.task1',
            u'ds-test.task2',
        ] ,
        # -----
        # used only for NPrinting
        # if the task completes with a warning status, fail this task; otherwise mark it complete
        'WarningIsFail' : True,
        # -----
        # send emails on task failure
        # SMTP settings must be configured in the airflow.cfg file
        'OnFail' : { 'mail' : 'schernov1@gmail.com'}, # mail address to send the msg to
        # -----
        # for telegram:
        # if the task finishes without errors, you will receive a message
        # you can send messages to yourself or to groups or channels
        # the chat ID can be obtained from the bot @ShowJsonBot
        # if you add it to a group, it will write full information about it, including the Chat ID - which we need
        # if you forward a message to the bot, it will show information about this message and the ID of its sender
        'OnSuccess' : { 'telegram' : '585108837'},
        # -----
        # the number of attempts to perform the task in case of errors
        'Retries_count' : 10,
        # -----
        # the interval for restarting the task after an execution error, in seconds
        'Retries_delay' : 30,
        # -----
        # the restart interval automatically increases exponentially on each subsequent attempt
        'Retries_ExponentialDelay' : True, # or False
        # -----
        # a random delay of up to the specified number of seconds before the task starts
        'RandomStartDelay' : 10, # random interval 0.0001 < 10 seconds
        #'RandomStartDelay' : 100, # random interval 0.0001 < 100 seconds
        # -----
    },
}
```

``` python
tasksDict = {
    # Name of Task
    u'Virtual sleep' : {
        # this creates a sleep timer
        'Soft' : 'sleep',
        # name to display on the charts in Airflow
        'TaskId' : u'this will be the task name',
        # waiting time, in seconds
        'Seconds' : 10 ,
        # wait for the listed predecessor tasks - this task will not start running until they have completed
        'Dep' : [
            u'QS1. Test hidden',
        ],
    },
}
```
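To tie these parameters together, a hedged sketch of a small two-task chain (the GUIDs and task names are placeholders):

``` python
tasksDict = {
    u'QS1. Morning reload': {
        'Soft' : 'qs1',
        # placeholder GUID of a Qlik Sense reload task
        'TaskId' : '00000000-0000-0000-0000-000000000000',
        # do not start before 05:27
        'StartTime' : [5,27],
        'Retries_count' : 3,
        'Retries_delay' : 60,
    },
    u'NP100. Morning reports': {
        'Soft' : 'np100',
        # placeholder NPrinting app GUID
        'TaskId' : '11111111-1111-1111-1111-111111111111',
        # waits for the reload above; the value must match its task-name key
        'Dep' : [
            u'QS1. Morning reload',
        ],
        'WarningIsFail' : True,
    },
}
```
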
114 | "np100": { #nprinting server 115 | # Full FQDN server name with http[s] prefix 116 | "server": "https://server15.domain.com", 117 | # Your domain and account names 118 | "credential": "domain\\username", 119 | # Domain user password 120 | "password": "password", 121 | # the name of default pool, they are created at http://yourariflow.server.com:8080/pool/list/ 122 | "default_pool": "np1_default_pool" 123 | }, 124 | "qv1asd": { # qlikview server 125 | # Full FQDN server name with http[s] prefix 126 | "server" : "http://server3.domain.com", 127 | # QlikView Management Service API port 128 | "port" : "4799", 129 | # QlikView Management Service API url path 130 | "extraurl" : "/QMS/Service", 131 | # username without domain! 132 | "username" : "username", 133 | # user password 134 | "password" : "password", 135 | # the name of default pool, they are created at http://yourariflow.server.com:8080/pool/list/ 136 | "default_pool" : "qv1_default_pool", 137 | }, 138 | "qs1": { # qlik sense server 139 | "server" : "https://server2.domain.com", 140 | # your domain and username 141 | "username" : "domain\\username", 142 | # user password 143 | "password" : "password", 144 | # the name of default pool, they are created at http://yourariflow.server.com:8080/pool/list/ 145 | "default_pool" : "qs1_default_pool", 146 | # Qlik Sense exported certificates path's 147 | # You can get it from https://qliksense.server.name/qmc/certificates 148 | # Export in PEM-format and then put it to airflow server 149 | # put it in AIRFLOW_HOME/cert folder 150 | "certificate" : ('client.pem', 'client_key.pem'), 151 | # "root_cert" : 'root.pem', 152 | }, 153 | # Telegram parameter used for send messages to telegram channels 154 | # Create a new bot from https://t.me/botfather 155 | # And enter token here 156 | # Chat id's you can get from @userinfobot (https://github.com/nadam/userinfobot) 157 | # and use it in tasks dicts 158 | "telegram" : { 159 | "token" : "", # put here telegram bot api token 160 | }, 161 | } 162 | ``` 163 | -------------------------------------------------------------------------------- /docker/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one 2 | # or more contributor license agreements. See the NOTICE file 3 | # distributed with this work for additional information 4 | # regarding copyright ownership. The ASF licenses this file 5 | # to you under the Apache License, Version 2.0 (the 6 | # "License"); you may not use this file except in compliance 7 | # with the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, 12 | # software distributed under the License is distributed on an 13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 14 | # KIND, either express or implied. See the License for the 15 | # specific language governing permissions and limitations 16 | # under the License. 17 | # 18 | 19 | # Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL. 20 | # 21 | # WARNING: This configuration is for local development. Do not use it in a production deployment. 22 | # 23 | # This configuration supports basic configuration using environment variables or an .env file 24 | # The following variables are supported: 25 | # 26 | # AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow. 
27 | # Default: apache/airflow:2.2.3 28 | # AIRFLOW_UID - User ID in Airflow containers 29 | # Default: 50000 30 | # Those configurations are useful mostly in case of standalone testing/running Airflow in test/try-out mode 31 | # 32 | # _AIRFLOW_WWW_USER_USERNAME - Username for the administrator account (if requested). 33 | # Default: airflow 34 | # _AIRFLOW_WWW_USER_PASSWORD - Password for the administrator account (if requested). 35 | # Default: airflow 36 | # _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers. 37 | # Default: '' 38 | # 39 | # Feel free to modify this file to suit your needs. 40 | --- 41 | version: '3' 42 | x-airflow-common: 43 | &airflow-common 44 | # In order to add custom dependencies or upgrade provider packages you can use your extended image. 45 | # Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml 46 | # and uncomment the "build" line below, Then run `docker-compose build` to build the images. 47 | image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:latest} 48 | # build: . 49 | environment: 50 | &airflow-common-env 51 | AIRFLOW__CORE__EXECUTOR: CeleryExecutor 52 | AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow 53 | AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow 54 | AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0 55 | AIRFLOW__CORE__FERNET_KEY: '' 56 | AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true' 57 | AIRFLOW__CORE__LOAD_EXAMPLES: 'false' 58 | AIRFLOW__API__AUTH_BACKEND: 'airflow.api.auth.backend.basic_auth' 59 | _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:- qlikflow apache-airflow-providers-telegram} 60 | volumes: 61 | - ./dags:/opt/airflow/dags 62 | - ./logs:/opt/airflow/logs 63 | - ./cert:/opt/airflow/cert 64 | - ./config:/opt/airflow/config 65 | - ./plugins:/opt/airflow/plugins 66 | user: "${AIRFLOW_UID:-50000}:0" 67 | depends_on: 68 | &airflow-common-depends-on 69 | redis: 70 | condition: service_healthy 71 | postgres: 72 | condition: service_healthy 73 | 74 | services: 75 | postgres: 76 | image: postgres:13 77 | environment: 78 | POSTGRES_USER: airflow 79 | POSTGRES_PASSWORD: airflow 80 | POSTGRES_DB: airflow 81 | volumes: 82 | - postgres-db-volume:/var/lib/postgresql/data 83 | healthcheck: 84 | test: ["CMD", "pg_isready", "-U", "airflow"] 85 | interval: 5s 86 | retries: 5 87 | restart: always 88 | 89 | redis: 90 | image: redis:latest 91 | expose: 92 | - 6379 93 | healthcheck: 94 | test: ["CMD", "redis-cli", "ping"] 95 | interval: 5s 96 | timeout: 30s 97 | retries: 50 98 | restart: always 99 | 100 | airflow-webserver: 101 | <<: *airflow-common 102 | command: webserver 103 | ports: 104 | - 8080:8080 105 | healthcheck: 106 | test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] 107 | interval: 10s 108 | timeout: 10s 109 | retries: 5 110 | restart: always 111 | depends_on: 112 | <<: *airflow-common-depends-on 113 | airflow-init: 114 | condition: service_completed_successfully 115 | 116 | airflow-scheduler: 117 | <<: *airflow-common 118 | command: scheduler 119 | healthcheck: 120 | test: ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"'] 121 | interval: 10s 122 | timeout: 10s 123 | retries: 5 124 | restart: always 125 | depends_on: 126 | <<: *airflow-common-depends-on 127 | airflow-init: 128 | condition: service_completed_successfully 129 | 130 | airflow-worker: 131 | <<: *airflow-common 132 | command: celery worker 133 | 
healthcheck: 134 | test: 135 | - "CMD-SHELL" 136 | - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"' 137 | interval: 10s 138 | timeout: 10s 139 | retries: 5 140 | environment: 141 | <<: *airflow-common-env 142 | # Required to handle warm shutdown of the celery workers properly 143 | # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation 144 | DUMB_INIT_SETSID: "0" 145 | restart: always 146 | depends_on: 147 | <<: *airflow-common-depends-on 148 | airflow-init: 149 | condition: service_completed_successfully 150 | 151 | airflow-triggerer: 152 | <<: *airflow-common 153 | command: triggerer 154 | healthcheck: 155 | test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"'] 156 | interval: 10s 157 | timeout: 10s 158 | retries: 5 159 | restart: always 160 | depends_on: 161 | <<: *airflow-common-depends-on 162 | airflow-init: 163 | condition: service_completed_successfully 164 | 165 | airflow-init: 166 | <<: *airflow-common 167 | entrypoint: /bin/bash 168 | # yamllint disable rule:line-length 169 | command: 170 | - -c 171 | - | 172 | function ver() { 173 | printf "%04d%04d%04d%04d" $${1//./ } 174 | } 175 | airflow_version=$$(gosu airflow airflow version) 176 | airflow_version_comparable=$$(ver $${airflow_version}) 177 | min_airflow_version=2.2.0 178 | min_airflow_version_comparable=$$(ver $${min_airflow_version}) 179 | if (( airflow_version_comparable < min_airflow_version_comparable )); then 180 | echo 181 | echo -e "\033[1;31mERROR!!!: Too old Airflow version $${airflow_version}!\e[0m" 182 | echo "The minimum Airflow version supported: $${min_airflow_version}. Only use this or higher!" 183 | echo 184 | exit 1 185 | fi 186 | if [[ -z "${AIRFLOW_UID}" ]]; then 187 | echo 188 | echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m" 189 | echo "If you are on Linux, you SHOULD follow the instructions below to set " 190 | echo "AIRFLOW_UID environment variable, otherwise files will be owned by root." 191 | echo "For other operating systems you can get rid of the warning with manually created .env file:" 192 | echo " See: https://airflow.apache.org/docs/apache-airflow/stable/start/docker.html#setting-the-right-airflow-user" 193 | echo 194 | fi 195 | one_meg=1048576 196 | mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg)) 197 | cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat) 198 | disk_available=$$(df / | tail -1 | awk '{print $$4}') 199 | warning_resources="false" 200 | if (( mem_available < 4000 )) ; then 201 | echo 202 | echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m" 203 | echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))" 204 | echo 205 | warning_resources="true" 206 | fi 207 | if (( cpus_available < 2 )); then 208 | echo 209 | echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m" 210 | echo "At least 2 CPUs recommended. You have $${cpus_available}" 211 | echo 212 | warning_resources="true" 213 | fi 214 | if (( disk_available < one_meg * 10 )); then 215 | echo 216 | echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m" 217 | echo "At least 10 GBs recommended. 
You have $$(numfmt --to iec $$((disk_available * 1024 )))" 218 | echo 219 | warning_resources="true" 220 | fi 221 | if [[ $${warning_resources} == "true" ]]; then 222 | echo 223 | echo -e "\033[1;33mWARNING!!!: You have not enough resources to run Airflow (see above)!\e[0m" 224 | echo "Please follow the instructions to increase amount of resources available:" 225 | echo " https://airflow.apache.org/docs/apache-airflow/stable/start/docker.html#before-you-begin" 226 | echo 227 | fi 228 | mkdir -p /sources/logs /sources/dags /sources/plugins 229 | chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins} 230 | exec /entrypoint airflow version 231 | # yamllint enable rule:line-length 232 | environment: 233 | <<: *airflow-common-env 234 | _AIRFLOW_DB_UPGRADE: 'true' 235 | _AIRFLOW_WWW_USER_CREATE: 'true' 236 | _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow} 237 | _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow} 238 | user: "0:0" 239 | volumes: 240 | - .:/sources 241 | 242 | airflow-cli: 243 | <<: *airflow-common 244 | profiles: 245 | - debug 246 | environment: 247 | <<: *airflow-common-env 248 | CONNECTION_CHECK_MAX_COUNT: "0" 249 | # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252 250 | command: 251 | - bash 252 | - -c 253 | - airflow 254 | 255 | flower: 256 | <<: *airflow-common 257 | command: celery flower 258 | ports: 259 | - 5555:5555 260 | healthcheck: 261 | test: ["CMD", "curl", "--fail", "http://localhost:5555/"] 262 | interval: 10s 263 | timeout: 10s 264 | retries: 5 265 | restart: always 266 | depends_on: 267 | <<: *airflow-common-depends-on 268 | airflow-init: 269 | condition: service_completed_successfully 270 | 271 | volumes: 272 | postgres-db-volume: 273 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /qlikflow/qlikflow.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2021 Stanislav Chernov 3 | 4 | Licensed under the Apache License, Version 2.0 (the "License"); 5 | you may not use this file except in compliance with the License. 
6 | You may obtain a copy of the License at 7 | 8 | http://www.apache.org/licenses/LICENSE-2.0 9 | 10 | Unless required by applicable law or agreed to in writing, software 11 | distributed under the License is distributed on an "AS IS" BASIS, 12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | See the License for the specific language governing permissions and 14 | limitations under the License. 15 | """ 16 | 17 | import json 18 | import requests 19 | import random 20 | import ast 21 | import csv 22 | import os 23 | from time import sleep 24 | from datetime import timedelta 25 | from requests_ntlm import HttpNtlmAuth 26 | from zeep import Client 27 | from zeep.transports import Transport 28 | from airflow import DAG 29 | from airflow.exceptions import AirflowException 30 | from airflow.utils.log.logging_mixin import LoggingMixin 31 | from airflow.sensors.time_delta import TimeDeltaSensor 32 | from airflow.operators.python import PythonOperator 33 | from airflow.providers.telegram.hooks.telegram import TelegramHook 34 | 35 | 36 | # Read config from json file 37 | def read_config(): 38 | # Get Airflow home folder with dags and config.json file 39 | file = os.getenv('AIRFLOW_HOME') + '/config/config.json' 40 | with open(file, 'r') as f: 41 | config = json.load(f) 42 | return config 43 | 44 | 45 | def sendlog_telegram(msg, chatid): 46 | config = read_config() 47 | telegram_hook = TelegramHook(token=config["telegram"]["token"], chat_id=chatid) 48 | telegram_hook.send_message({"text": msg}) 49 | pass 50 | 51 | 52 | def sleep_task(*args, **kwargs): 53 | sleep_seconds = kwargs.get('sleep_timer') 54 | sleep(sleep_seconds) 55 | 56 | 57 | def get_qs_tasks(*args, **kwargs): 58 | qs_server = kwargs.get('qs_server') 59 | qs_username = kwargs.get('qs_username') 60 | qs_password = kwargs.get('qs_password') 61 | qs_filename = kwargs.get('qs_filename') 62 | certificate_temp = kwargs.get('certificate') 63 | certificate = ( os.getenv('AIRFLOW_HOME') + '/cert/' + certificate_temp[0] , os.getenv('AIRFLOW_HOME') + '/cert/' + certificate_temp[1] ) 64 | 65 | xrfkey = ''.join(random.sample('qwertyuiopasdfghjklzxcvbnm1234567890', 16)) 66 | 67 | qs_headers = { 68 | 'content-type': 'application/json', 69 | 'X-Qlik-Xrfkey': xrfkey, 70 | 'X-Qlik-User' : 'UserDirectory=INTERNAL; UserId=sa_repository', 71 | } 72 | 73 | requests.packages.urllib3.disable_warnings() 74 | 75 | qs_session = requests.session() 76 | qs_session.auth = HttpNtlmAuth(qs_username, qs_password, qs_session) 77 | 78 | endpoint = 'qrs/task/full' 79 | 80 | def byte_to_dict(txt): 81 | dict_str = txt.decode("UTF-8").replace('\\\\','\\').replace(':null',':""').replace(':false',':False').replace(':true',':True') 82 | mydata = ast.literal_eval(dict_str) 83 | return mydata 84 | 85 | url = '{0}:4242/{1}?Xrfkey={2}'.format(qs_server, endpoint, xrfkey) 86 | start_response = qs_session.get(url, headers=qs_headers, verify=False, cert=certificate) 87 | 88 | if start_response.status_code == 200: 89 | content = byte_to_dict(start_response.content) 90 | print ('Tasks total count = {}'.format(len(content))) 91 | 92 | result_list = [] 93 | 94 | for task in content: 95 | content = {} 96 | content["task_id"] = task["id"] 97 | content["task_name"] = task["name"] 98 | content["task_enabled"] = task["enabled"] 99 | content["task_timeout"] = task["taskSessionTimeout"] 100 | content["task_retries"] = task["maxRetries"] 101 | content["app_id"] = task["app"].get("id") 102 | content["app_name"] = task["app"].get("name") 103 | if 
type(task["app"].get("stream")) is dict: 104 | content["stream_id"] = task["app"]["stream"].get("id") 105 | content["stream_name"] = task["app"]["stream"].get("name") 106 | elif type(task["app"].get("stream")) is str: 107 | content["stream_id"] = '' 108 | content["stream_name"] = '' 109 | 110 | result_list.append(content) 111 | 112 | print ('Total items in list = {}'.format(len(result_list))) 113 | 114 | else: 115 | raise AirflowException('Response give !=200 status_code\n',start_response.content) 116 | 117 | with open(qs_filename, 'w', encoding='utf-8') as output_file: 118 | dict_writer = csv.DictWriter(output_file, restval="-", fieldnames=result_list[0].keys(), delimiter=';') 119 | dict_writer.writeheader() 120 | dict_writer.writerows(result_list) 121 | 122 | 123 | def qs_run_task(*args, **kwargs): 124 | config = read_config() 125 | qs_server = kwargs.get('qs_server') 126 | qs_username = kwargs.get('qs_username') 127 | qs_password = kwargs.get('qs_password') 128 | qs_taskid = kwargs.get('qs_taskid') 129 | random_delay = kwargs.get('random_delay') 130 | certificate_temp = kwargs.get('certificate') 131 | certificate = ( os.getenv('AIRFLOW_HOME') + '/cert/' + certificate_temp[0] , os.getenv('AIRFLOW_HOME') + '/cert/' + certificate_temp[1] ) 132 | 133 | if random_delay != None: 134 | random.seed() 135 | secs = random.random() * random_delay 136 | print ('Random delay in seconds - {}'.format(secs)) 137 | sleep(secs) 138 | 139 | xrfkey = ''.join(random.sample('qwertyuiopasdfghjklzxcvbnm1234567890', 16)) 140 | 141 | qs_headers = { 142 | 'content-type': 'application/json', 143 | 'X-Qlik-Xrfkey': xrfkey, 144 | 'X-Qlik-User' : 'UserDirectory=INTERNAL; UserId=sa_repository', 145 | } 146 | 147 | requests.packages.urllib3.disable_warnings() 148 | 149 | qs_session = requests.session() 150 | qs_session.auth = HttpNtlmAuth(qs_username, qs_password, qs_session) 151 | 152 | endpoint = 'qrs/task/{}/start/synchronous'.format(qs_taskid) 153 | url = '{0}:4242/{1}?Xrfkey={2}'.format(qs_server, endpoint, xrfkey) 154 | start_response = qs_session.post(url, headers=qs_headers, verify=False, cert=certificate) 155 | 156 | def byte_to_dict(txt): 157 | dict_str = txt.decode("UTF-8").replace('\\\\','\\').replace(':null',':""').replace(':false',':False').replace(':true',':True') 158 | mydata = ast.literal_eval(dict_str) 159 | return mydata 160 | 161 | if start_response.status_code != 201: 162 | raise AirflowException('Failed to start task {}'.format(qs_taskid)) 163 | 164 | session_id = byte_to_dict(start_response.content)["value"] 165 | if session_id == '00000000-0000-0000-0000-000000000000': 166 | raise AirflowException ("The task is already running {} or can't start in this session".format(qs_taskid)) 167 | 168 | endpoint = 'qrs/executionsession/{}'.format(session_id) 169 | url = '{0}:4242/{1}?Xrfkey={2}'.format(qs_server, endpoint, xrfkey) 170 | session_response = qs_session.get(url, headers=qs_headers, verify=False, cert=certificate) 171 | exec_id = byte_to_dict(session_response.content)["executionResult"]["id"] 172 | 173 | while True: 174 | sleep(1) 175 | 176 | endpoint = 'qrs/executionresult/{}'.format(exec_id) 177 | url = '{0}:4242/{1}?Xrfkey={2}'.format(qs_server, endpoint, xrfkey) 178 | exec_response = qs_session.get(url, headers=qs_headers, verify=False, cert=certificate) 179 | result = byte_to_dict(exec_response.content) 180 | 181 | allstatuses = ['0: NeverStarted' , '1: Triggered' , '2: Started' , '3: Queued', 182 | '4: AbortInitiated', '5: Aborting', '6: Aborted', '7: FinishedSuccess', 183 | '8: 
FinishedFail', '9: Skipped', '10: Retry', '11: Error', '12: Reset'] 184 | 185 | bad_status = [4,5,6,8,11,12] 186 | good_status = [7] 187 | 188 | status = result["status"] 189 | if status in bad_status: 190 | raise AirflowException ('Error status = {}\n'.format(status)) 191 | break 192 | elif status in good_status: 193 | print ('All complete!') 194 | break 195 | 196 | # add remove files 197 | filelist = kwargs.get('delete_files') 198 | if filelist != None: 199 | for i in range(len(filelist)): 200 | if os.path.isfile(filelist[i]): 201 | print ('Delete file : {}'.format(filelist[i])) 202 | os.remove(filelist[i]) 203 | 204 | if kwargs.get('telegram_ok') != None: 205 | t = TelegramHook(token=config["telegram"]["token"], chat_id=kwargs.get('telegram_ok')) 206 | msg = 'Airflow alert: DAG: {}\nTASK: {}\nStatus : Completed\n'.format(kwargs.get('mydagid'),kwargs.get('mytaskid')) 207 | t.send_message({"text": msg}) 208 | 209 | def get_new_session_qv(qv_username, qv_password, qv_server, qv_port, qv_extraurl): 210 | session = requests.session() 211 | session.auth = HttpNtlmAuth(qv_username, qv_password) 212 | wsdl = "{0}:{1}{2}".format(qv_server, qv_port, qv_extraurl) 213 | client = Client(wsdl, transport=Transport(session=session)) 214 | service_key = client.service.GetTimeLimitedServiceKey() 215 | client.transport.session.headers.update({'X-Service-Key': service_key}) 216 | 217 | return client 218 | 219 | 220 | def qv_run_task(*args, **kwargs): 221 | config = read_config() 222 | qv_server = kwargs.get('qv_server') 223 | qv_port = kwargs.get('qv_port') 224 | qv_extraurl = kwargs.get('qv_extraurl') 225 | qv_username = kwargs.get('qv_username') 226 | qv_password = kwargs.get('qv_password') 227 | qv_taskid = kwargs.get('qv_taskid') 228 | qv_dsid = kwargs.get('qv_dsid') 229 | qv_getstatustimeout = kwargs.get('qv_getstatustimeout') 230 | 231 | random_delay = kwargs.get('random_delay') 232 | if random_delay != None: 233 | random.seed() 234 | secs = random.random() * random_delay 235 | print ('Random delay in seconds - {}'.format(secs)) 236 | sleep(secs) 237 | 238 | # session = requests.session() 239 | # session.auth = HttpNtlmAuth(qv_username, qv_password) 240 | # wsdl = "{0}:{1}{2}".format(qv_server, qv_port, qv_extraurl) 241 | # client = Client(wsdl, transport=Transport(session=session)) 242 | # service_key = client.service.GetTimeLimitedServiceKey() 243 | # client.transport.session.headers.update({'X-Service-Key': service_key}) 244 | client = get_new_session_qv(qv_username, qv_password, qv_server, qv_port, qv_extraurl) 245 | try: 246 | execute_status = client.service.TriggerEDXTask(qv_dsid, qv_taskid, '') 247 | except Exception as e: 248 | message = 'DAG: {}\nTASK: {}\nFailed to start QV task: {}\nERROR : {}'.format( kwargs.get('mydagid'), kwargs.get('mytaskid') , qv_taskid , e) 249 | raise AirflowException (message) 250 | 251 | check_sleep_time = 10 # seconds, sleep interval 252 | 253 | if qv_getstatustimeout != None: 254 | trycount_max = qv_getstatustimeout / check_sleep_time 255 | 256 | trycount_errors = 0 257 | trycounter_total = 0 258 | while True: 259 | sleep(check_sleep_time) 260 | status = 'Unknown' 261 | try: 262 | # service_key = client.service.GetTimeLimitedServiceKey() 263 | # client.transport.session.headers.update({'X-Service-Key': service_key}) 264 | client = get_new_session_qv(qv_username, qv_password, qv_server, qv_port, qv_extraurl) 265 | task_status = client.service.GetEDXTaskStatus(qv_dsid, execute_status.ExecId) 266 | status = task_status.TaskStatus 267 | if 

def qv_run_task(*args, **kwargs):
    config = read_config()
    qv_server = kwargs.get('qv_server')
    qv_port = kwargs.get('qv_port')
    qv_extraurl = kwargs.get('qv_extraurl')
    qv_username = kwargs.get('qv_username')
    qv_password = kwargs.get('qv_password')
    qv_taskid = kwargs.get('qv_taskid')
    qv_dsid = kwargs.get('qv_dsid')
    qv_getstatustimeout = kwargs.get('qv_getstatustimeout')

    random_delay = kwargs.get('random_delay')
    if random_delay is not None:
        random.seed()
        secs = random.random() * random_delay
        print('Random delay in seconds - {}'.format(secs))
        sleep(secs)

    client = get_new_session_qv(qv_username, qv_password, qv_server, qv_port, qv_extraurl)
    try:
        execute_status = client.service.TriggerEDXTask(qv_dsid, qv_taskid, '')
    except Exception as e:
        message = 'DAG: {}\nTASK: {}\nFailed to start QV task: {}\nERROR : {}'.format(kwargs.get('mydagid'), kwargs.get('mytaskid'), qv_taskid, e)
        raise AirflowException(message)

    check_sleep_time = 10  # seconds between status checks

    if qv_getstatustimeout is not None:
        trycount_max = qv_getstatustimeout / check_sleep_time

    trycount_errors = 0
    trycounter_total = 0
    while True:
        sleep(check_sleep_time)
        status = 'Unknown'
        try:
            client = get_new_session_qv(qv_username, qv_password, qv_server, qv_port, qv_extraurl)
            task_status = client.service.GetEDXTaskStatus(qv_dsid, execute_status.ExecId)
            status = task_status.TaskStatus
            if task_status.TaskName[-15:] == '(work disabled)':
                raise AirflowException("QlikView task failed with status - QlikView task is disabled")
            if status == 'Completed':
                break
            if status == 'Warning':
                raise AirflowException("QlikView task failed with status - The task completed with a warning")
            if status == 'Failed':
                raise AirflowException("QlikView task failed with status - Failed")
            if status == 'Aborting':
                raise AirflowException("QlikView task failed with status - Aborting")
            if status == 'Disabled':
                raise AirflowException("QlikView task failed with status - Disabled")
            if status == 'Unrunnable':
                raise AirflowException("QlikView task failed with status - The task's distribution group is unavailable")
        except AirflowException:
            # status-based failures above are final; don't treat them as polling errors
            raise
        except Exception as e:
            message = 'DAG: {} | TASK: {} | Error on try {} ({} total) getting the status of QV task {}\nError text: {}'.format(kwargs.get('mydagid'), kwargs.get('mytaskid'), trycount_errors, trycounter_total, qv_taskid, e)
            if qv_getstatustimeout is not None:
                if trycount_errors >= trycount_max:
                    raise AirflowException(message)
            else:
                raise AirflowException(message)
            trycount_errors += 1
            trycounter_total += 1

    # delete temporary files if requested
    filelist = kwargs.get('delete_files')
    if filelist is not None:
        for filename in filelist:
            if os.path.isfile(filename):
                print('Delete file : {}'.format(filename))
                os.remove(filename)

    if kwargs.get('telegram_ok') is not None:
        t = TelegramHook(token=config["telegram"]["token"], chat_id=kwargs.get('telegram_ok'))
        msg = 'Airflow alert: DAG: {}\nTASK: {}\nStatus : Completed\n'.format(kwargs.get('mydagid'), kwargs.get('mytaskid'))
        print(msg)
        t.send_message({"text": msg})
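
# np_run_task drives the NPrinting REST API: an NTLM GET to <np_server>/login/ntlm
# sets the NPWEBCONSOLE_XSRF-TOKEN cookie, which must be echoed back in the
# X-XSRF-TOKEN header; the task is then started with POST /tasks/<id>/executions
# and polled via GET /tasks/<id>/executions/<exec_id> every 10 seconds.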

def np_run_task(*args, **kwargs):
    config = read_config()
    np_server = kwargs.get('np_server')
    np_credential = kwargs.get('np_credential')
    np_password = kwargs.get('np_password')
    np_taskid = kwargs.get('np_taskid')
    np_warnisfail = kwargs.get('np_warnisfail')
    if np_warnisfail is None:
        np_warnisfail = False

    random_delay = kwargs.get('random_delay')
    if random_delay is not None:
        random.seed()
        secs = random.random() * random_delay
        print('Random delay in seconds - {}'.format(secs))
        sleep(secs)

    requests.packages.urllib3.disable_warnings()
    np_headers = {"Accept": "application/json",
                  "Content-Type": "application/json"}
    np_session = requests.session()

    np_session.auth = HttpNtlmAuth(np_credential, np_password, np_session)
    np_headers['User-Agent'] = 'Windows'

    response = np_session.get('{0}/{1}'.format(np_server, 'login/ntlm'), headers=np_headers, verify=False)
    tokenstring = np_session.cookies['NPWEBCONSOLE_XSRF-TOKEN']
    np_headers['X-XSRF-TOKEN'] = tokenstring

    urltorun = '{0}/{1}/{2}/executions'.format(np_server, 'tasks', np_taskid)

    attempts = 10
    for i in range(attempts):
        try:
            response = np_session.post(urltorun, headers=np_headers, verify=False)
            result = response.json()["data"]
            break
        except Exception as e:
            if i == attempts - 1:
                print('Error on final attempt ({}):\n{}\n{}\n{}'.format(attempts, e, response.json(), urltorun))
                raise AirflowException('Failed to start NP task after {} attempts'.format(attempts))
            if i == 0:
                print('Error on first attempt:\n{}\n{}\n{}'.format(e, response.json(), urltorun))

            random.seed()
            secs = random.random() * 9 + 1
            print('Failed to start NP task, attempt {} of {}; retrying in {} s.'.format(i + 1, attempts, secs))
            sleep(secs)

    np_exec_id = result["id"]

    check_sleep_time = 10  # seconds between status checks
    while True:
        sleep(check_sleep_time)
        urltorun = '{0}/{1}/{2}/executions/{3}'.format(np_server, 'tasks', np_taskid, np_exec_id)
        response = np_session.get(urltorun, headers=np_headers, verify=False)
        result = response.json()["data"]
        if result["status"] == 'Completed':
            break

        if result["status"] == 'Warning' and np_warnisfail:
            raise AirflowException("NPrinting task failed with status Warning, flag WarnIsFail = True")

        if result["status"] == 'Warning':
            break
        if result["status"] == 'CompletedWithWarning':
            break
        if result["status"] == 'Failed':
            raise AirflowException("NPrinting task failed with status Failed")
        if result["status"] == 'Aborted':
            raise AirflowException("NPrinting task failed with status Aborted")

    # delete temporary files if requested
    filelist = kwargs.get('delete_files')
    if filelist is not None:
        for filename in filelist:
            if os.path.isfile(filename):
                print('Delete file : {}'.format(filename))
                os.remove(filename)

    if kwargs.get('telegram_ok') is not None:
        t = TelegramHook(token=config["telegram"]["token"], chat_id=kwargs.get('telegram_ok'))
        msg = 'Airflow alert: DAG: {}\nTASK: {}\nStatus : Completed\n'.format(kwargs.get('mydagid'), kwargs.get('mytaskid'))
        t.send_message({"text": msg})


def clean_for_taskid(name):
    # make a human-readable task name safe to use as an Airflow task_id
    newname = name.replace(" ", "_").replace("'", "").replace("/", "_").replace("(", "_").replace(")", "_").replace(",", "_").replace(".qvw", "").replace("__", "_")
    return newname
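
# For example (hypothetical name), clean_for_taskid('My App (v2).qvw') returns
# 'My_App_v2_': spaces and punctuation become underscores, the '.qvw' extension is
# dropped, and a single pass of '__' -> '_' collapses doubled underscores.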

def create_aftask(task, task_id, task_guid, dag, tasksDict):
    config = read_config()
    args_telegram_ok = None
    args_telegram_fail = None
    args_mail_ok = None
    args_mail_fail = None
    var_name = None    # for qv
    var_values = None  # for qv
    mydag = dag.dag_id
    mytaskid = task_id
    warningisfail = None
    random_delay = None
    delete_files = None
    args_getstatustimeout = None

    if tasksDict[task].get('OnSuccess') is not None:
        if tasksDict[task].get('OnSuccess').get('telegram') is not None:
            args_telegram_ok = tasksDict[task].get('OnSuccess').get('telegram')
        if tasksDict[task].get('OnSuccess').get('mail') is not None:
            args_mail_ok = tasksDict[task].get('OnSuccess').get('mail')
    if tasksDict[task].get('OnFail') is not None:
        if tasksDict[task].get('OnFail').get('telegram') is not None:
            args_telegram_fail = tasksDict[task].get('OnFail').get('telegram')
        if tasksDict[task].get('OnFail').get('mail') is not None:
            args_mail_fail = tasksDict[task].get('OnFail').get('mail')
    if tasksDict[task].get('WarningIsFail') is not None:
        warningisfail = tasksDict[task].get('WarningIsFail')
    if tasksDict[task].get('RandomStartDelay') is not None:
        random_delay = tasksDict[task].get('RandomStartDelay')
    if tasksDict[task].get('GetStatusTimeout') is not None:
        args_getstatustimeout = tasksDict[task].get('GetStatusTimeout')
    if tasksDict[task].get('DeleteFiles') is not None:
        delete_files = tasksDict[task].get('DeleteFiles')
        if type(delete_files) is not list:
            raise AirflowException('In task {}, parameter "DeleteFiles" must be a list, not {}'.format(task_id, str(type(delete_files))))

    # QS: export the task list to a CSV file
    if tasksDict[task]['Soft'] == 'get_qs_tasks':
        kwargs = {
            "qs_server": config[tasksDict[task]['Server']]["server"],
            "qs_username": config[tasksDict[task]['Server']]["username"],
            "qs_password": config[tasksDict[task]['Server']]["password"],
            "qs_filename": tasksDict[task].get('FullFileName_ToSave'),
            "certificate": config[tasksDict[task]['Server']]["certificate"],
            "mail_ok": args_mail_ok,
            "mail_fail": args_mail_fail,
            "telegram_ok": args_telegram_ok,
            "telegram_fail": args_telegram_fail,
            "mydagid": mydag,
            "mytaskid": mytaskid,
            "random_delay": random_delay,
        }
        AirflowTask = PythonOperator(task_id=task_id, python_callable=get_qs_tasks, op_kwargs=kwargs, dag=dag)

    # NPrinting
    if tasksDict[task]['Soft'][:2] == 'np':
        kwargs = {
            "np_server": config[tasksDict[task]['Soft']]["server"],
            "np_credential": config[tasksDict[task]['Soft']]["credential"],
            "np_password": config[tasksDict[task]['Soft']]["password"],
            "np_taskid": task_guid,
            "mail_ok": args_mail_ok,
            "mail_fail": args_mail_fail,
            "telegram_ok": args_telegram_ok,
            "telegram_fail": args_telegram_fail,
            "mydagid": mydag,
            "mytaskid": mytaskid,
            "np_warnisfail": warningisfail,
            "random_delay": random_delay,
            "delete_files": delete_files,
        }
        AirflowTask = PythonOperator(task_id=task_id, python_callable=np_run_task, op_kwargs=kwargs, dag=dag)

    # QlikView
    if tasksDict[task]['Soft'][:2] == 'qv':
        kwargs = {
            "qv_server": config[tasksDict[task]['Soft']]["server"],
            "qv_port": config[tasksDict[task]['Soft']]["port"],
            "qv_extraurl": config[tasksDict[task]['Soft']]["extraurl"],
            "qv_username": config[tasksDict[task]['Soft']]["username"],
            "qv_password": config[tasksDict[task]['Soft']]["password"],
            "qv_taskid": task_guid,
            "qv_getstatustimeout": args_getstatustimeout,
            "mail_ok": args_mail_ok,
            "mail_fail": args_mail_fail,
            "telegram_ok": args_telegram_ok,
            "telegram_fail": args_telegram_fail,
            "mydagid": mydag,
            "mytaskid": mytaskid,
            "random_delay": random_delay,
            "delete_files": delete_files,
        }
        AirflowTask = PythonOperator(task_id=task_id, python_callable=qv_run_task, op_kwargs=kwargs, dag=dag)

    # Qlik Sense
    if tasksDict[task]['Soft'][:2] == 'qs':
        kwargs = {
            "qs_server": config[tasksDict[task]['Soft']]["server"],
            "qs_username": config[tasksDict[task]['Soft']]["username"],
            "qs_password": config[tasksDict[task]['Soft']]["password"],
            "certificate": config[tasksDict[task]['Soft']]["certificate"],
            "qs_taskid": task_guid,
            "mail_ok": args_mail_ok,
            "mail_fail": args_mail_fail,
            "telegram_ok": args_telegram_ok,
            "telegram_fail": args_telegram_fail,
            "mydagid": mydag,
            "mytaskid": mytaskid,
            "random_delay": random_delay,
            "delete_files": delete_files,
        }
        AirflowTask = PythonOperator(task_id=task_id, python_callable=qs_run_task, op_kwargs=kwargs, dag=dag)

    # Sleep timer
    if tasksDict[task]['Soft'] == 'sleep':
        kwargs = {
            "sleep_timer": tasksDict[task]["Seconds"],
        }
        sensorSeconds = tasksDict[task]["Seconds"]
        sensorTaskID = task_guid + '_sleep_{}'.format(str(sensorSeconds))
        AirflowTask = PythonOperator(task_id=sensorTaskID, python_callable=sleep_task, op_kwargs=kwargs, dag=dag, pool='sensors')

    return AirflowTask
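
# A sketch of a task entry (hypothetical IDs) using the optional keys that
# addparams_totask below applies on top of the operator created above:
#
#     'qs1. Reload sales': {
#         'Soft': 'qs1',
#         'TaskId': '00000000-0000-0000-0000-000000000001',
#         'Dep': {'qs1. Extract sales'},   # names of upstream tasks in tasksDict
#         'Retries_count': 2,              # Airflow retries
#         'Retries_delay': 300,            # seconds between retries
#         'StartTime': [6, 30],            # gate behind a shared 6h30m TimeDeltaSensor
#         'Pool': 'qs1_default_pool',      # else the soft's default_pool from config
#     },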

def addparams_totask(task, newtask, dag, tasksDict, airflowTasksDict):
    config = read_config()
    if 'Dep' in tasksDict[task]:
        for dep in tasksDict[task]['Dep']:
            airflowTasksDict[newtask].set_upstream(airflowTasksDict[dep])  # dependencies

    if 'OnFail' in tasksDict[task]:
        if tasksDict[task]['OnFail'].get('mail') is not None:
            airflowTasksDict[newtask].email_on_failure = True
            airflowTasksDict[newtask].email = tasksDict[task]['OnFail']['mail']

    if 'Retries_count' in tasksDict[task]:
        airflowTasksDict[newtask].retries = int(tasksDict[task]['Retries_count'])

    if 'Retries_delay' in tasksDict[task]:
        airflowTasksDict[newtask].retry_delay = timedelta(seconds=int(tasksDict[task]['Retries_delay']))

    if 'Retries_ExponentialDelay' in tasksDict[task]:
        airflowTasksDict[newtask].retry_exponential_backoff = tasksDict[task]['Retries_ExponentialDelay']

    if 'StartTime' in tasksDict[task]:
        hour = tasksDict[task]['StartTime'][0]
        minute = tasksDict[task]['StartTime'][1]
        sensorTime = timedelta(hours=hour, minutes=minute)
        sensorTaskID = u'TimeSensor_{}_{}'.format(hour, minute)

        # one shared sensor per distinct (hour, minute) pair
        if sensorTaskID not in airflowTasksDict:
            SensorTask = TimeDeltaSensor(delta=sensorTime, task_id=sensorTaskID, pool='sensors', dag=dag)
            airflowTasksDict[sensorTaskID] = SensorTask
        airflowTasksDict[newtask].set_upstream(airflowTasksDict[sensorTaskID])

    if 'Pool' in tasksDict[task]:
        setpool = tasksDict[task]['Pool']
        airflowTasksDict[newtask].pool = setpool
    else:  # default pool from config if not set manually
        setpool = config[tasksDict[task]['Soft']]['default_pool']
        airflowTasksDict[newtask].pool = setpool


def create_tasks(tasksDict, airflowTasksDict, dag):
    """
    Create Airflow tasks from the ``tasksDict`` definition and register them in ``airflowTasksDict``
    """
    for task in tasksDict.keys():

        if 'Soft' in tasksDict[task]:
            if type(tasksDict[task]["TaskId"]) is str:
                task_id = clean_for_taskid(task)
                AirflowTask = create_aftask(task, task_id, tasksDict[task]["TaskId"], dag, tasksDict)
                airflowTasksDict[task] = AirflowTask
                addparams_totask(task, task, dag, tasksDict, airflowTasksDict)
            elif type(tasksDict[task]["TaskId"]) is list:
                # one Airflow task per TaskId, suffixed with the id itself
                for i in range(len(tasksDict[task]["TaskId"])):
                    task_id = clean_for_taskid(task + '_' + str(tasksDict[task]["TaskId"][i]))
                    AirflowTask = create_aftask(task, task_id, str(tasksDict[task]["TaskId"][i]), dag, tasksDict)
                    airflowTasksDict[task_id] = AirflowTask
                    addparams_totask(task, task_id, dag, tasksDict, airflowTasksDict)
--------------------------------------------------------------------------------