├── .gitchangelog.rc ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── feature_request.md │ ├── minor-release.md │ ├── patch-release.md │ ├── pre-release.md │ ├── question.md │ └── task.md └── workflows │ ├── ci.yml │ └── package-release.yml ├── .gitignore ├── CHANGELOG.rst ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── argo └── workflows │ ├── __init__.py │ └── dsl │ ├── __about__.py │ ├── __init__.py │ ├── _arguments.py │ ├── _base.py │ ├── _cronworkflow.py │ ├── _inputs.py │ ├── _outputs.py │ ├── _utils.py │ ├── _workflow.py │ ├── _workflow_template.py │ ├── tasks.py │ └── templates.py ├── docs └── integration_tests.md ├── examples ├── artifacts.ipynb ├── artifacts.yaml ├── cronworkflow.py ├── dag-diamond.ipynb ├── dag-diamond.yaml ├── hello-world-single-task.ipynb ├── hello-world-single-task.yaml ├── hello-world.ipynb ├── hello-world.yaml ├── resource.ipynb ├── resource.yaml ├── scripts.ipynb └── scripts.yaml ├── requirements-test.txt ├── requirements.txt ├── scripts ├── generate_changelog.sh ├── integration_tests.sh └── validate_workflow.sh ├── setup.py └── tests ├── __init__.py ├── _base.py ├── cronworkflows ├── .gitignore └── hello-cron.py ├── data └── workflows │ ├── cluster-workflow-template.yaml │ ├── cron-workflow.yaml │ ├── hello-world.yaml │ └── workflow-template.yaml ├── test-notebooks.sh ├── test_cluster_workflow_template.py ├── test_cronworkflow.py ├── test_workflow.py ├── test_workflow_template.py ├── workflow_templates ├── .gitignore └── hello-template.py └── workflows ├── .gitignore ├── artifacts.py ├── dag-diamond.py ├── hello-world.py ├── loops.py └── scripts-python.py /.gitchangelog.rc: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8; mode: python -*- 2 | ## 3 | ## Format 4 | ## 5 | ## ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...] 6 | ## 7 | ## Description 8 | ## 9 | ## ACTION is one of 'chg', 'fix', 'new' 10 | ## 11 | ## It states WHAT the change is about. 12 | ## 13 | ## 'chg' is for refactors, small improvements, cosmetic changes... 14 | ## 'fix' is for bug fixes 15 | ## 'new' is for new features, big improvements 16 | ## 17 | ## AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc' 18 | ## 19 | ## It states WHO is concerned by the change. 20 | ## 21 | ## 'dev' is for developers (API changes, refactors...) 22 | ## 'usr' is for final users (UI changes) 23 | ## 'pkg' is for packagers (packaging changes) 24 | ## 'test' is for testers (test-only changes) 25 | ## 'doc' is for documentation writers (doc-only changes) 26 | ## 27 | ## COMMIT_MSG is ... well ... the commit message itself. 28 | ## 29 | ## TAGs are additional adjectives such as 'refactor', 'minor', 'cosmetic' 30 | ## 31 | ## They are preceded with a '!' or a '@' (prefer the former, as the 32 | ## latter is wrongly interpreted by GitHub). Commonly used tags are: 33 | ## 34 | ## 'refactor' is obviously for refactoring code only 35 | ## 'minor' is for a trivial change (a typo, adding a comment) 36 | ## 'cosmetic' is for cosmetic-driven changes (re-indentation, 80-col...) 37 | ## 'wip' is for partial functionality but complete subfunctionality. 38 | ## 39 | ## Example: 40 | ## 41 | ## new: usr: support of bazaar implemented 42 | ## chg: re-indented some lines !cosmetic 43 | ## new: dev: updated code to be compatible with last version of killer lib. 44 | ## fix: pkg: updated year of license coverage. 45 | ## new: test: added a bunch of tests around user usability of feature X. 46 | ## fix: typo in spelling my name in comment.
!minor 47 | ## 48 | ## Please note that multi-line commit messages are supported, and only the 49 | ## first line will be considered as the "summary" of the commit message. So 50 | ## tags and other rules only apply to the summary. The body of the commit 51 | ## message will be displayed in the changelog without reformatting. 52 | 53 | 54 | ## 55 | ## ``ignore_regexps`` is a list of regexps 56 | ## 57 | ## Any commit having its full commit message matching any regexp listed here 58 | ## will be ignored and won't be reported in the changelog. 59 | ## 60 | ignore_regexps = [ 61 | r'@ignore', r'!ignore', 62 | r'@minor', r'!minor', 63 | r'@cosmetic', r'!cosmetic', 64 | r'@refactor', r'!refactor', 65 | r'@wip', r'!wip', 66 | r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[pP]kg:', 67 | r'^(.{3,3}\s*:)?\s*([fF]irst|[Ii]nitial) commit.?\s*$', 68 | r'^Merge', ## ignore merge commits 69 | r'^$', ## ignore commits with empty messages 70 | ] 71 | 72 | 73 | ## ``section_regexps`` is a list of 2-tuples associating a string label and a 74 | ## list of regexps 75 | ## 76 | ## Commit messages will be classified into sections thanks to this. Section 77 | ## titles are the labels, and a commit is classified under a section if any 78 | ## of the regexps associated with it matches. 79 | ## 80 | ## Please note that ``section_regexps`` will only classify commits and won't 81 | ## make any changes to the contents. So you'll probably want to go check 82 | ## ``subject_process`` (or ``body_process``) to do some changes to the subject, 83 | ## whenever you are tweaking this variable. 84 | ## 85 | section_regexps = [ 86 | ('New', [ 87 | r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 88 | ]), 89 | ('Changes', [ 90 | r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 91 | ]), 92 | ('Fix', [ 93 | r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$', 94 | ]), 95 | 96 | ('Other', None ## Match all lines 97 | ), 98 | 99 | ] 100 | 101 | 102 | ## ``body_process`` is a callable 103 | ## 104 | ## This callable will be given the original body and the result will 105 | ## be used in the changelog. 106 | ## 107 | ## Available constructs are: 108 | ## 109 | ## - any python callable that takes one txt argument and returns a txt argument. 110 | ## 111 | ## - ReSub(pattern, replacement): will apply regexp substitution. 112 | ## 113 | ## - Indent(chars=" "): will indent the text with the prefix 114 | ## Please remember that template engines also get to modify the text and 115 | ## will usually indent the text themselves if needed. 116 | ## 117 | ## - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraphs to fill 80 columns 118 | ## 119 | ## - noop: do nothing 120 | ## 121 | ## - ucfirst: ensure the first letter is uppercase. 122 | ## (usually used in the ``subject_process`` pipeline) 123 | ## 124 | ## - final_dot: ensure text finishes with a dot 125 | ## (usually used in the ``subject_process`` pipeline) 126 | ## 127 | ## - strip: remove any spaces before or after the content of the string 128 | ## 129 | ## - SetIfEmpty(msg="No commit message."): will set the text to 130 | ## the given ``msg`` if the current text is empty.
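##
## A worked example of the above (the sample pattern here is illustrative,
## not part of the reference configuration): ``ReSub(r'\n{3,}', '\n\n')``
## would collapse any run of blank lines in the body down to a single blank
## line before the body lands in the changelog.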
131 | ## 132 | ## Additionally, you can `pipe` the provided filters, for instance: 133 | #body_process = Wrap(regexp=r'\n(?=\w+\s*:)') | Indent(chars=" ") 134 | #body_process = Wrap(regexp=r'\n(?=\w+\s*:)') 135 | #body_process = noop 136 | body_process = ReSub(r'((^|\n)[A-Z]\w+(-\w+)*: .*(\n\s+.*)*)+$', r'') | strip 137 | 138 | 139 | ## ``subject_process`` is a callable 140 | ## 141 | ## This callable will be given the original subject and the result will 142 | ## be used in the changelog. 143 | ## 144 | ## Available constructs are those listed in the ``body_process`` doc. 145 | subject_process = (strip | 146 | ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') | 147 | SetIfEmpty("No commit message.") | ucfirst | final_dot) 148 | 149 | 150 | ## ``tag_filter_regexp`` is a regexp 151 | ## 152 | ## Tags that will be used for the changelog must match this regexp. 153 | ## 154 | tag_filter_regexp = r'^v?(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:[-]?(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$' 155 | 156 | 157 | ## ``unreleased_version_label`` is a string or a callable that outputs a string 158 | ## 159 | ## This label will be used as the changelog title of the last set of changes 160 | ## between the last valid tag and HEAD, if any. 161 | import os 162 | unreleased_version_label = os.environ.get("RELEASE_VERSION", "HEAD") 163 | 164 | 165 | 166 | ## ``output_engine`` is a callable 167 | ## 168 | ## This will change the output format of the generated changelog file 169 | ## 170 | ## Available choices are: 171 | ## 172 | ## - rest_py 173 | ## 174 | ## Legacy pure python engine, outputs reStructuredText. 175 | ## This is the default. 176 | ## 177 | ## - mustache(<template_name>) 178 | ## 179 | ## Template name can be any of the available templates in 180 | ## ``templates/mustache/*.tpl``. 181 | ## Requires python package ``pystache``. 182 | ## Examples: 183 | ## - mustache("markdown") 184 | ## - mustache("restructuredtext") 185 | ## 186 | ## - makotemplate(<template_name>) 187 | ## 188 | ## Template name can be any of the available templates in 189 | ## ``templates/mako/*.tpl``. 190 | ## Requires python package ``mako``. 191 | ## Examples: 192 | ## - makotemplate("restructuredtext") 193 | ## 194 | output_engine = rest_py 195 | #output_engine = mustache("restructuredtext") 196 | #output_engine = mustache("markdown") 197 | #output_engine = makotemplate("restructuredtext") 198 | 199 | 200 | ## ``include_merge`` is a boolean 201 | ## 202 | ## This option tells git-log whether to include merge commits in the log. 203 | ## The default is to include them. 204 | include_merge = True 205 | 206 | 207 | ## ``log_encoding`` is a string identifier 208 | ## 209 | ## This option tells gitchangelog what encoding is output by ``git log``. 210 | ## The default is to be clever about it: it checks ``git config`` for 211 | ## ``i18n.logOutputEncoding``, and if not found will default to git's own 212 | ## default: ``utf-8``. 213 | #log_encoding = 'utf-8' 214 | 215 | 216 | ## ``publish`` is a callable 217 | ## 218 | ## Sets what ``gitchangelog`` should do with the output generated by 219 | ## the output engine. ``publish`` is a callable taking one argument 220 | ## that is an iterator over lines from the output engine.
221 | ## 222 | ## Some helper callables are provided: 223 | ## 224 | ## Available choices are: 225 | ## 226 | ## - stdout 227 | ## 228 | ## Outputs directly to standard output 229 | ## (This is the default) 230 | ## 231 | ## - FileInsertAtFirstRegexMatch(file, pattern, idx=lambda m: m.start(), flags) 232 | ## 233 | ## Creates a callable that will parse the given file for the given 234 | ## regex pattern and will insert the output in the file. 235 | ## ``idx`` is a callable that receives the match object and 236 | ## must return an integer index pointing to where to insert 237 | ## the output in the file. Default is to return the position of 238 | ## the start of the matched string. 239 | ## 240 | ## - FileRegexSubst(file, pattern, replace, flags) 241 | ## 242 | ## Applies a replacement in place in the given file. Your regex pattern must 243 | ## take care of everything and might be more complex. Check the README 244 | ## for a complete copy-pastable example. 245 | ## 246 | # publish = FileInsertAtFirstRegexMatch( 247 | # "CHANGELOG.rst", 248 | # r'/(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n/', 249 | # idx=lambda m: m.start(1) 250 | # ) 251 | #publish = stdout 252 | 253 | 254 | ## ``revs`` is a list of callables or a list of strings 255 | ## 256 | ## Callables will be called and resolved to strings, allowing dynamic 257 | ## computation of these. The result will be used as revisions for 258 | ## gitchangelog (as if directly stated on the command line). This allows 259 | ## you to filter exactly which commits will be read by gitchangelog. 260 | ## 261 | ## To get full documentation on the format of these strings, please 262 | ## refer to the ``git rev-list`` arguments. There are many examples. 263 | ## 264 | ## Using callables is especially useful, for instance, if you 265 | ## are using gitchangelog to generate your changelog incrementally. 266 | ## 267 | ## Some helpers are provided; you can use them:: 268 | ## 269 | ## - FileFirstRegexMatch(file, pattern): will return a callable that will 270 | ## return the first string match for the given pattern in the given file. 271 | ## If you use named sub-patterns in your regex pattern, it'll output only 272 | ## the string matching the regex pattern named "rev". 273 | ## 274 | ## - Caret(rev): will return the rev prefixed by a "^", which is a 275 | ## way to remove the given revision and all its ancestors. 276 | ## 277 | ## Please note that if you provide a rev-list on the command line, it'll 278 | ## replace this value (which will then be ignored). 279 | ## 280 | ## If empty, then ``gitchangelog`` will act as if it had to generate a full 281 | ## changelog. 282 | ## 283 | ## The default is to use all commits to make the changelog.
284 | #revs = ["^1.0.3", ] 285 | #revs = [ 286 | # Caret( 287 | # FileFirstRegexMatch( 288 | # "CHANGELOG.rst", 289 | # r"(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n")), 290 | # "HEAD" 291 | #] 292 | revs = [] -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @binarycrayon 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Additional context** 27 | Add any other context about the problem here. 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/minor-release.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Minor release 3 | about: Create a new minor release 4 | title: New minor release 5 | assignees: '' 6 | 7 | --- 8 | 9 | Hey, Kebechet! 10 | 11 | Create a new minor release, please. 12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/patch-release.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Patch release 3 | about: Create a new patch release 4 | title: New patch release 5 | assignees: '' 6 | 7 | --- 8 | 9 | Hey, Kebechet! 10 | 11 | Create a new patch release, please. 12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/pre-release.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Pre-release 3 | about: Create a new pre-release 4 | title: New pre-release 5 | assignees: '' 6 | 7 | --- 8 | 9 | Hey, Kebechet! 10 | 11 | Create a new pre-release, please.
12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: Generic question or request 4 | title: '' 5 | labels: question 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Description** 11 | 12 | 13 | **Additional context** 14 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/task.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Task 3 | about: Task 4 | title: "[TASK] ..." 5 | labels: task 6 | assignees: '' 7 | 8 | --- 9 | 10 | **User story** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Additional context** 14 | Add any other context or screenshots about the feature request here. 15 | 16 | **Acceptance Criteria** 17 | - [ ] At least one acceptance criterion should be present 18 | 19 | **Linked epics / issues** 20 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | - release-* 11 | 12 | jobs: 13 | test: 14 | env: 15 | ACTIONS_ALLOW_UNSECURE_COMMANDS: true 16 | if: >- 17 | !contains(github.event.head_commit.message, 'doc') && 18 | !contains(github.event.head_commit.message, 'release') && 19 | !contains(github.event.head_commit.message, '!ignore') && 20 | !contains(github.event.head_commit.message, '!minor') && 21 | !contains(github.event.head_commit.message, '!refactor') && 22 | !contains(github.event.head_commit.message, '!wip') 23 | 24 | runs-on: ubuntu-latest 25 | strategy: 26 | matrix: 27 | python-version: [3.6, 3.7, 3.8] 28 | 29 | steps: 30 | - uses: actions/checkout@v2 31 | - name: Set up Python ${{ matrix.python-version }} 32 | uses: actions/setup-python@v2 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | - name: Install dependencies 36 | run: | 37 | sudo apt-get install tree 38 | 39 | mkdir -p $HOME/.local/bin 40 | 41 | # fix permissions 42 | export PATH=$HOME/.local/bin:/usr/local/bin:$PATH 43 | 44 | sudo find . -type f -exec chmod 666 {} \; 45 | sudo find . -type d -exec chmod 774 {} \; 46 | sudo find $HOME/.local -type f -exec chmod 666 {} \; 47 | sudo find $HOME/.local -type d -exec chmod 774 {} \; 48 | 49 | sudo chown -R $USER:$USER $HOME/.local 50 | 51 | #s2i 52 | curl \ 53 | -L https://github.com/openshift/source-to-image/releases/download/v1.2.0/source-to-image-v1.2.0-2a579ecd-linux-amd64.tar.gz \ 54 | -o release.tar.gz 55 | tar -C $HOME/.local/bin -xvf release.tar.gz 56 | 57 | # python dependencies 58 | python -m pip install --user --upgrade pip pipenv 59 | pip install --user -r requirements-test.txt 60 | 61 | # - name: Lint with flake8 62 | # run: | 63 | # pip install flake8 64 | # # stop the build if there are Python syntax errors or undefined names 65 | # flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 66 | # # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 67 | # flake8 .
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 68 | 69 | - name: Test 70 | run: | 71 | export PATH=$HOME/.local/bin:/usr/local/bin:$PATH 72 | 73 | # run tests 74 | pytest 75 | # bash tests/test-notebooks.sh 76 | - name: Install Argo CLI 77 | run: | 78 | curl -sLO https://github.com/argoproj/argo/releases/download/v2.11.8/argo-linux-amd64.gz 79 | gunzip argo-linux-amd64.gz 80 | chmod +x argo-linux-amd64 81 | sudo mv ./argo-linux-amd64 /usr/local/bin/argo 82 | - uses: opsgang/ga-setup-minikube@v0.1.1 83 | with: 84 | minikube-version: 1.11.0 85 | k8s-version: 1.18.3 86 | - name: Integration tests 87 | run: | 88 | minikube config set vm-driver docker 89 | minikube config set kubernetes-version 1.18.3 90 | minikube start 91 | 92 | kubectl create ns argo 93 | kubectl create sa default -n argo 94 | kubectl apply -n argo -f https://raw.githubusercontent.com/argoproj/argo/v2.11.1/manifests/quick-start-minimal.yaml 95 | kubectl wait -n argo --for=condition=Ready pods --all --timeout=300s 96 | 97 | chmod +x scripts/integration_tests.sh 98 | scripts/integration_tests.sh 99 | -------------------------------------------------------------------------------- /.github/workflows/package-release.yml: -------------------------------------------------------------------------------- 1 | name: Python package release 2 | 3 | on: 4 | release: 5 | types: [created] 6 | 7 | jobs: 8 | release: 9 | 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v1 14 | - name: Setup Python 15 | uses: actions/setup-python@v1 16 | with: 17 | python-version: "3.6" 18 | - name: Install dependencies 19 | run: | 20 | sudo apt-get install tree 21 | # python dependencies 22 | python -m pip install --upgrade pip pipenv twine 23 | pipenv install --dev 24 | 25 | - name: Lint with flake8 26 | 27 | run: | 28 | pipenv run pip install flake8 29 | # stop the build if there are Python syntax errors or undefined names 30 | pipenv run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 31 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 32 | pipenv run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 33 | 34 | - name: Build Python package 35 | run: | 36 | pwd && tree 37 | pipenv run python setup.py sdist bdist_wheel 38 | 39 | - name: Twine check 40 | run: | 41 | pipenv run twine check dist/* 42 | 43 | - name: Publish to PyPI 44 | run: | 45 | pipenv run twine upload --repository-url https://upload.pypi.org/legacy/ dist/* -u macermak -p ${{ secrets.PYPI_PASSWORD }} 46 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/__pycache__ 2 | .idea/ 3 | .ipynb_checkpoints/ 4 | 5 | build/ 6 | dist/ 7 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========= 3 | 4 | 5 | 0.4.0 6 | ----- 7 | - [release-0.4.0] update readme with more examples. [Yudi Xue] 8 | - :tada: Release 0.4.0. [Yudi Xue] 9 | - [release-0.4.0] update tests to be compatibel with argo-sdk. [Yudi 10 | Xue] 11 | 12 | - compatible with argo-client-python 4.0.1 13 | - [release-0.4.0] make argo-python-dsl compatible with latest sdk. [Yudi 14 | Xue] 15 | 16 | - align with argo-python-client 4.0.1 17 | - :tada: Release 0.4.0. [Yudi Xue] 18 | - Update README.md.
[Yudi Xue] 19 | 20 | remove contributors since github provides that info 21 | - :tada: Release 0.3.0 (#15) [Yudi Xue] 22 | 23 | 24 | v0.3.0 (2020-11-17) 25 | ------------------- 26 | - :tada: Release 0.3.0. [Yudi Xue] 27 | - [master] fix CI. [Yudi Xue] 28 | - Release 0.2.0 (#14) [Yudi Xue] 29 | 30 | * [release-0.2.0] use argo-workflows 3.6 31 | 32 | * :tada: Release 0.2.0 33 | 34 | 35 | v0.2.0 (2020-11-17) 36 | ------------------- 37 | 38 | Changes 39 | ~~~~~~~ 40 | - Updated README due to migration to argoproj-labs. [Marek Cermak] 41 | - Return workflow result on submit. [asavpatel92] 42 | 43 | modified: argo/workflows/dsl/_workflow.py 44 | modified: tests/test_workflow.py 45 | 46 | Fix 47 | ~~~ 48 | - Make sure both, swagger and openapi code works. [Marek Cermak] 49 | - :pushpin: Lock dependencies. [Marek Cermak] 50 | 51 | Signed-off-by: Marek Cermak 52 | 53 | modified: Pipfile 54 | modified: Pipfile.lock 55 | - Fixed compilation issue with multiple instances. [Marek Cermak] 56 | 57 | Signed-off-by: Marek Cermak 58 | 59 | modified: argo/workflows/dsl/_base.py 60 | modified: argo/workflows/dsl/_workflow.py 61 | modified: tests/test_workflow.py 62 | - Updated return type and doc strings, improve test. [asavpatel92] 63 | 64 | modified: argo/workflows/dsl/_workflow.py 65 | modified: tests/test_workflow.py 66 | 67 | Other 68 | ~~~~~ 69 | - :tada: Release 0.2.0. [Yudi Xue] 70 | - [release-0.2.0] use argo-workflows 3.6. [Yudi Xue] 71 | - Feat: Add (Cluster)WorkflowTemplate DSL (#13) [Pablo Osinaga] 72 | 73 | * feat: Add WorkflowTempate DSL 74 | 75 | * feat: Add ClusterWorkflowTempate DSL 76 | 77 | * fix: update argo workflow client version to 3.5 78 | - Feat: Add CronWorkflow DSL (#12) [Pablo Osinaga] 79 | - Ci: add integration tests workflows (#11) [Pablo Osinaga] 80 | 81 | * ci(integration): add artifacts workflow 82 | 83 | * ci(integration): add script workflow 84 | 85 | * ci(integration): add loop workflow 86 | 87 | * docs: integration tests 88 | - Ci: add integration tests (#9) [Pablo Osinaga] 89 | - Use argo workflows v3.5 (#8) [binarycrayon] 90 | 91 | * [argo-workflows-3.5] set argo-workflows 3.5 as dep 92 | 93 | Signed-off-by: Yudi Xue <10211+binarycrayon@users.noreply.github.com> 94 | 95 | * [arge-workflows-v3.5] fix a bug 96 | 97 | - image kwarg for Template should not be None 98 | 99 | Signed-off-by: Yudi Xue <10211+binarycrayon@users.noreply.github.com> 100 | 101 | * [argo-workflows-v3.5] update readme and github worklow 102 | 103 | - update workflow to test python 3.6, 3.7, 3.8 104 | - show CI badge 105 | 106 | Signed-off-by: Yudi Xue <10211+binarycrayon@users.noreply.github.com> 107 | 108 | * [argo-workflows-v3.5] only submit wf with generate_name 109 | 110 | - when both 'name' and 'generate_name' present in metadata, name is prefered 111 | - change design to use generate_name only 112 | - Bad model ref (#1) [Ross Crawford-d'Heureuse, Ross 113 | Crawford-d'Heureuse] 114 | 115 | 116 | v0.1.0-rc (2020-03-08) 117 | ---------------------- 118 | 119 | New 120 | ~~~ 121 | - Added `Workflow.to_file` method. [Marek Cermak] 122 | - :clipboard: Document closures and scopes. [Marek Cermak] 123 | - Organize closure imports. [Marek Cermak] 124 | - Multi-line strings are represented as blocks. [Marek Cermak] 125 | 126 | Signed-off-by: Marek Cermak 127 | 128 | modified: argo/workflows/sdk/_utils.py 129 | modified: argo/workflows/sdk/_workflow.py 130 | - Implemented scoped closures. 
[Marek Cermak] 131 | 132 | Signed-off-by: Marek Cermak 133 | 134 | modified: argo/workflows/sdk/_base.py 135 | modified: argo/workflows/sdk/_workflow.py 136 | modified: argo/workflows/sdk/templates.py 137 | - Added `submit` function to submit a Workflow. [Marek Cermak] 138 | 139 | Signed-off-by: Marek Cermak 140 | 141 | modified: argo/workflows/sdk/_base.py 142 | modified: argo/workflows/sdk/_workflow.py 143 | - Closure accepts V1alpha1ScriptTemplate attributes. [Marek Cermak] 144 | 145 | Signed-off-by: Marek Cermak 146 | 147 | modified: argo/workflows/sdk/_base.py 148 | modified: argo/workflows/sdk/templates.py 149 | - Added Workflow utility methods. [Marek Cermak] 150 | 151 | Signed-off-by: Marek Cermak 152 | 153 | modified: Pipfile 154 | modified: Pipfile.lock 155 | modified: argo/workflows/sdk/_workflow.py 156 | modified: argo/workflows/sdk/templates.py 157 | new file: tests/__init__.py 158 | new file: tests/_base.py 159 | new file: tests/data/workflows/hello-world.yaml 160 | new file: tests/test_workflow.py 161 | - Workflow spec can be configured with class properties. [Marek Cermak] 162 | 163 | Signed-off-by: Marek Cermak 164 | 165 | modified: argo/workflows/sdk/_workflow.py 166 | modified: argo/workflows/sdk/templates.py 167 | - Added scripts and closure examples. [Marek Cermak] 168 | 169 | Signed-off-by: Marek Cermak 170 | 171 | new file: examples/scripts.ipynb 172 | new file: examples/scripts.yaml 173 | - Added `closure` Prop. [Marek Cermak] 174 | 175 | Signed-off-by: Marek Cermak 176 | 177 | modified: argo/workflows/sdk/tasks.py 178 | modified: argo/workflows/sdk/templates.py 179 | modified: examples/resource.ipynb 180 | - Added resource example. [Marek Cermak] 181 | 182 | Signed-off-by: Marek Cermak 183 | 184 | modified: argo/workflows/sdk/templates.py 185 | new file: examples/resource.ipynb 186 | new file: examples/resource.yaml 187 | 188 | Changes 189 | ~~~~~~~ 190 | - Do not truncate version in the commit message. [Marek Cermak] 191 | - Argo Workflows SDK -> Argo Workflows DSL. [Marek Cermak] 192 | 193 | Signed-off-by: Marek Cermak 194 | 195 | modified: README.md 196 | renamed: argo/workflows/sdk/__about__.py -> argo/workflows/dsl/__about__.py 197 | renamed: argo/workflows/sdk/__init__.py -> argo/workflows/dsl/__init__.py 198 | renamed: argo/workflows/sdk/_arguments.py -> argo/workflows/dsl/_arguments.py 199 | renamed: argo/workflows/sdk/_base.py -> argo/workflows/dsl/_base.py 200 | renamed: argo/workflows/sdk/_inputs.py -> argo/workflows/dsl/_inputs.py 201 | renamed: argo/workflows/sdk/_outputs.py -> argo/workflows/dsl/_outputs.py 202 | renamed: argo/workflows/sdk/_utils.py -> argo/workflows/dsl/_utils.py 203 | renamed: argo/workflows/sdk/_workflow.py -> argo/workflows/dsl/_workflow.py 204 | renamed: argo/workflows/sdk/tasks.py -> argo/workflows/dsl/tasks.py 205 | renamed: argo/workflows/sdk/templates.py -> argo/workflows/dsl/templates.py 206 | 207 | modified: Makefile 208 | modified: argo/workflows/dsl/__about__.py 209 | modified: argo/workflows/dsl/__init__.py 210 | modified: argo/workflows/dsl/_utils.py 211 | modified: examples/artifacts.ipynb 212 | modified: examples/dag-diamond.ipynb 213 | modified: examples/hello-world-single-task.ipynb 214 | modified: examples/hello-world.ipynb 215 | modified: examples/resource.ipynb 216 | modified: examples/scripts.ipynb 217 | modified: setup.py 218 | modified: tests/__init__.py 219 | modified: tests/test-notebooks.sh 220 | modified: tests/test_workflow.py 221 | - Arguments.artifact -> artifact. 
[Marek Cermak] 222 | 223 | Signed-off-by: Marek Cermak 224 | 225 | modified: argo/workflows/sdk/_arguments.py 226 | modified: argo/workflows/sdk/_inputs.py 227 | modified: argo/workflows/sdk/_outputs.py 228 | modified: argo/workflows/sdk/_workflow.py 229 | modified: argo/workflows/sdk/tasks.py 230 | modified: argo/workflows/sdk/templates.py 231 | - Allow to disable `omitempty` in `to_yaml` [Marek Cermak] 232 | - Added skip CI flags. [Marek Cermak] 233 | 234 | Fix 235 | ~~~ 236 | - Fixed invalid Makefile variable. [Marek Cermak] 237 | - Fixed missing target in the Makefile. [Marek Cermak] 238 | 239 | Signed-off-by: Marek Cermak 240 | 241 | modified: .gitchangelog.rc 242 | modified: Makefile 243 | - Fixed Workflow.submit parameter handling. [Marek Cermak] 244 | 245 | Signed-off-by: Marek Cermak 246 | 247 | modified: argo/workflows/dsl/_base.py 248 | modified: argo/workflows/dsl/_workflow.py 249 | modified: tests/test_workflow.py 250 | - Fix Workflow.from_url. [Yudi Xue - binarycrayon] 251 | 252 | Workflow.from_url should be using url argument to fetch yaml 253 | - Change __extra__ to __origin__ in python >=3.7. [Marek Cermak] 254 | - Fill missing parameter value. [Marek Cermak] 255 | - Fixed multiple inputs/outputs being discarded. [Marek Cermak] 256 | - Omitempty should only discard None. [Marek Cermak] 257 | - Fixed newlines being removed with trailing spaces. [Marek Cermak] 258 | - Fixed closures with undefined scope. [Marek Cermak] 259 | - Closures should not be called. [Marek Cermak] 260 | 261 | Signed-off-by: Marek Cermak 262 | 263 | modified: argo/workflows/sdk/_base.py 264 | modified: argo/workflows/sdk/templates.py 265 | 266 | Other 267 | ~~~~~ 268 | - :tada: Release 0.1.0-rc. [Marek Cermak] 269 | 270 | 271 | v0.1.0-dev (2019-12-19) 272 | ----------------------- 273 | 274 | New 275 | ~~~ 276 | - Added badges to the README. [Marek Cermak] 277 | - Added issue templates and CI workflow. [Marek Cermak] 278 | 279 | Signed-off-by: Marek Cermak 280 | 281 | new file: .github/ISSUE_TEMPLATE/bug_report.md 282 | new file: .github/ISSUE_TEMPLATE/feature_request.md 283 | new file: .github/ISSUE_TEMPLATE/minor-release.md 284 | new file: .github/ISSUE_TEMPLATE/patch-release.md 285 | new file: .github/ISSUE_TEMPLATE/pre-release.md 286 | new file: .github/ISSUE_TEMPLATE/question.md 287 | new file: .github/ISSUE_TEMPLATE/task.md 288 | new file: .github/workflows/ci.yml 289 | new file: .github/workflows/package-release.yml 290 | - Added notebook tests. [Marek Cermak] 291 | 292 | Signed-off-by: Marek Cermak 293 | 294 | modified: Pipfile.lock 295 | modified: examples/artifacts.ipynb 296 | modified: examples/dag-diamond.ipynb 297 | modified: examples/hello-world-single-task.ipynb 298 | modified: examples/hello-world.ipynb 299 | new file: tests/test-notebooks.sh 300 | - Updated README with the Artifact example. [Marek Cermak] 301 | - Updated README with Dag Diamond example. [Marek Cermak] 302 | - Artifact passing. 
[Marek Cermak] 303 | 304 | Signed-off-by: Marek Cermak 305 | 306 | modified: Pipfile 307 | modified: Pipfile.lock 308 | modified: argo/workflows/sdk/_arguments.py 309 | modified: argo/workflows/sdk/_inputs.py 310 | new file: argo/workflows/sdk/_outputs.py 311 | modified: argo/workflows/sdk/_workflow.py 312 | modified: argo/workflows/sdk/tasks.py 313 | modified: argo/workflows/sdk/templates.py 314 | new file: examples/artifacts.ipynb 315 | new file: examples/artifacts.yaml 316 | modified: examples/dag-diamond.ipynb 317 | modified: examples/dag-diamond.yaml 318 | - Updated README with Hello World example. [Marek Cermak] 319 | - Added possibility to pass parameters to tasks. [Marek Cermak] 320 | 321 | Signed-off-by: Marek Cermak 322 | 323 | modified: argo/workflows/sdk/_arguments.py 324 | modified: argo/workflows/sdk/_base.py 325 | modified: argo/workflows/sdk/_workflow.py 326 | modified: argo/workflows/sdk/tasks.py 327 | modified: argo/workflows/sdk/templates.py 328 | new file: examples/dag-diamond.ipynb 329 | new file: examples/dag-diamond.yaml 330 | - Allow input parameters to the template spec. [Marek Cermak] 331 | - Added hello-world example. [Marek Cermak] 332 | 333 | Signed-off-by: Marek Cermak 334 | 335 | new file: ../../examples/hello-world.ipynb 336 | new file: ../../examples/hello-world.yaml 337 | - New: usr: Initial implementation of @template. [Marek Cermak] 338 | 339 | Signed-off-by: Marek Cermak 340 | 341 | modified: sdk/__init__.py 342 | modified: sdk/_workflow.py 343 | new file: sdk/_utils.py 344 | new file: sdk/templates.py 345 | renamed: sdk/task.py -> sdk/tasks.py 346 | - Initial implementation of the Workflow class. [Marek Cermak] 347 | 348 | Signed-off-by: Marek Cermak 349 | 350 | modified: sdk/__init__.py 351 | modified: sdk/_workflow.py 352 | - New: dev: Initial implementation of a @task. [Marek Cermak] 353 | 354 | Signed-off-by: Marek Cermak 355 | 356 | new file: argo/workflows/__init__.py 357 | new file: argo/workflows/sdk/__init__.py 358 | new file: argo/workflows/sdk/_base.py 359 | new file: argo/workflows/sdk/_task.py 360 | - Added .gitignore. [Marek Cermak] 361 | 362 | Changes 363 | ~~~~~~~ 364 | - Use pure pip instead of pipenv for the CI. [Marek Cermak] 365 | - Input parameters have to be provided explicitly. [Marek Cermak] 366 | 367 | Signed-off-by: Marek Cermak 368 | 369 | new file: argo/workflows/sdk/_inputs.py 370 | modified: argo/workflows/sdk/_arguments.py 371 | modified: argo/workflows/sdk/_base.py 372 | modified: argo/workflows/sdk/_workflow.py 373 | modified: argo/workflows/sdk/tasks.py 374 | modified: argo/workflows/sdk/templates.py 375 | modified: examples/dag-diamond.ipynb 376 | - Refactor template specification and compilation. [Marek Cermak] 377 | 378 | Signed-off-by: Marek Cermak 379 | 380 | modified: argo/workflows/sdk/_base.py 381 | modified: argo/workflows/sdk/_workflow.py 382 | modified: argo/workflows/sdk/tasks.py 383 | modified: argo/workflows/sdk/templates.py 384 | modified: examples/hello-world.ipynb 385 | - Compile a Workflow on instance initialization. [Marek Cermak] 386 | 387 | Signed-off-by: Marek Cermak 388 | 389 | modified: Pipfile 390 | modified: Pipfile.lock 391 | modified: argo/workflows/sdk/_workflow.py 392 | modified: argo/workflows/sdk/tasks.py 393 | modified: argo/workflows/sdk/templates.py 394 | modified: examples/hello-world.yaml 395 | - Excluded some of the props from the task spec. [Marek Cermak] 396 | 397 | Fix 398 | ~~~ 399 | - Fixed TTY issue with the CI. [Marek Cermak] 400 | - Fixed CI python permission issue. 
[Marek Cermak] 401 | - Fixed CI permission issues. [Marek Cermak] 402 | - Fixed missing s2i binary in the CI. [Marek Cermak] 403 | - Fixed misplaced result of compilation hook. [Marek Cermak] 404 | 405 | Signed-off-by: Marek Cermak 406 | 407 | modified: argo/workflows/sdk/_base.py 408 | modified: argo/workflows/sdk/tasks.py 409 | modified: examples/hello-world-single-task.yaml 410 | - Fixed invalid task template reference. [Marek Cermak] 411 | 412 | Signed-off-by: Marek Cermak 413 | 414 | modified: argo/workflows/sdk/_base.py 415 | modified: argo/workflows/sdk/_workflow.py 416 | modified: argo/workflows/sdk/tasks.py 417 | new file: examples/hello-world-single-task.ipynb 418 | new file: examples/hello-world-single-task.yaml 419 | - Fixed spec return annotation. [Marek Cermak] 420 | 421 | Signed-off-by: Marek Cermak 422 | 423 | modified: argo/workflows/sdk/_base.py 424 | modified: argo/workflows/sdk/_workflow.py 425 | - Fixed issue with argument passing. [Marek Cermak] 426 | 427 | Signed-off-by: Marek Cermak 428 | 429 | modified: argo/workflows/sdk/_base.py 430 | modified: argo/workflows/sdk/_workflow.py 431 | modified: examples/hello-world.ipynb 432 | - Allow a Spec to be called as a function. [Marek Cermak] 433 | 434 | Signed-off-by: Marek Cermak 435 | 436 | new file: argo/workflows/sdk/__about__.py 437 | modified: argo/workflows/sdk/_base.py 438 | modified: argo/workflows/sdk/_workflow.py 439 | modified: argo/workflows/sdk/tasks.py 440 | 441 | 442 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | graft docs/ 2 | 3 | # manifest files 4 | include Pipfile 5 | include requirements.txt 6 | 7 | # additional metadata 8 | include CHANGELOG.rst 9 | include LICENSE 10 | 11 | # readme 12 | include README.md 13 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PACKAGE_NAME = workflows.dsl 2 | PACKAGE_DESCRIPTION = Python DSL for Argo Workflows 3 | 4 | CURRENT_DIR ?= $(shell pwd) 5 | 6 | define get_branch 7 | $(shell git branch | sed -n '/\* /s///p') 8 | endef 9 | 10 | define get_tag 11 | $(shell \ 12 | if [ -z "`git status --porcelain`" ]; then \ 13 | git describe \ 14 | --exact-match \ 15 | --tags HEAD 2>/dev/null || (>&2 echo "Tag has not been created."); \ 16 | fi \ 17 | ) 18 | endef 19 | 20 | define get_tree_state 21 | $(shell \ 22 | if [ -z "`git status --porcelain`" ]; then \ 23 | echo "clean"; \ 24 | else \ 25 | echo "dirty"; \ 26 | fi \ 27 | ) 28 | endef 29 | 30 | GIT_COMMIT = $(shell git rev-parse HEAD) 31 | 32 | GIT_BRANCH = $(call get_branch) 33 | GIT_TAG = $(call get_tag) 34 | GIT_TREE_STATE = $(call get_tree_state) 35 | 36 | ifeq (${GIT_TAG},) 37 | GIT_TAG = $(shell git rev-parse --abbrev-ref HEAD) 38 | endif 39 | 40 | VERSION ?= $(shell b="${GIT_BRANCH}"; v="$${b/release-/}.0"; echo "$${v:0:5}") 41 | 42 | PYPI_REPOSITORY ?= https://upload.pypi.org/legacy/ 43 | 44 | 45 | .PHONY: all 46 | all: 47 | @echo "Nothing to do." 48 | 49 | 50 | .PHONY: patch 51 | patch: SHELL:=/bin/bash 52 | patch: all 53 | - rm -rf build/ dist/ 54 | - git tag --delete "v${VERSION}" 55 | 56 | $(MAKE) changelog 57 | 58 | sed -i "s/__version__ = \(.*\)/__version__ = \"${VERSION}\"/g" argo/workflows/dsl/__about__.py 59 | 60 | python setup.py sdist bdist_wheel 61 | twine check dist/* || (echo "Twine check did not pass. Aborting."; exit 1) 62 | 63 | git commit -a -m ":wrench: Patch ${VERSION}" --signoff 64 | git tag -a "v${VERSION}" -m "Patch ${VERSION}" 65 | 66 | 67 | .PHONY: release 68 | release: SHELL:=/bin/bash 69 | release: all 70 | - rm -rf build/ dist/ 71 | - git tag --delete "v${VERSION}" 72 | 73 | $(MAKE) changelog 74 | 75 | sed -i "s/__version__ = \(.*\)/__version__ = \"${VERSION}\"/g" argo/workflows/dsl/__about__.py 76 | 77 | python setup.py sdist bdist_wheel 78 | twine check dist/* || (echo "Twine check did not pass. Aborting."; exit 1) 79 | 80 | v=${VERSION}; git commit -a -m ":tada: Release $${v}" --signoff 81 | v=${VERSION}; git tag -a "v${VERSION}" -m "Release $${v}" 82 | 83 | 84 | validate: 85 | @echo "Validating version '${VERSION}' on branch '${GIT_BRANCH}'" 86 | 87 | if [ "$(shell python -c \ 88 | "from semantic_version import validate; print( validate('${VERSION}') )" \ 89 | )" != "True" ]; then \ 90 | echo "Invalid version.
Aborting."; \ 91 | exit 1; \ 92 | fi 93 | 94 | changelog: 95 | RELEASE_VERSION=${VERSION} ./scripts/generate_changelog.sh 96 | -------------------------------------------------------------------------------- /argo/workflows/__init__.py: -------------------------------------------------------------------------------- 1 | __path__ = __import__('pkgutil').extend_path(__path__, __name__) 2 | -------------------------------------------------------------------------------- /argo/workflows/dsl/__about__.py: -------------------------------------------------------------------------------- 1 | """About this package.""" 2 | 3 | __all__ = [ 4 | "__title__", 5 | "__summary__", 6 | "__uri__", 7 | "__version__", 8 | "__author__", 9 | "__email__", 10 | "__license__", 11 | "__copyright__", 12 | ] 13 | 14 | __title__ = "argo-workflows-dsl" 15 | __summary__ = "DSL for Argo Workflows" 16 | __uri__ = "https://github.com/CermakM/argo-python-dsl" 17 | 18 | __version__ = "0.4.0" 19 | 20 | __author__ = "Yudi Xue, Marek Cermak" 21 | __email__ = "binarycrayon@gmail.com, macermak@redhat.com" 22 | 23 | __license__ = "Apache Software License" 24 | __copyright__ = "Copyright 2019 {0}".format(__author__) 25 | -------------------------------------------------------------------------------- /argo/workflows/dsl/__init__.py: -------------------------------------------------------------------------------- 1 | """Argo Workflows Python DSL.""" 2 | 3 | __all__ = [ 4 | # modules 5 | "tasks", 6 | "templates", 7 | # decorators 8 | "task", 9 | "template", 10 | # main 11 | "ClusterWorkflowTemplate", 12 | "CronWorkflow", 13 | "Workflow", 14 | "WorkflowTemplate", 15 | ] 16 | 17 | # modules 18 | from . import tasks 19 | from . import templates 20 | 21 | # decorators 22 | from .tasks import task 23 | from .templates import template 24 | 25 | # main 26 | from ._workflow_template import ClusterWorkflowTemplate 27 | from ._cronworkflow import CronWorkflow 28 | from ._workflow import Workflow 29 | from ._workflow_template import WorkflowTemplate 30 | -------------------------------------------------------------------------------- /argo/workflows/dsl/_arguments.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | from typing import Callable 3 | from typing import List 4 | from typing import Type 5 | from argo.workflows.client.models import ( 6 | V1alpha1Arguments, 7 | V1alpha1Artifact, 8 | V1alpha1Parameter, 9 | ) 10 | 11 | from ._base import Prop 12 | 13 | __all__ = ["artifact", "parameter", "V1alpha1Artifact", "V1alpha1Parameter"] 14 | 15 | 16 | class artifact(Prop, extends=("arguments", V1alpha1Arguments)): 17 | 18 | __model__ = V1alpha1Artifact 19 | 20 | def __call__(self, f: Callable): 21 | artifacts: List[V1alpha1Artifact] = [self] 22 | 23 | prop: Any 24 | prop_name: str 25 | prop_name, prop = self.__extends__ 26 | 27 | if not hasattr(f, "__props__"): 28 | f.__props__ = {prop_name: prop(artifacts=artifacts)} 29 | else: 30 | arguments: Type[prop] = f.__props__.get(prop_name, prop()) 31 | 32 | if not getattr(arguments, "artifacts"): 33 | arguments.artifacts = artifacts 34 | else: 35 | arguments.artifacts.extend(artifacts) 36 | 37 | f.__props__[prop_name] = arguments 38 | 39 | return f 40 | 41 | class parameter(Prop, extends=("arguments", V1alpha1Arguments)): 42 | 43 | __model__ = V1alpha1Parameter 44 | 45 | def __call__(self, f: Callable): 46 | parameters: List[V1alpha1Parameter] = [self] 47 | 48 | prop: Any 49 | prop_name: str 50 | prop_name, prop = self.__extends__ 51 | 52 | if not hasattr(f,
"__props__"): 53 | f.__props__ = {prop_name: prop(parameters=parameters)} 54 | else: 55 | arguments: Type[prop] = f.__props__.get(prop_name, prop()) 56 | 57 | if not getattr(arguments, "parameters"): 58 | arguments.parameters = parameters 59 | else: 60 | arguments.parameters.extend(parameters) 61 | 62 | f.__props__[prop_name] = arguments 63 | 64 | return f 65 | -------------------------------------------------------------------------------- /argo/workflows/dsl/_base.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import typing 3 | 4 | from functools import partial 5 | from functools import wraps 6 | 7 | from typing import Any 8 | from typing import Callable 9 | from typing import Dict 10 | from typing import Generic 11 | from typing import Tuple 12 | from typing import Type 13 | from typing import TypeVar 14 | from typing import Union 15 | 16 | from argo.workflows.client import models 17 | 18 | T = TypeVar("T") 19 | 20 | 21 | class SpecProxy(object): 22 | """Spec Proxy. 23 | 24 | NOTE: This class is not meant to be used directly. 25 | """ 26 | 27 | def __new__(cls, spec: "Spec", obj: Any, callable: bool): 28 | self = super(SpecProxy, cls).__new__(cls) 29 | 30 | self._obj = obj 31 | self._spec = spec 32 | 33 | self._callable = callable 34 | 35 | return self 36 | 37 | def __call__(self, *args, **kwargs): 38 | spec: "Spec" = self._spec 39 | 40 | T = Type[spec.__model__] 41 | 42 | if self._callable: 43 | ret: Any = spec.fget(self._obj, *args, **kwargs) 44 | 45 | if hasattr(spec.__model__, "swagger_types"): 46 | for attr, swagger_type in spec.__model__.swagger_types.items(): 47 | t: Any = getattr(models, swagger_type, None) 48 | if t == type(ret): 49 | setattr(spec, attr, ret) 50 | break 51 | else: 52 | for attr, openapi_type in spec.__model__.openapi_types.items(): 53 | t: Any = getattr(models, openapi_type, None) 54 | if t == type(ret): 55 | setattr(spec, attr, ret) 56 | break 57 | 58 | spec.__init_model__(ret, *args, **kwargs) 59 | 60 | attr_dict: Dict[str, Any] = { 61 | k: spec.__dict__[k] for k in spec.__model__.attribute_map 62 | } 63 | model: T = spec.__model__(**attr_dict) 64 | 65 | self._spec.model = model 66 | 67 | return model 68 | 69 | 70 | class Spec(property): 71 | """Base class for Workflow Specs. 72 | 73 | NOTE: This class is not meant to be used directly. 
74 | """ 75 | 76 | __model__ = T 77 | 78 | def __new__(cls, f: Callable[..., T]): 79 | f.__model__ = cls.__model__ 80 | 81 | self = super().__new__(cls, f) 82 | self.__callable = True 83 | self.__compiled_model = None 84 | 85 | for prop in cls.__model__.attribute_map.keys(): 86 | setattr(self, prop, None) 87 | 88 | # __props__ is set by Type[Prop] decorator 89 | for prop in getattr(f, "__props__", {}): 90 | if prop not in self.__model__.attribute_map: 91 | raise ValueError(f"Unknown property '{prop}' of '{self.__model__}") 92 | 93 | setattr(self, prop, f.__props__[prop]) 94 | 95 | sig: inspect.Signature = inspect.signature(f) 96 | sig = sig.replace(return_annotation=cls.__model__) 97 | setattr(self, "__signature__", sig) 98 | 99 | return self 100 | 101 | def __call__(self, *args, **kwargs) -> T: 102 | # This function is required for the call signature and should NOT be called 103 | raise NotImplementedError("This function shouldn't be called directly.") 104 | 105 | def __get__(self, obj: Any, objtype: Any = None, **kwargs): 106 | if obj is None: 107 | return self 108 | if self.fget is None: 109 | raise AttributeError(f"Unreadable attribute '{self.fget}'") 110 | return SpecProxy(self, obj, callable=self.__callable) 111 | 112 | def __init_model__(self, *args, **kwargs) -> None: 113 | """A hook executed before creation of a model.""" 114 | 115 | @property 116 | def callable(self) -> bool: 117 | """Return whether current spec is callable.""" 118 | return self.__callable 119 | 120 | @callable.setter 121 | def callable(self, is_callable: bool): 122 | """Set whether current spec is callable.""" 123 | self.__callable = is_callable 124 | 125 | @property 126 | def model(self) -> Union[T, None]: 127 | """Return the model specification. 128 | 129 | :returns: T if compiled, otherwise None 130 | """ 131 | return self.__compiled_model 132 | 133 | @model.setter 134 | def model(self, spec: T): 135 | if not isinstance(spec, self.__model__): 136 | raise TypeError(f"Expected type {self.__model__}, got: {type(spec)}") 137 | 138 | self.__compiled_model = spec 139 | 140 | 141 | class PropMeta(type): 142 | """Prop metaclass.""" 143 | 144 | __model__ = Generic[T] 145 | 146 | def __new__( 147 | cls, name: Union[str, T], bases: Tuple[T, ...], props: Dict[str, Any], **kwargs 148 | ): 149 | __model__ = props.get("__model__", None) 150 | 151 | if __model__ is not None: 152 | # typing 153 | if hasattr(__model__, "__origin__"): 154 | try: 155 | # Python 3.5, 3.6 156 | props["__type__"] = __model__.__extra__ 157 | except AttributeError: 158 | # Python >=3.7 159 | props["__type__"] = __model__.__origin__ 160 | bases = ( 161 | *bases, 162 | props["__type__"], 163 | ) 164 | # argo models 165 | elif hasattr(models, __model__.__name__): 166 | bases = ( 167 | *bases, 168 | __model__, 169 | ) 170 | 171 | if kwargs.get("extends") is not None: 172 | props["__extends__"] = kwargs.pop("extends") 173 | 174 | props.update({"name": props.get("name", name)}) 175 | props.update(kwargs) 176 | 177 | return super().__new__(cls, name, bases, props) 178 | 179 | 180 | class Prop(metaclass=PropMeta): 181 | """Base class for Spec props. 182 | 183 | NOTE: This class is not meant to be used directly. 
184 | """ 185 | 186 | def __init_subclass__(cls): 187 | return super().__init_subclass__() 188 | 189 | def __call__(self, f: Callable, **kwargs) -> Callable: 190 | if "name" in kwargs: 191 | name = kwargs.pop("name") 192 | else: 193 | name = self.name 194 | 195 | dct: Dict[str, any] = {name: self, **kwargs} 196 | if not hasattr(f, "__props__"): 197 | f.__props__ = dct 198 | else: 199 | f.__props__.update(dct) 200 | 201 | return f 202 | -------------------------------------------------------------------------------- /argo/workflows/dsl/_cronworkflow.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import json 3 | import logging 4 | import requests 5 | import yaml 6 | 7 | from abc import ABCMeta 8 | from inflection import camelize 9 | from inflection import dasherize 10 | from inflection import underscore 11 | from pathlib import Path 12 | 13 | from typing import Any 14 | from typing import Dict 15 | from typing import List 16 | from typing import Optional 17 | from typing import Set 18 | from typing import Tuple 19 | from typing import Type 20 | from typing import Union 21 | 22 | from argo.workflows.client import ( 23 | ApiClient, V1alpha1CreateCronWorkflowRequest, CronWorkflowServiceApi) 24 | from argo.workflows.client.models import V1alpha1Arguments 25 | from argo.workflows.client.models import V1alpha1Artifact 26 | from argo.workflows.client.models import V1alpha1DAGTask 27 | from argo.workflows.client.models import V1alpha1DAGTemplate 28 | from argo.workflows.client.models import V1alpha1Parameter 29 | from argo.workflows.client.models import V1alpha1Template 30 | from argo.workflows.client.models import V1alpha1WorkflowSpec 31 | from argo.workflows.client.models import V1alpha1CronWorkflow 32 | from argo.workflows.client.models import V1alpha1CronWorkflowSpec 33 | from argo.workflows.client.models import V1ObjectMeta 34 | from argo.workflows.client.models import V1alpha1CreateCronWorkflowRequest 35 | 36 | from . 
import _utils 37 | 38 | __all__ = ["CronWorkflow"] 39 | 40 | 41 | _LOGGER = logging.getLogger(__name__) 42 | 43 | 44 | class CronWorkflowMeta(ABCMeta): 45 | 46 | __model__ = V1alpha1CronWorkflow 47 | 48 | def __new__( 49 | cls, 50 | name: Union[str, Type["CronWorkflow"]], 51 | bases: Tuple[Type["CronWorkflow"], ...], 52 | props: Dict[str, Any], 53 | **kwargs, 54 | ): 55 | workflow_name = dasherize(underscore(name)) 56 | 57 | props["kind"] = "CronWorkflow" 58 | props["api_version"] = "argoproj.io/v1alpha1" 59 | 60 | metadata_dict = {"name": workflow_name} 61 | metadata_dict.update(props.get("__metadata__", {})) 62 | 63 | # Required fields 64 | props["metadata"]: V1ObjectMeta = V1ObjectMeta(**metadata_dict) 65 | props["spec"] = { 66 | k: props.pop(k) 67 | for k in V1alpha1CronWorkflowSpec.attribute_map 68 | if props.get(k) 69 | } 70 | props["workflow_spec"] = { 71 | k: props.pop(k) for k in V1alpha1WorkflowSpec.attribute_map if props.get(k) 72 | } 73 | props["status"] = {} 74 | 75 | bases = (*bases, cls.__model__) 76 | klass = super().__new__(cls, name, bases, props) 77 | 78 | if name == "CronWorkflow": 79 | # No need to initialize any further 80 | return klass 81 | 82 | cls.__compile(klass, name, bases, props) 83 | 84 | return klass 85 | 86 | @classmethod 87 | def __compile( 88 | cls, 89 | klass: "CronWorkflow", 90 | name: str, 91 | bases: Tuple[Type["CronWorkflow"], ...], 92 | props: Dict[str, Any], 93 | **kwargs, 94 | ): 95 | tasks: List[V1alpha1DAGTask] = [] 96 | templates: List[V1alpha1Template] = [] 97 | 98 | scopes: Dict[str, List[Any]] = {} 99 | 100 | # get scopes first 101 | for key, prop in props.items(): 102 | scope = getattr(prop, "__scope__", None) 103 | if scope is None: 104 | continue 105 | 106 | scoped_objects = [prop] 107 | scoped_objects.extend(scopes.get(scope, [])) 108 | 109 | scopes[scope] = scoped_objects 110 | 111 | for key, prop in props.items(): 112 | model = getattr(prop, "__model__", None) 113 | if model is None: 114 | continue 115 | 116 | template: Optional[V1alpha1Template] = None 117 | 118 | # V1alpha1Template 119 | if issubclass(model, V1alpha1Template): 120 | template = prop 121 | 122 | # closures require special treatment 123 | if hasattr(template, "__closure__") and template.script is not None: 124 | template = cls.__compile_closure(template, scopes) 125 | 126 | templates.append(template) 127 | 128 | # V1alpha1DAGTask 129 | elif issubclass(model, V1alpha1DAGTask): 130 | task = prop 131 | tasks.append(task) 132 | 133 | if tasks: 134 | main_template = V1alpha1Template(name="main") 135 | main_template.dag = V1alpha1DAGTemplate(tasks=tasks) 136 | 137 | templates.insert(0, main_template) 138 | 139 | wf_spec_dict: dict = klass.workflow_spec 140 | wf_spec_dict["entrypoint"] = wf_spec_dict.get("entrypoint", "main") 141 | wf_spec_dict["templates"] = templates 142 | 143 | cron_wf_spec_dict: dict = klass.spec 144 | cron_wf_spec_dict["workflow_spec"] = V1alpha1WorkflowSpec(**klass.workflow_spec) 145 | 146 | klass.spec: V1alpha1CronWorkflowSpec = V1alpha1CronWorkflowSpec(**klass.spec) 147 | 148 | @classmethod 149 | def __compile_closure( 150 | cls, template: V1alpha1Template, scopes: Dict[str, Any] = None 151 | ) -> V1alpha1Template: 152 | scopes = scopes or {} 153 | 154 | scope: str = template.__closure__ 155 | if scope is None: 156 | # nothing to do 157 | return template 158 | 159 | script: List[str] = [f"class {scope}:\n"] 160 | script.append(f' """Scoped objects injected from scope \'{scope}\'."""\n\n') 161 | 162 | scoped_objects = scopes.get(scope) or [] 163 | for 
so in scoped_objects:
164 |             source, _ = inspect.getsourcelines(so.__get__(cls).__code__)
165 | 
166 |             for co_start, line in enumerate(source):
167 |                 if line.strip().startswith("def"):
168 |                     break
169 | 
170 |             source = [" @staticmethod\n"] + source[co_start:] + ["\n"]
171 |             script.extend(source)
172 | 
173 |         script = script + [
174 |             "\n",
175 |             *template.script.source.splitlines(keepends=True),
176 |         ]
177 | 
178 |         import_lines: List[str] = []
179 |         source_lines: List[str] = []
180 | 
181 |         import_in_previous_line = False
182 |         for line in script:
183 |             if "import " in line:
184 |                 import_lines.append(line.strip(" "))
185 |                 import_in_previous_line = True
186 |             else:
187 |                 is_blankline = not bool(line.strip())
188 |                 if import_in_previous_line and is_blankline:
189 |                     # blank line separating imports
190 |                     pass
191 |                 else:
192 |                     source_lines.append(line)
193 | 
194 |                 import_in_previous_line = False
195 | 
196 |         # split `imports` and `from` and sort them separately
197 |         import_lines_with_from: Set[str] = set()
198 |         import_lines_without_from: Set[str] = set()
199 | 
200 |         for line in import_lines:
201 |             if "from " in line:
202 |                 import_lines_with_from.add(line)
203 |             else:
204 |                 import_lines_without_from.add(line)
205 | 
206 |         import_lines = [
207 |             *sorted(import_lines_without_from),
208 |             "\n",
209 |             *sorted(import_lines_with_from),
210 |         ]
211 | 
212 |         template.script.source = "".join((*import_lines, "\n", *source_lines))
213 | 
214 |         return template
215 | 
216 | 
217 | class CronWorkflow(metaclass=CronWorkflowMeta):
218 |     """Base class for CronWorkflows."""
219 | 
220 |     __model__ = V1alpha1CronWorkflow
221 | 
222 |     def __init__(self, compile=True):
223 |         """CronWorkflow is the definition of a cron workflow resource.
224 | 
225 |         This class is a base class for Argo CronWorkflows. It is not meant
226 |         to be instantiated directly.
227 | 
228 |         :param compile: bool, whether to compile during initialization [True]
229 |         """
230 |         self.__compiled_model: Union[V1alpha1CronWorkflow, None] = None
231 |         self.__validated = False
232 | 
233 |         if compile:
234 |             self.compile()
235 | 
236 |     def __hash__(self) -> int:
237 |         """Compute hash of this CronWorkflow."""
238 |         return self.to_str().__hash__()
239 | 
240 |     @property
241 |     def model(self) -> Union[V1alpha1CronWorkflow, None]:
242 |         """Return the CronWorkflow model.
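
        The model is populated by `compile()`; an instance constructed with
        `compile=False` returns None until compiled. Sketch (illustrative only;
        `HelloWorld` is a hypothetical CronWorkflow subclass):

            wf = HelloWorld(compile=False)
            assert wf.model is None
            wf.compile()
            assert wf.model is not None  # V1alpha1CronWorkflow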
243 | 244 | :returns: V1alpha1CronWorkflow if compiled, otherwise None 245 | """ 246 | return self.__compiled_model 247 | 248 | @model.setter 249 | def model(self, m: V1alpha1CronWorkflow): 250 | """Set CronWorkflow model.""" 251 | if not isinstance(m, self.__model__): 252 | raise TypeError(f"Expected type {self.__model__}, got: {type(m)}") 253 | 254 | self.__compiled_model = m 255 | 256 | @property 257 | def name(self) -> Union[str, None]: 258 | """Return the CronWorkflow name.""" 259 | return self.metadata.name 260 | 261 | @name.setter 262 | def name(self, name: str): 263 | """Set CronWorkflow name.""" 264 | self.metadata.name = name 265 | 266 | @property 267 | def validated(self) -> bool: 268 | """Return whether this workflow has been validated.""" 269 | return self.__validated 270 | 271 | @classmethod 272 | def from_file(cls, fp: Union[str, Path], validate: bool = True) -> "CronWorkflow": 273 | """Create a CronWorkflow from a file.""" 274 | wf_path = Path(fp) 275 | 276 | wf: Dict[str, Any] = yaml.safe_load(wf_path.read_text()) 277 | return cls.from_dict(wf, validate=validate) 278 | 279 | @classmethod 280 | def from_url(cls, url: str, validate: bool = True) -> "CronWorkflow": 281 | """Create a CronWorkflow from a remote file.""" 282 | resp = requests.get(url) 283 | resp.raise_for_status() 284 | 285 | wf: Dict[str, Any] = yaml.safe_load(resp.text) 286 | return cls.from_dict(wf, validate=validate) 287 | 288 | @classmethod 289 | def from_dict(cls, wf: Dict[str, Any], validate: bool = True) -> "CronWorkflow": 290 | """Create a CronWorkflow from a dict.""" 291 | # work around validation issues and allow empty status 292 | wf["status"] = wf.get("status", {}) or {} 293 | 294 | return cls.from_string(json.dumps(wf), validate=validate) 295 | 296 | @classmethod 297 | def from_string(cls, wf: str, validate: bool = True) -> "CronWorkflow": 298 | """Create a CronWorkflow from a YAML string.""" 299 | body = {"data": wf} 300 | 301 | return cls.__deserialize(body, validate=validate) 302 | 303 | @classmethod 304 | def __deserialize(cls, body: Dict[str, str], *, validate: bool) -> "CronWorkflow": 305 | """Deserialize given object into a CronWorkflow instance.""" 306 | wf: Union[V1alpha1CronWorkflow, Dict[str, Any]] 307 | if validate: 308 | attr = type("Response", (), body) 309 | 310 | wf = ApiClient().deserialize(attr, cls.__model__) 311 | else: 312 | _LOGGER.warning( 313 | "Validation is turned off. This may result in missing or invalid attributes." 314 | ) 315 | wf = json.loads(body["data"]) 316 | 317 | self = cls(compile=False) 318 | 319 | if isinstance(wf, V1alpha1CronWorkflow): 320 | self.__dict__.update( 321 | api_version=wf.api_version, 322 | kind=wf.kind, 323 | metadata=wf.metadata, 324 | spec=wf.spec, 325 | status=wf.status, # a small hack to overcome validation 326 | ) 327 | else: 328 | self.__dict__.update(**wf) 329 | 330 | self.__validated = validate 331 | 332 | return self 333 | 334 | def compile(self) -> V1alpha1CronWorkflow: 335 | """Compile the CronWorkflow class to V1alpha1CronWorkflow model.""" 336 | if self.model is not None: 337 | return self.model 338 | 339 | def _compile(obj: Any): 340 | if hasattr(obj, "__model__"): 341 | if not hasattr(obj, "model"): 342 | # results of compilation (i.e. 
dicts, lists) 343 | return obj 344 | 345 | if hasattr(obj, "model") and obj.model is not None: 346 | # prevents compiled templates from being compiled again 347 | return obj.model 348 | 349 | args: Dict[str, Any] = {} 350 | props: Dict[str, Any] = getattr(obj.fget, "__props__", {}) 351 | 352 | arguments: V1alpha1Arguments = props.get("arguments") 353 | if arguments: 354 | for artifact in getattr(arguments, "artifacts", []) or []: 355 | if hasattr(artifact, "to_dict"): 356 | artifact = V1alpha1Artifact(**artifact.to_dict()) 357 | else: 358 | artifact = V1alpha1Artifact(**artifact) 359 | args[underscore(artifact.name)] = artifact 360 | 361 | for param in getattr(arguments, "parameters", []) or []: 362 | if hasattr(param, "to_dict"): 363 | param = V1alpha1Parameter(**param.to_dict()) 364 | else: 365 | param = V1alpha1Parameter(**param) 366 | args[underscore(param.name)] = param 367 | 368 | # __call__ sets the `model` attribute when compiled successfully 369 | return obj.__get__(self).__call__(**args) 370 | if isinstance(obj, list): 371 | return list(map(_compile, obj)) 372 | if hasattr(obj, "attribute_map"): 373 | for attr in obj.attribute_map: 374 | value: Any = _compile(getattr(obj, attr)) 375 | setattr(obj, attr, value) 376 | 377 | return obj 378 | 379 | self.spec = _compile(self.spec) 380 | self.model = CronWorkflow.__model__(**self.to_dict(omitempty=False)) 381 | 382 | self.__validated = True 383 | 384 | return self.model 385 | 386 | def submit( 387 | self, 388 | client: ApiClient, 389 | namespace: str, 390 | *, 391 | parameters: Optional[Dict[str, str]] = None, 392 | ) -> V1alpha1CronWorkflow: 393 | """Submit an Argo CronWorkflow to a given namespace. 394 | 395 | :returns: V1alpha1CronWorkflow, submitted CronWorkflow 396 | """ 397 | parameters = parameters or {} 398 | 399 | new_parameters: List[V1alpha1Parameter] = [] 400 | for name, value in parameters.items(): 401 | param = V1alpha1Parameter(name=name, value=value) 402 | new_parameters.append(param) 403 | 404 | if getattr(self.spec.workflow_spec, "arguments"): 405 | for p in getattr(self.spec.workflow_spec.arguments, "parameters", []): 406 | if p.name in parameters: 407 | continue # overridden 408 | elif not getattr(p, "value"): 409 | default = getattr(p, "default") 410 | if default is not None: 411 | p.value = default 412 | else: 413 | raise Exception(f"Missing required workflow parameter {p.name}") 414 | 415 | new_parameters.append(p) 416 | 417 | self.spec.arguments.parameters = new_parameters 418 | elif parameters: 419 | raise AttributeError("The CronWorkflow doesn't take any parameters.") 420 | 421 | body: Dict[str, Any] 422 | if not getattr(self, "validated", True): 423 | _LOGGER.debug( 424 | "The CronWorkflow has not been previously validated." 425 | "Sanitizing for serialization." 
426 |             )
427 |             body = camelize(self.to_dict())
428 |         else:
429 |             body = client.sanitize_for_serialization(self)
430 | 
431 |         service = CronWorkflowServiceApi(api_client=client)
432 |         # submit the workflow
433 |         created: V1alpha1CronWorkflow = service.create_cron_workflow(
434 |             namespace, V1alpha1CreateCronWorkflowRequest(
435 |                 cron_workflow=body))
436 | 
437 |         # return the computed CronWorkflow
438 |         return created
439 | 
440 |     def to_file(self, fp: Union[Path, str], fmt="yaml", **kwargs):
441 |         """Dumps the CronWorkflow to a file."""
442 |         d: Dict[str, Any] = _utils.sanitize_for_serialization(self)
443 | 
444 |         opts = kwargs
445 | 
446 |         if fmt == "json":
447 |             Path(fp).write_text(json.dumps(d, **opts))
448 |         else:
449 |             Path(fp).write_text(
450 |                 yaml.dump(d, Dumper=_utils.BlockDumper, **opts) + "\n")
451 | 
452 |     def to_yaml(self, omitempty=True, **kwargs) -> str:
453 |         """Returns the CronWorkflow manifest as a YAML."""
454 |         d: Dict[str, Any] = self.to_dict(omitempty=omitempty)
455 | 
456 |         opts = dict(default_flow_style=False)
457 |         opts.update(kwargs)
458 | 
459 |         serialized = yaml.dump(d, Dumper=_utils.BlockDumper, **opts)
460 | 
461 |         return serialized
462 | 
463 |     def to_dict(self, omitempty=True) -> Dict[str, Any]:
464 |         """Returns the CronWorkflow manifest as a dict.
465 | 
466 |         :param omitempty: bool, whether to omit empty values
467 |         """
468 |         result = V1alpha1CronWorkflow.to_dict(self)
469 | 
470 |         if omitempty:
471 |             return _utils.omitempty(result)
472 | 
473 |         return result
474 | 
--------------------------------------------------------------------------------
/argo/workflows/dsl/_inputs.py:
--------------------------------------------------------------------------------
1 | from typing import Callable
2 | from typing import List
3 | 
4 | from argo.workflows.client.models import (
5 |     V1alpha1Inputs,
6 |     V1alpha1Artifact,
7 |     V1alpha1Parameter,
8 | )
9 | 
10 | from ._base import Prop
11 | from ._arguments import artifact as artifact
12 | from ._arguments import parameter as parameter
13 | 
14 | __all__ = ["inputs", "V1alpha1Artifact", "V1alpha1Parameter"]
15 | 
16 | 
17 | class inputs:
18 |     """Inputs namespace."""
19 | 
20 |     class artifact(artifact, extends=("inputs", V1alpha1Inputs)):
21 | 
22 |         __model__ = V1alpha1Artifact
23 | 
24 |     class parameter(parameter, extends=("inputs", V1alpha1Inputs)):
25 | 
26 |         __model__ = V1alpha1Parameter
27 | 
--------------------------------------------------------------------------------
/argo/workflows/dsl/_outputs.py:
--------------------------------------------------------------------------------
1 | from typing import Callable
2 | from typing import List
3 | 
4 | from argo.workflows.client.models import (
5 |     V1alpha1Outputs,
6 |     V1alpha1Artifact,
7 |     V1alpha1Parameter,
8 | )
9 | 
10 | from ._base import Prop
11 | from ._arguments import artifact as artifact
12 | from ._arguments import parameter as parameter
13 | 
14 | __all__ = ["outputs", "V1alpha1Artifact", "V1alpha1Parameter"]
15 | 
16 | 
17 | class outputs:
18 |     """Outputs namespace."""
19 | 
20 |     class artifact(artifact, extends=("outputs", V1alpha1Outputs)):
21 | 
22 |         __model__ = V1alpha1Artifact
23 | 
24 |     class parameter(parameter, extends=("outputs", V1alpha1Outputs)):
25 | 
26 |         __model__ = V1alpha1Parameter
27 | 
--------------------------------------------------------------------------------
/argo/workflows/dsl/_utils.py:
--------------------------------------------------------------------------------
1 | import re
2 | import yaml
3 | 
4 | from typing import Any
5 | from typing import Dict
6 | 
7 | 
from uuid import uuid4 8 | 9 | try: 10 | from yaml import CDumper as Dumper 11 | except ImportError: 12 | from yaml import Dumper 13 | 14 | """Argo Workflow Python DSL utilities.""" 15 | 16 | __mark = "___%s" % uuid4() 17 | 18 | 19 | class BlockDumper(Dumper): 20 | def represent_scalar(self, tag, value, style=None): 21 | if re.search("\n", value): 22 | style = "|" 23 | # remove trailing spaces and newlines which are not allowed in YAML blocks 24 | value = re.sub(" +\n", "\n", value).strip() 25 | 26 | return super().represent_scalar(tag, value, style) 27 | 28 | 29 | def _omitempty_rec(obj: Dict[str, Any]): 30 | obj[__mark] = True 31 | result: Dict[str, Any] = {} 32 | 33 | for k, v in obj.items(): 34 | if k == __mark: 35 | continue 36 | 37 | if v is None: # empty 38 | continue 39 | 40 | if isinstance(v, dict) and __mark not in v: 41 | result[k] = _omitempty_rec(v) 42 | elif isinstance(v, list): 43 | result[k] = list( 44 | map(lambda d: _omitempty_rec(d) if isinstance(d, dict) else d, v) 45 | ) 46 | else: 47 | result[k] = v 48 | 49 | return result 50 | 51 | 52 | def _remove_marks(obj: Dict[str, Any]): 53 | if __mark not in obj: 54 | return 55 | del obj[__mark] 56 | for v in obj.values(): 57 | if isinstance(v, dict): 58 | _remove_marks(v) 59 | 60 | 61 | def omitempty(obj: Dict[str, Any]) -> Dict[str, Any]: 62 | """Return copy of the object with empty values omitted.""" 63 | try: 64 | result = _omitempty_rec(obj) 65 | finally: 66 | _remove_marks(obj) 67 | 68 | return result 69 | 70 | 71 | def sanitize_for_serialization(obj: Dict[str, Any]) -> Dict[str, Any]: 72 | """Return object sanitized for serialization. 73 | 74 | May be used with a V1alpha1Workflow to sanitize it 75 | back to the original state (i.e. per manifest). 76 | """ 77 | from argo.workflows.client import ApiClient 78 | 79 | cl = ApiClient() 80 | return cl.sanitize_for_serialization(obj) 81 | -------------------------------------------------------------------------------- /argo/workflows/dsl/_workflow.py: -------------------------------------------------------------------------------- 1 | from abc import ABCMeta 2 | 3 | import logging 4 | import re 5 | 6 | import inspect 7 | import json 8 | import yaml 9 | 10 | import pprint 11 | import requests 12 | 13 | from inflection import camelize 14 | from inflection import dasherize 15 | from inflection import underscore 16 | 17 | from pathlib import Path 18 | 19 | from typing import Any 20 | from typing import Callable 21 | from typing import Dict 22 | from typing import List 23 | from typing import Optional 24 | from typing import Set 25 | from typing import Tuple 26 | from typing import Type 27 | from typing import Union 28 | 29 | from argo.workflows.client import ApiClient, WorkflowServiceApi 30 | 31 | from argo.workflows.client.models import V1alpha1Arguments 32 | from argo.workflows.client.models import V1alpha1Artifact 33 | from argo.workflows.client.models import V1alpha1DAGTask 34 | from argo.workflows.client.models import V1alpha1DAGTemplate 35 | from argo.workflows.client.models import V1alpha1Parameter 36 | from argo.workflows.client.models import V1alpha1Template 37 | from argo.workflows.client.models import V1alpha1TemplateRef 38 | from argo.workflows.client.models import V1alpha1Workflow 39 | from argo.workflows.client.models import V1alpha1WorkflowSpec 40 | from argo.workflows.client.models import V1alpha1WorkflowStatus 41 | from argo.workflows.client.models import V1ObjectMeta 42 | from argo.workflows.client.models import V1alpha1WorkflowCreateRequest 43 | 44 | from ._base 
import Prop 45 | from ._base import Spec 46 | from . import _utils 47 | 48 | __all__ = ["Workflow"] 49 | 50 | 51 | _LOGGER = logging.getLogger(__name__) 52 | 53 | 54 | class WorkflowMeta(ABCMeta): 55 | 56 | __model__ = V1alpha1Workflow 57 | 58 | def __new__( 59 | cls, 60 | name: Union[str, Type["Workflow"]], 61 | bases: Tuple[Type["Workflow"], ...], 62 | props: Dict[str, Any], 63 | **kwargs, 64 | ): 65 | workflow_name = dasherize(underscore(name)) 66 | 67 | props["kind"] = "Workflow" 68 | props["api_version"] = "argoproj.io/v1alpha1" 69 | 70 | metadata_dict = dict(name='', generate_name=f"{workflow_name}-") 71 | metadata_dict.update(props.get("__metadata__", {})) 72 | 73 | # Required fields 74 | props["metadata"]: V1ObjectMeta = V1ObjectMeta(**metadata_dict) 75 | props["spec"] = { 76 | k: props.pop(k) for k in V1alpha1WorkflowSpec.attribute_map if props.get(k) 77 | } 78 | props["status"] = {} 79 | 80 | bases = (*bases, cls.__model__) 81 | klass = super().__new__(cls, name, bases, props) 82 | 83 | if name == "Workflow": 84 | # No need to initialize any further 85 | return klass 86 | 87 | cls.__compile(klass, name, bases, props) 88 | 89 | return klass 90 | 91 | @classmethod 92 | def __compile( 93 | cls, 94 | klass: "Workflow", 95 | name: str, 96 | bases: Tuple[Type["Workflow"], ...], 97 | props: Dict[str, Any], 98 | **kwargs, 99 | ): 100 | tasks: List[V1alpha1DAGTask] = [] 101 | templates: List[V1alpha1Template] = [] 102 | 103 | scopes: Dict[str, List[Any]] = {} 104 | 105 | # get scopes first 106 | for key, prop in props.items(): 107 | scope = getattr(prop, "__scope__", None) 108 | if scope is None: 109 | continue 110 | 111 | scoped_objects = [prop] 112 | scoped_objects.extend(scopes.get(scope, [])) 113 | 114 | scopes[scope] = scoped_objects 115 | 116 | for key, prop in props.items(): 117 | model = getattr(prop, "__model__", None) 118 | if model is None: 119 | continue 120 | 121 | template: Optional[V1alpha1Template] = None 122 | 123 | # V1alpha1Template 124 | if issubclass(model, V1alpha1Template): 125 | template = prop 126 | 127 | # closures require special treatment 128 | if hasattr(template, "__closure__") and template.script is not None: 129 | template = cls.__compile_closure(template, scopes) 130 | 131 | templates.append(template) 132 | 133 | # V1alpha1DAGTask 134 | elif issubclass(model, V1alpha1DAGTask): 135 | task = prop 136 | tasks.append(task) 137 | 138 | if tasks: 139 | main_template = V1alpha1Template(name="main") 140 | main_template.dag = V1alpha1DAGTemplate(tasks=tasks) 141 | 142 | templates.insert(0, main_template) 143 | 144 | spec_dict: dict = klass.spec 145 | spec_dict["entrypoint"] = spec_dict.get("entrypoint", "main") 146 | spec_dict["templates"] = templates 147 | 148 | klass.spec: V1alpha1WorkflowSpec = V1alpha1WorkflowSpec(**spec_dict) 149 | 150 | @classmethod 151 | def __compile_closure( 152 | cls, template: V1alpha1Template, scopes: Dict[str, Any] = None 153 | ) -> V1alpha1Template: 154 | scopes = scopes or {} 155 | 156 | scope: str = template.__closure__ 157 | if scope is None: 158 | # nothing to do 159 | return template 160 | 161 | script: List[str] = [f"class {scope}:\n"] 162 | script.append(f' """Scoped objects injected from scope \'{scope}\'."""\n\n') 163 | 164 | scoped_objects = scopes.get(scope) or [] 165 | for so in scoped_objects: 166 | source, _ = inspect.getsourcelines(so.__get__(cls).__code__) 167 | 168 | for co_start, line in enumerate(source): 169 | if line.strip().startswith("def"): 170 | break 171 | 172 | source = [" @staticmethod\n"] + 
source[co_start:] + ["\n"]
173 |             script.extend(source)
174 | 
175 |         script = script + [
176 |             "\n",
177 |             *template.script.source.splitlines(keepends=True),
178 |         ]
179 | 
180 |         import_lines: List[str] = []
181 |         source_lines: List[str] = []
182 | 
183 |         import_in_previous_line = False
184 |         for line in script:
185 |             if "import " in line:
186 |                 import_lines.append(line.strip(" "))
187 |                 import_in_previous_line = True
188 |             else:
189 |                 is_blankline = not bool(line.strip())
190 |                 if import_in_previous_line and is_blankline:
191 |                     # blank line separating imports
192 |                     pass
193 |                 else:
194 |                     source_lines.append(line)
195 | 
196 |                 import_in_previous_line = False
197 | 
198 |         # split `imports` and `from` and sort them separately
199 |         import_lines_with_from: Set[str] = set()
200 |         import_lines_without_from: Set[str] = set()
201 | 
202 |         for line in import_lines:
203 |             if "from " in line:
204 |                 import_lines_with_from.add(line)
205 |             else:
206 |                 import_lines_without_from.add(line)
207 | 
208 |         import_lines = [
209 |             *sorted(import_lines_without_from),
210 |             "\n",
211 |             *sorted(import_lines_with_from),
212 |         ]
213 | 
214 |         template.script.source = "".join((*import_lines, "\n", *source_lines))
215 | 
216 |         return template
217 | 
218 | 
219 | class Workflow(metaclass=WorkflowMeta):
220 |     """Base class for Workflows."""
221 | 
222 |     __model__ = V1alpha1Workflow
223 | 
224 |     def __init__(self, compile=True):
225 |         """Workflow is the definition of a workflow resource.
226 | 
227 |         This class is a base class for Argo Workflows. It is not meant
228 |         to be instantiated directly.
229 | 
230 |         :param compile: bool, whether to compile during initialization [True]
231 |         """
232 |         self.__compiled_model: Union[V1alpha1Workflow, None] = None
233 |         self.__validated = False
234 | 
235 |         if compile:
236 |             self.compile()
237 | 
238 |     def __hash__(self) -> int:
239 |         """Compute hash of this Workflow."""
240 |         return self.to_str().__hash__()
241 | 
242 |     @property
243 |     def model(self) -> Union[V1alpha1Workflow, None]:
244 |         """Return the Workflow model.
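
        Compilation runs during `__init__` by default, so for a hypothetical
        Workflow subclass `HelloWorld` (sketch, illustrative only):

            wf = HelloWorld()             # compile=True by default
            model = wf.model              # V1alpha1Workflow
            assert wf.compile() is model  # compile() returns the cached model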
245 | 
246 |         :returns: V1alpha1Workflow if compiled, otherwise None
247 |         """
248 |         return self.__compiled_model
249 | 
250 |     @model.setter
251 |     def model(self, m: V1alpha1Workflow):
252 |         """Set Workflow model."""
253 |         if not isinstance(m, self.__model__):
254 |             raise TypeError(f"Expected type {self.__model__}, got: {type(m)}")
255 | 
256 |         self.__compiled_model = m
257 | 
258 |     @property
259 |     def name(self) -> Union[str, None]:
260 |         """Return the Workflow name."""
261 |         return self.metadata.name
262 | 
263 |     @name.setter
264 |     def name(self, name: str):
265 |         """Set Workflow name."""
266 |         self.metadata.name = name
267 | 
268 |     @property
269 |     def validated(self) -> bool:
270 |         """Return whether this workflow has been validated."""
271 |         return self.__validated
272 | 
273 |     @classmethod
274 |     def from_file(cls, fp: Union[str, Path], validate: bool = True) -> "Workflow":
275 |         """Create a Workflow from a file."""
276 |         wf_path = Path(fp)
277 | 
278 |         wf: Dict[str, Any] = yaml.safe_load(wf_path.read_text())
279 |         return cls.from_dict(wf, validate=validate)
280 | 
281 |     @classmethod
282 |     def from_url(cls, url: str, validate: bool = True) -> "Workflow":
283 |         """Create a Workflow from a remote file."""
284 |         resp = requests.get(url)
285 |         resp.raise_for_status()
286 | 
287 |         wf: Dict[str, Any] = yaml.safe_load(resp.text)
288 |         return cls.from_dict(wf, validate=validate)
289 | 
290 |     @classmethod
291 |     def from_dict(cls, wf: Dict[str, Any], validate: bool = True) -> "Workflow":
292 |         """Create a Workflow from a dict."""
293 |         # work around validation issues and allow empty status
294 |         wf["status"] = wf.get("status", {}) or {}
295 | 
296 |         return cls.from_string(json.dumps(wf), validate=validate)
297 | 
298 |     @classmethod
299 |     def from_string(cls, wf: str, validate: bool = True) -> "Workflow":
300 |         """Create a Workflow from a YAML string."""
301 |         body = {"data": wf}
302 | 
303 |         return cls.__deserialize(body, validate=validate)
304 | 
305 |     @classmethod
306 |     def __deserialize(cls, body: Dict[str, str], *, validate: bool) -> "Workflow":
307 |         """Deserialize given object into a Workflow instance."""
308 |         wf: Union[V1alpha1Workflow, Dict[str, Any]]
309 |         if validate:
310 |             attr = type("Response", (), body)
311 | 
312 |             wf = ApiClient().deserialize(attr, cls.__model__)
313 |         else:
314 |             _LOGGER.warning(
315 |                 "Validation is turned off. This may result in missing or invalid attributes."
316 |             )
317 |             wf = json.loads(body["data"])
318 | 
319 |         self = cls(compile=False)
320 | 
321 |         if isinstance(wf, V1alpha1Workflow):
322 |             self.__dict__.update(
323 |                 api_version=wf.api_version,
324 |                 kind=wf.kind,
325 |                 metadata=wf.metadata,
326 |                 spec=wf.spec,
327 |                 status=wf.status,  # a small hack to overcome validation
328 |             )
329 |         else:
330 |             self.__dict__.update(**wf)
331 | 
332 |         self.__validated = validate
333 | 
334 |         return self
335 | 
336 |     def compile(self) -> V1alpha1Workflow:
337 |         """Compile the Workflow class to V1alpha1Workflow model."""
338 |         if self.model is not None:
339 |             return self.model
340 | 
341 |         def _compile(obj: Any):
342 |             if hasattr(obj, "__model__"):
343 |                 if not hasattr(obj, "model"):
344 |                     # results of compilation (i.e.
dicts, lists) 345 | return obj 346 | 347 | if hasattr(obj, "model") and obj.model is not None: 348 | # prevents compiled templates from being compiled again 349 | return obj.model 350 | 351 | args: Dict[str, Any] = {} 352 | props: Dict[str, Any] = getattr(obj.fget, "__props__", {}) 353 | 354 | arguments: V1alpha1Arguments = props.get("arguments") 355 | if arguments: 356 | for artifact in getattr(arguments, "artifacts", []) or []: 357 | if hasattr(artifact, "to_dict"): 358 | artifact = V1alpha1Artifact(**artifact.to_dict()) 359 | else: 360 | artifact = V1alpha1Artifact(**artifact) 361 | args[underscore(artifact.name)] = artifact 362 | 363 | for param in getattr(arguments, "parameters", []) or []: 364 | if hasattr(param, "to_dict"): 365 | param = V1alpha1Parameter(**param.to_dict()) 366 | else: 367 | param = V1alpha1Parameter(**param) 368 | args[underscore(param.name)] = param 369 | 370 | # __call__ sets the `model` attribute when compiled successfully 371 | return obj.__get__(self).__call__(**args) 372 | if isinstance(obj, list): 373 | return list(map(_compile, obj)) 374 | if hasattr(obj, "attribute_map"): 375 | for attr in obj.attribute_map: 376 | value: Any = _compile(getattr(obj, attr)) 377 | setattr(obj, attr, value) 378 | 379 | return obj 380 | 381 | self.spec = _compile(self.spec) 382 | self.model = Workflow.__model__(**self.to_dict(omitempty=False)) 383 | 384 | self.__validated = True 385 | 386 | return self.model 387 | 388 | def submit( 389 | self, 390 | client: ApiClient, 391 | namespace: str, 392 | *, 393 | parameters: Optional[Dict[str, str]] = None, 394 | ) -> V1alpha1Workflow: 395 | """Submit an Argo Workflow to a given namespace. 396 | 397 | :returns: V1alpha1Workflow, submitted Workflow 398 | """ 399 | parameters = parameters or {} 400 | 401 | new_parameters: List[V1alpha1Parameter] = [] 402 | for name, value in parameters.items(): 403 | param = V1alpha1Parameter(name=name, value=value) 404 | new_parameters.append(param) 405 | 406 | if getattr(self.spec, "arguments"): 407 | for p in getattr(self.spec.arguments, "parameters", []): 408 | if p.name in parameters: 409 | continue # overridden 410 | elif not getattr(p, "value"): 411 | default = getattr(p, "default") 412 | if default is not None: 413 | p.value = default 414 | else: 415 | raise Exception(f"Missing required workflow parameter {p.name}") 416 | 417 | new_parameters.append(p) 418 | 419 | self.spec.arguments.parameters = new_parameters 420 | elif parameters: 421 | raise AttributeError("The Workflow doesn't take any parameters.") 422 | 423 | body: Dict[str, Any] 424 | if not getattr(self, "validated", True): 425 | _LOGGER.debug( 426 | "The Workflow has not been previously validated." 427 | "Sanitizing for serialization." 
428 |             )
429 |             body = camelize(self.to_dict())
430 |         else:
431 |             body = client.sanitize_for_serialization(self)
432 | 
433 |         service = WorkflowServiceApi(api_client=client)
434 |         # submit the workflow
435 |         created: V1alpha1Workflow = service.create_workflow(
436 |             namespace, V1alpha1WorkflowCreateRequest(workflow=body)
437 |         )
438 | 
439 |         # return the computed Workflow
440 |         return created
441 | 
442 |     def to_file(self, fp: Union[Path, str], fmt="yaml", **kwargs):
443 |         """Dumps the Workflow to a file."""
444 |         d: Dict[str, Any] = _utils.sanitize_for_serialization(self)
445 | 
446 |         opts = kwargs
447 | 
448 |         if fmt == "json":
449 |             Path(fp).write_text(json.dumps(d, **opts))
450 |         else:
451 |             Path(fp).write_text(
452 |                 yaml.dump(d, Dumper=_utils.BlockDumper, **opts) + "\n"
453 |             )
454 | 
455 |     def to_yaml(self, omitempty=True, **kwargs) -> str:
456 |         """Returns the Workflow manifest as a YAML."""
457 |         d: Dict[str, Any] = self.to_dict(omitempty=omitempty)
458 | 
459 |         opts = dict(default_flow_style=False)
460 |         opts.update(kwargs)
461 | 
462 |         serialized = yaml.dump(d, Dumper=_utils.BlockDumper, **opts)
463 | 
464 |         return serialized
465 | 
466 |     def to_dict(self, omitempty=True) -> Dict[str, Any]:
467 |         """Returns the Workflow manifest as a dict.
468 | 
469 |         :param omitempty: bool, whether to omit empty values
470 |         """
471 |         result = V1alpha1Workflow.to_dict(self)
472 | 
473 |         if omitempty:
474 |             return _utils.omitempty(result)
475 | 
476 |         return result
477 | 
--------------------------------------------------------------------------------
/argo/workflows/dsl/_workflow_template.py:
--------------------------------------------------------------------------------
1 | from abc import ABCMeta
2 | 
3 | import logging
4 | 
5 | import inspect
6 | import json
7 | import yaml
8 | 
9 | import requests
10 | 
11 | 
12 | from inflection import dasherize
13 | from inflection import underscore
14 | 
15 | from pathlib import Path
16 | 
17 | from typing import Any
18 | from typing import Dict
19 | from typing import List
20 | from typing import Optional
21 | from typing import Set
22 | from typing import Tuple
23 | from typing import Type
24 | from typing import Union
25 | 
26 | from argo.workflows.client import ApiClient
27 | 
28 | from argo.workflows.client.models import V1alpha1Arguments
29 | from argo.workflows.client.models import V1alpha1Artifact
30 | from argo.workflows.client.models import V1alpha1DAGTask
31 | from argo.workflows.client.models import V1alpha1DAGTemplate
32 | from argo.workflows.client.models import V1alpha1Parameter
33 | from argo.workflows.client.models import V1alpha1Template
34 | from argo.workflows.client.models import V1alpha1ClusterWorkflowTemplate
35 | from argo.workflows.client.models import V1alpha1WorkflowTemplate
36 | from argo.workflows.client.models import V1alpha1WorkflowTemplateSpec
37 | from argo.workflows.client.models import V1ObjectMeta
38 | 
39 | 
40 | from .
import _utils 41 | 42 | __all__ = ["WorkflowTemplate"] 43 | 44 | 45 | _LOGGER = logging.getLogger(__name__) 46 | 47 | 48 | class WorkflowTemplateMeta(ABCMeta): 49 | 50 | __model__ = V1alpha1WorkflowTemplate 51 | __kind__ = "WorkflowTemplate" 52 | 53 | def __new__( 54 | cls, 55 | name: Union[str, Type["WorkflowTemplate"]], 56 | bases: Tuple[Type["WorkflowTemplate"], ...], 57 | props: Dict[str, Any], 58 | **kwargs, 59 | ): 60 | workflow_name = dasherize(underscore(name)) 61 | 62 | props["kind"] = cls.__kind__ 63 | props["api_version"] = "argoproj.io/v1alpha1" 64 | 65 | metadata_dict = {"name": workflow_name} 66 | metadata_dict.update(props.get("__metadata__", {})) 67 | 68 | # Required fields 69 | props["metadata"]: V1ObjectMeta = V1ObjectMeta(**metadata_dict) 70 | props["spec"] = { 71 | k: props.pop(k) 72 | for k in V1alpha1WorkflowTemplateSpec.attribute_map 73 | if props.get(k) 74 | } 75 | 76 | bases = (*bases, cls.__model__) 77 | klass = super().__new__(cls, name, bases, props) 78 | 79 | if name == cls.__kind__: 80 | # No need to initialize any further 81 | return klass 82 | 83 | cls.__compile(klass, name, bases, props) 84 | 85 | return klass 86 | 87 | @classmethod 88 | def __compile( 89 | cls, 90 | klass: "WorkflowTemplate", 91 | name: str, 92 | bases: Tuple[Type["WorkflowTemplate"], ...], 93 | props: Dict[str, Any], 94 | **kwargs, 95 | ): 96 | tasks: List[V1alpha1DAGTask] = [] 97 | templates: List[V1alpha1Template] = [] 98 | 99 | scopes: Dict[str, List[Any]] = {} 100 | 101 | # get scopes first 102 | for key, prop in props.items(): 103 | scope = getattr(prop, "__scope__", None) 104 | if scope is None: 105 | continue 106 | 107 | scoped_objects = [prop] 108 | scoped_objects.extend(scopes.get(scope, [])) 109 | 110 | scopes[scope] = scoped_objects 111 | 112 | for key, prop in props.items(): 113 | model = getattr(prop, "__model__", None) 114 | if model is None: 115 | continue 116 | 117 | template: Optional[V1alpha1Template] = None 118 | 119 | # V1alpha1Template 120 | if issubclass(model, V1alpha1Template): 121 | template = prop 122 | 123 | # closures require special treatment 124 | if hasattr(template, "__closure__") and template.script is not None: 125 | template = cls.__compile_closure(template, scopes) 126 | 127 | templates.append(template) 128 | 129 | # V1alpha1DAGTask 130 | elif issubclass(model, V1alpha1DAGTask): 131 | task = prop 132 | tasks.append(task) 133 | 134 | if tasks: 135 | main_template = V1alpha1Template(name="main") 136 | main_template.dag = V1alpha1DAGTemplate(tasks=tasks) 137 | 138 | templates.insert(0, main_template) 139 | 140 | spec_dict: dict = klass.spec 141 | spec_dict["entrypoint"] = spec_dict.get("entrypoint", "main") 142 | spec_dict["templates"] = templates 143 | 144 | klass.spec: V1alpha1WorkflowTemplateSpec = V1alpha1WorkflowTemplateSpec( 145 | **spec_dict 146 | ) 147 | 148 | @classmethod 149 | def __compile_closure( 150 | cls, template: V1alpha1Template, scopes: Dict[str, Any] = None 151 | ) -> V1alpha1Template: 152 | scopes = scopes or {} 153 | 154 | scope: str = template.__closure__ 155 | if scope is None: 156 | # nothing to do 157 | return template 158 | 159 | script: List[str] = [f"class {scope}:\n"] 160 | script.append(f' """Scoped objects injected from scope \'{scope}\'."""\n\n') 161 | 162 | scoped_objects = scopes.get(scope) or [] 163 | for so in scoped_objects: 164 | source, _ = inspect.getsourcelines(so.__get__(cls).__code__) 165 | 166 | for co_start, line in enumerate(source): 167 | if line.strip().startswith("def"): 168 | break 169 | 170 | source = [" 
@staticmethod\n"] + source[co_start:] + ["\n"] 171 | script.extend(source) 172 | 173 | script = script + [ 174 | "\n", 175 | *template.script.source.splitlines(keepends=True), 176 | ] 177 | 178 | import_lines: List[str] = [] 179 | source_lines: List[str] = [] 180 | 181 | import_in_previous_line = False 182 | for line in script: 183 | if "import " in line: 184 | import_lines.append(line.strip(" ")) 185 | import_in_previous_line = True 186 | else: 187 | is_blankline = not bool(line.strip()) 188 | if import_in_previous_line and is_blankline: 189 | # blank line separating imports 190 | pass 191 | else: 192 | source_lines.append(line) 193 | 194 | import_in_previous_line = False 195 | 196 | # split `imports` and `from` and sort them separately 197 | import_lines_with_from: Set[str] = set() 198 | import_lines_without_from: Set[str] = set() 199 | 200 | for line in import_lines: 201 | if "from " in line: 202 | import_lines_with_from.add(line) 203 | else: 204 | import_lines_without_from.add(line) 205 | 206 | import_lines = [ 207 | *sorted(import_lines_without_from), 208 | "\n", 209 | *sorted(import_lines_with_from), 210 | ] 211 | 212 | template.script.source = "".join((*import_lines, "\n", *source_lines)) 213 | 214 | return template 215 | 216 | 217 | class WorkflowTemplate(metaclass=WorkflowTemplateMeta): 218 | """Base class for Workflows.""" 219 | 220 | __model__ = V1alpha1WorkflowTemplate 221 | 222 | def __init__(self, compile=True): 223 | """WorkflowTemplate is the definition of a workflow resource. 224 | 225 | This class is a base class for Argo Workflows. It is not meant 226 | to be instantiated directly. 227 | 228 | :para compile: bool, whether to compile during initialization [True] 229 | """ 230 | self._compiled_model: Union[V1alpha1WorkflowTemplate, None] = None 231 | self.__validated = False 232 | 233 | if compile: 234 | self.compile() 235 | 236 | def __hash__(self) -> str: 237 | """Compute hash of this WorkflowTemplate.""" 238 | return self.to_str().__hash__() 239 | 240 | @property 241 | def model(self) -> Union[V1alpha1WorkflowTemplate, None]: 242 | """Return the WorkflowTemplate model. 
243 | 244 | :returns: V1alpha1WorkflowTemplate if compiled, otherwise None 245 | """ 246 | return self._compiled_model 247 | 248 | @model.setter 249 | def model(self, m: V1alpha1WorkflowTemplate): 250 | """Set WorkflowTemplate model.""" 251 | if not isinstance(m, self.__model__): 252 | raise TypeError(f"Expected type {self.__model__}, got: {type(m)}") 253 | 254 | self._compiled_model = m 255 | 256 | @property 257 | def name(self) -> Union[str, None]: 258 | """Return the WorkflowTemplate name.""" 259 | return self.metadata.name 260 | 261 | @name.setter 262 | def name(self, name: str): 263 | """Set WorkflowTemplate name.""" 264 | self.metadata.name = name 265 | 266 | @property 267 | def validated(self) -> bool: 268 | """Return whether this workflow has been validated.""" 269 | return self.__validated 270 | 271 | @classmethod 272 | def from_file( 273 | cls, fp: Union[str, Path], validate: bool = True 274 | ) -> "WorkflowTemplate": 275 | """Create a WorkflowTemplate from a file.""" 276 | wf_path = Path(fp) 277 | 278 | wf: Dict[str, Any] = yaml.safe_load(wf_path.read_text()) 279 | return cls.from_dict(wf, validate=validate) 280 | 281 | @classmethod 282 | def from_url(cls, url: str, validate: bool = True) -> "WorkflowTemplate": 283 | """Create a WorkflowTemplate from a remote file.""" 284 | resp = requests.get(url) 285 | resp.raise_for_status() 286 | 287 | wf: Dict[str, Any] = yaml.safe_load(resp.text) 288 | return cls.from_dict(wf, validate=validate) 289 | 290 | @classmethod 291 | def from_dict(cls, wf: Dict[str, Any], validate: bool = True) -> "WorkflowTemplate": 292 | """Create a WorkflowTemplate from a dict.""" 293 | return cls.from_string(json.dumps(wf), validate=validate) 294 | 295 | @classmethod 296 | def from_string(cls, wf: str, validate: bool = True) -> "WorkflowTemplate": 297 | """Create a WorkflowTemplate from a YAML string.""" 298 | body = {"data": wf} 299 | 300 | return cls.__deserialize(body, validate=validate) 301 | 302 | @classmethod 303 | def __deserialize( 304 | cls, body: Dict[str, str], *, validate: bool 305 | ) -> "WorkflowTemplate": 306 | """Deserialize given object into a WorkflowTemplate instance.""" 307 | wf: Union[V1alpha1WorkflowTemplate, Dict[str, Any]] 308 | if validate: 309 | attr = type("Response", (), body) 310 | 311 | wf = ApiClient().deserialize(attr, cls.__model__) 312 | else: 313 | _LOGGER.warning( 314 | "Validation is turned off. This may result in missing or invalid attributes." 315 | ) 316 | wf = json.loads(body["data"]) 317 | 318 | self = cls(compile=False) 319 | 320 | if isinstance(wf, cls.__model__): 321 | self.__dict__.update( 322 | api_version=wf.api_version, 323 | kind=wf.kind, 324 | metadata=wf.metadata, 325 | spec=wf.spec, 326 | ) 327 | else: 328 | self.__dict__.update(**wf) 329 | 330 | self.__validated = validate 331 | 332 | return self 333 | 334 | def compile(self) -> V1alpha1WorkflowTemplate: 335 | """Compile the WorkflowTemplate class to V1alpha1WorkflowTemplate model.""" 336 | if self.model is not None: 337 | return self.model 338 | 339 | def _compile(obj: Any): 340 | if hasattr(obj, "__model__"): 341 | if not hasattr(obj, "model"): 342 | # results of compilation (i.e. 
dicts, lists) 343 | return obj 344 | 345 | if hasattr(obj, "model") and obj.model is not None: 346 | # prevents compiled templates from being compiled again 347 | return obj.model 348 | 349 | args: Dict[str, Any] = {} 350 | props: Dict[str, Any] = getattr(obj.fget, "__props__", {}) 351 | 352 | arguments: V1alpha1Arguments = props.get("arguments") 353 | if arguments: 354 | for artifact in getattr(arguments, "artifacts", []) or []: 355 | if hasattr(artifact, "to_dict"): 356 | artifact = V1alpha1Artifact(**artifact.to_dict()) 357 | else: 358 | artifact = V1alpha1Artifact(**artifact) 359 | args[underscore(artifact.name)] = artifact 360 | 361 | for param in getattr(arguments, "parameters", []) or []: 362 | if hasattr(param, "to_dict"): 363 | param = V1alpha1Parameter(**param.to_dict()) 364 | else: 365 | param = V1alpha1Parameter(**param) 366 | args[underscore(param.name)] = param 367 | 368 | # __call__ sets the `model` attribute when compiled successfully 369 | return obj.__get__(self).__call__(**args) 370 | if isinstance(obj, list): 371 | return list(map(_compile, obj)) 372 | if hasattr(obj, "attribute_map"): 373 | for attr in obj.attribute_map: 374 | value: Any = _compile(getattr(obj, attr)) 375 | setattr(obj, attr, value) 376 | 377 | return obj 378 | 379 | self.spec = _compile(self.spec) 380 | self.model = self.__model__(**self.to_dict(omitempty=False)) 381 | 382 | self.__validated = True 383 | 384 | return self.model 385 | 386 | def to_file(self, fp: Union[Path, str], fmt="yaml", **kwargs): 387 | """Dumps the WorkflowTemplate to a file.""" 388 | d: Dict[str, Any] = _utils.sanitize_for_serialization(self) 389 | 390 | opts = kwargs 391 | 392 | if fmt == "json": 393 | Path(fp).write_text(json.dumps(d, **opts)) 394 | else: 395 | Path(fp).write_text(yaml.dump(d, Dumper=_utils.BlockDumper, **opts) + "\n") 396 | 397 | def to_yaml(self, omitempty=True, **kwargs) -> str: 398 | """Returns the WorkflowTemplate manifest as a YAML.""" 399 | d: Dict[str, Any] = self.to_dict(omitempty=omitempty) 400 | 401 | opts = dict(default_flow_style=False) 402 | opts.update(kwargs) 403 | 404 | serialized = yaml.dump(d, Dumper=_utils.BlockDumper, **opts) 405 | 406 | return serialized 407 | 408 | def to_dict(self, omitempty=True) -> Dict[str, Any]: 409 | """Returns the WorkflowTemplate manifest as a dict. 
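
        With `omitempty=True` (the default), None-valued keys are dropped
        recursively via `_utils.omitempty`. Sketch (illustrative only; `tmpl`
        is a hypothetical compiled WorkflowTemplate instance):

            manifest = tmpl.to_dict()              # empty values omitted
            full = tmpl.to_dict(omitempty=False)   # None-valued keys kept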
410 | 
411 |         :param omitempty: bool, whether to omit empty values
412 |         """
413 |         result = self.__model__.to_dict(self)
414 | 
415 |         if omitempty:
416 |             return _utils.omitempty(result)
417 | 
418 |         return result
419 | 
420 | 
421 | class ClusterWorkflowTemplateMeta(WorkflowTemplateMeta):
422 | 
423 |     __kind__ = "ClusterWorkflowTemplate"
424 |     __model__ = V1alpha1ClusterWorkflowTemplate
425 | 
426 | 
427 | class ClusterWorkflowTemplate(WorkflowTemplate, metaclass=ClusterWorkflowTemplateMeta):
428 |     """Base class for ClusterWorkflowTemplates."""
429 | 
430 |     __model__ = V1alpha1ClusterWorkflowTemplate
431 | 
--------------------------------------------------------------------------------
/argo/workflows/dsl/tasks.py:
--------------------------------------------------------------------------------
1 | from inflection import dasherize
2 | from functools import partial
3 | from functools import wraps
4 | 
5 | from typing import Any
6 | from typing import Dict
7 | from typing import Callable
8 | from typing import List
9 | from typing import Optional
10 | from typing import Tuple
11 | from typing import Type
12 | from typing import Union
13 | 
14 | from argo.workflows.client.models import (
15 |     V1alpha1Arguments,
16 |     V1alpha1Artifact,
17 |     V1alpha1ContinueOn,
18 |     V1alpha1DAGTask,
19 |     V1alpha1Parameter,
20 |     V1alpha1Sequence,
21 |     V1alpha1Template,
22 |     V1alpha1TemplateRef,
23 | )
24 | 
25 | from ._arguments import artifact
26 | from ._arguments import parameter
27 | from ._base import Prop
28 | from ._base import Spec
29 | 
30 | 
31 | __all__ = [
32 |     # decorators
33 |     "artifact",
34 |     "continue_on",
35 |     "dependencies",
36 |     "parameter",
37 |     "task",
38 |     "when",
39 |     "with_items",
40 |     "with_param",
41 |     "with_sequence",
42 |     # models
43 |     "V1alpha1Template",
44 |     "V1alpha1TemplateRef",
45 | ]
46 | 
47 | # return type
48 | T = Union[V1alpha1Template, V1alpha1TemplateRef]
49 | 
50 | 
51 | class task(Spec):
52 | 
53 |     __model__ = V1alpha1DAGTask
54 | 
55 |     def __new__(cls, f: Callable[..., T]):
56 |         """Task spec for V1alpha1DAGTask."""
57 |         self = super().__new__(cls, f)
58 |         self.name = dasherize(f.__code__.co_name)
59 | 
60 |         self.template: str = None
61 |         self.template_ref: V1alpha1TemplateRef = None
62 | 
63 |         return self
64 | 
65 |     def __init_model__(self, spec: T, *args, **kwargs):
66 |         _: Tuple[Any, ...]
= args # ignore 67 | 68 | if isinstance(spec, V1alpha1Template): 69 | self.template: str = spec.name 70 | elif isinstance(spec, V1alpha1TemplateRef): 71 | self.template_ref: V1alpha1TemplateRef = spec 72 | else: 73 | raise TypeError(f"Expected {T}, got: {type(spec)}") 74 | 75 | 76 | class dependencies(Prop): 77 | 78 | __model__ = List[str] 79 | 80 | 81 | class continue_on(Prop): 82 | 83 | __model__ = V1alpha1ContinueOn 84 | 85 | 86 | class when(Prop): 87 | 88 | __model__ = str 89 | 90 | 91 | class with_items(Prop): 92 | 93 | __model__ = List[str] 94 | 95 | 96 | class with_param(Prop): 97 | 98 | __model__ = str 99 | 100 | 101 | class with_sequence(Prop): 102 | 103 | __model__ = V1alpha1Sequence 104 | -------------------------------------------------------------------------------- /argo/workflows/dsl/templates.py: -------------------------------------------------------------------------------- 1 | import inspect 2 | import re 3 | import textwrap 4 | 5 | from inflection import dasherize 6 | from functools import partial 7 | from functools import wraps 8 | 9 | from typing import Any 10 | from typing import Dict 11 | from typing import Callable 12 | from typing import List 13 | from typing import Optional 14 | from typing import Tuple 15 | from typing import Union 16 | 17 | from argo.workflows.client.models import ( 18 | V1alpha1Arguments, 19 | V1alpha1Artifact, 20 | V1alpha1Inputs, 21 | V1alpha1Outputs, 22 | V1alpha1Parameter, 23 | V1alpha1ResourceTemplate, 24 | V1alpha1ScriptTemplate, 25 | V1alpha1Template, 26 | V1Container, 27 | V1ContainerPort, 28 | V1EnvFromSource, 29 | V1EnvVar, 30 | V1Lifecycle, 31 | V1Probe, 32 | V1ResourceRequirements, 33 | V1SecurityContext, 34 | V1VolumeDevice, 35 | V1VolumeMount, 36 | ) 37 | 38 | from ._arguments import artifact 39 | from ._arguments import parameter 40 | from ._base import Prop 41 | from ._base import Spec 42 | from ._inputs import inputs 43 | from ._outputs import outputs 44 | 45 | 46 | __all__ = [ 47 | # decorators 48 | "artifact", 49 | "closure", 50 | "inputs", 51 | "outputs", 52 | "parameter", 53 | "scope", 54 | "template", 55 | # models 56 | "V1alpha1Arguments", 57 | "V1alpha1Artifact", 58 | "V1alpha1Parameter", 59 | "V1alpha1ResourceTemplate", 60 | "V1alpha1ScriptTemplate", 61 | "V1Container", 62 | "V1ContainerPort", 63 | "V1EnvFromSource", 64 | "V1EnvVar", 65 | "V1Lifecycle", 66 | "V1Probe", 67 | "V1ResourceRequirements", 68 | "V1SecurityContext", 69 | "V1VolumeDevice", 70 | "V1VolumeMount", 71 | ] 72 | 73 | # return type 74 | T = Union[V1alpha1ResourceTemplate, V1alpha1ScriptTemplate, V1Container] 75 | 76 | 77 | class template(Spec): 78 | 79 | __model__ = V1alpha1Template 80 | 81 | def __new__(cls, f: Callable[..., T]): 82 | """Workflow spec for V1alpha1Template.""" 83 | self = super().__new__(cls, f) 84 | self.name = dasherize(f.__code__.co_name) 85 | 86 | return self 87 | 88 | 89 | class closure(Prop, command=["python"]): 90 | """Workflow spec for V1alpha1Template using closure.""" 91 | 92 | __model__ = V1alpha1ScriptTemplate 93 | 94 | def __init__( 95 | self, 96 | image: str, 97 | scope: str = None, 98 | *, 99 | env: List[V1EnvVar] = None, 100 | env_from: List[V1EnvFromSource] = None, 101 | image_pull_policy: str = None, 102 | lifecycle: V1Lifecycle = None, 103 | liveness_probe: V1Probe = None, 104 | ports: List[V1ContainerPort] = None, 105 | readiness_probe: V1Probe = None, 106 | resources: V1ResourceRequirements = None, 107 | security_context: V1SecurityContext = None, 108 | stdin: bool = None, 109 | stdin_once: bool = None, 110 | 
termination_message_path: str = None, 111 | termination_message_policy: str = None, 112 | tty: bool = None, 113 | volume_devices: List[V1VolumeDevice] = None, 114 | volume_mounts: List[V1VolumeMount] = None, 115 | working_dir: str = None, 116 | ): # noqa: E501 117 | super().__init__(**self.__dict__, name="script", source="", image="") 118 | 119 | self.image = image 120 | self.scope = scope 121 | 122 | if env is not None: 123 | self.env = env 124 | if env_from is not None: 125 | self.env_from = env_from 126 | if image_pull_policy is not None: 127 | self.image_pull_policy = image_pull_policy 128 | if lifecycle is not None: 129 | self.lifecycle = lifecycle 130 | if liveness_probe is not None: 131 | self.liveness_probe = liveness_probe 132 | if ports is not None: 133 | self.ports = ports 134 | if readiness_probe is not None: 135 | self.readiness_probe = readiness_probe 136 | if resources is not None: 137 | self.resources = resources 138 | if security_context is not None: 139 | self.security_context = security_context 140 | if stdin is not None: 141 | self.stdin = stdin 142 | if stdin_once is not None: 143 | self.stdin_once = stdin_once 144 | if termination_message_path is not None: 145 | self.termination_message_path = termination_message_path 146 | if termination_message_policy is not None: 147 | self.termination_message_policy = termination_message_policy 148 | if tty is not None: 149 | self.tty = tty 150 | if volume_devices is not None: 151 | self.volume_devices = volume_devices 152 | if volume_mounts is not None: 153 | self.volume_mounts = volume_mounts 154 | if working_dir is not None: 155 | self.working_dir = working_dir 156 | 157 | def __call__(self, f: Callable[..., None]) -> template: 158 | super().__call__(f) 159 | 160 | self.name = dasherize(f.__code__.co_name) 161 | 162 | source: List[str] 163 | source, _ = inspect.getsourcelines(f.__code__) 164 | 165 | co_start: int = 0 166 | for i, line in enumerate(source): 167 | if re.search(r"\)( -> (.+))?:[\s\n\r]+$", line): 168 | co_start = i + 1 169 | break 170 | 171 | self.source = textwrap.dedent("".join(source[co_start:])) 172 | 173 | tmpl = template(f) 174 | tmpl.callable = False 175 | 176 | tmpl.__closure__ = self.scope 177 | 178 | return tmpl 179 | 180 | 181 | class scope: 182 | """Mark scope for closures.""" 183 | 184 | def __init__(self, name: str): 185 | self.name = name 186 | 187 | def __call__(self, f: Callable) -> Callable: 188 | m = staticmethod(f) 189 | m.__scope__ = self.name 190 | 191 | return m 192 | 193 | @property 194 | def name(self) -> str: 195 | return self._name 196 | 197 | @name.setter 198 | def name(self, name: str): 199 | """Validate and set scope name. 200 | 201 | :raises: ValueError 202 | """ 203 | valid_pattern = r"^[a-zA-Z_][a-zA-Z0-9_]*$" 204 | 205 | if not bool(re.match(valid_pattern, name)): 206 | raise ValueError( 207 | f"String {name} is not valid scope name." 208 | f"Scope name must match expression '{valid_pattern}'." 
209 | ) 210 | 211 | self._name = name 212 | -------------------------------------------------------------------------------- /docs/integration_tests.md: -------------------------------------------------------------------------------- 1 | # Integration Tests 2 | 3 | The current integration test suite requires: 4 | 5 | - [argo cli](https://argoproj.github.io/argo/cli/) 6 | - [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) 7 | - [minikube](https://kubernetes.io/docs/tasks/tools/install-minikube/) 8 | 9 | Start a k8s cluster using minikube: 10 | 11 | ```sh 12 | minikube config set vm-driver docker 13 | minikube config set kubernetes-version 1.18.3 14 | minikube start 15 | ``` 16 | 17 | Install Argo Workflows: 18 | 19 | ```sh 20 | kubectl create ns argo 21 | kubectl apply -n argo -f https://raw.githubusercontent.com/argoproj/argo/v2.11.1/manifests/quick-start-minimal.yaml 22 | ``` 23 | 24 | Run the integration tests: 25 | ```sh 26 | scripts/integration_tests.sh 27 | ``` 28 | 29 | ## Workflows 30 | 31 | The integration test workflows are located in the following directory: [tests/workflows](tests/workflows). 32 | 33 | Each workflow Python file must import `ntpath` and `pathlib`, define the workflow class under test, and end with the following block: 34 | 35 | ```python 36 | if __name__ == "__main__": 37 | wf = HelloWorld() # Workflow class to be tested 38 | wf_file = ntpath.basename(__file__).replace(".py", ".yaml") 39 | wf.to_file(f"{pathlib.Path(__file__).parent}/{wf_file}") 40 | ``` -------------------------------------------------------------------------------- /examples/artifacts.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: Workflow 3 | metadata: 4 | generateName: artifacts- 5 | name: '' 6 | spec: 7 | entrypoint: main 8 | templates: 9 | - dag: 10 | tasks: 11 | - name: generate 12 | template: whalesay 13 | - arguments: 14 | artifacts: 15 | - from: '{{tasks.generate.outputs.artifacts.hello-art}}' 16 | name: message 17 | dependencies: 18 | - generate 19 | name: consume-artifact 20 | template: print-message 21 | name: main 22 | - container: 23 | args: 24 | - cowsay hello world | tee /tmp/hello_world.txt 25 | command: 26 | - sh 27 | - -c 28 | image: docker/whalesay:latest 29 | name: whalesay 30 | name: whalesay 31 | outputs: 32 | artifacts: 33 | - name: hello-art 34 | path: /tmp/hello_world.txt 35 | - container: 36 | args: 37 | - cat 38 | - /tmp/message 39 | command: 40 | - sh 41 | - -c 42 | image: alpine:latest 43 | name: print-message 44 | inputs: 45 | artifacts: 46 | - name: message 47 | path: /tmp/message 48 | name: print-message 49 | status: {} 50 | 51 | -------------------------------------------------------------------------------- /examples/cronworkflow.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | import ntpath 3 | 4 | from argo.workflows.dsl import template 5 | from argo.workflows.dsl import CronWorkflow 6 | from argo.workflows.dsl.templates import V1Container 7 | 8 | 9 | class HelloWorld(CronWorkflow): 10 | 11 | entrypoint = "whalesay" 12 | schedule = "* * * * *" 13 | name = "hello-world-cron" 14 | 15 | @template 16 | def whalesay(self) -> V1Container: 17 | container = V1Container( 18 | image="docker/whalesay:latest", 19 | name="whalesay", 20 | command=["cowsay"], 21 | args=["hello world"], 22 | ) 23 | return container 24 | 25 | 26 | if __name__ == "__main__": 27 | wf = HelloWorld() 28 | wf_file = ntpath.basename(__file__).replace(".py", ".yaml") 29 | 
wf.to_file(f"{pathlib.Path(__file__).parent}/{wf_file}") 30 | -------------------------------------------------------------------------------- /examples/dag-diamond.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "toc": true 7 | }, 8 | "source": [ 9 | "

<h1>Table of Contents<span class=\"tocSkip\"></span></h1>\n", 10 | "<div class=\"toc\"><ul class=\"toc-item\"></ul></div>
    " 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "metadata": { 17 | "ExecuteTime": { 18 | "end_time": "2020-01-16T13:10:49.038996Z", 19 | "start_time": "2020-01-16T13:10:49.018358Z" 20 | } 21 | }, 22 | "outputs": [], 23 | "source": [ 24 | "%load_ext autoreload\n", 25 | "%autoreload 2" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 2, 31 | "metadata": { 32 | "ExecuteTime": { 33 | "end_time": "2020-01-16T13:10:49.527287Z", 34 | "start_time": "2020-01-16T13:10:49.041932Z" 35 | } 36 | }, 37 | "outputs": [], 38 | "source": [ 39 | "from argo.workflows.dsl import Workflow\n", 40 | "\n", 41 | "from argo.workflows.dsl.tasks import *\n", 42 | "from argo.workflows.dsl.templates import *" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 3, 48 | "metadata": { 49 | "ExecuteTime": { 50 | "end_time": "2020-01-16T13:10:49.557306Z", 51 | "start_time": "2020-01-16T13:10:49.531713Z" 52 | } 53 | }, 54 | "outputs": [], 55 | "source": [ 56 | "import yaml\n", 57 | "\n", 58 | "from pprint import pprint\n", 59 | "\n", 60 | "from argo.workflows.dsl._utils import sanitize_for_serialization" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "---" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 4, 73 | "metadata": { 74 | "ExecuteTime": { 75 | "end_time": "2020-01-16T13:10:49.699083Z", 76 | "start_time": "2020-01-16T13:10:49.559145Z" 77 | } 78 | }, 79 | "outputs": [], 80 | "source": [ 81 | "!sh -c '[ -f \"dag-diamond.yaml\" ] || curl -LO https://raw.githubusercontent.com/CermakM/argo-python-dsl/master/examples/dag-diamond.yaml'" 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": 5, 87 | "metadata": { 88 | "ExecuteTime": { 89 | "end_time": "2020-01-16T13:10:49.738187Z", 90 | "start_time": "2020-01-16T13:10:49.703317Z" 91 | } 92 | }, 93 | "outputs": [ 94 | { 95 | "name": "stdout", 96 | "output_type": "stream", 97 | "text": [ 98 | "# @file: dag-diamond.yaml\n", 99 | "# The following workflow executes a diamond workflow\n", 100 | "#\n", 101 | "# A\n", 102 | "# / \\\n", 103 | "# B C\n", 104 | "# \\ /\n", 105 | "# D\n", 106 | "apiVersion: argoproj.io/v1alpha1\n", 107 | "kind: Workflow\n", 108 | "metadata:\n", 109 | " name: dag-diamond\n", 110 | " generateName: dag-diamond-\n", 111 | "spec:\n", 112 | " entrypoint: main\n", 113 | " templates:\n", 114 | " - name: main\n", 115 | " dag:\n", 116 | " tasks:\n", 117 | " - name: A\n", 118 | " template: echo\n", 119 | " arguments:\n", 120 | " parameters: [{name: message, value: A}]\n", 121 | " - name: B\n", 122 | " dependencies: [A]\n", 123 | " template: echo\n", 124 | " arguments:\n", 125 | " parameters: [{name: message, value: B}]\n", 126 | " - name: C\n", 127 | " dependencies: [A]\n", 128 | " template: echo\n", 129 | " arguments:\n", 130 | " parameters: [{name: message, value: C}]\n", 131 | " - name: D\n", 132 | " dependencies: [B, C]\n", 133 | " template: echo\n", 134 | " arguments:\n", 135 | " parameters: [{name: message, value: D}]\n", 136 | "\n", 137 | " # @task: [A, B, C, D]\n", 138 | " - name: echo\n", 139 | " inputs:\n", 140 | " parameters:\n", 141 | " - name: message\n", 142 | " container:\n", 143 | " name: echo\n", 144 | " image: alpine:3.7\n", 145 | " command: [echo, \"{{inputs.parameters.message}}\"]\n", 146 | "status: {}\n" 147 | ] 148 | } 149 | ], 150 | "source": [ 151 | "from pathlib import Path\n", 152 | "\n", 153 | "manifest = Path(\"./dag-diamond.yaml\").read_text()\n", 154 | "print(manifest)" 
155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": 6, 160 | "metadata": { 161 | "ExecuteTime": { 162 | "end_time": "2020-01-16T13:10:49.802546Z", 163 | "start_time": "2020-01-16T13:10:49.741999Z" 164 | }, 165 | "scrolled": false 166 | }, 167 | "outputs": [ 168 | { 169 | "data": { 170 | "text/plain": [ 171 | "{'api_version': 'argoproj.io/v1alpha1',\n", 172 | " 'kind': 'Workflow',\n", 173 | " 'metadata': {'generate_name': 'dag-diamond-', 'name': 'dag-diamond'},\n", 174 | " 'spec': {'entrypoint': 'main',\n", 175 | " 'templates': [{'dag': {'tasks': [{'arguments': {'parameters': [{'name': 'message',\n", 176 | " 'value': 'A'}]},\n", 177 | " 'name': 'A',\n", 178 | " 'template': 'echo'},\n", 179 | " {'arguments': {'parameters': [{'name': 'message',\n", 180 | " 'value': 'B'}]},\n", 181 | " 'dependencies': ['A'],\n", 182 | " 'name': 'B',\n", 183 | " 'template': 'echo'},\n", 184 | " {'arguments': {'parameters': [{'name': 'message',\n", 185 | " 'value': 'C'}]},\n", 186 | " 'dependencies': ['A'],\n", 187 | " 'name': 'C',\n", 188 | " 'template': 'echo'},\n", 189 | " {'arguments': {'parameters': [{'name': 'message',\n", 190 | " 'value': 'D'}]},\n", 191 | " 'dependencies': ['B', 'C'],\n", 192 | " 'name': 'D',\n", 193 | " 'template': 'echo'}]},\n", 194 | " 'name': 'main'},\n", 195 | " {'container': {'command': ['echo',\n", 196 | " '{{inputs.parameters.message}}'],\n", 197 | " 'image': 'alpine:3.7',\n", 198 | " 'name': 'echo'},\n", 199 | " 'inputs': {'parameters': [{'name': 'message'}]},\n", 200 | " 'name': 'echo'}]},\n", 201 | " 'status': {}}" 202 | ] 203 | }, 204 | "execution_count": 6, 205 | "metadata": {}, 206 | "output_type": "execute_result" 207 | } 208 | ], 209 | "source": [ 210 | "class DagDiamond(Workflow):\n", 211 | " \n", 212 | " @task\n", 213 | " @parameter(name=\"message\", value=\"A\")\n", 214 | " def A(self, message: V1alpha1Parameter) -> V1alpha1Template:\n", 215 | " return self.echo(message=message)\n", 216 | " \n", 217 | " @task\n", 218 | " @parameter(name=\"message\", value=\"B\")\n", 219 | " @dependencies([\"A\"])\n", 220 | " def B(self, message: V1alpha1Parameter) -> V1alpha1Template:\n", 221 | " return self.echo(message=message)\n", 222 | " \n", 223 | " @task\n", 224 | " @parameter(name=\"message\", value=\"C\")\n", 225 | " @dependencies([\"A\"])\n", 226 | " def C(self, message: V1alpha1Parameter) -> V1alpha1Template:\n", 227 | " return self.echo(message=message)\n", 228 | " \n", 229 | " @task\n", 230 | " @parameter(name=\"message\", value=\"D\")\n", 231 | " @dependencies([\"B\", \"C\"])\n", 232 | " def D(self, message: V1alpha1Parameter) -> V1alpha1Template:\n", 233 | " return self.echo(message=message)\n", 234 | " \n", 235 | " @template\n", 236 | " @inputs.parameter(name=\"message\")\n", 237 | " def echo(self, message: V1alpha1Parameter) -> V1Container:\n", 238 | " container = V1Container(\n", 239 | " image=\"alpine:3.7\",\n", 240 | " name=\"echo\",\n", 241 | " command=[\"echo\", \"{{inputs.parameters.message}}\"],\n", 242 | " )\n", 243 | " \n", 244 | " return container\n", 245 | "\n", 246 | "wf = DagDiamond()\n", 247 | "wf" 248 | ] 249 | }, 250 | { 251 | "cell_type": "markdown", 252 | "metadata": {}, 253 | "source": [ 254 | "---" 255 | ] 256 | }, 257 | { 258 | "cell_type": "code", 259 | "execution_count": 7, 260 | "metadata": { 261 | "ExecuteTime": { 262 | "end_time": "2020-01-16T13:10:49.836296Z", 263 | "start_time": "2020-01-16T13:10:49.804479Z" 264 | } 265 | }, 266 | "outputs": [ 267 | { 268 | "name": "stdout", 269 | "output_type": "stream", 270 | 
"text": [ 271 | "{'apiVersion': 'argoproj.io/v1alpha1',\n", 272 | " 'kind': 'Workflow',\n", 273 | " 'metadata': {'generateName': 'dag-diamond-', 'name': 'dag-diamond'},\n", 274 | " 'spec': {'entrypoint': 'main',\n", 275 | " 'templates': [{'dag': {'tasks': [{'arguments': {'parameters': [{'name': 'message',\n", 276 | " 'value': 'A'}]},\n", 277 | " 'name': 'A',\n", 278 | " 'template': 'echo'},\n", 279 | " {'arguments': {'parameters': [{'name': 'message',\n", 280 | " 'value': 'B'}]},\n", 281 | " 'dependencies': ['A'],\n", 282 | " 'name': 'B',\n", 283 | " 'template': 'echo'},\n", 284 | " {'arguments': {'parameters': [{'name': 'message',\n", 285 | " 'value': 'C'}]},\n", 286 | " 'dependencies': ['A'],\n", 287 | " 'name': 'C',\n", 288 | " 'template': 'echo'},\n", 289 | " {'arguments': {'parameters': [{'name': 'message',\n", 290 | " 'value': 'D'}]},\n", 291 | " 'dependencies': ['B', 'C'],\n", 292 | " 'name': 'D',\n", 293 | " 'template': 'echo'}]},\n", 294 | " 'name': 'main'},\n", 295 | " {'container': {'command': ['echo',\n", 296 | " '{{inputs.parameters.message}}'],\n", 297 | " 'image': 'alpine:3.7',\n", 298 | " 'name': 'echo'},\n", 299 | " 'inputs': {'parameters': [{'name': 'message'}]},\n", 300 | " 'name': 'echo'}]},\n", 301 | " 'status': {}}\n" 302 | ] 303 | } 304 | ], 305 | "source": [ 306 | "pprint(sanitize_for_serialization(wf))" 307 | ] 308 | }, 309 | { 310 | "cell_type": "code", 311 | "execution_count": 8, 312 | "metadata": { 313 | "ExecuteTime": { 314 | "end_time": "2020-01-16T13:10:49.885619Z", 315 | "start_time": "2020-01-16T13:10:49.839676Z" 316 | } 317 | }, 318 | "outputs": [ 319 | { 320 | "name": "stdout", 321 | "output_type": "stream", 322 | "text": [ 323 | "{'apiVersion': 'argoproj.io/v1alpha1',\n", 324 | " 'kind': 'Workflow',\n", 325 | " 'metadata': {'generateName': 'dag-diamond-', 'name': 'dag-diamond'},\n", 326 | " 'spec': {'entrypoint': 'main',\n", 327 | " 'templates': [{'dag': {'tasks': [{'arguments': {'parameters': [{'name': 'message',\n", 328 | " 'value': 'A'}]},\n", 329 | " 'name': 'A',\n", 330 | " 'template': 'echo'},\n", 331 | " {'arguments': {'parameters': [{'name': 'message',\n", 332 | " 'value': 'B'}]},\n", 333 | " 'dependencies': ['A'],\n", 334 | " 'name': 'B',\n", 335 | " 'template': 'echo'},\n", 336 | " {'arguments': {'parameters': [{'name': 'message',\n", 337 | " 'value': 'C'}]},\n", 338 | " 'dependencies': ['A'],\n", 339 | " 'name': 'C',\n", 340 | " 'template': 'echo'},\n", 341 | " {'arguments': {'parameters': [{'name': 'message',\n", 342 | " 'value': 'D'}]},\n", 343 | " 'dependencies': ['B', 'C'],\n", 344 | " 'name': 'D',\n", 345 | " 'template': 'echo'}]},\n", 346 | " 'name': 'main'},\n", 347 | " {'container': {'command': ['echo',\n", 348 | " '{{inputs.parameters.message}}'],\n", 349 | " 'image': 'alpine:3.7',\n", 350 | " 'name': 'echo'},\n", 351 | " 'inputs': {'parameters': [{'name': 'message'}]},\n", 352 | " 'name': 'echo'}]},\n", 353 | " 'status': {}}\n" 354 | ] 355 | } 356 | ], 357 | "source": [ 358 | "pprint(yaml.safe_load(manifest))" 359 | ] 360 | }, 361 | { 362 | "cell_type": "code", 363 | "execution_count": 9, 364 | "metadata": { 365 | "ExecuteTime": { 366 | "end_time": "2020-01-16T13:10:49.950217Z", 367 | "start_time": "2020-01-16T13:10:49.891251Z" 368 | } 369 | }, 370 | "outputs": [], 371 | "source": [ 372 | "assert sanitize_for_serialization(wf) == yaml.safe_load(manifest), \"Manifests don't match.\"" 373 | ] 374 | } 375 | ], 376 | "metadata": { 377 | "finalized": { 378 | "timestamp": 1579180254322, 379 | "trusted": true 380 | }, 381 | "hide_input": 
false, 382 | "kernelspec": { 383 | "display_name": "argo-python-dsl", 384 | "language": "python", 385 | "name": "argo-python-dsl" 386 | }, 387 | "language_info": { 388 | "codemirror_mode": { 389 | "name": "ipython", 390 | "version": 3 391 | }, 392 | "file_extension": ".py", 393 | "mimetype": "text/x-python", 394 | "name": "python", 395 | "nbconvert_exporter": "python", 396 | "pygments_lexer": "ipython3", 397 | "version": "3.6.10" 398 | }, 399 | "requirements": { 400 | "aliases": {}, 401 | "dev-packages": {}, 402 | "packages": { 403 | "argo-workflows": "*", 404 | "inflection": "==0.3.1", 405 | "pyyaml": "==5.2" 406 | }, 407 | "requires": { 408 | "python_version": "3.6" 409 | }, 410 | "sources": [ 411 | { 412 | "name": "pypi", 413 | "url": "https://pypi.org/simple", 414 | "verify_ssl": true 415 | } 416 | ] 417 | }, 418 | "toc": { 419 | "base_numbering": 1, 420 | "nav_menu": {}, 421 | "number_sections": true, 422 | "sideBar": true, 423 | "skip_h1_title": true, 424 | "title_cell": "Table of Contents", 425 | "title_sidebar": "Contents", 426 | "toc_cell": true, 427 | "toc_position": {}, 428 | "toc_section_display": true, 429 | "toc_window_display": false 430 | } 431 | }, 432 | "nbformat": 4, 433 | "nbformat_minor": 2 434 | } 435 | -------------------------------------------------------------------------------- /examples/dag-diamond.yaml: -------------------------------------------------------------------------------- 1 | # @file: dag-diamond.yaml 2 | # The following workflow executes a diamond workflow 3 | # 4 | # A 5 | # / \ 6 | # B C 7 | # \ / 8 | # D 9 | apiVersion: argoproj.io/v1alpha1 10 | kind: Workflow 11 | metadata: 12 | name: dag-diamond 13 | generateName: dag-diamond- 14 | spec: 15 | entrypoint: main 16 | templates: 17 | - name: main 18 | dag: 19 | tasks: 20 | - name: A 21 | template: echo 22 | arguments: 23 | parameters: [{name: message, value: A}] 24 | - name: B 25 | dependencies: [A] 26 | template: echo 27 | arguments: 28 | parameters: [{name: message, value: B}] 29 | - name: C 30 | dependencies: [A] 31 | template: echo 32 | arguments: 33 | parameters: [{name: message, value: C}] 34 | - name: D 35 | dependencies: [B, C] 36 | template: echo 37 | arguments: 38 | parameters: [{name: message, value: D}] 39 | 40 | # @task: [A, B, C, D] 41 | - name: echo 42 | inputs: 43 | parameters: 44 | - name: message 45 | container: 46 | name: echo 47 | image: alpine:3.7 48 | command: [echo, "{{inputs.parameters.message}}"] 49 | status: {} -------------------------------------------------------------------------------- /examples/hello-world-single-task.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "toc": true 7 | }, 8 | "source": [ 9 | "

<h1>Table of Contents<span class=\"tocSkip\"></span></h1>\n", 10 | "<div class=\"toc\"><ul class=\"toc-item\"></ul></div>
      " 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 2, 16 | "metadata": { 17 | "ExecuteTime": { 18 | "end_time": "2020-01-16T13:10:09.564490Z", 19 | "start_time": "2020-01-16T13:10:09.528893Z" 20 | } 21 | }, 22 | "outputs": [], 23 | "source": [ 24 | "%load_ext autoreload\n", 25 | "%autoreload 2" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 3, 31 | "metadata": { 32 | "ExecuteTime": { 33 | "end_time": "2020-01-16T13:10:10.206492Z", 34 | "start_time": "2020-01-16T13:10:09.568569Z" 35 | } 36 | }, 37 | "outputs": [], 38 | "source": [ 39 | "from argo.workflows.dsl import Workflow\n", 40 | "from argo.workflows.dsl import task\n", 41 | "from argo.workflows.dsl import template\n", 42 | "\n", 43 | "from argo.workflows.dsl.templates import V1Container\n", 44 | "from argo.workflows.dsl.templates import V1alpha1Template" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 4, 50 | "metadata": { 51 | "ExecuteTime": { 52 | "end_time": "2020-01-16T13:10:10.244082Z", 53 | "start_time": "2020-01-16T13:10:10.209527Z" 54 | } 55 | }, 56 | "outputs": [], 57 | "source": [ 58 | "import yaml\n", 59 | "\n", 60 | "from pprint import pprint\n", 61 | "\n", 62 | "from argo.workflows.dsl._utils import sanitize_for_serialization" 63 | ] 64 | }, 65 | { 66 | "cell_type": "markdown", 67 | "metadata": {}, 68 | "source": [ 69 | "---" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 5, 75 | "metadata": { 76 | "ExecuteTime": { 77 | "end_time": "2020-01-16T13:10:10.403616Z", 78 | "start_time": "2020-01-16T13:10:10.246036Z" 79 | } 80 | }, 81 | "outputs": [], 82 | "source": [ 83 | "!sh -c '[ -f \"hello-world-single-task.yaml\" ] || curl -LO https://raw.githubusercontent.com/CermakM/argo-python-dsl/master/examples/hello-world-single-task.yaml'" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 6, 89 | "metadata": { 90 | "ExecuteTime": { 91 | "end_time": "2020-01-16T13:10:10.487203Z", 92 | "start_time": "2020-01-16T13:10:10.414339Z" 93 | } 94 | }, 95 | "outputs": [ 96 | { 97 | "name": "stdout", 98 | "output_type": "stream", 99 | "text": [ 100 | "# @file: hello-world-single-task.yaml\n", 101 | "apiVersion: argoproj.io/v1alpha1\n", 102 | "kind: Workflow\n", 103 | "metadata:\n", 104 | " name: hello-world\n", 105 | " generateName: hello-world-\n", 106 | "spec:\n", 107 | " entrypoint: main\n", 108 | " templates:\n", 109 | " - name: main\n", 110 | " dag:\n", 111 | " tasks:\n", 112 | " - name: A\n", 113 | " template: whalesay\n", 114 | "\n", 115 | " # @task: [A]\n", 116 | " - name: whalesay\n", 117 | " container:\n", 118 | " name: whalesay\n", 119 | " image: docker/whalesay:latest\n", 120 | " command: [cowsay]\n", 121 | " args: [\"hello world\"]\n", 122 | "status: {}\n", 123 | "\n" 124 | ] 125 | } 126 | ], 127 | "source": [ 128 | "from pathlib import Path\n", 129 | "\n", 130 | "manifest = Path(\"./hello-world-single-task.yaml\").read_text()\n", 131 | "print(manifest)" 132 | ] 133 | }, 134 | { 135 | "cell_type": "code", 136 | "execution_count": 7, 137 | "metadata": { 138 | "ExecuteTime": { 139 | "end_time": "2020-01-16T13:10:10.553219Z", 140 | "start_time": "2020-01-16T13:10:10.492599Z" 141 | }, 142 | "scrolled": false 143 | }, 144 | "outputs": [ 145 | { 146 | "data": { 147 | "text/plain": [ 148 | "{'api_version': 'argoproj.io/v1alpha1',\n", 149 | " 'kind': 'Workflow',\n", 150 | " 'metadata': {'generate_name': 'hello-world-', 'name': 'hello-world'},\n", 151 | " 'spec': {'entrypoint': 'main',\n", 152 | " 'templates': 
[{'dag': {'tasks': [{'name': 'A',\n", 153 | " 'template': 'whalesay'}]},\n", 154 | " 'name': 'main'},\n", 155 | " {'container': {'args': ['hello world'],\n", 156 | " 'command': ['cowsay'],\n", 157 | " 'image': 'docker/whalesay:latest',\n", 158 | " 'name': 'whalesay'},\n", 159 | " 'name': 'whalesay'}]},\n", 160 | " 'status': {}}" 161 | ] 162 | }, 163 | "execution_count": 7, 164 | "metadata": {}, 165 | "output_type": "execute_result" 166 | } 167 | ], 168 | "source": [ 169 | "class HelloWorld(Workflow):\n", 170 | " \n", 171 | " @task\n", 172 | " def A(self) -> V1alpha1Template:\n", 173 | " return self.whalesay()\n", 174 | " \n", 175 | " @template\n", 176 | " def whalesay(self) -> V1Container:\n", 177 | " container = V1Container(\n", 178 | " image=\"docker/whalesay:latest\",\n", 179 | " name=\"whalesay\",\n", 180 | " command=[\"cowsay\"],\n", 181 | " args=[\"hello world\"]\n", 182 | " )\n", 183 | " \n", 184 | " return container\n", 185 | "\n", 186 | "wf = HelloWorld()\n", 187 | "wf" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": 8, 193 | "metadata": { 194 | "ExecuteTime": { 195 | "end_time": "2020-01-16T13:10:10.593877Z", 196 | "start_time": "2020-01-16T13:10:10.557342Z" 197 | } 198 | }, 199 | "outputs": [ 200 | { 201 | "name": "stdout", 202 | "output_type": "stream", 203 | "text": [ 204 | "api_version: argoproj.io/v1alpha1\n", 205 | "kind: Workflow\n", 206 | "metadata:\n", 207 | " generate_name: hello-world-\n", 208 | " name: hello-world\n", 209 | "spec:\n", 210 | " entrypoint: main\n", 211 | " templates:\n", 212 | " - dag:\n", 213 | " tasks:\n", 214 | " - name: A\n", 215 | " template: whalesay\n", 216 | " name: main\n", 217 | " - container:\n", 218 | " args:\n", 219 | " - hello world\n", 220 | " command:\n", 221 | " - cowsay\n", 222 | " image: docker/whalesay:latest\n", 223 | " name: whalesay\n", 224 | " name: whalesay\n", 225 | "status: {}\n", 226 | "\n" 227 | ] 228 | } 229 | ], 230 | "source": [ 231 | "print(wf.to_yaml())" 232 | ] 233 | }, 234 | { 235 | "cell_type": "markdown", 236 | "metadata": {}, 237 | "source": [ 238 | "---" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": 9, 244 | "metadata": { 245 | "ExecuteTime": { 246 | "end_time": "2020-01-16T13:10:10.656100Z", 247 | "start_time": "2020-01-16T13:10:10.599955Z" 248 | } 249 | }, 250 | "outputs": [ 251 | { 252 | "name": "stdout", 253 | "output_type": "stream", 254 | "text": [ 255 | "{'apiVersion': 'argoproj.io/v1alpha1',\n", 256 | " 'kind': 'Workflow',\n", 257 | " 'metadata': {'generateName': 'hello-world-', 'name': 'hello-world'},\n", 258 | " 'spec': {'entrypoint': 'main',\n", 259 | " 'templates': [{'dag': {'tasks': [{'name': 'A',\n", 260 | " 'template': 'whalesay'}]},\n", 261 | " 'name': 'main'},\n", 262 | " {'container': {'args': ['hello world'],\n", 263 | " 'command': ['cowsay'],\n", 264 | " 'image': 'docker/whalesay:latest',\n", 265 | " 'name': 'whalesay'},\n", 266 | " 'name': 'whalesay'}]},\n", 267 | " 'status': {}}\n" 268 | ] 269 | } 270 | ], 271 | "source": [ 272 | "pprint(sanitize_for_serialization(wf))" 273 | ] 274 | }, 275 | { 276 | "cell_type": "code", 277 | "execution_count": 10, 278 | "metadata": { 279 | "ExecuteTime": { 280 | "end_time": "2020-01-16T13:10:10.728280Z", 281 | "start_time": "2020-01-16T13:10:10.660783Z" 282 | } 283 | }, 284 | "outputs": [ 285 | { 286 | "name": "stdout", 287 | "output_type": "stream", 288 | "text": [ 289 | "{'apiVersion': 'argoproj.io/v1alpha1',\n", 290 | " 'kind': 'Workflow',\n", 291 | " 'metadata': {'generateName': 'hello-world-', 
'name': 'hello-world'},\n", 292 | " 'spec': {'entrypoint': 'main',\n", 293 | " 'templates': [{'dag': {'tasks': [{'name': 'A',\n", 294 | " 'template': 'whalesay'}]},\n", 295 | " 'name': 'main'},\n", 296 | " {'container': {'args': ['hello world'],\n", 297 | " 'command': ['cowsay'],\n", 298 | " 'image': 'docker/whalesay:latest',\n", 299 | " 'name': 'whalesay'},\n", 300 | " 'name': 'whalesay'}]},\n", 301 | " 'status': {}}\n" 302 | ] 303 | } 304 | ], 305 | "source": [ 306 | "pprint(yaml.safe_load(manifest))" 307 | ] 308 | }, 309 | { 310 | "cell_type": "code", 311 | "execution_count": 11, 312 | "metadata": { 313 | "ExecuteTime": { 314 | "end_time": "2020-01-16T13:10:10.781781Z", 315 | "start_time": "2020-01-16T13:10:10.733527Z" 316 | } 317 | }, 318 | "outputs": [], 319 | "source": [ 320 | "assert sanitize_for_serialization(wf) == yaml.safe_load(manifest), \"Manifests don't match.\"" 321 | ] 322 | } 323 | ], 324 | "metadata": { 325 | "finalized": { 326 | "timestamp": 1579180218183, 327 | "trusted": true 328 | }, 329 | "hide_input": false, 330 | "kernelspec": { 331 | "display_name": "argo-python-dsl", 332 | "language": "python", 333 | "name": "argo-python-dsl" 334 | }, 335 | "language_info": { 336 | "codemirror_mode": { 337 | "name": "ipython", 338 | "version": 3 339 | }, 340 | "file_extension": ".py", 341 | "mimetype": "text/x-python", 342 | "name": "python", 343 | "nbconvert_exporter": "python", 344 | "pygments_lexer": "ipython3", 345 | "version": "3.6.10" 346 | }, 347 | "requirements": { 348 | "aliases": {}, 349 | "dev-packages": {}, 350 | "packages": { 351 | "argo-workflows": "*", 352 | "inflection": "==0.3.1", 353 | "pyyaml": "==5.2" 354 | }, 355 | "requires": { 356 | "python_version": "3.6" 357 | }, 358 | "sources": [ 359 | { 360 | "name": "pypi", 361 | "url": "https://pypi.org/simple", 362 | "verify_ssl": true 363 | } 364 | ] 365 | }, 366 | "toc": { 367 | "base_numbering": 1, 368 | "nav_menu": {}, 369 | "number_sections": true, 370 | "sideBar": true, 371 | "skip_h1_title": true, 372 | "title_cell": "Table of Contents", 373 | "title_sidebar": "Contents", 374 | "toc_cell": true, 375 | "toc_position": {}, 376 | "toc_section_display": true, 377 | "toc_window_display": false 378 | } 379 | }, 380 | "nbformat": 4, 381 | "nbformat_minor": 2 382 | } 383 | -------------------------------------------------------------------------------- /examples/hello-world-single-task.yaml: -------------------------------------------------------------------------------- 1 | # @file: hello-world-single-task.yaml 2 | apiVersion: argoproj.io/v1alpha1 3 | kind: Workflow 4 | metadata: 5 | name: hello-world 6 | generateName: hello-world- 7 | spec: 8 | entrypoint: main 9 | templates: 10 | - name: main 11 | dag: 12 | tasks: 13 | - name: A 14 | template: whalesay 15 | 16 | # @task: [A] 17 | - name: whalesay 18 | container: 19 | name: whalesay 20 | image: docker/whalesay:latest 21 | command: [cowsay] 22 | args: ["hello world"] 23 | status: {} 24 | -------------------------------------------------------------------------------- /examples/hello-world.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "toc": true 7 | }, 8 | "source": [ 9 | "

<h1>Table of Contents<span class=\"tocSkip\"></span></h1>\n", 10 | "<div class=\"toc\"><ul class=\"toc-item\"></ul></div>
        " 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 2, 16 | "metadata": { 17 | "ExecuteTime": { 18 | "end_time": "2020-01-16T13:09:48.985950Z", 19 | "start_time": "2020-01-16T13:09:48.961714Z" 20 | } 21 | }, 22 | "outputs": [], 23 | "source": [ 24 | "%load_ext autoreload\n", 25 | "%autoreload 2" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 3, 31 | "metadata": { 32 | "ExecuteTime": { 33 | "end_time": "2020-01-16T13:09:49.435917Z", 34 | "start_time": "2020-01-16T13:09:48.988004Z" 35 | } 36 | }, 37 | "outputs": [], 38 | "source": [ 39 | "from argo.workflows.dsl import Workflow\n", 40 | "from argo.workflows.dsl import template\n", 41 | "\n", 42 | "from argo.workflows.dsl.templates import V1Container" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 4, 48 | "metadata": { 49 | "ExecuteTime": { 50 | "end_time": "2020-01-16T13:09:49.471510Z", 51 | "start_time": "2020-01-16T13:09:49.439442Z" 52 | } 53 | }, 54 | "outputs": [], 55 | "source": [ 56 | "import yaml\n", 57 | "\n", 58 | "from pprint import pprint\n", 59 | "\n", 60 | "from argo.workflows.dsl._utils import sanitize_for_serialization" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "---" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 5, 73 | "metadata": { 74 | "ExecuteTime": { 75 | "end_time": "2020-01-16T13:09:49.637896Z", 76 | "start_time": "2020-01-16T13:09:49.473672Z" 77 | } 78 | }, 79 | "outputs": [], 80 | "source": [ 81 | "!sh -c '[ -f \"hello-world.yaml\" ] || curl -LO https://raw.githubusercontent.com/CermakM/argo-python-dsl/master/examples/hello-world.yaml'" 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": 6, 87 | "metadata": { 88 | "ExecuteTime": { 89 | "end_time": "2020-01-16T13:09:49.692704Z", 90 | "start_time": "2020-01-16T13:09:49.641746Z" 91 | } 92 | }, 93 | "outputs": [ 94 | { 95 | "name": "stdout", 96 | "output_type": "stream", 97 | "text": [ 98 | "# @file: hello-world.yaml\n", 99 | "apiVersion: argoproj.io/v1alpha1\n", 100 | "kind: Workflow\n", 101 | "metadata:\n", 102 | " name: hello-world\n", 103 | " generateName: hello-world-\n", 104 | "spec:\n", 105 | " entrypoint: whalesay\n", 106 | " templates:\n", 107 | " - name: whalesay\n", 108 | " container:\n", 109 | " name: whalesay\n", 110 | " image: docker/whalesay:latest\n", 111 | " command: [cowsay]\n", 112 | " args: [\"hello world\"]\n", 113 | "status: {}\n", 114 | "\n" 115 | ] 116 | } 117 | ], 118 | "source": [ 119 | "from pathlib import Path\n", 120 | "\n", 121 | "manifest = Path(\"./hello-world.yaml\").read_text()\n", 122 | "print(manifest)" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": 7, 128 | "metadata": { 129 | "ExecuteTime": { 130 | "end_time": "2020-01-16T13:09:49.740156Z", 131 | "start_time": "2020-01-16T13:09:49.695359Z" 132 | }, 133 | "scrolled": false 134 | }, 135 | "outputs": [ 136 | { 137 | "data": { 138 | "text/plain": [ 139 | "{'api_version': 'argoproj.io/v1alpha1',\n", 140 | " 'kind': 'Workflow',\n", 141 | " 'metadata': {'generate_name': 'hello-world-', 'name': 'hello-world'},\n", 142 | " 'spec': {'entrypoint': 'whalesay',\n", 143 | " 'templates': [{'container': {'args': ['hello world'],\n", 144 | " 'command': ['cowsay'],\n", 145 | " 'image': 'docker/whalesay:latest',\n", 146 | " 'name': 'whalesay'},\n", 147 | " 'name': 'whalesay'}]},\n", 148 | " 'status': {}}" 149 | ] 150 | }, 151 | "execution_count": 7, 152 | "metadata": {}, 153 | "output_type": 
"execute_result" 154 | } 155 | ], 156 | "source": [ 157 | "class HelloWorld(Workflow):\n", 158 | " \n", 159 | " entrypoint = \"whalesay\"\n", 160 | " \n", 161 | " @template\n", 162 | " def whalesay(self) -> V1Container:\n", 163 | " container = V1Container(\n", 164 | " image=\"docker/whalesay:latest\",\n", 165 | " name=\"whalesay\",\n", 166 | " command=[\"cowsay\"],\n", 167 | " args=[\"hello world\"]\n", 168 | " )\n", 169 | " \n", 170 | " return container\n", 171 | "\n", 172 | "wf = HelloWorld()\n", 173 | "wf" 174 | ] 175 | }, 176 | { 177 | "cell_type": "markdown", 178 | "metadata": {}, 179 | "source": [ 180 | "---" 181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "execution_count": 8, 186 | "metadata": { 187 | "ExecuteTime": { 188 | "end_time": "2020-01-16T13:09:49.813819Z", 189 | "start_time": "2020-01-16T13:09:49.743558Z" 190 | } 191 | }, 192 | "outputs": [ 193 | { 194 | "name": "stdout", 195 | "output_type": "stream", 196 | "text": [ 197 | "{'apiVersion': 'argoproj.io/v1alpha1',\n", 198 | " 'kind': 'Workflow',\n", 199 | " 'metadata': {'generateName': 'hello-world-', 'name': 'hello-world'},\n", 200 | " 'spec': {'entrypoint': 'whalesay',\n", 201 | " 'templates': [{'container': {'args': ['hello world'],\n", 202 | " 'command': ['cowsay'],\n", 203 | " 'image': 'docker/whalesay:latest',\n", 204 | " 'name': 'whalesay'},\n", 205 | " 'name': 'whalesay'}]},\n", 206 | " 'status': {}}\n" 207 | ] 208 | } 209 | ], 210 | "source": [ 211 | "pprint(sanitize_for_serialization(wf))" 212 | ] 213 | }, 214 | { 215 | "cell_type": "code", 216 | "execution_count": 9, 217 | "metadata": { 218 | "ExecuteTime": { 219 | "end_time": "2020-01-16T13:09:49.881900Z", 220 | "start_time": "2020-01-16T13:09:49.821116Z" 221 | } 222 | }, 223 | "outputs": [ 224 | { 225 | "name": "stdout", 226 | "output_type": "stream", 227 | "text": [ 228 | "{'apiVersion': 'argoproj.io/v1alpha1',\n", 229 | " 'kind': 'Workflow',\n", 230 | " 'metadata': {'generateName': 'hello-world-', 'name': 'hello-world'},\n", 231 | " 'spec': {'entrypoint': 'whalesay',\n", 232 | " 'templates': [{'container': {'args': ['hello world'],\n", 233 | " 'command': ['cowsay'],\n", 234 | " 'image': 'docker/whalesay:latest',\n", 235 | " 'name': 'whalesay'},\n", 236 | " 'name': 'whalesay'}]},\n", 237 | " 'status': {}}\n" 238 | ] 239 | } 240 | ], 241 | "source": [ 242 | "pprint(yaml.safe_load(manifest))" 243 | ] 244 | }, 245 | { 246 | "cell_type": "code", 247 | "execution_count": 10, 248 | "metadata": { 249 | "ExecuteTime": { 250 | "end_time": "2020-01-16T13:09:49.933848Z", 251 | "start_time": "2020-01-16T13:09:49.889193Z" 252 | } 253 | }, 254 | "outputs": [], 255 | "source": [ 256 | "assert sanitize_for_serialization(wf) == yaml.safe_load(manifest), \"Manifests don't match.\"" 257 | ] 258 | } 259 | ], 260 | "metadata": { 261 | "finalized": { 262 | "timestamp": 1579180197096, 263 | "trusted": true 264 | }, 265 | "hide_input": false, 266 | "kernelspec": { 267 | "display_name": "argo-python-dsl", 268 | "language": "python", 269 | "name": "argo-python-dsl" 270 | }, 271 | "language_info": { 272 | "codemirror_mode": { 273 | "name": "ipython", 274 | "version": 3 275 | }, 276 | "file_extension": ".py", 277 | "mimetype": "text/x-python", 278 | "name": "python", 279 | "nbconvert_exporter": "python", 280 | "pygments_lexer": "ipython3", 281 | "version": "3.6.10" 282 | }, 283 | "requirements": { 284 | "aliases": {}, 285 | "dev-packages": {}, 286 | "packages": { 287 | "argo-workflows": "*", 288 | "inflection": "==0.3.1", 289 | "pyyaml": "==5.2" 290 | }, 291 | "requires": { 
292 | "python_version": "3.6" 293 | }, 294 | "sources": [ 295 | { 296 | "name": "pypi", 297 | "url": "https://pypi.org/simple", 298 | "verify_ssl": true 299 | } 300 | ] 301 | }, 302 | "toc": { 303 | "base_numbering": 1, 304 | "nav_menu": {}, 305 | "number_sections": true, 306 | "sideBar": true, 307 | "skip_h1_title": true, 308 | "title_cell": "Table of Contents", 309 | "title_sidebar": "Contents", 310 | "toc_cell": true, 311 | "toc_position": {}, 312 | "toc_section_display": true, 313 | "toc_window_display": false 314 | } 315 | }, 316 | "nbformat": 4, 317 | "nbformat_minor": 2 318 | } 319 | -------------------------------------------------------------------------------- /examples/hello-world.yaml: -------------------------------------------------------------------------------- 1 | # @file: hello-world.yaml 2 | apiVersion: argoproj.io/v1alpha1 3 | kind: Workflow 4 | metadata: 5 | name: hello-world 6 | generateName: hello-world- 7 | spec: 8 | entrypoint: whalesay 9 | templates: 10 | - name: whalesay 11 | container: 12 | name: whalesay 13 | image: docker/whalesay:latest 14 | command: [cowsay] 15 | args: ["hello world"] 16 | status: {} 17 | -------------------------------------------------------------------------------- /examples/resource.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "toc": true 7 | }, 8 | "source": [ 9 | "

<h1>Table of Contents<span class=\"tocSkip\"></span></h1>\n", 10 | "<div class=\"toc\"><ul class=\"toc-item\"></ul></div>
          " 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "metadata": { 17 | "ExecuteTime": { 18 | "end_time": "2020-01-16T13:00:57.173531Z", 19 | "start_time": "2020-01-16T13:00:57.149288Z" 20 | } 21 | }, 22 | "outputs": [], 23 | "source": [ 24 | "%load_ext autoreload\n", 25 | "%autoreload 2" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 2, 31 | "metadata": { 32 | "ExecuteTime": { 33 | "end_time": "2020-01-16T13:00:57.641335Z", 34 | "start_time": "2020-01-16T13:00:57.176270Z" 35 | } 36 | }, 37 | "outputs": [], 38 | "source": [ 39 | "from argo.workflows.dsl import Workflow\n", 40 | "\n", 41 | "from argo.workflows.dsl.tasks import *\n", 42 | "from argo.workflows.dsl.templates import *" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 3, 48 | "metadata": { 49 | "ExecuteTime": { 50 | "end_time": "2020-01-16T13:00:57.667765Z", 51 | "start_time": "2020-01-16T13:00:57.645386Z" 52 | } 53 | }, 54 | "outputs": [], 55 | "source": [ 56 | "import yaml\n", 57 | "\n", 58 | "from pprint import pprint\n", 59 | "\n", 60 | "from argo.workflows.dsl._utils import sanitize_for_serialization" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "---" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 4, 73 | "metadata": { 74 | "ExecuteTime": { 75 | "end_time": "2020-01-16T13:00:57.809752Z", 76 | "start_time": "2020-01-16T13:00:57.670017Z" 77 | } 78 | }, 79 | "outputs": [], 80 | "source": [ 81 | "!sh -c '[ -f \"resource.yaml\" ] || curl -LO https://raw.githubusercontent.com/CermakM/argo-python-dsl/master/examples/resource.yaml'" 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": 5, 87 | "metadata": { 88 | "ExecuteTime": { 89 | "end_time": "2020-01-16T13:00:57.850413Z", 90 | "start_time": "2020-01-16T13:00:57.813695Z" 91 | } 92 | }, 93 | "outputs": [ 94 | { 95 | "name": "stdout", 96 | "output_type": "stream", 97 | "text": [ 98 | "# @file: resource.yaml\n", 99 | "apiVersion: argoproj.io/v1alpha1\n", 100 | "kind: Workflow\n", 101 | "metadata:\n", 102 | " name: k8s-jobs\n", 103 | " generateName: k8s-jobs-\n", 104 | "spec:\n", 105 | " entrypoint: pi\n", 106 | " templates:\n", 107 | " - name: pi\n", 108 | " resource:\n", 109 | " action: create\n", 110 | " successCondition: status.succeeded > 0\n", 111 | " failureCondition: status.failed > 3\n", 112 | " manifest: |\n", 113 | " apiVersion: batch/v1\n", 114 | " kind: Job\n", 115 | " metadata:\n", 116 | " generateName: pi-job-\n", 117 | " spec:\n", 118 | " template:\n", 119 | " metadata:\n", 120 | " name: pi\n", 121 | " spec:\n", 122 | " containers:\n", 123 | " - name: pi\n", 124 | " image: perl\n", 125 | " command: [\"perl\", \"-Mbignum=bpi\", \"-wle\", \"print bpi(2000)\"]\n", 126 | " restartPolicy: Never\n", 127 | " backoffLimit: 4\n", 128 | "status: {}\n", 129 | "\n" 130 | ] 131 | } 132 | ], 133 | "source": [ 134 | "from pathlib import Path\n", 135 | "\n", 136 | "manifest = Path(\"./resource.yaml\").read_text()\n", 137 | "print(manifest)" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": 6, 143 | "metadata": { 144 | "ExecuteTime": { 145 | "end_time": "2020-01-16T13:00:57.904644Z", 146 | "start_time": "2020-01-16T13:00:57.853125Z" 147 | }, 148 | "scrolled": false 149 | }, 150 | "outputs": [ 151 | { 152 | "data": { 153 | "text/plain": [ 154 | "{'api_version': 'argoproj.io/v1alpha1',\n", 155 | " 'kind': 'Workflow',\n", 156 | " 'metadata': {'generate_name': 'k8s-jobs-', 'name': 
'k8s-jobs'},\n", 157 | " 'spec': {'entrypoint': 'pi',\n", 158 | " 'templates': [{'name': 'pi',\n", 159 | " 'resource': {'action': 'create',\n", 160 | " 'failure_condition': 'status.failed > 3',\n", 161 | " 'manifest': 'apiVersion: batch/v1\\n'\n", 162 | " 'kind: Job\\n'\n", 163 | " 'metadata:\\n'\n", 164 | " ' generateName: pi-job-\\n'\n", 165 | " 'spec:\\n'\n", 166 | " ' template:\\n'\n", 167 | " ' metadata:\\n'\n", 168 | " ' name: pi\\n'\n", 169 | " ' spec:\\n'\n", 170 | " ' containers:\\n'\n", 171 | " ' - name: pi\\n'\n", 172 | " ' image: perl\\n'\n", 173 | " ' command: [\"perl\", '\n", 174 | " '\"-Mbignum=bpi\", \"-wle\", '\n", 175 | " '\"print bpi(2000)\"]\\n'\n", 176 | " ' restartPolicy: Never\\n'\n", 177 | " ' backoffLimit: 4\\n',\n", 178 | " 'success_condition': 'status.succeeded > '\n", 179 | " '0'}}]},\n", 180 | " 'status': {}}" 181 | ] 182 | }, 183 | "execution_count": 6, 184 | "metadata": {}, 185 | "output_type": "execute_result" 186 | } 187 | ], 188 | "source": [ 189 | "import textwrap\n", 190 | "\n", 191 | "class K8sJobs(Workflow):\n", 192 | " \n", 193 | " entrypoint = \"pi\"\n", 194 | " \n", 195 | " @template\n", 196 | " def pi(self) -> V1alpha1ResourceTemplate:\n", 197 | " manifest = textwrap.dedent(\"\"\"\\\n", 198 | " apiVersion: batch/v1\n", 199 | " kind: Job\n", 200 | " metadata:\n", 201 | " generateName: pi-job-\n", 202 | " spec:\n", 203 | " template:\n", 204 | " metadata:\n", 205 | " name: pi\n", 206 | " spec:\n", 207 | " containers:\n", 208 | " - name: pi\n", 209 | " image: perl\n", 210 | " command: [\"perl\", \"-Mbignum=bpi\", \"-wle\", \"print bpi(2000)\"]\n", 211 | " restartPolicy: Never\n", 212 | " backoffLimit: 4\n", 213 | " \"\"\")\n", 214 | " template = V1alpha1ResourceTemplate(\n", 215 | " action=\"create\",\n", 216 | " success_condition=\"status.succeeded > 0\",\n", 217 | " failure_condition=\"status.failed > 3\",\n", 218 | " manifest=manifest\n", 219 | " )\n", 220 | "\n", 221 | " return template\n", 222 | " \n", 223 | "wf = K8sJobs()\n", 224 | "wf" 225 | ] 226 | }, 227 | { 228 | "cell_type": "code", 229 | "execution_count": 7, 230 | "metadata": { 231 | "ExecuteTime": { 232 | "end_time": "2020-01-16T13:00:57.935148Z", 233 | "start_time": "2020-01-16T13:00:57.907300Z" 234 | } 235 | }, 236 | "outputs": [ 237 | { 238 | "name": "stdout", 239 | "output_type": "stream", 240 | "text": [ 241 | "api_version: argoproj.io/v1alpha1\n", 242 | "kind: Workflow\n", 243 | "metadata:\n", 244 | " generate_name: k8s-jobs-\n", 245 | " name: k8s-jobs\n", 246 | "spec:\n", 247 | " entrypoint: pi\n", 248 | " templates:\n", 249 | " - name: pi\n", 250 | " resource:\n", 251 | " action: create\n", 252 | " failure_condition: status.failed > 3\n", 253 | " manifest: |-\n", 254 | " apiVersion: batch/v1\n", 255 | " kind: Job\n", 256 | " metadata:\n", 257 | " generateName: pi-job-\n", 258 | " spec:\n", 259 | " template:\n", 260 | " metadata:\n", 261 | " name: pi\n", 262 | " spec:\n", 263 | " containers:\n", 264 | " - name: pi\n", 265 | " image: perl\n", 266 | " command: [\"perl\", \"-Mbignum=bpi\", \"-wle\", \"print bpi(2000)\"]\n", 267 | " restartPolicy: Never\n", 268 | " backoffLimit: 4\n", 269 | " success_condition: status.succeeded > 0\n", 270 | "status: {}\n", 271 | "\n" 272 | ] 273 | } 274 | ], 275 | "source": [ 276 | "print(wf.to_yaml())" 277 | ] 278 | }, 279 | { 280 | "cell_type": "markdown", 281 | "metadata": {}, 282 | "source": [ 283 | "---" 284 | ] 285 | }, 286 | { 287 | "cell_type": "code", 288 | "execution_count": 8, 289 | "metadata": { 290 | "ExecuteTime": { 291 | "end_time": 
"2020-01-16T13:00:57.977728Z", 292 | "start_time": "2020-01-16T13:00:57.942176Z" 293 | } 294 | }, 295 | "outputs": [ 296 | { 297 | "name": "stdout", 298 | "output_type": "stream", 299 | "text": [ 300 | "{'apiVersion': 'argoproj.io/v1alpha1',\n", 301 | " 'kind': 'Workflow',\n", 302 | " 'metadata': {'generateName': 'k8s-jobs-', 'name': 'k8s-jobs'},\n", 303 | " 'spec': {'entrypoint': 'pi',\n", 304 | " 'templates': [{'name': 'pi',\n", 305 | " 'resource': {'action': 'create',\n", 306 | " 'failureCondition': 'status.failed > 3',\n", 307 | " 'manifest': 'apiVersion: batch/v1\\n'\n", 308 | " 'kind: Job\\n'\n", 309 | " 'metadata:\\n'\n", 310 | " ' generateName: pi-job-\\n'\n", 311 | " 'spec:\\n'\n", 312 | " ' template:\\n'\n", 313 | " ' metadata:\\n'\n", 314 | " ' name: pi\\n'\n", 315 | " ' spec:\\n'\n", 316 | " ' containers:\\n'\n", 317 | " ' - name: pi\\n'\n", 318 | " ' image: perl\\n'\n", 319 | " ' command: [\"perl\", '\n", 320 | " '\"-Mbignum=bpi\", \"-wle\", '\n", 321 | " '\"print bpi(2000)\"]\\n'\n", 322 | " ' restartPolicy: Never\\n'\n", 323 | " ' backoffLimit: 4\\n',\n", 324 | " 'successCondition': 'status.succeeded > '\n", 325 | " '0'}}]},\n", 326 | " 'status': {}}\n" 327 | ] 328 | } 329 | ], 330 | "source": [ 331 | "pprint(sanitize_for_serialization(wf))" 332 | ] 333 | }, 334 | { 335 | "cell_type": "code", 336 | "execution_count": 9, 337 | "metadata": { 338 | "ExecuteTime": { 339 | "end_time": "2020-01-16T13:00:58.026864Z", 340 | "start_time": "2020-01-16T13:00:57.981645Z" 341 | } 342 | }, 343 | "outputs": [ 344 | { 345 | "name": "stdout", 346 | "output_type": "stream", 347 | "text": [ 348 | "{'apiVersion': 'argoproj.io/v1alpha1',\n", 349 | " 'kind': 'Workflow',\n", 350 | " 'metadata': {'generateName': 'k8s-jobs-', 'name': 'k8s-jobs'},\n", 351 | " 'spec': {'entrypoint': 'pi',\n", 352 | " 'templates': [{'name': 'pi',\n", 353 | " 'resource': {'action': 'create',\n", 354 | " 'failureCondition': 'status.failed > 3',\n", 355 | " 'manifest': 'apiVersion: batch/v1\\n'\n", 356 | " 'kind: Job\\n'\n", 357 | " 'metadata:\\n'\n", 358 | " ' generateName: pi-job-\\n'\n", 359 | " 'spec:\\n'\n", 360 | " ' template:\\n'\n", 361 | " ' metadata:\\n'\n", 362 | " ' name: pi\\n'\n", 363 | " ' spec:\\n'\n", 364 | " ' containers:\\n'\n", 365 | " ' - name: pi\\n'\n", 366 | " ' image: perl\\n'\n", 367 | " ' command: [\"perl\", '\n", 368 | " '\"-Mbignum=bpi\", \"-wle\", '\n", 369 | " '\"print bpi(2000)\"]\\n'\n", 370 | " ' restartPolicy: Never\\n'\n", 371 | " ' backoffLimit: 4\\n',\n", 372 | " 'successCondition': 'status.succeeded > '\n", 373 | " '0'}}]},\n", 374 | " 'status': {}}\n" 375 | ] 376 | } 377 | ], 378 | "source": [ 379 | "pprint(yaml.safe_load(manifest))" 380 | ] 381 | }, 382 | { 383 | "cell_type": "code", 384 | "execution_count": 10, 385 | "metadata": { 386 | "ExecuteTime": { 387 | "end_time": "2020-01-16T13:00:58.095481Z", 388 | "start_time": "2020-01-16T13:00:58.033770Z" 389 | } 390 | }, 391 | "outputs": [ 392 | { 393 | "data": { 394 | "text/plain": [ 395 | "{}" 396 | ] 397 | }, 398 | "execution_count": 10, 399 | "metadata": {}, 400 | "output_type": "execute_result" 401 | } 402 | ], 403 | "source": [ 404 | "from deepdiff import DeepDiff\n", 405 | "\n", 406 | "diff = DeepDiff(sanitize_for_serialization(wf), yaml.safe_load(manifest))\n", 407 | "diff" 408 | ] 409 | }, 410 | { 411 | "cell_type": "code", 412 | "execution_count": 11, 413 | "metadata": { 414 | "ExecuteTime": { 415 | "end_time": "2020-01-16T13:00:58.119950Z", 416 | "start_time": "2020-01-16T13:00:58.098161Z" 417 | } 418 | }, 419 | "outputs": 
[], 420 | "source": [ 421 | "assert not diff, \"Manifests don't match.\"" 422 | ] 423 | } 424 | ], 425 | "metadata": { 426 | "finalized": { 427 | "timestamp": 1579179665849, 428 | "trusted": true 429 | }, 430 | "hide_input": false, 431 | "kernelspec": { 432 | "display_name": "argo-python-dsl", 433 | "language": "python", 434 | "name": "argo-python-dsl" 435 | }, 436 | "language_info": { 437 | "codemirror_mode": { 438 | "name": "ipython", 439 | "version": 3 440 | }, 441 | "file_extension": ".py", 442 | "mimetype": "text/x-python", 443 | "name": "python", 444 | "nbconvert_exporter": "python", 445 | "pygments_lexer": "ipython3", 446 | "version": "3.6.10" 447 | }, 448 | "requirements": { 449 | "aliases": {}, 450 | "dev-packages": {}, 451 | "packages": { 452 | "argo-workflows": "*", 453 | "deepdiff": "==4.0.9", 454 | "inflection": "==0.3.1", 455 | "pyyaml": "==5.2" 456 | }, 457 | "requires": { 458 | "python_version": "3.6" 459 | }, 460 | "sources": [ 461 | { 462 | "name": "pypi", 463 | "url": "https://pypi.org/simple", 464 | "verify_ssl": true 465 | } 466 | ] 467 | }, 468 | "toc": { 469 | "base_numbering": 1, 470 | "nav_menu": {}, 471 | "number_sections": true, 472 | "sideBar": true, 473 | "skip_h1_title": true, 474 | "title_cell": "Table of Contents", 475 | "title_sidebar": "Contents", 476 | "toc_cell": true, 477 | "toc_position": {}, 478 | "toc_section_display": true, 479 | "toc_window_display": false 480 | } 481 | }, 482 | "nbformat": 4, 483 | "nbformat_minor": 2 484 | } 485 | -------------------------------------------------------------------------------- /examples/resource.yaml: -------------------------------------------------------------------------------- 1 | # @file: resource.yaml 2 | apiVersion: argoproj.io/v1alpha1 3 | kind: Workflow 4 | metadata: 5 | name: k8s-jobs 6 | generateName: k8s-jobs- 7 | spec: 8 | entrypoint: pi 9 | templates: 10 | - name: pi 11 | resource: 12 | action: create 13 | successCondition: status.succeeded > 0 14 | failureCondition: status.failed > 3 15 | manifest: | 16 | apiVersion: batch/v1 17 | kind: Job 18 | metadata: 19 | generateName: pi-job- 20 | spec: 21 | template: 22 | metadata: 23 | name: pi 24 | spec: 25 | containers: 26 | - name: pi 27 | image: perl 28 | command: ["perl", "-Mbignum=bpi", "-wle", "print bpi(2000)"] 29 | restartPolicy: Never 30 | backoffLimit: 4 31 | status: {} 32 | -------------------------------------------------------------------------------- /examples/scripts.yaml: -------------------------------------------------------------------------------- 1 | # @file: scripts.yaml 2 | apiVersion: argoproj.io/v1alpha1 3 | kind: Workflow 4 | metadata: 5 | name: scripts-python 6 | generateName: scripts-python- 7 | spec: 8 | entrypoint: main 9 | templates: 10 | - name: main 11 | dag: 12 | tasks: 13 | - name: generate 14 | template: gen-random-int 15 | - name: print 16 | template: print-message 17 | arguments: 18 | parameters: 19 | - name: message 20 | value: "{{tasks.generate.outputs.result}}" 21 | dependencies: [generate] 22 | 23 | - name: gen-random-int 24 | script: 25 | image: python:alpine3.6 26 | name: gen-random-int 27 | command: [python] 28 | source: | 29 | import random 30 | i = random.randint(1, 100) 31 | print(i) 32 | 33 | - name: print-message 34 | inputs: 35 | parameters: 36 | - name: message 37 | container: 38 | image: alpine:latest 39 | name: print-message 40 | command: [sh, -c] 41 | args: ["echo result was: {{inputs.parameters.message}}"] 42 | status: {} 43 | 
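
The `scripts.yaml` manifest above exercises Argo's script templates, which this DSL models with `V1alpha1ScriptTemplate` (see `templates.py` earlier in this tree). As a rough, hedged sketch only: the `ScriptsPython` class below and its method names are illustrative rather than copied from the repository (the repo's own counterpart lives in `tests/workflows/scripts-python.py`, which is not shown here), and the `V1alpha1ScriptTemplate` keyword arguments are assumed from the `argo-workflows` client models as used by the `closure` decorator. An equivalent workflow might be expressed as:

```python
import textwrap

from argo.workflows.dsl import Workflow
from argo.workflows.dsl.tasks import dependencies, parameter, task
from argo.workflows.dsl.templates import (
    V1alpha1Parameter,
    V1alpha1ScriptTemplate,
    V1alpha1Template,
    V1Container,
    inputs,
    template,
)


class ScriptsPython(Workflow):
    """Hypothetical DSL counterpart of scripts.yaml (illustration only)."""

    @task
    def generate(self) -> V1alpha1Template:
        # Task names come from the dasherized method name, so this
        # becomes the `generate` task of the generated `main` dag.
        return self.gen_random_int()

    # Named `print` to match the `print` task in the manifest above.
    @task
    @parameter(name="message", value="{{tasks.generate.outputs.result}}")
    @dependencies(["generate"])
    def print(self, message: V1alpha1Parameter) -> V1alpha1Template:
        return self.print_message(message=message)

    @template
    def gen_random_int(self) -> V1alpha1ScriptTemplate:
        # The script body ends up verbatim in the template's `source`.
        source = textwrap.dedent(
            """\
            import random
            i = random.randint(1, 100)
            print(i)
            """
        )
        return V1alpha1ScriptTemplate(
            image="python:alpine3.6",
            name="gen-random-int",
            command=["python"],
            source=source,
        )

    @template
    @inputs.parameter(name="message")
    def print_message(self, message: V1alpha1Parameter) -> V1Container:
        return V1Container(
            image="alpine:latest",
            name="print-message",
            command=["sh", "-c"],
            args=["echo result was: {{inputs.parameters.message}}"],
        )
```

Instantiating the class and calling `to_yaml()`, as the notebooks above do, should under these assumptions emit a manifest equivalent to `scripts.yaml`.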
-------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | flexmock 3 | pytest==3.2.1 4 | pytest-cov==2.6.0 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | argo-workflows==4.0.1 2 | inflection 3 | pyyaml 4 | requests 5 | python-dateutil 6 | 7 | # --- development --- 8 | # black 9 | # deepdiff 10 | -------------------------------------------------------------------------------- /scripts/generate_changelog.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # @python_requires: gitchangelog 4 | argo::generate::changelog() { 5 | : "${RELEASE_VERSION?Must define RELEASE_VERSION env variable}" 6 | 7 | local args="$@" 8 | local changelog=$(find ./ -type f -name 'CHANGELOG.*' -exec basename {} \;) 9 | 10 | gitchangelog $args > ${changelog} 11 | } 12 | 13 | args=("$@") 14 | 15 | if [ "$0" = "$BASH_SOURCE" ] ; then 16 | >&2 echo -e "\nGenerating CHANGELOG... \n" 17 | argo::generate::changelog ${args[@]} 18 | fi 19 | -------------------------------------------------------------------------------- /scripts/integration_tests.sh: -------------------------------------------------------------------------------- 1 | echo "Workflow Tests ... \n" 2 | 3 | for f in tests/workflows/*.py 4 | do 5 | workflow=$(basename ${f%%.*}) 6 | echo "Testing $workflow workflow ..." 7 | PYTHONPATH=. python $f 8 | argo submit -n argo tests/workflows/$workflow.yaml --name $workflow 9 | bash scripts/validate_workflow.sh $workflow 10 | echo "Testing $workflow workflow ... done!\n" 11 | done 12 | 13 | echo "WorkflowTemplate Tests ... \n" 14 | for f in tests/workflow_templates/*.py 15 | do 16 | workflow=$(basename ${f%%.*}) 17 | echo "Testing $workflow workflow ..." 18 | PYTHONPATH=. python $f 19 | argo template create -n argo tests/workflow_templates/$workflow.yaml 20 | argo submit -n argo --from=wftmpl/$workflow --name $workflow 21 | bash scripts/validate_workflow.sh $workflow 22 | echo "Testing $workflow workflow ... done!\n" 23 | done 24 | 25 | echo "CronWorkflow Tests ... \n" 26 | for f in tests/cronworkflows/*.py 27 | do 28 | workflow=$(basename ${f%%.*}) 29 | echo "Testing $workflow workflow ..." 30 | PYTHONPATH=. python $f 31 | argo cron create -n argo tests/cronworkflows/$workflow.yaml 32 | argo submit -n argo --from=cronwf/$workflow --name $workflow 33 | bash scripts/validate_workflow.sh $workflow 34 | echo "Testing $workflow workflow ... done!\n" 35 | done 36 | -------------------------------------------------------------------------------- /scripts/validate_workflow.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 The Couler Authors. All rights reserved. 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # We intentionally `set +e` here since we want to allow certain kinds of known failures that can be ignored. 16 | # For example, workflows may not have been created yet so neither `kubectl get workflows` nor 17 | # `kubectl delete workflow` would be successful at earlier stages of this script. 18 | set +e 19 | 20 | WF_NAME=$1 21 | CHECK_INTERVAL_SECS=10 22 | 23 | function get_workflow_status { 24 | local wf_status=$(kubectl -n argo get workflow $1 -o jsonpath='{.status.phase}') 25 | echo ${wf_status} 26 | } 27 | 28 | for i in {1..20}; do 29 | WF_STATUS=$(get_workflow_status ${WF_NAME}) 30 | 31 | if [[ "$WF_STATUS" == "Succeeded" ]]; then 32 | echo "Workflow ${WF_NAME} succeeded." 33 | exit 0 34 | elif [[ "$WF_STATUS" == "Failed" ]] || 35 | [[ "$WF_STATUS" == "Error" ]]; then 36 | echo "Workflow ${WF_NAME} failed." 37 | kubectl -n argo describe workflow ${WF_NAME} 38 | exit 1 39 | else 40 | echo "Workflow ${WF_NAME} status: ${WF_STATUS}. Continue checking..." 41 | sleep ${CHECK_INTERVAL_SECS} 42 | fi 43 | done 44 | echo "Workflow ${WF_NAME} timed out." 45 | 46 | exit 1 47 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from __future__ import print_function 5 | 6 | from pathlib import Path 7 | 8 | from setuptools import setup 9 | from setuptools import find_packages 10 | 11 | 12 | HERE = Path(__file__).parent 13 | 14 | ABOUT = dict() 15 | exec(Path(HERE, "argo/workflows/dsl", "__about__.py").read_text(), ABOUT) 16 | 17 | DEVELOPMENT_STATUS = "2 - Pre-Alpha" 18 | 19 | README: str = Path(HERE, "README.md").read_text(encoding="utf-8") 20 | REQUIREMENTS: list = Path(HERE, "requirements.txt").read_text().splitlines() 21 | 22 | 23 | setup_args = dict( 24 | name=ABOUT["__title__"], 25 | version=ABOUT["__version__"], 26 | author=ABOUT["__author__"], 27 | author_email=ABOUT["__email__"], 28 | url=ABOUT["__uri__"], 29 | license=ABOUT["__license__"], 30 | description=ABOUT["__summary__"], 31 | long_description=README, 32 | long_description_content_type="text/markdown", 33 | classifiers=[ 34 | "Development Status :: %s" % DEVELOPMENT_STATUS, 35 | "Intended Audience :: Developers", 36 | "Intended Audience :: Information Technology", 37 | "License :: OSI Approved :: Apache Software License", 38 | "Operating System :: OS Independent", 39 | "Programming Language :: Python :: 3.6", 40 | "Programming Language :: Python :: 3.7", 41 | "Topic :: Software Development", 42 | "Topic :: Utilities", 43 | ], 44 | packages=["argo.workflows.%s" % p for p in find_packages(where="argo/workflows/")], 45 | zip_safe=False, 46 | install_requires=REQUIREMENTS, 47 | ) 48 | 49 | if __name__ == "__main__": 50 | setup(**setup_args) 51 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Argo Python DSL test suite.""" 2 | -------------------------------------------------------------------------------- /tests/_base.py: -------------------------------------------------------------------------------- 1 | """A base class for implementing tests.""" 2 | 3 | from pathlib import Path 4 | 5 | 6 | _HERE = Path(__file__).parent 7 | 8 | 9 | class TestCase: 10 | """A base class for implementing test 
cases.""" 11 | 12 | DATA = _HERE / "data" 13 | -------------------------------------------------------------------------------- /tests/cronworkflows/.gitignore: -------------------------------------------------------------------------------- 1 | *.yaml 2 | -------------------------------------------------------------------------------- /tests/cronworkflows/hello-cron.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | import ntpath 3 | 4 | from argo.workflows.dsl import template 5 | from argo.workflows.dsl import CronWorkflow 6 | from argo.workflows.dsl.templates import V1Container 7 | 8 | 9 | class HelloCron(CronWorkflow): 10 | 11 | entrypoint = "whalesay" 12 | schedule = "0 0 1 1 *" 13 | 14 | @template 15 | def whalesay(self) -> V1Container: 16 | container = V1Container( 17 | image="docker/whalesay:latest", 18 | name="whalesay", 19 | command=["cowsay"], 20 | args=["hello world"], 21 | ) 22 | return container 23 | 24 | 25 | if __name__ == "__main__": 26 | wf = HelloCron() 27 | wf_file = ntpath.basename(__file__).replace(".py", ".yaml") 28 | wf.to_file(f"{pathlib.Path(__file__).parent}/{wf_file}") 29 | -------------------------------------------------------------------------------- /tests/data/workflows/cluster-workflow-template.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: ClusterWorkflowTemplate 3 | metadata: 4 | name: hello-cluster-template 5 | spec: 6 | templates: 7 | - name: whalesay-template 8 | inputs: 9 | parameters: 10 | - name: message 11 | container: 12 | image: docker/whalesay 13 | command: [cowsay] 14 | args: ["{{inputs.parameters.message}}"] 15 | -------------------------------------------------------------------------------- /tests/data/workflows/cron-workflow.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: CronWorkflow 3 | metadata: 4 | name: hello-world 5 | spec: 6 | schedule: "* * * * *" 7 | timezone: "America/Los_Angeles" # Default to local machine timezone 8 | startingDeadlineSeconds: 0 9 | concurrencyPolicy: "Replace" # Default to "Allow" 10 | successfulJobsHistoryLimit: 4 # Default 3 11 | failedJobsHistoryLimit: 4 # Default 1 12 | suspend: false # Set to "true" to suspend scheduling 13 | workflowSpec: 14 | entrypoint: whalesay 15 | templates: 16 | - name: whalesay 17 | container: 18 | image: docker/whalesay:latest 19 | command: [cowsay] 20 | args: ["🕓 hello world"] 21 | -------------------------------------------------------------------------------- /tests/data/workflows/hello-world.yaml: -------------------------------------------------------------------------------- 1 | ../../../examples/hello-world.yaml -------------------------------------------------------------------------------- /tests/data/workflows/workflow-template.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: WorkflowTemplate 3 | metadata: 4 | name: hello-template 5 | spec: 6 | entrypoint: whalesay-template 7 | arguments: 8 | parameters: 9 | - name: message 10 | value: hello world 11 | templates: 12 | - name: whalesay-template 13 | inputs: 14 | parameters: 15 | - name: message 16 | container: 17 | image: docker/whalesay 18 | command: [cowsay] 19 | args: ["{{inputs.parameters.message}}"] 20 | -------------------------------------------------------------------------------- 
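For orientation before the test suite below: each fixture manifest above has a direct DSL counterpart. The following is a minimal sketch of the DSL form of workflow-template.yaml, assuming the underscore-to-dash template naming implied by the examples in this repository, and omitting the spec-level default arguments, whose decorator API is not shown in this dump:

import pathlib

from argo.workflows.dsl import WorkflowTemplate
from argo.workflows.dsl.templates import inputs, template
from argo.workflows.dsl.templates import V1alpha1Parameter, V1Container


class HelloTemplate(WorkflowTemplate):
    # Sketch of tests/data/workflows/workflow-template.yaml in DSL form.
    entrypoint = "whalesay-template"

    @template
    @inputs.parameter(name="message")
    def whalesay_template(self, message: V1alpha1Parameter) -> V1Container:
        # Echoes the input parameter, exactly as the container in the manifest does.
        return V1Container(
            image="docker/whalesay",
            name="whalesay-template",
            command=["cowsay"],
            args=["{{inputs.parameters.message}}"],
        )


if __name__ == "__main__":
    # Render to YAML next to this script, as the repository's test helpers do.
    HelloTemplate().to_file(f"{pathlib.Path(__file__).parent}/hello-template.yaml")

Rendering this class with to_file should produce a manifest equivalent to the fixture above, which the tests that follow load back via from_file.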
/tests/test-notebooks.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -x
3 | 
4 | for nb_path in examples/*.ipynb; do
5 | {
6 |     notebook="$(basename "${nb_path}")"
7 | 
8 |     echo "---"
9 |     echo "--- Test: ${notebook}"
10 |     echo "---"
11 | 
12 |     {
13 |         set -euo pipefail
14 |         set +x
15 | 
16 |         IMAGE_NAME="s2i-test-${notebook%%.*}"
17 | 
18 |         s2i build \
19 |             --rm \
20 |             --env JUPYTER_NOTEBOOK_PATH="$notebook" \
21 |             'examples/' quay.io/cermakm/jupyter-notebook-s2i ${IMAGE_NAME} \
22 |             > "/tmp/test-${notebook}-build.stdout"
23 | 
24 |         docker run -i \
25 |             -v $(pwd)/argo/workflows/dsl/:/opt/app-root/lib/python3.6/site-packages/argo/workflows/dsl/:ro,z \
26 |             -v $(pwd)/examples/:/opt/app-root/src/examples/:ro,z \
27 |             --env JUPYTER_NOTEBOOK_PATH="examples/$notebook" \
28 |             --name "test-${notebook}" \
29 |             --rm \
30 |             --user $(id -u) \
31 |             ${IMAGE_NAME}:latest
32 |     }
33 | 
34 |     res=$?
35 | 
36 |     # cleanup
37 |     docker rmi ${IMAGE_NAME} || true
38 | 
39 |     if [ $res -ne 0 ]; then
40 |         echo -e "--- Test: ${notebook} ... FAILED\n" ; exit 1 # fail fast
41 |     else
42 |         echo -e "--- Test: ${notebook} ... PASSED\n"
43 |     fi
44 | }
45 | done
46 | 
47 | exit 0
--------------------------------------------------------------------------------
/tests/test_cluster_workflow_template.py:
--------------------------------------------------------------------------------
1 | from flexmock import flexmock
2 | import pytest
3 | import requests
4 | 
5 | from argo.workflows.client import ApiClient
6 | from argo.workflows.client.models import (
7 |     V1alpha1Parameter,
8 |     V1alpha1Template,
9 |     V1alpha1WorkflowTemplate,
10 |     V1Container,
11 | )
12 | 
13 | from argo.workflows.dsl import ClusterWorkflowTemplate
14 | from argo.workflows.dsl.tasks import dependencies, parameter, task
15 | from argo.workflows.dsl.templates import inputs, template
16 | 
17 | from ._base import TestCase
18 | 
19 | """ClusterWorkflowTemplate test suite."""
20 | 
21 | 
22 | @pytest.fixture  # type: ignore
23 | def api() -> ApiClient:
24 |     """Fake API client."""
25 |     return ApiClient()
26 | 
27 | 
28 | @pytest.fixture  # type: ignore
29 | def url() -> str:
30 |     """Fake URL fixture."""
31 |     return "https://example.com/cluster-workflow-template.yaml"
32 | 
33 | 
34 | @pytest.fixture  # type: ignore
35 | def wf() -> ClusterWorkflowTemplate:
36 |     """Fake ClusterWorkflowTemplate."""
37 | 
38 |     class FakeClusterWorkflowTemplate(ClusterWorkflowTemplate):
39 |         name = "test"
40 | 
41 |     wf = FakeClusterWorkflowTemplate(compile=True)
42 |     return wf
43 | 
44 | 
45 | class TestClusterWorkflowTemplate(TestCase):
46 |     """Test ClusterWorkflowTemplate."""
47 | 
48 |     _WORKFLOW_FILE = TestCase.DATA / "workflows" / "cluster-workflow-template.yaml"
49 | 
50 |     def test_compile(self) -> None:
51 |         """Test `ClusterWorkflowTemplate.compile` method."""
52 | 
53 |         class TestClusterWorkflowTemplateWithParameters(ClusterWorkflowTemplate):
54 |             name = "test"
55 | 
56 |             @task
57 |             @parameter(name="message", value="A")
58 |             def A(self, message: V1alpha1Parameter) -> V1alpha1Template:
59 |                 return self.echo(message=message)
60 | 
61 |             @task
62 |             @parameter(name="message", value="B")
63 |             @dependencies(["A"])
64 |             def B(self, message: V1alpha1Parameter) -> V1alpha1Template:
65 |                 return self.echo(message=message)
66 | 
67 |             @template
68 |             @inputs.parameter(name="message")
69 |             def echo(self, message: V1alpha1Parameter) -> V1Container:
70 |                 container = V1Container(
71 |                     image="alpine:3.7",
72 |                     name="echo",
73 |                     command=["echo", "{{inputs.parameters.message}}"],
74 |                 )
75 |                 return container
76 | 
77 |         # test compile=False
78 |         wf_not_compiled = TestClusterWorkflowTemplateWithParameters(compile=False)
79 | 
80 |         assert wf_not_compiled.model is None
81 | 
82 |         # test multiple instances
83 |         wf_a = TestClusterWorkflowTemplateWithParameters()
84 |         wf_b = TestClusterWorkflowTemplateWithParameters()
85 | 
86 |         # assert wf_a.name == "hello-cluster-template"
87 |         assert wf_b.kind == "ClusterWorkflowTemplate"
88 | 
89 |         assert wf_a == wf_b
90 | 
91 |         assert isinstance(wf_a, V1alpha1WorkflowTemplate)
92 |         assert isinstance(wf_b, V1alpha1WorkflowTemplate)
93 | 
94 |     def test_from_file(self) -> None:
95 |         """Test `Workflow.from_file` method."""
96 |         wf = ClusterWorkflowTemplate.from_file(self._WORKFLOW_FILE)
97 | 
98 |         assert isinstance(wf, ClusterWorkflowTemplate)
99 |         assert wf.name == "hello-cluster-template"
100 |         assert wf.kind == "ClusterWorkflowTemplate"
101 |         assert len(wf.spec.templates) == 1
102 | 
103 |     def test_from_url(self, url: str) -> None:
104 |         """Test `Workflow.from_url` method."""
105 |         fake_response = type(
106 |             "Response",
107 |             (),
108 |             {"text": self._WORKFLOW_FILE.read_text(), "raise_for_status": lambda: None},
109 |         )
110 |         flexmock(requests).should_receive("get").and_return(fake_response)
111 | 
112 |         wf = ClusterWorkflowTemplate.from_url(url)
113 | 
114 |         assert isinstance(wf, ClusterWorkflowTemplate)
115 |         # assert wf.name == "test"
116 |         assert wf.kind == "ClusterWorkflowTemplate"
117 |         assert len(wf.spec.templates) == 1
--------------------------------------------------------------------------------
/tests/test_cronworkflow.py:
--------------------------------------------------------------------------------
1 | from flexmock import flexmock
2 | import pytest
3 | import requests
4 | 
5 | from argo.workflows.client import ApiClient
6 | from argo.workflows.client.models import (
7 |     V1alpha1CronWorkflow,
8 |     V1alpha1Parameter,
9 |     V1alpha1Template,
10 |     V1Container,
11 | )
12 | 
13 | from argo.workflows.dsl import CronWorkflow
14 | from argo.workflows.dsl.tasks import dependencies, parameter, task
15 | from argo.workflows.dsl.templates import inputs, template
16 | 
17 | from ._base import TestCase
18 | 
19 | """CronWorkflow test suite."""
20 | 
21 | 
22 | @pytest.fixture  # type: ignore
23 | def api() -> ApiClient:
24 |     """Fake API client."""
25 |     return ApiClient()
26 | 
27 | 
28 | @pytest.fixture  # type: ignore
29 | def url() -> str:
30 |     """Fake URL fixture."""
31 |     return "https://example.com/cron-workflow.yaml"
32 | 
33 | 
34 | @pytest.fixture  # type: ignore
35 | def cronwf() -> CronWorkflow:
36 |     """Fake CronWorkflow."""
37 | 
38 |     class FakeCronWorkflow(CronWorkflow):
39 |         name = "test"
40 |         schedule = "0 0 1 1 *"
41 | 
42 |     wf = FakeCronWorkflow(compile=True)
43 |     return wf
44 | 
45 | 
46 | class TestCronWorkflow(TestCase):
47 |     """Test CronWorkflow."""
48 | 
49 |     _WORKFLOW_FILE = TestCase.DATA / "workflows" / "cron-workflow.yaml"
50 | 
51 |     def test_compile(self) -> None:
52 |         """Test `CronWorkflow.compile` method."""
53 | 
54 |         class TestCronWorkflowWithParameters(CronWorkflow):
55 |             name = "test"
56 |             schedule = "0 0 1 1 *"
57 | 
58 |             @task
59 |             @parameter(name="message", value="A")
60 |             def A(self, message: V1alpha1Parameter) -> V1alpha1Template:
61 |                 return self.echo(message=message)
62 | 
63 |             @task
64 |             @parameter(name="message", value="B")
65 |             @dependencies(["A"])
66 |             def B(self, message: V1alpha1Parameter) -> V1alpha1Template:
67 |                 return self.echo(message=message)
68 | 
69 |             @template
70 |             @inputs.parameter(name="message")
71 |             def echo(self, message: V1alpha1Parameter) -> V1Container:
72 |                 container = V1Container(
73 |                     image="alpine:3.7",
74 |                     name="echo",
75 |                     command=["echo", "{{inputs.parameters.message}}"],
"{{inputs.parameters.message}}"], 75 | ) 76 | return container 77 | 78 | # test compile=False 79 | wf_not_compiled = TestCronWorfklowWithParameters(compile=False) 80 | 81 | assert wf_not_compiled.model is None 82 | 83 | # test multiple instances 84 | wf_a = TestCronWorfklowWithParameters() 85 | wf_b = TestCronWorfklowWithParameters() 86 | 87 | assert wf_a == wf_b 88 | 89 | assert isinstance(wf_a, V1alpha1CronWorkflow) 90 | assert isinstance(wf_b, V1alpha1CronWorkflow) 91 | 92 | def test_from_file(self) -> None: 93 | """Test `Workflow.from_file` method.""" 94 | wf = CronWorkflow.from_file(self._WORKFLOW_FILE) 95 | 96 | assert isinstance(wf, CronWorkflow) 97 | assert wf.name == "hello-world" 98 | assert wf.kind == "CronWorkflow" 99 | assert len(wf.spec.workflow_spec.templates) == 1 100 | 101 | def test_from_url(self, url: str) -> None: 102 | """Test `Workflow.from_url` method.""" 103 | fake_response = type( 104 | "Response", 105 | (), 106 | {"text": self._WORKFLOW_FILE.read_text(), "raise_for_status": lambda: None}, 107 | ) 108 | flexmock(requests).should_receive("get").and_return(fake_response) 109 | 110 | wf = CronWorkflow.from_url(url) 111 | 112 | assert isinstance(wf, CronWorkflow) 113 | # assert wf.name == "test" 114 | assert wf.kind == "CronWorkflow" 115 | assert len(wf.spec.workflow_spec.templates) == 1 116 | -------------------------------------------------------------------------------- /tests/test_workflow.py: -------------------------------------------------------------------------------- 1 | import flexmock 2 | import pytest 3 | import requests 4 | 5 | from argo.workflows.client import ApiClient, WorkflowServiceApi 6 | from argo.workflows.client.models import ( 7 | V1alpha1Arguments, 8 | V1alpha1Parameter, 9 | V1alpha1Template, 10 | V1alpha1Workflow, 11 | V1Container 12 | ) 13 | 14 | from argo.workflows.dsl import Workflow 15 | from argo.workflows.dsl.tasks import ( 16 | dependencies, 17 | parameter, 18 | task 19 | ) 20 | from argo.workflows.dsl.templates import inputs, template 21 | 22 | from ._base import TestCase 23 | 24 | """Workflow test suite.""" 25 | 26 | 27 | @pytest.fixture # type: ignore 28 | def api() -> ApiClient: 29 | """Fake API client.""" 30 | return ApiClient() 31 | 32 | 33 | @pytest.fixture # type: ignore 34 | def url() -> str: 35 | """Fake URL fixture.""" 36 | 37 | 38 | @pytest.fixture # type: ignore 39 | def wf() -> Workflow: 40 | """Fake Workflow.""" 41 | 42 | class FakeWorkflow(Workflow): 43 | name = "test" 44 | 45 | wf = FakeWorkflow(compile=True) 46 | return wf 47 | 48 | 49 | class TestWorkflow(TestCase): 50 | """Test Workflow.""" 51 | 52 | _WORKFLOW_FILE = TestCase.DATA / "workflows" / "hello-world.yaml" 53 | 54 | def test_compile(self) -> None: 55 | """Test `Workflow.compile` method.""" 56 | class TestWorfklowWithParameters(Workflow): 57 | name = "test" 58 | 59 | @task 60 | @parameter(name="message", value="A") 61 | def A(self, message: V1alpha1Parameter) -> V1alpha1Template: 62 | return self.echo(message=message) 63 | 64 | @task 65 | @parameter(name="message", value="B") 66 | @dependencies(["A"]) 67 | def B(self, message: V1alpha1Parameter) -> V1alpha1Template: 68 | return self.echo(message=message) 69 | 70 | @template 71 | @inputs.parameter(name="message") 72 | def echo(self, message: V1alpha1Parameter) -> V1Container: 73 | container = V1Container( 74 | image="alpine:3.7", 75 | name="echo", 76 | command=["echo", "{{inputs.parameters.message}}"], 77 | ) 78 | return container 79 | 80 | # test compile=False 81 | wf_not_compiled = 
83 | 
84 |         assert wf_not_compiled.model is None
85 | 
86 |         # test multiple instances
87 |         wf_a = TestWorkflowWithParameters()
88 |         wf_b = TestWorkflowWithParameters()
89 | 
90 |         assert wf_a == wf_b
91 | 
92 |         assert isinstance(wf_a, V1alpha1Workflow)
93 |         assert isinstance(wf_b, V1alpha1Workflow)
94 | 
95 |     def test_from_file(self) -> None:
96 |         """Test `Workflow.from_file` method."""
97 |         wf = Workflow.from_file(self._WORKFLOW_FILE)
98 | 
99 |         assert isinstance(wf, Workflow)
100 |         assert wf.name == "hello-world"
101 |         assert wf.kind == "Workflow"
102 |         assert len(wf.spec.templates) == 1
103 | 
104 |     def test_from_url(self, url: str) -> None:
105 |         """Test `Workflow.from_url` method."""
106 |         fake_response = type(
107 |             "Response",
108 |             (),
109 |             {"text": self._WORKFLOW_FILE.read_text(), "raise_for_status": lambda: None},
110 |         )
111 |         flexmock(requests).should_receive("get").and_return(fake_response)
112 | 
113 |         wf = Workflow.from_url(url)
114 | 
115 |         assert isinstance(wf, Workflow)
116 |         # assert wf.name == "test"
117 |         assert wf.kind == "Workflow"
118 |         assert len(wf.spec.templates) == 1
119 | 
120 |     def test_submit(self, api: ApiClient, wf: Workflow) -> None:
121 |         """Test `Workflow.submit` method."""
122 |         fake_workflow_name = "test"
123 |         flexmock(WorkflowServiceApi).should_receive("create_workflow").and_return(
124 |             fake_workflow_name
125 |         )
126 | 
127 |         # submit w/o parameters
128 |         workflow_name: str = wf.submit(client=ApiClient(), namespace="test")
129 | 
130 |         assert isinstance(workflow_name, str)
131 |         assert workflow_name == "test"
132 | 
133 |         # submit w/ parameters
134 |         with pytest.raises(AttributeError) as exc:
135 |             # Expect an AttributeError: the `param` parameter is not defined yet
136 |             workflow_name: str = wf.submit(
137 |                 client=api, namespace="test", parameters={"param": "test"}
138 |             )
139 | 
140 |         wf.spec.arguments = V1alpha1Arguments(
141 |             parameters=[V1alpha1Parameter(name="param")]
142 |         )
143 |         workflow_result: str = wf.submit(
144 |             client=api, namespace="test", parameters={"param": "test"}
145 |         )
146 |         # assert isinstance(workflow_result, V1alpha1Workflow)
147 |         # assert isinstance(workflow_result.metadata.name, str)
148 |         # assert len(workflow_result.spec.arguments.parameters) == 1
149 |         # assert workflow_result.spec.arguments.parameters[0].name == 'param'
150 |         # assert workflow_result.spec.arguments.parameters[0].value == 'test'
151 |         # assert workflow_result.metadata.name == "test"
--------------------------------------------------------------------------------
/tests/test_workflow_template.py:
--------------------------------------------------------------------------------
1 | from flexmock import flexmock
2 | import pytest
3 | import requests
4 | 
5 | from argo.workflows.client import ApiClient
6 | from argo.workflows.client.models import (
7 |     V1alpha1Parameter,
8 |     V1alpha1Template,
9 |     V1alpha1WorkflowTemplate,
10 |     V1Container,
11 | )
12 | 
13 | from argo.workflows.dsl import WorkflowTemplate
14 | from argo.workflows.dsl.tasks import dependencies, parameter, task
15 | from argo.workflows.dsl.templates import inputs, template
16 | 
17 | from ._base import TestCase
18 | 
19 | """WorkflowTemplate test suite."""
20 | 
21 | 
22 | @pytest.fixture  # type: ignore
23 | def api() -> ApiClient:
24 |     """Fake API client."""
25 |     return ApiClient()
26 | 
27 | 
28 | @pytest.fixture  # type: ignore
29 | def url() -> str:
30 |     """Fake URL fixture."""
31 |     return "https://example.com/workflow-template.yaml"
32 | 
33 | 
34 | @pytest.fixture  # type: ignore
35 | def wf() -> WorkflowTemplate:
36 |     """Fake WorkflowTemplate."""
37 | 
38 |     class FakeWorkflowTemplate(WorkflowTemplate):
39 |         name = "test"
40 | 
41 |     wf = FakeWorkflowTemplate(compile=True)
42 |     return wf
43 | 
44 | 
45 | class TestWorkflowTemplate(TestCase):
46 |     """Test WorkflowTemplate."""
47 | 
48 |     _WORKFLOW_FILE = TestCase.DATA / "workflows" / "workflow-template.yaml"
49 | 
50 |     def test_compile(self) -> None:
51 |         """Test `WorkflowTemplate.compile` method."""
52 | 
53 |         class TestWorkflowTemplateWithParameters(WorkflowTemplate):
54 |             name = "test"
55 | 
56 |             @task
57 |             @parameter(name="message", value="A")
58 |             def A(self, message: V1alpha1Parameter) -> V1alpha1Template:
59 |                 return self.echo(message=message)
60 | 
61 |             @task
62 |             @parameter(name="message", value="B")
63 |             @dependencies(["A"])
64 |             def B(self, message: V1alpha1Parameter) -> V1alpha1Template:
65 |                 return self.echo(message=message)
66 | 
67 |             @template
68 |             @inputs.parameter(name="message")
69 |             def echo(self, message: V1alpha1Parameter) -> V1Container:
70 |                 container = V1Container(
71 |                     image="alpine:3.7",
72 |                     name="echo",
73 |                     command=["echo", "{{inputs.parameters.message}}"],
74 |                 )
75 |                 return container
76 | 
77 |         # test compile=False
78 |         wf_not_compiled = TestWorkflowTemplateWithParameters(compile=False)
79 | 
80 |         assert wf_not_compiled.model is None
81 | 
82 |         # test multiple instances
83 |         wf_a = TestWorkflowTemplateWithParameters()
84 |         wf_b = TestWorkflowTemplateWithParameters()
85 | 
86 |         assert wf_a == wf_b
87 | 
88 |         assert isinstance(wf_a, V1alpha1WorkflowTemplate)
89 |         assert isinstance(wf_b, V1alpha1WorkflowTemplate)
90 | 
91 |     def test_from_file(self) -> None:
92 |         """Test `Workflow.from_file` method."""
93 |         wf = WorkflowTemplate.from_file(self._WORKFLOW_FILE)
94 | 
95 |         assert isinstance(wf, WorkflowTemplate)
96 |         assert wf.name == "hello-template"
97 |         assert wf.kind == "WorkflowTemplate"
98 |         assert len(wf.spec.templates) == 1
99 | 
100 |     def test_from_url(self, url: str) -> None:
101 |         """Test `Workflow.from_url` method."""
102 |         fake_response = type(
103 |             "Response",
104 |             (),
105 |             {"text": self._WORKFLOW_FILE.read_text(), "raise_for_status": lambda: None},
106 |         )
107 |         flexmock(requests).should_receive("get").and_return(fake_response)
108 | 
109 |         wf = WorkflowTemplate.from_url(url)
110 | 
111 |         assert isinstance(wf, WorkflowTemplate)
112 |         # assert wf.name == "test"
113 |         assert wf.kind == "WorkflowTemplate"
114 |         assert len(wf.spec.templates) == 1
--------------------------------------------------------------------------------
/tests/workflow_templates/.gitignore:
--------------------------------------------------------------------------------
1 | *.yaml
--------------------------------------------------------------------------------
/tests/workflow_templates/hello-template.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | import ntpath
3 | 
4 | from argo.workflows.dsl import template
5 | from argo.workflows.dsl import WorkflowTemplate
6 | from argo.workflows.dsl.templates import V1Container
7 | 
8 | 
9 | class HelloTemplate(WorkflowTemplate):
10 | 
11 |     entrypoint = "whalesay"
12 | 
13 |     @template
14 |     def whalesay(self) -> V1Container:
15 |         container = V1Container(
16 |             image="docker/whalesay:latest",
17 |             name="whalesay",
18 |             command=["cowsay"],
19 |             args=["hello world"],
20 |         )
21 |         return container
22 | 
23 | 
24 | if __name__ == "__main__":
25 |     wf = HelloTemplate()
26 |     wf_file = ntpath.basename(__file__).replace(".py", ".yaml")
27 |     wf.to_file(f"{pathlib.Path(__file__).parent}/{wf_file}")
28 | 
--------------------------------------------------------------------------------
/tests/workflows/.gitignore:
--------------------------------------------------------------------------------
1 | *.yaml
--------------------------------------------------------------------------------
/tests/workflows/artifacts.py:
--------------------------------------------------------------------------------
1 | import ntpath
2 | import pathlib
3 | 
4 | from argo.workflows.dsl import Workflow
5 | from argo.workflows.dsl.tasks import task
6 | from argo.workflows.dsl.tasks import dependencies
7 | from argo.workflows.dsl.templates import inputs
8 | from argo.workflows.dsl.templates import outputs
9 | from argo.workflows.dsl.templates import artifact
10 | from argo.workflows.dsl.templates import template
11 | from argo.workflows.dsl.templates import V1alpha1Artifact
12 | from argo.workflows.dsl.templates import V1alpha1Template
13 | from argo.workflows.dsl.templates import V1Container
14 | 
15 | 
16 | class Artifacts(Workflow):
17 |     @task
18 |     def generate(self) -> V1alpha1Template:
19 |         return self.whalesay()
20 | 
21 |     @task
22 |     @artifact(name="message", _from="{{tasks.generate.outputs.artifacts.hello-art}}")
23 |     @dependencies(["generate"])
24 |     def consume_artifact(self, message: V1alpha1Artifact) -> V1alpha1Template:
25 |         return self.print_message(message=message)
26 | 
27 |     @template
28 |     @outputs.artifact(name="hello-art", path="/tmp/hello_world.txt")
29 |     def whalesay(self) -> V1Container:
30 |         container = V1Container(
31 |             name="whalesay",
32 |             image="docker/whalesay:latest",
33 |             command=["sh", "-c"],
34 |             args=["cowsay hello world | tee /tmp/hello_world.txt"],
35 |         )
36 | 
37 |         return container
38 | 
39 |     @template
40 |     @inputs.artifact(name="message", path="/tmp/message")
41 |     def print_message(self, message: V1alpha1Artifact) -> V1Container:
42 |         container = V1Container(
43 |             name="print-message",
44 |             image="alpine:latest",
45 |             command=["sh", "-c"],
46 |             args=["cat /tmp/message"],
47 |         )
48 | 
49 |         return container
50 | 
51 | 
52 | if __name__ == "__main__":
53 |     wf = Artifacts()
54 |     wf_file = ntpath.basename(__file__).replace(".py", ".yaml")
55 |     wf.to_file(f"{pathlib.Path(__file__).parent}/{wf_file}")
56 | 
--------------------------------------------------------------------------------
/tests/workflows/dag-diamond.py:
--------------------------------------------------------------------------------
1 | import ntpath
2 | import pathlib
3 | 
4 | from argo.workflows.dsl import Workflow
5 | from argo.workflows.dsl.tasks import task
6 | from argo.workflows.dsl.tasks import dependencies
7 | from argo.workflows.dsl.templates import inputs
8 | from argo.workflows.dsl.templates import parameter
9 | from argo.workflows.dsl.templates import template
10 | from argo.workflows.dsl.templates import V1alpha1Parameter
11 | from argo.workflows.dsl.templates import V1alpha1Template
12 | from argo.workflows.dsl.templates import V1Container
13 | 
14 | 
15 | class DagDiamond(Workflow):
16 |     @task
17 |     @parameter(name="message", value="A")
18 |     def A(self, message: V1alpha1Parameter) -> V1alpha1Template:
19 |         return self.echo(message=message)
20 | 
21 |     @task
22 |     @parameter(name="message", value="B")
23 |     @dependencies(["A"])
24 |     def B(self, message: V1alpha1Parameter) -> V1alpha1Template:
25 |         return self.echo(message=message)
26 | 
27 |     @task
28 |     @parameter(name="message", value="C")
29 |     @dependencies(["A"])
30 |     def C(self, message: V1alpha1Parameter) -> V1alpha1Template:
31 |         return self.echo(message=message)
32 | 
33 |     @task
34 |     @parameter(name="message", value="D")
35 |     @dependencies(["B", "C"])
36 |     def D(self, message: V1alpha1Parameter) -> V1alpha1Template:
37 |         return self.echo(message=message)
38 | 
39 |     @template
40 |     @inputs.parameter(name="message")
41 |     def echo(self, message: V1alpha1Parameter) -> V1Container:
42 |         container = V1Container(
43 |             image="alpine:3.7",
44 |             name="echo",
45 |             command=["echo", "{{inputs.parameters.message}}"],
46 |         )
47 | 
48 |         return container
49 | 
50 | 
51 | if __name__ == "__main__":
52 |     wf = DagDiamond()
53 |     wf_file = ntpath.basename(__file__).replace(".py", ".yaml")
54 |     wf.to_file(f"{pathlib.Path(__file__).parent}/{wf_file}")
55 | 
--------------------------------------------------------------------------------
/tests/workflows/hello-world.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | import ntpath
3 | 
4 | from argo.workflows.dsl import template
5 | from argo.workflows.dsl import Workflow
6 | from argo.workflows.dsl.templates import V1Container
7 | 
8 | 
9 | class HelloWorld(Workflow):
10 | 
11 |     entrypoint = "whalesay"
12 | 
13 |     @template
14 |     def whalesay(self) -> V1Container:
15 |         container = V1Container(
16 |             image="docker/whalesay:latest",
17 |             name="whalesay",
18 |             command=["cowsay"],
19 |             args=["hello world"],
20 |         )
21 |         return container
22 | 
23 | 
24 | if __name__ == "__main__":
25 |     wf = HelloWorld()
26 |     wf_file = ntpath.basename(__file__).replace(".py", ".yaml")
27 |     wf.to_file(f"{pathlib.Path(__file__).parent}/{wf_file}")
28 | 
--------------------------------------------------------------------------------
/tests/workflows/loops.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | import ntpath
3 | 
4 | from argo.workflows.dsl import task
5 | from argo.workflows.dsl.tasks import parameter
6 | from argo.workflows.dsl.templates import inputs
7 | from argo.workflows.dsl import Workflow
8 | from argo.workflows.dsl.templates import template
9 | from argo.workflows.dsl.templates import V1Container
10 | from argo.workflows.dsl.tasks import V1alpha1Template
11 | from argo.workflows.dsl.tasks import with_items
12 | 
13 | 
14 | class Loop(Workflow):
15 |     @task
16 |     @with_items(["apple", "orange", "pineapple", "watermelon"])
17 |     @parameter(name="message", value="{{item}}")
18 |     def generate(self, message: str) -> V1alpha1Template:
19 |         return self.whalesay(message=message)
20 | 
21 |     @template
22 |     @inputs.parameter(name="message")
23 |     def whalesay(self, message: str) -> V1Container:
24 |         container = V1Container(
25 |             image="docker/whalesay:latest",
26 |             name="whalesay",
27 |             command=["cowsay"],
28 |             args=["{{inputs.parameters.message}}"],
29 |         )
30 |         return container
31 | 
32 | 
33 | if __name__ == "__main__":
34 |     wf = Loop()
35 |     wf_file = ntpath.basename(__file__).replace(".py", ".yaml")
36 |     wf.to_file(f"{pathlib.Path(__file__).parent}/{wf_file}")
--------------------------------------------------------------------------------
/tests/workflows/scripts-python.py:
--------------------------------------------------------------------------------
1 | import ntpath
2 | import pathlib
3 | import textwrap
4 | 
5 | from argo.workflows.dsl import Workflow
6 | from argo.workflows.dsl.tasks import task
7 | from argo.workflows.dsl.tasks import dependencies
8 | from argo.workflows.dsl.tasks import parameter
9 | from argo.workflows.dsl.templates import inputs
10 | from argo.workflows.dsl.templates import template
11 | from argo.workflows.dsl.templates import V1alpha1Template
12 | from argo.workflows.dsl.templates import V1Container
13 | from argo.workflows.dsl.templates import V1alpha1ScriptTemplate
14 | 
15 | 
16 | class ScriptsPython(Workflow):
17 |     @task
18 |     def generate(self) -> V1alpha1Template:
19 |         return self.gen_random_int()
20 | 
21 |     @task
22 |     @parameter(name="message", value="{{tasks.generate.outputs.result}}")
23 |     @dependencies(["generate"])
24 |     def print(self, message: str) -> V1alpha1Template:
25 |         return self.print_message(message)
26 | 
27 |     @template
28 |     def gen_random_int(self) -> V1alpha1ScriptTemplate:
29 |         source = textwrap.dedent(
30 |             """\
31 |             import random
32 |             i = random.randint(1, 100)
33 |             print(i)
34 |             """
35 |         )
36 | 
37 |         template = V1alpha1ScriptTemplate(
38 |             image="python:alpine3.6",
39 |             name="gen-random-int",
40 |             command=["python"],
41 |             source=source,
42 |         )
43 | 
44 |         return template
45 | 
46 |     @template
47 |     @inputs.parameter(name="message")
48 |     def print_message(self, message: str) -> V1Container:
49 |         container = V1Container(
50 |             image="alpine:latest",
51 |             name="print-message",
52 |             command=["sh", "-c"],
53 |             args=["echo result was: {{inputs.parameters.message}}"],
54 |         )
55 | 
56 |         return container
57 | 
58 | 
59 | if __name__ == "__main__":
60 |     wf = ScriptsPython()
61 |     wf_file = ntpath.basename(__file__).replace(".py", ".yaml")
62 |     wf.to_file(f"{pathlib.Path(__file__).parent}/{wf_file}")
--------------------------------------------------------------------------------
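Taken together, the scripts and tests above imply the following round trip from a rendered manifest to a submitted workflow. A minimal sketch, assuming an Argo server reachable through the default ApiClient configuration; the namespace and the manifest path are illustrative:

from argo.workflows.client import ApiClient
from argo.workflows.dsl import Workflow

# Load a manifest rendered by any of the workflow scripts above
# (illustrative path; any of the generated YAML files would do).
wf = Workflow.from_file("tests/workflows/hello-world.yaml")

# Submit it the way tests/test_workflow.py does; in the mocked tests,
# `submit` returns the created workflow's name.
workflow_name = wf.submit(client=ApiClient(), namespace="argo")
print(f"submitted: {workflow_name}")

The integration scripts achieve the same effect from the shell with `argo submit`, then poll the workflow's status phase via scripts/validate_workflow.sh until it reports Succeeded, Failed, or Error.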