├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── ci.yml │ └── pre-commit.yml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── Makefile ├── README.md ├── docs ├── CNAME ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── components │ ├── data │ │ ├── feast.md │ │ ├── istio.md │ │ ├── minio.md │ │ └── pachyderm.md │ ├── deploy │ │ └── seldon.md │ ├── develop │ │ ├── jupyter.md │ │ └── kubeflow.md │ ├── govern │ │ └── mlflow.md │ └── introduction.md ├── design.md ├── images │ ├── demetrios.webp │ ├── github.webp │ ├── luke.webp │ ├── phil.webp │ ├── stacks.webp │ └── terraform.webp ├── index.md ├── infrastructure │ ├── google.md │ └── introduction.md ├── overrides │ └── main.html ├── stacks │ ├── bodywork-mlflow.md │ ├── introduction.md │ ├── kubeflow-mlflow.md │ └── minio-pachyderm.md └── terrachain.md ├── mkdocs.yml ├── poetry.lock └── pyproject.toml /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Documentation Bug Report 3 | about: Create a report to help us improve the documentation 4 | title: 'bug: [short description]' 5 | labels: bug 6 | assignees: 7 | - philwinder 8 | - lukemarsden 9 | --- 10 | 11 | > **IMPORTANT: If you have a problem with a particular component or stack, please raise bugs in the respective repositories. Not this one.** 12 | 13 | **Where is the bug** 14 | Where can the bug be observed? What markdown files? What URLs? 15 | 16 | **Describe the bug** 17 | A clear and concise description of what the bug is. 18 | 19 | **Expected behavior** 20 | A clear and concise description of what you expected to happen. 21 | 22 | **Additional context** 23 | Add any other context about the problem here. 24 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: 'feat: [short description]' 5 | labels: enhancement 6 | assignees: 7 | - philwinder 8 | - lukemarsden 9 | --- 10 | 11 | **Is your feature request related to a problem? Please describe.** 12 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 13 | 14 | **Describe the solution you'd like** 15 | A clear and concise description of what you want to happen. 16 | 17 | **Describe alternatives you've considered** 18 | A clear and concise description of any alternative solutions or features you've considered. 19 | 20 | **Additional context** 21 | Add any other context or screenshots about the feature request here. 
22 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | branches: 5 | - master 6 | - main 7 | jobs: 8 | deploy: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - uses: actions/setup-python@v2 13 | with: 14 | python-version: 3.x 15 | - run: pip install mkdocs-material 16 | - run: mkdocs gh-deploy --force 17 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yml: -------------------------------------------------------------------------------- 1 | name: pre-commit 2 | on: 3 | push: 4 | branches: [ main, master ] 5 | pull_request: 6 | branches: [ main, master ] 7 | jobs: 8 | pre-commit: 9 | name: Linting 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v2 13 | - uses: actions/setup-python@v2 14 | - uses: pre-commit/action@v2.0.0 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .python-version 2 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v3.4.0 4 | hooks: 5 | - id: check-added-large-files 6 | - id: check-executables-have-shebangs 7 | - id: check-json 8 | - id: pretty-format-json 9 | - id: check-merge-conflict 10 | - id: check-symlinks 11 | - id: check-toml 12 | - id: check-vcs-permalinks 13 | - id: check-xml 14 | - id: check-yaml 15 | args: ['--unsafe'] 16 | - id: destroyed-symlinks 17 | - id: detect-private-key 18 | - id: end-of-file-fixer 19 | - id: mixed-line-ending 20 | - id: sort-simple-yaml 21 | - id: trailing-whitespace 22 | - repo: https://github.com/Yelp/detect-secrets 23 | rev: v1.1.0 24 | hooks: 25 | - id: detect-secrets 26 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | finalize: 2 | poetry update 3 | 4 | pre-commit: 5 | poetry run pre-commit install 6 | 7 | install: finalize 8 | poetry install 9 | 10 | serve: install 11 | poetry run mkdocs serve 12 | 13 | lint: install 14 | poetry run pre-commit run -a 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Combinator.ml 2 | 3 | Combinator is a collection of composable ML components combined into opinionated stacks. You can easily spin up a supported ML stack with only a few commands. Read the documentation to get started. 4 | 5 | - [Documentation](https://combinator-ml.github.io/combinator/) 6 | - [Contributing](docs/CONTRIBUTING.md) 7 | -------------------------------------------------------------------------------- /docs/CNAME: -------------------------------------------------------------------------------- 1 | combinator.ml 2 | -------------------------------------------------------------------------------- /docs/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | 2 | # Code of Conduct 3 | 4 | ## Our Pledge 5 | 6 | We as members, contributors, and leaders pledge to make participation in our 7 | community a harassment-free experience for everyone, regardless of age, body 8 | size, visible or invisible disability, ethnicity, sex characteristics, gender 9 | identity and expression, level of experience, education, socio-economic status, 10 | nationality, personal appearance, race, caste, color, religion, or sexual identity 11 | and orientation. 12 | 13 | We pledge to act and interact in ways that contribute to an open, welcoming, 14 | diverse, inclusive, and healthy community. 15 | 16 | ## Our Standards 17 | 18 | Examples of behavior that contributes to a positive environment for our 19 | community include: 20 | 21 | * Demonstrating empathy and kindness toward other people 22 | * Being respectful of differing opinions, viewpoints, and experiences 23 | * Giving and gracefully accepting constructive feedback 24 | * Accepting responsibility and apologizing to those affected by our mistakes, 25 | and learning from the experience 26 | * Focusing on what is best not just for us as individuals, but for the 27 | overall community 28 | 29 | Examples of unacceptable behavior include: 30 | 31 | * The use of sexualized language or imagery, and sexual attention or 32 | advances of any kind 33 | * Trolling, insulting or derogatory comments, and personal or political attacks 34 | * Public or private harassment 35 | * Publishing others' private information, such as a physical or email 36 | address, without their explicit permission 37 | * Other conduct which could reasonably be considered inappropriate in a 38 | professional setting 39 | 40 | ## Enforcement Responsibilities 41 | 42 | Community leaders are responsible for clarifying and enforcing our standards of 43 | acceptable behavior and will take appropriate and fair corrective action in 44 | response to any behavior that they deem inappropriate, threatening, offensive, 45 | or harmful.
46 | 47 | Community leaders have the right and responsibility to remove, edit, or reject 48 | comments, commits, code, wiki edits, issues, and other contributions that are 49 | not aligned to this Code of Conduct, and will communicate reasons for moderation 50 | decisions when appropriate. 51 | 52 | ## Scope 53 | 54 | This Code of Conduct applies within all community spaces, and also applies when 55 | an individual is officially representing the community in public spaces. 56 | Examples of representing our community include using an official e-mail address, 57 | posting via an official social media account, or acting as an appointed 58 | representative at an online or offline event. 59 | 60 | ## Enforcement 61 | 62 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 63 | reported to the community leaders responsible for enforcement at 64 | `phil@winderresearch.com`. 65 | All complaints will be reviewed and investigated promptly and fairly. 66 | 67 | All community leaders are obligated to respect the privacy and security of the 68 | reporter of any incident. 69 | 70 | ## Enforcement Guidelines 71 | 72 | Community leaders will follow these Community Impact Guidelines in determining 73 | the consequences for any action they deem in violation of this Code of Conduct: 74 | 75 | ### 1. Correction 76 | 77 | **Community Impact**: Use of inappropriate language or other behavior deemed 78 | unprofessional or unwelcome in the community. 79 | 80 | **Consequence**: A private, written warning from community leaders, providing 81 | clarity around the nature of the violation and an explanation of why the 82 | behavior was inappropriate. A public apology may be requested. 83 | 84 | ### 2. Warning 85 | 86 | **Community Impact**: A violation through a single incident or series 87 | of actions. 88 | 89 | **Consequence**: A warning with consequences for continued behavior. No 90 | interaction with the people involved, including unsolicited interaction with 91 | those enforcing the Code of Conduct, for a specified period of time. This 92 | includes avoiding interactions in community spaces as well as external channels 93 | like social media. Violating these terms may lead to a temporary or 94 | permanent ban. 95 | 96 | ### 3. Temporary Ban 97 | 98 | **Community Impact**: A serious violation of community standards, including 99 | sustained inappropriate behavior. 100 | 101 | **Consequence**: A temporary ban from any sort of interaction or public 102 | communication with the community for a specified period of time. No public or 103 | private interaction with the people involved, including unsolicited interaction 104 | with those enforcing the Code of Conduct, is allowed during this period. 105 | Violating these terms may lead to a permanent ban. 106 | 107 | ### 4. Permanent Ban 108 | 109 | **Community Impact**: Demonstrating a pattern of violation of community 110 | standards, including sustained inappropriate behavior, harassment of an 111 | individual, or aggression toward or disparagement of classes of individuals. 112 | 113 | **Consequence**: A permanent ban from any sort of public interaction within 114 | the community. 115 | 116 | ## Attribution 117 | 118 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 119 | version 2.0, available at 120 | [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0]. 121 | 122 | Community Impact Guidelines were inspired by 123 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC]. 
124 | 125 | For answers to common questions about this code of conduct, see the FAQ at 126 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available 127 | at [https://www.contributor-covenant.org/translations][translations]. 128 | 129 | [homepage]: https://www.contributor-covenant.org 130 | [v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html 131 | [Mozilla CoC]: https://github.com/mozilla/diversity 132 | [FAQ]: https://www.contributor-covenant.org/faq 133 | [translations]: https://www.contributor-covenant.org/translations 134 | -------------------------------------------------------------------------------- /docs/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | First off, thanks for taking the time to contribute. You're amazing! 🎉 😘 ✨ 4 | 5 | If at any point you need any help, the best way to get in touch with someone is in `#mlops-stacks` channel on the MLOps.community Slack. 6 | 7 | [Join Slack :rocket:](https://go.mlops.community/slack){ .md-button .md-button--primary } 8 | 9 | The following is a set of guidelines for contributing. These are mostly guidelines, not rules. Use your best judgment, and feel free to propose changes to this document in a pull request. 10 | 11 | - [Contributing Components and Stacks](#contributing-components-and-stacks) 12 | - [Reporting Bugs](#reporting-bugs) 13 | - [Suggesting Feature Requests or Enhancements](#suggesting-feature-requests-or-enhancements) 14 | - [Pull Requests](#pull-requests) 15 | * [Check Code Style](#check-code-style) 16 | * [Test](#test) 17 | * [Commit](#commit) 18 | * [Yes! Pull request](#yes-pull-request) 19 | - [Code of Conduct](#code-of-conduct) 20 | 21 | ## Contributing Components and Stacks 22 | 23 | See the [tutorial](https://combinator.ml/design/#tutorial). 24 | 25 | ## Reporting Bugs 26 | 27 | Bugs are tracked as GitHub issues, tagged with a `bug` label. Search before you create an issue. When you create an issue, please provide the following information by filling in the template. 28 | 29 | - Core documentation or website issues should be [reported to the combinator repository](https://github.com/combinator-ml/combinator/issues) 30 | - Component or stack issues should be reported to the respective repository, because these may not be owned by the core contributors 31 | 32 | ## Suggesting Feature Requests or Enhancements 33 | 34 | Enhancements or feature requests are tracked as GitHub issues, tagged with an `enhancement` label. When you create an issue, please provide the following information by filling in the template. 35 | 36 | - Enhancements to combinator in general (e.g. new stacks, new components, new ideas) should be [reported to the combinator repository](https://github.com/combinator-ml/combinator/issues) 37 | - Enhancements to components or stacks should be reported to the respective repository, because these may not be owned by the core contributors 38 | 39 | ## Pull Requests 40 | 41 | ### Check Code Style 42 | 43 | Run `make lint` and make sure all the tests pass. 44 | 45 | ### Test 46 | 47 | Run `make test` and verify all the tests pass. 
48 | 49 | ### Commit 50 | 51 | #### Commit Message Format 52 | 53 | ``` 54 | <type>: Short description (fix #1234) 55 | 56 | Longer description here if necessary 57 | 58 | BREAKING CHANGE: only contain breaking change 59 | ``` 60 | 61 | #### Type 62 | Must be one of the following: 63 | 64 | * **feat**: A new feature 65 | * **fix**: A bug fix 66 | * **breaking**: A breaking change 67 | * **docs**: Documentation only changes 68 | * **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc) 69 | * **refactor**: A code change that neither fixes a bug nor adds a feature 70 | * **perf**: A code change that improves performance 71 | * **test**: Adding missing or correcting existing tests 72 | * **chore**: Changes to the build process or auxiliary tools and libraries such as documentation generation 73 | * **revert**: Reverting changes 74 | 75 | #### Subject 76 | 77 | * use the imperative, __present__ tense: "change" not "changed" nor "changes" 78 | * don't capitalize the first letter 79 | * no dot (.) at the end 80 | * reference GitHub issues at the end. If the commit doesn’t completely fix the issue, then use `(refs #1234)` instead of `(fixes #1234)`. 81 | 82 | #### Body 83 | 84 | * use the imperative, __present__ tense: "change" not "changed" nor "changes". 85 | * the motivation for the change and contrast this with previous behavior. 86 | 87 | ### Yes! Pull request 88 | 89 | Make your pull request, then describe your changes. 90 | 91 | #### Title 92 | 93 | Follow the PR title format below. 94 | ``` 95 | <type>: Short Description (fix #111) 96 | <type>: Short Description (fix #123, #111, #122) 97 | <type>: Short Description (ref #111) 98 | ``` 99 | * use present tense: 'change' not 'changed' or 'changes' 100 | 101 | #### Description 102 | 103 | If it is related to any issues, add links to the issues (like `#123`) in the description. 104 | 105 | ## Code of Conduct 106 | 107 | See the [CODE_OF_CONDUCT.md](CODE_OF_CONDUCT.md). 108 | -------------------------------------------------------------------------------- /docs/components/data/feast.md: -------------------------------------------------------------------------------- 1 | # Feast 2 | 3 | **tl; dr;** A [combinator](https://combinator.ml) data component that installs [Feast](https://feast.dev), a feature store. 4 | 5 | - [Introduction](#introduction) 6 | - [Test Drive](#test-drive) 7 | - [Usage](#usage) 8 | 9 | ## Introduction 10 | 11 | [Feast](https://feast.dev) is an open-source feature store. A feature store allows you to manage, govern, and trace features derived from raw data. This is useful because it helps to unify and standardise, which reduces waste, improves quality, and makes models more reproducible. 12 | 13 | Feast does not perform any computation. You can think of it as a meta-database; a database that manages other databases. It effectively creates a cache of feature data, keyed by time. The Feast libraries and CLIs provide a consistent way of pushing or streaming new data into the cache. Downstream systems use a similar interface to access point-in-time data. [Learn more about feast in the documentation.](https://docs.feast.dev) 14 | 15 | ## Test Drive 16 | 17 | The fastest way to get started is to use the test drive functionality provided by [TestFaster](https://testfaster.ci). Click on the "Launch Test Drive" button below (opens a new window). 18 | 19 | :computer: Launch Test Drive :computer: 20 | 21 | ### Launch Jupyter 22 | 23 | Once the component has launched, click on the Jupyter link.
Feast does not come with a UI. You will use Jupyter to interact with Feast via its API. 24 | 25 | ### Example Notebook 26 | 27 | Once inside Jupyter, browse to the minimal notebook, which is the [official example](https://github.com/feast-dev/feast/tree/v0.9-branch/examples/minimal). Follow the instructions in the notebook. 28 | 29 | ## Usage 30 | 31 | ### Prerequisites 32 | 33 | Start by preparing your Kubernetes cluster using one of the [infrastructure components](https://combinator.ml/infrastructure/introduction/) or use your own cluster. 34 | 35 | ### Component Usage 36 | 37 | ```terraform 38 | module "feast" { 39 | source = "combinator-ml/feast/k8s" 40 | # Optional settings go here 41 | } 42 | ``` 43 | 44 | See the full configuration options below. 45 | 46 | ## Requirements 47 | 48 | No requirements. 49 | 50 | ## Providers 51 | 52 | | Name | Version | 53 | |------|---------| 54 | | helm | n/a | 55 | | kubernetes | n/a | 56 | | random | n/a | 57 | 58 | ## Modules 59 | 60 | No Modules. 61 | 62 | ## Resources 63 | 64 | | Name | 65 | |------| 66 | | [helm_release](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | 67 | | [kubernetes_namespace](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/namespace) | 68 | | [kubernetes_secret](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/secret) | 69 | | [random_password](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/password) | 70 | 71 | ## Inputs 72 | 73 | | Name | Description | Type | Default | Required | 74 | |------|-------------|------|---------|:--------:| 75 | | name\_prefix | Prefix to be used when naming the different components of Feast | `string` | `"combinator"` | no | 76 | | namespace | (Optional) The namespace to install into. Defaults to feast. | `string` | `"feast"` | no | 77 | 78 | ## Outputs 79 | 80 | No output. 81 | -------------------------------------------------------------------------------- /docs/components/data/istio.md: -------------------------------------------------------------------------------- 1 | # Istio 2 | 3 | The [Istio](https://istio.io) component allows you to create a service mesh. This is a common dependency for other projects. 4 | 5 | - [Website](https://github.com/combinator-ml/terraform-k8s-istio) 6 | -------------------------------------------------------------------------------- /docs/components/data/minio.md: -------------------------------------------------------------------------------- 1 | # Minio 2 | 3 | The [Minio](https://min.io) component creates a cloud-native S3 compatible data store. 4 | 5 | - [Website](https://github.com/combinator-ml/terraform-k8s-minio) 6 | -------------------------------------------------------------------------------- /docs/components/data/pachyderm.md: -------------------------------------------------------------------------------- 1 | # Pachyderm 2 | 3 | **tl; dr;** A [combinator](https://combinator.ml) data component that installs [Pachyderm](https://www.pachyderm.com), a data lineage and pipelining solution. 4 | 5 | - [Introduction](#introduction) 6 | - [Test Drive](#test-drive) 7 | - [Usage](#usage) 8 | 9 | ## Introduction 10 | 11 | [Pachyderm](https://www.pachyderm.com) is an open-source-driven solution that provides data lineage and pipelines. Data lineage is important for \_provenance\_; knowing the origin of downstream assets. In ML, the assets are often models and the provenance describes how the model came to be.
Precise knowledge of what a model was trained upon is important for disaster recovery, auditing, and robustness. 12 | 13 | Pipelines encode a process. This can be anything from automating pre-processing to training and deploying models. Pachyderm's solution is unique because it is backed by data lineage; i.e. data-driven pipelines, not process-driven ones. 14 | 15 | ## Test Drive 16 | 17 | The fastest way to get started is to use the test drive functionality provided by [TestFaster](https://testfaster.ci). Click on the "Launch Test Drive" button below (opens a new window). 18 | 19 | :computer: Launch Test Drive :computer: 20 | 21 | ### Quick Start Pachyderm Tutorial 22 | 23 | Once the test drive has launched, click the two links to the left to get started with Pachyderm: 24 | 25 | 1. Click the Jupyter link and launch the `demo.ipynb` notebook. 26 | 2. Click on the Dashboard link to launch the Pachyderm Enterprise Dashboard. 27 | 28 | ## Usage 29 | 30 | ### Prerequisites 31 | 32 | Start by preparing your Kubernetes cluster using one of the [infrastructure components](https://combinator.ml/infrastructure/introduction/) or use your own cluster. 33 | 34 | ### Component Usage 35 | 36 | ```terraform 37 | module "pachyderm" { 38 | source = "combinator-ml/pachyderm/k8s" 39 | # Optional settings go here 40 | } 41 | ``` 42 | 43 | See the full configuration options below. 44 | 45 | ## Requirements 46 | 47 | | Name | Version | 48 | |------|---------| 49 | | helm | ~> 2.1.2 | 50 | | kubernetes | ~> 2.2.0 | 51 | | null | ~> 3.1.0 | 52 | 53 | ## Providers 54 | 55 | | Name | Version | 56 | |------|---------| 57 | | helm | ~> 2.1.2 | 58 | | kubernetes | ~> 2.2.0 | 59 | 60 | ## Modules 61 | 62 | No Modules. 63 | 64 | ## Resources 65 | 66 | | Name | 67 | |------| 68 | | [helm_release](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | 69 | | [kubernetes_namespace](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/namespace) | 70 | 71 | ## Inputs 72 | 73 | | Name | Description | Type | Default | Required | 74 | |------|-------------|------|---------|:--------:| 75 | | namespace | (Optional) The namespace to install the release into. | `string` | `"pachyderm"` | no | 76 | | values | (Optional) List of values in raw yaml to pass to helm. See https://github.com/pachyderm/helmchart/blob/master/pachyderm/values.yaml. | `list(string)` |
["tls:\n certName: null # Disable TLS\n create: null # Disable TLS\npachd:\n logLevel: debug\n storage:\n backend: LOCAL\n"]
| no | 77 | 78 | ## Outputs 79 | 80 | | Name | Description | 81 | |------|-------------| 82 | | namespace | Namespace is the kubernetes namespace of the release. | 83 | -------------------------------------------------------------------------------- /docs/components/deploy/seldon.md: -------------------------------------------------------------------------------- 1 | # Seldon 2 | 3 | **tl; dr;** A [combinator](https://combinator.ml) deployment component that installs [Seldon-Core](https://www.seldon.io/tech/products/core/), an ML serving framework. 4 | 5 | - [Introduction](#introduction) 6 | - [Test Drive](#test-drive) 7 | - [Usage](#usage) 8 | 9 | ## Introduction 10 | 11 | [Seldon-Core](https://www.seldon.io/tech/products/core/) is an open-source model serving and monitoring framework. It allows you to deploy your ML models so that they can be consumed by users over consistent REST APIs. In addition, other Seldon tools allow you to monitor your model in production. 12 | 13 | [Learn more about Seldon-Core in the repo.](https://github.com/SeldonIO/seldon-core) 14 | 15 | ## Test Drive 16 | 17 | The fastest way to get started is to use the test drive functionality provided by [TestFaster](https://testfaster.ci). Click on the "Launch Test Drive" button below (opens a new window). 18 | 19 | :computer: Launch Test Drive :computer: 20 | 21 | ### Launch Jupyter 22 | 23 | Once the component has launched, click on the Jupyter link. Seldon-Core does not have a UI by default. You will use Jupyter to interact with Seldon-Core via its Kubernetes API. 24 | 25 | ### Example Notebook 26 | 27 | Once inside Jupyter, browse to the demo notebook, which comes from the [official quick start guide](https://docs.seldon.io/projects/seldon-core/en/v1.1.0/workflow/quickstart.html). Follow the instructions in the notebook to deploy a pre-trained model. 28 | 29 | ## Usage 30 | 31 | ### Prerequisites 32 | 33 | Start by preparing your Kubernetes cluster using one of the [infrastructure components](https://combinator.ml/infrastructure/introduction/) or use your own cluster. 34 | 35 | ### Component Usage 36 | 37 | ```terraform 38 | module "seldon" { 39 | source = "combinator-ml/seldon/k8s" 40 | # Optional settings go here 41 | } 42 | ``` 43 | 44 | See the full configuration options below.
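For example, a configuration that overrides a few of the optional inputs documented below might look like this (a sketch only; the values shown are illustrative):

```terraform
module "seldon" {
  source = "combinator-ml/seldon/k8s"

  # Skip the bundled example deployment and istio gateway,
  # e.g. when you already manage your own mesh and examples.
  enable_example_seldon_deployment = false
  enable_seldon_gateway            = false

  # Install the seldon-core operator into a custom namespace.
  seldon_core_operator_namespace = "seldon-system"
}
```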
45 | 46 | ## Requirements 47 | 48 | | Name | Version | 49 | |------|---------| 50 | | helm | >= 2.0.0 | 51 | | kubectl | >= 1.7.0 | 52 | | kubernetes | >= 2.0.0 | 53 | 54 | ## Providers 55 | 56 | | Name | Version | 57 | |------|---------| 58 | | helm | >= 2.0.0 | 59 | | kubectl | >= 1.7.0 | 60 | | kubernetes | >= 2.0.0 | 61 | 62 | ## Modules 63 | 64 | | Name | Source | Version | 65 | |------|--------|---------| 66 | | istio | combinator-ml/istio/k8s | 0.0.1 | 67 | 68 | ## Resources 69 | 70 | | Name | 71 | |------| 72 | | [helm_release](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | 73 | | [kubectl_file_documents](https://registry.terraform.io/providers/gavinbunney/kubectl/latest/docs/data-sources/file_documents) | 74 | | [kubectl_manifest](https://registry.terraform.io/providers/gavinbunney/kubectl/latest/docs/resources/manifest) | 75 | | [kubernetes_namespace](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/namespace) | 76 | 77 | ## Inputs 78 | 79 | | Name | Description | Type | Default | Required | 80 | |------|-------------|------|---------|:--------:| 81 | | enable\_example\_seldon\_deployment | Enable an example seldon deployment | `bool` | `true` | no | 82 | | enable\_seldon\_gateway | Create an istio gateway for seldon | `bool` | `true` | no | 83 | | seldon\_core\_operator\_namespace | (Optional) The namespace to install the seldon core operator into. Defaults to seldon-system | `string` | `"seldon-system"` | no | 84 | | seldon\_core\_values | (Optional) List of values in raw yaml to pass to helm. | `list(string)` | `[]` | no | 85 | 86 | ## Outputs 87 | 88 | No output. 89 | -------------------------------------------------------------------------------- /docs/components/develop/jupyter.md: -------------------------------------------------------------------------------- 1 | # terraform-k8s-jupyter 2 | 3 | **tl; dr;** A [combinator](https://combinator.ml) development component that provides a single [Jupyter](https://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) instance, a notebook provider. 4 | 5 | - [Introduction](#introduction) 6 | - [Test Drive](#test-drive) 7 | - [Usage](#usage) 8 | 9 | ## Introduction 10 | 11 | [Jupyter](https://jupyter-docker-stacks.readthedocs.io/en/latest/index.html) is an open-source notebook host. Data scientists use notebooks to research, develop, and document their solutions. However, it is also very useful when demonstrating other products where the user persona is an engineer or scientist. 12 | 13 | Hence, this component is generally used within other stacks to enable demos. The functionality of this component is intended to help provide those demos. 14 | 15 | If you are more interested in a notebook platform, then check out [Jupyter Hub](https://jupyter.org/hub), or any cloud vendor notebook hosting solution. 16 | 17 | ## Test Drive 18 | 19 | The fastest way to get started is to use the test drive functionality provided by [TestFaster](https://testfaster.ci). Click on the "Launch Test Drive" button below (opens a new window). 20 | 21 | :computer: Launch Test Drive :computer: 22 | 23 | ### Launch Jupyter 24 | 25 | Once the component has launched, click on the Jupyter link. Once inside Jupyter, explore and try some demos. 26 | 27 | ### Prerequisites 28 | 29 | Start by preparing your Kubernetes cluster using one of the [infrastructure components](https://combinator.ml/infrastructure/introduction/) or use your own cluster.
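If you bring your own cluster, the Terraform providers need to be pointed at it before applying the module. A minimal sketch, assuming a local kubeconfig (adjust the path or context for your environment; the `helm` provider block is only needed by components that list helm in their Providers table):

```terraform
# Point the Kubernetes (and, where required, Helm) providers at an existing cluster.
provider "kubernetes" {
  config_path = "~/.kube/config"
}

provider "helm" {
  kubernetes {
    config_path = "~/.kube/config"
  }
}
```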
30 | 31 | ### Component Usage 32 | 33 | ```terraform 34 | module "jupyter" { 35 | source = "combinator-ml/jupyter/k8s" 36 | # Optional settings go here 37 | } 38 | ``` 39 | 40 | See the full configuration options below. 41 | 42 | ## Requirements 43 | 44 | No requirements. 45 | 46 | ## Providers 47 | 48 | | Name | Version | 49 | |------|---------| 50 | | kubernetes | n/a | 51 | 52 | ## Modules 53 | 54 | No Modules. 55 | 56 | ## Resources 57 | 58 | | Name | 59 | |------| 60 | | [kubernetes_deployment](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/deployment) | 61 | | [kubernetes_namespace](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/namespace) | 62 | 63 | ## Inputs 64 | 65 | | Name | Description | Type | Default | Required | 66 | |------|-------------|------|---------|:--------:| 67 | | image | Docker image to use | `string` | `"jupyter/scipy-notebook:python-3.9.2"` | no | 68 | | name\_prefix | Prefix to be used when naming the different components. | `string` | `"combinator"` | no | 69 | | namespace | The namespace to install into. | `string` | `"jupyter"` | no | 70 | 71 | ## Outputs 72 | 73 | No output. 74 | -------------------------------------------------------------------------------- /docs/components/develop/kubeflow.md: -------------------------------------------------------------------------------- 1 | # Kubeflow 2 | 3 | **tl; dr;** A [combinator](https://combinator.ml) development component that provides [Kubeflow](https://kubeflow.org), a pipelining tool, Jupyter host, and hyperparameter tuner. 4 | 5 | - [Introduction](#introduction) 6 | - [Test Drive](#test-drive) 7 | - [Usage](#usage) 8 | 9 | ## Introduction 10 | 11 | [Kubeflow](https://kubeflow.org) is an open-source MLOps platform that combines Jupyter hosting, ML pipelining, and hyperparameter tuning. It is packaged into a single UI to help data scientists train their ML models. 12 | 13 | Kubeflow Pipelines (KFP), in particular, has emerged as a preeminent ML pipelining technology, mainly thanks to the managed hosting in various clouds. 14 | 15 | Its opinionated ML-specific API helps data scientists and ML engineers develop robust, repeatable pipelines. 16 | 17 | ### Kubeflow Version 18 | 19 | This installation uses Kubeflow version 1.2, which is now out of date. 20 | 21 | ### Status and Recommendations 22 | 23 | :warning: **For Testing Only** :warning: 24 | 25 | This installation method is not recommended for use. It requires a lot of workarounds that are not suitable for production use. Please refer to the [official documentation](https://www.kubeflow.org/docs/started/installing-kubeflow/) for production installation instructions. 26 | 27 | ## Test Drive 28 | 29 | The fastest way to get started is to use the test drive functionality provided by [TestFaster](https://testfaster.ci). Click on the "Launch Test Drive" button below (opens a new window). 30 | 31 | :computer: Launch Test Drive :computer: 32 | 33 | ## Usage 34 | 35 | ### Prerequisites 36 | 37 | Start by preparing your Kubernetes cluster using one of the [infrastructure components](https://combinator.ml/infrastructure/introduction/) or use your own cluster. 38 | 39 | ### Component Usage 40 | 41 | ```terraform 42 | module "kubeflow" { 43 | source = "combinator-ml/kubeflow/k8s" 44 | # Optional settings go here 45 | } 46 | ``` 47 | 48 | See the full configuration options below. 49 | 50 | ### Instructions 51 | 52 | Kubeflow is big, so it can take some time to start.
Once it does, connect to the istio ingress gateway service. 53 | 54 | Once you see the login screen, the username is `admin@kubeflow.org` and the password is `12341234`. 55 | 56 | ## Requirements 57 | 58 | | Name | Version | 59 | |------|---------| 60 | | terraform | >= 0.13 | 61 | | helm | = 2.2.0 | 62 | | k8s | = 0.9.1 | 63 | | kubernetes | = 2.3.2 | 64 | 65 | ## Providers 66 | 67 | No provider. 68 | 69 | ## Modules 70 | 71 | | Name | Source | Version | 72 | |------|--------|---------| 73 | | kubeflow | ./terraform-module-kubeflow | | 74 | 75 | ## Resources 76 | 77 | No resources. 78 | 79 | ## Inputs 80 | 81 | No input. 82 | 83 | ## Outputs 84 | 85 | No output. 86 | -------------------------------------------------------------------------------- /docs/components/govern/mlflow.md: -------------------------------------------------------------------------------- 1 | # MLFlow 2 | 3 | **tl; dr;** A [combinator](https://combinator.ml) governance component that provides a hosted [MLFlow](https://mlflow.org) server, a model repository and experiment tracker. 4 | 5 | - [Introduction](#introduction) 6 | - [Test Drive](#test-drive) 7 | - [Usage](#usage) 8 | 9 | ## Introduction 10 | 11 | [MLFlow](https://mlflow.org) is an open-source model registry and experiment tracker. Data scientists leverage this hosted service to store the results of their experiments and persist final artifacts like model parameters and weights. 12 | 13 | This provides a centralised catalogue of models and experiments, which is useful for organizational purposes and sharing work. 14 | 15 | MLFlow also comes with limited serving capabilities, although that is not its core aim. 16 | 17 | ## Test Drive 18 | 19 | The fastest way to get started is to use the test drive functionality provided by [TestFaster](https://testfaster.ci). Click on the "Launch Test Drive" button below (opens a new window). 20 | 21 | :computer: Launch Test Drive :computer: 22 | 23 | ## Usage 24 | 25 | ### Prerequisites 26 | 27 | Start by preparing your Kubernetes cluster using one of the [infrastructure components](https://combinator.ml/infrastructure/introduction/) or use your own cluster. 28 | 29 | ### Component Usage 30 | 31 | ```terraform 32 | module "mlflow" { 33 | source = "combinator-ml/mlflow/k8s" 34 | # Optional settings go here 35 | } 36 | ``` 37 | 38 | See the full configuration options below. 39 | 40 | ## Requirements 41 | 42 | | Name | Version | 43 | |------|---------| 44 | | terraform | >= 0.14 | 45 | | helm | >= 2.0.0 | 46 | | kubernetes | >= 2.0.0 | 47 | 48 | ## Providers 49 | 50 | | Name | Version | 51 | |------|---------| 52 | | helm | >= 2.0.0 | 53 | | kubernetes | >= 2.0.0 | 54 | 55 | ## Modules 56 | 57 | No Modules. 58 | 59 | ## Resources 60 | 61 | | Name | 62 | |------| 63 | | [helm_release](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | 64 | | [kubernetes_secret](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/secret) | 65 | 66 | ## Inputs 67 | 68 | | Name | Description | Type | Default | Required | 69 | |------|-------------|------|---------|:--------:| 70 | | name\_prefix | Prefix to be used when naming the different components. | `string` | `"combinator"` | no | 71 | | namespace | The namespace to install into. | `string` | `"mlflow"` | no | 72 | 73 | ## Outputs 74 | 75 | No output.
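As a worked example, the optional inputs above can be overridden like so (a sketch; the prefix and namespace values are illustrative):

```terraform
module "mlflow" {
  source = "combinator-ml/mlflow/k8s"

  # Both inputs are optional; the defaults are "combinator" and "mlflow".
  name_prefix = "myteam"
  namespace   = "mlflow-dev"
}
```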
76 | -------------------------------------------------------------------------------- /docs/components/introduction.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | Combinator components are intended to be best-of-breed point-solutions for specific ML problems. A combination of components builds a stack. 4 | 5 | Since there are multiple components that attempt to solve the same problem, components can be organized into categories. These are: 6 | 7 | - [Data](#data) 8 | - [Develop](#develop) 9 | - [Deploy](#deploy) 10 | - [Monitor](#monitor) 11 | - [Govern](#govern) 12 | 13 | Undoubtedly there are components that cross boundaries. When this happens, they are listed in multiple categories. 14 | 15 | ## Data 16 | 17 | - [Minio](data/minio.md) - S3 API compatible cloud-native storage 18 | - [Pachyderm](data/pachyderm.md) - Data lineage (version control) 19 | - [Feast](data/feast.md) - Feature store 20 | - [Istio](data/istio.md) - Service mesh 21 | 22 | ## Develop 23 | 24 | - [Pachyderm](data/pachyderm.md) - Pipelines 25 | - [Jupyter](develop/jupyter.md) - Notebooks 26 | - [Kubeflow](develop/kubeflow.md) - Pipelines, Notebooks, Training 27 | 28 | ## Deploy 29 | 30 | - KFServing - Serving 31 | - [Seldon](deploy/seldon.md) - Serving and monitoring 32 | 33 | ## Monitor 34 | 35 | - Boxkite - Cloud-native model monitoring 36 | - [Seldon](deploy/seldon.md) - Serving and monitoring 37 | 38 | ## Govern 39 | 40 | - [MLFlow](govern/mlflow.md) - Model management 41 | -------------------------------------------------------------------------------- /docs/design.md: -------------------------------------------------------------------------------- 1 | # Design and Standards 2 | 3 | Imagine this. You've been given the task of testing a new piece of ML software. Maybe you want to integrate it into your stack or maybe you just want to try it out. In our experience, we spent a non-trivial amount of time attempting to get the stack running, especially when there are multiple components involved. 4 | 5 | Combinator attempts to simplify the deployment of common ML stacks. We hope that over time these could become "standardized" stacks, which people use as the ground truth when, for example, stating dependencies. 6 | 7 | To achieve this goal we need to define some principles, otherwise there can be no standardization. This isn't formal in the bureaucratic sense of the word, but it does aim to be a pattern, to ensure consistency throughout the codebase and make it easier for users to comprehend. To that end, this document serves as a set of opinions (which may change over time) that all code must adhere to. 8 | 9 | ## Overview 10 | 11 | There are three types of code in this project: components, stacks, and infra. 12 | 13 | - **Components** provide the smallest possible amount of functionality. Ideally they solve one specific MLOps-related problem and are usually a single project - obvious exceptions are things like Kubeflow. Components are flexible enough to be reused in several stacks. 14 | - **Stacks** are a combination of components that produce a suite of functionality. Stacks are reusable modules too, but they should comprise only low-level components and any requisite glue (see the sketch below). 15 | - **Infrastructure** represents infrastructure components that are required to run stacks and components. Typically this defaults to Kubernetes, but all cloud resources are allowed.
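To make that concrete, a stack is, in practice, just another Terraform module that instantiates component modules and adds any glue between them. A rough sketch, using two component modules documented on this site (the particular pairing is illustrative):

```terraform
# A minimal "stack": it simply composes existing combinator components.
module "pachyderm" {
  source = "combinator-ml/pachyderm/k8s"
  # Optional settings go here
}

module "mlflow" {
  source = "combinator-ml/mlflow/k8s"
  # Optional settings go here
}
```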
16 | 17 | All three are managed as terraform code, to make it portable enough to work with any cloud, and to be able to install and/or control all elements throughout the stack. This allows combinator to use best-of-breed resources, like provisioning Kubernetes clusters with GPUs, or installing applications through the use of Helm charts. And all with a single dependency, Terraform. 18 | 19 | ## Standards 20 | 21 | All components, stacks, and infrastructure must: 22 | 23 | - Be packaged as a terraform module. 24 | - Be published in the [Terraform Registry](https://registry.terraform.io) to make it easier to install. 25 | - Target Kubernetes, where possible. 26 | - Have an accompanying page on https://github.com/combinator-ml/combinator 27 | 28 | All components, stacks, and infrastructure should: 29 | 30 | - Be concise. For example, if an application can run in memory, it should; include extra storage in a stack instead. But if it _needs_ a database to work, you must bundle the database into the component if it can’t deploy one itself. 31 | - Only expose options for variations if they have the same interface. When a component or stack can have multiple sub-components, only include those that provide a like-for-like experience. For example, including the option to select Minio or S3 is great. But selecting whether to include Kubeflow or Sagemaker is not; that should be a separate stack. 32 | - Expose all pertinent parameters, in anticipation of future use cases. 33 | - Use semantic versioning to inform users about changes. 34 | - Be packaged with working examples, to allow users to spin up the component quickly. 35 | 36 | Components must not: 37 | 38 | - Make breaking changes (unless impossible to avoid). Where breaking changes are necessary, bump the major version and make a big scary announcement. 39 | 40 | ## Creating Components or Stacks 41 | 42 | There is little practical difference between components and stacks, other than the level of abstraction. Therefore we recommend the following layout (a basic terraform module layout): 43 | 44 | ``` 45 | ├── .github # (Optional) Use github actions to ensure quality 46 | │ └── workflows 47 | │ ├── documentation.yaml 48 | │ └── terraform.yaml 49 | ├── .gitignore 50 | ├── .header.md # (Optional) Part of the final README.md 51 | ├── .terraform-docs.yml # (Optional) A way of automatically generating terraform docs from a github action 52 | ├── LICENSE 53 | ├── README.md 54 | ├── examples 55 | │ └── basic 56 | │ └── main.tf 57 | ├── locals.tf 58 | ├── component-technology-0.tf 59 | ├── component-technology-1.tf 60 | ├── outputs.tf 61 | ├── providers.tf 62 | └── variables.tf 63 | ``` 64 | 65 | ### Tutorial 66 | 67 | We've created some helpful templates to make it easier to get started developing a component or stack. Follow the steps to get started. 68 | 69 | #### 1. Create a Repo from a Template 70 | 71 | First create your new repository and base it upon the [combinator template](https://github.com/combinator-ml/terraform-template). If you are working within the combinator organization, then you can use Github's template functionality. Otherwise, just merge the repo into yours. 72 | 73 | The template provides the following functionality: 74 | 75 | - Good initial structure 76 | - Github actions to: publish docs to the main combinator site, generate terraform documentation, create release notes, lint, test the terraform code 77 | 78 | #### 2.
Release to the Terraform Registry 79 | 80 | Create your first release, `0.0.0` by convention, then browse to the [Terraform registry](https://registry.terraform.io/github/create) and publish a new **module**. Once generated, it should show your template repo. 81 | 82 | #### 3. Update the Code and Examples 83 | 84 | Now you need to go through all the Terraform code and implement your component. Take a look at the other components for examples using Helm to install your application, or leverage other modules to create a stack, for example. 85 | 86 | #### 4. Update the Documentation 87 | 88 | Now go through the `.header.md` file and update your README. This file will be pushed to the combinator.ml website, so make sure it makes sense outside of the context of the repository. 89 | 90 | Browse to the [`combinator-docs.yml` github action](https://github.com/combinator-ml/terraform-template/blob/main/.github/workflows/combinator-docs.yml) and update the [`COMBINATOR_DOCS_FILE` environment variable](https://github.com/combinator-ml/terraform-template/blob/main/.github/workflows/combinator-docs.yml#L13) to point to the location where you want your documentation to appear in the website hierarchy. For example, the [Feast](components/data/feast.md) component is set to: `docs/components/data/feast.md`. The directory must already exist. 91 | 92 | If you are working within the [combinator-ml organization](https://github.com/combinator-ml) then the combinator docs github action will start to work once merged. 93 | 94 | ##### Users Outside of the Combinator-ml Organization 95 | 96 | But if you are outside of the [combinator-ml organization](https://github.com/combinator-ml) then you will need to create a Github Secret called `CI_TOKEN` with a personal [Github access token](https://github.com/settings/tokens) that has the single permission: `public_repo`. This delegates permission to create a PR using your access credentials. 97 | 98 | #### 5. Create a PR 99 | 100 | Push your code to a branch and create a PR. This will kick off the various Github actions. This step is important because if you just push to master, it won't regenerate the documentation. 101 | 102 | Make sure all the actions pass, then merge the PR in your repository. You should now have a valid README.md. 103 | 104 | #### 6. Merge the Combinator Docs 105 | 106 | Now browse to the [combinator repository](https://github.com/combinator-ml/combinator/pulls) and look at the PR that your documentation action created. Make sure everything is in order and request a merge from one of the project's admins. 107 | 108 | Don't forget to add your page to the [mkdocs settings file](https://github.com/combinator-ml/combinator/blob/main/mkdocs.yml) if the component is new, because only the pages listed in this file are added to the website. 109 | 110 | You might also want to update any introductory pages or cross-references. 111 | 112 | From then on, whenever you update your README.md, it will [automatically raise a PR in the combinator repository](https://github.com/combinator-ml/combinator/pulls). 113 | 114 | #### 7. Celebrate and Share 115 | 116 | Now share your success! Tell us about the experience on Slack! Share with your colleagues! Share on social media! Spread the love!
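As a final reference, the `examples/basic/main.tf` from the layout above usually only needs to instantiate the module itself, so that users (and CI) can spin it up quickly. A sketch, assuming the example lives two directories below the module root:

```terraform
# examples/basic/main.tf: the smallest possible working example of the module.
module "example" {
  source = "../.."
  # Set any required inputs here; optional inputs can rely on their defaults.
}
```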
117 | -------------------------------------------------------------------------------- /docs/images/demetrios.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/combinator-ml/combinator/bb517794020bda5b8f1326236217214a6a1d3b4d/docs/images/demetrios.webp -------------------------------------------------------------------------------- /docs/images/github.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/combinator-ml/combinator/bb517794020bda5b8f1326236217214a6a1d3b4d/docs/images/github.webp -------------------------------------------------------------------------------- /docs/images/luke.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/combinator-ml/combinator/bb517794020bda5b8f1326236217214a6a1d3b4d/docs/images/luke.webp -------------------------------------------------------------------------------- /docs/images/phil.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/combinator-ml/combinator/bb517794020bda5b8f1326236217214a6a1d3b4d/docs/images/phil.webp -------------------------------------------------------------------------------- /docs/images/stacks.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/combinator-ml/combinator/bb517794020bda5b8f1326236217214a6a1d3b4d/docs/images/stacks.webp -------------------------------------------------------------------------------- /docs/images/terraform.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/combinator-ml/combinator/bb517794020bda5b8f1326236217214a6a1d3b4d/docs/images/terraform.webp -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Easy MLOps Stacks 2 | 3 | Sophisticated teams develop their MLOps stack from a combination of best of breed components. 4 | 5 | But building or spinning up stacks is incredibly difficult. 6 | 7 | This open source community exists to make combining them less of a headache. 8 | 9 | Deploy common MLOps stacks with a single command. 10 | 11 |
12 | 13 | Credit: Clemens Mewald 14 |
15 | 16 | ## How It Works 17 | 18 | Combinator.ml makes it easy to test drive, combine & deploy the stack that's best for you. 19 | 20 | ### Components, Stacks and Infrastructure 21 | 22 | Combinator components are infinitely composable ML products. We present a curated combination of components as stacks, although it is easy to develop your own bespoke stack. Infrastructure abstractions make it easy to deploy components and stacks. [Find out more.](design.md) 23 | 24 | ### ![Terraform](images/terraform.webp){ width=200 } 25 | 26 | Terraform is the common language we use to combine stacks. Don't worry if you're not familiar with it; we make it easy to get started. 27 | 28 | You can test drive (with the help of [TestFaster](https://testfaster.ci)), spin up a local dev environment, or deploy to a cloud of your choice. 29 | 30 | ### ![GitHub](images/github.webp){ width=200 } 31 | 32 | Every component is published as a terraform module in an open source GitHub repo. 33 | 34 | Each stack is a terraform module too, which references the terraform modules of the components. 35 | 36 | We need your help to maintain these! Please contribute to the existing components or stacks, or create your own. [Find out more.](CONTRIBUTING.md) 37 | 38 | ## MLOps Categories 39 | 40 | Components typically fall into one or more of the following MLOps categories. [Find out more.](components/introduction.md) 41 | 42 | | Category | Description | | 43 | | ---------- | ----------- | ---------- | 44 | | **Data** | Store, manage, and move data. | [**Explore**](components/introduction.md#data) | 45 | | **Develop** | Develop and train models. | [**Explore**](components/introduction.md#develop) | 46 | | **Deploy** | Deploy models to extract value. | [**Explore**](components/introduction.md#deploy) | 47 | | **Monitor** | Understand the behavior of your models. | [**Explore**](components/introduction.md#monitor) | 48 | | **Govern** | Manage, control, and audit your models. | [**Explore**](components/introduction.md#govern) | 49 | 50 | ## Featured Stacks 51 | 52 | Stacks are opinionated combinations of components. [Find out more.](stacks/introduction.md) 53 | 54 | Here are a few examples: 55 | 56 | !!! abstract inline "Kubeflow + MLflow" 57 | *By combinator.ml* 58 | 59 | Kubeflow provides orchestration for notebooks, pipelines, and serving. This stack adds MLflow for model management and makes it easy to log models to MLflow from Kubeflow notebooks and pipelines. 60 | 61 | [**View**](stacks/kubeflow-mlflow.md){ .md-button } 62 | 63 | !!! abstract inline "Minio + Pachyderm" 64 | *By combinator.ml* 65 | 66 | Minio is an S3-compatible cloud-native data store. A Pachyderm cluster provides data lineage and pipelines. This stack makes it easy to spin up a cloud-native version-controlled data store and pipelining tool. 67 | 68 | [**View**](stacks/minio-pachyderm.md){ .md-button } 69 | 70 |
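Deploying a featured stack really is a single module block plus a `terraform apply`. For example, the Kubeflow + MLflow stack above is consumed like this (copied from its stack page; optional settings omitted):

```terraform
module "kubeflow_mlflow_stack" {
  source = "combinator-ml/stack-kubeflow-mlflow/k8s"
  # Optional settings go here
}
```

Point it at a Kubernetes cluster (see the infrastructure pages) and run `terraform init && terraform apply`.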
71 | 72 | ## Getting Started 73 | 74 | It's really easy to get started. 75 | 76 | 1. [Install Terraform](https://learn.hashicorp.com/tutorials/terraform/install-cli) 2. Browse to a [stack](stacks/introduction.md) or [component](components/introduction.md) and follow the instructions to deploy. 77 | 78 | ## Get involved! 79 | 80 | Find us in `#mlops-stacks` on the MLOps.community Slack: 81 | 82 | [Join Slack :rocket:](https://go.mlops.community/slack){ .md-button .md-button--primary } 83 | 84 | Or [read more about contributing](CONTRIBUTING.md). 85 | 86 | ## Community Founders 87 | 88 | 89 | * [Luke Marsden](https://www.linkedin.com/in/luke-marsden-71b3789/) - [MLOps Consulting](https://mlops.consulting) 90 | * [Kai Davenport](https://www.linkedin.com/in/kai-davenport-228b6017/) - [MLOps Consulting](https://mlops.consulting) 91 | * [Phil Winder](https://winder.ai/about/team/phil-winder/) - [Winder.ai - MLOps Consulting](https://winder.ai/services/mlops/mlops-consulting/) 92 | * [Demetrios Brinkmann](https://www.linkedin.com/in/dpbrinkm/) - [MLOps Community](https://mlops.community) 93 | * [Dan Baker](https://www.linkedin.com/in/danthebaker/) - AutoDB 94 | -------------------------------------------------------------------------------- /docs/infrastructure/google.md: -------------------------------------------------------------------------------- 1 | # Google 2 | 3 | Google offers managed Kubernetes clusters through [Google Kubernetes Engine](https://cloud.google.com/kubernetes-engine). This component creates a cluster in GKE. 4 | 5 | - [Component Website](https://github.com/combinator-ml/terraform-google-kubernetes) 6 | 7 | ## Prerequisites 8 | 9 | To use this component you will need a GCP account, authorisation to create infrastructure, and a GCP project ID. 10 | 11 | ## Costs 12 | 13 | This runs in a single zone, which means it falls under GCP's free management tier and you don't pay the cluster management fee, only for worker nodes. After the first zone you pay, and that's when it gets expensive. See the [GCP pricing pages](https://cloud.google.com/kubernetes-engine/pricing#cluster_management_fee_and_free_tier) for more information. 14 | 15 | ## Usage 16 | 17 | ```terraform 18 | module "terraform-google-kubernetes" { 19 | source = "combinator-ml/kubernetes/google" 20 | gcp_project_id = "your-gcp-project-id" 21 | } 22 | ``` 23 | -------------------------------------------------------------------------------- /docs/infrastructure/introduction.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | Most of the combinator components and stacks target Kubernetes. This means you need a running Kubernetes cluster. 4 | 5 | The infrastructure-specific components provide a simple way to create Kubernetes clusters in all of the major clouds. 6 | 7 | This page will also describe how to set up Kubernetes locally.
8 | 9 | ## Cloud Kubernetes Components 10 | 11 | - [Google GKE](google.md) 12 | -------------------------------------------------------------------------------- /docs/overrides/main.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block analytics %} 3 | 61 | 62 | {% endblock %} 63 | -------------------------------------------------------------------------------- /docs/stacks/bodywork-mlflow.md: -------------------------------------------------------------------------------- 1 | # Bodywork + MLflow 2 | 3 | This tutorial enables you to experiment with Bodywork and MLflow combined into a single open-source MLOps stack. [Bodywork](https://github.com/bodywork-ml/bodywork-core) is a tool that focuses on the deployment of machine learning projects to Kubernetes. [MLflow](https://www.mlflow.org) is a tool for managing the machine learning lifecycle (tracking metrics and managing ML artefacts, such as trained models). 4 | 5 | We have developed an example train-and-serve pipeline to demonstrate Bodywork and MLflow working side-by-side, which you can explore in [this GitHub repository](https://github.com/bodywork-ml/bodywork-pipeline-with-mlflow). The pipeline uses MLflow to track the training metrics and manage trained models. The pipeline consists of two stages, defined in two executable Python modules: 6 | 7 | 1. `train_model.py` - runs a batch job to train a model, logging metrics and registering models to MLflow. 8 | 2. `serve_model.py` - loads the latest 'production' model from MLflow and then starts a simple Flask app to handle requests for scoring data. 9 | 10 | The details of the deployment are described in the `bodywork.yaml` configuration file. When a deployment is triggered, Bodywork instructs Kubernetes to start pre-built [Bodywork containers](https://hub.docker.com/repository/docker/bodyworkml/bodywork-core) that pull the code from the demo project's Git repo and run the executable Python modules. Each stage is associated with one Python module and is run, in isolation, in its own container. 11 | 12 | Launch the test drive below and follow the steps to see this pipeline in action! 13 | 14 | ![bodywork](https://bodywork-media.s3.eu-west-2.amazonaws.com/ml_pipeline_with_mlflow.png) 15 | 16 | ## Step 0 - Launch the Test Drive 17 | 18 | **Note: the test drive doesn't work in Safari yet. Please use Chrome or Firefox for now! Also please note it won't work in Private/Incognito windows.** 19 | 20 | Use the following test drive to launch a temporary Kubernetes cluster with the tutorial running in it: 21 | 22 | 32 | 33 | Launch Test Drive 34 | 35 | 36 | 37 | At busy times, you may need to wait a few minutes for a test drive environment to become available. 38 | 39 | **Note that the environment will shut down automatically 1 hour after you start using it.** 40 | 41 | ## Step 1 - Deploy the Pipeline 42 | 43 | To test the deployment using a local workflow-controller that streams logs to stdout, run, 44 | 45 | ```text 46 | $ bodywork workflow \ 47 | --namespace=bodywork \ 48 | https://github.com/bodywork-ml/bodywork-pipeline-with-mlflow \ 49 | master 50 | ``` 51 | 52 | Once the deployment has completed, browse to the MLflow UI to check the model metrics that were logged to the `iris-classification` experiment during training, and to confirm that the trained model, `iris-classifier--sklearn-decision-tree`, was registered and promoted to 'production'.
53 | 54 | ## Step 2 - Test the Scoring Service 55 | 56 | Requests to score data can now be sent to the scoring service. Try the following in the shell, 57 | 58 | ```text 59 | $ curl http://localhost:31380/bodywork/bodywork-mlflow-demo--scoring-service/iris/v1/score \ 60 | --request POST \ 61 | --header "Content-Type: application/json" \ 62 | --data '{"sepal_length": 5.1, "sepal_width": 3.5, "petal_length": 1.4, "petal_width": 0.2}' 63 | ``` 64 | 65 | Which should return, 66 | 67 | ```json 68 | { 69 | "species_prediction":"setosa", 70 | "probabilities":"setosa=1.0|versicolor=0.0|virginica=0.0", 71 | "model_info": "DecisionTreeClassifier(class_weight='balanced', random_state=42)" 72 | } 73 | ``` 74 | 75 | The exact response format follows the payload defined in the `serve_model.py` module. 76 | 77 | ## Running the ML Pipeline on a Schedule 78 | 79 | If you're happy with the test results, you can run the workflow-controller remotely on the cluster, on a pre-defined schedule. For example, to set up the workflow to run every hour, use the following command, 80 | 81 | ```text 82 | $ bodywork cronjob create \ 83 | --namespace=bodywork \ 84 | --name=train-and-deploy \ 85 | --schedule="0 * * * *" \ 86 | --git-repo-url=https://github.com/bodywork-ml/bodywork-bodywork-mlflow-demo-project \ 87 | --git-repo-branch=master 88 | ``` 89 | 90 | Each scheduled workflow will attempt to re-run the batch job, as defined by the state of this repository's `master` branch at the time of execution. 91 | 92 | To get the execution history for all `train-and-deploy` jobs use, 93 | 94 | ```text 95 | $ bodywork cronjob history \ 96 | --namespace=bodywork \ 97 | --name=train-and-deploy 98 | ``` 99 | 100 | Which should return output along the lines of, 101 | 102 | ```text 103 | JOB_NAME START_TIME COMPLETION_TIME ACTIVE SUCCEEDED FAILED 104 | train-and-deploy-1605214260 2020-11-12 20:51:04+00:00 2020-11-12 20:52:34+00:00 0 1 0 105 | ``` 106 | 107 | ## Cleaning Up 108 | 109 | To clean up the deployment in its entirety, delete the namespace using kubectl, e.g. by running, 110 | 111 | ```text 112 | $ kubectl delete ns bodywork 113 | ``` 114 | 115 | ## Make this Project Your Own 116 | 117 | This repository is a [GitHub template repository](https://docs.github.com/en/free-pro-team@latest/github/creating-cloning-and-archiving-repositories/creating-a-repository-from-a-template) that can be automatically copied into your own GitHub account by clicking the `Use this template` button above. 118 | 119 | After you've cloned the template project, use the official [Bodywork documentation](https://bodywork.readthedocs.io/en/latest/) to help modify the project to meet your own requirements. 120 | -------------------------------------------------------------------------------- /docs/stacks/introduction.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | Combinator stacks are built from two or more [components](../components/introduction.md). A stack represents a common combination of tools to provide end-to-end ML functionality. 4 | 5 | A specific stack is formed from an opinionated selection of [components](../components/introduction.md), so there may be cross-over between stacks.
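In practice a stack is just another terraform module that wires component modules together. A minimal sketch, with illustrative module addresses and a hypothetical input/output pair to show how components can be connected:

```terraform
# A hypothetical two-component stack: object storage plus experiment tracking.
module "storage" {
  source = "combinator-ml/minio/k8s" # illustrative component address
}

module "tracking" {
  source = "combinator-ml/mlflow/k8s" # illustrative component address

  # Hypothetical names: a stack composes components by feeding one
  # component's outputs into another component's inputs.
  artifact_bucket = module.storage.bucket_name
}
```

Real stacks, such as Kubeflow + MLflow, follow exactly this shape; see their pages for the documented inputs and outputs.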
6 | -------------------------------------------------------------------------------- /docs/stacks/kubeflow-mlflow.md: -------------------------------------------------------------------------------- 1 | # Kubeflow + MLflow 2 | 3 | **tl;dr:** A [combinator](https://combinator.ml) stack that provides [Kubeflow](https://kubeflow.org) and [MLflow](https://mlflow.org). 4 | 5 | - [Introduction](#introduction) 6 | - [Test Drive](#test-drive) 7 | - [Usage](#usage) 8 | 9 | ## Introduction 10 | 11 | [Kubeflow](https://kubeflow.org) is an open-source MLOps platform that combines Jupyter hosting, ML pipelining, and hyperparameter tuning. It is packaged into a single UI to help data scientists train their ML models. 12 | 13 | Kubeflow Pipelines (KFP), in particular, has emerged as a prominent ML pipelining technology, mainly thanks to managed hosting in various clouds. 14 | 15 | Its opinionated ML-specific API helps data scientists and ML engineers develop robust, repeatable pipelines. 16 | 17 | This stack adds MLflow for model management and makes it easy to log models to MLflow from Kubeflow notebooks and pipelines. 18 | 19 | ### Kubeflow Version 20 | 21 | This installation uses Kubeflow version 1.2, which is now out of date. 22 | 23 | ### Status and Recommendations 24 | 25 | :warning: **For Testing Only** :warning: 26 | 27 | This installation method is not recommended for production use. It requires a lot of workarounds that are not suitable for production. Please refer to the [official documentation](https://www.kubeflow.org/docs/started/installing-kubeflow/) for production installation instructions. 28 | 29 | ## Test Drive 30 | 31 | The fastest way to get started is to use the test drive functionality provided by [TestFaster](https://testfaster.ci). Click on the "Launch Test Drive" button below (opens a new window). 32 | 33 | :computer: Launch Test Drive :computer: 34 | 35 | ## Usage 36 | 37 | ### Prerequisites 38 | 39 | Start by preparing your Kubernetes cluster using one of the [infrastructure components](https://combinator.ml/infrastructure/introduction/) or use your own cluster. 40 | 41 | ### Component Usage 42 | 43 | ```terraform 44 | module "kubeflow_mlflow_stack" { 45 | source = "combinator-ml/stack-kubeflow-mlflow/k8s" 46 | # Optional settings go here 47 | } 48 | ``` 49 | 50 | See the full configuration options below. 51 | 52 | ### Instructions 53 | 54 | Kubeflow is big, so it can take some time to start. Once it does, connect to the Istio ingress gateway service. 55 | 56 | Once you see the login screen, the username is `admin@kubeflow.org` and the password is `12341234`.
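If you want MLflow installed somewhere other than the default namespace, the stack exposes a single documented input, `mlflow_namespace` (see the Inputs table below), which you can set directly on the module block. The namespace value here is only an example:

```terraform
module "kubeflow_mlflow_stack" {
  source = "combinator-ml/stack-kubeflow-mlflow/k8s"

  # Install the MLflow component into a custom namespace instead of the
  # default "mlflow". The value below is an example; choose your own.
  mlflow_namespace = "experiment-tracking"
}
```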
57 | 58 | ## Requirements 59 | 60 | | Name | Version | 61 | |------|---------| 62 | | terraform | >= 0.13 | 63 | | helm | >= 2.2.0 | 64 | | k8s | >= 0.9.1 | 65 | | kubernetes | >= 2.3.2 | 66 | 67 | ## Providers 68 | 69 | | Name | Version | 70 | |------|---------| 71 | | k8s | >= 0.9.1 | 72 | | kubernetes | >= 2.3.2 | 73 | | null | n/a | 74 | | time | n/a | 75 | 76 | ## Modules 77 | 78 | | Name | Source | Version | 79 | |------|--------|---------| 80 | | kubeflow | combinator-ml/kubeflow/k8s | 0.0.2 | 81 | | mlflow | combinator-ml/mlflow/k8s | 0.0.3 | 82 | 83 | ## Resources 84 | 85 | | Name | 86 | |------| 87 | | [k8s_manifest](https://registry.terraform.io/providers/banzaicloud/k8s/latest/docs/resources/manifest) | 88 | | [kubernetes_cluster_role_binding](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/cluster_role_binding) | 89 | | [null_resource](https://registry.terraform.io/providers/hashicorp/null/latest/docs/resources/resource) | 90 | | [time_sleep](https://registry.terraform.io/providers/hashicorp/time/latest/docs/resources/sleep) | 91 | 92 | ## Inputs 93 | 94 | | Name | Description | Type | Default | Required | 95 | |------|-------------|------|---------|:--------:| 96 | | mlflow\_namespace | (Optional) The namespace to install into. | `string` | `"mlflow"` | no | 97 | 98 | ## Outputs 99 | 100 | No output. 101 | -------------------------------------------------------------------------------- /docs/stacks/minio-pachyderm.md: -------------------------------------------------------------------------------- 1 | # Minio + Pachyderm 2 | 3 | Create a minio-backed Pachyderm cluster for data lineage and pipelines. 4 | -------------------------------------------------------------------------------- /docs/terrachain.md: -------------------------------------------------------------------------------- 1 | # Terrachain (alpha) 2 | 3 | The purpose of the "terrachain" format is to enable the execution of a sequence of terraform modules, feeding the output of one into the input for another. 4 | In particular this enables workflows where one module creates a Kubernetes cluster, and a later module deploys _into_ that Kubernetes cluster. 5 | 6 | See [the Terraform documentation](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs#stacking-with-managed-kubernetes-cluster-resources) and [this GitHub issue](https://github.com/hashicorp/terraform/issues/2430#issuecomment-800192314) for motivation. 7 | 8 | Example of a configuration that terrachain can apply: 9 | 10 | ``` 11 | apiVersion: combinator.ml/v1alpha1 12 | modules: 13 | # The modules get terraform-applied in order, with the outputs from each 14 | # being passed into the inputs to the next. It is up to the module authors to 15 | # ensure the inputs & outputs match up in such a way that they are mutually 16 | # compatible. 
17 | - name: aks 18 | repo: https://github.com/combinator-ml/terraform-azure-kubernetes 19 | args: 20 | autoscaling: on 21 | - name: kfp 22 | repo: https://github.com/combinator-ml/terraform-k8s-kfp 23 | ``` 24 | 25 | Or a simpler example, just installing mlflow onto an existing Kubernetes cluster (assumes kubeconfig file in default location or `KUBECONFIG` or similar env var is set): 26 | 27 | ``` 28 | apiVersion: combinator.ml/v1alpha1 29 | modules: 30 | - name: mlflow 31 | repo: https://github.com/combinator-ml/terraform-k8s-mlflow 32 | ``` 33 | 34 | The terrachain format can be consumed by the [Combinator app](https://app.combinator.ml) and the [SAME project](https://sameproject.org/) with: 35 | ``` 36 | same init -f 37 | ``` 38 | 39 | As a special case, if a provider outputs a variable called `kubeconfig_contents`, that file will be written to a temporary file and then the path to the temporary file passed to later modules in both `KUBECONFIG` and `KUBE_CONFIG_PATH` environment variables for convenience. 40 | 41 | If no `KUBE_CONFIG_PATH` variable is set, and no previous module outputs a `kubeconfig_contents`, terrachain will set it to `~/.kube/config` to workaround [this issue](https://github.com/terraform-aws-modules/terraform-aws-eks/issues/1234). 42 | 43 | There are some sample terrachain configs at [https://github.com/combinator-ml/stacks](https://github.com/combinator-ml/stacks). 44 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: "Combinator.ml" 2 | theme: 3 | name: material 4 | custom_dir: docs/overrides 5 | markdown_extensions: 6 | - attr_list 7 | - admonition 8 | - pymdownx.details 9 | - pymdownx.superfences 10 | - pymdownx.emoji: 11 | emoji_index: !!python/name:materialx.emoji.twemoji 12 | emoji_generator: !!python/name:materialx.emoji.to_svg 13 | repo_url: https://github.com/combinator-ml/website 14 | nav: 15 | - Components: 16 | - components/introduction.md 17 | - Data: 18 | - components/data/minio.md 19 | - components/data/pachyderm.md 20 | - components/data/feast.md 21 | - components/data/istio.md 22 | - Develop: 23 | - components/develop/jupyter.md 24 | - components/develop/kubeflow.md 25 | - Deploy: 26 | - components/deploy/seldon.md 27 | - Govern: 28 | - components/govern/mlflow.md 29 | - Stacks: 30 | - stacks/introduction.md 31 | - stacks/bodywork-mlflow.md 32 | - stacks/kubeflow-mlflow.md 33 | - stacks/minio-pachyderm.md 34 | - Infrastructure: 35 | - infrastructure/introduction.md 36 | - infrastructure/google.md 37 | - design.md 38 | - terrachain.md 39 | - CONTRIBUTING.md 40 | - CODE_OF_CONDUCT.md 41 | 42 | # Otherwise mkdocs assumes the main branch is called 'master' 43 | edit_uri: edit/main/docs/ 44 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "appdirs" 3 | version = "1.4.4" 4 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 5 | category = "dev" 6 | optional = false 7 | python-versions = "*" 8 | 9 | [[package]] 10 | name = "cfgv" 11 | version = "3.3.0" 12 | description = "Validate configuration and produce human readable error messages." 
13 | category = "dev" 14 | optional = false 15 | python-versions = ">=3.6.1" 16 | 17 | [[package]] 18 | name = "click" 19 | version = "8.0.1" 20 | description = "Composable command line interface toolkit" 21 | category = "main" 22 | optional = false 23 | python-versions = ">=3.6" 24 | 25 | [package.dependencies] 26 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 27 | 28 | [[package]] 29 | name = "colorama" 30 | version = "0.4.4" 31 | description = "Cross-platform colored terminal text." 32 | category = "main" 33 | optional = false 34 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 35 | 36 | [[package]] 37 | name = "distlib" 38 | version = "0.3.2" 39 | description = "Distribution utilities" 40 | category = "dev" 41 | optional = false 42 | python-versions = "*" 43 | 44 | [[package]] 45 | name = "filelock" 46 | version = "3.0.12" 47 | description = "A platform independent file lock." 48 | category = "dev" 49 | optional = false 50 | python-versions = "*" 51 | 52 | [[package]] 53 | name = "future" 54 | version = "0.18.2" 55 | description = "Clean single-source support for Python 3 and 2" 56 | category = "main" 57 | optional = false 58 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 59 | 60 | [[package]] 61 | name = "identify" 62 | version = "2.2.11" 63 | description = "File identification library for Python" 64 | category = "dev" 65 | optional = false 66 | python-versions = ">=3.6.1" 67 | 68 | [package.extras] 69 | license = ["editdistance-s"] 70 | 71 | [[package]] 72 | name = "jinja2" 73 | version = "3.0.1" 74 | description = "A very fast and expressive template engine." 75 | category = "main" 76 | optional = false 77 | python-versions = ">=3.6" 78 | 79 | [package.dependencies] 80 | MarkupSafe = ">=2.0" 81 | 82 | [package.extras] 83 | i18n = ["Babel (>=2.7)"] 84 | 85 | [[package]] 86 | name = "joblib" 87 | version = "1.0.1" 88 | description = "Lightweight pipelining with Python functions" 89 | category = "main" 90 | optional = false 91 | python-versions = ">=3.6" 92 | 93 | [[package]] 94 | name = "livereload" 95 | version = "2.6.3" 96 | description = "Python LiveReload is an awesome tool for web developers" 97 | category = "main" 98 | optional = false 99 | python-versions = "*" 100 | 101 | [package.dependencies] 102 | six = "*" 103 | tornado = {version = "*", markers = "python_version > \"2.7\""} 104 | 105 | [[package]] 106 | name = "lunr" 107 | version = "0.5.8" 108 | description = "A Python implementation of Lunr.js" 109 | category = "main" 110 | optional = false 111 | python-versions = "*" 112 | 113 | [package.dependencies] 114 | future = ">=0.16.0" 115 | nltk = {version = ">=3.2.5", optional = true, markers = "python_version > \"2.7\" and extra == \"languages\""} 116 | six = ">=1.11.0" 117 | 118 | [package.extras] 119 | languages = ["nltk (>=3.2.5,<3.5)", "nltk (>=3.2.5)"] 120 | 121 | [[package]] 122 | name = "markdown" 123 | version = "3.3.4" 124 | description = "Python implementation of Markdown." 125 | category = "main" 126 | optional = false 127 | python-versions = ">=3.6" 128 | 129 | [package.extras] 130 | testing = ["coverage", "pyyaml"] 131 | 132 | [[package]] 133 | name = "markupsafe" 134 | version = "2.0.1" 135 | description = "Safely add untrusted strings to HTML/XML markup." 136 | category = "main" 137 | optional = false 138 | python-versions = ">=3.6" 139 | 140 | [[package]] 141 | name = "mkdocs" 142 | version = "1.1.2" 143 | description = "Project documentation with Markdown." 
144 | category = "main" 145 | optional = false 146 | python-versions = ">=3.5" 147 | 148 | [package.dependencies] 149 | click = ">=3.3" 150 | Jinja2 = ">=2.10.1" 151 | livereload = ">=2.5.1" 152 | lunr = {version = "0.5.8", extras = ["languages"]} 153 | Markdown = ">=3.2.1" 154 | PyYAML = ">=3.10" 155 | tornado = ">=5.0" 156 | 157 | [[package]] 158 | name = "mkdocs-material" 159 | version = "7.1.3" 160 | description = "A Material Design theme for MkDocs" 161 | category = "main" 162 | optional = false 163 | python-versions = "*" 164 | 165 | [package.dependencies] 166 | markdown = ">=3.2" 167 | mkdocs = ">=1.1" 168 | mkdocs-material-extensions = ">=1.0" 169 | Pygments = ">=2.4" 170 | pymdown-extensions = ">=7.0" 171 | 172 | [[package]] 173 | name = "mkdocs-material-extensions" 174 | version = "1.0.1" 175 | description = "Extension pack for Python Markdown." 176 | category = "main" 177 | optional = false 178 | python-versions = ">=3.5" 179 | 180 | [package.dependencies] 181 | mkdocs-material = ">=5.0.0" 182 | 183 | [[package]] 184 | name = "nltk" 185 | version = "3.6.2" 186 | description = "Natural Language Toolkit" 187 | category = "main" 188 | optional = false 189 | python-versions = ">=3.5.*" 190 | 191 | [package.dependencies] 192 | click = "*" 193 | joblib = "*" 194 | regex = "*" 195 | tqdm = "*" 196 | 197 | [package.extras] 198 | all = ["matplotlib", "twython", "scipy", "numpy", "gensim (<4.0.0)", "python-crfsuite", "pyparsing", "scikit-learn", "requests"] 199 | corenlp = ["requests"] 200 | machine_learning = ["gensim (<4.0.0)", "numpy", "python-crfsuite", "scikit-learn", "scipy"] 201 | plot = ["matplotlib"] 202 | tgrep = ["pyparsing"] 203 | twitter = ["twython"] 204 | 205 | [[package]] 206 | name = "nodeenv" 207 | version = "1.6.0" 208 | description = "Node.js virtual environment builder" 209 | category = "dev" 210 | optional = false 211 | python-versions = "*" 212 | 213 | [[package]] 214 | name = "pre-commit" 215 | version = "2.13.0" 216 | description = "A framework for managing and maintaining multi-language pre-commit hooks." 217 | category = "dev" 218 | optional = false 219 | python-versions = ">=3.6.1" 220 | 221 | [package.dependencies] 222 | cfgv = ">=2.0.0" 223 | identify = ">=1.0.0" 224 | nodeenv = ">=0.11.1" 225 | pyyaml = ">=5.1" 226 | toml = "*" 227 | virtualenv = ">=20.0.8" 228 | 229 | [[package]] 230 | name = "pygments" 231 | version = "2.9.0" 232 | description = "Pygments is a syntax highlighting package written in Python." 233 | category = "main" 234 | optional = false 235 | python-versions = ">=3.5" 236 | 237 | [[package]] 238 | name = "pymdown-extensions" 239 | version = "8.2" 240 | description = "Extension pack for Python Markdown." 241 | category = "main" 242 | optional = false 243 | python-versions = ">=3.6" 244 | 245 | [package.dependencies] 246 | Markdown = ">=3.2" 247 | 248 | [[package]] 249 | name = "pyyaml" 250 | version = "5.4.1" 251 | description = "YAML parser and emitter for Python" 252 | category = "main" 253 | optional = false 254 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 255 | 256 | [[package]] 257 | name = "regex" 258 | version = "2021.7.6" 259 | description = "Alternative regular expression module, to replace re." 
260 | category = "main" 261 | optional = false 262 | python-versions = "*" 263 | 264 | [[package]] 265 | name = "six" 266 | version = "1.16.0" 267 | description = "Python 2 and 3 compatibility utilities" 268 | category = "main" 269 | optional = false 270 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 271 | 272 | [[package]] 273 | name = "toml" 274 | version = "0.10.2" 275 | description = "Python Library for Tom's Obvious, Minimal Language" 276 | category = "dev" 277 | optional = false 278 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" 279 | 280 | [[package]] 281 | name = "tornado" 282 | version = "6.1" 283 | description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 284 | category = "main" 285 | optional = false 286 | python-versions = ">= 3.5" 287 | 288 | [[package]] 289 | name = "tqdm" 290 | version = "4.61.2" 291 | description = "Fast, Extensible Progress Meter" 292 | category = "main" 293 | optional = false 294 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" 295 | 296 | [package.dependencies] 297 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 298 | 299 | [package.extras] 300 | dev = ["py-make (>=0.1.0)", "twine", "wheel"] 301 | notebook = ["ipywidgets (>=6)"] 302 | telegram = ["requests"] 303 | 304 | [[package]] 305 | name = "virtualenv" 306 | version = "20.4.7" 307 | description = "Virtual Python Environment builder" 308 | category = "dev" 309 | optional = false 310 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" 311 | 312 | [package.dependencies] 313 | appdirs = ">=1.4.3,<2" 314 | distlib = ">=0.3.1,<1" 315 | filelock = ">=3.0.0,<4" 316 | six = ">=1.9.0,<2" 317 | 318 | [package.extras] 319 | docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] 320 | testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] 321 | 322 | [metadata] 323 | lock-version = "1.1" 324 | python-versions = "^3.8" 325 | content-hash = "362e9f4bf943285d6cfc7ee671c5bcc18fc21ce2bcdf5e09b9f3a3a84992c5b3" 326 | 327 | [metadata.files] 328 | appdirs = [ 329 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, 330 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, 331 | ] 332 | cfgv = [ 333 | {file = "cfgv-3.3.0-py2.py3-none-any.whl", hash = "sha256:b449c9c6118fe8cca7fa5e00b9ec60ba08145d281d52164230a69211c5d597a1"}, 334 | {file = "cfgv-3.3.0.tar.gz", hash = "sha256:9e600479b3b99e8af981ecdfc80a0296104ee610cab48a5ae4ffd0b668650eb1"}, 335 | ] 336 | click = [ 337 | {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, 338 | {file = "click-8.0.1.tar.gz", hash = "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, 339 | ] 340 | colorama = [ 341 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, 342 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, 343 | ] 344 | distlib = [ 345 | {file = "distlib-0.3.2-py2.py3-none-any.whl", hash = 
"sha256:23e223426b28491b1ced97dc3bbe183027419dfc7982b4fa2f05d5f3ff10711c"}, 346 | {file = "distlib-0.3.2.zip", hash = "sha256:106fef6dc37dd8c0e2c0a60d3fca3e77460a48907f335fa28420463a6f799736"}, 347 | ] 348 | filelock = [ 349 | {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, 350 | {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, 351 | ] 352 | future = [ 353 | {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, 354 | ] 355 | identify = [ 356 | {file = "identify-2.2.11-py2.py3-none-any.whl", hash = "sha256:7abaecbb414e385752e8ce02d8c494f4fbc780c975074b46172598a28f1ab839"}, 357 | {file = "identify-2.2.11.tar.gz", hash = "sha256:a0e700637abcbd1caae58e0463861250095dfe330a8371733a471af706a4a29a"}, 358 | ] 359 | jinja2 = [ 360 | {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, 361 | {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, 362 | ] 363 | joblib = [ 364 | {file = "joblib-1.0.1-py3-none-any.whl", hash = "sha256:feeb1ec69c4d45129954f1b7034954241eedfd6ba39b5e9e4b6883be3332d5e5"}, 365 | {file = "joblib-1.0.1.tar.gz", hash = "sha256:9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7"}, 366 | ] 367 | livereload = [ 368 | {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, 369 | ] 370 | lunr = [ 371 | {file = "lunr-0.5.8-py2.py3-none-any.whl", hash = "sha256:aab3f489c4d4fab4c1294a257a30fec397db56f0a50273218ccc3efdbf01d6ca"}, 372 | {file = "lunr-0.5.8.tar.gz", hash = "sha256:c4fb063b98eff775dd638b3df380008ae85e6cb1d1a24d1cd81a10ef6391c26e"}, 373 | ] 374 | markdown = [ 375 | {file = "Markdown-3.3.4-py3-none-any.whl", hash = "sha256:96c3ba1261de2f7547b46a00ea8463832c921d3f9d6aba3f255a6f71386db20c"}, 376 | {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, 377 | ] 378 | markupsafe = [ 379 | {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, 380 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, 381 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, 382 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, 383 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, 384 | {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, 385 | {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, 386 | {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, 387 | {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, 388 | {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, 389 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, 390 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, 391 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, 392 | {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, 393 | {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, 394 | {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, 395 | {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, 396 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, 397 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, 398 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, 399 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, 400 | {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, 401 | {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, 402 | {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, 403 | {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, 404 | {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, 405 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, 406 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, 407 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, 408 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, 409 | {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, 410 | {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, 411 | {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, 412 | {file = 
"MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, 413 | ] 414 | mkdocs = [ 415 | {file = "mkdocs-1.1.2-py3-none-any.whl", hash = "sha256:096f52ff52c02c7e90332d2e53da862fde5c062086e1b5356a6e392d5d60f5e9"}, 416 | {file = "mkdocs-1.1.2.tar.gz", hash = "sha256:f0b61e5402b99d7789efa032c7a74c90a20220a9c81749da06dbfbcbd52ffb39"}, 417 | ] 418 | mkdocs-material = [ 419 | {file = "mkdocs-material-7.1.3.tar.gz", hash = "sha256:e34bba93ad1a0e6f9afc371f4ef55bedabbf13b9a786b013b0ce26ac55ec2932"}, 420 | {file = "mkdocs_material-7.1.3-py2.py3-none-any.whl", hash = "sha256:437638b0de7a9113d7f1c9ddc93c0a29a3b808c71c3606713d8c1fa437697a3e"}, 421 | ] 422 | mkdocs-material-extensions = [ 423 | {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, 424 | {file = "mkdocs_material_extensions-1.0.1-py3-none-any.whl", hash = "sha256:d90c807a88348aa6d1805657ec5c0b2d8d609c110e62b9dce4daf7fa981fa338"}, 425 | ] 426 | nltk = [ 427 | {file = "nltk-3.6.2-py3-none-any.whl", hash = "sha256:240e23ab1ab159ef9940777d30c7c72d7e76d91877099218a7585370c11f6b9e"}, 428 | {file = "nltk-3.6.2.zip", hash = "sha256:57d556abed621ab9be225cc6d2df1edce17572efb67a3d754630c9f8381503eb"}, 429 | ] 430 | nodeenv = [ 431 | {file = "nodeenv-1.6.0-py2.py3-none-any.whl", hash = "sha256:621e6b7076565ddcacd2db0294c0381e01fd28945ab36bcf00f41c5daf63bef7"}, 432 | {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, 433 | ] 434 | pre-commit = [ 435 | {file = "pre_commit-2.13.0-py2.py3-none-any.whl", hash = "sha256:b679d0fddd5b9d6d98783ae5f10fd0c4c59954f375b70a58cbe1ce9bcf9809a4"}, 436 | {file = "pre_commit-2.13.0.tar.gz", hash = "sha256:764972c60693dc668ba8e86eb29654ec3144501310f7198742a767bec385a378"}, 437 | ] 438 | pygments = [ 439 | {file = "Pygments-2.9.0-py3-none-any.whl", hash = "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"}, 440 | {file = "Pygments-2.9.0.tar.gz", hash = "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"}, 441 | ] 442 | pymdown-extensions = [ 443 | {file = "pymdown-extensions-8.2.tar.gz", hash = "sha256:b6daa94aad9e1310f9c64c8b1f01e4ce82937ab7eb53bfc92876a97aca02a6f4"}, 444 | {file = "pymdown_extensions-8.2-py3-none-any.whl", hash = "sha256:141452d8ed61165518f2c923454bf054866b85cf466feedb0eb68f04acdc2560"}, 445 | ] 446 | pyyaml = [ 447 | {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, 448 | {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, 449 | {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, 450 | {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, 451 | {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, 452 | {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, 453 | {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, 454 | {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, 455 | {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, 456 | {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, 457 | {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, 458 | {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, 459 | {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, 460 | {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, 461 | {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, 462 | {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, 463 | {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, 464 | {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, 465 | {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, 466 | {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, 467 | {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, 468 | ] 469 | regex = [ 470 | {file = "regex-2021.7.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874"}, 471 | {file = "regex-2021.7.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854"}, 472 | {file = "regex-2021.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2"}, 473 | {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d"}, 474 | {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d"}, 475 | {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa"}, 476 | {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222"}, 477 | {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407"}, 478 | {file = "regex-2021.7.6-cp36-cp36m-win32.whl", hash = "sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b"}, 479 | {file = "regex-2021.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb"}, 480 | {file = "regex-2021.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d"}, 481 
| {file = "regex-2021.7.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d"}, 482 | {file = "regex-2021.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0"}, 483 | {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec"}, 484 | {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694"}, 485 | {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a"}, 486 | {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad"}, 487 | {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895"}, 488 | {file = "regex-2021.7.6-cp37-cp37m-win32.whl", hash = "sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5"}, 489 | {file = "regex-2021.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f"}, 490 | {file = "regex-2021.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c"}, 491 | {file = "regex-2021.7.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0"}, 492 | {file = "regex-2021.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26"}, 493 | {file = "regex-2021.7.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f"}, 494 | {file = "regex-2021.7.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf"}, 495 | {file = "regex-2021.7.6-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd"}, 496 | {file = "regex-2021.7.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761"}, 497 | {file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068"}, 498 | {file = "regex-2021.7.6-cp38-cp38-win32.whl", hash = "sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0"}, 499 | {file = "regex-2021.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4"}, 500 | {file = "regex-2021.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026"}, 501 | {file = "regex-2021.7.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417"}, 502 | {file = "regex-2021.7.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde"}, 503 | {file = "regex-2021.7.6-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59"}, 504 | {file = "regex-2021.7.6-cp39-cp39-manylinux2010_x86_64.whl", hash = 
"sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985"}, 505 | {file = "regex-2021.7.6-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d"}, 506 | {file = "regex-2021.7.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"}, 507 | {file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3"}, 508 | {file = "regex-2021.7.6-cp39-cp39-win32.whl", hash = "sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035"}, 509 | {file = "regex-2021.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c"}, 510 | {file = "regex-2021.7.6.tar.gz", hash = "sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d"}, 511 | ] 512 | six = [ 513 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 514 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 515 | ] 516 | toml = [ 517 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, 518 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, 519 | ] 520 | tornado = [ 521 | {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, 522 | {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, 523 | {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, 524 | {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, 525 | {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, 526 | {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, 527 | {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, 528 | {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, 529 | {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, 530 | {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, 531 | {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, 532 | {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, 533 | {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, 534 | {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, 535 | {file = 
"tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, 536 | {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, 537 | {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, 538 | {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, 539 | {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, 540 | {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, 541 | {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, 542 | {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, 543 | {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, 544 | {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, 545 | {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, 546 | {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, 547 | {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, 548 | {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, 549 | {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, 550 | {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, 551 | {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, 552 | {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, 553 | {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, 554 | {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, 555 | {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, 556 | {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, 557 | {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, 558 | {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, 559 | {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, 560 | 
{file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, 561 | {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, 562 | ] 563 | tqdm = [ 564 | {file = "tqdm-4.61.2-py2.py3-none-any.whl", hash = "sha256:5aa445ea0ad8b16d82b15ab342de6b195a722d75fc1ef9934a46bba6feafbc64"}, 565 | {file = "tqdm-4.61.2.tar.gz", hash = "sha256:8bb94db0d4468fea27d004a0f1d1c02da3cdedc00fe491c0de986b76a04d6b0a"}, 566 | ] 567 | virtualenv = [ 568 | {file = "virtualenv-20.4.7-py2.py3-none-any.whl", hash = "sha256:2b0126166ea7c9c3661f5b8e06773d28f83322de7a3ff7d06f0aed18c9de6a76"}, 569 | {file = "virtualenv-20.4.7.tar.gz", hash = "sha256:14fdf849f80dbb29a4eb6caa9875d476ee2a5cf76a5f5415fa2f1606010ab467"}, 570 | ] 571 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "website" 3 | version = "0.0.0" 4 | description = "Combinator ML Website" 5 | authors = ["Phil Winder "] 6 | 7 | [tool.poetry.dependencies] 8 | python = "^3.8" 9 | mkdocs = "1.1.2" 10 | mkdocs-material = "7.1.3" 11 | mkdocs-material-extensions = "1.0.1" 12 | 13 | [tool.poetry.dev-dependencies] 14 | pre-commit = "^2.12.1" 15 | 16 | [build-system] 17 | requires = ["poetry-core>=1.0.0"] 18 | build-backend = "poetry.core.masonry.api" 19 | --------------------------------------------------------------------------------