├── .codeclimate.yml ├── .dockerignore ├── .editorconfig ├── .github ├── ISSUE_TEMPLATE │ ├── 01_bug_report.md │ ├── 02_feature_request.md │ └── config.yml ├── PULL_REQUEST_TEMPLATE.md ├── changelog-configuration.json └── workflows │ ├── build-virtualenv-caches.yml │ ├── coverage.yml │ ├── publish-pypi.yml │ ├── push.yml │ └── test.yml ├── .gitignore ├── CODEOWNERS ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── commodore ├── __init__.py ├── __main__.py ├── catalog.py ├── cli │ ├── __init__.py │ ├── catalog.py │ ├── component.py │ ├── inventory.py │ ├── oidc.py │ ├── options.py │ └── package.py ├── cluster.py ├── compile.py ├── component │ ├── __init__.py │ ├── compile.py │ └── template.py ├── config.py ├── dependency_mgmt │ ├── __init__.py │ ├── component_library.py │ ├── discovery.py │ ├── jsonnet_bundler.py │ ├── tools.py │ └── version_parsing.py ├── dependency_syncer.py ├── dependency_templater.py ├── filters │ └── helm_namespace.jsonnet ├── gitrepo │ ├── __init__.py │ └── diff.py ├── helpers.py ├── inventory │ ├── __init__.py │ ├── lint.py │ ├── lint_dependency_specification.py │ ├── lint_deprecated_parameters.py │ ├── parameters.py │ └── render.py ├── k8sobject.py ├── lib │ ├── commodore.libjsonnet │ ├── kube-libsonnet │ │ └── kube.libsonnet │ ├── kube.libjsonnet │ └── kube.libsonnet ├── login.py ├── multi_dependency.py ├── normalize_url.py ├── package │ ├── __init__.py │ ├── compile.py │ └── template.py ├── postprocess │ ├── __init__.py │ ├── builtin_filters.py │ └── jsonnet.py ├── refs.py ├── scripts │ └── run-kustomize └── tokencache.py ├── docker-compose.yaml ├── docs ├── antora.yml └── modules │ └── ROOT │ ├── assets │ └── images │ │ └── projectsyn.svg │ ├── nav.adoc │ ├── pages │ ├── .vale.ini │ ├── explanation │ │ ├── compilation-metadata.adoc │ │ ├── dependencies.adoc │ │ ├── local-mode.adoc │ │ ├── migrate-kapitan-0.29-0.30.adoc │ │ └── running-commodore.adoc │ ├── how-to │ │ ├── 
local-mode-component.adoc │ │ └── shell-completion.adoc │ ├── index.adoc │ ├── reference │ │ ├── architecture.adoc │ │ ├── cli.adoc │ │ ├── commands.adoc │ │ ├── commodore-libjsonnet.adoc │ │ ├── component-deprecation.adoc │ │ ├── concepts.adoc │ │ ├── deprecation-notices.adoc │ │ ├── deprecation-policy.adoc │ │ ├── hierarchy.adoc │ │ ├── kube-libjsonnet.adoc │ │ └── parameters.adoc │ └── tutorial │ │ └── package.adoc │ └── partials │ ├── nav-explanation.adoc │ ├── nav-howtos.adoc │ ├── nav-reference.adoc │ └── nav-tutorials.adoc ├── poetry.lock ├── pyproject.toml ├── renovate.json ├── tests ├── bench_gitrepo.py ├── conftest.py ├── jsonnet │ ├── envList.json │ ├── envList.jsonnet │ ├── fixupDir.json │ ├── fixupDir.jsonnet │ ├── generateResources.json │ ├── generateResources.jsonnet │ ├── getValueOrDefault.json │ ├── getValueOrDefault.jsonnet │ ├── inventory.json │ ├── inventory.jsonnet │ ├── inventory.yaml │ ├── list_dir.json │ ├── list_dir.jsonnet │ ├── makeMergeable.json │ ├── makeMergeable.jsonnet │ ├── namespaced.json │ ├── namespaced.jsonnet │ ├── noProxyVars.json │ ├── noProxyVars.jsonnet │ ├── noProxyVars.yaml │ ├── proxyVars.json │ ├── proxyVars.jsonnet │ ├── proxyVars.yaml │ ├── renderArray.json │ ├── renderArray.jsonnet │ ├── yaml_load.json │ └── yaml_load.jsonnet ├── mock_gitrepo.py ├── test_catalog.py ├── test_catalog_compile.py ├── test_cli.py ├── test_cli_catalog.py ├── test_cli_component.py ├── test_cli_inventory.py ├── test_cli_oidc.py ├── test_cli_package.py ├── test_cluster.py ├── test_commodore_libjsonnet.py ├── test_compile.py ├── test_compile_meta.py ├── test_component.py ├── test_component_compile.py ├── test_component_template.py ├── test_config.py ├── test_dependency_mgmt.py ├── test_dependency_mgmt_component_library.py ├── test_dependency_mgmt_discovery.py ├── test_dependency_mgmt_jsonnet_bundler.py ├── test_dependency_mgmt_tools.py ├── test_dependency_mgmt_version_parsing.py ├── test_dependency_sync.py ├── test_gitrepo.py ├── 
test_gitrepo_diff.py ├── test_helpers.py ├── test_inventory.py ├── test_inventory_lint.py ├── test_inventory_lint_components.py ├── test_inventory_lint_deprecated_parameters.py ├── test_inventory_parameters.py ├── test_inventory_render.py ├── test_k8sobject.py ├── test_kapitan_reclass.py ├── test_login.py ├── test_multi_dependency.py ├── test_normalize_url.py ├── test_package.py ├── test_package_compile.py ├── test_package_template.py ├── test_postprocess.py ├── test_refs.py ├── test_render_inventory.py ├── test_target.py ├── test_tokencache.py └── testdata │ ├── catalog_list │ ├── id_multi │ ├── id_single │ ├── json_multi │ ├── pretty_multi │ ├── yaml_multi │ └── yml_multi │ ├── github │ ├── projectsyn-package-foo-archived-response.json │ ├── projectsyn-package-foo-response-issue-comment.json │ ├── projectsyn-package-foo-response-issue.json │ ├── projectsyn-package-foo-response-pr.json │ ├── projectsyn-package-foo-response-pulls.json │ └── projectsyn-package-foo-response.json │ ├── inventory_apps │ ├── classes │ │ ├── test.yml │ │ └── test │ │ │ ├── params.yml │ │ │ └── specific.yml │ └── targets │ │ └── test.yml │ ├── inventory_relative_refs │ ├── classes │ │ ├── test.yml │ │ └── test │ │ │ ├── common.yml │ │ │ └── specific.yml │ └── targets │ │ └── test.yml │ └── inventory_yml_yaml │ ├── classes │ ├── a.yml │ └── b.yaml │ └── targets │ └── test.yml ├── tools ├── entrypoint.sh └── install-jb.sh ├── tox.ini └── tox.mk /.codeclimate.yml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | checks: 4 | argument-count: 5 | enabled: true 6 | config: 7 | threshold: 10 8 | complex-logic: 9 | enabled: true 10 | config: 11 | threshold: 4 12 | file-lines: 13 | enabled: true 14 | config: 15 | threshold: 500 16 | method-complexity: 17 | enabled: true 18 | config: 19 | threshold: 15 20 | method-count: 21 | enabled: true 22 | config: 23 | threshold: 30 24 | method-lines: 25 | enabled: true 26 | config: 27 | threshold: 50 28 
| nested-control-flow: 29 | enabled: true 30 | config: 31 | threshold: 4 32 | return-statements: 33 | enabled: true 34 | config: 35 | threshold: 10 36 | similar-code: 37 | enabled: true 38 | identical-code: 39 | enabled: true 40 | 41 | exclude_patterns: 42 | - 'catalog/' 43 | - 'commodore/cruft/' 44 | - 'compiled/' 45 | - 'dependencies/' 46 | - 'inventory/' 47 | - 'vendor/' 48 | - '.tox/' 49 | - 'tests/' 50 | - 'tests_cruft/' 51 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .git* 2 | *.swp 3 | catalog/ 4 | compiled/ 5 | dependencies/ 6 | inventory/ 7 | tools/ 8 | !tools/entrypoint.sh 9 | !tools/install-jb.sh 10 | !tools/install-kustomize.sh 11 | Dockerfile 12 | docker-compose.* 13 | _antora 14 | .vscode/ 15 | 16 | ## From here: GitHub's gitignore template for Python 17 | # Byte-compiled / optimized / DLL files 18 | __pycache__/ 19 | *.py[cod] 20 | *$py.class 21 | 22 | # C extensions 23 | *.so 24 | 25 | # Distribution / packaging 26 | .Python 27 | build/ 28 | develop-eggs/ 29 | dist/ 30 | downloads/ 31 | eggs/ 32 | .eggs/ 33 | # Don't exclude lib, as we are providing Jsonnet libs in that folder 34 | # lib/ 35 | lib64/ 36 | parts/ 37 | sdist/ 38 | var/ 39 | wheels/ 40 | pip-wheel-metadata/ 41 | share/python-wheels/ 42 | *.egg-info/ 43 | .installed.cfg 44 | *.egg 45 | MANIFEST 46 | 47 | # PyInstaller 48 | # Usually these files are written by a python script from a template 49 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
50 | *.manifest 51 | *.spec 52 | 53 | # Installer logs 54 | pip-log.txt 55 | pip-delete-this-directory.txt 56 | 57 | # Unit test / coverage reports 58 | htmlcov/ 59 | .tox/ 60 | .nox/ 61 | .coverage 62 | .coverage.* 63 | .cache 64 | nosetests.xml 65 | coverage.xml 66 | *.cover 67 | *.py,cover 68 | .hypothesis/ 69 | .pytest_cache/ 70 | 71 | # Translations 72 | *.mo 73 | *.pot 74 | 75 | # Django stuff: 76 | *.log 77 | local_settings.py 78 | db.sqlite3 79 | db.sqlite3-journal 80 | 81 | # Flask stuff: 82 | instance/ 83 | .webassets-cache 84 | 85 | # Scrapy stuff: 86 | .scrapy 87 | 88 | # Sphinx documentation 89 | docs/_build/ 90 | 91 | # PyBuilder 92 | target/ 93 | 94 | # Jupyter Notebook 95 | .ipynb_checkpoints 96 | 97 | # IPython 98 | profile_default/ 99 | ipython_config.py 100 | 101 | # pyenv 102 | .python-version 103 | 104 | # pipenv 105 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 106 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 107 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 108 | # install all needed dependencies. 109 | #Pipfile.lock 110 | 111 | # celery beat schedule file 112 | celerybeat-schedule 113 | 114 | # SageMath parsed files 115 | *.sage.py 116 | 117 | # Environments 118 | .env 119 | .venv 120 | env/ 121 | venv/ 122 | ENV/ 123 | env.bak/ 124 | venv.bak/ 125 | 126 | # Spyder project settings 127 | .spyderproject 128 | .spyproject 129 | 130 | # Rope project settings 131 | .ropeproject 132 | 133 | # mkdocs documentation 134 | /site 135 | 136 | # mypy 137 | .mypy_cache/ 138 | .dmypy.json 139 | dmypy.json 140 | 141 | # Pyre type checker 142 | .pyre/ 143 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | ; This file is for unifying the coding style for different editors and IDEs. 
2 | ; More information at https://editorconfig.org 3 | 4 | root = true 5 | 6 | [*] 7 | charset = utf-8 8 | end_of_line = lf 9 | insert_final_newline = true 10 | trim_trailing_whitespace = true 11 | 12 | [*.{y*ml,*json*,*sonnet}] 13 | indent_style = space 14 | indent_size = 2 15 | 16 | [*.*sonnet] 17 | # C-style doc comments 18 | block_comment_start = /* 19 | block_comment = * 20 | block_comment_end = */ 21 | 22 | [.gitkeep] 23 | insert_final_newline = false 24 | 25 | [Makefile] 26 | indent_style = tab 27 | 28 | [tests/testdata/**] 29 | trim_trailing_whitespace = false 30 | 31 | [commodore/lib/kube-libsonnet/**] 32 | indent_size = unset 33 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/01_bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🐜 Bug report 3 | about: Create a report to help us improve 🔧 4 | labels: bug 5 | --- 6 | 7 | 8 | 9 | ## Steps to Reproduce the Problem 10 | 11 | 12 | 1. 13 | 1. 14 | 1. 15 | 16 | ## Actual Behavior 17 | 18 | 19 | ## Expected Behavior 20 | 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/02_feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🚀 Feature request 3 | about: Suggest an idea for this project 💡 4 | labels: enhancement 5 | --- 6 | 7 | ## Context 8 | 13 | 14 | ## Alternatives 15 | 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: ❓ Help and Support RocketChat Channel 4 | url: https://community.appuio.ch 5 | about: Please ask and answer questions here. 
🏥 6 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 7 | 8 | ## Checklist 9 | 12 | 13 | - [ ] Keep pull requests small so they can be easily reviewed. 14 | - [ ] Update the documentation. 15 | - [ ] Update tests. 16 | - [ ] Categorize the PR by setting a good title and adding one of the labels: 17 | `bug`, `enhancement`, `documentation`, `change`, `breaking`, `dependency`, `internal` 18 | as they show up in the changelog 19 | - [ ] Link this PR to related issues. 20 | 21 | 25 | -------------------------------------------------------------------------------- /.github/changelog-configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "pr_template": "- ${{TITLE}} (#${{NUMBER}})", 3 | "categories": [ 4 | { 5 | "title": "## 🔎 Breaking Changes", 6 | "labels": ["breaking"] 7 | }, 8 | { 9 | "title": "## 🚀 Features", 10 | "labels": ["enhancement", "feature"] 11 | }, 12 | { 13 | "title": "## 🛠️ Minor Changes", 14 | "labels": ["change"] 15 | }, 16 | { 17 | "title": "## 🪛 Internal Changes", 18 | "labels": ["internal"] 19 | }, 20 | { 21 | "title": "## 🐛 Fixes", 22 | "labels": ["bug", "fix"] 23 | }, 24 | { 25 | "title": "## 📄 Documentation", 26 | "labels": ["documentation"] 27 | }, 28 | { 29 | "title": "## 🔗 Dependency Updates", 30 | "labels": ["dependency"] 31 | } 32 | ], 33 | "template": "${{CATEGORIZED_COUNT}} changes since ${{FROM_TAG}}\n\n${{CHANGELOG}}" 34 | } 35 | -------------------------------------------------------------------------------- /.github/workflows/build-virtualenv-caches.yml: -------------------------------------------------------------------------------- 1 | name: Build virtualenv caches 2 | on: 3 | push: 4 | branches: 5 | - master 6 | # Verify & rebuild caches every Monday at 04:00 7 | # This should mostly ensure that the caches aren't evicted after 7 
days of 8 | # inactivity 9 | schedule: 10 | - cron: '0 4 * * MON' 11 | 12 | jobs: 13 | build-lint-virtualenvs: 14 | runs-on: ubuntu-latest 15 | strategy: 16 | matrix: 17 | command: 18 | - flake8 19 | - pylint 20 | - bandit 21 | - mypy 22 | - black 23 | steps: 24 | - uses: actions/checkout@v4 25 | - uses: actions/setup-python@v5 26 | with: 27 | python-version: '3.11' 28 | - uses: Gr1N/setup-poetry@v9 29 | - name: Install tox 30 | run: | 31 | pip install tox 32 | - uses: actions/cache@v4 33 | with: 34 | path: | 35 | .tox 36 | ~/.cache/pypoetry/virtualenvs 37 | key: ${{ runner.os }}-lint-${{ matrix.command }}-${{ hashFiles('poetry.lock', 'tox.ini') }} 38 | - name: Build virtualenv for ${{ matrix.command }} 39 | run: make lintenv_${{ matrix.command }} 40 | build-test-virtualenvs: 41 | runs-on: ubuntu-latest 42 | strategy: 43 | matrix: 44 | python-version: 45 | - '3.10' 46 | - '3.11' 47 | - '3.12' 48 | steps: 49 | - uses: actions/checkout@v4 50 | - uses: actions/setup-python@v5 51 | with: 52 | python-version: ${{ matrix.python-version }} 53 | - uses: Gr1N/setup-poetry@v9 54 | - name: Install tox 55 | run: | 56 | pip install tox 57 | - uses: actions/cache@v4 58 | with: 59 | path: | 60 | .tox 61 | ~/.cache/pypoetry/virtualenvs 62 | key: ${{ runner.os }}-test-py${{ matrix.python-version }}-${{ hashFiles('poetry.lock', 'tox.ini') }} 63 | - name: Extract Python minor version from matrix python-version 64 | run: echo "PYVER=$(echo ${{ matrix.python-version}} |cut -d. 
-f1,2)" >> $GITHUB_ENV 65 | - run: | 66 | make testenv_py${PYVER} 67 | 68 | build-bench-virtualenvs: 69 | runs-on: ubuntu-latest 70 | strategy: 71 | matrix: 72 | python-version: 73 | - '3.10' 74 | - '3.11' 75 | - '3.12' 76 | steps: 77 | - uses: actions/checkout@v4 78 | - uses: actions/setup-python@v5 79 | with: 80 | python-version: ${{ matrix.python-version }} 81 | - uses: Gr1N/setup-poetry@v9 82 | - name: Install tox 83 | run: | 84 | pip install tox 85 | - uses: actions/cache@v4 86 | with: 87 | path: | 88 | .tox 89 | ~/.cache/pypoetry/virtualenvs 90 | key: ${{ runner.os }}-bench-py${{ matrix.python-version }}-${{ hashFiles('poetry.lock', 'tox.ini') }} 91 | - name: Extract Python minor version from matrix python-version 92 | run: echo "PYVER=$(echo ${{ matrix.python-version}} |cut -d. -f1,2)" >> $GITHUB_ENV 93 | - run: | 94 | make benchenv_py${PYVER} 95 | 96 | build_poetry_virtualenv: 97 | runs-on: ubuntu-latest 98 | steps: 99 | - uses: actions/checkout@v4 100 | - uses: actions/setup-python@v5 101 | with: 102 | python-version: '3.11' 103 | - uses: Gr1N/setup-poetry@v9 104 | - uses: actions/cache@v4 105 | with: 106 | path: ~/.cache/pypoetry/virtualenvs 107 | key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} 108 | - name: Install Poetry and setup Poetry virtualenv 109 | run: | 110 | poetry env use python3.11 111 | poetry install 112 | -------------------------------------------------------------------------------- /.github/workflows/coverage.yml: -------------------------------------------------------------------------------- 1 | name: Test coverage with codeclimate 2 | on: 3 | pull_request: 4 | branches: 5 | - master 6 | push: 7 | branches: 8 | - master 9 | 10 | jobs: 11 | coverage: 12 | # Only run coverage (which includes the integration test) for PRs which don't originate from a fork 13 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 
17 | - uses: actions/setup-python@v5 18 | with: 19 | python-version: '3.11' 20 | - uses: Gr1N/setup-poetry@v9 21 | - uses: actions/cache@v4 22 | with: 23 | path: ~/.cache/pypoetry/virtualenvs 24 | key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} 25 | - name: Install Poetry and setup Poetry virtualenv 26 | run: | 27 | poetry env use python3.11 28 | poetry install 29 | - name: Install jsonnet-bundler 30 | run: | 31 | mkdir -p /opt/bin && curl -sLo /opt/bin/jb \ 32 | https://github.com/projectsyn/jsonnet-bundler/releases/download/v0.6.1/jb_linux_amd64 \ 33 | && chmod +x /opt/bin/jb 34 | - name: Update PATH 35 | run: echo "/opt/bin" >> $GITHUB_PATH 36 | - name: Pull in SSH deploy key for integration test 37 | env: 38 | SSH_AUTH_SOCK: /tmp/ssh_agent.sock 39 | run: | 40 | mkdir -p ~/.ssh 41 | echo "github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==" >> ~/.ssh/known_hosts 42 | ssh-agent -a $SSH_AUTH_SOCK > /dev/null 43 | ssh-add - <<< "${{ secrets.CATALOG_DEPLOY_KEY }}" 44 | - name: Run test coverage 45 | run: make test_coverage 46 | env: 47 | SSH_AUTH_SOCK: /tmp/ssh_agent.sock 48 | - name: Upload code coverage report to Code Climate 49 | uses: paambaati/codeclimate-action@v9.0.0 50 | env: 51 | CC_TEST_REPORTER_ID: f9c194f25b65bf9c9413d736386e70d32c128516218768333cd7205e79076506 52 | with: 53 | coverageLocations: coverage.xml:coverage.py 54 | -------------------------------------------------------------------------------- /.github/workflows/publish-pypi.yml: -------------------------------------------------------------------------------- 1 | name: PyPI release 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | tags: 8 | - 
v* 9 | pull_request: 10 | branches: 11 | - master 12 | 13 | jobs: 14 | build-and-publish: 15 | # Skip job on forks 16 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false 17 | name: Build and publish to PyPI 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v4 22 | with: 23 | fetch-depth: 0 24 | - name: Set up Python 25 | uses: actions/setup-python@v5 26 | with: 27 | python-version: '3.11' 28 | - uses: Gr1N/setup-poetry@v9 29 | - uses: actions/cache@v4 30 | with: 31 | path: ~/.cache/pypoetry/virtualenvs 32 | key: ${{ runner.os }}-publish-pypi-${{ hashFiles('poetry.lock') }} 33 | - name: Setup poetry environment 34 | run: | 35 | poetry install 36 | - name: Inject package versions to action env 37 | run: make inject-version 38 | - name: Set Python package version for release 39 | if: startsWith(github.ref, 'refs/tags/v') 40 | run: poetry version "${PYVERSION}" 41 | - name: Set Python package version 42 | if: "!startsWith(github.ref, 'refs/tags/v')" 43 | run: | 44 | poetry version "${PYVERSION}.dev${GITHUB_RUN_NUMBER}" 45 | - name: Build Python package 46 | run: poetry build 47 | - name: Publish to PyPI 48 | if: startsWith(github.ref, 'refs/tags/v') 49 | env: 50 | # increase connection timeout to PyPI to 60s 51 | # NOTE(sg): This is probably not required 52 | POETRY_REQUESTS_TIMEOUT: "60" 53 | run: poetry publish -u __token__ -p ${{ secrets.PYPI_TOKEN }} 54 | - name: Publish to TestPyPI 55 | if: "!startsWith(github.ref, 'refs/tags/v')" 56 | env: 57 | # increase connection timeout to TestPyPI to 60s 58 | POETRY_REQUESTS_TIMEOUT: "60" 59 | run: | 60 | poetry config repositories.test-pypi https://test.pypi.org/legacy/ 61 | poetry publish -r test-pypi -u __token__ -p ${{ secrets.TEST_PYPI_TOKEN }} 62 | -------------------------------------------------------------------------------- /.github/workflows/push.yml: -------------------------------------------------------------------------------- 1 | name: Build 
& Push Container Image 2 | on: 3 | push: 4 | branches: 5 | - master 6 | tags: 7 | - v* 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | env: 13 | IMAGE: docker.io/${{ github.repository }} 14 | steps: 15 | - uses: actions/checkout@v4 16 | with: 17 | fetch-depth: "0" 18 | - name: Set image version latest 19 | if: github.ref == 'refs/heads/master' 20 | run: echo "VERSION=latest" >> ${GITHUB_ENV} 21 | - name: Set image version from tag 22 | if: startsWith(github.ref, 'refs/tags/v') 23 | run: echo "VERSION=$(echo ${GITHUB_REF#refs/tags/})" >> ${GITHUB_ENV} 24 | - name: Build Image 25 | run: make docker 26 | env: 27 | IMAGE_NAME: "${IMAGE}:${VERSION}" 28 | - name: Push Image 29 | env: 30 | DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} 31 | DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} 32 | run: | 33 | docker login docker.io --username "${DOCKER_USERNAME}" --password "${DOCKER_PASSWORD}" 34 | docker push "${IMAGE}:${VERSION}" 35 | - name: Build changelog from PRs with labels 36 | if: startsWith(github.ref, 'refs/tags/v') 37 | id: build_changelog 38 | uses: mikepenz/release-changelog-builder-action@v5 39 | with: 40 | configuration: ".github/changelog-configuration.json" 41 | # PreReleases still get a changelog, but the next full release gets a diff since the last full release, 42 | # combining possible changelogs of all previous PreReleases in between. 43 | # PreReleases show a partial changelog since last PreRelease. 
44 | ignorePreReleases: "${{ !contains(github.ref, '-rc') }}" 45 | env: 46 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 47 | - name: Read release message from tag commit 48 | id: tag_message 49 | if: startsWith(github.ref, 'refs/tags/v') 50 | run: | 51 | git fetch origin +refs/tags/*:refs/tags/* 52 | # Extract tag message 53 | TAG_MSG=$(git tag -n --format='%(contents:body)' ${GITHUB_REF##refs/tags/} | tr -d '\r') 54 | # Escape literal % and newlines (\n, \r) for github actions output 55 | TAG_MSG=${TAG_MSG//'%'/%25} 56 | TAG_MSG=${TAG_MSG//$'\n'/%0A} 57 | # Join multiple lines belonging to the same paragraph for GitHub 58 | # markdown. 59 | # Paragraph breaks should be %0A%0A. We replace single line breaks 60 | # with a space with sed. 61 | TAG_MSG=$(echo ${TAG_MSG} |sed 's/\([^A]\)%0A\([^%]\)/\1 \2/g') 62 | # Set action output `messsage` 63 | echo "::set-output name=message::${TAG_MSG}" 64 | env: 65 | GITHUB_REF: ${{ github.ref }} 66 | - name: Create Release 67 | if: startsWith(github.ref, 'refs/tags/v') 68 | uses: ncipollo/release-action@v1 69 | with: 70 | body: "# Summary\n\n${{steps.tag_message.outputs.message}}\n\n# Changes\n\n${{steps.build_changelog.outputs.changelog}}" 71 | prerelease: "${{ contains(github.ref, '-rc') }}" 72 | # Ensure target branch for release is "master" 73 | commit: master 74 | token: ${{ secrets.GITHUB_TOKEN }} 75 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore commodore output 2 | /catalog 3 | /compiled 4 | /dependencies 5 | /inventory 6 | 7 | ## jsonnet-bundler 8 | 9 | jsonnetfile.* 10 | /vendor 11 | 12 | ## From here: GitHub's gitignore template for Python 13 | # Byte-compiled / optimized / DLL files 14 | __pycache__/ 15 | *.py[cod] 16 | *$py.class 17 | 18 | # C extensions 19 | *.so 20 | 21 | # Distribution / packaging 22 | .Python 23 | build/ 24 | develop-eggs/ 25 | dist/ 26 | downloads/ 27 | 
eggs/ 28 | .eggs/ 29 | # Don't exclude lib, as we are providing Jsonnet libs in that folder 30 | # lib/ 31 | lib64/ 32 | parts/ 33 | sdist/ 34 | var/ 35 | wheels/ 36 | pip-wheel-metadata/ 37 | share/python-wheels/ 38 | *.egg-info/ 39 | .installed.cfg 40 | *.egg 41 | MANIFEST 42 | 43 | # PyInstaller 44 | # Usually these files are written by a python script from a template 45 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 46 | *.manifest 47 | *.spec 48 | 49 | # Installer logs 50 | pip-log.txt 51 | pip-delete-this-directory.txt 52 | 53 | # Unit test / coverage reports 54 | htmlcov/ 55 | .tox/ 56 | .nox/ 57 | .coverage 58 | .coverage.* 59 | .cache 60 | nosetests.xml 61 | coverage.xml 62 | *.cover 63 | *.py,cover 64 | .hypothesis/ 65 | .pytest_cache/ 66 | 67 | # Translations 68 | *.mo 69 | *.pot 70 | 71 | # Django stuff: 72 | *.log 73 | local_settings.py 74 | db.sqlite3 75 | db.sqlite3-journal 76 | 77 | # Flask stuff: 78 | instance/ 79 | .webassets-cache 80 | 81 | # Scrapy stuff: 82 | .scrapy 83 | 84 | # Sphinx documentation 85 | docs/_build/ 86 | 87 | # PyBuilder 88 | target/ 89 | 90 | # Jupyter Notebook 91 | .ipynb_checkpoints 92 | 93 | # IPython 94 | profile_default/ 95 | ipython_config.py 96 | 97 | # pyenv 98 | .python-version 99 | 100 | # pipenv 101 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 102 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 103 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 104 | # install all needed dependencies. 
105 | #Pipfile.lock 106 | 107 | # celery beat schedule file 108 | celerybeat-schedule 109 | 110 | # SageMath parsed files 111 | *.sage.py 112 | 113 | # Environments 114 | .env 115 | .venv 116 | env/ 117 | venv/ 118 | ENV/ 119 | env.bak/ 120 | venv.bak/ 121 | 122 | # Spyder project settings 123 | .spyderproject 124 | .spyproject 125 | 126 | # Rope project settings 127 | .ropeproject 128 | 129 | # mkdocs documentation 130 | /site 131 | 132 | # mypy 133 | .mypy_cache/ 134 | .dmypy.json 135 | dmypy.json 136 | 137 | # Pyre type checker 138 | .pyre/ 139 | 140 | .vscode/ 141 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @projectsyn/tarazed 2 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | This code repository is part of Project Syn and the code of conduct at 4 | https://syn.tools/syn/about/code_of_conduct.html does apply. 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # How to contribute 2 | 3 | This code repository is part of Project Syn and the contribution guide at 4 | https://syn.tools/syn/about/contribution_guide.html does apply. 5 | 6 | Submit Pull Requests at https://github.com/projectsyn/commodore/pulls. 
7 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/python:3.12.10-slim-bookworm AS base 2 | 3 | ARG TARGETARCH 4 | ENV TARGETARCH=${TARGETARCH:-amd64} 5 | 6 | ENV HOME=/app 7 | 8 | WORKDIR ${HOME} 9 | 10 | FROM base AS builder 11 | 12 | ENV PATH=${PATH}:${HOME}/.local/bin:/usr/local/go/bin 13 | 14 | ARG POETRY_VERSION=1.8.5 15 | RUN apt-get update && apt-get install -y --no-install-recommends \ 16 | build-essential \ 17 | curl \ 18 | libffi-dev \ 19 | && rm -rf /var/lib/apt/lists/* \ 20 | && curl -sSL https://install.python-poetry.org | python - --version ${POETRY_VERSION} \ 21 | && mkdir -p /app/.config 22 | 23 | 24 | COPY pyproject.toml poetry.lock ./ 25 | 26 | RUN poetry config virtualenvs.create false \ 27 | && poetry install --no-dev --no-root 28 | 29 | COPY . ./ 30 | 31 | ARG PYVERSION=v0.0.0 32 | ARG GITVERSION=v0.0.0+dirty 33 | 34 | RUN sed -i "s/^__git_version__.*$/__git_version__ = '${GITVERSION}'/" commodore/__init__.py \ 35 | && poetry version "${PYVERSION}" \ 36 | && poetry build --format wheel 37 | 38 | RUN pip install ./dist/syn_commodore-*-py3-none-any.whl 39 | 40 | RUN curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 \ 41 | && chmod 700 get_helm.sh \ 42 | && ./get_helm.sh \ 43 | && mv /usr/local/bin/helm /usr/local/bin/helm3 \ 44 | && curl -LO https://git.io/get_helm.sh \ 45 | && chmod 700 get_helm.sh \ 46 | && ./get_helm.sh \ 47 | && mv /usr/local/bin/helm /usr/local/bin/helm2 48 | 49 | ARG KUSTOMIZE_VERSION=5.6.0 50 | ARG JSONNET_BUNDLER_VERSION=v0.6.3 51 | 52 | RUN ./tools/install-jb.sh ${JSONNET_BUNDLER_VERSION} \ 53 | && curl -fsSLO "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" \ 54 | && chmod +x install_kustomize.sh \ 55 | && ./install_kustomize.sh ${KUSTOMIZE_VERSION} /usr/local/bin 56 | 57 | FROM base 
AS runtime 58 | 59 | ENV PYTHON_MINOR="${PYTHON_VERSION%.*}" 60 | 61 | RUN apt-get update && apt-get install -y --no-install-recommends \ 62 | curl \ 63 | git \ 64 | gpg \ 65 | libnss-wrapper \ 66 | openssh-client \ 67 | && rm -rf /var/lib/apt/lists/* \ 68 | && echo " ControlMaster auto\n ControlPath /tmp/%r@%h:%p" >> /etc/ssh/ssh_config 69 | 70 | COPY --from=builder \ 71 | /usr/local/lib/python${PYTHON_MINOR}/site-packages/ \ 72 | /usr/local/lib/python${PYTHON_MINOR}/site-packages/ 73 | COPY --from=builder \ 74 | /usr/local/bin/kapitan* \ 75 | /usr/local/bin/commodore* \ 76 | /usr/local/bin/helm* \ 77 | /usr/local/bin/jb \ 78 | /usr/local/bin/kustomize \ 79 | /usr/local/bin/ 80 | 81 | RUN ln -s /usr/local/bin/helm3 /usr/local/bin/helm 82 | 83 | COPY ./tools/entrypoint.sh /usr/local/bin/ 84 | 85 | RUN chgrp 0 /app/ \ 86 | && chmod g+rwX /app/ \ 87 | && mkdir /app/.gnupg \ 88 | && chmod ug+w /app/.gnupg 89 | 90 | USER 1001 91 | 92 | # OIDC token callback 93 | EXPOSE 18000 94 | 95 | ENTRYPOINT ["/usr/local/bin/entrypoint.sh", "commodore"] 96 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019-2020, VSHN AG, info@vshn.ch 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 
13 | 14 | * Neither the name of the copyright holder nor the names of its 15 | contributors may be used to endorse or promote products derived from 16 | this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
28 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | MAKEFLAGS += -j4 2 | 3 | ifneq "$(shell which docker 2>/dev/null)" "" 4 | DOCKER_CMD ?= $(shell which docker) 5 | DOCKER_USERNS ?= "" 6 | else 7 | DOCKER_CMD ?= podman 8 | DOCKER_USERNS ?= keep-id 9 | endif 10 | DOCKER_ARGS ?= --rm --tty --user "$$(id -u):$$(id -g)" --userns=$(DOCKER_USERNS) 11 | 12 | vale_cmd ?= $(DOCKER_CMD) run $(DOCKER_ARGS) --volume "$${PWD}"/docs/modules/ROOT/pages:/pages ghcr.io/vshn/vale:2.15.5 --minAlertLevel=error --config=/pages/.vale.ini /pages 13 | antora_preview_cmd ?= $(DOCKER_CMD) run --rm --publish 35729:35729 --publish 2020:2020 --volume "${PWD}/.git":/preview/antora/.git --volume "${PWD}/docs":/preview/antora/docs ghcr.io/vshn/antora-preview:3.1.2.3 --style=syn --antora=docs 14 | 15 | UNAME := $(shell uname) 16 | ifeq ($(UNAME), Linux) 17 | OS = linux-x64 18 | OPEN = xdg-open 19 | endif 20 | ifeq ($(UNAME), Darwin) 21 | OS = darwin-x64 22 | OPEN = open 23 | endif 24 | 25 | .PHONY: all 26 | all: docs 27 | 28 | .PHONY: docs-serve 29 | docs-serve: 30 | $(antora_preview_cmd) 31 | 32 | .PHONY: docs-vale 33 | docs-vale: 34 | $(vale_cmd) 35 | 36 | 37 | ### 38 | ### Section for Commodore linting and testing 39 | ### 40 | 41 | include tox.mk 42 | 43 | ### 44 | ### Section for Commodore image build using GitHub actions 45 | ### 46 | 47 | # Project parameters 48 | BINARY_NAME ?= commodore 49 | 50 | GITVERSION ?= $(shell git describe --tags --always --match=v* --dirty=+dirty || (echo "command failed $?"; exit 1)) 51 | PYVERSION ?= $(shell git describe --tags --always --match=v* | cut -d- -f1,2 || (echo "command failed $?"; exit 1)) 52 | 53 | IMAGE_NAME ?= docker.io/projectsyn/$(BINARY_NAME):test 54 | 55 | .PHONY: docker 56 | 57 | docker: 58 | $(DOCKER_CMD) build --build-arg PYVERSION=$(PYVERSION) \ 59 | --build-arg GITVERSION=$(GITVERSION) \ 60 | -t $(IMAGE_NAME) 
. 61 | @echo built image $(IMAGE_NAME) 62 | 63 | .PHONY: inject-version 64 | inject-version: 65 | @if [ -n "${CI}" ]; then\ 66 | echo "In CI";\ 67 | echo "PYVERSION=${PYVERSION}" >> "${GITHUB_ENV}";\ 68 | else\ 69 | poetry version "${PYVERSION}";\ 70 | fi 71 | # Always inject Git version 72 | sed -i "s/^__git_version__.*$$/__git_version__ = '${GITVERSION}'/" commodore/__init__.py 73 | 74 | .PHONY: test_integration 75 | test_integration: 76 | poetry run pytest -m integration -n auto ./tests 77 | 78 | .PHONY: test_coverage 79 | test_coverage: 80 | poetry run pytest -m "not bench" -n auto --cov="commodore" --cov-report xml 81 | 82 | .PHONY: test_gen_golden 83 | test_gen_golden: 84 | COMMODORE_TESTS_GEN_GOLDEN=true poetry run pytest ./tests/test_catalog.py 85 | -------------------------------------------------------------------------------- /commodore/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Commodore. Build dynamic inventories and compile catalogs with Kapitan 3 | """ 4 | 5 | from pathlib import Path as P 6 | from importlib.metadata import version 7 | 8 | __url__ = "https://github.com/projectsyn/commodore/" 9 | __git_version__ = "0" 10 | __version__ = version("syn-commodore") 11 | 12 | # provide Commodore installation dir as variable that can be imported 13 | __install_dir__ = P(__file__).parent 14 | 15 | # Location of Kustomize wrapper script 16 | __kustomize_wrapper__ = __install_dir__ / "scripts" / "run-kustomize" 17 | -------------------------------------------------------------------------------- /commodore/__main__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Helper module to run not-installed version (via ``python3 -m commodore``) 3 | """ 4 | 5 | from .cli import main 6 | 7 | if __name__ == "__main__": 8 | main() 9 | -------------------------------------------------------------------------------- /commodore/cli/__init__.py: 
def _version():
    """Return the version string reported by ``commodore --version``.

    When the installed package version matches the Git tag (``v<version>``),
    only the package version is shown; otherwise the Git-derived version is
    appended so dev/dirty builds are identifiable.
    """
    if __git_version__ == f"v{__version__}":
        return __version__
    return f"{__version__} (Git version: {__git_version__})"
# Top-level `commodore login` command; performs the interactive OIDC login
# against the configured Lieutenant instance.
@click.command(
    name="login",
    short_help="Login to Lieutenant",
)
@options.api_url
@options.oidc_discovery_url
@options.oidc_client
@options.pass_config
def commodore_login(
    config: Config, oidc_discovery_url: str, oidc_client: str, api_url: str
):
    """Login to Lieutenant"""
    # Copy the CLI-provided OIDC and API settings onto the shared Config
    # object before delegating to the login flow in commodore.login.
    config.oidc_client = oidc_client
    config.oidc_discovery_url = oidc_discovery_url
    config.api_url = api_url

    login(config)
Please provide the Lieutenant API URL." 56 | ) 57 | 58 | config.api_url = api_url 59 | config.oidc_client = oidc_client 60 | config.oidc_discovery_url = oidc_discovery_url 61 | config.update_verbosity(verbose) 62 | 63 | token = fetch_token(config) 64 | click.echo(token) 65 | -------------------------------------------------------------------------------- /commodore/cli/options.py: -------------------------------------------------------------------------------- 1 | """Click options which are reused for multiple commands""" 2 | 3 | from typing import Optional 4 | 5 | import click 6 | 7 | from commodore.config import Config 8 | 9 | pass_config = click.make_pass_decorator(Config) 10 | 11 | verbosity = click.option( 12 | "-v", 13 | "--verbose", 14 | count=True, 15 | help="Control verbosity. Can be repeated for more verbose output.", 16 | ) 17 | 18 | inventory_output_format = click.option( 19 | "-o", 20 | "--output-format", 21 | help="Output format", 22 | type=click.Choice(["json", "yaml"]), 23 | default="yaml", 24 | ) 25 | 26 | inventory_values = click.option( 27 | "-f", 28 | "--values", 29 | help=( 30 | "Extra values file to use when rendering inventory. " 31 | + "Used as additional reclass class. " 32 | + "Use a values file to specify any cluster facts. " 33 | + "Can be repeated." 34 | ), 35 | multiple=True, 36 | type=click.Path(exists=True, file_okay=True, dir_okay=False), 37 | ) 38 | 39 | inventory_allow_missing_classes = click.option( 40 | " / -A", 41 | "--allow-missing-classes/--no-allow-missing-classes", 42 | default=True, 43 | help="Whether to allow missing classes when rendering the inventory. 
# Reusable `--pr-batch-size` option for dependency-sync commands.
pr_batch_size = click.option(
    "--pr-batch-size",
    metavar="COUNT",
    default=10,
    type=int,
    show_default=True,
    # Bug fix: the two fragments were previously concatenated without a
    # separator, so `--help` rendered "...before pausingTune this parameter...".
    help="Number of PRs to create before pausing. "
    + "Tune this parameter if your sync job hits the GitHub secondary rate limit.",
)
" 115 | + "Tune this parameter if your sync job hits the GitHub secondary rate limit.", 116 | ) 117 | 118 | dependency_filter = click.option( 119 | "--filter", 120 | metavar="REGEX", 121 | default="", 122 | type=str, 123 | show_default=False, 124 | help="Regex to select which dependencies to sync. " 125 | + "If the option isn't given, all dependencies listed in the provided YAML " 126 | + "are synced.", 127 | ) 128 | 129 | 130 | def template_version(default: Optional[str]): 131 | help_str = "The component template version (Git tree-ish) to use." 132 | if default is None: 133 | help_str = ( 134 | help_str 135 | + " If not provided, the currently active template version will be used." 136 | ) 137 | 138 | return click.option( 139 | "--template-version", 140 | default=default, 141 | show_default=default is not None, 142 | help=help_str, 143 | ) 144 | 145 | 146 | def local(help: str): 147 | return click.option( 148 | "--local", 149 | is_flag=True, 150 | default=False, 151 | help=help, 152 | ) 153 | 154 | 155 | def dry_run(help: str): 156 | return click.option( 157 | "--dry-run", 158 | is_flag=True, 159 | help=help, 160 | default=False, 161 | ) 162 | -------------------------------------------------------------------------------- /commodore/dependency_mgmt/jsonnet_bundler.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import json 5 | from collections.abc import Iterable 6 | from pathlib import Path 7 | from subprocess import call # nosec 8 | from typing import Optional 9 | 10 | import click 11 | 12 | from commodore.config import Config 13 | 14 | 15 | def jsonnet_dependencies(config: Config) -> Iterable: 16 | """ 17 | Creates a list of Jsonnet dependencies for the given Components. 
def write_jsonnetfile(file: Path, deps: Iterable):
    """Write a ``jsonnetfile.json`` listing the provided dependencies.

    The file is written with 4-space JSON indentation and a trailing
    newline, using the format expected by jsonnet-bundler (`jb`).
    """
    jsonnetfile = {
        "version": 1,
        "dependencies": deps,
        "legacyImports": True,
    }
    # Single write: serialized JSON plus the trailing newline.
    file.write_text(json.dumps(jsonnetfile, indent=4) + "\n", encoding="utf-8")
71 | """ 72 | jsonnetfile = cwd / "jsonnetfile.json" 73 | if deps: 74 | write_jsonnetfile(jsonnetfile, deps) 75 | 76 | if not jsonnetfile.exists(): 77 | click.secho("No jsonnetfile.json found, skipping Jsonnet Bundler install.") 78 | return 79 | 80 | try: 81 | # To make sure we don't use any stale lock files 82 | lock_file = cwd / "jsonnetfile.lock.json" 83 | if lock_file.exists(): 84 | lock_file.unlink() 85 | if call(["jb", "install"], cwd=cwd) != 0: 86 | raise click.ClickException("jsonnet-bundler exited with error") 87 | except FileNotFoundError as e: 88 | raise click.ClickException( 89 | "the jsonnet-bundler executable `jb` could not be found" 90 | ) from e 91 | -------------------------------------------------------------------------------- /commodore/dependency_mgmt/tools.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from collections.abc import Iterable 4 | from typing import Callable 5 | 6 | 7 | def format_component_list( 8 | components: Iterable[str], format_func: Callable[[str], str] = lambda c: f"'{c}'" 9 | ) -> str: 10 | formatted_list = list(map(format_func, sorted(components))) 11 | 12 | if len(formatted_list) == 0: 13 | return "" 14 | 15 | if len(formatted_list) == 1: 16 | return f"{formatted_list[0]}" 17 | 18 | formatted = ", ".join(formatted_list[:-1]) 19 | 20 | # Use serial ("Oxford") comma when formatting lists of 3 or more items, cf. 
class DepType(Enum):
    """Kinds of Commodore dependencies, mapped to their inventory parameter key."""

    COMPONENT = "components"
    PACKAGE = "packages"


class DependencyParseError(ValueError):
    """Raised when a dependency specification is missing a required field."""

    # Name of the missing field ("url" or "version").
    field: str

    def __init__(self, field: str):
        super().__init__("Error parsing dependency specification")
        self.field = field


@dataclass
class DependencySpec:
    """Parsed dependency specification: repository URL, version, and subpath."""

    url: str
    version: str
    path: str

    @classmethod
    def parse(
        cls,
        info: dict[str, str],
        base_config: Optional[DependencySpec] = None,
    ) -> DependencySpec:
        """Build a spec from a raw inventory dict.

        Without ``base_config``, both ``url`` and ``version`` must be present
        in ``info``. With ``base_config``, any field missing from ``info``
        falls back to the corresponding field of the base spec.

        Raises:
            DependencyParseError: if a required field is absent.
        """
        if base_config is None:
            # `url` is validated before `version` to preserve the original
            # error-reporting order.
            for required in ("url", "version"):
                if required not in info:
                    raise DependencyParseError(required)

        # Normalize the subpath by dropping a single leading slash.
        path = info.get("path", "")
        if path.startswith("/"):
            path = path[1:]

        if base_config is None:
            url = info["url"]
            version = info["version"]
        else:
            url = info.get("url", base_config.url)
            version = info.get("version", base_config.version)
            if not path:
                path = base_config.path

        return cls(url, version, path)
dependency_names: Iterable[str], 68 | require_key: bool = True, 69 | ignore_class_notfound: bool = False, 70 | aliases: dict[str, str] = {}, 71 | fallback: dict[str, DependencySpec] = {}, 72 | ) -> dict[str, DependencySpec]: 73 | deps_key = dependency_type.value 74 | deptype_str = dependency_type.name.lower() 75 | deptype_cap = deptype_str.capitalize() 76 | dependencies = {} 77 | 78 | inv = kapitan_inventory(cfg, ignore_class_notfound=ignore_class_notfound) 79 | cluster_inventory = inv[cfg.inventory.bootstrap_target] 80 | deps = cluster_inventory["parameters"].get(deps_key, None) 81 | if not deps: 82 | if require_key: 83 | raise click.ClickException( 84 | f"{deptype_cap} list ('parameters.{deps_key}') missing" 85 | ) 86 | # If we don't require the key for the requested dependency type to be present, 87 | # just set deps to the empty dict. 88 | deps = {} 89 | 90 | if aliases: 91 | all_dep_keys = set(aliases.keys()) 92 | else: 93 | all_dep_keys = deps.keys() 94 | for depname in dependency_names: 95 | if depname not in all_dep_keys: 96 | raise click.ClickException( 97 | f"Unknown {deptype_str} '{depname}'." 
def _read_components(
    cfg: Config, component_aliases: dict[str, str]
) -> dict[str, DependencySpec]:
    """Read dependency specs for all components and their aliases.

    Base components (the values of ``component_aliases``) are resolved
    first; aliases then fall back to their base component's spec for any
    field they don't override. Returns a single dict keyed by both
    component names and alias names.
    """
    components = set(component_aliases.values())
    # Alias names are the remaining keys that aren't themselves components.
    aliases = set(component_aliases.keys()) - components

    versions = _read_versions(cfg, DepType.COMPONENT, components)
    alias_specs = _read_versions(
        cfg,
        DepType.COMPONENT,
        aliases,
        aliases=component_aliases,
        fallback=versions,
    )

    for alias, spec in alias_specs.items():
        if alias in versions:
            # `aliases` and `components` are disjoint by construction, so a
            # collision here indicates an internal inconsistency.
            raise ValueError("alias name already in component_versions?")
        versions[alias] = spec

    return versions
'lib/kube.libjsonnet'; 3 | local chart_output_dir = std.extVar('chart_output_dir'); 4 | local namespace = std.extVar('namespace'); 5 | local create_namespace = std.extVar('create_namespace'); 6 | local exclude_objstr = std.extVar('exclude_objects'); 7 | local exclude_objstrs = 8 | std.filter(function(s) std.length(s) > 0, std.split(exclude_objstr, '|')); 9 | local exclude_objs = 10 | std.map(function(e) std.parseJson(e), exclude_objstrs); 11 | 12 | com.addNamespaceToHelmOutput(chart_output_dir, namespace, exclude_objs) + 13 | if create_namespace == "true" then {'00_namespace': kube.Namespace(namespace)} else {} 14 | -------------------------------------------------------------------------------- /commodore/gitrepo/diff.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import difflib 4 | 5 | from collections.abc import Iterable 6 | from typing import Protocol 7 | 8 | import click 9 | 10 | 11 | class DiffFunc(Protocol): 12 | def __call__( 13 | self, before_text: str, after_text: str, fromfile: str = "", tofile: str = "" 14 | ) -> tuple[Iterable[str], bool]: ... 
def default_difffunc(
    before_text: str, after_text: str, fromfile: str = "", tofile: str = ""
) -> tuple[Iterable[str], bool]:
    """Produce a unified diff between two texts.

    Returns the diff lines (without trailing newlines) and a flag telling
    the caller whether to suppress the diff — always ``False`` here, since
    the default policy is to show every diff.
    """
    diff = difflib.unified_diff(
        before_text.split("\n"),
        after_text.split("\n"),
        fromfile=fromfile,
        tofile=tofile,
        lineterm="",
    )
    return diff, False
55 | if change_type == "A": 56 | difftext.append(click.style(f"Deleted file {change.b_path}", fg="red")) 57 | elif change_type == "D": 58 | difftext.append(click.style(f"Added file {change.b_path}", fg="green")) 59 | elif change_type == "R": 60 | difftext.append( 61 | click.style(f"Renamed file {change.b_path} => {change.a_path}", fg="yellow") 62 | ) 63 | else: 64 | # Other changes should produce a usable diff 65 | # The diff objects are backwards, so use b_blob as before 66 | # and a_blob as after. 67 | before = change.b_blob.data_stream.read().decode("utf-8") 68 | after = change.a_blob.data_stream.read().decode("utf-8") 69 | diff_lines, suppress_diff = diff_func( 70 | before, after, fromfile=change.b_path, tofile=change.a_path 71 | ) 72 | if not suppress_diff: 73 | if change.renamed_file: 74 | # Just compute similarity ratio for renamed files 75 | # similar to git's diffing 76 | difftext.append("\n".join(_compute_similarity(change)).strip()) 77 | else: 78 | diff_lines = [_colorize_diff(line) for line in diff_lines] 79 | difftext.append("\n".join(diff_lines).strip()) 80 | 81 | return difftext 82 | -------------------------------------------------------------------------------- /commodore/inventory/__init__.py: -------------------------------------------------------------------------------- 1 | from os import makedirs 2 | from pathlib import Path as P 3 | from typing import Optional, Union 4 | 5 | from commodore.component import Component 6 | 7 | 8 | class Inventory: 9 | _work_dir: P 10 | 11 | def __init__(self, work_dir: Optional[P] = None): 12 | if work_dir: 13 | self._work_dir = work_dir 14 | else: 15 | self._work_dir = P(".") 16 | 17 | @property 18 | def work_dir(self) -> P: 19 | return self._work_dir 20 | 21 | @work_dir.setter 22 | def work_dir(self, d: P): 23 | self._work_dir = d 24 | 25 | @property 26 | def inventory_dir(self) -> P: 27 | return self._work_dir / "inventory" 28 | 29 | @property 30 | def dependencies_dir(self) -> P: 31 | return self._work_dir / 
"dependencies" 32 | 33 | @property 34 | def classes_dir(self) -> P: 35 | return self.inventory_dir / "classes" 36 | 37 | @property 38 | def components_dir(self) -> P: 39 | return self.classes_dir / "components" 40 | 41 | @property 42 | def defaults_dir(self) -> P: 43 | return self.classes_dir / "defaults" 44 | 45 | @property 46 | def targets_dir(self) -> P: 47 | return self.inventory_dir / "targets" 48 | 49 | @property 50 | def lib_dir(self) -> P: 51 | return self.dependencies_dir / "lib" 52 | 53 | @property 54 | def libs_dir(self) -> P: 55 | return self.dependencies_dir / "libs" 56 | 57 | @property 58 | def global_config_dir(self) -> P: 59 | return self.classes_dir / "global" 60 | 61 | @property 62 | def bootstrap_target(self) -> str: 63 | return "cluster" 64 | 65 | @property 66 | def params_dir(self) -> P: 67 | return self.classes_dir / "params" 68 | 69 | @property 70 | def params_file(self) -> P: 71 | return self.params_dir / f"{self.bootstrap_target}.yml" 72 | 73 | @property 74 | def output_dir(self) -> P: 75 | return self._work_dir / "compiled" 76 | 77 | def tenant_config_dir(self, tenant: str) -> P: 78 | return self.classes_dir / tenant 79 | 80 | def package_dir(self, pkgname: str) -> P: 81 | return self.classes_dir / pkgname 82 | 83 | def component_file(self, component: Union[Component, str]) -> P: 84 | return self.components_dir / f"{_component_name(component)}.yml" 85 | 86 | def defaults_file(self, component: Union[Component, str]) -> P: 87 | return self.defaults_dir / f"{_component_name(component)}.yml" 88 | 89 | def target_file(self, target: Union[Component, str]) -> P: 90 | return self.targets_dir / f"{_component_name(target)}.yml" 91 | 92 | def ensure_dirs(self): 93 | makedirs(self.components_dir, exist_ok=True) 94 | makedirs(self.defaults_dir, exist_ok=True) 95 | makedirs(self.params_dir, exist_ok=True) 96 | makedirs(self.lib_dir, exist_ok=True) 97 | makedirs(self.libs_dir, exist_ok=True) 98 | makedirs(self.targets_dir, exist_ok=True) 99 | 100 | 101 | 
def _component_name(component: Union[Component, str]) -> str:
    """Extract the name from a `Component`, or pass through a plain string name."""
    if isinstance(component, Component):
        return component.name

    return component


def lint_dependency_specification(
    deptype: DepType, file: Path, filecontents: dict[str, Any]
) -> int:
    """Lint the dependency (component or package) specs in one inventory file.

    Checks each entry under `parameters.<deptype.value>` for a mandatory
    `version` key, unknown keys, and string-typed `url`/`version` values.
    Diagnostics are printed with click.secho.

    Returns the number of lint errors found.
    """
    errcount = 0
    params = filecontents.get("parameters", {})
    if not isinstance(params, dict):
        click.secho(
            f"> Key 'parameters' isn't a dictionary in {file}",
            fg="red",
        )
        return 1

    components = params.get(deptype.value, {})
    deptype_str = deptype.name.capitalize()
    for d, dspec in components.items():
        if not isinstance(dspec, dict):
            # Guard: a non-dict spec (e.g. a bare version string) would
            # otherwise raise a TypeError on the key checks below instead of
            # being reported as a lint error.
            click.secho(
                f"> {deptype_str} specification for {d} "
                + f"isn't a dictionary in {file}",
                fg="red",
            )
            errcount += 1
            continue

        if "version" not in dspec:
            click.secho(
                f"> {deptype_str} specification for {d} "
                + f"is missing key 'version' in {file}",
                fg="red",
            )
            errcount += 1

        unk_keys = set(dspec.keys()) - {"url", "version", "path"}
        if len(unk_keys) > 0:
            click.secho(
                f"> {deptype_str} specification for {d} "
                + f"contains unknown key(s) '{unk_keys}' in {file}",
                fg="red",
            )
            errcount += 1

        durl = dspec.get("url", "")
        if not isinstance(durl, str):
            click.secho(
                f"> {deptype_str} {d} url is of type {type(durl).__name__}"
                + f" (expected string) in {file}",
                fg="red",
            )
            errcount += 1

        dversion = dspec.get("version", "")
        if not isinstance(dversion, str):
            # Message aligned with the url variant (dropped the stray comma).
            click.secho(
                f"> {deptype_str} {d} version is of type {type(dversion).__name__}"
                + f" (expected string) in {file}",
                fg="red",
            )
            errcount += 1

    return errcount


def lint_components(file: Path, filecontents: dict[str, Any]) -> int:
    """Lint `parameters.components` entries in `filecontents`."""
    return lint_dependency_specification(DepType.COMPONENT, file, filecontents)


def lint_packages(file: Path, filecontents: dict[str, Any]) -> int:
    """Lint `parameters.packages` entries in `filecontents`."""
    return lint_dependency_specification(DepType.PACKAGE, file, filecontents)
def lint_deprecated_parameters(file: Path, filecontents: dict[str, Any]) -> int:
    """Recursively scan `filecontents` for deprecated parameter references.

    Returns the number of deprecated references found in `file`.
    """
    # Start the recursion at the document root with an empty field prefix.
    return _lint_dict(file, "", filecontents)
class K8sObject:
    """Comparable wrapper around a Kubernetes manifest dict.

    Instances order lexicographically by (kind, namespace, name), which makes
    sorting lists of objects deterministic.
    """

    def __init__(self, obj):
        # Tolerate None / empty input: all key fields then default to "".
        self._obj = obj if obj else {}
        self._kind = self._obj.get("kind", "")
        metadata = self._obj.get("metadata", {})
        self._name = metadata.get("name", "")
        self._namespace = metadata.get("namespace", "")

    def __lt__(self, other):
        # Tuple comparison is exactly the cascaded kind/namespace/name check.
        return (self._kind, self._namespace, self._name) < (
            other._kind,
            other._namespace,
            other._name,
        )

    def __gt__(self, other):
        return (self._kind, self._namespace, self._name) > (
            other._kind,
            other._namespace,
            other._name,
        )

    def __eq__(self, other):
        return (self._kind, self._namespace, self._name) == (
            other._kind,
            other._namespace,
            other._name,
        )

    def __le__(self, other):
        return not self.__gt__(other)

    def __ge__(self, other):
        return not self.__lt__(other)

    def __ne__(self, other):
        return not self.__eq__(other)
11 | |||; 12 | 13 | local kube = import 'lib/kube-libsonnet/kube.libsonnet'; 14 | 15 | { 16 | _commodore_show_kube_libsonnet_deprecation:: false, 17 | } 18 | + 19 | { 20 | [key]: if super._commodore_show_kube_libsonnet_deprecation then 21 | std.trace(deprecationWarning % key, std.get(kube, key)) 22 | else 23 | std.get(kube, key) 24 | for key in std.objectFieldsAll(kube) 25 | } 26 | -------------------------------------------------------------------------------- /commodore/multi_dependency.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | from typing import Optional 5 | 6 | from url_normalize.tools import deconstruct_url 7 | 8 | from commodore.gitrepo import GitRepo, normalize_git_url 9 | 10 | 11 | class MultiDependency: 12 | _repo: GitRepo 13 | _components: dict[str, Path] 14 | _packages: dict[str, Path] 15 | 16 | def __init__( 17 | self, 18 | repo_url: str, 19 | dependencies_dir: Path, 20 | author_name: Optional[str] = None, 21 | author_email: Optional[str] = None, 22 | ): 23 | repo_dir = dependency_dir(dependencies_dir, repo_url) 24 | self._repo = GitRepo( 25 | repo_url, 26 | repo_dir, 27 | bare=True, 28 | author_name=author_name, 29 | author_email=author_email, 30 | ) 31 | self._components = {} 32 | self._packages = {} 33 | 34 | @property 35 | def url(self) -> str: 36 | return self._repo.remote 37 | 38 | @url.setter 39 | def url(self, repo_url: str): 40 | self._repo.remote = repo_url 41 | 42 | @property 43 | def repo_directory(self) -> Path: 44 | return Path(self._repo.repo.common_dir).resolve().absolute() 45 | 46 | @property 47 | def bare_repo(self) -> GitRepo: 48 | return self._repo 49 | 50 | def get_component(self, name: str) -> Optional[Path]: 51 | return self._components.get(name) 52 | 53 | def register_component(self, name: str, target_dir: Path): 54 | if name in self._components: 55 | raise ValueError(f"component {name} already registered") 56 | 
57 | self._components[name] = target_dir 58 | 59 | def deregister_component(self, name: str): 60 | try: 61 | del self._components[name] 62 | except KeyError as e: 63 | raise ValueError(f"can't deregister unknown component {name}") from e 64 | 65 | def checkout_component(self, name: str, version: str): 66 | """Create or update worktree for component `name`.""" 67 | target_dir = self.get_component(name) 68 | if not target_dir: 69 | raise ValueError(f"can't checkout unknown component {name}") 70 | self._repo.checkout_worktree(target_dir, version=version) 71 | 72 | def get_package(self, name: str) -> Optional[Path]: 73 | return self._packages.get(name) 74 | 75 | def register_package(self, name: str, target_dir: Path): 76 | if name in self._packages: 77 | raise ValueError(f"package {name} already registered") 78 | 79 | self._packages[name] = target_dir 80 | 81 | def deregister_package(self, name: str): 82 | try: 83 | del self._packages[name] 84 | except KeyError as e: 85 | raise ValueError(f"can't deregister unknown package {name}") from e 86 | 87 | def checkout_package(self, name: str, version: str): 88 | """Create or update worktree for package `name`.""" 89 | target_dir = self.get_package(name) 90 | if not target_dir: 91 | raise ValueError(f"can't checkout unknown package {name}") 92 | self._repo.checkout_worktree(target_dir, version=version) 93 | 94 | def initialize_worktree(self, target_dir: Path) -> None: 95 | """Initialize a worktree in `target_dir`.""" 96 | self._repo.initialize_worktree(target_dir) 97 | 98 | def has_checkouts(self) -> bool: 99 | return len(self._repo.worktrees) > 1 100 | 101 | 102 | def dependency_dir(base_dir: Path, repo_url: str) -> Path: 103 | return base_dir / ".repos" / dependency_key(repo_url) 104 | 105 | 106 | def dependency_key(repo_url: str) -> str: 107 | """Create normalized and scheme-agnostic key for the given repo URL. 
from url_normalize import url_normalize
from url_normalize.tools import deconstruct_url, reconstruct_url


def normalize_url(url: str) -> str:
    """Normalize a URL, raising ValueError if normalization yields nothing."""
    nurl = url_normalize(url)
    if not nurl:
        raise ValueError(f"url_normalize returned None for {url}")
    return nurl


def _normalize_git_ssh(url: str) -> str:
    """Normalize an SCP-style (`git@host:repo`) or `ssh://` Git URL."""
    # Import url_normalize internal methods here, so they're not visible in the file
    # scope of gitrepo.py
    # pylint: disable=import-outside-toplevel
    from url_normalize.url_normalize import (
        normalize_userinfo,
        normalize_host,
        normalize_path,
        provide_url_scheme,
    )

    if "@" in url and not url.startswith("ssh://"):
        # Assume git@host:repo format, reformat so url_normalize understands
        # the URL. Split only on the first colon: repo paths containing
        # further colons previously made the 2-tuple unpacking raise
        # ValueError.
        host, repo = url.split(":", 1)
        url = f"{host}/{repo}"
    # Reuse normalization logic from url_normalize, simplify for Git-SSH use case.
    # We can't do `url_normalize(url, "ssh")`, because the library doesn't know
    # "ssh" as a scheme, and fails to look up the default port for "ssh".
    url = provide_url_scheme(url, "ssh")
    urlparts = deconstruct_url(url)
    urlparts = urlparts._replace(
        userinfo=normalize_userinfo(urlparts.userinfo),
        host=normalize_host(urlparts.host),
        path=normalize_path(urlparts.path, scheme="https"),
    )
    return reconstruct_url(urlparts)


def normalize_git_url(url: str) -> str:
    """Normalize HTTP(s) and SSH Git URLs"""
    if "@" in url and ("://" not in url or url.startswith("ssh://")):
        url = _normalize_git_ssh(url)
    elif url.startswith("http://") or url.startswith("https://"):
        nurl = url_normalize(url)
        if not nurl:
            # NOTE(sg): This should be unreachable, since url_normalize() only
            # returns None when passed None as the url to normalize. However, we
            # need the check to make mypy happy.
            raise ValueError(f"failed to normalize {url}")
        url = nurl
    return url
11 | Used to abstract from details of cloning/checking out the correct package repo and 12 | version 13 | """ 14 | 15 | _gitrepo: Optional[GitRepo] 16 | 17 | @classmethod 18 | def clone(cls, cfg, clone_url: str, name: str, version: str = "master"): 19 | pdep = cfg.register_dependency_repo(clone_url) 20 | p = Package( 21 | name, 22 | pdep, 23 | package_dependency_dir(cfg.work_dir, name), 24 | version=version, 25 | ) 26 | p.checkout() 27 | return p 28 | 29 | # pylint: disable=too-many-arguments 30 | def __init__( 31 | self, 32 | name: str, 33 | dependency: MultiDependency, 34 | target_dir: Path, 35 | version: Optional[str] = None, 36 | sub_path: str = "", 37 | ): 38 | self._name = name 39 | self._version = version 40 | self._sub_path = sub_path 41 | self._dependency = dependency 42 | self._dependency.register_package(name, target_dir) 43 | self._dir = target_dir 44 | self._gitrepo = None 45 | 46 | @property 47 | def url(self) -> str: 48 | return self._dependency.url 49 | 50 | @property 51 | def version(self) -> Optional[str]: 52 | return self._version 53 | 54 | @property 55 | def sub_path(self) -> str: 56 | return self._sub_path 57 | 58 | @property 59 | def repository_dir(self) -> Optional[Path]: 60 | return self._dependency.get_package(self._name) 61 | 62 | @property 63 | def repo(self) -> GitRepo: 64 | if not self._gitrepo: 65 | if self._dependency: 66 | dep_repo = self._dependency.bare_repo 67 | author_name = dep_repo.author.name 68 | author_email = dep_repo.author.email 69 | else: 70 | # Fall back to author detection if we don't have a dependency 71 | author_name = None 72 | author_email = None 73 | self._gitrepo = GitRepo( 74 | None, 75 | self._dir, 76 | author_name=author_name, 77 | author_email=author_email, 78 | ) 79 | return self._gitrepo 80 | 81 | @property 82 | def target_dir(self) -> Optional[Path]: 83 | worktree = self._dependency.get_package(self._name) 84 | if not worktree: 85 | return None 86 | 87 | return worktree / self._sub_path 88 | 89 | def 
def package_dependency_dir(work_dir: Path, pname: str) -> Path:
    """Return the checkout directory for config package `pname` below `work_dir`."""
    return work_dir.joinpath("dependencies", "pkg.{}".format(pname))
| 33 | @property 34 | def deptype(self) -> str: 35 | return "package" 36 | 37 | def dependency_dir(self) -> Path: 38 | return package_dependency_dir(self.config.work_dir, self.slug) 39 | -------------------------------------------------------------------------------- /commodore/postprocess/builtin_filters.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | 5 | from pathlib import Path as P 6 | 7 | import _gojsonnet 8 | 9 | import click 10 | 11 | from commodore import __install_dir__ 12 | from commodore.config import Config 13 | from commodore.component import Component 14 | 15 | from .jsonnet import jsonnet_runner 16 | 17 | 18 | def _output_dir(work_dir: P, instance: str, path): 19 | """Compute directory in which to apply filter""" 20 | return work_dir / "compiled" / instance / path 21 | 22 | 23 | def _builtin_filter_helm_namespace( 24 | work_dir: P, inv, component: Component, instance: str, path, **kwargs 25 | ): 26 | if "namespace" not in kwargs: 27 | raise click.ClickException( 28 | "Builtin filter 'helm_namespace': filter argument 'namespace' is required" 29 | ) 30 | create_namespace = kwargs.get("create_namespace", "false") 31 | # Transform create_namespace to string as jsonnet extvars can only be 32 | # strings 33 | if isinstance(create_namespace, bool): 34 | create_namespace = "true" if create_namespace else "false" 35 | exclude_objects = kwargs.get("exclude_objects", []) 36 | exclude_objects = "|".join([json.dumps(e) for e in exclude_objects]) 37 | output_dir = _output_dir(work_dir, instance, path) 38 | 39 | # pylint: disable=c-extension-no-member 40 | jsonnet_runner( 41 | work_dir, 42 | inv, 43 | component.name, 44 | instance, 45 | path, 46 | _gojsonnet.evaluate_file, 47 | __install_dir__ / "filters" / "helm_namespace.jsonnet", 48 | namespace=kwargs["namespace"], 49 | create_namespace=create_namespace, 50 | exclude_objects=exclude_objects, 51 | 
class UnknownBuiltinFilter(ValueError):
    """Raised when a postprocessing filter references a builtin filter ID
    that doesn't exist."""

    def __init__(self, filtername):
        # Keep the offending filter name so callers can report it.
        self.filtername = filtername
        super().__init__("Unknown builtin filter: {}".format(filtername))
content of file basedir/rel if it exists, None if file not found, or throws an exception 22 | """ 23 | if not rel: 24 | raise RuntimeError("Got invalid filename (empty string).") 25 | if rel[0] == "/": 26 | full_path = P(rel) 27 | else: 28 | full_path = basedir / rel 29 | if full_path.is_dir(): 30 | raise RuntimeError("Attempted to import a directory") 31 | 32 | if not full_path.is_file(): 33 | return str(full_path), None 34 | with open(full_path, encoding="utf-8") as f: 35 | return str(full_path), f.read().encode("utf-8") 36 | 37 | 38 | def _import_callback_with_searchpath(search: Iterable[P], basedir: P, rel: str): 39 | full_path, content = _try_path(basedir, rel) 40 | if content: 41 | return full_path, content 42 | for p in search: 43 | full_path, content = _try_path(p, rel) 44 | if content: 45 | return full_path, content 46 | raise RuntimeError("File not found") 47 | 48 | 49 | def _import_cb(work_dir: P, basedir: str, rel: str): 50 | # Add current working dir to search path for Jsonnet import callback 51 | search_path = [ 52 | work_dir.resolve(), 53 | __install_dir__.resolve(), 54 | (work_dir / "vendor").resolve(), 55 | ] 56 | return _import_callback_with_searchpath(search_path, P(basedir), rel) 57 | 58 | 59 | def _list_dir(basedir: os.PathLike, basename: bool): 60 | """ 61 | Non-recursively list files in directory `basedir`. If `basename` is set to 62 | True, only return the file name itself and not the full path. 
63 | """ 64 | files = [x for x in P(basedir).iterdir() if x.is_file()] 65 | 66 | if basename: 67 | return [f.parts[-1] for f in files] 68 | 69 | return files 70 | 71 | 72 | _native_callbacks = { 73 | "yaml_load": (("file",), yaml_load), 74 | "yaml_load_all": (("file",), yaml_load_all), 75 | "list_dir": ( 76 | ( 77 | "dir", 78 | "basename", 79 | ), 80 | _list_dir, 81 | ), 82 | } 83 | 84 | 85 | def write_jsonnet_output(output_dir: P, output: str): 86 | out_objs = json.loads(output) 87 | for outobj, outcontents in out_objs.items(): 88 | outpath = output_dir / f"{outobj}.yaml" 89 | if not outpath.exists(): 90 | print(f" > {outpath} doesn't exist, creating...") 91 | os.makedirs(outpath.parent, exist_ok=True) 92 | if isinstance(outcontents, list): 93 | yaml_dump_all(outcontents, outpath) 94 | else: 95 | yaml_dump(outcontents, outpath) 96 | 97 | 98 | # pylint: disable=too-many-arguments 99 | def jsonnet_runner( 100 | work_dir: P, 101 | inv: dict[str, Any], 102 | component: str, 103 | instance: str, 104 | path: os.PathLike, 105 | jsonnet_func: Callable, 106 | jsonnet_input: os.PathLike, 107 | **kwargs: str, 108 | ): 109 | def _inventory() -> dict[str, Any]: 110 | return inv 111 | 112 | _native_cb = _native_callbacks 113 | _native_cb["commodore_inventory"] = ((), _inventory) 114 | kwargs["target"] = component 115 | kwargs["component"] = component 116 | output_dir = work_dir / "compiled" / instance / path 117 | kwargs["output_path"] = str(output_dir) 118 | output = jsonnet_func( 119 | str(jsonnet_input), 120 | import_callback=functools.partial(_import_cb, work_dir), 121 | native_callbacks=_native_cb, 122 | ext_vars=kwargs, 123 | ) 124 | write_jsonnet_output(output_dir, output) 125 | 126 | 127 | def _filter_file(component: Component, instance: str, filterpath: str) -> P: 128 | return component.alias_directory(instance) / filterpath 129 | 130 | 131 | def run_jsonnet_filter( 132 | config: Config, 133 | inv: dict, 134 | component: Component, 135 | instance: str, 136 | filterid: 
#!/bin/bash
#
# Wrapper around kustomize which provides some convenience features
# 1) The wrapper searches for the kustomize binary in ${PATH}
# 2) The wrapper ensures that the user provides the expected arguments
# 3) The wrapper ensures that the provided output directory exists
#
# The wrapper always calls `kustomize build`. To use the wrapper provide the
# directory in which the output should be written as the first argument. We
# need to pass the output directory as an argument, because otherwise Kapitan
# won't substitute `${compiled_target_dir}` with the path of the compilation
# target directory. Further arguments are passed to kustomize as provided.
# The input directory is expected to be provided in environment variable
# ${INPUT_DIR}.
#
#   export INPUT_DIR=/path/to/kustomization
#   run-kustomize <output-dir> [kustomize args...]
#
set -e

# Kapitan provides a fairly standard PATH variable, we add /opt/homebrew/bin for macOS
export PATH="${PATH}:/opt/homebrew/bin"

# Use a brace group (not a subshell) so `exit` terminates the script directly
# instead of relying on `set -e` to propagate the subshell's exit status.
kustomize=$(command -v kustomize) || { >&2 echo "kustomize not found in ${PATH}"; exit 7; }

if [ -z "${INPUT_DIR}" ]; then
    >&2 echo "INPUT_DIR environment variable not provided"
    exit 2
fi

# Assumption: output dir provided as first arg
readonly output_dir="$1"
if [ -z "${output_dir}" ]; then
    >&2 echo "First argument is empty, expected output directory as first argument"
    exit 2
fi
# Drop the output dir from $@ so remaining args can be forwarded verbatim,
# instead of relying on `-o "${@}"` expanding the output dir into position.
shift
mkdir -p "${output_dir}"

exec "$kustomize" build "${INPUT_DIR}" -o "${output_dir}" "$@"
def get(lieutenant: str) -> dict[str, Any]:
    """Return the cached OIDC token for `lieutenant`, or {} if unavailable.

    Any unreadable or unparsable cache file is treated as an empty cache.
    """
    try:
        raw = Path(cache_name).read_text(encoding="utf-8")
        cache = json.loads(raw)
    except (OSError, json.JSONDecodeError):
        # IOError/FileNotFoundError are OSError aliases/subclasses.
        return {}
    entry = cache.get(lieutenant, {})
    # Guard against legacy caches that stored the token as a bare string.
    return {} if isinstance(entry, str) else entry
/docs/modules/ROOT/pages/.vale.ini: -------------------------------------------------------------------------------- 1 | StylesPath = /styles 2 | MinAlertLevel = warning # suggestion, warning or error 3 | 4 | # Only check Asciidoc files 5 | [*.adoc] 6 | 7 | # Using the Microsoft style 8 | BasedOnStyles = Microsoft 9 | Microsoft.GenderBias = warning 10 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/explanation/compilation-metadata.adoc: -------------------------------------------------------------------------------- 1 | = Compilation metadata reporting 2 | 3 | The reporting is implemented according to https://syn.tools/syn/SDDs/0031-component-version-tracking.html[SDD 0031 - Central Component Version tracking]. 4 | 5 | Commodore will only report metadata for catalog compilations that result in a new catalog commit which was successfully pushed to the catalog repository. 6 | 7 | Currently, Commodore reports the following metadata: 8 | 9 | * Component instance URLs, versions, subpaths, and Git commit hashes 10 | * Package URLs, versions, subpaths, and Git commit hashes 11 | * Global repo URL, version and Git commit hash 12 | * Tenant repo URL, version and Git commit hash 13 | * Commodore Python package version and Git version 14 | * The timestamp of the successful compilation 15 | 16 | 17 | Commodore uses the same data that's reported to Lieutenant to generate the catalog commit message. 18 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/explanation/dependencies.adoc: -------------------------------------------------------------------------------- 1 | = Manage dependencies 2 | 3 | Commodore components can declare dependencies to Jsonnet libraries. 4 | Those dependencies are resolved and downloaded using https://github.com/jsonnet-bundler/jsonnet-bundler[jsonnet-bundler]. 5 | 6 | Define dependencies in a `jsonnetfile.json` file. 
7 | This file must be placed at the root of the component's file structure. 8 | 9 | .Example `jsonnetfile.json` 10 | [source,json] 11 | ---- 12 | { 13 | "version": 1, 14 | "dependencies": [ 15 | { 16 | "source": { 17 | "git": { 18 | "remote": "https://github.com/coreos/kube-prometheus", 19 | "subdir": "jsonnet/kube-prometheus" 20 | } 21 | }, 22 | "version": "release-0.3" 23 | } 24 | ], 25 | "legacyImports": true 26 | } 27 | ---- 28 | 29 | [CAUTION] 30 | ==== 31 | If two components depend on the same Jsonnet library, they must require the same version. 32 | See https://github.com/jsonnet-bundler/jsonnet-bundler#current-limitations[jsonnet-bundler - Current Limitations] 33 | ==== 34 | 35 | [IMPORTANT] 36 | ==== 37 | Jsonnet-bundler uses the file `jsonnetfile.lock.json` to freeze dependency versions for a project. 38 | Lock files of libraries aren't taken into consideration and within this setup, components are libraries. 39 | If your component requires a specific version, you have to declare this in the `version` field within `jsonnetfile.json`. 40 | ==== 41 | 42 | [TIP] 43 | ==== 44 | When compiling components with `commodore component compile`, jsonnet-bundler will create the file `jsonnetfile.lock.json` and the folder `vendor`. 45 | Add them to your `.gitignore`. 46 | ==== 47 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/explanation/local-mode.adoc: -------------------------------------------------------------------------------- 1 | = Local mode 2 | 3 | [NOTE] 4 | ==== 5 | This document assumes that you have your local environment set up to run Commodore as `commodore`. 6 | See xref:running-commodore.adoc[Running Commodore] for a guide to get your local environment set up. 7 | ==== 8 | 9 | Commodore provides a local mode for the `catalog compile` command. 10 | Local mode can be enabled with the `--local` flag. 
11 | 12 | Local mode is intended for local development, and won't fetch information from Lieutenant or clone Git repositories for the inventory and components. 13 | However, by default Commodore will fetch Jsonnet dependencies in local mode (using jsonnet-bundler) and will configure Kapitan to fetch dependencies. 14 | This dependency fetching can be disabled with the `--no-fetch-dependencies` command line flag. 15 | 16 | [source,bash] 17 | -- 18 | commodore catalog compile --local [--no-fetch-dependencies] 19 | -- 20 | 21 | In local mode, the existing directory structure in the working directory is used. 22 | This allows local development on components and also allows testing local modifications to the inventory. 23 | 24 | [NOTE] 25 | ==== 26 | The user is responsible for preparing the working directory to hold a directory structure which Commodore understands. 27 | We recommend running Commodore in regular mode once to fetch all the inputs which are required to compile the catalog for the selected cluster. 28 | 29 | [source,bash] 30 | -- 31 | commodore catalog compile 32 | -- 33 | ==== 34 | 35 | [NOTE] 36 | ==== 37 | Local mode will perform component discovery and will create Kapitan targets for discovered components. 38 | However, Commodore will only create targets for components which are present in `dependencies/`. 39 | See the xref:how-to/local-mode-component.adoc[how-to on adding a component to a cluster in local mode] for detailed steps to add new or existing components to a cluster in local mode. 
40 | ==== 41 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/explanation/migrate-kapitan-0.29-0.30.adoc: -------------------------------------------------------------------------------- 1 | = Migrating from Kapitan 0.29 to 0.30 2 | 3 | == Changes 4 | 5 | The major changes in Kapitan 0.30 which are relevant for Commodore are: 6 | 7 | * Kapitan 0.30 calls out to the system `helm` binary for fetching and rendering Helm charts, see https://github.com/kapicorp/kapitan/pull/701[Kapitan PR#701]. 8 | * The reclass version regression present in Kapitan 0.29.5 has been fixed, see https://github.com/kapicorp/kapitan/pull/703[Kapitan PR#703]. 9 | 10 | == Helm templating 11 | 12 | With the change to using the system `helm` binary, Kapitan now supports both Helm 2 and Helm 3. 13 | 14 | === Selecting the Helm version 15 | 16 | Users can specify which `helm` binary should be used by default by setting environment variable `KAPITAN_HELM_PATH`. 17 | Additionally, users can require a specific binary for individual Helm chart compilations by setting parameter `helm_path`. 18 | If neither the environment variable nor parameter `helm_path` are specified, Kapitan uses the `helm` command in `$PATH`. 19 | 20 | [NOTE] 21 | ==== 22 | Despite the naming of the variable and parameter, you don't have to specify the full path to the Helm binary. 23 | Kapitan will search `$PATH` for a binary with the given name, if you just provide a name like `helm3`. 24 | ==== 25 | 26 | The Commodore Docker image provides Helm 3 as binary `/usr/local/bin/helm3` and `/usr/local/bin/helm`, and Helm 2 as binary `/usr/local/bin/helm2`. 27 | This allows users of the Docker image to specify the Helm version to use for individual compilations as `helm_path: helm3` and `helm_path: helm2`. 28 | Users who want to run Commodore locally should ensure that their `$PATH` contains `helm3` and `helm` pointing to Helm 3, and `helm2` pointing to Helm 2. 
29 | 30 | .Example helm compilation with `helm_path` set 31 | [source,yaml] 32 | ---- 33 | parameters: 34 | kapitan: 35 | compile: 36 | - input_paths: 37 | - path/to/helm/chart 38 | input_type: helm 39 | helm_values: { 40 | # arbitrary Helm values 41 | } 42 | helm_path: helm3 <1> 43 | helm_params: { 44 | # parameters for helm binary 45 | } 46 | output_path: path/to/output/ 47 | ---- 48 | <1> Set parameter `helm_path` to `helm2` or `helm3` 49 | 50 | === Changes to fields in `helm_params` 51 | 52 | [IMPORTANT] 53 | ==== 54 | Previously `helm_params.release_name` was used to specify the name of the Helm release. 55 | Currently this is in use by most Commodore components which compile Helm charts. 56 | 57 | However, Helm 3 has a command line flag `--release-name` which has a different effect. 58 | Therefore the old use of `release_name` has been deprecated, and components should switch to using `helm_params.name` to specify the name of the Helm release. 59 | ==== 60 | 61 | In Kapitan 0.30, the parameter `helm_params` can be used to specify most command line arguments which Helm understands. 62 | Kapitan includes flags `--include-crds=true` and `--skip-tests=true` by default. 63 | 64 | The following flags aren't supported by Kapitan: 65 | 66 | * `--dry-run` 67 | * `--generate-name` 68 | * `--help` 69 | * `--output-dir` 70 | * `--show-only` 71 | 72 | See the https://kapitan.dev/compile/#helm[Kapitan Helm input type] documentation for more details. 73 | 74 | == Catalog output 75 | 76 | When first compiling a cluster catalog with Kapitan 0.30, the contents of some Helm chart output files appear in a different order than for Kapitan 0.29. 77 | Additionally, there's a lot of noise in the resulting catalog diff because the value of the `app.kubernetes.io/managed-by` and `heritage` labels for resources generated by Helm changes from `Tiller` to `Helm` for charts compiled with Helm 3. 
78 | 79 | To make it easier to identify real changes in the catalog diff, we've added an optional command line flag `-m`, `--migration` to the `catalog compile` command. 80 | The flag takes a value specifying which migration Commodore should consider when rendering the catalog diff. 81 | Currently Commodore only knows a single migration: `kapitan-0.29-to-0.30`. 82 | To suppress diffs which only contain reordered objects or the aforementioned label changes, you can compile the catalog with 83 | 84 | [source,bash] 85 | ---- 86 | commodore catalog compile ${CLUSTER_ID} -m kapitan-0.29-to-0.30 87 | ---- 88 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/how-to/local-mode-component.adoc: -------------------------------------------------------------------------------- 1 | = Add a component to a cluster in local mode 2 | 3 | [abstract] 4 | This how-to describes the steps required to add a component (new or existing) to a cluster in local mode. 5 | 6 | == Add an existing component to a cluster in local mode 7 | 8 | To add an existing component to a cluster in local mode for testing purposes, the following steps can be used. 9 | This example uses the https://github.com/projectsyn/component-nfs-subdir-external-provisioner/[nfs-subdir-external-provisioner] component. 10 | 11 | . Create a working directory for the cluster 12 | + 13 | TIP: You can skip this step if you've already got a working directory in which you can run `commodore catalog compile --local` for the cluster you're targeting. 14 | + 15 | [source,bash] 16 | ---- 17 | mkdir /path/to/working/directory 18 | 19 | commodore catalog compile 20 | ---- 21 | 22 | . 
Clone the component, if it's not present in `dependencies/` 23 | + 24 | [source,bash] 25 | -- 26 | COMPONENT_NAME=nfs-subdir-external-provisioner 27 | test -d dependencies/${COMPONENT_NAME} || \ 28 | git clone https://github.com/projectsyn/component-${COMPONENT_NAME}.git \ 29 | dependencies/${COMPONENT_NAME} 30 | -- 31 | 32 | . Add the component to the cluster, by adding it in the `applications` array in the cluster config: 33 | + 34 | [source,yaml] 35 | -- 36 | applications: 37 | - nfs-subdir-external-provisioner 38 | -- 39 | 40 | . Run Commodore in local mode with dependency fetching enabled 41 | + 42 | [source,bash] 43 | -- 44 | commodore catalog compile --local 45 | -- 46 | 47 | . Now you can configure the component and test changes with dependency fetching disabled 48 | + 49 | [source,bash] 50 | -- 51 | commodore catalog compile --local --no-fetch-dependencies 52 | -- 53 | 54 | == Testing a new component in local mode 55 | 56 | Commodore's `component new` command won't insert the component into the current cluster configuration. 57 | 58 | To test a new component in local mode, you need to explicitly add the component to the cluster by following the steps in <<_add_an_existing_component_to_a_cluster_in_local_mode,the previous section>>. 59 | You can omit the second step of the previous section, if you've created the component in the target cluster's working directory with `component new`, as that command will initialize the component repo in `dependencies/` by default. 60 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/how-to/shell-completion.adoc: -------------------------------------------------------------------------------- 1 | = Shell autocompletion 2 | 3 | Commodore supports command and cluster autocompletion. 4 | Command autocompletion shows possible completion for partially typed Commodore subcommands and command-line flags. 5 | Command autocompletion is provided by the Click Python library. 
6 | Cluster autocompletion is enabled for `commodore catalog compile`. 7 | This relies on a custom completion implementation in Commodore which fetches the list of known clusters from the provided Lieutenant API to show possible completions. 8 | 9 | This only works when Commodore is installed locally due to limitations on how shell completion is implemented in the Click Python library. 10 | See xref:explanation/running-commodore.adoc#_pypi[Running Commodore] for details on how to install Commodore locally. 11 | 12 | Click supports autocompletion for the `bash`, `zsh` and `fish` shells. 13 | 14 | To enable autocompletion for your local Commodore installation, follow the steps below. 15 | 16 | . Add the following snippet in your shell's configuration 17 | + 18 | .*bash* (`~/.bashrc`) 19 | [%collapsible] 20 | ==== 21 | [source,bash] 22 | ---- 23 | source <(_COMMODORE_COMPLETE=bash_source commodore) 24 | ---- 25 | ==== 26 | + 27 | .*zsh* (`~/.zshrc`) 28 | [%collapsible] 29 | ==== 30 | [source,zsh] 31 | ---- 32 | source <(_COMMODORE_COMPLETE=zsh_source commodore) 33 | ---- 34 | ==== 35 | + 36 | .*fish* (`~/.config/fish/completions/commodore.fish`) 37 | [%collapsible] 38 | ==== 39 | [source,fish] 40 | ---- 41 | source <(_COMMODORE_COMPLETE=fish_source commodore) 42 | ---- 43 | ==== 44 | 45 | . Reload your shell's configuration or start a new shell in order for the changes to be loaded 46 | 47 | See https://click.palletsprojects.com/en/8.1.x/shell-completion/#enabling-completion[the Click documentation] for more details on how-to setup autocompletion. 48 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/index.adoc: -------------------------------------------------------------------------------- 1 | = Project Syn: Commodore 2 | 3 | image::projectsyn.svg[] 4 | 5 | Welcome to the documentation of Commodore - a Project Syn tool. 
6 | 7 | == Overview 8 | 9 | Commodore is a command line tool which provides opinionated tenant-aware 10 | management of https://kapitan.dev/[Kapitan] inventories and templates. 11 | 12 | Commodore uses Kapitan for the heavy lifting of rendering templates and 13 | resolving a hierarchical configuration structure. 14 | 15 | Commodore introduces the concept of a component, which is a bundle of Kapitan 16 | templates and associated Kapitan classes which describe how to render the 17 | templates. Commodore sets up an https://kapitan.dev/inventory/[inventory] by 18 | downloading all required components and symlinking the relevant classes into 19 | the inventory structure which Kapitan understands. 20 | 21 | Components can define dependencies to Jsonnet libraries using https://github.com/jsonnet-bundler/jsonnet-bundler[jsonnet-bundler]. 22 | See xref:commodore:ROOT:dependencies.adoc[Manage Dependencies]. 23 | 24 | Additionally, Commodore allows post-processing the output of Kapitan. This 25 | functionality was first introduced to allow adding namespace information to 26 | the output of `helm template`, as this information isn't applied by Helm when 27 | only templating a chart. 28 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/reference/component-deprecation.adoc: -------------------------------------------------------------------------------- 1 | = Component deprecation 2 | 3 | Commodore supports components being marked as deprecated. 4 | Components can be marked as "deprecated" by adding `deprecated: true` to parameter `parameters.._metadata`. 5 | To avoid allowing the inventory hierarchy to overwrite a component's `_metadata` parameter, it must be labeled as https://github.com/kapicorp/reclass/blob/develop/README-extensions.rst#constant-parameters[constant] by prefixing it with a `=`. 6 | The component template adds the `_metadata` parameter (with no content) for new components. 
7 | 8 | .class/defaults.yml 9 | [source,yaml] 10 | ---- 11 | parameters: 12 | component_name: 13 | =_metadata: 14 | deprecated: true 15 | ---- 16 | 17 | If the component is deprecated in favor of a new component, the new component can be indicated by adding `replaced_by: another-component` in the component's `_metadata` parameter. 18 | In general, the value of `replaced_by` isn't verified to be an existing component. 19 | The component named in `replaced_by` is allowed to take over the deprecated component's library prefix in order to provide a transitional library to make switching components easy. 20 | 21 | .class/defaults.yml 22 | [source,yaml] 23 | ---- 24 | parameters: 25 | component_name: 26 | =_metadata: 27 | deprecated: true 28 | replaced_by: another-component 29 | ---- 30 | 31 | Commodore will append the contents of field `deprecation_notice` in the component's `_metadata` parameter to the deprecation notice. 32 | This field is intended to be used to give extended information about the deprecation. 33 | This could be a link to a migration guide, if a replacement component exists, or simply a link to a longer deprecation notice in the component's documentation. 34 | 35 | .class/defaults.yml 36 | [source,yaml] 37 | ---- 38 | parameters: 39 | component_name: 40 | =_metadata: 41 | deprecated: true 42 | replaced_by: another-component 43 | deprecation_notice: >- 44 | See https://github.com/projectsyn/component-another-component/docs/.../how-tos/migrating-from-component-name.adoc 45 | for a migration guide. 46 | ---- 47 | 48 | Commodore will print a deprecation notice for each component which has `parameters.._metadata.deprecated` set to `true`. 49 | 50 | * If field `replaced_by` in the component's `_metadata` parameter isn't empty, the deprecation notice will use the field's value as the name of the replacement component. 
51 | * If field `deprecation_notice` in the component's `_metadata` parameter isn't empty, the value of the field will be appended to the deprecation notice. 52 | 53 | Components can indicate that they replace another component by setting `_metadata.replaces`. 54 | By providing this information, a component is allowed to use its predecessor's library prefix in the following two cases. 55 | First, the component is allowed to use the additional prefix if its predecessor isn't deployed on the cluster which is being compiled. 56 | Second, the component is allowed to use the additional prefix if its predecessor has set `_metadata.deprecated: true` and `_metadata.replaced_by` to the component's name. 57 | 58 | .class/defaults.yml 59 | [source,yaml] 60 | ---- 61 | parameters: 62 | another-component: 63 | =_metadata: 64 | replaces: component-name 65 | ---- 66 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/reference/deprecation-policy.adoc: -------------------------------------------------------------------------------- 1 | = Deprecation policy 2 | 3 | NOTE: This document isn't finalized yet. 4 | 5 | Starting with Commodore v0.5.0, deprecation and removal of Commodore features will adhere to the deprecation policy for public APIs in https://semver.org/[semantic versioning]: 6 | 7 | * Deprecating a feature must happen in a new minor version (https://semver.org/#spec-item-7[SemVer specification §7]). 8 | * Deprecated features will continue to work in all patch releases of the minor version which introduces the deprecation (https://semver.org/#spec-item-6[SemVer specification §6]). 9 | * Deprecated features will be removed in the next major version released after the minor version which introduces the deprecation (https://semver.org/#spec-item-8[SemVer specification §8]). 
10 | 11 | In addition, Commodore will print warnings ("deprecation notices") to the standard output for all deprecated features which are used by a cluster configuration. 12 | To make the deprecation notices easy to find, they're all collected and are printed after the catalog has been compiled. 13 | 14 | We list all deprecation notices organized by Commodore version in xref:commodore:ROOT:reference/deprecation-notices.adoc[the documentation]. 15 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/reference/hierarchy.adoc: -------------------------------------------------------------------------------- 1 | = Configure the inventory hierarchy 2 | 3 | [abstract] 4 | The https://kapitan.dev/inventory/[Kapitan inventory] used by Commodore builds on top of https://reclass.pantsfullofunix.net/[reclass]. 5 | https://github.com/kapicorp/reclass[The Reclass fork] used by Kapitan allows to https://github.com/kapicorp/reclass/blob/develop/README-extensions.rst#use-references-in-class-names[use references in class names]. 6 | This allows to dynamically build up the configuration hierarchy. 7 | 8 | == Directory structure 9 | 10 | Commodore builds up the Kapitan inventory with the following directory structure: 11 | 12 | ---- 13 | inventory/ 14 | ├─ classes/ 15 | | ├─ components/ 16 | | | └─ … <1> 17 | | ├─ defaults/ 18 | | | └─ … <2> 19 | | ├─ global/ <3> 20 | | | ├─ commodore.yml <4> 21 | | | └─ … 22 | | ├─ params/ 23 | | | └─ cluster.yml <5> 24 | | └─ <6> 25 | | └─ … 26 | └─ targets 27 | ├─ cluster.yml <7> 28 | └─ … <8> 29 | ---- 30 | <1> Symlinks pointing to the class files of the components. 31 | <2> Symlinks pointing to the default values of the components. 32 | <3> Clone of the git repository holding your global configuration hierarchy. 33 | <4> Included by the clusters target and thus the starting point for your inventory hierarchy. 
34 | <5> Holds parameters set by commodore along with cluster facts retrieved from Lieutenant. 35 | Included by the clusters target. 36 | This file is generated by Commodore. 37 | <6> Clone of the git repository holding the configuration hierarchy of the clusters tenant. 38 | <7> A Kapitan target for the cluster. 39 | Used by Commodore for bootstrapping the component targets. 40 | <8> A Kapitan target for each component. 41 | This file is generated by commodore. 42 | 43 | == Available parameters 44 | 45 | To build up your configuration hierarchy, include classes in `commodore.yml` within your global configuration inventory. 46 | The following parameters can be used to make your hierarchy dynamic based on cluster facts. 47 | 48 | [source,yaml] 49 | ---- 50 | parameters: 51 | cluster: 52 | catalog_url: "ssh://git@…" <1> 53 | name: "c-…" <2> 54 | tenant: "t-…" <3> 55 | facts: { … } <4> 56 | ---- 57 | <1> The git URL of the clusters manifest catalog. 58 | <2> The clusters Lieutenant id. 59 | <3> The Lieutenant id of the clusters tenant. 60 | <4> Dictionary of all the clusters facts set at Lieutenant. 61 | 62 | [IMPORTANT] 63 | ==== 64 | When using to include classes, parameters can not be defined in the same file they're referenced . 65 | They must be defined within files already processed and therefore are higher up in the hierarchy. 66 | ==== 67 | 68 | == Defining the hierarchy 69 | 70 | Based on the above, you are free to define your hierarchy according to your needs. 71 | The following are examples. 72 | They showcase a sensible foundation to start. 73 | It also showcases what's possible. 74 | Also take note of the https://github.com/projectsyn/commodore-defaults[commodore-defaults], a global configuration repository used in tutorials. 
75 | 76 | From commodore we get the following base hierarchy: 77 | 78 | * Component default parameter values 79 | * Cluster parameter values 80 | * `commodore.yml` within the global configuration repository 81 | 82 | Within `commodore.yml`, we then can include further classes and build up the hierarchy. 83 | The hierarchy could then be extended by configurations related to a: 84 | 85 | * Kubernetes distribution 86 | * Cloud provider 87 | * Cloud provider region 88 | * Tenant 89 | * Cluster 90 | 91 | The distribution and cloud provider are the levels to most likely include Commodore components. 92 | 93 | [#reclass_precedence] 94 | .Parameter precedence in Reclass 95 | [CAUTION] 96 | ==== 97 | Reclass follows a node centric model where included classes are considered to be parents. 98 | Parameters within a class will override the one from included classes. 99 | See https://reclass.pantsfullofunix.net/concepts.html[reclass concepts]. 100 | 101 | This is somewhat counter intuitive to how Reclass is used in this context. 102 | In order to prevent confusion, `classes` and `parameters` shouldn't be defined within the same file. 103 | It's suggested to have parameters defined in dedicated `params.yml` files. 104 | ==== 105 | 106 | === Examples 107 | 108 | .commodore.yml 109 | [source,yaml] 110 | ---- 111 | classes: 112 | - global.distribution.${facts:distribution} 113 | - global.cloud.${facts:cloud} 114 | - global.cloud.${facts:cloud}.${facts:region} 115 | - ${cluster:tenant}.${cluster:name} 116 | ---- 117 | 118 | Reclass requires parameters to be defined when used in references. 119 | When building a hierarchy that has optional parts, one must be creative. 120 | You might work with clouds that don't know the concept of a region. 121 | So instead of including the region in `commodore.yml`, include it only for those clouds that have regions. 
122 | 123 | .commodore.yml 124 | [source,yaml] 125 | ---- 126 | classes: 127 | - global.distribution.${facts:distribution} 128 | - global.cloud.${facts:cloud} 129 | - ${cluster:tenant}.${cluster:name} 130 | ---- 131 | 132 | .cloud/cloud_without_regions.yml 133 | [source,yml] 134 | ---- 135 | classes: [] 136 | ---- 137 | 138 | .cloud/cloud_with_regions.yml 139 | [source,yml] 140 | ---- 141 | classes: 142 | - global.cloud.cloud_with_regions.params <1> 143 | - global.cloud.cloud_with_regions.${facts:region} 144 | ---- 145 | <1> Parameters for `cloud_with_regions` are define in a dedicated parameters file. 146 | See <> for why this is. 147 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/reference/kube-libjsonnet.adoc: -------------------------------------------------------------------------------- 1 | = `kube.lib(j)sonnet` 2 | 3 | Commodore ensures that the https://github.com/bitnami-labs/kube-libsonnet/blob/master/kube.libsonnet[`bitnami-labs/kube-libsonnet/kube.libsonnet`] file is available as `lib/kube.libjsonnet`. 4 | 5 | As the upstream project isn't actively maintained, the Commodore maintainers update the file to keep it compatible with the latest Kubernetes versions. 6 | The file was originally imported from release https://github.com/bitnami-labs/kube-libsonnet/releases/tag/v1.19.0[`v1.19.0`]. 7 | 8 | It's planned to switch to https://github.com/jsonnet-libs/k8s-libsonnet[`jsonnet-libs/k8s-libsonnet`] in the future. 9 | A migration guide will be provided. 10 | 11 | [IMPORTANT] 12 | It's not possible to override the provided `kube.libsonnet` using a component provided Jsonnet Bundler file. 13 | 14 | == Changes to the original `kube.libsonnet` 15 | 16 | === CronJob 17 | 18 | `apiVersion` is updated to `batch/v1` from `batch/v1beta1`. 19 | 20 | === PodDisruptionBudget 21 | 22 | `apiVersion` is updated to `policy/v1` from `policy/v1beta1`. 
23 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/reference/parameters.adoc: -------------------------------------------------------------------------------- 1 | = Commodore-managed inventory parameters 2 | 3 | This page provides a reference for all inventory parameters which are injected into the hierarchy by Commodore. 4 | The parameters are injected using the class `params.cluster`. 5 | This class is created by Commodore in file `inventory/classes/params/cluster.yml`. 6 | 7 | The class is included in each Kapitan target with the lowest precedence of all classes. 8 | 9 | == Global parameters 10 | 11 | === `cluster` 12 | 13 | The key `cluster` holds the following information about the cluster and its tenant: 14 | 15 | `name`:: 16 | The cluster's ID (the name of the cluster object managed by Lieutenant). 17 | `display_name`:: 18 | The cluster's display name. 19 | `tenant`:: 20 | The ID of the cluster's tenant. 21 | `tenant_display_name`:: 22 | The display name of the cluster's tenant. 23 | `catalog_url`:: 24 | The cluster catalog Git repository URL. 25 | 26 | === `facts` 27 | 28 | The cluster's static facts, as stored in the cluster's Lieutenant object. 29 | 30 | The following facts are mandatory: 31 | 32 | `cloud`:: The cloud provider on which the cluster is installed. 33 | `region`:: 34 | The cloud region on which the cluster is installed. 35 | Mandatory only for clouds which have multiple regions. 36 | `distribution`:: 37 | The Kubernetes distribution of the cluster. 38 | 39 | === `dynamic_facts` 40 | 41 | The cluster's dynamic facts as reported by Steward on the cluster. 42 | 43 | There are currently no mandatory dynamic facts. 44 | 45 | [NOTE] 46 | ==== 47 | Components shouldn't directly consume dynamic facts. 48 | Instead, components should expose a parameter which can be configured with a dynamic fact if information from dynamic facts should be used in a component. 
49 | This minimizes the coupling between components and the presence of dynamic facts, and allows components to degrade gracefully when a dynamic fact is missing for a Project Syn installation. 50 | 51 | This could look something like 52 | 53 | .Component `defaults.yml` 54 | [source,yaml] 55 | ---- 56 | parameters: 57 | component_name: 58 | kubernetes_version: '1.20' <1> 59 | ---- 60 | <1> The component defaults to K8s version 1.20 when the parameter isn't overwritten in the configuration hierarchy 61 | 62 | .Project Syn global configuration repository 63 | [source,yaml] 64 | ---- 65 | parameters: 66 | component_name: 67 | kubernetes_version: '${dynamic_facts:kubernetesVersion:major}.${dynamic_facts:kubernetesVersion:minor}' <1> 68 | ---- 69 | <1> The parameter is overwritten using dynamic facts in the Project Syn installation's global configuration repository. 70 | ==== 71 | 72 | == Component-specific parameters 73 | 74 | Commodore adds some "meta-parameters" to each component's Kapitan target. 75 | These are provided to simplify component configurations. 76 | 77 | Commodore provides the following component-specific top-level parameters 78 | 79 | === `_base_directory` 80 | 81 | This parameter provides the absolute path to the component's base directory. 82 | This parameter is intended for component authors to use in `kapitan.compile` and `kapitan.dependencies` entries when referencing files in the component directory. 83 | 84 | === `_kustomize_wrapper` 85 | 86 | This parameter provides the absolute path to the Kustomize wrapper script bundled with Commodore. 87 | This parameter is intended for component authors to use to call Kustomize in components. 88 | See the xref:syn:ROOT:explanations/commodore-components/kustomizations.adoc[Kustomization best practices] for more details. 
89 | -------------------------------------------------------------------------------- /docs/modules/ROOT/pages/tutorial/package.adoc: -------------------------------------------------------------------------------- 1 | = Tutorial: Writing and using Commodore Packages 2 | 3 | This tutorial will guide you through the steps to create and use a Commodore Package. 4 | Commodore Packages allow you to distribute configuration for one or more components. 5 | 6 | We will write a simple package deploying https://github.com/projectsyn/component-cert-manager[Component cert-manager] and bundling some custom defaults. 7 | We then look at adding this package to a cluster. 8 | 9 | We assume that you understand the basics of Project Syn and how to use it, have a working Commodore installation, and that you have access to a Lieutenant instance and a Syn managed cluster. 10 | If this isn't the case, first work through the https://syn.tools/syn/tutorials/getting-started.html[Getting Started guide] and the tutorial on xref:tuto:ROOT:index.adoc[Writing a Component]. 11 | 12 | 13 | == Creating a Commodore Package 14 | 15 | First let's bootstrap a package using Commodore. 16 | Switch to an exiting Commodore working directory or a new empty directory and run: 17 | 18 | [source,bash] 19 | ---- 20 | commodore package new cert-manager --owner ${GITHUB_USERNAME} 21 | ---- 22 | 23 | The new package is now located at `dependencies/pkg.cert-manager`. 24 | 25 | [source,bash] 26 | ---- 27 | cd dependencies/pkg.cert-manager 28 | tree 29 | ---- 30 | 31 | You'll see a new, mostly empty repository with some boilerplate. 32 | Let's add some functionality by adding a new class `common.yml` with the following content. 
33 | 34 | .`common.yml` 35 | [source,yaml] 36 | ---- 37 | applications: 38 | - cert-manager 39 | 40 | parameters: 41 | components: 42 | cert-manager: 43 | url: https://github.com/projectsyn/component-cert-manager.git 44 | version: v3.0.2 45 | 46 | cert_manager: 47 | letsencrypt_email: cert@example.com 48 | ---- 49 | 50 | This class enables the component cert-manager and configures a custom default for the letsencrypt email. 51 | A class can contain pretty much arbitrary configuration that will be added as-is to a cluster configuration that imports the class. 52 | 53 | Next you can push the package to GitHub. 54 | By default the origin will be `github.com/${GITHUB_USERNAME}/package-cert-manager`. 55 | Make sure to create a new project on GitHub with that name and push to it. 56 | 57 | [source,bash] 58 | ---- 59 | git add . 60 | git commit -m "Add component cert-manager" 61 | git push --set-upstream origin master 62 | ---- 63 | 64 | == Using a Commodore Package 65 | With the package published on GitHub we can deploy it on our cluster. 66 | 67 | Switch to the tenant repository of your Syn managed cluster and add the following to the cluster configuration: 68 | 69 | [source,yaml] 70 | ---- 71 | classes: 72 | - cert-manager.common <3> 73 | applications: 74 | - pkg.cert-manager <2> 75 | parameters: 76 | packages: 77 | cert-manager: <1> 78 | url: https://github.com/${GITHUB_USERNAME}/package-cert-manager.git 79 | version: master 80 | ---- 81 | <1> Package dependencies are specified similarly to Component dependencies. 82 | <2> We need to enable the package as part of the applications. 83 | This gives us access to the provided classes. 84 | <3> We can now import any of the classes in the package as if they're locally available or in the global defaults. 85 | 86 | This will add the component cert-manager to the cluster and configure the letsencrypt email to `cert@example.com`. 
87 | 88 | [NOTE] 89 | ==== 90 | During catalog compilation, after the message `Discovering config packages...`, you'll see several warnings like `[WARNING] Reclass class not found: 'cert-manager.common'. Skipped!`. 91 | This is expected, as Commodore has to render the inventory in order to be able to read the package's Git repository URL and version, which are stored in the inventory. 92 | ==== 93 | 94 | == Conclusion 95 | 96 | This package is very much a toy example, but I hope it has shown you how we can use Commodore Packages to bundle common configuration for one or more Components and simplify configuration reuse. 97 | 98 | As a next step to writing your own package, consult our https://syn.tools/syn/explanations/commodore-packages.html[best practices] or look at existing packages such as https://github.com/projectsyn/package-monitoring[package-monitoring], the package for our vendor independent monitoring stack. 99 | -------------------------------------------------------------------------------- /docs/modules/ROOT/partials/nav-explanation.adoc: -------------------------------------------------------------------------------- 1 | * xref:commodore:ROOT:explanation/local-mode.adoc[Local Mode] 2 | * xref:commodore:ROOT:explanation/dependencies.adoc[Manage Dependencies] 3 | * xref:commodore:ROOT:explanation/running-commodore.adoc[Running Commodore] 4 | * xref:commodore:ROOT:explanation/migrate-kapitan-0.29-0.30.adoc[Migrating from Kapitan 0.29 to 0.30] 5 | * xref:commodore:ROOT:explanation/compilation-metadata.adoc[] 6 | -------------------------------------------------------------------------------- /docs/modules/ROOT/partials/nav-howtos.adoc: -------------------------------------------------------------------------------- 1 | * xref:commodore:ROOT:how-to/local-mode-component.adoc[] 2 | * xref:commodore:ROOT:how-to/shell-completion.adoc[] 3 | -------------------------------------------------------------------------------- 
/docs/modules/ROOT/partials/nav-reference.adoc: -------------------------------------------------------------------------------- 1 | * xref:commodore:ROOT:reference/concepts.adoc[Concepts] 2 | * xref:commodore:ROOT:reference/architecture.adoc[Architecture] 3 | * User interface 4 | ** xref:commodore:ROOT:reference/commands.adoc[Commands] 5 | ** xref:commodore:ROOT:reference/cli.adoc[Command Line Options] 6 | * Configuration hierarchy 7 | ** xref:commodore:ROOT:reference/hierarchy.adoc[Inventory Hierarchy] 8 | ** xref:commodore:ROOT:reference/parameters.adoc[Commodore-managed inventory parameters] 9 | * Feature deprecation 10 | ** xref:commodore:ROOT:reference/deprecation-policy.adoc[Deprecation policy] 11 | ** xref:commodore:ROOT:reference/deprecation-notices.adoc[Deprecation notices] 12 | ** xref:commodore:ROOT:reference/component-deprecation.adoc[Component deprecation] 13 | * xref:commodore:ROOT:reference/commodore-libjsonnet.adoc[`commodore.libjsonnet` API reference] 14 | * xref:commodore:ROOT:reference/kube-libjsonnet.adoc[`kube.libjsonnet`] 15 | -------------------------------------------------------------------------------- /docs/modules/ROOT/partials/nav-tutorials.adoc: -------------------------------------------------------------------------------- 1 | * xref:tuto:ROOT:index.adoc[Writing a Component] 2 | * xref:commodore:ROOT:tutorial/package.adoc[Writing a Package] 3 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "syn-commodore" 3 | version = "v0.0.0" 4 | description = "Commodore provides opinionated tenant-aware management of Kapitan inventories and templates. Commodore uses Kapitan for the heavy lifting of rendering templates and resolving a hierachical configuration structure." 
5 | readme = "README.md" 6 | authors = ["VSHN AG "] 7 | license = "BSD-3-Clause" 8 | homepage = "https://github.com/projectsyn/commodore" 9 | documentation = "https://syn.tools/commodore/index.html" 10 | packages = [ 11 | {include = "commodore"} 12 | ] 13 | include = [ 14 | "commodore/lib/commodore.libjsonnet", 15 | "commodore/lib/kube.libsonnet", 16 | "commodore/lib/kube.libjsonnet", 17 | "commodore/lib/kube-libsonnet/kube.libsonnet", 18 | "commodore/filters/helm_namespace.jsonnet", 19 | "commodore/scripts/run-kustomize", 20 | ] 21 | 22 | [tool.poetry.dependencies] 23 | python = ">=3.10, <3.13" 24 | # NOTE: We restrict boto3/botocore versions to reduce complexity of Poetry's 25 | # dependency resolution significantly, cf. 26 | # https://github.com/orgs/python-poetry/discussions/8165#discussioncomment-6387378 27 | # We put those dependencies first to constrain the Poetry dependency solver 28 | # which processes dependencies in the order they're listed. Without these 29 | # restrictions, the broad required boto3 dependency in Kapitan makes 30 | # dependency resolution very slow. 
31 | boto3 = "^1.26.145" 32 | botocore="^1.29.145" 33 | kapitan = "0.34.6" 34 | click = "8.2.1" 35 | gitpython = "3.1.44" 36 | requests = "2.32.3" 37 | url-normalize = "2.2.1" 38 | python-dotenv = "1.1.0" 39 | pyxdg = "0.28" 40 | cruft = "2.16.0" 41 | oauthlib = "3.2.2" 42 | pyjwt = "2.10.1" 43 | PyGithub = "2.6.1" 44 | reclass-rs = "0.8.0" 45 | gojsonnet = "0.21.0" 46 | 47 | [tool.poetry.dev-dependencies] 48 | tox = "3.28.0" 49 | pytest = "8.3.5" 50 | pytest-xdist = "3.6.1" 51 | pytest-benchmark = "5.1.0" 52 | responses = "0.25.7" 53 | black = "25.1.0" 54 | pyfakefs = "5.8.0" 55 | pytest-cov = "6.1.1" 56 | pylint = "3.3.7" 57 | types-toml = "0.10.8.20240310" 58 | examples = "1.0.2" 59 | pytest-mock = "^3.8.2" 60 | 61 | [tool.poetry.scripts] 62 | commodore = 'commodore.cli:main' 63 | kapitan = 'kapitan.cli:main' 64 | 65 | [build-system] 66 | requires = ["poetry>=0.12"] 67 | build-backend = "poetry.masonry.api" 68 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base", 4 | ":gitSignOff", 5 | ":disableDependencyDashboard" 6 | ], 7 | "poetry": { 8 | "enabled": true 9 | }, 10 | "lockFileMaintenance": { 11 | "enabled": true, 12 | "labels": ["ignore"], 13 | "automerge": true, 14 | "automergeType": "pr", 15 | "platformAutomerge": true 16 | }, 17 | "labels": [ 18 | "dependency" 19 | ], 20 | "ignoreDeps": [ 21 | "boto3", 22 | "botocore" 23 | ], 24 | "regexManagers": [ 25 | { 26 | "fileMatch": ["^Dockerfile$"], 27 | "matchStrings": ["ARG GO_VERSION=(?.*?)\\n"], 28 | "extractVersionTemplate": "^go(?.*)$", 29 | "datasourceTemplate": "github-tags", 30 | "depNameTemplate": "golang/go" 31 | }, 32 | { 33 | "fileMatch": ["^Dockerfile$"], 34 | "matchStrings": ["ARG KUSTOMIZE_VERSION=(?.*?)\\n"], 35 | "extractVersionTemplate": "^kustomize/v(?.*)$", 36 | "datasourceTemplate": "github-releases", 37 | "depNameTemplate": 
"kubernetes-sigs/kustomize" 38 | }, 39 | { 40 | "fileMatch": ["^Dockerfile$"], 41 | "matchStrings": ["ARG POETRY_VERSION=(?.*?)\\n"], 42 | "datasourceTemplate": "github-releases", 43 | "depNameTemplate": "python-poetry/poetry" 44 | }, 45 | { 46 | "fileMatch": ["^Dockerfile$"], 47 | "matchStrings": ["ARG JSONNET_BUNDLER_VERSION=(?.*?)\\n"], 48 | "datasourceTemplate": "github-releases", 49 | "depNameTemplate": "projectsyn/jsonnet-bundler" 50 | } 51 | ], 52 | "packageRules": [ 53 | { 54 | "matchPackageNames": ["docker.io/python"], 55 | "separateMajorMinor": true, 56 | "separateMinorPatch": true, 57 | "separateMultipleMajor": true, 58 | "separateMultipleMinor": true 59 | }, 60 | { 61 | "matchManagers": ["poetry"], 62 | "matchDepNames": ["python"], 63 | "enabled": false 64 | } 65 | ] 66 | } 67 | -------------------------------------------------------------------------------- /tests/bench_gitrepo.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | from commodore import gitrepo 6 | 7 | from test_gitrepo import setup_remote 8 | 9 | 10 | @pytest.mark.bench 11 | def bench_component_checkout(benchmark, tmp_path: Path): 12 | repo_url, _ = setup_remote(tmp_path) 13 | r = gitrepo.GitRepo(repo_url, tmp_path / "local", force_init=True) 14 | benchmark(r.checkout) 15 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Shared test fixtures for all tests 3 | See the pytest docs for more details: 4 | https://docs.pytest.org/en/latest/how-to/fixtures.html#scope-sharing-fixtures-across-classes-modules-packages-or-session 5 | """ 6 | 7 | from __future__ import annotations 8 | 9 | import multiprocessing 10 | import os 11 | 12 | from pathlib import Path 13 | from typing import Protocol 14 | 15 | import pytest 16 | 17 | from click.testing import CliRunner, 
Result 18 | from git import Repo 19 | 20 | from commodore import cli 21 | from commodore.config import Config 22 | from commodore.gitrepo import GitRepo 23 | 24 | 25 | class RunnerFunc(Protocol): 26 | def __call__(self, args: list[str]) -> Result: ... 27 | 28 | 29 | # For gojsonnet we must use start_method "spawn" for multiprocessing so that the tests don't break 30 | # with pytest-xdist. Since we also use start_method "spawn" in Commodore, this should be fine. We 31 | # set this via a session-scoped autouse fixture, so it's set once when the PyTest session starts. 32 | @pytest.fixture(autouse=True, scope="session") 33 | def init_env(): 34 | multiprocessing.set_start_method("spawn") 35 | 36 | 37 | @pytest.fixture(autouse=True) 38 | def gitconfig(tmp_path: Path) -> Path: 39 | """Ensure that tests have a predictable empty gitconfig. 40 | 41 | We set autouse=True, so that the fixture is automatically used for all 42 | tests. Tests that want to access the mock gitconfig can explicitly specify 43 | the fixture, so they get the path to the mock gitconfig. 
44 | """ 45 | os.environ["GIT_CONFIG_NOSYSTEM"] = "true" 46 | os.environ["HOME"] = str(tmp_path) 47 | os.environ["XDG_CONFIG_HOME"] = str(tmp_path / ".config") 48 | gitconfig = tmp_path / ".config" / "git" / "config" 49 | os.makedirs(gitconfig.parent, exist_ok=True) 50 | 51 | return gitconfig 52 | 53 | 54 | @pytest.fixture 55 | def cli_runner() -> RunnerFunc: 56 | r = CliRunner() 57 | return lambda args: r.invoke(cli.commodore, args) 58 | 59 | 60 | @pytest.fixture 61 | def config(tmp_path): 62 | """ 63 | Setup test Commodore config 64 | """ 65 | 66 | return Config( 67 | tmp_path, 68 | api_url="https://syn.example.com", 69 | api_token="token", 70 | username="John Doe", 71 | usermail="john.doe@example.com", 72 | ) 73 | 74 | 75 | @pytest.fixture 76 | def api_data(): 77 | """ 78 | Setup test Lieutenant API data 79 | """ 80 | 81 | tenant = { 82 | "id": "t-foo", 83 | "displayName": "Foo Inc.", 84 | } 85 | cluster = { 86 | "id": "c-bar", 87 | "displayName": "Foo Inc. Bar Cluster", 88 | "tenant": tenant["id"], 89 | "facts": { 90 | "distribution": "rancher", 91 | "cloud": "cloudscale", 92 | }, 93 | "dynamicFacts": { 94 | "kubernetes_version": { 95 | "major": "1", 96 | "minor": "21", 97 | "gitVersion": "v1.21.3", 98 | } 99 | }, 100 | "gitRepo": { 101 | "url": "ssh://git@git.example.com/cluster-catalogs/mycluster", 102 | }, 103 | } 104 | return { 105 | "cluster": cluster, 106 | "tenant": tenant, 107 | } 108 | 109 | 110 | class MockMultiDependency: 111 | _repo: Repo 112 | _components: dict[str, Path] 113 | _packages: dict[str, Path] 114 | 115 | def __init__(self, repo: Repo): 116 | self._repo = repo 117 | self._components = {} 118 | self._packages = {} 119 | 120 | def register_component(self, name: str, target_dir: Path): 121 | assert name not in self._components 122 | self._components[name] = target_dir 123 | 124 | def checkout_component(self, name, version): 125 | assert name in self._components 126 | assert version == "master" 127 | self._repo.clone(self._components[name]) 
128 | 129 | def register_package(self, name: str, target_dir: Path): 130 | assert name not in self._packages 131 | self._packages[name] = target_dir 132 | 133 | def checkout_package(self, name, version): 134 | assert name in self._packages 135 | assert version == "master" 136 | self._repo.clone(self._packages[name]) 137 | 138 | def get_component(self, name) -> Path: 139 | return self._components[name] 140 | 141 | @property 142 | def bare_repo(self) -> GitRepo: 143 | return self._repo 144 | 145 | 146 | @pytest.fixture 147 | def mockdep(tmp_path): 148 | return MockMultiDependency(Repo.init(tmp_path / "repo.git")) 149 | 150 | 151 | class MockTemplater: 152 | def __init__(self): 153 | self.template_version = None 154 | self.test_cases = [] 155 | 156 | def update(self, *args, **kwargs): 157 | pass 158 | 159 | 160 | def make_mock_templater(mock_templater, expected_path): 161 | mt = MockTemplater() 162 | 163 | def mock_from_existing(_config: Config, path: Path): 164 | assert path == expected_path 165 | return mt 166 | 167 | mock_templater.from_existing = mock_from_existing 168 | 169 | return mt 170 | -------------------------------------------------------------------------------- /tests/jsonnet/envList.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "name": "VAR1", 4 | "value": "aaa" 5 | }, 6 | { 7 | "name": "VAR2", 8 | "valueFrom": { 9 | "configMapRef": { 10 | "name": "test", 11 | "key": "var2" 12 | } 13 | } 14 | }, 15 | { 16 | "name": "VAR3", 17 | "value": null 18 | } 19 | ] 20 | -------------------------------------------------------------------------------- /tests/jsonnet/envList.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | com.envList({ 4 | VAR1: 'aaa', 5 | VAR2: { configMapRef: { name: 'test', key: 'var2' } }, 6 | VAR3: null, 7 | }) 8 | 
-------------------------------------------------------------------------------- /tests/jsonnet/fixupDir.json: -------------------------------------------------------------------------------- 1 | { 2 | "test0": [ 3 | { 4 | "metadata": { 5 | "name": "obj1", 6 | "annotations": { 7 | "patched": "true" 8 | } 9 | }, 10 | "spec": { 11 | "a": "a", 12 | "b": "b", 13 | "c": "c", 14 | "d": "d" 15 | } 16 | } 17 | ], 18 | "test1": [ 19 | { 20 | "metadata": { 21 | "name": "obj1", 22 | "annotations": { 23 | "patched": "true" 24 | } 25 | }, 26 | "spec": { 27 | "a": "a", 28 | "b": "b", 29 | "c": "c", 30 | "d": "d" 31 | } 32 | }, 33 | { 34 | "metadata": { 35 | "name": "obj2", 36 | "namespace": "foo", 37 | "annotations": { 38 | "patched": "true" 39 | } 40 | }, 41 | "spec": 5 42 | }, 43 | { 44 | "metadata": { 45 | "name": "obj3", 46 | "namespace": "test", 47 | "annotations": { 48 | "patched": "true" 49 | } 50 | }, 51 | "spec": {"a": [1, 2, 3], "b": [4, 5, 6]} 52 | } 53 | ], 54 | "test2": [ 55 | { 56 | "metadata": { 57 | "name": "obj4", 58 | "namespace": "test", 59 | "annotations": { 60 | "patched": "true" 61 | } 62 | }, 63 | "spec": { 64 | "list": [1] 65 | } 66 | }, 67 | { 68 | "metadata": {"name": "obj5", 69 | "annotations": { 70 | "patched": "true" 71 | } 72 | }, 73 | "spec": {"value": "aaa"} 74 | } 75 | ] 76 | } 77 | -------------------------------------------------------------------------------- /tests/jsonnet/fixupDir.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | local fixup(obj) = 4 | obj { 5 | metadata+: { 6 | annotations+: { 7 | patched: 'true', 8 | }, 9 | }, 10 | }; 11 | 12 | com.fixupDir(std.extVar('work_dir'), fixup) 13 | -------------------------------------------------------------------------------- /tests/jsonnet/generateResources.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "metadata": { 4 | "name": "obj1" 5 | 
}, 6 | "spec": { 7 | "a": 1, 8 | "b": 2 9 | } 10 | } 11 | ] 12 | -------------------------------------------------------------------------------- /tests/jsonnet/generateResources.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | local input = { 4 | obj1: { spec: { a: 1, b: 2 } }, 5 | obj2: null, 6 | }; 7 | 8 | local objFn(name) = { 9 | metadata: { 10 | name: name, 11 | }, 12 | }; 13 | 14 | com.generateResources(input, objFn) 15 | -------------------------------------------------------------------------------- /tests/jsonnet/getValueOrDefault.json: -------------------------------------------------------------------------------- 1 | { 2 | "v1": 1, 3 | "v2": "1" 4 | } 5 | -------------------------------------------------------------------------------- /tests/jsonnet/getValueOrDefault.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | local data = { 4 | a: 1, 5 | b: 'bbb', 6 | c: [ 1, 2, 3 ], 7 | d: { 8 | e: { 9 | f: { 10 | g: 'ggg', 11 | }, 12 | h: 'hhh', 13 | }, 14 | }, 15 | }; 16 | 17 | { 18 | v1: com.getValueOrDefault(data, 'a', '1'), 19 | v2: com.getValueOrDefault(data, 'A', '1'), 20 | } 21 | -------------------------------------------------------------------------------- /tests/jsonnet/inventory.json: -------------------------------------------------------------------------------- 1 | { 2 | "parameters": { 3 | "param1": { 4 | "a": "a", 5 | "b": "b" 6 | }, 7 | "param2": { 8 | "c": "c", 9 | "d": "d" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tests/jsonnet/inventory.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | com.inventory() 4 | 
-------------------------------------------------------------------------------- /tests/jsonnet/inventory.yaml: -------------------------------------------------------------------------------- 1 | parameters: 2 | param1: 3 | a: a 4 | b: b 5 | param2: 6 | c: c 7 | d: d 8 | -------------------------------------------------------------------------------- /tests/jsonnet/list_dir.json: -------------------------------------------------------------------------------- 1 | [ 2 | "test0.yaml", 3 | "test1.yaml", 4 | "test2.yaml" 5 | ] 6 | -------------------------------------------------------------------------------- /tests/jsonnet/list_dir.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | std.sort(com.list_dir(std.extVar('work_dir'))) 4 | -------------------------------------------------------------------------------- /tests/jsonnet/makeMergeable.json: -------------------------------------------------------------------------------- 1 | { 2 | "o0": { "base": { "v2": "test" }}, 3 | "o1": { "base": { "v": "value", "v2": "test" }}, 4 | "o2": { 5 | "base": "test", 6 | "nestedArr": [ 1, 2, 3, 4 ], 7 | "nestedObj": { 8 | "a": "aaa", 9 | "b": "bbb" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tests/jsonnet/makeMergeable.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | { 4 | o0: { base: { v: 'value' } } + { base: { v2: 'test' } }, 5 | o1: { base: { v: 'value' } } + com.makeMergeable({ base: { v2: 'test' } }), 6 | o2: { 7 | base: 'value', 8 | nestedArr: [ 1, 2, 3 ], 9 | nestedObj: { a: 'aaa' }, 10 | } + com.makeMergeable({ 11 | base: 'test', 12 | nestedArr: [ 4 ], 13 | nestedObj: { 14 | b: 'bbb', 15 | }, 16 | }), 17 | } 18 | -------------------------------------------------------------------------------- 
/tests/jsonnet/namespaced.json: -------------------------------------------------------------------------------- 1 | { 2 | "ns1": { 3 | "metadata": { 4 | "annotations": { 5 | "a": "1" 6 | }, 7 | "namespace": "test" 8 | } 9 | }, 10 | "ns2": { 11 | "metadata": { 12 | "namespace": "test" 13 | } 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /tests/jsonnet/namespaced.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | { 4 | ns1: com.namespaced( 5 | 'test', 6 | { 7 | metadata: { 8 | annotations: { 9 | a: '1', 10 | }, 11 | }, 12 | } 13 | ), 14 | ns2: com.namespaced( 15 | 'test', 16 | { metadata: { namespace: 'other' } } 17 | ), 18 | } 19 | -------------------------------------------------------------------------------- /tests/jsonnet/noProxyVars.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /tests/jsonnet/noProxyVars.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | com.proxyVars 4 | -------------------------------------------------------------------------------- /tests/jsonnet/noProxyVars.yaml: -------------------------------------------------------------------------------- 1 | parameters: 2 | global: {} 3 | -------------------------------------------------------------------------------- /tests/jsonnet/proxyVars.json: -------------------------------------------------------------------------------- 1 | { 2 | "HTTP_PROXY": "http://proxy.example.com:8000/", 3 | "http_proxy": "http://proxy.example.com:8000/", 4 | "HTTPS_PROXY": "https://proxy.example.com:8443/", 5 | "https_proxy": "https://proxy.example.com:8443/", 6 | "NO_PROXY": "10.0.0.0/8", 7 | "no_proxy": "10.0.0.0/8" 8 | } 9 | 
-------------------------------------------------------------------------------- /tests/jsonnet/proxyVars.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | com.proxyVars 4 | -------------------------------------------------------------------------------- /tests/jsonnet/proxyVars.yaml: -------------------------------------------------------------------------------- 1 | parameters: 2 | global: 3 | http_proxy: http://proxy.example.com:8000/ 4 | https_proxy: https://proxy.example.com:8443/ 5 | no_proxy: "10.0.0.0/8" 6 | -------------------------------------------------------------------------------- /tests/jsonnet/renderArray.json: -------------------------------------------------------------------------------- 1 | { 2 | "a1": [ "a", "c" ], 3 | "a2": [ "a", "b", "c" ], 4 | "a3": [ "a", "c" ] 5 | } 6 | -------------------------------------------------------------------------------- /tests/jsonnet/renderArray.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | { 4 | a1: com.renderArray([ 'a', 'b', 'c', '~b', 'c' ]), 5 | a2: com.renderArray([ 'a', 'b', 'c', '~b', 'c', 'b' ]), 6 | a3: com.renderArray([ 'c', 'a' ]), 7 | } 8 | -------------------------------------------------------------------------------- /tests/jsonnet/yaml_load.json: -------------------------------------------------------------------------------- 1 | { 2 | "f0": { 3 | "metadata": { 4 | "name": "obj1" 5 | }, 6 | "spec": { 7 | "a": "a", 8 | "b": "b", 9 | "c": "c", 10 | "d": "d" 11 | } 12 | }, 13 | "f1": [ 14 | { 15 | "metadata": { 16 | "name": "obj1" 17 | }, 18 | "spec": { 19 | "a": "a", 20 | "b": "b", 21 | "c": "c", 22 | "d": "d" 23 | } 24 | }, 25 | { 26 | "metadata": { 27 | "name": "obj2", 28 | "namespace": "foo" 29 | }, 30 | "spec": 5 31 | }, 32 | { 33 | "metadata": { 34 | "name": "obj3", 35 | "namespace": 
"test" 36 | }, 37 | "spec": {"a": [1, 2, 3], "b": [4, 5, 6]} 38 | } 39 | ], 40 | "f2": [ 41 | { 42 | "metadata": { 43 | "name": "obj4", 44 | "namespace": "test" 45 | }, 46 | "spec": { 47 | "list": [1] 48 | } 49 | }, 50 | { 51 | "metadata": {"name": "obj5"}, 52 | "spec": {"value": "aaa"} 53 | } 54 | ] 55 | } 56 | -------------------------------------------------------------------------------- /tests/jsonnet/yaml_load.jsonnet: -------------------------------------------------------------------------------- 1 | local com = import 'lib/commodore.libjsonnet'; 2 | 3 | local work_dir = std.extVar('work_dir'); 4 | 5 | { 6 | f0: com.yaml_load(work_dir + '/test0.yaml'), 7 | f1: com.yaml_load_all(work_dir + '/test1.yaml'), 8 | f2: com.yaml_load_all(work_dir + '/test2.yaml'), 9 | } 10 | -------------------------------------------------------------------------------- /tests/mock_gitrepo.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import Optional 3 | 4 | 5 | class Head: 6 | def __init__(self): 7 | self._reference = None 8 | self.call_counts = { 9 | "reference": 0, 10 | "reference.setter": 0, 11 | "reset": 0, 12 | } 13 | 14 | @property 15 | def reference(self): 16 | self.call_counts["reference"] += 1 17 | return self._reference 18 | 19 | @reference.setter 20 | def reference(self, ref): 21 | self.call_counts["reference.setter"] += 1 22 | self._reference = ref 23 | 24 | def reset(self, **kwargs): 25 | self.call_counts["reset"] += 1 26 | pass 27 | 28 | 29 | class Repo: 30 | def __init__(self): 31 | self.head = Head() 32 | 33 | 34 | class GitRepo: 35 | def __init__( 36 | self, 37 | remote: str, 38 | targetdir: Path, 39 | force_init=False, 40 | author_name: Optional[str] = None, 41 | author_email: Optional[str] = None, 42 | config=None, 43 | ): 44 | self.remote = remote 45 | self.targetdir = targetdir 46 | self.config = config 47 | self.repo = Repo() 48 | self.version = None 49 | 50 | 
self.call_counts = { 51 | "commit": 0, 52 | "checkout": 0, 53 | } 54 | 55 | def checkout(self, rev): 56 | self.call_counts["checkout"] += 1 57 | self.version = rev 58 | 59 | def commit(self, rev): 60 | self.call_counts["commit"] += 1 61 | return rev 62 | -------------------------------------------------------------------------------- /tests/test_cli.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for command line interface (CLI) 3 | """ 4 | 5 | from subprocess import call 6 | 7 | 8 | def test_runas_module(): 9 | """ 10 | Can this package be run as a Python module? 11 | """ 12 | exit_status = call("python -m commodore", shell=True) 13 | # click 8.2 changed exit code for `no_args_is_help` from 0 to 2 14 | assert exit_status == 2 15 | 16 | 17 | def test_entrypoint(): 18 | """ 19 | Is entrypoint script installed? 20 | """ 21 | exit_status = call("commodore --help", shell=True) 22 | assert exit_status == 0 23 | 24 | 25 | def test_clean_command(): 26 | """ 27 | Is subcommand available? 28 | """ 29 | exit_status = call("commodore catalog clean --help", shell=True) 30 | assert exit_status == 0 31 | 32 | 33 | def test_compile_command(): 34 | """ 35 | Is subcommand available? 36 | """ 37 | exit_status = call("commodore catalog compile --help", shell=True) 38 | assert exit_status == 0 39 | 40 | 41 | def test_component_new_command(): 42 | """ 43 | Is subcommand available? 44 | """ 45 | exit_status = call("commodore component new --help", shell=True) 46 | assert exit_status == 0 47 | 48 | 49 | def test_component_compile_command(): 50 | """ 51 | Is subcommand available? 
52 | """ 53 | exit_status = call("commodore component compile --help", shell=True) 54 | assert exit_status == 0 55 | 56 | 57 | def test_inventory_show_command(): 58 | exit_status = call("commodore inventory show --help", shell=True) 59 | assert exit_status == 0 60 | 61 | 62 | def test_inventory_components_command(): 63 | exit_status = call("commodore inventory components --help", shell=True) 64 | assert exit_status == 0 65 | 66 | 67 | def test_inventory_packages_command(): 68 | exit_status = call("commodore inventory packages --help", shell=True) 69 | assert exit_status == 0 70 | 71 | 72 | def test_inventory_lint_command(): 73 | exit_status = call("commodore inventory lint --help", shell=True) 74 | assert exit_status == 0 75 | 76 | 77 | def test_login_command(): 78 | exit_status = call("commodore login --help", shell=True) 79 | assert exit_status == 0 80 | 81 | 82 | def test_fetch_token_command(): 83 | exit_status = call("commodore fetch-token --help", shell=True) 84 | assert exit_status == 0 85 | 86 | 87 | def test_package_compile_command(): 88 | exit_status = call("commodore package compile --help", shell=True) 89 | assert exit_status == 0 90 | 91 | 92 | def test_package_new_command(): 93 | exit_status = call("commodore package new --help", shell=True) 94 | assert exit_status == 0 95 | 96 | 97 | def test_package_update_command(): 98 | exit_status = call("commodore package update --help", shell=True) 99 | assert exit_status == 0 100 | 101 | 102 | def test_package_sync_command(): 103 | exit_status = call("commodore package sync --help", shell=True) 104 | assert exit_status == 0 105 | -------------------------------------------------------------------------------- /tests/test_cli_component.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | 5 | from collections.abc import Iterable 6 | from datetime import timedelta 7 | from pathlib import Path 8 | from typing import 
Optional, Type 9 | from unittest import mock 10 | 11 | import pytest 12 | import yaml 13 | 14 | from commodore.component import Component 15 | from commodore.component.template import ComponentTemplater 16 | 17 | import commodore.cli.component as component 18 | 19 | from conftest import RunnerFunc, make_mock_templater 20 | 21 | 22 | @pytest.mark.parametrize("repo_dir", [False, True]) 23 | @mock.patch.object(component, "compile_component") 24 | def test_compile_component_cli(mock_compile, tmp_path, repo_dir, cli_runner): 25 | cpath = tmp_path / "test-component" 26 | cpath.mkdir() 27 | 28 | def _compile(cfg, path, alias, values, search_paths, output, name): 29 | assert cfg.verbose == 0 30 | assert path == str(cpath) 31 | assert values == () 32 | assert alias is None 33 | assert search_paths == () 34 | assert output == "./" 35 | assert name == "" 36 | 37 | mock_compile.side_effect = _compile 38 | 39 | repo_dir_arg = [] 40 | if repo_dir: 41 | repo_dir_arg = ["-r", str(tmp_path)] 42 | result = cli_runner(["component", "compile", str(cpath)] + repo_dir_arg) 43 | 44 | assert result.exit_code == 0 45 | 46 | if repo_dir: 47 | assert ( 48 | result.stdout 49 | == " > Parameter `-r`/`--repo-directory` is deprecated and has no effect\n" 50 | ) 51 | 52 | 53 | @pytest.mark.parametrize("template_version", [None, "main^"]) 54 | @mock.patch.object(component, "ComponentTemplater") 55 | def test_update_component_cli(mock_templater, tmp_path, cli_runner, template_version): 56 | cpath = tmp_path / "test-component" 57 | cpath.mkdir() 58 | 59 | mt = make_mock_templater(mock_templater, cpath) 60 | 61 | template_arg = ( 62 | [f"--template-version={template_version}"] 63 | if template_version is not None 64 | else [] 65 | ) 66 | 67 | result = cli_runner(["component", "update", str(cpath)] + template_arg) 68 | 69 | assert result.exit_code == 0 70 | assert mt.template_version == template_version 71 | 72 | 73 | @mock.patch.object(component, "sync_dependencies") 74 | @pytest.mark.parametrize( 75 
| "ghtoken,template_version", 76 | [ 77 | (None, None), 78 | ("ghp_fake-token", None), 79 | ("ghp_fake-token", "custom-template-version"), 80 | ], 81 | ) 82 | def test_component_sync_cli( 83 | mock_sync_dependencies, 84 | ghtoken, 85 | template_version, 86 | tmp_path: Path, 87 | cli_runner: RunnerFunc, 88 | ): 89 | os.chdir(tmp_path) 90 | if ghtoken is not None: 91 | os.environ["COMMODORE_GITHUB_TOKEN"] = ghtoken 92 | 93 | dep_list = tmp_path / "deps.yaml" 94 | with open(dep_list, "w", encoding="utf-8") as f: 95 | yaml.safe_dump(["projectsyn/component-foo"], f) 96 | 97 | def sync_deps( 98 | config, 99 | deplist: Path, 100 | dry_run: bool, 101 | pr_branch: str, 102 | pr_labels: Iterable[str], 103 | deptype: Type, 104 | templater: Type, 105 | pr_batch_size: int, 106 | github_pause: int, 107 | filter: str, 108 | tmpl_version: Optional[str], 109 | ): 110 | assert config.github_token == ghtoken 111 | assert deplist.absolute() == dep_list.absolute() 112 | assert not dry_run 113 | assert pr_branch == "template-sync" 114 | assert list(pr_labels) == [] 115 | assert deptype == Component 116 | assert templater == ComponentTemplater 117 | assert pr_batch_size == 10 118 | assert github_pause == timedelta(seconds=120) 119 | assert filter == "" 120 | assert tmpl_version == template_version 121 | 122 | mock_sync_dependencies.side_effect = sync_deps 123 | template_version_flag = ( 124 | [f"--template-version={template_version}"] 125 | if template_version is not None 126 | else [] 127 | ) 128 | result = cli_runner(["component", "sync", "deps.yaml"] + template_version_flag) 129 | assert result.exit_code == (1 if ghtoken is None else 0) 130 | -------------------------------------------------------------------------------- /tests/test_cli_package.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | 5 | from collections.abc import Iterable 6 | from datetime import timedelta 7 | from pathlib 
import Path
from typing import Optional, Type
from unittest import mock

import pytest
import yaml

from commodore.cli import package
from commodore.package import Package
from commodore.package.template import PackageTemplater

from conftest import RunnerFunc, make_mock_templater


@pytest.mark.parametrize("template_version", [None, "main^"])
@mock.patch.object(package, "PackageTemplater")
def test_update_package_cli(mock_templater, tmp_path, cli_runner, template_version):
    """Verify `package update` passes `--template-version` to the templater."""
    ppath = tmp_path / "test-package"
    ppath.mkdir()

    mt = make_mock_templater(mock_templater, ppath)

    template_arg = (
        [f"--template-version={template_version}"]
        if template_version is not None
        else []
    )

    result = cli_runner(["package", "update", str(ppath)] + template_arg)

    assert result.exit_code == 0
    assert mt.template_version == template_version


@mock.patch.object(package, "sync_dependencies")
@pytest.mark.parametrize(
    "ghtoken,template_version",
    [
        (None, None),
        ("ghp_fake-token", None),
        ("ghp_fake-token", "custom-template-version"),
    ],
)
def test_package_sync_cli(
    mock_sync_packages,
    ghtoken,
    template_version,
    tmp_path: Path,
    cli_runner: RunnerFunc,
):
    """Verify `package sync` default arguments and GitHub-token handling.

    Mirrors `test_component_sync_cli`: without `COMMODORE_GITHUB_TOKEN`
    the command exits 1; with a token the mocked `sync_dependencies`
    asserts the CLI's default arguments.
    """
    os.chdir(tmp_path)
    if ghtoken is not None:
        # NOTE(review): writing os.environ directly leaks the token into
        # subsequently-run tests; consider monkeypatch.setenv instead.
        os.environ["COMMODORE_GITHUB_TOKEN"] = ghtoken

    pkg_list = tmp_path / "pkgs.yaml"
    with open(pkg_list, "w", encoding="utf-8") as f:
        yaml.safe_dump(["projectsyn/package-foo"], f)

    def sync_pkgs(
        config,
        pkglist: Path,
        dry_run: bool,
        pr_branch: str,
        pr_labels: Iterable[str],
        deptype: Type,
        templater: Type,
        pr_batch_size: int,
        github_pause: timedelta,
        filter: str,
        tmpl_version: Optional[str],
    ):
        assert config.github_token == ghtoken
        assert pkglist.absolute() == pkg_list.absolute()
        assert not dry_run
        assert pr_branch == "template-sync"
        assert list(pr_labels) == []
        assert deptype == Package
        assert templater == PackageTemplater
        assert pr_batch_size == 10
        # the CLI default pause between PR batches is 120 seconds
        assert github_pause == timedelta(seconds=120)
        assert filter == ""
        assert tmpl_version == template_version

    mock_sync_packages.side_effect = sync_pkgs
    template_version_flag = (
        [f"--template-version={template_version}"]
        if template_version is not None
        else []
    )
    result = cli_runner(["package", "sync", "pkgs.yaml"] + template_version_flag)
    # printed output is captured by pytest and shown on failure
    print(result.stdout)
    assert result.exit_code == (1 if ghtoken is None else 0)
--------------------------------------------------------------------------------
/tests/test_commodore_libjsonnet.py:
--------------------------------------------------------------------------------
from __future__ import annotations

import functools
import json

from pathlib import Path
from typing import Any

from commodore.postprocess.jsonnet import _import_cb, _native_callbacks

import _gojsonnet
import pytest
import yaml


TESTS_DIR = Path(__file__).parent / "jsonnet"


def discover_tc() -> list[str]:
    """Discover test case names from non-hidden files in tests/jsonnet.

    Each unique file stem (e.g. `envList` from `envList.jsonnet` /
    `envList.json`) becomes one parametrized test case.
    """
    files = {
        f.stem
        for f in TESTS_DIR.iterdir()
        if f.is_file() and not f.name.startswith(".")
    }
    # printed output is captured by pytest and shown on failure
    print(files)
    return list(sorted(files))


def tc_files(tc: str) -> tuple[Path, Path, Path]:
    """Return (input jsonnet, inventory yaml, expected json) paths for `tc`."""
    return (
        TESTS_DIR / f"{tc}.jsonnet",
        TESTS_DIR / f"{tc}.yaml",
        TESTS_DIR / f"{tc}.json",
    )


def write_testdata(tmp_path: Path):
    """Write the fixture YAML documents test0/test1/test2.yaml to `tmp_path`.

    The documents match the fixtures documented in `test_jsonnet`'s
    docstring and are read by the jsonnet test cases via `work_dir`.
    """
    testdata1 = [
        {
            "metadata": {
                "name": "obj1",
            },
            "spec": {
                "a": "a",
                "b": "b",
                "c": "c",
                "d": "d",
            },
        },
        {
            "metadata": {
                "name": "obj2",
                "namespace": "foo",
            },
            "spec": 5,
        },
        {
            "metadata": {
"name": "obj3", 60 | "namespace": "test", 61 | }, 62 | "spec": {"a": [1, 2, 3], "b": [4, 5, 6]}, 63 | }, 64 | ] 65 | testdata2 = [ 66 | { 67 | "metadata": { 68 | "name": "obj4", 69 | "namespace": "test", 70 | }, 71 | "spec": { 72 | "list": [1], 73 | }, 74 | }, 75 | { 76 | "metadata": {"name": "obj5"}, 77 | "spec": {"value": "aaa"}, 78 | }, 79 | ] 80 | 81 | with open(tmp_path / "test0.yaml", "w", encoding="utf-8") as tf: 82 | yaml.dump_all([testdata1[0]], tf) 83 | 84 | with open(tmp_path / "test1.yaml", "w", encoding="utf-8") as tf: 85 | yaml.dump_all(testdata1, tf) 86 | 87 | with open(tmp_path / "test2.yaml", "w", encoding="utf-8") as tf: 88 | yaml.dump_all(testdata2, tf) 89 | 90 | 91 | def render_jsonnet(tmp_path: Path, inputf: Path, invf: Path, **kwargs): 92 | inv = {} 93 | if invf.is_file(): 94 | with open(invf) as invfh: 95 | inv = yaml.safe_load(invfh) 96 | 97 | def _inventory() -> dict[str, Any]: 98 | return inv 99 | 100 | _native_cb = _native_callbacks 101 | _native_cb["commodore_inventory"] = ((), _inventory) 102 | 103 | resstr = _gojsonnet.evaluate_file( 104 | str(inputf), 105 | import_callback=functools.partial(_import_cb, tmp_path), 106 | native_callbacks=_native_cb, 107 | ext_vars=kwargs, 108 | ) 109 | return json.loads(resstr) 110 | 111 | 112 | @pytest.mark.parametrize( 113 | "tc", 114 | discover_tc(), 115 | ) 116 | def test_jsonnet(tmp_path: Path, tc: str): 117 | """Test jsonnet functions. 
118 | 119 | Functions can expect the following files to be present in the directory indicated by external variable `work_dir`: 120 | 121 | * test0.yaml: 122 | ``` 123 | --- 124 | metadata: 125 | name: obj1 126 | spec: 127 | a: a 128 | b: b 129 | c: c 130 | d: d 131 | ``` 132 | 133 | * test1.yaml: 134 | ``` 135 | --- 136 | metadata: 137 | name: obj1 138 | spec: 139 | a: a 140 | b: b 141 | c: c 142 | d: d 143 | --- 144 | metadata: 145 | name: obj2 146 | namespace: foo 147 | spec: 5 148 | --- 149 | metadata: 150 | name: obj3 151 | namespace: test 152 | spec: 153 | a: [1,2,3] 154 | b: [4,5,6] 155 | ``` 156 | 157 | * test2.yaml 158 | ``` 159 | --- 160 | metadata: 161 | name: obj4 162 | namespace: test 163 | spec: 164 | list: 165 | - 1 166 | --- 167 | metadata: 168 | name: obj5 169 | spec: 170 | value: aaa 171 | """ 172 | inputf, invf, expectedf = tc_files(tc) 173 | write_testdata(tmp_path) 174 | result = render_jsonnet(tmp_path, inputf, invf, work_dir=str(tmp_path)) 175 | with open(expectedf) as e: 176 | expected = json.load(e) 177 | 178 | assert result == expected 179 | -------------------------------------------------------------------------------- /tests/test_dependency_mgmt_discovery.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from unittest.mock import patch 4 | 5 | import click 6 | import pytest 7 | 8 | from test_dependency_mgmt import _setup_mock_inventory 9 | 10 | from commodore.config import Config 11 | 12 | from commodore.dependency_mgmt import discovery 13 | 14 | 15 | @pytest.mark.parametrize( 16 | "expected_aliases,expected_exception_msg", 17 | [ 18 | ( 19 | {"other-component": "aliased", "third-component": "aliased"}, 20 | "Duplicate component alias 'aliased': components " 21 | + "'other-component' and 'third-component' are aliased to 'aliased'", 22 | ), 23 | ( 24 | {"other-component": "third-component", "third-component": "aliased"}, 25 | "Component 'other-component' 
aliases existing component 'third-component'", 26 | ), 27 | ( 28 | { 29 | "test-component": "third-component", 30 | "other-component": "third-component", 31 | "third-component": "aliased", 32 | }, 33 | "Components 'other-component' and 'test-component' alias " 34 | + "existing component 'third-component'", 35 | ), 36 | ], 37 | ) 38 | @patch.object(discovery, "kapitan_inventory") 39 | def test_discover_components_duplicate_aliases( 40 | patch_inventory, config: Config, expected_aliases, expected_exception_msg 41 | ): 42 | _setup_mock_inventory(patch_inventory, expected_aliases) 43 | 44 | with pytest.raises(KeyError) as e: 45 | discovery._discover_components(config) 46 | 47 | assert e.value.args[0] == expected_exception_msg 48 | 49 | 50 | @patch.object(discovery, "kapitan_inventory") 51 | @pytest.mark.parametrize("packages", [[], ["test"]]) 52 | def test_discover_components(patch_inventory, config: Config, packages): 53 | component_inv = _setup_mock_inventory(patch_inventory, packages=packages) 54 | 55 | components, aliases = discovery._discover_components(config) 56 | assert components == sorted(component_inv.keys()) 57 | assert sorted(aliases.keys()) == components 58 | assert all(k == v for k, v in aliases.items()) 59 | assert all(p not in aliases.keys() and p not in aliases.values() for p in packages) 60 | 61 | 62 | @patch.object(discovery, "kapitan_inventory") 63 | def test_discover_components_aliases(patch_inventory, config: Config): 64 | expected_aliases = {"other-component": "aliased"} 65 | component_inv = _setup_mock_inventory(patch_inventory, expected_aliases) 66 | 67 | components, aliases = discovery._discover_components(config) 68 | assert components == sorted(component_inv.keys()) 69 | assert set(components + list(expected_aliases.values())) == set(aliases.keys()) 70 | assert set(aliases.values()) == set(components) 71 | assert aliases["aliased"] == "other-component" 72 | 73 | 74 | @patch.object(discovery, "kapitan_inventory") 75 | 
@pytest.mark.parametrize( 76 | "packages", [[], ["test"], ["test", "foo"], ["test", "test-component"]] 77 | ) 78 | def test_discover_packages(patch_inventory, config: Config, packages: list[str]): 79 | component_inv = _setup_mock_inventory(patch_inventory, packages=packages) 80 | 81 | pkgs = discovery._discover_packages(config) 82 | assert sorted(packages) == sorted(pkgs) 83 | non_overlap = set(component_inv.keys()) - set(packages) 84 | assert all(cn not in pkgs for cn in non_overlap) 85 | 86 | 87 | @patch.object(discovery, "kapitan_inventory") 88 | @pytest.mark.parametrize( 89 | "packages,expected", 90 | [ 91 | ( 92 | ["t-foo"], 93 | "Package names can't be prefixed with 't-'. " 94 | + "This prefix is reserved for tenant configurations.", 95 | ), 96 | (["components"], "Can't use reserved name 'components' as package name"), 97 | (["defaults"], "Can't use reserved name 'defaults' as package name"), 98 | (["global"], "Can't use reserved name 'global' as package name"), 99 | (["params"], "Can't use reserved name 'params' as package name"), 100 | ], 101 | ) 102 | def test_discover_packages_illegal( 103 | patch_inventory, config: Config, packages: list[str], expected: str 104 | ): 105 | _ = _setup_mock_inventory(patch_inventory, packages=packages) 106 | 107 | with pytest.raises(click.ClickException) as e: 108 | _ = discovery._discover_packages(config) 109 | 110 | assert expected in str(e.value) 111 | -------------------------------------------------------------------------------- /tests/test_dependency_mgmt_jsonnet_bundler.py: -------------------------------------------------------------------------------- 1 | import json 2 | from pathlib import Path 3 | 4 | from commodore.component import Component 5 | from commodore.config import Config 6 | 7 | from commodore.dependency_mgmt import jsonnet_bundler 8 | 9 | 10 | def test_write_jsonnetfile(config: Config, tmp_path: Path, mockdep): 11 | config.register_component( 12 | Component("test-component", dependency=mockdep, 
work_dir=tmp_path) 13 | ) 14 | config.register_component( 15 | Component("test-component-2", dependency=mockdep, work_dir=tmp_path) 16 | ) 17 | dirs = [ 18 | "dependencies/test-component", 19 | "dependencies/test-component-2", 20 | "dependencies/lib", 21 | ] 22 | 23 | file = tmp_path / "jsonnetfile.json" 24 | 25 | jsonnet_bundler.write_jsonnetfile( 26 | file, jsonnet_bundler.jsonnet_dependencies(config) 27 | ) 28 | 29 | with open(file) as jf: 30 | jf_string = jf.read() 31 | assert jf_string[-1] == "\n" 32 | jf_contents = json.loads(jf_string) 33 | assert jf_contents["version"] == 1 34 | assert jf_contents["legacyImports"] 35 | deps = jf_contents["dependencies"] 36 | for dep in deps: 37 | assert dep["source"]["local"]["directory"] in dirs 38 | 39 | 40 | def test_clear_jsonnet_lock_file(tmp_path: Path): 41 | jsonnetfile = tmp_path / "jsonnetfile.json" 42 | jsonnet_lock = tmp_path / "jsonnetfile.lock.json" 43 | with open(jsonnetfile, "w") as jf: 44 | json.dump( 45 | { 46 | "version": 1, 47 | "dependencies": [ 48 | { 49 | "source": { 50 | "git": { 51 | "remote": "https://github.com/brancz/kubernetes-grafana.git", 52 | "subdir": "grafana", 53 | } 54 | }, 55 | "version": "master", 56 | } 57 | ], 58 | "legacyImports": True, 59 | }, 60 | jf, 61 | ) 62 | with open(jsonnet_lock, "w") as jl: 63 | json.dump( 64 | { 65 | "version": 1, 66 | "dependencies": [ 67 | { 68 | "source": { 69 | "git": { 70 | "remote": "https://github.com/brancz/kubernetes-grafana.git", 71 | "subdir": "grafana", 72 | } 73 | }, 74 | "version": "57b4365eacda291b82e0d55ba7eec573a8198dda", 75 | "sum": "92DWADwGjnCfpZaL7Q07C0GZayxBziGla/O03qWea34=", 76 | } 77 | ], 78 | "legacyImports": True, 79 | }, 80 | jl, 81 | ) 82 | jsonnet_bundler.fetch_jsonnet_libraries(tmp_path) 83 | 84 | assert jsonnet_lock.is_file() 85 | with open(jsonnet_lock, "r") as file: 86 | data = json.load(file) 87 | assert ( 88 | data["dependencies"][0]["version"] 89 | != "57b4365eacda291b82e0d55ba7eec573a8198dda" 90 | ) 91 | 
--------------------------------------------------------------------------------
/tests/test_dependency_mgmt_tools.py:
--------------------------------------------------------------------------------
import pytest

from commodore.dependency_mgmt import tools


@pytest.mark.parametrize(
    "components,expected",
    [
        ([], ""),
        (["a"], "'a'"),
        (["a", "b"], "'a' and 'b'"),
        # Verify that Oxford comma is used in lists with >= 3 items
        # (the two-item case above is joined with a bare "and").
        (
            ["a", "b", "c"],
            "'a', 'b', and 'c'",
        ),
        (
            ["a", "b", "c", "d", "e"],
            "'a', 'b', 'c', 'd', and 'e'",
        ),
    ],
)
def test_format_component_list(components, expected):
    """Verify human-readable quoting/joining of component name lists."""
    assert tools.format_component_list(components) == expected
--------------------------------------------------------------------------------
/tests/test_gitrepo_diff.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from pathlib import Path

import click
import git

from commodore.gitrepo import diff


class MockDataStream:
    """Minimal stand-in for a git blob: exposes `data_stream.read()`.

    `data_stream` returns self so `obj.data_stream.read()` yields the
    UTF-8 encoded contents, matching how `diff._compute_similarity`
    reads blobs.
    """

    _data: str

    def __init__(self, data):
        self._data = data

    @property
    def data_stream(self):
        # Returning self lets this object serve as both blob and stream.
        return self

    def read(self) -> bytes:
        return self._data.encode("utf-8")


class MockDiff:
    """Stand-in for a git diff entry with a/b paths and blob contents."""

    def __init__(self, a_path, b_path, a_blob, b_blob):
        self.a_path = a_path
        self.b_path = b_path
        self.a_blob = MockDataStream(a_blob)
        self.b_blob = MockDataStream(b_blob)


def test_compute_similarity(tmp_path: Path):
    """Verify similarity output for a rename with one changed line out of three."""
    change = MockDiff("foo.txt", "bar.txt", "foo\nbar\nbaz\n", "foo\nbar\nbar\n")

    similarity = diff._compute_similarity(change)
    # NOTE(review): _compute_similarity labels b_path as "---" and a_path
    # as "+++" here -- confirm this ordering is intended in the library.
    expected = [
        click.style("--- bar.txt", fg="yellow"),
        click.style("+++ foo.txt", fg="yellow"),
        "Renamed file, similarity index 75.00%",
    ]
    assert similarity == expected


def
test_process_diff_renamed(tmp_path: Path):
    """Verify process_diff output for a real rename in a throwaway git repo."""
    r = git.Repo.init(tmp_path / "repo")

    with open(Path(r.working_tree_dir) / "foo.txt", "w", encoding="utf-8") as f:
        f.write("foo\nbar\nbaz\n")

    r.index.add(["foo.txt"])
    r.index.commit("Initial")

    # "Move" foo.txt to bar.txt
    (Path(r.working_tree_dir) / "foo.txt").unlink()
    with open(Path(r.working_tree_dir) / "bar.txt", "w", encoding="utf-8") as f:
        f.write("foo\nbar\nbar\n")

    r.index.remove(["foo.txt"])
    r.index.add(["bar.txt"])

    difftext: list[str] = []
    d = r.index.diff(r.head.commit)
    for ct in d.change_type:
        for c in d.iter_change_type(ct):
            # printed output is captured by pytest and shown on failure
            print(c)
            difftext.extend(diff.process_diff(ct, c, diff.default_difffunc))

    expected = [
        click.style("Renamed file foo.txt => bar.txt", fg="yellow"),
        "\n".join(
            [
                click.style("--- foo.txt", fg="yellow"),
                click.style("+++ bar.txt", fg="yellow"),
                "Renamed file, similarity index 75.00%",
            ]
        ),
    ]
    assert difftext == expected
--------------------------------------------------------------------------------
/tests/test_inventory.py:
--------------------------------------------------------------------------------
from pathlib import Path as P

from commodore.component import Component
from commodore.inventory import Inventory


def test_inventory_dir():
    """Inventory dir is `inventory`, prefixed by work_dir when given."""
    assert str(Inventory().inventory_dir) == "inventory"
    assert str(Inventory(work_dir=P("./foo")).inventory_dir) == "foo/inventory"


def test_dependencies_dir():
    """Dependencies dir is `dependencies`, prefixed by work_dir when given."""
    assert str(Inventory().dependencies_dir) == "dependencies"
    assert str(Inventory(work_dir=P("./bar")).dependencies_dir) == "bar/dependencies"


def test_classes_dir():
    """Classes dir is `inventory/classes`, prefixed by work_dir when given."""
    assert str(Inventory().classes_dir) == "inventory/classes"
    assert str(Inventory(work_dir=P("./baz")).classes_dir) == "baz/inventory/classes"


def test_components_dir():
| assert str(Inventory().components_dir) == "inventory/classes/components" 24 | assert ( 25 | str(Inventory(work_dir=P("./foo")).components_dir) 26 | == "foo/inventory/classes/components" 27 | ) 28 | 29 | 30 | def test_defaults_dir(): 31 | assert str(Inventory().defaults_dir) == "inventory/classes/defaults" 32 | assert ( 33 | str(Inventory(work_dir=P("./bar")).defaults_dir) 34 | == "bar/inventory/classes/defaults" 35 | ) 36 | 37 | 38 | def test_targets_dir(): 39 | assert str(Inventory().targets_dir) == "inventory/targets" 40 | assert str(Inventory(work_dir=P("./baz")).targets_dir) == "baz/inventory/targets" 41 | 42 | 43 | def test_lib_dir(): 44 | assert str(Inventory().lib_dir) == "dependencies/lib" 45 | assert str(Inventory(work_dir=P("./foo")).lib_dir) == "foo/dependencies/lib" 46 | 47 | 48 | def test_libs_dir(): 49 | assert str(Inventory().libs_dir) == "dependencies/libs" 50 | assert str(Inventory(work_dir=P("./bar")).libs_dir) == "bar/dependencies/libs" 51 | 52 | 53 | def test_global_config_dir(): 54 | assert str(Inventory().global_config_dir) == "inventory/classes/global" 55 | assert ( 56 | str(Inventory(work_dir=P("./baz")).global_config_dir) 57 | == "baz/inventory/classes/global" 58 | ) 59 | 60 | 61 | def test_tenant_config_dir(): 62 | assert str(Inventory().tenant_config_dir("t-foo")) == "inventory/classes/t-foo" 63 | assert ( 64 | str(Inventory(work_dir=P("./baz")).tenant_config_dir("t-bar")) 65 | == "baz/inventory/classes/t-bar" 66 | ) 67 | 68 | 69 | def test_component_file(tmp_path: P, mockdep): 70 | assert ( 71 | str(Inventory().component_file("foo")) == "inventory/classes/components/foo.yml" 72 | ) 73 | assert ( 74 | str(Inventory().component_file(Component("baz", mockdep, work_dir=tmp_path))) 75 | == "inventory/classes/components/baz.yml" 76 | ) 77 | assert ( 78 | str(Inventory(work_dir=P("./baz")).component_file("bar")) 79 | == "baz/inventory/classes/components/bar.yml" 80 | ) 81 | 82 | 83 | def test_defaults_file(tmp_path: P, mockdep): 84 | assert 
str(Inventory().defaults_file("foo")) == "inventory/classes/defaults/foo.yml" 85 | assert ( 86 | str(Inventory().defaults_file(Component("baz", mockdep, work_dir=tmp_path))) 87 | == "inventory/classes/defaults/baz.yml" 88 | ) 89 | assert ( 90 | str(Inventory(work_dir=P("./baz")).defaults_file("bar")) 91 | == "baz/inventory/classes/defaults/bar.yml" 92 | ) 93 | 94 | 95 | def test_target_file(tmp_path: P, mockdep): 96 | assert str(Inventory().target_file("foo")) == "inventory/targets/foo.yml" 97 | assert ( 98 | str(Inventory().target_file(Component("baz", mockdep, work_dir=tmp_path))) 99 | == "inventory/targets/baz.yml" 100 | ) 101 | assert ( 102 | str(Inventory(work_dir=P("./baz")).target_file("bar")) 103 | == "baz/inventory/targets/bar.yml" 104 | ) 105 | 106 | 107 | def ensure_dirs(tmp_path: P): 108 | dirs = [ 109 | tmp_path / "inventory/classes/components", 110 | tmp_path / "inventory/classes/defaults", 111 | tmp_path / "dependencies/lib", 112 | tmp_path / "dependencies/libs", 113 | tmp_path / "inventory/targets", 114 | ] 115 | for d in dirs: 116 | assert not d.is_dir() 117 | 118 | Inventory(work_dir=tmp_path).ensure_dirs() 119 | 120 | for d in dirs: 121 | assert d.is_dir() 122 | -------------------------------------------------------------------------------- /tests/test_inventory_lint.py: -------------------------------------------------------------------------------- 1 | import click 2 | import pytest 3 | import yaml 4 | 5 | from commodore.config import Config 6 | from commodore.inventory import lint 7 | 8 | 9 | def test_check_removed_reclass_variables_error(tmp_path, config: Config): 10 | testf = tmp_path / "test.yml" 11 | with open(testf, "w") as f: 12 | yaml.safe_dump({"parameters": {"test": "${customer:name}"}}, f) 13 | 14 | with pytest.raises(click.ClickException) as e: 15 | lint.check_removed_reclass_variables(config, "tests", [testf]) 16 | 17 | assert "Found 1 usages of removed reclass variables in the tests." 
in str(e.value) 18 | -------------------------------------------------------------------------------- /tests/test_inventory_lint_deprecated_parameters.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | from typing import Any 5 | 6 | import pytest 7 | import yaml 8 | 9 | from commodore.config import Config 10 | from commodore.inventory import lint_deprecated_parameters, lint 11 | 12 | 13 | @pytest.mark.parametrize( 14 | "data,expected", 15 | [ 16 | ({}, ""), 17 | ({"key": ""}, ""), 18 | ({"key": "${some:ref}"}, ""), 19 | ( 20 | {"key": "${customer:name}"}, 21 | "> Field 'key' in file 'test.yaml' contains deprecated parameter '${customer:name}'\n", 22 | ), 23 | ( 24 | {"key": "${some:${customer:name}}"}, 25 | "> Field 'key' in file 'test.yaml' contains deprecated parameter '${customer:name}'\n", 26 | ), 27 | ( 28 | {"key": "embedded-${customer:name}"}, 29 | "> Field 'key' in file 'test.yaml' contains deprecated parameter '${customer:name}'\n", 30 | ), 31 | ( 32 | {"key": "${cluster:dist}"}, 33 | "> Field 'key' in file 'test.yaml' contains deprecated parameter '${cluster:dist}'\n", 34 | ), 35 | ( 36 | {"key": "${cloud:provider}"}, 37 | "> Field 'key' in file 'test.yaml' contains deprecated parameter '${cloud:provider}'\n", 38 | ), 39 | ( 40 | {"key": "${cloud:region}"}, 41 | "> Field 'key' in file 'test.yaml' contains deprecated parameter '${cloud:region}'\n", 42 | ), 43 | ({"list": [1, 2, 3]}, ""), 44 | ( 45 | {"list": ["test", "${customer:name}", "aaa"]}, 46 | "Field 'list[1]' in file 'test.yaml' contains deprecated parameter '${customer:name}'\n", 47 | ), 48 | ( 49 | {"list": ["test", {"name": "${customer:name}"}, "aaa"]}, 50 | "Field 'list[1].name' in file 'test.yaml' contains deprecated parameter '${customer:name}'\n", 51 | ), 52 | ( 53 | {"list": ["test", {"name": "${customer:name}"}, "${customer:name}"]}, 54 | "> Field 'list[1].name' in file 'test.yaml' 
contains deprecated parameter '${customer:name}'\n" 55 | + "> Field 'list[2]' in file 'test.yaml' contains deprecated parameter '${customer:name}'\n", 56 | ), 57 | ( 58 | {"nested": {"key": "${customer:name}"}}, 59 | "> Field 'nested.key' in file 'test.yaml' contains deprecated parameter '${customer:name}'\n", 60 | ), 61 | ( 62 | { 63 | "unlintable:int": 1, 64 | "unlintable:float": 1.0, 65 | "unlintable:bool": True, 66 | "key": "${customer:name}", 67 | }, 68 | "> Field 'key' in file 'test.yaml' contains deprecated parameter '${customer:name}'", 69 | ), 70 | ], 71 | ) 72 | def test_lint_deprecated_parameters(capsys, data: dict[str, Any], expected: str): 73 | file = Path("test.yaml") 74 | 75 | ec = lint_deprecated_parameters.lint_deprecated_parameters(file, data) 76 | 77 | captured = capsys.readouterr() 78 | assert expected in captured.out 79 | expected_count = len(expected.strip().split("\n")) if len(expected) > 0 else 0 80 | assert ec == expected_count 81 | 82 | 83 | def test_lint_deprecated_parameters_directory(capsys, tmp_path: Path, config: Config): 84 | with open(tmp_path / "test.yaml", "w") as f: 85 | yaml.safe_dump( 86 | { 87 | "list": ["test", {"name": "${customer:name}"}, "${customer:name}"], 88 | "unlintable:int": 1, 89 | "unlintable:float": 1.0, 90 | "unlintable:bool": True, 91 | "key": "${customer:name}", 92 | }, 93 | f, 94 | ) 95 | 96 | ec = lint.DeprecatedParameterLinter()(config, tmp_path) 97 | 98 | captured = capsys.readouterr() 99 | out_lines = captured.out.strip().split("\n") 100 | 101 | assert ec == 3 102 | assert len(out_lines) == 3 103 | assert ( 104 | f"> Field 'key' in file '{tmp_path/'test.yaml'}' contains deprecated parameter '${{customer:name}}'" 105 | in out_lines 106 | ) 107 | assert ( 108 | f"> Field 'list[1].name' in file '{tmp_path/'test.yaml'}' contains deprecated parameter '${{customer:name}}'" 109 | in out_lines 110 | ) 111 | assert ( 112 | f"> Field 'list[2]' in file '{tmp_path/'test.yaml'}' contains deprecated parameter 
'${{customer:name}}'" 113 | in out_lines 114 | ) 115 | -------------------------------------------------------------------------------- /tests/test_inventory_render.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | 5 | from commodore.config import Config 6 | from commodore.inventory import render 7 | 8 | from test_inventory_parameters import ( 9 | setup_global_repo_dir, 10 | setup_tenant_repo_dir, 11 | create_inventory_facts, 12 | verify_components, 13 | GLOBAL_PARAMS, 14 | DIST_PARAMS, 15 | CLOUD_REGION_PARAMS, 16 | CLOUD_REGION_TESTCASES, 17 | CLUSTER_PARAMS, 18 | ) 19 | 20 | 21 | @pytest.mark.parametrize("distribution", DIST_PARAMS.keys()) 22 | @pytest.mark.parametrize("cloud,region", CLOUD_REGION_TESTCASES) 23 | @pytest.mark.parametrize("cluster_id", [None] + list(CLUSTER_PARAMS.keys())) 24 | def test_extract_components( 25 | tmp_path: Path, distribution: str, cloud: str, region: str, cluster_id: str 26 | ): 27 | global_dir = setup_global_repo_dir( 28 | tmp_path, GLOBAL_PARAMS, DIST_PARAMS, CLOUD_REGION_PARAMS 29 | ) 30 | if cluster_id: 31 | tenant_id = "t-foo" 32 | tenant_dir = setup_tenant_repo_dir(tmp_path, CLUSTER_PARAMS) 33 | else: 34 | tenant_id = None 35 | tenant_dir = None 36 | config = Config(tmp_path) 37 | invfacts = create_inventory_facts( 38 | tmp_path, 39 | global_dir, 40 | tenant_dir, 41 | distribution, 42 | cloud, 43 | region, 44 | cluster_id, 45 | tenant_id, 46 | ) 47 | components = render.extract_components(config, invfacts) 48 | 49 | assert set(components.keys()) == set(GLOBAL_PARAMS["components"].keys()) 50 | verify_components(components, distribution, cloud, region, cluster_id) 51 | 52 | 53 | @pytest.mark.parametrize( 54 | "invfacts,expected_error_msg", 55 | [ 56 | ( 57 | lambda t, g: create_inventory_facts( 58 | t, g, None, "x-invalid-dist", None, None, allow_missing_classes=False 59 | ), 60 | "Error while rendering global: Class 
global.distribution.x-invalid-dist not found.", 61 | ), 62 | ( 63 | lambda t, g: create_inventory_facts( 64 | t, g, None, "a", "x-invalid-cloud", None, allow_missing_classes=False 65 | ), 66 | "Error while rendering global: Class global.cloud.x-invalid-cloud not found.", 67 | ), 68 | ( 69 | lambda t, g: create_inventory_facts( 70 | t, g, None, "a", "y", "x-invalid-region", allow_missing_classes=False 71 | ), 72 | "Error while rendering global: Class global.cloud.y.x-invalid-region not found.", 73 | ), 74 | ], 75 | ) 76 | def test_extract_components_valueerror_on_invalid_args( 77 | tmp_path: Path, invfacts, expected_error_msg 78 | ): 79 | global_dir = setup_global_repo_dir( 80 | tmp_path, GLOBAL_PARAMS, DIST_PARAMS, CLOUD_REGION_PARAMS 81 | ) 82 | config = Config(tmp_path) 83 | 84 | with pytest.raises( 85 | ValueError, 86 | match="Unable to render inventory with `--no-allow-missing-classes`. " 87 | + f"{expected_error_msg} " 88 | + "Verify the provided values or allow missing classes.", 89 | ): 90 | render.extract_components(config, invfacts(tmp_path, global_dir)) 91 | -------------------------------------------------------------------------------- /tests/test_kapitan_reclass.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path as P 2 | 3 | from commodore.helpers import kapitan_inventory 4 | 5 | 6 | class MockInventory: 7 | def __init__(self, invdir: P): 8 | self.invdir = invdir 9 | 10 | @property 11 | def inventory_dir(self): 12 | return self.invdir 13 | 14 | @property 15 | def targets_dir(self): 16 | return self.invdir / "targets" 17 | 18 | @property 19 | def classes_dir(self): 20 | return self.invdir / "classes" 21 | 22 | 23 | class MockConfig: 24 | def __init__(self, invdir: P): 25 | self.inv = MockInventory(P(__file__).parent.absolute() / "testdata" / invdir) 26 | 27 | @property 28 | def inventory(self): 29 | return self.inv 30 | 31 | 32 | def test_yml_yaml(tmp_path: P): 33 | config = 
MockConfig(invdir="inventory_yml_yaml") 34 | 35 | inv = kapitan_inventory(config) 36 | 37 | assert "test" in inv 38 | inv = inv["test"] 39 | assert "parameters" in inv 40 | assert "test" in inv["parameters"] 41 | assert "key1" in inv["parameters"]["test"] 42 | assert "value1" == inv["parameters"]["test"]["key1"] 43 | assert "key2" in inv["parameters"]["test"] 44 | assert "value2" == inv["parameters"]["test"]["key2"] 45 | 46 | 47 | def test_applications(tmp_path: P): 48 | config = MockConfig("inventory_apps") 49 | 50 | apps = kapitan_inventory(config, key="applications") 51 | 52 | apps = apps.keys() 53 | assert "app1" in apps 54 | assert "app2" not in apps 55 | 56 | 57 | def test_relative_refs(tmp_path: P): 58 | config = MockConfig("inventory_relative_refs") 59 | 60 | inv = kapitan_inventory(config) 61 | 62 | assert "test" in inv 63 | inv = inv["test"] 64 | assert "parameters" in inv 65 | assert "test" in inv["parameters"] 66 | assert "key1" in inv["parameters"]["test"] 67 | assert "value1" == inv["parameters"]["test"]["key1"] 68 | -------------------------------------------------------------------------------- /tests/test_normalize_url.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from commodore import normalize_url 3 | 4 | 5 | @pytest.mark.parametrize( 6 | "url,expected", 7 | [ 8 | ("https://example.com", "https://example.com/"), 9 | ("https://example.com//foo", "https://example.com/foo"), 10 | ], 11 | ) 12 | def test_normalize_url(url: str, expected: str): 13 | nurl = normalize_url.normalize_url(url) 14 | assert nurl == expected 15 | 16 | 17 | def test_normalize_url_raises_on_none(): 18 | with pytest.raises(ValueError, match="url_normalize returned None for None"): 19 | _ = normalize_url.normalize_url(None) 20 | 21 | 22 | @pytest.mark.parametrize( 23 | "url,expected", 24 | [ 25 | ("https://git.example.com//foo/bar.git", "https://git.example.com/foo/bar.git"), 26 | ( 27 | 
"https://user@git.example.com/path/to////repo.git", 28 | "https://user@git.example.com/path/to/repo.git", 29 | ), 30 | ("user@host:path/to/repo.git", "ssh://user@host/path/to/repo.git"), 31 | ("ssh://user@host///path/to/repo.git", "ssh://user@host/path/to/repo.git"), 32 | ( 33 | "ssh://user@host:2222/path////to/repo.git", 34 | "ssh://user@host:2222/path/to/repo.git", 35 | ), 36 | ], 37 | ) 38 | def test_normalize_git_url(url: str, expected: str): 39 | nurl = normalize_url.normalize_git_url(url) 40 | assert nurl == expected 41 | -------------------------------------------------------------------------------- /tests/test_package.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import git 4 | import yaml 5 | 6 | from commodore.config import Config 7 | from commodore.multi_dependency import MultiDependency 8 | 9 | from commodore import package 10 | 11 | 12 | def test_package_init(tmp_path: Path): 13 | pkg_url = "https://git.example.com/pkg.git" 14 | pdep = MultiDependency(pkg_url, tmp_path / "repo.git") 15 | p = package.Package( 16 | "test", 17 | dependency=pdep, 18 | target_dir=tmp_path / "pkg", 19 | version="master", 20 | ) 21 | assert p.url == "https://git.example.com/pkg.git" 22 | assert p.version == "master" 23 | assert p.target_dir == tmp_path / "pkg" 24 | assert p.repository_dir == tmp_path / "pkg" 25 | 26 | 27 | def _setup_package_remote(pkg_name: str, rpath: Path): 28 | r = git.Repo.init(rpath) 29 | pkg_file = f"{pkg_name}.yml" 30 | with open(rpath / pkg_file, "w") as f: 31 | yaml.safe_dump({"parameters": {pkg_name: "testing"}}, f) 32 | 33 | r.index.add([pkg_file]) 34 | r.index.commit("Initial commit") 35 | 36 | 37 | def test_package_checkout(tmp_path: Path): 38 | _setup_package_remote("test", tmp_path / "pkg.git") 39 | 40 | pdep = MultiDependency(f"file://{tmp_path}/pkg.git", tmp_path / ".pkg") 41 | p = package.Package( 42 | "test", 43 | dependency=pdep, 44 | target_dir=tmp_path / "pkg", 
45 | version="master", 46 | ) 47 | p.checkout() 48 | 49 | classf = p.target_dir / "test.yml" 50 | 51 | assert p.target_dir.exists() 52 | assert p.target_dir.is_dir() 53 | assert classf.exists() 54 | assert classf.is_file() 55 | 56 | with open(classf, "r") as f: 57 | fcontents = yaml.safe_load(f) 58 | assert "parameters" in fcontents 59 | params = fcontents["parameters"] 60 | assert "test" in params 61 | assert params["test"] == "testing" 62 | 63 | 64 | def test_package_checkout_is_dirty(tmp_path: Path, config: Config): 65 | _setup_package_remote("test", tmp_path / "pkg.git") 66 | clone_url = f"file://{tmp_path}/pkg.git" 67 | 68 | p = package.Package.clone(config, clone_url, "test-component") 69 | p.checkout() 70 | 71 | assert not p.checkout_is_dirty() 72 | -------------------------------------------------------------------------------- /tests/test_refs.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pathlib import Path 4 | 5 | from commodore import refs 6 | from commodore.config import Config 7 | 8 | 9 | @pytest.fixture 10 | def inventory(): 11 | """ 12 | Setup test inventory 13 | """ 14 | 15 | def _test(target): 16 | return { 17 | "accesskey": f"?{{vaultkv:t-tenant/c-cluster/test/{target}-accesskey}}", 18 | "secretkey": f"?{{vaultkv:t-tenant/c-cluster/test/{target}-secretkey}}", 19 | "config": "something else", 20 | "params": { 21 | "env": [ 22 | {"key": "envA", "value": "valA"}, 23 | {"key": "envB", "value": "valB"}, 24 | ], 25 | "complex": True, 26 | }, 27 | } 28 | 29 | def _params(target): 30 | return { 31 | "_instance": target, 32 | "test": _test(target), 33 | "non_component": { 34 | "password": "?{vaultkv:t-tenant/c-cluster/global/password}", 35 | }, 36 | "non-component-2": { 37 | "key1": "value", 38 | "key2": 42, 39 | }, 40 | "other_component": { 41 | "enabled": True, 42 | "thesecret": "?{vaultkv:t-tenant/c-cluster/other-component/thesecret}", 43 | "users": ["user1", "user2"], 44 | 
"multiref": "?{vaultkv:t-tenant/c-cluster/foo/bar}-?{vaultkv:t-tenant/c-cluster/foo/baz}", 45 | }, 46 | "kapitan": { 47 | "secrets": { 48 | "vaultkv": { 49 | "VAULT_ADDR": "https://vault.example.com", 50 | "VAULT_CAPATH": "/etc/ssl/certs/", 51 | "VAULT_SKIP_VERIFY": "false", 52 | "auth": "token", 53 | "engine": "kv-v2", 54 | "mount": "clusters/kv", 55 | } 56 | }, 57 | }, 58 | } 59 | 60 | return { 61 | "cluster": { 62 | "parameters": _params("cluster"), 63 | }, 64 | "test-a": { 65 | "parameters": _params("test-a"), 66 | }, 67 | "test-b": { 68 | "parameters": _params("test-b"), 69 | }, 70 | "other-component": { 71 | "parameters": _params("other-component"), 72 | }, 73 | } 74 | 75 | 76 | def test_update_refs(tmp_path: Path, config: Config, inventory): 77 | aliases = { 78 | "other-component": "other-component", 79 | "test-a": "test", 80 | "test-b": "test", 81 | } 82 | config.register_component_aliases(aliases) 83 | 84 | refs.update_refs(config, aliases, inventory) 85 | ref_prefix = config.refs_dir / "t-tenant" / "c-cluster" 86 | expected_refs = [ 87 | Path("other-component/thesecret"), 88 | Path("test/test-a-accesskey"), 89 | Path("test/test-a-secretkey"), 90 | Path("test/test-b-accesskey"), 91 | Path("test/test-b-secretkey"), 92 | Path("global/password"), 93 | Path("foo/bar"), 94 | Path("foo/baz"), 95 | ] 96 | for ref in expected_refs: 97 | refpath = ref_prefix / ref 98 | assert refpath.is_file() 99 | 100 | not_expected_refs = [ 101 | Path("test/cluster-accesskey"), 102 | Path("test/cluster-secretkey"), 103 | Path("test/other-component-accesskey"), 104 | Path("test/other-component-secretkey"), 105 | ] 106 | for ref in not_expected_refs: 107 | refpath = ref_prefix / ref 108 | assert not refpath.exists() 109 | -------------------------------------------------------------------------------- /tests/test_render_inventory.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from pathlib import Path 4 | 5 | import git 6 | 
7 | from commodore.cluster import update_target 8 | from commodore.component import Component 9 | from commodore.config import Config 10 | from commodore.dependency_mgmt import create_component_symlinks, create_alias_symlinks 11 | from commodore.helpers import kapitan_inventory, yaml_dump, yaml_load 12 | 13 | from conftest import MockMultiDependency 14 | 15 | 16 | def _setup(tmp_path: Path): 17 | cfg = Config( 18 | work_dir=tmp_path, 19 | api_url="https://syn.example.com", 20 | api_token="abcd1234", 21 | ) 22 | 23 | os.makedirs(cfg.inventory.defaults_dir) 24 | os.makedirs(cfg.inventory.components_dir) 25 | os.makedirs(cfg.inventory.global_config_dir) 26 | os.makedirs(cfg.inventory.params_dir) 27 | 28 | os.makedirs(tmp_path / "dependencies" / "test") 29 | cdep = MockMultiDependency(git.Repo.init(tmp_path / "repo.git")) 30 | c = Component("test", cdep, work_dir=tmp_path) 31 | c.register_alias("test-1", "master", cdep) 32 | os.makedirs(c.class_file.parent) 33 | # Create alias checkout by symlinking component directory 34 | os.symlink(c.target_directory, c.alias_directory("test-1")) 35 | 36 | yaml_dump( 37 | { 38 | "parameters": { 39 | "kapitan": { 40 | "compile": [ 41 | { 42 | "input_paths": ["input/path/file.txt"], 43 | "input_type": "copy", 44 | "output_path": "test", 45 | } 46 | ] 47 | } 48 | } 49 | }, 50 | c.class_file, 51 | ) 52 | yaml_dump( 53 | { 54 | "parameters": { 55 | "test": { 56 | "multi_instance": True, 57 | "namespace": "syn-test", 58 | "instance_value": "${_instance}", 59 | } 60 | } 61 | }, 62 | c.defaults_file, 63 | ) 64 | yaml_dump( 65 | { 66 | "parameters": { 67 | "cluster": { 68 | "name": "c-cluster-id-1234", 69 | "tenant": "t-tenant-id-1234", 70 | }, 71 | }, 72 | }, 73 | cfg.inventory.params_file, 74 | ) 75 | yaml_dump({"classes": []}, cfg.inventory.global_config_dir / "commodore.yml") 76 | 77 | cfg.register_component(c) 78 | create_component_symlinks(cfg, c) 79 | cfg.register_component_aliases({"test-1": "test"}) 80 | create_alias_symlinks(cfg, 
c, "test-1") 81 | 82 | for alias, component in cfg.get_component_aliases().items(): 83 | update_target(cfg, alias, component=component) 84 | 85 | return cfg 86 | 87 | 88 | def test_render_inventory_instantiated_component_no_custom_config(tmp_path: Path): 89 | cfg = _setup(tmp_path) 90 | 91 | inv = kapitan_inventory(cfg) 92 | 93 | assert inv["test-1"]["parameters"]["test"]["instance_value"] == "test-1" 94 | 95 | pass 96 | 97 | 98 | def test_render_inventory_instantiated_component_custom_config(tmp_path: Path): 99 | cfg = _setup(tmp_path) 100 | # Add parameter in test_1 101 | params = yaml_load(cfg.inventory.params_file) 102 | params["parameters"]["test_1"] = {"instance_value": "testing"} 103 | yaml_dump(params, cfg.inventory.params_file) 104 | 105 | inv = kapitan_inventory(cfg) 106 | 107 | assert inv["test-1"]["parameters"]["test"]["instance_value"] == "testing" 108 | -------------------------------------------------------------------------------- /tests/test_tokencache.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit-tests for tokencache 3 | """ 4 | 5 | import json 6 | 7 | from xdg.BaseDirectory import xdg_cache_home 8 | from commodore import tokencache 9 | 10 | 11 | def test_get_token(fs): 12 | fs.create_file( 13 | f"{xdg_cache_home}/commodore/token", 14 | contents='{"https://syn.example.com":{"id_token": "thetoken"},' 15 | + '"https://syn2.example.com":{"id_token": "token2"}}', 16 | ) 17 | assert tokencache.get("https://syn.example.com") == {"id_token": "thetoken"} 18 | assert tokencache.get("https://syn2.example.com") == {"id_token": "token2"} 19 | 20 | 21 | def test_get_nonexistent_token(fs): 22 | fs.create_file( 23 | f"{xdg_cache_home}/commodore/token", 24 | contents='{"https://syn.example.com":{"id_token":"token"},' 25 | + '"https://syn2.example.com":{"id_token":"token2"}}', 26 | ) 27 | assert tokencache.get("https://syn3.example.com") == {} 28 | 29 | 30 | def test_ignore_broken_json_cache(fs): 31 | 
fs.create_file( 32 | f"{xdg_cache_home}/commodore/token", 33 | contents='{"https://syn.example.com":{"id_token":"token"}', 34 | ) 35 | assert tokencache.get("https://syn.example.com") == {} 36 | 37 | 38 | def test_save_token(fs): 39 | tokencache.save("https://syn.example.com", {"id_token": "save"}) 40 | tokencache.save("https://syn2.example.com", {"id_token": "save2"}) 41 | 42 | with open(f"{xdg_cache_home}/commodore/token") as f: 43 | cache = json.load(f) 44 | assert cache == { 45 | "https://syn.example.com": {"id_token": "save"}, 46 | "https://syn2.example.com": {"id_token": "save2"}, 47 | } 48 | 49 | 50 | def test_save_and_get_token(fs): 51 | tokencache.save("https://syn.example.com", {"id_token": "token"}) 52 | tokencache.save("https://syn2.example.com", {"id_token": "token2"}) 53 | tokencache.save("https://syn3.example.com", {"id_token": "token3"}) 54 | tokencache.save("https://syn2.example.com", {"id_token": "Foo"}) 55 | 56 | assert tokencache.get("https://syn.example.com") == {"id_token": "token"} 57 | assert tokencache.get("https://syn2.example.com") == {"id_token": "Foo"} 58 | assert tokencache.get("https://syn3.example.com") == {"id_token": "token3"} 59 | 60 | 61 | def test_drop_old_cache_entry(fs): 62 | fs.create_file( 63 | f"{xdg_cache_home}/commodore/token", 64 | contents='{"https://syn.example.com":"thetoken",' 65 | + '"https://syn2.example.com":{"id_token": "token2"}}', 66 | ) 67 | assert tokencache.get("https://syn.example.com") == {} 68 | assert tokencache.get("https://syn2.example.com") == {"id_token": "token2"} 69 | 70 | 71 | def test_update_broken_json_cache(fs): 72 | cachef = fs.create_file( 73 | f"{xdg_cache_home}/commodore/token", 74 | contents='{"https://syn.example.com":{"id_token":"token"}', 75 | ) 76 | tokencache.save("https://syn2.example.com", {"id_token": "token2"}) 77 | assert tokencache.get("https://syn2.example.com") == {"id_token": "token2"} 78 | assert ( 79 | cachef.contents 80 | == '{\n "https://syn2.example.com": {\n "id_token": 
"token2"\n }\n}' 81 | ) 82 | 83 | tokencache.save("https://syn.example.com", {"id_token": "token"}) 84 | 85 | assert tokencache.get("https://syn2.example.com") == {"id_token": "token2"} 86 | assert tokencache.get("https://syn.example.com") == {"id_token": "token"} 87 | -------------------------------------------------------------------------------- /tests/testdata/catalog_list/id_multi: -------------------------------------------------------------------------------- 1 | c-bar 2 | c-foo 3 | c-test 4 | -------------------------------------------------------------------------------- /tests/testdata/catalog_list/id_single: -------------------------------------------------------------------------------- 1 | c-test 2 | -------------------------------------------------------------------------------- /tests/testdata/catalog_list/json_multi: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "id": "c-bar", 4 | "tenant": "t-foo", 5 | "displayName": "Bar", 6 | "facts": { 7 | "cloud": "local", 8 | "distribution": "k3s" 9 | }, 10 | "gitRepo": { 11 | "url": "ssh://git@github.com/projectsyn/test-cluster-catalog.git" 12 | } 13 | }, 14 | { 15 | "id": "c-foo", 16 | "tenant": "t-foo", 17 | "displayName": "Foo", 18 | "facts": { 19 | "cloud": "local", 20 | "distribution": "k3s" 21 | }, 22 | "gitRepo": { 23 | "url": "ssh://git@github.com/projectsyn/test-cluster-catalog.git" 24 | } 25 | }, 26 | { 27 | "id": "c-test", 28 | "tenant": "t-test-tenant", 29 | "displayName": "Test cluster", 30 | "facts": { 31 | "cloud": "local", 32 | "distribution": "k3s" 33 | }, 34 | "gitRepo": { 35 | "url": "ssh://git@github.com/projectsyn/test-cluster-catalog.git" 36 | } 37 | } 38 | ] 39 | -------------------------------------------------------------------------------- /tests/testdata/catalog_list/pretty_multi: -------------------------------------------------------------------------------- 1 | ID DISPLAY NAME TENANT 2 | c-bar Bar t-foo 3 | c-foo Foo t-foo 4 | 
c-test Test cluster t-test-tenant 5 | -------------------------------------------------------------------------------- /tests/testdata/catalog_list/yaml_multi: -------------------------------------------------------------------------------- 1 | - displayName: Bar 2 | facts: 3 | cloud: local 4 | distribution: k3s 5 | gitRepo: 6 | url: ssh://git@github.com/projectsyn/test-cluster-catalog.git 7 | id: c-bar 8 | tenant: t-foo 9 | - displayName: Foo 10 | facts: 11 | cloud: local 12 | distribution: k3s 13 | gitRepo: 14 | url: ssh://git@github.com/projectsyn/test-cluster-catalog.git 15 | id: c-foo 16 | tenant: t-foo 17 | - displayName: Test cluster 18 | facts: 19 | cloud: local 20 | distribution: k3s 21 | gitRepo: 22 | url: ssh://git@github.com/projectsyn/test-cluster-catalog.git 23 | id: c-test 24 | tenant: t-test-tenant 25 | 26 | -------------------------------------------------------------------------------- /tests/testdata/catalog_list/yml_multi: -------------------------------------------------------------------------------- 1 | - displayName: Bar 2 | facts: 3 | cloud: local 4 | distribution: k3s 5 | gitRepo: 6 | url: ssh://git@github.com/projectsyn/test-cluster-catalog.git 7 | id: c-bar 8 | tenant: t-foo 9 | - displayName: Foo 10 | facts: 11 | cloud: local 12 | distribution: k3s 13 | gitRepo: 14 | url: ssh://git@github.com/projectsyn/test-cluster-catalog.git 15 | id: c-foo 16 | tenant: t-foo 17 | - displayName: Test cluster 18 | facts: 19 | cloud: local 20 | distribution: k3s 21 | gitRepo: 22 | url: ssh://git@github.com/projectsyn/test-cluster-catalog.git 23 | id: c-test 24 | tenant: t-test-tenant 25 | 26 | -------------------------------------------------------------------------------- /tests/testdata/github/projectsyn-package-foo-response-issue-comment.json: -------------------------------------------------------------------------------- 1 | { 2 | "url": "https://api.github.com/repos/projectsyn/package-foo/issues/1/comments/1261906463", 3 | "html_url": 
"https://github.com/projectsyn/package-foo/pull/1#issuecomment-1261906463", 4 | "issue_url": "https://api.github.com/repos/projectsyn/package-foo/issues/1", 5 | "id": 1261906463, 6 | "node_id": "IC_kwDOHyWSm85LNyof", 7 | "user": { 8 | "login": "vshnbot", 9 | "id": 55274381, 10 | "node_id": "MDQ6VXNlcjU1Mjc0Mzgx", 11 | "avatar_url": "https://avatars.githubusercontent.com/u/55274381?u=40686ca6cd7f24c09e9ebbeed5258e613f6c6baa&v=4", 12 | "gravatar_id": "", 13 | "url": "https://api.github.com/users/vshnbot", 14 | "html_url": "https://github.com/vshnbot", 15 | "followers_url": "https://api.github.com/users/vshnbot/followers", 16 | "following_url": "https://api.github.com/users/vshnbot/following{/other_user}", 17 | "gists_url": "https://api.github.com/users/vshnbot/gists{/gist_id}", 18 | "starred_url": "https://api.github.com/users/vshnbot/starred{/owner}{/repo}", 19 | "subscriptions_url": "https://api.github.com/users/vshnbot/subscriptions", 20 | "organizations_url": "https://api.github.com/users/vshnbot/orgs", 21 | "repos_url": "https://api.github.com/users/vshnbot/repos", 22 | "events_url": "https://api.github.com/users/vshnbot/events{/privacy}", 23 | "received_events_url": "https://api.github.com/users/vshnbot/received_events", 24 | "type": "User", 25 | "site_admin": false 26 | }, 27 | "created_at": "2022-09-29T07:56:56Z", 28 | "updated_at": "2022-09-29T07:56:56Z", 29 | "author_association": "NONE", 30 | "body": "Foo bar", 31 | "reactions": { 32 | "url": "https://api.github.com/repos/projectsyn/package-foo/issues/comments/1261906463/reactions", 33 | "total_count": 0, 34 | "+1": 0, 35 | "-1": 0, 36 | "laugh": 0, 37 | "hooray": 0, 38 | "confused": 0, 39 | "heart": 0, 40 | "rocket": 0, 41 | "eyes": 0 42 | }, 43 | "performed_via_github_app": null 44 | } 45 | -------------------------------------------------------------------------------- /tests/testdata/github/projectsyn-package-foo-response-issue.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "url": "https://api.github.com/repos/projectsyn/package-foo/issues/1", 3 | "repository_url": "https://api.github.com/repos/projectsyn/package-foo", 4 | "labels_url": "https://api.github.com/repos/projectsyn/package-foo/issues/1/labels{/name}", 5 | "comments_url": "https://api.github.com/repos/projectsyn/package-foo/issues/1/comments", 6 | "events_url": "https://api.github.com/repos/projectsyn/package-foo/issues/1/events", 7 | "html_url": "https://github.com/projectsyn/package-foo/pull/1", 8 | "id": 1331860795, 9 | "node_id": "PR_kwDOHyWSm8480Enh", 10 | "number": 1, 11 | "title": "Update from package template", 12 | "user": { 13 | "login": "simu", 14 | "id": 393160, 15 | "node_id": "MDQ6VXNlcjM5MzE2MA==", 16 | "avatar_url": "https://avatars.githubusercontent.com/u/393160?v=4", 17 | "gravatar_id": "", 18 | "url": "https://api.github.com/users/simu", 19 | "html_url": "https://github.com/simu", 20 | "followers_url": "https://api.github.com/users/simu/followers", 21 | "following_url": "https://api.github.com/users/simu/following{/other_user}", 22 | "gists_url": "https://api.github.com/users/simu/gists{/gist_id}", 23 | "starred_url": "https://api.github.com/users/simu/starred{/owner}{/repo}", 24 | "subscriptions_url": "https://api.github.com/users/simu/subscriptions", 25 | "organizations_url": "https://api.github.com/users/simu/orgs", 26 | "repos_url": "https://api.github.com/users/simu/repos", 27 | "events_url": "https://api.github.com/users/simu/events{/privacy}", 28 | "received_events_url": "https://api.github.com/users/simu/received_events", 29 | "type": "User", 30 | "site_admin": false 31 | }, 32 | "labels": [ 33 | { 34 | "id": 4405886391, 35 | "node_id": "LA_kwDOHyWSm88AAAABBpx9tw", 36 | "url": "https://api.github.com/repos/projectsyn/package-foo/labels/template-sync", 37 | "name": "template-sync", 38 | "color": "ededed", 39 | "default": false, 40 | "description": null 41 | 
}, 42 | { 43 | "id": 4409686608, 44 | "node_id": "LA_kwDOHyWSm88AAAABBtZ6UA", 45 | "url": "https://api.github.com/repos/projectsyn/package-foo/labels/foo", 46 | "name": "foo", 47 | "color": "ededed", 48 | "default": false, 49 | "description": null 50 | }, 51 | { 52 | "id": 4409686609, 53 | "node_id": "LA_kwDOHyWSm88AAAABBtZ6UQ", 54 | "url": "https://api.github.com/repos/projectsyn/package-foo/labels/bar", 55 | "name": "bar", 56 | "color": "ededed", 57 | "default": false, 58 | "description": null 59 | } 60 | ], 61 | "state": "open", 62 | "locked": false, 63 | "assignee": null, 64 | "assignees": [ 65 | 66 | ], 67 | "milestone": null, 68 | "comments": 1, 69 | "created_at": "2022-08-08T13:16:28Z", 70 | "updated_at": "2022-09-29T07:56:56Z", 71 | "closed_at": null, 72 | "author_association": "OWNER", 73 | "active_lock_reason": null, 74 | "draft": false, 75 | "pull_request": { 76 | "url": "https://api.github.com/repos/projectsyn/package-foo/pulls/1", 77 | "html_url": "https://github.com/projectsyn/package-foo/pull/1", 78 | "diff_url": "https://github.com/projectsyn/package-foo/pull/1.diff", 79 | "patch_url": "https://github.com/projectsyn/package-foo/pull/1.patch", 80 | "merged_at": null 81 | }, 82 | "body": "Template version: main (cc39fb4)", 83 | "closed_by": null, 84 | "reactions": { 85 | "url": "https://api.github.com/repos/projectsyn/package-foo/issues/1/reactions", 86 | "total_count": 0, 87 | "+1": 0, 88 | "-1": 0, 89 | "laugh": 0, 90 | "hooray": 0, 91 | "confused": 0, 92 | "heart": 0, 93 | "rocket": 0, 94 | "eyes": 0 95 | }, 96 | "timeline_url": "https://api.github.com/repos/projectsyn/package-foo/issues/1/timeline", 97 | "performed_via_github_app": null, 98 | "state_reason": null 99 | } 100 | -------------------------------------------------------------------------------- /tests/testdata/inventory_apps/classes/test.yml: -------------------------------------------------------------------------------- 1 | classes: 2 | - test.params 3 | - test.specific 4 | 
-------------------------------------------------------------------------------- /tests/testdata/inventory_apps/classes/test/params.yml: -------------------------------------------------------------------------------- 1 | applications: 2 | - app1 3 | - app2 4 | -------------------------------------------------------------------------------- /tests/testdata/inventory_apps/classes/test/specific.yml: -------------------------------------------------------------------------------- 1 | applications: 2 | - ~app2 3 | -------------------------------------------------------------------------------- /tests/testdata/inventory_apps/targets/test.yml: -------------------------------------------------------------------------------- 1 | classes: 2 | - test 3 | -------------------------------------------------------------------------------- /tests/testdata/inventory_relative_refs/classes/test.yml: -------------------------------------------------------------------------------- 1 | classes: 2 | - test.specific 3 | -------------------------------------------------------------------------------- /tests/testdata/inventory_relative_refs/classes/test/common.yml: -------------------------------------------------------------------------------- 1 | parameters: 2 | test: 3 | key1: value1 4 | -------------------------------------------------------------------------------- /tests/testdata/inventory_relative_refs/classes/test/specific.yml: -------------------------------------------------------------------------------- 1 | classes: 2 | - .common 3 | -------------------------------------------------------------------------------- /tests/testdata/inventory_relative_refs/targets/test.yml: -------------------------------------------------------------------------------- 1 | classes: 2 | - test 3 | -------------------------------------------------------------------------------- /tests/testdata/inventory_yml_yaml/classes/a.yml: 
-------------------------------------------------------------------------------- 1 | parameters: 2 | test: 3 | key1: value1 4 | -------------------------------------------------------------------------------- /tests/testdata/inventory_yml_yaml/classes/b.yaml: -------------------------------------------------------------------------------- 1 | parameters: 2 | test: 3 | key2: value2 4 | -------------------------------------------------------------------------------- /tests/testdata/inventory_yml_yaml/targets/test.yml: -------------------------------------------------------------------------------- 1 | classes: 2 | - a 3 | - b 4 | -------------------------------------------------------------------------------- /tools/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | # Make sure that if we are using an arbitrary UID that it appears in /etc/passwd, 6 | # otherwise this will cause issues with things like cloning with git+ssh 7 | # reference: https://access.redhat.com/documentation/en-us/openshift_container_platform/3.11/html/creating_images/creating-images-guidelines#use-uid 8 | 9 | # Ensure that we're using the correct libnss_wrapper.so 10 | # This should only ever fail in CI 11 | readonly libnss_wrapper_so="/usr/lib/$(uname -m)-linux-gnu/libnss_wrapper.so" 12 | if [ ! -f "${libnss_wrapper_so}" ]; then 13 | echo "${libnss_wrapper_so} doesn't exist." 14 | exit 1 15 | fi 16 | 17 | export LD_PRELOAD="${libnss_wrapper_so}" 18 | export NSS_WRAPPER_PASSWD=/tmp/passwd 19 | export NSS_WRAPPER_GROUP=/etc/group 20 | 21 | if ! whoami &> /dev/null; then 22 | echo "commodore:x:$(id -u):0:commodore user:${HOME}:/sbin/nologin" > "${NSS_WRAPPER_PASSWD}" 23 | fi 24 | 25 | if [ ! -z "${SSH_PRIVATE_KEY}" ]; then 26 | if [ ! 
-d "/app/.ssh" ]; then 27 | mkdir /app/.ssh 28 | fi 29 | echo "${SSH_PRIVATE_KEY}" > /app/.ssh/id 30 | chmod 0400 /app/.ssh/id 31 | fi 32 | 33 | if [ -z "${SSH_AUTH_SOCK}" ]; then 34 | eval "$(ssh-agent)" 35 | ssh-add $(grep -rlE 'BEGIN .+ PRIVATE KEY' /app/.ssh) || echo "No SSH keys were added" 36 | fi 37 | 38 | exec "$@" 39 | -------------------------------------------------------------------------------- /tools/install-jb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eu 3 | 4 | VERSION=${1:-} 5 | 6 | if [ "x${VERSION}" = "x" ]; then 7 | echo "Usage: $0 VERSION" 8 | exit 3 9 | fi 10 | 11 | ARCH=$(uname -m) 12 | case $ARCH in 13 | armv7*) ARCH="arm";; 14 | aarch64|arm64) ARCH="arm64";; 15 | x86_64|amd64) ARCH="amd64";; 16 | *) 17 | echo "Unsupported arch: $ARCH" 18 | exit 5 19 | ;; 20 | esac 21 | 22 | curl -fsSLo /usr/local/bin/jb \ 23 | "https://github.com/projectsyn/jsonnet-bundler/releases/download/$VERSION/jb_linux_$ARCH" 24 | 25 | chmod +x /usr/local/bin/jb 26 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | isolated_build = true 3 | envlist = 4 | flake8 5 | pylint 6 | bandit 7 | mypy 8 | black 9 | py3{10,11,12}{,-bench} 10 | 11 | [testenv] 12 | description = Unit tests and doctests 13 | changedir = {envtmpdir} 14 | deps = 15 | cli-test-helpers 16 | pytest 17 | pytest-mock 18 | responses 19 | pyfakefs 20 | !bench: pytest-xdist 21 | bench: pytest-benchmark 22 | commands = \ 23 | pytest {toxinidir}/tests \ 24 | bench: -m bench \ 25 | !bench: -m "not bench and not integration" -n auto --dist worksteal \ 26 | {posargs} 27 | passenv = 28 | SSH_AUTH_SOCK 29 | HOME 30 | PATH 31 | allowlist_externals = 32 | curl 33 | patch 34 | {toxinidir}/tools/patch-kapitan.sh 35 | 36 | [testenv:bandit] 37 | description = PyCQA security linter 38 | deps = bandit 39 | commands = 
bandit -r --ini {toxinidir}/tox.ini {toxinidir}/commodore/ 40 | 41 | [testenv:flake8] 42 | description = Static code analysis and code style 43 | deps = flake8 44 | commands = flake8 {toxinidir} 45 | 46 | [testenv:pylint] 47 | description = Check for errors and code smells 48 | deps = 49 | pylint 50 | commands = 51 | pylint --rcfile={toxinidir}/tox.ini {toxinidir}/commodore 52 | 53 | [testenv:mypy] 54 | description = Run static analysis for typing 55 | deps = 56 | mypy 57 | types-PyYAML 58 | types-requests 59 | types-toml 60 | commands = 61 | # We ignore missing imports since some of our dependencies do not have type information. 62 | # Someday, it might be useful to try and import headers for them. ~chrisglass 63 | mypy --ignore-missing-imports --show-error-codes {toxinidir}/commodore 64 | 65 | [testenv:black] 66 | description = Ensure black formatting is applied 67 | deps = 68 | black~=25.1 69 | commands = 70 | black --check {toxinidir} 71 | 72 | [bandit] 73 | exclude = .cache,.git,.tox,build,dist,docs,tests 74 | targets = . 
75 | skips = B603,B607 76 | 77 | [flake8] 78 | exclude = *.egg*,.git,.tox,venv 79 | max-line-length = 120 80 | extend-ignore = E701 81 | 82 | [pylint] 83 | [pylint.main] 84 | output-format = colorized 85 | reports = no 86 | disable = invalid-name, missing-function-docstring,missing-class-docstring,missing-module-docstring,fixme 87 | min-public-methods = 1 88 | ignore-paths= [ 'commodore/cruft/.*' ] 89 | 90 | [pylint.similarities] 91 | min-similarity-lines=10 92 | 93 | [pytest] 94 | python_files = 95 | test_*.py 96 | bench_*.py 97 | python_functions = 98 | test_* 99 | bench_* 100 | markers = 101 | bench 102 | integration 103 | addopts = 104 | --color=yes 105 | --doctest-modules 106 | --ignore=build 107 | --ignore=commodore/component-template 108 | --ignore=commodore/filters 109 | --ignore=commodore/lib 110 | --ignore=dist 111 | --strict-markers 112 | --verbose 113 | -------------------------------------------------------------------------------- /tox.mk: -------------------------------------------------------------------------------- 1 | .PHONY: tox lint lint_flake8 lint_pylint lint_bandit lint_black 2 | 3 | TOX_COMMAND = poetry run tox 4 | 5 | tox: 6 | $(TOX_COMMAND) 7 | 8 | lint_flake8: 9 | $(TOX_COMMAND) -e flake8 10 | 11 | lint_pylint: 12 | $(TOX_COMMAND) -e pylint 13 | 14 | lint_bandit: 15 | $(TOX_COMMAND) -e bandit 16 | 17 | lint_mypy: 18 | $(TOX_COMMAND) -e mypy 19 | 20 | lint_black: 21 | $(TOX_COMMAND) -e black 22 | 23 | lint: lint_flake8 lint_pylint lint_bandit lint_mypy lint_black 24 | 25 | .PHONY: lintenv_flake8 lintenv_pylint lintenv_bandit lintenv_mypy lintenv_black 26 | 27 | lintenv_flake8: 28 | $(TOX_COMMAND) -e flake8 --notest 29 | 30 | lintenv_pylint: 31 | $(TOX_COMMAND) -e pylint --notest 32 | 33 | lintenv_bandit: 34 | $(TOX_COMMAND) -e bandit --notest 35 | 36 | lintenv_mypy: 37 | $(TOX_COMMAND) -e mypy --notest 38 | 39 | lintenv_black: 40 | $(TOX_COMMAND) -e black --notest 41 | 42 | .PHONY: test_py3.10 test_py3.11 test_py3.12 43 | 44 | 
test_py3.10: 45 | $(TOX_COMMAND) -e py310 46 | 47 | test_py3.11: 48 | $(TOX_COMMAND) -e py311 49 | 50 | test_py3.12: 51 | $(TOX_COMMAND) -e py312 52 | 53 | .PHONY: testenv_py3.10 testenv_py3.11 testenv_py3.12 54 | 55 | testenv_py3.10: 56 | $(TOX_COMMAND) -e py310 --notest 57 | 58 | testenv_py3.11: 59 | $(TOX_COMMAND) -e py311 --notest 60 | 61 | testenv_py3.12: 62 | $(TOX_COMMAND) -e py312 --notest 63 | 64 | .PHONY: bench_py3.10 bench_py3.11 bench_py3.12 65 | 66 | bench_py3.10: 67 | $(TOX_COMMAND) -e py310-bench 68 | 69 | bench_py3.11: 70 | $(TOX_COMMAND) -e py311-bench 71 | 72 | bench_py3.12: 73 | $(TOX_COMMAND) -e py312-bench 74 | 75 | .PHONY: benchenv_py3.10 benchenv_py3.11 benchenv_py3.12 76 | 77 | benchenv_py3.10: 78 | $(TOX_COMMAND) -e py310-bench --notest 79 | 80 | benchenv_py3.11: 81 | $(TOX_COMMAND) -e py311-bench --notest 82 | 83 | benchenv_py3.12: 84 | $(TOX_COMMAND) -e py312-bench --notest 85 | --------------------------------------------------------------------------------