├── .flake8
├── .github
│   ├── actions
│   │   └── semantic-release
│   │       ├── action.yaml
│   │       └── semantic-release.sh
│   └── workflows
│       ├── build-test.yaml
│       ├── fix-style.yaml
│       ├── pr.yaml
│       ├── release.yaml
│       └── semantic-pr-title.yaml
├── .gitignore
├── .pre-commit-config.yaml
├── .pre-commit-hooks.yaml
├── .releaserc.yaml
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── README.md
├── ci
│   ├── build-test
│   │   ├── conda.sh
│   │   └── wheel.sh
│   ├── output-release-version.sh
│   ├── publish
│   │   ├── conda.sh
│   │   └── wheel.sh
│   └── update-versions.sh
├── docs
│   ├── Makefile
│   ├── make.bat
│   └── source
│       ├── _templates
│       │   ├── class.rst
│       │   └── module.rst
│       ├── api_docs
│       │   └── index.rst
│       ├── conf.py
│       └── index.rst
├── package-lock.json
├── package.json
├── pyproject.toml
├── recipe
│   └── meta.yaml
├── renovate.json
├── src
│   └── rapids_dependency_file_generator
│       ├── __init__.py
│       ├── _cli.py
│       ├── _config.py
│       ├── _constants.py
│       ├── _rapids_dependency_file_generator.py
│       ├── _rapids_dependency_file_validator.py
│       ├── _version.py
│       ├── py.typed
│       └── schema.json
└── tests
    ├── conftest.py
    ├── examples
    │   ├── conda-minimal
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           ├── build_cuda-115_arch-x86_64.yaml
    │   │           └── build_cuda-116_arch-x86_64.yaml
    │   ├── duplicate-specific-matrix-entries
    │   │   └── dependencies.yaml
    │   ├── integration
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           ├── all_cuda-115.yaml
    │   │           ├── all_cuda-116.yaml
    │   │           ├── requirements_all_cuda-115.txt
    │   │           └── requirements_all_cuda-116.txt
    │   ├── invalid
    │   │   ├── invalid-requirement
    │   │   │   └── dependencies.yaml
    │   │   └── pip-no-list
    │   │       └── dependencies.yaml
    │   ├── matrix-glob
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           ├── dev_cuda-100.yaml
    │   │           ├── dev_cuda-118.yaml
    │   │           └── dev_cuda-120.yaml
    │   ├── matrix-null-item
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           ├── dev_cuda-115_arch-arm64.yaml
    │   │           ├── dev_cuda-115_arch-arm64_py-38.yaml
    │   │           ├── dev_cuda-115_arch-x86_64.yaml
    │   │           └── dev_cuda-115_arch-x86_64_py-38.yaml
    │   ├── matrix
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           ├── dev_cuda-115_arch-arm64_py-38.yaml
    │   │           ├── dev_cuda-115_arch-arm64_py-39.yaml
    │   │           ├── dev_cuda-115_arch-x86_64_py-38.yaml
    │   │           ├── dev_cuda-115_arch-x86_64_py-39.yaml
    │   │           ├── dev_cuda-116_arch-arm64_py-38.yaml
    │   │           ├── dev_cuda-116_arch-arm64_py-39.yaml
    │   │           ├── dev_cuda-116_arch-x86_64_py-38.yaml
    │   │           └── dev_cuda-116_arch-x86_64_py-39.yaml
    │   ├── nested-pyproject
    │   │   ├── dependencies.yaml
    │   │   ├── pyproject.toml
    │   │   └── some
    │   │       └── cool
    │   │           └── code
    │   │               └── pyproject.toml
    │   ├── no-matrix
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           └── checks.yaml
    │   ├── no-specific-match
    │   │   └── dependencies.yaml
    │   ├── overlapping-deps
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           └── pyproject.toml
    │   ├── prepend-channels
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           ├── build_cuda-115_arch-x86_64.yaml
    │   │           └── build_cuda-116_arch-x86_64.yaml
    │   ├── pyproject-no-extras
    │   │   └── dependencies.yaml
    │   ├── pyproject_bad_key
    │   │   └── dependencies.yaml
    │   ├── pyproject_matrix
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           └── pyproject.toml
    │   ├── pyproject_matrix_fallback
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           └── pyproject.toml
    │   ├── pyproject_matrix_multi
    │   │   └── dependencies.yaml
    │   ├── requirements-minimal
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           ├── pyproject.toml
    │   │           ├── requirements_all_cuda-115_arch-x86_64.txt
    │   │           └── requirements_all_cuda-116_arch-x86_64.txt
    │   ├── requirements-pip-dict
    │   │   └── dependencies.yaml
    │   ├── specific-fallback-first
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           ├── all_cuda-115.yaml
    │   │           └── all_cuda-118.yaml
    │   ├── specific-fallback-multiple-matches
    │   │   ├── dependencies.yaml
    │   │   └── output
    │   │       └── expected
    │   │           ├── all_cuda-115.yaml
    │   │           └── all_cuda-118.yaml
    │   └── specific-fallback
    │       ├── dependencies.yaml
    │       └── output
    │           └── expected
    │               ├── all_cuda-115.yaml
    │               └── all_cuda-118.yaml
    ├── test_cli.py
    ├── test_config.py
    ├── test_examples.py
    ├── test_rapids_dependency_file_generator.py
    └── test_schema.py
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length = 120
3 | select = E,F,W
4 | ignore = E123,E126,E203,E226,E241,E704,W503,W504
5 |
--------------------------------------------------------------------------------
/.github/actions/semantic-release/action.yaml:
--------------------------------------------------------------------------------
1 | name: Run Semantic Release
2 | inputs:
3 | GITHUB_TOKEN:
4 | required: true
5 | PYPI_TOKEN: {}
6 | ANACONDA_STABLE_TOKEN: {}
7 | ANACONDA_NIGHTLY_TOKEN: {}
8 | DRY_RUN:
9 | default: "false"
10 | outputs:
11 | release_version:
12 | value: ${{ steps.semantic-release.outputs.release_version }}
13 |
14 | runs:
15 | using: composite
16 | steps:
17 | - name: Setup Node.js
18 | uses: actions/setup-node@v4
19 | with:
20 | node-version: lts/*
21 | - name: Run Semantic Release
22 | id: semantic-release
23 | shell: bash
24 | env:
25 | GITHUB_TOKEN: ${{ inputs.GITHUB_TOKEN }}
26 | PYPI_TOKEN: ${{ inputs.PYPI_TOKEN }}
27 | ANACONDA_STABLE_TOKEN: ${{ inputs.ANACONDA_STABLE_TOKEN }}
28 | ANACONDA_NIGHTLY_TOKEN: ${{ inputs.ANACONDA_NIGHTLY_TOKEN }}
29 | DRY_RUN: ${{ inputs.DRY_RUN }}
30 | run: ${GITHUB_ACTION_PATH}/semantic-release.sh
31 |
--------------------------------------------------------------------------------
/.github/actions/semantic-release/semantic-release.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 |
4 | FLAGS=()
5 | if [[ "${DRY_RUN}" == "true" ]]; then
6 | FLAGS+=("--dry-run")
7 | fi
8 | npm install
9 | npx semantic-release "${FLAGS[@]}"
10 |
--------------------------------------------------------------------------------
/.github/workflows/build-test.yaml:
--------------------------------------------------------------------------------
1 | on:
2 | workflow_call:
3 | inputs:
4 | release_version:
5 | type: string
6 |
7 | env:
8 | OUTPUT_DIR: /tmp/output
9 |
10 | jobs:
11 | conda:
12 | runs-on: ubuntu-latest
13 | container: condaforge/miniforge3:latest
14 | steps:
15 | - uses: actions/checkout@v4
16 | with:
17 | fetch-depth: 0
18 | persist-credentials: false
19 | - run: ./ci/build-test/conda.sh
20 | env:
21 | RELEASE_VERSION: ${{ inputs.release_version }}
22 | - uses: actions/upload-artifact@v4
23 | with:
24 | name: conda_artifacts
25 | path: ${{ env.OUTPUT_DIR }}
26 | wheel:
27 | runs-on: ubuntu-latest
28 | container: python:3.9
29 | steps:
30 | - uses: actions/checkout@v4
31 | with:
32 | fetch-depth: 0
33 | persist-credentials: false
34 | - run: ./ci/build-test/wheel.sh
35 | env:
36 | RELEASE_VERSION: ${{ inputs.release_version }}
37 | - uses: actions/upload-artifact@v4
38 | with:
39 | name: wheel_artifacts
40 | path: ${{ env.OUTPUT_DIR }}
41 |
--------------------------------------------------------------------------------
/.github/workflows/fix-style.yaml:
--------------------------------------------------------------------------------
1 | name: 'Fix Style'
2 | on:
3 | issue_comment:
4 | types: [created]
5 | pull_request_target:
6 |
7 | jobs:
8 | fix-style:
9 | if: |
10 | github.repository_owner == 'rapidsai' &&
11 | (
12 | github.event_name == 'pull_request_target' ||
13 | (
14 | github.event_name == 'issue_comment' &&
15 | github.event.issue.pull_request &&
16 | startsWith(github.event.comment.body, '/fix style')
17 | )
18 | )
19 | runs-on: ubuntu-latest
20 | steps:
21 | - name: Get PR Information
22 | id: pr-info
23 | run: |
24 | set -x
25 | PR_NUM=${{ github.event.number || github.event.issue.number }}
26 | echo "PR_INFO=$(gh -R ${{ github.repository }} pr view ${PR_NUM} --json headRepository,headRepositoryOwner,headRefName | jq -c)" >> $GITHUB_OUTPUT
27 | env:
28 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
29 | - uses: actions/checkout@v4
30 | with:
31 | repository: '${{ fromJSON(steps.pr-info.outputs.PR_INFO).headRepositoryOwner.login }}/${{ fromJSON(steps.pr-info.outputs.PR_INFO).headRepository.name }}'
32 | ref: '${{ fromJSON(steps.pr-info.outputs.PR_INFO).headRefName }}'
33 | - uses: actions/setup-python@v5
34 | - run: python -m pip install pre-commit
35 | - run: pre-commit run --all-files --show-diff-on-failure --color=always
36 | - uses: EndBug/add-and-commit@v9
37 | # Only commit changes if triggered by a comment starting with '/fix style'
38 | if: |
39 | failure() &&
40 | github.event_name == 'issue_comment' &&
41 | github.event.issue.pull_request &&
42 | startsWith(github.event.comment.body, '/fix style')
43 | with:
44 | fetch: false
45 | message: 'Fix style.'
46 |
--------------------------------------------------------------------------------
/.github/workflows/pr.yaml:
--------------------------------------------------------------------------------
1 | name: pr
2 |
3 | on:
4 | pull_request:
5 |
6 | concurrency:
7 | group: pr-${{ github.workflow }}-${{ github.ref }}
8 | cancel-in-progress: true
9 |
10 | jobs:
11 | build-test:
12 | uses: ./.github/workflows/build-test.yaml
13 |
--------------------------------------------------------------------------------
/.github/workflows/release.yaml:
--------------------------------------------------------------------------------
1 | name: release
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | concurrency:
9 | # Limit concurrency to avoid race conditions, but don't cancel in progress
10 | # release since that could cause issues
11 | group: release-${{ github.workflow }}-${{ github.ref }}
12 | cancel-in-progress: false
13 |
14 | jobs:
15 | check-release:
16 | runs-on: ubuntu-latest
17 | outputs:
18 | release_version: ${{ steps.check-release.outputs.release_version }}
19 | steps:
20 | - name: Checkout
21 | uses: actions/checkout@v4
22 | with:
23 | fetch-depth: 0
24 | persist-credentials: false
25 | - uses: ./.github/actions/semantic-release
26 | id: check-release
27 | with:
28 | GITHUB_TOKEN: ${{ secrets.WORKFLOW_TOKEN }}
29 | DRY_RUN: "true"
30 | build-test:
31 | if: ${{ needs.check-release.outputs.release_version }}
32 | needs: [check-release]
33 | uses: ./.github/workflows/build-test.yaml
34 | with:
35 | release_version: ${{ needs.check-release.outputs.release_version }}
36 | release:
37 | needs: [check-release, build-test]
38 | runs-on: ubuntu-latest
39 | env:
40 | CONDA_OUTPUT_DIR: /tmp/conda_output
41 | WHEEL_OUTPUT_DIR: /tmp/wheel_output
42 | steps:
43 | - name: Checkout
44 | uses: actions/checkout@v4
45 | with:
46 | fetch-depth: 0
47 | persist-credentials: false
48 | - name: Download Conda Artifacts
49 | uses: actions/download-artifact@v4
50 | with:
51 | name: conda_artifacts
52 | path: ${{ env.CONDA_OUTPUT_DIR }}
53 | - name: Download Wheel Artifacts
54 | uses: actions/download-artifact@v4
55 | with:
56 | name: wheel_artifacts
57 | path: ${{ env.WHEEL_OUTPUT_DIR }}
58 | - uses: ./.github/actions/semantic-release
59 | with:
60 | GITHUB_TOKEN: ${{ secrets.WORKFLOW_TOKEN }}
61 | PYPI_TOKEN: ${{ secrets.RAPIDSAI_PYPI_TOKEN }}
62 | ANACONDA_STABLE_TOKEN: ${{ secrets.CONDA_RAPIDSAI_TOKEN }}
63 | ANACONDA_NIGHTLY_TOKEN: ${{ secrets.CONDA_RAPIDSAI_NIGHTLY_TOKEN }}
64 | - name: Trigger CI Images
65 | env:
66 | GH_TOKEN: ${{ secrets.WORKFLOW_TOKEN }}
67 | run: |
68 | gh workflow run push.yaml \
69 | --field upstream_job="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" \
70 | --field upstream_repository=dependency-file-generator \
71 | --repo rapidsai/ci-imgs \
72 | --ref main
73 |
--------------------------------------------------------------------------------
/.github/workflows/semantic-pr-title.yaml:
--------------------------------------------------------------------------------
1 | name: pr-title
2 |
3 | on:
4 | pull_request_target:
5 | types:
6 | - opened
7 | - edited
8 | - synchronize
9 |
10 | concurrency:
11 | group: ${{ github.workflow }}-${{ github.ref }}
12 | cancel-in-progress: true
13 |
14 | jobs:
15 | semantic-pr-title:
16 | runs-on: ubuntu-latest
17 | steps:
18 | - uses: amannn/action-semantic-pull-request@v5
19 | env:
20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
21 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | .pytest_cache
3 | *.egg-info
4 | dist
5 | build
6 | tests/**/actual
7 | .vscode
8 | *.swp
9 | node_modules
10 | docs/source/api_docs/generated/
11 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ci:
2 | autofix_commit_msg: "chore(ci): pre-commit auto fixes"
3 | autoupdate_commit_msg: "chore(ci): pre-commit autoupdate"
4 | repos:
5 | - repo: https://github.com/pre-commit/pre-commit-hooks
6 | rev: 'v5.0.0'
7 | hooks:
8 | - id: end-of-file-fixer
9 | - id: trailing-whitespace
10 | - id: check-builtin-literals
11 | - id: check-executables-have-shebangs
12 | - id: check-json
13 | - id: check-yaml
14 | - id: debug-statements
15 | - id: requirements-txt-fixer
16 | - repo: https://github.com/pre-commit/mirrors-mypy
17 | rev: 'v1.16.0'
18 | hooks:
19 | - id: mypy
20 | additional_dependencies: [types-PyYAML]
21 | args: ["--config-file=pyproject.toml", "src/", "docs/"]
22 | pass_filenames: false
23 | - repo: https://github.com/asottile/pyupgrade
24 | rev: 'v3.20.0'
25 | hooks:
26 | - id: pyupgrade
27 | args:
28 | - --py39-plus
29 | - repo: https://github.com/python-jsonschema/check-jsonschema
30 | rev: 0.33.0
31 | hooks:
32 | - id: check-metaschema
33 | files: ^src/rapids_dependency_file_generator/schema.json$
34 | - id: check-jsonschema
35 | files: ^tests/examples/([^/]*)/dependencies.yaml$
36 | args: ["--schemafile", "src/rapids_dependency_file_generator/schema.json"]
37 | - id: check-github-workflows
38 | - repo: https://github.com/astral-sh/ruff-pre-commit
39 | rev: v0.11.13
40 | hooks:
41 | - id: ruff
42 | files: src/.*$
43 | - id: ruff-format
44 | files: src/.*$
45 | default_language_version:
46 | python: python3
47 |
--------------------------------------------------------------------------------
/.pre-commit-hooks.yaml:
--------------------------------------------------------------------------------
1 | - id: rapids-dependency-file-generator
2 | name: RAPIDS dependency file generator
3 | description: Update dependency files according to the RAPIDS dependencies spec
4 | entry: rapids-dependency-file-generator
5 | language: python
6 | files: "dependencies.yaml"
7 | pass_filenames: false
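
# Example (for illustration only): a downstream repository could reference this
# hook from its own .pre-commit-config.yaml roughly as follows. The `rev` shown
# here is a placeholder and should be pinned to an actual release tag.
#
#   repos:
#     - repo: https://github.com/rapidsai/dependency-file-generator
#       rev: v1.18.1
#       hooks:
#         - id: rapids-dependency-file-generator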
8 |
--------------------------------------------------------------------------------
/.releaserc.yaml:
--------------------------------------------------------------------------------
1 | branches:
2 | - main
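# Each "@semantic-release/exec" entry below hooks a release lifecycle step up to
# one of the scripts under ci/ (emitting the release version, updating version
# strings, and publishing the wheel and conda packages).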
3 | plugins:
4 | - "@semantic-release/commit-analyzer"
5 | - "@semantic-release/release-notes-generator"
6 | - - "@semantic-release/github"
7 | - addReleases: top
8 | - - "@semantic-release/exec"
9 | - verifyReleaseCmd: ./ci/output-release-version.sh ${nextRelease.version}
10 | - - "@semantic-release/exec"
11 | - prepareCmd: ./ci/update-versions.sh ${nextRelease.version}
12 | - - "@semantic-release/exec"
13 | - publishCmd: ./ci/publish/wheel.sh
14 | - - "@semantic-release/exec"
15 | - publishCmd: ./ci/publish/conda.sh rapidsai
16 | - - "@semantic-release/exec"
17 | - publishCmd: ./ci/publish/conda.sh rapidsai-nightly
18 | - - "@semantic-release/git"
19 | - assets:
20 | - src/rapids_dependency_file_generator/_version.py
21 | - src/rapids_dependency_file_generator/schema.json
22 | - package.json
23 | - recipe/meta.yaml
24 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | ## Releases
4 |
5 | Releases for `dependency-file-generator` are handled by [semantic-release][semantic-release]. To ensure that every commit on the `main` branch has a semantic commit message, the following settings have been configured:
6 |
7 | - Only squash commits are allowed
8 | - The default squash commit message is derived from the pull-request's title and body
9 | - Pull request titles are required to be semantic commit messages
10 |
11 | The table below (from [semantic-release][semantic-release] docs) shows the types of changes that correspond to each release type.
12 |
13 | | Commit message | Release type |
14 | | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------------------------- |
15 | | `fix(pencil): stop graphite breaking when too much pressure applied` | Patch Release |
16 | | `feat(pencil): add 'graphiteWidth' option` | Minor Release |
17 | | `perf(pencil): remove graphiteWidth option`<br>`BREAKING CHANGE: The graphiteWidth option has been removed.`<br>`The default graphite width of 10mm is always used for performance reasons.` | Major<br>(Note that the `BREAKING CHANGE: ` string must be in the body of the pull-request) |
18 |
19 | If a change type not listed in the table above is used, it will not trigger a release. For example:
20 |
21 | - `docs: fix README typo`
22 | - `ci: update GHAs workflow`
23 | - `chore: some miscellaneous work`
24 |
25 | The source of truth for these rules is [semantic-release/commit-analyzer](https://github.com/semantic-release/commit-analyzer). The `angular` preset is used by default, which is documented [here](https://github.com/conventional-changelog/conventional-changelog/tree/master/packages/conventional-changelog-angular).
26 |
27 | [semantic-release]: https://github.com/semantic-release/semantic-release
28 |
29 | ## Examples
30 |
31 | The [tests/examples](./tests/examples/) directory has example `dependencies.yaml` files along with their corresponding output files.
32 |
33 | To create a new `example` test, do the following (a shell sketch of these steps follows the list):
34 |
35 | - Create a new directory with a `dependencies.yaml` file in [tests/examples](tests/examples/)
36 | - Ensure the `output` directories (e.g. `conda_dir`, `requirements_dir`, etc.) are set to write to `output/actual`
37 | - Run `rapids-dependency-file-generator --config tests/examples/<new-directory>/dependencies.yaml` to generate the initial output files
38 | - Manually inspect the generated files for correctness
39 | - Copy the contents of `output/actual` to `output/expected`, so it will be committed to the repository and used as a baseline for future changes
40 | - Add the new folder name to [test_examples.py](./tests/test_examples.py)
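
A rough sketch of that workflow, assuming a new example named `my-example` (the name is only a placeholder):

```sh
mkdir -p tests/examples/my-example
# author tests/examples/my-example/dependencies.yaml, with its output
# directories pointed at output/actual
rapids-dependency-file-generator --config tests/examples/my-example/dependencies.yaml
# inspect the generated files, then promote them to the committed baseline
mkdir -p tests/examples/my-example/output/expected
cp -r tests/examples/my-example/output/actual/. tests/examples/my-example/output/expected/
```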
41 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2018 NVIDIA Corporation
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include src/rapids_dependency_file_generator/py.typed
2 | include src/rapids_dependency_file_generator/schema.json
3 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # rapids-dependency-file-generator
2 |
3 | `rapids-dependency-file-generator` is a Python CLI tool that generates conda `environment.yaml` files and `requirements.txt` files from a single YAML file, typically named `dependencies.yaml`.
4 |
5 | When installed, it provides the `rapids-dependency-file-generator` CLI command, which parses a `dependencies.yaml` configuration file and generates the appropriate conda `environment.yaml` and `requirements.txt` dependency files.
6 |
7 | ## Installation
8 |
9 | `rapids-dependency-file-generator` is available on [PyPI](https://pypi.org/project/rapids-dependency-file-generator/). To install, run:
10 |
11 | ```sh
12 | pip install rapids-dependency-file-generator
13 | ```
14 |
15 | ## Usage
16 |
17 | When `rapids-dependency-file-generator` is invoked, it will read a `dependencies.yaml` file from the current directory and generate the corresponding child dependency files.
18 |
19 | The `dependencies.yaml` file has the following characteristics:
20 |
21 | - it is intended to be committed to the root directory of repositories
22 | - it can define matrices that enable the output dependency files to vary according to any arbitrary specification (or combination of specifications), including CUDA version, machine architecture, Python version, etc.
23 | - it contains bifurcated lists of dependencies based on each dependency's purpose (e.g. build, runtime, test). The bifurcated dependency lists are merged according to the description in the _"How Dependency Lists Are Merged"_ section below.
24 |
25 | ## `dependencies.yaml` Format
26 |
27 | The `dependencies.yaml` file has three relevant top-level keys: `files`, `channels`, and `dependencies`. These keys are described in detail below.
28 |
29 | ### `files` Key
30 |
31 | The top-level `files` key is responsible for determining the following:
32 |
33 | - which types of dependency files should be generated (i.e. conda `environment.yaml` files, `requirements.txt` files, and/or `pyproject.toml` files)
34 | - where the generated files should be written to (relative to the `dependencies.yaml` file)
35 | - which variant files should be generated (based on the provided matrix)
36 | - which of the dependency lists from the top-level `dependencies` key should be included in the generated files
37 |
38 | Here is an example of what the `files` key might look like:
39 |
40 | ```yaml
41 | files:
42 | all: # used as the prefix for the generated dependency file names for conda or requirements files (has no effect on pyproject.toml files)
43 | output: [conda, requirements] # which dependency file types to generate. required, can be "conda", "requirements", "pyproject", "none" or a list of non-"none" values
44 | conda_dir: conda/environments # where to put conda environment.yaml files. optional, defaults to "conda/environments"
45 | requirements_dir: python/cudf # where to put requirements.txt files. optional, but recommended. defaults to "python"
46 | pyproject_dir: python/cudf # where to put pyproject.toml files. optional, but recommended. defaults to "python"
47 |     matrix: # (optional) contains an arbitrary set of key/value pairs used to determine which dependency files should be generated. These values are included in the output filename.
48 | cuda: ["11.5", "11.6"] # which CUDA version variant files to generate.
49 | arch: [x86_64] # which architecture version variant files to generate. This value should be the result of running the `arch` command on a given machine.
50 | includes: # a list of keys from the `dependencies` section which should be included in the generated files
51 | - build
52 | - test
53 | - runtime
54 | build: # multiple `files` children keys can be specified
55 | output: requirements
56 | conda_dir: conda/environments
57 | requirements_dir: python/cudf
58 | matrix:
59 | cuda: ["11.5"]
60 | arch: [x86_64]
61 | py: ["3.8"]
62 | includes:
63 | - build
64 | ```
65 |
66 | The result of the above configuration is that the following dependency files would be generated:
67 |
68 | - `conda/environments/all_cuda-115_arch-x86_64.yaml`
69 | - `conda/environments/all_cuda-116_arch-x86_64.yaml`
70 | - `python/cudf/requirements_all_cuda-115_arch-x86_64.txt`
71 | - `python/cudf/requirements_all_cuda-116_arch-x86_64.txt`
72 | - `python/cudf/requirements_build_cuda-115_arch-x86_64_py-38.txt`
73 |
74 | The `all*.yaml` and `requirements_all*.txt` files would include the contents of the `build`, `test`, and `runtime` dependency lists from the top-level `dependencies` key. The `requirements_build*.txt` file would only include the contents of the `build` dependency list from the top-level `dependencies` key.
75 |
76 | The value of `output` can also be `none` as shown below.
77 |
78 | ```yaml
79 | files:
80 | test:
81 | output: none
82 | includes:
83 | - test
84 | ```
85 |
86 | When `output: none` is used, the `conda_dir`, `requirements_dir` and `matrix` keys can be omitted. The use case for `output: none` is described in the [_Additional CLI Notes_](#additional-cli-notes) section below.
87 |
88 | #### `extras`
89 |
90 | A given file may include an `extras` entry used to provide inputs specific to a particular file type.
91 |
92 | Here is an example:
93 |
94 | ```yaml
95 | files:
96 | build:
97 | output: pyproject
98 | includes: # a list of keys from the `dependencies` section which should be included in the generated files
99 | - build
100 | extras:
101 | table: table_name
102 | key: key_name
103 | ```
104 |
105 | The extras currently supported for each file type are as follows (an example appears after the list):
106 | - pyproject.toml
107 | - table: The table in pyproject.toml where the dependencies should be written. Acceptable values are "build-system", "project", and "project.optional-dependencies".
108 | - key: The key corresponding to the dependency list in `table`. This may only be provided for the "project.optional-dependencies" table since the key name is fixed for "build-system" ("requires") and "project" ("dependencies"). Note that this implicitly prohibits including optional dependencies via an inline table under the "project" table.
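
For instance, here is a sketch of a file entry that writes its dependencies to the `project.optional-dependencies` table of a `pyproject.toml`. The `py_test` file name, `test_python` include, and `python/cudf` directory are placeholders, not values required by the tool:

```yaml
files:
  py_test:
    output: pyproject
    pyproject_dir: python/cudf
    extras:
      table: project.optional-dependencies
      key: test
    includes:
      - test_python
```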
109 |
110 | ### `channels` Key
111 |
112 | The top-level `channels` key specifies the channels that should be included in any generated conda `environment.yaml` files.
113 |
114 | It might look like this:
115 |
116 | ```yaml
117 | channels:
118 | - rapidsai
119 | - conda-forge
120 | ```
121 |
122 | In the absence of a `channels` key, some sensible defaults for RAPIDS will be used (see [_constants.py](./src/rapids_dependency_file_generator/_constants.py)).
123 |
124 | ### `dependencies` Key
125 |
126 | The top-level `dependencies` key is where the bifurcated dependency lists should be specified.
127 |
128 | Underneath the `dependencies` key are sets of key-value pairs. For each pair, the key can be arbitrarily named, but should match an item from the `includes` list of any `files` entry.
129 |
130 | The value of each key-value pair can have the following children keys:
131 |
132 | - `common` - contains dependency lists that are the same across all matrix variations
133 | - `specific` - contains dependency lists that are specific to a particular matrix combination
134 |
135 | The values of each of these keys are described in detail below.
136 |
137 | #### `common` Key
138 |
139 | The `common` key contains a list of objects with the following keys:
140 |
141 | - `output_types` - a list of output types (e.g. "conda" for `environment.yaml` files or "requirements" for `requirements.txt` files) for the packages in the `packages` key
142 | - `packages` - a list of packages to be included in the generated output file
143 |
144 | #### `specific` Key
145 |
146 | The `specific` key contains a list of objects with the following keys:
147 |
148 | - `output_types` - _same as `output_types` for the `common` key above_
149 | - `matrices` - a list of objects (described below) which define packages that are specific to a particular matrix combination
150 |
151 | ##### `matrices` Key
152 |
153 | Each list item under the `matrices` key contains a `matrix` key and a `packages` key.
154 | The `matrix` key is used to define which matrix combinations from `files.[*].matrix` will use the associated packages.
155 | The `packages` key is a list of packages to be included in the generated output file for a matching matrix.
156 | This is elaborated on in [How Dependency Lists Are Merged](#how-dependency-lists-are-merged).
157 |
158 | The structure described above is shown in the example below:
159 |
160 | ```yaml
161 | dependencies:
162 | build: # dependency list name
163 | common: # dependencies common among all matrix variations
164 | - output_types: [conda, requirements] # the output types this list item should apply to
165 | packages:
166 | - common_build_dep
167 | - output_types: conda
168 | packages:
169 | - cupy
170 | - pip: # supports `pip` key for conda environment.yaml files
171 | - some_random_dep
172 | specific: # dependencies specific to a particular matrix combination
173 | - output_types: conda # dependencies specific to conda environment.yaml files
174 | matrices:
175 | - matrix:
176 | cuda: "11.5"
177 | packages:
178 | - cudatoolkit=11.5
179 | - matrix:
180 | cuda: "11.6"
181 | packages:
182 | - cudatoolkit=11.6
183 | - matrix: # an empty matrix entry serves as a fallback if there are no other matrix matches
184 | packages:
185 | - cudatoolkit
186 | - output_types: [conda, requirements]
187 | matrices:
188 | - matrix: # dependencies specific to x86_64 and 11.5
189 | cuda: "11.5"
190 | arch: x86_64
191 | packages:
192 | - a_random_x86_115_specific_dep
193 |         - matrix: # an empty matrix/packages entry prevents an error from being thrown for combinations other than 11.5 and x86_64
194 | packages:
195 | - output_types: requirements # dependencies specific to requirements.txt files
196 | matrices:
197 | - matrix:
198 | cuda: "11.5"
199 | packages:
200 | - another_random_dep=11.5.0
201 | - matrix:
202 | cuda: "11.6"
203 | packages:
204 | - another_random_dep=11.6.0
205 | test:
206 | common:
207 | - output_types: [conda, requirements]
208 | packages:
209 | - pytest
210 | ```
211 |
212 | ## How Dependency Lists Are Merged
213 |
214 | The information from the top-level `files` and `dependencies` keys are used to determine which dependencies should be included in the final output of the generated dependency files.
215 |
216 | Consider the following top-level `files` key configuration:
217 |
218 | ```yaml
219 | files:
220 | all:
221 | output: conda
222 | conda_dir: conda/environments
223 | requirements_dir: python/cudf
224 | matrix:
225 | cuda: ["11.5", "11.6"]
226 | arch: [x86_64]
227 | includes:
228 | - build
229 | - test
230 | ```
231 |
232 | In this example, `rapids-dependency-file-generator` will generate two conda environment files: `conda/environments/all_cuda-115_arch-x86_64.yaml` and `conda/environments/all_cuda-116_arch-x86_64.yaml`.
233 |
234 | Since the `output` value is `conda`, `rapids-dependency-file-generator` will iterate through any `dependencies.build.common` and `dependencies.test.common` list entries and use the `packages` of any entry whose `output_types` key is `conda` or `[conda, ...]`.
235 |
236 | Further, for the `11.5` and `x86_64` matrix combination, any `build.specific` and `test.specific` list items whose output includes `conda` and whose `matrices` list items matches any of the definitions below would also be merged:
237 |
238 | ```yaml
239 | specific:
240 | - output_types: conda
241 | matrices:
242 | - matrix:
243 | cuda: "11.5"
244 | packages:
245 | - some_dep1
246 | - some_dep2
247 | # or
248 | specific:
249 | - output_types: conda
250 | matrices:
251 | - matrix:
252 | cuda: "11.5"
253 | arch: "x86_64"
254 | packages:
255 | - some_dep1
256 | - some_dep2
257 | # or
258 | specific:
259 | - output_types: conda
260 | matrices:
261 | - matrix:
262 | arch: "x86_64"
263 | packages:
264 | - some_dep1
265 | - some_dep2
266 | ```
267 |
268 | Every `matrices` list must have a match for a given input matrix (only the first matching matrix in the list of `matrices` will be used).
269 | If no matches are found for a particular matrix combination, an error will be thrown.
270 | In instances where an error should not be thrown, an empty `matrix` and `packages` list item can be used:
271 |
272 | ```yaml
273 | - output_types: conda
274 | matrices:
275 | - matrix:
276 | cuda: "11.5"
277 | arch: x86_64
278 | py: "3.8"
279 | packages:
280 | - a_very_specific_115_x86_38_dep
281 | - matrix: # an empty matrix entry serves as a fallback if there are no other matrix matches
282 | packages:
283 | ```
284 |
285 | Merged dependency lists are sorted and deduped.
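
As an illustration (not verbatim tool output), and assuming the `channels` and `dependencies` examples shown earlier, the conda environment file generated for the `cuda: "11.5"` / `arch: x86_64` combination would contain roughly the following, with the package list sorted and deduped:

```yaml
channels:
  - rapidsai
  - conda-forge
dependencies:
  - a_random_x86_115_specific_dep
  - common_build_dep
  - cudatoolkit=11.5
  - cupy
  - pytest
  - pip:
      - some_random_dep
```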
286 |
287 | ## Additional CLI Notes
288 |
289 | Invoking `rapids-dependency-file-generator` without any arguments is meant to be the default behavior for RAPIDS developers. It will generate all of the necessary dependency files as specified in the top-level `files` configuration.
290 |
291 | However, there are CLI arguments that can augment the `files` configuration values before the files are generated.
292 |
293 | Consider the example when `output: none` is used:
294 |
295 | ```yaml
296 | files:
297 | test:
298 | output: none
299 | includes:
300 | - test
301 | ```
302 |
303 | The `test` file generated by the configuration above is useful for CI, but it might not make sense to commit those files to a repository. In such a scenario, the following CLI arguments can be used:
304 |
305 | ```sh
306 | ENV_NAME="cudf_test"
307 |
308 | rapids-dependency-file-generator \
309 | --file-key "test" \
310 | --output "conda" \
311 | --matrix "cuda=12.5;arch=$(arch)" > env.yaml
312 | mamba env create --name "$ENV_NAME" --file env.yaml
313 | mamba activate "$ENV_NAME"
314 |
315 | # install cudf packages built in CI and test them in newly created environment...
316 | ```
317 |
318 | The `--file-key` argument is passed the `test` key name from the `files` configuration. Additional flags are used to generate a single dependency file. When the CLI is used in this fashion, it will print to `stdout` instead of writing the resulting contents to the filesystem.
319 |
320 | The `--file-key`, `--output`, and `--matrix` flags must be used together. `--matrix` may be an empty string if the file that should be generated does not depend on any specific matrix variations.
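
For example, a file key whose contents do not vary by matrix can be generated with an empty `--matrix` value; this sketch reuses the `test` key from above and redirects to an arbitrary `requirements.txt`:

```sh
rapids-dependency-file-generator \
  --file-key "test" \
  --output "requirements" \
  --matrix "" > requirements.txt
```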
321 |
322 | Where multiple values for the same key are passed to `--matrix`, e.g. `cuda_suffixed=true;cuda_suffixed=false`, only the last value will be used.
323 |
324 | Where `--file-key` is supplied multiple times in the same invocation, the output printed to `stdout` will contain a union (without duplicates) of all of the corresponding dependencies. For example:
325 |
326 | ```shell
327 | rapids-dependency-file-generator \
328 | --file-key "test" \
329 | --file-key "test_notebooks" \
330 | --output "conda" \
331 | --matrix "cuda=12.5;arch=$(arch)" > env.yaml
332 | ```
333 |
334 | The `--prepend-channel` argument accepts additional channels to use, like `rapids-dependency-file-generator --prepend-channel my_channel --prepend-channel my_other_channel`.
335 | If both `--output` and `--prepend-channel` are provided, the output format must be conda.
336 | Prepending channels can be useful for adding local channels with packages to be tested in CI workflows.
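
For example, a CI job might generate a test environment that also pulls from a local channel; the `./local-channel` path here is illustrative only:

```sh
rapids-dependency-file-generator \
  --file-key "test" \
  --output "conda" \
  --matrix "cuda=12.5;arch=$(arch)" \
  --prepend-channel "./local-channel" > env.yaml
```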
337 |
338 | Running `rapids-dependency-file-generator -h` will show the most up-to-date CLI arguments.
339 |
--------------------------------------------------------------------------------
/ci/build-test/conda.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 |
4 | ./ci/update-versions.sh "${RELEASE_VERSION:-}"
5 |
6 | mamba install -y conda-build
7 | conda build \
8 | --output-folder "${OUTPUT_DIR:-"/tmp/output"}" \
9 | recipe/
10 |
--------------------------------------------------------------------------------
/ci/build-test/wheel.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 |
4 | OUTPUT_DIR="${OUTPUT_DIR:-"/tmp/output"}"
5 |
6 | ./ci/update-versions.sh "${RELEASE_VERSION:-}"
7 |
8 | pip install build pytest 'packaging>=24.2' 'twine>=6.1.0'
9 |
10 | python -m build \
11 | --outdir "${OUTPUT_DIR}" \
12 | .
13 |
14 | twine check --strict "${OUTPUT_DIR}"/*
15 |
16 | for PKG in "${OUTPUT_DIR}/"*; do
17 | echo "$PKG"
18 | pip uninstall -y rapids-dependency-file-generator
19 | pip install "$PKG"
20 | pytest
21 | rapids-dependency-file-generator -h # test CLI output
22 | done
23 |
--------------------------------------------------------------------------------
/ci/output-release-version.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -euo pipefail
3 |
4 | RELEASE_VERSION="${1}"
5 |
6 | echo "release_version=${RELEASE_VERSION}" | tee --append "${GITHUB_OUTPUT:-/dev/null}"
7 |
--------------------------------------------------------------------------------
/ci/publish/conda.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Uploads conda packages to Anaconda.org
3 | # Per https://github.com/semantic-release/exec:
4 | # - stderr is used for logging
5 | # - stdout is used for returning release information
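# Usage: ./ci/publish/conda.sh <conda-org>  ("rapidsai" or "rapidsai-nightly", as invoked from .releaserc.yaml)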
6 | set -euo pipefail
7 |
8 | {
9 | . /usr/share/miniconda/etc/profile.d/conda.sh
10 | conda activate base
11 | conda install -y anaconda-client
12 | pkgs_to_upload=$(find "${CONDA_OUTPUT_DIR}" -name "*.conda" -o -name "*.tar.bz2")
13 |
14 | export CONDA_ORG="${1}"
15 |
16 | case "${CONDA_ORG}" in
17 | "rapidsai")
18 | TOKEN="${ANACONDA_STABLE_TOKEN}"
19 | ;;
20 | "rapidsai-nightly")
21 | TOKEN="${ANACONDA_NIGHTLY_TOKEN}"
22 | ;;
23 | *)
24 | echo "Unknown conda org: ${CONDA_ORG}"
25 | exit 1
26 | ;;
27 | esac
28 |
29 |
30 | anaconda \
31 | -t "${TOKEN}" \
32 | upload \
33 | --no-progress \
34 | ${pkgs_to_upload}
35 | } 1>&2
36 |
37 | jq -ncr '{name: "Conda release - \(env.CONDA_ORG)", url: "https://anaconda.org/\(env.CONDA_ORG)/rapids-dependency-file-generator"}'
38 |
--------------------------------------------------------------------------------
/ci/publish/wheel.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Uploads packages to PyPI
3 | # Per https://github.com/semantic-release/exec:
4 | # - stderr is used for logging
5 | # - stdout is used for returning release information
6 | set -euo pipefail
7 |
8 | {
9 | pip install \
10 | 'packaging>=24.2' \
11 | 'twine>=6.1.0'
12 |
13 | twine upload \
14 | --username __token__ \
15 | --password "${PYPI_TOKEN}" \
16 | --disable-progress-bar \
17 | "${WHEEL_OUTPUT_DIR}/"*
18 | } 1>&2
19 |
20 | jq -ncr '{name: "PyPI release", url: "https://pypi.org/project/rapids-dependency-file-generator/"}'
21 |
--------------------------------------------------------------------------------
/ci/update-versions.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Updates the version string throughout the project
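# Usage: ./ci/update-versions.sh [RELEASE_VERSION]
# e.g. `./ci/update-versions.sh 1.2.3`; the argument is empty for PR builds, in
# which case version replacement is skipped.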
3 | set -euo pipefail
4 |
5 | RELEASE_VERSION="${1:-}"
6 |
7 | # no `RELEASE_VERSION` is computed for PRs
8 | if [[ -z "${RELEASE_VERSION}" ]]; then
9 | echo "No release version provided."
10 | echo "Skipping version replacement."
11 | exit 0
12 | fi
13 |
14 | if ! [[ "${RELEASE_VERSION}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
15 | echo "Invalid version string: ${RELEASE_VERSION}."
16 | exit 1
17 | fi
18 |
19 | sed -i "/__version__/ s/\".*\"/\"${RELEASE_VERSION}\"/" src/rapids_dependency_file_generator/_version.py
20 | sed -i "/\$id/ s|/v[^/]*/|/v${RELEASE_VERSION}/|" src/rapids_dependency_file_generator/schema.json
21 | sed -i "/\"version\":/ s|: \".*\"|: \"${RELEASE_VERSION}\"|" package.json
22 | sed -i "/version:/ s|: .*|: ${RELEASE_VERSION}|" recipe/meta.yaml
23 |
24 |
25 | cat \
26 | src/rapids_dependency_file_generator/_version.py \
27 | src/rapids_dependency_file_generator/schema.json \
28 | package.json \
29 | recipe/meta.yaml
30 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?= -n -v -W --keep-going
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | %SPHINXBUILD% >NUL 2>NUL
14 | if errorlevel 9009 (
15 | echo.
16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
17 | echo.installed, then set the SPHINXBUILD environment variable to point
18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
19 | echo.may add the Sphinx directory to PATH.
20 | echo.
21 | echo.If you don't have Sphinx installed, grab it from
22 | echo.https://www.sphinx-doc.org/
23 | exit /b 1
24 | )
25 |
26 | if "%1" == "" goto help
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/source/_templates/class.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline}}
2 |
3 | .. currentmodule:: {{ module }}
4 |
5 | .. autoclass:: {{ objname }}
6 | :members:
7 | :show-inheritance:
8 | :inherited-members:
9 |
10 | {% block methods %}
11 | .. automethod:: __init__
12 |
13 | {% if methods %}
14 | .. rubric:: {{ _('Methods') }}
15 |
16 | .. autosummary::
17 | {% for item in methods %}
18 | ~{{ name }}.{{ item }}
19 | {%- endfor %}
20 | {% endif %}
21 | {% endblock %}
22 |
23 | {% block attributes %}
24 | {% if attributes %}
25 | .. rubric:: {{ _('Attributes') }}
26 |
27 | .. autosummary::
28 | {% for item in attributes %}
29 | ~{{ name }}.{{ item }}
30 | {%- endfor %}
31 | {% endif %}
32 | {% endblock %}
33 |
--------------------------------------------------------------------------------
/docs/source/_templates/module.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline}}
2 |
3 | .. automodule:: {{ fullname }}
4 |
5 | {% block attributes %}
6 | {% if attributes %}
7 | .. rubric:: Module Attributes
8 |
9 | .. autosummary::
10 | :toctree:
11 | {% for item in attributes %}
12 | {{ item }}
13 | {%- endfor %}
14 | {% endif %}
15 | {% endblock %}
16 |
17 | {% block functions %}
18 | {% if functions %}
19 | .. rubric:: {{ _('Functions') }}
20 |
21 | .. autosummary::
22 | :toctree:
23 | {% for item in functions %}
24 | {{ item }}
25 | {%- endfor %}
26 | {% endif %}
27 | {% endblock %}
28 |
29 | {% block classes %}
30 | {% if classes %}
31 | .. rubric:: {{ _('Classes') }}
32 |
33 | .. autosummary::
34 | :toctree:
35 | :template: class.rst
36 | {% for item in classes %}
37 | {{ item }}
38 | {%- endfor %}
39 | {% endif %}
40 | {% endblock %}
41 |
42 | {% block exceptions %}
43 | {% if exceptions %}
44 | .. rubric:: {{ _('Exceptions') }}
45 |
46 | .. autosummary::
47 | :toctree:
48 | {% for item in exceptions %}
49 | {{ item }}
50 | {%- endfor %}
51 | {% endif %}
52 | {% endblock %}
53 |
54 | {% block modules %}
55 | {% if modules %}
56 | .. rubric:: Modules
57 |
58 | .. autosummary::
59 | :toctree:
60 | :template: module.rst
61 | :recursive:
62 | {% for item in modules %}
63 | {{ item }}
64 | {%- endfor %}
65 | {% endif %}
66 | {% endblock %}
67 |
--------------------------------------------------------------------------------
/docs/source/api_docs/index.rst:
--------------------------------------------------------------------------------
1 | API
2 | ===
3 |
4 | .. autosummary::
5 | :toctree: generated/
6 | :template: module.rst
7 | :recursive:
8 |
9 | rapids_dependency_file_generator
10 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # For the full list of built-in configuration values, see the documentation:
4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
5 |
6 | # -- Project information -----------------------------------------------------
7 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
8 |
9 | import datetime
10 |
11 | from packaging.version import Version
12 |
13 | import rapids_dependency_file_generator
14 |
15 | DFG_VERSION = Version(rapids_dependency_file_generator.__version__)
16 | project = "rapids-dependency-file-generator"
17 | copyright = f"2022-{datetime.datetime.today().year}, NVIDIA Corporation"
18 | author = "NVIDIA Corporation"
19 | release = str(DFG_VERSION)
20 |
21 | # -- General configuration ---------------------------------------------------
22 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
23 |
24 | extensions = [
25 | "sphinx.ext.autodoc",
26 | "sphinx.ext.autosummary",
27 | "sphinx.ext.intersphinx",
28 | "numpydoc",
29 | ]
30 |
31 | templates_path = ["_templates"]
32 | exclude_patterns: list[str] = []
33 |
34 |
35 | # -- Options for HTML output -------------------------------------------------
36 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
37 |
38 | html_theme = "pydata_sphinx_theme"
39 |
40 | # Theme options are theme-specific and customize the look and feel of a theme
41 | # further. For a list of options available for each theme, see the
42 | # documentation.
43 | #
44 | html_theme_options = {
45 | "external_links": [],
46 | # https://github.com/pydata/pydata-sphinx-theme/issues/1220
47 | "icon_links": [],
48 | "github_url": "https://github.com/rapidsai/dependency-file-generator",
49 | "twitter_url": "https://twitter.com/rapidsai",
50 | "show_toc_level": 1,
51 | "navbar_align": "right",
52 | }
53 |
54 | html_static_path = ["_static"]
55 |
56 | # Add any paths that contain templates here, relative to this directory.
57 | templates_path = ["_templates"]
58 |
59 | autosummary_ignore_module_all = False
60 |
61 | intersphinx_mapping = {
62 | "python": ("https://docs.python.org/3", None),
63 | }
64 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | Welcome to rapids-dependency-file-generator's documentation!
2 | ============================================================
3 |
4 | .. toctree::
5 | :maxdepth: 2
6 | :caption: Contents:
7 |
8 | api_docs/index
9 |
10 | Indices and tables
11 | ==================
12 |
13 | * :ref:`genindex`
14 | * :ref:`modindex`
15 | * :ref:`search`
16 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "rapids-dependency-file-generator",
3 | "version": "1.18.1",
4 | "description": "`rapids-dependency-file-generator` is a Python CLI tool that generates conda `environment.yaml` files and `requirements.txt` files from a single YAML file, typically named `dependencies.yaml`.",
5 | "repository": {
6 | "type": "git",
7 | "url": "git+https://github.com/rapidsai/dependency-file-generator.git"
8 | },
9 | "author": "",
10 | "license": "Apache-2.0",
11 | "bugs": {
12 | "url": "https://github.com/rapidsai/dependency-file-generator/issues"
13 | },
14 | "homepage": "https://github.com/rapidsai/dependency-file-generator",
15 | "devDependencies": {
16 | "@semantic-release/exec": "^7.0.0",
17 | "@semantic-release/git": "^10.0.1",
18 | "semantic-release": "^24.0.0"
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "rapids-dependency-file-generator"
7 | dynamic = [
8 | "version",
9 | ]
10 | authors = [
11 | { name = "RAPIDS Development Team", email = "pypi@rapids.ai" }
12 | ]
13 | urls = { homepage = "https://github.com/rapidsai/dependency-file-generator" }
14 | description = "Tool for generating RAPIDS environment files"
15 | readme = { file = "README.md", content-type = "text/markdown" }
16 | license = { file = "LICENSE" }
17 | classifiers = [
18 | "License :: OSI Approved :: Apache Software License",
19 | "Programming Language :: Python :: 3",
20 | ]
21 | requires-python = ">=3.9"
22 | dependencies = [
23 | "PyYAML",
24 | "jsonschema",
25 | "tomlkit",
26 | ]
27 |
28 | [project.scripts]
29 | rapids-dependency-file-generator = "rapids_dependency_file_generator._cli:main"
30 |
31 | [tool.setuptools]
32 | packages = { "find" = { where = ["src"] } }
33 |
34 | [tool.setuptools.dynamic]
35 | version = {attr = "rapids_dependency_file_generator._version.__version__"}
36 |
37 | [tool.isort]
38 | profile = "black"
39 |
40 | [tool.mypy]
41 | ignore_missing_imports = true
42 |
43 | [tool.ruff]
44 | line-length = 120
45 |
46 | [tool.ruff.lint]
47 | select = ["E", "F", "W", "I", "D"]
48 |
49 | [tool.ruff.lint.pydocstyle]
50 | convention = "numpy"
51 |
--------------------------------------------------------------------------------
/recipe/meta.yaml:
--------------------------------------------------------------------------------
1 | package:
2 | name: rapids-dependency-file-generator
3 | version: 1.18.1
4 |
5 | build:
6 | noarch: python
7 | script: pip install --no-build-isolation --no-deps .
8 | entry_points:
9 | - rapids-dependency-file-generator = rapids_dependency_file_generator._cli:main
10 |
11 | source:
12 | path: ../
13 |
14 | requirements:
15 | host:
16 | - python >=3.9
17 | - python-build
18 | - setuptools
19 | run:
20 | - python >=3.9
21 | - pyyaml
22 | - jsonschema
23 | - tomlkit
24 |
25 | test:
26 | source_files:
27 | - tests
28 | requires:
29 | - pytest
30 | commands:
31 | - pytest
32 | - rapids-dependency-file-generator -h
33 |
34 | about:
35 | home: https://github.com/rapidsai/dependency-file-generator
36 | dev_url: https://github.com/rapidsai/dependency-file-generator
37 | license: Apache-2.0
38 | summary: Dependency file generator for RAPIDS
39 |
--------------------------------------------------------------------------------
/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3 | "extends": ["config:base"],
4 | "ignoreDeps": [
5 | "python"
6 | ]
7 | }
8 |
--------------------------------------------------------------------------------
/src/rapids_dependency_file_generator/__init__.py:
--------------------------------------------------------------------------------
1 | """Public API for rapids-dependency-file-generator.
2 |
3 | This API can be used by Python build tools or other tools that want to
4 | programmatically generate ``pyproject.toml``, ``requirements.txt``, or
5 | a Conda environment from ``dependencies.yaml``.
6 | """
7 |
8 | from . import _config, _rapids_dependency_file_generator
9 | from ._config import * # noqa: F401,F403
10 | from ._rapids_dependency_file_generator import * # noqa: F401,F403
11 | from ._version import __version__
12 |
13 | __all__ = [
14 | "__version__",
15 | *_config.__all__,
16 | *_rapids_dependency_file_generator.__all__,
17 | ]
18 |
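As a hedged illustration of that programmatic use, the sketch below assumes a `dependencies.yaml` in the current directory that defines a file key named `test`; it only uses functions and keyword arguments defined in the modules shown later in this listing.

    import rapids_dependency_file_generator as rdfg

    # Parse and validate dependencies.yaml (the path and the 'test' file key are
    # assumptions made for this sketch).
    parsed = rdfg.load_config_from_file("dependencies.yaml")

    # Write a requirements-style dependency list for the 'test' key to stdout,
    # using that key's own matrix from the config.
    rdfg.make_dependency_files(
        parsed_config=parsed,
        file_keys=["test"],
        output={rdfg.Output.REQUIREMENTS},
        matrix=None,
        prepend_channels=[],
        to_stdout=True,
    )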
--------------------------------------------------------------------------------
/src/rapids_dependency_file_generator/_cli.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 |
4 | from ._config import Output, load_config_from_file
5 | from ._constants import cli_name, default_dependency_file_path
6 | from ._rapids_dependency_file_generator import (
7 | delete_existing_files,
8 | make_dependency_files,
9 | )
10 | from ._version import __version__ as version
11 |
12 |
13 | def validate_args(argv):
14 | parser = argparse.ArgumentParser(
15 | description=f"Generates dependency files for RAPIDS libraries (version: {version})."
16 | )
17 | parser.add_argument(
18 | "-c",
19 | "--config",
20 | default=default_dependency_file_path,
21 | help="Path to YAML config file.",
22 | )
23 | parser.add_argument(
24 | "--clean",
25 | nargs="?",
26 | default=None,
27 | const="",
28 | help=(
29 | "Delete any files previously created by dfg before running. An optional "
30 | "path to clean may be provided, otherwise the current working directory "
31 | "is used as the root from which to clean."
32 | ),
33 | )
34 |
35 | codependent_args = parser.add_argument_group("optional, but codependent")
36 | codependent_args.add_argument(
37 | "--file-key",
38 | action="append",
39 | help=(
40 | "The file key from `dependencies.yaml` to generate. "
41 | "If supplied multiple times, dependency lists from all requested file keys will be merged."
42 | ),
43 | )
44 | codependent_args.add_argument(
45 | "--output",
46 | help="The output file type to generate.",
47 | choices=[
48 | x.value
49 | for x in [
50 | Output.CONDA,
51 | Output.PYPROJECT,
52 | Output.REQUIREMENTS,
53 | ]
54 | ],
55 | )
56 | codependent_args.add_argument(
57 | "--matrix",
58 | help=(
59 | "String representing which matrix combination should be generated, "
60 | 'such as `--matrix "cuda=11.5;arch=x86_64"`. May also be an empty string.'
61 | ),
62 | )
63 |
64 | parser.add_argument(
65 | "--prepend-channel",
66 | action="append",
67 | default=[],
68 | dest="prepend_channels",
69 | help=(
70 | "A string representing a conda channel to prepend to the list of "
71 | "channels. This option is only valid with --output "
72 | f"{Output.CONDA.value} or no --output. May be specified multiple times."
73 | ),
74 | )
75 |
76 | parser.add_argument(
77 | "--version",
78 | default=False,
79 | action="store_true",
80 | help="Show the version and exit.",
81 | )
82 |
83 | args = parser.parse_args(argv)
84 |
85 | dependent_arg_keys = ["file_key", "output", "matrix"]
86 | dependent_arg_values = [getattr(args, key) is None for key in dependent_arg_keys]
87 | if any(dependent_arg_values) and not all(dependent_arg_values):
88 | raise ValueError(
89 | "The following arguments must be used together:"
90 | + "".join([f"\n {x}" for x in ["--file-key", "--output", "--matrix"]])
91 | )
92 |
93 | if args.prepend_channels and args.output and args.output != Output.CONDA.value:
94 | raise ValueError(f"--prepend-channel is only valid with --output {Output.CONDA.value}")
95 |
96 | # If --clean was passed without arguments, default to cleaning from the root of the
97 | # tree where the config file is.
98 | if args.clean == "":
99 | args.clean = os.path.dirname(os.path.abspath(args.config))
100 |
101 | return args
102 |
103 |
104 | def generate_matrix(matrix_arg):
105 | if not matrix_arg:
106 | return None
107 | matrix = {}
108 | for matrix_column in matrix_arg.split(";"):
109 | key, val = matrix_column.split("=")
110 | matrix[key] = [val]
111 | return matrix
112 |
113 |
114 | def main(argv=None) -> None:
115 | args = validate_args(argv)
116 |
117 | if args.version:
118 | print(f"{cli_name}, version {version}")
119 | return
120 |
121 | parsed_config = load_config_from_file(args.config)
122 |
123 | matrix = generate_matrix(args.matrix)
124 | to_stdout = all([args.file_key, args.output, args.matrix is not None])
125 |
126 | if to_stdout:
127 | file_keys = args.file_key
128 | output = {Output(args.output)}
129 | else:
130 | file_keys = list(parsed_config.files.keys())
131 | output = None
132 |
133 | if args.clean:
134 | delete_existing_files(args.clean)
135 |
136 | make_dependency_files(
137 | parsed_config=parsed_config,
138 | file_keys=file_keys,
139 | output=output,
140 | matrix=matrix,
141 | prepend_channels=args.prepend_channels,
142 | to_stdout=to_stdout,
143 | )
144 |
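A quick sketch of the `--matrix` string format parsed by `generate_matrix` above (the import path is an implementation detail of this private module, not public API):

    from rapids_dependency_file_generator._cli import generate_matrix

    generate_matrix("cuda=11.5;arch=x86_64")  # -> {"cuda": ["11.5"], "arch": ["x86_64"]}
    generate_matrix("")                       # -> None; an empty string means "no matrix"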
--------------------------------------------------------------------------------
/src/rapids_dependency_file_generator/_config.py:
--------------------------------------------------------------------------------
1 | import typing
2 | from dataclasses import dataclass, field
3 | from enum import Enum
4 | from os import PathLike
5 | from pathlib import Path
6 |
7 | import yaml
8 |
9 | from . import _constants
10 | from ._rapids_dependency_file_validator import validate_dependencies
11 |
12 | __all__ = [
13 | "Output",
14 | "FileExtras",
15 | "File",
16 | "PipRequirements",
17 | "CommonDependencies",
18 | "MatrixMatcher",
19 | "SpecificDependencies",
20 | "Dependencies",
21 | "Config",
22 | "parse_config",
23 | "load_config_from_file",
24 | ]
25 |
26 |
27 | class Output(Enum):
28 | """An output file type to generate."""
29 |
30 | PYPROJECT = "pyproject"
31 | """Generate a ``pyproject.toml``."""
32 |
33 | REQUIREMENTS = "requirements"
34 | """Generate a ``requirements.txt``."""
35 |
36 | CONDA = "conda"
37 | """Generate a Conda environment file."""
38 |
39 |
40 | @dataclass
41 | class FileExtras:
42 | """The ``extras`` field of a file key in ``dependencies.yaml``."""
43 |
44 | table: str
45 | """The ``table`` field."""
46 |
47 | key: typing.Union[str, None] = None
48 | """The ``key`` field."""
49 |
50 |
51 | @dataclass
52 | class File:
53 | """A file key in ``dependencies.yaml``."""
54 |
55 | output: set[Output]
56 | """The set of output file types to generate."""
57 |
58 | includes: list[str]
59 | """The list of dependency sets to include."""
60 |
61 | extras: typing.Union[FileExtras, None] = None
62 | """Optional extra information for the file generator."""
63 |
64 | matrix: dict[str, list[str]] = field(default_factory=dict)
65 | """The matrix of specific parameters to use when generating."""
66 |
67 | requirements_dir: Path = Path(_constants.default_requirements_dir)
68 | """The directory in which to write ``requirements.txt``."""
69 |
70 | conda_dir: Path = Path(_constants.default_conda_dir)
71 | """The directory in which to write the Conda environment file."""
72 |
73 | pyproject_dir: Path = Path(_constants.default_pyproject_dir)
74 | """The directory in which to write ``pyproject.toml``."""
75 |
76 |
77 | @dataclass
78 | class PipRequirements:
79 | """A list of Pip requirements to include as dependencies."""
80 |
81 | pip: list[str]
82 | """The list of Pip requirements."""
83 |
84 |
85 | @dataclass
86 | class CommonDependencies:
87 | """A dependency entry in the ``common`` field of a dependency set."""
88 |
89 | output_types: set[Output]
90 | """The set of output types for this entry."""
91 |
92 | packages: list[typing.Union[str, PipRequirements]]
93 | """The list of packages for this entry."""
94 |
95 |
96 | @dataclass
97 | class MatrixMatcher:
98 | """A matrix matcher for a ``specific`` dependency entry."""
99 |
100 | matrix: dict[str, str]
101 | """The set of matrix values to match."""
102 |
103 | packages: list[typing.Union[str, PipRequirements]]
104 | """The list of packages for this entry."""
105 |
106 |
107 | @dataclass
108 | class SpecificDependencies:
109 | """A dependency entry in the ``specific`` field of a dependency set."""
110 |
111 | output_types: set[Output]
112 | """The set of output types for this entry."""
113 |
114 | matrices: list[MatrixMatcher]
115 | """The list of matrix matchers for this entry."""
116 |
117 |
118 | @dataclass
119 | class Dependencies:
120 | """A dependency set."""
121 |
122 | common: list[CommonDependencies] = field(default_factory=list)
123 | """The list of common dependency entries."""
124 |
125 | specific: list[SpecificDependencies] = field(default_factory=list)
126 | """The list of specific dependency entries."""
127 |
128 |
129 | @dataclass
130 | class Config:
131 | """A fully parsed ``dependencies.yaml`` file."""
132 |
133 | path: Path
134 | """The path to the parsed file."""
135 |
136 | files: dict[str, File] = field(default_factory=dict)
137 | """The file entries, keyed by name."""
138 |
139 | channels: list[str] = field(default_factory=lambda: list(_constants.default_channels))
140 | """A list of channels to include in Conda files."""
141 |
142 | dependencies: dict[str, Dependencies] = field(default_factory=dict)
143 | """The dependency sets, keyed by name."""
144 |
145 |
146 | def _parse_outputs(outputs: typing.Union[str, list[str]]) -> set[Output]:
147 | if isinstance(outputs, str):
148 | outputs = [outputs]
149 | if outputs == ["none"]:
150 | outputs = []
151 | return {Output(o) for o in outputs}
152 |
153 |
154 | def _parse_extras(extras: dict[str, str]) -> FileExtras:
155 | return FileExtras(
156 | table=extras["table"],
157 | key=extras.get("key", None),
158 | )
159 |
160 |
161 | def _parse_file(file_config: dict[str, typing.Any]) -> File:
162 | def get_extras() -> typing.Union[FileExtras, None]:
163 | try:
164 | extras = file_config["extras"]
165 | except KeyError:
166 | return None
167 |
168 | return _parse_extras(extras)
169 |
170 | return File(
171 | output=_parse_outputs(file_config["output"]),
172 | extras=get_extras(),
173 | includes=list(file_config["includes"]),
174 | matrix={key: list(value) for key, value in file_config.get("matrix", {}).items()},
175 | requirements_dir=Path(file_config.get("requirements_dir", _constants.default_requirements_dir)),
176 | conda_dir=Path(file_config.get("conda_dir", _constants.default_conda_dir)),
177 | pyproject_dir=Path(file_config.get("pyproject_dir", _constants.default_pyproject_dir)),
178 | )
179 |
180 |
181 | def _parse_requirement(requirement: typing.Union[str, dict[str, list[str]]]) -> typing.Union[str, PipRequirements]:
182 | if isinstance(requirement, str):
183 | return requirement
184 |
185 | return PipRequirements(pip=requirement["pip"])
186 |
187 |
188 | def _parse_dependencies(dependencies: dict[str, typing.Any]) -> Dependencies:
189 | return Dependencies(
190 | common=[
191 | CommonDependencies(
192 | output_types=_parse_outputs(d["output_types"]),
193 | packages=[_parse_requirement(p) for p in d["packages"]],
194 | )
195 | for d in dependencies.get("common", [])
196 | ],
197 | specific=[
198 | SpecificDependencies(
199 | output_types=_parse_outputs(d["output_types"]),
200 | matrices=[
201 | MatrixMatcher(
202 | matrix=dict(m.get("matrix", {}) or {}),
203 | packages=[_parse_requirement(p) for p in m.get("packages", []) or []],
204 | )
205 | for m in d["matrices"]
206 | ],
207 | )
208 | for d in dependencies.get("specific", [])
209 | ],
210 | )
211 |
212 |
213 | def _parse_channels(channels) -> list[str]:
214 | if isinstance(channels, str):
215 | return [channels]
216 |
217 | return list(channels)
218 |
219 |
220 | def parse_config(config: dict[str, typing.Any], path: PathLike) -> Config:
221 | """Parse a configuration file from a dictionary.
222 |
223 | Parameters
224 | ----------
225 | config : dict[str, Any]
226 | The dictionary to parse.
227 | path : PathLike
228 | The path to the parsed configuration file. This will be stored as the ``path``
229 | attribute.
230 |
231 | Returns
232 | -------
233 | Config
234 | The fully parsed configuration file.
235 |
236 | Raises
237 | ------
238 |     RuntimeError
239 |         If the dependencies do not conform to the schema.
240 | """
241 | validate_dependencies(config)
242 | return Config(
243 | path=Path(path),
244 | files={key: _parse_file(value) for key, value in config["files"].items()},
245 | channels=_parse_channels(config.get("channels", [])),
246 | dependencies={key: _parse_dependencies(value) for key, value in config["dependencies"].items()},
247 | )
248 |
249 |
250 | def load_config_from_file(path: PathLike) -> Config:
251 | """Open a ``dependencies.yaml`` file and parse it.
252 |
253 | Parameters
254 | ----------
255 | path : PathLike
256 | The path to the configuration file to parse.
257 |
258 | Returns
259 | -------
260 | Config
261 | The fully parsed configuration file.
262 | """
263 | with open(path) as f:
264 | return parse_config(yaml.safe_load(f), path)
265 |
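A minimal sketch of `parse_config` on an inline dictionary (the file key `all` and the dependency set `run` are invented for this example):

    from rapids_dependency_file_generator import Output, parse_config

    cfg = parse_config(
        {
            "files": {"all": {"output": "conda", "includes": ["run"]}},
            "dependencies": {"run": {"common": [{"output_types": "conda", "packages": ["numpy"]}]}},
        },
        path="dependencies.yaml",
    )
    assert cfg.files["all"].output == {Output.CONDA}
    assert cfg.dependencies["run"].common[0].packages == ["numpy"]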
--------------------------------------------------------------------------------
/src/rapids_dependency_file_generator/_constants.py:
--------------------------------------------------------------------------------
1 | cli_name = "rapids-dependency-file-generator"
2 |
3 | default_channels = [
4 | "rapidsai",
5 | "rapidsai-nightly",
6 | "conda-forge",
7 | ]
8 |
9 | default_conda_dir = "conda/environments"
10 | default_requirements_dir = "python"
11 | default_pyproject_dir = "python"
12 | default_dependency_file_path = "dependencies.yaml"
13 |
--------------------------------------------------------------------------------
/src/rapids_dependency_file_generator/_rapids_dependency_file_generator.py:
--------------------------------------------------------------------------------
1 | import fnmatch
2 | import itertools
3 | import os
4 | import textwrap
5 | import typing
6 | from collections.abc import Generator
7 | from dataclasses import dataclass
8 |
9 | import tomlkit
10 | import yaml
11 |
12 | from . import _config
13 | from ._constants import cli_name
14 |
15 | __all__ = [
16 | "make_dependency_files",
17 | ]
18 |
19 | HEADER = f"# This file is generated by `{cli_name}`."
20 |
21 |
22 | def delete_existing_files(root: str) -> None:
23 | """Delete any files generated by this generator.
24 |
25 | This function can be used to clean up a directory tree before generating a new set
26 | of files from scratch.
27 |
28 | Parameters
29 | ----------
30 | root : str
31 | The path (relative or absolute) to the root of the directory tree to search for files to delete.
32 | """
33 | for dirpath, _, filenames in os.walk(root):
34 | for fn in filter(lambda fn: fn.endswith(".txt") or fn.endswith(".yaml"), filenames):
35 | with open(file_path := os.path.join(dirpath, fn)) as f:
36 | try:
37 | if HEADER in f.read():
38 | os.remove(file_path)
39 | except UnicodeDecodeError:
40 | pass
41 |
42 |
43 | def dedupe(
44 | dependencies: list[typing.Union[str, _config.PipRequirements]],
45 | ) -> typing.Sequence[typing.Union[str, dict[str, list[str]]]]:
46 | """Generate the unique set of dependencies contained in a dependency list.
47 |
48 | Parameters
49 | ----------
50 | dependencies : list[str | PipRequirements]
51 | A sequence containing dependencies (possibly including duplicates).
52 |
53 | Returns
54 | -------
55 | Sequence[str | dict[str, list[str]]]
56 | The ``dependencies`` with all duplicates removed.
57 | """
58 | string_deps: set[str] = set()
59 | pip_deps: set[str] = set()
60 | for dep in dependencies:
61 | if isinstance(dep, str):
62 | string_deps.add(dep)
63 | elif isinstance(dep, _config.PipRequirements):
64 | pip_deps.update(dep.pip)
65 |
66 | if pip_deps:
67 | return [*sorted(string_deps), {"pip": sorted(pip_deps)}]
68 | else:
69 | return sorted(string_deps)
70 |
71 |
72 | def grid(gridspec: dict[str, list[str]]) -> Generator[dict[str, str], None, None]:
73 | """Yield the Cartesian product of a `dict` of iterables.
74 |
75 | The input ``gridspec`` is a dictionary whose keys correspond to
76 | parameter names. Each key is associated with an iterable of the
77 | values that parameter could take on. The result is a sequence of
78 | dictionaries where each dictionary has one of the unique combinations
79 | of the parameter values.
80 |
81 | Parameters
82 | ----------
83 | gridspec : dict[str, list[str]]
84 | A mapping from parameter names to lists of parameter values.
85 |
86 | Yields
87 | ------
88 | dict[str, str]
89 | Each yielded value is a dictionary containing one of the unique
90 | combinations of parameter values from `gridspec`.
91 | """
92 | for values in itertools.product(*gridspec.values()):
93 | yield dict(zip(gridspec.keys(), values))
94 |
95 |
96 | def make_dependency_file(
97 | *,
98 | file_type: _config.Output,
99 | conda_env_name: typing.Union[str, None],
100 | file_name: str,
101 | config_file: os.PathLike,
102 | output_dir: os.PathLike,
103 | conda_channels: list[str],
104 | dependencies: typing.Sequence[typing.Union[str, dict[str, list[str]]]],
105 | extras: typing.Union[_config.FileExtras, None],
106 | ) -> str:
107 | """Generate the contents of the dependency file.
108 |
109 | Parameters
110 | ----------
111 | file_type : Output
112 | An Output value used to determine the file type.
113 | conda_env_name : str | None
114 | Name to put in the 'name: ' field when generating conda environment YAML files.
115 | If ``None``, the generated conda environment file will not have a 'name:' entry.
116 | Only used when ``file_type`` is CONDA.
117 | file_name : str
118 | Name of a file in ``output_dir`` to read in.
119 | Only used when ``file_type`` is PYPROJECT.
120 | config_file : PathLike
121 | The full path to the dependencies.yaml file.
122 | output_dir : PathLike
123 | The path to the directory where the dependency files will be written.
124 | conda_channels : list[str]
125 | The channels to include in the file. Only used when ``file_type`` is
126 | CONDA.
127 | dependencies : Sequence[str | dict[str, list[str]]]
128 | The dependencies to include in the file.
129 | extras : FileExtras | None
130 | Any extra information provided for generating this dependency file.
131 |
132 | Returns
133 | -------
134 | str
135 | The contents of the file.
136 | """
137 | relative_path_to_config_file = os.path.relpath(config_file, output_dir)
138 | file_contents = textwrap.dedent(
139 | f"""\
140 | {HEADER}
141 | # To make changes, edit {relative_path_to_config_file} and run `{cli_name}`.
142 | """
143 | )
144 | if file_type == _config.Output.CONDA:
145 | env_dict = {
146 | "channels": conda_channels,
147 | "dependencies": dependencies,
148 | }
149 | if conda_env_name is not None:
150 | env_dict["name"] = conda_env_name
151 | file_contents += yaml.dump(env_dict)
152 | elif file_type == _config.Output.REQUIREMENTS:
153 | for dep in dependencies:
154 | if isinstance(dep, dict):
155 | raise ValueError(f"Map inputs like {dep} are not allowed for the 'requirements' file type.")
156 |
157 | file_contents += f"{dep}\n"
158 | elif file_type == _config.Output.PYPROJECT:
159 | if extras is None:
160 | raise ValueError("The 'extras' field must be provided for the 'pyproject' file type.")
161 |
162 | if extras.table == "build-system":
163 | key = "requires"
164 | if extras.key is not None:
165 | raise ValueError(
166 | "The 'key' field is not allowed for the 'pyproject' file type when 'table' is 'build-system'."
167 | )
168 | elif extras.table == "project":
169 | key = "dependencies"
170 | if extras.key is not None:
171 | raise ValueError(
172 | "The 'key' field is not allowed for the 'pyproject' file type when 'table' is 'project'."
173 | )
174 | else:
175 | if extras.key is None:
176 | raise ValueError(
177 | "The 'key' field is required for the 'pyproject' file type when "
178 | "'table' is not one of 'build-system' or 'project'."
179 | )
180 | key = extras.key
181 |
182 | # This file type needs to be modified in place instead of built from scratch.
183 | with open(os.path.join(output_dir, file_name)) as f:
184 | file_contents_toml = tomlkit.load(f)
185 |
186 | toml_deps = tomlkit.array()
187 | for dep in dependencies:
188 | toml_deps.add_line(dep)
189 | toml_deps.add_line(indent="")
190 | toml_deps.comment(
191 | f"This list was generated by `{cli_name}`. To make changes, edit "
192 | f"{relative_path_to_config_file} and run `{cli_name}`."
193 | )
194 |
195 | # Recursively descend into subtables like "[x.y.z]", creating tables as needed.
196 | table = file_contents_toml
197 | for section in extras.table.split("."):
198 | try:
199 | table = table[section]
200 | except tomlkit.exceptions.NonExistentKey:
201 | # If table is not a super-table (i.e. if it has its own contents and is
202 |                 # not simply part of a nested table name like 'x.y.z'), add a new line
203 | # before adding a new sub-table.
204 | if not table.is_super_table():
205 | table.add(tomlkit.nl())
206 | table[section] = tomlkit.table()
207 | table = table[section]
208 |
209 | table[key] = toml_deps
210 |
211 | file_contents = tomlkit.dumps(file_contents_toml)
212 |
213 | return file_contents
214 |
215 |
216 | def get_filename(file_type: _config.Output, file_key: str, matrix_combo: dict[str, str]):
217 | """Get the name of the file to which to write a generated dependency set.
218 |
219 | The file name will be composed of the following components, each determined
220 | by the `file_type`:
221 | - A file-type-based prefix e.g. "requirements" for requirements.txt files.
222 | - A name determined by the value of $FILENAME in the corresponding
223 | [files.$FILENAME] section of dependencies.yaml. This name is used for some
224 | output types (conda, requirements) and not others (pyproject).
225 | - A matrix description encoding the key-value pairs in `matrix_combo`.
226 | - A suitable extension for the file (e.g. ".yaml" for conda environment files.)
227 |
228 | Parameters
229 | ----------
230 | file_type : Output
231 | An Output value used to determine the file type.
232 | file_key : str
233 | The name of this member in the [files] list in dependencies.yaml.
234 | matrix_combo : dict[str, str]
235 | A mapping of key-value pairs corresponding to the
236 | [files.$FILENAME.matrix] entry in dependencies.yaml.
237 |
238 | Returns
239 | -------
240 | str
241 | The name of the file to generate.
242 | """
243 | file_type_prefix = ""
244 | file_ext = ""
245 | file_name_prefix = file_key
246 | suffix = "_".join([f"{k}-{v}" for k, v in matrix_combo.items() if v])
247 | if file_type == _config.Output.CONDA:
248 | file_ext = ".yaml"
249 | elif file_type == _config.Output.REQUIREMENTS:
250 | file_ext = ".txt"
251 | file_type_prefix = "requirements"
252 | elif file_type == _config.Output.PYPROJECT:
253 | file_ext = ".toml"
254 | # Unlike for files like requirements.txt or conda environment YAML files, which
255 | # may be named with additional prefixes (e.g. all_cuda_*) pyproject.toml files
256 | # need to have that exact name and are never prefixed.
257 | file_name_prefix = "pyproject"
258 | suffix = ""
259 | filename = "_".join(filter(None, (file_type_prefix, file_name_prefix, suffix))).replace(".", "")
260 | return filename + file_ext
261 |
262 |
263 | def get_output_dir(*, file_type: _config.Output, config_file_path: os.PathLike, file_config: _config.File):
264 | """Get the directory containing a generated dependency file's contents.
265 |
266 | The output directory is determined by the `file_type` and the corresponding
267 |     key in the `file_config`. The path provided in `file_config` is taken
268 |     relative to the directory containing `config_file_path`.
269 |
270 | Parameters
271 | ----------
272 | file_type : Output
273 | An Output value used to determine the file type.
274 | config_file_path : PathLike
275 | Path to the dependency-file-generator config file (e.g. dependencies.yaml).
276 | file_config : File
277 |         The `File` entry for one of the [files.$FILENAME] sections of dependencies.yaml;
278 |         its `conda_dir`, `pyproject_dir`, or `requirements_dir` field supplies the output directory.
279 |
280 | Returns
281 | -------
282 | str
283 | The directory containing the dependency file's contents.
284 | """
285 | path = [os.path.dirname(config_file_path)]
286 | if file_type == _config.Output.CONDA:
287 | path.append(file_config.conda_dir)
288 | elif file_type == _config.Output.REQUIREMENTS:
289 | path.append(file_config.requirements_dir)
290 | elif file_type == _config.Output.PYPROJECT:
291 | path.append(file_config.pyproject_dir)
292 | return os.path.join(*path)
293 |
294 |
295 | def should_use_specific_entry(matrix_combo: dict[str, str], specific_entry_matrix: dict[str, str]) -> bool:
296 | """Check if an entry should be used.
297 |
298 | Dependencies listed in the [dependencies.$DEPENDENCY_GROUP.specific]
299 | section are specific to a particular matrix entry provided by the
300 | [matrices] list. This function validates the [matrices.matrix] value
301 | against the provided `matrix_combo` to check if they are compatible.
302 |
303 | A `specific_entry_matrix` is compatible with a `matrix_combo` if and only
304 |     if `matrix_combo[key]` matches the glob pattern
305 |     `specific_entry_matrix[key]` for every key defined in `specific_entry_matrix`. A
306 | `matrix_combo` may contain additional keys not specified by
307 | `specific_entry_matrix`.
308 |
309 | Parameters
310 | ----------
311 | matrix_combo : dict[str, str]
312 | A mapping from matrix keys to values for the current file being
313 | generated.
314 | specific_entry_matrix : dict[str, str]
315 | A mapping from matrix keys to values for the current specific
316 | dependency set being checked.
317 |
318 | Returns
319 | -------
320 | bool
321 | True if the `specific_entry_matrix` is compatible with the current
322 | `matrix_combo` and False otherwise.
323 | """
324 | return all(
325 | matrix_combo.get(specific_key) and fnmatch.fnmatch(matrix_combo[specific_key], specific_value)
326 | for specific_key, specific_value in specific_entry_matrix.items()
327 | )
328 |
329 |
330 | @dataclass
331 | class _DependencyCollection:
332 | str_deps: set[str]
333 | # e.g. {"pip": ["dgl", "pyg"]}, used in conda envs
334 | dict_deps: dict[str, list[str]]
335 |
336 | def update(self, deps: typing.Sequence[typing.Union[str, dict[str, list[str]]]]) -> None:
337 | for dep in deps:
338 | if isinstance(dep, dict):
339 | for k, v in dep.items():
340 | if k in self.dict_deps:
341 | self.dict_deps[k].extend(v)
342 | self.dict_deps[k] = sorted(set(self.dict_deps[k]))
343 | else:
344 | self.dict_deps[k] = v
345 | else:
346 | self.str_deps.add(dep)
347 |
348 | @property
349 | def deps_list(self) -> typing.Sequence[typing.Union[str, dict[str, list[str]]]]:
350 | if self.dict_deps:
351 | return [*sorted(self.str_deps), self.dict_deps]
352 |
353 | return [*sorted(self.str_deps)]
354 |
355 |
356 | def make_dependency_files(
357 | *,
358 | parsed_config: _config.Config,
359 | file_keys: list[str],
360 | output: typing.Union[set[_config.Output], None],
361 | matrix: typing.Union[dict[str, list[str]], None],
362 | prepend_channels: list[str],
363 | to_stdout: bool,
364 | ) -> None:
365 | """Generate dependency files.
366 |
367 | This function iterates over data parsed from a YAML file conforming to the
368 |     ``dependencies.yaml`` file spec
369 | and produces the requested files.
370 |
371 | Parameters
372 | ----------
373 | parsed_config : Config
374 | The parsed dependencies.yaml config file.
375 | file_keys : list[str]
376 | The list of file keys to use.
377 | output : set[Output] | None
378 | The set of file types to write, or None to write the file types
379 | specified by the file key.
380 | matrix : dict[str, list[str]] | None
381 | The matrix to use, or None if the default matrix from each file key
382 | should be used.
383 | prepend_channels : list[str]
384 | List of channels to prepend to the ones from parsed_config.
385 | to_stdout : bool
386 | Whether the output should be written to stdout. If False, it will be
387 |         written to a file whose path is computed from the output file type and
388 |         the parsed config file's path.
389 |
390 | Raises
391 | ------
392 | ValueError
393 | If the file is malformed. There are numerous different error cases
394 | which are described by the error messages.
395 | """
396 | if to_stdout and len(file_keys) > 1 and output is not None and _config.Output.PYPROJECT in output:
397 | raise ValueError(
398 | f"Using --file-key multiple times together with '--output {_config.Output.PYPROJECT.value}' "
399 | "when writing to stdout is not supported."
400 | )
401 |
402 | # the list of conda channels does not depend on individual file keys
403 | conda_channels = prepend_channels + parsed_config.channels
404 |
405 | # initialize a container for "all dependencies found across all files", to support
406 | # passing multiple files keys and writing a merged result to stdout
407 | all_dependencies = _DependencyCollection(str_deps=set(), dict_deps={})
408 |
409 | for file_key in file_keys:
410 | file_config = parsed_config.files[file_key]
411 | file_types_to_generate = file_config.output if output is None else output
412 | if matrix is not None:
413 | file_matrix = matrix
414 | else:
415 | file_matrix = file_config.matrix
416 | calculated_grid = list(grid(file_matrix))
417 | if _config.Output.PYPROJECT in file_types_to_generate and len(calculated_grid) > 1:
418 | raise ValueError("Pyproject outputs can't have more than one matrix output")
419 | for file_type in file_types_to_generate:
420 | for matrix_combo in calculated_grid:
421 | dependencies = []
422 |
423 | # Collect all includes from each dependency list corresponding
424 | # to this (file_name, file_type, matrix_combo) tuple. The
425 | # current tuple corresponds to a single file to be written.
426 | for include in file_config.includes:
427 | dependency_entry = parsed_config.dependencies[include]
428 |
429 | for common_entry in dependency_entry.common:
430 | if file_type not in common_entry.output_types:
431 | continue
432 | dependencies.extend(common_entry.packages)
433 |
434 | for specific_entry in dependency_entry.specific:
435 | if file_type not in specific_entry.output_types:
436 | continue
437 |
438 | # Ensure that all specific matrices are unique
439 | num_matrices = len(specific_entry.matrices)
440 | num_unique = len(
441 | {
442 | frozenset(specific_matrices_entry.matrix.items())
443 | for specific_matrices_entry in specific_entry.matrices
444 | }
445 | )
446 | if num_matrices != num_unique:
447 | err = f"All matrix entries must be unique. Found duplicates in '{include}':"
448 | for specific_matrices_entry in specific_entry.matrices:
449 | err += f"\n - {specific_matrices_entry.matrix}"
450 | raise ValueError(err)
451 |
452 | fallback_entry = None
453 | for specific_matrices_entry in specific_entry.matrices:
454 |                         # An empty `specific_matrices_entry.matrix` is
455 | # valid and can be used to specify a fallback_entry for a
456 | # `matrix_combo` for which no specific entry
457 | # exists. In that case we save the fallback_entry result
458 | # and only use it at the end if nothing more
459 | # specific is found.
460 | if not specific_matrices_entry.matrix:
461 | fallback_entry = specific_matrices_entry
462 | continue
463 |
464 | if should_use_specific_entry(matrix_combo, specific_matrices_entry.matrix):
465 | # A package list may be empty as a way to
466 | # indicate that for some matrix elements no
467 | # packages should be installed.
468 | dependencies.extend(specific_matrices_entry.packages or [])
469 | break
470 | else:
471 | if fallback_entry:
472 | dependencies.extend(fallback_entry.packages)
473 | else:
474 | raise ValueError(f"No matching matrix found in '{include}' for: {matrix_combo}")
475 |
476 | # Dedupe deps and print / write to filesystem
477 | full_file_name = get_filename(file_type, file_key, matrix_combo)
478 | deduped_deps = dedupe(dependencies)
479 |
480 | output_dir = get_output_dir(
481 | file_type=file_type,
482 | config_file_path=parsed_config.path,
483 | file_config=file_config,
484 | )
485 | contents = make_dependency_file(
486 | file_type=file_type,
487 | conda_env_name=os.path.splitext(full_file_name)[0],
488 | file_name=full_file_name,
489 | config_file=parsed_config.path,
490 | output_dir=output_dir,
491 | conda_channels=conda_channels,
492 | dependencies=deduped_deps,
493 | extras=file_config.extras,
494 | )
495 |
496 | if to_stdout:
497 | if len(file_keys) == 1:
498 | print(contents)
499 | else:
500 | all_dependencies.update(deduped_deps)
501 | else:
502 | os.makedirs(output_dir, exist_ok=True)
503 | file_path = os.path.join(output_dir, full_file_name)
504 | with open(file_path, "w") as f:
505 | f.write(contents)
506 |
507 | # create one unified output from all the file_keys, and print it to stdout
508 | if to_stdout and len(file_keys) > 1:
509 | # convince mypy that 'output' is not None here
510 | #
511 | # 'output' is technically a set because of https://github.com/rapidsai/dependency-file-generator/pull/74,
512 | # but since https://github.com/rapidsai/dependency-file-generator/pull/79 it's only ever one of the following:
513 | #
514 | # - an exactly-1-item set (stdout=True, or when used by rapids-build-backend)
515 | # - 'None' (stdout=False)
516 | #
517 | err_msg = (
518 | "Exactly 1 output type should be provided when asking rapids-dependency-file-generator to write to stdout. "
519 | "If you see this, you've found a bug. Please report it."
520 | )
521 | assert output is not None, err_msg
522 |
523 | contents = make_dependency_file(
524 | file_type=output.pop(),
525 | conda_env_name=None,
526 | file_name="ignored-because-multiple-pyproject-files-are-not-supported",
527 | config_file=parsed_config.path,
528 | output_dir=parsed_config.path,
529 | conda_channels=conda_channels,
530 | dependencies=all_dependencies.deps_list,
531 | extras=None,
532 | )
533 | print(contents)
534 |
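The helpers above are easiest to follow from a few small inputs. A sketch (the underscore-prefixed import path is an implementation detail rather than public API):

    from rapids_dependency_file_generator import Output, PipRequirements
    from rapids_dependency_file_generator._rapids_dependency_file_generator import (
        dedupe,
        get_filename,
        grid,
        should_use_specific_entry,
    )

    # grid() expands a files.*.matrix mapping into every concrete combination.
    list(grid({"cuda": ["11.5", "11.6"], "arch": ["x86_64"]}))
    # -> [{'cuda': '11.5', 'arch': 'x86_64'}, {'cuda': '11.6', 'arch': 'x86_64'}]

    # A specific entry matches when each of its values, treated as a glob pattern,
    # matches the corresponding value in the current matrix combination.
    should_use_specific_entry({"cuda": "11.8", "arch": "x86_64"}, {"cuda": "11.*"})  # -> True
    should_use_specific_entry({"cuda": "12.0", "arch": "x86_64"}, {"cuda": "11.*"})  # -> False

    # dedupe() removes duplicates and folds pip requirements into one dict entry.
    dedupe(["clang=11.1.0", "clang=11.1.0", PipRequirements(pip=["streamz"])])
    # -> ['clang=11.1.0', {'pip': ['streamz']}]

    # get_filename() strips '.' from the matrix suffix, which is why CUDA 11.5
    # shows up as 'cuda-115' in the generated file names.
    get_filename(Output.CONDA, "all", {"cuda": "11.5", "arch": "x86_64"})
    # -> 'all_cuda-115_arch-x86_64.yaml'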
--------------------------------------------------------------------------------
/src/rapids_dependency_file_generator/_rapids_dependency_file_validator.py:
--------------------------------------------------------------------------------
1 | """Logic for validating dependency files."""
2 |
3 | import importlib.resources
4 | import json
5 | import sys
6 | import textwrap
7 | import typing
8 |
9 | import jsonschema
10 | from jsonschema.exceptions import best_match
11 |
12 | SCHEMA = json.loads(importlib.resources.files(__package__).joinpath("schema.json").read_bytes())
13 |
14 |
15 | def validate_dependencies(dependencies: dict[str, typing.Any]) -> None:
16 | """Validate a dictionary against the dependencies.yaml spec.
17 |
18 | Parameters
19 | ----------
20 | dependencies : dict
21 | The parsed dependencies.yaml file.
22 |
23 | Raises
24 | ------
25 |     RuntimeError
26 |         If the dependencies do not conform to the schema.
27 | """
28 | validator = jsonschema.Draft7Validator(SCHEMA)
29 | errors = list(validator.iter_errors(dependencies))
30 | if len(errors) > 0:
31 | print("The provided dependency file contains schema errors.", file=sys.stderr)
32 | best_matching_error = best_match(errors)
33 | print("\n", textwrap.indent(str(best_matching_error), "\t"), "\n", file=sys.stderr)
34 | raise RuntimeError("The provided dependencies data is invalid.")
35 |
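A small sketch of the behaviour described above, using invented inline dictionaries: a conforming document passes silently, while one missing a required field has its schema errors printed to stderr before a RuntimeError is raised.

    from rapids_dependency_file_generator._rapids_dependency_file_validator import (
        validate_dependencies,
    )

    # Conforms to schema.json: returns without raising.
    validate_dependencies(
        {
            "files": {"all": {"output": "none", "includes": ["run"]}},
            "dependencies": {"run": {"common": [{"output_types": "none", "packages": ["numpy"]}]}},
        }
    )

    # The required 'includes' field is missing: errors are printed and RuntimeError is raised.
    try:
        validate_dependencies({"files": {"all": {"output": "none"}}, "dependencies": {}})
    except RuntimeError:
        pass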
--------------------------------------------------------------------------------
/src/rapids_dependency_file_generator/_version.py:
--------------------------------------------------------------------------------
1 | __version__ = "1.18.1"
2 |
--------------------------------------------------------------------------------
/src/rapids_dependency_file_generator/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rapidsai/dependency-file-generator/3a213df6eabc8a768f41f05031c3265cb4380b58/src/rapids_dependency_file_generator/py.typed
--------------------------------------------------------------------------------
/src/rapids_dependency_file_generator/schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-07/schema#",
3 | "$id": "https://raw.githubusercontent.com/rapidsai/dependency-file-generator/v1.18.1/src/rapids_dependency_file_generator/schema.json",
4 | "type": "object",
5 | "title": "RAPIDS Package Dependency Specification Format",
6 | "description": "Consolidated specification of RAPIDS project dependencies",
7 | "properties": {
8 | "files": {
9 | "type": "object",
10 | "patternProperties": {
11 | ".*": {
12 | "type": "object",
13 | "properties": {
14 | "output": {"$ref": "#/$defs/outputs"},
15 | "extras": {"$ref": "#/$defs/extras"},
16 | "includes": {"type": "array", "items": {"type": "string"}},
17 | "matrix": {"$ref": "#/$defs/matrix"},
18 | "requirements_dir": {"type": "string"},
19 | "conda_dir": {"type": "string"},
20 | "pyproject_dir": {"type": "string"}
21 | },
22 | "additionalProperties": false,
23 | "required": ["output", "includes"]
24 | }
25 | },
26 | "minProperties": 1
27 | },
28 | "dependencies": {
29 | "type": "object",
30 | "patternProperties": {
31 | ".*": {
32 | "type": "object",
33 | "properties": {
34 | "common": {
35 | "type": "array",
36 | "items": {
37 | "type": "object",
38 | "properties": {
39 | "output_types": {"$ref": "#/$defs/outputs"},
40 | "packages": {"$ref": "#/$defs/packages"}
41 | },
42 | "required": ["output_types", "packages"],
43 | "additionalProperties": false
44 | }
45 | },
46 | "specific": {
47 | "type": "array",
48 | "items": {
49 | "type": "object",
50 | "properties": {
51 | "output_types": {"$ref": "#/$defs/outputs"},
52 | "matrices": {"$ref": "#/$defs/matrices"}
53 | },
54 | "required": ["output_types", "matrices"],
55 | "additionalProperties": false
56 | }
57 | }
58 | },
59 | "minProperties": 1,
60 | "additionalProperties": false
61 | }
62 | }
63 | },
64 | "channels": {"$ref": "#/$defs/channels"}
65 | },
66 | "required": ["files", "dependencies"],
67 | "additionalProperties": false,
68 | "$defs": {
69 | "channel": {
70 | "type": "string",
71 | "format": "iri-reference"
72 | },
73 | "channel-list": {
74 | "type": "array",
75 | "items": {
76 | "$ref": "#/$defs/channel"
77 | }
78 | },
79 | "channels": {
80 | "$oneOf": [
81 | {"$ref": "#/$defs/channel"},
82 | {"$ref": "#/$defs/channel-list"}
83 | ]
84 | },
85 | "matrix": {
86 | "type": "object",
87 | "patternProperties": {
88 | ".*": {
89 | "type": "array",
90 | "items": {"oneOf": [
91 | {"type": "string"},
92 | {"type": "null"}
93 | ]}
94 | }
95 | }
96 | },
97 | "matrix-matcher": {
98 | "type": "object",
99 | "properties": {
100 | "matrix": {
101 | "oneOf": [
102 | {
103 | "type": "object",
104 | "patternProperties": {
105 | ".*": {"type": "string"}
106 | }
107 | },
108 | {"type": "null"}
109 | ]
110 | },
111 | "packages": {"oneOf": [
112 | {"$ref": "#/$defs/packages"},
113 | {"type": "null"}
114 | ]}
115 | },
116 | "requiredProperties": ["matrix", "packages"],
117 | "additionalProperties": false
118 | },
119 | "matrices": {
120 | "type": "array",
121 | "items": {"$ref": "#/$defs/matrix-matcher"}
122 | },
123 | "output-types": {
124 | "enum": ["conda", "requirements", "pyproject"]
125 | },
126 | "output-types-array": {
127 | "type": "array",
128 | "item": {"$ref": "#/$defs/output-types"}
129 | },
130 | "outputs": {
131 | "oneOf": [
132 | {"$ref": "#/$defs/output-types"},
133 | {"$ref": "#/$defs/output-types-array"},
134 | {"const": "none"}
135 | ]
136 | },
137 | "packages": {
138 | "type": "array",
139 | "items": {
140 | "oneOf": [
141 | {"$ref": "#/$defs/requirement"},
142 | {"$ref": "#/$defs/pip-requirements"}
143 | ]
144 | }
145 | },
146 | "requirement": {
147 | "type": "string"
148 | },
149 | "requirements": {
150 | "type": "array",
151 | "items": {
152 | "$ref": "#/$defs/requirement"
153 | },
154 | "minItems": 1
155 |
156 | },
157 | "pip-requirements": {
158 | "type": "object",
159 | "properties": {
160 | "pip": {"$ref": "#/$defs/requirements"}
161 | },
162 | "additionalProperties": false,
163 | "required": ["pip"]
164 | },
165 | "extras": {
166 | "type": "object",
167 | "properties": {
168 | "table": { "type": "string", "required": true },
169 | "key": {"type": "string", "required": false }
170 | },
171 | "additionalProperties": false
172 | }
173 | }
174 | }
175 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from rapids_dependency_file_generator._rapids_dependency_file_validator import SCHEMA
4 |
5 |
6 | @pytest.fixture(scope="session")
7 | def schema():
8 | return SCHEMA
9 |
--------------------------------------------------------------------------------
/tests/examples/conda-minimal/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | build:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.6"]
7 | arch: [x86_64]
8 | includes:
9 | - build
10 | channels:
11 | - rapidsai
12 | - conda-forge
13 | dependencies:
14 | build:
15 | common:
16 | - output_types: [conda, requirements]
17 | packages:
18 | - clang=11.1.0
19 | - spdlog>=1.8.5,<1.9
20 | - output_types: conda
21 | packages:
22 | - pip
23 | - pip:
24 | - git+https://github.com/python-streamz/streamz.git@master
25 | specific:
26 | - output_types: [conda, requirements]
27 | matrices:
28 | - matrix:
29 | cuda: "11.5"
30 | packages:
31 | - cuda-python>=11.5,<11.7.1
32 | - matrix:
33 | cuda: "11.6"
34 | packages:
35 | - cuda-python>=11.6,<11.7.1
36 | - output_types: conda
37 | matrices:
38 | - matrix:
39 | cuda: "11.5"
40 | packages:
41 | - cudatoolkit=11.5
42 | - matrix:
43 | cuda: "11.6"
44 | packages:
45 | - cudatoolkit=11.6
46 |
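For reference, a sketch of producing this example's outputs from Python instead of the command line (run from the repository root): the two matrix combinations expand to the two expected environment files shown next, written under output/actual beside this config.

    from rapids_dependency_file_generator._cli import main

    # Equivalent to invoking the CLI with this example's config; generates
    # build_cuda-115_arch-x86_64.yaml and build_cuda-116_arch-x86_64.yaml.
    main(["--config", "tests/examples/conda-minimal/dependencies.yaml"])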
--------------------------------------------------------------------------------
/tests/examples/conda-minimal/output/expected/build_cuda-115_arch-x86_64.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang=11.1.0
8 | - cuda-python>=11.5,<11.7.1
9 | - cudatoolkit=11.5
10 | - pip
11 | - spdlog>=1.8.5,<1.9
12 | - pip:
13 | - git+https://github.com/python-streamz/streamz.git@master
14 | name: build_cuda-115_arch-x86_64
15 |
--------------------------------------------------------------------------------
/tests/examples/conda-minimal/output/expected/build_cuda-116_arch-x86_64.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang=11.1.0
8 | - cuda-python>=11.6,<11.7.1
9 | - cudatoolkit=11.6
10 | - pip
11 | - spdlog>=1.8.5,<1.9
12 | - pip:
13 | - git+https://github.com/python-streamz/streamz.git@master
14 | name: build_cuda-116_arch-x86_64
15 |
--------------------------------------------------------------------------------
/tests/examples/duplicate-specific-matrix-entries/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | all:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.8"]
7 | includes:
8 | - cudatoolkit
9 | channels:
10 | - rapidsai
11 | - conda-forge
12 | dependencies:
13 | cudatoolkit:
14 | specific:
15 | - output_types: conda
16 | matrices:
17 | - matrix:
18 | cuda: "11.5"
19 | packages:
20 | - cudatoolkit=11.5
21 | - matrix:
22 | cuda: "11.5"
23 | packages:
24 | - cudatoolkit=11.5
25 |
--------------------------------------------------------------------------------
/tests/examples/integration/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | all:
3 | output: [conda, requirements]
4 | requirements_dir: output/actual
5 | conda_dir: output/actual
6 | matrix:
7 | cuda: ["11.5", "11.6"]
8 | includes:
9 | - build
10 | - test
11 | test:
12 | output: none
13 | includes:
14 | - test
15 | channels:
16 | - rapidsai
17 | - conda-forge
18 | dependencies:
19 | build:
20 | common:
21 | - output_types: [conda, requirements]
22 | packages:
23 | - black=22.3.0
24 | - clang=11.1.0
25 | - output_types: conda
26 | packages:
27 | - clang-tools=11.1.0
28 | - spdlog>=1.8.5,<1.9
29 | - output_types: requirements
30 | packages:
31 | - some_common_req_misc_dep
32 | specific:
33 | - output_types: [conda, requirements]
34 | matrices:
35 | - matrix:
36 | cuda: "11.5"
37 | packages:
38 | - cuda-python>=11.5,<11.7.1
39 | - matrix:
40 | cuda: "11.6"
41 | packages:
42 | - cuda-python>=11.6,<11.7.1
43 | - output_types: conda
44 | matrices:
45 | - matrix:
46 | cuda: "11.5"
47 | packages:
48 | - cudatoolkit=11.5
49 | - matrix:
50 | cuda: "11.6"
51 | packages:
52 | - cudatoolkit=11.6
53 | test:
54 | common:
55 | - output_types: [conda, requirements]
56 | packages:
57 | - pytest
58 | - pytest-cov
59 |
--------------------------------------------------------------------------------
/tests/examples/integration/output/expected/all_cuda-115.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - black=22.3.0
8 | - clang-tools=11.1.0
9 | - clang=11.1.0
10 | - cuda-python>=11.5,<11.7.1
11 | - cudatoolkit=11.5
12 | - pytest
13 | - pytest-cov
14 | - spdlog>=1.8.5,<1.9
15 | name: all_cuda-115
16 |
--------------------------------------------------------------------------------
/tests/examples/integration/output/expected/all_cuda-116.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - black=22.3.0
8 | - clang-tools=11.1.0
9 | - clang=11.1.0
10 | - cuda-python>=11.6,<11.7.1
11 | - cudatoolkit=11.6
12 | - pytest
13 | - pytest-cov
14 | - spdlog>=1.8.5,<1.9
15 | name: all_cuda-116
16 |
--------------------------------------------------------------------------------
/tests/examples/integration/output/expected/requirements_all_cuda-115.txt:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | black=22.3.0
4 | clang=11.1.0
5 | cuda-python>=11.5,<11.7.1
6 | pytest
7 | pytest-cov
8 | some_common_req_misc_dep
9 |
--------------------------------------------------------------------------------
/tests/examples/integration/output/expected/requirements_all_cuda-116.txt:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | black=22.3.0
4 | clang=11.1.0
5 | cuda-python>=11.6,<11.7.1
6 | pytest
7 | pytest-cov
8 | some_common_req_misc_dep
9 |
--------------------------------------------------------------------------------
/tests/examples/invalid/invalid-requirement/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | build:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.6"]
7 | arch: [x86_64]
8 | includes:
9 | - build
10 | channels:
11 | - rapidsai
12 | - conda-forge
13 | dependencies:
14 | build:
15 | common:
16 | - output_types: [conda, requirements]
17 | packages:
18 | - clang=11.1.0
19 | - spdlog>=1.8.5,<1.9
20 | - output_types: conda
21 | packages:
22 | - pip
23 | - pip:
24 | - git+https://github.com/python-streamz/streamz.git@master
25 | specific:
26 | - output_types: [conda, requirements]
27 | matrices:
28 | - matrix:
29 | cuda: "11.5"
30 | packages:
31 | - 1234
32 | - cuda-python>=11.5,<11.7.1
33 | - matrix:
34 | cuda: "11.6"
35 | packages:
36 | - cuda-python>=11.6,<11.7.1
37 | - output_types: conda
38 | matrices:
39 | - matrix:
40 | cuda: "11.5"
41 | packages:
42 | - cudatoolkit=11.5
43 | - matrix:
44 | cuda: "11.6"
45 | packages:
46 | - cudatoolkit=11.6
47 |
--------------------------------------------------------------------------------
/tests/examples/invalid/pip-no-list/dependencies.yaml:
--------------------------------------------------------------------------------
1 | # tests that dfg rejects a config where 'pip:' in a conda package list is a single string
2 | # (it should always be an array of packages)
3 | files:
4 | build:
5 | output: conda
6 | includes:
7 | - build
8 | channels:
9 | - rapidsai
10 | - conda-forge
11 | dependencies:
12 | build:
13 | common:
14 | - output_types: [conda]
15 | packages:
16 | - beep-boop=1.2.3
17 | - pip
18 | - pip: pandas
19 |
--------------------------------------------------------------------------------
/tests/examples/matrix-glob/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | dev:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["10.0", "11.8", "12.0"]
7 | includes:
8 | - build
9 | channels:
10 | - rapidsai
11 | - conda-forge
12 | dependencies:
13 | build:
14 | common:
15 | - output_types: [conda]
16 | packages:
17 | - clang-tools=11.1.0
18 | specific:
19 | - output_types: [conda]
20 | matrices:
21 | - matrix:
22 | cuda: "11.*"
23 | packages:
24 | - cudatoolkit=11.*
25 | - matrix:
26 | cuda: "12.*"
27 | packages:
28 | - cuda-version=12.*
29 | - matrix:
30 | packages:
31 |
--------------------------------------------------------------------------------
/tests/examples/matrix-glob/output/expected/dev_cuda-100.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | name: dev_cuda-100
9 |
--------------------------------------------------------------------------------
/tests/examples/matrix-glob/output/expected/dev_cuda-118.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.*
9 | name: dev_cuda-118
10 |
--------------------------------------------------------------------------------
/tests/examples/matrix-glob/output/expected/dev_cuda-120.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cuda-version=12.*
9 | name: dev_cuda-120
10 |
--------------------------------------------------------------------------------
/tests/examples/matrix-null-item/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | dev:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["11.5"]
7 | arch: [x86_64, arm64]
8 | py: ["3.8", null]
9 | includes:
10 | - build
11 | channels:
12 | - rapidsai
13 | - conda-forge
14 | dependencies:
15 | build:
16 | common:
17 | - output_types: [conda]
18 | packages:
19 | - clang-tools=11.1.0
20 | - spdlog>=1.8.5,<1.9
21 | specific:
22 | - output_types: [conda]
23 | matrices:
24 | - matrix:
25 | cuda: "11.5"
26 | packages:
27 | - cudatoolkit=11.5
28 | - matrix:
29 | cuda: "11.6"
30 | packages:
31 | - cudatoolkit=11.6
32 | - output_types: [conda]
33 | matrices:
34 | - matrix:
35 | arch: x86_64
36 | py: "3.9"
37 | packages:
38 | - some_amd64_39_build_dep
39 | - matrix:
40 | packages:
41 | - output_types: [conda]
42 | matrices:
43 | - matrix:
44 | arch: arm64
45 | cuda: "11.5"
46 | py: "3.8"
47 | packages:
48 | - super_specific_dep
49 | - matrix:
50 | packages:
51 | - output_types: [conda]
52 | matrices:
53 | - matrix:
54 | cuda: "11.5"
55 | py: "3.8"
56 | packages:
57 | - some_115_38_build_dep
58 | - matrix:
59 | packages:
60 |
--------------------------------------------------------------------------------
/tests/examples/matrix-null-item/output/expected/dev_cuda-115_arch-arm64.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.5
9 | - spdlog>=1.8.5,<1.9
10 | name: dev_cuda-115_arch-arm64
11 |
--------------------------------------------------------------------------------
/tests/examples/matrix-null-item/output/expected/dev_cuda-115_arch-arm64_py-38.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.5
9 | - some_115_38_build_dep
10 | - spdlog>=1.8.5,<1.9
11 | - super_specific_dep
12 | name: dev_cuda-115_arch-arm64_py-38
13 |
--------------------------------------------------------------------------------
/tests/examples/matrix-null-item/output/expected/dev_cuda-115_arch-x86_64.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.5
9 | - spdlog>=1.8.5,<1.9
10 | name: dev_cuda-115_arch-x86_64
11 |
--------------------------------------------------------------------------------
/tests/examples/matrix-null-item/output/expected/dev_cuda-115_arch-x86_64_py-38.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.5
9 | - some_115_38_build_dep
10 | - spdlog>=1.8.5,<1.9
11 | name: dev_cuda-115_arch-x86_64_py-38
12 |
--------------------------------------------------------------------------------
/tests/examples/matrix/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | dev:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.6"]
7 | arch: [x86_64, arm64]
8 | py: ["3.8", "3.9"]
9 | includes:
10 | - build
11 | channels:
12 | - rapidsai
13 | - conda-forge
14 | dependencies:
15 | build:
16 | common:
17 | - output_types: [conda]
18 | packages:
19 | - clang-tools=11.1.0
20 | - spdlog>=1.8.5,<1.9
21 | specific:
22 | - output_types: [conda]
23 | matrices:
24 | - matrix:
25 | cuda: "11.5"
26 | packages:
27 | - cudatoolkit=11.5
28 | - matrix:
29 | cuda: "11.6"
30 | packages:
31 | - cudatoolkit=11.6
32 | - output_types: [conda]
33 | matrices:
34 | - matrix:
35 | arch: x86_64
36 | py: "3.9"
37 | packages:
38 | - some_amd64_39_build_dep
39 | - matrix:
40 | packages:
41 | - output_types: [conda]
42 | matrices:
43 | - matrix:
44 | arch: arm64
45 | cuda: "11.5"
46 | py: "3.8"
47 | packages:
48 | - super_specific_dep
49 | - matrix:
50 | packages:
51 | - output_types: [conda]
52 | matrices:
53 | - matrix:
54 | cuda: "11.5"
55 | py: "3.8"
56 | packages:
57 | - some_115_38_build_dep
58 | - matrix:
59 | packages:
60 |
--------------------------------------------------------------------------------
/tests/examples/matrix/output/expected/dev_cuda-115_arch-arm64_py-38.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.5
9 | - some_115_38_build_dep
10 | - spdlog>=1.8.5,<1.9
11 | - super_specific_dep
12 | name: dev_cuda-115_arch-arm64_py-38
13 |
--------------------------------------------------------------------------------
/tests/examples/matrix/output/expected/dev_cuda-115_arch-arm64_py-39.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.5
9 | - spdlog>=1.8.5,<1.9
10 | name: dev_cuda-115_arch-arm64_py-39
11 |
--------------------------------------------------------------------------------
/tests/examples/matrix/output/expected/dev_cuda-115_arch-x86_64_py-38.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.5
9 | - some_115_38_build_dep
10 | - spdlog>=1.8.5,<1.9
11 | name: dev_cuda-115_arch-x86_64_py-38
12 |
--------------------------------------------------------------------------------
/tests/examples/matrix/output/expected/dev_cuda-115_arch-x86_64_py-39.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.5
9 | - some_amd64_39_build_dep
10 | - spdlog>=1.8.5,<1.9
11 | name: dev_cuda-115_arch-x86_64_py-39
12 |
--------------------------------------------------------------------------------
/tests/examples/matrix/output/expected/dev_cuda-116_arch-arm64_py-38.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.6
9 | - spdlog>=1.8.5,<1.9
10 | name: dev_cuda-116_arch-arm64_py-38
11 |
--------------------------------------------------------------------------------
/tests/examples/matrix/output/expected/dev_cuda-116_arch-arm64_py-39.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.6
9 | - spdlog>=1.8.5,<1.9
10 | name: dev_cuda-116_arch-arm64_py-39
11 |
--------------------------------------------------------------------------------
/tests/examples/matrix/output/expected/dev_cuda-116_arch-x86_64_py-38.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.6
9 | - spdlog>=1.8.5,<1.9
10 | name: dev_cuda-116_arch-x86_64_py-38
11 |
--------------------------------------------------------------------------------
/tests/examples/matrix/output/expected/dev_cuda-116_arch-x86_64_py-39.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang-tools=11.1.0
8 | - cudatoolkit=11.6
9 | - some_amd64_39_build_dep
10 | - spdlog>=1.8.5,<1.9
11 | name: dev_cuda-116_arch-x86_64_py-39
12 |
--------------------------------------------------------------------------------
/tests/examples/nested-pyproject/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | sparkly_unicorn:
3 | output: pyproject
4 | includes:
5 | - run_deps
6 | extras:
7 | table: project
8 | pyproject_dir: some/cool/code
9 | dependencies:
10 | run_deps:
11 | common:
12 | - output_types: [pyproject]
13 | packages:
14 | - fsspec>=0.6.0
15 | specific:
16 | - output_types: [pyproject]
17 | matrices:
18 | - matrix: {"cuda": "100.*"}
19 | packages:
20 | - cuda-python>=100.1,<101.0a0
21 | - matrix: {"cuda": "11.*"}
22 | packages:
23 | - cuda-python>=11.7.1,<12.0a0
24 | - {matrix: null, packages: ["should-not-be-found-by-test"]}
25 |
--------------------------------------------------------------------------------
/tests/examples/nested-pyproject/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.some-nonsense]
2 | should_dfg_update_this = "no"
3 |
--------------------------------------------------------------------------------
/tests/examples/nested-pyproject/some/cool/code/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "beep-boop"
3 | version = "1.2.3"
4 | dependencies = [
5 | "fsspec>=0.6.0",
6 | "should-not-be-found-by-test",
7 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../../dependencies.yaml and run `rapids-dependency-file-generator`.
8 |
--------------------------------------------------------------------------------
/tests/examples/no-matrix/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | checks:
3 | output: conda
4 | conda_dir: output/actual
5 | includes:
6 | - checks
7 | channels:
8 | - rapidsai
9 | - conda-forge
10 | dependencies:
11 | checks:
12 | common:
13 | - output_types: [conda, requirements]
14 | packages:
15 | - clang=11.1.0
16 | - spdlog>=1.8.5,<1.9
17 | - output_types: conda
18 | packages:
19 | - pip
20 | - pip:
21 | - git+https://github.com/python-streamz/streamz.git@master
22 | specific:
23 | - output_types: [conda, requirements]
24 | matrices:
25 | - matrix:
26 | cuda: "11.5"
27 | packages:
28 | - cuda-python>=11.5,<11.7.1
29 | - matrix:
30 | cuda: "11.6"
31 | packages:
32 | - cuda-python>=11.6,<11.7.1
33 | - matrix:
34 | packages:
35 | - default-cuda-python
36 | - output_types: conda
37 | matrices:
38 | - matrix:
39 | cuda: "11.5"
40 | packages:
41 | - cudatoolkit=11.5
42 | - matrix:
43 | cuda: "11.6"
44 | packages:
45 | - cudatoolkit=11.6
46 | - matrix:
47 | packages:
48 | - default-cudatoolkit
49 |
--------------------------------------------------------------------------------
/tests/examples/no-matrix/output/expected/checks.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - clang=11.1.0
8 | - default-cuda-python
9 | - default-cudatoolkit
10 | - pip
11 | - spdlog>=1.8.5,<1.9
12 | - pip:
13 | - git+https://github.com/python-streamz/streamz.git@master
14 | name: checks
15 |
--------------------------------------------------------------------------------
/tests/examples/no-specific-match/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | all:
3 | output: conda
4 | requirements_dir: output/actual
5 | matrix:
6 | cuda: ["11.8"]
7 | includes:
8 | - cudatoolkit
9 | channels:
10 | - rapidsai
11 | - conda-forge
12 | dependencies:
13 | cudatoolkit:
14 | specific:
15 | - output_types: conda
16 | matrices:
17 | - matrix:
18 | cuda: "11.5"
19 | packages:
20 | - cudatoolkit=11.5
21 | - matrix:
22 | cuda: "11.6"
23 | packages:
24 | - cudatoolkit=11.6
25 |
--------------------------------------------------------------------------------
/tests/examples/overlapping-deps/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | build_deps:
3 | output: [pyproject]
4 | pyproject_dir: output/actual
5 | extras:
6 | table: build-system
7 | includes:
8 | - rapids_build_skbuild
9 | - depends_on_numpy
10 | even_more_build_deps:
11 | output: [pyproject]
12 | pyproject_dir: output/actual
13 | extras:
14 | table: tool.rapids-build-backend
15 | key: requires
16 | includes:
17 | - depends_on_numpy
18 | - depends_on_pandas
19 | test_deps:
20 | output: none
21 | includes:
22 | - depends_on_numpy
23 | - depends_on_pandas
24 | even_more_test_deps:
25 | output: none
26 | includes:
27 | - depends_on_numpy
28 | - test_python
29 | test_with_sklearn:
30 | output: none
31 | includes:
32 | - depends_on_scikit_learn
33 | channels:
34 | - rapidsai
35 | - conda-forge
36 | dependencies:
37 | depends_on_numpy:
38 | common:
39 | - output_types: [requirements, pyproject]
40 | packages:
41 | - numpy>=2.0
42 | # using 'pip' intentionally to test handling of that nested list
43 | - output_types: [conda]
44 | packages:
45 | - pip
46 | - pip:
47 | - numpy>=2.0
48 | depends_on_pandas:
49 | common:
50 | - output_types: [conda, requirements, pyproject]
51 | packages:
52 | - pandas<3.0
53 | depends_on_scikit_learn:
54 | common:
55 | - output_types: [conda, requirements, pyproject]
56 | packages:
57 | - scikit-learn>=1.5
58 | test_python:
59 | common:
60 | - output_types: [conda, requirements, pyproject]
61 | packages:
62 | - matplotlib
63 | - output_types: [conda]
64 | packages:
65 | - pip
66 | # intentional overlap (numpy) with depends_on_numpy's pip list, to
67 | # test that pip dependencies don't have duplicates
68 | - pip:
69 | # intentionally not in alphabetical order
70 | - numpy>=2.0
71 | - folium
72 | rapids_build_skbuild:
73 | common:
74 | - output_types: [conda, requirements, pyproject]
75 | packages:
76 | - rapids-build-backend>=0.3.1
77 | - output_types: [requirements, pyproject]
78 | packages:
79 | - scikit-build-core[pyproject]>=0.9.0
80 | - output_types: [conda]
81 | packages:
82 | - scikit-build-core>=0.9.0
83 |
--------------------------------------------------------------------------------
/tests/examples/overlapping-deps/output/expected/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | build-backend = "rapids_build_backend.build_meta"
3 | requires = [
4 | "numpy>=2.0",
5 | "rapids-build-backend>=0.3.1",
6 | "scikit-build-core[pyproject]>=0.9.0",
7 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
8 |
9 | [project]
10 | name = "libbeepboop"
11 | version = "0.1.2"
12 | dependencies = [
13 | "scipy",
14 | ]
15 |
16 | [tool.rapids-build-backend]
17 | requires = [
18 | "numpy>=2.0",
19 | "pandas<3.0",
20 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
21 |
--------------------------------------------------------------------------------
/tests/examples/prepend-channels/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | build:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.6"]
7 | arch: [x86_64]
8 | includes:
9 | - build
10 | channels:
11 | - rapidsai
12 | - conda-forge
13 | dependencies:
14 | build:
15 | common:
16 | - output_types: [conda, requirements]
17 | packages:
18 | - clang=11.1.0
19 | - spdlog>=1.8.5,<1.9
20 | - output_types: conda
21 | packages:
22 | - pip
23 | - pip:
24 | - git+https://github.com/python-streamz/streamz.git@master
25 | specific:
26 | - output_types: [conda, requirements]
27 | matrices:
28 | - matrix:
29 | cuda: "11.5"
30 | packages:
31 | - cuda-python>=11.5,<11.7.1
32 | - matrix:
33 | cuda: "11.6"
34 | packages:
35 | - cuda-python>=11.6,<11.7.1
36 | - output_types: conda
37 | matrices:
38 | - matrix:
39 | cuda: "11.5"
40 | packages:
41 | - cudatoolkit=11.5
42 | - matrix:
43 | cuda: "11.6"
44 | packages:
45 | - cudatoolkit=11.6
46 |
--------------------------------------------------------------------------------
/tests/examples/prepend-channels/output/expected/build_cuda-115_arch-x86_64.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - my_channel
5 | - my_other_channel
6 | - rapidsai
7 | - conda-forge
8 | dependencies:
9 | - clang=11.1.0
10 | - cuda-python>=11.5,<11.7.1
11 | - cudatoolkit=11.5
12 | - pip
13 | - spdlog>=1.8.5,<1.9
14 | - pip:
15 | - git+https://github.com/python-streamz/streamz.git@master
16 | name: build_cuda-115_arch-x86_64
17 |
--------------------------------------------------------------------------------
/tests/examples/prepend-channels/output/expected/build_cuda-116_arch-x86_64.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - my_channel
5 | - my_other_channel
6 | - rapidsai
7 | - conda-forge
8 | dependencies:
9 | - clang=11.1.0
10 | - cuda-python>=11.6,<11.7.1
11 | - cudatoolkit=11.6
12 | - pip
13 | - spdlog>=1.8.5,<1.9
14 | - pip:
15 | - git+https://github.com/python-streamz/streamz.git@master
16 | name: build_cuda-116_arch-x86_64
17 |
--------------------------------------------------------------------------------
/tests/examples/pyproject-no-extras/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | beep_boop:
3 | output: pyproject
4 | includes:
5 | - run_deps
6 | pyproject_dir: .
7 | dependencies:
8 | run_deps:
9 | common:
10 | - output_types: [pyproject]
11 | packages:
12 | - fsspec>=0.6.0
13 |
--------------------------------------------------------------------------------
/tests/examples/pyproject_bad_key/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | py_build:
3 | output: pyproject
4 | pyproject_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.6"]
7 | includes:
8 | - build
9 | extras:
10 | table: build-system
11 | key: dependencies
12 | channels:
13 | - rapidsai
14 | - conda-forge
15 | dependencies:
16 | build:
17 | specific:
18 | - output_types: [conda, requirements]
19 | matrices:
20 | - matrix:
21 | cuda: "11.5"
22 | packages:
23 | - cuda-python>=11.5,<11.7.1
24 | - matrix:
25 | cuda: "11.6"
26 | packages:
27 | - cuda-python>=11.6,<11.7.1
28 |
--------------------------------------------------------------------------------
/tests/examples/pyproject_matrix/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | py_build:
3 | output: pyproject
4 | pyproject_dir: output/actual
5 | matrix:
6 | cuda: ["11.6"]
7 | includes:
8 | - build
9 | extras:
10 | table: build-system
11 | channels:
12 | - rapidsai
13 | - conda-forge
14 | dependencies:
15 | build:
16 | specific:
17 | - output_types: [conda, pyproject, requirements]
18 | matrices:
19 | - matrix:
20 | cuda: "11.5"
21 | packages:
22 | - cuda-python>=11.5,<11.7.1
23 | - matrix:
24 | cuda: "11.6"
25 | packages:
26 | - cuda-python>=11.6,<11.7.1
27 |
--------------------------------------------------------------------------------
/tests/examples/pyproject_matrix/output/expected/pyproject.toml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | [build-system]
4 | build-backend = "setuptools.build_meta"
5 | requires = [
6 | "cuda-python>=11.6,<11.7.1",
7 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
8 |
9 | [project]
10 | name = "test-cu11"
11 | version = "0.0.0"
12 | dependencies = [
13 | "numpy",
14 | "scipy",
15 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
16 |
17 | [project.optional-dependencies]
18 | test = [
19 | "scikit-image",
20 | "scikit-learn",
21 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
22 |
--------------------------------------------------------------------------------
/tests/examples/pyproject_matrix_fallback/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | py_build:
3 | output: pyproject
4 | pyproject_dir: output/actual
5 | matrix:
6 | cuda: ["11.6"]
7 | includes:
8 | - build
9 | extras:
10 | table: build-system
11 | channels:
12 | - rapidsai
13 | - conda-forge
14 | dependencies:
15 | build:
16 | specific:
17 | - output_types: [conda, pyproject, requirements]
18 | matrices:
19 | - matrix:
20 | cuda: "11.5"
21 | packages:
22 | - cuda-python>=11.5,<11.7.1
23 | - matrix:
24 | packages:
25 | - cuda-python>=11.6,<11.7.1
26 |
--------------------------------------------------------------------------------
/tests/examples/pyproject_matrix_fallback/output/expected/pyproject.toml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | [build-system]
4 | build-backend = "setuptools.build_meta"
5 | requires = [
6 | "cuda-python>=11.6,<11.7.1",
7 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
8 |
9 | [project]
10 | name = "test-cu11"
11 | version = "0.0.0"
12 | dependencies = [
13 | "numpy",
14 | "scipy",
15 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
16 |
17 | [project.optional-dependencies]
18 | test = [
19 | "scikit-image",
20 | "scikit-learn",
21 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
22 |
--------------------------------------------------------------------------------
/tests/examples/pyproject_matrix_multi/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | py_build:
3 | output: pyproject
4 | pyproject_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.6"]
7 | includes:
8 | - build
9 | extras:
10 | table: build-system
11 | channels:
12 | - rapidsai
13 | - conda-forge
14 | dependencies:
15 | build:
16 | specific:
17 | - output_types: [conda, pyproject, requirements]
18 | matrices:
19 | - matrix:
20 | cuda: "11.5"
21 | packages:
22 | - cuda-python>=11.5,<11.7.1
23 | - matrix:
24 | cuda: "11.6"
25 | packages:
26 | - cuda-python>=11.6,<11.7.1
27 |
--------------------------------------------------------------------------------
/tests/examples/requirements-minimal/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | all:
3 | output: requirements
4 | requirements_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.6"]
7 | arch: [x86_64]
8 | includes:
9 | - build
10 | py_build:
11 | output: pyproject
12 | pyproject_dir: output/actual
13 | includes:
14 | - python_build_dependencies
15 | extras:
16 | table: build-system
17 | py_run:
18 | output: pyproject
19 | pyproject_dir: output/actual
20 | includes:
21 | - python_run_dependencies
22 | extras:
23 | table: project
24 | py_optional_test:
25 | output: pyproject
26 | pyproject_dir: output/actual
27 | includes:
28 | - python_test_dependencies
29 | extras:
30 | table: project.optional-dependencies
31 | key: test
32 | channels:
33 | - rapidsai
34 | - conda-forge
35 | dependencies:
36 | build:
37 | common:
38 | - output_types: [conda, requirements]
39 | packages:
40 | - clang=11.1.0
41 | - output_types: requirements
42 | packages:
43 | - spdlog>=1.8.5,<1.9
44 | specific:
45 | - output_types: [conda, requirements]
46 | matrices:
47 | - matrix:
48 | cuda: "11.5"
49 | packages:
50 | - cuda-python>=11.5,<11.7.1
51 | - matrix:
52 | cuda: "11.6"
53 | packages:
54 | - cuda-python>=11.6,<11.7.1
55 | - output_types: requirements
56 | matrices:
57 | - matrix:
58 | cuda: "11.5"
59 | packages:
60 | - cudatoolkit=11.5
61 | - matrix:
62 | cuda: "11.6"
63 | packages:
64 | - cudatoolkit=11.6
65 | python_build_dependencies:
66 | common:
67 | - output_types: pyproject
68 | packages:
69 | - setuptools
70 | python_run_dependencies:
71 | common:
72 | - output_types: pyproject
73 | packages:
74 | - numpy
75 | - scipy
76 | python_test_dependencies:
77 | common:
78 | - output_types: pyproject
79 | packages:
80 | - scikit-image
81 | - scikit-learn
82 |
--------------------------------------------------------------------------------
/tests/examples/requirements-minimal/output/expected/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | build-backend = "setuptools.build_meta"
3 | requires = [
4 | "setuptools",
5 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
6 |
7 | [project]
8 | name = "test"
9 | version = "0.0.0"
10 | dependencies = [
11 | "numpy",
12 | "scipy",
13 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
14 |
15 | [project.optional-dependencies]
16 | test = [
17 | "scikit-image",
18 | "scikit-learn",
19 | ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
20 |
--------------------------------------------------------------------------------
/tests/examples/requirements-minimal/output/expected/requirements_all_cuda-115_arch-x86_64.txt:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | clang=11.1.0
4 | cuda-python>=11.5,<11.7.1
5 | cudatoolkit=11.5
6 | spdlog>=1.8.5,<1.9
7 |
--------------------------------------------------------------------------------
/tests/examples/requirements-minimal/output/expected/requirements_all_cuda-116_arch-x86_64.txt:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | clang=11.1.0
4 | cuda-python>=11.6,<11.7.1
5 | cudatoolkit=11.6
6 | spdlog>=1.8.5,<1.9
7 |
--------------------------------------------------------------------------------
/tests/examples/requirements-pip-dict/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | all_of_the_things:
3 | output: requirements
4 | includes:
5 | - run_deps
6 | requirements_dir: .
7 | dependencies:
8 | run_deps:
9 | common:
10 | - output_types: [requirements]
11 | packages:
12 | - fsspec>=0.6.0
13 | - pip:
14 | - pandas<1.0
15 |
--------------------------------------------------------------------------------
/tests/examples/specific-fallback-first/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | all:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.8"]
7 | includes:
8 | - cudatoolkit
9 | channels:
10 | - rapidsai
11 | - conda-forge
12 | dependencies:
13 | cudatoolkit:
14 | specific:
15 | - output_types: conda
16 | matrices:
17 | - matrix:
18 | packages:
19 | - cudatoolkit
20 | - matrix:
21 | cuda: "11.5"
22 | packages:
23 | - cudatoolkit=11.5
24 | - matrix:
25 | cuda: "11.6"
26 | packages:
27 | - cudatoolkit=11.6
28 |
--------------------------------------------------------------------------------
/tests/examples/specific-fallback-first/output/expected/all_cuda-115.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - cudatoolkit=11.5
8 | name: all_cuda-115
9 |
--------------------------------------------------------------------------------
/tests/examples/specific-fallback-first/output/expected/all_cuda-118.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - cudatoolkit
8 | name: all_cuda-118
9 |
--------------------------------------------------------------------------------
/tests/examples/specific-fallback-multiple-matches/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | all:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.8"]
7 | includes:
8 | - cudatoolkit
9 | channels:
10 | - rapidsai
11 | - conda-forge
12 | dependencies:
13 | cudatoolkit:
14 | specific:
15 | - output_types: conda
16 | matrices:
17 | - matrix:
18 | cuda: "11.5"
19 | packages:
20 | - cudatoolkit=11.5
21 | - matrix:
22 | cuda: "11.*"
23 | packages:
24 | - cudatoolkit=11.*
25 | - matrix:
26 | packages:
27 | - cudatoolkit
28 |
--------------------------------------------------------------------------------
/tests/examples/specific-fallback-multiple-matches/output/expected/all_cuda-115.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - cudatoolkit=11.5
8 | name: all_cuda-115
9 |
--------------------------------------------------------------------------------
/tests/examples/specific-fallback-multiple-matches/output/expected/all_cuda-118.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - cudatoolkit=11.*
8 | name: all_cuda-118
9 |
--------------------------------------------------------------------------------
/tests/examples/specific-fallback/dependencies.yaml:
--------------------------------------------------------------------------------
1 | files:
2 | all:
3 | output: conda
4 | conda_dir: output/actual
5 | matrix:
6 | cuda: ["11.5", "11.8"]
7 | includes:
8 | - cudatoolkit
9 | channels:
10 | - rapidsai
11 | - conda-forge
12 | dependencies:
13 | cudatoolkit:
14 | specific:
15 | - output_types: conda
16 | matrices:
17 | - matrix:
18 | cuda: "11.5"
19 | packages:
20 | - cudatoolkit=11.5
21 | - matrix:
22 | cuda: "11.6"
23 | packages:
24 | - cudatoolkit=11.6
25 | - matrix:
26 | packages:
27 | - cudatoolkit
28 |
--------------------------------------------------------------------------------
/tests/examples/specific-fallback/output/expected/all_cuda-115.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - cudatoolkit=11.5
8 | name: all_cuda-115
9 |
--------------------------------------------------------------------------------
/tests/examples/specific-fallback/output/expected/all_cuda-118.yaml:
--------------------------------------------------------------------------------
1 | # This file is generated by `rapids-dependency-file-generator`.
2 | # To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`.
3 | channels:
4 | - rapidsai
5 | - conda-forge
6 | dependencies:
7 | - cudatoolkit
8 | name: all_cuda-118
9 |
--------------------------------------------------------------------------------
/tests/test_cli.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from rapids_dependency_file_generator._cli import generate_matrix, validate_args
4 |
5 |
6 | def test_generate_matrix():
7 | matrix = generate_matrix("cuda=11.5;arch=x86_64")
8 | assert matrix == {"cuda": ["11.5"], "arch": ["x86_64"]}
9 |
10 | matrix = generate_matrix(None)
11 | assert matrix is None
12 |
13 |
14 | def test_generate_matrix_allows_duplicates_and_chooses_the_final_value():
15 | # duplicate keys
16 | matrix = generate_matrix("thing=abc;other_thing=true;thing=def;thing=ghi")
17 | assert matrix == {"other_thing": ["true"], "thing": ["ghi"]}
18 |
19 | # duplicate keys and values
20 | matrix = generate_matrix("thing=abc;thing=abc")
21 | assert matrix == {"thing": ["abc"]}
22 |
23 |
24 | def test_validate_args():
25 | # Missing output
26 | with pytest.raises(Exception):
27 | validate_args(["--matrix", "cuda=11.5;arch=x86_64", "--file-key", "all"])
28 |
29 | # Missing matrix
30 | with pytest.raises(Exception):
31 | validate_args(["--output", "conda", "--file-key", "all"])
32 |
33 | # Missing file_key
34 | with pytest.raises(Exception):
35 | validate_args(["--output", "conda", "--matrix", "cuda=11.5;arch=x86_64"])
36 |
37 | # Prepending channels with an output type that is not conda
38 | with pytest.raises(Exception):
39 | validate_args(
40 | [
41 | "--output",
42 | "requirements",
43 | "--matrix",
44 | "cuda=11.5;arch=x86_64",
45 | "--file-key",
46 | "all",
47 | "--prepend-channel",
48 | "my_channel",
49 | "--prepend-channel",
50 | "my_other_channel",
51 | ]
52 | )
53 |
54 | # Valid
55 | validate_args(
56 | [
57 | "--output",
58 | "conda",
59 | "--matrix",
60 | "cuda=11.5;arch=x86_64",
61 | "--file-key",
62 | "all",
63 | ]
64 | )
65 |
66 | # Valid
67 | validate_args(
68 | [
69 | "--config",
70 | "dependencies2.yaml",
71 | "--output",
72 | "pyproject",
73 | "--matrix",
74 | "cuda=11.5;arch=x86_64",
75 | "--file-key",
76 | "all",
77 | ]
78 | )
79 |
80 | # Valid
81 | validate_args(
82 | [
83 | "-c",
84 | "dependencies2.yaml",
85 | "--output",
86 | "pyproject",
87 | "--matrix",
88 | "cuda=11.5;arch=x86_64",
89 | "--file-key",
90 | "all",
91 | ]
92 | )
93 |
94 | # Valid, with prepended channels
95 | validate_args(
96 | [
97 | "--prepend-channel",
98 | "my_channel",
99 | "--prepend-channel",
100 | "my_other_channel",
101 | ]
102 | )
103 |
104 | # Valid, with output/matrix/file_key and prepended channels
105 | validate_args(
106 | [
107 | "--output",
108 | "conda",
109 | "--matrix",
110 | "cuda=11.5;arch=x86_64",
111 | "--file-key",
112 | "all",
113 | "--prepend-channel",
114 | "my_channel",
115 | "--prepend-channel",
116 | "my_other_channel",
117 | ]
118 | )
119 |
120 | # Valid, with duplicates in --matrix
121 | validate_args(
122 | [
123 | "-c",
124 | "dependencies2.yaml",
125 | "--output",
126 | "pyproject",
127 | "--matrix",
128 | "cuda_suffixed=true;arch=x86_64;cuda_suffixed=false",
129 | "--file-key",
130 | "all",
131 | ]
132 | )
133 |
134 | # Valid, with 2 files for --output requirements
135 | validate_args(
136 | [
137 | "--output",
138 | "requirements",
139 | "--matrix",
140 | "cuda=12.5",
141 | "--file-key",
142 | "all",
143 | "--file-key",
144 | "test_python",
145 | ]
146 | )
147 |
148 | # Valid, with 2 files for --output conda
149 | validate_args(
150 | [
151 | "--output",
152 | "conda",
153 | "--matrix",
154 | "cuda=12.5",
155 | "--file-key",
156 | "all",
157 | "--file-key",
158 | "test_python",
159 | ]
160 | )
161 |
162 | # Valid, with 3 files
163 | validate_args(
164 | [
165 | "--output",
166 | "requirements",
167 | "--matrix",
168 | "cuda=12.5",
169 | "--file-key",
170 | "all",
171 | "--file-key",
172 | "test_python",
173 | "--file-key",
174 | "build_python",
175 | ]
176 | )
177 |
178 | # Verify --version flag
179 | args = validate_args([])
180 | assert not args.version
181 |
182 | args = validate_args(["--version"])
183 | assert args.version
184 |
--------------------------------------------------------------------------------
/tests/test_config.py:
--------------------------------------------------------------------------------
1 | import tempfile
2 | import textwrap
3 | from pathlib import Path
4 |
5 | import pytest
6 |
7 | from rapids_dependency_file_generator import _config, _constants
8 |
9 |
10 | @pytest.mark.parametrize(
11 | ["input", "output"],
12 | [
13 | *((e.value, {e}) for e in _config.Output),
14 | ("none", set()),
15 | (["none"], set()),
16 | (
17 | ["pyproject", "requirements", "conda"],
18 | {
19 | _config.Output.PYPROJECT,
20 | _config.Output.REQUIREMENTS,
21 | _config.Output.CONDA,
22 | },
23 | ),
24 | ("invalid", ValueError),
25 | (["invalid"], ValueError),
26 | (["none", "pyproject"], ValueError),
27 | ],
28 | )
29 | def test_parse_outputs(input, output):
30 | if isinstance(output, type) and issubclass(output, Exception):
31 | with pytest.raises(output):
32 | _config._parse_outputs(input)
33 | else:
34 | assert _config._parse_outputs(input) == output
35 |
36 |
37 | @pytest.mark.parametrize(
38 | ["input", "output"],
39 | [
40 | ("package", "package"),
41 | ({"pip": ["package", "other-package"]}, _config.PipRequirements(pip=["package", "other-package"])),
42 | ({"other": "invalid"}, KeyError),
43 | ],
44 | )
45 | def test_parse_requirement(input, output):
46 | if isinstance(output, type) and issubclass(output, Exception):
47 | with pytest.raises(output):
48 | _config._parse_requirement(input)
49 | else:
50 | assert _config._parse_requirement(input) == output
51 |
52 |
53 | @pytest.mark.parametrize(
54 | ["input", "output"],
55 | [
56 | (
57 | {"table": "build-system", "key": "requires"},
58 | _config.FileExtras(table="build-system", key="requires"),
59 | ),
60 | (
61 | {"table": "build-system"},
62 | _config.FileExtras(table="build-system", key=None),
63 | ),
64 | ({}, KeyError),
65 | ],
66 | )
67 | def test_parse_extras(input, output):
68 | if isinstance(output, type) and issubclass(output, Exception):
69 | with pytest.raises(output):
70 | _config._parse_extras(input)
71 | else:
72 | assert _config._parse_extras(input) == output
73 |
74 |
75 | @pytest.mark.parametrize(
76 | ["input", "output"],
77 | [
78 | (
79 | {
80 | "output": "none",
81 | "includes": [],
82 | },
83 | _config.File(
84 | output=set(),
85 | extras=None,
86 | includes=[],
87 | matrix={},
88 | requirements_dir=Path(_constants.default_requirements_dir),
89 | conda_dir=Path(_constants.default_conda_dir),
90 | pyproject_dir=Path(_constants.default_pyproject_dir),
91 | ),
92 | ),
93 | (
94 | {
95 | "output": ["conda", "pyproject"],
96 | "extras": {
97 | "table": "build-system",
98 | "key": "requires",
99 | },
100 | "includes": ["py_build", "py_run"],
101 | "matrix": {
102 | "cuda": ["11", "12"],
103 | "arch": ["x86_64", "aarch64"],
104 | },
105 | "requirements_dir": "python_requirements",
106 | "conda_dir": "conda_recipe",
107 | "pyproject_dir": "python_pyproject",
108 | },
109 | _config.File(
110 | output={_config.Output.CONDA, _config.Output.PYPROJECT},
111 | extras=_config.FileExtras(table="build-system", key="requires"),
112 | includes=["py_build", "py_run"],
113 | matrix={
114 | "cuda": ["11", "12"],
115 | "arch": ["x86_64", "aarch64"],
116 | },
117 | requirements_dir=Path("python_requirements"),
118 | conda_dir=Path("conda_recipe"),
119 | pyproject_dir=Path("python_pyproject"),
120 | ),
121 | ),
122 | ],
123 | )
124 | def test_parse_file(input, output):
125 | if isinstance(output, type) and issubclass(output, Exception):
126 | with pytest.raises(output):
127 | _config._parse_file(input)
128 | else:
129 | assert _config._parse_file(input) == output
130 |
131 |
132 | @pytest.mark.parametrize(
133 | ["input", "output"],
134 | [
135 | (
136 | {},
137 | _config.Dependencies(common=[], specific=[]),
138 | ),
139 | (
140 | {
141 | "common": [
142 | {
143 | "output_types": "none",
144 | "packages": [],
145 | },
146 | {
147 | "output_types": ["pyproject", "requirements"],
148 | "packages": [
149 | "package1",
150 | {
151 | "pip": ["package2"],
152 | },
153 | ],
154 | },
155 | ],
156 | "specific": [
157 | {
158 | "output_types": "none",
159 | "matrices": [
160 | {
161 | "matrix": None,
162 | "packages": None,
163 | },
164 | ],
165 | },
166 | {
167 | "output_types": ["requirements", "conda"],
168 | "matrices": [
169 | {
170 | "matrix": {
171 | "cuda": "11",
172 | "arch": "x86_64",
173 | },
174 | "packages": [
175 | "package3",
176 | {
177 | "pip": ["package4"],
178 | },
179 | ],
180 | },
181 | ],
182 | },
183 | ],
184 | },
185 | _config.Dependencies(
186 | common=[
187 | _config.CommonDependencies(
188 | output_types=set(),
189 | packages=[],
190 | ),
191 | _config.CommonDependencies(
192 | output_types={
193 | _config.Output.PYPROJECT,
194 | _config.Output.REQUIREMENTS,
195 | },
196 | packages=[
197 | "package1",
198 | _config.PipRequirements(pip=["package2"]),
199 | ],
200 | ),
201 | ],
202 | specific=[
203 | _config.SpecificDependencies(
204 | output_types=set(),
205 | matrices=[
206 | _config.MatrixMatcher(
207 | matrix={},
208 | packages=[],
209 | ),
210 | ],
211 | ),
212 | _config.SpecificDependencies(
213 | output_types={
214 | _config.Output.REQUIREMENTS,
215 | _config.Output.CONDA,
216 | },
217 | matrices=[
218 | _config.MatrixMatcher(
219 | matrix={"cuda": "11", "arch": "x86_64"},
220 | packages=[
221 | "package3",
222 | _config.PipRequirements(pip=["package4"]),
223 | ],
224 | ),
225 | ],
226 | ),
227 | ],
228 | ),
229 | ),
230 | ],
231 | )
232 | def test_parse_dependencies(input, output):
233 | if isinstance(output, type) and issubclass(output, Exception):
234 | with pytest.raises(output):
235 | _config._parse_dependencies(input)
236 | else:
237 | assert _config._parse_dependencies(input) == output
238 |
239 |
240 | @pytest.mark.parametrize(
241 | ["input", "output"],
242 | [
243 | ("conda-forge", ["conda-forge"]),
244 | (["conda-forge", "nvidia"], ["conda-forge", "nvidia"]),
245 | ],
246 | )
247 | def test_parse_channels(input, output):
248 | if isinstance(output, type) and issubclass(output, Exception):
249 | with pytest.raises(output):
250 | _config._parse_channels(input)
251 | else:
252 | assert _config._parse_channels(input) == output
253 |
254 |
255 | @pytest.mark.parametrize(
256 | ["input", "path", "output"],
257 | [
258 | (
259 | {
260 | "files": {
261 | "python": {
262 | "output": "pyproject",
263 | "includes": ["py_build"],
264 | },
265 | },
266 | "channels": [
267 | "conda-forge",
268 | "nvidia",
269 | ],
270 | "dependencies": {
271 | "py_build": {
272 | "common": [
273 | {
274 | "output_types": "pyproject",
275 | "packages": [
276 | "package1",
277 | ],
278 | },
279 | ],
280 | "specific": [
281 | {
282 | "output_types": ["conda", "requirements"],
283 | "matrices": [
284 | {
285 | "matrix": None,
286 | "packages": None,
287 | },
288 | ],
289 | },
290 | ],
291 | },
292 | },
293 | },
294 | "dependencies.yaml",
295 | _config.Config(
296 | path=Path("dependencies.yaml"),
297 | files={
298 | "python": _config.File(
299 | output={_config.Output.PYPROJECT},
300 | includes=["py_build"],
301 | ),
302 | },
303 | channels=[
304 | "conda-forge",
305 | "nvidia",
306 | ],
307 | dependencies={
308 | "py_build": _config.Dependencies(
309 | common=[
310 | _config.CommonDependencies(
311 | output_types={_config.Output.PYPROJECT},
312 | packages=[
313 | "package1",
314 | ],
315 | ),
316 | ],
317 | specific=[
318 | _config.SpecificDependencies(
319 | output_types={
320 | _config.Output.CONDA,
321 | _config.Output.REQUIREMENTS,
322 | },
323 | matrices=[
324 | _config.MatrixMatcher(
325 | matrix={},
326 | packages=[],
327 | ),
328 | ],
329 | ),
330 | ],
331 | ),
332 | },
333 | ),
334 | ),
335 | ],
336 | )
337 | def test_parse_config(input, path, output):
338 | if isinstance(output, type) and issubclass(output, Exception):
339 | with pytest.raises(output):
340 | _config.parse_config(input, path)
341 | else:
342 | assert _config.parse_config(input, path) == output
343 |
344 |
345 | @pytest.mark.parametrize(
346 | ["input", "output"],
347 | [
348 | (
349 | textwrap.dedent(
350 | """\
351 | files:
352 | python:
353 | output: "pyproject"
354 | includes: ["py_build"]
355 | channels:
356 | - conda-forge
357 | - nvidia
358 | dependencies:
359 | py_build:
360 | common:
361 | - output_types: "pyproject"
362 | packages:
363 | - package1
364 | specific:
365 | - output_types: ["conda", "requirements"]
366 | matrices:
367 | - matrix:
368 | packages:
369 | """
370 | ),
371 | _config.Config(
372 | path=Path("dependencies.yaml"),
373 | files={
374 | "python": _config.File(
375 | output={_config.Output.PYPROJECT},
376 | includes=["py_build"],
377 | ),
378 | },
379 | channels=[
380 | "conda-forge",
381 | "nvidia",
382 | ],
383 | dependencies={
384 | "py_build": _config.Dependencies(
385 | common=[
386 | _config.CommonDependencies(
387 | output_types={_config.Output.PYPROJECT},
388 | packages=[
389 | "package1",
390 | ],
391 | ),
392 | ],
393 | specific=[
394 | _config.SpecificDependencies(
395 | output_types={
396 | _config.Output.CONDA,
397 | _config.Output.REQUIREMENTS,
398 | },
399 | matrices=[
400 | _config.MatrixMatcher(
401 | matrix={},
402 | packages=[],
403 | ),
404 | ],
405 | ),
406 | ],
407 | ),
408 | },
409 | ),
410 | ),
411 | ],
412 | )
413 | def test_load_config_from_file(input, output):
414 | with tempfile.NamedTemporaryFile("w") as f:
415 | f.write(input)
416 | f.flush()
417 |
418 | if isinstance(output, type) and issubclass(output, Exception):
419 | with pytest.raises(output):
420 | _config.load_config_from_file(f.name)
421 | else:
422 | output.path = Path(f.name)
423 | assert _config.load_config_from_file(f.name) == output
424 |
--------------------------------------------------------------------------------
/tests/test_examples.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import os
3 | import pathlib
4 | import shutil
5 |
6 | import jsonschema
7 | import pytest
8 | import yaml
9 | from jsonschema.exceptions import ValidationError
10 |
11 | from rapids_dependency_file_generator._cli import main
12 |
13 | CURRENT_DIR = pathlib.Path(__file__).parent
14 |
15 | # Erroneous examples raise runtime errors from the generator.
16 | _erroneous_examples = [
17 | "duplicate-specific-matrix-entries",
18 | "no-specific-match",
19 | "pyproject-no-extras",
20 | "pyproject_bad_key",
21 | "pyproject_matrix_multi",
22 | "requirements-pip-dict",
23 | ]
24 | EXAMPLE_FILES = [
25 | pth
26 | for pth in CURRENT_DIR.glob("examples/*/dependencies.yaml")
27 | if all(ex not in str(pth.absolute()) for ex in _erroneous_examples)
28 | ]
29 | # Invalid examples raise validation errors upon schema validation.
30 | INVALID_EXAMPLE_FILES = list(CURRENT_DIR.glob("examples/invalid/*/dependencies.yaml"))
31 |
32 |
33 | def make_file_set(file_dir):
34 | return {
35 | pathlib.Path(f).relative_to(file_dir)
36 | for f in glob.glob(str(file_dir) + "/**", recursive=True)
37 | if pathlib.Path(f).is_file()
38 | }
39 |
40 |
41 | @pytest.fixture(
42 | params=[example_file.parent for example_file in EXAMPLE_FILES],
43 | ids=[example_file.parent.stem for example_file in EXAMPLE_FILES],
44 | )
45 | def example_dir(request):
46 | return request.param
47 |
48 |
49 | @pytest.fixture(
50 | params=[example_file.parent for example_file in INVALID_EXAMPLE_FILES],
51 | ids=[example_file.parent.stem for example_file in INVALID_EXAMPLE_FILES],
52 | )
53 | def invalid_example_dir(request):
54 | return request.param
55 |
56 |
57 | def test_examples(example_dir):
58 | expected_dir = example_dir.joinpath("output", "expected")
59 | actual_dir = example_dir.joinpath("output", "actual")
60 | dep_file_path = example_dir.joinpath("dependencies.yaml")
61 |
62 | # Copy pyproject.toml files from expected to actual since they are modified in place
63 | for dirpath, _, filenames in os.walk(expected_dir):
64 | for filename in filenames:
65 | if filename == "pyproject.toml":
66 | full_path = pathlib.Path(dirpath) / filename
67 | relative_path = full_path.relative_to(expected_dir)
68 | new_path = actual_dir / relative_path
69 | new_path.parent.mkdir(parents=True, exist_ok=True)
70 | shutil.copyfile(full_path, new_path)
71 |
72 | cli_args = [
73 | "--config",
74 | str(dep_file_path),
75 | "--clean",
76 | str(example_dir.joinpath("output", "actual")),
77 | ]
78 |
79 |     # Prepend channels for the prepend-channels test
80 |     if example_dir.name in ("prepend-channels",):
81 | cli_args = [
82 | "--prepend-channel",
83 | "my_channel",
84 | "--prepend-channel",
85 | "my_other_channel",
86 | ] + cli_args
87 |
88 | main(cli_args)
89 |
90 | expected_file_set = make_file_set(expected_dir)
91 | actual_file_set = make_file_set(actual_dir)
92 |
93 | assert expected_file_set == actual_file_set
94 |
95 | for file in actual_file_set:
96 | actual_file = open(actual_dir.joinpath(file)).read()
97 | expected_file = open(expected_dir.joinpath(file)).read()
98 | assert actual_file == expected_file
99 |
100 |
101 | @pytest.mark.parametrize("test_name", _erroneous_examples)
102 | def test_error_examples(test_name):
103 | test_dir = CURRENT_DIR.joinpath("examples", test_name)
104 | dep_file_path = test_dir.joinpath("dependencies.yaml")
105 |
106 | with pytest.raises(ValueError):
107 | main(
108 | [
109 | "--config",
110 | str(dep_file_path),
111 | "--clean",
112 | str(test_dir.joinpath("output", "actual")),
113 | ]
114 | )
115 |
116 |
117 | def test_examples_are_valid(schema, example_dir):
118 | dep_file_path = example_dir / "dependencies.yaml"
119 | instance = yaml.load(dep_file_path.read_text(), Loader=yaml.SafeLoader)
120 | jsonschema.validate(instance, schema=schema)
121 |
122 |
123 | def test_invalid_examples_are_invalid(schema, invalid_example_dir):
124 | dep_file_path = invalid_example_dir / "dependencies.yaml"
125 | instance = yaml.load(dep_file_path.read_text(), Loader=yaml.SafeLoader)
126 | with pytest.raises(ValidationError):
127 | jsonschema.validate(instance, schema=schema)
128 |
--------------------------------------------------------------------------------
/tests/test_rapids_dependency_file_generator.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | import re
3 | from unittest import mock
4 |
5 | import pytest
6 | import tomlkit
7 | import yaml
8 |
9 | from rapids_dependency_file_generator import _config
10 | from rapids_dependency_file_generator._constants import cli_name
11 | from rapids_dependency_file_generator._rapids_dependency_file_generator import (
12 | dedupe,
13 | make_dependency_file,
14 | make_dependency_files,
15 | should_use_specific_entry,
16 | )
17 |
18 |
19 | def test_dedupe():
20 | # simple list
21 | deduped = dedupe(["dep1", "dep1", "dep2"])
22 | assert deduped == ["dep1", "dep2"]
23 |
24 | # list w/ mix of simple and pip dependencies
25 | deduped = dedupe(
26 | [
27 | "dep1",
28 | "dep1",
29 | _config.PipRequirements(pip=["pip_dep1", "pip_dep2"]),
30 | _config.PipRequirements(pip=["pip_dep1", "pip_dep2"]),
31 | ]
32 | )
33 | assert deduped == ["dep1", {"pip": ["pip_dep1", "pip_dep2"]}]
34 |
35 | # list w/ only pip dependencies
36 | deduped = dedupe(
37 | [
38 | _config.PipRequirements(pip=["pip_dep1", "pip_dep2"]),
39 | _config.PipRequirements(pip=["pip_dep3", "pip_dep1"]),
40 | ]
41 | )
42 | assert deduped == [{"pip": ["pip_dep1", "pip_dep2", "pip_dep3"]}]
43 |
44 |
45 | @mock.patch(
46 | "rapids_dependency_file_generator._rapids_dependency_file_generator.os.path.relpath"
47 | )
48 | def test_make_dependency_file(mock_relpath):
49 | relpath = "../../config_file.yaml"
50 | mock_relpath.return_value = relpath
51 | header = f"""\
52 | # This file is generated by `{cli_name}`.
53 | # To make changes, edit {relpath} and run `{cli_name}`.
54 | """
55 | env = make_dependency_file(
56 | file_type=_config.Output.CONDA,
57 | conda_env_name="tmp_env",
58 | file_name="tmp_env.yaml",
59 | config_file="config_file",
60 | output_dir="output_path",
61 | conda_channels=["rapidsai", "nvidia"],
62 | dependencies=["dep1", "dep2"],
63 | extras=None,
64 | )
65 | assert env == header + yaml.dump(
66 | {
67 | "name": "tmp_env",
68 | "channels": ["rapidsai", "nvidia"],
69 | "dependencies": ["dep1", "dep2"],
70 | }
71 | )
72 |
73 | env = make_dependency_file(
74 | file_type=_config.Output.REQUIREMENTS,
75 | conda_env_name="tmp_env",
76 | file_name="tmp_env.txt",
77 | config_file="config_file",
78 | output_dir="output_path",
79 | conda_channels=["rapidsai", "nvidia"],
80 | dependencies=["dep1", "dep2"],
81 | extras=None,
82 | )
83 | assert env == header + "dep1\ndep2\n"
84 |
85 |
86 | def test_make_dependency_file_should_raise_informative_error_when_extras_is_missing_for_pyproj():
87 |
88 | current_dir = pathlib.Path(__file__).parent
89 | with pytest.raises(ValueError, match=r"The 'extras' field must be provided for the 'pyproject' file type"):
90 | make_dependency_files(
91 | parsed_config=_config.load_config_from_file(current_dir / "examples" / "pyproject-no-extras" / "dependencies.yaml"),
92 | file_keys=["beep_boop"],
93 | output={_config.Output.PYPROJECT},
94 | matrix=None,
95 | prepend_channels=[],
96 | to_stdout=True
97 | )
98 |
99 |
100 | def test_make_dependency_files_should_raise_informative_error_when_multiple_files_requested_for_pyproject():
101 |
102 | current_dir = pathlib.Path(__file__).parent
103 | with pytest.raises(ValueError, match=r"Using \-\-file\-key multiple times together with.*pyproject"):
104 | make_dependency_files(
105 | parsed_config=_config.load_config_from_file(current_dir / "examples" / "integration" / "dependencies.yaml"),
106 | file_keys=["all", "test"],
107 | output={_config.Output.PYPROJECT},
108 | matrix=None,
109 | prepend_channels=[],
110 | to_stdout=True
111 | )
112 |
113 |
114 | def test_make_dependency_files_should_raise_informative_error_on_map_inputs_for_requirements():
115 | current_dir = pathlib.Path(__file__).parent
116 | with pytest.raises(ValueError, match=re.escape("Map inputs like {'pip': ['pandas<1.0']} are not allowed for the 'requirements' file type.")):
117 | make_dependency_files(
118 | parsed_config=_config.load_config_from_file(current_dir / "examples" / "requirements-pip-dict" / "dependencies.yaml"),
119 | file_keys=["all_of_the_things"],
120 | output={_config.Output.REQUIREMENTS},
121 | matrix=None,
122 | prepend_channels=[],
123 | to_stdout=True
124 | )
125 |
126 |
127 | def test_make_dependency_files_should_choose_correct_pyproject_toml(capsys):
128 |     # the nested pyproject.toml, not the repo-root one, should be read and written to stdout
129 | current_dir = pathlib.Path(__file__).parent
130 | make_dependency_files(
131 | parsed_config=_config.load_config_from_file(current_dir / "examples" / "nested-pyproject" / "dependencies.yaml"),
132 | file_keys=["sparkly_unicorn"],
133 | output={_config.Output.PYPROJECT},
134 | matrix={"cuda": ["100.17"]},
135 | prepend_channels=[],
136 | to_stdout=True
137 | )
138 | captured_stdout = capsys.readouterr().out
139 |
140 | # should be valid TOML, containing the expected dependencies and the other contents of
141 | # the nested pyproject.toml file
142 | doc = tomlkit.loads(captured_stdout)
143 | assert doc["project"]["name"] == "beep-boop"
144 | assert doc["project"]["version"] == "1.2.3"
145 | assert sorted(doc["project"]["dependencies"]) == ["cuda-python>=100.1,<101.0a0", "fsspec>=0.6.0"]
146 |
147 | # and should NOT contain anything from the root-level pyproject.toml
148 | assert set(dict(doc).keys()) == {"project"}
149 |
150 |
151 | def test_make_dependency_files_requirements_to_stdout_with_multiple_file_keys_works(capsys):
152 | current_dir = pathlib.Path(__file__).parent
153 | make_dependency_files(
154 | parsed_config=_config.load_config_from_file(current_dir / "examples" / "overlapping-deps" / "dependencies.yaml"),
155 | file_keys=["build_deps", "even_more_build_deps"],
156 | output={_config.Output.REQUIREMENTS},
157 | matrix={"arch": ["x86_64"]},
158 | prepend_channels=[],
159 | to_stdout=True
160 | )
161 | captured_stdout = capsys.readouterr().out
162 |     reqs_list = [r for r in captured_stdout.split("\n") if r and not r.startswith("#")]
163 |
164 | # should contain exactly the expected dependencies, sorted alphabetically, with no duplicates
165 | assert reqs_list == ["numpy>=2.0", "pandas<3.0", "rapids-build-backend>=0.3.1", "scikit-build-core[pyproject]>=0.9.0"]
166 |
167 |
168 | def test_make_dependency_files_conda_to_stdout_with_multiple_file_keys_works(capsys):
169 | current_dir = pathlib.Path(__file__).parent
170 | make_dependency_files(
171 | parsed_config=_config.load_config_from_file(current_dir / "examples" / "overlapping-deps" / "dependencies.yaml"),
172 | file_keys=["test_with_sklearn", "test_deps", "even_more_test_deps"],
173 | output={_config.Output.CONDA},
174 | matrix={"py": ["4.7"]},
175 | prepend_channels=[],
176 | to_stdout=True
177 | )
178 | captured_stdout = capsys.readouterr().out
179 | env_dict = yaml.safe_load(captured_stdout)
180 |
181 | # should only have the expected keys
182 | assert sorted(env_dict.keys()) == ["channels", "dependencies"]
183 |
184 |     # should preserve the channels from dependencies.yaml, in the order they were supplied
185 | assert env_dict["channels"] == ["rapidsai", "conda-forge"]
186 |
187 |     # the dependencies list should:
188 |     #
189 |     # * be sorted alphabetically (with the "pip:" list last)
190 |     # * include the "pip:" subsection
191 |     # * not contain any duplicates
192 |     # * contain the union of the dependencies from all requested file keys
193 |     #
194 | assert env_dict["dependencies"] == [
195 | "matplotlib",
196 | "pandas<3.0",
197 | "pip",
198 | "scikit-learn>=1.5",
199 | {"pip": [
200 | "folium",
201 | "numpy>=2.0",
202 | ]}
203 | ]
204 |
205 |
206 | def test_should_use_specific_entry():
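    # a specific entry is used only when each of its keys matches the corresponding value
    # in the matrix combination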
207 | # no match
208 | matrix_combo = {"cuda": "11.5", "arch": "x86_64"}
209 | specific_entry = {"cuda": "11.6"}
210 | result = should_use_specific_entry(matrix_combo, specific_entry)
211 | assert result is False
212 |
213 | # one match
214 | matrix_combo = {"cuda": "11.5", "arch": "x86_64"}
215 | specific_entry = {"cuda": "11.5"}
216 | result = should_use_specific_entry(matrix_combo, specific_entry)
217 | assert result is True
218 |
219 | # many matches
220 | matrix_combo = {"cuda": "11.5", "arch": "x86_64", "python": "3.6"}
221 | specific_entry = {"cuda": "11.5", "arch": "x86_64"}
222 | result = should_use_specific_entry(matrix_combo, specific_entry)
223 | assert result is True
224 |
--------------------------------------------------------------------------------
/tests/test_schema.py:
--------------------------------------------------------------------------------
1 | import jsonschema
2 |
3 |
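# the "schema" fixture is provided by tests/conftest.py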
4 | def test_schema_is_valid(schema):
5 | jsonschema.Draft7Validator.check_schema(schema)
6 |
--------------------------------------------------------------------------------