├── .flake8
├── .github
│   └── workflows
│       ├── lint.yml
│       └── main.yml
├── .gitignore
├── .isort.cfg
├── .pre-commit-config.yaml
├── .readthedocs.yml
├── CHANGELOG.md
├── CONTRIBUTING.md
├── LICENSE
├── MANIFEST.in
├── README.md
├── boa
│   ├── __init__.py
│   ├── _version.py
│   ├── cli
│   │   ├── __init__.py
│   │   ├── boa.py
│   │   ├── convert.py
│   │   ├── mambabuild.py
│   │   ├── test.py
│   │   ├── transmute.py
│   │   └── validate.py
│   ├── core
│   │   ├── __init__.py
│   │   ├── build.py
│   │   ├── conda_build_spec.py
│   │   ├── config.py
│   │   ├── environ.py
│   │   ├── jinja_support.py
│   │   ├── metadata.py
│   │   ├── monkey_patch_emscripten.py
│   │   ├── recipe_handling.py
│   │   ├── recipe_output.py
│   │   ├── render.py
│   │   ├── run_build.py
│   │   ├── solver.py
│   │   ├── test.py
│   │   ├── transmute.py
│   │   ├── utils.py
│   │   ├── validation.py
│   │   ├── variant_arithmetic.py
│   │   └── windows.py
│   ├── helpers
│   │   ├── __init__.py
│   │   ├── asciigraph.py
│   │   └── ast_extract_syms.py
│   ├── schemas
│   │   ├── generate_schemas.sh
│   │   ├── info
│   │   │   ├── about.py
│   │   │   ├── index.py
│   │   │   ├── info-about.schema.json
│   │   │   ├── info-index.schema.json
│   │   │   ├── info-paths.schema.json
│   │   │   └── paths.py
│   │   ├── model.py
│   │   └── recipe.v1.json
│   └── tui
│       ├── __init__.py
│       ├── exceptions.py
│       ├── patching.py
│       └── tui.py
├── docs
│   ├── Makefile
│   ├── assets
│   │   └── boa_header.png
│   ├── environment.yml
│   ├── make.bat
│   └── source
│       ├── _static
│       │   ├── boa.svg
│       │   └── style.css
│       ├── advanced_recipe_spec.md
│       ├── conf.py
│       ├── getting_started.md
│       ├── index.md
│       ├── jsonschema_spec.md
│       ├── mambabuild.md
│       └── recipe_spec.md
├── setup.py
└── tests
    ├── env.yml
    ├── lint.yml
    ├── recipes-v2
    │   ├── environ
    │   │   └── recipe.yaml
    │   ├── grayskull
    │   │   ├── LICENSE
    │   │   └── recipe.yaml
    │   ├── pin_compatible
    │   │   └── recipe.yaml
    │   └── xtensor
    │       ├── bld.bat
    │       ├── build.sh
    │       └── recipe.yaml
    ├── recipes
    │   ├── baddeps
    │   │   └── meta.yaml
    │   ├── dep_error_has_constaint
    │   │   └── meta.yaml
    │   ├── dep_error_needed_by
    │   │   └── meta.yaml
    │   ├── dep_error_nothing_provides
    │   │   └── meta.yaml
    │   ├── dep_error_package_requires
    │   │   └── meta.yaml
    │   ├── jedi
    │   │   └── meta.yaml
    │   ├── multioutput
    │   │   └── meta.yaml
    │   ├── multiple_license
    │   │   ├── LICENSE
    │   │   ├── NOTICE.md
    │   │   └── meta.yaml
    │   └── stackvana
    │       └── meta.yaml
    ├── test_boa_build.py
    ├── test_helpers.py
    ├── test_mambabuild.py
    ├── test_rendering.py
    ├── tests-v2
    │   ├── metapackage-channel-pin
    │   │   └── recipe.yaml
    │   └── runexports
    │       └── recipe.yaml
    └── variants
        ├── compiler_test
        │   ├── cbc_default.yaml
        │   ├── compilers.yaml
        │   └── recipe.yaml
        ├── underscores
        │   ├── cbc1.yaml
        │   ├── cbc2.yaml
        │   ├── recipe.yaml
        │   └── recipe2.yaml
        └── variant_test
            ├── cbc1.yaml
            ├── cbc2.yaml
            ├── cbc3.yaml
            ├── cbc4.yaml
            ├── cbc5.yaml
            ├── cbc6.yaml
            ├── cbc7.yaml
            ├── recipe.yaml
            └── recipe2.yaml
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | max-line-length=88
3 | extend-ignore=E203,D104,D100,I004,E501
4 | exclude=tests/data/*
5 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | name: Linters (Python)
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | pull_request:
8 | branches:
9 | - main
10 |
11 | jobs:
12 | build:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v4
16 | - name: install mamba
17 | uses: mamba-org/setup-micromamba@v1
18 | with:
19 | environment-file: tests/lint.yml
20 | - name: Run all linters
21 | shell: bash -l {0}
22 | run: |
23 | pre-commit run --all-files --verbose
24 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | pull_request:
8 | branches:
9 | - main
10 |
11 | jobs:
12 | run:
13 | runs-on: ${{ matrix.os }}
14 |
15 | strategy:
16 | fail-fast: false
17 | matrix:
18 | os: [ubuntu-latest, macos-latest, windows-latest]
19 | conda-channel: ['conda-forge', 'conda-canary/label/dev']
20 | include:
21 | # Lowest versions to test for.
22 | - conda-channel: 'conda-forge'
23 | conda-build-version: '3.25'
24 | python-version: '3.8'
25 | # Unbound/dev versions to test for.
26 | - conda-channel: 'conda-canary/label/dev'
27 | conda-build-version: '*'
28 | python-version: '*'
29 | steps:
30 | - uses: actions/checkout@v4
31 | - name: install mamba
32 | uses: mamba-org/setup-micromamba@v1
33 | with:
34 | environment-file: tests/env.yml
35 | create-args: >-
36 | --channel-priority=flexible
37 | python=${{ matrix.python-version }}
38 | ${{ matrix.conda-channel }}::conda
39 | ${{ matrix.conda-channel }}::conda-build=${{ matrix.conda-build-version }}
40 | - name: Install boa
41 | shell: bash -l {0}
42 | run: |
43 | pip install .
44 | - name: Run tests
45 | shell: bash -l {0}
46 | run: |
47 | conda config --add channels conda-forge
48 | conda config --set channel_priority strict
49 | pytest tests
50 | env:
51 | PYTHONUTF8: 1
52 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | # Byte-compiled / optimized / DLL files
3 | __pycache__/
4 | *.py[cod]
5 | *$py.class
6 |
7 | # C extensions
8 | *.so
9 |
10 | # Distribution / packaging
11 | .Python
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | cover/
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | .pybuilder/
77 | target/
78 |
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 |
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 |
86 | # pyenv
87 | # For a library or package, you might want to ignore these files since the code is
88 | # intended to run in multiple environments; otherwise, check them in:
89 | # .python-version
90 |
91 | # pipenv
92 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
93 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
94 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
95 | # install all needed dependencies.
96 | #Pipfile.lock
97 |
98 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
99 | __pypackages__/
100 |
101 | # Celery stuff
102 | celerybeat-schedule
103 | celerybeat.pid
104 |
105 | # SageMath parsed files
106 | *.sage.py
107 |
108 | # Environments
109 | .env
110 | .venv
111 | env/
112 | venv/
113 | ENV/
114 | env.bak/
115 | venv.bak/
116 |
117 | # Spyder project settings
118 | .spyderproject
119 | .spyproject
120 |
121 | # Rope project settings
122 | .ropeproject
123 |
124 | # mkdocs documentation
125 | /site
126 |
127 | # mypy
128 | .mypy_cache/
129 | .dmypy.json
130 | dmypy.json
131 |
132 | # Pyre type checker
133 | .pyre/
134 |
135 | # pytype static type analyzer
136 | .pytype/
137 |
138 | # Cython debug symbols
139 | cython_debug/
140 |
141 | # Mamba files
142 | *pkgs/*
143 |
--------------------------------------------------------------------------------
/.isort.cfg:
--------------------------------------------------------------------------------
1 | [settings]
2 | line_length=1000
3 | known_third_party=requests,ruamel,yaml,pytest,rapidfuzz,opensource,colorama,progressbar,progressbar2
4 | multi_line_output=3
5 | include_trailing_comma=True
6 | force_grid_wrap=0
7 | use_parentheses=True
8 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/psf/black
3 | rev: 23.10.1
4 | hooks:
5 | - id: black
6 | args: [--safe, --quiet]
7 | - repo: https://github.com/asottile/blacken-docs
8 | rev: 1.16.0
9 | hooks:
10 | - id: blacken-docs
11 | additional_dependencies: [black==22.3.0]
12 | - repo: https://github.com/pre-commit/pre-commit-hooks
13 | rev: v4.5.0
14 | hooks:
15 | - id: trailing-whitespace
16 | - id: end-of-file-fixer
17 | # - repo: https://github.com/pre-commit/mirrors-isort
18 | # rev: v5.2.2
19 | # hooks:
20 | # - id: isort
21 | # exclude: tests/data
22 | - repo: https://github.com/pycqa/flake8
23 | rev: 6.1.0
24 | hooks:
25 | - id: flake8
26 | args:
27 | # used by flake8-typing-imports
28 | - --min-python-version=3.7.0
29 | exclude: tests/data
30 | language_version: python3
31 | additional_dependencies:
32 | - flake8-typing-imports
33 | - flake8-builtins
34 | - flake8-bugbear
35 | # - flake8-isort==3.0.1
36 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | conda:
4 | environment: docs/environment.yml
5 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | 0.17.0 (March 21, 2024)
2 | =======================
3 |
4 | - Many fixes for the latest conda, conda-build and mamba versions (thanks @mbargull @jaimergp @bamarco @deepsihag) #395 #393 #384 #380
5 |
6 | 0.15.1 (June 21, 2023)
7 | ======================
8 |
9 | - Disable error messages for now to see if that fixes segfault issues observed on conda-forge (https://github.com/conda-forge/conda-forge.github.io/issues/1960) #352
10 |
11 | 0.15.0 (May 17, 2023)
12 | =====================
13 |
14 | - Run export types by @wolfv in https://github.com/mamba-org/boa/pull/324
15 | - Fix pin_compatible by @ruben-arts in https://github.com/mamba-org/boa/pull/325
16 | - emscripten 32 - conditional monkeypatch by @wolfv in https://github.com/mamba-org/boa/pull/333
17 | - Fix #322 by adding "about" section in Output constructor by @moqmar in https://github.com/mamba-org/boa/pull/327
18 | - add support for the new error messages by @jaimergp in https://github.com/mamba-org/boa/pull/340
19 | - Switch to setup-micromamba by @pavelzw in https://github.com/mamba-org/boa/pull/339
20 | - Support passing build variants from cli by @frmdstryr in https://github.com/mamba-org/boa/pull/337
21 | - Allow for multiple license files by @dhirschfeld in https://github.com/mamba-org/boa/pull/342
22 | - Make it possible to include the recipe.yaml inside a pyproject.toml by @moqmar in https://github.com/mamba-org/boa/pull/345
23 | - Implement separate "boa test" command to fix #326 by @moqmar in https://github.com/mamba-org/boa/pull/343
24 | - fix: support strict channel priorities by @johanneskoester in https://github.com/mamba-org/boa/pull/347
25 |
26 | 0.14.0 (November 10, 2022)
27 | ==========================
28 |
29 | - [boa] fix schema mistake (destination should be folder in schema) (thanks @alexshagiev) #316
30 | - [boa] For issue 313 use os.path.normpath not os.normpath when setting git_url. (thanks @ericvoltron) #314
31 | - [boa, mambabuild] update precommit files (thanks @mariusvniekerk) #315
32 |
33 | 0.13.0 (November 10, 2022)
34 | ==========================
35 |
36 | - [boa] compatibility with mamba 1.0.0
37 | - [mambabuild] dependency error parse improvements (thanks @mbargull) #306
38 | - [mambabuild] fix parsing problems when test_downstream is called (thanks @isuruf) #301
39 | - [boa] Add environ to context and proper build/script_env handling #299
40 |
41 |
42 | 0.12.0 (October 5, 2022)
43 | ========================
44 |
45 | - [boa] compatibility with mamba 0.27.0
46 | - [boa] Fix documentation and docstrings (thanks @LunarLanding and @martinRenou)
47 | - [boa] Allow channel pinnings in boa build (thanks @mariusvniekerk)
48 | - [boa] Support additional conda build command line arguments (thanks @mariusvniekerk)
49 | - [boa] Fix propagation of build strings in dependencies (thanks @alexshagiev)
50 | - [boa] Proper run exports handling
51 | - [boa] Fix error in test when include_recipe is false (thanks @frmdstryr)
52 |
53 | 0.11.0 (April 27, 2022)
54 | =======================
55 |
56 | - [boa] compatibility with mamba 0.23.0
57 | - [boa] fix an AttributeError if there is no pin (thanks @TobiasFischer) #268
58 |
59 | 0.10.0 (March 18, 2022)
60 | =======================
61 |
62 | - [boa] add `boa --version`
63 | - [boa] add more docs and vastly improved new recipe schema, render recipe schema in docs
64 | - [boa] add version from top-level to outputs to make validation pass
65 | - [boa] move CondaBuildSpec class to its own file
66 | - [boa] save properly rendered recipe into final package
67 | - [boa] implement build steps and variant inheritance logic
68 | - [boa] read and respect binary_relocation value (thanks @frmdstryr)
69 | - [boa] add debug assert messages (thanks @dhirschfeld)
70 |
71 |
72 | 0.9.0 (February 11, 2022)
73 | =========================
74 |
75 | - [boa] add support for `build.py` Python based build scripts (also check out [`bitfurnace`](https://github.com/mamba-org/bitfurnace))
76 | - [boa,mambabuild] fix compatibility with mamba 0.21.*
77 |
78 | 0.8.2 (January 31, 2022)
79 | ========================
80 |
81 | - [boa] fix multi-output
82 | - [boa] fix keep run_export and existing spec when existing spec is not simple
83 | - [mambabuild] allow testing multiple recipes (thanks @gabm)
84 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | When contributing to this repository, it is always a good idea to first
4 | discuss the change you wish to make via issue, email, or any other method with
5 | the owners of this repository before making a change.
6 |
7 | We welcome all kinds of contributions -- code or non-code -- and value them
8 | highly. We pledge to treat everyone's contributions fairly and with respect,
9 | and we are here to help awesome pull requests over the finish line.
10 |
11 | Please note we have a code of conduct, and follow it in all your interactions with the project.
12 |
13 | We follow the [NumFOCUS code of conduct](https://numfocus.org/code-of-conduct).
14 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2020 QuantStack and the Boa contributors.
2 |
3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
4 |
5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
6 |
7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
8 |
9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
10 |
11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
12 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include README.md
3 | include CONTRIBUTING.md
4 |
5 | graft boa/schemas
6 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 |
3 | > [!WARNING]
4 | > Boa is superseded by [`rattler-build`](http://github.com/prefix-dev/rattler-build).
5 | > It's a complete reimplementation of `conda-build` / `boa` in Rust and dramatically faster than `conda-build` and (even) `boa`.
6 | > This project is archived; no further public updates will be made.
7 |
8 | ## The Fast Conda and Mamba Package Builder
9 |
10 |
11 |
12 |
13 | **part of mamba-org**
14 |
15 |
16 |
17 |
18 | | Package Manager [mamba](https://github.com/mamba-org/mamba) | Package Server [quetz](https://github.com/mamba-org/quetz) | Package Builder [boa](https://github.com/mamba-org/boa) |
19 | | --- | --- | --- |
20 |
21 |
22 |
23 |
24 |
25 | # boa, the fast build tool for conda packages
26 |
27 | ```
28 | Note: boa is still a work-in-progress.
29 | ```
30 |
31 | **boa** is a package builder for conda packages.
32 | It largely re-uses the `conda-build` infrastructure, except for some parts, such as the 'solving stage', which Boa performs with `mamba`, the fast `conda` alternative. Learn more about `mamba` [here](https://github.com/mamba-org/mamba#readme).
33 |
34 | We are also working towards a new "meta.yaml" format, implemented in the `boa/core/render.py` source file. Read more about it [here](https://boa-build.readthedocs.io/en/latest/recipe_spec.html).
35 | The new "meta.yaml" format is still a work-in-progress and might not work as expected.
36 |
37 | The discussions about this new `meta.yaml` format take place [here](https://hackmd.io/axI1tQdwQB2pTJKt5XdY5w). We encourage you to participate.
38 |
39 | The short-term goal for boa is to parse the new version spec, and produce a `conda_build.MetaData` class in Python that describes how to assemble the final package.
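
As a rough illustration of that pipeline, here is a minimal sketch of rendering a recipe from Python, mirroring what `boa validate` does internally (see `boa/cli/validate.py`; the recipe path is a placeholder):

```python
# A minimal sketch based on boa/cli/validate.py: get_config() loads the
# variant/config information, and render() normalizes the v2 recipe document.
from boa.core.render import render
from boa.core.utils import get_config

recipe = "my_recipe_folder/recipe.yaml"  # placeholder path
cbc, config = get_config(recipe)
ydoc = render(recipe, config, is_pyproject_recipe=recipe.endswith(".toml"))
print(ydoc)
```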
40 |
41 | [](https://asciinema.org/a/HBduIi9TgdFgS3zV7mB3h0KpN)
42 |
43 |
44 | The following tools are included with boa:
45 |
46 | ```
47 | conda mambabuild my_recipe_folder
48 | ```
49 | This is equivalent to running `conda build my_recipe_folder` but using mamba as a solver.
50 |
51 | ```
52 | boa render my_recipe_folder
53 | ```
54 | "Render" a recipe. (Note that you must use the non-final v2 syntax. Check the recipes folder for examples.)
55 |
56 | ```
57 | boa build my_recipe_folder
58 | ```
59 | Runs a "build" of the v2 recipe.
60 |
61 | ### Dev Installation
62 |
63 | Install the boa dependencies:
64 | ```
65 | mamba install "conda-build>=3.20" colorama pip ruamel ruamel.yaml rich mamba jsonschema -c conda-forge
66 | ```
67 |
68 | Now install boa:
69 | ```
70 | pip install -e .
71 | ```
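
You can check the installation by printing the version:
```
boa --version
```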
72 | ### Documentation
73 |
74 | The boa documentation can be found [here](https://boa-build.readthedocs.io/en/latest/).
75 |
76 | ### License
77 |
78 | We use a shared copyright model that enables all contributors to maintain the copyright on their contributions.
79 |
80 | This software is licensed under the BSD-3-Clause license. See the LICENSE file for details.
81 |
--------------------------------------------------------------------------------
/boa/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | from ._version import version_info, __version__ # noqa
5 |
--------------------------------------------------------------------------------
/boa/_version.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | version_info = (0, 17, 0)
5 | __version__ = ".".join(map(str, version_info))
6 |
--------------------------------------------------------------------------------
/boa/cli/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mamba-org/boa/183fef0cf7d18502da246a85379bd8e6423b1fb8/boa/cli/__init__.py
--------------------------------------------------------------------------------
/boa/cli/boa.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | import sys
5 | import argparse
6 |
7 | from conda.base.context import context
8 |
9 | from boa.core import monkey_patch_emscripten
10 |
11 | if any("emscripten" in arg for arg in sys.argv):
12 | print("Monkeypatching emscripten")
13 | monkey_patch_emscripten.patch()
14 |
15 | from boa.core.config import init_global_config
16 | from boa._version import __version__
17 | from boa.core.utils import init_api_context
18 |
19 | cc_conda_build = context.conda_build if hasattr(context, "conda_build") else {}
20 |
21 | banner = r"""
22 | _
23 | | |__ ___ __ _
24 | | '_ \ / _ \ / _` |
25 | | |_) | (_) | (_| |
26 | |_.__/ \___/ \__,_|
27 | """
28 |
29 |
30 | def main(config=None):
31 | parser = argparse.ArgumentParser(
32 | description="Boa, the fast, mamba powered-build tool for conda packages."
33 | )
34 | parser.add_argument("--version", action="version", version=__version__)
35 |
36 | subparsers = parser.add_subparsers(help="sub-command help", dest="command")
37 | parent_parser = argparse.ArgumentParser(add_help=False)
38 | parent_parser.add_argument("--recipe-dir", type=str)
39 | parent_parser.add_argument("target", type=str, default="")
40 | parent_parser.add_argument("--features", type=str)
41 | parent_parser.add_argument("--offline", action="store_true")
42 | parent_parser.add_argument("--target-platform", type=str)
43 | parent_parser.add_argument("--json", action="store_true")
44 | parent_parser.add_argument("--debug", action="store_true")
45 | parent_parser.add_argument(
46 | "--pyproject-recipes",
47 | action="store_true",
48 | help="""Use [tool.boa] section from pyproject.toml as a recipe instead of a separate recipe.yaml.""",
49 | )
50 |
51 | variant_parser = argparse.ArgumentParser(add_help=False)
52 | variant_parser.add_argument(
53 | "-m",
54 | "--variant-config-files",
55 | action="append",
56 | help="""Additional variant config files to add. These yaml files can contain
57 | keys such as `c_compiler` and `target_platform` to form a build matrix.""",
58 | )
59 |
60 | subparsers.add_parser(
61 | "render", parents=[parent_parser, variant_parser], help="render a recipe"
62 | )
63 | subparsers.add_parser(
64 | "convert",
65 | parents=[parent_parser],
66 | help="convert old-style meta.yaml to recipe.yaml",
67 | )
68 | subparsers.add_parser(
69 | "validate",
70 | parents=[parent_parser],
71 | help="Validate recipe.yaml",
72 | )
73 |
74 | test_parser = argparse.ArgumentParser(add_help=False)
75 | test_parser.add_argument(
76 | "--extra-deps",
77 | action="append",
78 | help="Extra dependencies to add to all test environment creation steps.",
79 | )
80 | subparsers.add_parser(
81 | "test",
82 | parents=[parent_parser, test_parser],
83 | help="test an already built package (include_recipe of the package must be true)",
84 | )
85 |
86 | build_parser = argparse.ArgumentParser(add_help=False)
87 | build_parser.add_argument(
88 | "-i",
89 | "--interactive",
90 | action="store_true",
91 | help="Use interactive mode if build fails",
92 | )
93 |
94 | build_parser.add_argument(
95 | "--output-folder",
96 | help=(
97 | "folder to dump output package to. Package are moved here if build or test succeeds."
98 | " Destination folder must exist prior to using this."
99 | ),
100 | default=cc_conda_build.get("output_folder"),
101 | )
102 |
103 | build_parser.add_argument(
104 | "--skip-existing",
105 | nargs="?",
106 | default="default",
107 | const="yes",
108 | )
109 | build_parser.add_argument(
110 | "--no-test",
111 | action="store_true",
112 | dest="notest",
113 | help="Do not test the package.",
114 | )
115 | build_parser.add_argument(
116 | "--continue-on-failure",
117 | action="store_true",
118 | help="Continue building remaining recipes if a recipe fails.",
119 | )
120 | # The following arguments are taken directly from conda-build
121 | conda_build_parser = build_parser.add_argument_group("special conda-build flags")
122 | conda_build_parser.add_argument(
123 | "--build-id-pat",
124 | default=False,
125 | dest="conda_build_build_id_pat",
126 | help="""\
127 | specify a templated pattern to use as build folder names. Use if having issues with
128 | paths being too long, or to ensure a particular build folder name.
129 | When not specified, the default is to use the pattern {n}_{t}.
130 | Template variables are: n: package name, t: timestamp, v: package_version""",
131 | )
132 | conda_build_parser.add_argument(
133 | "--no-remove-work-dir",
134 | dest="conda_build_remove_work_dir",
135 | default=True,
136 | action="store_false",
137 | help="""\
138 | Disable removal of the work dir before testing. Be careful using this option, as
139 | your package may depend on files that are not included in the package, and may pass
140 | tests, but ultimately fail on installed systems.""",
141 | )
142 | conda_build_parser.add_argument(
143 | "--keep-old-work",
144 | action="store_true",
145 | dest="conda_build_keep_old_work",
146 | help="Do not remove anything from environment, even after successful build and test.",
147 | )
148 | conda_build_parser.add_argument(
149 | "--prefix-length",
150 | dest="conda_build_prefix_length",
151 | help="""\
152 | length of build prefix. For packages with binaries that embed the path, this is
153 | critical to ensuring that your package can run as many places as possible. Note
154 | that this value can be altered by the OS below boa (e.g. encrypted
155 | filesystems on Linux), and you should prefer to set --croot to a non-encrypted
156 | location instead, so that you maintain a known prefix length.""",
157 | default=255,
158 | type=int,
159 | )
160 | conda_build_parser.add_argument(
161 | "--croot",
162 | dest="conda_build_croot",
163 | help="Build root folder. Equivalent to CONDA_BLD_PATH, but applies only to this call of `boa build`.",
164 | )
165 | build_parser.add_argument(
166 | "--pkg-format",
167 | dest="conda_pkg_format",
168 | choices=["1", "2"],
169 | default="1",
170 | help="Package format version. Version 1 is the standard .tar.bz2 format. Version 2 is the new .conda format.",
171 | )
172 | conda_build_parser.add_argument(
173 | "--zstd-compression-level",
174 | help="""\
175 | When building v2 packages, set the compression level used by
176 | conda-package-handling. Defaults to the maximum.""",
177 | type=int,
178 | choices=range(1, 22),
179 | default=22,
180 | )
181 |
182 | for k in ("perl", "lua", "python", "numpy", "r_base"):
183 | conda_build_parser.add_argument(
184 | "--{}".format(k),
185 | dest="{}_variant".format(k),
186 | help="Set the {} variant used by conda build.".format(k),
187 | )
188 |
189 | subparsers.add_parser(
190 | "build",
191 | parents=[parent_parser, build_parser, variant_parser],
192 | help="build a recipe",
193 | )
194 |
195 | transmute_parser = subparsers.add_parser(
196 | "transmute",
197 | parents=(),
198 | help="transmute one or many tar.bz2 packages into a conda packages (or vice versa!)",
199 | )
200 | transmute_parser.add_argument("files", type=str, nargs="+")
201 | transmute_parser.add_argument("-o", "--output-folder", type=str, default=".")
202 | transmute_parser.add_argument("-c", "--compression-level", type=int, default=22)
203 | transmute_parser.add_argument(
204 | "-n_jobs",
205 | "--num_jobs",
206 | type=int,
207 | default=1,
208 | help="the number of parallel processing elements",
209 | )
210 |
211 | args = parser.parse_args()
212 |
213 | command = args.command
214 |
215 | init_api_context()
216 | init_global_config(args)
217 |
218 | from boa.core.run_build import run_build
219 | from boa.cli import convert
220 | from boa.cli import transmute
221 | from boa.cli import validate
222 | from boa.cli import test
223 |
224 | if command == "convert":
225 | convert.main(args.target)
226 | exit()
227 |
228 | if command == "validate":
229 | validate.main(args.target)
230 | exit()
231 |
232 | if command == "test":
233 | test.main(args)
234 | exit()
235 |
236 | if command == "transmute":
237 | transmute.main(args)
238 | exit()
239 |
240 | from boa.core.config import boa_config
241 |
242 | boa_config.console.print(banner)
243 |
244 | if command == "build" or command == "render":
245 | run_build(args)
246 |
247 | if not command:
248 | parser.print_help(sys.stdout)
249 |
250 |
251 | if __name__ == "__main__":
252 | main()
253 |
--------------------------------------------------------------------------------
/boa/cli/convert.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | # convert between recipe.yaml and meta.yaml
5 | import ruamel
6 | from ruamel.yaml.representer import RoundTripRepresenter
7 | from ruamel.yaml.comments import CommentedMap
8 | from ruamel.yaml import YAML
9 | from collections import OrderedDict
10 | import re
11 |
12 |
13 | class MyRepresenter(RoundTripRepresenter):
14 | pass
15 |
16 |
17 | ruamel.yaml.add_representer(
18 | OrderedDict, MyRepresenter.represent_dict, representer=MyRepresenter
19 | )
20 |
21 | RECIPE_FIELD_ORDER = [
22 | "package",
23 | "source",
24 | "build",
25 | "requirements",
26 | "test",
27 | "app",
28 | "outputs",
29 | "about",
30 | "extra",
31 | ]
32 |
33 |
34 | def order_output_dict(d):
35 | result_list = []
36 | for k in RECIPE_FIELD_ORDER:
37 | if k in d:
38 | result_list.append((k, d[k]))
39 |
40 | leftover_keys = d.keys() - set(RECIPE_FIELD_ORDER)
41 | result_list += [(k, d[k]) for k in leftover_keys]
42 | return OrderedDict(result_list)
43 |
44 |
45 | def main(docname):
46 | with open(docname, "r") as fi:
47 | lines = fi.readlines()
48 | context = {}
49 | rest_lines = []
50 | for line in lines:
51 | # print(line)
52 | if "{%" in line:
53 | set_expr = re.search("{%(.*)%}", line)
54 | set_expr = set_expr.group(1)
55 | set_expr = set_expr.replace("set", "", 1).strip()
56 | exec(set_expr, globals(), context)
57 | else:
58 | rest_lines.append(line)
59 |
60 | yaml = YAML(typ="rt")
61 | yaml.preserve_quotes = True
62 | yaml.default_flow_style = False
63 | yaml.indent(sequence=4, offset=2)
64 | yaml.width = 1000
65 | yaml.Representer = MyRepresenter
66 | yaml.Loader = ruamel.yaml.RoundTripLoader
67 |
68 | result_yaml = CommentedMap()
69 | result_yaml["context"] = context
70 |
71 | def has_selector(s):
72 | return s.strip().endswith("]")
73 |
74 | quoted_lines = []
75 | for line in rest_lines:
76 | if has_selector(line):
77 | selector_start = line.rfind("[")
78 | selector_end = line.rfind("]")
79 | selector_content = line[selector_start + 1 : selector_end]
80 |
81 | if line.strip().startswith("-"):
82 | line = (
83 | line[: line.find("-") + 1]
84 | + f" sel({selector_content}): "
85 | + line[
86 | line.find("-") + 1 : min(line.rfind("#"), line.rfind("["))
87 | ].strip()
88 | + "\n"
89 | )
90 | quoted_lines.append(line)
91 | rest_lines = quoted_lines
92 |
93 | def check_if_quoted(s):
94 | s = s.strip()
95 | return s.startswith('"') or s.startswith("'")
96 |
97 | quoted_lines = []
98 | for line in rest_lines:
99 | if "{{" in line:
100 | # make sure that jinja stuff is quoted
101 | if line.find(":") != -1:
102 | idx = line.find(":")
103 | elif line.strip().startswith("-"):
104 | idx = line.find("-")
105 | rest = line[idx + 1 :]
106 |
107 | if not check_if_quoted(rest):
108 | if "'" in rest:
109 | rest = rest.replace("'", '"')
110 |
111 | line = line[: idx + 1] + f" '{rest.strip()}'\n"
112 | quoted_lines.append(line)
113 | rest_lines = quoted_lines
114 |
115 | skips, wo_skip_lines = [], []
116 | for line in rest_lines:
117 | if line.strip().startswith("skip"):
118 | parts = line.split(":")
119 | rhs = parts[1].strip()
120 | if rhs.startswith("true"):
121 | selector_start = line.rfind("[")
122 | selector_end = line.rfind("]")
123 | selector_content = line[selector_start + 1 : selector_end]
124 | skips.append(selector_content)
125 | else:
126 | print("ATTENTION skip: false not handled!")
127 | else:
128 | wo_skip_lines.append(line)
129 |
130 | rest_lines = wo_skip_lines
131 | result_yaml.update(yaml.load("".join(rest_lines)))
132 |
133 | if len(skips) != 0:
134 | result_yaml["build"]["skip"] = skips
135 |
136 | if result_yaml.get("outputs"):
137 | for o in result_yaml["outputs"]:
138 | name = o["name"]
139 | package = {"name": name}
140 | del o["name"]
141 | if o.get("version"):
142 | package["version"] = o["version"]
143 | del o["version"]
144 |
145 | build = {}
146 | if o.get("script"):
147 | build["script"] = o["script"]
148 | del o["script"]
149 |
150 | o["package"] = package
151 | o["build"] = build
152 |
153 | for d in result_yaml["outputs"]:
154 | print(order_output_dict(d))
155 | result_yaml["outputs"] = [order_output_dict(d) for d in result_yaml["outputs"]]
156 |
157 | from io import StringIO
158 |
159 | output = StringIO()
160 | yaml.dump(result_yaml, output)
161 |
162 | # Hacky way to insert an empty line after the context-key-object
163 | context_output = StringIO()
164 | yaml.dump(context, context_output)
165 | context_output = context_output.getvalue()
166 | context_output_len = len(context_output.split("\n"))
167 |
168 | final_result = output.getvalue()
169 | final_result_lines = final_result.split("\n")
170 | final_result_lines.insert(context_output_len, "")
171 |
172 | print("\n".join(final_result_lines))
173 |
--------------------------------------------------------------------------------
/boa/cli/mambabuild.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | import os
5 | import sys
6 | import re
7 | from glob import glob
8 |
9 | from conda.models.match_spec import MatchSpec
10 | from conda.gateways.disk.create import mkdir_p
11 |
12 | import conda_build.environ
13 | from conda_build import api
14 | from conda_build.config import Config, get_channel_urls
15 | from conda_build.cli.main_build import parse_args
16 | from conda_build.exceptions import DependencyNeedsBuildingError
17 | from conda_index.index import update_index
18 |
19 | from conda.base.context import context
20 |
21 | from boa.core.solver import MambaSolver
22 | from boa.core.utils import normalize_subdir
23 | from boa.core.utils import init_api_context
24 | from boa.core.config import boa_config
25 |
26 | only_dot_or_digit_re = re.compile(r"^[\d\.]+$")
27 |
28 | solver_map = {}
29 |
30 | # e.g. package-1.2.3-h5487548_0
31 | dashed_spec_pattern = r"([^ ]+)-([^- ]+)-([^- ]+)"
32 | # e.g. package 1.2.8.*
33 | conda_build_spec_pattern = r"([^ ]+)(?:\ ([^ ]+))?(?:\ ([^ ]+))?"
34 |
35 | problem_re = re.compile(
36 | rf"""
37 | ^(?:\ *-\ +)+
38 | (?:
39 | (?:
40 | package
41 | \ {dashed_spec_pattern}
42 | \ requires
43 | \ {conda_build_spec_pattern}
44 | ,\ but\ none\ of\ the\ providers\ can\ be\ installed
45 | ) | (?:
46 | package
47 | \ {dashed_spec_pattern}
48 | \ has\ constraint
49 | \ .*
50 | \ conflicting\ with
51 | \ {dashed_spec_pattern}
52 | ) | (?:
53 | nothing\ provides
54 | \ {conda_build_spec_pattern}
55 | \ needed\ by
56 | \ {dashed_spec_pattern}
57 | ) | (?:
58 | nothing\ provides(?:\ requested)?
59 | \ {conda_build_spec_pattern}
60 | )
61 | )
62 | """,
63 | re.VERBOSE,
64 | )
65 |
66 |
67 | def parse_problems(problems):
68 | conflicts = {}
69 | for line in problems.splitlines():
70 | match = problem_re.match(line)
71 | if not match:
72 | continue
73 | # All capture groups in problem_re only come from dashed_spec_pattern
74 | # and conda_build_spec_pattern and thus are always multiples of 3.
75 | for name, version, build in zip(*([iter(match.groups())] * 3)):
76 | if name is None:
77 | continue
78 | kwargs = {"name": name}
79 | if version is not None:
80 | kwargs["version"] = version
81 | if build is not None:
82 | kwargs["build"] = build
83 | conflicts[name] = MatchSpec(**kwargs)
84 | return set(conflicts.values())
85 |
86 |
87 | def suppress_stdout():
88 | context.quiet = True
89 | init_api_context()
90 | boa_config.quiet = True
91 | boa_config.console.quiet = True
92 |
93 |
94 | def _get_solver(channel_urls, subdir, output_folder):
95 | """Gets a solver from cache or creates a new one if needed."""
96 | subdir = normalize_subdir(subdir)
97 |
98 | if subdir in solver_map:
99 | solver = solver_map[subdir]
100 | solver.replace_channels()
101 | else:
102 | solver = MambaSolver(channel_urls, subdir, output_folder)
103 | solver_map[subdir] = solver
104 |
105 | return solver
106 |
107 |
108 | def mamba_get_package_records(
109 | prefix,
110 | specs,
111 | env,
112 | retries=0,
113 | subdir=None,
114 | verbose=True,
115 | debug=False,
116 | locking=True,
117 | bldpkgs_dirs=None,
118 | timeout=900,
119 | disable_pip=False,
120 | max_env_retry=3,
121 | output_folder=None,
122 | channel_urls=None,
123 | ):
124 | solver = _get_solver(channel_urls, subdir, output_folder)
125 |
126 | _specs = [MatchSpec(s) for s in specs]
127 | for idx, s in enumerate(_specs):
128 | if s.version:
129 | vspec = str(s.version)
130 | if re.match(only_dot_or_digit_re, vspec):
131 | n = s.conda_build_form()
132 | sn = n.split()
133 | if vspec.count(".") <= 1:
134 | sn[1] = vspec + ".*"
135 | else:
136 | sn[1] = vspec + "*"
137 | _specs[idx] = MatchSpec(" ".join(sn))
138 |
139 | _specs = [s.conda_build_form() for s in _specs]
140 | try:
141 | # We only create fresh environments in builds and can ignore unlink precs.
142 | _, link_precs = solver.solve_for_unlink_link_precs(_specs, prefix)
143 | except RuntimeError as e:
144 | conflict_packages = parse_problems(str(e))
145 |
146 | # we need to throw this exception for conda-build so it continues to search
147 | # the build tree
148 | err = DependencyNeedsBuildingError(packages=[str(x) for x in conflict_packages])
149 | err.matchspecs = conflict_packages
150 | err.subdir = subdir
151 | raise err
152 |
153 | return link_precs
154 |
155 |
156 | if hasattr(conda_build.environ, "get_package_records"):
157 | # conda-build>=24.1 avoids the legacy "actions"/"Dist"-based installs.
158 | conda_build.environ.get_package_records = mamba_get_package_records
159 | else:
160 | # conda-build<24.1 needs get_package_records' result wrapped in "actions" dict.
161 | def mamba_get_install_actions(
162 | prefix,
163 | specs,
164 | env,
165 | retries=0,
166 | subdir=None,
167 | verbose=True,
168 | debug=False,
169 | locking=True,
170 | bldpkgs_dirs=None,
171 | timeout=900,
172 | disable_pip=False,
173 | max_env_retry=3,
174 | output_folder=None,
175 | channel_urls=None,
176 | ):
177 | from conda.models.dist import Dist
178 | from conda.plan import get_blank_actions
179 |
180 | link_precs = mamba_get_package_records(
181 | prefix=prefix,
182 | specs=specs,
183 | env=env,
184 | retries=retries,
185 | subdir=subdir,
186 | verbose=verbose,
187 | debug=debug,
188 | locking=locking,
189 | bldpkgs_dirs=bldpkgs_dirs,
190 | timeout=timeout,
191 | disable_pip=disable_pip,
192 | max_env_retry=max_env_retry,
193 | output_folder=output_folder,
194 | channel_urls=channel_urls,
195 | )
196 | actions = get_blank_actions(prefix)
197 | actions["LINK"].extend(Dist(prec) for prec in link_precs)
198 | return actions
199 |
200 | conda_build.environ.get_install_actions = mamba_get_install_actions
201 |
202 |
203 | def prepare(**kwargs):
204 | """
205 | Prepare and configure the stage for mambabuild to run.
206 |
207 | The given **kwargs are passed to conda-build's Config which
208 | is the value returned by this function.
209 | """
210 | config = Config(**kwargs)
211 | config.channel_urls = get_channel_urls(kwargs)
212 |
213 | init_api_context()
214 |
215 | config.output_folder = os.path.abspath(config.output_folder)
216 | if not os.path.exists(config.output_folder):
217 | mkdir_p(config.output_folder)
218 |
219 | print(f"Updating build index: {(config.output_folder)}\n")
220 | update_index(config.output_folder, verbose=config.debug, threads=1)
221 |
222 | return config
223 |
224 |
225 | def call_conda_build(action, config, **kwargs):
226 | """
227 | After having set up the stage for boa's mambabuild to
228 | use the mamba solver, we delegate the work of building
229 | the conda package back to conda-build.
230 |
231 | Args:
232 | action: "build" or "test"
233 | config: conda-build's Config
234 |
235 | Kwargs:
236 | additional keyword arguments are passed to conda-build
237 |
238 | Return:
239 | The result of conda-build's build: the built packages
240 | """
241 | recipe = config.recipe[0]
242 |
243 | if action == "output":
244 | suppress_stdout()
245 | result = api.get_output_file_paths(recipe, config=config, **kwargs)
246 | print("\n".join(sorted(result)))
247 | elif action == "test":
248 | failed_recipes = []
249 | recipes = [
250 | item
251 | for sublist in [
252 | glob(os.path.abspath(recipe)) if "*" in recipe else [recipe]
253 | for recipe in config.recipe
254 | ]
255 | for item in sublist
256 | ]
257 | for recipe in recipes:
258 | try:
259 | result = api.test(recipe, config=config, **kwargs)
260 | except Exception:
261 | failed_recipes.append(recipe)
262 | continue
263 | if failed_recipes:
264 | print("Failed recipes:")
265 | for recipe in failed_recipes:
266 | print(" - %s" % recipe)
267 | sys.exit(len(failed_recipes))
268 | else:
269 | print("All tests passed")
270 | result = []
271 |
272 | elif action == "build":
273 | result = api.build(
274 | recipe,
275 | post=config.post,
276 | build_only=config.build_only,
277 | notest=config.notest,
278 | config=config,
279 | variants=config.variants,
280 | **kwargs,
281 | )
282 | else:
283 | raise ValueError("action should be 'build' or 'test', got: %r" % action)
284 |
285 | return result
286 |
287 |
288 | def main():
289 | boa_config.is_mambabuild = True
290 | _, args = parse_args(sys.argv[1:])
291 |
292 | config = prepare(**args.__dict__)
293 |
294 | if args.test:
295 | action = "test"
296 | elif args.output:
297 | action = "output"
298 | else:
299 | action = "build"
300 |
301 | call_conda_build(action, config)
302 |
--------------------------------------------------------------------------------
/boa/cli/test.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 | from boa.core.run_build import initialize_conda_build_config
4 | from boa.core.test import run_test
5 |
6 | from rich.console import Console
7 |
8 | console = Console()
9 |
10 |
11 | def main(args):
12 | stats = {}
13 | config = initialize_conda_build_config(args)
14 |
15 | run_test(
16 | args.target,
17 | config,
18 | stats,
19 | move_broken=False,
20 | provision_only=False,
21 | extra_deps=getattr(args, "extra_deps", []),
22 | )
23 |
--------------------------------------------------------------------------------
/boa/cli/transmute.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | from glob import glob
5 | import os
6 | from pathlib import Path
7 | from math import log
8 | from libmambapy import transmute as mamba_transmute
9 | from joblib import Parallel, delayed
10 |
11 | from rich.console import Console
12 |
13 | console = Console()
14 |
15 | unit_list = list(zip(["bytes", "kB", "MB", "GB", "TB", "PB"], [0, 0, 1, 2, 2, 2]))
16 |
17 |
18 | def sizeof_fmt(num):
19 | """Human friendly file size"""
20 | if num > 1:
21 | exponent = min(int(log(num, 1024)), len(unit_list) - 1)
22 | quotient = float(num) / 1024**exponent
23 | unit, num_decimals = unit_list[exponent]
24 | format_string = "{:.%sf} {}" % (num_decimals)
25 | return format_string.format(quotient, unit)
26 | if num == 0:
27 | return "0 bytes"
28 | if num == 1:
29 | return "1 byte"
30 |
31 |
32 | def transmute_task(f, args):
33 | filename = os.path.basename(f)
34 |     outpath = os.path.abspath(args.output_folder)
35 |
36 | if f.endswith(".tar.bz2"):
37 | filename = filename[:-8]
38 | outfile = os.path.join(outpath, filename + ".conda")
39 | elif f.endswith(".conda"):
40 | filename = filename[:-6]
41 | outfile = os.path.join(outpath, filename + ".tar.bz2")
42 |     else:
43 |         raise ValueError(f"Transmute can only handle .tar.bz2 and .conda formats, got: {f}")
44 |
45 | console.print(f"Processing {filename}")
46 | mamba_transmute(f, outfile, args.compression_level)
47 |
48 | stat_before = Path(f).stat()
49 | stat_after = Path(outfile).stat()
50 |
51 | saved_percent = 1.0 - (stat_after.st_size / stat_before.st_size)
52 | color = "[bold green]" if saved_percent > 0 else "[bold red]"
53 |
54 | return filename, outfile, stat_before, stat_after, saved_percent, color
55 |
56 |
57 | def main(args):
58 | # from libmambapy import Context
59 | # api_ctx = Context()
60 | # api_ctx.set_verbosity(1)
61 |
62 | files = args.files
63 | final_files = []
64 |
65 |     if not os.path.exists(args.output_folder):
66 |         Path(args.output_folder).mkdir(parents=True, exist_ok=True)
67 |
68 | for f in files:
69 | final_files += [os.path.abspath(fx) for fx in glob(f)]
70 |
71 | logs = Parallel(n_jobs=args.num_jobs)(
72 | delayed(transmute_task)(f, args) for f in final_files
73 | )
74 |
75 | for filename, outfile, stat_before, stat_after, saved_percent, color in logs:
76 | console.print(f"\nConverting [bold]{filename}")
77 | console.print(f"Done: [bold]{outfile}")
78 | console.print(f" Before : {sizeof_fmt(stat_before.st_size)}")
79 | console.print(f" After : {sizeof_fmt(stat_after.st_size)}")
80 | console.print(f" Difference: {color}{saved_percent * 100:.2f}%")
81 |
--------------------------------------------------------------------------------
/boa/cli/validate.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | from boa.core.validation import validate, ValidationError, SchemaError
5 | from boa.core.render import render
6 | from boa.core.utils import get_config
7 |
8 | from rich.console import Console
9 |
10 | console = Console()
11 |
12 |
13 | def main(recipe):
14 | cbc, config = get_config(recipe)
15 | ydoc = render(recipe, config, is_pyproject_recipe=recipe.endswith(".toml"))
16 | console.print("\n\nNormalized Recipe:\n")
17 | console.print(ydoc)
18 | try:
19 | result = validate(ydoc)
20 | if result is None:
21 | console.print("\n[green]Validation OK[/green]")
22 | except ValidationError:
23 | exit(1)
24 | except SchemaError:
25 | exit(1)
26 |
--------------------------------------------------------------------------------
/boa/core/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mamba-org/boa/183fef0cf7d18502da246a85379bd8e6423b1fb8/boa/core/__init__.py
--------------------------------------------------------------------------------
/boa/core/conda_build_spec.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | import re
5 |
6 | from dataclasses import dataclass
7 | from typing import Tuple, Optional, Union
8 |
9 | from conda_build.utils import apply_pin_expressions
10 |
11 |
12 | @dataclass
13 | class PinSubpackage:
14 | name: str
15 | max_pin: str
16 | exact: bool
17 |
18 | def __init__(self, splitted):
19 | max_pin, exact = splitted[1][len("PIN_SUBPACKAGE") + 1 : -1].split(",")
20 | self.max_pin = max_pin
21 | self.exact = exact == "True"
22 | self.name = splitted[0]
23 |
24 |
25 | class PinCompatible:
26 | name: str
27 | lower_bound: Optional[str] = None
28 | upper_bound: Optional[str] = None
29 | min_pin: str
30 | max_pin: str
31 | exact: bool
32 |
33 | def __init__(self, splitted):
34 | lower_bound, upper_bound, min_pin, max_pin, exact = splitted[1][
35 | len("PIN_COMPATIBLE") + 1 : -1
36 | ].split(",")
37 | if lower_bound == "None":
38 | lower_bound = None
39 | if upper_bound == "None":
40 | upper_bound = None
41 |
42 | self.lower_bound = lower_bound
43 | self.upper_bound = upper_bound
44 | self.min_pin = min_pin
45 | self.max_pin = max_pin
46 | self.exact = exact == "True"
47 |
48 |
49 | @dataclass
50 | class CondaBuildSpec:
51 | name: str
52 | raw: str
53 | splitted: Tuple[str]
54 |     pin: Optional[Union[PinSubpackage, PinCompatible]] = None
55 |
56 | is_inherited: bool = False
57 | is_compiler: bool = False
58 | is_transitive_dependency: bool = False
59 | channel: str = ""
60 | # final: String
61 |
62 | from_run_export: bool = False
63 | from_pinnings: bool = False
64 |
65 | def __init__(self, ms, is_inherited=False):
66 | self.raw = ms
67 | self.splitted = ms.split()
68 | self.name = self.splitted[0]
69 |
70 | is_pin = False
71 | if len(self.splitted) > 1:
72 | is_pin = self.splitted[1].startswith("PIN_")
73 | self.is_compiler = self.splitted[0].startswith("COMPILER_")
74 |
75 | self.is_inherited = is_inherited
76 | self.is_simple = len(self.splitted) == 1
77 | self.final = self.raw
78 |
79 | if is_pin:
80 | is_pin_compatible = self.splitted[1].startswith("PIN_COMPATIBLE")
81 | is_pin_subpackage = self.splitted[1].startswith("PIN_SUBPACKAGE")
82 |
83 | if is_pin_compatible:
84 | self.final[len("PIN_COMPATIBLE") + 1 : -1]
85 | self.pin = PinCompatible(self.splitted)
86 | elif is_pin_subpackage:
87 | self.pin = PinSubpackage(self.splitted)
88 | else:
89 | raise RuntimeError("could nto parse pin (" + self.splitted[1] + ")")
90 |
91 | @property
92 | def is_pin(self):
93 | return self.pin is not None
94 |
95 | @property
96 | def is_pin_compatible(self):
97 | return isinstance(self.pin, PinCompatible)
98 |
99 | @property
100 | def is_pin_subpackage(self):
101 | return isinstance(self.pin, PinSubpackage)
102 |
103 | @property
104 | def final_name(self):
105 | return self.final.split(" ")[0]
106 |
107 | @property
108 | def final_pin(self):
109 | if hasattr(self, "final_version"):
110 | return f"{self.final_name} {self.final_version[0]} {self.final_version[1]}"
111 | else:
112 | return self.final
113 |
114 | @property
115 | def final_triplet(self):
116 | return f"{self.final_name}-{self.final_version[0]}-{self.final_version[1]}"
117 |
118 | def loosen_spec(self):
119 | if self.is_compiler or self.is_pin:
120 | return
121 |
122 | if len(self.splitted) == 1:
123 | return
124 |
125 | if re.search(r"[^0-9\.]+", self.splitted[1]) is not None:
126 | return
127 |
128 | dot_c = self.splitted[1].count(".")
129 |
130 | app = "*" if dot_c >= 2 else ".*"
131 |
132 | if len(self.splitted) == 3:
133 | self.final = (
134 | f"{self.splitted[0]} {self.splitted[1]}{app} {self.splitted[2]}"
135 | )
136 | else:
137 | self.final = f"{self.splitted[0]} {self.splitted[1]}{app}"
138 |
139 | def __repr__(self):
140 | self.loosen_spec()
141 | return self.final
142 |
143 | def eval_pin_subpackage(self, all_outputs):
144 | pkg_name = self.name
145 | output = None
146 |
147 | # TODO are we pinning the right version if building multiple variants?!
148 | for o in all_outputs:
149 | if o.name == pkg_name:
150 | output = o
151 | break
152 |
153 | if not output:
154 | raise RuntimeError(f"Could not find output with name {pkg_name}")
155 |
156 | version = output.version
157 | build_string = output.final_build_id
158 |
159 | if self.is_pin and self.pin.exact:
160 | self.final = f"{pkg_name} {version} {build_string}"
161 | else:
162 | version_parts = version.split(".")
163 | count_pin = self.pin.max_pin.count(".")
164 | version_pin = ".".join(version_parts[: count_pin + 1])
165 | version_pin += ".*"
166 | self.final = f"{pkg_name} {version_pin}"
167 |
168 | def eval_pin_compatible(self, build, host):
169 | versions = {b.name: b for b in build}
170 | versions.update({h.name: h for h in host})
171 |
172 | compatibility = None
173 | if versions:
174 | if self.pin.exact and versions.get(self.name):
175 | compatibility = " ".join(versions[self.name].final_version)
176 | else:
177 | version = (
178 | self.pin.lower_bound or versions.get(self.name).final_version[0]
179 | )
180 | if version:
181 | if self.pin.upper_bound:
182 | if self.pin.min_pin or self.pin.lower_bound:
183 | compatibility = ">=" + str(version) + ","
184 | compatibility += "<{upper_bound}".format(
185 | upper_bound=self.pin.upper_bound
186 | )
187 | else:
188 | compatibility = apply_pin_expressions(
189 | version, self.pin.min_pin, self.pin.max_pin
190 | )
191 |
192 | self.final = (
193 | " ".join((self.name, compatibility))
194 | if compatibility is not None
195 | else self.name
196 | )
197 |
--------------------------------------------------------------------------------
/boa/core/config.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | from rich.console import Console
5 |
6 | boa_config = None
7 |
8 |
9 | class BoaConfig:
10 | console = Console()
11 | json: bool = False
12 | debug: bool = False
13 | quiet: bool = False
14 | is_mambabuild = False
15 |
16 | def __init__(self, args=None):
17 | if args and getattr(args, "json", False):
18 | self.console.quiet = True
19 | self.json = True
20 |
21 | if args and getattr(args, "quiet", False):
22 | self.console.quiet = True
23 | self.quiet = True
24 |
25 | if args and getattr(args, "debug", False):
26 | self.debug = args.debug
27 |
28 |
29 | def init_global_config(args=None):
30 | global boa_config
31 | boa_config = BoaConfig(args)
32 |
33 |
34 | if not boa_config:
35 | init_global_config()
36 |
--------------------------------------------------------------------------------
/boa/core/environ.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | import os
3 | import sys
4 |
5 | from conda_build.environ import (
6 | conda_build_vars,
7 | python_vars,
8 | perl_vars,
9 | lua_vars,
10 | r_vars,
11 | system_vars,
12 | feature_list,
13 | LANGUAGES,
14 | )
15 | from conda_build.os_utils import external
16 | from conda_build.environ import get_git_info, get_hg_build_info, verify_git_repo
17 | from conda_build import utils
18 |
19 |
20 | def meta_vars(meta, skip_build_id=False):
21 | d = {}
22 | for key, value in meta.get_value("build/script_env", {}).items():
23 | if not value:
24 | warnings.warn(
25 | f"The environment variable '{key}' is undefined.",
26 | UserWarning,
27 | stacklevel=1,
28 | )
29 | else:
30 | d[key] = value
31 |
32 | folder = meta.get_value("source/0/folder", "")
33 | repo_dir = os.path.join(meta.config.work_dir, folder)
34 | git_dir = os.path.join(repo_dir, ".git")
35 | hg_dir = os.path.join(repo_dir, ".hg")
36 |
37 | if not isinstance(git_dir, str):
38 | # On Windows, subprocess env can't handle unicode.
39 | git_dir = git_dir.encode(sys.getfilesystemencoding() or "utf-8")
40 |
41 | git_exe = external.find_executable("git", meta.config.build_prefix)
42 | if git_exe and os.path.exists(git_dir):
43 | # We set all 'source' metavars using the FIRST source entry in meta.yaml.
44 | git_url = meta.get_value("source/0/git_url")
45 |
46 | if os.path.exists(git_url):
47 | if sys.platform == "win32":
48 | git_url = utils.convert_unix_path_to_win(git_url)
49 | # If git_url is a relative path instead of a url, convert it to an abspath
50 | git_url = os.path.normpath(os.path.join(meta.path, git_url))
51 |
52 | _x = False
53 |
54 | if git_url:
55 | _x = verify_git_repo(
56 | git_exe,
57 | git_dir,
58 | git_url,
59 | meta.config.git_commits_since_tag,
60 | meta.config.debug,
61 | meta.get_value("source/0/git_rev", "HEAD"),
62 | )
63 |
64 | if _x or meta.get_value("source/0/path"):
65 | d.update(get_git_info(git_exe, git_dir, meta.config.debug))
66 |
67 | elif external.find_executable("hg", meta.config.build_prefix) and os.path.exists(
68 | hg_dir
69 | ):
70 | d.update(get_hg_build_info(hg_dir))
71 |
72 | # use `get_value` to prevent early exit while name is still unresolved during rendering
73 | d["PKG_NAME"] = meta.get_value("package/name")
74 | d["PKG_VERSION"] = meta.version()
75 | d["PKG_BUILDNUM"] = str(meta.build_number())
76 | if meta.final and not skip_build_id:
77 | d["PKG_BUILD_STRING"] = str(meta.build_id())
78 | d["PKG_HASH"] = meta.hash_dependencies()
79 | else:
80 | d["PKG_BUILD_STRING"] = "placeholder"
81 | d["PKG_HASH"] = "1234567"
82 | d["RECIPE_DIR"] = meta.path
83 | return d
84 |
85 |
86 | def get_dict(
87 | m,
88 | prefix=None,
89 | for_env=True,
90 | skip_build_id=False,
91 | escape_backslash=False,
92 | variant=None,
93 | ):
94 | if not prefix:
95 | prefix = m.config.host_prefix
96 |
97 | m.config._merge_build_host = m.build_is_host
98 |
99 | # conda-build specific vars
100 | d = conda_build_vars(prefix, m.config)
101 |
102 | # languages
103 | d.update(python_vars(m, prefix, escape_backslash))
104 | d.update(perl_vars(m, prefix, escape_backslash))
105 | d.update(lua_vars(m, prefix, escape_backslash))
106 | d.update(r_vars(m, prefix, escape_backslash))
107 |
108 | if m:
109 | d.update(meta_vars(m, skip_build_id=skip_build_id))
110 |
111 | # system
112 | d.update(system_vars(d, m, prefix))
113 |
114 | # features
115 | d.update({feat.upper(): str(int(value)) for feat, value in feature_list})
116 |
117 | variant = variant or m.config.variant
118 | for k, v in variant.items():
119 | if not for_env or (k.upper() not in d and k.upper() not in LANGUAGES):
120 | d[k] = v
121 | return d
122 |
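123 | 
124 | # Hedged call-shape sketch (not executed here): `m` is a rendered metadata
125 | # object; get_dict() assembles the full environment for the build scripts.
126 | #
127 | #   env = get_dict(m, variant={"python": "3.10"})
128 | #   assert env["PKG_NAME"] == m.get_value("package/name")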
--------------------------------------------------------------------------------
/boa/core/jinja_support.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | import os
5 | from functools import partial
6 | from conda_build.jinja_context import cdt
7 |
8 |
9 | def pin_subpackage(name, max_pin="x.x.x.x.x", exact=False):
10 | return f"{name} PIN_SUBPACKAGE[{max_pin},{exact}]"
11 |
12 |
13 | def pin_compatible(
14 | name,
15 | lower_bound=None,
16 | upper_bound=None,
17 | min_pin="x.x.x.x.x.x",
18 | max_pin="x",
19 | exact=False,
20 | ):
21 | return f"{name} PIN_COMPATIBLE[{lower_bound},{upper_bound},{min_pin},{max_pin},{exact}]"
22 |
23 |
24 | def compiler(language):
25 | return f"COMPILER_{language.upper()} {language}"
26 |
27 |
28 | def jinja_functions(config, context_dict):
29 | return {
30 | "pin_subpackage": pin_subpackage,
31 | "pin_compatible": pin_compatible,
32 | "cdt": partial(cdt, config=config, permit_undefined_jinja=False),
33 | "compiler": compiler,
34 | "environ": os.environ,
35 | }
36 |
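37 | 
38 | if __name__ == "__main__":
39 |     # Small demonstration: these jinja helpers emit sentinel strings that
40 |     # boa resolves later during the build, rather than final pins.
41 |     print(pin_subpackage("libfoo", max_pin="x.x"))  # -> libfoo PIN_SUBPACKAGE[x.x,False]
42 |     print(compiler("cxx"))  # -> COMPILER_CXX cxx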
--------------------------------------------------------------------------------
/boa/core/monkey_patch_emscripten.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 |
5 | def patch():
6 | ###############################################
7 | # CONDA MONKEY-PATCH
8 | ###############################################
9 | from conda.base import constants
10 |
11 | KNOWN_SUBDIRS = PLATFORM_DIRECTORIES = (
12 | "noarch",
13 | "linux-32",
14 | "linux-64",
15 | "linux-aarch64",
16 | "linux-armv6l",
17 | "linux-armv7l",
18 | "linux-ppc64",
19 | "linux-ppc64le",
20 | "linux-s390x",
21 | "osx-64",
22 | "osx-arm64",
23 | "win-32",
24 | "win-64",
25 | "zos-z",
26 | "emscripten-32",
27 | )
28 | constants.KNOWN_SUBDIRS = KNOWN_SUBDIRS
29 | constants.PLATFORM_DIRECTORIES = PLATFORM_DIRECTORIES
30 |
31 | ###############################################
32 | # CONDA-BUILD MONKEY-PATCH
33 | ###############################################
34 |
35 | from conda.base.context import non_x86_machines as non_x86_linux_machines
36 |
37 | from conda_build import utils, variants, environ
38 | from conda_build import metadata
39 | from conda_build.features import feature_list
40 |
41 | def ns_cfg(config):
42 |         # Remember to update the docs if any of this changes
43 | plat = config.host_subdir
44 | d = dict(
45 | linux=plat.startswith("linux-"),
46 | linux32=bool(plat == "linux-32"),
47 | linux64=bool(plat == "linux-64"),
48 | arm=plat.startswith("linux-arm"),
49 | osx=plat.startswith("osx-"),
50 | emscripten=plat.startswith("emscripten-"),
51 | emscripten32=bool(plat == "emscripten-32"),
52 | emscripten64=bool(plat == "emscripten-64"),
53 | unix=plat.startswith(("linux-", "osx-", "emscripten-")),
54 | win=plat.startswith("win-"),
55 | win32=bool(plat == "win-32"),
56 | win64=bool(plat == "win-64"),
57 | x86=plat.endswith(("-32", "-64")),
58 | x86_64=plat.endswith("-64"),
59 | os=os,
60 | environ=os.environ,
61 | nomkl=bool(int(os.environ.get("FEATURE_NOMKL", False))),
62 | )
63 |
64 | defaults = variants.get_default_variant(config)
65 | py = config.variant.get("python", defaults["python"])
66 | # there are times when python comes in as a tuple
67 | if not hasattr(py, "split"):
68 | py = py[0]
69 | # go from "3.6 *_cython" -> "36"
70 | # or from "3.6.9" -> "36"
71 | py = int("".join(py.split(" ")[0].split(".")[:2]))
72 |
73 | d["build_platform"] = config.build_subdir
74 |
75 | d.update(
76 | dict(
77 | py=py,
78 | py3k=bool(30 <= py < 40),
79 | py2k=bool(20 <= py < 30),
80 | py26=bool(py == 26),
81 | py27=bool(py == 27),
82 | py33=bool(py == 33),
83 | py34=bool(py == 34),
84 | py35=bool(py == 35),
85 | py36=bool(py == 36),
86 | )
87 | )
88 |
89 | np = config.variant.get("numpy")
90 | if not np:
91 | np = defaults["numpy"]
92 | if config.verbose:
93 |             utils.get_logger(__name__).warning(
94 | "No numpy version specified in conda_build_config.yaml. "
95 | "Falling back to default numpy value of {}".format(
96 | defaults["numpy"]
97 | )
98 | )
99 | d["np"] = int("".join(np.split(".")[:2]))
100 |
101 | pl = config.variant.get("perl", defaults["perl"])
102 | d["pl"] = pl
103 |
104 | lua = config.variant.get("lua", defaults["lua"])
105 | d["lua"] = lua
106 | d["luajit"] = bool(lua[0] == "2")
107 |
108 | for machine in non_x86_linux_machines:
109 | d[machine] = bool(plat.endswith("-%s" % machine))
110 |
111 | for feature, value in feature_list:
112 | d[feature] = value
113 | d.update(os.environ)
114 |
115 | # here we try to do some type conversion for more intuitive usage. Otherwise,
116 | # values like 35 are strings by default, making relational operations confusing.
117 | # We also convert "True" and things like that to booleans.
118 | for k, v in config.variant.items():
119 | if k not in d:
120 | try:
121 | d[k] = int(v)
122 | except (TypeError, ValueError):
123 | if isinstance(v, str) and v.lower() in ("false", "true"):
124 | v = v.lower() == "true"
125 | d[k] = v
126 | return d
127 |
128 | metadata.ns_cfg = ns_cfg
129 |
130 | DEFAULT_SUBDIRS = {
131 | "linux-64",
132 | "linux-32",
133 | "linux-s390x",
134 | "linux-ppc64",
135 | "linux-ppc64le",
136 | "linux-armv6l",
137 | "linux-armv7l",
138 | "linux-aarch64",
139 | "win-64",
140 | "win-32",
141 | "osx-64",
142 | "osx-arm64",
143 | "zos-z",
144 | "noarch",
145 | "emscripten-32",
146 | }
147 |
148 | utils.DEFAULT_SUBDIRS = DEFAULT_SUBDIRS
149 |
150 | def get_shlib_ext(host_platform):
151 | # Return the shared library extension.
152 | if host_platform.startswith("win"):
153 | return ".dll"
154 | elif host_platform in ["osx", "darwin"]:
155 | return ".dylib"
156 | elif host_platform.startswith("linux") or host_platform.startswith(
157 | "emscripten"
158 | ):
159 | return ".so"
160 | elif host_platform == "noarch":
161 | # noarch packages should not contain shared libraries, use the system
162 | # platform if this is requested
163 | return get_shlib_ext(sys.platform)
164 | else:
165 | raise NotImplementedError(host_platform)
166 |
167 | environ.get_shlib_ext = get_shlib_ext
168 |
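169 | 
170 | if __name__ == "__main__":
171 |     # Hedged sketch: patch() must run before conda consumes its subdir
172 |     # tables, so callers invoke it as early as possible.
173 |     patch()
174 |     from conda.base import constants
175 |     print("emscripten-32" in constants.KNOWN_SUBDIRS)  # -> True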
--------------------------------------------------------------------------------
/boa/core/recipe_handling.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | import os
5 | import re
6 | import time
7 | from os.path import exists, isdir, join
8 | from subprocess import CalledProcessError
9 |
10 | import yaml
11 | from bs4 import UnicodeDammit
12 |
13 | from conda.gateways.disk.create import mkdir_p
14 | from conda_build import utils
15 | from conda_build.utils import check_output_env, get_logger
16 |
17 | from boa import __version__ as boa_version
18 |
19 |
20 | def get_repository_info(recipe_path):
21 | """This tries to get information about where a recipe came from. This is different
22 | from the source - you can have a recipe in svn that gets source via git."""
23 | try:
24 | if exists(join(recipe_path, ".git")):
25 | origin = check_output_env(
26 | ["git", "config", "--get", "remote.origin.url"], cwd=recipe_path
27 | )
28 | rev = check_output_env(["git", "rev-parse", "HEAD"], cwd=recipe_path)
29 | return "Origin {}, commit {}".format(origin, rev)
30 | elif isdir(join(recipe_path, ".hg")):
31 | origin = check_output_env(["hg", "paths", "default"], cwd=recipe_path)
32 | rev = check_output_env(["hg", "id"], cwd=recipe_path).split()[0]
33 | return "Origin {}, commit {}".format(origin, rev)
34 | elif isdir(join(recipe_path, ".svn")):
35 | info = check_output_env(["svn", "info"], cwd=recipe_path)
36 | info = info.decode(
37 | "utf-8"
38 | ) # Py3 returns a byte string, but re needs unicode or str.
39 | server = re.search("Repository Root: (.*)$", info, flags=re.M).group(1)
40 | revision = re.search("Revision: (.*)$", info, flags=re.M).group(1)
41 | return "{}, Revision {}".format(server, revision)
42 | else:
43 | return "{}, last modified {}".format(
44 | recipe_path,
45 | time.ctime(os.path.getmtime(join(recipe_path, "recipe.yaml"))),
46 | )
47 | except CalledProcessError:
48 | get_logger(__name__).debug("Failed to checkout source in " + recipe_path)
49 | return "{}, last modified {}".format(
50 | recipe_path, time.ctime(os.path.getmtime(join(recipe_path, "recipe.yaml")))
51 | )
52 |
53 |
54 | def _copy_top_level_recipe(path, config, dest_dir, destination_subdir=None):
55 | files = utils.rec_glob(path, "*")
56 | file_paths = sorted([f.replace(path + os.sep, "") for f in files])
57 |
58 |     # when this actually has a value, we're copying the top-level recipe into a subdirectory,
59 |     # so that we have a record of which parent recipe produced the subpackages.
60 | if destination_subdir:
61 | dest_dir = join(dest_dir, destination_subdir)
62 | else:
63 | # exclude recipe.yaml because the json dictionary captures its content
64 | file_paths = [
65 | f
66 | for f in file_paths
67 | if not (f == "recipe.yaml" or f == "conda_build_config.yaml")
68 | ]
69 | file_paths = utils.filter_files(file_paths, path)
70 | for f in file_paths:
71 | utils.copy_into(
72 | join(path, f),
73 | join(dest_dir, f),
74 | timeout=config.timeout,
75 | locking=config.locking,
76 | clobber=True,
77 | )
78 |
79 |
80 | def _copy_output_recipe(m, dest_dir):
81 | _copy_top_level_recipe(m.path, m.config, dest_dir, "parent")
82 |
83 | this_output = m.get_rendered_output(m.name()) or {}
84 | install_script = this_output.get("script")
85 | build_inputs = []
86 | inputs = [install_script] + build_inputs
87 | file_paths = [script for script in inputs if script]
88 | file_paths = utils.filter_files(file_paths, m.path)
89 |
90 | for f in file_paths:
91 | utils.copy_into(
92 | join(m.path, f),
93 | join(dest_dir, f),
94 | timeout=m.config.timeout,
95 | locking=m.config.locking,
96 | clobber=True,
97 | )
98 |
99 |
100 | def output_yaml(metadata, filename=None, suppress_outputs=False):
101 | local_metadata = metadata.rendered_meta().copy()
102 | if suppress_outputs and metadata.is_output and "outputs" in local_metadata:
103 | del local_metadata["outputs"]
104 |
105 |     output = yaml.dump(local_metadata, default_flow_style=False, indent=4)
106 | if filename:
107 | if any(sep in filename for sep in ("\\", "/")):
108 | mkdir_p(os.path.dirname(filename))
109 | with open(filename, "w") as f:
110 | f.write(output)
111 | return "Wrote yaml to %s" % filename
112 | else:
113 | return output
114 |
115 |
116 | def copy_recipe(m):
117 | if m.config.include_recipe and m.include_recipe():
118 | # store the rendered recipe.yaml file, plus information about where it came from
119 | # and what version of conda-build created it
120 | recipe_dir = join(m.config.info_dir, "recipe")
121 | mkdir_p(recipe_dir)
122 |
123 | original_recipe = ""
124 |
125 | if m.is_output:
126 | _copy_output_recipe(m, recipe_dir)
127 | else:
128 | _copy_top_level_recipe(m.path, m.config, recipe_dir)
129 | if exists(m.meta_path):
130 | original_recipe = m.meta_path
131 |
132 | output_metadata = m.copy()
133 | # hard code the build string, so that tests don't get it mixed up
134 | build = output_metadata.meta.get("build", {})
135 | build["string"] = output_metadata.build_id()
136 | output_metadata.meta["build"] = build
137 |
138 |     # to avoid confusion, don't show outputs in final rendered recipes
139 | if "outputs" in output_metadata.meta:
140 | del output_metadata.meta["outputs"]
141 | if "parent_recipe" in output_metadata.meta.get("extra", {}):
142 | del output_metadata.meta["extra"]["parent_recipe"]
143 |
144 | utils.sort_list_in_nested_structure(
145 | output_metadata.meta, ("build/script", "test/commands")
146 | )
147 |
148 | rendered = output_yaml(output_metadata)
149 |
150 | if original_recipe:
151 | with open(original_recipe, "rb") as f:
152 | original_recipe_text = UnicodeDammit(f.read()).unicode_markup
153 |
154 |     if not original_recipe or original_recipe_text != rendered:
155 | with open(join(recipe_dir, "recipe.yaml"), "w") as f:
156 | f.write("# This file created by boa {}\n".format(boa_version))
157 | if original_recipe:
158 | f.write("# recipe.yaml template originally from:\n")
159 | f.write("# " + get_repository_info(m.path) + "\n")
160 | f.write("# ------------------------------------------------\n\n")
161 | f.write(rendered)
162 | if original_recipe:
163 | utils.copy_into(
164 | original_recipe,
165 | os.path.join(recipe_dir, "recipe.yaml.template"),
166 | timeout=m.config.timeout,
167 | locking=m.config.locking,
168 | clobber=True,
169 | )
170 |
171 | # dump the full variant in use for this package to the recipe folder
172 | with open(os.path.join(recipe_dir, "conda_build_config.yaml"), "w") as f:
173 | yaml.dump(m.config.variant, f)
174 |
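175 | 
176 | if __name__ == "__main__":
177 |     # Small illustration (run from a checkout of this repository): report
178 |     # where a recipe directory came from, falling back to the last-modified
179 |     # time when it is not a git/hg/svn checkout.
180 |     print(get_repository_info("tests/recipes-v2/xtensor"))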
--------------------------------------------------------------------------------
/boa/core/render.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | from ruamel.yaml import YAML
5 | import jinja2
6 | import os
7 | from boa.core.jinja_support import jinja_functions
8 | from conda_build.metadata import eval_selector, ns_cfg
9 | from collections.abc import Mapping, Iterable
10 |
11 | from boa.core.config import boa_config
12 |
13 | console = boa_config.console
14 |
15 |
16 | def render_recursive(dict_or_array, context_dict, jenv):
17 |     # recurse into mappings and sequences, rendering every string value with jinja
18 | if isinstance(dict_or_array, Mapping):
19 | for key, value in dict_or_array.items():
20 | if isinstance(value, str):
21 | tmpl = jenv.from_string(value)
22 | dict_or_array[key] = tmpl.render(context_dict)
23 | elif isinstance(value, Mapping):
24 | render_recursive(dict_or_array[key], context_dict, jenv)
25 | elif isinstance(value, Iterable):
26 | render_recursive(dict_or_array[key], context_dict, jenv)
27 |
28 | elif isinstance(dict_or_array, Iterable):
29 | for i in range(len(dict_or_array)):
30 | value = dict_or_array[i]
31 | if isinstance(value, str):
32 | tmpl = jenv.from_string(value)
33 | dict_or_array[i] = tmpl.render(context_dict)
34 | elif isinstance(value, Mapping):
35 | render_recursive(value, context_dict, jenv)
36 | elif isinstance(value, Iterable):
37 | render_recursive(value, context_dict, jenv)
38 |
39 |
40 | def flatten_selectors(ydoc, namespace):
41 | if isinstance(ydoc, str):
42 | return ydoc
43 |
44 | if isinstance(ydoc, Mapping):
45 | has_sel = any(k.startswith("sel(") for k in ydoc.keys())
46 | if has_sel:
47 | for k, v in ydoc.items():
48 | selected = eval_selector(k[3:], namespace, [])
49 | if selected:
50 | return v
51 |
52 | return None
53 |
54 | for k, v in ydoc.items():
55 | ydoc[k] = flatten_selectors(v, namespace)
56 |
57 | elif isinstance(ydoc, Iterable):
58 | to_delete = []
59 | for idx, el in enumerate(ydoc):
60 | res = flatten_selectors(el, namespace)
61 | if res is None:
62 | to_delete.append(idx)
63 | else:
64 | ydoc[idx] = res
65 |
66 | if len(to_delete):
67 | ydoc = [ydoc[idx] for idx in range(len(ydoc)) if idx not in to_delete]
68 |
69 | # flatten lists if necessary
70 |         if any(isinstance(x, list) for x in ydoc):
71 | final_list = []
72 | for x in ydoc:
73 | if isinstance(x, list):
74 | final_list += x
75 | else:
76 | final_list.append(x)
77 | ydoc = final_list
78 |
79 | return ydoc
80 |
81 |
82 | def ensure_list(x):
83 |     if isinstance(x, list):
84 |         return x
85 |     else:
86 |         return [x]
87 |
88 |
89 | def normalize_recipe(ydoc):
90 |     # normalize the recipe:
91 |     # - "source" becomes a list
92 |     # - every output becomes an entry in the "steps" list
93 | if ydoc.get("context"):
94 | del ydoc["context"]
95 |
96 | if ydoc.get("source"):
97 | ydoc["source"] = ensure_list(ydoc["source"])
98 |
99 | toplevel_output = None
100 |
101 | if ydoc.get("outputs"):
102 | ydoc["steps"] = ydoc["outputs"]
103 | del ydoc["outputs"]
104 |
105 | if not ydoc.get("steps"):
106 | ydoc["steps"] = [{"package": ydoc["package"]}]
107 | toplevel_output = ydoc["steps"][0]
108 | else:
109 | for o in ydoc["steps"]:
110 | if "package" not in o:
111 | continue
112 | if not toplevel_output and o["package"]["name"] == ydoc["package"]["name"]:
113 | toplevel_output = o
114 |
115 | # merge version into steps if they don't have one
116 | if "version" not in o["package"]:
117 | o["package"]["version"] = ydoc["package"]["version"]
118 |
119 | # how do we handle no-output toplevel?!
120 | if toplevel_output is None:
121 | assert not ydoc.get("requirements")
122 |
123 | # move these under toplevel output
124 | if ydoc.get("requirements"):
125 | assert not toplevel_output.get("requirements")
126 | toplevel_output["requirements"] = ydoc["requirements"]
127 | del ydoc["requirements"]
128 |
129 | # move these under toplevel output
130 | if ydoc.get("test"):
131 | assert not toplevel_output.get("test")
132 | toplevel_output["test"] = ydoc["test"]
133 | del ydoc["test"]
134 |
135 | def move_to_toplevel(key):
136 | if ydoc.get("build", {}).get(key):
137 | if not toplevel_output.get("build"):
138 | toplevel_output["build"] = {}
139 | toplevel_output["build"][key] = ydoc["build"][key]
140 | del ydoc["build"][key]
141 |
142 | move_to_toplevel("run_exports")
143 | move_to_toplevel("ignore_run_exports")
144 | return ydoc
145 |
146 |
147 | def default_jinja_vars(config):
148 | res = {}
149 | cfg = ns_cfg(config)
150 |
151 | res["build_platform"] = cfg["build_platform"]
152 | res["target_platform"] = cfg.get("target_platform", cfg["build_platform"])
153 |
154 | tgp = res["target_platform"]
155 |
156 | if tgp.startswith("win"):
157 | prefix = "%PREFIX%"
158 | else:
159 | prefix = "$PREFIX"
160 |
161 | # this adds PYTHON, R, RSCRIPT ... etc so that they can be used in the
162 | # recipe script
163 | for lang in ["python", "lua", "r", "rscript", "perl"]:
164 | res[lang.upper()] = getattr(config, "_get_" + lang)(prefix, tgp)
165 |
166 | return res
167 |
168 |
169 | def render(recipe_path, config=None, is_pyproject_recipe=False):
170 | # console.print(f"\n[yellow]Rendering {recipe_path}[/yellow]\n")
171 | # step 1: parse YAML
172 | with open(recipe_path, "rb") as fi:
173 | if is_pyproject_recipe:
174 | try: # Python >=3.11
175 | import tomllib
176 |
177 | ydoc = tomllib.load(fi)
178 | except ImportError: # Python <3.11
179 | import toml
180 |
181 | ydoc = toml.load(fi)
182 | else:
183 | loader = YAML(typ="safe")
184 | ydoc = loader.load(fi)
185 |
186 | # step 2: fill out context dict
187 | context_dict = default_jinja_vars(config)
188 | if is_pyproject_recipe:
189 | # Use [tool.boa] section from pyproject as a recipe, everything else as the context.
190 | context_dict["pyproject"] = ydoc
191 | ydoc = ydoc["tool"]["boa"]
192 | context_dict.update(ydoc.get("context", {}))
193 | context_dict["environ"] = os.environ
194 | jenv = jinja2.Environment()
195 | for key, value in context_dict.items():
196 | if isinstance(value, str):
197 | tmpl = jenv.from_string(value)
198 | context_dict[key] = tmpl.render(context_dict)
199 |
200 | # step 3: recursively loop over the entire recipe and render jinja with context
201 | jenv.globals.update(jinja_functions(config, context_dict))
202 | for key in ydoc:
203 | render_recursive(ydoc[key], context_dict, jenv)
204 |
205 | flatten_selectors(ydoc, ns_cfg(config))
206 |
207 | # Normalize the entire recipe
208 | ydoc = normalize_recipe(ydoc)
209 | # console.print("\n[yellow]Normalized recipe[/yellow]\n")
210 | # console.print(ydoc)
211 | return ydoc
212 |
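213 | 
214 | if __name__ == "__main__":
215 |     # Minimal selector-flattening sketch (the namespace is normally built
216 |     # by ns_cfg(config); a plain dict stands in for it here):
217 |     doc = {"script": {"sel(win)": "bld.bat", "sel(unix)": "build.sh"}}
218 |     print(flatten_selectors(doc, {"win": False, "unix": True}))
219 |     # -> {'script': 'build.sh'}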
--------------------------------------------------------------------------------
/boa/core/solver.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | import os
5 | import tempfile
6 |
7 | from boltons.setutils import IndexedSet
8 |
9 | from conda.base.constants import ChannelPriority
10 | from conda.core.solve import diff_for_unlink_link_precs
11 | from conda.common.serialize import json_dump
12 | from conda.models.prefix_graph import PrefixGraph
13 | from conda.core.prefix_data import PrefixData
14 | from conda.models.match_spec import MatchSpec
15 | from conda.common.url import remove_auth, split_anaconda_token
16 | from conda.core.index import _supplement_index_with_system
17 | from conda.base.context import context
18 | from conda.core.package_cache_data import PackageCacheData
19 |
20 | import libmambapy
21 |
22 | from boa.core.utils import (
23 | get_index,
24 | load_channels,
25 | pkgs_dirs,
26 | to_package_record_from_subjson,
27 | )
28 | from boa.core.config import boa_config
29 |
30 | console = boa_config.console
31 |
32 | solver_cache = {}
33 |
34 |
35 | def refresh_solvers():
36 | for _, v in solver_cache.items():
37 | v.replace_channels()
38 |
39 |
40 | def get_solver(subdir, output_folder="local"):
41 | pkg_cache = PackageCacheData.first_writable().pkgs_dir
42 | if subdir == "noarch":
43 | subdir = context.subdir
44 | elif subdir != context.subdir:
45 | pkg_cache = os.path.join(pkg_cache, subdir)
46 | if not os.path.exists(pkg_cache):
47 | os.makedirs(pkg_cache, exist_ok=True)
48 |
49 | if not solver_cache.get(subdir):
50 | solver_cache[subdir] = MambaSolver([], subdir, output_folder)
51 |
52 | return solver_cache[subdir], pkg_cache
53 |
54 |
55 | def get_url_from_channel(c):
56 | if c.startswith("file://"):
57 | # The conda functions (specifically remove_auth) assume the input
58 | # is a url; a file uri on windows with a drive letter messes them
59 | # up.
60 | return c
61 | else:
62 | return split_anaconda_token(remove_auth(c))[0]
63 |
64 |
65 | def to_unlink_link_precs(
66 | specs_to_add, specs_to_remove, prefix, to_link, to_unlink, index
67 | ):
68 | to_link_records = []
69 |
70 | prefix_data = PrefixData(prefix)
71 | final_precs = IndexedSet(prefix_data.iter_records())
72 |
73 | lookup_dict = {}
74 | for _, entry in index:
75 | lookup_dict[
76 | entry["channel"].platform_url(entry["platform"], with_credentials=False)
77 | ] = entry
78 |
79 | assert len(to_unlink) == 0
80 |
81 | for c, pkg, jsn_s in to_link:
82 | entry = lookup_dict[get_url_from_channel(c)]
83 | rec = to_package_record_from_subjson(entry, pkg, jsn_s)
84 | final_precs.add(rec)
85 | to_link_records.append(rec)
86 |
87 | unlink_precs, link_precs = diff_for_unlink_link_precs(
88 | prefix,
89 | final_precs=IndexedSet(PrefixGraph(final_precs).graph),
90 | specs_to_add=specs_to_add,
91 | )
92 |
93 | return unlink_precs, link_precs
94 |
95 |
96 | def get_virtual_packages():
97 | result = {"packages": {}}
98 |
99 | # add virtual packages as installed packages
100 | # they are packages installed on the system that conda can do nothing
101 | # about (e.g. glibc)
102 | # if another version is needed, installation just fails
103 | # they don't exist anywhere (they start with __)
104 | installed = dict()
105 | _supplement_index_with_system(installed)
106 | installed = list(installed)
107 |
108 | for prec in installed:
109 | json_rec = prec.dist_fields_dump()
110 | json_rec["depends"] = prec.depends
111 | json_rec["build"] = prec.build
112 | result["packages"][prec.fn] = json_rec
113 |
114 | installed_json_f = tempfile.NamedTemporaryFile("w", delete=False)
115 | installed_json_f.write(json_dump(result))
116 | installed_json_f.flush()
117 | return installed_json_f
118 |
119 |
120 | class MambaSolver:
121 | def __init__(self, channels, platform, output_folder=None):
122 | self.channels = channels
123 | self.platform = platform
124 | self.output_folder = output_folder or "local"
125 | self.pool = libmambapy.Pool()
126 | self.repos = []
127 |
128 | self.index = load_channels(
129 | self.pool, self.channels, self.repos, platform=platform
130 | )
131 |
132 | # if platform == context.subdir:
133 | installed_json_f = get_virtual_packages()
134 | repo = libmambapy.Repo(self.pool, "installed", installed_json_f.name, "")
135 | repo.set_installed()
136 | self.repos.append(repo)
137 |
138 | self.local_index = []
139 | self.local_repos = {}
140 | # load local repo, too
141 | self.replace_channels()
142 |
143 | def replace_installed(self, prefix):
144 | prefix_data = libmambapy.PrefixData(prefix)
145 | vp = libmambapy.get_virtual_packages()
146 | prefix_data.add_virtual_packages(vp)
147 | prefix_data.load()
148 | repo = libmambapy.Repo(self.pool, prefix_data)
149 | repo.set_installed()
150 |
151 | def replace_channels(self):
152 | console.print(f"[blue]Reloading output folder: {self.output_folder}")
153 | self.local_index = get_index(
154 | (self.output_folder,), platform=self.platform, prepend=False
155 | )
156 |
157 | for _, v in self.local_repos.items():
158 | v.clear(True)
159 |
160 | start_prio = len(self.channels) + len(self.index)
161 | for subdir, channel in self.local_index:
162 | if not subdir.loaded():
163 | continue
164 |
165 | # support new mamba
166 | if isinstance(channel, dict):
167 | channelstr = channel["url"]
168 | channelurl = channel["url"]
169 | else:
170 | channelstr = str(channel)
171 | channelurl = channel.url(with_credentials=True)
172 |
173 | cp = subdir.cache_path()
174 | if cp.endswith(".solv"):
175 | os.remove(subdir.cache_path())
176 | cp = cp.replace(".solv", ".json")
177 |
178 | self.local_repos[channelstr] = libmambapy.Repo(
179 | self.pool, channelstr, cp, channelurl
180 | )
181 |
182 | self.local_repos[channelstr].set_priority(start_prio, 0)
183 | start_prio -= 1
184 |
185 | def solve(self, specs, pkg_cache_path=None):
186 | """Solve given a set of specs.
187 | Parameters
188 | ----------
189 | specs : list of str
190 | A list of package specs. You can use `conda.models.match_spec.MatchSpec`
191 | to get them to the right form by calling
192 | `MatchSpec(mypec).conda_build_form()`
193 | Returns
194 | -------
195 | transaction : libmambapy.Transaction
196 | The mamba transaction.
197 | Raises
198 | ------
199 | RuntimeError :
200 | If the solver did not find a solution.
201 | """
202 | solver_options = [(libmambapy.SOLVER_FLAG_ALLOW_DOWNGRADE, 1)]
203 |
204 | if context.channel_priority is ChannelPriority.STRICT:
205 | solver_options.append((libmambapy.SOLVER_FLAG_STRICT_REPO_PRIORITY, 1))
206 |
207 | api_solver = libmambapy.Solver(self.pool, solver_options)
208 | _specs = specs
209 |
210 | api_solver.add_jobs(_specs, libmambapy.SOLVER_INSTALL)
211 | success = api_solver.solve()
212 |
213 | if not success:
214 | error_string = "Mamba failed to solve:\n"
215 | for s in _specs:
216 | error_string += f" - {s}\n"
217 | error_string += "\nwith channels:\n"
218 | for c in self.channels:
219 | error_string += f" - {c}\n"
220 | pstring = api_solver.problems_to_str()
221 |
222 | pstring = "\n".join(["- " + el for el in pstring.split("\n")])
223 | error_string += f"\nThe reported errors are:\n{pstring}"
224 |
225 | # This might be the cause of segfaults, that's why it's commented out
226 | # if (
227 | # hasattr(api_solver, "explain_problems")
228 | # # can cause errors in explain_problems
229 | # and "unsupported request" not in pstring
230 | # ):
231 | # error_string += f"\n\n{api_solver.explain_problems()}"
232 |
233 | print(error_string)
234 | raise RuntimeError("Solver could not find solution." + error_string)
235 |
236 | if pkg_cache_path is None:
237 | # use values from conda
238 | pkg_cache_path = pkgs_dirs
239 |
240 | package_cache = libmambapy.MultiPackageCache(pkg_cache_path)
241 | return libmambapy.Transaction(api_solver, package_cache)
242 |
243 | def solve_for_unlink_link_precs(self, specs, prefix):
244 | t = self.solve(specs)
245 | if not boa_config.quiet and not boa_config.is_mambabuild:
246 | t.print()
247 |
248 | mmb_specs, to_link, to_unlink = t.to_conda()
249 | specs_to_add = [MatchSpec(m) for m in mmb_specs[0]]
250 | specs_to_remove = [MatchSpec(m) for m in mmb_specs[1]]
251 |
252 | return to_unlink_link_precs(
253 | specs_to_add,
254 | specs_to_remove,
255 | prefix,
256 | to_link,
257 | to_unlink,
258 | self.index + self.local_index,
259 | )
260 |
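261 | 
262 | if __name__ == "__main__":
263 |     # Hedged usage sketch (assumes network access, configured channels and a
264 |     # writable package cache): solve one spec against conda-forge for the
265 |     # current platform and print the resulting transaction.
266 |     solver = MambaSolver(["conda-forge"], context.subdir)
267 |     transaction = solver.solve(["xtensor"])
268 |     transaction.print()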
--------------------------------------------------------------------------------
/boa/core/transmute.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mamba-org/boa/183fef0cf7d18502da246a85379bd8e6423b1fb8/boa/core/transmute.py
--------------------------------------------------------------------------------
/boa/core/utils.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | from __future__ import absolute_import, division, print_function, unicode_literals
5 |
6 | import collections
7 | import sys
8 | import os
9 | import typing
10 | import json
11 | import urllib.parse
12 |
13 | from conda.base.context import context
14 | from conda_build import utils
15 | from conda_build.config import get_or_merge_config
16 | from conda_build.variants import find_config_files, parse_config_file, combine_specs
17 | from conda.base.constants import ChannelPriority
18 | from conda.gateways.connection.session import CondaHttpAuth
19 | from conda.core.index import check_allowlist
20 | from conda.models.channel import Channel as CondaChannel
21 | from conda.models.records import PackageRecord
22 | from conda.common.url import join_url
23 |
24 | from boa.core.config import boa_config
25 | import libmambapy as api
26 |
27 |
28 | if typing.TYPE_CHECKING:
29 | from typing import Any
30 | from conda_build.config import Config as CondaBuildConfig
31 |
32 |
33 | console = boa_config.console
34 |
35 | env_path_backup_var_exists = os.environ.get("CONDA_PATH_BACKUP", None)
36 | pkgs_dirs = list(context.pkgs_dirs)
37 |
38 | if "bsd" in sys.platform:
39 | shell_path = "/bin/sh"
40 | elif utils.on_win:
41 | shell_path = "bash"
42 | else:
43 | shell_path = "/bin/bash"
44 |
45 |
46 | def get_config(
47 | folder,
48 | variant=None,
49 | additional_files=None,
50 | config: "CondaBuildConfig | None" = None,
51 | ) -> "tuple[Any, CondaBuildConfig]":
52 | if not additional_files:
53 | additional_files = []
54 | if not variant:
55 | variant = {}
56 | config = get_or_merge_config(config, variant)
57 |
58 | config_files = find_config_files(folder, config)
59 | all_files = [os.path.abspath(p) for p in config_files + additional_files]
60 |
61 |     # reverse the file list and uniquify it
62 | def make_unique_list(lx):
63 | seen = set()
64 | return [x for x in lx if not (x in seen or seen.add(x))]
65 |
66 |     # we reverse the order so that the command line can override the hierarchy
67 | all_files = make_unique_list(all_files[::-1])[::-1]
68 |
69 | console.print(f"\nLoading config files: [green]{', '.join(all_files)}\n")
70 | parsed_cfg = collections.OrderedDict()
71 |
72 | for f in all_files:
73 | parsed_cfg[f] = parse_config_file(f, config)
74 |
75 | # this merges each of the specs, providing a debug message when a given setting is overridden
76 | # by a later spec
77 | combined_spec = combine_specs(parsed_cfg, log_output=config.verbose)
78 | # console.print(combined_spec)
79 |
80 | return combined_spec, config
81 |
82 |
83 | def normalize_subdir(subdir):
84 |     if subdir == "noarch":
85 |         return context.subdir
86 |     else:
87 |         return subdir
88 |
89 |
90 | def get_sys_vars_stubs(target_platform):
91 | res = ["CONDA_BUILD_SYSROOT"]
92 | if sys.platform == "win32":
93 | res += [
94 | "SCRIPTS",
95 | "LIBRARY_PREFIX",
96 | "LIBRARY_BIN",
97 | "LIBRARY_INC",
98 | "LIBRARY_LIB",
99 | "CYGWIN_PREFIX",
100 | "ALLUSERSPROFILE",
101 | "APPDATA",
102 | "CommonProgramFiles",
103 | "CommonProgramFiles(x86)",
104 | "CommonProgramW6432",
105 | "COMPUTERNAME",
106 | "ComSpec",
107 | "HOMEDRIVE",
108 | "HOMEPATH",
109 | "LOCALAPPDATA",
110 | "LOGONSERVER",
111 | "NUMBER_OF_PROCESSORS",
112 | "PATHEXT",
113 | "ProgramData",
114 | "ProgramFiles",
115 | "ProgramFiles(x86)",
116 | "ProgramW6432",
117 | "PROMPT",
118 | "PSModulePath",
119 | "PUBLIC",
120 | "SystemDrive",
121 | "SystemRoot",
122 | "TEMP",
123 | "TMP",
124 | "USERDOMAIN",
125 | "USERNAME",
126 | "USERPROFILE",
127 | "windir",
128 | "PROCESSOR_ARCHITEW6432",
129 | "PROCESSOR_ARCHITECTURE",
130 | "PROCESSOR_IDENTIFIER",
131 | "BUILD",
132 | ]
133 | else:
134 | res += ["HOME", "PKG_CONFIG_PATH", "CMAKE_GENERATOR", "SSL_CERT_FILE"]
135 |
136 | if target_platform.startswith("osx"):
137 | res += [
138 | "OSX_ARCH",
139 | "MACOSX_DEPLOYMENT_TARGET",
140 | "BUILD",
141 | "macos_machine",
142 | "macos_min_version",
143 | ]
144 | elif target_platform.startswith("linux"):
145 | res += [
146 | "CFLAGS",
147 | "CXXFLAGS",
148 | "LDFLAGS",
149 | "QEMU_LD_PREFIX",
150 | "QEMU_UNAME",
151 | "DEJAGNU",
152 | "DISPLAY",
153 | "LD_RUN_PATH",
154 | "BUILD",
155 | ]
156 | return res
157 |
158 |
159 | def get_index(
160 | channel_urls=(),
161 | prepend=True,
162 | platform=None,
163 | use_local=False,
164 | use_cache=False,
165 | unknown=None,
166 | prefix=None,
167 | repodata_fn="repodata.json",
168 | ):
169 | if isinstance(platform, str):
170 | platform = [platform, "noarch"]
171 |
172 | all_channels = []
173 | if use_local:
174 | all_channels.append("local")
175 | all_channels.extend(channel_urls)
176 | if prepend:
177 | all_channels.extend(context.channels)
178 | check_allowlist(all_channels)
179 |
180 | # Remove duplicates but retain order
181 | all_channels = list(collections.OrderedDict.fromkeys(all_channels))
182 |
183 | dlist = api.DownloadTargetList()
184 |
185 | index = []
186 |
187 | def fixup_channel_spec(spec):
188 | at_count = spec.count("@")
189 | if at_count > 1:
190 | first_at = spec.find("@")
191 | spec = (
192 | spec[:first_at]
193 | + urllib.parse.quote(spec[first_at])
194 | + spec[first_at + 1 :]
195 | )
196 | if platform:
197 | spec = spec + "[" + ",".join(platform) + "]"
198 | return spec
199 |
200 | all_channels = list(map(fixup_channel_spec, all_channels))
201 | pkgs_dirs = api.MultiPackageCache(context.pkgs_dirs)
202 | api.create_cache_dir(str(pkgs_dirs.first_writable_path))
203 |
204 | for channel in api.get_channels(all_channels):
205 | for channel_platform, url in channel.platform_urls(with_credentials=True):
206 | full_url = CondaHttpAuth.add_binstar_token(url)
207 |
208 | sd = api.SubdirData(
209 | channel, channel_platform, full_url, pkgs_dirs, repodata_fn
210 | )
211 |
212 | needs_finalising = sd.download_and_check_targets(dlist)
213 | index.append(
214 | (
215 | sd,
216 | {
217 | "platform": channel_platform,
218 | "url": url,
219 | "channel": channel,
220 | "needs_finalising": needs_finalising,
221 | },
222 | )
223 | )
224 |
225 | for sd, info in index:
226 | if info["needs_finalising"]:
227 | sd.finalize_checks()
228 | dlist.add(sd)
229 |
230 | is_downloaded = dlist.download(api.MAMBA_DOWNLOAD_FAILFAST)
231 |
232 | if not is_downloaded:
233 | raise RuntimeError("Error downloading repodata.")
234 |
235 | return index
236 |
237 |
238 | def load_channels(
239 | pool,
240 | channels,
241 | repos,
242 | has_priority=None,
243 | prepend=True,
244 | platform=None,
245 | use_local=False,
246 | use_cache=True,
247 | repodata_fn="repodata.json",
248 | ):
249 | index = get_index(
250 | channel_urls=channels,
251 | prepend=prepend,
252 | platform=platform,
253 | use_local=use_local,
254 | repodata_fn=repodata_fn,
255 | use_cache=use_cache,
256 | )
257 |
258 | if has_priority is None:
259 | has_priority = context.channel_priority in [
260 | ChannelPriority.STRICT,
261 | ChannelPriority.FLEXIBLE,
262 | ]
263 |
264 | subprio_index = len(index)
265 | if has_priority:
266 | # first, count unique channels
267 |         n_channels = len({entry["channel"].canonical_name for _, entry in index})
268 | current_channel = index[0][1]["channel"].canonical_name
269 | channel_prio = n_channels
270 |
271 | for subdir, entry in index:
272 | # add priority here
273 | if has_priority:
274 | if entry["channel"].canonical_name != current_channel:
275 | channel_prio -= 1
276 | current_channel = entry["channel"].canonical_name
277 | priority = channel_prio
278 | else:
279 | priority = 0
280 | if has_priority:
281 | subpriority = 0
282 | else:
283 | subpriority = subprio_index
284 | subprio_index -= 1
285 |
286 | if not subdir.loaded() and entry["platform"] != "noarch":
287 | # ignore non-loaded subdir if channel is != noarch
288 | continue
289 |
290 | if context.verbosity != 0 and not context.json:
291 | print(
292 | "Channel: {}, platform: {}, prio: {} : {}".format(
293 | entry["channel"], entry["platform"], priority, subpriority
294 | )
295 | )
296 | print("Cache path: ", subdir.cache_path())
297 |
298 | repo = subdir.create_repo(pool)
299 | repo.set_priority(priority, subpriority)
300 | repos.append(repo)
301 |
302 | return index
303 |
304 |
305 | def init_api_context(use_mamba_experimental=False):
306 | api_ctx = api.Context()
307 |
308 | api_ctx.json = context.json
309 | api_ctx.dry_run = context.dry_run
310 | if context.json:
311 | context.always_yes = True
312 | context.quiet = True
313 | if use_mamba_experimental:
314 | context.json = False
315 |
316 | api_ctx.verbosity = context.verbosity
317 | api_ctx.set_verbosity(context.verbosity)
318 | api_ctx.quiet = context.quiet
319 | api_ctx.offline = context.offline
320 | api_ctx.local_repodata_ttl = context.local_repodata_ttl
321 | api_ctx.use_index_cache = context.use_index_cache
322 | api_ctx.always_yes = context.always_yes
323 | api_ctx.channels = context.channels
324 | api_ctx.platform = context.subdir
325 | # Conda uses a frozendict here
326 | api_ctx.proxy_servers = dict(context.proxy_servers)
327 |
328 | if "MAMBA_EXTRACT_THREADS" in os.environ:
329 | try:
330 | max_threads = int(os.environ["MAMBA_EXTRACT_THREADS"])
331 | api_ctx.extract_threads = max_threads
332 | except ValueError:
333 | v = os.environ["MAMBA_EXTRACT_THREADS"]
334 | raise ValueError(
335 | f"Invalid conversion of env variable 'MAMBA_EXTRACT_THREADS' from value '{v}'"
336 | )
337 |
338 | def get_base_url(url, name=None):
339 | tmp = url.rsplit("/", 1)[0]
340 | if name:
341 | if tmp.endswith(name):
342 | return tmp.rsplit("/", 1)[0]
343 | return tmp
344 |
345 | api_ctx.channel_alias = str(
346 | get_base_url(context.channel_alias.url(with_credentials=True))
347 | )
348 |
349 | additional_custom_channels = {}
350 | for el in context.custom_channels:
351 | if context.custom_channels[el].canonical_name not in ["local", "defaults"]:
352 | additional_custom_channels[el] = get_base_url(
353 | context.custom_channels[el].url(with_credentials=True), el
354 | )
355 | api_ctx.custom_channels = additional_custom_channels
356 |
357 | additional_custom_multichannels = {}
358 | for el in context.custom_multichannels:
359 | if el not in ["defaults", "local"]:
360 | additional_custom_multichannels[el] = []
361 | for c in context.custom_multichannels[el]:
362 | additional_custom_multichannels[el].append(
363 | get_base_url(c.url(with_credentials=True))
364 | )
365 | api_ctx.custom_multichannels = additional_custom_multichannels
366 |
367 | api_ctx.default_channels = [
368 | get_base_url(x.url(with_credentials=True)) for x in context.default_channels
369 | ]
370 |
371 | if context.ssl_verify is False:
372 | api_ctx.ssl_verify = ""
373 | elif context.ssl_verify is not True:
374 | api_ctx.ssl_verify = context.ssl_verify
375 | api_ctx.target_prefix = context.target_prefix
376 | api_ctx.root_prefix = context.root_prefix
377 | api_ctx.conda_prefix = context.conda_prefix
378 | api_ctx.pkgs_dirs = context.pkgs_dirs
379 | api_ctx.envs_dirs = context.envs_dirs
380 |
381 | api_ctx.connect_timeout_secs = int(round(context.remote_connect_timeout_secs))
382 | api_ctx.max_retries = context.remote_max_retries
383 | api_ctx.retry_backoff = context.remote_backoff_factor
384 | api_ctx.add_pip_as_python_dependency = context.add_pip_as_python_dependency
385 | api_ctx.use_only_tar_bz2 = context.use_only_tar_bz2
386 |
387 | if context.channel_priority is ChannelPriority.STRICT:
388 | api_ctx.channel_priority = api.ChannelPriority.kStrict
389 | elif context.channel_priority is ChannelPriority.FLEXIBLE:
390 | api_ctx.channel_priority = api.ChannelPriority.kFlexible
391 | elif context.channel_priority is ChannelPriority.DISABLED:
392 | api_ctx.channel_priority = api.ChannelPriority.kDisabled
393 |
394 |
395 | def to_conda_channel(channel, platform):
396 | if channel.scheme == "file":
397 | return CondaChannel.from_value(
398 | channel.platform_url(platform, with_credentials=False)
399 | )
400 |
401 | return CondaChannel(
402 | channel.scheme,
403 | channel.auth,
404 | channel.location,
405 | channel.token,
406 | channel.name,
407 | platform,
408 | channel.package_filename,
409 | )
410 |
411 |
412 | def to_package_record_from_subjson(entry, pkg, jsn_string):
413 | channel_url = entry["url"]
414 | info = json.loads(jsn_string)
415 | info["fn"] = pkg
416 | info["channel"] = to_conda_channel(entry["channel"], entry["platform"])
417 | info["url"] = join_url(channel_url, pkg)
418 | if not info.get("subdir"):
419 | info["subdir"] = entry["platform"]
420 | package_record = PackageRecord(**info)
421 | return package_record
422 |
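423 | 
424 | if __name__ == "__main__":
425 |     # Hedged sketch of the variant-config merge: later files win over
426 |     # earlier ones; the path below is one of the recipes shipped in tests/.
427 |     combined_spec, cfg = get_config("tests/recipes-v2/xtensor")
428 |     print(sorted(combined_spec.keys()))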
--------------------------------------------------------------------------------
/boa/core/validation.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | from jsonschema import validate as json_validate
5 | import json5 as json
6 | from jsonschema.exceptions import ValidationError, SchemaError
7 | from pathlib import Path
8 | from rich.console import Console
9 |
10 | console = Console()
11 |
12 |
13 | def schema_dir():
14 | return Path(__file__).parent / ".." / "schemas"
15 |
16 |
17 | def validate(obj):
18 | with open(schema_dir() / "recipe.v1.json") as schema_in:
19 | schema = json.load(schema_in)
20 | try:
21 | validation_result = json_validate(instance=obj, schema=schema)
22 | except ValidationError as e:
23 | console.print("\n[red]Recipe validation error\n")
24 | console.print(e)
25 | raise e
26 | except SchemaError as e:
27 | console.print("\n[red]Recipe schema validation error\n")
28 | console.print(e)
29 | raise e
30 | return validation_result
31 |
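32 | 
33 | if __name__ == "__main__":
34 |     # Minimal sketch: an incomplete recipe dict usually fails validation
35 |     # against the bundled v1 schema and raises jsonschema.ValidationError.
36 |     try:
37 |         validate({"package": {"name": "example", "version": "0.1.0"}})
38 |     except Exception as err:
39 |         console.print(f"validation raised {type(err).__name__}")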
--------------------------------------------------------------------------------
/boa/core/variant_arithmetic.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | import copy
3 |
4 | if False:  # TYPE_CHECKING: the import below is only for static type checkers
5 | from typing import OrderedDict
6 |
7 | from boa.core.conda_build_spec import CondaBuildSpec
8 | from boa.core.config import boa_config
9 | from boa.core.render import ensure_list
10 | from boa.core.utils import get_sys_vars_stubs
11 |
12 | from conda_build.variants import get_default_variant
13 | from conda.models.match_spec import MatchSpec
14 | import conda_build.jinja_context
15 |
16 | console = boa_config.console
17 |
18 |
19 | def _assemble_variants(
20 | env, conda_build_config, config, variants, sys_var_stubs, default_variant
21 | ):
22 | specs = {}
23 |
24 | for var in sys_var_stubs:
25 | if var in conda_build_config:
26 | variants[var] = ensure_list(conda_build_config[var])
27 |
28 | for s in env:
29 | spec = CondaBuildSpec(s)
30 | specs[spec.name] = spec
31 |
32 | for n, cb_spec in specs.items():
33 | if cb_spec.is_compiler:
34 | # This is a compiler package
35 | _, lang = cb_spec.raw.split()
36 | compiler = conda_build.jinja_context.compiler(lang, config)
37 | cb_spec.final = compiler
38 | config_key = f"{lang}_compiler"
39 | config_version_key = f"{lang}_compiler_version"
40 |
41 | if conda_build_config.get(config_key):
42 | variants[config_key] = conda_build_config[config_key]
43 | if conda_build_config.get(config_version_key):
44 | variants[config_version_key] = conda_build_config[config_version_key]
45 |
46 | # Note: as a historical artifact we __have to__ use underscore-replaced
47 | # names here!
48 | variant_key = n.replace("-", "_")
49 | vlist = None
50 | if variant_key in conda_build_config:
51 | vlist = conda_build_config[variant_key]
52 | elif variant_key in default_variant:
53 | vlist = [default_variant[variant_key]]
54 | if vlist:
55 | # we need to check if v matches the spec
56 | if cb_spec.is_simple:
57 | variants[variant_key] = vlist
58 | elif cb_spec.is_pin:
59 | # ignore variants?
60 | pass
61 | else:
62 | # check intersection of MatchSpec and variants
63 | ms = MatchSpec(cb_spec.raw)
64 | filtered = []
65 | for var in vlist:
66 | vsplit = var.split()
67 | if len(vsplit) == 1:
68 | p = {
69 | "name": n,
70 | "version": vsplit[0],
71 | "build_number": 0,
72 | "build": "",
73 | }
74 | elif len(vsplit) == 2:
75 | p = {
76 | "name": n,
77 | "version": var.split()[0],
78 | "build": var.split()[1],
79 | "build_number": 0,
80 | }
81 | else:
82 | raise RuntimeError("Check your conda_build_config")
83 |
84 | if ms.match(p):
85 | filtered.append(var)
86 | else:
87 | console.print(
88 | f"Configured variant ignored because of the recipe requirement:\n {cb_spec.raw} : {var}\n"
89 | )
90 |
91 | if len(filtered):
92 | variants[variant_key] = filtered
93 |
94 | return variants
95 |
96 |
97 | def get_dependency_variants(variant_keys, conda_build_config, config):
98 | variants = {}
99 | default_variant = get_default_variant(config)
100 |
101 | variants["target_platform"] = conda_build_config.get(
102 | "target_platform", [default_variant["target_platform"]]
103 | )
104 |
105 |     if conda_build_config.get("target_platform") == [None]:
106 | variants["target_platform"] = [default_variant["target_platform"]]
107 |
108 | config.variant["target_platform"] = variants["target_platform"][0]
109 |
110 | sys_var_stubs = get_sys_vars_stubs(config.variant["target_platform"])
111 |
112 | v = _assemble_variants(
113 | variant_keys,
114 | conda_build_config,
115 | config,
116 | variants,
117 | sys_var_stubs,
118 | default_variant,
119 | )
120 | return v
121 |
122 |
123 | def apply_variants(output, variants, cbc):
124 | final_outputs = []
125 |
126 | # this is all a bit hacky ... will have to clean that up eventually
127 | # variant_name = output.name
128 |
129 | # # need to strip static away from output name... :/
130 | # static_feature = output.selected_features.get("static", False)
131 |
132 | # if static_feature and output.name.endswith("-static"):
133 | # variant_name = output.name[: -len("-static")]
134 |     # end of the hacky section
135 |
136 | # zip keys need to be contracted
137 | zipped_keys = cbc.get("zip_keys", [])
138 |
139 | if variants:
140 | vzipped = copy.copy(variants)
141 | zippers = {}
142 | for zkeys in zipped_keys:
143 | # we check if our variant contains keys that need to be zipped
144 | if sum(k in variants for k in zkeys) > 1:
145 | filtered_zip_keys = [k for k in variants if k in zkeys]
146 |
147 | zkname = "__zip_" + "_".join(filtered_zip_keys)
148 |
149 | zklen = None
150 | for zk in filtered_zip_keys:
151 | if zk not in cbc:
152 | raise RuntimeError(
153 | f"Trying to zip keys, but not all zip keys found on conda-build-config {zk}"
154 | )
155 |
156 | zkl = len(cbc[zk])
157 | if not zklen:
158 | zklen = zkl
159 |
160 | if zklen and zkl != zklen:
161 | raise RuntimeError(
162 | f"Trying to zip keys, but not all zip keys have the same length {zkeys}"
163 | )
164 |
165 | vzipped[zkname] = [str(i) for i in range(zklen)]
166 | zippers[zkname] = {zk: cbc[zk] for zk in filtered_zip_keys}
167 |
168 | for zk in filtered_zip_keys:
169 | del vzipped[zk]
170 |
171 | combos = []
172 | differentiating_keys = []
173 | for k, vz in vzipped.items():
174 | if len(vz) > 1:
175 | differentiating_keys.append(k)
176 | combos.append([(k, x) for x in vz])
177 |
178 | all_combinations = tuple(itertools.product(*combos))
179 | all_combinations = [dict(x) for x in all_combinations]
180 |
181 | # unzip the zipped keys
182 | unzipped_combinations = []
183 | for c in all_combinations:
184 | unz_combo = {}
185 | for vc in c:
186 | if vc.startswith("__zip_"):
187 | ziptask = zippers[vc]
188 | zipindex = int(c[vc])
189 | for zippkg in ziptask:
190 | unz_combo[zippkg] = ziptask[zippkg][zipindex]
191 | if vc in differentiating_keys:
192 | differentiating_keys.remove(vc)
193 | differentiating_keys.extend(zippers[vc].keys())
194 | else:
195 | unz_combo[vc] = c[vc]
196 |
197 | unzipped_combinations.append(unz_combo)
198 |
199 | for c in unzipped_combinations:
200 | x = output.apply_variant(c, differentiating_keys)
201 | final_outputs.append(x)
202 | else:
203 | x = output.apply_variant({})
204 | final_outputs.append(x)
205 | return final_outputs
206 |
207 |
208 | def add_prev_steps(output, variant, prev_outputs, variants):
209 | requirements = output.all_requirements()
210 | for k in requirements:
211 | if k.is_pin_subpackage and k.pin.exact:
212 | # add additional variants for each output of the subpackage
213 |             add_exact_pkgs = [o for o in prev_outputs if o.name == k.name]
214 |             for o in add_exact_pkgs:
215 |                 # record the variant keys that differentiate each matching
216 |                 # output as "virtual" requirements of this output
217 |                 if not output.requirements.get("virtual"):
218 |                     output.requirements["virtual"] = []
219 |                 output.requirements["virtual"] += o.differentiating_keys
220 | 
221 |             variant.update(variants[k.name])
222 |
223 |
224 | def variant_overlap(a, b):
225 | overlap = 0
226 | for ak, av in a.items():
227 | if b.get(ak) == av:
228 | overlap += 1
229 | return overlap
230 |
231 |
232 | def get_variants(sorted_outputs: "OrderedDict", cbc: dict, config):
233 | variants = {}
234 |
235 | final_outputs = []
236 | for name, output in sorted_outputs.items():
237 | variants[name] = get_dependency_variants(output.variant_keys(), cbc, config)
238 | add_prev_steps(output, variants[name], final_outputs, variants)
239 | final_outputs += apply_variants(output, variants[name], cbc)
240 |
241 | # create a proper graph
242 | for output in final_outputs:
243 | # if we have pin_subpackage(exact) packages, we need to find those
244 | # with the largest common variant to connect them
245 | # same for the empty build steps
246 | parent_steps = []
247 |
248 | for required_step in output.required_steps:
249 | max_overlap = 0
250 | best_step_variant = None
251 |
252 | for f in final_outputs:
253 | overlap = variant_overlap(f.variant, output.variant)
254 | if f.name == required_step and overlap > max_overlap:
255 | best_step_variant = f
256 | max_overlap = overlap
257 |
258 | parent_steps.append(best_step_variant)
259 |
260 | requirements = output.all_requirements()
261 | for k in requirements:
262 | max_overlap = 0
263 | best_step_variant = None
264 |
265 | if k.is_pin_subpackage and k.pin.exact:
266 | for f in final_outputs:
267 | overlap = variant_overlap(f.variant, output.variant)
268 | if f.name == k.name and overlap > max_overlap:
269 | best_step_variant = f
270 | max_overlap = overlap
271 |
272 | parent_steps.append(best_step_variant)
273 |
274 | output.parent_steps = parent_steps
275 |
276 | return variants, final_outputs
277 |
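278 | 
279 | if __name__ == "__main__":
280 |     # Tiny illustration of the overlap metric used above to connect build
281 |     # steps: the candidate with the most agreeing variant keys wins.
282 |     a = {"python": "3.10", "numpy": "1.22", "target_platform": "linux-64"}
283 |     b = {"python": "3.10", "numpy": "1.21", "target_platform": "linux-64"}
284 |     print(variant_overlap(a, b))  # -> 2 (python and target_platform agree)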
--------------------------------------------------------------------------------
/boa/core/windows.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | from conda_build.utils import (
5 | check_call_env,
6 | path_prepended,
7 | )
8 | from conda_build.variants import set_language_env_vars
9 | from conda_build.windows import fix_staged_scripts, write_build_scripts
10 |
11 | from boa.core import environ
12 |
13 |
14 | def build(m, bld_bat, stats, provision_only=False):
15 | with path_prepended(m.config.host_prefix):
16 | with path_prepended(m.config.build_prefix):
17 | env = environ.get_dict(m=m)
18 | env["CONDA_BUILD_STATE"] = "BUILD"
19 |
20 | # hard-code this because we never want pip's build isolation
21 | # https://github.com/conda/conda-build/pull/2972#discussion_r198290241
22 | #
23 | # Note that pip env "NO" variables are inverted logic.
24 | # PIP_NO_BUILD_ISOLATION=False means don't use build isolation.
25 | #
26 | env["PIP_NO_BUILD_ISOLATION"] = "False"
27 | # some other env vars to have pip ignore dependencies.
28 | # we supply them ourselves instead.
29 | # See note above about inverted logic on "NO" variables
30 | env["PIP_NO_DEPENDENCIES"] = True
31 | env["PIP_IGNORE_INSTALLED"] = True
32 |
33 | # pip's cache directory (PIP_NO_CACHE_DIR) should not be
34 | # disabled as this results in .egg-info rather than
35 | # .dist-info directories being created, see gh-3094
36 | # set PIP_CACHE_DIR to a path in the work dir that does not exist.
37 | env["PIP_CACHE_DIR"] = m.config.pip_cache_dir
38 |
39 | # tell pip to not get anything from PyPI, please. We have everything we need
40 | # locally, and if we don't, it's a problem.
41 | env["PIP_NO_INDEX"] = True
42 |
43 | # set variables like CONDA_PY in the test environment
44 | env.update(set_language_env_vars(m.config.variant))
45 |
46 | for name in "BIN", "INC", "LIB":
47 | path = env["LIBRARY_" + name]
48 | if not os.path.isdir(path):
49 | os.makedirs(path)
50 |
51 | work_script, env_script = write_build_scripts(m, env, bld_bat)
52 |
53 | if not provision_only and os.path.isfile(work_script):
54 | cmd = ["cmd.exe", "/d", "/c", os.path.basename(work_script)]
55 | # rewrite long paths in stdout back to their env variables
56 | if m.config.debug or m.config.no_rewrite_stdout_env:
57 | rewrite_env = None
58 | else:
59 | rewrite_env = {
60 | k: env[k] for k in ["PREFIX", "BUILD_PREFIX", "SRC_DIR"] if k in env
61 | }
62 | print(f"Rewriting env in output: {rewrite_env}", file=sys.stderr)
63 | check_call_env(
64 | cmd, cwd=m.config.work_dir, stats=stats, rewrite_stdout_env=rewrite_env
65 | )
66 | fix_staged_scripts(
67 | os.path.join(m.config.host_prefix, "Scripts"), config=m.config
68 | )
69 |
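70 | 
71 | # Hedged call-shape sketch (not invoked here): `m` is a rendered metadata
72 | # object, `bld_bat` the path to the recipe's bld.bat script, and `stats`
73 | # collects resource usage from the build process.
74 | #
75 | #   stats = {}
76 | #   build(m, bld_bat="bld.bat", stats=stats)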
--------------------------------------------------------------------------------
/boa/helpers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mamba-org/boa/183fef0cf7d18502da246a85379bd8e6423b1fb8/boa/helpers/__init__.py
--------------------------------------------------------------------------------
/boa/helpers/asciigraph.py:
--------------------------------------------------------------------------------
1 | # Ported from DVC
2 | # Originally released under Apache-2.0 License
3 | # https://github.com/iterative/dvc/blob/a2e25cf44947c20be9881272a9d4469b7f38f268/dvc/dagascii.py
4 |
5 | import logging
6 | import math
7 | import os
8 |
9 | from grandalf.graphs import Edge, Graph, Vertex
10 | from grandalf.layouts import SugiyamaLayout
11 | from grandalf.routing import EdgeViewer, route_with_lines
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
16 | class VertexViewer:
17 | """Class to define vertex box boundaries that will be accounted for during
18 | graph building by grandalf.
19 | Args:
20 | name (str): name of the vertex.
21 | """
22 |
23 | MIN_HEIGHT = 3 # top and bottom box edges + text
24 |
25 | def __init__(self, name):
26 | # pylint: disable=invalid-name
27 | self._h = (
28 | max(self.MIN_HEIGHT, len(name.splitlines())) + 4
29 | ) # top and bottom box edges + text
30 | self._w = (
31 | max(len(line) for line in name.splitlines()) + 4
32 |         ) # right and left box edges + text
33 |
34 | @property
35 | def h(self): # pylint: disable=invalid-name
36 | """Height of the box."""
37 | return self._h
38 |
39 | @property
40 | def w(self): # pylint: disable=invalid-name
41 | """Width of the box."""
42 | return self._w
43 |
44 |
45 | class AsciiCanvas:
46 | """Class for drawing in ASCII.
47 | Args:
48 | cols (int): number of columns in the canvas. Should be > 1.
49 | lines (int): number of lines in the canvas. Should be > 1.
50 | """
51 |
52 | TIMEOUT = 10
53 |
54 | def __init__(self, cols, lines):
55 | assert cols > 1
56 | assert lines > 1
57 |
58 | self.cols = cols
59 | self.lines = lines
60 |
61 | self.canvas = [[" "] * cols for line in range(lines)]
62 |
63 | def draw(self):
64 | """Draws ASCII canvas on the screen."""
65 | lines = map("".join, self.canvas)
66 | joined_lines = os.linesep.join(lines)
67 | return joined_lines
68 |
69 | def point(self, x, y, char):
70 | """Create a point on ASCII canvas.
71 | Args:
72 | x (int): x coordinate. Should be >= 0 and < number of columns in
73 | the canvas.
74 |             y (int): y coordinate. Should be >= 0 and < number of lines in the
75 | canvas.
76 | char (str): character to place in the specified point on the
77 | canvas.
78 | """
79 | assert len(char) == 1
80 | assert x >= 0
81 | assert x < self.cols
82 | assert y >= 0
83 | assert y < self.lines
84 |
85 | self.canvas[y][x] = char
86 |
87 | def line(self, x0, y0, x1, y1, char):
88 | """Create a line on ASCII canvas.
89 | Args:
90 | x0 (int): x coordinate where the line should start.
91 | y0 (int): y coordinate where the line should start.
92 | x1 (int): x coordinate where the line should end.
93 | y1 (int): y coordinate where the line should end.
94 | char (str): character to draw the line with.
95 | """
96 | # pylint: disable=too-many-arguments, too-many-branches
97 | if x0 > x1:
98 | x1, x0 = x0, x1
99 | y1, y0 = y0, y1
100 |
101 | dx = x1 - x0
102 | dy = y1 - y0
103 |
104 | if dx == 0 and dy == 0:
105 | self.point(x0, y0, char)
106 | elif abs(dx) >= abs(dy):
107 | for x in range(x0, x1 + 1):
108 | if dx == 0:
109 | y = y0
110 | else:
111 | y = y0 + int(round((x - x0) * dy / float(dx)))
112 | self.point(x, y, char)
113 | elif y0 < y1:
114 | for y in range(y0, y1 + 1):
115 | if dy == 0:
116 | x = x0
117 | else:
118 | x = x0 + int(round((y - y0) * dx / float(dy)))
119 | self.point(x, y, char)
120 | else:
121 | for y in range(y1, y0 + 1):
122 | if dy == 0:
123 | x = x0
124 | else:
125 | x = x1 + int(round((y - y1) * dx / float(dy)))
126 | self.point(x, y, char)
127 |
128 | def text(self, x, y, text):
129 | """Print a text on ASCII canvas.
130 | Args:
131 | x (int): x coordinate where the text should start.
132 | y (int): y coordinate where the text should start.
133 | text (str): string that should be printed.
134 | """
135 | for i, line in enumerate(text.splitlines()):
136 | for j, char in enumerate(line):
137 | self.point(x + j, y + i, char)
138 |
139 | def box(self, x0, y0, width, height):
140 | """Create a box on ASCII canvas.
141 | Args:
142 | x0 (int): x coordinate of the box corner.
143 | y0 (int): y coordinate of the box corner.
144 | width (int): box width.
145 | height (int): box height.
146 | """
147 |         return  # NOTE: box drawing is disabled by this early return; the code below is unreachable
148 | assert width > 1
149 | assert height > 1
150 |
151 | width -= 1
152 | height -= 1
153 |
154 | for x in range(x0, x0 + width):
155 | self.point(x, y0, "-")
156 | self.point(x, y0 + height, "-")
157 |
158 | for y in range(y0, y0 + height):
159 | self.point(x0, y, "|")
160 | self.point(x0 + width, y, "|")
161 |
162 | self.point(x0, y0, "+")
163 | self.point(x0 + width, y0, "+")
164 | self.point(x0, y0 + height, "+")
165 | self.point(x0 + width, y0 + height, "+")
166 |
167 |
168 | def _build_sugiyama_layout(vertices, edges):
169 | #
170 | # Just a reminder about naming conventions:
171 | # +------------X
172 | # |
173 | # |
174 | # |
175 | # |
176 | # Y
177 | #
178 |
179 | vertices = {i: Vertex(v) for i, v in enumerate(vertices)}
180 |     # NOTE: reversing edges to correctly orient the graph
181 | edges = [Edge(vertices[e], vertices[s]) for s, e in edges]
182 | vertices = vertices.values()
183 | graph = Graph(vertices, edges)
184 |
185 | for vertex in vertices:
186 | vertex.view = VertexViewer(vertex.data)
187 |
188 | # NOTE: determine min box length to create the best layout
189 | minw = min(v.view.w for v in vertices)
190 | minh = min(v.view.h for v in vertices)
191 |
192 | for edge in edges:
193 | edge.view = EdgeViewer()
194 |
195 | sugs = []
196 | for g in graph.C:
197 | sug = SugiyamaLayout(g)
198 | graph = g
199 | roots = list(filter(lambda x: len(x.e_in()) == 0, graph.sV))
200 |
201 | sug.init_all(roots=roots, optimize=True)
202 |
203 | sug.yspace = minh
204 | sug.xspace = minw
205 | sug.route_edge = route_with_lines
206 |
207 | sug.draw()
208 | sugs.append(sug)
209 | return sugs
210 |
211 |
212 | def draw(vertices, edges):
213 | """Build a DAG and draw it in ASCII.
214 | Args:
215 | vertices (list): list of graph vertices.
216 | edges (list): list of graph edges.
217 | """
218 | # pylint: disable=too-many-locals
219 |     # NOTE: coordinates might be negative, so we need to shift
220 | # everything to the positive plane before we actually draw it.
221 | Xs = [] # pylint: disable=invalid-name
222 | Ys = [] # pylint: disable=invalid-name
223 |
224 | if edges and isinstance(edges[0][0], str):
225 | iedges = []
226 | for e in edges:
227 | iedges.append([vertices.index(e[0]), vertices.index(e[1])])
228 | edges = iedges
229 |
230 | sugs = _build_sugiyama_layout(vertices, edges)
231 | canvass = []
232 | for sug in sugs:
233 | for vertex in sug.g.sV:
234 | # NOTE: moving boxes w/2 to the left
235 | Xs.append(vertex.view.xy[0] - vertex.view.w / 2.0)
236 | Xs.append(vertex.view.xy[0] + vertex.view.w / 2.0)
237 | Ys.append(vertex.view.xy[1] - vertex.view.h)
238 | Ys.append(vertex.view.xy[1] + vertex.view.h + 1)
239 |
240 | for edge in sug.g.sE:
241 | for x, y in edge.view._pts: # pylint: disable=protected-access
242 | Xs.append(x)
243 | Ys.append(y)
244 |
245 | minx = min(Xs)
246 | miny = min(Ys)
247 | maxx = max(Xs)
248 | maxy = max(Ys)
249 |
250 | canvas_cols = int(math.ceil(math.ceil(maxx) - math.floor(minx))) + 1
251 | canvas_lines = int(round(maxy - miny))
252 |
253 | canvas = AsciiCanvas(canvas_cols, canvas_lines)
254 |
255 |         # NOTE: first draw edges so that node boxes can overwrite them
256 | for edge in sug.g.sE:
257 | # pylint: disable=protected-access
258 | assert len(edge.view._pts) > 1
259 | for index in range(1, len(edge.view._pts)):
260 | start = edge.view._pts[index - 1]
261 | end = edge.view._pts[index]
262 |
263 | start_x = int(round(start[0] - minx))
264 | start_y = int(round(start[1] - miny))
265 | end_x = int(round(end[0] - minx))
266 | end_y = int(round(end[1] - miny))
267 |
268 | assert start_x >= 0
269 | assert start_y >= 0
270 | assert end_x >= 0
271 | assert end_y >= 0
272 |
273 | canvas.line(start_x, start_y, end_x, end_y, "*")
274 |
275 | for vertex in sug.g.sV:
276 | # NOTE: moving boxes w/2 to the left
277 | x = vertex.view.xy[0] - vertex.view.w / 2.0
278 | y = vertex.view.xy[1]
279 |
280 | canvas.box(
281 | int(round(x - minx)),
282 | int(round(y - miny)),
283 | vertex.view.w,
284 | vertex.view.h,
285 | )
286 |
287 | canvas.text(int(round(x - minx)) + 1, int(round(y - miny)) + 1, vertex.data)
288 |
289 | canvass.append(canvas.draw())
290 |
291 | return canvass
292 |
--------------------------------------------------------------------------------
/boa/helpers/ast_extract_syms.py:
--------------------------------------------------------------------------------
1 | import ast
2 |
3 |
4 | class NameCollector(ast.NodeVisitor):
5 | def __init__(self):
6 | self.collected_names = []
7 |
8 | def visit_Name(self, node):
9 | self.collected_names.append(node.id)
10 |
11 |
12 | def ast_extract_syms(expr):
13 | nodes = ast.parse(expr)
14 |     collector = NameCollector()
15 |     collector.visit(nodes)
16 |     return collector.collected_names
17 |
18 |
19 | if __name__ == "__main__":
20 | print(ast_extract_syms("vc <14"))
21 | print(ast_extract_syms("python > (3,6)"))
22 | print(ast_extract_syms("somevar == (3,6)"))
23 | print(ast_extract_syms("target_platform == 'linux'"))
24 |
--------------------------------------------------------------------------------
/boa/schemas/generate_schemas.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | python model.py > recipe.v1.json
4 | python info/about.py > info/info-about.schema.json
5 | python info/index.py > info/info-index.schema.json
6 | python info/paths.py > info/info-paths.schema.json
7 |
--------------------------------------------------------------------------------
/boa/schemas/info/about.py:
--------------------------------------------------------------------------------
1 | # generated by datamodel-codegen:
2 | # filename: about.json
3 | # timestamp: 2021-11-25T18:26:02+00:00
4 |
5 | from __future__ import annotations
6 |
7 | from typing import List
8 |
9 | from pydantic import BaseModel, Field
10 |
11 |
12 | class Model(BaseModel):
13 | channels: List[str]
14 | conda_build_version: str
15 | conda_private: bool
16 | conda_version: str
17 | description: str
18 | dev_url: str
19 | doc_url: str
20 | env_vars: dict
21 | extra: dict
22 | home: str
23 | identifiers: List[str]
24 | keywords: List[str]
25 | license_: str = Field(alias="license")
26 | license_family: str
27 | license_file: str
28 | root_pkgs: List[str]
29 | summary: str
30 | tags: List[str]
31 |
32 |
33 | if __name__ == "__main__":
34 | print(Model.schema_json(indent=2))
35 |
--------------------------------------------------------------------------------
/boa/schemas/info/index.py:
--------------------------------------------------------------------------------
1 | # generated by datamodel-codegen:
2 | # filename: index.json
3 | # timestamp: 2021-11-25T18:29:15+00:00
4 |
5 | from __future__ import annotations
6 |
7 | from typing import List
8 |
9 | from pydantic import BaseModel, Field
10 |
11 |
12 | class Model(BaseModel):
13 | arch: str
14 | build: str
15 | build_number: int
16 | constrains: List[str]
17 | depends: List[str]
18 | license_: str = Field(alias="license")
19 | license_family: str
20 | name: str
21 | platform: str
22 | subdir: str
23 | timestamp: int
24 | version: str
25 |
26 |
27 | if __name__ == "__main__":
28 | print(Model.schema_json(indent=2))
29 |
--------------------------------------------------------------------------------
/boa/schemas/info/info-about.schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "title": "Model",
3 | "type": "object",
4 | "properties": {
5 | "channels": {
6 | "title": "Channels",
7 | "type": "array",
8 | "items": {
9 | "type": "string"
10 | }
11 | },
12 | "conda_build_version": {
13 | "title": "Conda Build Version",
14 | "type": "string"
15 | },
16 | "conda_private": {
17 | "title": "Conda Private",
18 | "type": "boolean"
19 | },
20 | "conda_version": {
21 | "title": "Conda Version",
22 | "type": "string"
23 | },
24 | "description": {
25 | "title": "Description",
26 | "type": "string"
27 | },
28 | "dev_url": {
29 | "title": "Dev Url",
30 | "type": "string"
31 | },
32 | "doc_url": {
33 | "title": "Doc Url",
34 | "type": "string"
35 | },
36 | "env_vars": {
37 | "title": "Env Vars",
38 | "type": "object"
39 | },
40 | "extra": {
41 | "title": "Extra",
42 | "type": "object"
43 | },
44 | "home": {
45 | "title": "Home",
46 | "type": "string"
47 | },
48 | "identifiers": {
49 | "title": "Identifiers",
50 | "type": "array",
51 | "items": {
52 | "type": "string"
53 | }
54 | },
55 | "keywords": {
56 | "title": "Keywords",
57 | "type": "array",
58 | "items": {
59 | "type": "string"
60 | }
61 | },
62 | "license": {
63 | "title": "License",
64 | "type": "string"
65 | },
66 | "license_family": {
67 | "title": "License Family",
68 | "type": "string"
69 | },
70 | "license_file": {
71 | "title": "License File",
72 | "type": "string"
73 | },
74 | "root_pkgs": {
75 | "title": "Root Pkgs",
76 | "type": "array",
77 | "items": {
78 | "type": "string"
79 | }
80 | },
81 | "summary": {
82 | "title": "Summary",
83 | "type": "string"
84 | },
85 | "tags": {
86 | "title": "Tags",
87 | "type": "array",
88 | "items": {
89 | "type": "string"
90 | }
91 | }
92 | },
93 | "required": [
94 | "channels",
95 | "conda_build_version",
96 | "conda_private",
97 | "conda_version",
98 | "description",
99 | "dev_url",
100 | "doc_url",
101 | "env_vars",
102 | "extra",
103 | "home",
104 | "identifiers",
105 | "keywords",
106 | "license",
107 | "license_family",
108 | "license_file",
109 | "root_pkgs",
110 | "summary",
111 | "tags"
112 | ]
113 | }
114 |
--------------------------------------------------------------------------------
/boa/schemas/info/info-index.schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "title": "Model",
3 | "type": "object",
4 | "properties": {
5 | "arch": {
6 | "title": "Arch",
7 | "type": "string"
8 | },
9 | "build": {
10 | "title": "Build",
11 | "type": "string"
12 | },
13 | "build_number": {
14 | "title": "Build Number",
15 | "type": "integer"
16 | },
17 | "constrains": {
18 | "title": "Constrains",
19 | "type": "array",
20 | "items": {
21 | "type": "string"
22 | }
23 | },
24 | "depends": {
25 | "title": "Depends",
26 | "type": "array",
27 | "items": {
28 | "type": "string"
29 | }
30 | },
31 | "license": {
32 | "title": "License",
33 | "type": "string"
34 | },
35 | "license_family": {
36 | "title": "License Family",
37 | "type": "string"
38 | },
39 | "name": {
40 | "title": "Name",
41 | "type": "string"
42 | },
43 | "platform": {
44 | "title": "Platform",
45 | "type": "string"
46 | },
47 | "subdir": {
48 | "title": "Subdir",
49 | "type": "string"
50 | },
51 | "timestamp": {
52 | "title": "Timestamp",
53 | "type": "integer"
54 | },
55 | "version": {
56 | "title": "Version",
57 | "type": "string"
58 | }
59 | },
60 | "required": [
61 | "arch",
62 | "build",
63 | "build_number",
64 | "constrains",
65 | "depends",
66 | "license",
67 | "license_family",
68 | "name",
69 | "platform",
70 | "subdir",
71 | "timestamp",
72 | "version"
73 | ]
74 | }
75 |
--------------------------------------------------------------------------------
/boa/schemas/info/info-paths.schema.json:
--------------------------------------------------------------------------------
1 | {
2 | "title": "Model",
3 | "type": "object",
4 | "properties": {
5 | "paths": {
6 | "title": "Paths",
7 | "type": "array",
8 | "items": {
9 | "$ref": "#/definitions/Path"
10 | }
11 | },
12 | "paths_version": {
13 | "title": "Paths Version",
14 | "type": "integer"
15 | }
16 | },
17 | "required": [
18 | "paths",
19 | "paths_version"
20 | ],
21 | "definitions": {
22 | "Path": {
23 | "title": "Path",
24 | "type": "object",
25 | "properties": {
26 | "path_type": {
27 | "title": "Path Type",
28 | "type": "string"
29 | },
30 | "sha256": {
31 | "title": "Sha256",
32 | "type": "string"
33 | },
34 | "size_in_bytes": {
35 | "title": "Size In Bytes",
36 | "type": "integer"
37 | },
38 | "file_mode": {
39 | "title": "File Mode",
40 | "type": "string"
41 | },
42 | "prefix_placeholder": {
43 | "title": "Prefix Placeholder",
44 | "type": "string"
45 | }
46 | },
47 | "required": [
48 | "path_type",
49 | "sha256",
50 | "size_in_bytes"
51 | ]
52 | }
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/boa/schemas/info/paths.py:
--------------------------------------------------------------------------------
1 | # generated by datamodel-codegen:
2 | # filename: paths.json
3 | # timestamp: 2021-11-25T18:32:52+00:00
4 |
5 | from __future__ import annotations
6 |
7 | from typing import List, Optional
8 |
9 | from pydantic import BaseModel
10 |
11 |
12 | class Path(BaseModel):
13 | _path: str
14 | path_type: str
15 | sha256: str
16 | size_in_bytes: int
17 | file_mode: Optional[str] = None
18 | prefix_placeholder: Optional[str] = None
19 |
20 |
21 | class Model(BaseModel):
22 | paths: List[Path]
23 | paths_version: int
24 |
25 |
26 | if __name__ == "__main__":
27 | print(Model.schema_json(indent=2))
28 |
--------------------------------------------------------------------------------
/boa/schemas/model.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import Any, Dict, List, Optional, Union
4 | from enum import Enum
5 | from pydantic import BaseModel, Extra, Field, conint, constr
6 |
7 |
8 | ###################
9 | # Package section #
10 | ###################
11 |
12 |
13 | class Package(BaseModel):
14 | class Config:
15 | extra = Extra.forbid
16 |
17 | name: str = Field(description="The package name")
18 | version: str = Field(description="The package version")
19 |
20 |
21 | ###################
22 | # Source section #
23 | ###################
24 |
25 |
26 | sha256str = constr(min_length=64, max_length=64, regex=r"^[0-9a-fA-F]{64}$")
27 | md5str = constr(min_length=32, max_length=32, regex=r"^[0-9a-fA-F]{32}$")
28 | sha1 = constr(min_length=40, max_length=40, regex=r"^[0-9a-fA-F]{40}$")
29 |
30 | # We require some paths to contain no backslashes, even on Windows
31 | path_no_backslash = constr(regex=r"^[^\\]+$")
32 | ListNoBackslash = List[path_no_backslash]
33 |
34 |
35 | class BaseSourceItem(BaseModel):
36 | class Config:
37 | extra = Extra.forbid
38 |
39 | patches: Optional[List[str]] = None
40 | folder: Optional[str] = None
41 |
42 |
43 | class UrlSource(BaseSourceItem):
44 | url: str = None
45 |
46 | sha256: Optional[sha256str] = None
47 | md5: Optional[md5str] = None
48 | sha1: Optional[sha1] = None
49 | fn: Optional[str] = None
50 |
51 |
52 | class GitSource(BaseSourceItem):
53 | git_rev: str = "HEAD"
54 | git_url: str
55 | git_depth: int = -1
56 |
57 |
58 | class HgSource(BaseSourceItem):
59 | hg_url: str
60 | hg_tag: str = "tip"
61 |
62 |
63 | class SvnSource(BaseSourceItem):
64 | svn_url: str
65 | svn_rev: str = "head"
66 | svn_ignore_externals: bool = False
67 |
68 |
69 | class LocalSource(BaseSourceItem):
70 | path: str
71 |
72 |
73 | SourceItem = Union[UrlSource, GitSource, HgSource, SvnSource, LocalSource]
74 |
75 |
76 | ###################
77 | # Build section #
78 | ###################
79 |
80 |
81 | class NoarchType(Enum):
82 | generic = "generic"
83 | python = "python"
84 |
85 |
86 | class RunExports(BaseModel):
87 | class Config:
88 | extra = Extra.forbid
89 |
90 | weak: Optional[List[str]] = Field(
91 | None, description="Weak run exports apply from the host env to the run env"
92 | )
93 | strong: Optional[List[str]] = Field(
94 | None,
95 | description="Strong run exports apply from the build and host env to the run env",
96 | )
97 | noarch: Optional[List[str]] = Field(
98 | None,
99 | description="Noarch run exports are the only ones looked at when building noarch packages",
100 | )
101 | weak_constrains: Optional[List[str]] = Field(
102 | None, description="Weak run constrains add run_constrains from the host env"
103 | )
104 | strong_constrains: Optional[List[str]] = Field(
105 | None,
106 | description="Strong run constrains add run_constrains from the build and host env",
107 | )
108 |
109 |
110 | class Build(BaseModel):
111 | class Config:
112 | extra = Extra.forbid
113 |
114 | number: Optional[conint(ge=0)] = Field(
115 | 0,
116 | description="Build number to version current build in addition to package version",
117 | )
118 | string: Optional[str] = Field(
119 | None,
120 | description="Build string to identify build variant (if not explicitly set, computed automatically from used build variant)",
121 | )
122 | skip: Optional[List[str]] = Field(
123 | None,
124 | description="List of conditions under which to skip the build of the package.",
125 | )
126 | script: Optional[Union[str, List[str]]] = Field(
127 | None,
128 | description="Build script to be used. If not given, tries to find 'build.sh' on Unix or 'bld.bat' on Windows inside the recipe folder.",
129 | )
130 |
131 | noarch: Optional[NoarchType] = Field(
132 | None,
133 | description="Can be either 'generic' or 'python'. A noarch 'python' package compiles .pyc files upon installation.",
134 | )
135 | # Note: entry points only valid if noarch: python is used! Write custom validator?
136 | entry_points: Optional[List[str]] = None
137 | # Deprecated
138 | # noarch_python: bool = False
139 |
140 | run_exports: Optional[Union[RunExports, List[str]]] = None
141 | ignore_run_exports: Optional[List[str]] = None
142 | ignore_run_exports_from: Optional[List[str]] = None
143 |
144 | # deprecated, but still used to downweigh packages
145 | track_features: Optional[List[str]] = None
146 |
147 | # Features are completely deprecated
148 | # features: List[str]
149 | # requires_features: Dict[str, str]
150 | # provides_features: Dict[str, str],
151 |
152 | include_recipe: bool = Field(True, description="Include recipe in final package.")
153 |
154 | pre_link: Optional[str] = Field(
155 | None,
156 | alias="pre-link",
157 | description="Script to execute when installing - before linking. Highly discouraged!",
158 | )
159 | post_link: Optional[str] = Field(
160 | None,
161 | alias="post-link",
162 | description="Script to execute when installing - after linking.",
163 | )
164 | pre_unlink: Optional[str] = Field(
165 | None,
166 | alias="pre-unlink",
167 | description="Script to execute when removing - before unlinking.",
168 | )
169 |
170 | osx_is_app: bool = False
171 | disable_pip: bool = False
172 | preserve_egg_dir: bool = False
173 |
174 | no_link: Optional[ListNoBackslash] = None
175 | binary_relocation: Union[bool, ListNoBackslash] = True
176 |
177 | has_prefix_files: ListNoBackslash = []
178 | binary_has_prefix_files: Optional[ListNoBackslash] = None
179 | ignore_prefix_files: Union[bool, ListNoBackslash] = False
180 |
181 | # the following is defaulting to True on UNIX and False on Windows
182 | detect_binary_files_with_prefix: Optional[bool] = None
183 |
184 | skip_compile_pyc: Optional[List[str]] = None
185 |
186 | rpaths: Optional[List[str]] = None
187 | rpaths_patcher: Optional[str] = None
188 |
189 | # Note: this deviates from conda-build `script_env`!
190 | script_env: Optional[Dict[str, str]] = None
191 |
192 | # Files to be included even if they are present in the PREFIX before building
193 | always_include_files: Optional[List[str]] = None
194 |
195 | # msvc_compiler: Optional[str] = None -- deprecated in conda_build
196 | # pin_depends: Optional[str] -- did not find usage anywhere, removed
197 | # preferred_env: Optional[str]
198 |     # preferred_env_executable_paths: Optional[List]
199 |
200 |     # note: didn't find _any_ usage of force_use_keys in conda-forge
201 | force_use_keys: Optional[List[str]] = None
202 | force_ignore_keys: Optional[List[str]] = None
203 |
204 | merge_build_host: bool = False
205 |
206 | missing_dso_whitelist: Optional[List[str]] = None
207 | error_overdepending: bool = Field(False, description="Error on overdepending")
208 | error_overlinking: bool = Field(False, description="Error on overlinking")
209 |
210 |
211 | ###################
212 | # About section #
213 | ###################
214 |
215 |
216 | class About(BaseModel):
217 | # URLs
218 | home: Optional[str] = None
219 | dev_url: Optional[str] = None
220 | doc_url: Optional[str] = None
221 | doc_source_url: Optional[str] = None
222 | license_url: Optional[str] = None
223 |
224 | # Text
225 | license_: Optional[str] = Field(None, alias="license")
226 | summary: Optional[str] = None
227 | description: Optional[str] = None
228 | license_family: Optional[str] = None
229 |
230 | # Lists
231 | identifiers: Optional[List[str]] = None
232 | tags: Optional[List[str]] = None
233 | keywords: Optional[List[str]] = None
234 |
235 | # Paths in source tree
236 | license_file: Optional[List[str]] = None
237 | prelink_message: Optional[str] = None
238 | readme: Optional[str] = None
239 |
240 |
241 | #########################
242 | # Requirements Section #
243 | #########################
244 |
245 |
246 | class Requirements(BaseModel):
247 | build: Optional[List[str]] = None
248 | host: Optional[List[str]] = None
249 | run: Optional[List[str]] = None
250 | run_constrained: Optional[List[str]] = None
251 |
252 |
253 | class Test(BaseModel):
254 | files: Optional[List[str]] = Field(
255 | None,
256 | description="Test files that are copied from the recipe into the temporary test directory and are needed during testing.",
257 | )
258 | source_files: Optional[List[str]] = Field(
259 | None,
260 | description="Test files that are copied from the source work directory into the temporary test directory and are needed during testing.",
261 | )
262 | requires: Optional[List[str]] = Field(
263 | None,
264 | description="In addition to the runtime requirements, you can specify requirements needed during testing.",
265 | )
266 | imports: Optional[List[str]] = Field(None, description="Test importing modules.")
267 | commands: Optional[List[str]] = Field(
268 | None, description="The test commands to execute."
269 | )
270 |
271 |
272 | class Output(BaseModel):
273 | package: Package = Field(..., description="The package name and version")
274 | build: Optional[Build] = None
275 | requirements: Optional[Requirements] = None
276 | test: Optional[Test] = None
277 |
278 |
279 | class BoaRecipeV1(BaseModel):
280 | class Config:
281 | extra = Extra.forbid
282 |
283 | context: Optional[Dict[str, Any]] = Field(None, description="The recipe context.")
284 | package: Optional[Package] = Field(
285 | None, description="The package name and version."
286 | )
287 | source: Optional[List[SourceItem]] = Field(
288 | None, description="The source items to be downloaded and used for the build."
289 | )
290 | build: Optional[Build] = None
291 | features: Optional[List] = None
292 | steps: Optional[List[Output]] = None
293 | about: Optional[About] = None
294 | extra: Optional[Dict[str, Any]] = None
295 |
296 |
297 | if __name__ == "__main__":
298 | print(BoaRecipeV1.schema_json(indent=2))
299 |
--------------------------------------------------------------------------------
/boa/tui/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mamba-org/boa/183fef0cf7d18502da246a85379bd8e6423b1fb8/boa/tui/__init__.py
--------------------------------------------------------------------------------
/boa/tui/exceptions.py:
--------------------------------------------------------------------------------
1 | class BoaExitException(Exception):
2 | pass
3 |
4 |
5 | class BoaRunBuildException(Exception):
6 | pass
7 |
--------------------------------------------------------------------------------
/boa/tui/patching.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | import subprocess
5 | import tempfile
6 |
7 | from boa.core.build import download_source
8 |
9 | from rich.console import Console
10 |
11 | console = Console()
12 |
13 | patch_reference_dir = None
14 |
15 |
16 | def create_patch(dir_a, dir_b):
17 | # create a patch file from dir_a to dir_b
18 | # ignoring conda files...
19 | git = False
20 | if git:
21 | exclude = [":(exclude)conda_build.sh", ":(exclude)build_env_setup.sh"]
22 | cmd = ["git", "diff", dir_a, dir_b] + exclude
23 | else:
24 | exclude = [".git", "conda_build.sh", "build_env_setup.sh"]
25 | exclude_args = [
26 | item for pair in zip(len(exclude) * ["-x"], exclude) for item in pair
27 | ]
28 | cmd = ["diff", "-uraN", dir_a, dir_b] + exclude_args
29 |
30 | console.print(f"[yellow]Calling: [/yellow] {' '.join(cmd)}")
31 | try:
32 | subprocess.check_output(cmd)
33 | except subprocess.CalledProcessError as exc:
34 | if exc.returncode == 1:
35 | # ah, actually all is well!
36 | output = exc.output.decode("utf-8", errors="ignore")
37 | output = output.replace(dir_a, "old")
38 | output = output.replace(dir_b, "new")
39 | return output
40 | if exc.returncode == 2:
41 | # ouch, 2 means trouble!
42 | raise exc
43 | else:
44 | return None
45 |
46 |
47 | def create_reference_dir(meta):
48 | if hasattr(meta, "boa_patch_reference_dir"):
49 | return meta.boa_patch_reference_dir
50 | temp_dir = tempfile.mkdtemp()
51 | bkup = meta.config.croot
52 | meta.config.croot = temp_dir
53 | patch_reference_dir = meta.config.build_folder
54 |     bkup_verbose, meta.config.verbose = meta.config.verbose, False  # save the old value, then silence output
55 | console.print("Preparing reference dir... this might take a while")
56 | download_source(meta)
57 | meta.config.verbose = bkup_verbose
58 | meta.config.croot = bkup
59 | console.print(f"Reference dir: {patch_reference_dir}\n")
60 | meta.boa_patch_reference_dir = patch_reference_dir
61 | return patch_reference_dir
62 |
63 |
64 | if __name__ == "__main__":
65 | create_patch("/Users/wolfv/Programs/boa", "/Users/wolfv/Programs/boa2")
66 |
--------------------------------------------------------------------------------
/boa/tui/tui.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2021, QuantStack
2 | # SPDX-License-Identifier: BSD-3-Clause
3 |
4 | from prompt_toolkit import PromptSession
5 | from prompt_toolkit.formatted_text import HTML
6 | from prompt_toolkit.patch_stdout import patch_stdout
7 | from prompt_toolkit.history import FileHistory
8 | from prompt_toolkit.completion import NestedCompleter, PathCompleter
9 |
10 | from ruamel.yaml import YAML
11 |
12 | from boa.tui import patching
13 |
14 | from .exceptions import BoaExitException, BoaRunBuildException
15 |
16 | try:
17 | from watchgod import awatch
18 |
19 | watchgod_available = True
20 | except ImportError:
21 | watchgod_available = False
22 |
23 | import asyncio
24 | import subprocess
25 | import os
26 | import shutil
27 | import platform
28 | from pathlib import Path
29 | from glob import glob
30 |
31 | from rich.console import Console
32 | from rich.syntax import Syntax
33 | from rich.rule import Rule
34 |
35 | yaml = YAML(typ="rt")
36 | yaml.preserve_quotes = True
37 | yaml.default_flow_style = False
38 | yaml.indent(sequence=4, offset=2)
39 | yaml.width = 1000
40 | # yaml.Representer = ruamel.yaml.representer.RoundTripRepresenter
41 | # yaml.Loader = ruamel.yaml.RoundTripLoader
42 |
43 | console = Console()
44 |
45 | help_text = """
46 | Enter a command:
47 | glob
48 | edit
49 | patch
50 | build
51 | """
52 |
53 | build_context = None
54 |
55 |
56 | def print_help():
57 | print(help_text)
58 |
59 |
60 | def _get_prefix(env):
61 | if env == "host":
62 | return build_context.config.host_prefix
63 | if env == "build":
64 | return build_context.config.build_prefix
65 | if env == "work":
66 | return build_context.config.work_dir
67 |
68 |
69 | def remove_prefix(strings):
70 |     def replace_all(strings, x, r):
71 |         # replace prefix x with r in every string and collapse double slashes
72 |         res = []
73 |         for s in strings:
74 |             tmp = s.replace(x, r)
75 |             tmp = tmp.replace("//", "/")
76 |             res.append(tmp)
77 |         return res
78 |
79 | res = replace_all(strings, build_context.config.build_prefix, "$BUILD_PREFIX/")
80 | res = replace_all(res, build_context.config.host_prefix, "$PREFIX/")
81 | res = replace_all(res, build_context.config.work_dir, "$WORK_DIR/")
82 | return res
83 |
84 |
85 | def glob_search(env, search_text):
86 | p = _get_prefix(env)
87 | search_result = glob(os.path.join(p, search_text))
88 | if search_result:
89 | console.print(remove_prefix(search_result))
90 | else:
91 | console.print(f"[red]No results found for glob {search_text}[/red]")
92 |
93 |
94 | def bottom_toolbar():
95 | return HTML('Interactive mode is !')
96 |
97 |
98 | fh = FileHistory(".boa_tui_history")
99 | session = PromptSession(fh)
100 |
101 |
102 | def get_completer():
103 | def get_paths():
104 | return [build_context.config.work_dir]
105 |
106 | return NestedCompleter.from_nested_dict(
107 | {
108 | "help": None,
109 | "glob": {"build": None, "host": None},
110 | "exit": None,
111 | "ls": PathCompleter(get_paths=get_paths),
112 | "edit": {
113 | "file": PathCompleter(get_paths=get_paths),
114 | "script": None,
115 | "recipe": None,
116 | },
117 | "build": None,
118 | "patch": {"show": None, "save": None},
119 | }
120 | )
121 |
122 |
123 | def generate_patch(args):
124 | if len(args):
125 | cmd = args[0]
126 | else:
127 | cmd = "show"
128 |
129 | ref_dir = patching.create_reference_dir(build_context)
130 | patch_contents = patching.create_patch(
131 | os.path.join(ref_dir, "work"), build_context.config.work_dir
132 | )
133 | if patch_contents is None:
134 | console.print("[red]No difference found![/red]")
135 | else:
136 | console.print("\n")
137 | console.print(Rule("Diff Contents", end="\n\n"))
138 | console.print(Syntax(patch_contents, "diff"))
139 | console.print(Rule("", end="\n"))
140 |
141 | if cmd == "save" and patch_contents:
142 | if len(args) >= 2:
143 | fn = args[1]
144 | if not fn.endswith(".patch"):
145 | fn += ".patch"
146 | out_fn = Path(build_context.meta_path).parent / fn
147 | with open(out_fn, "w") as fo:
148 | fo.write(patch_contents)
149 | console.print(f"[green]Patch saved under: {out_fn}")
150 |
151 | data = yaml.load(open(build_context.meta_path))
152 | if "patches" in data["source"][0]:
153 | data["source"][0]["patches"].append(fn)
154 | else:
155 | data["source"][0]["patches"] = [fn]
156 |             with open(build_context.meta_path, "w") as fp:
157 |                 yaml.dump(data, fp)
158 | else:
159 | console.print("[red]Please give a patch name as third argument")
160 |
161 |
162 | cache_editor = None
163 |
164 |
165 | def get_editor():
166 | global cache_editor
167 |
168 | if os.environ.get("EDITOR"):
169 | return os.environ["EDITOR"]
170 | elif cache_editor:
171 | return cache_editor
172 | else:
173 | for e in ["subl", "code", "vim", "emacs", "nano"]:
174 | cmd = shutil.which(e)
175 | if cmd:
176 | cache_editor = cmd
177 | break
178 | return cache_editor
179 |
180 |
181 | def execute_tokens(token):
182 | if token[0] == "help":
183 | print_help()
184 | elif token[0] == "patch":
185 | generate_patch(token[1:])
186 | elif token[0] == "glob":
187 | glob_search(*token[1:])
188 | elif token[0] == "edit":
189 | if token[1] == "recipe":
190 | subprocess.call([get_editor(), build_context.meta_path])
191 | if token[1] == "script":
192 | subprocess.call(
193 | [get_editor(), os.path.join(build_context.path, "build.sh")]
194 | )
195 | elif token[1] == "file":
196 | if len(token) == 3:
197 | file = os.path.join(build_context.config.work_dir, token[2])
198 | else:
199 | file = build_context.config.work_dir
200 | subprocess.call([get_editor(), file])
201 |
202 | elif token[0] == "ls":
203 | # TODO add autocomplete
204 | color_arg = ""
205 |
206 | if platform.system() == "Darwin":
207 | color_arg = "-G"
208 | elif platform.system() == "Linux":
209 | color_arg = "--color=always"
210 |
211 | out = subprocess.check_output(
212 | [
213 | "ls",
214 | "-l",
215 | "-a",
216 | color_arg,
217 | os.path.join(build_context.config.work_dir, *token[1:]),
218 | ]
219 | )
220 | print(out.decode("utf-8", errors="ignore"))
221 | elif token[0] == "build":
222 | console.print("[yellow]Running build![/yellow]")
223 | raise BoaRunBuildException()
224 | elif token[0] == "exit":
225 | print("Exiting.")
226 | raise BoaExitException()
227 | else:
228 | console.print(f'[red]Could not understand command "{token[0]}"[/red]')
229 |
230 |
231 | async def input_coroutine():
232 | completer = get_completer()
233 | while True:
234 | with patch_stdout(raw=True):
235 | text = await session.prompt_async(
236 | "> ", bottom_toolbar=bottom_toolbar, completer=completer
237 | )
238 | token = text.split()
239 |
240 | if len(token) == 0:
241 | continue
242 |
243 | try:
244 | execute_tokens(token)
245 | except KeyboardInterrupt:
246 | pass
247 | except BoaExitException as e:
248 | raise e
249 | except BoaRunBuildException as e:
250 | raise e
251 | except Exception as e:
252 | console.print(e)
253 |
254 |
255 | async def watch_files_coroutine():
256 | if not watchgod_available:
257 | await asyncio.Future()
258 |
259 | async for changes in awatch(Path(build_context.meta_path).parent):
260 | console.print(
261 | "\n[green]recipe.yaml changed: rebuild by entering [/green][white]> [italic]build[/italic][/white]\n"
262 | )
263 |
264 |
265 | async def prompt_coroutine():
266 | result = "exit"
267 | exit_tui = False
268 |
269 | watch_files_task = asyncio.create_task(watch_files_coroutine())
270 | while not exit_tui:
271 | try:
272 | await input_coroutine()
273 | except EOFError:
274 | text = await session.prompt_async(
275 | "Do you really want to exit ([y]/n)? ", bottom_toolbar=bottom_toolbar
276 | )
277 | if text == "y" or text == "":
278 | exit_tui = True
279 | except BoaExitException:
280 | exit_tui = True
281 | except BoaRunBuildException:
282 | exit_tui = True
283 | result = "run_build"
284 | except KeyboardInterrupt:
285 | print("CTRL+C pressed. Use CTRL-D to exit.")
286 |
287 | watch_files_task.cancel()
288 | console.print("[yellow]Goodbye![/yellow]")
289 |
290 | return result
291 |
292 |
293 | async def enter_tui(context):
294 | global build_context
295 | build_context = context
296 |
297 | return await prompt_coroutine()
298 |
299 |
300 | async def main():
301 | class Bunch(object):
302 | def __init__(self, adict):
303 | self.__dict__.update(adict)
304 |
305 | meta = Bunch(
306 | {
307 | "meta_path": "/home/wolfv/Programs/recipes/micromamba-feedstock/recipe/recipe.yaml",
308 | "config": Bunch(
309 | {
310 | "work_dir": "/home/wolfv/miniconda3/conda-bld/micromamba_1603476359590/work/",
311 | "host_prefix": "/home/wolfv/miniconda3/conda-bld/micromamba_1603476359590/_h_env_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_placehold_/",
312 | "build_prefix": "/home/wolfv/miniconda3/conda-bld/micromamba_1603476359590/_build_env/",
313 | "recipe_dir": "/home/wolfv/Programs/recipes/micromamba-feedstock/recipe/",
314 | }
315 | ),
316 | }
317 | )
318 | return await enter_tui(meta)
319 |
320 |
321 | if __name__ == "__main__":
322 | asyncio.run(main())
323 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/assets/boa_header.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mamba-org/boa/183fef0cf7d18502da246a85379bd8e6423b1fb8/docs/assets/boa_header.png
--------------------------------------------------------------------------------
/docs/environment.yml:
--------------------------------------------------------------------------------
1 | name: docs
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - myst-parser
6 | - sphinx
7 | - sphinx-book-theme
8 | - sphinx-jsonschema
9 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/source/_static/style.css:
--------------------------------------------------------------------------------
1 | #boarecipev1 .table p
2 | {
3 | font-size: 0.75em;
4 | }
5 |
6 | #boarecipev1 .table td
7 | {
8 | padding: 0;
9 | }
10 |
11 | #boarecipev1 ul
12 | {
13 | padding-left: 3px;
14 | list-style: none;
15 | font-weight: bold;
16 | }
17 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | # import os
14 | # import sys
15 | # sys.path.insert(0, os.path.abspath('.'))
16 |
17 |
18 | # -- Project information -----------------------------------------------------
19 |
20 | project = "boa"
21 | copyright = "2020, QuantStack & boa contributors" # noqa
22 | author = "QuantStack & boa contributors"
23 |
24 | _version_py = "../../boa/_version.py"
25 | version_ns = {}
26 | exec(compile(open(_version_py).read(), _version_py, "exec"), version_ns)
27 | # The short X.Y version.
28 | version = "%i.%i" % version_ns["version_info"][:2]
29 | # The full version, including alpha/beta/rc tags.
30 | release = version_ns["__version__"]
31 |
32 | # -- General configuration ---------------------------------------------------
33 |
34 | # Add any Sphinx extension module names here, as strings. They can be
35 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
36 | # ones.
37 | extensions = ["myst_parser", "sphinx-jsonschema"]
38 |
39 | # Add any paths that contain templates here, relative to this directory.
40 | templates_path = ["_templates"]
41 |
42 | # List of patterns, relative to source directory, that match files and
43 | # directories to ignore when looking for source files.
44 | # This pattern also affects html_static_path and html_extra_path.
45 | exclude_patterns = []
46 |
47 |
48 | # -- Options for HTML output -------------------------------------------------
49 |
50 | # The theme to use for HTML and HTML Help pages. See the documentation for
51 | # a list of builtin themes.
52 | #
53 |
54 | html_theme = "sphinx_book_theme"
55 | html_logo = "_static/boa.svg"
56 | html_title = "documentation"
57 |
58 |
59 | # Add any paths that contain custom static files (such as style sheets) here,
60 | # relative to this directory. They are copied after the builtin static files,
61 | # so a file named "default.css" will overwrite the builtin "default.css".
62 | html_static_path = ["_static"]
63 | html_css_files = [
64 | "style.css",
65 | ]
66 |
--------------------------------------------------------------------------------
/docs/source/getting_started.md:
--------------------------------------------------------------------------------
1 | Getting started with boa
2 | ========================
3 |
4 | Installation
5 | ------------
6 |
7 | You can install boa from conda-forge:
8 |
9 | ```
10 | # using mamba
11 | mamba install boa -c conda-forge
12 |
13 | # using conda
14 | conda install boa -c conda-forge
15 | ```
16 |
17 | Basic Usage
18 | -----------
19 |
20 | Boa implements two main commands:
21 |
22 | - `boa build ...`
23 | - `conda mambabuild ...`
24 |
25 | ### mambabuild
26 |
27 | The `conda mambabuild` command is a "drop-in" replacement for the `conda build` command and uses the same recipes and configuration files as conda-build. The only difference is that it swaps out the package resolver to use `mamba` instead of conda, which makes environment resolution faster and helps when debugging resolution problems, since the error messages from mamba are usually easier to understand than the ones from conda.
28 |
29 | To use `conda mambabuild` just replace your existing command line usage of `conda build ...` with `conda mambabuild ...` (all the same arguments should function properly). For more information on the recipe spec and conda_build_config.yaml usage, please refer to the [conda-build documentation](https://docs.conda.io/projects/conda-build/en/latest/).
30 |
31 | ### boa build
32 |
33 | The boa package also implements a new recipe spec (described in the recipe spec section of this documentation). To build a package that follows this new convention, just use
34 |
35 | ```
36 | boa build mypackage
37 | ```
38 |
39 | The `boa build` command does not yet support all flags that can be used with conda-build and is still very much under active development.
40 |
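41 | For illustration, here is a minimal sketch of a recipe in the new spec (the package name, URL, and checksum below are placeholders, not a real package):
42 |
43 | ```
44 | context:
45 |   name: mypackage
46 |   version: "0.1.0"
47 |
48 | package:
49 |   name: "{{ name }}"
50 |   version: "{{ version }}"
51 |
52 | source:
53 |   - url: https://example.com/{{ name }}-{{ version }}.tar.gz
54 |     # placeholder checksum - replace with the real sha256 of the archive
55 |     sha256: "0000000000000000000000000000000000000000000000000000000000000000"
56 |
57 | build:
58 |   number: 0
59 |
60 | requirements:
61 |   host:
62 |     - python
63 |   run:
64 |     - python
65 | ```
66 |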
--------------------------------------------------------------------------------
/docs/source/index.md:
--------------------------------------------------------------------------------
1 | Welcome to boa's documentation!
2 | ===============================
3 |
4 | Boa is a package build tool for `.conda` packages. As such, it is an alternative to `conda-build`, but uses the more recently developed `mamba` package installer as a "backend". Additionally, boa implements a new and improved recipe spec, and also implements a `conda mambabuild ...` command to build "legacy" recipes with the faster mamba backend. This can help when debugging recipes, as the output of the mamba solver is often more readable than the one from conda.
5 |
6 | ```{toctree}
7 | :maxdepth: 2
8 | :caption: "Contents:"
9 |
10 | getting_started
11 | mambabuild
12 | recipe_spec
13 | ```
14 |
15 |
16 | Indices and tables
17 | ==================
18 |
19 | * {ref}`Index <genindex>`
20 | * {ref}`Search <search>`
21 |
22 |
23 |
--------------------------------------------------------------------------------
/docs/source/jsonschema_spec.md:
--------------------------------------------------------------------------------
1 | # Spec generated from JSON schema
2 |
3 | This represents the spec that is used to validate recipes against.
4 |
5 |
6 | ```{jsonschema} ../../boa/schemas/recipe.v1.json
7 | ```
8 |
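9 | As a minimal, hypothetical sketch (the file paths below are assumptions), a recipe could be checked against this schema with the `jsonschema` package:
10 |
11 | ```
12 | import json
13 |
14 | import jsonschema
15 | from ruamel.yaml import YAML
16 |
17 | # assumed locations - adjust to your checkout and recipe
18 | with open("boa/schemas/recipe.v1.json") as f:
19 |     schema = json.load(f)
20 | with open("recipe.yaml") as f:
21 |     recipe = YAML(typ="safe").load(f)
22 |
23 | # raises jsonschema.exceptions.ValidationError if the recipe does not conform
24 | jsonschema.validate(instance=recipe, schema=schema)
25 | ```
26 |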
--------------------------------------------------------------------------------
/docs/source/mambabuild.md:
--------------------------------------------------------------------------------
1 | The conda mambabuild command
2 | ============================
3 |
4 | Boa comes with an extension to `conda build`: the `conda mambabuild` command. It replaces the `conda` solver with the faster `mamba` solver. To learn more about mamba, visit the [mamba documentation](http://mamba.readthedocs.io).
5 |
6 | There are some benefits:
7 |
8 | 1. Faster solve speed: for complicated environments, mamba is significantly faster than conda and will speed up builds.
9 | 2. Better error messages: when you have an un-solvable environment, conda will print a large error message that is often hard to decipher. Mamba prints something that is easier to understand.
10 | 3. Full compatibility with existing recipes and the conda-build command line arguments -- since we're "monkeypatching" conda-build and only replacing the solver part.
11 |
12 | To use `conda mambabuild`, just install `boa` and call `conda mambabuild` with the same arguments that would be used with `conda build` (replacing `build` with `mambabuild`).
13 |
14 | For example:
15 |
16 | ```
17 | $ conda mambabuild libsolv -m libsolv/.ci_support/linux_64_.yaml --no-test
18 | ```
19 |
20 | Please refer to the [conda-build documentation](https://docs.conda.io/projects/conda-build/en/latest/) to learn about all the command line arguments that can be used.
21 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from pathlib import Path
3 | from setuptools import setup
4 |
5 | here = Path(__file__).parent.absolute()
6 |
7 | version_ns = {}
8 | with open(here.joinpath("boa", "_version.py")) as f:
9 | exec(f.read(), {}, version_ns)
10 |
11 | __version__ = version_ns["__version__"]
12 |
13 | deps = [
14 | "jinja2",
15 | "setuptools",
16 | "rich",
17 | "ruamel.yaml >=0.18.0",
18 | "json5",
19 | "watchgod",
20 | "prompt-toolkit",
21 | "joblib",
22 | "beautifulsoup4",
23 | "boltons",
24 | ]
25 |
26 | setup(
27 | name="boa",
28 | version=__version__,
29 | author="Wolf Vollprecht",
30 | author_email="wolf.vollprecht@quantstack.net",
31 | url="https://github.com/mamba-org/boa",
32 | license="BSD 3-clause",
33 | classifiers=[],
34 | description="The mamba-powered conda package builder",
35 | long_description=open("README.md").read(),
36 | packages=["boa", "boa.cli", "boa.core", "boa.tui", "boa.helpers"],
37 | entry_points={
38 | "console_scripts": [
39 | "conda-mambabuild = boa.cli.mambabuild:main",
40 | "boa = boa.cli.boa:main",
41 | ]
42 | },
43 | install_requires=deps,
44 | package_data={"boa": ["schemas/*.json"]},
45 | )
46 |
--------------------------------------------------------------------------------
/tests/env.yml:
--------------------------------------------------------------------------------
1 | name: test
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - python>=3.7
6 | - pip
7 | - boltons
8 | - conda
9 | - libmambapy >=1.5,<1.6
10 | - pytest
11 | - conda-build >=3.25
12 | - conda-index
13 | - ruamel
14 | - ruamel.yaml
15 | - rich
16 | - jsonschema
17 | - json5
18 | - beautifulsoup4
19 | - prompt-toolkit
20 | - watchgod
21 | - joblib
22 |
--------------------------------------------------------------------------------
/tests/lint.yml:
--------------------------------------------------------------------------------
1 | name: lint
2 | channels: [conda-forge]
3 | dependencies:
4 | - pre-commit
5 | - pip
6 |
--------------------------------------------------------------------------------
/tests/recipes-v2/environ/recipe.yaml:
--------------------------------------------------------------------------------
1 | context:
2 | name: "test_environ"
3 |
4 | package:
5 | name: "{{ name }}"
6 | version: '{{ environ.get("ENV_PKG_VERSION", "2.2") }}'
7 |
8 | build:
9 | number: 0
10 | script_env:
11 | KEY1: '{{ environ.get("KEY1", "TEST_KEY1_VALUE") }}'
12 | KEY2: JUST A VALUE
13 | script:
14 | sel(unix):
15 | - echo $KEY1 > $PREFIX/key1.txt
16 | - echo $KEY2 > $PREFIX/key2.txt
17 | sel(win):
18 | - "@echo off"
19 | - echo %KEY1% > %PREFIX%/key1.txt
20 | - echo %KEY2% > %PREFIX%/key2.txt
21 |
--------------------------------------------------------------------------------
/tests/recipes-v2/grayskull/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright 2023 Marcelo Duarte Trevisani
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/tests/recipes-v2/grayskull/recipe.yaml:
--------------------------------------------------------------------------------
1 | context:
2 | name: grayskull
3 | version: 0.7.3
4 |
5 | package:
6 | name: '{{ name|lower }}'
7 | version: '{{ version }}'
8 |
9 | source:
10 | - url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
11 | sha256: bbbefd3cbc240c74f22322fabf7862bd36108ac9b4c42a5121b4e68636eab0af
12 |
13 | build:
14 | number: 1
15 |
16 | steps:
17 | - package:
18 | name: grayskull
19 | build:
20 | # note: the script key has to be nested under build
21 | # script: build_base.sh
22 | # script: "${PYTHON} -m pip install . --no-deps -vv"
23 | script: "python -m pip install . --no-deps -vv"
24 | noarch: python
25 | entry_points:
26 | - grayskull = grayskull.__main__:main
27 | - greyskull = grayskull.__main__:main
28 | requirements:
29 | host:
30 | - pip
31 | - python >=3.7
32 | - setuptools >=30.3.0
33 | - setuptools_scm
34 | run:
35 | - colorama
36 | - rapidfuzz >=0.7.6
37 | - pip
38 | - progressbar2
39 | - python >=3.7
40 | - requests
41 | - ruamel.yaml >=0.15.3
42 | - ruamel.yaml.jinja2
43 | - setuptools >=30.3.0
44 | - stdlib-list
45 | - git
46 | test:
47 | imports:
48 | - grayskull
49 | commands:
50 | - pip check
51 | - grayskull --help
52 | - greyskull --help
53 | requires:
54 | - pip
55 | exists:
56 | site_packages:
57 | - grayskull
58 | bin:
59 | - grayskull
60 |
61 | - package:
62 | name: greyskull
63 | build:
64 | noarch: generic
65 | requirements:
66 | run:
67 | - '{{ pin_subpackage(name, max_pin="x.x.x") }}'
68 | test:
69 | imports:
70 | - grayskull
71 | commands:
72 | - grayskull --help
73 | - greyskull --help
74 |
75 | about:
76 | home: https://pypi.org/project/grayskull/
77 | summary: Project to generate recipes for conda.
78 | dev_url: https://github.com/marcelotrevisani/grayskull
79 | license: MIT
80 | license_file: LICENSE
81 |
82 | extra:
83 | recipe-maintainers:
84 | - marcelotrevisani
85 |
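86 | # note: with version 0.7.3, the pin_subpackage(name, max_pin="x.x.x") entry
87 | # above should render to roughly `grayskull >=0.7.3,<0.7.4` (the exact bound
88 | # conda-build emits may differ slightly)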
--------------------------------------------------------------------------------
/tests/recipes-v2/pin_compatible/recipe.yaml:
--------------------------------------------------------------------------------
1 | context:
2 | name: "test_pin_compatible"
3 |
4 | package:
5 | name: "{{ name }}"
6 | version: '1.2.3'
7 |
8 | build:
9 | number: 0
10 |
11 | requirements:
12 | host:
13 | - numpy >=1.20
14 | run:
15 | - "{{ pin_compatible('numpy', lower_bound='1.20') }}"
16 |
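17 | # note: pin_compatible('numpy', lower_bound='1.20') should render to a spec
18 | # like `numpy >=1.20,<2`, with the upper bound derived from the numpy version
19 | # actually present in the host environment (the exact bound may differ)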
--------------------------------------------------------------------------------
/tests/recipes-v2/xtensor/bld.bat:
--------------------------------------------------------------------------------
1 | cmake -G "NMake Makefiles" -D BUILD_TESTS=OFF -D CMAKE_INSTALL_PREFIX=%LIBRARY_PREFIX% %SRC_DIR%
2 | if errorlevel 1 exit 1
3 |
4 | nmake
5 | if errorlevel 1 exit 1
6 |
7 | nmake install
8 | if errorlevel 1 exit 1
9 |
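10 | rem cmd.exe does not abort on a failing command, so each step above is
11 | rem followed by `if errorlevel 1 exit 1` to propagate failures to the build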
--------------------------------------------------------------------------------
/tests/recipes-v2/xtensor/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | cmake ${CMAKE_ARGS} -DBUILD_TESTS=OFF -DCMAKE_INSTALL_PREFIX=$PREFIX -DCMAKE_INSTALL_LIBDIR=lib $SRC_DIR
4 | make install
5 |
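6 | # CMAKE_ARGS is expected to be set by the compiler activation scripts
7 | # (e.g. cross-compilation flags); it expands to nothing when unset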
--------------------------------------------------------------------------------
/tests/recipes-v2/xtensor/recipe.yaml:
--------------------------------------------------------------------------------
1 | context:
2 | name: xtensor
3 | version: 0.23.10
4 |
5 | package:
6 | name: '{{ name|lower }}'
7 | version: '{{ version }}'
8 |
9 | source:
10 | url: https://github.com/xtensor-stack/xtensor/archive/{{ version }}.tar.gz
11 | sha256: 2e770a6d636962eedc868fef4930b919e26efe783cd5d8732c11e14cf72d871c
12 |
13 | build:
14 | number: 0
15 |
16 | requirements:
17 | build:
18 | - '{{ compiler("cxx") }}'
19 | - cmake
20 | - sel(unix): make
21 | host:
22 | - xtl >=0.7,<0.8
23 | run:
24 | - xtl >=0.7,<0.8
25 | run_constrained:
26 | - xsimd >=7.4.8,<8
27 |
28 | test:
29 | commands:
30 | - sel(unix): test -d ${PREFIX}/include/xtensor
31 | - sel(unix): test -f ${PREFIX}/include/xtensor/xarray.hpp
32 | - sel(unix): test -f ${PREFIX}/lib/cmake/xtensor/xtensorConfig.cmake
33 | - sel(unix): test -f ${PREFIX}/lib/cmake/xtensor/xtensorConfigVersion.cmake
34 | - sel(win): if not exist %LIBRARY_PREFIX%\include\xtensor\xarray.hpp (exit 1)
35 | - sel(win): if not exist %LIBRARY_PREFIX%\lib\cmake\xtensor\xtensorConfig.cmake (exit 1)
36 | - sel(win): if not exist %LIBRARY_PREFIX%\lib\cmake\xtensor\xtensorConfigVersion.cmake (exit 1)
37 | exists:
38 | include:
39 | - xtensor
40 | - xtensor.hpp
41 | - xtensor/xarray.hpp
42 | cmake_find:
43 | - xtensor {{ version }}
44 | pkg_config:
45 | - xtensor
46 |
47 | about:
48 | home: https://github.com/xtensor-stack/xtensor
49 | license: BSD-3-Clause
50 | license_family: BSD
51 | license_file: LICENSE
52 | summary: The C++ tensor algebra library
53 | description: Multi-dimensional arrays with broadcasting and lazy computing
54 | doc_url: https://xtensor.readthedocs.io
55 | dev_url: https://github.com/xtensor-stack/xtensor
56 |
57 | extra:
58 | recipe-maintainers:
59 | - SylvainCorlay
60 | - JohanMabille
61 | - wolfv
62 | - davidbrochart
63 |
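64 | # note: v2 recipes use `sel(unix)` / `sel(win)` mappings where conda-build's
65 | # meta.yaml would use inline `# [unix]` selector comments; only entries whose
66 | # selector matches the platform being built are kept at render time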
--------------------------------------------------------------------------------
/tests/recipes/baddeps/meta.yaml:
--------------------------------------------------------------------------------
1 | {% set name = "stackvana-core" %}
2 | {% set version = "0.2021.43" %}
3 |
4 | package:
5 | name: {{ name|lower }}
6 | version: {{ version }}
7 |
8 | build:
9 | number: 0
10 |
11 | outputs:
12 | - name: stackvana-core-impl
13 | version: {{ version }}
14 | build:
15 | script:
16 | - echo "BUILDING IMPL" >> $PREFIX/stackvana-core-impl # [unix]
17 | - echo "BUILDING IMPL" >> %PREFIX%/stackvana-core-impl # [win]
18 | - name: stackvana-core
19 | version: {{ version }}
20 | run_exports:
21 | - {{ pin_subpackage('stackvana-core-impl', exact=True) }}
22 |
23 | requirements:
24 | run:
25 | - thispackagedoesnotexist >=100000000
26 |
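27 | # the run dependency above is deliberately unsatisfiable;
28 | # tests/test_mambabuild.py asserts the solver error names thispackagedoesnotexist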
--------------------------------------------------------------------------------
/tests/recipes/dep_error_has_constaint/meta.yaml:
--------------------------------------------------------------------------------
1 | package:
2 | name: dep_error_has_constraint
3 | version: 1.0
4 |
5 | requirements:
6 | host:
7 | - python_abi 3.10
8 | - python 3.11
9 |
--------------------------------------------------------------------------------
/tests/recipes/dep_error_needed_by/meta.yaml:
--------------------------------------------------------------------------------
1 | {% set name = "dep_error_needed_by" %}
2 |
3 | package:
4 | name: {{ name }}
5 | version: 1.0
6 |
7 | outputs:
8 | - name: {{ name }}_1
9 | requirements:
10 | run:
11 | - thispackagedoesnotexist
12 | - name: {{ name }}_2
13 | requirements:
14 | host:
15 | - {{ name }}_1
16 |
--------------------------------------------------------------------------------
/tests/recipes/dep_error_nothing_provides/meta.yaml:
--------------------------------------------------------------------------------
1 | package:
2 | name: dep_error_nothing_provides
3 | version: 1.0
4 |
5 | requirements:
6 | host:
7 | - thispackagedoesnotexist
8 |
--------------------------------------------------------------------------------
/tests/recipes/dep_error_package_requires/meta.yaml:
--------------------------------------------------------------------------------
1 | package:
2 | name: dep_error_package_requires
3 | version: 1.0
4 |
5 | requirements:
6 | host:
7 | - cython * py310*
8 | - python 3.11
9 |
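10 | # intentionally unsolvable: `cython * py310*` is a name/version/build-string
11 | # spec requiring a py310 build of cython, which conflicts with python 3.11;
12 | # tests/test_mambabuild.py asserts the solver error mentions both packages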
--------------------------------------------------------------------------------
/tests/recipes/jedi/meta.yaml:
--------------------------------------------------------------------------------
1 | {% set version = "0.17.2" %}
2 |
3 | package:
4 | name: jedi
5 | version: {{ version }}
6 |
7 | source:
8 | url: https://pypi.io/packages/source/j/jedi/jedi-{{ version }}.tar.gz
9 | sha256: 86ed7d9b750603e4ba582ea8edc678657fb4007894a12bcf6f4bb97892f31d20
10 |
11 | build:
12 | number: 1
13 | script: {{ PYTHON }} -m pip install . --no-deps
14 |
15 | requirements:
16 | host:
17 | - python
18 | - pip
19 | run:
20 | - python
21 | - parso >=0.7.0,<0.8.0
22 |
23 | test:
24 | imports:
25 | - jedi
26 | - jedi.api
27 | - jedi.common
28 | - jedi.inference
29 | - jedi.inference.compiled
30 | - jedi.inference.compiled.subprocess
31 | - jedi.inference.gradual
32 | - jedi.inference.value
33 | - jedi.plugins
34 |
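35 | # note: {{ PYTHON }} should expand to the host environment's python
36 | # interpreter, so the package is installed for the python version being
37 | # built against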
--------------------------------------------------------------------------------
/tests/recipes/multioutput/meta.yaml:
--------------------------------------------------------------------------------
1 | {% set name = "ocp" %}
2 | {% set version = "7.5.2beta" %}
3 | {% set occt_version = "=7.5.2" %}
4 |
5 | package:
6 | name: {{ name }}-split
7 | version: {{ version }}
8 |
9 | build:
10 | number: 0
11 |
12 | outputs:
13 | - name: ocp-devel
14 | build:
15 | script:
16 | - echo "BUILDING IMPL" >> $PREFIX/ocp-devel # [unix]
17 | - echo "BUILDING IMPL" >> %PREFIX%/ocp-devel # [win]
18 |
19 | - name: ocp
20 | build:
21 | script:
22 | - echo "BUILDING IMPL" >> $PREFIX/ocp # [unix]
23 | - echo "BUILDING IMPL" >> %PREFIX%/ocp # [win]
24 | requirements:
25 | host:
26 | - "{{ pin_subpackage('ocp-devel', exact=True) }}"
27 | run:
28 | - "{{ pin_subpackage('ocp-devel', exact=True) }}"
29 |
--------------------------------------------------------------------------------
/tests/recipes/multiple_license/LICENSE:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mamba-org/boa/183fef0cf7d18502da246a85379bd8e6423b1fb8/tests/recipes/multiple_license/LICENSE
--------------------------------------------------------------------------------
/tests/recipes/multiple_license/NOTICE.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mamba-org/boa/183fef0cf7d18502da246a85379bd8e6423b1fb8/tests/recipes/multiple_license/NOTICE.md
--------------------------------------------------------------------------------
/tests/recipes/multiple_license/meta.yaml:
--------------------------------------------------------------------------------
1 | package:
2 | name: multiple-license
3 | version: 1.0.0
4 |
5 | build:
6 | number: 0
7 | script:
8 | - echo "BUILDING IMPL" >> $PREFIX/multiple_license # [unix]
9 | - echo "BUILDING IMPL" >> %PREFIX%/multiple_license # [win]
10 |
11 | about:
12 | license: MIT
13 | license_family: MIT
14 | license_file:
15 | - LICENSE
16 | - NOTICE.md
17 |
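18 | # license_file accepts a single path or, as here, a list; each listed file
19 | # should end up bundled in the package (typically under info/licenses/)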
--------------------------------------------------------------------------------
/tests/recipes/stackvana/meta.yaml:
--------------------------------------------------------------------------------
1 | {% set name = "stackvana-core" %}
2 | {% set version = "0.2021.43" %}
3 |
4 | package:
5 | name: {{ name|lower }}
6 | version: {{ version }}
7 |
8 | build:
9 | number: 0
10 |
11 | outputs:
12 | - name: stackvana-core-impl
13 | version: {{ version }}
14 | build:
15 | script:
16 | - echo "BUILDING IMPL" >> $PREFIX/stackvana-core-impl # [unix]
17 | - echo "BUILDING IMPL" >> %PREFIX%/stackvana-core-impl # [win]
18 | - name: stackvana-core
19 | version: {{ version }}
20 | run_exports:
21 | - {{ pin_subpackage('stackvana-core-impl', exact=True) }}
22 |
23 | requirements:
24 | run:
25 | - {{ pin_subpackage('stackvana-core-impl', exact=True) }}
26 |
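27 | # note: exact=True should pin stackvana-core-impl to its exact version and
28 | # build string, and the run_exports entry propagates that pin to any package
29 | # that depends on stackvana-core at build time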
--------------------------------------------------------------------------------
/tests/test_boa_build.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from subprocess import check_call
3 | import sys
4 | import tarfile
5 | import json
6 | import os
7 |
8 | from pathlib import Path
9 |
10 | import pytest
11 |
12 |
13 | recipes_dir = pathlib.Path(__file__).parent / "recipes-v2"
14 | tests_dir = pathlib.Path(__file__).parent / "tests-v2"
15 |
16 |
17 | def test_build_recipes():
18 | recipes = [str(x) for x in recipes_dir.iterdir() if x.is_dir()]
19 | for recipe in recipes:
20 | check_call(["boa", "build", recipe])
21 |
22 |
23 | def test_build_notest():
24 | recipes = [str(x) for x in recipes_dir.iterdir() if x.is_dir()]
25 | recipe = recipes[0]
26 | check_call(["boa", "build", recipe, "--no-test"])
27 |
28 |
29 | def test_run_exports(tmp_path: Path):
30 | recipe = tests_dir / "runexports"
31 | check_call(["boa", "build", str(recipe), "--output-folder", str(tmp_path)])
32 |
33 | rex_a = next(tmp_path.rglob("**/rex-a*.tar.bz2"))
34 |
35 | with tarfile.open(rex_a) as fin:
36 | rexport = json.load(fin.extractfile("info/run_exports.json"))
37 | assert rexport["weak"]
38 | assert "strong" not in rexport
39 | assert rexport["weak"] == ["rex-exporter 0.1.*"]
40 |
41 | rex_b = next(tmp_path.rglob("**/rex-b*.tar.bz2"))
42 |
43 | with tarfile.open(rex_b) as fin:
44 | rexport = json.load(fin.extractfile("info/run_exports.json"))
45 | assert rexport["weak"]
46 | assert rexport["weak"] == ["rex-a 0.1.0.*"]
47 | assert rexport["strong"]
48 | assert rexport["strong"] == ["rex-exporter 0.1.*"]
49 |
50 | rexporter = next(tmp_path.rglob("**/rex-exporter*.tar.bz2"))
51 | with tarfile.open(rexporter) as fin:
52 | names = [x.name for x in fin.getmembers()]
53 | print(names)
54 | assert "info/run_exports.json" not in names
55 |
56 |
57 | @pytest.mark.skipif(sys.platform == "win32", reason="No pytorch on Windows")
58 | def test_build_with_channel_pins(tmp_path: Path):
59 | # Ensure that channel pins round trip correctly
60 | recipe = tests_dir / "metapackage-channel-pin"
61 | check_call(["boa", "build", str(recipe), "--output-folder", str(tmp_path)])
62 |
63 | channel_pins = next(tmp_path.rglob("**/metapackage-channel-pin*.tar.bz2"))
64 |
65 | with tarfile.open(channel_pins) as fin:
66 | info = json.load(fin.extractfile("info/index.json"))
67 | assert "conda-forge::pytorch" in info["depends"]
68 |
69 |
70 | def test_build_with_script_env(tmp_path: Path):
71 | # Ensure that script_env variables from the recipe and environment reach the build
72 | recipe = recipes_dir / "environ"
73 | os.environ["KEY1"] = "KEY1_RANDOM_VALUE"
74 | check_call(["boa", "build", str(recipe), "--output-folder", str(tmp_path)])
75 |
76 | result = next(tmp_path.rglob("**/test_environ*.tar.bz2"))
77 |
78 | with tarfile.open(result) as fin:
79 | key1 = fin.extractfile("key1.txt").read().decode("utf8").strip()
80 | assert key1 == "KEY1_RANDOM_VALUE"
81 | key2 = fin.extractfile("key2.txt").read().decode("utf8").strip()
82 | assert key2 == "JUST A VALUE"
83 |
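84 | # Note: these tests shell out to the `boa` CLI and fetch real sources, so
85 | # they assume boa is installed and the network is reachable; run them with
86 | # e.g. `pytest tests/test_boa_build.py` from the repository root.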
--------------------------------------------------------------------------------
/tests/test_helpers.py:
--------------------------------------------------------------------------------
1 | from boa.helpers.ast_extract_syms import ast_extract_syms
2 |
3 |
4 | def test_helpers():
5 | assert ast_extract_syms("vc <14") == ["vc"]
6 | assert ast_extract_syms("python > (3,6)") == ["python"]
7 | assert ast_extract_syms("somevar==(3,6)") == ["somevar"]
8 | assert ast_extract_syms("somevar<=linux") == ["somevar", "linux"]
9 | assert ast_extract_syms("target_platform == 'linux'") == ["target_platform"]
10 |
--------------------------------------------------------------------------------
/tests/test_mambabuild.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import sys
3 | from pathlib import Path
4 | from queue import Queue
5 | from subprocess import CalledProcessError, PIPE, Popen, check_call
6 | from threading import Thread
7 |
8 | recipes_dir = Path(__file__).parent / "recipes"
9 |
10 | dep_error_recipes = {
11 | str(recipes_dir / name): deps
12 | for name, *deps in (
13 | ("baddeps", "thispackagedoesnotexist"),
14 | ("dep_error_nothing_provides", "thispackagedoesnotexist"),
15 | ("dep_error_needed_by", "thispackagedoesnotexist", "dep_error_needed_by_1"),
16 | ("dep_error_package_requires", "python", "cython"),
17 | ("dep_error_has_constaint", "python=", "python_abi="),
18 | )
19 | }
20 | recipes = [
21 | str(x)
22 | for x in recipes_dir.iterdir()
23 | if x.is_dir() and str(x) not in dep_error_recipes
24 | ]
25 | notest_recipes = [str(recipes_dir / "baddeps")]
26 |
27 |
28 | def dep_error_capture_call(cmd):
29 | def capture(pipe, put):
30 | err_lines = []
31 | for line in iter(pipe.readline, ""):
32 | if err_lines or line.startswith(
33 | "conda_build.exceptions.DependencyNeedsBuildingError:"
34 | ):
35 | err_lines.append(line)
36 | put(line)
37 | put(None)
38 | put("".join(err_lines).replace("\n", ""))
39 | pipe.close()
40 |
41 | def passthrough(write, get):
42 | for line in iter(get, None):
43 | write(line)
44 |
45 | def create_thread(target, *args):
46 | return Thread(target=target, args=args, daemon=True)
47 |
48 | process = Popen(cmd, stderr=PIPE, close_fds=True, text=True)
49 | queue = Queue()
50 | capture_thread = create_thread(capture, process.stderr, queue.put)
51 | passthrough_thread = create_thread(passthrough, sys.stderr.write, queue.get)
52 | capture_thread.start()
53 | passthrough_thread.start()
54 | process.wait()
55 | capture_thread.join()
56 | passthrough_thread.join()
57 | if process.returncode:
58 | raise CalledProcessError(process.returncode, cmd, None, queue.get())
59 |
60 |
61 | @pytest.mark.parametrize("recipe,deps", dep_error_recipes.items())
62 | def test_build_dep_error_recipes(recipe, deps):
63 | with pytest.raises(CalledProcessError) as exc_info:
64 | dep_error_capture_call(["conda", "mambabuild", recipe])
65 | error = exc_info.value.stderr
66 | for dep in deps:
67 | assert f'MatchSpec("{dep}' in error
68 |
69 |
70 | @pytest.mark.parametrize("recipe", recipes)
71 | def test_build_recipes(recipe):
72 | check_call(["conda", "mambabuild", recipe])
73 |
74 |
75 | @pytest.mark.parametrize("recipe", notest_recipes)
76 | def test_build_notest(recipe):
77 | check_call(["conda", "mambabuild", recipe, "--no-test"])
78 |
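79 | # Implementation note: dep_error_capture_call tees conda-build's stderr
80 | # through a queue so it stays visible in real time, while the lines from the
81 | # DependencyNeedsBuildingError onward are collected and attached to the
82 | # CalledProcessError that test_build_dep_error_recipes inspects.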
--------------------------------------------------------------------------------
/tests/test_rendering.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | from subprocess import check_output
4 | import json
5 | import pytest
6 | from boa.core.run_build import extract_features, build_recipe
7 | from boa.core.utils import get_config
8 | import pathlib
9 |
10 |
11 | tests_path = pathlib.Path(__file__).parent / "variants"
12 |
13 |
14 | def test_extract_features():
15 | feats = extract_features("[static, ~xz, zlib, bzip2, ~something]")
16 | assert feats["static"] is True
17 | assert feats["xz"] is False
18 | assert feats["zlib"] is True
19 | assert feats["bzip2"] is True
20 | assert feats["something"] is False
21 |
22 | with pytest.raises(AssertionError):
23 | feats = extract_features("[static, ~xz, zlib, bzip2, ~something")
24 |
25 | with pytest.raises(AssertionError):
26 | feats = extract_features("static, ~xz, zlib, bzip2, ~something]")
27 |
28 | feats = extract_features("")
29 | assert feats == {}
30 |
31 |
32 | def get_target_platform():
33 | if sys.platform == "win32":
34 | return "win-64"
35 | else:
36 | return "linux-64"
37 |
38 |
39 | def get_outputs(
40 | cbcfname, recipename="recipe.yaml", folder="variant_test", cmd="render"
41 | ):
42 | recipe = tests_path / folder / recipename
43 | cbc_file = tests_path / folder / cbcfname
44 |
45 | variant = {"target_platform": get_target_platform()}
46 |
47 | cbc, config = get_config(".", variant, [cbc_file])
48 | cbc["target_platform"] = [variant["target_platform"]]
49 |
50 | sorted_outputs = build_recipe(
51 | cmd,
52 | recipe,
53 | cbc,
54 | config,
55 | selected_features={},
56 | notest=True,
57 | skip_existing=False,
58 | interactive=False,
59 | continue_on_failure=False,
60 | )
61 |
62 | return cbc, sorted_outputs
63 |
64 |
65 | def test_variants_zipping():
66 | cbc, sorted_outputs = get_outputs("cbc1.yaml")
67 | assert cbc == {
68 | "python": ["3.6", "3.7", "3.8"],
69 | "target_platform": [get_target_platform()],
70 | }
71 |
72 | expected_variants = ["python 3.6.*", "python 3.7.*", "python 3.8.*"]
73 |
74 | for o in sorted_outputs:
75 | assert o.name == "variant_test"
76 | assert o.version == "0.1.0"
77 | assert str(o.requirements["host"][0]) in expected_variants
78 | assert o.requirements["host"][0].from_pinnings is True
79 |
80 | cbc, sorted_outputs = get_outputs("cbc2.yaml")
81 | assert len(sorted_outputs) == 9
82 |
83 | cbc, sorted_outputs = get_outputs("cbc3.yaml")
84 |
85 | assert len(sorted_outputs) == 3
86 |
87 | expected_variants = [
88 | ["python 3.6.*", "pip 1.*"],
89 | ["python 3.7.*", "pip 2.*"],
90 | ["python 3.8.*", "pip 3.*"],
91 | ]
92 | got_variants = []
93 | for o in sorted_outputs:
94 | assert o.name == "variant_test"
95 | assert o.version == "0.1.0"
96 | got_variants.append([str(x) for x in o.requirements["host"]])
97 | assert o.requirements["host"][0].from_pinnings is True
98 | assert got_variants == expected_variants
99 |
100 | cbc, sorted_outputs = get_outputs("cbc4.yaml")
101 | got_variants = []
102 | for o in sorted_outputs:
103 | assert o.name == "variant_test"
104 | assert o.version == "0.1.0"
105 | got_variants.append([str(x) for x in o.requirements["host"]])
106 | assert o.requirements["host"][0].from_pinnings is True
107 | assert got_variants == expected_variants
108 |
109 | cbc, sorted_outputs = get_outputs("cbc3.yaml", "recipe2.yaml")
110 |
111 | assert len(sorted_outputs) == 3
112 |
113 | expected_variants = [
114 | ["python 3.6.*", "pip 1.*", "libxyz"],
115 | ["python 3.7.*", "pip 2.*", "libxyz"],
116 | ["python 3.8.*", "pip 3.*", "libxyz"],
117 | ]
118 | got_variants = []
119 | for o in sorted_outputs:
120 | assert o.name == "variant_test"
121 | assert o.version == "0.1.0"
122 | got_variants.append([str(x) for x in o.requirements["host"]])
123 | assert o.requirements["host"][0].from_pinnings is True
124 | assert got_variants == expected_variants
125 |
126 | cbc, sorted_outputs = get_outputs("cbc5.yaml", "recipe2.yaml")
127 |
128 | expected_variants = [
129 | ["python 3.6.*", "pip 1.*", "libxyz 5.*"],
130 | ["python 3.7.*", "pip 2.*", "libxyz 5.*"],
131 | ["python 3.6.*", "pip 1.*", "libxyz 6.*"],
132 | ["python 3.7.*", "pip 2.*", "libxyz 6.*"],
133 | ["python 3.6.*", "pip 1.*", "libxyz 7.*"],
134 | ["python 3.7.*", "pip 2.*", "libxyz 7.*"],
135 | ]
136 | got_variants = []
137 | for o in sorted_outputs:
138 | assert o.name == "variant_test"
139 | assert o.version == "0.1.0"
140 | got_variants.append([str(x) for x in o.requirements["host"]])
141 | assert o.requirements["host"][0].from_pinnings is True
142 |
143 | assert got_variants == expected_variants
144 |
145 | with pytest.raises(ValueError):
146 | cbc, sorted_outputs = get_outputs("cbc6.yaml", "recipe2.yaml")
147 |
148 | cbc, sorted_outputs = get_outputs("cbc7.yaml", "recipe2.yaml")
149 | expected_variants = [
150 | ["python 3.6.*", "pip 1.*", "libxyz 5.*"],
151 | ["python 3.7.*", "pip 2.*", "libxyz 6.*"],
152 | ]
153 | got_variants = []
154 | for o in sorted_outputs:
155 | assert o.name == "variant_test"
156 | assert o.version == "0.1.0"
157 | got_variants.append([str(x) for x in o.requirements["host"]])
158 | assert o.requirements["host"][0].from_pinnings is True
159 |
160 | assert got_variants == expected_variants
161 |
162 |
163 | def test_variants():
164 | cbc, sorted_outputs = get_outputs("cbc1.yaml", folder="underscores")
165 | assert cbc["abseil_cpp"] == ["20200225.2"]
166 | assert cbc["arpack"] == ["3.6.3"]
167 |
168 | expected_variants = [
169 | "abseil-cpp 20200225.2.*",
170 | "arrow-cpp 0.17.*",
171 | "boost-cpp 1.72.0.*",
172 | ]
173 |
174 | for o in sorted_outputs:
175 | assert o.name == "underscores"
176 | assert o.version == "0.1.0"
177 | print(o.requirements)
178 | assert str(o.requirements["host"][0]) in expected_variants
179 | assert o.requirements["host"][0].from_pinnings is True
180 |
181 | cbc, sorted_outputs = get_outputs(
182 | "cbc2.yaml", "recipe2.yaml", folder="underscores", cmd="full-render"
183 | )
184 |
185 |
186 | def test_compiler():
187 | cbc, sorted_outputs = get_outputs("cbc_default.yaml", folder="compiler_test")
188 | for o in sorted_outputs:
189 | assert o.name == "compiler_test"
190 | assert o.version == "0.1.0"
191 | print(o.requirements)
192 | c_comp = str(o.requirements["build"][0])
193 | assert c_comp.rsplit("_", 1)[1] == get_target_platform()
194 | if sys.platform == "linux":
195 | assert c_comp in {"gcc_linux-64", "gxx_linux-64"}
196 | assert (
197 | str(o.requirements["build"][1]).rsplit("_", 1)[1] == get_target_platform()
198 | )
199 | assert (
200 | str(o.requirements["build"][2]).rsplit("_", 1)[1] == get_target_platform()
201 | )
202 | assert o.requirements["build"][0].from_pinnings is True
203 |
204 | cbc, sorted_outputs = get_outputs("compilers.yaml", folder="compiler_test")
205 | expected_compilers = [
206 | f"customcompiler_{get_target_platform()} 11*",
207 | f"fortranisstillalive_{get_target_platform()} 2000*",
208 | f"cppcompiler_{get_target_platform()} 200*",
209 | ]
210 | for o in sorted_outputs:
211 | assert o.name == "compiler_test"
212 | assert o.version == "0.1.0"
213 | print(o.requirements)
214 | comps = [str(x) for x in o.requirements["build"]]
215 | assert sorted(comps) == sorted(expected_compilers)
216 |
217 |
218 | def call_render(recipe):
219 | os.chdir(recipe.parent)
220 |
221 | print(["boa", "render", str(recipe.resolve()), "--json"])
222 | out = check_output(["boa", "render", str(recipe.resolve()), "--json"])
223 | out = out.decode("utf8")
224 | print(out)
225 | return json.loads(out)
226 |
227 |
228 | recipe_tests_path = pathlib.Path(__file__).parent / "recipes-v2"
229 |
230 |
231 | def test_environ():
232 | out = call_render(recipe_tests_path / "environ" / "recipe.yaml")
233 | assert out[0]["name"] == "test_environ"
234 | assert out[0]["version"] == "2.2"
235 |
236 | os.environ["ENV_PKG_VERSION"] = "100.2000"
237 | out = call_render(recipe_tests_path / "environ" / "recipe.yaml")
238 | assert out[0]["version"] == "100.2000"
239 |
--------------------------------------------------------------------------------
/tests/tests-v2/metapackage-channel-pin/recipe.yaml:
--------------------------------------------------------------------------------
1 | package:
2 | name: "metapackage-channel-pin"
3 | version: "0.1.0"
4 |
5 | build:
6 | number: 0
7 |
8 | requirements:
9 | run:
10 | - conda-forge::pytorch
11 |
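12 | # the `channel::package` form pins pytorch to the conda-forge channel; the
13 | # spec should round-trip unchanged into the built package's info/index.json
14 | # depends list (see tests/test_boa_build.py::test_build_with_channel_pins)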
--------------------------------------------------------------------------------
/tests/tests-v2/runexports/recipe.yaml:
--------------------------------------------------------------------------------
1 | package:
2 | name: "rex-a"
3 | version: "0.1.0"
4 |
5 | build:
6 | number: 0
7 |
8 | outputs:
9 | - package:
10 | name: rex-a
11 | version: "0.1.0"
12 |
13 | build:
14 | run_exports:
15 | - "{{ pin_subpackage('rex-exporter', max_pin='x.x') }}"
16 |
17 | - package:
18 | name: rex-b
19 | version: "0.1.0"
20 | build:
21 | run_exports:
22 | strong:
23 | - "{{ pin_subpackage('rex-exporter', max_pin='x.x') }}"
24 | weak:
25 | - "{{ pin_subpackage('rex-a', max_pin='x.x.x') }}"
26 |
27 | - package:
28 | name: rex-exporter
29 | version: "0.1.0"
30 |
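31 | # note: a bare run_exports list is treated as "weak" (applied when the
32 | # exporting package is a host dependency), while entries under "strong" also
33 | # apply when it is a build dependency; tests/test_boa_build.py::test_run_exports
34 | # checks the generated info/run_exports.json of each output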
--------------------------------------------------------------------------------
/tests/variants/compiler_test/cbc_default.yaml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mamba-org/boa/183fef0cf7d18502da246a85379bd8e6423b1fb8/tests/variants/compiler_test/cbc_default.yaml
--------------------------------------------------------------------------------
/tests/variants/compiler_test/compilers.yaml:
--------------------------------------------------------------------------------
1 | c_compiler:
2 | - customcompiler
3 | c_compiler_version:
4 | - 11
5 | cxx_compiler:
6 | - cppcompiler
7 | cxx_compiler_version:
8 | - 200
9 | fortran_compiler:
10 | - fortranisstillalive
11 | fortran_compiler_version:
12 | - 2000
13 |
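14 | # with these values, `{{ compiler('c') }}` should render to
15 | # `customcompiler_<target_platform> 11*` (compiler name, an underscore, the
16 | # target platform, then the version as a fuzzy pin); see tests/test_rendering.py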
--------------------------------------------------------------------------------
/tests/variants/compiler_test/recipe.yaml:
--------------------------------------------------------------------------------
1 | context:
2 | name: compiler_test
3 | version: 0.1.0
4 |
5 | package:
6 | name: '{{ name|lower }}'
7 | version: '{{ version }}'
8 |
9 | source:
10 | url: https://some.url/pkg.tar.gz
11 | sha256: bbbefd3cbc240c74f22322fabf7862bd36108ac9b4c42a5121b4e68636eab0af
12 |
13 | build:
14 | number: 1
15 |
16 | requirements:
17 | build:
18 | - "{{ compiler('cxx') }}"
19 | - "{{ compiler('c') }}"
20 | - "{{ compiler('fortran') }}"
21 |
--------------------------------------------------------------------------------
/tests/variants/underscores/cbc1.yaml:
--------------------------------------------------------------------------------
1 | abseil_cpp:
2 | - '20200225.2'
3 | arb:
4 | - 2.17
5 | arpack:
6 | - 3.6.3
7 | arrow_cpp:
8 | - 0.17.0
9 | boost:
10 | - 1.72.0
11 | boost_cpp:
12 | - 1.72.0
13 |
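14 | # variant keys use underscores where package names use dashes (abseil_cpp
15 | # pins abseil-cpp, boost_cpp pins boost-cpp); boa normalizes the names when
16 | # matching requirements against these entries (see tests/test_rendering.py)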
--------------------------------------------------------------------------------
/tests/variants/underscores/cbc2.yaml:
--------------------------------------------------------------------------------
1 | pin_run_as_build:
2 | boost_cpp:
3 | max_pin: x.x.x
4 | pyarrow:
5 | max_pin: x.x.x
6 | arrow_cpp:
7 | max_pin: x.x.x
8 | python:
9 | - 3.6.* *_cpython
10 | - 3.8.* *_cpython
11 | boost_cpp:
12 | - '1.74'
13 | - '1.74'
14 | numpy:
15 | - '1.16'
16 | - '1.16'
17 | arrow_cpp:
18 | - '2.0.0'
19 | - '3.0.0'
20 | pyarrow:
21 | - '2.0.0'
22 | - '3.0.0'
23 | zip_keys:
24 | - - arrow_cpp
25 | - pyarrow
26 | - boost_cpp
27 | - numpy
28 |
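29 | # pin_run_as_build should turn the listed host dependencies into matching
30 | # run pins (here at x.x.x precision), and zip_keys makes arrow_cpp, pyarrow,
31 | # boost_cpp and numpy vary together instead of expanding into a cross product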
--------------------------------------------------------------------------------
/tests/variants/underscores/recipe.yaml:
--------------------------------------------------------------------------------
1 | context:
2 | name: underscores
3 | version: 0.1.0
4 |
5 | package:
6 | name: '{{ name|lower }}'
7 | version: '{{ version }}'
8 |
9 | source:
10 | url: https://some.url/pkg.tar.gz
11 | sha256: bbbefd3cbc240c74f22322fabf7862bd36108ac9b4c42a5121b4e68636eab0af
12 |
13 | build:
14 | number: 1
15 |
16 | requirements:
17 | host:
18 | - abseil-cpp
19 | - arrow-cpp
20 | - arpack
21 | - boost-cpp
22 |
--------------------------------------------------------------------------------
/tests/variants/underscores/recipe2.yaml:
--------------------------------------------------------------------------------
1 | context:
2 | name: underscores
3 | version: 0.1.0
4 |
5 | package:
6 | name: '{{ name|lower }}'
7 | version: '{{ version }}'
8 |
9 | source:
10 | url: https://some.url/pkg.tar.gz
11 | sha256: bbbefd3cbc240c74f22322fabf7862bd36108ac9b4c42a5121b4e68636eab0af
12 |
13 | build:
14 | number: 1
15 |
16 | requirements:
17 | host:
18 | - abseil-cpp
19 | - arrow-cpp
20 | - boost-cpp
21 | - pyarrow
22 | - numpy
23 |
24 | run:
25 | - abseil-cpp
26 | - arrow-cpp
27 | - boost-cpp
28 | - pyarrow
29 | - numpy
30 |
--------------------------------------------------------------------------------
/tests/variants/variant_test/cbc1.yaml:
--------------------------------------------------------------------------------
1 | python:
2 | - 3.6
3 | - 3.7
4 | - 3.8
5 |
--------------------------------------------------------------------------------
/tests/variants/variant_test/cbc2.yaml:
--------------------------------------------------------------------------------
1 | python:
2 | - 3.6
3 | - 3.7
4 | - 3.8
5 | pip:
6 | - 1
7 | - 2
8 | - 3
9 |
--------------------------------------------------------------------------------
/tests/variants/variant_test/cbc3.yaml:
--------------------------------------------------------------------------------
1 | zip_keys:
2 | -
3 | - python
4 | - pip
5 |
6 | python:
7 | - 3.6
8 | - 3.7
9 | - 3.8
10 | pip:
11 | - 1
12 | - 2
13 | - 3
14 |
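15 | # zip_keys makes python and pip vary together, so this file should yield 3
16 | # variants (3.6/1, 3.7/2, 3.8/3) instead of the 3x3 cross product produced by
17 | # cbc2.yaml (compare test_variants_zipping in tests/test_rendering.py)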
--------------------------------------------------------------------------------
/tests/variants/variant_test/cbc4.yaml:
--------------------------------------------------------------------------------
1 | zip_keys:
2 | -
3 | - python
4 | - pip
5 | - numpy
6 |
7 | python:
8 | - 3.6
9 | - 3.7
10 | - 3.8
11 | pip:
12 | - 1
13 | - 2
14 | - 3
15 | numpy:
16 | - 5
17 | - 6
18 | - 7
19 |
--------------------------------------------------------------------------------
/tests/variants/variant_test/cbc5.yaml:
--------------------------------------------------------------------------------
1 | zip_keys:
2 | -
3 | - python
4 | - pip
5 |
6 | python:
7 | - 3.6
8 | - 3.7
9 | pip:
10 | - 1
11 | - 2
12 | libxyz:
13 | - 5
14 | - 6
15 | - 7
16 |
--------------------------------------------------------------------------------
/tests/variants/variant_test/cbc6.yaml:
--------------------------------------------------------------------------------
1 | zip_keys:
2 | -
3 | - python
4 | - pip
5 | - libxyz
6 |
7 | python:
8 | - 3.6
9 | - 3.7
10 | pip:
11 | - 1
12 | - 2
13 | libxyz:
14 | - 5
15 | - 6
16 | - 7
17 |
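18 | # intentionally invalid: zipped keys need lists of equal length, but python
19 | # and pip have 2 entries while libxyz has 3, so rendering raises ValueError
20 | # (asserted in tests/test_rendering.py); cbc7.yaml is the corrected variant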
--------------------------------------------------------------------------------
/tests/variants/variant_test/cbc7.yaml:
--------------------------------------------------------------------------------
1 | zip_keys:
2 | -
3 | - python
4 | - pip
5 | - libxyz
6 |
7 | python:
8 | - 3.6
9 | - 3.7
10 | pip:
11 | - 1
12 | - 2
13 | libxyz:
14 | - 5
15 | - 6
16 |
--------------------------------------------------------------------------------
/tests/variants/variant_test/recipe.yaml:
--------------------------------------------------------------------------------
1 | context:
2 | name: variant_test
3 | version: 0.1.0
4 |
5 | package:
6 | name: '{{ name|lower }}'
7 | version: '{{ version }}'
8 |
9 | source:
10 | url: https://some.url/pkg.tar.gz
11 | sha256: bbbefd3cbc240c74f22322fabf7862bd36108ac9b4c42a5121b4e68636eab0af
12 |
13 | build:
14 | number: 1
15 |
16 | requirements:
17 | host:
18 | - python
19 | - pip
20 | run:
21 | - python
22 | - pip
23 |
--------------------------------------------------------------------------------
/tests/variants/variant_test/recipe2.yaml:
--------------------------------------------------------------------------------
1 | context:
2 | name: variant_test
3 | version: 0.1.0
4 |
5 | package:
6 | name: '{{ name|lower }}'
7 | version: '{{ version }}'
8 |
9 | source:
10 | url: https://some.url/pkg.tar.gz
11 | sha256: bbbefd3cbc240c74f22322fabf7862bd36108ac9b4c42a5121b4e68636eab0af
12 |
13 | build:
14 | number: 1
15 |
16 | requirements:
17 | host:
18 | - python
19 | - pip
20 | - libxyz
21 | run:
22 | - python
23 | - pip
24 |
--------------------------------------------------------------------------------