├── .git-blame-ignore-revs
├── .github
│   └── workflows
│       ├── docs.yaml
│       ├── lint.yaml
│       ├── main.yaml
│       └── publish.yaml
├── .gitignore
├── LICENSE
├── MANIFEST.in
├── README.md
├── bin
│   └── stack-config
├── development.md
├── docs
│   ├── build-caches.md
│   ├── building.md
│   ├── cluster-config.md
│   ├── configuring.md
│   ├── development.md
│   ├── index.md
│   ├── installing.md
│   ├── interfaces.md
│   ├── readme.md
│   ├── recipes.md
│   ├── stylesheets
│   │   └── extra.css
│   └── tutorial.md
├── lint
├── mkdocs.yml
├── pyproject.toml
├── requirements-docs.txt
├── requirements.txt
├── serve
├── setup.cfg
├── stackinator
│   ├── __init__.py
│   ├── builder.py
│   ├── cache.py
│   ├── etc
│   │   ├── Make.inc
│   │   ├── add-compiler-links.py
│   │   ├── bwrap-mutable-root.sh
│   │   └── envvars.py
│   ├── main.py
│   ├── plan.md
│   ├── recipe.py
│   ├── schema.py
│   ├── schema
│   │   ├── cache.json
│   │   ├── compilers.json
│   │   ├── config.json
│   │   └── environments.json
│   ├── spack_util.py
│   └── templates
│       ├── Make.user
│       ├── Makefile
│       ├── Makefile.compilers
│       ├── Makefile.environments
│       ├── Makefile.generate-config
│       ├── compilers.bootstrap.spack.yaml
│       ├── compilers.gcc.spack.yaml
│       ├── compilers.llvm.spack.yaml
│       ├── environments.spack.yaml
│       ├── repos.yaml
│       └── stack-debug.sh
├── test_stackinator.py
└── unittests
    ├── .gitignore
    ├── data
    │   └── arbor-uenv
    │       ├── env
    │       │   ├── arbor
    │       │   │   └── activate.sh
    │       │   └── develop
    │       │       └── activate.sh
    │       └── meta
    │           ├── configure.json
    │           ├── env.json.in
    │           └── recipe
    │               ├── compilers.yaml
    │               ├── config.yaml
    │               ├── environments.yaml
    │               └── modules.yaml
    ├── recipes
    │   ├── base-amdgpu
    │   │   ├── compilers.yaml
    │   │   ├── config.yaml
    │   │   ├── environments.yaml
    │   │   └── modules.yaml
    │   ├── base-nvgpu
    │   │   ├── compilers.yaml
    │   │   ├── config.yaml
    │   │   ├── environments.yaml
    │   │   └── modules.yaml
    │   ├── cache
    │   │   ├── compilers.yaml
    │   │   ├── config.yaml
    │   │   ├── environments.yaml
    │   │   └── modules.yaml
    │   ├── host-recipe
    │   │   ├── compilers.yaml
    │   │   ├── config.yaml
    │   │   ├── environments.yaml
    │   │   ├── modules.yaml
    │   │   ├── post-install
    │   │   └── pre-install
    │   ├── unique-bootstrap
    │   │   ├── compilers.yaml
    │   │   ├── config.yaml
    │   │   ├── environments.yaml
    │   │   └── modules.yaml
    │   └── with-repo
    │       ├── compilers.yaml
    │       ├── config.yaml
    │       ├── environments.yaml
    │       └── repo
    │           └── packages
    │               └── dummy
    │                   └── package.py
    ├── test-envvars.sh
    ├── test_schema.py
    └── yaml
        ├── compilers.defaults.yaml
        ├── compilers.full.yaml
        ├── config.defaults.yaml
        ├── config.full.yaml
        ├── environments.err-providers.yaml
        └── environments.full.yaml

--------------------------------------------------------------------------------
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
# .git-blame-ignore-revs
# Format entire codebase with black and isort
dbd21f12d58d1511cdcac6eb927f8cd73e371568

--------------------------------------------------------------------------------
/.github/workflows/docs.yaml:
--------------------------------------------------------------------------------
name: publish docs

on:
  push:
    branches:
      - feature/docs
      - main
permissions:
  contents: write
jobs:
  deploy:
    if: github.repository == 'eth-cscs/stackinator'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: 3.x
      - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
      - uses: actions/cache@v3
        with:
          key: mkdocs-material-${{ env.cache_id }}
          path: .cache
          restore-keys: |
            mkdocs-material-
      - run: pip install --upgrade pip
      - run: |
          pip install mkdocs-material
      - run: rm -rf site
      - run: mkdocs gh-deploy --force
      - run: mkdocs --version

--------------------------------------------------------------------------------
/.github/workflows/lint.yaml:
--------------------------------------------------------------------------------
name: lint

on: [push, pull_request]

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
      - name: ruff
        run: |
          uvx ruff format --check
          uvx ruff check

--------------------------------------------------------------------------------
/.github/workflows/main.yaml:
--------------------------------------------------------------------------------
name: Stackinator CI

on: [push, pull_request]

jobs:
  unittestpy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install uv
        run: |
          curl -LsSf https://astral.sh/uv/install.sh | sh
      - name: Generic Unittests
        run: |
          ./test_stackinator.py

--------------------------------------------------------------------------------
/.github/workflows/publish.yaml:
--------------------------------------------------------------------------------
name: Deploy and Publish Stackinator to PyPI

on:
  release:
    types: [prereleased, released]

jobs:
  publish:
    if: github.repository == 'eth-cscs/stackinator'
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.6
        uses: actions/setup-python@v4
        with:
          python-version: 3.6
      - name: Generate dist packages
        run: |
          python -m pip install --upgrade pip setuptools build
          python -m build
      - name: Publish Stackinator to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          verbose: true
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# path used to test internally
work

# path used to bootstrap environment
external

__pycache__

# log files
log_config*

# vim working files
.*.sw[opnq]

# generated by mkdocs
site

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
Copyright 2022-2024 Eidgenössische Technische Hochschule Zürich (ETHZ)

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
graft stackinator/etc
graft stackinator/schema
graft stackinator/share
graft stackinator/templates

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Stackinator

A tool for building a scientific software stack from a recipe for vClusters on CSCS' Alps infrastructure.

Read the [documentation](https://eth-cscs.github.io/stackinator/) to get started.

Create a ticket in our [GitHub issues](https://github.com/eth-cscs/stackinator/issues) if you find a bug, have a feature request or have a question.

--------------------------------------------------------------------------------
/bin/stack-config:
--------------------------------------------------------------------------------
#!/usr/bin/env -S uv run --script
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "jinja2",
#     "jsonschema",
#     "pyYAML",
# ]
# ///

import pathlib
import sys

prefix = pathlib.Path(__file__).parent.parent.resolve()
sys.path = [prefix.as_posix()] + sys.path

from stackinator.main import main

# Once we've set up the system path, run the tool's main method
if __name__ == "__main__":
    sys.exit(main())

--------------------------------------------------------------------------------
/development.md:
--------------------------------------------------------------------------------
# Developer Notes

## Versioning

During initial development there will be frequent breaking changes to the
software, and the major release number will be incremented frequently.
We follow semantic versioning with a `MAJOR.MINOR` numbering scheme, and do
not use `PATCH` versions during the initial disruptive development.

* `2.0` - a new version with breaking changes from the `1.x` series.
* `2.1` - an update with bug fixes and non-breaking extensions and improvements to `2.0`.
* `2.2-dev` - the version after `2.1` has been released. The next released version
  will be either `2.3` or `3.0`.

The version is defined in the `VERSION` file in the root of the project,
and this file should be used as the input for any process that needs
version information.

## Pull Requests

The PR description should describe the changes being made in the PR, and
be updated to reflect changes made during the review process.

Use "squash and merge" when merging PRs, so that each commit to the `master`
branch is a tested and approved version of the tool.
For the commit message, use the text of the PR description.

--------------------------------------------------------------------------------
/docs/build-caches.md:
--------------------------------------------------------------------------------
# Build Caches

Stackinator facilitates using Spack's binary build caches to speed up image builds.
Build caches are essential if you plan to build images regularly, as they generally lead to a roughly 10x speed up.
This is the difference between half an hour and 3 minutes to build a typical image.

## Using Build Caches

To use a build cache, create a simple YAML file:

```yaml title='cache-config.yaml'
root: $SCRATCH/uenv-cache
key: $SCRATCH/.keys/spack-push-key.gpg
```

To use the cache, pass the configuration as an option to `stack-config` via the `-c/--cache` flag:

```bash
stack-config -b $build_path -r $recipe_path -s $system_config -c cache-config.yaml
```

??? warning "If you are using an old binary build cache"
    Since v3, Stackinator creates a sub-directory in the build cache for each mount point.
    For example, in the above example, the build cache for the `/user-environment` mount point would be `$SCRATCH/uenv-cache/user-environment`.
    The rationale for this is that packages for different mount points are not mixed, to avoid having to relocate binaries.

    To continue using a build cache from before v3, first copy the `build_cache` path to a sub-directory, e.g.:

    ```bash
    mkdir $SCRATCH/uenv-cache/user-environment
    mv $SCRATCH/uenv-cache/build_cache $SCRATCH/uenv-cache/user-environment
    ```

### Read-only caches

A build cache can be configured to be read-only by not providing a `key` in the cache configuration file.

## Creating a Build Cache

To create a build cache we need two things:

1. An empty directory where the cache will be populated by Spack.
2. A private PGP key
    * Only required for Stackinator to push packages to the cache when it builds a package that was not in the cache.

Creating the cache directory is easy! For example, to create a cache on your scratch storage:
```bash
mkdir $SCRATCH/uenv-cache
```

### Generating Keys

An installation of Spack can be used to generate the key file:

```bash
# create a key
spack gpg create <name> <e-mail>

# export key
spack gpg export --secret spack-push-key.gpg
```

See the [spack documentation](https://spack.readthedocs.io/en/latest/getting_started.html#gpg-signing) for more information about GPG keys.

### Managing Keys

The key needs to be in a location that is accessible during the build process, and that is secure.
To keep your PGP key secret, you can generate it and then move it to a path with appropriate permissions.
In the example below, we create a path `.keys` for storing the key:
```bash
# create a .keys path that is visible only to you
mkdir $SCRATCH/.keys
chmod 700 $SCRATCH/.keys

# generate the key
spack gpg create <name> <e-mail>
spack gpg export --secret $SCRATCH/.keys/spack-push-key.gpg
chmod 600 $SCRATCH/.keys/spack-push-key.gpg
```

The cache configuration would look like the following, where we assume that the cache is in `$SCRATCH/uenv-cache`:
```yaml
root: $SCRATCH/uenv-cache
key: $SCRATCH/.keys/spack-push-key.gpg
```
!!! warning
    Don't blindly copy this documentation's advice on security settings -- check that the permissions and key handling meet your site's security requirements.

!!! failure "Don't use `$HOME`"
    Don't put the keys in `$HOME`, because the build process remounts `~` as a tmpfs, and you will get error messages that Spack can't read the key.

## Force pushing to build cache

When build caches are enabled, all packages in each Spack environment are pushed to the build cache after the whole environment has been built successfully -- nothing will be pushed to the cache if there is an error when building one of the packages.

When debugging a recipe, where failing builds have to be run multiple times, the overheads of rebuilding all packages from scratch can be wasteful.
To force push all packages that have been built, use the `cache-force` makefile target:

```bash
env --ignore-environment PATH=/usr/bin:/bin:`pwd`/spack/bin make cache-force
```

--------------------------------------------------------------------------------
/docs/building.md:
--------------------------------------------------------------------------------
# Building Spack Stacks

Once a stack has been [configured](configuring.md) using `stack-config`, it's time to build the software stack.

## How to Build

The configuration generates a build path, with a top-level `Makefile` that performs the build.

```
# configure the build
stack-config --build $BUILD_PATH ...

# perform the build
cd $BUILD_PATH
env --ignore-environment PATH=/usr/bin:/bin:`pwd`/spack/bin make modules store.squashfs -j32
```

The call to `make` is wrapped with `env --ignore-environment` to unset all environment variables, which improves the reproducibility of builds.

Build times for stacks typically vary between 30 minutes and 3 hours, depending on the specific packages that have to be built.
Using [build caches](build-caches.md) and building in shared memory (see below) are the most effective methods to speed up builds.

## Where to Build

Spack detects the CPU μ-arch of the node that it is run on, and configures the packages to target it.
In order to ensure the best results, build the stack on a compute node with the target architecture, not a login node.

!!! alps
    Alps vClusters often have different CPU μ-archs on login nodes (zen2) and compute nodes (zen3).

Build times can be significantly reduced by creating the build path in memory, for example in `/dev/shm/$USER/build`, so that all of the dependencies are built and stored in memory, instead of on a slower shared file system.

!!! alps
    All of the Cray EX nodes on Alps have 512 GB of memory, which is sufficient for building software stacks, though it is important that the memory is cleaned up, preferably via an automated policy.
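
Putting the pieces together, a typical in-memory build looks like the following sketch (the paths and `-j` level are illustrative; `$RECIPE_PATH` and `$SYSTEM_CONFIG_PATH` are placeholders for your recipe and cluster configuration):

```bash
# configure and build entirely in memory
BUILD_PATH=/dev/shm/$USER/build
stack-config --build $BUILD_PATH --recipe $RECIPE_PATH --system $SYSTEM_CONFIG_PATH

cd $BUILD_PATH
env --ignore-environment PATH=/usr/bin:/bin:`pwd`/spack/bin make modules store.squashfs -j32

# copy the result to persistent storage, then free the memory
cp store.squashfs $SCRATCH/
cd && rm -rf /dev/shm/$USER/build
```
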
!!! warning
    Take care to remove the build path when building in shared memory -- otherwise it will reduce the amount of memory available to later users of the node, because some clusters do not automatically clean up `/dev/shm` on compute nodes -- and `/dev/shm` is only cleared on login nodes when they are reset.

--------------------------------------------------------------------------------
/docs/cluster-config.md:
--------------------------------------------------------------------------------
# Cluster Configuration

Spack stacks are built on bare-metal clusters using a minimum of dependencies from the underlying system.
A cluster configuration is a directory with the following structure:

```
/path/to/cluster/configuration
├─ compilers.yaml     # system compiler
├─ packages.yaml      # external system packages
├─ concretiser.yaml
└─ repos.yaml         # optional reference to additional site packages
```

The configuration is provided during the [configuration](configuring.md) step with the `--system/-s` flag.
The following example targets the Clariden system at CSCS:

```bash
git clone git@github.com:eth-cscs/alps-cluster-config.git
stack-config --system ./alps-cluster-config/clariden --recipe <recipe-path> --build <build-path>
```

!!! alps
    The CSCS _official configurations_ for vClusters on Alps are maintained in a GitHub repository [github.com/eth-cscs/alps-cluster-config](https://github.com/eth-cscs/alps-cluster-config).

    Software stacks provided by CSCS will only use the official configuration, and support will only be provided for user-built stacks that used the official configuration.

If there are additional system packages that you want to use in a recipe, consider adding a `packages.yaml` file to the recipe, in which you can define additional external packages.

!!! warning
    Only use external dependencies that are strictly necessary:

    * the more dependencies, the more potential there is for software stacks to have to be rebuilt when the system is updated, and the more potential there is for breaking changes;
    * the external packages are part of the Spack upstream configuration generated with the stack - you might be constraining the choices of downstream users.

## Site and System Configurations

The `repos.yaml` configuration can be used to provide a list of additional Spack package repositories to use on the target system.
These are applied automatically to every recipe built on the target cluster.

To provide site-wide defaults, links to additional package repositories can be provided in the cluster definition.
For example, the following definition would link to a set of site-wide package definitions:

```yaml
repos:
  - ../site/repo
```

The paths are always interpreted as relative to the system configuration.
This is designed to encourage putting cluster definitions and the site description in the same git repository.

```
/path/to/cluster-configs
├─ my_cluster
│  ├─ compilers.yaml
│  ├─ packages.yaml
│  ├─ concretiser.yaml
│  └─ repos.yaml    # refers to ../site/repo
└─ site
   └─ repo          # the site wide repo
      └─ packages
```

!!! alps
    The site-wide package definitions on Alps are maintained in the [alps-cluster-config repository](https://github.com/eth-cscs/alps-cluster-config/tree/master/site/repo).
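
Both the cluster configuration and recipes use the standard Spack `packages` schema to declare external system packages. A minimal, hedged sketch of a `packages.yaml` entry (the package and version shown are illustrative, not part of any official configuration):

```yaml
packages:
  git:
    buildable: false      # never build git from source
    externals:
      - spec: git@2.39.1  # the version installed on the system
        prefix: /usr
```
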

## Package Precedence

If custom package definitions are provided for the same package in more than one location, Stackinator has to choose which definition to use.

The following precedence is applied, where 1 has higher precedence than 2 or 3:

1. packages defined in the (optional) `repo` path in the [recipe](recipes.md#custom-spack-packages);
2. packages defined in the (optional) site repo(s) referenced by the `repos.yaml` file of the cluster configuration (documented here);
3. packages provided by Spack (in the `var/spack/repos/builtin` path).

As of Stackinator v4, the definitions of some custom repositories (mainly CSCS' custom cray-mpich and its dependencies) were removed from Stackinator, and moved to the site configuration.

--------------------------------------------------------------------------------
/docs/configuring.md:
--------------------------------------------------------------------------------
# Configuring Spack Stacks

Stackinator generates the make files and spack configurations that build the spack environments that are packaged together in the spack stack.
It can be thought of as equivalent to calling `cmake` or `configure`, performed using the `stack-config` CLI tool:

```bash
# configure the build
./bin/stack-config --build $BUILD_PATH --recipe $RECIPE_PATH --system $SYSTEM_CONFIG_PATH
```

The following flags are required:

* `-b/--build`: the path where the [build](building.md) is to be performed.
* `-r/--recipe`: the path with the [recipe](recipes.md) yaml files that describe the environment.
* `-s/--system`: the path containing the [system configuration](cluster-config.md) for the target cluster.

The following flags are optional:

* `-c/--cache`: configure the [build cache](build-caches.md).
* `--develop`: introduce compatibility with Spack's `develop` branch (can't be used with `--spack-version`, see below).
* `--spack-version`: explicitly set the Spack version used for template configuration (can't be used with `--develop`, see below).
* `-m/--mount`: override the [mount point](installing.md) where the stack will be installed.
* `--version`: print the stackinator version.
* `-h/--help`: print the help message.

## Support for different versions of Spack

Stackinator supports the latest two or three minor versions of Spack, while trying to keep track of the latest changes in the `develop` branch of Spack, which will be included in the next release.

!!! note
    Currently v0.21, v0.22 and v0.23 of Spack are supported.

    The next official version will be v1.0 -- for which Stackinator will most likely drop support for all of the v0.2x versions.

By default, Stackinator will inspect the name of the `spack:commit` field in the `config.yaml` recipe file to determine the Spack version (e.g. `releases/v0.23` would set `spack_version="0.23"`).
This default can be overridden with:

* the `--develop` flag, which sets `spack_version` to the version of the next release;
* the `--spack-version` option, through which the version can be set explicitly.

Explicitly setting the Spack version using either `--develop` or `--spack-version` is recommended when using a commit or branch of Spack from which it is not possible for `stack-config` to infer the correct version.

```bash
# configure the build
# the recipe's config.yaml uses a Spack commit later than the latest release
stack-config --build $BUILD_PATH --recipe $RECIPE_PATH --system $SYSTEM_CONFIG_PATH --develop

# configure the templates for compatibility with Spack v0.23
stack-config --build $BUILD_PATH --recipe $RECIPE_PATH --system $SYSTEM_CONFIG_PATH --spack-version=0.23

# v0.24 is the next version of Spack, so this is equivalent to using the --develop flag
stack-config --build $BUILD_PATH --recipe $RECIPE_PATH --system $SYSTEM_CONFIG_PATH --spack-version=0.24
```

!!! note
    Spack's `develop` branch is supported on a best-effort basis, and Stackinator might be broken by upstream changes in Spack before we notice them. If you notice that Spack's `develop` breaks the Stackinator tool, please open an issue and we will introduce the required workaround for `--develop`.

--------------------------------------------------------------------------------
/docs/development.md:
--------------------------------------------------------------------------------
# Development

This page is for developers and maintainers of Stackinator.

## Debug environment

Debugging stack builds can be challenging, because the build uses an environment with paths mounted and remounted using bwrap, and different environment variables than the calling shell.

A helper script that will open a new shell with the same environment as the stack build is generated in the build path.
The script, `stack-debug.sh`, can be sourced to start the new bash shell:

```bash
user@hostname:/dev/shm/project-build > source ./stack-debug.sh
build-env >>>
```

The new shell has `spack` in its path, and has the store path mounted at the environment's mount point.
To finish debugging, exit the shell with `exit` or ctrl-d.

--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
# Stackinator

A tool for building a scientific software stack from a recipe on HPE Cray EX systems.

It is used to build software stacks for vClusters on the Alps infrastructure at CSCS.

## Getting Stackinator

### From GitHub (recommended)

To get the latest version, download directly from GitHub.

``` bash
git clone https://github.com/eth-cscs/stackinator.git
cd stackinator
./bootstrap.sh
```

!!! warning
    The `main` branch of Stackinator includes features for Spack v1.0, and may break older recipes.

    For existing recipes that use Spack v0.23 and earlier, use [version 5](#versions):

    ```bash
    git clone --branch=releases/v5 https://github.com/eth-cscs/stackinator.git
    ```

The `bootstrap.sh` script will install the necessary dependencies, so that Stackinator can be run as a standalone application.

Once installed, add the `bin` sub-directory to your path:

```bash
export PATH="<stackinator-install-path>/bin:$PATH"
```

### Using Pip

Stackinator is available on PyPI:

```
pip install stackinator
```

!!! warning
    The PyPI package is only updated for releases, so you will likely be missing the latest and greatest features.
    Let us know if you need more regular PyPI updates.
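
pip can also install directly from the git repository, which is one way to track a release branch without waiting for a PyPI release; for example, a sketch for the maintained v5 branch:

```bash
pip install git+https://github.com/eth-cscs/stackinator.git@releases/v5
```
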

### Versions

Stackinator version 6 will be the first release of Stackinator to support Spack 1.0, which is scheduled for release in June 2025.
There will be significant changes introduced in Spack 1.0, which will require making some non-trivial changes to Stackinator, and possibly adding breaking changes to the Stackinator recipe specification.

The git branch `releases/v5` will be maintained to provide support for Spack versions 0.21, 0.22 and 0.23, and for existing recipes.

The `main` branch of Stackinator will contain the ongoing development work targeting Spack 1.0.

!!! warning
    After the release of version 5, the main development branch was changed from `master` to `main`.

## Quick Start

Stackinator generates the make files and spack configurations that build the spack environments that are packaged together in the spack stack.
It can be thought of as equivalent to calling `cmake` or `configure`, before running make to run the configured build.

```bash
# configure the build
stack-config --build $BUILD_PATH --recipe $RECIPE_PATH --system $SYSTEM_CONFIG_PATH
```

Where `BUILD_PATH` is the path where the build will be configured, `RECIPE_PATH` contains the [recipe](recipes.md) for the software stack, and `SYSTEM_CONFIG_PATH` is the [system configuration](cluster-config.md) for the cluster being targeted.

Once configured, the stack is built in the build path using make:

```bash
# build the spack stack
cd $BUILD_PATH
env --ignore-environment PATH=/usr/bin:/bin:`pwd`/spack/bin make modules store.squashfs -j64
```

See the documentation on [building Spack stacks](building.md) for more information.

Once the build has finished successfully, the software can be [installed](installing.md).

!!! alps
    On Alps the software stack can be tested using the [SquashFS](installing.md#squashfs-installation) image generated by the build:
    ```bash
    squashfs-mount store.squashfs:/user-environment -- bash
    ls /user-environment
    ```

--------------------------------------------------------------------------------
/docs/installing.md:
--------------------------------------------------------------------------------
# Installing Stacks

The installation path of the software stack is set when the stack is configured.

The default location for a recipe is set in the `store` field of `config.yaml` in the recipe:
```yaml title='config.yaml'
name: best-stack-ever
store: /user-environment
spack:
  commit: releases/v0.20
  repo: https://github.com/spack/spack.git
```

The installation path can be overridden using the `--mount/-m` flag of `stack-config`.
The software is built using rpaths hard-coded to the installation path, which simplifies dynamic linking (`LD_LIBRARY_PATH` does not have to be set at run time).

!!! alps
    For deployment on Alps, stacks should use the standard `/user-environment` mount point.

!!! warning
    Environments built for one mount point should not be mounted at a different location.
    If a new mount point is desired, rebuild the stack for the new mount point.
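
For example, to configure a recipe for a non-default location (the mount point below is purely illustrative):

```bash
stack-config --build $BUILD_PATH --recipe $RECIPE_PATH --system $SYSTEM_CONFIG_PATH --mount /apps/custom-env
```
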

## Installing the software

Running `make` to build the environment generates two versions of the software stack in the build path:
```
build_path
├─ store
└─ store.squashfs
```

### Shared file system installation

The `store` sub-directory contains the full software stack installation tree.

!!! note
    The "simplest" method for installing the software stack, which does not require installing additional tools to use the stack, is to copy the contents of `store` to the installation path.

### SquashFS installation

The `store.squashfs` file is a compressed [SquashFS](https://tldp.org/HOWTO/SquashFS-HOWTO/whatis.html) image of the contents of the `store` path.
This can be mounted at runtime using [`squashfs-mount`](https://github.com/eth-cscs/squashfs-mount) or the [Slurm plugin](https://github.com/eth-cscs/slurm-uenv-mount/), or mounted by a system administrator using [`mount`](https://man7.org/linux/man-pages/man2/mount.2.html), in order to take advantage of the benefits of SquashFS over shared file systems.

--------------------------------------------------------------------------------
/docs/interfaces.md:
--------------------------------------------------------------------------------
# Interfaces

Software stacks offer a choice of interfaces that can be presented to users.

## Spack Upstream

Every stack can be used as a Spack upstream for users of Spack on the system.
This means that users can access all of the software packages and custom recipes provided by a Spack stack directly in their Spack configuration.

The installation contains a [custom configuration scope](https://spack.readthedocs.io/en/latest/configuration.html#custom-scopes) in the `config` sub-directory, and additional information about custom Spack packages in the `repo` sub-directory.
For example, the Spack configuration is in the following files when a stack has been installed at the default `/user-environment` mount point:
```
/user-environment
├─ config
│  ├─ compilers.yaml
│  ├─ repos.yaml
│  ├─ packages.yaml
│  └─ upstreams.yaml
└─ repo
   ├─ repo.yaml
   └─ packages
      └─ cray-mpich
         └─ package.py
```

Notes on the configuration files:

* `upstreams.yaml`: registers the spack packages installed in the Spack stack, so that they will be found by the downstream user when searching for packages:
```yaml
upstreams:
  system:
    install_tree: /user-environment
```
* `compilers.yaml`: includes all compilers that were installed in the `gcc:` and `llvm:` sections of the `compilers.yaml` recipe file. Note that the `bootstrap` compiler is not included.
* `packages.yaml`: refers to the external packages that were used to configure the recipe: both the defaults in the cluster configuration, and any additional packages that were set in the recipe.
* `repos.yaml`: points to the custom Spack repository:
```yaml
repos:
  - /user-environment/repo
```

End users can use the Spack stack in their Spack installations in a variety of ways, including:
```bash
# set an environment variable
export SPACK_SYSTEM_CONFIG_PATH=/user-environment/config

# pass on command line to Spack
spack --config-scope /user-environment/config ...
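
# for example, query packages provided by the stack
# (the package name below is illustrative)
spack --config-scope /user-environment/config find hdf5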
```

See the [Spack documentation](https://spack.readthedocs.io/en/latest/configuration.html) for the diverse ways that custom configurations can be used.

The `repo` path contains the custom `cray-mpich` package configuration.
If the stack recipe provided additional custom packages, these will also be in sub-directories of `$install_path/repo/packages`.

## Modules

Module files can be provided as an optional interface, for users and use-cases that prefer or require them.

If modules are available, the generated module files are in the `modules` sub-directory of the installation path, and end users can make them available via `module use`:

```bash
# make the modules available
module use /user-environment/modules

# list the available modules
module avail

-------------------------- /user-environment/modules --------------------------
cmake/3.26.3   gcc/11.3.0      libtree/3.1.1              python/3.10.10
cray-mpich     hdf5/1.14.1-2   osu-micro-benchmarks/5.9   tree/2.1.0
```

## Environment Views

File system views are an optional way to provide the software from an environment in a directory structure similar to `/usr/local`, based on Spack's [filesystem views](https://spack.readthedocs.io/en/latest/environments.html#filesystem-views).
See the [recipe documentation](recipes.md#views) for details on how to configure views.

The views are created in the `env` path of the installation.
As an example, given two views named `default` and `no-python` for a stack installed in the standard `/user-environment` location, two directory trees named after the views are generated in `/user-environment/env`:

```
/user-environment
└─ env
   ├─ default
   │  ├─ bin
   │  ├─ lib
   │  ├─ ...
   │  └─ activate.sh
   └─ no-python
      ├─ bin
      ├─ lib
      ├─ ...
      └─ activate.sh
```

The `activate.sh` script in each view can be used to load the view by setting environment variables like `PATH`, `LD_LIBRARY_PATH`, `CPATH`, etc.

```bash
source /user-environment/env/no-python/activate.sh
```

!!! note
    Meta data about the environment views provided by a Spack stack is provided in the file `meta/env.json`.

--------------------------------------------------------------------------------
/docs/readme.md:
--------------------------------------------------------------------------------
The documentation for Stackinator is built from the markdown files in this path using MkDocs and MkDocs-material.
You can view the latest documentation online at [github.io](https://eth-cscs.github.io/stackinator/)

To view work in progress docs, run the serve script and follow the link it provides to view a local copy of the docs in your browser.
```bash
./serve
```

> [!IMPORTANT]
> to run the serve script, you need to first install [uv](https://docs.astral.sh/uv/getting-started/installation/).

--------------------------------------------------------------------------------
/docs/recipes.md:
--------------------------------------------------------------------------------
# Recipes

A recipe is a description of all of the compilers and software packages to be installed, along with configuration of the modules and environment scripts the stack will provide to users.
A recipe is comprised of the following yaml files in a directory:

* `config.yaml`: common configuration for the stack.
* `compilers.yaml`: the compilers provided by the stack.
* `environments.yaml`: environments that contain all the software packages.
* `modules.yaml`: _optional_ module generation rules
    * follows the spec for [spack module configuration](https://spack.readthedocs.io/en/latest/module_file_support.html)
* `packages.yaml`: _optional_ define external packages
    * follows the spec for [spack package configuration](https://spack.readthedocs.io/en/latest/build_settings.html)
* `repo`: _optional_ custom spack package definitions.
* `extra`: _optional_ additional meta data to copy to the meta data of the stack.
* `post-install`: _optional_ a script to run after Spack has been executed to build the stack.
* `pre-install`: _optional_ a script to run before any packages have been built.

## Configuration

```yaml title="config.yaml"
name: prgenv-gnu
store: /user-environment
spack:
  repo: https://github.com/spack/spack.git
  commit: releases/v0.20
modules: true
description: "HPC development tools for building MPI applications with the GNU compiler toolchain"
version: 2
```

* `name`: a plain text name for the environment
* `store`: the location where the environment will be mounted.
* `spack`: which spack repository to use for installation.
* `modules`: _optional_ enable/disable module file generation (default `true`).
* `description`: _optional_ a string that describes the environment (default empty).
* `version`: _default = 1_ the version of the uenv recipe (see below)

!!! note "uenv recipe versions"
    Stackinator 6 introduces breaking changes to the uenv recipe format, introduced to support Spack v1.0.

    We have started versioning uenv recipes:

    * **version 1**: original uenv recipes for Spack v0.23 and earlier, supported by Stackinator version 5.
    * **version 2**: uenv recipes for Spack v1.0 and later, supported by Stackinator version 6.

    The default version is 1, so that old recipes that do not set a version are supported.

!!! warning "You must set version 2 explicitly to use Spack v1.0"

!!! warning "Version 1 recipes must be configured using Stackinator v5"
    Version 5 of Stackinator is maintained in the `releases/v5` branch of stackinator.

    You must also use the `releases/v5` branch of [Alps cluster config](https://github.com/eth-cscs/alps-cluster-config).

## Compilers

Take an example configuration:
```yaml title="compilers.yaml"
bootstrap:
  spec: gcc@11
gcc:
  specs:
  - gcc@11
llvm:
  requires: gcc@11
  specs:
  - nvhpc@21.7
  - llvm@14
```

The compilers are built in multiple stages:

1. *bootstrap*: A bootstrap gcc compiler is built using the system compiler (currently gcc 4.7.5).
    * `bootstrap:spec`: a single spec of the form `gcc@version`.
    * The selected version should have full support for the target architecture in order to build optimised gcc toolchains in step 2.
2. *gcc*: The bootstrap compiler is then used to build the gcc version(s) provided by the stack.
    * `gcc:specs`: a list of at least one spec of the form `gcc@version`.
3. *llvm*: (optional) The nvhpc and/or llvm toolchains are built using one of the gcc toolchains installed in step 2.
    * `llvm:specs`: a list of specs of the form `nvhpc@version` or `llvm@version`.
    * `llvm:requires`: the version of gcc from step 2 that is used to build the llvm compilers.

The first two steps are required, so that the simplest stack will provide at least one version of gcc compiled for the target architecture.

!!! note
    Don't provide full specs, because the tool will insert "opinionated" specs for the target node type, for example:

    * `nvhpc@21.7` generates `nvhpc@21.7 ~mpi~blas~lapack`
    * `llvm@14` generates `llvm@14 +clang targets=x86 ~gold ^ninja@kitware`
    * `gcc@11` generates `gcc@11 build_type=Release +profiled +strip`

## Environments

The software packages to install using the compiler toolchains are configured as disjoint environments, each built with the same compiler, and configured with an optional implementation of MPI.
These are specified in the `environments.yaml` file.

For example, consider a workflow that has to build multiple applications - some of which require Fortran+OpenACC, and others that are CPU-only C code that can be built with GCC.
To provide a single Spack stack that meets the workflow's needs, we would create two environments, one for each of the `nvhpc` and `gcc` compiler toolchains:

```yaml title="environments.yaml high level overview"
# A GCC-based programming environment
prgenv-gnu:
  compiler:   # ... compiler toolchain
  mpi:        # ... mpi configuration
  deprecated: # ... whether to allow usage of deprecated packages or not
  unify:      # ... configure Spack concretizer
  specs:      # ... list of packages to install
  variants:   # ... variants to apply to packages (e.g. +mpi)
  packages:   # ... list of external packages to use
  views:      # ... environment views to provide to users
# An NVIDIA programming environment
prgenv-nvgpu:
  # ... same structure as prgenv-gnu
```

In the following sections, we will explore each of the environment configuration fields in detail.

### Compilers

The `compiler` field describes the list of compilers to use to build the software stack.
Each compiler toolchain is specified using a toolchain and a spec:

```yaml title="compile all packages with gcc@11"
compiler:
- toolchain: gcc
  spec: gcc@11
```

Sometimes two compiler toolchains are required, for example when using the `nvhpc` compilers, where there are often dependencies that can't be built using the NVIDIA compilers, or that are better built with GCC (for example `cmake`, `perl` and `netcdf-c`).
The example below uses the `nvhpc` compilers alongside `gcc@11`:

```yaml title="compile with both gcc@11 and nvhpc@22.7"
compiler:
- toolchain: gcc
  spec: gcc@11
- toolchain: llvm
  spec: nvhpc@22.7
```

!!! note
    If more than one version of gcc has been installed, use the same version that was used to install `nvhpc`.

!!! warning
    Stackinator does not test or support using two versions of gcc in the same toolchain.

!!! note
    It is generally advisable not to overspecify compiler versions; whenever possible, constrain at most the major version.

The order of the compilers is significant.
The first compiler is the default, and the other compilers will only be used to build packages when explicitly added to a spec.
For example, in the recipe below, only `netcdf-fortran` will be built with the `nvhpc` toolchain, while the root specs `cmake` and `netcdf-c` and all dependencies will be built using the `gcc` toolchain.

```yaml title="build only netcdf-fortran with the nvhpc toolchain"
compiler:
- toolchain: gcc
  spec: gcc
- toolchain: llvm
  spec: nvhpc
specs:
- cmake
- netcdf-c
- netcdf-fortran%nvhpc
```

!!! note
    This approach is typically used to build Fortran applications and packages with one toolchain (e.g. `nvhpc`), and all of the C/C++ dependencies with a different toolchain (e.g. `gcc`).

### MPI

Stackinator can configure cray-mpich (CUDA, ROCm, or non-GPU aware) on a per-environment basis, by setting the `mpi` field in an environment.

!!! note
    Future versions of Stackinator will support OpenMPI, MPICH and MVAPICH when (and if) they develop robust support for the HPE Slingshot 11 interconnect.

If the `mpi` field is not set, or is set to `null`, MPI will not be configured in an environment:
```yaml title="environments.yaml: no MPI"
serial-env:
  mpi: null
  # ...
```

To configure MPI without GPU support, set the `spec` field with an optional version:
```yaml title="environments.yaml: MPI without GPU support"
host-env:
  mpi:
    spec: cray-mpich@8.1.23
  # ...
```

GPU-aware MPI can be configured by setting the optional `gpu` field to specify whether to support `cuda` or `rocm` GPUs:
```yaml title="environments.yaml: GPU aware MPI"
cuda-env:
  mpi:
    spec: cray-mpich
    gpu: cuda
  # ...
rocm-env:
  mpi:
    spec: cray-mpich
    gpu: rocm
  # ...
```

!!! alps

    As new versions of cray-mpich are released with CPE, they are provided on Alps vClusters, via the Spack package repo in the [CSCS cluster configuration repo](https://github.com/eth-cscs/alps-cluster-config/tree/master/site/repo).
    The following versions of cray-mpich are currently provided:

    | cray-mpich | CPE   | notes                        |
    | :--------- | :---- | :--------------------------- |
    | 8.1.29     | 24.03 | pre-release                  |
    | 8.1.28     | 23.12 | released 2023-12 **default** |
    | 8.1.27     | 23.09 | released 2023-09             |
    | 8.1.26     | 23.06 | released 2023-06             |
    | 8.1.25     | 23.03 | released 2023-02-26          |
    | 8.1.24     | 23.02 | released 2023-01-19          |
    | 8.1.23     | 22.12 | released 2022-11-29          |
    | 8.1.21.1   | 22.11 | released 2022-10-25          |
    | 8.1.18.4   | 22.08 | released 2022-07-21          |

    All versions of cray-mpich in the table have been validated on Alps vClusters with Slingshot 11 and libfabric 1.15.2.

!!! note
    The `cray-mpich` spec is added to the list of package specs automatically, and all packages that use the virtual dependency `+mpi` will use this `cray-mpich`.

### Specs

The list of software packages to install is configured in the `specs:` field of an environment. The specs follow the [standard Spack practice](https://spack.readthedocs.io/en/latest/environments.html#spec-concretization).

The `deprecated:` field controls whether Spack should consider versions marked as deprecated, and can be set to `true` or `false` (to allow or disallow deprecated versions, respectively).

The `unify:` field controls the Spack concretiser, and can be set to one of the three values `true`, `false` or `when_possible`. For example:

```yaml
cuda-env:
  specs:
  - cmake
  - hdf5
  - python@3.10
  unify: true
```

To install more than one version of the same package, or to concretise some more challenging combinations of packages, you might have to relax the concretiser to `when_possible` or `false`.
For example, this environment provides `hdf5` both with and without MPI support:

```yaml
cuda-env:
  specs:
  - cmake
  - hdf5~mpi
  - hdf5+mpi
  - python@3.10
  unify: when_possible
```

!!! note
    Use `unify:true` when possible, then `unify:when_possible`, and finally `unify:false`.

!!! warning
    Don't provide a spec for MPI or compilers, which are configured in the [`mpi:`](recipes.md#mpi) and [`compilers:`](recipes.md#compilers) fields respectively.

!!! warning
    Stackinator does not support "spec matrices", and likely won't, because they use multiple compiler toolchains in a manner that is contrary to the Stackinator "keep it simple" principle.

### Packages

To specify external packages that should be used instead of building them, use the `packages` field.
For example, if the `perl` and `git` packages are build dependencies of an environment, and the versions that are available in the base CrayOS installation are sufficient, the following list would be specified:

```yaml title="environments.yaml: specifying external packages"
my-env:
  packages:
  - perl
  - git
```

!!! note
    If a package is not found, it will be built by Spack.

!!! note
    External packages specified in this manner will only be used when concretising this environment, and will not affect downstream users.

??? note "expand if you are curious how Stackinator configures Spack for packages"
    The following Spack call is used to generate `packages.yaml` in the Spack environment that Stackinator generates in the build path, to concretise and build the packages in the example above:

    ```bash title="Makefile target for external packages in an environment"
    packages.yaml:
        spack external find --not-buildable --scope=user perl git
    ```

### Variants

To specify variants that should be applied to all package specs in the environment by default (unless overridden explicitly in a package spec), use the `variants` field.
For example, to concretise all specs in an environment that support MPI or CUDA, targeting A100 GPUs, the following `variants` could be set:

```yaml title="environments.yaml: variants for MPI and CUDA on A100"
cuda-env:
  variants:
  - +mpi
  - +cuda
  - cuda_arch=80
```

??? note "expand if you are curious how Stackinator configures Spack for variants"
    The above will add the following to the generated `spack.yaml` file used internally by Spack.

    ```yaml title="spack.yaml: packages spec generated for variants"
    spack:
      packages:
        all:
          variants:
          - +mpi
          - +cuda
          - cuda_arch=80
    ```

### Views

File system views are an optional way to provide the software from an environment in a directory structure similar to `/usr/local`, based on Spack's [filesystem views](https://spack.readthedocs.io/en/latest/environments.html#filesystem-views).

Each environment can provide more than one view, and the structure of the YAML is the same as used by the version of Spack used to build the Spack stack.
For example, the `views` description:

```yaml
cuda-env:
  views:
    default:
    no-python:
      exclude:
      - 'python'
```

will configure two views:

* `default`: a view of all the software in the environment using the default settings of Spack.
* `no-python`: everything in the default view, except any versions of `python`.

Stackinator provides some additional options that are not provided by Spack for fine-tuning the view; these can be set in the `uenv:` field:

```yaml
cuda-env:
  views:
    uenv:
      add_compilers: true
      prefix_paths:
        LD_LIBRARY_PATH: [lib, lib64]
```

* `add_compilers` (default `true`): by default Spack will not add compilers to the `PATH` variable. Stackinator automatically adds `gcc` and/or `nvhpc` to `PATH`. This option can be used to explicitly disable or enable this feature.
* `prefix_paths` (default empty): this option can be used to customise prefix-style environment variables (`PATH`, `LD_LIBRARY_PATH`, `PKG_CONFIG_PATH`, `PYTHONPATH`, etc.).
    * the key is the environment variable, and the value is a list of paths to search for in the environment view. All paths that match an entry in the list will be prepended to the prefix path environment variable.
    * the main use for this feature is to opt in to setting `LD_LIBRARY_PATH`. By default Spack does not add `lib` and `lib64` to `LD_LIBRARY_PATH`, because that can break system installed applications that depend on `LD_LIBRARY_PATH` or on finding their dependencies in standard locations like `/usr/lib`.

See the [interfaces documentation](interfaces.md#environment-views) for more information about how the environment views are provided to users of a stack.

## Modules

Modules are generated for the installed compilers and packages by spack. The default module generation rules set by the version of spack specified in `config.yaml` will be used if no `modules.yaml` file is provided.

To set rules for module generation, provide a `modules.yaml` file as per the [spack documentation](https://spack.readthedocs.io/en/latest/module_file_support.html).

To disable module generation, set the field `config:modules:False` in `config.yaml`.

## Custom Spack Packages

An optional package repository can be added to a recipe to provide new or customized Spack packages, in addition to Spack's `builtin` package repository, if a `repo` path is provided in the recipe.
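
Each custom package definition is an ordinary Spack `package.py`. A minimal, hypothetical sketch of what such a file might contain (the package name, URL, placeholder checksum and CMake option are all illustrative, not a real package):

```python
# repo/packages/mylib/package.py -- "mylib" is a hypothetical example
from spack.package import *


class Mylib(CMakePackage):
    """Example library provided through a recipe's custom repo."""

    homepage = "https://example.org/mylib"
    url = "https://example.org/mylib-1.0.0.tar.gz"

    # placeholder checksum -- replace with the real sha256 of the tarball
    version("1.0.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

    variant("mpi", default=True, description="Build with MPI support")
    depends_on("mpi", when="+mpi")

    def cmake_args(self):
        # translate the spack variant into the package's CMake option
        return [self.define_from_variant("MYLIB_WITH_MPI", "mpi")]
```

The layout of the `repo` path that holds such files is described next.
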

For example, the following `repo` path will add custom package definitions for the `hdf5` and `nvhpc` packages:

```
repo
└─ packages
   ├─ hdf5
   │  └─ package.py
   └─ nvhpc
      └─ package.py
```

Additional custom packages can be provided as part of the cluster configuration, as well as additional site packages.
These packages are all optional, and will be installed together in a single Spack package repository that is made available to downstream users of the generated uenv stack.
See the documentation for [cluster configuration](cluster-config.md) for more detail.

!!! note
    If you need to backport a spack package from a more recent spack version, you can do it by using an already checked-out spack repository like this

    (disclaimer: the package might need adjustments due to changes in spack directives)

    ```
    # ensure to have the folder for custom packages in your recipe
    mkdir -p stackinator-recipe/repo/packages
    # switch to the already checked out spack repository
    cd $SPACK_ROOT
    # use git to extract package files into your "custom packages" section of the stackinator recipe
    git archive origin/develop `spack location -p fmt` | tar -x --strip-components=5 -C stackinator-recipe/repo/packages
    ```

    In the above case, the package `fmt` is backported from `origin/develop` into the `stackinator-recipe`.

!!! alps
    All packages are installed under a single spack package repository called `alps`.
    The CSCS configurations in [github.com/eth-cscs/alps-cluster-config](https://github.com/eth-cscs/alps-cluster-config) provide a site configuration that defines cray-mpich, its dependencies, and the most up-to-date versions of cuda, nvhpc, etc. to all clusters on Alps.

!!! warning
    Unlike Spack package repositories, any `repos.yaml` file in the `repo` path will be ignored.
    This is because the provided packages are added to the `alps` namespace.

## Post install configuration

If a script `post-install` is provided in the recipe, it will be run during the build process: after the stack has been built, and just before the final squashfs image is generated.
Post-install scripts can be used to modify or extend an environment with operations that can't be performed in Spack, for example:

* configure a license file;
* install additional software outside of Spack;
* generate activation scripts.
415 | 
416 | The following steps are effectively run during the build, where we assume that the recipe is in `$recipe` and the mount point is the default `/user-environment`:
417 | 
418 | ```bash
419 | # copy the post-install script to the mount point
420 | cp "$recipe"/post-install /user-environment
421 | chmod +x /user-environment/post-install
422 | 
423 | # apply Jinja templates (render to a temporary file so the input is not clobbered)
424 | jinja -d env.json /user-environment/post-install > post-install.rendered && mv post-install.rendered /user-environment/post-install
425 | 
426 | # execute the script from the mount point
427 | cd /user-environment
428 | /user-environment/post-install
429 | ```
430 | 
431 | The post-install script is templated using Jinja, with the following variables available for use in a script:
432 | 
433 | | Variable     | Description                                                                                 |
434 | | ------------ | ------------------------------------------------------------------------------------------- |
435 | | `env.mount`  | The mount point of the image (default `/user-environment`)                                  |
436 | | `env.config` | The installation tree of the Spack installation that was built in the previous steps        |
437 | | `env.build`  | The build path                                                                              |
438 | | `env.spack`  | The location of Spack used to build the software stack (only available during installation) |
439 | 
440 | The use of Jinja templates is demonstrated in the following example: a bash script that generates an activation script, which adds the installation path of GROMACS to `PATH`:
441 | 
442 | ```bash title="post-install script that generates a simple activation script."
443 | #!/bin/bash
444 | 
445 | gmx_path=$(spack -C {{ env.config }} location -i gromacs)/bin
446 | echo "export PATH=$gmx_path:$PATH" >> {{ env.mount }}/activate.sh
447 | ```
448 | 
449 | !!! note
450 |     The copy of Spack used to build the stack is available in the environment in which `post-install` runs, and can be called directly.
451 | 
452 | !!! note
453 |     The script does not have to be bash: it can be written in any scripting language, such as Python or Perl, that is available on the target system.
454 | 
455 | ## Pre-install configuration
456 | 
457 | As with the post-install hook, if a `pre-install` script is provided in the recipe, it will be run during the build process:
458 | 
459 | * directly after the initial test that Spack has been installed correctly;
460 | * directly before the build cache is configured and/or the first compiler environment is concretised.
461 | 
462 | The pre-install script is copied, templated and executed in the same way as the post-install hook (see above).
463 | 
464 | ## Meta-Data
465 | 
466 | Stackinator writes meta-data about the stack to the `meta` path of the installation path.
467 | A recipe can install arbitrary additional meta-data by providing an `extra` path, the contents of which will be copied to the `meta/extra` path in the installation path.
468 | 
469 | !!! alps
470 |     This is used to provide additional information required by ReFrame as part of the CI/CD pipeline for software stacks on Alps, defined in the [GitHub eth-cscs/alps-spack-stacks](https://github.com/eth-cscs/alps-spack-stacks) repository.
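For reference, the generated meta-data includes a `meta/configure.json` file that records how the stack was configured, as written by the builder in `stackinator/builder.py`. A sketch of its shape, where the field names follow what the builder writes and all values are illustrative placeholders:

```json title="meta/configure.json: illustrative sketch (placeholder values)"
{
  "cluster": "example-cluster",
  "host": {
    "machine": "x86_64",
    "node": "build-host",
    "processor": "x86_64",
    "release": "...",
    "system": "Linux",
    "version": "..."
  },
  "mount": "/user-environment",
  "spack": {
    "commit": "releases/v1.0",
    "repo": "https://github.com/spack/spack.git"
  },
  "stackinator": {
    "args": ["stack-config", "-r", "recipe", "-b", "/dev/shm/build", "-s", "system"],
    "python": "/usr/bin/python3",
    "version": "6.0.0-dev"
  },
  "time": "20250101 12:00:00"
}
```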
471 | 
--------------------------------------------------------------------------------
/docs/stylesheets/extra.css:
--------------------------------------------------------------------------------
 1 | :root {
 2 |   --md-admonition-icon--alps: url('data:image/svg+xml;charset=utf-8,')
 3 | }
 4 | .md-typeset .admonition.alps,
 5 | .md-typeset details.alps {
 6 |   border-color: rgb(255, 51, 51);
 7 | }
 8 | .md-typeset .alps > .admonition-title,
 9 | .md-typeset .alps > summary {
10 |   background-color: rgba(255, 51, 51, 0.1);
11 | }
12 | .md-typeset .alps > .admonition-title::before,
13 | .md-typeset .alps > summary::before {
14 |   background-color: rgb(255, 51, 51);
15 |   -webkit-mask-image: var(--md-admonition-icon--alps);
16 |   mask-image: var(--md-admonition-icon--alps);
17 | }
--------------------------------------------------------------------------------
/docs/tutorial.md:
--------------------------------------------------------------------------------
 1 | # Tutorial
 2 | 
 3 | !!! warning "TODO"
 4 |     write a tutorial that explains building an image step by step.
 5 | 
 6 | A Spack stack with everything needed to develop Arbor for the A100 nodes on Hohgant.
 7 | 
 8 | This guide walks through the process of configuring a Spack stack, building it, and using it.
 9 | 
10 | Arbor is a C++ library, with optional support for CUDA, MPI and Python. An Arbor developer would ideally have an environment that provides everything needed to build Arbor with these options enabled.
11 | 
12 | The full list of Spack packages needed to build a full-featured CUDA-enabled version is:
13 | 
14 | - MPI: `cray-mpich-binary`
15 | - compiler: `gcc@11`
16 | - Python: `python@3.10`
17 | - CUDA: `cuda@11.8`
18 | - `cmake`
19 | - `fmt`
20 | - `pugixml`
21 | - `nlohmann-json`
22 | - `random123`
23 | - `py-mpi4py`
24 | - `py-numpy`
25 | - `py-pybind11`
26 | - `py-sphinx`
27 | - `py-svgwrite`
28 | 
29 | For the compiler, we choose `gcc@11`, which is compatible with `cuda@11.8`.
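Putting this together, an `environments.yaml` along the following lines could describe such a stack. This is a sketch, not a tested recipe: treat the field names and versions as assumptions to adapt to the recipe schema documented in [recipes](recipes.md); `cray-mpich` is the MPI spec accepted by the current recipe schema, and `cuda_arch=80` targets the A100:

```yaml title="environments.yaml sketch for an Arbor development stack (untested)"
arbor-dev:
  compiler:
    - toolchain: gcc
      spec: gcc@11
  mpi:
    spec: cray-mpich
    gpu: cuda
  unify: true
  specs:
    - cmake
    - cuda@11.8
    - fmt
    - nlohmann-json
    - pugixml
    - python@3.10
    - py-mpi4py
    - py-numpy
    - py-pybind11
    - py-sphinx
    - py-svgwrite
    - random123
  variants:
    - +cuda
    - cuda_arch=80
  views:
    default:
```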
30 | -------------------------------------------------------------------------------- /lint: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | uvx ruff format 4 | uvx ruff check --fix 5 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Stackinator 2 | nav: 3 | - Home: index.md 4 | - 'User Guide': 5 | - 'Configuring Stacks': configuring.md 6 | - 'Building Stacks': building.md 7 | - 'Installing Stacks': installing.md 8 | - 'Recipes': recipes.md 9 | - 'Cluster Configuration': cluster-config.md 10 | - 'Interfaces': interfaces.md 11 | - 'Build Caches': build-caches.md 12 | - 'Development': development.md 13 | # - Tutorial: tutorial.md 14 | theme: 15 | name: material 16 | features: 17 | - content.code.copy 18 | - navigation.tabs 19 | palette: 20 | primary: deep orange 21 | accent: deep orange 22 | markdown_extensions: 23 | - attr_list # for internal links 24 | - admonition 25 | - pymdownx.details 26 | - pymdownx.superfences 27 | - pymdownx.highlight: 28 | anchor_linenums: true 29 | line_spans: __span 30 | pygments_lang_class: true 31 | - tables 32 | - toc: 33 | permalink: true 34 | extra_css: 35 | - stylesheets/extra.css 36 | repo_url: https://github.com/eth-cscs/stackinator 37 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "wheel"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.ruff] 6 | line-length = 120 7 | extend-exclude = ["external/", "unittests/recipes"] 8 | exclude = ["unittests/recipes/with-repo/repo/packages"] 9 | 10 | [tool.ruff.lint] 11 | select = ["E", "F"] 12 | ignore = ["E203"] 13 | -------------------------------------------------------------------------------- /requirements-docs.txt: -------------------------------------------------------------------------------- 1 | mkdocs 2 | mkdocs-material 3 | mkdocs-autorefs 4 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | Jinja2 2 | jsonschema 3 | pytest 4 | PyYAML 5 | -------------------------------------------------------------------------------- /serve: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # use uv to run mkdocs with mkdocs-material and its dependencies installed 4 | uv tool run --with-requirements ./requirements-docs.txt mkdocs ${@:-serve} 5 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = Stackinator 3 | version = attr: stackinator.VERSION 4 | author = Swiss National Supercomputing Center (CSCS/ETH Zurich) 5 | description = Stackinator is a tool for building a scientific software stack from a recipe for vClusters on CSCS' Alps infrastructure 6 | url = https://github.com/eth-cscs/stackinator 7 | license = BSD 3-Clause 8 | long_description = file: README.md 9 | long_description_content_type = text/markdown 10 | classifiers = 11 | Development Status :: 5 - Production/Stable 12 | Programming Language :: Python :: 3.6 13 | Programming Language :: Python :: 3.7 14 | Programming Language 
:: Python :: 3.8 15 | Programming Language :: Python :: 3.9 16 | Programming Language :: Python :: 3.10 17 | Programming Language :: Python :: 3.11 18 | License :: OSI Approved :: BSD License 19 | Operating System :: POSIX :: Linux 20 | Environment :: Console 21 | 22 | [options] 23 | packages = stackinator 24 | include_package_data = True 25 | python_requires = >=3.6 26 | scripts = bin/stack-config 27 | install_requires = 28 | Jinja2 29 | jsonschema 30 | PyYAML 31 | -------------------------------------------------------------------------------- /stackinator/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import re 3 | 4 | VERSION = "6.0.0-dev" 5 | root_logger = logging.getLogger("stackinator") 6 | 7 | stackinator_version_info = tuple(re.split(r"\.|-", VERSION)) 8 | -------------------------------------------------------------------------------- /stackinator/builder.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | import pathlib 4 | import platform 5 | import shutil 6 | import stat 7 | import subprocess 8 | import sys 9 | from datetime import datetime 10 | 11 | import jinja2 12 | import yaml 13 | 14 | from . import VERSION, cache, root_logger, spack_util 15 | 16 | 17 | def install(src, dst, *, ignore=None, symlinks=False): 18 | """Call shutil.copytree or shutil.copy2. copy2 is used if `src` is not a directory. 19 | Afterwards run the equivalent of chmod a+rX dst.""" 20 | 21 | def apply_permissions_recursive(directory): 22 | """Apply permissions recursively to an entire directory.""" 23 | 24 | def set_permissions(path): 25 | """Set permissions for a given path based on chmod a+rX equivalent.""" 26 | mode = os.stat(path).st_mode 27 | # Always give read permissions for user, group, and others. 28 | new_mode = mode | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH 29 | # If it's a directory or execute bit is set for owner or group, 30 | # set execute bit for all. 31 | if stat.S_ISDIR(mode) or mode & (stat.S_IXUSR | stat.S_IXGRP): 32 | new_mode |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH 33 | os.chmod(path, new_mode) 34 | 35 | set_permissions(directory) 36 | for dirpath, dirnames, filenames in os.walk(directory): 37 | for dirname in dirnames: 38 | set_permissions(os.path.join(dirpath, dirname)) 39 | for filename in filenames: 40 | set_permissions(os.path.join(dirpath, filename)) 41 | 42 | if stat.S_ISDIR(os.stat(src).st_mode): 43 | shutil.copytree( 44 | src, 45 | dst, 46 | ignore=ignore, 47 | symlinks=symlinks, 48 | ) 49 | else: 50 | shutil.copy2(src, dst, follow_symlinks=symlinks) 51 | # set permissions 52 | apply_permissions_recursive(dst) 53 | 54 | 55 | class Builder: 56 | def __init__(self, args): 57 | self._logger = root_logger 58 | path = pathlib.Path(args.build) 59 | if not path.is_absolute(): 60 | path = pathlib.Path.cwd() / path 61 | 62 | # check that if the path exists that it is not a file 63 | if path.exists(): 64 | if not path.is_dir(): 65 | raise IOError("build path is not a directory") 66 | 67 | parts = path.parts 68 | 69 | # the build path can't be root 70 | if len(parts) == 1: 71 | raise IOError("build path can't be root '/'") 72 | 73 | # the build path can't be in /tmp because the build step rebinds /tmp. 
74 | if parts[1] == "tmp": 75 | raise IOError("build path can't be in '/tmp'") 76 | 77 | # the build path can't be in $HOME because the build step rebinds $HOME 78 | # NOTE that this would be much easier to determine with PosixPath.is_relative_to 79 | # introduced in Python 3.9. 80 | home_parts = pathlib.Path.home().parts 81 | if (len(home_parts) <= len(parts)) and (home_parts == parts[: len(home_parts)]): 82 | raise IOError("build path can't be in '$HOME' or '~'") 83 | # if path.is_relative_to(pathlib.Path.home()): 84 | # raise IOError("build path can't be in '$HOME' or '~'") 85 | 86 | self.path = path 87 | self.root = pathlib.Path(__file__).parent.resolve() 88 | 89 | @property 90 | def configuration_meta(self): 91 | """Meta data about the configuration and build""" 92 | return self._configuration_meta 93 | 94 | @configuration_meta.setter 95 | def configuration_meta(self, recipe): 96 | # generate configuration meta data 97 | meta = {} 98 | meta["time"] = datetime.now().strftime("%Y%m%d %H:%M:%S") 99 | host_data = platform.uname() 100 | meta["host"] = { 101 | "machine": host_data.machine, 102 | "node": host_data.node, 103 | "processor": host_data.processor, 104 | "release": host_data.release, 105 | "system": host_data.system, 106 | "version": host_data.version, 107 | } 108 | meta["cluster"] = os.getenv("CLUSTER_NAME", default="unknown") 109 | meta["stackinator"] = { 110 | "version": VERSION, 111 | "args": sys.argv, 112 | "python": sys.executable, 113 | } 114 | meta["mount"] = str(recipe.mount) 115 | meta["spack"] = recipe.config["spack"] 116 | self._configuration_meta = meta 117 | 118 | @property 119 | def environment_meta(self): 120 | """The meta data file that describes the environments""" 121 | return self._environment_meta 122 | 123 | @environment_meta.setter 124 | def environment_meta(self, recipe): 125 | """ 126 | The output that we want to generate looks like the following, 127 | Which should correspond directly to the environment_view_meta provided 128 | by the recipe. 129 | 130 | { 131 | name: "prgenv-gnu", 132 | description: "useful programming tools", 133 | mount: "/user-environment" 134 | modules: { 135 | "root": /user-environment/modules, 136 | }, 137 | views: { 138 | "default": { 139 | "root": /user-environment/env/default, 140 | "activate": /user-environment/env/default/activate.sh, 141 | "description": "simple devolpment env: compilers, MPI, python, cmake." 
142 | }, 143 | "tools": { 144 | "root": /user-environment/env/tools, 145 | "activate": /user-environment/env/tools/activate.sh, 146 | "description": "handy tools" 147 | } 148 | } 149 | } 150 | """ 151 | conf = recipe.config 152 | meta = {} 153 | meta["name"] = conf["name"] 154 | meta["description"] = conf["description"] 155 | meta["views"] = recipe.environment_view_meta 156 | meta["mount"] = str(recipe.mount) 157 | modules = None 158 | if conf["modules"]: 159 | modules = {"root": str(recipe.mount / "modules")} 160 | meta["modules"] = modules 161 | self._environment_meta = meta 162 | 163 | def generate(self, recipe): 164 | # make the paths, in case bwrap is not used, directly write to recipe.mount 165 | store_path = self.path / "store" if not recipe.no_bwrap else pathlib.Path(recipe.mount) 166 | tmp_path = self.path / "tmp" 167 | 168 | self.path.mkdir(exist_ok=True, parents=True) 169 | store_path.mkdir(exist_ok=True) 170 | tmp_path.mkdir(exist_ok=True) 171 | 172 | # check out the version of spack 173 | spack_version = recipe.spack_version 174 | self._logger.debug(f"spack version for templates: {spack_version}") 175 | spack = recipe.config["spack"] 176 | spack_path = self.path / "spack" 177 | 178 | # set general build and configuration meta data for the project 179 | self.configuration_meta = recipe 180 | 181 | # set the environment view meta data 182 | self.environment_meta = recipe 183 | 184 | # Clone the spack repository if it has not already been checked out 185 | if not (spack_path / ".git").is_dir(): 186 | self._logger.info(f"spack: clone repository {spack['repo']}") 187 | 188 | # clone the repository 189 | capture = subprocess.run( 190 | ["git", "clone", "--filter=tree:0", spack["repo"], spack_path], 191 | shell=False, 192 | stdout=subprocess.PIPE, 193 | stderr=subprocess.STDOUT, 194 | ) 195 | self._logger.debug(capture.stdout.decode("utf-8")) 196 | 197 | if capture.returncode != 0: 198 | self._logger.error(f"error cloning the repository {spack['repo']}") 199 | capture.check_returncode() 200 | 201 | # Fetch the specific branch 202 | if spack["commit"]: 203 | self._logger.info(f"spack: fetch branch/commit {spack['commit']}") 204 | capture = subprocess.run( 205 | ["git", "-C", spack_path, "fetch", "origin", spack["commit"]], 206 | shell=False, 207 | stdout=subprocess.PIPE, 208 | stderr=subprocess.STDOUT, 209 | ) 210 | self._logger.debug(capture.stdout.decode("utf-8")) 211 | 212 | if capture.returncode != 0: 213 | self._logger.debug(f"unable to change to the fetch {spack['commit']}") 214 | capture.check_returncode() 215 | 216 | # Check out a branch or commit if one was specified 217 | if spack["commit"]: 218 | self._logger.info(f"spack: checkout branch/commit {spack['commit']}") 219 | capture = subprocess.run( 220 | ["git", "-C", spack_path, "checkout", spack["commit"]], 221 | shell=False, 222 | stdout=subprocess.PIPE, 223 | stderr=subprocess.STDOUT, 224 | ) 225 | self._logger.debug(capture.stdout.decode("utf-8")) 226 | 227 | if capture.returncode != 0: 228 | self._logger.debug(f"unable to change to the requested commit {spack['commit']}") 229 | capture.check_returncode() 230 | 231 | # get the spack commit 232 | git_commit_result = subprocess.run( 233 | ["git", "-C", spack_path, "rev-parse", "HEAD"], shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE 234 | ) 235 | spack_meta = { 236 | "ref": spack["commit"], 237 | "commit": git_commit_result.stdout.strip().decode("utf-8"), 238 | "url": spack["repo"], 239 | } 240 | 241 | # load the jinja templating environment 242 | template_path 
= self.root / "templates" 243 | jinja_env = jinja2.Environment( 244 | loader=jinja2.FileSystemLoader(template_path), 245 | trim_blocks=True, 246 | lstrip_blocks=True, 247 | ) 248 | 249 | # generate top level makefiles 250 | makefile_template = jinja_env.get_template("Makefile") 251 | 252 | with (self.path / "Makefile").open("w") as f: 253 | f.write( 254 | makefile_template.render( 255 | cache=recipe.mirror, 256 | modules=recipe.config["modules"], 257 | post_install_hook=recipe.post_install_hook, 258 | pre_install_hook=recipe.pre_install_hook, 259 | spack_version=spack_version, 260 | spack_meta=spack_meta, 261 | exclude_from_cache=["nvhpc", "cuda", "perl"], 262 | verbose=False, 263 | ) 264 | ) 265 | f.write("\n") 266 | 267 | make_user_template = jinja_env.get_template("Make.user") 268 | with (self.path / "Make.user").open("w") as f: 269 | f.write( 270 | make_user_template.render( 271 | spack_version=spack_version, 272 | build_path=self.path, 273 | store=recipe.mount, 274 | no_bwrap=recipe.no_bwrap, 275 | verbose=False, 276 | ) 277 | ) 278 | f.write("\n") 279 | 280 | etc_path = self.root / "etc" 281 | for f_etc in ["Make.inc", "bwrap-mutable-root.sh", "envvars.py"]: 282 | shutil.copy2(etc_path / f_etc, self.path / f_etc) 283 | 284 | # used to configure both pre and post install hooks, if they are provided. 285 | hook_env = { 286 | "mount": recipe.mount, 287 | "config": recipe.mount / "config", 288 | "build": self.path, 289 | "spack": self.path / "spack", 290 | } 291 | 292 | # copy post install hook file, if provided 293 | post_hook = recipe.post_install_hook 294 | if post_hook is not None: 295 | self._logger.debug("installing post-install-hook script") 296 | jinja_recipe_env = jinja2.Environment(loader=jinja2.FileSystemLoader(recipe.path)) 297 | post_hook_template = jinja_recipe_env.get_template("post-install") 298 | post_hook_destination = store_path / "post-install-hook" 299 | 300 | with post_hook_destination.open("w") as f: 301 | f.write(post_hook_template.render(env=hook_env, verbose=False)) 302 | f.write("\n") 303 | 304 | os.chmod( 305 | post_hook_destination, 306 | os.stat(post_hook_destination).st_mode | stat.S_IEXEC, 307 | ) 308 | 309 | # copy pre install hook file, if provided 310 | pre_hook = recipe.pre_install_hook 311 | if pre_hook is not None: 312 | self._logger.debug("installing pre-install-hook script") 313 | jinja_recipe_env = jinja2.Environment(loader=jinja2.FileSystemLoader(recipe.path)) 314 | pre_hook_template = jinja_recipe_env.get_template("pre-install") 315 | pre_hook_destination = store_path / "pre-install-hook" 316 | 317 | with pre_hook_destination.open("w") as f: 318 | f.write(pre_hook_template.render(env=hook_env, verbose=False)) 319 | f.write("\n") 320 | 321 | os.chmod( 322 | pre_hook_destination, 323 | os.stat(pre_hook_destination).st_mode | stat.S_IEXEC, 324 | ) 325 | 326 | # Generate the system configuration: the compilers, environments, etc. 327 | # that are defined for the target cluster. 328 | config_path = self.path / "config" 329 | config_path.mkdir(exist_ok=True) 330 | system_config_path = pathlib.Path(recipe.system_config_path) 331 | 332 | # Copy the yaml files to the spack config path 333 | for f_config in system_config_path.iterdir(): 334 | # print warning if mirrors.yaml is found 335 | if f_config.name in ["mirrors.yaml"]: 336 | self._logger.error( 337 | "mirrors.yaml have been removed from cluster configurations," 338 | " use the --cache option on stack-config instead." 
339 | ) 340 | raise RuntimeError("Unsupported mirrors.yaml file in cluster configuration.") 341 | 342 | # construct full file path 343 | src = system_config_path / f_config.name 344 | dst = config_path / f_config.name 345 | # copy only files 346 | if src.is_file(): 347 | shutil.copy(src, dst) 348 | 349 | # generate a mirrors.yaml file if build caches have been configured 350 | if recipe.mirror: 351 | dst = config_path / "mirrors.yaml" 352 | self._logger.debug(f"generate the build cache mirror: {dst}") 353 | with dst.open("w") as fid: 354 | fid.write(cache.generate_mirrors_yaml(recipe.mirror)) 355 | 356 | # append recipe packages to packages.yaml 357 | if recipe.packages: 358 | system_packages = system_config_path / "packages.yaml" 359 | packages_data = {} 360 | if system_packages.is_file(): 361 | # load system yaml 362 | with system_packages.open() as fid: 363 | raw = yaml.load(fid, Loader=yaml.Loader) 364 | packages_data = raw["packages"] 365 | packages_data.update(recipe.packages["packages"]) 366 | packages_yaml = yaml.dump({"packages": packages_data}) 367 | packages_path = config_path / "packages.yaml" 368 | with packages_path.open("w") as fid: 369 | fid.write(packages_yaml) 370 | 371 | # Add custom spack package recipes, configured via Spack repos. 372 | # Step 1: copy Spack repos to store_path where they will be used to 373 | # build the stack, and then be part of the upstream provided 374 | # to users of the stack. 375 | # 376 | # Packages in the recipe are prioritised over cluster specific packages, 377 | # etc. The order of preference from highest to lowest is: 378 | # 379 | # 3. recipe/repo 380 | # 2. cluster-config/repos.yaml 381 | # - if the repos.yaml file exists it will contain a list of relative paths 382 | # to search for package 383 | # 1. spack/var/spack/repos/builtin 384 | 385 | # Build a list of repos with packages to install. 386 | repos = [] 387 | 388 | # check for a repo in the recipe 389 | if recipe.spack_repo is not None: 390 | self._logger.debug(f"adding recipe spack package repo: {recipe.spack_repo}") 391 | repos.append(recipe.spack_repo) 392 | 393 | # look for repos.yaml file in the system configuration 394 | repo_yaml = system_config_path / "repos.yaml" 395 | if repo_yaml.exists() and repo_yaml.is_file(): 396 | # open repos.yaml file and reat the list of repos 397 | with repo_yaml.open() as fid: 398 | raw = yaml.load(fid, Loader=yaml.Loader) 399 | P = raw["repos"] 400 | 401 | self._logger.debug(f"the system configuration has a repo file {repo_yaml} refers to {P}") 402 | 403 | # test each path 404 | for rel_path in P: 405 | repo_path = (system_config_path / rel_path).resolve() 406 | if spack_util.is_repo(repo_path): 407 | repos.append(repo_path) 408 | self._logger.debug(f"adding site spack package repo: {repo_path}") 409 | else: 410 | self._logger.error(f"{repo_path} from {repo_yaml} is not a spack package repository") 411 | raise RuntimeError("invalid system-provided package repository") 412 | 413 | self._logger.debug(f"full list of spack package repo: {repos}") 414 | 415 | # Delete the store/repo path, if it already exists. 416 | # Do this so that incremental builds (though not officially supported) won't break if a repo is updated. 417 | repo_dst = store_path / "repo" 418 | self._logger.debug(f"creating the stack spack prepo in {repo_dst}") 419 | if repo_dst.exists(): 420 | self._logger.debug(f"{repo_dst} exists ... 
deleting") 421 | shutil.rmtree(repo_dst) 422 | 423 | # create the repository step 1: create the repo directory 424 | pkg_dst = repo_dst / "packages" 425 | pkg_dst.mkdir(mode=0o755, parents=True) 426 | self._logger.debug(f"created the repo packages path {pkg_dst}") 427 | 428 | # create the repository step 2: create the repo.yaml file that 429 | # configures the repo. 430 | with (repo_dst / "repo.yaml").open("w") as f: 431 | f.write( 432 | """\ 433 | repo: 434 | namespace: alps 435 | """ 436 | ) 437 | 438 | # create the repository step 2: create the repos.yaml file in build_path/config 439 | repos_yaml_template = jinja_env.get_template("repos.yaml") 440 | with (config_path / "repos.yaml").open("w") as f: 441 | repo_path = recipe.mount / "repo" 442 | f.write(repos_yaml_template.render(repo_path=repo_path.as_posix(), verbose=False)) 443 | f.write("\n") 444 | 445 | # Iterate over the source repositories copying their contents to the consolidated repo in the uenv. 446 | # Do overwrite packages that have been copied from an earlier source repo, enforcing a descending 447 | # order of precidence. 448 | if len(repos) > 0: 449 | for repo_src in repos: 450 | self._logger.debug(f"installing repo {repo_src}") 451 | packages_path = repo_src / "packages" 452 | for pkg_path in packages_path.iterdir(): 453 | dst = pkg_dst / pkg_path.name 454 | if pkg_path.is_dir() and not dst.exists(): 455 | self._logger.debug(f" installing package {pkg_path} to {pkg_dst}") 456 | install(pkg_path, dst) 457 | elif dst.exists(): 458 | self._logger.debug(f" NOT installing package {pkg_path}") 459 | 460 | # Generate the makefile and spack.yaml files that describe the compilers 461 | compiler_files = recipe.compiler_files 462 | compiler_path = self.path / "compilers" 463 | compiler_path.mkdir(exist_ok=True) 464 | with (compiler_path / "Makefile").open(mode="w") as f: 465 | f.write(compiler_files["makefile"]) 466 | 467 | for name, yml in compiler_files["config"].items(): 468 | compiler_config_path = compiler_path / name 469 | compiler_config_path.mkdir(exist_ok=True) 470 | with (compiler_config_path / "spack.yaml").open(mode="w") as f: 471 | f.write(yml) 472 | 473 | # generate the makefile and spack.yaml files that describe the environments 474 | environment_files = recipe.environment_files 475 | environments_path = self.path / "environments" 476 | os.makedirs(environments_path, exist_ok=True) 477 | with (environments_path / "Makefile").open(mode="w") as f: 478 | f.write(environment_files["makefile"]) 479 | 480 | for name, yml in environment_files["config"].items(): 481 | env_config_path = environments_path / name 482 | env_config_path.mkdir(exist_ok=True) 483 | with (env_config_path / "spack.yaml").open(mode="w") as f: 484 | f.write(yml) 485 | 486 | # generate the makefile that generates the configuration for the spack 487 | # installation in the generate-config sub-directory of the build path. 
488 | make_config_template = jinja_env.get_template("Makefile.generate-config") 489 | generate_config_path = self.path / "generate-config" 490 | generate_config_path.mkdir(exist_ok=True) 491 | 492 | # write generate-config/Makefile 493 | all_compilers = [x for x in recipe.compilers.keys()] 494 | release_compilers = [x for x in all_compilers if x != "bootstrap"] 495 | with (generate_config_path / "Makefile").open("w") as f: 496 | f.write( 497 | make_config_template.render( 498 | build_path=self.path.as_posix(), 499 | all_compilers=all_compilers, 500 | release_compilers=release_compilers, 501 | verbose=False, 502 | ) 503 | ) 504 | 505 | # write modules/modules.yaml 506 | modules_yaml = recipe.modules_yaml 507 | generate_modules_path = self.path / "modules" 508 | generate_modules_path.mkdir(exist_ok=True) 509 | with (generate_modules_path / "modules.yaml").open("w") as f: 510 | f.write(modules_yaml) 511 | 512 | # write the meta data 513 | meta_path = store_path / "meta" 514 | meta_path.mkdir(exist_ok=True) 515 | # write a json file with basic meta data 516 | with (meta_path / "configure.json").open("w") as f: 517 | # default serialisation is str to serialise the pathlib.PosixPath 518 | f.write(json.dumps(self.configuration_meta, sort_keys=True, indent=2, default=str)) 519 | f.write("\n") 520 | 521 | # write a json file with the environment view meta data 522 | with (meta_path / "env.json.in").open("w") as f: 523 | # default serialisation is str to serialise the pathlib.PosixPath 524 | f.write(json.dumps(self.environment_meta, sort_keys=True, indent=2, default=str)) 525 | f.write("\n") 526 | 527 | # copy the recipe to a recipe subdirectory of the meta path 528 | meta_recipe_path = meta_path / "recipe" 529 | meta_recipe_path.mkdir(exist_ok=True) 530 | if meta_recipe_path.exists(): 531 | shutil.rmtree(meta_recipe_path) 532 | install(recipe.path, meta_recipe_path, ignore=shutil.ignore_patterns(".git")) 533 | 534 | # create the meta/extra path and copy recipe meta data if it exists 535 | meta_extra_path = meta_path / "extra" 536 | meta_extra_path.mkdir(exist_ok=True) 537 | if meta_extra_path.exists(): 538 | shutil.rmtree(meta_extra_path) 539 | if recipe.user_extra is not None: 540 | self._logger.debug(f"copying extra recipe meta data to {meta_extra_path}") 541 | install(recipe.user_extra, meta_extra_path) 542 | 543 | # create debug helper script 544 | debug_script_path = self.path / "stack-debug.sh" 545 | debug_script_template = jinja_env.get_template("stack-debug.sh") 546 | with debug_script_path.open("w") as f: 547 | f.write( 548 | debug_script_template.render( 549 | mount_path=recipe.mount, 550 | build_path=str(self.path), 551 | use_bwrap=not recipe.no_bwrap, 552 | spack_version=spack_version, 553 | verbose=False, 554 | ) 555 | ) 556 | f.write("\n") 557 | -------------------------------------------------------------------------------- /stackinator/cache.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pathlib 3 | 4 | import yaml 5 | 6 | from . 
import schema 7 | 8 | 9 | def configuration_from_file(file, mount): 10 | with file.open() as fid: 11 | # load the raw yaml input 12 | raw = yaml.load(fid, Loader=yaml.Loader) 13 | 14 | # validate the yaml 15 | schema.cache_validator.validate(raw) 16 | 17 | # verify that the root path exists 18 | path = pathlib.Path(os.path.expandvars(raw["root"])) 19 | if not path.is_absolute(): 20 | raise FileNotFoundError(f"The build cache path '{path}' is not absolute") 21 | if not path.is_dir(): 22 | raise FileNotFoundError(f"The build cache path '{path}' does not exist") 23 | 24 | raw["root"] = path 25 | 26 | # Put the build cache in a sub-directory named after the mount point. 27 | # This avoids relocation issues. 28 | raw["path"] = pathlib.Path(path.as_posix() + mount.as_posix()) 29 | 30 | # verify that the key file exists if it was specified 31 | key = raw["key"] 32 | if key is not None: 33 | key = pathlib.Path(os.path.expandvars(key)) 34 | if not key.is_absolute(): 35 | raise FileNotFoundError(f"The build cache key '{key}' is not absolute") 36 | if not key.is_file(): 37 | raise FileNotFoundError(f"The build cache key '{key}' does not exist") 38 | raw["key"] = key 39 | 40 | return raw 41 | 42 | 43 | def generate_mirrors_yaml(config): 44 | path = config["path"].as_posix() 45 | mirrors = { 46 | "mirrors": { 47 | "alpscache": { 48 | "fetch": { 49 | "url": f"file://{path}", 50 | "access_pair": [None, None], 51 | "access_token": None, 52 | "profile": None, 53 | "endpoint_url": None, 54 | }, 55 | "push": { 56 | "url": f"file://{path}", 57 | "access_pair": [None, None], 58 | "access_token": None, 59 | "profile": None, 60 | "endpoint_url": None, 61 | }, 62 | } 63 | } 64 | } 65 | 66 | return yaml.dump(mirrors, default_flow_style=False) 67 | -------------------------------------------------------------------------------- /stackinator/etc/Make.inc: -------------------------------------------------------------------------------- 1 | # vi: filetype=make 2 | 3 | SPACK ?= spack 4 | 5 | SPACK_ENV = $(SPACK) -e $(dir $@) 6 | 7 | ifndef STORE 8 | $(error STORE should point to a Spack install root) 9 | endif 10 | 11 | ifeq ($(wildcard $(SPACK_SYSTEM_CONFIG_PATH)),) 12 | $(error SPACK_SYSTEM_CONFIG_PATH = "$(SPACK_SYSTEM_CONFIG_PATH)" doesn't point to an existing directory) 13 | endif 14 | 15 | store: 16 | mkdir -p $(STORE) 17 | 18 | # Concretization 19 | %/spack.lock: %/spack.yaml %/compilers.yaml %/config.yaml %/packages.yaml 20 | $(SPACK_ENV) concretize -f 21 | 22 | # Generate Makefiles for the environment install 23 | %/Makefile: %/spack.lock 24 | $(SPACK_ENV) env depfile --make-target-prefix $*/generated -o $@ 25 | 26 | # For generating {compilers,config,packages}.yaml files. 27 | %.yaml: export SPACK_USER_CONFIG_PATH=$(abspath $(dir $@)) 28 | %.yaml: 29 | touch $@ 30 | 31 | # Because Spack doesn't know how to find compilers, we help it by getting the bin folder of gcc, clang, nvc given a install prefix 32 | compiler_bin_dirs = $$(find $(1) '(' -name gcc -o -name clang -o -name nvc ')' -path '*/bin/*' '(' -type f -o -type l ')' -exec dirname {} +) 33 | -------------------------------------------------------------------------------- /stackinator/etc/add-compiler-links.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # Hard code the path to the system python3 on HPE Cray EX systems. 4 | 5 | import argparse 6 | import os 7 | 8 | import yaml 9 | 10 | 11 | # parse compilers.yaml file. 
12 | # return a list with the compiler descriptions from the yaml file. 13 | def load_compilers_yaml(path): 14 | with open(path, "r") as file: 15 | data = yaml.safe_load(file) 16 | compilers = [c["compiler"] for c in data["compilers"]] 17 | return compilers 18 | 19 | 20 | def parse_export(line): 21 | s = line.replace("=", " ").split() 22 | var = s[1] 23 | paths = None 24 | if len(s) > 2: 25 | paths = s[2].rstrip(";").split(":") 26 | return {"variable": var, "paths": paths} 27 | 28 | 29 | def split_line(line): 30 | return line.strip().rstrip(";").replace("=", " ").split() 31 | 32 | 33 | def is_export(parts): 34 | return len(parts) > 1 and parts[0] == "export" 35 | 36 | 37 | def is_alias(parts): 38 | return len(parts) > 0 and parts[0] == "alias" 39 | 40 | 41 | # Returns True if the given path is a descendant of prefix, False otherwise. 42 | def has_prefix(path, prefix): 43 | prefix = os.path.realpath(prefix) 44 | path = os.path.realpath(path) 45 | return os.path.commonprefix([path, prefix]) == prefix 46 | 47 | 48 | parser = argparse.ArgumentParser() 49 | parser.add_argument("compiler_path", help="Path to the compilers.yaml file") 50 | parser.add_argument("activate_path", help="Path to the activate script to configure") 51 | parser.add_argument("build_path", help="Path where the build is performed") 52 | args = parser.parse_args() 53 | 54 | if not os.path.exists(args.compiler_path): 55 | print(f"error - compiler file '{args.compiler_path}' does not exist.") 56 | exit(2) 57 | 58 | if not os.path.exists(args.activate_path): 59 | print(f"error - activation file '{args.activate_path}' does not exist.") 60 | exit(2) 61 | 62 | if not os.path.exists(args.build_path): 63 | print(f"error - build path '{args.build_path}' does not exist.") 64 | exit(2) 65 | 66 | compilers = load_compilers_yaml(args.compiler_path) 67 | 68 | paths = [] 69 | for c in compilers: 70 | local_paths = set([os.path.dirname(v) for k, v in c["paths"].items() if v is not None]) 71 | paths += local_paths 72 | print(f"adding compiler {c['spec']} -> {[p for p in local_paths]}") 73 | 74 | # find unique paths and concatenate them 75 | pathstring = ":".join(set(paths)) 76 | 77 | # Parse the spack env activation script line by line. 78 | # Remove spack-specific environment variables and references the build path. 79 | 80 | # NOTE: the activation script generated by spack effectively clears PATH, CPATH, 81 | # etc. This may or may not be surprising for users, and we may have to append 82 | # :$PATH, :$CPATH, etc. 83 | 84 | lines = [] 85 | with open(args.activate_path) as fid: 86 | for line in fid: 87 | parts = split_line(line) 88 | 89 | # drop any aliases (bash functions created by spack) 90 | if is_alias(parts): 91 | pass 92 | elif is_export(parts): 93 | export = parse_export(line) 94 | 95 | # parse PATH to remove references to the build directory 96 | if export["variable"] == "PATH": 97 | paths = [p for p in export["paths"] if not has_prefix(p, args.build_path)] 98 | lines.append(f"export PATH={':'.join(paths)};\n") 99 | 100 | # drop the SPACK_ENV variable 101 | elif export["variable"] == "SPACK_ENV": 102 | pass 103 | 104 | else: 105 | lines.append(line.strip() + "\n") 106 | else: 107 | lines.append(line.strip() + "\n") 108 | 109 | # Prepend the compiler paths to PATH 110 | lines.append("# compiler paths added by stackinator\n") 111 | lines.append(f"export PATH={pathstring}:$PATH;\n") 112 | 113 | # Write a modified version of the activation script. 
114 | with open(args.activate_path, "w") as fid: 115 | fid.writelines(lines) 116 | -------------------------------------------------------------------------------- /stackinator/etc/bwrap-mutable-root.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | args=() 3 | shopt -s dotglob 4 | for d in /*; do 5 | # skip invalid symlinks, as they will break bwrap 6 | if [ ! -L "$d" ] || [ -e "$d" ]; then 7 | args+=("--dev-bind" "$d" "$d") 8 | fi 9 | done 10 | PS1="\[\e[36;1m\]build-env >>> \[\e[0m\]" bwrap "${args[@]}" "$@" 11 | 12 | -------------------------------------------------------------------------------- /stackinator/etc/envvars.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | import argparse 4 | import json 5 | import os 6 | from enum import Enum 7 | from typing import List, Optional 8 | 9 | import yaml 10 | 11 | 12 | class EnvVarOp(Enum): 13 | PREPEND = 1 14 | APPEND = 2 15 | SET = 3 16 | 17 | def __str__(self): 18 | return self.name.lower() 19 | 20 | 21 | class EnvVarKind(Enum): 22 | SCALAR = 2 23 | LIST = 2 24 | 25 | 26 | list_variables = { 27 | "ACLOCAL_PATH", 28 | "CMAKE_PREFIX_PATH", 29 | "CPATH", 30 | "LD_LIBRARY_PATH", 31 | "LIBRARY_PATH", 32 | "MANPATH", 33 | "MODULEPATH", 34 | "PATH", 35 | "PKG_CONFIG_PATH", 36 | "PYTHONPATH", 37 | } 38 | 39 | 40 | class EnvVarError(Exception): 41 | """Exception raised when there is an error with environment variable manipulation.""" 42 | 43 | def __init__(self, message): 44 | self.message = message 45 | super().__init__(self.message) 46 | 47 | def __str__(self): 48 | return self.message 49 | 50 | 51 | def is_env_value_list(v): 52 | return isinstance(v, list) and all(isinstance(item, str) for item in v) 53 | 54 | 55 | class ListEnvVarUpdate: 56 | def __init__(self, value: List[str], op: EnvVarOp): 57 | # clean up paths as they are inserted 58 | self._value = [os.path.normpath(p) for p in value] 59 | self._op = op 60 | 61 | @property 62 | def op(self): 63 | return self._op 64 | 65 | @property 66 | def value(self): 67 | return self._value 68 | 69 | def set_op(self, op: EnvVarOp): 70 | self._op = op 71 | 72 | # remove all paths that have root as common root 73 | def remove_root(self, root: str): 74 | root = os.path.normpath(root) 75 | self._value = [p for p in self._value if root != os.path.commonprefix([root, p])] 76 | 77 | def __repr__(self): 78 | return f"envvar.ListEnvVarUpdate({self.value}, {self.op})" 79 | 80 | def __str__(self): 81 | return f"({self.value}, {self.op})" 82 | 83 | 84 | class EnvVar: 85 | def __init__(self, name: str): 86 | self._name = name 87 | 88 | @property 89 | def name(self): 90 | return self._name 91 | 92 | 93 | class ListEnvVar(EnvVar): 94 | def __init__(self, name: str, value: List[str], op: EnvVarOp): 95 | super().__init__(name) 96 | 97 | self._updates = [ListEnvVarUpdate(value, op)] 98 | 99 | def update(self, value: List[str], op: EnvVarOp): 100 | self._updates.append(ListEnvVarUpdate(value, op)) 101 | 102 | def remove_root(self, root: str): 103 | for i in range(len(self._updates)): 104 | self._updates[i].remove_root(root) 105 | 106 | @property 107 | def updates(self): 108 | return self._updates 109 | 110 | def concat(self, other: "ListEnvVar"): 111 | self._updates += other.updates 112 | 113 | def make_dirty(self): 114 | if len(self._updates) > 0: 115 | self._updates[0].set_op(EnvVarOp.PREPEND) 116 | 117 | @property 118 | def paths(self): 119 | paths = [] 120 | for u in self._updates: 121 | paths += 
u.value 122 | return paths 123 | 124 | # Given the current value, return the value that should be set 125 | # current is None implies that the variable is not set 126 | # 127 | # dirty allows for not overriding the current value of the variable. 128 | def get_value(self, current: Optional[str], dirty: bool = False): 129 | v = current 130 | 131 | # if the variable is currently not set, first initialise it as empty. 132 | if v is None: 133 | if len(self._updates) == 0: 134 | return None 135 | v = "" 136 | 137 | first = True 138 | for update in self._updates: 139 | joined = ":".join(update.value) 140 | if first and dirty and update.op == EnvVarOp.SET: 141 | op = EnvVarOp.PREPEND 142 | else: 143 | op = update.op 144 | 145 | if v == "" or op == EnvVarOp.SET: 146 | v = joined 147 | elif op == EnvVarOp.APPEND: 148 | v = ":".join([v, joined]) 149 | elif op == EnvVarOp.PREPEND: 150 | v = ":".join([joined, v]) 151 | else: 152 | raise EnvVarError(f"Internal error: implement the operation {update.op}") 153 | 154 | first = False 155 | # strip any leading/trailing ":" 156 | v = v.strip(":") 157 | 158 | return v 159 | 160 | def __repr__(self): 161 | return f'envvars.ListEnvVar("{self.name}", {self._updates})' 162 | 163 | def __str__(self): 164 | return f'("{self.name}": [{",".join([str(u) for u in self._updates])}])' 165 | 166 | 167 | class ScalarEnvVar(EnvVar): 168 | def __init__(self, name: str, value: Optional[str]): 169 | super().__init__(name) 170 | self._value = value 171 | 172 | @property 173 | def value(self): 174 | return self._value 175 | 176 | @property 177 | def is_null(self): 178 | return self.value is None 179 | 180 | def update(self, value: Optional[str]): 181 | self._value = value 182 | 183 | def get_value(self, value: Optional[str]): 184 | if value is not None: 185 | return value 186 | return self._value 187 | 188 | def __repr__(self): 189 | return f'envvars.ScalarEnvVar("{self.name}", "{self.value}")' 190 | 191 | def __str__(self): 192 | return f'("{self.name}": "{self.value}")' 193 | 194 | 195 | class Env: 196 | def __init__(self): 197 | self._vars = {} 198 | 199 | def apply(self, var: EnvVar): 200 | self._vars[var.name] = var 201 | 202 | 203 | # returns true if the environment variable with name is a list variable, 204 | # e.g. PATH, LD_LIBRARY_PATH, PKG_CONFIG_PATH, etc. 205 | def is_list_var(name: str) -> bool: 206 | return name in list_variables 207 | 208 | 209 | class EnvVarSet: 210 | """ 211 | A set of environment variable updates. 212 | 213 | The values need to be applied before they are valid. 
214 | """ 215 | 216 | def __init__(self): 217 | self._lists = {} 218 | self._scalars = {} 219 | # toggles whether post export commands will be generated 220 | self._generate_post = True 221 | 222 | @property 223 | def lists(self): 224 | return self._lists 225 | 226 | def clear(self): 227 | self._lists = {} 228 | self._scalars = {} 229 | 230 | @property 231 | def scalars(self): 232 | return self._scalars 233 | 234 | def make_dirty(self): 235 | for name in self._lists: 236 | self._lists[name].make_dirty() 237 | 238 | def remove_root(self, root: str): 239 | for name in self._lists: 240 | self._lists[name].remove_root(root) 241 | 242 | def set_scalar(self, name: str, value: str): 243 | self._scalars[name] = ScalarEnvVar(name, value) 244 | 245 | def set_list(self, name: str, value: List[str], op: EnvVarOp): 246 | var = ListEnvVar(name, value, op) 247 | if var.name in self._lists.keys(): 248 | self._lists[var.name].concat(var) 249 | else: 250 | self._lists[var.name] = var 251 | 252 | def __repr__(self): 253 | return f'envvars.EnvVarSet("{self.lists}", "{self.scalars}")' 254 | 255 | def __str__(self): 256 | s = "EnvVarSet:\n" 257 | s += " scalars:\n" 258 | for _, v in self.scalars.items(): 259 | s += f" {v.name}: {v.value}\n" 260 | s += " lists:\n" 261 | for _, v in self.lists.items(): 262 | s += f" {v.name}:\n" 263 | for u in v.updates: 264 | s += f" {u.op}: {':'.join(u.value)}\n" 265 | return s 266 | 267 | # Update the environment variables using the values in another EnvVarSet. 268 | # This operation is used when environment variables are sourced from more 269 | # than one location, e.g. multiple activation scripts. 270 | def update(self, other: "EnvVarSet"): 271 | for name, var in other.scalars.items(): 272 | self.set_scalar(name, var.value) 273 | for name, var in other.lists.items(): 274 | if name in self.lists.keys(): 275 | self.lists[name].concat(var) 276 | else: 277 | self.lists[name] = var 278 | 279 | # Generate the commands that set and unset the environment variables. 280 | # Returns a dictionary with two fields: 281 | # "pre": the list of commands to be executed before the command 282 | # "post": the list of commands to be executed to revert the environment 283 | # 284 | # The "post" list is optional, and should not be used for commands that 285 | # update the environment like "uenv view" and "uenv modules use", instead 286 | # it should be used for commands that should not alter the calling environment, 287 | # like "uenv run" and "uenv start". 288 | # 289 | # The dirty flag will preserve the state of variables like PATH, LD_LIBRARY_PATH, etc. 
290 | def export(self, dirty=False): 291 | pre = [] 292 | post = [] 293 | 294 | for name, var in self.scalars.items(): 295 | # get the value of the environment variable 296 | current = os.getenv(name) 297 | new = var.get_value(current) 298 | 299 | if new is None: 300 | pre.append(f"unset {name}") 301 | else: 302 | pre.append(f"export {name}={new}") 303 | 304 | if self._generate_post: 305 | if current is None: 306 | post.append(f"unset {name}") 307 | else: 308 | post.append(f"export {name}={current}") 309 | 310 | for name, var in self.lists.items(): 311 | # get the value of the environment variable 312 | current = os.getenv(name) 313 | new = var.get_value(current, dirty) 314 | 315 | if new is None: 316 | pre.append(f"unset {name}") 317 | else: 318 | pre.append(f"export {name}={new}") 319 | 320 | if self._generate_post: 321 | if current is None: 322 | post.append(f"unset {name}") 323 | else: 324 | post.append(f"export {name}={current}") 325 | 326 | return {"pre": pre, "post": post} 327 | 328 | def as_dict(self) -> dict: 329 | # create a dictionary with the information formatted for JSON 330 | d = {"list": {}, "scalar": {}} 331 | 332 | for name, var in self.lists.items(): 333 | ops = [] 334 | for u in var.updates: 335 | op = "set" if u.op == EnvVarOp.SET else ("prepend" if u.op == EnvVarOp.PREPEND else "append") 336 | ops.append({"op": op, "value": u.value}) 337 | 338 | d["list"][name] = ops 339 | 340 | for name, var in self.scalars.items(): 341 | d["scalar"][name] = var.value 342 | 343 | return d 344 | 345 | # returns a string that represents the environment variable modifications 346 | # in json format 347 | # { 348 | # "list": { 349 | # "PATH": [ 350 | # {"op": "set", "value": "/user-environment/bin"}, 351 | # {"op": "prepend", "value": "/user-environment/env/default/bin"} 352 | # ], 353 | # "LD_LIBRARY_PATH": [ 354 | # {"op": "prepend", "value": "/user-environment/env/default/lib"} 355 | # {"op": "prepend", "value": "/user-environment/env/default/lib64"} 356 | # ] 357 | # }, 358 | # "scalar": { 359 | # "CUDA_HOME": "/user-environment/env/default", 360 | # "MPIF90": "/user-environment/env/default/bin/mpif90" 361 | # } 362 | # } 363 | def as_json(self) -> str: 364 | return json.dumps(self.as_dict(), separators=(",", ":")) 365 | 366 | def set_post(self, value: bool): 367 | self._generate_post = value 368 | 369 | 370 | def read_activation_script(filename: str, env: Optional[EnvVarSet] = None) -> EnvVarSet: 371 | if env is None: 372 | env = EnvVarSet() 373 | 374 | with open(filename) as fid: 375 | for line in fid: 376 | ls = line.strip().rstrip(";") 377 | # skip empty lines and comments 378 | if (len(ls) == 0) or (ls[0] == "#"): 379 | continue 380 | # split on the first whitespace 381 | # this splits lines of the form 382 | # export Y 383 | # where Y is an arbitray string into ['export', 'Y'] 384 | fields = ls.split(maxsplit=1) 385 | 386 | # handle lines of the form 'export Y' 387 | if len(fields) > 1 and fields[0] == "export": 388 | fields = fields[1].split("=", maxsplit=1) 389 | # get the name of the environment variable 390 | name = fields[0] 391 | 392 | # ignore SPACK environment variables: setting these will interfere with downstream 393 | # user spack configuration. 
394 | if name.startswith("SPACK_"): 395 | continue 396 | 397 | # if there was only one field, there was no = sign, so pass 398 | if len(fields) < 2: 399 | continue 400 | 401 | # rhs the value that is assigned to the environment variable 402 | rhs = fields[1] 403 | if name in list_variables: 404 | fields = [f for f in rhs.split(":") if len(f.strip()) > 0] 405 | # look for $name as one of the fields (only works for append or prepend) 406 | 407 | if len(fields) == 0: 408 | env.set_list(name, fields, EnvVarOp.SET) 409 | elif fields[0] == f"${name}": 410 | env.set_list(name, fields[1:], EnvVarOp.APPEND) 411 | elif fields[-1] == f"${name}": 412 | env.set_list(name, fields[:-1], EnvVarOp.PREPEND) 413 | else: 414 | env.set_list(name, fields, EnvVarOp.SET) 415 | else: 416 | env.set_scalar(name, rhs) 417 | 418 | return env 419 | 420 | 421 | def view_impl(args): 422 | print( 423 | f"parsing view {args.root}\n compilers {args.compilers}\n prefix_paths '{args.prefix_paths}'\n \ 424 | build_path '{args.build_path}'" 425 | ) 426 | 427 | if not os.path.isdir(args.root): 428 | print(f"error - environment root path {args.root} does not exist") 429 | exit(1) 430 | 431 | root_path = args.root 432 | activate_path = root_path + "/activate.sh" 433 | if not os.path.isfile(activate_path): 434 | print(f"error - activation script {activate_path} does not exist") 435 | exit(1) 436 | 437 | envvars = read_activation_script(activate_path) 438 | 439 | # force all prefix path style variables (list vars) to use PREPEND the first operation. 440 | envvars.make_dirty() 441 | envvars.remove_root(args.build_path) 442 | 443 | if args.compilers is not None: 444 | if not os.path.isfile(args.compilers): 445 | print(f"error - compiler yaml file {args.compilers} does not exist") 446 | exit(1) 447 | 448 | with open(args.compilers, "r") as file: 449 | data = yaml.safe_load(file) 450 | compilers = [c["compiler"] for c in data["compilers"]] 451 | 452 | compiler_paths = [] 453 | for c in compilers: 454 | local_paths = set([os.path.dirname(v) for _, v in c["paths"].items() if v is not None]) 455 | compiler_paths += local_paths 456 | print(f"adding compiler {c['spec']} -> {[p for p in local_paths]}") 457 | 458 | envvars.set_list("PATH", compiler_paths, EnvVarOp.PREPEND) 459 | 460 | if args.prefix_paths: 461 | # get the root path of the env 462 | print(f"prefix_paths: searching in {root_path}") 463 | 464 | for p in args.prefix_paths.split(","): 465 | name, value = p.split("=") 466 | paths = [] 467 | for path in [os.path.normpath(p) for p in value.split(":")]: 468 | test_path = f"{root_path}/{path}" 469 | if os.path.isdir(test_path): 470 | paths.append(test_path) 471 | 472 | print(f"{name}:") 473 | for p in paths: 474 | print(f" {p}") 475 | 476 | if len(paths) > 0: 477 | if name in envvars.lists: 478 | ld_paths = envvars.lists[name].paths 479 | final_paths = [p for p in paths if p not in ld_paths] 480 | envvars.set_list(name, final_paths, EnvVarOp.PREPEND) 481 | else: 482 | envvars.set_list(name, paths, EnvVarOp.PREPEND) 483 | 484 | json_path = os.path.join(root_path, "env.json") 485 | print(f"writing JSON data to {json_path}") 486 | envvar_dict = {"version": 1, "values": envvars.as_dict()} 487 | with open(json_path, "w") as fid: 488 | json.dump(envvar_dict, fid) 489 | fid.write("\n") 490 | 491 | 492 | def meta_impl(args): 493 | # verify that the paths exist 494 | if not os.path.exists(args.mount): 495 | print(f"error - uenv mount '{args.mount}' does not exist.") 496 | exit(1) 497 | 498 | # parse the uenv meta data from file 499 | 
meta_in_path = os.path.normpath(f"{args.mount}/meta/env.json.in") 500 | meta_path = os.path.normpath(f"{args.mount}/meta/env.json") 501 | print(f"loading meta data to update: {meta_in_path}") 502 | with open(meta_in_path) as fid: 503 | meta = json.load(fid) 504 | 505 | for name, data in meta["views"].items(): 506 | env_root = data["root"] 507 | 508 | # read the json view data from file 509 | json_path = os.path.join(env_root, "env.json") 510 | print(f"reading view {name} data rom {json_path}") 511 | 512 | if not os.path.exists(json_path): 513 | print(f"error - meta data file '{json_path}' does not exist.") 514 | exit(1) 515 | 516 | with open(json_path, "r") as fid: 517 | envvar_dict = json.load(fid) 518 | 519 | # update the global meta data to include the environment variable state 520 | meta["views"][name]["env"] = envvar_dict 521 | meta["views"][name]["type"] = "spack-view" 522 | 523 | # process spack and modules 524 | if args.modules: 525 | module_path = f"{args.mount}/modules" 526 | meta["views"]["modules"] = { 527 | "activate": "/dev/null", 528 | "description": "activate modules", 529 | "root": module_path, 530 | "env": { 531 | "version": 1, 532 | "type": "augment", 533 | "values": {"list": {"MODULEPATH": [{"op": "prepend", "value": [module_path]}]}, "scalar": {}}, 534 | }, 535 | } 536 | 537 | if args.spack is not None: 538 | spack_url, spack_ref, spack_commit = args.spack.split(",") 539 | spack_path = f"{args.mount}/config".replace("//", "/") 540 | meta["views"]["spack"] = { 541 | "activate": "/dev/null", 542 | "description": "configure spack upstream", 543 | "root": spack_path, 544 | "env": { 545 | "version": 1, 546 | "type": "augment", 547 | "values": { 548 | "list": {}, 549 | "scalar": { 550 | "UENV_SPACK_CONFIG_PATH": spack_path, 551 | "UENV_SPACK_REF": spack_ref, 552 | "UENV_SPACK_COMMIT": spack_commit, 553 | "UENV_SPACK_URL": spack_url, 554 | }, 555 | }, 556 | }, 557 | } 558 | 559 | # update the uenv meta data file with the new env. variable description 560 | with open(meta_path, "w") as fid: 561 | # write updated meta data 562 | json.dump(meta, fid) 563 | fid.write("\n") 564 | print(f"wrote the uenv meta data {meta_path}") 565 | 566 | 567 | if __name__ == "__main__": 568 | # parse CLI arguments 569 | parser = argparse.ArgumentParser() 570 | subparsers = parser.add_subparsers(dest="command") 571 | view_parser = subparsers.add_parser( 572 | "view", formatter_class=argparse.RawDescriptionHelpFormatter, help="generate env.json for a view" 573 | ) 574 | view_parser.add_argument("root", help="root path of the view", type=str) 575 | view_parser.add_argument("build_path", help="build_path", type=str) 576 | view_parser.add_argument( 577 | "--prefix_paths", help="a list of relative prefix path searchs of the form X=y:z,Y=p:q", default="", type=str 578 | ) 579 | # only add compilers if this argument is passed 580 | view_parser.add_argument("--compilers", help="path of the compilers.yaml file", type=str, default=None) 581 | 582 | uenv_parser = subparsers.add_parser( 583 | "uenv", 584 | formatter_class=argparse.RawDescriptionHelpFormatter, 585 | help="generate meta.json meta data file for a uenv.", 586 | ) 587 | uenv_parser.add_argument("mount", help="mount point of the image", type=str) 588 | uenv_parser.add_argument("--modules", help="configure a module view", action="store_true") 589 | uenv_parser.add_argument( 590 | "--spack", 591 | help='configure a spack view. 
Format is "spack_url,git_ref,git_commit"', 592 | type=str, 593 | default=None, 594 | ) 595 | 596 | args = parser.parse_args() 597 | 598 | if args.command == "uenv": 599 | print("!!! running meta") 600 | meta_impl(args) 601 | elif args.command == "view": 602 | print("!!! running view") 603 | view_impl(args) 604 | -------------------------------------------------------------------------------- /stackinator/main.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import hashlib 3 | import logging 4 | import os 5 | import platform 6 | import sys 7 | import time 8 | import traceback 9 | 10 | from . import VERSION, root_logger 11 | from .builder import Builder 12 | from .recipe import Recipe 13 | 14 | 15 | def generate_logfile_name(name=""): 16 | idstr = f"{time.localtime()}{os.getpid}{platform.uname()}" 17 | return f"log{name}_{hashlib.md5(idstr.encode('utf-8')).hexdigest()}" 18 | 19 | 20 | def configure_logging(logfile): 21 | root_logger.setLevel(logging.DEBUG) 22 | 23 | # create stdout handler and set level to info 24 | ch = logging.StreamHandler(stream=sys.stdout) 25 | ch.setLevel(logging.INFO) 26 | ch.setFormatter(logging.Formatter("%(message)s")) 27 | root_logger.addHandler(ch) 28 | 29 | # create log file handler and set level to debug 30 | fh = logging.FileHandler(logfile) # , mode='w') 31 | fh.setLevel(logging.DEBUG) 32 | fh.setFormatter(logging.Formatter("%(asctime)s : %(levelname)-7s : %(message)s")) 33 | root_logger.addHandler(fh) 34 | 35 | 36 | def log_header(args): 37 | root_logger.info("Stackinator") 38 | root_logger.info(f" recipe path: {args.recipe}") 39 | root_logger.info(f" build path : {args.build}") 40 | root_logger.info(f" system : {args.system}") 41 | mount = args.mount or "default" 42 | root_logger.info(f" mount : {mount}") 43 | root_logger.info(f" build cache: {args.cache}") 44 | root_logger.info(f" develop : {args.develop}") 45 | 46 | 47 | def make_argparser(): 48 | parser = argparse.ArgumentParser(description=("Generate a build configuration for a spack stack from a recipe.")) 49 | parser.add_argument("--version", action="version", version=f"stackinator version {VERSION}") 50 | parser.add_argument("-b", "--build", required=True, type=str) 51 | parser.add_argument("--no-bwrap", action="store_true", required=False) 52 | parser.add_argument("-r", "--recipe", required=True, type=str) 53 | parser.add_argument("-s", "--system", required=True, type=str) 54 | parser.add_argument("-d", "--debug", action="store_true") 55 | parser.add_argument("-m", "--mount", required=False, type=str) 56 | parser.add_argument("-c", "--cache", required=False, type=str) 57 | spack_version_group = parser.add_mutually_exclusive_group() 58 | spack_version_group.add_argument("--develop", action="store_true", required=False) 59 | spack_version_group.add_argument("--spack-version", required=False, type=str) 60 | 61 | return parser 62 | 63 | 64 | def main(): 65 | logfile = generate_logfile_name("_config") 66 | configure_logging(logfile) 67 | 68 | try: 69 | parser = make_argparser() 70 | args = parser.parse_args() 71 | root_logger.debug(f"Command line arguments: {args}") 72 | log_header(args) 73 | 74 | recipe = Recipe(args) 75 | builder = Builder(args) 76 | 77 | builder.generate(recipe) 78 | 79 | root_logger.info("\nConfiguration finished, run the following to build the environment:\n") 80 | root_logger.info(f"cd {builder.path}") 81 | root_logger.info( 82 | 'env --ignore-environment http_proxy="$http_proxy" https_proxy="$https_proxy" 
no_proxy="$no_proxy"' 83 | " PATH=/usr/bin:/bin:`pwd`/spack/bin HOME=$HOME make store.squashfs -j32" 84 | ) 85 | return 0 86 | except Exception as e: 87 | root_logger.debug(traceback.format_exc()) 88 | root_logger.error(str(e)) 89 | root_logger.info(f"see {logfile} for more information") 90 | return 1 91 | -------------------------------------------------------------------------------- /stackinator/plan.md: -------------------------------------------------------------------------------- 1 | remove support for v0.20 2 | - search for all v0.20 3 | 4 | test on the following spack versions: 5 | - branch develop -> 0.24 6 | - branch releases/v0.20 -> raise exception 7 | - branch releases/v0.21 -> 0.21 8 | - branch releases/v0.22 -> 0.22 9 | - branch releases/v0.23 -> 0.23 10 | - tag v0.22.3 -> v0.22 11 | - random recent commit + --develop -> v0.24 12 | 13 | remove `develop` as an option that is passed into makefiles 14 | -------------------------------------------------------------------------------- /stackinator/recipe.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import pathlib 3 | 4 | import jinja2 5 | import yaml 6 | 7 | from . import cache, root_logger, schema, spack_util 8 | 9 | 10 | class Recipe: 11 | valid_mpi_specs = { 12 | "cray-mpich": (None, None), 13 | "mpich": ("4.1", "device=ch4 netmod=ofi +slurm"), 14 | "mvapich2": ( 15 | "3.0a", 16 | "+xpmem fabrics=ch4ofi ch4_max_vcis=4 process_managers=slurm", 17 | ), 18 | "openmpi": ("5", "+internal-pmix +legacylaunchers +orterunprefix fabrics=cma,ofi,xpmem schedulers=slurm"), 19 | } 20 | 21 | @property 22 | def path(self): 23 | """the path of the recipe""" 24 | return self._path 25 | 26 | @path.setter 27 | def path(self, recipe_path): 28 | path = pathlib.Path(recipe_path) 29 | if not path.is_absolute(): 30 | path = pathlib.Path.cwd() / path 31 | 32 | if not path.is_dir(): 33 | raise FileNotFoundError(f"The recipe path '{path}' does not exist") 34 | 35 | self._path = path 36 | 37 | def __init__(self, args): 38 | self._logger = root_logger 39 | self._logger.debug("Generating recipe") 40 | 41 | self.no_bwrap = args.no_bwrap 42 | 43 | # set the system configuration path 44 | self.system_config_path = args.system 45 | 46 | # set the recipe path 47 | self.path = args.recipe 48 | 49 | self.template_path = pathlib.Path(__file__).parent.resolve() / "templates" 50 | 51 | # required config.yaml file 52 | self.config = self.path / "config.yaml" 53 | 54 | # check the version of the recipe 55 | if self.config["version"] != 2: 56 | rversion = self.config["version"] 57 | if rversion == 1: 58 | self._logger.error( 59 | "\nThe recipe is an old version 1 recipe for Spack v0.23 and earlier.\n" 60 | "This version of Stackinator supports Spack 1.0, and has deprecated support for Spack v0.23.\n" 61 | "Use version 5 of stackinator, which can be accessed via the releases/v5 branch:\n" 62 | " git switch releases/v5\n\n" 63 | "If this recipe is to be used with Spack 1.0, then please add the field 'version: 2' to\n" 64 | "config.yaml in your recipe.\n\n" 65 | "For more information: https://eth-cscs.github.io/stackinator/recipes/#configuration\n" 66 | ) 67 | raise RuntimeError("incompatible uenv recipe version") 68 | else: 69 | self._logger.error( 70 | f"\nThe config.yaml file sets an unknown recipe version={rversion}.\n" 71 | "This version of Stackinator supports version 2 recipes.\n\n" 72 | "For more information: https://eth-cscs.github.io/stackinator/recipes/#configuration\n" 73 | ) 74 | raise 
RuntimeError("incompatible uenv recipe version") 75 | 76 | # override the mount point if defined as a CLI argument 77 | if args.mount: 78 | self.config["store"] = args.mount 79 | 80 | # ensure that the requested mount point exists 81 | if not self.mount.is_dir(): 82 | raise FileNotFoundError(f"the mount point '{self.mount}' must exist") 83 | 84 | # required compilers.yaml file 85 | compiler_path = self.path / "compilers.yaml" 86 | self._logger.debug(f"opening {compiler_path}") 87 | if not compiler_path.is_file(): 88 | raise FileNotFoundError(f"The recipe path '{compiler_path}' does not contain compilers.yaml") 89 | 90 | with compiler_path.open() as fid: 91 | raw = yaml.load(fid, Loader=yaml.Loader) 92 | schema.compilers_validator.validate(raw) 93 | self.generate_compiler_specs(raw) 94 | 95 | # required environments.yaml file 96 | environments_path = self.path / "environments.yaml" 97 | self._logger.debug(f"opening {environments_path}") 98 | if not environments_path.is_file(): 99 | raise FileNotFoundError(f"The recipe path '{environments_path}' does not contain environments.yaml") 100 | 101 | with environments_path.open() as fid: 102 | raw = yaml.load(fid, Loader=yaml.Loader) 103 | schema.environments_validator.validate(raw) 104 | self.generate_environment_specs(raw) 105 | 106 | # optional modules.yaml file 107 | modules_path = self.path / "modules.yaml" 108 | self._logger.debug(f"opening {modules_path}") 109 | if not modules_path.is_file(): 110 | modules_path = pathlib.Path(args.build) / "spack/etc/spack/defaults/modules.yaml" 111 | self._logger.debug(f"no modules.yaml provided - using the default {modules_path}") 112 | 113 | self.modules = modules_path 114 | 115 | # optional packages.yaml file 116 | packages_path = self.path / "packages.yaml" 117 | self._logger.debug(f"opening {packages_path}") 118 | self.packages = None 119 | if packages_path.is_file(): 120 | with packages_path.open() as fid: 121 | self.packages = yaml.load(fid, Loader=yaml.Loader) 122 | 123 | # optional mirror configuration 124 | mirrors_path = self.path / "mirrors.yaml" 125 | if mirrors_path.is_file(): 126 | self._logger.warning( 127 | "mirrors.yaml has been removed from recipes, use the --cache option on stack-config instead." 
128 | ) 129 | raise RuntimeError("Unsupported mirrors.yaml file in recipe.") 130 | 131 | self.mirror = (args.cache, self.mount) 132 | 133 | # optional post install hook 134 | if self.post_install_hook is not None: 135 | self._logger.debug(f"post install hook {self.post_install_hook}") 136 | else: 137 | self._logger.debug("no post install hook provided") 138 | 139 | # optional pre install hook 140 | if self.pre_install_hook is not None: 141 | self._logger.debug(f"pre install hook {self.pre_install_hook}") 142 | else: 143 | self._logger.debug("no pre install hook provided") 144 | 145 | # determine the version of spack being used: 146 | # --develop flag implies the next release of spack 147 | # --spack-version option explicitly sets the version 148 | # otherwise the name of the commit provided in the config.yaml file is inspected 149 | self.spack_version = self.find_spack_version(args.develop, args.spack_version) 150 | 151 | # Returns: 152 | # Path: if the recipe contains a spack package repository 153 | # None: if the recipe contains no repo 154 | @property 155 | def spack_repo(self): 156 | repo_path = self.path / "repo" 157 | if spack_util.is_repo(repo_path): 158 | return repo_path 159 | return None 160 | 161 | # Returns: 162 | # Path: of the recipe extra path if it exists 163 | # None: if there is no user-provided extra path in the recipe 164 | @property 165 | def user_extra(self): 166 | extra_path = self.path / "extra" 167 | if extra_path.exists() and extra_path.is_dir(): 168 | return extra_path 169 | return None 170 | 171 | # Returns: 172 | # Path: of the recipe post install script if it was provided 173 | # None: if there is no user-provided post install script 174 | @property 175 | def post_install_hook(self): 176 | hook_path = self.path / "post-install" 177 | if hook_path.exists() and hook_path.is_file(): 178 | return hook_path 179 | return None 180 | 181 | # Returns: 182 | # Path: of the recipe pre install script if it was provided 183 | # None: if there is no user-provided pre install script 184 | @property 185 | def pre_install_hook(self): 186 | hook_path = self.path / "pre-install" 187 | if hook_path.exists() and hook_path.is_file(): 188 | return hook_path 189 | return None 190 | 191 | # Returns a dictionary with the following fields 192 | # 193 | # root: /path/to/cache 194 | # path: /path/to/cache/user-environment 195 | # key: /path/to/private-pgp-key 196 | @property 197 | def mirror(self): 198 | return self._mirror 199 | 200 | # configuration is a tuple with two fields: 201 | # - a Path of the yaml file containing the cache configuration 202 | # - the mount point of the image 203 | @mirror.setter 204 | def mirror(self, configuration): 205 | self._logger.debug(f"configuring build cache mirror with {configuration}") 206 | self._mirror = None 207 | 208 | file, mount = configuration 209 | 210 | if file is not None: 211 | mirror_config_path = pathlib.Path(file) 212 | if not mirror_config_path.is_file(): 213 | raise FileNotFoundError(f"The cache configuration '{file}' is not a file") 214 | 215 | self._mirror = cache.configuration_from_file(mirror_config_path, pathlib.Path(mount)) 216 | 217 | @property 218 | def config(self): 219 | return self._config 220 | 221 | @config.setter 222 | def config(self, config_path): 223 | self._logger.debug(f"opening {config_path}") 224 | if not config_path.is_file(): 225 | raise FileNotFoundError(f"The recipe path '{config_path}' does not contain config.yaml") 226 | 227 | with config_path.open() as fid: 228 | raw = yaml.load(fid, 
Loader=yaml.Loader) 229 | schema.config_validator.validate(raw) 230 | self._config = raw 231 | 232 | def find_spack_version(self, develop, spack_version): 233 | # determine the "major" version, if it can be inferred. 234 | # one of "0.21", "0.22", "0.23", "0.24" or "unknown". 235 | # "0.24" implies the latest features in develop that 236 | # are being developed for the next version of spack 237 | 238 | # the user has explicitly requested develop: 239 | if develop: 240 | return "0.24" 241 | 242 | if spack_version is not None: 243 | return spack_version 244 | 245 | # infer from the branch name 246 | # Note: this could be improved by first downloading 247 | # the requested spack version/tag/commit, then checking 248 | # the version returned by `spack --version` 249 | # 250 | # this would require deferring this decision until after 251 | # the repo is cloned in builder.py... a lot of work. 252 | commit = self.config["spack"]["commit"] 253 | if commit is None or commit == "develop": 254 | return "0.24" 255 | # currently supported 256 | if commit.find("0.24") >= 0: 257 | return "0.24" 258 | # currently supported 259 | if commit.find("0.23") >= 0: 260 | return "0.23" 261 | # currently supported 262 | if commit.find("0.22") >= 0: 263 | return "0.22" 264 | # currently supported 265 | if commit.find("0.21") >= 0: 266 | return "0.21" 267 | # not supported: v0.20 and older raise an error 268 | if commit.find("0.20") >= 0: 269 | raise ValueError(f"spack minimum version is v0.21 - recipe uses {commit}") 270 | 271 | return "unknown" 272 | 273 | @property 274 | def environment_view_meta(self): 275 | # generate the view meta data that is presented in the squashfs image meta data 276 | view_meta = {} 277 | for _, env in self.environments.items(): 278 | view = env["view"] 279 | if view is not None: 280 | view_meta[view["name"]] = { 281 | "root": view["config"]["root"], 282 | "activate": view["config"]["root"] + "/activate.sh", 283 | "description": "", # leave the description empty for now 284 | } 285 | 286 | return view_meta 287 | 288 | @property 289 | def modules_yaml(self): 290 | with self.modules.open() as fid: 291 | raw = yaml.load(fid, Loader=yaml.Loader) 292 | raw["modules"]["default"]["roots"]["tcl"] = (pathlib.Path(self.mount) / "modules").as_posix() 293 | return yaml.dump(raw) 294 | 295 | # creates the self.environments field that describes the full specifications 296 | # for all of the environment sets, grouped in environments, from the raw 297 | # environments.yaml input.
298 | def generate_environment_specs(self, raw): 299 | environments = raw 300 | 301 | # enumerate large binary packages that should not be pushed to binary caches 302 | for _, config in environments.items(): 303 | config["exclude_from_cache"] = ["cuda", "nvhpc", "perl"] 304 | 305 | # check the environment descriptions and amend where features are missing 306 | for name, config in environments.items(): 307 | if ("specs" not in config) or (config["specs"] is None): 308 | environments[name]["specs"] = [] 309 | 310 | if "mpi" not in config: 311 | environments[name]["mpi"] = {"spec": None, "gpu": None} 312 | 313 | # complete configuration of MPI in each environment 314 | for name, config in environments.items(): 315 | if config["mpi"]: 316 | mpi = config["mpi"] 317 | mpi_spec = mpi["spec"] 318 | mpi_gpu = mpi["gpu"] 319 | if mpi_spec: 320 | try: 321 | mpi_impl, mpi_ver = mpi_spec.strip().split(sep="@", maxsplit=1) 322 | except ValueError: 323 | mpi_impl = mpi_spec.strip() 324 | mpi_ver = None 325 | 326 | if mpi_impl in Recipe.valid_mpi_specs: 327 | default_ver, options = Recipe.valid_mpi_specs[mpi_impl] 328 | if mpi_ver: 329 | version_opt = f"@{mpi_ver}" 330 | else: 331 | version_opt = f"@{default_ver}" if default_ver else "" 332 | 333 | spec = f"{mpi_impl}{version_opt} {options or ''}".strip() 334 | 335 | if mpi_gpu: 336 | spec = f"{spec} +{mpi_gpu}" 337 | 338 | environments[name]["specs"].append(spec) 339 | else: 340 | # TODO: Create a custom exception type 341 | raise Exception(f"Unsupported mpi: {mpi_impl}") 342 | 343 | # set constraints that ensure the main compiler is always used to build packages 344 | # that do not explicitly request a compiler. 345 | for name, config in environments.items(): 346 | compilers = config["compiler"] 347 | if len(compilers) == 1: 348 | config["toolchain_constraints"] = [] 349 | continue 350 | requires = [f"%{compilers[0]['spec']}"] 351 | for spec in config["specs"]: 352 | if "%" in spec: 353 | requires.append(spec) 354 | 355 | config["toolchain_constraints"] = requires 356 | 357 | # An awkward hack to work around spack not supporting creating activation 358 | # scripts for each file system view in an environment: it only generates them 359 | # for the "default" view. 360 | # The workaround is to create multiple versions of the same environment, one 361 | # for each view. 362 | # TODO: remove when the minimum supported version of spack is v0.21, in which 363 | # this issue was fixed, see https://github.com/spack/spack/pull/40549 364 | # we have a `--develop` workaround that uses the current approach of generating 365 | # a separate environment for each view, with a view named "default", and uses 366 | # the name default to generate the activation script. 
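    # A sketch of the expansion for a hypothetical recipe: an environment
    # "prgenv" with two views, {"default": ..., "develop": ...}, is split into
    # the spack environments "prgenv" and "prgenv-2__", each configured with a
    # single view, so that spack emits an activation script for every view.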
367 | env_names = set() 368 | env_name_map = {} 369 | for name, config in environments.items(): 370 | env_name_map[name] = [] 371 | for view, vc in config["views"].items(): 372 | if view in env_names: 373 | raise Exception(f"An environment view with the name '{view}' already exists.")
env_names.add(view)  # (fix) record each view name; previously the set was never updated, so the duplicate check above could not fire
374 | # set some default values: 375 | # vc["link"] = "roots" 376 | # vc["uenv"]["add_compilers"] = True 377 | # vc["uenv"]["prefix_paths"] = {} 378 | if vc is None: 379 | vc = {} 380 | vc.setdefault("link", "roots") 381 | vc.setdefault("uenv", {}) 382 | vc["uenv"].setdefault("add_compilers", True) 383 | vc["uenv"].setdefault("prefix_paths", {}) 384 | prefix_string = ",".join( 385 | [f"{name}={':'.join(paths)}" for name, paths in vc["uenv"]["prefix_paths"].items()] 386 | ) 387 | vc["uenv"]["prefix_string"] = prefix_string 388 | # save a copy of the view configuration 389 | env_name_map[name].append((view, vc)) 390 | 391 | # Iterate over each environment: 392 | # - create copies of the env so that there is one copy per view. 393 | # - configure each view 394 | for name, views in env_name_map.items(): 395 | numviews = len(env_name_map[name]) 396 | 397 | # The configuration of the environment without views 398 | base = copy.deepcopy(environments[name]) 399 | 400 | environments[name]["view"] = None 401 | for i in range(numviews): 402 | # pick a name for the environment 403 | cname = name if i == 0 else name + f"-{i + 1}__" 404 | if i > 0: 405 | environments[cname] = copy.deepcopy(base) 406 | 407 | view_name, view_config = views[i] 408 | # note: the root path is stored as a string, not as a pathlib.PosixPath 409 | # to avoid serialisation issues when generating the spack.yaml file for 410 | # each environment. 411 | if view_config is None: 412 | view_config = {"root": str(self.mount / "env" / view_name)} 413 | else: 414 | view_config["root"] = str(self.mount / "env" / view_name) 415 | 416 | # The "uenv" field is not spack configuration, it is additional information 417 | # used by stackinator to additionally set compiler paths and LD_LIBRARY_PATH. 418 | # Remove it from the view_config that will be passed directly to spack, and pass 419 | # it separately for configuring the envvars.py helper during the uenv build. 
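            # For example, a hypothetical view configuration
            #   uenv: {add_compilers: true, prefix_paths: {LD_LIBRARY_PATH: [lib, lib64]}}
            # is popped off here, and its prefix_paths were encoded above as the
            # string "LD_LIBRARY_PATH=lib:lib64" that is later handed to
            # envvars.py via --prefix_paths.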
420 | extra = view_config.pop("uenv") 421 | 422 | environments[cname]["view"] = {"name": view_name, "config": view_config, "extra": extra} 423 | 424 | self.environments = environments 425 | 426 | # creates the self.compilers field that describes the full specifications 427 | # for all of the compilers from the raw compilers.yaml input 428 | def generate_compiler_specs(self, raw): 429 | compilers = {} 430 | 431 | bootstrap = {} 432 | bootstrap["packages"] = { 433 | "external": [ 434 | "perl", 435 | "m4", 436 | "autoconf", 437 | "automake", 438 | "libtool", 439 | "gawk", 440 | "python", 441 | "texinfo", 442 | "gawk", 443 | ], 444 | } 445 | bootstrap_spec = raw["bootstrap"]["spec"] 446 | bootstrap["specs"] = [ 447 | f"{bootstrap_spec} languages=c,c++", 448 | "squashfs default_compression=zstd", 449 | ] 450 | bootstrap["exclude_from_cache"] = ["cuda", "nvhpc", "perl"] 451 | compilers["bootstrap"] = bootstrap 452 | 453 | gcc = {} 454 | gcc["packages"] = { 455 | "external": [ 456 | "perl", 457 | "m4", 458 | "autoconf", 459 | "automake", 460 | "libtool", 461 | "gawk", 462 | "python", 463 | "texinfo", 464 | "gawk", 465 | ], 466 | } 467 | gcc["specs"] = raw["gcc"]["specs"] 468 | gcc["requires"] = bootstrap_spec 469 | gcc["exclude_from_cache"] = ["cuda", "nvhpc", "perl"] 470 | compilers["gcc"] = gcc 471 | if raw["llvm"] is not None: 472 | llvm = {} 473 | llvm["packages"] = False 474 | llvm["specs"] = [] 475 | for spec in raw["llvm"]["specs"]: 476 | if spec.startswith("nvhpc"): 477 | llvm["specs"].append(f"{spec}~mpi~blas~lapack") 478 | 479 | if spec.startswith("llvm"): 480 | llvm["specs"].append(f"{spec} +clang targets=x86 ~gold ^ninja@kitware") 481 | 482 | llvm["requires"] = raw["llvm"]["requires"] 483 | llvm["exclude_from_cache"] = ["cuda", "nvhpc", "perl"] 484 | compilers["llvm"] = llvm 485 | 486 | self.compilers = compilers 487 | 488 | # The path of the default configuration for the target system/cluster 489 | @property 490 | def system_config_path(self): 491 | return self._system_path 492 | 493 | @system_config_path.setter 494 | def system_config_path(self, path): 495 | system_path = pathlib.Path(path) 496 | if not system_path.is_absolute(): 497 | system_path = pathlib.Path.cwd() / system_path 498 | 499 | if not system_path.is_dir(): 500 | raise FileNotFoundError(f"The system configuration path '{system_path}' does not exist") 501 | 502 | self._system_path = system_path 503 | 504 | @property 505 | def mount(self): 506 | return pathlib.Path(self.config["store"]) 507 | 508 | @property 509 | def compiler_files(self): 510 | files = {} 511 | 512 | env = jinja2.Environment( 513 | loader=jinja2.FileSystemLoader(self.template_path), 514 | trim_blocks=True, 515 | lstrip_blocks=True, 516 | ) 517 | 518 | makefile_template = env.get_template("Makefile.compilers") 519 | push_to_cache = self.mirror is not None 520 | files["makefile"] = makefile_template.render( 521 | compilers=self.compilers, 522 | push_to_cache=push_to_cache, 523 | spack_version=self.spack_version, 524 | ) 525 | 526 | # generate compilers/<compiler>/spack.yaml 527 | files["config"] = {} 528 | for compiler, config in self.compilers.items(): 529 | spack_yaml_template = env.get_template(f"compilers.{compiler}.spack.yaml") 530 | files["config"][compiler] = spack_yaml_template.render(config=config) 531 | 532 | return files 533 | 534 | @property 535 | def environment_files(self): 536 | files = {} 537 | 538 | jenv = jinja2.Environment( 539 | loader=jinja2.FileSystemLoader(self.template_path), 540 | trim_blocks=True, 541 | lstrip_blocks=True, 542 | ) 543 | 
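        # register the custom py2yaml filter (defined in schema.py), which renders
        # a python dict as a YAML block indented by the requested number of spaces,
        # e.g. {{ config.view.config|py2yaml(6) }} in the environments.spack.yaml template.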
jenv.filters["py2yaml"] = schema.py2yaml 544 | 545 | makefile_template = jenv.get_template("Makefile.environments") 546 | push_to_cache = self.mirror is not None 547 | files["makefile"] = makefile_template.render( 548 | environments=self.environments, 549 | push_to_cache=push_to_cache, 550 | spack_version=self.spack_version, 551 | ) 552 | 553 | files["config"] = {} 554 | for env, config in self.environments.items(): 555 | spack_yaml_template = jenv.get_template("environments.spack.yaml") 556 | files["config"][env] = spack_yaml_template.render(config=config, name=env, store=self.mount) 557 | 558 | return files 559 | -------------------------------------------------------------------------------- /stackinator/schema.py: -------------------------------------------------------------------------------- 1 | import json 2 | import pathlib 3 | 4 | import jsonschema 5 | import yaml 6 | 7 | prefix = pathlib.Path(__file__).parent.resolve() 8 | 9 | # create a validator that will insert optional fields with their default values 10 | # if they have not been provided. 11 | 12 | 13 | def extend_with_default(validator_class): 14 | validate_properties = validator_class.VALIDATORS["properties"] 15 | 16 | def set_defaults(validator, properties, instance, schema): 17 | # if instance is none, it's not possible to set any default for any sub-property 18 | if instance is not None: 19 | for property, subschema in properties.items(): 20 | if "default" in subschema: 21 | instance.setdefault(property, subschema["default"]) 22 | 23 | for error in validate_properties( 24 | validator, 25 | properties, 26 | instance, 27 | schema, 28 | ): 29 | yield error 30 | 31 | return jsonschema.validators.extend( 32 | validator_class, 33 | {"properties": set_defaults}, 34 | ) 35 | 36 | 37 | def py2yaml(data, indent): 38 | dump = yaml.dump(data) 39 | lines = [ln for ln in dump.split("\n") if ln != ""] 40 | res = ("\n" + " " * indent).join(lines) 41 | return res 42 | 43 | 44 | validator = extend_with_default(jsonschema.Draft7Validator) 45 | 46 | # load recipe yaml schema 47 | config_schema = json.load(open(prefix / "schema/config.json")) 48 | config_validator = validator(config_schema) 49 | compilers_schema = json.load(open(prefix / "schema/compilers.json")) 50 | compilers_validator = validator(compilers_schema) 51 | environments_schema = json.load(open(prefix / "schema/environments.json")) 52 | environments_validator = validator(environments_schema) 53 | cache_schema = json.load(open(prefix / "schema/cache.json")) 54 | cache_validator = validator(cache_schema) 55 | -------------------------------------------------------------------------------- /stackinator/schema/cache.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "title": "Schema for Spack Stack cache.yaml", 4 | "type" : "object", 5 | "additionalProperties": false, 6 | "required": ["key", "root"], 7 | "properties" : { 8 | "key" : { 9 | "oneOf": [ 10 | {"type" : "string"}, 11 | {"type" : "null"} 12 | ], 13 | "default": null 14 | }, 15 | "root" : { 16 | "type": "string" 17 | } 18 | } 19 | } 20 | 21 | -------------------------------------------------------------------------------- /stackinator/schema/compilers.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "title": "Schema for Spack Stack compilers.yaml recipe file", 4 | "type": "object", 5 | "additionalProperties": false, 
6 | "required": ["bootstrap", "gcc"], 7 | "defs": { 8 | "gcc_version_spec": { 9 | "type": "string", 10 | "pattern": "^gcc@\\d{1,2}(\\.\\d{1}(\\.\\d{1})?)?$" 11 | }, 12 | "gcc_version_spec_list": { 13 | "type": "array", 14 | "items": {"$ref": "#/defs/gcc_version_spec"}, 15 | "minItems": 1 16 | }, 17 | "llvm_version_spec": { 18 | "type": "string", 19 | "pattern": "^llvm@\\d{1,2}(\\.\\d{1}(\\.\\d{1})?)?$" 20 | }, 21 | "nvhpc_version_spec": { 22 | "type": "string", 23 | "pattern": "^nvhpc@\\d{2}(\\.\\d{1,2})?$" 24 | }, 25 | "llvm_version_spec_list": { 26 | "type": "array", 27 | "items": { 28 | "anyOf": [ 29 | {"$ref": "#/defs/llvm_version_spec"}, 30 | {"$ref": "#/defs/nvhpc_version_spec"} 31 | ] 32 | }, 33 | "minItems": 1 34 | } 35 | }, 36 | "properties": { 37 | "bootstrap": { 38 | "type": "object", 39 | "properties": { 40 | "spec": {"$ref": "#/defs/gcc_version_spec"} 41 | }, 42 | "additionalProperties": false, 43 | "required": ["spec"] 44 | }, 45 | "gcc": { 46 | "type": "object", 47 | "properties": { 48 | "specs": {"$ref": "#/defs/gcc_version_spec_list"} 49 | }, 50 | "additionalProperties": false, 51 | "required": ["specs"] 52 | }, 53 | "llvm": { 54 | "oneOf": [ 55 | { 56 | "type": "object", 57 | "properties": { 58 | "requires": {"$ref": "#/defs/gcc_version_spec"}, 59 | "specs": {"$ref": "#/defs/llvm_version_spec_list"} 60 | }, 61 | "additionalProperties": false, 62 | "required": ["requires", "specs"] 63 | }, 64 | { 65 | "type": "null" 66 | } 67 | ], 68 | "default": null 69 | } 70 | } 71 | } 72 | 73 | -------------------------------------------------------------------------------- /stackinator/schema/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "title": "Schema for Spack Stack config.yaml recipe file", 4 | "type" : "object", 5 | "additionalProperties": false, 6 | "required": ["name", "spack"], 7 | "properties" : { 8 | "name" : { 9 | "type": "string" 10 | }, 11 | "store" : { 12 | "type" : "string", 13 | "default" : "/user-environment" 14 | }, 15 | "spack" : { 16 | "type" : "object", 17 | "additionalProperties": false, 18 | "properties" : { 19 | "repo": { 20 | "type": "string" 21 | }, 22 | "commit": { 23 | "oneOf": [ 24 | {"type" : "string"}, 25 | {"type" : "null"} 26 | ], 27 | "default": null 28 | } 29 | } 30 | }, 31 | "mirror" : { 32 | "type" : "object", 33 | "additionalProperties": false, 34 | "default": {"enable": true, "key": null}, 35 | "properties" : { 36 | "enable" : { 37 | "type": "boolean", 38 | "default": true 39 | }, 40 | "key" : { 41 | "oneOf": [ 42 | {"type" : "string"}, 43 | {"type" : "null"} 44 | ], 45 | "default": null 46 | } 47 | } 48 | }, 49 | "modules" : { 50 | "type": "boolean", 51 | "default": true 52 | }, 53 | "description" : { 54 | "oneOf": [ 55 | {"type" : "string"}, 56 | {"type" : "null"} 57 | ], 58 | "default": null 59 | }, 60 | "version" : { 61 | "type": "number", 62 | "default": 1 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /stackinator/schema/environments.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "http://json-schema.org/draft-07/schema#", 3 | "title": "Schema for Spack Stack environments.yaml recipe file", 4 | "type": "object", 5 | "additionalProperties": false, 6 | "patternProperties": { 7 | "\\w[\\w-]*": { 8 | "type": "object", 9 | "required": ["compiler", "specs"], 10 | "additionalProperties": false, 11 | "properties": { 
12 | "deprecated": { 13 | "type": "boolean", 14 | "default": false 15 | }, 16 | "unify": { 17 | "enum": ["when_possible", true, false], 18 | "default": true 19 | }, 20 | "compiler": { 21 | "type": "array", 22 | "items": { 23 | "type": "object", 24 | "additionalProperties": false, 25 | "properties": { 26 | "toolchain": {"type": "string"}, 27 | "spec": {"type": "string"} 28 | } 29 | } 30 | }, 31 | "specs": { 32 | "type": "array", 33 | "items": {"type": "string"} 34 | }, 35 | "variants": { 36 | "type": "array", 37 | "items": {"type": "string"}, 38 | "default": [] 39 | }, 40 | "mpi": { 41 | "oneOf": [ 42 | { 43 | "type": "object", 44 | "additionalProperties": false, 45 | "properties": { 46 | "spec": {"type": "string"}, 47 | "gpu": { 48 | "enum": ["cuda", "rocm", null, false], 49 | "default": null 50 | } 51 | } 52 | }, 53 | {"enum": [null, false]} 54 | ], 55 | "default": null 56 | }, 57 | "packages": { 58 | "type": "array", 59 | "items": {"type": "string"}, 60 | "default": [] 61 | }, 62 | "views": { 63 | "type": "object", 64 | "default": {}, 65 | "patternProperties": { 66 | "\\w+": { 67 | "additionalProperties": false, 68 | "comment": "we can't set default values in such a construction, so they are set inside stackinator", 69 | "properties": { 70 | "link": { 71 | "type": "string", 72 | "enum": ["roots", "all", "run"] 73 | }, 74 | "select": { 75 | "type": "array", 76 | "items": {"type": "string"} 77 | }, 78 | "exclude": { 79 | "type": "array", 80 | "items": {"type": "string"} 81 | }, 82 | "projections": { 83 | "type": "object", 84 | "patternProperties": { 85 | "all|\\w[\\w-]*": {"type": "string"} 86 | } 87 | }, 88 | "uenv": { 89 | "type": "object", 90 | "additionalProperties": false, 91 | "properties": { 92 | "add_compilers": { "type": "boolean" }, 93 | "prefix_paths": { 94 | "type": "object", 95 | "patternProperties": { 96 | "\\w+": { 97 | "type": "array", 98 | "items": {"type": "string"} 99 | } 100 | } 101 | } 102 | } 103 | } 104 | } 105 | } 106 | } 107 | } 108 | } 109 | } 110 | } 111 | } 112 | 113 | -------------------------------------------------------------------------------- /stackinator/spack_util.py: -------------------------------------------------------------------------------- 1 | def is_repo(path): 2 | """ 3 | Returns True if path contains a spack package repo, where the definition of 4 | a spack package repo is a directory with a sub-directory named packages 5 | 6 | Otherwise returns False. 7 | """ 8 | pkg_path = path / "packages" 9 | if pkg_path.exists() and pkg_path.is_dir(): 10 | return True 11 | return False 12 | -------------------------------------------------------------------------------- /stackinator/templates/Make.user: -------------------------------------------------------------------------------- 1 | # vim: filetype=make 2 | 3 | # Copy this file to Make.user and set some variables. 4 | 5 | # This is the root of the software stack directory. 6 | BUILD_ROOT := {{ build_path }} 7 | 8 | # What Spack should we use? 9 | SPACK := spack 10 | 11 | # This uses the same spack, but ensures "plain" output. 12 | # Useful when output has to be manipulated (e.g. build a usable path) 13 | SPACK_HELPER := $(SPACK) --color=never 14 | 15 | # The Spack installation root. 16 | STORE := {{ store }} 17 | 18 | # When already building inside a sandbox, use `SANDBOX :=` (empty string) 19 | # Without a sandbox, make sure to hide sensitive data such as ~/.ssh through bubblewrap. 
20 | # Also bind the directories `./tmp -> /tmp` and `./store -> $(STORE)`, so that 21 | # builds and installs happen inside the current directory. For speed, either 22 | # put the project itself in-memory, or use a flag like --bind /dev/shm/store 23 | # $(STORE). Use `bwrap-mutable-root.sh` in case you need to create a new 24 | # directory at the root /. 25 | {% if no_bwrap %} 26 | SANDBOX := 27 | {% else %} 28 | SANDBOX := $(BUILD_ROOT)/bwrap-mutable-root.sh $\ 29 | --tmpfs ~ $\ 30 | --bind $(BUILD_ROOT)/tmp /tmp $\ 31 | --bind $(BUILD_ROOT)/store $(STORE) 32 | {% endif %} 33 | # Makes sure that make -Orecurse continues to print in color. 34 | export SPACK_COLOR := always 35 | 36 | # Do not use user config, because more often than not you pick up the wrong 37 | # config files in ~/.spack. Note that our recommended bwrap setup already puts 38 | # a tmpfs in the home folder, but when bwrap isn't used, this also helps a bit 39 | # with reproducibility. 40 | {% if spack_version>="0.22" %} 41 | # spack after 0.22.1 chokes on /dev/null 42 | export SPACK_USER_CONFIG_PATH := ~ 43 | {% else %} 44 | export SPACK_USER_CONFIG_PATH := /dev/null 45 | {% endif %} 46 | 47 | # Set up the system config scope that has the system packages we don't want to 48 | # build, for example slurm, pmix, etc. Also should have the system compiler. 49 | export SPACK_SYSTEM_CONFIG_PATH := $(BUILD_ROOT)/config 50 | 51 | # Put clingo and friends here... 52 | export SPACK_USER_CACHE_PATH := $(BUILD_ROOT)/cache 53 | 54 | # Output the full build log to stdout. 55 | {% if verbose %} 56 | export SPACK_INSTALL_FLAGS := --verbose 57 | {% endif %} 58 | 59 | # Reproducibility 60 | export LC_ALL := en_US.UTF-8 61 | export TZ := UTC 62 | 63 | # I tried UNIX epoch 0 here, but it results in build errors with Python 64 | # packages using wheels, since they rely on zipfiles, and zipfiles can only 65 | # handle DOS epoch, which is the magic number below (1980-01-01). 66 | export SOURCE_DATE_EPOCH := 315576060 67 | -------------------------------------------------------------------------------- /stackinator/templates/Makefile: -------------------------------------------------------------------------------- 1 | {% set pipejoiner = joiner('|') %} 2 | -include Make.user 3 | 4 | .PHONY: compilers environments generate-config clean spack-setup 5 | 6 | all: environments 7 | 8 | # Keep track of what Spack version was used. 9 | spack-version: 10 | $(SANDBOX) $(SPACK) --version > $@ 11 | 12 | # Do some sanity checks: (a) are we not on cray, (b) are we using the same 13 | # version as before, (c) ensure that the concretizer is bootstrapped to avoid a 14 | # race where multiple processes start doing that. 15 | spack-setup: spack-version 16 | @printf "spack arch... " ; \ 17 | arch="$$($(SANDBOX) $(SPACK) arch)"; \ 18 | printf "%s\n" "$$arch"; \ 19 | case "$$arch" in \ 20 | *cray*) \ 21 | echo "You are running on Cray, which is usually a bad idea, since it puts Spack into modules mode. Try running in a clean environment with env -i."; \ 22 | exit 1 \ 23 | ;; \ 24 | esac; \ 25 | printf "spack version... "; \ 26 | version="$$($(SANDBOX) $(SPACK) --version)"; \ 27 | printf "%s\n" "$$version"; \ 28 | if [ "$$version" != "$$(cat spack-version)" ]; then \ 29 | echo "The spack version seems to have been changed in the meantime... remove ./spack-version if that was intended"; \ 30 | exit 1; \ 31 | fi; \ 32 | printf "checking if spack concretizer works... 
"; \ 33 | $(SANDBOX) $(SPACK_HELPER) -d spec zlib > $(BUILD_ROOT)/spack-bootstrap-output 2>&1; \ 34 | if [ "$$?" != "0" ]; then \ 35 | printf " failed, see %s\n" $(BUILD_ROOT)/spack-bootstrap-output; \ 36 | exit 1; \ 37 | fi; \ 38 | printf " success\n"; \ 39 | touch spack-setup 40 | 41 | pre-install: spack-setup 42 | $(SANDBOX) $(STORE)/pre-install-hook 43 | 44 | mirror-setup: spack-setup{% if pre_install_hook %} pre-install{% endif %} 45 | 46 | {% if cache %} 47 | $(SANDBOX) $(SPACK) buildcache keys --install --trust 48 | {% if cache.key %} 49 | $(SANDBOX) $(SPACK) gpg trust {{ cache.key }} 50 | {% endif %} 51 | {% endif %} 52 | touch mirror-setup 53 | 54 | compilers: mirror-setup 55 | $(SANDBOX) $(MAKE) -C $@ 56 | 57 | generate-config: compilers 58 | $(SANDBOX) $(MAKE) -C $@ 59 | 60 | environments: compilers 61 | $(SANDBOX) $(MAKE) -C $@ 62 | 63 | {% if modules %} 64 | modules-done: environments generate-config 65 | $(SANDBOX) $(SPACK) -C $(BUILD_ROOT)/modules module tcl refresh --upstream-modules --delete-tree --yes-to-all 66 | touch modules-done 67 | {% endif %} 68 | 69 | 70 | env-meta: generate-config environments{% if modules %} modules-done{% endif %} 71 | 72 | $(SANDBOX) $(BUILD_ROOT)/envvars.py uenv {% if modules %}--modules{% endif %} --spack='{{ spack_meta.url }},{{ spack_meta.ref }},{{ spack_meta.commit }}' $(STORE) 73 | touch env-meta 74 | 75 | post-install: env-meta 76 | {% if post_install_hook %} 77 | $(SANDBOX) $(STORE)/post-install-hook 78 | {% endif %} 79 | touch post-install 80 | 81 | # Create a squashfs file from the installed software. 82 | store.squashfs: post-install 83 | # clean up the __pycache__ paths in the repo 84 | $(SANDBOX) find $(STORE)/repo -type d -name __pycache__ -exec rm -r {} + 85 | $(SANDBOX) chmod -R a+rX $(STORE) 86 | $(SANDBOX) env -u SOURCE_DATE_EPOCH "$$($(SANDBOX) $(SPACK_HELPER) -e ./compilers/bootstrap find --format='{prefix}' squashfs | head -n1)/bin/mksquashfs" $(STORE) $@ -force-uid nobody -force-gid nobody -all-time $$(date +%s) -no-recovery -noappend -Xcompression-level 3 87 | 88 | # Force push all built packages to the build cache 89 | cache-force: mirror-setup 90 | {% if cache.key %} 91 | $(warning ================================================================================) 92 | $(warning Generate the config in order to force push partially built compiler environments) 93 | $(warning if this step is performed with partially built compiler envs, you will) 94 | $(warning likely have to start a fresh build (but that's okay, because build caches FTW)) 95 | $(warning ================================================================================) 96 | $(SANDBOX) $(MAKE) -C generate-config 97 | $(SANDBOX) $(SPACK) -C $(STORE)/config buildcache create --rebuild-index --only=package alpscache \ 98 | $$($(SANDBOX) $(SPACK_HELPER) -C $(STORE)/config find --format '{name};{/hash};version={version}' \ 99 | | grep -v -E '^({% for p in exclude_from_cache %}{{ pipejoiner() }}{{ p }}{% endfor %});'\ 100 | | grep -v -E 'version=git\.'\ 101 | | cut -d ';' -f2) 102 | {% else %} 103 | $(warning "pushing to the build cache is not enabled. 
See the documentation on how to add a key: https://eth-cscs.github.io/stackinator/build-caches/") 104 | {% endif %} 105 | 106 | # A backup of all the generated files during the build, useful for posterity, 107 | # excluding the binaries themselves, since they're in the squashfs file 108 | build.tar.gz: spack-version Make.user Make.inc Makefile | environments 109 | tar czf $@ $^ $$(find environments compilers config -maxdepth 2 -name Makefile -o -name '*.yaml') 110 | 111 | # Clean generated files, does *not* remove installed software. 112 | clean: 113 | rm -rf -- $(wildcard */*/spack.lock) $(wildcard */*/.spack-env) $(wildcard */*/Makefile) $(wildcard */*/generated) $(wildcard cache) $(wildcard compilers/*/config.yaml) $(wildcard compilers/*/packages.yaml) $(wildcard compilers/*/compilers.yaml) $(wildcard environments/*/config.yaml) $(wildcard environments/*/packages.yaml) $(wildcard environments/*/compilers.yaml) post-install modules-done env-meta store.squashfs 114 | 115 | include Make.inc 116 | -------------------------------------------------------------------------------- /stackinator/templates/Makefile.compilers: -------------------------------------------------------------------------------- 1 | {% set pipejoiner = joiner('|') %} 2 | -include ../Make.user 3 | 4 | MAKEFLAGS += --output-sync=recurse 5 | 6 | .PHONY: all .locks .packages.yaml 7 | 8 | all:{% for compiler in compilers %} {{ compiler }}/generated/build_cache{% endfor %} 9 | 10 | 11 | # Ensure that spack.lock files are never removed as intermediate files... 12 | .locks:{% for compiler in compilers %} {{ compiler }}/spack.lock{% endfor %} 13 | 14 | 15 | # Ensure that package yaml files are never removed as intermediate files... 16 | .packages.yaml:{% for compiler in compilers %} {{ compiler }}/packages.yaml{% endfor %} 17 | 18 | 19 | {% for compiler, config in compilers.items() %} 20 | {{ compiler }}/generated/build_cache: {{ compiler }}/generated/env 21 | {% if push_to_cache %} 22 | $(SPACK) -e ./{{ compiler }} buildcache create --rebuild-index --only=package alpscache \ 23 | $$($(SPACK_HELPER) -e ./{{ compiler }} find --format '{name};{/hash}' \ 24 | | grep -v -E '^({% for p in config.exclude_from_cache %}{{ pipejoiner() }}{{ p }}{% endfor %});'\ 25 | | cut -d ';' -f2) 26 | {% endif %} 27 | touch $@ 28 | 29 | {% endfor %} 30 | 31 | # Configure the install location. 
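# (this rule points spack's install tree at $(STORE), the uenv mount point, so
# each compiler is installed into the image that later becomes the squashfs)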
32 | {% for compiler in compilers %}{{ compiler }}/config.yaml {% endfor %}: | store 33 | $(SPACK) config --scope=user add config:install_tree:root:$(STORE) 34 | 35 | # Configure external system dependencies for each compiler toolchain 36 | {% for compiler, config in compilers.items() %} 37 | {% if config.packages and config.packages.external %} 38 | {{ compiler }}/packages.yaml: 39 | $(SPACK) external find --scope=user {% for package in config.packages.external %} {{package}}{% endfor %} 40 | 41 | {% endif %} 42 | {% endfor %} 43 | # Configure dependencies between compilers 44 | gcc/compilers.yaml: bootstrap/generated/env 45 | $(SPACK) compiler find --scope=user $(call compiler_bin_dirs, $$($(SPACK_HELPER) -e ./bootstrap find --format '{prefix}' {{ compilers.gcc.requires }})) 46 | 47 | {% if compilers.llvm %} 48 | llvm/compilers.yaml: gcc/generated/env 49 | $(SPACK) compiler find --scope=user $(call compiler_bin_dirs, $$($(SPACK_HELPER) -e ./gcc find --format '{prefix}' {{ compilers.llvm.requires }})) 50 | {% endif %} 51 | 52 | 53 | include ../Make.inc 54 | 55 | # GNU Make isn't very smart about dependencies across included Makefiles, so we 56 | # specify the order here by conditionally including them, when the makefile they depend on exists. 57 | ifeq (,$(filter clean,$(MAKECMDGOALS))) 58 | 59 | include bootstrap/Makefile 60 | 61 | ifneq (,$(wildcard bootstrap/Makefile)) 62 | include gcc/Makefile 63 | endif 64 | 65 | {% if compilers.llvm %} 66 | ifneq (,$(wildcard gcc/Makefile)) 67 | include llvm/Makefile 68 | endif 69 | {% endif %} 70 | 71 | 72 | endif 73 | -------------------------------------------------------------------------------- /stackinator/templates/Makefile.environments: -------------------------------------------------------------------------------- 1 | {% set pipejoiner = joiner('|') %} 2 | -include ../Make.user 3 | 4 | MAKEFLAGS += --output-sync=recurse 5 | 6 | .PHONY: all .locks .packages.yaml 7 | 8 | all:{% for env in environments %} {{ env }}/generated/build_cache{% endfor %} 9 | 10 | 11 | # Ensure that spack.lock files are never removed as intermediate files 12 | .locks:{% for env in environments %} {{ env }}/spack.lock{% endfor %} 13 | 14 | # Ensure that package yaml files are never removed as intermediate files... 
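# (naming the files as prerequisites of an explicit target stops GNU Make from
# classifying them as intermediate files and deleting them after use, similar
# in effect to marking them .SECONDARY)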
15 | .packages.yaml:{% for env in environments %} {{ env }}/packages.yaml{% endfor %} 16 | 17 | # Push built packages to a binary cache if a key has been provided 18 | {% for env, config in environments.items() %} 19 | {{ env }}/generated/build_cache: {{ env }}/generated/view_config 20 | {% if push_to_cache %} 21 | $(SPACK) -e ./{{ env }} buildcache create --rebuild-index --only=package alpscache \ 22 | $$($(SPACK_HELPER) -e ./{{ env }} find --format '{name};{/hash};version={version}' \ 23 | | grep -v -E '^({% for p in config.exclude_from_cache %}{{ pipejoiner() }}{{ p }}{% endfor %});'\ 24 | | grep -v -E 'version=git\.'\ 25 | | cut -d ';' -f2) 26 | {% endif %} 27 | touch $@ 28 | 29 | {% endfor %} 30 | 31 | # Create environment views where requested 32 | {% for env, config in environments.items() %} 33 | {{ env }}/generated/view_config: {{ env }}/generated/env 34 | {% if config.view %} 35 | $(SPACK) env activate --with-view default --sh ./{{ env }} > $(STORE)/env/{{ config.view.name }}/activate.sh 36 | $(BUILD_ROOT)/envvars.py view {% if config.view.extra.add_compilers %}--compilers=./{{ env }}/compilers.yaml {% endif %} --prefix_paths="{{ config.view.extra.prefix_string }}" $(STORE)/env/{{ config.view.name }} $(BUILD_ROOT) 37 | {% endif %} 38 | touch $@ 39 | 40 | {% endfor %} 41 | 42 | 43 | {% for env in environments %}{{ env }}/config.yaml {% endfor %}: | store 44 | $(SPACK) config --scope=user add config:install_tree:root:$(STORE) 45 | 46 | # Create the compilers.yaml configuration for each environment 47 | {% for env, config in environments.items() %} 48 | {{ env }}_PREFIX = {% for C in config.compiler %} $$($(SPACK_HELPER) -e ../compilers/{{ C.toolchain }} find --format '{prefix}' {{ C.spec }}){% endfor %} 49 | 50 | {{ env }}/compilers.yaml: 51 | $(SPACK) compiler find --scope=user $(call compiler_bin_dirs, $({{ env }}_PREFIX)) 52 | 53 | {% endfor %} 54 | 55 | # Configure external system dependencies for each environment 56 | {% for env, config in environments.items() %} 57 | {% if config.packages %} 58 | {{ env }}/packages.yaml: 59 | $(SPACK) external find --not-buildable --scope=user {% for package in config.packages %} {{package}}{% endfor %} 60 | 61 | 62 | {% endif %} 63 | {% endfor %} 64 | 65 | -include ../Make.inc 66 | 67 | ifeq (,$(filter clean,$(MAKECMDGOALS))) 68 | {% for env in environments %} 69 | include {{ env }}/Makefile 70 | {% endfor %} 71 | endif 72 | -------------------------------------------------------------------------------- /stackinator/templates/Makefile.generate-config: -------------------------------------------------------------------------------- 1 | include ../Make.user 2 | 3 | CONFIG_DIR = $(STORE)/config 4 | MODULE_DIR = $(BUILD_ROOT)/modules 5 | 6 | # These will be the prefixes of the GCCs, LLVMs and NVHPCs in the respective environments. 
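# (illustrative example of one such prefix, taken from the arbor-uenv test
# fixture in this repository:
# /user-environment/linux-archrolling-zen4/gcc-13.3.0/gcc-13.2.0-rmq2jx2h54owhxopaprg7yg4ocbdqv2j)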
7 | ALL_COMPILER_PREFIXES ={% for compiler in all_compilers %} $$($(SPACK_HELPER) -e ../compilers/{{ compiler }} find --format='{prefix}' gcc llvm nvhpc){% endfor %} 8 | 9 | 10 | COMPILER_PREFIXES ={% for compiler in release_compilers %} $$($(SPACK_HELPER) -e ../compilers/{{ compiler }} find --format='{prefix}' gcc llvm nvhpc){% endfor %} 11 | 12 | 13 | all: $(CONFIG_DIR)/upstreams.yaml $(CONFIG_DIR)/compilers.yaml $(CONFIG_DIR)/packages.yaml $(CONFIG_DIR)/repos.yaml $(MODULE_DIR)/upstreams.yaml $(MODULE_DIR)/compilers.yaml 14 | 15 | # Generate the upstream configuration that will be provided by the mounted image 16 | $(CONFIG_DIR)/compilers.yaml: 17 | $(SPACK) compiler find --scope=user $(call compiler_bin_dirs, $(COMPILER_PREFIXES)) 18 | 19 | $(CONFIG_DIR)/upstreams.yaml: 20 | $(SPACK) config --scope=user add upstreams:system:install_tree:$(STORE) 21 | 22 | # Copy the cluster-specific packages.yaml file to the configuration. 23 | # requires compilers.yaml to ensure that the path $(CONFIG_DIR) has been created. 24 | $(CONFIG_DIR)/packages.yaml: $(CONFIG_DIR)/compilers.yaml 25 | install -m 644 $(BUILD_ROOT)/config/packages.yaml $(CONFIG_DIR)/packages.yaml 26 | 27 | $(CONFIG_DIR)/repos.yaml: $(CONFIG_DIR)/compilers.yaml 28 | install -m 644 $(BUILD_ROOT)/config/repos.yaml $(CONFIG_DIR)/repos.yaml 29 | 30 | # Generate a configuration used to generate the module files 31 | # The configuration in CONFIG_DIR can't be used for this purpose, because a compilers.yaml 32 | # that includes the bootstrap compiler is required to build the modules. 33 | $(MODULE_DIR)/compilers.yaml: 34 | $(SPACK) compiler find --scope=user $(call compiler_bin_dirs, $(ALL_COMPILER_PREFIXES)) 35 | 36 | $(MODULE_DIR)/upstreams.yaml: 37 | $(SPACK) config --scope=user add upstreams:system:install_tree:$(STORE) 38 | 39 | include ../Make.inc 40 | -------------------------------------------------------------------------------- /stackinator/templates/compilers.bootstrap.spack.yaml: -------------------------------------------------------------------------------- 1 | spack: 2 | include: 3 | - packages.yaml 4 | - config.yaml 5 | specs: 6 | {% for spec in config.specs %} 7 | - {{ spec }} 8 | {% endfor %} 9 | view: false 10 | concretizer: 11 | unify: true 12 | reuse: false 13 | packages: 14 | gcc: 15 | variants: [build_type=Release ~bootstrap +strip] 16 | mpc: 17 | variants: [libs=static] 18 | gmp: 19 | variants: [libs=static] 20 | mpfr: 21 | variants: [libs=static] 22 | zstd: 23 | variants: [libs=static] 24 | zlib: 25 | variants: [~shared] 26 | 27 | -------------------------------------------------------------------------------- /stackinator/templates/compilers.gcc.spack.yaml: -------------------------------------------------------------------------------- 1 | spack: 2 | include: 3 | - packages.yaml 4 | - config.yaml 5 | - compilers.yaml 6 | specs: 7 | {% for spec in config.specs %} 8 | - {{ spec }} 9 | {% endfor %} 10 | view: false 11 | concretizer: 12 | unify: when_possible 13 | reuse: false 14 | packages: 15 | gcc: 16 | variants: [build_type=Release +bootstrap +strip] 17 | mpc: 18 | variants: [libs=static] 19 | gmp: 20 | variants: [libs=static] 21 | mpfr: 22 | variants: [libs=static] 23 | zstd: 24 | variants: [libs=static] 25 | zlib: 26 | variants: [~shared] 27 | -------------------------------------------------------------------------------- /stackinator/templates/compilers.llvm.spack.yaml: -------------------------------------------------------------------------------- 1 | spack: 2 | include: 3 | - packages.yaml 4 | - 
config.yaml 5 | - compilers.yaml 6 | specs: 7 | {% for spec in config.specs %} 8 | - {{ spec }} 9 | {% endfor %} 10 | view: false 11 | concretizer: 12 | unify: when_possible 13 | reuse: false 14 | 15 | -------------------------------------------------------------------------------- /stackinator/templates/environments.spack.yaml: -------------------------------------------------------------------------------- 1 | spack: 2 | include: 3 | {% if config.packages %} 4 | - packages.yaml 5 | {% endif %} 6 | - compilers.yaml 7 | - config.yaml 8 | config: 9 | deprecated: {{ config.deprecated }} 10 | concretizer: 11 | unify: {{ config.unify }} 12 | reuse: false 13 | specs: 14 | {% for spec in config.specs %} 15 | - '{{ spec }}' 16 | {% endfor %} 17 | packages: 18 | all: 19 | {% set separator = joiner(', ') %} 20 | compiler: [{% for c in config.compiler %}{{ separator() }}'{{ c.spec }}'{% endfor %}] 21 | {% if config.toolchain_constraints %} 22 | require: 23 | {% set separator = joiner(', ') %} 24 | - one_of: [{% for c in config.toolchain_constraints %}{{ separator() }}'{{ c }}'{% endfor %}] 25 | {% endif %} 26 | {% if config.variants %} 27 | {% set separator = joiner(', ') %} 28 | variants: [{% for v in config.variants %}{{ separator() }}'{{ v }}'{% endfor %}] 29 | {% endif %} 30 | {% if config.mpi.spec %} 31 | mpi: 32 | require: '{{ config.mpi.spec }}' 33 | {% endif %} 34 | {% if config.view %} 35 | view: 36 | default: 37 | {{ config.view.config|py2yaml(6) }} 38 | {% else %} 39 | view: false 40 | {% endif %} 41 | -------------------------------------------------------------------------------- /stackinator/templates/repos.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - {{ repo_path }} 3 | -------------------------------------------------------------------------------- /stackinator/templates/stack-debug.sh: -------------------------------------------------------------------------------- 1 | env --ignore-environment PATH=/usr/bin:/bin:{{ build_path }}/spack/bin http_proxy=$http_proxy https_proxy=$https_proxy HOME=$HOME BUILD_ROOT={{ build_path }} STORE={{ mount_path }} SPACK_SYSTEM_CONFIG_PATH={{ build_path }}/config SPACK_USER_CACHE_PATH={{ build_path }}/cache SPACK=spack SPACK_COLOR=always SPACK_USER_CONFIG_PATH={% if spack_version>="0.23" %}~{% else %}/dev/null{% endif %} LC_ALL=en_US.UTF-8 TZ=UTC SOURCE_DATE_EPOCH=315576060 {% if use_bwrap %} {{ build_path }}/bwrap-mutable-root.sh --tmpfs ~ --bind {{ build_path }}/tmp /tmp --bind {{ build_path }}/store {{ mount_path }} {% endif %} bash -noprofile -l 2 | -------------------------------------------------------------------------------- /test_stackinator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env -S uv run --script 2 | # /// script 3 | # requires-python = ">=3.12" 4 | # dependencies = [ 5 | # "jinja2", 6 | # "jsonschema", 7 | # "pytest", 8 | # "pyYAML", 9 | # ] 10 | # /// 11 | 12 | import pathlib 13 | import sys 14 | 15 | prefix = pathlib.Path(__file__).parent.resolve() 16 | sys.path = [prefix.as_posix()] + sys.path 17 | 18 | import pytest # noqa: E402 19 | 20 | if __name__ == "__main__": 21 | sys.argv = [sys.argv[0], "-vv", "unittests"] 22 | sys.exit(pytest.main()) 23 | -------------------------------------------------------------------------------- /unittests/.gitignore: -------------------------------------------------------------------------------- 1 | scratch 2 | 
-------------------------------------------------------------------------------- /unittests/data/arbor-uenv/env/arbor/activate.sh: -------------------------------------------------------------------------------- 1 | export SPACK_ENV_VIEW=default; 2 | export ACLOCAL_PATH=/user-environment/env/arbor/share/aclocal:/usr/share/aclocal; 3 | export CMAKE_PREFIX_PATH=/user-environment/env/arbor; 4 | export MANPATH=/user-environment/env/arbor/share/man:/usr/share/man:/user-environment/env/arbor/man:; 5 | export PATH=/user-environment/env/arbor/bin:/usr/bin:/bin; 6 | export PKG_CONFIG_PATH=/user-environment/env/arbor/share/pkgconfig:/user-environment/env/arbor/lib/pkgconfig:/usr/share/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/user-environment/env/arbor/lib64/pkgconfig; 7 | export PYTHONPATH=/user-environment/env/arbor/lib/python3.11/site-packages:/user-environment/env/arbor/misc; 8 | # compiler paths added by stackinator 9 | export PATH=/user-environment/linux-archrolling-zen4/gcc-13.3.0/gcc-13.2.0-rmq2jx2h54owhxopaprg7yg4ocbdqv2j/bin:$PATH; 10 | -------------------------------------------------------------------------------- /unittests/data/arbor-uenv/env/develop/activate.sh: -------------------------------------------------------------------------------- 1 | export SPACK_ENV_VIEW=default; 2 | export ACLOCAL_PATH=/user-environment/env/develop/share/aclocal:/usr/share/aclocal; 3 | export CMAKE_PREFIX_PATH=/user-environment/env/develop; 4 | export MANPATH=/user-environment/env/develop/share/man:/usr/share/man:/user-environment/env/develop/man:; 5 | export PATH=/user-environment/env/develop/bin:/usr/bin:/bin; 6 | export PKG_CONFIG_PATH=/user-environment/env/develop/share/pkgconfig:/user-environment/env/develop/lib/pkgconfig:/usr/share/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/user-environment/env/develop/lib64/pkgconfig; 7 | export PYTHONPATH=/user-environment/env/develop/lib/python3.11/site-packages:/user-environment/env/develop/misc; 8 | # compiler paths added by stackinator 9 | export PATH=/user-environment/linux-archrolling-zen4/gcc-13.3.0/gcc-13.2.0-rmq2jx2h54owhxopaprg7yg4ocbdqv2j/bin:$PATH; 10 | -------------------------------------------------------------------------------- /unittests/data/arbor-uenv/meta/configure.json: -------------------------------------------------------------------------------- 1 | { 2 | "cluster": "unknown", 3 | "host": { 4 | "machine": "x86_64", 5 | "node": "arapiles", 6 | "processor": "", 7 | "release": "6.9.3-arch1-1", 8 | "system": "Linux", 9 | "version": "#1 SMP PREEMPT_DYNAMIC Fri, 31 May 2024 15:14:45 +0000" 10 | }, 11 | "mount": "/user-environment", 12 | "spack": { 13 | "commit": "releases/v0.21", 14 | "repo": "https://github.com/spack/spack.git" 15 | }, 16 | "stackinator": { 17 | "args": [ 18 | "/home/bcumming/software/uenv-dev/stackinator/bin/stack-config", 19 | "-s", 20 | "./arapiles", 21 | "-r", 22 | "./recipes/arbor", 23 | "-b", 24 | "/dev/shm/bcumming/arbor", 25 | "-c", 26 | "./cache.yaml" 27 | ], 28 | "python": "/usr/bin/python3", 29 | "version": "4.1.0-dev" 30 | }, 31 | "time": "20240611 12:06:38" 32 | } 33 | -------------------------------------------------------------------------------- /unittests/data/arbor-uenv/meta/env.json.in: -------------------------------------------------------------------------------- 1 | { 2 | "description": "The Arbor neuroscience simulation package and its dependencies for multicore systems.", 3 | "modules": { 4 | "root": "@@mount@@/modules" 5 | }, 6 | "mount": "@@mount@@", 7 | "name": "arbor", 8 | 
"views": { 9 | "arbor": { 10 | "activate": "@@mount@@/env/arbor/activate.sh", 11 | "description": "", 12 | "root": "@@mount@@/env/arbor" 13 | }, 14 | "develop": { 15 | "activate": "@@mount@@/env/develop/activate.sh", 16 | "description": "", 17 | "root": "@@mount@@/env/develop" 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /unittests/data/arbor-uenv/meta/recipe/compilers.yaml: -------------------------------------------------------------------------------- 1 | bootstrap: 2 | spec: gcc@12.3 3 | gcc: 4 | specs: 5 | - gcc@13.2 6 | -------------------------------------------------------------------------------- /unittests/data/arbor-uenv/meta/recipe/config.yaml: -------------------------------------------------------------------------------- 1 | name: arbor 2 | spack: 3 | commit: releases/v0.21 4 | repo: https://github.com/spack/spack.git 5 | store: /user-environment 6 | description: The Arbor neuroscience simulation package and its dependencies for multicore systems. 7 | -------------------------------------------------------------------------------- /unittests/data/arbor-uenv/meta/recipe/environments.yaml: -------------------------------------------------------------------------------- 1 | arbor: 2 | compiler: 3 | - toolchain: gcc 4 | spec: gcc@13.2 5 | unify: true 6 | specs: 7 | # arbor 8 | - arbor@0.9 +python -mpi 9 | # build tools 10 | - cmake 11 | - googletest 12 | - ninja 13 | - python@3.11 14 | # C++ dependencies 15 | - fmt 16 | - pugixml 17 | - nlohmann-json 18 | - random123 19 | # python packages 20 | - py-numpy 21 | - py-pip 22 | - py-pybind11 23 | # etc 24 | packages: 25 | - diffutils 26 | - gettext 27 | - gmake 28 | - libxml2 29 | - perl 30 | views: 31 | arbor: 32 | link: roots 33 | develop: 34 | link: roots 35 | exclude: [arbor] 36 | -------------------------------------------------------------------------------- /unittests/data/arbor-uenv/meta/recipe/modules.yaml: -------------------------------------------------------------------------------- 1 | modules: 2 | # Paths to check when creating modules for all module sets 3 | prefix_inspections: 4 | bin: 5 | - PATH 6 | lib: 7 | - LD_LIBRARY_PATH 8 | lib64: 9 | - LD_LIBRARY_PATH 10 | 11 | default: 12 | arch_folder: false 13 | # Where to install modules 14 | roots: 15 | tcl: /user-environment/modules 16 | tcl: 17 | all: 18 | autoload: none 19 | hash_length: 0 20 | exclude_implicits: true 21 | exclude: ['%gcc@13.3.0', 'gcc %gcc@13.3.0'] 22 | projections: 23 | all: '{name}/{version}' 24 | -------------------------------------------------------------------------------- /unittests/recipes/base-amdgpu/compilers.yaml: -------------------------------------------------------------------------------- 1 | bootstrap: 2 | spec: gcc@11 3 | gcc: 4 | specs: 5 | - gcc@11 6 | -------------------------------------------------------------------------------- /unittests/recipes/base-amdgpu/config.yaml: -------------------------------------------------------------------------------- 1 | name: amdgpu 2 | store: /user-environment 3 | spack: 4 | repo: https://github.com/spack/spack.git 5 | commit: 6408b51 6 | -------------------------------------------------------------------------------- /unittests/recipes/base-amdgpu/environments.yaml: -------------------------------------------------------------------------------- 1 | gcc-env: 2 | compiler: 3 | - toolchain: gcc 4 | spec: gcc@11 5 | unify: true 6 | specs: 7 | - hipsolver@5.2 8 | - rocblas@5.2 9 | - rocfft@5.2 10 | - hipblas@5.2 11 | - rocsolver@5.2 
12 | - hipfft@5.2 13 | - rocm-cmake@5.2 14 | - hip@5.2 15 | - osu-micro-benchmarks@5.9 16 | mpi: 17 | spec: cray-mpich 18 | gpu: rocm 19 | tools: 20 | compiler: 21 | - toolchain: gcc 22 | spec: gcc@11 23 | unify: true 24 | specs: 25 | - cmake 26 | - python@3.10 27 | -------------------------------------------------------------------------------- /unittests/recipes/base-amdgpu/modules.yaml: -------------------------------------------------------------------------------- 1 | modules: 2 | # Paths to check when creating modules for all module sets 3 | prefix_inspections: 4 | bin: 5 | - PATH 6 | lib: 7 | - LD_LIBRARY_PATH 8 | lib64: 9 | - LD_LIBRARY_PATH 10 | 11 | default: 12 | arch_folder: false 13 | # Where to install modules 14 | roots: 15 | tcl: /user-environment/modules 16 | tcl: 17 | all: 18 | autoload: none 19 | hash_length: 0 20 | exclude_implicits: true 21 | exclude: ['%gcc@7.5.0', 'gcc %gcc@7.5.0'] 22 | projections: 23 | all: '{name}/{version}' 24 | cray-mpich: 'cray-mpich' 25 | -------------------------------------------------------------------------------- /unittests/recipes/base-nvgpu/compilers.yaml: -------------------------------------------------------------------------------- 1 | bootstrap: 2 | spec: gcc@11 3 | gcc: 4 | specs: 5 | - gcc@11 6 | -------------------------------------------------------------------------------- /unittests/recipes/base-nvgpu/config.yaml: -------------------------------------------------------------------------------- 1 | name: cuda-env 2 | store: /user-environment 3 | spack: 4 | repo: https://github.com/spack/spack.git 5 | commit: 6408b51 6 | mirror: 7 | enable: false 8 | -------------------------------------------------------------------------------- /unittests/recipes/base-nvgpu/environments.yaml: -------------------------------------------------------------------------------- 1 | gcc-env: 2 | compiler: 3 | - toolchain: gcc 4 | spec: gcc@11 5 | unify: true 6 | specs: 7 | - cuda@11.8 8 | - osu-micro-benchmarks@5.9 9 | mpi: 10 | spec: cray-mpich 11 | gpu: cuda 12 | variants: 13 | - +mpi 14 | - +cuda 15 | - cuda_arch=80 16 | tools: 17 | compiler: 18 | - toolchain: gcc 19 | spec: gcc@11 20 | unify: true 21 | specs: 22 | - cmake 23 | - python@3.10 24 | -------------------------------------------------------------------------------- /unittests/recipes/base-nvgpu/modules.yaml: -------------------------------------------------------------------------------- 1 | modules: 2 | # Paths to check when creating modules for all module sets 3 | prefix_inspections: 4 | bin: 5 | - PATH 6 | lib: 7 | - LD_LIBRARY_PATH 8 | lib64: 9 | - LD_LIBRARY_PATH 10 | 11 | default: 12 | arch_folder: false 13 | # Where to install modules 14 | roots: 15 | tcl: /user-environment/modules 16 | tcl: 17 | all: 18 | autoload: none 19 | hash_length: 0 20 | exclude_implicits: true 21 | exclude: ['%gcc@7.5.0', 'gcc %gcc@7.5.0'] 22 | projections: 23 | all: '{name}/{version}' 24 | cray-mpich: 'cray-mpich' 25 | -------------------------------------------------------------------------------- /unittests/recipes/cache/compilers.yaml: -------------------------------------------------------------------------------- 1 | bootstrap: 2 | spec: gcc@11 3 | gcc: 4 | specs: 5 | - gcc@11 6 | llvm: 7 | requires: gcc@11 8 | specs: 9 | - llvm@13 10 | -------------------------------------------------------------------------------- /unittests/recipes/cache/config.yaml: -------------------------------------------------------------------------------- 1 | name: cached-example 2 | store: '/user-environment' 3 | spack: 
4 | repo: https://github.com/spack/spack.git 5 | commit: 6408b51 6 | mirror: 7 | key: /scratch/e1000/bcumming/secret/spack-key.gpg 8 | enable: true 9 | -------------------------------------------------------------------------------- /unittests/recipes/cache/environments.yaml: -------------------------------------------------------------------------------- 1 | gcc-env: 2 | compiler: 3 | - toolchain: gcc 4 | spec: gcc@11 5 | unify: true 6 | specs: 7 | - fmt 8 | mpi: 9 | spec: cray-mpich 10 | gpu: false 11 | tools: 12 | compiler: 13 | - toolchain: gcc 14 | spec: gcc@11 15 | unify: true 16 | specs: 17 | - cmake 18 | - python@3.10 19 | -------------------------------------------------------------------------------- /unittests/recipes/cache/modules.yaml: -------------------------------------------------------------------------------- 1 | modules: 2 | # Paths to check when creating modules for all module sets 3 | prefix_inspections: 4 | bin: 5 | - PATH 6 | lib: 7 | - LD_LIBRARY_PATH 8 | lib64: 9 | - LD_LIBRARY_PATH 10 | 11 | default: 12 | arch_folder: false 13 | # Where to install modules 14 | roots: 15 | tcl: /user-environment/modules 16 | tcl: 17 | all: 18 | autoload: none 19 | hash_length: 0 20 | exclude_implicits: true 21 | exclude: ['%gcc@7.5.0', 'gcc %gcc@7.5.0'] 22 | projections: 23 | all: '{name}/{version}' 24 | cray-mpich: 'cray-mpich' 25 | -------------------------------------------------------------------------------- /unittests/recipes/host-recipe/compilers.yaml: -------------------------------------------------------------------------------- 1 | bootstrap: 2 | spec: gcc@11 3 | gcc: 4 | specs: 5 | - gcc@11 6 | -------------------------------------------------------------------------------- /unittests/recipes/host-recipe/config.yaml: -------------------------------------------------------------------------------- 1 | name: host-example 2 | store: /user-environment 3 | description: "An example gcc configuration for CPU-only development" 4 | spack: 5 | commit: releases/v0.23 6 | repo: https://github.com/spack/spack.git 7 | 8 | -------------------------------------------------------------------------------- /unittests/recipes/host-recipe/environments.yaml: -------------------------------------------------------------------------------- 1 | gcc-env: 2 | compiler: 3 | - toolchain: gcc 4 | spec: gcc@11.3.0 5 | unify: true 6 | specs: 7 | - cmake 8 | - osu-micro-benchmarks@5.9 9 | - hdf5 +mpi 10 | - python@3.10 11 | - tree 12 | - libtree 13 | mpi: 14 | spec: cray-mpich 15 | gpu: false 16 | views: 17 | default: 18 | run: 19 | link: run 20 | roots: 21 | link: roots 22 | no-python: 23 | exclude: 24 | - python 25 | 26 | -------------------------------------------------------------------------------- /unittests/recipes/host-recipe/modules.yaml: -------------------------------------------------------------------------------- 1 | modules: 2 | # Paths to check when creating modules for all module sets 3 | prefix_inspections: 4 | bin: 5 | - PATH 6 | lib: 7 | - LD_LIBRARY_PATH 8 | lib64: 9 | - LD_LIBRARY_PATH 10 | 11 | default: 12 | arch_folder: false 13 | # Where to install modules 14 | roots: 15 | tcl: /user-environment/modules 16 | tcl: 17 | all: 18 | autoload: none 19 | hash_length: 0 20 | exclude_implicits: true 21 | exclude: ['%gcc@7.5.0', 'gcc %gcc@7.5.0'] 22 | projections: 23 | all: '{name}/{version}' 24 | cray-mpich: 'cray-mpich' 25 | python: '{name}/{version}' 26 | -------------------------------------------------------------------------------- /unittests/recipes/host-recipe/post-install: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "=====================================" 4 | echo "===== post install hook =====" 5 | 6 | mount_path={{ env.mount }} 7 | echo RUNNING IN $mount_path 8 | 9 | echo 10 | echo "===== environment variables =====" 11 | printenv 12 | 13 | echo 14 | echo "===== create post file {{ env.mount }}/post =====" 15 | echo "$(date)" > "$mount_path/post" 16 | 17 | echo 18 | echo "===== list all spack packages =====" 19 | spack -C {{ env.config }} find 20 | 21 | echo "=====================================" 22 | -------------------------------------------------------------------------------- /unittests/recipes/host-recipe/pre-install: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "=====================================" 4 | echo "===== pre install hook =====" 5 | 6 | mount_path={{ env.mount }} 7 | build_path={{ env.build }} 8 | echo RUNNING IN $mount_path with build path $build_path 9 | 10 | echo 11 | echo "===== environment variables =====" 12 | printenv 13 | 14 | echo 15 | echo "===== create \"config\" file {{ env.build }}/configxxx =====" 16 | echo "$(date)" > "$build_path/configxxx" 17 | 18 | echo "=====================================" 19 | -------------------------------------------------------------------------------- /unittests/recipes/unique-bootstrap/compilers.yaml: -------------------------------------------------------------------------------- 1 | bootstrap: 2 | spec: gcc@11 3 | gcc: 4 | specs: 5 | - gcc@12 6 | -------------------------------------------------------------------------------- /unittests/recipes/unique-bootstrap/config.yaml: -------------------------------------------------------------------------------- 1 | name: example 2 | store: '/user-environment' 3 | spack: 4 | repo: https://github.com/spack/spack.git 5 | commit: v0.22.3 6 | mirror: 7 | enable: false 8 | -------------------------------------------------------------------------------- /unittests/recipes/unique-bootstrap/environments.yaml: -------------------------------------------------------------------------------- 1 | mpi: 2 | compiler: 3 | - toolchain: gcc 4 | spec: gcc@12 5 | unify: true 6 | specs: 7 | - cuda@11.8 8 | - osu-micro-benchmarks@5.9 9 | mpi: 10 | spec: cray-mpich 11 | gpu: cuda 12 | tools: 13 | compiler: 14 | - toolchain: gcc 15 | spec: gcc@12 16 | unify: true 17 | specs: 18 | - cmake 19 | -------------------------------------------------------------------------------- /unittests/recipes/unique-bootstrap/modules.yaml: -------------------------------------------------------------------------------- 1 | modules: 2 | # Paths to check when creating modules for all module sets 3 | prefix_inspections: 4 | bin: 5 | - PATH 6 | lib: 7 | - LD_LIBRARY_PATH 8 | lib64: 9 | - LD_LIBRARY_PATH 10 | 11 | default: 12 | arch_folder: false 13 | # Where to install modules 14 | roots: 15 | tcl: /user-environment/modules 16 | tcl: 17 | all: 18 | autoload: none 19 | hash_length: 0 20 | exclude_implicits: true 21 | exclude: ['%gcc@7.5.0', 'gcc %gcc@7.5.0'] 22 | projections: 23 | all: '{name}/{version}' 24 | cray-mpich: 'cray-mpich' 25 | python: '{name}/{version}' 26 | -------------------------------------------------------------------------------- /unittests/recipes/with-repo/compilers.yaml: -------------------------------------------------------------------------------- 1 | bootstrap: 2 | spec: gcc@11 3 | gcc: 4 | specs: 5 | - gcc@11 6 | 
-------------------------------------------------------------------------------- /unittests/recipes/with-repo/config.yaml: -------------------------------------------------------------------------------- 1 | name: with-repo 2 | store: '/user-environment' 3 | spack: 4 | repo: https://github.com/spack/spack.git 5 | commit: v0.21.0 6 | -------------------------------------------------------------------------------- /unittests/recipes/with-repo/environments.yaml: -------------------------------------------------------------------------------- 1 | gcc-env: 2 | compiler: 3 | - toolchain: gcc 4 | spec: gcc@11.3.0 5 | unify: true 6 | specs: 7 | - osu-micro-benchmarks@5.9 8 | - hdf5 +mpi 9 | mpi: 10 | spec: cray-mpich 11 | gpu: false 12 | tools: 13 | compiler: 14 | - toolchain: gcc 15 | spec: gcc@11.3.0 16 | unify: true 17 | specs: 18 | - cmake 19 | -------------------------------------------------------------------------------- /unittests/recipes/with-repo/repo/packages/dummy/package.py: -------------------------------------------------------------------------------- 1 | from spack import * 2 | 3 | 4 | class Dummy(Package): 5 | pass 6 | -------------------------------------------------------------------------------- /unittests/test-envvars.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | root=$(pwd) 4 | input_path=${root}/data/arbor-uenv 5 | for p in man misc aclocal lib64 lib64/pkgconfig bin lib lib/pkgconfig lib/python3.11 lib/python3.11/site-packages share share/pkgconfig 6 | do 7 | mkdir -p $input_path/env/develop/$p 8 | done 9 | for p in man misc aclocal bin lib lib/pkgconfig lib/python3.11 lib/python3.11/site-packages share share/pkgconfig 10 | do 11 | mkdir -p $input_path/env/arbor/$p 12 | done 13 | scratch_path=${root}/scratch 14 | mount_path=${scratch_path}/user-environment 15 | echo "===== setting up test mount path ${mount_path}" 16 | rm -rf ${mount_path} 17 | 18 | mkdir -p ${scratch_path} 19 | cp -R ${input_path} ${mount_path} 20 | meta_path=${mount_path}/meta/env.json 21 | meta_in_path=${mount_path}/meta/env.json.in 22 | echo "===== input meta/env.json.in ${meta_in_path}" 23 | if [[ "$OSTYPE" == "linux-gnu"* ]]; then 24 | sed -i "s|@@mount@@|${mount_path}|g" ${meta_in_path} 25 | else 26 | sed -i '' "s|@@mount@@|${mount_path}|g" ${meta_in_path} 27 | fi 28 | 29 | echo "===== envvars view arbor" 30 | ../stackinator/etc/envvars.py view ${mount_path}/env/arbor /dev/shm/bcumming/arbor 31 | echo "===== envvars view develop" 32 | ../stackinator/etc/envvars.py view --prefix_paths="LD_LIBRARY_PATH=lib:lib64" ${mount_path}/env/develop /dev/shm/bcumming/arbor 33 | 34 | echo "===== all env.json files after running view meta generation" 35 | find $scratch_path -name env.json 36 | 37 | echo "===== running final meta data stage ${mount_path}" 38 | 39 | ../stackinator/etc/envvars.py uenv ${mount_path}/ --modules --spack="https://github.com/spack/spack.git,releases/v0.20" 40 | 41 | echo 42 | echo "===== develop" 43 | echo 44 | cat ${meta_path} | jq .views.develop 45 | 46 | echo 47 | echo "===== arbor" 48 | echo 49 | cat ${meta_path} | jq .views.arbor 50 | 51 | echo 52 | echo "===== spack view" 53 | echo 54 | cat ${meta_path} | jq .views.spack 55 | 56 | echo 57 | echo "===== modules view" 58 | echo 59 | cat ${meta_path} | jq .views.modules 60 | 61 | -------------------------------------------------------------------------------- /unittests/test_schema.py: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/python3 2 | 3 | import pathlib 4 | 5 | import jsonschema 6 | import pytest 7 | import yaml 8 | 9 | import stackinator.schema as schema 10 | 11 | 12 | @pytest.fixture 13 | def test_path(): 14 | return pathlib.Path(__file__).parent.resolve() 15 | 16 | 17 | @pytest.fixture 18 | def yaml_path(test_path): 19 | return test_path / "yaml" 20 | 21 | 22 | @pytest.fixture 23 | def recipes(): 24 | return [ 25 | "host-recipe", 26 | "base-amdgpu", 27 | "base-nvgpu", 28 | "cache", 29 | "unique-bootstrap", 30 | "with-repo", 31 | ] 32 | 33 | 34 | @pytest.fixture 35 | def recipe_paths(test_path, recipes): 36 | return [test_path / "recipes" / r for r in recipes] 37 | 38 | 39 | def test_config_yaml(yaml_path): 40 | # test that the defaults are set as expected 41 | with open(yaml_path / "config.defaults.yaml") as fid: 42 | raw = yaml.load(fid, Loader=yaml.Loader) 43 | schema.validator(schema.config_schema).validate(raw) 44 | assert raw["store"] == "/user-environment" 45 | assert raw["spack"]["commit"] is None 46 | assert raw["modules"] == True # noqa: E712 47 | assert raw["mirror"] == {"enable": True, "key": None} 48 | assert raw["description"] is None 49 | 50 | with open(yaml_path / "config.full.yaml") as fid: 51 | raw = yaml.load(fid, Loader=yaml.Loader) 52 | schema.validator(schema.config_schema).validate(raw) 53 | assert raw["store"] == "/alternative-point" 54 | assert raw["spack"]["commit"] == "6408b51" 55 | assert raw["modules"] == False # noqa: E712 56 | assert raw["mirror"] == {"enable": True, "key": "/home/bob/veryprivate.key"} 57 | assert raw["description"] == "a really useful environment" 58 | 59 | 60 | def test_recipe_config_yaml(recipe_paths): 61 | # validate the config.yaml in the test recipes 62 | for p in recipe_paths: 63 | with open(p / "config.yaml") as fid: 64 | raw = yaml.load(fid, Loader=yaml.Loader) 65 | schema.validator(schema.config_schema).validate(raw) 66 | 67 | 68 | def test_compilers_yaml(yaml_path): 69 | # test that the defaults are set as expected 70 | with open(yaml_path / "compilers.defaults.yaml") as fid: 71 | raw = yaml.load(fid, Loader=yaml.Loader) 72 | schema.validator(schema.compilers_schema).validate(raw) 73 | assert raw["bootstrap"] == {"spec": "gcc@11"} 74 | assert raw["gcc"] == {"specs": ["gcc@10.2"]} 75 | assert raw["llvm"] is None 76 | 77 | with open(yaml_path / "compilers.full.yaml") as fid: 78 | raw = yaml.load(fid, Loader=yaml.Loader) 79 | schema.validator(schema.compilers_schema).validate(raw) 80 | assert raw["bootstrap"]["spec"] == "gcc@11" 81 | assert raw["gcc"] == {"specs": ["gcc@11", "gcc@10.2", "gcc@9.3.0"]} 82 | assert raw["llvm"] == { 83 | "specs": ["llvm@13", "llvm@11.2", "nvhpc@22.11"], 84 | "requires": "gcc@10.2", 85 | } 86 | 87 | 88 | def test_recipe_compilers_yaml(recipe_paths): 89 | # validate the compilers.yaml in the test recipes 90 | for p in recipe_paths: 91 | with open(p / "compilers.yaml") as fid: 92 | raw = yaml.load(fid, Loader=yaml.Loader) 93 | schema.validator(schema.compilers_schema).validate(raw) 94 | 95 | 96 | def test_environments_yaml(yaml_path): 97 | with open(yaml_path / "environments.full.yaml") as fid: 98 | raw = yaml.load(fid, Loader=yaml.Loader) 99 | schema.validator(schema.environments_schema).validate(raw) 100 | 101 | # the defaults-env sets only the required fields 102 | # test that the optional fields have been set to the defaults correctly 103 | 104 | assert "defaults-env" in raw 105 | env = raw["defaults-env"] 106 | 107 | # test the required fields were read correctly 108 | assert env["compiler"] == [{"toolchain": "gcc", "spec": 
"gcc@11"}] 109 | assert env["specs"] == ["tree"] 110 | 111 | # test defaults were set correctly 112 | assert env["unify"] == True # noqa: E712 113 | assert env["packages"] == [] 114 | assert env["variants"] == [] 115 | assert env["mpi"] is None 116 | assert env["views"] == {} 117 | 118 | env = raw["defaults-env-mpi-nogpu"] 119 | assert env["mpi"]["spec"] is not None 120 | assert env["mpi"]["gpu"] is None 121 | 122 | # the full-env sets all of the fields 123 | # test that they have been read correctly 124 | 125 | assert "full-env" in raw 126 | env = raw["full-env"] 127 | assert env["compiler"] == [ 128 | {"toolchain": "gcc", "spec": "gcc@11"}, 129 | {"toolchain": "gcc", "spec": "gcc@12"}, 130 | ] 131 | assert env["specs"] == ["osu-micro-benchmarks@5.9", "hdf5 +mpi"] 132 | 133 | # test defaults were set correctly 134 | assert env["unify"] == "when_possible" 135 | assert env["packages"] == ["perl", "git"] 136 | assert env["mpi"] == {"spec": "cray-mpich", "gpu": "cuda"} 137 | assert env["variants"] == ["+mpi", "+cuda"] 138 | assert env["views"] == {"default": None} 139 | 140 | # check that only allowed fields are accepted 141 | # from an example that was silently validated 142 | with open(yaml_path / "environments.err-providers.yaml") as fid: 143 | raw = yaml.load(fid, Loader=yaml.Loader) 144 | with pytest.raises( 145 | jsonschema.exceptions.ValidationError, 146 | match=r"Additional properties are not allowed \('providers' was unexpected", 147 | ): 148 | schema.validator(schema.environments_schema).validate(raw) 149 | 150 | 151 | def test_recipe_environments_yaml(recipe_paths): 152 | # validate the environments.yaml in the test recipes 153 | for p in recipe_paths: 154 | with open(p / "environments.yaml") as fid: 155 | raw = yaml.load(fid, Loader=yaml.Loader) 156 | schema.validator(schema.environments_schema).validate(raw) 157 | -------------------------------------------------------------------------------- /unittests/yaml/compilers.defaults.yaml: -------------------------------------------------------------------------------- 1 | bootstrap: 2 | spec: gcc@11 3 | gcc: 4 | specs: 5 | - gcc@10.2 6 | -------------------------------------------------------------------------------- /unittests/yaml/compilers.full.yaml: -------------------------------------------------------------------------------- 1 | bootstrap: 2 | spec: gcc@11 3 | gcc: 4 | specs: 5 | - gcc@11 6 | - gcc@10.2 7 | - gcc@9.3.0 8 | llvm: 9 | requires: gcc@10.2 10 | specs: 11 | - llvm@13 12 | - llvm@11.2 13 | - nvhpc@22.11 14 | -------------------------------------------------------------------------------- /unittests/yaml/config.defaults.yaml: -------------------------------------------------------------------------------- 1 | name: cuda-env 2 | # default /user-environment 3 | #store: /user-environment 4 | spack: 5 | repo: https://github.com/spack/spack.git 6 | # default: None == no `git checkout` command 7 | #commit: 6408b51 8 | #mirror: 9 | # default None 10 | #key: None 11 | # default True 12 | #enable: True 13 | # default True 14 | #modules: True 15 | -------------------------------------------------------------------------------- /unittests/yaml/config.full.yaml: -------------------------------------------------------------------------------- 1 | name: cuda-env 2 | store: /alternative-point 3 | spack: 4 | repo: https://github.com/spack/spack.git 5 | commit: 6408b51 6 | mirror: 7 | key: /home/bob/veryprivate.key 8 | enable: True 9 | modules: False 10 | description: "a really useful environment" 11 | 
-------------------------------------------------------------------------------- /unittests/yaml/environments.err-providers.yaml: -------------------------------------------------------------------------------- 1 | full-env: 2 | compiler: 3 | - toolchain: gcc 4 | spec: gcc@11 5 | - toolchain: gcc 6 | spec: gcc@12 7 | unify: when_possible 8 | specs: 9 | - osu-micro-benchmarks@5.9 10 | - hdf5 +mpi 11 | mpi: 12 | spec: cray-mpich 13 | gpu: cuda 14 | packages: 15 | - perl 16 | - git 17 | variants: 18 | - +mpi 19 | - +cuda 20 | # expect an error because 'providers' is not defined in the schema. 21 | providers: 22 | libglx: [opengl] 23 | -------------------------------------------------------------------------------- /unittests/yaml/environments.full.yaml: -------------------------------------------------------------------------------- 1 | full-env: 2 | compiler: 3 | - toolchain: gcc 4 | spec: gcc@11 5 | - toolchain: gcc 6 | spec: gcc@12 7 | unify: when_possible 8 | specs: 9 | - osu-micro-benchmarks@5.9 10 | - hdf5 +mpi 11 | mpi: 12 | spec: cray-mpich 13 | gpu: cuda 14 | packages: 15 | - perl 16 | - git 17 | variants: 18 | - +mpi 19 | - +cuda 20 | views: 21 | default: 22 | defaults-env: 23 | compiler: 24 | - toolchain: gcc 25 | spec: gcc@11 26 | specs: 27 | - tree 28 | # assert variants=[] 29 | # assert unify=True 30 | # assert mpi=None 31 | # assert packages=[] 32 | # assert views={} 33 | defaults-env-mpi-nogpu: 34 | compiler: 35 | - toolchain: gcc 36 | spec: gcc@11 37 | specs: 38 | - tree 39 | mpi: 40 | spec: cray-mpich 41 | --------------------------------------------------------------------------------
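
The schema tests above all reduce to one pattern: read a recipe YAML file with yaml.load, then check it with schema.validator(<schema>).validate, which raises jsonschema.exceptions.ValidationError on failure. A minimal sketch of applying that pattern to a whole recipe directory outside of pytest follows; the script name and the pairing of file names to schemas are illustrative assumptions modeled on unittests/test_schema.py, not a helper shipped by the repository:

    #!/usr/bin/python3
    # validate-recipe.py -- hypothetical helper modeled on unittests/test_schema.py
    import pathlib
    import sys

    import yaml

    import stackinator.schema as schema

    # pair each recipe file with the schema used for it in the unit tests
    checks = {
        "config.yaml": schema.config_schema,
        "compilers.yaml": schema.compilers_schema,
        "environments.yaml": schema.environments_schema,
    }

    recipe = pathlib.Path(sys.argv[1])  # e.g. unittests/recipes/host-recipe
    for name, file_schema in checks.items():
        with open(recipe / name) as fid:
            raw = yaml.load(fid, Loader=yaml.Loader)
        # raises jsonschema.exceptions.ValidationError on an invalid recipe,
        # as the environments.err-providers.yaml test demonstrates
        schema.validator(file_schema).validate(raw)
        print(f"{recipe / name}: ok")

Run it as, for example, ./validate-recipe.py unittests/recipes/host-recipe. modules.yaml is deliberately left unchecked here, matching the coverage of test_schema.py, which validates only the config, compilers, and environments files.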