├── .gitignore ├── .gitmodules ├── COPYING ├── README.md ├── default.nix ├── fi ├── README.md ├── default.nix ├── docker │ ├── builder.sh │ ├── default.nix │ └── setup.sh ├── env ├── gcc-13.3-nvcc.patch ├── linkfiles.sh ├── lmod-no-sys-tcl.patch ├── lmod │ ├── modules.lua │ ├── modules.nix │ └── modules.sh ├── openmpi-1.10-gcc.PATCH ├── openmpi-1.10.7.PATCH ├── openmpi-2.1.6.PATCH ├── py-extension-helpers-setup.py.patch ├── python-ncursesw-py-3.11.4.patch ├── python-ncursesw.patch ├── r.nix ├── repo │ ├── packages │ │ ├── blender │ │ │ └── package.py │ │ ├── disBatch │ │ │ └── package.py │ │ ├── idl │ │ │ └── package.py │ │ ├── libjansson │ │ │ └── package.py │ │ ├── paraview │ │ │ └── paraview_wrapper.sh │ │ ├── pvfmm │ │ │ └── package.py │ │ ├── py-abopt │ │ │ └── package.py │ │ ├── py-bigfile │ │ │ └── package.py │ │ ├── py-cachey │ │ │ └── package.py │ │ ├── py-cons │ │ │ └── package.py │ │ ├── py-etuples │ │ │ └── package.py │ │ ├── py-hdf5plugin │ │ │ └── package.py │ │ ├── py-jupyter-remote-desktop-proxy │ │ │ └── package.py │ │ ├── py-jupyterlmod │ │ │ └── package.py │ │ ├── py-kvsstcp │ │ │ └── package.py │ │ ├── py-logical-unification │ │ │ └── package.py │ │ ├── py-mcfit │ │ │ └── package.py │ │ ├── py-minikanren │ │ │ └── package.py │ │ ├── py-nbodykit │ │ │ └── package.py │ │ ├── py-pfft-python │ │ │ └── package.py │ │ ├── py-pmesh │ │ │ └── package.py │ │ ├── py-pymc │ │ │ └── package.py │ │ ├── py-pyslurm │ │ │ └── package.py │ │ ├── py-pytensor │ │ │ └── package.py │ │ ├── py-runtests │ │ │ └── package.py │ │ ├── py-sharedmem │ │ │ └── package.py │ │ ├── py-websockify │ │ │ └── package.py │ │ ├── py-xarray-einstats │ │ │ └── package.py │ │ ├── rockstar │ │ │ ├── 0001-Fix-to-solve-linking-problem-with-gcc-10.patch │ │ │ └── package.py │ │ ├── slack │ │ │ └── package.py │ │ ├── stkfmm │ │ │ └── package.py │ │ ├── triqs-cthyb │ │ │ └── package.py │ │ ├── triqs-ctseg │ │ │ └── package.py │ │ ├── triqs-dft-tools │ │ │ └── package.py │ │ ├── 
triqs-hartree-fock │ │ │ └── package.py │ │ ├── triqs-hubbardi │ │ │ └── package.py │ │ ├── triqs-maxent │ │ │ └── package.py │ │ ├── triqs-omegamaxent-interface │ │ │ └── package.py │ │ ├── triqs-tprf │ │ │ └── package.py │ │ └── triqs │ │ │ └── package.py │ └── repo.yaml ├── run └── sra-tools-javadoc.patch ├── jupyter └── kernel │ ├── bash │ ├── builder.sh │ └── default.nix │ ├── builder.py │ ├── default.nix │ └── ir │ ├── builder.R │ ├── builder.sh │ └── default.nix ├── nixpkgs ├── default.nix ├── jdupes.nix ├── overlay.nix ├── sssd │ └── nss-client.nix └── stdenv.nix ├── packs ├── capture.sh ├── default.nix ├── lib.nix ├── lmodCache.nix └── lmodCache.sh ├── patch ├── default.nix ├── nix-ignore-fsea.patch ├── openssh-keysign-setuid.patch └── shadow-nosuid.patch ├── spack ├── bin.nix ├── bin.sh ├── builder.py ├── cache.py ├── config.nix ├── config.sh ├── generate.py ├── install.sh ├── modules.nix ├── modules.py ├── nixpack.py └── repo │ ├── packages │ ├── libass │ │ └── package.py │ ├── mplayer │ │ └── package.py │ ├── mpv │ │ └── package.py │ ├── mupdf │ │ └── package.py │ ├── wecall │ │ ├── cmake-rhel-regex.patch │ │ ├── ncurses.patch │ │ └── package.py │ └── xscreensaver │ │ └── package.py │ └── repo.yaml └── view ├── builder.py └── default.nix /.gitignore: -------------------------------------------------------------------------------- 1 | /result 2 | /singularity 3 | /apptainer 4 | spackBin 5 | result 6 | result-* 7 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "spack"] 2 | path = fi/spack 3 | url = https://github.com/flatironinstitute/spack 4 | branch = fi-pkgs 5 | -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | Copyright (c) 2021 Dylan Simon, Flatiron Institute 2 | Portions copyright 
(c) 2003-2021 Eelco Dolstra and the Nixpkgs/NixOS contributors 3 | Portions copyright (c) 2013-2020 LLNS, LLC and other Spack Project Developers. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining 6 | a copy of this software and associated documentation files (the 7 | "Software"), to deal in the Software without restriction, including 8 | without limitation the rights to use, copy, modify, merge, publish, 9 | distribute, sublicense, and/or sell copies of the Software, and to 10 | permit persons to whom the Software is furnished to do so, subject to 11 | the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 20 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 22 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # nixpack = [nix](https://nixos.org/nix)+[spack](https://spack.io/) 2 | 3 | A hybrid of the [nix package manager](https://github.com/NixOS/nix) and [spack](https://github.com/spack/spack) where nix (without nixpkgs) is used to solve and manage packages, using the package repository, builds, and modules from spack. 4 | 5 | If you love nix's expressiveness and efficiency, but don't need the purity of nixpkgs (in the sense of independence from the host system)... 
if you like the spack packages and package.py format, but are tired of managing roots and concretizations, this may be for you. 6 | Nix on the outside, spack on the inside. 7 | 8 | ## Usage 9 | 10 | 1. Install and configure [nix](https://nixos.org/manual/nix/stable/#chap-installation), sufficient to build derivations. 11 | 1. Edit (or copy) [`default.nix`](default.nix). 12 | - It's recommended to set `packs.spackSrc.rev` to a fixed version of spack. Changing the spack version requires all packages to be rebuilt. If you want to update individual packages without a rebuild, you can put them in `spack/repo/packages` (or another repo in `packs.repos`). 13 | - Set `packs.os` and `packs.global.target`. 14 | - Set `packs.spackConfig.config.source_cache` and add any other custom spack config you want (nixpack ignores system and user spack config for purity, but will load default and site config from the spack repo itself). 15 | - Set `bootstrapPacks.package.compiler` to a pre-existing (system/external) compiler to be used to bootstrap. 16 | - Set `packs.package.gcc` to choose your default compiler, or `packs.package.compiler` to use something other than gcc. 17 | - Add any other package preferences to `packs.package` (versions, variants, virtual providers, etc.) 18 | - See `packs.global.fixedDeps`: by default multiple different instances of any given package may be built in order to satisfy requirements, but you may prefer to force only one version of each package, which will improve performance and build times. 19 | 1. Run `nix-build -A pkgs.foo` to build the spack package `foo`. 20 | 1. To build modules, configure `packs.mods` and run `nix-build -A mods`. 21 | 22 | ## Flatiron Specific 23 | 24 | We have our local Flatiron-specific configuration and repositories in [`fi`](fi), complete with views and modules, some of which may be more generally useful or at least helpful reference or template for creating a full working system. 
25 | See the [README](fi/README.md) in that directory for more information. 26 | 27 | ## Compatibility 28 | 29 | nixpack uses an unmodified checkout of spack (as specified in `spackSrc`), and should work with other forks as well. 30 | However, it makes many assumptions about the internals of spack builds, so may not work on much older (or newer) versions. 31 | 32 | ## Implementation and terminology 33 | 34 | In nixpkgs, there's mainly the concept of package, and arguments that can be overridden. 35 | In spack, there are packages and specs, and "spec" is used in many different ways. 36 | We define a few more specific concepts to merge the two. 37 | 38 | ### package descriptor 39 | 40 | The metadata for a spack package. 41 | These are generated by [`spack/generate.py`](spack/generate.py) from the spack repo `package.py`s and loaded into `packs.repo`. 42 | They look like this: 43 | 44 | ```nix 45 | example = { 46 | namespace = "builtin"; 47 | version = ["2.0" "1.2" "1.0"]; # in decreasing order of preference 48 | variants = { 49 | flag = true; 50 | option = ["a" "b" "c"]; # single-valued, first is default 51 | multi = { 52 | a = true; 53 | b = false; 54 | }; 55 | }; 56 | depends = { 57 | /* package preferences for dependencies (see below) */ 58 | compiler = { # added implicitly if missing 59 | deptype = ["build" "link"]; 60 | }; 61 | deppackage = { 62 | version = "1.5:2.1"; 63 | deptype = ["run" "test"]; 64 | }; 65 | notused = null; 66 | }; 67 | provides = { 68 | virtual = "2:"; 69 | }; 70 | paths = {}; # paths to tools provided by this package (like `cc` for compilers) 71 | patches = []; # extra patches to apply (in addition to those in spack) 72 | conflicts = []; # any conflicts (non-empty means invalid) 73 | }; 74 | ``` 75 | 76 | Most things default to empty. 77 | This is not a complete build description, just the metadata necessary to resolve dependencies (concretize). 
78 | In practice, these are constructed as functions that take a resolved package spec as an argument, so that dependencies and such be conditional on a specific version and variants. 79 | 80 | You can build the repo using `nix-build -A spackRepo` (and see `result`). 81 | 82 | ### package preferences 83 | 84 | Constraints for a package that come from the user, or a depending package. 85 | These are used in package descriptor depends and in user global and per-package preferences. 86 | They look similar to package descriptors and can be used to override or constrain some of those values. 87 | 88 | ```nix 89 | example = { 90 | version = "1.3:1.5"; 91 | variants = { 92 | flag = true; 93 | option = "b"; 94 | /* multi options can be specified as list of trues or explicitly */ 95 | multi = ["a"]; 96 | multi = { 97 | a = true; 98 | b = false; 99 | }; 100 | }; 101 | depends = { 102 | compiler = { 103 | name = "clang"; # use clang as the compiler virtual provider 104 | }; 105 | deppackage = { 106 | version = ... # use a specific version for a dependency 107 | }; 108 | virtualdep = { 109 | name = "provider"; 110 | version = ...; 111 | ... 
112 | }; 113 | # dependencies can also be set to a specific package: 114 | builddep = packs.pkgs.builddep; 115 | }; 116 | provides = { 117 | virtual = "version"; # this requires that this package provides virtual (not that it does) 118 | }; 119 | patches = []; # extra patches to apply (in additon to those in the descriptor) 120 | extern = "/opt/local/mypackage"; # a prefix string or derivation (e.g., nixpkgs package) for an external installation (overrides depends) 121 | fixedDeps = false; # only use user preferences to resolve dependencies (see default.nix) 122 | target = "microarch"; # defaults to currentSystem (e.g., x86_64) 123 | verbose = true; # to enable nix-build -Q and nix-store -l (otherwise only spack keeps build logs) 124 | tests = false; # run tests (not implemented) 125 | resolver = ...; # where to find dependent packages (see default.nix) 126 | }; 127 | ``` 128 | 129 | ### package spec 130 | 131 | A resolved (concrete) package specifier created by applying (optional) package preferences to a package descriptor. 132 | This looks just like a package descriptior but with concrete values. 133 | It also includes settings from prefereces like `extern` and `target`. 134 | 135 | ### package 136 | 137 | An actual derivation. 138 | These contain a `spec` metadata attribute. 139 | They also have a `withPrefs` function that can be used to make a new version of this package with updated prefs (unless they are extern). 140 | 141 | ### compiler 142 | 143 | Rather than spack's dedicated `%compiler` concept, we introduce a new virtual "compiler" that all packages depend on and is provided by gcc and llvm (by default). 144 | By setting the package preference for compiler, you determine which compiler to use. 145 | 146 | ### `packs` 147 | 148 | The world, like `nixpkgs`. 149 | It contains `pkgs` with actual packages, as well as `repo`, `view`, `modules`, and other functions. 150 | See [`packs/default.nix`](packs/default.nix) for full details. 
151 | 152 | You can have one or more `packs` instances. 153 | Each instance is defined by a set of global user preferences, as passed to `import ./packs`. 154 | You can also create additional sets based on an existing one using `packs.withPrefs`. 155 | Thus, difference package sets can have different providers or package settings (like a different compiler, mpi version, blas provider, variants, etc.). 156 | 157 | See [`default.nix`](default.nix) for preferences that can be set and their descriptions. 158 | 159 | ### Bootstrapping 160 | 161 | The default compiler is specified in [`default.nix`](default.nix) by `compiler = bootstrapPacks.pkgs.gcc` which means that the compiler used to build everything is `packs` comes from `bootstrapPacks`, and is built with the preferences and compiler defined there. 162 | `bootstrapPacks` in turn specifies a compiler of gcc with `extern` set, i.e., one from the host system. 163 | This compiler is used to build any other bootstrap packages, which are then used to build the main compiler. 164 | You could specify more extern packages in bootstrap to speed up bootstrapping. 165 | 166 | You could also add additional bootstrap layers by setting the bootstrap compiler `resolver` to a different set. 167 | You could also replace specific dependencies or packages from a different `packs` set to bootstrap or modify other packages. 168 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | let 2 | 3 | packs = import ./packs { 4 | /* packs prefs */ 5 | system = builtins.currentSystem; 6 | os = "centos7"; 7 | 8 | /* where to get the spack respository. Note that since everything depends on 9 | spack, changing the spack revision will trigger rebuilds of all packages. 
10 | Can also be set a path (string) to an existing spack install, which will 11 | eliminate the dependency and also break purity, so can cause your repo 12 | metadata to get out of sync, and is not recommended for production. 13 | See also repos and repoPatch below for other ways of updating packages 14 | without modifying the spack repo. */ 15 | spackSrc = { 16 | /* default: 17 | url = "https://github.com/spack/spack"; */ 18 | ref = "develop"; 19 | #rev = "f17e76e9d8d8252245eeefc7b85b6fa0433837b8"; 20 | }; 21 | /* extra config settings for spack itself. Can contain any standard spack 22 | configuration, but don't put compilers (automatically generated), packages 23 | (based on package preferences below), or modules (passed to modules 24 | function) here. */ 25 | spackConfig = { 26 | config = { 27 | /* must be set to somewhere your nix builder(s) can write to */ 28 | source_cache = "/tmp/spack_cache"; 29 | }; 30 | }; 31 | /* environment for running spack. spack needs things like python, cp, tar, 32 | etc. These can be string paths to the system or to packages/environments 33 | from nixpkgs or similar, but regardless need to be external to nixpacks. */ 34 | spackPython = "/usr/bin/python3"; 35 | spackEnv = { 36 | PATH = "/bin:/usr/bin"; 37 | }; 38 | 39 | /* packs can optionally include nixpkgs for additional packages or bootstrapping. 40 | omit to disable. */ 41 | nixpkgsSrc = { 42 | #url = "https://github.com/NixOS/nixpkgs"; 43 | ref = "release-23.11"; 44 | #rev = "c8c5faff75fd017e468e8733312525b51cea1af2"; 45 | }; 46 | 47 | /* additional spack repos to include by path, managed by nixpack. 48 | These should be normal spack repos, including repo.yaml, and are prepended 49 | to any configured spack repos. 50 | Repos specified here have the advantage of correctly managing nix 51 | dependencies, so changing a package will only trigger rebuilds of 52 | it and dependent packages. 
53 | Theoretically you could copy the entire spack builtins repo here and 54 | manage package updates that way, leaving spackSrc at a fixed revision. 55 | However, if you update the repo, you'll need to ensure compatibility with 56 | the spack core libraries, too. */ 57 | repos = [ 58 | spack/repo 59 | ]; 60 | /* updates to the spack repo (see patch/default.nix for examples) 61 | repoPatch = { 62 | package = [spec: [old:]] { 63 | new... 64 | }; 65 | }; */ 66 | 67 | /* global defaults for all packages (merged with per-package prefs) */ 68 | global = { 69 | /* spack architecture target */ 70 | target = "broadwell"; 71 | /* set spack verbose to print build logs during spack bulids (and thus 72 | captured by nix). regardless, spack also keeps logs in pkg/.spack. */ 73 | verbose = false; 74 | /* enable build tests (and test deps) */ 75 | tests = false; 76 | /* how to resolve dependencies, similar to concretize together or separately. 77 | fixedDeps = false: Dependencies are resolved dynamically based on 78 | preferences and constraints imposed by each depender. This can result 79 | in many different versions of each package existing in packs. 80 | fixedDeps = true: Dependencies are resolved only by user prefs, and an 81 | error is produced if dependencies don't conform to their dependers' 82 | constraints. This ensures only one version of each dependent package 83 | exists within packs. Different packs with different prefs may have 84 | different versions. Top-level packages explicitly resolved with 85 | different prefs or dependency prefs may also be different. Virtuals 86 | are always resolved (to a package name) dynamically. 87 | this can be overridden per-package for only that package's dependencies. */ 88 | fixedDeps = false; 89 | /* How to find dependencies. Normally dependencies are pulled from other 90 | packages in this same packs. 
In some cases you may want some or all 91 | dependencies for a package to come from a different packs, perhaps 92 | because you don't care if build-only dependencies use the same compiler 93 | or python version. This lets you override where dependencies come from. 94 | It takes two optional arguments: 95 | * list of dependency types (["build" "link" "run" "test"]) 96 | * the name of the dependent package 97 | And should return either: 98 | * null, meaning use the current packs default 99 | * an existing packs object, to use instead 100 | * a function taking package preferences to a resolved package (like 101 | packs.getResolver). In this case, prefs will be {} if fixedDeps = 102 | true, or the dependency prefs from the parent if fixedDeps = false. 103 | resolver = [deptype: [name: ]]; */ 104 | }; 105 | /* package-specific preferences */ 106 | package = { 107 | /* compiler is an implicit virtual dependency for every package */ 108 | compiler = bootstrapPacks.pkgs.gcc; 109 | /* preferences for individual packages or virtuals */ 110 | /* get cpio from system: 111 | cpio = { 112 | extern = "/usr"; 113 | version = "2.11"; 114 | }; */ 115 | /* specify virtual providers: can be (lists of) package or { name; ...prefs } 116 | mpi = [ packs.pkgs.openmpi ]; 117 | java = { name = "openjdk"; version = "10"; }; */ 118 | /* use gcc 7.x: 119 | gcc = { 120 | version = "7"; 121 | }; */ 122 | /* enable cairo+pdf: 123 | cairo = { 124 | variants = { 125 | pdf = true; 126 | }; 127 | }; */ 128 | /* use an external slurm: 129 | slurm = { 130 | extern = "/cm/shared/apps/slurm/current"; 131 | version = "20.02.5"; 132 | variants = { 133 | sysconfdir = "/cm/shared/apps/slurm/var/etc/slurm"; 134 | pmix = true; 135 | hwloc = true; 136 | }; 137 | }; */ 138 | nix = { 139 | variants = { 140 | storedir = let v = builtins.getEnv "NIX_STORE_DIR"; in if v == "" then "none" else v; 141 | statedir = let v = builtins.getEnv "NIX_STATE_DIR"; in if v == "" then "none" else v; 142 | }; 143 | }; 144 | }; 145 
| 146 | }; 147 | 148 | /* A set of packages with different preferences, based on packs above. 149 | This set is used to bootstrap gcc, but other packs could also be used to set 150 | different virtuals, versions, variants, compilers, etc. */ 151 | bootstrapPacks = packs.withPrefs { 152 | package = { 153 | /* must be set to an external compiler capable of building compiler (above) */ 154 | compiler = { 155 | name = "gcc"; 156 | version = "4.8.5"; 157 | extern = "/usr"; /* install prefix */ 158 | /* can also have multiple layers of bootstrapping, where each compiler is built by another */ 159 | }; 160 | /* can speed up bootstrapping by providing more externs 161 | zlib = { 162 | extern = "/usr"; 163 | version = "..."; 164 | }; ... */ 165 | }; 166 | }; 167 | 168 | in 169 | 170 | packs // { 171 | mods = packs.modules { 172 | /* this correspond to module config in spack */ 173 | /* modtype = "lua"; */ 174 | coreCompilers = [packs.pkgs.compiler bootstrapPacks.pkgs.compiler]; 175 | /* 176 | config = { 177 | hiearchy = ["mpi"]; 178 | hash_length = 0; 179 | projections = { 180 | # warning: order is lost 181 | "package+variant" = "{name}/{version}-variant"; 182 | }; 183 | prefix_inspections = { 184 | "dir" = ["VAR"]; 185 | }; 186 | all = { 187 | autoload = "none"; 188 | }; 189 | package = { 190 | environment = { 191 | prepend_path = { 192 | VAR = "{prefix}/path"; 193 | }; 194 | }; 195 | }; 196 | }; 197 | */ 198 | pkgs = with packs.pkgs; [ 199 | gcc 200 | { pkg = gcc.withPrefs { # override package defaults 201 | version = "10"; 202 | }; 203 | default = true; # set as default version 204 | # extra content to append to module file 205 | postscript = '' 206 | LModMessage("default gcc loaded") 207 | ''; 208 | } 209 | perl 210 | /* 211 | { # a custom module, not from spack 212 | name = "other-package"; 213 | version = "1.2"; 214 | prefix = "/opt/other"; 215 | # overrides for module config 216 | environment = { 217 | prepend_path = { 218 | VAR = "{prefix}/path"; 219 | }; 220 | }; 
221 | projection = "{name}/{version}-local"; 222 | context = { # other variables to set in the template 223 | short_description = "Some other package"; 224 | }; 225 | } 226 | */ 227 | ]; 228 | }; 229 | } 230 | -------------------------------------------------------------------------------- /fi/README.md: -------------------------------------------------------------------------------- 1 | # Flatiron Modules 2 | 3 | This directory contains all the configuration for building the FI modules used on rusty and popeye. 4 | 5 | ## Package management 6 | 7 | Most configuration goes in the [`default.nix`](default.nix) file. 8 | There are a few important sections you may need to interact with. 9 | Search for "----- *header*" to find them. 10 | 11 | ### global package preferences 12 | 13 | This section is an alphabetical list of key-value pairs of package preferences that apply globally. 14 | That is, all modules and all their dependencies used these settings. 15 | For example, this makes the default hdf5 version 1.10.x and enables some features. 16 | These can still be overridden for individual modules or specific dependencies. 17 | 18 | ``` 19 | hdf5 = { 20 | version = "1.10"; 21 | variants = { 22 | hl = true; 23 | fortran = true; 24 | cxx = true; 25 | }; 26 | }; 27 | ``` 28 | 29 | For example, if you get an error about "XXX dependency YYY: package YYY does not match dependency constraints ZZZ", you may have to add a global preference like: 30 | 31 | ``` 32 | YYY = { 33 | # for XXX 34 | ZZZ 35 | }; 36 | ``` 37 | 38 | ### Core modules 39 | 40 | The core modules are those built only with the default compiler and without MPI. 41 | This includes mainly command-line tools or things without fortran libraries that the user may want to link against. 42 | This is a simple list of packages in alphabetical order. 43 | You can add simple packages names here, or `(PACKAGE.withPrefs { ... })` to override global preferences. 44 | You can also add module settings with `{ pkg = PACKAGE; ... 
}`. 45 | 46 | ### compiler modules 47 | 48 | These modules are build with all compilers (which is really just whatever versions of gcc we've enabled). 49 | This should include libraries that may change between compilers, for example fortran or C++ libraries or other things that may impact performance or linking. 50 | This is otherwise a list just like core modules. 51 | 52 | #### compilers 53 | 54 | The list of enabled compilers, each of which is used to build all packages in this section 55 | 56 | #### MPI modules 57 | 58 | These modules are built with all MPI libraries (crossed with all compilers). 59 | This is also really just a list, but has a lot of conditionals as some things only build with some compilers or MPI combinations. 60 | 61 | ##### mpis 62 | 63 | The list of all MPI libraries, used to build all packages in this section. 64 | 65 | ##### python+mpi modules 66 | 67 | These modules are built with for all python versions and MPI libraries (crossed with all compilers). 68 | It has both a list of python packages that get combined into a view (like python packages below), and a list of modules build with these pythons (though currently this only includes triqs, which is conditioned to only build for the default python and mpi). 69 | 70 | #### python packages 71 | 72 | These packages are all combined into a single view and exposed as a single module, so should really only contain python packages. 73 | Otherwise it's just another list of packages. 74 | These are built for all enabled python versions (crossed with all compilers). 75 | 76 | ##### python 77 | 78 | The list of all python versions, used to build all packages in this section. 79 | 80 | ### nixpkgs modules 81 | 82 | Modules built from nixpkgs. 83 | This should only be for applications, as they are built purely from nixpkgs, including its compiler and libc. 84 | 85 | ### misc modules 86 | 87 | Other pseudo-modules that don't correspond to packages. 
88 | 89 | ## Command-line usage 90 | 91 | Builds and other operations should be run on worker1000 or pcn-1-01. 92 | To test a change, just run "fi/run build -j 1 --cores 8" (or whatever parallelism you prefer). 93 | This will (if successful) produce a "result" directory with the modules. 94 | You can unset MODULEPATH and source "result/setup.sh" in your shell to try out the newly built modules. 95 | 96 | If some package fails to build, you can re-run with "-K" and then (as root) go look at the failed build in /dev/shm/nix-build-NAME (which you should manually clean up when done). 97 | 98 | ### Utility script 99 | 100 | The script [`run`](run) can help with common tasks (some of which are more generally useful): 101 | ``` 102 | Usage: fi/run COMMAND 103 | 104 | Commands: 105 | 106 | build Build modules into result. Takes the same arguments as 107 | nix-build (-jN, --cores M, -K, ...). 108 | spec [PKG] Print the spec tree for a specific package or all modules, 109 | along with the total number of unique packages. 110 | gc Cleanup any unreferenced nix stores (nix-store --gc). 111 | release Publish a release profile for... 112 | modules nixpack lmod modules (default) 113 | jupyter jupyterhub server environment 114 | nix nix build environment 115 | spack ... Run a spack command in the nixpack environment (things like list 116 | and info work, but those managing packages will not) 117 | ``` 118 | 119 | ### Environment setup 120 | 121 | You can source `env` to setup a build environment for running `nix` command-line tools (like `nix-build`). 122 | For example, to build a single package into `result`, run: 123 | ``` 124 | ./fi/run build -j 1 --cores 8 -A pkgs.packagename 125 | ``` 126 | 127 | To build a specific Python package: 128 | 129 | ``` 130 | ./fi/run build -j 1 -A pkgStruct.compilers.0.pythons.0.packs.pkgs.py-classylss 131 | ``` 132 | 133 | ### Releases 134 | 135 | To do a release: 136 | 137 | 1. 
`fi/run release` (or `fi/run release all` if enough has changed to affect jupyter, nix, lmod, etc., or whatever subset makes sense) 138 | 2. Release should now show up as new `modules` version, which you can load to test. 139 | 3. Update default symlink in /cm/shared/sw/lmod/modules/modules when ready. 140 | 4. Run `fi/run modules` to update cache (after any change to modules). 141 | 142 | ## Branches 143 | `fi` corresponds to the current modules set; `main` is the upcoming set. 144 | -------------------------------------------------------------------------------- /fi/docker/builder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | mkdir -p $out/bin 4 | tar -xzf $docker -C $out/bin --strip-components=1 5 | tar -xzf $rootless -C $out/bin --strip-components=1 6 | 7 | rm -f $out/bin/dockerd-rootless-setuptool.sh 8 | cp $setupsh $out/bin/dockerd-rootless-setup.sh 9 | -------------------------------------------------------------------------------- /fi/docker/default.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | 3 | let docker = derivation rec { 4 | inherit (packs) system; 5 | pname = "docker"; 6 | version = "28.0.1"; 7 | name = "${pname}-${version}"; 8 | docker = builtins.fetchurl { 9 | url = "https://download.docker.com/linux/static/stable/${packs.target}/${name}.tgz"; 10 | sha256 = "0ij7ha9b596lq7pvcxd5r345nm76dlgdim5w1nn9w6bqbmmximjy"; 11 | }; 12 | rootless = builtins.fetchurl { 13 | url = "https://download.docker.com/linux/static/stable/${packs.target}/docker-rootless-extras-${version}.tgz"; 14 | sha256 = "1fsx7w5b91r23pad3hpwyvcljc62hw60b42nqqpp463ggvfzykil"; 15 | }; 16 | PATH = "/bin:/usr/bin"; 17 | setupsh = ./setup.sh; 18 | builder = ./builder.sh; 19 | }; in 20 | 21 | docker // { 22 | module = with docker; { 23 | name = pname; 24 | version = version; 25 | prefix = docker; 26 | context = { 27 | short_description = "user rootless docker (for 
workstations)"; 28 | long_description = "Use this module to run docker on your own workstation."; 29 | }; 30 | postscript = '' 31 | local xdg_runtime_dir = os.getenv("XDG_RUNTIME_DIR") 32 | if (mode() == "load") then 33 | local user = os.getenv("USER") 34 | local subid = capture("/bin/getsubids " .. user); 35 | if not (subid:match(user) and isDir(pathJoin("/home", user)) and isDir(xdg_runtime_dir)) then 36 | LmodBreak("The docker module can be used to run a rootless docker daemon on your own workstation. If you have a workstation and you'd like to use docker, please try loading it there. For more details, see: https://wiki.flatironinstitute.org/SCC/Software/DockerSingularity") 37 | end 38 | end 39 | 40 | execute {cmd="${docker}/bin/dockerd-rootless-setup.sh && /bin/systemctl --user start docker", modeA={"load"}} 41 | execute {cmd="/bin/systemctl --user stop docker", modeA={"unload"}} 42 | setenv("DOCKER_HOST", "unix://" .. pathJoin(xdg_runtime_dir, "docker.sock")) 43 | ''; 44 | }; 45 | } 46 | -------------------------------------------------------------------------------- /fi/docker/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [[ ! -x $DOCKER_ROOT/bin/dockerd-rootless.sh || ! -d /home/$USER ]] || ! /bin/getsubids $USER >& /dev/null ; then 4 | echo "Please make sure the docker module is loaded and you are on your own workstation." 
5 | exit 1 6 | fi 7 | 8 | xdg=${XDG_CONFIG_HOME:-$HOME/.config} 9 | cfg=$xdg/systemd/user 10 | mkdir -p $cfg 11 | rm -f $cfg/docker.service 12 | cat <<- EOT > $cfg/docker.service 13 | [Unit] 14 | Description=Docker Application Container Engine (Rootless) 15 | Documentation=https://docs.docker.com/go/rootless/ 16 | RequiresMountsFor=/home/$USER 17 | ConditionHost=`hostname` 18 | ConditionUser=$USER 19 | 20 | [Service] 21 | Environment=PATH=$DOCKER_ROOT/bin:/sbin:/usr/sbin:/bin:/usr/bin 22 | ExecStart=$DOCKER_ROOT/bin/dockerd-rootless.sh --data-root /home/$USER/.local/share/docker 23 | ExecReload=/bin/kill -s HUP \$MAINPID 24 | TimeoutSec=10 25 | Restart=no 26 | StartLimitBurst=3 27 | StartLimitInterval=60s 28 | LimitNOFILE=infinity 29 | LimitNPROC=infinity 30 | LimitCORE=infinity 31 | TasksMax=infinity 32 | Delegate=yes 33 | Type=notify 34 | NotifyAccess=all 35 | KillMode=mixed 36 | EOT 37 | if [[ ! -f $xdg/docker/daemon.json ]] ; then 38 | mkdir -p $xdg/docker 39 | cat << EOT > $xdg/docker/daemon.json 40 | { 41 | "runtimes": { 42 | "nvidia": { 43 | "args": [], 44 | "path": "nvidia-container-runtime" 45 | } 46 | } 47 | } 48 | EOT 49 | fi 50 | systemctl --user daemon-reload 51 | -------------------------------------------------------------------------------- /fi/env: -------------------------------------------------------------------------------- 1 | # setup FI environment 2 | nix=/mnt/sw/nix 3 | daemon=nixsw 4 | nixenv=$nix/state/profiles/$(uname -m)/nix 5 | 6 | export NIX_REMOTE=unix:///run/$daemon-daemon/socket 7 | export NIX_STORE_DIR=$nix/store 8 | export NIX_STATE_DIR=$nix/state 9 | export NIX_LINK=$HOME/.nix-profile 10 | export PATH=$NIX_LINK/bin:$nixenv/bin:$PATH 11 | export MANPATH=$NIX_LINK/share/man:$nixenv/share/man:$MANPATH 12 | export NIX_SSL_CERT_FILE=/etc/pki/tls/certs/ca-bundle.crt 13 | export TMPDIR=/dev/shm 14 | -------------------------------------------------------------------------------- /fi/gcc-13.3-nvcc.patch: 
-------------------------------------------------------------------------------- 1 | Reverts https://github.com/gcc-mirror/gcc/commit/2b3ecdf4fb13471b69d80583e10c5baedfe84d7c 2 | for compatibility with nvcc 3 | https://forums.developer.nvidia.com/t/compilation-errors-with-gcc-versions-11-14-and-cuda-toolkit-12-5-12-6-due-to-undefined-builtin-ia32-ldtilecfg-and-builtin-ia32-sttilecfg-etc/308401 4 | 5 | diff --git a/gcc/config/i386/amxtileintrin.h b/gcc/config/i386/amxtileintrin.h 6 | index cc6022657a87f..2ee7b6bad2bf3 100644 7 | --- a/gcc/config/i386/amxtileintrin.h 8 | +++ b/gcc/config/i386/amxtileintrin.h 9 | @@ -39,14 +39,14 @@ extern __inline void 10 | __attribute__((__gnu_inline__, __always_inline__, __artificial__)) 11 | _tile_loadconfig (const void *__config) 12 | { 13 | - __builtin_ia32_ldtilecfg (__config); 14 | + __asm__ volatile ("ldtilecfg\t%X0" :: "m" (*((const void **)__config))); 15 | } 16 | 17 | extern __inline void 18 | __attribute__((__gnu_inline__, __always_inline__, __artificial__)) 19 | _tile_storeconfig (void *__config) 20 | { 21 | - __builtin_ia32_sttilecfg (__config); 22 | + __asm__ volatile ("sttilecfg\t%X0" : "=m" (*((void **)__config))); 23 | } 24 | 25 | extern __inline void 26 | -------------------------------------------------------------------------------- /fi/linkfiles.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | /bin/mkdir -p $out 3 | /bin/ln -s -t $out "$@" 4 | -------------------------------------------------------------------------------- /fi/lmod-no-sys-tcl.patch: -------------------------------------------------------------------------------- 1 | diff --git a/Makefile.in b/Makefile.in 2 | index 9d73693d..397002a7 100644 3 | --- a/Makefile.in 4 | +++ b/Makefile.in 5 | @@ -367,7 +367,7 @@ src/computeHashSum: $(ComputeHashSum) 6 | tcl2lua: 7 | if [ -d $(srcdir)/pkgs/tcl2lua ]; then \ 8 | $(MAKE) -C $(srcdir)/pkgs/tcl2lua LUA_INC=$(LUA_INCLUDE) \ 9 | - 
TCL_INCLUDE=$(TCL_INCLUDE) TCL_LIBS=$(TCL_LIBS) \ 10 | + TCL_INCLUDE=$(TCL_INCLUDE) TCL_LIBS="$(TCL_LIBS)" \ 11 | LIB=$(DESTDIR)$(LIB) LIBS=@LIBS@ CC="$(CC)" \ 12 | SHARE=$(DESTDIR)$(LIBEXEC) MODE_R=$(MODE_R) \ 13 | install; \ 14 | diff --git a/configure b/configure 15 | index 5d9e8ddb..27643553 100755 16 | --- a/configure 17 | +++ b/configure 18 | @@ -4370,18 +4370,6 @@ if test "$ALLOW_TCL_MFILES" = no ; then 19 | fi 20 | 21 | if test "$FAST_TCL_INTERP" = yes ; then 22 | - OS=$(uname -s) 23 | - if test "$OS" = Darwin ; then 24 | - DIR=$(xcrun --show-sdk-path) 25 | - TCL_INCLUDE="-I${DIR}/usr/include" 26 | - else 27 | - for i in "" tcl tcl8.8 tcl8.7 tcl8.6 tcl8.5; do 28 | - if test -f /usr/include/$i/tcl.h ; then 29 | - TCL_INCLUDE="-I/usr/include/$i" 30 | - break; 31 | - fi 32 | - done 33 | - fi 34 | 35 | if test -z "$TCL_INCLUDE" ; then 36 | # Extract the first word of "pkg-config", so it can be a program name with args. 37 | @@ -4509,10 +4497,10 @@ return Tcl_CreateInterp (); 38 | return 0; 39 | } 40 | _ACEOF 41 | -for ac_lib in '' tcl tcl8.8 tcl8.7 tcl8.6 tcl8.5 42 | -do 43 | +for ac_lib in '' ; do 44 | if test -z "$ac_lib"; then 45 | ac_res="none required" 46 | + LIBS="$TCL_LIBS $ac_func_search_save_LIBS" 47 | else 48 | ac_res=-l$ac_lib 49 | LIBS="-l$ac_lib $TCL_LIBS $ac_func_search_save_LIBS" 50 | -------------------------------------------------------------------------------- /fi/lmod/modules.lua: -------------------------------------------------------------------------------- 1 | whatis("Use modules build release from nixpack @GIT@ @DATE@") 2 | setenv("MODULEPATH_BASE", "@MODS@") 3 | prepend_path("MODULEPATH", "@MODS@/Core") 4 | add_property("lmod","sticky") 5 | -------------------------------------------------------------------------------- /fi/lmod/modules.nix: -------------------------------------------------------------------------------- 1 | gitrev: packs: mods: 2 | # a simple modules.lua meta-module for adding modules 3 | derivation (packs.spackEnv // 
{ 4 | name = "modules.lua"; 5 | inherit (mods) system; 6 | mods = "${mods}/${packs.platform}-${packs.os}-${packs.target}"; 7 | src = ./modules.lua; 8 | git = gitrev; 9 | builder = ./modules.sh; 10 | }) 11 | -------------------------------------------------------------------------------- /fi/lmod/modules.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -eu 2 | /bin/sed "s:@MODS@:$mods:g;s!@DATE@!`/bin/date`!g;s:@GIT@:$git:g" $src > $out 3 | -------------------------------------------------------------------------------- /fi/openmpi-1.10-gcc.PATCH: -------------------------------------------------------------------------------- 1 | diff -up ./ompi/mca/mtl/psm/mtl_psm_types.h.ORIG ./ompi/mca/mtl/psm/mtl_psm_types.h 2 | --- ./ompi/mca/mtl/psm/mtl_psm_types.h.ORIG 2016-08-24 10:54:57.000000000 -0400 3 | +++ ./ompi/mca/mtl/psm/mtl_psm_types.h 2021-07-07 11:50:26.362739893 -0400 4 | @@ -70,7 +70,7 @@ 5 | }; 6 | typedef struct mca_mtl_psm_component_t mca_mtl_psm_component_t; 7 | 8 | -OMPI_DECLSPEC mca_mtl_psm_component_t mca_mtl_psm_component; 9 | +OMPI_DECLSPEC extern mca_mtl_psm_component_t mca_mtl_psm_component; 10 | 11 | #define PSM_MAKE_MQTAG(ctxt,rank,utag) \ 12 | ( (((ctxt)&0xffffULL)<<48)| (((rank)&0xffffULL)<<32)| \ 13 | -------------------------------------------------------------------------------- /fi/openmpi-1.10.7.PATCH: -------------------------------------------------------------------------------- 1 | diff -up ./ompi/mca/btl/openib/mca-btl-openib-device-params.ini.ORIG ./ompi/mca/btl/openib/mca-btl-openib-device-params.ini 2 | --- ./ompi/mca/btl/openib/mca-btl-openib-device-params.ini.ORIG 2017-04-22 23:28:31.000000000 -0400 3 | +++ ./ompi/mca/btl/openib/mca-btl-openib-device-params.ini 2020-09-02 18:16:36.863520711 -0400 4 | @@ -190,6 +190,15 @@ max_inline_data = 256 5 | 6 | ############################################################################ 7 | 8 | +[Mellanox ConnectX6] 9 | +vendor_id = 
0x2c9,0x5ad,0x66a,0x8f1,0x1708,0x03ba,0x15b3,0x119f 10 | +vendor_part_id = 4123 11 | +use_eager_rdma = 1 12 | +mtu = 4096 13 | +max_inline_data = 256 14 | + 15 | +############################################################################ 16 | + 17 | [IBM eHCA 4x and 12x] 18 | vendor_id = 0x5076 19 | vendor_part_id = 0 20 | diff -up ./ompi/mca/common/verbs/common_verbs_port.c.ORIG ./ompi/mca/common/verbs/common_verbs_port.c 21 | --- ./ompi/mca/common/verbs/common_verbs_port.c.ORIG 2016-08-24 10:54:57.000000000 -0400 22 | +++ ./ompi/mca/common/verbs/common_verbs_port.c 2020-09-02 18:14:40.482659000 -0400 23 | @@ -70,6 +70,10 @@ int ompi_common_verbs_port_bw(struct ibv 24 | /* EDR: 25.78125 Gbps * 64/66, in megabits */ 25 | *bandwidth = 25000; 26 | break; 27 | + case 64: 28 | + /* HDR: 50Gbps * 64/66, in megabits */ 29 | + *bandwidth = 50000; 30 | + break; 31 | default: 32 | /* Who knows? */ 33 | return OMPI_ERR_NOT_FOUND; 34 | @@ -92,6 +96,10 @@ int ompi_common_verbs_port_bw(struct ibv 35 | /* 12x */ 36 | *bandwidth *= 12; 37 | break; 38 | + case 16: 39 | + /* 16x */ 40 | + *bandwidth *= 16; 41 | + break; 42 | default: 43 | /* Who knows? 
*/ 44 | return OMPI_ERR_NOT_FOUND; 45 | -------------------------------------------------------------------------------- /fi/openmpi-2.1.6.PATCH: -------------------------------------------------------------------------------- 1 | diff -up ./opal/mca/btl/openib/mca-btl-openib-device-params.ini.ORIG ./opal/mca/btl/openib/mca-btl-openib-device-params.ini 2 | --- ./opal/mca/btl/openib/mca-btl-openib-device-params.ini.ORIG 2020-09-02 10:44:22.212245000 -0400 3 | +++ ./opal/mca/btl/openib/mca-btl-openib-device-params.ini 2020-09-02 18:16:41.658047216 -0400 4 | @@ -190,6 +190,15 @@ max_inline_data = 256 5 | 6 | ############################################################################ 7 | 8 | +[Mellanox ConnectX6] 9 | +vendor_id = 0x2c9,0x5ad,0x66a,0x8f1,0x1708,0x03ba,0x15b3,0x119f 10 | +vendor_part_id = 4123 11 | +use_eager_rdma = 1 12 | +mtu = 4096 13 | +max_inline_data = 256 14 | + 15 | +############################################################################ 16 | + 17 | [IBM eHCA 4x and 12x] 18 | vendor_id = 0x5076 19 | vendor_part_id = 0 20 | diff -up ./opal/mca/common/verbs/common_verbs_port.c.ORIG ./opal/mca/common/verbs/common_verbs_port.c 21 | --- ./opal/mca/common/verbs/common_verbs_port.c.ORIG 2018-08-04 13:28:59.000000000 -0400 22 | +++ ./opal/mca/common/verbs/common_verbs_port.c 2020-09-01 22:08:40.187660000 -0400 23 | @@ -94,6 +94,10 @@ int opal_common_verbs_port_bw(struct ibv 24 | /* 12x */ 25 | *bandwidth *= 12; 26 | break; 27 | + case 16: 28 | + /* 16x */ 29 | + *bandwidth *= 16; 30 | + break; 31 | default: 32 | /* Who knows? 
*/ 33 | return OPAL_ERR_NOT_FOUND; 34 | -------------------------------------------------------------------------------- /fi/py-extension-helpers-setup.py.patch: -------------------------------------------------------------------------------- 1 | diff --git a/extension_helpers/_setup_helpers.py b/extension_helpers/_setup_helpers.py 2 | index 7e766da..8636873 100644 3 | --- a/extension_helpers/_setup_helpers.py 4 | +++ b/extension_helpers/_setup_helpers.py 5 | @@ -94,8 +94,13 @@ def get_extensions(srcdir='.'): 6 | if len(ext_modules) > 0: 7 | main_package_dir = min(packages, key=len) 8 | src_path = os.path.join(os.path.dirname(__file__), 'src') 9 | - shutil.copy(os.path.join(src_path, 'compiler.c'), 10 | - os.path.join(srcdir, main_package_dir, '_compiler.c')) 11 | + dst_file = os.path.join(srcdir, main_package_dir, '_compiler.c') 12 | + try: 13 | + # remove dst_file in case it exists but is read-only 14 | + os.remove(dst_file) 15 | + except FileNotFoundError: 16 | + pass 17 | + shutil.copy(os.path.join(src_path, 'compiler.c'), dst_file) 18 | ext = Extension(main_package_dir + '.compiler_version', 19 | [os.path.join(main_package_dir, '_compiler.c')]) 20 | ext_modules.append(ext) 21 | -------------------------------------------------------------------------------- /fi/python-ncursesw-py-3.11.4.patch: -------------------------------------------------------------------------------- 1 | --- python/configure.ac.orig 2022-07-25 16:01:43.547382695 -0400 2 | +++ python/configure.ac 2022-07-25 16:02:15.427530089 -0400 3 | @@ -5021,10 +5021,6 @@ 4 | 5 | # first curses header check 6 | ac_save_cppflags="$CPPFLAGS" 7 | -if test "$cross_compiling" = no; then 8 | - CPPFLAGS="$CPPFLAGS -I/usr/include/ncursesw" 9 | -fi 10 | - 11 | AC_CHECK_HEADERS(curses.h ncurses.h) 12 | 13 | # On Solaris, term.h requires curses.h 14 | --- python/setup.py.orig 2022-07-25 16:07:43.893049824 -0400 15 | +++ python/setup.py 2022-07-25 16:07:48.429070837 -0400 16 | @@ -955,8 +955,6 @@ 17 | 
panel_library = 'panel' 18 | if curses_library == 'ncursesw': 19 | curses_defines.append(('HAVE_NCURSESW', '1')) 20 | - if not CROSS_COMPILING: 21 | - curses_includes.append('/usr/include/ncursesw') 22 | # Bug 1464056: If _curses.so links with ncursesw, 23 | # _curses_panel.so must link with panelw. 24 | panel_library = 'panelw' 25 | -------------------------------------------------------------------------------- /fi/python-ncursesw.patch: -------------------------------------------------------------------------------- 1 | --- python/configure.ac.orig 2022-07-25 16:01:43.547382695 -0400 2 | +++ python/configure.ac 2022-07-25 16:02:15.427530089 -0400 3 | @@ -5021,10 +5021,6 @@ 4 | 5 | # first curses header check 6 | ac_save_cppflags="$CPPFLAGS" 7 | -if test "$cross_compiling" = no; then 8 | - CPPFLAGS="$CPPFLAGS -I/usr/include/ncursesw" 9 | -fi 10 | - 11 | AC_CHECK_HEADERS(curses.h ncurses.h) 12 | 13 | # On Solaris, term.h requires curses.h 14 | --- python/configure.orig 2022-07-25 16:01:48.851407214 -0400 15 | +++ python/configure 2022-07-25 16:02:15.063528406 -0400 16 | @@ -15968,10 +15968,6 @@ 17 | 18 | # first curses header check 19 | ac_save_cppflags="$CPPFLAGS" 20 | -if test "$cross_compiling" = no; then 21 | - CPPFLAGS="$CPPFLAGS -I/usr/include/ncursesw" 22 | -fi 23 | - 24 | for ac_header in curses.h ncurses.h 25 | do : 26 | as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` 27 | --- python/setup.py.orig 2022-07-25 16:07:43.893049824 -0400 28 | +++ python/setup.py 2022-07-25 16:07:48.429070837 -0400 29 | @@ -955,8 +955,6 @@ 30 | panel_library = 'panel' 31 | if curses_library == 'ncursesw': 32 | curses_defines.append(('HAVE_NCURSESW', '1')) 33 | - if not CROSS_COMPILING: 34 | - curses_includes.append('/usr/include/ncursesw') 35 | # Bug 1464056: If _curses.so links with ncursesw, 36 | # _curses_panel.so must link with panelw. 
37 | panel_library = 'panelw' 38 | -------------------------------------------------------------------------------- /fi/r.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | with packs.pkgs; [ 3 | r 4 | r-irkernel 5 | r-annotationdbi 6 | r-bh 7 | r-bsgenome 8 | r-biasedurn 9 | r-biocinstaller 10 | r-biocmanager 11 | r-cairo 12 | #r-deseq2 #build error 13 | r-dt 14 | #r-diffbind 15 | r-formula 16 | r-gostats 17 | r-gseabase 18 | r-genomicalignments 19 | r-genomicfeatures 20 | r-genomicranges 21 | r-iranges 22 | r-keggrest 23 | r-rbgl 24 | r-rcurl 25 | r-r-methodss3 26 | #r-rsoo 27 | #r-rsutils 28 | r-rcpparmadillo 29 | r-rcppeigen 30 | #r-rcppgsl 31 | r-rhdf5lib 32 | r-rsamtools 33 | r-rtsne 34 | r-tfmpvalue 35 | r-vgam 36 | #r-venndiagram 37 | r-acepack 38 | r-ade4 39 | r-askpass 40 | r-assertthat 41 | r-backports 42 | #r-biomart 43 | #r-biomformat 44 | r-bit64 45 | r-bitops 46 | r-blob 47 | r-catools 48 | r-callr 49 | r-checkmate 50 | r-cli 51 | r-clipr 52 | r-clisymbols 53 | r-crosstalk 54 | r-desc 55 | r-devtools 56 | r-dplyr 57 | r-evaluate 58 | r-formatr 59 | r-fs 60 | r-futile-logger 61 | r-futile-options 62 | r-gdata 63 | r-genefilter 64 | r-getopt 65 | r-ggplot2 66 | r-ggrastr 67 | r-glmnet 68 | r-glue 69 | r-gplots 70 | #r-grimport 71 | r-gridextra 72 | r-gtools 73 | r-hexbin 74 | r-highr 75 | #r-huge 76 | r-hms 77 | r-htmltable 78 | r-httpuv 79 | #r-idr 80 | r-igraph 81 | r-ini 82 | r-jpeg 83 | r-knitr 84 | r-lambda-r 85 | r-later 86 | r-lattice 87 | r-latticeextra 88 | r-lazyeval 89 | r-limma 90 | r-markdown 91 | r-matrixstats 92 | r-memoise 93 | r-mime 94 | r-miniui 95 | r-multtest 96 | #r-nabor 97 | r-nloptr 98 | #r-pdsh 99 | r-pheatmap 100 | #r-phyloseq 101 | r-pkgbuild 102 | r-pkgconfig 103 | r-pkgload 104 | r-plogr 105 | r-plotly 106 | r-png 107 | r-polynom 108 | r-powerlaw 109 | r-preprocesscore 110 | #r-preseqr 111 | r-processx 112 | r-progress 113 | r-promises 114 | r-ps 115 | #r-pulsar 116 | 
r-purrr 117 | r-randomforest 118 | r-rcmdcheck 119 | r-readr 120 | r-remotes 121 | r-rlang 122 | r-rprojroot 123 | r-rstudioapi 124 | r-rtracklayer 125 | r-segmented 126 | r-seqinr 127 | r-sessioninfo 128 | r-seurat 129 | r-sf 130 | r-shape 131 | r-shiny 132 | r-snow 133 | r-sourcetools 134 | r-sys 135 | r-terra 136 | r-tibble 137 | r-tidyr 138 | r-tidyselect 139 | r-units 140 | r-viridis 141 | r-whisker 142 | r-xfun 143 | r-xopen 144 | r-xtable 145 | r-yaml 146 | r-zlibbioc 147 | #rstudio? 148 | ] 149 | -------------------------------------------------------------------------------- /fi/repo/packages/blender/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | import os 7 | from spack.package import * 8 | 9 | 10 | class Blender(CMakePackage): 11 | """Blender is the free and open source 3D creation suite. 
12 | It supports the entirety of the 3D pipeline-modeling, 13 | rigging, animation, simulation, rendering, compositing and 14 | motion tracking, even video editing and game creation.""" 15 | 16 | homepage = "https://www.blender.org/" 17 | url = "http://download.blender.org/source/blender-2.79b.tar.gz" 18 | 19 | version('2.92.0', 20 | url='https://download.blender.org/source/blender-2.92.0.tar.xz', 21 | sha256='e791cfc403292383577c3c8ce2cd34e5aa2cd8da0a7483041049a1609ddb4595') 22 | version('2.80', sha256='cd9d7e505c1f6e63a4f72366ed04d446859977eeb34cde21283aaea6a304a5c0') 23 | version('2.79b', sha256='4c944c304a49e68ac687ea06f5758204def049b66dc211e1cffa1857716393bc') 24 | 25 | variant('cycles', default=False, description='Build with cycles support') 26 | variant('blender', default=True, description='disable to build only the blender player') 27 | variant('player', default=True, description='Build Player') 28 | variant('ffmpeg', default=False, description='Enable FFMPeg Support') 29 | variant('headless', default=False, description='Build without graphical support (renderfarm, server mode only)') 30 | variant('llvm', default=False, description='Necessary for OSL.') 31 | variant('ocio', default=False, description='Currently broken due to conflicting python') 32 | variant('opensubdiv', default=False, description='Build with opensubdiv support') 33 | variant('jemalloc', default=True) 34 | 35 | # https://developer.blender.org/diffusion/B/browse/blender-v2.92-release/build_files/build_environment/cmake/versions.cmake 36 | depends_on('python@3.5:', when="@:2.79b") 37 | depends_on('python@3.7:', when="@2.80:") 38 | depends_on('python@3.7.7:', when="@2.92.0:") 39 | 40 | depends_on('py-numpy', when="@2.80:") 41 | depends_on('py-numpy@1.17.5:', when='@2.92.0:') 42 | 43 | depends_on('glew') 44 | depends_on('glew@1.13.0:', when='@2.92.0:') 45 | 46 | #depends_on('opengl') 47 | # depends_on('openglu') 48 | depends_on('gl') 49 | depends_on('glu') 50 | depends_on('glx') 51 | 52 | 
depends_on('libpng') 53 | depends_on('libpng@1.6.37:', when='@2.92.0:') 54 | 55 | depends_on('libjpeg') 56 | depends_on('libjpeg@2.0.4:', when='@2.92.0:') 57 | 58 | depends_on('openjpeg') 59 | depends_on('openjpeg@2.3.1:', when='@2.92.0:') 60 | 61 | # depends_on('boost@1.49:1.69') 62 | depends_on('boost@1.70.0:', when='@2.92.0:') 63 | 64 | depends_on('openimageio', when='+cycles') 65 | depends_on('openimageio@2.1.15.0:', when='@2.92.0: +cycles') 66 | 67 | # Upper bound per: https://developer.blender.org/T54779 68 | depends_on('ffmpeg@3.2.1:3.999', when='@:2.79b+ffmpeg') 69 | depends_on('ffmpeg@3.2.1:', when='@2.80:+ffmpeg') 70 | depends_on('ffmpeg@4.2.3:', when='@2.92.0:+ffmpeg') 71 | 72 | # depends_on('opencolorio@1.0:', when='+ocio') 73 | 74 | depends_on('llvm@3.0:', when='+llvm') 75 | depends_on('llvm@9.0.1:', when='@2.92.0:+llvm') 76 | # depends_on('openshadinglanguage') 77 | # depends_on('openvdb@3.1:') 78 | 79 | # FIXME: this is only temporarily commented out. needs to be fixed 80 | # depends_on('freetype') 81 | depends_on('freetype@2.10.2:', when='@2.92.0:') 82 | 83 | depends_on('libuuid') 84 | depends_on('jemalloc', when='+jemalloc') 85 | depends_on('ilmbase') 86 | 87 | depends_on('opensubdiv+openmp', when='+opensubdiv') 88 | depends_on('opensubdiv@3.4.3:', when='@2.92.0:+opensubdiv') 89 | 90 | #depends_on('cuda@10.1.0:10.1.999', when='+cycles', type=('link','run')) 91 | depends_on('cuda@11.0:', when='@2.92.0:+cycles', type=('link','run')) 92 | # FIXME: The version of GCC should probably be the version of GCC that is actually 93 | # compiling blender, not hardcoding the version that the package creater is using. 
94 | # depends_on('gcc@7.4.0', when='+cycles', type=('run')) 95 | 96 | 97 | # Dependencies for 2.92.0 98 | depends_on('zlib@1.2.11:', when='@2.92.0:') 99 | depends_on('openal-soft@1.20.1:', when='@2.92.0:') 100 | depends_on('c-blosc@1.5.0:', when='@2.92.0:') 101 | # depends_on('pthreads@3.0.0:', when='@2.92.0:') 102 | # depends_on('openexr@2.4.0:', when='@2.92.0:') 103 | # depends_on('freeglut@3.0.0:', when='@2.92.0:') 104 | depends_on('alembic@1.7.12:', when='@2.92.0:') 105 | # depends_on('glfw@3.1.2:', when='@2.92.0:') 106 | # depends_on('sdl@2.0.12:', when='@2.92.0:') 107 | # depends_on('opencollada@1.6.68:', when='@2.92.0:') 108 | # depends_on('opencolorio@1.1.1:', when='@2.92.0:') 109 | depends_on('libtiff@4.1.0:', when='@2.92.0:') 110 | # depends_on('openshadinglanguage@1.10.10:', when='@2.92.0:') 111 | # depends_on('tbb@2019_u9:', when='@2.92.0:') 112 | # depends_on('openvdb@7.0.0:', when='@2.92.0:') 113 | # depends_on('idna@2.9:', when='@2.92.0:') 114 | # depends_on('lame@3.100:', when='@2.92.0:') 115 | depends_on('libogg@1.3.4:', when='@2.92.0:') 116 | depends_on('libvorbis@1.3.6:', when='@2.92.0:') 117 | depends_on('libtheora@1.1.1:', when='@2.92.0:') 118 | depends_on('flac@1.3.3:', when='@2.92.0:') 119 | # depends_on('vpx@1.8.2:', when='@2.92.0:') 120 | depends_on('opus@1.3.1:', when='@2.92.0:') 121 | # depends_on('xvidcore@1.3.7:', when='@2.92.0:') 122 | depends_on('fftw@3.3.8:', when='@2.92.0:') 123 | depends_on('libiconv@1.16:', when='@2.92.0:') 124 | depends_on('libsndfile@1.0.28:', when='@2.92.0:') 125 | # sndfile 126 | 127 | # FIXME: ~ispc is temporary fix for 128 | # ispc requires llvm variant ~libcxx, but spec asked for +libcxx 129 | depends_on('embree@3.10.0:~ispc', when='@2.92.0:') 130 | 131 | depends_on('pugixml@1.10:', when='@2.92.0:') 132 | 133 | depends_on('gmp@6.2.0:', when='@2.92.0:') 134 | 135 | def setup_run_environment(self, env): 136 | env.prepend_path('PATH', os.path.dirname(self.compiler.cc)) 137 | 138 | def cmake_args(self): 139 | 
spec = self.spec 140 | args = [] 141 | 142 | python_exe = spec['python'].command.path 143 | python_lib = spec['python'].libs[0] 144 | python_include_dir = spec['python'].headers.directories[0] 145 | 146 | args.append('-DPYTHON_EXECUTABLE={0}'.format(python_exe)) 147 | args.append('-DPYTHON_LIBRARY={0}'.format(python_lib)) 148 | args.append('-DPYTHON_INCLUDE_DIR={0}'.format(python_include_dir)) 149 | args.append('-DPYTHON_VERSION={0}'.format(spec['python'].version.up_to(2))) 150 | 151 | args.append('-DWITH_INSTALL_PORTABLE=NO') 152 | 153 | args.append('-DCMAKE_CXX_FLAGS=-I{0}/include/OpenEXR'.format(spec['ilmbase'].prefix)) 154 | 155 | if '@2.8:' in spec: 156 | args.append( 157 | '-DPYTHON_NUMPY_PATH:PATH={0}/python{1}/site-packages'.format( 158 | spec['py-numpy'].prefix.lib, 159 | spec['python'].version.up_to(2))) 160 | args.append( 161 | '-DPYTHON_NUMPY_INCLUDE_DIRS:PATH={0}/python{1}/site-packages/numpy/core/include'.format( 162 | spec['py-numpy'].prefix.lib, 163 | spec['python'].version.up_to(2))) 164 | 165 | if '+opensubdiv' in spec: 166 | args.append('-DWITH_OPENSUBDIV:BOOL=ON') 167 | else: 168 | args.append('-DWITH_OPENSUBDIV:BOOL=OFF') 169 | 170 | if '~cycles' in spec: 171 | args.append('-DWITH_CYCLES:BOOL=OFF') 172 | 173 | if '~blender' in spec: 174 | args.append('-DWITH_BLENDER:BOOL=OFF') 175 | # UNTESTED 176 | 177 | if '+ffmpeg' in spec: 178 | args.append('-DWITH_CODEC_FFMPEG:BOOL=ON') 179 | 180 | if '+headless' in spec: 181 | args.append('-DWITH_HEADLESS:BOOL=OFF') 182 | 183 | if '+llvm' in spec: 184 | args.append('-DWITH_LLVM:BOOL=ON') 185 | 186 | if '+player' in spec: 187 | args.append('-DWITH_PLAYER:BOOL=ON') 188 | 189 | # >> 106 CMake Error at CMakeLists.txt:924 (message): 190 | # 107 WITH_MOD_OCEANSIM requires WITH_FFTW3 to be ON 191 | if self.spec.satisfies('@2.92.0:'): 192 | args.append('-DWITH_MOD_OCEANSIM:BOOL=OFF') 193 | 194 | return args 195 | -------------------------------------------------------------------------------- 
/fi/repo/packages/disBatch/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack.package import * 7 | import os 8 | 9 | class Disbatch(PythonPackage): 10 | """Distributed processing of a batch of tasks""" 11 | 12 | homepage = "https://github.com/flatironinstitute/disBatch" 13 | git = "https://github.com/flatironinstitute/disBatch.git" 14 | 15 | version('2.5', tag='2.5', commit='abee40342f1ecb5e9b801744d860b5b1414d4b2c', submodules=True) 16 | version('2.0', tag='2.0', submodules=True) 17 | version('1.4', tag='1.4', submodules=True) 18 | 19 | depends_on('py-setuptools', type='build', when='@2:') 20 | depends_on('py-kvsstcp', type='run', when='@:2.0') 21 | 22 | @run_after('install') 23 | def create_symlink(self): 24 | if self.spec.satisfies('@1'): 25 | script_source = os.path.join(self.prefix.bin, 'disBatch.py') 26 | script_dest = os.path.join(self.prefix.bin, 'disBatch') 27 | os.symlink(script_source, script_dest) 28 | 29 | script = Executable(script_source) 30 | script('--fix-paths') 31 | -------------------------------------------------------------------------------- /fi/repo/packages/idl/package.py: -------------------------------------------------------------------------------- 1 | import os 2 | import stat 3 | 4 | from spack.package import * 5 | 6 | class Idl(Package): 7 | """IDL Software: Interactive Data Visulation. 8 | 9 | Note: IDL is a licensed software. 
You will also need an existing 10 | downloaded tarball of IDL in your current directory or in a 11 | spack mirror in order to install.""" 12 | 13 | homepage = "https://www.harrisgeospatial.com/Software-Technology/IDL" 14 | manual_download = False 15 | url = "file://{0}/idl8.8.3-linux.tar.gz".format("/mnt/sw/pkg") 16 | 17 | version("8.8.3", sha256="5de8a95b1c552a9e3606848e426450268a79b785dbcd246aebfa3f1467f181c7") 18 | version("8.9", sha256="55c10a8ffc48d6f6cb219660dfc3f9b49010310cb9977eb0fd26f20e6e3ea655") 19 | version("9.0", sha256="8faf7ec8091ee77e6297f91a823e5c6216f2ab90909071955bec008c268b0f62") 20 | 21 | license_required = True 22 | 23 | @run_before("install") 24 | def pre_install(self): 25 | os.chmod("silent/idl_answer_file", stat.S_IRUSR | stat.S_IWUSR) 26 | # for version >= 9.0, revert default prefix: 27 | filter_file("/usr/local/nv5", "/usr/local/harris", "silent/idl_answer_file") 28 | 29 | def install(self, spec, prefix): 30 | # replace default install dir to self.prefix by editing answer file 31 | filter_file("/usr/local/harris", prefix, "silent/idl_answer_file") 32 | 33 | # execute install script 34 | install_script = Executable("./install.sh") 35 | install_script("-s", input="silent/idl_answer_file") 36 | 37 | def setup_run_environment(self, env): 38 | # set necessary environment variables 39 | env.prepend_path("EXELIS_DIR", self.prefix) 40 | env.prepend_path("IDL_DIR", self.prefix.idl) 41 | 42 | # add bin to path 43 | env.prepend_path("PATH", self.prefix.idl.bin) 44 | -------------------------------------------------------------------------------- /fi/repo/packages/libjansson/package.py: -------------------------------------------------------------------------------- 1 | from spack.package import * 2 | 3 | class Libjansson(CMakePackage): 4 | """libjansson""" 5 | 6 | homepage = "https://digip.org/jansson/" 7 | url = "http://digip.org/jansson/releases/jansson-2.13.tar.gz" 8 | 9 | version('2.13.1', 
sha256='f4f377da17b10201a60c1108613e78ee15df6b12016b116b6de42209f47a474f') 10 | -------------------------------------------------------------------------------- /fi/repo/packages/paraview/paraview_wrapper.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | pvpython_vers=$(pvpython -c 'import platform; print(".".join(platform.python_version_tuple()[0:2]))') 4 | python_vers=$(python3 -c 'import platform; print(".".join(platform.python_version_tuple()[0:2]))') 5 | 6 | if test "$pvpython_vers" != "$python_vers"; then 7 | echo "Python3 version and paraview python version don't match. Not loading extra python libs into paraview..." 8 | echo "Load default python module and optional relevant virtual environment to extend paraview" 9 | else 10 | export PYTHONPATH=$(python3 < 3 | Date: Sat, 4 Sep 2021 15:20:44 +0900 4 | Subject: [PATCH] Fix to solve linking problem with gcc-10 5 | 6 | --- 7 | client.c | 2 +- 8 | fun_times.h | 2 +- 9 | interleaving.h | 2 +- 10 | 3 files changed, 3 insertions(+), 3 deletions(-) 11 | 12 | diff --git a/client.c b/client.c 13 | index 5525eb9..cd4e1ff 100644 14 | --- a/client.c 15 | +++ b/client.c 16 | @@ -764,7 +764,7 @@ void transfer_stuff(int64_t s, int64_t c, int64_t timestep) { 17 | } 18 | 19 | void do_projections(void) { 20 | - int64_t i, j, idx, dir; 21 | + int64_t i, j, idx, dir=0; 22 | assert(BOX_SIZE > 0); 23 | for (i=0; i] - assert 190388 == 190386 159 | doCheck = false; 160 | }); 161 | }; 162 | }; 163 | 164 | python311 = python311.override { 165 | packageOverrides = self: super: { 166 | numpy = super.numpy.overridePythonAttrs (old: { 167 | # FAIL: TestAccuracy.test_validate_transcendentals 168 | doCheck = false; 169 | }); 170 | }; 171 | }; 172 | 173 | python312 = python312.override { 174 | packageOverrides = self: super: { 175 | numpy = super.numpy.overridePythonAttrs (old: { 176 | # FAIL: TestAccuracy.test_validate_transcendentals 177 | doCheck = false; 178 | }); 179 | }; 180 | 
}; 181 | 182 | pipewire = (pipewire.override { 183 | rocSupport = false; # temporarily workaround sox broken download (though probably don't need it anyway) 184 | }).overrideAttrs (old: { 185 | buildInputs = old.buildInputs ++ [libopus]; 186 | }); 187 | 188 | pulseaudio = pulseaudio.override { 189 | bluetoothSupport = false; 190 | }; 191 | 192 | blender = (blender.override { 193 | #tbb = tbb_2021_8; 194 | }).overrideAttrs (old: { 195 | cmakeFlags = old.cmakeFlags ++ ["-DWITH_OPENAL=OFF"]; 196 | }); 197 | 198 | SDL = SDL.overrideAttrs (old: { 199 | # this is already patched into configure.in, but not configure 200 | postConfigure = '' 201 | sed -i '/SDL_VIDEO_DRIVER_X11_CONST_PARAM_XDATA32/s/.*/#define SDL_VIDEO_DRIVER_X11_CONST_PARAM_XDATA32 1/' include/SDL_config.h 202 | ''; 203 | }); 204 | 205 | umockdev = umockdev.overrideAttrs (old: { 206 | doCheck = false; # static-code unknown failure 207 | }); 208 | 209 | libpsl = libpsl.overrideAttrs (old: { 210 | doCheck = false; # valgrind unknown instruction 211 | }); 212 | 213 | haskell = haskell // { 214 | packages = haskell.packages // { 215 | ghc8107Binary = haskell.packages.ghc8107Binary.override { 216 | ghc = haskell.packages.ghc8107Binary.ghc.overrideAttrs (old: { 217 | postUnpack = old.postUnpack + '' 218 | patchShebangs ghc-${old.version}/inplace/bin 219 | ''; 220 | }); 221 | }; 222 | }; 223 | packageOverrides = self: super: { 224 | crypton = super.crypton.overrideAttrs (old: { 225 | # FAIL: Ed448 verify sig? 226 | doCheck = false; 227 | }); 228 | cryptonite = super.cryptonite.overrideAttrs (old: { 229 | # FAIL: Ed448 verify sig? 
230 | doCheck = false; 231 | }); 232 | crypton-x509-validation = super.crypton-x509-validation.overrideAttrs (old: { 233 | doCheck = false; 234 | }); 235 | http2 = super.http2.overrideAttrs (old: { 236 | # tests hang 237 | doCheck = false; 238 | }); 239 | tls = super.tls.overrideAttrs (old: { 240 | doCheck = false; 241 | }); 242 | }; 243 | }; 244 | 245 | jdupes = callPackage ./jdupes.nix { }; 246 | 247 | rapidjson = rapidjson.overrideAttrs (old: { 248 | doCheck = false; # valgrind unknown instruction 249 | }); 250 | 251 | vamp-plugin-sdk = vamp-plugin-sdk.overrideAttrs (old: { 252 | src = fetchFromGitHub { 253 | owner = "vamp-plugins"; 254 | repo = "vamp-plugin-sdk"; 255 | rev = "vamp-plugin-sdk-v${old.version}"; 256 | hash = "sha256-5jNA6WmeIOVjkEMZXB5ijxyfJT88alVndBif6dnUFdI="; 257 | }; 258 | }); 259 | } 260 | -------------------------------------------------------------------------------- /nixpkgs/sssd/nss-client.nix: -------------------------------------------------------------------------------- 1 | { stdenv 2 | , fetchFromGitHub 3 | , autoreconfHook 4 | , pkg-config 5 | , glibc, pam, openldap, libkrb5, dnsutils, cyrus_sasl, nss 6 | , popt, talloc, tdb, tevent, ldb, ding-libs, pcre2, c-ares 7 | , glib, dbus 8 | , jansson, libunistring, openssl, p11-kit 9 | }: 10 | 11 | let 12 | version = "2.9.4"; 13 | in 14 | 15 | stdenv.mkDerivation rec { 16 | name = "sssd-nss-client-${version}"; 17 | 18 | src = fetchFromGitHub { 19 | owner = "SSSD"; 20 | repo = "sssd"; 21 | rev = "refs/tags/${version}"; 22 | hash = "sha256-VJXZndbmC6mAVxzvv5Wjb4adrQkP16Rt4cgjl4qGDIc="; 23 | }; 24 | 25 | # libnss_sss.so does not in fact use any of these -- they're just needed for configure 26 | nativeBuildInputs = [ autoreconfHook pkg-config 27 | pam openldap libkrb5 dnsutils cyrus_sasl nss 28 | popt tdb tevent ldb ding-libs pcre2 c-ares 29 | glib dbus 30 | jansson p11-kit 31 | ]; 32 | buildInputs = [ 33 | talloc 34 | openssl libunistring 35 | ]; 36 | 37 | preConfigure = '' 38 | 
configureFlagsArray=( 39 | --prefix=$out 40 | --localstatedir=/var 41 | --sysconfdir=/etc 42 | --with-os=redhat 43 | --with-nscd=${glibc.bin}/sbin/nscd 44 | --with-ldb-lib-dir=$out/modules/ldb 45 | --disable-cifs-idmap-plugin 46 | --without-autofs 47 | --without-kcm 48 | --without-libnl 49 | --without-libwbclient 50 | --without-manpages 51 | --without-nfsv4-idmapd-plugin 52 | --without-python2-bindings 53 | --without-python3-bindings 54 | --without-samba 55 | --without-secrets 56 | --without-selinux 57 | --without-semanage 58 | --without-ssh 59 | --without-sudo 60 | --without-oidc-child 61 | ) 62 | ''; 63 | 64 | enableParallelBuilding = true; 65 | 66 | buildFlags = [ "libnss_sss.la" ]; 67 | installTargets = [ "install-nsslibLTLIBRARIES" ]; 68 | 69 | } 70 | -------------------------------------------------------------------------------- /nixpkgs/stdenv.nix: -------------------------------------------------------------------------------- 1 | { pkgs 2 | }: 3 | 4 | # Bootstrap a new stdenv that includes our nss_sss in glibc 5 | 6 | let 7 | glibc = pkgs.glibc.overrideDerivation (old: { 8 | postInstall = old.postInstall + '' 9 | ln -s ${pkgs.nss_sss}/lib/*.so.* $out/lib 10 | ''; 11 | }); 12 | binutils = pkgs.binutils.override { 13 | libc = glibc; 14 | }; 15 | gcc = pkgs.gcc.override { 16 | bintools = binutils; 17 | libc = glibc; 18 | }; 19 | in 20 | 21 | pkgs.stdenv.override { 22 | cc = gcc; 23 | overrides = self: super: { 24 | inherit glibc binutils gcc; 25 | }; 26 | allowedRequisites = pkgs.stdenv.allowedRequisites ++ 27 | [ glibc.out glibc.dev glibc.bin binutils pkgs.nss_sss 28 | pkgs.talloc pkgs.libunistring pkgs.mpdecimal pkgs.mailcap pkgs.libxcrypt pkgs.gdbm.lib pkgs.tzdata pkgs.expat pkgs.ncurses pkgs.libffi pkgs.python3 pkgs.readline 29 | pkgs.sqlite.out pkgs.openssl_3_3.out 30 | ]; 31 | } 32 | -------------------------------------------------------------------------------- /packs/capture.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/sh -ue 2 | "$@" > $out 3 | -------------------------------------------------------------------------------- /packs/default.nix: -------------------------------------------------------------------------------- 1 | let 2 | 3 | lib = import ./lib.nix; 4 | 5 | versionsUnion = l: 6 | if builtins.isList l then 7 | let l' = lib.remove null l; 8 | in lib.when (l' != []) (builtins.concatStringsSep "," l') 9 | else l; 10 | 11 | defaultSpackConfig = { 12 | bootstrap = { enable = false; }; 13 | config = { 14 | locks = false; 15 | install_tree = { 16 | root = "/rootless-spack"; 17 | }; 18 | misc_cache = "$tempdir/cache"; /* overridden by spackCache (except for extern spackSrc) */ 19 | }; 20 | compilers = []; 21 | }; 22 | 23 | /* fill in package descriptor with defaults */ 24 | fillDesc = name: /* simple name of package */ 25 | { namespace ? "builtin" 26 | , dir 27 | , version ? [] /* list of available concrete versions */ 28 | , variants ? {} /* set of variant to (bool, string, or set of opt to bool) */ 29 | , patches ? [] /* list of patches to apply (after those in spack) */ 30 | , depends ? {} /* dependencies, set of name to {deptype; Constraints} */ 31 | , conflicts ? [] /* list of conflict messages (package is not buildable if non-empty) */ 32 | , provides ? {} /* set of provided virtuals to (version ranges or unioned list thereof) */ 33 | , paths ? {} /* set of tools to path prefixes */ 34 | , build ? {} /* extra build variables to set */ 35 | , compiler_spec ?
name 36 | }: { 37 | inherit name namespace dir version variants patches paths build compiler_spec; 38 | depends = { 39 | compiler = { 40 | deptype = ["build" "link"]; 41 | }; 42 | } // builtins.mapAttrs (n: lib.prefsIntersection) depends; 43 | provides = builtins.mapAttrs (n: versionsUnion) provides; 44 | conflicts = lib.remove null conflicts; 45 | }; 46 | 47 | patchDesc = patch: gen: 48 | if builtins.isFunction gen then 49 | spec: let desc = gen spec; in 50 | desc // lib.applyOptional (lib.applyOptional patch spec) desc 51 | else lib.applyOptional patch gen; 52 | patchRepo = patch: repo: repo // 53 | builtins.mapAttrs (name: f: patchDesc f (repo.${name} or null)) patch; 54 | 55 | prefsUpdate = let 56 | scalar = a: b: b; 57 | updaters = { 58 | system = scalar; 59 | os = scalar; 60 | label = a: b: "${a}.${b}"; 61 | spackSrc = scalar; 62 | spackConfig = lib.recursiveUpdate; 63 | spackPython = scalar; 64 | spackEnv = a: b: a // b; 65 | nixpkgsSrc = scalar; 66 | verbose = scalar; 67 | repoPatch = a: b: a // b; 68 | global = lib.prefsUpdate; 69 | package = a: b: a // b; 70 | }; 71 | in 72 | lib.mergeWithKeys (k: updaters.${k}); 73 | 74 | spackTarget = builtins.replaceStrings ["-"] ["_"]; 75 | 76 | packsWithPrefs = 77 | { system ? builtins.currentSystem 78 | , os ? "unknown" 79 | , label ? "packs" 80 | , spackSrc ? {} 81 | , spackConfig ? {} 82 | , spackPython ? "/usr/bin/python3" 83 | , spackEnv ? { 84 | PATH = "/bin:/usr/bin"; 85 | } 86 | , nixpkgsSrc ? null 87 | , nixpkgsOverlays ? [] 88 | , repos ? [ ../spack/repo ] 89 | , repoPatch ? {} 90 | , global ? {} 91 | , package ? 
{} 92 | } @ packPrefs: 93 | lib.fix (packs: with packs; { 94 | inherit lib; 95 | prefs = packPrefs; 96 | inherit system os label; 97 | splitSystem = lib.splitRegex "-" system; 98 | target = builtins.head splitSystem; 99 | platform = builtins.elemAt splitSystem 1; 100 | withPrefs = p: packsWithPrefs (prefsUpdate packPrefs 101 | ({ label = "withPrefs"; } // p)); 102 | 103 | spack = if builtins.isString spackSrc then spackSrc else 104 | builtins.fetchGit ({ name = "spack"; url = "https://github.com/spack/spack"; } // spackSrc); 105 | 106 | makeSpackConfig = import ../spack/config.nix packs; 107 | 108 | inherit spackPython spackEnv; 109 | spackConfig = makeSpackConfig (lib.recursiveUpdate defaultSpackConfig packPrefs.spackConfig); 110 | 111 | spackNixLib = derivation (spackEnv // { 112 | name = "nix-spack-py"; 113 | inherit system; 114 | builder = ../spack/install.sh; 115 | src = ../spack/nixpack.py; 116 | }); 117 | 118 | /* common attributes for running spack */ 119 | spackBuilder = attrs: builtins.removeAttrs (derivation (spackEnv // { 120 | inherit (packs) system os spackConfig; 121 | builder = spackPython; 122 | PYTHONPATH = "${spackNixLib}:${spack}/lib/spack:${spack}/lib/spack/external:${spack}/lib/spack/external/_vendoring"; 123 | LC_ALL = "en_US.UTF-8"; # work around spack bugs processing log files 124 | repos = if attrs ? 
withRepos 125 | then lib.when attrs.withRepos repos 126 | else map (r: (builtins.path { name="repo.yaml"; path="${r}/repo.yaml"; })) repos; 127 | spackCache = lib.when (packPrefs.spackCache or true) (if packPrefs.spackCacheRepos or true && attrs.withRepos or false then spackCacheRepos else spackCache); 128 | } // attrs)) ["PYTHONPATH" "PATH" "LC_ALL" "spackConfig" "spackCache" "passAsFile"]; 129 | 130 | /* pre-generated spack repo index cache (both with and without overlay repos) */ 131 | makeSpackCache = withRepos: lib.when (builtins.isAttrs spackSrc) 132 | (spackBuilder { 133 | name = "spack-cache" + (if withRepos then "-repos" else ""); 134 | args = [../spack/cache.py]; 135 | spackCache = null; 136 | inherit withRepos; 137 | }); 138 | 139 | spackCache = makeSpackCache false; 140 | spackCacheRepos = makeSpackCache true; 141 | 142 | isVirtual = name: builtins.isList repo.${name} or null; 143 | 144 | /* look up a package requirement and resolve it with prefs */ 145 | getResolver = name: pref: builtins.addErrorContext "getting package ${label}.${name}" 146 | (if pref == {} 147 | then pkgs.${name} 148 | else resolvers.${name} (lib.prefsUpdate (getPackagePrefs name) pref)); 149 | 150 | /* look up a package with default prefs */ 151 | getPackage = arg: 152 | if arg == null then 153 | null 154 | else if lib.isPkg arg then 155 | arg 156 | else if builtins.isString arg then 157 | getResolver arg {} 158 | else if arg ? name then 159 | getResolver arg.name (builtins.removeAttrs arg ["name"]) 160 | else throw "invalid package"; 161 | 162 | /* get the list of packages a given package might depend on (from the repo, makes assumptions about repo structure) */ 163 | getPossibleDepends = name: 164 | (lib.applyOptional repo.${name} (throw "getPossibleDepends ${name}")).depends or {}; 165 | 166 | /* fill in package prefs with defaults */ 167 | fillPrefs = 168 | { version ? null 169 | , variants ? {} 170 | , flags ? {} 171 | , patches ? [] 172 | , depends ? {} 173 | , extern ? 
null 174 | , provides ? {} 175 | , tests ? false 176 | , fixedDeps ? false 177 | , resolver ? null 178 | , target ? packs.target 179 | , paths ? {} 180 | , build ? {} # only used by builder 181 | , compiler_spec ? null 182 | , verbose ? false # only used by builder 183 | } @ prefs: 184 | prefs // { 185 | inherit version variants flags patches depends extern tests provides fixedDeps target paths; 186 | resolver = deptype: name: let r = lib.applyOptional (lib.applyOptional resolver deptype) name; in 187 | if builtins.isFunction r then r 188 | else (lib.coalesce r packs).getResolver name; 189 | }; 190 | 191 | getPackagePrefs = name: lib.prefsUpdate global package.${name} or {}; 192 | 193 | /* resolve a named package descriptor into a concrete spec (concretize) 194 | 195 | Resolving a (non-virtual) package requires these sources of information: 196 | - R = repo.${name}: package desc from repo 197 | - G: global user prefs = packPrefs.(package.${name}) 198 | - P: specific instance prefs = arg 199 | We describe this resolution as R(G // P). 200 | Resolving each dependency x involves these sources of information: 201 | - R': dependency desc from repo 202 | - C = R.depends.${x}: constraints from parent desc 203 | - G': global user prefs 204 | - P' = (G // P).depends.${x}: inherited prefs from parent 205 | with fixedDeps = true, we want R'(G' // P'), checked against C. 206 | with fixedDeps = false, we want R'((G' // P') intersect C) 207 | 208 | Resolving a virtual dependency is a bit different: 209 | - R': provider list from repo 210 | - C = R.depends.${x}: virtual version from parent desc 211 | - G': global provider prefs = packPrefs.package.${name} 212 | - P': inherited provider prefs from parent. 213 | If G' // P' is not null or empty, it should be a (list of) specific 214 | package names or { name; desc...}, and these are the candidates. 215 | Otherwise, R' are the candidates. 216 | with fixedDeps = true, resolve the first candidate and check it against C.
217 | with fixedDeps = false, try each candidate until one matches C. 218 | */ 219 | resolvePackage = pname: 220 | let 221 | /* combining preferences with descriptor to get concrete package spec */ 222 | resolveEach = resolver: arg: pref: 223 | /* let unknowns = builtins.removeAttrs pref (builtins.attrNames arg); in 224 | (if unknowns == {} then lib.id else builtins.trace "Warning: ${pname}: unknown prefs: ${toString (builtins.attrNames unknowns)}") */ 225 | (builtins.mapAttrs (n: a: resolver n a pref.${n} or null) arg); 226 | resolveVersion = arg: pref: 227 | /* special version matching: a (list of intersected) version constraint */ 228 | let v = builtins.filter (v: lib.versionMatches v pref) arg; 229 | in if v == [] 230 | then throw "${pname}: no version matching ${toString pref} from ${builtins.concatStringsSep "," arg}" 231 | else builtins.head v; 232 | resolveVariants = resolveEach (vname: arg: pref: 233 | let err = throw "${pname} variant ${vname}: invalid ${builtins.toJSON pref} (for ${builtins.toJSON arg})"; in 234 | if pref == null then 235 | /* no preference: use default */ 236 | if builtins.isList arg then builtins.head arg else arg 237 | else if builtins.isList arg then 238 | /* list of options */ 239 | if builtins.elem (lib.fromList pref) arg 240 | then pref 241 | else err 242 | else if builtins.isAttrs arg then 243 | /* multi */ 244 | let r = arg // ( 245 | if builtins.isAttrs pref then pref else 246 | if builtins.isList pref then 247 | builtins.listToAttrs (map (name: { inherit name; value = true; }) pref) 248 | else err); in 249 | if builtins.attrNames r == builtins.attrNames arg && builtins.all builtins.isBool (builtins.attrValues r) then 250 | r 251 | else err 252 | else if builtins.typeOf arg == builtins.typeOf pref then 253 | /* a simple value: any value of that type */ 254 | pref 255 | else err); 256 | 257 | 258 | /* these need to happen in parallel due to depend conditionals being evaluated recursively */ 259 | resolveDepends = depends: 
pprefs: 260 | resolveEach (dname: dep: pref: let 261 | deptype = (t: if pprefs.tests then t else lib.remove "test" t) dep.deptype or []; 262 | res = pprefs.resolver deptype dname; 263 | clean = d: builtins.removeAttrs d ["deptype"]; 264 | virtualize = { deptype, version ? ":" }: 265 | { provides = { "${dname}" = version; }; }; 266 | dep' = lib.mapNullable (if isVirtual dname then virtualize else clean) dep; 267 | 268 | /* dynamic */ 269 | isr = builtins.elem "link" deptype; 270 | dpref = lib.prefsIntersect dep' pref; 271 | /* for link dependencies with dependencies in common with ours, we propagate our prefs down. 272 | this doesn't entirely ensure consistent linking, but helps in many common cases. */ 273 | pdeps = builtins.intersectAttrs (getPossibleDepends dname) pprefs.depends; 274 | rdeps = lib.prefsIntersect dpref { depends = builtins.mapAttrs (n: lib.mapNullable clean) pdeps; }; 275 | dpkg = res (if isr then rdeps else dpref); 276 | 277 | /* static */ 278 | spkg = res (lib.coalesce pref {}); 279 | 280 | pkg = (if pprefs.fixedDeps then spkg else dpkg) // { inherit deptype; }; 281 | in lib.when (deptype != []) 282 | (if lib.specMatches pkg.spec dep' then pkg else 283 | throw "${pname} dependency ${dname}: package ${lib.specToString spkg.spec} does not match dependency constraints ${builtins.toJSON dep'}")) 284 | depends pprefs.depends; 285 | 286 | /* create a package from a spec */ 287 | makePackage = gen: desc: spec: pprefs: let 288 | name = "${spec.name}-${spec.version}"; 289 | in if spec.extern != null 290 | then { 291 | inherit name spec; 292 | out = spec.extern; 293 | /* externs don't provide withPrefs */ 294 | } 295 | else spackBuilder ({ 296 | args = [../spack/builder.py]; 297 | inherit name; 298 | verbose = pprefs.verbose or false; 299 | spec = builtins.toJSON spec; 300 | passAsFile = ["spec"]; 301 | gccPkg = pkgs.gcc.spec.package; /* for nullCompiler */ 302 | } // desc.build // pprefs.build or {}) // { 303 | inherit spec; 304 | withPrefs = p: 
resolvePackage pname gen (lib.prefsUpdate pprefs p); 305 | }; 306 | 307 | /* resolve a real package into a spec */ 308 | package = gen: pprefs: builtins.addErrorContext "resolving package ${pname}" (let 309 | desc = fillDesc pname (gen spec); 310 | prefs = fillPrefs pprefs; 311 | spec = { 312 | inherit (desc) name namespace provides; 313 | inherit (prefs) flags extern tests; 314 | target = spackTarget prefs.target; 315 | package = builtins.path { name="repo-pkgs-${pname}"; path=desc.dir; }; 316 | paths = desc.paths // prefs.paths; 317 | version = if prefs.extern != null && lib.versionIsConcrete prefs.version 318 | then prefs.version 319 | else resolveVersion desc.version prefs.version; 320 | patches = desc.patches ++ prefs.patches; 321 | variants = resolveVariants desc.variants prefs.variants; 322 | depends = if prefs.extern != null then {} 323 | else resolveDepends desc.depends prefs; 324 | deptypes = builtins.mapAttrs (n: d: d.deptype or null) spec.depends; 325 | compiler_spec = prefs.compiler_spec or desc.compiler_spec; 326 | }; 327 | in 328 | if lib.isPkg pprefs then pprefs 329 | else if ! (builtins.all 330 | (p: desc.provides.${p} or null != null && lib.versionsOverlap desc.provides.${p} prefs.provides.${p}) 331 | (builtins.attrNames prefs.provides)) then 332 | throw "${pname}: does not provide ${builtins.toJSON prefs.provides}" 333 | else if spec.extern == null && desc.conflicts != [] then 334 | throw "${pname}: has conflicts: ${toString desc.conflicts}" 335 | else makePackage gen desc spec pprefs); 336 | 337 | /* resolving virtual packages, which resolve to a specific package as soon as prefs are applied */ 338 | virtual = providers: prefs: builtins.addErrorContext "resolving virtual ${pname}" (let 339 | provs = if builtins.isAttrs prefs && !(prefs ? 
name) 340 | then map (p: prefs // { name = p; }) providers 341 | else lib.toList prefs; 342 | 343 | /* TODO: really need to try multiple versions too (see: java) */ 344 | opts = map getPackage provs; 345 | check = opt: (builtins.tryEval opt.spec).success; /* catch conflicts/provides */ 346 | choice = if prefs.fixedDeps or global.fixedDeps or false /* what if prefs is a list? */ 347 | then opts 348 | else builtins.filter check opts; 349 | in if choice == [] 350 | then throw "no providers for ${pname}" 351 | else builtins.head choice); 352 | 353 | in desc: 354 | if builtins.isList desc then 355 | virtual desc 356 | else if builtins.isFunction desc then 357 | package desc 358 | else if builtins.isAttrs desc then 359 | package (lib.const desc) 360 | else throw "${pname}: invalid package descriptor ${builtins.typeOf desc}"; 361 | 362 | /* generate nix package metadata from spack repos */ 363 | spackRepo = spackBuilder { 364 | name = "spack-repo.nix"; 365 | args = [../spack/generate.py]; 366 | withRepos = true; 367 | }; 368 | 369 | /* full metadata repo package descriptions */ 370 | repo = patchRepo repoPatch (patchRepo (import ../patch packs) 371 | (import spackRepo { 372 | /* utilities needed by the repo */ 373 | inherit (lib) when versionMatches variantMatches; 374 | inherit platform os target; 375 | })); 376 | 377 | /* partially applied specs, which take preferences as argument */ 378 | resolvers = builtins.mapAttrs resolvePackage repo; 379 | 380 | /* fully applied resolved packages with default preferences */ 381 | pkgs = builtins.mapAttrs (name: res: res (getPackagePrefs name)) resolvers; 382 | 383 | /* debugging to show package spec */ 384 | traceSpecs = builtins.mapAttrs (name: lib.traceSpecTree) pkgs; 385 | 386 | /* use this packs to bootstrap another with the specified compiler */ 387 | withCompiler = compiler: packs.withPrefs { 388 | package = { inherit compiler; }; 389 | }; 390 | 391 | /* create a view (or an "env" in nix terms): a merged set of packages */ 
392 | view = import ../view packs; 393 | 394 | /* view with appropriate settings for python environments */ 395 | pythonView = args: view ({ shbang = ["bin/*"]; copy = ["bin/python*"]; jupyter = ["share/jupyter/kernels/*/kernel.json"]; } // args); 396 | 397 | modules = import ../spack/modules.nix packs; 398 | 399 | lmodCache = import ./lmodCache.nix packs; 400 | 401 | /* a runnable (if only partly functional) spack binary */ 402 | spackBin = import ../spack/bin.nix packs; 403 | 404 | nixpkgs = lib.when (nixpkgsSrc != null) 405 | (import ../nixpkgs { 406 | inherit system; 407 | target = global.target or target; 408 | src = nixpkgsSrc; 409 | overlays = nixpkgsOverlays; 410 | }); 411 | }); 412 | 413 | in packsWithPrefs 414 | -------------------------------------------------------------------------------- /packs/lib.nix: -------------------------------------------------------------------------------- 1 | with builtins; 2 | rec { 3 | 4 | id = x: x; 5 | const = x: y: x; 6 | flip = f: a: b: f b a; 7 | fix = f: let x = f x; in x; 8 | when = c: x: if c then x else null; 9 | coalesce = x: d: if x == null then d else x; 10 | coalesces = l: let r = remove null l; in when (r != []) (head r); 11 | coalesceWith = f: a: b: if a == null then b else if b == null then a else f a b; 12 | mapNullable = f: a: if a == null then a else f a; 13 | 14 | applyOptional = f: x: if isFunction f then f x else f; 15 | 16 | cons = x: l: [x] ++ l; 17 | toList = x: if isList x then x else if x == null then [] else [x]; 18 | fromList = x: if isList x && length x == 1 then head x else x; 19 | optionals = c: x: if c then x else []; 20 | 21 | traceId = x: trace x x; 22 | traceLabel = s: x: trace ("${s}: ${toJSON x}") x; 23 | traceId' = x: deepSeq x (traceId x); 24 | 25 | hasPrefix = pref: str: substring 0 (stringLength pref) str == pref; 26 | takePrefix = pref: str: if hasPrefix pref str then substring (stringLength pref) (-1) str else str; 27 | 28 | remove = e: filter (x: x != e); 29 | nub = foldl' 
(acc: e: if elem e acc then acc else acc ++ [ e ]) []; 30 | nubBy = eq: l: 31 | if l == [] then l else 32 | let x = head l; in 33 | cons x (nubBy eq (filter (y: ! (eq x y)) (tail l))); 34 | 35 | /* is a a prefix of b? */ 36 | listHasPrefix = a: b: 37 | a == [] || b != [] && head a == head b && listHasPrefix (tail a) (tail b); 38 | 39 | union = a: b: a ++ filter (x: ! elem x a) b; 40 | 41 | /* do the elements of list a all appear in-order in list b? */ 42 | subsetOrdered = a: b: 43 | a == [] || (b != [] && subsetOrdered (tail a) (if head a == head b then tail b else b)); 44 | 45 | mapKeys = f: set: 46 | listToAttrs (map (a: { name = f a; value = set.${a}; }) (attrNames set)); 47 | 48 | mergeWithKeys = f: a: b: 49 | mapAttrs (k: v: if hasAttr k a && hasAttr k b then f k a.${k} v else v) (a // b); 50 | 51 | mergeWith = f: mergeWithKeys (k: f); 52 | 53 | recursiveUpdate = a: b: 54 | if isAttrs a && isAttrs b then 55 | mergeWith recursiveUpdate a b 56 | else b; 57 | 58 | /* should this be lazy? 
*/ 59 | concatAttrs = foldl' (a: b: a // b) {}; 60 | 61 | filterAttrs = pred: set: 62 | listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [{ inherit name; value = v; }] else []) (attrNames set)); 63 | 64 | splitRegex = r: s: filter isString (split r s); 65 | 66 | versionOlder = v1: v2: compareVersions v1 v2 < 0; 67 | versionNewer = v1: v2: compareVersions v1 v2 > 0; 68 | versionAtLeast = v1: v2: compareVersions v1 v2 >= 0; 69 | versionAtMost = v1: v2: compareVersions v1 v2 <= 0; 70 | versionMax = v1: v2: if versionAtLeast v1 v2 then v1 else v2; 71 | 72 | versionSplitCompare = s1: s2: 73 | if s1 == [] then -2 else 74 | if s2 == [] then 2 else 75 | let c = compareVersions (head s1) (head s2); in 76 | if c == 0 then versionSplitCompare (tail s1) (tail s2) else 77 | c; 78 | /* like compareVersions but -2 if s1 is a prefix of s2, and +2 if s2 is a prefix of s1 */ 79 | versionCompare = v1: v2: if v1 == v2 then 0 else versionSplitCompare (splitVersion v1) (splitVersion v2); 80 | 81 | /* while 3.4 > 3 by nix (above), we want to treat 3.4 < 3 82 | v are concrete versions, s version specs */ 83 | versionAtMostSpec = v1: s2: versionCompare v1 s2 != 1; 84 | /* here 3.4 < 3 */ 85 | versionMinSpec = s1: s2: { 86 | "-2" = s2; 87 | "-1" = s1; 88 | "0" = s1; 89 | "1" = s2; 90 | "2" = s1; 91 | }.${toString (versionCompare s1 s2)}; 92 | 93 | versionIsConcrete = v: v != null && match ".*[:,].*" v == null; 94 | 95 | versionRange = v: let 96 | s = splitRegex ":" v; 97 | l = length s; 98 | in 99 | if l == 1 then { min = v; max = v; } else 100 | if l == 2 then { min = head s; max = elemAt s 1; } else 101 | throw "invalid version range ${v}"; 102 | 103 | rangeVersion = a: b: 104 | if a == b then a else "${a}:${b}"; 105 | 106 | /* spack version spec semantics: does concrete version v match spec m? 
*/ 107 | versionMatches = v: match: 108 | if match == null then true else 109 | if isList match then all (versionMatches v) match else 110 | let 111 | versionMatch = m: 112 | if hasPrefix "=" m then v == substring 1 (-1) m else 113 | let 114 | mr = versionRange m; 115 | in versionAtLeast v mr.min && 116 | (versionAtMostSpec v mr.max); 117 | in any versionMatch (splitRegex "," match); 118 | 119 | versionsOverlap = a: b: 120 | let 121 | as = splitRegex "," a; 122 | bs = splitRegex "," b; 123 | vo = a: b: let 124 | ar = versionRange a; 125 | br = versionRange b; 126 | in versionAtMostSpec ar.min br.max && 127 | versionAtMostSpec br.min ar.max; 128 | in any (a: any (vo a) bs) as; 129 | 130 | versionsIntersect = a: b: 131 | let 132 | as = splitRegex "," a; 133 | bs = splitRegex "," b; 134 | vi = a: b: let 135 | ar = versionRange a; 136 | br = versionRange b; 137 | in rangeVersion (versionMax ar.min br.min) (versionMinSpec ar.max br.max); 138 | in 139 | concatStringsSep "," (concatMap (a: map (vi a) bs) as); 140 | 141 | /* does concrete variant v match spec m? 
*/ 142 | variantMatches = v: ms: all (m: 143 | if isAttrs v then v.${m} else 144 | if isList v then elem m v else 145 | v == m) (toList ms); 146 | 147 | deptypeChars = dt: 148 | concatStringsSep "" (map (t: 149 | if elem t dt then substring 0 1 t else " ") 150 | [ "build" "link" "run" "test" ]); 151 | 152 | /* a very simple version of Spec.format */ 153 | specFormat = fmt: spec: let 154 | variantToString = n: v: 155 | if v == true then "+"+n 156 | else if v == false then "~"+n 157 | else " ${n}="+ 158 | (if isList v then concatStringsSep "," v 159 | else if isAttrs v then concatStringsSep "," (map (n: variantToString n v.${n}) (attrNames v)) 160 | else toString v); 161 | fmts = { 162 | inherit (spec) name version; 163 | variants = concatStringsSep "" (map (v: variantToString v spec.variants.${v}) 164 | (sort (a: b: typeOf spec.variants.${a} < typeOf spec.variants.${b}) (attrNames spec.variants))); 165 | deptype = if spec ? deptype 166 | then " [" + deptypeChars spec.deptype + "]" 167 | else ""; 168 | flags = concatStringsSep "" (map (f: " ${f}=\""+(concatStringsSep " " spec.flags.${f})+"\"") 169 | (attrNames spec.flags)); 170 | }; 171 | in replaceStrings (map (n: "{${n}}") (attrNames fmts)) (attrValues fmts) fmt; 172 | 173 | /* simple name@version */ 174 | specName = specFormat "{name}@{version}"; 175 | 176 | /* like spack default format */ 177 | specToString = specFormat "{name}@{version}{variants}{flags}{deptype}"; 178 | 179 | /* check that a given spec conforms to the specified preferences */ 180 | specMatches = spec: 181 | { name ? null 182 | , version ? null 183 | , variants ? {} 184 | , patches ? [] 185 | , depends ? {} 186 | , provides ? {} 187 | , extern ? 
spec.extern 188 | } @ prefs: 189 | (name == null || name == spec.name) 190 | && versionMatches spec.version version 191 | && all (name: variantMatches (spec.variants.${name} or null) variants.${name}) (attrNames variants) 192 | && subsetOrdered patches spec.patches 193 | && all (name: specMatches spec.depends.${name} depends.${name}) (attrNames depends) 194 | && all (name: hasAttr name spec.provides && versionsOverlap spec.provides.${name} provides.${name}) (attrNames provides) 195 | && spec.extern == extern; 196 | 197 | /* determine if something is a package (derivation) */ 198 | isPkg = p: p ? out; 199 | 200 | /* update two prefs, with the second overriding the first */ 201 | prefsUpdate = let 202 | scalar = a: b: b; 203 | updaters = { 204 | name = scalar; 205 | version = scalar; 206 | variants = mergeWith (a: b: 207 | if isAttrs a && isAttrs b then a // b 208 | else b); 209 | flags = a: b: a // b; 210 | patches = scalar; 211 | depends = mergeWith prefsUpdate; 212 | extern = scalar; 213 | tests = scalar; 214 | fixedDeps = scalar; 215 | resolver = scalar; 216 | deptype = scalar; 217 | target = scalar; 218 | provides = a: b: a // b; 219 | verbose = scalar; 220 | }; 221 | in 222 | a: b: 223 | if isPkg b then b else 224 | if isPkg a then a.withPrefs b else 225 | mergeWithKeys (k: updaters.${k}) a b; 226 | 227 | /* unify two prefs, making sure they're compatible */ 228 | prefsIntersect = let 229 | err = a: b: throw "incompatible prefs: ${toJSON a} vs ${toJSON b}"; 230 | scalar = a: b: if a == b then a else err a b; 231 | intersectors = { 232 | version = versionsIntersect; 233 | variants = mergeWith (a: b: if a == b then a else 234 | union (toList a) (toList b)); 235 | flags = mergeWith scalar; 236 | patches = a: b: a ++ b; 237 | depends = mergeWith prefsIntersect; 238 | extern = scalar; 239 | tests = scalar; 240 | fixedDeps = scalar; 241 | resolver = scalar; 242 | deptype = union; 243 | target = scalar; 244 | provides = mergeWith versionsIntersect; 245 | verbose = 
scalar; 246 | }; 247 | intersectPkg = o: p: if specMatches o.spec p then o else err o p; 248 | in coalesceWith (a: b: 249 | if isPkg a 250 | then if isPkg b 251 | then scalar a b /* two already-resolved packages: they must be identical, otherwise the prefs conflict */ 252 | else intersectPkg a b 253 | else if isPkg b 254 | then intersectPkg b a 255 | else mergeWithKeys (k: intersectors.${k}) a b); 256 | 257 | /* unify a list of package prefs, making sure they're compatible */ 258 | prefsIntersection = l: if isList l then foldl' prefsIntersect null l else l; 259 | 260 | /* traverse all dependencies of given package(s) that satisfy pred recursively and return them as a list (in breadth-first order) */ 261 | findDeps = pred: 262 | let 263 | adddeps = s: pkgs: add s 264 | (foldl' (deps: p: 265 | (deps ++ filter (d: d != null && ! (elem d s) && ! (elem d deps) && pred d) 266 | (attrValues p.spec.depends))) 267 | [] pkgs); 268 | add = s: pkgs: if pkgs == [] then s else adddeps (s ++ pkgs) pkgs; 269 | in pkg: add [] (toList pkg); 270 | 271 | /* debugging to trace full package dependencies (and return count of packages) */ 272 | traceSpecTree = let 273 | sst = seen: ind: dname: dt: pkg: if pkg == null then seen else 274 | trace (ind 275 | + (if dt != null then "[" + deptypeChars dt + "] " else "") 276 | + (if dname != null && dname != pkg.spec.name then "${dname}=" else "") 277 | + specToString pkg.spec + " " 278 | + takePrefix storeDir pkg.out) 279 | (if elem pkg seen then seen else 280 | foldl' (seen: d: sst seen (ind + "  ") d pkg.spec.deptypes.${d} or null pkg.spec.depends.${d}) 281 | (seen ++ [pkg]) 282 | (attrNames pkg.spec.depends)); 283 | in pkgs: length (foldl' (seen: sst seen "" null null) [] (toList pkgs)); 284 | 285 | capture = args: env: readFile (derivation ({ 286 | name = "capture-${baseNameOf (head args)}"; 287 | system = currentSystem; 288 | builder = ./capture.sh; 289 | args = args; 290 | } // env)); 291 | } 292 | -------------------------------------------------------------------------------- /packs/lmodCache.nix:
-------------------------------------------------------------------------------- 1 | packs: 2 | src: 3 | derivation (packs.spackEnv // { 4 | inherit (packs) system; 5 | name = "lmodCache"; 6 | builder = ./lmodCache.sh; 7 | lmod = packs.pkgs.lmod; 8 | MODULEPATH = "${src}/${packs.platform}-${packs.os}-${packs.target}/Core"; 9 | }) 10 | -------------------------------------------------------------------------------- /packs/lmodCache.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | $lmod/lmod/lmod/libexec/update_lmod_system_cache_files -d $out/cacheDir -t $out/cacheTS.txt $MODULEPATH 3 | cat > $out/lmodrc.lua < gcc symlink 45 | post = '' 46 | os.symlink('gcc', os.path.join(pkg.prefix, 'bin/cc')) 47 | ''; 48 | }; 49 | }; 50 | 51 | llvm = spec: old: { 52 | depends = old.depends // { 53 | compiler = { deptype = ["build"]; }; 54 | }; 55 | compiler_spec = "clang"; 56 | }; 57 | 58 | nvhpc = spec: old: { 59 | provides = old.provides or {} // { 60 | compiler = ":"; 61 | }; 62 | }; 63 | 64 | aocc = spec: old: { 65 | paths = { 66 | cc = "bin/clang"; 67 | cxx = "bin/clang++"; 68 | f77 = "bin/flang"; 69 | fc = "bin/flang"; 70 | }; 71 | depends = old.depends // { 72 | compiler = null; 73 | llvm = { 74 | # uses llvm package 75 | deptype = ["build"]; 76 | }; 77 | }; 78 | }; 79 | 80 | apptainer = spec: old: { 81 | depends = old.depends // { 82 | # imports package 83 | singularityce = { deptype = ["build"]; }; 84 | }; 85 | }; 86 | 87 | intel-oneapi-compilers = spec: old: { 88 | compiler_spec = "oneapi"; # can be overridden as "intel" with prefs 89 | provides = old.provides or {} // { 90 | compiler = ":"; 91 | }; 92 | }; 93 | 94 | intel-parallel-studio = spec: old: { 95 | compiler_spec = "intel@19.1.3.304"; # version may need correcting 96 | provides = old.provides or {} // { 97 | compiler = ":"; 98 | }; 99 | depends = old.depends or {} // { 100 | compiler = null; 101 | }; 102 | }; 103 | 104 | openssh = { 105 | /* disable installing 
with setuid */ 106 | patches = [./openssh-keysign-setuid.patch]; 107 | }; 108 | 109 | nix = { 110 | patches = [./nix-ignore-fsea.patch]; 111 | }; 112 | 113 | shadow = { 114 | /* disable installing with set[ug]id */ 115 | patches = [./shadow-nosuid.patch]; 116 | }; 117 | 118 | util-linux = { 119 | build = { 120 | enable_makeinstall_setuid = "no"; 121 | }; 122 | }; 123 | 124 | librsvg = { 125 | build = cargohome.build // { 126 | /* tries to install into gdk-pixbuf -- TODO: patch and use GDK_PIXBUF_MODULE_FILE (like nixpkgs) */ 127 | enable_pixbuf_loader = "no"; 128 | }; 129 | }; 130 | 131 | py-cryptography = cargohome; 132 | py-maturin = cargohome; 133 | py-rpds-py = cargohome; 134 | py-ruff = cargohome; 135 | 136 | /* for pdflatex */ 137 | r = { 138 | build = { 139 | setup = '' 140 | os.environ['TEXMFVAR'] = os.path.join(os.environ['TMPDIR'], 'texmf') 141 | ''; 142 | }; 143 | }; 144 | /* tries to set ~/.gitconfig */ 145 | r-credentials = tmphome; 146 | r-gert = tmphome; 147 | 148 | /* creates various cache stuff */ 149 | npm = tmphome; 150 | 151 | py-jaxlib = spec: old: { 152 | build = { 153 | setup = '' 154 | os.environ['XDG_CACHE_HOME'] = os.environ['TMPDIR'] 155 | os.environ['TEST_TMPDIR'] = os.environ['TMPDIR'] 156 | ''; 157 | }; 158 | }; 159 | 160 | /* uses npm */ 161 | py-jupyter-server = tmphome; 162 | py-jupyter-server-proxy = tmphome; 163 | py-jupyterlmod = tmphome; 164 | py-ipyparallel = tmphome; 165 | 166 | paraview = spec: old: { 167 | /* without explicit libx11 dep, ends up linking system libX11 (perhaps via system libGL) and not working */ 168 | depends = old.depends // { 169 | libx11 = { 170 | deptype = ["link"]; 171 | }; 172 | }; 173 | }; 174 | 175 | emacs = spec: old: { 176 | depends = old.depends // { 177 | fontconfig = { 178 | deptype = ["build" "link"]; 179 | }; 180 | libxft = { 181 | deptype = ["build" "link"]; 182 | }; 183 | libjansson = { 184 | deptype = ["build" "link"]; 185 | }; 186 | }; 187 | }; 188 | 189 | git-lfs = spec: old: { 190 | 
build = { 191 | setup = '' 192 | os.environ['GOPATH'] = os.path.join(os.environ['TMPDIR'], 'gopath') 193 | os.environ['GOCACHE'] = os.path.join(os.environ['TMPDIR'], 'gocache') 194 | ''; 195 | }; 196 | }; 197 | 198 | go = spec: old: { 199 | build = { 200 | setup = '' 201 | os.environ['GOCACHE'] = os.path.join(os.environ['TMPDIR'], 'go-cache') 202 | ''; 203 | }; 204 | }; 205 | 206 | rust = spec: old: { 207 | build = cargohome.build // { 208 | # workaround for https://github.com/rust-lang/cargo/issues/10303 209 | CARGO_NET_GIT_FETCH_WITH_CLI = "true"; 210 | }; 211 | }; 212 | 213 | vtk = spec: old: { 214 | depends = old.depends // { 215 | # imports package 216 | boost = { deptype = ["build"]; }; 217 | }; 218 | }; 219 | 220 | /* some things don't use a compiler */ 221 | intel-mkl = nocompiler; 222 | intel-mpi = nocompiler; 223 | intel-oneapi-mkl = nocompiler; 224 | intel-oneapi-mpi = nocompiler; 225 | intel-oneapi-tbb = nocompiler; 226 | cuda = nocompiler; 227 | cudnn = nocompiler; 228 | ghostscript-fonts = nocompiler; 229 | matlab = nocompiler; 230 | mathematica = nocompiler; 231 | 232 | lua-bit32 = noluajit; 233 | lua-bitlib = noluajit; 234 | lua-lpeg = noluajit; 235 | lua-luafilesystem = noluajit; 236 | lua-luaposix = noluajit; 237 | lua-mpack = noluajit; 238 | lua-sol2 = noluajit; 239 | } 240 | -------------------------------------------------------------------------------- /patch/nix-ignore-fsea.patch: -------------------------------------------------------------------------------- 1 | --- nix-2.0/src/libstore/local-store.cc 1969-12-31 16:00:01.000000000 -0800 2 | +++ nix-2.0-patch/src/libstore/local-store.cc 2018-02-27 06:58:16.979381054 -0800 3 | @@ -463,6 +463,8 @@ 4 | /* Ignore SELinux security labels since these cannot be 5 | removed even by root. 
*/ 6 | if (eaName == "security.selinux") continue; 7 | + if (eaName == "lustre.lov") continue; 8 | + if (eaName == "system.nfs4_acl") continue; 9 | if (lremovexattr(path.c_str(), eaName.c_str()) == -1) 10 | throw SysError("removing extended attribute '%s' from '%s'", eaName, path); 11 | } 12 | -------------------------------------------------------------------------------- /patch/openssh-keysign-setuid.patch: -------------------------------------------------------------------------------- 1 | --- src/Makefile.in.orig 2024-10-19 16:42:58.275604182 -0400 2 | +++ src/Makefile.in 2024-10-19 16:43:14.199771778 -0400 3 | @@ -411,7 +411,7 @@ 4 | $(INSTALL) -m 0755 $(STRIP_OPT) ssh-keyscan$(EXEEXT) $(DESTDIR)$(bindir)/ssh-keyscan$(EXEEXT) 5 | $(INSTALL) -m 0755 $(STRIP_OPT) sshd$(EXEEXT) $(DESTDIR)$(sbindir)/sshd$(EXEEXT) 6 | $(INSTALL) -m 0755 $(STRIP_OPT) sshd-session$(EXEEXT) $(DESTDIR)$(SSHD_SESSION)$(EXEEXT) 7 | - $(INSTALL) -m 4711 $(STRIP_OPT) ssh-keysign$(EXEEXT) $(DESTDIR)$(SSH_KEYSIGN)$(EXEEXT) 8 | + $(INSTALL) -m 0711 $(STRIP_OPT) ssh-keysign$(EXEEXT) $(DESTDIR)$(SSH_KEYSIGN)$(EXEEXT) 9 | $(INSTALL) -m 0755 $(STRIP_OPT) ssh-pkcs11-helper$(EXEEXT) $(DESTDIR)$(SSH_PKCS11_HELPER)$(EXEEXT) 10 | $(INSTALL) -m 0755 $(STRIP_OPT) ssh-sk-helper$(EXEEXT) $(DESTDIR)$(SSH_SK_HELPER)$(EXEEXT) 11 | $(INSTALL) -m 0755 $(STRIP_OPT) sftp$(EXEEXT) $(DESTDIR)$(bindir)/sftp$(EXEEXT) 12 | -------------------------------------------------------------------------------- /patch/shadow-nosuid.patch: -------------------------------------------------------------------------------- 1 | diff -ru src.orig/src/Makefile.am src/src/Makefile.am 2 | --- src.orig/src/Makefile.am 2020-01-12 08:19:28.000000000 -0500 3 | +++ src/src/Makefile.am 2021-08-12 21:23:09.870336130 -0400 4 | @@ -4,8 +4,8 @@ 5 | 6 | ubindir = ${prefix}/bin 7 | usbindir = ${prefix}/sbin 8 | -suidperms = 4755 9 | -sgidperms = 2755 10 | +suidperms = 0755 11 | +sgidperms = 0755 12 | 13 | AM_CPPFLAGS = \ 14 | -I${top_srcdir}/lib \ 
15 | diff -ru src.orig/src/Makefile.in src/src/Makefile.in 16 | --- src.orig/src/Makefile.in 2020-01-23 15:57:50.000000000 -0500 17 | +++ src/src/Makefile.in 2021-08-12 21:23:15.454370068 -0400 18 | @@ -557,8 +557,8 @@ 19 | 20 | ubindir = ${prefix}/bin 21 | usbindir = ${prefix}/sbin 22 | -suidperms = 4755 23 | -sgidperms = 2755 24 | +suidperms = 0755 25 | +sgidperms = 0755 26 | AM_CPPFLAGS = \ 27 | -I${top_srcdir}/lib \ 28 | -I$(top_srcdir)/libmisc \ 29 | -------------------------------------------------------------------------------- /spack/bin.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | with packs; 3 | packs.spackBuilder { 4 | name = "nixpack-spack-bin.py"; 5 | builder = ./bin.sh; 6 | inherit spackNixLib spack; 7 | SPACK_PYTHON = spackPython; 8 | withRepos = true; 9 | } 10 | -------------------------------------------------------------------------------- /spack/bin.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | gen() { 3 | 4 | cat < $out 24 | chmod +x $out 25 | -------------------------------------------------------------------------------- /spack/builder.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | import os 3 | import functools 4 | import shutil 5 | import json 6 | 7 | import nixpack 8 | import spack 9 | 10 | try: 11 | from spack.context import Context 12 | except ImportError: 13 | class Context: 14 | BUILD = 'build' 15 | TEST = 'test' 16 | 17 | # disable pre_ and post_install hooks (sbang, permissions, licensing) 18 | def noop_hook(*args, **kwargs): 19 | pass 20 | spack.hooks.pre_install = noop_hook 21 | spack.hooks.post_install = noop_hook 22 | 23 | nixpack.getVar('name') 24 | nixspec = nixpack.getJson('spec') 25 | 26 | spec = nixpack.NixSpec.get(nixspec, nixpack.getVar('out')) 27 | spec.concretize() 28 | 29 | pkg = spec.package 30 | pkg.run_tests = spec.tests 31 | try: 32 | 
default_format = spack.spec.DEFAULT_FORMAT 33 | except AttributeError: 34 | default_format = spack.spec.default_format 35 | print(spec.tree(cover='edges', format=default_format + ' {/hash}', show_types=True)) 36 | 37 | opts = { 38 | 'install_deps': False, 39 | 'verbose': not not nixpack.getVar('verbose'), 40 | 'tests': spec.tests, 41 | } 42 | 43 | # package-specific fixes 44 | os.environ['CCACHE_DISABLE'] = '1' 45 | if 'go' in spec._dependencies: 46 | # move go cache to tmp 47 | os.environ['GOCACHE'] = os.path.join(os.environ['TMPDIR'], 'go-cache') 48 | 49 | setup = nixpack.getVar('setup', None) 50 | post = nixpack.getVar('post', None) 51 | if setup: 52 | exec(setup) 53 | 54 | origenv = os.environ.copy() 55 | # create and stash some metadata 56 | spack.build_environment.setup_package(pkg, True, context=Context.BUILD) 57 | os.makedirs(pkg.metadata_dir, exist_ok=True) 58 | 59 | # log build phases to nix 60 | def wrapPhase(p, f, *args): 61 | nixpack.nixLog({'action': 'setPhase', 'phase': p}) 62 | return f(*args) 63 | 64 | if hasattr(pkg, '_InstallPhase_phases'): 65 | for pn, pa in zip(pkg.phases, pkg._InstallPhase_phases): 66 | pf = getattr(pkg, pa) 67 | setattr(pkg, pa, functools.partial(wrapPhase, pn, pf)) 68 | else: 69 | builder = spack.builder.create(pkg) 70 | for phase in builder: 71 | phase.execute = functools.partial(wrapPhase, phase.name, phase.execute) 72 | 73 | if not opts['verbose']: 74 | def print_log(pkg, phase, log): 75 | with open(log, 'r') as f: 76 | print(f.read()) 77 | spack.hooks.on_phase_error = print_log 78 | 79 | # make sure cache is group-writable (should be configurable, ideally in spack) 80 | os.umask(0o002) 81 | # do the actual install 82 | spack.installer.build_process(pkg, opts) 83 | 84 | # we do this even if not testing as it may create more things (e.g., perl "extensions") 85 | os.environ.clear() 86 | os.environ.update(origenv) 87 | spack.build_environment.setup_package(pkg, True, context=Context.TEST) 88 | 89 | with 
open(os.path.join(spec.prefix, nixpack.NixSpec.nixSpecFile), 'w') as sf: 90 | json.dump(spec.nixspec, sf) 91 | 92 | if post: 93 | exec(post) 94 | -------------------------------------------------------------------------------- /spack/cache.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | 3 | import os 4 | import nixpack 5 | import spack 6 | 7 | spack.config.set('config:misc_cache', os.environ['out'], 'nixpack') 8 | print("Prepopulating spack repo cache...") 9 | spack.repo.PATH.all_package_names() 10 | -------------------------------------------------------------------------------- /spack/config.nix: -------------------------------------------------------------------------------- 1 | packs: config: derivation ({ 2 | inherit (packs.prefs) system; 3 | name = "spackConfig"; 4 | builder = ./config.sh; 5 | sections = builtins.attrNames config; 6 | } // builtins.mapAttrs (n: v: builtins.toJSON { "${n}" = v; }) config 7 | // packs.spackEnv) 8 | -------------------------------------------------------------------------------- /spack/config.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -eu 2 | 3 | mkdir -p $out 4 | for section in $sections ; do 5 | eval "echo \"\$$section\"" > $out/$section.yaml 6 | done 7 | -------------------------------------------------------------------------------- /spack/generate.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | 3 | import os 4 | import sys 5 | import re 6 | from collections import defaultdict 7 | 8 | import nixpack 9 | import spack 10 | try: 11 | from spack.version import any_version 12 | except ImportError: 13 | any_version = spack.spec._any_version 14 | 15 | identPat = re.compile("[a-zA-Z_][a-zA-Z0-9'_-]*") 16 | reserved = {'if','then','else','derivation','let','rec','in','inherit','import','with'} 17 | 18 | def isident(s: str): 19 | return 
identPat.fullmatch(s) and s not in reserved 20 | 21 | class Nix: 22 | prec = 0 23 | def paren(self, obj, indent, out, nl=False): 24 | prec = obj.prec if isinstance(obj, Nix) else 0 25 | parens = prec > self.prec 26 | if parens: 27 | if nl: 28 | out.write('\n' + ' '*indent) 29 | out.write('(') 30 | printNix(obj, indent, out) 31 | if parens: 32 | out.write(')') 33 | 34 | class Expr(Nix): 35 | def __init__(self, s, prec=0): 36 | self.str = s 37 | self.prec = prec 38 | def print(self, indent, out): 39 | out.write(self.str) 40 | 41 | class List(Nix): 42 | def __init__(self, items): 43 | self.items = items 44 | def print(self, indent, out): 45 | out.write('[') 46 | first = True 47 | indent += 2 48 | for x in self.items: 49 | if first: 50 | first = False 51 | else: 52 | out.write(' ') 53 | self.paren(x, indent, out, True) 54 | out.write(']') 55 | 56 | class Attr(Nix): 57 | def __init__(self, key, val): 58 | if not isinstance(key, str): 59 | raise TypeError(key) 60 | self.key = key 61 | self.val = val 62 | def print(self, indent, out): 63 | out.write(' '*indent) 64 | if isident(self.key): 65 | out.write(self.key) 66 | else: 67 | printNix(self.key, indent, out) 68 | out.write(' = ') 69 | printNix(self.val, indent, out) 70 | out.write(';\n') 71 | 72 | class AttrSet(Nix, dict): 73 | def print(self, indent, out): 74 | out.write('{') 75 | first = True 76 | for k, v in sorted(self.items()): 77 | if first: 78 | out.write('\n') 79 | first = False 80 | Attr(k, v).print(indent+2, out) 81 | if not first: 82 | out.write(' '*indent) 83 | out.write('}') 84 | 85 | class Select(Nix): 86 | prec = 1 87 | def __init__(self, val, *attr: str): 88 | self.val = val 89 | self.attr = attr 90 | def print(self, indent, out): 91 | if isinstance(self.val, str): 92 | out.write(self.val) 93 | else: 94 | self.paren(self.val, indent, out) 95 | for a in self.attr: 96 | out.write('.') 97 | if isident(a): 98 | out.write(a) 99 | else: 100 | self.paren(a, indent, out) 101 | 102 | class SelectOr(Select): 103 | 
prec = 1 104 | def __init__(self, val, attr: str, ore): 105 | super().__init__(val, attr) 106 | self.ore = ore 107 | def print(self, indent, out): 108 | super().print(indent, out) 109 | out.write(' or ') 110 | self.paren(self.ore, indent, out) 111 | 112 | class Fun(Nix): 113 | prec = 16 # not actually listed? 114 | def __init__(self, var: str, expr): 115 | self.var = var 116 | self.expr = expr 117 | def print(self, indent, out): 118 | out.write(self.var) 119 | out.write(': ') 120 | self.paren(self.expr, indent, out) 121 | 122 | class App(Nix): 123 | prec = 2 124 | def __init__(self, fun, *args): 125 | self.fun = fun 126 | self.args = args 127 | def print(self, indent, out): 128 | if isinstance(self.fun, str): 129 | out.write(self.fun) 130 | else: 131 | self.paren(self.fun, indent, out) 132 | for a in self.args: 133 | out.write(' ') 134 | self.paren(a, indent, out) 135 | 136 | class Or(Nix): 137 | prec = 13 138 | def __init__(self, *args): 139 | self.args = args 140 | def print(self, indent, out): 141 | first = True 142 | for a in self.args: 143 | if first: 144 | first = False 145 | else: 146 | out.write(' || ') 147 | self.paren(a, indent, out) 148 | if first: 149 | out.write('false') 150 | 151 | class And(Nix): 152 | prec = 12 153 | def __init__(self, *args): 154 | self.args = args 155 | def print(self, indent, out): 156 | first = True 157 | for a in self.args: 158 | if first: 159 | first = False 160 | else: 161 | out.write(' && ') 162 | self.paren(a, indent, out) 163 | if first: 164 | out.write('true') 165 | 166 | class Eq(Nix): 167 | prec = 11 168 | def __init__(self, a, b): 169 | self.a = a 170 | self.b = b 171 | def print(self, indent, out): 172 | self.paren(self.a, indent, out) 173 | out.write(' == ') 174 | self.paren(self.b, indent, out) 175 | 176 | class Ne(Nix): 177 | prec = 11 178 | def __init__(self, a, b): 179 | self.a = a 180 | self.b = b 181 | def print(self, indent, out): 182 | self.paren(self.a, indent, out) 183 | out.write(' != ') 184 | 
self.paren(self.b, indent, out) 185 | 186 | class If(Nix): 187 | prec = 15 188 | def __init__(self, i, t, e): 189 | self.i = i 190 | self.t = t 191 | self.e = e 192 | def print(self, indent, out): 193 | out.write('if ') 194 | self.paren(self.i, indent, out) 195 | out.write(' then ') 196 | self.paren(self.t, indent, out) 197 | out.write(' else ') 198 | self.paren(self.e, indent, out) 199 | 200 | nixStrEsc = str.maketrans({'"': '\\"', '\\': '\\\\', '$': '\\$', '\n': '\\n', '\r': '\\r', '\t': '\\t'}) 201 | def printNix(x, indent=0, out=sys.stdout): 202 | if isinstance(x, Nix): 203 | x.print(indent, out) 204 | elif isinstance(x, str): 205 | out.write('"' + x.translate(nixStrEsc) + '"') 206 | elif type(x) is bool: 207 | out.write('true' if x else 'false') 208 | elif x is None: 209 | out.write('null') 210 | elif isinstance(x, int): 211 | out.write(repr(x)) 212 | elif isinstance(x, float): 213 | # messy but rare (needed for nix parsing #5063) 214 | out.write('%.15e'%x) 215 | elif isinstance(x, (list, tuple)): 216 | List(x).print(indent, out) 217 | elif isinstance(x, set): 218 | List(sorted(x)).print(indent, out) 219 | elif isinstance(x, dict): 220 | AttrSet(x).print(indent, out) 221 | else: 222 | raise TypeError(type(x)) 223 | 224 | def unlist(l): 225 | if isinstance(l, (list, tuple)) and len(l) == 1: 226 | return l[0] 227 | return l 228 | 229 | def specPrefs(s): 230 | p = {} 231 | if s.versions != any_version: 232 | p['version'] = str(s.versions) 233 | if s.variants: 234 | p['variants'] = {n: unlist(v.value) for n, v in s.variants.items()} 235 | d = s.dependencies() 236 | if d: 237 | p['depends'] = {x.name: specPrefs(x) for x in d} 238 | return p 239 | 240 | def depPrefs(d): 241 | p = specPrefs(d.spec) 242 | try: 243 | p['deptype'] = spack.deptypes.flag_to_tuple(d.depflag) 244 | except AttributeError: 245 | p['deptype'] = d.type 246 | if d.patches: 247 | print(f"{d} has unsupported dependency patches", file=sys.stderr) 248 | return p 249 | 250 | def conditions(c, p, s, 
dep=None): 251 | def addConditions(a, s): 252 | deps = Select(a,'depends') 253 | if s.versions != any_version: 254 | c.append(App("versionMatches", Select(a,'version'), str(s.versions))) 255 | if s.variants: 256 | for n, v in sorted(s.variants.items()): 257 | c.append(App("variantMatches", Select(a,'variants',n), unlist(v.value))) 258 | if s.compiler: 259 | notExtern = Eq(Select(a,'extern'), None) 260 | if s.compiler.name: 261 | c.append(And(notExtern, Eq(Select(deps,'compiler','spec','name'), s.compiler.name))) 262 | if s.compiler.versions != any_version: 263 | c.append(And(notExtern, App("versionMatches", Select(deps,'compiler','spec','version'), str(s.compiler.versions)))) 264 | for d in s.dependencies(): 265 | if dep and d.name == dep.spec.name: 266 | print(f"{dep}: skipping recursive dependency conditional {d}", file=sys.stderr) 267 | continue 268 | c.append(Ne(SelectOr(deps,d.name,None),None)) 269 | addConditions(Select(deps,d.name,'spec'), d) 270 | if s.architecture: 271 | if s.architecture.os: 272 | c.append(Eq(Expr('os'), s.architecture.os)) 273 | if s.architecture.platform: 274 | c.append(Eq(Expr('platform'), s.architecture.platform)) 275 | if s.architecture.target: 276 | # this isn't actually correct due to fancy targets but good enough for this 277 | c.append(Eq(Expr('target'), str(s.architecture.target).rstrip(':'))) 278 | if s.name is not None and s.name != p.name: 279 | # spack sometimes interprets this to mean p provides a virtual of s.name, and sometimes to refer to the named package anywhere in the dep tree 280 | print(f"{p.name}: ignoring unsupported named condition {s}") 281 | c.append(False) 282 | addConditions('spec', s) 283 | 284 | def whenCondition(p, s, a, dep=None): 285 | c = [] 286 | conditions(c, p, s, dep) 287 | if not c: 288 | return a 289 | return App('when', And(*c), a) 290 | 291 | try: 292 | VariantValue = spack.variant.ConditionalValue 293 | except AttributeError: 294 | try: 295 | VariantValue = spack.variant.Value 296 | except 
AttributeError: 297 | VariantValue = None 298 | 299 | def variant1(p, v): 300 | def value(x): 301 | if VariantValue and isinstance(x, VariantValue): 302 | print(f"{p.name} variant {v.name}: ignoring unsupported conditional on value {x}", file=sys.stderr) 303 | return x.value 304 | return x 305 | 306 | d = str(v.default) 307 | if v.multi and v.values is not None: 308 | d = d.split(',') 309 | return {x: x in d for x in map(value, v.values)} 310 | elif v.values == (True, False): 311 | return d.upper() == 'TRUE' 312 | elif v.values: 313 | l = list(map(value, v.values)) 314 | try: 315 | l.remove(d) 316 | l.insert(0, d) 317 | except ValueError: 318 | print(f"{p.name}: variant {v.name} default {v.default!r} not in {v.values!r}", file=sys.stderr) 319 | return l 320 | else: 321 | return d 322 | 323 | def variant(p, v): 324 | if type(v) is tuple: 325 | a = variant1(p, v[0]) 326 | l = [] 327 | for w in v[1]: 328 | c = [] 329 | conditions(c, p, w) 330 | if not c: 331 | return a 332 | l.append(And(*c)) 333 | return App('when', Or(*l), a) 334 | else: 335 | return variant1(p, v) 336 | 337 | def variant_definitions(p, l): 338 | if not l: 339 | return None 340 | w, v = l[0] 341 | a = variant1(p, v) 342 | c = [] 343 | conditions(c, p, w) 344 | if not c: 345 | return a 346 | # fold right 347 | return If(And(*c), a, variant_definitions(p, l[1:])) 348 | 349 | def variant_name(p, n): 350 | return variant_definitions(p, p.variant_definitions(n)) 351 | 352 | def depend(p, d): 353 | c = [whenCondition(p, w, depPrefs(s), s) for w, l in sorted(d.items()) for s in l] 354 | if len(c) == 1: 355 | return c[0] 356 | return List(c) 357 | 358 | def provide(p, wv): 359 | c = [whenCondition(p, w, str(v)) for w, v in wv] 360 | if len(c) == 1: 361 | return c[0] 362 | return List(c) 363 | 364 | def conflict(p, c, w, m): 365 | l = [] 366 | conditions(l, p, spack.spec.Spec(c)) 367 | conditions(l, p, w) 368 | return App('when', And(*l), str(c) + (' ' + m if m else '')) 369 | 370 | namespaces = ', 
'.join(r.namespace for r in spack.repo.PATH.repos) 371 | print(f"Generating package repo for {namespaces}...") 372 | f = open(os.environ['out'], 'w') 373 | print("spackLib: with spackLib; {", file=f) 374 | def output(k, v): 375 | printNix(Attr(k, v), out=f) 376 | 377 | virtuals = defaultdict(set) 378 | n = 0 379 | for p in spack.repo.PATH.all_package_classes(): 380 | desc = dict() 381 | desc['namespace'] = p.namespace 382 | desc['dir'] = p.package_dir 383 | vers = [(i.get('preferred',False), not (v.isdevelop() or i.get('deprecated',False)), v) 384 | for v, i in p.versions.items()] 385 | vers.sort(reverse = True) 386 | desc['version'] = [str(v) for _, _, v in vers] 387 | if p.variants: 388 | if hasattr(p, "variant_names"): 389 | desc['variants'] = {n: variant_name(p, n) for n in p.variant_names()} 390 | else: 391 | desc['variants'] = {n: variant(p, e) for n, e in p.variants.items()} 392 | if p.dependencies: 393 | desc['depends'] = {n: depend(p, d) for n, d in p.dependencies_by_name(when=True).items()} 394 | if p.conflicts: 395 | desc['conflicts'] = [conflict(p, c, w, m) for c, wm in sorted(p.conflicts.items()) for w, m in wm] 396 | if p.provided: 397 | provides = defaultdict(list) 398 | for w, vs in sorted(p.provided.items()): 399 | for v in vs: 400 | provides[v.name].append((w, v.versions)) 401 | virtuals[v.name].add(p.name) 402 | desc['provides'] = {v: provide(p, c) for v, c in sorted(provides.items())} 403 | if getattr(p, 'family', None) == 'compiler' or 'compiler' in getattr(p, 'tags', []): 404 | desc.setdefault('provides', {}).setdefault('compiler', ':') 405 | output(p.name, Fun('spec', desc)) 406 | n += 1 407 | print(f"Generated {n} packages") 408 | 409 | # use spack config for provider ordering 410 | prefs = spack.config.get("packages:all:providers", {}) 411 | for v, providers in sorted(virtuals.items()): 412 | prov = [] 413 | for p in prefs.get(v, []): 414 | n = spack.spec.Spec(p).name 415 | try: 416 | providers.remove(n) 417 | except KeyError: 418 | 
continue 419 | prov.append(n) 420 | prov.extend(sorted(providers)) 421 | output(v, prov) 422 | print(f"Generated {len(virtuals)} virtuals") 423 | 424 | print("}", file=f) 425 | f.close() 426 | -------------------------------------------------------------------------------- /spack/install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | mkdir -p $out 3 | cp $src $out/nixpack.py 4 | -------------------------------------------------------------------------------- /spack/modules.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | { name ? "modules" 3 | , modtype ? "lmod" /* lmod or tcl */ 4 | , config ? {} 5 | , pkgs /* packages to include, list of: 6 | pkg (spack derivation) 7 | { pkg = pkg; default = true; } (for default module) 8 | { pkg = pkg; environment = { ... }; projection = "{name}/{version}"; } (overrides config) 9 | { name = "name"; static = "content"; } 10 | { name = "name"; static = { template variables }; } 11 | */ 12 | , coreCompilers ? 
[packs.pkgs.compiler] 13 | }: 14 | let 15 | jsons = { 16 | inherit config pkgs coreCompilers; 17 | }; 18 | in 19 | packs.spackBuilder ({ 20 | args = [./modules.py]; 21 | inherit name modtype; 22 | withRepos = true; 23 | enableParallelBuilding = false; # broken in some cases 24 | } // builtins.mapAttrs (name: builtins.toJSON) jsons // { 25 | passAsFile = builtins.attrNames jsons; 26 | }) // jsons 27 | -------------------------------------------------------------------------------- /spack/modules.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | import os 3 | import json 4 | import datetime 5 | 6 | import nixpack 7 | import spack 8 | 9 | try: 10 | from spack.package_base import PackageBase 11 | except ImportError: 12 | from spack.package import PackageBase 13 | 14 | root = nixpack.getVar('out') 15 | name = nixpack.getVar('name') 16 | modtype = nixpack.getVar('modtype') 17 | 18 | coreCompilers = [nixpack.NixSpec.get(p, top=False) for p in nixpack.getJson('coreCompilers')] 19 | coreCompilers.append(nixpack.nullCompilerSpec) 20 | 21 | modconf = nixpack.getJson('config') 22 | modconf.setdefault('core_compilers', []) 23 | modconf['core_compilers'].extend(str(comp.as_compiler) for comp in coreCompilers) 24 | core_specs = modconf.setdefault('core_specs', []) 25 | 26 | cls = spack.modules.module_types[modtype] 27 | 28 | class NullContext: 29 | "contextlib.nullcontext" 30 | def __enter__(self): 31 | pass 32 | def __exit__(self, *exc): 33 | pass 34 | 35 | class TempConfig: 36 | def __init__(self, key, value): 37 | self.key = key 38 | self.value = value 39 | 40 | def __enter__(self): 41 | self.oldconf = spack.config.get(self.key) 42 | spack.config.set(self.key, self.value, scope='nixpack') 43 | 44 | def __exit__(self, *exc): 45 | spack.config.set(self.key, self.oldconf, scope='nixpack') 46 | 47 | def tempProjection(projection): 48 | if projection: 49 | return TempConfig(f'modules:{name}:{modtype}:projections', 
{'all': projection}) 50 | else: 51 | return NullContext() 52 | 53 | class FakePackage(PackageBase): 54 | extendees = () 55 | provided = {} 56 | 57 | class FakeSpec(nixpack.NixSpec): 58 | def __init__(self, desc): 59 | h = spack.util.hash.b32_hash(json.dumps(desc, sort_keys=True)) 60 | nixspec = { 61 | 'name': f'static-module-{h}', 62 | 'namespace': 'dummy', 63 | 'version': '0', 64 | 'variants': {}, 65 | 'flags': {}, 66 | 'tests': False, 67 | 'paths': {}, 68 | 'depends': desc.get('depends', {}), 69 | 'deptypes': {}, 70 | 'patches': [] 71 | } 72 | 73 | prefix = desc.get('prefix', f"/{nixspec['namespace']}/{nixspec['name']}") 74 | nixspec['extern'] = prefix 75 | for n, d in nixspec['depends'].items(): 76 | try: 77 | t = d['deptype'] 78 | except Exception: 79 | t = ('run',) 80 | nixspec['deptypes'][n] = t 81 | 82 | super().__init__(nixspec, prefix, True) 83 | self._package = FakePackage(self) 84 | 85 | def concretize(self): 86 | self._mark_concrete() 87 | 88 | @property 89 | def package_class(self): 90 | return self._package 91 | 92 | class ModSpec: 93 | def __init__(self, p): 94 | if isinstance(p, str) or 'spec' in p: 95 | self.pkg = p 96 | p = {} 97 | else: 98 | self.pkg = p.get('pkg', None) 99 | if self.pkg: 100 | self.spec = nixpack.NixSpec.get(self.pkg) 101 | else: 102 | self.spec = FakeSpec(p) 103 | 104 | if 'name' in p: 105 | self.spec.name = p['name'] 106 | if 'version' in p: 107 | self.spec.versions = spack.version.VersionList([spack.version.Version(p['version'])]) 108 | self.default = p.get('default', False) 109 | self.static = p.get('static', None) 110 | self.path = p.get('path', None) 111 | self.environment = p.get('environment', {}) 112 | self.context = p.get('context', {}) 113 | if p.get('core', False): 114 | core_specs.append(self.spec.format()) 115 | self.projection = p.get('projection') 116 | self.autoload = p.get('autoload', []) 117 | self.prerequisites = p.get('prerequisites', []) 118 | self.postscript = p.get('postscript', '') 119 | 120 | @property 
121 | def writer(self): 122 | try: 123 | return self._writer 124 | except AttributeError: 125 | self.spec.concretize() 126 | self._writer = cls(self.spec, name) 127 | for t in ('autoload', 'prerequisites'): 128 | self._writer.conf.conf[t].extend(map(nixpack.NixSpec.get, getattr(self, t))) 129 | if 'unlocked_paths' in self.context: 130 | for i, p in enumerate(self.context['unlocked_paths']): 131 | if not os.path.isabs(p): 132 | self.context['unlocked_paths'][i] = os.path.join(self._writer.layout.arch_dirname, p) 133 | elif self.spec in coreCompilers: 134 | # messy hack to prevent core compilers from unlocking themselves (should be handled in spack) 135 | self.context['unlocked_paths'] = [] 136 | for t in ('environment', 'context'): 137 | spack.modules.common.update_dictionary_extending_lists( 138 | self._writer.conf.conf.setdefault(t, {}), 139 | getattr(self, t)) 140 | return self._writer 141 | 142 | @property 143 | def filename(self): 144 | layout = self.writer.layout 145 | if self.path: 146 | base, name = os.path.split(self.path) 147 | return os.path.join(layout.arch_dirname, base or 'Core', name) + "." 
+ layout.extension 148 | else: 149 | with tempProjection(self.projection): 150 | return layout.filename 151 | 152 | def __str__(self): 153 | try: 154 | default_format = spack.spec.DEFAULT_FORMAT 155 | except AttributeError: 156 | default_format = spack.spec.default_format 157 | return self.spec.cformat(default_format + ' {prefix}') 158 | 159 | def write(self, fn): 160 | dn = os.path.dirname(fn) 161 | if self.static: 162 | os.makedirs(dn, exist_ok=True) 163 | content = self.static 164 | if isinstance(content, dict): 165 | template = spack.tengine.make_environment().get_template(self.writer.default_template) 166 | content.setdefault('spec', content) 167 | content['spec'].setdefault('target', nixpack.basetarget) 168 | content['spec'].setdefault('name', self.spec.name) 169 | content['spec'].setdefault('short_spec', 'static module via nixpack') 170 | content.setdefault('timestamp', datetime.datetime.now()) 171 | content = template.render(content) 172 | with open(fn, 'x') as f: 173 | f.write(content) 174 | else: 175 | with tempProjection(self.projection): 176 | self.writer.write() 177 | if self.postscript: 178 | with open(fn, 'a') as f: 179 | f.write(self.postscript) 180 | if self.default: 181 | bn = os.path.basename(fn) 182 | os.symlink(bn, os.path.join(dn, "default")) 183 | 184 | specs = [ModSpec(p) for p in nixpack.getJson('pkgs')] 185 | 186 | config = { 187 | 'prefix_inspections': modconf.pop('prefix_inspections', {}), 188 | name: { 189 | 'enable': [modtype], 190 | 'roots': { modtype: root }, 191 | modtype: modconf 192 | } 193 | } 194 | spack.config.set('modules', config, 'nixpack') 195 | spack.repo.PATH.provider_index # precompute 196 | 197 | print(f"Generating {len(specs)} {modtype} modules in {root}...") 198 | def write(s): 199 | fn = s.filename 200 | print(f" {os.path.relpath(fn, root)}: {s}") 201 | s.write(fn) 202 | return fn 203 | 204 | def proc(si): 205 | return write(specs[si]) 206 | 207 | if nixpack.cores > 1: 208 | import multiprocessing 209 | pool = 
multiprocessing.Pool(nixpack.cores) 210 | paths = pool.imap_unordered(proc, range(len(specs))) 211 | pool.close() 212 | else: 213 | pool = None 214 | paths = map(write, specs) 215 | 216 | seen = set() 217 | for fn in paths: 218 | assert fn not in seen, f"Duplicate path: {fn}" 219 | seen.add(fn) 220 | 221 | if pool: 222 | pool.join() 223 | -------------------------------------------------------------------------------- /spack/repo/packages/libass/package.py: -------------------------------------------------------------------------------- 1 | from spack.package import * 2 | 3 | 4 | class Libass(AutotoolsPackage): 5 | """libass is a portable subtitle renderer for the ASS/SSA 6 | (Advanced Substation Alpha/Substation Alpha) subtitle format.""" 7 | 8 | homepage = "https://github.com/libass/libass" 9 | url = "https://github.com/libass/libass/releases/download/0.15.1/libass-0.15.1.tar.gz" 10 | 11 | maintainers = ['alexdotc'] 12 | 13 | version('0.15.1', sha256='101e2be1bf52e8fc265e7ca2225af8bd678839ba13720b969883eb9da43048a6') 14 | version('0.15.0', sha256='9cbddee5e8c87e43a5fe627a19cd2aa4c36552156eb4edcf6c5a30bd4934fe58') 15 | version('0.14.0', sha256='8d5a5c920b90b70a108007ffcd2289ac652c0e03fc88e6eecefa37df0f2e7fdf') 16 | version('0.13.7', sha256='008a05a4ed341483d8399c8071d57a39853cf025412b32da277e76ad8226e158') 17 | version('0.13.6', sha256='62070da83b2139c1875c9db65ece37f80f955097227b7d46ade680221efdff4b') 18 | version('0.13.5', sha256='e5c6d9ae81c3c75721a3920960959d2512e2ef14666910d76f976589d2f89b3f') 19 | version('0.13.4', sha256='6711469df5fcc47d06e92f7383dcebcf1282591002d2356057997e8936840792') 20 | version('0.13.3', sha256='86c8c45d14e4fd23b5aa45c72d9366c46b4e28087da306e04d52252e04a87d0a') 21 | version('0.13.2', sha256='8baccf663553b62977b1c017d18b3879835da0ef79dc4d3b708f2566762f1d5e') 22 | version('0.13.1', sha256='9741b9b4059e18b4369f8f3f77248416f988589896fd7bf9ce3da7dfb9a84797') 23 | 
-------------------------------------------------------------------------------- /spack/repo/packages/mplayer/package.py: -------------------------------------------------------------------------------- 1 | from spack.package import * 2 | 3 | class Mplayer(AutotoolsPackage): 4 | """MPlayer is a movie player which runs on many systems (see the documentation). 5 | It plays most MPEG/VOB, AVI, Ogg/OGM, VIVO, ASF/WMA/WMV, QT/MOV/MP4, RealMedia, 6 | Matroska, NUT, NuppelVideo, FLI, YUV4MPEG, FILM, RoQ, PVA files, supported by many 7 | native, XAnim, and Win32 DLL codecs. You can watch VideoCD, SVCD, DVD, 3ivx, 8 | DivX 3/4/5, WMV and even H.264 movies.""" 9 | 10 | homepage = "https://www.mplayerhq.hu" 11 | url = "http://www.mplayerhq.hu/MPlayer/releases/MPlayer-1.4.tar.xz" 12 | 13 | version('1.4', sha256='82596ed558478d28248c7bc3828eb09e6948c099bbd76bb7ee745a0e3275b548') 14 | version('1.3.0', sha256='3ad0846c92d89ab2e4e6fb83bf991ea677e7aa2ea775845814cbceb608b09843') 15 | version('1.2.1', sha256='831baf097d899bdfcdad0cb80f33cc8dff77fa52cb306bee5dee6843b5c52b5f') 16 | version('1.2', sha256='ffe7f6f10adf2920707e8d6c04f0d3ed34c307efc6cd90ac46593ee8fba2e2b6') 17 | version('1.1.1', sha256='ce8fc7c3179e6a57eb3a58cb7d1604388756b8a61764cc93e095e7aff3798c76') 18 | version('1.1', sha256='76cb47eadb52b420ca028276ebd8112114ad0ab3b726af60f07fb2f39dae6c9c') 19 | -------------------------------------------------------------------------------- /spack/repo/packages/mpv/package.py: -------------------------------------------------------------------------------- 1 | from spack.package import * 2 | 3 | class Mpv(WafPackage): 4 | """MPV media player""" 5 | 6 | homepage = "https://mpv.io" 7 | url = "https://github.com/mpv-player/mpv/archive/refs/tags/v0.33.1.tar.gz" 8 | 9 | maintainers = ['alexdotc'] 10 | 11 | version('0.33.1', sha256='100a116b9f23bdcda3a596e9f26be3a69f166a4f1d00910d1789b6571c46f3a9') 12 | 13 | depends_on('libass') 14 | depends_on('ffmpeg') 15 | 16 | @run_before('configure') 
17 | def get_waf(self): 18 | python('bootstrap.py') 19 | -------------------------------------------------------------------------------- /spack/repo/packages/mupdf/package.py: -------------------------------------------------------------------------------- 1 | from spack.package import * 2 | 3 | 4 | class Mupdf(MakefilePackage): 5 | """ MuPDF is a lightweight PDF, XPS, and E-book viewer. """ 6 | 7 | homepage = "https://www.example.com" 8 | url = "https://mupdf.com/downloads/archive/mupdf-1.18.0-source.tar.xz" 9 | 10 | version('1.18.0', sha256='592d4f6c0fba41bb954eb1a41616661b62b134d5b383e33bd45a081af5d4a59a') 11 | version('1.17.0', sha256='c935fb2593d9a28d9b56b59dad6e3b0716a6790f8a257a68fa7dcb4430bc6086') 12 | version('1.16.1', sha256='6fe78184bd5208f9595e4d7f92bc8df50af30fbe8e2c1298b581c84945f2f5da') 13 | version('1.16.0', sha256='d28906cea4f602ced98f0b08d04138a9a4ac2e5462effa8c45f86c0816ab1da4') 14 | version('1.15.0', sha256='565036cf7f140139c3033f0934b72e1885ac7e881994b7919e15d7bee3f8ac4e') 15 | version('1.14.0', sha256='603e69a96b04cdf9b19a3e41bd7b20c63b39abdcfba81a7460fcdcc205f856df') 16 | version('1.13.0', sha256='746698e0d5cd113bdcb8f65d096772029edea8cf20704f0d15c96cb5449a4904') 17 | version('1.12.0', sha256='577b3820c6b23d319be91e0e06080263598aa0662d9a7c50af500eb6f003322d') 18 | 19 | depends_on('openssl') 20 | depends_on('curl') 21 | depends_on('libxext') 22 | depends_on('libxau') 23 | 24 | def edit(self, spec, prefix): 25 | env['XCFLAGS'] = "-std=c99" 26 | self.install_targets.append('prefix={}'.format(prefix)) 27 | -------------------------------------------------------------------------------- /spack/repo/packages/wecall/cmake-rhel-regex.patch: -------------------------------------------------------------------------------- 1 | diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt 2 | index 411e10d..6d82b94 100644 3 | --- a/cpp/CMakeLists.txt 4 | +++ b/cpp/CMakeLists.txt 5 | @@ -450,7 +450,7 @@ install(FILES 
${CMAKE_CURRENT_BINARY_DIR}/weCall-userguide.pdf 6 | if(${DISTRIBUTOR_ID} MATCHES "Ubuntu" OR ${DISTRIBUTOR_ID} MATCHES "Debian") 7 | SET(CPACK_GENERATOR "DEB") 8 | ELSEIF(${DISTRIBUTOR_ID} MATCHES "RedHat" OR ${DISTRIBUTOR_ID} MATCHES "CentOS") 9 | - string(REGEX MATCH "[0-9]*" RH_VERSION ${DISTRIBUTOR_RELEASE}) 10 | + string(REGEX MATCH "[0-9]+" RH_VERSION ${DISTRIBUTOR_RELEASE}) 11 | SET(DISTRIBUTION_CODENAME "el${RH_VERSION}") 12 | SET(CPACK_GENERATOR "RPM") 13 | ELSE() 14 | -- 15 | -------------------------------------------------------------------------------- /spack/repo/packages/wecall/ncurses.patch: -------------------------------------------------------------------------------- 1 | diff -u a/vendor/samtools/Makefile b/vendor/samtools/Makefile 2 | --- a/vendor/samtools/Makefile 2018-10-16 10:54:11.000000000 -0400 3 | +++ b/vendor/samtools/Makefile 2021-11-14 21:04:53.775016000 -0500 4 | @@ -15,7 +15,7 @@ 5 | INCLUDES= -I. 6 | SUBDIRS= . bcftools misc 7 | LIBPATH= 8 | -LIBCURSES= -ltinfo -lcurses # -lXCurses 9 | +LIBCURSES= -ltinfo -lncurses # -lXCurses 10 | 11 | .SUFFIXES:.c .o 12 | .PHONY: all lib 13 | -- 14 | -------------------------------------------------------------------------------- /spack/repo/packages/wecall/package.py: -------------------------------------------------------------------------------- 1 | from spack.package import * 2 | 3 | class Wecall(MakefilePackage): 4 | """Fast, accurate and simple to use command line tool for variant detection in NGS data. 
""" 5 | 6 | url = "https://github.com/Genomicsplc/wecall/archive/refs/tags/v2.0.0.tar.gz" 7 | 8 | version('2.0.0', sha256='c67cc7ca686432e4438ceb9160f698394e4d21734baa97bc3fc781065d59b410') 9 | 10 | patch('cmake-rhel-regex.patch') 11 | patch('ncurses.patch') 12 | 13 | depends_on('ncurses') 14 | depends_on('zlib') 15 | depends_on('boost+regex+test') 16 | depends_on('cmake', type='build') 17 | depends_on('texlive', type='build') 18 | depends_on('python', type='build') 19 | 20 | def install(self, spec, prefix): 21 | doc = join_path(prefix, 'share/doc/wecall') 22 | bin = join_path(prefix, 'bin') 23 | mkdirp(doc) 24 | mkdirp(bin) 25 | with working_dir(join_path(self.stage.source_path, 'build')): 26 | copy("weCall", bin) 27 | copy("weCall-userguide.pdf", doc) 28 | -------------------------------------------------------------------------------- /spack/repo/packages/xscreensaver/package.py: -------------------------------------------------------------------------------- 1 | from spack.package import * 2 | 3 | 4 | class Xscreensaver(AutotoolsPackage): 5 | """Xscreensaver package""" 6 | 7 | homepage = "https://www.jwz.org" 8 | url = "https://www.jwz.org/xscreensaver/xscreensaver-6.01.tar.gz" 9 | 10 | maintainers = ['alexdotc'] 11 | 12 | version('6.01', sha256='085484665d91f60b4a1dedacd94bcf9b74b0fb096bcedc89ff1c245168e5473b') 13 | 14 | @run_before('configure') 15 | def fix_GTK_paths(self): 16 | filter_file(r'(@GTK_DATADIR@)|(@PO_DATADIR@)', '@datadir@', 17 | 'driver/Makefile.in', 'po/Makefile.in.in') 18 | 19 | 20 | def configure_args(self): 21 | args = ['--with-app-defaults='+prefix.share] 22 | return args 23 | -------------------------------------------------------------------------------- /spack/repo/repo.yaml: -------------------------------------------------------------------------------- 1 | repo: 2 | namespace: nixpack 3 | -------------------------------------------------------------------------------- /view/builder.py: 
-------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | from typing import TYPE_CHECKING, Union, Optional, Type, List, Any 3 | if not TYPE_CHECKING: 4 | # hack for __future__.annotatinons (python<3.7) 5 | Path = Any 6 | Inode = Any 7 | 8 | import os 9 | import sys 10 | import stat 11 | import errno 12 | import fnmatch 13 | import json 14 | 15 | def pathstr(s: bytes) -> str: 16 | return s.decode('ISO-8859-1') 17 | 18 | srcPaths = os.environb[b'pkgs'].split() 19 | dstPath = os.environb[b'out'] 20 | forceSrcs = [srcPaths.index(p) if p else None for p in os.environb[b'forcePkgs'].split(b' ')] 21 | 22 | def getOpt(opt: bytes): 23 | v = os.environb[opt] 24 | if v == b'': 25 | return lambda x: None 26 | if v == b'1': 27 | return lambda x: True 28 | l = [ fnmatch._compile_pattern(x) for x in v.split(b' ') ] 29 | def check(x: bytes): 30 | for i, m in enumerate(l): 31 | if m(x): 32 | return i 33 | return None 34 | return check 35 | 36 | # path handling options 37 | opts = {o: getOpt(o) for o in 38 | # in order of precedece: 39 | [ b'exclude' # paths not to link 40 | , b'shbang' # paths to translate #! 41 | , b'jupyter' # paths to translate argv[0] 42 | , b'wrap' # paths to wrap executables 43 | , b'copy' # paths to copy 44 | , b'force' # paths to process only from corresponding forcePkgs 45 | , b'ignoreConflicts' # paths to ignore conflicts 46 | ] } 47 | 48 | maxSrcLen = max(len(p) for p in srcPaths) 49 | 50 | class Path: 51 | """ 52 | Keep track of a rooted path, including relative path, open parent dir_fd, 53 | open state, and other operations. 
54 | """ 55 | def __init__(self, dir: Union[Path, bytes], ent: Union[os.DirEntry,bytes]=b''): 56 | self.dir = dir # parent or root 57 | self.ent = ent # relative 58 | if isinstance(dir, Path): 59 | path = dir.path 60 | self.relpath: bytes = os.path.join(dir.relpath, self.name) if ent else dir.relpath 61 | else: 62 | path = dir 63 | self.relpath = self.name 64 | self.path: bytes = os.path.join(path, self.name) if ent else path # full path 65 | self.fd: Optional[int] = None 66 | 67 | def __str__(self) -> str: 68 | return pathstr(self.path) 69 | 70 | @property 71 | def root(self) -> bytes: 72 | "Root path. os.path.join(self.root, self.relpath) == self.path" 73 | if isinstance(self.dir, Path): 74 | return self.dir.root 75 | return self.dir 76 | 77 | @property 78 | def dirfd(self) -> Optional[int]: 79 | "Parent open dir fd. file(self.dirfd, self.name) == self.path" 80 | if isinstance(self.dir, Path): 81 | return self.dir.fd 82 | return None 83 | 84 | @property 85 | def name(self) -> bytes: 86 | "Name relative to dirfd" 87 | if isinstance(self.ent, os.DirEntry): 88 | return self.ent.name 89 | return self.ent 90 | 91 | def sub(self, ent: Union[os.DirEntry,bytes]): 92 | "Create a child path" 93 | return Path(self, ent) 94 | 95 | def _dirop(self, fun, *args, **kwargs): 96 | if self.dirfd is not None: 97 | return fun(self.name, *args, dir_fd=self.dirfd, **kwargs) 98 | else: 99 | return fun(self.path, *args, **kwargs) 100 | 101 | def optsrc(self, opt: bytes) -> Optional[int]: 102 | "Check whether this path matches the given option and return the index" 103 | return opts[opt](self.relpath) 104 | 105 | def opt(self, opt: bytes) -> bool: 106 | "Check whether this path matches the given option" 107 | return self.optsrc(opt) is not None 108 | 109 | def _dostat(self): 110 | if self.fd is not None: 111 | return os.fstat(self.fd) 112 | try: 113 | if isinstance(self.ent, os.DirEntry): 114 | return self.ent.stat(follow_symlinks=False) 115 | else: 116 | return self._dirop(os.lstat) 117 
| except OSError as e: 118 | if e.errno == errno.ENOENT: 119 | return None 120 | raise 121 | 122 | def stat(self): 123 | "really lstat" 124 | try: 125 | return self._stat 126 | except AttributeError: 127 | self._stat = self._dostat() 128 | return self._stat 129 | 130 | def isdir(self): 131 | if isinstance(self.ent, os.DirEntry): 132 | return self.ent.is_dir(follow_symlinks=False) 133 | else: 134 | return stat.S_ISDIR(self.stat().st_mode) 135 | 136 | def islnk(self): 137 | if isinstance(self.ent, os.DirEntry): 138 | return self.ent.is_symlink() 139 | else: 140 | return stat.S_ISLNK(self.stat().st_mode) 141 | 142 | def isexe(self): 143 | return self.stat().st_mode & 0o111 144 | 145 | def readlink(self): 146 | return self._dirop(os.readlink) 147 | 148 | def symlink(self, target: Union[bytes,Path]): 149 | if isinstance(target, Path): 150 | target = target.path 151 | if self.dirfd is not None: 152 | return os.symlink(target, self.name, dir_fd=self.dirfd) 153 | else: 154 | return os.symlink(target, self.path) 155 | 156 | def link(self, old: Union[bytes,Path]): 157 | args = {} 158 | if self.dirfd is not None: 159 | args['dst_dir_fd'] = self.dirfd 160 | dst = self.name 161 | else: 162 | dst = self.path 163 | if isinstance(old, Path): 164 | if old.dirfd is not None: 165 | args['src_dir_fd'] = old.dirfd 166 | src = old.name 167 | else: 168 | src = old.path 169 | else: 170 | src = old 171 | return os.link(src, dst, **args) 172 | 173 | def open(self): 174 | "set the mode to open for reading. must be used as 'with path.open()'" 175 | self.mode = os.O_RDONLY|os.O_NOFOLLOW; 176 | return self 177 | 178 | def opendir(self): 179 | "set the mode to open directory for reading. must be used as 'with path.opendir()'" 180 | self.mode = os.O_RDONLY|os.O_NOFOLLOW|os.O_DIRECTORY; 181 | return self 182 | 183 | def create(self, perm): 184 | "set the mode to open and create. 
must be used as 'with path.create()'" 185 | self.mode = os.O_WRONLY|os.O_CREAT|os.O_EXCL|os.O_NOFOLLOW 186 | if isinstance(perm, Path): 187 | perm = perm.stat().st_mode 188 | self.perm = perm 189 | return self 190 | 191 | def mkdir(self): 192 | "create a directory and set the mode to open for reading. should be used as 'with path.mkdir()'" 193 | try: 194 | self._dirop(os.mkdir) 195 | except OSError as e: 196 | if e.errno != errno.EEXIST: 197 | raise 198 | self.mode = os.O_RDONLY|os.O_NOFOLLOW|os.O_DIRECTORY|os.O_PATH; 199 | return self 200 | 201 | def __enter__(self): 202 | self.fd = self._dirop(os.open, self.mode, getattr(self, 'perm', 0o777)) 203 | self.mode = None 204 | return self 205 | 206 | def __exit__(self, *args): 207 | os.close(self.fd) 208 | self.fd = None 209 | 210 | def read(self, len: int): 211 | assert self.fd is not None 212 | return os.read(self.fd, len) 213 | 214 | def write(self, data: bytes): 215 | assert self.fd is not None 216 | l = 0 217 | while l < len(data): 218 | l += os.write(self.fd, data[l:]) 219 | return l 220 | 221 | def readInterp(self) -> Optional[bytes]: 222 | "extract the interpreter from #! 
script, if any" 223 | hb = self.read(maxSrcLen+4) 224 | if hb[0:2] != b'#!': 225 | return None 226 | return hb[2:].lstrip() 227 | 228 | def sendfile(self, src, z): 229 | "try os.sendfile to self from src, falling back to read/write" 230 | try: 231 | return os.sendfile(self.fd, src.fd, None, z) 232 | except OSError as err: 233 | if err.errno in (errno.EINVAL, errno.ENOSYS): 234 | return self.write(os.read(src.fd, z)) 235 | else: 236 | raise err 237 | 238 | def copyfile(self, src): 239 | "write the contents of this open file from the open src file" 240 | z = src.stat().st_size 241 | while self.sendfile(src, z) > 0: 242 | pass 243 | 244 | def compare(self, other) -> bool: 245 | "compare stat and contents of two files, return true if identical" 246 | sstat = self.stat() 247 | if not stat.S_ISREG(sstat.st_mode): 248 | return False 249 | ostat = other.stat() 250 | if sstat.st_mode != ostat.st_mode or sstat.st_uid != ostat.st_uid or sstat.st_gid != ostat.st_gid or sstat.st_size != ostat.st_size: 251 | return False 252 | with self.open(), other.open(): 253 | z = 65536 254 | while True: 255 | b1 = self.read(z) 256 | b2 = other.read(z) 257 | if b1 != b2: 258 | return False 259 | if not b1: 260 | return True 261 | 262 | def _scandir(self): 263 | if self.fd is not None: 264 | try: 265 | return os.scandir(self.fd) 266 | except TypeError: 267 | pass 268 | return os.scandir(self.path) 269 | 270 | def scandir(self): 271 | "return an iterator of child Paths" 272 | return map(self.sub, self._scandir()) 273 | 274 | def newpath(path: bytes) -> bytes: 275 | "rewrite a path pointing to a src to the dst" 276 | if not os.path.isabs(path): 277 | return path 278 | for sp in srcPaths: 279 | if path.startswith(sp): 280 | return os.path.join(dstPath, os.path.relpath(path, sp)) 281 | return path 282 | 283 | class Conflict(Exception): 284 | def __init__(self, path: Path, *nodes: Inode): 285 | self.path = path.relpath 286 | self.nodes = nodes 287 | 288 | def __str__(self): 289 | srcs = ', 
'.join(pathstr(srcPaths[n.src]) for n in self.nodes if n.src is not None) 290 | return f'Conflicting file {pathstr(self.path)} from {srcs}' 291 | 292 | class Inode: 293 | "An abstract class representing a node of a file tree" 294 | def __init__(self, src: int, path: Path): 295 | self.path = path 296 | self.src: Optional[int] = src # index into srcPaths 297 | 298 | @property 299 | def needed(self): 300 | "does this node need special processing/copying/translaton?" 301 | return self.src == None 302 | 303 | def compatible(self, other: Inode) -> bool: 304 | "is this node compatible with the other" 305 | return type(self) == type(other) 306 | 307 | def resolve(self, node: Optional[Inode]) -> Inode: 308 | "return a unified object or raise Conflict" 309 | if node is None: 310 | return self 311 | if not self.compatible(node): 312 | raise Conflict(self.path, self, node) 313 | return node 314 | 315 | def srcpath(self, path: Path) -> Path: 316 | "translate the given path to the specific src path for this node" 317 | assert self.src is not None, path 318 | return Path(srcPaths[self.src], path.relpath) 319 | 320 | def create(self, dst: Path) -> None: 321 | "actually copy/link/populate dst path" 322 | dst.symlink(self.srcpath(dst)) 323 | 324 | class Symlink(Inode): 325 | def __init__(self, src: int, path: Path): 326 | targ = path.readlink() 327 | self.targ = newpath(targ) 328 | super().__init__(src, path) 329 | 330 | @property 331 | def needed(self): 332 | # for recursion -- don't bother creating directories just for symlinks 333 | return False 334 | 335 | def compatible(self, other) -> bool: 336 | if super().compatible(other) and self.targ == other.targ: 337 | return True 338 | # partial special case to handle unifying a symlink with its target 339 | if self.targ == newpath(other.path.path): 340 | return True 341 | # last resort 342 | if os.path.realpath(self.path.path) == os.path.realpath(other.path.path): 343 | return True 344 | return False 345 | 346 | def __repr__(self): 
347 | return f'Symlink({self.src}, {self.targ!r})' 348 | 349 | def create(self, dst: Path): 350 | dst.symlink(self.targ) 351 | 352 | class File(Inode): 353 | shbang = False 354 | jupyter = False 355 | wrap = False 356 | copy = False 357 | 358 | def __init__(self, src: int, path: Path): 359 | super().__init__(src, path) 360 | if path.isexe(): 361 | if path.opt(b'shbang'): 362 | with path.open(): 363 | interp = path.readInterp() 364 | if interp and any(interp.startswith(p) for p in srcPaths): 365 | self.shbang = True 366 | return 367 | if path.opt(b'wrap'): 368 | self.wrap = True 369 | if path.opt(b'jupyter'): 370 | self.jupyter = True 371 | if path.opt(b'copy'): 372 | self.copy = True 373 | 374 | @property 375 | def needed(self): 376 | return self.shbang or self.jupyter or self.wrap or self.copy 377 | 378 | def compatile(self, other) -> bool: 379 | if not super().compatible(other): 380 | return False 381 | # allow identical files 382 | return self.path.compare(other.path) 383 | 384 | def __repr__(self): 385 | return f'File({self.src}{", needed" if self.needed else ""})' 386 | 387 | def create(self, dst: Path): 388 | src = self.srcpath(dst) 389 | if self.shbang: 390 | with src.open(): 391 | interp = src.readInterp() 392 | assert interp 393 | new = newpath(interp) 394 | with dst.create(src): 395 | dst.write(b'#!'+new) 396 | dst.copyfile(src) 397 | elif self.wrap: 398 | with dst.create(src): 399 | dst.write(b'#!/bin/sh\nexec -a "$0" '+src.path+b' "$@"\n') 400 | elif self.jupyter: 401 | with src.open(): 402 | j = json.loads(src.read(src.stat().st_size)) 403 | j['argv'][0] = newpath(j['argv'][0].encode()).decode() 404 | with dst.create(src): 405 | dst.write(json.dumps(j).encode()) 406 | elif self.copy: 407 | try: 408 | dst.link(src) 409 | except PermissionError: 410 | with src.open(): 411 | with dst.create(src): 412 | dst.copyfile(src) 413 | else: 414 | dst.symlink(src) 415 | 416 | class Dir(Inode): 417 | needany = False 418 | 419 | def __init__(self, src: int, path: 
Path): 420 | super().__init__(src, path) 421 | self.dir = dict() 422 | 423 | def resolve(self, node: Optional[Inode]) -> Inode: 424 | node = super().resolve(node) 425 | if self.src != node.src: 426 | node.src = None 427 | with self.path.opendir(): 428 | for ent in self.path.scandir(): 429 | n = scan(node.dir.get(ent.name), self.src, ent) 430 | if n: 431 | node.dir[ent.name] = n 432 | if not node.needany and n.needed: 433 | node.needany = True 434 | return node 435 | 436 | @property 437 | def needed(self): 438 | return self.needany or super().needed 439 | 440 | def __repr__(self): 441 | return f'Dir({self.src}, {self.dir!r})' 442 | 443 | def create(self, dst: Path): 444 | if self.needed: 445 | with dst.mkdir(): 446 | for n, f in self.dir.items(): 447 | f.create(dst.sub(n)) 448 | else: 449 | super().create(dst) 450 | 451 | def scan(node, src: int, path: Path): 452 | if path.opt(b'exclude'): 453 | return node 454 | force = path.optsrc(b'force') 455 | if force is not None and forceSrcs[force] != src: 456 | return node 457 | if path.isdir(): 458 | cls: Type[Inode] = Dir 459 | elif path.islnk(): 460 | cls = Symlink 461 | else: 462 | cls = File 463 | try: 464 | return cls(src, path).resolve(node) 465 | except Conflict: 466 | if path.opt(b'ignoreConflicts'): 467 | return node 468 | raise 469 | 470 | print(f"Creating view {pathstr(dstPath)} from...") 471 | # scan and merge all source paths 472 | top = None 473 | for i, src in enumerate(srcPaths): 474 | print(f" {pathstr(src)}") 475 | top = scan(top, i, Path(src)) 476 | 477 | # populate the destination with the result 478 | assert top, "No paths found" 479 | top.create(Path(dstPath)) 480 | with open(os.path.join(dstPath, b".view-src"), "xb") as f: 481 | f.write(b"\n".join(srcPaths)) 482 | -------------------------------------------------------------------------------- /view/default.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | packs.lib.fix (makeView: 3 | { name ? 
meta.name + "-view" 4 | , pkgs /* packages to include */ 5 | , exclude ? [] /* globs of files to exclude (all globs rooted at top) */ 6 | , shbang ? [] /* files for which to copy and translate #! paths to new root */ 7 | , jupyter ? [] /* files for which to translate argv[0] to new root */ 8 | , wrap ? [] /* files to replace with executable wrapper "exec -a new old" */ 9 | , copy ? [] /* files to copy as-is (rather than link) */ 10 | , ignoreConflicts ? [] /* files for which to ignore any conflicts (first package wins) */ 11 | , meta ? builtins.head pkgs /* behave as package in terms of modules and dependencies */ 12 | } @ args: 13 | derivation { 14 | inherit (packs) system; 15 | builder = packs.prefs.spackPython; 16 | args = [./builder.py]; 17 | inherit name pkgs exclude shbang jupyter wrap copy ignoreConflicts; 18 | force = [".spack" ".nixpack.spec"]; 19 | forcePkgs = [meta meta]; 20 | } // rec { 21 | inherit (meta) spec; 22 | overrideView = a: makeView (args // a); 23 | extendView = p: overrideView { pkgs = pkgs ++ p; }; 24 | }) 25 | --------------------------------------------------------------------------------