├── packs ├── capture.sh ├── lmodCache.nix ├── lmodCache.sh └── lib.nix ├── fi ├── repo │ ├── repo.yaml │ └── packages │ │ ├── libjansson │ │ └── package.py │ │ ├── slack │ │ └── package.py │ │ ├── py_pyslurm │ │ └── package.py │ │ ├── triqs_hubbardi │ │ └── package.py │ │ ├── py_sharedmem │ │ └── package.py │ │ ├── triqs_hartree_fock │ │ └── package.py │ │ ├── py_kvsstcp │ │ └── package.py │ │ ├── py_cons │ │ └── package.py │ │ ├── py_websockify │ │ └── package.py │ │ ├── py_cachey │ │ └── package.py │ │ ├── py_mcfit │ │ └── package.py │ │ ├── triqs_ctseg │ │ └── package.py │ │ ├── paraview │ │ └── paraview_wrapper.sh │ │ ├── py_etuples │ │ └── package.py │ │ ├── py_jupyterlmod │ │ └── package.py │ │ ├── pvfmm │ │ └── package.py │ │ ├── py_abopt │ │ └── package.py │ │ ├── py_logical_unification │ │ └── package.py │ │ ├── py_xarray_einstats │ │ └── package.py │ │ ├── py_runtests │ │ └── package.py │ │ ├── stkfmm │ │ └── package.py │ │ ├── py_bigfile │ │ └── package.py │ │ ├── triqs_maxent │ │ └── package.py │ │ ├── py_jupyter_remote_desktop_proxy │ │ └── package.py │ │ ├── triqs_omegamaxent_interface │ │ └── package.py │ │ ├── py_minikanren │ │ └── package.py │ │ ├── triqs_tprf │ │ └── package.py │ │ ├── py_pmesh │ │ └── package.py │ │ ├── triqs_dft_tools │ │ └── package.py │ │ ├── disbatch │ │ └── package.py │ │ ├── py_pfft_python │ │ └── package.py │ │ ├── py_pymc │ │ └── package.py │ │ ├── triqs_cthyb │ │ └── package.py │ │ ├── py_pytensor │ │ └── package.py │ │ ├── py_hdf5plugin │ │ └── package.py │ │ ├── rockstar │ │ ├── 0001-Fix-to-solve-linking-problem-with-gcc-10.patch │ │ └── package.py │ │ ├── idl │ │ └── package.py │ │ ├── triqs │ │ └── package.py │ │ ├── py_nbodykit │ │ └── package.py │ │ └── blender │ │ └── package.py ├── linkfiles.sh ├── lmod │ ├── modules.sh │ ├── modules.lua │ └── modules.nix ├── docker │ ├── builder.sh │ ├── setup.sh │ └── default.nix ├── env ├── sra-tools-javadoc.patch ├── openmpi-1.10-gcc.PATCH ├── python-ncursesw-py-3.11.4.patch 
├── py-extension-helpers-setup.py.patch ├── gcc-13.3-nvcc.patch ├── openmpi-2.1.6.PATCH ├── python-ncursesw.patch ├── openmpi-1.10.7.PATCH ├── r.nix ├── run └── README.md ├── spack ├── repo │ ├── repo.yaml │ └── packages │ │ ├── wecall │ │ ├── ncurses.patch │ │ ├── cmake-rhel-regex.patch │ │ └── package.py │ │ ├── mpv │ │ └── package.py │ │ ├── xscreensaver │ │ └── package.py │ │ ├── mplayer │ │ └── package.py │ │ ├── mupdf │ │ └── package.py │ │ └── libass │ │ └── package.py ├── install.sh ├── config.sh ├── bin.nix ├── cache.py ├── config.nix ├── bin.sh ├── modules.nix ├── builder.py ├── modules.py ├── nixpack.py └── generate.py ├── .gitignore ├── .gitmodules ├── jupyter └── kernel │ ├── ir │ ├── builder.sh │ ├── default.nix │ └── builder.R │ ├── bash │ ├── builder.sh │ └── default.nix │ ├── default.nix │ └── builder.py ├── patch ├── nix-ignore-fsea.patch ├── shadow-nosuid.patch ├── openssh-keysign-setuid.patch └── default.nix ├── nixpkgs ├── default.nix ├── stdenv.nix ├── sssd │ └── nss-client.nix ├── jdupes.nix └── overlay.nix ├── view ├── default.nix └── builder.py ├── COPYING ├── README.md └── default.nix /packs/capture.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -ue 2 | "$@" > $out 3 | -------------------------------------------------------------------------------- /fi/repo/repo.yaml: -------------------------------------------------------------------------------- 1 | repo: 2 | namespace: fi 3 | api: v2.0 4 | -------------------------------------------------------------------------------- /spack/repo/repo.yaml: -------------------------------------------------------------------------------- 1 | repo: 2 | namespace: nixpack 3 | api: v2.0 4 | -------------------------------------------------------------------------------- /fi/linkfiles.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | /bin/mkdir -p $out 3 | /bin/ln -s -t $out "$@" 4 | 
-------------------------------------------------------------------------------- /spack/install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | mkdir -p $out 3 | cp $src $out/nixpack.py 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /result 2 | /singularity 3 | /apptainer 4 | spackBin 5 | result 6 | result-* 7 | -------------------------------------------------------------------------------- /fi/lmod/modules.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -eu 2 | /bin/sed "s:@MODS@:$mods:g;s!@DATE@!`/bin/date`!g;s:@GIT@:$git:g" $src > $out 3 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "spack-packgaes"] 2 | path = spack-packages 3 | url = https://github.com/flatironinstitute/spack-packages 4 | branch = fi 5 | -------------------------------------------------------------------------------- /spack/config.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -eu 2 | 3 | mkdir -p $out 4 | for section in $sections ; do 5 | eval "echo \"\$$section\"" > $out/$section.yaml 6 | done 7 | -------------------------------------------------------------------------------- /jupyter/kernel/ir/builder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | HOME=$TMPDIR PATH=/bin:$pkg/bin:$jupyter/bin R_LIBS_SITE=$pkg/rlib/R/library $pkg/bin/R --vanilla -f $rBuilder 3 | -------------------------------------------------------------------------------- /jupyter/kernel/bash/builder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 
pypath=($pkg/lib/python*/site-packages) 3 | PYTHONPATH=$pypath $jupyter/bin/python -m bash_kernel.install --prefix $out 4 | -------------------------------------------------------------------------------- /fi/lmod/modules.lua: -------------------------------------------------------------------------------- 1 | whatis("Use modules build release from nixpack @GIT@ @DATE@") 2 | setenv("MODULEPATH_BASE", "@MODS@") 3 | prepend_path("MODULEPATH", "@MODS@/Core") 4 | add_property("lmod","sticky") 5 | -------------------------------------------------------------------------------- /jupyter/kernel/bash/default.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | { pkg 3 | , name ? pkg.name 4 | , jupyter 5 | }: 6 | 7 | derivation { 8 | inherit (packs) system; 9 | builder = ./builder.sh; 10 | inherit name pkg jupyter; 11 | } 12 | -------------------------------------------------------------------------------- /spack/bin.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | with packs; 3 | packs.spackBuilder { 4 | name = "nixpack-spack-bin.py"; 5 | builder = ./bin.sh; 6 | inherit spackNixLib spack; 7 | SPACK_PYTHON = spackPython; 8 | withRepos = true; 9 | } 10 | -------------------------------------------------------------------------------- /spack/cache.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | 3 | import os 4 | import nixpack 5 | import spack 6 | 7 | spack.config.set('config:misc_cache', os.environ['out'], 'nixpack') 8 | print("Prepopulating spack repo cache...") 9 | spack.repo.PATH.all_package_names() 10 | -------------------------------------------------------------------------------- /fi/docker/builder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | mkdir -p $out/bin 4 | tar -xzf $docker -C $out/bin --strip-components=1 5 | tar -xzf $rootless -C 
$out/bin --strip-components=1 6 | 7 | rm -f $out/bin/dockerd-rootless-setuptool.sh 8 | cp $setupsh $out/bin/dockerd-rootless-setup.sh 9 | -------------------------------------------------------------------------------- /spack/config.nix: -------------------------------------------------------------------------------- 1 | packs: config: derivation ({ 2 | inherit (packs.prefs) system; 3 | name = "spackConfig"; 4 | builder = ./config.sh; 5 | sections = builtins.attrNames config; 6 | } // builtins.mapAttrs (n: v: builtins.toJSON { "${n}" = v; }) config 7 | // packs.spackEnv) 8 | -------------------------------------------------------------------------------- /packs/lmodCache.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | src: 3 | derivation (packs.spackEnv // { 4 | inherit (packs) system; 5 | name = "lmodCache"; 6 | builder = ./lmodCache.sh; 7 | lmod = packs.pkgs.lmod; 8 | MODULEPATH = "${src}/${packs.platform}-${packs.os}-${packs.target}/Core"; 9 | }) 10 | -------------------------------------------------------------------------------- /jupyter/kernel/ir/default.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | { pkg 3 | , name ? 
pkg.name 4 | , jupyter 5 | }: 6 | 7 | derivation { 8 | inherit (packs) system; 9 | builder = ./builder.sh; 10 | rBuilder = ./builder.R; 11 | name = "jupyter-kernel-ir-${name}"; 12 | inherit pkg jupyter; 13 | } 14 | -------------------------------------------------------------------------------- /packs/lmodCache.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | $lmod/lmod/lmod/libexec/update_lmod_system_cache_files -d $out/cacheDir -t $out/cacheTS.txt $MODULEPATH 3 | cat > $out/lmodrc.lua < $out 24 | chmod +x $out 25 | -------------------------------------------------------------------------------- /spack/repo/packages/mpv/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.waf import WafPackage 2 | from spack.package import * 3 | 4 | class Mpv(WafPackage): 5 | """MPV media player""" 6 | 7 | homepage = "https://mpv.io" 8 | url = "https://github.com/mpv-player/mpv/archive/refs/tags/v0.33.1.tar.gz" 9 | 10 | maintainers = ['alexdotc'] 11 | 12 | version('0.33.1', sha256='100a116b9f23bdcda3a596e9f26be3a69f166a4f1d00910d1789b6571c46f3a9') 13 | 14 | depends_on('libass') 15 | depends_on('ffmpeg') 16 | 17 | @run_before('configure') 18 | def get_waf(self): 19 | python('bootstrap.py') 20 | -------------------------------------------------------------------------------- /fi/openmpi-1.10-gcc.PATCH: -------------------------------------------------------------------------------- 1 | diff -up ./ompi/mca/mtl/psm/mtl_psm_types.h.ORIG ./ompi/mca/mtl/psm/mtl_psm_types.h 2 | --- ./ompi/mca/mtl/psm/mtl_psm_types.h.ORIG 2016-08-24 10:54:57.000000000 -0400 3 | +++ ./ompi/mca/mtl/psm/mtl_psm_types.h 2021-07-07 11:50:26.362739893 -0400 4 | @@ -70,7 +70,7 @@ 5 | }; 6 | typedef struct mca_mtl_psm_component_t mca_mtl_psm_component_t; 7 | 8 | -OMPI_DECLSPEC mca_mtl_psm_component_t mca_mtl_psm_component; 9 | +OMPI_DECLSPEC extern mca_mtl_psm_component_t 
mca_mtl_psm_component; 10 | 11 | #define PSM_MAKE_MQTAG(ctxt,rank,utag) \ 12 | ( (((ctxt)&0xffffULL)<<48)| (((rank)&0xffffULL)<<32)| \ 13 | -------------------------------------------------------------------------------- /fi/repo/packages/py_pyslurm/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.python import PythonPackage 2 | from spack.package import * 3 | 4 | 5 | class PyPyslurm(PythonPackage): 6 | """multiplicatively convolutional fast integral transforms""" 7 | 8 | homepage = "https://github.com/PySlurm/pyslurm" 9 | url = "https://github.com/PySlurm/pyslurm/archive/refs/tags/v24.11.0.tar.gz" 10 | 11 | version("24.11.0", sha256="77d97c42bf3639f4babdfcbaa7e674351d974e10d85a7d0015cafd342b15f769") 12 | 13 | depends_on("py-setuptools", type="build") 14 | depends_on("slurm", type=("build", "run")) 15 | depends_on("py-cython", type="build") 16 | -------------------------------------------------------------------------------- /patch/nix-ignore-fsea.patch: -------------------------------------------------------------------------------- 1 | --- nix-2.0/src/libstore/local-store.cc 1969-12-31 16:00:01.000000000 -0800 2 | +++ nix-2.0-patch/src/libstore/local-store.cc 2018-02-27 06:58:16.979381054 -0800 3 | @@ -463,6 +463,8 @@ 4 | /* Ignore SELinux security labels since these cannot be 5 | removed even by root. 
*/ 6 | if (eaName == "security.selinux") continue; 7 | + if (eaName == "lustre.lov") continue; 8 | + if (eaName == "system.nfs4_acl") continue; 9 | if (lremovexattr(path.c_str(), eaName.c_str()) == -1) 10 | throw SysError("removing extended attribute '%s' from '%s'", eaName, path); 11 | } 12 | -------------------------------------------------------------------------------- /fi/repo/packages/triqs_hubbardi/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class TriqsHubbardi(CMakePackage): 5 | """TRIQS: Hubbard I solver""" 6 | 7 | homepage = "https://triqs.github.io/hubbardI" 8 | url = "https://github.com/TRIQS/hubbardI/archive/refs/tags/3.3.0.tar.gz" 9 | 10 | version('3.3.0', sha256='374fb0d7c5a52f9bda3763cb6910a9bdeb8f2e3d1494dfe1024f3e0be098edf6') 11 | 12 | # TRIQS Dependencies 13 | depends_on('cmake', type='build') 14 | depends_on('mpi', type=('build', 'link')) 15 | depends_on('triqs', type=('build', 'link')) 16 | depends_on('python@3.7:', type=('build', 'link', 'run')) 17 | extends('python') 18 | -------------------------------------------------------------------------------- /spack/repo/packages/wecall/cmake-rhel-regex.patch: -------------------------------------------------------------------------------- 1 | diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt 2 | index 411e10d..6d82b94 100644 3 | --- a/cpp/CMakeLists.txt 4 | +++ b/cpp/CMakeLists.txt 5 | @@ -450,7 +450,7 @@ install(FILES ${CMAKE_CURRENT_BINARY_DIR}/weCall-userguide.pdf 6 | if(${DISTRIBUTOR_ID} MATCHES "Ubuntu" OR ${DISTRIBUTOR_ID} MATCHES "Debian") 7 | SET(CPACK_GENERATOR "DEB") 8 | ELSEIF(${DISTRIBUTOR_ID} MATCHES "RedHat" OR ${DISTRIBUTOR_ID} MATCHES "CentOS") 9 | - string(REGEX MATCH "[0-9]*" RH_VERSION ${DISTRIBUTOR_RELEASE}) 10 | + string(REGEX MATCH "[0-9]+" RH_VERSION ${DISTRIBUTOR_RELEASE}) 11 | SET(DISTRIBUTION_CODENAME 
"el${RH_VERSION}") 12 | SET(CPACK_GENERATOR "RPM") 13 | ELSE() 14 | -- 15 | -------------------------------------------------------------------------------- /nixpkgs/default.nix: -------------------------------------------------------------------------------- 1 | { system ? builtins.currentSystem 2 | , target ? builtins.head (builtins.split "-" system) 3 | , src ? {} 4 | , overlays ? [] 5 | }: 6 | 7 | let 8 | 9 | nixpkgs = fetchGit ({ 10 | url = "https://github.com/NixOS/nixpkgs"; 11 | ref = "master"; 12 | } // src); 13 | 14 | args = { 15 | localSystem = { 16 | inherit system; 17 | gcc = { arch = target; }; 18 | }; 19 | config = { 20 | replaceStdenv = import ./stdenv.nix; 21 | nix = { 22 | storeDir = builtins.getEnv "NIX_STORE_DIR"; 23 | stateDir = builtins.getEnv "NIX_STATE_DIR"; 24 | }; 25 | allowUnfree = true; 26 | cudaSupport = true; 27 | }; 28 | overlays = [(import ./overlay.nix)] ++ overlays; 29 | }; 30 | 31 | in 32 | 33 | import nixpkgs args 34 | -------------------------------------------------------------------------------- /fi/repo/packages/py_sharedmem/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PySharedmem(PythonPackage): 11 | """A different flavor of multiprocessing in Python""" 12 | 13 | homepage = "https://github.com/rainwoodman/sharedmem" 14 | pypi = "sharedmem/sharedmem-0.3.8.tar.gz" 15 | 16 | version("0.3.8", sha256="c654a6bee2e2f35c82e6cc8b6c262fcabd378f5ba11ac9ef71530f8dabb8e2f7") 17 | 18 | depends_on("py-setuptools", type="build") 19 | 20 | depends_on("py-numpy", type=("build", "run")) 21 | -------------------------------------------------------------------------------- /fi/repo/packages/triqs_hartree_fock/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class TriqsHartreeFock(CMakePackage): 5 | """TRIQS: Hartree-Fock lattice and impurity solvers based on the TRIQS library""" 6 | 7 | homepage = "https://triqs.github.io/hartree_fock" 8 | url = "https://github.com/TRIQS/hartree_fock/archive/refs/tags/3.3.0.tar.gz" 9 | 10 | version('3.3.0', sha256='4ed9d5637d5a82b113036a1e862f88ac79f9628fb07dc93f8299a5c9c9a471dc') 11 | 12 | # TRIQS Dependencies 13 | depends_on('cmake', type='build') 14 | depends_on('mpi', type=('build', 'link')) 15 | depends_on('triqs', type=('build', 'link')) 16 | depends_on('python@3.7:', type=('build', 'link', 'run')) 17 | extends('python') 18 | -------------------------------------------------------------------------------- /spack/repo/packages/xscreensaver/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.autotools import AutotoolsPackage 2 | from spack.package import * 3 | 4 | 5 | class Xscreensaver(AutotoolsPackage): 6 | """Xscreensaver package""" 7 | 8 | homepage = "https://www.jwz.org" 9 | url = 
"https://www.jwz.org/xscreensaver/xscreensaver-6.01.tar.gz" 10 | 11 | maintainers = ['alexdotc'] 12 | 13 | version('6.01', sha256='085484665d91f60b4a1dedacd94bcf9b74b0fb096bcedc89ff1c245168e5473b') 14 | 15 | @run_before('configure') 16 | def fix_GTK_paths(self): 17 | filter_file(r'(@GTK_DATADIR@)|(@PO_DATADIR@)', '@datadir@', 18 | 'driver/Makefile.in', 'po/Makefile.in.in') 19 | 20 | 21 | def configure_args(self): 22 | args = ['--with-app-defaults='+prefix.share] 23 | return args 24 | -------------------------------------------------------------------------------- /fi/repo/packages/py_kvsstcp/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PyKvsstcp(PythonPackage): 11 | """Key value storage server""" 12 | 13 | homepage = "https://github.com/flatironinstitute/kvsstcp" 14 | url = "https://github.com/flatironinstitute/kvsstcp/archive/refs/tags/1.1.tar.gz" 15 | 16 | version('1.2', sha256='022ac2c03234dc9e3a921edf6015caa246fa7faf3ec0bf70511fc1bc94036cf5') 17 | version('1.1', sha256='c2ffc1077055626610995d71bad9028da03181a3e4c89a3c0eda0c9db8d06fe5') 18 | 19 | depends_on('py-setuptools', type='build') 20 | -------------------------------------------------------------------------------- /fi/repo/packages/py_cons/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PyCons(PythonPackage): 11 | """An implementation of Lisp/Scheme-like cons in Python.""" 12 | 13 | homepage = "https://github.com/pythological/python-cons" 14 | pypi = "cons/cons-0.4.5.tar.gz" 15 | 16 | version("0.4.5", sha256="b46b48adb5a5af7f44375da346d926e55a325d4dc12b9add9f20280d3b3742cb") 17 | 18 | depends_on("python@3.6:", type=("build", "run")) 19 | 20 | depends_on("py-setuptools", type="build") 21 | depends_on("py-logical-unification@0.4.0:", type=("build","run")) 22 | -------------------------------------------------------------------------------- /spack/modules.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | { name ? "modules" 3 | , modtype ? "lmod" /* lmod or tcl */ 4 | , config ? {} 5 | , pkgs /* packages to include, list of: 6 | pkg (spack derivation) 7 | { pkg = pkg; default = true; } (for default module) 8 | { pkg = pkg; environment = { ... }; projection = "{name}/{version}"; } (overrides config) 9 | { name = "name"; static = "content"; } 10 | { name = "name"; static = { template variables }; } 11 | */ 12 | , coreCompilers ? 
[packs.pkgs.c] 13 | }: 14 | let 15 | jsons = { 16 | inherit config pkgs coreCompilers; 17 | }; 18 | in 19 | packs.spackBuilder ({ 20 | args = [./modules.py]; 21 | inherit name modtype; 22 | withRepos = true; 23 | enableParallelBuilding = false; # broken in some cases 24 | } // builtins.mapAttrs (name: builtins.toJSON) jsons // { 25 | passAsFile = builtins.attrNames jsons; 26 | }) // jsons 27 | -------------------------------------------------------------------------------- /fi/repo/packages/py_websockify/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.python import PythonPackage 2 | from spack.package import * 3 | 4 | class PyWebsockify(PythonPackage): 5 | """websockify: WebSockets support for any application/server""" 6 | 7 | homepage = "https://github.com/novnc/websockify" 8 | #git = "https://github.com/novnc/websockify.git" 9 | git = "https://github.com/dylex/websockify.git" 10 | 11 | version("master", branch="master", commit="3b1fee6b91bf7e36a55e5b9e0a444b7364063ce2") 12 | version("0.11.0", tag="v0.11.0", commit="e817fbdb1f06443fddd982c30434662277ab94f7") 13 | 14 | depends_on("py-setuptools", type="build") 15 | depends_on("py-numpy", type=("build", "run")) 16 | 17 | @run_after("install") 18 | def install_rebind(self): 19 | make('rebind.so') 20 | copy('rebind.so', self.prefix.lib) 21 | -------------------------------------------------------------------------------- /fi/repo/packages/py_cachey/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PyCachey(PythonPackage): 11 | """Caching based on computation time and storage space""" 12 | 13 | homepage = "https://github.com/dask/cachey" 14 | pypi = "cachey/cachey-0.2.1.tar.gz" 15 | 16 | version("0.2.1", sha256="0310ba8afe52729fa7626325c8d8356a8421c434bf887ac851e58dcf7cf056a6") 17 | 18 | depends_on("py-setuptools", type="build") 19 | 20 | depends_on("py-heapdict", type=("build", "run")) 21 | 22 | depends_on("py-pytest", type=("test")) 23 | depends_on("py-pytest-runner", type=("test")) 24 | -------------------------------------------------------------------------------- /fi/repo/packages/py_mcfit/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PyMcfit(PythonPackage): 11 | """multiplicatively convolutional fast integral transforms""" 12 | 13 | homepage = "https://github.com/eelregit/mcfit" 14 | pypi = "mcfit/mcfit-0.0.18.tar.gz" 15 | 16 | version("0.0.18", sha256="2d2564b4f511c7101caf1d06947927140ef2068175a42c966d0844c7ddb9914c") 17 | 18 | depends_on("py-setuptools", type="build") 19 | 20 | depends_on("py-numpy", type=("build", "run")) 21 | depends_on("py-scipy", type=("build", "run")) 22 | depends_on("py-mpmath", type=("build", "run")) 23 | -------------------------------------------------------------------------------- /patch/shadow-nosuid.patch: -------------------------------------------------------------------------------- 1 | diff -ru src.orig/src/Makefile.am src/src/Makefile.am 2 | --- src.orig/src/Makefile.am 2020-01-12 08:19:28.000000000 -0500 3 | +++ src/src/Makefile.am 2021-08-12 21:23:09.870336130 -0400 4 | @@ -4,8 +4,8 @@ 5 | 6 | ubindir = ${prefix}/bin 7 | usbindir = ${prefix}/sbin 8 | -suidperms = 4755 9 | -sgidperms = 2755 10 | +suidperms = 0755 11 | +sgidperms = 0755 12 | 13 | AM_CPPFLAGS = \ 14 | -I${top_srcdir}/lib \ 15 | diff -ru src.orig/src/Makefile.in src/src/Makefile.in 16 | --- src.orig/src/Makefile.in 2020-01-23 15:57:50.000000000 -0500 17 | +++ src/src/Makefile.in 2021-08-12 21:23:15.454370068 -0400 18 | @@ -557,8 +557,8 @@ 19 | 20 | ubindir = ${prefix}/bin 21 | usbindir = ${prefix}/sbin 22 | -suidperms = 4755 23 | -sgidperms = 2755 24 | +suidperms = 0755 25 | +sgidperms = 0755 26 | AM_CPPFLAGS = \ 27 | -I${top_srcdir}/lib \ 28 | -I$(top_srcdir)/libmisc \ 29 | -------------------------------------------------------------------------------- /fi/repo/packages/triqs_ctseg/package.py: -------------------------------------------------------------------------------- 1 | from 
spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class TriqsCtseg(CMakePackage): 5 | """A segment picture impurity solver with spin-spin interactions. """ 6 | 7 | homepage = "https://triqs.github.io/ctseg" 8 | url = "https://github.com/TRIQS/ctseg/archive/refs/tags/3.3.0.tar.gz" 9 | 10 | version('3.3.0', sha256='2fc8c358e339b22d40c7d8b8b60f2a6f61bce786045f3fe0831b86447e2e9c8f') 11 | 12 | # TRIQS Dependencies 13 | depends_on('cmake', type='build') 14 | depends_on('mpi', type=('build', 'link')) 15 | depends_on('nfft', type=('build', 'link')) 16 | depends_on('triqs', type=('build', 'link')) 17 | depends_on('python@3.7:', type=('build', 'link', 'run')) 18 | variant('complex', default=False, description='Build with complex number support') 19 | extends('python') 20 | -------------------------------------------------------------------------------- /fi/repo/packages/paraview/paraview_wrapper.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | pvpython_vers=$(pvpython -c 'import platform; print(".".join(platform.python_version_tuple()[0:2]))') 4 | python_vers=$(python3 -c 'import platform; print(".".join(platform.python_version_tuple()[0:2]))') 5 | 6 | if test "$pvpython_vers" != "$python_vers"; then 7 | echo "Python3 version and paraview python version don't match. Not loading extra python libs into paraview..." 
8 | echo "Load default python module and optional relevant virtual environment to extend paraview" 9 | else 10 | export PYTHONPATH=$(python3 < 0: 7 | main_package_dir = min(packages, key=len) 8 | src_path = os.path.join(os.path.dirname(__file__), 'src') 9 | - shutil.copy(os.path.join(src_path, 'compiler.c'), 10 | - os.path.join(srcdir, main_package_dir, '_compiler.c')) 11 | + dst_file = os.path.join(srcdir, main_package_dir, '_compiler.c') 12 | + try: 13 | + # remove dst_file in case it exists but is read-only 14 | + os.remove(dst_file) 15 | + except FileNotFoundError: 16 | + pass 17 | + shutil.copy(os.path.join(src_path, 'compiler.c'), dst_file) 18 | ext = Extension(main_package_dir + '.compiler_version', 19 | [os.path.join(main_package_dir, '_compiler.c')]) 20 | ext_modules.append(ext) 21 | -------------------------------------------------------------------------------- /spack/repo/packages/wecall/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.makefile import MakefilePackage 2 | from spack.package import * 3 | 4 | class Wecall(MakefilePackage): 5 | """Fast, accurate and simple to use command line tool for variant detection in NGS data. 
""" 6 | 7 | url = "https://github.com/Genomicsplc/wecall/archive/refs/tags/v2.0.0.tar.gz" 8 | 9 | version('2.0.0', sha256='c67cc7ca686432e4438ceb9160f698394e4d21734baa97bc3fc781065d59b410') 10 | 11 | patch('cmake-rhel-regex.patch') 12 | patch('ncurses.patch') 13 | 14 | depends_on('ncurses') 15 | depends_on('zlib') 16 | depends_on('boost+regex+test') 17 | depends_on('cmake', type='build') 18 | depends_on('texlive', type='build') 19 | depends_on('python', type='build') 20 | 21 | def install(self, spec, prefix): 22 | doc = join_path(prefix, 'share/doc/wecall') 23 | bin = join_path(prefix, 'bin') 24 | mkdirp(doc) 25 | mkdirp(bin) 26 | with working_dir(join_path(self.stage.source_path, 'build')): 27 | copy("weCall", bin) 28 | copy("weCall-userguide.pdf", doc) 29 | -------------------------------------------------------------------------------- /fi/repo/packages/stkfmm/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class Stkfmm(CMakePackage): 5 | """A C++ library implements the Kernel Aggregated Fast Multipole Method based on the library PVFMM.""" 6 | 7 | homepage = "https://github.com/flatironinstitute/stkfmm" 8 | git = "https://github.com/flatironinstitute/stkfmm.git" 9 | 10 | maintainers = ['blackwer', 'wenyan4work'] 11 | version('1.1.0', commit='56bfce38397b19a245cca2a1a8c47a221aa2da40') 12 | depends_on('blas', type=('build', 'link')) 13 | depends_on('mpi', type=('build', 'link')) 14 | depends_on('eigen', type=('build')) 15 | depends_on('fftw-api@3', type=('build', 'link')) 16 | depends_on('pvfmm+extended_bc', type=('build', 'link')) 17 | 18 | variant('python', True) 19 | 20 | def cmake_args(self): 21 | cxx_flags = '-g' 22 | options = [] 23 | if '+python' in self.spec: 24 | options.append(self.define('PyInterface', True)) 25 | options.append(self.define('CMAKE_CXX_FLAGS', cxx_flags)) 26 | 27 | return options 28 | 
-------------------------------------------------------------------------------- /fi/gcc-13.3-nvcc.patch: -------------------------------------------------------------------------------- 1 | Reverts https://github.com/gcc-mirror/gcc/commit/2b3ecdf4fb13471b69d80583e10c5baedfe84d7c 2 | for compatibility with nvcc 3 | https://forums.developer.nvidia.com/t/compilation-errors-with-gcc-versions-11-14-and-cuda-toolkit-12-5-12-6-due-to-undefined-builtin-ia32-ldtilecfg-and-builtin-ia32-sttilecfg-etc/308401 4 | 5 | diff --git a/gcc/config/i386/amxtileintrin.h b/gcc/config/i386/amxtileintrin.h 6 | index cc6022657a87f..2ee7b6bad2bf3 100644 7 | --- a/gcc/config/i386/amxtileintrin.h 8 | +++ b/gcc/config/i386/amxtileintrin.h 9 | @@ -39,14 +39,14 @@ extern __inline void 10 | __attribute__((__gnu_inline__, __always_inline__, __artificial__)) 11 | _tile_loadconfig (const void *__config) 12 | { 13 | - __builtin_ia32_ldtilecfg (__config); 14 | + __asm__ volatile ("ldtilecfg\t%X0" :: "m" (*((const void **)__config))); 15 | } 16 | 17 | extern __inline void 18 | __attribute__((__gnu_inline__, __always_inline__, __artificial__)) 19 | _tile_storeconfig (void *__config) 20 | { 21 | - __builtin_ia32_sttilecfg (__config); 22 | + __asm__ volatile ("sttilecfg\t%X0" : "=m" (*((void **)__config))); 23 | } 24 | 25 | extern __inline void 26 | -------------------------------------------------------------------------------- /fi/repo/packages/py_bigfile/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PyBigfile(PythonPackage): 11 | """A reproducible massively parallel IO library for hierarchical data""" 12 | 13 | homepage = "https://github.com/rainwoodman/bigfile" 14 | pypi = "bigfile/bigfile-0.1.51.tar.gz" 15 | 16 | version("0.1.51", sha256="1fad962defc7a5dff2965025dff9a3efa23594e1c2300de0c9a43940d4717b65") 17 | 18 | variant("mpi", default=True, description="MPI support") 19 | 20 | depends_on("py-setuptools", type="build") 21 | 22 | depends_on("py-cython", type=("build", "run")) 23 | depends_on("py-numpy", type=("build", "run")) 24 | 25 | depends_on("mpi", when="+mpi") 26 | depends_on("py-mpi4py", type=("build", "run"), when="+mpi") 27 | 28 | def patch(self): 29 | # removing cythonized file from sdist 30 | remove('bigfile/pyxbigfile.c') 31 | -------------------------------------------------------------------------------- /fi/repo/packages/triqs_maxent/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class TriqsMaxent(CMakePackage): 5 | """TRIQS: modular Maximum Entropy program to perform analytic continuation.""" 6 | 7 | homepage = "https://triqs.github.io/maxent" 8 | url = "https://github.com/TRIQS/maxent/archive/refs/tags/3.3.0.tar.gz" 9 | 10 | version('3.3.0', sha256='e28507093a9a51466d64a83ccc2686b35b78091fe44616d4d1f66a54fbd841c1') 11 | version('1.2.0', sha256='41be8c4233df47c7c4454bce9b611d0dc8fb117778a5c4f7352ebf6bd7b9ac77') 12 | version('1.1.1', sha256='b0e00bcd5e8b143faf23d47225c53b8ceec36537ce4a97fe725874e7e9214289') 13 | version('1.1.0', sha256='87523adabdfe0c6d0a1fd84bdc1b4bceed64361adde922809d85e19c155e4c68') 14 | version('1.0.0', sha256='798383792902b5085ec3da01ddd2866fa337037bfdffe1df42475624fe0cb1a8') 15 | 16 | # 
TRIQS Dependencies 17 | depends_on('cmake', type='build') 18 | depends_on('mpi', type=('build', 'link')) 19 | depends_on('triqs', type=('build', 'link')) 20 | depends_on('python@3.7:', type=('build', 'link', 'run')) 21 | extends('python') 22 | -------------------------------------------------------------------------------- /fi/repo/packages/py_jupyter_remote_desktop_proxy/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.python import PythonPackage 2 | from spack.package import * 3 | 4 | class PyJupyterRemoteDesktopProxy(PythonPackage): 5 | """Jupyter Remote Desktop Proxy""" 6 | 7 | homepage = "https://github.com/jupyterhub/jupyter-remote-desktop-proxy" 8 | pypi = "jupyter-remote-desktop-proxy/jupyter-remote-desktop-proxy-1.2.1.tar.gz" 9 | git = "https://github.com/flatironinstitute/jupyter-remote-desktop-proxy" 10 | 11 | version("main", branch="main", commit="91cf78ae8b187ad5cad8125fe576694a97d8961d") 12 | version("1.2.1", sha256="8adf71303e653360653c7dc5b9c1a836a239ab3fb2884d3259846046f6b82bda") 13 | 14 | depends_on("py-setuptools", type="build") 15 | depends_on("py-jupyter-server", type="build") 16 | depends_on("py-jupyter-server-proxy", type="run") 17 | depends_on("npm", type="build") 18 | 19 | @run_after('install') 20 | def enable(self): 21 | jupyter = which("jupyter-server") 22 | jupyter("extension", "enable", "--user", "jupyter_remote_desktop_proxy", 23 | extra_env={'JUPYTER_CONFIG_DIR': self.prefix.etc.jupyter}) 24 | -------------------------------------------------------------------------------- /fi/repo/packages/triqs_omegamaxent_interface/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class TriqsOmegamaxentInterface(CMakePackage): 5 | """TRIQS: python interface to the maximum entropy analytic continuation program 
OmegaMaxEnt""" 6 | 7 | homepage = "https://triqs.github.io/omegamaxent_interface" 8 | url = "https://github.com/TRIQS/omegamaxent_interface/archive/refs/tags/3.3.0.tar.gz" 9 | 10 | version('3.3.0', sha256='8637ba25408ccd27dd6ed79b6dade38f95862852ef22871bf87875328074debf') 11 | version('3.1.0', sha256='1a77080314a0e448379180337b572af2fb20fcb6d50312588d4532d0938f81c8') 12 | version('3.0.0', sha256='fef80d36bea614820bdb2fa650ff545d6099cd4c478276a96d0ff30ed8844338') 13 | 14 | # TRIQS Dependencies 15 | depends_on('cmake', type='build') 16 | depends_on('mpi', type=('build', 'link')) 17 | depends_on('triqs', type=('build', 'link')) 18 | depends_on('python@3.7:', type=('build', 'link', 'run')) 19 | depends_on('blas', type=('build', 'link', 'run')) 20 | depends_on('fftw', type=('build', 'link', 'run')) 21 | depends_on('gsl', type=('build', 'link', 'run')) 22 | extends('python') 23 | -------------------------------------------------------------------------------- /fi/repo/packages/py_minikanren/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PyMinikanren(PythonPackage): 11 | """An extensible, lightweight relational/logic programming DSL written in pure Python""" 12 | 13 | homepage = "https://github.com/pythological/kanren" 14 | pypi = "miniKanren/miniKanren-1.0.3.tar.gz" 15 | 16 | version("1.0.3", sha256="1ec8bdb01144ad5e8752c7c297fb8a122db920f859276d25a72d164e998d7f6e") 17 | 18 | depends_on("python@3.6:", type=("build", "run")) 19 | 20 | depends_on("py-setuptools", type="build") 21 | 22 | depends_on("py-toolz", type=("build", "run")) 23 | depends_on("py-cons@0.4.0:", type=("build", "run")) 24 | depends_on("py-multipledispatch", type=("build", "run")) 25 | depends_on("py-etuples@0.3.1:", type=("build", "run")) 26 | depends_on("py-logical-unification@0.4.1:", type=("build", "run")) 27 | depends_on("py-typing-extensions", type=("build", "run")) 28 | -------------------------------------------------------------------------------- /fi/repo/packages/triqs_tprf/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class TriqsTprf(CMakePackage): 5 | """TRIQS: Two-Particle Response Function toolbox""" 6 | 7 | homepage = "https://triqs.github.io/tprf" 8 | url = "https://github.com/TRIQS/tprf/archive/refs/tags/3.3.1.tar.gz" 9 | 10 | version('3.3.1', sha256='4a39648629888ec07c1a5c333ae5aa85ac04a46a7ec3b5d5d588cfe5246b7572') 11 | version('3.3.0', sha256='e703b490873293efa2835147c73b19b90860fe2e2d8b1ec9da79c8b602512b10') 12 | version('3.2.1', sha256='f1d4dd5986af4b37dc65f3af2a0be507455f0b4a74ea7d4de892739ccd86158c') 13 | version('3.1.1', sha256='63d4de9cfc3daf0d74db45cfa7445b817fd22a38a8485db3ce9a81febe263b50') 14 | version('3.1.0', 
sha256='75f6e79d891342951652353ea4d9914074d9947c67cad60844ebaa3f82bd17b5') 15 | version('3.0.0', sha256='8e20620145bb8cbcc187f4637884457c0cacaed79ba6e1709a951046ee5ffc4b') 16 | 17 | # TRIQS Dependencies 18 | depends_on('cmake', type='build') 19 | depends_on('mpi', type=('build', 'link')) 20 | depends_on('triqs', type=('build', 'link')) 21 | depends_on('python@3.7:', type=('build', 'link', 'run')) 22 | extends('python') 23 | -------------------------------------------------------------------------------- /fi/repo/packages/py_pmesh/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | 11 | class PyPmesh(PythonPackage): 12 | """Particle Mesh in Python""" 13 | 14 | homepage = "https://github.com/rainwoodman/pmesh" 15 | 16 | version("0.1.56-7-g6fe8b2d", 17 | url='https://github.com/rainwoodman/pmesh/tarball/6fe8b2da4a3fd408517ff16698da0eac05b8cd13', 18 | sha256="65ab0a89f894f6a41059cc07307c6d98fe0946ae3a3fd45e2b697094f8f3aa5f", 19 | ) 20 | 21 | variant("abopt", default=False, description="Add support for abopt") 22 | 23 | depends_on("py-setuptools", type="build") 24 | depends_on("py-cython", type=("build", "run")) 25 | depends_on("py-numpy", type=("build", "run")) 26 | depends_on("mpi") 27 | depends_on("py-mpi4py", type=("build", "run")) 28 | depends_on("py-mpsort", type=("build", "run")) 29 | depends_on("py-pfft-python", type=("build", "run")) 30 | 31 | depends_on('py-abopt', type=("build", "run"), when="+abopt") 32 | -------------------------------------------------------------------------------- /fi/repo/packages/triqs_dft_tools/package.py: 
-------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class TriqsDftTools(CMakePackage): 5 | """TRIQS: interface to DFT codes for ab initio DMFT calculations (DFTTools)""" 6 | 7 | homepage = "https://triqs.github.io/dft_tools" 8 | url = "https://github.com/TRIQS/dft_tools/archive/refs/tags/3.3.1.tar.gz" 9 | 10 | version('3.3.1', sha256='f4a9b9d44769fb7fe06892640217a915d133845169eae2fe450a41974c4b47c3') 11 | version('3.3.0', sha256='30dca657d491d8265aaffdccd1563b0d749460592ac5a86bc7d8fbfe84d4f02a') 12 | version('3.2.1', sha256='772d9326056faa3afc5a6d4ea04bdf8d18359bab518db29f68b1c2136c34b7d3') 13 | version('3.2.0', sha256='77d89bc5c9a36636a720b6cae78967cd6dd83d0018c854a68bef91219a456307') 14 | version('3.1.0', sha256='57b7d0fe5a96c5a42bb684c60ca8e136a33e1385bf6cd7e9d1371fa507dc2ec4') 15 | version('3.0.0', sha256='646d1d2dca5cf6ad90e18d0706124f701aa94ec39c5236d8fcf36dc5c628a3f6') 16 | 17 | # TRIQS Dependencies 18 | depends_on('cmake', type='build') 19 | depends_on('mpi', type=('build', 'link')) 20 | depends_on('triqs', type=('build', 'link')) 21 | depends_on('python@3.7:', type=('build', 'link', 'run')) 22 | extends('python') 23 | -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | Copyright (c) 2021 Dylan Simon, Flatiron Institute 2 | Portions copyright (c) 2003-2021 Eelco Dolstra and the Nixpkgs/NixOS contributors 3 | Portions copyright (c) 2013-2020 LLNS, LLC and other Spack Project Developers. 
4 | 5 | Permission is hereby granted, free of charge, to any person obtaining 6 | a copy of this software and associated documentation files (the 7 | "Software"), to deal in the Software without restriction, including 8 | without limitation the rights to use, copy, modify, merge, publish, 9 | distribute, sublicense, and/or sell copies of the Software, and to 10 | permit persons to whom the Software is furnished to do so, subject to 11 | the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 20 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 22 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /spack/repo/packages/mplayer/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.autotools import AutotoolsPackage 2 | from spack.package import * 3 | 4 | class Mplayer(AutotoolsPackage): 5 | """MPlayer is a movie player which runs on many systems (see the documentation). 6 | It plays most MPEG/VOB, AVI, Ogg/OGM, VIVO, ASF/WMA/WMV, QT/MOV/MP4, RealMedia, 7 | Matroska, NUT, NuppelVideo, FLI, YUV4MPEG, FILM, RoQ, PVA files, supported by many 8 | native, XAnim, and Win32 DLL codecs. 
You can watch VideoCD, SVCD, DVD, 3ivx, 9 | DivX 3/4/5, WMV and even H.264 movies.""" 10 | 11 | homepage = "https://www.mplayerhq.hu" 12 | url = "http://www.mplayerhq.hu/MPlayer/releases/MPlayer-1.4.tar.xz" 13 | 14 | version('1.4', sha256='82596ed558478d28248c7bc3828eb09e6948c099bbd76bb7ee745a0e3275b548') 15 | version('1.3.0', sha256='3ad0846c92d89ab2e4e6fb83bf991ea677e7aa2ea775845814cbceb608b09843') 16 | version('1.2.1', sha256='831baf097d899bdfcdad0cb80f33cc8dff77fa52cb306bee5dee6843b5c52b5f') 17 | version('1.2', sha256='ffe7f6f10adf2920707e8d6c04f0d3ed34c307efc6cd90ac46593ee8fba2e2b6') 18 | version('1.1.1', sha256='ce8fc7c3179e6a57eb3a58cb7d1604388756b8a61764cc93e095e7aff3798c76') 19 | version('1.1', sha256='76cb47eadb52b420ca028276ebd8112114ad0ab3b726af60f07fb2f39dae6c9c') 20 | -------------------------------------------------------------------------------- /fi/repo/packages/disbatch/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | import os 9 | 10 | class Disbatch(PythonPackage): 11 | """Distributed processing of a batch of tasks""" 12 | 13 | homepage = "https://github.com/flatironinstitute/disBatch" 14 | git = "https://github.com/flatironinstitute/disBatch.git" 15 | 16 | version('2.5', tag='2.5', commit='abee40342f1ecb5e9b801744d860b5b1414d4b2c', submodules=True) 17 | version('2.0', tag='2.0', submodules=True) 18 | version('1.4', tag='1.4', submodules=True) 19 | 20 | depends_on('py-setuptools', type='build', when='@2:') 21 | depends_on('py-kvsstcp', type='run', when='@:2.0') 22 | 23 | @run_after('install') 24 | def create_symlink(self): 25 | if self.spec.satisfies('@1'): 26 | script_source = os.path.join(self.prefix.bin, 'disBatch.py') 27 | script_dest = os.path.join(self.prefix.bin, 'disBatch') 28 | os.symlink(script_source, script_dest) 29 | 30 | script = Executable(script_source) 31 | script('--fix-paths') 32 | -------------------------------------------------------------------------------- /fi/repo/packages/py_pfft_python/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PyPfftPython(PythonPackage): 11 | """python binding of PFFT, a massively parallel FFT library""" 12 | 13 | homepage = "https://github.com/rainwoodman/pfft-python" 14 | pypi = "pfft-python/pfft-python-0.1.21.tar.gz" 15 | 16 | version("0.1.21", sha256="2c5bf26170dffbe06c897f1edbbcf35961baf48fb3a383eedcc3103648e4d334") 17 | 18 | depends_on("py-setuptools", type="build") 19 | 20 | depends_on("py-numpy", type=("build", "run")) 21 | depends_on("mpi") 22 | # Need to use the bundled, patched pfft (which in turn bundles FFTW) 23 | # depends_on("pfft", type=("build", "link", "run")) 24 | depends_on("py-mpi4py", type=("build", "run")) 25 | depends_on("py-cython", type=("build", "run")) 26 | 27 | def patch(self): 28 | # removing cythonized file from sdist 29 | remove('pfft/core.c') 30 | 31 | if 'sse' not in self.spec.target: 32 | filter_file(r"optimize=.*sse.*", 33 | "optimize=''", 34 | 'setup.py', 35 | ) 36 | -------------------------------------------------------------------------------- /fi/repo/packages/py_pymc/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PyPymc(PythonPackage): 11 | """Probabilistic Programming in Python: Bayesian Modeling and Probabilistic Machine Learning with PyTensor 12 | """ 13 | 14 | homepage = "http://github.com/pymc-devs/pymc" 15 | pypi = "pymc/pymc-5.5.0.tar.gz" 16 | 17 | version("5.5.0", sha256="7fe2ac72de8a5d04b76566fa44f64a400d67939c8393e6487d8a99f920f4f277") 18 | 19 | depends_on("python@3.8:", type=("build", "run")) 20 | 21 | depends_on("py-setuptools", type="build") 22 | 23 | depends_on("py-arviz@0.13:", type=("build", "run")) 24 | depends_on("py-cachetools@4.2.1:", type=("build", "run")) 25 | depends_on("py-cloudpickle", type=("build", "run")) 26 | depends_on("py-fastprogress@0.2.0:", type=("build", "run")) 27 | depends_on("py-numpy@1.15:", type=("build", "run")) 28 | depends_on("py-pandas@0.24:", type=("build", "run")) 29 | depends_on("py-pytensor@2.12.0:2.12", type=("build", "run")) 30 | depends_on("py-scipy@1.4.1:", type=("build", "run")) 31 | depends_on("py-typing-extensions@3.7.4:", type=("build", "run")) 32 | -------------------------------------------------------------------------------- /fi/openmpi-2.1.6.PATCH: -------------------------------------------------------------------------------- 1 | diff -up ./opal/mca/btl/openib/mca-btl-openib-device-params.ini.ORIG ./opal/mca/btl/openib/mca-btl-openib-device-params.ini 2 | --- ./opal/mca/btl/openib/mca-btl-openib-device-params.ini.ORIG 2020-09-02 10:44:22.212245000 -0400 3 | +++ ./opal/mca/btl/openib/mca-btl-openib-device-params.ini 2020-09-02 18:16:41.658047216 -0400 4 | @@ -190,6 +190,15 @@ max_inline_data = 256 5 | 6 | ############################################################################ 7 | 8 | +[Mellanox ConnectX6] 9 | +vendor_id = 0x2c9,0x5ad,0x66a,0x8f1,0x1708,0x03ba,0x15b3,0x119f 10 | +vendor_part_id = 4123 11 | +use_eager_rdma = 1 12 | 
+mtu = 4096 13 | +max_inline_data = 256 14 | + 15 | +############################################################################ 16 | + 17 | [IBM eHCA 4x and 12x] 18 | vendor_id = 0x5076 19 | vendor_part_id = 0 20 | diff -up ./opal/mca/common/verbs/common_verbs_port.c.ORIG ./opal/mca/common/verbs/common_verbs_port.c 21 | --- ./opal/mca/common/verbs/common_verbs_port.c.ORIG 2018-08-04 13:28:59.000000000 -0400 22 | +++ ./opal/mca/common/verbs/common_verbs_port.c 2020-09-01 22:08:40.187660000 -0400 23 | @@ -94,6 +94,10 @@ int opal_common_verbs_port_bw(struct ibv 24 | /* 12x */ 25 | *bandwidth *= 12; 26 | break; 27 | + case 16: 28 | + /* 16x */ 29 | + *bandwidth *= 16; 30 | + break; 31 | default: 32 | /* Who knows? */ 33 | return OPAL_ERR_NOT_FOUND; 34 | -------------------------------------------------------------------------------- /fi/repo/packages/triqs_cthyb/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class TriqsCthyb(CMakePackage): 5 | """TRIQS continuous-time hybridization-expansion solver""" 6 | 7 | homepage = "https://triqs.github.io/cthyb" 8 | url = "https://github.com/TRIQS/cthyb/archive/refs/tags/3.3.0.tar.gz" 9 | 10 | version('3.3.0', sha256='96635f2e2b6016a5f99e372aefc6bd7c5af7ef2ebe26c429bb1b479bc49b67ef') 11 | version('3.2.1', sha256='6f4cd36efcd19b0f1efbed2c9aa6d2640ef84f8fcf7b97675af8d54cdc327c9f') 12 | version('3.1.0', sha256='8d6d2c4d5b3928d062b72fad4ea9df9aae198e39dd9c1fd3cc5dc34a5019acc0') 13 | version('3.0.0', sha256='64970bfc73f5be819a87044411b4cc9e1f7996d122158c5c011046b7e1aec4e5') 14 | 15 | # TRIQS Dependencies 16 | depends_on('cmake', type='build') 17 | depends_on('mpi', type=('build', 'link')) 18 | depends_on('nfft', type=('build', 'link')) 19 | depends_on('triqs', type=('build', 'link')) 20 | depends_on('python@3.7:', type=('build', 'link', 'run')) 21 | variant('complex', 
default=False, description='Build with complex number support') 22 | extends('python') 23 | 24 | def cmake_args(self): 25 | args = super().cmake_args() 26 | if self.spec.satisfies('+complex'): 27 | args.append('-DHybridisation_is_complex=ON') 28 | args.append('-DLocal_hamiltonian_is_complex=ON') 29 | 30 | return args 31 | -------------------------------------------------------------------------------- /fi/docker/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [[ ! -x $DOCKER_ROOT/bin/dockerd-rootless.sh || ! -d /home/$USER ]] || ! /bin/getsubids $USER >& /dev/null ; then 4 | echo "Please make sure the docker module is loaded and you are on your own workstation." 5 | exit 1 6 | fi 7 | 8 | xdg=${XDG_CONFIG_HOME:-$HOME/.config} 9 | cfg=$xdg/systemd/user 10 | mkdir -p $cfg 11 | rm -f $cfg/docker.service 12 | cat <<- EOT > $cfg/docker.service 13 | [Unit] 14 | Description=Docker Application Container Engine (Rootless) 15 | Documentation=https://docs.docker.com/go/rootless/ 16 | RequiresMountsFor=/home/$USER 17 | ConditionHost=`hostname` 18 | ConditionUser=$USER 19 | 20 | [Service] 21 | Environment=PATH=$DOCKER_ROOT/bin:/sbin:/usr/sbin:/bin:/usr/bin 22 | ExecStart=$DOCKER_ROOT/bin/dockerd-rootless.sh --data-root /home/$USER/.local/share/docker 23 | ExecReload=/bin/kill -s HUP \$MAINPID 24 | TimeoutSec=10 25 | Restart=no 26 | StartLimitBurst=3 27 | StartLimitInterval=60s 28 | LimitNOFILE=infinity 29 | LimitNPROC=infinity 30 | LimitCORE=infinity 31 | TasksMax=infinity 32 | Delegate=yes 33 | Type=notify 34 | NotifyAccess=all 35 | KillMode=mixed 36 | EOT 37 | if [[ ! 
-f $xdg/docker/daemon.json ]] ; then 38 | mkdir -p $xdg/docker 39 | cat << EOT > $xdg/docker/daemon.json 40 | { 41 | "runtimes": { 42 | "nvidia": { 43 | "args": [], 44 | "path": "nvidia-container-runtime" 45 | } 46 | } 47 | } 48 | EOT 49 | fi 50 | systemctl --user daemon-reload 51 | -------------------------------------------------------------------------------- /fi/python-ncursesw.patch: -------------------------------------------------------------------------------- 1 | --- python/configure.ac.orig 2022-07-25 16:01:43.547382695 -0400 2 | +++ python/configure.ac 2022-07-25 16:02:15.427530089 -0400 3 | @@ -5021,10 +5021,6 @@ 4 | 5 | # first curses header check 6 | ac_save_cppflags="$CPPFLAGS" 7 | -if test "$cross_compiling" = no; then 8 | - CPPFLAGS="$CPPFLAGS -I/usr/include/ncursesw" 9 | -fi 10 | - 11 | AC_CHECK_HEADERS(curses.h ncurses.h) 12 | 13 | # On Solaris, term.h requires curses.h 14 | --- python/configure.orig 2022-07-25 16:01:48.851407214 -0400 15 | +++ python/configure 2022-07-25 16:02:15.063528406 -0400 16 | @@ -15968,10 +15968,6 @@ 17 | 18 | # first curses header check 19 | ac_save_cppflags="$CPPFLAGS" 20 | -if test "$cross_compiling" = no; then 21 | - CPPFLAGS="$CPPFLAGS -I/usr/include/ncursesw" 22 | -fi 23 | - 24 | for ac_header in curses.h ncurses.h 25 | do : 26 | as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` 27 | --- python/setup.py.orig 2022-07-25 16:07:43.893049824 -0400 28 | +++ python/setup.py 2022-07-25 16:07:48.429070837 -0400 29 | @@ -955,8 +955,6 @@ 30 | panel_library = 'panel' 31 | if curses_library == 'ncursesw': 32 | curses_defines.append(('HAVE_NCURSESW', '1')) 33 | - if not CROSS_COMPILING: 34 | - curses_includes.append('/usr/include/ncursesw') 35 | # Bug 1464056: If _curses.so links with ncursesw, 36 | # _curses_panel.so must link with panelw. 
37 | panel_library = 'panelw' 38 | -------------------------------------------------------------------------------- /spack/repo/packages/mupdf/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.makefile import MakefilePackage 2 | from spack.package import * 3 | 4 | 5 | class Mupdf(MakefilePackage): 6 | """ MuPDF is a lightweight PDF, XPS, and E-book viewer. """ 7 | 8 | homepage = "https://www.example.com" 9 | url = "https://mupdf.com/downloads/archive/mupdf-1.18.0-source.tar.xz" 10 | 11 | version('1.18.0', sha256='592d4f6c0fba41bb954eb1a41616661b62b134d5b383e33bd45a081af5d4a59a') 12 | version('1.17.0', sha256='c935fb2593d9a28d9b56b59dad6e3b0716a6790f8a257a68fa7dcb4430bc6086') 13 | version('1.16.1', sha256='6fe78184bd5208f9595e4d7f92bc8df50af30fbe8e2c1298b581c84945f2f5da') 14 | version('1.16.0', sha256='d28906cea4f602ced98f0b08d04138a9a4ac2e5462effa8c45f86c0816ab1da4') 15 | version('1.15.0', sha256='565036cf7f140139c3033f0934b72e1885ac7e881994b7919e15d7bee3f8ac4e') 16 | version('1.14.0', sha256='603e69a96b04cdf9b19a3e41bd7b20c63b39abdcfba81a7460fcdcc205f856df') 17 | version('1.13.0', sha256='746698e0d5cd113bdcb8f65d096772029edea8cf20704f0d15c96cb5449a4904') 18 | version('1.12.0', sha256='577b3820c6b23d319be91e0e06080263598aa0662d9a7c50af500eb6f003322d') 19 | 20 | depends_on('openssl') 21 | depends_on('curl') 22 | depends_on('libxext') 23 | depends_on('libxau') 24 | 25 | def edit(self, spec, prefix): 26 | env['XCFLAGS'] = "-std=c99" 27 | self.install_targets.append('prefix={}'.format(prefix)) 28 | -------------------------------------------------------------------------------- /spack/repo/packages/libass/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.autotools import AutotoolsPackage 2 | from spack.package import * 3 | 4 | 5 | class Libass(AutotoolsPackage): 6 | """libass is a portable subtitle 
renderer for the ASS/SSA 7 | (Advanced Substation Alpha/Substation Alpha) subtitle format.""" 8 | 9 | homepage = "https://github.com/libass/libass" 10 | url = "https://github.com/libass/libass/releases/download/0.15.1/libass-0.15.1.tar.gz" 11 | 12 | maintainers = ['alexdotc'] 13 | 14 | version('0.15.1', sha256='101e2be1bf52e8fc265e7ca2225af8bd678839ba13720b969883eb9da43048a6') 15 | version('0.15.0', sha256='9cbddee5e8c87e43a5fe627a19cd2aa4c36552156eb4edcf6c5a30bd4934fe58') 16 | version('0.14.0', sha256='8d5a5c920b90b70a108007ffcd2289ac652c0e03fc88e6eecefa37df0f2e7fdf') 17 | version('0.13.7', sha256='008a05a4ed341483d8399c8071d57a39853cf025412b32da277e76ad8226e158') 18 | version('0.13.6', sha256='62070da83b2139c1875c9db65ece37f80f955097227b7d46ade680221efdff4b') 19 | version('0.13.5', sha256='e5c6d9ae81c3c75721a3920960959d2512e2ef14666910d76f976589d2f89b3f') 20 | version('0.13.4', sha256='6711469df5fcc47d06e92f7383dcebcf1282591002d2356057997e8936840792') 21 | version('0.13.3', sha256='86c8c45d14e4fd23b5aa45c72d9366c46b4e28087da306e04d52252e04a87d0a') 22 | version('0.13.2', sha256='8baccf663553b62977b1c017d18b3879835da0ef79dc4d3b708f2566762f1d5e') 23 | version('0.13.1', sha256='9741b9b4059e18b4369f8f3f77248416f988589896fd7bf9ce3da7dfb9a84797') 24 | -------------------------------------------------------------------------------- /fi/repo/packages/py_pytensor/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | from spack_repo.builtin.build_systems.python import PythonPackage 7 | from spack.package import * 8 | 9 | 10 | class PyPytensor(PythonPackage): 11 | """ PyTensor is a fork of Aesara -- a Python library for defining, optimizing, and 12 | efficiently evaluating mathematical expressions involving multi-dimensional arrays. 13 | """ 14 | 15 | homepage = "https://pytensor.readthedocs.io/" 16 | pypi = "pytensor/pytensor-2.12.2.tar.gz" 17 | 18 | version("2.12.2", sha256="ee4f1a4aefda269a5a399b7bc90da75b263cf019ba881f30cc5881e5886e9230") 19 | 20 | depends_on("python@3.8:", type=("build", "run")) 21 | 22 | depends_on("py-setuptools@48.0.0:", type=("build", "run")) 23 | depends_on("py-cython", type="build") 24 | depends_on("py-numpy@1.17.0:", type=("build", "run")) 25 | depends_on("py-versioneer@0.28 +toml", type="build") 26 | 27 | depends_on("py-scipy@0.14:", type=("build", "run")) 28 | depends_on("py-filelock", type=("build", "run")) 29 | depends_on("py-etuples", type=("build", "run")) 30 | depends_on("py-logical-unification", type=("build", "run")) 31 | depends_on("py-minikanren", type=("build", "run")) 32 | depends_on("py-cons", type=("build", "run")) 33 | depends_on("py-typing-extensions", type=("build", "run")) 34 | -------------------------------------------------------------------------------- /fi/repo/packages/py_hdf5plugin/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.python import PythonPackage 2 | from spack.package import * 3 | 4 | 5 | class PyHdf5plugin(PythonPackage): 6 | '''hdf5plugin provides HDF5 compression filters (namely: Blosc, Blosc2, 7 | BitShuffle, BZip2, FciDecomp, LZ4, SZ, SZ3, Zfp, ZStd) and makes them 8 | usable from h5py. 
9 | ''' 10 | 11 | # http://www.silx.org/doc/hdf5plugin/latest/install.html 12 | 13 | pypi = 'hdf5plugin/hdf5plugin-4.1.1.tar.gz' 14 | 15 | version("4.1.1", sha256="96a989679f1f38251e0dcae363180d382ba402f6c89aab73ca351a391ac23b36") 16 | 17 | # Don't link against compression libs in the spec. hdf5plugin is doing static inclusions. 18 | depends_on('hdf5') 19 | depends_on('py-setuptools', type='build') 20 | depends_on('py-py-cpuinfo@8.0.0', type='build') 21 | depends_on('py-wheel', type='build') 22 | 23 | def setup_build_environment(self, env): 24 | env.set('HDF5PLUGIN_HDF5_DIR', self.spec['hdf5'].prefix) 25 | env.set('HDF5PLUGIN_OPENMP', 'True') 26 | env.set('HDF5PLUGIN_NATIVE', 'False') 27 | env.set('HDF5PLUGIN_SSE2', 'True' if 'sse2' in self.spec.target else 'False') 28 | env.set('HDF5PLUGIN_AVX2', 'True' if 'avx2' in self.spec.target else 'False') 29 | env.set('HDF5PLUGIN_AVX512', 'True' if 'avx512' in self.spec.target else 'False') 30 | env.set('HDF5PLUGIN_BMI2', 'True' if 'bmi2' in self.spec.target else 'False') 31 | env.set('HDF5PLUGIN_CPP11', 'True') 32 | env.set('HDF5PLUGIN_CPP14', 'True') 33 | # env.set('HDF5PLUGIN_INTEL_IPP_DIR', ) 34 | -------------------------------------------------------------------------------- /fi/repo/packages/rockstar/0001-Fix-to-solve-linking-problem-with-gcc-10.patch: -------------------------------------------------------------------------------- 1 | From 36ce9eea36eeda4c333acf56f8bb0d40ff0df2a1 Mon Sep 17 00:00:00 2001 2 | From: Peter Behroozi 3 | Date: Sat, 4 Sep 2021 15:20:44 +0900 4 | Subject: [PATCH] Fix to solve linking problem with gcc-10 5 | 6 | --- 7 | client.c | 2 +- 8 | fun_times.h | 2 +- 9 | interleaving.h | 2 +- 10 | 3 files changed, 3 insertions(+), 3 deletions(-) 11 | 12 | diff --git a/client.c b/client.c 13 | index 5525eb9..cd4e1ff 100644 14 | --- a/client.c 15 | +++ b/client.c 16 | @@ -764,7 +764,7 @@ void transfer_stuff(int64_t s, int64_t c, int64_t timestep) { 17 | } 18 | 19 | void do_projections(void) { 20 | - 
int64_t i, j, idx, dir; 21 | + int64_t i, j, idx, dir=0; 22 | assert(BOX_SIZE > 0); 23 | for (i=0; i= 9.0, revert default prefix: 28 | filter_file("/usr/local/nv5", "/usr/local/harris", "silent/idl_answer_file") 29 | 30 | def install(self, spec, prefix): 31 | # replace default install dir to self.prefix by editing answer file 32 | filter_file("/usr/local/harris", prefix, "silent/idl_answer_file") 33 | 34 | # execute install script 35 | install_script = Executable("./install.sh") 36 | install_script("-s", input="silent/idl_answer_file") 37 | 38 | def setup_run_environment(self, env): 39 | # set necessary environment variables 40 | env.prepend_path("EXELIS_DIR", self.prefix) 41 | env.prepend_path("IDL_DIR", self.prefix.idl) 42 | 43 | # add bin to path 44 | env.prepend_path("PATH", self.prefix.idl.bin) 45 | -------------------------------------------------------------------------------- /nixpkgs/sssd/nss-client.nix: -------------------------------------------------------------------------------- 1 | { stdenv 2 | , fetchFromGitHub 3 | , autoreconfHook 4 | , pkg-config 5 | , glibc, pam, openldap, libkrb5, dnsutils, cyrus_sasl, nss 6 | , popt, talloc, tdb, tevent, ldb, ding-libs, pcre2, c-ares 7 | , glib, dbus 8 | , jansson, libunistring, openssl, p11-kit 9 | }: 10 | 11 | let 12 | version = "2.9.4"; 13 | in 14 | 15 | stdenv.mkDerivation rec { 16 | name = "sssd-nss-client-${version}"; 17 | 18 | src = fetchFromGitHub { 19 | owner = "SSSD"; 20 | repo = "sssd"; 21 | rev = "refs/tags/${version}"; 22 | hash = "sha256-VJXZndbmC6mAVxzvv5Wjb4adrQkP16Rt4cgjl4qGDIc="; 23 | }; 24 | 25 | # libnss_sss.so does not in fact use any of these -- they're just needed for configure 26 | nativeBuildInputs = [ autoreconfHook pkg-config 27 | pam openldap libkrb5 dnsutils cyrus_sasl nss 28 | popt tdb tevent ldb ding-libs pcre2 c-ares 29 | glib dbus 30 | jansson p11-kit 31 | ]; 32 | buildInputs = [ 33 | talloc 34 | openssl libunistring 35 | ]; 36 | 37 | preConfigure = '' 38 | configureFlagsArray=( 
39 | --prefix=$out 40 | --localstatedir=/var 41 | --sysconfdir=/etc 42 | --with-os=redhat 43 | --with-nscd=${glibc.bin}/sbin/nscd 44 | --with-ldb-lib-dir=$out/modules/ldb 45 | --disable-cifs-idmap-plugin 46 | --without-autofs 47 | --without-kcm 48 | --without-libnl 49 | --without-libwbclient 50 | --without-manpages 51 | --without-nfsv4-idmapd-plugin 52 | --without-python2-bindings 53 | --without-python3-bindings 54 | --without-samba 55 | --without-secrets 56 | --without-selinux 57 | --without-semanage 58 | --without-ssh 59 | --without-sudo 60 | --without-oidc-child 61 | ) 62 | ''; 63 | 64 | enableParallelBuilding = true; 65 | 66 | buildFlags = [ "libnss_sss.la" ]; 67 | installTargets = [ "install-nsslibLTLIBRARIES" ]; 68 | 69 | } 70 | -------------------------------------------------------------------------------- /fi/docker/default.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | 3 | let docker = derivation rec { 4 | inherit (packs) system; 5 | pname = "docker"; 6 | version = "28.0.1"; 7 | name = "${pname}-${version}"; 8 | docker = builtins.fetchurl { 9 | url = "https://download.docker.com/linux/static/stable/${packs.target}/${name}.tgz"; 10 | sha256 = "0ij7ha9b596lq7pvcxd5r345nm76dlgdim5w1nn9w6bqbmmximjy"; 11 | }; 12 | rootless = builtins.fetchurl { 13 | url = "https://download.docker.com/linux/static/stable/${packs.target}/docker-rootless-extras-${version}.tgz"; 14 | sha256 = "1fsx7w5b91r23pad3hpwyvcljc62hw60b42nqqpp463ggvfzykil"; 15 | }; 16 | PATH = "/bin:/usr/bin"; 17 | setupsh = ./setup.sh; 18 | builder = ./builder.sh; 19 | }; in 20 | 21 | docker // { 22 | module = with docker; { 23 | name = pname; 24 | version = version; 25 | prefix = docker; 26 | context = { 27 | short_description = "user rootless docker (for workstations)"; 28 | long_description = "Use this module to run docker on your own workstation."; 29 | }; 30 | postscript = '' 31 | local xdg_runtime_dir = os.getenv("XDG_RUNTIME_DIR") 32 | if (mode() 
== "load") then 33 | local user = os.getenv("USER") 34 | local subid = capture("/bin/getsubids " .. user); 35 | if not (subid:match(user) and isDir(pathJoin("/home", user)) and isDir(xdg_runtime_dir)) then 36 | LmodBreak("The docker module can be used to run a rootless docker daemon on your own workstation. If you have a workstation and you'd like to use docker, please try loading it there. For more details, see: https://wiki.flatironinstitute.org/SCC/Software/DockerSingularity") 37 | end 38 | end 39 | 40 | execute {cmd="${docker}/bin/dockerd-rootless-setup.sh && /bin/systemctl --user start docker", modeA={"load"}} 41 | execute {cmd="/bin/systemctl --user stop docker", modeA={"unload"}} 42 | setenv("DOCKER_HOST", "unix://" .. pathJoin(xdg_runtime_dir, "docker.sock")) 43 | ''; 44 | }; 45 | } 46 | -------------------------------------------------------------------------------- /fi/repo/packages/triqs/package.py: -------------------------------------------------------------------------------- 1 | from spack_repo.builtin.build_systems.cmake import CMakePackage 2 | from spack.package import * 3 | 4 | class Triqs(CMakePackage): 5 | """TRIQS: a Toolbox for Research on Interacting Quantum Systems""" 6 | 7 | homepage = "https://triqs.github.io" 8 | url = "https://github.com/TRIQS/triqs/archive/refs/tags/3.3.1.tar.gz" 9 | 10 | version('3.3.1', sha256='740efb57c9af39f4086115f8167a55833e84558261e0564c7179d8c17f911539') 11 | version('3.3.0', sha256='794bb02ed7e98498a93219831b270123ea0c893cc272ec9bb98217354b8f1b8d') 12 | version('3.2.1', sha256='f16103c6f7b68cd412cfb309f22f9ee4d379d794518432c0d514956a39ea05cb') 13 | version('3.2.0', sha256='b001ed1339ff6024f62b4e61fb8a955b044feac2d53b5a58575a3175e9bf6776') 14 | version('3.1.1', sha256='cf4f6064ea962fc088e0c2833bf7c4e52f4c827ea331bf3c57d1c9303649042b') 15 | version('3.1.0', sha256='f1f358ec73498bc7ac3ed9665829d8605908f7f7fc876a5c2a01efe37d368f0e') 16 | version('3.0.1', 
sha256='d555a4606c7ea2dde28aa8da056c6cc1ebbdd4e11cdb50b312b8c8f821a3edd2') 17 | 18 | variant('libclang', default=True, description='Build against libclang to enable c++2py support. ') 19 | 20 | # TRIQS Dependencies 21 | depends_on('cmake', type='build') 22 | depends_on('mpi', type=('build', 'link')) 23 | depends_on('blas', type=('build', 'link')) 24 | depends_on('lapack', type=('build', 'link')) 25 | depends_on('fftw@3:', type=('build', 'link')) 26 | depends_on('boost', type=('build', 'link')) 27 | depends_on('gmp', type=('build', 'link')) 28 | depends_on('hdf5', type=('build', 'link')) 29 | depends_on('llvm', type=('build', 'link'), when='+libclang') 30 | depends_on('python@3.7:', type=('build', 'link', 'run')) 31 | depends_on('py-scipy', type=('build', 'run')) 32 | depends_on('py-numpy', type=('build', 'run')) 33 | depends_on('py-h5py', type=('build', 'run')) 34 | depends_on('py-mpi4py', type=('build', 'run')) 35 | depends_on('py-matplotlib', type=('build', 'run')) 36 | depends_on('py-mako', type=('build', 'run')) 37 | depends_on('py-sphinx', type=('build', 'run')) 38 | 39 | extends('python') 40 | -------------------------------------------------------------------------------- /fi/repo/packages/py_nbodykit/package.py: -------------------------------------------------------------------------------- 1 | # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other 2 | # Spack Project Developers. See the top-level COPYRIGHT file for details. 
3 | # 4 | # SPDX-License-Identifier: (Apache-2.0 OR MIT) 5 | 6 | 7 | from spack_repo.builtin.build_systems.python import PythonPackage 8 | from spack.package import * 9 | 10 | 11 | class PyNbodykit(PythonPackage): 12 | """ Analysis kit for large-scale structure datasets, the massively parallel way""" 13 | 14 | homepage = "https://nbodykit.readthedocs.io" 15 | 16 | version("0.3.15-40-g376c9d78", 17 | url='https://www.github.com/bccp/nbodykit/tarball/376c9d78204650afd9af81d148b172804432c02f', 18 | sha256="2a38ab2dd78893a542997af168bba57794e1916efbd5d436b0507487ae383dc5", 19 | ) 20 | 21 | variant('extras', default=True, description='Install extras') 22 | 23 | depends_on("py-setuptools", type="build") 24 | depends_on("py-numpy", type=("build", "run")) 25 | depends_on("py-cython", type=("build", "run")) 26 | depends_on("mpi") 27 | depends_on("py-mpi4py", type=("build", "run")) 28 | depends_on("py-scipy", type=("build", "run")) 29 | depends_on("py-astropy", type=("build", "run")) 30 | depends_on("py-pyerfa", type=("build", "run")) 31 | depends_on("py-six", type=("build", "run")) 32 | depends_on("py-runtests +mpi", type=("build", "run")) 33 | depends_on("py-pmesh", type=("build", "run")) 34 | depends_on("py-kdcount", type=("build", "run")) 35 | depends_on("py-mpsort", type=("build", "run")) 36 | depends_on("py-bigfile +mpi", type=("build", "run")) 37 | depends_on("py-pandas", type=("build", "run")) 38 | depends_on("py-dask@0.14.2:", type=("build", "run")) 39 | depends_on("py-cachey", type=("build", "run")) 40 | depends_on("py-sympy@1.6.2:", type=("build", "run")) 41 | depends_on("py-numexpr", type=("build", "run")) 42 | depends_on("py-corrfunc", type=("build", "run")) 43 | depends_on("py-mcfit", type=("build", "run")) 44 | depends_on("py-classylss@0.2:", type=("build", "run")) 45 | 46 | depends_on('py-halotools', type=("build", "run"), when='+extras ^python@3.9:') 47 | depends_on('py-h5py', type=("build", "run"), when='+extras') 48 | depends_on('py-fitsio', type=("build", 
"run"), when='+extras') 49 | -------------------------------------------------------------------------------- /nixpkgs/jdupes.nix: -------------------------------------------------------------------------------- 1 | # from https://github.com/NixOS/nixpkgs/pull/297457 2 | { lib, stdenv, fetchFromGitea, fetchpatch }: 3 | 4 | stdenv.mkDerivation rec { 5 | pname = "jdupes"; 6 | version = "1.21.0"; 7 | 8 | src = fetchFromGitea { 9 | domain = "codeberg.org"; 10 | owner = "jbruchon"; 11 | repo = "jdupes"; 12 | rev = "v${version}"; 13 | sha256 = "sha256-nDyRaV49bLVHlyqKJ7hf6OBWOLCfmHrTeHryK091c3w="; 14 | # Unicode file names lead to different checksums on HFS+ vs. other 15 | # filesystems because of unicode normalisation. The testdir 16 | # directories have such files and will be removed. 17 | postFetch = "rm -r $out/testdir"; 18 | }; 19 | 20 | patches = [ 21 | (fetchpatch { 22 | name = "darwin-stack-size.patch"; 23 | url = "https://codeberg.org/jbruchon/jdupes/commit/8f5b06109b44a9e4316f9445da3044590a6c63e2.patch"; 24 | sha256 = "0saq92v0mm5g979chr062psvwp3i3z23mgqrcliq4m07lvwc7i3s"; 25 | }) 26 | (fetchpatch { 27 | name = "linux-header-ioctl.patch"; 28 | url = "https://codeberg.org/jbruchon/jdupes/commit/0d4d98f51c99999d2c6dbbb89d554af551b5b69b.patch"; 29 | sha256 = "sha256-lyuZeRp0Laa8I9nDl1HGdlKa59OvGRQJnRg2fTWv7mQ="; 30 | }) 31 | (fetchpatch { 32 | name = "darwin-apfs-comp.patch"; 33 | url = "https://codeberg.org/jbruchon/jdupes/commit/517b7035945eacd82323392b13bc17b044bcc89d.patch"; 34 | sha256 = "sha256-lvOab6tyEyKUtik3JBdIs5SHpVjcQEDfN7n2bfUszBw="; 35 | }) 36 | ]; 37 | 38 | dontConfigure = true; 39 | 40 | makeFlags = [ 41 | "PREFIX=${placeholder "out"}" 42 | ] ++ lib.optionals stdenv.isLinux [ 43 | "ENABLE_DEDUPE=1" 44 | "STATIC_DEDUPE_H=1" 45 | ] ++ lib.optionals stdenv.cc.isGNU [ 46 | "HARDEN=1" 47 | ]; 48 | 49 | enableParallelBuilding = true; 50 | 51 | doCheck = false; # broken Makefile, the above also removes tests 52 | 53 | postInstall = '' 54 | install -Dm444 
-t $out/share/doc/jdupes CHANGES LICENSE README.md 55 | ''; 56 | 57 | meta = with lib; { 58 | description = "A powerful duplicate file finder and an enhanced fork of 'fdupes'"; 59 | longDescription = '' 60 | jdupes is a program for identifying and taking actions upon 61 | duplicate files. This fork known as 'jdupes' is heavily modified 62 | from and improved over the original. 63 | ''; 64 | homepage = "https://codeberg.org/jbruchon/jdupes"; 65 | license = licenses.mit; 66 | maintainers = with maintainers; [ romildo ]; 67 | mainProgram = "jdupes"; 68 | }; 69 | } 70 | -------------------------------------------------------------------------------- /fi/r.nix: -------------------------------------------------------------------------------- 1 | packs: 2 | with packs.pkgs; [ 3 | r 4 | r-irkernel 5 | r-annotationdbi 6 | r-bh 7 | r-bsgenome 8 | r-biasedurn 9 | r-biocinstaller 10 | r-biocmanager 11 | r-cairo 12 | #r-deseq2 #build error 13 | r-dt 14 | #r-diffbind 15 | r-formula 16 | r-gostats 17 | r-gseabase 18 | r-genomicalignments 19 | r-genomicfeatures 20 | r-genomicranges 21 | r-iranges 22 | r-keggrest 23 | r-rbgl 24 | r-rcurl 25 | r-r-methodss3 26 | #r-rsoo 27 | #r-rsutils 28 | r-rcpparmadillo 29 | r-rcppeigen 30 | #r-rcppgsl 31 | r-rhdf5lib 32 | r-rsamtools 33 | r-rtsne 34 | r-tfmpvalue 35 | r-vgam 36 | #r-venndiagram 37 | r-acepack 38 | r-ade4 39 | r-askpass 40 | r-assertthat 41 | r-backports 42 | #r-biomart 43 | #r-biomformat 44 | r-bit64 45 | r-bitops 46 | r-blob 47 | r-catools 48 | r-callr 49 | r-checkmate 50 | r-cli 51 | r-clipr 52 | r-clisymbols 53 | r-crosstalk 54 | r-desc 55 | r-devtools 56 | r-dplyr 57 | r-evaluate 58 | r-formatr 59 | r-fs 60 | r-futile-logger 61 | r-futile-options 62 | r-gdata 63 | r-genefilter 64 | r-getopt 65 | r-ggplot2 66 | r-ggrastr 67 | r-glmnet 68 | r-glue 69 | r-gplots 70 | #r-grimport 71 | r-gridextra 72 | r-gtools 73 | r-hexbin 74 | r-highr 75 | #r-huge 76 | r-hms 77 | r-htmltable 78 | r-httpuv 79 | #r-idr 80 | r-igraph 81 | r-ini 
82 | r-jpeg 83 | r-knitr 84 | r-lambda-r 85 | r-later 86 | r-lattice 87 | r-latticeextra 88 | r-lazyeval 89 | r-limma 90 | r-markdown 91 | r-matrixstats 92 | r-memoise 93 | r-mime 94 | r-miniui 95 | r-multtest 96 | #r-nabor 97 | r-nloptr 98 | #r-pdsh 99 | r-pheatmap 100 | #r-phyloseq 101 | r-pkgbuild 102 | r-pkgconfig 103 | r-pkgload 104 | r-plogr 105 | r-plotly 106 | r-png 107 | r-polynom 108 | r-powerlaw 109 | r-preprocesscore 110 | #r-preseqr 111 | r-processx 112 | r-progress 113 | r-promises 114 | r-ps 115 | #r-pulsar 116 | r-purrr 117 | r-randomforest 118 | r-rcmdcheck 119 | r-readr 120 | r-remotes 121 | r-rlang 122 | r-rprojroot 123 | r-rstudioapi 124 | r-rtracklayer 125 | r-segmented 126 | r-seqinr 127 | r-sessioninfo 128 | r-seurat 129 | r-sf 130 | r-shape 131 | r-shiny 132 | r-snow 133 | r-sourcetools 134 | r-sys 135 | r-terra 136 | r-tibble 137 | r-tidyr 138 | r-tidyselect 139 | r-units 140 | r-viridis 141 | r-whisker 142 | r-xfun 143 | r-xopen 144 | r-xtable 145 | r-yaml 146 | r-zlibbioc 147 | #rstudio? 
148 | ] 149 | -------------------------------------------------------------------------------- /spack/builder.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | import os 3 | import functools 4 | import shutil 5 | import json 6 | 7 | import nixpack 8 | import spack 9 | import spack.installer 10 | 11 | try: 12 | from spack.context import Context 13 | except ImportError: 14 | class Context: 15 | BUILD = 'build' 16 | TEST = 'test' 17 | 18 | # disable pre_ and post_install hooks (sbang, permissions, licensing) 19 | def noop_hook(*args, **kwargs): 20 | pass 21 | spack.hooks.pre_install = noop_hook 22 | spack.hooks.post_install = noop_hook 23 | 24 | nixpack.getVar('name') 25 | nixspec = nixpack.getJson('spec') 26 | 27 | spec = nixpack.NixSpec.get(nixspec, nixpack.getVar('out')) 28 | spec.concretize() 29 | 30 | pkg = spec.package 31 | pkg.run_tests = spec.tests 32 | try: 33 | default_format = spack.spec.DEFAULT_FORMAT 34 | except AttributeError: 35 | default_format = spack.spec.default_format 36 | print(spec.tree(cover='edges', format=default_format + ' {/hash}', show_types=True)) 37 | 38 | opts = { 39 | 'install_deps': False, 40 | 'verbose': not not nixpack.getVar('verbose'), 41 | 'tests': spec.tests, 42 | } 43 | 44 | # package-specific fixes 45 | os.environ['CCACHE_DISABLE'] = '1' 46 | if 'go' in spec._dependencies: 47 | # move go cache to tmp 48 | os.environ['GOCACHE'] = os.path.join(os.environ['TMPDIR'], 'go-cache') 49 | 50 | setup = nixpack.getVar('setup', None) 51 | post = nixpack.getVar('post', None) 52 | if setup: 53 | exec(setup) 54 | 55 | origenv = os.environ.copy() 56 | opts['unmodified_env'] = origenv 57 | # create and stash some metadata 58 | opts['env_modifications'] = spack.build_environment.setup_package(pkg, True, context=Context.BUILD) 59 | os.makedirs(pkg.metadata_dir, exist_ok=True) 60 | 61 | # log build phases to nix 62 | def wrapPhase(p, f, *args): 63 | nixpack.nixLog({'action': 'setPhase', 
'phase': p}) 64 | return f(*args) 65 | 66 | if hasattr(pkg, '_InstallPhase_phases'): 67 | for pn, pa in zip(pkg.phases, pkg._InstallPhase_phases): 68 | pf = getattr(pkg, pa) 69 | setattr(pkg, pa, functools.partial(wrapPhase, pn, pf)) 70 | else: 71 | builder = spack.builder.create(pkg) 72 | for phase in builder: 73 | phase.execute = functools.partial(wrapPhase, phase.name, phase.execute) 74 | 75 | if not opts['verbose']: 76 | def print_log(pkg, phase, log): 77 | with open(log, 'r') as f: 78 | print(f.read()) 79 | spack.hooks.on_phase_error = print_log 80 | 81 | # make sure cache is group-writable (should be configurable, ideally in spack) 82 | os.umask(0o002) 83 | # do the actual install 84 | spack.installer.build_process(pkg, opts) 85 | 86 | # we do this even if not testing as it may create more things (e.g., perl "extensions") 87 | #os.environ.clear() 88 | #os.environ.update(origenv) 89 | #spack.build_environment.setup_package(pkg, True, context=Context.TEST) 90 | 91 | with open(os.path.join(spec.prefix, nixpack.NixSpec.nixSpecFile), 'w') as sf: 92 | json.dump(spec.nixspec, sf) 93 | 94 | if post: 95 | exec(post) 96 | -------------------------------------------------------------------------------- /fi/repo/packages/rockstar/package.py: -------------------------------------------------------------------------------- 1 | # Adapted from Spack's built-in Rockstar 2 | 3 | import os 4 | 5 | from spack_repo.builtin.build_systems.makefile import MakefilePackage 6 | from spack.package import * 7 | 8 | 9 | class Rockstar(MakefilePackage): 10 | """The Rockstar halo finder""" 11 | 12 | homepage = "https://bitbucket.org/gfcstanford/rockstar" 13 | 14 | # main repo 15 | version("main.2021-09-04.36ce9e", 16 | git = 'https://bitbucket.org/gfcstanford/rockstar.git', 17 | commit = '36ce9eea36eeda4c333acf56f8bb0d40ff0df2a1', 18 | preferred=True, 19 | ) 20 | # awetzel's rockstar-galaxies fork 21 | version("galaxies.2022-12-29.a9d865", 22 | git = 
'https://bitbucket.org/awetzel/rockstar-galaxies.git', 23 | commit = 'a9d8653c0aabc1ba31646e504c2d37013ffd11d4', 24 | ) 25 | 26 | variant("hdf5", description="HDF5 support", default=True) 27 | 28 | depends_on("hdf5", when="+hdf5") 29 | depends_on('libtirpc') 30 | 31 | patch('0001-Fix-to-solve-linking-problem-with-gcc-10.patch', 32 | when='@galaxies', 33 | ) 34 | 35 | def patch(self): 36 | oflags = ' '.join(self.extra_oflags()) 37 | filter_file( 38 | r'^(OFLAGS\s*=[^#\n]*)', 39 | rf'\1 {oflags}', 40 | 'Makefile', 41 | ) 42 | filter_file( 43 | r'(-D_BSD_SOURCE|-D_SVID_SOURCE)', 44 | r'-D_DEFAULT_SOURCE', 45 | 'Makefile', 46 | ) 47 | filter_file( 48 | r'^CC\s*=.*', 49 | r'', 50 | 'Makefile', 51 | ) 52 | 53 | def extra_oflags(self): 54 | return ['-ltirpc'] 55 | 56 | def install(self, spec, prefix): 57 | # install the entire repo 58 | # probably only the binaries will be used, though 59 | install_tree('.', prefix.src) 60 | 61 | mkdirp(prefix.bin) 62 | mkdirp(prefix.lib) 63 | 64 | util = ['util/bgc2_to_ascii', 65 | 'util/find_parents', 66 | 'util/finish_bgc2', 67 | 'util/subhalo_stats', 68 | ] 69 | for fn in util: 70 | install_link(join_path(prefix.src, fn), 71 | prefix.bin) 72 | 73 | if '@galaxies' in spec: 74 | install_link(join_path(prefix.src, 'rockstar-galaxies'), 75 | prefix.bin) 76 | install_link(join_path(prefix.src, 'librockstar-galaxies.so'), 77 | prefix.lib) 78 | else: 79 | install_link(join_path(prefix.src, 'rockstar'), 80 | prefix.bin) 81 | install_link(join_path(prefix.src, 'librockstar.so'), 82 | prefix.lib) 83 | 84 | @property 85 | def build_targets(self): 86 | targets = [ 87 | 'lib', 88 | 'bgc2', 89 | 'parents', 90 | 'substats' 91 | ] 92 | if '+hdf5' in self.spec: 93 | targets += ['with_hdf5'] 94 | else: 95 | targets += ['all'] 96 | return targets 97 | 98 | def install_link(src, dst): 99 | '''Install into `dst` dir via hard link''' 100 | os.link(src, join_path(dst, os.path.basename(src))) 101 | 
-------------------------------------------------------------------------------- /fi/run: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | shopt -s nullglob 3 | 4 | fi=`dirname $0` 5 | . $fi/env 6 | gitrev=$(git describe --always) 7 | os=$( . /etc/os-release ; echo $ID${VERSION_ID%.[0-9]*} ) 8 | arch=$(uname -m) 9 | nixargs="$fi --argstr gitrev ${gitrev:-unknown} --argstr os $os" 10 | site=fi 11 | lmodbase=/mnt/sw/lmod/$arch/$os 12 | 13 | if [[ `hostname -s` = pcn* ]] ; then 14 | # popeye 15 | site=popeye 16 | nixargs="$nixargs --argstr target skylake-avx512 --argstr cudaarch 70,80" 17 | fi 18 | 19 | traceSpecs() { 20 | nix-instantiate --eval -A "$1" $nixargs |& sed 's/^trace: //' 21 | } 22 | 23 | if [[ $# -eq 0 ]] ; then 24 | cmd=help 25 | else 26 | cmd="$1" 27 | shift 28 | fi 29 | case "$cmd" in 30 | (build) 31 | if [[ "$*" != *" -A "* ]] ; then 32 | set -- "$@" -A mods 33 | fi 34 | exec nix-build --show-trace "$@" $nixargs 35 | ;; 36 | (spec) 37 | if [[ $# -eq 0 ]] ; then 38 | traceSpecs traceModSpecs 39 | else 40 | for p in "$@" ; do 41 | traceSpecs "traceSpecs.$p" 42 | done 43 | fi 44 | ;; 45 | (gc) 46 | exec nix-store --gc 47 | ;; 48 | (profile) 49 | if [[ $# -eq 0 ]] ; then 50 | echo "Nothing to do" 51 | elif [[ $* == all ]] ; then 52 | set -- nix lmod jupyter shell 53 | fi 54 | for prof in "$@" ; do 55 | prefix= 56 | case "$prof" in 57 | (nix) 58 | attrs="nixpkgs.nix nixpkgs.git" 59 | prefix=$arch/ 60 | ;; 61 | (lmod) 62 | attrs="pkgs.lmod" 63 | ;; 64 | (jupyter*) 65 | attrs="jupyter" 66 | ;; 67 | (slurm) 68 | attrs="pkgs.cuda pkgs.pmix pkgs.oneapi-level-zero" 69 | ;; 70 | (pyslurm) 71 | attrs="pyslurm" 72 | ;; 73 | (shell) 74 | attrs="pkgs.zsh pkgs.tmux pkgs.git" 75 | ;; 76 | (viswall) 77 | attrs="nixpkgs.xscreensaver nixpkgs.mpv" 78 | ;; 79 | (*) 80 | echo "Unknown profile" 81 | exit 1 82 | ;; 83 | esac 84 | profile=$NIX_STATE_DIR/profiles/${prefix:=$arch/$os/}$prof 85 | nix-env -p $profile -i -r -f 
$nixargs -A $attrs 86 | l=$(readlink $profile) 87 | git tag $site/$prefix${l%-link} HEAD || true 88 | done 89 | ;; 90 | (modules) 91 | if [[ $1 == -f ]] ; then 92 | shift 93 | elif ! git diff-index --quiet HEAD -- ; then 94 | echo "Local modifications: refusing to relase (without -f)" 95 | exit 1 96 | fi 97 | if [[ $# -ne 1 ]] ; then 98 | echo "Usage: modules NAME" 99 | exit 1 100 | fi 101 | path=$1 102 | if [[ $path != */* ]] ; then 103 | tag=$path 104 | path=$lmodbase/modules/modules/$path 105 | fi 106 | if [[ $path != *.lua ]] ; then 107 | path=$path.lua 108 | fi 109 | # release 110 | nix-build -o $path -A modsMod $nixargs 111 | if [[ -n $tag ]] ; then 112 | git tag $site/$arch/$os/$tag HEAD 113 | fi 114 | ;& # fall-thru 115 | (cache) 116 | rm -f $lmodbase/cacheDir/spiderT.old.lua* 117 | $lmodbase/lmod/lmod/libexec/update_lmod_system_cache_files -d $lmodbase/cacheDir -t $lmodbase/cacheTS.txt $lmodbase/modules:$(realpath $lmodbase/lmod/lmod/modulefiles/Core) 118 | ;; 119 | (spack) 120 | nix-build -o spackBin -A spackBin $nixargs 121 | exec ./spackBin "$@" 122 | ;; 123 | (*) 124 | if [[ $cmd != help ]] ; then 125 | echo "Unknown command" 126 | fi 127 | 128 | cat < gcc symlink 31 | post = '' 32 | os.symlink('gcc', os.path.join(pkg.prefix, 'bin/cc')) 33 | ''; 34 | }; 35 | }; 36 | 37 | aocc = spec: old: { 38 | depends = old.depends // { 39 | llvm = { 40 | # uses llvm package 41 | deptype = ["build"]; 42 | }; 43 | }; 44 | }; 45 | 46 | apptainer = spec: old: { 47 | depends = old.depends // { 48 | # imports package 49 | singularityce = { deptype = ["build"]; }; 50 | }; 51 | }; 52 | 53 | openssh = { 54 | /* disable installing with setuid */ 55 | patches = [./openssh-keysign-setuid.patch]; 56 | }; 57 | 58 | nix = { 59 | patches = [./nix-ignore-fsea.patch]; 60 | }; 61 | 62 | shadow = { 63 | /* disable installing with set[ug]id */ 64 | patches = [./shadow-nosuid.patch]; 65 | }; 66 | 67 | util-linux = { 68 | build = { 69 | enable_makeinstall_setuid = "no"; 70 | }; 71 | }; 72 
| 73 | librsvg = { 74 | build = cargohome.build // { 75 | /* tries to install into gdk-pixbuf -- TODO: patch and use GDK_PIXBUF_MODULE_FILE (like nixpkgs) */ 76 | enable_pixbuf_loader = "no"; 77 | }; 78 | }; 79 | 80 | py-cryptography = cargohome; 81 | py-maturin = cargohome; 82 | py-rpds-py = cargohome; 83 | py-ruff = cargohome; 84 | 85 | /* for pdflatex */ 86 | r = { 87 | build = { 88 | setup = '' 89 | os.environ['TEXMFVAR'] = os.path.join(os.environ['TMPDIR'], 'texmf') 90 | ''; 91 | }; 92 | }; 93 | /* tries to set ~/.gitconfig */ 94 | r-credentials = tmphome; 95 | r-gert = tmphome; 96 | 97 | /* creates various cache stuff */ 98 | npm = tmphome; 99 | 100 | py-jaxlib = spec: old: { 101 | build = { 102 | setup = '' 103 | os.environ['XDG_CACHE_HOME'] = os.environ['TMPDIR'] 104 | os.environ['TEST_TMPDIR'] = os.environ['TMPDIR'] 105 | ''; 106 | }; 107 | }; 108 | 109 | /* uses npm */ 110 | py-jupyter-server = tmphome; 111 | py-jupyter-server-proxy = tmphome; 112 | py-jupyterlmod = tmphome; 113 | py-ipyparallel = tmphome; 114 | 115 | paraview = spec: old: { 116 | /* without explicit libx11 dep, ends up linking system libX11 (perhaps via system libGL) and not working */ 117 | depends = old.depends // { 118 | libx11 = { 119 | deptype = ["link"]; 120 | }; 121 | }; 122 | }; 123 | 124 | emacs = spec: old: { 125 | depends = old.depends // { 126 | fontconfig = { 127 | deptype = ["build" "link"]; 128 | }; 129 | libxft = { 130 | deptype = ["build" "link"]; 131 | }; 132 | libjansson = { 133 | deptype = ["build" "link"]; 134 | }; 135 | }; 136 | }; 137 | 138 | git-lfs = spec: old: { 139 | build = { 140 | setup = '' 141 | os.environ['GOPATH'] = os.path.join(os.environ['TMPDIR'], 'gopath') 142 | os.environ['GOCACHE'] = os.path.join(os.environ['TMPDIR'], 'gocache') 143 | ''; 144 | }; 145 | }; 146 | 147 | go = spec: old: { 148 | build = { 149 | setup = '' 150 | os.environ['GOCACHE'] = os.path.join(os.environ['TMPDIR'], 'go-cache') 151 | ''; 152 | }; 153 | }; 154 | 155 | rust = spec: 
old: { 156 | build = cargohome.build // { 157 | # workaround for https://github.com/rust-lang/cargo/issues/10303 158 | CARGO_NET_GIT_FETCH_WITH_CLI = "true"; 159 | }; 160 | }; 161 | 162 | vtk = spec: old: { 163 | depends = old.depends // { 164 | # imports package 165 | boost = { deptype = ["build"]; }; 166 | }; 167 | }; 168 | 169 | lua-bit32 = noluajit; 170 | lua-bitlib = noluajit; 171 | lua-lpeg = noluajit; 172 | lua-luafilesystem = noluajit; 173 | lua-luaposix = noluajit; 174 | lua-mpack = noluajit; 175 | lua-sol2 = noluajit; 176 | 177 | /* these have cxx deps only but also need a c compiler */ 178 | gpu-burn = needc; 179 | ilmbase = needc; 180 | } 181 | -------------------------------------------------------------------------------- /fi/README.md: -------------------------------------------------------------------------------- 1 | # Flatiron Modules 2 | 3 | This directory contains all the configuration for building the FI modules used on rusty and popeye. 4 | 5 | ## Package management 6 | 7 | Most configuration goes in the [`default.nix`](default.nix) file. 8 | There are a few important sections you may need to interact with. 9 | Search for "----- *header*" to find them. 10 | 11 | ### global package preferences 12 | 13 | This section is an alphabetical list of key-value pairs of package preferences that apply globally. 14 | That is, all modules and all their dependencies used these settings. 15 | For example, this makes the default hdf5 version 1.10.x and enables some features. 16 | These can still be overridden for individual modules or specific dependencies. 
17 | 18 | ``` 19 | hdf5 = { 20 | version = "1.10"; 21 | variants = { 22 | hl = true; 23 | fortran = true; 24 | cxx = true; 25 | }; 26 | }; 27 | ``` 28 | 29 | For example, if you get an error about "XXX dependency YYY: package YYY does not match dependency constraints ZZZ", you may have to add a global preference like: 30 | 31 | ``` 32 | YYY = { 33 | # for XXX 34 | ZZZ 35 | }; 36 | ``` 37 | 38 | ### Core modules 39 | 40 | The core modules are those built only with the default compiler and without MPI. 41 | This includes mainly command-line tools or things without fortran libraries that the user may want to link against. 42 | This is a simple list of packages in alphabetical order. 43 | You can add simple package names here, or `(PACKAGE.withPrefs { ... })` to override global preferences. 44 | You can also add module settings with `{ pkg = PACKAGE; ... }`. 45 | 46 | ### compiler modules 47 | 48 | These modules are built with all compilers (which is really just whatever versions of gcc we've enabled). 49 | This should include libraries that may change between compilers, for example fortran or C++ libraries or other things that may impact performance or linking. 50 | This is otherwise a list just like core modules. 51 | 52 | #### compilers 53 | 54 | The list of enabled compilers, each of which is used to build all packages in this section. 55 | 56 | #### MPI modules 57 | 58 | These modules are built with all MPI libraries (crossed with all compilers). 59 | This is also really just a list, but has a lot of conditionals as some things only build with some compilers or MPI combinations. 60 | 61 | ##### mpis 62 | 63 | The list of all MPI libraries, used to build all packages in this section. 64 | 65 | ##### python+mpi modules 66 | 67 | These modules are built for all python versions and MPI libraries (crossed with all compilers). 
68 | It has both a list of python packages that get combined into a view (like python packages below), and a list of modules built with these pythons (though currently this only includes triqs, which is conditioned to only build for the default python and mpi). 69 | 70 | #### python packages 71 | 72 | These packages are all combined into a single view and exposed as a single module, so should really only contain python packages. 73 | Otherwise it's just another list of packages. 74 | These are built for all enabled python versions (crossed with all compilers). 75 | 76 | ##### python 77 | 78 | The list of all python versions, used to build all packages in this section. 79 | 80 | ### nixpkgs modules 81 | 82 | Modules built from nixpkgs. 83 | This should only be for applications, as they are built purely from nixpkgs, including its compiler and libc. 84 | 85 | ### misc modules 86 | 87 | Other pseudo-modules that don't correspond to packages. 88 | 89 | ## Command-line usage 90 | 91 | Builds and other operations should be run on worker1000 or pcn-1-01. 92 | To test a change, just run "fi/run build -j 1 --cores 8" (or whatever parallelism you prefer). 93 | This will (if successful) produce a "result" directory with the modules. 94 | You can unset MODULEPATH and source "result/setup.sh" in your shell to try out the newly built modules. 95 | 96 | If some package fails to build, you can re-run with "-K" and then (as root) go look at the failed build in /dev/shm/nix-build-NAME (which you should manually clean up when done). 97 | 98 | ### Utility script 99 | 100 | The script [`run`](run) can help with common tasks (some of which are more generally useful): 101 | ``` 102 | Usage: fi/run COMMAND 103 | 104 | Commands: 105 | 106 | build Build modules into result. Takes the same arguments as 107 | nix-build (-jN, --cores M, -K, ...). 108 | spec [PKG] Print the spec tree for a specific package or all modules, 109 | along with the total number of unique packages. 
110 | gc Cleanup any unreferenced nix stores (nix-store --gc). 111 | release Publish a release profile for... 112 | modules nixpack lmod modules (default) 113 | jupyter jupyterhub server environment 114 | nix nix build environment 115 | spack ... Run a spack command in the nixpack environment (things like list 116 | and info work, but those managing packages will not) 117 | ``` 118 | 119 | ### Environment setup 120 | 121 | You can source `env` to setup a build environment for running `nix` command-line tools (like `nix-build`). 122 | For example, to build a single package into `result`, run: 123 | ``` 124 | ./fi/run build -j 1 --cores 8 -A pkgs.packagename 125 | ``` 126 | 127 | To build a specific Python package: 128 | 129 | ``` 130 | ./fi/run build -j 1 -A pkgStruct.compilers.0.pythons.0.packs.pkgs.py-classylss 131 | ``` 132 | 133 | ### Releases 134 | 135 | To do a release: 136 | 137 | 1. `fi/run release` (or `fi/run release all` if enough has changed to affect jupyter, nix, lmod, etc., or whatever subset makes sense) 138 | 2. Release should now show up as new `modules` version, which you can load to test. 139 | 3. Update default symlink in /cm/shared/sw/lmod/modules/modules when ready. 140 | 4. Run `fi/run modules` to update cache (after any change to modules). 141 | 142 | ## Branches 143 | `fi` corresponds to the current modules set; `main` is the upcoming set. 
-------------------------------------------------------------------------------- /spack/modules.py: --------------------------------------------------------------------------------
#!/bin/env python3
"""Generate module files for a set of nixpack packages.

Parameters come in through nixpack (getVar/getJson): the output directory
(`out`), the module set `name`, the module type (`modtype`, e.g. lmod or tcl),
a module `config`, the list of core compilers, and the packages themselves.
Spack's module-writer machinery does the actual file generation.
"""
import os
import json
import datetime

import nixpack
import spack
import spack.modules

# PackageBase moved between modules across spack versions; support both.
try:
    from spack.package_base import PackageBase
except ImportError:
    from spack.package import PackageBase

root = nixpack.getVar('out')        # destination directory for module files
name = nixpack.getVar('name')       # module set name (spack `modules:` key)
modtype = nixpack.getVar('modtype') # e.g. "lmod" or "tcl"

coreCompilers = [nixpack.NixSpec.get(p, top=False) for p in nixpack.getJson('coreCompilers')]

# Merge the nixpack-supplied module config with the core compiler list.
modconf = nixpack.getJson('config')
modconf.setdefault('core_compilers', [])
modconf['core_compilers'].extend(str(comp) for comp in coreCompilers)
core_specs = modconf.setdefault('core_specs', [])

# The spack module-writer class for the requested module type.
cls = spack.modules.module_types[modtype]

class NullContext:
    "contextlib.nullcontext"
    def __enter__(self):
        pass
    def __exit__(self, *exc):
        pass

class TempConfig:
    """Context manager that temporarily sets a spack config entry in the
    'nixpack' scope, restoring the previous value on exit."""
    def __init__(self, key, value):
        self.key = key
        self.value = value

    def __enter__(self):
        self.oldconf = spack.config.get(self.key)
        spack.config.set(self.key, self.value, scope='nixpack')

    def __exit__(self, *exc):
        spack.config.set(self.key, self.oldconf, scope='nixpack')

def tempProjection(projection):
    """Return a context manager applying a per-module filename projection,
    or a no-op context if no projection is given."""
    if projection:
        return TempConfig(f'modules:{name}:{modtype}:projections', {'all': projection})
    else:
        return NullContext()

class FakePackage(PackageBase):
    # Minimal stand-in package for synthetic (static) specs.
    extendees = ()
    provided = {}

class FakeSpec(nixpack.NixSpec):
    """A synthetic spec for a static module (one not backed by a real package),
    named by a hash of its description so distinct descriptions never collide."""
    def __init__(self, desc):
        h = spack.util.hash.b32_hash(json.dumps(desc, sort_keys=True))
        nixspec = {
            'name': f'static-module-{h}',
            'namespace': 'dummy',
            'version': '0',
            'variants': {},
            'flags': {},
            'tests': False,
            'paths': {},
            'depends': desc.get('depends', {}),
            'deptypes': {},
            'patches': []
        }

        # Static modules are treated as externs with a (possibly fake) prefix.
        prefix = desc.get('prefix', f"/{nixspec['namespace']}/{nixspec['name']}")
        nixspec['extern'] = prefix
        for n, d in nixspec['depends'].items():
            try:
                t = d['deptype']
            except Exception:
                # dependency given without an explicit deptype; assume run
                t = ('run',)
            nixspec['deptypes'][n] = t

        super().__init__(nixspec, prefix, True)
        self._package = FakePackage(self)

    def concretize(self):
        self._mark_concrete()

    @property
    def package_class(self):
        return self._package

class ModSpec:
    """One module to generate: either a real nixpack package or a static
    description, plus module-level options (path, projection, environment,
    autoload, etc.)."""
    def __init__(self, p):
        # p is either a bare package (string path or spec dict) or a dict of
        # module options with the package under 'pkg'.
        if isinstance(p, str) or 'spec' in p:
            self.pkg = p
            p = {}
        else:
            self.pkg = p.get('pkg', None)
        if self.pkg:
            self.spec = nixpack.NixSpec.get(self.pkg)
        else:
            self.spec = FakeSpec(p)

        # optional overrides of the displayed name/version
        if 'name' in p:
            self.spec.name = p['name']
        if 'version' in p:
            self.spec.versions = spack.version.VersionList([spack.version.Version(p['version'])])
        self.default = p.get('default', False)       # make this the "default" symlink
        self.static = p.get('static', None)          # literal or templated module content
        self.path = p.get('path', None)              # explicit module path override
        self.environment = p.get('environment', {})
        self.context = p.get('context', {})
        if p.get('core', False):
            core_specs.append(self.spec.format())
        self.projection = p.get('projection')
        self.autoload = p.get('autoload', [])
        self.prerequisites = p.get('prerequisites', [])
        self.postscript = p.get('postscript', '')

    @property
    def writer(self):
        # Lazily construct and cache the spack module writer for this spec.
        try:
            return self._writer
        except AttributeError:
            self.spec.concretize()
            self._writer = cls(self.spec, name)
            for t in ('autoload', 'prerequisites'):
                self._writer.conf.conf[t].extend(map(nixpack.NixSpec.get, getattr(self, t)))
            if 'unlocked_paths' in self.context:
                # resolve relative unlocked paths against the module tree
                for i, p in enumerate(self.context['unlocked_paths']):
                    if not os.path.isabs(p):
                        self.context['unlocked_paths'][i] = os.path.join(self._writer.layout.arch_dirname, p)
            elif self.spec in coreCompilers:
                # messy hack to prevent core compilers from unlocking themselves (should be handled in spack)
                self.context['unlocked_paths'] = []
            for t in ('environment', 'context'):
                spack.modules.common.update_dictionary_extending_lists(
                    self._writer.conf.conf.setdefault(t, {}),
                    getattr(self, t))
            return self._writer

    @property
    def filename(self):
        """Full path of the module file, honoring an explicit path override
        or a filename projection."""
        layout = self.writer.layout
        if self.path:
            base, name = os.path.split(self.path)
            return os.path.join(layout.arch_dirname, base or 'Core', name) + "." + layout.extension
        else:
            with tempProjection(self.projection):
                return layout.filename

    def __str__(self):
        # DEFAULT_FORMAT was renamed across spack versions; support both.
        try:
            default_format = spack.spec.DEFAULT_FORMAT
        except AttributeError:
            default_format = spack.spec.default_format
        return self.spec.cformat(default_format + ' {prefix}')

    def write(self, fn):
        """Write the module file to fn (static content or via the spack
        writer), append any postscript, and create the default symlink."""
        dn = os.path.dirname(fn)
        if self.static:
            os.makedirs(dn, exist_ok=True)
            content = self.static
            if isinstance(content, dict):
                # render static dict content through the writer's template
                template = spack.tengine.make_environment().get_template(self.writer.default_template)
                content.setdefault('spec', content)
                content['spec'].setdefault('target', nixpack.basetarget)
                content['spec'].setdefault('name', self.spec.name)
                content['spec'].setdefault('short_spec', 'static module via nixpack')
                content.setdefault('timestamp', datetime.datetime.now())
                content = template.render(content)
            # 'x' mode: fail rather than silently overwrite an existing module
            with open(fn, 'x') as f:
                f.write(content)
        else:
            with tempProjection(self.projection):
                self.writer.write()
            if self.postscript:
                with open(fn, 'a') as f:
                    f.write(self.postscript)
        if self.default:
            bn = os.path.basename(fn)
            os.symlink(bn, os.path.join(dn, "default"))

specs =
[ModSpec(p) for p in nixpack.getJson('pkgs')]

# Register the assembled module configuration with spack (nixpack scope).
config = {
    'prefix_inspections': modconf.pop('prefix_inspections', {}),
    name: {
        'enable': [modtype],
        'roots': { modtype: root },
        modtype: modconf
    }
}
spack.config.set('modules', config, 'nixpack')
spack.repo.PATH.provider_index # precompute

print(f"Generating {len(specs)} {modtype} modules in {root}...")
def write(s):
    # Generate one module file and report its path relative to the root.
    fn = s.filename
    print(f"  {os.path.relpath(fn, root)}: {s}")
    s.write(fn)
    return fn

def proc(si):
    # Worker entry point: index into the (fork-inherited) specs list.
    return write(specs[si])

if nixpack.cores > 1:
    # Generate modules in parallel; order of results is irrelevant since
    # each writes to its own path.
    import multiprocessing
    pool = multiprocessing.Pool(nixpack.cores)
    paths = pool.imap_unordered(proc, range(len(specs)))
    pool.close()
else:
    pool = None
    paths = map(write, specs)

# Two modules resolving to the same file would silently clobber each other;
# fail loudly instead.
seen = set()
for fn in paths:
    assert fn not in seen, f"Duplicate path: {fn}"
    seen.add(fn)

if pool:
    pool.join()
-------------------------------------------------------------------------------- /nixpkgs/overlay.nix: --------------------------------------------------------------------------------
# Nixpkgs overlay of local fixes: mostly disabling test suites that fail in
# this build environment, plus a few build-flag and source overrides.
self: pkgs:
with pkgs;

let
  # Patch an llvmPackages set, rebuilding its tools with the fix applied.
  llvm_patch = llvmPackages: llvmPackages // (let
    tools = llvmPackages.tools.extend (self: super: {
      # broken glob test?
      libllvm = super.libllvm.overrideAttrs (old: {
        postPatch = old.postPatch + ''
          rm test/Other/ChangePrinters/DotCfg/print-changed-dot-cfg.ll
        '';
      });
    });
  in { inherit tools; } // tools);
in

{
  nss_sss = callPackage sssd/nss-client.nix { };

  patchelf = patchelf.overrideAttrs (old: {
    # always force RPATH (not RUNPATH) so library resolution ignores LD_LIBRARY_PATH
    postPatch = ''
      sed -i 's/static bool forceRPath = false;/static bool forceRPath = true;/' src/patchelf.cc
    '';
    doCheck = false;
  });

  makeShellWrapper = makeShellWrapper.overrideAttrs (old: {
    # avoid infinite recursion by escaping to system (hopefully it's good enough)
    shell = "/bin/sh";
  });

  coreutils = (coreutils.override {
    autoreconfHook = null; # workaround nixpkgs #144747
    texinfo = null;
  }).overrideAttrs (old: {
    preBuild = "touch Makefile.in"; # avoid automake
    doCheck = false; # df/total-verify broken on ceph
  });
  perl = perl.override {
    zlib = buildPackages.zlib.override { fetchurl = stdenv.fetchurlBoot; };
  };

  nix = (nix.override {
    withAWS = false;
  }).overrideAttrs (old: {
    doInstallCheck = false;
  });

  bind = bind.overrideAttrs (old: {
    doCheck = false; # netmgr/tlsdns.c failure
  });

  p11-kit = p11-kit.overrideAttrs (old: {
    doCheck = false; # test-compat sigabrt
  });

  git = git.overrideAttrs (old: {
    doInstallCheck = false; # failure
  });

  ell = ell.overrideAttrs (old: {
    doCheck = false; # test-dbus-properties failure: /tmp/ell-test-bus: EADDRINUSE
  });

  gtk3 = gtk3.override {
    trackerSupport = false;
  };

  # point openssl at the host CA bundle
  openssl_1_0_2 = openssl_1_0_2.overrideAttrs (old: {
    postPatch = old.postPatch + ''
      sed -i 's:define\s\+X509_CERT_FILE\s\+.*$:define X509_CERT_FILE "/etc/pki/tls/certs/ca-bundle.crt":' crypto/cryptlib.h
    '';
  });

  openssl_1_1 = openssl_1_1.overrideAttrs (old: {
    postPatch = old.postPatch + ''
      sed -i 's:define\s\+X509_CERT_FILE\s\+.*$:define X509_CERT_FILE "/etc/pki/tls/certs/ca-bundle.crt":' include/internal/cryptlib.h
    '';
  });

  # we don't need libredirect for anything (just openssh tests), and it's broken
  libredirect = "/var/empty";

  openssh = openssh.overrideAttrs (old: {
    doCheck = false; # strange environment mismatch
  });

  libuv = libuv.overrideAttrs (old: {
    doCheck = false; # failure
  });

  openimageio = openimageio.overrideAttrs (old: {
    # avoid finding system libjpeg.so
    cmakeFlags = old.cmakeFlags ++ ["-DJPEGTURBO_PATH=${libjpeg.out}"];
  });

  openimagedenoise = openimagedenoise.override {
    #tbb = tbb_2021_8;
  };

  openvdb = openvdb.override {
    #tbb = tbb_2021_8;
  };

  embree = (embree.override {
    #tbb = tbb_2021_8;
  }).overrideAttrs (old: {
    # based on spack flags
    cmakeFlags =
      let
        onoff = b: if b then "ON" else "OFF";
        isa = n: f: "-DEMBREE_ISA_${n}=${onoff (!f)}";
      in old.cmakeFlags ++ [
        (isa "SSE2" stdenv.hostPlatform.sse4_2Support)
        (isa "SSE42" stdenv.hostPlatform.avxSupport)
        (isa "AVX" stdenv.hostPlatform.avx2Support)
        (isa "AVX2" stdenv.hostPlatform.avx512Support)
        (isa "AVX512SKX" false)
      ];
  });

  libical = libical.overrideAttrs (old: {
    cmakeFlags = old.cmakeFlags ++ ["-DBerkeleyDB_ROOT_DIR=${db}"];
  });

  llvmPackages_14 = llvm_patch llvmPackages_14;
  llvmPackages_15 = llvm_patch llvmPackages_15;
  llvmPackages_16 = llvm_patch llvmPackages_16;
  llvmPackages_17 = llvm_patch llvmPackages_17;
  llvmPackages_18 = llvm_patch llvmPackages_18;

  libxcrypt = libxcrypt.overrideAttrs (old: {
    /* sign-conversion warnings: */
    configureFlags = old.configureFlags ++ [ "--disable-werror" ];
  });

  opencolorio = opencolorio.overrideAttrs (old: {
    # various minor numeric failures
    doCheck = false;
  });

  openexr_3 = openexr_3.overrideAttrs (old: {
    # -nan != -nan
    doCheck = false;
  });

  python310 = python310.override {
    packageOverrides = self: super: {
      pycryptodome = super.pycryptodome.overridePythonAttrs (old: {
        # FAIL: test_negate (Cryptodome.SelfTest.PublicKey.test_ECC_25519.TestEccPoint_Ed25519)
        doCheck = false;
      });
      eventlet = super.eventlet.overridePythonAttrs (old: {
        # needs libredirect
        doCheck = false;
      });
      numpy = super.numpy.overridePythonAttrs (old: {
        # FAIL: test_dtype.py::TestStructuredObjectRefcounting::test_structured_object_item_setting[] - assert 190388 == 190386
        doCheck = false;
      });
    };
  };

  python311 = python311.override {
    packageOverrides = self: super: {
      numpy = super.numpy.overridePythonAttrs (old: {
        # FAIL: TestAccuracy.test_validate_transcendentals
        doCheck = false;
      });
    };
  };

  python312 = python312.override {
    packageOverrides = self: super: {
      numpy = super.numpy.overridePythonAttrs (old: {
        # FAIL: TestAccuracy.test_validate_transcendentals
        doCheck = false;
      });
    };
  };

  pipewire = (pipewire.override {
    rocSupport = false; # temporarily workaround sox broken download (though probably don't need it anyway)
  }).overrideAttrs (old: {
    buildInputs = old.buildInputs ++ [libopus];
  });

  pulseaudio = pulseaudio.override {
    bluetoothSupport = false;
  };

  blender = (blender.override {
    #tbb = tbb_2021_8;
  }).overrideAttrs (old: {
    cmakeFlags = old.cmakeFlags ++ ["-DWITH_OPENAL=OFF"];
  });

  SDL = SDL.overrideAttrs (old: {
    # this is already patched into configure.in, but not configure
    postConfigure = ''
      sed -i '/SDL_VIDEO_DRIVER_X11_CONST_PARAM_XDATA32/s/.*/#define SDL_VIDEO_DRIVER_X11_CONST_PARAM_XDATA32 1/' include/SDL_config.h
    '';
  });

  umockdev = umockdev.overrideAttrs (old: {
    doCheck = false; # static-code unknown failure
  });

  libpsl = libpsl.overrideAttrs (old: {
    doCheck = false; # valgrind unknown instruction
  });

  haskell = haskell // {
    packages = haskell.packages // {
      ghc8107Binary = haskell.packages.ghc8107Binary.override {
        ghc = haskell.packages.ghc8107Binary.ghc.overrideAttrs (old: {
          postUnpack = old.postUnpack + ''
            patchShebangs ghc-${old.version}/inplace/bin
          '';
        });
      };
    };
    packageOverrides = self: super: {
      crypton = super.crypton.overrideAttrs (old: {
        # FAIL: Ed448 verify sig?
        doCheck = false;
      });
      cryptonite = super.cryptonite.overrideAttrs (old: {
        # FAIL: Ed448 verify sig?
        doCheck = false;
      });
      crypton-x509-validation = super.crypton-x509-validation.overrideAttrs (old: {
        doCheck = false;
      });
      http2 = super.http2.overrideAttrs (old: {
        # tests hang
        doCheck = false;
      });
      tls = super.tls.overrideAttrs (old: {
        doCheck = false;
      });
    };
  };

  jdupes = callPackage ./jdupes.nix { };

  rapidjson = rapidjson.overrideAttrs (old: {
    doCheck = false; # valgrind unknown instruction
  });

  vamp-plugin-sdk = vamp-plugin-sdk.overrideAttrs (old: {
    src = fetchFromGitHub {
      owner = "vamp-plugins";
      repo = "vamp-plugin-sdk";
      rev = "vamp-plugin-sdk-v${old.version}";
      hash = "sha256-5jNA6WmeIOVjkEMZXB5ijxyfJT88alVndBif6dnUFdI=";
    };
  });
}
-------------------------------------------------------------------------------- /fi/repo/packages/blender/package.py: --------------------------------------------------------------------------------
# Copyright
# 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
from spack_repo.builtin.build_systems.cmake import CMakePackage
from spack.package import *


class Blender(CMakePackage):
    """Blender is the free and open source 3D creation suite.
    It supports the entirety of the 3D pipeline-modeling,
    rigging, animation, simulation, rendering, compositing and
    motion tracking, even video editing and game creation."""

    homepage = "https://www.blender.org/"
    url = "http://download.blender.org/source/blender-2.79b.tar.gz"

    version('2.92.0',
            url='https://download.blender.org/source/blender-2.92.0.tar.xz',
            sha256='e791cfc403292383577c3c8ce2cd34e5aa2cd8da0a7483041049a1609ddb4595')
    version('2.80', sha256='cd9d7e505c1f6e63a4f72366ed04d446859977eeb34cde21283aaea6a304a5c0')
    version('2.79b', sha256='4c944c304a49e68ac687ea06f5758204def049b66dc211e1cffa1857716393bc')

    variant('cycles', default=False, description='Build with cycles support')
    variant('blender', default=True, description='disable to build only the blender player')
    variant('player', default=True, description='Build Player')
    variant('ffmpeg', default=False, description='Enable FFMPeg Support')
    variant('headless', default=False, description='Build without graphical support (renderfarm, server mode only)')
    variant('llvm', default=False, description='Necessary for OSL.')
    variant('ocio', default=False, description='Currently broken due to conflicting python')
    variant('opensubdiv', default=False, description='Build with opensubdiv support')
    variant('jemalloc', default=True)

    # Version floors below follow upstream's build-environment pins:
    # https://developer.blender.org/diffusion/B/browse/blender-v2.92-release/build_files/build_environment/cmake/versions.cmake
    depends_on('python@3.5:', when="@:2.79b")
    depends_on('python@3.7:', when="@2.80:")
    depends_on('python@3.7.7:', when="@2.92.0:")

    depends_on('py-numpy', when="@2.80:")
    depends_on('py-numpy@1.17.5:', when='@2.92.0:')

    depends_on('glew')
    depends_on('glew@1.13.0:', when='@2.92.0:')

    #depends_on('opengl')
    # depends_on('openglu')
    depends_on('gl')
    depends_on('glu')
    depends_on('glx')

    depends_on('libpng')
    depends_on('libpng@1.6.37:', when='@2.92.0:')

    depends_on('libjpeg')
    depends_on('libjpeg@2.0.4:', when='@2.92.0:')

    depends_on('openjpeg')
    depends_on('openjpeg@2.3.1:', when='@2.92.0:')

    # depends_on('boost@1.49:1.69')
    depends_on('boost@1.70.0:', when='@2.92.0:')

    depends_on('openimageio', when='+cycles')
    depends_on('openimageio@2.1.15.0:', when='@2.92.0: +cycles')

    # Upper bound per: https://developer.blender.org/T54779
    depends_on('ffmpeg@3.2.1:3.999', when='@:2.79b+ffmpeg')
    depends_on('ffmpeg@3.2.1:', when='@2.80:+ffmpeg')
    depends_on('ffmpeg@4.2.3:', when='@2.92.0:+ffmpeg')

    # depends_on('opencolorio@1.0:', when='+ocio')

    depends_on('llvm@3.0:', when='+llvm')
    depends_on('llvm@9.0.1:', when='@2.92.0:+llvm')
    # depends_on('openshadinglanguage')
    # depends_on('openvdb@3.1:')

    # FIXME: this is only temporarily commented out. needs to be fixed
    # depends_on('freetype')
    depends_on('freetype@2.10.2:', when='@2.92.0:')

    depends_on('libuuid')
    depends_on('jemalloc', when='+jemalloc')
    depends_on('ilmbase')

    depends_on('opensubdiv+openmp', when='+opensubdiv')
    depends_on('opensubdiv@3.4.3:', when='@2.92.0:+opensubdiv')

    #depends_on('cuda@10.1.0:10.1.999', when='+cycles', type=('link','run'))
    depends_on('cuda@11.0:', when='@2.92.0:+cycles', type=('link','run'))
    # FIXME: The version of GCC should probably be the version of GCC that is actually
    # compiling blender, not hardcoding the version that the package creator is using.
    # depends_on('gcc@7.4.0', when='+cycles', type=('run'))


    # Dependencies for 2.92.0
    depends_on('zlib@1.2.11:', when='@2.92.0:')
    depends_on('openal-soft@1.20.1:', when='@2.92.0:')
    depends_on('c-blosc@1.5.0:', when='@2.92.0:')
    # depends_on('pthreads@3.0.0:', when='@2.92.0:')
    # depends_on('openexr@2.4.0:', when='@2.92.0:')
    # depends_on('freeglut@3.0.0:', when='@2.92.0:')
    depends_on('alembic@1.7.12:', when='@2.92.0:')
    # depends_on('glfw@3.1.2:', when='@2.92.0:')
    # depends_on('sdl@2.0.12:', when='@2.92.0:')
    # depends_on('opencollada@1.6.68:', when='@2.92.0:')
    # depends_on('opencolorio@1.1.1:', when='@2.92.0:')
    depends_on('libtiff@4.1.0:', when='@2.92.0:')
    # depends_on('openshadinglanguage@1.10.10:', when='@2.92.0:')
    # depends_on('tbb@2019_u9:', when='@2.92.0:')
    # depends_on('openvdb@7.0.0:', when='@2.92.0:')
    # depends_on('idna@2.9:', when='@2.92.0:')
    # depends_on('lame@3.100:', when='@2.92.0:')
    depends_on('libogg@1.3.4:', when='@2.92.0:')
    depends_on('libvorbis@1.3.6:', when='@2.92.0:')
    depends_on('libtheora@1.1.1:', when='@2.92.0:')
    depends_on('flac@1.3.3:', when='@2.92.0:')
    # depends_on('vpx@1.8.2:', when='@2.92.0:')
    depends_on('opus@1.3.1:', when='@2.92.0:')
    # depends_on('xvidcore@1.3.7:', when='@2.92.0:')
    depends_on('fftw@3.3.8:', when='@2.92.0:')
    depends_on('libiconv@1.16:', when='@2.92.0:')
    depends_on('libsndfile@1.0.28:', when='@2.92.0:')
    # sndfile

    # FIXME: ~ispc is temporary fix for
    # ispc requires llvm variant ~libcxx, but spec asked for +libcxx
    depends_on('embree@3.10.0:~ispc', when='@2.92.0:')

    depends_on('pugixml@1.10:', when='@2.92.0:')

    depends_on('gmp@6.2.0:', when='@2.92.0:')

    def setup_run_environment(self, env):
        # Blender needs the building compiler's runtime on PATH at run time.
        env.prepend_path('PATH', os.path.dirname(self.compiler.cc))

    def cmake_args(self):
        """Assemble CMake flags from the concretized spec's variants."""
        spec = self.spec
        args = []

        # Point the build at spack's python rather than whatever CMake finds.
        python_exe = spec['python'].command.path
        python_lib = spec['python'].libs[0]
        python_include_dir = spec['python'].headers.directories[0]

        args.append('-DPYTHON_EXECUTABLE={0}'.format(python_exe))
        args.append('-DPYTHON_LIBRARY={0}'.format(python_lib))
        args.append('-DPYTHON_INCLUDE_DIR={0}'.format(python_include_dir))
        args.append('-DPYTHON_VERSION={0}'.format(spec['python'].version.up_to(2)))

        args.append('-DWITH_INSTALL_PORTABLE=NO')

        args.append('-DCMAKE_CXX_FLAGS=-I{0}/include/OpenEXR'.format(spec['ilmbase'].prefix))

        if '@2.8:' in spec:
            # NOTE(review): assumes numpy is installed under prefix.lib (not
            # lib64) — verify on targets where python uses lib64.
            args.append(
                '-DPYTHON_NUMPY_PATH:PATH={0}/python{1}/site-packages'.format(
                    spec['py-numpy'].prefix.lib,
                    spec['python'].version.up_to(2)))
            args.append(
                '-DPYTHON_NUMPY_INCLUDE_DIRS:PATH={0}/python{1}/site-packages/numpy/core/include'.format(
                    spec['py-numpy'].prefix.lib,
                    spec['python'].version.up_to(2)))

        if '+opensubdiv' in spec:
            args.append('-DWITH_OPENSUBDIV:BOOL=ON')
        else:
            args.append('-DWITH_OPENSUBDIV:BOOL=OFF')

        if '~cycles' in spec:
            args.append('-DWITH_CYCLES:BOOL=OFF')

        if '~blender' in spec:
            args.append('-DWITH_BLENDER:BOOL=OFF')
            #
UNTESTED 177 | 178 | if '+ffmpeg' in spec: 179 | args.append('-DWITH_CODEC_FFMPEG:BOOL=ON') 180 | 181 | if '+headless' in spec: 182 | args.append('-DWITH_HEADLESS:BOOL=OFF') 183 | 184 | if '+llvm' in spec: 185 | args.append('-DWITH_LLVM:BOOL=ON') 186 | 187 | if '+player' in spec: 188 | args.append('-DWITH_PLAYER:BOOL=ON') 189 | 190 | # >> 106 CMake Error at CMakeLists.txt:924 (message): 191 | # 107 WITH_MOD_OCEANSIM requires WITH_FFTW3 to be ON 192 | if self.spec.satisfies('@2.92.0:'): 193 | args.append('-DWITH_MOD_OCEANSIM:BOOL=OFF') 194 | 195 | return args 196 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # nixpack = [nix](https://nixos.org/nix)+[spack](https://spack.io/) 2 | 3 | A hybrid of the [nix package manager](https://github.com/NixOS/nix) and [spack](https://github.com/spack/spack) where nix (without nixpkgs) is used to solve and manage packages, using the package repository, builds, and modules from spack. 4 | 5 | If you love nix's expressiveness and efficiency, but don't need the purity of nixpkgs (in the sense of independence from the host system)... if you like the spack packages and package.py format, but are tired of managing roots and concretizations, this may be for you. 6 | Nix on the outside, spack on the inside. 7 | 8 | ## Usage 9 | 10 | 1. Install and configure [nix](https://nixos.org/manual/nix/stable/#chap-installation), sufficient to build derivations. 11 | 1. Edit (or copy) [`default.nix`](default.nix). 12 | - It's recommended to set `packs.spackSrc.rev` to a fixed version of spack. Changing the spack version requires all packages to be rebuilt. If you want to update individual packages without a rebuild, you can put them in `spack/repo/packages` (or another repo in `packs.repos`). 13 | - Set `packs.os` and `packs.global.target`. 
14 | - Set `packs.spackConfig.config.source_cache` and add any other custom spack config you want (nixpack ignores system and user spack config for purity, but will load default and site config from the spack repo itself). 15 | - Set `bootstrapPacks.package.compiler` to a pre-existing (system/external) compiler to be used to bootstrap. 16 | - Set `packs.package.gcc` to choose your default compiler, or `packs.package.compiler` to use something other than gcc. 17 | - Add any other package preferences to `packs.package` (versions, variants, virtual providers, etc.) 18 | - See `packs.global.fixedDeps`: by default multiple different instances of any given package may be built in order to satisfy requirements, but you may prefer to force only one version of each package, which will improve performance and build times. 19 | 1. Run `nix-build -A pkgs.foo` to build the spack package `foo`. 20 | 1. To build modules, configure `packs.mods` and run `nix-build -A mods`. 21 | 22 | ## Flatiron Specific 23 | 24 | We have our local Flatiron-specific configuration and repositories in [`fi`](fi), complete with views and modules, some of which may be more generally useful or at least helpful reference or template for creating a full working system. 25 | See the [README](fi/README.md) in that directory for more information. 26 | 27 | ## Compatibility 28 | 29 | nixpack uses an unmodified checkout of spack (as specified in `spackSrc`), and should work with other forks as well. 30 | However, it makes many assumptions about the internals of spack builds, so may not work on much older (or newer) versions. 31 | 32 | ## Implementation and terminology 33 | 34 | In nixpkgs, there's mainly the concept of package, and arguments that can be overridden. 35 | In spack, there are packages and specs, and "spec" is used in many different ways. 36 | We define a few more specific concepts to merge the two. 37 | 38 | ### package descriptor 39 | 40 | The metadata for a spack package. 
41 | These are generated by [`spack/generate.py`](spack/generate.py) from the spack repo `package.py`s and loaded into `packs.repo`. 42 | They look like this: 43 | 44 | ```nix 45 | example = { 46 | namespace = "builtin"; 47 | version = ["2.0" "1.2" "1.0"]; # in decreasing order of preference 48 | variants = { 49 | flag = true; 50 | option = ["a" "b" "c"]; # single-valued, first is default 51 | multi = { 52 | a = true; 53 | b = false; 54 | }; 55 | }; 56 | depends = { 57 | /* package preferences for dependencies (see below) */ 58 | compiler = { # added implicitly if missing 59 | deptype = ["build" "link"]; 60 | }; 61 | deppackage = { 62 | version = "1.5:2.1"; 63 | deptype = ["run" "test"]; 64 | }; 65 | notused = null; 66 | }; 67 | provides = { 68 | virtual = "2:"; 69 | }; 70 | paths = {}; # paths to tools provided by this package (like `cc` for compilers) 71 | patches = []; # extra patches to apply (in addition to those in spack) 72 | conflicts = []; # any conflicts (non-empty means invalid) 73 | }; 74 | ``` 75 | 76 | Most things default to empty. 77 | This is not a complete build description, just the metadata necessary to resolve dependencies (concretize). 78 | In practice, these are constructed as functions that take a resolved package spec as an argument, so that dependencies and such be conditional on a specific version and variants. 79 | 80 | You can build the repo using `nix-build -A spackRepo` (and see `result`). 81 | 82 | ### package preferences 83 | 84 | Constraints for a package that come from the user, or a depending package. 85 | These are used in package descriptor depends and in user global and per-package preferences. 86 | They look similar to package descriptors and can be used to override or constrain some of those values. 

```nix
example = {
  version = "1.3:1.5";
  variants = {
    flag = true;
    option = "b";
    /* multi options can be specified as list of trues or explicitly */
    multi = ["a"];
    multi = {
      a = true;
      b = false;
    };
  };
  depends = {
    compiler = {
      name = "clang"; # use clang as the compiler virtual provider
    };
    deppackage = {
      version = ... # use a specific version for a dependency
    };
    virtualdep = {
      name = "provider";
      version = ...;
      ...
    };
    # dependencies can also be set to a specific package:
    builddep = packs.pkgs.builddep;
  };
  provides = {
    virtual = "version"; # this requires that this package provides virtual (not that it does)
  };
  patches = []; # extra patches to apply (in addition to those in the descriptor)
  extern = "/opt/local/mypackage"; # a prefix string or derivation (e.g., nixpkgs package) for an external installation (overrides depends)
  fixedDeps = false; # only use user preferences to resolve dependencies (see default.nix)
  target = "microarch"; # defaults to currentSystem (e.g., x86_64)
  verbose = true; # to enable nix-build -Q and nix-store -l (otherwise only spack keeps build logs)
  tests = false; # run tests (not implemented)
  resolver = ...; # where to find dependent packages (see default.nix)
};
```

### package spec

A resolved (concrete) package specifier created by applying (optional) package preferences to a package descriptor.
This looks just like a package descriptor but with concrete values.
It also includes settings from preferences like `extern` and `target`.

### package

An actual derivation.
These contain a `spec` metadata attribute.
They also have a `withPrefs` function that can be used to make a new version of this package with updated prefs (unless they are extern).

### compiler

Rather than spack's dedicated `%compiler` concept, we introduce a new virtual "compiler" that all packages depend on and is provided by gcc and llvm (by default).
By setting the package preference for compiler, you determine which compiler to use.

### `packs`

The world, like `nixpkgs`.
It contains `pkgs` with actual packages, as well as `repo`, `view`, `modules`, and other functions.
See [`packs/default.nix`](packs/default.nix) for full details.

You can have one or more `packs` instances.
Each instance is defined by a set of global user preferences, as passed to `import ./packs`.
You can also create additional sets based on an existing one using `packs.withPrefs`.
Thus, different package sets can have different providers or package settings (like a different compiler, mpi version, blas provider, variants, etc.).

See [`default.nix`](default.nix) for preferences that can be set and their descriptions.

### Bootstrapping

The default compiler is specified in [`default.nix`](default.nix) by `compiler = bootstrapPacks.pkgs.gcc` which means that the compiler used to build everything in `packs` comes from `bootstrapPacks`, and is built with the preferences and compiler defined there.
`bootstrapPacks` in turn specifies a compiler of gcc with `extern` set, i.e., one from the host system.
This compiler is used to build any other bootstrap packages, which are then used to build the main compiler.
You could specify more extern packages in bootstrap to speed up bootstrapping.

You could also add additional bootstrap layers by setting the bootstrap compiler `resolver` to a different set.
You could also replace specific dependencies or packages from a different `packs` set to bootstrap or modify other packages.
168 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | let 2 | 3 | lib = import packs/lib.nix; 4 | packs = import ./packs { 5 | /* packs prefs */ 6 | system = builtins.currentSystem; 7 | os = "centos7"; 8 | 9 | /* where to get the spack respository. Note that since everything depends on 10 | spack, changing the spack revision will trigger rebuilds of all packages. 11 | Can also be set a path (string) to an existing spack install, which will 12 | eliminate the dependency and also break purity, so can cause your repo 13 | metadata to get out of sync, and is not recommended for production. 14 | See also repos and repoPatch below for other ways of updating packages 15 | without modifying the spack repo. */ 16 | spackSrc = { 17 | /* default: 18 | url = "https://github.com/spack/spack"; */ 19 | ref = "releases/v1.1"; 20 | rev = "0c2be44e4ece21eb091ad5de4c97716b7c6d4c87"; 21 | }; 22 | /* extra config settings for spack itself. Can contain any standard spack 23 | configuration, but don't put compilers (automatically generated), packages 24 | (based on package preferences below), or modules (passed to modules 25 | function) here. */ 26 | spackConfig = { 27 | config = { 28 | /* must be set to somewhere your nix builder(s) can write to */ 29 | source_cache = "/tmp/spack_cache"; 30 | }; 31 | }; 32 | /* environment for running spack. spack needs things like python, cp, tar, 33 | etc. These can be string paths to the system or to packages/environments 34 | from nixpkgs or similar, but regardless need to be external to nixpacks. */ 35 | spackPython = "/usr/bin/python3"; 36 | spackEnv = { 37 | PATH = "/bin:/usr/bin"; 38 | }; 39 | 40 | /* packs can optionally include nixpkgs for additional packages or bootstrapping. 41 | omit to disable. 
*/ 42 | nixpkgsSrc = { 43 | #url = "https://github.com/NixOS/nixpkgs"; 44 | ref = "release-24.11"; 45 | #rev = "c8c5faff75fd017e468e8733312525b51cea1af2"; 46 | }; 47 | 48 | /* spack repos to include by path, managed by nixpack. 49 | These should be normal spack repos, including repo.yaml. 50 | Repos specified here have the advantage of correctly managing nix 51 | dependencies, so changing a package will only trigger rebuilds of 52 | it and dependent packages. */ 53 | repos = [ 54 | spack/repo 55 | spack-packages/repos/spack_repo/builtin 56 | ]; 57 | /* updates to the spack repo (see patch/default.nix for examples) 58 | repoPatch = { 59 | package = [spec: [old:]] { 60 | new... 61 | }; 62 | }; */ 63 | 64 | /* global defaults for all packages (merged with per-package prefs) */ 65 | global = { 66 | /* spack architecture target */ 67 | target = "broadwell"; 68 | /* set spack verbose to print build logs during spack bulids (and thus 69 | captured by nix). regardless, spack also keeps logs in pkg/.spack. */ 70 | verbose = false; 71 | /* enable build tests (and test deps) */ 72 | tests = false; 73 | /* how to resolve dependencies, similar to concretize together or separately. 74 | fixedDeps = false: Dependencies are resolved dynamically based on 75 | preferences and constraints imposed by each depender. This can result 76 | in many different versions of each package existing in packs. 77 | fixedDeps = true: Dependencies are resolved only by user prefs, and an 78 | error is produced if dependencies don't conform to their dependers' 79 | constraints. This ensures only one version of each dependent package 80 | exists within packs. Different packs with different prefs may have 81 | different versions. Top-level packages explicitly resolved with 82 | different prefs or dependency prefs may also be different. Virtuals 83 | are always resolved (to a package name) dynamically. 84 | this can be overridden per-package for only that package's dependencies. 
*/ 85 | fixedDeps = false; 86 | /* How to find dependencies. Normally dependencies are pulled from other 87 | packages in this same packs. In some cases you may want some or all 88 | dependencies for a package to come from a different packs, perhaps 89 | because you don't care if build-only dependencies use the same compiler 90 | or python version. This lets you override where dependencies come from. 91 | It takes two optional arguments: 92 | * list of dependency types (["build" "link" "run" "test"]) 93 | * the name of the dependent package 94 | And should return either: 95 | * null, meaning use the current packs default 96 | * an existing packs object, to use instead 97 | * a function taking package preferences to a resolved package (like 98 | packs.getResolver). In this case, prefs will be {} if fixedDeps = 99 | true, or the dependency prefs from the parent if fixedDeps = false. 100 | resolver = [deptype: [name: ]]; */ 101 | }; 102 | /* package-specific preferences */ 103 | package = 104 | /* set all compiler virtuals */ 105 | lib.compilers bootstrapPacks.pkgs.gcc // { 106 | /* preferences for individual packages or virtuals */ 107 | /* get cpio from system: 108 | cpio = { 109 | extern = "/usr"; 110 | version = "2.11"; 111 | }; */ 112 | /* specify virtual providers: can be (lists of) package or { name; ...prefs } 113 | mpi = [ packs.pkgs.openmpi ]; 114 | java = { name = "openjdk"; version = "10"; }; */ 115 | /* use gcc 7.x: 116 | gcc = { 117 | version = "7"; 118 | }; */ 119 | /* enable cairo+pdf: 120 | cairo = { 121 | variants = { 122 | pdf = true; 123 | }; 124 | }; */ 125 | /* use an external slurm: 126 | slurm = { 127 | extern = "/cm/shared/apps/slurm/current"; 128 | version = "20.02.5"; 129 | variants = { 130 | sysconfdir = "/cm/shared/apps/slurm/var/etc/slurm"; 131 | pmix = true; 132 | hwloc = true; 133 | }; 134 | }; */ 135 | nix = { 136 | variants = { 137 | storedir = let v = builtins.getEnv "NIX_STORE_DIR"; in if v == "" then "none" else v; 138 | statedir = 
let v = builtins.getEnv "NIX_STATE_DIR"; in if v == "" then "none" else v; 139 | }; 140 | }; 141 | }; 142 | 143 | }; 144 | 145 | /* A set of packages with different preferences, based on packs above. 146 | This set is used to bootstrap gcc, but other packs could also be used to set 147 | different virtuals, versions, variants, compilers, etc. */ 148 | bootstrapPacks = packs.withPrefs { 149 | package = lib.compilers { 150 | /* must be set to an external compiler capable of building compiler (above) */ 151 | name = "gcc"; 152 | version = "8.5.0"; 153 | extern = "/usr"; /* install prefix */ 154 | extraAttributes = { 155 | compilers = { 156 | c = "/usr/bin/gcc"; 157 | cxx = "/usr/bin/g++"; 158 | fortran = "/usr/bin/gfortran"; 159 | }; 160 | }; 161 | /* can also have multiple layers of bootstrapping, where each compiler is built by another */ 162 | } // { 163 | /* can speed up bootstrapping by providing more externs 164 | zlib = { 165 | extern = "/usr"; 166 | version = "..."; 167 | }; ... */ 168 | }; 169 | }; 170 | 171 | gcc10Packs = packs.withPrefs { 172 | package = lib.compilers (packs.pkgs.gcc.withPrefs { #override package defaults 173 | version = "10"; 174 | }); 175 | }; 176 | 177 | in 178 | 179 | packs // { 180 | mods = packs.modules { 181 | /* this corresponds to module config in spack */ 182 | /* modtype = "lua"; */ 183 | coreCompilers = [packs.pkgs.c bootstrapPacks.pkgs.c]; 184 | config = { 185 | /* 186 | hierarchy = ["mpi"]; 187 | hash_length = 0; 188 | projections = { 189 | # warning: order is lost 190 | "package+variant" = "{name}/{version}-variant"; 191 | }; 192 | prefix_inspections = { 193 | "dir" = ["VAR"]; 194 | }; 195 | all = { 196 | autoload = "none"; 197 | }; 198 | package = { 199 | environment = { 200 | prepend_path = { 201 | VAR = "{prefix}/path"; 202 | }; 203 | }; 204 | }; 205 | */ 206 | }; 207 | pkgs = (with packs.pkgs; [ 208 | gcc 209 | zlib 210 | ]) ++ (with gcc10Packs.pkgs; [ 211 | { pkg = c; 212 | default = true; # set as default version 213 | 
# extra content to append to module file 214 | postscript = '' 215 | LModMessage("default gcc loaded") 216 | ''; 217 | } 218 | zlib 219 | /* 220 | { # a custom module, not from spack 221 | name = "other-package"; 222 | version = "1.2"; 223 | prefix = "/opt/other"; 224 | # overrides for module config 225 | environment = { 226 | prepend_path = { 227 | VAR = "{prefix}/path"; 228 | }; 229 | }; 230 | projection = "{name}/{version}-local"; 231 | context = { # other variables to set in the template 232 | short_description = "Some other package"; 233 | }; 234 | } 235 | */ 236 | ]); 237 | }; 238 | } 239 | -------------------------------------------------------------------------------- /packs/lib.nix: -------------------------------------------------------------------------------- 1 | with builtins; 2 | rec { 3 | 4 | id = x: x; 5 | const = x: y: x; 6 | flip = f: a: b: f b a; 7 | fix = f: let x = f x; in x; 8 | when = c: x: if c then x else null; 9 | coalesce = x: d: if x == null then d else x; 10 | coalesces = l: let r = remove null l; in when (r != []) (head r); 11 | coalesceWith = f: a: b: if a == null then b else if b == null then a else f a b; 12 | mapNullable = f: a: if a == null then a else f a; 13 | 14 | applyOptional = f: x: if isFunction f then f x else f; 15 | 16 | cons = x: l: [x] ++ l; 17 | toList = x: if isList x then x else if x == null then [] else [x]; 18 | fromList = x: if isList x && length x == 1 then head x else x; 19 | optionals = c: x: if c then x else []; 20 | 21 | traceId = x: trace x x; 22 | traceLabel = s: x: trace ("${s}: ${toJSON x}") x; 23 | traceId' = x: deepSeq x (traceId x); 24 | 25 | hasPrefix = pref: str: substring 0 (stringLength pref) str == pref; 26 | takePrefix = pref: str: if hasPrefix pref str then substring (stringLength pref) (-1) str else str; 27 | 28 | remove = e: filter (x: x != e); 29 | nub = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) []; 30 | nubBy = eq: l: 31 | if l == [] then l else 32 | let x = head l; in 33 | 
cons x (nubBy eq (filter (y: ! (eq x y)) (tail l))); 34 | 35 | /* is a a prefix of b? */ 36 | listHasPrefix = a: b: 37 | a == [] || b != [] && head a == head b && listHasPrefix (tail a) (tail b); 38 | 39 | union = a: b: a ++ filter (x: ! elem x a) b; 40 | 41 | /* do the elements of list a all appear in-order in list b? */ 42 | subsetOrdered = a: b: 43 | a == [] || (b != [] && subsetOrdered (tail a) (if head a == head b then tail b else b)); 44 | 45 | mapKeys = f: set: 46 | listToAttrs (map (a: { name = f a; value = set.${a}; }) (attrNames set)); 47 | 48 | mergeWithKeys = f: a: b: 49 | mapAttrs (k: v: if hasAttr k a && hasAttr k b then f k a.${k} v else v) (a // b); 50 | 51 | mergeWith = f: mergeWithKeys (k: f); 52 | 53 | recursiveUpdate = a: b: 54 | if isAttrs a && isAttrs b then 55 | mergeWith recursiveUpdate a b 56 | else b; 57 | 58 | /* should this be lazy? */ 59 | concatAttrs = foldl' (a: b: a // b) {}; 60 | 61 | filterAttrs = pred: set: 62 | listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [{ inherit name; value = v; }] else []) (attrNames set)); 63 | 64 | splitRegex = r: s: filter isString (split r s); 65 | 66 | versionOlder = v1: v2: compareVersions v1 v2 < 0; 67 | versionNewer = v1: v2: compareVersions v1 v2 > 0; 68 | versionAtLeast = v1: v2: compareVersions v1 v2 >= 0; 69 | versionAtMost = v1: v2: compareVersions v1 v2 <= 0; 70 | versionMax = v1: v2: if versionAtLeast v1 v2 then v1 else v2; 71 | 72 | versionSplitCompare = s1: s2: 73 | if s1 == [] then -2 else 74 | if s2 == [] then 2 else 75 | let c = compareVersions (head s1) (head s2); in 76 | if c == 0 then versionSplitCompare (tail s1) (tail s2) else 77 | c; 78 | /* like compareVersions but -2 if s1 is a prefix of s2, and +2 if s2 is a prefix of s1 */ 79 | versionCompare = v1: v2: if v1 == v2 then 0 else versionSplitCompare (splitVersion v1) (splitVersion v2); 80 | 81 | /* while 3.4 > 3 by nix (above), we want to treat 3.4 < 3 82 | v are concrete versions, s version specs */ 83 
| versionAtMostSpec = v1: s2: versionCompare v1 s2 != 1; 84 | /* here 3.4 < 3 */ 85 | versionMinSpec = s1: s2: { 86 | "-2" = s2; 87 | "-1" = s1; 88 | "0" = s1; 89 | "1" = s2; 90 | "2" = s1; 91 | }.${toString (versionCompare s1 s2)}; 92 | 93 | versionIsConcrete = v: v != null && match ".*[:,].*" v == null; 94 | 95 | versionRange = v: let 96 | s = splitRegex ":" v; 97 | l = length s; 98 | in 99 | if l == 1 then { min = v; max = v; } else 100 | if l == 2 then { min = head s; max = elemAt s 1; } else 101 | throw "invalid version range ${v}"; 102 | 103 | rangeVersion = a: b: 104 | if a == b then a else "${a}:${b}"; 105 | 106 | /* spack version spec semantics: does concrete version v match spec m? */ 107 | versionMatches = v: match: 108 | if match == null then true else 109 | if isList match then all (versionMatches v) match else 110 | let 111 | versionMatch = m: 112 | if hasPrefix "=" m then v == substring 1 (-1) m else 113 | let 114 | mr = versionRange m; 115 | in versionAtLeast v mr.min && 116 | (versionAtMostSpec v mr.max); 117 | in any versionMatch (splitRegex "," match); 118 | 119 | versionsOverlap = a: b: 120 | let 121 | as = splitRegex "," a; 122 | bs = splitRegex "," b; 123 | vo = a: b: let 124 | ar = versionRange a; 125 | br = versionRange b; 126 | in versionAtMostSpec ar.min br.max && 127 | versionAtMostSpec br.min ar.max; 128 | in any (a: any (vo a) bs) as; 129 | 130 | versionsIntersect = a: b: 131 | let 132 | as = splitRegex "," a; 133 | bs = splitRegex "," b; 134 | vi = a: b: let 135 | ar = versionRange a; 136 | br = versionRange b; 137 | in rangeVersion (versionMax ar.min br.min) (versionMinSpec ar.max br.max); 138 | in 139 | concatStringsSep "," (concatMap (a: map (vi a) bs) as); 140 | 141 | /* does concrete variant v match spec m? 
*/ 142 | variantMatches = v: ms: all (m: 143 | if isAttrs v then v.${m} else 144 | if isList v then elem m v else 145 | v == m) (toList ms); 146 | 147 | deptypeChars = dt: 148 | concatStringsSep "" (map (t: 149 | if elem t dt then substring 0 1 t else " ") 150 | [ "build" "link" "run" "test" ]); 151 | 152 | /* a very simple version of Spec.format */ 153 | specFormat = fmt: spec: let 154 | variantToString = n: v: 155 | if v == true then "+"+n 156 | else if v == false then "~"+n 157 | else " ${n}="+ 158 | (if isList v then concatStringsSep "," v 159 | else if isAttrs v then concatStringsSep "," (map (n: variantToString n v.${n}) (attrNames v)) 160 | else toString v); 161 | fmts = { 162 | inherit (spec) name version; 163 | variants = concatStringsSep "" (map (v: variantToString v spec.variants.${v}) 164 | (sort (a: b: typeOf spec.variants.${a} < typeOf spec.variants.${b}) (attrNames spec.variants))); 165 | deptype = if spec ? deptype 166 | then " [" + deptypeChars spec.deptype + "]" 167 | else ""; 168 | flags = concatStringsSep "" (map (f: " ${f}=\""+(concatStringsSep " " spec.flags.${f})+"\"") 169 | (attrNames spec.flags)); 170 | }; 171 | in replaceStrings (map (n: "{${n}}") (attrNames fmts)) (attrValues fmts) fmt; 172 | 173 | /* simple name@version */ 174 | specName = specFormat "{name}@{version}"; 175 | 176 | /* like spack default format */ 177 | specToString = specFormat "{name}@{version}{variants}{flags}{deptype}"; 178 | 179 | /* check that a given spec conforms to the specified preferences */ 180 | specMatches = spec: 181 | { name ? null 182 | , version ? null 183 | , variants ? {} 184 | , patches ? [] 185 | , depends ? {} 186 | , provides ? {} 187 | , extern ? 
spec.extern 188 | } @ prefs: 189 | (name == null || name == spec.name) 190 | && versionMatches spec.version version 191 | && all (name: variantMatches (spec.variants.${name} or null) variants.${name}) (attrNames variants) 192 | && subsetOrdered patches spec.patches 193 | && all (name: specMatches spec.depends.${name} depends.${name}) (attrNames depends) 194 | && all (name: hasAttr name spec.provides && versionsOverlap spec.provides.${name} provides.${name}) (attrNames provides) 195 | && spec.extern == extern; 196 | 197 | /* determine if something is a package (derivation) */ 198 | isPkg = p: p ? out; 199 | 200 | /* update two prefs, with the second overriding the first */ 201 | prefsUpdate = let 202 | scalar = a: b: b; 203 | updaters = { 204 | name = scalar; 205 | version = scalar; 206 | variants = mergeWith (a: b: 207 | if isAttrs a && isAttrs b then a // b 208 | else b); 209 | flags = a: b: a // b; 210 | patches = scalar; 211 | depends = mergeWith prefsUpdate; 212 | extern = scalar; 213 | tests = scalar; 214 | fixedDeps = scalar; 215 | resolver = scalar; 216 | deptype = scalar; 217 | target = scalar; 218 | provides = a: b: a // b; 219 | verbose = scalar; 220 | }; 221 | in 222 | a: b: 223 | if isPkg b then b else 224 | if isPkg a then a.withPrefs b else 225 | mergeWithKeys (k: updaters.${k}) a b; 226 | 227 | /* unify two prefs, making sure they're compatible */ 228 | prefsIntersect = let 229 | err = a: b: throw "incompatible prefs: ${toJSON a} vs ${toJSON b}"; 230 | scalar = a: b: if a == b then a else err a b; 231 | intersectors = { 232 | version = versionsIntersect; 233 | variants = mergeWith (a: b: if a == b then a else 234 | union (toList a) (toList b)); 235 | flags = mergeWith scalar; 236 | patches = a: b: a ++ b; 237 | depends = mergeWith prefsIntersect; 238 | extern = scalar; 239 | tests = scalar; 240 | fixedDeps = scalar; 241 | resolver = scalar; 242 | deptype = union; 243 | target = scalar; 244 | provides = mergeWith versionsIntersect; 245 | verbose = 
scalar;
    };
    /* a resolved package satisfies prefs p iff its spec matches them */
    intersectPkg = o: p: if specMatches o.spec p then o else err o p;
  in coalesceWith (a: b:
    if isPkg a
    then if isPkg b
      /* two resolved packages only unify if they are the same package
         (fixed: was `intersectScalar`, an undefined name) */
      then scalar a b
      else intersectPkg a b
    else if isPkg b
      then intersectPkg b a
      else mergeWithKeys (k: intersectors.${k}) a b);

  /* unify a list of package prefs, making sure they're compatible */
  prefsIntersection = l: if isList l then foldl' prefsIntersect null l else l;

  /* traverse all dependencies of given package(s) that satisfy pred recursively
     and return them as a list (in breadth-first order) */
  findDeps = pred:
    let
      adddeps = s: pkgs: add s
        (foldl' (deps: p:
          (deps ++ filter (d: d != null && ! (elem d s) && ! (elem d deps) && pred d)
            (attrValues p.spec.depends)))
          [] pkgs);
      add = s: pkgs: if pkgs == [] then s else adddeps (s ++ pkgs) pkgs;
    in pkg: add [] (toList pkg);

  /* debugging to trace full package dependencies (and return count of packages) */
  traceSpecTree = let
    sst = seen: ind: dname: dt: pkg: if pkg == null then seen else
      trace (ind
        + (if dt != null then "[" + deptypeChars dt + "] " else "")
        + (if dname != null && dname != pkg.spec.name then "${dname}=" else "")
        + specToString pkg.spec + " "
        + takePrefix storeDir pkg.out)
      (if elem pkg seen then seen else
        foldl' (seen: d: sst seen (ind + " ") d pkg.spec.deptypes.${d} or null pkg.spec.depends.${d})
          (seen ++ [pkg])
          (attrNames pkg.spec.depends));
    in pkgs: length (foldl' (seen: sst seen "" null null) [] (toList pkgs));

  /* run a command (args) under env at eval time and return its stdout */
  capture = args: env: readFile (derivation ({
    name = "capture-${baseNameOf (head args)}";
    system = currentSystem;
    builder = ./capture.sh;
    args = args;
  } // env));

  /* use a single compiler package to provide all compiler-language virtuals */
  compilers = compiler: { c = compiler; cxx = compiler; fortran = compiler; };
}
-------------------------------------------------------------------------------- /spack/nixpack.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import json 4 | import base64 5 | import re 6 | 7 | # translate from nix to spack because... 8 | b32trans = bytes.maketrans(b"0123456789abcdfghijklmnpqrsvwxyz", base64._b32alphabet.lower()) 9 | 10 | getVar = os.environ.pop 11 | 12 | passAsFile = set(getVar('passAsFile', '').split()) 13 | 14 | def getJson(var: str): 15 | if var in passAsFile: 16 | with open(getVar(var+'Path'), 'r') as f: 17 | return json.load(f) 18 | else: 19 | return json.loads(getVar(var)) 20 | 21 | if not sys.executable: # why not? 22 | sys.executable = getVar('builder') 23 | 24 | def linktree(src: str, dst: str): 25 | os.mkdir(dst) 26 | for srcentry in os.scandir(src): 27 | srcname = os.path.join(src, srcentry.name) 28 | dstname = os.path.join(dst, srcentry.name) 29 | srcobj = srcname 30 | if srcentry.is_dir(): 31 | linktree(srcname, dstname) 32 | else: 33 | os.symlink(srcname, dstname) 34 | 35 | import spack.main # otherwise you get recursive import errors 36 | import spack.vendor.archspec.cpu 37 | import spack.util.spack_yaml as syaml 38 | import spack.llnl.util.tty 39 | 40 | from spack.spec import _inject_patches_variant as inject_patches_variant 41 | 42 | # monkeypatch store.layout for the few things we need 43 | class NixLayout(): 44 | metadata_dir = '.spack' 45 | hidden_file_paths = (metadata_dir,) 46 | hidden_file_regexes = (re.escape(metadata_dir),) 47 | def metadata_path(self, spec): 48 | return os.path.join(spec.prefix, self.metadata_dir) 49 | def build_packages_path(self, spec): 50 | return os.path.join(self.metadata_path(spec), 'repos') 51 | class NixDatabase(): 52 | root = '/var/empty' 53 | upstream_dbs = [] 54 | class NixStore(): 55 | layout = NixLayout() 56 | db = NixDatabase() 57 | # this is used to find bin/sbang: 58 | unpadded_root = spack.paths.prefix 59 | 
# install our fake store in place of spack's real one (see NixStore above)
spack.store.STORE = NixStore()

# load the nix-provided config scopes and drop host-specific config for purity
spack.main.add_command_line_scopes(spack.config.CONFIG, getVar('spackConfig').split())
spack.config.CONFIG.remove_scope('system')
spack.config.CONFIG.remove_scope('user')
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("nixpack"))

# stage builds inside the nix build sandbox
spack.config.set('config:build_stage', [getVar('NIX_BUILD_TOP')], 'nixpack')
# NOTE(review): getVar returns a string here; bool("0") is True, so any
# non-empty value enables parallelism — presumably nix passes "" or "1"; confirm
enableParallelBuilding = bool(getVar('enableParallelBuilding', True))
cores = 1
if enableParallelBuilding:
    cores = int(getVar('NIX_BUILD_CORES', 0))
if cores > 0:
    spack.config.set('config:build_jobs', cores, 'nixpack')

spack.paths.set_working_dir()
# add in dynamic overlay repos
repos = getVar('repos', '').split()

# spack versions exposing repo.from_path take a cache argument to Repo
# (API probe; presumably newer spack — confirm against target spack release)
repoArgs = {}
if hasattr(spack.repo, "from_path"):
    repoArgs['cache'] = spack.caches.MISC_CACHE

def linkPkg(repo: spack.repo.Repo, path: str, name: str):
    # symlink one package directory into an overlay repo, idempotently
    try:
        os.symlink(path, repo.dirname_for_package_name(name))
        # clear cache:
        repo._fast_package_checker = None
    except FileExistsError:
        # just trust that it should be identical
        pass

# staging area where overlay repos are assembled for this build
repodir = os.path.join(os.environ['TMPDIR'], 'repos', 'spack_repo')
os.makedirs(repodir)

# namespace -> Repo for skeleton ("dynamic") repos; packages are linked in
# later (by linkPkg) as specs referencing them are loaded
dynRepos = {}

def prepRepo(a: str):
    # stage one repo path under repodir (keyed by its namespace) and return
    # the spack Repo object for it
    with open(os.path.join(a, spack.repo.repo_config_name), encoding="utf-8") as f:
        n = syaml.load(f)["repo"]["namespace"]
    d = os.path.join(repodir, n)
    if os.path.isdir(os.path.join(a, "packages")):
        # whole repo, symlink whole as-is
        os.symlink(a, d)
        dyn = False
    else:
        # skeleton repo, symlink files
        os.mkdir(d)
        for f in os.listdir(a):
            os.symlink(os.path.join(a, f), os.path.join(d, f))
        os.mkdir(os.path.join(d, "packages"))
        dyn = True
    r = spack.repo.Repo(d, **repoArgs)
    if dyn:
        dynRepos[n] = r
    return r

spack.repo.PATH = spack.repo.RepoPath(*map(prepRepo, repos))
| nixLogFd = int(getVar('NIX_LOG_FD', -1)) 119 | nixLogFile = None 120 | if nixLogFd >= 0: 121 | nixLogFile = os.fdopen(nixLogFd, 'w') 122 | 123 | def nixLog(j): 124 | if nixLogFile: 125 | print("@nix", json.dumps(j), file=nixLogFile) 126 | 127 | nixStore = getVar('NIX_STORE') 128 | 129 | system = getVar('system') 130 | basetarget, platform = system.split('-', 1) 131 | archos = getVar('os') 132 | 133 | class NixSpec(spack.spec.Spec): 134 | # to re-use identical specs so id is reasonable 135 | specCache = dict() 136 | nixSpecFile = '.nixpack.spec'; 137 | compilers = dict() 138 | 139 | @staticmethod 140 | def cacheKey(nixspec, prefix: str): 141 | if isinstance(prefix, str) and prefix.startswith(nixStore): 142 | # in nix store 143 | return prefix[len(nixStore):].lstrip('/') 144 | else: 145 | # extern: name + prefix should be enough 146 | return nixspec['name'] + "-" + nixspec['version'] + ":" + prefix 147 | 148 | @classmethod 149 | def get(self, arg, prefix: str=None, top: bool=True): 150 | if isinstance(arg, str): 151 | # path to existing nix store (containing nixSpecFile) 152 | nixspec = os.path.join(arg, self.nixSpecFile) 153 | if prefix is None: 154 | prefix = arg 155 | else: 156 | if 'spec' in arg: 157 | # inline dependency spec, containing spec and out 158 | nixspec = arg['spec'] 159 | if prefix is None: 160 | prefix = arg.get('out') 161 | else: 162 | # actual spec object 163 | nixspec = arg 164 | if prefix is None: 165 | prefix = nixspec['prefix'] 166 | 167 | try: 168 | return self.specCache[self.cacheKey(nixspec, prefix)] 169 | except KeyError: 170 | if isinstance(nixspec, str): 171 | with open(nixspec, 'r') as sf: 172 | nixspec = json.load(sf) 173 | return NixSpec(nixspec, prefix, top) 174 | 175 | def __init__(self, nixspec, prefix: str, top: bool): 176 | key = self.cacheKey(nixspec, prefix) 177 | self.specCache[key] = self 178 | 179 | super().__init__() 180 | self.nixspec = nixspec 181 | self.name = nixspec['name'] 182 | self.namespace = nixspec['namespace'] 
183 | version = nixspec['version'] 184 | self.versions = spack.version.VersionList([spack.version.Version(version)]) 185 | self._set_architecture(target=nixspec.get('target', basetarget), platform=platform, os=archos) 186 | try: 187 | self.set_prefix(prefix) 188 | except AttributeError: 189 | self.prefix = prefix 190 | self.external_path = nixspec['extern'] 191 | if self.namespace in dynRepos: 192 | linkPkg(dynRepos[self.namespace], nixspec['package'], self.name) 193 | if top: 194 | self._top = True 195 | elif self.external_path: 196 | assert self.external_path == prefix, f"{self.name} extern {nixspec['extern']} doesn't match prefix {prefix}" 197 | else: 198 | # add any dynamic packages 199 | repodir = os.path.join(prefix, '.spack', 'repos', 'spack_repo') 200 | try: 201 | rl = os.listdir(repodir) 202 | except FileNotFoundError: 203 | rl = [] 204 | for r in rl: 205 | try: 206 | repo = dynRepos[r] 207 | except KeyError: 208 | continue 209 | pkgdir = os.path.join(repodir, r, 'packages') 210 | for p in os.listdir(pkgdir): 211 | linkPkg(repo, os.path.join(pkgdir, p), p) 212 | 213 | depends = nixspec['depends'].copy() 214 | 215 | for n, d in sorted(depends.items()): 216 | dtype = nixspec['deptypes'].get(n) or () 217 | try: 218 | dtype = spack.deptypes.canonicalize(dtype) 219 | except AttributeError: 220 | dtype = spack.dependency.canonical_deptype(dtype) 221 | if d: 222 | dep = self.get(d, top=False) 223 | cdep = None # any current dep on this package 224 | if hasattr(self, 'add_dependency_edge'): 225 | try: 226 | cdeps = self._dependencies.select(child=dep.name, depflag=dtype) 227 | except TypeError: 228 | cdeps = self._dependencies.select(child=dep.name, deptypes=dtype) 229 | if len(cdeps) == 1: 230 | # if multiple somehow, _add_dependency should catch it 231 | cdep = cdeps[0] 232 | else: 233 | cdep = self._dependencies.get(dep.name) 234 | if n != dep.name: 235 | virtuals = (n,) 236 | else: 237 | virtuals = () 238 | if cdep: 239 | assert cdep.spec == dep, 
f"{self.name}.{n}: conflicting dependencies on {dep.name}" 240 | cdep.update_deptypes(dtype) 241 | cdep.update_virtuals(virtuals) 242 | else: 243 | try: 244 | self._add_dependency(dep, depflag=dtype, virtuals=virtuals) 245 | except TypeError: 246 | self._add_dependency(dep, deptypes=dtype, virtuals=virtuals) 247 | try: 248 | lrdep = dtype & (spack.deptypes.LINK | spack.deptypes.RUN) 249 | except AttributeError: 250 | lrdep = 'link' in dtype or 'run' in dtype 251 | if not lrdep and n != "c": 252 | # trim build dep references (except compiler used in lmod hierachy) 253 | del nixspec['depends'][n] 254 | 255 | variants = nixspec['variants'] 256 | if not self.external: 257 | package_class = spack.repo.PATH.get_pkg_class(self.fullname) 258 | if hasattr(package_class, "variant_names"): 259 | pkgVariants = set(package_class.variant_names()) 260 | else: 261 | pkgVariants = package_class.variant.keys() 262 | assert variants.keys() == pkgVariants, f"{self.name} has mismatching variants {variants.keys()} vs. 
{pkgVariants}" 263 | for n, s in variants.items(): 264 | if s is None: 265 | continue 266 | if isinstance(s, bool): 267 | v = spack.variant.BoolValuedVariant(n, s) 268 | elif isinstance(s, list): 269 | v = spack.variant.MultiValuedVariant(n, s) 270 | elif isinstance(s, dict): 271 | v = spack.variant.MultiValuedVariant(n, [k for k,v in s.items() if v]) 272 | else: 273 | v = spack.variant.SingleValuedVariant(n, s) 274 | self.variants[n] = v 275 | for f in self.compiler_flags.valid_compiler_flags(): 276 | self.compiler_flags[f] = [] 277 | for n, s in nixspec['flags'].items(): 278 | assert n in self.compiler_flags and type(s) is list, f"{self.name} has invalid compiler flag {n}" 279 | self.compiler_flags[n] = s 280 | self.tests = nixspec['tests'] 281 | self.extra_attributes.update(nixspec['extraAttributes']) 282 | if self.external: 283 | # not really unique but shouldn't matter 284 | self._hash = spack.util.hash.b32_hash(self.external_path) 285 | else: 286 | self._nix_hash, nixname = key.split('-', 1) 287 | 288 | 289 | if not self.external and nixspec['patches']: 290 | patches = package_class.patches.setdefault(spack.spec.Spec(), []) 291 | for i, p in enumerate(nixspec['patches']): 292 | patches.append(spack.patch.FilePatch(package_class, p, 1, '.', ordering_key = ('~nixpack', i))) 293 | spack.repo.PATH.patch_index.update_package(self.fullname) 294 | 295 | def concretize(self): 296 | if self._concrete: 297 | return 298 | inject_patches_variant(self) 299 | self._mark_concrete() 300 | 301 | def copy(self, deps=True, **kwargs): 302 | # no! 303 | return self 304 | 305 | def dag_hash(self, length=None): 306 | try: 307 | return self._nix_hash[:length] 308 | except AttributeError: 309 | return super().dag_hash(length) 310 | 311 | def dag_hash_bit_prefix(self, bits): 312 | try: 313 | # nix and python use different base32 alphabets... 
314 | h = self._nix_hash.translate(b32trans) 315 | except AttributeError: 316 | h = super().dag_hash() 317 | return spack.util.hash.base32_prefix_bits(h, bits) 318 | 319 | def _installed_explicitly(self): 320 | return getattr(self, '_top', False) 321 | -------------------------------------------------------------------------------- /spack/generate.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python3 2 | 3 | import os 4 | import sys 5 | import re 6 | from collections import defaultdict 7 | 8 | import nixpack 9 | import spack 10 | try: 11 | from spack.version import any_version 12 | except ImportError: 13 | any_version = spack.spec._any_version 14 | 15 | identPat = re.compile("[a-zA-Z_][a-zA-Z0-9'_-]*") 16 | reserved = {'if','then','else','derivation','let','rec','in','inherit','import','with'} 17 | 18 | def isident(s: str): 19 | return identPat.fullmatch(s) and s not in reserved 20 | 21 | class Nix: 22 | prec = 0 23 | def paren(self, obj, indent, out, nl=False): 24 | prec = obj.prec if isinstance(obj, Nix) else 0 25 | parens = prec > self.prec 26 | if parens: 27 | if nl: 28 | out.write('\n' + ' '*indent) 29 | out.write('(') 30 | printNix(obj, indent, out) 31 | if parens: 32 | out.write(')') 33 | 34 | class Expr(Nix): 35 | def __init__(self, s, prec=0): 36 | self.str = s 37 | self.prec = prec 38 | def print(self, indent, out): 39 | out.write(self.str) 40 | 41 | class List(Nix): 42 | def __init__(self, items): 43 | self.items = items 44 | def print(self, indent, out): 45 | out.write('[') 46 | first = True 47 | indent += 2 48 | for x in self.items: 49 | if first: 50 | first = False 51 | else: 52 | out.write(' ') 53 | self.paren(x, indent, out, True) 54 | out.write(']') 55 | 56 | class Attr(Nix): 57 | def __init__(self, key, val): 58 | if not isinstance(key, str): 59 | raise TypeError(key) 60 | self.key = key 61 | self.val = val 62 | def print(self, indent, out): 63 | out.write(' '*indent) 64 | if isident(self.key): 
# --- tail of Attr.print: the Attr class definition begins above this excerpt ---
            out.write(self.key)
        else:
            printNix(self.key, indent, out)
        out.write(' = ')
        printNix(self.val, indent, out)
        out.write(';\n')

class AttrSet(Nix, dict):
    """A Nix attribute set ({ k = v; ... }), printed with keys in sorted order."""
    def print(self, indent, out):
        out.write('{')
        first = True
        for k, v in sorted(self.items()):
            if first:
                # only emit the opening newline once there is at least one attr
                out.write('\n')
                first = False
            Attr(k, v).print(indent+2, out)
        if not first:
            out.write(' '*indent)
        out.write('}')

class Select(Nix):
    """Nix attribute selection: val.a.b..."""
    prec = 1
    def __init__(self, val, *attr: str):
        self.val = val
        self.attr = attr
    def print(self, indent, out):
        if isinstance(self.val, str):
            # a plain str here is treated as a raw expression (variable name),
            # not a quoted Nix string
            out.write(self.val)
        else:
            self.paren(self.val, indent, out)
        for a in self.attr:
            out.write('.')
            if isident(a):
                out.write(a)
            else:
                self.paren(a, indent, out)

class SelectOr(Select):
    """Nix selection with a default: val.attr or ore"""
    prec = 1
    def __init__(self, val, attr: str, ore):
        super().__init__(val, attr)
        self.ore = ore
    def print(self, indent, out):
        super().print(indent, out)
        out.write(' or ')
        self.paren(self.ore, indent, out)

class Fun(Nix):
    """Nix lambda: var: expr"""
    prec = 16 # not actually listed?
    def __init__(self, var: str, expr):
        self.var = var
        self.expr = expr
    def print(self, indent, out):
        out.write(self.var)
        out.write(': ')
        self.paren(self.expr, indent, out)

class App(Nix):
    """Nix function application: fun arg1 arg2 ..."""
    prec = 2
    def __init__(self, fun, *args):
        self.fun = fun
        self.args = args
    def print(self, indent, out):
        if isinstance(self.fun, str):
            # raw function name
            out.write(self.fun)
        else:
            self.paren(self.fun, indent, out)
        for a in self.args:
            out.write(' ')
            self.paren(a, indent, out)

class Or(Nix):
    """Nix boolean disjunction (a || b || ...); prints 'false' when empty."""
    prec = 13
    def __init__(self, *args):
        self.args = args
    def print(self, indent, out):
        first = True
        for a in self.args:
            if first:
                first = False
            else:
                out.write(' || ')
            self.paren(a, indent, out)
        if first:
            # identity element of ||
            out.write('false')

class And(Nix):
    """Nix boolean conjunction (a && b && ...); prints 'true' when empty."""
    prec = 12
    def __init__(self, *args):
        self.args = args
    def print(self, indent, out):
        first = True
        for a in self.args:
            if first:
                first = False
            else:
                out.write(' && ')
            self.paren(a, indent, out)
        if first:
            # identity element of &&
            out.write('true')

class Eq(Nix):
    """Nix equality test: a == b"""
    prec = 11
    def __init__(self, a, b):
        self.a = a
        self.b = b
    def print(self, indent, out):
        self.paren(self.a, indent, out)
        out.write(' == ')
        self.paren(self.b, indent, out)

class Ne(Nix):
    """Nix inequality test: a != b"""
    prec = 11
    def __init__(self, a, b):
        self.a = a
        self.b = b
    def print(self, indent, out):
        self.paren(self.a, indent, out)
        out.write(' != ')
        self.paren(self.b, indent, out)

class If(Nix):
    """Nix conditional: if i then t else e"""
    prec = 15
    def __init__(self, i, t, e):
        self.i = i
        self.t = t
        self.e = e
    def print(self, indent, out):
        out.write('if ')
        self.paren(self.i, indent, out)
        out.write(' then ')
        self.paren(self.t, indent, out)
        out.write(' else ')
        self.paren(self.e, indent, out)

# escapes needed inside a Nix double-quoted string literal
nixStrEsc = str.maketrans({'"': '\\"', '\\': '\\\\', '$': '\\$', '\n': '\\n', '\r': '\\r', '\t': '\\t'})
def printNix(x, indent=0, out=sys.stdout):
    """Print a python value (or Nix AST node) to out as a Nix expression."""
    if isinstance(x, Nix):
        x.print(indent, out)
    elif isinstance(x, str):
        out.write('"' + x.translate(nixStrEsc) + '"')
    elif type(x) is bool:
        # must precede the int case: bool is a subclass of int
        out.write('true' if x else 'false')
    elif x is None:
        out.write('null')
    elif isinstance(x, int):
        out.write(repr(x))
    elif isinstance(x, float):
        # messy but rare (needed for nix parsing #5063)
        out.write('%.15e'%x)
    elif isinstance(x, (list, tuple)):
        List(x).print(indent, out)
    elif isinstance(x, set):
        # sort for deterministic output
        List(sorted(x)).print(indent, out)
    elif isinstance(x, dict):
        AttrSet(x).print(indent, out)
    else:
        raise TypeError(type(x))

def unlist(l):
    """Collapse a single-element list/tuple to its element; pass anything else through."""
    if isinstance(l, (list, tuple)) and len(l) == 1:
        return l[0]
    return l

def specPrefs(s):
    """Convert a spack Spec into a plain dict of version/variant/depends preferences."""
    p = {}
    if s.versions != any_version:
        p['version'] = str(s.versions)
    if s.variants:
        p['variants'] = {n: unlist(v.value) for n, v in s.variants.items()}
    d = s.dependencies()
    if d:
        # recurse into dependency constraints
        p['depends'] = {x.name: specPrefs(x) for x in d}
    return p

def depPrefs(d):
    """Convert a spack Dependency into preferences, including its deptype."""
    p = specPrefs(d.spec)
    try:
        # newer spack: deptype is a bit flag
        p['deptype'] = spack.deptypes.flag_to_tuple(d.depflag)
    except AttributeError:
        # older spack: deptype is already a tuple
        p['deptype'] = d.type
    if d.patches:
        print(f"{d} has unsupported dependency patches", file=sys.stderr)
    return p

def conditions(c, p, s, dep=None):
    """Append Nix conditions to c that encode when spec s applies to package p.

    dep, if given, is the dependency being processed (used to break recursive
    dependency conditionals)."""
    def addConditions(a, s):
        # a is the Nix expression for the spec being constrained
        deps = Select(a,'depends')
        if s.versions != any_version:
            c.append(App("versionMatches", Select(a,'version'), str(s.versions)))
        if s.variants:
            for n, v in sorted(s.variants.items()):
                c.append(App("variantMatches", Select(a,'variants',n), unlist(v.value)))
        for d in s.dependencies():
            if dep and d.name == dep.spec.name:
                print(f"{dep}: skipping recursive dependency conditional {d}", file=sys.stderr)
                continue
            # dependency must be present...
            c.append(Ne(SelectOr(deps,d.name,None),None))
            # ...and its own spec must match, recursively
            addConditions(Select(deps,d.name,'spec'), d)
        if s.architecture:
            if s.architecture.os:
                c.append(Eq(Expr('os'), s.architecture.os))
            if s.architecture.platform:
                c.append(Eq(Expr('platform'), s.architecture.platform))
            if s.architecture.target:
                # this isn't actually correct due to fancy targets but good enough for this
                c.append(Eq(Expr('target'), str(s.architecture.target).rstrip(':')))
        if s.name and s.name != p.name:
            # spack sometimes interprets this to mean p provides a virtual of s.name, and sometimes to refer to the named package anywhere in the dep tree
            print(f"{p.name}: ignoring unsupported named condition {s}")
            # make the condition unsatisfiable
            c.append(False)
    addConditions('spec', s)

def whenCondition(p, s, a, dep=None):
    """Wrap a in a Nix `when` guarded by the conditions for spec s, if any."""
    c = []
    conditions(c, p, s, dep)
    if not c:
        return a
    return App('when', And(*c), a)

# locate the conditional-variant-value wrapper class across spack versions
try:
    VariantValue = spack.variant.ConditionalValue
except AttributeError:
    try:
        VariantValue = spack.variant.Value
    except AttributeError:
        VariantValue = None

def variant1(p, v):
    """Convert a single spack variant definition to its Nix representation:
    a dict for multi-valued, a bool for boolean, an ordered list (default
    first) for enumerated, or the bare default otherwise."""
    def value(x):
        if VariantValue and isinstance(x, VariantValue):
            print(f"{p.name} variant {v.name}: ignoring unsupported conditional on value {x}", file=sys.stderr)
            return x.value
        return x

    d = str(v.default)
    if v.multi and v.values is not None:
        d = d.split(',')
        # map each possible value to whether it is enabled by default
        return {x: x in d for x in map(value, v.values)}
    elif v.values == (True, False):
        return d.upper() == 'TRUE'
    elif v.values:
        l = list(map(value, v.values))
        try:
            # move the default to the front
            l.remove(d)
            l.insert(0, d)
        except ValueError:
            print(f"{p.name}: variant {v.name} default {v.default!r} not in {v.values!r}", file=sys.stderr)
        return l
    else:
        return d

def variant(p, v):
    """Convert a variant entry, handling the (variant, whens) tuple form by
    wrapping the value in a `when` over the disjunction of its conditions."""
    if type(v) is tuple:
        a = variant1(p, v[0])
        l = []
        for w in v[1]:
            c = []
            conditions(c, p, w)
            if not c:
                # an unconditional when: no wrapper needed
                return a
            l.append(And(*c))
        return App('when', Or(*l), a)
    else:
        return variant1(p, v)

def variant_definitions(p, l):
    """Fold a list of (when, variant) definitions into nested Nix ifs."""
    if not l:
        return None
    w, v = l[0]
    a = variant1(p, v)
    c = []
    conditions(c, p, w)
    if not c:
        return a
    # fold right
    return If(And(*c), a, variant_definitions(p, l[1:]))

def variant_name(p, n):
    """Convert all definitions of variant n on package p (newer spack API)."""
    return variant_definitions(p, p.variant_definitions(n))

def depend(p, d):
    """Convert a package dependency dict ({when: [Dependency]}) to Nix."""
    c = [whenCondition(p, w, depPrefs(s), s) for w, l in sorted(d.items()) for s in l]
    if len(c) == 1:
        return c[0]
    return List(c)

def provide(p, wv):
    """Convert a list of (when, versions) provided-virtual entries to Nix."""
    c = [whenCondition(p, w, str(v)) for w, v in wv]
    if len(c) == 1:
        return c[0]
    return List(c)

def conflict(p, c, w, m):
    """Convert a conflict (spec c when w, message m) to a Nix `when`."""
    l = []
    conditions(l, p, spack.spec.Spec(c))
    conditions(l, p, w)
    return App('when', And(*l), str(c) + (' ' + m if m else ''))

namespaces = ', '.join(r.namespace for r in spack.repo.PATH.repos)
print(f"Generating package repo for {namespaces}...")
# $out is provided by the nix builder environment
f = open(os.environ['out'], 'w')
print("spackLib: with spackLib; {", file=f)
def output(k, v):
    """Write one top-level attribute to the generated Nix file."""
    printNix(Attr(k, v), out=f)

virtuals = defaultdict(set)  # virtual name -> set of provider package names
n = 0
for p in spack.repo.PATH.all_package_classes():
    desc = dict()
    desc['namespace'] = p.namespace
    desc['dir'] = os.path.realpath(p.package_dir)
    # sort versions preferred-first, then non-deprecated/non-develop, then newest
    vers = [(i.get('preferred',False), not (v.isdevelop() or i.get('deprecated',False)), v)
            for v, i in p.versions.items()]
    vers.sort(reverse = True)
    desc['version'] = [str(v) for _, _, v in vers]
    if p.variants:
        if hasattr(p, "variant_names"):
            # newer spack API
            desc['variants'] = {n: variant_name(p, n) for n in p.variant_names()}
        else:
            desc['variants'] = {n: variant(p, e) for n, e in p.variants.items()}
    if p.dependencies:
        desc['depends'] = {n: depend(p, d) for n, d in p.dependencies_by_name(when=True).items()}
        # language deps imply needing the compiler wrapper at build time
        if 'c' in desc['depends'] or 'cxx' in desc['depends'] or 'fortran' in desc['depends']:
            desc['depends']['compiler-wrapper'] = {'deptype': ['build']}
    if p.conflicts:
        desc['conflicts'] = [conflict(p, c, w, m) for c, wm in sorted(p.conflicts.items()) for w, m in wm]
    if p.provided:
        provides = defaultdict(list)
        for w, vs in sorted(p.provided.items()):
            for v in vs:
                provides[v.name].append((w, v.versions))
                virtuals[v.name].add(p.name)
        desc['provides'] = {v: provide(p, c) for v, c in sorted(provides.items())}
    # each package is a function of its resolved spec
    output(p.name, Fun('spec', desc))
    n += 1
print(f"Generated {n} packages")

# use spack config for provider ordering
prefs = spack.config.get("packages:all:providers", {})
for v, providers in sorted(virtuals.items()):
    prov = []
    for p in prefs.get(v, []):
        n = spack.spec.Spec(p).name
        try:
            providers.remove(n)
        except KeyError:
            # preferred provider isn't one of ours
            continue
        prov.append(n)
    # remaining providers in stable alphabetical order
    prov.extend(sorted(providers))
    output(v, prov)
print(f"Generated {len(virtuals)} virtuals")

print("}", file=f)
f.close()

# ===================== view/builder.py =====================
#!/bin/env python3
from typing import TYPE_CHECKING, Union, Optional, Type, List, Any
if not TYPE_CHECKING:
    # hack for __future__.annotations (python<3.7): forward references in
    # annotations would raise NameError at class-body evaluation otherwise
    Path = Any
    Inode = Any

import os
import sys
import stat
import errno
import fnmatch
import json

def pathstr(s: bytes) -> str:
    """Decode a filesystem path for display (latin-1 never fails)."""
    return s.decode('ISO-8859-1')

# environment supplied by the nix builder: source package paths and the output
srcPaths = os.environb[b'pkgs'].split()
dstPath = os.environb[b'out']
# per-force-pattern: index into srcPaths of the package that wins, or None
forceSrcs = [srcPaths.index(p) if p else None for p in os.environb[b'forcePkgs'].split(b' ')]

def getOpt(opt: bytes):
    """Build a matcher for the glob-list environment option `opt`.

    Returns a callable taking a relative path and returning the index of the
    first matching pattern, True for the match-everything option ('1'), or
    None for no match (or for an empty option)."""
    v = os.environb[opt]
    if v == b'':
        return lambda x: None
    if v == b'1':
        return lambda x: True
    # NOTE: fnmatch._compile_pattern is a private API; it caches compiled globs
    l = [ fnmatch._compile_pattern(x) for x in v.split(b' ') ]
    def check(x: bytes):
        for i, m in enumerate(l):
            if m(x):
                return i
        return None
    return check

# path handling options
opts = {o: getOpt(o) for o in
    # in order of precedence:
    [ b'exclude' # paths not to link
    , b'shbang' # paths to translate #!
    , b'jupyter' # paths to translate argv[0]
    , b'wrap' # paths to wrap executables
    , b'copy' # paths to copy
    , b'force' # paths to process only from corresponding forcePkgs
    , b'ignoreConflicts' # paths to ignore conflicts
    ] }

# longest source prefix: bounds how much of a #! line we must read to rewrite it
maxSrcLen = max(len(p) for p in srcPaths)

class Path:
    """
    Keep track of a rooted path, including relative path, open parent dir_fd,
    open state, and other operations.
    """
    def __init__(self, dir: Union[Path, bytes], ent: Union[os.DirEntry,bytes]=b''):
        self.dir = dir # parent or root
        self.ent = ent # relative
        if isinstance(dir, Path):
            path = dir.path
            self.relpath: bytes = os.path.join(dir.relpath, self.name) if ent else dir.relpath
        else:
            path = dir
            self.relpath = self.name
        self.path: bytes = os.path.join(path, self.name) if ent else path # full path
        self.fd: Optional[int] = None

    def __str__(self) -> str:
        return pathstr(self.path)

    @property
    def root(self) -> bytes:
        "Root path. os.path.join(self.root, self.relpath) == self.path"
        if isinstance(self.dir, Path):
            return self.dir.root
        return self.dir

    @property
    def dirfd(self) -> Optional[int]:
        "Parent open dir fd. file(self.dirfd, self.name) == self.path"
        if isinstance(self.dir, Path):
            return self.dir.fd
        return None

    @property
    def name(self) -> bytes:
        "Name relative to dirfd"
        if isinstance(self.ent, os.DirEntry):
            return self.ent.name
        return self.ent

    def sub(self, ent: Union[os.DirEntry,bytes]):
        "Create a child path"
        return Path(self, ent)

    def _dirop(self, fun, *args, **kwargs):
        """Apply fun relative to the parent dir fd when open, else by full path."""
        if self.dirfd is not None:
            return fun(self.name, *args, dir_fd=self.dirfd, **kwargs)
        else:
            return fun(self.path, *args, **kwargs)

    def optsrc(self, opt: bytes) -> Optional[int]:
        "Check whether this path matches the given option and return the index"
        return opts[opt](self.relpath)

    def opt(self, opt: bytes) -> bool:
        "Check whether this path matches the given option"
        return self.optsrc(opt) is not None

    def _dostat(self):
        """Uncached lstat; None if the path does not exist."""
        if self.fd is not None:
            return os.fstat(self.fd)
        try:
            if isinstance(self.ent, os.DirEntry):
                return self.ent.stat(follow_symlinks=False)
            else:
                return self._dirop(os.lstat)
        except OSError as e:
            if e.errno == errno.ENOENT:
                return None
            raise

    def stat(self):
        "really lstat"
        try:
            return self._stat
        except AttributeError:
            # cache the result; paths are not expected to change during a run
            self._stat = self._dostat()
            return self._stat

    def isdir(self):
        if isinstance(self.ent, os.DirEntry):
            return self.ent.is_dir(follow_symlinks=False)
        else:
            return stat.S_ISDIR(self.stat().st_mode)

    def islnk(self):
        if isinstance(self.ent, os.DirEntry):
            return self.ent.is_symlink()
        else:
            return stat.S_ISLNK(self.stat().st_mode)

    def isexe(self):
        # any execute bit set
        return self.stat().st_mode & 0o111

    def readlink(self):
        return self._dirop(os.readlink)

    def symlink(self, target: Union[bytes,Path]):
        """Create a symlink at this path pointing to target."""
        if isinstance(target, Path):
            target = target.path
        if self.dirfd is not None:
            return os.symlink(target, self.name, dir_fd=self.dirfd)
        else:
            return os.symlink(target, self.path)

    def link(self, old: Union[bytes,Path]):
        """Create a hard link at this path to old, using dir fds when open."""
        args = {}
        if self.dirfd is not None:
            args['dst_dir_fd'] = self.dirfd
            dst = self.name
        else:
            dst = self.path
        if isinstance(old, Path):
            if old.dirfd is not None:
                args['src_dir_fd'] = old.dirfd
                src = old.name
            else:
                src = old.path
        else:
            src = old
        return os.link(src, dst, **args)

    def open(self):
        "set the mode to open for reading. must be used as 'with path.open()'"
        self.mode = os.O_RDONLY|os.O_NOFOLLOW
        return self

    def opendir(self):
        "set the mode to open directory for reading. must be used as 'with path.opendir()'"
        self.mode = os.O_RDONLY|os.O_NOFOLLOW|os.O_DIRECTORY
        return self

    def create(self, perm):
        "set the mode to open and create. must be used as 'with path.create()'"
        self.mode = os.O_WRONLY|os.O_CREAT|os.O_EXCL|os.O_NOFOLLOW
        if isinstance(perm, Path):
            # inherit permissions from an existing path
            perm = perm.stat().st_mode
        self.perm = perm
        return self

    def mkdir(self):
        "create a directory and set the mode to open for reading. should be used as 'with path.mkdir()'"
        try:
            self._dirop(os.mkdir)
        except OSError as e:
            # an already-existing directory is fine (merging into it)
            if e.errno != errno.EEXIST:
                raise
        self.mode = os.O_RDONLY|os.O_NOFOLLOW|os.O_DIRECTORY|os.O_PATH
        return self

    def __enter__(self):
        self.fd = self._dirop(os.open, self.mode, getattr(self, 'perm', 0o777))
        self.mode = None
        return self

    def __exit__(self, *args):
        os.close(self.fd)
        self.fd = None

    def read(self, len: int):
        assert self.fd is not None
        return os.read(self.fd, len)

    def write(self, data: bytes):
        """Write all of data, retrying short writes."""
        assert self.fd is not None
        l = 0
        while l < len(data):
            l += os.write(self.fd, data[l:])
        return l

    def readInterp(self) -> Optional[bytes]:
        "extract the interpreter from #! script, if any"
        # maxSrcLen+4 bytes is enough to see any srcPath-prefixed interpreter
        hb = self.read(maxSrcLen+4)
        if hb[0:2] != b'#!':
            return None
        return hb[2:].lstrip()

    def sendfile(self, src, z):
        "try os.sendfile to self from src, falling back to read/write"
        try:
            return os.sendfile(self.fd, src.fd, None, z)
        except OSError as err:
            if err.errno in (errno.EINVAL, errno.ENOSYS):
                return self.write(os.read(src.fd, z))
            else:
                raise err

    def copyfile(self, src):
        "write the contents of this open file from the open src file"
        z = src.stat().st_size
        while self.sendfile(src, z) > 0:
            pass

    def compare(self, other) -> bool:
        "compare stat and contents of two files, return true if identical"
        sstat = self.stat()
        if not stat.S_ISREG(sstat.st_mode):
            return False
        ostat = other.stat()
        if sstat.st_mode != ostat.st_mode or sstat.st_uid != ostat.st_uid or sstat.st_gid != ostat.st_gid or sstat.st_size != ostat.st_size:
            return False
        with self.open(), other.open():
            z = 65536
            while True:
                b1 = self.read(z)
                b2 = other.read(z)
                if b1 != b2:
                    return False
                if not b1:
                    # both at EOF with identical contents
                    return True

    def _scandir(self):
        #if self.fd is not None:
        #    try:
        #        return os.scandir(self.fd) # XXX returns str not bytes
        #    except TypeError:
        #        pass
        return os.scandir(self.path)

    def scandir(self):
        "return an iterator of child Paths"
        return map(self.sub, self._scandir())

def newpath(path: bytes) -> bytes:
    "rewrite a path pointing to a src to the dst"
    if not os.path.isabs(path):
        return path
    for sp in srcPaths:
        if path.startswith(sp):
            return os.path.join(dstPath, os.path.relpath(path, sp))
    return path

class Conflict(Exception):
    """Raised when two sources provide incompatible files at the same path."""
    def __init__(self, path: Path, *nodes: Inode):
        self.path = path.relpath
        self.nodes = nodes

    def __str__(self):
        srcs = ', '.join(pathstr(srcPaths[n.src]) for n in self.nodes if n.src is not None)
        return f'Conflicting file {pathstr(self.path)} from {srcs}'

class Inode:
    "An abstract class representing a node of a file tree"
    def __init__(self, src: int, path: Path):
        self.path = path
        self.src: Optional[int] = src # index into srcPaths; None if merged/processed

    @property
    def needed(self):
        "does this node need special processing/copying/translation?"
        # FIX: was `== None`; identity comparison is the correct idiom
        return self.src is None

    def compatible(self, other: Inode) -> bool:
        "is this node compatible with the other"
        return type(self) == type(other)

    def resolve(self, node: Optional[Inode]) -> Inode:
        "return a unified object or raise Conflict"
        if node is None:
            return self
        if not self.compatible(node):
            raise Conflict(self.path, self, node)
        return node

    def srcpath(self, path: Path) -> Path:
        "translate the given path to the specific src path for this node"
        assert self.src is not None, path
        return Path(srcPaths[self.src], path.relpath)

    def create(self, dst: Path) -> None:
        "actually copy/link/populate dst path"
        dst.symlink(self.srcpath(dst))

class Symlink(Inode):
    """A symlink node; its target is rewritten from src to dst space."""
    def __init__(self, src: int, path: Path):
        targ = path.readlink()
        self.targ = newpath(targ)
        super().__init__(src, path)

    @property
    def needed(self):
        # for recursion -- don't bother creating directories just for symlinks
        return False

    def compatible(self, other) -> bool:
        if super().compatible(other) and self.targ == other.targ:
            return True
        # partial special case to handle unifying a symlink with its target
        if self.targ == newpath(other.path.path):
            return True
        # last resort
        if os.path.realpath(self.path.path) == os.path.realpath(other.path.path):
            return True
        return False

    def __repr__(self):
        return f'Symlink({self.src}, {self.targ!r})'

    def create(self, dst: Path):
        dst.symlink(self.targ)

class File(Inode):
    """A regular file node, possibly needing translation/wrapping/copying."""
    # processing flags, set from the matching options in __init__
    shbang = False
    jupyter = False
    wrap = False
    copy = False

    def __init__(self, src: int, path: Path):
        super().__init__(src, path)
        if path.isexe():
            if path.opt(b'shbang'):
                with path.open():
                    interp = path.readInterp()
                    # only rewrite #! lines that point into one of our sources
                    if interp and any(interp.startswith(p) for p in srcPaths):
                        self.shbang = True
                        return
            if path.opt(b'wrap'):
                self.wrap = True
        # NOTE(review): jupyter/copy are assumed to apply to non-executables
        # too (e.g. kernel.json) -- confirm against original nesting
        if path.opt(b'jupyter'):
            self.jupyter = True
        if path.opt(b'copy'):
            self.copy = True

    @property
    def needed(self):
        return self.shbang or self.jupyter or self.wrap or self.copy

    def compatible(self, other) -> bool:
        # FIX: was misspelled `compatile`, so this override was dead code and
        # the base type-only check made all regular files "compatible"
        if not super().compatible(other):
            return False
        # allow identical files
        return self.path.compare(other.path)

    def __repr__(self):
        return f'File({self.src}{", needed" if self.needed else ""})'

    def create(self, dst: Path):
        src = self.srcpath(dst)
        if self.shbang:
            # rewrite the #! interpreter into dst space, then copy the rest
            with src.open():
                interp = src.readInterp()
                assert interp
                new = newpath(interp)
                with dst.create(src):
                    dst.write(b'#!'+new)
                    dst.copyfile(src)
        elif self.wrap:
            # shell wrapper preserving argv[0]
            with dst.create(src):
                dst.write(b'#!/bin/sh\nexec -a "$0" '+src.path+b' "$@"\n')
        elif self.jupyter:
            # rewrite argv[0] in a jupyter kernel spec
            with src.open():
                j = json.loads(src.read(src.stat().st_size))
                j['argv'][0] = newpath(j['argv'][0].encode()).decode()
                with dst.create(src):
                    dst.write(json.dumps(j).encode())
        elif self.copy:
            try:
                # prefer a hard link over a real copy
                dst.link(src)
            except PermissionError:
                with src.open():
                    with dst.create(src):
                        dst.copyfile(src)
        else:
            dst.symlink(src)

class Dir(Inode):
    """A directory node holding merged children from all sources."""
    needany = False  # does any descendant need processing?

    def __init__(self, src: int, path: Path):
        super().__init__(src, path)
        self.dir = dict()  # name -> child Inode

    def resolve(self, node: Optional[Inode]) -> Inode:
        node = super().resolve(node)
        if self.src != node.src:
            # merged from multiple sources: must be materialized, not symlinked
            node.src = None
        with self.path.opendir():
            for ent in self.path.scandir():
                n = scan(node.dir.get(ent.name), self.src, ent)
                if n:
                    node.dir[ent.name] = n
                    if not node.needany and n.needed:
                        node.needany = True
        return node

    @property
    def needed(self):
        return self.needany or super().needed

    def __repr__(self):
        return f'Dir({self.src}, {self.dir!r})'

    def create(self, dst: Path):
        if self.needed:
            # materialize the directory and recurse
            with dst.mkdir():
                for n, f in self.dir.items():
                    f.create(dst.sub(n))
        else:
            # untouched single-source subtree: just symlink it
            super().create(dst)

def scan(node, src: int, path: Path):
    """Merge the file at path (from source index src) into node.

    Returns the merged Inode, or node unchanged if the path is excluded,
    forced to a different source, or an ignorable conflict."""
    if path.opt(b'exclude'):
        return node
    force = path.optsrc(b'force')
    if force is not None and forceSrcs[force] != src:
        return node
    if path.isdir():
        cls: Type[Inode] = Dir
    elif path.islnk():
        cls = Symlink
    else:
        cls = File
    try:
        return cls(src, path).resolve(node)
    except Conflict:
        if path.opt(b'ignoreConflicts'):
            return node
        raise

print(f"Creating view {pathstr(dstPath)} from...")
# scan and merge all source paths
top = None
for i, src in enumerate(srcPaths):
    print(f" {pathstr(src)}")
    top = scan(top, i, Path(src))

# populate the destination with the result
assert top, "No paths found"
top.create(Path(dstPath))
# record provenance for later inspection
with open(os.path.join(dstPath, b".view-src"), "xb") as f:
    f.write(b"\n".join(srcPaths))