├── .gitignore ├── .travis.yml ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.rst ├── compilers-overlay.nix ├── default.nix ├── deploy_rsa.enc ├── firefox-overlay.nix ├── flake.nix ├── git-cinnabar-overlay.nix ├── lib-overlay.nix ├── lib └── parseTOML.nix ├── overlays.nix ├── package-set.nix ├── phlay-overlay.nix ├── pinned.nix ├── pkgs ├── cbindgen │ └── default.nix ├── clang │ └── bug-14435.patch ├── firefox-nightly-bin │ └── update.nix ├── gcc-4.7 │ ├── arm-eabi.patch │ ├── builder.sh │ ├── default.nix │ ├── gfortran-driving.patch │ ├── gnat-cflags.patch │ ├── java-jvgenmain-link.patch │ ├── libstdc++-target.patch │ ├── no-sys-dirs.patch │ └── parallel-bconfig-4.7.patch ├── gecko │ ├── default.nix │ └── source.json ├── git-cinnabar │ └── default.nix ├── jsdoc │ ├── default.nix │ ├── node-env.nix │ ├── node-packages.nix │ └── package.json ├── lib │ ├── default.nix │ └── update.nix ├── nixpkgs.json ├── phlay │ └── default.nix └── servo │ └── default.nix ├── release.nix ├── rust-overlay-install.sh ├── rust-overlay.nix ├── rust-src-overlay.nix └── update.nix /.gitignore: -------------------------------------------------------------------------------- 1 | /result* 2 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: nix 2 | addons: 3 | ssh_known_hosts: floki.garbas.si 4 | env: 5 | - STDENV=clang 6 | - STDENV=clang36 7 | - STDENV=clang37 8 | - STDENV=clang38 9 | - STDENV=gcc 10 | - STDENV=gcc49 11 | - STDENV=gcc48 12 | script: 13 | - if [ "$TRAVIS_EVENT_TYPE" == "cron" ]; then 14 | nix-shell update.nix --pure; 15 | fi 16 | - if [ "$TRAVIS_PULL_REQUEST" != "true" -a "$TRAVIS_BRANCH" = "master" ]; then 17 | nix-build release.nix -A gecko."x86_64-linux"."$STDENV"; 18 | mkdir nars/; 19 | nix-push --dest "$PWD/nars/" --force ./result; 20 | fi 21 | before_install: 22 | - openssl aes-256-cbc -K $encrypted_be02022e0814_key -iv 
$encrypted_be02022e0814_iv -in deploy_rsa.enc -out deploy_rsa -d 23 | before_deploy: 24 | - eval "$(ssh-agent -s)" 25 | - chmod 600 $TRAVIS_BUILD_DIR/deploy_rsa 26 | - ssh-add $TRAVIS_BUILD_DIR/deploy_rsa 27 | deploy: 28 | provider: script 29 | skip_cleanup: true 30 | script: rsync -avh --ignore-existing $TRAVIS_BUILD_DIR/nars/ travis@floki.garbas.si:/var/travis/nixpkgs-mozilla/ 31 | on: 32 | branch: master 33 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Community Participation Guidelines 2 | 3 | This repository is governed by Mozilla's code of conduct and etiquette guidelines. 4 | For more details, please read the 5 | [Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/). 6 | 7 | ## How to Report 8 | For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page. 9 | 10 | 16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2017 Mozilla 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies 7 | of the Software, and to permit persons to whom the Software is furnished to do 8 | so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 
12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 20 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | nixpkgs-mozilla 2 | =============== 3 | 4 | Gathering nix efforts in one repository. 5 | 6 | 7 | Current packages 8 | ---------------- 9 | 10 | - gecko (https://github.com/mozilla/gecko-dev) 11 | - firefox-bin variants including Nightly 12 | 13 | firefox-bin variants 14 | -------------------- 15 | 16 | Nixpkgs already has definitions for `firefox 17 | `_, 18 | which is built from source, as well as `firefox-bin 19 | `_, 20 | which is the binary Firefox version built by Mozilla. 21 | 22 | The ``firefox-overlay.nix`` in this repository adds definitions for 23 | some other firefox-bin variants that Mozilla ships: 24 | ``firefox-nightly-bin``, ``firefox-beta-bin``, and 25 | ``firefox-esr-bin``. All are exposed under a ``latest`` attribute, 26 | e.g. ``latest.firefox-nightly-bin``. 27 | 28 | Unfortunately, these variants do not auto-update, and you may see some 29 | annoying pop-ups complaining about this. 30 | 31 | Note that all the ``-bin`` packages are "unfree" (because of the 32 | Firefox trademark, held by Mozilla), so you will need to set 33 | ``nixpkgs.config.allowUnfree`` in order to use them. More info `here 34 | `_. 35 | 36 | Rust overlay 37 | ------------ 38 | 39 | **NOTE:** Nix overlays only works on up-to-date versions of NixOS/nixpkgs, starting from 17.03. 
40 | 41 | A nixpkgs overlay is provided to contain all of the latest rust releases. 42 | 43 | To use the rust overlay run the ``./rust-overlay-install.sh`` command. It will 44 | link the current ``./rust-overlay.nix`` into your ``~/.config/nixpkgs/overlays`` folder. 45 | 46 | Once this is done, use ``nix-env -iA nixpkgs.latest.rustChannels.nightly.rust`` for 47 | example. Replace the ``nixpkgs.`` prefix with ``nixos.`` on NixOS. 48 | 49 | Using in nix expressions 50 | ------------------------ 51 | 52 | Example of using in ```shell.nix```: 53 | 54 | .. code:: nix 55 | 56 | let 57 | moz_overlay = import (builtins.fetchTarball https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz); 58 | nixpkgs = import { overlays = [ moz_overlay ]; }; 59 | in 60 | with nixpkgs; 61 | stdenv.mkDerivation { 62 | name = "moz_overlay_shell"; 63 | buildInputs = [ 64 | # to use the latest nightly: 65 | nixpkgs.latest.rustChannels.nightly.rust 66 | # to use a specific nighly: 67 | (nixpkgs.rustChannelOf { date = "2018-04-11"; channel = "nightly"; }).rust 68 | # to use the project's rust-toolchain file: 69 | (nixpkgs.rustChannelOf { rustToolchain = ./rust-toolchain; }).rust 70 | ]; 71 | } 72 | 73 | Flake usage 74 | ----------- 75 | This repository contains a minimal flake interface for the various 76 | overlays in this repository. To use it in your own flake, add it as 77 | an input to your ``flake.nix``: 78 | 79 | .. 
code:: nix 80 | { 81 | inputs.nixpkgs.url = github:NixOS/nixpkgs; 82 | inputs.nixpkgs-mozilla.url = github:mozilla/nixpkgs-mozilla; 83 | 84 | outputs = { self, nixpkgs, nixpkgs-mozilla }: { 85 | devShell."x86_64-linux" = let 86 | pkgs = import nixpkgs { system = "x86_64-linux"; overlays = [ nixpkgs-mozilla.overlay ]; }; 87 | in pkgs.mkShell { 88 | buildInputs = [ pkgs.latest.rustChannels.nightly.rust ]; 89 | }; 90 | }; 91 | } 92 | The available overlays are ``nixpkgs-mozilla.overlay`` for the 93 | default overlay containing everything, and 94 | ``nixpkgs-mozilla.overlays.{lib, rust, rr, firefox, git-cinnabar}`` 95 | respectively. Depending on your use case, you might need to set the 96 | ``--impure`` flag when invoking the ``nix`` command. This is because 97 | this repository fetches resources from non-pinned URLs 98 | non-reproducibly. 99 | 100 | Firefox Development Environment 101 | ------------------------------- 102 | 103 | This repository provides several tools to facilitate development on 104 | Firefox. Firefox is built on an engine called Gecko, which lends its 105 | name to some of the files and derivations in this repo. 106 | 107 | Checking out Firefox 108 | ~~~~~~~~~~~~~~~~~~~~ 109 | 110 | To build Firefox from source, it is best to have a local checkout of 111 | ``mozilla-central``. ``mozilla-central`` is hosted in Mercurial, but 112 | some people prefer to access it using ``git`` and 113 | ``git-cinnabar``. The tools in this repo support either using 114 | mercurial or git. 115 | 116 | This repository provides a ``git-cinnabar-overlay.nix`` which defines 117 | a ``git-cinnabar`` derivation. This overlay can be used to install 118 | ``git-cinnabar``, either using ``nix-env`` or as part of a system-wide 119 | ``configuration.nix``. 120 | 121 | Building Firefox 122 | ~~~~~~~~~~~~~~~~ 123 | 124 | The ``firefox-overlay.nix`` provides an environment to build Firefox 125 | from its sources, once you have finished the checkout of 126 | ``mozilla-central``. 
You can use ``nix-shell`` to enter this 127 | environment to launch ``mach`` commands to build Firefox and test your 128 | build. 129 | 130 | Some debugging tools are available in this environment as well, but 131 | other development tools (such as those used to submit changes for 132 | review) are outside the scope of this environment. 133 | 134 | The ``nix-shell`` environment is available in the 135 | ``gecko..`` attribute of the ``release.nix`` file provided 136 | in this repository. 137 | 138 | The ```` attribute is either ``x86_64-linux`` or ``i686-linux``. The first 139 | one would create a native toolchain for compiling on x64, while the second one 140 | would give a native toolchain for compiling on x86. Note that due to the size of 141 | the compilation units on x86, the compilation might not be able to complete, but 142 | some sub part of Gecko, such as SpiderMonkey would compile fine. 143 | 144 | The ```` attribute is either ``gcc`` or ``clang``, or any specific version 145 | of the compiler available in the ``compiler-overlay.nix`` file which is repeated 146 | in ``release.nix``. This compiler would only be used for compiling Gecko, and 147 | the rest of the toolchain is compiled against the default ``stdenv`` of the 148 | architecture. 149 | 150 | When first entering the ``nix-shell``, the toolchain will pull and build all 151 | the dependencies necessary to build Gecko, this includes might take some time. 152 | This work will not be necessary the second time, unless you use a different 153 | toolchain or architecture. 154 | 155 | .. code:: sh 156 | 157 | ~/$ cd mozilla-central 158 | ~/mozilla-central$ nix-shell ../nixpkgs-mozilla/release.nix -A gecko.x86_64-linux.gcc --pure 159 | ... pull the rust compiler 160 | ... compile the toolchain 161 | # First time only - initialize virtualenv 162 | [~/mozilla-central] python ./mach create-mach-environment 163 | ... create .mozbuild/_virtualenvs/mach 164 | [~/mozilla-central] python ./mach build 165 | ... 
build firefox desktop 166 | [~/mozilla-central] python ./mach run 167 | ... run firefox 168 | 169 | When entering the ``nix-shell``, the ``MOZCONFIG`` environment variable is set 170 | to a local file, named ``.mozconfig.nix-shell``, created each time you enter the 171 | ``nix-shell``. You can create your own ``.mozconfig`` file which extends the 172 | default one, with your own options. 173 | 174 | .. code:: sh 175 | 176 | ~/mozilla-central$ nix-shell ../nixpkgs-mozilla/release.nix -A gecko.x86_64-linux.gcc --pure 177 | [~/mozilla-central] cat .mozconfig 178 | # Import current nix-shell config. 179 | . .mozconfig.nix-shell 180 | 181 | ac_add_options --enable-js-shell 182 | ac_add_options --disable-tests 183 | [~/mozilla-central] export MOZCONFIG="$(pwd)/.mozconfig" 184 | [~/mozilla-central] python ./mach build 185 | 186 | To avoid repeating yourself, you can also rely on the ``NIX_SHELL_HOOK`` 187 | environment variable, to reset the ``MOZCONFIG`` environment variable for you. 188 | 189 | .. code:: sh 190 | 191 | ~/mozilla-central$ export NIX_SHELL_HOOK="export MOZCONFIG=$(pwd)/.mozconfig;" 192 | ~/mozilla-central$ nix-shell ../nixpkgs-mozilla/release.nix -A gecko.x86_64-linux.gcc --pure 193 | [~/mozilla-central] python ./mach build 194 | 195 | Submitting Firefox patches 196 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ 197 | 198 | Firefox development happens in `Mozilla Phabricator 199 | `_. Mozilla Phabricator 200 | docs are `here 201 | `_. 202 | 203 | To get your commits into Phabricator, some options include: 204 | 205 | - Arcanist, the upstream tool for interacting with 206 | Phabricator. Arcanist is packaged in nixpkgs already; you can find 207 | it in `nixos.arcanist`. Unfortunately, as of this writing, upstream 208 | Arcanist does not support ``git-cinnabar`` (according to `the 209 | "Setting up Arcanist" 210 | `_ 211 | documentation). `Mozilla maintains a fork of Arcanist 212 | `_ but it isn't yet 213 | packaged. (PRs welcome.) 
214 | 215 | - `moz-phab `_, an in-house 216 | CLI for Phabricator. It's available in nix packages (unstable channel). 217 | 218 | - `phlay `_, a small Python script 219 | that speaks to the Phabricator API directly. This repository ships a 220 | ``phlay-overlay.nix`` that you can use to make ``phlay`` available 221 | in a nix-shell or nix-env. 222 | 223 | Note: although the ``nix-shell`` from the previous section may have 224 | all the tools you would normally use to do Firefox development, it 225 | isn't recommended that you use that shell for anything besides tasks 226 | that involve running ``mach``. Other development tasks such as 227 | committing code and submitting patches to code review are best handled 228 | in a separate nix-shell. 229 | 230 | TODO 231 | ---- 232 | 233 | - setup hydra to have binary channels 234 | 235 | - make sure pinned revisions get updated automatically (if build passes we 236 | should update revisions in default.nix) 237 | 238 | - pin to specific (working) nixpkgs revision (as we do for other sources) 239 | 240 | - can we make this work on darwin as well? 241 | 242 | - assign maintainers for our packages that will montior that it "always" builds 243 | 244 | - hook it with vulnix report to monitor CVEs (once vulnix is ready, it must be 245 | ready soon :P) 246 | -------------------------------------------------------------------------------- /compilers-overlay.nix: -------------------------------------------------------------------------------- 1 | # This overlays add a customStdenv attribute which provide an stdenv with 2 | # different versions of the compilers. This can be used to test Gecko builds 3 | # against different compiler settings, or different compiler versions. 4 | # 5 | # See release.nix "builder" function, to understand how these different stdenv 6 | # are used. 
7 | self: super: 8 | 9 | let 10 | noSysDirs = (super.stdenv.system != "x86_64-darwin" 11 | && super.stdenv.system != "x86_64-freebsd" 12 | && super.stdenv.system != "i686-freebsd" 13 | && super.stdenv.system != "x86_64-kfreebsd-gnu"); 14 | crossSystem = null; 15 | 16 | gcc473 = super.wrapCC (super.callPackage ./pkgs/gcc-4.7 (with self; { 17 | inherit noSysDirs; 18 | texinfo = texinfo4; 19 | # I'm not sure if profiling with enableParallelBuilding helps a lot. 20 | # We can enable it back some day. This makes the *gcc* builds faster now. 21 | profiledCompiler = false; 22 | 23 | # When building `gcc.crossDrv' (a "Canadian cross", with host == target 24 | # and host != build), `cross' must be null but the cross-libc must still 25 | # be passed. 26 | cross = null; 27 | libcCross = if crossSystem != null then libcCross else null; 28 | libpthreadCross = 29 | if crossSystem != null && crossSystem.config == "i586-pc-gnu" 30 | then gnu.libpthreadCross 31 | else null; 32 | })); 33 | 34 | # By default wrapCC keep the same header files, but NixOS is using the 35 | # latest header files from GCC, which are not supported by clang, because 36 | # clang implement a different set of locking primitives than GCC. This 37 | # expression is used to wrap clang with a matching verion of the libc++. 38 | maybeWrapClang = cc: cc; 39 | /* 40 | if cc ? clang 41 | then clangWrapCC cc 42 | else cc; 43 | */ 44 | 45 | clangWrapCC = llvmPackages: 46 | let libcxx = 47 | super.lib.overrideDerivation llvmPackages.libcxx (drv: { 48 | # https://bugzilla.mozilla.org/show_bug.cgi?id=1277619 49 | # https://llvm.org/bugs/show_bug.cgi?id=14435 50 | patches = drv.patches ++ [ ./pkgs/clang/bug-14435.patch ]; 51 | }); 52 | in 53 | super.callPackage { 54 | cc = llvmPackages.clang-unwrapped or llvmPackages.clang; 55 | isClang = true; 56 | stdenv = self.clangStdenv; 57 | libc = self.glibc; 58 | # cc-wrapper pulls gcc headers, which are not compatible with features 59 | # implemented in clang. 
These packages are used to override that. 60 | extraPackages = [ self.libcxx llvmPackages.libcxxabi ]; 61 | nativeTools = false; 62 | nativeLibc = false; 63 | }; 64 | 65 | buildWithCompiler = cc: 66 | super.stdenvAdapters.overrideCC self.stdenv (maybeWrapClang cc); 67 | 68 | chgCompilerSource = cc: name: src: 69 | cc.override (conf: 70 | if conf ? gcc then # Nixpkgs 14.12 71 | { gcc = super.lib.overrideDerivation conf.gcc (old: { inherit name src; }); } 72 | else # Nixpkgs 15.05 73 | { cc = super.lib.overrideDerivation conf.cc (old: { inherit name src; }); } 74 | ); 75 | 76 | compilersByName = with self; { 77 | clang = llvmPackages.clang; 78 | clang36 = llvmPackages_36.clang; 79 | clang37 = llvmPackages_37.clang; 80 | clang38 = llvmPackages_38.clang; # not working yet. 81 | clang5 = llvmPackages_5.clang or llvmPackages.clang; 82 | clang6 = llvmPackages_6.clang or llvmPackages.clang; 83 | clang7 = llvmPackages_7.clang or llvmPackages.clang; 84 | clang12 = llvmPackages_12.clang or llvmPackages.clang; 85 | clang13 = llvmPackages_13.clang or llvmPackages.clang; 86 | gcc = gcc; 87 | gcc6 = gcc6; 88 | gcc5 = gcc5; 89 | gcc49 = gcc49; 90 | gcc48 = gcc48; 91 | gcc474 = chgCompilerSource gcc473 "gcc-4.7.4" (fetchurl { 92 | url = "mirror://gnu/gcc/gcc-4.7.4/gcc-4.7.4.tar.bz2"; 93 | sha256 = "10k2k71kxgay283ylbbhhs51cl55zn2q38vj5pk4k950qdnirrlj"; 94 | }); 95 | gcc473 = gcc473; 96 | # Version used on Linux slaves, except Linux x64 ASAN. 
97 | gcc472 = chgCompilerSource gcc473 "gcc-4.7.2" (fetchurl { 98 | url = "mirror://gnu/gcc/gcc-4.7.2/gcc-4.7.2.tar.bz2"; 99 | sha256 = "115h03hil99ljig8lkrq4qk426awmzh0g99wrrggxf8g07bq74la"; 100 | }); 101 | }; 102 | 103 | in { 104 | customStdenvs = 105 | super.lib.mapAttrs (name: value: buildWithCompiler value) compilersByName; 106 | } 107 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | # Nixpkgs overlay which aggregates overlays for tools and products, used and 2 | # published by Mozilla. 3 | self: super: 4 | 5 | with super.lib; 6 | 7 | (foldl' (flip extends) (_: super) 8 | (map import (import ./overlays.nix))) 9 | self 10 | -------------------------------------------------------------------------------- /deploy_rsa.enc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/nixpkgs-mozilla/2292d4b35aa854e312ad2e95c4bb5c293656f21a/deploy_rsa.enc -------------------------------------------------------------------------------- /firefox-overlay.nix: -------------------------------------------------------------------------------- 1 | # This file provide the latest binary versions of Firefox published by Mozilla. 2 | self: super: 3 | 4 | let 5 | # This URL needs to be updated about every 2 years when the subkey is rotated. 6 | pgpKey = super.fetchurl { 7 | url = "https://download.cdn.mozilla.net/pub/firefox/candidates/138.0b1-candidates/build1/KEY"; 8 | hash = "sha256-FOGtyDxtZpW6AbNdSj0QoK1AYkQYxHPypT8zJr2XYQk="; 9 | }; 10 | 11 | # This file is currently maintained manually, if this Nix expression attempt 12 | # to download the wrong version, this is likely to be the problem. 
13 | # 14 | # Open a pull request against https://github.com/mozilla-releng/shipit to 15 | # update the version, as done in 16 | # https://github.com/mozilla-releng/shipit/pull/1467 17 | firefox_versions = with builtins; 18 | fromJSON (readFile (fetchurl "https://product-details.mozilla.org/1.0/firefox_versions.json")); 19 | 20 | arch = if self.stdenv.system == "i686-linux" 21 | then "linux-i686" 22 | else "linux-x86_64"; 23 | 24 | yearOf = with super.lib; yyyymmddhhmmss: 25 | head (splitString "-" yyyymmddhhmmss); 26 | monthOf = with super.lib; yyyymmddhhmmss: 27 | head (tail (splitString "-" yyyymmddhhmmss)); 28 | 29 | # Given SHA512SUMS file contents and file name, extract matching sha512sum. 30 | extractSha512Sum = sha512sums: file: 31 | with builtins; 32 | # Nix 1.x do not have `builtins.split`. 33 | # Nix 2.0 have an bug in `builtins.match` (see https://github.com/NixOS/nix/issues/2147). 34 | # So I made separate logic for Nix 1.x and Nix 2.0. 35 | if builtins ? split then 36 | substring 0 128 (head 37 | (super.lib.filter 38 | (s: isString s && substring 128 (stringLength s) s == " ${file}") 39 | (split "\n" sha512sums))) 40 | else 41 | head (match ".*[\n]([0-9a-f]*) ${file}.*" sha512sums); 42 | 43 | # The timestamp argument is a yyyy-mm-dd-hh-mm-ss date, which corresponds to 44 | # one specific version. This is used mostly for bisecting. 45 | versionInfo = { name, version, release, system ? arch, timestamp ? null, info ? null, ... }: with builtins; 46 | if (info != null) then info else 47 | if release then 48 | # For versions such as Beta & Release: 49 | # https://download.cdn.mozilla.net/pub/firefox/releases/55.0b3/SHA256SUMS 50 | let 51 | dir = "https://download.cdn.mozilla.net/pub/firefox/releases/${version}"; 52 | # After version 134 firefox switched to using tar.xz instead of tar.bz2 53 | majorVersion = super.lib.strings.toInt ( 54 | builtins.elemAt (super.lib.strings.splitString "." 
version) 0 55 | ); 56 | extension = if majorVersion > 134 then "tar.xz" else "tar.bz2"; 57 | file = "${system}/en-US/firefox-${version}.${extension}"; 58 | sha512Of = chksum: file: extractSha512Sum (readFile (fetchurl chksum)) file; 59 | in rec { 60 | chksum = "${dir}/SHA512SUMS"; 61 | chksumSig = "${chksum}.asc"; 62 | chksumSha256 = hashFile "sha256" (fetchurl "${dir}/SHA512SUMS"); 63 | chksumSigSha256 = hashFile "sha256" (fetchurl "${chksum}.asc"); 64 | inherit file; 65 | url = "${dir}/${file}"; 66 | sha512 = sha512Of chksum file; 67 | sig = null; 68 | sigSha512 = null; 69 | } 70 | else 71 | # For Nightly versions: 72 | # https://download.cdn.mozilla.net/pub/firefox/nightly/latest-mozilla-central/firefox-56.0a1.en-US.linux-x86_64.checksums 73 | let 74 | dir = 75 | if timestamp == null then 76 | let 77 | buildhubJSON = with builtins; 78 | fromJSON (readFile (fetchurl "https://download.cdn.mozilla.net/pub/firefox/nightly/latest-mozilla-central/firefox-${version}.en-US.${system}.buildhub.json")); 79 | in builtins.replaceStrings [ "/${file}" ] [ "" ] buildhubJSON.download.url 80 | else "https://download.cdn.mozilla.net/pub/firefox/nightly/${yearOf timestamp}/${monthOf timestamp}/${timestamp}-mozilla-central" ; 81 | file = "firefox-${version}.en-US.${system}.tar.xz"; 82 | sha512Of = chksum: file: head (match ".*[\n]([0-9a-f]*) sha512 [0-9]* ${file}[\n].*" (readFile (fetchurl chksum))); 83 | in rec { 84 | chksum = "${dir}/firefox-${version}.en-US.${system}.checksums"; 85 | chksumSig = null; 86 | # file content: 87 | # sha512 62733881 firefox-56.0a1.en-US.linux-x86_64.tar.bz2 88 | # sha256 62733881 firefox-56.0a1.en-US.linux-x86_64.tar.bz2 89 | url = "${dir}/${file}"; 90 | sha512 = sha512Of chksum file; 91 | sig = "${dir}/${file}.asc"; 92 | sigSha512 = sha512Of chksum "${file}.asc"; 93 | }; 94 | 95 | # From the version info, check the authenticity of the check sum file, such 96 | # that we guarantee that we have 97 | verifyFileAuthenticity = { file, sha512, chksum, 
chksumSig }: 98 | assert extractSha512Sum (builtins.readFile chksum) file == sha512; 99 | super.runCommand "check-firefox-signature" { 100 | buildInputs = [ self.gnupg ]; 101 | FILE = chksum; 102 | ASC = chksumSig; 103 | } '' 104 | set -eu 105 | gpg --dearmor < ${pgpKey} > keyring.gpg 106 | gpgv --keyring=./keyring.gpg $ASC $FILE 107 | mkdir $out 108 | ''; 109 | 110 | # From the version info, create a fetchurl derivation which will get the 111 | # sources from the remote. 112 | fetchVersion = info: 113 | if info.chksumSig != null then 114 | super.fetchurl { 115 | inherit (info) url sha512; 116 | 117 | # This is a fixed derivation, but we still add as a dependency the 118 | # verification of the checksum. Thus, this fetch script can only be 119 | # executed once the verifyAuthenticity script finished successfully. 120 | postFetch = '' 121 | : # Authenticity Check (${verifyFileAuthenticity { 122 | inherit (info) file sha512; 123 | chksum = builtins.fetchurl { url = info.chksum; sha256 = info.chksumSha256; }; 124 | chksumSig = builtins.fetchurl { url = info.chksumSig; sha256 = info.chksumSigSha256; }; 125 | }}) 126 | ''; 127 | } 128 | else 129 | super.fetchurl { 130 | inherit (info) url sha512; 131 | 132 | # This would download the tarball, and then verify that the content 133 | # match the signature file. Fortunately, any failure of this code would 134 | # prevent the output from being reused. 
135 | postFetch = 136 | let asc = super.fetchurl { url = info.sig; sha512 = info.sigSha512; }; in '' 137 | : # Authenticity Check 138 | set -eu 139 | export PATH="$PATH:${self.gnupg}/bin/" 140 | gpg --dearmor < ${pgpKey} > keyring.gpg 141 | gpgv --keyring=./keyring.gpg ${asc} $out 142 | ''; 143 | }; 144 | 145 | firefoxVersion = version: 146 | let 147 | info = versionInfo version; 148 | pkg = ((self.firefox-bin-unwrapped.override ({ 149 | generated = { 150 | version = version.version; 151 | sources = { inherit (info) url sha512; }; 152 | }; 153 | channel = version.channel; 154 | } // super.lib.optionalAttrs (self.firefox-bin-unwrapped.passthru ? applicationName) { 155 | applicationName = version.name; 156 | })).overrideAttrs (old: { 157 | # Add a dependency on the signature check. 158 | src = fetchVersion info; 159 | })); 160 | in super.wrapFirefox pkg ({ 161 | pname = "${pkg.binaryName}-bin"; 162 | wmClass = version.wmClass; 163 | } // super.lib.optionalAttrs (!self.firefox-bin-unwrapped.passthru ? 
applicationName) { 164 | desktopName = version.name; 165 | }); 166 | 167 | firefoxVariants = { 168 | firefox-nightly-bin = { 169 | name = "Firefox Nightly"; 170 | channel = "nightly"; 171 | wmClass = "firefox-nightly"; 172 | version = firefox_versions.FIREFOX_NIGHTLY; 173 | release = false; 174 | }; 175 | firefox-beta-bin = { 176 | name = "Firefox Beta"; 177 | channel = "beta"; 178 | wmClass = "firefox-beta"; 179 | version = firefox_versions.LATEST_FIREFOX_DEVEL_VERSION; 180 | release = true; 181 | }; 182 | firefox-bin = { 183 | name = "Firefox"; 184 | channel = "release"; 185 | wmClass = "firefox"; 186 | version = firefox_versions.LATEST_FIREFOX_VERSION; 187 | release = true; 188 | }; 189 | firefox-esr-bin = { 190 | name = "Firefox ESR"; 191 | channel = "release"; 192 | wmClass = "firefox"; 193 | version = firefox_versions.FIREFOX_ESR; 194 | release = true; 195 | }; 196 | }; 197 | in 198 | 199 | { 200 | lib = super.lib // { 201 | firefoxOverlay = { 202 | inherit pgpKey firefoxVersion versionInfo firefox_versions firefoxVariants; 203 | }; 204 | }; 205 | 206 | # Set of packages which are automagically updated. Do not rely on these for 207 | # reproducible builds. 208 | latest = (super.latest or {}) // (builtins.mapAttrs (n: v: firefoxVersion v) firefoxVariants); 209 | 210 | # Set of packages which used to build developer environment 211 | devEnv = (super.shell or {}) // { 212 | gecko = super.callPackage ./pkgs/gecko { 213 | inherit (self.python38Packages) setuptools; 214 | pythonFull = self.python38Full; 215 | nodejs = 216 | if builtins.compareVersions self.nodejs.name "nodejs-8.11.3" < 0 217 | then self.nodejs-8_x else self.nodejs; 218 | 219 | rust-cbindgen = 220 | if !(self ? 
"rust-cbindgen") then self.rust-cbindgen-latest 221 | else if builtins.compareVersions self.rust-cbindgen.version self.rust-cbindgen-latest.version < 0 222 | then self.rust-cbindgen-latest else self.rust-cbindgen; 223 | 224 | # Due to std::ascii::AsciiExt changes in 1.23, Gecko does not compile, so 225 | # use the latest Rust version before 1.23. 226 | # rust = (super.rustChannelOf { channel = "stable"; date = "2017-11-22"; }).rust; 227 | # rust = (super.rustChannelOf { channel = "stable"; date = "2020-03-12"; }).rust; 228 | inherit (self.latest.rustChannels.stable) rust; 229 | }; 230 | }; 231 | 232 | # Use rust-cbindgen imported from Nixpkgs (September 2018) unless the current 233 | # version of Nixpkgs already packages a version of rust-cbindgen. 234 | rust-cbindgen-latest = super.callPackage ./pkgs/cbindgen { 235 | rustPlatform = super.makeRustPlatform { 236 | cargo = self.latest.rustChannels.stable.rust; 237 | rustc = self.latest.rustChannels.stable.rust; 238 | }; 239 | }; 240 | 241 | jsdoc = super.callPackage ./pkgs/jsdoc {}; 242 | } 243 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Mozilla overlay for Nixpkgs"; 3 | 4 | outputs = { self, ... }: { 5 | # Default overlay. 6 | overlay = import ./default.nix; 7 | 8 | # Inidividual overlays. 9 | overlays = { 10 | lib = import ./lib-overlay.nix; 11 | rust = import ./rust-overlay.nix; 12 | firefox = import ./firefox-overlay.nix; 13 | git-cinnabar = import ./git-cinnabar-overlay.nix; 14 | }; 15 | }; 16 | } 17 | -------------------------------------------------------------------------------- /git-cinnabar-overlay.nix: -------------------------------------------------------------------------------- 1 | self: super: 2 | 3 | { 4 | git-cinnabar = super.callPackage ./pkgs/git-cinnabar { 5 | # we need urllib to recognize ssh. 
6 | # python = self.pythonFull; 7 | python = self.mercurial.python; 8 | }; 9 | } 10 | -------------------------------------------------------------------------------- /lib-overlay.nix: -------------------------------------------------------------------------------- 1 | self: super: 2 | 3 | { 4 | lib = super.lib // (import ./pkgs/lib/default.nix { pkgs = self; }); 5 | } 6 | -------------------------------------------------------------------------------- /lib/parseTOML.nix: -------------------------------------------------------------------------------- 1 | with builtins; 2 | 3 | # Tokenizer. 4 | let 5 | layout_pat = "[ \n]+"; 6 | layout_pat_opt = "[ \n]*"; 7 | token_pat = ''=|[[][[][a-zA-Z0-9_."*-]+[]][]]|[[][a-zA-Z0-9_."*-]+[]]|[[][^]]+[]]|[a-zA-Z0-9_-]+|"[^"]*"''; #" 8 | 9 | tokenizer_1_11 = str: 10 | let 11 | tokenizer_rec = len: prevTokens: patterns: str: 12 | let 13 | pattern = head patterns; 14 | layoutAndTokens = match pattern str; 15 | matchLength = stringLength (head layoutAndTokens); 16 | tokens = prevTokens ++ tail layoutAndTokens; 17 | in 18 | if layoutAndTokens == null then 19 | # if we cannot reduce the pattern, return the list of token 20 | if tail patterns == [] then prevTokens 21 | # otherwise, take the next pattern, which only captures half the token. 22 | else tokenizer_rec len prevTokens (tail patterns) str 23 | else tokenizer_rec len tokens patterns (substring matchLength len str); 24 | 25 | avgTokenSize = 100; 26 | ceilLog2 = v: 27 | let inner = n: i: if i < v then inner (n + 1) (i * 2) else n; in 28 | inner 1 1; 29 | 30 | # The builtins.match function match the entire string, and generate a list of all captured 31 | # elements. This is the most efficient way to make a tokenizer, if we can make a pattern which 32 | # capture all token of the file. Unfortunately C++ std::regex does not support captures in 33 | # repeated patterns. 
As a work-around, we generate patterns which are matching tokens in multiple 34 | # of 2, such that we can avoid iterating too many times over the content. 35 | generatePatterns = str: 36 | let 37 | depth = ceilLog2 (stringLength str / avgTokenSize); 38 | inner = depth: 39 | if depth == 0 then [ "(${token_pat})" ] 40 | else 41 | let next = inner (depth - 1); in 42 | [ "${head next}${layout_pat}${head next}" ] ++ next; 43 | in 44 | map (pat: "(${layout_pat_opt}${pat}).*" ) (inner depth); 45 | 46 | in 47 | tokenizer_rec (stringLength str) [] (generatePatterns str) str; 48 | 49 | tokenizer_1_12 = str: 50 | let 51 | # Nix 1.12 has the builtins.split function which allow to tokenize the 52 | # file quickly. by iterating with a simple regexp. 53 | layoutTokenList = split "(${token_pat})" str; 54 | isLayout = s: match layout_pat_opt s != null; 55 | filterLayout = list: 56 | filter (s: 57 | if isString s then 58 | if isLayout s then false 59 | else throw "Error: Unexpected token: '${s}'" 60 | else true) list; 61 | removeTokenWrapper = list: 62 | map (x: assert tail x == []; head x) list; 63 | in 64 | removeTokenWrapper (filterLayout layoutTokenList); 65 | 66 | tokenizer = 67 | if builtins ? split 68 | then tokenizer_1_12 69 | else tokenizer_1_11; 70 | in 71 | 72 | # Parse entry headers 73 | let 74 | unescapeString = str: 75 | # Let's ignore any escape character for the moment. 76 | assert match ''"[^"]*"'' str != null; #" 77 | substring 1 (stringLength str - 2) str; 78 | 79 | # Match the content of TOML format section names. 80 | ident_pat = ''[a-zA-Z0-9_-]+|"[^"]*"''; #" 81 | 82 | removeBraces = token: wrapLen: 83 | substring wrapLen (stringLength token - 2 * wrapLen) token; 84 | 85 | # Note, this implementation is limited to 11 identifiers. 
86 | matchPathFun_1_11 = token: 87 | let 88 | # match header_pat "a.b.c" == [ "a" ".b" "b" ".c" "c" ] 89 | header_pat = 90 | foldl' (pat: n: "(${ident_pat})([.]${pat})?") 91 | "(${ident_pat})" (genList (n: 0) 10); 92 | matchPath = match header_pat token; 93 | filterDot = filter (s: substring 0 1 s != ".") matchPath; 94 | in 95 | filterDot; 96 | 97 | matchPathFun_1_12 = token: 98 | map (e: head e) 99 | (filter (s: isList s) 100 | (split "(${ident_pat})" token)); 101 | 102 | matchPathFun = 103 | if builtins ? split 104 | then matchPathFun_1_12 105 | else matchPathFun_1_11; 106 | 107 | headerToPath = token: wrapLen: 108 | let 109 | token' = removeBraces token wrapLen; 110 | matchPath = matchPathFun token'; 111 | path = 112 | map (s: 113 | if substring 0 1 s != ''"'' then s #" 114 | else unescapeString s 115 | ) matchPath; 116 | in 117 | assert matchPath != null; 118 | # assert trace "Path: ${token'}; match as ${toString path}" true; 119 | path; 120 | in 121 | 122 | # Reconstruct the equivalent attribute set. 123 | let 124 | tokenToValue = token: 125 | if token == "true" then true 126 | else if token == "false" then false 127 | # TODO: convert the TOML list into a Nix list. 128 | else if match "[[][^]]+[]]" token != null then token 129 | else unescapeString token; 130 | 131 | parserInitState = { 132 | idx = 0; 133 | path = []; 134 | isList = false; 135 | output = []; 136 | elem = {}; 137 | }; 138 | 139 | # Imported from nixpkgs library. 
140 | setAttrByPath = attrPath: value: 141 | if attrPath == [] then value 142 | else listToAttrs 143 | [ { name = head attrPath; value = setAttrByPath (tail attrPath) value; } ]; 144 | 145 | closeSection = state: 146 | state // { 147 | output = state.output ++ [ (setAttrByPath state.path ( 148 | if state.isList then [ state.elem ] 149 | else state.elem 150 | )) ]; 151 | }; 152 | 153 | readToken = state: token: 154 | # assert trace "Read '${token}'" true; 155 | if state.idx == 0 then 156 | if substring 0 2 token == "[[" then 157 | (closeSection state) // { 158 | path = headerToPath token 2; 159 | isList = true; 160 | elem = {}; 161 | } 162 | else if substring 0 1 token == "[" then 163 | (closeSection state) // { 164 | path = headerToPath token 1; 165 | isList = false; 166 | elem = {}; 167 | } 168 | else 169 | assert match "[a-zA-Z0-9_-]+" token != null; 170 | state // { idx = 1; name = token; } 171 | else if state.idx == 1 then 172 | assert token == "="; 173 | state // { idx = 2; } 174 | else 175 | assert state.idx == 2; 176 | state // { 177 | idx = 0; 178 | elem = state.elem // { 179 | "${state.name}" = tokenToValue token; 180 | }; 181 | }; 182 | 183 | # aggregate each section as individual attribute sets. 184 | parser = str: 185 | closeSection (foldl' readToken parserInitState (tokenizer str)); 186 | 187 | fromTOML = toml: 188 | let 189 | sections = (parser toml).output; 190 | # Inlined from nixpkgs library functions. 
191 | zipAttrs = sets: 192 | listToAttrs (map (n: { 193 | name = n; 194 | value = 195 | let v = catAttrs n sets; in 196 | # assert trace "Visiting ${n}" true; 197 | if tail v == [] then head v 198 | else if isList (head v) then concatLists v 199 | else if isAttrs (head v) then zipAttrs v 200 | else throw "cannot merge sections"; 201 | }) (concatLists (map attrNames sets))); 202 | in 203 | zipAttrs sections; 204 | in 205 | 206 | { 207 | testing = fromTOML (builtins.readFile ./channel-rust-nightly.toml); 208 | testing_url = fromTOML (builtins.readFile (builtins.fetchurl 209 | "https://static.rust-lang.org/dist/channel-rust-nightly.toml")); 210 | inherit fromTOML; 211 | } 212 | -------------------------------------------------------------------------------- /overlays.nix: -------------------------------------------------------------------------------- 1 | [ 2 | ./lib-overlay.nix 3 | ./rust-overlay.nix 4 | ./firefox-overlay.nix 5 | ./git-cinnabar-overlay.nix 6 | ] 7 | -------------------------------------------------------------------------------- /package-set.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | 3 | with pkgs.lib; 4 | let 5 | self = foldl' 6 | (prev: overlay: prev // (overlay (pkgs // self) (pkgs // prev))) 7 | {} (map import (import ./overlays.nix)); 8 | in self 9 | -------------------------------------------------------------------------------- /phlay-overlay.nix: -------------------------------------------------------------------------------- 1 | self: super: 2 | 3 | { 4 | phlay = super.callPackage ./pkgs/phlay {}; 5 | } 6 | -------------------------------------------------------------------------------- /pinned.nix: -------------------------------------------------------------------------------- 1 | # This script extends nixpkgs with mozilla packages. 2 | # 3 | # First it imports the in the environment and depends on it 4 | # providing fetchFromGitHub and lib.importJSON. 
5 | # 6 | # After that it loads a pinned release of nixos-unstable and uses that as the 7 | # base for the rest of packaging. One can pass it's own pkgsPath attribute if 8 | # desired, probably in the context of hydra. 9 | 10 | { pkgsPath ? null 11 | , overlays ? [] 12 | , system ? null 13 | , geckoSrc ? null 14 | }: 15 | 16 | # Pin a specific version of Nixpkgs. 17 | let 18 | _pkgs = import {}; 19 | _pkgsPath = 20 | if pkgsPath != null then pkgsPath 21 | else _pkgs.fetchFromGitHub (_pkgs.lib.importJSON ./pkgs/nixpkgs.json); 22 | nixpkgs = import _pkgsPath ({ 23 | overlays = import ./default.nix ++ overlays; 24 | } // (if system != null then { inherit system; } else {})); 25 | in 26 | nixpkgs // { 27 | # Do not add a name attribute attribute in an overlay !!! As this will cause 28 | # tons of recompilations. 29 | name = "nixpkgs"; 30 | updateScript = nixpkgs.lib.updateFromGitHub { 31 | owner = "NixOS"; 32 | repo = "nixpkgs-channels"; 33 | branch = "nixos-unstable-small"; 34 | path = "pkgs/nixpkgs.json"; 35 | }; 36 | } 37 | -------------------------------------------------------------------------------- /pkgs/cbindgen/default.nix: -------------------------------------------------------------------------------- 1 | ### NOTE: This file is a copy of the one from Nixpkgs repository 2 | ### (taken 2020 February) from commit 82d9ce45fe0b67e3708ab6ba47ffcb4bba09945d. 3 | ### It is used when the version of cbindgen in 4 | ### upstream nixpkgs is not up-to-date enough to compile Firefox. 
5 | 6 | { stdenv, lib, fetchFromGitHub, rustPlatform 7 | # , Security 8 | }: 9 | 10 | rustPlatform.buildRustPackage rec { 11 | name = "rust-cbindgen-${version}"; 12 | version = "0.14.3"; 13 | 14 | src = fetchFromGitHub { 15 | owner = "eqrion"; 16 | repo = "cbindgen"; 17 | rev = "v${version}"; 18 | sha256 = "0pw55334i10k75qkig8bgcnlsy613zw2p5j4xyz8v71s4vh1a58j"; 19 | }; 20 | 21 | cargoSha256 = "0088ijnjhqfvdb1wxy9jc7hq8c0yxgj5brlg68n9vws1mz9rilpy"; 22 | 23 | # buildInputs = lib.optional stdenv.isDarwin Security; 24 | 25 | checkFlags = [ 26 | # https://github.com/eqrion/cbindgen/issues/338 27 | "--skip test_expand" 28 | ]; 29 | # https://github.com/NixOS/nixpkgs/issues/61618 30 | postConfigure = '' 31 | mkdir .cargo 32 | touch .cargo/.package-cache 33 | export HOME=`pwd` 34 | ''; 35 | 36 | meta = with lib; { 37 | description = "A project for generating C bindings from Rust code"; 38 | homepage = "https://github.com/eqrion/cbindgen"; 39 | license = licenses.mpl20; 40 | maintainers = with maintainers; [ jtojnar andir ]; 41 | }; 42 | } 43 | -------------------------------------------------------------------------------- /pkgs/clang/bug-14435.patch: -------------------------------------------------------------------------------- 1 | diff -x _inst -x _build -x .svn -ur libcxx.old/include/cstdio libcxx.new/include/cstdio 2 | --- libcxx.old/include/cstdio 2016-07-08 12:47:12.964181871 +0000 3 | +++ libcxx.new/include/cstdio 2016-07-08 12:47:27.540149147 +0000 4 | @@ -109,15 +109,15 @@ 5 | #endif 6 | 7 | #ifdef getc 8 | -inline _LIBCPP_INLINE_VISIBILITY int __libcpp_getc(FILE* __stream) {return getc(__stream);} 9 | +inline __attribute__ ((__always_inline__)) int __libcpp_getc(FILE* __stream) {return getc(__stream);} 10 | #undef getc 11 | -inline _LIBCPP_INLINE_VISIBILITY int getc(FILE* __stream) {return __libcpp_getc(__stream);} 12 | +inline __attribute__ ((__always_inline__)) int getc(FILE* __stream) {return __libcpp_getc(__stream);} 13 | #endif // getc 14 | 15 | #ifdef 
putc 16 | -inline _LIBCPP_INLINE_VISIBILITY int __libcpp_putc(int __c, FILE* __stream) {return putc(__c, __stream);} 17 | +inline __attribute__ ((__always_inline__)) int __libcpp_putc(int __c, FILE* __stream) {return putc(__c, __stream);} 18 | #undef putc 19 | -inline _LIBCPP_INLINE_VISIBILITY int putc(int __c, FILE* __stream) {return __libcpp_putc(__c, __stream);} 20 | +inline __attribute__ ((__always_inline__)) int putc(int __c, FILE* __stream) {return __libcpp_putc(__c, __stream);} 21 | #endif // putc 22 | 23 | #ifdef clearerr 24 | diff -x _inst -x _build -x .svn -ur libcxx.old/include/utility libcxx.new/include/utility 25 | --- libcxx.old/include/utility 2016-07-08 12:46:02.570334913 +0000 26 | +++ libcxx.new/include/utility 2016-07-08 12:51:00.760636878 +0000 27 | @@ -217,7 +217,7 @@ 28 | } 29 | 30 | template 31 | -inline _LIBCPP_INLINE_VISIBILITY 32 | +inline __attribute__ ((__always_inline__)) 33 | void 34 | swap(_Tp (&__a)[_Np], _Tp (&__b)[_Np]) _NOEXCEPT_(__is_nothrow_swappable<_Tp>::value) 35 | { 36 | -------------------------------------------------------------------------------- /pkgs/firefox-nightly-bin/update.nix: -------------------------------------------------------------------------------- 1 | { name 2 | , writeScript 3 | , xidel 4 | , coreutils 5 | , gnused 6 | , gnugrep 7 | , curl 8 | , jq 9 | }: 10 | 11 | let 12 | version = (builtins.parseDrvName name).version; 13 | in writeScript "update-firefox-nightly-bin" '' 14 | PATH=${coreutils}/bin:${gnused}/bin:${gnugrep}/bin:${xidel}/bin:${curl}/bin:${jq}/bin 15 | 16 | #set -eux 17 | pushd pkgs/firefox-nightly-bin 18 | 19 | tmpfile=`mktemp` 20 | url=https://archive.mozilla.org/pub/firefox/nightly/latest-mozilla-central/ 21 | 22 | nightly_file=`curl $url | \ 23 | xidel - --extract //a | \ 24 | grep firefox | \ 25 | grep linux-x86_64.json | \ 26 | tail -1 | \ 27 | sed -e 's/.json//'` 28 | nightly_json=`curl --silent $url$nightly_file.json` 29 | 30 | cat > $tmpfile < 24 | // 25 | Index: 
gcc-4_7-branch/libstdc++-v3/testsuite/20_util/make_unsigned/requirements/typedefs-2.cc 26 | =================================================================== 27 | --- gcc-4_7-branch/libstdc++-v3/testsuite/20_util/make_unsigned/requirements/typedefs-2.cc (revision 194579) 28 | +++ gcc-4_7-branch/libstdc++-v3/testsuite/20_util/make_unsigned/requirements/typedefs-2.cc (revision 194580) 29 | @@ -1,5 +1,5 @@ 30 | // { dg-options "-std=gnu++0x -funsigned-char -fshort-enums" } 31 | -// { dg-options "-std=gnu++0x -funsigned-char -fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi } } 32 | +// { dg-options "-std=gnu++0x -funsigned-char -fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi* } } 33 | 34 | // 2007-05-03 Benjamin Kosnik 35 | // 36 | Index: gcc-4_7-branch/libjava/configure.ac 37 | =================================================================== 38 | --- gcc-4_7-branch/libjava/configure.ac (revision 194579) 39 | +++ gcc-4_7-branch/libjava/configure.ac (revision 194580) 40 | @@ -931,7 +931,7 @@ 41 | # on Darwin -single_module speeds up loading of the dynamic libraries. 42 | extra_ldflags_libjava=-Wl,-single_module 43 | ;; 44 | -arm*linux*eabi) 45 | +arm*-*-linux*eabi*) 46 | # Some of the ARM unwinder code is actually in libstdc++. We 47 | # could in principle replicate it in libgcj, but it's better to 48 | # have a dependency on libstdc++. 49 | Index: gcc-4_7-branch/libjava/configure 50 | =================================================================== 51 | --- gcc-4_7-branch/libjava/configure (revision 194579) 52 | +++ gcc-4_7-branch/libjava/configure (revision 194580) 53 | @@ -20542,7 +20542,7 @@ 54 | # on Darwin -single_module speeds up loading of the dynamic libraries. 55 | extra_ldflags_libjava=-Wl,-single_module 56 | ;; 57 | -arm*linux*eabi) 58 | +arm*-*-linux*eabi*) 59 | # Some of the ARM unwinder code is actually in libstdc++. 
We 60 | # could in principle replicate it in libgcj, but it's better to 61 | # have a dependency on libstdc++. 62 | Index: gcc-4_7-branch/libgcc/config.host 63 | =================================================================== 64 | --- gcc-4_7-branch/libgcc/config.host (revision 194579) 65 | +++ gcc-4_7-branch/libgcc/config.host (revision 194580) 66 | @@ -327,7 +327,7 @@ 67 | arm*-*-linux*) # ARM GNU/Linux with ELF 68 | tmake_file="${tmake_file} arm/t-arm t-fixedpoint-gnu-prefix" 69 | case ${host} in 70 | - arm*-*-linux-*eabi) 71 | + arm*-*-linux-*eabi*) 72 | tmake_file="${tmake_file} arm/t-elf arm/t-bpabi arm/t-linux-eabi t-slibgcc-libgcc" 73 | tm_file="$tm_file arm/bpabi-lib.h" 74 | unwind_header=config/arm/unwind-arm.h 75 | Index: gcc-4_7-branch/gcc/doc/install.texi 76 | =================================================================== 77 | --- gcc-4_7-branch/gcc/doc/install.texi (revision 194579) 78 | +++ gcc-4_7-branch/gcc/doc/install.texi (revision 194580) 79 | @@ -3222,7 +3222,7 @@ 80 | @heading @anchor{arm-x-eabi}arm-*-eabi 81 | ARM-family processors. Subtargets that use the ELF object format 82 | require GNU binutils 2.13 or newer. Such subtargets include: 83 | -@code{arm-*-netbsdelf}, @code{arm-*-*linux-gnueabi} 84 | +@code{arm-*-netbsdelf}, @code{arm-*-*linux-gnueabi*} 85 | and @code{arm-*-rtemseabi}. 
86 | 87 | @html 88 | Index: gcc-4_7-branch/gcc/testsuite/gcc.target/arm/synchronize.c 89 | =================================================================== 90 | --- gcc-4_7-branch/gcc/testsuite/gcc.target/arm/synchronize.c (revision 194579) 91 | +++ gcc-4_7-branch/gcc/testsuite/gcc.target/arm/synchronize.c (revision 194580) 92 | @@ -1,4 +1,4 @@ 93 | -/* { dg-final { scan-assembler "__sync_synchronize|dmb|mcr" { target arm*-*-linux-*eabi } } } */ 94 | +/* { dg-final { scan-assembler "__sync_synchronize|dmb|mcr" { target arm*-*-linux-*eabi* } } } */ 95 | 96 | void *foo (void) 97 | { 98 | Index: gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.jason/enum6.C 99 | =================================================================== 100 | --- gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.jason/enum6.C (revision 194579) 101 | +++ gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.jason/enum6.C (revision 194580) 102 | @@ -7,10 +7,10 @@ 103 | // enum-size attributes should only be emitted if there are values of 104 | // enum type that can escape the compilation unit, gcc cannot currently 105 | // detect this; if this facility is added then this linker option should 106 | -// not be needed. arm-*-linux*eabi should be a good approximation to 107 | +// not be needed. arm-*-linux*eabi* should be a good approximation to 108 | // those platforms where the EABI supplement defines enum values to be 109 | // 32 bits wide. 
110 | -// { dg-options "-fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi } } 111 | +// { dg-options "-fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi* } } 112 | 113 | #include 114 | 115 | Index: gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.other/enum4.C 116 | =================================================================== 117 | --- gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.other/enum4.C (revision 194579) 118 | +++ gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.other/enum4.C (revision 194580) 119 | @@ -9,10 +9,10 @@ 120 | // enum-size attributes should only be emitted if there are values of 121 | // enum type that can escape the compilation unit, gcc cannot currently 122 | // detect this; if this facility is added then this linker option should 123 | -// not be needed. arm-*-linux*eabi should be a good approximation to 124 | +// not be needed. arm-*-linux*eabi* should be a good approximation to 125 | // those platforms where the EABI supplement defines enum values to be 126 | // 32 bits wide. 127 | -// { dg-options "-fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi } } 128 | +// { dg-options "-fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi* } } 129 | 130 | enum E { 131 | a = -312 132 | Index: gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.law/enum9.C 133 | =================================================================== 134 | --- gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.law/enum9.C (revision 194579) 135 | +++ gcc-4_7-branch/gcc/testsuite/g++.old-deja/g++.law/enum9.C (revision 194580) 136 | @@ -7,10 +7,10 @@ 137 | // enum-size attributes should only be emitted if there are values of 138 | // enum type that can escape the compilation unit, gcc cannot currently 139 | // detect this; if this facility is added then this linker option should 140 | -// not be needed. arm-*-linux*eabi should be a good approximation to 141 | +// not be needed. 
arm-*-linux*eabi* should be a good approximation to 142 | // those platforms where the EABI supplement defines enum values to be 143 | // 32 bits wide. 144 | -// { dg-options "-fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi } } 145 | +// { dg-options "-fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi* } } 146 | 147 | // GROUPS passed enums 148 | extern "C" int printf (const char *, ...); 149 | Index: gcc-4_7-branch/gcc/testsuite/lib/target-supports.exp 150 | =================================================================== 151 | --- gcc-4_7-branch/gcc/testsuite/lib/target-supports.exp (revision 194579) 152 | +++ gcc-4_7-branch/gcc/testsuite/lib/target-supports.exp (revision 194580) 153 | @@ -3818,7 +3818,7 @@ 154 | } 155 | } "" 156 | }] 157 | - } elseif { [istarget arm*-*-linux-gnueabi] } { 158 | + } elseif { [istarget arm*-*-linux-gnueabi*] } { 159 | return [check_runtime sync_longlong_runtime { 160 | #include 161 | int main () 162 | @@ -3860,7 +3860,7 @@ 163 | || [istarget i?86-*-*] 164 | || [istarget x86_64-*-*] 165 | || [istarget alpha*-*-*] 166 | - || [istarget arm*-*-linux-gnueabi] 167 | + || [istarget arm*-*-linux-gnueabi*] 168 | || [istarget bfin*-*linux*] 169 | || [istarget hppa*-*linux*] 170 | || [istarget s390*-*-*] 171 | @@ -3890,7 +3890,7 @@ 172 | || [istarget i?86-*-*] 173 | || [istarget x86_64-*-*] 174 | || [istarget alpha*-*-*] 175 | - || [istarget arm*-*-linux-gnueabi] 176 | + || [istarget arm*-*-linux-gnueabi*] 177 | || [istarget hppa*-*linux*] 178 | || [istarget s390*-*-*] 179 | || [istarget powerpc*-*-*] 180 | Index: gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_9.f90 181 | =================================================================== 182 | --- gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_9.f90 (revision 194579) 183 | +++ gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_9.f90 (revision 194580) 184 | @@ -1,6 +1,6 @@ 185 | ! { dg-do run } 186 | ! { dg-options "-fshort-enums" } 187 | -! 
{ dg-options "-fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi } } 188 | +! { dg-options "-fshort-enums -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi* } } 189 | ! Program to test enumerations when option -fshort-enums is given 190 | 191 | program main 192 | Index: gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_10.f90 193 | =================================================================== 194 | --- gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_10.f90 (revision 194579) 195 | +++ gcc-4_7-branch/gcc/testsuite/gfortran.dg/enum_10.f90 (revision 194580) 196 | @@ -1,7 +1,7 @@ 197 | ! { dg-do run } 198 | ! { dg-additional-sources enum_10.c } 199 | ! { dg-options "-fshort-enums -w" } 200 | -! { dg-options "-fshort-enums -w -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi } } 201 | +! { dg-options "-fshort-enums -w -Wl,--no-enum-size-warning" { target arm*-*-linux*eabi* } } 202 | ! Make sure short enums are indeed interoperable with the 203 | ! corresponding C type. 204 | 205 | Index: gcc-4_7-branch/gcc/ada/gcc-interface/Makefile.in 206 | =================================================================== 207 | --- gcc-4_7-branch/gcc/ada/gcc-interface/Makefile.in (revision 194579) 208 | +++ gcc-4_7-branch/gcc/ada/gcc-interface/Makefile.in (revision 194580) 209 | @@ -1866,7 +1866,7 @@ 210 | LIBRARY_VERSION := $(LIB_VERSION) 211 | endif 212 | 213 | -ifeq ($(strip $(filter-out arm% linux-gnueabi,$(arch) $(osys)-$(word 4,$(targ)))),) 214 | +ifeq ($(strip $(filter-out arm%-linux,$(arch)-$(osys)) $(if $(findstring eabi,$(word 4,$(targ))),,$(word 4,$(targ)))),) 215 | LIBGNAT_TARGET_PAIRS = \ 216 | a-intnam.ads zip != null && unzip != null 35 | && zlib != null && boehmgc != null 36 | && perl != null; # for `--enable-java-home' 37 | assert langAda -> gnatboot != null; 38 | assert langVhdl -> gnat != null; 39 | 40 | # LTO needs libelf and zlib. 41 | assert libelf != null -> zlib != null; 42 | 43 | # Make sure we get GNU sed. 
44 | assert stdenv.isDarwin -> gnused != null; 45 | 46 | # The go frontend is written in c++ 47 | assert langGo -> langCC; 48 | 49 | with lib; 50 | with builtins; 51 | 52 | let version = "4.7.3"; 53 | 54 | # Whether building a cross-compiler for GNU/Hurd. 55 | crossGNU = cross != null && cross.config == "i586-pc-gnu"; 56 | 57 | /* gccinstall.info says that "parallel make is currently not supported since 58 | collisions in profile collecting may occur". 59 | 60 | Parallel make of gfortran is disabled because of an apparent race 61 | condition concerning the generation of "bconfig.h". Please try and 62 | re-enable parallel make for a later release of gfortran to check whether 63 | the error has been fixed. 64 | */ 65 | enableParallelBuilding = !profiledCompiler && !langFortran; 66 | 67 | patches = [] 68 | ++ optional enableParallelBuilding ./parallel-bconfig-4.7.patch 69 | ++ optional stdenv.isArm [ ./arm-eabi.patch ] 70 | ++ optional (cross != null) ./libstdc++-target.patch 71 | # ++ optional noSysDirs ./no-sys-dirs.patch 72 | # The GNAT Makefiles did not pay attention to CFLAGS_FOR_TARGET for its 73 | # target libraries and tools. 74 | ++ optional langAda ./gnat-cflags.patch 75 | ++ optional langFortran ./gfortran-driving.patch; 76 | 77 | javaEcj = fetchurl { 78 | # The `$(top_srcdir)/ecj.jar' file is automatically picked up at 79 | # `configure' time. 80 | 81 | # XXX: Eventually we might want to take it from upstream. 82 | url = "ftp://sourceware.org/pub/java/ecj-4.3.jar"; 83 | sha256 = "0jz7hvc0s6iydmhgh5h2m15yza7p2rlss2vkif30vm9y77m97qcx"; 84 | }; 85 | 86 | # Antlr (optional) allows the Java `gjdoc' tool to be built. We want a 87 | # binary distribution here to allow the whole chain to be bootstrapped. 
88 | javaAntlr = fetchurl { 89 | url = "http://www.antlr.org/download/antlr-3.1.3.jar"; 90 | sha256 = "1f41j0y4kjydl71lqlvr73yagrs2jsg1fjymzjz66mjy7al5lh09"; 91 | }; 92 | 93 | xlibs = [ 94 | libX11 libXt libSM libICE libXtst libXrender libXrandr libXi 95 | xproto renderproto xextproto inputproto randrproto 96 | ]; 97 | 98 | javaAwtGtk = langJava && gtk != null; 99 | 100 | /* Platform flags */ 101 | platformFlags = let 102 | gccArch = lib.attrByPath [ "platform" "gcc" "arch" ] null stdenv; 103 | gccCpu = lib.attrByPath [ "platform" "gcc" "cpu" ] null stdenv; 104 | gccAbi = lib.attrByPath [ "platform" "gcc" "abi" ] null stdenv; 105 | gccFpu = lib.attrByPath [ "platform" "gcc" "fpu" ] null stdenv; 106 | gccFloat = lib.attrByPath [ "platform" "gcc" "float" ] null stdenv; 107 | gccMode = lib.attrByPath [ "platform" "gcc" "mode" ] null stdenv; 108 | withArch = if gccArch != null then " --with-arch=${gccArch}" else ""; 109 | withCpu = if gccCpu != null then " --with-cpu=${gccCpu}" else ""; 110 | withAbi = if gccAbi != null then " --with-abi=${gccAbi}" else ""; 111 | withFpu = if gccFpu != null then " --with-fpu=${gccFpu}" else ""; 112 | withFloat = if gccFloat != null then " --with-float=${gccFloat}" else ""; 113 | withMode = if gccMode != null then " --with-mode=${gccMode}" else ""; 114 | in 115 | (withArch + 116 | withCpu + 117 | withAbi + 118 | withFpu + 119 | withFloat + 120 | withMode); 121 | 122 | /* Cross-gcc settings */ 123 | crossMingw = (cross != null && cross.libc == "msvcrt"); 124 | crossConfigureFlags = let 125 | gccArch = lib.attrByPath [ "gcc" "arch" ] null cross; 126 | gccCpu = lib.attrByPath [ "gcc" "cpu" ] null cross; 127 | gccAbi = lib.attrByPath [ "gcc" "abi" ] null cross; 128 | gccFpu = lib.attrByPath [ "gcc" "fpu" ] null cross; 129 | gccFloat = lib.attrByPath [ "gcc" "float" ] null cross; 130 | gccMode = lib.attrByPath [ "gcc" "mode" ] null cross; 131 | withArch = if gccArch != null then " --with-arch=${gccArch}" else ""; 132 | withCpu = if gccCpu != 
null then " --with-cpu=${gccCpu}" else ""; 133 | withAbi = if gccAbi != null then " --with-abi=${gccAbi}" else ""; 134 | withFpu = if gccFpu != null then " --with-fpu=${gccFpu}" else ""; 135 | withFloat = if gccFloat != null then " --with-float=${gccFloat}" else ""; 136 | withMode = if gccMode != null then " --with-mode=${gccMode}" else ""; 137 | in 138 | "--target=${cross.config}" + 139 | withArch + 140 | withCpu + 141 | withAbi + 142 | withFpu + 143 | withFloat + 144 | withMode + 145 | (if crossMingw && crossStageStatic then 146 | " --with-headers=${libcCross}/include" + 147 | " --with-gcc" + 148 | " --with-gnu-as" + 149 | " --with-gnu-ld" + 150 | " --with-gnu-ld" + 151 | " --disable-shared" + 152 | " --disable-nls" + 153 | " --disable-debug" + 154 | " --enable-sjlj-exceptions" + 155 | " --enable-threads=win32" + 156 | " --disable-win32-registry" 157 | else if crossStageStatic then 158 | " --disable-libssp --disable-nls" + 159 | " --without-headers" + 160 | " --disable-threads " + 161 | " --disable-libmudflap " + 162 | " --disable-libgomp " + 163 | " --disable-libquadmath" + 164 | " --disable-shared" + 165 | " --disable-decimal-float" # libdecnumber requires libc 166 | else 167 | " --with-headers=${libcCross}/include" + 168 | " --enable-__cxa_atexit" + 169 | " --enable-long-long" + 170 | (if crossMingw then 171 | " --enable-threads=win32" + 172 | " --enable-sjlj-exceptions" + 173 | " --enable-hash-synchronization" + 174 | " --disable-libssp" + 175 | " --disable-nls" + 176 | " --with-dwarf2" + 177 | # I think noone uses shared gcc libs in mingw, so we better do the same. 178 | # In any case, mingw32 g++ linking is broken by default with shared libs, 179 | # unless adding "-lsupc++" to any linking command. I don't know why. 
180 | " --disable-shared" + 181 | (if cross.config == "x86_64-w64-mingw32" then 182 | # To keep ABI compatibility with upstream mingw-w64 183 | " --enable-fully-dynamic-string" 184 | else "") 185 | else (if cross.libc == "uclibc" then 186 | # In uclibc cases, libgomp needs an additional '-ldl' 187 | # and as I don't know how to pass it, I disable libgomp. 188 | " --disable-libgomp" else "") + 189 | " --enable-threads=posix" + 190 | " --enable-nls" + 191 | " --disable-decimal-float") # No final libdecnumber (it may work only in 386) 192 | ); 193 | stageNameAddon = if crossStageStatic then "-stage-static" else 194 | "-stage-final"; 195 | crossNameAddon = if cross != null then "-${cross.config}" + stageNameAddon else ""; 196 | 197 | bootstrap = cross == null && !stdenv.isArm && !stdenv.isMips; 198 | 199 | in 200 | 201 | # We need all these X libraries when building AWT with GTK+. 202 | assert gtk != null -> (filter (x: x == null) xlibs) == []; 203 | 204 | stdenv.mkDerivation ({ 205 | name = "${name}${if stripped then "" else "-debug"}-${version}" + crossNameAddon; 206 | 207 | builder = ./builder.sh; 208 | 209 | src = fetchurl { 210 | url = "mirror://gnu/gcc/gcc-${version}/gcc-${version}.tar.bz2"; 211 | sha256 = "1hx9h64ivarlzi4hxvq42as5m9vlr5cyzaaq4gzj4i619zmkfz1g"; 212 | }; 213 | 214 | inherit patches; 215 | 216 | postPatch = 217 | if (stdenv.isGNU 218 | || (libcCross != null # e.g., building `gcc.crossDrv' 219 | && libcCross ? crossConfig 220 | && libcCross.crossConfig == "i586-pc-gnu") 221 | || (crossGNU && libcCross != null)) 222 | then 223 | # On GNU/Hurd glibc refers to Hurd & Mach headers and libpthread is not 224 | # in glibc, so add the right `-I' flags to the default spec string. 
225 | assert libcCross != null -> libpthreadCross != null; 226 | let 227 | libc = if libcCross != null then libcCross else stdenv.glibc; 228 | gnu_h = "gcc/config/gnu.h"; 229 | extraCPPDeps = 230 | libc.propagatedBuildInputs 231 | ++ lib.optional (libpthreadCross != null) libpthreadCross 232 | ++ lib.optional (libpthread != null) libpthread; 233 | extraCPPSpec = 234 | concatStrings (intersperse " " 235 | (map (x: "-I${x}/include") extraCPPDeps)); 236 | extraLibSpec = 237 | if libpthreadCross != null 238 | then "-L${libpthreadCross}/lib ${libpthreadCross.TARGET_LDFLAGS}" 239 | else "-L${libpthread}/lib"; 240 | in 241 | '' echo "augmenting \`CPP_SPEC' in \`${gnu_h}' with \`${extraCPPSpec}'..." 242 | sed -i "${gnu_h}" \ 243 | -es'|CPP_SPEC *"\(.*\)$|CPP_SPEC "${extraCPPSpec} \1|g' 244 | 245 | echo "augmenting \`LIB_SPEC' in \`${gnu_h}' with \`${extraLibSpec}'..." 246 | sed -i "${gnu_h}" \ 247 | -es'|LIB_SPEC *"\(.*\)$|LIB_SPEC "${extraLibSpec} \1|g' 248 | 249 | echo "setting \`NATIVE_SYSTEM_HEADER_DIR' and \`STANDARD_INCLUDE_DIR' to \`${libc}/include'..." 250 | sed -i "${gnu_h}" \ 251 | -es'|#define STANDARD_INCLUDE_DIR.*$|#define STANDARD_INCLUDE_DIR "${libc}/include"|g' 252 | '' 253 | else if cross != null || stdenv.gcc.libc != null then 254 | # On NixOS, use the right path to the dynamic linker instead of 255 | # `/lib/ld*.so'. 256 | let 257 | libc = if libcCross != null then libcCross else stdenv.gcc.libc; 258 | in 259 | '' echo "fixing the \`GLIBC_DYNAMIC_LINKER' and \`UCLIBC_DYNAMIC_LINKER' macros..." 260 | for header in "gcc/config/"*-gnu.h "gcc/config/"*"/"*.h 261 | do 262 | grep -q LIBC_DYNAMIC_LINKER "$header" || continue 263 | echo " fixing \`$header'..." 
264 | sed -i "$header" \ 265 | -e 's|define[[:blank:]]*\([UCG]\+\)LIBC_DYNAMIC_LINKER\([0-9]*\)[[:blank:]]"\([^\"]\+\)"$|define \1LIBC_DYNAMIC_LINKER\2 "${libc}\3"|g' 266 | done 267 | '' 268 | else null; 269 | 270 | inherit noSysDirs staticCompiler langJava crossStageStatic 271 | libcCross crossMingw; 272 | 273 | nativeBuildInputs = [ texinfo which gettext ] 274 | ++ (optional (perl != null) perl) 275 | ++ (optional javaAwtGtk pkgconfig); 276 | 277 | buildInputs = [ gmp mpfr mpc libelf ] 278 | ++ (optional (ppl != null) ppl) 279 | ++ (optional (cloog != null) cloog) 280 | ++ (optional (zlib != null) zlib) 281 | ++ (optionals langJava [ boehmgc zip unzip ]) 282 | ++ (optionals javaAwtGtk ([ gtk libart_lgpl ] ++ xlibs)) 283 | ++ (optionals (cross != null) [binutilsCross]) 284 | ++ (optionals langAda [gnatboot]) 285 | ++ (optionals langVhdl [gnat]) 286 | 287 | # The builder relies on GNU sed (for instance, Darwin's `sed' fails with 288 | # "-i may not be used with stdin"), and `stdenvNative' doesn't provide it. 289 | ++ (optional stdenv.isDarwin gnused) 290 | ; 291 | 292 | NIX_LDFLAGS = lib.optionalString stdenv.isSunOS "-lm -ldl"; 293 | 294 | preConfigure = '' 295 | configureFlagsArray=( 296 | ${lib.optionalString (ppl != null && ppl ? 
dontDisableStatic && ppl.dontDisableStatic) 297 | "'--with-host-libstdcxx=-lstdc++ -lgcc_s'"} 298 | ${lib.optionalString (ppl != null && stdenv.isSunOS) 299 | "\"--with-host-libstdcxx=-Wl,-rpath,\$prefix/lib/amd64 -lstdc++\" 300 | \"--with-boot-ldflags=-L../prev-x86_64-pc-solaris2.11/libstdc++-v3/src/.libs\""} 301 | ); 302 | ${lib.optionalString (stdenv.isSunOS && stdenv.is64bit) 303 | '' 304 | export NIX_LDFLAGS=`echo $NIX_LDFLAGS | sed -e s~$prefix/lib~$prefix/lib/amd64~g` 305 | export LDFLAGS_FOR_TARGET="-Wl,-rpath,$prefix/lib/amd64 $LDFLAGS_FOR_TARGET" 306 | export CXXFLAGS_FOR_TARGET="-Wl,-rpath,$prefix/lib/amd64 $CXXFLAGS_FOR_TARGET" 307 | export CFLAGS_FOR_TARGET="-Wl,-rpath,$prefix/lib/amd64 $CFLAGS_FOR_TARGET" 308 | ''} 309 | ''; 310 | 311 | # 'iant' at #go-nuts@freenode, gccgo maintainer, said that 312 | # they have a bug in 4.7.1 if adding "--disable-static" 313 | dontDisableStatic = langGo || staticCompiler; 314 | 315 | configureFlags = " 316 | ${if stdenv.isSunOS then 317 | " --enable-long-long --enable-libssp --enable-threads=posix --disable-nls --enable-__cxa_atexit " + 318 | # On Illumos/Solaris GNU as is preferred 319 | " --with-gnu-as --without-gnu-ld " 320 | else ""} 321 | --enable-lto 322 | ${if enableMultilib then "" else "--disable-multilib"} 323 | ${if enableShared then "" else "--disable-shared"} 324 | ${if enablePlugin then "--enable-plugin" else "--disable-plugin"} 325 | ${if ppl != null then "--with-ppl=${ppl} --disable-ppl-version-check" else ""} 326 | ${if cloog != null then 327 | "--with-cloog=${cloog} --disable-cloog-version-check --enable-cloog-backend=isl" 328 | else ""} 329 | ${if langJava then 330 | "--with-ecj-jar=${javaEcj} " + 331 | 332 | # Follow Sun's layout for the convenience of IcedTea/OpenJDK. See 333 | # . 
334 | "--enable-java-home --with-java-home=\${prefix}/lib/jvm/jre " 335 | else ""} 336 | ${if javaAwtGtk then "--enable-java-awt=gtk" else ""} 337 | ${if langJava && javaAntlr != null then "--with-antlr-jar=${javaAntlr}" else ""} 338 | --with-gmp=${gmp} 339 | --with-mpfr=${mpfr} 340 | --with-mpc=${mpc} 341 | ${if libelf != null then "--with-libelf=${libelf}" else ""} 342 | --disable-libstdcxx-pch 343 | --without-included-gettext 344 | --with-system-zlib 345 | --enable-languages=${ 346 | concatStrings (intersperse "," 347 | ( optional langC "c" 348 | ++ optional langCC "c++" 349 | ++ optional langFortran "fortran" 350 | ++ optional langJava "java" 351 | ++ optional langAda "ada" 352 | ++ optional langVhdl "vhdl" 353 | ++ optional langGo "go" 354 | ) 355 | ) 356 | } 357 | ${if (stdenv ? glibc && cross == null) 358 | then " --with-native-system-header-dir=${stdenv.glibc}/include" 359 | else ""} 360 | ${if langAda then " --enable-libada" else ""} 361 | ${if cross == null && stdenv.isi686 then "--with-arch=i686" else ""} 362 | ${if cross != null then crossConfigureFlags else ""} 363 | ${if !bootstrap then "--disable-bootstrap" else ""} 364 | ${if cross == null then platformFlags else ""} 365 | "; 366 | 367 | targetConfig = if cross != null then cross.config else null; 368 | 369 | buildFlags = if bootstrap then 370 | (if profiledCompiler then "profiledbootstrap" else "bootstrap") 371 | else ""; 372 | 373 | installTargets = 374 | if stripped 375 | then "install-strip" 376 | else "install"; 377 | 378 | crossAttrs = let 379 | xgccArch = lib.attrByPath [ "gcc" "arch" ] null stdenv.cross; 380 | xgccCpu = lib.attrByPath [ "gcc" "cpu" ] null stdenv.cross; 381 | xgccAbi = lib.attrByPath [ "gcc" "abi" ] null stdenv.cross; 382 | xgccFpu = lib.attrByPath [ "gcc" "fpu" ] null stdenv.cross; 383 | xgccFloat = lib.attrByPath [ "gcc" "float" ] null stdenv.cross; 384 | xwithArch = if xgccArch != null then " --with-arch=${xgccArch}" else ""; 385 | xwithCpu = if xgccCpu != null then " 
--with-cpu=${xgccCpu}" else ""; 386 | xwithAbi = if xgccAbi != null then " --with-abi=${xgccAbi}" else ""; 387 | xwithFpu = if xgccFpu != null then " --with-fpu=${xgccFpu}" else ""; 388 | xwithFloat = if xgccFloat != null then " --with-float=${xgccFloat}" else ""; 389 | in { 390 | AR = "${stdenv.cross.config}-ar"; 391 | LD = "${stdenv.cross.config}-ld"; 392 | CC = "${stdenv.cross.config}-gcc"; 393 | CXX = "${stdenv.cross.config}-gcc"; 394 | AR_FOR_TARGET = "${stdenv.cross.config}-ar"; 395 | LD_FOR_TARGET = "${stdenv.cross.config}-ld"; 396 | CC_FOR_TARGET = "${stdenv.cross.config}-gcc"; 397 | NM_FOR_TARGET = "${stdenv.cross.config}-nm"; 398 | CXX_FOR_TARGET = "${stdenv.cross.config}-g++"; 399 | # If we are making a cross compiler, cross != null 400 | NIX_GCC_CROSS = if cross == null then "${stdenv.gccCross}" else ""; 401 | dontStrip = true; 402 | configureFlags = '' 403 | ${if enableMultilib then "" else "--disable-multilib"} 404 | ${if enableShared then "" else "--disable-shared"} 405 | ${if ppl != null then "--with-ppl=${ppl.crossDrv}" else ""} 406 | ${if cloog != null then "--with-cloog=${cloog.crossDrv} --enable-cloog-backend=isl" else ""} 407 | ${if langJava then "--with-ecj-jar=${javaEcj.crossDrv}" else ""} 408 | ${if javaAwtGtk then "--enable-java-awt=gtk" else ""} 409 | ${if langJava && javaAntlr != null then "--with-antlr-jar=${javaAntlr.crossDrv}" else ""} 410 | --with-gmp=${gmp.crossDrv} 411 | --with-mpfr=${mpfr.crossDrv} 412 | --disable-libstdcxx-pch 413 | --without-included-gettext 414 | --with-system-zlib 415 | --enable-languages=${ 416 | concatStrings (intersperse "," 417 | ( optional langC "c" 418 | ++ optional langCC "c++" 419 | ++ optional langFortran "fortran" 420 | ++ optional langJava "java" 421 | ++ optional langAda "ada" 422 | ++ optional langVhdl "vhdl" 423 | ++ optional langGo "go" 424 | ) 425 | ) 426 | } 427 | ${if langAda then " --enable-libada" else ""} 428 | --target=${stdenv.cross.config} 429 | ${xwithArch} 430 | ${xwithCpu} 431 | 
${xwithAbi} 432 | ${xwithFpu} 433 | ${xwithFloat} 434 | ''; 435 | buildFlags = ""; 436 | }; 437 | 438 | 439 | # Needed for the cross compilation to work 440 | AR = "ar"; 441 | LD = "ld"; 442 | # http://gcc.gnu.org/install/specific.html#x86-64-x-solaris210 443 | CC = if stdenv.system == "x86_64-solaris" then "gcc -m64" 444 | else "gcc"; 445 | 446 | # Setting $CPATH and $LIBRARY_PATH to make sure both `gcc' and `xgcc' find 447 | # the library headers and binaries, regarless of the language being 448 | # compiled. 449 | 450 | # Note: When building the Java AWT GTK+ peer, the build system doesn't 451 | # honor `--with-gmp' et al., e.g., when building 452 | # `libjava/classpath/native/jni/java-math/gnu_java_math_GMP.c', so we just 453 | # add them to $CPATH and $LIBRARY_PATH in this case. 454 | # 455 | # Likewise, the LTO code doesn't find zlib. 456 | 457 | CPATH = concatStrings 458 | (intersperse ":" (map (x: x + "/include") 459 | (optionals (zlib != null) [ zlib ] 460 | ++ optionals langJava [ boehmgc ] 461 | ++ optionals javaAwtGtk xlibs 462 | ++ optionals javaAwtGtk [ gmp mpfr ] 463 | ++ optional (libpthread != null) libpthread 464 | ++ optional (libpthreadCross != null) libpthreadCross 465 | 466 | # On GNU/Hurd glibc refers to Mach & Hurd 467 | # headers. 
468 | ++ optionals (libcCross != null && 469 | hasAttr "propagatedBuildInputs" libcCross) 470 | libcCross.propagatedBuildInputs))); 471 | 472 | LIBRARY_PATH = concatStrings 473 | (intersperse ":" (map (x: x + "/lib") 474 | (optionals (zlib != null) [ zlib ] 475 | ++ optionals langJava [ boehmgc ] 476 | ++ optionals javaAwtGtk xlibs 477 | ++ optionals javaAwtGtk [ gmp mpfr ] 478 | ++ optional (libpthread != null) libpthread))); 479 | 480 | EXTRA_TARGET_CFLAGS = 481 | if cross != null && libcCross != null 482 | then "-idirafter ${libcCross}/include" 483 | else null; 484 | 485 | EXTRA_TARGET_LDFLAGS = 486 | if cross != null && libcCross != null 487 | then "-B${libcCross}/lib -Wl,-L${libcCross}/lib" + 488 | (optionalString (libpthreadCross != null) 489 | " -L${libpthreadCross}/lib -Wl,${libpthreadCross.TARGET_LDFLAGS}") 490 | else null; 491 | 492 | passthru = { inherit langC langCC langAda langFortran langVhdl 493 | langGo enableMultilib version; }; 494 | 495 | inherit enableParallelBuilding; 496 | 497 | meta = { 498 | homepage = "http://gcc.gnu.org/"; 499 | license = "GPLv3+"; # runtime support libraries are typically LGPLv3+ 500 | description = "GNU Compiler Collection, version ${version}" 501 | + (if stripped then "" else " (with debugging info)"); 502 | 503 | longDescription = '' 504 | The GNU Compiler Collection includes compiler front ends for C, C++, 505 | Objective-C, Fortran, OpenMP for C/C++/Fortran, Java, and Ada, as well 506 | as libraries for these languages (libstdc++, libgcj, libgomp,...). 507 | 508 | GCC development is a part of the GNU Project, aiming to improve the 509 | compiler used in the GNU system including the GNU/Linux variant. 510 | ''; 511 | 512 | maintainers = [ 513 | lib.maintainers.ludo 514 | lib.maintainers.viric 515 | lib.maintainers.shlevy 516 | ]; 517 | 518 | # Volunteers needed for the {Cyg,Dar}win ports of *PPL. 
519 | # gnatboot is not available out of linux platforms, so we disable the darwin build 520 | # for the gnat (ada compiler). 521 | platforms = lib.platforms.linux ++ optionals (langAda == false && libelf == null) [ "i686-darwin" ]; 522 | }; 523 | } 524 | 525 | // optionalAttrs (cross != null && cross.libc == "msvcrt" && crossStageStatic) { 526 | makeFlags = [ "all-gcc" "all-target-libgcc" ]; 527 | installTargets = "install-gcc install-target-libgcc"; 528 | } 529 | 530 | 531 | # Strip kills static libs of other archs (hence cross != null) 532 | // optionalAttrs (!stripped || cross != null) { dontStrip = true; NIX_STRIP_DEBUG = 0; } 533 | ) 534 | -------------------------------------------------------------------------------- /pkgs/gcc-4.7/gfortran-driving.patch: -------------------------------------------------------------------------------- 1 | This patch fixes interaction with Libtool. 2 | See , for details. 3 | 4 | --- a/gcc/fortran/gfortranspec.c 5 | +++ b/gcc/fortran/gfortranspec.c 6 | @@ -461,8 +461,15 @@ For more information about these matters, see the file named COPYING\n\n")); 7 | { 8 | fprintf (stderr, _("Driving:")); 9 | for (i = 0; i < g77_newargc; i++) 10 | + { 11 | + if (g77_new_decoded_options[i].opt_index == OPT_l) 12 | + /* Make sure no white space is inserted after `-l'. 
*/ 13 | + fprintf (stderr, " -l%s", 14 | + g77_new_decoded_options[i].canonical_option[1]); 15 | + else 16 | fprintf (stderr, " %s", 17 | g77_new_decoded_options[i].orig_option_with_args_text); 18 | + } 19 | fprintf (stderr, "\n"); 20 | } 21 | -------------------------------------------------------------------------------- /pkgs/gcc-4.7/gnat-cflags.patch: -------------------------------------------------------------------------------- 1 | diff --git a/libada/Makefile.in b/libada/Makefile.in 2 | index f5057a0..337e0c6 100644 3 | --- a/libada/Makefile.in 4 | +++ b/libada/Makefile.in 5 | @@ -55,7 +55,7 @@ GCC_WARN_CFLAGS = $(LOOSE_WARN) 6 | WARN_CFLAGS = @warn_cflags@ 7 | 8 | TARGET_LIBGCC2_CFLAGS= 9 | -GNATLIBCFLAGS= -g -O2 10 | +GNATLIBCFLAGS= -g -O2 $(CFLAGS) 11 | GNATLIBCFLAGS_FOR_C = $(GNATLIBCFLAGS) $(TARGET_LIBGCC2_CFLAGS) -fexceptions \ 12 | -DIN_RTS @have_getipinfo@ 13 | 14 | --- a/gcc/ada/gcc-interface/Makefile.in 15 | +++ b/gcc/ada/gcc-interface/Makefile.in 16 | @@ -105,7 +105,7 @@ ADAFLAGS = -W -Wall -gnatpg -gnata 17 | SOME_ADAFLAGS =-gnata 18 | FORCE_DEBUG_ADAFLAGS = -g 19 | GNATLIBFLAGS = -gnatpg -nostdinc 20 | -GNATLIBCFLAGS = -g -O2 21 | +GNATLIBCFLAGS = -g -O2 $(CFLAGS_FOR_TARGET) 22 | # Pretend that _Unwind_GetIPInfo is available for the target by default. This 23 | # should be autodetected during the configuration of libada and passed down to 24 | # here, but we need something for --disable-libada and hope for the best. 25 | @@ -193,7 +193,7 @@ RTSDIR = rts$(subst /,_,$(MULTISUBDIR)) 26 | # Link flags used to build gnat tools. By default we prefer to statically 27 | # link with libgcc to avoid a dependency on shared libgcc (which is tricky 28 | # to deal with as it may conflict with the libgcc provided by the system). 29 | -GCC_LINK_FLAGS=-static-libgcc 30 | +GCC_LINK_FLAGS=-static-libgcc $(CFLAGS_FOR_TARGET) 31 | 32 | # End of variables for you to override. 
33 | 34 | -------------------------------------------------------------------------------- /pkgs/gcc-4.7/java-jvgenmain-link.patch: -------------------------------------------------------------------------------- 1 | The `jvgenmain' executable must be linked against `vec.o', among others, 2 | since it uses its vector API. 3 | 4 | --- gcc-4.3.3/gcc/java/Make-lang.in 2008-12-05 00:00:19.000000000 +0100 5 | +++ gcc-4.3.3/gcc/java/Make-lang.in 2009-07-03 16:11:41.000000000 +0200 6 | @@ -109,9 +109,9 @@ jcf-dump$(exeext): $(JCFDUMP_OBJS) $(LIB 7 | $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o $@ $(JCFDUMP_OBJS) \ 8 | $(CPPLIBS) $(ZLIB) $(LDEXP_LIB) $(LIBS) 9 | 10 | -jvgenmain$(exeext): $(JVGENMAIN_OBJS) $(LIBDEPS) 11 | +jvgenmain$(exeext): $(JVGENMAIN_OBJS) $(LIBDEPS) $(BUILD_RTL) 12 | rm -f $@ 13 | - $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o $@ $(JVGENMAIN_OBJS) $(LIBS) 14 | + $(CC) $(ALL_CFLAGS) $(LDFLAGS) -o $@ $(JVGENMAIN_OBJS) $(BUILD_RTL) $(LIBS) 15 | 16 | # 17 | # Build hooks: 18 | -------------------------------------------------------------------------------- /pkgs/gcc-4.7/libstdc++-target.patch: -------------------------------------------------------------------------------- 1 | Patch to make the target libraries 'configure' scripts find the proper CPP. 2 | I noticed that building the mingw32 cross compiler. 3 | Looking at the build script for mingw in archlinux, I think that only nixos 4 | needs this patch. I don't know why. 
5 | diff --git a/Makefile.in b/Makefile.in 6 | index 93f66b6..d691917 100644 7 | --- a/Makefile.in 8 | +++ b/Makefile.in 9 | @@ -266,6 +266,7 @@ BASE_TARGET_EXPORTS = \ 10 | AR="$(AR_FOR_TARGET)"; export AR; \ 11 | AS="$(COMPILER_AS_FOR_TARGET)"; export AS; \ 12 | CC="$(CC_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CC; \ 13 | + CPP="$(CC_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E"; export CC; \ 14 | CFLAGS="$(CFLAGS_FOR_TARGET)"; export CFLAGS; \ 15 | CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ 16 | CPPFLAGS="$(CPPFLAGS_FOR_TARGET)"; export CPPFLAGS; \ 17 | @@ -291,11 +292,13 @@ BASE_TARGET_EXPORTS = \ 18 | RAW_CXX_TARGET_EXPORTS = \ 19 | $(BASE_TARGET_EXPORTS) \ 20 | CXX_FOR_TARGET="$(RAW_CXX_FOR_TARGET)"; export CXX_FOR_TARGET; \ 21 | - CXX="$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; 22 | + CXX="$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; \ 23 | + CXXCPP="$(RAW_CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E"; export CXX; 24 | 25 | NORMAL_TARGET_EXPORTS = \ 26 | $(BASE_TARGET_EXPORTS) \ 27 | - CXX="$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; 28 | + CXX="$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS"; export CXX; \ 29 | + CXXCPP="$(CXX_FOR_TARGET) $(XGCC_FLAGS_FOR_TARGET) $$TFLAGS -E"; export CXX; 30 | 31 | # Where to find GMP 32 | HOST_GMPLIBS = @gmplibs@ 33 | -------------------------------------------------------------------------------- /pkgs/gcc-4.7/no-sys-dirs.patch: -------------------------------------------------------------------------------- 1 | diff -ru gcc-4.3.1-orig/gcc/cppdefault.c gcc-4.3.1/gcc/cppdefault.c 2 | --- gcc-4.3.1-orig/gcc/cppdefault.c 2007-07-26 10:37:01.000000000 +0200 3 | +++ gcc-4.3.1/gcc/cppdefault.c 2008-06-25 17:48:23.000000000 +0200 4 | @@ -41,6 +41,10 @@ 5 | # undef CROSS_INCLUDE_DIR 6 | #endif 7 | 8 | +#undef LOCAL_INCLUDE_DIR 9 | +#undef SYSTEM_INCLUDE_DIR 10 | +#undef STANDARD_INCLUDE_DIR 11 | + 12 | const struct 
default_include cpp_include_defaults[] 13 | #ifdef INCLUDE_DEFAULTS 14 | = INCLUDE_DEFAULTS; 15 | diff -ru gcc-4.3.1-orig/gcc/gcc.c gcc-4.3.1/gcc/gcc.c 16 | --- gcc-4.3.1-orig/gcc/gcc.c 2008-03-02 23:55:19.000000000 +0100 17 | +++ gcc-4.3.1/gcc/gcc.c 2008-06-25 17:52:53.000000000 +0200 18 | @@ -1478,10 +1478,10 @@ 19 | /* Default prefixes to attach to command names. */ 20 | 21 | #ifndef STANDARD_STARTFILE_PREFIX_1 22 | -#define STANDARD_STARTFILE_PREFIX_1 "/lib/" 23 | +#define STANDARD_STARTFILE_PREFIX_1 "" 24 | #endif 25 | #ifndef STANDARD_STARTFILE_PREFIX_2 26 | -#define STANDARD_STARTFILE_PREFIX_2 "/usr/lib/" 27 | +#define STANDARD_STARTFILE_PREFIX_2 "" 28 | #endif 29 | 30 | #ifdef CROSS_DIRECTORY_STRUCTURE /* Don't use these prefixes for a cross compiler. */ 31 | --- gcc-4.3.1-orig/gcc/Makefile.in 2008-05-11 20:54:15.000000000 +0200 32 | +++ gcc-4.3.1/gcc/Makefile.in 2008-06-25 17:48:23.000000000 +0200 33 | @@ -3277,7 +3281,7 @@ 34 | -DGPLUSPLUS_INCLUDE_DIR=\"$(gcc_gxx_include_dir)\" \ 35 | -DGPLUSPLUS_TOOL_INCLUDE_DIR=\"$(gcc_gxx_include_dir)/$(target_noncanonical)\" \ 36 | -DGPLUSPLUS_BACKWARD_INCLUDE_DIR=\"$(gcc_gxx_include_dir)/backward\" \ 37 | - -DLOCAL_INCLUDE_DIR=\"$(local_includedir)\" \ 38 | + -DLOCAL_INCLUDE_DIR=\"/no-such-dir\" \ 39 | -DCROSS_INCLUDE_DIR=\"$(CROSS_SYSTEM_HEADER_DIR)\" \ 40 | -DTOOL_INCLUDE_DIR=\"$(gcc_tooldir)/include\" \ 41 | -DPREFIX=\"$(prefix)/\" \ 42 | -------------------------------------------------------------------------------- /pkgs/gcc-4.7/parallel-bconfig-4.7.patch: -------------------------------------------------------------------------------- 1 | diff --git a/gcc/Makefile.in b/gcc/Makefile.in 2 | index 0f6735a..ba93e9b 100644 3 | --- a/gcc/Makefile.in 4 | +++ b/gcc/Makefile.in 5 | @@ -3904,21 +3904,21 @@ build/genflags.o : genflags.c $(RTL_BASE_H) $(OBSTACK_H) $(BCONFIG_H) \ 6 | $(SYSTEM_H) coretypes.h $(GTM_H) errors.h $(READ_MD_H) gensupport.h 7 | build/gengenrtl.o : gengenrtl.c $(BCONFIG_H) $(SYSTEM_H) rtl.def 8 | 
gengtype-lex.o build/gengtype-lex.o : gengtype-lex.c gengtype.h $(SYSTEM_H) 9 | -gengtype-lex.o: $(CONFIG_H) 10 | +gengtype-lex.o: $(CONFIG_H) $(BCONFIG_H) 11 | build/gengtype-lex.o: $(BCONFIG_H) 12 | gengtype-parse.o build/gengtype-parse.o : gengtype-parse.c gengtype.h \ 13 | $(SYSTEM_H) 14 | -gengtype-parse.o: $(CONFIG_H) 15 | +gengtype-parse.o: $(CONFIG_H) $(BCONFIG_H) 16 | build/gengtype-parse.o: $(BCONFIG_H) 17 | gengtype-state.o build/gengtype-state.o: gengtype-state.c $(SYSTEM_H) \ 18 | gengtype.h errors.h double-int.h version.h $(HASHTAB_H) $(OBSTACK_H) \ 19 | $(XREGEX_H) 20 | -gengtype-state.o: $(CONFIG_H) 21 | +gengtype-state.o: $(CONFIG_H) $(BCONFIG_H) 22 | build/gengtype-state.o: $(BCONFIG_H) 23 | gengtype.o build/gengtype.o : gengtype.c $(SYSTEM_H) gengtype.h \ 24 | rtl.def insn-notes.def errors.h double-int.h version.h $(HASHTAB_H) \ 25 | $(OBSTACK_H) $(XREGEX_H) 26 | -gengtype.o: $(CONFIG_H) 27 | +gengtype.o: $(CONFIG_H) $(BCONFIG_H) 28 | build/gengtype.o: $(BCONFIG_H) 29 | build/genmddeps.o: genmddeps.c $(BCONFIG_H) $(SYSTEM_H) coretypes.h \ 30 | errors.h $(READ_MD_H) 31 | -------------------------------------------------------------------------------- /pkgs/gecko/default.nix: -------------------------------------------------------------------------------- 1 | { geckoSrc ? null, lib 2 | , stdenv, fetchFromGitHub, pythonFull, which, autoconf213, m4 3 | , perl, unzip, zip, gnumake, yasm, pkgconfig, xlibs, gnome2, pango, freetype, fontconfig, cairo 4 | , dbus, dbus_glib, alsaLib, libpulseaudio 5 | , gtk3, glib, gobjectIntrospection, gdk_pixbuf, atk, gtk2 6 | , git, mercurial, openssl, cmake, procps 7 | , libnotify 8 | , valgrind, gdb, rr 9 | , inotify-tools 10 | , setuptools 11 | , rust # rust & cargo bundled. (otheriwse use pkgs.rust.{rustc,cargo}) 12 | , buildFHSUserEnv # Build a FHS environment with all Gecko dependencies. 
13 | , llvm, llvmPackages, nasm 14 | , ccache 15 | 16 | , zlib, xorg 17 | , rust-cbindgen 18 | , nodejs 19 | , jsdoc 20 | , fzf # needed by "mack try fuzzy" 21 | }: 22 | 23 | let 24 | 25 | inherit (lib) updateFromGitHub importJSON optionals inNixShell; 26 | 27 | gcc = if stdenv.cc.isGNU then stdenv.cc.cc else stdenv.cc.cc.stdenv.cc.cc; 28 | 29 | # Gecko sources are huge, we do not want to import them in the nix-store when 30 | # we use this expression for making a build environment. 31 | src = 32 | if inNixShell then 33 | null 34 | else if geckoSrc == null then 35 | fetchFromGitHub (importJSON ./source.json) 36 | else 37 | geckoSrc; 38 | 39 | version = "HEAD"; # XXX: builtins.readFile "${src}/browser/config/version.txt"; 40 | 41 | buildInputs = [ 42 | 43 | # Expected by "mach" 44 | pythonFull setuptools which autoconf213 m4 45 | 46 | # Expected by the configure script 47 | perl unzip zip gnumake yasm pkgconfig 48 | 49 | xlibs.libICE xlibs.libSM xlibs.libX11 xlibs.libXau xlibs.libxcb 50 | xlibs.libXdmcp xlibs.libXext xlibs.libXt xlibs.libXtst 51 | xlibs.libXcomposite 52 | xlibs.libXfixes 53 | xlibs.libXdamage xlibs.libXrender 54 | ] ++ (if xlibs ? xproto then [ 55 | xlibs.damageproto xlibs.printproto xlibs.kbproto 56 | xlibs.renderproto xlibs.xextproto xlibs.xproto 57 | xlibs.compositeproto xlibs.fixesproto 58 | ] else [ 59 | xorg.xorgproto 60 | ]) ++ [ 61 | gnome2.libart_lgpl gnome2.libbonobo gnome2.libbonoboui 62 | gnome2.libgnome gnome2.libgnomecanvas gnome2.libgnomeui 63 | gnome2.libIDL 64 | 65 | pango freetype fontconfig cairo 66 | 67 | dbus dbus_glib 68 | 69 | alsaLib libpulseaudio 70 | 71 | gtk3 glib gobjectIntrospection gdk_pixbuf atk 72 | gtk2 gnome2.GConf 73 | 74 | rust 75 | 76 | # For building bindgen 77 | # Building bindgen is now done with the extra options added by genMozConfig 78 | # shellHook, do not include clang directly in order to avoid messing up with 79 | # the choices of the compilers. 
80 | 81 | # clang 82 | llvm 83 | 84 | # mach mochitest 85 | procps 86 | 87 | # "mach vendor rust" wants to list modified files by using the vcs. 88 | git mercurial 89 | 90 | # needed for compiling cargo-vendor and its dependencies 91 | openssl cmake 92 | 93 | # Useful for getting notification at the end of the build. 94 | libnotify 95 | 96 | # cbindgen is used to generate C bindings for WebRender. 97 | rust-cbindgen 98 | 99 | # nasm is used to build libdav1d. 100 | nasm 101 | 102 | # NodeJS is used for tooling around JS development. 103 | nodejs 104 | 105 | # Used for building documentation. 106 | # jsdoc 107 | 108 | ] ++ optionals inNixShell [ 109 | valgrind gdb ccache 110 | (if stdenv.isAarch64 then null else rr) 111 | fzf # needed by "mach try fuzzy" 112 | inotify-tools # Workaround download of prebuilt binaries. 113 | ]; 114 | 115 | # bindgen.configure now has a rule to check that with-libclang-path matches CC 116 | # or CXX. Default to the stdenv compiler if we are compiling with clang. 117 | clang_path = 118 | if stdenv.cc.isGNU then "${llvmPackages.clang}/bin/clang" 119 | else "${stdenv.cc}/bin/cc"; 120 | libclang_path = 121 | if stdenv.cc.isGNU then "${llvmPackages.clang.cc.lib}/lib" 122 | else "${stdenv.cc.cc.lib}/lib"; 123 | 124 | genMozConfig = '' 125 | cxxLib=$( echo -n ${gcc}/include/c++/* ) 126 | archLib=$cxxLib/$( ${gcc}/bin/gcc -dumpmachine ) 127 | 128 | cat - > $MOZCONFIG <> $MOZCONFIG " 193 | # . $src/build/mozconfig.common 194 | 195 | ac_add_options --enable-application=browser 196 | mk_add_options MOZ_OBJDIR=$builddir 197 | ac_add_options --prefix=$out 198 | ac_add_options --enable-official-branding 199 | " 200 | ''; 201 | 202 | AUTOCONF = "${autoconf213}/bin/autoconf"; 203 | 204 | buildPhase = '' 205 | cd $builddir 206 | $src/mach build 207 | ''; 208 | 209 | installPhase = '' 210 | cd $builddir 211 | $src/mach install 212 | ''; 213 | 214 | # TODO: are there tests we would like to run? or should we package them separately? 
215 | doCheck = false; 216 | doInstallCheck = false; 217 | 218 | # This is for debugging purposes, go to hell damn wrapper which are removing 219 | # all I need for debugging. 220 | hardeningDisable = [ "all" ]; 221 | 222 | passthru.updateScript = updateFromGitHub { 223 | owner = "mozilla"; 224 | repo = "gecko-dev"; 225 | branch = "master"; 226 | path = "pkgs/gecko/source.json"; 227 | }; 228 | passthru.fhs = fhs; # gecko.x86_64-linux.gcc.fhs.env 229 | } 230 | -------------------------------------------------------------------------------- /pkgs/gecko/source.json: -------------------------------------------------------------------------------- 1 | { 2 | "owner": "mozilla", 3 | "repo": "gecko-dev", 4 | "rev": "fee636af734a0ce6dc7335691cc94664bafc385d", 5 | "sha256": "0nnkqmglbi2znkz1avnyn064i5hngvsqrmhw8ccg6g4ga9bac8fv" 6 | } 7 | -------------------------------------------------------------------------------- /pkgs/git-cinnabar/default.nix: -------------------------------------------------------------------------------- 1 | { stdenv, fetchFromGitHub, autoconf 2 | , zlib 3 | , python 4 | , perl 5 | , gettext 6 | , git 7 | , mercurial 8 | , curl 9 | }: 10 | 11 | # NOTE: git-cinnabar depends on a specific version of git-core, thus you should 12 | # ensure that you install a git-cinnabar version which matches your git version. 13 | # 14 | # NOTE: This package only provides git-cinnabar tools, as a git users might want 15 | # to have additional commands not provided by this forked version of git-core. 
16 | stdenv.mkDerivation rec { 17 | version = "0.5.4"; 18 | name = "git-cinnabar-${version}"; 19 | src = fetchFromGitHub { 20 | owner = "glandium"; 21 | repo = "git-cinnabar"; 22 | inherit name; 23 | rev = version; # tag name 24 | fetchSubmodules = true; 25 | sha256 = "1cjn2cc6mj4m736wxab9s6qx83p5n5ha8cr3x84s9ra6rxs8d7pi"; 26 | }; 27 | buildInputs = [ autoconf python gettext git curl ]; 28 | 29 | ZLIB_PATH = zlib; 30 | ZLIB_DEV_PATH = zlib.dev; 31 | 32 | PERL_PATH = "${perl}/bin/perl"; 33 | NO_TCLTK = true; 34 | V=1; 35 | 36 | preBuild = '' 37 | export ZLIB_PATH; 38 | export ZLIB_DEV_PATH; 39 | substituteInPlace git-core/Makefile --replace \ 40 | '$(ZLIB_PATH)/include' '$(ZLIB_DEV_PATH)/include' 41 | # Comment out calls to git to try to verify that git-core is up to date 42 | substituteInPlace Makefile \ 43 | --replace '$(eval $(call exec,git' '# $(eval $(call exec,git' 44 | 45 | 46 | export PERL_PATH; 47 | export NO_TCLTK 48 | export V; 49 | ''; 50 | 51 | makeFlags = "prefix=\${out}"; 52 | 53 | installTargets = "git-install"; 54 | 55 | postInstall = 56 | let mercurial-py = mercurial + "/" + mercurial.python.sitePackages; in '' 57 | # git-cinnabar rebuild git, we do not need that. 
58 | rm -rf $out/bin/* $out/share $out/lib 59 | for f in $out/libexec/git-core/{git-remote-hg,git-cinnabar} ; do 60 | substituteInPlace $f --replace \ 61 | "sys.path.append(os.path.join(os.path.dirname(__file__), 'pythonlib'))" \ 62 | "sys.path.extend(['$out/libexec/git-core/pythonlib', '${mercurial-py}'])" 63 | mv $f $out/bin 64 | done 65 | mv $out/libexec/git-core/git-cinnabar-helper $out/bin/git-cinnabar-helper 66 | mv $out/libexec/git-core/pythonlib $out/pythonlib 67 | rm -rf $out/libexec/git-core/* 68 | mv $out/pythonlib $out/libexec/git-core/pythonlib 69 | substituteInPlace $out/libexec/git-core/pythonlib/cinnabar/helper.py \ 70 | --replace 'Git.config('cinnabar.helper')' "Git.config('cinnabar.helper') or '$out/bin/git-cinnabar-helper'" 71 | ''; 72 | } 73 | -------------------------------------------------------------------------------- /pkgs/jsdoc/default.nix: -------------------------------------------------------------------------------- 1 | # This file has been generated by node2nix 1.9.0. Do not edit! 2 | 3 | {pkgs ? import { 4 | inherit system; 5 | }, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-12_x"}: 6 | 7 | let 8 | nodeEnv = import ./node-env.nix { 9 | inherit (pkgs) stdenv lib python2 runCommand writeTextFile; 10 | inherit pkgs nodejs; 11 | libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null; 12 | }; 13 | in 14 | import ./node-packages.nix { 15 | inherit (pkgs) fetchurl nix-gitignore stdenv lib fetchgit; 16 | inherit nodeEnv; 17 | } 18 | -------------------------------------------------------------------------------- /pkgs/jsdoc/node-env.nix: -------------------------------------------------------------------------------- 1 | # This file originates from node2nix 2 | 3 | {lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile}: 4 | 5 | let 6 | # Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master 7 | utillinux = if pkgs ? 
utillinux then pkgs.utillinux else pkgs.util-linux; 8 | 9 | python = if nodejs ? python then nodejs.python else python2; 10 | 11 | # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise 12 | tarWrapper = runCommand "tarWrapper" {} '' 13 | mkdir -p $out/bin 14 | 15 | cat > $out/bin/tar <> $out/nix-support/hydra-build-products 40 | ''; 41 | }; 42 | 43 | includeDependencies = {dependencies}: 44 | lib.optionalString (dependencies != []) 45 | (lib.concatMapStrings (dependency: 46 | '' 47 | # Bundle the dependencies of the package 48 | mkdir -p node_modules 49 | cd node_modules 50 | 51 | # Only include dependencies if they don't exist. They may also be bundled in the package. 52 | if [ ! -e "${dependency.name}" ] 53 | then 54 | ${composePackage dependency} 55 | fi 56 | 57 | cd .. 58 | '' 59 | ) dependencies); 60 | 61 | # Recursively composes the dependencies of a package 62 | composePackage = { name, packageName, src, dependencies ? [], ... }@args: 63 | builtins.addErrorContext "while evaluating node package '${packageName}'" '' 64 | DIR=$(pwd) 65 | cd $TMPDIR 66 | 67 | unpackFile ${src} 68 | 69 | # Make the base dir in which the target dependency resides first 70 | mkdir -p "$(dirname "$DIR/${packageName}")" 71 | 72 | if [ -f "${src}" ] 73 | then 74 | # Figure out what directory has been unpacked 75 | packageDir="$(find . -maxdepth 1 -type d | tail -1)" 76 | 77 | # Restore write permissions to make building work 78 | find "$packageDir" -type d -exec chmod u+x {} \; 79 | chmod -R u+w "$packageDir" 80 | 81 | # Move the extracted tarball into the output folder 82 | mv "$packageDir" "$DIR/${packageName}" 83 | elif [ -d "${src}" ] 84 | then 85 | # Get a stripped name (without hash) of the source directory. 86 | # On old nixpkgs it's already set internally. 
87 | if [ -z "$strippedName" ] 88 | then 89 | strippedName="$(stripHash ${src})" 90 | fi 91 | 92 | # Restore write permissions to make building work 93 | chmod -R u+w "$strippedName" 94 | 95 | # Move the extracted directory into the output folder 96 | mv "$strippedName" "$DIR/${packageName}" 97 | fi 98 | 99 | # Unset the stripped name to not confuse the next unpack step 100 | unset strippedName 101 | 102 | # Include the dependencies of the package 103 | cd "$DIR/${packageName}" 104 | ${includeDependencies { inherit dependencies; }} 105 | cd .. 106 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 107 | ''; 108 | 109 | pinpointDependencies = {dependencies, production}: 110 | let 111 | pinpointDependenciesFromPackageJSON = writeTextFile { 112 | name = "pinpointDependencies.js"; 113 | text = '' 114 | var fs = require('fs'); 115 | var path = require('path'); 116 | 117 | function resolveDependencyVersion(location, name) { 118 | if(location == process.env['NIX_STORE']) { 119 | return null; 120 | } else { 121 | var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json"); 122 | 123 | if(fs.existsSync(dependencyPackageJSON)) { 124 | var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON)); 125 | 126 | if(dependencyPackageObj.name == name) { 127 | return dependencyPackageObj.version; 128 | } 129 | } else { 130 | return resolveDependencyVersion(path.resolve(location, ".."), name); 131 | } 132 | } 133 | } 134 | 135 | function replaceDependencies(dependencies) { 136 | if(typeof dependencies == "object" && dependencies !== null) { 137 | for(var dependency in dependencies) { 138 | var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency); 139 | 140 | if(resolvedVersion === null) { 141 | process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n"); 142 | } else { 143 | dependencies[dependency] = resolvedVersion; 144 | } 145 | } 146 | } 147 | } 148 | 
149 | /* Read the package.json configuration */ 150 | var packageObj = JSON.parse(fs.readFileSync('./package.json')); 151 | 152 | /* Pinpoint all dependencies */ 153 | replaceDependencies(packageObj.dependencies); 154 | if(process.argv[2] == "development") { 155 | replaceDependencies(packageObj.devDependencies); 156 | } 157 | replaceDependencies(packageObj.optionalDependencies); 158 | 159 | /* Write the fixed package.json file */ 160 | fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2)); 161 | ''; 162 | }; 163 | in 164 | '' 165 | node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"} 166 | 167 | ${lib.optionalString (dependencies != []) 168 | '' 169 | if [ -d node_modules ] 170 | then 171 | cd node_modules 172 | ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies} 173 | cd .. 174 | fi 175 | ''} 176 | ''; 177 | 178 | # Recursively traverses all dependencies of a package and pinpoints all 179 | # dependencies in the package.json file to the versions that are actually 180 | # being used. 181 | 182 | pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args: 183 | '' 184 | if [ -d "${packageName}" ] 185 | then 186 | cd "${packageName}" 187 | ${pinpointDependencies { inherit dependencies production; }} 188 | cd .. 
189 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 190 | fi 191 | ''; 192 | 193 | # Extract the Node.js source code which is used to compile packages with 194 | # native bindings 195 | nodeSources = runCommand "node-sources" {} '' 196 | tar --no-same-owner --no-same-permissions -xf ${nodejs.src} 197 | mv node-* $out 198 | ''; 199 | 200 | # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty) 201 | addIntegrityFieldsScript = writeTextFile { 202 | name = "addintegrityfields.js"; 203 | text = '' 204 | var fs = require('fs'); 205 | var path = require('path'); 206 | 207 | function augmentDependencies(baseDir, dependencies) { 208 | for(var dependencyName in dependencies) { 209 | var dependency = dependencies[dependencyName]; 210 | 211 | // Open package.json and augment metadata fields 212 | var packageJSONDir = path.join(baseDir, "node_modules", dependencyName); 213 | var packageJSONPath = path.join(packageJSONDir, "package.json"); 214 | 215 | if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored 216 | console.log("Adding metadata fields to: "+packageJSONPath); 217 | var packageObj = JSON.parse(fs.readFileSync(packageJSONPath)); 218 | 219 | if(dependency.integrity) { 220 | packageObj["_integrity"] = dependency.integrity; 221 | } else { 222 | packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads. 223 | } 224 | 225 | if(dependency.resolved) { 226 | packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided 227 | } else { 228 | packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories. 
229 | } 230 | 231 | if(dependency.from !== undefined) { // Adopt from property if one has been provided 232 | packageObj["_from"] = dependency.from; 233 | } 234 | 235 | fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2)); 236 | } 237 | 238 | // Augment transitive dependencies 239 | if(dependency.dependencies !== undefined) { 240 | augmentDependencies(packageJSONDir, dependency.dependencies); 241 | } 242 | } 243 | } 244 | 245 | if(fs.existsSync("./package-lock.json")) { 246 | var packageLock = JSON.parse(fs.readFileSync("./package-lock.json")); 247 | 248 | if(![1, 2].includes(packageLock.lockfileVersion)) { 249 | process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n"); 250 | process.exit(1); 251 | } 252 | 253 | if(packageLock.dependencies !== undefined) { 254 | augmentDependencies(".", packageLock.dependencies); 255 | } 256 | } 257 | ''; 258 | }; 259 | 260 | # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes 261 | reconstructPackageLock = writeTextFile { 262 | name = "addintegrityfields.js"; 263 | text = '' 264 | var fs = require('fs'); 265 | var path = require('path'); 266 | 267 | var packageObj = JSON.parse(fs.readFileSync("package.json")); 268 | 269 | var lockObj = { 270 | name: packageObj.name, 271 | version: packageObj.version, 272 | lockfileVersion: 1, 273 | requires: true, 274 | dependencies: {} 275 | }; 276 | 277 | function augmentPackageJSON(filePath, dependencies) { 278 | var packageJSON = path.join(filePath, "package.json"); 279 | if(fs.existsSync(packageJSON)) { 280 | var packageObj = JSON.parse(fs.readFileSync(packageJSON)); 281 | dependencies[packageObj.name] = { 282 | version: packageObj.version, 283 | integrity: "sha1-000000000000000000000000000=", 284 | dependencies: {} 285 | }; 286 | processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies); 287 | } 288 | } 289 | 290 | function 
processDependencies(dir, dependencies) { 291 | if(fs.existsSync(dir)) { 292 | var files = fs.readdirSync(dir); 293 | 294 | files.forEach(function(entry) { 295 | var filePath = path.join(dir, entry); 296 | var stats = fs.statSync(filePath); 297 | 298 | if(stats.isDirectory()) { 299 | if(entry.substr(0, 1) == "@") { 300 | // When we encounter a namespace folder, augment all packages belonging to the scope 301 | var pkgFiles = fs.readdirSync(filePath); 302 | 303 | pkgFiles.forEach(function(entry) { 304 | if(stats.isDirectory()) { 305 | var pkgFilePath = path.join(filePath, entry); 306 | augmentPackageJSON(pkgFilePath, dependencies); 307 | } 308 | }); 309 | } else { 310 | augmentPackageJSON(filePath, dependencies); 311 | } 312 | } 313 | }); 314 | } 315 | } 316 | 317 | processDependencies("node_modules", lockObj.dependencies); 318 | 319 | fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2)); 320 | ''; 321 | }; 322 | 323 | prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}: 324 | let 325 | forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com"; 326 | in 327 | '' 328 | # Pinpoint the versions of all dependencies to the ones that are actually being used 329 | echo "pinpointing versions of dependencies..." 330 | source $pinpointDependenciesScriptPath 331 | 332 | # Patch the shebangs of the bundled modules to prevent them from 333 | # calling executables outside the Nix store as much as possible 334 | patchShebangs . 335 | 336 | # Deploy the Node.js package by running npm install. Since the 337 | # dependencies have been provided already by ourselves, it should not 338 | # attempt to install them again, which is good, because we want to make 339 | # it Nix's responsibility. If it needs to install any dependencies 340 | # anyway (e.g. because the dependency parameters are 341 | # incomplete/incorrect), it fails. 
342 | # 343 | # The other responsibilities of NPM are kept -- version checks, build 344 | # steps, postprocessing etc. 345 | 346 | export HOME=$TMPDIR 347 | cd "${packageName}" 348 | runHook preRebuild 349 | 350 | ${lib.optionalString bypassCache '' 351 | ${lib.optionalString reconstructLock '' 352 | if [ -f package-lock.json ] 353 | then 354 | echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!" 355 | echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!" 356 | rm package-lock.json 357 | else 358 | echo "No package-lock.json file found, reconstructing..." 359 | fi 360 | 361 | node ${reconstructPackageLock} 362 | ''} 363 | 364 | node ${addIntegrityFieldsScript} 365 | ''} 366 | 367 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild 368 | 369 | if [ "''${dontNpmInstall-}" != "1" ] 370 | then 371 | # NPM tries to download packages even when they already exist if npm-shrinkwrap is used. 372 | rm -f npm-shrinkwrap.json 373 | 374 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install 375 | fi 376 | ''; 377 | 378 | # Builds and composes an NPM package including all its dependencies 379 | buildNodePackage = 380 | { name 381 | , packageName 382 | , version 383 | , dependencies ? [] 384 | , buildInputs ? [] 385 | , production ? true 386 | , npmFlags ? "" 387 | , dontNpmInstall ? false 388 | , bypassCache ? false 389 | , reconstructLock ? false 390 | , preRebuild ? "" 391 | , dontStrip ? true 392 | , unpackPhase ? "true" 393 | , buildPhase ? "true" 394 | , ... 
}@args: 395 | 396 | let 397 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ]; 398 | in 399 | stdenv.mkDerivation ({ 400 | name = "node_${name}-${version}"; 401 | buildInputs = [ tarWrapper python nodejs ] 402 | ++ lib.optional (stdenv.isLinux) utillinux 403 | ++ lib.optional (stdenv.isDarwin) libtool 404 | ++ buildInputs; 405 | 406 | inherit nodejs; 407 | 408 | inherit dontStrip; # Stripping may fail a build for some package deployments 409 | inherit dontNpmInstall preRebuild unpackPhase buildPhase; 410 | 411 | compositionScript = composePackage args; 412 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; 413 | 414 | passAsFile = [ "compositionScript" "pinpointDependenciesScript" ]; 415 | 416 | installPhase = '' 417 | # Create and enter a root node_modules/ folder 418 | mkdir -p $out/lib/node_modules 419 | cd $out/lib/node_modules 420 | 421 | # Compose the package and all its dependencies 422 | source $compositionScriptPath 423 | 424 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} 425 | 426 | # Create symlink to the deployed executable folder, if applicable 427 | if [ -d "$out/lib/node_modules/.bin" ] 428 | then 429 | ln -s $out/lib/node_modules/.bin $out/bin 430 | fi 431 | 432 | # Create symlinks to the deployed manual page folders, if applicable 433 | if [ -d "$out/lib/node_modules/${packageName}/man" ] 434 | then 435 | mkdir -p $out/share 436 | for dir in "$out/lib/node_modules/${packageName}/man/"* 437 | do 438 | mkdir -p $out/share/man/$(basename "$dir") 439 | for page in "$dir"/* 440 | do 441 | ln -s $page $out/share/man/$(basename "$dir") 442 | done 443 | done 444 | fi 445 | 446 | # Run post install hook, if provided 447 | runHook postInstall 448 | ''; 449 | } // extraArgs); 450 | 451 | # Builds a node environment (a node_modules folder and a set of binaries) 452 | buildNodeDependencies = 453 | { name 
454 | , packageName 455 | , version 456 | , src 457 | , dependencies ? [] 458 | , buildInputs ? [] 459 | , production ? true 460 | , npmFlags ? "" 461 | , dontNpmInstall ? false 462 | , bypassCache ? false 463 | , reconstructLock ? false 464 | , dontStrip ? true 465 | , unpackPhase ? "true" 466 | , buildPhase ? "true" 467 | , ... }@args: 468 | 469 | let 470 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ]; 471 | in 472 | stdenv.mkDerivation ({ 473 | name = "node-dependencies-${name}-${version}"; 474 | 475 | buildInputs = [ tarWrapper python nodejs ] 476 | ++ lib.optional (stdenv.isLinux) utillinux 477 | ++ lib.optional (stdenv.isDarwin) libtool 478 | ++ buildInputs; 479 | 480 | inherit dontStrip; # Stripping may fail a build for some package deployments 481 | inherit dontNpmInstall unpackPhase buildPhase; 482 | 483 | includeScript = includeDependencies { inherit dependencies; }; 484 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; 485 | 486 | passAsFile = [ "includeScript" "pinpointDependenciesScript" ]; 487 | 488 | installPhase = '' 489 | mkdir -p $out/${packageName} 490 | cd $out/${packageName} 491 | 492 | source $includeScriptPath 493 | 494 | # Create fake package.json to make the npm commands work properly 495 | cp ${src}/package.json . 496 | chmod 644 package.json 497 | ${lib.optionalString bypassCache '' 498 | if [ -f ${src}/package-lock.json ] 499 | then 500 | cp ${src}/package-lock.json . 501 | fi 502 | ''} 503 | 504 | # Go to the parent folder to make sure that all packages are pinpointed 505 | cd .. 506 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 507 | 508 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} 509 | 510 | # Expose the executables that were installed 511 | cd .. 
512 | ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 513 | 514 | mv ${packageName} lib 515 | ln -s $out/lib/node_modules/.bin $out/bin 516 | ''; 517 | } // extraArgs); 518 | 519 | # Builds a development shell 520 | buildNodeShell = 521 | { name 522 | , packageName 523 | , version 524 | , src 525 | , dependencies ? [] 526 | , buildInputs ? [] 527 | , production ? true 528 | , npmFlags ? "" 529 | , dontNpmInstall ? false 530 | , bypassCache ? false 531 | , reconstructLock ? false 532 | , dontStrip ? true 533 | , unpackPhase ? "true" 534 | , buildPhase ? "true" 535 | , ... }@args: 536 | 537 | let 538 | nodeDependencies = buildNodeDependencies args; 539 | in 540 | stdenv.mkDerivation { 541 | name = "node-shell-${name}-${version}"; 542 | 543 | buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs; 544 | buildCommand = '' 545 | mkdir -p $out/bin 546 | cat > $out/bin/shell <&1 | \ 21 | tail -1 22 | } 23 | 24 | echo "=== ${owner}/${repo}@${branch} ===" 25 | 26 | echo -n "Looking up latest revision ... " 27 | rev=$(github_rev "${owner}" "${repo}" "${branch}"); 28 | echo "revision is \`$rev\`." 29 | 30 | sha256=$(github_sha256 "${owner}" "${repo}" "$rev"); 31 | echo "sha256 is \`$sha256\`." 32 | 33 | if [ "$sha256" == "" ]; then 34 | echo "sha256 is not valid!" 35 | exit 2 36 | fi 37 | source_file=${path} 38 | echo "Content of source file (``$source_file``) written." 39 | cat < 3 | , supportedSystems ? [ "x86_64-linux" "i686-linux" /* "x86_64-darwin" */ 4 | "aarch64-linux" 5 | ] 6 | }: 7 | 8 | let 9 | lib = (import nixpkgsSrc {}).lib; 10 | 11 | # Make an attribute set for each system, the builder is then specialized to 12 | # use the selected system. 13 | forEachSystem = systems: builder /* system -> stdenv -> pkgs */: 14 | lib.genAttrs systems builder; 15 | 16 | # Make an attribute set for each compiler, the builder is then be specialized 17 | # to use the selected compiler. 
18 | forEachCompiler = compilers: builder: system: 19 | builtins.listToAttrs (map (compiler: { 20 | name = compiler; 21 | value = builder compiler system; 22 | }) compilers); 23 | 24 | 25 | # Overide the previous derivation, with a different stdenv. 26 | builder = path: compiler: system: 27 | lib.getAttrFromPath path (import nixpkgsSrc { 28 | inherit system; 29 | overlays = [ 30 | # Add all packages from nixpkgs-mozilla. 31 | (import ./default.nix) 32 | 33 | # Define customStdenvs, which is a set of various compilers which can be 34 | # used to compile the given package against. 35 | (import ./compilers-overlay.nix) 36 | 37 | # Use the following overlay to override the requested package from 38 | # nixpkgs, with a custom stdenv taken from the compilers-overlay. 39 | (self: super: 40 | if compiler == null then {} 41 | else lib.setAttrByPath path ((lib.getAttrFromPath path super).override { 42 | stdenv = self.customStdenvs."${compiler}"; 43 | })) 44 | ]; 45 | }); 46 | 47 | build = path: { systems ? supportedSystems, compilers ? null }: 48 | forEachSystem systems ( 49 | if compilers == null 50 | then builder path null 51 | else forEachCompiler compilers (builder path) 52 | ); 53 | 54 | geckoCompilers = [ 55 | "clang" 56 | "clang36" 57 | "clang37" 58 | "clang38" 59 | "clang5" 60 | "clang6" 61 | "clang7" 62 | "clang12" 63 | "clang13" 64 | "gcc" 65 | "gcc6" 66 | "gcc5" 67 | "gcc49" 68 | "gcc48" 69 | #"gcc474" 70 | #"gcc473" 71 | #"gcc472" 72 | ]; 73 | 74 | jobs = { 75 | 76 | # For each system, and each compiler, create an attribute with the name of 77 | # the system and compiler. Use this attribute name to select which 78 | # environment you are interested in for building firefox. 
These can be 79 | # build using the following command: 80 | # 81 | # $ nix-build release.nix -A gecko.x86_64-linux.clang -o firefox-x64 82 | # $ nix-build release.nix -A gecko.i686-linux.gcc48 -o firefox-x86 83 | # 84 | # If you are only interested in getting a build environment, the use the 85 | # nix-shell command instead, which will skip the copy of Firefox sources, 86 | # and pull the the dependencies needed for building firefox with this 87 | # environment. 88 | # 89 | # $ nix-shell release.nix -A gecko.i686-linux.gcc --pure --command '$CC --version' 90 | # $ nix-shell release.nix -A gecko.x86_64-linux.clang --pure 91 | # 92 | # As some of the test script of Gecko are checking against absolute path, a 93 | # fake-FHS is provided for Gecko. It can be accessed by appending 94 | # ".fhs.env" behind the previous commands: 95 | # 96 | # $ nix-shell release.nix -A gecko.x86_64-linux.gcc.fhs.env 97 | # 98 | # Which will spawn a new shell where the closure of everything used to build 99 | # Gecko would be part of the fake-root. 
100 | gecko = build [ "devEnv" "gecko" ] { compilers = geckoCompilers; }; 101 | latest = { 102 | "firefox-nightly-bin" = build [ "latest" "firefox-nightly-bin" ]; 103 | }; 104 | 105 | git-cinnabar = build [ "git-cinnabar" ]; 106 | }; 107 | 108 | in jobs 109 | -------------------------------------------------------------------------------- /rust-overlay-install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | cd "$(dirname "$0")" || exit 4 | 5 | overlay_dir=$HOME/.config/nixpkgs/overlays 6 | name=rust-overlay.nix 7 | 8 | echo Installing $name as an overlay 9 | 10 | set -x 11 | mkdir -p "$overlay_dir" 12 | ln -s "$PWD/$name" "$overlay_dir/$name" 13 | -------------------------------------------------------------------------------- /rust-overlay.nix: -------------------------------------------------------------------------------- 1 | # This file provide a Rust overlay, which provides pre-packaged bleeding edge versions of rustc 2 | # and cargo. 3 | self: super: 4 | 5 | let 6 | fromTOML = 7 | # nix 2.1 added the fromTOML builtin 8 | if builtins ? fromTOML 9 | then builtins.fromTOML 10 | else (import ./lib/parseTOML.nix).fromTOML; 11 | 12 | parseRustToolchain = file: with builtins; 13 | if file == null then 14 | { } 15 | # Parse *.toml files as TOML 16 | else if self.lib.strings.hasSuffix ".toml" file then 17 | ({ channel ? null, date ? null, ... 
}: { inherit channel date; }) 18 | (fromTOML (readFile file)).toolchain 19 | else 20 | # Otherwise, assume the file contains just a rust version string 21 | let 22 | str = readFile file; 23 | # Match toolchain descriptions of type "nightly" or "nightly-2020-01-01" 24 | channel_by_name = match "([a-z]+)(-([0-9]{4}-[0-9]{2}-[0-9]{2}))?.*" str; 25 | # Match toolchain descriptions of type "1.34.0" or "1.34.0-2019-04-10" 26 | channel_by_version = match "([0-9]+\\.[0-9]+\\.[0-9]+)(-([0-9]{4}-[0-9]{2}-[0-9]{2}))?.*" str; 27 | in 28 | (x: { channel = head x; date = (head (tail (tail x))); }) ( 29 | if channel_by_name != null then 30 | channel_by_name 31 | else 32 | channel_by_version 33 | ); 34 | 35 | # In NixOS 24.11, the `pkgs.rust.toRustTarget` has become deprecated in favor of the 36 | # `.rust.rustcTarget` attribute of the platform. This function provides backwards compatibility in 37 | # case the caller is using a nixpkgs older than NixOS 24.11. 38 | toRustTargetCompat = platform: 39 | if platform ? rust && platform.rust ? rustcTarget 40 | then platform.rust.rustcTarget 41 | else super.rust.toRustTarget platform; 42 | 43 | # See https://github.com/rust-lang-nursery/rustup.rs/blob/master/src/dist/src/dist.rs 44 | defaultDistRoot = "https://static.rust-lang.org"; 45 | manifest_v1_url = { 46 | dist_root ? defaultDistRoot + "/dist", 47 | date ? null, 48 | staging ? false, 49 | # A channel can be "nightly", "beta", "stable", or "\d{1}\.\d{1,3}\.\d{1,2}". 50 | channel ? "nightly", 51 | # A path that points to a rust-toolchain file, typically ./rust-toolchain. 52 | rustToolchain ? null, 53 | ... 
54 | }: 55 | let args = { inherit channel date; } // parseRustToolchain rustToolchain; in 56 | let inherit (args) date channel; in 57 | if date == null && staging == false 58 | then "${dist_root}/channel-rust-${channel}" 59 | else if date != null && staging == false 60 | then "${dist_root}/${date}/channel-rust-${channel}" 61 | else if date == null && staging == true 62 | then "${dist_root}/staging/channel-rust-${channel}" 63 | else throw "not a real-world case"; 64 | 65 | manifest_v2_url = args: (manifest_v1_url args) + ".toml"; 66 | 67 | getComponentsWithFixedPlatform = pkgs: pkgname: stdenv: 68 | let 69 | pkg = pkgs.${pkgname}; 70 | srcInfo = pkg.target.${toRustTargetCompat stdenv.targetPlatform} or pkg.target."*"; 71 | components = srcInfo.components or []; 72 | componentNamesList = 73 | builtins.map (pkg: pkg.pkg) (builtins.filter (pkg: (pkg.target != "*")) components); 74 | in 75 | componentNamesList; 76 | 77 | getExtensions = pkgs: pkgname: stdenv: 78 | let 79 | inherit (super.lib) unique; 80 | pkg = pkgs.${pkgname}; 81 | srcInfo = pkg.target.${toRustTargetCompat stdenv.targetPlatform} or pkg.target."*"; 82 | extensions = srcInfo.extensions or []; 83 | extensionNamesList = unique (builtins.map (pkg: pkg.pkg) extensions); 84 | in 85 | extensionNamesList; 86 | 87 | hasTarget = pkgs: pkgname: target: 88 | pkgs ? ${pkgname}.target.${target}; 89 | 90 | getTuples = pkgs: name: targets: 91 | builtins.map (target: { inherit name target; }) (builtins.filter (target: hasTarget pkgs name target) targets); 92 | 93 | # In the manifest, a package might have different components which are bundled with it, as opposed as the extensions which can be added. 94 | # By default, a package will include the components for the same architecture, and offers them as extensions for other architectures. 95 | # 96 | # This functions returns a list of { name, target } attribute sets, which includes the current system package, and all its components for the selected targets. 
97 | # The list contains the package for the pkgTargets as well as the packages for components for all compTargets 98 | getTargetPkgTuples = pkgs: pkgname: pkgTargets: compTargets: stdenv: 99 | let 100 | inherit (builtins) elem; 101 | inherit (super.lib) intersectLists; 102 | components = getComponentsWithFixedPlatform pkgs pkgname stdenv; 103 | extensions = getExtensions pkgs pkgname stdenv; 104 | compExtIntersect = intersectLists components extensions; 105 | tuples = (getTuples pkgs pkgname pkgTargets) ++ (builtins.map (name: getTuples pkgs name compTargets) compExtIntersect); 106 | in 107 | tuples; 108 | 109 | getFetchUrl = pkgs: pkgname: target: stdenv: fetchurl: 110 | let 111 | pkg = pkgs.${pkgname}; 112 | srcInfo = pkg.target.${target}; 113 | in 114 | (super.fetchurl { url = srcInfo.xz_url or srcInfo.url; sha256 = srcInfo.xz_hash or srcInfo.hash; }); 115 | 116 | checkMissingExtensions = pkgs: pkgname: stdenv: extensions: 117 | let 118 | inherit (builtins) head; 119 | inherit (super.lib) concatStringsSep subtractLists; 120 | availableExtensions = getExtensions pkgs pkgname stdenv; 121 | missingExtensions = subtractLists availableExtensions extensions; 122 | extensionsToInstall = 123 | if missingExtensions == [] then extensions else throw '' 124 | While compiling ${pkgname}: the extension ${head missingExtensions} is not available. 
125 | Select extensions from the following list: 126 | ${concatStringsSep "\n" availableExtensions}''; 127 | in 128 | extensionsToInstall; 129 | 130 | getComponents = pkgs: pkgname: targets: extensions: targetExtensions: stdenv: fetchurl: 131 | let 132 | inherit (builtins) head map; 133 | inherit (super.lib) flatten remove subtractLists unique; 134 | targetExtensionsToInstall = checkMissingExtensions pkgs pkgname stdenv targetExtensions; 135 | extensionsToInstall = checkMissingExtensions pkgs pkgname stdenv extensions; 136 | hostTargets = [ "*" (toRustTargetCompat stdenv.hostPlatform) (toRustTargetCompat stdenv.targetPlatform) ]; 137 | pkgTuples = flatten (getTargetPkgTuples pkgs pkgname hostTargets targets stdenv); 138 | extensionTuples = flatten (map (name: getTargetPkgTuples pkgs name hostTargets targets stdenv) extensionsToInstall); 139 | targetExtensionTuples = flatten (map (name: getTargetPkgTuples pkgs name targets targets stdenv) targetExtensionsToInstall); 140 | pkgsTuples = pkgTuples ++ extensionTuples ++ targetExtensionTuples; 141 | missingTargets = subtractLists (map (tuple: tuple.target) pkgsTuples) (remove "*" targets); 142 | pkgsTuplesToInstall = 143 | if missingTargets == [] then pkgsTuples else throw '' 144 | While compiling ${pkgname}: the target ${head missingTargets} is not available for any package.''; 145 | in 146 | map (tuple: { name = tuple.name; src = (getFetchUrl pkgs tuple.name tuple.target stdenv fetchurl); }) pkgsTuplesToInstall; 147 | 148 | installComponents = stdenv: namesAndSrcs: 149 | let 150 | inherit (builtins) map; 151 | installComponent = name: src: 152 | stdenv.mkDerivation { 153 | inherit name; 154 | inherit src; 155 | 156 | # No point copying src to a build server, then copying back the 157 | # entire unpacked contents after just a little twiddling. 158 | preferLocalBuild = true; 159 | 160 | # (@nbp) TODO: Check on Windows and Mac. 161 | # This code is inspired by patchelf/setup-hook.sh to iterate over all binaries. 
162 | installPhase = '' 163 | patchShebangs install.sh 164 | CFG_DISABLE_LDCONFIG=1 ./install.sh --prefix=$out --verbose 165 | 166 | setInterpreter() { 167 | local dir="$1" 168 | [ -e "$dir" ] || return 0 169 | 170 | echo "Patching interpreter of ELF executables and libraries in $dir" 171 | local i 172 | while IFS= read -r -d ''$'\0' i; do 173 | if [[ "$i" =~ .build-id ]]; then continue; fi 174 | if ! isELF "$i"; then continue; fi 175 | echo "setting interpreter of $i" 176 | 177 | if [[ -x "$i" ]]; then 178 | # Handle executables 179 | patchelf \ 180 | --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" \ 181 | --set-rpath "${super.lib.makeLibraryPath [ self.zlib ]}:$out/lib" \ 182 | "$i" || true 183 | else 184 | # Handle libraries 185 | patchelf \ 186 | --set-rpath "${super.lib.makeLibraryPath [ self.zlib ]}:$out/lib" \ 187 | "$i" || true 188 | fi 189 | done < <(find "$dir" -type f -print0) 190 | } 191 | 192 | setInterpreter $out 193 | ''; 194 | 195 | postFixup = '' 196 | # Function moves well-known files from etc/ 197 | handleEtc() { 198 | local oldIFS="$IFS" 199 | 200 | # Directories we are aware of, given as substitution lists 201 | for paths in \ 202 | "etc/bash_completion.d","share/bash_completion/completions","etc/bash_completions.d","share/bash_completions/completions"; 203 | do 204 | # Some directoties may be missing in some versions. If so we just skip them. 205 | # See https://github.com/mozilla/nixpkgs-mozilla/issues/48 for more infomation. 206 | if [ ! 
-e $paths ]; then continue; fi 207 | 208 | IFS="," 209 | set -- $paths 210 | IFS="$oldIFS" 211 | 212 | local orig_path="$1" 213 | local wanted_path="$2" 214 | 215 | # Rename the files 216 | if [ -d ./"$orig_path" ]; then 217 | mkdir -p "$(dirname ./"$wanted_path")" 218 | fi 219 | mv -v ./"$orig_path" ./"$wanted_path" 220 | 221 | # Fail explicitly if etc is not empty so we can add it to the list and/or report it upstream 222 | rmdir ./etc || { 223 | echo Installer tries to install to /etc: 224 | find ./etc 225 | exit 1 226 | } 227 | done 228 | } 229 | 230 | if [ -d "$out"/etc ]; then 231 | pushd "$out" 232 | handleEtc 233 | popd 234 | fi 235 | ''; 236 | 237 | dontStrip = true; 238 | }; 239 | in 240 | map (nameAndSrc: (installComponent nameAndSrc.name nameAndSrc.src)) namesAndSrcs; 241 | 242 | # Manifest files are organized as follow: 243 | # { date = "2017-03-03"; 244 | # pkg.cargo.version= "0.18.0-nightly (5db6d64 2017-03-03)"; 245 | # pkg.cargo.target.x86_64-unknown-linux-gnu = { 246 | # available = true; 247 | # hash = "abce..."; # sha256 248 | # url = "https://static.rust-lang.org/dist/....tar.gz"; 249 | # xz_hash = "abce..."; # sha256 250 | # xz_url = "https://static.rust-lang.org/dist/....tar.xz"; 251 | # }; 252 | # } 253 | # 254 | # The packages available usually are: 255 | # cargo, rust-analysis, rust-docs, rust-src, rust-std, rustc, and 256 | # rust, which aggregates them in one package. 257 | # 258 | # For each package the following options are available: 259 | # extensions - The extensions that should be installed for the package. 260 | # For example, install the package rust and add the extension rust-src. 261 | # targets - The package will always be installed for the host system, but with this option 262 | # extra targets can be specified, e.g. "mips-unknown-linux-musl". The target 263 | # will only apply to components of the package that support being installed for 264 | # a different architecture. 
For example, the rust package will install rust-std 265 | # for the host system and the targets. 266 | # targetExtensions - If you want to force extensions to be installed for the given targets, this is your option. 267 | # All extensions in this list will be installed for the target architectures. 268 | # *Attention* If you want to install an extension like rust-src, that has no fixed architecture (arch *), 269 | # you will need to specify this extension in the extensions options or it will not be installed! 270 | fromManifestFile = manifest: { stdenv, lib, fetchurl, patchelf }: 271 | let 272 | inherit (builtins) elemAt; 273 | inherit (super) makeOverridable; 274 | inherit (super.lib) flip mapAttrs; 275 | pkgs = fromTOML (builtins.readFile manifest); 276 | in 277 | flip mapAttrs pkgs.pkg (name: pkg: 278 | makeOverridable ({extensions, targets, targetExtensions}: 279 | let 280 | version' = builtins.match "([^ ]*) [(]([^ ]*) ([^ ]*)[)]" pkg.version; 281 | version = "${elemAt version' 0}-${elemAt version' 2}-${elemAt version' 1}"; 282 | namesAndSrcs = getComponents pkgs.pkg name targets extensions targetExtensions stdenv fetchurl; 283 | components = installComponents stdenv namesAndSrcs; 284 | componentsOuts = builtins.map (comp: (super.lib.strings.escapeNixString (super.lib.getOutput "out" comp))) components; 285 | in 286 | super.pkgs.symlinkJoin { 287 | name = name + "-" + version; 288 | paths = components; 289 | postBuild = '' 290 | # If rustc or rustdoc is in the derivation, we need to copy their 291 | # executable into the final derivation. This is required 292 | # for making them find the correct SYSROOT. 293 | # Similarly, we copy the python files for gdb pretty-printers since 294 | # its auto-load-safe-path mechanism doesn't like symlinked files. 
295 | for target in $out/bin/{rustc,rustdoc} $out/lib/rustlib/etc/*.py; do 296 | if [ -e $target ]; then 297 | cp --remove-destination "$(realpath -e $target)" $target 298 | 299 | # The SYSROOT is determined by using the librustc_driver-*.so. 300 | # So, we need to point to the *.so files in our derivation. 301 | chmod u+w $target 302 | patchelf --set-rpath "$out/lib" $target || true 303 | fi 304 | done 305 | 306 | # Here we copy the librustc_driver-*.so to our derivation. 307 | # The SYSROOT is determined based on the path of this library. 308 | if test "" != $out/lib/librustc_driver-*.so &> /dev/null; then 309 | RUSTC_DRIVER_PATH=$(realpath -e $out/lib/librustc_driver-*.so) 310 | rm $out/lib/librustc_driver-*.so 311 | cp $RUSTC_DRIVER_PATH $out/lib/ 312 | fi 313 | ''; 314 | 315 | # Export the manifest file as part of the nix-support files such 316 | # that one can compute the sha256 of a manifest to freeze it for 317 | # reproducible builds. 318 | MANIFEST_FILE = manifest; 319 | postInstall = '' 320 | mkdir $out/nix-support 321 | cp $MANIFEST_FILE $out/nix-support/manifest.toml 322 | ''; 323 | 324 | # Add the compiler as part of the propagated build inputs in order 325 | # to run: 326 | # 327 | # $ nix-shell -p rustChannels.stable.rust 328 | # 329 | # And get a fully working Rust compiler, with the stdenv linker. 
330 | propagatedBuildInputs = [ stdenv.cc ]; 331 | 332 | meta.platforms = lib.platforms.all; 333 | } 334 | ) { extensions = []; targets = []; targetExtensions = []; } 335 | ); 336 | 337 | fromManifest = sha256: manifest: { stdenv, lib, fetchurl, patchelf }: 338 | let manifestFile = if sha256 == null then builtins.fetchurl manifest else fetchurl { url = manifest; inherit sha256; }; 339 | in fromManifestFile manifestFile { inherit stdenv lib fetchurl patchelf; }; 340 | 341 | in 342 | 343 | rec { 344 | lib = super.lib // { 345 | inherit fromTOML; 346 | rustLib = { 347 | inherit fromManifest fromManifestFile manifest_v2_url; 348 | }; 349 | }; 350 | 351 | rustChannelOf = { sha256 ? null, ... } @ manifest_args: fromManifest 352 | sha256 (manifest_v2_url manifest_args) 353 | { inherit (super) lib; 354 | inherit (self) stdenv fetchurl patchelf; 355 | } ; 356 | 357 | # Set of packages which are automagically updated. Do not rely on these for 358 | # reproducible builds. 359 | latest = (super.latest or {}) // { 360 | rustChannels = { 361 | nightly = rustChannelOf { channel = "nightly"; }; 362 | beta = rustChannelOf { channel = "beta"; }; 363 | stable = rustChannelOf { channel = "stable"; }; 364 | }; 365 | }; 366 | 367 | # Helper builder 368 | rustChannelOfTargets = channel: date: targets: 369 | (rustChannelOf { inherit channel date; }) 370 | .rust.override { inherit targets; }; 371 | 372 | # For backward compatibility 373 | rustChannels = latest.rustChannels; 374 | 375 | # For each channel: 376 | # latest.rustChannels.nightly.cargo 377 | # latest.rustChannels.nightly.rust # Aggregate all others. 
(recommended)
#             latest.rustChannels.nightly.rustc
#             latest.rustChannels.nightly.rust-analysis
#             latest.rustChannels.nightly.rust-docs
#             latest.rustChannels.nightly.rust-src
#             latest.rustChannels.nightly.rust-std

# For a specific date:
#             (rustChannelOf { date = "2017-06-06"; channel = "beta"; }).rust
}


--------------------------------------------------------------------------------
/rust-src-overlay.nix:
--------------------------------------------------------------------------------
# Overlay that builds on top of rust-overlay.nix.
# Adds the rust-src component to all channels, which is helpful for racer,
# intellij, and other tooling that wants the compiler sources.

self: super:

let mapAttrs = super.lib.mapAttrs;
    flip = super.lib.flip;
in {
  # Install stable rust with rust-src:
  #   nix-env -i -A nixos.latest.rustChannels.stable.rust

  latest.rustChannels =
    flip mapAttrs super.latest.rustChannels (name: value: value // {
      rust = value.rust.override {
        extensions = ["rust-src"];
      };
    });
}


--------------------------------------------------------------------------------
/update.nix:
--------------------------------------------------------------------------------
# Run the `passthru.updateScript` of packages defined by this repository's
# overlays.  Meant to be used through nix-shell (see helpText below);
# invoking it with nix-build only prints the usage message and fails.
let
  # Bootstrap nixpkgs, then fetch the pinned revision recorded in
  # ./pkgs/nixpkgs.json so updates run against a reproducible package set.
  _pkgs = import <nixpkgs> {};
  _nixpkgs = _pkgs.fetchFromGitHub (_pkgs.lib.importJSON ./pkgs/nixpkgs.json);
in

{ pkgs ? import _nixpkgs {}
, package ? null      # attribute path of a single package to update
, maintainer ? null   # or: update every package listing this maintainer
, dont_prompt ? false # skip the interactive confirmation prompt
}:

# TODO: add assert statements

let

  # Package set provided by this repository (all overlays applied).
  pkgs-mozilla = import ./default.nix { inherit pkgs; };

  dont_prompt_str = if dont_prompt then "yes" else "no";

  # Collect `return name pkg` for every derivation in `set` that satisfies
  # `cond`, descending into attribute sets marked recurseForDerivations /
  # recurseForRelease.  Evaluation failures of individual attributes are
  # swallowed via builtins.tryEval so one broken package cannot abort the
  # whole traversal.
  packagesWith = cond: return: set:
    pkgs.lib.flatten
      (pkgs.lib.mapAttrsToList
        (name: pkg:
          let
            result = builtins.tryEval (
              if pkgs.lib.isDerivation pkg && cond name pkg
              then [(return name pkg)]
              else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
              then packagesWith cond return pkg
              else []
            );
          in
            if result.success then result.value
            else []
        )
        set
      );

  # All packages of pkgs-mozilla that define an updateScript and list the
  # given maintainer (resolved by name through lib.maintainers;
  # meta.maintainers may be either a list or a single maintainer).
  packagesWithUpdateScriptAndMaintainer = maintainer':
    let
      maintainer =
        if ! builtins.hasAttr maintainer' pkgs.lib.maintainers then
          builtins.throw "Maintainer with name `${maintainer'}` does not exist in `lib/maintainers.nix`."
        else
          builtins.getAttr maintainer' pkgs.lib.maintainers;
    in
      packagesWith (name: pkg: builtins.hasAttr "updateScript" pkg &&
                      (if builtins.hasAttr "maintainers" pkg.meta
                       then (if builtins.isList pkg.meta.maintainers
                             then builtins.elem maintainer pkg.meta.maintainers
                             else maintainer == pkg.meta.maintainers
                            )
                       else false
                      )
                   )
                   (name: pkg: pkg)
                   pkgs-mozilla;

  # Look a package up by (dotted) attribute path in pkgs-mozilla and verify
  # it defines an updateScript.
  packageByName = name:
    let
      package = pkgs.lib.attrByPath (pkgs.lib.splitString "." name) null pkgs-mozilla;
    in
      if package == null then
        builtins.throw "Package with an attribute name `${name}` does not exist."
      else if ! builtins.hasAttr "updateScript" package then
        builtins.throw "Package with an attribute name `${name}` does not have a `passthru.updateScript` defined."
      else
        package;

  # Packages selected for this run, from either the `package` or the
  # `maintainer` argument.
  packages =
    if package != null then
      [ (packageByName package) ]
    else if maintainer != null then
      packagesWithUpdateScriptAndMaintainer maintainer
    else
      builtins.throw "No arguments provided.\n\n${helpText}";

  helpText = ''
    Please run:

        % nix-shell update.nix --argstr maintainer garbas

    to run all update scripts for all packages that list \`garbas\` as a maintainer
    and have \`updateScript\` defined, or:

        % nix-shell update.nix --argstr package firefox-nightly-bin

    to run update script for a specific package.
  '';

  # Shell snippet running one package's updateScript.  Output is captured in
  # <pname>.log; the log is shown (and kept) only when the script fails,
  # otherwise it is removed.
  runUpdateScript = package: ''
    echo -ne " - ${package.name}: UPDATING ..."\\r
    ${package.updateScript} &> ${(builtins.parseDrvName package.name).name}.log
    CODE=$?
    if [ "$CODE" != "0" ]; then
      echo " - ${package.name}: ERROR "
      echo ""
      echo "--- SHOWING ERROR LOG FOR ${package.name} ----------------------"
      echo ""
      cat ${(builtins.parseDrvName package.name).name}.log
      echo ""
      echo "--- SHOWING ERROR LOG FOR ${package.name} ----------------------"
      exit $CODE
    else
      rm ${(builtins.parseDrvName package.name).name}.log
    fi
    echo " - ${package.name}: DONE. "
  '';

in pkgs.stdenv.mkDerivation {
  name = "nixpkgs-mozilla-update-script";

  # nix-build is refused on purpose: update scripts need network access and
  # interactivity, so they must run inside nix-shell.
  buildCommand = ''
    echo ""
    echo "----------------------------------------------------------------"
    echo ""
    echo "Not possible to update packages using \`nix-build\`"
    echo ""
    echo "${helpText}"
    echo "----------------------------------------------------------------"
    exit 1
  '';

  # Entry point for nix-shell: list the selected packages, ask for
  # confirmation (unless dont_prompt), then run each updateScript in turn.
  shellHook = ''
    echo ""
    echo "Going to be running update for following packages:"
    echo "${builtins.concatStringsSep "\n" (map (x: " - ${x.name}") packages)}"
    echo ""
    if [ "${dont_prompt_str}" = "no" ]; then
      read -n1 -r -p "Press space to continue..." confirm
    else
      confirm=""
    fi
    if [ "$confirm" = "" ]; then
      echo ""
      echo "Running update for:"
      ${builtins.concatStringsSep "\n" (map runUpdateScript packages)}
      echo ""
      echo "Packages updated!"
      exit 0
    else
      echo "Aborting!"
      exit 1
    fi
  '';
}


--------------------------------------------------------------------------------