├── .gitignore
├── LICENSE
├── README.md
├── bin
│   ├── README.md
│   └── ddl
├── lib.nix
├── lib
│   └── default.nix
├── nixos
│   ├── cloud
│   │   ├── cloud-config.nix
│   │   ├── ntpd.nix
│   │   ├── signing-keys.nix
│   │   └── swapfile.nix
│   ├── default.nix
│   ├── ec2
│   │   ├── ami-ebs.nix
│   │   ├── ami-s3.nix
│   │   ├── autohostname.nix
│   │   └── default.nix
│   ├── qemu
│   │   └── default.nix
│   ├── vendor-module-list.nix
│   └── virtualbox
│       └── default.nix
├── pkgs
│   ├── ImageMagick
│   │   └── default.nix
│   ├── README.md
│   ├── angel
│   │   ├── default.nix
│   │   ├── less-logs.patch
│   │   └── static.patch
│   ├── ares
│   │   └── default.nix
│   ├── bundler
│   │   └── bundler-head.nix
│   ├── couchbase
│   │   ├── default.nix
│   │   └── patch-ep-engine
│   ├── curator
│   │   └── default.nix
│   ├── curl-loader
│   │   └── default.nix
│   ├── default.nix
│   ├── dynomite
│   │   └── default.nix
│   ├── erlang
│   │   └── default.nix
│   ├── exim
│   │   └── default.nix
│   ├── filebeat
│   │   └── default.nix
│   ├── flame-graph
│   │   └── default.nix
│   ├── galera-wsrep
│   │   └── default.nix
│   ├── graphviz
│   │   └── default.nix
│   ├── heavy-sync
│   │   └── default.nix
│   ├── incron
│   │   ├── default.nix
│   │   └── makefile.patch
│   ├── jenkins
│   │   ├── default.nix
│   │   └── plugins.nix
│   ├── jmaps
│   │   └── default.nix
│   ├── letsencrypt
│   │   └── default.nix
│   ├── logstash-all-plugins
│   │   └── default.nix
│   ├── mariadb-galera
│   │   └── default.nix
│   ├── mariadb
│   │   ├── default.nix
│   │   └── service.nix
│   ├── memcached
│   │   ├── damemtop
│   │   └── memcached-tool
│   ├── mergex
│   │   └── default.nix
│   ├── mkebs
│   │   ├── default.nix
│   │   └── mkebs.sh
│   ├── myrapi
│   │   └── default.nix
│   ├── mysql
│   │   ├── 5.5.x.nix
│   │   └── tztime-symlink-loop-hell.patch
│   ├── newrelic-php
│   │   └── default.nix
│   ├── newrelic-plugin-agent
│   │   └── default.nix
│   ├── newrelic-python
│   │   └── default.nix
│   ├── newrelic-sysmond
│   │   └── default.nix
│   ├── nginx
│   │   └── unstable.nix
│   ├── nix
│   │   └── default.nix
│   ├── nq
│   │   └── default.nix
│   ├── packer
│   │   └── default.nix
│   ├── percona-toolkit
│   │   └── default.nix
│   ├── perf-map-agent
│   │   └── default.nix
│   ├── php
│   │   ├── default.nix
│   │   ├── fix-paths-php7.patch
│   │   └── fix-paths.patch
│   ├── pivotal_agent
│   │   ├── Gemfile
│   │   ├── Gemfile.lock
│   │   ├── default.nix
│   │   └── gemset.nix
│   ├── rabbitmq-clusterer
│   │   └── default.nix
│   ├── rabbitmq
│   │   └── default.nix
│   ├── replicator
│   │   └── default.nix
│   ├── retry
│   │   ├── default.nix
│   │   └── retry
│   ├── runc
│   │   ├── default.nix
│   │   └── tasks.patch
│   ├── simp_le
│   │   ├── default.nix
│   │   └── python-packages.nix
│   ├── sproxy
│   │   └── default.nix
│   ├── sysdig
│   │   └── default.nix
│   ├── syslog-ng
│   │   └── default.nix
│   ├── terraform
│   │   └── default.nix
│   ├── thumbor
│   │   ├── default.nix
│   │   ├── thumbor-newrelic.nix
│   │   └── thumbor-nostrip.patch
│   ├── to-json-array
│   │   ├── default.nix
│   │   ├── to-json-array.cabal
│   │   └── to-json-array.hs
│   ├── twemproxy
│   │   └── default.nix
│   ├── unicron
│   │   └── default.nix
│   ├── upcast
│   │   ├── default.nix
│   │   └── ng.nix
│   ├── vault
│   │   ├── Godeps.nix
│   │   └── default.nix
│   ├── virtualbox
│   │   ├── default.nix
│   │   ├── guest-additions
│   │   │   └── default.nix
│   │   └── hardened.patch
│   ├── vk-aws-route53
│   │   ├── default.nis
│   │   └── default.nix
│   ├── vk-posix-pty
│   │   └── default.nix
│   ├── wai-app-static
│   │   └── default.nix
│   ├── xd
│   │   ├── default.nix
│   │   └── xd
│   └── ybc
│       └── default.nix
├── sdk.nix
├── to-nix
│   ├── golang
│   │   ├── README.md
│   │   └── example.nix
│   ├── haskell
│   │   └── README.md
│   └── python
│       ├── .gitignore
│       ├── LICENSE
│       ├── README.md
│       ├── pip.requirements
│       ├── python2nix
│       │   ├── __init__.py
│       │   ├── __main__.py
│       │   └── pip_deps.py
│       └── setup.py
└── wtf
    ├── README.md
    ├── api.nix
    ├── default.nix
    ├── shell.nix
    ├── test.nix
    └── wtf
/.gitignore:
--------------------------------------------------------------------------------
1 | /deps/
2 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2015-present, Zalora South East Asia Pte. Ltd
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining
4 | a copy of this software and associated documentation files (the
5 | "Software"), to deal in the Software without restriction, including
6 | without limitation the rights to use, copy, modify, merge, publish,
7 | distribute, sublicense, and/or sell copies of the Software, and to
8 | permit persons to whom the Software is furnished to do so, subject to
9 | the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included
12 | in all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
17 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
18 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
19 | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
20 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Microgram
2 |
3 | Microgram is a project that aims to provide tooling for the end-to-end lifecycle of cloud-native applications.
4 |
5 | Microgram uses the [Nix package manager](http://nixos.org/nix/) to express
6 | deployment topologies and to package software.
7 |
8 | Watch this space for updates.
9 |
10 |
11 |
12 | ### NixOS
13 |
14 | Microgram includes opinionated [NixOS 15.09](https://github.com/nixos/nixpkgs/tree/release-15.09) configuration targeted for deployments in IaaS environments.
15 |
16 | Currently supported backends: EC2 AMI (EBS and instance-store), QEMU (automated testing), VirtualBox (the resulting OVA also works with other providers that support OVAs, such as VMware).
17 |
18 | * [nixos/](nixos/) - opinionated base NixOS configuration, used to be a part of [Upcast](https://github.com/zalora/upcast)
19 |
20 | To build the Nginx test images:
21 |
22 | You need `<nixpkgs>` (branch `release-15.09`) in your `$NIX_PATH` to build.
23 | If you don't have a remote builder, replace `upcast build -t builder.example.com` with `nix-build`.
24 |
25 | ```bash
26 | # builds an OVA (exported by VirtualBox)
27 | upcast build -t builder.example.com -A ova tests/nginx.nix
28 |
29 | # builds a VDI (to be used with VirtualBox), might be faster as it doesn't have VirtualBox as a dependency
30 | upcast build -t builder.example.com -A vdi tests/nginx.nix
31 |
32 | # builds and runs a test in qemu
33 | upcast build -t builder.example.com -A qemu-test tests/nginx.nix
34 |
35 | export AWS_ACCOUNT_ID=...
36 | export AWS_X509_CERT=...
37 | export AWS_X509_KEY=...
38 | export AWS_ACCESS_KEY=...
39 | export AWS_SECRET_KEY=...
40 |
41 | # bundles an instance-store AMI
42 | upcast build -t builder.example.com -A s3-bundle tests/nginx.nix
43 |
44 | # builds a script that sets up an EBS AMI
45 | upcast build -t builder.example.com -A ebs-ami-builder tests/nginx.nix
46 | ```
47 |
48 | ### Packaging software
49 |
50 | * [to-nix/python](to-nix/python) - Python packaging documentation and tools (previously known as python2nix)
51 | * [to-nix/haskell](to-nix/haskell) - Haskell packaging documentation for programs using Cabal
52 | * [to-nix/golang](to-nix/golang) - Go packaging documentation for Nix
53 |
54 | ### Utility scripts
55 |
56 | * [bin/](bin) - Lightweight Nix-related tooling
57 |
58 | ### Additional tooling
59 |
60 | Other platform-related projects are:
61 |
62 | * [Defnix](https://github.com/zalora/defnix) - aims to replace some core NixOS components that were not designed for our use cases and provides
63 | a novel interface to supersede NixOS modules. It uses a lot of metaprogramming and [nix-exec](https://github.com/shlevy/nix-exec) for running effectful nix builds.
64 | * [Upcast](https://github.com/zalora/upcast) - provides AWS infrastructure provisioning (inspired by NixOps but decoupling infrastructure from software) and tooling that extends nix-build.
65 |
--------------------------------------------------------------------------------
/bin/README.md:
--------------------------------------------------------------------------------
1 | Nix convenience scripts.
2 |
3 | * `ddl`: describe derivations dependent on `nix-store`d shared object files;
4 | the inverse of `ldd`:
5 |
6 | ```
7 | % ddl /nix/store/x0advqg4yky9iyc2f2yfp77g44f8bn49-libXinerama-1.1.3/lib/libXinerama.so.1.0.0
8 | /nix/store/39h1msyil0g1aix9jzbg42haha1hmhnl-gtk+3-3.12.2.drv
9 | /nix/store/jpjv5iaks49sds1xa0a1mcjyj2p290kx-gtk+-2.24.25.drv
10 | ```
11 |
--------------------------------------------------------------------------------
/bin/ddl:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | file "$(realpath $1)" | grep -q "shared object" || {
4 | echo "Argument error: address a shared object file" >&2
5 | exit 1
6 | };
7 |
8 | find $(nix-store -q --referrers "$1") -executable -type f -name '*.so' -exec sh -c '
9 | realpath {} | xargs ldd | grep -o "/nix/store/.* " | xargs realpath | sed "s,^,$(nix-store -q --deriver {}):,"
10 | ' ';' | sort -u | grep "$(realpath $1)" | grep -Po '^.*(?=:)'
11 |
--------------------------------------------------------------------------------
/lib.nix:
--------------------------------------------------------------------------------
1 | {
2 | # common source directory filter used when building packages from git repos
3 | git-repo-filter =
4 | let f =
5 | path: type:
6 | let base = baseNameOf path;
7 | in type != "unknown" && base != ".git" && base != "result";
8 | in builtins.filterSource f;
9 | }
10 |
--------------------------------------------------------------------------------
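A minimal sketch of how the `git-repo-filter` helper above is typically consumed; the package name, relative import path, and install phase are illustrative, not taken from this repository. Since `git-repo-filter` is a partially applied `builtins.filterSource`, it takes a source directory and returns it with `.git` and `result` entries filtered out, which makes it suitable as a derivation `src`:

```nix
# Hypothetical consumer of git-repo-filter; names and paths are illustrative.
{ stdenv }:
let
  inherit (import ../lib.nix) git-repo-filter;
in stdenv.mkDerivation {
  name = "example-from-git-checkout";
  # copy the working tree into the store, minus .git and ./result symlinks
  src = git-repo-filter ./.;
  installPhase = "mkdir -p $out && cp -r . $out";
}
```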
/lib/default.nix:
--------------------------------------------------------------------------------
1 | let
2 | inherit (import ) lib sdk;
3 | inherit (lib) mapAttrsToList concatStringsSep makeSearchPath;
4 | in rec {
5 |
6 | makeBinPath = makeSearchPath "bin";
7 |
8 | mapcats = f: x: concatStringsSep "\n" (mapAttrsToList f x);
9 | }
10 |
--------------------------------------------------------------------------------
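For reference, a tiny illustration of the `mapcats` helper above, with arbitrary values; it maps a two-argument function over an attribute set and joins the results with newlines:

```nix
# Illustrative only (values are made up):
#   mapcats (name: value: "${name}=${value}") { foo = "1"; bar = "2"; }
# evaluates to "bar=2\nfoo=1" (attribute names are traversed in sorted order).
```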
/nixos/cloud/cloud-config.nix:
--------------------------------------------------------------------------------
1 | { config, lib, ... }: with lib;
2 | let
3 | inherit (import ) sdk pkgs nixpkgs-config;
4 | systemd-pkg = pkgs.systemd;
5 |
6 | cloudDefault = mkOverride 900;
7 |
8 | fd-limit.soft = "262140";
9 | fd-limit.hard = "524280";
10 | core-limit = "1048576"; # one gigabyte
11 | in
12 | {
13 | imports = [
14 | ./ntpd.nix
15 | ];
16 |
17 | nixpkgs.config = nixpkgs-config;
18 |
19 | # usually covered by things like security groups
20 | networking.firewall.enable = cloudDefault false;
21 |
22 | # likely not needed on a cloud box
23 | environment.noXlibs = cloudDefault true;
24 |
25 | time.timeZone = cloudDefault "UTC";
26 | i18n.supportedLocales = cloudDefault ["en_US.UTF-8/UTF-8"];
27 |
28 | services.ntp.enable = true;
29 | services.ntp.servers = [
30 | "0.amazon.pool.ntp.org"
31 | "1.amazon.pool.ntp.org"
32 | "3.amazon.pool.ntp.org"
33 | ];
34 |
35 | nix.package = sdk.nix;
36 | nix.readOnlyStore = true;
37 | nix.trustedBinaryCaches = [ "http://hydra.nixos.org" ];
38 |
39 | services.openssh.enable = cloudDefault true;
40 | services.openssh.passwordAuthentication = cloudDefault false;
41 | services.openssh.challengeResponseAuthentication = cloudDefault false;
42 |
43 | security.pam.loginLimits = [ # login sessions only, not systemd services
44 | { domain = "*"; type = "hard"; item = "core"; value = core-limit; }
45 | { domain = "*"; type = "soft"; item = "core"; value = core-limit; }
46 |
47 | { domain = "*"; type = "soft"; item = "nofile"; value = fd-limit.soft; }
48 | { domain = "*"; type = "hard"; item = "nofile"; value = fd-limit.hard; }
49 | ];
50 |
51 | systemd.extraConfig = ''
52 | DefaultLimitCORE=${core-limit}
53 | DefaultLimitNOFILE=${fd-limit.soft}
54 | '';
55 |
56 | environment.etc."systemd/coredump.conf".text = ''
57 | [Coredump]
58 | Storage=journal
59 | '';
60 |
61 | # Don't start a tty on the serial consoles.
62 | #systemd.services."serial-getty@ttyS0".enable = false;
63 | #systemd.services."serial-getty@hvc0".enable = false;
64 | #systemd.services."getty@tty1".enable = false;
65 | #systemd.services."autovt@".enable = false;
66 |
67 | boot.kernelParams = [ "panic=1" "boot.panic_on_fail" ];
68 |
69 | boot.tmpOnTmpfs = cloudDefault false;
70 | boot.cleanTmpDir = cloudDefault true;
71 | boot.vesa = false;
72 |
73 | environment.systemPackages = [
74 | config.boot.kernelPackages.sysdig
75 | config.boot.kernelPackages.perf
76 | ];
77 | boot.extraModulePackages = [ config.boot.kernelPackages.sysdig ];
78 | boot.kernelModules = [ "sysdig-probe" ];
79 |
80 | boot.kernel.sysctl = {
81 | # allows control of core dumps with systemd-coredumpctl
82 | "kernel.core_pattern" = cloudDefault "|${systemd-pkg}/lib/systemd/systemd-coredump %p %u %g %s %t %e";
83 |
84 | "fs.file-max" = cloudDefault fd-limit.hard;
85 |
86 | # moar ports
87 | "net.ipv4.ip_local_port_range" = cloudDefault "10000 65535";
88 |
89 | # should be the default, really
90 | "net.ipv4.tcp_slow_start_after_idle" = cloudDefault "0";
91 | "net.ipv4.tcp_early_retrans" = cloudDefault "1"; # 3.5+
92 |
93 | # backlogs
94 | "net.core.netdev_max_backlog" = cloudDefault "4096";
95 | "net.core.somaxconn" = cloudDefault "4096";
96 |
97 | # tcp receive flow steering (newer kernels)
98 | "net.core.rps_sock_flow_entries" = cloudDefault "32768";
99 |
100 | # max bounds for buffer autoscaling (16 megs for 10 gbe)
101 | #"net.core.rmem_max" = cloudDefault "16777216";
102 | #"net.core.wmem_max" = cloudDefault "16777216";
103 | #"net.core.optmem_max" = cloudDefault "40960";
104 | #"net.ipv4.tcp_rmem" = cloudDefault "4096 87380 16777216";
105 | #"net.ipv4.tcp_wmem" = cloudDefault "4096 65536 16777216";
106 |
107 | "net.ipv4.tcp_max_syn_backlog" = cloudDefault "8096";
108 |
109 | # read http://vincent.bernat.im/en/blog/2014-tcp-time-wait-state-linux.html
110 | "net.ipv4.tcp_tw_reuse" = cloudDefault "1";
111 |
112 | # vm
113 | #"vm.overcommit_memory" = lib.mkDefault "2"; # no overcommit
114 | #"vm.overcommit_ratio" = "100";
115 | "vm.swappiness" = cloudDefault "10"; # discourage swap
116 |
117 | # just in case for postgres and friends
118 | "kernel.msgmnb" = cloudDefault "65536";
119 | "kernel.msgmax" = cloudDefault "65536";
120 | "kernel.shmmax" = cloudDefault "68719476736";
121 | };
122 | }
123 |
--------------------------------------------------------------------------------
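The `cloudDefault = mkOverride 900` pattern above keeps every setting in this file overridable: priority 900 loses to a plain assignment (priority 100) and to `mkForce` (50), but beats `mkDefault` (1000). A hedged sketch of a downstream machine module that overrides two of these defaults, assuming `cloud-config.nix` is already in the module list (the values are placeholders):

```nix
# Hypothetical machine module layered on top of cloud-config.nix.
{ lib, ... }:
{
  # A plain assignment has priority 100 and therefore beats mkOverride 900.
  networking.firewall.enable = true;

  # mkForce (priority 50) wins as well; mkDefault (1000) would lose.
  time.timeZone = lib.mkForce "Asia/Singapore";
}
```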
/nixos/cloud/ntpd.nix:
--------------------------------------------------------------------------------
1 | { config, lib, pkgs, ... }:
2 |
3 | with lib;
4 |
5 | let
6 |
7 | inherit (pkgs) ntp;
8 |
9 | stateDir = "/var/lib/ntp";
10 |
11 | ntpUser = "ntp";
12 |
13 | configFile = pkgs.writeText "ntp.conf" ''
14 | driftfile ${stateDir}/ntp.drift
15 |
16 | restrict 127.0.0.1
17 | restrict -6 ::1
18 |
19 | interface listen all
20 | ${toString (map (interface: "interface ignore ${interface}\n") config.services.ntp.ignoreInterfaces)}
21 |
22 | ${toString (map (server: "server " + server + " iburst\n") config.services.ntp.servers)}
23 | '';
24 |
25 | ntpFlags = lib.concatStringsSep " " ([
26 | "-c ${configFile}"
27 | "-u ${ntpUser}:nogroup"
28 | ]);
29 | in
30 | { options = {
31 | services.ntp = {
32 | enable = mkOption {
33 | default = !config.boot.isContainer;
34 | description = ''
35 | Whether to synchronise your machine's time using the NTP
36 | protocol.
37 | '';
38 | };
39 |
40 | servers = mkOption {
41 | default = [
42 | "0.nixos.pool.ntp.org"
43 | "1.nixos.pool.ntp.org"
44 | "2.nixos.pool.ntp.org"
45 | "3.nixos.pool.ntp.org"
46 | ];
47 | description = ''
48 | The set of NTP servers from which to synchronise.
49 | '';
50 | };
51 |
52 | ignoreInterfaces = mkOption {
53 | default = [];
54 | description = ''
55 | Don't try binding on any of these interfaces.
56 | '';
57 | };
58 | };
59 | };
60 | config = mkIf config.services.ntp.enable {
61 | environment.systemPackages = [ pkgs.ntp ];
62 | users.users = singleton
63 | { name = ntpUser;
64 | uid = config.ids.uids.ntp;
65 | description = "NTP daemon user";
66 | home = stateDir;
67 | };
68 | systemd.services.ntpd =
69 | { description = "NTP Daemon";
70 | wantedBy = [ "multi-user.target" ];
71 | preStart =
72 | ''
73 | mkdir -m 0755 -p ${stateDir}
74 | chown ${ntpUser} ${stateDir}
75 | '';
76 | serviceConfig = {
77 | ExecStart = "@${ntp}/bin/ntpd ntpd -g ${ntpFlags}";
78 | Type = "forking";
79 | };
80 | };
81 | };
82 | }
83 |
--------------------------------------------------------------------------------
/nixos/cloud/signing-keys.nix:
--------------------------------------------------------------------------------
1 | { config, pkgs, lib, ... }:
2 | {
3 | system.activationScripts.buildSigningKeys = lib.stringAfter ["users"] ''
4 | if [[ ! -f /etc/nix/signing-key.sec ]]; then
5 | mkdir -p /etc/nix
6 | # ssh-keygen needs users to exist
7 | ${pkgs.openssh}/bin/ssh-keygen -b 4096 -N "" -f /etc/nix/signing-key.sec
8 | rm -f /etc/nix/signing-key.sec.pub
9 | ${pkgs.openssl}/bin/openssl rsa -in /etc/nix/signing-key.sec -pubout > /etc/nix/signing-key.pub
10 | chmod a-rwx,g+r /etc/nix/signing-key.*
11 | chown nobody:users /etc/nix/signing-key.*
12 | fi
13 | '';
14 | }
15 |
--------------------------------------------------------------------------------
/nixos/cloud/swapfile.nix:
--------------------------------------------------------------------------------
1 | { config, lib, pkgs, ... }:
2 |
3 | let
4 |
5 | cfg = config.services.swapfile;
6 |
7 | inherit (lib) mkOption types;
8 |
9 | in {
10 |
11 | options = {
12 | services.swapfile = {
13 | enabled = mkOption {
14 | type = types.bool;
15 | default = false;
16 | description = ''
17 | Whether to heuristically create a swapfile and activate it.
18 | The size of the swapfile is set to the minimum of the total amount
19 | of memory and half of the size of the disk where the swapfile is stored.
20 | '';
21 | };
22 | filePath = mkOption {
23 | type = types.str;
24 | description = ''
25 | The path to the swapfile
26 | '';
27 | };
28 | memoryLimit = mkOption {
29 | type = types.int;
30 | default = 12*1024;
31 | description = ''
32 | If the host has more than this many MB of RAM, no swapfile is
33 | activated.
34 | '';
35 | };
36 | };
37 | };
38 |
39 | config = {
40 | systemd.services.swapfile = {
41 | description = "EC2: setup swapfile";
42 |
43 | wantedBy = [ "multi-user.target" ];
44 | before = [ "multi-user.target" ];
45 |
46 | path = [ pkgs.coreutils pkgs.utillinux pkgs.gnugrep ];
47 |
48 | script = ''
49 | memSize=$(grep MemTotal: /proc/meminfo | tr -d '[:alpha:][:space:]:')
50 | if (( $memSize > ${toString (cfg.memoryLimit * 1024)} )); then
51 | echo "Instance has enough memory, skipping swapfile"
52 | exit 0
53 | fi
54 | if ! [ -w "$(dirname "${cfg.filePath}")" ]; then
55 | echo "Can't write to swapfile's dir, skipping swapfile"
56 | exit 0
57 | fi
58 | if ! [ -f "${cfg.filePath}" ]; then
59 | diskSize=$(($(df --output=size "$(dirname "${cfg.filePath}")" | tail -n1) / 2))
60 | if (( $diskSize < $memSize )); then
61 | swapSize=$diskSize
62 | else
63 | swapSize=$memSize
64 | fi
65 | dd if=/dev/zero of="${cfg.filePath}" bs=1K count=$swapSize
66 | mkswap "${cfg.filePath}"
67 | fi
68 | chmod 0600 "${cfg.filePath}"
69 | swapon "${cfg.filePath}" || true
70 | '';
71 |
72 | serviceConfig = {
73 | Type = "oneshot";
74 | RemainAfterExit = true;
75 | };
76 | };
77 | };
78 | }
79 |
--------------------------------------------------------------------------------
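A brief sketch of setting the swapfile options declared above from a machine configuration, assuming `swapfile.nix` is already imported; all values are illustrative, and only `filePath` has no default:

```nix
# Illustrative values only; assumes swapfile.nix is already imported.
{
  services.swapfile = {
    enabled = true;           # note the option name is `enabled`, not `enable`
    filePath = "/swapfile";   # required: the option declares no default
    memoryLimit = 8 * 1024;   # per the option docs: skip swap on hosts with more RAM than this (MB)
  };
}
```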
/nixos/default.nix:
--------------------------------------------------------------------------------
1 | { configuration
2 | , system ? "x86_64-linux"
3 | }:
4 |
5 | let
6 | eval-config = import ;
7 | baseModules = [stub-module]
8 | ++ import ;
9 |
10 | lib = import ;
11 |
12 | stub = with lib; mkOption {
13 | type = types.attrsOf types.unspecified;
14 | default = {
15 | enable = false;
16 | nssmdns = false;
17 | nsswins = false;
18 | syncPasswordsByPam = false;
19 | isContainer = false;
20 | devices = [];
21 | };
22 | };
23 |
24 | stub-module = {
25 | options = {
26 | services.xserver = stub;
27 | services.bind = stub;
28 | services.dnsmasq = stub;
29 | services.avahi = stub;
30 | services.samba = stub;
31 | services.mstpd = stub;
32 | services.resolved = stub;
33 | services.fprintd = stub;
34 | security.grsecurity = stub;
35 | services.virtualboxGuest = stub;
36 | users.ldap = stub;
37 | krb5 = stub;
38 | powerManagement = stub;
39 | security.pam.usb = stub;
40 | security.pam.mount = stub;
41 | security.pam.oath = stub;
42 | boot.isContainer = lib.mkOption { default = false; };
43 | boot.initrd.luks = stub;
44 | networking.wireless = stub;
45 | networking.connman = stub;
46 | virtualisation.vswitch = stub;
47 | };
48 | config = {
49 | services.virtualboxGuest = true; # renamed
50 | services.xserver.enable = false;
51 | powerManagement.enable = false;
52 | powerManagement.resumeCommands = "";
53 | powerManagement.powerUpCommands = "";
54 | powerManagement.powerDownCommands = "";
55 |
56 | nixpkgs.config = (import ).nixpkgs-config;
57 | };
58 | };
59 |
60 | eval = eval-config {
61 | inherit system baseModules;
62 | modules = [ configuration ];
63 | };
64 | in rec {
65 | inherit (eval) config options;
66 |
67 | system = eval.config.system.build.toplevel;
68 | }
69 |
--------------------------------------------------------------------------------
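A rough sketch of invoking the evaluator above; `my-machine.nix` is a placeholder for any NixOS configuration module. It follows the function's own interface: `configuration` is required, `system` defaults to `x86_64-linux`, and the returned set exposes `config`, `options`, and the built `system` toplevel:

```nix
# Hypothetical caller of nixos/default.nix; my-machine.nix is a placeholder.
let
  machine = import ./nixos {
    configuration = ./my-machine.nix;   # any NixOS module
    # system defaults to "x86_64-linux"
  };
in
  machine.system   # equals machine.config.system.build.toplevel
```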
/nixos/ec2/ami-ebs.nix:
--------------------------------------------------------------------------------
1 | { config
2 | , lib
3 | , ... }:
4 | let
5 | inherit (import ) pkgs ugpkgs;
6 | inherit (lib) optionalString;
7 | in
8 | (as: { config.system.build = as; }) rec {
9 | # The builder instance IAM role must be able to manage EBS volumes and register images.
10 | # Unlike its ami-s3 cousin, this script doesn't do builds within derivations, but generates
11 | # a script that needs to run in a privileged environment.
12 | aminator =
13 | let
14 | toplevel = config.system.build.toplevel;
15 | graph = ugpkgs.fns.exportGraph toplevel;
16 | in pkgs.runCommand "aminate-ebs" {
17 | __noChroot = true;
18 | preferLocalBuild = true;
19 | } ''
20 | echo env graph=${graph} toplevel=${toplevel} ${ugpkgs.mkebs} | tee $out
21 | chmod +x $out
22 | '';
23 | }
24 |
--------------------------------------------------------------------------------
/nixos/ec2/ami-s3.nix:
--------------------------------------------------------------------------------
1 | { config
2 | , lib
3 | , aws-env ? {}
4 | , upload-context ? rec {
5 | region = "eu-west-1";
6 | bucket = "platform-${region}";
7 | }
8 | , ... }:
9 | let
10 | inherit (import ) pkgs;
11 |
12 | inherit (lib) foldAttrs listToAttrs nameValuePair optionalString;
13 |
14 | ec2-bundle-image = "${pkgs.ec2_ami_tools}/bin/ec2-bundle-image";
15 | ec2-upload-bundle = "${pkgs.ec2_ami_tools}/bin/ec2-upload-bundle";
16 | awscli = "${pkgs.awscli}/bin/aws";
17 | jq = "${pkgs.jq}/bin/jq";
18 |
19 | env =
20 | let
21 | getEnvs = xs: listToAttrs (map (x: nameValuePair x (builtins.getEnv x)) xs);
22 |
23 | base1 = getEnvs [ "AWS_ACCOUNT_ID" "AWS_X509_CERT" "AWS_X509_KEY" ];
24 | base2 = getEnvs [ "AWS_ACCESS_KEY" "AWS_SECRET_KEY" ];
25 | more = {
26 | AWS_ACCESS_KEY_ID = env.AWS_ACCESS_KEY;
27 | AWS_SECRET_ACCESS_KEY = env.AWS_SECRET_KEY;
28 | };
29 | in base1 // base2 // more // aws-env // { __noChroot = true; };
30 |
31 | ami = config.system.build.amazonImage;
32 | ami-name = "$(basename ${ami})-nixos-platform";
33 | in
34 | (as: { config.system.build = as; }) rec {
35 | # 1. bundle (chop the system image into parts & sign)
36 | s3Bundle = pkgs.runCommand "ami-ec2-bundle-image" env ''
37 | mkdir -p $out
38 |
39 | ${ec2-bundle-image} \
40 | -c "$AWS_X509_CERT" -k "$AWS_X509_KEY" -u "$AWS_ACCOUNT_ID" \
41 | -i "${ami}/nixos.img" --arch x86_64 -d $out
42 | '';
43 |
44 | # 2. upload (copy chopped parts & manifest to S3)
45 | s3Upload = pkgs.runCommand "ami-ec2-upload-image" env ''
46 | export PATH=${pkgs.curl}/bin:$PATH
47 | export CURL_CA_BUNDLE=${pkgs.cacert}/etc/ca-bundle.crt
48 |
49 | ${ec2-upload-bundle} \
50 | -b "${upload-context.bucket}/${ami-name}" \
51 | -d ${s3Bundle} -m ${s3Bundle}/nixos.img.manifest.xml \
52 | -a "$AWS_ACCESS_KEY" -s "$AWS_SECRET_KEY" --region ${upload-context.region}
53 |
54 | echo "${upload-context.bucket}/${ami-name}/nixos.img.manifest.xml" > $out
55 | '';
56 |
57 | # 3. register (register the manifest with ec2), get AMI id
58 | s3Register = pkgs.runCommand "ami-ec2-register-image" env ''
59 | set -o pipefail
60 |
61 | ${awscli} ec2 register-image \
62 | --region "${upload-context.region}" \
63 | --name "${ami-name}" \
64 | --description "${ami-name}" \
65 | --image-location "$(cat ${s3Upload})" \
66 | --virtualization-type "hvm" | ${jq} -r .ImageId > $out || rm -f $out
67 | cat $out
68 | '';
69 | }
70 |
--------------------------------------------------------------------------------
/nixos/ec2/autohostname.nix:
--------------------------------------------------------------------------------
1 | { config, lib, pkgs, ... }:
2 | let
3 | inherit (lib) concatStringsSep mapAttrsToList mkOverride mkOption types optionalString;
4 |
5 | inherit (import ) sdk pkgs;
6 | retry = "${sdk.retry}/bin/retry";
7 | base64 = "${pkgs.coreutils}/bin/base64";
8 | jq = "/usr/bin/env LD_LIBRARY_PATH=${pkgs.jq}/lib ${pkgs.jq}/bin/jq";
9 | curl = "${pkgs.curl}/bin/curl -s --retry 3 --retry-delay 0 --fail";
10 | curl-nofail = "${pkgs.curl}/bin/curl -s --retry 3 --retry-delay 0";
11 | wget = "${pkgs.wget}/bin/wget -q --retry-connrefused -O -";
12 | awk = "${pkgs.gawk}/bin/awk";
13 | openssl = "${pkgs.openssl}/bin/openssl";
14 | hostname = "${pkgs.nettools}/bin/hostname";
15 | ip = "${pkgs.iproute}/sbin/ip";
16 | bash = "${pkgs.bash}/bin/bash";
17 | xargs = "${pkgs.findutils}/bin/xargs";
18 |
19 | retry-wrapper = script: pkgs.writeScript "retry-${script.name}" ''
20 | ${retry} ${script}
21 | '';
22 |
23 | # TODO: move to a proper package
24 | register-hostname = {
25 | zoneId, zone, iamCredentialName,
26 | useLocalHostname,
27 | query ? if useLocalHostname then "local-ipv4" else "public-hostname",
28 | recordType ? if useLocalHostname then "A" else "CNAME"
29 | }: sdk.writeBashScriptOverride ["SC2046"] "ec2-register-hostname-${zone}" ''
30 |
31 | export SSL_CERT_FILE='${sdk.cacert}/etc/ssl/certs/ca-bundle.crt'
32 |
33 | date=$(${curl} -I https://route53.amazonaws.com/date | ${awk} '/^Date: / {sub("Date: ", "", $0); sub("\\r", "", $0); print $0}')
34 |
35 | iam="${iamCredentialName}"
36 | if [ -z "$iam" ]; then
37 | # autodetect
38 | set -- $(${curl} http://169.254.169.254/latest/meta-data/iam/security-credentials/ 2>/dev/null | head -1)
39 | iam="$1"
40 | if [ -z "$iam" ]; then
41 | exit 1
42 | fi
43 | fi
44 |
45 | set -- $(${curl} http://169.254.169.254/latest/user-data | ${jq} -r .hostname || true)
46 | [ -n "$1" ] && HOSTNAME="$1"; HOSTNAME="$HOSTNAME.${zone}"
47 |
48 | set -- $(${wget} http://169.254.169.254/latest/meta-data/iam/security-credentials/${iamCredentialName} \
49 | | ${jq} -r '.SecretAccessKey, .AccessKeyId, .Token')
50 |
51 | signature="$(echo -n "$date" | ${openssl} dgst -binary -sha1 -hmac "$1" | ${base64})"
52 | auth_header="X-Amzn-Authorization: AWS3-HTTPS AWSAccessKeyId=$2,Algorithm=HmacSHA1,Signature=$signature"
53 | record_value=$(${wget} http://169.254.169.254/latest/meta-data/${query})
54 |
55 | ${curl-nofail} -d @/dev/stdin \
56 | -H "Content-Type: text/xml" \
57 | -H "x-amz-date: $date" \
58 | -H "$auth_header" \
59 | -H "x-amz-security-token: $3" \
60 | -X POST https://route53.amazonaws.com/2013-04-01/hostedzone/${zoneId}/rrset <<__EOF
61 | <?xml version="1.0" encoding="UTF-8"?>
62 | <ChangeResourceRecordSetsRequest xmlns="https://route53.amazonaws.com/doc/2013-04-01/">
63 |   <ChangeBatch>
64 |     <Changes>
65 |       <Change>
66 |         <Action>UPSERT</Action>
67 |         <ResourceRecordSet>
68 |           <Name>$HOSTNAME</Name>
69 |           <Type>${recordType}</Type>
70 |           <TTL>30</TTL>
71 |           <ResourceRecords><ResourceRecord>
72 |             <Value>$record_value</Value>
73 |           </ResourceRecord></ResourceRecords>
74 |         </ResourceRecordSet>
75 |       </Change>
76 |     </Changes>
77 |   </ChangeBatch>
78 | </ChangeResourceRecordSetsRequest>
79 | __EOF
80 |
81 | curl_error=$?
82 |
83 | echo
84 | exit $curl_error
85 | '';
86 |
87 | ec2-autohostname = ''
88 | set -efuo pipefail
89 | ${ip} route delete blackhole 169.254.169.254 2>/dev/null || true
90 | # registering route 53 hostnames if any:
91 | echo ${concatStringsSep " " (
92 | mapAttrsToList (_: args: retry-wrapper (register-hostname {
93 | zone = args.name;
94 | inherit (args) zoneId iamCredentialName useLocalHostname;
95 | })) config.ec2.route53RegisterHostname)} | ${xargs} -n1 -P2 ${bash}
96 | '';
97 | in
98 | {
99 | options = {
100 | ec2.route53RegisterHostname = mkOption {
101 | type = types.attrsOf (types.submodule ({ lib, name, ... }: with lib; {
102 | options = {
103 | name = mkOption {
104 | type = types.string;
105 | default = name;
106 | description = ''
107 | Route53 Hosted Domain Name (can be a sub-domain of a more high-level domain name).
108 | '';
109 | };
110 |
111 | zoneId = mkOption {
112 | type = types.string;
113 | example = "ZOZONEZONEZONE";
114 | description = ''
115 | Route53 Hosted Zone ID for the domain specified in name;
116 | '';
117 | };
118 |
119 | iamCredentialName = mkOption {
120 | type = types.string;
121 | example = "doge-iam-dns-profile";
122 | default = "";
123 | description = ''
124 | Instance IAM Role name. Leave empty to autodetect.
125 | '';
126 | };
127 |
128 | useLocalHostname = mkOption {
129 | type = types.bool;
130 | default = false;
131 | description = ''
132 | CNAMEs to the internal hostname. Useful when doing VPC tunneling.
133 | '';
134 | };
135 | };
136 | }));
137 |
138 | default = {};
139 | };
140 | };
141 |
142 | config = {
143 | systemd.services.ec2-autohostname = {
144 | description = "EC2: periodically apply dynamic hostname";
145 |
146 | wantedBy = [ "multi-user.target" ];
147 | after = [ "fetch-ec2-data.service" ];
148 |
149 | script = ''
150 | while true; do
151 | ${sdk.writeBashScript "ec2-autohostname" ec2-autohostname}
152 | sleep $((120 + $RANDOM % 40))m
153 | done
154 | '';
155 |
156 | serviceConfig.Restart = "always";
157 | unitConfig.X-StopOnReconfiguration = true;
158 | };
159 | };
160 | }
161 |
--------------------------------------------------------------------------------
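A hedged example of setting the `ec2.route53RegisterHostname` option declared above; the zone name and zone ID are placeholders. The attribute name defaults to the hosted zone name, and `useLocalHostname` switches the record from a public-hostname CNAME to an A record pointing at the instance's local IPv4:

```nix
# Placeholder zone values; the attribute name doubles as the hosted zone name.
{
  ec2.route53RegisterHostname."internal.example.com" = {
    zoneId = "ZOZONEZONEZONE";   # Route53 Hosted Zone ID (example value from the option)
    useLocalHostname = true;     # register an A record with the instance's local IPv4
    # iamCredentialName = "";    # left empty, the instance IAM role is autodetected
  };
}
```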
/nixos/ec2/default.nix:
--------------------------------------------------------------------------------
1 | { config, lib, pkgs, ... }:
2 | let
3 | inherit (lib) mkOverride mkDefault optionalString mkForce;
4 | in
5 | {
6 | imports = [
7 |
8 | ./ami-ebs.nix
9 | ./ami-s3.nix
10 |
11 | # fetch-ec2-data and print-host-key
12 | ];
13 |
14 | config = {
15 | nixpkgs.system = mkOverride 100 "x86_64-linux";
16 |
17 | #boot.loader.grub.extraPerEntryConfig = mkIf isEc2Hvm ( mkOverride 10 "root (hd0,0)" );
18 |
19 | # By default, 'fetch-ec2-data' assigns hostnames and writes SSH host keys
20 | # from user data. We don't want that.
21 | systemd.services."fetch-ec2-data".script = mkForce ''
22 | wget="wget -q --retry-connrefused -O -"
23 |
24 | # Don't download the SSH key if it has already been injected
25 | # into the image (a Nova feature).
26 | if ! [ -e /root/.ssh/authorized_keys ]; then
27 | echo "obtaining SSH key..."
28 | mkdir -m 0700 -p /root/.ssh
29 | $wget http://169.254.169.254/1.0/meta-data/public-keys/0/openssh-key > /root/key.pub
30 | if [ $? -eq 0 -a -e /root/key.pub ]; then
31 | if ! grep -q -f /root/key.pub /root/.ssh/authorized_keys; then
32 | cat /root/key.pub >> /root/.ssh/authorized_keys
33 | echo "new key added to authorized_keys"
34 | fi
35 | chmod 600 /root/.ssh/authorized_keys
36 | rm -f /root/key.pub
37 | fi
38 | fi
39 | '';
40 |
41 | # By default, 'print-host-key' assumes DSA.
42 | systemd.services."print-host-key".script = mkForce ''
43 | # Print the host public key on the console so that the user
44 | # can obtain it securely by parsing the output of
45 | # ec2-get-console-output.
46 | echo "-----BEGIN SSH HOST KEY FINGERPRINTS-----" > /dev/console
47 | ${config.programs.ssh.package}/bin/ssh-keygen -l -f /etc/ssh/ssh_host_rsa_key.pub > /dev/console
48 | echo "-----END SSH HOST KEY FINGERPRINTS-----" > /dev/console
49 | '';
50 | };
51 | }
52 |
--------------------------------------------------------------------------------
/nixos/qemu/default.nix:
--------------------------------------------------------------------------------
1 | { config, lib, ... }:
2 | let
3 | inherit (lib) mkForce;
4 | in
5 | {
6 | imports = [
7 |
8 |
9 |
10 | ];
11 |
12 | networking.hostName = mkForce "client"; # referenced in perl code as $client
13 | nix.readOnlyStore = mkForce false;
14 | # test framework mounts 9p *before* tmpfs, so 9p stuff gets masked
15 | boot.tmpOnTmpfs = mkForce false;
16 | # .. or removed
17 | boot.cleanTmpDir = mkForce false;
18 |
19 | virtualisation = {
20 | memorySize = 4096;
21 | diskSize = 8192;
22 | graphics = false;
23 | #useBootLoader = true;
24 | };
25 | }
26 |
--------------------------------------------------------------------------------
/nixos/vendor-module-list.nix:
--------------------------------------------------------------------------------
1 | [
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 | ]
63 |
--------------------------------------------------------------------------------
/nixos/virtualbox/default.nix:
--------------------------------------------------------------------------------
1 | # used to be nixops/nix/virtualbox-image-nixops.nix
2 | { config, pkgs, lib, ... }:
3 |
4 | let
5 | clientKeyPath = "/root/.vbox-nixops-client-key";
6 | in
7 | {
8 | imports = [
9 |
10 |
11 | ];
12 |
13 | virtualisation.virtualbox.guest.enable = true;
14 |
15 | services.openssh.authorizedKeysFiles = [ ".vbox-nixops-client-key" ];
16 |
17 | # VirtualBox doesn't seem to lease IP addresses persistently, so we
18 | # may get a different IP address if dhcpcd is restarted. So don't
19 | # restart dhcpcd.
20 | systemd.services.dhcpcd.restartIfChanged = false;
21 |
22 | nix = {
23 | extraOptions = ''
24 | allow-unsafe-native-code-during-evaluation = true
25 | '';
26 | };
27 |
28 | users.extraUsers.root = lib.mkDefault {
29 | hashedPassword = null;
30 | password = "root";
31 | };
32 | services.openssh.enable = true;
33 | services.openssh.passwordAuthentication = lib.mkDefault true;
34 | services.openssh.permitRootLogin = lib.mkDefault "yes";
35 | services.openssh.challengeResponseAuthentication = lib.mkDefault true;
36 |
37 | boot.loader.grub.device = "/dev/sda";
38 | fileSystems."/".device = "/dev/disk/by-label/nixos";
39 | }
40 |
--------------------------------------------------------------------------------
/pkgs/ImageMagick/default.nix:
--------------------------------------------------------------------------------
1 | { lib
2 | , stdenv
3 | , fetchurl
4 | , pkgconfig
5 | , bzip2
6 | , fontconfig
7 | , freetype
8 | , ghostscript ? null
9 | , libjpeg
10 | , libpng
11 | , libtiff
12 | , libxml2
13 | , zlib
14 | , libtool
15 | , jasper
16 | , libX11
17 | , tetex ? null
18 | , librsvg ? null
19 | }:
20 |
21 | let
22 |
23 | version = "6.9.2-10";
24 |
25 | arch =
26 | if stdenv.system == "i686-linux" then "i686"
27 | else if stdenv.system == "x86_64-linux" || stdenv.system == "x86_64-darwin" then "x86-64"
28 | else throw "ImageMagick is not supported on this platform.";
29 |
30 | ghostscriptEnabled = (stdenv.system != "x86_64-darwin" && ghostscript != null);
31 |
32 | in
33 |
34 | stdenv.mkDerivation rec {
35 | name = "ImageMagick-${version}";
36 |
37 | src = fetchurl {
38 | urls = [
39 | "http://ftp.sunet.se/pub/multimedia/graphics/ImageMagick/releases/${name}.tar.xz"
40 | "http://distfiles.macports.org/ImageMagick/ImageMagick-${version}.tar.xz"
41 | "mirror://imagemagick/releases/${name}.tar.xz"
42 | ];
43 | sha256 = "0g01q8rygrf977d9rpixg1bhnavqfwzz30qpn7fj17yn8fx6ybys";
44 | };
45 |
46 | enableParallelBuilding = true;
47 |
48 | preConfigure = if tetex != null then
49 | ''
50 | export DVIDecodeDelegate=${tetex}/bin/dvips
51 | '' else "";
52 |
53 | configureFlags =
54 | [ "--with-frozenpaths" ]
55 | ++ [ "--with-gcc-arch=${arch}" ]
56 | ++ lib.optional (librsvg != null) "--with-rsvg"
57 | ++ lib.optionals ghostscriptEnabled
58 | [ "--with-gs-font-dir=${ghostscript}/share/ghostscript/fonts"
59 | "--with-gslib"
60 | ];
61 |
62 | propagatedBuildInputs =
63 | [ bzip2 fontconfig freetype libjpeg libpng libtiff libxml2 zlib librsvg
64 | libtool jasper libX11
65 | ] ++ lib.optional ghostscriptEnabled ghostscript;
66 |
67 | buildInputs = [ tetex pkgconfig ];
68 |
69 | postInstall = ''(cd "$out/include" && ln -s ImageMagick* ImageMagick)'';
70 |
71 | meta = with stdenv.lib; {
72 | homepage = http://www.imagemagick.org/;
73 | description = "A software suite to create, edit, compose, or convert bitmap images";
74 | platforms = platforms.linux ++ [ "x86_64-darwin" ];
75 | };
76 | }
77 |
--------------------------------------------------------------------------------
/pkgs/README.md:
--------------------------------------------------------------------------------
1 | ## platform pkgs
2 |
3 | Please keep these in `callPackage` style and add everything to `./default.nix`.
4 |
--------------------------------------------------------------------------------
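Since `pkgs/default.nix` itself is not reproduced in this dump, here is only a sketch of the `callPackage` convention the note above asks for; the attribute names, function signature, and overrides are illustrative:

```nix
# Illustrative fragment only; the real pkgs/default.nix is not shown here.
{ pkgs, haskellPackages }:
with pkgs; {
  # callPackage injects arguments such as stdenv, fetchurl, libpng, ... from the package set
  imagemagick = callPackage ./ImageMagick { ghostscript = null; };
  # Haskell packages (angel, ares, ...) go through haskellPackages.callPackage
  angel = haskellPackages.callPackage ./angel {};
}
```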
/pkgs/angel/default.nix:
--------------------------------------------------------------------------------
1 | { mkDerivation, base, configurator, containers, fetchgit, hspec
2 | , mtl, old-locale, process, stdenv, stm, text, time, unix
3 | , unordered-containers
4 | }:
5 | mkDerivation {
6 | pname = "angel";
7 | # do not bump until https://github.com/MichaelXavier/Angel/issues/40 is fixed
8 | version = "0.5.1";
9 | src = fetchgit {
10 | url = "https://github.com/zalora/Angel.git";
11 | # branch backports
12 | rev = "ff7f2dfc08edeede9c9da23ac4e6aa9afd2a56cc";
13 | sha256 = "0643829fa4378f22220759e17bab8422c50ec8fb15f9ad2474aa04322b0d9a3f";
14 | };
15 | patches = [ ./static.patch ./less-logs.patch ];
16 | isLibrary = false;
17 | isExecutable = true;
18 | doCheck = false;
19 | executableHaskellDepends = [
20 | base configurator containers mtl old-locale process stm text time
21 | unix unordered-containers
22 | ];
23 | homepage = "http://github.com/MichaelXavier/Angel";
24 | description = "Process management and supervision daemon";
25 | license = stdenv.lib.licenses.bsd3;
26 | }
27 |
--------------------------------------------------------------------------------
/pkgs/angel/less-logs.patch:
--------------------------------------------------------------------------------
1 | diff --git a/src/Angel/Job.hs b/src/Angel/Job.hs
2 | index f62d6fd..dd7725e 100644
3 | --- a/src/Angel/Job.hs
4 | +++ b/src/Angel/Job.hs
5 | @@ -69,7 +69,6 @@ ifEmpty s ioa iob = if s == "" then ioa else iob
6 | -- |tail call.
7 | supervise :: TVar GroupConfig -> String -> IO ()
8 | supervise sharedGroupConfig id' = do
9 | - logger' "START"
10 | cfg <- atomically $ readTVar sharedGroupConfig
11 | let my_spec = find_me cfg
12 | ifEmpty (name my_spec)
13 | @@ -92,8 +91,6 @@ supervise sharedGroupConfig id' = do
14 | let onPidError lph ph = do logger' "Failed to create pidfile"
15 | killProcess $ toKillDirective my_spec ph lph
16 |
17 | - logger' $ "Spawning process with env " ++ show (env procSpec)
18 | -
19 | startMaybeWithPidFile procSpec mPfile (\pHandle -> do
20 | updateRunningPid my_spec (Just pHandle) lHandle
21 | logProcess logger' pHandle
22 | @@ -101,11 +98,9 @@ supervise sharedGroupConfig id' = do
23 |
24 | cfg' <- atomically $ readTVar sharedGroupConfig
25 | if M.notMember id' (spec cfg')
26 | - then logger' "QUIT"
27 | + then return ()
28 | else do
29 | - logger' "WAITING"
30 | sleepSecs . fromMaybe defaultDelay . delay $ my_spec
31 | - logger' "RESTART"
32 | supervise sharedGroupConfig id'
33 | )
34 |
35 | @@ -163,9 +158,8 @@ supervise sharedGroupConfig id' = do
36 |
37 | logProcess :: (String -> IO ()) -> ProcessHandle -> IO ()
38 | logProcess logSink pHandle = do
39 | - logSink "RUNNING"
40 | waitForProcess pHandle
41 | - logSink "ENDED"
42 | + return ()
43 |
44 | --TODO: paralellize
45 | killProcesses :: [KillDirective] -> IO ()
46 |
--------------------------------------------------------------------------------
/pkgs/angel/static.patch:
--------------------------------------------------------------------------------
1 | diff --git a/angel.cabal b/angel.cabal
2 | index fbf2a57..654802c 100644
3 | --- a/angel.cabal
4 | +++ b/angel.cabal
5 | @@ -56,7 +56,7 @@ Executable angel
6 |
7 | Extensions: OverloadedStrings,ScopedTypeVariables,BangPatterns,ViewPatterns
8 |
9 | - Ghc-Options: -threaded -fwarn-missing-import-lists
10 | + Ghc-Options: -threaded -fwarn-missing-import-lists -static
11 |
12 | test-suite spec
13 | Type: exitcode-stdio-1.0
14 |
--------------------------------------------------------------------------------
/pkgs/ares/default.nix:
--------------------------------------------------------------------------------
1 | { fetchgit, mkDerivation, aeson, attoparsec, base, bytestring, cond, containers
2 | , directory, extra, filelock, filepath, network, process
3 | , servant-server, stdenv, temporary, text, transformers, Unique
4 | , unix, wai, wai-extra, warp
5 | }:
6 | mkDerivation {
7 | pname = "ares";
8 | version = "4";
9 | src = fetchgit {
10 | url = https://github.com/zalora/ares;
11 | rev = "8521dc9ec47c7ac01be69280998fa4b45170bba8";
12 | sha256 = "1mjvskhp63ksrd2rs60vck8qvj2mvcdqw999yc5fg58gl152i1gh";
13 | };
14 | isLibrary = false;
15 | isExecutable = true;
16 | executableHaskellDepends = [
17 | aeson attoparsec base bytestring cond containers directory extra
18 | filelock filepath network process servant-server temporary text
19 | transformers Unique unix wai wai-extra warp
20 | ];
21 | license = stdenv.lib.licenses.bsd3;
22 | }
23 |
--------------------------------------------------------------------------------
/pkgs/bundler/bundler-head.nix:
--------------------------------------------------------------------------------
1 | { buildRubyGem, coreutils, fetchgit }:
2 |
3 | buildRubyGem {
4 | name = "bundler-1.8.9";
5 | namePrefix = "";
6 | sha256 = "1k4sk4vf0mascqnahdnqymhr86dqj92bddciz5b2p9sv3qzryq57";
7 | dontPatchShebangs = true;
8 | postInstall = ''
9 | find $out -type f -perm -0100 | while read f; do
10 | substituteInPlace $f \
11 | --replace "/usr/bin/env" "${coreutils}/bin/env"
12 | done
13 | '';
14 | }
15 |
--------------------------------------------------------------------------------
/pkgs/couchbase/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, perl, erlangR14, curl, libevent }:
2 |
3 | stdenv.mkDerivation rec {
4 | name = "couchbase-${version}";
5 | version = "1.8.1";
6 |
7 | src = fetchurl {
8 | url = "http://packages.couchbase.com/releases/${version}/couchbase-server_src-${version}.tar.gz";
9 | sha256 = "0fh4nj5q2jvc67fn2v886bir9jp2b8kra0c5j6b3r7b6i3j487py";
10 | };
11 |
12 | patches = [ ./patch-ep-engine ];
13 |
14 | postPatch = ''
15 | substituteInPlace bucket_engine/configure --replace Werror Wno-error
16 | substituteInPlace couchbase-python-client/configure --replace Werror Wno-error
17 | substituteInPlace ep-engine/configure --replace Werror Wno-error
18 | substituteInPlace libconflate/configure --replace Werror Wno-error
19 | substituteInPlace libmemcached/configure --replace Werror Wno-error
20 | substituteInPlace libvbucket/configure --replace Werror Wno-error
21 | substituteInPlace membase-cli/configure --replace Werror Wno-error
22 | substituteInPlace memcached/configure --replace Werror Wno-error
23 | substituteInPlace memcachetest/configure --replace Werror Wno-error
24 | substituteInPlace moxi/configure --replace Werror Wno-error
25 | substituteInPlace ns_server/configure --replace Werror Wno-error
26 | substituteInPlace portsigar/configure --replace Werror Wno-error
27 | substituteInPlace sigar/configure --replace Werror Wno-error
28 | substituteInPlace vbucketmigrator/configure --replace Werror Wno-error
29 | substituteInPlace workload-generator/configure --replace Werror Wno-error
30 | '';
31 |
32 | preConfigure = ''
33 | export LDFLAGS="-pthread"
34 | '';
35 |
36 | installPhase = ''
37 | cp -R ./install/ $out
38 | '';
39 |
40 | buildInputs = [ perl curl libevent ];
41 | propagatedBuildInputs = [ erlangR14 ];
42 | }
43 |
--------------------------------------------------------------------------------
/pkgs/couchbase/patch-ep-engine:
--------------------------------------------------------------------------------
1 | diff --git a/ep-engine/management/dbconvert.cc b/ep-engine/management/dbconvert.cc
2 | index bfb84c6..b6e7635 100644
3 | --- a/ep-engine/management/dbconvert.cc
4 | +++ b/ep-engine/management/dbconvert.cc
5 | @@ -10,6 +10,7 @@
6 | #ifdef HAVE_SYSEXITS_H
7 | #include
8 | #endif
9 | +#include
10 |
11 | #include
12 |
13 | diff --git a/ep-engine/memory_tracker.cc b/ep-engine/memory_tracker.cc
14 | index 62a2af8..cdbdb7c 100644
15 | --- a/ep-engine/memory_tracker.cc
16 | +++ b/ep-engine/memory_tracker.cc
17 | @@ -15,6 +15,7 @@
18 | * limitations under the License.
19 | */
20 |
21 | +#include
22 | #include "memory_tracker.hh"
23 |
24 | #include "objectregistry.hh"
25 | diff --git a/ep-engine/stored-value.hh b/ep-engine/stored-value.hh
26 | index cd64083..e0e76f1 100644
27 | --- a/ep-engine/stored-value.hh
28 | +++ b/ep-engine/stored-value.hh
29 | @@ -2,6 +2,7 @@
30 | #ifndef STORED_VALUE_H
31 | #define STORED_VALUE_H 1
32 |
33 | +#include
34 | #include
35 | #include
36 | #include
37 |
--------------------------------------------------------------------------------
/pkgs/curator/default.nix:
--------------------------------------------------------------------------------
1 | { buildPythonPackage, fetchgit, stdenv, click, elasticsearch, urllib3, ... }:
2 |
3 | buildPythonPackage {
4 | name = "curator";
5 |
6 | src = fetchgit {
7 | url = "https://github.com/elastic/curator.git";
8 | rev = "f4fb814fd4ffb227dd20b3010940575f00c509f1";
9 | sha256 = "1p6qmfk9j9ng8rgkz8q3mc7f9mc96xhn585g14f8zqy65wflz3q1";
10 | };
11 |
12 | # Test suite tries to make requests against a local elasticsearch, would
13 | # rather not supply this in the build environment. Revisit later?
14 | doCheck = false;
15 |
16 | propagatedBuildInputs = [click elasticsearch urllib3];
17 | }
18 |
--------------------------------------------------------------------------------
/pkgs/curl-loader/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, openssl, file, zlib }:
2 |
3 | stdenv.mkDerivation rec {
4 | name = "curl-loader-0.56";
5 |
6 | src = fetchurl {
7 | url = "mirror://sourceforge/curl-loader/${name}.tar.bz2";
8 | sha256 = "0915jibf2k10afrza72625nsxvqa2rp1vyndv1cy7138mjijn4f2";
9 | };
10 |
11 | buildInputs = [ openssl file zlib ];
12 |
13 | installPhase = ''
14 | make DESTDIR=$out install
15 | mv $out/usr/* $out
16 | '';
17 | }
18 |
19 |
--------------------------------------------------------------------------------
/pkgs/dynomite/default.nix:
--------------------------------------------------------------------------------
1 | { fetchurl, autoreconfHook, stdenv, openssl }:
2 |
3 | stdenv.mkDerivation rec {
4 | name = "dynomite-0.5.6";
5 |
6 | buildInputs = [ autoreconfHook openssl ];
7 |
8 | dontStrip = true;
9 |
10 | # See https://github.com/Netflix/dynomite/pull/223
11 | # Keep debug info in case there are more crashes.
12 | NIX_CFLAGS_COMPILE = [
13 | "-O0"
14 | "-ggdb"
15 | ];
16 |
17 | src = fetchurl {
18 | url = https://github.com/Netflix/dynomite/archive/v0.5.6.tar.gz;
19 | sha256 = "1jim17bg414lc4zd007q17hfbpgq8qgqafi06s3p746rzxc0iy6z";
20 | };
21 | }
22 |
--------------------------------------------------------------------------------
/pkgs/erlang/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, perl, gnum4, ncurses, openssl
2 | , gnused, gawk, makeWrapper }:
3 |
4 | with stdenv.lib;
5 |
6 | stdenv.mkDerivation rec {
7 | name = "erlang-" + version;
8 | version = "17.1";
9 |
10 | src = fetchurl {
11 | url = "http://www.erlang.org/download/otp_src_${version}.tar.gz";
12 | sha256 = "0mn3p5rwvjfsxjnn1vrm0lxdq40wq9bmd9nibl6hqbfcnnrga1mq";
13 | };
14 |
15 | buildInputs = [ perl gnum4 ncurses openssl makeWrapper ];
16 |
17 | patchPhase = '' sed -i "s@/bin/rm@rm@" lib/odbc/configure erts/configure '';
18 |
19 | preConfigure = ''
20 | export HOME=$PWD/../
21 | sed -e s@/bin/pwd@pwd@g -i otp_build
22 | '';
23 |
24 | configureFlags= "--with-ssl=${openssl} ${optionalString stdenv.isDarwin "--enable-darwin-64bit"}";
25 |
26 | postInstall = ''
27 | ln -s $out/lib/erlang/lib/erl_interface*/bin/erl_call $out/bin/erl_call
28 | '';
29 |
30 | # Some erlang bin/ scripts run sed and awk
31 | postFixup = ''
32 | wrapProgram $out/lib/erlang/bin/erl --prefix PATH ":" "${gnused}/bin/"
33 | wrapProgram $out/lib/erlang/bin/start_erl --prefix PATH ":" "${gnused}/bin/:${gawk}/bin"
34 | '';
35 |
36 | meta = {
37 | homepage = "http://www.erlang.org/";
38 | description = "Programming language used for massively scalable soft real-time systems";
39 |
40 | longDescription = ''
41 | Erlang is a programming language used to build massively scalable
42 | soft real-time systems with requirements on high availability.
43 | Some of its uses are in telecoms, banking, e-commerce, computer
44 | telephony and instant messaging. Erlang's runtime system has
45 | built-in support for concurrency, distribution and fault
46 | tolerance.
47 | '';
48 |
49 | platforms = platforms.unix;
50 | };
51 | }
52 |
--------------------------------------------------------------------------------
/pkgs/exim/default.nix:
--------------------------------------------------------------------------------
1 | { coreutils, fetchurl, db, openssl, pcre, perl, pkgconfig, stdenv }:
2 |
3 | stdenv.mkDerivation rec {
4 | name = "exim-4.85";
5 |
6 | src = fetchurl {
7 | url = "http://mirror.switch.ch/ftp/mirror/exim/exim/exim4/old/${name}.tar.bz2";
8 | sha256 = "195a3ll5ck9viazf9pvgcyc0sziln5g0ggmlm6ax002lphmiy88k";
9 | };
10 |
11 | buildInputs = [ coreutils db openssl pcre perl pkgconfig ];
12 |
13 | preBuild = ''
14 | sed '
15 | s:^\(BIN_DIRECTORY\)=.*:\1='"$out"'/bin:
16 | s:^\(CONFIGURE_FILE\)=.*:\1=/etc/exim.conf:
17 | s:^\(EXIM_USER\)=.*:\1=ref\:nobody:
18 | s:^\(SPOOL_DIRECTORY\)=.*:\1=/exim-homeless-shelter:
19 | s:^# \(SUPPORT_MAILDIR\)=.*:\1=yes:
20 | s:^EXIM_MONITOR=.*$:# &:
21 | s:^\(FIXED_NEVER_USERS\)=root$:\1=0:
22 | s:^# \(WITH_CONTENT_SCAN\)=.*:\1=yes:
23 | s:^# \(AUTH_PLAINTEXT\)=.*:\1=yes:
24 | s:^# \(SUPPORT_TLS\)=.*:\1=yes:
25 | s:^# \(USE_OPENSSL_PC=openssl\)$:\1:
26 | s:^# \(LOG_FILE_PATH=syslog\)$:\1:
27 | s:^# \(HAVE_IPV6=yes\)$:\1:
28 | s:^# \(CHOWN_COMMAND\)=.*:\1=${coreutils}/bin/chown:
29 | s:^# \(CHGRP_COMMAND\)=.*:\1=${coreutils}/bin/chgrp:
30 | s:^# \(CHMOD_COMMAND\)=.*:\1=${coreutils}/bin/chmod:
31 | s:^# \(MV_COMMAND\)=.*:\1=${coreutils}/bin/mv:
32 | s:^# \(RM_COMMAND\)=.*:\1=${coreutils}/bin/rm:
33 | s:^# \(TOUCH_COMMAND\)=.*:\1=${coreutils}/bin/touch:
34 | s:^# \(PERL_COMMAND\)=.*:\1=${perl}/bin/perl:
35 | #/^\s*#.*/d
36 | #/^\s*$/d
37 | ' < src/EDITME > Local/Makefile
38 | '';
39 |
40 | installPhase = ''
41 | mkdir -p $out/bin $out/share/man/man8
42 | cp doc/exim.8 $out/share/man/man8
43 |
44 | ( cd build-Linux-*
45 | cp exicyclog exim_checkaccess exim_dumpdb exim_lock exim_tidydb \
46 | exipick exiqsumm exigrep exim_dbmbuild exim exim_fixdb eximstats \
47 | exinext exiqgrep exiwhat \
48 | $out/bin )
49 |
50 | ( cd $out/bin
51 | for i in mailq newaliases rmail rsmtp runq sendmail; do
52 | ln -s exim $i
53 | done )
54 | '';
55 |
56 | meta = {
57 | homepage = "http://exim.org/";
58 | description = "Exim is a mail transfer agent (MTA) for hosts that are running Unix or Unix-like operating systems.";
59 | license = stdenv.lib.licenses.gpl3;
60 | platforms = stdenv.lib.platforms.linux;
61 | maintainers = [ stdenv.lib.maintainers.tv ];
62 | };
63 | }
64 |
--------------------------------------------------------------------------------
/pkgs/filebeat/default.nix:
--------------------------------------------------------------------------------
1 | { bash, fetchgit, go, stdenv }:
2 | let
3 | version = "1.2.1";
4 | go-libGeoIP = fetchgit {
5 | url = https://github.com/nranchev/go-libGeoIP;
6 | rev = "c78e8bd2dd3599feb21fd30886043979e82fe948";
7 | sha256 = "035khy2b84gc96b08c5rq3a5p6d8860vysbdj8ww9p0p2nracy5d";
8 | };
9 | in
10 | stdenv.mkDerivation {
11 | name = "filebeat-${version}";
12 | src = fetchgit {
13 | url = https://github.com/elastic/beats;
14 | rev = "refs/tags/v${version}";
15 | sha256 = "0yy2pg4sncn9p0zlc5wbri3lx0q4f03vg02lv2bvddyl5yy7phy1";
16 | };
17 | buildInputs = [ go ];
18 | patchPhase = ''
19 | find -type f -exec sed -i 's:/bin/bash:${bash}&:g' {} \;
20 | '';
21 | buildPhase = ''
22 | export GOPATH=$GOPATH:$PWD:$PWD/vendor
23 | mkdir -p src/github.com/elastic
24 | ln -sf $PWD src/github.com/elastic/beats
25 | make -C filebeat
26 | '';
27 | installPhase = ''
28 | mkdir -p $out/bin
29 | cp filebeat/filebeat $out/bin
30 |
31 | # trick nix into not considering go as a runtime dependency
32 | go_path=${go}
33 | go_path=''${go_path#/nix/store/}
34 | go_path=''${go_path%%-go-*}
35 | sed -i "s#$go_path#................................#g" $out/bin/filebeat
36 | '';
37 | }
38 |
--------------------------------------------------------------------------------
/pkgs/flame-graph/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchgit, perl }:
2 |
3 | stdenv.mkDerivation rec {
4 | name = "FlameGraph";
5 |
6 | src = fetchgit {
7 | url = https://github.com/brendangregg/FlameGraph.git;
8 | rev = "182b24fb635345d48c91ed1de58a08b620312f3d";
9 | sha256 = "6ca6c9b309b79828f61bc7666a0a88740d1e511b32a97990344a008128075fb6";
10 | };
11 |
12 | buildInputs = [ perl ];
13 |
14 | installPhase = ''
15 | mkdir -p $out/bin
16 | cp *.pl $out/bin
17 | '';
18 | }
19 |
--------------------------------------------------------------------------------
/pkgs/galera-wsrep/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, scons, boost, openssl, buildEnv, check }:
2 |
3 | let
4 | libs = buildEnv {
5 | name = "galera-lib-inputs-united";
6 | paths = [ openssl boost.lib check ];
7 | };
8 | in
9 |
10 | stdenv.mkDerivation rec {
11 | name = "galera-wsrep-${version}";
12 | version = "25.3.12";
13 |
14 | src = fetchurl {
15 | url = "https://zalora-ug-mirror.s3.amazonaws.com/galera-3-${version}.tar.gz";
16 | md5 = "cd981290a9767d51c97810786dbd211b";
17 | };
18 |
19 | buildInputs = [ scons boost openssl check ];
20 |
21 | patchPhase = ''
22 | substituteInPlace SConstruct \
23 | --replace "boost_library_path = '''" "boost_library_path = '${boost.lib}/lib'"
24 | substituteInPlace SConstruct \
25 | --replace "boost_library_suffix = '''" "boost_library_suffix = '-mt'"
26 | '';
27 |
28 | buildPhase = ''
29 | export CPPFLAGS="-I${boost.dev}/include -I${openssl}/include -I${check}/include"
30 | export LIBPATH="${libs}/lib"
31 | scons -j$NIX_BUILD_CORES strict_build_flags=0
32 | '';
33 |
34 | installPhase = ''
35 | # copied with modifications from scripts/packages/freebsd.sh
36 | PBR="$out"
37 | PBD="$PWD"
38 | GALERA_LICENSE_DIR="$PBR/share/licenses"
39 |
40 | install -d "$PBR/"{bin,lib/galera,share/doc/galera}
41 | install -m 555 "$PBD/garb/garbd" "$PBR/bin/garbd"
42 | install -m 444 "$PBD/libgalera_smm.so" "$PBR/lib/galera/libgalera_smm.so"
43 | install -m 444 "$PBD/scripts/packages/README" "$PBR/share/doc/galera/"
44 | install -m 444 "$PBD/scripts/packages/README-MySQL" "$PBR/share/doc/galera/"
45 |
46 | install -m 755 -d "$GALERA_LICENSE_DIR"
47 | install -m 444 "$PBD/LICENSE" "$GALERA_LICENSE_DIR/GPLv2"
48 | install -m 444 "$PBD/scripts/packages/freebsd/LICENSE" "$GALERA_LICENSE_DIR"
49 | install -m 444 "$PBD/asio/LICENSE_1_0.txt" "$GALERA_LICENSE_DIR/LICENSE.asio"
50 | install -m 444 "$PBD/www.evanjones.ca/LICENSE" "$GALERA_LICENSE_DIR/LICENSE.crc32c"
51 | install -m 444 "$PBD/chromium/LICENSE" "$GALERA_LICENSE_DIR/LICENSE.chromium"
52 | '';
53 |
54 | meta = {
55 | homepage = http://galeracluster.com/;
56 | description = "Galera 3 wsrep provider library";
57 | };
58 | }
59 |
--------------------------------------------------------------------------------
/pkgs/graphviz/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, pkgconfig, libpng, libjpeg
2 | , yacc, libtool, fontconfig, pango, gd, gts, expat
3 | }:
4 |
5 | # this is a stripped-down version of graphviz that does not include xlibs, pango, expat and some other optional dependencies
6 |
7 | assert stdenv.system != "x86_64-darwin";
8 |
9 | stdenv.mkDerivation rec {
10 | name = "graphviz-2.36.0";
11 |
12 | src = fetchurl {
13 | url = "http://www.graphviz.org/pub/graphviz/ARCHIVE/${name}.tar.gz";
14 | sha256 = "0qb30z5sxlbjni732ndad3j4x7l36vsxpxn4fmf5fn7ivvc6dz9p";
15 | };
16 |
17 | buildInputs = [ pkgconfig libpng libjpeg yacc libtool fontconfig gd gts ];
18 |
19 | configureFlags =
20 | [ "--with-pngincludedir=${libpng}/include"
21 | "--with-pnglibdir=${libpng}/lib"
22 | "--with-jpegincludedir=${libjpeg}/include"
23 | "--with-jpeglibdir=${libjpeg}/lib"
24 | "--with-expatincludedir=${expat}/include" # see http://thread.gmane.org/gmane.comp.video.graphviz/5041/focus=5046
25 | "--with-expatlibdir=${expat}/lib"
26 | "--without-x"
27 | ];
28 |
29 | preBuild = ''
30 | sed -e 's@am__append_5 *=.*@am_append_5 =@' -i lib/gvc/Makefile
31 | '';
32 |
33 | meta = {
34 | homepage = "http://www.graphviz.org/";
35 | description = "Open source graph visualization software";
36 |
37 | longDescription = ''
38 | Graphviz is open source graph visualization software. Graph
39 | visualization is a way of representing structural information as
40 | diagrams of abstract graphs and networks. It has important
41 | applications in networking, bioinformatics, software engineering,
42 | database and web design, machine learning, and in visual
43 | interfaces for other technical domains.
44 | '';
45 |
46 | hydraPlatforms = stdenv.lib.platforms.linux;
47 | };
48 | }
49 |
--------------------------------------------------------------------------------
/pkgs/heavy-sync/default.nix:
--------------------------------------------------------------------------------
1 | { buildPythonPackage, fetchFromGitHub, stdenv, boto, gcs-oauth2-boto-plugin, sqlite3 }:
2 |
3 | buildPythonPackage rec {
4 |
5 | name = "heavy-sync-0.1";
6 |
7 | src = fetchFromGitHub {
8 | owner = "zalora";
9 | repo = "heavy-sync";
10 | rev = "c41e0b7244941108c4cf655ff4c981654ccdfa21";
11 | sha256 = "0ngp2bmjhgzzdbx65wx3c7g8z0iasdfy44wwbb7s2c1m4rhnwzb6";
12 | };
13 |
14 | propagatedBuildInputs = [
15 | boto gcs-oauth2-boto-plugin
16 | sqlite3 # For SQLite 3 support in Python
17 | ];
18 |
19 | meta = with stdenv.lib; {
20 | description = "Synchronize huge cloud buckets with ease";
21 | homepage = "https://github.com/zalora/heavy-sync";
22 | license = licenses.mpl20;
23 | };
24 | }
25 |
--------------------------------------------------------------------------------
/pkgs/incron/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, gnused
2 | }:
3 |
4 | stdenv.mkDerivation rec {
5 | name = "incron-0.5.10";
6 |
7 | src = fetchurl {
8 | url = "http://inotify.aiken.cz/download/incron/${name}.tar.gz";
9 | sha256 = "12p7707halp8ji7vsbw1nmimcrgp4ank6r4af97nxhhnbzdvljjx";
10 | };
11 |
12 | patches = [ ./makefile.patch ];
13 |
14 | preConfigure = ''
15 | sed -i '1,1i#include ' inotify-cxx.cpp icd-main.cpp
16 | sed -i '1,1i#include ' icd-main.cpp inotify-cxx.cpp usertable.cpp appargs.cpp
17 | sed -i 's|strchr(s,|(char*)strchr(s,|' incroncfg.cpp
18 | '';
19 |
20 | buildInputs = [ gnused ];
21 | }
22 |
--------------------------------------------------------------------------------
/pkgs/incron/makefile.patch:
--------------------------------------------------------------------------------
1 | diff -ur incron-0.5.10.orig/Makefile incron-0.5.10/Makefile
2 | --- incron-0.5.10.orig/Makefile 2012-04-07 00:19:27.000000000 +0200
3 | +++ incron-0.5.10/Makefile 2015-03-13 14:07:34.387856335 +0100
4 | @@ -1,15 +1,11 @@
5 |
6 | -PREFIX = /usr/local
7 | -USERDATADIR = /var/spool/incron
8 | -SYSDATADIR = /etc/incron.d
9 | +PREFIX = $(out)
10 | CFGDIR = /etc
11 | MANPATH = $(PREFIX)/share/man
12 | RELEASE = incron-`cat VERSION`
13 | RELEASEDIR = /tmp/$(RELEASE)
14 | DOCDIR = $(PREFIX)/share/doc/$(RELEASE)/
15 |
16 | -USER = root
17 | -
18 | CXX ?= g++
19 | INSTALL = install
20 |
21 | @@ -48,10 +44,8 @@
22 | $(INSTALL) -m 0755 -d $(DESTDIR)$(PREFIX)/bin/
23 | $(INSTALL) -m 0755 -d $(DESTDIR)$(PREFIX)/sbin/
24 | $(INSTALL) -m 0755 -d $(DESTDIR)$(DOCDIR)/
25 | - $(INSTALL) -m 04755 -o $(USER) incrontab $(DESTDIR)$(PREFIX)/bin/
26 | + $(INSTALL) -m 04755 incrontab $(DESTDIR)$(PREFIX)/bin/
27 | $(INSTALL) -m 0755 incrond $(DESTDIR)$(PREFIX)/sbin/
28 | - $(INSTALL) -m 0755 -o $(USER) -d $(DESTDIR)$(USERDATADIR)
29 | - $(INSTALL) -m 0755 -o $(USER) -d $(DESTDIR)$(SYSDATADIR)
30 | $(INSTALL) -m 0644 incron.conf.example $(DESTDIR)$(DOCDIR)/
31 |
32 | install-man: incrontab.1 incrontab.5 incrond.8 incron.conf.5
33 |
--------------------------------------------------------------------------------
/pkgs/jenkins/default.nix:
--------------------------------------------------------------------------------
1 | { pkgs, lib, stdenv, fetchurl }:
2 | let
3 |
4 | # some plugins have dependencies
5 | # use external tools to get it all
6 | plugins-info = import ./plugins.nix ;
7 |
8 | jpi = plugin: info: fetchurl {
9 | inherit (info) sha1;
10 | name = "jenkins-${plugin}-${info.version}.hpi";
11 | url = "https://updates.jenkins-ci.org/download/plugins/" +
12 | "${plugin}/${info.version}/${plugin}.hpi";
13 | };
14 |
15 | plugins = stdenv.mkDerivation {
16 | name = "jenkins-plugins";
17 | phases = [ "installPhase" ];
18 | installPhase = ''
19 | mkdir -p $out
20 | '' + lib.concatStrings (
21 | lib.mapAttrsToList (name: info:
22 | ''
23 | ln -svf "${jpi name info}" "$out/${name}.hpi"
24 | '') plugins-info
25 | );
26 | };
27 |
28 |
29 | in
30 |
31 | stdenv.mkDerivation rec {
32 | name = "jenkins-${version}+plugins.war";
33 | version = "1.641";
34 | src = fetchurl {
35 | url = "http://mirrors.jenkins-ci.org/war/${version}/jenkins.war";
36 | sha256 = "14svpwz9r7zw5i263pkmjb3d6vfxalk521mmiasi2g2fzqw6qrgp";
37 | };
38 |
39 | # https://wiki.jenkins-ci.org/display/JENKINS/Bundling+plugins+with+Jenkins
40 | build-xml = pkgs.writeText "jenkins.build.xml"
41 | ''
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 | '';
52 |
53 | meta = with stdenv.lib; {
54 | description = "An extendable open source continuous integration server";
55 | homepage = http://jenkins-ci.org;
56 | license = licenses.mit;
57 | platforms = platforms.all;
58 | };
59 |
60 | buildInputs = with pkgs; [ jre ];
61 |
62 | phases = [ "buildPhase" "installPhase" ];
63 | buildPhase = ''
64 | ln -sf ${build-xml} build.xml
65 | ${pkgs.ant}/bin/ant bundle
66 | '';
67 | installPhase = "cp jenkins.war $out";
68 | }
69 |
--------------------------------------------------------------------------------
/pkgs/jenkins/plugins.nix:
--------------------------------------------------------------------------------
1 | # Generated by jenkins4nix, https://github.com/zalora/jenkins4nix
2 | # Command: jenkins4nix antisamy-markup-formatter console-column-plugin console-tail copyartifact credentials credentials-binding dashboard-view environment-dashboard external-monitor-job extra-columns ghprb git git-parameter github github-oauth google-login google-oauth-plugin jenkinswalldisplay jquery log-command mailer mapdb-api matrix-auth matrix-project nested-view oauth-credentials plain-credentials promoted-builds rebuild s3 script-security show-build-parameters simple-theme-plugin ssh-agent ssh-credentials ssh-slaves timestamper token-macro workflow-step-api slack parameterized-trigger build-user-vars-plugin
3 | {
4 | "antisamy-markup-formatter" = {
5 | version = "1.3";
6 | sha1 = "b94e906e1f13b1333063cf620ae670bad3542240";
7 | depends = [ ];
8 | };
9 | "aws-java-sdk" = {
10 | version = "1.10.26";
11 | sha1 = "555e7a17e7662e5421c7b32e921cfd88ab95e5c3";
12 | depends = [ ];
13 | };
14 | "build-user-vars-plugin" = {
15 | version = "1.5";
16 | sha1 = "c0fd86facd9c8f8e3319c878deade392d8134887";
17 | depends = [ "mailer" ];
18 | };
19 | "conditional-buildstep" = {
20 | version = "1.3.3";
21 | sha1 = "3acd06f560dadb9c9e75b10b7e1b2450ed99070d";
22 | depends = [ "token-macro" "run-condition" ];
23 | };
24 | "console-column-plugin" = {
25 | version = "1.5";
26 | sha1 = "9ad77fe5c4cd3301435e30cb4a5dc6791770c996";
27 | depends = [ ];
28 | };
29 | "console-tail" = {
30 | version = "1.1";
31 | sha1 = "dd875be276508de3c3284c52b5ab1416df97c186";
32 | depends = [ ];
33 | };
34 | "copyartifact" = {
35 | version = "1.37";
36 | sha1 = "594ab621e1cc21fa8fe3e8b1398a8e13808bca41";
37 | depends = [ "matrix-project" ];
38 | };
39 | "credentials" = {
40 | version = "1.24";
41 | sha1 = "9400dcaa054f2a332628073f5333842280be4fe8";
42 | depends = [ ];
43 | };
44 | "credentials-binding" = {
45 | version = "1.6";
46 | sha1 = "6e5db4be8240725612d0072a81f4819e40436ad9";
47 | depends = [ "credentials" "plain-credentials" ];
48 | };
49 | "dashboard-view" = {
50 | version = "2.9.6";
51 | sha1 = "78ab362d84b35bd48d164a59b8629a16b05433d8";
52 | depends = [ ];
53 | };
54 | "environment-dashboard" = {
55 | version = "1.1.4";
56 | sha1 = "8efd6dbb8c924776ac841badf880501becdc3ca3";
57 | depends = [ "jquery" ];
58 | };
59 | "external-monitor-job" = {
60 | version = "1.4";
61 | sha1 = "6969401bf7f766e38f1a6a2754201bd32ecfed01";
62 | depends = [ ];
63 | };
64 | "extra-columns" = {
65 | version = "1.16";
66 | sha1 = "54d4156c8ed7c476d4e1d30d97d23af4351ad0b0";
67 | depends = [ ];
68 | };
69 | "ghprb" = {
70 | version = "1.29.6";
71 | sha1 = "481c5355341416178209e42791dd25eb7c2b8a4f";
72 | depends = [ "matrix-project" "github-api" "git" "ssh-agent" "token-macro" "credentials" "plain-credentials" ];
73 | };
74 | "git" = {
75 | version = "2.4.0";
76 | sha1 = "5f6fcb3b43f25aef661c78585f2e9796f18b0a4a";
77 | depends = [ "git-client" "credentials" "ssh-credentials" "scm-api" "matrix-project" "mailer" "token-macro" "promoted-builds" ];
78 | };
79 | "git-client" = {
80 | version = "1.19.0";
81 | sha1 = "aea765523f4bf602adf3ee24fc8cf4124869f2e7";
82 | depends = [ "ssh-credentials" ];
83 | };
84 | "git-parameter" = {
85 | version = "0.4.0";
86 | sha1 = "b6f7bbfa5652ce0980f5d6dd0346a24e7de33598";
87 | depends = [ "credentials" "git" ];
88 | };
89 | "github" = {
90 | version = "1.14.1";
91 | sha1 = "ebfb1ff39f20620b8531a393faf5fc3b08ad49fa";
92 | depends = [ "github-api" "git" "credentials" "plain-credentials" "token-macro" ];
93 | };
94 | "github-api" = {
95 | version = "1.71";
96 | sha1 = "27321ac5dcbff5db99e97464f90f064517ef0516";
97 | depends = [ ];
98 | };
99 | "github-oauth" = {
100 | version = "0.22.2";
101 | sha1 = "a8a08062782404c6d0608722827decbcf28b4bfa";
102 | depends = [ "mailer" "github-api" "git" ];
103 | };
104 | "google-login" = {
105 | version = "1.2.1";
106 | sha1 = "1582db184cc3fc4a79b3f2624c8ec33872a47897";
107 | depends = [ "mailer" ];
108 | };
109 | "google-oauth-plugin" = {
110 | version = "0.4";
111 | sha1 = "e9afd46f206d22af97fe92818e461a894fa6cb09";
112 | depends = [ "oauth-credentials" ];
113 | };
114 | "jenkinswalldisplay" = {
115 | version = "0.6.30";
116 | sha1 = "bf5a7e4c00beb7371335517dcb91a3f71e6ffd27";
117 | depends = [ "nested-view" ];
118 | };
119 | "jquery" = {
120 | version = "1.11.2-0";
121 | sha1 = "78e9df74a02ad634c92c0e97279a9f4225c163aa";
122 | depends = [ ];
123 | };
124 | "junit" = {
125 | version = "1.10";
126 | sha1 = "3a22739b318ea026821741fdfad0e824f102c3b6";
127 | depends = [ ];
128 | };
129 | "log-command" = {
130 | version = "1.0.1";
131 | sha1 = "e3ade31612cc5bb24cd29e2578570074e9f1f203";
132 | depends = [ ];
133 | };
134 | "mailer" = {
135 | version = "1.16";
136 | sha1 = "3ffe05908197e4b80ce7ecb8c48471374da04c7b";
137 | depends = [ ];
138 | };
139 | "mapdb-api" = {
140 | version = "1.0.6.0";
141 | sha1 = "fe1bea979b187ed051d56e6042370f8c2359c74a";
142 | depends = [ ];
143 | };
144 | "matrix-auth" = {
145 | version = "1.2";
146 | sha1 = "91a7ebbee0c81b6fa5a954b182f11e7e4ba4a5d1";
147 | depends = [ ];
148 | };
149 | "matrix-project" = {
150 | version = "1.6";
151 | sha1 = "0b3cb0dff3a5dacdc75ae06ddbd85e13db8d91a2";
152 | depends = [ "junit" "script-security" ];
153 | };
154 | "nested-view" = {
155 | version = "1.14";
156 | sha1 = "6d3e852353e2ab7e45e32fe2a86ea5c3acec9b1b";
157 | depends = [ ];
158 | };
159 | "oauth-credentials" = {
160 | version = "0.3";
161 | sha1 = "4c355b5b36445ef15754c3e9ea1e885c9acad7ee";
162 | depends = [ ];
163 | };
164 | "parameterized-trigger" = {
165 | version = "2.29";
166 | sha1 = "fc8e1ce328ff2e2acdf98f8e73f7df2ad7aea593";
167 | depends = [ "subversion" "matrix-project" "promoted-builds" "conditional-buildstep" ];
168 | };
169 | "plain-credentials" = {
170 | version = "1.1";
171 | sha1 = "f459e09981859df206a4d9ee40d8bc3707c528ae";
172 | depends = [ "credentials" ];
173 | };
174 | "promoted-builds" = {
175 | version = "2.24";
176 | sha1 = "dcda9552e8d02bc8a1e263007cfb5c24e067f24c";
177 | depends = [ "token-macro" ];
178 | };
179 | "rebuild" = {
180 | version = "1.25";
181 | sha1 = "a62bbf56acc6bb851f60be8be0937334dd3fbc35";
182 | depends = [ ];
183 | };
184 | "run-condition" = {
185 | version = "1.0";
186 | sha1 = "0764ca10ef31e61300a6025778fa05d629bcee96";
187 | depends = [ "token-macro" ];
188 | };
189 | "s3" = {
190 | version = "0.8";
191 | sha1 = "0e4fcfa1a71b77ccf1001ac001cc1a0218e54c82";
192 | depends = [ "aws-java-sdk" "copyartifact" ];
193 | };
194 | "scm-api" = {
195 | version = "1.0";
196 | sha1 = "77e69ef2ebc915cd1d497860575f48a4412be76e";
197 | depends = [ ];
198 | };
199 | "script-security" = {
200 | version = "1.15";
201 | sha1 = "1ff014a4aa278db11db387aee2981297d89407d8";
202 | depends = [ ];
203 | };
204 | "show-build-parameters" = {
205 | version = "1.0";
206 | sha1 = "69ee5b0307cc907ce80c182cc80035c88311b9c0";
207 | depends = [ ];
208 | };
209 | "simple-theme-plugin" = {
210 | version = "0.3";
211 | sha1 = "ffec39d93f62916fe6aadbab75544d443c4efe5e";
212 | depends = [ ];
213 | };
214 | "slack" = {
215 | version = "1.8.1";
216 | sha1 = "92f2599baf9397d75e0edaeecd52e2d89e19cb52";
217 | depends = [ ];
218 | };
219 | "ssh-agent" = {
220 | version = "1.9";
221 | sha1 = "ec57bfe38b0fd7d88ca86d83083ba4dfba398d00";
222 | depends = [ "credentials" "ssh-credentials" ];
223 | };
224 | "ssh-credentials" = {
225 | version = "1.11";
226 | sha1 = "d47e6a2899ee75e48336f6d2637da4e9ba0e3d21";
227 | depends = [ "credentials" ];
228 | };
229 | "ssh-slaves" = {
230 | version = "1.10";
231 | sha1 = "18b00399db95b1e80982c94fe45cf5d4edee009b";
232 | depends = [ "credentials" "ssh-credentials" ];
233 | };
234 | "subversion" = {
235 | version = "2.5.4";
236 | sha1 = "fff0ffffd34b3ba22df8ca8ad2d46e79e91dbfcc";
237 | depends = [ "mapdb-api" "credentials" "ssh-credentials" "scm-api" ];
238 | };
239 | "timestamper" = {
240 | version = "1.7.2";
241 | sha1 = "cd0c0cc294235688f2e6abc510119e985fb903b0";
242 | depends = [ ];
243 | };
244 | "token-macro" = {
245 | version = "1.12.1";
246 | sha1 = "9eb703e6c6fdcf06c34b210f1c06f2dd51bf319f";
247 | depends = [ ];
248 | };
249 | "workflow-step-api" = {
250 | version = "1.12";
251 | sha1 = "fef8b9d6c300f13398d2e6068bd1ec743017079c";
252 | depends = [ ];
253 | };
254 | }
255 |
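As the header notes, this file is generated by jenkins4nix; entries are normally refreshed by re-running that command rather than edited by hand. For reference, a hand-written entry would follow the same shape (the plugin id, version and sha1 below are placeholders, not real values):

    "some-plugin" = {                                       # hypothetical plugin id
      version = "1.0";                                      # placeholder version
      sha1 = "0000000000000000000000000000000000000000";    # placeholder checksum
      depends = [ "credentials" ];                          # ids of other entries in this file
    };
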
--------------------------------------------------------------------------------
/pkgs/jmaps/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, openjdk, fetchurl, perf-map-agent, runCommand }: let
2 | jmaps = fetchurl {
3 | url = https://raw.githubusercontent.com/brendangregg/Misc/fc2856eb0d0d7fe7a3b3dfef8f873603f6a4a3eb/java/jmaps;
4 | sha256 = "15bihx36qdjv25l1xdnkg3lrlmkjy9cmj6gsk4dybr141xfg9x4i";
5 | };
6 | in runCommand "jmaps" {} ''
7 | mkdir -p $out/bin
8 | cp ${jmaps} $out/bin/jmaps
9 | sed -i 's,/usr/lib/jvm/java-8-oracle,${openjdk}/lib/openjdk,' $out/bin/jmaps
10 | sed -i 's,/usr/lib/jvm/perf-map-agent,${perf-map-agent},' $out/bin/jmaps
11 | sed -i 's,su - $user -c,sudo -u $user sh -c,' $out/bin/jmaps
12 | chmod +x $out/bin/jmaps
13 | ''
14 |
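jmaps ties the JDK and perf-map-agent together so that perf can symbolize JIT-compiled Java frames. A sketch of bundling it with the related packages from this repo into one profile (the <nixpkgs> pin and the relative callPackage paths are assumptions):

    { pkgs ? import <nixpkgs> {} }:
    let
      perf-map-agent = pkgs.callPackage ../perf-map-agent { };
      flame-graph    = pkgs.callPackage ../flame-graph { };
      jmaps          = pkgs.callPackage ./default.nix { inherit perf-map-agent; };
    in
      # one environment with the pieces needed to turn `perf record` data from a JVM
      # into a flame graph; perf itself comes from the running kernel's package set
      pkgs.buildEnv {
        name  = "java-profiling-tools";
        paths = [ jmaps perf-map-agent flame-graph ];
      }
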
--------------------------------------------------------------------------------
/pkgs/letsencrypt/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, pythonPackages, fetchurl, dialog }:
2 |
3 | pythonPackages.buildPythonApplication rec {
4 | version = "0.1.0";
5 | name = "letsencrypt-${version}";
6 |
7 | src = fetchurl {
8 | url = "https://github.com/letsencrypt/letsencrypt/archive/v${version}.tar.gz";
9 | sha256 = "056y5bsmpc4ya5xxals4ypzsm927j6n5kwby3bjc03sy3sscf6hw";
10 | };
11 |
12 | propagatedBuildInputs = with pythonPackages; [
13 | zope_interface zope_component six requests2 pytz pyopenssl psutil mock acme
14 | cryptography configobj pyRFC3339 python2-pythondialog parsedatetime ConfigArgParse
15 | ];
16 | buildInputs = with pythonPackages; [ nose dialog ];
17 |
18 | patchPhase = ''
19 | substituteInPlace letsencrypt/notify.py --replace "/usr/sbin/sendmail" "/var/setuid-wrappers/sendmail"
20 | '';
21 |
22 | postInstall = ''
23 | for i in $out/bin/*; do
24 | wrapProgram "$i" --prefix PYTHONPATH : "$PYTHONPATH" \
25 | --prefix PATH : "${dialog}/bin:$PATH"
26 | done
27 | '';
28 |
29 | meta = with stdenv.lib; {
30 | homepage = https://github.com/letsencrypt/letsencrypt;
31 | description = "ACME client that can obtain certs and extensibly update server configurations";
32 | platforms = platforms.unix;
33 | maintainers = [ maintainers.iElectric ];
34 | license = licenses.asl20;
35 | };
36 | }
37 |
--------------------------------------------------------------------------------
/pkgs/logstash-all-plugins/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl }:
2 |
3 | stdenv.mkDerivation rec {
4 | name = "logstash-all-plugins-2.3.1";
5 |
6 | src = fetchurl {
7 | url = "https://download.elasticsearch.org/logstash/logstash/${name}.tar.gz";
8 | sha256 = "1zi2d0q7nhcfrp1xq6sq65x4d0kk8rh3ip5wd8a8lkyibcyxxppc";
9 | };
10 |
11 | dontBuild = true;
12 | dontPatchELF = true;
13 | dontStrip = true;
14 | dontPatchShebangs = true;
15 |
16 | installPhase = ''
17 | mkdir -p $out
18 | cp -r {Gemfile*,vendor,lib,bin} $out
19 | '';
20 | }
21 |
--------------------------------------------------------------------------------
/pkgs/mariadb-galera/default.nix:
--------------------------------------------------------------------------------
1 | {
2 | stdenv, fetchurl, cmake, bison, ncurses, openssl, readline, zlib, perl,
3 | findutils, gnutar, procps, gnugrep, gawk, rsync, which, lsof, coreutils
4 | }:
5 |
6 | stdenv.mkDerivation rec {
7 | name = "mariadb-galera-${version}";
8 | version = "10.0.21";
9 |
10 | src = fetchurl {
11 | url = "https://github.com/MariaDB/server/archive/${name}.tar.gz";
12 | sha256 = "0qi0yqh0h82avvppwa5538y9a4zf7b2jmfvchi3c5l588ybc0w01";
13 | };
14 |
15 | buildInputs = [ cmake bison ncurses openssl perl readline zlib ]
16 | ++ stdenv.lib.optional stdenv.isDarwin perl;
17 |
18 | enableParallelBuilding = true;
19 |
20 | cmakeFlags = [
21 | "-DWITH_SSL=yes"
22 | "-DWITH_READLINE=yes"
23 | "-DWITH_EMBEDDED_SERVER=no"
24 | "-DWITH_ZLIB=yes"
25 | "-DHAVE_IPV6=yes"
26 | "-DWITHOUT_TOKUDB=1"
27 | "-DINSTALL_SCRIPTDIR=bin"
28 | # Galera Cluster:
29 | "-DWITH_WSREP=ON"
30 | "-DWITH_INNODB_DISALLOW_WRITES=1"
31 | ];
32 |
33 | NIX_LDFLAGS = stdenv.lib.optionalString stdenv.isLinux "-lgcc_s";
34 |
35 | prePatch = ''
36 | sed -i -e "s|/usr/bin/libtool|libtool|" cmake/libutils.cmake
37 | '';
38 |
39 | postInstall =
40 | let
41 | inherit (import ) makeBinPath;
42 | # These deps are not passed in PATH when mysqld starts wsrep_* scripts
43 | wsrep-deps = [
44 | findutils gnutar procps gnugrep gawk rsync which lsof coreutils
45 | ];
46 | in ''
47 | sed -i -e "s|-lssl|-L${openssl}/lib -lssl|g" $out/bin/mysql_config
48 | sed -i -e "s|basedir=\"\"|basedir=\"$out\"|" $out/bin/mysql_install_db
49 | # https://github.com/NixOS/nixpkgs/issues/7117
50 | rm -r $out/mysql-test $out/sql-bench $out/data # Don't need testing data
51 | rm $out/bin/mysqlbug # Encodes a path to gcc and not really useful
52 | find $out/bin -name \*test\* -exec rm {} \;
53 | sed -i -e "2iexport PATH=$out/bin:${makeBinPath wsrep-deps}" $out/bin/wsrep_*
54 | '';
55 |
56 | meta = {
57 | homepage = https://mariadb.com/kb/en/mariadb/what-is-mariadb-galera-cluster/;
58 | description = "MariaDB Galera Cluster is a synchronous multi-master cluster for MariaDB.";
59 | };
60 | }
61 |
--------------------------------------------------------------------------------
/pkgs/mariadb/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, cmake, ncurses, zlib, openssl, pcre, boost, judy, bison, libxml2
2 | , libaio, libevent, groff, jemalloc, perl, fixDarwinDylibNames
3 | }:
4 |
5 | with stdenv.lib;
6 | stdenv.mkDerivation rec {
7 | name = "mariadb-${version}";
8 | version = "10.0.15";
9 |
10 | src = fetchurl {
11 | url = "https://github.com/MariaDB/server/archive/${name}.tar.gz";
12 | sha256 = "1maa0mwqxljh9nd0kjbcr9hy9v9k2x25b15xb5d5p41wadrxk6jy";
13 | };
14 |
15 | buildInputs = [ cmake ncurses openssl zlib pcre libxml2 boost judy bison libevent ]
16 | ++ stdenv.lib.optionals stdenv.isLinux [ jemalloc libaio ]
17 | ++ stdenv.lib.optionals stdenv.isDarwin [ perl fixDarwinDylibNames ];
18 |
19 | patches = stdenv.lib.optional stdenv.isDarwin ./my_context_asm.patch;
20 |
21 | cmakeFlags = [
22 | "-DBUILD_CONFIG=mysql_release"
23 | "-DDEFAULT_CHARSET=utf8"
24 | "-DDEFAULT_COLLATION=utf8_general_ci"
25 | "-DENABLED_LOCAL_INFILE=ON"
26 | "-DINSTALL_SYSCONFDIR=etc/mysql"
27 | "-DINSTALL_INFODIR=share/mysql/docs"
28 | "-DINSTALL_MANDIR=share/man"
29 | "-DINSTALL_PLUGINDIR=lib/mysql/plugin"
30 | "-DINSTALL_SCRIPTDIR=bin"
31 | "-DINSTALL_INCLUDEDIR=include/mysql"
32 | "-DINSTALL_DOCREADMEDIR=share/mysql"
33 | "-DINSTALL_SUPPORTFILESDIR=share/mysql"
34 | "-DINSTALL_MYSQLSHAREDIR=share/mysql"
35 | "-DINSTALL_DOCDIR=share/mysql/docs"
36 | "-DINSTALL_SHAREDIR=share/mysql"
37 | "-DWITH_READLINE=ON"
38 | "-DWITH_ZLIB=system"
39 | "-DWITH_SSL=system"
40 | "-DWITH_PCRE=system"
41 | "-DWITH_EXTRA_CHARSETS=complex"
42 | "-DWITH_EMBEDDED_SERVER=OFF"
43 | "-DWITH_ARCHIVE_STORAGE_ENGINE=1"
44 | "-DWITH_BLACKHOLE_STORAGE_ENGINE=1"
45 | "-DWITH_INNOBASE_STORAGE_ENGINE=1"
46 | "-DWITH_PARTITION_STORAGE_ENGINE=1"
47 | "-DWITHOUT_EXAMPLE_STORAGE_ENGINE=1"
48 | "-DWITHOUT_FEDERATED_STORAGE_ENGINE=1"
49 | ] ++ stdenv.lib.optionals stdenv.isDarwin [
50 | "-DWITHOUT_OQGRAPH_STORAGE_ENGINE=1"
51 | "-DWITHOUT_TOKUDB=1"
52 | ];
53 |
54 | enableParallelBuilding = true;
55 |
56 | outputs = [ "out" "lib" ];
57 |
58 | prePatch = ''
59 | substituteInPlace cmake/libutils.cmake \
60 | --replace /usr/bin/libtool libtool
61 | sed -i "s,SET(DEFAULT_MYSQL_HOME.*$,SET(DEFAULT_MYSQL_HOME /not/a/real/dir),g" CMakeLists.txt
62 | sed -i "s,SET(PLUGINDIR.*$,SET(PLUGINDIR $lib/lib/mysql/plugin),g" CMakeLists.txt
63 |
64 | sed -i "s,SET(pkgincludedir.*$,SET(pkgincludedir $lib/include),g" scripts/CMakeLists.txt
65 | sed -i "s,SET(pkglibdir.*$,SET(pkglibdir $lib/lib),g" scripts/CMakeLists.txt
66 | sed -i "s,SET(pkgplugindir.*$,SET(pkgplugindir $lib/lib/mysql/plugin),g" scripts/CMakeLists.txt
67 |
68 | sed -i "s,set(libdir.*$,SET(libdir $lib/lib),g" storage/mroonga/vendor/groonga/CMakeLists.txt
69 | sed -i "s,set(includedir.*$,SET(includedir $lib/include),g" storage/mroonga/vendor/groonga/CMakeLists.txt
70 | sed -i "/\"\$[{]CMAKE_INSTALL_PREFIX}\/\$[{]GRN_RELATIVE_PLUGINS_DIR}\"/d" storage/mroonga/vendor/groonga/CMakeLists.txt
71 | sed -i "s,set(GRN_PLUGINS_DIR.*$,SET(GRN_PLUGINS_DIR $lib/\$\{GRN_RELATIVE_PLUGINS_DIR}),g" storage/mroonga/vendor/groonga/CMakeLists.txt
72 | sed -i 's,[^"]*/var/log,/var/log,g' storage/mroonga/vendor/groonga/CMakeLists.txt
73 | '';
74 |
75 | postInstall = ''
76 | substituteInPlace $out/bin/mysql_install_db \
77 | --replace basedir=\"\" basedir=\"$out\"
78 |
79 | # Remove superfluous files
80 | rm -r $out/mysql-test $out/sql-bench $out/data # Don't need testing data
81 | rm $out/share/man/man1/mysql-test-run.pl.1
82 | rm $out/bin/rcmysql # Not needed with nixos units
83 | rm $out/bin/mysqlbug # Encodes a path to gcc and not really useful
84 | find $out/bin -name \*test\* -exec rm {} \;
85 |
86 | # Separate libs and includes into their own derivation
87 | mkdir -p $lib
88 | mv $out/lib $lib
89 | mv $out/include $lib
90 |
91 | # Fix the mysql_config
92 | sed -i $out/bin/mysql_config \
93 | -e 's,-lz,-L${zlib}/lib -lz,g' \
94 | -e 's,-lssl,-L${openssl}/lib -lssl,g'
95 |
96 | # Add mysql_config to libs since configure scripts use it
97 | mkdir -p $lib/bin
98 | cp $out/bin/mysql_config $lib/bin
99 | sed -i "/\(execdir\|bindir\)/ s,'[^\"']*',$lib/bin,g" $lib/bin/mysql_config
100 |
101 | # Make sure to propagate lib for compatibility
102 | mkdir -p $out/nix-support
103 | echo "$lib" > $out/nix-support/propagated-native-build-inputs
104 |
105 | # Don't install static libraries.
106 | rm -f $lib/lib/*.a
107 | '';
108 |
109 | passthru.mysqlVersion = "5.6";
110 |
111 | meta = with stdenv.lib; {
112 | description = "An enhanced, drop-in replacement for MySQL";
113 | homepage = https://mariadb.org/;
114 | license = stdenv.lib.licenses.gpl2;
115 | maintainers = with stdenv.lib.maintainers; [ thoughtpolice wkennington ];
116 | platforms = stdenv.lib.platforms.all;
117 | };
118 | }
119 |
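The postInstall above splits libraries, headers and a relocated mysql_config into a separate lib output. A sketch of consuming that output at build time (the <nixpkgs> pin is an assumption):

    { pkgs ? import <nixpkgs> {} }:
    let mariadb = pkgs.callPackage ./default.nix { }; in
    # run the mysql_config copied into the lib output from another build
    pkgs.runCommand "mariadb-link-flags" { nativeBuildInputs = [ mariadb.lib ]; } ''
      mysql_config --libs > $out
    ''
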
--------------------------------------------------------------------------------
/pkgs/memcached/memcached-tool:
--------------------------------------------------------------------------------
1 | #!/usr/bin/perl
2 | #
3 | # memcached-tool:
4 | # stats/management tool for memcached.
5 | #
6 | # Author:
7 | # Brad Fitzpatrick
8 | #
9 | # Contributor:
10 | # Andrey Niakhaichyk
11 | #
12 | # License:
13 | # public domain. I give up all rights to this
14 | # tool. modify and copy at will.
15 | #
16 |
17 | use strict;
18 | use IO::Socket::INET;
19 |
20 | my $addr = shift;
21 | my $mode = shift || "display";
22 | my ($from, $to);
23 |
24 | if ($mode eq "display") {
25 | undef $mode if @ARGV;
26 | } elsif ($mode eq "move") {
27 | $from = shift;
28 | $to = shift;
29 | undef $mode if $from < 6 || $from > 17;
30 | undef $mode if $to < 6 || $to > 17;
31 | print STDERR "ERROR: parameters out of range\n\n" unless $mode;
32 | } elsif ($mode eq 'dump') {
33 | ;
34 | } elsif ($mode eq 'stats') {
35 | ;
36 | } elsif ($mode eq 'settings') {
37 | ;
38 | } elsif ($mode eq 'sizes') {
39 | ;
40 | } else {
41 | undef $mode;
42 | }
43 |
44 | undef $mode if @ARGV;
45 |
46 | die
47 | "Usage: memcached-tool [mode]\n
48 | memcached-tool 10.0.0.5:11211 display # shows slabs
49 | memcached-tool 10.0.0.5:11211 # same. (default is display)
50 | memcached-tool 10.0.0.5:11211 stats # shows general stats
51 | memcached-tool 10.0.0.5:11211 settings # shows settings stats
52 | memcached-tool 10.0.0.5:11211 sizes # shows sizes stats
53 | memcached-tool 10.0.0.5:11211 dump # dumps keys and values
54 |
55 | WARNING! sizes is a development command.
56 | As of 1.4 it is still the only command which will lock your memcached instance for some time.
57 | If you have many millions of stored items, it can become unresponsive for several minutes.
58 | Run this at your own risk. It is roadmapped to either make this feature optional
59 | or at least speed it up.
60 | " unless $addr && $mode;
61 |
62 |
63 | my $sock;
64 | if ($addr =~ m:/:) {
65 | $sock = IO::Socket::UNIX->new(
66 | Peer => $addr,
67 | );
68 | }
69 | else {
70 | $addr .= ':11211' unless $addr =~ /:\d+$/;
71 |
72 | $sock = IO::Socket::INET->new(
73 | PeerAddr => $addr,
74 | Proto => 'tcp',
75 | );
76 | }
77 | die "Couldn't connect to $addr\n" unless $sock;
78 |
79 | if ($mode eq 'dump') {
80 | my %items;
81 | my $totalitems;
82 |
83 | print $sock "stats items\r\n";
84 |
85 | while (<$sock>) {
86 | last if /^END/;
87 | if (/^STAT items:(\d*):number (\d*)/) {
88 | $items{$1} = $2;
89 | $totalitems += $2;
90 | }
91 | }
92 | print STDERR "Dumping memcache contents\n";
93 | print STDERR " Number of buckets: " . scalar(keys(%items)) . "\n";
94 | print STDERR " Number of items : $totalitems\n";
95 |
96 | foreach my $bucket (sort(keys(%items))) {
97 | print STDERR "Dumping bucket $bucket - " . $items{$bucket} . " total items\n";
98 | print $sock "stats cachedump $bucket $items{$bucket}\r\n";
99 | my %keyexp;
100 | while (<$sock>) {
101 | last if /^END/;
102 | # return format looks like this
103 | # ITEM foo [6 b; 1176415152 s]
104 | if (/^ITEM (\S+) \[.* (\d+) s\]/) {
105 | $keyexp{$1} = $2;
106 | }
107 | }
108 |
109 | foreach my $k (keys(%keyexp)) {
110 | print $sock "get $k\r\n";
111 | my $response = <$sock>;
112 | if ($response =~ /VALUE (\S+) (\d+) (\d+)/) {
113 | my $flags = $2;
114 | my $len = $3;
115 | my $val;
116 | read $sock, $val, $len;
117 | print "add $k $flags $keyexp{$k} $len\r\n$val\r\n";
118 | # get the END
119 | $_ = <$sock>;
120 | $_ = <$sock>;
121 | }
122 | }
123 | }
124 | exit;
125 | }
126 |
127 | if ($mode eq 'stats') {
128 | my %items;
129 |
130 | print $sock "stats\r\n";
131 |
132 | while (<$sock>) {
133 | last if /^END/;
134 | chomp;
135 | if (/^STAT\s+(\S*)\s+(.*)/) {
136 | $items{$1} = $2;
137 | }
138 | }
139 | printf ("#%-17s %5s %11s\n", $addr, "Field", "Value");
140 | foreach my $name (sort(keys(%items))) {
141 | printf ("%24s %12s\n", $name, $items{$name});
142 |
143 | }
144 | exit;
145 | }
146 |
147 | if ($mode eq 'settings') {
148 | my %items;
149 |
150 | print $sock "stats settings\r\n";
151 |
152 | while (<$sock>) {
153 | last if /^END/;
154 | chomp;
155 | if (/^STAT\s+(\S*)\s+(.*)/) {
156 | $items{$1} = $2;
157 | }
158 | }
159 | printf ("#%-17s %5s %11s\n", $addr, "Field", "Value");
160 | foreach my $name (sort(keys(%items))) {
161 | printf ("%24s %12s\n", $name, $items{$name});
162 | }
163 | exit;
164 | }
165 |
166 |
167 | if ($mode eq 'sizes') {
168 | my %items;
169 |
170 | print $sock "stats sizes\r\n";
171 |
172 | while (<$sock>) {
173 | last if /^END/;
174 | chomp;
175 | if (/^STAT\s+(\S*)\s+(.*)/) {
176 | $items{$1} = $2;
177 | }
178 | }
179 | printf ("#%-17s %5s %11s\n", $addr, "Size", "Count");
180 | foreach my $name (sort(keys(%items))) {
181 | printf ("%24s %12s\n", $name, $items{$name});
182 | }
183 | exit;
184 | }
185 |
186 | # display mode:
187 |
188 | my %items; # class -> { number, age, chunk_size, chunks_per_page,
189 | # total_pages, total_chunks, used_chunks,
190 | # free_chunks, free_chunks_end }
191 |
192 | print $sock "stats items\r\n";
193 | my $max = 0;
194 | while (<$sock>) {
195 | last if /^END/;
196 | if (/^STAT items:(\d+):(\w+) (\d+)/) {
197 | $items{$1}{$2} = $3;
198 | }
199 | }
200 |
201 | print $sock "stats slabs\r\n";
202 | while (<$sock>) {
203 | last if /^END/;
204 | if (/^STAT (\d+):(\w+) (\d+)/) {
205 | $items{$1}{$2} = $3;
206 | $max = $1;
207 | }
208 | }
209 |
210 | print " # Item_Size Max_age Pages Count Full? Evicted Evict_Time OOM\n";
211 | foreach my $n (1..$max) {
212 | my $it = $items{$n};
213 | next if (0 == $it->{total_pages});
214 | my $size = $it->{chunk_size} < 1024 ?
215 | "$it->{chunk_size}B" :
216 | sprintf("%.1fK", $it->{chunk_size} / 1024.0);
217 | my $full = $it->{free_chunks_end} == 0 ? "yes" : " no";
218 | printf("%3d %8s %9ds %7d %7d %7s %8d %8d %4d\n",
219 | $n, $size, $it->{age}, $it->{total_pages},
220 | $it->{number}, $full, $it->{evicted},
221 | $it->{evicted_time}, $it->{outofmemory});
222 | }
223 |
224 |
--------------------------------------------------------------------------------
/pkgs/mergex/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchgit }:
2 |
3 | stdenv.mkDerivation {
4 | name = "mergex";
5 | src = fetchgit {
6 | url = "https://github.com/NicoJuicy/mergex.git";
7 | rev = "d038ceb60d00bb788afe0d51d0b4d4e412f5a333";
8 | sha256 = "0cqq4a1pclkbys84x5v1bxvlm4a6d1h9536jan0cg5p8fdajzaga";
9 | };
10 | buildPhase = ''
11 | make all
12 | '';
13 | installPhase = ''
14 | mkdir -p $out/bin
15 | mv -v mergex $out/bin/
16 | '';
17 | }
18 |
--------------------------------------------------------------------------------
/pkgs/mkebs/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, coreutils, perl, parted, nix, bash, utillinux, pathsFromGraph,
2 | grub2, rsync, gnugrep, gnused, curl, awscli, jq, e2fsprogs }:
3 |
4 | stdenv.mkDerivation rec {
5 | name = "mkebs";
6 |
7 | path = stdenv.lib.makeSearchPath "bin" [
8 | coreutils perl parted nix bash utillinux rsync
9 | grub2 gnugrep gnused curl awscli jq e2fsprogs
10 | ];
11 | inherit pathsFromGraph;
12 |
13 | script = ./mkebs.sh;
14 |
15 | buildCommand = ''
16 | substituteAll $script $out
17 | chmod +x $out
18 | '';
19 |
20 | meta = with stdenv.lib; {
21 | platforms = platforms.linux;
22 | license = licenses.mit;
23 | };
24 | }
25 |
--------------------------------------------------------------------------------
/pkgs/mkebs/mkebs.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 | set -o pipefail
5 | set -u
6 |
7 | # inputs:
8 | # $toplevel is nixos top level closure
9 | test -d "$toplevel"
10 | # $graph should point to a file that contains exportReferencesGraph output from toplevel
11 | test -f "$graph"
12 |
13 | meta() {
14 | curl -s "http://169.254.169.254/latest/meta-data/$1"
15 | }
16 |
17 | BASE_RESOURCE=${BASE_RESOURCE:-""}
18 | AZ="${AZ:-$(meta placement/availability-zone)}"
19 |
20 | volume=${volume:-""}
21 | suffix=${suffix:-""}
22 | aminame=${toplevel}-hvm${suffix}
23 | volume_args="--volume-type gp2 --size 40"
24 | region=${region:-${AZ%%[abcdef]}}
25 | ec2="aws --region ${region} ec2"
26 |
27 | # global in: $aminame
28 | amitest() {
29 | set -- $($ec2 describe-images --filters "Name=name,Values=$aminame" | jq -r '.Images | .[] | [.Name, .ImageId] | .[]')
30 |
31 | if [ "${1:-}" = "$aminame" ]; then
32 | echo AMI already exists >&2
33 | echo "$2" # stdout
34 | exit 0
35 | fi
36 | }
37 |
38 | export PATH=@path@
39 | pathsFromGraph=${pathsFromGraph:-@pathsFromGraph@}
40 |
41 | # global in: $volume
42 | vwait() {
43 | while ! $ec2 describe-volumes --volume-ids "$volume" | grep -q available; do
44 | echo waiting for volume "$volume" >&2
45 | sleep 20
46 | done
47 | $ec2 create-tags --resources "$volume" --tags Key=ug-mkebs,Value="$toplevel" Key=Name,Value=mkebs-scratch >&2
48 | }
49 |
50 | # global in: $volume
51 | # global out: $device
52 | attach() {
53 | local attached
54 | attached=0
55 | for i in {d..z}; do
56 | device=/dev/xvd${i}
57 | if $ec2 attach-volume --volume-id "$volume" --instance-id "$(meta instance-id)" --device "$(basename $device)" >&2; then
58 | attached=1
59 | # danger: even after a successful attach call, waiting for the device may hang forever when reattaching volumes
60 | while ! lsblk "$device" >&2; do echo waiting for $device... >&2; sleep 1; done
61 | break
62 | fi
63 | done
64 |
65 | if [ "$attached" -eq 0 ]; then
66 | echo could not attach the ebs volume anywhere, leaking "$volume" >&2
67 | exit 11
68 | fi
69 | }
70 |
71 | amitest
72 |
73 | if [ -n "$BASE_RESOURCE" ]; then
74 | case ${BASE_RESOURCE%-*} in
75 | vol)
76 | volume=$BASE_RESOURCE
77 | ;;
78 | snap) # starting from a base snapshot
79 | volume=$($ec2 create-volume --availability-zone "$AZ" $volume_args --snapshot-id "$BASE_RESOURCE" | jq -r .VolumeId)
80 | ;;
81 | *)
82 | echo "unkown base resource: $BASE_RESOURCE" >&2
83 | exit 12
84 | ;;
85 | esac
86 | vwait
87 | attach
88 | else
89 | echo 'WARNING: starting from scratch. This is slow, consider setting $BASE_RESOURCE' >&2
90 | echo '$BASE_RESOURCE can look like snap-xxxxxx or vol-xxxxxx' >&2
91 | volume=$($ec2 create-volume --availability-zone "$AZ" $volume_args | jq -r .VolumeId)
92 | vwait
93 | attach
94 |
95 | parted -s "$device" -- mklabel msdos >&2
96 | parted -s "$device" -- mkpart primary ext2 1M -1s >&2
97 |
98 | mkfs.ext4 -L nixos "$device"1 >&2
99 | fi
100 |
101 | while ! lsblk "$device"1 >&2; do echo waiting for $device... >&2; sleep 1; done
102 |
103 | mountpoint=${mountpoint:-$(mktemp -d)}
104 |
105 | if ! mount | grep -q "$device.*$mountpoint"; then
106 | mount "$device"1 "$mountpoint"
107 | fi
108 |
109 | touch "$mountpoint/.ebs"
110 | mkdir -p "$mountpoint"/{bin,etc/nixos,dev,sys,proc,nix/store}
111 |
112 | if [ ! -f "$mountpoint"/proc/filesystems ]; then
113 | mount -o bind /proc "$mountpoint"/proc
114 | fi
115 |
116 | if [ ! -c "$mountpoint"/dev/null ]; then
117 | mount -o bind /dev "$mountpoint"/dev
118 | fi
119 |
120 | if [ ! -d "$mountpoint"/sys/kernel ]; then
121 | mount -o bind /sys "$mountpoint"/sys
122 | fi
123 |
124 | storePaths=$(perl "$pathsFromGraph" "$graph")
125 |
126 | echo "rsyncing graph for $toplevel (will take a while)..." >&2
127 | rsync --stats -au $storePaths "$mountpoint/nix/store/" >&2
128 |
129 | # Register the paths in the Nix database.
130 | printRegistration=1 perl "$pathsFromGraph" "$graph" | \
131 | chroot "$mountpoint" "$toplevel"/sw/bin/nix-store --load-db --option build-users-group "" >&2
132 |
133 | # Create the system profile to allow nixos-rebuild to work.
134 | chroot "$mountpoint" "$toplevel"/sw/bin/nix-env --option build-users-group "" -p /nix/var/nix/profiles/system --set "$toplevel" >&2
135 |
136 | # `nixos-rebuild' requires an /etc/NIXOS.
137 | touch "$mountpoint"/etc/NIXOS
138 |
139 | # `switch-to-configuration' requires a /bin/sh
140 | ln -sf "$(readlink "$toplevel"/sw/bin/sh)" "$mountpoint"/bin/sh
141 |
142 | # XXX: we don't really need to generate any menus as there are no rollbacks
143 | # Generate the GRUB menu.
144 | LC_ALL=C NIXOS_INSTALL_GRUB=0 chroot "$mountpoint" "$toplevel"/bin/switch-to-configuration switch >&2 || true
145 |
146 | grub-install --recheck --root-directory="$mountpoint" "$device" >&2
147 |
148 | umount "$mountpoint"/proc
149 | umount "$mountpoint"/dev
150 | umount "$mountpoint"/sys
151 | umount "$mountpoint"
152 |
153 | if [ -n "$volume" ]; then
154 | $ec2 detach-volume --volume-id "$volume" >&2
155 |
156 | date >&2
157 | snapshot=$($ec2 create-snapshot --volume-id "$volume" | jq -r .SnapshotId)
158 | echo now wait two years for: $snapshot >&2
159 |
160 | $ec2 create-tags --resources "$snapshot" --tags Key=ug-mkebs,Value="$toplevel" Key=Name,Value=mkebs-ami >&2
161 |
162 | progress=$($ec2 describe-snapshots --snapshot-ids "$snapshot" | jq -r '.Snapshots | .[] | .Progress')
163 |
164 | while [ "$progress" != "100%" ]; do
165 | echo creating snapshot: $progress >&2
166 | sleep 10
167 | progress=$($ec2 describe-snapshots --snapshot-ids "$snapshot" | jq -r '.Snapshots | .[] | .Progress')
168 | done
169 |
170 | while ! $ec2 describe-snapshots --snapshot-ids "$snapshot" | grep -q completed; do
171 | echo waiting for state "'completed'" >&2
172 | sleep 10
173 | done
174 | date >&2
175 |
176 | echo not deleting $volume >&2
177 |
178 | ami=$($ec2 register-image --architecture x86_64 --name "$aminame" \
179 | --root-device-name /dev/xvda \
180 | --block-device-mappings \
181 | "[{\"DeviceName\": \"/dev/xvda\",
182 | \"Ebs\":{\"SnapshotId\":\"$snapshot\",
183 | \"VolumeType\":\"gp2\",
184 | \"DeleteOnTermination\":true}}]" \
185 | --virtualization-type hvm | jq -r .ImageId)
186 |
187 | $ec2 create-tags --resources "$ami" --tags Key=ug-mkebs,Value="$toplevel" >&2
188 | echo $ami # only this gets printed to stdout
189 | fi
190 |
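The header comments spell out the script's contract: $toplevel must be a NixOS system closure and $graph an exportReferencesGraph dump of it. A sketch of producing both from Nix (./configuration.nix and the <nixpkgs> pin are assumptions; the script itself still has to be run as root on an EC2 builder instance with AWS credentials):

    { pkgs ? import <nixpkgs> {} }:
    let
      mkebs  = pkgs.callPackage ./default.nix { };
      system = (import <nixpkgs/nixos> { configuration = ./configuration.nix; }).system;
    in
      # collect the two inputs checked at the top of mkebs.sh; afterwards run, on the builder:
      #   toplevel=$(readlink result/toplevel) graph=result/closure result/mkebs
      pkgs.runCommand "mkebs-inputs" { exportReferencesGraph = [ "closure" system ]; } ''
        mkdir -p $out
        cp closure $out/closure
        ln -s ${system} $out/toplevel
        ln -s ${mkebs}  $out/mkebs
      ''
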
--------------------------------------------------------------------------------
/pkgs/myrapi/default.nix:
--------------------------------------------------------------------------------
1 | { mkDerivation, aeson, base, base64-bytestring, byteable
2 | , bytestring, cryptohash, either, fetchgit, http-types, old-locale
3 | , optparse-applicative, servant, servant-client, stdenv, text, time
4 | , time-locale-compat, transformers
5 | }:
6 | mkDerivation {
7 | pname = "myrapi";
8 | version = "0.1.0.0";
9 | src = fetchgit {
10 | url = "https://github.com/zalora/myrapi.git";
11 | sha256 = "cab997dbfdc059fbb93571cf64f51ba45015f2f6c92040b1c961f20fe35b6096";
12 | rev = "ed359ed4a522447a3549135160b4cda803254031";
13 | };
14 | isLibrary = true;
15 | isExecutable = true;
16 | jailbreak = true;
17 | libraryHaskellDepends = [
18 | aeson base base64-bytestring byteable bytestring cryptohash either
19 | http-types old-locale servant servant-client text time
20 | time-locale-compat transformers
21 | ];
22 | executableHaskellDepends = [
23 | aeson base bytestring optparse-applicative servant servant-client
24 | text
25 | ];
26 | license = stdenv.lib.licenses.mit;
27 | }
28 |
--------------------------------------------------------------------------------
/pkgs/mysql/5.5.x.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, cmake, bison, ncurses, openssl, readline, zlib, perl }:
2 |
3 | # Note: zlib is not required; MySQL can use an internal zlib.
4 |
5 | stdenv.mkDerivation rec {
6 | name = "mysql-${version}";
7 | version = "5.5.42";
8 |
9 | src = fetchurl {
10 | url = "mirror://mysql/MySQL-5.5/${name}.tar.gz";
11 | md5 = "c4d946e6c9f54c386aacb8d3ea957cf8";
12 | };
13 |
14 | patches = [
15 | ./tztime-symlink-loop-hell.patch
16 | ];
17 |
18 | preConfigure = stdenv.lib.optional stdenv.isDarwin ''
19 | ln -s /bin/ps $TMPDIR/ps
20 | export PATH=$PATH:$TMPDIR
21 | '';
22 |
23 | buildInputs = [ cmake bison ncurses openssl readline zlib ]
24 | ++ stdenv.lib.optional stdenv.isDarwin perl;
25 |
26 | enableParallelBuilding = true;
27 |
28 | cmakeFlags = [
29 | "-DWITH_SSL=yes"
30 | "-DWITH_READLINE=yes"
31 | "-DWITH_EMBEDDED_SERVER=no"
32 | "-DHAVE_IPV6=yes"
33 | "-DINSTALL_DOCDIR=share/doc/mysql"
34 | "-DINSTALL_DOCREADMEDIR=share/doc/mysql"
35 | "-DINSTALL_INCLUDEDIR=include/mysql"
36 | "-DINSTALL_INFODIR=share/doc/mysql"
37 | "-DINSTALL_MANDIR=share/man"
38 | "-DINSTALL_MYSQLSHAREDIR=share/mysql"
39 | "-DINSTALL_MYSQLPLUGINDIR=lib/mysql/plugin"
40 | "-DINSTALL_SCRIPTDIR=bin"
41 | "-DINSTALL_SUPPORTFILESDIR=share/mysql"
42 | ];
43 |
44 | NIX_LDFLAGS = stdenv.lib.optionalString stdenv.isLinux "-lgcc_s";
45 |
46 | prePatch = ''
47 | sed -i -e "s|/usr/bin/libtool|libtool|" cmake/libutils.cmake
48 | '';
49 | postInstall = ''
50 | sed -i -e "s|basedir=\"\"|basedir=\"$out\"|" $out/bin/mysql_install_db
51 | sed -i -e "s|-lssl|-L${openssl}/lib -lssl|g" $out/bin/mysql_config
52 | rm -r $out/mysql-test $out/sql-bench $out/data
53 | rm $out/share/man/man1/mysql-test-run.pl.1
54 | rm -v $out/bin/mysqlbug*
55 | rm -rf $out/share/doc
56 | '';
57 |
58 | passthru.mysqlVersion = "5.5";
59 |
60 | meta = {
61 | homepage = http://www.mysql.com/;
62 | description = "The world's most popular open source database";
63 | };
64 | }
65 |
--------------------------------------------------------------------------------
/pkgs/mysql/tztime-symlink-loop-hell.patch:
--------------------------------------------------------------------------------
1 | Description: mysql_tzinfo_to_sql errors with tzdata 2013f and
2 | above Allow only one level of symlink recursion in mysql_tzdata_to_sql, to
3 | avoid infinite loops.
4 | Bug: https://mariadb.atlassian.net/browse/MDEV-5226
5 | Author: Alexander Barkov
6 | Origin: MariaDB, https://github.com/MariaDB/server/commit/f2cfcd91f56d81d40b1f5e2dbb5b643dc207bf53
7 | Date: Wed, 13 Nov 2013 18:26:03 +0400
8 |
9 | Index: mysql-5.5-5.5.40/sql/tztime.cc
10 | ===================================================================
11 | --- mysql-5.5-5.5.40.orig/sql/tztime.cc
12 | +++ mysql-5.5-5.5.40/sql/tztime.cc
13 | @@ -2470,7 +2470,7 @@ char *root_name_end;
14 |
15 | */
16 | my_bool
17 | -scan_tz_dir(char * name_end)
18 | +scan_tz_dir(char * name_end, uint symlink_recursion_level)
19 | {
20 | MY_DIR *cur_dir;
21 | char *name_end_tmp;
22 | @@ -2490,7 +2490,32 @@ scan_tz_dir(char * name_end)
23 |
24 | if (MY_S_ISDIR(cur_dir->dir_entry[i].mystat->st_mode))
25 | {
26 | - if (scan_tz_dir(name_end_tmp))
27 | + my_bool is_symlink;
28 | + if ((is_symlink= my_is_symlink(fullname)) &&
29 | + symlink_recursion_level > 0)
30 | + {
31 | + /*
32 | + The timezone definition data in some Linux distributions
33 | + (e.g. the "timezone-data-2013f" package in Gentoo)
34 | + may have synlimks like:
35 | + /usr/share/zoneinfo/posix/ -> /usr/share/zoneinfo/,
36 | + so the same timezone files are available under two names
37 | + (e.g. "CET" and "posix/CET").
38 | +
39 | + We allow one level of symlink recursion for backward
40 | + compatibility with earlier timezone data packages that have
41 | + duplicate copies of the same timezone files inside the root
42 | + directory and the "posix" subdirectory (instead of symlinking).
43 | + This makes "posix/CET" still available, but helps to avoid
44 | + following such symlinks infinitely:
45 | + /usr/share/zoneinfo/posix/posix/posix/.../posix/
46 | + */
47 | + fflush(stdout);
48 | + fprintf(stderr, "Warning: Skipping directory '%s': "
49 | + "to avoid infinite symlink recursion.\n", fullname);
50 | + continue;
51 | + }
52 | + if (scan_tz_dir(name_end_tmp, symlink_recursion_level + is_symlink))
53 | {
54 | my_dirend(cur_dir);
55 | return 1;
56 | @@ -2502,14 +2527,20 @@ scan_tz_dir(char * name_end)
57 | if (!tz_load(fullname, &tz_info, &tz_storage))
58 | print_tz_as_sql(root_name_end + 1, &tz_info);
59 | else
60 | + {
61 | + fflush(stdout);
62 | fprintf(stderr,
63 | "Warning: Unable to load '%s' as time zone. Skipping it.\n",
64 | fullname);
65 | + }
66 | free_root(&tz_storage, MYF(0));
67 | }
68 | else
69 | + {
70 | + fflush(stdout);
71 | fprintf(stderr, "Warning: '%s' is not regular file or directory\n",
72 | fullname);
73 | + }
74 | }
75 | }
76 |
77 | @@ -2542,8 +2573,9 @@ main(int argc, char **argv)
78 | printf("TRUNCATE TABLE time_zone_transition;\n");
79 | printf("TRUNCATE TABLE time_zone_transition_type;\n");
80 |
81 | - if (scan_tz_dir(root_name_end))
82 | + if (scan_tz_dir(root_name_end, 0))
83 | {
84 | + fflush(stdout);
85 | fprintf(stderr, "There were fatal errors during processing "
86 | "of zoneinfo directory\n");
87 | return 1;
88 | @@ -2562,6 +2594,7 @@ main(int argc, char **argv)
89 | {
90 | if (tz_load(argv[2], &tz_info, &tz_storage))
91 | {
92 | + fflush(stdout);
93 | fprintf(stderr, "Problems with zoneinfo file '%s'\n", argv[2]);
94 | return 1;
95 | }
96 | @@ -2571,6 +2604,7 @@ main(int argc, char **argv)
97 | {
98 | if (tz_load(argv[1], &tz_info, &tz_storage))
99 | {
100 | + fflush(stdout);
101 | fprintf(stderr, "Problems with zoneinfo file '%s'\n", argv[2]);
102 | return 1;
103 | }
104 |
--------------------------------------------------------------------------------
/pkgs/newrelic-php/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, bash }:
2 |
3 | stdenv.mkDerivation rec {
4 |
5 | name = "newrelic-php5-${version}";
6 | version = "6.4.0.163";
7 |
8 | src = fetchurl {
9 | url = "https://download.newrelic.com/php_agent/archive/${version}/${name}-linux.tar.gz";
10 | sha256 = "1qb77gn96arlcy4rvmr3c7br1y8hhz9n5979crrh3zywmdf0g1kv";
11 | };
12 |
13 | buildPhase = ":";
14 |
15 | installPhase = ''
16 |
17 | mkdir -p $out/bin
18 |
19 | # patchelf is supposed to do this but it is broken (both the latest release
20 | # and master at 0ea5aff2edf59da4bcd5287f6268dac75c340959)
21 | # https://github.com/NixOS/patchelf/issues/66
22 | orig=$out/bin/newrelic-daemon.x64.orig
23 | wrapped=$out/bin/newrelic-daemon.x64
24 | cp daemon/newrelic-daemon.x64 $orig
25 | {
26 | echo '#!${bash}/bin/bash'
27 | echo "exec $(cat $NIX_CC/nix-support/dynamic-linker) $orig \$@"
28 | } > $wrapped
29 | chmod +x $wrapped
30 |
31 | mkdir -p $out/extensions
32 | cp agent/x64/newrelic-20151012.so $out/extensions/newrelic.so
33 | '';
34 |
35 | meta = with stdenv.lib; {
36 | description = "PHP agent for New Relic";
37 | homepage = http://newrelic.com/docs/php/new-relic-for-php;
38 | };
39 | }
40 |
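The package installs the agent as extensions/newrelic.so plus a wrapped daemon binary; PHP picks both up through its ini configuration. A sketch of such a fragment (the <nixpkgs> pin is an assumption and the license key is a placeholder):

    { pkgs ? import <nixpkgs> {} }:
    let newrelic-php = pkgs.callPackage ./default.nix { }; in
    pkgs.writeText "newrelic.ini" ''
      extension = ${newrelic-php}/extensions/newrelic.so
      newrelic.license = "REPLACE_WITH_LICENSE_KEY"          ; placeholder value
      newrelic.daemon.location = ${newrelic-php}/bin/newrelic-daemon.x64
    ''
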
--------------------------------------------------------------------------------
/pkgs/newrelic-plugin-agent/default.nix:
--------------------------------------------------------------------------------
1 | { buildPythonPackage, fetchurl, stdenv, helper, requests2 }:
2 |
3 | buildPythonPackage rec {
4 | name = "newrelic-plugin-agent-1.3.0";
5 |
6 | src = fetchurl {
7 | url = "https://pypi.python.org/packages/source/n/newrelic_plugin_agent/newrelic_plugin_agent-1.3.0.tar.gz";
8 | md5 = "8855e9802cd0476d862ebb12ed22bd32";
9 | };
10 |
11 | # The package tries to install some example config files to /opt
12 | preInstall = ''
13 | sed -i '/data_files=/d' setup.py
14 | '';
15 |
16 | propagatedBuildInputs = [
17 | helper
18 | requests2
19 | ];
20 |
21 | meta = with stdenv.lib; {
22 | description = "Python based agent for collecting metrics for NewRelic";
23 | homepage = https://github.com/MeetMe/newrelic-plugin-agent;
24 | };
25 | }
26 |
--------------------------------------------------------------------------------
/pkgs/newrelic-python/default.nix:
--------------------------------------------------------------------------------
1 | { pkgs ? import <nixpkgs> {}, ... }:
2 | with pkgs;
3 | with pkgs.stdenv;
4 |
5 | pythonPackages.buildPythonPackage rec {
6 | name = "newrelic-python-2.18.1.15";
7 |
8 | src = fetchurl {
9 | url = "https://pypi.python.org/packages/source/n/newrelic/newrelic-2.18.1.15.tar.gz";
10 | md5 = "f0421d3752d6b2f208ebf01c3265b259";
11 | };
12 |
13 | meta = with stdenv.lib; {
14 | description = "Python agent for New Relic";
15 | homepage = http://newrelic.com/docs/python/new-relic-for-python;
16 | };
17 | }
18 |
--------------------------------------------------------------------------------
/pkgs/newrelic-sysmond/default.nix:
--------------------------------------------------------------------------------
1 | { pkgs ? import <nixpkgs> {}, ... }:
2 | with pkgs;
3 |
4 | stdenv.mkDerivation rec {
5 | name = "newrelic-sysmond-${version}";
6 | version = "2.0.2.111";
7 |
8 | buildPhase = ":";
9 | installPhase = ''
10 | ensureDir $out/bin
11 | cp daemon/nrsysmond.x64 $out/bin/
12 | ${patchelf}/bin/patchelf --interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" $out/bin/nrsysmond.x64
13 | eval fixupPhase
14 | '';
15 |
16 | src = fetchurl {
17 | url = "https://download.newrelic.com/server_monitor/archive/${version}/${name}-linux.tar.gz";
18 | sha256 = "036bayrl53fnnwnyhz0h9dg0bsrm9ahbw531hiwy2ycm6vj6ic4g";
19 | };
20 | }
21 |
--------------------------------------------------------------------------------
/pkgs/nginx/unstable.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, fetchFromGitHub, openssl, zlib, pcre, libxml2, libxslt, expat
2 | , gd, geoip, luajit
3 | , rtmp ? false
4 | , fullWebDAV ? false
5 | , syslog ? false
6 | , moreheaders ? false
7 | , echo ? false
8 | , ngx_lua ? false
9 | , withStream ? false }:
10 |
11 | with stdenv.lib;
12 |
13 | let
14 | version = "1.9.4";
15 | mainSrc = fetchurl {
16 | url = "http://nginx.org/download/nginx-${version}.tar.gz";
17 | sha256 = "1a1bixw2a4s5c3qzw3583s4a4y6i0sdzhihhlbab5rkyfh1hr6s7";
18 | };
19 |
20 | rtmp-ext = fetchFromGitHub {
21 | owner = "arut";
22 | repo = "nginx-rtmp-module";
23 | rev = "v1.1.5";
24 | sha256 = "1d9ws4prxz22yq3nhh5h18jrs331zivrdh784l6wznc1chg3gphn";
25 | };
26 |
27 | dav-ext = fetchFromGitHub {
28 | owner = "arut";
29 | repo = "nginx-dav-ext-module";
30 | rev = "v0.0.3";
31 | sha256 = "1qck8jclxddncjad8yv911s9z7lrd58bp96jf13m0iqk54xghx91";
32 | };
33 |
34 | syslog-ext = fetchFromGitHub {
35 | owner = "yaoweibin";
36 | repo = "nginx_syslog_patch";
37 | rev = "v0.25";
38 | sha256 = "0734f884838wcjyrrddn8wzj834wid1zffrk093jrx18447cryxl";
39 | };
40 |
41 | moreheaders-ext = fetchFromGitHub {
42 | owner = "openresty";
43 | repo = "headers-more-nginx-module";
44 | rev = "v0.25";
45 | sha256 = "1d71y1i0smi4gkzz731fhn58gr03b3s6jz6ipnfzxxaizmgxm3rb";
46 | };
47 |
48 | echo-ext = fetchFromGitHub {
49 | owner = "openresty";
50 | repo = "echo-nginx-module";
51 | rev = "v0.56";
52 | sha256 = "03vaf1ffhkj2s089f90h45n079h3zw47h6y5zpk752f4ydiagpgd";
53 | };
54 |
55 | develkit-ext = fetchFromGitHub {
56 | owner = "simpl";
57 | repo = "ngx_devel_kit";
58 | rev = "v0.2.19";
59 | sha256 = "1cqcasp4lc6yq5pihfcdw4vp4wicngvdc3nqg3bg52r63c1qrz76";
60 | };
61 |
62 | lua-ext = fetchFromGitHub {
63 | owner = "openresty";
64 | repo = "lua-nginx-module";
65 | rev = "v0.9.16";
66 | sha256 = "0dvdam228jhsrayb22ishljdkgib08bakh8ygn84sq0c2xbidzlp";
67 | };
68 |
69 | in
70 |
71 | stdenv.mkDerivation rec {
72 | name = "nginx-${version}";
73 | src = mainSrc;
74 |
75 | buildInputs =
76 | [ openssl zlib pcre libxml2 libxslt gd geoip
77 | ] ++ optional fullWebDAV expat
78 | ++ optional ngx_lua luajit;
79 |
80 | LUAJIT_LIB = if ngx_lua then "${luajit}/lib" else "";
81 | LUAJIT_INC = if ngx_lua then "${luajit}/include/luajit-2.0" else "";
82 |
83 | patches = if syslog then [ "${syslog-ext}/syslog-1.5.6.patch" ] else [];
84 |
85 | configureFlags = [
86 | "--with-http_ssl_module"
87 | "--with-http_spdy_module"
88 | "--with-http_realip_module"
89 | "--with-http_addition_module"
90 | "--with-http_xslt_module"
91 | "--with-http_image_filter_module"
92 | "--with-http_geoip_module"
93 | "--with-http_sub_module"
94 | "--with-http_dav_module"
95 | "--with-http_flv_module"
96 | "--with-http_mp4_module"
97 | "--with-http_gunzip_module"
98 | "--with-http_gzip_static_module"
99 | "--with-http_auth_request_module"
100 | "--with-http_random_index_module"
101 | "--with-http_secure_link_module"
102 | "--with-http_degradation_module"
103 | "--with-http_stub_status_module"
104 | "--with-ipv6"
105 | # Install destination problems
106 | # "--with-http_perl_module"
107 | ] ++ optional rtmp "--add-module=${rtmp-ext}"
108 | ++ optional fullWebDAV "--add-module=${dav-ext}"
109 | ++ optional syslog "--add-module=${syslog-ext}"
110 | ++ optional moreheaders "--add-module=${moreheaders-ext}"
111 | ++ optional echo "--add-module=${echo-ext}"
112 | ++ optional ngx_lua "--add-module=${develkit-ext} --add-module=${lua-ext}"
113 | ++ optional withStream "--with-stream"
114 | ++ optional (elem stdenv.system (with platforms; linux ++ freebsd)) "--with-file-aio";
115 |
116 |
117 | additionalFlags = optionalString stdenv.isDarwin "-Wno-error=deprecated-declarations";
118 |
119 | preConfigure = ''
120 | export NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -I${libxml2}/include/libxml2 $additionalFlags"
121 | '';
122 |
123 | postInstall = ''
124 | mv $out/sbin $out/bin
125 | '';
126 |
127 | meta = {
128 | description = "A reverse proxy and lightweight webserver";
129 | homepage = http://nginx.org;
130 | license = licenses.bsd2;
131 | platforms = platforms.all;
132 | maintainers = with maintainers; [ thoughtpolice raskin ];
133 | };
134 | }
135 |
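All optional modules default to off and are switched on per call site. A sketch of enabling a few of them (assuming this file is reachable as ./unstable.nix in a scope where callPackage can supply the remaining inputs):

    { pkgs ? import <nixpkgs> {} }:
    pkgs.callPackage ./unstable.nix {
      ngx_lua     = true;   # adds ngx_devel_kit + lua-nginx-module and links against luajit
      moreheaders = true;   # headers-more-nginx-module
      withStream  = true;   # passes --with-stream to configure
    }
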
--------------------------------------------------------------------------------
/pkgs/nix/default.nix:
--------------------------------------------------------------------------------
1 | { lib, stdenv, fetchurl, perl, curl, bzip2, sqlite, openssl ? null
2 | , pkgconfig, boehmgc, perlPackages, libsodium
3 | , storeDir ? "/nix/store"
4 | , stateDir ? "/nix/var"
5 | }:
6 |
7 | let
8 |
9 | common = { name, src }: stdenv.mkDerivation rec {
10 | inherit name src;
11 |
12 | outputs = [ "out" "doc" ];
13 |
14 | nativeBuildInputs = [ perl pkgconfig ];
15 |
16 | buildInputs = [ curl openssl sqlite ] ++
17 | lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium;
18 |
19 | propagatedBuildInputs = [ boehmgc ];
20 |
21 | # Note: bzip2 is not passed as a build input, because the unpack phase
22 | # would end up using the wrong bzip2 when cross-compiling.
23 | # XXX: The right thing would be to reinstate `--with-bzip2' in Nix.
24 | postUnpack =
25 | '' export CPATH="${bzip2}/include"
26 | export LIBRARY_PATH="${bzip2}/lib"
27 | export CXXFLAGS="-Wno-error=reserved-user-defined-literal"
28 | '';
29 |
30 | configureFlags =
31 | ''
32 | --with-store-dir=${storeDir} --localstatedir=${stateDir} --sysconfdir=/etc
33 | --with-dbi=${perlPackages.DBI}/${perl.libPrefix}
34 | --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}
35 | --with-www-curl=${perlPackages.WWWCurl}/${perl.libPrefix}
36 | --disable-init-state
37 | --enable-gc
38 | '';
39 |
40 | makeFlags = "profiledir=$(out)/etc/profile.d";
41 |
42 | installFlags = "sysconfdir=$(out)/etc";
43 |
44 | doInstallCheck = false;
45 |
46 | separateDebugInfo = true;
47 |
48 | crossAttrs = {
49 | postUnpack =
50 | '' export CPATH="${bzip2.crossDrv}/include"
51 | export NIX_CROSS_LDFLAGS="-L${bzip2.crossDrv}/lib -rpath-link ${bzip2.crossDrv}/lib $NIX_CROSS_LDFLAGS"
52 | '';
53 |
54 | configureFlags =
55 | ''
56 | --with-store-dir=${storeDir} --localstatedir=${stateDir}
57 | --with-dbi=${perlPackages.DBI}/${perl.libPrefix}
58 | --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}
59 | --with-www-curl=${perlPackages.WWWCurl}/${perl.libPrefix}
60 | --disable-init-state
61 | --enable-gc
62 | '' + stdenv.lib.optionalString (
63 | stdenv.cross ? nix && stdenv.cross.nix ? system
64 | ) ''--with-system=${stdenv.cross.nix.system}'';
65 |
66 | doInstallCheck = false;
67 | };
68 |
69 | enableParallelBuilding = true;
70 |
71 | meta = {
72 | description = "Powerful package manager that makes package management reliable and reproducible";
73 | longDescription = ''
74 | Nix is a powerful package manager for Linux and other Unix systems that
75 | makes package management reliable and reproducible. It provides atomic
76 | upgrades and rollbacks, side-by-side installation of multiple versions of
77 | a package, multi-user package management and easy setup of build
78 | environments.
79 | '';
80 | homepage = http://nixos.org/;
81 | license = stdenv.lib.licenses.lgpl2Plus;
82 | maintainers = [ stdenv.lib.maintainers.eelco ];
83 | platforms = stdenv.lib.platforms.all;
84 | };
85 | };
86 |
87 | in
88 | common rec {
89 | name = "nix-1.10";
90 | src = fetchurl {
91 | url = "http://nixos.org/releases/nix/${name}/${name}.tar.xz";
92 | sha256 = "5612ca7a549dd1ee20b208123e041aaa95a414a0e8f650ea88c672dc023d10f6";
93 | };
94 | }
95 |
--------------------------------------------------------------------------------
/pkgs/nq/default.nix:
--------------------------------------------------------------------------------
1 | { fetchgit, stdenv }:
2 |
3 | stdenv.mkDerivation rec {
4 | name = "nq-0.1";
5 |
6 | src = fetchgit {
7 | url = https://github.com/chneukirchen/nq;
8 | rev = "2556453d844f9d5224abd843ea2de3df1e8a0d42";
9 | sha256 = "cdd7b758a8925e28a3af76e663a55af2d1c8aa14d456ea1bd861227245deb894";
10 | };
11 |
12 | configurePhase = ''
13 | sed -i "s:^PREFIX=.*:PREFIX=$out:" Makefile
14 | '';
15 | }
16 |
--------------------------------------------------------------------------------
/pkgs/packer/default.nix:
--------------------------------------------------------------------------------
1 | { fetchurl, stdenv, unzip }:
2 | let
3 | version = "0.10.0";
4 | in stdenv.mkDerivation rec {
5 | name = "packer-${version}";
6 | src = fetchurl {
7 | url = "https://releases.hashicorp.com/packer/0.10.0/packer_0.10.0_linux_amd64.zip";
8 | sha256 = "1ra5kgabwvkhy2078xkffi0gbmiyyjjwvhcz0ls2194g1yy37pga";
9 | };
10 | buildInputs = [ unzip ];
11 | sourceRoot = ".";
12 | installPhase = ''
13 | mkdir -p $out/bin
14 | mv $sourceRoot/packer* $out/bin/
15 | chmod +x $out/bin/*
16 | '';
17 | }
18 |
--------------------------------------------------------------------------------
/pkgs/percona-toolkit/default.nix:
--------------------------------------------------------------------------------
1 | { perlPackages, fetchurl }:
2 |
3 | perlPackages.buildPerlPackage rec {
4 | name = "percona-toolkit-2.2.13";
5 |
6 | src = fetchurl {
7 | url = http://www.percona.com/redir/downloads/percona-toolkit/2.2.13/tarball/percona-toolkit-2.2.13.tar.gz;
8 | sha256 = "0qsazzpb2za6fc552nbmdkq0hv8gvx2g275x4bx7mkb3s4czcscf";
9 | };
10 |
11 | preConfigure = ''
12 | find . | while read fn; do
13 | if test -f "$fn"; then
14 | first=$(dd if="$fn" count=2 bs=1 2> /dev/null)
15 | if test "$first" = "#!"; then
16 | sed < "$fn" > "$fn".tmp \
17 | -e "s|^#\!\(.*[/\ ]perl.*\)$|#\!$perl/bin/perl $perlFlags|"
18 | if test -x "$fn"; then chmod +x "$fn".tmp; fi
19 | mv "$fn".tmp "$fn"
20 | fi
21 | fi
22 | done
23 | '';
24 |
25 | doCheck = false;
26 |
27 | propagatedBuildInputs = [ perlPackages.DBI perlPackages.DBDmysql ];
28 | }
29 |
--------------------------------------------------------------------------------
/pkgs/perf-map-agent/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchgit, cmake, openjdk }:
2 |
3 | stdenv.mkDerivation {
4 | name = "perf-map-agent";
5 | src = fetchgit {
6 | url = https://github.com/jrudolph/perf-map-agent;
7 | rev = "3acf8f487c444266eaaa8037b5a6de8573347313";
8 | sha256 = "4526fd92dc82dfec21fff94b67345464204778068c788c4b0a943bf986a27bb4";
9 | };
10 | buildInputs = [cmake openjdk];
11 | cmakeFlags = [
12 | "-DJAVA_HOME=${openjdk}/lib/openjdk/jre/"
13 | "-DJAVA_INCLUDE_PATH=${openjdk}/lib/openjdk/lib"
14 | "-DJAVA_INCLUDE_PATH2=${openjdk}/include/linux/"
15 | ];
16 | installPhase = ''
17 | mkdir $out
18 | cp -r out/attach-main.jar out/libperfmap.so $out
19 | '';
20 | }
21 |
--------------------------------------------------------------------------------
/pkgs/php/fix-paths-php7.patch:
--------------------------------------------------------------------------------
1 | --- php-7.0.0beta1/configure 2015-07-10 12:11:41.810045613 +0000
2 | +++ php-7.0.0beta1-new/configure 2015-07-17 16:10:21.775528267 +0000
3 | @@ -6172,7 +6172,7 @@
4 | as_fn_error $? "Please note that Apache version >= 2.0.44 is required" "$LINENO" 5
5 | fi
6 |
7 | - APXS_LIBEXECDIR='$(INSTALL_ROOT)'`$APXS -q LIBEXECDIR`
8 | + APXS_LIBEXECDIR="$prefix/modules"
9 | if test -z `$APXS -q SYSCONFDIR`; then
10 | INSTALL_IT="\$(mkinstalldirs) '$APXS_LIBEXECDIR' && \
11 | $APXS -S LIBEXECDIR='$APXS_LIBEXECDIR' \
12 | @@ -37303,9 +37303,7 @@
13 |
14 |
15 | if test "$PHP_GETTEXT" != "no"; then
16 | - for i in $PHP_GETTEXT /usr/local /usr; do
17 | - test -r $i/include/libintl.h && GETTEXT_DIR=$i && break
18 | - done
19 | + GETTEXT_DIR=$PHP_GETTEXT
20 |
21 | if test -z "$GETTEXT_DIR"; then
22 | as_fn_error $? "Cannot locate header file libintl.h" "$LINENO" 5
23 |
24 |
--------------------------------------------------------------------------------
/pkgs/php/fix-paths.patch:
--------------------------------------------------------------------------------
1 | diff -ru php-5.4.14/configure php-5.4.14-new/configure
2 | --- php-5.4.14/configure 2013-04-10 09:53:26.000000000 +0200
3 | +++ php-5.4.14-new/configure 2013-04-22 17:13:55.039043622 +0200
4 | @@ -6513,7 +6513,7 @@
5 |
6 | case $host_alias in
7 | *aix*)
8 | - APXS_LIBEXECDIR=`$APXS -q LIBEXECDIR`
9 | + APXS_LIBEXECDIR="$prefix/modules"
10 | EXTRA_LDFLAGS="$EXTRA_LDFLAGS -Wl,-brtl -Wl,-bI:$APXS_LIBEXECDIR/httpd.exp"
11 | PHP_AIX_LDFLAGS="-Wl,-brtl"
12 | build_type=shared
13 | @@ -6706,7 +6706,7 @@
14 | if test "$?" != "0"; then
15 | APACHE_INSTALL="$APXS -i -a -n php5 $SAPI_SHARED" # Old apxs does not have -S option
16 | else
17 | - APXS_LIBEXECDIR='$(INSTALL_ROOT)'`$APXS -q LIBEXECDIR`
18 | + APXS_LIBEXECDIR="$prefix/modules"
19 | if test -z `$APXS -q SYSCONFDIR`; then
20 | APACHE_INSTALL="\$(mkinstalldirs) '$APXS_LIBEXECDIR' && \
21 | $APXS -S LIBEXECDIR='$APXS_LIBEXECDIR' \
22 | @@ -7909,7 +7909,7 @@
23 | { (exit 1); exit 1; }; }
24 | fi
25 |
26 | - APXS_LIBEXECDIR='$(INSTALL_ROOT)'`$APXS -q LIBEXECDIR`
27 | + APXS_LIBEXECDIR="$prefix/modules"
28 | if test -z `$APXS -q SYSCONFDIR`; then
29 | INSTALL_IT="\$(mkinstalldirs) '$APXS_LIBEXECDIR' && \
30 | $APXS -S LIBEXECDIR='$APXS_LIBEXECDIR' \
31 | @@ -8779,7 +8779,7 @@
32 | { (exit 1); exit 1; }; }
33 | fi
34 |
35 | - APXS_LIBEXECDIR='$(INSTALL_ROOT)'`$APXS -q LIBEXECDIR`
36 | + APXS_LIBEXECDIR="$prefix/modules"
37 | if test -z `$APXS -q SYSCONFDIR`; then
38 | INSTALL_IT="\$(mkinstalldirs) '$APXS_LIBEXECDIR' && \
39 | $APXS -S LIBEXECDIR='$APXS_LIBEXECDIR' \
40 | @@ -9634,7 +9634,7 @@
41 |
42 | case $host_alias in
43 | *aix*)
44 | - APXS_LIBEXECDIR=`$APXS -q LIBEXECDIR`
45 | + APXS_LIBEXECDIR="$prefix/modules"
46 | EXTRA_LDFLAGS="$EXTRA_LDFLAGS -Wl,-brtl -Wl,-bI:$APXS_LIBEXECDIR/httpd.exp"
47 | PHP_AIX_LDFLAGS="-Wl,-brtl"
48 | build_type=shared
49 | @@ -9827,7 +9827,7 @@
50 | if test "$?" != "0"; then
51 | APACHE_HOOKS_INSTALL="$APXS -i -a -n php5 $SAPI_SHARED" # Old apxs does not have -S option
52 | else
53 | - APXS_LIBEXECDIR='$(INSTALL_ROOT)'`$APXS -q LIBEXECDIR`
54 | + APXS_LIBEXECDIR="$prefix/modules"
55 | if test -z `$APXS -q SYSCONFDIR`; then
56 | APACHE_HOOKS_INSTALL="\$(mkinstalldirs) '$APXS_LIBEXECDIR' && \
57 | $APXS -S LIBEXECDIR='$APXS_LIBEXECDIR' \
58 | @@ -59657,9 +59657,7 @@
59 |
60 |
61 | if test "$PHP_GETTEXT" != "no"; then
62 | - for i in $PHP_GETTEXT /usr/local /usr; do
63 | - test -r $i/include/libintl.h && GETTEXT_DIR=$i && break
64 | - done
65 | + GETTEXT_DIR=$PHP_GETTEXT
66 |
67 | if test -z "$GETTEXT_DIR"; then
68 | { { $as_echo "$as_me:$LINENO: error: Cannot locate header file libintl.h" >&5
69 |
--------------------------------------------------------------------------------
/pkgs/pivotal_agent/Gemfile:
--------------------------------------------------------------------------------
1 | source "http://rubygems.org"
2 | gem "newrelic_plugin"
3 | gem 'rabbitmq_manager', '~> 0.1.0'
4 | gem "redis"
5 | gem "daemons"
6 |
--------------------------------------------------------------------------------
/pkgs/pivotal_agent/Gemfile.lock:
--------------------------------------------------------------------------------
1 | GEM
2 | remote: http://rubygems.org/
3 | specs:
4 | daemons (1.2.3)
5 | faraday (0.9.2)
6 | multipart-post (>= 1.2, < 3)
7 | faraday_middleware (0.10.0)
8 | faraday (>= 0.7.4, < 0.10)
9 | json (1.8.3)
10 | multipart-post (2.0.0)
11 | newrelic_plugin (1.3.1)
12 | json
13 | rabbitmq_manager (0.1.0)
14 | faraday
15 | faraday_middleware
16 | redis (3.2.1)
17 |
18 | PLATFORMS
19 | ruby
20 |
21 | DEPENDENCIES
22 | daemons
23 | newrelic_plugin
24 | rabbitmq_manager (~> 0.1.0)
25 | redis
26 |
--------------------------------------------------------------------------------
/pkgs/pivotal_agent/default.nix:
--------------------------------------------------------------------------------
1 | { pkgs, stdenv, ruby, bundler }:
2 |
3 | let
4 | version = "1.0.5";
5 |
6 | env = pkgs.bundlerEnv {
7 | name = "pivotal_agent-gems-${version}";
8 | inherit (pkgs) ruby;
9 | gemfile = ./Gemfile;
10 | lockfile = ./Gemfile.lock;
11 | gemset = ./gemset.nix;
12 | };
13 | in
14 |
15 | stdenv.mkDerivation {
16 | name = "pivotal_agent-${version}";
17 |
18 | src = pkgs.fetchgit {
19 | url = https://github.com/pivotalsoftware/newrelic_pivotal_agent;
20 | rev = "0b14856792b47280e598b0275725a5ddefbee58a";
21 | sha256 = "d9d065c44dfdc1b219847222fdbdda10feb9cece8b5b91bbdb57087040549d3f";
22 | };
23 |
24 | buildInputs = [
25 | ruby
26 | bundler
27 | ];
28 |
29 | GEM_HOME = "${env}/${ruby.gemPath}";
30 |
31 | buildPhase = ''
32 | cat > config/newrelic_plugin.yml < $out/profile <> $out/sbin/rabbitmq-env
30 | '';
31 | }
32 |
--------------------------------------------------------------------------------
/pkgs/replicator/default.nix:
--------------------------------------------------------------------------------
1 | { mkDerivation, base, bytestring, ConfigFile, containers, directory
2 | , fetchgit, filepath, Glob, hflags, lens-family-core, MissingH
3 | , monad-parallel, mtl, pipes, pipes-bytestring, pipes-group
4 | , pipes-safe, pipes-shell, pipes-zlib, raw-strings-qq
5 | , regex-applicative, stdenv, time, transformers, unix
6 | }:
7 | mkDerivation {
8 | pname = "replicator";
9 | version = "0.5.x";
10 | src = fetchgit {
11 | url = "https://github.com/zalora/replicator.git";
12 | rev = "3dbe8614813cd4d1742286b3bce023187871354e";
13 | sha256 = "1qb8z6cj5rx902r1dni9hq24balvj6mqzcqy4v7ny9h5vv1y88dk";
14 | };
15 | isLibrary = false;
16 | isExecutable = true;
17 | executableHaskellDepends = [
18 | base bytestring ConfigFile containers directory filepath Glob
19 | hflags lens-family-core MissingH monad-parallel mtl pipes
20 | pipes-bytestring pipes-group pipes-safe pipes-shell pipes-zlib
21 | raw-strings-qq regex-applicative time transformers unix
22 | ];
23 | description = "Automate creating MySQL multi-source slaves";
24 | license = stdenv.lib.licenses.mit;
25 | }
26 |
--------------------------------------------------------------------------------
/pkgs/retry/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, coreutils }:
2 |
3 | stdenv.mkDerivation {
4 | name = "retry";
5 |
6 | # Used by substituteAll.
7 | inherit coreutils;
8 |
9 | buildCommand = ''
10 | mkdir -p $out/bin
11 | substituteAll $script $out/bin/retry
12 | chmod +x $out/bin/retry
13 | '';
14 |
15 | script = ./retry;
16 |
17 | meta = with stdenv.lib; {
18 | description = "Retry failed commands";
19 | platforms = platforms.all;
20 | license = licenses.mit;
21 | };
22 | }
23 |
--------------------------------------------------------------------------------
/pkgs/retry/retry:
--------------------------------------------------------------------------------
1 | #! /bin/sh
2 | #
3 | # usage: retry COMMAND [ARGS...]
4 | #
5 | # Retry to run a command successfully with configurable delays between runs.
6 | #
7 | # Notice that the command won't receive any data via standard input.
8 | #
9 | set -euf
10 |
11 | export PATH=@coreutils@/bin"${PATH+:$PATH}"
12 |
13 | # By default try to run command 10 times with a delay of 3s between retries.
14 | retry_count=${RETRY_COUNT:-10}
15 | retry_delay=${RETRY_DELAY:-3}
16 |
17 | # retry_delay_seq specifies the delays between unsuccessful attempts to run the
18 | # command as space separated list of sleep arguments. See also sleep(1).
19 | #
20 | # Notice how this sequence is one less than the desired number of retries
21 | # because we don't need to wait after the last failed attempt.
22 | #
23 | # You can override this variable to e.g. implement a non-linear retry scheme.
24 | retry_delay_seq=${RETRY_DELAY_SEQ:-$(
25 | for i in $(seq 2 $retry_count); do
26 | echo $retry_delay
27 | done
28 | )}
29 |
30 | # main COMMAND [ARGS...]
31 | main() {
32 | try_exec "$@"
33 | for delay in $retry_delay_seq; do
34 | echo "$0: \`$@\` exit code $exit_code; retrying after sleep $delay..." >&2
35 | sleep $delay
36 | try_exec "$@"
37 | done
38 | echo "$0: \`$@\` exit code $exit_code; giving up." >&2
39 | exit $exit_code
40 | }
41 |
42 | # try_exec COMMAND [ARGS...]
43 | # If command exits with a zero exit code, then try_exec will exit the current
44 | # process (mimicking the behavior of exec). Otherwise set the exit_code
45 | # variable for further inspection (or retry in our case).
46 | try_exec() {
47 | if env "$@" $wrapper
18 |
19 | params=("$@")
20 | for ((n = 2; n < ''${#params[*]}; n += 1)); do
21 | p=''${params[$n]}
22 |
23 | if test "$p" = "--set"; then
24 | varName=''${params[$((n + 1))]}
25 | value=''${params[$((n + 2))]}
26 | n=$((n + 2))
27 | echo "export $varName=$value" >> $wrapper
28 | fi
29 |
30 | if test "$p" = "--run"; then
31 | command=''${params[$((n + 1))]}
32 | n=$((n + 1))
33 | echo "$command" >> $wrapper
34 | fi
35 |
36 | if test "$p" = "--suffix" -o "$p" = "--prefix"; then
37 | varName=''${params[$((n + 1))]}
38 | separator=''${params[$((n + 2))]}
39 | value=''${params[$((n + 3))]}
40 | n=$((n + 3))
41 | if test -n "$value"; then
42 | if test "$p" = "--suffix"; then
43 | echo "export $varName=\$$varName\''${$varName:+$separator}$value" >> $wrapper
44 | else
45 | echo "export $varName=$value\''${$varName:+$separator}\$$varName" >> $wrapper
46 | fi
47 | fi
48 | fi
49 |
50 | if test "$p" = "--suffix-each"; then
51 | varName=''${params[$((n + 1))]}
52 | separator=''${params[$((n + 2))]}
53 | values=''${params[$((n + 3))]}
54 | n=$((n + 3))
55 | for value in $values; do
56 | echo "export $varName=\$$varName\''${$varName:+$separator}$value" >> $wrapper
57 | done
58 | fi
59 |
60 | if test "$p" = "--suffix-contents" -o "$p" = "--prefix-contents"; then
61 | varName=''${params[$((n + 1))]}
62 | separator=''${params[$((n + 2))]}
63 | fileNames=''${params[$((n + 3))]}
64 | n=$((n + 3))
65 | for fileName in $fileNames; do
66 | if test "$p" = "--suffix-contents"; then
67 | echo "export $varName=\$$varName\''${$varName:+$separator}$(cat $fileName)" >> $wrapper
68 | else
69 | echo "export $varName=$(cat $fileName)\''${$varName:+$separator}\$$varName" >> $wrapper
70 | fi
71 | done
72 | fi
73 |
74 | if test "$p" = "--add-flags"; then
75 | flags=''${params[$((n + 1))]}
76 | n=$((n + 1))
77 | flagsBefore="$flagsBefore $flags"
78 | fi
79 | done
80 |
81 | # Note: extraFlagsArray is an array containing additional flags
82 | # that may be set by --run actions.
83 | # XXX: this doesn't have exec here:
84 | echo "$original" $flagsBefore '"''${extraFlagsArray[@]}"' '"$@"' >> $wrapper
85 |
86 | chmod +x $wrapper
87 | }
88 |
89 | wrapPythonPrograms() {
90 | wrapPythonProgramsIn $out "$out $pythonPath"
91 | }
92 |
93 | wrapPythonProgramsIn() {
94 | local dir="$1"
95 | local pythonPath="$2"
96 | local python="@executable@"
97 | local i
98 |
99 | declare -A pythonPathsSeen=()
100 | program_PYTHONPATH=
101 | program_PATH=
102 | for i in $pythonPath; do
103 | _addToPythonPath $i
104 | done
105 |
106 | for i in $(find "$dir" -type f -perm +0100); do
107 |
108 | # Rewrite "#! .../env python" to "#! /nix/store/.../python".
109 | if head -n1 "$i" | grep -q '#!.*/env.*\(python\|pypy\)'; then
110 | sed -i "$i" -e "1 s^.*/env[ ]*\(python\|pypy\)^#! $python^"
111 | fi
112 |
113 | if head -n1 "$i" | grep -q '/python\|/pypy'; then
114 | # don't wrap EGG-INFO scripts since they are called from python
115 | if echo "$i" | grep -v EGG-INFO/scripts; then
116 | echo "wrapping \`$i'..."
117 | wrapProgram "$i" \
118 | --prefix PYTHONPATH ":" $program_PYTHONPATH \
119 | --prefix PATH ":" $program_PATH \
120 | --run "exec ${newrelic-python}/bin/newrelic-admin run-python \\"
121 | fi
122 | fi
123 | done
124 | }
125 | wrapPythonPrograms
126 | '';
127 | })
128 |
--------------------------------------------------------------------------------
/pkgs/thumbor/thumbor-nostrip.patch:
--------------------------------------------------------------------------------
1 | diff --git a/thumbor/handlers/__init__.py b/thumbor/handlers/__init__.py
2 | index 953d129..ee1f86a 100644
3 | --- a/thumbor/handlers/__init__.py
4 | +++ b/thumbor/handlers/__init__.py
5 | @@ -36,6 +36,9 @@ CONTENT_TYPE = {
6 |
7 |
8 | class BaseHandler(tornado.web.RequestHandler):
9 | + DIRTY_PROTOCOL = 'http:/static'
10 | + CLEAN_PROTOCOL = 'http://static'
11 | +
12 | def _error(self, status, msg=None):
13 | self.set_status(status)
14 | if msg is not None:
15 | @@ -87,8 +90,13 @@ class BaseHandler(tornado.web.RequestHandler):
16 |
17 | after_transform_cb = functools.partial(self.after_transform, self.context)
18 | Transformer(self.context).transform(after_transform_cb)
19 | + image_url = self.sanitize(self.context.request.image_url)
20 | + self._fetch(image_url, self.context.request.extension, callback)
21 |
22 | - self._fetch(self.context.request.image_url, self.context.request.extension, callback)
23 | + def sanitize(self, url):
24 | + if url.find(self.DIRTY_PROTOCOL) > -1:
25 | + return url.replace(self.DIRTY_PROTOCOL, self.CLEAN_PROTOCOL)
26 | + return url
27 |
28 | def normalize_crops(self, normalized, req, engine):
29 | new_crops = None
30 | diff --git a/thumbor/handlers/imaging.py b/thumbor/handlers/imaging.py
31 | index 8eb75d3..3e29881 100644
32 | --- a/thumbor/handlers/imaging.py
33 | +++ b/thumbor/handlers/imaging.py
34 | @@ -25,16 +25,17 @@ class ImagingHandler(ContextHandler):
35 | if self.context.modules.storage.exists(kw['image'][:32]):
36 | kw['image'] = kw['image'][:32]
37 |
38 | - url = self.request.uri
39 | -
40 | - if not self.validate(kw['image']):
41 | + url = self.sanitize(self.request.uri)
42 | +
43 | + image_url = self.sanitize(kw['image'])
44 | + if not self.validate(image_url):
45 | self._error(404, 'No original image was specified in the given URL')
46 | return
47 |
48 | kw['request'] = self.request
49 |
50 | self.context.request = RequestParameters(**kw)
51 | -
52 | + self.context.request.image_url = self.sanitize(self.context.request.image_url)
53 | has_none = not self.context.request.unsafe and not self.context.request.hash
54 | has_both = self.context.request.unsafe and self.context.request.hash
55 |
56 |
--------------------------------------------------------------------------------
/pkgs/to-json-array/default.nix:
--------------------------------------------------------------------------------
1 | { mkDerivation, aeson, base, bytestring, stdenv }:
2 | mkDerivation {
3 | pname = "to-json-array";
4 | version = "1";
5 | src = ./.;
6 | isLibrary = false;
7 | isExecutable = true;
8 | executableHaskellDepends = [ aeson base bytestring ];
9 | description = "command-line utility to turn arguments into a JSON array of strings";
10 | license = stdenv.lib.licenses.unfree;
11 | }
12 |
--------------------------------------------------------------------------------
/pkgs/to-json-array/to-json-array.cabal:
--------------------------------------------------------------------------------
1 | build-type: Simple
2 | cabal-version: >= 1.2
3 | name: to-json-array
4 | synopsis: command-line utility to turn arguments into a JSON array of strings
5 | version: 1
6 |
7 | Executable to-json-array
8 | Build-Depends:
9 | aeson,
10 | base,
11 | bytestring
12 | GHC-Options: -Wall
13 | Main-Is: to-json-array.hs
14 |
--------------------------------------------------------------------------------
/pkgs/to-json-array/to-json-array.hs:
--------------------------------------------------------------------------------
1 | module Main (main) where
2 |
3 | import Prelude ((.),(=<<),IO)
4 | import Data.Aeson.Encode (encode)
5 | import System.Environment (getArgs)
6 | import Data.ByteString.Lazy.Char8 (putStrLn)
7 |
8 | main :: IO ()
9 | main = putStrLn . encode =<< getArgs
10 |
--------------------------------------------------------------------------------
/pkgs/twemproxy/default.nix:
--------------------------------------------------------------------------------
1 | { fetchurl, autoreconfHook, stdenv }:
2 |
3 | stdenv.mkDerivation rec {
4 | name = "twemproxy-0.4.1";
5 |
6 | buildInputs = [ autoreconfHook ];
7 |
8 | src = fetchurl {
9 | url = https://github.com/twitter/twemproxy/archive/v0.4.1.tar.gz;
10 | sha256 = "1q7dm1yhalcxzjzaz2i3azkx988smim32j53ayaflywlj47r9hh0";
11 | };
12 | }
13 |
--------------------------------------------------------------------------------
/pkgs/unicron/default.nix:
--------------------------------------------------------------------------------
1 | { mkDerivation, attoparsec, base, derive, fetchzip, hspec
2 | , hspec-expectations, mtl, mtl-compat, old-locale, process
3 | , QuickCheck, stdenv, text, time, transformers-compat
4 | }:
5 | mkDerivation {
6 | pname = "cron";
7 | version = "0.3.0";
8 | src = fetchzip {
9 | url = "https://github.com/proger/cron/archive/unicron.zip";
10 | sha256 = "02lr2ykxhfbzrq10z5cd5cf9019pls22f3wk6xdvg68p1gy43hmm";
11 | };
12 | isLibrary = true;
13 | isExecutable = true;
14 | configureFlags = [
15 | "--ghc-option=-threaded"
16 | "--ghc-option=-rtsopts"
17 | "--ghc-option=-with-rtsopts=-N"
18 | ];
19 | libraryHaskellDepends = [
20 | attoparsec base mtl mtl-compat old-locale text time
21 | ];
22 | executableHaskellDepends = [
23 | attoparsec base mtl mtl-compat old-locale process text time
24 | ];
25 | testHaskellDepends = [
26 | attoparsec base derive hspec hspec-expectations QuickCheck text
27 | time transformers-compat
28 | ];
29 | doCheck = false;
30 | homepage = "http://github.com/michaelxavier/cron";
31 | description = "Cron datatypes and Attoparsec parser";
32 | license = stdenv.lib.licenses.mit;
33 | }
34 |
--------------------------------------------------------------------------------
/pkgs/upcast/default.nix:
--------------------------------------------------------------------------------
1 | { mkDerivation, aeson, aeson-pretty, amazonka, amazonka-core
2 | , amazonka-ec2, amazonka-elb, amazonka-route53, async, attoparsec
3 | , base, base64-bytestring, bytestring, conduit, conduit-extra
4 | , containers, directory, exceptions, fetchgit, filepath, lens
5 | , lifted-base, mtl, natural-sort, optparse-applicative, pretty-show
6 | , process, random, resourcet, scientific, semigroups, stdenv
7 | , tagged, text, time, unix, unordered-containers, vector
8 | , vk-posix-pty, xml-conduit
9 | }:
10 | mkDerivation {
11 | pname = "upcast";
12 | version = "0.1.1.0";
13 | src = fetchgit {
14 | url = "https://github.com/zalora/upcast.git";
15 | sha256 = "dbb721b2bbbe549cd4608d15c4abc8b6e25c7f5dd9c25fe9b7b62a381e17f8a0";
16 | rev = "ed869de9551cce1eff1d35941d145e4075d9f1fe";
17 | };
18 | isLibrary = true;
19 | isExecutable = true;
20 | libraryHaskellDepends = [
21 | aeson aeson-pretty amazonka amazonka-core amazonka-ec2 amazonka-elb
22 | amazonka-route53 async attoparsec base base64-bytestring bytestring
23 | conduit conduit-extra containers directory exceptions filepath lens
24 | lifted-base mtl natural-sort optparse-applicative pretty-show
25 | process random resourcet scientific semigroups tagged text time
26 | unix unordered-containers vector vk-posix-pty xml-conduit
27 | ];
28 | executableHaskellDepends = [
29 | aeson aeson-pretty amazonka amazonka-core amazonka-ec2 amazonka-elb
30 | amazonka-route53 async attoparsec base base64-bytestring bytestring
31 | conduit conduit-extra containers directory exceptions filepath lens
32 | lifted-base mtl natural-sort optparse-applicative pretty-show
33 | process random resourcet scientific semigroups tagged text time
34 | unix unordered-containers vector vk-posix-pty xml-conduit
35 | ];
36 | homepage = "https://github.com/zalora/upcast#readme";
37 | description = "Nix-based Linux deployment platform tools";
38 | license = stdenv.lib.licenses.mit;
39 | }
40 |
--------------------------------------------------------------------------------
/pkgs/upcast/ng.nix:
--------------------------------------------------------------------------------
1 | { mkDerivation, aeson, aeson-pretty, amazonka, amazonka-cloudwatch
2 | , amazonka-core, amazonka-ec2, amazonka-elb, amazonka-route53, array, async
3 | , attoparsec, base, base64-bytestring, bifunctors, bytestring
4 | , conduit, conduit-extra, containers, directory, exceptions
5 | , fetchgit, filepath, hashable, iproute, lens, lens-action
6 | , lens-aeson, lifted-base, mtl, natural-sort, optparse-applicative
7 | , pretty-show, process, random, resourcet, scientific, semigroups
8 | , stdenv, tagged, text, time, unix, unordered-containers, vector
9 | , vk-posix-pty, witherable
10 | }:
11 | mkDerivation {
12 | pname = "upcast";
13 | version = "0.1.1.0";
14 | src = fetchgit {
15 | url = git://github.com/zalora/upcast.git;
16 | rev = "eac3b8c89e69d55d566f59f30e0226926381766d";
17 | sha256 = "ded7efa4a43915e656ca8117b1d01257f03eba9c1b8197d5c627efe1b5159667";
18 | };
19 | isLibrary = true;
20 | isExecutable = true;
21 | libraryHaskellDepends = [
22 | aeson aeson-pretty amazonka amazonka-cloudwatch amazonka-core amazonka-ec2 amazonka-elb
23 | amazonka-route53 array async attoparsec base base64-bytestring
24 | bifunctors bytestring conduit conduit-extra containers directory
25 | exceptions filepath hashable iproute lens lens-action lens-aeson
26 | lifted-base mtl natural-sort optparse-applicative pretty-show
27 | process random resourcet scientific semigroups tagged text time
28 | unix unordered-containers vector vk-posix-pty witherable
29 | ];
30 | executableHaskellDepends = [
31 | optparse-applicative
32 | ];
33 | homepage = "https://github.com/zalora/upcast#readme";
34 | description = "Nix-based Linux deployment platform tools";
35 | license = stdenv.lib.licenses.mit;
36 | }
37 |
--------------------------------------------------------------------------------
/pkgs/vault/default.nix:
--------------------------------------------------------------------------------
1 | { fetchgit, goPackages, runCommand, stdenv, ... }:
2 | let
3 | # Godeps.nix generated by:
4 | # git clone https://github.com/hashicorp/vault /tmp/go/src/github.com/hashicorp/vault
5 | # export XC_ARCH=amd64 XC_OS=linux PATH=/tmp/go/bin:$PATH GOPATH=/tmp/go
6 | # make -C /tmp/go/src/github.com/hashicorp/vault bootstrap
7 | # make -C /tmp/go/src/github.com/hashicorp/vault bin
8 | # ./Godeps2nix.sh /tmp/go/src/github.com/hashicorp/vault/Godeps/Godeps.json > Godeps.nix
9 | # using https://gist.github.com/4z3/54b74d5324af82f52dc2#file-godeps2nix-sh
10 | Godeps = import ./Godeps.nix { inherit runCommand fetchgit; };
11 | in
12 |
13 | stdenv.mkDerivation rec {
14 | name = "vault-0.4.0";
15 | src = fetchgit {
16 | url = https://github.com/hashicorp/vault;
17 | rev = "refs/tags/v0.4.0";
18 | sha256 = "143q2ng6rcci34d9mz36b4dpq75hd81f242ar94d2zxzmp5jy5mj";
19 | };
20 |
21 | buildPhase = ''
22 | mkdir deps self
23 | ln -s ${Godeps} deps/src
24 | mkdir -p self/src/github.com/hashicorp
25 | ln -s $PWD self/src/github.com/hashicorp/vault
26 | export GOPATH=$PWD/deps:$PWD/self
27 | mkdir workdir
28 | cd workdir
29 | ${goPackages.go}/bin/go build -v github.com/hashicorp/vault
30 | '';
31 |
32 | installPhase = ''
33 | mkdir -p $out/bin
34 | cp vault $out/bin
35 | '';
36 | }
37 |
--------------------------------------------------------------------------------
/pkgs/virtualbox/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv, fetchurl, lib, iasl, dev86, pam, libxslt, libxml2
2 | , libIDL, libcap, zlib, libpng, glib, kernel, lvm2
3 | , which, alsaLib, curl, libvpx, gawk, nettools
4 | , xorriso, makeself, perl, pkgconfig, nukeReferences
5 | , javaBindings ? false, jdk ? null
6 | , pythonBindings ? false, python ? null
7 | , enableExtensionPack ? false, requireFile ? null, patchelf ? null, fakeroot ? null
8 | , pulseSupport ? false, pulseaudio ? null
9 | , guiSupport ? false, xproto ? null, libX11 ? null, libXext ? null, libXcursor ? null, qt4 ? null, SDL ? null, libXmu ? null
10 | , enableHardening ? false
11 | }:
12 |
13 | with stdenv.lib;
14 |
15 | let
16 | buildType = "release";
17 |
18 | version = "4.3.26";
19 |
20 | forEachModule = action: ''
21 | for mod in \
22 | out/linux.*/${buildType}/bin/src/vboxdrv \
23 | out/linux.*/${buildType}/bin/src/vboxpci \
24 | out/linux.*/${buildType}/bin/src/vboxnetadp \
25 | out/linux.*/${buildType}/bin/src/vboxnetflt
26 | do
27 | if [ "x$(basename "$mod")" != xvboxdrv -a ! -e "$mod/Module.symvers" ]
28 | then
29 | cp -v out/linux.*/${buildType}/bin/src/vboxdrv/Module.symvers \
30 | "$mod/Module.symvers"
31 | fi
32 | INSTALL_MOD_PATH="$out" INSTALL_MOD_DIR=misc \
33 | make -C "$MODULES_BUILD_DIR" DEPMOD=/do_not_use_depmod \
34 | "M=\$(PWD)/$mod" BUILD_TYPE="${buildType}" ${action}
35 | done
36 | '';
37 |
38 | # See https://github.com/NixOS/nixpkgs/issues/672 for details
39 | extpackRevision = "98988";
40 | extensionPack = requireFile rec {
41 | name = "Oracle_VM_VirtualBox_Extension_Pack-${version}-${extpackRevision}.vbox-extpack";
42 | # IMPORTANT: Hash must be base16 encoded because it's used as an input to
43 | # VBoxExtPackHelperApp!
44 | # Tip: see http://dlc.sun.com.edgesuite.net/virtualbox/4.3.10/SHA256SUMS
45 | sha256 = "4e39a6d0da23799a31c3f6a4022b144ef3ddfe30c523e51b21bf7d9ebade62ce";
46 | message = ''
47 | In order to use the extension pack, you need to comply with the VirtualBox Personal Use
48 | and Evaluation License (PUEL) by downloading the related binaries from:
49 |
50 | https://www.virtualbox.org/wiki/Downloads
51 |
52 | Once you have downloaded the file, please use the following command and re-run the
53 | installation:
54 |
55 | nix-prefetch-url file://${name}
56 | '';
57 | };
58 |
59 | graphicalInputs = [
60 | xproto libX11 libXext libXcursor qt4
61 | SDL libXmu
62 | ];
63 |
64 | in stdenv.mkDerivation {
65 | name = "virtualbox-${version}-${kernel.version}";
66 |
67 | src = fetchurl {
68 | url = "http://download.virtualbox.org/virtualbox/${version}/VirtualBox-${version}.tar.bz2";
69 | sha256 = "e2949c250a1de30997e658de9e3d8545e71318a9844d80536137d76db4f08961";
70 | };
71 |
72 | buildInputs =
73 | [ iasl dev86 libxslt libxml2 libIDL
74 | libcap glib lvm2 python alsaLib curl
75 | libvpx pam xorriso makeself perl
76 | pkgconfig which nukeReferences libpng ]
77 | ++ optional javaBindings jdk
78 | ++ optional pythonBindings python
79 | ++ optional pulseSupport pulseaudio
80 | ++ optionals guiSupport graphicalInputs;
81 |
82 | prePatch = ''
83 | set -x
84 | MODULES_BUILD_DIR=`echo ${kernel.dev}/lib/modules/*/build`
85 | sed -e 's@/lib/modules/`uname -r`/build@'$MODULES_BUILD_DIR@ \
86 | -e 's@MKISOFS --version@MKISOFS -version@' \
87 | -e 's@PYTHONDIR=.*@PYTHONDIR=${if pythonBindings then python else ""}@' \
88 | -i configure
89 | ls kBuild/bin/linux.x86/k* tools/linux.x86/bin/* | xargs -n 1 patchelf --set-interpreter ${stdenv.glibc}/lib/ld-linux.so.2
90 | ls kBuild/bin/linux.amd64/k* tools/linux.amd64/bin/* | xargs -n 1 patchelf --set-interpreter ${stdenv.glibc}/lib/ld-linux-x86-64.so.2
91 | find . -type f -iname '*makefile*' -exec sed -i -e 's/depmod -a/:/g' {} +
92 | sed -e 's@"libasound.so.2"@"${alsaLib}/lib/libasound.so.2"@g' -i src/VBox/Main/xml/Settings.cpp src/VBox/Devices/Audio/alsa_stubs.c
93 | export USER=nix
94 | set +x
95 | '';
96 |
97 | patches = optional enableHardening ./hardened.patch;
98 |
99 | postPatch = ''
100 | sed -i -e 's|/sbin/ifconfig|${nettools}/bin/ifconfig|' \
101 | src/apps/adpctl/VBoxNetAdpCtl.cpp
102 | '';
103 |
104 | configurePhase = ''
105 | cat >> LocalConfig.kmk < ./VBoxLinuxAdditions.run
29 | chmod 755 ./VBoxLinuxAdditions.run
30 | ./VBoxLinuxAdditions.run --noexec --keep
31 | ''
32 | else throw ("Architecture: "+stdenv.system+" not supported for VirtualBox guest additions")
33 | }
34 |
35 | # Unpack files
36 | cd install
37 | ${if stdenv.system == "i686-linux" then ''
38 | tar xfvj VBoxGuestAdditions-x86.tar.bz2
39 | ''
40 | else if stdenv.system == "x86_64-linux" then ''
41 | tar xfvj VBoxGuestAdditions-amd64.tar.bz2
42 | ''
43 | else throw ("Architecture: "+stdenv.system+" not supported for VirtualBox guest additions")
44 | }
45 |
46 |
47 | # Build kernel modules
48 | cd src
49 |
50 | for i in *
51 | do
52 | cd $i
53 | find . -type f | xargs sed 's/depmod -a/true/' -i
54 | make
55 | cd ..
56 | done
57 |
58 | cd ..
59 |
60 | # Change the interpreter for various binaries
61 | for i in sbin/VBoxService bin/{VBoxClient,VBoxControl} lib/VBoxGuestAdditions/mount.vboxsf
62 | do
63 | ${if stdenv.system == "i686-linux" then ''
64 | patchelf --set-interpreter ${stdenv.glibc}/lib/ld-linux.so.2 $i
65 | ''
66 | else if stdenv.system == "x86_64-linux" then ''
67 | patchelf --set-interpreter ${stdenv.glibc}/lib/ld-linux-x86-64.so.2 $i
68 | ''
69 | else throw ("Architecture: "+stdenv.system+" not supported for VirtualBox guest additions")
70 | }
71 | patchelf --set-rpath ${stdenv.cc.cc}/lib:${dbus}/lib:${libX11}/lib:${libXt}/lib:${libXext}/lib:${libXmu}/lib:${libXfixes}/lib:${libXrandr}/lib:${libXcursor}/lib $i
72 | done
73 |
74 | for i in lib/VBoxOGL*.so
75 | do
76 | patchelf --set-rpath $out/lib:${dbus}/lib $i
77 | done
78 |
79 | # FIXME: Virtualbox 4.3.22 moved VBoxClient-all (required by Guest Additions
80 | # NixOS module) to 98vboxadd-xclient. For now, just work around it:
81 | mv lib/VBoxGuestAdditions/98vboxadd-xclient bin/VBoxClient-all
82 |
83 | # Remove references to /usr from various scripts and files
84 | sed -i -e "s|/usr/bin|$out/bin|" share/VBoxGuestAdditions/vboxclient.desktop
85 | sed -i -e "s|/usr/bin|$out/bin|" bin/VBoxClient-all
86 |
87 | # Install binaries
88 | mkdir -p $out/sbin
89 | install -m 4755 lib/VBoxGuestAdditions/mount.vboxsf $out/sbin/mount.vboxsf
90 | install -m 755 sbin/VBoxService $out/sbin
91 |
92 | mkdir -p $out/bin
93 | install -m 755 bin/VBoxClient $out/bin
94 | install -m 755 bin/VBoxControl $out/bin
95 | install -m 755 bin/VBoxClient-all $out/bin
96 |
97 | # Nixpkgs tends to prefer bin over sbin. See `dontMoveSbin` in
98 | # http://nixos.org/nixpkgs/manual/#ssec-fixup-phase.
99 | ln -s $out/sbin/* $out/bin/
100 |
101 | wrapProgram $out/bin/VBoxClient-all \
102 | --prefix PATH : "${which}/bin"
103 |
104 | # Install OpenGL libraries
105 | mkdir -p $out/lib
106 | cp -v lib/VBoxOGL*.so $out/lib
107 | mkdir -p $out/lib/dri
108 | ln -s $out/lib/VBoxOGL.so $out/lib/dri/vboxvideo_dri.so
109 |
110 | # Install desktop file
111 | mkdir -p $out/share/autostart
112 | cp -v share/VBoxGuestAdditions/vboxclient.desktop $out/share/autostart
113 |
114 | # Install Xorg drivers
115 | mkdir -p $out/lib/xorg/modules/{drivers,input}
116 | install -m 644 lib/VBoxGuestAdditions/vboxvideo_drv_${xserverABI}.so $out/lib/xorg/modules/drivers/vboxvideo_drv.so
117 |
118 | # Install kernel modules
119 | cd src
120 |
121 | for i in *
122 | do
123 | cd $i
124 | kernelVersion=$(cd ${kernel.dev}/lib/modules; ls)
125 | export MODULE_DIR=$out/lib/modules/$kernelVersion/misc
126 | find . -type f | xargs sed -i -e "s|-o root||g" \
127 | -e "s|-g root||g"
128 | make install
129 | cd ..
130 | done
131 | ''; # */
132 |
133 | meta = {
134 | description = "Guest additions for VirtualBox";
135 | longDescription = ''
136 | Various add-ons which make NixOS work better as a guest OS inside VirtualBox.
137 | This add-on provides support for dynamic resizing of the X Display, shared
138 | host/guest clipboard support and guest OpenGL support.
139 | '';
140 | license = "GPL";
141 | platforms = lib.platforms.linux;
142 | };
143 | }
144 |
--------------------------------------------------------------------------------
/pkgs/vk-aws-route53/default.nis:
--------------------------------------------------------------------------------
1 | { cabal, aws, httpConduit, httpTypes, resourcet, text, time
2 | , xmlConduit, xmlHamlet
3 | }:
4 | cabal.mkDerivation (self: {
5 | pname = "vk-aws-route53";
6 | version = "0.1.2";
7 | sha256 = "0sblvj89bb7vxgy09m88gcphqc9w2mpawg8kdz0r77y7db0vzb4x";
8 | buildDepends = [
9 | aws httpConduit httpTypes resourcet text time xmlConduit xmlHamlet
10 | ];
11 | meta = {
12 | description = "Amazon Route53 DNS service plugin for the aws package";
13 | license = self.stdenv.lib.licenses.bsd3;
14 | platforms = self.ghc.meta.platforms;
15 | };
16 | })
17 |
--------------------------------------------------------------------------------
/pkgs/vk-aws-route53/default.nix:
--------------------------------------------------------------------------------
1 | { cabal, aws, httpConduit, httpTypes, resourcet, text, time
2 | , xmlConduit, xmlHamlet
3 | }:
4 | cabal.mkDerivation (self: {
5 | pname = "vk-aws-route53";
6 | version = "0.1.2";
7 | sha256 = "0sblvj89bb7vxgy09m88gcphqc9w2mpawg8kdz0r77y7db0vzb4x";
8 | buildDepends = [
9 | aws httpConduit httpTypes resourcet text time xmlConduit xmlHamlet
10 | ];
11 | meta = {
12 | description = "Amazon Route53 DNS service plugin for the aws package";
13 | license = self.stdenv.lib.licenses.bsd3;
14 | platforms = self.ghc.meta.platforms;
15 | };
16 | })
17 |
--------------------------------------------------------------------------------
/pkgs/vk-posix-pty/default.nix:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by cabal2nix. Please do NOT edit manually!
2 |
3 | { cabal }:
4 |
5 | cabal.mkDerivation (self: {
6 | pname = "vk-posix-pty";
7 | version = "0.2.1";
8 | sha256 = "1kj06niwcsb4lyhppv5bs67cf8frcs4g8fyyzv3cpipn0xdhsr97";
9 | isLibrary = true;
10 | isExecutable = true;
11 | meta = {
12 | homepage = "https://github.com/proger/posix-pty/tree/fork";
13 | description = "Pseudo terminal interaction with subprocesses";
14 | license = self.stdenv.lib.licenses.bsd3;
15 | platforms = self.ghc.meta.platforms;
16 | };
17 | })
18 |
--------------------------------------------------------------------------------
/pkgs/wai-app-static/default.nix:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by cabal2nix. Please do NOT edit manually!
2 |
3 | { cabal, base64Bytestring, blazeBuilder, blazeHtml, blazeMarkup
4 | , byteable, cryptohash, cryptohashConduit, fileEmbed, filepath
5 | , hspec, httpDate, httpTypes, mimeTypes, network
6 | , optparseApplicative, systemFileio, systemFilepath, text, time
7 | , transformers, unixCompat, unorderedContainers, wai, waiExtra
8 | , warp, zlib
9 | }:
10 |
11 | cabal.mkDerivation (self: {
12 | pname = "wai-app-static";
13 | version = "3.0.0.6";
14 | sha256 = "0ilwlawffvib1p98q5jcc5m2i93n7iwmszwlbkb3ihlh1wz5q2b8";
15 | isLibrary = true;
16 | isExecutable = true;
17 | buildDepends = [
18 | base64Bytestring blazeBuilder blazeHtml blazeMarkup byteable
19 | cryptohash cryptohashConduit fileEmbed filepath httpDate httpTypes
20 | mimeTypes optparseApplicative systemFileio systemFilepath text time
21 | transformers unixCompat unorderedContainers wai waiExtra warp zlib
22 | ];
23 | testDepends = [
24 | hspec httpDate httpTypes mimeTypes network text time transformers
25 | unixCompat wai waiExtra zlib
26 | ];
27 | meta = {
28 | homepage = "http://www.yesodweb.com/book/web-application-interface";
29 | description = "WAI application for static serving";
30 | license = self.stdenv.lib.licenses.mit;
31 | platforms = self.ghc.meta.platforms;
32 | };
33 | })
34 |
--------------------------------------------------------------------------------
/pkgs/xd/default.nix:
--------------------------------------------------------------------------------
1 | { stdenv }:
2 |
3 | stdenv.mkDerivation {
4 | name = "xd";
5 |
6 | buildCommand = ''
7 | install -Dm755 $script $out/bin/xd
8 | '';
9 |
10 | script = ./xd;
11 |
12 | meta = with stdenv.lib; {
13 | description = "Execute command setting the current dir to the first argument";
14 | platforms = platforms.all;
15 | license = licenses.mit;
16 | };
17 | }
18 |
--------------------------------------------------------------------------------
/pkgs/xd/xd:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | cd "$1"
4 | shift
5 | exec "$@"
6 |
--------------------------------------------------------------------------------
/pkgs/ybc/default.nix:
--------------------------------------------------------------------------------
1 | { goPackages, fetchgit, fetchFromGitHub, runCommand }:
2 |
3 |
4 | let
5 | iniflags = goPackages.buildGoPackage rec {
6 | rev = "7e26b9281b070a9562df8694980c68e56d298c36";
7 | name = "iniflags-${rev}";
8 | goPackagePath = "github.com/vharitonsky/iniflags";
9 | src = fetchgit {
10 | url = "https://${goPackagePath}";
11 | inherit rev;
12 | sha256 = "8643aafd59293ff4176cbae2c883563d274cabb5f83b7ce6210f97ecfbd38518";
13 | };
14 | };
15 |
16 | gomemcache = goPackages.buildGoPackage rec {
17 | rev = "72a68649ba712ee7c4b5b4a943a626bcd7d90eb8";
18 | name = "gomemcache-${rev}";
19 | goPackagePath = "github.com/bradfitz/gomemcache/memcache";
20 | src = fetchgit {
21 | url = "https://github.com/bradfitz/gomemcache";
22 | inherit rev;
23 | sha256 = "5fafdc33f130528127b8cdee42d36e47d80b2dcaff6052cf22f50c5f490293cb";
24 | };
25 | };
26 | in
27 | goPackages.buildGoPackage rec {
28 | rev = "5c1da0f157654a2a98bc6a074df39f18b62811a5";
29 | name = "ybc-${rev}";
30 | goPackagePath = "github.com/valyala/ybc";
31 | buildInputs = [ iniflags gomemcache ];
32 | src = fetchgit {
33 | url = "https://${goPackagePath}";
34 | inherit rev;
35 | sha256 = "34dfb8293a28240651c813229f400a0d52bae65c2b1345507143a82f72dcd9aa";
36 | };
37 | }
38 |
--------------------------------------------------------------------------------
/sdk.nix:
--------------------------------------------------------------------------------
1 | # SDK is meant to be a bridge between software packages and apps.
2 | #
3 | # `sdk` attribute replaces all uses of `pkgs` within App modules.
4 | # This makes the interface between the two entities thin enough to be able to
5 | # replace any component of it at any point in time or evaluate environment
6 | # configuration without relying on nixpkgs at all.
7 | #
8 | # Values returned by these functions are exported as the `sdk` attribute
9 | # in the arguments of every App's function. (A hypothetical usage sketch appears at the end of this file.)
10 | rec {
11 | #
12 | # Use only this attribute. The rest is "exported" for debugging convenience.
13 | #
14 | sdk = fns // exports // { inherit sdk-env phpPackages perlPackages; };
15 |
16 | sdk-env = pkgs.buildEnv {
17 | name = "sdk";
18 | paths = lib.filter lib.isDerivation (lib.attrValues (exports // { inherit phpPackages; }));
19 | ignoreCollisions = true;
20 | };
21 |
22 | nixpkgs-config = {
23 | allowUnfree = true;
24 |
25 | php = {
26 | apxs2 = false; # apache support
27 | ldap = false; # openldap
28 | mssql = false; # freetds
29 | bz2 = false; pdo_pgsql = false; postgresql = false;
30 | sqlite = false; xsl = false;
31 |
32 | bcmath = true; curl = true; exif = true; fpm = true;
33 | ftp = true; gd = true; gettext = true; intl = true;
34 | libxml2 = true; mbstring = true; mcrypt = true; mhash = true;
35 | mysql = true; mysqli = true; openssl = true; pcntl = true;
36 | pdo_mysql = true; readline = true; soap = true; sockets = true;
37 | zip = true; zlib = true;
38 | };
39 |
40 | packageOverrides = pkgs: rec {
41 | inherit (ugpkgs) bundler_HEAD erlang imagemagick linux nix;
42 | mysql = ugpkgs.mariadb;
43 | php = ugpkgs.php70;
44 | git = pkgs.git.override {
45 | withManual = false;
46 | pythonSupport = false;
47 | };
48 | go = pkgs.go_1_5;
49 | glibcLocales = pkgs.glibcLocales.override {
50 | allLocales = false;
51 | locales = ["en_US.UTF-8/UTF-8"];
52 | };
53 | gnupg = pkgs.gnupg.override {
54 | pinentry = null;
55 | x11Support = false; openldap = null; libusb = null;
56 | };
57 | python27 = pkgs.python27.override {
58 | x11Support = false;
59 | };
60 | python3 = pkgs.python3.override {
61 | libX11 = null;
62 | tk = null; tcl = null;
63 | };
64 | python34 = pkgs.python34.override {
65 | libX11 = null;
66 | tk = null; tcl = null;
67 | };
68 | qemu = pkgs.qemu.override {
69 | pulseSupport = false;
70 | sdlSupport = false;
71 | spiceSupport = false;
72 | };
73 | };
74 | };
75 |
76 | pkgs = import <nixpkgs> {
77 | system = "x86_64-linux";
78 | config = nixpkgs-config;
79 | };
80 |
81 | module = {
82 | system.activationScripts.microgram-sdk-env = lib.stringAfter ["nix" "systemd"] ''
83 | ${sdk.nix}/bin/nix-env -p /nix/var/nix/profiles/sdk --set ${sdk-env}
84 | '';
85 | nixpkgs.config = nixpkgs-config;
86 | };
87 |
88 | ugpkgs = import ;
89 | inherit (pkgs) lib;
90 |
91 | # sdk function exports (things that have arguments)
92 | fns = {
93 | # functions that do not produce derivations
94 | inherit (builtins) toFile;
95 |
96 | inherit (lib) makeSearchPath;
97 |
98 | # functions that do produce derivations
99 | inherit (pkgs)
100 | symlinkJoin
101 | runCommand writeScriptBin writeScript
102 | substituteAll buildEnv writeText writeTextDir writeTextFile;
103 | inherit (ugpkgs.fns)
104 | compileHaskell
105 | staticHaskellCallPackage
106 | writeBashScript
107 | writeBashScriptBin
108 | writeBashScriptBinOverride
109 | writeBashScriptOverride;
110 | };
111 |
112 | exports = pkgs // rec {
113 |
114 | solr4 = pkgs.solr;
115 | inherit (ugpkgs)
116 | angel
117 | ares
118 | clj-json curator curl-loader
119 | damemtop dynomite
120 | elasticsearch-cloud-aws elastisch erlang
121 | exim
122 | filebeat flame-graph
123 | galera-wsrep get-user-data gdb-quiet graphviz
124 | heavy-sync
125 | jackson-core-asl jenkins jmaps
126 | kibana4 kiries
127 | logstash-all-plugins lua-json
128 | mariadb mariadb-galera memcached-tool mergex mkebs myrapi
129 | newrelic-memcached-plugin newrelic-mysql-plugin newrelic-plugin-agent newrelic-sysmond nginx nix nq
130 | packer percona-toolkit pivotal_agent
131 | rabbitmq rabbitmq-clusterer replicator retry rootfs-busybox runc
132 | ShellCheck simp_le sproxy stack syslog-ng
133 | terraform thumbor to-json-array twemproxy
134 | unicron
135 | upcast
136 | upcast-ng
137 | vault
138 | xd
139 | ybc;
140 | inherit (ugpkgs)
141 | newrelic-java; # is a file
142 |
143 | inherit (pkgs.haskellPackages) ghc cabal-install;
144 | cabal = cabal-install;
145 | };
146 |
147 | phpPackages = {
148 | inherit (pkgs.phpPackages.override { php = ugpkgs.php70; })
149 | composer redis;
150 | inherit (ugpkgs) imagick memcached newrelic-php xdebug zmq lz4;
151 | };
152 |
153 | perlPackages = {
154 | inherit (pkgs.perlPackages) NetAddrIP;
155 | };
156 | }
157 |
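158 | # Illustration only: a hypothetical App module consuming `sdk` instead of
159 | # `pkgs`. The option values and file contents below are made up; `writeText`,
160 | # `nginx` and `retry` are among the attributes exported by `sdk` above.
161 | #
162 | #   { sdk, ... }: {
163 | #     environment.etc."myapp/app.conf".source = sdk.writeText "app.conf" ''
164 | #       port = 8080
165 | #     '';
166 | #     systemd.services.myapp.path = [ sdk.nginx sdk.retry ];
167 | #   }
168 |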
--------------------------------------------------------------------------------
/to-nix/golang/README.md:
--------------------------------------------------------------------------------
1 | ## Packaging Go apps
2 |
3 | It's quite easy to package Go apps with Nix if you're using [Godep](https://github.com/tools/godep).
4 |
5 | [example.nix](example.nix) is meant to be copy-pasted into your packages
6 | collection with subsequent minor edits.
7 |
8 | ```nix
9 | pkgs.callPackage ./example.nix { src = ; }
10 | ```
11 |
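12 | For instance, if the sources live in a git repository, the call might look
13 | roughly like this (the revision and hash below are placeholders, not real
14 | values):
15 | 
16 | ```nix
17 | pkgs.callPackage ./example.nix {
18 |   src = pkgs.fetchgit {
19 |     url = "https://github.com/zalora/myApp";
20 |     rev = "0000000000000000000000000000000000000000";
21 |     sha256 = "0000000000000000000000000000000000000000000000000000";
22 |   };
23 | }
24 | ```
25 |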
--------------------------------------------------------------------------------
/to-nix/golang/example.nix:
--------------------------------------------------------------------------------
1 | { stdenv, go, src }:
2 |
3 | let
4 | name = "my-app";
5 | namespace = "github.com/zalora/myApp";
6 | in
7 | stdenv.mkDerivation {
8 | inherit name src;
9 |
10 | buildInputs = [ go ];
11 |
12 | buildPhase = ''
13 | mkdir -p "Godeps/_workspace/src/${namespace}"
14 | ln -s $src "Godeps/_workspace/src/${namespace}"
15 | GOPATH=$PWD/Godeps/_workspace go build -o ${name}
16 | '';
17 |
18 | installPhase = ''
19 | mkdir -p $out/bin
20 | cp ${name} $out/bin
21 | # probably need to copy more things here
22 | '';
23 | }
24 |
--------------------------------------------------------------------------------
/to-nix/haskell/README.md:
--------------------------------------------------------------------------------
1 | # Packaging Haskell apps
2 |
3 | Start with [cabal2nix](https://github.com/NixOS/cabal2nix).
4 |
5 | The workflow roughly looks like:
6 |
7 | ```bash
8 | $ cabal2nix --sha256=ignoreme file:///path/to/package.cabal > ./my-app/default.nix
9 | $ vim ./my-app/default.nix # remove sha256 stuff and add src reference:
10 | # src = git-repo-filter ;
11 |
12 | # now add to the packages attrset:
13 | my-app = pkgs.haskellPackages.callPackage ./my-app {};
14 | ```
15 |
16 | You'll have to double-check whether the dependencies inferred by `cabal2nix` actually exist
17 | in your `pkgs` collection; if any are missing, repeat the procedure for each of them.
18 |
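19 | As an illustration, the edited `./my-app/default.nix` might end up looking
20 | roughly like this (the package name and dependency list are hypothetical; keep
21 | whatever `cabal2nix` actually emitted for your package):
22 | 
23 | ```nix
24 | { mkDerivation, base, text, stdenv }:
25 | mkDerivation {
26 |   pname = "my-app";
27 |   version = "0.1";
28 |   src = ./.;               # replace with your src reference as described above
29 |   isLibrary = false;
30 |   isExecutable = true;
31 |   executableHaskellDepends = [ base text ];
32 |   license = stdenv.lib.licenses.mit;
33 | }
34 | ```
35 |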
--------------------------------------------------------------------------------
/to-nix/python/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 |
--------------------------------------------------------------------------------
/to-nix/python/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2014-2015 Vladimir Kirillov
2 |
3 | Permission to use, copy, modify, and/or distribute this software for any
4 | purpose with or without fee is hereby granted, provided that the above
5 | copyright notice and this permission notice appear in all copies.
6 |
7 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
8 | WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
9 | MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
10 | ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
11 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
12 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
13 | OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
14 |
--------------------------------------------------------------------------------
/to-nix/python/README.md:
--------------------------------------------------------------------------------
1 | ## python2nix
2 |
3 | Helper that generates Nix expressions for building Python [nix packages](https://github.com/NixOS/nixpkgs).
4 |
5 | Quick start:
6 |
7 | * `python -mpython2nix thumbor`
8 | * double-check dependencies in nixpkgs
9 | * re-run for missing dependencies
10 | * copy-paste
11 | * ???
12 | * PROFIT!
13 |
14 | Don't consider these scripts to be stable.
15 |
16 | ### Known issues
17 |
18 | * Needs pip==1.5.6 to work.
19 | * Apparently doesn't handle `tests_require`.
20 |
--------------------------------------------------------------------------------
/to-nix/python/pip.requirements:
--------------------------------------------------------------------------------
1 | pip
2 | requests
3 |
--------------------------------------------------------------------------------
/to-nix/python/python2nix/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zalora/microgram/58f9a4dd2bd7d29860a64bc85d77b549362a2edb/to-nix/python/python2nix/__init__.py
--------------------------------------------------------------------------------
/to-nix/python/python2nix/__main__.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import requests
3 |
4 | from python2nix import pip_deps
5 |
6 | PACKAGE = """\
7 | {{ {args_str} }}:
8 |
9 | buildPythonPackage rec {{
10 |
11 | name = "{name}";
12 |
13 | src = fetchurl {{
14 | url = "{url}";
15 | md5 = "{md5}";
16 | }};
17 |
18 | propagatedBuildInputs = [ {inputs_str} ];
19 |
20 | meta = with stdenv.lib; {{
21 | description = "{description}";
22 | homepage = {homepage};
23 | license = {license};
24 | }};
25 | }}"""
26 |
27 | LICENSE_MAP = {
28 | 'APL2': 'licenses.asl20',
29 | 'ASL 2': 'licenses.asl20',
30 | 'Apache 2.0': 'licenses.asl20',
31 | 'BSD License': 'licenses.bsd',
32 | 'BSD or Apache License, Version 2.0': 'licenses.bsd',
33 | 'BSD': 'licenses.bsd',
34 | 'MIT License': 'licenses.mit',
35 | 'MIT license': 'licenses.mit',
36 | 'MIT': 'licenses.mit',
37 | 'PSF or ZPL': 'licenses.psfl',
38 | 'PSF': 'licenses.psfl',
39 | 'http://www.apache.org/licenses/LICENSE-2.0': 'licenses.asl20',
40 | 'http://www.opensource.org/licenses/mit-license.php': 'licenses.mit',
41 | }
42 |
43 | _missing = object()
44 | def guess_license(info):
45 | l = info['info']['license']
46 | license = LICENSE_MAP.get(l, _missing)
47 | if license is _missing:
48 | sys.stderr.write('WARNING: unknown license (please update LICENSE_MAP): ' + l + '\n')
49 | return 'unknown'
50 | return license
51 |
52 | _pip_dependency_cache = {}
53 |
54 | def pip_dump_dependencies(name): # memoized version
55 | if name in _pip_dependency_cache:
56 | return _pip_dependency_cache[name]
57 | ret = pip_deps.pip_dump_dependencies(name)
58 | _pip_dependency_cache[name] = ret
59 | return ret
60 |
61 | def build_inputs(name):
62 | reqs, vsns = pip_dump_dependencies(name)
63 |
64 | def get_workaround(adict, name):
65 | v = adict.get(name)
66 | if not v:
67 | name = name.replace('_', '-') # pypi workaround ?
68 | v = adict.get(name)
69 | return v
70 |
71 | return [name.lower() for name, specs in get_workaround(reqs, name)]
72 |
73 | def package_to_info(package):
74 | url = "https://pypi.python.org/pypi/{}/json".format(package)
75 | r = requests.get(url)
76 | try:
77 | return r.json()
78 | except Exception as e:
79 | sys.stderr.write('package_to_info failed: {}\n'.format(r))
80 | raise e
81 |
82 | def info_to_expr(info):
83 | name = info['info']['name'].lower()
84 | inputs = build_inputs(name)
85 | inputs_str = ' '.join(build_inputs(name))
86 | args = [ 'buildPythonPackage', 'fetchurl', 'stdenv' ] + inputs
87 | args_str = ', '.join(args)
88 |
89 | url = None
90 | md5 = None
91 | for url_item in info['urls']:
92 | url_ext = url_item['url']
93 | if url_ext.endswith('zip') or url_ext.endswith('tar.gz'):
94 | url = url_item['url']
95 | md5 = url_item['md5_digest']
96 | break
97 | if url is None:
98 | raise Exception('No download url found :-(')
99 |
100 | description = info['info']['description'].split('\n')[0]
101 | homepage = info['info']['home_page'] or '""'
102 | license = guess_license(info)
103 |
104 | return PACKAGE.format(**locals())
105 |
106 |
107 | def main():
108 | if len(sys.argv) < 2 or "--help" in sys.argv:
109 | print "Usage: python2nix "
110 | else:
111 | print info_to_expr(package_to_info(sys.argv[1]))
112 |
113 |
114 | if __name__ == '__main__':
115 | main()
116 |
--------------------------------------------------------------------------------
/to-nix/python/python2nix/pip_deps.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python2.7
2 | """
3 | """
4 |
5 | import sys
6 | import pip
7 | import tempfile
8 | import shutil
9 |
10 | # this stuff is a hack that is based on pip 1.5.4 source code (that has poor documentation)
11 |
12 | from pip.req import InstallRequirement, RequirementSet, parse_requirements
13 | from pip.download import PipSession
14 | from pip.commands import install
15 | from pip.log import logger
16 | from pip._vendor import pkg_resources
17 |
18 |
19 | logger.add_consumers(
20 | (logger.level_for_integer(3), sys.stderr), # less is quieter, max is 4
21 | )
22 |
23 | session = PipSession()
24 | temp_target_dir = tempfile.mkdtemp()
25 | download_cache = '/tmp'
26 |
27 | def pip_dump_dependencies(name, download_cache=download_cache):
28 | """
29 | Returns a pair of dictionaries: involved packages mapped to their direct dependencies, and packages mapped to their resolved versions. Uses pip's private APIs.
30 | Side effects: removes pip build directory before starting (if one existed),
31 | populates the downloads cache in `download_cache',
32 | populates the build cache with unpacked tarballs
33 | """
34 | cmd = install.InstallCommand()
35 | options, args = cmd.parse_args([name])
36 | index_urls = [options.index_url] + options.extra_index_urls
37 | finder = cmd._build_package_finder(options, index_urls, session)
38 |
39 | requirement_set = RequirementSet(
40 | build_dir=options.build_dir,
41 | src_dir=options.src_dir,
42 | download_dir=options.download_dir,
43 | download_cache=download_cache,
44 | upgrade=options.upgrade,
45 | as_egg=options.as_egg,
46 | ignore_installed=options.ignore_installed,
47 | ignore_dependencies=options.ignore_dependencies,
48 | force_reinstall=options.force_reinstall,
49 | use_user_site=options.use_user_site,
50 | target_dir=temp_target_dir,
51 | session=session,
52 | pycompile=options.compile,
53 | )
54 |
55 | # i/o
56 | shutil.rmtree(options.build_dir)
57 |
58 | requirement_set.add_requirement(InstallRequirement.from_line(name, None))
59 |
60 | # i/o
61 | requirement_set.prepare_files(finder, force_root_egg_info=cmd.bundle, bundle=cmd.bundle)
62 |
63 | def safe_requirements(self):
64 | """
65 | safe implementation of pip.req.InstallRequirement.requirements() generator, doesn't blow up with OSError
66 | """
67 |
68 | in_extra = None
69 | try:
70 | for line in self.egg_info_lines('requires.txt'):
71 | match = self._requirements_section_re.match(line.lower())
72 | if match:
73 | in_extra = match.group(1)
74 | continue
75 | if in_extra:
76 | logger.debug('skipping extra %s' % in_extra)
77 | # Skip requirement for an extra we aren't requiring
78 | continue
79 | yield line
80 | except OSError:
81 | pass
82 |
83 | def req_string_to_name_and_specs(s):
84 | p = pkg_resources.Requirement.parse(s)
85 | return (p.project_name, p.specs)
86 |
87 | def req_safe_version(req):
88 | try:
89 | return req.pkg_info()['version']
90 | except:
91 | return ''
92 |
93 | reqs = dict(requirement_set.requirements)
94 | human_reqs = dict((req.name, map(req_string_to_name_and_specs, list(safe_requirements(req)))) for req in reqs.values())
95 | actual_versions = dict((req.name, req_safe_version(req)) for req in reqs.values())
96 | return human_reqs, actual_versions
97 |
98 | if __name__ == '__main__':
99 | name = sys.argv[1]
100 | reqs, vsns = pip_dump_dependencies(name)
101 | print 'reqs:', reqs
102 | print 'actual_versions:', vsns
103 |
--------------------------------------------------------------------------------
/to-nix/python/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 | from setuptools import find_packages
3 |
4 | setup(name='python2nix',
5 | version='0.1',
6 | description='writing nix expressions for python packages made easy',
7 | classifiers=[
8 | "Programming Language :: Python",
9 | ],
10 | author='Vladimir Kirillov',
11 | author_email='',
12 | url='https://github.com/proger/python2nix',
13 | license='???',
14 | packages=find_packages(),
15 | install_requires=['requests', 'pip'],
16 | entry_points="""
17 | [console_scripts]
18 | python2nix = python2nix:main
19 | """,
20 | include_package_data=True,
21 | zip_safe=False,
22 | )
23 |
--------------------------------------------------------------------------------
/wtf/README.md:
--------------------------------------------------------------------------------
1 | ## wtf
2 |
3 | `wtf` is a unified diagnostics API for lightweight monitoring and low-frequency time-series checks.
4 | `wtf/default.nix` builds a commandline tool, `wtf`, by assembling checks according to the interface specified in `api.nix`.
5 |
6 | It is possible to create various backends for these diagnostics, such as CloudWatch, collectd and so on.
7 |
8 | See [test.nix](test.nix) for a usage example.
9 |
10 | #### API usage
11 |
12 | There are several different kinds of diagnostics that you can implement.
13 | The simplest one is a shell script that investigates whether your
14 | application works and exits with code `0` if the check passes, or any
15 | other code if it fails. You add such a check to your module in the
16 | following way:
17 |
18 | ```nix
19 | diagnostics = {
20 | mysql-running.script-retcode = ''
21 | ${sdk.mariadb}/bin/mysql -e ';'
22 | '';
23 | };
24 | ```
25 |
26 | This adds the check `mysql-running`, which simply attempts to connect to a
27 | MySQL server running on localhost. If you deploy a module with such a check to
28 | an instance, you can then run the following command while logged in to that instance:
29 |
30 | ```bash
31 | $ wtf mysql-running
32 | ```
33 |
34 | If the check is successful, this command will print nothing;
35 | otherwise it will print `mysql-running FAILED`.
36 |
37 | You can also tell the platform that the check should run periodically and that
38 | the result should be aggregated centrally (this requires a backend implementation) so that it
39 | is possible to run analytics and alerting. You do this by setting
40 | `timeseries.enable = true` on the check:
41 |
42 | ```nix
43 | diagnostics.mysql-running = {
44 | timeseries.enable = true;
45 | script-retcode = ''
46 | ${sdk.mariadb}/bin/mysql -e ';'
47 | '';
48 | };
49 | ```
50 |
51 | This check's metric is mostly `0` or `1`, so it is only useful
52 | for triggering alerts when something goes down. A more interesting metric could
53 | be some kind of counter from the application. This can also be implemented by a
54 | simple script that prints a numeric metric on `stdout`:
55 |
56 | ```nix
57 | diagnostics.tcp-retransmits = {
58 | script-metric = ''
59 | awk 'NR==2 { print $42 + $46 + $47 + $48 + $102; }' /proc/net/netstat
60 | '';
61 | timeseries.enable = true;
62 | };
63 | ```
64 |
65 | Another simple check is the `http-ok` type. This check verifies that a specific
66 | URL returns a successful (2xx or 3xx) response. This is very useful if your application
67 | is a web application that has some kind of healthcheck functionality built in. You define
68 | such a check this way:
69 |
70 | ```nix
71 | diagnostics = {
72 | app-healthcheck = {
73 | http-ok = "http://localhost/healthcheck";
74 | timeseries.enable = true;
75 | };
76 | };
77 | ```
78 |
79 | There are several more types of checks, and it is possible to add new ones when needed. Check out
80 | [api.nix](api.nix) for details on the options that exist; one more example follows below.
81 |
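82 | As a minimal sketch (using the `mysql-status` and `alarm` options from
83 | [api.nix](api.nix); the threshold and ARN below are hypothetical placeholders), a check
84 | could watch a MySQL status variable and raise an alarm through whatever backend is configured:
85 |
86 | ```nix
87 | diagnostics.mysql-threads = {
88 |   # Numeric value read from SHOW STATUS.
89 |   mysql-status = "Threads_connected";
90 |   timeseries.enable = true;
91 |   timeseries.unit = "Count";
92 |   alarm = {
93 |     statistic = "Average";
94 |     evaluation-periods = 1;
95 |     threshold = "100";               # a double formatted as a string
96 |     comparison-operator = "GreaterThanOrEqualToThreshold";
97 |     action = "arn:aws:sns:...";      # hypothetical alarm action ARN
98 |   };
99 | };
100 | ```
101 |
102 | As with timeseries aggregation, acting on the alarm requires a backend implementation.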
--------------------------------------------------------------------------------
/wtf/api.nix:
--------------------------------------------------------------------------------
1 | { config, options, lib, name, ... }:
2 | let
3 | inherit (lib) types mkOption replaceChars;
4 |
5 | memcached-tuple =
6 | types.nullOr (types.submodule ({ ... }: {
7 | options = {
8 | key = mkOption { type = types.str; default = "hello"; };
9 | target = mkOption { type = types.str; default = "localhost:11211"; };
10 | };
11 | }));
12 | in
13 | {
14 | imports = [
15 | ./shell.nix
16 | ];
17 |
18 | options = {
19 | name = mkOption { default = name; };
20 |
21 | description = mkOption {
22 | type = types.str;
23 | default = "";
24 | description = "Check description.";
25 | };
26 |
27 | tags = mkOption {
28 | type = with types; listOf str;
29 | default = [];
30 | description = "";
31 | };
32 |
33 | timeseries = {
34 | enable = mkOption {
35 | type = types.bool;
36 | default = false;
37 | description = "Activate implementation-defined timeseries polling for this check.";
38 | };
39 |
40 | period = mkOption {
41 | type = types.str;
42 | default = "1m";
43 | description = "Check period, on a format accepted by sleep.";
44 | };
45 |
46 | unit = mkOption {
47 | type = types.enum [
48 | "Seconds" "Microseconds" "Milliseconds" "Bytes"
49 | "Kilobytes" "Megabytes" "Gigabytes" "Terabytes"
50 | "Bits" "Kilobits" "Megabits" "Gigabits" "Terabits"
51 | "Percent" "Count" "Bytes/Second" "Kilobytes/Second"
52 | "Megabytes/Second" "Gigabytes/Second" "Terabytes/Second"
53 | "Bits/Second" "Kilobits/Second" "Megabits/Second" "Gigabits/Second"
54 | "Terabits/Second" "Count/Second" "None"
55 | ];
56 | default = "None";
57 | description = ''
58 | Metric unit.
59 | Currently limited to choices from
60 | http://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_MetricDatum.html
61 | '';
62 | };
63 |
64 | descriptive-unit = mkOption {
65 | type = types.nullOr types.str;
66 | default = if config.timeseries.unit == "None" then null else config.timeseries.unit;
67 | description = ''
68 | A description of the metric's unit, suitable for interpolation into
69 | the output of 'wtf'.
70 | '';
71 | };
72 | };
73 |
74 | alarm = mkOption {
75 | default = null;
76 | type = types.nullOr (types.submodule ({ ... }: {
77 | options = {
78 | enable = mkOption {
79 | type = types.bool;
80 | default = true;
81 | };
82 | statistic = mkOption {
83 | type = types.enum [
84 | "SampleCount" "Average" "Sum" "Minimum" "Maximum"
85 | ];
86 | default = "Sum";
87 | description = ''
88 | The statistic to apply to the check's metric when evaluating whether
89 | to trigger an alarm. Choose from {SampleCount, Average, Sum, Minimum,
90 | Maximum}, defaults to "Sum".
91 | '';
92 | };
93 | evaluation-periods = mkOption {
94 | type = types.int;
95 | default = 1;
96 | description = ''
97 | The number of periods over which data is compared to the threshold,
98 | see 'comparison-operator'.
99 | '';
100 | };
101 | threshold = mkOption {
102 | type = types.str;
103 | default = "0";
104 | description = ''
105 | The value against which the specified statistic is compared, see
106 | 'comparison-operator'. For implementation reasons (Nix cannot type
107 | double-precision floating points), this is a double formatted as a
108 | string.
109 | '';
110 | };
111 | comparison-operator = mkOption {
112 | type = types.enum [
113 | "GreaterThanOrEqualToThreshold" "GreaterThanThreshold"
114 | "LessThanThreshold" "LessThanOrEqualToThreshold"
115 | ];
116 | default = "GreaterThanOrEqualToThreshold";
117 | description = ''
118 | The arithmetic operation to use when comparing the statistic
119 | and threshold over its evaluation period, determines whether
120 | the alarm is triggered.
121 | Choose from {GreaterThanOrEqualToThreshold, GreaterThanThreshold,
122 | LessThanThreshold, LessThanOrEqualToThreshold}, defaults to
123 | "GreaterThanOrEqualToThreshold".
124 | '';
125 | };
126 | action = mkOption {
127 | type = types.str;
128 | description = ''
129 |                 The action to perform if the alarm is triggered, specified as an ARN.
130 | '';
131 | };
132 | };
133 | }));
134 | };
135 |
136 | zero-is-fine = mkOption {
137 | type = types.bool;
138 | default = false;
139 |       description = ''If true and the metric is 0, "OK" will be printed instead of "0".
140 |                       For some checks this option is ignored and effectively "true",
141 |                       e.g. for "script-retcode" and "http-ok".
142 | '';
143 | };
144 |
145 | timeout = mkOption {
146 | type = types.str;
147 | default = "10s";
148 | description = "Check timeout, on a format accepted by sleep.";
149 | };
150 |
151 | script-retcode = mkOption {
152 | type = types.nullOr types.lines;
153 | default = null;
154 | description = "Bash script that returns non-zero exitcode on failure. Do not use exec in a script.";
155 | };
156 |
157 | script-metric = mkOption {
158 | type = types.nullOr types.lines;
159 | default = null;
160 | description = "Bash script that outputs the numeric metric value to stdout. Do not use exec in a script.";
161 | };
162 |
163 | http-ok = mkOption {
164 | type = types.nullOr types.str;
165 | default = null;
166 | description = "HTTP URL that is supposed to return a 2xx or a 3xx response.";
167 | };
168 |
169 | mysql-metric = mkOption {
170 | type = types.nullOr types.str;
171 | default = null;
172 | description = "MySQL query that returns one row with a single column which is a numeric value.";
173 | };
174 |
175 | mysql-status = mkOption {
176 | type = types.nullOr types.str;
177 | default = null;
178 | description = "MySQL variable from SHOW STATUS that returns a numeric value.";
179 | };
180 |
181 | memcached-stat = mkOption {
182 | type = types.nullOr (types.submodule ({ ... }: {
183 | options = {
184 | key = mkOption { type = types.str; default = "total_connections"; };
185 | target = mkOption { type = types.str; default = "localhost:11211"; };
186 | };
187 | }));
188 | default = null;
189 | description = "Memcached numeric statistic.";
190 | };
191 |
192 | memcached-kvmetric = mkOption {
193 | type = memcached-tuple;
194 | default = null;
195 | description = "Memcached key lookup that returns a numeric value.";
196 | };
197 |
198 | memcached-kvmetric-exists = mkOption {
199 | type = memcached-tuple;
200 | default = null;
201 | description = "Memcached key lookup that returns a positive status code when a key is there.";
202 | };
203 | };
204 | }
205 |
--------------------------------------------------------------------------------
/wtf/default.nix:
--------------------------------------------------------------------------------
1 | { diagnostics, name ? "wtf" }:
2 | let
3 | inherit (import ) lib sdk pkgs;
4 | inherit (pkgs) stdenv;
5 | inherit (lib) types mkOption concatStrings mapAttrsToList mapAttrs;
6 |
7 | eval = lib.evalModules { modules = [ module ]; };
8 | diags = eval.config.diagnostics;
9 |
10 | module = {
11 | options = {
12 | diagnostics = mkOption {
13 | type = types.attrsOf (types.submodule (import ./api.nix));
14 | default = diagnostics;
15 | };
16 |
17 | paths = {
18 | wtf = mkOption { type = types.path; default = wtf; };
19 | wtfdb = mkOption { type = types.path; default = wtfdb; };
20 | };
21 |
22 | test = mkOption {
23 | type = types.attrsOf types.str;
24 | default = mapAttrs (_: diag: "${wtf}/bin/wtf ${diag.name}") diags;
25 | };
26 |
27 | wait = mkOption {
28 | type = types.attrsOf types.str;
29 | default = mapAttrs (_: diag: "${wtf}/bin/wtf -w ${diag.name}") diags;
30 | };
31 | };
32 | };
33 |
34 | wtfdb = sdk.writeText "wtfdb.json" (builtins.toJSON diags);
35 |
36 | wtfenv = pkgs.buildEnv {
37 | name = "${name}-env";
38 | paths = with sdk; [
39 | bash coreutils curl findutils gawk gnused inetutils jq ntp
40 | ];
41 | };
42 |
43 | wtf = stdenv.mkDerivation {
44 | inherit name;
45 |
46 | buildCommand = ''
47 | mkdir -p $out/bin
48 | substituteAll ${./wtf} $out/bin/wtf
49 | chmod +x $out/bin/wtf
50 | '';
51 |
52 | inherit (sdk) bash;
53 |
54 | inherit wtfenv wtfdb;
55 | };
56 | in eval
57 |
--------------------------------------------------------------------------------
/wtf/shell.nix:
--------------------------------------------------------------------------------
1 | { config, options, name, ... }:
2 | let
3 | inherit (import )
4 | types mkOption replaceChars optionalString range mapAttrs;
5 | inherit (import ) sdk lib;
6 |
7 |
8 | # Convert time periods (e.g. "1h") to seconds
9 | toSeconds = p: with builtins;
10 | let
11 | count = let
12 | count' = substring 0 (stringLength p - 1) p;
13 | in if count' == "" then 0 else fromJSON count';
14 | unit = substring (stringLength p - 1) 1 p ;
15 | conv = mapAttrs (_: mul) {
16 | s = 1; m = 60; h = 3600; d = 86400;
17 | } // (listToAttrs
18 | (map (n: { name = toString n; value = (x: x * 10 + n); }) (range 0 9))
19 | );
20 | in if conv ? "${unit}" then conv.${unit} count
21 | else throw "Invalid period ${p}";
22 |
23 | max = a: b: if a > b then a else b;
24 |
25 | writeBashScript = n: c: sdk.writeScript n ''
26 | #!${sdk.bash}/bin/bash
27 | set -e
28 | set -o pipefail
29 | ${c}
30 | '';
31 |
32 | shellImpl = check:
33 | let
34 | timeout = toString (toSeconds check.timeout);
35 | timeoutCmd = "timeout ${timeout}";
36 | zero-is-ok = zero-is-fine: optionalString zero-is-fine
37 | ''
38 | if declare -F humanize_zero_is_ok >/dev/null; then
39 | humanize_zero_is_ok
40 | fi
41 | '';
42 | # Sleep a bit to allow data to flow back from the possibly remote target
43 |       memcached-get = key: target: writeBashScript "memcached-get.sh" ''
44 | (echo get ${key} && sleep 1) \
45 | | ${timeoutCmd} ${sdk.netcat-openbsd}/bin/nc ${replaceChars [":"] [" "] target}
46 | '';
47 | in
48 | if check.http-ok != null then ''
49 | metric=0
50 | curl --max-time ${timeout} -f -sS -o /dev/null "${check.http-ok}" || metric=$?
51 | ${zero-is-ok true}
52 | ''
53 | else if check.mysql-metric != null then ''
54 | metric=$(${timeoutCmd} ${sdk.mariadb}/bin/mysql -h 127.0.0.1 -qrN -B < ${builtins.toFile "mysql-metric" check.mysql-metric})
55 | ${zero-is-ok (check.zero-is-fine or false)}
56 | ''
57 | else if check.mysql-status != null then ''
58 | metric=$(${timeoutCmd} ${sdk.mariadb}/bin/mysql -h 127.0.0.1 -qrN -B < ${builtins.toFile "mysql-status" ''
59 | select variable_value from information_schema.global_status where variable_name = '${check.mysql-status}'
60 | ''})
61 | ''
62 | else if check.memcached-stat != null then let inherit (check.memcached-stat) key target; in ''
63 | metric=$(${timeoutCmd} ${sdk.memcached-tool}/bin/memcached-tool ${target} stats | awk '$1 == "${key}" {print $2}')
64 | ''
65 | else if check.memcached-kvmetric != null then let inherit (check.memcached-kvmetric) key target; in ''
66 | metric=$(${memcached-get key target} \
67 | | awk '/^VALUE/ {exists=1; next} exists { print $1; exit }')
68 | ''
69 | else if check.memcached-kvmetric-exists != null then let inherit (check.memcached-kvmetric-exists) key target; in ''
70 | metric=$(${memcached-get key target} \
71 | | awk -v exists=1 '/^VALUE/ {exists=0; next} END { print exists }')
72 | ${zero-is-ok true}
73 | ''
74 | else if check.script-metric != null then ''
75 | metric=$(${timeoutCmd} ${writeBashScript check.name check.script-metric})
76 | ''
77 | else if check.script-retcode != null then ''
78 | metric=0
79 | ${timeoutCmd} ${writeBashScript check.name check.script-retcode} || metric=$?
80 | ${zero-is-ok true}
81 | ''
82 | else abort "need at least one implementation for diagnostic ${check.name}";
83 |
84 | # Numerical $metric is mandatory for automated checks.
85 | # String $out is for humans running `wtf`.
86 | # Each check may call humanize_* to define $out
87 | wtfWrapper = check: writeBashScript check.name ''
88 | if [ ''${WTF_ON_TERMINAL:-0} -eq 1 ]; then
89 | failed="\e[1;31mFAILED\e[0m"
90 | ok="\e[0;32mOK\e[0m"
91 | else
92 | failed="FAILED"
93 | ok="OK"
94 | fi
95 | out=
96 | metric=
97 | trap 'echo -e "--> $out"' EXIT
98 |
99 | humanize_zero_is_ok()
100 | {
101 | if [ "$metric" -eq 0 ]; then
102 | out="$ok"
103 | else
104 | out="$metric ($failed)"
105 | fi
106 | }
107 |
108 | trap 'out="$failed"' ERR INT TERM
109 | ${check.shell-impl}
110 | trap - ERR INT TERM
111 |
112 | metric=$(echo "$metric" | tr -d " \r\n\t")
113 |
114 | if [ -z "$metric" ]; then
115 | out="$failed"
116 | fi
117 |
118 | if [ -z "$out" ]; then
119 | out="$metric ${toString check.timeseries.descriptive-unit}"
120 | fi
121 |
122 | num=$(echo "$metric" | tr -cd 0-9)
123 | [ -z "$num" ] || [ "$num" -eq 0 ]
124 | '';
125 |
126 | in
127 | {
128 | options = {
129 | shell-impl = mkOption {
130 | type = types.str;
131 | internal = true;
132 | description = ''
133 | Shell script snippet that should perform the check and set the
134 |         variable 'metric'. Must have a proper timeout mechanism.
135 | '';
136 | default = shellImpl config;
137 | };
138 |
139 | wtf-wrapper = mkOption {
140 | type = types.path;
141 | internal = true;
142 | description = ''
143 | Wrapper check used by wtf.
144 | '';
145 |
146 | default = wtfWrapper config;
147 | };
148 | };
149 | }
150 |
--------------------------------------------------------------------------------
/wtf/test.nix:
--------------------------------------------------------------------------------
1 | let
2 | diagnostics = {
3 | google.http-ok = "http://google.com";
4 | true0.script-retcode = ''
5 | true
6 | '';
7 | metric.script-metric = ''
8 | echo 42
9 | '';
10 | slow = {
11 | tags = [ "slow" ];
12 | script-metric = ''
13 | sleep 2
14 | echo 42
15 | '';
16 | };
17 | };
18 | in
19 | (import ./. { inherit diagnostics; }).config.paths.wtf
20 |
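21 | # Sketch of how the result might be built and run (the exact invocation depends on
22 | # the NIX_PATH entries this repository expects):
23 | #   nix-build test.nix && ./result/bin/wtf
24 | #   ./result/bin/wtf -t slow    # select checks tagged "slow", excluded by default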
--------------------------------------------------------------------------------
/wtf/wtf:
--------------------------------------------------------------------------------
1 | #!@bash@/bin/bash
2 |
3 | set -ef
4 | set -o pipefail
5 |
6 | export SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
7 | export GIT_SSL_CAINFO=/etc/ssl/certs/ca-certificates.crt
8 | export OPENSSL_X509_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
9 |
10 | export PATH=@wtfenv@/bin:$PATH
11 | export WTFDB=${WTFDB:-@wtfdb@}
12 |
13 | test -f "$WTFDB"
14 |
15 | if [ -t 1 ]; then
16 | WTF_ON_TERMINAL=1
17 | else
18 | WTF_ON_TERMINAL=0
19 | fi
20 | export WTF_ON_TERMINAL
21 |
22 | wait=0
23 | checks=()
24 | tags=()
25 | nottags=( "slow" )
26 | alwayschecks=()
27 |
28 | wtfdb() {
29 | jq "$@" < "$WTFDB"
30 | }
31 |
32 | while [ $# -gt 0 ]; do
33 | case "$1" in
34 | -h|--help)
35 |             echo 'wtf [-w] [-t <tag>]... [-n <tag>]... [<check>...]'
36 | echo
37 | echo 'Available checks (name [tags]):'
38 | wtfdb -M -r 'to_entries[] | "\(.key) \(.value.tags)"'
39 | exit 0
40 | ;;
41 | -t|--tag)
42 | tags+=("$2")
43 | shift
44 | ;;
45 | -n|--nottag)
46 | nottags+=("$2")
47 | shift
48 | ;;
49 | -w|--wait)
50 | wait=1 # poll until the check starts returning zero
51 | ;;
52 | *)
53 | alwayschecks+=("$1")
54 | ;;
55 | esac
56 | shift
57 | done
58 |
59 | checks=$(
60 | wtfdb \
61 | -r \
62 | --arg ytags "${tags[*]}" \
63 | --arg ntags "${nottags[*]}" \
64 | --arg names "${alwayschecks[*]}" \
65 | '
66 | ($ytags / " ") as $ytags |
67 | ($ntags / " " - $ytags) as $ntags |
68 | ($names / " ") as $names |
69 |
70 | def in(xs):
71 | . as $i | xs | map(. == $i) | any;
72 |
73 | def enabled_by_name(x):
74 | x.name | in($names);
75 |
76 | def enabled_by_ytags(x):
77 | (($ytags | length) == 0 and ($names | length) == 0) or
78 | (x.tags | map(in($ytags)) | any);
79 |
80 | def disabled_by_ntags(x):
81 | x.tags | map(in($ntags)) | any;
82 |
83 | def enabled_by_tags(x):
84 | enabled_by_ytags(x) and (disabled_by_ntags(x) | not);
85 |
86 | def enabled(x):
87 | enabled_by_name(x) or
88 | enabled_by_tags(x);
89 |
90 | to_entries[] | .value | select(enabled(.)) |
91 | @sh "wtf_wrapper=\(."wtf-wrapper") name=\(.name)"
92 | '
93 | )
94 |
95 | run() {
96 | echo "$checks" | xargs -d '\n' -P 0 -I {} bash -o pipefail -e -c '
97 | {}
98 |
99 | logfile=/dev/null
100 | if [ -d "$WTFLOGS" ]; then
101 | logfile=$WTFLOGS/$name.log
102 | fi
103 |
104 | prefix() {
105 | date "+%Y-%m-%d %H:%M:%S $name START" >> "$logfile"
106 | while read line; do
107 | echo "$name: $line"
108 | echo "$line" >> "$logfile"
109 | done
110 | date "+%Y-%m-%d %H:%M:%S $name DONE" >> "$logfile"
111 | }
112 |
113 | "$wtf_wrapper" 2>&1 | prefix
114 | '
115 | }
116 |
117 | if test -z "$checks"; then
118 | echo 'No checks selected!' >&2
119 | else
120 | while ! run && [ $wait -eq 1 ]; do
121 | sleep 1
122 | done
123 | fi
124 |
--------------------------------------------------------------------------------