├── version ├── nix ├── gofer-rev ├── node-packages.json ├── docker.nix ├── nodepkgs.nix ├── default.nix ├── sources.json ├── sources.nix └── node-env.nix ├── tests ├── resources │ ├── password │ ├── mitmproxy │ │ ├── mitmproxy-ca.p12 │ │ ├── mitmproxy-ca-cert.p12 │ │ ├── mitmproxy-dhparam.pem │ │ ├── mitmproxy-ca-cert.crt │ │ ├── mitmproxy-ca-cert.pem │ │ └── mitmproxy-ca.pem │ ├── caps.json │ ├── ssb-keys.json │ ├── keys │ │ └── UTC--2020-04-20T06-52-55.157141634Z--1f8fbe73820765677e68eb6e933dcb3c94c9b708 │ └── genesis.json ├── lib │ ├── median │ │ ├── bin │ │ │ └── deploy-median │ │ ├── default.nix │ │ ├── shell.nix │ │ ├── .dapp.json │ │ └── dapp2.nix │ ├── dedup-mitm │ ├── include.sh │ └── tap.sh ├── default.nix └── smoke │ └── test ├── testchain ├── keystore │ ├── password │ ├── feeder.json │ ├── genesis.json │ └── relayer.json ├── keystore.nix ├── bin │ └── median-deploy ├── median-deploy.nix ├── default.nix ├── config │ └── feed.conf └── geth-testchain.nix ├── README.md ├── systemd ├── ssb-config-updates ├── ssb-server.service ├── gofer-agent.service ├── spire-agent.service ├── leeloo-agent.service ├── splitter-agent.service ├── ssb-config.json ├── omnia.service ├── shell.nix ├── default.nix ├── leeloo.json ├── spire.json ├── install-omnia └── gofer.json ├── .gitignore ├── starkware ├── signature │ ├── __init__.py │ ├── math_utils.py │ ├── starkex_messages.py │ └── signature.py ├── setup.py ├── default.nix └── stark_cli.py ├── shell ├── functions.sh └── versioning.sh ├── default.nix ├── nixos ├── default.nix ├── omnia.nix └── omnia-options.nix ├── docker ├── ssb │ ├── feeder │ └── relayer ├── bin │ ├── filer │ ├── confer │ └── runner ├── docker-compose.prod.yml ├── README.md └── docker-compose.dev.yml ├── Dockerfile.omnia-dev ├── shell.nix ├── Dockerfile.testchain ├── vagrant ├── init.sh └── oracle.sh ├── Dockerfile.omnia-prod ├── pull_request_template.md ├── CHANGELOG.md ├── ssb-server └── ssb-db+19.2.0.patch ├── omnia └── config │ ├── 
relayer-testchain.conf │ ├── feed.conf │ ├── relayer-kovan.conf │ ├── relayer-goerli.conf │ └── relayer.conf ├── Vagrantfile └── .circleci └── config.yml /version: -------------------------------------------------------------------------------- 1 | 1.10.0-rc.3 -------------------------------------------------------------------------------- /nix/gofer-rev: -------------------------------------------------------------------------------- 1 | add-nix 2 | -------------------------------------------------------------------------------- /tests/resources/password: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /testchain/keystore/password: -------------------------------------------------------------------------------- 1 | password 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Please refer to https://github.com/chronicleprotocol/oracles for the latest version. 
2 | -------------------------------------------------------------------------------- /systemd/ssb-config-updates: -------------------------------------------------------------------------------- 1 | # deafult .friends.hops to 6 2 | .friends.hops = (.friends.hops // 6) 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.out 2 | test-results/ 3 | .env 4 | logs/ 5 | 6 | .local/ 7 | .vagrant/ 8 | 9 | result* 10 | -------------------------------------------------------------------------------- /starkware/signature/__init__.py: -------------------------------------------------------------------------------- 1 | from .math_utils import * 2 | from .signature import * 3 | from .starkex_messages import * 4 | -------------------------------------------------------------------------------- /nix/node-packages.json: -------------------------------------------------------------------------------- 1 | [ 2 | { "ssb-server": "15.3.0" }, 3 | { "node-gyp-build": "4.1.0" }, 4 | { "tap-xunit": "2.4.1" } 5 | ] 6 | -------------------------------------------------------------------------------- /tests/resources/mitmproxy/mitmproxy-ca.p12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sky-ecosystem/oracles-v2/HEAD/tests/resources/mitmproxy/mitmproxy-ca.p12 -------------------------------------------------------------------------------- /tests/resources/mitmproxy/mitmproxy-ca-cert.p12: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sky-ecosystem/oracles-v2/HEAD/tests/resources/mitmproxy/mitmproxy-ca-cert.p12 -------------------------------------------------------------------------------- /tests/resources/caps.json: -------------------------------------------------------------------------------- 1 | { 2 | "shs": 
"BRsTikbASMIC6jAvsIbZy24Wd6IpLQ5FbEx1oyooGb8=", 3 | "sign": "HOGP1DI4ZybjiHYv7SvaadeSLSnt1MQ2bDo2v7aszh0=" 4 | } 5 | -------------------------------------------------------------------------------- /shell/functions.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | updateNodePackages() { 4 | (cd "$ROOT_DIR"/nix && { 5 | node2nix -i node-packages.json -c nodepkgs.nix --nodejs-10 6 | }) 7 | } 8 | echo ' * updateNodePackages' -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | let srcs = import ./nix; 2 | in { pkgs ? srcs.pkgs }: 3 | let ssb-server = pkgs.lib.setPrio 8 srcs.ssb-server; 4 | in { 5 | inherit ssb-server; 6 | inherit (srcs) omnia install-omnia stark-cli oracle-suite; 7 | } 8 | -------------------------------------------------------------------------------- /starkware/setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | setup(name='stark-cli', 3 | version='0.0', 4 | packages=['signature'], 5 | scripts=['stark_cli.py'], 6 | package_data={'': ['*.json']} 7 | ) 8 | -------------------------------------------------------------------------------- /nixos/default.nix: -------------------------------------------------------------------------------- 1 | { oracle-suite ? (import ../nix).oracle-suite }: 2 | { pkgs, lib, ... }: { 3 | options.services.omnia = import ./omnia-options.nix { inherit lib pkgs; }; 4 | imports = [ (import ./omnia.nix { inherit oracle-suite; }) ]; 5 | } 6 | -------------------------------------------------------------------------------- /testchain/keystore.nix: -------------------------------------------------------------------------------- 1 | { pkgs ? 
import {} }: rec { 2 | keystorePath = ./keystore; 3 | passwordFile = ./keystore/password; 4 | # Return an account address for given account form the keystore: 5 | address = account: (pkgs.lib.importJSON ("${toString keystorePath}/${account}.json")).address; 6 | 7 | } 8 | -------------------------------------------------------------------------------- /tests/resources/ssb-keys.json: -------------------------------------------------------------------------------- 1 | { 2 | "curve": "ed25519", 3 | "public": "YSa2zbx07RNKQrrFX1vS5mFN+Pbnul61hd9GGymao1o=.ed25519", 4 | "private": "XhEkyFWb0TkhRU5t/yDTCI6Q9gwhsJM/SpL02UUwVtZhJrbNvHTtE0pCusVfW9LmYU349ue6XrWF30YbKZqjWg==.ed25519", 5 | "id": "@YSa2zbx07RNKQrrFX1vS5mFN+Pbnul61hd9GGymao1o=.ed25519" 6 | } -------------------------------------------------------------------------------- /systemd/ssb-server.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Scuttlebot 3 | Requires=network.target 4 | After=network.target 5 | 6 | [Service] 7 | Environment="PATH=/home/$USER/.nix-profile/bin" 8 | User=$USER 9 | Group=$USER 10 | ExecStart=$SSB_PATH start 11 | Restart=always 12 | RestartSec=5 13 | 14 | [Install] 15 | WantedBy=multi-user.target 16 | -------------------------------------------------------------------------------- /starkware/default.nix: -------------------------------------------------------------------------------- 1 | { }: 2 | 3 | let 4 | sources = import ../nix/sources.nix; 5 | mach-nix = import sources.mach-nix {}; 6 | in 7 | 8 | mach-nix.buildPythonPackage { 9 | pname = "stark-cli"; 10 | version = "0.0.0"; 11 | 12 | src = ./.; 13 | 14 | requirements = '' 15 | mpmath 16 | sympy 17 | ecdsa==0.16.0 18 | ''; 19 | } 20 | -------------------------------------------------------------------------------- /docker/ssb/feeder: -------------------------------------------------------------------------------- 1 | # 
@sPKbgg0756Jia3oAgKTlWaVKqzxMSEfQ3k0Mrt3hI6I=.ed25519 2 | { 3 | "curve": "ed25519", 4 | "public": "sPKbgg0756Jia3oAgKTlWaVKqzxMSEfQ3k0Mrt3hI6I=.ed25519", 5 | "private": "KufyyTt8yoOou1u8pBgdK6t+63NUW+DpSz11uPFzFU2w8puCDTvnomJregCApOVZpUqrPExIR9DeTQyu3eEjog==.ed25519", 6 | "id": "@sPKbgg0756Jia3oAgKTlWaVKqzxMSEfQ3k0Mrt3hI6I=.ed25519" 7 | } 8 | -------------------------------------------------------------------------------- /docker/ssb/relayer: -------------------------------------------------------------------------------- 1 | # @ncAoQ2Kd61Q1baeWgP3cSWVYRokM8qYp2kPoxPeF9ek=.ed25519 2 | { 3 | "curve": "ed25519", 4 | "public": "ncAoQ2Kd61Q1baeWgP3cSWVYRokM8qYp2kPoxPeF9ek=.ed25519", 5 | "private": "Qyn5bY1WYpT8258QLAwxEJL/mMGjWW0AGP3uIgSB/eCdwChDYp3rVDVtp5aA/dxJZVhGiQzypinaQ+jE94X16Q==.ed25519", 6 | "id": "@ncAoQ2Kd61Q1baeWgP3cSWVYRokM8qYp2kPoxPeF9ek=.ed25519" 7 | } 8 | -------------------------------------------------------------------------------- /docker/bin/filer: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | for filePathVar in ${!FILE_PATH_*}; do 5 | fileContentVar="FILE_CONT_${filePathVar#FILE_PATH_}" 6 | filePath="${!filePathVar}" 7 | echo >&2 "Creating file from ENV $fileContentVar -> $filePath" 8 | mkdir -p $(dirname "$filePath") 9 | echo -n "${!fileContentVar}" > "$filePath" 10 | done 11 | -------------------------------------------------------------------------------- /systemd/gofer-agent.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Gofer Agent 3 | Requires=network.target 4 | After=network.target 5 | 6 | [Service] 7 | Environment="PATH=/home/$USER/.nix-profile/bin" 8 | Environment="GOFER_CONFIG=$GOFER_CONFIG" 9 | User=$USER 10 | Group=$USER 11 | ExecStart=$GOFER_PATH -c "$GOFER_CONFIG" agent 12 | Restart=always 13 | RestartSec=5 14 | 15 | [Install] 16 | WantedBy=multi-user.target 
-------------------------------------------------------------------------------- /systemd/spire-agent.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Spire Agent 3 | Requires=network.target 4 | After=network.target 5 | 6 | [Service] 7 | Environment="PATH=/home/$USER/.nix-profile/bin" 8 | Environment="SPIRE_CONFIG=$SPIRE_CONFIG" 9 | User=$USER 10 | Group=$USER 11 | ExecStart=$SPIRE_PATH -c "$SPIRE_CONFIG" agent 12 | Restart=always 13 | RestartSec=5 14 | 15 | [Install] 16 | WantedBy=multi-user.target -------------------------------------------------------------------------------- /systemd/leeloo-agent.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Leeloo Agent 3 | Requires=network.target 4 | After=network.target 5 | 6 | [Service] 7 | Environment="PATH=/home/$USER/.nix-profile/bin" 8 | Environment="LEELOO_CONFIG=$LEELOO_CONFIG" 9 | User=$USER 10 | Group=$USER 11 | ExecStart=$LEELOO_PATH -c "$LEELOO_CONFIG" agent 12 | Restart=always 13 | RestartSec=5 14 | 15 | [Install] 16 | WantedBy=multi-user.target -------------------------------------------------------------------------------- /systemd/splitter-agent.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=RPC Splitter Agent 3 | Requires=network.target 4 | After=network.target 5 | 6 | [Service] 7 | Environment="PATH=/home/$USER/.nix-profile/bin" 8 | User=$USER 9 | Group=$USER 10 | ExecStart=$SPLITTER_PATH --listen 127.0.0.1:9989 --eth-rpc="$SPLITTER_URLS" agent 11 | Restart=always 12 | RestartSec=5 13 | 14 | [Install] 15 | WantedBy=multi-user.target 16 | WantedBy=gofer-agent.service -------------------------------------------------------------------------------- /testchain/keystore/feeder.json: -------------------------------------------------------------------------------- 1 | 
{"address":"131ea366b4aae41859101fe0954fe4723329cd71","crypto":{"cipher":"aes-128-ctr","ciphertext":"a0d2c44e87f587949eed92855cdcdc438929c6a5dbf06458e6698551de72fcec","cipherparams":{"iv":"c8e834889971f32afc10db516cfc88c1"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":4096,"p":6,"r":8,"salt":"7cd020f4bc901539b36ae113f318c91e3bb07bfad3f034c6ae9908df6b32bc4b"},"mac":"c2f872be5e28842e78d6f741dc0464d7e382f9624d02341232fd845d36a3ed74"},"id":"7cb93061-c2a0-44fd-9673-537206caf242","version":3} 2 | -------------------------------------------------------------------------------- /testchain/keystore/genesis.json: -------------------------------------------------------------------------------- 1 | {"address":"336472a374e1944c146d7f5edccdc37711d0af86","crypto":{"cipher":"aes-128-ctr","ciphertext":"23f89099f2c705fe64ed03ea37852a035bf0258b7e8cea1e5f446e3e3e27d9dc","cipherparams":{"iv":"584ce21f9b7b88f8508d829f981c0a82"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":4096,"p":6,"r":8,"salt":"d6002f3fdd4be62bdd1e277f2c4c21e410435e151f438dbfc041992e17f82c72"},"mac":"16761b3f23e42ee6a7213da55a921f6ab38a848396abd8886d03b3f80efa8f1b"},"id":"b7e40129-4011-4b97-b748-b4ff3b573c6b","version":3} 2 | -------------------------------------------------------------------------------- /testchain/keystore/relayer.json: -------------------------------------------------------------------------------- 1 | {"address":"436177e390e835959c82afc56e063bd032ad57fb","crypto":{"cipher":"aes-128-ctr","ciphertext":"17501222af7f95bfbf6fddb6f488d694a4ca0e4fa35d79a7d4948400a70dfb29","cipherparams":{"iv":"820ddbbb480f76bf68b46c0a747fd451"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":4096,"p":6,"r":8,"salt":"9e9b396fb8d488a06a0dcdfeb4869cfab47195b605e614e39668e3b54315c7f9"},"mac":"362a157227db9911106a98333f01c6a4b2fd0ee1eabaf5f9be3b3a81f86fcfff"},"id":"64faf175-5ef9-469f-adbe-a9c1b0b9a2bf","version":3} 2 | -------------------------------------------------------------------------------- /nix/docker.nix: 
-------------------------------------------------------------------------------- 1 | let 2 | srcs = import ./default.nix; 3 | 4 | inherit (builtins) readFile; 5 | inherit (srcs) pkgs ssb-server omnia; 6 | 7 | path = with pkgs; lib.makeBinPath [ coreutils findutils bash jq gnused ssb-server omnia ]; 8 | in with pkgs; 9 | 10 | runCommand "omnia-runner" { nativeBuildInputs = [ makeWrapper ]; } '' 11 | mkdir -p $out/bin 12 | cp ${../docker/bin}/* $out/bin 13 | for x in $out/bin/*; do 14 | wrapProgram "$x" \ 15 | --set PATH "$out/bin:${path}" 16 | done 17 | 18 | '' 19 | -------------------------------------------------------------------------------- /Dockerfile.omnia-dev: -------------------------------------------------------------------------------- 1 | FROM nixos/nix@sha256:909992c623023c15950f088185326b80012584127fbaef6366980d26a91c73d5 2 | 3 | # Install dependencies 4 | RUN apk add --no-cache bash git jq 5 | 6 | # Setup Nix cache 7 | RUN nix run -f https://cachix.org/api/v1/install cachix -c cachix use maker \ 8 | && nix run -f https://cachix.org/api/v1/install cachix -c cachix use dapp \ 9 | && nix-collect-garbage -d 10 | 11 | WORKDIR /src 12 | 13 | CMD [ "bash", "-c", "nix-env -i -f /src/nix/docker.nix --verbose && /nix/var/nix/profiles/default/bin/runner" ] 14 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | let srcs = import ./nix; 2 | 3 | in { pkgs ? srcs.pkgs }@args: 4 | 5 | let oracles = import ./. 
args; 6 | 7 | in pkgs.mkShell rec { 8 | name = "oracle-shell"; 9 | buildInputs = oracles.omnia.runtimeDeps ++ (with pkgs; [ git niv nodePackages.node2nix nodePackages.semver ]) 10 | ++ [ srcs.omnia ]; 11 | 12 | VERSION_FILE = toString ./version; 13 | ROOT_DIR = toString ./.; 14 | 15 | shellHook = '' 16 | source ${./shell/versioning.sh} 17 | source ${./shell/functions.sh} 18 | 19 | cd ${toString ./.} 20 | ''; 21 | } 22 | -------------------------------------------------------------------------------- /nix/nodepkgs.nix: -------------------------------------------------------------------------------- 1 | # This file has been generated by node2nix 1.8.0. Do not edit! 2 | 3 | {pkgs ? import { 4 | inherit system; 5 | }, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-10_x"}: 6 | 7 | let 8 | nodeEnv = import ./node-env.nix { 9 | inherit (pkgs) stdenv python2 utillinux runCommand writeTextFile; 10 | inherit nodejs; 11 | libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null; 12 | }; 13 | in 14 | import ./node-packages.nix { 15 | inherit (pkgs) fetchurl fetchgit; 16 | inherit nodeEnv; 17 | } -------------------------------------------------------------------------------- /tests/resources/keys/UTC--2020-04-20T06-52-55.157141634Z--1f8fbe73820765677e68eb6e933dcb3c94c9b708: -------------------------------------------------------------------------------- 1 | {"address":"1f8fbe73820765677e68eb6e933dcb3c94c9b708","crypto":{"cipher":"aes-128-ctr","ciphertext":"ae7e6f23a2c777e15192e16e39852391b6d7b01677194626c286e0e1ca2b799a","cipherparams":{"iv":"743832c39148879f35ef10a6f1dbf3af"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"da4d802b249eba644fc98cabad5e5c6314f8d63130adae4647d7a81a62d7fde9"},"mac":"66a04ec343cc5514378e797bd9b159e2bf00179ebb7a9fb3091b6cdfc7916f9b"},"id":"be213470-aa0d-4d4e-ad01-c1dde275d44a","version":3} -------------------------------------------------------------------------------- /systemd/ssb-config.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "connections": { 3 | "incoming": { 4 | "net": [ 5 | { 6 | "port": 8007, 7 | "scope": [ 8 | "public", 9 | "local" 10 | ], 11 | "transform": "shs" 12 | } 13 | ], 14 | "ws": [ 15 | { 16 | "port": 8988, 17 | "scope": [ 18 | "public", 19 | "local" 20 | ], 21 | "transform": "shs" 22 | } 23 | ] 24 | } 25 | }, 26 | "friends": { 27 | "hops": 6 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /systemd/omnia.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Omnia 3 | Requires=network.target 4 | After=network.target 5 | Wants=ssb-server.service 6 | 7 | [Service] 8 | Environment="PATH=/home/$USER/.nix-profile/bin" 9 | Environment="OMNIA_CONFIG=$OMNIA_CONFIG" 10 | Environment="GOFER_CONFIG=$GOFER_CONFIG" 11 | Environment="SPIRE_CONFIG=$SPIRE_CONFIG" 12 | Environment="SSB_CONF=$SSB_CONF" 13 | Environment="SSB_KEYS=$SSB_KEYS" 14 | Environment="SSB_PORT=$SSB_PORT" 15 | Environment="SSB_HOST=$SSB_HOST" 16 | User=$USER 17 | Group=$USER 18 | ExecStart=$OMNIA_PATH 19 | Restart=always 20 | RestartSec=5 21 | 22 | [Install] 23 | WantedBy=multi-user.target 24 | -------------------------------------------------------------------------------- /tests/lib/median/bin/deploy-median: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | join() { local IFS="$1"; shift; echo "$*"; } 5 | 6 | declare -a toLift 7 | declare -a pairs 8 | 9 | for arg in "$@"; do 10 | if [[ $arg =~ ^0x ]]; then 11 | toLift+=("${arg#0x}") 12 | else 13 | pairs+=("$arg") 14 | fi 15 | done 16 | 17 | for pair in "${pairs[@]-ETHUSD}"; do 18 | contractName=${pair^^} 19 | contractName=${contractName/\/} 20 | contractName=Median${contractName/BTC/WBTC} 21 | address=$(dapp create $contractName) 22 | seth send $address 'lift(address[] memory)' "[$(join , 
"${toLift[@]}")]" >&2 23 | jq -n --arg p $pair --arg a $address '{($p):$a}' 24 | done | jq -Ss add 25 | -------------------------------------------------------------------------------- /docker/bin/confer: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | prefix="$1" 5 | 6 | for confVar in $(eval "echo \${!${prefix}_STR_*}"); do 7 | confPath=$(eval "echo \"\${confVar#${prefix}_STR_}\"") 8 | eval "${prefix}_SET_${confPath}=\""'\"'"${!confVar}"'\""' 9 | done 10 | 11 | query="." 12 | for confVar in $(eval "echo \${!${prefix}_SET_*}"); do 13 | confPath=$(eval "echo \"\${confVar#${prefix}_SET_}\"") 14 | confPath="${confPath//__/[].}" 15 | confPath="${confPath//_/.}" 16 | query+=" 17 | | .$confPath = ${!confVar}" 18 | done 19 | 20 | conf="$(eval "echo \"\${${prefix}_SET:-\$(cat \"\${${prefix}_FILE_SET:-/dev/null}\")}\"")" 21 | [ -n "$conf" ] || { 22 | echo >&2 "Error: Both ${prefix}_SET and ${prefix}_FILE_SET are empty." 
23 | exit 1; 24 | } 25 | jq "$query" <<<"$conf" 26 | -------------------------------------------------------------------------------- /tests/lib/dedup-mitm: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import io 3 | import sys 4 | 5 | from mitmproxy.io import FlowReader, FlowWriter 6 | 7 | def flow_str(flow): 8 | return "%s|%s" % (flow.request.url, flow.request.content) 9 | 10 | def dedup(fn): 11 | ids = set() 12 | flows = list() 13 | with open(fn, "rb") as fin: 14 | reader = FlowReader(fin) 15 | for flow in reader.stream(): 16 | id = flow_str(flow) 17 | if not id in ids: 18 | ids.add(id) 19 | flows.append(flow) 20 | with open(fn, "wb") as fout: 21 | writer = FlowWriter(fout) 22 | for flow in flows: 23 | writer.add(flow) 24 | 25 | if __name__ == "__main__": 26 | if len(sys.argv) != 2: 27 | print("Usage: %s FILE" % (sys.argv[0],)) 28 | exit(1) 29 | else: 30 | dedup(sys.argv[1]) 31 | -------------------------------------------------------------------------------- /Dockerfile.testchain: -------------------------------------------------------------------------------- 1 | FROM nixos/nix@sha256:909992c623023c15950f088185326b80012584127fbaef6366980d26a91c73d5 2 | 3 | # Install dependencies 4 | RUN apk add --no-cache bash git 5 | 6 | # Setup Nix cache 7 | RUN nix run -f https://cachix.org/api/v1/install cachix -c cachix use maker \ 8 | && nix run -f https://cachix.org/api/v1/install cachix -c cachix use dapp \ 9 | && nix-collect-garbage -d 10 | 11 | # Copy testnet source code inside the container 12 | COPY testchain /src/ 13 | 14 | # Install Omnia runner and dependencies 15 | RUN nix-env -i -f /src/default.nix --verbose \ 16 | && nix-collect-garbage -d 17 | 18 | # Deploy medianizer contract and save their addresses to the /src/contracts.json 19 | RUN median-deployer /src/contracts.json 20 | 21 | CMD [ "bash", "-c", "cat /src/contracts.json; testchain-runner" ] 22 | 
-------------------------------------------------------------------------------- /tests/lib/median/default.nix: -------------------------------------------------------------------------------- 1 | let 2 | srcs = import ../../../nix/default.nix; 3 | sources = import ../../../nix/sources.nix; 4 | 5 | in { makerpkgs ? srcs.makerpkgs, srcRoot ? null, ... }@args: 6 | 7 | with makerpkgs; 8 | 9 | let 10 | inherit (builtins) mapAttrs attrValues; 11 | inherit (callPackage ./dapp2.nix { inherit srcRoot; }) specs packageSpecs; 12 | 13 | # Update dependency specs with default values 14 | deps = packageSpecs (mapAttrs (_: spec: 15 | spec // { 16 | #inherit doCheck; 17 | solc = solc-versions.solc_0_5_12; 18 | }) specs.this.deps); 19 | 20 | in makerScriptPackage { 21 | name = "median-deploy"; 22 | nativeBuildInputs = [ bash ]; 23 | 24 | # Specify files to add to build environment 25 | src = lib.sourceByRegex ./. [ "bin" "bin/.*" ]; 26 | 27 | solidityPackages = attrValues deps; 28 | } 29 | -------------------------------------------------------------------------------- /tests/resources/mitmproxy/mitmproxy-dhparam.pem: -------------------------------------------------------------------------------- 1 | 2 | -----BEGIN DH PARAMETERS----- 3 | MIICCAKCAgEAyT6LzpwVFS3gryIo29J5icvgxCnCebcdSe/NHMkD8dKJf8suFCg3 4 | O2+dguLakSVif/t6dhImxInJk230HmfC8q93hdcg/j8rLGJYDKu3ik6H//BAHKIv 5 | j5O9yjU3rXCfmVJQic2Nne39sg3CreAepEts2TvYHhVv3TEAzEqCtOuTjgDv0ntJ 6 | Gwpj+BJBRQGG9NvprX1YGJ7WOFBP/hWU7d6tgvE6Xa7T/u9QIKpYHMIkcN/l3ZFB 7 | chZEqVlyrcngtSXCROTPcDOQ6Q8QzhaBJS+Z6rcsd7X+haiQqvoFcmaJ08Ks6LQC 8 | ZIL2EtYJw8V8z7C0igVEBIADZBI6OTbuuhDwRw//zU1uq52Oc48CIZlGxTYG/Evq 9 | o9EWAXUYVzWkDSTeBH1r4z/qLPE2cnhtMxbFxuvK53jGB0emy2y1Ei6IhKshJ5qX 10 | IB/aE7SSHyQ3MDHHkCmQJCsOd4Mo26YX61NZ+n501XjqpCBQ2+DfZCBh8Va2wDyv 11 | A2Ryg9SUz8j0AXViRNMJgJrr446yro/FuJZwnQcO3WQnXeqSBnURqKjmqkeFP+d8 12 | 6mk2tqJaY507lRNqtGlLnj7f5RNoBFJDCLBNurVgfvq9TCVWKDIFD4vZRjCrnl6I 13 | rD693XKIHUCWOjMh1if6omGXKHH40QuME2gNa50+YPn1iYDl88uDbbMCAQI= 
14 | -----END DH PARAMETERS----- 15 | -------------------------------------------------------------------------------- /vagrant/init.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euo pipefail 4 | 5 | curl --silent --location https://nixos.org/nix/install | sh 6 | source /home/vagrant/.nix-profile/etc/profile.d/nix.sh 7 | 8 | nix-env -iA cachix -f https://cachix.org/api/v1/install 9 | cachix use maker 10 | cachix use dapp 11 | 12 | nix-env -iA nixpkgs.jq 13 | 14 | mkdir -p /home/vagrant/bin 15 | ln -sf /vagrant/vagrant/oracle.sh /home/vagrant/bin/oracle 16 | 17 | sudo rm -f /usr/local/share/ca-certificates/* 18 | sudo cp /vagrant/tests/resources/mitmproxy/mitmproxy-ca-cert.crt /usr/local/share/ca-certificates 19 | sudo chmod 0644 /usr/local/share/ca-certificates/* 20 | sudo update-ca-certificates 21 | ls -la /etc/ssl/certs | grep mitm 22 | 23 | cat < /dev/null; do sleep 5; done 19 | 20 | # Deploy medianizer contracts 21 | output=$( 22 | for pair in "${pairs[@]}"; do 23 | contractName=${pair^^} 24 | contractName=${contractName/\/} 25 | contractName=Median${contractName/BTC/WBTC} 26 | address=$(dapp create $contractName) 27 | jq -n --arg p $pair --arg a $address '{($p):$a}' 28 | done | jq -Ss add 29 | ) 30 | 31 | # Add feeder addresses to medianizer contracts 32 | if [[ -n "${toLift[*]}" ]]; then 33 | for pair in "${pairs[@]}"; do 34 | address=$(jq -r --arg p $pair '.[$p]' <<<"$output") 35 | seth send $address 'lift(address[] memory)' "[$(join , "${toLift[@]}")]" >&2 36 | done 37 | fi 38 | 39 | echo "$output" 40 | -------------------------------------------------------------------------------- /docker/bin/runner: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | log() { "${@:2}" 2> >(sed "s/^/[$1] /" >&2) | sed "s/^/[$1] /"; } 5 | 6 | filer 7 | 8 | echo >&2 "Creating Scuttlebot config file -> $HOME/.ssb/config" 
9 | mkdir -p $HOME/.ssb 10 | confer SSB > $HOME/.ssb/config 11 | 12 | export OMNIA_CONFIG=$HOME/omnia.conf 13 | echo >&2 "Creating Omnia config file -> $OMNIA_CONFIG" 14 | confer OMNIA > $OMNIA_CONFIG 15 | 16 | # Kill script process group on exit 17 | trap "trap - EXIT; kill -- -$$" EXIT 18 | 19 | echo >&2 "Starting Scuttlebot server" 20 | log SSB ssb-server start & 21 | sleep 5 22 | 23 | test -z "$SSB_INVITE" || { 24 | echo >&2 "Accepting Scuttlebot invite" 25 | log SSB_INVITE ssb-server invite.accept "$SSB_INVITE" || true 26 | sleep 1 27 | } 28 | 29 | # SSB server becomes unresponsive after accepting an invite 30 | # As it spends all its single thread resources to index new data. 31 | # Wait for SSB server to index data only then move on. 32 | until ssb-server whoami &> /dev/null; do 33 | echo >&2 "Waiting for Scuttlebot server indexing to finish..." 34 | sleep 30 35 | done 36 | 37 | echo >&2 "Starting Omnia" 38 | log OMNIA omnia 39 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [Unreleased] 8 | ### Added 9 | - Support for configurable external gas price sources for relayers. 10 | ### Changed 11 | - Logging to enable JSON format 12 | 13 | ## [1.7.0] - 2021-07-28 14 | ### Added 15 | - Introduce `--override-origin` option to enable for adding params to origins (e.g. API Key). 16 | - MATIC/USD 17 | ### Changed 18 | - Make sure `install-omnia` works with the new config structures of spire and gofer. 
19 | 20 | ## [1.6.1] - 2021-07-07 21 | ### Fixed 22 | - Fixed default configurations 23 | 24 | ## [1.6.0] - 2021-06-15 25 | ### Added 26 | - Introduced second transport method to allow for more resilient price updates. 27 | 28 | [Unreleased]: https://github.com/makerdao/oracles-v2/compare/v1.7.0...HEAD 29 | [1.7.0]: https://github.com/makerdao/oracles-v2/compare/v1.6.1...v1.7.0 30 | [1.6.1]: https://github.com/makerdao/oracles-v2/compare/v1.6.0...v1.6.1 31 | [1.6.0]: https://github.com/makerdao/oracles-v2/releases/tag/v1.6.0 32 | -------------------------------------------------------------------------------- /tests/lib/median/.dapp.json: -------------------------------------------------------------------------------- 1 | { 2 | "contracts": { 3 | "ds-test_eb7148d": { 4 | "deps": {}, 5 | "name": "ds-test", 6 | "repo": { 7 | "name": "ds-test-eb7148d", 8 | "ref": "HEAD", 9 | "rev": "eb7148d43c1ca6f9890361e2e2378364af2430ba", 10 | "url": "https://github.com/dapphub/ds-test" 11 | } 12 | }, 13 | "median_bc85f34": { 14 | "deps": { 15 | "ds-test": "ds-test_eb7148d" 16 | }, 17 | "name": "median", 18 | "repo": { 19 | "name": "median-bc85f34", 20 | "ref": "HEAD", 21 | "rev": "bc85f3448da72400095ec249c895b1cb29a6f25a", 22 | "url": "https://github.com/makerdao/median" 23 | } 24 | }, 25 | "testchain-medians_688ada1": { 26 | "deps": { 27 | "ds-test": "ds-test_eb7148d", 28 | "median": "median_bc85f34" 29 | }, 30 | "name": "testchain-medians", 31 | "repo": { 32 | "name": "testchain-medians-688ada1", 33 | "ref": "HEAD", 34 | "rev": "688ada16a3bb74b3677b6faf716345bdadee9671", 35 | "url": "https://github.com/makerdao/testchain-medians" 36 | } 37 | } 38 | }, 39 | "this": { 40 | "deps": { 41 | "testchain-medians": "testchain-medians_688ada1" 42 | }, 43 | "name": "medianzier-atd" 44 | }, 45 | "version": 1 46 | } 47 | -------------------------------------------------------------------------------- /systemd/shell.nix: 
-------------------------------------------------------------------------------- 1 | { pkgs ? import { }, nixos ? import { } }: 2 | let 3 | generateSystemd = type: name: config: 4 | pkgs.writeText "${name}.${type}" (nixos.system { 5 | system = "x86_64-linux"; 6 | configuration = ({ ... }: { config.systemd."${type}s".${name} = config; }); 7 | }).config.systemd.units."${name}.${type}".text; 8 | mkService = generateSystemd "service"; 9 | mkUserService = name: config: 10 | pkgs.writeShellScriptBin "activate" '' 11 | set -euo pipefail 12 | export XDG_RUNTIME_DIR="/run/user/$UID" 13 | loginctl enable-linger "$USER" 14 | mkdir -p "$HOME/.config/systemd/user" "$HOME/.config/systemd/user/default.target.wants" 15 | rm -f -- "$HOME/.config/systemd/user/${name}.service" "$HOME/.config/systemd/user/default.target.wants/${name}.service" 16 | ln -s ${mkService name config} "$HOME/.config/systemd/user/${name}.service" 17 | ln -s "$HOME/.config/systemd/user/${name}.service" "$HOME/.config/systemd/user/default.target.wants" 18 | systemctl --user daemon-reload 19 | systemctl --user restart ${name} 20 | ''; 21 | 22 | ccc = (nixos.system { 23 | system = "x86_64-linux"; 24 | configuration = import ../nixos { }; 25 | }).config.systemd.units."gofer.services"; 26 | in pkgs.mkShell { buildInputs = [ (mkUserService "test" ccc) ]; } 27 | -------------------------------------------------------------------------------- /ssb-server/ssb-db+19.2.0.patch: -------------------------------------------------------------------------------- 1 | diff --git a/node_modules/ssb-db/db.js b/node_modules/ssb-db/db.js 2 | index 8825682..97d32cd 100644 3 | --- a/node_modules/ssb-db/db.js 4 | +++ b/node_modules/ssb-db/db.js 5 | @@ -1,13 +1,9 @@ 6 | -var ViewHashTable = require('flumeview-hashtable') 7 | +var ViewLevel = require('flumeview-level') 8 | 9 | module.exports = function (dir, keys, opts) { 10 | var db = require('./minimal')(dir, keys, opts) 11 | 12 | - .use('keys', ViewHashTable(2, function (key) { 13 | 
- var b = Buffer.from(key.substring(1, 7), 'base64').readUInt32BE(0) 14 | - return b 15 | - }) 16 | - ) 17 | + .use('keys', ViewLevel(3, (msg) => [ msg.key ]) ) 18 | .use('clock', require('./indexes/clock')()) 19 | 20 | db.progress = {} 21 | diff --git a/node_modules/ssb-db/minimal.js b/node_modules/ssb-db/minimal.js 22 | index 052aefe..8c177a5 100644 23 | --- a/node_modules/ssb-db/minimal.js 24 | +++ b/node_modules/ssb-db/minimal.js 25 | @@ -80,7 +80,7 @@ module.exports = function (dirname, keys, opts) { 26 | 27 | var unboxers = [ mainUnboxer ] 28 | 29 | - var log = OffsetLog(path.join(dirname, 'log.offset'), { blockSize: 1024 * 16, codec }) 30 | + var log = OffsetLog(path.join(dirname, 'log.offset'), { blockSize: 1024 * 16, codec, offsetCodec: 53 }) 31 | 32 | const unboxerMap = (msg, cb) => cb(null, db.unbox(msg)) 33 | const maps = [ unboxerMap ] 34 | -------------------------------------------------------------------------------- /testchain/median-deploy.nix: -------------------------------------------------------------------------------- 1 | { pkgs ? import {} }: 2 | let 3 | medianScript = ./bin/median-deploy; 4 | medianFetchGit = pkgs.fetchgit { 5 | url = "https://github.com/makerdao/testchain-medians.git"; 6 | sha256 = "1rimgmxlfxymmjyx3yz0lp30zin7jrf4ygrj6ydvdwjgw3fmnwgh"; 7 | rev = "1fe9f7bcaa41dacdcf8bb2527e007e186e3e3c09"; 8 | deepClone = true; 9 | fetchSubmodules = true; 10 | }; 11 | wrapperPath = pkgs.lib.makeBinPath ([ 12 | pkgs.jq 13 | pkgs.bash 14 | pkgs.dapp 15 | pkgs.seth 16 | ]); 17 | in pkgs.stdenv.mkDerivation { 18 | name = "median-deploy"; 19 | 20 | buildInputs = [ 21 | pkgs.makeWrapper 22 | pkgs.solc-static-versions.solc_0_5_12 23 | pkgs.dapp 24 | ]; 25 | 26 | unpackPhase = '' 27 | cp -r ${medianFetchGit}/* ./ 28 | cp ${medianScript} ./ 29 | ''; 30 | 31 | buildPhase = '' 32 | # Because of https://github.com/NixOS/docker/issues/34 we can't use "make build" directly.
33 | dapp --use "${pkgs.solc-static-versions.solc_0_5_12}/bin/solc" build 34 | ''; 35 | 36 | installPhase = '' 37 | mkdir -p $out/bin 38 | mkdir -p $out/src 39 | cp out/dapp.sol.json $out/src/dapp.sol.json 40 | cp ${medianScript} $out/bin/median-deploy 41 | chmod u+x $out/bin/median-deploy 42 | wrapProgram $out/bin/median-deploy --set DAPP_JSON $out/src/dapp.sol.json --prefix PATH : "${wrapperPath}" 43 | ''; 44 | } 45 | -------------------------------------------------------------------------------- /docker/docker-compose.prod.yml: -------------------------------------------------------------------------------- 1 | version: "3.6" 2 | 3 | x-omnia: &default 4 | build: 5 | context: ../ 6 | dockerfile: Dockerfile.omnia-prod 7 | expose: 8 | - ${SSB_NET_PORT-8008} 9 | - ${SSB_WS_PORT-8989} 10 | environment: &env 11 | ETH_GAS: 7000000 12 | SSB_INVITE: ${SSB_INVITE} 13 | SSB_FILE_SET: /src/ssb-config.json 14 | SSB_STR_connections_incoming_net__external: ${EXT_IP-127.0.0.1} 15 | SSB_SET_connections_incoming_net__port: ${SSB_NET_PORT-8008} 16 | SSB_STR_connections_incoming_ws__external: ${EXT_IP-127.0.0.1} 17 | SSB_SET_connections_incoming_ws__port: ${SSB_WS_PORT-8989} 18 | SSB_SET_caps: ${SSB_CAPS} 19 | OMNIA_STR_ethereum_from: ${ETH_FROM} 20 | OMNIA_STR_ethereum_keystore: /home/omnia/keys 21 | OMNIA_STR_ethereum_password: /home/omnia/password 22 | OMNIA_STR_ethereum_network: ${ETH_NET-mainnet} 23 | OMNIA_STR_ethereum_infuraKey: ${INFURAKEY} 24 | FILE_PATH_ETH_PASSWORD: /home/omnia/password 25 | FILE_CONT_ETH_PASSWORD: ${ETH_PASSWORD} 26 | FILE_PATH_ETH_KEY: /home/omnia/keys/key.json 27 | FILE_CONT_ETH_KEY: ${ETH_KEY} 28 | 29 | services: 30 | feed: 31 | <<: *default 32 | environment: 33 | <<: *env 34 | OMNIA_FILE_SET: /src/omnia/config/feed.conf 35 | 36 | relay: 37 | <<: *default 38 | environment: 39 | <<: *env 40 | OMNIA_FILE_SET: /src/omnia/config/relayer-${ETH_NET-mainnet}.conf 41 | -------------------------------------------------------------------------------- 
/tests/resources/mitmproxy/mitmproxy-ca-cert.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDoTCCAomgAwIBAgIGDpHpE7iqMA0GCSqGSIb3DQEBCwUAMCgxEjAQBgNVBAMM 3 | CW1pdG1wcm94eTESMBAGA1UECgwJbWl0bXByb3h5MB4XDTIwMTAwNDExMzkwM1oX 4 | DTIzMTAwNjExMzkwM1owKDESMBAGA1UEAwwJbWl0bXByb3h5MRIwEAYDVQQKDAlt 5 | aXRtcHJveHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsXyd4r9gs 6 | Rmqn8IXIXTmlHFP8G4+5+rR5XNKcrPmukJ4/zPmTmJTLIpshXidUHg5Ocvu9Wqk7 7 | rU4eSIPg5cDPcrlN5Cjsfdn2y26BvhPhG32QnUthY+ccxEPKITZIABIDy6X9K7Si 8 | DZioAOCRtIK9Pllvsi4BBtUYxrjyHFO40EUyCSK+2BSfmJD+G2BPUrDkBd6jOAyp 9 | 9z1hqBZxsjZ7PgrcDMWyIe3REeSOR9XhLxlihh5PqjHEsOxPdzS1phgT6Le14u7J 10 | pUOQChWqUe3NoAupYAraXibNHrssnzqFmOIh4CYHGn6+okU2IUaoG/VNeuXm095Z 11 | 3K0htM11yH6LAgMBAAGjgdAwgc0wDwYDVR0TAQH/BAUwAwEB/zARBglghkgBhvhC 12 | AQEEBAMCAgQweAYDVR0lBHEwbwYIKwYBBQUHAwEGCCsGAQUFBwMCBggrBgEFBQcD 13 | BAYIKwYBBQUHAwgGCisGAQQBgjcCARUGCisGAQQBgjcCARYGCisGAQQBgjcKAwEG 14 | CisGAQQBgjcKAwMGCisGAQQBgjcKAwQGCWCGSAGG+EIEATAOBgNVHQ8BAf8EBAMC 15 | AQYwHQYDVR0OBBYEFCVFHG7I0iyWBdtqmuZ1FCkc3VmCMA0GCSqGSIb3DQEBCwUA 16 | A4IBAQCTP5wA1CHFySuYoucK/DVzGCM7GM1RxRfXOLifOQ9DuvSYSvT/KGHm+dfN 17 | NRe10ob1CmhYfcd9i3OmPxACPqEuoruSDkHMcyFxDfnjUiKgCcj2YUBu7CAobjLS 18 | vDSzGE1nuXUbg5AhxAxTpQ7I85bdh0QpyHhlRov5TGP1vlxEF1UnJ/xWfMez6CbU 19 | Op0glxX8I90oZ6GvBk3vRut06kP8nfEZbNmL74+jKdG58SiPJYXNbk0m9anyhgUG 20 | /iKNHk/bQW32okir383Q6Wg6P0LUhHXiCGR+njR2j2HDED5Fp12M/a2y++w42bJ8 21 | m3McqL8rU1/xPB56HeQYFnfEXxyX 22 | -----END CERTIFICATE----- 23 | -------------------------------------------------------------------------------- /tests/resources/mitmproxy/mitmproxy-ca-cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIDoTCCAomgAwIBAgIGDpHpE7iqMA0GCSqGSIb3DQEBCwUAMCgxEjAQBgNVBAMM 3 | CW1pdG1wcm94eTESMBAGA1UECgwJbWl0bXByb3h5MB4XDTIwMTAwNDExMzkwM1oX 4 | 
DTIzMTAwNjExMzkwM1owKDESMBAGA1UEAwwJbWl0bXByb3h5MRIwEAYDVQQKDAlt 5 | aXRtcHJveHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsXyd4r9gs 6 | Rmqn8IXIXTmlHFP8G4+5+rR5XNKcrPmukJ4/zPmTmJTLIpshXidUHg5Ocvu9Wqk7 7 | rU4eSIPg5cDPcrlN5Cjsfdn2y26BvhPhG32QnUthY+ccxEPKITZIABIDy6X9K7Si 8 | DZioAOCRtIK9Pllvsi4BBtUYxrjyHFO40EUyCSK+2BSfmJD+G2BPUrDkBd6jOAyp 9 | 9z1hqBZxsjZ7PgrcDMWyIe3REeSOR9XhLxlihh5PqjHEsOxPdzS1phgT6Le14u7J 10 | pUOQChWqUe3NoAupYAraXibNHrssnzqFmOIh4CYHGn6+okU2IUaoG/VNeuXm095Z 11 | 3K0htM11yH6LAgMBAAGjgdAwgc0wDwYDVR0TAQH/BAUwAwEB/zARBglghkgBhvhC 12 | AQEEBAMCAgQweAYDVR0lBHEwbwYIKwYBBQUHAwEGCCsGAQUFBwMCBggrBgEFBQcD 13 | BAYIKwYBBQUHAwgGCisGAQQBgjcCARUGCisGAQQBgjcCARYGCisGAQQBgjcKAwEG 14 | CisGAQQBgjcKAwMGCisGAQQBgjcKAwQGCWCGSAGG+EIEATAOBgNVHQ8BAf8EBAMC 15 | AQYwHQYDVR0OBBYEFCVFHG7I0iyWBdtqmuZ1FCkc3VmCMA0GCSqGSIb3DQEBCwUA 16 | A4IBAQCTP5wA1CHFySuYoucK/DVzGCM7GM1RxRfXOLifOQ9DuvSYSvT/KGHm+dfN 17 | NRe10ob1CmhYfcd9i3OmPxACPqEuoruSDkHMcyFxDfnjUiKgCcj2YUBu7CAobjLS 18 | vDSzGE1nuXUbg5AhxAxTpQ7I85bdh0QpyHhlRov5TGP1vlxEF1UnJ/xWfMez6CbU 19 | Op0glxX8I90oZ6GvBk3vRut06kP8nfEZbNmL74+jKdG58SiPJYXNbk0m9anyhgUG 20 | /iKNHk/bQW32okir383Q6Wg6P0LUhHXiCGR+njR2j2HDED5Fp12M/a2y++w42bJ8 21 | m3McqL8rU1/xPB56HeQYFnfEXxyX 22 | -----END CERTIFICATE----- 23 | -------------------------------------------------------------------------------- /docker/README.md: -------------------------------------------------------------------------------- 1 | ## Production environments 2 | 3 | 1) Create a `.env` file and fill missing values: 4 | ```sh 5 | cat > .env <<$ 6 | SSB_CAPS='{ 7 | "shs": "BRsTikbASMIC6jAvsIbZy24Wd6IpLQ5FbEx1oyooGb8=", 8 | "sign": "HOGP1DI4ZybjiHYv7SvaadeSLSnt1MQ2bDo2v7aszh0=" 9 | }' 10 | SSB_INVITE= 11 | SSB_NET_PORT=8008 12 | SSB_WS_PORT=8988 13 | EXT_IP=127.0.0.1 14 | ETH_GAS=7000000 15 | ETH_NET=kovan 16 | INFURAKEY= 17 | ETH_FROM=0x 18 | ETH_PASSWORD= 19 | ETH_KEY='{}' 20 | ``` 21 | 22 | 2) Start up your containers: 23 | ```sh 24 | docker-compose up -f docker-compose.prod.yml 25 | 
``` 26 | 27 | ## Development environment 28 | 29 | ### First-time configuration 30 | 31 | 1) Start all containers: 32 | ```sh 33 | docker-compose -f docker-compose.dev.yml up 34 | ``` 35 | 36 | 2) Wait until all containers are ready, then generate `Scuttlebot` invitation: 37 | ```sh 38 | docker-compose -f docker-compose.dev.yml exec feed nix-shell /src/shell.nix --command "ssb-server invite.create 1" 39 | ``` 40 | 41 | 3) Accept the invitation in relay container (the $INVITATION variable is the invitation generated in the previous step): 42 | ```sh 43 | docker-compose -f docker-compose.dev.yml exec relay nix-shell /src/shell.nix --command "ssb-server invite.accept $INVITATION" 44 | ``` 45 | 46 | ### Stopping and resuming containers 47 | 48 | To manage lifecycle of containers, standard `docker-compose` methods can be used: 49 | ```sh 50 | docker-compose -f docker-compose.dev.yml start 51 | docker-compose -f docker-compose.dev.yml stop 52 | ``` 53 | -------------------------------------------------------------------------------- /systemd/default.nix: -------------------------------------------------------------------------------- 1 | { stdenv, lib, makeWrapper, shellcheck, glibcLocales, coreutils, gettext, jq, omnia, ssb-server, oracle-suite }: 2 | stdenv.mkDerivation rec { 3 | name = "install-omnia-${version}"; 4 | version = lib.fileContents ../version; 5 | src = ./.; 6 | 7 | passthru.runtimeDeps = [ coreutils gettext jq ]; 8 | nativeBuildInputs = [ makeWrapper shellcheck ]; 9 | 10 | buildPhase = "true"; 11 | installPhase = let 12 | path = lib.makeBinPath passthru.runtimeDeps; 13 | locales = lib.optionalString (glibcLocales != null) ''--set LOCALE_ARCHIVE "${glibcLocales}"/lib/locale/locale-archive''; 14 | omniaConf = ../omnia/config; 15 | in '' 16 | mkdir -p $out/{bin,share} 17 | cp -t $out/bin install-omnia 18 | cp -t $out/share *.service *.json *-updates ${omniaConf}/* 19 | 20 | wrapProgram "$out/bin/install-omnia" \ 21 | --prefix PATH : "${path}" \ 22 | --set
SHARE_PATH "$out/share" \ 23 | --set OMNIA_PATH "${omnia}/bin/omnia" \ 24 | --set OMNIA_LIB_PATH "${omnia}/lib" \ 25 | --set OMNIA_CONF_PATH "$out/share" \ 26 | --set GOFER_PATH "${oracle-suite}/bin/gofer" \ 27 | --set SPIRE_PATH "${oracle-suite}/bin/spire" \ 28 | --set SPLITTER_PATH "${oracle-suite}/bin/rpc-splitter" \ 29 | --set SSB_PATH "${ssb-server}/bin/ssb-server" \ 30 | ${locales} 31 | ''; 32 | 33 | doCheck = true; 34 | checkPhase = '' 35 | shellcheck -x install-omnia 36 | ''; 37 | 38 | meta = { 39 | description = "Installer script for Omnia service"; 40 | homepage = "https://github.com/makerdao/oracles-v2"; 41 | license = lib.licenses.gpl3; 42 | inherit version; 43 | }; 44 | } 45 | -------------------------------------------------------------------------------- /omnia/config/relayer-testchain.conf: -------------------------------------------------------------------------------- 1 | { 2 | "mode": "relayer", 3 | "ethereum": { 4 | "from": "0x", 5 | "keystore": "", 6 | "password": "", 7 | "network": "", 8 | "gasPrice": { 9 | "source": "node", 10 | "maxPriceMultiplier": 2, 11 | "tipMultiplier": 1, 12 | "priority": "fast" 13 | } 14 | }, 15 | "transports":["transport-ssb"], 16 | "feeds": [ 17 | "0x01" 18 | ], 19 | "services":{ 20 | "scuttlebotIdMap":{ 21 | "0x01":"@sPKbgg0756Jia3oAgKTlWaVKqzxMSEfQ3k0Mrt3hI6I=.ed25519" 22 | } 23 | }, 24 | "options": { 25 | "interval": 60, 26 | "msgLimit": 30, 27 | "verbose": true, 28 | "logFormat": "json" 29 | }, 30 | "pairs": { 31 | "BATUSD": { 32 | "oracle": "0x0415bCB2c72d63c355AC2cfAaAbc378A0c22856D", 33 | "oracleSpread": 1.0, 34 | "oracleExpiration": 15500, 35 | "msgExpiration": 1800 36 | }, 37 | "BTCUSD": { 38 | "oracle": "0x014E5D3aC7401998Aab574b7d69cc5c50b2029f1", 39 | "oracleSpread": 0.5, 40 | "oracleExpiration": 15500, 41 | "msgExpiration": 1800 42 | }, 43 | "ETHUSD": { 44 | "oracle": "0x5198181B263719Ee1094B943AB34C91a47a138CB", 45 | "oracleSpread": 0.5, 46 | "oracleExpiration": 15500, 47 | "msgExpiration": 1800 48 | },
49 | "KNCUSD": { 50 | "oracle": "0x9D4931f0e416f514bE5c138aDCC9ada1A7a45CE9", 51 | "oracleSpread": 1.0, 52 | "oracleExpiration": 15500, 53 | "msgExpiration": 1800 54 | }, 55 | "MANAUSD": { 56 | "oracle": "0x43e2f122e4C337aB9FEB52cFAC20c8C15f2B9Ca4", 57 | "oracleSpread": 1.0, 58 | "oracleExpiration": 15500, 59 | "msgExpiration": 1800 60 | } 61 | } 62 | } -------------------------------------------------------------------------------- /tests/default.nix: -------------------------------------------------------------------------------- 1 | let srcs = import ../nix/default.nix; 2 | 3 | in { pkgs ? srcs.pkgs, makerpkgs ? srcs.makerpkgs, nodepkgs ? srcs.nodepkgs }@args: 4 | 5 | let 6 | oracles = import ./.. args; 7 | median = import ./lib/median args; 8 | dapptools = import srcs.dapptools; 9 | seth = dapptools.seth; 10 | in pkgs.mkShell rec { 11 | name = "oracle-test-shell"; 12 | buildInputs = with pkgs; 13 | [ 14 | procps 15 | curl 16 | jq 17 | mitmproxy 18 | go-ethereum 19 | makerpkgs.dappPkgsVersions.latest.dapp 20 | nodepkgs.tap-xunit 21 | median 22 | oracles.omnia 23 | oracles.install-omnia 24 | ] ++ oracles.omnia.buildInputs; 25 | 26 | RESULTS_DIR = "${toString ./.}/test-results"; 27 | SMOKE_TEST = toString ./smoke/test; 28 | 29 | shellHook = '' 30 | _xunit() { 31 | local name="$1" 32 | local tap="$2" 33 | mkdir -p "$RESULTS_DIR/$name" 34 | tap-xunit < "$tap" \ 35 | > "$RESULTS_DIR/$name/results.xml" 36 | cp "$tap" "$RESULTS_DIR/$name/" 37 | } 38 | 39 | xunit() { 40 | local name="$1" 41 | local tests=("''${@:2}") 42 | if [[ $tests ]]; then 43 | for test in "''${tests[@]}"; do 44 | _xunit "$name-''${test%.*}" "$test" 45 | done 46 | else 47 | local output="$(mktemp tap-XXXXXXXX).tap" 48 | tee "$output" 49 | _xunit "$name" "$output" 50 | fi 51 | } 52 | 53 | _runTest() { 54 | local ecode=0 55 | "''${@:2}" 56 | ecode=$? 
57 | xunit "$1" logs/*.tap || true 58 | return $ecode 59 | } 60 | 61 | testSmoke() { _runTest smoke sh -c 'mkdir -p logs && "$1" | tee logs/smoke.tap' _ "$SMOKE_TEST"; } 62 | ''; 63 | } 64 | -------------------------------------------------------------------------------- /nix/default.nix: -------------------------------------------------------------------------------- 1 | let 2 | inherit (builtins) map listToAttrs attrValues isString; 3 | 4 | sources = import ./sources.nix; 5 | 6 | inherit (import sources.nixpkgs { 7 | overlays = [ 8 | (self: super: { inherit (import "${sources.dapptools}/overlay.nix" self super) hevm ethsign seth; }) 9 | (self: super: (super // { dapptoolsSrc = ./.; })) # hacky but works - TODO: suggest to daptools to not use it in seth 10 | ]; 11 | }) 12 | pkgs; 13 | inherit (pkgs.lib.strings) removePrefix; 14 | 15 | getName = x: let parse = drv: (builtins.parseDrvName drv).name; in if isString x then parse x else x.pname or (parse x.name); 16 | 17 | ssb-patches = ../ssb-server; 18 | in rec { 19 | inherit pkgs; 20 | 21 | makerpkgs = import sources.makerpkgs { }; 22 | 23 | nodepkgs = let 24 | nodepkgs' = import ./nodepkgs.nix { pkgs = pkgs // { stdenv = pkgs.stdenv // { lib = pkgs.lib; }; }; }; 25 | shortNames = listToAttrs (map (x: { 26 | name = removePrefix "node_" (getName x.name); 27 | value = x; 28 | }) (attrValues nodepkgs')); 29 | in nodepkgs' // shortNames; 30 | 31 | ssb-server = nodepkgs.ssb-server.override { 32 | name = "patched-ssb-server"; 33 | buildInputs = with pkgs; [ gnumake nodepkgs.node-gyp-build git ]; 34 | postInstall = '' 35 | git apply ${ssb-patches}/ssb-db+19.2.0.patch 36 | ''; 37 | }; 38 | 39 | oracle-suite = pkgs.callPackage sources.oracle-suite { }; 40 | 41 | setzer = pkgs.callPackage sources.setzer { }; 42 | 43 | stark-cli = pkgs.callPackage ../starkware { }; 44 | 45 | omnia = pkgs.callPackage sources.omnia { 46 | inherit ssb-server stark-cli oracle-suite; 47 | setzer-mcd = setzer; 48 | }; 49 | 50 | install-omnia = 
pkgs.callPackage ../systemd { inherit omnia ssb-server oracle-suite; }; 51 | } 52 | -------------------------------------------------------------------------------- /tests/lib/median/dapp2.nix: -------------------------------------------------------------------------------- 1 | # Generated by dapp2nix 2.1.7, do not edit! 2 | { solidityPackage, solc, hevm, dapp2, srcRoot ? null }: 3 | 4 | let 5 | inherit (builtins) map listToAttrs attrNames attrValues length fromJSON readFile; 6 | mapAttrs = if (builtins ? mapAttrs) 7 | then builtins.mapAttrs 8 | else f: attrs: 9 | listToAttrs (map 10 | (name: { inherit name; value = f name attrs."${name}"; }) 11 | (attrNames attrs)); 12 | 13 | defaults = { 14 | inherit solc hevm; 15 | test-hevm = dapp2.test-hevm; 16 | doCheck = true; 17 | }; 18 | 19 | package = spec: let 20 | spec' = defaults // (removeAttrs spec [ "repo" ]); 21 | deps = map (spec: 22 | package (spec // { inherit (spec') solc test-hevm hevm doCheck; }) 23 | ) (attrValues spec'.deps); 24 | in solidityPackage (spec' // { inherit deps; }); 25 | 26 | packageSpecs = mapAttrs (_: package); 27 | 28 | jsonSpecs = fromJSON (readFile ./.dapp.json); 29 | 30 | resolveDeps = root: _: v: 31 | let 32 | contract = jsonSpecs.contracts."${v}"; 33 | contract' = contract // { 34 | src = if (srcRoot == null) 35 | then "${fetchGit contract.repo}/src" 36 | else "${srcRoot}/${root}/${contract.name}/src"; 37 | }; 38 | noDeps = length (attrNames contract.deps) == 0; 39 | in 40 | if noDeps 41 | then contract' 42 | else contract' // { 43 | deps = mapAttrs 44 | (resolveDeps "${root}/${contract.name}/lib") 45 | contract.deps; 46 | }; 47 | 48 | specs = (mapAttrs resolveDeps jsonSpecs.contracts) // { 49 | this = jsonSpecs.this // { 50 | src = ./.; 51 | deps = mapAttrs (resolveDeps ".") jsonSpecs.this.deps; 52 | }; 53 | }; 54 | in { 55 | inherit package packageSpecs specs; 56 | this = package specs.this; 57 | deps = packageSpecs specs.this.deps; 58 | } 59 | 
-------------------------------------------------------------------------------- /docker/docker-compose.dev.yml: -------------------------------------------------------------------------------- 1 | version: "3.6" 2 | 3 | x-omnia: &default 4 | build: 5 | context: ../ 6 | dockerfile: Dockerfile.omnia-dev 7 | environment: &env 8 | ETH_GAS: 7000000 9 | SSB_FILE_SET: /src/systemd/ssb-config.json 10 | SSB_SET_connections_incoming_net__port: 8008 11 | SSB_SET_connections_incoming_ws__port: 8989 12 | SSB_SET_caps: | 13 | { 14 | "shs": "BRsTikbASMIC6jAvsIbZy24Wd6IpLQ5FbEx1oyooGb8=", 15 | "sign": "HOGP1DI4ZybjiHYv7SvaadeSLSnt1MQ2bDo2v7aszh0=" 16 | } 17 | OMNIA_STR_ethereum_keystore: /etc/omnia/keystore 18 | OMNIA_STR_ethereum_password: /etc/omnia/password 19 | OMNIA_STR_ethereum_network: testchain:8545 20 | 21 | services: 22 | testchain: 23 | build: 24 | context: ../ 25 | dockerfile: Dockerfile.testchain 26 | 27 | feed: 28 | <<: *default 29 | volumes: 30 | - nix:/nix 31 | - ../:/src 32 | - ../testchain/keystore:/etc/omnia/keystore 33 | - ../testchain/keystore/password:/etc/omnia/password 34 | - ./ssb/feeder:/root/.ssb/secret 35 | environment: 36 | <<: *env 37 | OMNIA_FILE_SET: /src/omnia/config/feed.conf 38 | OMNIA_STR_ethereum_from: "0x131ea366b4aae41859101fe0954fe4723329cd71" 39 | SSB_STR_connections_incoming_net__external: feed 40 | SSB_STR_connections_incoming_ws__external: feed 41 | 42 | relay: 43 | <<: *default 44 | volumes: 45 | - nix:/nix 46 | - ../:/src 47 | - ../testchain/keystore:/etc/omnia/keystore 48 | - ../testchain/keystore/password:/etc/omnia/password 49 | - ./ssb/relayer:/root/.ssb/secret 50 | environment: 51 | <<: *env 52 | OMNIA_FILE_SET: /src/omnia/config/relayer-testchain.conf 53 | OMNIA_STR_ethereum_from: "0x436177e390e835959c82afc56e063bd032ad57fb" 54 | SSB_STR_connections_incoming_net__external: relay 55 | SSB_STR_connections_incoming_ws__external: relay 56 | 57 | volumes: 58 | nix: 59 | 
-------------------------------------------------------------------------------- /Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | # All Vagrant configuration is done below. The "2" in Vagrant.configure 5 | # configures the configuration version (we support older styles for 6 | # backwards compatibility). Please don't change it unless you know what 7 | # you're doing. 8 | Vagrant.configure("2") do |config| 9 | # The most common configuration options are documented and commented below. 10 | # For a complete reference, please see the online documentation at 11 | # https://docs.vagrantup.com. 12 | 13 | # Every Vagrant development environment requires a box. You can search for 14 | # boxes at https://vagrantcloud.com/search. 15 | config.vm.box = "ubuntu/focal64" 16 | 17 | # Create a forwarded port mapping which allows access to a specific port 18 | # within the machine from a port on the host machine. In the example below, 19 | # accessing "localhost:8080" will access port 80 on the guest machine. 20 | # NOTE: This will enable public access to the opened port 21 | # config.vm.network "forwarded_port", guest: 80, host: 8080 22 | 23 | # Create a forwarded port mapping which allows access to a specific port 24 | # within the machine from a port on the host machine and only allow access 25 | # via 127.0.0.1 to disable public access 26 | # config.vm.network "forwarded_port", guest: 80, host: 8080, host_ip: "127.0.0.1" 27 | 28 | # Create a private network, which allows host-only access to the machine 29 | # using a specific IP. 30 | # config.vm.network "private_network", ip: "192.168.33.10" 31 | 32 | # Create a public network, which generally matched to bridged network. 33 | # Bridged networks make the machine appear as another physical device on 34 | # your network. 
35 | # config.vm.network "public_network" 36 | 37 | config.vm.provider "virtualbox" do |vb| 38 | vb.memory = "4096" 39 | end 40 | config.vm.provision "shell", path: "vagrant/init.sh", privileged: false 41 | config.vm.synced_folder "../../chronicle", "/opt/chronicle" 42 | config.vm.synced_folder "../../maker", "/opt/maker" 43 | end -------------------------------------------------------------------------------- /testchain/default.nix: -------------------------------------------------------------------------------- 1 | { pkgs ? import {}}: 2 | let 3 | # Address used for the genesis block 4 | genesisKey = "genesis"; 5 | # List of feeders, these addresses are allowed to send pricees 6 | feedersKeys = ["feeder"]; 7 | # List of relayers, these addresses will be able to connect to the RPC api 8 | relayerKeys = ["relayer"]; 9 | # List of contracts to deploy 10 | pairs = ["BATUSD" "BTCUSD" "ETHUSD" "KNCUSD" "MANAUSD"]; 11 | 12 | makerPkgs = import (builtins.fetchTarball "https://github.com/makerdao/makerpkgs/tarball/4d71760d27e88e244f9b5fe4d064b4c207b9b92d") { inherit pkgs; }; 13 | keystore = import ./keystore.nix { pkgs = makerPkgs; }; 14 | medianDeploy = import ./median-deploy.nix { pkgs = makerPkgs; }; 15 | gethTestchain = import ./geth-testchain.nix { pkgs = makerPkgs; keystore = keystore; genesisKey = genesisKey; unlockKeys = relayerKeys; allocKeys = feedersKeys ++ relayerKeys; }; 16 | 17 | medianDeployerBin = pkgs.writeShellScript "median-deployer" '' 18 | # Run go-ethereum in background 19 | ${gethTestchain}/bin/geth-testchain & 20 | pid=$! 
21 | 22 | OUTPUT=$1 23 | export ETH_GAS=7000000 24 | export ETH_KEYSTORE=${keystore.keystorePath} 25 | export ETH_FROM=${keystore.address genesisKey} 26 | export ETH_PASSWORD=${keystore.passwordFile} 27 | 28 | # Deploy medianizer contracts 29 | ${medianDeploy}/bin/median-deploy ${builtins.concatStringsSep " " pairs} ${builtins.concatStringsSep " " (map (x: "0x" + (keystore.address x)) feedersKeys)} > $OUTPUT 30 | 31 | # Stop go-ethereum process 32 | kill "$pid" 33 | ''; 34 | 35 | testchainRunnerBin = pkgs.writeShellScript "testchain-runner" '' 36 | ${gethTestchain}/bin/geth-testchain 37 | ''; 38 | in pkgs.stdenv.mkDerivation { 39 | name = "geth-testchain"; 40 | 41 | unpackPhase = "true"; 42 | 43 | buildInputs = [ 44 | medianDeploy 45 | gethTestchain 46 | ]; 47 | 48 | installPhase = '' 49 | mkdir -p $out/bin 50 | cp ${medianDeployerBin} $out/bin/median-deployer 51 | cp ${testchainRunnerBin} $out/bin/testchain-runner 52 | ''; 53 | } 54 | -------------------------------------------------------------------------------- /omnia/config/feed.conf: -------------------------------------------------------------------------------- 1 | { 2 | "mode": "feed", 3 | "ethereum": { 4 | "from": "0x", 5 | "keystore": "", 6 | "password": "" 7 | }, 8 | "options": { 9 | "interval": 60, 10 | "msgLimit": 35, 11 | "srcTimeout": 10, 12 | "setzerTimeout": 10, 13 | "setzerCacheExpiry": 120, 14 | "setzerMinMedian": 3, 15 | "setzerEthRpcUrl": "", 16 | "verbose": true, 17 | "logFormat": "text" 18 | }, 19 | "sources":["gofer","setzer"], 20 | "transports":["transport-spire","transport-ssb"], 21 | "pairs": { 22 | "AAVE/USD":{"msgExpiration":1800,"msgSpread":0.5}, 23 | "AVAX/USD":{"msgExpiration":1800,"msgSpread":0.5}, 24 | "BAL/USD":{"msgExpiration":1800,"msgSpread":0.5}, 25 | "BAT/USD":{"msgExpiration":1800,"msgSpread":0.5}, 26 | "BTC/USD":{"msgExpiration":1800,"msgSpread":0.5}, 27 | "COMP/USD":{"msgExpiration":1800,"msgSpread":0.5}, 28 | "CRV/USD":{"msgExpiration":1800,"msgSpread":0.5}, 29 | 
"DOT/USD":{"msgExpiration":1800,"msgSpread":0.5}, 30 | "ETH/BTC":{"msgExpiration":1800,"msgSpread":0.5}, 31 | "ETH/USD":{"msgExpiration":1800,"msgSpread":0.5}, 32 | "FIL/USD":{"msgExpiration":1800,"msgSpread":0.5}, 33 | "KNC/USD":{"msgExpiration":1800,"msgSpread":0.5}, 34 | "LINK/USD":{"msgExpiration":1800,"msgSpread":0.5}, 35 | "LRC/USD":{"msgExpiration":1800,"msgSpread":0.5}, 36 | "MANA/USD":{"msgExpiration":1800,"msgSpread":0.5}, 37 | "MATIC/USD":{"msgExpiration":1800,"msgSpread":0.5}, 38 | "PAXG/USD":{"msgExpiration":1800,"msgSpread":0.5}, 39 | "SNX/USD":{"msgExpiration":1800,"msgSpread":0.5}, 40 | "SOL/USD":{"msgExpiration":1800,"msgSpread":0.5}, 41 | "UNI/USD":{"msgExpiration":1800,"msgSpread":0.5}, 42 | "USDT/USD":{"msgExpiration":1800,"msgSpread":0.5}, 43 | "WNXM/USD":{"msgExpiration":1800,"msgSpread":0.5}, 44 | "WSTETH/USD":{"msgExpiration":1800,"msgSpread":0.5}, 45 | "XRP/USD":{"msgExpiration":1800,"msgSpread":0.5}, 46 | "XTZ/USD":{"msgExpiration":1800,"msgSpread":0.5}, 47 | "YFI/USD":{"msgExpiration":1800,"msgSpread":0.5}, 48 | "ZEC/USD":{"msgExpiration":1800,"msgSpread":0.5}, 49 | "ZRX/USD":{"msgExpiration":1800,"msgSpread":0.5} 50 | } 51 | } -------------------------------------------------------------------------------- /systemd/leeloo.json: -------------------------------------------------------------------------------- 1 | { 2 | "transport": { 3 | "libp2p": { 4 | "bootstrapAddrs": [ 5 | "/dns/spire-bootstrap1.makerops.services/tcp/8000/p2p/12D3KooWRfYU5FaY9SmJcRD5Ku7c1XMBRqV6oM4nsnGQ1QRakSJi", 6 | "/dns/spire-bootstrap2.makerops.services/tcp/8000/p2p/12D3KooWBGqjW4LuHUoYZUhbWW1PnDVRUvUEpc4qgWE3Yg9z1MoR" 7 | ] 8 | } 9 | }, 10 | "feeds": [ 11 | "0xDA1d2961Da837891f43235FddF66BAD26f41368b", 12 | "0x4b0E327C08e23dD08cb87Ec994915a5375619aa2", 13 | "0x75ef8432566A79C86BBF207A47df3963B8Cf0753", 14 | "0x83e23C207a67a9f9cB680ce84869B91473403e7d", 15 | "0xFbaF3a7eB4Ec2962bd1847687E56aAEE855F5D00", 16 | "0xfeEd00AA3F0845AFE52Df9ECFE372549B74C69D2", 17 | 
"0x71eCFF5261bAA115dcB1D9335c88678324b8A987", 18 | "0x8ff6a38A1CD6a42cAac45F08eB0c802253f68dfD", 19 | "0x16655369Eb59F3e1cAFBCfAC6D3Dd4001328f747", 20 | "0xD09506dAC64aaA718b45346a032F934602e29cca", 21 | "0xc00584B271F378A0169dd9e5b165c0945B4fE498", 22 | "0x60da93D9903cb7d3eD450D4F81D402f7C4F71dd9", 23 | "0xa580BBCB1Cee2BCec4De2Ea870D20a12A964819e", 24 | "0xD27Fa2361bC2CfB9A591fb289244C538E190684B", 25 | "0x8de9c5F1AC1D4d02bbfC25fD178f5DAA4D5B26dC", 26 | "0xE6367a7Da2b20ecB94A25Ef06F3b551baB2682e6", 27 | "0xA8EB82456ed9bAE55841529888cDE9152468635A", 28 | "0x130431b4560Cd1d74A990AE86C337a33171FF3c6", 29 | "0x8aFBD9c3D794eD8DF903b3468f4c4Ea85be953FB", 30 | "0xd94BBe83b4a68940839cD151478852d16B3eF891", 31 | "0xC9508E9E3Ccf319F5333A5B8c825418ABeC688BA", 32 | "0x77EB6CF8d732fe4D92c427fCdd83142DB3B742f7", 33 | "0x3CB645a8f10Fb7B0721eaBaE958F77a878441Cb9", 34 | "0x4f95d9B4D842B2E2B1d1AC3f2Cf548B93Fd77c67", 35 | "0xaC8519b3495d8A3E3E44c041521cF7aC3f8F63B3", 36 | "0xd72BA9402E9f3Ff01959D6c841DDD13615FFff42" 37 | ], 38 | "ethereum": { 39 | "from": "", 40 | "keystore": "", 41 | "password": "" 42 | }, 43 | "leeloo": { 44 | "listeners": { 45 | "wormhole": [ 46 | { 47 | "rpc": [], 48 | "interval": 30, 49 | "blocksBehind": [10, 500, 1000, 2500, 5000, 10000, 20000, 30000, 40000, 50000, 60000], 50 | "maxBlocks": 1000, 51 | "addresses": [ 52 | "0x45440Ae4988965A4cD94651E715fC9A04e62Fb41" 53 | ] 54 | } 55 | ] 56 | } 57 | } 58 | } -------------------------------------------------------------------------------- /omnia/config/relayer-kovan.conf: -------------------------------------------------------------------------------- 1 | { 2 | "mode": "relayer", 3 | "ethereum": { 4 | "from": "0x", 5 | "keystore": "", 6 | "password": "", 7 | "network": "kovan", 8 | "gasPrice": { 9 | "source": "node", 10 | "maxPriceMultiplier": 2, 11 | "tipMultiplier": 1, 12 | "priority": "fast" 13 | } 14 | }, 15 | "transports":["transport-ssb"], 16 | "feeds": [ 17 | "0x01", 18 | "0x02", 19 | "0x03", 20 | "0x04" 
21 | ], 22 | "services":{ 23 | "scuttlebotIdMap":{ 24 | "0x01":"@Egj2PrjCPRpIGguJd67vVwqqwAref9AW3bwQ11V5QyU=.ed25519", 25 | "0x02":"@nqpoLLvOYwFBmjC4v24J1yaSLhrDF5e+N3pzr6OWbGc=.ed25519", 26 | "0x03":"@hA6mPczongEIjSzkYmtNM1fKTtXw8sUzwxo2dMQFFco=.ed25519", 27 | "0x04":"@DnzQIjLRo5Fx/OfS2jcxs0xG0CIrX7osXNN0vvJUjCk=.ed25519" 28 | } 29 | }, 30 | "options": { 31 | "interval": 60, 32 | "msgLimit": 30, 33 | "verbose": true, 34 | "logFormat": "json" 35 | }, 36 | "pairs": { 37 | "BATUSD": { 38 | "oracle": "0xAb7366b12C982ca2DE162F35571b4d21E38a16FB", 39 | "oracleSpread": 1.0, 40 | "oracleExpiration": 15500, 41 | "msgExpiration": 1800 42 | }, 43 | "BTCUSD": { 44 | "oracle": "0xAb7366b12C982ca2DE162F35571b4d21E38a16FB", 45 | "oracleSpread": 0.5, 46 | "oracleExpiration": 15500, 47 | "msgExpiration": 1800 48 | }, 49 | "ETHBTC": { 50 | "oracle": "0xAb7366b12C982ca2DE162F35571b4d21E38a16FB", 51 | "oracleSpread": 0.5, 52 | "oracleExpiration": 15500, 53 | "msgExpiration": 1800 54 | }, 55 | "ETHUSD": { 56 | "oracle": "0xAb7366b12C982ca2DE162F35571b4d21E38a16FB", 57 | "oracleSpread": 0.5, 58 | "oracleExpiration": 15500, 59 | "msgExpiration": 1800 60 | }, 61 | "KNCUSD": { 62 | "oracle": "0xAb7366b12C982ca2DE162F35571b4d21E38a16FB", 63 | "oracleSpread": 1.0, 64 | "oracleExpiration": 15500, 65 | "msgExpiration": 1800 66 | }, 67 | "MANAUSD": { 68 | "oracle": "0xAb7366b12C982ca2DE162F35571b4d21E38a16FB", 69 | "oracleSpread": 1.0, 70 | "oracleExpiration": 15500, 71 | "msgExpiration": 1800 72 | }, 73 | "MATIC/USD": { 74 | "oracle": "0x6125dC3a51407640A8b439E2ECd78878bC00EAd3", 75 | "oracleSpread": 4.0, 76 | "oracleExpiration": 15500, 77 | "msgExpiration": 1800 78 | }, 79 | "WSTETH/USD": { 80 | "oracle": "0x7E851f0eeD546a956aE66F77363B4031D6bfBa54", 81 | "oracleSpread": 0.5, 82 | "oracleExpiration": 15500, 83 | "msgExpiration": 1800 84 | } 85 | } 86 | } -------------------------------------------------------------------------------- /systemd/spire.json:
-------------------------------------------------------------------------------- 1 | { 2 | "ethereum": { 3 | "from": "", 4 | "keystore": "", 5 | "password": "" 6 | }, 7 | "transport":{ 8 | "libp2p": { 9 | "bootstrapAddrs": [ 10 | "/dns/spire-bootstrap1.makerops.services/tcp/8000/p2p/12D3KooWRfYU5FaY9SmJcRD5Ku7c1XMBRqV6oM4nsnGQ1QRakSJi", 11 | "/dns/spire-bootstrap2.makerops.services/tcp/8000/p2p/12D3KooWBGqjW4LuHUoYZUhbWW1PnDVRUvUEpc4qgWE3Yg9z1MoR" 12 | ] 13 | } 14 | }, 15 | "feeds": [ 16 | "0xDA1d2961Da837891f43235FddF66BAD26f41368b", 17 | "0x4b0E327C08e23dD08cb87Ec994915a5375619aa2", 18 | "0x75ef8432566A79C86BBF207A47df3963B8Cf0753", 19 | "0x83e23C207a67a9f9cB680ce84869B91473403e7d", 20 | "0xFbaF3a7eB4Ec2962bd1847687E56aAEE855F5D00", 21 | "0xfeEd00AA3F0845AFE52Df9ECFE372549B74C69D2", 22 | "0x71eCFF5261bAA115dcB1D9335c88678324b8A987", 23 | "0x8ff6a38A1CD6a42cAac45F08eB0c802253f68dfD", 24 | "0x16655369Eb59F3e1cAFBCfAC6D3Dd4001328f747", 25 | "0xD09506dAC64aaA718b45346a032F934602e29cca", 26 | "0xc00584B271F378A0169dd9e5b165c0945B4fE498", 27 | "0x60da93D9903cb7d3eD450D4F81D402f7C4F71dd9", 28 | "0xa580BBCB1Cee2BCec4De2Ea870D20a12A964819e", 29 | "0xD27Fa2361bC2CfB9A591fb289244C538E190684B", 30 | "0x8de9c5F1AC1D4d02bbfC25fD178f5DAA4D5B26dC", 31 | "0xE6367a7Da2b20ecB94A25Ef06F3b551baB2682e6", 32 | "0xA8EB82456ed9bAE55841529888cDE9152468635A", 33 | "0x130431b4560Cd1d74A990AE86C337a33171FF3c6", 34 | "0x8aFBD9c3D794eD8DF903b3468f4c4Ea85be953FB", 35 | "0xd94BBe83b4a68940839cD151478852d16B3eF891", 36 | "0xC9508E9E3Ccf319F5333A5B8c825418ABeC688BA", 37 | "0x77EB6CF8d732fe4D92c427fCdd83142DB3B742f7", 38 | "0x3CB645a8f10Fb7B0721eaBaE958F77a878441Cb9", 39 | "0x4f95d9B4D842B2E2B1d1AC3f2Cf548B93Fd77c67", 40 | "0xaC8519b3495d8A3E3E44c041521cF7aC3f8F63B3", 41 | "0xd72BA9402E9f3Ff01959D6c841DDD13615FFff42" 42 | ], 43 | "spire": { 44 | "rpc": { 45 | "address": "127.0.0.1:8082" 46 | }, 47 | "pairs": [ 48 | "AAVEUSD", 49 | "AVAXUSD", 50 | "BALUSD", 51 | "BATUSD", 52 | "BTCUSD", 53 | 
"COMPUSD", 54 | "CRVUSD", 55 | "DOTUSD", 56 | "ETHBTC", 57 | "ETHUSD", 58 | "FILUSD", 59 | "KNCUSD", 60 | "LINKUSD", 61 | "LRCUSD", 62 | "MANAUSD", 63 | "MATICUSD", 64 | "PAXGUSD", 65 | "SNXUSD", 66 | "SOLUSD", 67 | "UNIUSD", 68 | "USDTUSD", 69 | "WNXMUSD", 70 | "WSTETHUSD", 71 | "XRPUSD", 72 | "XTZUSD", 73 | "YFIUSD", 74 | "ZECUSD", 75 | "ZRXUSD" 76 | ] 77 | } 78 | } -------------------------------------------------------------------------------- /testchain/config/feed.conf: -------------------------------------------------------------------------------- 1 | { 2 | "mode": "feed", 3 | "ethereum": { 4 | "from": "0x", 5 | "keystore": "", 6 | "password": "" 7 | }, 8 | "options": { 9 | "interval": 60, 10 | "msgLimit": 35, 11 | "srcTimeout": 10, 12 | "setzerTimeout": 10, 13 | "setzerCacheExpiry": 120, 14 | "setzerMinMedian": 3, 15 | "verbose": true 16 | }, 17 | "sources":["setzer"], 18 | "transports":["transport-ssb"], 19 | "services": { 20 | }, 21 | "pairs": { 22 | "AAVE/USD": { 23 | "msgExpiration": 1800, 24 | "msgSpread": 0.5 25 | }, 26 | "AVAX/USD": { 27 | "msgExpiration": 1800, 28 | "msgSpread": 0.5 29 | }, 30 | "BAL/USD": { 31 | "msgExpiration": 1800, 32 | "msgSpread": 0.5 33 | }, 34 | "BAT/USD": { 35 | "msgExpiration": 1800, 36 | "msgSpread": 0.5 37 | }, 38 | "BTC/USD": { 39 | "msgExpiration": 1800, 40 | "msgSpread": 0.5 41 | }, 42 | "COMP/USD": { 43 | "msgExpiration": 1800, 44 | "msgSpread": 0.5 45 | }, 46 | "CRV/USD": { 47 | "msgExpiration": 1800, 48 | "msgSpread": 0.5 49 | }, 50 | "DOT/USD": { 51 | "msgExpiration": 1800, 52 | "msgSpread": 0.5 53 | }, 54 | "ETH/BTC": { 55 | "msgExpiration": 1800, 56 | "msgSpread": 0.5 57 | }, 58 | "ETH/USD": { 59 | "msgExpiration": 1800, 60 | "msgSpread": 0.5 61 | }, 62 | "FIL/USD": { 63 | "msgExpiration": 1800, 64 | "msgSpread": 0.5 65 | }, 66 | "KNC/ETH": { 67 | "msgExpiration": 1800, 68 | "msgSpread": 0.5 69 | }, 70 | "KNC/USD": { 71 | "msgExpiration": 1800, 72 | "msgSpread": 0.5 73 | }, 74 | "LINK/USD": { 75 | 
"msgExpiration": 1800, 76 | "msgSpread": 0.5 77 | }, 78 | "LRC/USD": { 79 | "msgExpiration": 1800, 80 | "msgSpread": 0.5 81 | }, 82 | "MANA/USD": { 83 | "msgExpiration": 1800, 84 | "msgSpread": 0.5 85 | }, 86 | "PAXG/USD": { 87 | "msgExpiration": 1800, 88 | "msgSpread": 0.5 89 | }, 90 | "SNX/USD": { 91 | "msgExpiration": 1800, 92 | "msgSpread": 0.5 93 | }, 94 | "SOL/USD": { 95 | "msgExpiration": 1800, 96 | "msgSpread": 0.5 97 | }, 98 | "UNI/USD": { 99 | "msgExpiration": 1800, 100 | "msgSpread": 0.5 101 | }, 102 | "USDT/USD": { 103 | "msgExpiration": 1800, 104 | "msgSpread": 0.5 105 | }, 106 | "WNXM/USD": { 107 | "msgExpiration": 1800, 108 | "msgSpread": 0.5 109 | }, 110 | "XRP/USD": { 111 | "msgExpiration": 1800, 112 | "msgSpread": 0.5 113 | }, 114 | "XTZ/USD": { 115 | "msgExpiration": 1800, 116 | "msgSpread": 0.5 117 | }, 118 | "YFI/USD": { 119 | "msgExpiration": 1800, 120 | "msgSpread": 0.5 121 | }, 122 | "ZEC/USD": { 123 | "msgExpiration": 1800, 124 | "msgSpread": 0.5 125 | }, 126 | "YFI/USD": { 127 | "msgExpiration": 1800, 128 | "msgSpread": 0.5 129 | }, 130 | "ZRX/USD": { 131 | "msgExpiration": 1800, 132 | "msgSpread": 0.5 133 | } 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /testchain/geth-testchain.nix: -------------------------------------------------------------------------------- 1 | { pkgs ? import {}, keystore ? import ./keystore.nix {}, genesisKey ? "genesis", unlockKeys ? [ "genesis" ], allocKeys ? 
[] }: 2 | let 3 | __listToBalances = list: 4 | let 5 | recurse = list: balances: n: 6 | if n < builtins.length list 7 | then recurse list (balances // builtins.listToAttrs [{ name = builtins.elemAt list n; value = { "balance" = "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"; }; }]) (n+1) 8 | else balances; 9 | in recurse list {} 0; 10 | 11 | # Testchain configuration: 12 | genesisAddr = keystore.address genesisKey; 13 | unlockAddrs = pkgs.lib.forEach unlockKeys keystore.address; 14 | chainId = 99; 15 | dataDir = "/var/lib/testchain"; 16 | rpcPort = 8545; 17 | rpcAddr = "0.0.0.0"; 18 | genesis = pkgs.writeText "genesis.json" (builtins.toJSON { 19 | alloc = __listToBalances ([genesisAddr] ++ pkgs.lib.forEach allocKeys keystore.address); 20 | config = { 21 | byzantiumBlock = 0; 22 | chainId = chainId; 23 | clique = { 24 | epoch = 3000; 25 | period = 0; 26 | }; 27 | constantinopleBlock = 0; 28 | eip150Block = 0; 29 | eip155Block = 0; 30 | eip158Block = 0; 31 | eip160Block = 0; 32 | homesteadBlock = 0; 33 | istanbulBlock = 0; 34 | petersburgBlock = 0; 35 | }; 36 | difficulty = "0x1"; 37 | extraData = "0x3132333400000000000000000000000000000000000000000000000000000000${genesisAddr}0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; 38 | gaslimit = "0xffffffffffffffff"; 39 | }); 40 | 41 | geth-testchain = pkgs.writeShellScript "geth-testchain" '' 42 | mkdir -p ${dataDir} 43 | if [[ "$(realpath "${dataDir}"/genesis.json)" != "${genesis}" ]]; then 44 | ln -sf "${genesis}" "${dataDir}/genesis.json" 45 | ln -sfT "${keystore.keystorePath}" "${dataDir}"/keystore 46 | 47 | ${pkgs.go-ethereum}/bin/geth \ 48 | --datadir "${dataDir}" \ 49 | init "${dataDir}/genesis.json" 50 | 51 | for ((n=0;n<${toString ((builtins.length unlockAddrs) + 1)};n++)); do 52 | cat ${keystore.passwordFile} >> ${dataDir}/passwords 53 | done 54 | fi 55 | 56 | ${pkgs.go-ethereum}/bin/geth \ 57 | --datadir 
"${dataDir}" \ 58 | --networkid "${toString chainId}" \ 59 | --mine \ 60 | --minerthreads=1 \ 61 | --allow-insecure-unlock \ 62 | --rpc \ 63 | --rpcapi "web3,eth,net,debug,personal" \ 64 | --rpccorsdomain="*" \ 65 | --rpcvhosts="*" \ 66 | --nodiscover \ 67 | --rpcaddr="${rpcAddr}" \ 68 | --rpcport="${toString rpcPort}" \ 69 | --unlock="${builtins.concatStringsSep "," (map (x: "0x" + x) (unlockAddrs ++ [genesisAddr]))}" \ 70 | --password="${dataDir}/passwords" \ 71 | --etherbase="0x${genesisAddr}" 72 | ''; 73 | in pkgs.stdenv.mkDerivation { 74 | name = "geth-testchain"; 75 | 76 | unpackPhase = "true"; 77 | 78 | buildInputs = [ 79 | pkgs.go-ethereum 80 | ]; 81 | 82 | installPhase = '' 83 | mkdir -p $out/bin 84 | cp ${geth-testchain} $out/bin/geth-testchain 85 | ''; 86 | } 87 | -------------------------------------------------------------------------------- /tests/resources/mitmproxy/mitmproxy-ca.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN PRIVATE KEY----- 2 | MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCsXyd4r9gsRmqn 3 | 8IXIXTmlHFP8G4+5+rR5XNKcrPmukJ4/zPmTmJTLIpshXidUHg5Ocvu9Wqk7rU4e 4 | SIPg5cDPcrlN5Cjsfdn2y26BvhPhG32QnUthY+ccxEPKITZIABIDy6X9K7SiDZio 5 | AOCRtIK9Pllvsi4BBtUYxrjyHFO40EUyCSK+2BSfmJD+G2BPUrDkBd6jOAyp9z1h 6 | qBZxsjZ7PgrcDMWyIe3REeSOR9XhLxlihh5PqjHEsOxPdzS1phgT6Le14u7JpUOQ 7 | ChWqUe3NoAupYAraXibNHrssnzqFmOIh4CYHGn6+okU2IUaoG/VNeuXm095Z3K0h 8 | tM11yH6LAgMBAAECggEAMWTjDrZ1H9rd3VoTPCNazXGcr3XGbejlppFu2O7IbbmL 9 | X4UBT8Y9fShhg+JTKsuiFeDlOpGsRSmYZR0qjVvJMKkTaLqk73/w4cPR2uP5Flym 10 | UraAT1wtchTbyCRURbWfc+77ldzcWkPDJwF5WMd9X3Vqc5N/k1O95OetP9PWOfMc 11 | O6dJ8EouBsjO39+/ai3hqScbGp+9qoPglCGMA1cA4nGvIbQ8e+xMKbaoFeIVk1dY 12 | OIdWkyKWPJSIOBODxPwnjiVuJtzJWwG2v5YLxp8GgyklL4dMYVitICC6Yp7+DJmg 13 | 8M45FYvauqDLulhcgAkyuWjczRjKD5Pjwu8VGW9E4QKBgQDTuFWemx9MSQS3umEI 14 | +3fvZsdj9nRRaFUblS/nZKJVTX20vmqcRaRB7oc1SdqSmu9VjgQpxtbNv0RDvz0B 15 | Lj35OC9pGAQHGpwKCLJtO9wZ+kycjOhl4kbKf2FIlQxtxXxCOjpjDL1eHVNiiCGe 16 
| LkYSN1VXollwtdCCNX5GixUP9wKBgQDQbBOD0XC41vKlnnpkR0u8bnUAdVPJE0Pe 17 | Z4NAKP4n0yYBjdtUcVVENBMHpZAOzQZo0+ny3W81I93l85PYIuqKImBRAwf1zw43 18 | mkCMLYk5KOHMA7S3AioHgS/5Af1Vq8cpRwZpH0PgzP1yJkgxjJG/EFFrSiCUrS5r 19 | R49ZfVEJDQKBgBlelDveIKD2eu89ERWPdG6mZSwQQx3iH5ifhs72WxFXqMF4s7Oo 20 | EKep2l/DSsKYeCdLTipnXiPnUFnB16CFlv1/AwdXzlu/j4UQR7kZHCWsvXNg2xQi 21 | ShNHsjONUKDyshIJgieORj65yt0arNSUfOb/5GUM2iTMVIDVwhzX250/AoGBAMQn 22 | IwqpX4wmiAFFMG0WCHpT/nuq8GrKXdJ/2y/UJLZ4a0mFJ4nGi/f/hnVo5TZ40PJx 23 | lrmCm3oIwnr7jgYZfmD4z2sPDjvSXdAItXz4EjCggDmQt1CRkdq0Q+D3dWReobcA 24 | aFEhIrH8Wv9GdiVj5XtLMUVnY1hp0MkYdRpE7bI1AoGADik2EpLHQFTlNz0STa7m 25 | bo64ahLmEpVq6/Hn9ZKjwzTk5hdhDCZHDQSrJhL4UzVRi4sD05Y9+OlbAfsEafwh 26 | ps2zBefdIkSpTSd92jWZ+n0xvn0HjEa0MaTIWMnG6p01aKLZrfC1RdBDvZL2jPD/ 27 | JGcWuIj3nteJfolXfmF75Gk= 28 | -----END PRIVATE KEY----- 29 | -----BEGIN CERTIFICATE----- 30 | MIIDoTCCAomgAwIBAgIGDpHpE7iqMA0GCSqGSIb3DQEBCwUAMCgxEjAQBgNVBAMM 31 | CW1pdG1wcm94eTESMBAGA1UECgwJbWl0bXByb3h5MB4XDTIwMTAwNDExMzkwM1oX 32 | DTIzMTAwNjExMzkwM1owKDESMBAGA1UEAwwJbWl0bXByb3h5MRIwEAYDVQQKDAlt 33 | aXRtcHJveHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsXyd4r9gs 34 | Rmqn8IXIXTmlHFP8G4+5+rR5XNKcrPmukJ4/zPmTmJTLIpshXidUHg5Ocvu9Wqk7 35 | rU4eSIPg5cDPcrlN5Cjsfdn2y26BvhPhG32QnUthY+ccxEPKITZIABIDy6X9K7Si 36 | DZioAOCRtIK9Pllvsi4BBtUYxrjyHFO40EUyCSK+2BSfmJD+G2BPUrDkBd6jOAyp 37 | 9z1hqBZxsjZ7PgrcDMWyIe3REeSOR9XhLxlihh5PqjHEsOxPdzS1phgT6Le14u7J 38 | pUOQChWqUe3NoAupYAraXibNHrssnzqFmOIh4CYHGn6+okU2IUaoG/VNeuXm095Z 39 | 3K0htM11yH6LAgMBAAGjgdAwgc0wDwYDVR0TAQH/BAUwAwEB/zARBglghkgBhvhC 40 | AQEEBAMCAgQweAYDVR0lBHEwbwYIKwYBBQUHAwEGCCsGAQUFBwMCBggrBgEFBQcD 41 | BAYIKwYBBQUHAwgGCisGAQQBgjcCARUGCisGAQQBgjcCARYGCisGAQQBgjcKAwEG 42 | CisGAQQBgjcKAwMGCisGAQQBgjcKAwQGCWCGSAGG+EIEATAOBgNVHQ8BAf8EBAMC 43 | AQYwHQYDVR0OBBYEFCVFHG7I0iyWBdtqmuZ1FCkc3VmCMA0GCSqGSIb3DQEBCwUA 44 | A4IBAQCTP5wA1CHFySuYoucK/DVzGCM7GM1RxRfXOLifOQ9DuvSYSvT/KGHm+dfN 45 | NRe10ob1CmhYfcd9i3OmPxACPqEuoruSDkHMcyFxDfnjUiKgCcj2YUBu7CAobjLS 46 | 
vDSzGE1nuXUbg5AhxAxTpQ7I85bdh0QpyHhlRov5TGP1vlxEF1UnJ/xWfMez6CbU 47 | Op0glxX8I90oZ6GvBk3vRut06kP8nfEZbNmL74+jKdG58SiPJYXNbk0m9anyhgUG 48 | /iKNHk/bQW32okir383Q6Wg6P0LUhHXiCGR+njR2j2HDED5Fp12M/a2y++w42bJ8 49 | m3McqL8rU1/xPB56HeQYFnfEXxyX 50 | -----END CERTIFICATE----- 51 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | workflows: 4 | version: 2 5 | build-test-cache: 6 | jobs: 7 | - smoke-test: 8 | context: clone-gofer-sh 9 | - cache: 10 | requires: 11 | - smoke-test 12 | context: NIX_BUILD 13 | filters: 14 | branches: 15 | only: 16 | - master 17 | - /release\/.*/ 18 | - docker: 19 | requires: 20 | - cache 21 | 22 | commands: 23 | setup-nix: 24 | description: Setup Nix 25 | steps: 26 | - run: 27 | name: Install Nix 28 | command: | 29 | curl -L https://nixos.org/nix/install | sh 30 | run-nix: 31 | description: Source nix profile and run shell command 32 | parameters: 33 | name_: 34 | type: string 35 | command: 36 | type: string 37 | steps: 38 | - run: 39 | name: << parameters.name_ >> 40 | command: | 41 | . 
$HOME/.nix-profile/etc/profile.d/nix.sh || true 42 | << parameters.command >> 43 | setup-cachix: 44 | description: Setup Cachix 45 | steps: 46 | - run-nix: 47 | name_: Install dependencies 48 | command: | 49 | nix-env -iA nixpkgs.gitMinimal nixpkgs.curl 50 | nix-env -iA cachix -f https://cachix.org/api/v1/install 51 | - run-nix: 52 | name_: Add build caches 53 | command: | 54 | cachix use maker 55 | cachix use dapp 56 | setup-ca: 57 | description: Setup CA certificate 58 | parameters: 59 | cert: 60 | type: string 61 | steps: 62 | - run: 63 | name: Install cert tools 64 | command: | 65 | command -v update-ca-certificates || apk --no-progress add --update --no-cache ca-certificates 66 | - run: 67 | name: Install cert << parameters.cert >> 68 | command: | 69 | cp "<< parameters.cert >>" /usr/local/share/ca-certificates 70 | chmod 0644 /usr/local/share/ca-certificates/* 71 | update-ca-certificates 72 | pre-fetch: 73 | description: Pre-fetch Nix dependencies 74 | parameters: 75 | path: 76 | type: string 77 | steps: 78 | - run-nix: 79 | name_: Pre-fetch dependencies for << parameters.path >> 80 | command: | 81 | nix-shell "<< parameters.path >>" --show-trace --run true 82 | executors: 83 | nix-executor: 84 | docker: 85 | - image: nixos/nix@sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff 86 | ubuntu-executor: 87 | machine: 88 | image: ubuntu-2004:202010-01 89 | 90 | jobs: 91 | smoke-test: 92 | executor: ubuntu-executor 93 | steps: 94 | - setup-nix 95 | - setup-cachix 96 | - checkout 97 | - pre-fetch: 98 | path: tests 99 | - run-nix: 100 | name_: Smoke test 101 | command: | 102 | nix-shell tests --run testSmoke 103 | - store_test_results: 104 | path: tests/test-results 105 | cache: 106 | executor: nix-executor 107 | steps: 108 | - setup-cachix 109 | - checkout 110 | - run-nix: 111 | name_: Push to Cachix 112 | command: nix-build --no-out-link | cachix push maker 113 | docker: 114 | executor: nix-executor 115 | steps: 116 | - setup-cachix 117 | - 
checkout 118 | - setup_remote_docker: 119 | version: 19.03.13 120 | docker_layer_caching: true 121 | - run: 122 | name: Build docker image 123 | command: | 124 | nix-env -i docker 125 | TAG="circleci-${CIRCLE_BUILD_NUM:-latest}" 126 | docker build -t makerdao/omnia:$TAG . -------------------------------------------------------------------------------- /shell/versioning.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | version() { 4 | local _v 5 | _v=$(cat "$VERSION_FILE" 2>/dev/null) 6 | if [[ -z $_v ]]; then 7 | _v="0.0.0" 8 | fi 9 | echo "$_v" 10 | } 11 | echo ' * version' 12 | 13 | release() { 14 | local _level="${1:-stable}" 15 | if [[ $_level =~ -?-he?l?p? ]]; then 16 | echo >&2 "Usage: release [ -h | --help ] [ LEVEL [ -e | --exact ] ]" 17 | echo >&2 " Arguments:" 18 | echo >&2 " LEVEL: major | minor | patch | rc | stable" 19 | echo >&2 " Options:" 20 | echo >&2 " -e | --exact: major, minor and patch LEVEL will be prepended by 'pre' if this is not present" 21 | return 1 22 | fi 23 | shift 24 | local OPT_EXACT="" 25 | local OPT_FORCE="" 26 | while [[ -n "$1" ]]; do 27 | case "$1" in 28 | -e|--exact) 29 | OPT_EXACT="yes" 30 | ;; 31 | -f|--force) 32 | OPT_FORCE="yes" 33 | ;; 34 | *) 35 | echo >&2 "'$1' is not a valid option" 36 | ;; 37 | esac 38 | shift 39 | done 40 | 41 | local _preId 42 | _preId="rc" 43 | 44 | if [[ $_level == "$_preId" ]]; then 45 | _level="prerelease" 46 | fi 47 | if [[ $_level != "stable" && ! $_level =~ ^pre && -z $OPT_EXACT ]]; then 48 | _level="pre$_level" 49 | fi 50 | 51 | local branch 52 | branch=$(git rev-parse --abbrev-ref HEAD) 53 | 54 | local oldVersion 55 | oldVersion=$(version) 56 | 57 | local version 58 | if [[ $_level =~ major|minor$ ]]; then 59 | [[ $branch == master || -n $OPT_FORCE ]] || { 60 | echo >&2 "Not on master branch, checkout 'master' to create a new release branch." 
61 | return 1 62 | } 63 | 64 | version=$(semver --increment "$_level" --preid ${_preId} "$oldVersion") 65 | 66 | local _branchVersion 67 | _branchVersion=$(semver --increment "$version") 68 | _branchVersion=''${_branchVersion%.*} 69 | 70 | echo "$version" > "$VERSION_FILE" 71 | git commit -m "Start '$_branchVersion' release line with 'v$version'" "$VERSION_FILE" 72 | git tag "v$version" || { 73 | echo >&2 74 | echo >&2 "Failed to create tag. Use force if necessary." 75 | echo " git tag v$version --force" 76 | } 77 | echo >&2 78 | echo >&2 "To publish this commit as a release candidate run:" 79 | echo " git push --atomic origin master master:release/$_branchVersion v$version" 80 | echo >&2 81 | echo >&2 "To patch this '$_level' release checkout the release branch:" 82 | echo " git checkout release/$_branchVersion" 83 | elif [[ $_level =~ patch|prerelease$ ]]; then 84 | [[ $branch =~ ^release/ || -n $OPT_FORCE ]] || { 85 | echo >&2 "Not on a release branch, checkout a 'release/*' or create one by: release minor|major" 86 | return 1 87 | } 88 | 89 | version=$(semver --increment "$_level" --preid ${_preId} "$oldVersion") 90 | 91 | echo "$version" > "$VERSION_FILE" 92 | git commit -m "Bump '$_level' version to 'v$version'" "$VERSION_FILE" 93 | git tag "v$version" || { 94 | echo >&2 95 | echo >&2 "Failed to create tag. Use force if necessary." 96 | echo " git tag v$version --force" 97 | } 98 | echo >&2 99 | echo >&2 "To publish this commit as a release candidate run:" 100 | echo " git push --atomic origin $branch v$version" 101 | elif [[ $_level == "stable" ]]; then 102 | [[ $branch =~ ^release/ || -n $OPT_FORCE ]] || { 103 | echo >&2 "Not on a release branch, checkout a 'release/*' or create one by: release minor|major" 104 | return 1 105 | } 106 | [[ $oldVersion =~ -${_preId}\. ]] || { 107 | echo >&2 "Current version ($oldVersion) is not a Release Candidate. 
Run: release major|minor|patch|rc" 108 | return 1 109 | } 110 | 111 | version=$(semver --increment "$oldVersion") 112 | 113 | echo "$version" > "$VERSION_FILE" 114 | git commit -m "Release 'v$version' as 'stable'" "$VERSION_FILE" 115 | git tag "v$version" && { 116 | git tag stable --force 117 | echo >&2 "To publish this commit as a stable release run:" 118 | echo " git push --atomic origin $branch v$version stable" 119 | } 120 | else 121 | echo >&2 "Unknown release level ($_level)" 122 | return 1 123 | fi 124 | } 125 | echo ' * release' -------------------------------------------------------------------------------- /starkware/signature/math_utils.py: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Copyright 2019 StarkWare Industries Ltd. # 3 | # # 4 | # Licensed under the Apache License, Version 2.0 (the "License"). # 5 | # You may not use this file except in compliance with the License. # 6 | # You may obtain a copy of the License at # 7 | # # 8 | # https://www.starkware.co/open-source-license/ # 9 | # # 10 | # Unless required by applicable law or agreed to in writing, # 11 | # software distributed under the License is distributed on an "AS IS" BASIS, # 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # 13 | # See the License for the specific language governing permissions # 14 | # and limitations under the License. # 15 | ############################################################################### 16 | 17 | 18 | from typing import Tuple 19 | 20 | import mpmath 21 | import sympy 22 | from sympy.core.numbers import igcdex 23 | 24 | # A type that represents a point (x,y) on an elliptic curve. 25 | ECPoint = Tuple[int, int] 26 | 27 | 28 | def pi_as_string(digits: int) -> str: 29 | """ 30 | Returns pi as a string of decimal digits without the decimal point ("314..."). 
31 | """ 32 | mpmath.mp.dps = digits # Set number of digits. 33 | return '3' + str(mpmath.mp.pi)[2:] 34 | 35 | 36 | def is_quad_residue(n: int, p: int) -> bool: 37 | """ 38 | Returns True if n is a quadratic residue mod p. 39 | """ 40 | return sympy.is_quad_residue(n, p) 41 | 42 | 43 | def sqrt_mod(n: int, p: int) -> int: 44 | """ 45 | Finds the minimum positive integer m such that (m*m) % p == n 46 | """ 47 | return min(sympy.sqrt_mod(n, p, all_roots=True)) 48 | 49 | 50 | def div_mod(n: int, m: int, p: int) -> int: 51 | """ 52 | Finds a nonnegative integer 0 <= x < p such that (m * x) % p == n 53 | """ 54 | a, b, c = igcdex(m, p) 55 | assert c == 1 56 | return (n * a) % p 57 | 58 | 59 | def ec_add(point1: ECPoint, point2: ECPoint, p: int) -> ECPoint: 60 | """ 61 | Gets two points on an elliptic curve mod p and returns their sum. 62 | Assumes the points are given in affine form (x, y) and have different x coordinates. 63 | """ 64 | assert (point1[0] - point2[0]) % p != 0 65 | m = div_mod(point1[1] - point2[1], point1[0] - point2[0], p) 66 | x = (m * m - point1[0] - point2[0]) % p 67 | y = (m * (point1[0] - x) - point1[1]) % p 68 | return x, y 69 | 70 | 71 | def ec_neg(point: ECPoint, p: int) -> ECPoint: 72 | """ 73 | Given a point (x,y) return (x, -y) 74 | """ 75 | x, y = point 76 | return (x, (-y) % p) 77 | 78 | 79 | def ec_double(point: ECPoint, alpha: int, p: int) -> ECPoint: 80 | """ 81 | Doubles a point on an elliptic curve with the equation y^2 = x^3 + alpha*x + beta mod p. 82 | Assumes the point is given in affine form (x, y) and has y != 0. 83 | """ 84 | assert point[1] % p != 0 85 | m = div_mod(3 * point[0] * point[0] + alpha, 2 * point[1], p) 86 | x = (m * m - 2 * point[0]) % p 87 | y = (m * (point[0] - x) - point[1]) % p 88 | return x, y 89 | 90 | 91 | def ec_mult(m: int, point: ECPoint, alpha: int, p: int) -> ECPoint: 92 | """ 93 | Multiplies by m a point on the elliptic curve with equation y^2 = x^3 + alpha*x + beta mod p. 
94 | Assumes the point is given in affine form (x, y) and that 0 < m < order(point). 95 | """ 96 | if m == 1: 97 | return point 98 | if m % 2 == 0: 99 | return ec_mult(m // 2, ec_double(point, alpha, p), alpha, p) 100 | return ec_add(ec_mult(m - 1, point, alpha, p), point, p) 101 | -------------------------------------------------------------------------------- /tests/lib/include.sh: -------------------------------------------------------------------------------- 1 | _include() { 2 | E2E_LOGS="./logs" 3 | mkdir -p "$E2E_LOGS" 4 | E2E_HOME=$(mktemp -d "${TMPDIR:-/tmp}"/home.XXXXXXXX) 5 | mkdir -p "$E2E_HOME" 6 | E2E_EXIT_HOOK=" 7 | rm -rf \"$E2E_HOME\" 8 | " 9 | trap 'trap - EXIT; bash -c "$E2E_EXIT_HOOK"' EXIT 10 | set -eo pipefail 11 | } 12 | _include 13 | 14 | startProxyRecord() { 15 | local proxyUrl=http://localhost:8080 16 | local _path=$(cd "${BASH_SOURCE[0]%/*}"; pwd) 17 | 18 | echo >&2 "# Record through proxy" 19 | rm -f "$E2E_TARGET_DIR/replay.mitm" 20 | { 21 | pkill mitmdump || true 22 | mitmdump \ 23 | -w "$E2E_TARGET_DIR/replay.mitm" \ 24 | --set "confdir=$_path/../resources/mitmproxy" \ 25 | --anticache 26 | 27 | #"$_path/dedup-mitm" "$E2E_TARGET_DIR/replay.mitm" 28 | } >"$E2E_LOGS/${E2E_TARGET-test}-rec-mitm.out" 2>&1 & 29 | E2E_EXIT_HOOK+='pkill mitmdump;' 30 | 31 | export HTTP_PROXY="$proxyUrl" 32 | export HTTPS_PROXY="$proxyUrl" 33 | sleep 1 34 | } 35 | 36 | startProxyReplay() { 37 | local proxyUrl=http://localhost:8080 38 | local _path=$(cd "${BASH_SOURCE[0]%/*}"; pwd) 39 | 40 | echo >&2 "# Replay through proxy" 41 | pkill mitmdump || true 42 | mitmdump \ 43 | -S "$E2E_TARGET_DIR/replay.mitm" \ 44 | --set "confdir=$_path/../resources/mitmproxy" \ 45 | --set upstream_cert=false \ 46 | -k \ 47 | --server-replay-refresh \ 48 | --server-replay-kill-extra \ 49 | --server-replay-nopop \ 50 | --anticache \ 51 | >"logs/${E2E_TARGET-test}-replay-mitm.out" 2>&1 & 52 | E2E_EXIT_HOOK+='pkill mitmdump;' 53 | 54 | export HTTP_PROXY="$proxyUrl" 55 | export 
HTTPS_PROXY="$proxyUrl" 56 | sleep 5 57 | } 58 | 59 | startProxy() { 60 | if [[ $E2E_RECORD ]]; then 61 | startProxyRecord 62 | else 63 | startProxyReplay 64 | fi 65 | } 66 | 67 | exportEthEnvs() { 68 | local _path=$(cd "${BASH_SOURCE[0]%/*}"; pwd) 69 | local r_path="$_path/../resources" 70 | 71 | export ETH_FROM="0x1f8fbe73820765677e68eb6e933dcb3c94c9b708" 72 | export ETH_KEYSTORE="$r_path/keys" 73 | export ETH_PASSWORD="$r_path/password" 74 | } 75 | 76 | ssbId() { 77 | HOME="$E2E_HOME" ssb-server whoami | jq -r .id 78 | } 79 | 80 | ssbReadMessages() { 81 | local _id=$(ssbId) 82 | local _limit="$1" 83 | 84 | HOME="$E2E_HOME" \ 85 | ssb-server createUserStream \ 86 | --id "$_id" \ 87 | --limit "$limit" \ 88 | --reverse --fillCache 1 \ 89 | | jq -s 90 | } 91 | 92 | ssbPublishMessages() { 93 | while IFS= read -r msg; do 94 | HOME="$E2E_HOME" ssb-server publish . <<<"$msg" >/dev/null 95 | done < <(cat) 96 | } 97 | 98 | startSSB() { 99 | echo >&2 "# Start SSB server" 100 | mkdir -p "$E2E_HOME/.ssb" 101 | HOME="$E2E_HOME" \ 102 | ssb-server start >"$E2E_LOGS/${E2E_TARGET-test}-ssb.out" 2>&1 & 103 | E2E_EXIT_HOOK+='pkill ssb-server;' 104 | 105 | sleep 3 106 | } 107 | 108 | spirePublishMessages() { 109 | while IFS= read -r msg; do 110 | HOME="$E2E_HOME" \ 111 | spire -c "$SPIRE_CONFIG" push price <<<"$msg" 112 | done < <(cat) 113 | } 114 | 115 | startLibp2p() { 116 | echo >&2 "# Start libp2p server" 117 | HOME="$E2E_HOME" \ 118 | spire -v debug -c "$SPIRE_CONFIG" agent >"$E2E_LOGS/${E2E_TARGET-test}-spire.out" 2>&1 & 119 | E2E_EXIT_HOOK+='pkill spire;' 120 | 121 | sleep 15 122 | } 123 | 124 | startGeth() { 125 | local _path=$(cd "${BASH_SOURCE[0]%/*}"; pwd) 126 | echo >&2 "# Start Geth testnet" 127 | { 128 | HOME="$E2E_HOME" dapp testnet 2>&1 "$E2E_LOGS/${E2E_TARGET-test}-dapp.out" & 130 | E2E_EXIT_HOOK+='pkill dapp;' 131 | 132 | grep -q 'DAPP_EXIT\|0x[a-zA-Z0-9]\{40\}' <(tail -f "$E2E_LOGS/${E2E_TARGET-test}-dapp.out") 133 | 134 | export ETH_FROM=$(grep -o 
'0x[a-zA-Z0-9]\{40\}' < "$E2E_LOGS/${E2E_TARGET-test}-dapp.out") 135 | export ETH_KEYSTORE="$E2E_HOME"/.dapp/testnet/8545/keystore 136 | export ETH_PASSWORD="$_path/../resources/password" 137 | export ETH_RPC_URL="http://127.0.0.1:8545" 138 | export ETH_GAS=7000000 139 | } 140 | 141 | startOmnia() { 142 | echo >&2 "# Start omnia" 143 | { 144 | HOME="$E2E_HOME" omnia 2>&1 || echo "OMNIA_EXIT" 145 | } >"$E2E_LOGS/${E2E_TARGET-test}-omnia.out" & 146 | 147 | grep -q "OMNIA_EXIT\|${1:-${E2E_OMNIA_STOP_PHRASE:-Sleeping}}" \ 148 | <(tail -f "$E2E_LOGS/${E2E_TARGET-test}-omnia.out") 149 | pkill omnia 150 | } 151 | -------------------------------------------------------------------------------- /nix/sources.json: -------------------------------------------------------------------------------- 1 | { 2 | "dapptools": { 3 | "branch": "master", 4 | "description": "Dapp, Seth, Hevm, and more", 5 | "homepage": "https://dapp.tools", 6 | "owner": "dapphub", 7 | "repo": "dapptools", 8 | "rev": "249c8cca1a4806b8bc47882ca7214ab380d29082", 9 | "sha256": "0lp9s1l0n6s2jhaccymrp6437cg6brag5cfn5c0nnaycvx1mgrdr", 10 | "type": "tarball", 11 | "url": "https://github.com/dapphub/dapptools/archive/249c8cca1a4806b8bc47882ca7214ab380d29082.tar.gz", 12 | "url_template": "https://github.com///archive/.tar.gz" 13 | }, 14 | "mach-nix": { 15 | "branch": "master", 16 | "description": "Create highly reproducible python environments", 17 | "homepage": "", 18 | "owner": "DavHau", 19 | "repo": "mach-nix", 20 | "rev": "b56a541af15efd2062ffb9abb69f63dcceafb64d", 21 | "sha256": "0zdifqdq478q938wm3pwdph8xv9ksk9qvf6s3kckchyzw18x28k0", 22 | "type": "tarball", 23 | "url": "https://github.com/DavHau/mach-nix/archive/b56a541af15efd2062ffb9abb69f63dcceafb64d.tar.gz", 24 | "url_template": "https://github.com///archive/.tar.gz" 25 | }, 26 | "makerpkgs": { 27 | "branch": "master", 28 | "description": "Common nixpkgs", 29 | "homepage": "", 30 | "owner": "makerdao", 31 | "repo": "makerpkgs", 32 | "rev": 
"76151418a2d9ffcfa55dec8acda5bde10f504224", 33 | "sha256": "0vknhf3n54v6rgvq23rhi3b3lraczn9yzd7cw8i1hdxkb028f34r", 34 | "type": "tarball", 35 | "url": "https://github.com/makerdao/makerpkgs/archive/76151418a2d9ffcfa55dec8acda5bde10f504224.tar.gz", 36 | "url_template": "https://github.com///archive/.tar.gz" 37 | }, 38 | "niv": { 39 | "branch": "master", 40 | "description": "Easy dependency management for Nix projects", 41 | "homepage": "https://github.com/nmattia/niv", 42 | "owner": "nmattia", 43 | "repo": "niv", 44 | "rev": "65a61b147f307d24bfd0a5cd56ce7d7b7cc61d2e", 45 | "sha256": "17mirpsx5wyw262fpsd6n6m47jcgw8k2bwcp1iwdnrlzy4dhcgqh", 46 | "type": "tarball", 47 | "url": "https://github.com/nmattia/niv/archive/65a61b147f307d24bfd0a5cd56ce7d7b7cc61d2e.tar.gz", 48 | "url_template": "https://github.com///archive/.tar.gz" 49 | }, 50 | "nixpkgs": { 51 | "branch": "nixpkgs-unstable", 52 | "description": "Nix Packages collection", 53 | "homepage": "", 54 | "owner": "NixOS", 55 | "repo": "nixpkgs", 56 | "rev": "ee084c02040e864eeeb4cf4f8538d92f7c675671", 57 | "sha256": "1x8amcixdaw3ryyia32pb706vzhvn5whq9n8jin0qcha5qnm1fnh", 58 | "type": "tarball", 59 | "url": "https://github.com/NixOS/nixpkgs/archive/ee084c02040e864eeeb4cf4f8538d92f7c675671.tar.gz", 60 | "url_template": "https://github.com///archive/.tar.gz" 61 | }, 62 | "omnia": { 63 | "branch": "v1.9.5", 64 | "description": null, 65 | "homepage": "", 66 | "owner": "chronicleprotocol", 67 | "repo": "omnia", 68 | "rev": "dc42f77625b4961cd649623d0bca1ee5a284909c", 69 | "sha256": "114d6l5kzxdal7vrpz197gsm05ij20p3ng2qwcdma0hma2hk0w1q", 70 | "type": "tarball", 71 | "url": "https://github.com/chronicleprotocol/omnia/archive/dc42f77625b4961cd649623d0bca1ee5a284909c.tar.gz", 72 | "url_template": "https://github.com///archive/.tar.gz" 73 | }, 74 | "oracle-suite": { 75 | "branch": "v0.4.2", 76 | "description": null, 77 | "homepage": null, 78 | "owner": "chronicleprotocol", 79 | "repo": "oracle-suite", 80 | "rev": 
"8102e416cb3bfcf7dc9b8431140770e1b6c1d528", 81 | "sha256": "0jspw0cilnn9p7iv217nps33lv8kql9x6jsia5659dppb8lcz2vn", 82 | "type": "tarball", 83 | "url": "https://github.com/chronicleprotocol/oracle-suite/archive/8102e416cb3bfcf7dc9b8431140770e1b6c1d528.tar.gz", 84 | "url_template": "https://github.com///archive/.tar.gz" 85 | }, 86 | "setzer": { 87 | "branch": "v0.4.2", 88 | "description": null, 89 | "homepage": "", 90 | "owner": "chronicleprotocol", 91 | "repo": "setzer", 92 | "rev": "61a356fa3b4a3104761414facfe128d160ca198b", 93 | "sha256": "1jpyc48n75vr8gy6hvgpkxczkw707idxy2xxv9qcqsr048bjbnbp", 94 | "type": "tarball", 95 | "url": "https://github.com/chronicleprotocol/setzer/archive/61a356fa3b4a3104761414facfe128d160ca198b.tar.gz", 96 | "url_template": "https://github.com///archive/.tar.gz" 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /tests/smoke/test: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | _path="$(cd ${BASH_SOURCE[0]%/*}; pwd)" 3 | r_path="$_path/../resources" 4 | 5 | . 
"$_path/../lib/include.sh" 6 | 7 | exportEthEnvs 8 | env | grep ETH_ >&2 9 | 10 | from_addr="$ETH_FROM" 11 | keystore_path="$ETH_KEYSTORE" 12 | key_path="$ETH_PASSWORD" 13 | 14 | install_feed() { 15 | install-omnia feed \ 16 | --from "$from_addr" \ 17 | --keystore "$keystore_path" \ 18 | --password "$key_path" \ 19 | --ssb-external "example.org" \ 20 | --ssb-caps "$r_path/caps.json" \ 21 | | sudo sh 22 | } 23 | 24 | install_relayer() { 25 | # Start geth testnet 26 | rm -rf "$HOME/.dapp/testnet" 27 | nohup >/dev/null 2>&1 \ 28 | dapp --testnet-launch & 29 | sleep 2 30 | 31 | # Change some config params 32 | updatedConf=$(jq '.friends.hops = 2' "$HOME/.ssb/config") 33 | printf %s "$updatedConf" > "$HOME/.ssb/config" 34 | 35 | install-omnia relay \ 36 | --network "testnet" \ 37 | --ssb-external "example-2.org" \ 38 | | sudo sh 39 | } 40 | 41 | after() { 42 | pkill dapp 43 | sudo systemctl stop omnia ssb-server 44 | } 45 | 46 | . "$_path/../lib/tap.sh" 47 | 48 | plan 33 49 | timeout 120 50 | 51 | note <<<"INSTALL FEED" 52 | 53 | feed_start=$(date +"%F %T") 54 | assert "Install feed" run install_feed 55 | 56 | assert "Scuttlebot config installed" \ 57 | test -f $HOME/.ssb/config 58 | 59 | cat $HOME/.ssb/config > $wdir/output 60 | assert "SSB external IP set" \ 61 | json '.connections.incoming.net[0].external' <<<'"example.org"' 62 | assert "SSB external IP set" \ 63 | json '.connections.incoming.ws[0].external' <<<'"example.org"' 64 | assert "SSB caps set" \ 65 | json .caps < "$r_path/caps.json" 66 | assert "SSB hops set" \ 67 | json .friends.hops <<<"6" 68 | 69 | assert "Omnia feed config installed" \ 70 | test -f /etc/omnia.conf 71 | 72 | cat /etc/omnia.conf > $wdir/output 73 | assert "Mode is feed" \ 74 | json .mode <<<'"feed"' 75 | assert "Has set ethereum from address" \ 76 | json .ethereum.from <<<"\"$from_addr\"" 77 | assert "Has set ethereum keystore" \ 78 | json .ethereum.keystore <<<"\"$keystore_path\"" 79 | assert "Has set keystore password file" \ 80 | json 
.ethereum.password <<<"\"$key_path\"" 81 | 82 | sleep 2 83 | 84 | assert "Omnia feed service is active" \ 85 | match "Active: active" < <(capture systemctl status omnia) 86 | assert "Scuttlebot service is active" \ 87 | match "Active: active" < <(capture systemctl status ssb-server) 88 | 89 | sleep 5 90 | 91 | assert "Omnia feed service is up" \ 92 | match "INITIALIZATION COMPLETE" < <(capture journalctl --since "$feed_start" -u omnia) 93 | assert "Scuttlebot service is up" \ 94 | match "my key ID:" < <(capture journalctl --since "$feed_start" -u ssb-server) 95 | 96 | assert "SSB create invite" \ 97 | match '^"example.org:8007:' < <(capture ssb-server invite.create 1) 98 | 99 | note <<<"INSTALL RELAY" 100 | 101 | relayer_start=$(date +"%F %T") 102 | assert "Install relay" run install_relayer 103 | 104 | assert "Scuttlebot config installed" \ 105 | test -f $HOME/.ssb/config 106 | 107 | cat $HOME/.ssb/config > $wdir/output 108 | assert "SSB external IP set" \ 109 | json '.connections.incoming.net[0].external' <<<'"example-2.org"' 110 | assert "SSB external IP set" \ 111 | json '.connections.incoming.ws[0].external' <<<'"example-2.org"' 112 | assert "SSB caps set" \ 113 | json .caps < "$r_path/caps.json" 114 | assert "SSB hops set" \ 115 | json .friends.hops <<<"2" 116 | 117 | assert "Omnia relay config installed" \ 118 | test -f /etc/omnia.conf 119 | 120 | cat /etc/omnia.conf > $wdir/output 121 | assert "Mode is relay" \ 122 | json .mode <<<'"relayer"' 123 | assert "Ethereum from address not overwritten" \ 124 | json .ethereum.from <<<"\"$from_addr\"" 125 | assert "Ethereum keystore not overwritten" \ 126 | json .ethereum.keystore <<<"\"$keystore_path\"" 127 | assert "Keystore password file not overwritten" \ 128 | json .ethereum.password <<<"\"$key_path\"" 129 | assert "Has set ethereum network" \ 130 | json .ethereum.network <<<'"testnet"' 131 | 132 | sleep 2 133 | 134 | assert "Omnia relay service is active" \ 135 | match "Active: active" < <(capture systemctl 
status omnia) 136 | assert "Scuttlebot service is active" \ 137 | match "Active: active" < <(capture systemctl status ssb-server) 138 | 139 | sleep 5 140 | 141 | assert "Omnia relay service is up" \ 142 | match "INITIALIZATION COMPLETE" < <(capture journalctl --since "$relayer_start" -u omnia) 143 | assert "Scuttlebot service is up" \ 144 | match "my key ID:" < <(capture journalctl --since "$relayer_start" -u ssb-server) 145 | 146 | assert "SSB create invite" \ 147 | match '^"example-2.org:8007:' < <(capture ssb-server invite.create 1) -------------------------------------------------------------------------------- /vagrant/oracle.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eu 4 | 5 | _command="${1:-""}" 6 | 7 | if [[ -z "$_command" ]]; then 8 | cat <&2 "No cleanup needed" 43 | echo "Installing from: $_file" 44 | nix-env --install --file "$_file" 45 | fi 46 | 47 | if [[ "$1" == "configure" ]]; then 48 | opts=() 49 | 50 | opts+=(--ssb-caps "/vagrant/tests/resources/caps.json") 51 | opts+=(--ssb-port "8008") 52 | opts+=(--ssb-host "localhost") 53 | opts+=(--override-origin "openexchangerates" "apiKey" "xxx") 54 | opts+=(--ssb-external "$(curl -s ifconfig.me)") 55 | opts+=(--keystore "/vagrant/tests/resources/keys") 56 | opts+=(--password "/vagrant/tests/resources/password") 57 | opts+=(--from "0x$(jq -c -r '.address' "/vagrant/tests/resources/keys/UTC--2020-04-20T06-52-55.157141634Z--1f8fbe73820765677e68eb6e933dcb3c94c9b708")") 58 | opts+=(--eth-rpc "http://127.0.0.1:8888") 59 | # opts+=(--eth-rpc "http://127.0.0.1:8889") 60 | opts+=(--l2-eth-rpc "http://127.0.0.1:8888") 61 | # opts+=(--l2-eth-rpc "http://127.0.0.1:8889") 62 | 63 | _mode="feed" 64 | _restart="" 65 | _log="" 66 | while [[ -n "${2-}" ]]; do 67 | case "$2" in 68 | --relay) 69 | _mode="relay" 70 | ;; 71 | --gofer) 72 | opts+=(--no-source --add-source "gofer") 73 | ;; 74 | --spire) 75 | opts+=(--no-transport --add-transport 
"transport-spire") 76 | ;; 77 | --ssb) 78 | opts+=(--no-transport --add-transport "transport-ssb" --add-transport "transport-ssb-rpc") 79 | ;; 80 | --ssb-rpc) 81 | opts+=(--no-transport --add-transport "transport-ssb-rpc") 82 | ;; 83 | --restart) 84 | _restart="true" 85 | ;; 86 | --log) 87 | _log="true" 88 | ;; 89 | --debug) 90 | export ORACLE_DEBUG="true" 91 | opts+=(--debug) 92 | ;; 93 | --verbose) 94 | opts+=(--verbose --logFormat "json") 95 | ;; 96 | *) 97 | echo >&2 "\"$2\" is not a valid option" 98 | ;; 99 | esac 100 | shift 101 | done 102 | 103 | cmd=("install-omnia" "$_mode") 104 | cmd+=("${opts[@]}") 105 | 106 | echo -e "\n\n${cmd[*]}\n\n" 107 | 108 | "${cmd[@]}" 109 | 110 | sudo systemctl daemon-reload 111 | 112 | [[ -z "$_restart" ]] || oracle restart 113 | [[ -z "$_log" ]] || oracle log 114 | fi 115 | 116 | if [[ "$1" == "enable" ]]; then 117 | sudo systemctl enable --now ssb-server 118 | sudo systemctl enable --now gofer-agent 119 | sudo systemctl enable --now spire-agent 120 | sudo systemctl enable --now splitter-agent 121 | sudo systemctl enable --now leeloo-agent 122 | 123 | sudo systemctl enable --now omnia 124 | 125 | oracle status 126 | fi 127 | 128 | if [[ "$1" == "start" || "$1" == "stop" || "$1" == "restart" ]]; then 129 | sudo systemctl "$1" omnia 130 | 131 | sudo systemctl "$1" ssb-server 132 | sudo systemctl "$1" gofer-agent 133 | sudo systemctl "$1" spire-agent 134 | sudo systemctl "$1" splitter-agent 135 | sudo systemctl "$1" leeloo-agent 136 | 137 | oracle status 138 | fi 139 | 140 | if [[ "$1" == "connect" ]]; then 141 | while IFS= read -r line; do 142 | ssb-server invite.accept "$line" 143 | done < /vagrant/.local/ssb-invites.txt 144 | fi 145 | 146 | if [[ "$1" == "status" ]]; then 147 | systemctl status ssb-server omnia gofer-agent spire-agent splitter-agent leeloo-agent --no-pager --lines=0 148 | fi 149 | 150 | if [[ "$1" == "log-all" ]]; then 151 | journalctl --quiet --follow -u omnia -u ssb-server -u gofer-agent -u spire-agent -u 
splitter-agent -u leeloo-agent 152 | fi 153 | 154 | if [[ "$1" == "log" ]]; then 155 | journalctl -q -f -u "${2:-omnia}" 156 | fi 157 | 158 | if [[ "$1" == "state" ]]; then 159 | watch du -h "$HOME/.ssb/flume/log.offset" 160 | fi 161 | 162 | if [[ "$1" == "smoke" ]]; then 163 | nix-shell /vagrant/tests --run testSmoke 164 | fi 165 | 166 | if [[ "$1" == "rec" ]]; then 167 | nix-shell /vagrant/tests --run recordE2E 168 | fi -------------------------------------------------------------------------------- /omnia/config/relayer-goerli.conf: -------------------------------------------------------------------------------- 1 | { 2 | "ethereum": { 3 | "from": "0x690a4a3e3964d61a97c0961f7d4314bbf2f8e434", 4 | "gasPrice": { 5 | "multiplier": 1, 6 | "source": "node" 7 | }, 8 | "keystore": "/nix/store/wb8j9s44wwrzwsfa2w9f66b73hzljxvm-0-5-1-ethereum/keystore", 9 | "network": "https://goerli.infura.io/v3/c63ad234ae824dc4a6f94f1ea7eb3b41", 10 | "password": "/nix/store/wb8j9s44wwrzwsfa2w9f66b73hzljxvm-0-5-1-ethereum/password" 11 | }, 12 | "feeds": [ 13 | "0x5c01f0f08e54b85f4cab8c6a03c9425196fe66dd", 14 | "0x75fbd0aace74fb05ef0f6c0ac63d26071eb750c9", 15 | "0x0c4fc7d66b7b6c684488c1f218caa18d4082da18", 16 | "0xc50df8b5dcb701abc0d6d1c7c99e6602171abbc4", 17 | "0xc4b015761a92f0de17a2d065298fb048d612c6d6" 18 | ], 19 | "mode": "relay", 20 | "options": { 21 | "debug": false, 22 | "goferConfig": "/nix/store/18h2h90s1l38rz7fli9hn2yvrhammv3y-gofer.json", 23 | "interval": 60, 24 | "logFormat": "json", 25 | "msgLimit": 35, 26 | "setzerCacheExpiry": 120, 27 | "setzerMinMedian": 3, 28 | "setzerTimeout": 10, 29 | "spireConfig": "/nix/store/43fmzcch576csg30xg0iidfzp3i8c4qa-spire.json", 30 | "srcTimeout": 10, 31 | "verbose": true 32 | }, 33 | "pairs": { 34 | "AAVE/USD": { 35 | "msgExpiration": 1800, 36 | "oracle": "0x48d9b9B980EcB23601E4cE5D0f828Ad1F3c8673f", 37 | "oracleExpiration": 14400, 38 | "oracleSpread": 3 39 | }, 40 | "BAL/USD": { 41 | "msgExpiration": 1800, 42 | "oracle": 
"0x0f18931AF4BD88a77640E86977E41691A6773C81", 43 | "oracleExpiration": 14400, 44 | "oracleSpread": 3 45 | }, 46 | "BAT/USD": { 47 | "msgExpiration": 1800, 48 | "oracle": "0x559492e2D2CB96da572aB35b551049f2414039DB", 49 | "oracleExpiration": 14400, 50 | "oracleSpread": 3 51 | }, 52 | "BTC/USD": { 53 | "msgExpiration": 1800, 54 | "oracle": "0x586409bb88cF89BBAB0e106b0620241a0e4005c9", 55 | "oracleExpiration": 14400, 56 | "oracleSpread": 3 57 | }, 58 | "COMP/USD": { 59 | "msgExpiration": 1800, 60 | "oracle": "0x41Bd42D1f64489a85CB552a9C122546bF0986399", 61 | "oracleExpiration": 14400, 62 | "oracleSpread": 3 63 | }, 64 | "ETH/USD": { 65 | "msgExpiration": 1800, 66 | "oracle": "0xD81834Aa83504F6614caE3592fb033e4b8130380", 67 | "oracleExpiration": 14400, 68 | "oracleSpread": 3 69 | }, 70 | "KNC/USD": { 71 | "msgExpiration": 1800, 72 | "oracle": "0x9e2ab3c02229458Ac611De9f4FF56792cf0c6D30", 73 | "oracleExpiration": 14400, 74 | "oracleSpread": 3 75 | }, 76 | "LINK/USD": { 77 | "msgExpiration": 1800, 78 | "oracle": "0xe4919256D404968566cbdc5E5415c769D5EeBcb0", 79 | "oracleExpiration": 14400, 80 | "oracleSpread": 3 81 | }, 82 | "LRC/USD": { 83 | "msgExpiration": 1800, 84 | "oracle": "0x9D86EC8d17AC9F27B67626710e70598198c53846", 85 | "oracleExpiration": 14400, 86 | "oracleSpread": 3 87 | }, 88 | "MANA/USD": { 89 | "msgExpiration": 1800, 90 | "oracle": "0xCCce898497e139831523cc9D23c948138dDF67f6", 91 | "oracleExpiration": 14400, 92 | "oracleSpread": 3 93 | }, 94 | "MATIC/USD": { 95 | "msgExpiration": 1800, 96 | "oracle": "0x4b4e2a0b7a560290280f083c8b5174fb706d7926", 97 | "oracleExpiration": 14400, 98 | "oracleSpread": 3 99 | }, 100 | "UNI/USD": { 101 | "msgExpiration": 1800, 102 | "oracle": "0xF87BE13f2b081d8D555f31d6bd6590Fd817a99FA", 103 | "oracleExpiration": 14400, 104 | "oracleSpread": 3 105 | }, 106 | "USDT/USD": { 107 | "msgExpiration": 1800, 108 | "oracle": "0x44084f056e9405FB41343ACb4E2E49f75b75640f", 109 | "oracleExpiration": 14400, 110 | "oracleSpread": 3 111 | }, 
112 | "YFI/USD": { 113 | "msgExpiration": 1800, 114 | "oracle": "0x38D27Ba21E1B2995d0ff9C1C070c5c93dd07cB31", 115 | "oracleExpiration": 14400, 116 | "oracleSpread": 3 117 | }, 118 | "ZRX/USD": { 119 | "msgExpiration": 1800, 120 | "oracle": "0x5C964118cD17B6b7b8a15C5De93b2E23c24d5789", 121 | "oracleExpiration": 14400, 122 | "oracleSpread": 3 123 | } 124 | }, 125 | "services": { 126 | "scuttlebotIdMap": { 127 | "0x0c4fc7d66b7b6c684488c1f218caa18d4082da18": "@mfvX393zEzwt9DGwdF096U3gMY3XLl5ZqzUdN88OVrk=.ed25519", 128 | "0x5c01f0f08e54b85f4cab8c6a03c9425196fe66dd": "@nacYnIPq7wJgv5-RHWtYjUxL_-wuVdnQ4UG8twV7gO8=.ed25519", 129 | "0x75fbd0aace74fb05ef0f6c0ac63d26071eb750c9": "@i0ARgRToLWc4OvMLysvO5vvbK3dbAubn8rQbfD0XyfM=.ed25519", 130 | "0xc4b015761a92f0de17a2d065298fb048d612c6d6": "@FwihkZCKn0aJ0FZWF_zrKQF2EirhAlKUd3XiyrZ-FuA=.ed25519", 131 | "0xc50df8b5dcb701abc0d6d1c7c99e6602171abbc4": "@zbHB0ZGIWA4mR24AeKpZfrKNUQJw1J0sNsMlxRLKniI=.ed25519" 132 | } 133 | }, 134 | "sources": [ 135 | "gofer", 136 | "setzer" 137 | ], 138 | "transports": [ 139 | "transport-spire", 140 | "transport-ssb" 141 | ] 142 | } -------------------------------------------------------------------------------- /nixos/omnia.nix: -------------------------------------------------------------------------------- 1 | { oracle-suite }: 2 | { pkgs, config, lib, ... }: 3 | let 4 | writeJSON = name: attrs: pkgs.writeText name (builtins.toJSON attrs); 5 | 6 | cfg = config.services.omnia; 7 | ssbIncomingPorts = (if (cfg.ssbConfig ? connections) then 8 | (if (cfg.ssbConfig.connections ? incoming && cfg.ssbConfig.connections.incoming ? net) then 9 | map (x: if (x ? port) then x.port else 8008) cfg.ssbConfig.connections.incoming.net 10 | else 11 | [ 8008 ]) 12 | else 13 | (if (cfg.ssbConfig ? 
port) then [ cfg.ssbConfig.port ] else [ 8008 ])); 14 | 15 | ssb-config = writeJSON "ssb-config" cfg.ssbConfig; 16 | omnia-config = writeJSON "omnia.conf" { inherit (cfg) pairs mode feeds ethereum options sources transports services; }; 17 | 18 | inherit (import ../. { }) omnia ssb-server; 19 | 20 | name = "omnia"; 21 | home = "/var/lib/${name}"; 22 | in { 23 | config = lib.mkIf cfg.enable { 24 | environment.systemPackages = with pkgs; [ ssb-server omnia ]; 25 | 26 | networking.firewall.allowedTCPPorts = ssbIncomingPorts; 27 | 28 | systemd.services.gofer = { 29 | enable = true; 30 | description = "Gofer Agent"; 31 | after = [ "network.target" ]; 32 | wantedBy = [ "multi-user.target" "omnia.service" ]; 33 | 34 | serviceConfig = { 35 | Type = "simple"; 36 | User = name; 37 | Group = name; 38 | PermissionsStartOnly = true; 39 | Restart = "always"; 40 | RestartSec = 5; 41 | ExecStart = "${oracle-suite}/bin/gofer --config ${cfg.options.goferConfig} agent"; 42 | }; 43 | }; 44 | 45 | systemd.services.spire = { 46 | enable = true; 47 | description = "Spire Agent"; 48 | after = [ "network.target" ]; 49 | wantedBy = [ "multi-user.target" "omnia.service" ]; 50 | 51 | serviceConfig = { 52 | Type = "simple"; 53 | User = name; 54 | Group = name; 55 | PermissionsStartOnly = true; 56 | Restart = "always"; 57 | RestartSec = 5; 58 | ExecStart = "${oracle-suite}/bin/spire --config ${cfg.options.spireConfig} --log.verbosity debug agent"; 59 | }; 60 | }; 61 | 62 | systemd.services.splitter = { 63 | enable = true; 64 | description = "RPC Splitter Agent"; 65 | after = [ "network.target" ]; 66 | wantedBy = [ "multi-user.target" "gofer.service" ]; 67 | 68 | serviceConfig = { 69 | Type = "simple"; 70 | User = name; 71 | Group = name; 72 | PermissionsStartOnly = true; 73 | Restart = "always"; 74 | RestartSec = 5; 75 | ExecStart = "${oracle-suite}/bin/rpc-splitter --listen 127.0.0.1:9989 --eth-rpc=${ 76 | lib.concatStringsSep "," cfg.ethRpcList 77 | } --log.verbosity debug agent"; 78 | }; 79 
| }; 80 | 81 | systemd.services.ssb-server = { 82 | enable = true; 83 | description = "Scuttlebot server"; 84 | after = [ "network.target" ]; 85 | wantedBy = [ "multi-user.target" "omnia.service" ]; 86 | 87 | serviceConfig = { 88 | Type = "simple"; 89 | User = name; 90 | Group = name; 91 | WorkingDirectory = home; 92 | PermissionsStartOnly = true; 93 | Restart = "always"; 94 | RestartSec = 5; 95 | ExecStart = "${ssb-server}/bin/ssb-server start"; 96 | }; 97 | 98 | preStart = '' 99 | installSsbFile() { 100 | local from="$1" 101 | local target="${home}/.ssb/$2" 102 | if [[ ! -e "$target" ]]; then 103 | echo >&2 "SSB Service Setup: $target not found! Initiallizing with $from -> $target" 104 | cp -f "$from" "$target" 105 | else 106 | echo >&2 "SSB Service Setup: $target exists! Not overwriting" 107 | fi 108 | } 109 | 110 | mkdir -p "${home}/.ssb" 111 | '' + (lib.optionalString (cfg.ssbInitSecret != null) '' 112 | installSsbFile "${cfg.ssbInitSecret}" "secret" 113 | '') + (lib.optionalString (cfg.ssbInitGossip != null) '' 114 | installSsbFile "${cfg.ssbInitGossip}" "gossip.json" 115 | '') + '' 116 | ln -sf "${ssb-config}" "${home}/.ssb/config" 117 | chown -R ${name}:${name} "${home}/.ssb" 118 | chmod -R ug+w "${home}/.ssb" 119 | ''; 120 | }; 121 | 122 | systemd.services.omnia = { 123 | enable = true; 124 | description = "Omnia oracle client"; 125 | after = [ "network.target" "ssb-server.service" ]; 126 | wants = [ "ssb-server.service" ]; 127 | wantedBy = [ "multi-user.target" ]; 128 | 129 | environment = { 130 | OMNIA_CONFIG = omnia-config; 131 | OMNIA_DEBUG = toString cfg.options.debug; 132 | OMNIA_LOG_FORMAT = cfg.options.logFormat; 133 | OMNIA_VERBOSE = toString cfg.options.verbose; 134 | GOFER_CONFIG = toString cfg.options.goferConfig; 135 | SPIRE_CONFIG = toString cfg.options.spireConfig; 136 | }; 137 | 138 | serviceConfig = { 139 | Type = "simple"; 140 | User = name; 141 | Group = name; 142 | WorkingDirectory = home; 143 | PermissionsStartOnly = true; 144 | 
def get_msg(
        instruction_type: int, vault0: int, vault1: int, amount0: int, amount1: int, token0: int,
        token1_or_pub_key: int, nonce: int, expiration_timestamp: int,
        hash=pedersen_hash, condition: Optional[int] = None) -> int:
    """
    Creates a message to sign on.

    The scalar arguments are bit-packed big-endian into one integer:
    instruction_type | vault0 (31b) | vault1 (31b) | amount0 (63b) | amount1 (63b)
    | nonce (31b) | expiration_timestamp (22b), which is then folded together
    with the token/key field elements via `hash` (Pedersen by default).
    A non-None `condition` is hashed in as well, marking a conditional transfer.
    """
    packed_message = instruction_type
    packed_message = packed_message * 2**31 + vault0
    packed_message = packed_message * 2**31 + vault1
    packed_message = packed_message * 2**63 + amount0
    packed_message = packed_message * 2**63 + amount1
    packed_message = packed_message * 2**31 + nonce
    packed_message = packed_message * 2**22 + expiration_timestamp
    if condition is not None:
        # A message representing a conditional transfer. The condition is interpreted by the
        # application.
        return hash(hash(hash(token0, token1_or_pub_key), condition), packed_message)

    return hash(hash(token0, token1_or_pub_key), packed_message)


def get_limit_order_msg(
        vault_sell: int, vault_buy: int, amount_sell: int, amount_buy: int, token_sell: int,
        token_buy: int, nonce: int, expiration_timestamp: int,
        hash=pedersen_hash) -> int:
    """
    party_a sells amount_sell coins of token_sell from vault_sell.
    party_a buys amount_buy coins of token_buy into vault_buy.

    All range asserts mirror the bit widths used by get_msg's packing.
    """
    assert 0 <= vault_sell < 2**31
    assert 0 <= vault_buy < 2**31
    assert 0 <= amount_sell < 2**63
    assert 0 <= amount_buy < 2**63
    assert 0 <= token_sell < FIELD_PRIME
    assert 0 <= token_buy < FIELD_PRIME
    assert 0 <= nonce < 2**31
    assert 0 <= expiration_timestamp < 2**22

    instruction_type = 0  # 0 encodes a limit order.
    return get_msg(
        instruction_type, vault_sell, vault_buy, amount_sell, amount_buy, token_sell, token_buy,
        nonce, expiration_timestamp, hash=hash)


def get_transfer_msg(
        amount: int, nonce: int, sender_vault_id: int, token: int, receiver_vault_id: int,
        receiver_public_key: int, expiration_timestamp: int,
        hash=pedersen_hash, condition: Optional[int] = None) -> int:
    """
    Transfer `amount` of `token` from `sender_vault_id` to `receiver_vault_id`.
    The transfer is conditional only if `condition` is given (not None).
    """
    assert 0 <= sender_vault_id < 2**31
    assert 0 <= receiver_vault_id < 2**31
    assert 0 <= amount < 2**63
    assert 0 <= token < FIELD_PRIME
    assert 0 <= receiver_public_key < FIELD_PRIME
    assert 0 <= nonce < 2**31
    assert 0 <= expiration_timestamp < 2**22

    TRANSFER = 1
    CONDITIONAL_TRANSFER = 2
    # Bug fix: select the instruction type with `is not None`, not truthiness.
    # A condition of 0 satisfies the range assert below and makes get_msg()
    # hash a *conditional* message (get_msg checks `condition is not None`),
    # yet truthiness would have tagged the instruction_type as a plain
    # TRANSFER — an inconsistent encoding.
    instruction_type = CONDITIONAL_TRANSFER if condition is not None else TRANSFER
    assert condition is None or 0 <= condition < FIELD_PRIME

    return get_msg(
        instruction_type, sender_vault_id, receiver_vault_id, amount, 0, token, receiver_public_key,
        nonce, expiration_timestamp, hash=hash, condition=condition)
def get_price_msg(oracle_name: int, asset_pair: int, timestamp: int, price: int) -> int:
    """
    Builds the field element that a price-feed oracle signs.

    Inputs:
      oracle_name - a 40-bit number, describes the oracle (e.g. hex encoding of "Maker").
      price       - a 120-bit number.
      asset_pair  - a 128-bit number.
      timestamp   - a 32-bit number, seconds since epoch.

    Returns a number less than FIELD_PRIME, obtained by applying the Pedersen
    hash to two packed words (big-endian layout):

      first number:  | 0 (84 bits)  | asset_pair (128 bits) | oracle_name (40 bits) |
      second number: | 0 (100 bits) | price (120 bits)      | timestamp (32 bits)   |
    """
    assert 0 <= oracle_name < 2**40
    assert 0 <= asset_pair < 2**128
    assert 0 <= timestamp < 2**32
    assert 0 <= price < 2**120

    # First word: the asset pair shifted above the 40-bit oracle name.
    first_number = (asset_pair << 40) + oracle_name

    # Second word: timestamp in the 32 LSBs, price above it.
    second_number = (price << 32) + timestamp

    return pedersen_hash(first_number, second_number)
minimum amount of sources for median 82 | ''; 83 | default = 3; 84 | }; 85 | 86 | setzerEthRpcUrl = lib.mkOption { 87 | type = lib.types.str; 88 | default = "http://127.0.0.1:9989"; 89 | }; 90 | 91 | goferConfig = lib.mkOption { 92 | type = lib.types.path; 93 | description = '' 94 | Path to Gofer config file. 95 | ''; 96 | default = passJSON "gofer.json" ../systemd/gofer.json; 97 | }; 98 | 99 | spireConfig = lib.mkOption { 100 | type = lib.types.path; 101 | description = '' 102 | Path to Spire config file. 103 | ''; 104 | default = passJSON "spire.json" ../systemd/spire.json; 105 | }; 106 | }; 107 | 108 | sources = lib.mkOption { 109 | type = lib.types.listOf (lib.types.enum [ "gofer" "setzer" ]); 110 | description = '' 111 | List of sources to use and order they fallback in. 112 | ''; 113 | default = [ "gofer" "setzer" ]; 114 | }; 115 | 116 | transports = lib.mkOption { 117 | type = lib.types.listOf (lib.types.enum [ "transport-spire" "transport-ssb" ]); 118 | description = '' 119 | Transport CLIs to use. 120 | ''; 121 | default = [ "transport-spire" "transport-ssb" ]; 122 | }; 123 | 124 | ethRpcList = lib.mkOption { 125 | type = lib.types.listOf lib.types.str; 126 | default = [ ]; 127 | }; 128 | 129 | services = { 130 | scuttlebotIdMap = lib.mkOption { 131 | type = lib.types.attrsOf lib.types.str; 132 | description = '' 133 | Map of Ethereum addresses to Scuttlebot IDs. 
134 | ''; 135 | default = { }; 136 | }; 137 | }; 138 | 139 | feeds = lib.mkOption { 140 | type = lib.types.listOf lib.types.str; 141 | description = '' 142 | Scuttlebot feeds 143 | ''; 144 | default = [ ]; 145 | }; 146 | 147 | pairs = lib.mkOption { 148 | type = lib.types.attrsOf lib.types.attrs; 149 | description = '' 150 | Trading pairs 151 | ''; 152 | default = [ ]; 153 | }; 154 | 155 | ethereum = { 156 | from = lib.mkOption { 157 | type = lib.types.str; 158 | example = "0x0000000000000000000000000000000000000000"; 159 | description = '' 160 | Ethereum address to use 161 | ''; 162 | }; 163 | 164 | keystore = lib.mkOption { 165 | type = lib.types.path; 166 | description = '' 167 | Ethereum keystore directory 168 | ''; 169 | }; 170 | 171 | password = lib.mkOption { 172 | type = lib.types.path; 173 | description = '' 174 | Ethereum private key password 175 | ''; 176 | }; 177 | 178 | network = lib.mkOption { 179 | type = lib.types.nullOr lib.types.str; 180 | default = null; 181 | example = "http://localhost:8545"; 182 | description = '' 183 | Ethereum network 184 | ''; 185 | }; 186 | 187 | gasPrice = lib.mkOption { 188 | type = lib.types.attrs; 189 | default = { 190 | source = "node"; 191 | multiplier = 1; 192 | priority = "fast"; 193 | }; 194 | }; 195 | 196 | }; 197 | 198 | ssbConfig = lib.mkOption { 199 | type = lib.types.attrs; 200 | description = '' 201 | Scuttlebot config 202 | ''; 203 | }; 204 | 205 | ssbInitSecret = lib.mkOption { 206 | type = lib.types.nullOr lib.types.path; 207 | description = '' 208 | Scuttlebot secret, if null will generate one 209 | ''; 210 | default = null; 211 | }; 212 | 213 | ssbInitGossip = lib.mkOption { 214 | type = lib.types.nullOr lib.types.path; 215 | description = '' 216 | gossip.json file to init scuttlebot with 217 | ''; 218 | default = null; 219 | }; 220 | } 221 | -------------------------------------------------------------------------------- /tests/lib/tap.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2020 Christopher Fredén 4 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 5 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 7 | 8 | trap "trap - 1 2 3 15; end" EXIT 9 | trap "trap - ERR; err" ERR 10 | trap "trap - INT; int" INT 11 | trap "trap - HUP; timeup" HUP 12 | 13 | set -eo pipefail 14 | 15 | wdir=$(mktemp -d "${TMPDIR:-/tmp}"/tapsh.XXXXXXXX) 16 | 17 | log() { 18 | cat > $wdir/log 19 | if [[ -f $wdir/log && $(wc -c $wdir/log 2>/dev/null | cut -f1 -d' ') = 0 ]]; then 20 | rm $wdir/log 21 | fi 22 | } 23 | note() { sed 's/^/# /'; } 24 | run() { 25 | local ecode=0 26 | { set -x 27 | "$@" || ecode=$? 28 | { set +x; } >/dev/null 2>&1 29 | } >$wdir/log 2>&1 /dev/null 2>&1 38 | } >$wdir/log 2>&1 /dev/null 2>&1 47 | } 2>$wdir/log >$wdir/output /dev/null 2>&1 55 | } 2> >(log) /dev/null 2>&1; then 60 | run after || { [[ -f $wdir/log ]] && note < $wdir/log; } 61 | fi 62 | if [[ ! 
$plan ]]; then 63 | plan $test_count 64 | fi 65 | if [[ $skipped_tests != 0 ]]; then 66 | echo "# Skipped $skipped_tests tests" 67 | fi 68 | if [[ $failed_tests != 0 ]]; then 69 | echo "# Failed $failed_tests out of $test_count tests" 70 | exit_code=${exit_code:-1} 71 | fi 72 | if [[ $test_count != $plan ]]; then 73 | echo "# Plan failed, ran $test_count tests, plan was $plan" 74 | exit_code=${exit_code:-2} 75 | fi 76 | exit_code=${exit_code:-0} 77 | if [[ $exit_code = 0 ]]; then 78 | echo "# Success, ran $((test_count-skipped_tests)) tests!" 79 | fi 80 | cleanup 81 | exit $exit_code 82 | } 83 | int() { 84 | echo "# Test interrupted, test did NOT finish correctly" 85 | exit_code=3 86 | exit 87 | } 88 | err() { 89 | msg="${@+": $@"}" 90 | echo "# Unexpected error, test did NOT run correctly$msg" 91 | exit_code=4 92 | exit 93 | } 94 | timeup() { 95 | timeoutpid="" 96 | echo "# Timeout reached, test took too long" 97 | exit_code=5 98 | exit 99 | } 100 | 101 | clear_timeout() { 102 | if [ "$timeoutpid" ]; then 103 | pkill -PIPE -P $timeoutpid 104 | timeoutpid="" 105 | fi 106 | } 107 | timeout() { 108 | if [ "$timeoutpid" ]; then 109 | echo "# Warning: timeout called more than once! Ignoring" 110 | else 111 | ( sleep $1 112 | kill -HUP 0 113 | ) & timeoutpid=$! 114 | fi 115 | } 116 | 117 | before_done="" 118 | test_count=0 119 | failed_tests=0 120 | skipped_tests=0 121 | exit_code="" 122 | timeoutpid="" 123 | 124 | plan() { 125 | plan=$1 126 | echo 1..$plan 127 | } 128 | 129 | assert() { 130 | if [[ -z $before_done ]] && command -v before >/dev/null 2>&1; then 131 | before_done=1 132 | run before || { [[ -f $wdir/log ]] && note < $wdir/log; exit 3; } 133 | fi 134 | 135 | ((test_count+=1)) 136 | local desc="${1:+$1 }"; shift 137 | local ecode=0 138 | local res 139 | 140 | if [[ ${desc^^} =~ ^\#\ SKIP ]]; then 141 | ((skipped_tests+=1)) 142 | else 143 | "$@" > $wdir/res || ecode=$? 144 | res=$(cat $wdir/res); rm -f $wdir/res 145 | fi 146 | 147 | if [[ $ecode -eq 0 && ! 
$res ]]; then 148 | echo "ok $test_count - $desc> $@" 149 | else 150 | ((failed_tests+=1)) 151 | echo "not ok $test_count - $desc> $@" 152 | if [[ $res || -f $wdir/log ]]; then 153 | echo " ---" 154 | if [[ $res ]]; then 155 | sed 's/^/ /' <<<"$res" 156 | fi 157 | if [[ -f $wdir/log ]]; then 158 | echo " stdout: |-" 159 | sed 's/^/ /' $wdir/log 160 | rm $wdir/log 161 | fi 162 | echo " ..." 163 | fi 164 | fi 165 | } 166 | 167 | output(){ 168 | jq 2>&1 -S "${@-.}" < $wdir/output 169 | } 170 | json() { 171 | jq 2>&1 -S . > $wdir/expect-$test_count.json 172 | output "${@-.}" > $wdir/got-$test_count.json 173 | local res="$(diff -u $wdir/expect-$test_count.json $wdir/got-$test_count.json)" 174 | [[ ! $res ]] || { cat < $wdir/headers-$test_count || ( 223 | touch $wdir/output-$test_count 224 | cat < fetches specs of type . 7 | # 8 | 9 | fetch_file = pkgs: name: spec: 10 | let 11 | name' = sanitizeName name + "-src"; 12 | in 13 | if spec.builtin or true then 14 | builtins_fetchurl { inherit (spec) url sha256; name = name'; } 15 | else 16 | pkgs.fetchurl { inherit (spec) url sha256; name = name'; }; 17 | 18 | fetch_tarball = pkgs: name: spec: 19 | let 20 | name' = sanitizeName name + "-src"; 21 | in 22 | if spec.builtin or true then 23 | builtins_fetchTarball { name = name'; inherit (spec) url sha256; } 24 | else 25 | pkgs.fetchzip { name = name'; inherit (spec) url sha256; }; 26 | 27 | fetch_git = name: spec: 28 | let 29 | ref = 30 | if spec ? ref then spec.ref else 31 | if spec ? branch then "refs/heads/${spec.branch}" else 32 | if spec ? tag then "refs/tags/${spec.tag}" else 33 | abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!"; 34 | in 35 | builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; }; 36 | 37 | fetch_local = spec: spec.path; 38 | 39 | fetch_builtin-tarball = name: throw 40 | ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`. 
41 | $ niv modify ${name} -a type=tarball -a builtin=true''; 42 | 43 | fetch_builtin-url = name: throw 44 | ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`. 45 | $ niv modify ${name} -a type=file -a builtin=true''; 46 | 47 | # 48 | # Various helpers 49 | # 50 | 51 | # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695 52 | sanitizeName = name: 53 | ( 54 | concatMapStrings (s: if builtins.isList s then "-" else s) 55 | ( 56 | builtins.split "[^[:alnum:]+._?=-]+" 57 | ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name) 58 | ) 59 | ); 60 | 61 | # The set of packages used when specs are fetched using non-builtins. 62 | mkPkgs = sources: system: 63 | let 64 | sourcesNixpkgs = 65 | import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; }; 66 | hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath; 67 | hasThisAsNixpkgsPath = == ./.; 68 | in 69 | if builtins.hasAttr "nixpkgs" sources 70 | then sourcesNixpkgs 71 | else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then 72 | import {} 73 | else 74 | abort 75 | '' 76 | Please specify either (through -I or NIX_PATH=nixpkgs=...) or 77 | add a package called "nixpkgs" to your sources.json. 78 | ''; 79 | 80 | # The actual fetching function. 81 | fetch = pkgs: name: spec: 82 | 83 | if ! 
builtins.hasAttr "type" spec then 84 | abort "ERROR: niv spec ${name} does not have a 'type' attribute" 85 | else if spec.type == "file" then fetch_file pkgs name spec 86 | else if spec.type == "tarball" then fetch_tarball pkgs name spec 87 | else if spec.type == "git" then fetch_git name spec 88 | else if spec.type == "local" then fetch_local spec 89 | else if spec.type == "builtin-tarball" then fetch_builtin-tarball name 90 | else if spec.type == "builtin-url" then fetch_builtin-url name 91 | else 92 | abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}"; 93 | 94 | # If the environment variable NIV_OVERRIDE_${name} is set, then use 95 | # the path directly as opposed to the fetched source. 96 | replace = name: drv: 97 | let 98 | saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name; 99 | ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}"; 100 | in 101 | if ersatz == "" then drv else 102 | # this turns the string into an actual Nix path (for both absolute and 103 | # relative paths) 104 | if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. 
+ builtins.getEnv "PWD" + "/${ersatz}"; 105 | 106 | # Ports of functions for older nix versions 107 | 108 | # a Nix version of mapAttrs if the built-in doesn't exist 109 | mapAttrs = builtins.mapAttrs or ( 110 | f: set: with builtins; 111 | listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)) 112 | ); 113 | 114 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295 115 | range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1); 116 | 117 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257 118 | stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1)); 119 | 120 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269 121 | stringAsChars = f: s: concatStrings (map f (stringToCharacters s)); 122 | concatMapStrings = f: list: concatStrings (map f list); 123 | concatStrings = builtins.concatStringsSep ""; 124 | 125 | # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331 126 | optionalAttrs = cond: as: if cond then as else {}; 127 | 128 | # fetchTarball version that is compatible between all the versions of Nix 129 | builtins_fetchTarball = { url, name ? null, sha256 }@attrs: 130 | let 131 | inherit (builtins) lessThan nixVersion fetchTarball; 132 | in 133 | if lessThan nixVersion "1.12" then 134 | fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; })) 135 | else 136 | fetchTarball attrs; 137 | 138 | # fetchurl version that is compatible between all the versions of Nix 139 | builtins_fetchurl = { url, name ? 
null, sha256 }@attrs: 140 | let 141 | inherit (builtins) lessThan nixVersion fetchurl; 142 | in 143 | if lessThan nixVersion "1.12" then 144 | fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; })) 145 | else 146 | fetchurl attrs; 147 | 148 | # Create the final "sources" from the config 149 | mkSources = config: 150 | mapAttrs ( 151 | name: spec: 152 | if builtins.hasAttr "outPath" spec 153 | then abort 154 | "The values in sources.json should not have an 'outPath' attribute" 155 | else 156 | spec // { outPath = replace name (fetch config.pkgs name spec); } 157 | ) config.sources; 158 | 159 | # The "config" used by the fetchers 160 | mkConfig = 161 | { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null 162 | , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile) 163 | , system ? builtins.currentSystem 164 | , pkgs ? mkPkgs sources system 165 | }: rec { 166 | # The sources, i.e. the attribute set of spec name to spec 167 | inherit sources; 168 | 169 | # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers 170 | inherit pkgs; 171 | }; 172 | 173 | in 174 | mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); } 175 | -------------------------------------------------------------------------------- /starkware/stark_cli.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | ############################################################################### 3 | # Copyright 2019 StarkWare Industries Ltd. # 4 | # # 5 | # Licensed under the Apache License, Version 2.0 (the "License"). # 6 | # You may not use this file except in compliance with the License. 
import sys
import traceback  # FIX: used by main()'s error handler but was never imported
from argparse import ArgumentParser, RawTextHelpFormatter

from signature import FIELD_PRIME, get_price_msg, private_to_stark_key, sign


class HexedBoundedParam():
    """Argparse `type` callable: parse a hex string to int and enforce an
    exclusive upper bound (e.g. FIELD_PRIME, 2**40)."""

    def __init__(self, bound):
        self.bound = bound

    def __call__(self, input_element):
        num = int(input_element, 16)
        assert num < self.bound
        return num


def sign_cli(key, data):
    """Sign `data` with private `key`; return the signature as 'hex(r) hex(s)'."""
    r, s = sign(data, key)
    return ' '.join([hex(r), hex(s)])


def public_cli(key):
    """Return the hex-encoded STARK public key for private `key`."""
    return hex(private_to_stark_key(key))


def hash_price(oracle_name, asset_pair, price, timestamp):
    """Return the Pedersen price-message hash as a bare hex string (no '0x').

    Note the argument order expected by get_price_msg: (oracle, asset, time, price).
    """
    return hex(get_price_msg(oracle_name, asset_pair, timestamp, price))[2:]


def main():
    description = """
#####################################################################################
# Starkware hash&sign cli, provides hash and sign functions.                        #
#                                                                                   #
# Sign: gets as input:                                                              #
#     private key: (a number which is less than FIELD_PRIME, roughly 2**251)        #
#     data to sign (another number from the same range)                             #
# and outputs:                                                                      #
#     Stark signature with the key on the data                                      #
#                                                                                   #
# Hash: gets as input:                                                              #
#     oracle: a 40-bit number, describes the oracle (i.e hex encoding of "Maker")   #
#     price: a 120-bit number                                                       #
#     asset: a 128-bit number                                                       #
#     timestamp: a 32 bit number, represents seconds since epoch                    #
# outputs a number which is less than FIELD_PRIME, which can be used as data        #
# to sign on in the sign method. This number is obtained by applying pedersen       #
# on the following two numbers:                                                     #
#                                                                                   #
# first number:                                                                     #
# --------------------------------------------------------------------------------- #
# | 0 (84 bits) | asset_name (128 bits) | oracle_name (40 bits) |                   #
# --------------------------------------------------------------------------------- #
#                                                                                   #
# second number:                                                                    #
# --------------------------------------------------------------------------------- #
# | 0 (100 bits) | price (120 bits) | timestamp (32 bits) |                         #
# --------------------------------------------------------------------------------- #
#                                                                                   #
#####################################################################################
"""

    def hash_main(args, unknown):
        # Subcommand: hash an (oracle, asset, price, time) tuple into signable data.
        parser = ArgumentParser()
        parser.add_argument(
            '-a', '--asset', required=True, dest='asset',
            help='The asset pair', type=HexedBoundedParam(2**128))
        parser.add_argument(
            '-o', '--oracle', required=True, dest='oracle',
            help='The signing oracle', type=HexedBoundedParam(2**40))
        parser.add_argument(
            '-p', '--price', required=True, dest='price',
            help='The asset price', type=HexedBoundedParam(2**120))
        parser.add_argument(
            '-t', '--time', required=True, dest='time',
            help='The asset time', type=HexedBoundedParam(2**32))

        parser.parse_args(unknown, namespace=args)

        return hash_price(args.oracle, args.asset, args.price, args.time)

    def sign_main(args, unknown):
        # Subcommand: sign previously hashed data with a private key.
        parser = ArgumentParser()
        parser.add_argument(
            '-k', '--key', required=True, dest='key',
            help='The private key (hex string)', type=HexedBoundedParam(FIELD_PRIME))

        parser.add_argument(
            '-d', '--data', required=True, dest='data',
            help='The data to sign', type=HexedBoundedParam(FIELD_PRIME))

        parser.parse_args(unknown, namespace=args)
        return sign_cli(args.key, args.data)

    def public_main(args, unknown):
        # Subcommand: derive the public key from a private key.
        parser = ArgumentParser()
        parser.add_argument(
            '-k', '--key', required=True, dest='key',
            help='The private key (hex string)', type=HexedBoundedParam(FIELD_PRIME))

        parser.parse_args(unknown, namespace=args)
        return public_cli(args.key)

    subparsers = {
        'hash': hash_main,
        'sign': sign_main,
        'get_public': public_main,
    }

    parser = ArgumentParser(description=description, formatter_class=RawTextHelpFormatter)
    parser.add_argument(
        '-m', '--method', required=True, dest='method',
        help='The required operation - hash or sign', choices=subparsers.keys())

    args, unknown = parser.parse_known_args()
    try:
        result = subparsers[args.method](args, unknown)
        print(result)
        return 0
    except Exception:
        # BUG FIX: the original referenced an undefined `name` and the
        # un-imported `traceback` module, so any failure in a subcommand
        # raised NameError here instead of reporting the real error.
        print('Got an error while processing "%s":' % args.method, file=sys.stderr)
        traceback.print_exc()
        print(file=sys.stderr)
        return 1


if __name__ == '__main__':
    sys.exit(main())
"", 6 | "password": "", 7 | "network": "mainnet", 8 | "gasPrice": { 9 | "source": "node", 10 | "maxPriceMultiplier": 2, 11 | "tipMultiplier": 1, 12 | "priority": "fast" 13 | } 14 | }, 15 | "sources":[], 16 | "transports":["transport-ssb", "transport-spire"], 17 | "feeds": [ 18 | "0xDA1d2961Da837891f43235FddF66BAD26f41368b", 19 | "0x4b0E327C08e23dD08cb87Ec994915a5375619aa2", 20 | "0x75ef8432566A79C86BBF207A47df3963B8Cf0753", 21 | "0x83e23C207a67a9f9cB680ce84869B91473403e7d", 22 | "0xFbaF3a7eB4Ec2962bd1847687E56aAEE855F5D00", 23 | "0xfeEd00AA3F0845AFE52Df9ECFE372549B74C69D2", 24 | "0x71eCFF5261bAA115dcB1D9335c88678324b8A987", 25 | "0x8ff6a38A1CD6a42cAac45F08eB0c802253f68dfD", 26 | "0x16655369Eb59F3e1cAFBCfAC6D3Dd4001328f747", 27 | "0xD09506dAC64aaA718b45346a032F934602e29cca", 28 | "0xc00584B271F378A0169dd9e5b165c0945B4fE498", 29 | "0x60da93D9903cb7d3eD450D4F81D402f7C4F71dd9", 30 | "0xa580BBCB1Cee2BCec4De2Ea870D20a12A964819e", 31 | "0xD27Fa2361bC2CfB9A591fb289244C538E190684B", 32 | "0x8de9c5F1AC1D4d02bbfC25fD178f5DAA4D5B26dC", 33 | "0xE6367a7Da2b20ecB94A25Ef06F3b551baB2682e6", 34 | "0xA8EB82456ed9bAE55841529888cDE9152468635A", 35 | "0x130431b4560Cd1d74A990AE86C337a33171FF3c6", 36 | "0x8aFBD9c3D794eD8DF903b3468f4c4Ea85be953FB", 37 | "0xd94BBe83b4a68940839cD151478852d16B3eF891", 38 | "0xC9508E9E3Ccf319F5333A5B8c825418ABeC688BA", 39 | "0x77EB6CF8d732fe4D92c427fCdd83142DB3B742f7", 40 | "0x3CB645a8f10Fb7B0721eaBaE958F77a878441Cb9", 41 | "0x4f95d9B4D842B2E2B1d1AC3f2Cf548B93Fd77c67", 42 | "0xaC8519b3495d8A3E3E44c041521cF7aC3f8F63B3", 43 | "0xd72BA9402E9f3Ff01959D6c841DDD13615FFff42" 44 | ], 45 | "services":{ 46 | "scuttlebotIdMap":{ 47 | "0x01":"@UOxmD3Z7sDymyucCjbkrACbmthAxhj31Hx7l2XMQeJo=.ed25519", 48 | "0xDA1d2961Da837891f43235FddF66BAD26f41368b":"@aGtbCUrDyGt+EcH0ppaqaC+L9XDUzwcbM2O1aK9nT84=.ed25519", 49 | "0x4b0E327C08e23dD08cb87Ec994915a5375619aa2":"@E7B3opUWe14hpOsPgOzW8YzZv46uus0vVVZtcZ3TQBo=.ed25519", 50 | 
"0x75ef8432566A79C86BBF207A47df3963B8Cf0753":"@1eJfb61Dwk4Q4alBdjJPireI6sdtz4UtuJZ6qolbfo0=.ed25519", 51 | "0x83e23C207a67a9f9cB680ce84869B91473403e7d":"@OWP0AueLTTGXKT2VikDUPdmd8oqAeN85iF1Tt+nz52U=.ed25519", 52 | "0xFbaF3a7eB4Ec2962bd1847687E56aAEE855F5D00":"@549y65IgZK/KzcRM6KqYJlEJrqZeX1+XGIrPUGlXHHA=.ed25519", 53 | "0xfeEd00AA3F0845AFE52Df9ECFE372549B74C69D2":"@7fpfNfD8dVgEzKuvFvG3tVrXj3IaLfcaFEY35nKpyA0=.ed25519", 54 | "0x71eCFF5261bAA115dcB1D9335c88678324b8A987":"@7y4tZnczYjjqXzMC32srHsCVMCaMs/mpkVgaDGdXOgM=.ed25519", 55 | "0x8ff6a38A1CD6a42cAac45F08eB0c802253f68dfD":"@8zMLDDmm3zsrFhkX6Rum3uWZ3DoWAb6cGAO2CYnEi7I=.ed25519", 56 | "0x16655369Eb59F3e1cAFBCfAC6D3Dd4001328f747":"@q4XL6KymSI1WHuR+ZBgjCSj6N+dO8vuZuGCgQZq67Gw=.ed25519", 57 | "0xD09506dAC64aaA718b45346a032F934602e29cca":"@N1wbM7vKqEODT8UDqvJZHTlaVuUE41UJIqcXs6Q733U=.ed25519", 58 | "0xc00584B271F378A0169dd9e5b165c0945B4fE498":"@5xWuAF/8zZMFYssMQwBzCOz+n43VtfdJBWVpu5WB5ds=.ed25519", 59 | "0x60da93D9903cb7d3eD450D4F81D402f7C4F71dd9":"@+MXygz7HXiDdjXsQDbEkIFD8MOcWr+0Cn1529iwEVCI=.ed25519", 60 | "0xa580BBCB1Cee2BCec4De2Ea870D20a12A964819e":"@NkNJzUcy3dlO4V6QsyaQzs3eAbfd9jLKpOdIMyGHBcI=.ed25519", 61 | "0xD27Fa2361bC2CfB9A591fb289244C538E190684B":"@92ohB9a3bIScXuTRhzWqDiTUjk3CzEBpImSNilttIdQ=.ed25519", 62 | "0x8de9c5F1AC1D4d02bbfC25fD178f5DAA4D5B26dC":"@RaGo37sm5OZtxr16An3n0Lg7QUTp7TYroH8Kfl1DHRo=.ed25519", 63 | "0xE6367a7Da2b20ecB94A25Ef06F3b551baB2682e6":"@QgSCAEDYsg/C6nxHaBcKGI68BUJWs4h7+PdhhWBh36c=.ed25519", 64 | "0xA8EB82456ed9bAE55841529888cDE9152468635A":"@rgvhqaiHOxOCnnCFPFRv0REkcnQn8Xe9+AZs3sVL4+g=.ed25519", 65 | "0x130431b4560Cd1d74A990AE86C337a33171FF3c6":"@kqg/A1m9pdbGSylkUwnZJw7o7AtFBN8sFmXJ4xgwqcM=.ed25519", 66 | "0x8aFBD9c3D794eD8DF903b3468f4c4Ea85be953FB":"@amqs7YOREwqVSt+kVYcNNt6jHKrxFrIsSlUK1pTnqgo=.ed25519", 67 | "0xd94BBe83b4a68940839cD151478852d16B3eF891":"@sbW7YaJfV+Cgu1BNafSbJ4LMRkUMdRjJoOal+iBp8fo=.ed25519", 68 | 
"0xC9508E9E3Ccf319F5333A5B8c825418ABeC688BA":"@N2Yz4d5vzO882cfA9Ze9oVIsIzV+N6eU6uTe61gMfGk=.ed25519", 69 | "0x77EB6CF8d732fe4D92c427fCdd83142DB3B742f7":"@DY7va7XHwh7XTdA7x9Pnu0GS/O4beWsUQrVObikqclE=.ed25519", 70 | "0x3CB645a8f10Fb7B0721eaBaE958F77a878441Cb9":"@oBjyIV0PufX/btAu0/03RwfTm9KAw8Afq5uFlq+xPJc=.ed25519", 71 | "0x4f95d9B4D842B2E2B1d1AC3f2Cf548B93Fd77c67":"@NmlE5G9XxgRfC8dxFY49t1iaB5O2i0VDLw7lhDWcmVo=.ed25519", 72 | "0xaC8519b3495d8A3E3E44c041521cF7aC3f8F63B3":"@pJ3+fcCQ/wycefUTM6+tM91iZi/KrgVkJvzfuc2WIYo=.ed25519", 73 | "0xd72BA9402E9f3Ff01959D6c841DDD13615FFff42":"@4BW2SNFDeUeS5gVxk1QZRtoCpg5SSr+JKhx9/q6uAEw=.ed25519" 74 | } 75 | }, 76 | "options": { 77 | "interval": 60, 78 | "msgLimit": 35, 79 | "verbose": true, 80 | "logFormat": "text" 81 | }, 82 | "pairs": { 83 | "AAVE/USD": { 84 | "msgExpiration": 1800, 85 | "oracle": "0xe62872DFEbd323b03D27946f8e2491B454a69811", 86 | "oracleExpiration": 15500, 87 | "oracleSpread": 4 88 | }, 89 | "BAL/USD": { 90 | "msgExpiration": 1800, 91 | "oracle": "0x1D36d59e5a22cB51B30Bb6fA73b62D73f4A11745", 92 | "oracleExpiration": 15500, 93 | "oracleSpread": 4 94 | }, 95 | "BAT/USD": { 96 | "msgExpiration": 1800, 97 | "oracle": "0x18B4633D6E39870f398597f3c1bA8c4A41294966", 98 | "oracleExpiration": 15500, 99 | "oracleSpread": 4 100 | }, 101 | "BTC/USD": { 102 | "msgExpiration": 1800, 103 | "oracle": "0xe0F30cb149fAADC7247E953746Be9BbBB6B5751f", 104 | "oracleExpiration": 15500, 105 | "oracleSpread": 1 106 | }, 107 | "COMP/USD": { 108 | "msgExpiration": 1800, 109 | "oracle": "0xA3421Be733125405Ea20aA853839D34b364eB524", 110 | "oracleExpiration": 15500, 111 | "oracleSpread": 4 112 | }, 113 | "ETH/BTC": { 114 | "msgExpiration": 1800, 115 | "oracle": "0x81A679f98b63B3dDf2F17CB5619f4d6775b3c5ED", 116 | "oracleExpiration": 15500, 117 | "oracleSpread": 1 118 | }, 119 | "ETH/USD": { 120 | "msgExpiration": 1800, 121 | "oracle": "0x64DE91F5A373Cd4c28de3600cB34C7C6cE410C85", 122 | "oracleExpiration": 15500, 123 | "oracleSpread": 1 124 | }, 
125 | "KNC/USD": { 126 | "msgExpiration": 1800, 127 | "oracle": "0x83076a2F42dc1925537165045c9FDe9A4B71AD97", 128 | "oracleExpiration": 15500, 129 | "oracleSpread": 4 130 | }, 131 | "LINK/USD": { 132 | "msgExpiration": 1800, 133 | "oracle": "0xbAd4212d73561B240f10C56F27e6D9608963f17b", 134 | "oracleExpiration": 15500, 135 | "oracleSpread": 4 136 | }, 137 | "LRC/USD": { 138 | "msgExpiration": 1800, 139 | "oracle": "0xcCe92282d9fe310F4c232b0DA9926d5F24611C7B", 140 | "oracleExpiration": 15500, 141 | "oracleSpread": 4 142 | }, 143 | "MANA/USD": { 144 | "msgExpiration": 1800, 145 | "oracle": "0x681c4F8f69cF68852BAd092086ffEaB31F5B812c", 146 | "oracleExpiration": 15500, 147 | "oracleSpread": 4 148 | }, 149 | "MATIC/USD": { 150 | "msgExpiration": 1800, 151 | "oracle": "0xfe1e93840D286C83cF7401cB021B94b5bc1763d2", 152 | "oracleExpiration": 15500, 153 | "oracleSpread": 4 154 | }, 155 | "UNI/USD": { 156 | "msgExpiration": 1800, 157 | "oracle": "0x52f761908cc27b4d77ad7a329463cf08baf62153", 158 | "oracleExpiration": 15500, 159 | "oracleSpread": 4 160 | }, 161 | "USDT/USD": { 162 | "msgExpiration": 1800, 163 | "oracle": "0x56D4bBF358D7790579b55eA6Af3f605BcA2c0C3A", 164 | "oracleExpiration": 15500, 165 | "oracleSpread": 4 166 | }, 167 | "WSTETH/USD": { 168 | "msgExpiration": 1800, 169 | "oracle": "0x2F73b6567B866302e132273f67661fB89b5a66F2", 170 | "oracleExpiration": 15500, 171 | "oracleSpread": 1 172 | }, 173 | "YFI/USD": { 174 | "msgExpiration": 1800, 175 | "oracle": "0x89AC26C0aFCB28EC55B6CD2F6b7DAD867Fa24639", 176 | "oracleExpiration": 15500, 177 | "oracleSpread": 4 178 | }, 179 | "ZRX/USD": { 180 | "msgExpiration": 1800, 181 | "oracle": "0x956ecD6a9A9A0d84e8eB4e6BaaC09329E202E55e", 182 | "oracleExpiration": 15500, 183 | "oracleSpread": 4 184 | } 185 | } 186 | } -------------------------------------------------------------------------------- /starkware/signature/signature.py: -------------------------------------------------------------------------------- 1 | 
############################################################################### 2 | # Copyright 2019 StarkWare Industries Ltd. # 3 | # # 4 | # Licensed under the Apache License, Version 2.0 (the "License"). # 5 | # You may not use this file except in compliance with the License. # 6 | # You may obtain a copy of the License at # 7 | # # 8 | # https://www.starkware.co/open-source-license/ # 9 | # # 10 | # Unless required by applicable law or agreed to in writing, # 11 | # software distributed under the License is distributed on an "AS IS" BASIS, # 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # 13 | # See the License for the specific language governing permissions # 14 | # and limitations under the License. # 15 | ############################################################################### 16 | 17 | import hashlib 18 | import json 19 | import math 20 | import os 21 | import random 22 | from typing import Optional, Tuple, Union 23 | 24 | from ecdsa.rfc6979 import generate_k 25 | 26 | from .math_utils import ECPoint, div_mod, ec_add, ec_double, ec_mult, is_quad_residue, sqrt_mod 27 | 28 | PEDERSEN_HASH_POINT_FILENAME = os.path.join( 29 | os.path.dirname(__file__), 'pedersen_params.json') 30 | PEDERSEN_PARAMS = json.load(open(PEDERSEN_HASH_POINT_FILENAME)) 31 | 32 | FIELD_PRIME = PEDERSEN_PARAMS['FIELD_PRIME'] 33 | FIELD_GEN = PEDERSEN_PARAMS['FIELD_GEN'] 34 | ALPHA = PEDERSEN_PARAMS['ALPHA'] 35 | BETA = PEDERSEN_PARAMS['BETA'] 36 | EC_ORDER = PEDERSEN_PARAMS['EC_ORDER'] 37 | CONSTANT_POINTS = PEDERSEN_PARAMS['CONSTANT_POINTS'] 38 | 39 | N_ELEMENT_BITS_ECDSA = math.floor(math.log(FIELD_PRIME, 2)) 40 | assert N_ELEMENT_BITS_ECDSA == 251 41 | 42 | N_ELEMENT_BITS_HASH = FIELD_PRIME.bit_length() 43 | assert N_ELEMENT_BITS_HASH == 252 44 | 45 | # Elliptic curve parameters. 
def get_random_private_key() -> int:
    """Draw a uniformly random private key in [1, EC_ORDER - 1].

    SECURITY FIX: the original used `random.randint`, a Mersenne-Twister
    PRNG that is predictable and unsuitable for key generation — despite
    its own NOTE demanding a strong random function. Use the OS CSPRNG
    via the stdlib `secrets` module instead.
    """
    import secrets  # stdlib CSPRNG; local import keeps this change self-contained
    # randbelow(EC_ORDER - 1) yields [0, EC_ORDER - 2]; +1 shifts to
    # [1, EC_ORDER - 1], matching the original randint(1, EC_ORDER - 1) range.
    return secrets.randbelow(EC_ORDER - 1) + 1
def mimic_ec_mult_air(m: int, point: ECPoint, shift_point: ECPoint) -> ECPoint:
    """
    Computes m * point + shift_point using the same steps like the AIR and throws an exception if
    and only if the AIR errors.
    """
    # Scalar must fit in the ECDSA element width and be nonzero, mirroring the
    # AIR's preconditions.
    assert 0 < m < 2**N_ELEMENT_BITS_ECDSA
    # Classic double-and-add, seeded with shift_point so the running sum is
    # never the point at infinity (the AIR cannot represent it).
    partial_sum = shift_point
    for _ in range(N_ELEMENT_BITS_ECDSA):
        # ec_add cannot handle two points with equal x (doubling/inverse case);
        # the AIR fails there too, so this assert reproduces exactly that error.
        assert partial_sum[0] != point[0]
        if m & 1:
            partial_sum = ec_add(partial_sum, point, FIELD_PRIME)
        point = ec_double(point, ALPHA, FIELD_PRIME)
        m >>= 1
    # All scalar bits must have been consumed within N_ELEMENT_BITS_ECDSA steps.
    assert m == 0
    return partial_sum
def pedersen_hash(*elements: int) -> int:
    """Pedersen hash of the given field elements (the x coordinate of the
    resulting EC point)."""
    return pedersen_hash_as_point(*elements)[0]


def pedersen_hash_as_point(*elements: int) -> ECPoint:
    """
    Similar to pedersen_hash but also returns the y coordinate of the resulting EC point.
    This function is used for testing.
    """
    # Accumulate bit-by-bit EC additions, starting from the shift point so the
    # running sum never hits the point at infinity.
    acc = SHIFT_POINT
    for idx, element in enumerate(elements):
        assert 0 <= element < FIELD_PRIME
        # Each input element gets its own window of N_ELEMENT_BITS_HASH
        # precomputed constant points (the first two entries are reserved).
        start = 2 + idx * N_ELEMENT_BITS_HASH
        window = CONSTANT_POINTS[start:start + N_ELEMENT_BITS_HASH]
        assert len(window) == N_ELEMENT_BITS_HASH
        bits = element
        for const_pt in window:
            # Equal x coordinates would make ec_add undefined.
            assert acc[0] != const_pt[0], 'Unhashable input.'
            if bits & 1:
                acc = ec_add(acc, const_pt, FIELD_PRIME)
            bits >>= 1
        # Every bit of the element must fit in the window.
        assert bits == 0
    return acc
# Install the Omnia config file for this node type ($1 = e.g. "feed.conf"),
# preserving the operator's existing ethereum/services settings.
# Order matters: the old values are captured BEFORE the template overwrites
# the file, then re-merged on top of it via jq.
copyOmniaConf() {
  echo >&2 "OMNIA_CONFIG: START"

  local savedFrom
  local updatedConf

  if [[ -f "$OMNIA_CONFIG" ]]; then
    # Snapshot only the operator-specific sections, dropping null leaves.
    savedFrom=$(jq '{ethereum,services} | del(.. | nulls)' "$OMNIA_CONFIG")
  fi

  sudo cp -v "$OMNIA_CONF_PATH/$1" "$OMNIA_CONFIG" >&2
  sudo chmod u+w "$OMNIA_CONFIG"

  if [[ -n "$savedFrom" ]]; then
    # Prepend so saved settings are applied first and later entries can
    # still override them.
    configUpdates=( ". * $savedFrom" "${configUpdates[@]}" )
  fi

  configUpdates+=( ".options.logFormat = \"text\"" )
  configOverrides+=( ".options.setzerEthRpcUrl = \"http://127.0.0.1:9989\"" )

  # Apply each jq filter in place; sudo tee is used because the target file
  # is root-owned. Updates run before overrides (overrides always win).
  for i in "${!configUpdates[@]}"; do
    sudo tee "$OMNIA_CONFIG" <<<"$(jq "${configUpdates[$i]}" "$OMNIA_CONFIG")" >/dev/null
  done
  for i in "${!configOverrides[@]}"; do
    sudo tee "$OMNIA_CONFIG" <<<"$(jq "${configOverrides[$i]}" "$OMNIA_CONFIG")" >/dev/null
  done

  echo >&2 "OMNIA_CONFIG: DONE"
}
# Filter queues consumed above; also appended to by the CLI option parser.
declare -a configUpdates
declare -a configOverrides
# Install or migrate the Spire agent config at $SPIRE_CONFIG.
# Fresh install: copy the bundled template. Existing file: migrate the legacy
# flat layout (no top-level .spire key) into the nested one, then merge the
# template's feeds/pairs on top so shipped defaults stay current.
copySpireConf() {
  echo >&2 "SPIRE_CONFIG: START"

  if [[ ! -f "$SPIRE_CONFIG" ]]
  then
    sudo cp -v "$SHARE_DIR/spire.json" "$SPIRE_CONFIG" >&2
    sudo chmod u+w "$SPIRE_CONFIG"
  else
    if [[ "$(jq -cr '.spire' < "$SPIRE_CONFIG")" == "null" ]]; then
      # Legacy layout detected: reshape into the current nested schema.
      spireConfigUpdates+=( "{ethereum,feeds,spire:{rpc,pairs},transport:{libp2p}}" )
    fi

    local _defaultConf
    # Take the authoritative feed list and pair set from the shipped template.
    _defaultConf=$(jq -c '{feeds,spire:{pairs:.spire.pairs}}' "$SHARE_DIR/spire.json")
    spireConfigUpdates+=( ". * ${_defaultConf}" )
  fi

  # Apply queued jq filters in place (sudo tee: file is root-owned).
  # Updates run first, then overrides, so overrides always win.
  for i in "${!spireConfigUpdates[@]}"; do
    echo >&2 "${spireConfigUpdates[$i]}"
    sudo tee "$SPIRE_CONFIG" <<<"$(jq -c "${spireConfigUpdates[$i]}" "$SPIRE_CONFIG")" >/dev/null
  done
  for i in "${!spireConfigOverrides[@]}"; do
    echo >&2 "${spireConfigOverrides[$i]}"
    sudo tee "$SPIRE_CONFIG" <<<"$(jq -c "${spireConfigOverrides[$i]}" "$SPIRE_CONFIG")" >/dev/null
  done

  echo >&2 "SPIRE_CONFIG: DONE"
}
# Filter queues consumed above; also appended to by the CLI option parser.
declare -a spireConfigUpdates
declare -a spireConfigOverrides
< "$GOFER_CONFIG")" 222 | 223 | if [[ "$(jq -cr '.gofer' <<<"$_goferConfig")" == "null" ]]; then 224 | echo >&2 "GOFER_CONFIG: A" 225 | _goferConfig="$(jq -c '{gofer:.}' < "$GOFER_CONFIG")" 226 | fi 227 | 228 | local _defaultConf 229 | _defaultConf="$(jq -c '{ethRpc:.gofer.ethRpc,origins:.gofer.origins,priceModels:.gofer.priceModels}|del(..|nulls)' < "$SHARE_DIR/gofer.json")" 230 | goferConfigUpdates+=( ". * {gofer:${_defaultConf}}" ) 231 | 232 | local _apiKeys 233 | _apiKeys="$(jq -c <<<"$_goferConfig" '.gofer.origins|to_entries|map({key,value:{params:{apiKey:.value.params.apiKey}}})[]|select(.value.params.apiKey!=null)|[.]|from_entries' | jq -sc add)" 234 | if [[ "$_apiKeys" != "null" ]]; then 235 | goferConfigUpdates+=( ". * {gofer:{origins:${_apiKeys}}}" ) 236 | fi 237 | 238 | local _savedData 239 | _savedData=$(jq -cr '{ethRpc:.gofer.ethRpc}|del(.. | nulls)' <<<"$_goferConfig") 240 | if [[ "$_savedData" != "null" ]] && [[ "$_savedData" != "{}" ]]; then 241 | goferConfigUpdates+=( ". 
* {gofer:$_savedData}" ) 242 | fi 243 | fi 244 | 245 | goferConfigOverrides+=( ".gofer.ethRpc = \"http://127.0.0.1:9989\"" ) 246 | 247 | for i in "${!goferConfigUpdates[@]}"; do 248 | echo >&2 "${goferConfigUpdates[$i]}" 249 | sudo tee "$GOFER_CONFIG" <<<"$(jq "${goferConfigUpdates[$i]}" "$GOFER_CONFIG")" >/dev/null 250 | done 251 | for i in "${!goferConfigOverrides[@]}"; do 252 | echo >&2 "${goferConfigOverrides[$i]}" 253 | sudo tee "$GOFER_CONFIG" <<<"$(jq "${goferConfigOverrides[$i]}" "$GOFER_CONFIG")" >/dev/null 254 | done 255 | 256 | echo >&2 "GOFER_CONFIG: DONE" 257 | } 258 | declare -a goferConfigUpdates 259 | declare -a goferConfigOverrides 260 | 261 | installConfigs() { 262 | copyOmniaConf "$installType.conf" 263 | copySsbConf 264 | copySpireConf 265 | copyGoferConf 266 | copyLeelooConf 267 | } 268 | 269 | installServices() { 270 | subst spire-agent "$SHARE_DIR"/spire-agent.service 271 | subst leeloo-agent "$SHARE_DIR"/leeloo-agent.service 272 | subst gofer-agent "$SHARE_DIR"/gofer-agent.service 273 | subst splitter-agent "$SHARE_DIR"/splitter-agent.service 274 | subst ssb-server "$SHARE_DIR"/ssb-server.service 275 | subst omnia "$SHARE_DIR"/omnia.service 276 | } 277 | 278 | case "$1" in 279 | feed) 280 | installType="feed" 281 | ;; 282 | relayer|relay) 283 | installType="relayer" 284 | ;; 285 | help|--help|-h) 286 | usage 287 | ;; 288 | *) 289 | echo >&2 "\"$1\" is not a valid command" 290 | usage 291 | ;; 292 | esac 293 | 294 | shift 295 | 296 | while [[ -n "$1" ]]; do 297 | case "$1" in 298 | --debug) 299 | configOverrides+=( ".options.debug = true" ) 300 | ;; 301 | --verbose) 302 | configOverrides+=( ".options.verbose = true" ) 303 | ;; 304 | --logFormat) 305 | configOverrides+=( ".options.logFormat = \"$2\"" ) 306 | shift 307 | ;; 308 | --override-origin) 309 | goferConfigOverrides+=( ". 
* {gofer:{origins:{\"$2\":{type:\"$2\",name:\"$2\",params:{\"$3\": \"$4\"}}}}}" ) 310 | shift;shift;shift 311 | ;; 312 | --eth-rpc) 313 | if [[ -z "$SPLITTER_URLS" ]]; then 314 | SPLITTER_URLS="$2" 315 | else 316 | SPLITTER_URLS="$SPLITTER_URLS,$2" 317 | fi 318 | shift 319 | ;; 320 | --from) 321 | configOverrides+=( ".ethereum.from = \"$2\"" ) 322 | spireConfigOverrides+=( ".ethereum.from = \"$2\"" ) 323 | spireConfigOverrides+=( ".feeds += [\"$2\"]" ) 324 | leelooConfigOverrides+=( ".ethereum.from = \"$2\"" ) 325 | leelooConfigOverrides+=( ".feeds += [\"$2\"]" ) 326 | shift 327 | ;; 328 | --keystore) 329 | configOverrides+=( ".ethereum.keystore = \"$2\"" ) 330 | spireConfigOverrides+=( ".ethereum.keystore = \"$2\"" ) 331 | leelooConfigOverrides+=( ".ethereum.keystore = \"$2\"" ) 332 | shift 333 | ;; 334 | --password) 335 | configOverrides+=( ".ethereum.password = \"$2\"" ) 336 | spireConfigOverrides+=( ".ethereum.password = \"$2\"" ) 337 | leelooConfigOverrides+=( ".ethereum.password = \"$2\"" ) 338 | shift 339 | ;; 340 | --l2-eth-rpc) 341 | leelooConfigUpdates+=( ".leeloo.listeners.wormhole += [\"$2\"]" ); 342 | shift 343 | ;; 344 | --network) 345 | configOverrides+=( ".ethereum.network = \"$2\"" ); 346 | shift 347 | ;; 348 | --no-transport) 349 | configOverrides+=( ".transports = []" ) 350 | ;; 351 | --add-transport) 352 | configOverrides+=( ".transports += [\"$2\"]" ); 353 | shift 354 | ;; 355 | --no-source) 356 | configOverrides+=( ".sources = []" ) 357 | ;; 358 | --add-source) 359 | configOverrides+=( ".sources += [\"$2\"]" ); 360 | shift 361 | ;; 362 | --ssb-external) 363 | ssbConfigUpdates+=( ".connections.incoming[\"net\",\"ws\"][].external = \"$2\"" ); 364 | shift 365 | ;; 366 | --ssb-caps) 367 | ssbConfigUpdates+=( ".caps = $(jq . 
"$2")" ); 368 | shift 369 | ;; 370 | --ssb-secret) 371 | installSsbSecret="$2"; 372 | shift 373 | ;; 374 | --ssb-port) 375 | ssbConfigUpdates+=( ".connections.incoming[\"net\"][].port = \"$2\"" ); 376 | shift 377 | ;; 378 | --ssb-host) 379 | ssbConfigUpdates+=( ".connections.incoming[\"net\"][].host = \"$2\"" ); 380 | shift 381 | ;; 382 | --ssb-gossip) 383 | installSsbGossip="$2"; 384 | shift 385 | ;; 386 | --help|-h) 387 | usage 388 | ;; 389 | *) 390 | echo >&2 "\"$1\" is not a valid option" 391 | usage 392 | ;; 393 | esac 394 | shift 395 | done 396 | 397 | installConfigs 398 | 399 | installServices 400 | 401 | echo >&2 "To (re)start Omnia now (and auto-start on reboot) run:" 402 | echo "systemctl daemon-reload # Reload systemd services" 403 | echo -n "$systemdCmd" -------------------------------------------------------------------------------- /nix/node-env.nix: -------------------------------------------------------------------------------- 1 | # This file originates from node2nix 2 | 3 | {stdenv, nodejs, python2, utillinux, libtool, runCommand, writeTextFile}: 4 | 5 | let 6 | python = if nodejs ? python then nodejs.python else python2; 7 | 8 | # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise 9 | tarWrapper = runCommand "tarWrapper" {} '' 10 | mkdir -p $out/bin 11 | 12 | cat > $out/bin/tar <> $out/nix-support/hydra-build-products 37 | ''; 38 | }; 39 | 40 | includeDependencies = {dependencies}: 41 | stdenv.lib.optionalString (dependencies != []) 42 | (stdenv.lib.concatMapStrings (dependency: 43 | '' 44 | # Bundle the dependencies of the package 45 | mkdir -p node_modules 46 | cd node_modules 47 | 48 | # Only include dependencies if they don't exist. They may also be bundled in the package. 49 | if [ ! -e "${dependency.name}" ] 50 | then 51 | ${composePackage dependency} 52 | fi 53 | 54 | cd .. 
55 | '' 56 | ) dependencies); 57 | 58 | # Recursively composes the dependencies of a package 59 | composePackage = { name, packageName, src, dependencies ? [], ... }@args: 60 | builtins.addErrorContext "while evaluating node package '${packageName}'" '' 61 | DIR=$(pwd) 62 | cd $TMPDIR 63 | 64 | unpackFile ${src} 65 | 66 | # Make the base dir in which the target dependency resides first 67 | mkdir -p "$(dirname "$DIR/${packageName}")" 68 | 69 | if [ -f "${src}" ] 70 | then 71 | # Figure out what directory has been unpacked 72 | packageDir="$(find . -maxdepth 1 -type d | tail -1)" 73 | 74 | # Restore write permissions to make building work 75 | find "$packageDir" -type d -exec chmod u+x {} \; 76 | chmod -R u+w "$packageDir" 77 | 78 | # Move the extracted tarball into the output folder 79 | mv "$packageDir" "$DIR/${packageName}" 80 | elif [ -d "${src}" ] 81 | then 82 | # Get a stripped name (without hash) of the source directory. 83 | # On old nixpkgs it's already set internally. 84 | if [ -z "$strippedName" ] 85 | then 86 | strippedName="$(stripHash ${src})" 87 | fi 88 | 89 | # Restore write permissions to make building work 90 | chmod -R u+w "$strippedName" 91 | 92 | # Move the extracted directory into the output folder 93 | mv "$strippedName" "$DIR/${packageName}" 94 | fi 95 | 96 | # Unset the stripped name to not confuse the next unpack step 97 | unset strippedName 98 | 99 | # Include the dependencies of the package 100 | cd "$DIR/${packageName}" 101 | ${includeDependencies { inherit dependencies; }} 102 | cd .. 
103 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 104 | ''; 105 | 106 | pinpointDependencies = {dependencies, production}: 107 | let 108 | pinpointDependenciesFromPackageJSON = writeTextFile { 109 | name = "pinpointDependencies.js"; 110 | text = '' 111 | var fs = require('fs'); 112 | var path = require('path'); 113 | 114 | function resolveDependencyVersion(location, name) { 115 | if(location == process.env['NIX_STORE']) { 116 | return null; 117 | } else { 118 | var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json"); 119 | 120 | if(fs.existsSync(dependencyPackageJSON)) { 121 | var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON)); 122 | 123 | if(dependencyPackageObj.name == name) { 124 | return dependencyPackageObj.version; 125 | } 126 | } else { 127 | return resolveDependencyVersion(path.resolve(location, ".."), name); 128 | } 129 | } 130 | } 131 | 132 | function replaceDependencies(dependencies) { 133 | if(typeof dependencies == "object" && dependencies !== null) { 134 | for(var dependency in dependencies) { 135 | var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency); 136 | 137 | if(resolvedVersion === null) { 138 | process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n"); 139 | } else { 140 | dependencies[dependency] = resolvedVersion; 141 | } 142 | } 143 | } 144 | } 145 | 146 | /* Read the package.json configuration */ 147 | var packageObj = JSON.parse(fs.readFileSync('./package.json')); 148 | 149 | /* Pinpoint all dependencies */ 150 | replaceDependencies(packageObj.dependencies); 151 | if(process.argv[2] == "development") { 152 | replaceDependencies(packageObj.devDependencies); 153 | } 154 | replaceDependencies(packageObj.optionalDependencies); 155 | 156 | /* Write the fixed package.json file */ 157 | fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2)); 158 | ''; 159 | }; 160 | in 161 
| '' 162 | node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"} 163 | 164 | ${stdenv.lib.optionalString (dependencies != []) 165 | '' 166 | if [ -d node_modules ] 167 | then 168 | cd node_modules 169 | ${stdenv.lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies} 170 | cd .. 171 | fi 172 | ''} 173 | ''; 174 | 175 | # Recursively traverses all dependencies of a package and pinpoints all 176 | # dependencies in the package.json file to the versions that are actually 177 | # being used. 178 | 179 | pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args: 180 | '' 181 | if [ -d "${packageName}" ] 182 | then 183 | cd "${packageName}" 184 | ${pinpointDependencies { inherit dependencies production; }} 185 | cd .. 186 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 187 | fi 188 | ''; 189 | 190 | # Extract the Node.js source code which is used to compile packages with 191 | # native bindings 192 | nodeSources = runCommand "node-sources" {} '' 193 | tar --no-same-owner --no-same-permissions -xf ${nodejs.src} 194 | mv node-* $out 195 | ''; 196 | 197 | # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (that is empty) 198 | addIntegrityFieldsScript = writeTextFile { 199 | name = "addintegrityfields.js"; 200 | text = '' 201 | var fs = require('fs'); 202 | var path = require('path'); 203 | 204 | function augmentDependencies(baseDir, dependencies) { 205 | for(var dependencyName in dependencies) { 206 | var dependency = dependencies[dependencyName]; 207 | 208 | // Open package.json and augment metadata fields 209 | var packageJSONDir = path.join(baseDir, "node_modules", dependencyName); 210 | var packageJSONPath = path.join(packageJSONDir, "package.json"); 211 | 212 | if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. 
Sometimes we may have production installs in which development dependencies can be ignored 213 | console.log("Adding metadata fields to: "+packageJSONPath); 214 | var packageObj = JSON.parse(fs.readFileSync(packageJSONPath)); 215 | 216 | if(dependency.integrity) { 217 | packageObj["_integrity"] = dependency.integrity; 218 | } else { 219 | packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to harm and it bypasses downloads. 220 | } 221 | 222 | if(dependency.resolved) { 223 | packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided 224 | } else { 225 | packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories. 226 | } 227 | 228 | if(dependency.from !== undefined) { // Adopt from property if one has been provided 229 | packageObj["_from"] = dependency.from; 230 | } 231 | 232 | fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2)); 233 | } 234 | 235 | // Augment transitive dependencies 236 | if(dependency.dependencies !== undefined) { 237 | augmentDependencies(packageJSONDir, dependency.dependencies); 238 | } 239 | } 240 | } 241 | 242 | if(fs.existsSync("./package-lock.json")) { 243 | var packageLock = JSON.parse(fs.readFileSync("./package-lock.json")); 244 | 245 | if(packageLock.lockfileVersion !== 1) { 246 | process.stderr.write("Sorry, I only understand lock file version 1!\n"); 247 | process.exit(1); 248 | } 249 | 250 | if(packageLock.dependencies !== undefined) { 251 | augmentDependencies(".", packageLock.dependencies); 252 | } 253 | } 254 | ''; 255 | }; 256 | 257 | # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes 258 | reconstructPackageLock = writeTextFile { 259 | name = "addintegrityfields.js"; 260 | text = '' 261 | var 
fs = require('fs'); 262 | var path = require('path'); 263 | 264 | var packageObj = JSON.parse(fs.readFileSync("package.json")); 265 | 266 | var lockObj = { 267 | name: packageObj.name, 268 | version: packageObj.version, 269 | lockfileVersion: 1, 270 | requires: true, 271 | dependencies: {} 272 | }; 273 | 274 | function augmentPackageJSON(filePath, dependencies) { 275 | var packageJSON = path.join(filePath, "package.json"); 276 | if(fs.existsSync(packageJSON)) { 277 | var packageObj = JSON.parse(fs.readFileSync(packageJSON)); 278 | dependencies[packageObj.name] = { 279 | version: packageObj.version, 280 | integrity: "sha1-000000000000000000000000000=", 281 | dependencies: {} 282 | }; 283 | processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies); 284 | } 285 | } 286 | 287 | function processDependencies(dir, dependencies) { 288 | if(fs.existsSync(dir)) { 289 | var files = fs.readdirSync(dir); 290 | 291 | files.forEach(function(entry) { 292 | var filePath = path.join(dir, entry); 293 | var stats = fs.statSync(filePath); 294 | 295 | if(stats.isDirectory()) { 296 | if(entry.substr(0, 1) == "@") { 297 | // When we encounter a namespace folder, augment all packages belonging to the scope 298 | var pkgFiles = fs.readdirSync(filePath); 299 | 300 | pkgFiles.forEach(function(entry) { 301 | if(stats.isDirectory()) { 302 | var pkgFilePath = path.join(filePath, entry); 303 | augmentPackageJSON(pkgFilePath, dependencies); 304 | } 305 | }); 306 | } else { 307 | augmentPackageJSON(filePath, dependencies); 308 | } 309 | } 310 | }); 311 | } 312 | } 313 | 314 | processDependencies("node_modules", lockObj.dependencies); 315 | 316 | fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2)); 317 | ''; 318 | }; 319 | 320 | prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}: 321 | let 322 | forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com"; 323 | in 324 | 
'' 325 | # Pinpoint the versions of all dependencies to the ones that are actually being used 326 | echo "pinpointing versions of dependencies..." 327 | source $pinpointDependenciesScriptPath 328 | 329 | # Patch the shebangs of the bundled modules to prevent them from 330 | # calling executables outside the Nix store as much as possible 331 | patchShebangs . 332 | 333 | # Deploy the Node.js package by running npm install. Since the 334 | # dependencies have been provided already by ourselves, it should not 335 | # attempt to install them again, which is good, because we want to make 336 | # it Nix's responsibility. If it needs to install any dependencies 337 | # anyway (e.g. because the dependency parameters are 338 | # incomplete/incorrect), it fails. 339 | # 340 | # The other responsibilities of NPM are kept -- version checks, build 341 | # steps, postprocessing etc. 342 | 343 | export HOME=$TMPDIR 344 | cd "${packageName}" 345 | runHook preRebuild 346 | 347 | ${stdenv.lib.optionalString bypassCache '' 348 | ${stdenv.lib.optionalString reconstructLock '' 349 | if [ -f package-lock.json ] 350 | then 351 | echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!" 352 | echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!" 353 | rm package-lock.json 354 | else 355 | echo "No package-lock.json file found, reconstructing..." 356 | fi 357 | 358 | node ${reconstructPackageLock} 359 | ''} 360 | 361 | node ${addIntegrityFieldsScript} 362 | ''} 363 | 364 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} rebuild 365 | 366 | if [ "''${dontNpmInstall-}" != "1" ] 367 | then 368 | # NPM tries to download packages even when they already exist if npm-shrinkwrap is used. 
369 | rm -f npm-shrinkwrap.json 370 | 371 | npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${stdenv.lib.optionalString production "--production"} install 372 | fi 373 | ''; 374 | 375 | # Builds and composes an NPM package including all its dependencies 376 | buildNodePackage = 377 | { name 378 | , packageName 379 | , version 380 | , dependencies ? [] 381 | , buildInputs ? [] 382 | , production ? true 383 | , npmFlags ? "" 384 | , dontNpmInstall ? false 385 | , bypassCache ? false 386 | , reconstructLock ? false 387 | , preRebuild ? "" 388 | , dontStrip ? true 389 | , unpackPhase ? "true" 390 | , buildPhase ? "true" 391 | , ... }@args: 392 | 393 | let 394 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" ]; 395 | in 396 | stdenv.mkDerivation ({ 397 | name = "node_${name}-${version}"; 398 | buildInputs = [ tarWrapper python nodejs ] 399 | ++ stdenv.lib.optional (stdenv.isLinux) utillinux 400 | ++ stdenv.lib.optional (stdenv.isDarwin) libtool 401 | ++ buildInputs; 402 | 403 | inherit nodejs; 404 | 405 | inherit dontStrip; # Stripping may fail a build for some package deployments 406 | inherit dontNpmInstall preRebuild unpackPhase buildPhase; 407 | 408 | compositionScript = composePackage args; 409 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; 410 | 411 | passAsFile = [ "compositionScript" "pinpointDependenciesScript" ]; 412 | 413 | installPhase = '' 414 | # Create and enter a root node_modules/ folder 415 | mkdir -p $out/lib/node_modules 416 | cd $out/lib/node_modules 417 | 418 | # Compose the package and all its dependencies 419 | source $compositionScriptPath 420 | 421 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} 422 | 423 | # Create symlink to the deployed executable folder, if applicable 424 | if [ -d "$out/lib/node_modules/.bin" ] 425 | then 426 | ln -s $out/lib/node_modules/.bin $out/bin 
427 | fi 428 | 429 | # Create symlinks to the deployed manual page folders, if applicable 430 | if [ -d "$out/lib/node_modules/${packageName}/man" ] 431 | then 432 | mkdir -p $out/share 433 | for dir in "$out/lib/node_modules/${packageName}/man/"* 434 | do 435 | mkdir -p $out/share/man/$(basename "$dir") 436 | for page in "$dir"/* 437 | do 438 | ln -s $page $out/share/man/$(basename "$dir") 439 | done 440 | done 441 | fi 442 | 443 | # Run post install hook, if provided 444 | runHook postInstall 445 | ''; 446 | } // extraArgs); 447 | 448 | # Builds a development shell 449 | buildNodeShell = 450 | { name 451 | , packageName 452 | , version 453 | , src 454 | , dependencies ? [] 455 | , buildInputs ? [] 456 | , production ? true 457 | , npmFlags ? "" 458 | , dontNpmInstall ? false 459 | , bypassCache ? false 460 | , reconstructLock ? false 461 | , dontStrip ? true 462 | , unpackPhase ? "true" 463 | , buildPhase ? "true" 464 | , ... }@args: 465 | 466 | let 467 | extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ]; 468 | 469 | nodeDependencies = stdenv.mkDerivation ({ 470 | name = "node-dependencies-${name}-${version}"; 471 | 472 | buildInputs = [ tarWrapper python nodejs ] 473 | ++ stdenv.lib.optional (stdenv.isLinux) utillinux 474 | ++ stdenv.lib.optional (stdenv.isDarwin) libtool 475 | ++ buildInputs; 476 | 477 | inherit dontStrip; # Stripping may fail a build for some package deployments 478 | inherit dontNpmInstall unpackPhase buildPhase; 479 | 480 | includeScript = includeDependencies { inherit dependencies; }; 481 | pinpointDependenciesScript = pinpointDependenciesOfPackage args; 482 | 483 | passAsFile = [ "includeScript" "pinpointDependenciesScript" ]; 484 | 485 | installPhase = '' 486 | mkdir -p $out/${packageName} 487 | cd $out/${packageName} 488 | 489 | source $includeScriptPath 490 | 491 | # Create fake package.json to make the npm commands work properly 492 | cp ${src}/package.json . 
493 | chmod 644 package.json 494 | ${stdenv.lib.optionalString bypassCache '' 495 | if [ -f ${src}/package-lock.json ] 496 | then 497 | cp ${src}/package-lock.json . 498 | fi 499 | ''} 500 | 501 | # Go to the parent folder to make sure that all packages are pinpointed 502 | cd .. 503 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 504 | 505 | ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }} 506 | 507 | # Expose the executables that were installed 508 | cd .. 509 | ${stdenv.lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."} 510 | 511 | mv ${packageName} lib 512 | ln -s $out/lib/node_modules/.bin $out/bin 513 | ''; 514 | } // extraArgs); 515 | in 516 | stdenv.mkDerivation { 517 | name = "node-shell-${name}-${version}"; 518 | 519 | buildInputs = [ python nodejs ] ++ stdenv.lib.optional (stdenv.isLinux) utillinux ++ buildInputs; 520 | buildCommand = '' 521 | mkdir -p $out/bin 522 | cat > $out/bin/shell <