├── .github └── workflows │ └── gitspiegel-trigger.yml ├── .gitignore ├── .gitlab-ci.yml ├── CODEOWNERS ├── LICENSE ├── README.md ├── dockerfiles ├── ansible │ ├── Dockerfile │ └── README.md ├── awscli │ ├── Dockerfile │ └── README.md ├── base-bin │ ├── Dockerfile │ ├── README.md │ └── build.sh ├── base-ci-linux │ ├── Dockerfile │ └── README.md ├── benchmarks │ ├── .gitignore │ ├── Dockerfile │ ├── README.md │ ├── check_bench_result.py │ ├── check_single_bench_result.py │ ├── push_bench_result.py │ └── requirements.txt ├── bridges-ci │ ├── Dockerfile │ └── README.md ├── ci-linux │ ├── Dockerfile │ └── README.md ├── ci-unified │ ├── Dockerfile │ ├── README.md │ ├── build-args │ ├── cargo-config │ └── download-forklift.sh ├── contracts-ci-linux │ ├── Dockerfile │ └── README.md ├── db-dumper │ ├── Dockerfile │ ├── README.md │ ├── app.py │ └── requirements.txt ├── deb │ ├── Dockerfile │ └── README.md ├── debian10 │ ├── Dockerfile │ └── README.md ├── debian11 │ ├── Dockerfile │ └── README.md ├── eng-automation-ci │ ├── Dockerfile │ └── README.md ├── github-gh-cli │ ├── Dockerfile │ └── README.md ├── gnupg │ ├── Dockerfile │ └── README.md ├── ink-ci-linux │ ├── Dockerfile │ └── README.md ├── ink-waterfall-ci │ ├── Dockerfile │ └── README.md ├── kube-manifests-validation │ ├── Dockerfile │ ├── README.md │ ├── datree-config.yaml │ └── datree-policies.yaml ├── kubetools │ ├── README.md │ ├── helm │ │ ├── Dockerfile │ │ └── helm3.Dockerfile │ └── kubectl │ │ └── Dockerfile ├── lz4 │ ├── Dockerfile │ └── README.md ├── mdbook-utils │ ├── Dockerfile │ └── README.md ├── mitogen │ ├── Dockerfile │ └── README.md ├── molecule │ ├── Dockerfile │ └── README.md ├── multisig-ci │ ├── Dockerfile │ └── README.md ├── node-bench-regression-guard │ ├── Dockerfile │ ├── Gemfile │ ├── Gemfile.lock │ ├── README.md │ ├── node-bench-regression-guard │ └── run-tests.rb ├── node-wrk │ ├── Dockerfile │ └── README.md ├── packer │ ├── Dockerfile │ └── README.md ├── parity-keyring │ ├── Dockerfile │ 
└── README.md ├── parity-scale-codec │ ├── Dockerfile │ └── README.md ├── polkadotjs-cli │ ├── Dockerfile │ └── README.md ├── python │ ├── Dockerfile │ └── README.md ├── query-exporter │ ├── Dockerfile │ └── README.md ├── redis-exporter │ ├── Dockerfile │ └── README.md ├── releng-scripts │ ├── .dockerignore │ ├── Dockerfile │ ├── README.md │ └── build.sh ├── rpm │ ├── Dockerfile │ ├── README.md │ └── rpmmacros ├── rusty-cachier-env │ ├── Dockerfile │ └── README.md ├── sops │ ├── Dockerfile │ └── README.md ├── substrate-session-keys-grabber │ ├── Dockerfile │ ├── README.md │ └── grabber.py ├── terraform │ ├── Dockerfile │ └── README.md ├── tools │ ├── Dockerfile │ └── README.md ├── utility │ ├── README.md │ ├── awscli-config │ ├── base-ci-linux-config │ ├── debian-llvm-clang.key │ ├── libudev.patch │ └── rust-builder-config ├── ws-health-exporter │ ├── Dockerfile │ ├── README.md │ └── exporter.py └── xbuilder-aarch64-unknown-linux-gnu │ ├── Dockerfile │ └── README.md ├── docs └── legacy │ └── reproduce_ci_locally.md ├── find-duplicate-dependencies.awk ├── get-substrate.sh ├── gitlab ├── .ruby-version ├── Gemfile ├── Gemfile.lock ├── README.md ├── get-all-mirrored-projects ├── get-all-projects-with-pages └── wipe-inactive-runners ├── retag.sh ├── snippets ├── .bashrc ├── .zshrc ├── cargoenvhere.fish └── cargoenvhere.sh └── wasm-utils ├── install-rust-lld-ARM.sh └── install-wasm-binaries.sh /.github/workflows/gitspiegel-trigger.yml: -------------------------------------------------------------------------------- 1 | name: gitspiegel sync 2 | 3 | # This workflow doesn't do anything, it's only use is to trigger "workflow_run" 4 | # webhook, that'll be consumed by gitspiegel 5 | # This way, gitspiegel won't do mirroring, unless this workflow runs, 6 | # and running the workflow is protected by GitHub 7 | 8 | on: 9 | pull_request: 10 | types: 11 | - opened 12 | - synchronize 13 | - unlocked 14 | - ready_for_review 15 | - reopened 16 | 17 | jobs: 18 | sync: 19 | runs-on: 
ubuntu-latest 20 | steps: 21 | - name: Do nothing 22 | run: echo "let's go" 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .*.swp 2 | .env 3 | venv -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Lists some code owners. 2 | # 3 | # A codeowner just oversees some part of the codebase. If an owned file is changed then the 4 | # corresponding codeowner receives a review request. An approval of the codeowner might be 5 | # required for merging a PR (depends on repository settings). 6 | # 7 | # For details about syntax, see: 8 | # https://help.github.com/en/articles/about-code-owners 9 | # But here are some important notes: 10 | # 11 | # - Glob syntax is git-like, e.g. `/core` means the core directory in the root, unlike `core` 12 | # which can be everywhere. 13 | # - Multiple owners are supported. 14 | # - Either handle (e.g, @github_user or @github_org/team) or email can be used. Keep in mind, 15 | # that handles might work better because they are more recognizable on GitHub, 16 | # you can use them for mentioning unlike an email. 17 | # - The latest matching rule, if multiple, takes precedence. 18 | 19 | # CI 20 | /.gitlab-ci.yml @paritytech/ci 21 | /dockerfiles/ci-linux @paritytech/ci @paritytech/opstooling 22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | > [!WARNING] 2 | > This repo is deprecated, Dockerfiles are now located at https://github.com/paritytech/dockerfiles 3 | 4 | # Parity Scripts & Dockerfiles 5 | 6 | Nowadays, this repo is mostly the open collection of the company's Dockerfiles that are used by Parity in different ways. If you work on a Polkadot SDK-based project, you could be interested in our CI image (`ci-unified`) in the `dockerfiles` directory. 7 | 8 | #### But what about scripts? 9 | 10 | `scripts` mostly is a legacy name. Yes, this repo contains some scripts, but they are kept here for historical reasons. We are not actively maintaining them. If you are looking for something specific, please, open an issue and we will try to help you. 11 | 12 | ### Additional information 13 | 14 | * We use the `ci-unified` image for most of our CI pipelines (including the pipelines for Polkadot SDK-based/Polkadot SDK-related projects and Polkadot SDK itself). 
You can find more information about the image in the `dockerfiles/ci-unified` directory. 15 | * Most of the images are published to Docker Hub and could be found [here](https://hub.docker.com/u/paritytech). 16 | * If you have access to Parity's internal GitLab, please have a look at this project's pipeline schedules. You can use them to build Docker/OCI images on demand. 17 | 18 | ### Legacy notes 19 | 20 | * [Reproduce CI locally](https://github.com/paritytech/scripts/blob/master/docs/legacy/reproduce_ci_locally.md) 21 | -------------------------------------------------------------------------------- /dockerfiles/ansible/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/python:3.11-slim-bullseye 2 | 3 | # metadata 4 | ARG VCS_REF=master 5 | ARG BUILD_DATE="" 6 | ARG REGISTRY_PATH=docker.io/paritytech 7 | 8 | LABEL io.parity.image.authors="devops-team@parity.io" \ 9 | io.parity.image.vendor="Parity Technologies" \ 10 | io.parity.image.title="${REGISTRY_PATH}/ansible" \ 11 | io.parity.image.description="ansible" \ 12 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 13 | dockerfiles/ansible/Dockerfile" \ 14 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/ansible/README.md" \ 16 | io.parity.image.revision="${VCS_REF}" \ 17 | io.parity.image.created="${BUILD_DATE}" 18 | 19 | RUN apt-get update -y && \ 20 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ 21 | sshpass ssh-client rsync tini curl git ruby 22 | 23 | RUN pip install --no-cache-dir pip --upgrade 24 | RUN pip install --no-cache-dir ansible==9.1.0 requests jmespath google-auth 25 | RUN ansible-galaxy collection install ansible.posix community.general ansible.utils ansible.netcommon 26 | 27 | ARG WORKDIR=/work 28 | 29 | RUN groupadd --gid 10001 nonroot && \ 30 | useradd --home-dir /home/nonroot \ 31 | --create-home \ 32 | 
--shell /bin/bash \ 33 | --gid nonroot \ 34 | --groups nonroot \ 35 | --uid 10000 nonroot 36 | RUN chown -R nonroot. /home/nonroot 37 | RUN mkdir ${WORKDIR} 38 | RUN chown -R nonroot. ${WORKDIR} 39 | 40 | USER 10000:10001 41 | WORKDIR ${WORKDIR} 42 | ENTRYPOINT ["/usr/bin/tini", "--"] 43 | -------------------------------------------------------------------------------- /dockerfiles/ansible/README.md: -------------------------------------------------------------------------------- 1 | # Ansible utility Docker image 2 | -------------------------------------------------------------------------------- /dockerfiles/awscli/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/python:3.6-alpine 2 | 3 | # metadata 4 | ARG VCS_REF=master 5 | ARG BUILD_DATE="" 6 | ARG REGISTRY_PATH=docker.io/paritytech 7 | 8 | LABEL io.parity.image.authors="devops-team@parity.io" \ 9 | io.parity.image.vendor="Parity Technologies" \ 10 | io.parity.image.title="${REGISTRY_PATH}/awscli" \ 11 | io.parity.image.description="awscli" \ 12 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 13 | dockerfiles/awscli/Dockerfile" \ 14 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/awscli/README.md" \ 16 | io.parity.image.revision="${VCS_REF}" \ 17 | io.parity.image.created="${BUILD_DATE}" 18 | 19 | RUN pip install awscli; \ 20 | apk add --no-cache bash shadow 21 | 22 | RUN set -x \ 23 | && groupadd -g 1000 nonroot \ 24 | && useradd -u 1000 -g 1000 -s /bin/bash -m nonroot 25 | 26 | COPY utility/awscli-config /etc/aws.config 27 | ENV AWS_CONFIG_FILE /etc/aws.config 28 | 29 | USER nonroot:nonroot 30 | CMD ["/bin/bash"] 31 | -------------------------------------------------------------------------------- /dockerfiles/awscli/README.md: -------------------------------------------------------------------------------- 1 | # Awscli utility Docker image 2 | 
-------------------------------------------------------------------------------- /dockerfiles/base-bin/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/ubuntu:22.04 2 | 3 | # metadata 4 | ARG VCS_REF 5 | ARG BUILD_DATE 6 | ARG GPG_KEYSERVER="keyserver.ubuntu.com" 7 | ARG PARITY_SEC_GPGKEY=9D4B2B6EB8F97156D19669A9FF0812D491B96798 8 | ARG DOC_URL=https://github.com/paritytech/polkadot 9 | ARG USER=parity 10 | 11 | LABEL io.parity.image.authors="devops-team@parity.io" \ 12 | io.parity.image.vendor="Parity Technologies" \ 13 | io.parity.image.title="parity/base-bin" \ 14 | io.parity.image.description="A base image for standard binary distribution" \ 15 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/base-bin/Dockerfile" \ 16 | io.parity.image.revision="${VCS_REF}" \ 17 | io.parity.image.created="${BUILD_DATE}" \ 18 | io.parity.image.documentation="${DOC_URL}" 19 | 20 | # show backtraces 21 | ENV RUST_BACKTRACE 1 22 | 23 | # install tools and dependencies 24 | RUN apt-get update && \ 25 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ 26 | libssl3 ca-certificates gnupg && \ 27 | useradd -m -u 1000 -U -s /bin/sh -d /${USER} ${USER} && \ 28 | # add repo's gpg keys and install the published polkadot binary 29 | gpg --keyserver ${GPG_KEYSERVER} --recv-keys ${PARITY_SEC_GPGKEY} && \ 30 | gpg --export ${PARITY_SEC_GPGKEY} > /usr/share/keyrings/parity.gpg && \ 31 | echo 'deb [signed-by=/usr/share/keyrings/parity.gpg] https://releases.parity.io/deb release main' > /etc/apt/sources.list.d/parity.list && \ 32 | apt-get update && \ 33 | # apt cleanup 34 | apt-get autoremove -y && apt-get clean && rm -rf /var/lib/apt/lists/* ; \ 35 | mkdir -p /data /${USER}/.local/share && \ 36 | chown -R ${USER}:${USER} /data /${USER} && \ 37 | ln -s /data /${USER}/.local/share/${USER} 38 | 39 | # Set last update 40 | ENV TMSP=/var/lastupdate 41 | RUN 
mkdir -p $(dirname $TMSP); date > $TMSP; chmod a+r $TMSP 42 | 43 | USER ${USER} 44 | -------------------------------------------------------------------------------- /dockerfiles/base-bin/README.md: -------------------------------------------------------------------------------- 1 | # `base-bin` 2 | 3 | A frequently built and updated image to be used as base for our binary distribution. 4 | The image is not named after a specific distribution such as `ubuntu` to leave us the option 5 | to change the base image over time. 6 | 7 | This base image is Parity opinionated and contains our GPG keys. 8 | 9 | Unlike the `base-ci-linux` image which contain development toolchains, this image is meant to be used as base image for final delivery of binaries. 10 | 11 | ## Build 12 | 13 | See `./build.sh` 14 | -------------------------------------------------------------------------------- /dockerfiles/base-bin/build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ENGINE=podman 4 | IMAGE=parity/base-bin 5 | REGISTRY=${REGISTRY:-docker.io} 6 | 7 | $ENGINE build \ 8 | --build-arg BUILD_DATE=$(date +%Y%m%d) \ 9 | --build-arg USER=parity \ 10 | -t $REGISTRY/parity/$IMAGE \ 11 | -t $REGISTRY/$USER/$IMAGE \ 12 | -t $REGISTRY/$IMAGE \ 13 | . 14 | $ENGINE images | grep $IMAGE 15 | $ENGINE run --rm -it -h $IMAGE $IMAGE bash -c 'uname -a; echo "Last updated:"; cat /var/lastupdate' 16 | -------------------------------------------------------------------------------- /dockerfiles/base-ci-linux/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/debian:bullseye-slim 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | ARG DEBIAN_CODENAME=bullseye 8 | 9 | # metadata 10 | LABEL summary="Layer 1 image with all dependencies for Rust and WASM compilation." 
\ 11 | name="${REGISTRY_PATH}/base-ci-linux" \ 12 | maintainer="devops-team@parity.io" \ 13 | version="1.0" \ 14 | description="libssl-dev, clang, libclang-dev, lld, cmake, make, git, pkg-config \ 15 | curl, time, jq, lsof, rhash, rust stable, sccache" \ 16 | io.parity.image.vendor="Parity Technologies" \ 17 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 18 | dockerfiles/base-ci-linux/Dockerfile" \ 19 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 20 | dockerfiles/base-ci-linux/README.md" \ 21 | io.parity.image.revision="${VCS_REF}" \ 22 | io.parity.image.created="${BUILD_DATE}" 23 | 24 | WORKDIR /builds 25 | 26 | ENV SHELL /bin/bash 27 | ENV DEBIAN_FRONTEND=noninteractive 28 | 29 | # config for wasm32-unknown-unknown & clang 30 | COPY utility/base-ci-linux-config /root/.cargo/config 31 | COPY utility/debian-llvm-clang.key /etc/apt/trusted.gpg.d/debian-llvm-clang.gpg 32 | 33 | ENV RUSTUP_HOME=/usr/local/rustup \ 34 | CARGO_HOME=/usr/local/cargo \ 35 | PATH=/usr/local/cargo/bin:$PATH \ 36 | CC=clang-14 \ 37 | CXX=clang-14 38 | 39 | # install tools and dependencies 40 | RUN set -eux; \ 41 | apt-get -y update; \ 42 | apt-get install -y --no-install-recommends \ 43 | libssl-dev make cmake graphviz \ 44 | git pkg-config curl time rhash ca-certificates jq \ 45 | python3 python3-pip lsof ruby ruby-bundler git-restore-mtime xz-utils zstd unzip gnupg protobuf-compiler && \ 46 | # add clang 14 repo 47 | echo "deb http://apt.llvm.org/${DEBIAN_CODENAME}/ llvm-toolchain-${DEBIAN_CODENAME}-14 main" >> /etc/apt/sources.list.d/llvm-toolchain-${DEBIAN_CODENAME}-14.list; \ 48 | echo "deb-src http://apt.llvm.org/${DEBIAN_CODENAME}/ llvm-toolchain-${DEBIAN_CODENAME}-14 main" >> /etc/apt/sources.list.d/llvm-toolchain-${DEBIAN_CODENAME}-14.list; \ 49 | apt-get -y update; \ 50 | apt-get install -y --no-install-recommends \ 51 | clang-14 lldb-14 lld-14 libclang-14-dev && \ 52 | # add non-root user 53 | groupadd 
-g 1000 nonroot && \ 54 | useradd -u 1000 -g 1000 -s /bin/bash -m nonroot && \ 55 | # install specific minio client version (2023-04-06) 56 | curl -L "https://dl.min.io/client/mc/release/linux-amd64/archive/mc.RELEASE.2023-04-06T16-51-10Z" -o /usr/local/bin/mc && \ 57 | chmod 755 /usr/local/bin/mc && \ 58 | # set a link to clang 59 | update-alternatives --install /usr/bin/cc cc /usr/bin/clang-14 100; \ 60 | # set a link to ldd 61 | update-alternatives --install /usr/bin/ld ld /usr/bin/ld.lld-14 100; \ 62 | # install rustup, use minimum components 63 | curl -L "https://static.rust-lang.org/rustup/dist/x86_64-unknown-linux-gnu/rustup-init" \ 64 | -o rustup-init; \ 65 | chmod +x rustup-init; \ 66 | ./rustup-init -y --no-modify-path --profile minimal --default-toolchain stable; \ 67 | rm rustup-init; \ 68 | chown -R root:nonroot ${RUSTUP_HOME} ${CARGO_HOME}; \ 69 | chmod -R g+w ${RUSTUP_HOME} ${CARGO_HOME}; \ 70 | pip install yq; \ 71 | # install sccache 72 | cargo install sccache; \ 73 | # versions 74 | rustup show; \ 75 | cargo --version; \ 76 | # cargo clean up 77 | # removes compilation artifacts cargo install creates (>250M) 78 | rm -rf "${CARGO_HOME}/registry" "${CARGO_HOME}/git" /root/.cache/sccache; \ 79 | # apt clean up 80 | apt-get autoremove -y; \ 81 | apt-get clean; \ 82 | rm -rf /var/lib/apt/lists/* 83 | # show backtraces 84 | ENV RUST_BACKTRACE=1 85 | -------------------------------------------------------------------------------- /dockerfiles/base-ci-linux/README.md: -------------------------------------------------------------------------------- 1 | # base-ci-linux 2 | 3 | Docker image based on [official Debian image](https://hub.docker.com/_/debian) debian:bullseye-slim. 4 | 5 | Used as base for `Substrate`-based CI images. 6 | 7 | Our base CI image ``. 8 | 9 | Used to build and test Substrate-based projects. 
10 | 11 | **Dependencies and Tools:** 12 | 13 | - `libssl-dev` 14 | - `clang-14` 15 | - `lld-14` 16 | - `libclang-14-dev` 17 | - `make` 18 | - `cmake` 19 | - `git` 20 | - `pkg-config` 21 | - `curl` 22 | - `time` 23 | - `jq` 24 | - `lsof` 25 | - `rhash` 26 | - `ca-certificates` 27 | 28 | [Click here](https://hub.docker.com/repository/docker/paritytech/base-ci-linux) for the registry. 29 | 30 | **Rust tools & toolchains:** 31 | 32 | - stable (default) 33 | - `sccache` 34 | 35 | ## Usage 36 | 37 | ```Dockerfile 38 | FROM docker.io/paritytech/base-ci-linux:latest 39 | ``` 40 | -------------------------------------------------------------------------------- /dockerfiles/benchmarks/.gitignore: -------------------------------------------------------------------------------- 1 | output.txt 2 | output_redacted.txt 3 | -------------------------------------------------------------------------------- /dockerfiles/benchmarks/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG VCS_REF=master 2 | ARG BUILD_DATE="" 3 | ARG REGISTRY_PATH=docker.io/paritytech 4 | 5 | FROM docker.io/library/python:slim 6 | 7 | # metadata 8 | LABEL summary="Image for benchmarks" \ 9 | name="${REGISTRY_PATH}/benchmarks" \ 10 | maintainer="devops-team@parity.io" \ 11 | version="1.0" \ 12 | description="Image to push benchmarks and evaluate them" \ 13 | io.parity.image.vendor="Parity Technologies" \ 14 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/benchmarks/Dockerfile" \ 16 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 17 | dockerfiles/benchmarks/README.md" \ 18 | io.parity.image.revision="${VCS_REF}" \ 19 | io.parity.image.created="${BUILD_DATE}" 20 | 21 | COPY benchmarks/check_bench_result.py /usr/local/bin/check_bench_result 22 | COPY benchmarks/check_single_bench_result.py /usr/local/bin/check_single_bench_result 23 | COPY benchmarks/push_bench_result.py 
/usr/local/bin/push_bench_result 24 | 25 | RUN groupadd -g 10000 nonroot && \ 26 | useradd -u 10000 -g 10000 -s /bin/bash -m nonroot && \ 27 | mkdir /nonroot &&\ 28 | chown nonroot. /nonroot 29 | 30 | USER nonroot:nonroot 31 | 32 | WORKDIR /nonroot 33 | 34 | COPY benchmarks/requirements.txt /tmp/requirements.txt 35 | 36 | RUN pip install -r /tmp/requirements.txt 37 | -------------------------------------------------------------------------------- /dockerfiles/benchmarks/check_bench_result.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | The script compares current benchmark result with the previous 4 | and creates an issue in GitHub if it exceeds the threshold 5 | Usage: check_bench_result.py 6 | 7 | EXAMPLE: 8 | export CI_PROJECT_NAME=jsonrpsee 9 | export PROMETHEUS_URL="https://thanos.parity-mgmt.parity.io/" 10 | ./check_bench_result.py output.txt 11 | """ 12 | 13 | import os 14 | import sys 15 | from prometheus_api_client import PrometheusConnect 16 | from github import Github 17 | 18 | import urllib3 19 | 20 | urllib3.disable_warnings() 21 | 22 | 23 | try: 24 | github_org = os.environ["GITHUB_ORG"] 25 | except KeyError: 26 | github_org = "paritytech" 27 | 28 | try: 29 | results_file = sys.argv[1] 30 | prometheus_url = os.environ["PROMETHEUS_URL"] 31 | github_repo = os.environ["CI_PROJECT_NAME"] 32 | except KeyError as error: 33 | print(f"{error} not found in environment variables") 34 | sys.exit(1) 35 | except IndexError: 36 | print("Please specify file with benchmark results and github_org") 37 | print("Usage:", sys.argv[0], "") 38 | sys.exit(1) 39 | 40 | try: 41 | threshold = os.environ["THRESHOLD"] 42 | except KeyError: 43 | threshold = 20 44 | 45 | prometheus_client = PrometheusConnect(url=prometheus_url, disable_ssl=True) 46 | 47 | # Allow script to run locally to check connection to Promethues without the 48 | # need of a github token 49 | try: 50 | github_token = 
def benchmark_last_result(project, benchmark, client=None):
    """
    Get the latest stored result (ns/iter) for one benchmark from Thanos.

    :param project: project name used in the metric's ``project`` label
    :param benchmark: benchmark name used in the metric's ``benchmark`` label
    :param client: optional PrometheusConnect-like object (anything exposing
                   ``custom_query``); falls back to the module-level
                   ``prometheus_client`` when omitted
    :return: last recorded value as int, or -1 when no data exists
    """
    if client is None:
        client = prometheus_client
    query = (
        f'last_over_time(parity_benchmark_common_result_ns'
        f'{{project="{project}",benchmark="{benchmark}"}}[1y])'
    )
    query_result = client.custom_query(query=query)
    if query_result:
        return int(query_result[0]["value"][1])
    return -1


def benchmark_last_sha(project, client=None):
    """
    Get the commit sha of the most recent benchmark push for a project.

    Scans the last 3 days of ``parity_benchmark_specific_result_ns`` samples
    and returns the ``commit`` label of the sample with the newest timestamp.

    :param project: project name used in the metric's ``project`` label
    :param client: optional PrometheusConnect-like object; falls back to the
                   module-level ``prometheus_client`` when omitted
    :return: commit sha string of the latest sample
    """
    if client is None:
        client = prometheus_client
    query = f'parity_benchmark_specific_result_ns{{project="{project}"}}[3d]'
    query_result = client.custom_query(query=query)
    # Map each commit to the timestamp of its first sample; the commit with
    # the newest timestamp is the latest push.
    newest = {}
    for sample in query_result:
        newest[sample["metric"]["commit"]] = sample["values"][0][0]
    return max(newest, key=newest.get)
def get_name_value(line):
    """
    Extract the benchmark name and result from one libtest output line.

    Expects a line like ``test <name> ... bench: <value> ns/iter (+/- <dev>)``.

    :param line: one whitespace-separated benchmark result line
    :return: tuple of (benchmark name, result as int)
    """
    parts = line.split()
    return parts[1], int(parts[4])


def check_line_valid(line):
    """
    Exit with status 1 unless the line splits into exactly 8 fields.

    Expects line "test <name> ... bench: <value> ns/iter (+/- <deviation>)".
    """
    if len(line.split()) != 8:
        print("Data has wrong format")
        sys.exit(1)


def difference_p(first, second):
    """Return the absolute difference between two values, in percent of the second."""
    return round(abs(first * 100 / second - 100))


if __name__ == "__main__":
    benchmarks_with_regression = []
    with open(results_file, "r") as file_handle:
        for result in file_handle:
            # Skip lines that are not benchmark results.
            if "test" not in result:
                continue
            check_line_valid(result)
            benchmark_name, benchmark_current_value = get_name_value(result)
            benchmark_last_value = benchmark_last_result(
                github_repo, benchmark_name
            )
            if benchmark_last_value == -1:
                print(
                    benchmark_name,
                    "is new and doesn't have any data yet, skipping",
                )
                continue
            if benchmark_current_value > benchmark_last_value:
                diff = difference_p(
                    benchmark_current_value, benchmark_last_value
                )
                # THRESHOLD arrives from the environment as a str; coerce to
                # int so the comparison can't raise a TypeError.
                if diff > int(threshold):
                    benchmarks_with_regression.append(
                        f"| {benchmark_name} | {benchmark_last_value} ns/iter | {benchmark_current_value} ns/iter | {diff}% |"
                    )
    if benchmarks_with_regression:
        print("Regression found")
        # Uncomment after fixing https://github.com/paritytech/ci_cd/issues/302
        # print("Regression found, creating GitHub issue")
        # issue = create_github_issue(benchmarks_with_regression)
        # print(issue)
    else:
        print("No regressions")
def is_metric_exceed_threshold(
    value1: float, value2: float, threshold: int
) -> bool:
    """Return True when value1 deviates from value2 by more than threshold percent."""
    deviation_percent = abs(value1 * 100 / value2 - 100)
    return deviation_percent > threshold


def is_metric_exceed_constant(value: float, constant: int) -> bool:
    """Return True when the measured value is above the fixed limit."""
    exceeds_limit = value > constant
    return exceeds_limit
def get_variable_from_env(variable: str):
    """Fetch a required environment variable; abort with status 1 if it is unset."""
    try:
        return environ[variable]
    except KeyError:
        print(f"{variable} not found in env variables, exiting")
        exit(1)
is_metric_exceed_threshold( 175 | float(args.value), last_result, int(args.threshold) 176 | ) 177 | reason = f"Difference between current result (**{args.value}**) and previous result (**{last_result}**) exceeds threshold **{args.threshold}%**" 178 | if is_metric_exceed: 179 | print("Regression found") 180 | print("Creating issue") 181 | github_repo = args.github_repo 182 | github_client = Github(get_variable_from_env("GITHUB_TOKEN")) 183 | print(create_github_issue(github_client, github_repo, reason)) 184 | else: 185 | print("No regressions") 186 | 187 | 188 | if __name__ == "__main__": 189 | main() 190 | -------------------------------------------------------------------------------- /dockerfiles/benchmarks/push_bench_result.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Push data to Promethues Pushgateway 4 | Usage: python3 push_bench_result.py -t \ 5 | -p \ 6 | -n \ 7 | -r \ 8 | -u \ 9 | -l 10 | -s 11 | 12 | Example: python3 push_bench_result.py -t specific \ 13 | -p $CI_PROJECT_NAME \ 14 | -n superbench \ 15 | -r 15 \ 16 | -u ns \ 17 | -l 'commit="1qw2f43984uf",cirunner="ci5"' \ 18 | -s 'http://prometheus.address' 19 | If you need to pass some variables to the label then string should look like: 'commit="'$commit'",cirunner="'$runner'"' 20 | 21 | EXAMPLE: 22 | ./push_bench_result.py --type common \ 23 | --project substrate-api-sidecar \ 24 | --name sidecar \ 25 | --result 34444 \ 26 | --unit ms \ 27 | --prometheus-server https://pushgateway.parity-build.parity.io 28 | Metric 'parity_benchmark_common_result_ms{project="substrate-api-sidecar",benchmark="sidecar"} 34444' was successfully sent 29 | """ 30 | 31 | import sys 32 | import argparse 33 | import requests 34 | 35 | 36 | def get_arguments(): 37 | parser = argparse.ArgumentParser( 38 | description="Push data to Promethues Pushgateway." 
def create_metric(args):
    """
    Build a Prometheus-format metric name/value pair from CLI arguments.

    A "common" metric must not carry extra labels; other benchmark types may
    append the caller-supplied label string.
    """
    base_labels = f'project="{args.project}",benchmark="{args.name}"'
    if args.labels:
        if args.type == "common":
            print("Common metric shouldn't have additional labels")
            sys.exit(1)
        base_labels = f"{base_labels},{args.labels}"
    metric_name = f"parity_benchmark_{args.type}_result_{args.unit}{{{base_labels}}}"
    return metric_name, args.result
f"{server}/metrics/job/{metric_name}" 105 | # \n is required to signal end of input stream 106 | data = f"{metric_name} {metric_value}\n" 107 | return requests.post(url, data=data) 108 | 109 | 110 | def main(): 111 | args = get_arguments() 112 | metric_name, metric_value = create_metric(args) 113 | send_metric_result = send_metric( 114 | args.prometheus_pushgateway, metric_name, metric_value 115 | ) 116 | if send_metric_result.text == "" and send_metric_result.status_code < 400: 117 | print(f"Metric '{metric_name} {metric_value}' was successfully sent") 118 | else: 119 | print( 120 | f"Error occured: \nError code: {send_metric_result.status_code} \nError message: {send_metric_result.text}" 121 | ) 122 | 123 | 124 | if __name__ == "__main__": 125 | main() 126 | -------------------------------------------------------------------------------- /dockerfiles/benchmarks/requirements.txt: -------------------------------------------------------------------------------- 1 | prometheus_api_client==0.4.2 2 | PyGithub==1.55 3 | -------------------------------------------------------------------------------- /dockerfiles/bridges-ci/Dockerfile: -------------------------------------------------------------------------------- 1 | # Image with dependencies required to build projects from the bridge repo. 2 | # 3 | # This image is meant to be used as a building block when building images for 4 | # the various components in the bridge repo, such as nodes and relayers. 5 | ARG VCS_REF=master 6 | ARG BUILD_DATE 7 | ARG REGISTRY_PATH=docker.io/paritytech 8 | 9 | FROM ${REGISTRY_PATH}/base-ci-linux:latest 10 | 11 | # metadata 12 | LABEL io.parity.image.authors="devops-team@parity.io" \ 13 | io.parity.image.vendor="Parity Technologies" \ 14 | io.parity.image.title="${REGISTRY_PATH}/bridges-ci" \ 15 | io.parity.image.description="CI image with all dependencies for parity-bridges-common compilation." 
\ 16 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 17 | dockerfiles/bridges-ci/Dockerfile" \ 18 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 19 | dockerfiles/bridges-ci/README.md" \ 20 | io.parity.image.revision="${VCS_REF}" \ 21 | io.parity.image.created="${BUILD_DATE}" 22 | 23 | RUN rustup toolchain install nightly --target wasm32-unknown-unknown \ 24 | --profile minimal --component clippy rustfmt && \ 25 | rustup target add wasm32-unknown-unknown && \ 26 | cargo install cargo-deny cargo-spellcheck && \ 27 | # cargo clean up 28 | # removes compilation artifacts cargo install creates (>250M) 29 | rm -rf "${CARGO_HOME}/registry" "${CARGO_HOME}/git" && \ 30 | # versions 31 | rustc -vV && \ 32 | cargo -V 33 | 34 | USER nonroot:nonroot 35 | -------------------------------------------------------------------------------- /dockerfiles/bridges-ci/README.md: -------------------------------------------------------------------------------- 1 | # bridges-ci 2 | 3 | Docker image based on our base CI image ``. 4 | 5 | Used to build and test parity-bridges-common. 6 | 7 | ## Dependencies and Tools 8 | 9 | **Inherited from ``** 10 | 11 | - `libssl-dev` 12 | - `clang-10` 13 | - `lld` 14 | - `libclang-dev` 15 | - `make` 16 | - `cmake` 17 | - `git` 18 | - `pkg-config` 19 | - `curl` 20 | - `time` 21 | - `rhash` 22 | - `ca-certificates` 23 | - `jq` 24 | 25 | **Rust versions:** 26 | 27 | We always use the [latest possible](https://rust-lang.github.io/rustup-components-history/) `nightly` version that supports our required `rustup` components: 28 | 29 | - `clippy`: The Rust linter. 30 | - `rustfmt`: The Rust code formatter. 31 | 32 | **Rust tools & toolchains:** 33 | 34 | - stable (default) 35 | - `wasm32-unknown-unknown`: The toolchain to compile Rust codebases for Wasm. 36 | - `sccache`: Caching system for Rust. 37 | - `cargo-deny`: Checks licenses, dupe dependencies, vulnerability dDBs. 
38 | 39 | [Click here](https://hub.docker.com/repository/docker/paritytech/bridges-ci) for the registry. 40 | 41 | ## Usage 42 | 43 | ```yaml 44 | test-ink: 45 | stage: test 46 | image: paritytech/bridges-ci:production 47 | script: 48 | - cargo build ... 49 | ``` 50 | 51 | -------------------------------------------------------------------------------- /dockerfiles/ci-linux/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG VCS_REF=master 2 | ARG BUILD_DATE="" 3 | ARG REGISTRY_PATH=docker.io/paritytech 4 | 5 | FROM ${REGISTRY_PATH}/base-ci-linux:latest 6 | 7 | ARG RUST_NIGHTLY="2023-05-23" 8 | 9 | # metadata 10 | LABEL summary="Image for Substrate-based projects." \ 11 | name="${REGISTRY_PATH}/ci-linux" \ 12 | maintainer="devops-team@parity.io" \ 13 | version="1.0" \ 14 | description="Inherits from base-ci-linux; wasm-gc, wasm-bindgen-cli, wasm-pack, cargo-audit, cargo-web, cargo-deny" \ 15 | io.parity.image.vendor="Parity Technologies" \ 16 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/ci-linux/Dockerfile" \ 17 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/ci-linux/README.md" \ 18 | io.parity.image.revision="${VCS_REF}" \ 19 | io.parity.image.created="${BUILD_DATE}" 20 | 21 | # install tools and dependencies 22 | RUN set -eux && \ 23 | # install `rust-src` component for ui test 24 | rustup component add rust-src rustfmt clippy && \ 25 | # install wasm target into default (stable) toolchain 26 | rustup target add wasm32-unknown-unknown && \ 27 | # install specific Rust nightly, default is stable, use minimum components 28 | rustup toolchain install "nightly-${RUST_NIGHTLY}" --profile minimal --component rustfmt && \ 29 | # install wasm target into nightly toolchain 30 | rustup target add wasm32-unknown-unknown --toolchain "nightly-${RUST_NIGHTLY}" && \ 31 | # "alias" pinned nightly toolchain as nightly 32 | ln -s 
"/usr/local/rustup/toolchains/nightly-${RUST_NIGHTLY}-x86_64-unknown-linux-gnu" /usr/local/rustup/toolchains/nightly-x86_64-unknown-linux-gnu && \ 33 | # install cargo tools 34 | cargo install cargo-web wasm-pack cargo-deny cargo-spellcheck cargo-hack \ 35 | mdbook mdbook-mermaid mdbook-linkcheck mdbook-graphviz mdbook-last-changed && \ 36 | cargo install cargo-nextest --locked && \ 37 | # diener 0.4.6 NOTE: before upgrading please test new version with companion build 38 | # example can be found here: https://github.com/paritytech/substrate/pull/12710 39 | cargo install diener --version 0.4.6 && \ 40 | # wasm-bindgen-cli version should match the one pinned in substrate 41 | # https://github.com/paritytech/substrate/blob/master/bin/node/browser-testing/Cargo.toml#L15 42 | cargo install --version 0.2.73 wasm-bindgen-cli && \ 43 | # install wasm-gc. It's useful for stripping slimming down wasm binaries (polkadot) 44 | cargo install wasm-gc && \ 45 | # install cargo hfuzz and honggfuzz dependencies 46 | apt-get -y update && \ 47 | apt-get install -y binutils-dev libunwind-dev libblocksruntime-dev && \ 48 | cargo install honggfuzz && \ 49 | # versions 50 | rustup show && \ 51 | cargo --version && \ 52 | # apt clean up 53 | apt-get autoremove -y && \ 54 | apt-get clean && \ 55 | rm -rf /var/lib/apt/lists/* && \ 56 | # cargo clean up 57 | # removes compilation artifacts cargo install creates (>250M) 58 | rm -rf "${CARGO_HOME}/registry" "${CARGO_HOME}/git" /root/.cache/sccache 59 | -------------------------------------------------------------------------------- /dockerfiles/ci-linux/README.md: -------------------------------------------------------------------------------- 1 | # ci-linux 2 | 3 | Docker image based on our base CI image ``. 4 | 5 | Used to build and test Substrate-based projects. 
6 | 7 | ## Dependencies and Tools 8 | 9 | - `chromium-driver` 10 | 11 | **Inherited from ``:** 12 | 13 | - `libssl-dev` 14 | - `clang` 15 | - `lld` 16 | - `libclang-dev` 17 | - `make` 18 | - `cmake` 19 | - `git` 20 | - `pkg-config` 21 | - `curl` 22 | - `time` 23 | - `rhash` 24 | - `ca-certificates` 25 | 26 | **Rust versions:** 27 | 28 | - stable (default) 29 | - nightly 30 | 31 | **Rust tools & toolchains:** 32 | 33 | - `cargo-web` 34 | - `cargo-hack` 35 | - `cargo-nextest` 36 | - `sccache` 37 | - `wasm-pack` 38 | - `wasm-bindgen` 39 | - `cargo-deny` 40 | - `cargo-spellcheck`: Required for the CI to do automated spell-checking. 41 | - `cargo-hfuzz` 42 | - `wasm32-unknown-unknown` toolchain 43 | - `mdbook mdbook-mermaid mdbook-linkcheck mdbook-graphviz mdbook-last-changed` 44 | 45 | [Click here](https://hub.docker.com/repository/docker/paritytech/ci-linux) for the registry. 46 | 47 | ## Usage 48 | 49 | ```yaml 50 | test-substrate: 51 | stage: test 52 | image: paritytech/ci-linux:production 53 | script: 54 | - cargo build ... 55 | ``` 56 | 57 | -------------------------------------------------------------------------------- /dockerfiles/ci-unified/README.md: -------------------------------------------------------------------------------- 1 | # The unified Parity CI image 2 | 3 | [![Docker Pulls](https://img.shields.io/docker/pulls/paritytech/ci-unified)](https://hub.docker.com/r/paritytech/ci-unified/tags) 4 | 5 | 6 | This image is used for running CI jobs for Parity repositories. It could also work for you if you're building something on Polkadot SDK. 
7 | 8 | ### Specification 9 | 10 | The actual image's revision is based on Debian 11 (aka `bullseye`) and contains the following: 11 | 12 | * Rust stable 1.77.0 13 | * Rust nightly 2024-04-10 14 | * LLVM 15 15 | * Python 3.9.2 16 | * Ruby 2.7.4 17 | * Plenty of different utilities required in the CI pipelines 18 | 19 | ### Tags 20 | 21 | Images are tagged with the following pattern: 22 | ``` 23 | [ - | - ][ -v ] 24 | ``` 25 | 26 | For example: 27 | 28 | * `paritytech/ci-unified:bullseye-1.70` 29 | * `paritytech/ci-unified:bullseye-1.70-v20230705` 30 | * `paritytech/ci-unified:bullseye-1.70-2023-05-23` 31 | * `paritytech/ci-unified:bullseye-1.70-2023-05-23-v20230705` 32 | 33 | So when we release a new image, the image is tagged with these 4 tags based on the pattern described above. 34 | 35 | #### Currently available tag combination flavors (i.e. pairs) 36 | 37 | * `bullseye-1.77.0-2024-04-10` 38 | * `bullseye-1.75.0-2024-01-22` 39 | * `bullseye-1.74.0-2023-11-01` 40 | * `bullseye-1.73.0-2023-11-01` 41 | * `bullseye-1.73.0-2023-05-23` 42 | * `bullseye-1.71.0-2023-05-23` 43 | * `bullseye-1.70.0-2023-05-23` 44 | * `bullseye-1.69.0-2023-03-21` 45 | 46 | Note that we keep the old pairs for a while, but eventually they will be removed. So please, try to use the actual available pair. 47 | 48 | #### The `latest` tag 49 | 50 | The `latest` tag is an alias for the latest available tag combination flavor. Using `latest` implies that you following the upstream in the rolling release style, so you should be aware of the possible breaking changes (i.e. that replicates previous `ci-linux:production` behavior). 
51 | -------------------------------------------------------------------------------- /dockerfiles/ci-unified/build-args: -------------------------------------------------------------------------------- 1 | RUST_STABLE_VERSION=1.81.0 2 | RUST_NIGHTLY_VERSION=2024-09-11 3 | FORKLIFT_VERSION=0.13.2 4 | CODECOV_UPLOADER_VERSION=v0.7.3 5 | CODECOV_CLI_VERSION=v0.6.0 6 | # 7 | DISTRO_CODENAME=bullseye 8 | -------------------------------------------------------------------------------- /dockerfiles/ci-unified/cargo-config: -------------------------------------------------------------------------------- 1 | [target.wasm32-unknown-unknown] 2 | # use clang as linker 3 | linker="clang-REPLACEME" 4 | 5 | [target.x86_64-unknown-linux-gnu] 6 | # use clang as linker 7 | linker="clang-REPLACEME" 8 | # enable the additional instruction extensions for rustcrypto deps 9 | rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"] 10 | -------------------------------------------------------------------------------- /dockerfiles/ci-unified/download-forklift.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | DL_PATH="$HOME/.forklift4" 4 | VERSION="latest" 5 | 6 | while getopts "p:v:" flag; do 7 | case $flag in 8 | v) # version 9 | VERSION=$OPTARG 10 | ;; 11 | p) # download path 12 | DL_PATH=$OPTARG 13 | ;; 14 | \?) 
15 | echo "invalid option '$flag'" 16 | exit 1 17 | ;; 18 | esac 19 | done 20 | 21 | RELEASE_URL="https://api.github.com/repos/paritytech/forklift/releases/tags/$VERSION" 22 | 23 | if [[ "latest" == $VERSION ]]; then 24 | RELEASE_URL="https://api.github.com/repos/paritytech/forklift/releases/latest" 25 | fi 26 | 27 | echo "Downloading forklift $VERSION to $DL_PATH from $RELEASE_URL" 28 | 29 | RELEASE=`curl -s $RELEASE_URL` 30 | ASSET=`jq '.assets[] | select(.name | endswith("linux_amd64"))' <<< "$RELEASE"` 31 | 32 | ASSET_NAME=`jq -r '.name' <<< "$ASSET"` 33 | ASSET_URL=`jq -r '.browser_download_url' <<< "$ASSET"` 34 | 35 | mkdir -p $DL_PATH 36 | curl -L -s -o $DL_PATH/$ASSET_NAME -L $ASSET_URL 37 | cp -r $DL_PATH/$ASSET_NAME /usr/local/bin/forklift 38 | 39 | chmod 755 /usr/local/bin/forklift 40 | -------------------------------------------------------------------------------- /dockerfiles/contracts-ci-linux/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG VCS_REF=master 2 | ARG BUILD_DATE="" 3 | ARG REGISTRY_PATH=docker.io/paritytech 4 | 5 | FROM ${REGISTRY_PATH}/base-ci-linux:latest 6 | 7 | ARG RUST_NIGHTLY="2023-03-21" 8 | 9 | # metadata 10 | LABEL io.parity.image.authors="devops-team@parity.io" \ 11 | io.parity.image.vendor="Parity Technologies" \ 12 | io.parity.image.title="${REGISTRY_PATH}/contracts-ci-linux" \ 13 | io.parity.image.description="Inherits from base-ci-linux:latest. \ 14 | llvm-dev, clang, zlib1g-dev, npm, yarn, wabt, binaryen. 
\ 15 | rust nightly, rustfmt, clippy, rust-src, substrate-contracts-node" \ 16 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 17 | dockerfiles/contracts-ci-linux/Dockerfile" \ 18 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 19 | dockerfiles/contracts-ci-linux/README.md" \ 20 | io.parity.image.revision="${VCS_REF}" \ 21 | io.parity.image.created="${BUILD_DATE}" 22 | 23 | WORKDIR /builds 24 | 25 | RUN set -eux; \ 26 | apt-get -y update && \ 27 | apt-get install -y --no-install-recommends zlib1g-dev npm wabt && \ 28 | npm install --ignore-scripts -g yarn && \ 29 | 30 | # `binaryen` is needed by `cargo-contract` for optimizing Wasm files. 31 | # We fetch the latest release which contains a Linux binary. 32 | curl -L $(curl --silent https://api.github.com/repos/WebAssembly/binaryen/releases \ 33 | | jq -r '.[0].assets | [.[] | .browser_download_url] | map(select(match("x86_64-linux\\.tar\\.gz$"))) | .[0]' \ 34 | ) | tar -xz -C /usr/local/bin/ --wildcards --strip-components=2 'binaryen-*/bin/wasm-opt' && \ 35 | 36 | # The stable toolchain is used to build ink! contracts through the use of the 37 | # `RUSTC_BOOSTRAP=1` environment variable. We also need to install the 38 | # `wasm32-unknown-unknown` target since that's the platform that ink! smart contracts 39 | # run on. 40 | rustup target add wasm32-unknown-unknown --toolchain stable && \ 41 | rustup component add rust-src --toolchain stable && \ 42 | rustup default stable && \ 43 | 44 | # We also use the nightly toolchain for linting. We perform checks using RustFmt, and 45 | # Cargo Clippy. 46 | # 47 | # Note that we pin the nightly toolchain since it often creates breaking changes during 48 | # the RustFmt and Clippy stages of the CI. 
49 | rustup toolchain install nightly-${RUST_NIGHTLY} --target wasm32-unknown-unknown \ 50 | --profile minimal --component rustfmt clippy rust-src && \ 51 | 52 | # Alias pinned toolchain as nightly, otherwise it appears as though we 53 | # don't have a nightly toolchain (i.e rustc +nightly --version is empty) 54 | ln -s "/usr/local/rustup/toolchains/nightly-${RUST_NIGHTLY}-x86_64-unknown-linux-gnu" \ 55 | /usr/local/rustup/toolchains/nightly-x86_64-unknown-linux-gnu && \ 56 | 57 | # `cargo-dylint` and `dylint-link` are dependencies needed to run `cargo-contract`. 58 | cargo install cargo-dylint dylint-link && \ 59 | 60 | # Install the latest `cargo-contract` 61 | cargo install --git https://github.com/paritytech/cargo-contract \ 62 | --locked --branch master --force && \ 63 | 64 | # Download the latest `substrate-contracts-node` binary 65 | curl -L -o substrate-contracts-node.zip 'https://gitlab.parity.io/parity/mirrors/substrate-contracts-node/-/jobs/artifacts/main/download?job=build-linux' && \ 66 | unzip substrate-contracts-node.zip && \ 67 | mv artifacts/substrate-contracts-node-linux/substrate-contracts-node /usr/local/cargo/bin/substrate-contracts-node && \ 68 | rm -r artifacts substrate-contracts-node.zip && \ 69 | chmod +x /usr/local/cargo/bin/substrate-contracts-node && \ 70 | 71 | # We use `estuary` as a lightweight cargo registry in the CI to test if 72 | # publishing `cargo-contract` to it and installing it from there works. 
73 | cargo install --git https://github.com/onelson/estuary.git --force && \ 74 | 75 | # Versions 76 | yarn --version && \ 77 | rustup show && \ 78 | cargo --version && \ 79 | echo $( substrate-contracts-node --version | awk 'NF' ) && \ 80 | estuary --version && \ 81 | 82 | # cargo clean up 83 | # removes compilation artifacts cargo install creates (>250M) 84 | rm -rf "${CARGO_HOME}/registry" "${CARGO_HOME}/git" /root/.cache/sccache && \ 85 | 86 | # apt clean up 87 | apt-get autoremove -y && \ 88 | apt-get clean && \ 89 | rm -rf /var/lib/apt/lists/* 90 | 91 | # TODO: https://gitlab.parity.io/parity/cargo-contract/-/jobs/958744, https://gitlab.parity.io/parity/cargo-contract/-/jobs/958745 92 | # USER nonroot:nonroot 93 | -------------------------------------------------------------------------------- /dockerfiles/contracts-ci-linux/README.md: -------------------------------------------------------------------------------- 1 | # contracts! CI for Linux Distributions 2 | 3 | Docker image based on our base CI image ``. 4 | 5 | Used to build and test contracts!. 6 | 7 | ## Dependencies and Tools 8 | 9 | - `llvm-dev` 10 | - `zlib1g-dev` 11 | - `npm` 12 | - `yarn` 13 | - `wabt` 14 | - `binaryen` 15 | 16 | **Inherited from ``** 17 | 18 | - `libssl-dev` 19 | - `clang-10` 20 | - `lld` 21 | - `libclang-dev` 22 | - `make` 23 | - `cmake` 24 | - `git` 25 | - `pkg-config` 26 | - `curl` 27 | - `time` 28 | - `rhash` 29 | - `ca-certificates` 30 | - `jq` 31 | 32 | **Rust versions:** 33 | 34 | We always try to use the [latest possible](https://rust-lang.github.io/rustup-components-history/) `nightly` version that supports our required `rustup` components: 35 | 36 | - `rustfmt`: The Rust code formatter. 37 | - `clippy`: The Rust linter. 38 | - `rust-src`: The Rust sources of the standard library. 
39 | 40 | **Rust tools & toolchains:** 41 | 42 | - `cargo-contract` 43 | - `cargo-dylint` and `dylint-link` 44 | - `pwasm-utils-cli` 45 | - `solang` 46 | - `substrate-contracts-node` 47 | - `wasm32-unknown-unknown`: The toolchain to compile Rust codebases for Wasm. 48 | - `estuary`: Lightweight cargo registry to test if publishing/installing works. 49 | 50 | [Click here](https://hub.docker.com/repository/docker/paritytech/contracts-ci-linux) for the registry. 51 | 52 | ## Usage 53 | 54 | ```yaml 55 | test-contracts: 56 | stage: test 57 | image: paritytech/contracts-ci-linux:production 58 | script: 59 | - cargo build ... 60 | ``` 61 | 62 | -------------------------------------------------------------------------------- /dockerfiles/db-dumper/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11-slim-bookworm 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | # metadata 8 | LABEL io.parity.image.authors="devops-team@parity.io" \ 9 | io.parity.image.vendor="Parity Technologies" \ 10 | io.parity.image.title="${REGISTRY_PATH}/db-dumper" \ 11 | io.parity.image.description="A script that makes dumps of GCP Cloud SQL Postgres databases\ 12 | to a GCP Cloud storage bucket." \ 13 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 14 | dockerfiles/db-dumper/Dockerfile" \ 15 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 16 | dockerfiles/db-dumper/README.md" \ 17 | io.parity.image.revision="${VCS_REF}" \ 18 | io.parity.image.created="${BUILD_DATE}" 19 | 20 | RUN mkdir /opt/app 21 | WORKDIR /opt/app 22 | 23 | COPY db-dumper/app.py . 24 | COPY db-dumper/requirements.txt . 
25 | 26 | RUN pip3 --no-cache-dir install -r requirements.txt 27 | 28 | USER nobody:nogroup 29 | 30 | CMD ["python", "./app.py"] -------------------------------------------------------------------------------- /dockerfiles/db-dumper/README.md: -------------------------------------------------------------------------------- 1 | # db-dumper 2 | 3 | ## Description 4 | 5 | db-dumper is a script that makes dumps of GCP Cloud SQL Postgres databases to a GCP Cloud storage bucket. 6 | 7 | ## Features 8 | 9 | * uses a stream to read DB and writes every row directly to a bucket blob, 10 | it allows dumping any DBs avoiding custom RAM or disk caches 11 | * a dump query can be defined as a configuration variable 12 | * runs query in read-only mode, it's safe for DB data 13 | * uses the simple CSV output format 14 | * uses cloud-sql-python-connector, which allows connecting to DB using IAM authentication 15 | * uses native Google libraries, it allows to move the script to GCP Cloud Functions with minimum changes 16 | 17 | ## Configuration 18 | 19 | The script can be configured using environment variables 20 | 21 | Required: 22 | * `DBDUMPER_DB_INSTANCE_NAME` - GCP Cloud sql instance name, e.g. `project:region:instance` 23 | * `DBDUMPER_DB_NAME` - DB name to login 24 | * `DBDUMPER_DB_USER` - DB user name to login 25 | * `DBDUMPER_DB_PASS` - DB user password to login 26 | * `DBDUMPER_QUERY` - DB query, e.g. 
`SELECT * FROM Customers` 27 | * `DBDUMPER_BUCKET_NAME` - name of a GCP Cloud storage bucket 28 | 29 | Optional: 30 | * `DBDUMPER_BUCKET_PATH` - base path in the bucket, the default value is the empty string (the root of the bucket) 31 | * `DBDUMPER_BUCKET_FILE_BASE_NAME` - first part of the dump file name, the default value is `dump` 32 | * `DBDUMPER_LOG_LEVEL`- log levels of the `logging` Python module, the default value is `INFO` 33 | 34 | ## Dump path 35 | 36 | `{DBDUMPER_BUCKET_PATH}/{DBDUMPER_BUCKET_FILE_BASE_NAME}-%Y%m%d-%H%M%S.csv` -------------------------------------------------------------------------------- /dockerfiles/db-dumper/app.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import posixpath 3 | import logging 4 | from datetime import datetime 5 | from datetime import timezone 6 | 7 | from environs import Env 8 | 9 | from google.cloud.sql.connector import Connector 10 | import pg8000 11 | import sqlalchemy 12 | 13 | from google.cloud import storage 14 | 15 | LOGGING_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' 16 | 17 | def connect_with_connector() -> sqlalchemy.engine.base.Engine: 18 | env = Env() 19 | instance_connection_name = config['DBDUMPER_DB_INSTANCE_NAME'] 20 | db_user = config['DBDUMPER_DB_USER'] 21 | db_pass = env.str("DBDUMPER_DB_PASS") 22 | db_name = config['DBDUMPER_DB_NAME'] 23 | 24 | connector = Connector() 25 | 26 | def getconn() -> pg8000.dbapi.Connection: 27 | conn: pg8000.dbapi.Connection = connector.connect( 28 | instance_connection_name, 29 | "pg8000", 30 | user=db_user, 31 | password=db_pass, 32 | db=db_name, 33 | ) 34 | return conn 35 | 36 | pool = sqlalchemy.create_engine( 37 | "postgresql+pg8000://", 38 | creator=getconn, 39 | pool_size=5, 40 | max_overflow=2, 41 | pool_timeout=30, # 30 seconds 42 | pool_recycle=1800, # 30 minutes 43 | ) 44 | return pool 45 | 46 | 47 | if __name__ == '__main__': 48 | env = Env() 49 | 50 | config = {} 51 | 
config['DBDUMPER_LOG_LEVEL'] = env.str("DBDUMPER_LOG_LEVEL", "INFO") 52 | config['DBDUMPER_QUERY'] = env.str("DBDUMPER_QUERY") 53 | config['DBDUMPER_BUCKET_PATH'] = env.str("DBDUMPER_BUCKET_PATH", "") 54 | config['DBDUMPER_BUCKET_FILE_BASE_NAME'] = env.str("DBDUMPER_BUCKET_FILE_BASE_NAME", "dump") 55 | config['DBDUMPER_BUCKET_NAME'] = env.str("DBDUMPER_BUCKET_NAME") 56 | config['DBDUMPER_DB_INSTANCE_NAME'] = env.str("DBDUMPER_DB_INSTANCE_NAME") 57 | config['DBDUMPER_DB_USER'] = env.str("DBDUMPER_DB_USER") 58 | config['DBDUMPER_DB_NAME'] = env.str("DBDUMPER_DB_NAME") 59 | 60 | 61 | # set up console log handler 62 | console = logging.StreamHandler() 63 | console.setLevel(getattr(logging, config['DBDUMPER_LOG_LEVEL'])) 64 | formatter = logging.Formatter(LOGGING_FORMAT) 65 | console.setFormatter(formatter) 66 | # set up basic logging config 67 | logging.basicConfig(format=LOGGING_FORMAT, level=getattr(logging, config['DBDUMPER_LOG_LEVEL']), handlers=[console]) 68 | 69 | logging.info(f'Config:\n{config}') 70 | logging.info(f'Backup started!') 71 | 72 | db = connect_with_connector() 73 | query = sqlalchemy.text(config['DBDUMPER_QUERY']) 74 | 75 | now = datetime.now(timezone.utc) 76 | bucket_blob_path = posixpath.join(config['DBDUMPER_BUCKET_PATH'], 77 | f'{config["DBDUMPER_BUCKET_FILE_BASE_NAME"]}-{now.strftime("%Y%m%d-%H%M%S")}.csv') 78 | storage_client = storage.Client() 79 | bucket = storage_client.bucket(config['DBDUMPER_BUCKET_NAME']) 80 | bucket_blob = bucket.blob(bucket_blob_path) 81 | 82 | with db.connect() as conn: 83 | q = conn.execution_options(stream_results=True, postgresql_readonly=True).execute(query) 84 | with bucket_blob.open("w") as f: 85 | csv_file = csv.writer(f) 86 | 87 | header=list(q.keys()) 88 | csv_file.writerow(header) 89 | for record in q.all(): 90 | csv_file.writerow([getattr(record, c) for c in header]) 91 | 92 | logging.info(f'Backup finished successfully!') 93 | 
-------------------------------------------------------------------------------- /dockerfiles/db-dumper/requirements.txt: -------------------------------------------------------------------------------- 1 | cloud-sql-python-connector 2 | google-cloud-storage 3 | pg8000 4 | sqlalchemy 5 | environs -------------------------------------------------------------------------------- /dockerfiles/deb/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG REGISTRY_PATH=docker.io/paritytech 2 | 3 | FROM ${REGISTRY_PATH}/gnupg:latest 4 | 5 | ARG VCS_REF=master 6 | ARG BUILD_DATE="" 7 | 8 | # metadata 9 | LABEL summary="DEB packaging/signing toolchain" \ 10 | name="${REGISTRY_PATH}/deb" \ 11 | maintainer="devops-team@parity.io" \ 12 | version="1.0" \ 13 | description="DEB packaging/signing toolchain" \ 14 | io.parity.image.vendor="Parity Technologies" \ 15 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/deb/Dockerfile" \ 16 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/deb/README.md" \ 17 | io.parity.image.revision="${VCS_REF}" \ 18 | io.parity.image.created="${BUILD_DATE}" 19 | 20 | USER root 21 | RUN apt-get install -yq --no-install-recommends reprepro 22 | 23 | USER nonroot:nonroot 24 | -------------------------------------------------------------------------------- /dockerfiles/deb/README.md: -------------------------------------------------------------------------------- 1 | # deb 2 | 3 | Docker image based on the [paritytech gnupg image](https://hub.docker.com/paritytech/gnupg) paritytech/gnupg:latest. 4 | 5 | Used for signing and managing .deb repositories and packages 6 | 7 | **Tools:** 8 | 9 | - `reprepro` 10 | 11 | [Click here](https://hub.docker.com/repository/docker/paritytech/deb) for the registry. 
12 | 13 | ## Usage 14 | 15 | ```Dockerfile 16 | FROM docker.io/paritytech/deb:latest 17 | ``` 18 | 19 | -------------------------------------------------------------------------------- /dockerfiles/debian10/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:buster 2 | 3 | # metadata 4 | ARG VCS_REF=master 5 | ARG BUILD_DATE="" 6 | ARG REGISTRY_PATH=docker.io/paritytech 7 | LABEL io.parity.image.authors="devops-team@parity.io" \ 8 | io.parity.image.vendor="Parity Technologies" \ 9 | io.parity.image.title="${REGISTRY_PATH}/debian10" \ 10 | io.parity.image.description="ansible" \ 11 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 12 | dockerfiles/debian10/Dockerfile" \ 13 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 14 | dockerfiles/debian10/README.md" \ 15 | io.parity.image.revision="${VCS_REF}" \ 16 | io.parity.image.created="${BUILD_DATE}" 17 | 18 | 19 | ARG DEBIAN_FRONTEND=noninteractive 20 | # Install dependencies. 21 | RUN apt-get update \ 22 | && apt-get install -y --no-install-recommends \ 23 | sudo systemd systemd-sysv \ 24 | build-essential wget libffi-dev libssl-dev \ 25 | python3-pip python3-dev python3-setuptools python3-wheel python3-apt \ 26 | && rm -rf /var/lib/apt/lists/* \ 27 | && rm -Rf /usr/share/doc && rm -Rf /usr/share/man \ 28 | && apt-get clean 29 | # Make sure systemd doesn't start agettys on tty[1-6]. 30 | RUN rm -f /lib/systemd/system/multi-user.target.wants/getty.target 31 | VOLUME ["/sys/fs/cgroup"] 32 | CMD ["/lib/systemd/systemd"] -------------------------------------------------------------------------------- /dockerfiles/debian10/README.md: -------------------------------------------------------------------------------- 1 | # Description 2 | 3 | This Debian 10 image is used in the ansible Molecule test as a base to apply roles. 4 | The docker image should be close to default GCP or AWS images. 
5 | -------------------------------------------------------------------------------- /dockerfiles/debian11/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bullseye 2 | 3 | # metadata 4 | ARG VCS_REF=master 5 | ARG BUILD_DATE="" 6 | ARG REGISTRY_PATH=docker.io/paritytech 7 | LABEL io.parity.image.authors="devops-team@parity.io" \ 8 | io.parity.image.vendor="Parity Technologies" \ 9 | io.parity.image.title="${REGISTRY_PATH}/debian11" \ 10 | io.parity.image.description="ansible" \ 11 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 12 | dockerfiles/debian11/Dockerfile" \ 13 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 14 | dockerfiles/debian11/README.md" \ 15 | io.parity.image.revision="${VCS_REF}" \ 16 | io.parity.image.created="${BUILD_DATE}" 17 | 18 | 19 | ARG DEBIAN_FRONTEND=noninteractive 20 | # Install dependencies. 21 | RUN apt-get update \ 22 | && apt-get install -y --no-install-recommends \ 23 | sudo systemd systemd-sysv \ 24 | build-essential wget libffi-dev libssl-dev \ 25 | python3-pip python3-dev python3-setuptools python3-wheel python3-apt \ 26 | && rm -rf /var/lib/apt/lists/* \ 27 | && rm -Rf /usr/share/doc && rm -Rf /usr/share/man \ 28 | && apt-get clean 29 | # Make sure systemd doesn't start agettys on tty[1-6]. 30 | RUN rm -f /lib/systemd/system/multi-user.target.wants/getty.target 31 | CMD ["/lib/systemd/systemd"] -------------------------------------------------------------------------------- /dockerfiles/debian11/README.md: -------------------------------------------------------------------------------- 1 | # Description 2 | 3 | This Debian 11 image is used in the ansible Molecule test as a base to apply roles. 4 | The docker image should be close to default GCP or AWS images. 
5 | -------------------------------------------------------------------------------- /dockerfiles/eng-automation-ci/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/node:22-alpine 2 | 3 | RUN apk update && apk add git-daemon util-linux python3 bash sed git gnupg g++ make 4 | -------------------------------------------------------------------------------- /dockerfiles/eng-automation-ci/README.md: -------------------------------------------------------------------------------- 1 | # Description 2 | 3 | One image for all CI jobs of Engineering Automation projects 4 | -------------------------------------------------------------------------------- /dockerfiles/github-gh-cli/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG REGISTRY_PATH=docker.io/paritytech 2 | 3 | FROM docker.io/library/ubuntu:latest 4 | 5 | ARG VCS_REF=master 6 | ARG BUILD_DATE="" 7 | 8 | # metadata 9 | LABEL summary="Base image with git and gh (GitHub CLI)" \ 10 | name="${REGISTRY_PATH}/github-gh-cli" \ 11 | maintainer="devops-team@parity.io" \ 12 | version="1.0" \ 13 | description="Image contains git and gh tools."
\ 14 | io.parity.image.vendor="Parity Technologies" \ 15 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/github-gh-cli/Dockerfile" \ 16 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/github-gh-cli/README.md" \ 17 | io.parity.image.revision="${VCS_REF}" \ 18 | io.parity.image.created="${BUILD_DATE}" 19 | 20 | RUN apt-get update && apt-get install -yq --no-install-recommends bash ca-certificates git gh; \ 21 | # verify gh binary works 22 | gh --version; 23 | 24 | WORKDIR /tmp/repo 25 | 26 | CMD ["gh"] 27 | -------------------------------------------------------------------------------- /dockerfiles/github-gh-cli/README.md: -------------------------------------------------------------------------------- 1 | # GitHub CLI 2 | 3 | Docker image based on [official Ubuntu image](https://hub.docker.com/_/ubuntu) ubuntu:latest. 4 | 5 | Used as base for tooling that requires git and gh. 6 | 7 | **Tools:** 8 | 9 | - `git` 10 | - `gh` 11 | 12 | [Click here](https://hub.docker.com/repository/docker/paritytech/github-gh-cli) for the registry. 
13 | 14 | ## Usage 15 | 16 | ``` 17 | docker run --rm -it -v $PWD:/tmp/repo docker.io/paritytech/github-gh-cli gh {needed_gh_command} 18 | ``` 19 | -------------------------------------------------------------------------------- /dockerfiles/gnupg/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG REGISTRY_PATH=docker.io/paritytech 2 | 3 | FROM docker.io/library/ubuntu:latest 4 | 5 | # 'Parity Security Team ' 6 | ARG GPG_KEYID=9D4B2B6EB8F97156D19669A9FF0812D491B96798 7 | ARG VCS_REF=master 8 | ARG BUILD_DATE="" 9 | ARG UID=1000 10 | ARG GID=1000 11 | 12 | # metadata 13 | LABEL summary="Base image for GnuPG operations" \ 14 | name="${REGISTRY_PATH}/gnupg" \ 15 | maintainer="devops-team@parity.io" \ 16 | version="1.0" \ 17 | description="GnuPG base container" \ 18 | io.parity.image.vendor="Parity Technologies" \ 19 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/gnupg/Dockerfile" \ 20 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/gnupg/README.md" \ 21 | io.parity.image.revision="${VCS_REF}" \ 22 | io.parity.image.created="${BUILD_DATE}" 23 | 24 | RUN apt-get update && apt-get install -yq --no-install-recommends bash ca-certificates curl gnupg 25 | 26 | RUN set -x \ 27 | && groupadd -g $GID nonroot \ 28 | && useradd -u $UID -g $GID -s /bin/bash -m nonroot 29 | 30 | USER nonroot:nonroot 31 | 32 | RUN curl -LfSs "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x${GPG_KEYID}&options=mr&exact=on" | gpg --import - \ 33 | && gpg --list-keys 34 | 35 | WORKDIR /home/nonroot 36 | CMD ["/bin/bash"] 37 | -------------------------------------------------------------------------------- /dockerfiles/gnupg/README.md: -------------------------------------------------------------------------------- 1 | # gnupg 2 | 3 | Docker image based on [official Ubuntu image](https://hub.docker.com/_/ubuntu) ubuntu:latest. 
4 | 5 | Used as base for tooling that requires gnupg. 6 | 7 | **Tools:** 8 | 9 | - `curl` 10 | - `gnupg` 11 | 12 | [Click here](https://hub.docker.com/repository/docker/paritytech/gnupg) for the registry. 13 | 14 | ## Usage 15 | 16 | ```Dockerfile 17 | FROM docker.io/paritytech/gnupg:latest 18 | ``` 19 | 20 | -------------------------------------------------------------------------------- /dockerfiles/ink-ci-linux/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG VCS_REF=master 2 | ARG BUILD_DATE 3 | ARG REGISTRY_PATH=docker.io/paritytech 4 | 5 | FROM ${REGISTRY_PATH}/base-ci-linux:latest 6 | 7 | ARG RUST_NIGHTLY="2023-03-21" 8 | 9 | # metadata 10 | LABEL io.parity.image.authors="devops-team@parity.io" \ 11 | io.parity.image.vendor="Parity Technologies" \ 12 | io.parity.image.title="${REGISTRY_PATH}/ink-ci-linux" \ 13 | io.parity.image.description="Inherits from docker.io/paritytech/base-ci-linux. \ 14 | rust nightly, clippy, rustfmt, miri, rust-src, rustc-dev, grcov, rust-covfix, \ 15 | llvm-tools-preview, cargo-contract, xargo, binaryen, parallel, codecov" \ 16 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 17 | dockerfiles/ink-ci-linux/Dockerfile" \ 18 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 19 | dockerfiles/ink-ci-linux/README.md" \ 20 | io.parity.image.revision="${VCS_REF}" \ 21 | io.parity.image.created="${BUILD_DATE}" 22 | 23 | WORKDIR /builds 24 | 25 | RUN set -eux; \ 26 | apt-get -y update && \ 27 | 28 | # Needed for running commands in parallel, without overlapping output. 29 | # gnupg is only needed to verify the signature of the codecov uploader. 
30 | apt-get install -y --no-install-recommends parallel gnupg && \ 31 | 32 | # codecov uploader 33 | curl --remote-name --silent https://uploader.codecov.io/latest/linux/codecov && \ 34 | curl --remote-name --silent https://uploader.codecov.io/latest/linux/codecov.SHA256SUM && \ 35 | curl --remote-name --silent https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig && \ 36 | curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --import && \ 37 | gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM && \ 38 | shasum --algorithm 256 --check codecov.SHA256SUM && \ 39 | chmod +x codecov && \ 40 | mv codecov /usr/local/bin/codecov && \ 41 | rm -f codecov.SHA256SUM codecov.SHA256SUM.sig && \ 42 | 43 | # `binaryen` is needed by `cargo-contract` for optimizing Wasm files. 44 | # We fetch the latest release which contains a Linux binary. 45 | curl -L $(curl --silent https://api.github.com/repos/WebAssembly/binaryen/releases \ 46 | | jq -r '.[0].assets | [.[] | .browser_download_url] | map(select(match("x86_64-linux\\.tar\\.gz$"))) | .[0]' \ 47 | ) | tar -xz -C /usr/local/bin/ --wildcards --strip-components=2 'binaryen-*/bin/wasm-opt' && \ 48 | 49 | # The stable toolchain is used to build ink! itself through the use of the 50 | # `RUSTC_BOOTSTRAP=1` environment variable. We also need to install the 51 | # `wasm32-unknown-unknown` target since that's the platform that ink! smart contracts 52 | # run on. 53 | # 54 | # The 1.69 toolchain is temporarily required to build ink! contracts because of 55 | # https://github.com/paritytech/cargo-contract/issues/1139 \ 56 | rustup toolchain install 1.69 && \ 57 | rustup target add wasm32-unknown-unknown --toolchain 1.69 && \ 58 | rustup component add rust-src clippy rustfmt --toolchain 1.69 && \ 59 | rustup default 1.69 && \ 60 | 61 | # We also use the nightly toolchain to lint ink!. We perform checks using RustFmt, 62 | # Cargo Clippy, and Miri. 
63 | # 64 | # Note that we pin the nightly toolchain since it often creates breaking changes during 65 | # the RustFmt and Clippy stages of the CI. 66 | rustup toolchain install nightly-${RUST_NIGHTLY} --target wasm32-unknown-unknown \ 67 | --profile minimal --component rustfmt clippy miri rust-src rustc-dev llvm-tools-preview && \ 68 | 69 | # Alias pinned toolchain as nightly, otherwise it appears as though we 70 | # don't have a nightly toolchain (i.e rustc +nightly --version is empty) 71 | ln -s /usr/local/rustup/toolchains/nightly-${RUST_NIGHTLY}-x86_64-unknown-linux-gnu \ 72 | /usr/local/rustup/toolchains/nightly-x86_64-unknown-linux-gnu && \ 73 | 74 | # `cargo-dylint` and `dylint-link` are dependencies needed to run `cargo-contract`. 75 | cargo install cargo-dylint dylint-link && \ 76 | 77 | # Install the latest `cargo-contract` 78 | cargo install --git https://github.com/paritytech/cargo-contract \ 79 | --locked --branch master --force && \ 80 | 81 | # Download the latest `substrate-contracts-node` binary 82 | curl -L -o substrate-contracts-node.zip 'https://gitlab.parity.io/parity/mirrors/substrate-contracts-node/-/jobs/artifacts/main/download?job=build-linux' && \ 83 | unzip substrate-contracts-node.zip && \ 84 | mv artifacts/substrate-contracts-node-linux/substrate-contracts-node /usr/local/cargo/bin/substrate-contracts-node && \ 85 | rm -r artifacts substrate-contracts-node.zip && \ 86 | chmod +x /usr/local/cargo/bin/substrate-contracts-node && \ 87 | 88 | 89 | # We require `grcov` for coverage reporting and `rust-covfix` to improve it. 
90 | # We require `xargo` so that `miri` runs properly 91 | cargo install grcov rust-covfix xargo && \ 92 | 93 | # Automated spell-checking 94 | cargo install cargo-spellcheck && \ 95 | 96 | # More faster test runner for Rust projects than cargo-test 97 | cargo install cargo-nextest && \ 98 | 99 | # Versions 100 | rustup show && \ 101 | cargo --version && \ 102 | cargo-contract --version && \ 103 | wasm-opt --version && \ 104 | 105 | # Clean up and remove compilation artifacts that a cargo install creates (>250M). 106 | rm -rf "${CARGO_HOME}/registry" "${CARGO_HOME}/git" /root/.cache/sccache && \ 107 | 108 | # apt clean up 109 | apt-get remove -y gnupg && \ 110 | apt-get autoremove -y && \ 111 | apt-get clean && \ 112 | rm -rf /var/lib/apt/lists/* 113 | 114 | # TODO: switch to non-root here when ink-waterfall-ci won't be the dependency of this img 115 | # USER nonroot:nonroot 116 | -------------------------------------------------------------------------------- /dockerfiles/ink-ci-linux/README.md: -------------------------------------------------------------------------------- 1 | # ink! CI for Linux Distributions 2 | 3 | Docker image based on our base CI image ``. 4 | 5 | Used to build and test ink!. 
6 | 7 | ## Dependencies and Tools 8 | 9 | - `parallel`: for running commands in parallel, without overlapping output 10 | - `codecov`: to upload the test coverage results 11 | - `binaryen`: needed by `cargo-contract` for optimizing Wasm files 12 | - `cargo-dylint` and `dylint-link`: needed by `cargo-contract` for running lints 13 | 14 | **Inherited from ``** 15 | 16 | - `libssl-dev` 17 | - `clang` 18 | - `lld` 19 | - `libclang-dev` 20 | - `make` 21 | - `cmake` 22 | - `git` 23 | - `pkg-config` 24 | - `curl` 25 | - `time` 26 | - `rhash` 27 | - `ca-certificates` 28 | - `jq` 29 | 30 | **Rust versions:** 31 | 32 | We always use the [latest possible](https://rust-lang.github.io/rustup-components-history/) `nightly` version that supports our required `rustup` components: 33 | 34 | - `clippy`: The Rust linter. 35 | - `rust-src`: The Rust sources of the standard library. 36 | - `miri`: The Rust MIR interpreter that interprets the test suite with additional checks. 37 | - `rustfmt`: The Rust code formatter. 38 | 39 | **Rust tools & toolchains:** 40 | 41 | - `grcov`: Required for general Rust code coverage reports. 42 | - `rust-covfix`: Required to polish the coverage reports by `grcov`. 43 | - `cargo-contract`: Required to build ink! Wasm smart contracts. 44 | - `xargo`: Required so that `miri` runs properly. 45 | - `cargo-spellcheck`: Required for the CI to do automated spell-checking. 46 | - `wasm32-unknown-unknown`: The toolchain to compile Rust codebases for Wasm. 47 | - `llvm-tools-preview`: our MIR source-based Rust coverage instrumentation. 48 | - `cargo-nextest`: Test runner for Rust project, to replace cargo test. 49 | 50 | [Click here](https://hub.docker.com/repository/docker/paritytech/ink-ci-linux) for the registry. 51 | 52 | ## Usage 53 | 54 | ```yaml 55 | test-ink: 56 | stage: test 57 | image: paritytech/ink-ci-linux:production 58 | script: 59 | - cargo build ... 
60 | ``` 61 | 62 | -------------------------------------------------------------------------------- /dockerfiles/ink-waterfall-ci/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG VCS_REF=master 2 | ARG BUILD_DATE 3 | ARG REGISTRY_PATH=docker.io/paritytech 4 | 5 | # `production` tag is used here to base off the image that has already been tested against 6 | # the `ink` CI. This reduces the maintenance of fixing the same nightly stuff in both images. 7 | FROM ${REGISTRY_PATH}/ink-ci-linux:production 8 | 9 | # metadata 10 | LABEL io.parity.image.authors="devops-team@parity.io" \ 11 | io.parity.image.vendor="Parity Technologies" \ 12 | io.parity.image.title="${REGISTRY_PATH}/ink-waterfall-ci" \ 13 | io.parity.image.description="Inherits from docker.io/paritytech/ink-ci-linux." \ 14 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/ink-waterfall-ci/Dockerfile" \ 16 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 17 | dockerfiles/ink-waterfall-ci/README.md" \ 18 | io.parity.image.revision="${VCS_REF}" \ 19 | io.parity.image.created="${BUILD_DATE}" 20 | 21 | WORKDIR /builds 22 | 23 | ENV SHELL /bin/bash 24 | ENV DEBIAN_FRONTEND=noninteractive 25 | 26 | RUN set -eux; \ 27 | apt-get -y update && \ 28 | apt-get install -y --no-install-recommends \ 29 | # `redis-cli` is needed to interact with ci/cd's redis 30 | redis-tools \ 31 | # npm is needed to install `yarn` 32 | npm \ 33 | # `firefox` is needed to simulate interactions with the `canvas-ui` 34 | firefox-esr && \ 35 | # `geckodriver` is needed to run headless browser tests 36 | # we fetch the latest version number from the github api and use that release 37 | curl --silent https://api.github.com/repos/mozilla/geckodriver/releases/latest | \ 38 | egrep --only-matching 'https://github.com/mozilla/geckodriver/releases/download/v[0-9.]+/geckodriver-v[0-9.]+-linux64.tar.gz' | \ 39 | 
head -n1 | \ 40 | xargs curl -L -O && \ 41 | tar -xvzf geckodriver-v*-linux64.tar.gz && \ 42 | rm geckodriver-v*-linux64.tar.gz && \ 43 | chmod +x geckodriver && \ 44 | mv geckodriver /usr/local/bin/ && \ 45 | # `yarn` is needed to run `canvas-ui` 46 | npm install --ignore-scripts -g yarn && \ 47 | 48 | # `substrate-contracts-node` is a Substrate chain with smart contract functionality. 49 | # `--locked` ensures the project's `Cargo.lock` is used. 50 | cargo install --git https://github.com/paritytech/substrate-contracts-node.git \ 51 | --locked --branch main --force && \ 52 | # We additionally install the `substrate-contracts-node` as `substrate-contracts-rand-extension`. 53 | # This installation though is a modified `substrate-contracts-node`, so that ink!'s 54 | # `rand-extension` chain extension example is included in the runtime. 55 | # This enables us to test in the waterfall that the `rand-extension` 56 | # integration with Substrate still works. 57 | git clone --depth 1 https://github.com/paritytech/substrate-contracts-node.git && \ 58 | curl -s https://raw.githubusercontent.com/paritytech/ink/master/integration-tests/rand-extension/runtime/chain-extension-example.rs \ 59 | >> substrate-contracts-node/runtime/src/lib.rs && \ 60 | sed -i 's/type ChainExtension = ();/type ChainExtension = FetchRandomExtension;/g' substrate-contracts-node/runtime/src/lib.rs && \ 61 | sed -i 's/name = "substrate-contracts-node"/name = "substrate-contracts-node-rand-extension"/g' substrate-contracts-node/node/Cargo.toml && \ 62 | cargo install --locked --path substrate-contracts-node/node/ && \ 63 | # Needed for regression testing, a CSV contains the sizes of compiled contracts. 
64 | cargo install --git https://github.com/paritytech/ink-waterfall.git csv-comparator && \ 65 | npm install -g csv2md && \ 66 | # versions 67 | rustup show && \ 68 | cargo --version && \ 69 | cargo-contract --version && \ 70 | echo $( substrate-contracts-node --version | awk 'NF' ) && \ 71 | substrate-contracts-node-rand-extension --version && \ 72 | # Clean up and remove compilation artifacts that a cargo install creates (>250M). 73 | rm -rf "${CARGO_HOME}/registry" "${CARGO_HOME}/git" /root/.cache/sccache && \ 74 | # Clean up artifacts of `substrate-contracts-rand-extension` installation 75 | rm -rf substrate-contracts-node/ && \ 76 | # apt clean up 77 | apt-get autoremove -y && \ 78 | apt-get clean && \ 79 | rm -rf /var/lib/apt/lists/* 80 | 81 | # TODO: https://gitlab.parity.io/parity/infrastructure/scripts/-/jobs/958687 82 | # USER nonroot:nonroot 83 | -------------------------------------------------------------------------------- /dockerfiles/ink-waterfall-ci/README.md: -------------------------------------------------------------------------------- 1 | # ink! waterfall CI for Linux Distributions 2 | 3 | Docker image based on our base CI image ``. 4 | 5 | Used to build and run end-to-end tests for ink!, `cargo-contract`, `substrate-contracts-node` and `canvas-ui`. 6 | 7 | ## Dependencies and Tools 8 | 9 | **Inherited from ``** 10 | 11 | - `libssl-dev` 12 | - `lld` 13 | - `libclang-dev` 14 | - `make` 15 | - `cmake` 16 | - `git` 17 | - `pkg-config` 18 | - `curl` 19 | - `time` 20 | - `rhash` 21 | - `ca-certificates` 22 | 23 | **Rust versions:** 24 | 25 | We always use the [latest possible](https://rust-lang.github.io/rustup-components-history/) `nightly` version that supports our required `rustup` components: 26 | 27 | - `clippy`: The Rust linter. 28 | - `rust-src`: The Rust sources of the standard library. 29 | - `rustfmt`: The Rust code formatter. 30 | 31 | **Rust tools & toolchains:** 32 | 33 | - `cargo-contract`: Required to build ink! 
Wasm smart contracts. 34 | - `cargo-dylint` and `dylint-link`: Required to run `cargo-contract`. 35 | - `substrate-contracts-node`: Required to run a Substrate chain for smart contracts. 36 | - `wasm32-unknown-unknown`: The toolchain to compile Rust codebases for Wasm. 37 | 38 | [Click here](https://hub.docker.com/repository/docker/paritytech/ink-waterfall-ci) for the registry. 39 | 40 | ## Usage 41 | 42 | ```yaml 43 | test-ink: 44 | stage: test 45 | image: paritytech/ink-waterfall-ci:latest 46 | script: 47 | - cargo build ... 48 | ``` 49 | 50 | -------------------------------------------------------------------------------- /dockerfiles/kube-manifests-validation/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/debian:12-slim 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | ARG HELM_VERSION=3.12.2 7 | ARG GATOR_VERSION=3.12.0 8 | ARG DATREE_VERSION=1.9.19 9 | ARG YQ_VERSION=4.34.2 10 | ARG VAULT_VERSION=1.17.3 11 | ARG K8S_SCHEMA_VERSION=1.25.9 12 | ARG K8S_SCHEMA_REPO_COMMIT_SHA=a43aa7eceaf4c32c5f45c9fc477588e7a12f18b6 13 | ARG CRDS_SCHEMA_REPO_COMMIT_SHA=8f0604e873746d6b2d49794e5b37768460e7b545 14 | ARG K8S_SCHEMA_DIR=/schemas/k8s 15 | ARG K8S_CRDS_DIR=/schemas/crds 16 | 17 | # metadata 18 | LABEL io.parity.image.authors="devops-team@parity.io" \ 19 | io.parity.image.vendor="Parity Technologies" \ 20 | io.parity.image.title="${REGISTRY_PATH}/kube-manifests-validation" \ 21 | io.parity.image.description="Tools for validating Kubernetes manifest files" \ 22 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 23 | dockerfiles/kube-manifests-validation/Dockerfile" \ 24 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 25 | dockerfiles/kube-manifests-validation/README.md" \ 26 | io.parity.image.revision="${VCS_REF}" \ 27 | io.parity.image.created="${BUILD_DATE}" 28 | 29 | RUN apt-get update 
&& \ 30 | apt-get install -y curl git moreutils unzip && \ 31 | rm -rf /var/lib/apt/lists/* 32 | 33 | # Install tools 34 | RUN set -eu; \ 35 | # helm 36 | curl -fSsL -o - https://get.helm.sh/helm-v$HELM_VERSION-linux-amd64.tar.gz \ 37 | | tar zx --strip 1 -C /usr/local/bin linux-amd64/helm && \ 38 | \ 39 | # gator 40 | curl -fSsL -o - https://github.com/open-policy-agent/gatekeeper/releases/download/v${GATOR_VERSION}/gator-v${GATOR_VERSION}-linux-amd64.tar.gz \ 41 | | tar zx -C /usr/local/bin gator && \ 42 | \ 43 | # datree 44 | curl -fSsL -o /tmp/datree.zip https://github.com/datreeio/datree/releases/download/$DATREE_VERSION/datree-cli_${DATREE_VERSION}_Linux_x86_64.zip && \ 45 | unzip /tmp/datree.zip -d /usr/local/bin datree && \ 46 | rm -f /tmp/datree.zip && \ 47 | \ 48 | # yq 49 | curl -fSsL -o /usr/local/bin/yq https://github.com/mikefarah/yq/releases/download/v${YQ_VERSION}/yq_linux_amd64 && \ 50 | chmod +x /usr/local/bin/yq && \ 51 | \ 52 | # vault 53 | curl -fSsL -o /tmp/vault.zip https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_linux_amd64.zip && \ 54 | unzip /tmp/vault.zip -d /usr/local/bin vault && \ 55 | rm -f /tmp/vault.zip 56 | 57 | # Download K8s schemas 58 | RUN set -eu; \ 59 | mkdir -p $K8S_SCHEMA_DIR $K8S_CRDS_DIR && \ 60 | git clone --no-checkout --filter=blob:none https://github.com/yannh/kubernetes-json-schema.git $K8S_SCHEMA_DIR && \ 61 | cd $K8S_SCHEMA_DIR && \ 62 | git sparse-checkout set --cone && git checkout $K8S_SCHEMA_REPO_COMMIT_SHA && \ 63 | git sparse-checkout set v$K8S_SCHEMA_VERSION v$K8S_SCHEMA_VERSION-standalone v$K8S_SCHEMA_VERSION-standalone-strict && \ 64 | \ 65 | cd $K8S_CRDS_DIR && \ 66 | git init && \ 67 | git remote add origin https://github.com/datreeio/CRDs-catalog.git && \ 68 | git fetch origin $CRDS_SCHEMA_REPO_COMMIT_SHA && \ 69 | git reset --hard FETCH_HEAD && \ 70 | rm -rf $K8S_SCHEMA_DIR/.git $K8S_CRDS_DIR/.git 71 | 72 | WORKDIR /app 73 | 74 | RUN mkdir -p /root/.datree 75 | COPY 
./datree-policies.yaml ./ 76 | COPY ./datree-config.yaml /root/.datree/config.yaml 77 | 78 | WORKDIR /git 79 | -------------------------------------------------------------------------------- /dockerfiles/kube-manifests-validation/README.md: -------------------------------------------------------------------------------- 1 | # kube-manifests-validation 2 | 3 | Docker image based on [official Debian image](https://hub.docker.com/_/debian) debian:12-slim. 4 | 5 | A base image with the tools for validating Kubernetes manifests using Gator and Datree CLI utilities 6 | 7 | **Tools:** 8 | 9 | - `curl` 10 | - `git` 11 | - `moreutils` 12 | - `unzip` 13 | - `gator` 14 | - `datree` 15 | 16 | [Click here](https://hub.docker.com/repository/docker/paritytech/kube-manifests-validation) for the registry. 17 | 18 | ## Usage 19 | 20 | ```Dockerfile 21 | FROM docker.io/paritytech/kube-manifests-validation:latest 22 | ``` 23 | -------------------------------------------------------------------------------- /dockerfiles/kube-manifests-validation/datree-config.yaml: -------------------------------------------------------------------------------- 1 | offline: local 2 | -------------------------------------------------------------------------------- /dockerfiles/kube-manifests-validation/datree-policies.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | policies: 3 | - name: Default 4 | isDefault: true 5 | rules: [] 6 | -------------------------------------------------------------------------------- /dockerfiles/kubetools/README.md: -------------------------------------------------------------------------------- 1 | # Image with Kube and Helm.
2 | -------------------------------------------------------------------------------- /dockerfiles/kubetools/helm/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/alpine:latest 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | ARG HELM_VERSION="2.16.6" 7 | ARG KUBE_VERSION="1.18.2" 8 | 9 | # metadata 10 | LABEL io.parity.image.authors="devops-team@parity.io" \ 11 | io.parity.image.vendor="Parity Technologies" \ 12 | io.parity.image.title="${REGISTRY_PATH}/kubetools" \ 13 | io.parity.image.description="ca-certificates git jq make curl gettext; kube helm;" \ 14 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/kubetools/Dockerfile" \ 16 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 17 | dockerfiles/kubetools/README.md" \ 18 | io.parity.image.revision="${VCS_REF}" \ 19 | io.parity.image.created="${BUILD_DATE}" 20 | 21 | RUN apk add --no-cache \ 22 | ca-certificates git jq make curl gettext bash shadow; \ 23 | curl -L "https://dl.k8s.io/release/v${KUBE_VERSION}/bin/linux/amd64/kubectl" \ 24 | -o /usr/local/bin/kubectl; \ 25 | curl -L "https://get.helm.sh/helm-v${HELM_VERSION}-linux-amd64.tar.gz" \ 26 | -o helm.tar.gz; \ 27 | tar -zxf helm.tar.gz linux-amd64/helm; \ 28 | mv linux-amd64/helm /usr/local/bin/helm; \ 29 | rm -rf helm.tar.gz linux-amd64; \ 30 | chmod +x /usr/local/bin/kubectl; \ 31 | chmod +x /usr/local/bin/helm 32 | 33 | RUN set -x \ 34 | && groupadd -g 1000 nonroot \ 35 | && useradd -u 1000 -g 1000 -s /bin/bash -m nonroot \ 36 | && mkdir /config \ 37 | && chown nonroot:nonroot /config 38 | 39 | WORKDIR /config 40 | 41 | USER nonroot:nonroot 42 | CMD ["/bin/bash"] 43 | -------------------------------------------------------------------------------- /dockerfiles/kubetools/helm/helm3.Dockerfile: 
-------------------------------------------------------------------------------- 1 | FROM docker.io/library/alpine:latest 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | ARG HELM_VERSION 7 | ARG HELMFILE_VERSION 8 | ARG HELM_DIFF_PLUGIN_VERSION 9 | ARG HELM_SECRETS_VERSION 10 | ARG KUBE_VERSION 11 | ARG VALS_VERSION 12 | ARG VAULT_VERSION 13 | ARG KUSTOMIZE_VERSION 14 | 15 | # metadata 16 | LABEL io.parity.image.authors="devops-team@parity.io" \ 17 | io.parity.image.vendor="Parity Technologies" \ 18 | io.parity.image.title="${REGISTRY_PATH}/kubetools" \ 19 | io.parity.image.description="ca-certificates git jq make curl gettext; kube helm;" \ 20 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 21 | dockerfiles/kubetools/helm3.Dockerfile" \ 22 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 23 | dockerfiles/kubetools/README.md" \ 24 | io.parity.image.revision="${VCS_REF}" \ 25 | io.parity.image.created="${BUILD_DATE}" 26 | 27 | RUN apk add --no-cache \ 28 | ca-certificates git jq yq make curl gettext bash shadow python3 py3-pip py3-kubernetes && \ 29 | # https://kubernetes.io/docs/tasks/tools/install-kubectl-linux/ 30 | echo "Installing kubectl" && \ 31 | curl -L "https://dl.k8s.io/release/v${KUBE_VERSION}/bin/linux/amd64/kubectl" \ 32 | -o /usr/local/bin/kubectl && \ 33 | # https://github.com/helm/helm/releases 34 | echo "Installing helm" && \ 35 | curl -L "https://get.helm.sh/helm-v${HELM_VERSION}-linux-amd64.tar.gz" \ 36 | -o helm.tar.gz && \ 37 | tar -zxf helm.tar.gz linux-amd64/helm && \ 38 | mv linux-amd64/helm /usr/local/bin/helm && \ 39 | rm -rf helm.tar.gz linux-amd64 && \ 40 | # https://github.com/roboll/helmfile/releases 41 | echo "Installing helmfile" && \ 42 | curl -L "https://github.com/roboll/helmfile/releases/download/v${HELMFILE_VERSION}/helmfile_linux_amd64" \ 43 | -o /usr/local/bin/helmfile && \ 44 | # Install vals: 
https://github.com/variantdev/vals/releases 45 | echo "Installing vals" && \ 46 | curl -L "https://github.com/variantdev/vals/releases/download/v${VALS_VERSION}/vals_${VALS_VERSION}_linux_amd64.tar.gz" \ 47 | -o vals.tar.gz && \ 48 | tar -zxf vals.tar.gz vals && \ 49 | mv vals /usr/local/bin/ && \ 50 | # Install vault 51 | echo "Installing vault" && \ 52 | wget -qO- "https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_linux_amd64.zip" \ 53 | | unzip -d /usr/local/bin - && \ 54 | # Install kustomize 55 | echo "Installing kustomize" && \ 56 | curl -L "https://github.com/kubernetes-sigs/kustomize/releases/download/kustomize%2Fv${KUSTOMIZE_VERSION}/kustomize_v${KUSTOMIZE_VERSION}_linux_amd64.tar.gz" \ 57 | -o kustomize.tar.gz && \ 58 | tar -zxf kustomize.tar.gz kustomize && \ 59 | mv kustomize /usr/local/bin/kustomize && \ 60 | rm -rf kustomize.tar.gz && \ 61 | chmod +x /usr/local/bin/kustomize && \ 62 | chmod +x /usr/local/bin/kubectl && \ 63 | chmod +x /usr/local/bin/helm && \ 64 | chmod +x /usr/local/bin/helmfile && \ 65 | chmod +x /usr/local/bin/vault && \ 66 | # test 67 | kubectl version --short=true --client && \ 68 | helm version && \ 69 | helmfile version && \ 70 | vault --version && \ 71 | vals version && \ 72 | kustomize version 73 | 74 | RUN set -x \ 75 | && groupadd -g 10000 nonroot \ 76 | && useradd -u 10000 -g 10000 -s /bin/bash -m nonroot \ 77 | && mkdir /config \ 78 | && chown nonroot:nonroot /config 79 | 80 | WORKDIR /config 81 | 82 | USER nonroot:nonroot 83 | 84 | # https://github.com/databus23/helm-diff/releases 85 | RUN helm plugin install https://github.com/databus23/helm-diff --version "v${HELM_DIFF_PLUGIN_VERSION}" && \ 86 | # https://github.com/jkroepke/helm-secrets 87 | helm plugin install https://github.com/jkroepke/helm-secrets --version "v${HELM_SECRETS_VERSION}" && \ 88 | helm plugin list 89 | CMD ["/bin/bash"] 90 | -------------------------------------------------------------------------------- 
/dockerfiles/kubetools/kubectl/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/alpine:latest 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | ARG KUBE_VERSION="1.26.6" 7 | 8 | # metadata 9 | LABEL io.parity.image.authors="devops-team@parity.io" \ 10 | io.parity.image.vendor="Parity Technologies" \ 11 | io.parity.image.title="${REGISTRY_PATH}/kubetools" \ 12 | io.parity.image.description="ca-certificates git jq make curl gettext kubectl;" \ 13 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 14 | dockerfiles/kubetools/Dockerfile" \ 15 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 16 | dockerfiles/kubetools/README.md" \ 17 | io.parity.image.revision="${VCS_REF}" \ 18 | io.parity.image.created="${BUILD_DATE}" 19 | 20 | RUN apk add --no-cache \ 21 | ca-certificates git jq make curl gettext bash shadow; \ 22 | curl -L "https://dl.k8s.io/release/v${KUBE_VERSION}/bin/linux/amd64/kubectl" \ 23 | -o /usr/local/bin/kubectl; \ 24 | chmod +x /usr/local/bin/kubectl 25 | 26 | RUN set -x \ 27 | && groupadd -g 1000 nonroot \ 28 | && useradd -u 1000 -g 1000 -s /bin/bash -m nonroot \ 29 | && mkdir /config \ 30 | && chown nonroot:nonroot /config 31 | 32 | WORKDIR /config 33 | 34 | USER nonroot:nonroot 35 | CMD ["/bin/bash"] 36 | -------------------------------------------------------------------------------- /dockerfiles/lz4/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/alpine:3.16.0 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | # metadata 8 | LABEL io.parity.image.authors="devops-team@parity.io" \ 9 | io.parity.image.vendor="Parity Technologies" \ 10 | io.parity.image.title="${REGISTRY_PATH}/lz4" \ 11 | io.parity.image.description="lz4 wget;" \ 12 | 
io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 13 | dockerfiles/lz4/Dockerfile" \ 14 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/lz4/README.md" \ 16 | io.parity.image.revision="${VCS_REF}" \ 17 | io.parity.image.created="${BUILD_DATE}" 18 | 19 | RUN apk add --no-cache lz4 wget 20 | -------------------------------------------------------------------------------- /dockerfiles/lz4/README.md: -------------------------------------------------------------------------------- 1 | # Image containing lz4 and wget binaries. 2 | -------------------------------------------------------------------------------- /dockerfiles/mdbook-utils/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/debian:stable-slim 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | # metadata 8 | LABEL summary="Image with mdbook utils" \ 9 | name="${REGISTRY_PATH}/mdbook-utils" \ 10 | maintainer="cicd-team@parity.io" \ 11 | version="1.0" \ 12 | description="Image with mdbook utils" \ 13 | io.parity.image.vendor="Parity Technologies" \ 14 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/mdbook-utils/Dockerfile" \ 15 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/mdbook-utils/README.md" \ 16 | io.parity.image.revision="${VCS_REF}" \ 17 | io.parity.image.created="${BUILD_DATE}" 18 | 19 | ENV RUSTUP_HOME=/usr/local/rustup \ 20 | CARGO_HOME=/usr/local/cargo \ 21 | PATH=/usr/local/cargo/bin:$PATH 22 | 23 | RUN apt-get update -y && \ 24 | DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ 25 | git curl build-essential libssl-dev perl gcc graphviz ca-certificates && \ 26 | # install rustup, use minimum components 27 | curl -L 
"https://static.rust-lang.org/rustup/dist/x86_64-unknown-linux-gnu/rustup-init" -o rustup-init && \ 28 | chmod +x rustup-init && \ 29 | ./rustup-init -y --no-modify-path --profile minimal --default-toolchain stable && \ 30 | rm rustup-init && \ 31 | cargo install mdbook mdbook-mermaid mdbook-linkcheck mdbook-graphviz mdbook-last-changed 32 | 33 | RUN set -x \ 34 | && groupadd -g 1000 nonroot \ 35 | && useradd -u 1000 -g 1000 -s /bin/bash -m nonroot \ 36 | && git config --global --add safe.directory '*' \ 37 | && cp /root/.gitconfig /home/nonroot/.gitconfig \ 38 | && chown nonroot:nonroot /home/nonroot/.gitconfig 39 | 40 | USER nonroot:nonroot 41 | -------------------------------------------------------------------------------- /dockerfiles/mdbook-utils/README.md: -------------------------------------------------------------------------------- 1 | # mdbook-utils 2 | 3 | - mdbook 4 | - mdbook-mermaid 5 | - mdbook-linkcheck 6 | - mdbook-graphviz 7 | - mdbook-last-changed 8 | 9 | -------------------------------------------------------------------------------- /dockerfiles/mitogen/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/python:3.12-slim 2 | 3 | # metadata 4 | ARG VCS_REF=master 5 | ARG BUILD_DATE="" 6 | ARG REGISTRY_PATH=docker.io/paritytech 7 | ARG VAULT_VERSION=1.17.3 8 | 9 | LABEL io.parity.image.authors="devops-team@parity.io" \ 10 | io.parity.image.vendor="Parity Technologies" \ 11 | io.parity.image.title="${REGISTRY_PATH}/mitogen" \ 12 | io.parity.image.description="mitogen" \ 13 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 14 | dockerfiles/mitogen/Dockerfile" \ 15 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 16 | dockerfiles/mitogen/README.md" \ 17 | io.parity.image.revision="${VCS_REF}" \ 18 | io.parity.image.created="${BUILD_DATE}" 19 | 20 | RUN apt-get update -y && \ 21 | DEBIAN_FRONTEND=noninteractive 
apt-get install -y --no-install-recommends \ 22 | sshpass ssh-client rsync tini curl git ruby unzip \ 23 | && rm -rf /var/lib/apt/lists/* 24 | 25 | RUN pip install --no-cache-dir pip --upgrade 26 | RUN pip install --no-cache-dir ansible==10.4 google-auth requests jmespath dnspython 27 | 28 | RUN curl -fSL -o /usr/local/bin/subkey 'https://releases.parity.io/substrate/x86_64-debian%3Astretch/v3.0.0/subkey/subkey' \ 29 | && chmod +x /usr/local/bin/subkey \ 30 | && curl "https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_linux_amd64.zip" -o vault.zip \ 31 | && unzip vault.zip -d /usr/local/bin vault \ 32 | && chmod +x /usr/local/bin/vault 33 | 34 | ENV ANSIBLE_STRATEGY=mitogen_linear 35 | ENV ANSIBLE_STRATEGY_PLUGINS=/opt/mitogen/ansible_mitogen/plugins/strategy 36 | RUN cd /opt && git clone --depth 1 --branch v0.3.9 https://github.com/mitogen-hq/mitogen.git 37 | 38 | ARG WORKDIR=/work 39 | RUN groupadd --gid 10001 nonroot && \ 40 | useradd --home-dir /home/nonroot \ 41 | --create-home \ 42 | --shell /bin/bash \ 43 | --gid nonroot \ 44 | --groups nonroot \ 45 | --uid 10000 nonroot 46 | RUN chown -R nonroot. /home/nonroot 47 | RUN mkdir ${WORKDIR} 48 | RUN chown -R nonroot. ${WORKDIR} 49 | USER 10000:10001 50 | WORKDIR ${WORKDIR} 51 | 52 | ENTRYPOINT ["/usr/bin/tini", "--"] 53 | 54 | -------------------------------------------------------------------------------- /dockerfiles/mitogen/README.md: -------------------------------------------------------------------------------- 1 | # Mitogen 2 | 3 | Ansible container with mitogen plugin. 
4 | -------------------------------------------------------------------------------- /dockerfiles/molecule/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10 2 | 3 | # metadata 4 | ARG VCS_REF=master 5 | ARG BUILD_DATE="" 6 | ARG REGISTRY_PATH=docker.io/paritytech 7 | LABEL io.parity.image.authors="devops-team@parity.io" \ 8 | io.parity.image.vendor="Parity Technologies" \ 9 | io.parity.image.title="${REGISTRY_PATH}/molecule" \ 10 | io.parity.image.description="ansible" \ 11 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 12 | dockerfiles/molecule/Dockerfile" \ 13 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 14 | dockerfiles/molecule/README.md" \ 15 | io.parity.image.revision="${VCS_REF}" \ 16 | io.parity.image.created="${BUILD_DATE}" 17 | 18 | RUN pip install --no-cache-dir \ 19 | ansible \ 20 | ansible-lint==6.0.1 \ 21 | yamllint \ 22 | "molecule[docker]"==3.6.1 23 | 24 | RUN apt-get update \ 25 | && apt-get install -y --no-install-recommends \ 26 | docker.io jq \ 27 | && rm -rf /var/lib/apt/lists/* \ 28 | && rm -Rf /usr/share/doc && rm -Rf /usr/share/man \ 29 | && apt-get clean 30 | 31 | # Do not switch to nonroot user, molecule has to connect to docker engine anyway. 32 | # nonroot will create a false feeling of security. 33 | -------------------------------------------------------------------------------- /dockerfiles/molecule/README.md: -------------------------------------------------------------------------------- 1 | # Description 2 | 3 | Image with ansible and molecule. Used in GitLab CI to test ansible roles.
4 | -------------------------------------------------------------------------------- /dockerfiles/multisig-ci/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/python:buster 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | LABEL io.parity.image.authors="devops-team@parity.io" \ 8 | io.parity.image.vendor="Parity Technologies" \ 9 | io.parity.image.title="${REGISTRY_PATH}/multisig-ci" \ 10 | io.parity.image.description="python, poetry " \ 11 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/multisig-ci/Dockerfile" \ 12 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/multisig-ci/README.md" \ 13 | io.parity.image.revision="${VCS_REF}" \ 14 | io.parity.image.created="${BUILD_DATE}" 15 | 16 | RUN curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" && \ 17 | chmod +x kubectl && \ 18 | mv kubectl /usr/local/bin/ && \ 19 | apt-get update && apt-get --no-install-recommends install -y gettext python3-setuptools && \ 20 | pip install poetry==1.8.3 && \ 21 | # add non-root user 22 | groupadd -g 10000 nonroot && \ 23 | useradd -u 10000 -g 10000 -s /bin/bash -m nonroot 24 | 25 | USER nonroot:nonroot 26 | -------------------------------------------------------------------------------- /dockerfiles/multisig-ci/README.md: -------------------------------------------------------------------------------- 1 | # python:buster image with poetry and non-root user -------------------------------------------------------------------------------- /dockerfiles/node-bench-regression-guard/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/ruby:2.7-alpine 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | LABEL 
io.parity.image.authors="devops-team@parity.io" \ 8 | io.parity.image.vendor="Parity Technologies" \ 9 | io.parity.image.title="${REGISTRY_PATH}/node-bench-regression-guard" \ 10 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/scripts/node-bench-regression-guard/Dockerfile" \ 11 | io.parity.image.revision="${VCS_REF}" \ 12 | io.parity.image.created="${BUILD_DATE}" 13 | 14 | RUN apk add --no-cache --update curl unzip bash shadow jq 15 | COPY node-bench-regression-guard/node-bench-regression-guard /usr/local/bin/node-bench-regression-guard 16 | 17 | RUN set -x \ 18 | && groupadd -g 1000 nonroot \ 19 | && useradd -u 1000 -g 1000 -s /bin/bash -m nonroot 20 | 21 | USER nonroot:nonroot 22 | CMD ["node-bench-regression-guard", "--help"] 23 | -------------------------------------------------------------------------------- /dockerfiles/node-bench-regression-guard/Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | gem 'faker', '~> 2.17' 4 | gem 'minitest', '~> 5.14', '>= 5.14.4' 5 | -------------------------------------------------------------------------------- /dockerfiles/node-bench-regression-guard/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | concurrent-ruby (1.1.8) 5 | faker (2.17.0) 6 | i18n (>= 1.6, < 2) 7 | i18n (1.8.9) 8 | concurrent-ruby (~> 1.0) 9 | minitest (5.14.4) 10 | 11 | PLATFORMS 12 | ruby 13 | 14 | DEPENDENCIES 15 | faker (~> 2.17) 16 | minitest (~> 5.14, >= 5.14.4) 17 | 18 | BUNDLED WITH 19 | 2.1.4 20 | -------------------------------------------------------------------------------- /dockerfiles/node-bench-regression-guard/README.md: -------------------------------------------------------------------------------- 1 | # node-bench-regression-guard 2 | 3 | This small utility is used as part of `substrate` pipeline for the performance
regression detections. 4 | 5 | ### How it works 6 | 7 | It processes JSON output of `substrate`'s `node-bench` benchmarks and compares the results. The utility exits with `1` when the difference between the benchmarks and their reference counterparts is twice or more. 8 | 9 | ### Usage 10 | 11 | ``` 12 | node-bench-regression-guard --help 13 | 14 | Usage: node-bench-regression-guard [options] 15 | --reference DIRECTORY (current master) 16 | --compare-with DIRECTORY (merged PR branch) 17 | --comparison-threshold (optional, integer, defaults to 2) 18 | ``` 19 | 20 | ### Testing 21 | 22 | Just `bundle install && bundle exec ruby run-tests.rb`. 23 | 24 | ### Dependencies 25 | 26 | The test script uses `faker` and `minitest` as the external dependencies. The main script (`node-bench-regression-guard`) is dependency-free and relies solely on Ruby's standard library. 27 | 28 | -------------------------------------------------------------------------------- /dockerfiles/node-bench-regression-guard/node-bench-regression-guard: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require 'json' 4 | require 'optparse' 5 | 6 | if ARGV.empty? 7 | puts 'Please provide options' 8 | exit 1 9 | end 10 | 11 | options = {} 12 | option_parser = OptionParser.new do |option| 13 | option.banner = "Usage: node-bench-regression-guard [options]" 14 | option.on('--reference DIRECTORY (current master)', String) do |reference| 15 | options[:reference] = reference 16 | end 17 | option.on('--compare-with DIRECTORY (merged PR branch)', String) do |compare| 18 | options[:compare] = compare 19 | end 20 | option.on('--comparison-threshold (optional, integer, defaults to 2)', Integer) do |threshold| 21 | options[:threshold] = threshold 22 | end 23 | end 24 | 25 | option_parser.parse! 
26 | 27 | raise OptionParser::MissingArgument, '--reference is required' unless options[:reference] 28 | raise OptionParser::MissingArgument, '--compare-with is required' unless options[:compare] 29 | 30 | reference_dir = options[:reference] 31 | compare_dir = options[:compare] 32 | 33 | if options[:threshold] 34 | comparison_threshold = options[:threshold] 35 | else 36 | comparison_threshold = 2 37 | end 38 | 39 | raise ArgumentError, "Directory with references doesn't exist" unless Dir.exist?(reference_dir) 40 | raise ArgumentError, "Directory with results to compare doesn't exist" unless Dir.exist?(compare_dir) 41 | 42 | def deserialize_result(filename) 43 | # skip all output until the json line 44 | File.readlines(filename).each do |line| 45 | begin 46 | return JSON.parse(line)[0] 47 | rescue JSON::ParserError 48 | next 49 | end 50 | end 51 | raise RuntimeError, "File doesn't contain any JSON line" 52 | end 53 | 54 | def parse_benches(directory) 55 | hash = {} 56 | Dir.each_child(directory) do |filename| 57 | Dir.chdir(directory) do 58 | puts File.expand_path(filename) 59 | hash[filename] = deserialize_result(filename) 60 | puts hash[filename]['name'] 61 | puts "raw_average: #{hash[filename]['raw_average']}" 62 | puts "average: #{hash[filename]['average']}" 63 | puts 64 | end 65 | end 66 | hash 67 | end 68 | 69 | references = parse_benches(reference_dir) 70 | puts "=========================\n\n" 71 | comparisons = parse_benches(compare_dir) 72 | 73 | regressions = [] 74 | failed = false 75 | 76 | puts "=========================\n\n" 77 | 78 | puts "Performance ratios (merged PR branch / current master):" 79 | references.each_value do |reference| 80 | comparisons.each_value do |comparison| 81 | next unless reference['name'] == comparison['name'] 82 | result_avg = comparison['average'].to_f / reference['average'] 83 | result_raw_avg = comparison['raw_average'].to_f / reference['raw_average'] 84 | puts "average: #{result_avg.round(2)}; raw_average: 
#{result_raw_avg.round(2)} - #{comparison['name']}" 85 | if ( (result_avg >= comparison_threshold) || 86 | (result_raw_avg >= comparison_threshold) ) 87 | regressions << comparison['name'] 88 | failed = true 89 | end 90 | end 91 | end 92 | 93 | puts "\n=========================\n\n" 94 | puts "Comparison threshold is #{comparison_threshold}" 95 | 96 | if failed 97 | puts 'Regressions detected:' 98 | puts regressions.join("\n") 99 | exit 1 100 | else 101 | puts 'No regressions detected' 102 | end 103 | -------------------------------------------------------------------------------- /dockerfiles/node-bench-regression-guard/run-tests.rb: -------------------------------------------------------------------------------- 1 | require 'faker' 2 | require 'fileutils' 3 | require 'json' 4 | require 'securerandom' 5 | require 'tmpdir' 6 | 7 | require 'minitest/autorun' 8 | 9 | def in_tmp_dir 10 | Dir.mktmpdir('nbrg-') do |tmpdir| 11 | FileUtils.cp('node-bench-regression-guard', tmpdir) 12 | Dir.chdir(tmpdir) do 13 | FileUtils.mkdir('references') 14 | FileUtils.mkdir('comparisons') 15 | yield 16 | end 17 | end 18 | end 19 | 20 | def generate_name_part 21 | Faker::Dessert.flavor.gsub(/[[:space:]]/, '::').downcase 22 | end 23 | 24 | def generate_bench_data 25 | base = rand(100000000...200000000) 26 | name = "#{generate_name_part}::#{generate_name_part}::#{SecureRandom.hex[0..10]}" 27 | [{name: name, 28 | average: (base * rand(0.9..1.1)).to_i, 29 | raw_average: (base * rand(0.9..1.1)).to_i}] 30 | end 31 | 32 | class NodeBenchRegressionGuardTest < Minitest::Test 33 | 34 | def test_regression_detected 35 | stdout, stderr = capture_subprocess_io do 36 | in_tmp_dir do 37 | foo = generate_bench_data 38 | bar = generate_bench_data 39 | File.write("references/#{foo[0][:name]}", foo.to_json) 40 | File.write("references/#{bar[0][:name]}","Faker::Quote.famous_last_words\n#{bar.to_json}") 41 | foo[0][:average] = (foo[0][:average] * rand(2.0..3.0)).to_i 42 | foo[0][:raw_average] = 
(foo[0][:average] * rand(0.9..1.1)).to_i 43 | File.write("comparisons/#{foo[0][:name]}", foo.to_json) 44 | File.write("comparisons/#{bar[0][:name]}","Faker::Quote.famous_last_words\n#{bar.to_json}") 45 | puts `./node-bench-regression-guard --reference ./references --compare-with ./comparisons` 46 | end 47 | end 48 | assert_match(/Regressions detected/, stdout) 49 | assert_equal($?.exitstatus, 1) 50 | end 51 | 52 | def test_regression_detected_custom_threshold 53 | stdout, stderr = capture_subprocess_io do 54 | in_tmp_dir do 55 | foo = generate_bench_data 56 | bar = generate_bench_data 57 | File.write("references/#{foo[0][:name]}", foo.to_json) 58 | File.write("references/#{bar[0][:name]}","Faker::Quote.famous_last_words\n#{bar.to_json}") 59 | foo[0][:average] = (foo[0][:average] * rand(3.2..4.2)).to_i 60 | foo[0][:raw_average] = (foo[0][:average] * rand(0.9..1.1)).to_i 61 | File.write("comparisons/#{foo[0][:name]}", foo.to_json) 62 | File.write("comparisons/#{bar[0][:name]}","Faker::Quote.famous_last_words\n#{bar.to_json}") 63 | puts `./node-bench-regression-guard --reference ./references --compare-with ./comparisons --comparison-threshold 3` 64 | end 65 | end 66 | assert_match(/Regressions detected/, stdout) 67 | assert_equal(1, $?.exitstatus) 68 | end 69 | 70 | def test_no_regression_detected 71 | stdout, stderr = capture_subprocess_io do 72 | in_tmp_dir do 73 | foo = generate_bench_data 74 | bar = generate_bench_data 75 | File.write("references/#{foo[0][:name]}", foo.to_json) 76 | File.write("references/#{bar[0][:name]}","Faker::Quote.famous_last_words\n#{bar.to_json}") 77 | foo[0][:average] = (foo[0][:average] * rand(1.0..1.2)).to_i 78 | foo[0][:raw_average] = (foo[0][:average] * rand(0.9..1.1)).to_i 79 | File.write("comparisons/#{foo[0][:name]}", foo.to_json) 80 | File.write("comparisons/#{bar[0][:name]}","Faker::Quote.famous_last_words\n#{bar.to_json}") 81 | puts `./node-bench-regression-guard --reference ./references --compare-with ./comparisons` 82 | end 83 
| end 84 | assert_match(/No regressions detected/, stdout) 85 | assert_equal(0, $?.exitstatus) 86 | end 87 | 88 | def test_no_regression_detected_custom_threshold 89 | stdout, stderr = capture_subprocess_io do 90 | in_tmp_dir do 91 | foo = generate_bench_data 92 | bar = generate_bench_data 93 | File.write("references/#{foo[0][:name]}", foo.to_json) 94 | File.write("references/#{bar[0][:name]}","Faker::Quote.famous_last_words\n#{bar.to_json}") 95 | foo[0][:average] = (foo[0][:average] * rand(1.0..1.2)).to_i 96 | foo[0][:raw_average] = (foo[0][:average] * rand(0.9..1.1)).to_i 97 | File.write("comparisons/#{foo[0][:name]}", foo.to_json) 98 | File.write("comparisons/#{bar[0][:name]}","Faker::Quote.famous_last_words\n#{bar.to_json}") 99 | puts `./node-bench-regression-guard --reference ./references --compare-with ./comparisons --comparison-threshold 3` 100 | end 101 | end 102 | assert_match(/No regressions detected/, stdout) 103 | assert_equal(0, $?.exitstatus) 104 | end 105 | 106 | def test_no_json_in_file 107 | stdout, stderr = capture_subprocess_io do 108 | in_tmp_dir do 109 | foo = generate_bench_data 110 | bar = generate_bench_data 111 | File.write("references/#{foo[0][:name]}", foo.to_json) 112 | File.write("references/#{bar[0][:name]}","Faker::Quote.famous_last_words\n#{bar.to_json}") 113 | foo[0][:average] = (foo[0][:average] * rand(1.0..1.2)).to_i 114 | foo[0][:raw_average] = (foo[0][:average] * rand(0.9..1.1)).to_i 115 | File.write("comparisons/#{foo[0][:name]}", foo.to_json) 116 | File.write("comparisons/#{bar[0][:name]}","Faker::Quote.famous_last_words\n") 117 | puts `./node-bench-regression-guard --reference ./references --compare-with ./comparisons` 118 | end 119 | end 120 | assert_match(/contain any JSON line/, stderr) 121 | end 122 | 123 | end 124 | -------------------------------------------------------------------------------- /dockerfiles/node-wrk/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM 
debian:buster-slim as builder 2 | 3 | # metadata 4 | ARG VCS_REF 5 | ARG BUILD_DATE 6 | ARG IMAGE_NAME="staking-miner" 7 | ARG PROFILE=release 8 | 9 | LABEL description="This is the build stage. Here we create the binary." 10 | 11 | WORKDIR /build 12 | 13 | RUN set -eux && \ 14 | apt-get update && \ 15 | apt-get install -y build-essential git curl clang libclang-dev pkg-config libssl-dev unzip lua5.1 liblua5.1-0-dev && \ 16 | git clone https://github.com/wg/wrk.git wrk && \ 17 | cd wrk && \ 18 | make 19 | 20 | # ===== SECOND STAGE ====== 21 | 22 | ARG VCS_REF=master 23 | ARG BUILD_DATE="" 24 | 25 | FROM node:18-slim 26 | 27 | LABEL summary="Image for Substrate-api-sidecar benchmarks" \ 28 | name="${REGISTRY_PATH}/sidecar-bench" \ 29 | maintainer="devops-team@parity.io" \ 30 | version="1.0" \ 31 | description="node image with wrk for benchmarks" \ 32 | io.parity.image.vendor="Parity Technologies" \ 33 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/sidecar-bench/Dockerfile" \ 34 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/sidecar-bench/README.md" \ 35 | io.parity.image.revision="${VCS_REF}" \ 36 | io.parity.image.created="${BUILD_DATE}" 37 | 38 | COPY --from=builder /build/wrk/wrk /usr/local/bin 39 | -------------------------------------------------------------------------------- /dockerfiles/node-wrk/README.md: -------------------------------------------------------------------------------- 1 | # Dockerfile based on `node:16-slim` contains [wrk](https://github.com/wg/wrk) tool for benchmarks 2 | -------------------------------------------------------------------------------- /dockerfiles/packer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/paritytech/mitogen:latest 2 | 3 | # metadata 4 | ARG VCS_REF=master 5 | ARG BUILD_DATE="" 6 | ARG REGISTRY_PATH=docker.io/paritytech 7 | ARG PACKER_VERSION=1.8.7 8 | 
ARG VAULT_VERSION=1.17.3 9 | 10 | LABEL io.parity.image.authors="devops-team@parity.io" \ 11 | io.parity.image.vendor="Parity Technologies" \ 12 | io.parity.image.title="${REGISTRY_PATH}/packer" \ 13 | io.parity.image.description="packer" \ 14 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/packer/Dockerfile" \ 16 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 17 | dockerfiles/packer/README.md" \ 18 | io.parity.image.revision="${VCS_REF}" \ 19 | io.parity.image.created="${BUILD_DATE}" 20 | 21 | USER root:root 22 | SHELL ["/bin/bash", "-c"] 23 | 24 | RUN apt-get update -y \ 25 | && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \ 26 | libarchive-tools \ 27 | && rm -rf /var/lib/apt/lists/* 28 | 29 | RUN pip install --no-cache-dir hvac # for ansible community.hashi_vault 30 | 31 | RUN set -euo pipefail \ 32 | && curl -L "https://releases.hashicorp.com/packer/${PACKER_VERSION}/packer_${PACKER_VERSION}_linux_amd64.zip" \ 33 | | bsdtar -xf - -C /usr/local/bin packer \ 34 | && chmod +x /usr/local/bin/packer \ 35 | \ 36 | && curl -L "https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_linux_amd64.zip" \ 37 | | bsdtar -xf - -C /usr/local/bin vault \ 38 | && chmod +x /usr/local/bin/vault 39 | 40 | USER 10000:10001 41 | -------------------------------------------------------------------------------- /dockerfiles/packer/README.md: -------------------------------------------------------------------------------- 1 | # Packer 2 | 3 | Image for packer builds, used by cloud-infra pipelines. 
4 | 5 | ## Dependencies and Tools 6 | 7 | - `ansible` 8 | - `packer` 9 | - `openssh-client` 10 | - `vault` 11 | -------------------------------------------------------------------------------- /dockerfiles/parity-keyring/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG REGISTRY_PATH=docker.io/paritytech 2 | 3 | FROM docker.io/paritytech/gnupg:latest 4 | 5 | # 'Parity Security Team ' 6 | ARG KEY_ID=9D4B2B6EB8F97156D19669A9FF0812D491B96798 7 | ARG KEY_SERVER=hkps://keys.mailvelope.com 8 | ARG VCS_REF=master 9 | ARG BUILD_DATE="" 10 | 11 | # metadata 12 | LABEL summary="Base image with Parity-Keyring" \ 13 | name="${REGISTRY_PATH}/parity-keyring" \ 14 | maintainer="devops-team@parity.io" \ 15 | version="1.0" \ 16 | description="Parity Keyring base container" \ 17 | io.parity.image.vendor="Parity Technologies" \ 18 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/parity-keyring/Dockerfile" \ 19 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/parity-keyring/README.md" \ 20 | io.parity.image.revision="${VCS_REF}" \ 21 | io.parity.image.created="${BUILD_DATE}" 22 | 23 | USER root 24 | 25 | RUN gpg --recv-keys --keyserver $KEY_SERVER $KEY_ID && \ 26 | gpg --export $KEY_ID > /usr/share/keyrings/parity.gpg && \ 27 | echo 'deb [signed-by=/usr/share/keyrings/parity.gpg] https://releases.parity.io/deb release main' > /etc/apt/sources.list.d/parity.list && \ 28 | apt update && \ 29 | apt install parity-keyring 30 | 31 | USER nonroot:nonroot 32 | 33 | RUN gpg /usr/share/keyrings/parity.gpg | grep -v expired 34 | 35 | WORKDIR /home/nonroot 36 | CMD ["/bin/bash"] 37 | -------------------------------------------------------------------------------- /dockerfiles/parity-keyring/README.md: -------------------------------------------------------------------------------- 1 | # parity-keyring 2 | 3 | A base Docker image based on [our gnupg 
image](https://hub.docker.com/repository/docker/paritytech/gnupg) and coming pre-installed with the parity keyring. 4 | 5 | [Click here](https://hub.docker.com/repository/docker/paritytech/parity-keyring) for the registry. 6 | 7 | ## Usage 8 | 9 | ``` 10 | docker run --rm -it docker.io/paritytech/parity-keyring gpg --list-keys $KEY_ID 11 | ``` 12 | 13 | -------------------------------------------------------------------------------- /dockerfiles/parity-scale-codec/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG VCS_REF=master 2 | ARG BUILD_DATE 3 | ARG REGISTRY_PATH=docker.io/paritytech 4 | 5 | FROM ${REGISTRY_PATH}/base-ci-linux:latest 6 | 7 | # metadata 8 | LABEL io.parity.image.authors="devops-team@parity.io" \ 9 | io.parity.image.vendor="Parity Technologies" \ 10 | io.parity.image.title="${REGISTRY_PATH}/parity-scale-codec" \ 11 | io.parity.image.description="Inherits from docker.io/paritytech/base-ci-linux:latest \ 12 | rust nightly, clippy, rustfmt, miri, rust-src grcov, rust-covfix, llvm-tools-preview, \ 13 | cargo-contract, xargo, binaryen, parallel, codecov, gnupg" \ 14 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/parity-scale-codec/Dockerfile" \ 16 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 17 | dockerfiles/parity-scale-codec/README.md" \ 18 | io.parity.image.revision="${VCS_REF}" \ 19 | io.parity.image.created="${BUILD_DATE}" 20 | 21 | WORKDIR /builds 22 | 23 | ENV SHELL /bin/bash 24 | ENV DEBIAN_FRONTEND=noninteractive 25 | 26 | RUN set -eux; \ 27 | # Needed for running commands in parallel, without overlapping output. 28 | # gnupg is only needed to verify the signature of the codecov uploader. 
29 | apt-get -y update && \ 30 | apt-get install -y --no-install-recommends parallel gnupg && \ 31 | # codecov uploader 32 | curl --remote-name --silent https://uploader.codecov.io/latest/linux/codecov && \ 33 | curl --remote-name --silent https://uploader.codecov.io/latest/linux/codecov.SHA256SUM && \ 34 | curl --remote-name --silent https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig && \ 35 | curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --import && \ 36 | gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM && \ 37 | shasum --algorithm 256 --check codecov.SHA256SUM && \ 38 | chmod +x codecov && \ 39 | mv codecov /usr/local/bin/codecov && \ 40 | rm -f codecov.SHA256SUM codecov.SHA256SUM.sig && \ 41 | # `binaryen` is needed by `cargo-contract` for optimizing Wasm files. 42 | # We fetch the latest release which contains a Linux binary. 43 | curl --silent https://api.github.com/repos/WebAssembly/binaryen/releases | \ 44 | egrep --only-matching 'https://github.com/WebAssembly/binaryen/releases/download/version_[0-9]+/binaryen-version_[0-9]+-x86_64-linux.tar.gz' | \ 45 | head -n1 | \ 46 | xargs curl -L -O && \ 47 | tar -xvzf binaryen-*-x86_64-linux.tar.gz && \ 48 | rm binaryen-*-x86_64-linux.tar.gz && \ 49 | chmod +x binaryen-*/bin/wasm-opt && \ 50 | mv binaryen-*/bin/wasm-opt /usr/local/bin/ && \ 51 | rm -rf binaryen-*/ && \ 52 | # The supported Rust nightly version must support the following components 53 | # to allow for a functioning CI pipeline: 54 | # 55 | # - cargo: General build tool. 56 | # - rustfmt: Rust formatting tool. 57 | # - clippy: Rust linter. 58 | # - miri: Rust interpreter with additional safety checks. 59 | # 60 | # We also need to install the wasm32-unknown-unknown target to test 61 | # ink! smart contracts compilation for this target architecture. 62 | # 63 | # Only Rust nightly builds supporting all of the above mentioned components 64 | # and targets can be used for this docker image. 
65 | # Installs the latest common nightly for the listed components, 66 | # adds those components, wasm target and sets the profile to minimal 67 | rustup toolchain install nightly --target wasm32-unknown-unknown \ 68 | --profile minimal --component rustfmt clippy miri rust-src llvm-tools-preview && \ 69 | rustup default nightly && \ 70 | # We require `xargo` so that `miri` runs properly 71 | # We require `grcov` for coverage reporting and `rust-covfix` to improve it. 72 | # We require `cargo-spellcheck` for automated spell-checking 73 | cargo install grcov rust-covfix xargo dylint-link && \ 74 | cargo install cargo-contract && \ 75 | 76 | # Stuff that compiles on stable. 77 | cargo +stable install cargo-spellcheck && \ 78 | 79 | # versions 80 | rustup show && \ 81 | cargo --version && \ 82 | cargo-contract --version && \ 83 | wasm-opt --version && \ 84 | # Clean up and remove compilation artifacts that a cargo install creates (>250M). 85 | rm -rf "${CARGO_HOME}/registry" "${CARGO_HOME}/git" /root/.cache/sccache && \ 86 | # apt clean up 87 | apt-get remove -y gnupg && \ 88 | apt-get autoremove -y && \ 89 | apt-get clean && \ 90 | rm -rf /var/lib/apt/lists/* 91 | 92 | # TODO: switch to non-root here when ink-waterfall-ci won't be the dependency of this img 93 | # USER nonroot:nonroot 94 | -------------------------------------------------------------------------------- /dockerfiles/parity-scale-codec/README.md: -------------------------------------------------------------------------------- 1 | # parity-scale-codec CI for Linux Distributionsimage 2 | 3 | Docker image based on our base CI image ``. 4 | 5 | Used to build and test parity-scale-codec. 
6 | 7 | ## Dependencies and Tools 8 | 9 | - `parallel`: for running commands in parallel, without overlapping output 10 | - `codecov`: to upload the test coverage results 11 | - `binaryen`: needed by cargo-contract for optimizing Wasm files 12 | - `gnupg` 13 | 14 | **Inherited from ``** 15 | 16 | - `libssl-dev` 17 | - `clang` 18 | - `lld` 19 | - `libclang-dev` 20 | - `make` 21 | - `cmake` 22 | - `git` 23 | - `pkg-config` 24 | - `curl` 25 | - `time` 26 | - `rhash` 27 | - `ca-certificates` 28 | - `jq` 29 | 30 | **Rust versions:** 31 | 32 | We always use the [latest possible](https://rust-lang.github.io/rustup-components-history/) `nightly` version that supports our required `rustup` components: 33 | 34 | - `clippy`: The Rust linter. 35 | - `rust-src`: The Rust sources of the standard library. 36 | - `miri`: The Rust MIR interpreter that interprets the test suite with additional checks. 37 | - `rustfmt`: The Rust code formatter. 38 | 39 | **Rust tools & toolchains:** 40 | 41 | - `grcov`: Required for general Rust code coverage reports. 42 | - `rust-covfix`: Required to polish the coverage reports by `grcov`. 43 | - `cargo-contract`: Required to build ink! Wasm smart contracts. 44 | - `xargo`: Required so that `miri` runs properly. 45 | - `cargo-spellcheck`: Required for the CI to do automated spell-checking. 46 | - `wasm32-unknown-unknown`: The toolchain to compile Rust codebases for Wasm. 47 | - `llvm-tools-preview`: or MIR source-based Rust coverage instrumentation. 48 | 49 | [Click here](https://hub.docker.com/repository/docker/paritytech/parity-scale-codec) for the registry. 50 | 51 | ## Usage 52 | 53 | ```yaml 54 | test-ink: 55 | stage: test 56 | image: paritytech/parity-scale-codec:production 57 | script: 58 | - cargo build ... 
59 | ``` 60 | 61 | -------------------------------------------------------------------------------- /dockerfiles/polkadotjs-cli/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/alpine:3.17.2 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | # metadata 8 | LABEL io.parity.image.authors="devops-team@parity.io" \ 9 | io.parity.image.vendor="Parity Technologies" \ 10 | io.parity.image.title="${REGISTRY_PATH}/polkadotjs-cli" \ 11 | io.parity.image.description="Image to use for polkadotjs;" \ 12 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 13 | dockerfiles/polkadotjs-cli/Dockerfile" \ 14 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/polkadotjs-cli/README.md" \ 16 | io.parity.image.revision="${VCS_REF}" \ 17 | io.parity.image.created="${BUILD_DATE}" 18 | 19 | RUN apk add --no-cache bash yarn npm jq wget && \ 20 | yarn global add @polkadot/api-cli 21 | -------------------------------------------------------------------------------- /dockerfiles/polkadotjs-cli/README.md: -------------------------------------------------------------------------------- 1 | # bridges-cumulus 2 | 3 | Docker image based on Alpine. 
4 | 5 | Used to execute any polkadotjs related calls via polkadotjs-cli 6 | 7 | # Packages 8 | bash with installed polkadot-js-api, jq, nodejs/node/npm 9 | -------------------------------------------------------------------------------- /dockerfiles/python/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/python:3.12 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | ARG VAULT_VERSION 7 | 8 | # metadata 9 | LABEL io.parity.image.authors="devops-team@parity.io" \ 10 | io.parity.image.vendor="Parity Technologies" \ 11 | io.parity.image.title="${REGISTRY_PATH}/python" \ 12 | io.parity.image.description="python; vault;" \ 13 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 14 | dockerfiles/terraform/Dockerfile" \ 15 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 16 | dockerfiles/terraform/README.md" \ 17 | io.parity.image.revision="${VCS_REF}" \ 18 | io.parity.image.created="${BUILD_DATE}" 19 | 20 | RUN curl "https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_linux_amd64.zip" \ 21 | -o vault.zip; \ 22 | unzip vault.zip -d /usr/local/bin/ vault; \ 23 | rm vault.zip; \ 24 | chmod +x /usr/local/bin/vault 25 | 26 | RUN apt-get update \ 27 | && apt-get install -y --no-install-recommends \ 28 | docker.io jq \ 29 | && rm -rf /var/lib/apt/lists/* \ 30 | && rm -Rf /usr/share/doc && rm -Rf /usr/share/man \ 31 | && apt-get clean 32 | 33 | ARG WORKDIR=/work 34 | RUN groupadd --gid 10001 nonroot && \ 35 | useradd --home-dir /home/nonroot \ 36 | --create-home \ 37 | --shell /bin/bash \ 38 | --gid nonroot \ 39 | --groups nonroot \ 40 | --uid 10000 nonroot 41 | RUN chown -R nonroot. /home/nonroot 42 | RUN mkdir ${WORKDIR} 43 | RUN chown -R nonroot. 
${WORKDIR} 44 | USER 10000:10001 45 | WORKDIR ${WORKDIR} 46 | 47 | USER nonroot:nonroot 48 | CMD ["/bin/bash"] 49 | -------------------------------------------------------------------------------- /dockerfiles/python/README.md: -------------------------------------------------------------------------------- 1 | # Image containing Python + Vault 2 | -------------------------------------------------------------------------------- /dockerfiles/query-exporter/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/python:3-alpine 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | # metadata 8 | LABEL io.parity.image.authors="devops-team@parity.io" \ 9 | io.parity.image.vendor="Parity Technologies" \ 10 | io.parity.image.title="${REGISTRY_PATH}/query-exporter" \ 11 | io.parity.image.source="https://github.com/paritytech/polkadot/blob/${VCS_REF}/scripts/docker/Dockerfile" \ 12 | io.parity.image.revision="${VCS_REF}" \ 13 | io.parity.image.created="${BUILD_DATE}" 14 | 15 | RUN \ 16 | apk add --no-cache postgresql-libs bash shadow && \ 17 | apk add --no-cache --virtual .build-deps gcc musl-dev postgresql-dev && \ 18 | python3 -m pip install --no-cache-dir SQLAlchemy[postgresql] query-exporter && \ 19 | apk --purge del .build-deps 20 | 21 | RUN set -x \ 22 | && groupadd -g 1000 nonroot \ 23 | && useradd -u 1000 -g 1000 -s /bin/bash -m nonroot 24 | 25 | EXPOSE 9560 26 | 27 | USER nonroot:nonroot 28 | ENTRYPOINT ["python3", "/usr/local/bin/query-exporter"] 29 | -------------------------------------------------------------------------------- /dockerfiles/query-exporter/README.md: -------------------------------------------------------------------------------- 1 | # query-exporter 2 | 3 | take the sql query-exporter from 4 | [github.com/albertodonato/query-exporter](https://github.com/albertodonato/query-exporter.git) 5 | and build it into a dockerfile. 
6 | 7 | the query-exporter can be used to expose arbitrary sql queries as metrics for 8 | prometheus. this docker image was set up for the substrate-save database. 9 | -------------------------------------------------------------------------------- /dockerfiles/redis-exporter/Dockerfile: -------------------------------------------------------------------------------- 1 | #docker image from https://github.com/oliver006/redis_exporter 2 | FROM docker.io/library/golang:1.13-alpine as builder 3 | 4 | RUN apk --no-cache add ca-certificates git 5 | 6 | RUN go get github.com/oliver006/redis_exporter 7 | RUN cd ${GOPATH}/src/github.com/oliver006/redis_exporter && go build 8 | 9 | 10 | FROM docker.io/library/alpine as alpine 11 | COPY --from=builder /go/src/github.com/oliver006/redis_exporter/redis_exporter /redis_exporter 12 | COPY --from=builder /etc/ssl/certs /etc/ssl/certs 13 | 14 | # Run as non-root user for secure environments 15 | USER 59000:59000 16 | 17 | EXPOSE 9121 18 | ENTRYPOINT [ "/redis_exporter" ] 19 | -------------------------------------------------------------------------------- /dockerfiles/redis-exporter/README.md: -------------------------------------------------------------------------------- 1 | # redis exporter 2 | 3 | build docker image from https://github.com/oliver006/redis_exporter 4 | -------------------------------------------------------------------------------- /dockerfiles/releng-scripts/.dockerignore: -------------------------------------------------------------------------------- 1 | .env* 2 | !.env-sample 3 | tests 4 | .github 5 | .git 6 | .* 7 | CODEOWNERS 8 | -------------------------------------------------------------------------------- /dockerfiles/releng-scripts/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG REGISTRY_PATH=docker.io/paritytech 2 | 3 | FROM docker.io/library/ubuntu:latest 4 | 5 | ARG VCS_REF=master 6 | ARG BUILD_DATE="" 7 | ARG UID=10000 8 | ARG GID=10000 9 | 10 | 
# metadata 11 | LABEL summary="Base image for GnuPG operations" \ 12 | name="${REGISTRY_PATH}/gnupg" \ 13 | maintainer="devops-team@parity.io" \ 14 | version="1.0" \ 15 | description="GnuPG base container" \ 16 | io.parity.image.vendor="Parity Technologies" \ 17 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/gnupg/Dockerfile" \ 18 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/gnupg/README.md" \ 19 | io.parity.image.revision="${VCS_REF}" \ 20 | io.parity.image.created="${BUILD_DATE}" 21 | 22 | RUN apt-get update && apt-get install -yq --no-install-recommends ca-certificates bash jq unzip curl && \ 23 | curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "/tmp/awscliv2.zip" && \ 24 | unzip "/tmp/awscliv2.zip" && rm "/tmp/awscliv2.zip" && \ 25 | /aws/install && rm -rf /aws && \ 26 | apt -yq remove ca-certificates unzip && apt -yq autoremove && \ 27 | aws --version 28 | 29 | WORKDIR /scripts 30 | 31 | COPY . . 32 | 33 | RUN set -x \ 34 | && groupadd -g $GID nonroot \ 35 | && useradd -u $UID -g $GID -s /bin/bash -m nonroot 36 | 37 | USER nonroot:nonroot 38 | 39 | ENTRYPOINT [ "./releng-scripts" ] 40 | -------------------------------------------------------------------------------- /dockerfiles/releng-scripts/README.md: -------------------------------------------------------------------------------- 1 | # releng-scripts 2 | 3 | Docker image based on [official Ubuntu image](https://hub.docker.com/_/ubuntu) ubuntu:latest. 4 | 5 | This is an image for the scripts located in this repo: https://github.com/paritytech/releng-scripts 6 | 7 | ## Build 8 | 9 | The provided `build.sh` script clones [the repo](https://github.com/paritytech/releng-scripts) and builds the image. 
10 | 11 | ## Usage 12 | 13 | ```bash 14 | # Show the help 15 | docker run --rm -it paritytech/releng-scripts 16 | # Show the version 17 | docker run --rm -it paritytech/releng-scripts version 18 | ``` 19 | -------------------------------------------------------------------------------- /dockerfiles/releng-scripts/build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | REPO=https://github.com/paritytech/releng-scripts 4 | REGISTRY_PATH=${REGISTRY_PATH:-paritytech} 5 | DOCKER_IMAGE_NAME=releng-scripts 6 | 7 | REPO_TMP=$(mktemp -d) 8 | echo "Cloning ${REPO} into ${REPO_TMP}" 9 | git clone ${REPO} "${REPO_TMP}" 10 | 11 | docker build \ 12 | -t "${DOCKER_IMAGE_NAME}" \ 13 | -t "${REGISTRY_PATH}/${DOCKER_IMAGE_NAME}" \ 14 | "${REPO_TMP}" 15 | 16 | docker images | grep "${DOCKER_IMAGE_NAME}" 17 | 18 | # Testing 19 | docker run --rm -it ${DOCKER_IMAGE_NAME} 20 | docker run --rm -it ${DOCKER_IMAGE_NAME} version 21 | -------------------------------------------------------------------------------- /dockerfiles/rpm/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG REGISTRY_PATH=docker.io/paritytech 2 | 3 | FROM ${REGISTRY_PATH}/gnupg:latest 4 | 5 | ARG VCS_REF=master 6 | ARG BUILD_DATE="" 7 | 8 | # metadata 9 | LABEL summary="RPM packaging/signing toolchain" \ 10 | name="${REGISTRY_PATH}/rpm" \ 11 | maintainer="devops-team@parity.io" \ 12 | version="1.0" \ 13 | description="RPM packaging/signing toolchain" \ 14 | io.parity.image.vendor="Parity Technologies" \ 15 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/rpm/Dockerfile" \ 16 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/dockerfiles/rpm/README.md" \ 17 | io.parity.image.revision="${VCS_REF}" \ 18 | io.parity.image.created="${BUILD_DATE}" 19 | 20 | USER root 21 | RUN apt-get install -yq --no-install-recommends rpm 22 | 23 | 
USER nonroot:nonroot 24 | 25 | COPY rpm/rpmmacros /home/nonroot/.rpmmacros 26 | -------------------------------------------------------------------------------- /dockerfiles/rpm/README.md: -------------------------------------------------------------------------------- 1 | # rpm 2 | 3 | Docker image based on the [paritytech gnupg image](https://hub.docker.com/paritytech/gnupg) paritytech/gnupg:latest. 4 | 5 | Used for signing and managing .rpm repositories and packages 6 | 7 | **Tools:** 8 | 9 | - `rpm` 10 | 11 | [Click here](https://hub.docker.com/repository/docker/paritytech/rpm) for the registry. 12 | 13 | ## Usage 14 | 15 | ```Dockerfile 16 | FROM docker.io/paritytech/rpm:latest 17 | ``` 18 | 19 | -------------------------------------------------------------------------------- /dockerfiles/rpm/rpmmacros: -------------------------------------------------------------------------------- 1 | %_signature gpg 2 | %_gpg_path /home/nonroot/.gnupg 3 | %_gpg_name security@parity.io 4 | %_gpg /usr/bin/gpg 5 | %__gpg_sign_cmd %{__gpg} gpg --force-v3-sigs --batch --verbose --no-armor -u "%{_gpg_name}" -sbo %{__signature_filename} --digest-algo sha256 %{__plaintext_filename} 6 | -------------------------------------------------------------------------------- /dockerfiles/rusty-cachier-env/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/ruby:2.7-alpine3.16 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | LABEL io.parity.image.authors="devops-team@parity.io" \ 8 | io.parity.image.vendor="Parity Technologies" \ 9 | io.parity.image.title="${REGISTRY_PATH}/rusty-cachier-env" \ 10 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/scripts/rusty-cachier-env/Dockerfile" \ 11 | io.parity.image.revision="${VCS_REF}" \ 12 | io.parity.image.created="${BUILD_DATE}" 13 | 14 | RUN apk add --no-cache --update curl unzip bash 15 | 
-------------------------------------------------------------------------------- /dockerfiles/rusty-cachier-env/README.md: -------------------------------------------------------------------------------- 1 | # rusty-cachier-env 2 | 3 | A very basic and bare-bones image prepared to run a small subset of `rusty-cachier` commands. 4 | 5 | ### How it works 6 | 7 | Currently the image is baked with the environment to run `rusty-cachier cache notify` only. We keep the image size at the minimum. 8 | 9 | -------------------------------------------------------------------------------- /dockerfiles/sops/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/alpine:latest 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | ARG SOPS_VERSION 7 | ARG VAULT_VERSION 8 | 9 | # metadata 10 | LABEL io.parity.image.authors="devops-team@parity.io" \ 11 | io.parity.image.vendor="Parity Technologies" \ 12 | io.parity.image.title="${REGISTRY_PATH}/sops" \ 13 | io.parity.image.description="ca-certificates git jq make curl gettext; sops;" \ 14 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/terraform/Dockerfile" \ 16 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 17 | dockerfiles/terraform/README.md" \ 18 | io.parity.image.revision="${VCS_REF}" \ 19 | io.parity.image.created="${BUILD_DATE}" 20 | 21 | RUN apk add --no-cache \ 22 | ca-certificates git jq make curl gettext bash shadow; \ 23 | curl -L "https://github.com/mozilla/sops/releases/download/v${SOPS_VERSION}/sops-v${SOPS_VERSION}.linux" \ 24 | -o /usr/local/bin/sops; \ 25 | chmod +x /usr/local/bin/sops 26 | 27 | RUN curl "https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_linux_amd64.zip" \ 28 | -o vault.zip; \ 29 | unzip vault.zip -d /usr/local/bin/vault; \ 30 | rm vault.zip; \ 31 | chmod +x 
/usr/local/bin/vault 32 | 33 | RUN set -x \ 34 | && groupadd -g 1000 nonroot \ 35 | && useradd -u 1000 -g 1000 -s /bin/bash -m nonroot \ 36 | && mkdir /config \ 37 | && chown nonroot:nonroot /config 38 | 39 | WORKDIR /config 40 | 41 | USER nonroot:nonroot 42 | CMD ["/bin/bash"] 43 | -------------------------------------------------------------------------------- /dockerfiles/sops/README.md: -------------------------------------------------------------------------------- 1 | # Image containing the Sops and Vault binaries. 2 | -------------------------------------------------------------------------------- /dockerfiles/substrate-session-keys-grabber/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/python:alpine 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | 7 | # metadata 8 | LABEL io.parity.image.authors="devops-team@parity.io" \ 9 | io.parity.image.vendor="Parity Technologies" \ 10 | io.parity.image.title="${REGISTRY_PATH}/sops" \ 11 | io.parity.image.description=";" \ 12 | io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 13 | dockerfiles/terraform/Dockerfile" \ 14 | io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\ 15 | dockerfiles/terraform/README.md" \ 16 | io.parity.image.revision="${VCS_REF}" \ 17 | io.parity.image.created="${BUILD_DATE}" 18 | 19 | COPY substrate-session-keys-grabber/grabber.py . 
20 | 21 | VOLUME [ "/keystore" ] 22 | ENTRYPOINT ["python", "./grabber.py"] 23 | -------------------------------------------------------------------------------- /dockerfiles/substrate-session-keys-grabber/README.md: -------------------------------------------------------------------------------- 1 | # A Substrate session keys grabber 2 | 3 | ## Overview 4 | 5 | The utility re-constructs session key of a Substrate node by inspecting individual keys in the keystore and printing the key to STDOUT. 6 | 7 | ## Usage 8 | 9 | Launch the script directly with: 10 | 11 | ``` 12 | python src/grabber.py /path/to/node/keystore 13 | ``` 14 | 15 | Add it as an extra init container when using the [node helm-chart](https://github.com/paritytech/helm-charts/tree/main/charts/node): 16 | 17 | ``` 18 | extraInitContainers: 19 | - name: dump-session-keys 20 | image: docker.io/paritytech/substrate-session-keys-grabber:latest 21 | args: ["/keystore"] 22 | volumeMounts: 23 | - mountPath: /keystore 24 | name: chain-keystore 25 | ``` -------------------------------------------------------------------------------- /dockerfiles/substrate-session-keys-grabber/grabber.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | import argparse 4 | 5 | parser = argparse.ArgumentParser(description='Substrate session keys grabber') 6 | 7 | parser.add_argument('keystore', type=str, help='A path to the keystore') 8 | args = parser.parse_args() 9 | 10 | logger = logging.getLogger('session_keys_grabber') 11 | logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s') 12 | 13 | keystore_path = args.keystore 14 | 15 | def parse_session_key(dir): 16 | # variants of key prefixes in the right order 17 | key_formats = ( 18 | ['6772616e', '62616265', '696d6f6e', '70617261', '61756469'], # validator keys (gran,babe,imon,para,audi) 19 | ['6772616e', '62616265', '696d6f6e', '70617261', '6173676e', 
'61756469'], # validator keys (gran,babe,imon,para,asgn,audi) 20 | ['6772616e', '62616265', '696d6f6e', '70617261', '6173676e', '61756469', '62656566'], # validator keys (gran,babe,imon,para,asgn,audi,beef) 21 | ['6772616e', '62616265', '70617261', '6173676e', '61756469', '62656566'], # validator keys (gran,babe,para,asgn,audi,beef) 22 | ['61757261'] # collator keys (aura) 23 | ) 24 | possible_prefixes = list(set([j for i in key_formats for j in i])) 25 | if os.path.isdir(dir): 26 | os.chdir(dir) 27 | files = os.listdir('.') 28 | files = [i for i in files if len(i) in [72, 74] and i[0:8] in possible_prefixes] 29 | if not files: 30 | return None 31 | # find creation time of the newest key 32 | time_of_last_key = sorted(list(set([int(os.path.getmtime(i)) for i in files])))[-1] 33 | # parse the newest public keys and prefix them with the names of files. 34 | # make sure to only pick up the keys created within 1 second interval 35 | keys = {i[0:8]: i[8:] for i in files if int(os.path.getmtime(i)) in [time_of_last_key - 1, time_of_last_key, time_of_last_key + 1]} 36 | logger.debug('keys were found: ' + str(keys) + ' in the keystore path: ' + dir) 37 | for key_format in key_formats: 38 | if set(keys.keys()) == set(key_format): 39 | # build the session key 40 | session_key = '0x' + ''.join([keys[i] for i in key_format]) 41 | return(session_key) 42 | logger.error('Error parsing the session key') 43 | return None 44 | 45 | session_key = parse_session_key(keystore_path) 46 | if session_key: 47 | print(session_key) 48 | -------------------------------------------------------------------------------- /dockerfiles/terraform/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM docker.io/library/alpine:latest 2 | 3 | ARG VCS_REF=master 4 | ARG BUILD_DATE="" 5 | ARG REGISTRY_PATH=docker.io/paritytech 6 | ARG TERRAFORM_VERSION=1.5.6 7 | ARG VAULT_VERSION=1.17.3 8 | 9 | # metadata 10 | LABEL 
FROM docker.io/library/alpine:latest

ARG VCS_REF=master
ARG BUILD_DATE=""
ARG REGISTRY_PATH=docker.io/paritytech
ARG TERRAFORM_VERSION=1.5.6
ARG VAULT_VERSION=1.17.3

# metadata
LABEL io.parity.image.authors="devops-team@parity.io" \
	io.parity.image.vendor="Parity Technologies" \
	io.parity.image.title="${REGISTRY_PATH}/terraform" \
	io.parity.image.description="ca-certificates git jq make curl gettext; terraform;" \
	io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\
dockerfiles/terraform/Dockerfile" \
	io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\
dockerfiles/terraform/README.md" \
	io.parity.image.revision="${VCS_REF}" \
	io.parity.image.created="${BUILD_DATE}"

# --fail (-f) makes curl exit non-zero on HTTP errors instead of saving the
# error page as the zip archive; && stops the chain at the first failure
# instead of continuing past it like the original ';' separators did.
RUN apk add --no-cache \
        ca-certificates git jq make curl gettext bash shadow openssh-client python3 py3-requests \
    && ln -sf python3 /usr/bin/python \
    && curl -fsSL "https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip" \
        -o terraform.zip \
    && unzip terraform.zip -d /usr/local/bin/ terraform \
    && rm terraform.zip \
    && chmod +x /usr/local/bin/terraform

RUN curl -fsSL "https://releases.hashicorp.com/vault/${VAULT_VERSION}/vault_${VAULT_VERSION}_linux_amd64.zip" \
        -o vault.zip \
    && unzip vault.zip -d /usr/local/bin/ vault \
    && rm vault.zip \
    && chmod +x /usr/local/bin/vault

# Run as an unprivileged user with a writable /config working directory.
RUN set -x \
    && groupadd -g 1000 nonroot \
    && useradd -u 1000 -g 1000 -s /bin/bash -m nonroot \
    && mkdir /config \
    && chown nonroot:nonroot /config

WORKDIR /config

USER nonroot:nonroot
CMD ["/bin/bash"]
FROM docker.io/library/alpine:3.16

ARG VCS_REF=master
ARG BUILD_DATE=""
ARG REGISTRY_PATH=docker.io/paritytech

ENV PROM_VERSION=2.19.0

# metadata
LABEL io.parity.image.authors="devops-team@parity.io" \
	io.parity.image.vendor="Parity Technologies" \
	io.parity.image.title="${REGISTRY_PATH}/tools" \
	io.parity.image.description="curl git jq rsync make gettext gnupg bash shadow redis promtool openssh-client ruby" \
	io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\
dockerfiles/tools/Dockerfile" \
	io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\
dockerfiles/tools/README.md" \
	io.parity.image.revision="${VCS_REF}" \
	io.parity.image.created="${BUILD_DATE}"


RUN apk add --no-cache curl git jq rsync make gettext gnupg bash shadow redis openssh-client ruby xz ripgrep zstd skopeo

# --fail (-f) makes curl exit non-zero on HTTP errors instead of installing
# an HTML error page as the mc binary.
RUN curl -fL "https://dl.min.io/client/mc/release/linux-amd64/mc" -o /usr/local/bin/mc && chmod 755 /usr/local/bin/mc

# Extract only promtool from the prometheus release tarball.
RUN curl -f -sS -L "https://github.com/prometheus/prometheus/releases/download/v${PROM_VERSION}/prometheus-${PROM_VERSION}.linux-amd64.tar.gz" \
        --output prometheus.tar.gz && \
    tar -xzf prometheus.tar.gz prometheus-${PROM_VERSION}.linux-amd64/promtool && \
    mv prometheus-${PROM_VERSION}.linux-amd64/promtool /usr/local/bin && \
    rm -rf prometheus.tar.gz prometheus-${PROM_VERSION}.linux-amd64

# Unprivileged user; the safe.directory setting (written to /root/.gitconfig
# by the git config call) is copied over so git works on bind-mounted repos.
RUN set -x \
    && groupadd -g 1000 nonroot \
    && useradd -u 1000 -g 1000 -s /bin/bash -m nonroot \
    && git config --global --add safe.directory '*' \
    && cp /root/.gitconfig /home/nonroot/.gitconfig \
    && chown nonroot:nonroot /home/nonroot/.gitconfig

USER nonroot:nonroot
CMD ["/bin/bash"]
7 | rustflags = ["-Ctarget-feature=+aes,+sse2,+ssse3"] 8 | # setup clang as Linker 9 | linker="clang-14" 10 | -------------------------------------------------------------------------------- /dockerfiles/utility/debian-llvm-clang.key: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paritytech/scripts/1ea0f9b94cc1bd7ed80eb7e8884b6f82b0644f32/dockerfiles/utility/debian-llvm-clang.key -------------------------------------------------------------------------------- /dockerfiles/utility/libudev.patch: -------------------------------------------------------------------------------- 1 | diff --git a/src/collect/collect.c b/src/collect/collect.c 2 | index 2cf1f00..b24f26b 100644 3 | --- a/src/collect/collect.c 4 | +++ b/src/collect/collect.c 5 | @@ -84,7 +84,7 @@ static void usage(void) 6 | " invoked for each ID in ) collect returns 0, the\n" 7 | " number of missing IDs otherwise.\n" 8 | " On error a negative number is returned.\n\n" 9 | - , program_invocation_short_name); 10 | + , "parity"); 11 | } 12 | 13 | /* 14 | diff --git a/src/scsi_id/scsi_id.c b/src/scsi_id/scsi_id.c 15 | index 8b76d87..7bf3948 100644 16 | --- a/src/scsi_id/scsi_id.c 17 | +++ b/src/scsi_id/scsi_id.c 18 | @@ -321,7 +321,7 @@ static void help(void) { 19 | " -u --replace-whitespace Replace all whitespace by underscores\n" 20 | " -v --verbose Verbose logging\n" 21 | " -x --export Print values as environment keys\n" 22 | - , program_invocation_short_name); 23 | + , "parity"); 24 | 25 | } 26 | 27 | diff --git a/src/shared/hashmap.h b/src/shared/hashmap.h 28 | index a03ee58..a7c2005 100644 29 | --- a/src/shared/hashmap.h 30 | +++ b/src/shared/hashmap.h 31 | @@ -98,10 +98,7 @@ extern const struct hash_ops uint64_hash_ops; 32 | #if SIZEOF_DEV_T != 8 33 | unsigned long devt_hash_func(const void *p, const uint8_t hash_key[HASH_KEY_SIZE]) _pure_; 34 | int devt_compare_func(const void *a, const void *b) _pure_; 35 | -extern const struct hash_ops 
devt_hash_ops = { 36 | - .hash = devt_hash_func, 37 | - .compare = devt_compare_func 38 | -}; 39 | +extern const struct hash_ops devt_hash_ops; 40 | #else 41 | #define devt_hash_func uint64_hash_func 42 | #define devt_compare_func uint64_compare_func 43 | diff --git a/src/shared/log.c b/src/shared/log.c 44 | index 4a40996..1496984 100644 45 | --- a/src/shared/log.c 46 | +++ b/src/shared/log.c 47 | @@ -335,7 +335,7 @@ static int write_to_syslog( 48 | 49 | IOVEC_SET_STRING(iovec[0], header_priority); 50 | IOVEC_SET_STRING(iovec[1], header_time); 51 | - IOVEC_SET_STRING(iovec[2], program_invocation_short_name); 52 | + IOVEC_SET_STRING(iovec[2], "parity"); 53 | IOVEC_SET_STRING(iovec[3], header_pid); 54 | IOVEC_SET_STRING(iovec[4], buffer); 55 | 56 | @@ -383,7 +383,7 @@ static int write_to_kmsg( 57 | char_array_0(header_pid); 58 | 59 | IOVEC_SET_STRING(iovec[0], header_priority); 60 | - IOVEC_SET_STRING(iovec[1], program_invocation_short_name); 61 | + IOVEC_SET_STRING(iovec[1], "parity"); 62 | IOVEC_SET_STRING(iovec[2], header_pid); 63 | IOVEC_SET_STRING(iovec[3], buffer); 64 | IOVEC_SET_STRING(iovec[4], "\n"); 65 | diff --git a/src/udev/udevadm-control.c b/src/udev/udevadm-control.c 66 | index 6af7163..3271e56 100644 67 | --- a/src/udev/udevadm-control.c 68 | +++ b/src/udev/udevadm-control.c 69 | @@ -41,7 +41,7 @@ static void print_help(void) { 70 | " -p --property=KEY=VALUE Set a global property for all events\n" 71 | " -m --children-max=N Maximum number of children\n" 72 | " --timeout=SECONDS Maximum time to block for a reply\n" 73 | - , program_invocation_short_name); 74 | + , "parity"); 75 | } 76 | 77 | static int adm_control(struct udev *udev, int argc, char *argv[]) { 78 | diff --git a/src/udev/udevadm-info.c b/src/udev/udevadm-info.c 79 | index 0aec976..a31ac02 100644 80 | --- a/src/udev/udevadm-info.c 81 | +++ b/src/udev/udevadm-info.c 82 | @@ -279,7 +279,7 @@ static void help(void) { 83 | " -P --export-prefix Export the key name with a prefix\n" 84 | " -e 
--export-db Export the content of the udev database\n" 85 | " -c --cleanup-db Clean up the udev database\n" 86 | - , program_invocation_short_name); 87 | + , "parity"); 88 | } 89 | 90 | static int uinfo(struct udev *udev, int argc, char *argv[]) { 91 | diff --git a/src/udev/udevadm-monitor.c b/src/udev/udevadm-monitor.c 92 | index 15ded09..b58dd08 100644 93 | --- a/src/udev/udevadm-monitor.c 94 | +++ b/src/udev/udevadm-monitor.c 95 | @@ -73,7 +73,7 @@ static void help(void) { 96 | " -u --udev Print udev events\n" 97 | " -s --subsystem-match=SUBSYSTEM[/DEVTYPE] Filter events by subsystem\n" 98 | " -t --tag-match=TAG Filter events by tag\n" 99 | - , program_invocation_short_name); 100 | + , "parity"); 101 | } 102 | 103 | static int adm_monitor(struct udev *udev, int argc, char *argv[]) { 104 | diff --git a/src/udev/udevadm-settle.c b/src/udev/udevadm-settle.c 105 | index 33597bc..b36a504 100644 106 | --- a/src/udev/udevadm-settle.c 107 | +++ b/src/udev/udevadm-settle.c 108 | @@ -43,7 +43,7 @@ static void help(void) { 109 | " --version Show package version\n" 110 | " -t --timeout=SECONDS Maximum time to wait for events\n" 111 | " -E --exit-if-exists=FILE Stop waiting if file exists\n" 112 | - , program_invocation_short_name); 113 | + , "parity"); 114 | } 115 | 116 | static int adm_settle(struct udev *udev, int argc, char *argv[]) { 117 | diff --git a/src/udev/udevadm-test-builtin.c b/src/udev/udevadm-test-builtin.c 118 | index baaeca9..50ed812 100644 119 | --- a/src/udev/udevadm-test-builtin.c 120 | +++ b/src/udev/udevadm-test-builtin.c 121 | @@ -39,7 +39,7 @@ static void help(struct udev *udev) { 122 | " -h --help Print this message\n" 123 | " --version Print version of the program\n\n" 124 | "Commands:\n" 125 | - , program_invocation_short_name); 126 | + , "parity"); 127 | 128 | udev_builtin_list(udev); 129 | } 130 | diff --git a/src/udev/udevadm-test.c b/src/udev/udevadm-test.c 131 | index 47fd924..a855412 100644 132 | --- a/src/udev/udevadm-test.c 133 | +++ 
b/src/udev/udevadm-test.c 134 | @@ -39,7 +39,7 @@ static void help(void) { 135 | " --version Show package version\n" 136 | " -a --action=ACTION Set action string\n" 137 | " -N --resolve-names=early|late|never When to resolve names\n" 138 | - , program_invocation_short_name); 139 | + , "parity"); 140 | } 141 | 142 | static int adm_test(struct udev *udev, int argc, char *argv[]) { 143 | diff --git a/src/udev/udevadm-trigger.c b/src/udev/udevadm-trigger.c 144 | index 4dc756a..67787d3 100644 145 | --- a/src/udev/udevadm-trigger.c 146 | +++ b/src/udev/udevadm-trigger.c 147 | @@ -92,7 +92,7 @@ static void help(void) { 148 | " -y --sysname-match=NAME Trigger devices with this /sys path\n" 149 | " --name-match=NAME Trigger devices with this /dev name\n" 150 | " -b --parent-match=NAME Trigger devices with that parent device\n" 151 | - , program_invocation_short_name); 152 | + , "parity"); 153 | } 154 | 155 | static int adm_trigger(struct udev *udev, int argc, char *argv[]) { 156 | diff --git a/src/udev/udevadm.c b/src/udev/udevadm.c 157 | index 3e57cf6..b03dfaa 100644 158 | --- a/src/udev/udevadm.c 159 | +++ b/src/udev/udevadm.c 160 | @@ -62,7 +62,7 @@ static int adm_help(struct udev *udev, int argc, char *argv[]) { 161 | printf("%s [--help] [--version] [--debug] COMMAND [COMMAND OPTIONS]\n\n" 162 | "Send control commands or test the device manager.\n\n" 163 | "Commands:\n" 164 | - , program_invocation_short_name); 165 | + , "parity"); 166 | 167 | for (i = 0; i < ELEMENTSOF(udevadm_cmds); i++) 168 | if (udevadm_cmds[i]->help != NULL) 169 | @@ -128,7 +128,7 @@ int main(int argc, char *argv[]) { 170 | goto out; 171 | } 172 | 173 | - fprintf(stderr, "%s: missing or unknown command\n", program_invocation_short_name); 174 | + fprintf(stderr, "%s: missing or unknown command\n", "parity"); 175 | rc = 2; 176 | out: 177 | mac_selinux_finish(); 178 | diff --git a/src/udev/udevd.c b/src/udev/udevd.c 179 | index cf826c6..4eec0af 100644 180 | --- a/src/udev/udevd.c 181 | +++ 
b/src/udev/udevd.c 182 | @@ -1041,7 +1041,7 @@ static void help(void) { 183 | " -t --event-timeout=SECONDS Seconds to wait before terminating an event\n" 184 | " -N --resolve-names=early|late|never\n" 185 | " When to resolve users and groups\n" 186 | - , program_invocation_short_name); 187 | + , "parity"); 188 | } 189 | 190 | static int parse_argv(int argc, char *argv[]) { 191 | diff --git a/src/v4l_id/v4l_id.c b/src/v4l_id/v4l_id.c 192 | index 1dce0d5..f65badf 100644 193 | --- a/src/v4l_id/v4l_id.c 194 | +++ b/src/v4l_id/v4l_id.c 195 | @@ -49,7 +49,7 @@ int main(int argc, char *argv[]) { 196 | printf("%s [-h,--help] \n\n" 197 | "Video4Linux device identification.\n\n" 198 | " -h Print this message\n" 199 | - , program_invocation_short_name); 200 | + , "parity"); 201 | return 0; 202 | case '?': 203 | return -EINVAL; 204 | diff --git a/src/shared/path-util.c b/src/shared/path-util.c 205 | index 0744563..7151356 100644 206 | --- a/src/shared/path-util.c 207 | +++ b/src/shared/path-util.c 208 | @@ -109,7 +109,7 @@ char *path_make_absolute_cwd(const char *p) { 209 | if (path_is_absolute(p)) 210 | return strdup(p); 211 | 212 | - cwd = get_current_dir_name(); 213 | + cwd = getcwd(malloc(128), 128); 214 | if (!cwd) 215 | return NULL; 216 | 217 | -------------------------------------------------------------------------------- /dockerfiles/utility/rust-builder-config: -------------------------------------------------------------------------------- 1 | [target.wasm32-unknown-unknown] 2 | runner = "node" 3 | linker = "clang" 4 | 5 | [target.x86_64-unknown-linux-gnu] 6 | # Enables the aes-ni instructions for RustCrypto dependency. 
FROM docker.io/library/python:3.10.10-slim-bullseye

ARG VCS_REF=master
ARG BUILD_DATE=""
ARG REGISTRY_PATH=docker.io/paritytech

# metadata
LABEL io.parity.image.authors="devops-team@parity.io" \
	io.parity.image.vendor="Parity Technologies" \
	io.parity.image.title="${REGISTRY_PATH}/ws-health-exporter" \
	io.parity.image.description="A health checker for Substrate-based nodes" \
	io.parity.image.source="https://github.com/paritytech/scripts/blob/${VCS_REF}/\
dockerfiles/ws-health-exporter/Dockerfile" \
	io.parity.image.documentation="https://github.com/paritytech/scripts/blob/${VCS_REF}/\
dockerfiles/ws-health-exporter/README.md" \
	io.parity.image.revision="${VCS_REF}" \
	io.parity.image.created="${BUILD_DATE}"

# Unbuffered stdout/stderr so log lines appear in `docker logs` immediately.
ENV PYTHONUNBUFFERED=1

RUN pip install --no-cache-dir prometheus-client websocket-client apscheduler flask environs waitress

# Keep the application out of the image root; CMD resolves ./exporter.py
# relative to this WORKDIR.
WORKDIR /app
COPY ws-health-exporter/exporter.py .

USER nobody:nogroup

EXPOSE 8001
CMD ["python", "./exporter.py"]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""WebSocket health exporter for Substrate-based nodes.

Periodically opens a WebSocket connection to each configured node RPC URL,
runs the ``system_health`` and ``system_syncState`` RPC methods, and exposes
the aggregated result as Prometheus metrics (``/metrics``) plus a simple
HTTP readiness probe (``/health/readiness``).
"""

import sys
import logging
import traceback
import json
import io
from urllib.parse import urlparse
from threading import Lock
import math
import time
from collections import deque

from flask import Flask
from waitress import serve
from apscheduler.schedulers.background import BackgroundScheduler
from prometheus_client import generate_latest, Gauge
from websocket import create_connection
from environs import Env
import signal

LOGGING_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'

ws_metrics = {
    'polkadot_ws_alive': Gauge('wss_alive', 'WebSocket alive', ['ws_alive_url'])
}


# global variable to keep current readiness status
# we need threading.Lock() to avoid race conditions
# some Python modules can use threads inside
readiness_status = {'status': False, 'lock': Lock()}

# Per-URL deque of recent block observations used to compute the block rate.
# Sized and populated in the __main__ block; defined here so check_ws() can
# always resolve the name (the original defined it only under __main__, so
# importing the module and calling check_ws() raised NameError).
block_number_cache = {}

# Background scheduler handle; assigned in the __main__ block.  Checked for
# None by the SIGTERM handler, which may fire before the scheduler exists.
scheduler = None

# Flask app
app = Flask(__name__, static_folder=None)

app_config = {
    'log_level': 'INFO',                        # WSHE_LOG_LEVEL
    'host': '0.0.0.0',                          # WSHE_HOST
    'port': 8001,                               # WSHE_PORT
    'ws_check_interval': 10,                    # WSHE_WS_CHECK_INTERVAL
    'ws_timeout': 60,                           # WSHE_WS_TIMEOUT
    'node_rpc_urls': ['ws://127.0.0.1:5556'],   # WSHE_NODE_RPC_URLS
    'node_max_unsynchronized_block_drift': 0,   # WSHE_NODE_MAX_UNSYNCHRONIZED_BLOCK_DRIFT
    'node_min_peers': 2,                        # WSHE_NODE_MIN_PEERS
    'block_rate_measurement_period': 600,       # WSHE_BLOCK_RATE_MEASUREMENT_PERIOD
    'min_block_rate': 0.0,                      # WSHE_MIN_BLOCK_RATE
}


def handle_sigterm(signum, frame):
    """Shut down the scheduler (if it was started) and exit on SIGTERM."""
    logging.info("Received SIGTERM. Shutting down...")
    if scheduler is not None:
        scheduler.shutdown()
    sys.exit(0)


def read_readiness_status():
    """Return the current readiness flag under the lock."""
    with readiness_status['lock']:
        return readiness_status['status']


def write_readiness_status(status):
    """Set the readiness flag under the lock."""
    with readiness_status['lock']:
        readiness_status['status'] = status


def uri_validator(url):
    """Return True when ``url`` parses with both a scheme and a netloc."""
    try:
        result = urlparse(url)
        return all([result.scheme, result.netloc])
    except Exception:  # urlparse may raise ValueError on malformed input
        return False


def run_error(msg):
    """Print a fatal configuration error to stderr and terminate."""
    print('fatal run error! ' + msg, file=sys.stderr)
    sys.exit(1)


def parse_config(config):
    """Populate ``config`` in place from WSHE_* environment variables.

    Validates the log level, port range and RPC URLs, terminating the
    process via run_error() on invalid values, then prints the effective
    configuration.
    """
    env = Env()
    # WSHE_LOG_LEVEL
    # it checks debug CLI flag to keep backward compatibility with old versions
    if len(sys.argv) > 1 and sys.argv[1] == 'debug':
        config['log_level'] = 'DEBUG'
    else:
        config['log_level'] = env.str("WSHE_LOG_LEVEL", config['log_level']).upper()
    if config['log_level'] not in ['INFO', 'DEBUG']:
        run_error(f'{config["log_level"]} isn\'t a valid log level. It can be INFO or DEBUG')
    # WSHE_HOST
    config['host'] = env.str("WSHE_HOST", config['host'])
    # WSHE_PORT
    # it checks PORT to keep backward compatibility with old versions
    config['port'] = env.int("WSHE_PORT", env.int("PORT", config['port']))
    if config["port"] < 1 or config["port"] > 65535:
        run_error(f'{config["port"]} isn\'t a valid port number')
    # WSHE_WS_CHECK_INTERVAL
    config['ws_check_interval'] = env.int("WSHE_WS_CHECK_INTERVAL", config['ws_check_interval'])
    # WSHE_WS_TIMEOUT
    config['ws_timeout'] = env.int("WSHE_WS_TIMEOUT", config['ws_timeout'])
    # WSHE_NODE_RPC_URLS
    # it checks NODE_URL to keep backward compatibility with old versions
    config['node_rpc_urls'] = env.list("WSHE_NODE_RPC_URLS", env.list("NODE_URL", config['node_rpc_urls']))
    invalid_urls = []
    # validate URLs
    for url in config['node_rpc_urls']:
        if not uri_validator(url):
            invalid_urls.append(url)
    if invalid_urls:
        run_error(f'{invalid_urls} URLs aren\'t valid')
    # WSHE_NODE_MAX_UNSYNCHRONIZED_BLOCK_DRIFT
    config['node_max_unsynchronized_block_drift'] = env.int("WSHE_NODE_MAX_UNSYNCHRONIZED_BLOCK_DRIFT",
                                                            config['node_max_unsynchronized_block_drift'])
    # WSHE_NODE_MIN_PEERS
    config['node_min_peers'] = env.int("WSHE_NODE_MIN_PEERS", config['node_min_peers'])
    # WSHE_BLOCK_RATE_MEASUREMENT_PERIOD
    config['block_rate_measurement_period'] = env.int("WSHE_BLOCK_RATE_MEASUREMENT_PERIOD",
                                                      config['block_rate_measurement_period'])
    # WSHE_MIN_BLOCK_RATE
    config['min_block_rate'] = env.float("WSHE_MIN_BLOCK_RATE",
                                         config['min_block_rate'])

    print('config:')
    for config_line in sorted(config.items()):
        print(f'  {config_line[0]}: {config_line[1]}')


def check_ws(node_url):
    """Run all health checks against one node over WebSocket.

    Checks: syncing state, peer count, block production rate and
    unsynchronized block drift (the last two only when enabled in config).

    :param node_url: WebSocket RPC URL of the node
    :return: True when every enabled check passes, False otherwise
             (including on any connection/RPC error)
    """
    node_state = {'health_summary': True}
    try:
        ws = create_connection(node_url, timeout=app_config['ws_timeout'])
        ws.send('{"id":1, "jsonrpc":"2.0", "method": "system_health", "params":[]}')
        hc_data = json.loads(ws.recv())
        ws.send('{"id":1, "jsonrpc":"2.0", "method": "system_syncState", "params":[false]}')
        sync_data = json.loads(ws.recv())
        ws.close()
        node_state['is_syncing'] = hc_data['result']['isSyncing']
        if node_state['is_syncing'] is not False:
            logging.info(f'URL: {node_url}. The check failed because the node in syncing')
            node_state['health_summary'] = False
        node_state['peers'] = hc_data['result']['peers']
        if node_state['peers'] < app_config['node_min_peers']:
            logging.info(f'URL: {node_url}. The check failed because peers are not enough '
                         f'{node_state["peers"]} < {app_config["node_min_peers"]} (config)')
            node_state['health_summary'] = False
        node_state['should_have_peers'] = hc_data['result']['shouldHavePeers']
        node_state['highest_block'] = sync_data['result']['highestBlock']
        node_state['current_block'] = sync_data['result']['currentBlock']
        # key was misspelled 'imestamp' in the original; fixed consistently
        # here (the cache is written and read only inside this function)
        block_number_cache[node_url].append({'timestamp': time.time(), 'number': node_state['current_block']})
        if len(block_number_cache[node_url]) >= 2:
            # average blocks/second over the cached measurement window
            node_state['block_rate'] = (block_number_cache[node_url][-1]['number'] - block_number_cache[node_url][0]['number']) / \
                                       (block_number_cache[node_url][-1]['timestamp'] - block_number_cache[node_url][0]['timestamp'])
            if app_config['min_block_rate'] > 0 and node_state['block_rate'] < app_config['min_block_rate']:
                logging.info(f'URL: {node_url}. The check failed because the node has a low rate of new blocks '
                             f'{node_state["block_rate"]} < {app_config["min_block_rate"]} (config)')
                node_state['health_summary'] = False
        node_state['unsynchronized_block_drift'] = node_state['highest_block'] - node_state['current_block']
        if (app_config['node_max_unsynchronized_block_drift'] > 0 and
                node_state['unsynchronized_block_drift'] > app_config['node_max_unsynchronized_block_drift']):
            logging.info(f'URL: {node_url}. The check failed because the node has unsynchronized blocks '
                         f'{node_state["unsynchronized_block_drift"]} > {app_config["node_max_unsynchronized_block_drift"]} (config)')
            node_state['health_summary'] = False
        logging.debug(f'URL: {node_url}. Check state: {node_state}')
        return node_state['health_summary']
    except Exception as e:
        logging.error(f'WebSocket request error. URL: {node_url}, timeout: {app_config["ws_timeout"]}, error: "{e}"')
        tb_output = io.StringIO()
        traceback.print_tb(e.__traceback__, file=tb_output)
        logging.debug(f'WebSocket request error. URL: {node_url}, timeout: {app_config["ws_timeout"]}, '
                      f'traceback:\n{tb_output.getvalue()}')
        tb_output.close()
        return False


def update_metrics():
    """Probe every configured URL, update gauges and the readiness flag."""
    if not app_config['node_rpc_urls']:
        return
    # the common status will be negative if at least one check for a URL fails
    status = True
    for url in app_config['node_rpc_urls']:
        url_probe = check_ws(node_url=url)
        ws_metrics['polkadot_ws_alive'].labels(ws_alive_url=url).set(int(url_probe))
        status = status and url_probe
    write_readiness_status(status)


@app.route('/')
def site_map():
    """List all registered HTTP routes."""
    routes = "Main Page:\n"
    for rule in app.url_map.iter_rules():
        routes += ('%s\n' % rule)
    return routes


@app.route('/metrics')
def metrics():
    """Prometheus metrics endpoint."""
    return generate_latest()


@app.route('/health/readiness')
def health_readiness():
    """Readiness probe: 200 when all URL checks pass, 500 otherwise."""
    if read_readiness_status():
        return '{"status": "UP"}', 200
    else:
        return '{"status": "DOWN"}', 500


if __name__ == '__main__':
    # NOTE: the original had a no-op `global block_number_cache` here; the
    # cache is now initialized at module level and only sized/filled below.
    signal.signal(signal.SIGTERM, handle_sigterm)

    parse_config(app_config)

    # set up console log handler
    console = logging.StreamHandler()
    console.setLevel(getattr(logging, app_config['log_level']))
    formatter = logging.Formatter(LOGGING_FORMAT)
    console.setFormatter(formatter)
    # set up basic logging config
    logging.basicConfig(format=LOGGING_FORMAT, level=getattr(logging, app_config['log_level']), handlers=[console])

    # keep at least two samples so a block rate can always be computed
    number_block_metrics = max(2, math.ceil(app_config['block_rate_measurement_period'] / app_config['ws_check_interval']))
    for url in app_config['node_rpc_urls']:
        block_number_cache[url] = deque([], number_block_metrics)

    update_metrics()
    scheduler = BackgroundScheduler()
    scheduler.add_job(func=update_metrics, trigger="interval", seconds=app_config['ws_check_interval'])
    scheduler.start()

    serve(app, host=app_config['host'], port=app_config['port'])
| ENTRYPOINT ["cargo", "build", "--target", "aarch64-unknown-linux-gnu"] 38 | -------------------------------------------------------------------------------- /dockerfiles/xbuilder-aarch64-unknown-linux-gnu/README.md: -------------------------------------------------------------------------------- 1 | # xbuilder-aarch64-unknown-linux-gnu 2 | 3 | A Docker image to cross build using the `aarch64-unknown-linux-gnu` target. 4 | This is used to make `arm64` binaries. 5 | 6 | ## Usage 7 | 8 | Here is a sample use to build the `polkadot` binary: 9 | ``` 10 | TARGET=aarch64-unknown-linux-gnu 11 | docker run --rm -ti \ 12 | -v $PWD:/app parity-xbuilder-${TARGET} \ 13 | -p polkadot \ 14 | --profile production 15 | ``` 16 | 17 | ## Build 18 | 19 | ``` 20 | TARGET=aarch64-unknown-linux-gnu 21 | docker build -t parity-xbuilder-${TARGET} -f xbuilder-aarch64-linux-gnu.Dockerfile . 22 | docker images | grep ${TARGET} 23 | ``` 24 | 25 | -------------------------------------------------------------------------------- /docs/legacy/reproduce_ci_locally.md: -------------------------------------------------------------------------------- 1 | ## Reproduce CI locally 2 | 3 | ### Preparation 4 | 5 | 1. [install `podman`](https://podman.io/getting-started/installation) (it's rootless) or rename it to `docker` in the following snippet 6 | 2. Consider the [following snippets](https://github.com/paritytech/scripts/tree/master/snippets) pick one depending on which shell you use. 

TL;DR: the function runs the named container in the current directory, while:
  - redirecting the current directory into the image
  - keeping your shell history on your host
  - keeping Rust caches on your host, so you build faster the next time

example use: `cargoenvhere paritytech/ci-linux:production /bin/bash -c 'RUSTFLAGS="-Cdebug-assertions=y -Dwarnings" RUST_BACKTRACE=1 time cargo test --workspace --locked --release --verbose --features runtime-benchmarks --manifest-path bin/node/cli/Cargo.toml'`

### Execution

1. open the CI config file (`.gitlab-ci.yml`)
2. note the `CI_IMAGE:` variable value there (for example `paritytech/contracts-ci-linux:production`)
3. look for the job you want to reproduce and check whether `*docker-env` or `image:` is mentioned in it (if so, use that image instead)
4. note the global and in-job `variables:`; to reproduce the job closely you might want to run it with the same `RUSTFLAGS` and `CARGO_INCREMENTAL`
5. `podman pull [CI image name]` / `docker pull [CI image name]`
6. execute your job as shown in the example above: `cargoenvhere [CI image name] /bin/bash -c '[cargo build ...]'`
7. find your artifacts in `/home/$USER/cache/[project name or current dir name]/target` for Linux users or `/path/to/the/cloned/repo/target` for OS X users.

:warning: If you want to execute a binary on OS X, keep in mind that with docker it is compiled for Linux.
So if you want to run it you need to use something like: 25 | `cargoenvhere paritytech/contracts-ci-linux:production cargo run` 26 | -------------------------------------------------------------------------------- /find-duplicate-dependencies.awk: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env -S awk -E 2 | # Find duplicate dependencies in a Cargo.lock file 3 | function do_substr(str, len, end) { 4 | return substr(str, len + 1, length(str) - len - end) 5 | } 6 | function assert(cond, msg) { 7 | if (!cond) { 8 | print msg > "/dev/stderr" 9 | exit 2 10 | } 11 | } 12 | BEGIN { 13 | num_duplicates = 0 14 | for (i = 1; i < ARGC; ++i) { 15 | if (ARGV[i] !~ /^[/.]/) { 16 | ARGV[i] = "./" ARGV[i] 17 | } 18 | } 19 | } 20 | /^name = "[[:alnum:]_-]+"$/ { 21 | pkg_name = do_substr($0, 8, 1) 22 | line_num = NR 23 | next 24 | } 25 | /^version = "[0-9]+\.[0-9]+\.[0-9]+([+-][[:alnum:]_.-]*)?"$/ { 26 | version = do_substr($0, length("version = \""), 1) 27 | assert(NR == line_num + 1, "Found version line at wrong time?") 28 | line_num = 0 29 | if (pkg_name in versions && versions[pkg_name] != version) { 30 | print ("Duplicate package " pkg_name ": found versions " versions[pkg_name] " and " version) 31 | num_duplicates += 1 32 | } 33 | versions[pkg_name] = version 34 | next 35 | } 36 | { assert(!line_num, "missing version line " NR) } 37 | END { 38 | if (num_duplicates) { 39 | print "\033[1;31mFound", num_duplicates, "duplicate dependencies 😨\033[0m" 40 | exit 1 41 | } else { 42 | print "\033[1;32mYou only depend on one version of each dependency. Yay! 🙂\033[0m" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /get-substrate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2015-2020 Parity Technologies (UK) Ltd. 
3 | 4 | if [[ "$OSTYPE" == "linux-gnu" ]]; then 5 | set -e 6 | if [[ $(whoami) == "root" ]]; then 7 | MAKE_ME_ROOT= 8 | else 9 | MAKE_ME_ROOT=sudo 10 | fi 11 | 12 | if [ -f /etc/redhat-release ]; then 13 | echo "Redhat Linux detected." 14 | $MAKE_ME_ROOT yum update -y 15 | $MAKE_ME_ROOT yum groupinstall -y "Development Tools" 16 | $MAKE_ME_ROOT yum install -y cmake openssl-devel git protobuf protobuf-compiler clang clang-devel 17 | elif [ -f /etc/SuSE-release ]; then 18 | echo "Suse Linux detected." 19 | echo "This OS is not supported with this script at present. Sorry." 20 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 21 | exit 1 22 | elif [ -f /etc/arch-release ]; then 23 | echo "Arch Linux detected." 24 | $MAKE_ME_ROOT pacman -Syu --needed --noconfirm cmake gcc openssl-1.0 pkgconf git clang 25 | export OPENSSL_LIB_DIR="/usr/lib/openssl-1.0"; 26 | export OPENSSL_INCLUDE_DIR="/usr/include/openssl-1.0" 27 | elif [ -f /etc/mandrake-release ]; then 28 | echo "Mandrake Linux detected." 29 | echo "This OS is not supported with this script at present. Sorry." 30 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 31 | exit 1 32 | elif [ -f /etc/debian_version ]; then 33 | echo "Ubuntu/Debian Linux detected." 34 | $MAKE_ME_ROOT apt update 35 | $MAKE_ME_ROOT apt install -y cmake pkg-config libssl-dev git gcc build-essential git protobuf protobuf-compiler clang libclang-dev 36 | else 37 | echo "Unknown Linux distribution." 38 | echo "This OS is not supported with this script at present. Sorry." 39 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 40 | exit 1 41 | fi 42 | elif [[ "$OSTYPE" == "darwin"* ]]; then 43 | set -e 44 | echo "Mac OS (Darwin) detected." 45 | 46 | if ! 
which brew >/dev/null 2>&1; then 47 | /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)" 48 | fi 49 | 50 | brew update 51 | brew install openssl cmake llvm 52 | elif [[ "$OSTYPE" == "freebsd"* ]]; then 53 | echo "FreeBSD detected." 54 | echo "This OS is not supported with this script at present. Sorry." 55 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 56 | exit 1 57 | else 58 | echo "Unknown operating system." 59 | echo "This OS is not supported with this script at present. Sorry." 60 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 61 | exit 1 62 | fi 63 | 64 | if ! which rustup >/dev/null 2>&1; then 65 | curl https://sh.rustup.rs -sSf | sh -s -- -y 66 | source ~/.cargo/env 67 | rustup default stable 68 | else 69 | rustup update 70 | rustup default stable 71 | fi 72 | 73 | rustup update nightly 74 | rustup target add wasm32-unknown-unknown --toolchain nightly 75 | 76 | if [[ "$1" == "--fast" ]]; then 77 | echo "Skipped cargo install of 'substrate' and 'subkey'" 78 | echo "You can install manually by cloning the https://github.com/paritytech/substrate repo," 79 | echo "and using cargo to install 'substrate' and 'subkey' from the repo path." 
80 | else 81 | g=$(mktemp -d) 82 | git clone https://github.com/paritytech/substrate "$g" 83 | pushd "$g" 84 | cargo install --force --path ./bin/node/cli #substrate 85 | cargo install --force --path ./bin/utils/subkey subkey 86 | popd 87 | fi 88 | 89 | echo "Run source ~/.cargo/env now to update environment" 90 | -------------------------------------------------------------------------------- /gitlab/.ruby-version: -------------------------------------------------------------------------------- 1 | 2.7.3 -------------------------------------------------------------------------------- /gitlab/Gemfile: -------------------------------------------------------------------------------- 1 | source 'https://rubygems.org' 2 | 3 | ruby '~>' + File.read('.ruby-version').strip 4 | 5 | gem 'gitlab', '~> 4.17' 6 | -------------------------------------------------------------------------------- /gitlab/Gemfile.lock: -------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | gitlab (4.17.0) 5 | httparty (~> 0.18) 6 | terminal-table (~> 1.5, >= 1.5.1) 7 | httparty (0.21.0) 8 | mini_mime (>= 1.0.0) 9 | multi_xml (>= 0.5.2) 10 | mini_mime (1.1.2) 11 | multi_xml (0.6.0) 12 | terminal-table (1.8.0) 13 | unicode-display_width (~> 1.1, >= 1.1.1) 14 | unicode-display_width (1.7.0) 15 | 16 | PLATFORMS 17 | ruby 18 | 19 | DEPENDENCIES 20 | gitlab (~> 4.17) 21 | 22 | RUBY VERSION 23 | ruby 2.7.3p183 24 | 25 | BUNDLED WITH 26 | 2.1.4 27 | -------------------------------------------------------------------------------- /gitlab/README.md: -------------------------------------------------------------------------------- 1 | # GitLab scripts 2 | 3 | This is the collection of small GitLab-related scripts written in Ruby. 4 | 5 | ### Table of contents 6 | 7 | | Name | Usage | 8 | | ---- | ----- | 9 | | `get-all-mirrored-projects` | Outputs all mirrored projects within user's authz scope. 
Needs `GITLAB_INSTANCE` & `GITLAB_TOKEN` env vars. Accepts optional `with-vanity` and `without-vanity` arguments to filter projects with/without `vanity-service` integration. Accepts optional `github` to filter projects with enabled GitHub integration. 10 | | `get-all-projects-with-pages` | Outputs all projects with GitLab Pages enabled within user's authz scope. Needs `GITLAB_INSTANCE` & `GITLAB_TOKEN` env vars. Accepts optional `public` argument to filter projects with public Pages access level. 11 | | `wipe-inactive-runners` | Delete runners that never contacted a GitLab instance. User has to be an admin. Needs `GITLAB_INSTANCE` & `GITLAB_TOKEN` env vars. Check available options with `./wipe-inactive-runners --help`. Highly recommended to check what you are going to delete first with `--dry-run` flag. 12 | 13 | ### Examples 14 | 15 | Get maximum info from `get-all-mirrored-projects`: 16 | ``` 17 | env GITLAB_INSTANCE=https://gitlab.example.com GITLAB_TOKEN=foobarbaz ./get-all-mirrored-projects with-vanity without-vanity github 18 | ``` 19 | 20 | Show projects with public Pages from `get-all-projects-with-pages`: 21 | ``` 22 | env GITLAB_INSTANCE=https://gitlab.example.com GITLAB_TOKEN=foobarbaz ./get-all-projects-with-pages public 23 | ``` 24 | 25 | Show dummy runners without their removal with `wipe-inactive-runners`: 26 | ``` 27 | env GITLAB_INSTANCE=https://gitlab.example.com GITLAB_TOKEN=foobarbaz ./wipe-inactive-runners --tag somerunnertag --dry-run 28 | ``` 29 | 30 | ### Dependencies 31 | 32 | Dependencies are declared in the relevant `Gemfile`. Just `bundle install` and you are good to go. 33 | -------------------------------------------------------------------------------- /gitlab/get-all-mirrored-projects: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require 'gitlab' 4 | require 'json' 5 | 6 | if ENV['GITLAB_INSTANCE'].nil? || ENV['GITLAB_TOKEN'].nil? 
7 | puts 'Specify GITLAB_INSTANCE & GITLAB_TOKEN environment variables' 8 | exit 1 9 | end 10 | 11 | $api = Gitlab.client(endpoint: "#{ENV['GITLAB_INSTANCE']}/api/v4", private_token: ENV['GITLAB_TOKEN']) 12 | 13 | puts "Signed in as #{$api.user.email}\n\n" 14 | 15 | mirrored_projects = $api.projects.auto_paginate.select(&:mirror) 16 | 17 | puts "Total mirrors: #{mirrored_projects.count}" 18 | puts "==================" 19 | mirrored_projects.each do |project| 20 | puts project.web_url 21 | end 22 | 23 | def vanity(projects, with: true) 24 | vanity = [] 25 | projects.each do |project| 26 | hooks = $api.project_hooks(project.id) 27 | if hooks.count > 0 28 | hooks.each do |hook| 29 | vanity << project.web_url if with && hook.url.include?('vanity-service') 30 | end 31 | else 32 | vanity << project.web_url unless with 33 | end 34 | end 35 | puts "\nMirrors #{with ? "with" : "without"} vanity-service's hook: #{vanity.count}" 36 | puts "==================" 37 | puts vanity.sort.map { |url| url + '/hooks' }.join("\n") 38 | vanity 39 | end 40 | 41 | def github(projects) 42 | github = [] 43 | projects.each do |project| 44 | github << project.web_url if $api.service(project.id, :github).active 45 | end 46 | puts "\nMirrors with GitHub integration enabled: #{github.count}" 47 | puts "==================" 48 | puts github.sort.map { |url| url + '/-/services/github/edit' }.join("\n") 49 | github 50 | end 51 | 52 | unless ARGV.nil? 53 | vanity(mirrored_projects) if ARGV.include?('with-vanity') 54 | vanity(mirrored_projects, with: false) if ARGV.include?('without-vanity') 55 | github(mirrored_projects) if ARGV.include?('github') 56 | end 57 | -------------------------------------------------------------------------------- /gitlab/get-all-projects-with-pages: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require 'gitlab' 4 | require 'json' 5 | 6 | if ENV['GITLAB_INSTANCE'].nil? || ENV['GITLAB_TOKEN'].nil? 
7 | puts 'Specify GITLAB_INSTANCE & GITLAB_TOKEN environment variables' 8 | exit 1 9 | end 10 | 11 | unless ARGV.nil? 12 | public_only = true if ARGV.include?('public') 13 | end 14 | 15 | 16 | $api = Gitlab.client(endpoint: "#{ENV['GITLAB_INSTANCE']}/api/v4", private_token: ENV['GITLAB_TOKEN']) 17 | 18 | puts "Signed in as #{$api.user.email}\n\n" 19 | 20 | projects_with_pages = $api.projects.auto_paginate.select { |project| project.pages_access_level != 'disabled' } 21 | projects_with_pages = projects_with_pages.select { |project| project.pages_access_level == 'public' } if public_only 22 | 23 | puts "Total projects with Pages enabled: #{projects_with_pages.count}" 24 | puts "(public only)" if public_only 25 | puts "==================" 26 | projects_with_pages.each do |project| 27 | puts "#{project.web_url} - #{project.pages_access_level}" 28 | end 29 | -------------------------------------------------------------------------------- /gitlab/wipe-inactive-runners: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env ruby 2 | 3 | require 'gitlab' 4 | require 'json' 5 | require 'optparse' 6 | 7 | if ARGV.empty? 8 | puts 'Please provide options or check --help' 9 | exit 1 10 | end 11 | 12 | options = {} 13 | option_parser = OptionParser.new do |option| 14 | option.banner = "Usage: wipe-inactive-runners [options]" 15 | option.on('--tag RUNNER TAG', 'Which runner\'s tag to clean up', String) do |tag| 16 | options[:tag] = tag 17 | end 18 | option.on('--dry-run', 'Just output what will be removed') do |dry_run| 19 | options[:dry_run] = true 20 | end 21 | end 22 | 23 | option_parser.parse! 24 | 25 | raise OptionParser::MissingArgument, '--tag is required' unless options[:tag] 26 | 27 | if ENV['GITLAB_INSTANCE'].nil? || ENV['GITLAB_TOKEN'].nil? 
28 | puts 'Specify GITLAB_INSTANCE & GITLAB_TOKEN environment variables' 29 | exit 1 30 | end 31 | 32 | $api = Gitlab.client(endpoint: "#{ENV['GITLAB_INSTANCE']}/api/v4", private_token: ENV['GITLAB_TOKEN']) 33 | 34 | puts "Signed in as #{$api.user.email}\n\n" 35 | 36 | runners = $api.all_runners.auto_paginate 37 | 38 | detailed_runners = [] 39 | 40 | runners.each do |runner| 41 | detailed_runners << $api.runner(runner.id) 42 | end 43 | 44 | dead_runners = detailed_runners.select { |runner| runner.tag_list.include?(options[:tag]) && runner.online.nil? } 45 | 46 | puts "Dry run. Outputting runners' list to remove:" if options[:dry_run] 47 | 48 | dead_runners.each do |runner| 49 | if options[:dry_run] 50 | puts "#{runner.id} | #{runner.description} | #{runner.tag_list}" 51 | else 52 | $api.delete_runner(runner.id) 53 | puts "Deleted #{runner.id} (#{runner.description})" 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /retag.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # EXAMPLE USAGE: 4 | # ./retag.sh paritytech ci-linux staging production 5 | 6 | set -eu 7 | 8 | ORGA=$1 9 | IMAGE=$2 10 | TAG_OLD=$3 11 | TAG_NEW=$4 12 | echo "Organization: ${ORGA}" 13 | echo "Image: ${IMAGE}" 14 | echo "Old tag: ${TAG_OLD}" 15 | echo "New tag: ${TAG_NEW}" 16 | 17 | log_in() { 18 | read -p "Enter username: " UNAME 19 | REPONAME=${ORGA}/${IMAGE} 20 | 21 | if [ -n ${UNAME} ]; then 22 | read -s -p "Password/token: " UPASS 23 | echo 24 | fi 25 | } 26 | 27 | get_token() { 28 | local HEADERS 29 | 30 | if [ -n "$UNAME" ]; then 31 | HEADERS="Authorization: Basic $(echo -n "${UNAME}:${UPASS}" | base64)" 32 | fi 33 | echo "☑ Logging in" 34 | curl -s -H "$HEADERS" "https://auth.docker.io/token?service=registry.docker.io&scope=repository:${REPONAME}:pull,push" | jq '.token' -r > token 35 | echo "☑ Got token" 36 | } 37 | 38 | pull_push_manifest() { 39 | local 
CONTENT_TYPE="application/vnd.docker.distribution.manifest.v2+json" 40 | local REGISTER_URI="https://registry-1.docker.io/v2" 41 | 42 | # read here https://docs.docker.com/registry/spec/auth/token/ 43 | curl -s -H "Accept: ${CONTENT_TYPE}" -H "Authorization: Bearer $(cat token)" "${REGISTER_URI}/${REPONAME}/manifests/${TAG_OLD}" -o manifest.json 44 | echo "☑ Got manifest in manifest.json" 45 | curl -s -X PUT -H "Content-Type: ${CONTENT_TYPE}" -H "Authorization: Bearer $(cat token)" -d '@manifest.json' "${REGISTER_URI}/${REPONAME}/manifests/${TAG_NEW}" 46 | echo "☑ Pushed ${REPONAME}:${TAG_OLD} Manifest to ${REPONAME}:${TAG_NEW}" 47 | } 48 | 49 | clean_up() { 50 | rm token 51 | rm manifest.json 52 | echo "☑ Removed token and manifest.json files" 53 | } 54 | 55 | log_in 56 | get_token ${REPONAME} 57 | pull_push_manifest 58 | clean_up 59 | -------------------------------------------------------------------------------- /snippets/.bashrc: -------------------------------------------------------------------------------- 1 | function cargoenvhere { 2 | dirname="$(basename $(pwd))" 3 | user=$(whoami) 4 | echo "Cargo as a virtual environment in" "$dirname" "dir" 5 | mkdir -p /home/"$user"/cache/"$dirname" 6 | podman run --rm -it -w /shellhere/"$dirname" \ 7 | -v "$(pwd)":/shellhere/"$dirname" \ 8 | -v /home/"$user"/cache/"$dirname"/:/cache/ \ 9 | -e CARGO_HOME=/cache/cargo/ \ 10 | -e SCCACHE_DIR=/cache/sccache/ \ 11 | -e CARGO_TARGET_DIR=/cache/target/ "$@" 12 | } 13 | -------------------------------------------------------------------------------- /snippets/.zshrc: -------------------------------------------------------------------------------- 1 | function cargoenvhere { 2 | dirname="$(basename $(pwd))" 3 | echo "Cargo as a virtual environment in" "$dirname" "dir" 4 | docker volume inspect cargo-cache > /dev/null || docker volume create cargo-cache 5 | docker run --rm -it -w /shellhere/"$dirname" \ 6 | -v "$(pwd)":/shellhere/"$dirname" \ 7 | -v cargo-cache:/cache/ \ 8 | 
-e CARGO_HOME=/cache/cargo/ \ 9 | -e SCCACHE_DIR=/cache/sccache/ "$@" 10 | } 11 | -------------------------------------------------------------------------------- /snippets/cargoenvhere.fish: -------------------------------------------------------------------------------- 1 | function cargoenvhere -d "Cargo as a virtual environment in the current dir" 2 | set dirname (basename (pwd)) 3 | set user (whoami) 4 | echo "Cargo as a virtual environment in" $dirname "dir" 5 | mkdir -p /home/$user/cache/$dirname 6 | podman run --rm -it -w /shellhere/$dirname -v (pwd):/shellhere/$dirname -v /home/$user/cache/$dirname/:/cache/ -e CARGO_HOME=/cache/cargo/ -e SCCACHE_DIR=/cache/sccache/ -e CARGO_TARGET_DIR=/cache/target/ $argv 7 | end 8 | -------------------------------------------------------------------------------- /snippets/cargoenvhere.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # "Cargo as a virtual environment in the current dir" 4 | dirname="$(basename $(pwd))" 5 | user=$(whoami) 6 | echo "Cargo as a virtual environment in" "$dirname" "dir" 7 | mkdir -p /home/"$user"/cache/"$dirname" 8 | podman run --rm -it -w /shellhere/"$dirname" \ 9 | -v "$(pwd)":/shellhere/"$dirname" \ 10 | -v /home/"$user"/cache/"$dirname"/:/cache/ \ 11 | -e CARGO_HOME=/cache/cargo/ \ 12 | -e SCCACHE_DIR=/cache/sccache/ \ 13 | -e CARGO_TARGET_DIR=/cache/target/ "$@" 14 | 15 | # example use 16 | # cargoenvhere paritytech/ci-linux:production /bin/bash -c 'RUSTFLAGS="-Cdebug-assertions=y -Dwarnings" RUST_BACKTRACE=1 time cargo test --workspace --locked --release --verbose --features runtime-benchmarks --manifest-path bin/node/cli/Cargo.toml' 17 | -------------------------------------------------------------------------------- /wasm-utils/install-rust-lld-ARM.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Install all required dependencies for building Substrate based chains on 
ARM architectures. 3 | # Copyright (C) Parity Technologies 4 | # License: Apache-2.0 5 | 6 | # Set to the latest branch from: https://github.com/rust-lang/llvm-project 7 | RUST_LLVM_BRANCH="rustc/10.0-2020-05-05" 8 | 9 | if [[ "$OSTYPE" == "linux-gnu" ]] || [[ "$OSTYPE" == "linux-gnueabihf" ]]; then 10 | set -e 11 | if [[ $(whoami) == "root" ]]; then 12 | MAKE_ME_ROOT= 13 | else 14 | MAKE_ME_ROOT=sudo 15 | fi 16 | ARCH=$(uname -m) 17 | if [[ "$ARCH" == "aarch64" ]] || [[ "$ARCH" == "armv7l" ]]; then 18 | 19 | if [ -f /etc/debian_version ]; then 20 | echo "Ubuntu/Debian Linux detected." 21 | $MAKE_ME_ROOT apt update 22 | $MAKE_ME_ROOT apt-get install -y --no-install-recommends \ 23 | build-essential \ 24 | ninja-build \ 25 | ca-certificates \ 26 | cmake \ 27 | pkg-config \ 28 | libssl-dev \ 29 | gcc \ 30 | git \ 31 | clang \ 32 | libclang-dev \ 33 | protobuf-compiler 34 | 35 | else 36 | echo "This Linux distribution is not supported with this script at present. Sorry." 37 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 38 | exit 1 39 | fi 40 | 41 | if ! 
which rustup >/dev/null 2>&1; then 42 | curl https://sh.rustup.rs -sSf | sh -s -- -y 43 | source ~/.cargo/env 44 | rustup default stable 45 | else 46 | rustup update 47 | rustup default stable 48 | fi 49 | 50 | rustup update nightly --force 51 | rustup target add wasm32-unknown-unknown --toolchain nightly 52 | 53 | echo "Building llvm-ld for: $ARCH" 54 | 55 | tmp=$(mktemp -d) 56 | git clone -b $RUST_LLVM_BRANCH --depth 1 https://github.com/rust-lang/llvm-project.git "$tmp" 57 | pushd "$tmp" 58 | mkdir -p llvm/tools/lld 59 | cp -R lld/ llvm/tools/ 60 | mkdir -p "$tmp"/build/arm 61 | cd "$tmp"/build/arm 62 | if [ "$ARCH" == "aarch64" ]; then 63 | cmake -G Ninja "$tmp"/llvm \ 64 | -DCMAKE_BUILD_TYPE=Release \ 65 | -DCMAKE_INSTALL_PREFIX=/opt/local/llvm \ 66 | -DLLVM_TARGETS_TO_BUILD="AArch64" \ 67 | -DLLVM_TARGET_ARCH="AArch64" 68 | else 69 | cmake -G Ninja "$tmp"/llvm \ 70 | -DCMAKE_BUILD_TYPE=Release \ 71 | -DCMAKE_INSTALL_PREFIX=/opt/local/llvm \ 72 | -DLLVM_TARGETS_TO_BUILD="ARM" \ 73 | -DLLVM_TARGET_ARCH="ARM" 74 | fi 75 | ninja lld 76 | $MAKE_ME_ROOT ninja install-lld 77 | popd 78 | 79 | echo "Installing rust-lld for: $ARCH" 80 | cp -f /opt/local/llvm/bin/ld.lld ~/.cargo/bin/rust-lld 81 | source ~/.cargo/env 82 | 83 | echo "Run source ~/.cargo/env now to update environment" 84 | 85 | else 86 | echo "This architecture is not supported with this script at present. Sorry." 87 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 88 | exit 1 89 | fi 90 | 91 | else 92 | echo "This OS is not supported with this script at present. Sorry." 93 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 
94 | exit 1 95 | fi 96 | -------------------------------------------------------------------------------- /wasm-utils/install-wasm-binaries.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # If there is no binaryen package, it downloads the missing Wasm binaries. 3 | # Copyright (C) Parity Technologies 4 | # License: Apache-2.0 5 | 6 | set -euv 7 | 8 | if [[ "$OSTYPE" == "linux-gnu" ]]; then 9 | 10 | if [[ $(whoami) == "root" ]]; then 11 | MAKE_ME_ROOT= 12 | else 13 | MAKE_ME_ROOT=sudo 14 | fi 15 | 16 | if [ -f /etc/redhat-release ]; then 17 | echo "Redhat Linux detected." 18 | echo "This OS is not supported with this script at present. Sorry." 19 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 20 | exit 1 21 | elif [ -f /etc/SuSE-release ]; then 22 | echo "Suse Linux detected." 23 | echo "This OS is not supported with this script at present. Sorry." 24 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 25 | exit 1 26 | elif [ -f /etc/arch-release ]; then 27 | echo "Arch Linux detected." 28 | echo "This OS is not supported with this script at present. Sorry." 29 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 30 | exit 1 31 | elif [ -f /etc/mandrake-release ]; then 32 | echo "Mandrake Linux detected." 33 | echo "This OS is not supported with this script at present. Sorry." 34 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 35 | exit 1 36 | elif [ -f /etc/debian_version ]; then 37 | echo "Ubuntu/Debian Linux detected." 38 | 39 | function install_external_source { 40 | set -euv 41 | 42 | if ! [ -x "$(command -v curl)" ] || ! [ -x "$(command -v jq)" ] || ! [ -x "$(command -v tar)" ]; then 43 | echo "Installing missing OS packages." 
44 | $MAKE_ME_ROOT apt update 45 | $MAKE_ME_ROOT apt install -y curl jq tar 46 | fi 47 | 48 | BUILD_NUM=$(curl -s https://storage.googleapis.com/wasm-llvm/builds/linux/lkgr.json | jq -r '.build') 49 | if [ -z ${BUILD_NUM+x} ]; then 50 | echo "Could not fetch the latest build number." 51 | exit 1 52 | fi 53 | 54 | tmp=$(mktemp -d) 55 | pushd "$tmp" >/dev/null 56 | echo "Downloading wasm-binaries.tbz2" 57 | curl -L -o wasm-binaries.tbz2 "https://storage.googleapis.com/wasm-llvm/builds/linux/$BUILD_NUM/wasm-binaries.tbz2" 58 | 59 | declare -a binaries=("wasm-opt") # Default binaries 60 | if [ "$#" -ne 0 ]; then 61 | echo "Installing selected binaries." 62 | binaries=("$@") 63 | else 64 | echo "Installing default binaries." 65 | fi 66 | 67 | for bin in "${binaries[@]}"; do 68 | echo "Installing $bin into ~/.cargo/bin" 69 | tar -xvjf "wasm-binaries.tbz2" "wasm-install/bin/$bin" >/dev/null 70 | cp -f "wasm-install/bin/$bin" ~/.cargo/bin/ 71 | done 72 | popd >/dev/null 73 | } 74 | 75 | if apt-cache show binaryen >/dev/null 2>&1; then 76 | set -e 77 | echo "Setting up Binaryen from package manager." 78 | $MAKE_ME_ROOT apt update "$@" 79 | $MAKE_ME_ROOT apt install -y binaryen 80 | else 81 | echo "Setting up wasm-opt from external source." 82 | install_external_source "$@" 83 | fi 84 | 85 | else 86 | echo "Unknown Linux distribution." 87 | echo "This OS is not supported with this script at present. Sorry." 88 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 89 | exit 1 90 | fi 91 | 92 | elif [[ "$OSTYPE" == "darwin"* ]]; then 93 | echo "Mac OS (Darwin) detected." 94 | echo "This OS is not supported with this script at present. Sorry." 95 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 96 | exit 1 97 | elif [[ "$OSTYPE" == "freebsd"* ]]; then 98 | echo "FreeBSD detected." 99 | echo "This OS is not supported with this script at present. Sorry." 
100 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 101 | exit 1 102 | else 103 | echo "Unknown operating system." 104 | echo "This OS is not supported with this script at present. Sorry." 105 | echo "Please refer to https://github.com/paritytech/substrate for setup information." 106 | exit 1 107 | fi 108 | 109 | echo "" 110 | echo "Run source ~/.cargo/env now to update environment." 111 | echo "" --------------------------------------------------------------------------------