├── .gitignore ├── .gitlab-ci.yml ├── LICENSE ├── README.md ├── admin.sh ├── coq ├── .dockerignore └── stable │ └── Dockerfile ├── external └── docker-keeper │ ├── .gitignore │ ├── .gitlab-ci.yml │ ├── LICENSE │ ├── README.md │ ├── VERSION │ ├── bash_formatter.py │ ├── debug_read.bash │ ├── gitlab-ci-template.yml │ ├── gitlab_functions.sh │ ├── keeper.py │ ├── requirements.txt │ └── requirements_lint.txt └── images.yml /.gitignore: -------------------------------------------------------------------------------- 1 | ### Emacs ### 2 | # -*- mode: gitignore; -*- 3 | *~ 4 | \#*\# 5 | /.emacs.desktop 6 | /.emacs.desktop.lock 7 | *.elc 8 | auto-save-list 9 | tramp 10 | .\#* 11 | 12 | # Org-mode 13 | .org-id-locations 14 | *_archive 15 | 16 | # flymake-mode 17 | *_flymake.* 18 | 19 | # eshell files 20 | /eshell/history 21 | /eshell/lastdir 22 | 23 | # elpa packages 24 | /elpa/ 25 | 26 | # reftex files 27 | *.rel 28 | 29 | # AUCTeX auto folder 30 | /auto/ 31 | 32 | # cask packages 33 | .cask/ 34 | dist/ 35 | 36 | # Flycheck 37 | flycheck_*.el 38 | 39 | # server auth directory 40 | /server/ 41 | 42 | # projectiles files 43 | .projectile 44 | 45 | # directory configuration 46 | .dir-locals.el 47 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | include: 'external/docker-keeper/gitlab-ci-template.yml' 2 | 3 | # Uncomment if ever you chose a different subtree prefix 4 | # variables: 5 | # KEEPER_SUBTREE: external/docker-keeper 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2018, Erik Martin-Dorel 4 | All rights reserved. 
5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Docker images of Coq (v. 
8.4 to 8.20): a Rocq-community project 2 | 3 | [![tags](https://img.shields.io/badge/tags%20on-docker%20hub-blue.svg)](https://hub.docker.com/r/coqorg/coq#supported-tags "Supported tags on Docker Hub") 4 | [![pipeline status](https://gitlab.com/coq-community/docker-coq/badges/master/pipeline.svg)](https://gitlab.com/coq-community/docker-coq/-/pipelines) 5 | [![dev image](https://img.shields.io/badge/coqorg%2Fcoq-dev-blue.svg)](https://hub.docker.com/r/coqorg/coq/tags?page=1&name=dev "See dev image on Docker Hub") 6 | [![pulls](https://img.shields.io/docker/pulls/coqorg/coq.svg)](https://hub.docker.com/r/coqorg/coq "Number of pulls from Docker Hub") 7 | [![stars](https://img.shields.io/docker/stars/coqorg/coq.svg)](https://hub.docker.com/r/coqorg/coq "Star the image on Docker Hub") 8 | [![dockerfile](https://img.shields.io/badge/dockerfile%20on-github-blue.svg)](https://github.com/coq-community/docker-coq "Dockerfile source repository") 9 | [![base](https://img.shields.io/badge/depends%20on-coqorg%2Fbase-blue.svg)](https://hub.docker.com/r/coqorg/base "Docker base image for Coq") 10 | 11 | > [!NOTE] 12 | > 13 | > This repository provides [Docker](https://www.docker.com/) images of the **Coq proof assistant (versions 8.4.6 to 8.20.1)**, maintained as part of the [Rocq-community](https://github.com/rocq-community) project. Note that [Docker tags](https://hub.docker.com/r/coqorg/coq/tags) for **Coq versions < 8.7** are not actively rebuilt anymore, and thereby do not show up in the list of **Supported tags** below: these old Coq images are stalled because of [this opam-repository PR](https://github.com/ocaml/opam-repository/pull/27273) that archived old versions of the OCaml compiler. For newest versions of the **Rocq Prover ≥ 9.0**, use [rocq/rocq-prover](https://hub.docker.com/r/rocq/rocq-prover) images instead. 
14 | 15 | These images are based on [this parent image](https://hub.docker.com/r/coqorg/base/), itself based on [Debian 12 Slim](https://hub.docker.com/_/debian/) and relying on the last version of [opam 2.x](https://opam.ocaml.org/doc/Manual.html): 16 | 17 | | | GitHub repo | Type | Docker Hub | 18 | |---|-------------------------------------------------------------------------|---------------|--------------------------------------------------------| 19 | | | [docker-coq-action](https://github.com/coq-community/docker-coq-action) | GitHub Action | N/A | 20 | | ⊙ | [docker-coq](https://github.com/coq-community/docker-coq) | Dockerfile | [`coqorg/coq`](https://hub.docker.com/r/coqorg/coq/) | 21 | | ↳ | [docker-base](https://github.com/coq-community/docker-base) | Dockerfile | [`coqorg/base`](https://hub.docker.com/r/coqorg/base/) | 22 | | ↳ | Debian | Linux distro | [`debian`](https://hub.docker.com/_/debian/) | 23 | 24 | See also the [docker-coq wiki](https://github.com/coq-community/docker-coq/wiki) for details about how to use these images. 25 | 26 | This Dockerfile repository is [mirrored on GitLab](https://gitlab.com/coq-community/docker-coq), but [issues](https://github.com/coq-community/docker-coq/issues) and [pull requests](https://github.com/coq-community/docker-coq/pulls) are tracked on GitHub. 
27 | 28 | 29 | -------------------------------------------------------------------------------- /admin.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Author: Erik Martin-Dorel, 2020-2021 3 | # Summary: helper functions to compute compatible OCaml versions 4 | 5 | ocamls() { opam switch list-available ocaml-base-compiler | grep -v -e '#' | cut -d ' ' -f 2; } 6 | 7 | pred_ocaml_for_coqs() { 8 | for v; do 9 | printf '%s' "* Coq $v: "'`' 10 | printf '%s' "$(opam show "coq.$v" -f depends: | grep '"ocaml"')" 11 | printf '`\n' 12 | done 13 | } 14 | 15 | list_ocaml_for_coqs() { 16 | local indent=' ' 17 | # To change manually: 18 | local render='true' 19 | # local render='false' 20 | # Note: we may post-process the output to merge "coq" items with same ocaml 21 | for v; do 22 | local several='true' 23 | printf '%s' "* Coq $v: " 24 | if [ "$v" = "8.4.6" ] || [ "$v" = "8.5.3" ] || [ "$v" = "8.6.1" ]; then 25 | minimal="4.02.3" 26 | default="4.02.3" 27 | several='false' 28 | else 29 | versions=$(opam search ocaml-base-compiler --no-switch --columns=version -V --coinstallable-with="coq.$v" | grep -v -e '#' -e 'alpha' -e 'beta' -e 'rc' -e '4\.09\.0') 30 | if [[ "$versions" =~ 4\.05\.0 ]]; then 31 | minimal="4.05.0" 32 | else 33 | minimal="$(head -n 1 <<<"$versions")-flambda" 34 | fi 35 | # BEGIN SWAP THIS LATER ON IF NEED BE 36 | if [[ "$versions" =~ 4\.07\.1 ]]; then 37 | default="4.07.1-flambda" 38 | elif [[ "$versions" =~ 4\.13\.1 ]]; then 39 | default="4.13.1-flambda" # like Coq Platform 40 | # END SWAP THIS LATER ON IF NEED BE 41 | else 42 | default="$minimal" 43 | fi 44 | fi 45 | [ "$render" = 'true' ] && printf '\n%s' "${indent}default: ['" 46 | if [ "$render" = 'true' ]; then 47 | printf '%s' "$default" 48 | else 49 | printf '%s' "$minimal" 50 | fi 51 | [ "$render" = 'true' ] && printf "']\n" 52 | [ "$render" = 'true' ] && printf '%s' "${indent}base: [" 53 | if [ "$several" = 'true' ]; then 54 | 
minor2=$(cut -d '.' -f 1-2 <<<"$versions" | sort -u -V | tail -n 2 | tac) 55 | minor3=$(cut -d '.' -f 1-2 <<<"$versions" | sort -u -V | tail -n 3 | tac) 56 | last2=$(for vv in $minor2; do grep -e "^${vv//./\\.}.*\$" <<<"$versions" | tail -n 1; done) 57 | last3=$(for vv in $minor3; do grep -e "^${vv//./\\.}.*\$" <<<"$versions" | tail -n 1; done) 58 | dflt_regex="${default%-flambda}" 59 | dflt_regex="${dflt_regex//./\\.}" 60 | # Incomplete algorithm (to be refined): 61 | # we check that default is not in {minimal} \/ last3 62 | already=$(if grep -q -e "^${dflt_regex}$" <<< "$minimal" || \ 63 | grep -q -e "^${dflt_regex}$" <<< "$last3"; then 64 | echo true 65 | else 66 | echo false 67 | fi) 68 | if [ "$render" = 'true' ]; then 69 | if [ "$already" = 'true' ]; then 70 | printf '%s' "$last3" | xargs printf "'%s-flambda', " 71 | # SHOULD check that default notin last3 72 | else 73 | printf '%s' "$last2" | xargs printf "'%s-flambda', " 74 | printf '%s' "$default" | xargs printf "'%s', " 75 | # SHOULD check that default notin last2 76 | fi 77 | else 78 | if [ "$already" = 'true' ]; then 79 | printf '%s' "$last3" | xargs printf "%s-flambda " 80 | else 81 | printf '%s' "$last2" | xargs printf "%s-flambda " 82 | printf '%s' "$default" | xargs printf "%s " 83 | fi 84 | fi 85 | fi 86 | [ "$render" = 'true' ] && printf '%s' "'$minimal']" 87 | printf "\n" 88 | [ "$render" = 'true' ] && printf '%s\n ' "${indent}coq: ['${v}']" 89 | done 90 | } 91 | 92 | # opam repo add --all-switches --set-default coq-core-dev https://coq.inria.fr/opam/core-dev 93 | # opam update 94 | # opam show coq 95 | # pred_ocaml_for_coqs 8.4.6 8.5.3 8.6.1 8.7.2 8.8.2 8.9.1 8.10.2 8.11.2 8.12.2 8.13.2 8.14.1 8.15.2 dev 96 | # list_ocaml_for_coqs dev 8.15.2 8.14.1 8.13.2 8.12.2 8.11.2 8.10.2 8.9.1 8.8.2 8.7.2 8.6.1 8.5.3 8.4.6 97 | -------------------------------------------------------------------------------- /coq/.dockerignore: -------------------------------------------------------------------------------- 
1 | * 2 | -------------------------------------------------------------------------------- /coq/stable/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_TAG="latest" 2 | FROM rocq/base:${BASE_TAG} 3 | 4 | # The following variable should be nonempty for coq >= 8.17 5 | ARG COQ_CORE_PINNED 6 | 7 | ARG COQ_EXTRA_OPAM="coq-bignums" 8 | ENV COQ_EXTRA_OPAM="${COQ_EXTRA_OPAM}" 9 | 10 | # The following variable should be empty for ocaml < 4.06 or coq <= 8.7 11 | ARG COQ_INSTALL_SERAPI 12 | 13 | ARG COQ_VERSION="dev" 14 | ENV COQ_VERSION=${COQ_VERSION} 15 | 16 | # This line is actually unneeded (was already enabled in rocq/base) 17 | SHELL ["/bin/bash", "--login", "-o", "pipefail", "-c"] 18 | 19 | # hadolint ignore=SC2046 20 | RUN set -x \ 21 | && eval $(opam env "--switch=${COMPILER}" --set-switch) \ 22 | && opam update -y -u \ 23 | && opam pin add -n -k version coq ${COQ_VERSION} \ 24 | && if [ -n "${COQ_CORE_PINNED}" ]; then \ 25 | opam pin add -n -k version coq-core ${COQ_VERSION} && \ 26 | opam pin add -n -k version coq-stdlib ${COQ_VERSION} && \ 27 | opam pin add -n -k version coqide-server ${COQ_VERSION}; fi \ 28 | && opam install -y -v -j "${NJOBS}" coq ${COQ_EXTRA_OPAM} ${COQ_INSTALL_SERAPI:+coq-serapi} \ 29 | && opam clean -a -c -s --logs \ 30 | && chmod -R g=u /home/coq/.opam \ 31 | && opam config list && opam list 32 | 33 | ARG BUILD_DATE 34 | ARG VCS_REF 35 | LABEL org.label-schema.build-date=${BUILD_DATE} \ 36 | org.label-schema.name="The Coq Proof Assistant" \ 37 | org.label-schema.description="Coq is a formal proof management system. It provides a formal language to write mathematical definitions, executable algorithms and theorems together with an environment for semi-interactive development of machine-checked proofs." 
\ 38 | org.label-schema.url="https://coq.inria.fr/" \ 39 | org.label-schema.vcs-ref=${VCS_REF} \ 40 | org.label-schema.vcs-url="https://github.com/coq/coq" \ 41 | org.label-schema.vendor="The Coq Development Team" \ 42 | org.label-schema.version=${COQ_VERSION} \ 43 | org.label-schema.schema-version="1.0" \ 44 | maintainer="erik@martin-dorel.org" 45 | -------------------------------------------------------------------------------- /external/docker-keeper/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/emacs,python 3 | # Edit at https://www.gitignore.io/?templates=emacs,python 4 | 5 | ### Emacs ### 6 | # -*- mode: gitignore; -*- 7 | *~ 8 | \#*\# 9 | /.emacs.desktop 10 | /.emacs.desktop.lock 11 | *.elc 12 | auto-save-list 13 | tramp 14 | .\#* 15 | 16 | # Org-mode 17 | .org-id-locations 18 | *_archive 19 | 20 | # flymake-mode 21 | *_flymake.* 22 | 23 | # eshell files 24 | /eshell/history 25 | /eshell/lastdir 26 | 27 | # elpa packages 28 | /elpa/ 29 | 30 | # reftex files 31 | *.rel 32 | 33 | # AUCTeX auto folder 34 | /auto/ 35 | 36 | # cask packages 37 | .cask/ 38 | dist/ 39 | 40 | # Flycheck 41 | flycheck_*.el 42 | 43 | # server auth directory 44 | /server/ 45 | 46 | # projectiles files 47 | .projectile 48 | 49 | # directory configuration 50 | .dir-locals.el 51 | 52 | # network security 53 | /network-security.data 54 | 55 | 56 | ### Python ### 57 | # Byte-compiled / optimized / DLL files 58 | __pycache__/ 59 | *.py[cod] 60 | *$py.class 61 | 62 | # C extensions 63 | *.so 64 | 65 | # Distribution / packaging 66 | .Python 67 | build/ 68 | develop-eggs/ 69 | downloads/ 70 | eggs/ 71 | .eggs/ 72 | lib/ 73 | lib64/ 74 | parts/ 75 | sdist/ 76 | var/ 77 | wheels/ 78 | pip-wheel-metadata/ 79 | share/python-wheels/ 80 | *.egg-info/ 81 | .installed.cfg 82 | *.egg 83 | MANIFEST 84 | 85 | # PyInstaller 86 | # Usually these files are written by a python script from a template 87 | # before 
PyInstaller builds the exe, so as to inject date/other infos into it. 88 | *.manifest 89 | *.spec 90 | 91 | # Installer logs 92 | pip-log.txt 93 | pip-delete-this-directory.txt 94 | 95 | # Unit test / coverage reports 96 | htmlcov/ 97 | .tox/ 98 | .nox/ 99 | .coverage 100 | .coverage.* 101 | .cache 102 | nosetests.xml 103 | coverage.xml 104 | *.cover 105 | .hypothesis/ 106 | .pytest_cache/ 107 | 108 | # Translations 109 | *.mo 110 | *.pot 111 | 112 | # Scrapy stuff: 113 | .scrapy 114 | 115 | # Sphinx documentation 116 | docs/_build/ 117 | 118 | # PyBuilder 119 | target/ 120 | 121 | # pyenv 122 | .python-version 123 | 124 | # pipenv 125 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 126 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 127 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 128 | # install all needed dependencies. 129 | #Pipfile.lock 130 | 131 | # celery beat schedule file 132 | celerybeat-schedule 133 | 134 | # SageMath parsed files 135 | *.sage.py 136 | 137 | # Spyder project settings 138 | .spyderproject 139 | .spyproject 140 | 141 | # Rope project settings 142 | .ropeproject 143 | 144 | # Mr Developer 145 | .mr.developer.cfg 146 | .project 147 | .pydevproject 148 | 149 | # mkdocs documentation 150 | /site 151 | 152 | # mypy 153 | .mypy_cache/ 154 | .dmypy.json 155 | dmypy.json 156 | 157 | # Pyre type checker 158 | .pyre/ 159 | 160 | # End of https://www.gitignore.io/api/emacs,python 161 | -------------------------------------------------------------------------------- /external/docker-keeper/.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | # Author: Erik Martin-Dorel, 2020 2 | 3 | stages: 4 | - lint 5 | - test 6 | 7 | .python: 8 | image: python:3-alpine 9 | before_script: 10 | - python --version 11 | 12 | lint-python: 13 | stage: lint 14 | extends: .python 15 
| script: 16 | - pip install --no-cache-dir -r requirements_lint.txt 17 | - flake8 *.py 18 | 19 | lint-script: 20 | stage: lint 21 | image: 22 | name: koalaman/shellcheck-alpine:stable 23 | entrypoint: ["/bin/ash", "-c"] 24 | script: 25 | - shellcheck --color=always *.sh 26 | 27 | lint-template: 28 | stage: lint 29 | image: 30 | name: cytopia/yamllint 31 | entrypoint: ["/bin/ash", "-c"] 32 | script: | 33 | yamllint -f colored -d '{extends: default, rules: {line-length: {level: warning}}}' gitlab-ci-template.yml 34 | 35 | test-python: 36 | stage: test 37 | extends: .python 38 | script: 39 | - pip install --no-cache-dir -r requirements.txt 40 | - ./keeper.py --version 41 | - pytest *.py 42 | -------------------------------------------------------------------------------- /external/docker-keeper/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020-2024 Erik Martin-Dorel 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /external/docker-keeper/README.md: -------------------------------------------------------------------------------- 1 | # docker-keeper 2 | 3 | This python script is devised to help maintain Docker Hub repositories 4 | of stable and dev (nightly build) Docker images from a YAML-specified, 5 | single-branch GitLab repository - typically created as a fork of the 6 | following repo: . 7 | 8 | This script is meant to be run by GitLab CI. 9 | 10 | This repository is thus [hosted on GitLab](https://gitlab.com/erikmd/docker-keeper), and [mirrored on GitHub](https://github.com/erikmd/docker-keeper) for more visibility. 11 | 12 | ## Syntax 13 | 14 | ``` 15 | usage: keeper.py [-h] [--version] [--upstream-version] 16 | {generate-config,write-artifacts} ... 17 | 18 | § docker-keeper 19 | 20 | This python3 script is devised to help maintain Docker Hub repositories of 21 | stable and dev (from webhooks or for nightly builds) Docker images from a 22 | YAML-specified, single-branch Git repository - typically created as a fork of 23 | the following GitLab repo: . 24 | For more details, follow the instructions of the README.md in your own fork. 25 | Note: this script is meant to be run by GitLab CI. 
26 | 27 | docker-keeper offers customizable propagate strategies (declarative cURL calls) 28 | 29 | It supports both single modes given in variable CRON_MODE (and optionally ITEM) 30 | and multiple modes, from CLI as well as from HEAD's commit message, typically: 31 | $ git commit --allow-empty -m "…" -m "docker-keeper: rebuild-all" 32 | $ git commit -m "docker-keeper: propagate: I1: minimal; propagate: I2: nightly" 33 | $ git commit -m "docker-keeper: propagate: ID: rebuild-all" 34 | $ git commit -m "docker-keeper: propagate: ID: rebuild-keyword: KW1,KW2" 35 | $ git commit -m "docker-keeper: propagate: ()" 36 | If the commit message (or equivalently, the CLI) contains propagate…, 37 | then it overrides the automatic default propagation. 38 | If the commit is rebuilt with the same SHA1 in a given branch, 39 | then it switches to the default behavior (automatic propagate strategy). 40 | 41 | options: 42 | -h, --help show this help message and exit 43 | --version show program's version number and exit 44 | --upstream-version show program's upstream version from 45 | https://gitlab.com/erikmd/docker-keeper and exit 46 | 47 | subcommands: 48 | {generate-config,write-artifacts} 49 | generate-config Print a GitLab CI YAML config to standard output. This 50 | requires files: - generated/build_data_chosen.json - 51 | generated/remote_tags_to_rm.json - 52 | generated/propagate.json 53 | write-artifacts Generate artifacts in the 'generated' directory. This 54 | requires having file 'images.yml' in the current 55 | working directory. 56 | ``` 57 | & 58 | ``` 59 | usage: keeper.py write-artifacts [-h] [--debug] [--minimal] [--nightly] 60 | [--rebuild-all] [--rebuild-files FILE] 61 | [--rebuild-tags FILE] 62 | [--rebuild-keywords FILE] 63 | [--rebuild-file NAME1,NAME2] 64 | [--rebuild-tag TAG1,TAG2] 65 | [--rebuild-keyword KW1,KW2] 66 | [--propagate 'CHILD-REPO: COMMAND'] 67 | 68 | Generate artifacts in the 'generated' directory. 
This requires having file 69 | 'images.yml' in the current working directory. 70 | 71 | options: 72 | -h, --help show this help message and exit 73 | --debug help debugging by printing more info (especially 74 | regarding argparse) 75 | --minimal default option, can be omitted, kept for backward 76 | compatibility 77 | --nightly trigger builds that have the 'nightly: true' flag 78 | --rebuild-all rebuild all images 79 | --rebuild-files FILE (deprecated) rebuild images with Dockerfile mentioned 80 | in FILE (can be supplied several times) 81 | --rebuild-tags FILE (deprecated) rebuild images with tag mentioned in FILE 82 | (can be supplied several times) 83 | --rebuild-keywords FILE 84 | (deprecated) rebuild images with keyword mentioned in 85 | FILE (can be supplied several times) 86 | --rebuild-file NAME1,NAME2 87 | rebuild images with Dockerfile mentioned in CLI comma- 88 | separated list (can be supplied several times) 89 | --rebuild-tag TAG1,TAG2 90 | rebuild images with tag mentioned in CLI comma- 91 | separated list (can be supplied several times) 92 | --rebuild-keyword KW1,KW2 93 | rebuild images with keyword mentioned in CLI comma- 94 | separated list (can be supplied several times) 95 | --propagate 'CHILD-REPO: COMMAND' 96 | manually specify to propagate 'minimal', 'nightly', 97 | 'rebuild-all', or 'rebuild-keyword: KW1,KW2' commands 98 | to children docker-keeper repositories; note that you 99 | can use '--propagate=()' to disable propagation fully, 100 | independently of the other occurrences of this option; 101 | if there is no occurrence of this option (in CLI nor 102 | in HEAD's commit message), docker-keeper will apply 103 | the propagate strategy defined in the images.yml file 104 | (can be supplied several times) 105 | ``` 106 | & 107 | ``` 108 | usage: keeper.py generate-config [-h] 109 | 110 | Print a GitLab CI YAML config to standard output. 
111 | This requires files: 112 | - generated/build_data_chosen.json 113 | - generated/remote_tags_to_rm.json 114 | - generated/propagate.json 115 | 116 | options: 117 | -h, --help show this help message and exit 118 | ``` 119 | 120 | ## Usage 121 | 122 | * Fork . 123 | 124 | * Follow the instructions from the [docker-keeper wiki](https://gitlab.com/erikmd/docker-keeper/-/wikis/home#initial-setup). 125 | -------------------------------------------------------------------------------- /external/docker-keeper/VERSION: -------------------------------------------------------------------------------- 1 | 0.11.1 2 | -------------------------------------------------------------------------------- /external/docker-keeper/bash_formatter.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # Copyright (c) 2020-2024 Érik Martin-Dorel 5 | # 6 | # Contributed under the terms of the MIT license, 7 | # cf. 8 | 9 | from string import Formatter 10 | import functools 11 | import _string 12 | import re 13 | 14 | 15 | def translate(glob, greedy=False): 16 | """Translate a simple glob expression to a (non-anchored) regexp.""" 17 | qmark = '.' 18 | if greedy: 19 | star = '.*' 20 | else: 21 | star = '.*?' 22 | 23 | inner = lambda s: qmark.join(map(re.escape, s.split('?'))) # noqa: E731 24 | 25 | return star.join(map(inner, glob.split('*'))) 26 | 27 | 28 | def translate_prefix(glob, greedy=False): 29 | """Translate a simple glob expression to a (left)anchored regexp.""" 30 | return '^' + translate(glob, greedy) 31 | 32 | 33 | def reverse(text): 34 | return text[::-1] 35 | 36 | 37 | class BashLike(Formatter): 38 | """Refine string.format(dict), allowing {var[bash-like-patterns]}. 
39 | 40 | In particular: 41 | {var[0:7]} 42 | {var[%.*]} 43 | {var[%%.*]} 44 | {var[//glob/str]} 45 | """ 46 | # New implementation of 47 | # : 48 | def get_field(self, field_name, args, kwargs): 49 | first, rest = _string.formatter_field_name_split(field_name) 50 | 51 | obj = self.get_value(first, args, kwargs) 52 | 53 | for is_attr, i in rest: 54 | if is_attr: 55 | # hide private fields 56 | if i.startswith('_'): 57 | obj = '' 58 | else: 59 | obj = getattr(obj, i) 60 | else: 61 | mslice = re.match('^([0-9]+):([0-9]+)$', i) 62 | msuffixgreedy = re.match('^%%(.+)$', i) 63 | msuffix = re.match('^%(.+)$', i) # to test after greedy 64 | mprefixgreedy = re.match('^##(.+)$', i) 65 | mprefix = re.match('^#(.+)$', i) 66 | msed = re.match('^//([^/]+)/(.*)$', i) 67 | mprefixjoin = re.match('^/#/(.*)$', i) # useful on a list var 68 | msuffixjoin = re.match('^/%/(.*)$', i) # useful on a list var 69 | if mslice: 70 | a, b = map(int, mslice.groups()) 71 | obj = obj[a:b] 72 | elif msuffixgreedy: 73 | suffix = msuffixgreedy.groups()[0] 74 | prefix = translate_prefix(reverse(suffix), True) 75 | obj = reverse(re.sub(prefix, '', reverse(obj), count=1)) 76 | elif msuffix: 77 | suffix = msuffix.groups()[0] 78 | prefix = translate_prefix(reverse(suffix), False) 79 | obj = reverse(re.sub(prefix, '', reverse(obj), count=1)) 80 | elif mprefixgreedy: 81 | prefix = mprefixgreedy.groups()[0] 82 | prefix = translate_prefix(prefix, True) 83 | obj = re.sub(prefix, '', obj, count=1) 84 | elif mprefix: 85 | prefix = mprefix.groups()[0] 86 | prefix = translate_prefix(prefix, False) 87 | obj = re.sub(prefix, '', obj, count=1) 88 | elif msed: 89 | glob, dest = msed.groups() 90 | regexp = translate(glob, True) 91 | obj = re.sub(regexp, dest, obj, count=0) 92 | elif mprefixjoin: 93 | addprefix = mprefixjoin.groups()[0] 94 | if obj: 95 | if isinstance(obj, list): 96 | obj = functools.reduce(lambda res, e: 97 | res + addprefix + str(e), 98 | obj, '') 99 | else: 100 | obj = addprefix + str(obj) 101 | 
else: 102 | obj = '' 103 | elif msuffixjoin: 104 | addsuffix = msuffixjoin.groups()[0] # no need for reverse 105 | if obj: 106 | if isinstance(obj, list): 107 | obj = functools.reduce(lambda res, e: 108 | res + str(e) + addsuffix, 109 | obj, '') 110 | else: 111 | obj = str(obj) + addsuffix 112 | else: 113 | obj = '' 114 | else: 115 | obj = obj[i] 116 | 117 | return obj, first 118 | 119 | 120 | ############################################################################### 121 | # Test suite, cf. 122 | # $ pip3 install pytest 123 | # $ py.test bash_formatter.py 124 | 125 | class Dummy(): 126 | _val = None 127 | pub = None 128 | 129 | def __init__(self, val, pub): 130 | self._val = val 131 | self.pub = pub 132 | 133 | 134 | def test_reverse(): 135 | assert reverse('12345') == '54321' 136 | 137 | 138 | def test_translate(): 139 | assert translate('?????678-*.txt') == '.....678\\-.*?\\.txt' 140 | assert translate('?????678-*.txt', True) == '.....678\\-.*\\.txt' 141 | 142 | 143 | def test_BashLike(): 144 | b = BashLike() 145 | assert b.format('A{var[2:4]}Z', var='abcde') == 'AcdZ' 146 | assert b.format('{s[0:7]}', s='1234567890abcdef') == '1234567' 147 | assert b.format('{s[%.*]}', s='8.10.0') == '8.10' 148 | assert b.format('{s[%%.*]}', s='8.10.0') == '8' 149 | assert b.format('{s[%???]}', s='3.14159') == '3.14' 150 | assert b.format('{obj.pub}', obj=Dummy(4, 12)) == '12' 151 | assert b.format('{obj._val}', obj=Dummy(4, 12)) == '' 152 | assert b.format('V{matrix[coq][//-/+]}', matrix={'coq': '8.12-alpha'}) == \ 153 | 'V8.12+alpha' 154 | assert b.format('{s[#*>]}', s="string->int->char") == 'int->char' 155 | assert b.format('{s[##*>]}', s="string->int->char") == 'char' 156 | assert b.format('{s[%-*]}', s="string->int->char") == 'string->int' 157 | assert b.format('{s[%%-*]}', s="string->int->char") == 'string' 158 | assert b.format('{lst[/#/;]}', lst=[]) == '' 159 | assert b.format('{s[/#/;]}', s="dev") == ';dev' 160 | assert b.format('{lst[/#/;]}', lst=['5.0']) == 
';5.0' 161 | assert b.format('{lst[/#/;]}', lst=[1, 2, 4]) == ';1;2;4' 162 | assert b.format('{s[/#/;][#;]}', s="dev") == 'dev' 163 | assert b.format('{lst[/#/;][#;]}', lst=['5.0']) == '5.0' 164 | assert b.format('{lst[/#/;][#;]}', lst=[1, 2, 4]) == '1;2;4' 165 | assert b.format('{lst[/#/,ocaml-][#,]}', lst=['4.14', '5.0']) == \ 166 | 'ocaml-4.14,ocaml-5.0' 167 | assert b.format('{lst[/%/;]}', lst=[]) == '' 168 | assert b.format('{s[/%/;]}', s="dev") == 'dev;' 169 | assert b.format('{lst[/%/;]}', lst=['5.0']) == '5.0;' 170 | assert b.format('{lst[/%/;]}', lst=[1, 2, 4]) == '1;2;4;' 171 | assert b.format('{s[/%/;][%;]}', s="dev") == 'dev' 172 | assert b.format('{lst[/%/;][%;]}', lst=['5.0']) == '5.0' 173 | assert b.format('{lst[/%/;][%;]}', lst=[1, 2, 4]) == '1;2;4' 174 | assert b.format('{lst[/%/-flambda,][%,]}', lst=['4.14', '5.0']) == \ 175 | '4.14-flambda,5.0-flambda' 176 | -------------------------------------------------------------------------------- /external/docker-keeper/debug_read.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Author: Erik Martin-Dorel, 2024, MIT license 3 | # Script written for debugging purposes and just kept for the record. 4 | 5 | message="chore(docker-keeper): nightly; propagate: () 6 | chore: Update images.yml; docker-keeper: rebuild-all; propagate: mathcomp: rebuild-all 7 | chore: docker-keeper: rebuild-keyword: dev,8.20 8 | Some more text!" 
9 | 10 | readarray -t lines < <(grep "\(^\|(\| \|;\)docker-keeper)\?:" <<< "$message") 11 | declare -a DOCKER_KEEPER_CMDS 12 | 13 | for line in "${lines[@]}"; do 14 | cmd=$(sed -e 's/^.*docker-keeper)\?: *//g' <<< "$line") 15 | readarray -t cmds < <(sed -e 's/; \?/\n/g' <<< "$cmd") 16 | for cmd in "${cmds[@]}"; do 17 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="$(sed -e 's/: \?/=/' <<< "$cmd")" 18 | done 19 | done 20 | 21 | printf "'%s' " "${DOCKER_KEEPER_CMDS[@]/#/--}" 22 | -------------------------------------------------------------------------------- /external/docker-keeper/gitlab-ci-template.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Author: Erik Martin-Dorel, 2020-2024 3 | 4 | # Design: 5 | # - docker-keeper: Lint python scripts (Use flake8) 6 | # - On push for all branches: 7 | # - Lint images.yml (Use cytopia/yamllint) 8 | # - Read images.yml (Use pyyaml) 9 | # - Compute the list of images tags foreach item and store it as artifact 10 | # - Check that these images tags are disjoint 11 | # - Compute the list of Dockerfiles and store it as artifact 12 | # - Lint the Dockerfiles (mentioned in generated/Dockerfiles.txt) 13 | # - Gen README.md as artifact (Supported tags and respective Dockerfile links) 14 | # (cf. 
https://hub.docker.com/_/debian/) (with GitLab hyperlinks) 15 | # - Get the list of remote tags and store it as artifact 16 | # - Compute the symmetric difference of tags and store it as artifact 17 | # - On push for master (protected branch): 18 | # - Foreach Dockerfile spec from images.yml (following the list order): 19 | # - If one of the associated tags does not exists, or with a --rebuild flag: 20 | # - Run 1 job per Dockerfile spec 21 | # - Push the image to Docker Hub foreach required tag 22 | # - Document how to Remove the old tags from Docker Hub 23 | # (see also https://github.com/docker/roadmap/issues/115) 24 | # - Document how to Update the README.md 25 | # - Upload URL: https://hub.docker.com/repository/docker/user/repo 26 | # - Download URL: 27 | # - $CI_JOB_URL = https://gitlab.com/coq/coq/-/jobs/$CI_JOB_ID 28 | # - $CI_JOB_URL/artifacts/file/generated/README.md 29 | # - $CI_JOB_URL/artifacts/raw/generated/README.md?inline=false 30 | # - On scheduled pipelines for master: 31 | # - Run relevant jobs above (alpha releases | dev -> nightly build) 32 | # - Run verification that the docker-make subtree is up-to-date (once a day) 33 | # - On manual pipelines: 34 | # - Run relevant jobs for master (taking account --rebuild or so flags) 35 | # - Document the procedure to rebuild images 36 | # 37 | # - documentation to update docker-keeper using git-subtree 38 | # - [TODO] documentation suggesting Dockerfile sources 39 | # 40 | # - [TODO] docker-base & docker-coq: CONTRIBUTE.md → GitHub PRs 41 | # - docker-keeper-template: 'active: false', link to wiki 42 | 43 | variables: 44 | # default relative path (subtree prefix) 45 | KEEPER_SUBTREE: external/docker-keeper 46 | 47 | stages: 48 | - lint-src 49 | - compile 50 | - prepare-ci 51 | - lint-ci 52 | - trigger-ci 53 | - notify 54 | 55 | .lint-yaml: 56 | dependencies: [] # optional 57 | variables: 58 | FILENAME: existing-file-name # to override in child jobs 59 | image: 60 | name: cytopia/yamllint 61 | entrypoint: 
["/bin/ash", "-c"] 62 | # cf. https://yamllint.readthedocs.io/en/stable/configuration.html 63 | script: | 64 | echo "$FILENAME" 65 | yamllint -f colored -d '{extends: default, rules: {line-length: {level: warning}}}' "$FILENAME" 66 | 67 | .python: 68 | dependencies: [] # optional 69 | image: python:3-alpine 70 | before_script: 71 | - pip install --no-cache-dir -r "$KEEPER_SUBTREE/requirements.txt" 72 | 73 | lint-images: 74 | stage: lint-src 75 | extends: .lint-yaml 76 | variables: 77 | FILENAME: 'images.yml' 78 | 79 | prepare-artifacts: 80 | stage: compile 81 | extends: .python 82 | image: python:3 # default shell: /bin/bash, OS: Debian 83 | script: 84 | - apt-get update -y -q 85 | - DEBIAN_FRONTEND=noninteractive apt-get install -y -q git 86 | - apt-get clean 87 | - rm -rf /var/lib/apt/lists/* 88 | - git rev-parse --verify HEAD 89 | - git describe --all --long --abbrev=40 --always --dirty 90 | - mkdir -p generated 91 | - echo "$CI_JOB_URL" > generated/CI_JOB_URL.txt 92 | - | 93 | # Launch keeper.py with proper options 94 | run() { 95 | echo -n 'RUN ' 96 | for arg; do printf "'%s' " "${arg//\'/\'\\\'\'}"; done 97 | echo 98 | "$@" 99 | } 100 | # Detect if it's the first pipeline run for the current commit 101 | # See https://docs.gitlab.com/ee/ci/variables/predefined_variables.html#predefined-variables-reference 102 | echo "SHELL=$SHELL" # for debugging purpose 103 | echo "CI_COMMIT_BEFORE_SHA=$CI_COMMIT_BEFORE_SHA" 104 | echo "CI_COMMIT_SHA=$CI_COMMIT_SHA" 105 | declare -a DOCKER_KEEPER_CMDS 106 | # The previous latest commit present on a branch or tag. 107 | # Is always `0000000000000000000000000000000000000000` for 108 | # merge request pipelines, the first commit in pipelines for branches or tags, 109 | # *or when manually running a pipeline*. 
110 | # Remark for testing: coqbot+gitlab-ci may run 2 pipelines (branch/pull request) 111 | if [ "$CI_COMMIT_BEFORE_SHA" != "0000000000000000000000000000000000000000" ]; then 112 | echo "Parsing commit message {|$CI_COMMIT_MESSAGE|}." 113 | readarray -t lines < <(grep "\(^\|(\| \|;\)docker-keeper)\?:" <<< "$CI_COMMIT_MESSAGE") 114 | for line in "${lines[@]}"; do 115 | cmd=$(sed -e 's/^.*docker-keeper)\?: *//g' <<< "$line") 116 | readarray -t cmds < <(sed -e 's/; */\n/g' <<< "$cmd") 117 | for cmd in "${cmds[@]}"; do 118 | # put the command in the end of the array (note: should NOT be greedy) 119 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="$(sed -e 's/: */=/' <<< "$cmd")" 120 | done 121 | done 122 | else 123 | echo "Skipping commit message." 124 | fi 125 | if [ -z "$CRON_MODE" ]; then 126 | # Keep this name by backward compatibility 127 | # We could replace it with a MORE expressive SINGLE variable DOCKER_KEEPER 128 | echo "Pipeline variable CRON_MODE was empty." 129 | elif [ "$CRON_MODE" = "minimal" ]; then 130 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="minimal" 131 | elif [ "$CRON_MODE" = "nightly" ]; then 132 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="nightly" 133 | elif [ "$CRON_MODE" = "rebuild-all" ]; then 134 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="rebuild-all" 135 | # BEGIN Keep this by backward compatibility (we could remove it later on): 136 | elif [ "$CRON_MODE" = "rebuild-files" ]; then 137 | if [ -z "$ITEMS" ]; then 138 | echo "Error: file variable ITEMS is missing (CRON_MODE='$CRON_MODE')." 139 | false 140 | else 141 | run cat "$ITEMS" 142 | fi 143 | readarray -t ar_items < "$ITEMS" 144 | items="${ar_items[@]/#/,}" 145 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="rebuild-file=${items#,}" 146 | elif [ "$CRON_MODE" = "rebuild-tags" ]; then 147 | if [ -z "$ITEMS" ]; then 148 | echo "Error: file variable ITEMS is missing (CRON_MODE='$CRON_MODE')." 
149 | false 150 | else 151 | run cat "$ITEMS" 152 | fi 153 | readarray -t ar_items < "$ITEMS" 154 | items="${ar_items[@]/#/,}" 155 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="rebuild-tag=${items#,}" 156 | elif [ "$CRON_MODE" = "rebuild-keywords" ]; then 157 | if [ -z "$ITEMS" ]; then 158 | echo "Error: file variable ITEMS is missing (CRON_MODE='$CRON_MODE')." 159 | false 160 | else 161 | run cat "$ITEMS" 162 | fi 163 | readarray -t ar_items < "$ITEMS" 164 | items="${ar_items[@]/#/,}" 165 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="rebuild-keyword=${items#,}" 166 | # END Keep this by backward compatibility (we could remove it later on). 167 | elif [ "$CRON_MODE" = "rebuild-file" ]; then 168 | if [ -z "$ITEM" ]; then 169 | echo "Error: variable ITEM is missing (CRON_MODE='$CRON_MODE')." 170 | false 171 | else 172 | echo "ITEM=$ITEM" 173 | fi 174 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="rebuild-file=$ITEM" 175 | elif [ "$CRON_MODE" = "rebuild-tag" ]; then 176 | if [ -z "$ITEM" ]; then 177 | echo "Error: variable ITEM is missing (CRON_MODE='$CRON_MODE')." 178 | false 179 | else 180 | echo "ITEM=$ITEM" 181 | fi 182 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="rebuild-tag=$ITEM" 183 | elif [ "$CRON_MODE" = "rebuild-keyword" ]; then 184 | if [ -z "$ITEM" ]; then 185 | echo "Error: variable ITEM is missing (CRON_MODE='$CRON_MODE')." 186 | false 187 | else 188 | echo "ITEM=$ITEM" 189 | fi 190 | DOCKER_KEEPER_CMDS[${#DOCKER_KEEPER_CMDS[@]}]="rebuild-keyword=$ITEM" 191 | else 192 | echo "Error: unexpected value CRON_MODE='$CRON_MODE'." 
193 | false 194 | fi 195 | # BEGIN main command 196 | run "$KEEPER_SUBTREE/keeper.py" write-artifacts "${DOCKER_KEEPER_CMDS[@]/#/--}" 197 | # END main command 198 | artifacts: 199 | name: "docker-keeper_$CI_JOB_NAME" 200 | when: always 201 | paths: 202 | - generated/build_data_all.json 203 | - generated/build_data_min.json 204 | - generated/build_data_chosen.json 205 | - generated/remote_tags.txt 206 | - generated/remote_tags_to_rm.json 207 | - generated/propagate.json 208 | - generated/gitlab_ci_tags.txt 209 | - generated/Dockerfiles.txt 210 | - generated/README.md 211 | - generated/docker_repo.txt 212 | - generated/CI_JOB_URL.txt 213 | expire_in: 6 months 214 | except: 215 | - tags 216 | 217 | check-updates: 218 | stage: compile 219 | extends: .python 220 | only: 221 | - schedules 222 | allow_failure: true 223 | script: | 224 | if [ "$CRON_MODE" = "nightly" ]; then 225 | version=$("$KEEPER_SUBTREE/keeper.py" --version) 226 | upstream=$("$KEEPER_SUBTREE/keeper.py" --upstream-version 2>/dev/null) 227 | upstream_repo="https://gitlab.com/erikmd/docker-keeper.git" 228 | if [ "$version" != "$upstream" ]; then 229 | cat < $upstream, by doing: 231 | cd \$(git rev-parse --show-toplevel) 232 | git subtree pull --squash -P $KEEPER_SUBTREE $upstream_repo master 233 | EOF 234 | false 235 | else 236 | echo "docker-keeper $version is up-to-date" 237 | fi 238 | else 239 | echo "Doing nothing... You may want to set CRON_MODE='nightly'." 
240 | false # could be removed 241 | fi 242 | 243 | prepare-config: 244 | stage: prepare-ci 245 | extends: .python 246 | dependencies: # OVERRIDE 247 | - prepare-artifacts 248 | script: 249 | - '"$KEEPER_SUBTREE/keeper.py" generate-config > generated/build.yml' 250 | # - cat generated/build.yml 251 | artifacts: 252 | name: "docker-keeper_$CI_JOB_NAME" 253 | when: always 254 | paths: 255 | - generated/build.yml 256 | expire_in: 6 months 257 | except: 258 | - tags 259 | 260 | lint-dockerfiles: 261 | stage: prepare-ci # or could be lint-ci 262 | dependencies: 263 | - prepare-artifacts 264 | needs: 265 | - prepare-artifacts 266 | image: 267 | name: hadolint/hadolint:latest-alpine 268 | entrypoint: ["/bin/ash", "-c"] 269 | allow_failure: true 270 | script: | 271 | hadolint --version 272 | cat generated/Dockerfiles.txt 273 | cat generated/Dockerfiles.txt | tr '\n' '\0' | xargs -0 -n1 hadolint 274 | 275 | lint-config: 276 | stage: lint-ci 277 | extends: .lint-yaml 278 | dependencies: # OVERRIDE 279 | - prepare-config 280 | variables: 281 | GIT_STRATEGY: none 282 | FILENAME: 'generated/build.yml' 283 | except: 284 | - tags 285 | 286 | trigger-build: 287 | stage: trigger-ci 288 | only: 289 | - master 290 | trigger: 291 | include: 292 | - artifact: generated/build.yml 293 | job: prepare-config 294 | strategy: depend 295 | 296 | debrief: 297 | stage: notify 298 | dependencies: 299 | - prepare-artifacts 300 | image: alpine:latest 301 | variables: 302 | GIT_STRATEGY: none 303 | except: 304 | - tags 305 | script: |+ 306 | v_CI_JOB_URL=$(cat generated/CI_JOB_URL.txt) 307 | v_docker_repo=$(cat generated/docker_repo.txt) 308 | cat <&2 "Error: missing 'HUB_...' protected variables." 
8 | false 9 | fi 10 | } 11 | 12 | dk_logout() { 13 | docker logout 14 | } 15 | 16 | dk_build() { 17 | local context="$1" 18 | local dockerfile="$2" 19 | local one_image="$3" 20 | shift 3 21 | # rest: VAR1=value1 VAR2=value2 22 | context="${context%/}" 23 | local args=(-f "$context/$dockerfile" --pull -t "$one_image") 24 | for arg; do 25 | args[${#args[@]}]="--build-arg=$arg" 26 | done 27 | ( set -ex; 28 | docker build "${args[@]}" "$context" ) 29 | } 30 | 31 | dk_push() { 32 | local hub_repo="$1" 33 | local one_image="$2" 34 | shift 2 35 | # rest: tag1 tag2 36 | for tag; do 37 | ( set -ex; 38 | docker tag "$one_image" "$hub_repo:$tag"; 39 | docker push "$hub_repo:$tag" ) 40 | done 41 | } 42 | 43 | dk_curl() { 44 | local slug="$1" 45 | local gitlab_token="$2" 46 | local gitlab_domain="$3" 47 | local gitlab_project="$4" 48 | local cron_mode="$5" 49 | local item="$6" 50 | date -u -R 51 | if [ -n "$gitlab_token" ]; then 52 | echo >&2 "For child repo $slug:" 53 | if [ -z "$item" ]; then 54 | curl -X POST -F token="$gitlab_token" -F ref=master -F "variables[CRON_MODE]=$cron_mode" "https://$gitlab_domain/api/v4/projects/$gitlab_project/trigger/pipeline" 55 | else 56 | curl -X POST -F token="$gitlab_token" -F ref=master -F "variables[CRON_MODE]=$cron_mode" -F "variables[ITEM]=$item" "https://$gitlab_domain/api/v4/projects/$gitlab_project/trigger/pipeline" 57 | fi 58 | else 59 | echo >&2 "Error: cannot read api_token_env_var for '$slug'" 60 | false 61 | fi 62 | } 63 | -------------------------------------------------------------------------------- /external/docker-keeper/keeper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # Copyright (c) 2020-2024 Érik Martin-Dorel 5 | # 6 | # Contributed under the terms of the MIT license, 7 | # cf. 
8 | 9 | from bash_formatter import BashLike 10 | from datetime import datetime 11 | from itertools import chain 12 | import argparse 13 | import base64 14 | import copy 15 | import json 16 | import requests 17 | import os 18 | import re 19 | import sys 20 | import time 21 | import yaml 22 | 23 | prog = os.path.basename(__file__) 24 | output_directory = 'generated' 25 | images_filename = 'images.yml' 26 | json_indent = 2 27 | upstream_project = 'erikmd/docker-keeper' 28 | upstream_url = 'https://gitlab.com/%s' % upstream_project 29 | desc = """ 30 | § docker-keeper 31 | 32 | This python3 script is devised to help maintain Docker Hub repositories of 33 | stable and dev (from webhooks or for nightly builds) Docker images from a 34 | YAML-specified, single-branch Git repository - typically created as a fork of 35 | the following GitLab repo: . 36 | For more details, follow the instructions of the README.md in your own fork. 37 | Note: this script is meant to be run by GitLab CI. 38 | 39 | docker-keeper offers customizable propagate strategies (declarative cURL calls) 40 | 41 | It supports both single modes given in variable CRON_MODE (and optionally ITEM) 42 | and multiple modes, from CLI as well as from HEAD's commit message, typically: 43 | $ git commit --allow-empty -m "…" -m "docker-keeper: rebuild-all" 44 | $ git commit -m "docker-keeper: propagate: I1: minimal; propagate: I2: nightly" 45 | $ git commit -m "docker-keeper: propagate: ID: rebuild-all" 46 | $ git commit -m "docker-keeper: propagate: ID: rebuild-keyword: KW1,KW2" 47 | $ git commit -m "docker-keeper: propagate: ()" 48 | If the commit message (or equivalently, the CLI) contains propagate…, 49 | then it overrides the automatic default propagation. 
50 | If the commit is rebuilt with the same SHA1 in a given branch, 51 | then it switches to the default behavior (automatic propagate strategy).""" 52 | 53 | 54 | def print_stderr(message): 55 | print(message, file=sys.stderr, flush=True) 56 | 57 | 58 | def dump(data): 59 | """Debug""" 60 | print_stderr(json.dumps(data, indent=json_indent)) 61 | 62 | 63 | # def error(msg, flush=True): 64 | # print(msg, file=sys.stderr, flush=flush) 65 | # exit(1) 66 | 67 | class Error(Exception): 68 | """Base class for exceptions in this module.""" 69 | pass 70 | 71 | 72 | def error(msg): 73 | raise Error(msg) 74 | 75 | 76 | def first_shortest_tag(list_tags): 77 | return sorted(list_tags, key=(lambda s: (len(s), s)))[0] 78 | 79 | 80 | def uniqify(s): 81 | """Remove duplicates and sort the result list.""" 82 | return sorted(set(s)) 83 | 84 | 85 | def uniqify_tags(list_tags): 86 | """Might be improved to mimic 'sort -V'""" 87 | return sorted(set(list_tags), key=(lambda s: (len(s), s))) 88 | 89 | 90 | def diff_list(l1, l2): 91 | """Compute the set-difference (l1 - l2), preserving duplicates.""" 92 | return list(filter(lambda e: e not in l2, l1)) 93 | 94 | 95 | def meet_list(l1, l2): 96 | """Return the sublist of l1, intersecting l2.""" 97 | return list(filter(lambda e: e in l2, l1)) 98 | 99 | 100 | def subset_list(l1, l2): 101 | """Check if l1 is included in l2.""" 102 | return not diff_list(l1, l2) 103 | 104 | 105 | def is_unique(s): 106 | """Check if the list s has no duplicate.""" 107 | return len(s) == len(set(s)) 108 | 109 | 110 | def merge_dict(a, b): 111 | """Merge the fields of a and b, the latter overriding the former.""" 112 | res = copy.deepcopy(a) if a else {} 113 | copyb = copy.deepcopy(b) if b else {} 114 | for key in copyb: 115 | res[key] = copyb[key] 116 | return res 117 | 118 | 119 | def check_domain(text): 120 | if not re.match(r'^[a-z0-9]+(-[a-z0-9]+)*(\.[a-z0-9]+(-[a-z0-9]+)*)+$', 121 | text): 122 | error("Error: '%s' is not a valid domain name." 
% text) 123 | 124 | 125 | def check_string(value, ident=None): 126 | if not isinstance(value, str): 127 | if ident: 128 | error("Error: expecting a string value, but was given '%s: %s'." 129 | % (ident, value)) 130 | else: 131 | error("Error: expecting a string value, but was given '%s'." 132 | % value) 133 | 134 | 135 | def check_list(value, text=None): 136 | if not isinstance(value, list): 137 | if not text: 138 | text = str(value) 139 | error("Error: not (JSON) list\nText: %s" 140 | % text) 141 | 142 | 143 | def check_dict(value, text=None): 144 | if not isinstance(value, dict): 145 | if not text: 146 | text = str(value) 147 | error("Error: not (JSON) dict\nText: %s" 148 | % text) 149 | 150 | 151 | def ignore_fields(obj, lst): 152 | for field in lst: 153 | obj.pop(field, None) 154 | 155 | 156 | def check_no_fields(text, obj): 157 | if obj: 158 | print_stderr('Unexpected fields in %s:' % text) 159 | dump(obj) 160 | exit(1) 161 | 162 | 163 | def remove_spaces(text): 164 | return text.replace(' ', '') 165 | 166 | 167 | def trim_comma_split(text): 168 | """Turn a comma-separated string into a list of nonempty strings""" 169 | check_string(text) 170 | # the filter is useful to drop empty strings (e.g., for '8.19,8.20,') 171 | return list(filter(lambda e: e, remove_spaces(text).split(','))) 172 | 173 | 174 | def flat_map_trim_comma_split(lst): 175 | """Apply trim_comma_split to each list elt then flatten; needs itertools""" 176 | if lst: 177 | return list(chain(*map(trim_comma_split, lst))) 178 | else: # lst = None 179 | return [] 180 | 181 | 182 | def subset_comma_list(cstr1, cstr2): 183 | """Check if cstr1 is included in cstr2.""" 184 | return subset_list(trim_comma_split(cstr1), trim_comma_split(cstr2)) 185 | 186 | 187 | def eval_bashlike(template, matrix, gvars=None, defaults=None): 188 | b = BashLike() 189 | return b.format(template, matrix=matrix, vars=gvars, defaults=defaults) 190 | 191 | 192 | def eval_bashlike2(expr, matrix, tags, keywords): 193 | b = 
BashLike() 194 | return b.format(expr, matrix=matrix, tags=tags, keywords=keywords) 195 | 196 | 197 | def eval_propagate(expr, build_elt): 198 | return eval_bashlike2(expr, build_elt['matrix'], build_elt['tags'], 199 | build_elt['keywords']) 200 | 201 | 202 | def uniq_cat_eval_propagate(expr, build_data): 203 | list_str = list(map(lambda elt: eval_propagate(expr, elt), build_data)) 204 | return uniqify_tags(flat_map_trim_comma_split(list_str)) 205 | 206 | 207 | def get_build_date(): 208 | """ISO 8601 UTC timestamp""" 209 | return datetime.utcnow().strftime("%FT%TZ") 210 | 211 | 212 | def naive_url_encode(name): 213 | """https://gitlab.com/help/api/README.md#namespaced-path-encoding""" 214 | check_string(name) 215 | return name.replace('/', '%2F') 216 | 217 | 218 | def gitlab_lambda_query_sha1(response): 219 | """Return the "commit.id" field from 'response.json()'.""" 220 | return response.json()['commit']['id'] 221 | 222 | 223 | def lambda_query_text(response): 224 | return response.text 225 | 226 | 227 | def get_url(url, headers=None, params=None, lambda_query=(lambda r: r)): 228 | """Some examples of lambda_query: 229 | 230 | - gitlab_lambda_query_sha1 231 | - lambda_query_text 232 | """ 233 | print_stderr('GET %s\n' % url) 234 | response = requests.get(url, headers=headers, params=params) 235 | if not response: 236 | error("Error!\nCode: %d\nText: %s" 237 | % (response.status_code, response.text)) 238 | return lambda_query(response) 239 | 240 | 241 | def get_commit(commit_api): 242 | """Get GitHub or GitLab SHA1 of a given branch.""" 243 | fetcher = commit_api['fetcher'] 244 | repo = commit_api['repo'] 245 | branch = commit_api['branch'] 246 | if fetcher == 'github': 247 | url = 'https://api.github.com/repos/%s/commits/%s' % (repo, branch) 248 | headers = {"Accept": "application/vnd.github.v3.sha"} 249 | lambda_query = lambda_query_text 250 | elif fetcher == 'gitlab': 251 | # https://gitlab.com/help/api/branches.md#get-single-repository-branch 252 | url = 
('https://gitlab.com/api/v4/projects/%s/repository/branches/%s' 253 | % (naive_url_encode(repo), naive_url_encode(branch))) 254 | headers = None 255 | lambda_query = gitlab_lambda_query_sha1 256 | elif fetcher == 'gitlab.inria.fr': 257 | url = ('https://%s/api/v4/projects/%s/repository/branches/%s' 258 | % (fetcher, naive_url_encode(repo), naive_url_encode(branch))) 259 | headers = None 260 | lambda_query = gitlab_lambda_query_sha1 261 | else: 262 | error("Error: do not support 'fetcher: %s'" % fetcher) 263 | return get_url(url, headers, None, lambda_query) 264 | 265 | 266 | def load_spec(): 267 | """Parse the YAML file and return a dict.""" 268 | print_stderr("Loading '%s'..." % images_filename) 269 | with open(images_filename) as f: 270 | j = yaml.safe_load(f) 271 | if 'active' not in j or not j['active']: 272 | print_stderr(""" 273 | WARNING: the 'docker-keeper' tasks are not yet active. 274 | Please update your %s specification and Dockerfile templates. 275 | Then, set the option 'active: true' in the %s file.""" 276 | % (images_filename, images_filename)) 277 | exit(1) 278 | return j 279 | 280 | 281 | def product_build_matrix(matrix): 282 | """Get the list of dicts grouping 1 item per list mapped to matrix keys.""" 283 | assert matrix 284 | old = [{}] 285 | res = [] 286 | for key in matrix: 287 | for value in matrix[key]: 288 | for e in old: 289 | enew = copy.deepcopy(e) 290 | enew[key] = value 291 | res.append(enew) 292 | old = res 293 | res = [] 294 | return old 295 | 296 | 297 | def check_trim_relative_path(path): 298 | """Fail if path is absolute and remove leading './'.""" 299 | check_string(path) 300 | if path[0] == '/': 301 | error("Error: expecting a relative path, but was given '%s'." % path) 302 | elif path[:2] == './': 303 | return path[2:] 304 | else: 305 | return path 306 | 307 | 308 | def check_filename(filename): 309 | check_string(filename) 310 | if '/' in filename: 311 | error("Error: expecting a filename, but was given '%s'." 
% filename) 312 | 313 | 314 | def eval_if(raw_condition, matrix, gvars): 315 | """Evaluate YAML condition. 316 | 317 | Supported forms: 318 | '{matrix[key]} == "string"' 319 | '{matrix[key]} != "string"' 320 | '"{matrix[key]}" == "string"' 321 | '"{matrix[key]}" != "string"' 322 | """ 323 | # Conjunction 324 | if isinstance(raw_condition, list): 325 | for item_condition in raw_condition: 326 | e = eval_if(item_condition, matrix, gvars) 327 | if not e: 328 | return False 329 | return True 330 | elif raw_condition is None: 331 | return True 332 | 333 | check_string(raw_condition) 334 | equality = (raw_condition.find("==") > -1) 335 | inequality = (raw_condition.find("!=") > -1) 336 | if equality: 337 | args = raw_condition.split("==") 338 | elif inequality: 339 | args = raw_condition.split("!=") 340 | else: 341 | error("Unsupported condition: '%s'." % raw_condition) 342 | if len(args) != 2: 343 | error("Wrong number of arguments: '%s'." % raw_condition) 344 | a = eval_bashlike(args[0].strip().replace('"', ''), matrix, gvars) 345 | b = eval_bashlike(args[1].strip().replace('"', ''), matrix, gvars) 346 | if equality: 347 | return a == b 348 | else: 349 | return a != b 350 | 351 | 352 | def get_list_dict_dockerfile_matrix_tags_args(json, debug): 353 | """Directly called by main on the result of load_spec(). 
354 | 355 | Get list of dicts containing the following keys: 356 | - "context": "…" 357 | - "dockerfile": "…/Dockerfile" 358 | - "path": "…/…/Dockerfile" 359 | - "matrix": […] 360 | - "tags": […] 361 | - "args": […] 362 | - "keywords": […] 363 | - "after_deploy_script": […] 364 | """ 365 | # TODO later-on: fix (dockerfile / path) semantics 366 | res = [] 367 | images = json['images'] 368 | args1 = json['args'] if 'args' in json else {} 369 | gvars = json['vars'] if 'vars' in json else {} 370 | # = global vars, interpolated in: 371 | # - dockerfile 372 | # - args 373 | # - build.args 374 | # - build.tags 375 | # - build.after_deploy_export 376 | for item in images: 377 | list_matrix = product_build_matrix(item['matrix']) 378 | if 'dockerfile' in item['build']: 379 | dfile_templ = check_trim_relative_path(item['build']['dockerfile']) 380 | else: 381 | dfile_templ = 'Dockerfile' 382 | context_templ = check_trim_relative_path(item['build']['context']) 383 | raw_tags = item['build']['tags'] 384 | args2 = item['build']['args'] if 'args' in item['build'] else {} 385 | raw_args = merge_dict(args1, args2) 386 | if 'keywords' in item['build']: 387 | raw_keywords = item['build']['keywords'] 388 | else: 389 | raw_keywords = [] 390 | if 'after_deploy' in item['build']: 391 | raw_after_deploy = item['build']['after_deploy'] 392 | # support both 393 | # after_deploy: 'code' 394 | # and 395 | # after_deploy: 396 | # - 'code' 397 | # as well as 398 | # after_deploy: 399 | # - run: 'code' 400 | # if: '{matrix[base]} == 4.07.1-flambda' 401 | # and regarding interpolation, we can add: 402 | # after_deploy_export: 403 | # variable_name: 'value-{matrix[coq]}' 404 | # to prepend the after_deploy_script with export commands 405 | if isinstance(raw_after_deploy, str): 406 | raw_after_deploy = [raw_after_deploy] 407 | else: 408 | raw_after_deploy = [] 409 | if 'after_deploy_export' in item['build']: 410 | raw_after_deploy_export = item['build']['after_deploy_export'] 411 | 
check_dict(raw_after_deploy_export) 412 | else: 413 | raw_after_deploy_export = {} 414 | for matrix in list_matrix: 415 | tags = [] 416 | for tag_item in raw_tags: 417 | tag_template = tag_item['tag'] 418 | tag_cond = tag_item['if'] if 'if' in tag_item else None 419 | if eval_if(tag_cond, matrix, gvars): 420 | # otherwise skip the tag synonym 421 | tag = eval_bashlike(tag_template, matrix, 422 | gvars) # NOT defaults 423 | tags.append(tag) 424 | defaults = {"build_date": get_build_date()} 425 | if 'commit_api' in item['build']: 426 | commit_api = item['build']['commit_api'] 427 | defaults['commit'] = get_commit(commit_api) # TODO: auth? 428 | args = {} 429 | for arg_key in raw_args: 430 | arg_template = raw_args[arg_key] 431 | args[arg_key] = eval_bashlike(arg_template, matrix, 432 | gvars, defaults) 433 | keywords = list(map(lambda k: eval_bashlike(k, matrix, 434 | gvars, defaults), 435 | raw_keywords)) 436 | 437 | after_deploy_export = [] 438 | # Note: This could be a map: 439 | for var in raw_after_deploy_export: 440 | check_string(var) 441 | var_template = raw_after_deploy_export[var] 442 | var_value = eval_bashlike(var_template, matrix, 443 | gvars, defaults) 444 | # TODO soon: think about quoting var_value 445 | after_deploy_export.append("export %s='%s'" % (var, var_value)) 446 | 447 | if raw_after_deploy: 448 | after_deploy_script = after_deploy_export 449 | else: 450 | after_deploy_script = [] 451 | 452 | for ad_item in raw_after_deploy: 453 | if isinstance(ad_item, str): 454 | after_deploy_script.append(ad_item) # no { } interpolation 455 | # otherwise sth like ${BASH_VARIABLE} would raise an error 456 | else: 457 | script_item = ad_item['run'] 458 | script_cond = ad_item['if'] if 'if' in ad_item else None 459 | if eval_if(script_cond, matrix, gvars): 460 | # otherwise skip the script item 461 | after_deploy_script.append(script_item) 462 | dfile = eval_bashlike(dfile_templ, matrix, gvars) # NOT defaults 463 | context = eval_bashlike(context_templ, 
matrix, gvars) # idem 464 | path = '%s/%s' % (context, dfile) 465 | newitem = {"context": context, "dockerfile": dfile, 466 | "path": path, 467 | "matrix": matrix, "tags": tags, "args": args, 468 | "keywords": keywords, 469 | "after_deploy_script": after_deploy_script} 470 | res.append(newitem) 471 | if debug: 472 | print_stderr('get_list_dict_dockerfile_matrix_tags_args():') 473 | dump(res) 474 | return res 475 | 476 | 477 | def gitlab_build_params_pagination(page, per_page): 478 | """https://docs.gitlab.com/ce/api/README.html#pagination""" 479 | return { 480 | 'page': str(page), 481 | 'per_page': str(per_page) 482 | } 483 | 484 | 485 | def hub_build_params_pagination(page, per_page): 486 | return { 487 | 'page': str(page), 488 | 'page_size': str(per_page) 489 | } 490 | 491 | 492 | def hub_lambda_list(j): 493 | """https://registry.hub.docker.com/v2/repositories/library/debian/tags""" 494 | return list(map(lambda e: e['name'], j['results'])) 495 | 496 | 497 | def get_list_paginated(url, headers, params, lambda_list, max_per_sec=5): 498 | """Generic wrapper to handle GET requests with pagination. 499 | 500 | If the response is a JSON list, use lambda_list=(lambda l: l). 
501 | 502 | REM: for https://registry.hub.docker.com/v2/repositories/_/_/tags, 503 | one could use the "next" field to guess the following page.""" 504 | assert isinstance(max_per_sec, int) 505 | assert max_per_sec > 0 506 | assert max_per_sec <= 10 507 | per_page = 50 # max allowed (by gitlab.com & hub.docker.com): 100 508 | page = 0 509 | allj = [] 510 | while True: 511 | page += 1 512 | if page % max_per_sec == 0: 513 | time.sleep(1.1) 514 | page_params = hub_build_params_pagination(page, per_page) 515 | all_params = merge_dict(params, page_params) 516 | print_stderr("GET %s\n # page: %d" % (url, page)) 517 | response = requests.get(url, headers=headers, params=all_params) 518 | if response.status_code == 404: 519 | j = [] 520 | elif not response: 521 | error("Error!\nCode: %d\nText: %s" 522 | % (response.status_code, response.text)) 523 | else: 524 | j = lambda_list(response.json()) 525 | check_list(j, text=response.text) 526 | if j: 527 | allj += j 528 | else: 529 | break 530 | return allj 531 | 532 | 533 | def get_remote_tags(spec): 534 | repo = spec['docker_repo'] 535 | check_string(repo) 536 | return get_list_paginated( 537 | 'https://registry.hub.docker.com/v2/repositories/%s/tags' % repo, 538 | None, None, hub_lambda_list) 539 | 540 | 541 | def get_gitlab_ci_tags(spec): 542 | if 'gitlab_ci_tags' not in spec: 543 | gitlab_ci_tags = [] 544 | else: 545 | gitlab_ci_tags = spec['gitlab_ci_tags'] 546 | check_list(gitlab_ci_tags) 547 | return gitlab_ci_tags 548 | 549 | 550 | def yaml_safe_quote(text): 551 | return '"' + text.replace('"', '\\"') + '"' 552 | 553 | 554 | def oneliner_str_of_list(json): 555 | check_list(json) 556 | return '[' + ", ".join(map(lambda s: yaml_safe_quote(s), json)) + ']' 557 | 558 | 559 | def minimal_rebuild(build_tags, remote_tags): 560 | def pred(item): 561 | return not subset_list(item['tags'], remote_tags) 562 | return list(filter(pred, build_tags)) 563 | 564 | 565 | def to_rm(all_tags, remote_tags): 566 | return 
diff_list(remote_tags, all_tags) 567 | 568 | 569 | def get_script_directory(): 570 | """$(cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd) in Python.""" 571 | return os.path.dirname(__file__) 572 | 573 | 574 | def get_script_rel2_directory(): 575 | """relative path that's equivalent to: relpath(dirname(__file__), ../..)""" 576 | keeper_dir = get_script_directory() 577 | keeper_rel_dir = os.path.relpath( 578 | keeper_dir, os.path.dirname(os.path.dirname(keeper_dir))) 579 | return check_trim_relative_path(keeper_rel_dir) 580 | 581 | 582 | def mkdir_dirname(filename): 583 | """Python3 equivalent to 'mkdir -p $(dirname $filename)"'.""" 584 | os.makedirs(os.path.dirname(filename), mode=0o755, exist_ok=True) 585 | 586 | 587 | def fullpath(filename): 588 | """Get path of filename in output_directory/.""" 589 | return os.path.join(output_directory, filename) 590 | 591 | 592 | def write_json_artifact(j, basename): 593 | filename = fullpath(basename) 594 | print_stderr("Generating '%s'..." % filename) 595 | mkdir_dirname(filename) 596 | with open(filename, 'w') as f: 597 | json.dump(j, f, indent=json_indent) 598 | 599 | 600 | def write_text_artifact(text, basename): 601 | filename = fullpath(basename) 602 | print_stderr("Generating '%s'..." 
% filename) 603 | mkdir_dirname(filename) 604 | with open(filename, 'w') as f: 605 | f.write(text) 606 | 607 | 608 | def write_list_text_artifact(seq, basename): 609 | check_list(seq) 610 | write_text_artifact('\n'.join(seq) + '\n', basename) 611 | 612 | 613 | def write_build_data_all(build_data_all): 614 | write_json_artifact(build_data_all, 'build_data_all.json') 615 | 616 | 617 | def write_build_data_chosen(build_data): 618 | write_json_artifact(build_data, 'build_data_chosen.json') 619 | 620 | 621 | def write_build_data_min(build_data_min): 622 | write_json_artifact(build_data_min, 'build_data_min.json') 623 | 624 | 625 | def write_remote_tags(remote_tags): 626 | write_list_text_artifact(remote_tags, 'remote_tags.txt') 627 | 628 | 629 | def write_gitlab_ci_tags(gitlab_ci_tags): 630 | check_list(gitlab_ci_tags) 631 | write_text_artifact(oneliner_str_of_list(gitlab_ci_tags), 632 | 'gitlab_ci_tags.txt') 633 | 634 | 635 | def write_remote_tags_to_rm(remote_tags_to_rm): 636 | write_json_artifact(remote_tags_to_rm, 'remote_tags_to_rm.json') 637 | 638 | 639 | def write_propagate(propagate_data): 640 | write_json_artifact(propagate_data, 'propagate.json') 641 | 642 | 643 | def write_list_dockerfile(seq): 644 | """To be used on the value of get_list_dict_dockerfile_matrix_tags_args.""" 645 | dockerfiles = uniqify(map(lambda e: e['path'], seq)) 646 | write_list_text_artifact(dockerfiles, 'Dockerfiles.txt') 647 | 648 | 649 | def write_docker_repo(spec): 650 | repo = spec['docker_repo'] + '\n' 651 | write_text_artifact(repo, 'docker_repo.txt') 652 | 653 | 654 | def read_json_artifact(basename): 655 | filename = fullpath(basename) 656 | print_stderr("Reading '%s'..." 
% filename) 657 | with open(filename, 'r') as json_data: 658 | j = json.load(json_data) 659 | return j 660 | 661 | 662 | def read_build_data_chosen(): 663 | return read_json_artifact('build_data_chosen.json') 664 | 665 | 666 | def read_propagate(): 667 | return read_json_artifact('propagate.json') 668 | 669 | 670 | def write_readme(base_url, build_data): 671 | """Read README.md and replace with a list of images 672 | 673 | with https://gitlab.com/foo/bar/blob/master/Dockerfile hyperlinks. 674 | """ 675 | pattern = '' 676 | check_string(base_url) 677 | if base_url[-1] == '/': 678 | base_url = base_url[:-1] 679 | 680 | def readme_image(item): 681 | return '- [`{tags}`]({url})'.format( 682 | tags=('`, `'.join(item['tags'])), 683 | url=('%s/blob/master/%s' % (base_url, item['path']))) 684 | 685 | print_stderr("Reading the template 'README.md'...") 686 | with open('README.md', 'r') as f: 687 | template = f.read() 688 | 689 | tags = ('# ' 690 | 'Supported tags and respective `Dockerfile` links\n\n%s' 691 | % '\n'.join(map(readme_image, build_data))) 692 | 693 | readme = template.replace(pattern, tags) 694 | 695 | filename = fullpath('README.md') 696 | print_stderr("Generating '%s'..." 
% filename)
    mkdir_dirname(filename)
    with open(filename, 'w') as f:
        f.write(readme)


def get_check_tags(seq):
    """To be used on the value of get_list_dict_dockerfile_matrix_tags_args.

    Return the flat list of all tags; abort (error) on duplicates.
    """
    res = []
    for e in seq:
        res.extend(e['tags'])
    if is_unique(res):
        print_stderr("OK: no duplicate tag found.")
    else:
        error("Error: there are some tags duplicates.")
    return res


def merge_data(l1, l2):
    """Append to l1 the elements of l2 that do not belong to l1."""
    extra = diff_list(l2, l1)
    return l1 + extra


def get_nightly_only(spec, debug):
    """Recompute build data keeping only images with 'build.nightly' truthy."""
    # Deep-copy: get_list_dict_dockerfile_matrix_tags_args must not see
    # mutations of the caller's spec.
    spec2 = copy.deepcopy(spec)
    images = spec2.pop('images')

    def nightly(item):
        return 'nightly' in item['build'] and item['build']['nightly']

    images2 = list(filter(nightly, images))
    spec2['images'] = images2
    return get_list_dict_dockerfile_matrix_tags_args(spec2, debug)


def print_list(title, seq):
    """Print to stderr: the title, then one '- elem' bullet per element."""
    print_stderr(title + ':' + ''.join(map(lambda e: '\n- ' + e, seq)))


def get_file_only(build_data_all, dockerfiles):
    """Keep build items whose Dockerfile path occurs in dockerfiles."""
    print_list('Specified Dockerfiles', dockerfiles)

    # TODO later-on: fix (dockerfile / path) semantics
    def matching(item):
        return item['path'] in dockerfiles

    return list(filter(matching, build_data_all))


def get_tag_only(build_data_all, tags):
    """Keep build items having at least one tag among tags."""
    print_list('Specified tags', tags)

    def matching(item):
        return meet_list(item['tags'], tags)

    return list(filter(matching, build_data_all))


def get_keyword_only(build_data_all, keywords):
    """Keep build items having at least one keyword among keywords."""
    print_list('Specified keywords', keywords)

    def matching(item):
        return meet_list(item['keywords'], keywords)

    return list(filter(matching, build_data_all))


def get_files_list(items_filename):
    """Read one Dockerfile path per line (whitespace-trimmed) from a file."""
    with open(items_filename, 'r')
as fh: 766 | dockerfiles = [item.strip() for item in fh.readlines()] 767 | return dockerfiles 768 | 769 | 770 | def get_tags_list(items_filename): 771 | with open(items_filename, 'r') as fh: 772 | tags = [item.strip() for item in fh.readlines()] 773 | return tags 774 | 775 | 776 | def get_keywords_list(items_filename): 777 | with open(items_filename, 'r') as fh: 778 | keywords = [item.strip() for item in fh.readlines()] 779 | return keywords 780 | 781 | 782 | def check_output_mode(mode): 783 | match mode: 784 | case 'nil': # can only be used in 'images.yml'.propagate.strategy 785 | return 786 | case 'minimal': 787 | return 788 | case 'nightly': 789 | return 790 | case 'rebuild-keyword': 791 | return 792 | case 'rebuild-all': 793 | return 794 | case _: 795 | error("Error: invalid output value 'mode: %s'." % mode) 796 | 797 | 798 | def check_manual_mode(mode): 799 | match mode: 800 | case 'minimal': 801 | return 802 | case 'nightly': 803 | return 804 | case 'rebuild-keyword': 805 | return 806 | case 'rebuild-all': 807 | return 808 | case _: 809 | error("Error: invalid manual value 'mode: %s'." 
% mode) 810 | 811 | 812 | def get_propagate_strategy(spec, build_data_chosen, 813 | triggered, manual_propagate): 814 | """Get propagate_strategy from images.yml, build_data_chosen, --propagate 815 | 816 | Regarding --propagate: can be specified by means of HEAD's commit message: 817 | git commit --allow-empty -m "…" -m "docker-keeper: nightly; propagate: ()" 818 | 819 | 'images.yml'.'propagate' Syntax: sequence of: 820 | when: 'nightly' | 'rebuild-all' | 'forall' | 'exists' 821 | # when OPTIONAL for last sequence element 822 | expr: # (forall/exisgts) 's,t'-list, interp({matrix},{tags},{keywords}) 823 | subset: # (forall/exists) 's,t'-list, interpolation, expr subset of this 824 | mode: 'nil' | 'minimal' | 'nightly' | 'rebuild-keyword' | 'rebuild-all' 825 | item: # (rebuild-keyword) concat; 's,t'-list; interpolation; uniqify 826 | 827 | 'images.yml'.'propagate' Full example: 828 | propagate: 829 | random-slug: 830 | api_token_env_var: 'VAR_NAME' 831 | gitlab_domain: 'gitlab.com' 832 | gitlab_project: '42' 833 | strategy: 834 | # the first that matches (unless manual --propagate) 835 | # current limitation: triggers only 1 mode in child docker-keeper(§) 836 | - # when MANDATORY because not the last rule 837 | when: 'nightly' # this is the 1st possible (arg-free) input mode 838 | mode: 'nightly' # (§)so this cannot be a list 839 | - when: 'rebuild-all' # this is the 2d possible (arg-free) input mode 840 | mode: 'rebuild-all' # (§)so this cannot be a list 841 | - when: 'forall' # forall built image, the property holds 842 | expr: '{matrix[coq][//pl/.][%.*]}' # string or 's,t' list 843 | subset: '8.4,8.5' # is a subset of {8.4, 8.5} 844 | mode: 'nil' # do not propagate then 845 | # no explicit neg, but eval order + previous steps -> implicit neg 846 | - when: 'forall' 847 | expr: '{matrix[coq]}' 848 | subset: 'dev' 849 | # trigger a 'rebuild-keyword: dev' 850 | mode: 'rebuild-keyword' # (§)so this cannot be a list 851 | item: 'dev' # string or 's,t' list; uniqify 852 
| - # when OPTIONAL for last rule 853 | mode: 'minimal' 854 | mathcomp: 855 | api_token_env_var: 'VAR_NAME' 856 | gitlab_domain: 'gitlab.inria.fr' 857 | gitlab_project: '40' 858 | strategy: 859 | - when: 'rebuild-all' 860 | mode: 'rebuild-all' 861 | - when: 'forall' 862 | expr: '{matrix[coq][//pl/.][%.*]}' 863 | subset: '8.4,8.5' 864 | mode: 'nil' 865 | - # when OPTIONAL for last rule 866 | mode: 'rebuild-keyword' # trigger a 'rebuild-keyword: s,t' 867 | item: '{keywords[/#/,][#,]}' # concat; 's,t' list; interp; uniqify 868 | mathcomp-dev: 869 | api_token_env_var: 'VAR_NAME' 870 | gitlab_domain: 'gitlab.inria.fr' 871 | gitlab_project: '41' 872 | strategy: 873 | - when: 'rebuild-all' 874 | mode: 'minimal' 875 | - when: 'forall' 876 | expr: '{matrix[coq]}' 877 | subset: 'dev' 878 | mode: 'nightly' 879 | - when: 'exists' # there exists a built image s.t. the property holds 880 | expr: '{matrix[coq][//pl/.][%.*]}' # string or 's,t' list 881 | subset: '8.19,8.20,dev' # is a subset of {8.19,8.20,dev} 882 | mode: 'minimal' 883 | - # when OPTIONAL for last rule 884 | mode: 'nil'""" 885 | prop = spec['propagate'] if 'propagate' in spec else {} 886 | res_prop = {} 887 | 888 | at_least_one_manual = bool(manual_propagate) 889 | 890 | for slug in prop: 891 | prop1 = prop[slug] 892 | res_prop1 = {} 893 | 894 | api_token_env_var = prop1.pop('api_token_env_var') 895 | if not re.match(r'^[a-zA-Z_]+[a-zA-Z0-9_]*$', api_token_env_var): 896 | error("Error: invalid api_token_env_var for %s (was given '%s')." 
897 | % (slug, api_token_env_var)) 898 | res_prop1['api_token_env_var'] = api_token_env_var 899 | gitlab_domain = prop1.pop('gitlab_domain') 900 | check_domain(gitlab_domain) 901 | res_prop1['gitlab_domain'] = gitlab_domain 902 | res_prop1['gitlab_project'] = prop1.pop('gitlab_project') 903 | 904 | strat = prop1.pop('strategy') 905 | check_no_fields(slug, prop1) 906 | check_list(strat) 907 | # check that each elt (except maybe the last one) has a 'when' property 908 | strat_drop1 = strat[:-1] 909 | for elt in strat_drop1: 910 | if 'when' not in elt: 911 | error("Error: propagate: %s: strategy: 'when' is mandatory %s." 912 | % (slug, "(except for last list element)")) 913 | # 1a. manual strategy 914 | if slug in manual_propagate: 915 | res_prop1['strategy'] = manual_propagate.pop(slug) 916 | res_prop[slug] = res_prop1 917 | continue 918 | else: 919 | if at_least_one_manual: 920 | # disable automatic strategy; will try other (manual) slugs 921 | continue 922 | 923 | # 1b. otherwise, automatic strategy 924 | res_strat = {} 925 | # detect the first strategy elt that matches the 'when' property 926 | # and retrieve the output 'mode' (and interpolated 'item') in res_strat 927 | for elt in strat: 928 | if 'when' in elt: 929 | when = elt.pop('when') 930 | match when: 931 | case 'nightly': 932 | if 'nightly' in triggered and triggered['nightly']: 933 | # BEGIN idem1 934 | res_strat['mode'] = elt.pop('mode') 935 | if res_strat['mode'] == 'rebuild-keyword': 936 | raw_item = elt.pop('item') 937 | res_strat['item'] = \ 938 | uniq_cat_eval_propagate(raw_item, 939 | build_data_chosen) 940 | else: 941 | check_output_mode(res_strat['mode']) 942 | check_no_fields('strategy', elt) 943 | break 944 | # END idem1 945 | case 'rebuild-all': 946 | if 'rebuild_all' in triggered \ 947 | and triggered['rebuild_all']: 948 | # BEGIN idem2 949 | res_strat['mode'] = elt.pop('mode') 950 | if res_strat['mode'] == 'rebuild-keyword': 951 | raw_item = elt.pop('item') 952 | res_strat['item'] = \ 953 | 
uniq_cat_eval_propagate(raw_item, 954 | build_data_chosen) 955 | else: 956 | check_output_mode(res_strat['mode']) 957 | check_no_fields('strategy', elt) 958 | break 959 | # END idem2 960 | case 'forall': 961 | acc = True 962 | expr = elt.pop('expr') 963 | subset = elt.pop('subset') 964 | for build in build_data_chosen: 965 | e_expr = eval_propagate(expr, build) 966 | e_subset = eval_propagate(subset, build) 967 | if not subset_comma_list(e_expr, e_subset): 968 | acc = False 969 | break 970 | if acc: 971 | # BEGIN idem3 972 | res_strat['mode'] = elt.pop('mode') 973 | if res_strat['mode'] == 'rebuild-keyword': 974 | raw_item = elt.pop('item') 975 | res_strat['item'] = \ 976 | uniq_cat_eval_propagate(raw_item, 977 | build_data_chosen) 978 | else: 979 | check_output_mode(res_strat['mode']) 980 | check_no_fields('strategy', elt) 981 | break 982 | # END idem3 983 | else: 984 | ignore_fields(elt, ['mode', 'item']) 985 | check_no_fields('strategy', elt) 986 | case 'exists': 987 | acc = False # dual 988 | expr = elt.pop('expr') 989 | subset = elt.pop('subset') 990 | for build in build_data_chosen: 991 | e_expr = eval_propagate(expr, build) 992 | e_subset = eval_propagate(subset, build) 993 | if subset_comma_list(e_expr, e_subset): # dual 994 | acc = True # dual 995 | break 996 | if acc: 997 | # BEGIN idem4 998 | res_strat['mode'] = elt.pop('mode') 999 | if res_strat['mode'] == 'rebuild-keyword': 1000 | raw_item = elt.pop('item') 1001 | res_strat['item'] = \ 1002 | uniq_cat_eval_propagate(raw_item, 1003 | build_data_chosen) 1004 | else: 1005 | check_output_mode(res_strat['mode']) 1006 | check_no_fields('strategy', elt) 1007 | break 1008 | # END idem4 1009 | else: 1010 | ignore_fields(elt, ['mode', 'item']) 1011 | check_no_fields('strategy', elt) 1012 | case _: 1013 | error("Error: propagate: %s: strategy: %s 'when: %s'" 1014 | % (slug, 'unexpected', elt['when'])) 1015 | else: 1016 | # BEGIN idem5 1017 | res_strat['mode'] = elt.pop('mode') 1018 | if res_strat['mode'] == 
'rebuild-keyword': 1019 | raw_item = elt.pop('item') 1020 | res_strat['item'] = \ 1021 | uniq_cat_eval_propagate(raw_item, 1022 | build_data_chosen) 1023 | else: 1024 | check_output_mode(res_strat['mode']) 1025 | check_no_fields('strategy', elt) 1026 | break 1027 | # END idem5 1028 | 1029 | res_prop1['strategy'] = res_strat 1030 | if 'mode' in res_strat and res_strat['mode'] != 'nil': 1031 | res_prop[slug] = res_prop1 1032 | 1033 | # check that all manually-specified propagate slug belonged in the strategy 1034 | if at_least_one_manual: 1035 | check_no_fields('manual_propagate', manual_propagate) 1036 | return res_prop 1037 | 1038 | 1039 | def get_version(): 1040 | with open(os.path.join(get_script_directory(), 'VERSION'), 'r') as f: 1041 | version = f.read().strip() 1042 | return version 1043 | 1044 | 1045 | def get_upstream_version(): 1046 | url = ('https://gitlab.com/api/v4/projects/%s/repository/files/VERSION' 1047 | % naive_url_encode(upstream_project)) 1048 | 1049 | def lambda_query_content(response): 1050 | return (base64.b64decode(response.json()['content']) 1051 | .decode('UTF-8').rstrip()) 1052 | 1053 | return get_url(url, None, {"ref": "master"}, lambda_query_content) 1054 | 1055 | 1056 | def equalize_args(record): 1057 | """{"VAR1": "value1", "VAR2": "value2"} → ['VAR1=value1', 'VAR2=value2']""" 1058 | res = [] 1059 | for key in record: 1060 | res.append("%s=%s" % (key, record[key])) 1061 | return res 1062 | 1063 | 1064 | def indent_script(list_after_deploy, indent_level, start=False): 1065 | check_list(list_after_deploy) 1066 | if list_after_deploy: 1067 | indent = " " * indent_level 1068 | if start: 1069 | return indent + ('\n' + indent).join(list_after_deploy) 1070 | else: 1071 | return ('\n' + indent).join(list_after_deploy) 1072 | else: 1073 | return "" 1074 | 1075 | 1076 | def escape_single_quotes(script): 1077 | return script.replace("'", "'\\''") 1078 | 1079 | 1080 | def generate_config(docker_repo, gitlab_ci_tags, propagate_data): 1081 | data = 
read_build_data_chosen() 1082 | 1083 | if gitlab_ci_tags: 1084 | str_gitlab_ci_tags = """default: 1085 | tags: {string} 1086 | """.format(string=str(gitlab_ci_tags)) 1087 | else: 1088 | str_gitlab_ci_tags = '' 1089 | 1090 | if not data: 1091 | yamlstr_init = """--- 1092 | # GitLab CI config automatically generated by docker-keeper; do not edit. 1093 | # yamllint disable rule:line-length rule:empty-lines 1094 | 1095 | {var_gitlab_ci_tags} 1096 | stages: 1097 | - build 1098 | - propagate 1099 | 1100 | noop: 1101 | stage: build 1102 | image: alpine:latest 1103 | variables: 1104 | GIT_STRATEGY: none 1105 | script: 1106 | - echo "No image to rebuild." 1107 | only: 1108 | - master 1109 | 1110 | .curl-propagate: 1111 | stage: propagate 1112 | only: 1113 | - master 1114 | variables: 1115 | image: alpine:latest 1116 | before_script: 1117 | - echo $0 1118 | - apk add --no-cache bash 1119 | - /usr/bin/env bash --version 1120 | - apk add --no-cache curl 1121 | - curl --version 1122 | - pwd 1123 | 1124 | {var_jobs}""" 1125 | 1126 | else: 1127 | yamlstr_init = """--- 1128 | # GitLab CI config automatically generated by docker-keeper; do not edit. 
1129 | # yamllint disable rule:line-length rule:empty-lines 1130 | 1131 | {var_gitlab_ci_tags} 1132 | stages: 1133 | - deploy 1134 | - remove 1135 | - propagate 1136 | 1137 | # Changes below (or jobs extending .docker-deploy) should be carefully 1138 | # reviewed to avoid leaks of HUB_TOKEN 1139 | .docker-deploy: 1140 | stage: deploy 1141 | only: 1142 | - master 1143 | variables: 1144 | HUB_REPO: "{var_hub_repo}" 1145 | # HUB_USER: # protected variable 1146 | # HUB_TOKEN: # protected variable 1147 | # FOO_TOKEN: # other, user-defined tokens for after_deploy_script 1148 | image: docker:latest 1149 | services: 1150 | - docker:dind 1151 | before_script: 1152 | - cat /proc/cpuinfo /proc/meminfo 1153 | - echo $0 1154 | - apk add --no-cache bash 1155 | - /usr/bin/env bash --version 1156 | - apk add --no-cache curl 1157 | - curl --version 1158 | - pwd 1159 | 1160 | .curl-propagate: 1161 | stage: propagate 1162 | only: 1163 | - master 1164 | variables: 1165 | image: alpine:latest 1166 | before_script: 1167 | - echo $0 1168 | - apk add --no-cache bash 1169 | - /usr/bin/env bash --version 1170 | - apk add --no-cache curl 1171 | - curl --version 1172 | - pwd 1173 | 1174 | {var_jobs}""" 1175 | 1176 | # See https://gitlab.com/erikmd/docker-keeper-template 1177 | # /-/blob/master/.gitlab-ci.yml#L5 1178 | keeper_subtree = os.getenv("KEEPER_SUBTREE") 1179 | if keeper_subtree: 1180 | print_stderr("Info: non-empty env-var KEEPER_SUBTREE=\"%s\"" 1181 | % keeper_subtree) 1182 | else: 1183 | keeper_subtree = get_script_rel2_directory() 1184 | print_stderr("Info: call get_script_rel2_directory()=\"%s\"" 1185 | % keeper_subtree) 1186 | 1187 | yamlstr_jobs = '' 1188 | job_id = 0 1189 | for item in data: 1190 | job_id += 1 1191 | yamlstr_jobs += """ 1192 | deploy_{var_job_id}_{var_some_real_tag}: 1193 | extends: .docker-deploy 1194 | script: | 1195 | /usr/bin/env bash -e -c ' 1196 | echo $0 1197 | . 
"{var_keeper_subtree}/gitlab_functions.sh" 1198 | dk_login 1199 | dk_build "{var_context}" "{var_dockerfile}" "{var_one_tag}" {vars_args} 1200 | dk_push "{var_hub_repo}" "{var_one_tag}" {vars_tags} 1201 | dk_logout 1202 | {var_after_deploy}' bash 1203 | """.format(var_context=item['context'], 1204 | var_dockerfile=item['dockerfile'], 1205 | vars_args=('"%s"' % '" "'.join(equalize_args(item['args']))), 1206 | vars_tags=('"%s"' % '" "'.join(item['tags'])), 1207 | var_keeper_subtree=keeper_subtree, 1208 | var_hub_repo=docker_repo, 1209 | var_one_tag=("image_%d" % job_id), 1210 | var_job_id=job_id, 1211 | var_some_real_tag=first_shortest_tag(item['tags']), 1212 | var_after_deploy=escape_single_quotes( 1213 | indent_script(item['after_deploy_script'], 6))) 1214 | 1215 | curl_propagate = [] 1216 | for slug in propagate_data: 1217 | prop = propagate_data[slug] 1218 | strat = prop['strategy'] 1219 | if 'item' in strat: 1220 | check_list(strat['item']) 1221 | item = ','.join(strat['item']) 1222 | else: 1223 | item = '' 1224 | next_curl = ('dk_curl "{var_slug}" "{var_tok}" "{var_dom}" "{var_prj}"' 1225 | + ' "{var_mod}" "{var_it}"').format( 1226 | var_slug=slug, 1227 | var_tok='$' + prop['api_token_env_var'], 1228 | var_dom=prop['gitlab_domain'], 1229 | var_prj=prop['gitlab_project'], 1230 | var_mod=prop['strategy']['mode'], 1231 | var_it=item) 1232 | curl_propagate.append(next_curl) 1233 | if propagate_data: 1234 | yamlstr_jobs += """ 1235 | propagate: 1236 | extends: .curl-propagate 1237 | script: | 1238 | /usr/bin/env bash -e -c ' 1239 | echo $0 1240 | . 
"{var_keeper_subtree}/gitlab_functions.sh" 1241 | {var_curl_propagate}' bash 1242 | """.format(var_keeper_subtree=keeper_subtree, 1243 | var_curl_propagate=indent_script(curl_propagate, 6)) 1244 | 1245 | return yamlstr_init.format(var_gitlab_ci_tags=str_gitlab_ci_tags, 1246 | var_hub_repo=docker_repo, 1247 | var_jobs=yamlstr_jobs) 1248 | 1249 | 1250 | def main_generate_config(upstream_version): 1251 | spec = load_spec() # could be avoided by writing yet another .json… 1252 | propagate_data = read_propagate() 1253 | print(generate_config(spec['docker_repo'], 1254 | get_gitlab_ci_tags(spec), propagate_data)) 1255 | 1256 | 1257 | def main_write_artifacts(upstream_version, minimal, # <- input ignored 1258 | rebuild_files, rebuild_tags, rebuild_keywords, 1259 | # ^- deprecated 1260 | rebuild_file, rebuild_tag, rebuild_keyword, 1261 | # ^- supports comma-separated lists 1262 | debug, nightly, propagate, rebuild_all): 1263 | spec = load_spec() 1264 | build_data_all = get_list_dict_dockerfile_matrix_tags_args(spec, debug) 1265 | all_tags = get_check_tags(build_data_all) 1266 | remote_tags = get_remote_tags(spec) 1267 | build_data_min = minimal_rebuild(build_data_all, remote_tags) 1268 | remote_tags_to_rm = to_rm(all_tags, remote_tags) 1269 | 1270 | res_nightly = [] 1271 | if nightly: 1272 | res_nightly = get_nightly_only(spec, debug) 1273 | # reminder: merge_data(build_data_min, res_nightly), and likewise below 1274 | 1275 | # BEGIN deprecated 1276 | res_rebuild_files = [] 1277 | if rebuild_files: 1278 | for fil in rebuild_files: 1279 | res_rebuild_files += get_files_list(fil) 1280 | 1281 | res_rebuild_tags = [] 1282 | if rebuild_tags: 1283 | for fil in rebuild_tags: 1284 | res_rebuild_tags += get_tags_list(fil) 1285 | 1286 | res_rebuild_keywords = [] 1287 | if rebuild_keywords: 1288 | for fil in rebuild_keywords: 1289 | res_rebuild_keywords += get_keywords_list(fil) 1290 | # END deprecated 1291 | 1292 | # BEGIN on the edge 1293 | items = 
uniqify(flat_map_trim_comma_split(rebuild_file) 1294 | + res_rebuild_files) 1295 | res_rebuild_file = get_file_only(build_data_all, items) 1296 | 1297 | items = uniqify_tags(flat_map_trim_comma_split(rebuild_tag) 1298 | + res_rebuild_tags) 1299 | res_rebuild_tag = get_tag_only(build_data_all, items) 1300 | 1301 | items = uniqify_tags(flat_map_trim_comma_split(rebuild_keyword) 1302 | + res_rebuild_keywords) 1303 | res_rebuild_keyword = get_keyword_only(build_data_all, items) 1304 | # END on the edge 1305 | 1306 | if rebuild_all: 1307 | build_data_tags = build_data_all 1308 | else: 1309 | build_data_tags = build_data_min 1310 | build_data_tags = merge_data(build_data_tags, res_nightly) 1311 | build_data_tags = merge_data(build_data_tags, res_rebuild_file) 1312 | build_data_tags = merge_data(build_data_tags, res_rebuild_tag) 1313 | build_data_tags = merge_data(build_data_tags, res_rebuild_keyword) 1314 | 1315 | # Pre-processing 1316 | # --propagate=SLUG: minimal 1317 | # --propagate=SLUG: nightly 1318 | # --propagate=SLUG: rebuild-all 1319 | # --propagate=SLUG: rebuild-keyword: KW1,KW2 1320 | manual_propagate = {} 1321 | if propagate: 1322 | print_stderr("Set manual propagation using commit-msg, gitlab-var" 1323 | + ", or CLI--propagate.") 1324 | for elt in propagate: 1325 | if elt == '()': 1326 | continue 1327 | msed = \ 1328 | re.match(r'^([A-Za-z0-9_-]+): *([a-z-]+)(?:[:] *([\w._-]+))?$', 1329 | elt) 1330 | if not msed: 1331 | error("Error: incorrect syntax '--propagate=%s'." 
% elt) 1332 | slug, command, item = msed.groups() 1333 | check_manual_mode(command) 1334 | if command == 'rebuild-keyword': 1335 | if not item: 1336 | error("Error: '--propagate=_: rebuild-keyword:' " 1337 | + "missing item") 1338 | else: 1339 | if item: 1340 | error("Error: '--propagate=_: %s': " 1341 | + "unexpected item" % command) 1342 | res_elt = {} 1343 | res_elt['mode'] = command 1344 | if item: # here, a string or comma-separated list 1345 | res_elt['item'] = uniqify_tags(trim_comma_split(item)) 1346 | manual_propagate[slug] = res_elt 1347 | # if debug: 1348 | print_stderr('Specified manual_propagate:') 1349 | dump(manual_propagate) 1350 | else: 1351 | print_stderr("Applying propagate strategy automatically from '%s'..." 1352 | % images_filename) 1353 | 1354 | # value for get_propagate_strategy: detect 'nightly'/'rebuild-all' events 1355 | triggered = {} 1356 | if rebuild_all: 1357 | triggered['rebuild_all'] = True 1358 | elif nightly: 1359 | triggered['nightly'] = True 1360 | if debug and triggered: 1361 | print_stderr('triggered:') 1362 | dump(triggered) 1363 | 1364 | # Processing CLI 1365 | # --propagate=() 1366 | # --propagate=SLUG: minimal 1367 | # --propagate=SLUG: nightly 1368 | # --propagate=SLUG: rebuild-all 1369 | # --propagate=SLUG: rebuild-keyword: KW1,KW2 1370 | propagate_data = {} 1371 | 1372 | if propagate: 1373 | # manual option - `if manual_propagate:` would be wrong b/o '()'. 
1374 | if '()' in propagate: 1375 | print_stderr("Got '--propagate=()': disable propagation.") 1376 | else: 1377 | propagate_data = get_propagate_strategy(spec, build_data_tags, 1378 | triggered, 1379 | manual_propagate) 1380 | else: 1381 | # automatic option 1382 | propagate_data = get_propagate_strategy(spec, build_data_tags, 1383 | triggered, {}) 1384 | 1385 | if debug: 1386 | print_stderr('propagate_data:') 1387 | dump(propagate_data) 1388 | 1389 | write_propagate(propagate_data) 1390 | write_build_data_chosen(build_data_tags) 1391 | write_build_data_all(build_data_all) 1392 | write_build_data_min(build_data_min) 1393 | write_remote_tags(remote_tags) 1394 | write_remote_tags_to_rm(remote_tags_to_rm) 1395 | write_list_dockerfile(build_data_all) 1396 | write_readme(spec['base_url'], build_data_all) 1397 | write_docker_repo(spec) 1398 | write_gitlab_ci_tags(get_gitlab_ci_tags(spec)) 1399 | 1400 | 1401 | def main(argv): 1402 | parser = argparse.ArgumentParser( 1403 | prog=prog, description=desc, 1404 | formatter_class=argparse.RawDescriptionHelpFormatter) 1405 | # --version 1406 | parser.add_argument('--version', action='version', 1407 | version=(get_version())) 1408 | # --upstream-version 1409 | help_upstream_version = """ 1410 | show program's upstream version from %s and exit""" % upstream_url 1411 | parser.add_argument('--upstream-version', action='store_true', 1412 | help=help_upstream_version) 1413 | subparsers = parser.add_subparsers(title='subcommands', help=None) 1414 | 1415 | # generate-config 1416 | help_generate_config = """ 1417 | Print a GitLab CI YAML config to standard output. 
1418 | This requires files: 1419 | - generated/build_data_chosen.json 1420 | - generated/remote_tags_to_rm.json 1421 | - generated/propagate.json""" 1422 | parser_generate_config = \ 1423 | subparsers.add_parser( 1424 | 'generate-config', 1425 | # no parents parser 1426 | help=help_generate_config, 1427 | description=help_generate_config, 1428 | formatter_class=argparse.RawDescriptionHelpFormatter) 1429 | parser_generate_config.set_defaults(func=main_generate_config) 1430 | 1431 | # write-artifacts 1432 | help_write_artifacts = """ 1433 | Generate artifacts in the '%s' directory. 1434 | This requires having file '%s' in the current working directory. 1435 | """ % (output_directory, images_filename) 1436 | parser_write_artifacts = \ 1437 | subparsers.add_parser('write-artifacts', 1438 | # no parents parser 1439 | help=help_write_artifacts, 1440 | description=help_write_artifacts) 1441 | several = ' (can be supplied several times)' 1442 | # --debug 1443 | help_debug = """ 1444 | help debugging by printing more info (especially regarding argparse)""" 1445 | parser_write_artifacts.add_argument('--debug', action='store_true', 1446 | help=help_debug) 1447 | # --minimal 1448 | help_minimal = """ 1449 | default option, can be omitted, kept for backward compatibility""" 1450 | parser_write_artifacts.add_argument('--minimal', action='store_true', 1451 | help=help_minimal) 1452 | # --nightly 1453 | help_nightly = "trigger builds that have the 'nightly: true' flag" 1454 | parser_write_artifacts.add_argument('--nightly', action='store_true', 1455 | help=help_nightly) 1456 | # --rebuild-all 1457 | help_rebuild_all = "rebuild all images" 1458 | parser_write_artifacts.add_argument('--rebuild-all', action='store_true', 1459 | help=help_rebuild_all) 1460 | # --rebuild-files FILE 1461 | help_rebuild_files = """ 1462 | (deprecated) rebuild images with Dockerfile mentioned in FILE""" 1463 | parser_write_artifacts.add_argument('--rebuild-files', action='append', 1464 | metavar='FILE', 
1465 | help=help_rebuild_files + several) 1466 | # --rebuild-tags FILE 1467 | help_rebuild_tags = """ 1468 | (deprecated) rebuild images with tag mentioned in FILE""" 1469 | parser_write_artifacts.add_argument('--rebuild-tags', action='append', 1470 | metavar='FILE', 1471 | help=help_rebuild_tags + several) 1472 | # --rebuild-keywords FILE 1473 | help_rebuild_keywords = """ 1474 | (deprecated) rebuild images with keyword mentioned in FILE""" 1475 | parser_write_artifacts.add_argument('--rebuild-keywords', action='append', 1476 | metavar='FILE', 1477 | help=help_rebuild_keywords + several) 1478 | # --rebuild-file NAME1,NAME2 1479 | help_rebuild_file = """ 1480 | rebuild images with Dockerfile mentioned in CLI comma-separated list""" 1481 | parser_write_artifacts.add_argument('--rebuild-file', action='append', 1482 | metavar='NAME1,NAME2', 1483 | help=help_rebuild_file + several) 1484 | # --rebuild-tag TAG1,TAG2 1485 | help_rebuild_tag = """ 1486 | rebuild images with tag mentioned in CLI comma-separated list""" 1487 | parser_write_artifacts.add_argument('--rebuild-tag', action='append', 1488 | metavar='TAG1,TAG2', 1489 | help=help_rebuild_tag + several) 1490 | # --rebuild-keyword KW1,KW2 1491 | help_rebuild_keyword = """ 1492 | rebuild images with keyword mentioned in CLI comma-separated list""" 1493 | parser_write_artifacts.add_argument('--rebuild-keyword', action='append', 1494 | metavar='KW1,KW2', 1495 | help=help_rebuild_keyword + several) 1496 | # --propagate=() 1497 | # --propagate=SLUG: minimal 1498 | # --propagate=SLUG: nightly 1499 | # --propagate=SLUG: rebuild-all 1500 | # --propagate=SLUG: rebuild-keyword: KW1,KW2 1501 | help_propagate = """ 1502 | manually specify to propagate 'minimal', 'nightly', 'rebuild-all', 1503 | or 'rebuild-keyword: KW1,KW2' commands 1504 | to children docker-keeper repositories; 1505 | note that you can use '--propagate=()' to disable propagation fully, 1506 | independently of the other occurrences of this option; 1507 | if 
there is no occurrence of this option (in CLI 1508 | nor in HEAD's commit message), docker-keeper will apply the 1509 | propagate strategy defined in the %s file""" % images_filename 1510 | parser_write_artifacts.add_argument('--propagate', action='append', 1511 | metavar="'CHILD-REPO: COMMAND'", 1512 | help=help_propagate + several) 1513 | parser_write_artifacts.set_defaults(func=main_write_artifacts) 1514 | 1515 | # main 1516 | args = vars(parser.parse_args(argv)) 1517 | if 'debug' in args and args['debug']: 1518 | print_stderr('argparse:') 1519 | print_stderr(args) 1520 | if args["upstream_version"]: 1521 | print(get_upstream_version()) 1522 | elif ("func" in args): 1523 | func = args.pop("func") 1524 | func(**args) 1525 | else: 1526 | parser.print_help() 1527 | 1528 | 1529 | ############################################################################### 1530 | # Test suite, cf. 1531 | # $ pip3 install pytest 1532 | # $ py.test bash_formatter.py 1533 | 1534 | def test_get_commit(): 1535 | github = {"fetcher": "github", "repo": "rocq-prover/rocq", 1536 | "branch": "v8.1"} 1537 | github_expected = "f7cdf553d983a79fe0fbb08403f6a55230016074" 1538 | github_actual = get_commit(github) 1539 | assert github_actual == github_expected 1540 | gitlab = {"fetcher": "gitlab.inria.fr", "repo": "coq/coq", 1541 | "branch": "v8.1"} 1542 | gitlab_expected = "f7cdf553d983a79fe0fbb08403f6a55230016074" 1543 | gitlab_actual = get_commit(gitlab) 1544 | assert gitlab_actual == gitlab_expected 1545 | 1546 | 1547 | def shouldfail(lam): 1548 | try: 1549 | res = lam() 1550 | print_stderr("Wrong outcome: '%s'" % res) 1551 | assert False 1552 | except Error: 1553 | print_stderr('OK') 1554 | 1555 | 1556 | def test_check_trim_relative_path(): 1557 | assert check_trim_relative_path('.') == '.' 
1558 | assert check_trim_relative_path('./foo/bar') == 'foo/bar' 1559 | assert check_trim_relative_path('bar/baz') == 'bar/baz' 1560 | shouldfail(lambda: check_trim_relative_path('/etc')) 1561 | 1562 | 1563 | def test_get_script_rel2_directory(): 1564 | dir2 = get_script_rel2_directory() 1565 | assert '/' in dir2 1566 | assert os.path.basename(dir2) == 'docker-keeper' 1567 | 1568 | 1569 | def test_eval_if(): 1570 | matrix1 = {"base": "latest", "coq": "dev"} 1571 | matrix2 = {"base": "4.09.0-flambda", "coq": "8.7.2"} 1572 | gvars = {"coq_dev": "dev"} 1573 | assert eval_if('{matrix[base]}=="latest"', matrix1, gvars) 1574 | assert eval_if('{matrix[base]} == "latest"', matrix1, gvars) 1575 | assert eval_if(' "{matrix[base]}" == "latest"', matrix1, gvars) 1576 | assert eval_if('{matrix[base]}!="latest"', matrix2, gvars) 1577 | assert eval_if('{matrix[base]} != "latest"', matrix2, gvars) 1578 | assert eval_if(' "{matrix[base]}" != "latest"', matrix2, gvars) 1579 | assert eval_if('{matrix[coq]} == {vars[coq_dev]}', matrix1, gvars) 1580 | assert eval_if('{matrix[coq]} != {vars[coq_dev]}', matrix2, gvars) 1581 | 1582 | 1583 | def test_eval_bashlike(): 1584 | matrix = {"base": "4.09.0-flambda", "coq": "8.19.0"} 1585 | gvars = {"coq_latest": "8.19.1"} 1586 | template0 = '{matrix[coq]}-ocaml-{matrix[base]}' 1587 | template1 = '{vars[coq_latest]}-ocaml-{matrix[base]}' 1588 | template20 = '{matrix[coq][%.*]}-ocaml-{matrix[base][%.*-*]}-flambda' 1589 | template21 = '{vars[coq_latest][%.*]}-ocaml-{matrix[base][%.*-*]}-flambda' 1590 | assert eval_bashlike(template0, matrix, 1591 | gvars, None) == '8.19.0-ocaml-4.09.0-flambda' 1592 | assert eval_bashlike(template1, matrix, 1593 | gvars, None) == '8.19.1-ocaml-4.09.0-flambda' 1594 | assert eval_bashlike(template20, matrix, 1595 | gvars, None) == '8.19-ocaml-4.09-flambda' 1596 | assert eval_bashlike(template21, matrix, 1597 | gvars, None) == '8.19-ocaml-4.09-flambda' 1598 | 1599 | 1600 | def test_is_unique(): 1601 | s = [1, 2, 4, 0, 
4] 1602 | assert not is_unique(s) 1603 | s = uniqify(s) 1604 | assert is_unique(s) 1605 | 1606 | 1607 | def test_uniqify(): 1608 | assert uniqify([1, 2, 4, 0, 4]) == [0, 1, 2, 4] 1609 | 1610 | 1611 | def test_merge_dict(): 1612 | foo = {'a': 1, 'c': 2} 1613 | bar = {'b': 3, 'c': 4} 1614 | foobar = merge_dict(foo, bar) 1615 | assert foobar == {'a': 1, 'b': 3, 'c': 4} 1616 | 1617 | 1618 | def test_diff_list(): 1619 | l1 = [1, 2, 4, 2, 5, 4] 1620 | l2 = [3, 1, 2] 1621 | assert diff_list(l1, l2) == [4, 5, 4] 1622 | 1623 | 1624 | def test_subset_list(): 1625 | l2 = [2, 3] 1626 | l1 = [2] 1627 | l0 = [3, 4, 5] 1628 | l3 = [2, 3, 5] 1629 | assert subset_list(l2, l3) 1630 | assert not subset_list(l2, l1) 1631 | assert not subset_list(l2, l0) 1632 | 1633 | 1634 | def test_subset_comma_list(): 1635 | s1 = '8.19' 1636 | s2 = '8.18,8.19,8.20,dev' 1637 | s3 = '8.4' 1638 | s4 = '8.40,dev' 1639 | assert subset_comma_list(s1, s2) 1640 | assert not subset_comma_list(s3, s4) 1641 | 1642 | 1643 | def test_equalize_args(): 1644 | assert (equalize_args({"VAR1": "value1", "VAR2": "value2"}) == 1645 | ['VAR1=value1', 'VAR2=value2']) 1646 | 1647 | 1648 | def test_merge_data(): 1649 | l1 = [{"i": 1, "s": "a"}, {"i": 2, "s": "b"}, {"i": 1, "s": "a"}] 1650 | l2 = [{"i": 2, "s": "b"}, {"i": 2, "s": "b"}, {"i": 3, "s": "c"}] 1651 | res1 = merge_data(l1, l2) 1652 | assert res1 == [{"i": 1, "s": "a"}, {"i": 2, "s": "b"}, {"i": 1, "s": "a"}, 1653 | {"i": 3, "s": "c"}] 1654 | res2 = merge_data(l2, l1) 1655 | assert res2 == [{"i": 2, "s": "b"}, {"i": 2, "s": "b"}, {"i": 3, "s": "c"}, 1656 | {"i": 1, "s": "a"}, {"i": 1, "s": "a"}] 1657 | 1658 | 1659 | def test_meet_list(): 1660 | assert not meet_list([1, 2], []) 1661 | assert not meet_list([], [2, 3]) 1662 | assert not meet_list([1, 2], [3]) 1663 | assert meet_list([1, 2], [2, 3]) 1664 | 1665 | 1666 | def test_first_shortest_tag(): 1667 | assert first_shortest_tag(['BB', 'AA', 'z', 'y']) == 'y' 1668 | 1669 | 1670 | def test_indent_script(): 1671 | 
assert indent_script(['echo ok', 'echo "The End"'], 6, True) == \ 1672 | ' echo ok\n echo "The End"' 1673 | assert indent_script(['echo ok', 'echo "The End"'], 6) == \ 1674 | 'echo ok\n echo "The End"' 1675 | 1676 | 1677 | def test_trim_comma_split(): 1678 | assert trim_comma_split('') == [] 1679 | assert flat_map_trim_comma_split(None) == [] 1680 | assert trim_comma_split(',dev,dev-native,dev,') == \ 1681 | ['dev', 'dev-native', 'dev'] 1682 | assert sorted(flat_map_trim_comma_split(['dev', '8.19,8.20,', 1683 | 'dev,dev-native'])) == \ 1684 | sorted(['8.19', '8.20', 'dev', 'dev', 'dev-native']) 1685 | assert uniqify_tags(trim_comma_split('dev')) == ['dev'] 1686 | assert uniqify_tags(trim_comma_split('dev,dev,')) == ['dev'] 1687 | 1688 | 1689 | if __name__ == "__main__": 1690 | main(sys.argv[1:]) 1691 | -------------------------------------------------------------------------------- /external/docker-keeper/requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | pyyaml 3 | requests 4 | -------------------------------------------------------------------------------- /external/docker-keeper/requirements_lint.txt: -------------------------------------------------------------------------------- 1 | flake8 2 | -------------------------------------------------------------------------------- /images.yml: -------------------------------------------------------------------------------- 1 | --- 2 | base_url: 'https://gitlab.com/coq-community/docker-coq' 3 | active: true 4 | docker_repo: 'coqorg/coq' 5 | vars: 6 | # TODO: Update when appropriate 7 | coq_latest: '8.20.1' 8 | args: 9 | BUILD_DATE: '{defaults[build_date]}' 10 | propagate: 11 | mathcomp: 12 | api_token_env_var: 'DMC_TOKEN' 13 | gitlab_domain: 'gitlab.inria.fr' 14 | gitlab_project: '44938' 15 | strategy: 16 | - when: 'rebuild-all' 17 | mode: 'rebuild-all' 18 | - when: 'forall' 19 | expr: '{matrix[coq][//pl/.][%.*]}' 20 | subset: '8.4,8.5' 21 | mode: 'nil' 22 
| - # when OPTIONAL for last rule 23 | mode: 'rebuild-keyword' 24 | item: '{keywords[/#/,][#,]}' 25 | mathcomp-dev: 26 | api_token_env_var: 'MC_TOKEN' 27 | gitlab_domain: 'gitlab.inria.fr' 28 | gitlab_project: '44939' 29 | strategy: 30 | - when: 'rebuild-all' 31 | mode: 'minimal' 32 | - when: 'forall' 33 | expr: '{matrix[coq]}' 34 | subset: 'dev' 35 | mode: 'nightly' 36 | - when: 'exists' 37 | expr: '{matrix[coq][//pl/.][%.*]}' 38 | subset: '8.19,8.20,dev' 39 | mode: 'minimal' 40 | - # when OPTIONAL for last rule 41 | mode: 'nil' 42 | images: 43 | ## coqorg/coq:latest 44 | ## coqorg/coq:8.20 45 | - matrix: 46 | default: ['4.13.1-flambda'] 47 | # only *-flambda switches 48 | base: ['4.14.2-flambda', '4.13.1-flambda', '4.12.1-flambda', '4.09.1-flambda'] 49 | coq: ['8.20.1'] 50 | build: &build_coq_stable 51 | context: './coq' 52 | dockerfile: './stable/Dockerfile' 53 | keywords: 54 | - '{matrix[coq][%.*]}' 55 | args: 56 | BASE_TAG: 'coq_{matrix[base]}' 57 | COQ_VERSION: '{matrix[coq]}' 58 | VCS_REF: 'V{matrix[coq]}' 59 | COQ_CORE_PINNED: 'true' 60 | COQ_EXTRA_OPAM: 'coq-bignums' 61 | # +- coq-native 62 | COQ_INSTALL_SERAPI: '{matrix[base][//4.09.1-flambda/]}' 63 | # (or any nonempty string) as coq-serapi 8.20.0+_ supports ocaml 4.12.0+ 64 | tags: 65 | # full tag 66 | - tag: '{matrix[coq]}-ocaml-{matrix[base]}' 67 | # abbreviated tag (*-ocaml-4.13-flambda) 68 | - tag: '{matrix[coq][%.*]}-ocaml-{matrix[base][%.*-*]}-flambda' 69 | # default tag (8.20.1) 70 | - tag: '{matrix[coq]}' 71 | if: '{matrix[base]} == {matrix[default]}' 72 | # abbreviated tag (8.20) 73 | - tag: '{matrix[coq][%.*]}' 74 | if: '{matrix[base]} == {matrix[default]}' 75 | # latest-abbreviated tag (*-ocaml-4.13-flambda) 76 | - tag: 'latest-ocaml-{matrix[base][%.*-*]}-flambda' 77 | if: 78 | - '{matrix[coq]} == {vars[coq_latest]}' 79 | # latest tag 80 | - tag: 'latest' 81 | if: 82 | - '{matrix[coq]} == {vars[coq_latest]}' 83 | - '{matrix[base]} == {matrix[default]}' 84 | ## coqorg/coq:latest-native 85 | ## 
coqorg/coq:latest-native-flambda 86 | ## coqorg/coq:8.20-native 87 | - matrix: 88 | default: ['4.13.1'] 89 | base: ['4.13.1', '4.13.1-flambda'] 90 | coq: ['8.20.1'] 91 | build: &build_coq_stable_native 92 | <<: *build_coq_stable 93 | args: 94 | BASE_TAG: 'coq_{matrix[base]}' 95 | COQ_VERSION: '{matrix[coq]}' 96 | VCS_REF: 'V{matrix[coq]}' 97 | COQ_CORE_PINNED: 'true' 98 | COQ_EXTRA_OPAM: 'coq-native coq-bignums' 99 | COQ_INSTALL_SERAPI: 'true' 100 | # (or any nonempty string) as coq-serapi 8.20.0+_ supports ocaml 4.12.0+ 101 | tags: 102 | # full tag 103 | - tag: '{matrix[coq]}-native-ocaml-{matrix[base]}' 104 | # abbreviated tag (*-ocaml-4.13) 105 | - tag: '{matrix[coq][%.*]}-native-ocaml-{matrix[base][%.*]}' 106 | if: '{matrix[base]} == {matrix[default]}' 107 | # abbreviated tag (*-ocaml-4.07-flambda) 108 | - tag: '{matrix[coq][%.*]}-native-ocaml-{matrix[base][%.*-*]}-flambda' 109 | if: '{matrix[base]} != {matrix[default]}' # -flambda 110 | # default tag (8.20.1-native) 111 | - tag: '{matrix[coq]}-native' 112 | if: '{matrix[base]} == {matrix[default]}' 113 | # default tag (8.20.1-native-flambda) 114 | - tag: '{matrix[coq]}-native-flambda' 115 | if: '{matrix[base]} != {matrix[default]}' # -flambda 116 | # abbreviated default tag (8.20-native) 117 | - tag: '{matrix[coq][%.*]}-native' 118 | if: '{matrix[base]} == {matrix[default]}' 119 | # abbreviated default tag (8.20-native-flambda) 120 | - tag: '{matrix[coq][%.*]}-native-flambda' 121 | if: '{matrix[base]} != {matrix[default]}' # -flambda 122 | # latest-abbreviated tag (latest-native-ocaml-4.13) 123 | - tag: 'latest-native-ocaml-{matrix[base][%.*]}' 124 | if: 125 | - '{matrix[coq]} == {vars[coq_latest]}' 126 | - '{matrix[base]} == {matrix[default]}' 127 | # latest-abbreviated tag (latest-native-ocaml-4.13-flambda) 128 | - tag: 'latest-native-ocaml-{matrix[base][%.*]}-flambda' 129 | if: 130 | - '{matrix[coq]} == {vars[coq_latest]}' 131 | - '{matrix[base]} != {matrix[default]}' # -flambda 132 | # latest tag 133 | - 
tag: 'latest-native' 134 | if: 135 | - '{matrix[coq]} == {vars[coq_latest]}' 136 | - '{matrix[base]} == {matrix[default]}' 137 | - tag: 'latest-native-flambda' 138 | if: 139 | - '{matrix[coq]} == {vars[coq_latest]}' 140 | - '{matrix[base]} != {matrix[default]}' # -flambda 141 | ## coqorg/coq:8.19 142 | - matrix: 143 | default: ['4.13.1-flambda'] 144 | base: ['4.14.2-flambda', '4.13.1-flambda', '4.12.1-flambda', '4.09.1-flambda'] 145 | coq: ['8.19.2'] 146 | build: 147 | <<: *build_coq_stable 148 | args: 149 | BASE_TAG: 'coq_{matrix[base]}' 150 | COQ_VERSION: '{matrix[coq]}' 151 | VCS_REF: 'V{matrix[coq]}' 152 | COQ_CORE_PINNED: 'true' 153 | COQ_EXTRA_OPAM: 'coq-bignums' 154 | # +- coq-native 155 | COQ_INSTALL_SERAPI: 'true' 156 | ## coqorg/coq:8.19-native 157 | - matrix: 158 | default: ['4.13.1'] 159 | base: ['4.13.1', '4.13.1-flambda'] 160 | coq: ['8.19.2'] 161 | build: 162 | <<: *build_coq_stable_native 163 | ## coqorg/coq:8.18 164 | - matrix: 165 | default: ['4.13.1-flambda'] 166 | base: ['4.14.2-flambda', '4.13.1-flambda', '4.12.1-flambda', '4.09.1-flambda'] 167 | coq: ['8.18.0'] 168 | build: 169 | <<: *build_coq_stable 170 | args: 171 | BASE_TAG: 'coq_{matrix[base]}' 172 | COQ_VERSION: '{matrix[coq]}' 173 | VCS_REF: 'V{matrix[coq]}' 174 | COQ_EXTRA_OPAM: 'coq-bignums' 175 | COQ_CORE_PINNED: 'true' 176 | # +- coq-native 177 | COQ_INSTALL_SERAPI: 'true' 178 | ## coqorg/coq:8.18-native 179 | - matrix: 180 | default: ['4.13.1'] 181 | base: ['4.13.1', '4.13.1-flambda'] 182 | coq: ['8.18.0'] 183 | build: 184 | <<: *build_coq_stable_native 185 | ## coqorg/coq:8.17 186 | - matrix: 187 | default: ['4.13.1-flambda'] 188 | base: ['4.14.2-flambda', '4.13.1-flambda', '4.12.1-flambda', '4.09.1-flambda'] 189 | coq: ['8.17.1'] 190 | build: 191 | <<: *build_coq_stable 192 | args: 193 | BASE_TAG: 'coq_{matrix[base]}' 194 | COQ_VERSION: '{matrix[coq]}' 195 | VCS_REF: 'V{matrix[coq]}' 196 | COQ_CORE_PINNED: 'true' 197 | COQ_EXTRA_OPAM: 'coq-bignums' 198 | # +- coq-native 199 | 
COQ_INSTALL_SERAPI: 'true' 200 | ## coqorg/coq:8.17-native 201 | - matrix: 202 | default: ['4.13.1'] 203 | base: ['4.13.1', '4.13.1-flambda'] 204 | coq: ['8.17.1'] 205 | build: 206 | <<: *build_coq_stable_native 207 | ## coqorg/coq:8.16.1 208 | - matrix: 209 | default: ['4.13.1-flambda'] 210 | base: ['4.14.2-flambda', '4.13.1-flambda', '4.12.1-flambda', '4.09.1-flambda'] 211 | coq: ['8.16.1'] 212 | build: 213 | <<: *build_coq_stable 214 | args: 215 | BASE_TAG: 'coq_{matrix[base]}' 216 | COQ_VERSION: '{matrix[coq]}' 217 | VCS_REF: 'V{matrix[coq]}' 218 | COQ_CORE_PINNED: '' 219 | COQ_EXTRA_OPAM: 'coq-bignums' 220 | # +- coq-native 221 | COQ_INSTALL_SERAPI: 'true' 222 | ## coqorg/coq:8.16-native 223 | - matrix: 224 | default: ['4.13.1'] 225 | base: ['4.13.1', '4.13.1-flambda'] 226 | coq: ['8.16.1'] 227 | build: 228 | <<: *build_coq_stable_native 229 | args: 230 | BASE_TAG: 'coq_{matrix[base]}' 231 | COQ_VERSION: '{matrix[coq]}' 232 | VCS_REF: 'V{matrix[coq]}' 233 | COQ_CORE_PINNED: '' 234 | COQ_EXTRA_OPAM: 'coq-native coq-bignums' 235 | COQ_INSTALL_SERAPI: 'true' 236 | # (or any nonempty string) as coq-serapi 8.20.0+_ supports ocaml 4.12.0+ 237 | ## coqorg/coq:8.15 238 | - matrix: 239 | default: ['4.07.1-flambda'] 240 | base: ['4.14.2-flambda', '4.13.1-flambda', '4.07.1-flambda', '4.05.0'] 241 | coq: ['8.15.2'] 242 | build: &build_coq_oldstable 243 | keywords: 244 | - '{matrix[coq][%.*]}' 245 | context: './coq' 246 | dockerfile: './stable/Dockerfile' 247 | args: 248 | BASE_TAG: 'coq_{matrix[base]}' 249 | COQ_VERSION: '{matrix[coq]}' 250 | VCS_REF: 'V{matrix[coq]}' 251 | COQ_CORE_PINNED: '' 252 | COQ_EXTRA_OPAM: 'coq-bignums' 253 | COQ_INSTALL_SERAPI: '{matrix[base][//4.05.0/]}' 254 | # as coq-serapi does not support ocaml 4.05.0 255 | tags: 256 | # full tag 257 | - tag: '{matrix[coq]}-ocaml-{matrix[base]}' 258 | # abbreviated tag (*-ocaml-4.05) 259 | - tag: '{matrix[coq][%.*]}-ocaml-{matrix[base][%.*]}' 260 | if: '{matrix[base]} == 4.05.0' 261 | # abbreviated tag 
(*-ocaml-4.07-flambda) 262 | - tag: '{matrix[coq][%.*]}-ocaml-{matrix[base][%.*-*]}-flambda' 263 | if: '{matrix[base]} != 4.05.0' 264 | # default tag (8.15.2) 265 | - tag: '{matrix[coq]}' 266 | if: '{matrix[base]} == {matrix[default]}' 267 | # abbreviated tag (8.15) 268 | - tag: '{matrix[coq][%.*]}' 269 | if: '{matrix[base]} == {matrix[default]}' 270 | ## coqorg/coq:8.15-native 271 | ## coqorg/coq:8.15-native-flambda 272 | - matrix: 273 | default: ['4.07.1'] 274 | base: ['4.07.1', '4.07.1-flambda'] 275 | coq: ['8.15.2'] 276 | build: &build_coq_oldstable_native 277 | <<: *build_coq_oldstable 278 | args: 279 | BASE_TAG: 'coq_{matrix[base]}' 280 | COQ_VERSION: '{matrix[coq]}' 281 | VCS_REF: 'V{matrix[coq]}' 282 | COQ_CORE_PINNED: '' 283 | COQ_EXTRA_OPAM: 'coq-native coq-bignums' 284 | COQ_INSTALL_SERAPI: 'true' 285 | # (or any nonempty string) as coq-serapi supports ocaml 4.07.1 286 | tags: 287 | # full tag 288 | - tag: '{matrix[coq]}-native-ocaml-{matrix[base]}' 289 | # abbreviated tag (*-ocaml-4.07) 290 | - tag: '{matrix[coq][%.*]}-native-ocaml-{matrix[base][%.*]}' 291 | if: '{matrix[base]} == {matrix[default]}' 292 | # abbreviated tag (*-ocaml-4.07-flambda) 293 | - tag: '{matrix[coq][%.*]}-native-ocaml-{matrix[base][%.*-*]}-flambda' 294 | if: '{matrix[base]} != {matrix[default]}' 295 | # default tag (8.13.2-native) 296 | - tag: '{matrix[coq]}-native' 297 | if: '{matrix[base]} == {matrix[default]}' 298 | # default tag (8.13.2-native-flambda) 299 | - tag: '{matrix[coq]}-native-flambda' 300 | if: '{matrix[base]} != {matrix[default]}' 301 | # abbreviated tag (8.13-native) 302 | - tag: '{matrix[coq][%.*]}-native' 303 | if: '{matrix[base]} == {matrix[default]}' 304 | # abbreviated tag (8.13-native-flambda) 305 | - tag: '{matrix[coq][%.*]}-native-flambda' 306 | if: '{matrix[base]} != {matrix[default]}' 307 | ## coqorg/coq:8.14 308 | - matrix: 309 | default: ['4.07.1-flambda'] 310 | base: ['4.14.2-flambda', '4.13.1-flambda', '4.07.1-flambda', '4.05.0'] 311 | coq: 
['8.14.1'] 312 | build: 313 | <<: *build_coq_oldstable 314 | ## coqorg/coq:8.14-native 315 | - matrix: 316 | default: ['4.07.1'] 317 | base: ['4.07.1', '4.07.1-flambda'] 318 | coq: ['8.14.1'] 319 | build: 320 | <<: *build_coq_oldstable_native 321 | ## coqorg/coq:8.13 322 | - matrix: 323 | default: ['4.07.1-flambda'] 324 | base: ['4.14.2-flambda', '4.13.1-flambda', '4.07.1-flambda', '4.05.0'] 325 | coq: ['8.13.2'] 326 | build: 327 | <<: *build_coq_oldstable 328 | ## coqorg/coq:8.13-native 329 | - matrix: 330 | default: ['4.07.1'] 331 | base: ['4.07.1', '4.07.1-flambda'] 332 | coq: ['8.13.2'] 333 | build: 334 | <<: *build_coq_oldstable_native 335 | ## coqorg/coq:8.x < 8.13 336 | - matrix: 337 | default: ['4.07.1-flambda'] 338 | base: ['4.11.2-flambda', '4.10.2-flambda', '4.07.1-flambda', '4.05.0'] 339 | coq: ['8.12.2', '8.11.2'] 340 | build: &build_coq_old_old_stable 341 | keywords: 342 | - '{matrix[coq][%.*]}' 343 | context: './coq' 344 | dockerfile: './stable/Dockerfile' 345 | args: 346 | BASE_TAG: 'coq_{matrix[base]}' 347 | COQ_VERSION: '{matrix[coq]}' 348 | VCS_REF: 'V{matrix[coq]}' 349 | COQ_CORE_PINNED: '' 350 | COQ_EXTRA_OPAM: 'coq-native coq-bignums' 351 | COQ_INSTALL_SERAPI: '{matrix[base][//4.05.0/]}' 352 | # as coq-serapi does not support ocaml 4.05.0 353 | tags: 354 | # full tag 355 | - tag: '{matrix[coq]}-ocaml-{matrix[base]}' 356 | # abbreviated tag (*-ocaml-4.05) 357 | - tag: '{matrix[coq][%.*]}-ocaml-{matrix[base][%.*]}' 358 | if: '{matrix[base]} == 4.05.0' 359 | # abbreviated tag (*-ocaml-4.07-flambda) 360 | - tag: '{matrix[coq][%.*]}-ocaml-{matrix[base][%.*-*]}-flambda' 361 | if: '{matrix[base]} != 4.05.0' 362 | # default tag (8.12.2) 363 | - tag: '{matrix[coq]}' 364 | if: '{matrix[base]} == {matrix[default]}' 365 | # abbreviated tag (8.12) 366 | - tag: '{matrix[coq][%.*]}' 367 | if: '{matrix[base]} == {matrix[default]}' 368 | ## coqorg/coq:8.x (bis, other ocaml versions) 369 | - matrix: 370 | default: ['4.07.1-flambda'] 371 | base: 
['4.09.1-flambda', '4.08.1-flambda', '4.07.1-flambda', '4.05.0'] 372 | coq: ['8.10.2', '8.9.1', '8.8.2'] 373 | build: 374 | <<: *build_coq_old_old_stable 375 | args: 376 | BASE_TAG: 'coq_{matrix[base]}' 377 | COQ_VERSION: '{matrix[coq]}' 378 | VCS_REF: 'V{matrix[coq]}' 379 | COQ_CORE_PINNED: '' 380 | COQ_EXTRA_OPAM: 'coq-native coq-bignums' 381 | COQ_INSTALL_SERAPI: '{matrix[base][//4.05.0/]}' 382 | # as coq-serapi does not support ocaml 4.05.0 383 | ## coqorg/coq:8.7 384 | - matrix: 385 | default: ['4.07.1-flambda'] 386 | base: ['4.09.1-flambda', '4.08.1-flambda', '4.07.1-flambda', '4.05.0'] 387 | coq: ['8.7.2'] 388 | build: 389 | <<: *build_coq_old_old_stable 390 | args: 391 | BASE_TAG: 'coq_{matrix[base]}' 392 | COQ_VERSION: '{matrix[coq]}' 393 | VCS_REF: 'V{matrix[coq]}' 394 | COQ_CORE_PINNED: '' 395 | COQ_EXTRA_OPAM: 'coq-native coq-bignums' 396 | COQ_INSTALL_SERAPI: '' 397 | # as coq-serapi is not compatible with coq 8.7 398 | # templates: 399 | --------------------------------------------------------------------------------