├── .envrc ├── .github ├── issue_template.md └── pull_request_template.md ├── .gitignore ├── .semaphore ├── install-nix.sha256 ├── known_hosts_github ├── prologue.sh └── semaphore.yml ├── .stylish-haskell.yaml ├── app └── Main.hs ├── cabal.project.freeze ├── changelog.md ├── default.nix ├── doc ├── background.md ├── comments.png ├── example-dev-config.json ├── installing.md ├── merge-train-delay.dia ├── merge-train-delay.svg ├── merge-train-failure.dia ├── merge-train-failure.svg ├── merge-train-rebase-failure-1.dia ├── merge-train-rebase-failure-1.svg ├── merge-train-rebase-failure-2.dia ├── merge-train-rebase-failure-2.svg ├── merge-train-restart.dia ├── merge-train-restart.svg ├── merge-train.dia ├── merge-train.svg ├── name.md ├── no-merge-train.dia ├── no-merge-train.svg ├── performance.md ├── tls.md └── web-interface.png ├── hie.yaml ├── hoff.cabal ├── hoff.nix ├── license ├── locale.nix ├── nix ├── haskell-overlay.nix ├── nixpkgs-pinned.nix ├── overlay.nix ├── sources.json └── sources.nix ├── package ├── build-and-ship.sh ├── build-binary.sh ├── build-package.sh ├── check-version.sh ├── deb-conffiles ├── deb-control ├── deb-postinst ├── example-config.json ├── github-known-hosts ├── hoff.service ├── on-failure@.service.example └── os-release ├── readme.md ├── release.nix ├── src ├── ClockTickLoop.hs ├── Configuration.hs ├── EventLoop.hs ├── Format.hs ├── Git.hs ├── Github.hs ├── GithubApi.hs ├── Logic.hs ├── Metrics │ ├── Metrics.hs │ └── Server.hs ├── MonadLoggerEffect.hs ├── Parser.hs ├── Project.hs ├── Server.hs ├── Time.hs ├── Types.hs └── WebInterface.hs ├── static ├── script.js └── style.css ├── tests ├── EndToEnd.hs ├── EventLoopSpec.hs ├── ParserSpec.hs ├── ProjectSpec.hs ├── ServerSpec.hs ├── Spec.hs └── data │ ├── issue-comment-created-payload.json │ ├── issue-comment-edited-payload.json │ ├── pull-request-payload.json │ ├── pull-request-review-edited-payload.json │ ├── pull-request-review-submitted-payload.json │ ├── push-payload.json │ └── 
status-payload.json ├── todo.md ├── tools ├── build-status ├── comment ├── send-webhook └── update-diagrams └── update-nixpkgs.sh /.envrc: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Add possibility to run a custom envrc that completely overrides the behavior of this envrc. 4 | CUSTOM_ENVRC=.customenvrc 5 | if [ -f "$CUSTOM_ENVRC" ]; then 6 | echo "Using .customenvrc file" 7 | source_env $CUSTOM_ENVRC 8 | else 9 | # Decrease logging output 10 | # shellcheck disable=SC2034 # unused variable is still read by direnv. 11 | DIRENV_LOG_FORMAT= 12 | # Install nix-direnv, which has an improved implementation of `use nix` that 13 | # caches the Nix environment. Note that this URL is cached locally, so it 14 | # doesn't fetch the script every time. 15 | if ! has nix_direnv_version || ! nix_direnv_version 2.2.1; then 16 | source_url "https://raw.githubusercontent.com/nix-community/nix-direnv/2.2.1/direnvrc" "sha256-zelF0vLbEl5uaqrfIzbgNzJWGmLzCmYAkInj/LNxvKs=" 17 | fi 18 | 19 | nix_direnv_watch_file nix/sources.json nix/haskell-dependencies.nix 20 | dotenv 21 | 22 | use nix default.nix --argstr environment shell 23 | fi 24 | -------------------------------------------------------------------------------- /.github/issue_template.md: -------------------------------------------------------------------------------- 1 | 5 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Build output 2 | /dist-newstyle 3 | /package/*.deb 4 | 5 | # Editor files 6 | *.swp 7 | *.swo 8 | *.autosave 9 | *~ 10 | /.idea 11 | /.vscode 12 | /TAGS 13 | /tags 14 | 15 | # Vagrant files 16 | /package/.vagrant 17 | 
/package/*-console.log 18 | 19 | # Configuration file 20 | /config.json 21 | 22 | # Hoff state 23 | /run 24 | 25 | # Temporary file of send-webhook 26 | webhook-data.json 27 | 28 | # Test output 29 | .hspec-failures 30 | 31 | # Nix build result 32 | result 33 | 34 | # Direnv 35 | .direnv/ 36 | -------------------------------------------------------------------------------- /.semaphore/install-nix.sha256: -------------------------------------------------------------------------------- 1 | 0f72926b1c55d787fd1e1d3a23de65da5cf7638c3fb7772500c55112bc99b799 install-nix 2 | -------------------------------------------------------------------------------- /.semaphore/known_hosts_github: -------------------------------------------------------------------------------- 1 | |1|8Fk1EtvndjXcVeIsKXpSBlAYIo4=|hS8XwXQkITyL1G+JkAJ2LUYPJPM= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== 2 | |1|BBbkpgyuYnsnH51pg8lqPDf50NU=|tL+hRQvjigtBeYsNllQv94RQ0nc= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== 3 | -------------------------------------------------------------------------------- /.semaphore/prologue.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Shared commands for the set-up phase of jobs 4 | 5 | # This file is NOT usable with Semaphore's "commands_file" property, 
as this 6 | # puts all the comments in this file in the build output and requires us to 7 | # write all build steps on one line each. Instead, this file must be executed 8 | # (not sourced) in the build pipelines where we use it. 9 | # If the build requires the use of Nix, this command must be rerun afterwards: 10 | # `source $HOME/.nix-profile/etc/profile.d/nix.sh` 11 | # The reason for executing and not sourcing this file (and having to re-source 12 | # nix.sh) is that sourcing interferes with Semaphore's job control flow: if a 13 | # command would fail in this pipeline, it immediately causes Semaphore to abort 14 | # the job, without running any cleanup commands or the epilogue (which we use 15 | # to report failures to Healthchecks.io). 16 | 17 | # We don't use `set -x` here to show the run commands, because this would show 18 | # authentication tokens in the build output. Instead, `set -v` prints the 19 | # commands "as they are read". 20 | # We do want to exit immediately after encountering an error. 21 | # We also cannot use "set -u", as Semaphore's tooling uses unset variables in 22 | # some places (which we can't change). 23 | set -evo pipefail 24 | 25 | # Install Nix. We install in single-user mode (--no-daemon) because the Nix 26 | # process can access the running SSH agent to fetch private Git repositories. 27 | curl -o install-nix https://releases.nixos.org/nix/nix-2.24.10/install 28 | sha256sum --check .semaphore/install-nix.sha256 29 | 30 | sudo rm -rf \ 31 | /home/semaphore/.rbenv \ 32 | /home/semaphore/.kerl \ 33 | /home/semaphore/.nvm \ 34 | /home/semaphore/.phpbrew \ 35 | /home/semaphore/.kiex \ 36 | /opt/google \ 37 | /opt/firefox-esr \ 38 | /opt/firefox-esr-prev \ 39 | /usr/local/golang 40 | 41 | # Hotfix for Semaphore issue preventing installing Nix 42 | unset LD_LIBRARY_PATH 43 | 44 | mount_nix_store() { 45 | # Before we can successfully restore to `/nix` it needs to be created and owned 46 | # by the CI user. 
Without this, the `cache restore` command fails because it 47 | # doesn't have permission to create `/nix`. (We cannot run the cache restore 48 | # command as `root` as it takes settings from environment variables.) 49 | # We use the local scratch SSD mounted at `/mnt` to prevent running out of disk 50 | # space, as the Nix store can get quite large. 51 | sudo mkdir -p /mnt/nix /nix 52 | sudo mount --bind /mnt/nix /nix 53 | sudo chown -R semaphore: /nix 54 | } 55 | mount_nix_store 56 | 57 | # Attempt to restore the Semaphore cache entry for `/nix`. 58 | # 59 | # We have this in addition to Cachix because we want to avoid hitting Cachix 60 | # for individual store entries, as restoring `/nix` from the Semaphore cache in 61 | # one go is a lot faster than downloading individual cache entries from 62 | # Cachix's S3 + Cloudflare. 63 | # 64 | # We refresh the Nix store cache entry daily. It is populated after the first 65 | # successful build of the day by our main pipeline. 66 | # 67 | # Restoring the cache can fail when the cache entry is only partially matched, 68 | # because then it might still be in the process of being uploaded by Semaphore, 69 | # which can be caused by a concurrent build. Since using the Semaphore cache is 70 | # only an optimization, but not strictly necessary, we make sure that the build 71 | # doesn't fail in this case, by making sure the exit code is always 0. When 72 | # restoring the cache fails, we delete /nix to ensure that we are not left with 73 | # a partially restored Nix store. 74 | cache restore "nix-store-$(date -u -Idate),nix-store-$(date -u -Idate --date=yesterday),nix-store" || { 75 | sudo umount /nix 76 | sudo rm -fr /mnt/nix 77 | mount_nix_store 78 | } 79 | 80 | # Don't break during the `nix-env` -> `nix profile` transition. 
81 | rm -f /nix/var/nix/profiles/per-user/semaphore/* 82 | 83 | # Install nix 84 | sh ./install-nix --no-daemon 85 | 86 | # Enable `nix-command` feature, which `nix build` needs to build 87 | sudo mkdir -p /etc/nix 88 | echo "experimental-features = nix-command flakes" | sudo tee -a /etc/nix/nix.conf 89 | echo "max-jobs = auto" | sudo tee -a /etc/nix/nix.conf 90 | 91 | # Activate nix profile 92 | # Disable shellcheck, because the file does not exist before this script is run. 93 | # shellcheck disable=SC1091 94 | source "$HOME/.nix-profile/etc/profile.d/nix.sh" 95 | 96 | # Enable building multiple derivations at the same time. See the max-jobs option 97 | # in https://nixos.org/manual/nix/unstable/command-ref/opt-common.html 98 | mkdir -p ~/.config/nix 99 | 100 | # Enable cachix. Cachix is also in default.nix, but it is installed separately 101 | # here because it is needed for building default.nix. 102 | nix profile install nixpkgs/nixos-unstable#cachix 103 | # We don't need to `cachix authtoken` because we export `CACHIX_AUTH_TOKEN` via 104 | # the `cachix-channable` secret. 105 | cachix use channable-public 106 | 107 | # Add known hosts for Github, so when Nix is going to fetch from Github, 108 | # the host is already there, and it does not print 109 | # "Warning: Permanently added 'github.com' (RSA) to the list of known hosts." 110 | cat .semaphore/known_hosts_github >> "$HOME/.ssh/known_hosts" 111 | 112 | # Build dev environment and push to cachix 113 | cachix watch-exec channable-public -- nix print-dev-env --file default.nix > "$HOME/devenv" 114 | 115 | # Enable building multiple derivations at the same time. See the max-jobs option in 116 | # https://nixos.org/manual/nix/unstable/command-ref/opt-common.html 117 | echo "max-jobs = auto" >> ~/.config/nix/nix.conf 118 | # Enable using multiple cores for a single derivation at the same time.
See the cores option in 119 | # https://nixos.org/manual/nix/unstable/command-ref/opt-common.html 120 | echo "cores = 0" >> ~/.config/nix/nix.conf 121 | -------------------------------------------------------------------------------- /.semaphore/semaphore.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: "v1.0" 3 | name: "Hoff" 4 | 5 | agent: 6 | machine: 7 | type: "f1-standard-2" 8 | os_image: "ubuntu2204" 9 | 10 | # If we push a new build to some branch that isn't master, and another build is 11 | # already running, we cancel it. 12 | auto_cancel: 13 | running: 14 | when: "branch != 'master'" 15 | 16 | blocks: 17 | - name: "Build" 18 | task: 19 | secrets: 20 | - name: "opsbot-github-key" 21 | # Keys needed to access our cachix cache. 22 | - name: "cachix-channable-public" 23 | 24 | jobs: 25 | - name: "Build, test, package, ship" 26 | commands: 27 | # Change permissions of the shipping key to avoid SSH complaining 28 | # about the default file permissions that Semaphore uses. 29 | - "chmod 0600 ~/.ssh/id_ed25519" 30 | - "ssh-add ~/.ssh/id_ed25519" 31 | 32 | # With `--use-cache` we get a full clone that is cached by semaphore so that fetching is faster. 33 | - checkout --use-cache 34 | - .semaphore/prologue.sh 35 | - source $HOME/.nix-profile/etc/profile.d/nix.sh 36 | 37 | # Make the dev environment available for all subsequent commands, to 38 | # prevent having to run `nix develop` every time. 39 | - source "$HOME/devenv" 40 | 41 | # Binaries in the profile built above may need locales, that they 42 | # can't find unless we point LOCALE_ARCHIVE at the archive that 43 | # contains them. 44 | - "export LOCALE_ARCHIVE=$(nix-build --no-out-link locale.nix)/lib/locale/locale-archive" 45 | 46 | # This makes sure the version bounds from the `cabal.project.freeze` 47 | # file match the versions from nixpkgs 48 | - "cabal build --dry-run all" 49 | 50 | # Check shell scripts for issues. 
51 | - "shellcheck package/*.sh package/deb-postinst" 52 | 53 | # Print working directory for debugging purposes 54 | - "pwd" 55 | 56 | # Check version consistency between hoff.cabal and hoff.nix 57 | - "./package/check-version.sh" 58 | 59 | # Run build and tests in Nix 60 | - "nix-build --no-out-link release.nix >nix-store-location" 61 | 62 | # Display Nix store location for debugging purposes 63 | - "cat nix-store-location" 64 | 65 | # push the result to Cachix. 66 | - "cat nix-store-location | cachix push channable-public" 67 | 68 | # Remove store location (unneeded now) 69 | - "rm nix-store-location" 70 | 71 | # Store a copy of the nix store. This will be refreshed daily, which 72 | # is more than sufficient for this repo. 73 | - "cache store nix-store-$(date -u -Idate) /nix" 74 | -------------------------------------------------------------------------------- /.stylish-haskell.yaml: -------------------------------------------------------------------------------- 1 | # stylish-haskell configuration file 2 | # ================================== 3 | 4 | # The stylish-haskell tool is mainly configured by specifying steps. These steps 5 | # are a list, so they have an order, and one specific step may appear more than 6 | # once (if needed). Each file is processed by these steps in the given order. 7 | steps: 8 | # Convert some ASCII sequences to their Unicode equivalents. This is disabled 9 | # by default. 10 | # - unicode_syntax: 11 | # # In order to make this work, we also need to insert the UnicodeSyntax 12 | # # language pragma. If this flag is set to true, we insert it when it's 13 | # # not already present. You may want to disable it if you configure 14 | # # language extensions using some other method than pragmas. Default: 15 | # # true. 16 | # add_language_pragma: true 17 | 18 | # Align the right hand side of some elements. This is quite conservative 19 | # and only applies to statements where each element occupies a single 20 | # line. 
21 | - simple_align: 22 | cases: false 23 | top_level_patterns: true 24 | records: true 25 | 26 | # Import cleanup 27 | - imports: 28 | # There are different ways we can align names and lists. 29 | # 30 | # - global: Align the import names and import list throughout the entire 31 | # file. 32 | # 33 | # - file: Like global, but don't add padding when there are no qualified 34 | # imports in the file. 35 | # 36 | # - group: Only align the imports per group (a group is formed by adjacent 37 | # import lines). 38 | # 39 | # - none: Do not perform any alignment. 40 | # 41 | # Default: global. 42 | align: none 43 | 44 | # The following options affect only import list alignment. 45 | # 46 | # List align has following options: 47 | # 48 | # - after_alias: Import list is aligned with end of import including 49 | # 'as' and 'hiding' keywords. 50 | # 51 | # > import qualified Data.List as List (concat, foldl, foldr, head, 52 | # > init, last, length) 53 | # 54 | # - with_alias: Import list is aligned with start of alias or hiding. 55 | # 56 | # > import qualified Data.List as List (concat, foldl, foldr, head, 57 | # > init, last, length) 58 | # 59 | # - new_line: Import list starts always on new line. 
60 | # 61 | # > import qualified Data.List as List 62 | # > (concat, foldl, foldr, head, init, last, length) 63 | # 64 | # Default: after_alias 65 | list_align: after_alias 66 | 67 | # Right-pad the module names to align imports in a group: 68 | # 69 | # - true: a little more readable 70 | # 71 | # > import qualified Data.List as List (concat, foldl, foldr, 72 | # > init, last, length) 73 | # > import qualified Data.List.Extra as List (concat, foldl, foldr, 74 | # > init, last, length) 75 | # 76 | # - false: diff-safe 77 | # 78 | # > import qualified Data.List as List (concat, foldl, foldr, init, 79 | # > last, length) 80 | # > import qualified Data.List.Extra as List (concat, foldl, foldr, 81 | # > init, last, length) 82 | # 83 | # Default: true 84 | pad_module_names: false 85 | 86 | # Long list align style takes effect when import is too long. This is 87 | # determined by 'columns' setting. 88 | # 89 | # - inline: This option will put as much specs on same line as possible. 90 | # 91 | # - new_line: Import list will start on new line. 92 | # 93 | # - new_line_multiline: Import list will start on new line when it's 94 | # short enough to fit to single line. Otherwise it'll be multiline. 95 | # 96 | # - multiline: One line per import list entry. 97 | # Type with constructor list acts like single import. 98 | # 99 | # > import qualified Data.Map as M 100 | # > ( empty 101 | # > , singleton 102 | # > , ... 103 | # > , delete 104 | # > ) 105 | # 106 | # Default: inline 107 | long_list_align: inline 108 | 109 | # Align empty list (importing instances) 110 | # 111 | # Empty list align has following options 112 | # 113 | # - inherit: inherit list_align setting 114 | # 115 | # - right_after: () is right after the module name: 116 | # 117 | # > import Vector.Instances () 118 | # 119 | # Default: inherit 120 | empty_list_align: inherit 121 | 122 | # List padding determines indentation of import list on lines after import. 123 | # This option affects 'long_list_align'. 
124 | # 125 | # - : constant value 126 | # 127 | # - module_name: align under start of module name. 128 | # Useful for 'file' and 'group' align settings. 129 | list_padding: 4 130 | 131 | # Separate lists option affects formatting of import list for type 132 | # or class. The only difference is single space between type and list 133 | # of constructors, selectors and class functions. 134 | # 135 | # - true: There is single space between Foldable type and list of it's 136 | # functions. 137 | # 138 | # > import Data.Foldable (Foldable (fold, foldl, foldMap)) 139 | # 140 | # - false: There is no space between Foldable type and list of it's 141 | # functions. 142 | # 143 | # > import Data.Foldable (Foldable(fold, foldl, foldMap)) 144 | # 145 | # Default: true 146 | separate_lists: true 147 | 148 | # Space surround option affects formatting of import lists on a single 149 | # line. The only difference is single space after the initial 150 | # parenthesis and a single space before the terminal parenthesis. 151 | # 152 | # - true: There is single space associated with the enclosing 153 | # parenthesis. 154 | # 155 | # > import Data.Foo ( foo ) 156 | # 157 | # - false: There is no space associated with the enclosing parenthesis 158 | # 159 | # > import Data.Foo (foo) 160 | # 161 | # Default: false 162 | space_surround: false 163 | 164 | # Language pragmas 165 | - language_pragmas: 166 | # We can generate different styles of language pragma lists. 167 | # 168 | # - vertical: Vertical-spaced language pragmas, one per line. 169 | # 170 | # - compact: A more compact style. 171 | # 172 | # - compact_line: Similar to compact, but wrap each line with 173 | # `{-#LANGUAGE #-}'. 174 | # 175 | # Default: vertical. 176 | style: vertical 177 | 178 | # Align affects alignment of closing pragma brackets. 179 | # 180 | # - true: Brackets are aligned in same column. 181 | # 182 | # - false: Brackets are not aligned together. 
There is only one space 183 | # between actual import and closing bracket. 184 | # 185 | # Default: true 186 | align: false 187 | 188 | # stylish-haskell can detect redundancy of some language pragmas. If this 189 | # is set to true, it will remove those redundant pragmas. Default: true. 190 | remove_redundant: true 191 | 192 | # Replace tabs by spaces. This is disabled by default. 193 | - tabs: 194 | # Number of spaces to use for each tab. Default: 8, as specified by the 195 | # Haskell report. 196 | spaces: 4 197 | 198 | # Remove trailing whitespace 199 | - trailing_whitespace: {} 200 | 201 | # A common setting is the number of columns (parts of) code will be wrapped 202 | # to. Different steps take this into account. Default: 80. 203 | columns: 100 204 | 205 | # By default, line endings are converted according to the OS. You can override 206 | # preferred format here. 207 | # 208 | # - native: Native newline format. CRLF on Windows, LF on other OSes. 209 | # 210 | # - lf: Convert to LF ("\n"). 211 | # 212 | # - crlf: Convert to CRLF ("\r\n"). 213 | # 214 | # Default: native. 215 | newline: lf 216 | 217 | # Sometimes, language extensions are specified in a cabal file or from the 218 | # command line instead of using language pragmas in the file. stylish-haskell 219 | # needs to be aware of these, so it can parse the file correctly. 220 | # 221 | # No language extensions are enabled by default. 
222 | language_extensions: 223 | # StylishHaskell needs these extensions even when GHC does not in some cases 224 | - MultiParamTypeClasses 225 | - TemplateHaskell 226 | - FlexibleContexts 227 | - ExistentialQuantification 228 | -------------------------------------------------------------------------------- /cabal.project.freeze: -------------------------------------------------------------------------------- 1 | active-repositories: hackage.haskell.org:merge 2 | constraints: any.HUnit ==1.6.2.0, 3 | any.ListLike ==4.7.8.2, 4 | any.OneTuple ==0.4.1.1, 5 | any.QuickCheck ==2.14.3, 6 | any.StateVar ==1.2.2, 7 | any.aeson ==2.1.2.1, 8 | any.aeson-pretty ==0.8.10, 9 | any.ansi-terminal ==1.0.2, 10 | any.ansi-terminal-types ==0.11.5, 11 | any.appar ==0.1.8, 12 | any.array ==0.5.6.0, 13 | any.asn1-encoding ==0.9.6, 14 | any.asn1-parse ==0.9.5, 15 | any.asn1-types ==0.3.4, 16 | any.assoc ==1.1, 17 | any.async ==2.2.5, 18 | any.atomic-primops ==0.8.5, 19 | any.attoparsec ==0.14.4, 20 | any.attoparsec-aeson ==2.1.0.0, 21 | any.attoparsec-iso8601 ==1.1.0.1, 22 | any.auto-update ==0.1.6, 23 | any.base ==4.18.2.1, 24 | any.base-compat ==0.13.1, 25 | any.base-compat-batteries ==0.13.1, 26 | any.base-orphans ==0.9.1, 27 | any.base16-bytestring ==1.0.2.0, 28 | any.base64-bytestring ==1.2.1.0, 29 | any.basement ==0.0.16, 30 | any.bifunctors ==5.6.2, 31 | any.binary ==0.8.9.1, 32 | any.binary-instances ==1.0.4, 33 | any.binary-orphans ==1.0.4.1, 34 | any.bitvec ==1.1.5.0, 35 | any.blaze-builder ==0.4.2.3, 36 | any.blaze-html ==0.9.2.0, 37 | any.blaze-markup ==0.8.3.0, 38 | any.bsb-http-chunked ==0.0.0.4, 39 | any.byteorder ==1.0.4, 40 | any.bytestring ==0.11.5.3, 41 | any.call-stack ==0.4.0, 42 | any.case-insensitive ==1.2.1.0, 43 | any.cereal ==0.5.8.3, 44 | any.clock ==0.8.4, 45 | any.colour ==2.3.6, 46 | any.comonad ==5.0.8, 47 | any.conduit ==1.3.5, 48 | any.conduit-extra ==1.3.6, 49 | any.containers ==0.6.7, 50 | any.contravariant ==1.5.5, 51 | any.cookie ==0.4.6, 52 | 
any.cryptohash-md5 ==0.11.101.0, 53 | any.cryptohash-sha1 ==0.11.101.0, 54 | any.crypton ==0.34, 55 | any.crypton-connection ==0.3.2, 56 | any.crypton-x509 ==1.7.6, 57 | any.crypton-x509-store ==1.6.9, 58 | any.crypton-x509-system ==1.6.7, 59 | any.crypton-x509-validation ==1.6.12, 60 | any.cryptonite ==0.30, 61 | any.data-default ==0.7.1.1, 62 | any.data-default-class ==0.1.2.0, 63 | any.data-default-instances-containers ==0.0.1, 64 | any.data-default-instances-dlist ==0.0.1, 65 | any.data-default-instances-old-locale ==0.0.1, 66 | any.data-fix ==0.3.2, 67 | any.data-sketches ==0.3.1.0, 68 | any.data-sketches-core ==0.1.0.0, 69 | any.deepseq ==1.4.8.1, 70 | any.deepseq-generics ==0.2.0.0, 71 | any.directory ==1.3.8.4, 72 | any.distributive ==0.6.2.1, 73 | any.dlist ==1.0, 74 | any.double-conversion ==2.0.5.0, 75 | any.easy-file ==0.2.5, 76 | any.effectful ==2.3.0.0, 77 | any.effectful-core ==2.3.0.1, 78 | any.entropy ==0.4.1.10, 79 | any.errors ==2.3.0, 80 | any.exceptions ==0.10.7, 81 | any.extra ==1.7.14, 82 | any.fast-logger ==3.2.2, 83 | any.file-embed ==0.0.16.0, 84 | any.filepath ==1.4.300.1, 85 | any.fmlist ==0.9.4, 86 | any.generic-arbitrary ==1.0.1, 87 | any.generic-deriving ==1.14.5, 88 | any.generically ==0.1.1, 89 | any.ghc-bignum ==1.3, 90 | any.ghc-boot-th ==9.6.5, 91 | any.ghc-prim ==0.10.0, 92 | any.github ==0.29, 93 | any.hashable ==1.4.4.0, 94 | any.haskell-lexer ==1.1.1, 95 | any.hourglass ==0.2.12, 96 | any.hspec ==2.11.7, 97 | any.hspec-core ==2.11.7, 98 | any.hspec-discover ==2.11.7, 99 | any.hspec-expectations ==0.8.4, 100 | any.http-api-data ==0.5.1, 101 | any.http-client ==0.7.17, 102 | any.http-client-tls ==0.3.6.3, 103 | any.http-conduit ==2.3.8.3, 104 | any.http-date ==0.0.11, 105 | any.http-link-header ==1.2.1, 106 | any.http-types ==0.12.4, 107 | any.http2 ==5.0.1, 108 | any.indexed-traversable ==0.1.3, 109 | any.indexed-traversable-instances ==0.1.1.2, 110 | any.integer-conversion ==0.1.0.1, 111 | any.integer-gmp ==1.1, 112 | 
any.integer-logarithms ==1.0.3.1, 113 | any.iproute ==1.7.12, 114 | any.iso8601-time ==0.1.5, 115 | any.lifted-base ==0.2.3.12, 116 | any.math-functions ==0.3.4.4, 117 | any.megaparsec ==9.5.0, 118 | any.memory ==0.18.0, 119 | any.mime-types ==0.1.2.0, 120 | any.monad-control ==1.0.3.1, 121 | any.monad-logger ==0.3.40, 122 | any.monad-loops ==0.4.3, 123 | any.mono-traversable ==1.0.17.0, 124 | any.mtl ==2.3.1, 125 | any.mwc-random ==0.15.0.2, 126 | any.network ==3.1.4.0, 127 | any.network-byte-order ==0.1.7, 128 | any.network-control ==0.0.2, 129 | any.network-info ==0.2.1, 130 | any.network-uri ==2.6.4.2, 131 | any.old-locale ==1.0.0.7, 132 | any.old-time ==1.1.0.4, 133 | any.optparse-applicative ==0.18.1.0, 134 | any.os-string ==2.0.2, 135 | any.parsec ==3.1.16.1, 136 | any.parser-combinators ==1.3.0, 137 | any.pem ==0.2.4, 138 | any.pretty ==1.1.3.6, 139 | any.prettyprinter ==1.7.1, 140 | any.prettyprinter-ansi-terminal ==1.1.3, 141 | any.primitive ==0.8.0.0, 142 | any.process ==1.6.19.0, 143 | any.process-extras ==0.7.4, 144 | any.prometheus-client ==1.1.1, 145 | any.prometheus-metrics-ghc ==1.0.1.2, 146 | any.psqueues ==0.2.8.0, 147 | any.quickcheck-instances ==0.3.30, 148 | any.quickcheck-io ==0.2.0, 149 | any.random ==1.2.1.2, 150 | any.recv ==0.1.0, 151 | any.regex-base ==0.94.0.2, 152 | any.regex-compat ==0.95.2.1, 153 | any.regex-posix ==0.96.0.1, 154 | any.resourcet ==1.3.0, 155 | any.rts ==1.0.2, 156 | any.safe ==0.3.21, 157 | any.safe-exceptions ==0.1.7.4, 158 | any.scientific ==0.3.7.0, 159 | any.scotty ==0.20.1, 160 | any.semialign ==1.3, 161 | any.semigroupoids ==6.0.0.1, 162 | any.simple-sendfile ==0.2.32, 163 | any.socks ==0.6.1, 164 | any.split ==0.2.5, 165 | any.splitmix ==0.1.0.5, 166 | any.stm ==2.5.1.0, 167 | any.stm-chans ==3.0.0.9, 168 | any.streaming-commons ==0.2.2.6, 169 | any.strict ==0.5, 170 | any.system-cxx-std-lib ==1.0, 171 | any.tagged ==0.8.8, 172 | any.template-haskell ==2.20.0.0, 173 | any.text ==2.0.2, 174 | any.text-binary 
==0.2.1.1, 175 | any.text-format ==0.3.2.1, 176 | any.text-short ==0.1.5, 177 | any.tf-random ==0.5, 178 | any.th-abstraction ==0.5.0.0, 179 | any.th-compat ==0.1.5, 180 | any.these ==1.2, 181 | any.time ==1.12.2, 182 | any.time-compat ==1.9.6.1, 183 | any.time-manager ==0.0.1, 184 | any.tls ==1.8.0, 185 | any.tls-session-manager ==0.0.4, 186 | any.transformers ==0.6.1.0, 187 | any.transformers-base ==0.4.6, 188 | any.transformers-compat ==0.7.2, 189 | any.typed-process ==0.2.11.1, 190 | any.unix ==2.8.4.0, 191 | any.unix-compat ==0.7.1, 192 | any.unix-time ==0.4.12, 193 | any.unliftio ==0.2.25.0, 194 | any.unliftio-core ==0.2.1.0, 195 | any.unordered-containers ==0.2.20, 196 | any.utf8-string ==1.0.2, 197 | any.uuid ==1.3.15, 198 | any.uuid-types ==1.0.5.1, 199 | any.vault ==0.3.1.5, 200 | any.vector ==0.13.1.0, 201 | any.vector-algorithms ==0.9.0.1, 202 | any.vector-binary-instances ==0.2.5.2, 203 | any.vector-stream ==0.1.0.1, 204 | any.wai ==3.2.4, 205 | any.wai-extra ==3.1.14, 206 | any.wai-logger ==2.4.0, 207 | any.wai-middleware-prometheus ==1.0.0.1, 208 | any.warp ==3.3.31, 209 | any.warp-tls ==3.4.4, 210 | any.witherable ==0.4.2, 211 | any.word8 ==0.1.3, 212 | any.zlib ==0.6.3.0 213 | index-state: hackage.haskell.org 2024-04-26T10:39:31Z 214 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | # Optionally put the specified nix version of the package in the environment 2 | { environment ? 
"shell" }: 3 | let 4 | pkgs = import ./nix/nixpkgs-pinned.nix { }; 5 | 6 | defaultEnv = pkgs.haskellPackages.shellFor { 7 | packages = p: [ p.hoff ]; 8 | 9 | buildInputs = [ 10 | pkgs.dia 11 | pkgs.dpkg 12 | pkgs.git 13 | pkgs.niv 14 | pkgs.shellcheck 15 | 16 | pkgs.haskellPackages.cabal-install 17 | pkgs.haskellPackages.haskell-language-server 18 | pkgs.haskellPackages.implicit-hie 19 | pkgs.haskellPackages.stylish-haskell 20 | ]; 21 | 22 | withHoogle = true; 23 | }; 24 | 25 | environments = { shell = defaultEnv; }; 26 | in environments."${environment}" 27 | -------------------------------------------------------------------------------- /doc/background.md: -------------------------------------------------------------------------------- 1 | # Background 2 | 3 | I’ve worked on several codebases with several source control systems. Big 4 | repositories with more than a thousand active developers and ~300 commits per 5 | day. Small repositories where all developers are in the same room. My personal 6 | projects, where I am the sole developer apart from the occasional pull request. 7 | I’ve worked in codebases with sophisticated review systems and testing 8 | infrastructure. I’ve worked in codebases where there was no code review at all, 9 | and no continuous integration. (Sometimes not even tests.) No one size fits all, 10 | though some workflows are definitely better than others. Here I outline some of 11 | my thoughts on collaborating on a software system. 12 | 13 | What I am proposing here is a system that manages source control, code review, 14 | and continuous integration. I don’t think there is a name for such a system yet. 15 | I’ll use the term *Collaboration System* (CS) henceforth. 16 | 17 | ## The Not Rocket Science Principle 18 | 19 | I firmly believe in the [Not Rocket Science Principle][not-rocket-science]: 20 | 21 | > Automatically maintain a repository of code that always passes all the tests. 
22 | 23 | This implies that you have a test suite in the first place. While I am generally 24 | sceptic about tests (I think a strong type system can be much more valuable than 25 | a big test suite) -- and I certainly don’t believe in test driven development -- 26 | I do believe in tests. Ideally, a passing test suite should give you enough 27 | confidence to deploy or release the product. 28 | 29 | In terms of source control (I’ll assume Git here), this means that the master 30 | branch should always pass all the tests. You should always be confident to 31 | deploy or make a release from master. If this is to be automatically enforced, 32 | that means no pushing to master. Which raises the question: how do commits end 33 | up in master? 34 | 35 | [not-rocket-science]: https://graydon2.dreamwidth.org/1597.html 36 | 37 | ## Proposing changes 38 | 39 | In a distributed world, it is obvious how changes are developed: you pull from 40 | master, and build your changes on top. Whether locally you do that in one or 41 | more feature branches, or directly on top of master, is irrelevant. In the end, 42 | your proposal is a single commit. Its chain of parents eventually points to some 43 | commit in the master branch: the point where you branched off. 44 | 45 | The collaboration system takes such a proposed commit as input. Possibly it 46 | waits for the changes to be approved by a reviewer. It integrates the changes 47 | with the current master branch, runs the tests, and if they pass, forwards 48 | master to the new version. This raises two new questions: how are reviews 49 | handled, and how are changes integrated into master? 50 | 51 | ## Code review policy 52 | 53 | Code review is intimately related to trust. Different projects require different 54 | approaches here. When the people you work with are all in the same room, you 55 | probably trust them. If there is an issue you can just walk up to them and ask. 
56 | In a big project, you might not know all of your colleagues. You might not even 57 | live on the same continent. Some kind of trust hierarchy is needed there. If you 58 | allow contributions from outside (e.g. pull requests on GitHub), you don’t want 59 | any random person on the internet to be able to make changes without review. 60 | 61 | For a small project where trust is high, a simple full access model can be 62 | sufficient. Ask for a review when it makes sense, but don’t waste valuable 63 | developer time by asking somebody to review your whitespace fixes. Perhaps even 64 | significant code is not always reviewed. (I’ve had an employer who thought it 65 | was too expensive to have two programmers look at the same code. While I 66 | disagree, the decision was not mine to make.) The fact that malicious code can 67 | be pushed without review is not an issue when trust is high. 68 | 69 | Sometimes all you care about is that *somebody* apart from the author looked at 70 | the code. For mission-critical code, you might require a review from multiple 71 | designated reviewers. There are countless policies out there. It depends on the 72 | project which ones are suitable. 73 | 74 | Trust and code review are matters of policy, not technology. The collaboration 75 | system doesn’t care. It must be flexible enough to support various policies, but 76 | ultimately all it cares about is a valid LGTM stamp on a proposed change. 77 | 78 | ## Integrating changes and the history 79 | 80 | (This section is a work in progress.) Keep the history clean. That probably 81 | means linear, so rebase. Enforce commit message format. (It is trivial to do if 82 | changes are guarded, so why not?) Every commit should at least compile, but 83 | perhaps it need not pass all the tests. 84 | 85 | ## Dependent changes 86 | 87 | (This section is a work in progress.) This is where all tools I know of fall 88 | short. 
Rietveld has dependent patchsets, but the local Git workflow is still a 89 | mess. Changes are a DAG, and every change might consist of multiple commits. If 90 | feature B depends on A, and the reviewer for A asked you to make a change on top 91 | of A, then now you need to rebase B. With a moderately complex dependency graph 92 | this becomes a mess. And Git doesn’t track dependencies. (This is the single 93 | point where the Team Foundation branching model -- which seemed ridiculous to me 94 | at first -- has an advantage over Git.) Need a tool that can track dependencies 95 | in the review (unlike GitHub, which just shows you the diff including 96 | dependencies), but also issue the right Git rebase commands. 97 | 98 | ## The repository 99 | 100 | (This section is a work in progress.) 101 | 102 | * The set of projects that you wish to be able to make atomic changes 103 | to should live in the same repository. Even if they are decoupled from a code 104 | point of view (i.e. you can build and test them independently), they could 105 | still be coupled via e.g. a REST API, dependence on a database schema, or 106 | using a specific file format. 107 | 108 | * Creates tension with throughput. Test *every* project again if you only made 109 | a change to one? 110 | 111 | ## Similar projects and further reading 112 | 113 | There exists lots of software that deals with code review and continuous 114 | integration already, but no existing project offered all of the things I wanted. 115 | Below are some of the projects that inspired this project: 116 | 117 | * [Rietveld][rietveld], a great code review tool. It is mature and has 118 | many features, but it can only deal with dependent patchsets in a limited 119 | way, and it does not enforce the Not Rocket Science Principle. It creates a 120 | linear history, but it requires custom tools to interact with it, different 121 | from normal Git workflows. Rietveld was itself based on [Mondrian][mondrian].
122 | * [Gerrit][gerrit], a code review tool for Git based on Rietveld. It can rebase 123 | proposed changes, but as far as I am aware, it cannot enforce the Not Rocket 124 | Science Principle. 125 | * [Iron][iron], a code review and release management tool. It was released as 126 | source-available by Jane Street, with an interesting trilogy of blog posts 127 | ([I][iron-i], [II][iron-ii], [III][iron-iii]), but unfortunately it lacks any 128 | further documentation. I have no clue how to build or use it. The blog posts 129 | are a good read nevertheless. 130 | * [Bors][bors], a bot for GitHub that enforces the Not Rocket Science 131 | Principle, written for the Rust project. Graydon’s [post about the Not Rocket 132 | Science Principle][not-rocket-science] provides a bit of background. Bors is 133 | not a code review tool, it only handles gating commits on test results. Its 134 | integration strategy is to do a merge, which creates an ugly history. The 135 | original implementation of Bors did not scale very well, so a more robust 136 | rewrite called [Homu][homu] was created. 137 | * [Zuul][zuul], a commit queue that speculatively starts builds for changes to 138 | be integrated after changes that are being tested. When builds usually pass, 139 | this allows for higher throughput.
140 | 141 | [bors]: https://github.com/graydon/bors 142 | [gerrit]: https://www.gerritcodereview.com/ 143 | [homu]: https://github.com/servo/homu 144 | [iron-i]: https://blogs.janestreet.com/code-review-that-isnt-boring/ 145 | [iron-ii]: https://blogs.janestreet.com/scrutinizing-your-code-in-style/ 146 | [iron-iii]: https://blogs.janestreet.com/ironing-out-your-release-process/ 147 | [iron]: https://github.com/janestreet/iron 148 | [mondrian]: https://www.youtube.com/watch?v=sMql3Di4Kgc 149 | [rietveld]: https://github.com/rietveld-codereview/rietveld 150 | [zuul]: https://zuul-ci.org/ 151 | -------------------------------------------------------------------------------- /doc/comments.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/channable/hoff/eeb4a9ffdef9cc31b302c2fa04605f7c94c0412b/doc/comments.png -------------------------------------------------------------------------------- /doc/example-dev-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "projects": [{ 3 | "owner": "USERNAME-OR-ORGANIZATION", 4 | "repository": "git-sandbox", 5 | "branch": "master", 6 | "testBranch": "testing", 7 | "checkout": "./run/checkouts/git-sandbox", 8 | "stateFile": "./run/state/git-sandbox.json", 9 | "checks": { 10 | "mandatory": [] 11 | }, 12 | "deployEnvironments": [ 13 | "staging", 14 | "production" 15 | ], 16 | "deploySubprojects": [ 17 | "staging", 18 | "production" 19 | ], 20 | "safeForFriday": true 21 | }], 22 | "secret": "REPLACE with output of 'head --bytes 32 /dev/urandom | base64'", 23 | "accessToken": "REPLACE with a new personal access token from https://github.com/settings/tokens", 24 | "port": 1979, 25 | "tls": null, 26 | "user": { 27 | "name": "Dev Hoffbot", 28 | "email": "hoffbot@example.com", 29 | "sshConfigFile": "/dev/null" 30 | }, 31 | "mergeWindowExemption": ["hoffbot"], 32 | "trigger": { 33 | "commentPrefix": "@hoffbot" 34 | }, 35 | 
"metrics": { 36 | "metricsPort": 3333, 37 | "metricsHost": "*" 38 | }, 39 | "featureFreezeWindow": { 40 | "start": "2023-01-01T00:00:00Z", 41 | "end": "2023-01-07T00:00:00Z" 42 | }, 43 | "timeouts": { 44 | "promotionTimeout": 60, 45 | "rememberTimeout": 600 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /doc/installing.md: -------------------------------------------------------------------------------- 1 | # Installing 2 | 3 | This document details how to install Hoff on your own server. I will be using 4 | a server running Ubuntu 18.04 here. 5 | 6 | The application consists of a single binary that opens a server at a configured 7 | port, and then runs until it is killed. Log messages are written to stdout. This 8 | makes the application work well with systemd. 9 | 10 | ## Building a package 11 | 12 | Enter an environment with development dependencies available through [Nix][nix]: 13 | 14 | $ nix develop --file default.nix 15 | 16 | There is a script to build a Debian package: 17 | 18 | $ cd package 19 | $ ./build-binary.sh 20 | $ VERSION=1 fakeroot ./build-package.sh 21 | 22 | Alternatively, you can build the binary only, and assemble your own package: 23 | 24 | $ cabal build 25 | $ cabal list-bin hoff 26 | 27 | The systemd service file is in `package/hoff.service`. 28 | 29 | ## Installing the package 30 | 31 | On the server, install the package: 32 | 33 | $ sudo dpkg --install hoff_0.0.0-1.deb 34 | 35 | This will do several things: 36 | 37 | * Install the `hoff` binary in `/usr/bin`. 38 | * Create the `hoff` user under which the daemon will run. 39 | * Create an example config file at `/etc/hoff/config.json`. 
40 | 41 | Enable the daemon to start it automatically at boot, and start it now: 42 | 43 | $ sudo systemctl enable hoff 44 | $ sudo systemctl start hoff 45 | 46 | Verify that everything is up and running: 47 | 48 | $ sudo systemctl status hoff 49 | 50 | ## Setting up the user 51 | 52 | The systemd service file included runs Hoff as the `hoff` user. The Debian 53 | package creates it, but we need to do some further setup for files owned by this 54 | user. You can also add the user manually: 55 | 56 | $ sudo useradd --system --user-group --no-create-home hoff 57 | 58 | The application needs a key pair to connect to GitHub. Because the `hoff` system 59 | user has no home directory, we will put it in `/etc/hoff` instead. The Debian 60 | package creates that directory, but the `hoff` user has no write access in it, 61 | so we create the files with the right owner before calling `ssh-keygen`. 62 | 63 | $ sudo touch /etc/hoff/id_ed25519{,.pub} 64 | $ sudo chown hoff:hoff /etc/hoff/id_ed25519{,.pub} 65 | $ sudo --user hoff ssh-keygen -t ed25519 -f /etc/hoff/id_ed25519 66 | $ sudo chmod u=rw,g=,o= /etc/hoff/id_ed25519 67 | $ sudo chmod u=rw,g=r,o=r /etc/hoff/id_ed25519.pub 68 | 69 | Leave the passphrase empty to allow the key to be used without human 70 | interaction. To tell SSH where the key is, we also create an SSH config file: 71 | 72 | $ echo "IdentitiesOnly yes" | sudo tee --append /etc/hoff/ssh_config 73 | $ echo "IdentityFile /etc/hoff/id_ed25519" | sudo tee --append /etc/hoff/ssh_config 74 | $ echo "CheckHostIP no" | sudo tee --append /etc/hoff/ssh_config 75 | $ sudo chown hoff:hoff /etc/hoff/ssh_config 76 | $ sudo chmod u=rw,g=,o= /etc/hoff/ssh_config 77 | 78 | Here we also set `CheckHostIP no`, so SSH does not emit a warning when the IP 79 | address of a host changes. Hoff mounts a file that contains GitHub's public key 80 | at `/etc/ssh/ssh_known_hosts`, so there is no need to accept any 81 | [fingerprints][fingerprints]. 
Because the `ssh_known_hosts` file is readonly, we 82 | can *only* connect to GitHub, and only if the public key that we baked into the 83 | package has not changed. Furthermore, for testing (and also in general) it is 84 | useful to prevent SSH from trying all keys it can find; it should 85 | only use the provided file, so we set `IdentitiesOnly=yes`. 86 | 87 | Finally, we need a GitHub account that will be used for fetching and pushing. I 88 | recommend creating a separate account for this purpose. On GitHub, add the 89 | public key to the new account. Paste the output of `cat /etc/hoff/id_ed25519.pub` 90 | into the key field under “SSH and GPG keys”. 91 | 92 | ## Setting up directories 93 | 94 | Hoff writes two things to the file system per configured repository: 95 | 96 | * A checkout of the repository. 97 | * A state file, to persist the internal state (open issues, etc.). 98 | 99 | A good place to store these is in `/var/lib/hoff`. The Debian package creates a 100 | `checkouts` and a `state` subdirectory there, owned by the `hoff` user so it can 101 | create files and subdirectories. We could also create them manually: 102 | 103 | $ sudo mkdir --parents /var/lib/hoff/{checkouts,state} 104 | $ sudo chown hoff:hoff /var/lib/hoff/{checkouts,state} 105 | 106 | ## Adding a repository 107 | 108 | To add a repository, we need to add an entry to the config file. I’ll be using 109 | the repository `ruuda/bogus` in this example. Add this to the `projects` key in 110 | the config file (e.g. with `sudo --edit /etc/hoff/config.json`): 111 | 112 | { 113 | "owner": "ruuda", 114 | "repository": "bogus", 115 | "branch": "master", 116 | "testBranch": "testing", 117 | "checkout": "/var/lib/hoff/checkouts/ruuda/bogus", 118 | "stateFile": "/var/lib/hoff/state/ruuda/bogus.json" 119 | } 120 | 121 | The meaning of the fields is as follows: 122 | 123 | * *Owner*: The GitHub user or organization that owns the repository. 124 | In my case `ruuda`.
125 | * *Repository*: The GitHub repository to manage. In my case `bogus`. 126 | * *Branch*: The branch to integrate changes into. `master` in most cases. 127 | * *TestBranch*: The branch that changes are pushed to to trigger a CI build. 128 | The application will force-push to this branch, so it should not be used for 129 | other purposes. I used `testing`. 130 | * *Checkout*: The full path to the checkout. 131 | * *StateFile*: The path to the file where the daemon saves its state, so it 132 | can remember the set of open pull requests across restarts. TODO: urge to 133 | back up this file regularly. 134 | 135 | On GitHub, add the bot account to this repository as a collaborator, to give it 136 | push access (and pull access in the case of a private repository). Note that 137 | after adding the bot as a collaborator, you need to accept the invitation from 138 | the bot account. (TODO: automate this via the API.) 139 | 140 | When Hoff starts, it will clone the repository if it does not yet exist. It also 141 | creates the state file if it does not exist. 142 | 143 | ## Global configuration 144 | 145 | There are a few global options in the config file too: 146 | 147 | * *Secret*: The secret used to verify the authenticity of GitHub webhooks. 148 | You can run `head --bytes 32 /dev/urandom | base64` to generate a secure 149 | 256-bit secret that doesn’t require any character to be escaped in the json 150 | file. 151 | * *AccessToken*: A GitHub API access token for the bot user. This is used to 152 | leave comments on behalf of the bots. 153 | * *Port*: The port at which the webhook server is exposed. The systemd unit 154 | ensures that the daemon has permissions to run on privileged ports (such as 155 | 80 and 443) without having to run as root. 156 | * *TLS*: Can be used to make the server serve https instead of insecure http. 157 | See the [TLS guide](tls.md) for more details. Set to `null` to disable TLS.
158 | * *Trigger.commentPrefix*: Specifies the prefix that makes Hoff interpret a 159 | comment as a command directed at it. Setting this to the username of the bot 160 | account makes for natural conversations on GitHub, but a different prefix 161 | could be used too. In my case, I set it to `@hoffbot`. 162 | 163 | Finally, there is some Git config for the bot user, under the *user* key. *Name* 164 | and *email* are used for the Git committer metadata. *SshConfigFile* should 165 | point to `/etc/hoff/ssh_config` as [created previously](#setting-up-the-user). 166 | 167 | Restart the daemon to pick up the new configuration, and verify that it started 168 | properly: 169 | 170 | $ sudo systemctl restart hoff 171 | $ sudo systemctl status hoff 172 | 173 | ## Setting up webhooks 174 | 175 | On GitHub, go to the repository settings and add a new webhook. The payload url 176 | should be `http://yourserver.com/hook/github`, with content type 177 | application/json. Enter the secret generated in the previous section, and select 178 | the following events to be delivered: 179 | 180 | * *Pull request*, to make the daemon aware of new or closed pull requests. 181 | * *Issue comment*, to listen for LGTM stamps. 182 | * *Pull request reviews*, to listen for LGTM stamps in review summaries. 183 | * *Status*, to get updates on the build status from a linked CI service. 184 | 185 | GitHub will deliver a ping event, and if everything is okay a green checkmark 186 | will appear in the list of configured webhooks. On the server, we can see that 187 | the webhook was received: 188 | 189 | $ sudo journalctl --pager-end --unit hoff 190 | > ... 191 | > Sep 04 21:37:41 hoffbuild hoff[2860]: [Debug] github loop received event: Ping 192 | 193 | That’s it! You can now open a pull request and leave an LGTM comment to see the 194 | application in action. Remember to also set up a CI service like Travis CI to 195 | provide the build status updates. 
196 | 197 | [fingerprints]: https://help.github.com/articles/github-s-ssh-key-fingerprints/ 198 | [nix]: https://nixos.org/nix 199 | -------------------------------------------------------------------------------- /doc/merge-train-delay.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | PR#1 building... 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | . . . t i m e . . . 18 | 19 | 20 | push 21 | new master 22 | 23 | 24 | rebase 25 | & merge 26 | 27 | 28 | 29 | 30 | 31 | 32 | #1 33 | @hoff merge 34 | 35 | 36 | 37 | 38 | 39 | 40 | #2 41 | @hoff merge 42 | 43 | 44 | 45 | 46 | 47 | 48 | #3 49 | @hoff merge 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | PR#2 building... 58 | 59 | 60 | 61 | push 62 | new master 63 | 64 | 65 | speculative 66 | rebase & merge 67 | 68 | 69 | 70 | 71 | 72 | 73 | PR#3 building... 74 | 75 | 76 | 77 | push 78 | new master 79 | 80 | 81 | speculative 82 | rebase & merge 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | ... wait... 108 | 109 | 110 | 111 | 112 | 113 | 114 | before! 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | after 123 | 124 | 125 | (no wait) 126 | 127 | 128 | 129 | -------------------------------------------------------------------------------- /doc/merge-train.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | PR#1 building... 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | . . . t i m e . . . 18 | 19 | 20 | push 21 | new master 22 | 23 | 24 | rebase 25 | & merge 26 | 27 | 28 | 29 | 30 | 31 | 32 | #1 33 | @hoff merge 34 | 35 | 36 | 37 | 38 | 39 | 40 | #2 41 | @hoff merge 42 | 43 | 44 | 45 | 46 | 47 | 48 | #3 49 | @hoff merge 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | PR#2 building... 58 | 59 | 60 | 61 | push 62 | new master 63 | 64 | 65 | speculative 66 | rebase & merge 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | PR#3 building... 
75 | 76 | 77 | 78 | push 79 | new master 80 | 81 | 82 | speculative 83 | rebase & merge 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | -------------------------------------------------------------------------------- /doc/name.md: -------------------------------------------------------------------------------- 1 | # Name 2 | 3 | To respect the tradition of naming code review systems after members of the 4 | De Stijl movement, Hoff is named after Robert van ’t Hoff, a Dutch architect. 5 | 6 | Hoff is a merge bot, not a code review system, but the original idea had a 7 | broader scope than just being a merge bot. 8 | -------------------------------------------------------------------------------- /doc/no-merge-train.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | PR#1 building... 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | . . . t i m e . . . 18 | 19 | 20 | push 21 | new master 22 | 23 | 24 | rebase 25 | & merge 26 | 27 | 28 | 29 | 30 | 31 | 32 | #1 33 | @hoff merge 34 | 35 | 36 | 37 | 38 | 39 | 40 | #2 41 | @hoff merge 42 | 43 | 44 | 45 | 46 | 47 | 48 | #3 49 | @hoff merge 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | PR#2 building... 58 | 59 | 60 | 61 | push 62 | new master 63 | 64 | 65 | rebase 66 | & merge 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | PR#3 building... 75 | 76 | 77 | 78 | push 79 | new master 80 | 81 | 82 | rebase 83 | & merge 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | ... wait... 96 | 97 | 98 | 99 | 100 | 101 | 102 | ... wait... 
103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | -------------------------------------------------------------------------------- /doc/performance.md: -------------------------------------------------------------------------------- 1 | # Performance 2 | 3 | What kind of performance should I be aiming for, for this to be usable in a big 4 | project? Let’s assume a project that approves roughly 300 changes per day, where 5 | every proposed change involves about 30 events. Events can be pushes, build 6 | status notifications, comments, and opening or closing a proposal. (These 7 | numbers are in the ballpark of the main repository of the Chromium project, 8 | although the number of events is just an estimate.) **So let’s define a 9 | *big project* as a project that generates 9000 events per day.** 10 | 11 | Even for an international effort, there will be peak hours. As a very rough 12 | estimate (I don’t have any actual data), the 80/20 rule dictates that 80% of the 13 | events happen in 20% of the time. That is 7200 events in 288 minutes during peak 14 | hours, 25 events per minute. 15 | 16 | Assume events are generated by a Poisson process with a rate of 25 events per 17 | minute. We will compute the interval _x_, such that with 99% probability the 18 | time between consecutive events is larger than _x_. In R: 19 | 20 | > qexp(0.01, rate = 25/60) 21 | [1] 0.02412081 22 | 23 | That gives us about 24 milliseconds to respond to a request when running on a 24 | single core. Responding to a request might involve enqueueing some work to handle 25 | the event, and actually doing that work may take longer, as long as we can 26 | handle a sustained load of 25 events per minute. If handling the event involves 27 | doing a Git pull, it can take a few seconds, but to decide what to do with a 28 | comment, 24 milliseconds should be plenty.
29 | 30 | In my last benchmark, the server could handle requests at 1.96 ± 1.5 ms per 31 | request on a single core, so no blockers there. 32 | -------------------------------------------------------------------------------- /doc/tls.md: -------------------------------------------------------------------------------- 1 | # TLS 2 | 3 | Hoff has a built-in webserver ([Warp][warp]) that can either serve http or 4 | https. To enable https, point `tls.keyFile` and `tls.certFile` to the right 5 | key and certificate file in the configuration file. 6 | 7 | [warp]: https://hackage.haskell.org/package/warp 8 | 9 | ## Self-signed 10 | 11 | For local testing purposes, you can generate a self-signed certificate: 12 | 13 | openssl genrsa -out key.pem 2048 14 | openssl req -new -key key.pem -out certificate.csr 15 | openssl x509 -req -in certificate.csr -signkey key.pem -out certificate.pem 16 | 17 | ## Let’s Encrypt 18 | 19 | [Let’s Encrypt][letsencrypt] is a certificate authority that can be used in a 20 | fully automated way in production. The [Certbot][certbot] client works well in 21 | combination with Hoff. In this section a server running Ubuntu 16.04 is assumed. 22 | 23 | First install Certbot: 24 | 25 | $ sudo apt install letsencrypt 26 | 27 | We will be using the “standalone” mode of Certbot. This mode temporarily runs a 28 | webserver when a certificate is requested or renewed, so it cannot run while 29 | Hoff itself is running. Fortunately Certbot can execute commands to stop and 30 | start other services before and after renewing a certificate, so the entire 31 | renewal process can be automated. Only the initial request needs to be done 32 | manually. 33 | 34 | Stop Hoff because Certbot will need access to the same ports: 35 | 36 | $ sudo systemctl stop hoff 37 | 38 | Next we request an initial certificate. Note that unfortunately Ubuntu does not 39 | use the standard `certbot` command. Instead the command is called `letsencrypt`.
40 | Don’t forget to open port 443 in your firewall if you had not done so already. 41 | 42 | $ sudo ufw allow 443 43 | $ sudo letsencrypt certonly --standalone 44 | 45 | Update the configuration file to point to the newly generated certificate: 46 | 47 | $ sudo -e /etc/hoff.json 48 | 49 | Make sure to set the appropriate values for `tls`, and set the port to 443. 50 | 51 | ```json 52 | { 53 | "port": 443, 54 | "tls": { 55 | "keyFile": "/etc/letsencrypt/live/example.com/privkey.pem", 56 | "certFile": "/etc/letsencrypt/live/example.com/fullchain.pem" 57 | } 58 | } 59 | ``` 60 | 61 | Before we can start Hoff again, we need to fix the permissions of the 62 | certificate directories. Currently they are accessible only to `root`, but Hoff 63 | runs as the `git` user. To fix this, create a new user group `certaccess`, and 64 | put `git` in this group: 65 | 66 | $ sudo addgroup certaccess 67 | $ sudo usermod --append --groups certaccess git 68 | 69 | Next, change the group owner of the certificate directories to `certaccess`, 70 | and make the directories group-readable and searchable: 71 | 72 | $ sudo chown root:certaccess /etc/letsencrypt/archive 73 | $ sudo chown root:certaccess /etc/letsencrypt/live 74 | $ sudo chmod g+rx /etc/letsencrypt/archive 75 | $ sudo chmod g+rx /etc/letsencrypt/live 76 | 77 | Now we can start Hoff again: 78 | 79 | $ sudo systemctl start hoff 80 | $ sudo systemctl status hoff 81 | 82 | TODO: Write a systemd unit that renews with 83 | 84 | $ certbot renew --pre-hook "systemctl stop hoff" --post-hook "systemctl start hoff" 85 | 86 | [letsencrypt]: https://letsencrypt.org/ 87 | [certbot]: https://certbot.eff.org/ 88 | -------------------------------------------------------------------------------- /doc/web-interface.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/channable/hoff/eeb4a9ffdef9cc31b302c2fa04605f7c94c0412b/doc/web-interface.png 
-------------------------------------------------------------------------------- /hie.yaml: -------------------------------------------------------------------------------- 1 | cradle: 2 | cabal: 3 | - path: "src" 4 | component: "lib:hoff" 5 | - path: "app/Main.hs" 6 | component: "hoff:exe:hoff" 7 | - path: "tests" 8 | component: "hoff:test:spec" 9 | 10 | # These need to be specified manually since they're in the same directory. 11 | # These end-to-end tests should probably be moved to their own directory 12 | # so this isn't needed anymore. 13 | - path: "tests/EndToEnd.hs" 14 | component: "hoff:test:end-to-end" 15 | - path: "tests/EventLoopSpec.hs" 16 | component: "hoff:test:end-to-end" 17 | - path: "tests/ServerSpec.hs" 18 | component: "hoff:test:end-to-end" 19 | -------------------------------------------------------------------------------- /hoff.cabal: -------------------------------------------------------------------------------- 1 | name: hoff 2 | -- please keep version consistent with hoff.nix 3 | version: 0.37.0 4 | category: Development 5 | synopsis: A gatekeeper for your commits 6 | 7 | copyright: Copyright 2016 Ruud van Asseldonk 8 | license: Apache-2.0 9 | license-file: license 10 | 11 | author: Ruud van Asseldonk 12 | maintainer: dev@veniogames.com 13 | 14 | build-type: Simple 15 | extra-source-files: readme.md 16 | cabal-version: >=1.10 17 | 18 | library 19 | default-language: Haskell2010 20 | ghc-options: -Wall -Werror -Wincomplete-uni-patterns -Wincomplete-record-updates -fno-ignore-asserts 21 | hs-source-dirs: src 22 | exposed-modules: ClockTickLoop 23 | , Configuration 24 | , EventLoop 25 | , Format 26 | , Git 27 | , Github 28 | , GithubApi 29 | , Logic 30 | , Project 31 | , Server 32 | , Metrics.Server 33 | , Metrics.Metrics 34 | , MonadLoggerEffect 35 | , Parser 36 | , Time 37 | , Types 38 | , WebInterface 39 | 40 | build-depends: aeson 41 | , aeson-pretty 42 | , base 43 | , base16-bytestring 44 | , blaze-html 45 | , blaze-markup 46 | , 
bytestring 47 | , containers 48 | , cryptonite 49 | , directory 50 | , effectful 51 | , extra 52 | , file-embed 53 | , filepath 54 | , github 55 | , http-client 56 | , http-types 57 | , megaparsec 58 | , memory 59 | , monad-logger 60 | , process 61 | , process-extras 62 | , prometheus-client 63 | , prometheus-metrics-ghc 64 | , scotty 65 | , stm 66 | , text 67 | , text-format 68 | , time 69 | , vector 70 | , wai 71 | , wai-middleware-prometheus 72 | , warp 73 | , warp-tls 74 | other-modules: Paths_hoff 75 | autogen-modules: Paths_hoff 76 | 77 | executable hoff 78 | default-language: Haskell2010 79 | main-is: Main.hs 80 | hs-source-dirs: app 81 | ghc-options: -Wall -Werror -Wincomplete-uni-patterns -Wincomplete-record-updates 82 | 83 | build-depends: async 84 | , base 85 | , containers 86 | , directory 87 | , effectful 88 | , github 89 | , hoff 90 | , monad-logger 91 | , optparse-applicative 92 | , text 93 | other-modules: Paths_hoff 94 | autogen-modules: Paths_hoff 95 | 96 | test-suite spec 97 | default-language: Haskell2010 98 | type: exitcode-stdio-1.0 99 | main-is: Spec.hs 100 | other-modules: ParserSpec, ProjectSpec 101 | hs-source-dirs: tests 102 | ghc-options: -Wall -Werror 103 | 104 | build-depends: aeson 105 | , base 106 | , bytestring 107 | , containers 108 | -- TODO: Use the new function that really deletes directories, 109 | -- instead of rolling my own. 
110 | , directory 111 | , effectful 112 | , filepath 113 | , generic-arbitrary 114 | , hoff 115 | , hspec 116 | , hspec-core 117 | , QuickCheck 118 | , quickcheck-instances 119 | , text 120 | , time 121 | , uuid 122 | 123 | test-suite end-to-end 124 | default-language: Haskell2010 125 | type: exitcode-stdio-1.0 126 | main-is: EndToEnd.hs 127 | other-modules: EventLoopSpec, ServerSpec 128 | hs-source-dirs: tests 129 | ghc-options: -Wall -Werror -threaded -rtsopts -with-rtsopts=-N 130 | 131 | build-depends: async 132 | , base 133 | , bytestring 134 | , containers 135 | , cryptonite 136 | , filepath 137 | -- TODO: Use the new function that really deletes directories, 138 | -- instead of rolling my own. 139 | , directory 140 | , effectful 141 | , hoff 142 | , http-conduit 143 | , hspec 144 | , hspec-core 145 | , http-types 146 | , monad-logger 147 | , random 148 | , stm 149 | , text 150 | , time 151 | , uuid 152 | -------------------------------------------------------------------------------- /hoff.nix: -------------------------------------------------------------------------------- 1 | { 2 | # `pkgs` is needed for git. 
Specifying `git` here would use the `git` library 3 | # from Hackage instead 4 | pkgs, mkDerivation 5 | 6 | # Core packages 7 | , coreutils, glibcLocales, lib, makeWrapper, nix-gitignore, openssh 8 | 9 | # Haskell packages 10 | , QuickCheck, aeson, aeson-pretty, blaze-html, blaze-markup, bytestring 11 | , containers, cryptonite, directory, effectful, extra, file-embed, filepath 12 | , generic-arbitrary, github, hspec, hspec-core, http-client, http-conduit 13 | , http-types, megaparsec, memory, monad-logger, optparse-applicative, process 14 | , process-extras, prometheus, prometheus-metrics-ghc, quickcheck-instances 15 | , scotty, stm, text, text-format, time, uuid, vector, wai 16 | , wai-middleware-prometheus, warp, warp-tls }: 17 | mkDerivation { 18 | pname = "hoff"; 19 | version = "0.37.0"; # please keep consistent with hoff.cabal 20 | 21 | src = let 22 | # We do not want to include all files, because that leads to a lot of things 23 | # that nix has to copy to the temporary build directory that we don't want 24 | # to have in there (e.g. the `.dist-newstyle` directory, the `.git` 25 | # directory, etc.) 26 | prefixWhitelist = builtins.map builtins.toString [ 27 | ./app 28 | ./package 29 | ./src 30 | ./static 31 | ./tests 32 | ./hoff.cabal 33 | ./license 34 | ]; 35 | # Compute source based on whitelist 36 | whitelistFilter = path: _type: 37 | lib.any (prefix: lib.hasPrefix prefix path) prefixWhitelist; 38 | gitignore = builtins.readFile ./.gitignore; 39 | gitignoreFilter = 40 | nix-gitignore.gitignoreFilterPure whitelistFilter gitignore ./.; 41 | whitelistedSrc = lib.cleanSourceWith { 42 | src = lib.cleanSource ./.; 43 | filter = gitignoreFilter; 44 | }; 45 | in whitelistedSrc; 46 | 47 | buildTools = [ makeWrapper ]; 48 | 49 | postInstall = '' 50 | # Set LOCALE_ARCHIVE so that glibc can find the locales it needs when running on Ubuntu 51 | # machines. 
52 | wrapProgram $out/bin/hoff --set LOCALE_ARCHIVE ${glibcLocales}/lib/locale/locale-archive 53 | ''; 54 | 55 | isLibrary = false; 56 | isExecutable = true; 57 | 58 | executableToolDepends = [ pkgs.git coreutils openssh ]; 59 | 60 | testDepends = [ pkgs.git coreutils openssh ]; 61 | 62 | libraryHaskellDepends = [ 63 | QuickCheck 64 | aeson 65 | aeson-pretty 66 | blaze-html 67 | blaze-markup 68 | bytestring 69 | containers 70 | cryptonite 71 | directory 72 | effectful 73 | extra 74 | file-embed 75 | filepath 76 | generic-arbitrary 77 | github 78 | hspec 79 | hspec-core 80 | http-client 81 | http-conduit 82 | http-types 83 | megaparsec 84 | memory 85 | monad-logger 86 | optparse-applicative 87 | process 88 | process-extras 89 | prometheus 90 | prometheus-metrics-ghc 91 | quickcheck-instances 92 | scotty 93 | stm 94 | text 95 | text-format 96 | time 97 | uuid 98 | vector 99 | wai 100 | wai-middleware-prometheus 101 | warp 102 | warp-tls 103 | ]; 104 | 105 | homepage = "https://github.com/channable/hoff"; 106 | 107 | license = lib.licenses.asl20; 108 | } 109 | -------------------------------------------------------------------------------- /locale.nix: -------------------------------------------------------------------------------- 1 | let 2 | pkgs = import ./nix/nixpkgs-pinned.nix { }; 3 | # Needed for the locale-archive. 4 | in pkgs.glibcLocales 5 | -------------------------------------------------------------------------------- /nix/haskell-overlay.nix: -------------------------------------------------------------------------------- 1 | { sources ? 
import ./sources.nix, pkgs }: 2 | self: super: { 3 | hoff = self.callPackage ../hoff.nix { }; 4 | 5 | github = 6 | pkgs.haskell.lib.compose.appendPatches 7 | [ 8 | # https://github.com/haskell-github/github/pull/509 9 | (pkgs.fetchpatch { 10 | name = "github.patch"; 11 | url = "https://github.com/haskell-github/github/commit/623105d3987c4bb4e67d48e5ae36a3af97480be9.patch"; 12 | sha256 = "sha256-3zRYnrxg9G+druD8o5iejCnTclxd2eg1V7BAO6USjzo="; 13 | }) 14 | ] 15 | super.github; 16 | } 17 | -------------------------------------------------------------------------------- /nix/nixpkgs-pinned.nix: -------------------------------------------------------------------------------- 1 | # Provide almost the same arguments as the actual nixpkgs. 2 | # This allows us to further configure this nixpkgs instantiation in places where we need it. 3 | { overlays ? [ ] # additional overlays 4 | , config ? { } # Imported configuration 5 | }: 6 | # Provides our instantiation of nixpkgs with all overlays and extra tooling 7 | # that we pull in from other repositories. 8 | # This expression is what all places where we need a concrete instantiation of nixpkgs should use. 9 | let 10 | sources = import ./sources.nix; 11 | 12 | nixpkgs = import sources.nixpkgs { 13 | overlays = [ (import ./overlay.nix { inherit sources; }) ] ++ overlays; 14 | config = { 15 | imports = [ config ]; 16 | 17 | allowUnfree = true; 18 | }; 19 | }; 20 | in nixpkgs 21 | -------------------------------------------------------------------------------- /nix/overlay.nix: -------------------------------------------------------------------------------- 1 | { sources ? import ./sources.nix }: 2 | self: super: 3 | let 4 | # Overrides all haskell packages to not automatically add cost centers. 5 | # Note: profilingDetail ends up as the `--profiling-detail` cabal flag, 6 | # and as the `-fno-prof-auto` ghc flag.
In nixpkgs this is set to 7 | # "exported-functions" by default 8 | disableAutoProfs = haskellSelf: haskellSuper: { 9 | mkDerivation = drv: 10 | haskellSuper.mkDerivation (drv // { profilingDetail = "none"; }); 11 | }; 12 | 13 | haskellOverlay = import ./haskell-overlay.nix { 14 | inherit sources; 15 | pkgs = self; 16 | }; 17 | in { 18 | sources = if super ? sources then super.sources // sources else sources; 19 | 20 | haskellPackages = (super.haskellPackages.extend haskellOverlay) 21 | # Uncomment the following line to disable automatic cost centers for 22 | # libraries. Use this for profiling. Note: this will trigger a large 23 | # recompile! 24 | # .extend disableAutoProfs 25 | ; 26 | } 27 | -------------------------------------------------------------------------------- /nix/sources.json: -------------------------------------------------------------------------------- 1 | { 2 | "niv": { 3 | "branch": "master", 4 | "description": "Easy dependency management for Nix projects", 5 | "homepage": "https://github.com/nmattia/niv", 6 | "owner": "nmattia", 7 | "repo": "niv", 8 | "rev": "e0ca65c81a2d7a4d82a189f1e23a48d59ad42070", 9 | "sha256": "1pq9nh1d8nn3xvbdny8fafzw87mj7gsmp6pxkdl65w2g18rmcmzx", 10 | "type": "tarball", 11 | "url": "https://github.com/nmattia/niv/archive/e0ca65c81a2d7a4d82a189f1e23a48d59ad42070.tar.gz", 12 | "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz" 13 | }, 14 | "nixpkgs": { 15 | "branch": "nixpkgs-unstable", 16 | "description": "Nix Packages collection", 17 | "homepage": "", 18 | "owner": "NixOS", 19 | "repo": "nixpkgs", 20 | "rev": "cf8cc1201be8bc71b7cbbbdaf349b22f4f99c7ae", 21 | "sha256": "1x7nca1ij9snsb2pqqzfawzgpc1b1d5js9m7b78lmql54ayixl68", 22 | "type": "tarball", 23 | "url": "https://github.com/NixOS/nixpkgs/archive/cf8cc1201be8bc71b7cbbbdaf349b22f4f99c7ae.tar.gz", 24 | "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz" 25 | } 26 | } 27 | --------------------------------------------------------------------------------
/nix/sources.nix: -------------------------------------------------------------------------------- 1 | # This file has been generated by Niv. 2 | 3 | let 4 | 5 | # 6 | # The fetchers. fetch_ fetches specs of type . 7 | # 8 | 9 | fetch_file = pkgs: name: spec: 10 | let 11 | name' = sanitizeName name + "-src"; 12 | in 13 | if spec.builtin or true then 14 | builtins_fetchurl { inherit (spec) url sha256; name = name'; } 15 | else 16 | pkgs.fetchurl { inherit (spec) url sha256; name = name'; }; 17 | 18 | fetch_tarball = pkgs: name: spec: 19 | let 20 | name' = sanitizeName name + "-src"; 21 | in 22 | if spec.builtin or true then 23 | builtins_fetchTarball { name = name'; inherit (spec) url sha256; } 24 | else 25 | pkgs.fetchzip { name = name'; inherit (spec) url sha256; }; 26 | 27 | fetch_git = name: spec: 28 | let 29 | ref = 30 | if spec ? ref then spec.ref else 31 | if spec ? branch then "refs/heads/${spec.branch}" else 32 | if spec ? tag then "refs/tags/${spec.tag}" else 33 | abort "In git source '${name}': Please specify `ref`, `tag` or `branch`!"; 34 | in 35 | builtins.fetchGit { url = spec.repo; inherit (spec) rev; inherit ref; }; 36 | 37 | fetch_local = spec: spec.path; 38 | 39 | fetch_builtin-tarball = name: throw 40 | ''[${name}] The niv type "builtin-tarball" is deprecated. You should instead use `builtin = true`. 41 | $ niv modify ${name} -a type=tarball -a builtin=true''; 42 | 43 | fetch_builtin-url = name: throw 44 | ''[${name}] The niv type "builtin-url" will soon be deprecated. You should instead use `builtin = true`. 
45 | $ niv modify ${name} -a type=file -a builtin=true''; 46 | 47 | # 48 | # Various helpers 49 | # 50 | 51 | # https://github.com/NixOS/nixpkgs/pull/83241/files#diff-c6f540a4f3bfa4b0e8b6bafd4cd54e8bR695 52 | sanitizeName = name: 53 | ( 54 | concatMapStrings (s: if builtins.isList s then "-" else s) 55 | ( 56 | builtins.split "[^[:alnum:]+._?=-]+" 57 | ((x: builtins.elemAt (builtins.match "\\.*(.*)" x) 0) name) 58 | ) 59 | ); 60 | 61 | # The set of packages used when specs are fetched using non-builtins. 62 | mkPkgs = sources: system: 63 | let 64 | sourcesNixpkgs = 65 | import (builtins_fetchTarball { inherit (sources.nixpkgs) url sha256; }) { inherit system; }; 66 | hasNixpkgsPath = builtins.any (x: x.prefix == "nixpkgs") builtins.nixPath; 67 | hasThisAsNixpkgsPath = == ./.; 68 | in 69 | if builtins.hasAttr "nixpkgs" sources 70 | then sourcesNixpkgs 71 | else if hasNixpkgsPath && ! hasThisAsNixpkgsPath then 72 | import {} 73 | else 74 | abort 75 | '' 76 | Please specify either (through -I or NIX_PATH=nixpkgs=...) or 77 | add a package called "nixpkgs" to your sources.json. 78 | ''; 79 | 80 | # The actual fetching function. 81 | fetch = pkgs: name: spec: 82 | 83 | if ! builtins.hasAttr "type" spec then 84 | abort "ERROR: niv spec ${name} does not have a 'type' attribute" 85 | else if spec.type == "file" then fetch_file pkgs name spec 86 | else if spec.type == "tarball" then fetch_tarball pkgs name spec 87 | else if spec.type == "git" then fetch_git name spec 88 | else if spec.type == "local" then fetch_local spec 89 | else if spec.type == "builtin-tarball" then fetch_builtin-tarball name 90 | else if spec.type == "builtin-url" then fetch_builtin-url name 91 | else 92 | abort "ERROR: niv spec ${name} has unknown type ${builtins.toJSON spec.type}"; 93 | 94 | # If the environment variable NIV_OVERRIDE_${name} is set, then use 95 | # the path directly as opposed to the fetched source. 
96 | replace = name: drv: 97 | let 98 | saneName = stringAsChars (c: if isNull (builtins.match "[a-zA-Z0-9]" c) then "_" else c) name; 99 | ersatz = builtins.getEnv "NIV_OVERRIDE_${saneName}"; 100 | in 101 | if ersatz == "" then drv else 102 | # this turns the string into an actual Nix path (for both absolute and 103 | # relative paths) 104 | if builtins.substring 0 1 ersatz == "/" then /. + ersatz else /. + builtins.getEnv "PWD" + "/${ersatz}"; 105 | 106 | # Ports of functions for older nix versions 107 | 108 | # a Nix version of mapAttrs if the built-in doesn't exist 109 | mapAttrs = builtins.mapAttrs or ( 110 | f: set: with builtins; 111 | listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (attrNames set)) 112 | ); 113 | 114 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/lists.nix#L295 115 | range = first: last: if first > last then [] else builtins.genList (n: first + n) (last - first + 1); 116 | 117 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L257 118 | stringToCharacters = s: map (p: builtins.substring p 1 s) (range 0 (builtins.stringLength s - 1)); 119 | 120 | # https://github.com/NixOS/nixpkgs/blob/0258808f5744ca980b9a1f24fe0b1e6f0fecee9c/lib/strings.nix#L269 121 | stringAsChars = f: s: concatStrings (map f (stringToCharacters s)); 122 | concatMapStrings = f: list: concatStrings (map f list); 123 | concatStrings = builtins.concatStringsSep ""; 124 | 125 | # https://github.com/NixOS/nixpkgs/blob/8a9f58a375c401b96da862d969f66429def1d118/lib/attrsets.nix#L331 126 | optionalAttrs = cond: as: if cond then as else {}; 127 | 128 | # fetchTarball version that is compatible between all the versions of Nix 129 | builtins_fetchTarball = { url, name ? 
null, sha256 }@attrs: 130 | let 131 | inherit (builtins) lessThan nixVersion fetchTarball; 132 | in 133 | if lessThan nixVersion "1.12" then 134 | fetchTarball ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; })) 135 | else 136 | fetchTarball attrs; 137 | 138 | # fetchurl version that is compatible between all the versions of Nix 139 | builtins_fetchurl = { url, name ? null, sha256 }@attrs: 140 | let 141 | inherit (builtins) lessThan nixVersion fetchurl; 142 | in 143 | if lessThan nixVersion "1.12" then 144 | fetchurl ({ inherit url; } // (optionalAttrs (!isNull name) { inherit name; })) 145 | else 146 | fetchurl attrs; 147 | 148 | # Create the final "sources" from the config 149 | mkSources = config: 150 | mapAttrs ( 151 | name: spec: 152 | if builtins.hasAttr "outPath" spec 153 | then abort 154 | "The values in sources.json should not have an 'outPath' attribute" 155 | else 156 | spec // { outPath = replace name (fetch config.pkgs name spec); } 157 | ) config.sources; 158 | 159 | # The "config" used by the fetchers 160 | mkConfig = 161 | { sourcesFile ? if builtins.pathExists ./sources.json then ./sources.json else null 162 | , sources ? if isNull sourcesFile then {} else builtins.fromJSON (builtins.readFile sourcesFile) 163 | , system ? builtins.currentSystem 164 | , pkgs ? mkPkgs sources system 165 | }: rec { 166 | # The sources, i.e. the attribute set of spec name to spec 167 | inherit sources; 168 | 169 | # The "pkgs" (evaluated nixpkgs) to use for e.g. non-builtin fetchers 170 | inherit pkgs; 171 | }; 172 | 173 | in 174 | mkSources (mkConfig {}) // { __functor = _: settings: mkSources (mkConfig settings); } 175 | -------------------------------------------------------------------------------- /package/build-and-ship.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Build Hoff and ship it to Freight 4 | 5 | set -efuo pipefail 6 | 7 | # Get the version from the git history. 
Strip the first `v` 8 | # character as dpkg really wants versions to start with a digit. 9 | VERSION="$(git describe | cut -c2-)" 10 | export VERSION 11 | 12 | # Change to the directory of the current script so that we can 13 | # execute `build-package.sh` from the right location. VERSION 14 | # has already been set by the logic above. 15 | cd "$(dirname "$0")" 16 | ./build-package.sh 17 | 18 | PKGFILE="hoff_$VERSION-1.deb" 19 | FREIGHT_HOST="archive-petrol" 20 | 21 | gcloud compute scp --tunnel-through-iap "$PKGFILE" "$FREIGHT_HOST:/tmp/$PKGFILE" 22 | 23 | # Shellcheck false positive. We want the client side versions 24 | # of these variables, not the server side versions. 25 | # shellcheck disable=SC2087 26 | gcloud compute ssh --tunnel-through-iap "$FREIGHT_HOST" -- -T < "$PKGNAME/DEBIAN/control" 48 | envsubst < deb-postinst > "$PKGNAME/DEBIAN/postinst" 49 | cp deb-conffiles "$PKGNAME/DEBIAN/conffiles" 50 | chmod +x "$PKGNAME/DEBIAN/postinst" 51 | 52 | dpkg-deb --root-owner-group --build "$PKGNAME" 53 | 54 | # Finally clean up the package directory. 55 | rm -fr "$PKGNAME" 56 | -------------------------------------------------------------------------------- /package/check-version.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Checks if the version number is consistent between hoff.cabal and hoff.nix 4 | cabal_version=$( 5 | grep "^version:" 7 | Description: A gatekeeper for your commits 8 | Depends: libc6 (>= 2.23), 9 | libgcc1 (>= 1:6.0.1), 10 | libgmp10 (>= 2:6.1.0), 11 | libstdc++6 (>= 5.4.0), 12 | git (>= 1:2.7.4), 13 | zlib1g (>= 1:1.2.8) 14 | -------------------------------------------------------------------------------- /package/deb-postinst: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Fail early if any of the commands below fail. 
4 | set -eo pipefail 5 | 6 | case "$1" in 7 | configure) 8 | 9 | # Create a 'hoff' user if it does not exist with the same parameters as 10 | # below. The user gets a corresponding group named 'hoff' too. 11 | adduser --system --group --no-create-home hoff 12 | 13 | # Create state directories owned by the 'hoff' user. 14 | mkdir --parents /var/lib/hoff/checkouts 15 | chown hoff:hoff /var/lib/hoff/checkouts 16 | 17 | mkdir --parents /var/lib/hoff/state 18 | chown hoff:hoff /var/lib/hoff/state 19 | 20 | # If the config file has not been modified (when the checksum matches that 21 | # of the example), tell the user to do so. The sha256 of the example config 22 | # file is spliced in by build-package.sh. 23 | if sha256sum /etc/hoff/config.json | grep -q "^${EXAMPLE_CONFIG_SHA256}"; then 24 | echo "You should now edit /etc/hoff/config.json." 25 | fi 26 | 27 | ;; 28 | 29 | esac 30 | 31 | exit 0 32 | -------------------------------------------------------------------------------- /package/example-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "projects": [{ 3 | "owner": "your-github-username-or-organization", 4 | "repository": "your-repo", 5 | "branch": "master", 6 | "testBranch": "testing", 7 | "checkout": "/var/lib/hoff/checkouts/your-username/your-repo", 8 | "stateFile": "/var/lib/hoff/state/your-username/your-repo.json", 9 | "checks": { 10 | "mandatory": [] 11 | }, 12 | "deployEnvironments": [ 13 | "staging", 14 | "production" 15 | ], 16 | "deploySubprojects": [ 17 | "foo", 18 | "bar" 19 | ] 20 | }, 21 | { 22 | "owner": "your-github-username-or-organization", 23 | "repository": "your-repo", 24 | "branch": "master", 25 | "testBranch": "testing", 26 | "checkout": "/var/lib/hoff/checkouts/your-username/your-repo", 27 | "stateFile": "/var/lib/hoff/state/your-username/your-repo.json", 28 | "checks": { 29 | "mandatory": [] 30 | }, 31 | "deployEnvironments": [ 32 | "staging", 33 | "production" 34 | ], 35 | 
"deploySubprojects": [ 36 | "foo", 37 | "bar" 38 | ], 39 | "safeForFriday": true 40 | }], 41 | "secret": "run 'head --bytes 32 /dev/urandom | base64' and paste output here", 42 | "accessToken": "paste a personal access token for a bot user here", 43 | "port": 1979, 44 | "tls": null, 45 | "user": { 46 | "name": "CI Bot", 47 | "email": "cibot@example.com", 48 | "sshConfigFile": "/etc/hoff/ssh_config" 49 | }, 50 | "mergeWindowExemption": ["hoffbot"], 51 | "trigger": { 52 | "commentPrefix": "@hoffbot" 53 | }, 54 | "featureFreezeWindow": { 55 | "start": "2023-01-01T00:00:00Z", 56 | "end": "2023-01-07T00:00:00Z" 57 | }, 58 | "timeouts": { 59 | "promotionTimeout": 60, 60 | "rememberTimeout": 600 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /package/github-known-hosts: -------------------------------------------------------------------------------- 1 | github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl 2 | github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg= 3 | github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk= 4 | -------------------------------------------------------------------------------- /package/hoff.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=commit gatekeeper daemon 3 | 
After=network.target 4 | Requires=network.target 5 | AssertPathExists=/etc/hoff/config.json 6 | 7 | # Generic crash reporting. This makes systemd start the on-failure@hoff.service 8 | # unit when the process exits or fails to start. Comment this out if you do not 9 | # have a systemd unit with this name. See `package/on-failure@.service.example` 10 | # for an idea of what you can do with this. 11 | OnFailure=on-failure@%n 12 | 13 | [Service] 14 | User=hoff 15 | Group=hoff 16 | 17 | # If you want to run Hoff without reverse proxy, you can enable the line below 18 | # to allow binding to privileged ports (e.g. 80 and 443) as non-root user. 19 | # AmbientCapabilities=CAP_NET_BIND_SERVICE 20 | 21 | # Do not allow processes to modify things that they should not be modifying. 22 | PrivateTmp=true 23 | ProtectKernelTunables=true 24 | ProtectKernelModules=true 25 | MemoryDenyWriteExecute=true 26 | 27 | # Mount /usr, /boot, and /etc as read-only for the process and subprocesses. 28 | # This is in contrast to =true, which would not protect /etc, or =strict, which 29 | # mounts more read-only, including /var which we need to be writable. 30 | ProtectSystem=full 31 | 32 | ExecStart=/usr/bin/hoff /etc/hoff/config.json 33 | Restart=on-failure 34 | RestartSec=10 35 | 36 | # The package provides a known-hosts file with GitHub's fingerprint. When 37 | # written to /etc/ssh/ssh_known_hosts, SSH will use it. But we don't want to 38 | # install it system-wide. So we install it in /etc/hoff, and bind-mount it into 39 | # place for this unit only. 40 | BindReadOnlyPaths=/etc/hoff/github-known-hosts:/etc/ssh/ssh_known_hosts 41 | 42 | [Install] 43 | WantedBy=multi-user.target 44 | -------------------------------------------------------------------------------- /package/on-failure@.service.example: -------------------------------------------------------------------------------- 1 | # This is an example on-failure handler.
See hoff.service for how 2 | # you can use this to wire up failure notifications when services 3 | # crash. 4 | 5 | [Unit] 6 | Description=Failure report for %i 7 | 8 | [Service] 9 | Type=oneshot 10 | 11 | # This can be a program which fetches the last loglines from the 12 | # journal and report this crash to a location you care about (e.g. 13 | # send chat messages, emails, or page people). It might have some 14 | # logic for inhibition, or be very stupid. 15 | # 16 | # Install in a file like `/etc/systemd/system/on-failure@.service`. 17 | # If you then run the command `systemctl start on-failure@foo`, 18 | # systemd will invoke `/usr/bin/yourfailurehandler foo`. `%i` 19 | # will be expanded to whatever you put after the @. 20 | ExecStart=/usr/bin/yourfailurehandler %i 21 | -------------------------------------------------------------------------------- /package/os-release: -------------------------------------------------------------------------------- 1 | # This file exists because systemd demands an /usr/lib/os-release 2 | # file to be able to attach images that contain portable services. 3 | # "portablectl inspect" displays the value of PORTABLE_PRETTY_NAME. 
4 | PORTABLE_PRETTY_NAME=Hoff 5 | HOME_URL=https://github.com/ruuda/hoff 6 | -------------------------------------------------------------------------------- /release.nix: -------------------------------------------------------------------------------- 1 | let pkgs = import ./nix/nixpkgs-pinned.nix { }; in pkgs.haskellPackages.hoff 2 | -------------------------------------------------------------------------------- /src/ClockTickLoop.hs: -------------------------------------------------------------------------------- 1 | module ClockTickLoop 2 | ( clockTickLoop 3 | ) where 4 | 5 | import Control.Concurrent (threadDelay) 6 | import Control.Monad (void) 7 | import Control.Monad.IO.Class (liftIO, MonadIO) 8 | import Data.Time (getCurrentTime) 9 | import Data.Time.Clock (DiffTime, diffTimeToPicoseconds) 10 | 11 | import Configuration (ClockTickInterval (..)) 12 | import Logic (EventQueue, enqueueEvent, Event (ClockTick)) 13 | 14 | foreverWithDelay :: MonadIO m => DiffTime -> m a -> m b 15 | foreverWithDelay delay action = go 16 | where 17 | go = do 18 | void $ action 19 | liftIO $ threadDelay $ fromInteger $ diffTimeToMicroseconds delay 20 | go 21 | 22 | -- | Creates a loop that every interval sends a clock tick event 23 | -- to check for timed out PRs that are waiting to be promoted. 24 | clockTickLoop 25 | :: MonadIO m 26 | => ClockTickInterval 27 | -> [EventQueue] 28 | -> m a 29 | clockTickLoop (ClockTickInterval tickInterval) queues = do 30 | foreverWithDelay tickInterval $ do 31 | currentTime <- liftIO getCurrentTime 32 | liftIO $ mapM_ (`enqueueEvent` ClockTick currentTime) queues 33 | 34 | -- A picosecond is 1e-12 seconds, a microsecond is 1e-6 seconds. 
35 | diffTimeToMicroseconds :: DiffTime -> Integer 36 | diffTimeToMicroseconds t = diffTimeToPicoseconds t `div` (1000 * 1000) 37 | -------------------------------------------------------------------------------- /src/Configuration.hs: -------------------------------------------------------------------------------- 1 | -- Hoff -- A gatekeeper for your commits 2 | -- Copyright 2016 Ruud van Asseldonk 3 | -- 4 | -- Licensed under the Apache License, Version 2.0 (the "License"); 5 | -- you may not use this file except in compliance with the License. 6 | -- A copy of the License has been included in the root of the repository. 7 | 8 | {-# LANGUAGE DeriveGeneric #-} 9 | 10 | module Configuration 11 | ( 12 | Configuration (..), 13 | ProjectConfiguration (..), 14 | knownEnvironments, 15 | knownSubprojects, 16 | ChecksConfiguration (..), 17 | TlsConfiguration (..), 18 | TriggerConfiguration (..), 19 | UserConfiguration (..), 20 | MergeWindowExemptionConfiguration (..), 21 | MetricsConfiguration (..), 22 | FeatureFreezeWindow (..), 23 | Timeouts (..), 24 | ClockTickInterval (..), 25 | loadConfiguration 26 | ) 27 | where 28 | 29 | import Data.Aeson (FromJSON, eitherDecodeStrict') 30 | import Data.ByteString (readFile) 31 | import Data.Maybe (fromMaybe) 32 | import Data.Set (Set) 33 | import Data.Text (Text) 34 | import Data.Time (DiffTime, UTCTime) 35 | import GHC.Generics 36 | import Prelude hiding (readFile) 37 | import qualified Network.Wai.Handler.Warp as Warp 38 | 39 | data ProjectConfiguration = ProjectConfiguration 40 | { 41 | owner :: Text, -- The GitHub user or organization who owns the repo. 42 | repository :: Text, -- The name of the repository. 43 | branch :: Text, -- The branch to guard and integrate commits into. 44 | testBranch :: Text, -- The branch to force-push candidates to for testing. 45 | checkout :: FilePath, -- The path to a local checkout of the repository. 46 | stateFile :: FilePath, -- The file where project state is stored. 
47 | checks :: Maybe ChecksConfiguration, -- Optional configuration related to checks for the project. 48 | deployEnvironments :: Maybe [Text], -- The environments which the `deploy to <environment>` command should be enabled for 49 | deploySubprojects :: Maybe [Text], -- The subprojects which the `deploy` command should be enabled for 50 | safeForFriday :: Maybe Bool -- Whether it's safe to deploy this project on Friday without an "on Friday" check. default False 51 | } 52 | deriving (Generic) 53 | 54 | knownEnvironments :: ProjectConfiguration -> [Text] 55 | knownEnvironments = fromMaybe [] . deployEnvironments 56 | 57 | knownSubprojects :: ProjectConfiguration -> [Text] 58 | knownSubprojects = fromMaybe [] . deploySubprojects 59 | 60 | data FeatureFreezeWindow = FeatureFreezeWindow 61 | { 62 | start :: UTCTime, 63 | end :: UTCTime 64 | } 65 | deriving (Generic) 66 | 67 | data TriggerConfiguration = TriggerConfiguration 68 | { 69 | -- When a comment with this prefix is left on a PR, that triggers the 70 | -- remainder of the comment to be interpreted as a directive at the bot. 71 | -- Usually this would be the Github username of the bot (including @ but not 72 | -- a space), e.g. "@hoffbot", and the comment "@hoffbot merge" would trigger 73 | -- a merge. The prefix is case-insensitive. 74 | commentPrefix :: Text 75 | } 76 | deriving (Generic) 77 | 78 | data UserConfiguration = UserConfiguration 79 | { 80 | name :: Text, -- Name used for Git committer. 81 | email :: Text, -- Email address used for Git committer. 82 | sshConfigFile :: FilePath -- The path to ~/.ssh/config. 83 | } 84 | deriving (Generic) 85 | 86 | newtype ChecksConfiguration = ChecksConfiguration 87 | { 88 | -- Multiple checks can succeed on a PR, the normal behaviour is to merge 89 | -- once the first one succeeds. Instead, we make it possible to configure a 90 | -- list of mandatory checks that should have successfully finished before we 91 | -- allow the merge.
Each entry is a prefix of the context that sent the 92 | -- status update. 93 | mandatory :: Set Text 94 | } 95 | deriving (Generic) 96 | 97 | data TlsConfiguration = TlsConfiguration 98 | { 99 | certFile :: FilePath, 100 | keyFile :: FilePath 101 | } 102 | deriving (Generic, Show) 103 | 104 | data MetricsConfiguration = MetricsConfiguration 105 | { 106 | metricsPort :: Warp.Port, 107 | metricsHost :: Text 108 | } 109 | deriving (Generic, Show) 110 | 111 | newtype MergeWindowExemptionConfiguration = MergeWindowExemptionConfiguration [Text] 112 | deriving (Generic, Show) 113 | 114 | data Timeouts = Timeouts 115 | { 116 | promotionTimeout :: DiffTime, 117 | rememberTimeout :: DiffTime 118 | } 119 | deriving (Generic, Show) 120 | 121 | newtype ClockTickInterval = ClockTickInterval DiffTime 122 | deriving (Generic, Show) 123 | 124 | data Configuration = Configuration 125 | { 126 | -- The projects to manage. 127 | projects :: [ProjectConfiguration], 128 | 129 | -- The secret for GitHub webhook hmac signatures. Note that for webhooks 130 | -- only it would be better if these were per project, but the GitHub 131 | -- "integrations" only get one webhook per integration, so in that case 132 | -- there can be only one secret. (Note that it would be much better if 133 | -- GitHub were to sign their requests with a public/private key pair, but 134 | -- alas, that is not the case.) 135 | secret :: Text, 136 | 137 | -- The access token for the Github API, for leaving comments. 138 | accessToken :: Text, 139 | 140 | -- Triggers that the bot may respond to. 141 | trigger :: TriggerConfiguration, 142 | 143 | -- The port to run the webserver on. 144 | port :: Int, 145 | 146 | -- Optional config for enabling https. 147 | tls :: Maybe TlsConfiguration, 148 | 149 | -- Configuration of the Git user. 150 | user :: UserConfiguration, 151 | 152 | -- List of users that are exempted from the merge window. This is useful for 153 | -- bots that automatically merge low impact changes. 
154 | mergeWindowExemption :: MergeWindowExemptionConfiguration, 155 | 156 | -- Configuration for the Prometheus metrics server. 157 | metricsConfig :: Maybe MetricsConfiguration, 158 | 159 | -- Feature freeze period in which only 'merge hotfix' commands are allowed 160 | featureFreezeWindow :: Maybe FeatureFreezeWindow, 161 | 162 | -- The timeouts for promoting an integrated pull request and remembering promoted pull requests 163 | timeouts :: Timeouts, 164 | 165 | -- The interval to send clock tick events 166 | clockTickInterval :: Maybe ClockTickInterval 167 | } 168 | deriving (Generic) 169 | 170 | instance FromJSON Configuration 171 | instance FromJSON ChecksConfiguration 172 | instance FromJSON ProjectConfiguration 173 | instance FromJSON TlsConfiguration 174 | instance FromJSON TriggerConfiguration 175 | instance FromJSON UserConfiguration 176 | instance FromJSON MergeWindowExemptionConfiguration 177 | instance FromJSON MetricsConfiguration 178 | instance FromJSON FeatureFreezeWindow 179 | instance FromJSON Timeouts 180 | instance FromJSON ClockTickInterval 181 | 182 | -- Reads and parses the configuration. Returns Nothing if parsing failed, but 183 | -- crashes if the file could not be read. 184 | loadConfiguration :: FilePath -> IO (Either String Configuration) 185 | loadConfiguration = fmap eitherDecodeStrict' . readFile 186 | -------------------------------------------------------------------------------- /src/EventLoop.hs: -------------------------------------------------------------------------------- 1 | -- Hoff -- A gatekeeper for your commits 2 | -- Copyright 2016 Ruud van Asseldonk 3 | -- 4 | -- Licensed under the Apache License, Version 2.0 (the "License"); 5 | -- you may not use this file except in compliance with the License. 6 | -- A copy of the License has been included in the root of the repository. 
7 | 8 | {-# LANGUAGE DataKinds #-} 9 | {-# LANGUAGE GHC2021 #-} 10 | {-# LANGUAGE OverloadedRecordDot #-} 11 | {-# LANGUAGE OverloadedStrings #-} 12 | {-# LANGUAGE TypeFamilies #-} 13 | 14 | module EventLoop 15 | ( 16 | convertGithubEvent, -- An internal helper function, but exposed for testing. 17 | runGithubEventLoop, 18 | runLogicEventLoop 19 | ) 20 | where 21 | 22 | import Control.Concurrent.STM.TBQueue 23 | import Control.Monad (when) 24 | import Control.Monad.IO.Class (MonadIO, liftIO) 25 | import Control.Monad.Logger (MonadLogger, logDebugN, logInfoN) 26 | import Control.Monad.STM (atomically) 27 | import Data.Foldable (traverse_) 28 | import Data.Text (Text) 29 | import Effectful (Eff, (:>), IOE) 30 | import qualified Data.Text as Text 31 | 32 | import Configuration (ProjectConfiguration, TriggerConfiguration, MergeWindowExemptionConfiguration, FeatureFreezeWindow, Timeouts) 33 | import Github (PullRequestPayload, CommentPayload, CommitStatusPayload, PushPayload, WebhookEvent (..)) 34 | import Github (eventProjectInfo) 35 | import MonadLoggerEffect (MonadLoggerEffect) 36 | import Project (ProjectInfo (..), ProjectState, PullRequestId (..)) 37 | import Time ( TimeOperation ) 38 | 39 | import qualified Configuration as Config 40 | import qualified Git 41 | import qualified Github 42 | import qualified GithubApi 43 | import qualified Logic 44 | import qualified Project 45 | import qualified Metrics.Metrics as Metrics 46 | 47 | eventFromPullRequestPayload :: PullRequestPayload -> Logic.Event 48 | eventFromPullRequestPayload payload = 49 | let 50 | number = PullRequestId payload.number 51 | title = payload.title 52 | author = payload.author 53 | body = payload.body 54 | branch = payload.branch 55 | sha = payload.sha 56 | baseBranch = Github.baseBranch (payload :: PullRequestPayload) 57 | in 58 | case payload.action of 59 | Github.Opened -> Logic.PullRequestOpened number branch baseBranch sha title author body 60 | -- If the pull request is reopened, we do not 
want to parse the command from 61 | -- the body, because it means that a person manually intervened with the pull 62 | -- request. This is why we do not pass the body to the event, so we don't 63 | -- accidentally repeatedly process the same command. 64 | Github.Reopened -> Logic.PullRequestOpened number branch baseBranch sha title author Nothing 65 | Github.Closed -> Logic.PullRequestClosed number 66 | Github.Synchronize -> Logic.PullRequestCommitChanged number sha 67 | Github.Edited -> Logic.PullRequestEdited number title baseBranch 68 | 69 | eventFromCommentPayload :: CommentPayload -> Maybe Logic.Event 70 | eventFromCommentPayload payload = 71 | let number = PullRequestId payload.number 72 | author = payload.author -- TODO: Wrapper type 73 | body = payload.body 74 | commentAdded = Logic.CommentAdded number author payload.id body 75 | in case payload.action of 76 | Left Github.CommentCreated -> Just commentAdded 77 | Right Github.ReviewSubmitted -> Just commentAdded 78 | -- Do not bother with edited and deleted comments, as it would tremendously 79 | -- complicate handling of approval. Once approved, this cannot be undone. 80 | -- And if approval undo is desired, it would be better implemented as a 81 | -- separate magic comment, rather than editing the approval comment. 
82 | _ -> Nothing 83 | 84 | mapCommitStatus :: Github.CommitStatus -> Maybe Text.Text -> Project.BuildStatus 85 | mapCommitStatus status murl = case status of 86 | Github.Pending -> case murl of 87 | Nothing -> Project.BuildPending 88 | Just url -> Project.BuildStarted url 89 | Github.Success -> Project.BuildSucceeded 90 | Github.Failure -> Project.BuildFailed murl 91 | Github.Error -> Project.BuildFailed murl 92 | 93 | eventFromCommitStatusPayload :: CommitStatusPayload -> Logic.Event 94 | eventFromCommitStatusPayload payload = 95 | let sha = payload.sha 96 | status = payload.status 97 | url = payload.url 98 | context = payload.context 99 | in Logic.BuildStatusChanged sha context (mapCommitStatus status url) 100 | 101 | eventFromPushPayload :: PushPayload -> Logic.Event 102 | eventFromPushPayload payload = Logic.PushPerformed payload.branch payload.sha 103 | 104 | convertGithubEvent :: Github.WebhookEvent -> Maybe Logic.Event 105 | convertGithubEvent event = case event of 106 | Ping -> Nothing -- TODO: What to do with this one? 107 | PullRequest payload -> Just $ eventFromPullRequestPayload payload 108 | CommitStatus payload -> Just $ eventFromCommitStatusPayload payload 109 | Comment payload -> eventFromCommentPayload payload 110 | Push payload -> Just $ eventFromPushPayload payload 111 | 112 | -- The event loop that converts GitHub webhook events into logic events. 113 | runGithubEventLoop 114 | :: (MonadIO m, MonadLogger m) 115 | => Github.EventQueue 116 | -> (ProjectInfo -> Logic.Event -> IO ()) -> m () 117 | runGithubEventLoop ghQueue enqueueEvent = runLoop 118 | where 119 | shouldHandle ghEvent = (ghEvent /= Ping) 120 | runLoop = do 121 | ghEvent <- liftIO $ atomically $ readTBQueue ghQueue 122 | let projectInfo = eventProjectInfo ghEvent 123 | logDebugN $ "github loop received event: " <> showText ghEvent 124 | when (shouldHandle ghEvent) $ 125 | -- If conversion yielded an event, enqueue it. Block if the queue is full. 126 | traverse_ (liftIO . 
enqueueEvent projectInfo) (convertGithubEvent ghEvent) 127 | runLoop 128 | 129 | runLogicEventLoop 130 | :: forall es 131 | . IOE :> es 132 | => Metrics.MetricsOperation :> es 133 | => Time.TimeOperation :> es 134 | => Git.GitOperation :> es 135 | => GithubApi.GithubOperation :> es 136 | => MonadLoggerEffect :> es 137 | => TriggerConfiguration 138 | -> ProjectConfiguration 139 | -> MergeWindowExemptionConfiguration 140 | -> Maybe FeatureFreezeWindow 141 | -> Timeouts 142 | -- Action that gets the next event from the queue. 143 | -> IO (Maybe Logic.Event) 144 | -- Action to perform after the state has changed, such as 145 | -- persisting the new state, and making it available to the 146 | -- webinterface. 147 | -> (ProjectState -> IO ()) 148 | -> ProjectState 149 | -> Eff es ProjectState 150 | runLogicEventLoop 151 | triggerConfig projectConfig mergeWindowExemptionConfig featureFreezeWindow timeouts 152 | getNextEvent publish initialState = 153 | let 154 | repo = Config.repository projectConfig 155 | handleAndContinue state0 event = do 156 | -- Handle the event and then perform any additional required actions until 157 | -- the state reaches a fixed point (when there are no further actions to 158 | -- perform). 159 | case event of 160 | -- Do not log clock ticks since they happen a lot and are not very interesting. 161 | Logic.ClockTick _ -> pure () 162 | _ -> logInfoN $ "logic loop received event (" <> repo <> "): " <> showText event 163 | state1 <- 164 | Logic.handleEvent triggerConfig mergeWindowExemptionConfig featureFreezeWindow timeouts event state0 165 | liftIO $ publish state1 166 | runLoop state1 167 | 168 | runLoop state = do 169 | -- Before anything, clone the repository if there is no clone. 170 | Logic.ensureCloned projectConfig 171 | -- Take one event off the queue, block if there is none. 172 | eventOrStopSignal <- liftIO getNextEvent 173 | -- Queue items are of type 'Maybe Event'; 'Nothing' signals loop 174 | -- termination. 
If there was an event, run one iteration and recurse. 175 | case eventOrStopSignal of 176 | Just event -> handleAndContinue state event 177 | Nothing -> return state 178 | 179 | in do 180 | Logic.runAction projectConfig $ 181 | Logic.runRetrieveEnvironment projectConfig $ 182 | runLoop initialState 183 | 184 | showText :: Show a => a -> Text 185 | showText = Text.pack . show 186 | -------------------------------------------------------------------------------- /src/Format.hs: -------------------------------------------------------------------------------- 1 | -- Hoff -- A gatekeeper for your commits 2 | -- Copyright 2020 The Hoff authors 3 | -- 4 | -- Licensed under the Apache License, Version 2.0 (the "License"); 5 | -- you may not use this file except in compliance with the License. 6 | -- A copy of the License has been included in the root of the repository. 7 | 8 | module Format 9 | ( 10 | format, 11 | Text.Only (..) 12 | ) 13 | where 14 | 15 | import Data.Text (Text) 16 | import Data.Text.Format.Params (Params) 17 | import Data.Text.Lazy (toStrict) 18 | 19 | import qualified Data.Text.Format as Text 20 | 21 | -- Like `Text.format`, but returning a strict `Text` instead of a lazy one. 22 | format :: Params ps => Text.Format -> ps -> Text 23 | format formatString params = toStrict $ Text.format formatString params 24 | -------------------------------------------------------------------------------- /src/Github.hs: -------------------------------------------------------------------------------- 1 | -- Hoff -- A gatekeeper for your commits 2 | -- Copyright 2016 Ruud van Asseldonk 3 | -- 4 | -- Licensed under the Apache License, Version 2.0 (the "License"); 5 | -- you may not use this file except in compliance with the License. 6 | -- A copy of the License has been included in the root of the repository. 
7 | 8 | {-# LANGUAGE DuplicateRecordFields #-} 9 | {-# LANGUAGE OverloadedRecordDot #-} 10 | {-# LANGUAGE OverloadedStrings #-} 11 | 12 | module Github 13 | ( 14 | CommentAction (..), 15 | CommentPayload (..), 16 | CommitStatus (..), 17 | CommitStatusPayload (..), 18 | EventQueue, 19 | PullRequestAction (..), 20 | PullRequestPayload (..), 21 | PushPayload (..), 22 | ReviewAction (..), 23 | WebhookEvent (..), 24 | eventProjectInfo, 25 | newEventQueue, 26 | tryEnqueueEvent 27 | ) 28 | where 29 | 30 | import Control.Applicative ((<|>), optional) 31 | import Control.Concurrent.STM.TBQueue (TBQueue, isFullTBQueue, newTBQueue, writeTBQueue) 32 | import Control.Monad.STM (atomically) 33 | import Data.Aeson (FromJSON (parseJSON), Object, Value (Object, String), (.:)) 34 | import Data.Aeson.Types (Parser, Key, typeMismatch) 35 | import Data.Text (Text) 36 | import GHC.Natural (Natural) 37 | 38 | import Git (Sha (..), Branch (..), BaseBranch (..), Context) 39 | import Project (ProjectInfo (..)) 40 | import Types (Body, Username, CommentId (..)) 41 | import Data.Maybe (fromMaybe) 42 | 43 | data PullRequestAction 44 | = Opened 45 | | Closed 46 | | Reopened 47 | | Synchronize 48 | | Edited 49 | deriving (Eq, Show) 50 | 51 | data CommentAction 52 | = CommentCreated 53 | | CommentEdited 54 | | CommentDeleted 55 | deriving (Eq, Show) 56 | 57 | data ReviewAction 58 | = ReviewSubmitted 59 | | ReviewEdited 60 | | ReviewDismissed 61 | deriving (Eq, Show) 62 | 63 | data CommitStatus 64 | = Pending 65 | | Success 66 | | Failure 67 | | Error 68 | deriving (Eq, Show) 69 | 70 | data PullRequestPayload = PullRequestPayload { 71 | action :: PullRequestAction, -- Corresponds to "action". 72 | owner :: Text, -- Corresponds to "pull_request.base.repo.owner.login". 73 | repository :: Text, -- Corresponds to "pull_request.base.repo.name". 74 | baseBranch :: BaseBranch, -- Corresponds to "pull_request.base.ref" 75 | number :: Int, -- Corresponds to "pull_request.number". 
76 | branch :: Branch, -- Corresponds to "pull_request.head.ref". 77 | sha :: Sha, -- Corresponds to "pull_request.head.sha". 78 | title :: Text, -- Corresponds to "pull_request.title". 79 | author :: Username, -- Corresponds to "pull_request.user.login". 80 | body :: Maybe Body -- Corresponds to "pull_request.body" 81 | } deriving (Eq, Show) 82 | 83 | data CommentPayload = CommentPayload { 84 | action :: Either CommentAction ReviewAction, -- Corresponds to "action". 85 | owner :: Text, -- Corresponds to "repository.owner.login". 86 | repository :: Text, -- Corresponds to "repository.name". 87 | number :: Int, -- Corresponds to "issue.number" or "pull_request.number". 88 | author :: Username, -- Corresponds to "sender.login". 89 | id :: Maybe CommentId, -- Corresponds to "comment.id". 90 | -- Can be absent if we actually received a review, 91 | -- because those have separate IDs from ordinary issue 92 | -- comments. 93 | body :: Text -- Corresponds to "comment.body" or "review.body". 94 | } deriving (Eq, Show) 95 | 96 | data CommitStatusPayload = CommitStatusPayload { 97 | owner :: Text, -- Corresponds to "repository.owner.login". 98 | repository :: Text, -- Corresponds to "repository.name". 99 | status :: CommitStatus, -- Corresponds to "action". 100 | context :: Context, -- Corresponds to "context". 101 | url :: Maybe Text, -- Corresponds to "target_url". 102 | sha :: Sha -- Corresponds to "sha". 103 | } deriving (Eq, Show) 104 | 105 | data PushPayload = PushPayload { 106 | owner :: Text, -- Corresponds to "repository.owner.login". 107 | repository :: Text, -- Corresponds to "repository.name". 
108 | branch :: BaseBranch, -- Corresponds to "ref" 109 | sha :: Sha -- Corresponds to "after" 110 | } deriving (Eq, Show) 111 | 112 | instance FromJSON PullRequestAction where 113 | parseJSON (String "opened") = return Opened 114 | parseJSON (String "closed") = return Closed 115 | parseJSON (String "reopened") = return Reopened -- must map to Reopened: EventLoop deliberately drops the PR body on reopen so commands are not re-executed 116 | parseJSON (String "synchronize") = return Synchronize 117 | parseJSON (String "edited") = return Edited 118 | parseJSON _ = fail "unexpected pull_request action" 119 | 120 | instance FromJSON CommentAction where 121 | parseJSON (String "created") = return CommentCreated 122 | parseJSON (String "edited") = return CommentEdited 123 | parseJSON (String "deleted") = return CommentDeleted 124 | parseJSON _ = fail "unexpected issue_comment action" 125 | 126 | instance FromJSON ReviewAction where 127 | parseJSON (String "submitted") = return ReviewSubmitted 128 | parseJSON (String "edited") = return ReviewEdited 129 | parseJSON (String "dismissed") = return ReviewDismissed 130 | parseJSON _ = fail "unexpected pull_request_review action" 131 | 132 | instance FromJSON CommitStatus where 133 | parseJSON (String "pending") = return Pending 134 | parseJSON (String "success") = return Success 135 | parseJSON (String "failure") = return Failure 136 | parseJSON (String "error") = return Error 137 | parseJSON _ = fail "unexpected status state" 138 | 139 | -- A helper function to parse nested fields in json. 140 | getNested :: FromJSON a => Object -> [Key] -> Parser a 141 | getNested rootObject fields = 142 | -- Build object parsers for every field except the last one. The last field is 143 | -- different, as it needs a parser of type "a", not "Object". 
144 | let parsers :: [Object -> Parser Object] 145 | parsers = fmap (\ field -> (.: field)) (init fields) 146 | object = foldl (>>=) (return rootObject) parsers 147 | in object >>= (.: (last fields)) 148 | 149 | instance FromJSON PullRequestPayload where 150 | parseJSON (Object v) = PullRequestPayload 151 | <$> (v .: "action") 152 | <*> getNested v ["pull_request", "base", "repo", "owner", "login"] 153 | <*> getNested v ["pull_request", "base", "repo", "name"] 154 | <*> getNested v ["pull_request", "base", "ref"] 155 | <*> getNested v ["pull_request", "number"] 156 | <*> getNested v ["pull_request", "head", "ref"] 157 | <*> getNested v ["pull_request", "head", "sha"] 158 | <*> getNested v ["pull_request", "title"] 159 | <*> getNested v ["pull_request", "user", "login"] 160 | <*> getNested v ["pull_request", "body"] 161 | parseJSON nonObject = typeMismatch "pull_request payload" nonObject 162 | 163 | instance FromJSON CommentPayload where 164 | parseJSON (Object v) = do 165 | isReview <- optional (v .: "review" :: Parser Value) 166 | parsedAction <- case isReview of 167 | Nothing -> Left <$> v .: "action" 168 | Just _ -> Right <$> v .: "action" 169 | CommentPayload parsedAction 170 | <$> getNested v ["repository", "owner", "login"] 171 | <*> getNested v ["repository", "name"] 172 | -- We subscribe to both issue comments and pull request review comments. 173 | <*> (getNested v ["issue", "number"] 174 | <|> getNested v ["pull_request", "number"]) 175 | <*> getNested v ["sender", "login"] 176 | <*> (getNested v ["comment", "id"] 177 | -- If we couldn't get a comment ID, we likely got a review, which does have an ID, 178 | -- but we can't treat that as a comment ID for API requests. 
179 | <|> pure Nothing) 180 | <*> (getNested v ["comment", "body"] 181 | <|> fromMaybe "" <$> getNested v ["review", "body"]) 182 | parseJSON nonObject = typeMismatch "(issue_comment | pull_request_review) payload" nonObject 183 | 184 | instance FromJSON CommitStatusPayload where 185 | parseJSON (Object v) = CommitStatusPayload 186 | <$> getNested v ["repository", "owner", "login"] 187 | <*> getNested v ["repository", "name"] 188 | <*> (v .: "state") 189 | <*> (v .: "context") 190 | <*> (v .: "target_url") 191 | <*> (v .: "sha") 192 | parseJSON nonObject = typeMismatch "status payload" nonObject 193 | 194 | instance FromJSON PushPayload where 195 | parseJSON (Object v) = PushPayload 196 | <$> getNested v ["repository", "owner", "login"] 197 | <*> getNested v ["repository", "name"] 198 | <*> (v .: "ref") 199 | <*> (v .: "after") 200 | parseJSON nonObject = typeMismatch "push payload" nonObject 201 | 202 | -- Note that GitHub calls pull requests "issues" for the sake of comments: the 203 | -- pull request comment event is actually "issue_comment". 204 | data WebhookEvent 205 | = Ping 206 | | PullRequest PullRequestPayload 207 | | Comment CommentPayload 208 | | CommitStatus CommitStatusPayload 209 | | Push PushPayload 210 | deriving (Eq, Show) 211 | 212 | -- Returns the owner of the repository for which the webhook was triggered. 213 | eventRepositoryOwner :: WebhookEvent -> Text 214 | eventRepositoryOwner event = case event of 215 | Ping -> error "ping event must not be processed" 216 | PullRequest payload -> payload.owner 217 | Comment payload -> payload.owner 218 | CommitStatus payload -> payload.owner 219 | Push payload -> payload.owner 220 | 221 | -- Returns the name of the repository for which the webhook was triggered. 
222 | eventRepository :: WebhookEvent -> Text 223 | eventRepository event = case event of 224 | Ping -> error "ping event must not be processed" 225 | PullRequest payload -> payload.repository 226 | Comment payload -> payload.repository 227 | CommitStatus payload -> payload.repository 228 | Push payload -> payload.repository 229 | 230 | eventProjectInfo :: WebhookEvent -> ProjectInfo 231 | eventProjectInfo event = 232 | ProjectInfo (eventRepositoryOwner event) (eventRepository event) 233 | 234 | type EventQueue = TBQueue WebhookEvent 235 | 236 | -- Creates a new event queue with the given maximum capacity. 237 | newEventQueue :: Natural -> IO EventQueue 238 | newEventQueue capacity = atomically $ newTBQueue capacity 239 | 240 | -- Enqueues the event if the queue is not full. Returns whether the event has 241 | -- been enqueued. This function does not block. 242 | tryEnqueueEvent :: EventQueue -> WebhookEvent -> IO Bool 243 | tryEnqueueEvent queue event = atomically $ do 244 | isFull <- isFullTBQueue queue 245 | if isFull 246 | then return False 247 | -- Normally writeTBQueue would block if the queue is full, but at this point 248 | -- we know that the queue is not full, so it will return immediately. 249 | else (writeTBQueue queue event) >> (return True) 250 | -------------------------------------------------------------------------------- /src/GithubApi.hs: -------------------------------------------------------------------------------- 1 | -- Hoff -- A gatekeeper for your commits 2 | -- Copyright 2019 Ruud van Asseldonk 3 | -- 4 | -- Licensed under the Apache License, Version 2.0 (the "License"); 5 | -- you may not use this file except in compliance with the License. 6 | -- A copy of the License has been included in the root of the repository. 
7 | {-# LANGUAGE DataKinds #-} 8 | {-# LANGUAGE GHC2021 #-} 9 | {-# LANGUAGE OverloadedStrings #-} 10 | {-# LANGUAGE PatternSynonyms #-} 11 | {-# LANGUAGE TypeFamilies #-} 12 | {-# LANGUAGE LambdaCase #-} 13 | 14 | -- This module defines high-level Github API operations, plus an interpreter to 15 | -- run those operations against the real API. 16 | module GithubApi 17 | ( 18 | GithubOperation (..), 19 | PullRequest (..), 20 | ReactionContent(..), 21 | getOpenPullRequests, 22 | getPullRequest, 23 | hasPushAccess, 24 | leaveComment, 25 | addReaction, 26 | runGithub, 27 | runGithubReadOnly, 28 | ) 29 | where 30 | 31 | import Control.Monad.IO.Class (liftIO) 32 | import Control.Monad.Logger (logDebugN, logInfoN, logWarnN, logErrorN) 33 | import Effectful (Dispatch (Dynamic), DispatchOf, Eff, Effect, IOE, (:>)) 34 | import Effectful.Dispatch.Dynamic (interpret, send, interpose) 35 | import Data.IntSet (IntSet) 36 | import Data.Text (Text) 37 | import GitHub.Data.Reactions (ReactionContent(..)) 38 | 39 | import qualified Data.IntSet as IntSet 40 | import qualified Data.Vector as Vector 41 | import qualified GitHub.Data.Id as Github3 42 | import qualified GitHub.Data.Name as Github3 43 | import qualified GitHub.Data.Options as Github3 44 | import qualified GitHub.Endpoints.Issues.Comments as Github3 45 | import qualified GitHub.Endpoints.PullRequests as Github3 46 | import qualified GitHub.Endpoints.Reactions as Github3 47 | import qualified GitHub.Endpoints.Repos.Collaborators as Github3 48 | import qualified GitHub.Request as Github3 49 | import qualified Network.HTTP.Client as Http 50 | import qualified Network.HTTP.Types.Status as Http 51 | 52 | import Format (format) 53 | import Git (BaseBranch (..), Branch (..), Sha (..)) 54 | import MonadLoggerEffect (MonadLoggerEffect) 55 | import Project (ProjectInfo) 56 | import Types (PullRequestId (..), Username (..), CommentId (..), ReactableId (..)) 57 | 58 | import qualified Project 59 | 60 | -- A stripped-down version of 
the `Github3.PullRequest` type, with only the 61 | -- fields we need. 62 | data PullRequest = PullRequest 63 | { sha :: Sha 64 | , branch :: Branch 65 | , baseBranch :: BaseBranch 66 | , title :: Text 67 | , author :: Username 68 | } 69 | 70 | data GithubOperation :: Effect where 71 | LeaveComment :: PullRequestId -> Text -> GithubOperation m () 72 | AddReaction :: ReactableId -> ReactionContent -> GithubOperation m () 73 | HasPushAccess :: Username -> GithubOperation m Bool 74 | GetPullRequest :: PullRequestId -> GithubOperation m (Maybe PullRequest) 75 | GetOpenPullRequests :: GithubOperation m (Maybe IntSet) 76 | 77 | type instance DispatchOf GithubOperation = 'Dynamic 78 | 79 | leaveComment :: GithubOperation :> es => PullRequestId -> Text -> Eff es () 80 | leaveComment pr remoteBranch = send $ LeaveComment pr remoteBranch 81 | 82 | addReaction :: GithubOperation :> es => ReactableId -> ReactionContent -> Eff es () 83 | addReaction id' reaction = send $ AddReaction id' reaction 84 | 85 | hasPushAccess :: GithubOperation :> es => Username -> Eff es Bool 86 | hasPushAccess username = send $ HasPushAccess username 87 | 88 | getPullRequest :: GithubOperation :> es => PullRequestId -> Eff es (Maybe PullRequest) 89 | getPullRequest pr = send $ GetPullRequest pr 90 | 91 | getOpenPullRequests :: GithubOperation :> es => Eff es (Maybe IntSet) 92 | getOpenPullRequests = send GetOpenPullRequests 93 | 94 | isPermissionToPush :: Github3.CollaboratorPermission -> Bool 95 | isPermissionToPush perm = case perm of 96 | Github3.CollaboratorPermissionAdmin -> True 97 | Github3.CollaboratorPermissionWrite -> True 98 | Github3.CollaboratorPermissionRead -> False 99 | Github3.CollaboratorPermissionNone -> False 100 | 101 | pattern StatusCodeException :: Http.Response() -> Github3.Error 102 | pattern StatusCodeException response <- 103 | Github3.HTTPError ( 104 | Http.HttpExceptionRequest _request (Http.StatusCodeException response _body) 105 | ) 106 | 107 | is404NotFound :: 
Github3.Error -> Bool 108 | is404NotFound err = case err of 109 | StatusCodeException response -> Http.responseStatus response == Http.notFound404 110 | _ -> False 111 | 112 | runGithub 113 | :: (IOE :> es, MonadLoggerEffect :> es) 114 | => Github3.Auth 115 | -> ProjectInfo 116 | -> Eff (GithubOperation : es) a 117 | -> Eff es a 118 | runGithub auth projectInfo = 119 | interpret $ \_ -> \case 120 | LeaveComment (PullRequestId pr) body -> do 121 | result <- liftIO $ Github3.github auth $ Github3.createCommentR 122 | (Github3.N $ Project.owner projectInfo) 123 | (Github3.N $ Project.repository projectInfo) 124 | (Github3.IssueNumber pr) 125 | body 126 | case result of 127 | Left err -> logWarnN $ format "Failed to comment: {}" [show err] 128 | Right _ -> logInfoN $ format "Posted comment on {}#{}: {}" 129 | (Project.repository projectInfo, pr, body) 130 | 131 | AddReaction reactableId reaction -> do 132 | let 133 | createReactionR project owner = 134 | case reactableId of 135 | OnIssueComment (CommentId commentId) -> Github3.createCommentReactionR project owner (Github3.Id commentId) 136 | OnPullRequest (PullRequestId prId) -> Github3.createIssueReactionR project owner (Github3.Id prId) 137 | 138 | result <- liftIO $ Github3.github auth $ createReactionR 139 | (Github3.N $ Project.owner projectInfo) 140 | (Github3.N $ Project.repository projectInfo) 141 | reaction 142 | 143 | case result of 144 | Left err -> logWarnN $ format "Failed to add reaction: {}" [show err] 145 | Right _ -> 146 | logInfoN $ 147 | format 148 | "Added reaction in {} on {}: {}" 149 | (Project.repository projectInfo, reactableId, show reaction) 150 | 151 | HasPushAccess (Username username) -> do 152 | result <- liftIO $ Github3.github auth $ Github3.collaboratorPermissionOnR 153 | (Github3.N $ Project.owner projectInfo) 154 | (Github3.N $ Project.repository projectInfo) 155 | (Github3.N username) 156 | 157 | case result of 158 | Left err -> do 159 | logErrorN $ format "Failed to retrive 
collaborator status: {}" [show err] 160 | -- To err on the safe side, if the API call fails, we pretend nobody 161 | -- has push access. 162 | pure False 163 | 164 | Right (Github3.CollaboratorWithPermission _user perm) -> do 165 | logDebugN $ format "User {} has permission {} on {}." (username, show perm, projectInfo) 166 | pure $ isPermissionToPush perm 167 | 168 | GetPullRequest (PullRequestId pr) -> do 169 | logDebugN $ format "Getting pull request {} in {}." (pr, projectInfo) 170 | result <- liftIO $ Github3.github auth $ Github3.pullRequestR 171 | (Github3.N $ Project.owner projectInfo) 172 | (Github3.N $ Project.repository projectInfo) 173 | (Github3.IssueNumber pr) 174 | case result of 175 | Left err | is404NotFound err -> do 176 | logWarnN $ format "Pull request {} does not exist in {}." (pr, projectInfo) 177 | pure Nothing 178 | Left err -> do 179 | logWarnN $ format "Failed to retrieve pull request {} in {}: {}" (pr, projectInfo, show err) 180 | pure Nothing 181 | Right details -> pure $ Just $ PullRequest 182 | { sha = Sha $ Github3.pullRequestCommitSha $ Github3.pullRequestHead details 183 | , branch = Branch $ Github3.pullRequestCommitRef $ Github3.pullRequestHead details 184 | , baseBranch = BaseBranch $ Github3.pullRequestCommitRef $ Github3.pullRequestBase details 185 | , title = Github3.pullRequestTitle details 186 | , author = Username $ Github3.untagName $ Github3.simpleUserLogin $ Github3.pullRequestUser details 187 | } 188 | 189 | GetOpenPullRequests -> do 190 | logDebugN $ format "Getting open pull request in {}." 
[projectInfo] 191 | result <- liftIO $ Github3.github auth $ Github3.pullRequestsForR 192 | (Github3.N $ Project.owner projectInfo) 193 | (Github3.N $ Project.repository projectInfo) 194 | Github3.stateOpen 195 | Github3.FetchAll 196 | case result of 197 | Left err -> do 198 | logWarnN $ format "Failed to retrieve pull requests in {}: {}" (projectInfo, show err) 199 | pure Nothing 200 | Right prs -> do 201 | logDebugN $ format "Got {} open pull requests in {}." (Vector.length prs, projectInfo) 202 | pure $ Just 203 | -- Note: we want to extract the *issue number*, not the *id*, 204 | -- which is a different integer part of the payload. 205 | $ foldMap (IntSet.singleton . Github3.unIssueNumber . Github3.simplePullRequestNumber) 206 | $ prs 207 | 208 | -- Like runGithub, but does not execute operations that have side effects, in 209 | -- the sense of being observable by Github users. We will still make requests 210 | -- against the read-only endpoints of the API. This is useful for local testing. 211 | runGithubReadOnly 212 | :: (IOE :> es, MonadLoggerEffect :> es) 213 | => Github3.Auth 214 | -> ProjectInfo 215 | -> Eff (GithubOperation : es) a 216 | -> Eff es a 217 | runGithubReadOnly auth projectInfo = runGithub auth projectInfo . augmentedGithubOperation 218 | where 219 | augmentedGithubOperation = interpose $ \_ operation -> case operation of 220 | -- These operations are read-only, we can run them for real. 221 | HasPushAccess username -> send $ HasPushAccess username 222 | GetPullRequest pullRequestId -> send $ GetPullRequest pullRequestId 223 | GetOpenPullRequests -> send GetOpenPullRequests 224 | 225 | -- These operations have side effects, we fake them. 
226 | LeaveComment pr body -> 227 | logInfoN $ format "Would have posted comment on {}: {}" (show pr, body) 228 | AddReaction reactableId reaction -> 229 | logInfoN $ format "Would have added reaction on {}: {}" (reactableId, show reaction) 230 | -------------------------------------------------------------------------------- /src/Metrics/Metrics.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DataKinds #-} 2 | {-# LANGUAGE DerivingStrategies #-} 3 | {-# LANGUAGE GHC2021 #-} 4 | {-# LANGUAGE LambdaCase #-} 5 | {-# LANGUAGE OverloadedStrings #-} 6 | {-# LANGUAGE TypeFamilies #-} 7 | 8 | module Metrics.Metrics 9 | ( 10 | MetricsOperation (..), 11 | ProjectMetrics (..), 12 | runMetrics, 13 | increaseMergedPRTotal, 14 | updateTrainSizeGauge, 15 | registerGHCMetrics, 16 | registerProjectMetrics 17 | ) 18 | where 19 | 20 | import Data.Text 21 | import Prometheus 22 | import Prometheus.Metric.GHC (ghcMetrics) 23 | import Effectful (Dispatch (Dynamic), DispatchOf, Eff, Effect, IOE, (:>)) 24 | import Effectful.Dispatch.Dynamic (interpret, send) 25 | import Control.Monad (void) 26 | import Control.Monad.IO.Class (liftIO) 27 | 28 | type ProjectLabel = Text 29 | 30 | data ProjectMetrics = ProjectMetrics 31 | { projectMetricsMergedPR :: Vector ProjectLabel Counter 32 | , projectMetricsMergeTrainSize :: Vector ProjectLabel Gauge 33 | } 34 | 35 | data MetricsOperation :: Effect where 36 | MergeBranch :: MetricsOperation m () 37 | UpdateTrainSize :: Int -> MetricsOperation m () 38 | 39 | type instance DispatchOf MetricsOperation = 'Dynamic 40 | 41 | increaseMergedPRTotal :: MetricsOperation :> es => Eff es () 42 | increaseMergedPRTotal = send MergeBranch 43 | 44 | updateTrainSizeGauge :: MetricsOperation :> es => Int -> Eff es () 45 | updateTrainSizeGauge n = send $ UpdateTrainSize n 46 | 47 | runMetrics 48 | :: IOE :> es 49 | => ProjectMetrics 50 | -> ProjectLabel 51 | -> Eff (MetricsOperation : es) a 52 | -> Eff es a 53 | 
runMetrics metrics label = interpret $ \_ -> \case 54 | UpdateTrainSize n -> void $ 55 | liftIO $ setProjectMetricMergeTrainSize metrics label n 56 | MergeBranch -> void $ 57 | liftIO $ incProjectMergedPR metrics label 58 | 59 | registerGHCMetrics :: IO () 60 | registerGHCMetrics = void $ register ghcMetrics 61 | 62 | registerProjectMetrics :: IO ProjectMetrics 63 | registerProjectMetrics = ProjectMetrics 64 | <$> register (vector "project" (counter (Info "hoff_project_merged_pull_requests" 65 | "Number of merged pull requests"))) 66 | <*> register (vector "project" (gauge (Info "hoff_project_merge_train_size" 67 | "Number of pull requests currently in the queue (merge train)"))) 68 | 69 | incProjectMergedPR :: ProjectMetrics -> ProjectLabel -> IO () 70 | incProjectMergedPR metrics project = 71 | withLabel (projectMetricsMergedPR metrics) project incCounter 72 | 73 | setProjectMetricMergeTrainSize :: ProjectMetrics -> ProjectLabel -> Int -> IO () 74 | setProjectMetricMergeTrainSize metrics project n = 75 | withLabel (projectMetricsMergeTrainSize metrics) project (\g -> setGauge g (fromIntegral n)) 76 | -------------------------------------------------------------------------------- /src/Metrics/Server.hs: -------------------------------------------------------------------------------- 1 | module Metrics.Server 2 | ( 3 | MetricsServerConfig (..), 4 | serverConfig, 5 | runMetricsServer 6 | ) 7 | where 8 | 9 | import qualified Network.Wai.Handler.Warp as Warp 10 | import qualified Network.Wai.Middleware.Prometheus as PrometheusWai 11 | import Data.Function ((&)) 12 | 13 | data MetricsServerConfig = MetricsServerConfig 14 | { metricsConfigHost :: Warp.HostPreference 15 | , metricsConfigPort :: Warp.Port 16 | } 17 | 18 | serverConfig :: MetricsServerConfig -> Warp.Settings 19 | serverConfig config = Warp.defaultSettings 20 | & Warp.setHost (metricsConfigHost config) 21 | & Warp.setPort (metricsConfigPort config) 22 | 23 | runMetricsServer :: MetricsServerConfig -> IO () 
24 | runMetricsServer config = Warp.runSettings (serverConfig config) PrometheusWai.metricsApp 25 | -------------------------------------------------------------------------------- /src/MonadLoggerEffect.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DataKinds #-} 2 | {-# LANGUAGE GHC2021 #-} 3 | {-# LANGUAGE LambdaCase #-} 4 | {-# LANGUAGE TypeFamilies #-} 5 | {-# LANGUAGE UndecidableInstances #-} -- Needed for 'MonadLogger (Eff es)' 6 | {-# OPTIONS_GHC -Wno-orphans #-} 7 | 8 | module MonadLoggerEffect (MonadLoggerEffect (..), runLoggerStdout) where 9 | 10 | import Control.Monad.Logger (Loc, LogLevel, LogSource, MonadLogger (..), ToLogStr (toLogStr), 11 | defaultOutput) 12 | import Effectful (Dispatch (Dynamic), DispatchOf, Eff, Effect, IOE, MonadIO (liftIO), (:>)) 13 | import Effectful.Dispatch.Dynamic (interpret, send) 14 | import System.IO (stdout) 15 | 16 | data MonadLoggerEffect :: Effect where 17 | MonadLoggerLog :: ToLogStr msg => Loc -> LogSource -> LogLevel -> msg -> MonadLoggerEffect m () 18 | 19 | type instance DispatchOf MonadLoggerEffect = 'Dynamic 20 | 21 | -- This orphan instance allows using monad-logger functions in Eff. UndecidableInstances is needed 22 | -- here because the 'MonadLoggerEffect :> es' constraint actually makes the instance head bigger, 23 | -- rather than smaller. The actual reducing of the es variable happens in the interpretation of the 24 | -- MonadLoggerEffect. See e.g. the runLoggerStdout, which removes the MonadLoggerEffect from the 25 | -- effects list. 
26 | instance MonadLoggerEffect :> es => MonadLogger (Eff es) where 27 | monadLoggerLog loc logSource logLevel msg = send $ MonadLoggerLog loc logSource logLevel msg 28 | 29 | -- | Run the logger such that everything is logged to stdout 30 | runLoggerStdout :: IOE :> es => Eff (MonadLoggerEffect : es) a -> Eff es a 31 | runLoggerStdout = interpret $ \_ -> \case 32 | MonadLoggerLog loc logSource logLevel msg -> 33 | liftIO $ defaultOutput stdout loc logSource logLevel $ toLogStr msg 34 | -------------------------------------------------------------------------------- /src/Time.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DataKinds #-} 2 | {-# LANGUAGE GHC2021 #-} 3 | {-# LANGUAGE LambdaCase #-} 4 | {-# LANGUAGE TypeFamilies #-} 5 | module Time (addTime, getDateTime, runTime, TimeOperation (..)) where 6 | 7 | import Control.Monad.IO.Class (liftIO) 8 | import Data.Time (DiffTime, UTCTime, addUTCTime, getCurrentTime) 9 | import Effectful (Dispatch (Dynamic), DispatchOf, Eff, Effect, IOE, (:>)) 10 | import Effectful.Dispatch.Dynamic (interpret, send) 11 | 12 | data TimeOperation :: Effect where 13 | GetDateTime :: TimeOperation m UTCTime 14 | 15 | type instance DispatchOf TimeOperation = 'Dynamic 16 | 17 | getDateTime :: TimeOperation :> es => Eff es UTCTime 18 | getDateTime = send GetDateTime 19 | 20 | runTime :: IOE :> es => Eff (TimeOperation : es) a -> Eff es a 21 | runTime = interpret $ \_ -> \case 22 | GetDateTime -> liftIO getCurrentTime 23 | 24 | addTime :: UTCTime -> DiffTime -> UTCTime 25 | addTime t d = addUTCTime (realToFrac d) t 26 | -------------------------------------------------------------------------------- /src/Types.hs: -------------------------------------------------------------------------------- 1 | -- Hoff -- A gatekeeper for your commits 2 | -- Copyright 2020 The Hoff authors 3 | -- 4 | -- Licensed under the Apache License, Version 2.0 (the "License"); 5 | -- you may not use this file 
except in compliance with the License. 6 | -- A copy of the License has been included in the root of the repository. 7 | 8 | {-# LANGUAGE DeriveGeneric #-} 9 | {-# LANGUAGE OverloadedStrings #-} 10 | {-# LANGUAGE GeneralizedNewtypeDeriving #-} 11 | 12 | module Types 13 | ( 14 | Body (..), 15 | PullRequestId (..), 16 | CommentId (..), 17 | ReactableId (..), 18 | Username (..), 19 | ) 20 | where 21 | 22 | import Data.Aeson (FromJSON, ToJSON) 23 | import Data.String (IsString) 24 | import Data.Text (Text) 25 | import Data.Text.Buildable (Buildable(..)) 26 | import GHC.Generics (Generic) 27 | 28 | import qualified Data.Aeson as Aeson 29 | 30 | -- The name of a user on GitHub. 31 | newtype Username = Username Text deriving (Eq, Show, Generic, IsString, Buildable) 32 | 33 | -- A pull request is identified by its number. 34 | newtype PullRequestId = PullRequestId Int deriving (Eq, Ord, Show, Generic) 35 | 36 | -- The body of a pull request 37 | newtype Body = Body Text deriving (Eq, Show, Generic, IsString, Buildable) 38 | 39 | -- The numeric ID of an issue comment. (In GitHub's model, a PR is a special kind of issue.) 40 | newtype CommentId = CommentId Int 41 | deriving (Eq, Ord, Show, Generic) 42 | 43 | instance FromJSON Body 44 | instance FromJSON PullRequestId 45 | instance FromJSON Username 46 | instance FromJSON CommentId 47 | 48 | instance ToJSON Body where toEncoding = Aeson.genericToEncoding Aeson.defaultOptions 49 | instance ToJSON PullRequestId where toEncoding = Aeson.genericToEncoding Aeson.defaultOptions 50 | instance ToJSON Username where toEncoding = Aeson.genericToEncoding Aeson.defaultOptions 51 | instance ToJSON CommentId where toEncoding = Aeson.genericToEncoding Aeson.defaultOptions 52 | 53 | -- The numeric ID of something on GitHub we can react to. 
54 | data ReactableId 55 | = OnIssueComment CommentId 56 | | OnPullRequest PullRequestId 57 | -- Ideally we would also be able to react to PR reviews, but (as of 9-5-2024) there 58 | -- doesn't seem to be a REST endpoint for that, despite it being possible through the UI. 59 | deriving (Show, Eq, Ord, Generic) 60 | 61 | instance Buildable ReactableId where 62 | build (OnIssueComment (CommentId commentId)) = 63 | "issue comment " <> build commentId 64 | build (OnPullRequest (PullRequestId prId)) = 65 | "pull request " <> build prId 66 | 67 | instance FromJSON ReactableId 68 | 69 | instance ToJSON ReactableId where 70 | toEncoding = Aeson.genericToEncoding Aeson.defaultOptions 71 | -------------------------------------------------------------------------------- /static/script.js: -------------------------------------------------------------------------------- 1 | function updateAutoRefreshButtonText() { 2 | document.getElementById('autoRefreshToggle').textContent = isAutoRefreshOn() ? 'auto-refresh UI: enabled' : 'auto-refresh UI: disabled' 3 | } 4 | 5 | function isAutoRefreshOn() { 6 | return window.localStorage.getItem('hoffAutoRefresh') == 'true'; 7 | } 8 | 9 | function toggleAutoRefresh() { 10 | window.localStorage.setItem('hoffAutoRefresh', !isAutoRefreshOn()); 11 | updateAutoRefreshButtonText(); 12 | } 13 | 14 | function main(){ 15 | updateAutoRefreshButtonText(); 16 | window.addEventListener('storage', updateAutoRefreshButtonText); // Synchronize other tabs 17 | 18 | window.setInterval(function () { 19 | if(isAutoRefreshOn()) { window.location.reload(); } 20 | }, 5000); 21 | } 22 | 23 | main(); -------------------------------------------------------------------------------- /static/style.css: -------------------------------------------------------------------------------- 1 | /* Scale: 2 | 0.391em 3 | 0.625em 4 | 1.0em 5 | 1.6em 6 | 2.56em 7 | 4.096em 8 | 9 | Baseline grid: multiples of 1.6em. 
10 | 11 | Color scheme: 12 | https://coolors.co/e0644c-fffbf7-adaca4-43494c-313638 13 | 14 | #e0644c (Jelly bean -- accent) 15 | #fffbf7 (Snow -- background) 16 | #adaca4 (Silver chalice -- light text) 17 | #56656d (Black coral -- body text) 18 | #43494c (Outer space -- body text) // Unused 19 | #313638 (Onyx -- currently not used) 20 | 21 | #8c8e81 (Dolphin gray -- light text accent) 22 | */ 23 | 24 | * 25 | { 26 | margin: 0; 27 | padding: 0; 28 | font-weight: 400; 29 | } 30 | 31 | html 32 | { 33 | font-size: 14px; 34 | line-height: 1.6em; 35 | } 36 | 37 | /* Font size is 10 + 0.25 * sqrt(screen-width). */ 38 | @media(min-width: 512px) { html { font-size: 15.7px; } } 39 | @media(min-width: 768px) { html { font-size: 16.9px; } } 40 | @media(min-width: 1024px) { html { font-size: 18.0px; } } 41 | @media(min-width: 1280px) { html { font-size: 19.8px; } } 42 | @media(min-width: 1536px) { html { font-size: 20.6px; } } 43 | @media(min-width: 1792px) { html { font-size: 21.3px; } } 44 | @media(min-width: 2048px) { html { font-size: 22.0px; } } 45 | 46 | body 47 | { 48 | font-family: 'Source Sans Pro', 'Segoe UI', sans-serif; 49 | background-color: #fffbf7; 50 | color: #56656d; 51 | margin: 3.2em; 52 | } 53 | 54 | a 55 | { 56 | color: #e0644c; 57 | text-decoration: none; 58 | } 59 | 60 | #content 61 | { 62 | margin-left: auto; 63 | margin-right: auto; 64 | /* 24 em was nicer, but 32 is needed for long repo names. */ 65 | max-width: 32em; 66 | } 67 | 68 | h1 69 | { 70 | font-size: 2.56em; 71 | line-height: 1.6rem; /* Aligns the text baseline to the grid. */ 72 | margin-top: -0.1rem; 73 | padding-bottom: 0.5rem; /* Leave the same amount of space as a p would. */ 74 | position: relative; 75 | } 76 | 77 | h1 > a.back 78 | { 79 | position: absolute; 80 | left: -1.6rem; 81 | } 82 | 83 | h2 84 | { 85 | font-size: 1.6em; 86 | line-height: 2rem; /* Aligns the text baseline to the grid. 
*/ 87 | margin-top: 2.4rem; 88 | padding-bottom: 0.4rem; 89 | } 90 | 91 | h3 92 | { 93 | font-size: 1em; 94 | margin-top: 1.6em; 95 | text-transform: capitalize; 96 | } 97 | 98 | h2 + h3 99 | { 100 | margin-top: 0; 101 | } 102 | 103 | p 104 | { 105 | clear: both; 106 | } 107 | 108 | span.prId 109 | { 110 | color: #adaca4; 111 | display: inline-block; 112 | margin-left: 0.391rem; 113 | } 114 | 115 | span.review 116 | { 117 | color: #adaca4; 118 | display: block; 119 | } 120 | 121 | span.review a 122 | { 123 | color: #8c8e81; 124 | } 125 | 126 | p.version 127 | { 128 | text-align: right; 129 | font-size: smaller; 130 | margin-top: 2em; 131 | } 132 | 133 | .autoRefresh { 134 | text-align: right; 135 | font-style: italic; 136 | cursor: pointer; 137 | } -------------------------------------------------------------------------------- /tests/EndToEnd.hs: -------------------------------------------------------------------------------- 1 | -- Hoff -- A gatekeeper for your commits 2 | -- Copyright 2016 Ruud van Asseldonk 3 | -- 4 | -- Licensed under the Apache License, Version 2.0 (the "License"); 5 | -- you may not use this file except in compliance with the License. 6 | -- A copy of the License has been included in the root of the repository. 7 | 8 | import Test.Hspec (hspec) 9 | import EventLoopSpec (eventLoopSpec) 10 | import ServerSpec (serverSpec) 11 | 12 | -- This test suite tests interaction of the system with the outside world, as 13 | -- opposed to its internals (there are unit tests for that). It is not a full 14 | -- end-to-end test, in the sense that the final executable is tested. Instead, 15 | -- the program is tested in two stages: one tests the Git interaction and the 16 | -- event loop, but incoming messages are faked. The other tests the web server, 17 | -- ignoring the messages it receives. 
18 | 19 | main :: IO () 20 | main = hspec $ do 21 | eventLoopSpec 22 | serverSpec 23 | -------------------------------------------------------------------------------- /tests/ParserSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | 3 | module ParserSpec (parserSpec) where 4 | 5 | import Data.Text (Text) 6 | import Test.Hspec (Spec, describe, it, shouldBe) 7 | import Test.Hspec.QuickCheck (prop) 8 | 9 | import qualified Data.Text as Text 10 | import qualified Test.QuickCheck as QC 11 | 12 | import Configuration (ProjectConfiguration (..), TriggerConfiguration (..)) 13 | import Parser (ParseResult (..), parseMergeCommand) 14 | import Project (ApprovedFor (..), DeployEnvironment (..), DeploySubprojects (..), 15 | MergeCommand (..), MergeWindow (..), Priority (..)) 16 | 17 | parserSpec :: Spec 18 | parserSpec = do 19 | describe "Parser" $ do 20 | describe "merge commands" $ do 21 | it "can parse 'merge'" $ 22 | dummyParse "@bot merge" `shouldBe` 23 | Success (Approve Merge, AnyDay, Normal) 24 | 25 | it "can parse 'merge on friday'" $ 26 | dummyParse "@bot merge on friday" `shouldBe` 27 | Success (Approve Merge, OnFriday, Normal) 28 | 29 | it "can parse 'merge as hotfix'" $ 30 | dummyParse "@bot merge as hotfix" `shouldBe` 31 | Success (Approve Merge, DuringFeatureFreeze, Normal) 32 | 33 | it "can parse 'with priority'" $ 34 | dummyParse "@bot merge with priority" `shouldBe` 35 | Success (Approve Merge, AnyDay, High) 36 | 37 | it "can parse 'with priority' on friday" $ 38 | dummyParse "@bot merge on friday with priority" `shouldBe` 39 | Success (Approve Merge, OnFriday, High) 40 | 41 | describe "tag commands" $ do 42 | it "can parse 'merge and tag'" $ 43 | dummyParse "@bot merge and tag" `shouldBe` 44 | Success (Approve MergeAndTag, AnyDay, Normal) 45 | 46 | it "can parse a merge window after 'tag'" $ do 47 | dummyParse "@bot merge and tag on friday" `shouldBe` 48 | Success (Approve 
MergeAndTag, OnFriday, Normal) 49 | 50 | describe "deploy commands" $ do 51 | let oneEnvProject = dummyProject { deployEnvironments = Just ["foo"] } 52 | let oneEnvParse = parseMergeCommand oneEnvProject dummyTrigger 53 | 54 | it "selects the environment implicitly when there is only one" $ 55 | oneEnvParse "@bot merge and deploy" `shouldBe` 56 | Success (Approve $ MergeAndDeploy EntireProject (DeployEnvironment "foo"), AnyDay, Normal) 57 | 58 | it "allows the environment to be specified when there is only one" $ 59 | oneEnvParse "@bot merge and deploy to foo" `shouldBe` 60 | Success (Approve $ MergeAndDeploy EntireProject (DeployEnvironment "foo"), AnyDay, Normal) 61 | 62 | it "can parse a merge window after an implicit environment" $ 63 | oneEnvParse "@bot merge and deploy as hotfix" `shouldBe` 64 | Success (Approve $ MergeAndDeploy EntireProject (DeployEnvironment "foo"), DuringFeatureFreeze, Normal) 65 | 66 | it "can parse a priority after an implicit environment" $ 67 | oneEnvParse "@bot merge and deploy with priority" `shouldBe` 68 | Success (Approve $ MergeAndDeploy EntireProject (DeployEnvironment "foo"), AnyDay, High) 69 | 70 | it "allows the environment to be specified when there are multiple" $ 71 | dummyParse "@bot merge and deploy to staging" `shouldBe` 72 | Success (Approve $ MergeAndDeploy EntireProject (DeployEnvironment "staging"), AnyDay, Normal) 73 | 74 | it "fails when the environment is not specified and ambiguous" $ 75 | dummyParse "@bot merge and deploy" `shouldBe` 76 | ParseError 77 | "comment:1:22:\n |\n1 | @bot merge and deploy\n | ^\n\ 78 | \Merge and deploy has been deprecated. 
Please use merge and deploy to \n\ 79 | \where is one of production, staging\n" 80 | 81 | let noEnvProject = dummyProject { deployEnvironments = Nothing } 82 | let noEnvParse = parseMergeCommand noEnvProject dummyTrigger 83 | 84 | it "fails when there are no deploy environments" $ 85 | noEnvParse "@bot merge and deploy" `shouldBe` 86 | ParseError 87 | "comment:1:22:\n |\n1 | @bot merge and deploy\n | ^\n\ 88 | \No deployment environments have been configured.\n" 89 | 90 | it "can parse a merge window after an explicit environment" $ 91 | dummyParse "@bot merge and deploy to production on friday" `shouldBe` 92 | Success (Approve $ MergeAndDeploy EntireProject (DeployEnvironment "production"), OnFriday, Normal) 93 | 94 | it "can parse a priority after an explicit environment" $ 95 | dummyParse "@bot merge and deploy to production with priority" `shouldBe` 96 | Success (Approve $ MergeAndDeploy EntireProject (DeployEnvironment "production"), AnyDay, High) 97 | 98 | it "allows a specific subproject to be deployed" $ 99 | dummyParse "@bot merge and deploy aaa to production" `shouldBe` 100 | Success (Approve $ MergeAndDeploy (OnlySubprojects ["aaa"]) (DeployEnvironment "production"), AnyDay, Normal) 101 | 102 | it "allows many specific subprojects to be deployed" $ 103 | dummyParse "@bot merge and deploy aaa, bbb to production" `shouldBe` 104 | Success (Approve $ MergeAndDeploy (OnlySubprojects ["aaa", "bbb"]) (DeployEnvironment "production"), AnyDay, Normal) 105 | 106 | it "fails when an unknown subproject is specified" $ 107 | dummyParse "@bot merge and deploy ccc to production" `shouldBe` 108 | -- I'm not super happy with this error message, it _should_ say that we expect to see 109 | -- either 'to', or the name of a known subproject. 
110 | ParseError 111 | "comment:1:23:\n |\n1 | @bot merge and deploy ccc to production\n\ 112 | \ | ^^\nunexpected \"cc\"\nexpecting \"to\" or white space\n" 113 | 114 | it "allows subprojects to be specified with an implicit environment" $ 115 | oneEnvParse "@bot merge and deploy aaa" `shouldBe` 116 | Success (Approve $ MergeAndDeploy (OnlySubprojects ["aaa"]) (DeployEnvironment "foo"), AnyDay, Normal) 117 | 118 | it "allows subprojects, an implicit environment, and a merge window" $ 119 | oneEnvParse "@bot merge and deploy bbb on friday" `shouldBe` 120 | Success (Approve $ MergeAndDeploy (OnlySubprojects ["bbb"]) (DeployEnvironment "foo"), OnFriday, Normal) 121 | 122 | let prefixProject = dummyProject 123 | { deployEnvironments = Just ["foo", "fooooooo"] 124 | , deploySubprojects = Just ["bar", "barrrrrr"] 125 | } 126 | let prefixParse = parseMergeCommand prefixProject dummyTrigger 127 | 128 | it "allows environment names to be prefixes of each other" $ 129 | prefixParse "@bot merge and deploy to fooooooo" `shouldBe` 130 | Success (Approve $ MergeAndDeploy EntireProject (DeployEnvironment "fooooooo"), AnyDay, Normal) 131 | 132 | it "allows subproject names to be prefixes of each other" $ 133 | prefixParse "@bot merge and deploy barrrrrr to fooooooo" `shouldBe` 134 | Success (Approve $ MergeAndDeploy (OnlySubprojects ["barrrrrr"]) (DeployEnvironment "fooooooo"), AnyDay, Normal) 135 | 136 | describe "retry commands" $ do 137 | it "can parse 'retry'" $ 138 | dummyParse "@bot retry" `shouldBe` 139 | Success (Retry, AnyDay, Normal) 140 | 141 | it "can parse a merge window after 'retry'" $ 142 | dummyParse "@bot retry on friday" `shouldBe` 143 | Success (Retry, OnFriday, Normal) 144 | 145 | describe "misc features" $ do 146 | it "ignores messages without the comment prefix" $ 147 | dummyParse "merge and deploy to production" `shouldBe` 148 | Ignored 149 | 150 | it "accepts commands at the end of other comments" $ 151 | dummyParse "LGTM, @bot merge" `shouldBe` 152 | 
Success (Approve Merge, AnyDay, Normal) 153 | 154 | it "accepts comments after a command if there is a newline" $ 155 | dummyParse "@bot merge\nLGTM" `shouldBe` 156 | Success (Approve Merge, AnyDay, Normal) 157 | 158 | it "rejects following a command with another command" $ 159 | dummyParse "@bot merge, @bot merge and tag" `shouldBe` 160 | ParseError 161 | "comment:1:13:\n |\n1 | @bot merge, @bot merge and tag\n | ^\n\ 162 | \Merge commands may not be followed by anything other than a punctuation character \ 163 | \('.', ',', '!', '?', ':', ';').\n" 164 | 165 | it "rejects following an invalid command with a valid command" $ 166 | dummyParse "@bot looks good to me, @bot merge" `shouldBe` 167 | ParseError 168 | "comment:1:6:\n |\n1 | @bot looks good to me, @bot merge\n | ^^^^^\n\ 169 | \unexpected \"looks\"\nexpecting \"merge\", \"retry\", or white space\n" 170 | 171 | it "parses case insensitively" $ 172 | dummyParse "@bot MeRgE aNd TaG oN fRiDaY" `shouldBe` 173 | Success (Approve MergeAndTag, OnFriday, Normal) 174 | 175 | prop "allows trailing punctuation" $ 176 | let genSuffix = Text.pack <$> QC.listOf (QC.elements ".,!?:;") 177 | in QC.forAll genSuffix $ \suffix -> 178 | dummyParse ("@bot merge" <> suffix) == 179 | Success (Approve Merge, AnyDay, Normal) 180 | 181 | it "understands HTML comments" $ 182 | dummyParse 183 | "@bot merge and deploy\ 184 | \ to production on friday" 185 | `shouldBe` 186 | Success (Approve $ MergeAndDeploy EntireProject (DeployEnvironment "production"), OnFriday, Normal) 187 | 188 | -- Giving silly commands to the bot is a highly-valued feature ;) 189 | it "allows HTML comment chicanery" $ 190 | dummyParse " @bot YEET" `shouldBe` 191 | Success (Approve Merge, AnyDay, Normal) 192 | 193 | {- 194 | TODO I would like to change the parser to be able to recognise the hoff 195 | ignore messages at any point and just bail. 
We can do that by using the 196 | recovery / deferred errors for normal parse errors and immediately giving 197 | up when we see an error of our custom "ignore me" type. 198 | 199 | it "understands ignore comments at the beginning" $ do 200 | dummyParse " @bot merge" `shouldBe` 201 | Ignored 202 | 203 | it "understands ignore comments not at the beginning" $ do 204 | dummyParse "@ops merge and tag " `shouldBe` 205 | Ignored 206 | -} 207 | 208 | dummyParse :: Text -> ParseResult (MergeCommand, MergeWindow, Priority) 209 | dummyParse = parseMergeCommand dummyProject dummyTrigger 210 | 211 | dummyProject :: ProjectConfiguration 212 | dummyProject = 213 | ProjectConfiguration 214 | { owner = "owner" 215 | , repository = "repository" 216 | , branch = "branch" 217 | , testBranch = "test-branch" 218 | , checkout = "/dev/null" 219 | , stateFile = "/dev/null" 220 | , checks = Nothing 221 | , deployEnvironments = Just ["production", "staging"] 222 | , deploySubprojects = Just ["aaa", "bbb"] 223 | , safeForFriday = Just True 224 | } 225 | 226 | dummyTrigger :: TriggerConfiguration 227 | dummyTrigger = 228 | TriggerConfiguration 229 | { commentPrefix = "@bot" } 230 | -------------------------------------------------------------------------------- /tests/ProjectSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE OverloadedStrings #-} 2 | {-# LANGUAGE StandaloneDeriving #-} 3 | {-# OPTIONS_GHC -Wno-orphans #-} 4 | 5 | module ProjectSpec (projectSpec) where 6 | 7 | import Control.Monad (forM_) 8 | import Test.Hspec (Spec, describe, shouldBe, shouldNotBe, shouldSatisfy) 9 | import Test.Hspec.QuickCheck (prop) 10 | import Test.QuickCheck ((==>), Arbitrary, genericShrink, withMaxSuccess) 11 | import Test.QuickCheck.Arbitrary (arbitrary, shrink) 12 | import Test.QuickCheck.Arbitrary.Generic (genericArbitrary) 13 | import Test.QuickCheck.Instances.Text () 14 | 15 | import qualified Data.Map.Strict as Map 16 | 17 | import qualified 
Project 18 | 19 | projectSpec :: Spec 20 | projectSpec = do 21 | describe "Project.subMapByOwner" $ do 22 | prop "the ord instance guarentees owners are grouped together" $ withMaxSuccess 100 $ \(owners, repositories) -> do 23 | let projectInfos = [Project.ProjectInfo owner repo | owner <- owners, repo <- repositories] 24 | projectInfoMap = Map.fromList (map (\p -> (p, Project.repository p)) projectInfos) 25 | 26 | forM_ owners $ \owner -> do 27 | Project.subMapByOwner owner projectInfoMap `shouldBe` 28 | Map.filterWithKey (\key _ -> Project.owner key == owner) projectInfoMap 29 | 30 | describe "Project.summarize" $ do 31 | prop "Ensure successes are always overshadowed by other statuses" $ withMaxSuccess 1000 $ \statuses -> do 32 | let checkMap = Map.fromList statuses 33 | outstanding = Project.SpecificChecks checkMap 34 | any ((/= Project.BuildSucceeded) . snd) (Map.toList checkMap) ==> do 35 | Project.summarize outstanding `shouldNotBe` Project.BuildSucceeded 36 | prop "Ensure failures always overshadow other statuses" $ withMaxSuccess 1000 $ \statuses -> do 37 | let checkMap = Map.fromList statuses 38 | outstanding = Project.SpecificChecks checkMap 39 | isFailure (Project.BuildFailed _) = True 40 | isFailure _ = False 41 | any (isFailure . 
snd) (Map.toList checkMap) ==> do 42 | Project.summarize outstanding `shouldSatisfy` isFailure 43 | 44 | instance Arbitrary Project.Check where 45 | arbitrary = Project.Check <$> arbitrary 46 | instance Arbitrary Project.BuildStatus where 47 | arbitrary = genericArbitrary 48 | shrink = genericShrink 49 | -------------------------------------------------------------------------------- /tests/data/issue-comment-created-payload.json: -------------------------------------------------------------------------------- 1 | { 2 | "action": "created", 3 | "issue": { 4 | "url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2", 5 | "labels_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2/labels{/name}", 6 | "comments_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2/comments", 7 | "events_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2/events", 8 | "html_url": "https://github.com/baxterthehacker/public-repo/issues/2", 9 | "id": 73464126, 10 | "number": 2, 11 | "title": "Spelling error in the README file", 12 | "user": { 13 | "login": "baxterthehacker", 14 | "id": 6752317, 15 | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", 16 | "gravatar_id": "", 17 | "url": "https://api.github.com/users/baxterthehacker", 18 | "html_url": "https://github.com/baxterthehacker", 19 | "followers_url": "https://api.github.com/users/baxterthehacker/followers", 20 | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", 21 | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", 22 | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", 23 | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", 24 | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", 25 | "repos_url": "https://api.github.com/users/baxterthehacker/repos", 26 | "events_url": 
"https://api.github.com/users/baxterthehacker/events{/privacy}", 27 | "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", 28 | "type": "User", 29 | "site_admin": false 30 | }, 31 | "labels": [ 32 | { 33 | "url": "https://api.github.com/repos/baxterthehacker/public-repo/labels/bug", 34 | "name": "bug", 35 | "color": "fc2929" 36 | } 37 | ], 38 | "state": "open", 39 | "locked": false, 40 | "assignee": null, 41 | "milestone": null, 42 | "comments": 1, 43 | "created_at": "2015-05-05T23:40:28Z", 44 | "updated_at": "2015-05-05T23:40:28Z", 45 | "closed_at": null, 46 | "body": "It looks like you accidently spelled 'commit' with two 't's." 47 | }, 48 | "comment": { 49 | "url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/comments/99262140", 50 | "html_url": "https://github.com/baxterthehacker/public-repo/issues/2#issuecomment-99262140", 51 | "issue_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2", 52 | "id": 99262140, 53 | "user": { 54 | "login": "baxterthehacker", 55 | "id": 6752317, 56 | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", 57 | "gravatar_id": "", 58 | "url": "https://api.github.com/users/baxterthehacker", 59 | "html_url": "https://github.com/baxterthehacker", 60 | "followers_url": "https://api.github.com/users/baxterthehacker/followers", 61 | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", 62 | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", 63 | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", 64 | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", 65 | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", 66 | "repos_url": "https://api.github.com/users/baxterthehacker/repos", 67 | "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", 68 | "received_events_url": 
"https://api.github.com/users/baxterthehacker/received_events", 69 | "type": "User", 70 | "site_admin": false 71 | }, 72 | "created_at": "2015-05-05T23:40:28Z", 73 | "updated_at": "2015-05-05T23:40:28Z", 74 | "body": "You are totally right! I'll get this fixed right away." 75 | }, 76 | "repository": { 77 | "id": 35129377, 78 | "name": "public-repo", 79 | "full_name": "baxterthehacker/public-repo", 80 | "owner": { 81 | "login": "baxterthehacker", 82 | "id": 6752317, 83 | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", 84 | "gravatar_id": "", 85 | "url": "https://api.github.com/users/baxterthehacker", 86 | "html_url": "https://github.com/baxterthehacker", 87 | "followers_url": "https://api.github.com/users/baxterthehacker/followers", 88 | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", 89 | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", 90 | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", 91 | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", 92 | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", 93 | "repos_url": "https://api.github.com/users/baxterthehacker/repos", 94 | "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", 95 | "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", 96 | "type": "User", 97 | "site_admin": false 98 | }, 99 | "private": false, 100 | "html_url": "https://github.com/baxterthehacker/public-repo", 101 | "description": "", 102 | "fork": false, 103 | "url": "https://api.github.com/repos/baxterthehacker/public-repo", 104 | "forks_url": "https://api.github.com/repos/baxterthehacker/public-repo/forks", 105 | "keys_url": "https://api.github.com/repos/baxterthehacker/public-repo/keys{/key_id}", 106 | "collaborators_url": 
"https://api.github.com/repos/baxterthehacker/public-repo/collaborators{/collaborator}", 107 | "teams_url": "https://api.github.com/repos/baxterthehacker/public-repo/teams", 108 | "hooks_url": "https://api.github.com/repos/baxterthehacker/public-repo/hooks", 109 | "issue_events_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/events{/number}", 110 | "events_url": "https://api.github.com/repos/baxterthehacker/public-repo/events", 111 | "assignees_url": "https://api.github.com/repos/baxterthehacker/public-repo/assignees{/user}", 112 | "branches_url": "https://api.github.com/repos/baxterthehacker/public-repo/branches{/branch}", 113 | "tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/tags", 114 | "blobs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/blobs{/sha}", 115 | "git_tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/tags{/sha}", 116 | "git_refs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/refs{/sha}", 117 | "trees_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/trees{/sha}", 118 | "statuses_url": "https://api.github.com/repos/baxterthehacker/public-repo/statuses/{sha}", 119 | "languages_url": "https://api.github.com/repos/baxterthehacker/public-repo/languages", 120 | "stargazers_url": "https://api.github.com/repos/baxterthehacker/public-repo/stargazers", 121 | "contributors_url": "https://api.github.com/repos/baxterthehacker/public-repo/contributors", 122 | "subscribers_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscribers", 123 | "subscription_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscription", 124 | "commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/commits{/sha}", 125 | "git_commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/commits{/sha}", 126 | "comments_url": "https://api.github.com/repos/baxterthehacker/public-repo/comments{/number}", 
127 | "issue_comment_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/comments{/number}", 128 | "contents_url": "https://api.github.com/repos/baxterthehacker/public-repo/contents/{+path}", 129 | "compare_url": "https://api.github.com/repos/baxterthehacker/public-repo/compare/{base}...{head}", 130 | "merges_url": "https://api.github.com/repos/baxterthehacker/public-repo/merges", 131 | "archive_url": "https://api.github.com/repos/baxterthehacker/public-repo/{archive_format}{/ref}", 132 | "downloads_url": "https://api.github.com/repos/baxterthehacker/public-repo/downloads", 133 | "issues_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues{/number}", 134 | "pulls_url": "https://api.github.com/repos/baxterthehacker/public-repo/pulls{/number}", 135 | "milestones_url": "https://api.github.com/repos/baxterthehacker/public-repo/milestones{/number}", 136 | "notifications_url": "https://api.github.com/repos/baxterthehacker/public-repo/notifications{?since,all,participating}", 137 | "labels_url": "https://api.github.com/repos/baxterthehacker/public-repo/labels{/name}", 138 | "releases_url": "https://api.github.com/repos/baxterthehacker/public-repo/releases{/id}", 139 | "created_at": "2015-05-05T23:40:12Z", 140 | "updated_at": "2015-05-05T23:40:12Z", 141 | "pushed_at": "2015-05-05T23:40:27Z", 142 | "git_url": "git://github.com/baxterthehacker/public-repo.git", 143 | "ssh_url": "git@github.com:baxterthehacker/public-repo.git", 144 | "clone_url": "https://github.com/baxterthehacker/public-repo.git", 145 | "svn_url": "https://github.com/baxterthehacker/public-repo", 146 | "homepage": null, 147 | "size": 0, 148 | "stargazers_count": 0, 149 | "watchers_count": 0, 150 | "language": null, 151 | "has_issues": true, 152 | "has_downloads": true, 153 | "has_wiki": true, 154 | "has_pages": true, 155 | "forks_count": 0, 156 | "mirror_url": null, 157 | "open_issues_count": 2, 158 | "forks": 0, 159 | "open_issues": 2, 160 | "watchers": 0, 161 | 
"default_branch": "master" 162 | }, 163 | "sender": { 164 | "login": "baxterthehacker2", 165 | "id": 6752317, 166 | "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", 167 | "gravatar_id": "", 168 | "url": "https://api.github.com/users/baxterthehacker", 169 | "html_url": "https://github.com/baxterthehacker", 170 | "followers_url": "https://api.github.com/users/baxterthehacker/followers", 171 | "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", 172 | "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", 173 | "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", 174 | "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", 175 | "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", 176 | "repos_url": "https://api.github.com/users/baxterthehacker/repos", 177 | "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", 178 | "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", 179 | "type": "User", 180 | "site_admin": false 181 | } 182 | } 183 | -------------------------------------------------------------------------------- /tests/data/issue-comment-edited-payload.json: -------------------------------------------------------------------------------- 1 | { 2 | "action": "edited", 3 | "changes": { 4 | "body": { 5 | "from": "This is a comment on the issue page" 6 | } 7 | }, 8 | "issue": { 9 | "url": "https://api.github.com/repos/crtschin/test/issues/1", 10 | "repository_url": "https://api.github.com/repos/crtschin/test", 11 | "labels_url": "https://api.github.com/repos/crtschin/test/issues/1/labels{/name}", 12 | "comments_url": "https://api.github.com/repos/crtschin/test/issues/1/comments", 13 | "events_url": "https://api.github.com/repos/crtschin/test/issues/1/events", 14 | "html_url": "https://github.com/crtschin/test/pull/1", 15 | "id": 803765384, 16 | 
"node_id": "MDExOlB1bGxSZXF1ZXN0NTY5NjM5MjI2", 17 | "number": 1, 18 | "title": "1", 19 | "user": { 20 | "login": "crtschin", 21 | "id": 15233905, 22 | "node_id": "MDQ6VXNlcjE1MjMzOTA1", 23 | "avatar_url": "https://avatars.githubusercontent.com/u/15233905?v=4", 24 | "gravatar_id": "", 25 | "url": "https://api.github.com/users/crtschin", 26 | "html_url": "https://github.com/crtschin", 27 | "followers_url": "https://api.github.com/users/crtschin/followers", 28 | "following_url": "https://api.github.com/users/crtschin/following{/other_user}", 29 | "gists_url": "https://api.github.com/users/crtschin/gists{/gist_id}", 30 | "starred_url": "https://api.github.com/users/crtschin/starred{/owner}{/repo}", 31 | "subscriptions_url": "https://api.github.com/users/crtschin/subscriptions", 32 | "organizations_url": "https://api.github.com/users/crtschin/orgs", 33 | "repos_url": "https://api.github.com/users/crtschin/repos", 34 | "events_url": "https://api.github.com/users/crtschin/events{/privacy}", 35 | "received_events_url": "https://api.github.com/users/crtschin/received_events", 36 | "type": "User", 37 | "site_admin": false 38 | }, 39 | "labels": [ 40 | 41 | ], 42 | "state": "open", 43 | "locked": false, 44 | "assignee": null, 45 | "assignees": [ 46 | 47 | ], 48 | "milestone": null, 49 | "comments": 1, 50 | "created_at": "2021-02-08T17:19:15Z", 51 | "updated_at": "2021-02-10T11:05:28Z", 52 | "closed_at": null, 53 | "author_association": "OWNER", 54 | "active_lock_reason": null, 55 | "pull_request": { 56 | "url": "https://api.github.com/repos/crtschin/test/pulls/1", 57 | "html_url": "https://github.com/crtschin/test/pull/1", 58 | "diff_url": "https://github.com/crtschin/test/pull/1.diff", 59 | "patch_url": "https://github.com/crtschin/test/pull/1.patch" 60 | }, 61 | "body": "", 62 | "performed_via_github_app": null 63 | }, 64 | "comment": { 65 | "url": "https://api.github.com/repos/crtschin/test/issues/comments/776631010", 66 | "html_url": 
"https://github.com/crtschin/test/pull/1#issuecomment-776631010", 67 | "issue_url": "https://api.github.com/repos/crtschin/test/issues/1", 68 | "id": 776631010, 69 | "node_id": "MDEyOklzc3VlQ29tbWVudDc3NjYzMTAxMA==", 70 | "user": { 71 | "login": "crtschin", 72 | "id": 15233905, 73 | "node_id": "MDQ6VXNlcjE1MjMzOTA1", 74 | "avatar_url": "https://avatars.githubusercontent.com/u/15233905?v=4", 75 | "gravatar_id": "", 76 | "url": "https://api.github.com/users/crtschin", 77 | "html_url": "https://github.com/crtschin", 78 | "followers_url": "https://api.github.com/users/crtschin/followers", 79 | "following_url": "https://api.github.com/users/crtschin/following{/other_user}", 80 | "gists_url": "https://api.github.com/users/crtschin/gists{/gist_id}", 81 | "starred_url": "https://api.github.com/users/crtschin/starred{/owner}{/repo}", 82 | "subscriptions_url": "https://api.github.com/users/crtschin/subscriptions", 83 | "organizations_url": "https://api.github.com/users/crtschin/orgs", 84 | "repos_url": "https://api.github.com/users/crtschin/repos", 85 | "events_url": "https://api.github.com/users/crtschin/events{/privacy}", 86 | "received_events_url": "https://api.github.com/users/crtschin/received_events", 87 | "type": "User", 88 | "site_admin": false 89 | }, 90 | "created_at": "2021-02-10T11:05:16Z", 91 | "updated_at": "2021-02-10T11:05:28Z", 92 | "author_association": "OWNER", 93 | "body": "This is an edit of a comment on the issue page.", 94 | "performed_via_github_app": null 95 | }, 96 | "repository": { 97 | "id": 337149693, 98 | "node_id": "MDEwOlJlcG9zaXRvcnkzMzcxNDk2OTM=", 99 | "name": "test", 100 | "full_name": "crtschin/test", 101 | "private": true, 102 | "owner": { 103 | "login": "crtschin", 104 | "id": 15233905, 105 | "node_id": "MDQ6VXNlcjE1MjMzOTA1", 106 | "avatar_url": "https://avatars.githubusercontent.com/u/15233905?v=4", 107 | "gravatar_id": "", 108 | "url": "https://api.github.com/users/crtschin", 109 | "html_url": "https://github.com/crtschin", 110 | 
"followers_url": "https://api.github.com/users/crtschin/followers", 111 | "following_url": "https://api.github.com/users/crtschin/following{/other_user}", 112 | "gists_url": "https://api.github.com/users/crtschin/gists{/gist_id}", 113 | "starred_url": "https://api.github.com/users/crtschin/starred{/owner}{/repo}", 114 | "subscriptions_url": "https://api.github.com/users/crtschin/subscriptions", 115 | "organizations_url": "https://api.github.com/users/crtschin/orgs", 116 | "repos_url": "https://api.github.com/users/crtschin/repos", 117 | "events_url": "https://api.github.com/users/crtschin/events{/privacy}", 118 | "received_events_url": "https://api.github.com/users/crtschin/received_events", 119 | "type": "User", 120 | "site_admin": false 121 | }, 122 | "html_url": "https://github.com/crtschin/test", 123 | "description": null, 124 | "fork": false, 125 | "url": "https://api.github.com/repos/crtschin/test", 126 | "forks_url": "https://api.github.com/repos/crtschin/test/forks", 127 | "keys_url": "https://api.github.com/repos/crtschin/test/keys{/key_id}", 128 | "collaborators_url": "https://api.github.com/repos/crtschin/test/collaborators{/collaborator}", 129 | "teams_url": "https://api.github.com/repos/crtschin/test/teams", 130 | "hooks_url": "https://api.github.com/repos/crtschin/test/hooks", 131 | "issue_events_url": "https://api.github.com/repos/crtschin/test/issues/events{/number}", 132 | "events_url": "https://api.github.com/repos/crtschin/test/events", 133 | "assignees_url": "https://api.github.com/repos/crtschin/test/assignees{/user}", 134 | "branches_url": "https://api.github.com/repos/crtschin/test/branches{/branch}", 135 | "tags_url": "https://api.github.com/repos/crtschin/test/tags", 136 | "blobs_url": "https://api.github.com/repos/crtschin/test/git/blobs{/sha}", 137 | "git_tags_url": "https://api.github.com/repos/crtschin/test/git/tags{/sha}", 138 | "git_refs_url": "https://api.github.com/repos/crtschin/test/git/refs{/sha}", 139 | "trees_url": 
"https://api.github.com/repos/crtschin/test/git/trees{/sha}", 140 | "statuses_url": "https://api.github.com/repos/crtschin/test/statuses/{sha}", 141 | "languages_url": "https://api.github.com/repos/crtschin/test/languages", 142 | "stargazers_url": "https://api.github.com/repos/crtschin/test/stargazers", 143 | "contributors_url": "https://api.github.com/repos/crtschin/test/contributors", 144 | "subscribers_url": "https://api.github.com/repos/crtschin/test/subscribers", 145 | "subscription_url": "https://api.github.com/repos/crtschin/test/subscription", 146 | "commits_url": "https://api.github.com/repos/crtschin/test/commits{/sha}", 147 | "git_commits_url": "https://api.github.com/repos/crtschin/test/git/commits{/sha}", 148 | "comments_url": "https://api.github.com/repos/crtschin/test/comments{/number}", 149 | "issue_comment_url": "https://api.github.com/repos/crtschin/test/issues/comments{/number}", 150 | "contents_url": "https://api.github.com/repos/crtschin/test/contents/{+path}", 151 | "compare_url": "https://api.github.com/repos/crtschin/test/compare/{base}...{head}", 152 | "merges_url": "https://api.github.com/repos/crtschin/test/merges", 153 | "archive_url": "https://api.github.com/repos/crtschin/test/{archive_format}{/ref}", 154 | "downloads_url": "https://api.github.com/repos/crtschin/test/downloads", 155 | "issues_url": "https://api.github.com/repos/crtschin/test/issues{/number}", 156 | "pulls_url": "https://api.github.com/repos/crtschin/test/pulls{/number}", 157 | "milestones_url": "https://api.github.com/repos/crtschin/test/milestones{/number}", 158 | "notifications_url": "https://api.github.com/repos/crtschin/test/notifications{?since,all,participating}", 159 | "labels_url": "https://api.github.com/repos/crtschin/test/labels{/name}", 160 | "releases_url": "https://api.github.com/repos/crtschin/test/releases{/id}", 161 | "deployments_url": "https://api.github.com/repos/crtschin/test/deployments", 162 | "created_at": "2021-02-08T17:10:24Z", 163 | 
"updated_at": "2021-02-08T17:15:15Z", 164 | "pushed_at": "2021-02-08T17:19:16Z", 165 | "git_url": "git://github.com/crtschin/test.git", 166 | "ssh_url": "git@github.com:crtschin/test.git", 167 | "clone_url": "https://github.com/crtschin/test.git", 168 | "svn_url": "https://github.com/crtschin/test", 169 | "homepage": null, 170 | "size": 0, 171 | "stargazers_count": 0, 172 | "watchers_count": 0, 173 | "language": null, 174 | "has_issues": true, 175 | "has_projects": true, 176 | "has_downloads": true, 177 | "has_wiki": true, 178 | "has_pages": false, 179 | "forks_count": 0, 180 | "mirror_url": null, 181 | "archived": false, 182 | "disabled": false, 183 | "open_issues_count": 1, 184 | "license": null, 185 | "forks": 0, 186 | "open_issues": 1, 187 | "watchers": 0, 188 | "default_branch": "master" 189 | }, 190 | "sender": { 191 | "login": "crtschin", 192 | "id": 15233905, 193 | "node_id": "MDQ6VXNlcjE1MjMzOTA1", 194 | "avatar_url": "https://avatars.githubusercontent.com/u/15233905?v=4", 195 | "gravatar_id": "", 196 | "url": "https://api.github.com/users/crtschin", 197 | "html_url": "https://github.com/crtschin", 198 | "followers_url": "https://api.github.com/users/crtschin/followers", 199 | "following_url": "https://api.github.com/users/crtschin/following{/other_user}", 200 | "gists_url": "https://api.github.com/users/crtschin/gists{/gist_id}", 201 | "starred_url": "https://api.github.com/users/crtschin/starred{/owner}{/repo}", 202 | "subscriptions_url": "https://api.github.com/users/crtschin/subscriptions", 203 | "organizations_url": "https://api.github.com/users/crtschin/orgs", 204 | "repos_url": "https://api.github.com/users/crtschin/repos", 205 | "events_url": "https://api.github.com/users/crtschin/events{/privacy}", 206 | "received_events_url": "https://api.github.com/users/crtschin/received_events", 207 | "type": "User", 208 | "site_admin": false 209 | } 210 | } -------------------------------------------------------------------------------- 
/tests/data/push-payload.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref": "refs/heads/master", 3 | "before": "0000000000000000000000000000000000000000", 4 | "after": "6113728f27ae82c7b1a177c8d03f9e96e0adf246", 5 | "created": true, 6 | "deleted": false, 7 | "forced": false, 8 | "base_ref": null, 9 | "compare": "https://octocoders.github.io/Codertocat/Hello-World/compare/simple-tag", 10 | "commits": [], 11 | "head_commit": { 12 | "id": "6113728f27ae82c7b1a177c8d03f9e96e0adf246", 13 | "tree_id": "4b825dc642cb6eb9a060e54bf8d69288fbee4904", 14 | "distinct": true, 15 | "message": "Adding a .gitignore file", 16 | "timestamp": "2019-05-15T15:20:41Z", 17 | "url": "https://octocoders.github.io/Codertocat/Hello-World/commit/6113728f27ae82c7b1a177c8d03f9e96e0adf246", 18 | "author": { 19 | "name": "Codertocat", 20 | "email": "Codertocat@Octocoders.io", 21 | "username": "Codertocat" 22 | }, 23 | "committer": { 24 | "name": "Codertocat", 25 | "email": "Codertocat@Octocoders.io", 26 | "username": "Codertocat" 27 | }, 28 | "added": [ 29 | ".gitignore" 30 | ], 31 | "removed": [], 32 | "modified": [] 33 | }, 34 | "repository": { 35 | "id": 118, 36 | "node_id": "MDEwOlJlcG9zaXRvcnkxMTg=", 37 | "name": "Hello-World", 38 | "full_name": "Codertocat/Hello-World", 39 | "private": false, 40 | "owner": { 41 | "name": "Codertocat", 42 | "email": "Codertocat@Octocoders.io", 43 | "login": "Codertocat", 44 | "id": 4, 45 | "node_id": "MDQ6VXNlcjQ=", 46 | "avatar_url": "https://octocoders.github.io/avatars/u/4?", 47 | "gravatar_id": "", 48 | "url": "https://octocoders.github.io/api/v3/users/Codertocat", 49 | "html_url": "https://octocoders.github.io/Codertocat", 50 | "followers_url": "https://octocoders.github.io/api/v3/users/Codertocat/followers", 51 | "following_url": "https://octocoders.github.io/api/v3/users/Codertocat/following{/other_user}", 52 | "gists_url": "https://octocoders.github.io/api/v3/users/Codertocat/gists{/gist_id}", 53 | 
"starred_url": "https://octocoders.github.io/api/v3/users/Codertocat/starred{/owner}{/repo}", 54 | "subscriptions_url": "https://octocoders.github.io/api/v3/users/Codertocat/subscriptions", 55 | "organizations_url": "https://octocoders.github.io/api/v3/users/Codertocat/orgs", 56 | "repos_url": "https://octocoders.github.io/api/v3/users/Codertocat/repos", 57 | "events_url": "https://octocoders.github.io/api/v3/users/Codertocat/events{/privacy}", 58 | "received_events_url": "https://octocoders.github.io/api/v3/users/Codertocat/received_events", 59 | "type": "User", 60 | "site_admin": false 61 | }, 62 | "html_url": "https://octocoders.github.io/Codertocat/Hello-World", 63 | "description": null, 64 | "fork": false, 65 | "url": "https://octocoders.github.io/Codertocat/Hello-World", 66 | "forks_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/forks", 67 | "keys_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/keys{/key_id}", 68 | "collaborators_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/collaborators{/collaborator}", 69 | "teams_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/teams", 70 | "hooks_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/hooks", 71 | "issue_events_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/issues/events{/number}", 72 | "events_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/events", 73 | "assignees_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/assignees{/user}", 74 | "branches_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/branches{/branch}", 75 | "tags_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/tags", 76 | "blobs_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/git/blobs{/sha}", 77 | "git_tags_url": 
"https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/git/tags{/sha}", 78 | "git_refs_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/git/refs{/sha}", 79 | "trees_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/git/trees{/sha}", 80 | "statuses_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/statuses/{sha}", 81 | "languages_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/languages", 82 | "stargazers_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/stargazers", 83 | "contributors_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/contributors", 84 | "subscribers_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/subscribers", 85 | "subscription_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/subscription", 86 | "commits_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/commits{/sha}", 87 | "git_commits_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/git/commits{/sha}", 88 | "comments_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/comments{/number}", 89 | "issue_comment_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/issues/comments{/number}", 90 | "contents_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/contents/{+path}", 91 | "compare_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/compare/{base}...{head}", 92 | "merges_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/merges", 93 | "archive_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/{archive_format}{/ref}", 94 | "downloads_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/downloads", 95 | "issues_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/issues{/number}", 96 | "pulls_url": 
"https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/pulls{/number}", 97 | "milestones_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/milestones{/number}", 98 | "notifications_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/notifications{?since,all,participating}", 99 | "labels_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/labels{/name}", 100 | "releases_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/releases{/id}", 101 | "deployments_url": "https://octocoders.github.io/api/v3/repos/Codertocat/Hello-World/deployments", 102 | "created_at": 1557949027, 103 | "updated_at": "2019-05-15T19:38:15Z", 104 | "pushed_at": 1557949103, 105 | "git_url": "git://octocoders.github.io/Codertocat/Hello-World.git", 106 | "ssh_url": "git@octocoders.github.io:Codertocat/Hello-World.git", 107 | "clone_url": "https://octocoders.github.io/Codertocat/Hello-World.git", 108 | "svn_url": "https://octocoders.github.io/Codertocat/Hello-World", 109 | "homepage": null, 110 | "size": 0, 111 | "stargazers_count": 0, 112 | "watchers_count": 0, 113 | "language": "Ruby", 114 | "has_issues": true, 115 | "has_projects": true, 116 | "has_downloads": true, 117 | "has_wiki": true, 118 | "has_pages": true, 119 | "forks_count": 1, 120 | "mirror_url": null, 121 | "archived": false, 122 | "disabled": false, 123 | "open_issues_count": 2, 124 | "license": null, 125 | "forks": 1, 126 | "open_issues": 2, 127 | "watchers": 0, 128 | "default_branch": "master", 129 | "stargazers": 0, 130 | "master_branch": "master" 131 | }, 132 | "pusher": { 133 | "name": "Codertocat", 134 | "email": "Codertocat@Octocoders.io" 135 | }, 136 | "enterprise": { 137 | "id": 1, 138 | "slug": "github", 139 | "name": "GitHub", 140 | "node_id": "MDg6QnVzaW5lc3Mx", 141 | "avatar_url": "https://octocoders.github.io/avatars/b/1?", 142 | "description": null, 143 | "website_url": null, 144 | "html_url": 
"https://octocoders.github.io/businesses/github", 145 | "created_at": "2019-05-14T19:31:12Z", 146 | "updated_at": "2019-05-14T19:31:12Z" 147 | }, 148 | "sender": { 149 | "login": "Codertocat", 150 | "id": 4, 151 | "node_id": "MDQ6VXNlcjQ=", 152 | "avatar_url": "https://octocoders.github.io/avatars/u/4?", 153 | "gravatar_id": "", 154 | "url": "https://octocoders.github.io/api/v3/users/Codertocat", 155 | "html_url": "https://octocoders.github.io/Codertocat", 156 | "followers_url": "https://octocoders.github.io/api/v3/users/Codertocat/followers", 157 | "following_url": "https://octocoders.github.io/api/v3/users/Codertocat/following{/other_user}", 158 | "gists_url": "https://octocoders.github.io/api/v3/users/Codertocat/gists{/gist_id}", 159 | "starred_url": "https://octocoders.github.io/api/v3/users/Codertocat/starred{/owner}{/repo}", 160 | "subscriptions_url": "https://octocoders.github.io/api/v3/users/Codertocat/subscriptions", 161 | "organizations_url": "https://octocoders.github.io/api/v3/users/Codertocat/orgs", 162 | "repos_url": "https://octocoders.github.io/api/v3/users/Codertocat/repos", 163 | "events_url": "https://octocoders.github.io/api/v3/users/Codertocat/events{/privacy}", 164 | "received_events_url": "https://octocoders.github.io/api/v3/users/Codertocat/received_events", 165 | "type": "User", 166 | "site_admin": false 167 | }, 168 | "installation": { 169 | "id": 5, 170 | "node_id": "MDIzOkludGVncmF0aW9uSW5zdGFsbGF0aW9uNQ==" 171 | } 172 | } -------------------------------------------------------------------------------- /todo.md: -------------------------------------------------------------------------------- 1 | # Up next 2 | 3 | * Automated testing for all core functionality 4 | 5 | # Near-term 6 | 7 | * Add tests for multiproject code 8 | * Graceful shutdown when receiving sigterm 9 | * Add the ability to force a rebuild 10 | * Keep track of recently integrated pull requests, even after they were closed 11 | * Support for commit message validation 12 | 
* Generate ping event at startup 13 | * Client to generate API calls for ping 14 | * The ability to queue approved commits directly without requiring a pull 15 | request, for personal use 16 | * Append a "Reviewed-by" line to integrated commits 17 | * Set timezone to UTC+0 for bot (rebased) commits 18 | * Add the ability to filter pull request status by context: 19 | accepting pull request build status as valid is wrong, 20 | because GitHub makes Travis build a merge commit 21 | * Clean up multiproject code 22 | 23 | # Done 24 | 25 | * Parse webhook data json -> event 26 | * Server to listen for webhooks 27 | * Run event loop 28 | * Run Git process and parse output 29 | * Add end-to-end test for retry after rejected push 30 | * Configurable Git credentials (just edit config of the daemon user) 31 | * Test that the server continues serving after an invalid hook 32 | * Render webinterface pages 33 | * Serve webinterface pages 34 | * Test with GitHub 35 | * Support for basic review policy enforcement (whitelist reviewer usernames) 36 | * Support for multiple repositories 37 | * Make API calls to leave comments and close pull requests 38 | * Support for `--autosquash` rebasing 39 | -------------------------------------------------------------------------------- /tools/build-status: -------------------------------------------------------------------------------- 1 | send-webhook -------------------------------------------------------------------------------- /tools/comment: -------------------------------------------------------------------------------- 1 | send-webhook -------------------------------------------------------------------------------- /tools/send-webhook: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # This file is part of Hoff -- A gatekeeper for your commits. 4 | # 5 | # 6 | # Simulates a webhook from GitHub, only needed fields are included. 
# Extract the value of the first string-valued field named $1 from the
# config.json in the current directory. This is not a real JSON parser --
# a grep/sed scrape is enough for locally simulating webhooks in a
# development environment.
get() {
  local field=$1
  grep -m1 "$field" config.json \
    | sed "s/.*\"$field\": *\"//;s/\".*//"
}
# Compute the HMAC of stdin, printing the hex digest on stdout.
# Pass the digest type as $1 (either -sha1 or -sha256).
# The key is read from the global $secret variable.
hmac() {
  local digest=$1
  openssl dgst "$digest" -hmac "$secret" | sed 's/^.* //'
}