├── .github └── workflows │ ├── deploy.yml │ └── test.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── Makefile ├── README.md ├── default.nix ├── deploy.yml ├── dockerize.nix ├── marge.app ├── marge.nix ├── marge ├── __init__.py ├── app.py ├── approvals.py ├── batch_job.py ├── bot.py ├── branch.py ├── commit.py ├── git.py ├── gitlab.py ├── interval.py ├── job.py ├── merge_request.py ├── pipeline.py ├── project.py ├── pylintrc ├── single_merge_job.py ├── store.py ├── trailerfilter.py └── user.py ├── nix ├── sources.json └── sources.nix ├── pylintrc ├── requirements.nix ├── requirements.txt ├── requirements_frozen.txt ├── requirements_override.nix ├── setup.cfg ├── setup.py ├── shell.nix ├── tests ├── __init__.py ├── git_repo_mock.py ├── gitlab_api_mock.py ├── test_app.py ├── test_approvals.py ├── test_batch_job.py ├── test_commit.py ├── test_git.py ├── test_gitlab.py ├── test_interval.py ├── test_job.py ├── test_merge_request.py ├── test_pipeline.py ├── test_project.py ├── test_single_job.py ├── test_store.py └── test_user.py └── version /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: Deploy 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | branches: 8 | - master 9 | 10 | jobs: 11 | dockerhub: 12 | runs-on: ubuntu-20.04 13 | if: ${{ github.repository == 'smarkets/marge-bot' }} # Only run from main repo 14 | steps: 15 | - uses: actions/checkout@v2 16 | - uses: actions/setup-python@v2 17 | - uses: cachix/install-nix-action@v13 18 | with: 19 | nix_path: nixpkgs=channel:nixos-unstable 20 | - name: Dockerize 21 | run: make dockerize 22 | - name: Deploy to DockerHub 23 | env: 24 | DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} 25 | DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} 26 | # GitHub Actions have GITHUB_REF as `refs/heads/` or `refs/tags/`, clean it up 27 | run: make docker-push VERSION="$(echo $GITHUB_REF | sed 's!^refs/\(heads\|tags\)/!!')" 28 | 
-------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | env: 12 | PY_COLORS: 1 13 | 14 | jobs: 15 | test: 16 | runs-on: ubuntu-22.04 17 | strategy: 18 | matrix: 19 | python-version: ["3.8", "3.9"] 20 | steps: 21 | - uses: actions/checkout@v3 22 | - uses: actions/setup-python@v4 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | - name: Install requirements 26 | run: pip install -r requirements_frozen.txt 27 | - name: Run tests 28 | run: pytest 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | ENV/ 3 | .cache 4 | *.egg-info 5 | .coverage 6 | .pytest_cache 7 | 8 | # nix stuff 9 | result 10 | result-* 11 | 12 | # Editor 13 | *.sw[po] 14 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | * 0.10.1: 2 | - Feature: Guarantee pipeline before merging 3 | * 0.10.0: 4 | - Feature: implement HTTPS support for cloning (#225) #283 5 | - Feature: Make CI work with GitHub Actions #308 6 | - Feature: Allow running marge-bot in CI pipelines or as a single CLI job #289 7 | - Fix: Bump urllib3 from 1.26.4 to 1.26.5 #310 8 | - Fix: Bump urllib3 from 1.26.3 to 1.26.4 #306 9 | - Fix: Upgrade dependencies and fix lints and tests #305 10 | - Fix: AccessLevel enum matches GitLab docs #294 11 | * 0.9.5: 12 | - Feature: Add new choice `assigned_at` to option `merge_order` #268 13 | - Fix: Wait for merge status to resolve #265 14 | * 0.9.4: 15 | - Fix: handle `CannotMerge` which could be raised from `update_merge_request` #275 16 | - Fix: maintain 
`batch_mr_sha` value when batch merging with fast forward commits #276 17 | * 0.9.3: 18 | - Feature: allow merge commits in batch MRs, to make the commits be exactly the same in 19 | the sub MRs and the batch MR. Add `--use-merge-commit-batches` and `--skip-ci-batches` options #264 20 | - Feature: add `--use-no-ff-batches` to disable fast forwarding of batch merges (#256, #259) 21 | * 0.9.2: 22 | - Fix: ensure parameters are correct when merging with/without pipelines enabled #251 23 | - Fix: only delete source branch if forced #193 24 | - Fix: fix sandboxed build #250 25 | * 0.9.1: 26 | - Feature: support passing a timezone with the embargo #228 27 | - Fix: fix not checking the target project for MRs from forked projects #218 28 | * 0.9.0: 29 | - Feature: support rebasing through GitLab's API #160 30 | - Feature: allow restrict source branches #206 31 | - Fix: only fetch projects with min access level #166 32 | - Fix: bump all dependencies (getting rid of vulnerable packages) #179 33 | - Fix: support multiple assignees #186, #192 34 | - Fix: fetch pipelines by merge request instead of branch #212 35 | - Fix: fix unassign when author is Marge #211 36 | - Enhancement: ignore archived projects #177 37 | - Enhancement: add a timeout to all gitlab requests #200 38 | - Enhancement: smaller docker image size #199 39 | * 0.8.1 40 | - Feature: allow merging in order of last-update time #149 41 | * 0.8.0 42 | - Feature: allow reference repository in git clone #129 43 | - Feature: add new stable/master tags for docker images #142 44 | - Fix: fix TypeError when fetching source project #122 45 | - Fix: handle CI status 'skipped' #127 46 | - Fix: handle merging when source branch is master #127 47 | - Fix: handle error on pushing to protected branches #127 48 | - Enhancement: add appropriate error if unresolved discussions on merge request #136 49 | - Enhancement: ensure reviewer and commit author aren't the same #137 50 | * 0.7.0: 51 | - Feature: add `--batch` to better 
support repos with many daily MRs and slow-ish CI (#84, #116) 52 | - Fix: fix fuse() call when using experimental --use-merge-strategy to update source branch #102 53 | - Fix: Get latest CI status of a commit filtered by branch #96 (thanks to benjamb) 54 | - Enhancement: Check MR is mergeable before accepting MR #117 55 | * 0.6.1: 56 | - Fix when target SHA is retrieved #92. 57 | - Replace word "gitlab" with "GitLab" #93. 58 | * 0.6.0: 59 | - Fix issue due to a `master` branch being assumed when removing 60 | local branches #88. 61 | - Better error reporting when there are no changes left 62 | after rebasing #87. 63 | - Add --approval-reset-timeout option #85. 64 | - Fix encoding issues under Windows #86. 65 | - Support new merge-request status "locked" #79. 66 | - Fixes issue where stale branches in marge's repo could 67 | lead to conflicts #78. 68 | - Add experimental --use-merge-strategy flag that uses merge-commits 69 | instead of rebasing (#72, and also #90 for caveats). 70 | * 0.5.1: 71 | - Sleep even less between polling for MRs #75. 72 | * 0.5.0: 73 | - Added "default -> config file -> env var -> args" way to configure marge-bot #71 74 | * 0.4.1: 75 | - Fixed bug in error handling of commit rewriting (#70 / 1438867) 76 | - Add --project-regexp argument to restrict to certain target branches #65. 77 | - Sleep less between merging requests while there are jobs pending #67. 78 | - Less verbose logging when --debug is used #66. 79 | * 0.4.0: 80 | - The official docker image is now on `smarkets/marge-bot` not (`smarketshq/marge-bot`). 81 | - Add a --add-part-of option to tag commit messages with originating MR #48. 82 | - Add a --git-timeout parameter (that takes time units); also add --ci-timeout 83 | that deprecates --max-ci-time-in-minutes #58. 84 | - Re-approve immediately after push #53. 85 | - Always use --ssh-key-file if passed (never ssh-agent or keys from ~/.ssh) #61. 86 | - Fix bad LOCALE problem in official image (hardcode utf-8 everywhere) 
87 | - Don't blow up on logging bad json responses #51. 88 | - Grammar fix #52. 89 | * 0.3.2: Fix support for branches with "/" in their names #50. 90 | * 0.3.1: Fix start-up error when running as non-admin user #49. 91 | * 0.3.0: 92 | - Display better messages when GitLab refuses to merge #32, #33. 93 | - Handle auto-squash being selected #14. 94 | - Add `--max-ci-time-in-minutes`, with default of 15 #44. 95 | - Fix clean-up of `ssh-key-xxx` files #38. 96 | - All command line args now have an environment var equivalent #35. 97 | * 0.2.0: 98 | - Add `--project-regexp` flag, to select which projects to include/exclude. 99 | - Fix GitLab CE incompatibilities #30. 100 | * 0.1.2: Fix parsing of GitLab versions #28. 101 | * 0.1.1: Fix failure to take into account group permissions #19. 102 | * 0.1.0: Initial release. 103 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2017 Smarkets Limited 2 | 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 6 | 7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 12 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | VERSION?=$$(git rev-parse --abbrev-ref HEAD) 2 | 3 | .PHONY: all 4 | all: requirements_frozen.txt requirements.nix requirements_override.nix marge-bot dockerize 5 | 6 | .PHONY: marge-bot 7 | marge-bot: 8 | nix-build --keep-failed --attr marge-bot default.nix 9 | 10 | .PHONY: clean 11 | clean: 12 | rm -rf .cache result result-* requirements_frozen.txt 13 | 14 | .PHONY: bump 15 | bump: bump-requirements bump-sources 16 | 17 | .PHONY: bump-sources 18 | bump-sources: 19 | nix-shell --run niv update 20 | 21 | .PHONY: bump-requirements 22 | bump-requirements: clean requirements_frozen.txt 23 | 24 | requirements_frozen.txt requirements.nix requirements_override.nix: requirements.txt 25 | pypi2nix -V 3.6 -r $^ 26 | 27 | .PHONY: dockerize 28 | dockerize: 29 | docker load --input $$(nix-build --attr docker-image default.nix) 30 | 31 | .PHONY: docker-push 32 | docker-push: 33 | if [ -n "$$DOCKER_USERNAME" -a -n "$$DOCKER_PASSWORD" ]; then \ 34 | docker login -u "$${DOCKER_USERNAME}" -p "$${DOCKER_PASSWORD}"; \ 35 | else \ 36 | docker login; \ 37 | fi 38 | docker tag smarkets/marge-bot:$$(cat version) smarkets/marge-bot:$(VERSION) 39 | if [ "$(VERSION)" = "$$(cat version)" ]; then \ 40 | docker tag smarkets/marge-bot:$$(cat version) smarkets/marge-bot:latest; \ 41 | docker tag 
smarkets/marge-bot:$$(cat version) smarkets/marge-bot:stable; \ 42 | docker push smarkets/marge-bot:stable; \ 43 | docker push smarkets/marge-bot:latest; \ 44 | fi 45 | docker push smarkets/marge-bot:$(VERSION) 46 | # for backwards compatibility push to previous location 47 | docker tag smarkets/marge-bot:$$(cat version) smarketshq/marge-bot:latest 48 | docker tag smarkets/marge-bot:$$(cat version) smarketshq/marge-bot:$(VERSION) 49 | docker push smarketshq/marge-bot:$(VERSION) 50 | docker push smarketshq/marge-bot:latest 51 | -------------------------------------------------------------------------------- /default.nix: -------------------------------------------------------------------------------- 1 | let sources = import ./nix/sources.nix; in 2 | with import sources.nixpkgs {}; 3 | { 4 | marge-bot = callPackage ./marge.nix {}; 5 | docker-image = callPackage ./dockerize.nix {}; 6 | } 7 | -------------------------------------------------------------------------------- /deploy.yml: -------------------------------------------------------------------------------- 1 | --- 2 | kind: "Template" 3 | apiVersion: "v1" 4 | metadata: 5 | name: "marge-ephemeral" 6 | annotations: 7 | description: "Provides a marge template" 8 | labels: 9 | template: "marge template" 10 | objects: 11 | - kind: Secret 12 | apiVersion: v1 13 | metadata: 14 | namespace: "$(KUBE_NAMESPACE)" 15 | name: marge-secrets 16 | type: Opaque 17 | data: 18 | MARGE_AUTH_TOKEN: "$(MARGE_AUTH_TOKEN)" 19 | MARGE_SSH_KEY: "$(MARGE_SSH_KEY)" 20 | - kind: Deployment 21 | apiVersion: apps/v1 22 | metadata: 23 | name: "$(APP_NAME)" 24 | namespace: "$(KUBE_NAMESPACE)" 25 | labels: 26 | k8s-app: "$(APP_NAME)" 27 | spec: 28 | replicas: "$((REPLICA_COUNT))" 29 | selector: 30 | matchLabels: 31 | k8s-app: "$(APP_NAME)" 32 | template: 33 | metadata: 34 | labels: 35 | k8s-app: "$(APP_NAME)" 36 | spec: 37 | serviceAccountName: default 38 | containers: 39 | - name: app 40 | image: "$(APP_IMAGE)" 41 | imagePullPolicy: Always 42 
| args: ["--gitlab-url=$(MARGE_GITLAB_URL)", 43 | "--impersonate-approvers", 44 | "--add-tested", 45 | "--add-reviewers", 46 | "--add-part-of"] 47 | env: 48 | - name: MARGE_AUTH_TOKEN 49 | valueFrom: 50 | secretKeyRef: 51 | name: marge-secrets 52 | key: MARGE_AUTH_TOKEN 53 | - name: MARGE_SSH_KEY 54 | valueFrom: 55 | secretKeyRef: 56 | name: marge-secrets 57 | key: MARGE_SSH_KEY 58 | parameters: 59 | - name: "APP_NAME" 60 | description: "Name of the app" 61 | value: "marge-bot" 62 | required: true 63 | parameterType: "string" 64 | - name: "APP_IMAGE" 65 | description: "App image name to run" 66 | value: "smarkets/marge-bot" 67 | required: true 68 | parameterType: "string" 69 | - name: "KUBE_NAMESPACE" 70 | description: "Kube namespace" 71 | value: "marge" 72 | required: true 73 | parameterType: "string" 74 | - name: "REPLICA_COUNT" 75 | description: "Number of replicas to run" 76 | value: 1 77 | required: true 78 | parameterType: "int" 79 | - name: "MARGE_GITLAB_URL" 80 | description: "Marge GitLab url" 81 | value: "http://your.gitlab.instance.com" 82 | required: true 83 | parameterType: "string" 84 | - name: "MARGE_AUTH_TOKEN" 85 | description: "Marge GitLab auth token" 86 | value: 0 87 | required: true 88 | parameterType: "base64" 89 | - name: "MARGE_SSH_KEY" 90 | description: "Marge GitLab rsa key" 91 | value: 0 92 | required: true 93 | parameterType: "base64" 94 | -------------------------------------------------------------------------------- /dockerize.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | let 3 | marge = pkgs.callPackage ./marge.nix {}; 4 | version = marge.version; 5 | basicShadow = 6 | # minimal user setup, so ssh won't whine 'No user exists for uid 0' 7 | pkgs.runCommand "basic-shadow-setup" {} 8 | '' 9 | mkdir -p $out 10 | cd $out 11 | mkdir -p root/.ssh 12 | mkdir -p etc/pam.d 13 | echo "root:x:0:0::/root:/bin/sh" >etc/passwd 14 | echo "root:!x:::::::" >etc/shadow 15 | echo "root:x:0:" 
>etc/group 16 | echo "root:x::" >etc/gshadow 17 | cat >etc/pam.d/other <<\EOF 18 | account sufficient pam_unix.so 19 | auth sufficient pam_rootok.so 20 | password requisite pam_unix.so nullok sha512 21 | session required pam_unix.so 22 | EOF 23 | ''; 24 | in 25 | pkgs.dockerTools.buildImage { 26 | name = "smarkets/marge-bot"; 27 | tag = "${version}"; 28 | contents = 29 | with pkgs; [ 30 | basicShadow 31 | busybox 32 | gitMinimal 33 | openssh 34 | cacert 35 | ] ++ [ marge ]; 36 | config = { 37 | Entrypoint = [ "/bin/marge.app" ]; 38 | Env = ["LANG=en_US.UTF-8" ''LOCALE_ARCHIVE=/lib/locale/locale-archive'' "GIT_SSL_CAINFO=/etc/ssl/certs/ca-bundle.crt" "SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt"]; 39 | }; 40 | } 41 | -------------------------------------------------------------------------------- /marge.app: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import marge.app 3 | 4 | try: 5 | marge.app.main() 6 | except Exception as e: 7 | print('Exception occured') 8 | if hasattr(e, 'stdout'): 9 | print(f'stdout was: {e.stdout}') 10 | if hasattr(e, 'stderr'): 11 | print(f'stderr was: {e.stderr}') 12 | raise 13 | -------------------------------------------------------------------------------- /marge.nix: -------------------------------------------------------------------------------- 1 | { pkgs 2 | , lib 3 | }: 4 | let 5 | python = import ./requirements.nix { inherit pkgs; }; 6 | version = lib.fileContents ./version; 7 | in 8 | python.mkDerivation { 9 | version = "${version}"; 10 | name = "marge-${version}"; 11 | src = lib.sourceByRegex ./. [ 12 | "marge(/.*\.py)?" 13 | "tests(/.*\.py)?" 
14 | "marge\.app" 15 | "pylintrc" 16 | "setup\.cfg" 17 | "setup\.py" 18 | "version" 19 | ]; 20 | checkInputs = with python.packages; [ 21 | pytest 22 | pytest-cov 23 | pytest-flake8 24 | pytest-pylint 25 | pytest-runner 26 | ]; 27 | propagatedBuildInputs = with python.packages; [ 28 | ConfigArgParse maya PyYAML requests 29 | ]; 30 | meta = { 31 | homepage = "https://github.com/smarkets/marge-bot"; 32 | description = "A build bot for GitLab"; 33 | license = lib.licenses.bsd3; 34 | maintainers = [ 35 | "Alexander Schmolck " 36 | "Jaime Lennox " 37 | ]; 38 | platforms = pkgs.lib.platforms.linux ++ pkgs.lib.platforms.darwin; 39 | }; 40 | } 41 | -------------------------------------------------------------------------------- /marge/__init__.py: -------------------------------------------------------------------------------- 1 | import logging as log 2 | 3 | log.basicConfig( 4 | level=log.INFO, 5 | format='%(asctime)s %(levelname)s %(message)s', 6 | ) 7 | -------------------------------------------------------------------------------- /marge/app.py: -------------------------------------------------------------------------------- 1 | """ 2 | An auto-merger of merge requests for GitLab 3 | """ 4 | 5 | import contextlib 6 | import logging 7 | import re 8 | import sys 9 | import tempfile 10 | from datetime import timedelta 11 | 12 | import configargparse 13 | 14 | from . import bot 15 | from . import interval 16 | from . import gitlab 17 | from . import user as user_module 18 | 19 | 20 | class MargeBotCliArgError(Exception): 21 | pass 22 | 23 | 24 | def time_interval(str_interval): 25 | try: 26 | quant, unit = re.match(r'\A([\d.]+) ?(h|m(?:in)?|s)?\Z', str_interval).groups() 27 | translate = {'h': 'hours', 'm': 'minutes', 'min': 'minutes', 's': 'seconds'} 28 | return timedelta(**{translate[unit or 's']: float(quant)}) 29 | except (AttributeError, ValueError) as err: 30 | raise configargparse.ArgumentTypeError( 31 | 'Invalid time interval (e.g. 
12[s|min|h]): %s' % str_interval 32 | ) from err 33 | 34 | 35 | def _parse_config(args): # pylint: disable=too-many-statements 36 | 37 | def regexp(str_regex): 38 | try: 39 | return re.compile(str_regex) 40 | except re.error as err: 41 | raise configargparse.ArgumentTypeError('Invalid regexp: %r (%s)' % (str_regex, err.msg)) 42 | 43 | parser = configargparse.ArgParser( 44 | auto_env_var_prefix='MARGE_', 45 | ignore_unknown_config_file_keys=True, # Don't parse unknown args 46 | config_file_parser_class=configargparse.YAMLConfigFileParser, 47 | formatter_class=configargparse.ArgumentDefaultsRawHelpFormatter, 48 | description=__doc__, 49 | ) 50 | parser.add_argument( 51 | '--config-file', 52 | env_var='MARGE_CONFIG_FILE', 53 | type=str, 54 | is_config_file=True, 55 | help='config file path', 56 | ) 57 | auth_token_group = parser.add_mutually_exclusive_group(required=True) 58 | auth_token_group.add_argument( 59 | '--auth-token', 60 | type=str, 61 | metavar='TOKEN', 62 | help=( 63 | 'Your GitLab token.\n' 64 | 'DISABLED because passing credentials on the command line is insecure:\n' 65 | 'You can still set it via ENV variable or config file, or use "--auth-token-file" flag.\n' 66 | ), 67 | ) 68 | auth_token_group.add_argument( 69 | '--auth-token-file', 70 | type=configargparse.FileType('rt'), 71 | metavar='FILE', 72 | help='Path to your GitLab token file.\n', 73 | ) 74 | parser.add_argument( 75 | '--gitlab-url', 76 | type=str, 77 | required=True, 78 | metavar='URL', 79 | help='Your GitLab instance, e.g. 
"https://gitlab.example.com".\n', 80 | ) 81 | repo_access = parser.add_mutually_exclusive_group(required=True) 82 | repo_access.add_argument( 83 | '--use-https', 84 | env_var='MARGE_USE_HTTPS', 85 | action='store_true', 86 | help='use HTTP(S) instead of SSH for GIT repository access\n', 87 | ) 88 | repo_access.add_argument( 89 | '--ssh-key', 90 | type=str, 91 | metavar='KEY', 92 | help=( 93 | 'The private ssh key for marge so it can clone/push.\n' 94 | 'DISABLED because passing credentials on the command line is insecure:\n' 95 | 'You can still set it via ENV variable or config file, or use "--ssh-key-file" flag.\n' 96 | ), 97 | ) 98 | repo_access.add_argument( 99 | '--ssh-key-file', 100 | type=str, # because we want a file location, not the content 101 | metavar='FILE', 102 | help='Path to the private ssh key for marge so it can clone/push.\n', 103 | ) 104 | parser.add_argument( 105 | '--embargo', 106 | type=interval.IntervalUnion.from_human, 107 | metavar='INTERVAL[,..]', 108 | help='Time(s) during which no merging is to take place, e.g. "Friday 1pm - Monday 9am".\n', 109 | ) 110 | experimental_group = parser.add_mutually_exclusive_group(required=False) 111 | experimental_group.add_argument( 112 | '--use-merge-strategy', 113 | action='store_true', 114 | help=( 115 | 'Use git merge instead of git rebase to update the *source* branch (EXPERIMENTAL)\n' 116 | 'If you need to use a strict no-rebase workflow (in most cases\n' 117 | 'you don\'t want this, even if you configured gitlab to use merge requests\n' 118 | 'to use merge commits on the *target* branch (the default).)\n' 119 | ), 120 | ) 121 | parser.add_argument( 122 | '--rebase-remotely', 123 | action='store_true', 124 | help=( 125 | "Instead of rebasing in a local clone of the repository, use GitLab's\n" 126 | "built-in rebase functionality, via their API. 
Note that Marge can't add\n" 127 | "information in the commits in this case.\n" 128 | ), 129 | ) 130 | parser.add_argument( 131 | '--add-tested', 132 | action='store_true', 133 | help='Add "Tested: marge-bot <$MR_URL>" for the final commit on branch after it passed CI.\n', 134 | ) 135 | parser.add_argument( 136 | '--batch', 137 | action='store_true', 138 | help='Enable processing MRs in batches\n', 139 | ) 140 | parser.add_argument( 141 | '--add-part-of', 142 | action='store_true', 143 | help='Add "Part-of: <$MR_URL>" to each commit in MR.\n', 144 | ) 145 | parser.add_argument( 146 | '--add-reviewers', 147 | action='store_true', 148 | help='Add "Reviewed-by: $approver" for each approver of MR to each commit in MR.\n', 149 | ) 150 | parser.add_argument( 151 | '--impersonate-approvers', 152 | action='store_true', 153 | help='Marge-bot pushes effectively don\'t change approval status.\n', 154 | ) 155 | parser.add_argument( 156 | '--merge-order', 157 | default='created_at', 158 | choices=('created_at', 'updated_at', 'assigned_at'), 159 | help='Order marge merges assigned requests. created_at (default), updated_at or assigned_at.\n', 160 | ) 161 | parser.add_argument( 162 | '--approval-reset-timeout', 163 | type=time_interval, 164 | default='0s', 165 | help=( 166 | 'How long to wait for approvals to reset after pushing.\n' 167 | 'Only useful with the "new commits remove all approvals" option in a project\'s settings.\n' 168 | 'This is to handle the potential race condition where approvals don\'t reset in GitLab\n' 169 | 'after a force push due to slow processing of the event.\n' 170 | ), 171 | ) 172 | parser.add_argument( 173 | '--project-regexp', 174 | type=regexp, 175 | default='.*', 176 | help="Only process projects that match; e.g. 
'some_group/.*' or '(?!exclude/me)'.\n", 177 | ) 178 | parser.add_argument( 179 | '--ci-timeout', 180 | type=time_interval, 181 | default='15min', 182 | help='How long to wait for CI to pass.\n', 183 | ) 184 | parser.add_argument( 185 | '--max-ci-time-in-minutes', 186 | type=int, 187 | default=None, 188 | help='Deprecated; use --ci-timeout.\n', 189 | ) 190 | parser.add_argument( 191 | '--git-timeout', 192 | type=time_interval, 193 | default='120s', 194 | help='How long a single git operation can take.\n' 195 | ) 196 | parser.add_argument( 197 | '--git-reference-repo', 198 | type=str, 199 | default=None, 200 | help='A reference repo to be used when git cloning.\n' 201 | ) 202 | parser.add_argument( 203 | '--branch-regexp', 204 | type=regexp, 205 | default='.*', 206 | help='Only process MRs whose target branches match the given regular expression.\n', 207 | ) 208 | parser.add_argument( 209 | '--source-branch-regexp', 210 | type=regexp, 211 | default='.*', 212 | help='Only process MRs whose source branches match the given regular expression.\n', 213 | ) 214 | parser.add_argument( 215 | '--debug', 216 | action='store_true', 217 | help='Debug logging (includes all HTTP requests etc).\n', 218 | ) 219 | parser.add_argument( 220 | '--use-no-ff-batches', 221 | action='store_true', 222 | help='Disable fast forwarding when merging MR batches' 223 | ) 224 | parser.add_argument( 225 | '--use-merge-commit-batches', 226 | action='store_true', 227 | help='Use merge commit when creating batches, so that the commits in the batch MR ' 228 | 'will be the same with in individual MRs. 
Requires sudo scope in the access token.\n', 229 | ) 230 | parser.add_argument( 231 | '--skip-ci-batches', 232 | action='store_true', 233 | help='Skip CI when updating individual MRs when using batches' 234 | ) 235 | parser.add_argument( 236 | '--cli', 237 | action='store_true', 238 | help='Run marge-bot as a single CLI command, not a service' 239 | ) 240 | parser.add_argument( 241 | '--guarantee-final-pipeline', 242 | action='store_true', 243 | help='Guaranteed final pipeline when assigned to marge-bot' 244 | ) 245 | 246 | config = parser.parse_args(args) 247 | 248 | if config.use_merge_strategy and config.batch: 249 | raise MargeBotCliArgError('--use-merge-strategy and --batch are currently mutually exclusive') 250 | if config.use_merge_strategy and config.add_tested: 251 | raise MargeBotCliArgError('--use-merge-strategy and --add-tested are currently mutually exclusive') 252 | if config.rebase_remotely: 253 | conflicting_flag = [ 254 | '--use-merge-strategy', 255 | '--add-tested', 256 | '--add-reviewers', 257 | '--add-part-of', 258 | ] 259 | for flag in conflicting_flag: 260 | if getattr(config, flag[2:].replace("-", "_")): 261 | raise MargeBotCliArgError('--rebase-remotely and %s are mutually exclusive' % flag) 262 | 263 | cli_args = [] 264 | # pylint: disable=protected-access 265 | for _, (_, value) in parser._source_to_settings.get(configargparse._COMMAND_LINE_SOURCE_KEY, {}).items(): 266 | cli_args.extend(value) 267 | for bad_arg in ['--auth-token', '--ssh-key']: 268 | if any(bad_arg in arg for arg in cli_args): 269 | raise MargeBotCliArgError('"%s" can only be set via ENV var or config file.' 
% bad_arg) 270 | return config 271 | 272 | 273 | @contextlib.contextmanager 274 | def _secret_auth_token_and_ssh_key(options): 275 | auth_token = options.auth_token or options.auth_token_file.readline().strip() 276 | if options.use_https: 277 | yield auth_token, None 278 | elif options.ssh_key_file: 279 | yield auth_token, options.ssh_key_file 280 | else: 281 | with tempfile.NamedTemporaryFile(mode='w', prefix='ssh-key-') as tmp_ssh_key_file: 282 | try: 283 | tmp_ssh_key_file.write(options.ssh_key + '\n') 284 | tmp_ssh_key_file.flush() 285 | yield auth_token, tmp_ssh_key_file.name 286 | finally: 287 | tmp_ssh_key_file.close() 288 | 289 | 290 | def main(args=None): 291 | if args is None: 292 | args = sys.argv[1:] 293 | logging.basicConfig() 294 | 295 | options = _parse_config(args) 296 | 297 | if options.debug: 298 | logging.getLogger().setLevel(logging.DEBUG) 299 | else: 300 | logging.getLogger("requests").setLevel(logging.WARNING) 301 | 302 | with _secret_auth_token_and_ssh_key(options) as (auth_token, ssh_key_file): 303 | api = gitlab.Api(options.gitlab_url, auth_token) 304 | user = user_module.User.myself(api) 305 | if options.max_ci_time_in_minutes: 306 | logging.warning( 307 | "--max-ci-time-in-minutes is DEPRECATED, use --ci-timeout %dmin", 308 | options.max_ci_time_in_minutes 309 | ) 310 | options.ci_timeout = timedelta(minutes=options.max_ci_time_in_minutes) 311 | 312 | if options.batch: 313 | logging.warning('Experimental batch mode enabled') 314 | 315 | if options.use_merge_strategy: 316 | fusion = bot.Fusion.merge 317 | elif options.rebase_remotely: 318 | version = api.version() 319 | if version.release < (11, 6): 320 | raise Exception( 321 | "Need GitLab 11.6+ to use rebase through the API, " 322 | "but your instance is {}".format(version) 323 | ) 324 | fusion = bot.Fusion.gitlab_rebase 325 | else: 326 | fusion = bot.Fusion.rebase 327 | 328 | config = bot.BotConfig( 329 | user=user, 330 | use_https=options.use_https, 331 | auth_token=auth_token, 332 | 
ssh_key_file=ssh_key_file, 333 | project_regexp=options.project_regexp, 334 | git_timeout=options.git_timeout, 335 | git_reference_repo=options.git_reference_repo, 336 | branch_regexp=options.branch_regexp, 337 | source_branch_regexp=options.source_branch_regexp, 338 | merge_order=options.merge_order, 339 | merge_opts=bot.MergeJobOptions.default( 340 | add_tested=options.add_tested, 341 | add_part_of=options.add_part_of, 342 | add_reviewers=options.add_reviewers, 343 | reapprove=options.impersonate_approvers, 344 | approval_timeout=options.approval_reset_timeout, 345 | embargo=options.embargo, 346 | ci_timeout=options.ci_timeout, 347 | fusion=fusion, 348 | use_no_ff_batches=options.use_no_ff_batches, 349 | use_merge_commit_batches=options.use_merge_commit_batches, 350 | skip_ci_batches=options.skip_ci_batches, 351 | guarantee_final_pipeline=options.guarantee_final_pipeline, 352 | ), 353 | batch=options.batch, 354 | cli=options.cli, 355 | ) 356 | 357 | marge_bot = bot.Bot(api=api, config=config) 358 | marge_bot.start() 359 | -------------------------------------------------------------------------------- /marge/approvals.py: -------------------------------------------------------------------------------- 1 | from . 
class Approvals(gitlab.Resource):
    """Approval info for a MergeRequest."""

    def refetch_info(self):
        """Reload the approval state of this MR from the API.

        On Community Edition releases before 13.2 (where approvals are an
        EE-only feature) the info is faked so the MR always looks approved.
        """
        gitlab_version = self._api.version()
        # GitLab botched the v4 API before 9.2.3: those releases want the
        # global id rather than the per-project iid in the endpoint.
        if gitlab_version.release >= (9, 2, 2):
            mr_id = self.iid
        else:
            mr_id = self.id
        approver_url = '/projects/{}/merge_requests/{}/approvals'.format(self.project_id, mr_id)

        # Approvals landed in CE with 13.2; before that only EE has them.
        if gitlab_version.is_ee or gitlab_version.release >= (13, 2, 0):
            self._info = self._api.call(GET(approver_url))
        else:
            # No approvals support: report "nothing left to approve".
            self._info = dict(self._info, approvals_left=0, approved_by=[])

    @property
    def iid(self):
        """Project-local id of the merge request."""
        return self.info['iid']

    @property
    def project_id(self):
        """Id of the project the merge request belongs to."""
        return self.info['project_id']

    @property
    def approvals_left(self):
        # Key may be missing or None; normalise either case to 0.
        return self.info.get('approvals_left') or 0

    @property
    def sufficient(self):
        """True when no further approvals are required."""
        return not self.approvals_left

    @property
    def approver_usernames(self):
        """Usernames of everyone who approved the merge request."""
        return [entry['user']['username'] for entry in self.info['approved_by']]

    @property
    def approver_ids(self):
        """Return the uids of the approvers."""
        return [entry['user']['id'] for entry in self.info['approved_by']]

    def reapprove(self):
        """Impersonates the approvers and re-approves the merge_request as them.

        The idea is that we want to get the approvers, push the rebased branch
        (which may invalidate approvals, depending on GitLab settings) and then
        restore the approval status.
        """
        self.approve(self)

    def approve(self, obj):
        """Approve an object which can be a merge_request or an approval."""
        if self._api.version().release >= (9, 2, 2):
            approve_url = '/projects/{0.project_id}/merge_requests/{0.iid}/approve'.format(obj)
        else:
            # GitLab botched the v4 api before 9.2.3
            approve_url = '/projects/{0.project_id}/merge_requests/{0.id}/approve'.format(obj)

        for approver_id in self.approver_ids:
            # Impersonate each approver via the SUDO header.
            self._api.call(POST(approve_url), sudo=approver_id)
in batch_mrs: 46 | log.info('Closing batch MR !%s', batch_mr.iid) 47 | batch_mr.close() 48 | 49 | def create_batch_mr(self, target_branch): 50 | self.push_batch() 51 | log.info('Creating batch MR') 52 | params = { 53 | 'source_branch': BatchMergeJob.BATCH_BRANCH_NAME, 54 | 'target_branch': target_branch, 55 | 'title': 'Marge Bot Batch MR - DO NOT TOUCH', 56 | 'labels': BatchMergeJob.BATCH_BRANCH_NAME, 57 | } 58 | batch_mr = MergeRequest.create( 59 | api=self._api, 60 | project_id=self._project.id, 61 | params=params, 62 | ) 63 | log.info('Batch MR !%s created', batch_mr.iid) 64 | return batch_mr 65 | 66 | def get_mrs_with_common_target_branch(self, target_branch): 67 | log.info('Filtering MRs with target branch %s', target_branch) 68 | return [ 69 | merge_request for merge_request in self._merge_requests 70 | if merge_request.target_branch == target_branch 71 | ] 72 | 73 | def ensure_mergeable_mr(self, merge_request, skip_ci=False): 74 | super().ensure_mergeable_mr(merge_request) 75 | 76 | if self._project.only_allow_merge_if_pipeline_succeeds and not skip_ci: 77 | ci_status = self.get_mr_ci_status(merge_request) 78 | if ci_status != 'success': 79 | raise CannotBatch('This MR has not passed CI.') 80 | 81 | def get_mergeable_mrs(self, merge_requests): 82 | log.info('Filtering mergeable MRs') 83 | mergeable_mrs = [] 84 | for merge_request in merge_requests: 85 | try: 86 | self.ensure_mergeable_mr(merge_request) 87 | except (CannotBatch, SkipMerge) as ex: 88 | log.warning('Skipping unbatchable MR: "%s"', ex) 89 | except CannotMerge as ex: 90 | log.warning('Skipping unmergeable MR: "%s"', ex) 91 | self.unassign_from_mr(merge_request) 92 | merge_request.comment("I couldn't merge this branch: {}".format(ex)) 93 | else: 94 | mergeable_mrs.append(merge_request) 95 | return mergeable_mrs 96 | 97 | def push_batch(self): 98 | log.info('Pushing batch branch') 99 | self._repo.push(BatchMergeJob.BATCH_BRANCH_NAME, force=True) 100 | 101 | def ensure_mr_not_changed(self, 
merge_request): 102 | log.info('Ensuring MR !%s did not change', merge_request.iid) 103 | changed_mr = MergeRequest.fetch_by_iid( 104 | merge_request.project_id, 105 | merge_request.iid, 106 | self._api, 107 | ) 108 | error_message = 'The {} changed whilst merging!' 109 | for attr in ('source_branch', 'source_project_id', 'target_branch', 'target_project_id', 'sha'): 110 | if getattr(changed_mr, attr) != getattr(merge_request, attr): 111 | raise CannotMerge(error_message.format(attr.replace('_', ' '))) 112 | 113 | def merge_batch(self, target_branch, source_branch, no_ff=False): 114 | if no_ff: 115 | return self._repo.merge( 116 | target_branch, 117 | source_branch, 118 | '--no-ff', 119 | ) 120 | 121 | return self._repo.fast_forward( 122 | target_branch, 123 | source_branch, 124 | ) 125 | 126 | def update_merge_request( 127 | self, 128 | merge_request, 129 | source_repo_url=None, 130 | ): 131 | log.info('Fusing MR !%s', merge_request.iid) 132 | approvals = merge_request.fetch_approvals() 133 | 134 | _, _, actual_sha = self.update_from_target_branch_and_push( 135 | merge_request, 136 | source_repo_url=source_repo_url, 137 | skip_ci=self._options.skip_ci_batches, 138 | ) 139 | 140 | sha_now = Commit.last_on_branch( 141 | merge_request.source_project_id, merge_request.source_branch, self._api, 142 | ).id 143 | log.info('update_merge_request: sha_now (%s), actual_sha (%s)', sha_now, actual_sha) 144 | # Make sure no-one managed to race and push to the branch in the 145 | # meantime, because we're about to impersonate the approvers, and 146 | # we don't want to approve unreviewed commits 147 | if sha_now != actual_sha: 148 | raise CannotMerge('Someone pushed to branch while we were trying to merge') 149 | 150 | # As we're not using the API to merge the individual MR, we don't strictly need to reapprove it. 151 | # However, it's a little weird to look at the merged MR to find it has no approvals, 152 | # so let's do it anyway. 
153 | self.maybe_reapprove(merge_request, approvals) 154 | return sha_now 155 | 156 | def accept_mr( 157 | self, 158 | merge_request, 159 | expected_remote_target_branch_sha, 160 | source_repo_url=None, 161 | ): 162 | log.info('Accept MR !%s', merge_request.iid) 163 | 164 | # Make sure latest commit in remote is the one we tested against 165 | new_target_sha = Commit.last_on_branch(self._project.id, merge_request.target_branch, self._api).id 166 | if new_target_sha != expected_remote_target_branch_sha: 167 | raise CannotBatch('Someone was naughty and by-passed marge') 168 | 169 | # Rebase and apply the trailers 170 | self.update_merge_request( 171 | merge_request, 172 | source_repo_url=source_repo_url, 173 | ) 174 | 175 | # This switches git to 176 | final_sha = self.merge_batch( 177 | merge_request.target_branch, 178 | merge_request.source_branch, 179 | self._options.use_no_ff_batches, 180 | ) 181 | # Don't force push in case the remote has changed. 182 | self._repo.push(merge_request.target_branch, force=False) 183 | 184 | sleep(2) 185 | 186 | # At this point Gitlab should have recognised the MR as being accepted. 187 | log.info('Successfully merged MR !%s', merge_request.iid) 188 | 189 | pipelines = Pipeline.pipelines_by_branch( 190 | api=self._api, 191 | project_id=merge_request.source_project_id, 192 | branch=merge_request.source_branch, 193 | status='running', 194 | ) 195 | for pipeline in pipelines: 196 | pipeline.cancel() 197 | 198 | return final_sha 199 | 200 | def execute(self): 201 | # Cleanup previous batch work 202 | self.remove_batch_branch() 203 | self.close_batch_mr() 204 | 205 | target_branch = self._merge_requests[0].target_branch 206 | merge_requests = self.get_mrs_with_common_target_branch(target_branch) 207 | merge_requests = self.get_mergeable_mrs(merge_requests) 208 | 209 | if len(merge_requests) <= 1: 210 | # Either no merge requests are ready to be merged, or there's only one for this target branch. 
211 | # Let's raise an error to do a basic job for these cases. 212 | raise CannotBatch('not enough ready merge requests') 213 | 214 | self._repo.fetch('origin') 215 | 216 | # Save the sha of remote so we can use it to make sure 217 | # the remote wasn't changed while we're testing against it 218 | remote_target_branch_sha = self._repo.get_commit_hash('origin/%s' % target_branch) 219 | 220 | self._repo.checkout_branch(target_branch, 'origin/%s' % target_branch) 221 | self._repo.checkout_branch(BatchMergeJob.BATCH_BRANCH_NAME, 'origin/%s' % target_branch) 222 | 223 | batch_mr = self.create_batch_mr( 224 | target_branch=target_branch, 225 | ) 226 | batch_mr_sha = batch_mr.sha 227 | 228 | working_merge_requests = [] 229 | 230 | for merge_request in merge_requests: 231 | try: 232 | _, source_repo_url, merge_request_remote = self.fetch_source_project(merge_request) 233 | self._repo.checkout_branch( 234 | merge_request.source_branch, 235 | '%s/%s' % (merge_request_remote, merge_request.source_branch), 236 | ) 237 | 238 | if self._options.use_merge_commit_batches: 239 | # Rebase and apply the trailers before running the batch MR 240 | actual_sha = self.update_merge_request( 241 | merge_request, 242 | source_repo_url=source_repo_url, 243 | ) 244 | # Update branch with MR changes 245 | batch_mr_sha = self._repo.merge( 246 | BatchMergeJob.BATCH_BRANCH_NAME, 247 | merge_request.source_branch, 248 | '-m', 249 | 'Batch merge !%s into %s (!%s)' % ( 250 | merge_request.iid, 251 | merge_request.target_branch, 252 | batch_mr.iid 253 | ), 254 | local=True, 255 | ) 256 | else: 257 | # Update on latest branch so it contains previous MRs 258 | self.fuse( 259 | merge_request.source_branch, 260 | BatchMergeJob.BATCH_BRANCH_NAME, 261 | source_repo_url=source_repo_url, 262 | local=True, 263 | ) 264 | # Update branch with MR changes 265 | batch_mr_sha = self._repo.fast_forward( 266 | BatchMergeJob.BATCH_BRANCH_NAME, 267 | merge_request.source_branch, 268 | local=True, 269 | ) 270 | 271 | # 
We don't need anymore. Remove it now in case another 272 | # merge request is using the same branch name in a different project. 273 | self._repo.remove_branch(merge_request.source_branch) 274 | except (git.GitError, CannotMerge): 275 | log.warning('Skipping MR !%s, got conflicts while rebasing', merge_request.iid) 276 | continue 277 | else: 278 | if self._options.use_merge_commit_batches: 279 | # update merge_request with the current sha, we will compare it with 280 | # the actual sha later to make sure no one pushed this MR meanwhile 281 | merge_request.update_sha(actual_sha) 282 | 283 | working_merge_requests.append(merge_request) 284 | 285 | if len(working_merge_requests) <= 1: 286 | raise CannotBatch('not enough ready merge requests') 287 | 288 | # This switches git to branch 289 | self.push_batch() 290 | for merge_request in working_merge_requests: 291 | merge_request.comment('I will attempt to batch this MR (!{})...'.format(batch_mr.iid)) 292 | 293 | # wait for the CI of the batch MR 294 | if self._project.only_allow_merge_if_pipeline_succeeds: 295 | try: 296 | self.wait_for_ci_to_pass(batch_mr, commit_sha=batch_mr_sha) 297 | except CannotMerge as err: 298 | for merge_request in working_merge_requests: 299 | merge_request.comment( 300 | 'Batch MR !{batch_mr_iid} failed: {error} I will retry later...'.format( 301 | batch_mr_iid=batch_mr.iid, 302 | error=err.reason, 303 | ), 304 | ) 305 | raise CannotBatch(err.reason) from err 306 | 307 | # check each sub MR, and accept each sub MR if using the normal batch 308 | for merge_request in working_merge_requests: 309 | try: 310 | # FIXME: this should probably be part of the merge request 311 | _, source_repo_url, merge_request_remote = self.fetch_source_project(merge_request) 312 | self.ensure_mr_not_changed(merge_request) 313 | # we know the batch MR's CI passed, so we skip CI for sub MRs this time 314 | self.ensure_mergeable_mr(merge_request, skip_ci=True) 315 | 316 | if not self._options.use_merge_commit_batches: 
317 | # accept each MRs 318 | remote_target_branch_sha = self.accept_mr( 319 | merge_request, 320 | remote_target_branch_sha, 321 | source_repo_url=source_repo_url, 322 | ) 323 | except CannotBatch as err: 324 | merge_request.comment( 325 | "I couldn't merge this branch: {error} I will retry later...".format( 326 | error=str(err), 327 | ), 328 | ) 329 | raise 330 | except SkipMerge: 331 | # Raise here to avoid being caught below - we don't want to be unassigned. 332 | raise 333 | except CannotMerge as err: 334 | self.unassign_from_mr(merge_request) 335 | merge_request.comment("I couldn't merge this branch: %s" % err.reason) 336 | raise 337 | 338 | # Accept the batch MR 339 | if self._options.use_merge_commit_batches: 340 | # Approve the batch MR using the last sub MR's approvers 341 | if not batch_mr.fetch_approvals().sufficient: 342 | approvals = working_merge_requests[-1].fetch_approvals() 343 | try: 344 | approvals.approve(batch_mr) 345 | except (gitlab.Forbidden, gitlab.Unauthorized): 346 | log.exception('Failed to approve MR:') 347 | 348 | try: 349 | ret = batch_mr.accept( 350 | remove_branch=batch_mr.force_remove_source_branch, 351 | sha=batch_mr_sha, 352 | merge_when_pipeline_succeeds=bool(self._project.only_allow_merge_if_pipeline_succeeds), 353 | ) 354 | log.info('batch_mr.accept result: %s', ret) 355 | except gitlab.ApiError as err: 356 | log.exception('Gitlab API Error:') 357 | raise CannotMerge('Gitlab API Error: %s' % err) from err 358 | -------------------------------------------------------------------------------- /marge/bot.py: -------------------------------------------------------------------------------- 1 | import logging as log 2 | import time 3 | from collections import namedtuple 4 | from tempfile import TemporaryDirectory 5 | 6 | from . import batch_job 7 | from . import git 8 | from . import job 9 | from . import merge_request as merge_request_module 10 | from . import single_merge_job 11 | from . 
import store 12 | from .project import AccessLevel, Project 13 | 14 | MergeRequest = merge_request_module.MergeRequest 15 | 16 | 17 | class Bot: 18 | def __init__(self, *, api, config): 19 | self._api = api 20 | self._config = config 21 | 22 | user = config.user 23 | opts = config.merge_opts 24 | 25 | if not user.is_admin: 26 | assert not opts.reapprove, ( 27 | "{0.username} is not an admin, can't impersonate!".format(user) 28 | ) 29 | assert not opts.add_reviewers, ( 30 | "{0.username} is not an admin, can't lookup Reviewed-by: email addresses ".format(user) 31 | ) 32 | 33 | def start(self): 34 | with TemporaryDirectory() as root_dir: 35 | if self._config.use_https: 36 | repo_manager = store.HttpsRepoManager( 37 | user=self.user, 38 | root_dir=root_dir, 39 | auth_token=self._config.auth_token, 40 | timeout=self._config.git_timeout, 41 | reference=self._config.git_reference_repo, 42 | ) 43 | else: 44 | repo_manager = store.SshRepoManager( 45 | user=self.user, 46 | root_dir=root_dir, 47 | ssh_key_file=self._config.ssh_key_file, 48 | timeout=self._config.git_timeout, 49 | reference=self._config.git_reference_repo, 50 | ) 51 | self._run(repo_manager) 52 | 53 | @property 54 | def user(self): 55 | return self._config.user 56 | 57 | @property 58 | def api(self): 59 | return self._api 60 | 61 | def _run(self, repo_manager): 62 | time_to_sleep_between_projects_in_secs = 1 63 | min_time_to_sleep_after_iterating_all_projects_in_secs = 30 64 | while True: 65 | projects = self._get_projects() 66 | self._process_projects( 67 | repo_manager, 68 | time_to_sleep_between_projects_in_secs, 69 | projects, 70 | ) 71 | if self._config.cli: 72 | return 73 | 74 | big_sleep = max(0, 75 | min_time_to_sleep_after_iterating_all_projects_in_secs - 76 | time_to_sleep_between_projects_in_secs * len(projects)) 77 | log.info('Sleeping for %s seconds...', big_sleep) 78 | time.sleep(big_sleep) 79 | 80 | def _get_projects(self): 81 | log.info('Finding out my current projects...') 82 | my_projects = 
Project.fetch_all_mine(self._api) 83 | project_regexp = self._config.project_regexp 84 | filtered_projects = [p for p in my_projects if project_regexp.match(p.path_with_namespace)] 85 | log.debug( 86 | 'Projects that match project_regexp: %s', 87 | [p.path_with_namespace for p in filtered_projects] 88 | ) 89 | filtered_out = set(my_projects) - set(filtered_projects) 90 | if filtered_out: 91 | log.debug( 92 | 'Projects that do not match project_regexp: %s', 93 | [p.path_with_namespace for p in filtered_out] 94 | ) 95 | return filtered_projects 96 | 97 | def _process_projects( 98 | self, 99 | repo_manager, 100 | time_to_sleep_between_projects_in_secs, 101 | projects, 102 | ): 103 | for project in projects: 104 | project_name = project.path_with_namespace 105 | 106 | if project.access_level < AccessLevel.reporter: 107 | log.warning("Don't have enough permissions to browse merge requests in %s!", project_name) 108 | continue 109 | merge_requests = self._get_merge_requests(project, project_name) 110 | self._process_merge_requests(repo_manager, project, merge_requests) 111 | time.sleep(time_to_sleep_between_projects_in_secs) 112 | 113 | def _get_merge_requests(self, project, project_name): 114 | log.info('Fetching merge requests assigned to me in %s...', project_name) 115 | my_merge_requests = MergeRequest.fetch_all_open_for_user( 116 | project_id=project.id, 117 | user=self.user, 118 | api=self._api, 119 | merge_order=self._config.merge_order, 120 | ) 121 | branch_regexp = self._config.branch_regexp 122 | filtered_mrs = [mr for mr in my_merge_requests 123 | if branch_regexp.match(mr.target_branch)] 124 | log.debug( 125 | 'MRs that match branch_regexp: %s', 126 | [mr.web_url for mr in filtered_mrs] 127 | ) 128 | filtered_out = set(my_merge_requests) - set(filtered_mrs) 129 | if filtered_out: 130 | log.debug( 131 | 'MRs that do not match branch_regexp: %s', 132 | [mr.web_url for mr in filtered_out] 133 | ) 134 | source_branch_regexp = self._config.source_branch_regexp 135 
| source_filtered_mrs = [mr for mr in filtered_mrs 136 | if source_branch_regexp.match(mr.source_branch)] 137 | log.debug( 138 | 'MRs that match source_branch_regexp: %s', 139 | [mr.web_url for mr in source_filtered_mrs] 140 | ) 141 | source_filtered_out = set(filtered_mrs) - set(source_filtered_mrs) 142 | if source_filtered_out: 143 | log.debug( 144 | 'MRs that do not match source_branch_regexp: %s', 145 | [mr.web_url for mr in source_filtered_out] 146 | ) 147 | return source_filtered_mrs 148 | 149 | def _process_merge_requests(self, repo_manager, project, merge_requests): 150 | if not merge_requests: 151 | log.info('Nothing to merge at this point...') 152 | return 153 | 154 | try: 155 | repo = repo_manager.repo_for_project(project) 156 | except git.GitError: 157 | log.exception("Couldn't initialize repository for project!") 158 | raise 159 | 160 | log.info('Got %s requests to merge;', len(merge_requests)) 161 | if self._config.batch and len(merge_requests) > 1: 162 | log.info('Attempting to merge as many MRs as possible using BatchMergeJob...') 163 | batch_merge_job = batch_job.BatchMergeJob( 164 | api=self._api, 165 | user=self.user, 166 | project=project, 167 | merge_requests=merge_requests, 168 | repo=repo, 169 | options=self._config.merge_opts, 170 | ) 171 | try: 172 | batch_merge_job.execute() 173 | return 174 | except batch_job.CannotBatch as err: 175 | log.warning('BatchMergeJob aborted: %s', err) 176 | except batch_job.CannotMerge as err: 177 | log.warning('BatchMergeJob failed: %s', err) 178 | return 179 | except git.GitError as err: 180 | log.exception('BatchMergeJob failed: %s', err) 181 | log.info('Attempting to merge the oldest MR...') 182 | merge_request = merge_requests[0] 183 | merge_job = self._get_single_job( 184 | project=project, merge_request=merge_request, repo=repo, 185 | options=self._config.merge_opts, 186 | ) 187 | merge_job.execute() 188 | 189 | def _get_single_job(self, project, merge_request, repo, options): 190 | return 
single_merge_job.SingleMergeJob( 191 | api=self._api, 192 | user=self.user, 193 | project=project, 194 | merge_request=merge_request, 195 | repo=repo, 196 | options=options, 197 | ) 198 | 199 | 200 | class BotConfig(namedtuple('BotConfig', 201 | 'user use_https auth_token ssh_key_file project_regexp merge_order merge_opts ' + 202 | 'git_timeout git_reference_repo branch_regexp source_branch_regexp batch cli')): 203 | pass 204 | 205 | 206 | MergeJobOptions = job.MergeJobOptions 207 | Fusion = job.Fusion 208 | -------------------------------------------------------------------------------- /marge/branch.py: -------------------------------------------------------------------------------- 1 | from . import gitlab 2 | 3 | 4 | GET = gitlab.GET 5 | 6 | 7 | class Branch(gitlab.Resource): 8 | 9 | @classmethod 10 | def fetch_by_name(cls, project_id, branch, api): 11 | info = api.call(GET( 12 | '/projects/{project_id}/repository/branches/{branch}'.format( 13 | project_id=project_id, 14 | branch=branch, 15 | ), 16 | )) 17 | return cls(api, info) 18 | 19 | @property 20 | def name(self): 21 | return self.info['name'] 22 | 23 | @property 24 | def protected(self): 25 | return self.info['protected'] 26 | -------------------------------------------------------------------------------- /marge/commit.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from requests.utils import quote 4 | 5 | from . 
import gitlab 6 | 7 | 8 | GET = gitlab.GET 9 | 10 | 11 | class Commit(gitlab.Resource): 12 | 13 | @classmethod 14 | def fetch_by_id(cls, project_id, sha, api): 15 | info = api.call(GET( 16 | '/projects/{project_id}/repository/commits/{sha}'.format( 17 | project_id=project_id, 18 | sha=sha, 19 | ), 20 | )) 21 | return cls(api, info) 22 | 23 | @classmethod 24 | def last_on_branch(cls, project_id, branch, api): 25 | info = api.call(GET( 26 | '/projects/{project_id}/repository/branches/{branch}'.format( 27 | project_id=project_id, 28 | branch=quote(branch, safe=''), 29 | ), 30 | ))['commit'] 31 | return cls(api, info) 32 | 33 | @property 34 | def short_id(self): 35 | return self.info['short_id'] 36 | 37 | @property 38 | def title(self): 39 | return self.info['title'] 40 | 41 | @property 42 | def author_name(self): 43 | return self.info['author_name'] 44 | 45 | @property 46 | def author_email(self): 47 | return self.info['author_email'] 48 | 49 | @property 50 | def status(self): 51 | return self.info['status'] 52 | 53 | @property 54 | def reviewers(self): 55 | return re.findall(r'^Reviewed-by: ([^\n]+)$', self.info['message'], re.MULTILINE) 56 | 57 | @property 58 | def testers(self): 59 | return re.findall(r'^Tested-by: ([^\n]+)$', self.info['message'], re.MULTILINE) 60 | -------------------------------------------------------------------------------- /marge/git.py: -------------------------------------------------------------------------------- 1 | import logging as log 2 | import shlex 3 | import os 4 | import sys 5 | import subprocess 6 | from subprocess import PIPE, TimeoutExpired 7 | 8 | from collections import namedtuple 9 | 10 | 11 | from . import trailerfilter 12 | 13 | # Turning off StrictHostKeyChecking is a nasty hack to approximate 14 | # just accepting the hostkey sight unseen the first time marge 15 | # connects. 
The proper solution would be to pass in known_hosts as 16 | # a commandline parameter, but in practice few people will bother anyway and 17 | # in this case the threat of MiTM seems somewhat bogus. 18 | GIT_SSH_COMMAND = "ssh -o StrictHostKeyChecking=no " 19 | 20 | 21 | def _filter_branch_script(trailer_name, trailer_values): 22 | filter_script = 'TRAILERS={trailers} python3 {script}'.format( 23 | trailers=shlex.quote( 24 | '\n'.join( 25 | '{}: {}'.format(trailer_name, trailer_value) 26 | for trailer_value in trailer_values or ['']) 27 | ), 28 | script=trailerfilter.__file__, 29 | ) 30 | return filter_script 31 | 32 | 33 | class Repo(namedtuple('Repo', 'remote_url local_path ssh_key_file timeout reference')): 34 | def clone(self): 35 | reference_flag = '--reference=' + self.reference if self.reference else '' 36 | self.git('clone', '--origin=origin', reference_flag, self.remote_url, 37 | self.local_path, from_repo=False) 38 | 39 | def config_user_info(self, user_name, user_email): 40 | self.git('config', 'user.email', user_email) 41 | self.git('config', 'user.name', user_name) 42 | 43 | def fetch(self, remote_name, remote_url=None): 44 | if remote_name != 'origin': 45 | assert remote_url is not None 46 | # upsert remote 47 | try: 48 | self.git('remote', 'rm', remote_name) 49 | except GitError: 50 | pass 51 | self.git('remote', 'add', remote_name, remote_url) 52 | self.git('fetch', '--prune', remote_name) 53 | 54 | def tag_with_trailer(self, trailer_name, trailer_values, branch, start_commit): 55 | """Replace `trailer_name` in commit messages with `trailer_values` in `branch` from `start_commit`. 56 | """ 57 | 58 | # Strips all `$trailer_name``: lines and trailing newlines, adds an empty 59 | # newline and tags on the `$trailer_name: $trailer_value` for each `trailer_value` in 60 | # `trailer_values`. 61 | filter_script = _filter_branch_script(trailer_name, trailer_values) 62 | commit_range = start_commit + '..' 
+ branch 63 | try: 64 | # --force = overwrite backup of last filter-branch 65 | self.git('filter-branch', '--force', '--msg-filter', filter_script, commit_range) 66 | except GitError: 67 | log.warning('filter-branch failed, will try to restore') 68 | try: 69 | self.get_commit_hash('refs/original/refs/heads/') 70 | except GitError: 71 | log.warning('No changes have been effected by filter-branch') 72 | else: 73 | self.git('reset', '--hard', 'refs/original/refs/heads/' + branch) 74 | raise 75 | return self.get_commit_hash() 76 | 77 | def merge(self, source_branch, target_branch, *merge_args, source_repo_url=None, local=False): 78 | """Merge `target_branch` into `source_branch` and return the new HEAD commit id. 79 | 80 | By default `source_branch` and `target_branch` are assumed to reside in the same 81 | repo as `self`. However, if `source_repo_url` is passed and not `None`, 82 | `source_branch` is taken from there. 83 | 84 | Throws a `GitError` if the merge fails. Will also try to --abort it. 85 | """ 86 | return self._fuse_branch( 87 | 'merge', source_branch, target_branch, *merge_args, source_repo_url=source_repo_url, local=local, 88 | ) 89 | 90 | def fast_forward(self, source, target, source_repo_url=None, local=False): 91 | return self.merge(source, target, '--ff', '--ff-only', source_repo_url=source_repo_url, local=local) 92 | 93 | def rebase(self, branch, new_base, source_repo_url=None, local=False): 94 | """Rebase `new_base` into `branch` and return the new HEAD commit id. 95 | 96 | By default `branch` and `new_base` are assumed to reside in the same 97 | repo as `self`. However, if `source_repo_url` is passed and not `None`, 98 | `branch` is taken from there. 99 | 100 | Throws a `GitError` if the rebase fails. Will also try to --abort it. 
101 | """ 102 | return self._fuse_branch('rebase', branch, new_base, source_repo_url=source_repo_url, local=local) 103 | 104 | def _fuse_branch(self, strategy, branch, target_branch, *fuse_args, source_repo_url=None, local=False): 105 | assert source_repo_url or branch != target_branch, branch 106 | 107 | if not local: 108 | self.fetch('origin') 109 | target = 'origin/' + target_branch 110 | if source_repo_url: 111 | self.fetch('source', source_repo_url) 112 | self.checkout_branch(branch, 'source/' + branch) 113 | else: 114 | self.checkout_branch(branch, 'origin/' + branch) 115 | else: 116 | self.checkout_branch(branch) 117 | target = target_branch 118 | 119 | try: 120 | self.git(strategy, target, *fuse_args) 121 | except GitError: 122 | log.warning('%s failed, doing an --abort', strategy) 123 | self.git(strategy, '--abort') 124 | raise 125 | return self.get_commit_hash() 126 | 127 | def remove_branch(self, branch, *, new_current_branch='master'): 128 | assert branch != new_current_branch 129 | self.git('branch', '-D', branch) 130 | 131 | def checkout_branch(self, branch, start_point=''): 132 | create_and_reset = '-B' if start_point else '' 133 | self.git('checkout', create_and_reset, branch, start_point, '--') 134 | 135 | def push(self, branch, *, source_repo_url=None, force=False, skip_ci=False): 136 | self.git('checkout', branch, '--') 137 | 138 | self.git('diff-index', '--quiet', 'HEAD') # check it is not dirty 139 | 140 | untracked_files = self.git('ls-files', '--others').stdout # check no untracked files 141 | if untracked_files: 142 | raise GitError('There are untracked files', untracked_files) 143 | 144 | if source_repo_url: 145 | assert self.get_remote_url('source') == source_repo_url 146 | source = 'source' 147 | else: 148 | source = 'origin' 149 | force_flag = '--force' if force else '' 150 | skip_flag = ('-o', 'ci.skip') if skip_ci else () 151 | self.git('push', force_flag, *skip_flag, source, '%s:%s' % (branch, branch)) 152 | 153 | def 
get_commit_hash(self, rev='HEAD'): 154 | """Return commit hash for `rev` (default "HEAD").""" 155 | result = self.git('rev-parse', rev) 156 | return result.stdout.decode('ascii').strip() 157 | 158 | def get_remote_url(self, name): 159 | return self.git('config', '--get', 'remote.{}.url'.format(name)).stdout.decode('utf-8').strip() 160 | 161 | def git(self, *args, from_repo=True): 162 | env = None 163 | if self.ssh_key_file: 164 | env = os.environ.copy() 165 | # ssh's handling of identity files is infuriatingly dumb, to get it 166 | # to actually really use the IdentityFile we pass in via -i we also 167 | # need to tell it to ignore ssh-agent (IdentitiesOnly=true) and not 168 | # read in any identities from ~/.ssh/config etc (-F /dev/null), 169 | # because they append and it tries them in order, starting with config file 170 | env['GIT_SSH_COMMAND'] = " ".join([ 171 | GIT_SSH_COMMAND, 172 | "-F", "/dev/null", 173 | "-o", "IdentitiesOnly=yes", 174 | "-i", self.ssh_key_file, 175 | ]) 176 | 177 | command = ['git'] 178 | if from_repo: 179 | command.extend(['-C', self.local_path]) 180 | command.extend([arg for arg in args if str(arg)]) 181 | 182 | log.info('Running %s', ' '.join(shlex.quote(w) for w in command)) 183 | try: 184 | timeout_seconds = self.timeout.total_seconds() if self.timeout is not None else None 185 | return _run(*command, env=env, check=True, timeout=timeout_seconds) 186 | except subprocess.CalledProcessError as err: 187 | log.warning('git returned %s', err.returncode) 188 | log.warning('stdout: %r', err.stdout) 189 | log.warning('stderr: %r', err.stderr) 190 | raise GitError(err) from err 191 | 192 | 193 | def _run(*args, env=None, check=False, timeout=None): 194 | encoded_args = [a.encode('utf-8') for a in args] if sys.platform != 'win32' else args 195 | with subprocess.Popen(encoded_args, env=env, stdout=PIPE, stderr=PIPE) as process: 196 | try: 197 | stdout, stderr = process.communicate(input, timeout=timeout) 198 | except TimeoutExpired as err: 199 
| process.kill() 200 | stdout, stderr = process.communicate() 201 | raise TimeoutExpired( 202 | process.args, timeout, output=stdout, stderr=stderr, 203 | ) from err 204 | except Exception: 205 | process.kill() 206 | process.wait() 207 | raise 208 | retcode = process.poll() 209 | if check and retcode: 210 | raise subprocess.CalledProcessError( 211 | retcode, process.args, output=stdout, stderr=stderr, 212 | ) 213 | return subprocess.CompletedProcess(process.args, retcode, stdout, stderr) 214 | 215 | 216 | class GitError(Exception): 217 | pass 218 | -------------------------------------------------------------------------------- /marge/gitlab.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging as log 3 | from collections import namedtuple 4 | 5 | import requests 6 | 7 | 8 | class Api: 9 | def __init__(self, gitlab_url, auth_token): 10 | self._auth_token = auth_token 11 | self._api_base_url = gitlab_url.rstrip('/') + '/api/v4' 12 | 13 | def call(self, command, sudo=None): 14 | method = command.method 15 | url = self._api_base_url + command.endpoint 16 | headers = {'PRIVATE-TOKEN': self._auth_token} 17 | if sudo: 18 | headers['SUDO'] = '%d' % sudo 19 | log.debug('REQUEST: %s %s %r %r', method.__name__.upper(), url, headers, command.call_args) 20 | # Timeout to prevent indefinitely hanging requests. 60s is very conservative, 21 | # but should be short enough to not cause any practical annoyances. We just 22 | # crash rather than retry since marge-bot should be run in a restart loop anyway. 
23 | try: 24 | response = method(url, headers=headers, timeout=60, **command.call_args) 25 | except requests.exceptions.Timeout as err: 26 | log.error('Request timeout: %s', err) 27 | raise 28 | log.debug('RESPONSE CODE: %s', response.status_code) 29 | log.debug('RESPONSE BODY: %r', response.content) 30 | 31 | if response.status_code == 202: 32 | return True # Accepted 33 | 34 | if response.status_code == 204: 35 | return True # NoContent 36 | 37 | if response.status_code < 300: 38 | return command.extract(response.json()) if command.extract else response.json() 39 | 40 | if response.status_code == 304: 41 | return False # Not Modified 42 | 43 | errors = { 44 | 400: BadRequest, 45 | 401: Unauthorized, 46 | 403: Forbidden, 47 | 404: NotFound, 48 | 405: MethodNotAllowed, 49 | 406: NotAcceptable, 50 | 409: Conflict, 51 | 422: Unprocessable, 52 | 500: InternalServerError, 53 | } 54 | 55 | def other_error(code, msg): 56 | exception = InternalServerError if 500 < code < 600 else UnexpectedError 57 | return exception(code, msg) 58 | 59 | error = errors.get(response.status_code, other_error) 60 | try: 61 | err_message = response.json() 62 | except json.JSONDecodeError: 63 | err_message = response.reason 64 | 65 | raise error(response.status_code, err_message) 66 | 67 | def collect_all_pages(self, get_command): 68 | result = [] 69 | fetch_again, page_no = True, 1 70 | while fetch_again: 71 | page = self.call(get_command.for_page(page_no)) 72 | if page: 73 | result.extend(page) 74 | page_no += 1 75 | else: 76 | fetch_again = False 77 | 78 | return result 79 | 80 | def version(self): 81 | response = self.call(GET('/version')) 82 | return Version.parse(response['version']) 83 | 84 | 85 | def from_singleton_list(fun=None): 86 | fun = fun or (lambda x: x) 87 | 88 | def extractor(response_list): 89 | assert isinstance(response_list, list), type(response_list) 90 | assert len(response_list) <= 1, len(response_list) 91 | if not response_list: 92 | return None 93 | return 
fun(response_list[0]) 94 | 95 | return extractor 96 | 97 | 98 | class Command(namedtuple('Command', 'endpoint args extract')): 99 | def __new__(cls, endpoint, args=None, extract=None): 100 | return super(Command, cls).__new__(cls, endpoint, args or {}, extract) 101 | 102 | @property 103 | def call_args(self): 104 | return {'json': self.args} 105 | 106 | 107 | class GET(Command): 108 | @property 109 | def method(self): 110 | return requests.get 111 | 112 | @property 113 | def call_args(self): 114 | return {'params': _prepare_params(self.args)} 115 | 116 | def for_page(self, page_no): 117 | args = self.args 118 | return self._replace(args=dict(args, page=page_no, per_page=100)) 119 | 120 | 121 | class PUT(Command): 122 | @property 123 | def method(self): 124 | return requests.put 125 | 126 | 127 | class POST(Command): 128 | @property 129 | def method(self): 130 | return requests.post 131 | 132 | 133 | class DELETE(Command): 134 | @property 135 | def method(self): 136 | return requests.delete 137 | 138 | 139 | def _prepare_params(params): 140 | def process(val): 141 | if isinstance(val, bool): 142 | return 'true' if val else 'false' 143 | return str(val) 144 | 145 | return {key: process(val) for key, val in params.items()} 146 | 147 | 148 | class ApiError(Exception): 149 | @property 150 | def error_message(self): 151 | args = self.args 152 | if len(args) != 2: 153 | return None 154 | 155 | arg = args[1] 156 | if isinstance(arg, dict): 157 | return arg.get('message') 158 | return arg 159 | 160 | 161 | class BadRequest(ApiError): 162 | pass 163 | 164 | 165 | class Unauthorized(ApiError): 166 | pass 167 | 168 | 169 | class Forbidden(ApiError): 170 | pass 171 | 172 | 173 | class NotFound(ApiError): 174 | pass 175 | 176 | 177 | class MethodNotAllowed(ApiError): 178 | pass 179 | 180 | 181 | class NotAcceptable(ApiError): 182 | pass 183 | 184 | 185 | class Conflict(ApiError): 186 | pass 187 | 188 | 189 | class Unprocessable(ApiError): 190 | pass 191 | 192 | 193 | class 
InternalServerError(ApiError): 194 | pass 195 | 196 | 197 | class UnexpectedError(ApiError): 198 | pass 199 | 200 | 201 | class Resource: 202 | def __init__(self, api, info): 203 | self._info = info 204 | self._api = api 205 | 206 | @property 207 | def info(self): 208 | return self._info 209 | 210 | @property 211 | def id(self): # pylint: disable=invalid-name 212 | return self.info['id'] 213 | 214 | @property 215 | def api(self): 216 | return self._api 217 | 218 | def __repr__(self): 219 | return '{0.__class__.__name__}({0._api}, {0.info})'.format(self) 220 | 221 | 222 | class Version(namedtuple('Version', 'release edition')): 223 | @classmethod 224 | def parse(cls, string): 225 | maybe_split_string = string.split('-', maxsplit=1) 226 | if len(maybe_split_string) == 2: 227 | release_string, edition = maybe_split_string 228 | else: 229 | release_string, edition = string, None 230 | 231 | release = tuple(int(number) for number in release_string.split('.')) 232 | return cls(release=release, edition=edition) 233 | 234 | @property 235 | def is_ee(self): 236 | return self.edition == 'ee' 237 | 238 | def __str__(self): 239 | return '%s-%s' % ('.'.join(map(str, self.release)), self.edition) 240 | -------------------------------------------------------------------------------- /marge/interval.py: -------------------------------------------------------------------------------- 1 | import operator 2 | from enum import Enum, unique 3 | 4 | import maya 5 | 6 | 7 | # pylint: disable=invalid-name 8 | @unique 9 | class WeekDay(Enum): 10 | Monday = 0 11 | Tuesday = 1 12 | Wednesday = 2 13 | Thursday = 3 14 | Friday = 4 15 | Saturday = 5 16 | Sunday = 6 17 | 18 | 19 | _DAY_NAMES = {day.name.lower(): day for day in WeekDay} 20 | _DAY_NAMES.update((day.name.lower()[:3], day) for day in WeekDay) 21 | _DAY_NAMES.update((day, day) for day in WeekDay) 22 | 23 | 24 | def find_weekday(string_or_day): 25 | if isinstance(string_or_day, WeekDay): 26 | return string_or_day 27 | 28 | if 
isinstance(string_or_day, str): 29 | return _DAY_NAMES[string_or_day.lower()] 30 | 31 | raise ValueError('Not a week day: %r' % string_or_day) 32 | 33 | 34 | class WeeklyInterval: 35 | def __init__(self, from_weekday, from_time, to_weekday, to_time): 36 | from_weekday = find_weekday(from_weekday) 37 | to_weekday = find_weekday(to_weekday) 38 | 39 | # the class invariant is that from_weekday <= to_weekday; so when this 40 | # is not the case (e.g. a Fri-Mon interval), we store the complement interval 41 | # (in the example, Mon-Fri), and invert the criterion 42 | self._is_complement_interval = from_weekday.value > to_weekday.value 43 | if self._is_complement_interval: 44 | self._from_weekday = to_weekday 45 | self._from_time = to_time 46 | self._to_weekday = from_weekday 47 | self._to_time = from_time 48 | else: 49 | self._from_weekday = from_weekday 50 | self._from_time = from_time 51 | self._to_weekday = to_weekday 52 | self._to_time = to_time 53 | 54 | def __eq__(self, other): 55 | if isinstance(other, self.__class__): 56 | return self.__dict__ == other.__dict__ 57 | return False 58 | 59 | def __ne__(self, other): 60 | return not self == other 61 | 62 | def __repr__(self): 63 | pat = '{class_name}({from_weekday}, {from_time}, {to_weekday}, {to_time})' 64 | if self._is_complement_interval: 65 | return pat.format( 66 | class_name=self.__class__.__name__, 67 | from_weekday=self._to_weekday, 68 | from_time=self._to_time, 69 | to_weekday=self._from_weekday, 70 | to_time=self._from_time, 71 | ) 72 | return pat.format( 73 | class_name=self.__class__.__name__, 74 | from_weekday=self._from_weekday, 75 | from_time=self._from_time, 76 | to_weekday=self._to_weekday, 77 | to_time=self._to_time, 78 | ) 79 | 80 | @classmethod 81 | def from_human(cls, string): 82 | from_, to_ = string.split('-') 83 | 84 | def parse_part(part): 85 | part = part.replace('@', ' ') 86 | parts = part.split() 87 | weekday = parts[0] 88 | time = parts[1] 89 | timezone = parts[2] if len(parts) > 2 else 
'UTC' 90 | weekday = find_weekday(weekday) 91 | time = maya.parse(time, timezone=timezone).datetime().time() 92 | return weekday, time 93 | 94 | from_weekday, from_time = parse_part(from_) 95 | to_weekday, to_time = parse_part(to_) 96 | return cls(from_weekday, from_time, to_weekday, to_time) 97 | 98 | def covers(self, date): 99 | return self._interval_covers(date) != self._is_complement_interval 100 | 101 | def _interval_covers(self, date): 102 | weekday = date.date().weekday() 103 | time = date.time() 104 | before = operator.le if self._is_complement_interval else operator.lt 105 | 106 | if not self._from_weekday.value <= weekday <= self._to_weekday.value: 107 | return False 108 | 109 | if self._from_weekday.value == weekday and before(time, self._from_time): 110 | return False 111 | 112 | if self._to_weekday.value == weekday and before(self._to_time, time): 113 | return False 114 | 115 | return True 116 | 117 | 118 | class IntervalUnion: 119 | def __init__(self, iterable): 120 | self._intervals = list(iterable) 121 | 122 | def __eq__(self, other): 123 | if isinstance(other, self.__class__): 124 | return self.__dict__ == other.__dict__ 125 | return False 126 | 127 | def __ne__(self, other): 128 | return not self == other 129 | 130 | def __repr__(self): 131 | return '{o.__class__.__name__}({o._intervals})'.format(o=self) 132 | 133 | @classmethod 134 | def empty(cls): 135 | return cls(()) 136 | 137 | @classmethod 138 | def from_human(cls, string): 139 | strings = string.split(',') 140 | return cls(WeeklyInterval.from_human(s) for s in strings) 141 | 142 | def covers(self, date): 143 | return any(interval.covers(date) for interval in self._intervals) 144 | -------------------------------------------------------------------------------- /marge/merge_request.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import logging as log 3 | import time 4 | import datetime 5 | 6 | from . 
import gitlab 7 | from .approvals import Approvals 8 | 9 | 10 | GET, POST, PUT, DELETE = gitlab.GET, gitlab.POST, gitlab.PUT, gitlab.DELETE 11 | 12 | 13 | class MergeRequest(gitlab.Resource): 14 | 15 | @classmethod 16 | def create(cls, api, project_id, params): 17 | merge_request_info = api.call(POST( 18 | '/projects/{project_id}/merge_requests'.format(project_id=project_id), 19 | params, 20 | )) 21 | merge_request = cls(api, merge_request_info) 22 | return merge_request 23 | 24 | @classmethod 25 | def search(cls, api, project_id, params): 26 | merge_requests = api.collect_all_pages(GET( 27 | '/projects/{project_id}/merge_requests'.format(project_id=project_id), 28 | params, 29 | )) 30 | return [cls(api, merge_request) for merge_request in merge_requests] 31 | 32 | @classmethod 33 | def fetch_by_iid(cls, project_id, merge_request_iid, api): 34 | merge_request = cls(api, {'iid': merge_request_iid, 'project_id': project_id}) 35 | merge_request.refetch_info() 36 | return merge_request 37 | 38 | @classmethod 39 | def fetch_assigned_at(cls, user, api, merge_request): 40 | assigned_at = 0 41 | all_discussions = api.collect_all_pages( 42 | GET('/projects/{project_id}/merge_requests/{merge_requests_id}/discussions'.format( 43 | project_id=merge_request.get('project_id'), 44 | merge_requests_id=merge_request.get('iid') 45 | ))) 46 | match_body = 'assigned to @{username}'.format(username=user.username) 47 | for discussion in all_discussions: 48 | for note in discussion.get('notes'): 49 | if match_body in note.get('body'): 50 | date_string = note.get('created_at') 51 | date_format = "%Y-%m-%dT%H:%M:%S.%f%z" 52 | if (sys.version_info.major, sys.version_info.minor) <= (3, 6): 53 | assigned = datetime.datetime.strptime(date_string[:-1], date_format[:-2]) \ 54 | .replace(tzinfo=datetime.timezone.utc).timestamp() 55 | else: 56 | assigned = datetime.datetime.strptime(date_string, date_format).timestamp() 57 | if assigned > assigned_at: 58 | assigned_at = assigned 59 | return 
assigned_at 60 | 61 | @classmethod 62 | def fetch_all_open_for_user(cls, project_id, user, api, merge_order): 63 | request_merge_order = 'created_at' if merge_order == 'assigned_at' else merge_order 64 | 65 | all_merge_request_infos = api.collect_all_pages(GET( 66 | '/projects/{project_id}/merge_requests'.format(project_id=project_id), 67 | {'state': 'opened', 'order_by': request_merge_order, 'sort': 'asc'}, 68 | )) 69 | my_merge_request_infos = [ 70 | mri for mri in all_merge_request_infos 71 | if ((mri.get('assignee', {}) or {}).get('id') == user.id) or 72 | (user.id in [assignee.get('id') for assignee in (mri.get('assignees', []) or [])]) 73 | ] 74 | 75 | if merge_order == 'assigned_at': 76 | my_merge_request_infos.sort(key=lambda mri: cls.fetch_assigned_at(user, api, mri)) 77 | 78 | return [cls(api, merge_request_info) for merge_request_info in my_merge_request_infos] 79 | 80 | @property 81 | def project_id(self): 82 | return self.info['project_id'] 83 | 84 | @property 85 | def iid(self): 86 | return self.info['iid'] 87 | 88 | @property 89 | def title(self): 90 | return self.info['title'] 91 | 92 | @property 93 | def state(self): 94 | return self.info['state'] 95 | 96 | @property 97 | def merge_status(self): 98 | return self.info['merge_status'] 99 | 100 | @property 101 | def rebase_in_progress(self): 102 | return self.info.get('rebase_in_progress', False) 103 | 104 | @property 105 | def merge_error(self): 106 | return self.info.get('merge_error') 107 | 108 | @property 109 | def assignee_ids(self): 110 | if 'assignees' in self.info: 111 | return [assignee.get('id') for assignee in (self.info['assignees'] or [])] 112 | return [(self.info.get('assignee', {}) or {}).get('id')] 113 | 114 | @property 115 | def author_id(self): 116 | return self.info['author'].get('id') 117 | 118 | @property 119 | def source_branch(self): 120 | return self.info['source_branch'] 121 | 122 | @property 123 | def target_branch(self): 124 | return self.info['target_branch'] 125 | 126 | 
@property 127 | def sha(self): 128 | return self.info['sha'] 129 | 130 | @property 131 | def squash(self): 132 | return self.info.get('squash', False) # missing means auto-squash not supported 133 | 134 | @property 135 | def source_project_id(self): 136 | return self.info['source_project_id'] 137 | 138 | @property 139 | def target_project_id(self): 140 | return self.info['target_project_id'] 141 | 142 | @property 143 | def work_in_progress(self): 144 | return self.info['work_in_progress'] 145 | 146 | @property 147 | def approved_by(self): 148 | return self.info['approved_by'] 149 | 150 | @property 151 | def web_url(self): 152 | return self.info['web_url'] 153 | 154 | @property 155 | def blocking_discussions_resolved(self): 156 | return self.info['blocking_discussions_resolved'] 157 | 158 | @property 159 | def force_remove_source_branch(self): 160 | return self.info['force_remove_source_branch'] 161 | 162 | def update_sha(self, sha): 163 | """record the updated sha. We don't use refetch_info instead as it may hit cache.""" 164 | self._info['sha'] = sha 165 | 166 | def refetch_info(self): 167 | self._info = self._api.call(GET('/projects/{0.project_id}/merge_requests/{0.iid}'.format(self))) 168 | 169 | def comment(self, message): 170 | if self._api.version().release >= (9, 2, 2): 171 | notes_url = '/projects/{0.project_id}/merge_requests/{0.iid}/notes'.format(self) 172 | else: 173 | # GitLab botched the v4 api before 9.2.2 174 | notes_url = '/projects/{0.project_id}/merge_requests/{0.id}/notes'.format(self) 175 | 176 | return self._api.call(POST(notes_url, {'body': message})) 177 | 178 | def rebase(self): 179 | self.refetch_info() 180 | 181 | if not self.rebase_in_progress: 182 | self._api.call(PUT( 183 | '/projects/{0.project_id}/merge_requests/{0.iid}/rebase'.format(self), 184 | )) 185 | else: 186 | # We wanted to rebase and someone just happened to press the button for us! 
187 | log.info('A rebase was already in progress on the merge request!') 188 | 189 | max_attempts = 30 190 | wait_between_attempts_in_secs = 1 191 | 192 | for _ in range(max_attempts): 193 | self.refetch_info() 194 | if not self.rebase_in_progress: 195 | if self.merge_error: 196 | raise MergeRequestRebaseFailed(self.merge_error) 197 | return 198 | 199 | time.sleep(wait_between_attempts_in_secs) 200 | 201 | raise TimeoutError('Waiting for merge request to be rebased by GitLab') 202 | 203 | def accept(self, remove_branch=False, sha=None, merge_when_pipeline_succeeds=True): 204 | return self._api.call(PUT( 205 | '/projects/{0.project_id}/merge_requests/{0.iid}/merge'.format(self), 206 | dict( 207 | should_remove_source_branch=remove_branch, 208 | merge_when_pipeline_succeeds=merge_when_pipeline_succeeds, 209 | sha=sha or self.sha, # if provided, ensures what is merged is what we want (or fails) 210 | ), 211 | )) 212 | 213 | def close(self): 214 | return self._api.call(PUT( 215 | '/projects/{0.project_id}/merge_requests/{0.iid}'.format(self), 216 | {'state_event': 'close'}, 217 | )) 218 | 219 | def assign_to(self, user_id): 220 | return self._api.call(PUT( 221 | '/projects/{0.project_id}/merge_requests/{0.iid}'.format(self), 222 | {'assignee_id': user_id}, 223 | )) 224 | 225 | def unassign(self): 226 | return self.assign_to(0) 227 | 228 | def fetch_approvals(self): 229 | # 'id' needed for for GitLab 9.2.2 hack (see Approvals.refetch_info()) 230 | info = {'id': self.id, 'iid': self.iid, 'project_id': self.project_id} 231 | approvals = Approvals(self.api, info) 232 | approvals.refetch_info() 233 | return approvals 234 | 235 | def fetch_commits(self): 236 | return self._api.call(GET('/projects/{0.project_id}/merge_requests/{0.iid}/commits'.format(self))) 237 | 238 | 239 | class MergeRequestRebaseFailed(Exception): 240 | pass 241 | -------------------------------------------------------------------------------- /marge/pipeline.py: 
-------------------------------------------------------------------------------- 1 | from . import gitlab 2 | 3 | 4 | GET, POST = gitlab.GET, gitlab.POST 5 | 6 | 7 | class Pipeline(gitlab.Resource): 8 | def __init__(self, api, info, project_id): 9 | info['project_id'] = project_id 10 | super().__init__(api, info) 11 | 12 | @classmethod 13 | def pipelines_by_branch( 14 | cls, project_id, branch, api, *, 15 | ref=None, 16 | status=None, 17 | order_by='id', 18 | sort='desc', 19 | ): 20 | params = { 21 | 'ref': branch if ref is None else ref, 22 | 'order_by': order_by, 23 | 'sort': sort, 24 | } 25 | if status is not None: 26 | params['status'] = status 27 | pipelines_info = api.call(GET( 28 | '/projects/{project_id}/pipelines'.format(project_id=project_id), 29 | params, 30 | )) 31 | 32 | return [cls(api, pipeline_info, project_id) for pipeline_info in pipelines_info] 33 | 34 | @classmethod 35 | def pipelines_by_merge_request(cls, project_id, merge_request_iid, api): 36 | """Fetch all pipelines for a merge request in descending order of pipeline ID.""" 37 | pipelines_info = api.call(GET( 38 | '/projects/{project_id}/merge_requests/{merge_request_iid}/pipelines'.format( 39 | project_id=project_id, merge_request_iid=merge_request_iid, 40 | ) 41 | )) 42 | pipelines_info.sort(key=lambda pipeline_info: pipeline_info['id'], reverse=True) 43 | return [cls(api, pipeline_info, project_id) for pipeline_info in pipelines_info] 44 | 45 | @property 46 | def project_id(self): 47 | return self.info['project_id'] 48 | 49 | @property 50 | def id(self): 51 | return self.info['id'] 52 | 53 | @property 54 | def status(self): 55 | return self.info['status'] 56 | 57 | @property 58 | def ref(self): 59 | return self.info['ref'] 60 | 61 | @property 62 | def sha(self): 63 | return self.info['sha'] 64 | 65 | def cancel(self): 66 | return self._api.call(POST( 67 | '/projects/{0.project_id}/pipelines/{0.id}/cancel'.format(self), 68 | )) 69 | 
-------------------------------------------------------------------------------- /marge/project.py: -------------------------------------------------------------------------------- 1 | import logging as log 2 | from enum import IntEnum, unique 3 | from functools import partial 4 | 5 | from . import gitlab 6 | 7 | 8 | GET = gitlab.GET 9 | 10 | 11 | class Project(gitlab.Resource): 12 | 13 | @classmethod 14 | def fetch_by_id(cls, project_id, api): 15 | info = api.call(GET('/projects/%s' % project_id)) 16 | return cls(api, info) 17 | 18 | @classmethod 19 | def fetch_by_path(cls, project_path, api): 20 | def filter_by_path_with_namespace(projects): 21 | return [p for p in projects if p['path_with_namespace'] == project_path] 22 | 23 | make_project = partial(cls, api) 24 | 25 | all_projects = api.collect_all_pages(GET('/projects')) 26 | return gitlab.from_singleton_list(make_project)(filter_by_path_with_namespace(all_projects)) 27 | 28 | @classmethod 29 | def fetch_all_mine(cls, api): 30 | projects_kwargs = {'membership': True, 31 | 'with_merge_requests_enabled': True, 32 | 'archived': False, 33 | } 34 | 35 | # GitLab has an issue where projects may not show appropriate permissions in nested groups. Using 36 | # `min_access_level` is known to provide the correct projects, so we'll prefer this method 37 | # if it's available. See #156 for more details. 38 | use_min_access_level = api.version().release >= (11, 2) 39 | if use_min_access_level: 40 | projects_kwargs["min_access_level"] = int(AccessLevel.developer) 41 | 42 | projects_info = api.collect_all_pages(GET( 43 | '/projects', 44 | projects_kwargs, 45 | )) 46 | 47 | def project_seems_ok(project_info): 48 | # A bug in at least GitLab 9.3.5 would make GitLab not report permissions after 49 | # moving subgroups. See for full story #19. 
50 | permissions = project_info['permissions'] 51 | permissions_ok = bool(permissions['project_access'] or permissions['group_access']) 52 | if not permissions_ok: 53 | project_name = project_info['path_with_namespace'] 54 | log.warning('Ignoring project %s since GitLab provided no user permissions', project_name) 55 | 56 | return permissions_ok 57 | 58 | projects = [] 59 | 60 | for project_info in projects_info: 61 | if use_min_access_level: 62 | # We know we fetched projects with at least developer access, so we'll use that as 63 | # a fallback if GitLab doesn't correctly report permissions as described above. 64 | project_info["permissions"]["marge"] = {"access_level": AccessLevel.developer} 65 | elif not project_seems_ok(projects_info): 66 | continue 67 | 68 | projects.append(cls(api, project_info)) 69 | 70 | return projects 71 | 72 | @property 73 | def default_branch(self): 74 | return self.info['default_branch'] 75 | 76 | @property 77 | def path_with_namespace(self): 78 | return self.info['path_with_namespace'] 79 | 80 | @property 81 | def ssh_url_to_repo(self): 82 | return self.info['ssh_url_to_repo'] 83 | 84 | @property 85 | def http_url_to_repo(self): 86 | return self.info['http_url_to_repo'] 87 | 88 | @property 89 | def merge_requests_enabled(self): 90 | return self.info['merge_requests_enabled'] 91 | 92 | @property 93 | def only_allow_merge_if_pipeline_succeeds(self): 94 | return self.info['only_allow_merge_if_pipeline_succeeds'] 95 | 96 | @property 97 | def only_allow_merge_if_all_discussions_are_resolved(self): # pylint: disable=invalid-name 98 | return self.info['only_allow_merge_if_all_discussions_are_resolved'] 99 | 100 | @property 101 | def approvals_required(self): 102 | return self.info['approvals_before_merge'] 103 | 104 | @property 105 | def access_level(self): 106 | permissions = self.info['permissions'] 107 | effective_access = ( 108 | permissions['project_access'] 109 | or permissions['group_access'] 110 | or permissions.get("marge") 111 | ) 
112 | assert effective_access is not None, "GitLab failed to provide user permissions on project" 113 | return AccessLevel(effective_access['access_level']) 114 | 115 | 116 | # pylint: disable=invalid-name 117 | @unique 118 | class AccessLevel(IntEnum): 119 | # See https://docs.gitlab.com/ce/api/access_requests.html 120 | none = 0 121 | minimal = 5 122 | guest = 10 123 | reporter = 20 124 | developer = 30 125 | maintainer = 40 126 | owner = 50 127 | -------------------------------------------------------------------------------- /marge/pylintrc: -------------------------------------------------------------------------------- 1 | ../pylintrc -------------------------------------------------------------------------------- /marge/single_merge_job.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=too-many-locals,too-many-branches,too-many-statements 2 | import logging as log 3 | import time 4 | from datetime import datetime 5 | 6 | from . 
import git, gitlab 7 | from .commit import Commit 8 | from .job import CannotMerge, GitLabRebaseResultMismatch, MergeJob, SkipMerge 9 | 10 | 11 | class SingleMergeJob(MergeJob): 12 | 13 | def __init__(self, *, api, user, project, repo, options, merge_request): 14 | super().__init__(api=api, user=user, project=project, repo=repo, options=options) 15 | self._merge_request = merge_request 16 | self._options = options 17 | 18 | def execute(self): 19 | merge_request = self._merge_request 20 | 21 | log.info('Processing !%s - %r', merge_request.iid, merge_request.title) 22 | 23 | try: 24 | approvals = merge_request.fetch_approvals() 25 | self.update_merge_request_and_accept(approvals) 26 | log.info('Successfully merged !%s.', merge_request.info['iid']) 27 | except SkipMerge as err: 28 | log.warning("Skipping MR !%s: %s", merge_request.info['iid'], err.reason) 29 | except CannotMerge as err: 30 | message = "I couldn't merge this branch: %s" % err.reason 31 | log.warning(message) 32 | self.unassign_from_mr(merge_request) 33 | merge_request.comment(message) 34 | except git.GitError: 35 | log.exception('Unexpected Git error') 36 | merge_request.comment('Something seems broken on my local git repo; check my logs!') 37 | raise 38 | except Exception: 39 | log.exception('Unexpected Exception') 40 | merge_request.comment("I'm broken on the inside, please somebody fix me... :cry:") 41 | self.unassign_from_mr(merge_request) 42 | raise 43 | 44 | def update_merge_request_and_accept(self, approvals): 45 | api = self._api 46 | merge_request = self._merge_request 47 | updated_into_up_to_date_target_branch = False 48 | 49 | while not updated_into_up_to_date_target_branch: 50 | self.ensure_mergeable_mr(merge_request) 51 | source_project, source_repo_url, _ = self.fetch_source_project(merge_request) 52 | target_project = self.get_target_project(merge_request) 53 | try: 54 | # NB. 
this will be a no-op if there is nothing to update/rewrite 55 | 56 | target_sha, _updated_sha, actual_sha = self.update_from_target_branch_and_push( 57 | merge_request, 58 | source_repo_url=source_repo_url, 59 | ) 60 | except GitLabRebaseResultMismatch: 61 | log.info("Gitlab rebase didn't give expected result") 62 | merge_request.comment("Someone skipped the queue! Will have to try again...") 63 | continue 64 | 65 | if _updated_sha == actual_sha and self._options.guarantee_final_pipeline: 66 | log.info('No commits on target branch to fuse, triggering pipeline...') 67 | merge_request.comment("jenkins retry") 68 | time.sleep(30) 69 | 70 | log.info( 71 | 'Commit id to merge %r into: %r (updated sha: %r)', 72 | actual_sha, 73 | target_sha, 74 | _updated_sha 75 | ) 76 | time.sleep(5) 77 | 78 | sha_now = Commit.last_on_branch(source_project.id, merge_request.source_branch, api).id 79 | # Make sure no-one managed to race and push to the branch in the 80 | # meantime, because we're about to impersonate the approvers, and 81 | # we don't want to approve unreviewed commits 82 | if sha_now != actual_sha: 83 | raise CannotMerge('Someone pushed to branch while we were trying to merge') 84 | 85 | self.maybe_reapprove(merge_request, approvals) 86 | 87 | if target_project.only_allow_merge_if_pipeline_succeeds: 88 | self.wait_for_ci_to_pass(merge_request, actual_sha) 89 | time.sleep(2) 90 | 91 | self.wait_for_merge_status_to_resolve(merge_request) 92 | 93 | self.ensure_mergeable_mr(merge_request) 94 | 95 | try: 96 | ret = merge_request.accept( 97 | remove_branch=merge_request.force_remove_source_branch, 98 | sha=actual_sha, 99 | merge_when_pipeline_succeeds=bool(target_project.only_allow_merge_if_pipeline_succeeds), 100 | ) 101 | log.info('merge_request.accept result: %s', ret) 102 | except gitlab.NotAcceptable as err: 103 | new_target_sha = Commit.last_on_branch(self._project.id, merge_request.target_branch, api).id 104 | # target_branch has moved under us since we updated, just 
try again 105 | if new_target_sha != target_sha: 106 | log.info('Someone was naughty and by-passed marge') 107 | merge_request.comment( 108 | "My job would be easier if people didn't jump the queue and push directly... *sigh*" 109 | ) 110 | continue 111 | # otherwise the source branch has been pushed to or something 112 | # unexpected went wrong in either case, we expect the user to 113 | # explicitly re-assign to marge (after resolving potential 114 | # problems) 115 | raise CannotMerge('Merge request was rejected by GitLab: %r' % err.error_message) from err 116 | except gitlab.Unauthorized as err: 117 | log.warning('Unauthorized!') 118 | raise CannotMerge('My user cannot accept merge requests!') from err 119 | except gitlab.NotFound as ex: 120 | log.warning('Not Found!: %s', ex) 121 | merge_request.refetch_info() 122 | if merge_request.state == 'merged': 123 | # someone must have hit "merge when build succeeds" and we lost the race, 124 | # the branch is gone and we got a 404. Anyway, our job here is done. 125 | # (see #33) 126 | updated_into_up_to_date_target_branch = True 127 | else: 128 | log.warning('For the record, merge request state is %r', merge_request.state) 129 | raise 130 | except gitlab.MethodNotAllowed as ex: 131 | log.warning('Not Allowed!: %s', ex) 132 | merge_request.refetch_info() 133 | if merge_request.work_in_progress: 134 | raise CannotMerge( 135 | 'The request was marked as WIP as I was processing it (maybe a WIP commit?)' 136 | ) from ex 137 | if merge_request.state == 'reopened': 138 | raise CannotMerge( 139 | 'GitLab refused to merge this branch. I suspect that a Push Rule or a git-hook ' 140 | 'is rejecting my commits; maybe my email needs to be white-listed?' 141 | ) from ex 142 | if merge_request.state == 'closed': 143 | raise CannotMerge( 144 | 'Someone closed the merge request while I was attempting to merge it.' 
145 | ) from ex 146 | if merge_request.state == 'merged': 147 | # We are not covering any observed behaviour here, but if at this 148 | # point the request is merged, our job is done, so no need to complain 149 | log.info('Merge request is already merged, someone was faster!') 150 | updated_into_up_to_date_target_branch = True 151 | else: 152 | raise CannotMerge( 153 | "Gitlab refused to merge this request and I don't know why!" + ( 154 | " Maybe you have unresolved discussions?" 155 | if self._project.only_allow_merge_if_all_discussions_are_resolved else "" 156 | ) 157 | ) from ex 158 | except gitlab.ApiError as err: 159 | log.exception('Unanticipated ApiError from GitLab on merge attempt') 160 | raise CannotMerge('had some issue with GitLab, check my logs...') from err 161 | else: 162 | self.wait_for_branch_to_be_merged() 163 | updated_into_up_to_date_target_branch = True 164 | 165 | def wait_for_branch_to_be_merged(self): 166 | merge_request = self._merge_request 167 | time_0 = datetime.utcnow() 168 | waiting_time_in_secs = 10 169 | 170 | while datetime.utcnow() - time_0 < self._merge_timeout: 171 | merge_request.refetch_info() 172 | 173 | if merge_request.state == 'merged': 174 | return # success! 175 | if merge_request.state == 'closed': 176 | raise CannotMerge('someone closed the merge request while merging!') 177 | assert merge_request.state in ('opened', 'reopened', 'locked'), merge_request.state 178 | 179 | log.info('Giving %s more secs for !%s to be merged...', waiting_time_in_secs, merge_request.iid) 180 | time.sleep(waiting_time_in_secs) 181 | 182 | raise CannotMerge('It is taking too long to see the request marked as merged!') 183 | -------------------------------------------------------------------------------- /marge/store.py: -------------------------------------------------------------------------------- 1 | import re 2 | import tempfile 3 | 4 | from . 
import git 5 | 6 | 7 | class RepoManager: 8 | 9 | def __init__(self, user, root_dir, timeout=None, reference=None): 10 | self._root_dir = root_dir 11 | self._user = user 12 | self._repos = {} 13 | self._timeout = timeout 14 | self._reference = reference 15 | 16 | def forget_repo(self, project): 17 | self._repos.pop(project.id, None) 18 | 19 | @property 20 | def user(self): 21 | return self._user 22 | 23 | @property 24 | def root_dir(self): 25 | return self._root_dir 26 | 27 | 28 | class SshRepoManager(RepoManager): 29 | 30 | def __init__(self, user, root_dir, ssh_key_file=None, timeout=None, reference=None): 31 | super().__init__(user, root_dir, timeout, reference) 32 | self._ssh_key_file = ssh_key_file 33 | 34 | def repo_for_project(self, project): 35 | repo = self._repos.get(project.id) 36 | if not repo or repo.remote_url != project.ssh_url_to_repo: 37 | repo_url = project.ssh_url_to_repo 38 | local_repo_dir = tempfile.mkdtemp(dir=self._root_dir) 39 | 40 | repo = git.Repo(repo_url, local_repo_dir, ssh_key_file=self._ssh_key_file, 41 | timeout=self._timeout, reference=self._reference) 42 | repo.clone() 43 | repo.config_user_info( 44 | user_email=self._user.email, 45 | user_name=self._user.name, 46 | ) 47 | 48 | self._repos[project.id] = repo 49 | 50 | return repo 51 | 52 | @property 53 | def ssh_key_file(self): 54 | return self._ssh_key_file 55 | 56 | 57 | class HttpsRepoManager(RepoManager): 58 | 59 | def __init__(self, user, root_dir, auth_token=None, timeout=None, reference=None): 60 | super().__init__(user, root_dir, timeout, reference) 61 | self._auth_token = auth_token 62 | 63 | def repo_for_project(self, project): 64 | repo = self._repos.get(project.id) 65 | if not repo or repo.remote_url != project.http_url_to_repo: 66 | credentials = "oauth2:" + self._auth_token 67 | # insert token auth "oauth2:@" 68 | pattern = "(http(s)?://)" 69 | replacement = r"\1" + credentials + "@" 70 | repo_url = re.sub(pattern, replacement, project.http_url_to_repo, 1) 71 | 
local_repo_dir = tempfile.mkdtemp(dir=self._root_dir) 72 | 73 | repo = git.Repo(repo_url, local_repo_dir, ssh_key_file=None, 74 | timeout=self._timeout, reference=self._reference) 75 | repo.clone() 76 | repo.config_user_info( 77 | user_email=self._user.email, 78 | user_name=self._user.name, 79 | ) 80 | 81 | self._repos[project.id] = repo 82 | 83 | return repo 84 | 85 | @property 86 | def auth_token(self): 87 | return self._auth_token 88 | -------------------------------------------------------------------------------- /marge/trailerfilter.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Executable script to pass to git filter-branch --msgfilter to rewrite trailers. 3 | 4 | This treats everything (stdin, stdout, env) at the level of raw bytes which are 5 | assumed to be utf-8, or more specifically some ASCII superset, regardless of 6 | (possibly broken) LOCALE settings. 7 | 8 | """ 9 | import collections 10 | import os 11 | import re 12 | import sys 13 | 14 | STDIN = sys.stdin.buffer 15 | STDOUT = sys.stdout.buffer 16 | STDERR = sys.stderr.buffer 17 | 18 | 19 | def die(msg): 20 | STDERR.write(b'ERROR: ') 21 | STDERR.write(msg) 22 | sys.exit(1) 23 | 24 | 25 | def drop_trailing_newlines(lines): 26 | while lines and not lines[-1]: 27 | del lines[-1] 28 | 29 | 30 | def remove_duplicates(trailers): 31 | return list(collections.OrderedDict((t, None) for t in trailers).keys()) 32 | 33 | 34 | def rework_commit_message(commit_message, trailers): 35 | if not commit_message: 36 | die(b'Expected a non-empty commit message') 37 | 38 | trailer_names = [trailer.split(b':', 1)[0].lower() for trailer in trailers] 39 | 40 | filtered_lines = [ 41 | line.rstrip() for line in commit_message.split(b'\n') 42 | if line.split(b':', 1)[0].lower() not in trailer_names 43 | ] 44 | 45 | reworked_lines = filtered_lines[:] 46 | 47 | drop_trailing_newlines(reworked_lines) 48 | while len(reworked_lines) > 1 and 
re.match(br'^[A-Z][\w-]+: ', reworked_lines[-1]): 49 | trailers.insert(0, reworked_lines.pop()) 50 | if not reworked_lines: 51 | die(b"Your commit message seems to consist only of Trailers: " + commit_message) 52 | 53 | drop_trailing_newlines(reworked_lines) 54 | 55 | non_empty_trailers = remove_duplicates([t for t in trailers if t.split(b': ', 1)[1].strip()]) 56 | if non_empty_trailers: 57 | reworked_lines += [b''] + non_empty_trailers 58 | reworked_lines += [b''] 59 | return b'\n'.join(reworked_lines) 60 | 61 | 62 | def main(): 63 | trailers = os.environb[b'TRAILERS'].split(b'\n') if os.environb[b'TRAILERS'] else [] 64 | assert all(b':' in trailer for trailer in trailers), trailers 65 | original_commit_message = STDIN.read().strip() 66 | new_commit_message = rework_commit_message(original_commit_message, trailers) 67 | STDOUT.write(new_commit_message) 68 | 69 | 70 | if __name__ == '__main__': 71 | main() 72 | -------------------------------------------------------------------------------- /marge/user.py: -------------------------------------------------------------------------------- 1 | from . 
import gitlab 2 | 3 | 4 | GET = gitlab.GET 5 | 6 | 7 | class User(gitlab.Resource): 8 | 9 | @classmethod 10 | def myself(cls, api): 11 | info = api.call(GET('/user')) 12 | 13 | if info.get('is_admin') is None: # WORKAROUND FOR BUG IN 9.2.2 14 | try: 15 | # sudoing succeeds iff we are admin 16 | api.call(GET('/user'), sudo=info['id']) 17 | info['is_admin'] = True 18 | except gitlab.Forbidden: 19 | info['is_admin'] = False 20 | 21 | return cls(api, info) 22 | 23 | @property 24 | def is_admin(self): 25 | return self.info['is_admin'] 26 | 27 | @classmethod 28 | def fetch_by_id(cls, user_id, api): 29 | info = api.call(GET('/users/%s' % user_id)) 30 | return cls(api, info) 31 | 32 | @classmethod 33 | def fetch_by_username(cls, username, api): 34 | info = api.call(GET( 35 | '/users', 36 | {'username': username}, 37 | gitlab.from_singleton_list(), 38 | )) 39 | return cls(api, info) 40 | 41 | @property 42 | def name(self): 43 | return self.info['name'].strip() 44 | 45 | @property 46 | def username(self): 47 | return self.info['username'] 48 | 49 | @property 50 | def email(self): 51 | """Only visible to admins and 'self'. 
Sigh.""" 52 | return self.info.get('email') 53 | 54 | @property 55 | def state(self): 56 | return self.info['state'] 57 | -------------------------------------------------------------------------------- /nix/sources.json: -------------------------------------------------------------------------------- 1 | { 2 | "nixpkgs": { 3 | "url": "https://github.com/NixOS/nixpkgs-channels/archive/915ce0f1e1a75adec7079ddb6cd3ffba5036b3fc.tar.gz", 4 | "owner": "NixOS", 5 | "branch": "nixos-19.03", 6 | "url_template": "https://github.com///archive/.tar.gz", 7 | "repo": "nixpkgs-channels", 8 | "type": "tarball", 9 | "sha256": "1kmx29i3xy4701z4lgmv5xxslb1djahrjxmrf83ig1whb4vgk4wm", 10 | "description": "Nixpkgs/NixOS branches that track the Nixpkgs/NixOS channels", 11 | "rev": "915ce0f1e1a75adec7079ddb6cd3ffba5036b3fc" 12 | }, 13 | "niv": { 14 | "homepage": "https://github.com/nmattia/niv", 15 | "url": "https://github.com/nmattia/niv/archive/e5e441998ede88dfce5b8b9a7ea99e1e0f1102fa.tar.gz", 16 | "owner": "nmattia", 17 | "branch": "master", 18 | "url_template": "https://github.com///archive/.tar.gz", 19 | "repo": "niv", 20 | "type": "tarball", 21 | "sha256": "0s3pwakbp9qmwzznl8xd3smmymz1s2vrvyip8yizqdllaps4pf18", 22 | "description": "Easy dependency management for Nix projects", 23 | "rev": "e5e441998ede88dfce5b8b9a7ea99e1e0f1102fa" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /nix/sources.nix: -------------------------------------------------------------------------------- 1 | # Read in the json spec for packages we want (so it can be auto-updated). 
2 | # niv: no_update 3 | 4 | # make travis happy, reasonably new nix doesn't need this 5 | let mapAttrs = builtins.mapAttrs or 6 | (f: set: 7 | builtins.listToAttrs (map (attr: { name = attr; value = f attr set.${attr}; }) (builtins.attrNames set))); 8 | in with builtins; 9 | mapAttrs 10 | (_: spec: spec // { outPath = fetchTarball { inherit (spec) url sha256; }; }) 11 | (fromJSON (readFile ./sources.json)) 12 | -------------------------------------------------------------------------------- /pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | persistent=no 3 | 4 | [BASIC] 5 | include-naming-hint=yes 6 | function-rgx=(([a-z_][a-z0-9_]{2,80})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$ 7 | method-rgx=(([a-z_][a-z0-9_]{2,60})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$ 8 | variable-rgx=(([a-z_][a-z0-9_]{2,60})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$ 9 | attr-rgx=(([a-z_][a-z0-9_]{2,40})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$ 10 | argument-rgx=(([a-z_][a-z0-9_]{2,40})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$ 11 | 12 | [MESSAGE CONTROL] 13 | disable=bad-continuation, 14 | fixme, 15 | missing-docstring, 16 | no-self-use, 17 | unsubscriptable-object 18 | 19 | [SIMILARITIES] 20 | min-similarity-lines=10 21 | 22 | [TYPECHECK] 23 | # this can be removed when we bump to asteroid >1.6.1 24 | # see: https://github.com/PyCQA/astroid/pull/487/files 25 | ignored-classes=Popen 26 | 27 | [FORMAT] 28 | max-line-length=110 29 | 30 | [DESIGN] 31 | max-args=10 32 | max-attributes=15 33 | max-public-methods=35 34 | # Maximum number of locals for function / method body 35 | max-locals=25 36 | 37 | [REPORTS] 38 | output-format=parseable 39 | reports=no 40 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | ConfigArgParse 2 | maya 3 | PyYAML 4 | requests 5 | 6 | # testing 7 | pytest 8 | pytest-cov 9 | pytest-flake8 10 | 
pytest-pylint 11 | pytest-runner 12 | -------------------------------------------------------------------------------- /requirements_frozen.txt: -------------------------------------------------------------------------------- 1 | 2 | ConfigArgParse==1.3 3 | PyYAML==5.4.1 4 | astroid==2.5 5 | attrs==20.3.0 6 | certifi==2020.12.5 7 | chardet==4.0.0 8 | coverage==5.4 9 | dateparser==1.0.0 10 | flake8==3.8.4 11 | humanize==3.2.0 12 | idna==2.10 13 | iniconfig==1.1.1 14 | isort==5.7.0 15 | lazy-object-proxy==1.5.2 16 | maya==0.6.1 17 | mccabe==0.6.1 18 | packaging==20.9 19 | pendulum==2.1.2 20 | pluggy==0.13.1 21 | py==1.10.0 22 | pycodestyle==2.6.0 23 | pyflakes==2.2.0 24 | pylint==2.7.0 25 | pyparsing==2.4.7 26 | pytest-cov==2.11.1 27 | pytest-flake8==1.0.7 28 | pytest-pylint==0.18.0 29 | pytest-runner==5.3.0 30 | pytest==6.2.2 31 | python-dateutil==2.8.1 32 | pytz==2021.1 33 | pytzdata==2020.1 34 | regex==2020.11.13 35 | requests==2.25.1 36 | six==1.15.0 37 | snaptime==0.2.4 38 | toml==0.10.2 39 | tzlocal==2.1 40 | urllib3==1.26.5 41 | wrapt==1.11.1 42 | -------------------------------------------------------------------------------- /requirements_override.nix: -------------------------------------------------------------------------------- 1 | { pkgs, python }: 2 | self: super: 3 | let 4 | # Packages use setuptools-scm to try to infer version from source control metadata (say, git tag). 5 | # Authors put setuptools-scm in setup_requires. 6 | # Add it manually to affected packages. 7 | # NOTE: source tarballs don't have scm metadata. 8 | # setuptools-scm will just give up and emit 0.0.0. 
9 | setuptools-scm = python.mkDerivation { 10 | name = "setuptools-scm"; 11 | src = pkgs.fetchurl { 12 | url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz"; 13 | sha256 = "11qs1jvfgflx1msv39jgc6bj9d9a300ra35fwypkr44jayh23psv"; 14 | }; 15 | }; 16 | 17 | addBuildInputs = 18 | pkg: buildInputs: 19 | python.overrideDerivation pkg ( 20 | old: { 21 | buildInputs = old.buildInputs ++ buildInputs; 22 | } 23 | ); 24 | in 25 | { 26 | # Break circular dependency: attrs <-> pytest 27 | attrs = python.overrideDerivation super.attrs ( 28 | old: { 29 | propagatedBuildInputs = [ self.six ]; 30 | } 31 | ); 32 | 33 | # Break circular dependency: mccabe <-> pytest-runner 34 | mccabe = python.overrideDerivation super.mccabe ( 35 | old: { 36 | postPatch = '' 37 | substituteInPlace setup.py --replace "setup_requires=['pytest-runner']," "setup_requires=[]," || true 38 | ''; 39 | } 40 | ); 41 | 42 | # pypi2nix does not handle setup_requires. 
43 | astroid = addBuildInputs super.astroid [ self.pytest-runner ]; 44 | pluggy = addBuildInputs super.pluggy [ setuptools-scm ]; 45 | python-dateutil = addBuildInputs super.python-dateutil [ setuptools-scm ]; 46 | py = addBuildInputs super.py [ setuptools-scm ]; 47 | pylint = addBuildInputs super.pylint [ self.pytest-runner ]; 48 | pytest = addBuildInputs super.pytest [ setuptools-scm ]; 49 | pytest-runner = addBuildInputs super.pytest-runner [ setuptools-scm ]; 50 | pytest-pylint = addBuildInputs super.pytest-pylint [ self.pytest-runner ]; 51 | } 52 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [aliases] 2 | test=pytest 3 | 4 | [tool:pytest] 5 | addopts = --flake8 --pylint --cov=marge 6 | testpaths = tests marge 7 | 8 | [flake8] 9 | max-line-length = 110 10 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | from distutils.core import setup 3 | VERSION = open(os.path.join(os.path.dirname(__file__), 'version')).read().strip() 4 | setup( 5 | name='marge', 6 | version=VERSION, 7 | license='BSD3', 8 | packages=['marge'], 9 | scripts=['marge.app'], 10 | ) 11 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | let 2 | addBuildTools = pkg: tools: pkg.overrideAttrs 3 | (oldAttrs: { nativeBuildInputs = oldAttrs.nativeBuildInputs ++ tools; }); 4 | sources = import ./nix/sources.nix; 5 | ## Tool to bump versions of sources written as json entries to git repos etc. 6 | ## We use it bump nixpkgs itself ATM (just `niv update`). 
7 | niv = (import sources.niv {}).niv; 8 | pkgs = (import sources.nixpkgs {}); 9 | pypi2nix = pkgs.pypi2nix; 10 | make = pkgs.make; 11 | marge-bot = (import ./.).marge-bot; 12 | in 13 | ## create a version of the marge-bot env that has niv 14 | addBuildTools marge-bot [ niv pypi2nix ] 15 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | # reduce noise, see: https://github.com/eisensheng/pytest-catchlog/issues/59 4 | logging.getLogger('flake8').propagate = False 5 | -------------------------------------------------------------------------------- /tests/git_repo_mock.py: -------------------------------------------------------------------------------- 1 | import logging as log 2 | from collections import defaultdict 3 | from datetime import timedelta 4 | import functools 5 | import shlex 6 | 7 | import marge.git as git 8 | 9 | 10 | class RepoMock(git.Repo): 11 | 12 | @classmethod 13 | def init_for_merge_request(cls, merge_request, initial_target_sha, project, forked_project=None): 14 | assert bool(forked_project) == ( 15 | merge_request.source_project_id != merge_request.target_project_id 16 | ) 17 | 18 | target_url = project.ssh_url_to_repo 19 | source_url = forked_project.ssh_url_to_repo if forked_project else target_url 20 | 21 | remote_repos = defaultdict(GitRepoModel) 22 | remote_repos[source_url].set_ref(merge_request.source_branch, merge_request.sha) 23 | remote_repos[target_url].set_ref(merge_request.target_branch, initial_target_sha) 24 | 25 | result = cls( 26 | remote_url=target_url, 27 | local_path='/tmp/blah', 28 | ssh_key_file='/home/homer/.ssh/id_rsa', 29 | timeout=timedelta(seconds=1000000), 30 | reference='the_reference', 31 | ) 32 | 33 | # pylint: disable=attribute-defined-outside-init 34 | result.mock_impl = GitModel(origin=target_url, remote_repos=remote_repos) 35 | return result 36 | 37 | 
def git(self, *args, from_repo=True): 38 | command = args[0] 39 | command_args = args[1:] 40 | 41 | log.info('Run: git %r %s', command, ' '.join(map(repr, command_args))) 42 | assert from_repo == (command != 'clone') 43 | 44 | command_impl_name = command.replace('-', '_') 45 | command_impl = getattr(self.mock_impl, command_impl_name, None) 46 | assert command_impl, ('git: Unexpected command %s' % command) 47 | try: 48 | result = command_impl(*command_args) 49 | except Exception: 50 | log.warning('Failed to simulate: git %r %s', command, command_args) 51 | raise 52 | else: 53 | return self._pretend_result_comes_from_popen(result) 54 | 55 | @staticmethod 56 | def _pretend_result_comes_from_popen(result): 57 | result_bytes = ('' if result is None else str(result)).encode('ascii') 58 | return stub(stdout=result_bytes) 59 | 60 | 61 | class stub: # pylint: disable=invalid-name,too-few-public-methods 62 | def __init__(self, **kwargs): 63 | self.__dict__ = kwargs 64 | 65 | 66 | class GitRepoModel: 67 | def __init__(self, copy_of=None): 68 | # pylint: disable=protected-access 69 | self._refs = dict(copy_of._refs) if copy_of else {} 70 | 71 | def set_ref(self, ref, commit): 72 | self._refs[ref] = commit 73 | 74 | def get_ref(self, ref): 75 | return self._refs[ref] 76 | 77 | def has_ref(self, ref): 78 | return ref in self._refs 79 | 80 | def del_ref(self, ref): 81 | self._refs.pop(ref, None) 82 | 83 | def __repr__(self): 84 | return "<%s: %s>" % (type(self), self._refs) 85 | 86 | 87 | class GitModel: 88 | def __init__(self, origin, remote_repos): 89 | assert origin in remote_repos 90 | 91 | self.remote_repos = remote_repos 92 | self._local_repo = GitRepoModel() 93 | self._remotes = dict(origin=origin) 94 | self._remote_refs = {} 95 | self._branch = None 96 | self.on_push_callbacks = [] 97 | 98 | @property 99 | def _head(self): 100 | return self._local_repo.get_ref(self._branch) 101 | 102 | def remote(self, *args): 103 | action = args[0] 104 | if action == 'rm': 105 | _, 
remote = args 106 | try: 107 | self._remotes.pop(remote) 108 | except KeyError as err: 109 | raise git.GitError('No such remote: %s' % remote) from err 110 | 111 | elif action == 'add': 112 | _, remote, url = args 113 | self._remotes[remote] = url 114 | else: 115 | assert False, args 116 | 117 | def fetch(self, *args): 118 | _, remote_name = args 119 | assert args == ('--prune', remote_name) 120 | remote_url = self._remotes[remote_name] 121 | remote_repo = self.remote_repos[remote_url] 122 | self._remote_refs[remote_name] = GitRepoModel(copy_of=remote_repo) 123 | 124 | def checkout(self, *args): 125 | if args[0] == '-B': # -B == create if it doesn't exist 126 | _, branch, start_point, _ = args 127 | assert args == ('-B', branch, start_point, '--') 128 | assert start_point == '' or '/' in start_point # '' when "local" 129 | 130 | # create if it doesn't exist 131 | if not self._local_repo.has_ref(branch): 132 | if start_point: 133 | remote_name, remote_branch = start_point.split('/') 134 | assert remote_branch == branch 135 | 136 | remote_url = self._remotes[remote_name] 137 | remote_repo = self.remote_repos[remote_url] 138 | commit = remote_repo.get_ref(branch) 139 | self._local_repo.set_ref(branch, commit) 140 | else: 141 | self._local_repo.set_ref(branch, self._head) 142 | else: 143 | branch, _ = args 144 | assert args == (branch, '--') 145 | assert self._local_repo.has_ref(branch) 146 | 147 | # checkout 148 | self._branch = branch 149 | 150 | def branch(self, *args): 151 | if args[0] == "-D": 152 | _, branch = args 153 | assert self._branch != branch 154 | self._local_repo.del_ref(branch) 155 | else: 156 | assert False 157 | 158 | def rev_parse(self, arg): 159 | if arg == 'HEAD': 160 | return self._head 161 | 162 | remote, branch = arg.split('/') 163 | return self._remote_refs[remote].get_ref(branch) 164 | 165 | def rebase(self, arg): 166 | remote, branch = arg.split('/') 167 | new_base = self._remote_refs[remote].get_ref(branch) 168 | if new_base != self._head: 
169 | new_sha = 'rebase(%s onto %s)' % (self._head, new_base) 170 | self._local_repo.set_ref(self._branch, new_sha) 171 | 172 | def merge(self, arg): 173 | remote, branch = arg.split('/') 174 | 175 | other_ref = self._remote_refs[remote].get_ref(branch) 176 | if other_ref != self._head: 177 | new_sha = 'merge(%s with %s)' % (self._head, other_ref) 178 | self._local_repo.set_ref(self._branch, new_sha) 179 | 180 | def push(self, *args): 181 | force_flag, remote_name, refspec = args 182 | 183 | assert force_flag in ('', '--force') 184 | 185 | branch, remote_branch = refspec.split(':') 186 | remote_url = self._remotes[remote_name] 187 | remote_repo = self.remote_repos[remote_url] 188 | 189 | old_sha = remote_repo.get_ref(remote_branch) 190 | new_sha = self._local_repo.get_ref(branch) 191 | 192 | if force_flag: 193 | remote_repo.set_ref(remote_branch, new_sha) 194 | else: 195 | expected_remote_sha = self._remote_refs[remote_name].get_ref(remote_branch) 196 | if old_sha != expected_remote_sha: 197 | raise git.GitError("conflict: can't push") 198 | remote_repo.set_ref(remote_branch, new_sha) 199 | 200 | for callback in self.on_push_callbacks: 201 | callback( 202 | remote_url=remote_url, 203 | remote_branch=remote_branch, 204 | old_sha=old_sha, 205 | new_sha=new_sha, 206 | ) 207 | 208 | def config(self, *args): 209 | assert len(args) == 2 and args[0] == '--get' 210 | _, remote, _ = elems = args[1].split('.') 211 | assert elems == ['remote', remote, 'url'], elems 212 | return self._remotes[remote] 213 | 214 | def diff_index(self, *args): 215 | assert args == ('--quiet', 'HEAD') 216 | # we don't model dirty index 217 | 218 | def ls_files(self, *args): 219 | assert args == ('--others',) 220 | # we don't model untracked files 221 | 222 | def filter_branch(self, *args): 223 | _, _, filter_cmd, commit_range = args 224 | assert args == ('--force', '--msg-filter', filter_cmd, commit_range) 225 | 226 | trailers_var, python, script_path = shlex.split(filter_cmd) 227 | _, 
trailers_str = trailers_var.split('=') 228 | 229 | assert trailers_var == "TRAILERS=%s" % trailers_str 230 | assert python == "python3" 231 | assert script_path.endswith("marge/trailerfilter.py") 232 | 233 | trailers = list(sorted(set(line.split(':')[0] for line in trailers_str.split('\n')))) 234 | assert trailers 235 | 236 | new_sha = functools.reduce( 237 | lambda x, f: "add-%s(%s)" % (f, x), 238 | [trailer.lower() for trailer in trailers], 239 | self._head 240 | ) 241 | self._local_repo.set_ref(self._branch, new_sha) 242 | return new_sha 243 | -------------------------------------------------------------------------------- /tests/gitlab_api_mock.py: -------------------------------------------------------------------------------- 1 | import re 2 | import logging as log 3 | from collections import namedtuple 4 | 5 | import marge.gitlab as gitlab 6 | import tests.test_approvals as test_approvals 7 | import tests.test_commit as test_commit 8 | import tests.test_project as test_project 9 | import tests.test_user as test_user 10 | 11 | GET = gitlab.GET 12 | POST = gitlab.POST 13 | 14 | 15 | def commit(commit_id, status): 16 | return { 17 | 'id': commit_id, 18 | 'short_id': commit_id, 19 | 'author_name': 'J. 
Bond', 20 | 'author_email': 'jbond@mi6.gov.uk', 21 | 'message': 'Shaken, not stirred', 22 | 'status': status, 23 | } 24 | 25 | 26 | class MockLab: # pylint: disable=too-few-public-methods 27 | def __init__(self, initial_master_sha='505e', gitlab_url=None, fork=False, merge_request_options=None): 28 | self.gitlab_url = gitlab_url = gitlab_url or 'http://git.example.com' 29 | self.api = api = Api(gitlab_url=gitlab_url, auth_token='no-token', initial_state='initial') 30 | 31 | api.add_transition(GET('/version'), Ok({'version': '9.2.3-ee'})) 32 | 33 | self.user_info = dict(test_user.INFO) 34 | self.user_id = self.user_info['id'] 35 | api.add_user(self.user_info, is_current=True) 36 | 37 | self.project_info = dict(test_project.INFO) 38 | api.add_project(self.project_info) 39 | 40 | self.commit_info = dict(test_commit.INFO) 41 | api.add_commit(self.project_info['id'], self.commit_info) 42 | 43 | self.author_id = 234234 44 | self.merge_request_info = { 45 | 'id': 53, 46 | 'iid': 54, 47 | 'title': 'a title', 48 | 'project_id': 1234, 49 | 'author': {'id': self.author_id}, 50 | 'assignees': [{'id': self.user_id}], 51 | 'approved_by': [], 52 | 'state': 'opened', 53 | 'merge_status': 'can_be_merged', 54 | 'sha': self.commit_info['id'], 55 | 'source_project_id': 1234, 56 | 'target_project_id': 1234, 57 | 'source_branch': 'useless_new_feature', 58 | 'force_remove_source_branch': True, 59 | 'target_branch': 'master', 60 | 'work_in_progress': False, 61 | 'blocking_discussions_resolved': True, 62 | 'web_url': 'http://git.example.com/group/project/merge_request/666', 63 | } 64 | if merge_request_options is not None: 65 | self.merge_request_info.update(merge_request_options) 66 | 67 | if fork: 68 | self.forked_project_info = dict( 69 | self.project_info, 70 | id=4321, 71 | ssh_url_to_repo='ssh://some.other.project/stuff', 72 | ) 73 | api.add_project(self.forked_project_info) 74 | self.merge_request_info.update({'iid': 55, 'source_project_id': '4321'}) 75 | else: 76 | 
self.forked_project_info = None 77 | 78 | api.add_merge_request(self.merge_request_info) 79 | 80 | self.initial_master_sha = initial_master_sha 81 | self.approvals_info = dict( 82 | test_approvals.INFO, 83 | id=self.merge_request_info['id'], 84 | iid=self.merge_request_info['iid'], 85 | project_id=self.merge_request_info['project_id'], 86 | approvals_left=0, 87 | ) 88 | api.add_approvals(self.approvals_info) 89 | api.add_transition( 90 | GET( 91 | '/projects/1234/repository/branches/{target}'.format( 92 | target=self.merge_request_info['target_branch'], 93 | ), 94 | ), 95 | Ok({'commit': {'id': self.initial_master_sha}}), 96 | ) 97 | 98 | 99 | class Api(gitlab.Api): 100 | def __init__(self, gitlab_url, auth_token, initial_state): 101 | super().__init__(gitlab_url, auth_token) 102 | 103 | self._transitions = {} 104 | self.state = initial_state 105 | self.notes = [] 106 | 107 | def call(self, command, sudo=None): 108 | log.info( 109 | 'CALL: %s%s @ %s', 110 | 'sudo %s ' % sudo if sudo is not None else '', 111 | command, 112 | self.state, 113 | ) 114 | try: 115 | response, next_state, side_effect = self._find(command, sudo) 116 | except KeyError as err: 117 | page = command.args.get('page') 118 | if page == 0: 119 | no_page_args = dict((k, v) for k, v in command.args.items() if k not in ['page', 'per_page']) 120 | try: 121 | return self.call(command._replace(args=no_page_args)) 122 | except MockedEndpointNotFound: 123 | pass # raise the right exception below 124 | elif page: # page is not None 125 | try: 126 | # only return an empty list if the command exists 127 | self.call(command.for_page(0)) 128 | except MockedEndpointNotFound: 129 | pass # raise the right exception below 130 | else: 131 | return [] 132 | 133 | raise MockedEndpointNotFound(command, sudo, self.state) from err 134 | else: 135 | if next_state: 136 | self.state = next_state 137 | 138 | if side_effect: 139 | side_effect() 140 | return response() 141 | 142 | def _find(self, command, sudo): 143 | 
more_specific = self._transitions.get(_key(command, sudo, self.state)) 144 | return more_specific or self._transitions[_key(command, sudo, None)] 145 | 146 | def add_transition(self, command, response, sudo=None, from_state=None, to_state=None, side_effect=None): 147 | from_states = from_state if isinstance(from_state, list) else [from_state] 148 | 149 | for _from_state in from_states: 150 | show_from = '*' if _from_state is None else repr(_from_state) 151 | log.info( 152 | 'REGISTERING %s%s from %s to %s', 153 | 'sudo %s ' % sudo if sudo is not None else '', 154 | command, 155 | show_from, 156 | show_from if to_state is None else repr(to_state), 157 | ) 158 | self._transitions[_key(command, sudo, _from_state)] = (response, to_state, side_effect) 159 | 160 | def add_resource(self, path, info, sudo=None, from_state=None, to_state=None): 161 | self.add_transition(GET(path.format(attrs(info))), Ok(info), sudo, from_state, to_state) 162 | 163 | def add_user(self, info, is_current=False, sudo=None, from_state=None, to_state=None): 164 | self.add_resource('/users/{0.id}', info, sudo, from_state, to_state) 165 | if is_current: 166 | self.add_resource('/user', info, sudo, from_state, to_state) 167 | 168 | def add_project(self, info, sudo=None, from_state=None, to_state=None): 169 | self.add_resource('/projects/{0.id}', info, sudo, from_state, to_state) 170 | self.add_transition( 171 | GET('/projects/{0.id}/merge_requests'.format(attrs(info))), 172 | List(r'/projects/\d+/merge_requests/\d+$', self), 173 | sudo, from_state, to_state, 174 | ) 175 | 176 | def add_merge_request(self, info, sudo=None, from_state=None, to_state=None): 177 | self.add_resource('/projects/{0.project_id}/merge_requests/{0.iid}', info, sudo, from_state, to_state) 178 | 179 | def add_commit(self, project_id, info, sudo=None, from_state=None, to_state=None): 180 | path = '/projects/%s/repository/commits/{0.id}' % project_id 181 | self.add_resource(path, info, sudo, from_state, to_state) 182 | 183 | def 
add_approvals(self, info, sudo=None, from_state=None, to_state=None): 184 | path = '/projects/{0.project_id}/merge_requests/{0.iid}/approvals' 185 | self.add_resource(path, info, sudo, from_state, to_state) 186 | 187 | def add_pipelines(self, project_id, info, sudo=None, from_state=None, to_state=None): 188 | self.add_transition( 189 | GET( 190 | '/projects/%s/pipelines' % project_id, 191 | args={'ref': info['ref'], 'order_by': 'id', 'sort': 'desc'}, 192 | ), 193 | Ok([info]), 194 | sudo, from_state, to_state, 195 | ) 196 | 197 | def expected_note(self, merge_request, note, sudo=None, from_state=None, to_state=None): 198 | self.add_transition( 199 | POST( 200 | '/projects/{0.project_id}/merge_requests/{0.iid}/notes'.format(attrs(merge_request)), 201 | args={'body': note} 202 | ), 203 | LeaveNote(note, self), 204 | sudo, from_state, to_state, 205 | ) 206 | 207 | 208 | def _key(command, sudo, state): 209 | return command._replace(args=frozenset(command.args.items())), sudo, state 210 | 211 | 212 | class Ok(namedtuple('Ok', 'result')): 213 | def __call__(self): 214 | return self.result 215 | 216 | 217 | class Error(namedtuple('Error', 'exc')): 218 | def __call__(self): 219 | raise self.exc 220 | 221 | 222 | class List(namedtuple('List', 'prefix api')): 223 | def _call__(self): 224 | candidates = ( 225 | command for command, _ in self.api._transitions.keys() # pylint: disable=protected-access 226 | if isinstance(command, GET) and re.match(self.prefix, command.endpoint) 227 | ) 228 | 229 | results = [] 230 | for command in candidates: 231 | try: 232 | results.append(self.api.call(command)) 233 | except MockedEndpointNotFound: 234 | pass 235 | 236 | return results 237 | 238 | 239 | class LeaveNote(namedtuple('LeaveNote', 'note api')): 240 | def __call__(self): 241 | self.api.notes.append(self.note) 242 | return {} 243 | 244 | 245 | class MockedEndpointNotFound(Exception): 246 | pass 247 | 248 | 249 | def attrs(_dict): 250 | return namedtuple('Attrs', 
_dict.keys())(*_dict.values()) 251 | -------------------------------------------------------------------------------- /tests/test_app.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import datetime 3 | import os 4 | import re 5 | import shlex 6 | import tempfile 7 | import unittest.mock as mock 8 | 9 | import pytest 10 | 11 | import marge.app as app 12 | import marge.bot as bot_module 13 | import marge.interval as interval 14 | import marge.job as job 15 | 16 | import tests.gitlab_api_mock as gitlab_mock 17 | from tests.test_user import INFO as user_info 18 | 19 | 20 | @contextlib.contextmanager 21 | def config_file(): 22 | content = ''' 23 | add-part-of: true 24 | add-reviewers: true 25 | add-tested: true 26 | branch-regexp: foo.*bar 27 | ci-timeout: 5min 28 | embargo: Friday 1pm - Monday 7am 29 | git-timeout: 150s 30 | gitlab-url: "http://foo.com" 31 | impersonate-approvers: true 32 | project-regexp: foo.*bar 33 | ssh-key: KEY 34 | ''' 35 | with tempfile.NamedTemporaryFile(mode='w', prefix='config-file-') as tmp_config_file: 36 | try: 37 | tmp_config_file.write(content) 38 | tmp_config_file.flush() 39 | yield tmp_config_file.name 40 | finally: 41 | tmp_config_file.close() 42 | 43 | 44 | @contextlib.contextmanager 45 | def env(**kwargs): 46 | original = os.environ.copy() 47 | 48 | os.environ.clear() 49 | for key, value in kwargs.items(): 50 | os.environ[key] = value 51 | 52 | yield 53 | 54 | os.environ.clear() 55 | for key, value in original.items(): 56 | os.environ[key] = value 57 | 58 | 59 | @contextlib.contextmanager 60 | def main(cmdline=''): 61 | def api_mock(gitlab_url, auth_token): 62 | assert gitlab_url == 'http://foo.com' 63 | assert auth_token in ('NON-ADMIN-TOKEN', 'ADMIN-TOKEN') 64 | api = gitlab_mock.Api(gitlab_url=gitlab_url, auth_token=auth_token, initial_state='initial') 65 | user_info_for_token = dict(user_info, is_admin=auth_token == 'ADMIN-TOKEN') 66 | 
def test_default_values():
    """With only the mandatory settings provided, the bot uses stock configuration."""
    with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY",
             MARGE_GITLAB_URL='http://foo.com'), main() as bot:
        assert bot.user.info == user_info
        assert bot.config.project_regexp == re.compile('.*')
        assert bot.config.git_timeout == datetime.timedelta(seconds=120)
        assert bot.config.merge_opts == job.MergeJobOptions.default()
        assert bot.config.merge_order == 'created_at'
def test_add_reviewers():
    """--add-reviewers requires an admin token; with it, the option is enabled.

    FIX: the `pytest.raises` branch bound `as bot` without using it
    (pylint unused-variable); sibling tests use a bare `with main(...)`.
    """
    # A non-admin token cannot impersonate reviewers, so startup must fail.
    with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'):
        with pytest.raises(AssertionError):
            with main('--add-reviewers'):
                pass

    with env(MARGE_AUTH_TOKEN="ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'):
        with main('--add-reviewers') as bot:
            assert bot.config.merge_opts != job.MergeJobOptions.default()
            assert bot.config.merge_opts == job.MergeJobOptions.default(add_reviewers=True)
def test_impersonate_approvers():
    """--impersonate-approvers needs admin rights; with them it enables reapprove."""
    non_admin_env = dict(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY",
                         MARGE_GITLAB_URL='http://foo.com')
    admin_env = dict(non_admin_env, MARGE_AUTH_TOKEN="ADMIN-TOKEN")

    # Without admin rights the bot refuses to start with this flag.
    with env(**non_admin_env), pytest.raises(AssertionError), main('--impersonate-approvers'):
        pass

    with env(**admin_env), main('--impersonate-approvers') as bot:
        default_opts = job.MergeJobOptions.default()
        assert bot.config.merge_opts != default_opts
        assert bot.config.merge_opts == job.MergeJobOptions.default(reapprove=True)
main("--max-ci-time-in-minutes=5") as bot: 200 | assert bot.config.merge_opts != job.MergeJobOptions.default() 201 | assert bot.config.merge_opts == job.MergeJobOptions.default( 202 | ci_timeout=datetime.timedelta(seconds=5*60), 203 | ) 204 | 205 | 206 | def test_git_timeout(): 207 | with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): 208 | with main("--git-timeout '150 s'") as bot: 209 | assert bot.config.git_timeout == datetime.timedelta(seconds=150) 210 | 211 | 212 | def test_branch_regexp(): 213 | with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): 214 | with main("--branch-regexp='foo.*bar'") as bot: 215 | assert bot.config.branch_regexp == re.compile('foo.*bar') 216 | 217 | 218 | def test_source_branch_regexp(): 219 | with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): 220 | with main("--source-branch-regexp='foo.*bar'") as bot: 221 | assert bot.config.source_branch_regexp == re.compile('foo.*bar') 222 | 223 | 224 | def test_git_reference_repo(): 225 | with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): 226 | with main("--git-reference-repo='/foo/reference_repo'") as bot: 227 | assert bot.config.git_reference_repo == '/foo/reference_repo' 228 | 229 | 230 | def test_merge_order_updated(): 231 | with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): 232 | with main("--merge-order='updated_at'") as bot: 233 | assert bot.config.merge_order == 'updated_at' 234 | 235 | 236 | def test_merge_order_assigned(): 237 | with env(MARGE_AUTH_TOKEN="NON-ADMIN-TOKEN", MARGE_SSH_KEY="KEY", MARGE_GITLAB_URL='http://foo.com'): 238 | with main("--merge-order='assigned_at'") as bot: 239 | assert bot.config.merge_order == 'assigned_at' 240 | 241 | 242 | # FIXME: I'd reallly prefer this to be a doctest, but adding --doctest-modules 243 | # 
def test_time_interval():
    """Several spellings of fifteen minutes all parse to the same timedelta."""
    fifteen_minutes = datetime.timedelta(seconds=900)
    for spelling in ['15min', '15m', '.25h', '900s']:
        assert app.time_interval(spelling) == fifteen_minutes
job.MergeJobOptions.default() 292 | assert bot.config.merge_opts == job.MergeJobOptions.default( 293 | embargo=interval.IntervalUnion.from_human('Fri 1pm-Mon 7am'), 294 | add_tested=True, 295 | add_part_of=True, 296 | add_reviewers=True, 297 | reapprove=True, 298 | ci_timeout=datetime.timedelta(seconds=20*60), 299 | ) 300 | assert bot.config.project_regexp == re.compile('foo.*bar') 301 | assert bot.config.git_timeout == datetime.timedelta(seconds=100) 302 | assert bot.config.branch_regexp == re.compile('foo.*bar') 303 | -------------------------------------------------------------------------------- /tests/test_approvals.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import call, Mock, patch 2 | 3 | import pytest 4 | 5 | from marge.gitlab import Api, GET, POST, Version 6 | from marge.approvals import Approvals 7 | from marge.merge_request import MergeRequest 8 | import marge.user 9 | # testing this here is more convenient 10 | from marge.job import CannotMerge, _get_reviewer_names_and_emails 11 | 12 | INFO = { 13 | "id": 5, 14 | "iid": 6, 15 | "project_id": 1, 16 | "title": "Approvals API", 17 | "description": "Test", 18 | "state": "opened", 19 | "created_at": "2016-06-08T00:19:52.638Z", 20 | "updated_at": "2016-06-08T21:20:42.470Z", 21 | "merge_status": "can_be_merged", 22 | "approvals_required": 3, 23 | "approvals_left": 1, 24 | "approved_by": [ 25 | { 26 | "user": { 27 | "name": "Administrator", 28 | "username": "root", 29 | "id": 1, 30 | "state": "active", 31 | "avatar_url": "".join([ 32 | "http://www.gravatar.com/avatar/", 33 | "e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon", 34 | ]), 35 | "web_url": "http://localhost:3000/u/root" 36 | }, 37 | }, 38 | { 39 | "user": { 40 | "name": "Roger Ebert", 41 | "username": "ebert", 42 | "id": 2, 43 | "state": "active", 44 | } 45 | } 46 | ] 47 | } 48 | USERS = { 49 | 1: { 50 | "name": "Administrator", 51 | "username": "root", 52 | "id": 1, 53 | "state": 
"active", 54 | "email": "root@localhost", 55 | }, 56 | 2: { 57 | "name": "Roger Ebert", 58 | "username": "ebert", 59 | "id": 2, 60 | "state": "active", 61 | "email": "ebert@example.com", 62 | }, 63 | } 64 | 65 | 66 | # pylint: disable=attribute-defined-outside-init 67 | class TestApprovals: 68 | 69 | def setup_method(self, _method): 70 | self.api = Mock(Api) 71 | self.api.version = Mock(return_value=Version.parse('9.2.3-ee')) 72 | self.approvals = Approvals(api=self.api, info=INFO) 73 | 74 | def test_fetch_from_merge_request(self): 75 | api = self.api 76 | api.call = Mock(return_value=INFO) 77 | 78 | merge_request = MergeRequest(api, {'id': 74, 'iid': 6, 'project_id': 1234}) 79 | approvals = merge_request.fetch_approvals() 80 | 81 | api.call.assert_called_once_with(GET( 82 | '/projects/1234/merge_requests/6/approvals' 83 | )) 84 | assert approvals.info == INFO 85 | 86 | def test_fetch_from_merge_request_ce_compat(self): 87 | api = self.api 88 | api.version = Mock(return_value=Version.parse('9.2.3')) 89 | api.call = Mock() 90 | 91 | merge_request = MergeRequest(api, {'id': 74, 'iid': 6, 'project_id': 1234}) 92 | approvals = merge_request.fetch_approvals() 93 | 94 | api.call.assert_not_called() 95 | assert approvals.info == { 96 | 'id': 74, 'iid': 6, 'project_id': 1234, 'approvals_left': 0, 'approved_by': [], 97 | } 98 | 99 | def test_properties(self): 100 | assert self.approvals.project_id == 1 101 | assert self.approvals.approvals_left == 1 102 | assert self.approvals.approver_usernames == ['root', 'ebert'] 103 | assert not self.approvals.sufficient 104 | 105 | def test_sufficiency(self): 106 | good_approvals = Approvals(api=self.api, info=dict(INFO, approvals_required=1, approvals_left=0)) 107 | assert good_approvals.sufficient 108 | 109 | def test_reapprove(self): 110 | self.approvals.reapprove() 111 | self.api.call.has_calls([ 112 | call(POST(endpoint='/projects/1/merge_requests/6/approve', args={}, extract=None), sudo=1), 113 | 
call(POST(endpoint='/projects/1/merge_requests/6/approve', args={}, extract=None), sudo=2) 114 | ]) 115 | 116 | @patch('marge.user.User.fetch_by_id') 117 | def test_get_reviewer_names_and_emails(self, user_fetch_by_id): 118 | user_fetch_by_id.side_effect = lambda id, _: marge.user.User(self.api, USERS[id]) 119 | assert _get_reviewer_names_and_emails(commits=[], approvals=self.approvals, api=self.api) == [ 120 | 'Administrator ', 121 | 'Roger Ebert ' 122 | ] 123 | 124 | @patch('marge.user.User.fetch_by_id') 125 | def test_approvals_fails_when_same_author(self, user_fetch_by_id): 126 | info = dict(INFO, approved_by=list(INFO['approved_by'])) 127 | del info['approved_by'][1] 128 | approvals = Approvals(self.api, info) 129 | user_fetch_by_id.side_effect = lambda id, _: marge.user.User(self.api, USERS[id]) 130 | commits = [{'author_email': 'root@localhost'}] 131 | with pytest.raises(CannotMerge): 132 | _get_reviewer_names_and_emails(commits=commits, approvals=approvals, api=self.api) 133 | 134 | @patch('marge.user.User.fetch_by_id') 135 | def test_approvals_succeeds_with_independent_author(self, user_fetch_by_id): 136 | user_fetch_by_id.side_effect = lambda id, _: marge.user.User(self.api, USERS[id]) 137 | print(INFO['approved_by']) 138 | commits = [{'author_email': 'root@localhost'}] 139 | assert _get_reviewer_names_and_emails(commits=commits, approvals=self.approvals, api=self.api) == [ 140 | 'Administrator ', 141 | 'Roger Ebert ', 142 | ] 143 | -------------------------------------------------------------------------------- /tests/test_batch_job.py: -------------------------------------------------------------------------------- 1 | # pylint: disable=protected-access 2 | from unittest.mock import ANY, patch, create_autospec 3 | 4 | import pytest 5 | 6 | import marge.git 7 | import marge.project 8 | import marge.user 9 | from marge.batch_job import BatchMergeJob, CannotBatch 10 | from marge.gitlab import GET 11 | from marge.job import CannotMerge, MergeJobOptions 12 | 
from marge.merge_request import MergeRequest 13 | from tests.gitlab_api_mock import MockLab, Ok, commit 14 | 15 | 16 | class TestBatchJob: 17 | @pytest.fixture(params=[True, False]) 18 | def fork(self, request): 19 | return request.param 20 | 21 | @pytest.fixture() 22 | def mocklab(self, fork): 23 | return MockLab(fork=fork) 24 | 25 | @pytest.fixture() 26 | def api(self, mocklab): 27 | return mocklab.api 28 | 29 | def _mock_merge_request(self, **options): 30 | return create_autospec(marge.merge_request.MergeRequest, spec_set=True, **options) 31 | 32 | def get_batch_merge_job(self, api, mocklab, **batch_merge_kwargs): 33 | project_id = mocklab.project_info['id'] 34 | merge_request_iid = mocklab.merge_request_info['iid'] 35 | 36 | merge_request = MergeRequest.fetch_by_iid(project_id, merge_request_iid, api) 37 | 38 | params = { 39 | 'api': api, 40 | 'user': marge.user.User.myself(api), 41 | 'project': marge.project.Project.fetch_by_id(project_id, api), 42 | 'repo': create_autospec(marge.git.Repo, spec_set=True), 43 | 'options': MergeJobOptions.default(), 44 | 'merge_requests': [merge_request] 45 | } 46 | params.update(batch_merge_kwargs) 47 | return BatchMergeJob(**params) 48 | 49 | def test_remove_batch_branch(self, api, mocklab): 50 | repo = create_autospec(marge.git.Repo, spec_set=True) 51 | batch_merge_job = self.get_batch_merge_job(api, mocklab, repo=repo) 52 | batch_merge_job.remove_batch_branch() 53 | repo.remove_branch.assert_called_once_with( 54 | BatchMergeJob.BATCH_BRANCH_NAME, 55 | ) 56 | 57 | def test_close_batch_mr(self, api, mocklab): 58 | with patch('marge.batch_job.MergeRequest') as mr_class: 59 | batch_mr = self._mock_merge_request() 60 | mr_class.search.return_value = [batch_mr] 61 | 62 | batch_merge_job = self.get_batch_merge_job(api, mocklab) 63 | batch_merge_job.close_batch_mr() 64 | 65 | params = { 66 | 'author_id': batch_merge_job._user.id, 67 | 'labels': BatchMergeJob.BATCH_BRANCH_NAME, 68 | 'state': 'opened', 69 | 'order_by': 'created_at', 
70 | 'sort': 'desc', 71 | } 72 | mr_class.search.assert_called_once_with( 73 | api=ANY, 74 | project_id=ANY, 75 | params=params, 76 | ) 77 | batch_mr.close.assert_called_once() 78 | 79 | def test_create_batch_mr(self, api, mocklab): 80 | with patch('marge.batch_job.MergeRequest') as mr_class: 81 | batch_mr = self._mock_merge_request() 82 | mr_class.create.return_value = batch_mr 83 | 84 | batch_merge_job = self.get_batch_merge_job(api, mocklab) 85 | target_branch = 'master' 86 | r_batch_mr = batch_merge_job.create_batch_mr(target_branch) 87 | 88 | params = { 89 | 'source_branch': BatchMergeJob.BATCH_BRANCH_NAME, 90 | 'target_branch': target_branch, 91 | 'title': 'Marge Bot Batch MR - DO NOT TOUCH', 92 | 'labels': BatchMergeJob.BATCH_BRANCH_NAME, 93 | } 94 | mr_class.create.assert_called_once_with( 95 | api=ANY, 96 | project_id=ANY, 97 | params=params, 98 | ) 99 | assert r_batch_mr is batch_mr 100 | 101 | def test_get_mrs_with_common_target_branch(self, api, mocklab): 102 | master_mrs = [ 103 | self._mock_merge_request(target_branch='master'), 104 | self._mock_merge_request(target_branch='master'), 105 | ] 106 | non_master_mrs = [ 107 | self._mock_merge_request(target_branch='non_master'), 108 | self._mock_merge_request(target_branch='non_master'), 109 | ] 110 | batch_merge_job = self.get_batch_merge_job( 111 | api, mocklab, 112 | merge_requests=non_master_mrs + master_mrs, 113 | ) 114 | r_maser_mrs = batch_merge_job.get_mrs_with_common_target_branch('master') 115 | assert r_maser_mrs == master_mrs 116 | 117 | @patch.object(BatchMergeJob, 'get_mr_ci_status') 118 | def test_ensure_mergeable_mr_ci_not_ok(self, bmj_get_mr_ci_status, api, mocklab): 119 | batch_merge_job = self.get_batch_merge_job(api, mocklab) 120 | bmj_get_mr_ci_status.return_value = 'failed' 121 | merge_request = self._mock_merge_request( 122 | assignee_ids=[batch_merge_job._user.id], 123 | state='opened', 124 | work_in_progress=False, 125 | squash=False, 126 | ) 127 | 
merge_request.fetch_approvals.return_value.sufficient = True 128 | with pytest.raises(CannotBatch) as exc_info: 129 | batch_merge_job.ensure_mergeable_mr(merge_request) 130 | 131 | assert str(exc_info.value) == 'This MR has not passed CI.' 132 | 133 | def test_push_batch(self, api, mocklab): 134 | batch_merge_job = self.get_batch_merge_job(api, mocklab) 135 | batch_merge_job.push_batch() 136 | batch_merge_job._repo.push.assert_called_once_with( 137 | BatchMergeJob.BATCH_BRANCH_NAME, 138 | force=True, 139 | ) 140 | 141 | def test_merge_batch(self, api, mocklab): 142 | batch_merge_job = self.get_batch_merge_job(api, mocklab) 143 | target_branch = 'master' 144 | source_branch = mocklab.merge_request_info['source_branch'] 145 | batch_merge_job.merge_batch(target_branch, source_branch, no_ff=False) 146 | batch_merge_job._repo.fast_forward.assert_called_once_with( 147 | target_branch, 148 | source_branch, 149 | ) 150 | 151 | def test_merge_batch_with_no_ff_enabled(self, api, mocklab): 152 | batch_merge_job = self.get_batch_merge_job(api, mocklab) 153 | target_branch = 'master' 154 | source_branch = mocklab.merge_request_info['source_branch'] 155 | batch_merge_job.merge_batch(target_branch, source_branch, no_ff=True) 156 | batch_merge_job._repo.merge.assert_called_once_with( 157 | target_branch, 158 | source_branch, 159 | '--no-ff' 160 | ) 161 | batch_merge_job._repo.fast_forward.assert_not_called() 162 | 163 | def test_ensure_mr_not_changed(self, api, mocklab): 164 | with patch('marge.batch_job.MergeRequest') as mr_class: 165 | batch_merge_job = self.get_batch_merge_job(api, mocklab) 166 | merge_request = self._mock_merge_request() 167 | changed_merge_request = self._mock_merge_request() 168 | mr_class.fetch_by_iid.return_value = changed_merge_request 169 | 170 | with pytest.raises(CannotMerge): 171 | batch_merge_job.ensure_mr_not_changed(merge_request) 172 | 173 | mr_class.fetch_by_iid.assert_called_once_with( 174 | merge_request.project_id, 175 | merge_request.iid, 176 
| batch_merge_job._api, 177 | ) 178 | 179 | def test_fuse_mr_when_target_branch_was_moved(self, api, mocklab): 180 | batch_merge_job = self.get_batch_merge_job(api, mocklab) 181 | merge_request = self._mock_merge_request(target_branch='master') 182 | with pytest.raises(CannotBatch) as exc_info: 183 | batch_merge_job.accept_mr(merge_request, 'abc') 184 | assert str(exc_info.value) == 'Someone was naughty and by-passed marge' 185 | 186 | def test_fuse_mr_when_source_branch_was_moved(self, api, mocklab): 187 | batch_merge_job = self.get_batch_merge_job(api, mocklab) 188 | merge_request = self._mock_merge_request( 189 | source_project_id=mocklab.merge_request_info['source_project_id'], 190 | target_branch='master', 191 | source_branch=mocklab.merge_request_info['source_branch'], 192 | ) 193 | 194 | api.add_transition( 195 | GET( 196 | '/projects/{project_iid}/repository/branches/useless_new_feature'.format( 197 | project_iid=mocklab.merge_request_info['source_project_id'], 198 | ), 199 | ), 200 | Ok({'commit': commit(commit_id='abc', status='running')}), 201 | ) 202 | 203 | with pytest.raises(CannotMerge) as exc_info: 204 | batch_merge_job.accept_mr(merge_request, mocklab.initial_master_sha) 205 | 206 | assert str(exc_info.value) == 'Someone pushed to branch while we were trying to merge' 207 | -------------------------------------------------------------------------------- /tests/test_commit.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import Mock 2 | 3 | from marge.gitlab import Api, GET 4 | from marge.commit import Commit 5 | 6 | 7 | INFO = { 8 | "id": "6104942438c14ec7bd21c6cd5bd995272b3faff6", 9 | "short_id": "6104942438c", 10 | "title": "Sanitize for network graph", 11 | "author_name": "randx", 12 | "author_email": "dmitriy.zaporozhets@gmail.com", 13 | "committer_name": "Dmitriy", 14 | "committer_email": "dmitriy.zaporozhets@gmail.com", 15 | "created_at": "2012-09-20T09:06:12+03:00", 16 | "message": 
"Sanitize for network graph", 17 | "committed_date": "2012-09-20T09:06:12+03:00", 18 | "authored_date": "2012-09-20T09:06:12+03:00", 19 | "parent_ids": [ 20 | "ae1d9fb46aa2b07ee9836d49862ec4e2c46fbbba", 21 | ], 22 | "stats": { 23 | "additions": 15, 24 | "deletions": 10, 25 | "total": 25, 26 | }, 27 | "status": "running", 28 | } 29 | 30 | 31 | # pylint: disable=attribute-defined-outside-init 32 | class TestProjectWithCommits: 33 | 34 | def setup_method(self, _method): 35 | self.api = Mock(Api) 36 | 37 | def test_fetch_by_id(self): 38 | api = self.api 39 | api.call = Mock(return_value=INFO) 40 | 41 | commit = Commit.fetch_by_id(project_id=1234, sha=INFO['id'], api=api) 42 | 43 | api.call.assert_called_once_with(GET( 44 | '/projects/1234/repository/commits/6104942438c14ec7bd21c6cd5bd995272b3faff6' 45 | )) 46 | assert commit.info == INFO 47 | 48 | def test_last_on_branch(self): 49 | self.api.call.side_effect = lambda *_, **__: {'commit': INFO} 50 | Commit.last_on_branch(project_id=1234, branch='foobar-branch', api=self.api) 51 | self.api.call.assert_called_once_with(GET('/projects/1234/repository/branches/foobar-branch')) 52 | 53 | def test_last_on_branch_encoding(self): 54 | self.api.call.side_effect = lambda *_, **__: {'commit': INFO} 55 | Commit.last_on_branch(project_id=1234, branch='foo/bar', api=self.api) 56 | self.api.call.assert_called_once_with(GET('/projects/1234/repository/branches/foo%2Fbar')) 57 | 58 | def test_properties(self): 59 | commit = Commit(api=self.api, info=INFO) 60 | assert commit.id == "6104942438c14ec7bd21c6cd5bd995272b3faff6" 61 | assert commit.short_id == "6104942438c" 62 | assert commit.title == "Sanitize for network graph" 63 | assert commit.author_name == "randx" 64 | assert commit.author_email == "dmitriy.zaporozhets@gmail.com" 65 | assert commit.status == "running" 66 | -------------------------------------------------------------------------------- /tests/test_git.py: 
-------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import re 4 | import shlex 5 | import subprocess 6 | import unittest.mock as mock 7 | 8 | import pytest 9 | 10 | import marge.git 11 | from marge.git import GIT_SSH_COMMAND 12 | 13 | 14 | # pylint: disable=attribute-defined-outside-init 15 | @mock.patch('marge.git._run') 16 | class TestRepo: 17 | 18 | def setup_method(self, _method): 19 | self.repo = marge.git.Repo( 20 | remote_url='ssh://git@git.foo.com/some/repo.git', 21 | local_path='/tmp/local/path', 22 | ssh_key_file=None, 23 | timeout=datetime.timedelta(seconds=1), 24 | reference=None, 25 | ) 26 | 27 | def test_clone(self, mocked_run): 28 | self.repo.clone() 29 | assert get_calls(mocked_run) == [ 30 | 'git clone --origin=origin ssh://git@git.foo.com/some/repo.git /tmp/local/path', 31 | ] 32 | 33 | def test_config_user_info(self, mocked_run): 34 | self.repo.config_user_info('bart', 'bart.simpson@gmail.com') 35 | assert get_calls(mocked_run) == [ 36 | 'git -C /tmp/local/path config user.email bart.simpson@gmail.com', 37 | 'git -C /tmp/local/path config user.name bart', 38 | ] 39 | 40 | def test_rebase_success(self, mocked_run): 41 | self.repo.rebase('feature_branch', 'master_of_the_universe') 42 | 43 | assert get_calls(mocked_run) == [ 44 | 'git -C /tmp/local/path fetch --prune origin', 45 | 'git -C /tmp/local/path checkout -B feature_branch origin/feature_branch --', 46 | 'git -C /tmp/local/path rebase origin/master_of_the_universe', 47 | 'git -C /tmp/local/path rev-parse HEAD' 48 | ] 49 | 50 | def test_merge_success(self, mocked_run): 51 | self.repo.merge('feature_branch', 'master_of_the_universe') 52 | 53 | assert get_calls(mocked_run) == [ 54 | 'git -C /tmp/local/path fetch --prune origin', 55 | 'git -C /tmp/local/path checkout -B feature_branch origin/feature_branch --', 56 | 'git -C /tmp/local/path merge origin/master_of_the_universe', 57 | 'git -C /tmp/local/path rev-parse HEAD' 58 | ] 
59 | 60 | def test_reviewer_tagging_success(self, mocked_run): 61 | self.repo.tag_with_trailer( 62 | trailer_name='Reviewed-by', 63 | trailer_values=['John Simon '], 64 | branch='feature_branch', 65 | start_commit='origin/master_of_the_universe', 66 | ) 67 | 68 | rewrite, parse = get_calls(mocked_run) 69 | pattern = ''.join([ 70 | 'git -C /tmp/local/path filter-branch --force ', 71 | '--msg-filter.*John Simon .*origin/master_of_the_universe..feature_branch', 72 | ]) 73 | assert re.match(pattern, rewrite) 74 | assert parse == 'git -C /tmp/local/path rev-parse HEAD' 75 | 76 | def test_reviewer_tagging_failure(self, mocked_run): 77 | def fail_on_filter_branch(*args, **unused_kwargs): 78 | if 'filter-branch' in args: 79 | raise subprocess.CalledProcessError(returncode=1, cmd='git rebase blah') 80 | if 'rev-parse' in args or 'reset' in args: 81 | return mock.Mock() 82 | raise Exception('Unexpected call:', args) 83 | 84 | mocked_run.side_effect = fail_on_filter_branch 85 | 86 | try: 87 | self.repo.tag_with_trailer( 88 | trailer_name='Reviewed-by', 89 | branch='feature_branch', 90 | start_commit='origin/master_of_the_universe', 91 | trailer_values=['John Simon '] 92 | ) 93 | except marge.git.GitError: 94 | pass 95 | else: 96 | assert False 97 | rewrite, check, abort = get_calls(mocked_run) 98 | assert 'filter-branch' in rewrite 99 | assert check == 'git -C /tmp/local/path rev-parse refs/original/refs/heads/' 100 | assert abort == 'git -C /tmp/local/path reset --hard refs/original/refs/heads/feature_branch' 101 | 102 | def test_rebase_same_branch(self, mocked_run): 103 | with pytest.raises(AssertionError): 104 | self.repo.rebase('branch', 'branch') 105 | 106 | assert get_calls(mocked_run) == [] 107 | 108 | def test_merge_same_branch(self, mocked_run): 109 | with pytest.raises(AssertionError): 110 | self.repo.merge('branch', 'branch') 111 | 112 | assert get_calls(mocked_run) == [] 113 | 114 | def test_remove_branch(self, mocked_run): 115 | 
self.repo.remove_branch('some_branch', new_current_branch='devel') 116 | assert get_calls(mocked_run) == [ 117 | 'git -C /tmp/local/path branch -D some_branch', 118 | ] 119 | 120 | def test_remove_branch_default(self, mocked_run): 121 | self.repo.remove_branch('some_branch') 122 | assert get_calls(mocked_run) == [ 123 | 'git -C /tmp/local/path branch -D some_branch', 124 | ] 125 | 126 | def test_remove_master_branch_fails(self, unused_mocked_run): 127 | with pytest.raises(AssertionError): 128 | self.repo.remove_branch('meister', new_current_branch='meister') 129 | 130 | def test_push_force(self, mocked_run): 131 | mocked_run.return_value = mocked_stdout(b'') 132 | self.repo.push('my_branch', force=True) 133 | assert get_calls(mocked_run) == [ 134 | 'git -C /tmp/local/path checkout my_branch --', 135 | 'git -C /tmp/local/path diff-index --quiet HEAD', 136 | 'git -C /tmp/local/path ls-files --others', 137 | 'git -C /tmp/local/path push --force origin my_branch:my_branch', 138 | ] 139 | 140 | def test_push_force_fails_on_dirty(self, mocked_run): 141 | def fail_on_diff_index(*args, **unused_kwargs): 142 | if 'diff-index' in args: 143 | raise subprocess.CalledProcessError(returncode=1, cmd='git diff-index blah') 144 | mocked_run.side_effect = fail_on_diff_index 145 | 146 | with pytest.raises(marge.git.GitError): 147 | self.repo.push('my_branch', force=True) 148 | 149 | assert get_calls(mocked_run) == [ 150 | 'git -C /tmp/local/path checkout my_branch --', 151 | 'git -C /tmp/local/path diff-index --quiet HEAD', 152 | ] 153 | 154 | def test_push_force_fails_on_untracked(self, mocked_run): 155 | def fail_on_ls_files(*args, **unused_kwargs): 156 | if 'ls-files' in args: 157 | return mocked_stdout('some_file.txt\nanother_file.py') 158 | return None 159 | 160 | mocked_run.side_effect = fail_on_ls_files 161 | 162 | with pytest.raises(marge.git.GitError): 163 | self.repo.push('my_branch', force=True) 164 | 165 | assert get_calls(mocked_run) == [ 166 | 'git -C /tmp/local/path 
def bashify(call):
    """Render a recorded mock call of marge.git._run as a shell command line.

    Environment entries that differ from the current process environment are
    rendered as leading VAR=value assignments, the way a shell would accept them.
    """
    positional, keyword = call
    quoted_args = [shlex.quote(arg) for arg in positional]
    extra_env = set((keyword.get('env') or {}).items()) - set(os.environ.items())
    env_prefix = ['{}={}'.format(shlex.quote(name), shlex.quote(value))
                  for name, value in extra_env]
    return ' '.join(env_prefix + quoted_args)
def test_filter():
    # No trailer values: the message passes through (normalised to end in one newline).
    assert _filter_test('Some Stuff', 'Tested-by', []) == 'Some Stuff\n'
    assert _filter_test('Some Stuff\n', 'Tested-by', []) == 'Some Stuff\n'
    # A single trailer is appended after a blank separator line.
    assert _filter_test('Some Stuff', 'Tested-by', ['T. Estes ']) == '''Some Stuff

Tested-by: T. Estes 
'''

    test_commit_message = r'''Fix: bug in BLah.

Some stuff.
Some More stuff (really? Yeah: really!)

Reviewed-by: R. Viewer 
Reviewed-by: R. Viewer 
Signed-off-by: Stephen Offer 
'''
    # Adding Tested-by also de-duplicates the repeated Reviewed-by trailer.
    with_tested_by = _filter_test(test_commit_message, 'Tested-by', ['T. Estes '])
    assert with_tested_by == '''Fix: bug in BLah.

Some stuff.
Some More stuff (really? Yeah: really!)

Reviewed-by: R. Viewer 
Signed-off-by: Stephen Offer 
Tested-by: T. Estes 
'''
    # Re-filtering for Reviewed-by replaces the existing value and moves the
    # trailer to the end, preserving the other trailers.
    with_new_reviewed_by = _filter_test(with_tested_by, 'Reviewed-by', [
        'Roger Ebert ', 'John Simon '
    ])
    assert with_new_reviewed_by == '''Fix: bug in BLah.

Some stuff.
Some More stuff (really? Yeah: really!)

Signed-off-by: Stephen Offer 
Tested-by: T. Estes 
Reviewed-by: Roger Ebert 
Reviewed-by: John Simon 
'''
    # A 'Word:'-style subject line is NOT mistaken for a trailer.
    assert _filter_test('Test: frobnificator', 'Tested-by', []) == 'Test: frobnificator\n'
    assert _filter_test('Test: frobnificator', 'Tested-by', ['T. Estes ']) == (
        '''Test: frobnificator

Tested-by: T. Estes 
'''
    )


def test_filter_fails_on_empty_commit_messages():
    # The filter script exits non-zero with a diagnostic on stdout/stderr.
    with pytest.raises(subprocess.CalledProcessError) as exc_info:
        _filter_test('', '', [])
    assert exc_info.value.output == b'ERROR: Expected a non-empty commit message'


def test_filter_fails_on_commit_messages_that_are_empty_apart_from_trailers():
    # A message that is nothing but trailers is rejected with a dedicated error.
    with pytest.raises(subprocess.CalledProcessError) as exc_info:
        _filter_test(
            'Tested-by: T. Estes ',
            'Tested-by',
            ['T. Estes ']
        )
    assert exc_info.value.output == b''.join([
        b'ERROR: Your commit message seems to consist only of ',
        b'Trailers: Tested-by: T. Estes ',
    ])

    with pytest.raises(subprocess.CalledProcessError) as exc_info:
        _filter_test('', 'Tested-by', ['T. Estes '])
    assert exc_info.value.output == b'ERROR: Expected a non-empty commit message'
class TestVersion:
    """Unit tests for gitlab.Version string parsing."""

    def test_parse(self):
        """An '-ee' suffix is split off into the edition field."""
        parsed = gitlab.Version.parse('9.2.2-ee')
        assert parsed == gitlab.Version(release=(9, 2, 2), edition='ee')

    def test_parse_no_edition(self):
        """A bare release string yields edition=None."""
        parsed = gitlab.Version.parse('9.4.0')
        assert parsed == gitlab.Version(release=(9, 4, 0), edition=None)

    def test_is_ee(self):
        """is_ee is true exactly for Enterprise Edition versions."""
        assert gitlab.Version.parse('9.4.0-ee').is_ee
        assert not gitlab.Version.parse('9.4.0').is_ee
class TestIntervalUnion:
    """Tests for IntervalUnion, a union of WeeklyIntervals."""

    def test_empty(self):
        empty_interval = IntervalUnion.empty()
        assert empty_interval == IntervalUnion([])
        # The empty union covers no point in time.
        assert not empty_interval.covers(date('Monday 5pm'))

    def test_singleton(self):
        # A one-element union behaves exactly like its single interval.
        weekly = WeeklyInterval('Mon', time(10, 00), 'Fri', time(18, 00))
        interval = IntervalUnion([weekly])
        assert interval.covers(date('Tuesday 3pm'))
        assert not interval.covers(date('Sunday 5pm'))

    def test_non_overlapping(self):
        weekly_1 = WeeklyInterval('Mon', time(10, 00), 'Fri', time(18, 00))
        weekly_2 = WeeklyInterval('Sat', time(12, 00), 'Sun', time(9, 00))
        interval = IntervalUnion([weekly_1, weekly_2])
        assert interval.covers(date('Tuesday 3pm'))
        assert not interval.covers(date('Saturday 9am'))
        assert interval.covers(date('Saturday 6pm'))
        assert not interval.covers(date('Sunday 11am'))

    def test_from_human(self):
        # Comma-separated human specs parse into a union of weekly intervals.
        weekly_1 = WeeklyInterval('Mon', time(10, 00), 'Fri', time(18, 00))
        weekly_2 = WeeklyInterval('Sat', time(12, 00), 'Sun', time(9, 00))
        interval = IntervalUnion([weekly_1, weekly_2])

        assert interval == IntervalUnion.from_human('Mon@10am - Fri@6pm,Sat@12pm-Sunday 9am')
        assert IntervalUnion([weekly_1]) == IntervalUnion.from_human('Mon@10am - Fri@6pm')

    def test_from_human_with_timezone(self):
        """Zone-qualified specs are interpreted relative to the (frozen) current UTC offset."""
        weekly_1 = WeeklyInterval('Mon', time(10, 00), 'Fri', time(18, 00))
        weekly_2 = WeeklyInterval('Sat', time(12, 00), 'Sun', time(9, 00))
        interval = IntervalUnion([weekly_1, weekly_2])

        try:
            # During summer time (BST, UTC+1).
            set_test_now(pendulum.datetime(2019, 8, 30, tz='Europe/London'))
            assert IntervalUnion.from_human(
                "Mon 11:00 Europe/London - Fri 19:00 Europe/London,"
                "Sat 13:00 Europe/London - Sun 10:00 Europe/London"
            ) == interval

            # Outside summer time (GMT, UTC+0).
            set_test_now(pendulum.datetime(2019, 12, 30, tz='Europe/London'))
            assert IntervalUnion.from_human(
                "Mon 10:00 Europe/London - Fri 18:00 Europe/London,"
                "Sat 12:00 Europe/London - Sun 09:00 Europe/London"
            ) == interval
        finally:
            # BUG FIX: the frozen "now" previously leaked into every test that
            # ran after this one (set_test_now was never cleared); always
            # restore the real clock.
            set_test_now()
    def get_merge_job(self, **merge_kwargs):
        # Build a MergeJob whose collaborators are all strict autospec mocks;
        # individual tests override pieces via **merge_kwargs.
        params = {
            'api': create_autospec(marge.gitlab.Api, spec_set=True),
            'user': create_autospec(marge.user.User, spec_set=True),
            'project': create_autospec(marge.project.Project, spec_set=True),
            'repo': create_autospec(marge.git.Repo, spec_set=True),
            'options': MergeJobOptions.default(),
        }
        params.update(merge_kwargs)
        return MergeJob(**params)

    def test_get_source_project_when_is_target_project(self):
        # When the MR originates from the target project itself, no fetch is needed.
        merge_job = self.get_merge_job()
        merge_request = self._mock_merge_request()
        merge_request.source_project_id = merge_job._project.id
        r_source_project = merge_job.get_source_project(merge_request)
        assert r_source_project is merge_job._project

    def test_get_source_project_when_is_fork(self):
        # A differing source_project_id forces a Project.fetch_by_id lookup.
        with patch('marge.job.Project') as project_class:
            merge_job = self.get_merge_job()
            merge_request = self._mock_merge_request()
            r_source_project = merge_job.get_source_project(merge_request)

            project_class.fetch_by_id.assert_called_once_with(
                merge_request.source_project_id,
                api=merge_job._api,
            )
            assert r_source_project is not merge_job._project
            assert r_source_project is project_class.fetch_by_id.return_value

    @pytest.mark.parametrize(
        'version,use_merge_request_pipelines',
        [('9.4.0-ee', False), ('10.5.0-ee', True)],
    )
    def test_get_mr_ci_status(self, version, use_merge_request_pipelines):
        # Newer GitLab versions query MR pipelines; older ones fall back to
        # branch pipelines.
        with patch('marge.job.Pipeline', autospec=True) as pipeline_class:
            pipeline_success = [
                MagicMock(sha='abc', status='success'),
            ]
            pipeline_class.pipelines_by_branch.return_value = pipeline_success
            pipeline_class.pipelines_by_merge_request.return_value = pipeline_success
            merge_job = self.get_merge_job()
            merge_job._api.version.return_value = marge.gitlab.Version.parse(version)
            merge_request = self._mock_merge_request(sha='abc')

            r_ci_status = merge_job.get_mr_ci_status(merge_request)

            if use_merge_request_pipelines:
                pipeline_class.pipelines_by_merge_request.assert_called_once_with(
                    merge_request.target_project_id,
                    merge_request.iid,
                    merge_job._api,
                )
            else:
                pipeline_class.pipelines_by_branch.assert_called_once_with(
                    merge_request.source_project_id,
                    merge_request.source_branch,
                    merge_job._api,
                )
            assert r_ci_status == 'success'

    def test_ensure_mergeable_mr_not_assigned(self):
        # An MR no longer assigned to the bot is skipped, not failed.
        merge_job = self.get_merge_job()
        merge_request = self._mock_merge_request(
            state='opened',
            work_in_progress=False,
            squash=False,
        )
        with pytest.raises(SkipMerge) as exc_info:
            merge_job.ensure_mergeable_mr(merge_request)
        assert exc_info.value.reason == 'It is not assigned to me anymore!'

    def test_ensure_mergeable_mr_state_not_ok(self):
        merge_job = self.get_merge_job()
        merge_request = self._mock_merge_request(
            assignee_ids=[merge_job._user.id],
            state='merged',
            work_in_progress=False,
            squash=False,
        )
        with pytest.raises(CannotMerge) as exc_info:
            merge_job.ensure_mergeable_mr(merge_request)
        assert exc_info.value.reason == 'The merge request is already merged!'

    def test_ensure_mergeable_mr_not_approved(self):
        merge_job = self.get_merge_job()
        merge_request = self._mock_merge_request(
            assignee_ids=[merge_job._user.id],
            state='opened',
            work_in_progress=False,
            squash=False,
        )
        # Approvals are fetched and must report `sufficient`.
        merge_request.fetch_approvals.return_value.sufficient = False
        with pytest.raises(CannotMerge) as exc_info:
            merge_job.ensure_mergeable_mr(merge_request)

        merge_request.fetch_approvals.assert_called_once()
        assert 'Insufficient approvals' in str(exc_info.value)

    def test_ensure_mergeable_mr_wip(self):
        merge_job = self.get_merge_job()
        merge_request = self._mock_merge_request(
            assignee_ids=[merge_job._user.id],
            state='opened',
            work_in_progress=True,
        )
        merge_request.fetch_approvals.return_value.sufficient = True
        with pytest.raises(CannotMerge) as exc_info:
            merge_job.ensure_mergeable_mr(merge_request)

        assert exc_info.value.reason == "Sorry, I can't merge requests marked as Work-In-Progress!"

    def test_ensure_mergeable_mr_unresolved_discussion(self):
        merge_job = self.get_merge_job()
        merge_request = self._mock_merge_request(
            assignee_ids=[merge_job._user.id],
            state='opened',
            work_in_progress=False,
            blocking_discussions_resolved=False,
        )
        merge_request.fetch_approvals.return_value.sufficient = True
        with pytest.raises(CannotMerge) as exc_info:
            merge_job.ensure_mergeable_mr(merge_request)

        assert exc_info.value.reason == "Sorry, I can't merge requests which have unresolved discussions!"

    def test_ensure_mergeable_mr_squash_and_trailers(self):
        # Auto-squash is incompatible with trailer-adding options such as add_reviewers.
        merge_job = self.get_merge_job(options=MergeJobOptions.default(add_reviewers=True))
        merge_request = self._mock_merge_request(
            assignee_ids=[merge_job._user.id],
            state='opened',
            work_in_progress=False,
            squash=True,
        )
        merge_request.fetch_approvals.return_value.sufficient = True
        with pytest.raises(CannotMerge) as exc_info:
            merge_job.ensure_mergeable_mr(merge_request)

        assert (
            exc_info.value.reason == "Sorry, merging requests marked as auto-squash "
            "would ruin my commit tagging!"
        )

    def test_unassign_from_mr(self):
        merge_job = self.get_merge_job()
        merge_request = self._mock_merge_request()

        # when we are not the author
        merge_job.unassign_from_mr(merge_request)
        merge_request.assign_to.assert_called_once_with(merge_request.author_id)

        # when we are the author
        merge_request.author_id = merge_job._user.id
        merge_job.unassign_from_mr(merge_request)
        merge_request.unassign.assert_called_once()

    def test_fuse_using_rebase(self):
        merge_job = self.get_merge_job(options=MergeJobOptions.default(fusion=Fusion.rebase))
        branch_a = 'A'
        branch_b = 'B'

        merge_job.fuse(branch_a, branch_b)

        # Fusion.rebase must route through Repo.rebase, not Repo.merge.
        merge_job._repo.rebase.assert_called_once_with(
            branch_a,
            branch_b,
            source_repo_url=ANY,
            local=ANY,
        )

    def test_fuse_using_merge(self):
        merge_job = self.get_merge_job(options=MergeJobOptions.default(fusion=Fusion.merge))
        branch_a = 'A'
        branch_b = 'B'

        merge_job.fuse(branch_a, branch_b)

        # Fusion.merge must route through Repo.merge, not Repo.rebase.
        merge_job._repo.merge.assert_called_once_with(
            branch_a,
            branch_b,
            source_repo_url=ANY,
            local=ANY,
        )
_MARGE_ID = 77  # user id the bot runs as in these fixtures

# Canonical merge-request payload used throughout TestMergeRequest;
# individual tests derive variations via dict(INFO, ...).
INFO = {
    'id': 42,
    'iid': 54,
    'title': 'a title',
    'project_id': 1234,
    'assignees': [{'id': _MARGE_ID}],
    'author': {'id': 88},
    'state': 'opened',
    'sha': 'dead4g00d',
    'source_project_id': 5678,
    'target_project_id': 1234,
    'source_branch': 'useless_new_feature',
    'force_remove_source_branch': True,
    'target_branch': 'master',
    'work_in_progress': False,
}

# Discussion payload with two "assigned to" system notes; the later
# timestamp (2020-08-18T06:52:58.093Z == epoch 1597733578.093) is what
# test_fetch_assigned_at expects back.
DISCUSSION = {
    'id': 'aabbcc0044',
    'notes': [
        {'id': 12, "body": "assigned to @john_smith", "created_at": "2020-08-04T06:56:11.854Z"},
        {'id': 13, "body": "assigned to @john_smith", "created_at": "2020-08-18T06:52:58.093Z"}
    ],
}
    def test_fetch_by_iid(self):
        # fetch_by_iid issues a single GET and wraps the returned payload.
        api = self.api
        api.call = Mock(return_value=INFO)

        merge_request = MergeRequest.fetch_by_iid(project_id=1234, merge_request_iid=54, api=api)

        api.call.assert_called_once_with(GET('/projects/1234/merge_requests/54'))
        assert merge_request.info == INFO

    def test_refetch_info(self):
        # refetch_info replaces the cached payload with the fresh one.
        new_info = dict(INFO, state='closed')
        self.api.call = Mock(return_value=new_info)

        self.merge_request.refetch_info()
        self.api.call.assert_called_once_with(GET('/projects/1234/merge_requests/54'))
        assert self.merge_request.info == new_info

    def test_properties(self):
        # Each property is a straight read of the corresponding INFO field.
        assert self.merge_request.id == 42
        assert self.merge_request.project_id == 1234
        assert self.merge_request.iid == 54
        assert self.merge_request.title == 'a title'
        assert self.merge_request.assignee_ids == [77]
        assert self.merge_request.author_id == 88
        assert self.merge_request.state == 'opened'
        assert self.merge_request.source_branch == 'useless_new_feature'
        assert self.merge_request.target_branch == 'master'
        assert self.merge_request.sha == 'dead4g00d'
        assert self.merge_request.source_project_id == 5678
        assert self.merge_request.target_project_id == 1234
        assert self.merge_request.work_in_progress is False

        # No assignees at all degrades to an empty id list.
        self._load({'assignees': []})
        assert self.merge_request.assignee_ids == []

    def test_comment(self):
        self.merge_request.comment('blah')
        self.api.call.assert_called_once_with(
            POST(
                '/projects/1234/merge_requests/54/notes',
                {'body': 'blah'},
            ),
        )

    def test_assign(self):
        self.merge_request.assign_to(42)
        self.api.call.assert_called_once_with(PUT('/projects/1234/merge_requests/54', {'assignee_id': 42}))
    def test_rebase_was_not_in_progress_no_error(self):
        # Expected request/response protocol: poll -> trigger rebase -> poll
        # until rebase_in_progress goes False.
        expected = [
            (
                GET('/projects/1234/merge_requests/54'),  # refetch_info -> not in progress
                INFO
            ),
            (
                PUT('/projects/1234/merge_requests/54/rebase'),
                True
            ),
            (
                GET('/projects/1234/merge_requests/54'),  # refetch_info -> in progress
                dict(INFO, rebase_in_progress=True)
            ),
            (
                GET('/projects/1234/merge_requests/54'),  # refetch_info -> succeeded
                dict(INFO, rebase_in_progress=False)
            ),
        ]

        self.api.call = Mock(side_effect=[resp for (req, resp) in expected])
        self.merge_request.rebase()
        self.api.call.assert_has_calls([call(req) for (req, resp) in expected])

    def test_rebase_was_not_in_progress_error(self):
        # A merge_error in the refetched payload surfaces as MergeRequestRebaseFailed.
        expected = [
            (
                GET('/projects/1234/merge_requests/54'),  # refetch_info -> not in progress
                INFO
            ),
            (
                PUT('/projects/1234/merge_requests/54/rebase'),
                True
            ),
            (
                GET('/projects/1234/merge_requests/54'),  # refetch_info -> BOOM
                dict(INFO, rebase_in_progress=False, merge_error="Rebase failed. Please rebase locally")
            ),
        ]

        self.api.call = Mock(side_effect=[resp for (req, resp) in expected])

        with pytest.raises(MergeRequestRebaseFailed):
            self.merge_request.rebase()
        self.api.call.assert_has_calls([call(req) for (req, resp) in expected])

    def test_rebase_was_in_progress_no_error(self):
        # If a rebase is already running, no PUT is issued; we just poll it out.
        expected = [
            (
                GET('/projects/1234/merge_requests/54'),  # refetch_info -> in progress
                dict(INFO, rebase_in_progress=True)
            ),
            (
                GET('/projects/1234/merge_requests/54'),  # refetch_info -> in progress
                dict(INFO, rebase_in_progress=True)
            ),
            (
                GET('/projects/1234/merge_requests/54'),  # refetch_info -> succeeded
                dict(INFO, rebase_in_progress=False)
            ),
        ]
        self.api.call = Mock(side_effect=[resp for (req, resp) in expected])
        self.merge_request.rebase()
        self.api.call.assert_has_calls([call(req) for (req, resp) in expected])

    def test_accept_remove_branch(self):
        self._load(dict(INFO, sha='badc0de'))

        # remove_branch is forwarded verbatim as should_remove_source_branch.
        for boolean in (True, False):
            self.merge_request.accept(remove_branch=boolean)
            self.api.call.assert_called_once_with(PUT(
                '/projects/1234/merge_requests/54/merge',
                dict(
                    merge_when_pipeline_succeeds=True,
                    should_remove_source_branch=boolean,
                    sha='badc0de',
                )
            ))
            self.api.call.reset_mock()

    def test_accept_sha(self):
        # An explicit sha argument overrides the MR's current sha.
        self._load(dict(INFO, sha='badc0de'))
        self.merge_request.accept(sha='g00dc0de')
        self.api.call.assert_called_once_with(PUT(
            '/projects/1234/merge_requests/54/merge',
            dict(
                merge_when_pipeline_succeeds=True,
                should_remove_source_branch=False,
                sha='g00dc0de',
            )
        ))
    def test_fetch_all_opened_for_me(self):
        # MRs assigned to someone else must be filtered out of the result.
        api = self.api
        mr1, mr_not_me, mr2 = INFO, dict(INFO, assignees=[{'id': _MARGE_ID+1}], id=679), dict(INFO, id=678)
        user = marge.user.User(api=None, info=dict(USER_INFO, id=_MARGE_ID))
        api.collect_all_pages = Mock(return_value=[mr1, mr_not_me, mr2])
        result = MergeRequest.fetch_all_open_for_user(
            1234, user=user, api=api, merge_order='created_at'
        )
        api.collect_all_pages.assert_called_once_with(GET(
            '/projects/1234/merge_requests',
            {'state': 'opened', 'order_by': 'created_at', 'sort': 'asc'},
        ))
        assert [mr.info for mr in result] == [mr1, mr2]

    def test_fetch_assigned_at(self):
        # The newest "assigned to" note's created_at (2020-08-18T06:52:58.093Z)
        # is returned as a unix timestamp.
        api = self.api
        dis1, dis2 = DISCUSSION, dict(DISCUSSION, id=679)
        mr1 = INFO
        user = marge.user.User(api=None, info=dict(USER_INFO, id=_MARGE_ID))
        api.collect_all_pages = Mock(return_value=[dis1, dis2])
        result = MergeRequest.fetch_assigned_at(
            user=user, api=api, merge_request=mr1
        )
        api.collect_all_pages.assert_called_once_with(GET(
            '/projects/1234/merge_requests/54/discussions',
        ))
        assert result == 1597733578.093

    def _load(self, json):
        # Helper: refetch the MR with a canned payload, then restore the
        # original api.call mock so the test's own assertions stay clean.
        old_mock = self.api.call
        self.api.call = Mock(return_value=json)
        self.merge_request.refetch_info()
        self.api.call.assert_called_with(GET('/projects/1234/merge_requests/54'))
        self.api.call = old_mock
INFO = {
    "id": 47,
    "status": "pending",
    "ref": "new-pipeline",
    "sha": "a91957a858320c0e17f3a0eca7cfacbff50ea29a"
}


# pylint: disable=attribute-defined-outside-init
class TestPipeline:
    """Tests for Pipeline lookups against a mocked GitLab API."""

    def setup_method(self, _method):
        self.api = Mock(Api)

    def test_pipelines_by_branch(self):
        """Branch pipelines are requested newest-first and wrapped as-is."""
        first, second = INFO, dict(INFO, id=48)
        self.api.call = Mock(return_value=[first, second])

        result = Pipeline.pipelines_by_branch(project_id=1234, branch=INFO['ref'], api=self.api)

        self.api.call.assert_called_once_with(GET(
            '/projects/1234/pipelines',
            {'ref': INFO['ref'], 'order_by': 'id', 'sort': 'desc'},
        ))
        assert [pipeline.info for pipeline in result] == [first, second]

    def test_pipelines_by_merge_request(self):
        """MR pipelines come back ordered by descending id (newest first)."""
        first, second = INFO, dict(INFO, id=48)
        self.api.call = Mock(return_value=[first, second])

        result = Pipeline.pipelines_by_merge_request(project_id=1234, merge_request_iid=1, api=self.api)

        self.api.call.assert_called_once_with(GET(
            '/projects/1234/merge_requests/1/pipelines',
        ))
        assert [pipeline.info for pipeline in result] == [second, first]

    def test_properties(self):
        """Each property mirrors the corresponding INFO field."""
        pipeline = Pipeline(api=self.api, project_id=1234, info=INFO)
        assert pipeline.id == 47
        assert pipeline.project_id == 1234
        assert pipeline.status == "pending"
        assert pipeline.ref == "new-pipeline"
        assert pipeline.sha == "a91957a858320c0e17f3a0eca7cfacbff50ea29a"
# pylint: disable=attribute-defined-outside-init,duplicate-code
class TestProject:
    """Tests for Project lookup and property accessors against a mocked API."""

    def setup_method(self, _method):
        self.api = Mock(Api)

    def test_fetch_by_id(self):
        # fetch_by_id issues GET /projects/<id> and wraps the payload.
        api = self.api
        api.call = Mock(return_value=INFO)

        project = Project.fetch_by_id(project_id=1234, api=api)

        api.call.assert_called_once_with(GET('/projects/1234'))
        assert project.info == INFO

    def test_fetch_by_path_exists(self):
        # fetch_by_path scans all visible projects and returns the match.
        api = self.api
        prj1 = INFO
        prj2 = dict(INFO, id=1235, path_with_namespace='foo/bar')
        prj3 = dict(INFO, id=1240, path_with_namespace='foo/foo')
        api.collect_all_pages = Mock(return_value=[prj1, prj2, prj3])

        project = Project.fetch_by_path('foo/bar', api)

        api.collect_all_pages.assert_called_once_with(GET('/projects'))
        assert project and project.info == prj2

    # BUG FIX: the two methods below were named without the `test_` prefix
    # (`fetch_all_mine_with_permissions` / `fetch_all_mine_with_min_access_level`),
    # so pytest never collected or ran them. Renamed so they actually execute.
    def test_fetch_all_mine_with_permissions(self):
        # GitLab < 11.2: filtering relies on the permissions block in the payload.
        prj1, prj2 = INFO, dict(INFO, id=678)

        api = self.api
        api.collect_all_pages = Mock(return_value=[prj1, prj2])
        api.version = Mock(return_value=Version.parse("11.0.0-ee"))

        result = Project.fetch_all_mine(api)
        api.collect_all_pages.assert_called_once_with(GET(
            '/projects',
            {
                'membership': True,
                'with_merge_requests_enabled': True,
            },
        ))
        assert [prj.info for prj in result] == [prj1, prj2]
        assert all(prj.access_level == AccessLevel.developer for prj in result)

    def test_fetch_all_mine_with_min_access_level(self):
        # GitLab >= 11.2: min_access_level is pushed down into the query, and a
        # synthetic "marge" permission entry is recorded on each project.
        prj1, prj2 = dict(INFO, permissions=NONE_ACCESS), dict(INFO, id=678, permissions=NONE_ACCESS)

        api = self.api
        api.collect_all_pages = Mock(return_value=[prj1, prj2])
        api.version = Mock(return_value=Version.parse("11.2.0-ee"))

        result = Project.fetch_all_mine(api)
        api.collect_all_pages.assert_called_once_with(GET(
            '/projects',
            {
                'membership': True,
                'with_merge_requests_enabled': True,
                "min_access_level": AccessLevel.developer.value,
            },
        ))
        assert [prj.info for prj in result] == [prj1, prj2]
        assert all(prj.info["permissions"]["marge"] for prj in result)
        assert all(prj.access_level == AccessLevel.developer for prj in result)

    def test_properties(self):
        # Each property is a straight read of the corresponding INFO field.
        project = Project(api=self.api, info=INFO)
        assert project.id == 1234
        assert project.path_with_namespace == 'cool/project'
        assert project.ssh_url_to_repo == 'ssh://blah.com/cool/project.git'
        assert project.merge_requests_enabled is True
        assert project.only_allow_merge_if_pipeline_succeeds is True
        assert project.only_allow_merge_if_all_discussions_are_resolved is False
        assert project.access_level == AccessLevel.developer

    def test_group_access(self):
        # Group-level access suffices; no access at all raises AssertionError.
        project = Project(api=self.api, info=dict(INFO, permissions=GROUP_ACCESS))
        bad_project = Project(api=self.api, info=dict(INFO, permissions=NONE_ACCESS))
        assert project.access_level == AccessLevel.developer
        with pytest.raises(AssertionError):
            bad_project.access_level  # pylint: disable=pointless-statement
# pylint: disable=attribute-defined-outside-init
@mock.patch('marge.git._run')
class TestRepoManager:
    """Tests for SshRepoManager's per-project repo caching (all git invocations mocked)."""

    def setup_method(self, _method):
        user = marge.user.User(api=None, info=dict(USER_INFO, name='Peter Parker', email='pparker@bugle.com'))
        self.root_dir = tempfile.TemporaryDirectory()
        self.repo_manager = marge.store.SshRepoManager(
            user=user, root_dir=self.root_dir.name, ssh_key_file='/ssh/key',
        )

    def teardown_method(self, _method):
        self.root_dir.cleanup()

    def new_project(self, project_id, path_with_namespace):
        # Helper: build a Project whose ssh URL derives from its namespace path.
        # NOTE(review): this relies on marge.project being importable via the
        # marge package (only marge.git/store/user are imported at the top of
        # this file) -- presumably pulled in transitively; confirm.
        ssh_url_to_repo = 'ssh://buh.com/%s.git' % path_with_namespace
        info = dict(
            PRJ_INFO,
            id=project_id,
            path_with_namespace=path_with_namespace,
            ssh_url_to_repo=ssh_url_to_repo,
        )
        return marge.project.Project(api=None, info=info)

    def test_creates_and_initializes_repo(self, git_run):
        # First request for a project clones it and configures the git identity.
        repo_manager = self.repo_manager
        project = self.new_project(1234, 'some/stuff')

        git_run.assert_not_called()

        repo = repo_manager.repo_for_project(project)

        # Each repo lives in its own directory directly under root_dir.
        assert os.path.dirname(repo.local_path) == repo_manager.root_dir
        assert repo.local_path != repo_manager.root_dir

        env = "GIT_SSH_COMMAND='%s -F /dev/null -o IdentitiesOnly=yes -i /ssh/key'" % (
            marge.git.GIT_SSH_COMMAND,
        )
        assert get_git_calls(git_run) == [
            "%s git clone --origin=origin %s %s" % (env, project.ssh_url_to_repo, repo.local_path),
            "%s git -C %s config user.email pparker@bugle.com" % (env, repo.local_path),
            "%s git -C %s config user.name 'Peter Parker'" % (env, repo.local_path)
        ]

    def test_caches_repos_by_id(self, git_run):
        # A second project object with the same id and ssh URL reuses the
        # already-cloned repo (no extra git calls).
        repo_manager = self.repo_manager
        project = self.new_project(1234, 'some/stuff')
        same_project = marge.project.Project(api=None, info=dict(project.info, name='same/stuff'))

        assert git_run.call_count == 0

        repo_first_call = repo_manager.repo_for_project(project)
        assert git_run.call_count == 3

        repo_second_call = repo_manager.repo_for_project(same_project)
        assert repo_second_call is repo_first_call
        assert git_run.call_count == 3

    def test_stops_caching_if_ssh_url_changed(self, git_run):
        # Same project id but a different ssh URL invalidates the cache entry
        # and triggers a fresh clone.
        repo_manager = self.repo_manager
        project = self.new_project(1234, 'some/stuff')

        assert git_run.call_count == 0

        repo_first_call = repo_manager.repo_for_project(project)
        assert git_run.call_count == 3

        different_ssh_url = self.new_project(1234, 'same/stuff')

        repo_second_call = repo_manager.repo_for_project(different_ssh_url)
        assert git_run.call_count == 6
        assert repo_first_call.remote_url != repo_second_call.remote_url == different_ssh_url.ssh_url_to_repo

    def test_handles_different_projects(self, git_run):
        # Distinct projects get distinct clones in distinct directories.
        repo_manager = self.repo_manager
        project_1 = self.new_project(1234, 'some/stuff')
        project_2 = self.new_project(5678, 'other/things')

        assert git_run.call_count == 0

        repo_1 = repo_manager.repo_for_project(project_1)
        assert git_run.call_count == 3

        repo_2 = repo_manager.repo_for_project(project_2)
        assert git_run.call_count == 6

        assert repo_1.local_path != repo_2.local_path

    def test_can_forget_repos(self, git_run):
        # forget_repo drops one cache entry (forcing a re-clone into a new
        # directory) while leaving other projects' entries untouched.
        repo_manager = self.repo_manager
        project_1 = self.new_project(1234, 'some/stuff')
        project_2 = self.new_project(5678, 'other/things')

        assert git_run.call_count == 0

        repo_1 = repo_manager.repo_for_project(project_1)
        assert git_run.call_count == 3

        repo_2 = repo_manager.repo_for_project(project_2)
        assert git_run.call_count == 6

        cached_repo_1 = repo_manager.repo_for_project(project_1)
        assert cached_repo_1 is repo_1
        assert git_run.call_count == 6

        repo_manager.forget_repo(project_1)
        another_repo_1 = repo_manager.repo_for_project(project_1)
        assert another_repo_1.local_path != repo_1.local_path
        assert another_repo_1.remote_url == repo_1.remote_url
        assert git_run.call_count == 9

        # project_2's repo is still around
        cached_repo_2 = repo_manager.repo_for_project(project_2)
        assert cached_repo_2 is repo_2
        assert git_run.call_count == 9

        # shouldn't fail
        repo_manager.forget_repo(self.new_project(90, 'non/existent'))
INFO 42 | 43 | def test_properties(self): 44 | user = User(api=self.api, info=INFO) 45 | assert user.id == 1234 46 | assert user.username == 'john_smith' 47 | assert user.name == 'John Smith' 48 | assert user.state == 'active' 49 | -------------------------------------------------------------------------------- /version: -------------------------------------------------------------------------------- 1 | 0.10.1 2 | --------------------------------------------------------------------------------