├── tests ├── __init__.py ├── test___main__.py ├── test_close_pr.py ├── test_filepaths.py ├── test_prtype.py ├── test_util.py ├── test_news.py ├── test_backport.py ├── test_gh_issue.py └── test_stage.py ├── bedevere ├── __init__.py ├── filepaths.py ├── prtype.py ├── close_pr.py ├── __main__.py ├── news.py ├── backport.py ├── gh_issue.py ├── util.py └── stage.py ├── runtime.txt ├── Procfile ├── .git-blame-ignore-revs ├── dev-requirements.txt ├── .coveragerc ├── pytest.ini ├── tox.ini ├── requirements.txt ├── .github ├── codecov.yml ├── workflows │ ├── lint.yml │ └── ci.yml └── dependabot.yml ├── .pre-commit-config.yaml ├── .gitignore ├── CONTRIBUTING.rst ├── README.md └── LICENSE /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /bedevere/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /runtime.txt: -------------------------------------------------------------------------------- 1 | python-3.11.6 2 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | web: python3 -m bedevere 2 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | 35235fd609a52db835beb2a96d0da461a0eab3ed 2 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | -r requirements.txt 2 | asynctest==0.13.0 3 | pytest==9.0.1 4 | pytest-asyncio==1.3.0 5 | pytest-aiohttp==1.1.0 6 | pytest-cov==7.0.0 7 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | dynamic_context = test_function 4 | 5 | [report] 6 | fail_under = 100 7 | 8 | [html] 9 | show_contexts = True 10 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | testpaths = tests 3 | addopts = --cov --cov-report=html --cov-report=term --cov-report=xml 4 | # https://github.com/pytest-dev/pytest-asyncio#modes 5 | asyncio_mode = auto 6 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{314, 313, 312, 311, 310} 3 | toxworkdir={env:TOX_WORK_DIR:.tox} 4 | 5 | [testenv] 6 | passenv = 7 | FORCE_COLOR 8 | skip_install = True 9 | deps = 10 | -r dev-requirements.txt 11 | commands = 12 | pytest {posargs} 13 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aiohttp==3.13.2 2 | appdirs==1.4.4 3 | async-timeout==5.0.1 4 | cachetools==6.2.2 5 | chardet==5.2.0 6 | gidgethub==5.4.0 7 | multidict==6.7.0 8 | packaging==25.0 9 | pyparsing==3.2.5 10 | six==1.17.0 11 | uritemplate==4.2.0 12 | yarl==1.22.0 13 | sentry-sdk==2.46.0 14 | 
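A quick way to exercise the pinned dependency set above locally is to install `dev-requirements.txt` and drive pytest programmatically; `pytest.ini` (above) already supplies the test paths and the coverage options, so no extra arguments are needed. A minimal sketch, assuming the dev requirements are installed in the active environment (the `run_tests.py` filename is illustrative and not part of the repository):

```python
# run_tests.py: illustrative local test runner (not part of the repository)
import sys

import pytest

if __name__ == "__main__":
    # pytest.ini sets testpaths and the --cov/--cov-report addopts,
    # so an empty argument list reproduces the CI/tox invocation.
    sys.exit(pytest.main([]))
```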
-------------------------------------------------------------------------------- /.github/codecov.yml: -------------------------------------------------------------------------------- 1 | # PR commenting is disabled in the Python org's global YAML: 2 | # https://app.codecov.io/account/gh/python/yaml 3 | # To enable commenting in this repo, we need some 'comment' 4 | # config here to override the global one. 5 | # And due to a Codecov bug, it can't be 'comment: false'. 6 | # For more info see https://github.com/python/bedevere/issues/441 7 | comment: 8 | layout: "reach, diff, flags, files" 9 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: [push, pull_request, workflow_dispatch] 4 | 5 | env: 6 | FORCE_COLOR: 1 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | lint: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v6 17 | with: 18 | persist-credentials: false 19 | - uses: actions/setup-python@v6 20 | with: 21 | python-version: "3.x" 22 | - uses: tox-dev/action-pre-commit-uv@v1 23 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for Python 4 | - package-ecosystem: pip 5 | directory: "/" 6 | schedule: 7 | interval: monthly 8 | assignees: 9 | - "ezio-melotti" 10 | groups: 11 | pip: 12 | patterns: 13 | - "*" 14 | 15 | # Maintain dependencies for GitHub Actions 16 | - package-ecosystem: "github-actions" 17 | directory: "/" 18 | schedule: 19 | interval: monthly 20 | assignees: 21 | - "ezio-melotti" 22 | groups: 23 | actions: 24 | patterns: 25 | - "*" 26 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black 3 | rev: 23.9.1 4 | hooks: 5 | - id: black 6 | 7 | - repo: https://github.com/PyCQA/isort 8 | rev: 5.12.0 9 | hooks: 10 | - id: isort 11 | files: \.py$ 12 | 13 | - repo: https://github.com/pre-commit/pre-commit-hooks 14 | rev: v4.5.0 15 | hooks: 16 | - id: check-builtin-literals 17 | - id: check-added-large-files 18 | - id: check-case-conflict 19 | - id: check-toml 20 | - id: check-yaml 21 | - id: debug-statements 22 | - id: end-of-file-fixer 23 | - id: forbid-new-submodules 24 | - id: trailing-whitespace 25 | -------------------------------------------------------------------------------- /bedevere/filepaths.py: -------------------------------------------------------------------------------- 1 | """Checks related to filepaths on a pull request.""" 2 | import gidgethub.routing 3 | 4 | from . 
import news, prtype, util 5 | 6 | router = gidgethub.routing.Router() 7 | 8 | 9 | @router.register("pull_request", action="opened") 10 | @router.register("pull_request", action="synchronize") 11 | @router.register("pull_request", action="reopened") 12 | async def check_file_paths(event, gh, *args, **kwargs): 13 | pull_request = event.data["pull_request"] 14 | files = await util.files_for_PR(gh, pull_request) 15 | filenames = [file["file_name"] for file in files] 16 | if event.data["action"] == "opened": 17 | labels = await prtype.classify_by_filepaths(gh, pull_request, filenames) 18 | if prtype.Labels.skip_news not in labels: 19 | await news.check_news(gh, pull_request, files) 20 | else: 21 | await news.check_news(gh, pull_request, files) 22 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: [push, pull_request, workflow_dispatch] 4 | 5 | permissions: 6 | contents: read 7 | 8 | env: 9 | FORCE_COLOR: 1 10 | 11 | jobs: 12 | test: 13 | name: Python ${{ matrix.python-version }} 14 | runs-on: ubuntu-latest 15 | strategy: 16 | fail-fast: false 17 | matrix: 18 | python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] 19 | 20 | steps: 21 | - uses: actions/checkout@v6 22 | with: 23 | persist-credentials: false 24 | 25 | - name: Set up Python ${{ matrix.python-version }} 26 | uses: actions/setup-python@v6 27 | id: python-install 28 | with: 29 | python-version: ${{ matrix.python-version }} 30 | allow-prereleases: true 31 | 32 | - name: Install uv 33 | uses: hynek/setup-cached-uv@v2 34 | with: 35 | cache-dependency-path: | 36 | requirements.txt 37 | dev-requirements.txt 38 | 39 | - name: Tox tests 40 | run: | 41 | uvx --with tox-uv tox -e py 42 | 43 | - uses: codecov/codecov-action@v5 44 | if: always() 45 | with: 46 | token: ${{ secrets.CODECOV_ORG_TOKEN }} 47 | file: ./coverage.xml 48 | flags: Python_${{ steps.python-install.outputs.python-version }} 49 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | .venv/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject 90 | 91 | .pytest_cache 92 | -------------------------------------------------------------------------------- /bedevere/prtype.py: -------------------------------------------------------------------------------- 1 | """Label a pull request based on its type.""" 2 | import enum 3 | import pathlib 4 | 5 | from . import util 6 | 7 | TYPE_LABEL_PREFIX = "type" 8 | 9 | 10 | @enum.unique 11 | class Labels(enum.Enum): 12 | """Labels that can be applied to a Pull Request.""" 13 | 14 | type_bug = f"{TYPE_LABEL_PREFIX}-bug" 15 | docs = "docs" 16 | type_feature = f"{TYPE_LABEL_PREFIX}-feature" 17 | performance = "performance" 18 | type_security = f"{TYPE_LABEL_PREFIX}-security" 19 | tests = "tests" 20 | skip_news = "skip news" 21 | 22 | 23 | async def add_labels(gh, issue, labels): 24 | """Add the specified labels to the PR.""" 25 | current_labels = util.labels(issue) 26 | label_names = [c.value for c in labels if c.value not in current_labels] 27 | if label_names: 28 | await gh.post(issue["labels_url"], data=label_names) 29 | 30 | 31 | async def classify_by_filepaths(gh, pull_request, filenames): 32 | """Categorize the pull request based on the files it has modified. 33 | 34 | If any paths are found which do not fall within a specific classification, 35 | then no new label is applied. 36 | 37 | The routing is handled by the filepaths module. 38 | """ 39 | pr_labels = [] 40 | issue = await util.issue_for_PR(gh, pull_request) 41 | news = docs = tests = False 42 | for filename in filenames: 43 | if util.is_news_dir(filename): 44 | news = True 45 | filepath = pathlib.PurePath(filename) 46 | if filepath.suffix in {".md", ".rst"} or filepath.name == ".nitignore": 47 | docs = True 48 | elif filepath.name.startswith(("test_", "_test")): 49 | tests = True 50 | else: 51 | return pr_labels 52 | if tests: 53 | pr_labels = [Labels.tests] 54 | elif docs: 55 | if news: 56 | pr_labels = [Labels.docs] 57 | else: 58 | pr_labels = [Labels.docs, Labels.skip_news] 59 | await add_labels(gh, issue, pr_labels) 60 | return pr_labels 61 | -------------------------------------------------------------------------------- /bedevere/close_pr.py: -------------------------------------------------------------------------------- 1 | """Automatically close PR that tries to merge maintenance branch into main.""" 2 | import re 3 | 4 | import gidgethub.routing 5 | 6 | PYTHON_MAINT_BRANCH_RE = re.compile(r"^\w+:\d+\.\d+$") 7 | 8 | INVALID_PR_COMMENT = """\ 9 | PRs attempting to merge a maintenance branch into the \ 10 | main branch are deemed to be spam and automatically closed. 
\ 11 | If you were attempting to report a bug, please go to \ 12 | https://github.com/python/cpython/issues; \ 13 | see devguide.python.org for further instruction as needed.""" 14 | 15 | 16 | router = gidgethub.routing.Router() 17 | 18 | 19 | @router.register("pull_request", action="opened") 20 | @router.register("pull_request", action="synchronize") 21 | async def close_invalid_pr(event, gh, *args, **kwargs): 22 | """Close the invalid PR, add 'invalid' label, and post a message. 23 | 24 | PR is considered invalid if: 25 | * base_label is 'python:main' 26 | * head_label is ':' 27 | """ 28 | head_label = event.data["pull_request"]["head"]["label"] 29 | base_label = event.data["pull_request"]["base"]["label"] 30 | 31 | if PYTHON_MAINT_BRANCH_RE.match(head_label) and base_label == "python:main": 32 | data = {"state": "closed"} 33 | await gh.patch(event.data["pull_request"]["url"], data=data) 34 | await gh.post( 35 | f'{event.data["pull_request"]["issue_url"]}/labels', data=["invalid"] 36 | ) 37 | await gh.post( 38 | f'{event.data["pull_request"]["issue_url"]}/comments', 39 | data={"body": INVALID_PR_COMMENT}, 40 | ) 41 | 42 | 43 | @router.register("pull_request", action="review_requested") 44 | async def dismiss_invalid_pr_review_request(event, gh, *args, **kwargs): 45 | """Dismiss review request from the invalid PR. 46 | 47 | PR is considered invalid if: 48 | * base_label is 'python:main' 49 | * head_label is ':' 50 | """ 51 | head_label = event.data["pull_request"]["head"]["label"] 52 | base_label = event.data["pull_request"]["base"]["label"] 53 | 54 | if PYTHON_MAINT_BRANCH_RE.match(head_label) and base_label == "python:main": 55 | data = { 56 | "reviewers": [ 57 | reviewer["login"] 58 | for reviewer in event.data["pull_request"]["requested_reviewers"] 59 | ], 60 | "team_reviewers": [ 61 | team["name"] for team in event.data["pull_request"]["requested_teams"] 62 | ], 63 | } 64 | await gh.delete( 65 | f'{event.data["pull_request"]["url"]}/requested_reviewers', data=data 66 | ) 67 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing and Maintenance Guide 2 | ================================== 3 | 4 | Bedevere web service is deployed to Heroku, which is managed by The PSF. 5 | 6 | Deployment 7 | ---------- 8 | 9 | There are two ways to have bedevere deployed: automatic deployment, and 10 | manual deployment. 11 | 12 | Automatic Deployment (currently broken) 13 | ''''''''''''''''''''''''''''''''''''''' 14 | 15 | When the automatic deployment is enabled (on Heroku side), any merged PR 16 | will automatically be deployed to Heroku. This process takes less than 5 minutes. 17 | 18 | If after 10 minutes you did not see the changes reflected, please ping one 19 | of the collaborators listed below. 20 | 21 | To enable Automatic deployment: 22 | 23 | - On the Heroku dashboard for bedevere, choose the "Deploy" tab. 24 | - Scroll down to the "Automatic deploys" section 25 | - Enter the name of the branch to be deployed (in this case: ``main``) 26 | - Check the "Wait for CI to pass before deploy" button 27 | - Press the "Enable automatic deploys" button. 28 | 29 | Once done, merging a PR against the ``main`` branch will trigger a new 30 | deployment using a webhook that is already set up in the repo settings. 31 | 32 | 33 | .. note:: 34 | 35 | Due to recent `security incident`_, the Heroku GitHub integration is broken. 
36 | Automatic deployment does not currently work. Until this gets resolved, 37 | maintainers have to deploy bedevere to Heroku manually. 38 | 39 | 40 | Manual Deployment 41 | ''''''''''''''''' 42 | 43 | The app can be deployed manually to Heroku by collaborators and members of the ``bedevere`` app on Heroku. 44 | Heroku admins can do it too. 45 | 46 | #. Install Heroku CLI 47 | 48 | Details at: https://devcenter.heroku.com/articles/heroku-cli 49 | 50 | #. Login to Heroku CLI on the command line and follow instructions:: 51 | 52 | heroku login 53 | 54 | 55 | #. If you haven't already, get a clone of the bedevere repo:: 56 | 57 | git clone git@github.com:python/bedevere.git 58 | 59 | Or, using `GitHub CLI`_:: 60 | 61 | gh repo clone python/bedevere 62 | 63 | #. From the ``bedevere`` directory, add the ``bedevere`` Heroku app as remote branch:: 64 | 65 | heroku git:remote -a bedevere 66 | 67 | 68 | #. From the ``bedevere`` directory, push to Heroku:: 69 | 70 | git push heroku main 71 | 72 | 73 | After a successful push, the deployment will begin. 74 | 75 | Heroku app collaborators and members 76 | '''''''''''''''''''''''''''''''''''' 77 | 78 | - @Mariatta 79 | - @ambv 80 | - @brettcannon 81 | 82 | .. _security incident: https://status.heroku.com/incidents/2413 83 | .. _GitHub CLI: https://cli.github.com/ 84 | -------------------------------------------------------------------------------- /bedevere/__main__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import importlib 3 | import os 4 | import sys 5 | import traceback 6 | 7 | import aiohttp 8 | import cachetools 9 | import sentry_sdk 10 | from aiohttp import web 11 | from gidgethub import aiohttp as gh_aiohttp 12 | from gidgethub import apps, routing, sansio 13 | 14 | from . import backport, close_pr, filepaths, gh_issue, news, stage 15 | 16 | router = routing.Router( 17 | backport.router, 18 | gh_issue.router, 19 | close_pr.router, 20 | filepaths.router, 21 | news.router, 22 | stage.router, 23 | ) 24 | cache = cachetools.LRUCache(maxsize=500) 25 | 26 | sentry_sdk.init(os.environ.get("SENTRY_DSN")) 27 | 28 | 29 | async def main(request): 30 | try: 31 | body = await request.read() 32 | secret = os.environ.get("GH_SECRET") 33 | event = sansio.Event.from_http(request.headers, body, secret=secret) 34 | print("GH delivery ID", event.delivery_id, file=sys.stderr) 35 | if event.event == "ping": 36 | return web.Response(status=200) 37 | 38 | async with aiohttp.ClientSession() as session: 39 | gh = gh_aiohttp.GitHubAPI(session, "python/bedevere", cache=cache) 40 | if not event.data.get("installation"): 41 | return web.Response(text="Must be installed as an App.", status=400) 42 | installation_id = event.data["installation"]["id"] 43 | installation_access_token = await apps.get_installation_access_token( 44 | gh, 45 | installation_id=installation_id, 46 | app_id=os.environ.get("GH_APP_ID"), 47 | private_key=os.environ.get("GH_PRIVATE_KEY"), 48 | ) 49 | gh.oauth_token = installation_access_token["token"] 50 | 51 | # Give GitHub some time to reach internal consistency. 
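            # (Webhook deliveries can arrive before the objects they reference
            # are readable through the REST API; a short pause before dispatching
            # reduces spurious 404s on the follow-up requests the handlers make.)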
52 | await asyncio.sleep(1) 53 | await router.dispatch(event, gh, session=session) 54 | try: 55 | print("GH requests remaining:", gh.rate_limit.remaining) 56 | except AttributeError: 57 | pass 58 | return web.Response(status=200) 59 | except Exception as exc: 60 | traceback.print_exc(file=sys.stderr) 61 | return web.Response(status=500) 62 | 63 | 64 | @router.register("installation", action="created") 65 | async def repo_installation_added(event, gh, *args, **kwargs): 66 | print( 67 | f"App installed by {event.data['installation']['account']['login']}, installation_id: {event.data['installation']['id']}" 68 | ) 69 | 70 | 71 | if __name__ == "__main__": # pragma: no cover 72 | app = web.Application() 73 | app.router.add_post("/", main) 74 | port = os.environ.get("PORT") 75 | if port is not None: 76 | port = int(port) 77 | web.run_app(app, port=port) 78 | -------------------------------------------------------------------------------- /tests/test___main__.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | from aiohttp import web 4 | from gidgethub import sansio 5 | 6 | from bedevere import __main__ as main 7 | 8 | app_installation_payload = { 9 | "installation": { 10 | "id": 123, 11 | "account": {"login": "mariatta"}, 12 | } 13 | } 14 | 15 | 16 | async def test_ping(aiohttp_client): 17 | app = web.Application() 18 | app.router.add_post("/", main.main) 19 | client = await aiohttp_client(app) 20 | headers = {"x-github-event": "ping", "x-github-delivery": "1234"} 21 | data = {"zen": "testing is good"} 22 | response = await client.post("/", headers=headers, json=data) 23 | assert response.status == 200 24 | 25 | 26 | async def test_bad_request_if_no_installation(aiohttp_client): 27 | app = web.Application() 28 | app.router.add_post("/", main.main) 29 | client = await aiohttp_client(app) 30 | headers = {"x-github-event": "project", "x-github-delivery": "1234"} 31 | # Sending a payload that shouldn't trigger any networking, but no errors 32 | # either. 33 | data = {"action": "created"} 34 | response = await client.post("/", headers=headers, json=data) 35 | assert response.status == 400 36 | assert await response.text() == "Must be installed as an App." 37 | 38 | 39 | async def test_failure(aiohttp_client): 40 | """Even in the face of an exception, the server should not crash.""" 41 | app = web.Application() 42 | app.router.add_post("/", main.main) 43 | client = await aiohttp_client(app) 44 | # Missing key headers. 45 | response = await client.post("/", headers={}) 46 | assert response.status == 500 47 | 48 | 49 | @mock.patch("gidgethub.apps.get_installation_access_token") 50 | async def test_success_with_installation(get_access_token_mock, aiohttp_client): 51 | get_access_token_mock.return_value = { 52 | "token": "ghs_blablabla", 53 | "expires_at": "2023-06-14T19:02:50Z", 54 | } 55 | app = web.Application() 56 | app.router.add_post("/", main.main) 57 | client = await aiohttp_client(app) 58 | headers = {"x-github-event": "project", "x-github-delivery": "1234"} 59 | # Sending a payload that shouldn't trigger any networking, but no errors 60 | # either. 
61 | data = {"action": "created"} 62 | data.update(app_installation_payload) 63 | response = await client.post("/", headers=headers, json=data) 64 | assert response.status == 200 65 | 66 | 67 | class FakeGH: 68 | def __init__(self): 69 | pass 70 | 71 | 72 | async def test_repo_installation_added(capfd): 73 | event_data = { 74 | "action": "created", 75 | } 76 | event_data.update(app_installation_payload) 77 | 78 | event = sansio.Event(event_data, event="installation", delivery_id="1") 79 | gh = FakeGH() 80 | await main.router.dispatch(event, gh) 81 | out, err = capfd.readouterr() 82 | assert ( 83 | f"App installed by {event.data['installation']['account']['login']}, installation_id: {event.data['installation']['id']}" 84 | in out 85 | ) 86 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # bedevere 2 | 3 | [![Build Status](https://github.com/python/bedevere/actions/workflows/ci.yml/badge.svg?event=push)](https://github.com/python/bedevere/actions) 4 | [![codecov](https://codecov.io/gh/python/bedevere/branch/main/graph/badge.svg)](https://codecov.io/gh/python/bedevere) 5 | 6 | This bot is meant to help identify issues with a CPython pull request. 7 | 8 | ## What the bot does: 9 | - ### Identifies missing GitHub issue numbers in the title 10 | If no GitHub issue number is found the status fails and the 11 | "Details" link points to the relevant 12 | [section of the devguide](https://devguide.python.org/getting-started/pull-request-lifecycle.html#submitting). 13 | - ### Links to GitHub issues 14 | If an issue number is found then the "Details" link points to the relevant issue 15 | itself, making it easier to navigate from PR to issue. 16 | - ### Identifies missing news entry 17 | If no `Misc/NEWS.d` entry is found or the news entry is formatted incorrectly 18 | and the issue doesn't have the `skip-news` label, the status fails and a relevant 19 | description label is added to it. 20 | - ### Closes invalid PRs 21 | Closes PRs that try to merge a maintenance branch into the main branch, adds 22 | `invalid` label, and posts a relevant message. 23 | - ### Labels PRs for docs 24 | Labels PRs for documentation as `docs` 25 | - ### Copies main labels to backport 26 | Copies labels from main PRs to backport PRs 27 | 28 | ## PR State Machine 29 | 30 | This diagram represents the state machine for pull requests, and the labels 31 | applied by Bedevere. 32 | 33 | The colors represent who can make a state change or who is currently 34 | blocking the PR from moving forward: 35 | * Yellow: the PR creator 36 | * Green: core developers 37 | * Blue: anyone 38 | 39 | 43 | 44 | ```mermaid 45 | flowchart TD 46 | A([Published PR]):::creator 47 | A_draft([New Draft PR]):::creator 48 | A_draft -- publish draft by contributor --> A:::creator 49 | A -- by contributor --> B[Awaiting review]:::anyone 50 | A -- by core dev --> C[Awaiting core review]:::coredev 51 | B & C -- new review by
another contributor --> C 52 | C & B & E -- new core review<br>requests changes --> D[Awaiting changes]:::creator 53 | D -- changes by contributor --> E[Awaiting change review]:::coredev 54 | C & E & B -- new core review<br>approves ---> F[Awaiting merge]:::coredev 55 | G[When a review is dismissed<br>the highest remaining state is restored
based on the remaining reviews] 56 | classDef creator stroke:#CC0; 57 | classDef anyone stroke:#00C; 58 | classDef coredev stroke:#0C0; 59 | classDef triager stroke:#C0C; 60 | linkStyle 0,1,8 stroke:#CC0,color:auto; 61 | linkStyle 3,4 stroke:#00C,color:auto; 62 | linkStyle 2,5,6,7,9,10,11 stroke:#0C0,color:auto; 63 | ``` 64 | 65 | ## *Aside*: where does the name come from? 66 | Since this bot is about identifying pull requests that need changes, 67 | it seemed fitting to name it after Sir Bedevere who knew 68 | [how to identify a witch](https://youtu.be/yp_l5ntikaU). 69 | -------------------------------------------------------------------------------- /bedevere/news.py: -------------------------------------------------------------------------------- 1 | """Check for a news entry.""" 2 | import functools 3 | import pathlib 4 | import re 5 | 6 | import gidgethub.routing 7 | 8 | from . import util 9 | 10 | router = gidgethub.routing.Router() 11 | 12 | 13 | create_status = functools.partial(util.create_status, "bedevere/news") 14 | 15 | BLURB_IT_URL = "https://blurb-it.herokuapp.com" 16 | BLURB_PYPI_URL = "https://pypi.org/project/blurb/" 17 | 18 | FILENAME_RE = re.compile( 19 | r"""# YYYY-mm-dd or YYYY-mm-dd-HH-MM-SS 20 | \d{4}-\d{2}-\d{2}(?:-\d{2}-\d{2}-\d{2})?\. 21 | (?:bpo|gh-issue)-\d+(?:,\d+)*\. # Issue number(s) 22 | [A-Za-z0-9_=-]+\. # Nonce (URL-safe base64) 23 | rst # File extension""", 24 | re.VERBOSE, 25 | ) 26 | 27 | SKIP_NEWS_LABEL = util.skip_label("news") 28 | SKIP_LABEL_STATUS = create_status( 29 | util.StatusState.SUCCESS, description='"skip news" label found' 30 | ) 31 | 32 | HELP = f"""\ 33 | Most changes to Python [require a NEWS entry]\ 34 | (https://devguide.python.org/core-developers/committing/#updating-news-and-what-s-new-in-python). \ 35 | Add one using the [blurb_it]({BLURB_IT_URL}) web app or \ 36 | the [blurb]({BLURB_PYPI_URL}) command-line tool. 37 | 38 | If this change has little impact on Python users, wait for a maintainer to \ 39 | apply the `skip news` label instead. 40 | """ 41 | 42 | 43 | async def check_news(gh, pull_request, files=None): 44 | """Check for a news entry. 45 | 46 | The routing is handled through the filepaths module. 
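    A passing entry is an ``.rst`` file with a non-empty patch added directly
    under one of the ``Misc/NEWS.d/next/<section>/`` directories and named
    like ``2017-06-16-20-32-50.gh-issue-12345.Abcd3f.rst`` (date, optional
    timestamp, issue number, nonce; the issue number and nonce here are
    illustrative).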
47 | """ 48 | if not files: 49 | files = await util.files_for_PR(gh, pull_request) 50 | in_next_dir = file_found = False 51 | for file in files: 52 | if not util.is_news_dir(file["file_name"]): 53 | continue 54 | in_next_dir = True 55 | file_path = pathlib.PurePath(file["file_name"]) 56 | if len(file_path.parts) != 5: # Misc, NEWS.d, next, , 57 | continue 58 | file_found = True 59 | if FILENAME_RE.match(file_path.name) and len(file["patch"]) >= 1: 60 | status = create_status( 61 | util.StatusState.SUCCESS, description="News entry found in Misc/NEWS.d" 62 | ) 63 | break 64 | else: 65 | issue = await util.issue_for_PR(gh, pull_request) 66 | if util.skip("news", issue): 67 | status = SKIP_LABEL_STATUS 68 | else: 69 | if pull_request["author_association"] == "NONE": 70 | await gh.post( 71 | f"{pull_request['issue_url']}/comments", data={"body": HELP} 72 | ) 73 | if not in_next_dir: 74 | description = ( 75 | f'No news entry in {util.NEWS_NEXT_DIR} or "skip news" label found' 76 | ) 77 | elif not file_found: 78 | description = "News entry not in an appropriate directory" 79 | else: 80 | description = "News entry file name incorrectly formatted" 81 | status = create_status( 82 | util.StatusState.FAILURE, 83 | description=description, 84 | target_url=BLURB_IT_URL, 85 | ) 86 | 87 | await gh.post(pull_request["statuses_url"], data=status) 88 | 89 | 90 | @router.register("pull_request", action="labeled") 91 | async def label_added(event, gh, *args, **kwargs): 92 | if util.label_name(event.data) == SKIP_NEWS_LABEL: 93 | await util.post_status(gh, event, SKIP_LABEL_STATUS) 94 | 95 | 96 | @router.register("pull_request", action="unlabeled") 97 | async def label_removed(event, gh, *args, **kwargs): 98 | if util.no_labels(event.data): 99 | return 100 | elif util.label_name(event.data) == SKIP_NEWS_LABEL: 101 | pull_request = event.data["pull_request"] 102 | await check_news(gh, pull_request) 103 | -------------------------------------------------------------------------------- /bedevere/backport.py: -------------------------------------------------------------------------------- 1 | """Automatically remove a backport label, and check backport PR validity.""" 2 | import functools 3 | import re 4 | 5 | import gidgethub.routing 6 | 7 | from . import util 8 | 9 | create_status = functools.partial(util.create_status, "bedevere/maintenance-branch-pr") 10 | 11 | 12 | router = gidgethub.routing.Router() 13 | 14 | TITLE_RE = re.compile( 15 | r"\s*\[(?P\d+\.\d+)\].+\((?:GH-|#)(?P\d+)\)", re.IGNORECASE 16 | ) 17 | MAINTENANCE_BRANCH_TITLE_RE = re.compile(r"\s*\[(?P\d+\.\d+)\].+") 18 | MAINTENANCE_BRANCH_RE = re.compile(r"\s*(?P\d+\.\d+)") 19 | BACKPORT_LABEL = "needs backport to {branch}" 20 | MESSAGE_TEMPLATE = ( 21 | "[GH-{pr}](https://github.com/python/cpython/pull/{pr}) is " 22 | "a backport of this pull request to the " 23 | "[{branch} branch](https://github.com/python/cpython/tree/{branch})." 
24 | ) 25 | 26 | 27 | BACKPORT_TITLE_DEVGUIDE_URL = ( 28 | "https://devguide.python.org/committing/#backport-pr-title" 29 | ) 30 | 31 | 32 | async def _copy_over_labels(gh, original_issue, backport_issue): 33 | """Copy over relevant labels from the original PR to the backport PR.""" 34 | label_prefixes = "skip", "type", "sprint", "topic" 35 | labels = list( 36 | filter(lambda x: x.startswith(label_prefixes), util.labels(original_issue)) 37 | ) 38 | if labels: 39 | await gh.post(backport_issue["labels_url"], data=labels) 40 | 41 | 42 | async def _remove_backport_label(gh, original_issue, branch, backport_pr_number): 43 | """Remove the appropriate "backport to" label on the original PR. 44 | 45 | Also leave a comment on the original PR referencing the backport PR. 46 | """ 47 | backport_label = BACKPORT_LABEL.format(branch=branch) 48 | message = MESSAGE_TEMPLATE.format(branch=branch, pr=backport_pr_number) 49 | await gh.post(original_issue["comments_url"], data={"body": message}) 50 | if backport_label not in util.labels(original_issue): 51 | return 52 | await gh.delete(original_issue["labels_url"], {"name": backport_label}) 53 | 54 | 55 | @router.register("pull_request", action="opened") 56 | @router.register("pull_request", action="edited") 57 | async def manage_labels(event, gh, *args, **kwargs): 58 | if event.data["action"] == "edited" and "title" not in event.data["changes"]: 59 | return 60 | pull_request = event.data["pull_request"] 61 | title = util.normalize_title(pull_request["title"], pull_request["body"]) 62 | title_match = TITLE_RE.match(title) 63 | if title_match is None: 64 | return 65 | branch = title_match.group("branch") 66 | original_pr_number = title_match.group("pr") 67 | 68 | original_issue = await gh.getitem( 69 | event.data["repository"]["issues_url"], {"number": original_pr_number} 70 | ) 71 | await _remove_backport_label(gh, original_issue, branch, event.data["number"]) 72 | 73 | backport_issue = await util.issue_for_PR(gh, pull_request) 74 | await _copy_over_labels(gh, original_issue, backport_issue) 75 | 76 | 77 | def is_maintenance_branch(ref): 78 | """ 79 | Return True if the ref refers to a maintenance branch. 80 | 81 | >>> is_maintenance_branch("3.11") 82 | True 83 | >>> is_maintenance_branch("main") 84 | False 85 | >>> is_maintenance_branch("gh-1234/something-completely-different") 86 | False 87 | """ 88 | maintenance_branch_pattern = r"\d+\.\d+" 89 | return bool(re.fullmatch(maintenance_branch_pattern, ref)) 90 | 91 | 92 | @router.register("pull_request", action="opened") 93 | @router.register("pull_request", action="reopened") 94 | @router.register("pull_request", action="edited") 95 | @router.register("pull_request", action="synchronize") 96 | async def validate_maintenance_branch_pr(event, gh, *args, **kwargs): 97 | """Check the PR title for maintenance branch pull requests. 98 | 99 | If the PR was made against maintenance branch, and the title does not 100 | match the maintenance branch PR pattern, then post a failure status. 
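    For example, a backport title like ``[3.12] Fix the regression in ham
    (GH-12345)`` passes this check; only the ``[3.12]`` prefix is validated,
    and the rest of the title and the PR number are illustrative.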
101 | 102 | The maintenance branch PR has to start with `[X.Y]` 103 | """ 104 | if event.data["action"] == "edited" and "title" not in event.data["changes"]: 105 | return 106 | pull_request = event.data["pull_request"] 107 | base_branch = pull_request["base"]["ref"] 108 | 109 | if not is_maintenance_branch(base_branch): 110 | return 111 | 112 | title = util.normalize_title(pull_request["title"], pull_request["body"]) 113 | title_match = MAINTENANCE_BRANCH_TITLE_RE.match(title) 114 | 115 | if title_match is None: 116 | status = create_status( 117 | util.StatusState.FAILURE, 118 | description="Not a valid maintenance branch PR title.", 119 | target_url=BACKPORT_TITLE_DEVGUIDE_URL, 120 | ) 121 | else: 122 | status = create_status( 123 | util.StatusState.SUCCESS, description="Valid maintenance branch PR title." 124 | ) 125 | await util.post_status(gh, event, status) 126 | 127 | 128 | @router.register("create", ref_type="branch") 129 | async def maintenance_branch_created(event, gh, *args, **kwargs): 130 | """Create the `needs backport label` when the maintenance branch is created. 131 | 132 | If a maintenance branch was created (e.g.: 3.9, or 4.0), 133 | automatically create the `needs backport to ` label. 134 | 135 | The maintenance branch PR has to start with `[X.Y]` 136 | """ 137 | branch_name = event.data["ref"] 138 | 139 | if MAINTENANCE_BRANCH_RE.match(branch_name): 140 | await gh.post( 141 | "/repos/python/cpython/labels", 142 | data={ 143 | "name": f"needs backport to {branch_name}", 144 | "color": "c2e0c6", 145 | "description": "bug and security fixes", 146 | }, 147 | ) 148 | -------------------------------------------------------------------------------- /bedevere/gh_issue.py: -------------------------------------------------------------------------------- 1 | """Check if a GitHub issue number is specified in the pull request's title.""" 2 | import re 3 | from typing import Literal 4 | 5 | import gidgethub 6 | from aiohttp import ClientSession 7 | from gidgethub import routing 8 | from gidgethub.abc import GitHubAPI 9 | 10 | from . import util 11 | 12 | router = routing.Router() 13 | 14 | IssueKind = Literal["gh", "bpo"] 15 | 16 | ISSUE_RE = re.compile(r"(?Pbpo|gh)-(?P\d+)", re.IGNORECASE) 17 | SKIP_ISSUE_LABEL = util.skip_label("issue") 18 | STATUS_CONTEXT = "bedevere/issue-number" 19 | # Try to keep descriptions at or below 50 characters, else GitHub's CSS will truncate it. 
20 | SKIP_ISSUE_STATUS = util.create_status( 21 | STATUS_CONTEXT, util.StatusState.SUCCESS, description="Issue report skipped" 22 | ) 23 | ISSUE_URL: dict[IssueKind, str] = { 24 | "gh": "https://github.com/python/cpython/issues/{issue_number}", 25 | "bpo": "https://bugs.python.org/issue?@action=redirect&bpo={issue_number}", 26 | } 27 | ISSUE_CHECK_URL: dict[IssueKind, str] = { 28 | "gh": "https://api.github.com/repos/python/cpython/issues/{issue_number}", 29 | "bpo": "https://bugs.python.org/issue{issue_number}", 30 | } 31 | 32 | 33 | @router.register("pull_request", action="opened") 34 | @router.register("pull_request", action="synchronize") 35 | @router.register("pull_request", action="reopened") 36 | async def set_status(event, gh: GitHubAPI, *args, session: ClientSession, **kwargs): 37 | """Set the issue number status on the pull request.""" 38 | pull_request = event.data["pull_request"] 39 | issue = await util.issue_for_PR(gh, pull_request) 40 | 41 | if util.skip("issue", issue): 42 | await util.post_status(gh, event, SKIP_ISSUE_STATUS) 43 | return 44 | 45 | issue_number_found = ISSUE_RE.search(pull_request["title"]) 46 | 47 | if not issue_number_found: 48 | status = create_failure_status_no_issue() 49 | else: 50 | issue_number = int(issue_number_found.group("issue")) 51 | issue_kind = issue_number_found.group("kind").lower() 52 | issue_found = await _validate_issue_number( 53 | gh, issue_number, session=session, kind=issue_kind 54 | ) 55 | if issue_found: 56 | status = create_success_status(issue_number, kind=issue_kind) 57 | if issue_kind == "gh": 58 | # Add the issue number to the pull request's body 59 | await util.patch_body(gh, util.PR, pull_request, issue_number) 60 | # Get GitHub Issue data 61 | issue_data = await gh.getitem( 62 | ISSUE_CHECK_URL[issue_kind].format(issue_number=issue_number) 63 | ) 64 | # Add the pull request number to the issue's body 65 | await util.patch_body( 66 | gh, util.ISSUE, issue_data, pull_request["number"] 67 | ) 68 | else: 69 | status = create_failure_status_issue_not_present( 70 | issue_number, kind=issue_kind 71 | ) 72 | await util.post_status(gh, event, status) 73 | 74 | 75 | @router.register("pull_request", action="edited") 76 | async def title_edited(event, gh, *args, session, **kwargs): 77 | """Set the status on a pull request that has changed its title.""" 78 | if "title" not in event.data["changes"]: 79 | return 80 | await set_status(event, gh, session=session) 81 | 82 | 83 | @router.register("pull_request", action="labeled") 84 | async def new_label(event, gh, *args, **kwargs): 85 | """Update the status if the "skip issue" label was added.""" 86 | if util.label_name(event.data) == SKIP_ISSUE_LABEL: 87 | issue_number_found = ISSUE_RE.search(event.data["pull_request"]["title"]) 88 | if issue_number_found: 89 | status = create_success_status(issue_number_found.group("issue")) 90 | else: 91 | status = SKIP_ISSUE_STATUS 92 | await util.post_status(gh, event, status) 93 | 94 | 95 | @router.register("pull_request", action="unlabeled") 96 | async def removed_label(event, gh, *args, session, **kwargs): 97 | """Re-check the status if the "skip issue" label is removed.""" 98 | if util.no_labels(event.data): 99 | return 100 | elif util.label_name(event.data) == SKIP_ISSUE_LABEL: 101 | await set_status(event, gh, session=session) 102 | 103 | 104 | def create_success_status(issue_number: int, *, kind: IssueKind = "gh"): 105 | """Create a success status for when an issue number was found in the title.""" 106 | url = 
ISSUE_URL[kind].format(issue_number=issue_number) 107 | return util.create_status( 108 | STATUS_CONTEXT, 109 | util.StatusState.SUCCESS, 110 | description=f"Issue number {issue_number} found", 111 | target_url=url, 112 | ) 113 | 114 | 115 | def create_failure_status_issue_not_present( 116 | issue_number: int, *, kind: IssueKind = "gh" 117 | ): 118 | """Create a failure status for when an issue does not exist on the GitHub issue tracker.""" 119 | url = ISSUE_URL[kind].format(issue_number=issue_number) 120 | description = f"{kind.upper()} Issue #{issue_number} is not valid." 121 | return util.create_status( 122 | STATUS_CONTEXT, 123 | util.StatusState.FAILURE, 124 | description=description, 125 | target_url=url, 126 | ) 127 | 128 | 129 | def create_failure_status_no_issue(): 130 | """Create a failure status for when no issue number was found in the title.""" 131 | description = 'No issue # in title or "skip issue" label found' 132 | url = "https://devguide.python.org/getting-started/pull-request-lifecycle.html#submitting" 133 | return util.create_status( 134 | STATUS_CONTEXT, 135 | util.StatusState.FAILURE, 136 | description=description, 137 | target_url=url, 138 | ) 139 | 140 | 141 | async def _validate_issue_number( 142 | gh: GitHubAPI, issue_number: int, *, session: ClientSession, kind: IssueKind = "gh" 143 | ) -> bool: 144 | """Ensure the GitHub Issue number is valid.""" 145 | if kind == "bpo": 146 | url = ISSUE_CHECK_URL[kind].format(issue_number=issue_number) 147 | async with session.head(url) as res: 148 | return res.status != 404 149 | 150 | if kind != "gh": 151 | raise ValueError(f"Unknown issue kind {kind}") 152 | 153 | url = ISSUE_CHECK_URL[kind].format(issue_number=issue_number) 154 | try: 155 | response = await gh.getitem(url) 156 | except gidgethub.BadRequest: 157 | return False 158 | # It is an issue if the response does not have the `pull_request` key. 159 | return "pull_request" not in response 160 | -------------------------------------------------------------------------------- /bedevere/util.py: -------------------------------------------------------------------------------- 1 | import enum 2 | import re 3 | import sys 4 | from typing import Any 5 | 6 | import gidgethub 7 | from gidgethub.abc import GitHubAPI 8 | 9 | NEWS_NEXT_DIR = "Misc/NEWS.d/next/" 10 | PR = "pr" 11 | ISSUE = "issue" 12 | DEFAULT_BODY = "" 13 | 14 | PR_BODY_TAG_NAME = f"gh-{{tag_type}}-number" 15 | PR_BODY_OPENING_TAG = f"" 16 | PR_BODY_CLOSING_TAG = f"" 17 | PR_BODY_TEMPLATE = f"""\ 18 | {{body}} 19 | 20 | {PR_BODY_OPENING_TAG} 21 | * {{key}}: gh-{{pr_or_issue_number}} 22 | {PR_BODY_CLOSING_TAG} 23 | """ 24 | 25 | ISSUE_BODY_TAG_NAME = f"gh-linked-{PR}s" 26 | ISSUE_BODY_OPENING_TAG = f"" 27 | ISSUE_BODY_CLOSING_TAG = f"" 28 | ISSUE_BODY_TASK_LIST_TEMPLATE = f"""\n 29 | {ISSUE_BODY_OPENING_TAG} 30 | ### Linked PRs 31 | * gh-{{pr_number}} 32 | {ISSUE_BODY_CLOSING_TAG} 33 | """ 34 | 35 | # Regex pattern to search for tasklist in the issue body 36 | ISSUE_BODY_TASK_LIST_PATTERN = re.compile( 37 | rf"(?P{ISSUE_BODY_OPENING_TAG})" 38 | rf"(?P.*?)" 39 | rf"(?P{ISSUE_BODY_CLOSING_TAG})", 40 | flags=re.DOTALL, 41 | ) 42 | 43 | 44 | @enum.unique 45 | class StatusState(enum.Enum): 46 | SUCCESS = "success" 47 | ERROR = "error" 48 | FAILURE = "failure" 49 | 50 | 51 | def create_status(context, state, *, description=None, target_url=None): 52 | """Create the data for a status. 
53 | 54 | The argument order is such that you can use functools.partial() to set the 55 | context to avoid repeatedly specifying it throughout a module. 56 | """ 57 | status = { 58 | "context": context, 59 | "state": state.value, 60 | } 61 | if description is not None: 62 | status["description"] = description 63 | if target_url is not None: 64 | status["target_url"] = target_url 65 | 66 | return status 67 | 68 | 69 | async def post_status(gh, event, status): 70 | """Post a status in reaction to an event.""" 71 | await gh.post(event.data["pull_request"]["statuses_url"], data=status) 72 | 73 | 74 | def skip_label(what): 75 | """Generate a "skip" label name.""" 76 | return f"skip {what}" 77 | 78 | 79 | def labels(issue): 80 | return {label_data["name"] for label_data in issue["labels"]} 81 | 82 | 83 | def skip(what, issue): 84 | """See if an issue has a "skip {what}" label.""" 85 | return skip_label(what) in labels(issue) 86 | 87 | 88 | def label_name(event_data): 89 | """Get the label name from a label-related webhook event.""" 90 | return event_data["label"]["name"] 91 | 92 | 93 | def user_login(item): 94 | return item["user"]["login"] 95 | 96 | 97 | async def files_for_PR(gh, pull_request): 98 | """Get files for a pull request.""" 99 | # For some unknown reason there isn't any files URL in a pull request 100 | # payload. 101 | files_url = f'{pull_request["url"]}/files' 102 | data = [] 103 | async for filedata in gh.getiter(files_url): # pragma: no branch 104 | data.append( 105 | { 106 | "file_name": filedata["filename"], 107 | "patch": filedata.get("patch", ""), 108 | } 109 | ) 110 | return data 111 | 112 | 113 | async def issue_for_PR(gh, pull_request): 114 | """Return a dict with data about the given PR.""" 115 | # "issue_url" is the API endpoint for the given pull_request (despite the name) 116 | # It could also come from "url" 117 | 118 | url_key = "issue_url" 119 | if not pull_request.get(url_key): 120 | url_key = "url" 121 | return await gh.getitem(pull_request[url_key]) 122 | 123 | 124 | def build_pr_body(issue_number: int, body: str) -> str: 125 | """Update the Pull Request body with related Issue.""" 126 | return PR_BODY_TEMPLATE.format( 127 | body=body, 128 | pr_or_issue_number=issue_number, 129 | key=ISSUE.title(), 130 | tag_type=ISSUE, 131 | ) 132 | 133 | 134 | def build_issue_body(pr_number: int, body: str) -> str: 135 | """Update the Issue body with related Pull Request.""" 136 | # If the body already contains a legacy closing tag 137 | # (e.g. 
), then we use the legacy template 138 | # TODO: Remove this when all the open issues using the legacy tag are closed 139 | if PR_BODY_CLOSING_TAG.format(tag_type=PR) in body: 140 | return PR_BODY_TEMPLATE.format( 141 | body=body, 142 | pr_or_issue_number=pr_number, 143 | key=PR.upper(), 144 | tag_type=PR, 145 | ) 146 | 147 | # Check if the body already contains a tasklist 148 | result = ISSUE_BODY_TASK_LIST_PATTERN.search(body) 149 | 150 | if not result: 151 | # If the body doesn't contain a tasklist, we add one using the template 152 | body += ISSUE_BODY_TASK_LIST_TEMPLATE.format(pr_number=pr_number) 153 | return body 154 | 155 | # If the body already contains a tasklist, only append the new PR to the list 156 | return ISSUE_BODY_TASK_LIST_PATTERN.sub( 157 | rf"\g\g* gh-{pr_number}\n\g", 158 | body, 159 | count=1, 160 | ) 161 | 162 | 163 | async def patch_body( 164 | gh: GitHubAPI, 165 | content_type: str, 166 | pr_or_issue: dict[str, Any], 167 | pr_or_issue_number: int, 168 | ) -> Any: 169 | """Updates the description of a PR/Issue with the gh issue/pr number if it exists. 170 | 171 | returns if body exists with issue/pr number 172 | """ 173 | body = pr_or_issue.get("body", None) or DEFAULT_BODY 174 | body_search_pattern = rf"(^|\b)(GH-|gh-|#){pr_or_issue_number}\b" 175 | 176 | if not body or not re.search(body_search_pattern, body): 177 | updated_body = ( 178 | build_issue_body(pr_or_issue_number, body) 179 | if content_type == ISSUE 180 | else build_pr_body(pr_or_issue_number, body) 181 | ) 182 | return await gh.patch(pr_or_issue["url"], data={"body": updated_body}) 183 | return None 184 | 185 | 186 | async def is_core_dev(gh, username): 187 | """Check if the user is a CPython core developer.""" 188 | org_teams = "/orgs/python/teams" 189 | team_name = "python core" 190 | try: 191 | async for team in gh.getiter(org_teams): 192 | if team["name"].lower() == team_name: # pragma: no branch 193 | break 194 | else: 195 | raise ValueError(f"{team_name!r} not found at {org_teams!r}") 196 | except gidgethub.BadRequest as exc: 197 | # returns 403 error if the resource is not accessible by integration 198 | return False 199 | # The 'teams' object only provides a URL to a deprecated endpoint, 200 | # so manually construct the URL to the non-deprecated team membership 201 | # endpoint. 202 | membership_url = f"/teams/{team['id']}/memberships/{username}" 203 | try: 204 | await gh.getitem(membership_url) 205 | except gidgethub.BadRequest as exc: 206 | if exc.status_code == 404: 207 | return False 208 | raise 209 | else: 210 | return True 211 | 212 | 213 | def is_news_dir(filename): 214 | "Return True if file is in the News directory." 215 | return filename.startswith(NEWS_NEXT_DIR) 216 | 217 | 218 | def normalize_title(title, body): 219 | """Normalize the title if it spills over into the PR's body.""" 220 | if not (title.endswith("…") and body.startswith("…")): 221 | return title 222 | else: 223 | # Being paranoid in case \r\n is used. 
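        # GitHub truncates an overly long title with a trailing "…" and moves
        # the remainder to the start of the body, also prefixed with "…".
        # Stitch the two halves back together, keeping only the first line of
        # the spilled text.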
224 | return title[:-1] + body[1:].partition("\r\n")[0] 225 | 226 | 227 | def no_labels(event_data): 228 | if "label" not in event_data: 229 | print( 230 | "no 'label' key in payload; " 231 | "'unlabeled' event triggered by label deletion?", 232 | file=sys.stderr, 233 | ) 234 | return True 235 | else: 236 | return False 237 | 238 | 239 | async def get_pr_for_commit(gh, sha, repo_full_name=None): 240 | """Find the PR containing the specific commit hash.""" 241 | if not repo_full_name: 242 | repo_full_name = "python/cpython" 243 | prs_for_commit = await gh.getitem( 244 | f"/search/issues?q=type:pr+repo:{repo_full_name}+sha:{sha}" 245 | ) 246 | if prs_for_commit["total_count"] > 0: # there should only be one 247 | return prs_for_commit["items"][0] 248 | return None 249 | -------------------------------------------------------------------------------- /tests/test_close_pr.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from gidgethub import sansio 3 | 4 | from bedevere import close_pr 5 | 6 | 7 | class FakeGH: 8 | def __init__(self, *, getitem=None, post=None): 9 | self._getitem_return = getitem 10 | self.patch_url = None 11 | self.patch_data = None 12 | self.delete_url = None 13 | self.delete_data = None 14 | self.data = None 15 | self._post_return = post 16 | self.post_url = [] 17 | self.post_data = [] 18 | 19 | async def patch(self, url, data): 20 | self.patch_url = url 21 | self.patch_data = data 22 | 23 | async def delete(self, url, data): 24 | self.delete_url = url 25 | self.delete_data = data 26 | 27 | async def post(self, url, *, data): 28 | self.post_url.append(url) 29 | self.post_data.append(data) 30 | return self._post_return 31 | 32 | 33 | @pytest.mark.asyncio 34 | async def test_close_invalid_pr_on_open(): 35 | data = { 36 | "action": "opened", 37 | "pull_request": { 38 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 39 | "title": "No issue in title", 40 | "issue_url": "https://api.github.com/org/repo/issues/123", 41 | "url": "https://api.github.com/org/repo/pulls/123", 42 | "head": {"label": "python:3.6"}, 43 | "base": {"label": "python:main"}, 44 | }, 45 | } 46 | pr_data = { 47 | "labels": [ 48 | {"name": "non-trivial"}, 49 | ] 50 | } 51 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 52 | gh = FakeGH(getitem=pr_data) 53 | await close_pr.router.dispatch(event, gh) 54 | patch_data = gh.patch_data 55 | assert patch_data["state"] == "closed" 56 | 57 | assert len(gh.post_url) == 2 58 | assert gh.post_url[0] == "https://api.github.com/org/repo/issues/123/labels" 59 | assert gh.post_data[0] == ["invalid"] 60 | assert gh.post_url[1] == "https://api.github.com/org/repo/issues/123/comments" 61 | assert gh.post_data[1] == {"body": close_pr.INVALID_PR_COMMENT} 62 | 63 | 64 | @pytest.mark.asyncio 65 | async def test_close_invalid_pr_on_synchronize(): 66 | data = { 67 | "action": "synchronize", 68 | "pull_request": { 69 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 70 | "title": "No issue in title", 71 | "issue_url": "https://api.github.com/org/repo/issues/123", 72 | "url": "https://api.github.com/org/repo/pulls/123", 73 | "head": {"label": "python:3.6"}, 74 | "base": {"label": "python:main"}, 75 | }, 76 | } 77 | pr_data = { 78 | "labels": [ 79 | {"name": "non-trivial"}, 80 | ] 81 | } 82 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 83 | gh = FakeGH(getitem=pr_data) 84 | await close_pr.router.dispatch(event, gh) 85 | patch_data = gh.patch_data 86 | assert 
patch_data["state"] == "closed" 87 | 88 | assert len(gh.post_url) == 2 89 | assert gh.post_url[0] == "https://api.github.com/org/repo/issues/123/labels" 90 | assert gh.post_data[0] == ["invalid"] 91 | assert gh.post_url[1] == "https://api.github.com/org/repo/issues/123/comments" 92 | assert gh.post_data[1] == {"body": close_pr.INVALID_PR_COMMENT} 93 | 94 | 95 | @pytest.mark.asyncio 96 | async def test_valid_pr_not_closed(): 97 | data = { 98 | "action": "opened", 99 | "pull_request": { 100 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 101 | "title": "No issue in title", 102 | "issue_url": "issue URL", 103 | "url": "https://api.github.com/org/repo/pulls/123", 104 | "head": {"label": "someuser:bpo-3.6"}, 105 | "base": {"label": "python:main"}, 106 | }, 107 | } 108 | pr_data = { 109 | "labels": [ 110 | {"name": "non-trivial"}, 111 | ] 112 | } 113 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 114 | gh = FakeGH(getitem=pr_data) 115 | await close_pr.router.dispatch(event, gh) 116 | patch_data = gh.patch_data 117 | assert patch_data is None 118 | 119 | 120 | @pytest.mark.asyncio 121 | async def test_close_invalid_pr_on_open_not_python_as_head(): 122 | data = { 123 | "action": "opened", 124 | "pull_request": { 125 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 126 | "title": "No issue in title", 127 | "issue_url": "issue URL", 128 | "url": "https://api.github.com/org/repo/pulls/123", 129 | "head": {"label": "username123:3.6"}, 130 | "base": {"label": "python:main"}, 131 | }, 132 | } 133 | pr_data = { 134 | "labels": [ 135 | {"name": "non-trivial"}, 136 | ] 137 | } 138 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 139 | gh = FakeGH(getitem=pr_data) 140 | await close_pr.router.dispatch(event, gh) 141 | patch_data = gh.patch_data 142 | assert patch_data["state"] == "closed" 143 | 144 | 145 | @pytest.mark.asyncio 146 | async def test_pr_with_head_branch_containing_all_digits_not_closed(): 147 | data = { 148 | "action": "opened", 149 | "pull_request": { 150 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 151 | "title": "No issue in title", 152 | "issue_url": "issue URL", 153 | "url": "https://api.github.com/org/repo/pulls/123", 154 | "head": {"label": "someuser:12345"}, 155 | "base": {"label": "python:main"}, 156 | }, 157 | } 158 | pr_data = { 159 | "labels": [ 160 | {"name": "non-trivial"}, 161 | ] 162 | } 163 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 164 | gh = FakeGH(getitem=pr_data) 165 | await close_pr.router.dispatch(event, gh) 166 | patch_data = gh.patch_data 167 | assert patch_data is None 168 | 169 | 170 | @pytest.mark.asyncio 171 | async def test_dismiss_review_request_for_invalid_pr(): 172 | data = { 173 | "action": "review_requested", 174 | "pull_request": { 175 | "number": 123, 176 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 177 | "title": "No issue in title", 178 | "issue_url": "issue URL", 179 | "url": "https://api.github.com/org/repo/pulls/123", 180 | "head": {"label": "python:3.6"}, 181 | "base": {"label": "python:main"}, 182 | "requested_reviewers": [ 183 | { 184 | "login": "gpshead", 185 | }, 186 | { 187 | "login": "gvanrossum", 188 | }, 189 | ], 190 | "requested_teams": [ 191 | { 192 | "name": "import-team", 193 | }, 194 | { 195 | "name": "windows-team", 196 | }, 197 | ], 198 | }, 199 | } 200 | 201 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 202 | gh = FakeGH() 203 | await close_pr.router.dispatch(event, gh) 204 | 
assert gh.delete_data == { 205 | "reviewers": ["gpshead", "gvanrossum"], 206 | "team_reviewers": ["import-team", "windows-team"], 207 | } 208 | assert ( 209 | gh.delete_url 210 | == f"https://api.github.com/org/repo/pulls/123/requested_reviewers" 211 | ) 212 | 213 | 214 | @pytest.mark.asyncio 215 | async def test_valid_pr_review_request_not_dismissed(): 216 | data = { 217 | "action": "review_requested", 218 | "pull_request": { 219 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 220 | "title": "No issue in title", 221 | "issue_url": "issue URL", 222 | "url": "https://api.github.com/org/repo/pulls/123", 223 | "head": {"label": "someuser:bpo-3.6"}, 224 | "base": {"label": "python:main"}, 225 | "requested_reviewers": [ 226 | { 227 | "login": "gpshead", 228 | }, 229 | { 230 | "login": "gvanrossum", 231 | }, 232 | ], 233 | "requested_teams": [ 234 | { 235 | "name": "import-team", 236 | }, 237 | { 238 | "name": "windows-team", 239 | }, 240 | ], 241 | }, 242 | } 243 | 244 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 245 | gh = FakeGH() 246 | await close_pr.router.dispatch(event, gh) 247 | assert gh.delete_data is None 248 | assert gh.delete_url is None 249 | -------------------------------------------------------------------------------- /tests/test_filepaths.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from gidgethub import sansio 3 | 4 | from bedevere import filepaths 5 | from bedevere.prtype import Labels 6 | 7 | from .test_news import check_n_pop_nonews_events 8 | 9 | 10 | class FakeGH: 11 | def __init__(self, *, getiter=None, getitem=None, post=None): 12 | self._getitem_return = getitem 13 | self._getiter_return = getiter 14 | self._post_return = post 15 | self.getitem_url = None 16 | self.getiter_url = None 17 | self.post_url = [] 18 | self.post_data = [] 19 | 20 | async def getitem(self, url): 21 | self.getitem_url = url 22 | return self._getitem_return 23 | 24 | async def getiter(self, url): 25 | self.getiter_url = url 26 | for item in self._getiter_return: 27 | yield item 28 | 29 | async def post(self, url, *, data): 30 | self.post_url.append(url) 31 | self.post_data.append(data) 32 | return self._post_return 33 | 34 | 35 | GOOD_BASENAME = "2017-06-16-20-32-50.bpo-1234.nonce.rst" 36 | 37 | 38 | async def test_news_only(): 39 | filenames = [ 40 | { 41 | "filename": "README", 42 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 43 | }, 44 | { 45 | "filename": f"Misc/NEWS.d/next/Lib/{GOOD_BASENAME}", 46 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 47 | }, 48 | ] 49 | issue = {"labels": []} 50 | gh = FakeGH(getiter=filenames, getitem=issue) 51 | event_data = { 52 | "action": "opened", 53 | "number": 1234, 54 | "pull_request": { 55 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 56 | "statuses_url": "https://api.github.com/some/status", 57 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 58 | }, 59 | } 60 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 61 | await filepaths.router.dispatch(event, gh) 62 | assert ( 63 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 64 | ) 65 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 66 | assert len(gh.post_url) == 1 67 | assert gh.post_url[0] == "https://api.github.com/some/status" 68 | assert gh.post_data[0]["state"] == "success" 69 | 70 | 71 | 
@pytest.mark.parametrize( 72 | "author_association", ["OWNER", "MEMBER", "CONTRIBUTOR", "NONE"] 73 | ) 74 | async def test_docs_only(author_association): 75 | filenames = [ 76 | { 77 | "filename": "/path/to/docs1.rst", 78 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 79 | }, 80 | { 81 | "filename": "docs2.rst", 82 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 83 | }, 84 | ] 85 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 86 | gh = FakeGH(getiter=filenames, getitem=issue) 87 | event_data = { 88 | "action": "opened", 89 | "number": 1234, 90 | "pull_request": { 91 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 92 | "statuses_url": "https://api.github.com/some/status", 93 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 94 | "issue_comment_url": "https://api.github.com/repos/cpython/python/issue/1234/comments", 95 | "author_association": author_association, 96 | }, 97 | } 98 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 99 | await filepaths.router.dispatch(event, gh) 100 | assert ( 101 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 102 | ) 103 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 104 | assert len(gh.post_url) == 1 105 | assert gh.post_url[0] == "https://api.github.com/some/label" 106 | assert gh.post_data[0] == [Labels.docs.value, Labels.skip_news.value] 107 | 108 | 109 | @pytest.mark.parametrize( 110 | "author_association", ["OWNER", "MEMBER", "CONTRIBUTOR", "NONE"] 111 | ) 112 | async def test_tests_only(author_association): 113 | filenames = [ 114 | { 115 | "filename": "/path/to/test_docs1.py", 116 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 117 | }, 118 | { 119 | "filename": "test_docs2.py", 120 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 121 | }, 122 | ] 123 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 124 | gh = FakeGH(getiter=filenames, getitem=issue) 125 | event_data = { 126 | "action": "opened", 127 | "number": 1234, 128 | "pull_request": { 129 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 130 | "statuses_url": "https://api.github.com/some/status", 131 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 132 | "issue_comment_url": "https://api.github.com/repos/cpython/python/issue/1234/comments", 133 | "author_association": author_association, 134 | }, 135 | } 136 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 137 | await filepaths.router.dispatch(event, gh) 138 | assert ( 139 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 140 | ) 141 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 142 | assert len(gh.post_url) == (3 if author_association == "NONE" else 2) 143 | assert gh.post_url.pop(0) == "https://api.github.com/some/label" 144 | assert gh.post_data.pop(0) == [Labels.tests.value] 145 | check_n_pop_nonews_events(gh, author_association == "NONE") 146 | 147 | 148 | async def test_docs_and_tests(): 149 | filenames = [ 150 | { 151 | "filename": "/path/to/docs.rst", 152 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 153 | }, 154 | { 155 | "filename": "test_docs2.py", 156 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 157 | }, 158 |
] 159 | issue = { 160 | "labels": [{"name": "skip news"}], 161 | "labels_url": "https://api.github.com/some/label", 162 | } 163 | gh = FakeGH(getiter=filenames, getitem=issue) 164 | event_data = { 165 | "action": "opened", 166 | "number": 1234, 167 | "pull_request": { 168 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 169 | "statuses_url": "https://api.github.com/some/status", 170 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 171 | }, 172 | } 173 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 174 | await filepaths.router.dispatch(event, gh) 175 | assert ( 176 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 177 | ) 178 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 179 | # Only creates type-tests label. 180 | assert len(gh.post_url) == 2 181 | assert gh.post_url[0] == "https://api.github.com/some/label" 182 | assert gh.post_data[0] == [Labels.tests.value] 183 | assert gh.post_url[1] == "https://api.github.com/some/status" 184 | assert gh.post_data[1]["state"] == "success" 185 | 186 | 187 | async def test_news_and_tests(): 188 | filenames = [ 189 | { 190 | "filename": "/path/to/docs.rst", 191 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 192 | }, 193 | { 194 | "filename": "test_docs2.py", 195 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 196 | }, 197 | { 198 | "filename": f"Misc/NEWS.d/next/Lib/{GOOD_BASENAME}", 199 | "patch": "@@ -0,0 +1 @@ +Fix inspect.getsourcelines for module level frames/tracebacks", 200 | }, 201 | ] 202 | 203 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 204 | gh = FakeGH(getiter=filenames, getitem=issue) 205 | event_data = { 206 | "action": "opened", 207 | "number": 1234, 208 | "pull_request": { 209 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 210 | "statuses_url": "https://api.github.com/some/status", 211 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 212 | }, 213 | } 214 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 215 | await filepaths.router.dispatch(event, gh) 216 | assert ( 217 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 218 | ) 219 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 220 | # Only creates type-tests label. 
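# (the second POST is the required success status on the PR, not another label)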
221 | assert len(gh.post_url) == 2 222 | assert gh.post_url[0] == "https://api.github.com/some/label" 223 | assert gh.post_data[0] == [Labels.tests.value] 224 | assert gh.post_url[1] == "https://api.github.com/some/status" 225 | assert gh.post_data[1]["state"] == "success" 226 | 227 | 228 | async def test_synchronize(): 229 | filenames = [ 230 | { 231 | "filename": "/path/to/docs.rst", 232 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 233 | }, 234 | { 235 | "filename": "test_docs2.py", 236 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 237 | }, 238 | { 239 | "filename": f"Misc/NEWS.d/next/Lib/{GOOD_BASENAME}", 240 | "patch": "@@ -0,0 +1 @@ +Fix inspect.getsourcelines for module level frames/tracebacks", 241 | }, 242 | ] 243 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 244 | gh = FakeGH(getiter=filenames, getitem=issue) 245 | event_data = { 246 | "action": "synchronize", 247 | "number": 1234, 248 | "pull_request": { 249 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 250 | "statuses_url": "https://api.github.com/some/status", 251 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 252 | }, 253 | } 254 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 255 | await filepaths.router.dispatch(event, gh) 256 | assert ( 257 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 258 | ) 259 | assert gh.getitem_url is None 260 | # Only creates type-tests label. 261 | assert len(gh.post_url) == 1 262 | assert gh.post_url[0] == "https://api.github.com/some/status" 263 | assert gh.post_data[0]["state"] == "success" 264 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2017 The Python Software Foundation 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /bedevere/stage.py: -------------------------------------------------------------------------------- 1 | """Label a pull request based on what its waiting on.""" 2 | 3 | # The state machine for the flow of a PR is currently 4 | # documented with a Mermaid graph in the README. 5 | # The graph replaces a previous version (written as 6 | # a DOT file) that was included here. 7 | # 8 | # Changes to this file should be reflected in the README. 
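# Rough sketch of the flow, reconstructed from the handlers below rather than
# taken from the README graph:
#
#   PR opened -> "awaiting review" ("awaiting core review" for a core dev author)
#   core dev requests changes -> "awaiting changes" (plus a comment quoting the trigger phrase)
#   author posts the trigger phrase -> "awaiting change review" (core reviewers re-requested)
#   core dev approves -> "awaiting merge"
#   new commit after approval -> back to "awaiting core review"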
9 | 10 | import enum 11 | import random 12 | 13 | import gidgethub.routing 14 | 15 | from . import util 16 | 17 | router = gidgethub.routing.Router() 18 | 19 | BORING_TRIGGER_PHRASE = "I have made the requested changes; please review again" 20 | FUN_TRIGGER_PHRASE = "I didn't expect the Spanish Inquisition" 21 | TRIGGERS = frozenset([BORING_TRIGGER_PHRASE, FUN_TRIGGER_PHRASE]) 22 | 23 | TAG_NAME = "changes-requested" 24 | 25 | CHANGES_REQUESTED_MESSAGE = f"""\ 26 | 27 | A Python core developer has requested some changes be made to your \ 28 | pull request before we can consider merging it. If you could please \ 29 | address their requests along with any other requests in other \ 30 | reviews from core developers that would be appreciated. 31 | 32 | Once you have made the requested changes, please leave a comment \ 33 | on this pull request containing the phrase `{BORING_TRIGGER_PHRASE}`. \ 34 | I will then notify any core developers who have left a review that \ 35 | you're ready for them to take another look at this pull request. 36 | 37 | 38 | {{easter_egg}} 39 | """ 40 | 41 | CORE_DEV_CHANGES_REQUESTED_MESSAGE = f"""\ 42 | 43 | When you're done making the requested changes, leave the comment: `{BORING_TRIGGER_PHRASE}`. 44 | 45 | 46 | {{easter_egg}} 47 | """ 48 | 49 | EASTER_EGG_1 = """\ 50 | And if you don't make the requested changes, \ 51 | you will be poked with soft cushions! 52 | """ 53 | 54 | EASTER_EGG_2 = """\ 55 | And if you don't make the requested changes, \ 56 | you will be put in the comfy chair! 57 | """ 58 | 59 | ACK = """\ 60 | {greeting} 61 | 62 | {core_devs}: please review the changes made to this pull request. 63 | """ 64 | BORING_THANKS = "Thanks for making the requested changes!" 65 | FUN_THANKS = "Nobody expects the Spanish Inquisition!" 66 | 67 | 68 | LABEL_PREFIX = "awaiting" 69 | 70 | 71 | @enum.unique 72 | class Blocker(enum.Enum): 73 | """What is blocking a pull request from being committed.""" 74 | 75 | review = f"{LABEL_PREFIX} review" 76 | core_review = f"{LABEL_PREFIX} core review" 77 | changes = f"{LABEL_PREFIX} changes" 78 | change_review = f"{LABEL_PREFIX} change review" 79 | merge = f"{LABEL_PREFIX} merge" 80 | 81 | 82 | async def _remove_stage_labels(gh, issue): 83 | """Remove all "awaiting" labels.""" 84 | # There's no reason to expect there to be multiple "awaiting" labels on a 85 | # single pull request, but just in case there are we might as well clean 86 | # up the situation when we come across it. 87 | for label in issue["labels"]: 88 | stale_name = label["name"] 89 | if stale_name.startswith(LABEL_PREFIX + " "): 90 | await gh.delete(issue["labels_url"], {"name": stale_name}) 91 | 92 | 93 | async def stage(gh, issue, blocked_on): 94 | """Remove any "awaiting" labels and apply the specified one.""" 95 | label_name = blocked_on.value 96 | if any(label_name == label["name"] for label in issue["labels"]): 97 | return 98 | await _remove_stage_labels(gh, issue) 99 | await gh.post(issue["labels_url"], data=[label_name]) 100 | 101 | 102 | async def stage_for_review(gh, pull_request): 103 | """Apply "awaiting review" label.""" 104 | issue = await util.issue_for_PR(gh, pull_request) 105 | username = util.user_login(pull_request) 106 | if await util.is_core_dev(gh, username): 107 | await stage(gh, issue, Blocker.core_review) 108 | else: 109 | await stage(gh, issue, Blocker.review) 110 | 111 | 112 | @router.register("pull_request", action="opened") 113 | async def opened_pr(event, gh, *arg, **kwargs): 114 | """Decide if a new pull request requires a review. 
115 | 116 | If a pull request comes in from a core developer, then mark it 117 | as "awaiting core review". Otherwise the pull request is 118 | "awaiting review". 119 | """ 120 | pull_request = event.data["pull_request"] 121 | if pull_request.get("draft"): 122 | return 123 | await stage_for_review(gh, pull_request) 124 | 125 | 126 | @router.register("pull_request", action="converted_to_draft") 127 | async def pr_converted_to_draft(event, gh, *arg, **kwargs): 128 | pull_request = event.data["pull_request"] 129 | issue = await util.issue_for_PR(gh, pull_request) 130 | await _remove_stage_labels(gh, issue) 131 | 132 | 133 | @router.register("pull_request", action="ready_for_review") 134 | async def draft_pr_published(event, gh, *arg, **kwargs): 135 | pull_request = event.data["pull_request"] 136 | await stage_for_review(gh, pull_request) 137 | 138 | 139 | @router.register("push") 140 | async def new_commit_pushed(event, gh, *arg, **kwargs): 141 | """If there is a new commit pushed to the PR branch that is in `awaiting merge` state, 142 | move it back to `awaiting core review` stage. 143 | """ 144 | commits = event.data["commits"] 145 | if len(commits) > 0: 146 | # get the latest commit hash 147 | commit_hash = commits[-1]["id"] 148 | repo_full_name = event.data["repository"]["full_name"] 149 | pr = await util.get_pr_for_commit(gh, commit_hash, repo_full_name) 150 | 151 | for label in util.labels(pr): 152 | if label == "awaiting merge": 153 | issue = await util.issue_for_PR(gh, pr) 154 | greeting = "There's a new commit after the PR has been approved." 155 | await request_core_review( 156 | gh, issue, blocker=Blocker.core_review, greeting=greeting 157 | ) 158 | break 159 | 160 | 161 | async def core_dev_reviewers(gh, pull_request_url): 162 | """Find the reviewers who are core developers.""" 163 | # Unfortunately the reviews URL is not contained in a pull request's data. 164 | async for review in gh.getiter(pull_request_url + "/reviews"): 165 | reviewer = util.user_login(review) 166 | # Ignoring "comment" reviews. 167 | actual_review = review["state"].lower() in {"approved", "changes_requested"} 168 | if actual_review and await util.is_core_dev(gh, reviewer): 169 | yield reviewer 170 | 171 | 172 | async def reviewers(gh, pull_request_url): 173 | """Find any type of reviewers.""" 174 | # Unfortunately the reviews URL is not contained in a pull request's data. 175 | async for review in gh.getiter(pull_request_url + "/reviews"): 176 | reviewer = util.user_login(review) 177 | # Ignoring "comment" reviews. 178 | actual_review = review["state"].lower() in {"approved", "changes_requested"} 179 | if actual_review: 180 | yield reviewer 181 | 182 | 183 | @router.register("pull_request_review", action="submitted") 184 | async def new_review(event, gh, *args, **kwargs): 185 | """Update the stage based on the latest review.""" 186 | pull_request = event.data["pull_request"] 187 | review = event.data["review"] 188 | reviewer = util.user_login(review) 189 | state = review["state"].lower() 190 | if state == "commented": 191 | # Don't care about comment reviews. 192 | return 193 | elif not await util.is_core_dev(gh, reviewer): 194 | # Poor-man's asynchronous any(). 195 | async for _ in core_dev_reviewers(gh, pull_request["url"]): 196 | # No need to update the stage as a core developer has already 197 | # reviewed this PR. 198 | return 199 | else: 200 | # Waiting for a core developer to leave a review. 
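# A review from someone who is not a core developer therefore moves the PR
# to (or keeps it at) "awaiting core review".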
201 | await stage( 202 | gh, await util.issue_for_PR(gh, pull_request), Blocker.core_review 203 | ) 204 | else: 205 | if state == "approved": 206 | if pull_request["state"] == "open": 207 | await stage( 208 | gh, await util.issue_for_PR(gh, pull_request), Blocker.merge 209 | ) 210 | elif state == "changes_requested": 211 | issue = await util.issue_for_PR(gh, pull_request) 212 | if Blocker.changes.value in util.labels(issue): 213 | # Contributor already knows what to do for this round of reviews. 214 | return 215 | easter_egg = "" 216 | if random.random() < 0.1: # pragma: no cover 217 | easter_egg = random.choice([EASTER_EGG_1, EASTER_EGG_2]) 218 | comment = CHANGES_REQUESTED_MESSAGE.format(easter_egg=easter_egg) 219 | pr_author = util.user_login(pull_request) 220 | if await util.is_core_dev(gh, pr_author): 221 | comment = CORE_DEV_CHANGES_REQUESTED_MESSAGE.format( 222 | easter_egg=easter_egg 223 | ) 224 | await stage(gh, issue, Blocker.changes) 225 | await gh.post(pull_request["comments_url"], data={"body": comment}) 226 | else: # pragma: no cover 227 | raise ValueError(f"unexpected review state: {state!r}") 228 | 229 | 230 | @router.register("pull_request_review", action="dismissed") 231 | async def dismissed_review(event, gh, *args, **kwargs): 232 | """Update the stage based on a dismissed review.""" 233 | pull_request = event.data["pull_request"] 234 | 235 | # Poor-man's asynchronous any(). 236 | async for _ in core_dev_reviewers(gh, pull_request["url"]): 237 | # No need to update the label as there is still a core dev review. 238 | return 239 | else: 240 | async for _ in reviewers(gh, pull_request["url"]): 241 | # Request review from core dev 242 | await stage( 243 | gh, await util.issue_for_PR(gh, pull_request), Blocker.core_review 244 | ) 245 | return 246 | else: 247 | # Waiting for anybody to leave a review. 248 | await stage(gh, await util.issue_for_PR(gh, pull_request), Blocker.review) 249 | 250 | 251 | @router.register("issue_comment", action="created") 252 | async def new_comment(event, gh, *args, **kwargs): 253 | issue = event.data["issue"] 254 | comment = event.data["comment"] 255 | comment_body = comment["body"].lower() 256 | if util.user_login(issue) != util.user_login(comment): 257 | # Only care about the PR creator leaving a comment. 258 | return 259 | elif not any(trigger.lower() in comment_body for trigger in TRIGGERS): 260 | # PR creator didn't request another review. 261 | return 262 | else: 263 | if FUN_TRIGGER_PHRASE.lower() in comment_body: 264 | thanks = FUN_THANKS 265 | else: 266 | thanks = BORING_THANKS 267 | await request_core_review( 268 | gh, issue, blocker=Blocker.change_review, greeting=thanks 269 | ) 270 | 271 | 272 | async def request_core_review(gh, issue, *, blocker, greeting): 273 | await stage(gh, issue, blocker) 274 | pr_url = issue["pull_request"]["url"] 275 | # Using a set comprehension to remove duplicates. 276 | core_devs = ", ".join( 277 | {"@" + core_dev async for core_dev in core_dev_reviewers(gh, pr_url)} 278 | ) 279 | 280 | comment = ACK.format(greeting=greeting, core_devs=core_devs) 281 | await gh.post(issue["comments_url"], data={"body": comment}) 282 | # Re-request reviews from core developers based on the new state of the PR. 
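# GitHub drops a reviewer from requested_reviewers once they submit a review,
# so the request is re-created for each core developer who has already reviewed.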
283 | reviewers_url = f"{pr_url}/requested_reviewers" 284 | reviewers = [core_dev async for core_dev in core_dev_reviewers(gh, pr_url)] 285 | await gh.post(reviewers_url, data={"reviewers": reviewers}) 286 | 287 | 288 | @router.register("pull_request", action="closed") 289 | async def closed_pr(event, gh, *args, **kwargs): 290 | """Remove all `awaiting ... ` labels when a PR is merged.""" 291 | if event.data["pull_request"]["merged"]: 292 | issue = await util.issue_for_PR(gh, event.data["pull_request"]) 293 | await _remove_stage_labels(gh, issue) 294 | -------------------------------------------------------------------------------- /tests/test_prtype.py: -------------------------------------------------------------------------------- 1 | from bedevere import prtype 2 | from bedevere.prtype import Labels 3 | 4 | 5 | class FakeGH: 6 | def __init__(self, *, getiter=None, getitem=None, post=None): 7 | self._getitem_return = getitem 8 | self._getiter_return = getiter 9 | self._post_return = post 10 | self.getitem_url = None 11 | self.getiter_url = None 12 | self.post_url = [] 13 | self.post_data = [] 14 | 15 | async def getitem(self, url): 16 | self.getitem_url = url 17 | return self._getitem_return 18 | 19 | async def post(self, url, *, data): 20 | self.post_url.append(url) 21 | self.post_data.append(data) 22 | return self._post_return 23 | 24 | 25 | GOOD_BASENAME = "2017-06-16-20-32-50.bpo-1234.nonce.rst" 26 | 27 | 28 | async def test_no_files(): 29 | filenames = {} 30 | issue = {"labels": []} 31 | gh = FakeGH(getitem=issue) 32 | event_data = { 33 | "action": "opened", 34 | "number": 1234, 35 | "pull_request": { 36 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 37 | "statuses_url": "https://api.github.com/some/status", 38 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 39 | }, 40 | } 41 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 42 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 43 | # When no files are present, no labels are added. 44 | assert len(gh.post_url) == 0 45 | assert len(gh.post_data) == 0 46 | 47 | 48 | async def test_news_only(): 49 | filenames = {"README", f"Misc/NEWS.d/next/Lib/{GOOD_BASENAME}"} 50 | issue = {"labels": []} 51 | gh = FakeGH(getitem=issue) 52 | event_data = { 53 | "action": "opened", 54 | "number": 1234, 55 | "pull_request": { 56 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 57 | "statuses_url": "https://api.github.com/some/status", 58 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 59 | }, 60 | } 61 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 62 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 63 | # News only .rst does not add a docs label. 
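# (a NEWS entry is identified by its Misc/NEWS.d/next/ location, not by its .rst suffix)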
64 | assert len(gh.post_url) == 0 65 | assert len(gh.post_data) == 0 66 | 67 | 68 | async def test_docs_no_news(): 69 | filenames = {"path/to/docs1.rst", "other/path/to/docs2.md"} 70 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 71 | gh = FakeGH(getitem=issue) 72 | event_data = { 73 | "action": "opened", 74 | "number": 1234, 75 | "pull_request": { 76 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 77 | "statuses_url": "https://api.github.com/some/status", 78 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 79 | }, 80 | } 81 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 82 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 83 | assert len(gh.post_url) == 1 84 | assert gh.post_url[0] == "https://api.github.com/some/label" 85 | assert gh.post_data[0] == [Labels.docs.value, Labels.skip_news.value] 86 | 87 | 88 | async def test_docs_no_news_with_dotnitignore(): 89 | filenames = {"path/to/docs1.rst", "path/to/.nitignore"} 90 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 91 | gh = FakeGH(getitem=issue) 92 | event_data = { 93 | "action": "opened", 94 | "number": 1234, 95 | "pull_request": { 96 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 97 | "statuses_url": "https://api.github.com/some/status", 98 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 99 | }, 100 | } 101 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 102 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 103 | assert len(gh.post_url) == 1 104 | assert gh.post_url[0] == "https://api.github.com/some/label" 105 | assert gh.post_data[0] == [Labels.docs.value, Labels.skip_news.value] 106 | 107 | 108 | async def test_docs_and_news(): 109 | filenames = {"/path/to/docs1.rst", f"Misc/NEWS.d/next/Lib/{GOOD_BASENAME}"} 110 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 111 | gh = FakeGH(getitem=issue) 112 | event_data = { 113 | "action": "opened", 114 | "number": 1234, 115 | "pull_request": { 116 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 117 | "statuses_url": "https://api.github.com/some/status", 118 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 119 | }, 120 | } 121 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 122 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 123 | assert len(gh.post_url) == 1 124 | assert gh.post_url[0] == "https://api.github.com/some/label" 125 | assert gh.post_data[0] == [Labels.docs.value] 126 | 127 | 128 | async def test_tests_only(): 129 | filenames = {"/path/to/test_docs1.py", "test_docs2.py"} 130 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 131 | gh = FakeGH(getitem=issue) 132 | event_data = { 133 | "action": "opened", 134 | "number": 1234, 135 | "pull_request": { 136 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 137 | "statuses_url": "https://api.github.com/some/status", 138 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 139 | }, 140 | } 141 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 142 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 143 | assert len(gh.post_url) == 1 144 | assert gh.post_url[0] == "https://api.github.com/some/label" 145 | 
assert gh.post_data[0] == [Labels.tests.value] 146 | 147 | 148 | async def test_tests_and_testmods_only(): 149 | filenames = {"/path/to/_testmod.c", "_test_module.c", "test_capi,py"} 150 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 151 | gh = FakeGH(getitem=issue) 152 | event_data = { 153 | "action": "opened", 154 | "number": 1234, 155 | "pull_request": { 156 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 157 | "statuses_url": "https://api.github.com/some/status", 158 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 159 | }, 160 | } 161 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 162 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 163 | assert len(gh.post_url) == 1 164 | assert gh.post_url[0] == "https://api.github.com/some/label" 165 | assert gh.post_data[0] == [Labels.tests.value] 166 | 167 | 168 | async def test_docs_and_tests(): 169 | filenames = {"/path/to/docs.rst", "test_docs2.py"} 170 | issue = { 171 | "labels": [{"name": "skip news"}], 172 | "labels_url": "https://api.github.com/some/label", 173 | } 174 | gh = FakeGH(getiter=filenames, getitem=issue) 175 | event_data = { 176 | "action": "opened", 177 | "number": 1234, 178 | "pull_request": { 179 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 180 | "statuses_url": "https://api.github.com/some/status", 181 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 182 | }, 183 | } 184 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 185 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 186 | # Only creates type-tests label. 187 | assert len(gh.post_url) == 1 188 | assert gh.post_url[0] == "https://api.github.com/some/label" 189 | assert gh.post_data[0] == [Labels.tests.value] 190 | 191 | 192 | async def test_leave_existing_type_labels(): 193 | filenames = {"/path/to/docs.rst", "test_docs2.py"} 194 | issue = { 195 | "labels": [{"name": "skip news"}, {"name": "docs"}], 196 | "labels_url": "https://api.github.com/some/label", 197 | } 198 | gh = FakeGH(getiter=filenames, getitem=issue) 199 | event_data = { 200 | "action": "opened", 201 | "number": 1234, 202 | "pull_request": { 203 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 204 | "statuses_url": "https://api.github.com/some/status", 205 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 206 | }, 207 | } 208 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 209 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 210 | assert len(gh.post_url) == 1 211 | assert gh.post_url[0] == "https://api.github.com/some/label" 212 | # This should only add the tests label as the docs label is already applied 213 | assert gh.post_data[0] == [Labels.tests.value] 214 | 215 | 216 | async def test_do_not_post_if_nothing_to_apply(): 217 | filenames = {"/path/to/docs.rst"} 218 | issue = { 219 | "labels": [{"name": "skip news"}, {"name": "docs"}], 220 | "labels_url": "https://api.github.com/some/label", 221 | } 222 | gh = FakeGH(getiter=filenames, getitem=issue) 223 | event_data = { 224 | "action": "opened", 225 | "number": 1234, 226 | "pull_request": { 227 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 228 | "statuses_url": "https://api.github.com/some/status", 229 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 230 | 
}, 231 | } 232 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 233 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 234 | # This should not post anything as docs is already applied 235 | assert len(gh.post_url) == 0 236 | 237 | 238 | async def test_news_and_tests(): 239 | filenames = {"test_docs2.py", f"Misc/NEWS.d/next/Lib/{GOOD_BASENAME}"} 240 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 241 | gh = FakeGH(getitem=issue) 242 | event_data = { 243 | "action": "opened", 244 | "number": 1234, 245 | "pull_request": { 246 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 247 | "statuses_url": "https://api.github.com/some/status", 248 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 249 | }, 250 | } 251 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 252 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 253 | # Creates type-tests label. 254 | assert len(gh.post_url) == 1 255 | assert gh.post_url[0] == "https://api.github.com/some/label" 256 | assert gh.post_data[0] == [Labels.tests.value] 257 | 258 | 259 | async def test_other_files(): 260 | filenames = { 261 | "README", 262 | "/path/to/docs.rst", 263 | "test_docs2.py", 264 | f"Misc/NEWS.d/next/Lib/{GOOD_BASENAME}", 265 | } 266 | issue = {"labels": [], "labels_url": "https://api.github.com/some/label"} 267 | gh = FakeGH(getitem=issue) 268 | event_data = { 269 | "action": "opened", 270 | "number": 1234, 271 | "pull_request": { 272 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 273 | "statuses_url": "https://api.github.com/some/status", 274 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 275 | }, 276 | } 277 | await prtype.classify_by_filepaths(gh, event_data["pull_request"], filenames) 278 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 279 | # No labels if a file other than doc or test exists. 
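# Here README is that other file, so nothing is posted even though docs,
# tests, and a news entry are all present.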
280 | assert len(gh.post_url) == 0 281 | -------------------------------------------------------------------------------- /tests/test_util.py: -------------------------------------------------------------------------------- 1 | import http 2 | from unittest.mock import patch 3 | 4 | import gidgethub 5 | import pytest 6 | 7 | from bedevere import util 8 | 9 | from .test_stage import FakeGH 10 | 11 | 12 | def test_StatusState(): 13 | assert util.StatusState.SUCCESS.value == "success" 14 | assert util.StatusState.ERROR.value == "error" 15 | assert util.StatusState.FAILURE.value == "failure" 16 | 17 | 18 | class TestCreateStatus: 19 | def test_simple_case(self): 20 | expected = {"state": "success", "context": "me"} 21 | assert util.create_status("me", util.StatusState.SUCCESS) == expected 22 | 23 | def test_with_description(self): 24 | expected = {"state": "error", "context": "me", "description": "desc"} 25 | status = util.create_status("me", util.StatusState.ERROR, description="desc") 26 | assert status == expected 27 | 28 | def test_with_target_url(self): 29 | expected = { 30 | "state": "failure", 31 | "context": "me", 32 | "target_url": "https://devguide.python.org", 33 | } 34 | status = util.create_status( 35 | "me", util.StatusState.FAILURE, target_url="https://devguide.python.org" 36 | ) 37 | assert status == expected 38 | 39 | def test_with_everything(self): 40 | expected = { 41 | "state": "failure", 42 | "context": "me", 43 | "description": "desc", 44 | "target_url": "https://devguide.python.org", 45 | } 46 | status = util.create_status( 47 | "me", 48 | util.StatusState.FAILURE, 49 | description="desc", 50 | target_url="https://devguide.python.org", 51 | ) 52 | assert status == expected 53 | 54 | 55 | def test_skip(): 56 | issue = {"labels": [{"name": "CLA signed"}, {"name": "skip something"}]} 57 | assert util.skip("something", issue) 58 | 59 | issue = {"labels": [{"name": "CLA signed"}]} 60 | assert not util.skip("something", issue) 61 | 62 | 63 | async def test_is_core_dev(): 64 | teams = [{"name": "not Python core"}] 65 | gh = FakeGH(getiter={"https://api.github.com/orgs/python/teams": teams}) 66 | with pytest.raises(ValueError): 67 | await util.is_core_dev(gh, "brett") 68 | 69 | teams = [{"name": "Python core", "id": 42}] 70 | getitem = {"https://api.github.com/teams/42/memberships/brett": True} 71 | gh = FakeGH( 72 | getiter={"https://api.github.com/orgs/python/teams": teams}, getitem=getitem 73 | ) 74 | assert await util.is_core_dev(gh, "brett") 75 | assert gh.getiter_url == "https://api.github.com/orgs/python/teams" 76 | 77 | teams = [{"name": "Python core", "id": 42}] 78 | getitem = { 79 | "https://api.github.com/teams/42/memberships/andrea": gidgethub.BadRequest( 80 | status_code=http.HTTPStatus(404) 81 | ) 82 | } 83 | gh = FakeGH( 84 | getiter={"https://api.github.com/orgs/python/teams": teams}, getitem=getitem 85 | ) 86 | assert not await util.is_core_dev(gh, "andrea") 87 | 88 | teams = [{"name": "Python core", "id": 42}] 89 | getitem = { 90 | "https://api.github.com/teams/42/memberships/andrea": gidgethub.BadRequest( 91 | status_code=http.HTTPStatus(400) 92 | ) 93 | } 94 | gh = FakeGH( 95 | getiter={"https://api.github.com/orgs/python/teams": teams}, getitem=getitem 96 | ) 97 | with pytest.raises(gidgethub.BadRequest): 98 | await util.is_core_dev(gh, "andrea") 99 | 100 | 101 | async def test_is_core_dev_resource_not_accessible(): 102 | gh = FakeGH( 103 | getiter={ 104 | "https://api.github.com/orgs/python/teams": [ 105 | 
gidgethub.BadRequest(status_code=http.HTTPStatus(403)) 106 | ] 107 | } 108 | ) 109 | assert await util.is_core_dev(gh, "mariatta") is False 110 | 111 | 112 | def test_title_normalization(): 113 | title = "abcd" 114 | body = "1234" 115 | assert util.normalize_title(title, body) == title 116 | 117 | title = "[2.7] bpo-29243: Fix Makefile with respect to --enable-optimizations …" 118 | body = "…(GH-1478)\r\n\r\nstuff" 119 | expected = ( 120 | "[2.7] bpo-29243: Fix Makefile with respect to --enable-optimizations (GH-1478)" 121 | ) 122 | assert util.normalize_title(title, body) == expected 123 | 124 | title = "[2.7] bpo-29243: Fix Makefile with respect to --enable-optimizations …" 125 | body = "…(GH-1478)" 126 | assert util.normalize_title(title, body) == expected 127 | 128 | title = ( 129 | "[2.7] bpo-29243: Fix Makefile with respect to --enable-optimizations (GH-14…" 130 | ) 131 | body = "…78)" 132 | assert util.normalize_title(title, body) == expected 133 | 134 | 135 | async def test_get_pr_for_commit(): 136 | sha = "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9" 137 | gh = FakeGH( 138 | getitem={ 139 | f"https://api.github.com/search/issues?q=type:pr+repo:python/cpython+sha:{sha}": { 140 | "total_count": 1, 141 | "items": [ 142 | { 143 | "number": 5547, 144 | "title": "[3.6] bpo-32720: Fixed the replacement field grammar documentation. (GH-5544)", 145 | "body": "\n\n`arg_name` and `element_index` are defined as `digit`+ instead of `integer`.\n(cherry picked from commit 7a561afd2c79f63a6008843b83733911d07f0119)\n\nCo-authored-by: Mariatta ", 146 | } 147 | ], 148 | } 149 | } 150 | ) 151 | result = await util.get_pr_for_commit(gh, sha) 152 | assert result == { 153 | "number": 5547, 154 | "title": "[3.6] bpo-32720: Fixed the replacement field grammar documentation. (GH-5544)", 155 | "body": "\n\n`arg_name` and `element_index` are defined as `digit`+ instead of `integer`.\n(cherry picked from commit 7a561afd2c79f63a6008843b83733911d07f0119)\n\nCo-authored-by: Mariatta ", 156 | } 157 | 158 | 159 | async def test_get_pr_for_commit_not_found(): 160 | sha = "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9" 161 | gh = FakeGH( 162 | getitem={ 163 | f"https://api.github.com/search/issues?q=type:pr+repo:python/cpython+sha:{sha}": { 164 | "total_count": 0, 165 | "items": [], 166 | } 167 | } 168 | ) 169 | result = await util.get_pr_for_commit(gh, sha) 170 | 171 | assert result is None 172 | 173 | 174 | async def test_patch_body_adds_issue_if_not_present(): 175 | """Updates the description of a PR/Issue with the gh issue/pr number if it exists. 
176 | 177 | returns if body exists with issue/pr number 178 | """ 179 | sha = "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9" 180 | gh = FakeGH( 181 | getitem={ 182 | f"https://api.github.com/search/issues?q=type:pr+repo:python/cpython+sha:{sha}": { 183 | "total_count": 0, 184 | "items": [], 185 | } 186 | } 187 | ) 188 | vals = {} 189 | vals["url"] = "https://fake.com" 190 | vals["body"] = "GH-1234\n" 191 | 192 | with patch.object(gh, "patch") as mock: 193 | await util.patch_body(gh, util.PR, vals, 1234) 194 | mock.assert_not_called() 195 | with patch.object(gh, "patch") as mock: 196 | vals["body"] = "Multiple\nlines\nwith gh-1234 in some prose" 197 | await util.patch_body(gh, util.PR, vals, 1234) 198 | mock.assert_not_called() 199 | with patch.object(gh, "patch") as mock: 200 | vals["body"] = "#1234 in some prose" 201 | await util.patch_body(gh, util.PR, vals, 1234) 202 | mock.assert_not_called() 203 | with patch.object(gh, "patch") as mock: 204 | vals["body"] = "Some prose mentioning gh-12345 but not our issue" 205 | await util.patch_body(gh, util.PR, vals, 1234) 206 | mock.assert_called_once() 207 | with patch.object(gh, "patch") as mock: 208 | vals["body"] = None 209 | await util.patch_body(gh, util.PR, vals, 1234) 210 | mock.assert_called_once() 211 | with patch.object(gh, "patch") as mock: 212 | vals["body"] = "" 213 | await util.patch_body(gh, util.PR, vals, 1234) 214 | data = { 215 | "body": "\n\n\n* Issue: gh-1234\n\n" 216 | } 217 | mock.assert_called_once_with("https://fake.com", data=data) 218 | assert await gh.patch(vals["url"], data=vals) == None 219 | 220 | 221 | async def test_patch_body_adds_pr_if_not_present(): 222 | """Updates the description of a PR/Issue with the gh issue/pr number if it exists. 223 | 224 | returns if body exists with issue/pr number 225 | """ 226 | sha = "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9" 227 | gh = FakeGH( 228 | getitem={ 229 | f"https://api.github.com/search/issues?q=type:pr+repo:python/cpython+sha:{sha}": { 230 | "total_count": 0, 231 | "items": [], 232 | } 233 | } 234 | ) 235 | vals = {} 236 | vals["url"] = "https://fake.com" 237 | vals["body"] = "GH-1234\n" 238 | 239 | with patch.object(gh, "patch") as mock: 240 | await util.patch_body(gh, util.ISSUE, vals, 1234) 241 | mock.assert_not_called() 242 | with patch.object(gh, "patch") as mock: 243 | vals["body"] = "Multiple\nlines\nwith gh-1234 in some prose" 244 | await util.patch_body(gh, util.ISSUE, vals, 1234) 245 | mock.assert_not_called() 246 | with patch.object(gh, "patch") as mock: 247 | vals["body"] = "#1234 in some prose" 248 | await util.patch_body(gh, util.ISSUE, vals, 1234) 249 | mock.assert_not_called() 250 | with patch.object(gh, "patch") as mock: 251 | vals["body"] = "Some prose mentioning gh-12345 but not our issue" 252 | await util.patch_body(gh, util.ISSUE, vals, 1234) 253 | mock.assert_called_once() 254 | with patch.object(gh, "patch") as mock: 255 | vals["body"] = None 256 | await util.patch_body(gh, util.ISSUE, vals, 1234) 257 | mock.assert_called_once() 258 | with patch.object(gh, "patch") as mock: 259 | vals["body"] = "" 260 | await util.patch_body(gh, util.ISSUE, vals, 1234) 261 | data = { 262 | "body": ( 263 | "\n\n\n" 264 | "### Linked PRs\n* gh-1234\n" 265 | "\n" 266 | ) 267 | } 268 | mock.assert_called_once_with("https://fake.com", data=data) 269 | with patch.object(gh, "patch") as mock: 270 | vals["body"] = ( 271 | "\n\n\n" 272 | "### Linked PRs\n* gh-1234\n" 273 | "\n" 274 | ) 275 | await util.patch_body(gh, util.ISSUE, vals, 54321) 276 | data = { 277 | "body": ( 278 | 
"\n\n\n" 279 | "### Linked PRs\n* gh-1234\n* gh-54321\n" 280 | "\n" 281 | ) 282 | } 283 | mock.assert_called_once_with("https://fake.com", data=data) 284 | assert await gh.patch(vals["url"], data=vals) == None 285 | 286 | 287 | async def test_patch_body_adds_pr_to_legacy_issue_body(): 288 | """Updates the description of a PR/Issue with the gh issue/pr number if it exists. 289 | 290 | returns if body exists with issue/pr number 291 | """ 292 | sha = "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9" 293 | gh = FakeGH( 294 | getitem={ 295 | f"https://api.github.com/search/issues?q=type:pr+repo:python/cpython+sha:{sha}": { 296 | "total_count": 0, 297 | "items": [], 298 | } 299 | } 300 | ) 301 | vals = {} 302 | vals["url"] = "https://fake.com" 303 | vals["body"] = "GH-1234\n" 304 | 305 | with patch.object(gh, "patch") as mock: 306 | vals["body"] = ( 307 | "\n" 308 | "* PR: gh-103\n" 309 | "\n" 310 | ) 311 | await util.patch_body(gh, util.ISSUE, vals, 54321) 312 | data = { 313 | "body": ( 314 | "\n" 315 | "* PR: gh-103\n" 316 | "\n" 317 | "\n\n\n" 318 | "* PR: gh-54321\n" 319 | "\n" 320 | ) 321 | } 322 | mock.assert_called_once_with("https://fake.com", data=data) 323 | assert await gh.patch(vals["url"], data=vals) == None 324 | -------------------------------------------------------------------------------- /tests/test_news.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from gidgethub import sansio 3 | 4 | from bedevere import news 5 | 6 | 7 | def check_n_pop_nonews_events(gh, expect_help): 8 | if expect_help: 9 | assert ( 10 | gh.post_url.pop(0) 11 | == "https://api.github.com/repos/cpython/python/issue/1234/comments" 12 | ) 13 | assert gh.post_data.pop(0)["body"] == news.HELP 14 | assert gh.post_url.pop(0) == "https://api.github.com/some/status" 15 | post_data = gh.post_data.pop(0) 16 | assert post_data["state"] == "failure" 17 | assert post_data["target_url"] == news.BLURB_IT_URL 18 | 19 | 20 | class FakeGH: 21 | def __init__(self, *, getiter=None, getitem=None, post=None): 22 | self._getitem_return = getitem 23 | self._getiter_return = getiter 24 | self._post_return = post 25 | self.getitem_url = None 26 | self.getiter_url = None 27 | self.post_url = [] 28 | self.post_data = [] 29 | 30 | async def getitem(self, url): 31 | self.getitem_url = url 32 | return self._getitem_return 33 | 34 | async def getiter(self, url): 35 | self.getiter_url = url 36 | for item in self._getiter_return: 37 | yield item 38 | 39 | async def post(self, url, *, data): 40 | self.post_url.append(url) 41 | self.post_data.append(data) 42 | return self._post_return 43 | 44 | 45 | GOOD_BASENAME = "2017-06-16-20-32-50.gh-issue-1234.nonce.rst" 46 | BPO_BASENAME = "2017-06-16-20-32-50.bpo-1234.nonce.rst" 47 | 48 | 49 | class TestFilenameRE: 50 | def test_malformed_basename(self): 51 | assert news.FILENAME_RE.match("2017-06-16.gh-issue-1234.rst") is None 52 | assert news.FILENAME_RE.match("2017-06-16.gh-1234.rst") is None 53 | 54 | def test_success(self): 55 | assert news.FILENAME_RE.match(GOOD_BASENAME) 56 | live_result = "2017-08-14-15-13-50.gh-issue-1612262.-x_Oyq.rst" 57 | assert news.FILENAME_RE.match(live_result) 58 | 59 | def test_multiple_issue_numbers(self): 60 | basename = "2018-01-01.gh-issue-1234,5678,9012.nonce.rst" 61 | assert news.FILENAME_RE.match(basename) 62 | 63 | def test_date_only(self): 64 | basename = "2017-08-14.gh-issue-1234.nonce.rst" 65 | assert news.FILENAME_RE.match(basename) 66 | 67 | def test_malformed_basename_bpo(self): 68 | assert 
news.FILENAME_RE.match("2017-06-16.bpo-1234.rst") is None 69 | 70 | def test_success_bpo(self): 71 | assert news.FILENAME_RE.match(BPO_BASENAME) 72 | live_result = "2017-08-14-15-13-50.bpo-1612262.-x_Oyq.rst" 73 | assert news.FILENAME_RE.match(live_result) 74 | 75 | def test_multiple_issue_numbers_bpo(self): 76 | basename = "2018-01-01.bpo-1234,5678,9012.nonce.rst" 77 | assert news.FILENAME_RE.match(basename) 78 | 79 | def test_date_only_bpo(self): 80 | basename = "2017-08-14.bpo-1234.nonce.rst" 81 | assert news.FILENAME_RE.match(basename) 82 | 83 | 84 | async def failure_testing(path, action, author_association): 85 | files = [ 86 | { 87 | "filename": "README", 88 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 89 | }, 90 | { 91 | "filename": path, 92 | "patch": "@@ -0,0 +1 @@ +Fix inspect.getsourcelines for module level frames/tracebacks", 93 | }, 94 | ] 95 | issue = {"labels": []} 96 | gh = FakeGH(getiter=files, getitem=issue) 97 | event_data = { 98 | "action": action, 99 | "number": 1234, 100 | "pull_request": { 101 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 102 | "statuses_url": "https://api.github.com/some/status", 103 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 104 | "issue_comment_url": "https://api.github.com/repos/cpython/python/issue/1234/comments", 105 | "author_association": author_association, 106 | }, 107 | } 108 | await news.check_news(gh, event_data["pull_request"]) 109 | assert ( 110 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 111 | ) 112 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 113 | check_n_pop_nonews_events(gh, author_association == "NONE") 114 | assert not gh.post_url 115 | 116 | 117 | @pytest.mark.parametrize("action", ["opened", "reopened", "synchronize"]) 118 | @pytest.mark.parametrize( 119 | "author_association", ["OWNER", "MEMBER", "CONTRIBUTOR", "NONE"] 120 | ) 121 | async def test_bad_news_entry(action, author_association): 122 | # Not in Misc/NEWS.d. 123 | await failure_testing(f"some/other/dir/{GOOD_BASENAME}", action, author_association) 124 | # Not in next/. 125 | await failure_testing(f"Misc/NEWS.d/{GOOD_BASENAME}", action, author_association) 126 | # Not in a classifying subdirectory. 127 | await failure_testing( 128 | f"Misc/NEWS.d/next/{GOOD_BASENAME}", action, author_association 129 | ) 130 | # Missing the nonce. 
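# (a well-formed name also carries a nonce, e.g. 2017-06-16-20-32-50.bpo-1234.nonce.rst)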
131 | await failure_testing( 132 | f"Misc/NEWS.d/next/Library/2017-06-16.bpo-1234.rst", action, author_association 133 | ) 134 | 135 | 136 | @pytest.mark.parametrize("action", ["opened", "reopened", "synchronize"]) 137 | async def test_skip_news(action): 138 | files = [ 139 | { 140 | "filename": "README", 141 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 142 | }, 143 | { 144 | "filename": f"Misc/NEWS.d/next/{GOOD_BASENAME}", 145 | "patch": "@@ -0,0 +1 @@ +Fix inspect.getsourcelines for module level frames/tracebacks", 146 | }, 147 | ] 148 | issue = {"labels": [{"name": "skip news"}]} 149 | gh = FakeGH(getiter=files, getitem=issue) 150 | event_data = { 151 | "action": action, 152 | "number": 1234, 153 | "pull_request": { 154 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 155 | "statuses_url": "https://api.github.com/some/status", 156 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 157 | }, 158 | } 159 | await news.check_news(gh, event_data["pull_request"]) 160 | assert ( 161 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 162 | ) 163 | assert gh.getitem_url == "https://api.github.com/repos/cpython/python/issue/1234" 164 | assert gh.post_url[0] == "https://api.github.com/some/status" 165 | assert gh.post_data[0]["state"] == "success" 166 | assert gh.post_data[0].get("target_url") is None 167 | 168 | 169 | @pytest.mark.parametrize("action", ["opened", "reopened", "synchronize"]) 170 | async def test_news_file(action): 171 | files = [ 172 | { 173 | "filename": "README", 174 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 175 | }, 176 | { 177 | "filename": f"Misc/NEWS.d/next/Library/{GOOD_BASENAME}", 178 | "patch": "@@ -0,0 +1 @@ +Fix inspect.getsourcelines for module level frames/tracebacks", 179 | }, 180 | ] 181 | issue = {"labels": [{"name": "CLA signed"}]} 182 | gh = FakeGH(getiter=files, getitem=issue) 183 | event_data = { 184 | "action": action, 185 | "number": 1234, 186 | "pull_request": { 187 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 188 | "statuses_url": "https://api.github.com/some/status", 189 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 190 | }, 191 | } 192 | await news.check_news(gh, event_data["pull_request"]) 193 | assert ( 194 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 195 | ) 196 | assert gh.post_url[0] == "https://api.github.com/some/status" 197 | assert gh.post_data[0]["state"] == "success" 198 | assert gh.post_data[0].get("target_url") is None 199 | 200 | 201 | @pytest.mark.parametrize("action", ["opened", "reopened", "synchronize"]) 202 | @pytest.mark.parametrize( 203 | "author_association", ["OWNER", "MEMBER", "CONTRIBUTOR", "NONE"] 204 | ) 205 | async def test_empty_news_file(action, author_association): 206 | files = [ 207 | { 208 | "filename": "README", 209 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 210 | }, 211 | {"filename": f"Misc/NEWS.d/next/Library/{GOOD_BASENAME}"}, 212 | ] 213 | issue = {"labels": [{"name": "CLA signed"}]} 214 | gh = FakeGH(getiter=files, getitem=issue) 215 | event_data = { 216 | "action": action, 217 | "number": 1234, 218 | "pull_request": { 219 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 220 | "statuses_url": "https://api.github.com/some/status", 221 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 222 | "issue_comment_url": 
"https://api.github.com/repos/cpython/python/issue/1234/comments", 223 | "author_association": author_association, 224 | }, 225 | } 226 | await news.check_news(gh, event_data["pull_request"]) 227 | assert ( 228 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 229 | ) 230 | check_n_pop_nonews_events(gh, author_association == "NONE") 231 | assert not gh.post_url 232 | 233 | 234 | @pytest.mark.parametrize("action", ["opened", "reopened", "synchronize"]) 235 | async def test_news_file_not_empty(action): 236 | files = [ 237 | { 238 | "filename": "README", 239 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 240 | }, 241 | { 242 | "filename": f"Misc/NEWS.d/next/Library/{GOOD_BASENAME}", 243 | "patch": "@@ -0,0 +1 @@ +A", 244 | }, 245 | ] 246 | issue = {"labels": [{"name": "CLA signed"}]} 247 | gh = FakeGH(getiter=files, getitem=issue) 248 | event_data = { 249 | "action": action, 250 | "number": 1234, 251 | "pull_request": { 252 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 253 | "statuses_url": "https://api.github.com/some/status", 254 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 255 | }, 256 | } 257 | await news.check_news(gh, event_data["pull_request"]) 258 | assert ( 259 | gh.getiter_url == "https://api.github.com/repos/cpython/python/pulls/1234/files" 260 | ) 261 | assert gh.post_url[0] == "https://api.github.com/some/status" 262 | assert gh.post_data[0]["state"] == "success" 263 | assert gh.post_data[0].get("target_url") is None 264 | 265 | 266 | async def test_adding_skip_news_label(): 267 | gh = FakeGH() 268 | event_data = { 269 | "action": "labeled", 270 | "label": {"name": news.SKIP_NEWS_LABEL}, 271 | "pull_request": { 272 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 273 | "title": "An easy fix", 274 | }, 275 | } 276 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 277 | await news.router.dispatch(event, gh) 278 | assert gh.post_data[0]["state"] == "success" 279 | 280 | 281 | async def test_adding_benign_label(): 282 | gh = FakeGH() 283 | event_data = { 284 | "action": "labeled", 285 | "label": {"name": "unimportant"}, 286 | "pull_request": { 287 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 288 | "title": "An easy fix", 289 | }, 290 | } 291 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 292 | await news.router.dispatch(event, gh) 293 | assert len(gh.post_data) == 0 294 | 295 | 296 | async def test_deleting_label(): 297 | gh = FakeGH() 298 | event_data = { 299 | "action": "unlabeled", 300 | "pull_request": { 301 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 302 | "title": "An easy fix", 303 | }, 304 | } 305 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 306 | await news.router.dispatch(event, gh) 307 | assert len(gh.post_data) == 0 308 | 309 | 310 | @pytest.mark.parametrize( 311 | "author_association", ["OWNER", "MEMBER", "CONTRIBUTOR", "NONE"] 312 | ) 313 | async def test_removing_skip_news_label(author_association): 314 | files = [ 315 | { 316 | "filename": "README", 317 | "patch": "@@ -31,3 +31,7 @@ # Licensed to PSF under a Contributor Agreement.", 318 | }, 319 | { 320 | "filename": f"Misc/NEWS.d/next/{GOOD_BASENAME}", 321 | "patch": "@@ -0,0 +1 @@ +Fix inspect.getsourcelines for module level frames/tracebacks", 322 | }, 323 | ] 324 | issue = {"labels": []} 325 | gh = FakeGH(getiter=files, getitem=issue) 326 | event_data = { 327 | "action": 
"unlabeled", 328 | "label": {"name": news.SKIP_NEWS_LABEL}, 329 | "number": 1234, 330 | "pull_request": { 331 | "url": "https://api.github.com/repos/cpython/python/pulls/1234", 332 | "title": "An easy fix", 333 | "statuses_url": "https://api.github.com/some/status", 334 | "issue_url": "https://api.github.com/repos/cpython/python/issue/1234", 335 | "issue_comment_url": "https://api.github.com/repos/cpython/python/issue/1234/comments", 336 | "author_association": author_association, 337 | }, 338 | } 339 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 340 | await news.router.dispatch(event, gh) 341 | check_n_pop_nonews_events(gh, author_association == "NONE") 342 | assert not gh.post_url 343 | 344 | 345 | async def test_removing_benign_label(): 346 | gh = FakeGH() 347 | event_data = { 348 | "action": "unlabeled", 349 | "label": {"name": "unimportant"}, 350 | "pull_request": { 351 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 352 | "title": "An easy fix", 353 | }, 354 | } 355 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 356 | await news.router.dispatch(event, gh) 357 | assert len(gh.post_data) == 0 358 | -------------------------------------------------------------------------------- /tests/test_backport.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from gidgethub import sansio 3 | 4 | from bedevere import backport 5 | 6 | 7 | class FakeGH: 8 | def __init__(self, *, getitem=None, delete=None, post=None): 9 | self._getitem_return = getitem 10 | self._delete_return = delete 11 | self._post_return = post 12 | self.getitem_url = None 13 | self.delete_url = None 14 | self.post_ = [] 15 | 16 | async def getitem(self, url, url_vars={}): 17 | self.getitem_url = sansio.format_url(url, url_vars) 18 | return self._getitem_return[self.getitem_url] 19 | 20 | async def delete(self, url, url_vars): 21 | self.delete_url = sansio.format_url(url, url_vars) 22 | 23 | async def post(self, url, url_vars={}, *, data): 24 | post_url = sansio.format_url(url, url_vars) 25 | self.post_.append((post_url, data)) 26 | 27 | 28 | @pytest.fixture(params=["gh", "GH"]) 29 | def pr_prefix(request): 30 | return request.param 31 | 32 | 33 | async def test_edit_not_title(): 34 | data = { 35 | "action": "edited", 36 | "pull_request": {"title": "Backport this (GH-1234)", "body": ""}, 37 | "changes": {}, 38 | } 39 | event = sansio.Event(data, event="pull_request", delivery_id="1") 40 | gh = FakeGH() 41 | await backport.router.dispatch(event, gh) 42 | assert gh.getitem_url is None 43 | 44 | 45 | async def test_missing_branch_in_title(): 46 | data = { 47 | "action": "opened", 48 | "pull_request": { 49 | "title": "Backport this (GH-1234)", 50 | "body": "", 51 | "base": { 52 | "ref": "3.6", 53 | }, 54 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 55 | }, 56 | } 57 | event = sansio.Event(data, event="pull_request", delivery_id="1") 58 | gh = FakeGH() 59 | await backport.router.dispatch(event, gh) 60 | assert gh.getitem_url is None 61 | 62 | 63 | async def test_missing_pr_in_title(): 64 | data = { 65 | "action": "opened", 66 | "pull_request": { 67 | "title": "[3.6] Backport this", 68 | "body": "", 69 | "base": { 70 | "ref": "3.6", 71 | }, 72 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 73 | }, 74 | } 75 | event = sansio.Event(data, event="pull_request", delivery_id="1") 76 | gh = FakeGH() 77 | await backport.router.dispatch(event, gh) 
78 | assert gh.getitem_url is None 79 | 80 | 81 | async def test_missing_backport_label(): 82 | title = "[3.6] Backport this (GH-1234)" 83 | data = { 84 | "action": "opened", 85 | "number": 2248, 86 | "pull_request": { 87 | "title": title, 88 | "body": "", 89 | "issue_url": "https://api.github.com/issue/2248", 90 | "base": { 91 | "ref": "3.6", 92 | }, 93 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 94 | }, 95 | "repository": {"issues_url": "https://api.github.com/issue{/number}"}, 96 | } 97 | event = sansio.Event(data, event="pull_request", delivery_id="1") 98 | getitem = { 99 | "https://api.github.com/issue/1234": { 100 | "labels": [{"name": "CLA signed"}], 101 | "comments_url": "https://api.github.com/issue/1234/comments", 102 | }, 103 | "https://api.github.com/issue/2248": {}, 104 | } 105 | gh = FakeGH(getitem=getitem) 106 | await backport.router.dispatch(event, gh) 107 | assert gh.delete_url is None 108 | 109 | 110 | async def test_backport_label_removal_success(pr_prefix): 111 | event_data = { 112 | "action": "opened", 113 | "number": 2248, 114 | "pull_request": { 115 | "title": "[3.6] Backport this …", 116 | "body": f"…({pr_prefix}-1234)", 117 | "issue_url": "https://api.github.com/issue/2248", 118 | "base": { 119 | "ref": "3.6", 120 | }, 121 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 122 | }, 123 | "repository": { 124 | "issues_url": "https://api.github.com/issue{/number}", 125 | }, 126 | } 127 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 128 | getitem_data = { 129 | "https://api.github.com/issue/1234": { 130 | "labels": [{"name": "needs backport to 3.6"}], 131 | "labels_url": "https://api.github.com/issue/1234/labels{/name}", 132 | "comments_url": "https://api.github.com/issue/1234/comments", 133 | }, 134 | "https://api.github.com/issue/2248": {}, 135 | } 136 | gh = FakeGH(getitem=getitem_data) 137 | await backport.router.dispatch(event, gh) 138 | issue_data = getitem_data["https://api.github.com/issue/1234"] 139 | assert gh.delete_url == sansio.format_url( 140 | issue_data["labels_url"], {"name": "needs backport to 3.6"} 141 | ) 142 | assert len(gh.post_) > 0 143 | expected_post = None 144 | for post in gh.post_: 145 | if post[0] == issue_data["comments_url"]: 146 | expected_post = post 147 | message = post[1]["body"] 148 | assert message == backport.MESSAGE_TEMPLATE.format(branch="3.6", pr="2248") 149 | 150 | assert expected_post is not None 151 | 152 | 153 | async def test_backport_link_comment_without_label(pr_prefix): 154 | event_data = { 155 | "action": "opened", 156 | "number": 2248, 157 | "pull_request": { 158 | "title": f"[3.6] Backport this ({pr_prefix}-1234)", 159 | "body": "", 160 | "issue_url": "https://api.github.com/issue/2248", 161 | "base": { 162 | "ref": "3.6", 163 | }, 164 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 165 | }, 166 | "repository": { 167 | "issues_url": "https://api.github.com/issue{/number}", 168 | }, 169 | } 170 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 171 | getitem_data = { 172 | "https://api.github.com/issue/1234": { 173 | "labels": [], 174 | "comments_url": "https://api.github.com/issue/1234/comments", 175 | }, 176 | "https://api.github.com/issue/2248": {}, 177 | } 178 | gh = FakeGH(getitem=getitem_data) 179 | await backport.router.dispatch(event, gh) 180 | issue_data = getitem_data["https://api.github.com/issue/1234"] 181 | assert gh.delete_url is None 182 | assert 
len(gh.post_) > 0 183 | expected_post = None 184 | for post in gh.post_: 185 | if post[0] == issue_data["comments_url"]: 186 | expected_post = post 187 | message = post[1]["body"] 188 | assert message == backport.MESSAGE_TEMPLATE.format(branch="3.6", pr="2248") 189 | 190 | assert expected_post is not None 191 | 192 | 193 | async def test_backport_label_removal_with_leading_space_in_title(pr_prefix): 194 | event_data = { 195 | "action": "opened", 196 | "number": 2248, 197 | "pull_request": { 198 | "title": f" [3.6] Backport this ({pr_prefix}-1234)", 199 | "body": "…", 200 | "issue_url": "https://api.github.com/issue/2248", 201 | "base": { 202 | "ref": "3.6", 203 | }, 204 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 205 | }, 206 | "repository": { 207 | "issues_url": "https://api.github.com/issue{/number}", 208 | }, 209 | } 210 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 211 | getitem_data = { 212 | "https://api.github.com/issue/1234": { 213 | "labels": [{"name": "needs backport to 3.6"}], 214 | "labels_url": "https://api.github.com/issue/1234/labels{/name}", 215 | "comments_url": "https://api.github.com/issue/1234/comments", 216 | }, 217 | "https://api.github.com/issue/2248": {}, 218 | } 219 | gh = FakeGH(getitem=getitem_data) 220 | await backport.router.dispatch(event, gh) 221 | issue_data = getitem_data["https://api.github.com/issue/1234"] 222 | assert gh.delete_url == sansio.format_url( 223 | issue_data["labels_url"], {"name": "needs backport to 3.6"} 224 | ) 225 | 226 | 227 | async def test_backport_label_removal_with_parentheses_in_title(pr_prefix): 228 | event_data = { 229 | "action": "opened", 230 | "number": 2248, 231 | "pull_request": { 232 | "title": f"[3.6] Backport (0.9.6) this (more bpo-1234) ({pr_prefix}-1234)", 233 | "body": "…", 234 | "issue_url": "https://api.github.com/issue/2248", 235 | "base": { 236 | "ref": "3.6", 237 | }, 238 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 239 | }, 240 | "repository": { 241 | "issues_url": "https://api.github.com/issue{/number}", 242 | }, 243 | } 244 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 245 | getitem_data = { 246 | "https://api.github.com/issue/1234": { 247 | "labels": [{"name": "needs backport to 3.6"}], 248 | "labels_url": "https://api.github.com/issue/1234/labels{/name}", 249 | "comments_url": "https://api.github.com/issue/1234/comments", 250 | }, 251 | "https://api.github.com/issue/2248": {}, 252 | } 253 | gh = FakeGH(getitem=getitem_data) 254 | await backport.router.dispatch(event, gh) 255 | issue_data = getitem_data["https://api.github.com/issue/1234"] 256 | assert gh.delete_url == sansio.format_url( 257 | issue_data["labels_url"], {"name": "needs backport to 3.6"} 258 | ) 259 | 260 | 261 | async def test_label_copying(pr_prefix): 262 | event_data = { 263 | "action": "opened", 264 | "number": 2248, 265 | "pull_request": { 266 | "title": f"[3.6] Backport this ({pr_prefix}-1234)", 267 | "body": "N/A", 268 | "issue_url": "https://api.github.com/issue/2248", 269 | "base": { 270 | "ref": "3.6", 271 | }, 272 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 273 | }, 274 | "repository": { 275 | "issues_url": "https://api.github.com/issue{/number}", 276 | }, 277 | } 278 | event = sansio.Event(event_data, event="pull_request", delivery_id="1") 279 | labels_to_test = ( 280 | "CLA signed", 281 | "skip news", 282 | "type-enhancement", 283 | "sprint", 284 | "topic-repl", 
285 | ) 286 | getitem_data = { 287 | "https://api.github.com/issue/1234": { 288 | "labels": [{"name": label} for label in labels_to_test], 289 | "labels_url": "https://api.github.com/issue/1234/labels{/name}", 290 | "comments_url": "https://api.github.com/issue/1234/comments", 291 | }, 292 | "https://api.github.com/issue/2248": { 293 | "labels_url": "https://api.github.com/issue/1234/labels{/name}", 294 | }, 295 | } 296 | gh = FakeGH(getitem=getitem_data) 297 | await backport.router.dispatch(event, gh) 298 | assert len(gh.post_) > 0 299 | expected_post = None 300 | for post in gh.post_: 301 | if post[0] == "https://api.github.com/issue/1234/labels": 302 | assert { 303 | "skip news", 304 | "type-enhancement", 305 | "sprint", 306 | "topic-repl", 307 | } == frozenset(post[1]) 308 | expected_post = post 309 | 310 | assert expected_post is not None 311 | 312 | 313 | @pytest.mark.parametrize("action", ["opened", "reopened", "edited", "synchronize"]) 314 | async def test_valid_maintenance_branch_pr_title(action): 315 | title = "[3.6] Fix to a maintenance branch" 316 | data = { 317 | "action": action, 318 | "number": 2248, 319 | "pull_request": { 320 | "title": title, 321 | "body": "", 322 | "issue_url": "https://api.github.com/issue/2248", 323 | "base": { 324 | "ref": "3.6", 325 | }, 326 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 327 | }, 328 | "repository": {"issues_url": "https://api.github.com/issue{/number}"}, 329 | "changes": {"title": title}, 330 | } 331 | event = sansio.Event(data, event="pull_request", delivery_id="1") 332 | getitem = { 333 | "https://api.github.com/issue/1234": {"labels": [{"name": "CLA signed"}]}, 334 | "https://api.github.com/issue/2248": {}, 335 | } 336 | gh = FakeGH(getitem=getitem) 337 | await backport.router.dispatch(event, gh) 338 | post = gh.post_[0] 339 | assert post[0] == "https://api.github.com/repos/python/cpython/statuses/somehash" 340 | assert post[1]["context"] == "bedevere/maintenance-branch-pr" 341 | assert post[1]["description"] == "Valid maintenance branch PR title." 342 | assert post[1]["state"] == "success" 343 | 344 | 345 | @pytest.mark.parametrize("action", ["opened", "reopened", "edited", "synchronize"]) 346 | async def test_not_valid_maintenance_branch_pr_title(action): 347 | title = "Fix some typo" 348 | data = { 349 | "action": action, 350 | "number": 2248, 351 | "pull_request": { 352 | "title": title, 353 | "body": "", 354 | "issue_url": "https://api.github.com/issue/2248", 355 | "base": { 356 | "ref": "3.6", 357 | }, 358 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 359 | }, 360 | "repository": {"issues_url": "https://api.github.com/issue{/number}"}, 361 | "changes": {"title": title}, 362 | } 363 | event = sansio.Event(data, event="pull_request", delivery_id="1") 364 | getitem = { 365 | "https://api.github.com/issue/1234": {"labels": [{"name": "CLA signed"}]}, 366 | "https://api.github.com/issue/2248": {}, 367 | } 368 | gh = FakeGH(getitem=getitem) 369 | await backport.router.dispatch(event, gh) 370 | post = gh.post_[0] 371 | assert post[0] == "https://api.github.com/repos/python/cpython/statuses/somehash" 372 | assert post[1]["context"] == "bedevere/maintenance-branch-pr" 373 | assert post[1]["description"] == "Not a valid maintenance branch PR title." 
374 | assert post[1]["state"] == "failure" 375 | assert ( 376 | post[1]["target_url"] 377 | == "https://devguide.python.org/committing/#backport-pr-title" 378 | ) 379 | 380 | 381 | @pytest.mark.parametrize("action", ["opened", "reopened", "edited", "synchronize"]) 382 | async def test_maintenance_branch_pr_status_not_posted_on_main(action): 383 | title = "Fix some typo" 384 | data = { 385 | "action": action, 386 | "number": 2248, 387 | "pull_request": { 388 | "title": title, 389 | "body": "", 390 | "issue_url": "https://api.github.com/issue/2248", 391 | "base": { 392 | "ref": "main", 393 | }, 394 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 395 | }, 396 | "repository": {"issues_url": "https://api.github.com/issue{/number}"}, 397 | "changes": {"title": title}, 398 | } 399 | event = sansio.Event(data, event="pull_request", delivery_id="1") 400 | getitem = { 401 | "https://api.github.com/issue/1234": {"labels": [{"name": "CLA signed"}]}, 402 | "https://api.github.com/issue/2248": {}, 403 | } 404 | gh = FakeGH(getitem=getitem) 405 | await backport.router.dispatch(event, gh) 406 | assert len(gh.post_) == 0 407 | 408 | 409 | @pytest.mark.parametrize("action", ["opened", "reopened", "edited", "synchronize"]) 410 | async def test_not_maintenance_branch_pr_status_not_posted_alt_base(action): 411 | """ 412 | When a PR is proposed against a non-maintenance branch, such 413 | as another PR, it pass without status (same as with main). See 414 | #381 for a detailed justification. 415 | """ 416 | title = "Fix some typo" 417 | data = { 418 | "action": action, 419 | "number": 2248, 420 | "pull_request": { 421 | "title": title, 422 | "body": "", 423 | "issue_url": "https://api.github.com/issue/2248", 424 | "base": { 425 | "ref": "gh-1234/dependent-change", 426 | }, 427 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/somehash", 428 | }, 429 | "repository": {"issues_url": "https://api.github.com/issue{/number}"}, 430 | "changes": {"title": title}, 431 | } 432 | event = sansio.Event(data, event="pull_request", delivery_id="1") 433 | getitem = { 434 | "https://api.github.com/issue/1234": {"labels": [{"name": "CLA signed"}]}, 435 | "https://api.github.com/issue/2248": {}, 436 | } 437 | gh = FakeGH(getitem=getitem) 438 | await backport.router.dispatch(event, gh) 439 | assert not gh.post_ 440 | 441 | 442 | @pytest.mark.parametrize("ref", ["3.9", "4.0", "3.10"]) 443 | async def test_maintenance_branch_created(ref): 444 | event_data = { 445 | "ref": ref, 446 | "ref_type": "branch", 447 | } 448 | event = sansio.Event(event_data, event="create", delivery_id="1") 449 | gh = FakeGH() 450 | await backport.router.dispatch(event, gh) 451 | label_creation_post = gh.post_[0] 452 | assert ( 453 | label_creation_post[0] == "https://api.github.com/repos/python/cpython/labels" 454 | ) 455 | assert label_creation_post[1] == { 456 | "name": f"needs backport to {ref}", 457 | "color": "c2e0c6", 458 | "description": "bug and security fixes", 459 | } 460 | 461 | 462 | @pytest.mark.parametrize("ref", ["backport-3.9", "test", "Mariatta-patch-1"]) 463 | async def test_other_branch_created(ref): 464 | event_data = { 465 | "ref": ref, 466 | "ref_type": "branch", 467 | } 468 | event = sansio.Event(event_data, event="create", delivery_id="1") 469 | gh = FakeGH() 470 | await backport.router.dispatch(event, gh) 471 | assert gh.post_ == [] 472 | -------------------------------------------------------------------------------- /tests/test_gh_issue.py: 
-------------------------------------------------------------------------------- 1 | import http 2 | from unittest import mock 3 | 4 | import aiohttp 5 | import gidgethub 6 | import pytest 7 | from gidgethub import sansio 8 | 9 | from bedevere import gh_issue 10 | 11 | 12 | class FakeGH: 13 | def __init__(self, *, getitem=None, post=None, patch=None): 14 | self._getitem_return = getitem 15 | self._post_return = post 16 | self._patch_return = patch 17 | self.post_url = [] 18 | self.post_data = [] 19 | self.patch_url = [] 20 | self.patch_data = [] 21 | 22 | async def getitem(self, url): 23 | if isinstance(self._getitem_return, Exception): 24 | raise self._getitem_return 25 | return self._getitem_return 26 | 27 | async def post(self, url, *, data): 28 | self.post_url.append(url) 29 | self.post_data.append(data) 30 | return self._post_return 31 | 32 | async def patch(self, url, *, data): 33 | self.patch_url.append(url) 34 | self.patch_data.append(data) 35 | return self._patch_return 36 | 37 | 38 | @pytest.fixture 39 | async def issue_number(): 40 | return 1234 41 | 42 | 43 | @pytest.mark.asyncio 44 | @pytest.mark.parametrize("action", ["opened", "synchronize", "reopened"]) 45 | async def test_set_status_failure(action, monkeypatch): 46 | monkeypatch.setattr( 47 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 48 | ) 49 | data = { 50 | "action": action, 51 | "pull_request": { 52 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 53 | "title": "No issue in title", 54 | "issue_url": "issue URL", 55 | "url": "url", 56 | "number": 1234, 57 | }, 58 | } 59 | issue_data = { 60 | "url": "url", 61 | "labels": [ 62 | {"name": "non-trivial"}, 63 | ], 64 | } 65 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 66 | gh = FakeGH(getitem=issue_data) 67 | await gh_issue.router.dispatch(event, gh, session=None) 68 | status = gh.post_data[0] 69 | assert status["state"] == "failure" 70 | assert status["target_url"].startswith("https://devguide.python.org") 71 | assert status["context"] == "bedevere/issue-number" 72 | gh_issue._validate_issue_number.assert_not_awaited() 73 | 74 | 75 | @pytest.mark.asyncio 76 | @pytest.mark.parametrize("action", ["opened", "synchronize", "reopened"]) 77 | async def test_set_status_failure_via_issue_not_found_on_github(action, monkeypatch): 78 | monkeypatch.setattr( 79 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=False) 80 | ) 81 | 82 | data = { 83 | "action": action, 84 | "pull_request": { 85 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 86 | "title": "gh-123: Invalid issue number", 87 | "issue_url": "issue URL", 88 | "url": "url", 89 | "number": 1234, 90 | }, 91 | } 92 | issue_data = {"url": "url", "labels": []} 93 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 94 | gh = FakeGH(getitem=issue_data) 95 | async with aiohttp.ClientSession() as session: 96 | await gh_issue.router.dispatch(event, gh, session=session) 97 | status = gh.post_data[0] 98 | assert status["state"] == "failure" 99 | assert status["target_url"] == "https://github.com/python/cpython/issues/123" 100 | assert status["context"] == "bedevere/issue-number" 101 | assert status["description"] == "GH Issue #123 is not valid." 
102 | 103 | 104 | @pytest.mark.asyncio 105 | @pytest.mark.parametrize("action", ["opened", "synchronize", "reopened"]) 106 | async def test_set_status_success_issue_found_on_bpo(action): 107 | data = { 108 | "action": action, 109 | "pull_request": { 110 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 111 | "title": "bpo-12345: An issue on b.p.o", 112 | "issue_url": "issue URL", 113 | "url": "url", 114 | "number": 1234, 115 | }, 116 | } 117 | issue_data = {"url": "url", "labels": []} 118 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 119 | gh = FakeGH(getitem=issue_data) 120 | async with aiohttp.ClientSession() as session: 121 | await gh_issue.router.dispatch(event, gh, session=session) 122 | status = gh.post_data[0] 123 | assert status["state"] == "success" 124 | assert status["target_url"].endswith("bpo=12345") 125 | assert "12345" in status["description"] 126 | assert status["context"] == "bedevere/issue-number" 127 | assert "git-sha" in gh.post_url[0] 128 | 129 | assert len(gh.patch_data) == 0 130 | assert len(gh.patch_url) == 0 131 | 132 | 133 | @pytest.mark.asyncio 134 | @pytest.mark.parametrize("action", ["opened", "synchronize", "reopened"]) 135 | async def test_set_status_success(action, monkeypatch): 136 | monkeypatch.setattr( 137 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 138 | ) 139 | data = { 140 | "action": action, 141 | "pull_request": { 142 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 143 | "url": "", 144 | "title": "[3.6] gh-1234: an issue!", 145 | "issue_url": "issue URL", 146 | "number": 1234, 147 | }, 148 | } 149 | issue_data = {"url": "url", "labels": []} 150 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 151 | gh = FakeGH(getitem=issue_data) 152 | await gh_issue.router.dispatch(event, gh, session=None) 153 | status = gh.post_data[0] 154 | assert status["state"] == "success" 155 | assert status["target_url"] == "https://github.com/python/cpython/issues/1234" 156 | assert "1234" in status["description"] 157 | assert status["context"] == "bedevere/issue-number" 158 | assert "git-sha" in gh.post_url[0] 159 | gh_issue._validate_issue_number.assert_awaited_with( 160 | gh, 1234, session=None, kind="gh" 161 | ) 162 | 163 | 164 | @pytest.mark.asyncio 165 | @pytest.mark.parametrize("action", ["opened", "synchronize", "reopened"]) 166 | async def test_set_status_success_issue_found_on_gh(action, monkeypatch, issue_number): 167 | monkeypatch.setattr( 168 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 169 | ) 170 | data = { 171 | "action": action, 172 | "pull_request": { 173 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 174 | "title": f"gh-{issue_number}: an issue!", 175 | "url": "url", 176 | "issue_url": "issue URL", 177 | "number": 1234, 178 | }, 179 | } 180 | issue_data = {"url": "url", "labels": []} 181 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 182 | gh = FakeGH(getitem=issue_data) 183 | async with aiohttp.ClientSession() as session: 184 | await gh_issue.router.dispatch(event, gh, session=session) 185 | status = gh.post_data[0] 186 | assert status["state"] == "success" 187 | assert ( 188 | status["target_url"] 189 | == f"https://github.com/python/cpython/issues/{issue_number}" 190 | ) 191 | assert str(issue_number) in status["description"] 192 | assert status["context"] == "bedevere/issue-number" 193 | assert "git-sha" in gh.post_url[0] 194 | 195 | assert len(gh.patch_data) > 0 196 | assert f"" in 
gh.patch_data[0]["body"] 197 | assert ( 198 | "\n\n\n" 199 | f"### Linked PRs\n* gh-{issue_number}\n" 200 | "\n" 201 | ) in gh.patch_data[1]["body"] 202 | assert len(gh.patch_url) == 2 203 | assert gh.patch_url[0] == data["pull_request"]["url"] 204 | assert gh.patch_url[1] == issue_data["url"] 205 | 206 | 207 | @pytest.mark.asyncio 208 | @pytest.mark.parametrize("action", ["opened", "synchronize", "reopened"]) 209 | async def test_set_status_success_issue_found_on_gh_ignore_case( 210 | action, monkeypatch, issue_number 211 | ): 212 | monkeypatch.setattr( 213 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 214 | ) 215 | data = { 216 | "action": action, 217 | "pull_request": { 218 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 219 | "title": f"GH-{issue_number}: an issue!", 220 | "url": "url", 221 | "issue_url": "issue URL", 222 | "number": 1234, 223 | }, 224 | } 225 | issue_data = {"url": "url", "labels": []} 226 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 227 | gh = FakeGH(getitem=issue_data) 228 | async with aiohttp.ClientSession() as session: 229 | await gh_issue.router.dispatch(event, gh, session=session) 230 | status = gh.post_data[0] 231 | assert status["state"] == "success" 232 | assert ( 233 | status["target_url"] 234 | == f"https://github.com/python/cpython/issues/{issue_number}" 235 | ) 236 | assert str(issue_number) in status["description"] 237 | assert status["context"] == "bedevere/issue-number" 238 | assert "git-sha" in gh.post_url[0] 239 | 240 | assert len(gh.patch_data) > 0 241 | assert f"" in gh.patch_data[0]["body"] 242 | assert ( 243 | "\n\n\n" 244 | f"### Linked PRs\n* gh-{issue_number}\n" 245 | "\n" 246 | ) in gh.patch_data[1]["body"] 247 | assert len(gh.patch_url) == 2 248 | assert gh.patch_url[0] == data["pull_request"]["url"] 249 | assert gh.patch_url[1] == issue_data["url"] 250 | 251 | 252 | @pytest.mark.asyncio 253 | @pytest.mark.parametrize("action", ["opened", "synchronize", "reopened"]) 254 | async def test_set_status_success_via_skip_issue_label(action, monkeypatch): 255 | monkeypatch.setattr( 256 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 257 | ) 258 | data = { 259 | "action": action, 260 | "pull_request": { 261 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 262 | "title": "No issue in title", 263 | "issue_url": "issue URL", 264 | "url": "url", 265 | "number": 1234, 266 | }, 267 | } 268 | issue_data = { 269 | "url": "url", 270 | "labels": [ 271 | {"name": "skip issue"}, 272 | ], 273 | } 274 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 275 | gh = FakeGH(getitem=issue_data) 276 | await gh_issue.router.dispatch(event, gh, session=None) 277 | status = gh.post_data[0] 278 | assert status["state"] == "success" 279 | assert status["context"] == "bedevere/issue-number" 280 | assert "git-sha" in gh.post_url[0] 281 | gh_issue._validate_issue_number.assert_not_awaited() 282 | 283 | assert len(gh.patch_data) == 0 284 | assert len(gh.patch_url) == 0 285 | 286 | 287 | @pytest.mark.asyncio 288 | @pytest.mark.parametrize("action", ["opened", "synchronize", "reopened"]) 289 | async def test_set_status_success_via_skip_issue_label_pr_in_title(action, monkeypatch): 290 | monkeypatch.setattr( 291 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=False) 292 | ) 293 | data = { 294 | "action": action, 295 | "pull_request": { 296 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 297 | "title": "GH-93644: An issue with a 
PR as issue number", 298 | "issue_url": "issue URL", 299 | "url": "url", 300 | "number": 1234, 301 | }, 302 | } 303 | issue_data = { 304 | "url": "url", 305 | "labels": [ 306 | {"name": "skip issue"}, 307 | ], 308 | } 309 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 310 | gh = FakeGH(getitem=issue_data) 311 | await gh_issue.router.dispatch(event, gh, session=None) 312 | status = gh.post_data[0] 313 | assert status["state"] == "success" 314 | assert status["context"] == "bedevere/issue-number" 315 | assert "git-sha" in gh.post_url[0] 316 | gh_issue._validate_issue_number.assert_not_awaited() 317 | 318 | assert len(gh.patch_data) == 0 319 | assert len(gh.patch_url) == 0 320 | 321 | 322 | @pytest.mark.asyncio 323 | async def test_edit_title(monkeypatch, issue_number): 324 | monkeypatch.setattr( 325 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 326 | ) 327 | data = { 328 | "pull_request": { 329 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 330 | "title": f"gh-{issue_number}: an issue!", 331 | "url": "url", 332 | "issue_url": "issue URL", 333 | "number": 1234, 334 | }, 335 | "action": "edited", 336 | "changes": {"title": "thingy"}, 337 | } 338 | issue_data = {"url": "url", "labels": []} 339 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 340 | gh = FakeGH(getitem=issue_data) 341 | await gh_issue.router.dispatch(event, gh, session=None) 342 | assert len(gh.post_data) == 1 343 | gh_issue._validate_issue_number.assert_awaited_with( 344 | gh, issue_number, session=None, kind="gh" 345 | ) 346 | 347 | 348 | @pytest.mark.asyncio 349 | async def test_no_body_when_edit_title(monkeypatch, issue_number): 350 | monkeypatch.setattr( 351 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 352 | ) 353 | data = { 354 | "action": "edited", 355 | "pull_request": { 356 | "url": "https://api.github.com/repos/python/cpython/pulls/5291", 357 | "title": f"gh-{issue_number}: Fix @asyncio.coroutine debug mode bug", 358 | "body": None, 359 | "issue_url": "issue URL", 360 | "statuses_url": "https://api.github.com/repos/python/cpython/statuses/98d60953c85df9f0f28e04322a4c4ebec7b180f4", 361 | "number": 1234, 362 | }, 363 | "changes": { 364 | "title": f"gh-{issue_number}: Fix @asyncio.coroutine debug mode bug exposed by #5250." 
365 | }, 366 | } 367 | issue_data = {"url": "url", "labels": []} 368 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 369 | gh = FakeGH(getitem=issue_data) 370 | await gh_issue.router.dispatch(event, gh, session=None) 371 | gh_issue._validate_issue_number.assert_awaited_with( 372 | gh, issue_number, session=None, kind="gh" 373 | ) 374 | 375 | assert len(gh.patch_data) > 0 376 | assert f"" in gh.patch_data[0]["body"] 377 | assert ( 378 | "\n\n\n" 379 | f"### Linked PRs\n* gh-{issue_number}\n" 380 | "\n" 381 | ) in gh.patch_data[1]["body"] 382 | assert len(gh.patch_url) == 2 383 | assert gh.patch_url[0] == data["pull_request"]["url"] 384 | assert gh.patch_url[1] == issue_data["url"] 385 | 386 | 387 | @pytest.mark.asyncio 388 | async def test_edit_other_than_title(monkeypatch): 389 | monkeypatch.setattr( 390 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 391 | ) 392 | data = { 393 | "pull_request": { 394 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 395 | "title": "bpo-1234: an issue!", 396 | "url": "url", 397 | "issue_url": "issue URL", 398 | "number": 1234, 399 | }, 400 | "action": "edited", 401 | "changes": {"stuff": "thingy"}, 402 | } 403 | issue_data = {"url": "url", "labels": []} 404 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 405 | gh = FakeGH(getitem=issue_data) 406 | await gh_issue.router.dispatch(event, gh, session=None) 407 | assert len(gh.post_data) == 0 408 | gh_issue._validate_issue_number.assert_not_awaited() 409 | 410 | assert len(gh.patch_data) == 0 411 | assert len(gh.patch_url) == 0 412 | 413 | 414 | @pytest.mark.asyncio 415 | async def test_new_label_skip_issue_no_issue(): 416 | data = { 417 | "action": "labeled", 418 | "label": {"name": "skip issue"}, 419 | "pull_request": { 420 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 421 | "title": "An easy fix", 422 | "url": "url", 423 | "issue_url": "issue URL", 424 | "number": 1234, 425 | }, 426 | } 427 | issue_data = {"url": "url", "labels": []} 428 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 429 | gh = FakeGH(getitem=issue_data) 430 | await gh_issue.router.dispatch(event, gh) 431 | assert gh.post_data[0]["state"] == "success" 432 | assert "git-sha" in gh.post_url[0] 433 | 434 | 435 | @pytest.mark.asyncio 436 | async def test_new_label_skip_issue_with_issue_number(): 437 | data = { 438 | "action": "labeled", 439 | "label": {"name": "skip issue"}, 440 | "pull_request": { 441 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 442 | "title": "Revert gh-1234: revert an easy fix", 443 | "url": "url", 444 | "issue_url": "issue URL", 445 | "number": 1234, 446 | }, 447 | } 448 | issue_data = {"url": "url", "labels": []} 449 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 450 | gh = FakeGH(getitem=issue_data) 451 | await gh_issue.router.dispatch(event, gh) 452 | status = gh.post_data[0] 453 | assert status["state"] == "success" 454 | assert status["target_url"] == "https://github.com/python/cpython/issues/1234" 455 | assert "1234" in status["description"] 456 | assert status["context"] == "bedevere/issue-number" 457 | assert "git-sha" in gh.post_url[0] 458 | 459 | 460 | @pytest.mark.asyncio 461 | async def test_new_label_skip_issue_with_issue_number_ignore_case(): 462 | data = { 463 | "action": "labeled", 464 | "label": {"name": "skip issue"}, 465 | "pull_request": { 466 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 467 | "title": "Revert Gh-1234: 
revert an easy fix", 468 | "url": "url", 469 | "issue_url": "issue URL", 470 | "number": 1234, 471 | }, 472 | } 473 | issue_data = {"url": "url", "labels": []} 474 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 475 | gh = FakeGH(getitem=issue_data) 476 | await gh_issue.router.dispatch(event, gh) 477 | status = gh.post_data[0] 478 | assert status["state"] == "success" 479 | assert status["target_url"] == "https://github.com/python/cpython/issues/1234" 480 | assert "1234" in status["description"] 481 | assert status["context"] == "bedevere/issue-number" 482 | assert "git-sha" in gh.post_url[0] 483 | 484 | 485 | @pytest.mark.asyncio 486 | async def test_new_label_not_skip_issue(): 487 | data = { 488 | "action": "labeled", 489 | "label": {"name": "non-trivial"}, 490 | "pull_request": { 491 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 492 | "url": "url", 493 | "issue_url": "issue URL", 494 | "number": 1234, 495 | }, 496 | } 497 | issue_data = {"url": "url", "labels": []} 498 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 499 | gh = FakeGH(getitem=issue_data) 500 | await gh_issue.router.dispatch(event, gh) 501 | assert len(gh.post_data) == 0 502 | 503 | 504 | @pytest.mark.asyncio 505 | async def test_removed_label_from_label_deletion(monkeypatch): 506 | """When a label is completely deleted from a repo, it triggers an 'unlabeled' 507 | event, but the payload has no details about the removed label.""" 508 | monkeypatch.setattr( 509 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 510 | ) 511 | data = { 512 | "action": "unlabeled", 513 | # No "label" key. 514 | "pull_request": { 515 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 516 | "title": "gh-1234: an issue!", 517 | "url": "url", 518 | "issue_url": "issue URL", 519 | "number": 1234, 520 | }, 521 | } 522 | issue_data = {"url": "url", "labels": []} 523 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 524 | gh = FakeGH(getitem=issue_data) 525 | await gh_issue.router.dispatch(event, gh, session=None) 526 | assert len(gh.post_data) == 0 527 | gh_issue._validate_issue_number.assert_not_awaited() 528 | 529 | 530 | @pytest.mark.asyncio 531 | async def test_removed_label_skip_issue(monkeypatch): 532 | monkeypatch.setattr( 533 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 534 | ) 535 | data = { 536 | "action": "unlabeled", 537 | "label": {"name": "skip issue"}, 538 | "pull_request": { 539 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 540 | "title": "gh-1234: an issue!", 541 | "url": "url", 542 | "issue_url": "issue URL", 543 | "number": 1234, 544 | }, 545 | } 546 | issue_data = {"url": "url", "labels": []} 547 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 548 | gh = FakeGH(getitem=issue_data) 549 | await gh_issue.router.dispatch(event, gh, session=None) 550 | status = gh.post_data[0] 551 | assert status["state"] == "success" 552 | assert status["target_url"] == "https://github.com/python/cpython/issues/1234" 553 | assert "1234" in status["description"] 554 | assert status["context"] == "bedevere/issue-number" 555 | assert "git-sha" in gh.post_url[0] 556 | gh_issue._validate_issue_number.assert_awaited_with( 557 | gh, 1234, session=None, kind="gh" 558 | ) 559 | 560 | 561 | @pytest.mark.asyncio 562 | async def test_removed_label_non_skip_issue(monkeypatch): 563 | monkeypatch.setattr( 564 | gh_issue, "_validate_issue_number", mock.AsyncMock(return_value=True) 565 
| ) 566 | data = { 567 | "action": "unlabeled", 568 | "label": {"name": "non-trivial"}, 569 | "pull_request": { 570 | "statuses_url": "https://api.github.com/blah/blah/git-sha", 571 | "url": "url", 572 | "issue_url": "issue URL", 573 | "number": 1234, 574 | }, 575 | } 576 | issue_data = {"url": "url", "labels": []} 577 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 578 | gh = FakeGH(getitem=issue_data) 579 | await gh_issue.router.dispatch(event, gh, session=None) 580 | assert len(gh.post_data) == 0 581 | gh_issue._validate_issue_number.assert_not_awaited() 582 | 583 | 584 | @pytest.mark.asyncio 585 | async def test_validate_issue_number_valid_on_github(): 586 | gh = FakeGH(getitem={"number": 123}) 587 | async with aiohttp.ClientSession() as session: 588 | response = await gh_issue._validate_issue_number(gh, 123, session=session) 589 | assert response is True 590 | 591 | 592 | @pytest.mark.asyncio 593 | async def test_validate_issue_number_valid_on_bpo(): 594 | gh = FakeGH(getitem={"number": 1234}) 595 | async with aiohttp.ClientSession() as session: 596 | response = await gh_issue._validate_issue_number( 597 | gh, 1234, kind="bpo", session=session 598 | ) 599 | assert response is True 600 | 601 | 602 | @pytest.mark.asyncio 603 | async def test_validate_issue_number_is_pr_on_github(): 604 | gh = FakeGH( 605 | getitem={ 606 | "number": 123, 607 | "pull_request": { 608 | "html_url": "https://github.com/python/cpython/pull/123", 609 | "url": "url", 610 | "number": 1234, 611 | }, 612 | } 613 | ) 614 | async with aiohttp.ClientSession() as session: 615 | response = await gh_issue._validate_issue_number(gh, 123, session=session) 616 | assert response is False 617 | 618 | 619 | @pytest.mark.asyncio 620 | async def test_validate_issue_number_is_not_valid(): 621 | gh = FakeGH(getitem=gidgethub.BadRequest(status_code=http.HTTPStatus(404))) 622 | async with aiohttp.ClientSession() as session: 623 | response = await gh_issue._validate_issue_number(gh, 123, session=session) 624 | assert response is False 625 | 626 | 627 | @pytest.mark.asyncio 628 | async def test_validate_issue_number_coverage100(): 629 | gh = FakeGH(getitem={"number": 1234}) 630 | async with aiohttp.ClientSession() as session: 631 | with pytest.raises(ValueError): 632 | await gh_issue._validate_issue_number( 633 | gh, 123, session=session, kind="invalid" # type: ignore 634 | ) 635 | -------------------------------------------------------------------------------- /tests/test_stage.py: -------------------------------------------------------------------------------- 1 | import http 2 | 3 | import gidgethub 4 | import pytest 5 | from gidgethub import sansio 6 | 7 | from bedevere import stage as awaiting 8 | from bedevere.stage import ACK 9 | 10 | 11 | class FakeGH: 12 | def __init__(self, *, getiter=None, getitem=None, delete=None, post=None): 13 | self._getiter_return = getiter 14 | self._getitem_return = getitem 15 | self._delete_return = delete 16 | self._post_return = post 17 | self.getitem_url = None 18 | self.delete_url = None 19 | self.post_ = [] 20 | self.patch_ = [] 21 | 22 | async def getiter(self, url, url_vars={}): 23 | self.getiter_url = sansio.format_url(url, url_vars) 24 | to_iterate = self._getiter_return[self.getiter_url] 25 | for item in to_iterate: 26 | if isinstance(item, Exception): 27 | raise item 28 | yield item 29 | 30 | async def getitem(self, url, url_vars={}): 31 | self.getitem_url = sansio.format_url(url, url_vars) 32 | to_return = self._getitem_return[self.getitem_url] 33 | if 
isinstance(to_return, Exception): 34 | raise to_return 35 | else: 36 | return to_return 37 | 38 | async def delete(self, url, url_vars={}): 39 | self.delete_url = sansio.format_url(url, url_vars) 40 | 41 | async def post(self, url, url_vars={}, *, data): 42 | post_url = sansio.format_url(url, url_vars) 43 | self.post_.append((post_url, data)) 44 | 45 | async def patch(self, url, url_vars={}, *, data): 46 | patch_url = sansio.format_url(url, url_vars) 47 | self.patch_.append((patch_url, data)) 48 | 49 | 50 | async def test_stage(): 51 | # Skip changing labels if the label is already set. 52 | issue = {"labels": [{"name": "awaiting merge"}, {"name": "skip issue"}]} 53 | issue_url = "https://api.github.com/some/issue" 54 | gh = FakeGH() 55 | await awaiting.stage(gh, issue, awaiting.Blocker.merge) 56 | assert not gh.delete_url 57 | assert not gh.post_ 58 | 59 | # Test deleting an old label and adding a new one. 60 | issue = { 61 | "labels": [{"name": "awaiting review"}, {"name": "skip issue"}], 62 | "labels_url": "https://api.github.com/repos/python/cpython/issues/42/labels{/name}", 63 | } 64 | gh = FakeGH() 65 | await awaiting.stage(gh, issue, awaiting.Blocker.merge) 66 | assert ( 67 | gh.delete_url 68 | == "https://api.github.com/repos/python/cpython/issues/42/labels/awaiting%20review" 69 | ) 70 | assert len(gh.post_) == 1 71 | post_ = gh.post_[0] 72 | assert post_[0] == "https://api.github.com/repos/python/cpython/issues/42/labels" 73 | assert post_[1] == [awaiting.Blocker.merge.value] 74 | 75 | 76 | async def test_opened_draft_pr(): 77 | # New Draft PR from a core dev. 78 | username = "brettcannon" 79 | issue_url = "https://api.github.com/issue/42" 80 | data = { 81 | "action": "opened", 82 | "pull_request": { 83 | "user": { 84 | "login": username, 85 | }, 86 | "issue_url": issue_url, 87 | "draft": True, 88 | }, 89 | } 90 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 91 | teams = [{"name": "python core", "id": 6}] 92 | items = { 93 | f"https://api.github.com/teams/6/memberships/{username}": "OK", 94 | issue_url: {"labels": [], "labels_url": "https://api.github.com/labels"}, 95 | } 96 | gh = FakeGH( 97 | getiter={"https://api.github.com/orgs/python/teams": teams}, getitem=items 98 | ) 99 | await awaiting.router.dispatch(event, gh) 100 | assert len(gh.post_) == 0 101 | 102 | # Draft PR is published 103 | data["action"] = "ready_for_review" 104 | data["pull_request"]["draft"] = False 105 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 106 | gh = FakeGH( 107 | getiter={"https://api.github.com/orgs/python/teams": teams}, getitem=items 108 | ) 109 | await awaiting.router.dispatch(event, gh) 110 | assert len(gh.post_) == 1 111 | post_ = gh.post_[0] 112 | assert post_[0] == "https://api.github.com/labels" 113 | assert post_[1] == [awaiting.Blocker.core_review.value] 114 | 115 | # Published PR is unpublished (converted to Draft) 116 | data["action"] = "converted_to_draft" 117 | data["pull_request"]["draft"] = True 118 | encoded_label = "awaiting%20core%20review" 119 | items[issue_url] = { 120 | "labels": [ 121 | { 122 | "url": f"https://api.github.com/repos/python/cpython/labels/{encoded_label}", 123 | "name": "awaiting core review", 124 | }, 125 | { 126 | "url": "https://api.github.com/repos/python/cpython/labels/CLA%20signed", 127 | "name": "CLA signed", 128 | }, 129 | ], 130 | "labels_url": "https://api.github.com/repos/python/cpython/issues/12345/labels{/name}", 131 | } 132 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 
133 | gh = FakeGH( 134 | getiter={"https://api.github.com/orgs/python/teams": teams}, getitem=items 135 | ) 136 | await awaiting.router.dispatch(event, gh) 137 | assert len(gh.post_) == 0 138 | assert ( 139 | gh.delete_url 140 | == f"https://api.github.com/repos/python/cpython/issues/12345/labels/{encoded_label}" 141 | ) 142 | 143 | 144 | async def test_edited_pr_title(): 145 | # regression test for https://github.com/python/bedevere/issues/556 146 | # test that editing the PR title doesn't change the Blocker labels 147 | username = "itamaro" 148 | issue_url = "https://api.github.com/issue/42" 149 | data = { 150 | "action": "edited", 151 | "pull_request": { 152 | "user": { 153 | "login": username, 154 | }, 155 | "issue_url": issue_url, 156 | "draft": False, 157 | "title": "So long and thanks for all the fish", 158 | }, 159 | "changes": { 160 | "title": "So long and thanks for all the phish", 161 | }, 162 | } 163 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 164 | teams = [{"name": "python core", "id": 6}] 165 | encoded_label = "awaiting%20review" 166 | items = { 167 | f"https://api.github.com/teams/6/memberships/{username}": gidgethub.BadRequest( 168 | status_code=http.HTTPStatus(404) 169 | ), 170 | issue_url: { 171 | "labels": [ 172 | { 173 | "url": f"https://api.github.com/repos/python/cpython/labels/{encoded_label}", 174 | "name": "awaiting review", 175 | }, 176 | { 177 | "url": "https://api.github.com/repos/python/cpython/labels/CLA%20signed", 178 | "name": "CLA signed", 179 | }, 180 | ], 181 | "labels_url": "https://api.github.com/repos/python/cpython/issues/12345/labels{/name}", 182 | }, 183 | } 184 | gh = FakeGH( 185 | getiter={"https://api.github.com/orgs/python/teams": teams}, getitem=items 186 | ) 187 | await awaiting.router.dispatch(event, gh) 188 | assert len(gh.post_) == 0 189 | assert gh.delete_url is None 190 | 191 | 192 | async def test_opened_pr(): 193 | # New PR from a core dev. 194 | username = "brettcannon" 195 | issue_url = "https://api.github.com/issue/42" 196 | data = { 197 | "action": "opened", 198 | "pull_request": { 199 | "user": { 200 | "login": username, 201 | }, 202 | "issue_url": issue_url, 203 | }, 204 | } 205 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 206 | teams = [{"name": "python core", "id": 6}] 207 | items = { 208 | f"https://api.github.com/teams/6/memberships/{username}": "OK", 209 | issue_url: {"labels": [], "labels_url": "https://api.github.com/labels"}, 210 | } 211 | gh = FakeGH( 212 | getiter={"https://api.github.com/orgs/python/teams": teams}, getitem=items 213 | ) 214 | await awaiting.router.dispatch(event, gh) 215 | assert len(gh.post_) == 1 216 | post_ = gh.post_[0] 217 | assert post_[0] == "https://api.github.com/labels" 218 | assert post_[1] == [awaiting.Blocker.core_review.value] 219 | 220 | # New PR from a non-core dev. 
221 | username = "andreamcinnes" 222 | issue_url = "https://api.github.com/issue/42" 223 | data = { 224 | "action": "opened", 225 | "pull_request": { 226 | "user": { 227 | "login": username, 228 | }, 229 | "issue_url": issue_url, 230 | "draft": False, 231 | }, 232 | } 233 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 234 | teams = [{"name": "python core", "id": 6}] 235 | items = { 236 | f"https://api.github.com/teams/6/memberships/{username}": gidgethub.BadRequest( 237 | status_code=http.HTTPStatus(404) 238 | ), 239 | issue_url: {"labels": [], "labels_url": "https://api.github.com/labels"}, 240 | } 241 | gh = FakeGH( 242 | getiter={"https://api.github.com/orgs/python/teams": teams}, getitem=items 243 | ) 244 | await awaiting.router.dispatch(event, gh) 245 | assert len(gh.post_) == 1 246 | post_ = gh.post_[0] 247 | assert post_[0] == "https://api.github.com/labels" 248 | assert post_[1] == [awaiting.Blocker.review.value] 249 | 250 | 251 | async def test_new_review(): 252 | # First non-comment review from a non-core dev. 253 | username = "andreamcinnes" 254 | data = { 255 | "action": "submitted", 256 | "review": { 257 | "state": "approved", 258 | "user": { 259 | "login": username, 260 | }, 261 | }, 262 | "pull_request": { 263 | "url": "https://api.github.com/pr/42", 264 | "issue_url": "https://api.github.com/issue/42", 265 | "state": "open", 266 | }, 267 | } 268 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 269 | teams = [{"name": "python core", "id": 6}] 270 | items = { 271 | f"https://api.github.com/teams/6/memberships/{username}": gidgethub.BadRequest( 272 | status_code=http.HTTPStatus(404) 273 | ), 274 | "https://api.github.com/teams/6/memberships/brettcannon": True, 275 | "https://api.github.com/issue/42": { 276 | "labels": [], 277 | "labels_url": "https://api.github.com/labels/42", 278 | }, 279 | } 280 | iterators = { 281 | "https://api.github.com/orgs/python/teams": teams, 282 | "https://api.github.com/pr/42/reviews": [ 283 | {"user": {"login": "brettcannon"}, "state": "commented"} 284 | ], 285 | } 286 | gh = FakeGH(getiter=iterators, getitem=items) 287 | await awaiting.router.dispatch(event, gh) 288 | assert len(gh.post_) == 1 289 | post_ = gh.post_[0] 290 | assert post_[0] == "https://api.github.com/labels/42" 291 | assert post_[1] == [awaiting.Blocker.core_review.value] 292 | 293 | # First and second review from a non-core dev. 294 | items = { 295 | f"https://api.github.com/teams/6/memberships/{username}": gidgethub.BadRequest( 296 | status_code=http.HTTPStatus(404) 297 | ), 298 | "https://api.github.com/teams/6/memberships/brettcannon": True, 299 | "https://api.github.com/issue/42": { 300 | "labels": [], 301 | "labels_url": "https://api.github.com/labels/42", 302 | }, 303 | } 304 | iterators = { 305 | "https://api.github.com/orgs/python/teams": teams, 306 | "https://api.github.com/pr/42/reviews": [ 307 | {"user": {"login": "brettcannon"}, "state": "approved"} 308 | ], 309 | } 310 | gh = FakeGH(getiter=iterators, getitem=items) 311 | await awaiting.router.dispatch(event, gh) 312 | assert not gh.post_ 313 | 314 | # First comment review from a non-core dev. 
315 | data = { 316 | "action": "submitted", 317 | "review": { 318 | "state": "comment", 319 | "user": { 320 | "login": username, 321 | }, 322 | }, 323 | "pull_request": { 324 | "url": "https://api.github.com/pr/42", 325 | "issue_url": "https://api.github.com/issue/42", 326 | "state": "open", 327 | }, 328 | } 329 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 330 | items = { 331 | f"https://api.github.com/teams/6/memberships/{username}": gidgethub.BadRequest( 332 | status_code=http.HTTPStatus(404) 333 | ), 334 | "https://api.github.com/teams/6/memberships/brettcannon": True, 335 | "https://api.github.com/issue/42": { 336 | "labels": [], 337 | "labels_url": "https://api.github.com/labels/42", 338 | }, 339 | } 340 | iterators = { 341 | "https://api.github.com/orgs/python/teams": teams, 342 | "https://api.github.com/pr/42/reviews": [ 343 | {"user": {"login": "brettcannon"}, "state": "approved"} 344 | ], 345 | } 346 | gh = FakeGH(getiter=iterators, getitem=items) 347 | await awaiting.router.dispatch(event, gh) 348 | assert not gh.post_ 349 | 350 | # Core dev submits an approving review. 351 | username = "brettcannon" 352 | data = { 353 | "action": "submitted", 354 | "review": { 355 | "user": { 356 | "login": username, 357 | }, 358 | "state": "APPROVED", 359 | }, 360 | "pull_request": { 361 | "url": "https://api.github.com/pr/42", 362 | "issue_url": "https://api.github.com/issue/42", 363 | "state": "open", 364 | }, 365 | } 366 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 367 | teams = [{"name": "python core", "id": 6}] 368 | items = { 369 | f"https://api.github.com/teams/6/memberships/{username}": True, 370 | "https://api.github.com/issue/42": { 371 | "labels": [{"name": awaiting.Blocker.changes.value}], 372 | "labels_url": "https://api.github.com/labels/42", 373 | }, 374 | } 375 | iterators = { 376 | "https://api.github.com/orgs/python/teams": teams, 377 | "https://api.github.com/pr/42/reviews": [], 378 | } 379 | gh = FakeGH(getiter=iterators, getitem=items) 380 | await awaiting.router.dispatch(event, gh) 381 | assert len(gh.post_) == 1 382 | post_ = gh.post_[0] 383 | assert post_[0] == "https://api.github.com/labels/42" 384 | assert post_[1] == [awaiting.Blocker.merge.value] 385 | 386 | # Core dev submits an approving review on an already closed pull request. 387 | username = "brettcannon" 388 | data = { 389 | "action": "submitted", 390 | "review": { 391 | "user": { 392 | "login": username, 393 | }, 394 | "state": "APPROVED", 395 | }, 396 | "pull_request": { 397 | "url": "https://api.github.com/pr/42", 398 | "issue_url": "https://api.github.com/issue/42", 399 | "state": "closed", 400 | }, 401 | } 402 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 403 | teams = [{"name": "python core", "id": 6}] 404 | items = { 405 | f"https://api.github.com/teams/6/memberships/{username}": True, 406 | "https://api.github.com/issue/42": { 407 | "labels": [{"name": awaiting.Blocker.changes.value}], 408 | "labels_url": "https://api.github.com/labels/42", 409 | }, 410 | } 411 | iterators = { 412 | "https://api.github.com/orgs/python/teams": teams, 413 | "https://api.github.com/pr/42/reviews": [], 414 | } 415 | gh = FakeGH(getiter=iterators, getitem=items) 416 | await awaiting.router.dispatch(event, gh) 417 | assert not gh.post_ 418 | 419 | # Core dev requests changes. 
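    # Two POSTs are expected here: the "awaiting changes" label, and a comment
    # telling the (non-core) PR author that the boring trigger phrase can be
    # used to flip the stage back once the requested changes have been made.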
420 | data = { 421 | "action": "submitted", 422 | "review": { 423 | "user": { 424 | "login": username, 425 | }, 426 | "state": "changes_requested".upper(), 427 | }, 428 | "pull_request": { 429 | "url": "https://api.github.com/pr/42", 430 | "issue_url": "https://api.github.com/issue/42", 431 | "comments_url": "https://api.github.com/comment/42", 432 | "user": {"login": "miss-islington"}, 433 | "state": "open", 434 | }, 435 | } 436 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 437 | items = { 438 | f"https://api.github.com/teams/6/memberships/{username}": True, 439 | f"https://api.github.com/teams/6/memberships/miss-islington": gidgethub.BadRequest( 440 | status_code=http.HTTPStatus(404) 441 | ), 442 | "https://api.github.com/issue/42": { 443 | "labels": [], 444 | "labels_url": "https://api.github.com/labels/42", 445 | }, 446 | } 447 | gh = FakeGH(getiter=iterators, getitem=items) 448 | await awaiting.router.dispatch(event, gh) 449 | assert len(gh.post_) == 2 450 | labeling = gh.post_[0] 451 | assert labeling[0] == "https://api.github.com/labels/42" 452 | assert labeling[1] == [awaiting.Blocker.changes.value] 453 | message = gh.post_[1] 454 | assert message[0] == "https://api.github.com/comment/42" 455 | assert awaiting.BORING_TRIGGER_PHRASE in message[1]["body"] 456 | 457 | # Comment reviews do nothing. 458 | data = { 459 | "action": "submitted", 460 | "review": { 461 | "user": { 462 | "login": username, 463 | }, 464 | "state": "commented".upper(), 465 | }, 466 | "pull_request": { 467 | "url": "https://api.github.com/pr/42", 468 | "issue_url": "https://api.github.com/issue/42", 469 | "comments_url": "https://api.github.com/comment/42", 470 | "state": "open", 471 | }, 472 | } 473 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 474 | gh = FakeGH(getiter=iterators, getitem=items) 475 | await awaiting.router.dispatch(event, gh) 476 | assert not len(gh.post_) 477 | 478 | # Skip commenting if "awaiting changes" is already set. 
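    # The issue already carries the "awaiting changes" label, so neither a new
    # label POST nor another reminder comment should be made.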
479 | data = { 480 | "action": "submitted", 481 | "review": { 482 | "user": { 483 | "login": username, 484 | }, 485 | "state": "changes_requested".upper(), 486 | }, 487 | "pull_request": { 488 | "url": "https://api.github.com/pr/42", 489 | "issue_url": "https://api.github.com/issue/42", 490 | "comments_url": "https://api.github.com/comment/42", 491 | "state": "open", 492 | }, 493 | } 494 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 495 | items = { 496 | f"https://api.github.com/teams/6/memberships/{username}": True, 497 | "https://api.github.com/issue/42": { 498 | "labels": [{"name": awaiting.Blocker.changes.value}], 499 | "labels_url": "https://api.github.com/labels/42", 500 | }, 501 | } 502 | gh = FakeGH(getiter=iterators, getitem=items) 503 | await awaiting.router.dispatch(event, gh) 504 | assert not len(gh.post_) 505 | 506 | 507 | async def test_dismissed_review(): 508 | # Last non-core review is dismissed > downgrade 509 | username = "andreamcinnes" 510 | data = { 511 | "action": "dismissed", 512 | "review": { 513 | "user": { 514 | "login": username, 515 | }, 516 | }, 517 | "pull_request": { 518 | "url": "https://api.github.com/pr/42", 519 | "issue_url": "https://api.github.com/issue/42", 520 | }, 521 | } 522 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 523 | teams = [{"name": "python core", "id": 6}] 524 | items = { 525 | f"https://api.github.com/teams/6/memberships/{username}": gidgethub.BadRequest( 526 | status_code=http.HTTPStatus(404) 527 | ), 528 | "https://api.github.com/teams/6/memberships/brettcannon": True, 529 | "https://api.github.com/issue/42": { 530 | "labels": [{"name": awaiting.Blocker.core_review.value}], 531 | "labels_url": "https://api.github.com/labels/42", 532 | }, 533 | } 534 | iterators = { 535 | "https://api.github.com/orgs/python/teams": teams, 536 | "https://api.github.com/pr/42/reviews": [ 537 | {"user": {"login": "brettcannon"}, "state": "commented"} 538 | ], 539 | } 540 | gh = FakeGH(getiter=iterators, getitem=items) 541 | await awaiting.router.dispatch(event, gh) 542 | assert len(gh.post_) == 1 543 | post_ = gh.post_[0] 544 | assert post_[0] == "https://api.github.com/labels/42" 545 | assert post_[1] == [awaiting.Blocker.review.value] 546 | 547 | # Non-core review is dismissed, but core review remains > no change 548 | username = "andreamcinnes" 549 | data = { 550 | "action": "dismissed", 551 | "review": { 552 | "user": { 553 | "login": username, 554 | }, 555 | }, 556 | "pull_request": { 557 | "url": "https://api.github.com/pr/42", 558 | "issue_url": "https://api.github.com/issue/42", 559 | }, 560 | } 561 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 562 | teams = [{"name": "python core", "id": 6}] 563 | items = { 564 | f"https://api.github.com/teams/6/memberships/{username}": gidgethub.BadRequest( 565 | status_code=http.HTTPStatus(404) 566 | ), 567 | "https://api.github.com/teams/6/memberships/brettcannon": True, 568 | "https://api.github.com/issue/42": { 569 | "labels": [{"name": awaiting.Blocker.merge.value}], 570 | "labels_url": "https://api.github.com/labels/42", 571 | }, 572 | } 573 | iterators = { 574 | "https://api.github.com/orgs/python/teams": teams, 575 | "https://api.github.com/pr/42/reviews": [ 576 | {"user": {"login": "brettcannon"}, "state": "approved"} 577 | ], 578 | } 579 | gh = FakeGH(getiter=iterators, getitem=items) 580 | await awaiting.router.dispatch(event, gh) 581 | assert len(gh.post_) == 0 582 | 583 | # Non-core review is 
dismissed, but non-core review remains > no change 584 | username = "andreamcinnes" 585 | data = { 586 | "action": "dismissed", 587 | "review": { 588 | "user": { 589 | "login": username, 590 | }, 591 | }, 592 | "pull_request": { 593 | "url": "https://api.github.com/pr/42", 594 | "issue_url": "https://api.github.com/issue/42", 595 | }, 596 | } 597 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 598 | teams = [{"name": "python core", "id": 6}] 599 | items = { 600 | f"https://api.github.com/teams/6/memberships/{username}": gidgethub.BadRequest( 601 | status_code=http.HTTPStatus(404) 602 | ), 603 | f"https://api.github.com/teams/6/memberships/notbrettcannon": gidgethub.BadRequest( 604 | status_code=http.HTTPStatus(404) 605 | ), 606 | "https://api.github.com/issue/42": { 607 | "labels": [{"name": awaiting.Blocker.core_review.value}], 608 | "labels_url": "https://api.github.com/labels/42", 609 | }, 610 | } 611 | iterators = { 612 | "https://api.github.com/orgs/python/teams": teams, 613 | "https://api.github.com/pr/42/reviews": [ 614 | {"user": {"login": "notbrettcannon"}, "state": "approved"} 615 | ], 616 | } 617 | gh = FakeGH(getiter=iterators, getitem=items) 618 | await awaiting.router.dispatch(event, gh) 619 | assert len(gh.post_) == 0 620 | 621 | # Last core review is dismissed > double downgrade 622 | username = "brettcannon" 623 | data = { 624 | "action": "dismissed", 625 | "review": { 626 | "user": { 627 | "login": username, 628 | }, 629 | }, 630 | "pull_request": { 631 | "url": "https://api.github.com/pr/42", 632 | "issue_url": "https://api.github.com/issue/42", 633 | }, 634 | } 635 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 636 | teams = [{"name": "python core", "id": 6}] 637 | items = { 638 | f"https://api.github.com/teams/6/memberships/{username}": True, 639 | f"https://api.github.com/teams/6/memberships/notbrettcannon": gidgethub.BadRequest( 640 | status_code=http.HTTPStatus(404) 641 | ), 642 | "https://api.github.com/issue/42": { 643 | "labels": [{"name": awaiting.Blocker.merge.value}], 644 | "labels_url": "https://api.github.com/labels/42", 645 | }, 646 | } 647 | iterators = { 648 | "https://api.github.com/orgs/python/teams": teams, 649 | "https://api.github.com/pr/42/reviews": [ 650 | {"user": {"login": "notbrettcannon"}, "state": "commented"} 651 | ], 652 | } 653 | gh = FakeGH(getiter=iterators, getitem=items) 654 | await awaiting.router.dispatch(event, gh) 655 | assert len(gh.post_) == 1 656 | post_ = gh.post_[0] 657 | assert post_[0] == "https://api.github.com/labels/42" 658 | assert post_[1] == [awaiting.Blocker.review.value] 659 | 660 | # Last core review is dismissed, non-core remains > downgrade 661 | username = "brettcannon" 662 | data = { 663 | "action": "dismissed", 664 | "review": { 665 | "user": { 666 | "login": username, 667 | }, 668 | }, 669 | "pull_request": { 670 | "url": "https://api.github.com/pr/42", 671 | "issue_url": "https://api.github.com/issue/42", 672 | }, 673 | } 674 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 675 | teams = [{"name": "python core", "id": 6}] 676 | items = { 677 | f"https://api.github.com/teams/6/memberships/{username}": True, 678 | f"https://api.github.com/teams/6/memberships/notbrettcannon": gidgethub.BadRequest( 679 | status_code=http.HTTPStatus(404) 680 | ), 681 | "https://api.github.com/issue/42": { 682 | "labels": [{"name": awaiting.Blocker.merge.value}], 683 | "labels_url": "https://api.github.com/labels/42", 684 | }, 685 | } 686 | 
iterators = { 687 | "https://api.github.com/orgs/python/teams": teams, 688 | "https://api.github.com/pr/42/reviews": [ 689 | {"user": {"login": "notbrettcannon"}, "state": "approved"} 690 | ], 691 | } 692 | gh = FakeGH(getiter=iterators, getitem=items) 693 | await awaiting.router.dispatch(event, gh) 694 | assert len(gh.post_) == 1 695 | post_ = gh.post_[0] 696 | assert post_[0] == "https://api.github.com/labels/42" 697 | assert post_[1] == [awaiting.Blocker.core_review.value] 698 | 699 | # Core review is dismissed, but one core remains > no change 700 | username = "brettcannon" 701 | data = { 702 | "action": "dismissed", 703 | "review": { 704 | "user": { 705 | "login": username, 706 | }, 707 | }, 708 | "pull_request": { 709 | "url": "https://api.github.com/pr/42", 710 | "issue_url": "https://api.github.com/issue/42", 711 | }, 712 | } 713 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 714 | teams = [{"name": "python core", "id": 6}] 715 | items = { 716 | f"https://api.github.com/teams/6/memberships/{username}": True, 717 | f"https://api.github.com/teams/6/memberships/brettcannonalias": True, 718 | "https://api.github.com/issue/42": { 719 | "labels": [{"name": awaiting.Blocker.merge.value}], 720 | "labels_url": "https://api.github.com/labels/42", 721 | }, 722 | } 723 | iterators = { 724 | "https://api.github.com/orgs/python/teams": teams, 725 | "https://api.github.com/pr/42/reviews": [ 726 | {"user": {"login": "brettcannonalias"}, "state": "approved"} 727 | ], 728 | } 729 | gh = FakeGH(getiter=iterators, getitem=items) 730 | await awaiting.router.dispatch(event, gh) 731 | assert len(gh.post_) == 0 732 | 733 | 734 | async def test_non_core_dev_does_not_downgrade(): 735 | core_dev = "brettcannon" 736 | non_core_dev = "andreamcinnes" 737 | teams = [{"name": "python core", "id": 6}] 738 | items = { 739 | f"https://api.github.com/teams/6/memberships/{non_core_dev}": gidgethub.BadRequest( 740 | status_code=http.HTTPStatus(404) 741 | ), 742 | f"https://api.github.com/teams/6/memberships/{core_dev}": True, 743 | "https://api.github.com/issue/42": { 744 | "labels": [], 745 | "labels_url": "https://api.github.com/labels/42", 746 | }, 747 | } 748 | 749 | # Approval from a core dev changes the state to "Awaiting merge". 750 | data = { 751 | "action": "submitted", 752 | "review": { 753 | "state": "approved", 754 | "user": { 755 | "login": core_dev, 756 | }, 757 | }, 758 | "pull_request": { 759 | "url": "https://api.github.com/pr/42", 760 | "issue_url": "https://api.github.com/issue/42", 761 | "state": "open", 762 | }, 763 | } 764 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 765 | iterators = { 766 | "https://api.github.com/orgs/python/teams": teams, 767 | "https://api.github.com/pr/42/reviews": [ 768 | {"user": {"login": core_dev}, "state": "approved"} 769 | ], 770 | } 771 | gh = FakeGH(getiter=iterators, getitem=items) 772 | await awaiting.router.dispatch(event, gh) 773 | assert len(gh.post_) == 1 774 | post_ = gh.post_[0] 775 | assert post_[0] == "https://api.github.com/labels/42" 776 | assert post_[1] == [awaiting.Blocker.merge.value] 777 | 778 | # Non-comment review from a non-core dev doesn't "downgrade" the PR's state. 
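    # The core dev's earlier approval already moved the PR to "awaiting merge";
    # a later approval from a non-core reviewer should leave that stage as-is.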
779 | data = { 780 | "action": "submitted", 781 | "review": { 782 | "state": "approved", 783 | "user": { 784 | "login": non_core_dev, 785 | }, 786 | }, 787 | "pull_request": { 788 | "url": "https://api.github.com/pr/42", 789 | "issue_url": "https://api.github.com/issue/42", 790 | "state": "open", 791 | }, 792 | } 793 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 794 | iterators = { 795 | "https://api.github.com/orgs/python/teams": teams, 796 | "https://api.github.com/pr/42/reviews": [ 797 | {"user": {"login": core_dev}, "state": "approved"}, 798 | {"user": {"login": non_core_dev}, "state": "approved"}, 799 | ], 800 | } 801 | gh = FakeGH(getiter=iterators, getitem=items) 802 | await awaiting.router.dispatch(event, gh) 803 | assert not gh.post_ 804 | 805 | 806 | async def test_new_comment(): 807 | # Comment not from PR author. 808 | data = { 809 | "action": "created", 810 | "issue": {"user": {"login": "andreamcinnes"}}, 811 | "comment": { 812 | "user": {"login": "brettcannon"}, 813 | "body": awaiting.BORING_TRIGGER_PHRASE, 814 | }, 815 | } 816 | event = sansio.Event(data, event="issue_comment", delivery_id="12345") 817 | gh = FakeGH() 818 | await awaiting.router.dispatch(event, gh) 819 | assert not len(gh.post_) 820 | 821 | # Comment from PR author but missing trigger phrase. 822 | data = { 823 | "action": "created", 824 | "issue": {"user": {"login": "andreamcinnes"}}, 825 | "comment": { 826 | "user": {"login": "andreamcinnes"}, 827 | "body": "I DID expect the Spanish Inquisition", 828 | }, 829 | } 830 | event = sansio.Event(data, event="issue_comment", delivery_id="12345") 831 | gh = FakeGH() 832 | await awaiting.router.dispatch(event, gh) 833 | assert not len(gh.post_) 834 | 835 | # Everything is right with the world. 
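    # Three POSTs are expected when the PR author uses the trigger phrase:
    # re-label the PR "awaiting change review", leave a comment that @-mentions
    # only the core devs who previously reviewed, and re-request reviews from
    # those same core devs (non-core reviewers are excluded from both).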
836 | data = { 837 | "action": "created", 838 | "issue": { 839 | "user": {"login": "andreamcinnes"}, 840 | "labels": [], 841 | "labels_url": "https://api.github.com/labels/42", 842 | "url": "https://api.github.com/issue/42", 843 | "pull_request": {"url": "https://api.github.com/pr/42"}, 844 | "comments_url": "https://api.github.com/comments/42", 845 | }, 846 | "comment": { 847 | "user": {"login": "andreamcinnes"}, 848 | "body": awaiting.BORING_TRIGGER_PHRASE, 849 | }, 850 | } 851 | event = sansio.Event(data, event="issue_comment", delivery_id="12345") 852 | items = { 853 | "https://api.github.com/teams/6/memberships/brettcannon": True, 854 | "https://api.github.com/teams/6/memberships/gvanrossum": True, 855 | "https://api.github.com/teams/6/memberships/not-core-dev": gidgethub.BadRequest( 856 | status_code=http.HTTPStatus(404) 857 | ), 858 | } 859 | iterators = { 860 | "https://api.github.com/orgs/python/teams": [{"name": "python core", "id": 6}], 861 | "https://api.github.com/pr/42/reviews": [ 862 | {"user": {"login": "brettcannon"}, "state": "approved"}, 863 | {"user": {"login": "gvanrossum"}, "state": "changes_requested"}, 864 | {"user": {"login": "not-core-dev"}, "state": "approved"}, 865 | ], 866 | } 867 | gh = FakeGH(getitem=items, getiter=iterators) 868 | await awaiting.router.dispatch(event, gh) 869 | assert len(gh.post_) == 3 870 | labeling, comment, review_request = gh.post_ 871 | assert labeling[0] == "https://api.github.com/labels/42" 872 | assert labeling[1] == [awaiting.Blocker.change_review.value] 873 | assert comment[0] == "https://api.github.com/comments/42" 874 | comment_body = comment[1]["body"] 875 | assert "@brettcannon" in comment_body 876 | assert "@gvanrossum" in comment_body 877 | assert "not-core-dev" not in comment_body 878 | assert review_request[0] == "https://api.github.com/pr/42/requested_reviewers" 879 | requested_reviewers = review_request[1]["reviewers"] 880 | assert "brettcannon" in requested_reviewers 881 | assert "gvanrossum" in requested_reviewers 882 | assert "not-core-dev" not in requested_reviewers 883 | 884 | # All is right with the Monty Python world. 
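    # The fun trigger phrase is expected to behave exactly like the boring one,
    # so the assertions below mirror the previous scenario.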
885 | data = { 886 | "action": "created", 887 | "issue": { 888 | "user": {"login": "andreamcinnes"}, 889 | "labels": [], 890 | "labels_url": "https://api.github.com/labels/42", 891 | "url": "https://api.github.com/issue/42", 892 | "pull_request": {"url": "https://api.github.com/pr/42"}, 893 | "comments_url": "https://api.github.com/comments/42", 894 | }, 895 | "comment": { 896 | "user": {"login": "andreamcinnes"}, 897 | "body": awaiting.FUN_TRIGGER_PHRASE, 898 | }, 899 | } 900 | event = sansio.Event(data, event="issue_comment", delivery_id="12345") 901 | gh = FakeGH(getitem=items, getiter=iterators) 902 | await awaiting.router.dispatch(event, gh) 903 | assert len(gh.post_) == 3 904 | labeling, comment, review_request = gh.post_ 905 | assert labeling[0] == "https://api.github.com/labels/42" 906 | assert labeling[1] == [awaiting.Blocker.change_review.value] 907 | assert comment[0] == "https://api.github.com/comments/42" 908 | comment_body = comment[1]["body"] 909 | assert "@brettcannon" in comment_body 910 | assert "@gvanrossum" in comment_body 911 | assert "not-core-dev" not in comment_body 912 | assert review_request[0] == "https://api.github.com/pr/42/requested_reviewers" 913 | requested_reviewers = review_request[1]["reviewers"] 914 | assert "brettcannon" in requested_reviewers 915 | assert "gvanrossum" in requested_reviewers 916 | assert "not-core-dev" not in requested_reviewers 917 | 918 | 919 | async def test_change_requested_for_core_dev(): 920 | data = { 921 | "action": "submitted", 922 | "review": { 923 | "user": { 924 | "login": "gvanrossum", 925 | }, 926 | "state": "changes_requested".upper(), 927 | }, 928 | "pull_request": { 929 | "url": "https://api.github.com/pr/42", 930 | "issue_url": "https://api.github.com/issue/42", 931 | "comments_url": "https://api.github.com/comment/42", 932 | "user": {"login": "brettcannon"}, 933 | }, 934 | } 935 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 936 | teams = [{"name": "python core", "id": 6}] 937 | items = { 938 | f"https://api.github.com/teams/6/memberships/gvanrossum": True, 939 | "https://api.github.com/teams/6/memberships/brettcannon": True, 940 | "https://api.github.com/issue/42": { 941 | "labels": [], 942 | "labels_url": "https://api.github.com/labels/42", 943 | }, 944 | } 945 | iterators = { 946 | "https://api.github.com/orgs/python/teams": teams, 947 | "https://api.github.com/pr/42/reviews": [ 948 | {"user": {"login": "brettcannon"}, "state": "changes_requested"} 949 | ], 950 | } 951 | gh = FakeGH(getiter=iterators, getitem=items) 952 | await awaiting.router.dispatch(event, gh) 953 | 954 | assert len(gh.post_) == 2 955 | labeling = gh.post_[0] 956 | assert labeling[0] == "https://api.github.com/labels/42" 957 | assert labeling[1] == [awaiting.Blocker.changes.value] 958 | message = gh.post_[1] 959 | assert message[0] == "https://api.github.com/comment/42" 960 | 961 | core_dev_message = awaiting.CORE_DEV_CHANGES_REQUESTED_MESSAGE.replace( 962 | "{easter_egg}", "" 963 | ).strip() 964 | assert core_dev_message in message[1]["body"] 965 | 966 | 967 | async def test_change_requested_for_non_core_dev(): 968 | data = { 969 | "action": "submitted", 970 | "review": { 971 | "user": { 972 | "login": "gvanrossum", 973 | }, 974 | "state": "changes_requested".upper(), 975 | }, 976 | "pull_request": { 977 | "url": "https://api.github.com/pr/42", 978 | "issue_url": "https://api.github.com/issue/42", 979 | "comments_url": "https://api.github.com/comment/42", 980 | "user": {"login": "miss-islington"}, 981 | }, 982 | 
} 983 | event = sansio.Event(data, event="pull_request_review", delivery_id="12345") 984 | teams = [{"name": "python core", "id": 6}] 985 | items = { 986 | f"https://api.github.com/teams/6/memberships/gvanrossum": True, 987 | "https://api.github.com/teams/6/memberships/miss-islington": gidgethub.BadRequest( 988 | status_code=http.HTTPStatus(404) 989 | ), 990 | "https://api.github.com/issue/42": { 991 | "labels": [], 992 | "labels_url": "https://api.github.com/labels/42", 993 | }, 994 | } 995 | iterators = { 996 | "https://api.github.com/orgs/python/teams": teams, 997 | "https://api.github.com/pr/42/reviews": [ 998 | {"user": {"login": "brettcannon"}, "state": "changes_requested"} 999 | ], 1000 | } 1001 | gh = FakeGH(getiter=iterators, getitem=items) 1002 | await awaiting.router.dispatch(event, gh) 1003 | 1004 | assert len(gh.post_) == 2 1005 | labeling = gh.post_[0] 1006 | assert labeling[0] == "https://api.github.com/labels/42" 1007 | assert labeling[1] == [awaiting.Blocker.changes.value] 1008 | message = gh.post_[1] 1009 | assert message[0] == "https://api.github.com/comment/42" 1010 | 1011 | change_requested_message = awaiting.CHANGES_REQUESTED_MESSAGE.replace( 1012 | "{easter_egg}", "" 1013 | ).strip() 1014 | assert change_requested_message in message[1]["body"] 1015 | 1016 | 1017 | awaiting_labels = ( 1018 | "awaiting change review", 1019 | "awaiting changes", 1020 | "awaiting core review", 1021 | "awaiting merge", 1022 | "awaiting review", 1023 | ) 1024 | 1025 | 1026 | @pytest.mark.parametrize("label", awaiting_labels) 1027 | async def test_awaiting_label_removed_when_pr_merged(label): 1028 | encoded_label = label.replace(" ", "%20") 1029 | 1030 | issue_url = "https://api.github.com/repos/org/proj/issues/3749" 1031 | data = { 1032 | "action": "closed", 1033 | "pull_request": { 1034 | "merged": True, 1035 | "issue_url": issue_url, 1036 | }, 1037 | } 1038 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 1039 | 1040 | issue_data = { 1041 | issue_url: { 1042 | "labels": [ 1043 | { 1044 | "url": f"https://api.github.com/repos/python/cpython/labels/{encoded_label}", 1045 | "name": label, 1046 | }, 1047 | { 1048 | "url": "https://api.github.com/repos/python/cpython/labels/CLA%20signed", 1049 | "name": "CLA signed", 1050 | }, 1051 | ], 1052 | "labels_url": "https://api.github.com/repos/python/cpython/issues/12345/labels{/name}", 1053 | }, 1054 | } 1055 | 1056 | gh = FakeGH(getitem=issue_data) 1057 | 1058 | await awaiting.router.dispatch(event, gh) 1059 | assert ( 1060 | gh.delete_url 1061 | == f"https://api.github.com/repos/python/cpython/issues/12345/labels/{encoded_label}" 1062 | ) 1063 | 1064 | 1065 | @pytest.mark.parametrize("label", awaiting_labels) 1066 | async def test_awaiting_label_not_removed_when_pr_not_merged(label): 1067 | encoded_label = label.replace(" ", "%20") 1068 | 1069 | issue_url = "https://api.github.com/repos/org/proj/issues/3749" 1070 | data = { 1071 | "action": "closed", 1072 | "pull_request": { 1073 | "merged": False, 1074 | "issue_url": issue_url, 1075 | }, 1076 | } 1077 | event = sansio.Event(data, event="pull_request", delivery_id="12345") 1078 | 1079 | issue_data = { 1080 | issue_url: { 1081 | "labels": [ 1082 | { 1083 | "url": f"https://api.github.com/repos/python/cpython/labels/{encoded_label}", 1084 | "name": label, 1085 | }, 1086 | { 1087 | "url": "https://api.github.com/repos/python/cpython/labels/CLA%20signed", 1088 | "name": "CLA signed", 1089 | }, 1090 | ], 1091 | "labels_url": 
"https://api.github.com/repos/python/cpython/issues/12345/labels{/name}", 1092 | }, 1093 | } 1094 | 1095 | gh = FakeGH(getitem=issue_data) 1096 | 1097 | await awaiting.router.dispatch(event, gh) 1098 | assert gh.delete_url is None 1099 | 1100 | 1101 | @pytest.mark.parametrize("issue_url_key", ["url", "issue_url"]) 1102 | @pytest.mark.parametrize("repo_full_name", ["mariatta/cpython", "python/cpython"]) 1103 | async def test_new_commit_pushed_to_approved_pr(issue_url_key, repo_full_name): 1104 | # There is new commit on approved PR 1105 | username = "brettcannon" 1106 | sha = "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9" 1107 | data = { 1108 | "commits": [{"id": sha}], 1109 | "repository": {"full_name": repo_full_name}, 1110 | } 1111 | event = sansio.Event(data, event="push", delivery_id="12345") 1112 | teams = [{"name": "python core", "id": 6}] 1113 | items = { 1114 | f"https://api.github.com/teams/6/memberships/{username}": "OK", 1115 | f"https://api.github.com/search/issues?q=type:pr+repo:{repo_full_name}+sha:{sha}": { 1116 | "total_count": 1, 1117 | "items": [ 1118 | { 1119 | "number": 5547, 1120 | "title": "[3.6] bpo-32720: Fixed the replacement field grammar documentation. (GH-5544)", 1121 | "body": "\n\n`arg_name` and `element_index` are defined as `digit`+ instead of `integer`.\n(cherry picked from commit 7a561afd2c79f63a6008843b83733911d07f0119)\n\nCo-authored-by: Mariatta ", 1122 | "labels": [ 1123 | { 1124 | "name": "CLA signed", 1125 | }, 1126 | { 1127 | "name": "awaiting merge", 1128 | }, 1129 | ], 1130 | # the key could be 'url' or 'issue_url' 1131 | issue_url_key: "/repos/python/cpython/issues/5547", 1132 | } 1133 | ], 1134 | }, 1135 | "https://api.github.com/repos/python/cpython/issues/5547": { 1136 | "labels": [{"name": "awaiting merge"}], 1137 | "labels_url": "https://api.github.com/repos/python/cpython/issues/5547/labels{/name}", 1138 | "pull_request": { 1139 | "url": "https://api.github.com/repos/python/cpython/pulls/5547", 1140 | }, 1141 | "comments_url": "https://api.github.com/repos/python/cpython/issues/5547/comments", 1142 | }, 1143 | } 1144 | gh = FakeGH( 1145 | getiter={ 1146 | "https://api.github.com/orgs/python/teams": teams, 1147 | "https://api.github.com/repos/python/cpython/pulls/5547/reviews": [ 1148 | {"user": {"login": "brettcannon"}, "state": "approved"} 1149 | ], 1150 | }, 1151 | getitem=items, 1152 | ) 1153 | await awaiting.router.dispatch(event, gh) 1154 | 1155 | # 3 posts: 1156 | # - change the label 1157 | # - leave a comment 1158 | # - re-request review 1159 | assert len(gh.post_) == 3 1160 | 1161 | assert ( 1162 | gh.post_[0][0] 1163 | == "https://api.github.com/repos/python/cpython/issues/5547/labels" 1164 | ) 1165 | assert gh.post_[0][1] == [awaiting.Blocker.core_review.value] 1166 | 1167 | assert ( 1168 | gh.post_[1][0] 1169 | == "https://api.github.com/repos/python/cpython/issues/5547/comments" 1170 | ) 1171 | assert gh.post_[1][1] == { 1172 | "body": ACK.format( 1173 | greeting="There's a new commit after the PR has been approved.", 1174 | core_devs="@brettcannon", 1175 | ) 1176 | } 1177 | 1178 | 1179 | @pytest.mark.parametrize("issue_url_key", ["url", "issue_url"]) 1180 | @pytest.mark.parametrize("repo_full_name", ["mariatta/cpython", "python/cpython"]) 1181 | async def test_new_commit_pushed_to_not_approved_pr(issue_url_key, repo_full_name): 1182 | # There is new commit on approved PR 1183 | sha = "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9" 1184 | data = { 1185 | "commits": [{"id": sha}], 1186 | "repository": {"full_name": repo_full_name}, 1187 
| } 1188 | event = sansio.Event(data, event="push", delivery_id="12345") 1189 | items = { 1190 | f"https://api.github.com/search/issues?q=type:pr+repo:{repo_full_name}+sha:{sha}": { 1191 | "total_count": 1, 1192 | "items": [ 1193 | { 1194 | "number": 5547, 1195 | "title": "[3.6] bpo-32720: Fixed the replacement field grammar documentation. (GH-5544)", 1196 | "body": "\n\n`arg_name` and `element_index` are defined as `digit`+ instead of `integer`.\n(cherry picked from commit 7a561afd2c79f63a6008843b83733911d07f0119)\n\nCo-authored-by: Mariatta ", 1197 | "labels": [ 1198 | { 1199 | "name": "CLA signed", 1200 | }, 1201 | { 1202 | "name": "awaiting review", 1203 | }, 1204 | ], 1205 | # the key could be 'url' or 'issue_url' 1206 | issue_url_key: "/repos/python/cpython/issues/5547", 1207 | } 1208 | ], 1209 | }, 1210 | } 1211 | gh = FakeGH(getitem=items) 1212 | await awaiting.router.dispatch(event, gh) 1213 | 1214 | # no posts 1215 | assert len(gh.post_) == 0 1216 | 1217 | 1218 | async def test_pushed_without_commits(): 1219 | # A push event with no commits should be ignored. 1220 | 1221 | data = {"commits": []} 1222 | event = sansio.Event(data, event="push", delivery_id="12345") 1223 | gh = FakeGH() 1224 | await awaiting.router.dispatch(event, gh) 1225 | 1226 | # no posts 1227 | assert len(gh.post_) == 0 1228 | --------------------------------------------------------------------------------