├── tests
├── __init__.py
├── test_delete_branch.py
├── test_util.py
└── test_backport_pr.py
├── runtime.txt
├── pytest.ini
├── Procfile
├── .coveragerc
├── dev-requirements.txt
├── requirements.txt
├── tox.ini
├── codecov.yml
├── .github
├── workflows
│ ├── lint.yml
│ └── ci.yml
└── dependabot.yml
├── .pre-commit-config.yaml
├── miss_islington
├── delete_branch.py
├── util.py
├── __main__.py
├── backport_pr.py
└── tasks.py
├── .gitignore
├── README.rst
└── LICENSE
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/runtime.txt:
--------------------------------------------------------------------------------
1 | python-3.11.6
2 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | asyncio_mode=auto
3 |
--------------------------------------------------------------------------------
/Procfile:
--------------------------------------------------------------------------------
1 | web: python3 -m miss_islington
2 | worker: celery --app miss_islington.tasks.app worker --concurrency=1
3 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = True
3 | omit =
4 | miss_islington/tasks.py
5 |
6 | [report]
7 | fail_under = 100
8 |
--------------------------------------------------------------------------------
/dev-requirements.txt:
--------------------------------------------------------------------------------
1 | -r requirements.txt
2 | coverage
3 | pytest==8.4.2
4 | pytest-aiohttp==1.1.0
5 | pytest-asyncio==1.2.0
6 | pytest-cov==7.0.0
7 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiohttp==3.13.2
2 | cachetools==6.2.1
3 | celery==5.5.3
4 | cherry_picker==2.6.0
5 | click==8.3.0
6 | gidgethub==5.4.0
7 | redis==7.0.1
8 | sentry-sdk==2.43.0
9 | stamina==25.1.0
10 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py{313, 312, 311}
3 | toxworkdir = {env:TOX_WORK_DIR:.tox}
4 |
5 | [testenv]
6 | passenv =
7 | FORCE_COLOR
8 | skip_install = True
9 | deps =
10 | -r dev-requirements.txt
11 | commands =
12 | pytest --cov=. --cov-report=xml {posargs}
13 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | codecov:
2 | require_ci_to_pass: yes
3 |
4 | coverage:
5 | precision: 2
6 | round: down
7 | range: "70...100"
8 |
9 | parsers:
10 | gcov:
11 | branch_detection:
12 | conditional: yes
13 | loop: yes
14 | method: no
15 | macro: no
16 |
17 | comment:
18 | layout: "reach,diff,flags,tree"
19 | behavior: default
20 | require_changes: no
21 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | name: Lint
2 |
3 | on: [push, pull_request, workflow_dispatch]
4 |
5 | env:
6 | FORCE_COLOR: 1
7 |
8 | permissions:
9 | contents: read
10 |
11 | jobs:
12 | lint:
13 | runs-on: ubuntu-latest
14 |
15 | steps:
16 | - uses: actions/checkout@v5
17 | with:
18 | persist-credentials: false
19 | - uses: actions/setup-python@v6
20 | with:
21 | python-version: "3.x"
22 | - uses: tox-dev/action-pre-commit-uv@v1
23 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: pip
4 | directory: "/"
5 | schedule:
6 | interval: monthly
7 | assignees:
8 | - "ezio-melotti"
9 | groups:
10 | pip:
11 | patterns:
12 | - "*"
13 |
14 | - package-ecosystem: "github-actions"
15 | directory: "/"
16 | schedule:
17 | interval: monthly
18 | assignees:
19 | - "ezio-melotti"
20 | groups:
21 | actions:
22 | patterns:
23 | - "*"
24 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pygrep-hooks
3 | rev: v1.10.0
4 | hooks:
5 | - id: python-check-blanket-noqa
6 |
7 | - repo: https://github.com/pre-commit/pre-commit-hooks
8 | rev: v5.0.0
9 | hooks:
10 | - id: check-added-large-files
11 | - id: check-case-conflict
12 | - id: check-merge-conflict
13 | - id: check-yaml
14 | - id: debug-statements
15 | - id: end-of-file-fixer
16 | - id: forbid-submodules
17 | - id: requirements-txt-fixer
18 | - id: trailing-whitespace
19 |
20 | - repo: https://github.com/python-jsonschema/check-jsonschema
21 | rev: 0.31.0
22 | hooks:
23 | - id: check-dependabot
24 | - id: check-github-workflows
25 |
26 | - repo: meta
27 | hooks:
28 | - id: check-hooks-apply
29 | - id: check-useless-excludes
30 |
31 | ci:
32 | autoupdate_schedule: quarterly
33 |
--------------------------------------------------------------------------------
/miss_islington/delete_branch.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | import gidgethub
4 | import gidgethub.routing
5 | import stamina
6 |
# Router collecting this module's webhook handlers.
router = gidgethub.routing.Router()
8 |
9 |
10 | @router.register("pull_request", action="closed")
11 | @stamina.retry(on=gidgethub.GitHubException, timeout=120)
12 | async def delete_branch(event, gh, *args, **kwargs):
13 | """
14 | Delete the branch once miss-islington's PR is closed.
15 | """
16 | if event.data["pull_request"]["user"]["login"] == "miss-islington":
17 | branch_name = event.data["pull_request"]["head"]["ref"]
18 | url = f"/repos/miss-islington/cpython/git/refs/heads/{branch_name}"
19 | if event.data["pull_request"]["merged"]:
20 | await gh.delete(url)
21 | else:
22 | # this is delayed to ensure that the bot doesn't remove the branch
23 | # if PR was closed and reopened to rerun checks (or similar)
24 | await asyncio.sleep(60)
25 | updated_data = await gh.getitem(event.data["pull_request"]["url"])
26 | if updated_data["state"] == "closed":
27 | await gh.delete(url)
28 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on: [push, pull_request, workflow_dispatch]
4 |
5 | permissions:
6 | contents: read
7 |
8 | env:
9 | FORCE_COLOR: 1
10 |
11 | jobs:
12 | test:
13 | name: Python ${{ matrix.python-version }}
14 | runs-on: ubuntu-latest
15 | strategy:
16 | fail-fast: false
17 | matrix:
18 | # remember to update the envlist in tox.ini too
19 | python-version: ["3.11", "3.12", "3.13"]
20 |
21 | steps:
22 | - uses: actions/checkout@v5
23 | with:
24 | persist-credentials: false
25 |
26 | - name: Set up Python ${{ matrix.python-version }}
27 | uses: actions/setup-python@v6
28 | with:
29 | python-version: ${{ matrix.python-version }}
30 | allow-prereleases: true
31 |
32 | - name: Install uv
33 | uses: hynek/setup-cached-uv@v2
34 | with:
35 | cache-dependency-path: |
36 | requirements.txt
37 | dev-requirements.txt
38 |
39 | - name: Tox tests
40 | run: |
41 | uvx --with tox-uv tox -e py
42 |
43 | - uses: codecov/codecov-action@v5
44 | if: always()
45 | with:
46 | files: ./coverage.xml
47 | flags: Python_${{ matrix.python-version }}
48 | env:
49 | CODECOV_TOKEN: ${{ secrets.CODECOV_ORG_TOKEN }}
50 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | .venv/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 |
56 | # Flask stuff:
57 | instance/
58 | .webassets-cache
59 |
60 | # Scrapy stuff:
61 | .scrapy
62 |
63 | # Sphinx documentation
64 | docs/_build/
65 |
66 | # PyBuilder
67 | target/
68 |
69 | # IPython Notebook
70 | .ipynb_checkpoints
71 |
72 | # pyenv
73 | .python-version
74 |
75 | # celery beat schedule file
76 | celerybeat-schedule
77 |
78 | # dotenv
79 | .env
80 |
81 | # virtualenv
82 | venv/
83 | ENV/
84 |
85 | # Spyder project settings
86 | .spyderproject
87 |
88 | # Rope project settings
89 | .ropeproject
90 |
91 | .pytest_cache
92 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | miss-islington
2 | ==============
3 |
4 | .. image:: https://github.com/python/miss-islington/actions/workflows/ci.yml/badge.svg?event=push
5 | :target: https://github.com/python/miss-islington/actions
6 | .. image:: https://codecov.io/gh/python/miss-islington/branch/main/graph/badge.svg
7 | :target: https://codecov.io/gh/python/miss-islington
8 | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
9 |    :target: https://github.com/psf/black
10 |
11 | 🐍🍒⛏🤖
12 |
13 | Bot for backporting and merging `CPython <https://github.com/python/cpython>`_ Pull Requests.
14 |
15 | Backporting a PR on CPython
16 | ===========================
17 |
18 | Prior to merging a PR, a Python core developer should apply the
19 | ``needs backport to X.Y`` label to the pull request.
20 | Once the pull request has been merged, `@miss-islington <https://github.com/miss-islington>`_
21 | will prepare the backport PR.
22 |
23 | If `@miss-islington <https://github.com/miss-islington>`_ encountered any issue while backporting,
24 | it will leave a comment about it, and the PR will be assigned to the core developer
25 | who merged the PR. The PR then needs to be backported manually.
26 |
27 |
28 | Merging the Backport PR
29 | =======================
30 |
31 | If a Python core developer approved the backport PR made by miss-islington, it will be
32 | automatically merged once all the CI checks passed.
33 |
34 |
35 | Merging PRs
36 | ===========
37 |
38 | If a Python core developer approved a PR made by anyone and added the "🤖 automerge" label,
39 | it will be automatically merged once all the CI checks pass.
40 |
41 |
42 | **Aside**: where does the name come from?
43 | =========================================
44 |
45 | According to Wikipedia, Miss Islington is the name of the witch in the
46 | `Monty Python and the Holy Grail <https://en.wikipedia.org/wiki/Monty_Python_and_the_Holy_Grail>`_
47 | sketch.
48 |
--------------------------------------------------------------------------------
/miss_islington/util.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | import textwrap
3 |
4 |
async def comment_on_pr(gh, issue_number, message):
    """Post *message* (dedented) as a comment on a PR/issue.

    Returns the GitHub API response for the created comment.
    """
    url = f"/repos/python/cpython/issues/{issue_number}/comments"
    body = textwrap.dedent(message)
    response = await gh.post(url, data={"body": body})
    print(f"Commented at {response['html_url']}, message: {body}")
    return response
15 |
16 |
async def assign_pr_to_core_dev(gh, issue_number, coredev_login):
    """Assign the PR/issue to a core developer.

    Should be done when miss-islington failed to backport, so a human
    can follow up.
    """
    url = f"/repos/python/cpython/issues/{issue_number}"
    await gh.patch(url, data={"assignees": [coredev_login]})
26 |
27 |
async def leave_comment(gh, pr_number, message):
    """Leave a comment on a PR/Issue (message posted verbatim, no dedent)."""
    url = f"/repos/python/cpython/issues/{pr_number}/comments"
    await gh.post(url, data={"body": message})
35 |
36 |
def is_cpython_repo():
    """Return True if the current working directory is a CPython clone.

    Checks that the first commit of CPython's history is reachable.
    Returns False when the commit is absent, the directory is not a git
    repository, or the ``git`` executable is unavailable (previously a
    missing ``git`` raised FileNotFoundError, which is an OSError and was
    not caught by ``subprocess.SubprocessError``).
    """
    cmd = "git log -r 7f777ed95a19224294949e1b4ce56bbffcb1fe9f"
    try:
        subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
    except (subprocess.SubprocessError, OSError):
        return False
    return True
44 |
45 |
async def get_gh_participants(gh, pr_number):
    """Return an @-mention string crediting a PR's author and merger.

    The merger is omitted when the PR is unmerged, merged by its own
    author, or merged by miss-islington itself.
    """
    pr = await gh.getitem(f"/repos/python/cpython/pulls/{pr_number}")
    author = pr["user"]["login"]

    merger = pr["merged_by"]["login"] if pr["merged_by"] else None
    if merger == "miss-islington":
        merger = None

    if merger is None or merger == author:
        return f"@{author}"
    return f"@{author} and @{merger}"
62 |
63 |
def get_participants(created_by, merged_by):
    """Return "@author" or "@author and @merger" for backport credits.

    The merger is omitted when it equals the author or is miss-islington.
    """
    if merged_by in (created_by, "miss-islington"):
        return f"@{created_by}"
    return f"@{created_by} and @{merged_by}"
71 |
72 |
def normalize_title(title, body):
    """Normalize the title if it spills over into the PR's body."""
    if title.endswith("…") and body.startswith("…"):
        # Re-join the truncated halves.  Being paranoid in case \r\n is used.
        return title[:-1] + body[1:].partition("\r\n")[0]
    return title
80 |
--------------------------------------------------------------------------------
/miss_islington/__main__.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import os
3 | import sys
4 | import traceback
5 |
6 | import aiohttp
7 | import cachetools
8 | from aiohttp import web
9 | from gidgethub import aiohttp as gh_aiohttp
10 | from gidgethub import routing, sansio
11 | from gidgethub import apps
12 |
13 | import sentry_sdk
14 | from sentry_sdk.integrations.aiohttp import AioHttpIntegration
15 |
16 |
17 | from . import backport_pr, delete_branch
18 |
# Combine the sub-routers so a single dispatch covers all webhook handlers.
router = routing.Router(
    backport_pr.router, delete_branch.router
)

# Cache for gidgethub conditional requests (saves rate-limit quota).
cache = cachetools.LRUCache(maxsize=500)
24 |
25 |
async def main(request):
    """Handle one GitHub webhook delivery: validate, authenticate, dispatch.

    Returns HTTP 200 on success (or for "ping" events), HTTP 500 on any
    unexpected error.
    """
    try:
        body = await request.read()

        secret = os.environ.get("GH_SECRET")
        event = sansio.Event.from_http(request.headers, body, secret=secret)
        print("GH delivery ID", event.delivery_id, file=sys.stderr)
        if event.event == "ping":
            return web.Response(status=200)
        async with aiohttp.ClientSession() as session:
            gh = gh_aiohttp.GitHubAPI(
                session, "python/cpython", cache=cache
            )
            # This path only works on GitHub App: authenticate as the
            # installation that generated the event.
            installation_id = event.data["installation"]["id"]
            installation_access_token = await apps.get_installation_access_token(
                gh,
                installation_id=installation_id,
                app_id=os.environ.get("GH_APP_ID"),
                private_key=os.environ.get("GH_PRIVATE_KEY")
            )
            gh.oauth_token = installation_access_token["token"]

            # Give GitHub some time to reach internal consistency.
            await asyncio.sleep(1)
            await router.dispatch(event, gh)
            try:
                print(
                    f"""\
GH requests remaining: {gh.rate_limit.remaining}/{gh.rate_limit.limit}, \
reset time: {gh.rate_limit.reset_datetime:%b-%d-%Y %H:%M:%S %Z}, \
GH delivery ID {event.delivery_id} \
"""
                )
            except AttributeError:
                # rate_limit may be absent if no API call was made.
                pass
            return web.Response(status=200)
    except Exception:
        # `exc` was previously bound but unused; print_exc() already
        # reports the active exception.
        traceback.print_exc(file=sys.stderr)
        return web.Response(status=500)
66 |
67 |
68 | @router.register("installation", action="created")
69 | async def repo_installation_added(event, gh, *args, **kwargs):
70 | # installation_id = event.data["installation"]["id"]
71 | print(f"App installed by {event.data['installation']['account']['login']}, installation_id: {event.data['installation']['id']}")
72 |
73 |
# Module-level bootstrap: configure Sentry, then start the webhook server.
sentry_sdk.init(dsn=os.environ.get("SENTRY_DSN"), integrations=[AioHttpIntegration()])
app = web.Application()
app.router.add_post("/", main)
port = os.environ.get("PORT")
if port is not None:
    # The environment supplies PORT as a string; aiohttp wants an int.
    port = int(port)

web.run_app(app, port=port)
82 |
--------------------------------------------------------------------------------
/miss_islington/backport_pr.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import os
3 | import random
4 |
5 | import gidgethub.routing
6 | from kombu import exceptions as kombu_ex
7 | from redis import exceptions as redis_ex
8 |
9 | from . import tasks, util
10 |
11 | EASTER_EGG = "I'm not a witch! I'm not a witch!"
12 |
13 | router = gidgethub.routing.Router()
14 |
15 |
16 | @router.register("pull_request", action="closed")
17 | @router.register("pull_request", action="labeled")
18 | async def backport_pr(event, gh, *args, **kwargs):
19 | if event.data["pull_request"]["merged"]:
20 |
21 | issue_number = event.data["pull_request"]["number"]
22 | merged_by = event.data["pull_request"]["merged_by"]["login"]
23 | created_by = event.data["pull_request"]["user"]["login"]
24 |
25 | commit_hash = event.data["pull_request"]["merge_commit_sha"]
26 |
27 | pr_labels = []
28 | if event.data["action"] == "labeled":
29 | pr_labels = [event.data["label"]]
30 | else:
31 | gh_issue = await gh.getitem(
32 | event.data["repository"]["issues_url"],
33 | {"number": f"{event.data['pull_request']['number']}"},
34 | )
35 | pr_labels = await gh.getitem(gh_issue["labels_url"])
36 |
37 | branches = [
38 | label["name"].split()[-1]
39 | for label in pr_labels
40 | if label["name"].startswith("needs backport to")
41 | ]
42 | installation_id = event.data["installation"]["id"]
43 | if branches:
44 | easter_egg = ""
45 | if random.random() < 0.1:
46 | easter_egg = EASTER_EGG
47 | thanks_to = ""
48 | if created_by == merged_by or merged_by == "miss-islington":
49 | thanks_to = f"Thanks @{created_by} for the PR 🌮🎉."
50 | else:
51 | thanks_to = f"Thanks @{created_by} for the PR, and @{merged_by} for merging it 🌮🎉."
52 | message = (
53 | f"{thanks_to}. I'm working now to backport this PR to: {', '.join(branches)}."
54 | f"\n🐍🍒⛏🤖 {easter_egg}"
55 | )
56 |
57 | await util.leave_comment(gh, issue_number, message)
58 |
59 | sorted_branches = sorted(
60 | branches, reverse=True, key=lambda v: tuple(map(int, v.split(".")))
61 | )
62 |
63 | for branch in sorted_branches:
64 | await kickoff_backport_task(
65 | gh, commit_hash, branch, issue_number, created_by, merged_by, installation_id=installation_id
66 | )
67 |
68 |
async def kickoff_backport_task(
    gh, commit_hash, branch, issue_number, created_by, merged_by, installation_id
):
    """Queue a celery backport task; comment on the PR if the broker is down."""
    try:
        tasks.backport_task.delay(
            commit_hash,
            branch,
            issue_number=issue_number,
            created_by=created_by,
            merged_by=merged_by,
            installation_id=installation_id,
        )
    except (redis_ex.ConnectionError, kombu_ex.OperationalError) as err:
        # Broker unreachable: tell the humans how to retry the backport.
        err_message = f"I'm having trouble backporting to `{branch}`. Reason: '`{err}`'. Please retry by removing and re-adding the `needs backport to {branch}` label."
        await util.leave_comment(gh, issue_number, err_message)
84 |
--------------------------------------------------------------------------------
/tests/test_delete_branch.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from gidgethub import sansio
4 |
5 | from miss_islington import delete_branch
6 |
7 |
class FakeGH:
    """Minimal GitHub API stub that records the URLs it was called with."""

    def __init__(self, *, getitem=None):
        self._getitem_return = getitem
        self.getitem_url = None
        self.post_data = None

    async def getitem(self, url):
        self.getitem_url = url
        return self._getitem_return[url]

    async def delete(self, url):
        self.delete_url = url
21 |
22 |
async def noop_sleep(delay, result=None):
    """Drop-in replacement for asyncio.sleep that returns immediately."""
25 |
26 |
async def test_branch_deleted_when_pr_merged():
    """A merged miss-islington PR gets its backport branch deleted."""
    ref = "backport-17ab8f0-3.7"
    data = {
        "action": "closed",
        "pull_request": {
            "number": 5722,
            "user": {"login": "miss-islington"},
            "merged": True,
            "merged_by": {"login": "miss-islington"},
            "head": {"ref": ref},
        },
    }
    event = sansio.Event(data, event="pull_request", delivery_id="1")

    gh = FakeGH()
    await delete_branch.router.dispatch(event, gh)
    assert gh.post_data is None  # does not leave a comment
    assert gh.delete_url == f"/repos/miss-islington/cpython/git/refs/heads/{ref}"
47 |
48 |
async def test_branch_deleted_and_thank_committer():
    """Branch is deleted even when a human merged the bot's PR."""
    ref = "backport-17ab8f0-3.7"
    data = {
        "action": "closed",
        "pull_request": {
            "number": 5722,
            "user": {"login": "miss-islington"},
            "merged": True,
            "merged_by": {"login": "Mariatta"},
            "head": {"ref": ref},
        },
    }
    event = sansio.Event(data, event="pull_request", delivery_id="1")

    gh = FakeGH()
    await delete_branch.router.dispatch(event, gh)
    assert gh.post_data is None  # does not leave a comment
    assert gh.delete_url == f"/repos/miss-islington/cpython/git/refs/heads/{ref}"
69 |
70 |
async def test_branch_deleted_and_thanks():
    """Branch is deleted when the bot merged its own PR.

    NOTE(review): this duplicates test_branch_deleted_when_pr_merged
    (identical event data) — consider removing one of the two.
    """
    ref = "backport-17ab8f0-3.7"
    data = {
        "action": "closed",
        "pull_request": {
            "number": 5722,
            "user": {"login": "miss-islington"},
            "merged": True,
            "merged_by": {"login": "miss-islington"},
            "head": {"ref": ref},
        },
    }
    event = sansio.Event(data, event="pull_request", delivery_id="1")

    gh = FakeGH()
    await delete_branch.router.dispatch(event, gh)
    assert gh.post_data is None  # does not leave a comment
    assert gh.delete_url == f"/repos/miss-islington/cpython/git/refs/heads/{ref}"
91 |
92 |
async def test_branch_deleted_when_pr_closed(monkeypatch):
    """A PR that stays closed after the grace period loses its branch."""
    ref = "backport-17ab8f0-3.7"
    pr_url = "https://api.github.com/repos/python/cpython/pulls/5722"
    data = {
        "action": "closed",
        "pull_request": {
            "number": 5722,
            "user": {"login": "miss-islington"},
            "merged": False,
            "merged_by": {"login": None},
            "head": {"ref": ref},
            "url": pr_url,
        },
    }
    event = sansio.Event(data, event="pull_request", delivery_id="1")

    # Skip the real 60-second grace wait.
    monkeypatch.setattr(asyncio, "sleep", noop_sleep)
    gh = FakeGH(getitem={pr_url: {"state": "closed"}})
    await delete_branch.router.dispatch(event, gh)
    assert gh.post_data is None  # does not leave a comment
    assert gh.delete_url == f"/repos/miss-islington/cpython/git/refs/heads/{ref}"
118 |
119 |
async def test_branch_not_deleted_when_pr_closed_and_reopened(monkeypatch):
    """A PR reopened during the grace period keeps its branch."""
    pr_url = "https://api.github.com/repos/python/cpython/pulls/5722"
    data = {
        "action": "closed",
        "pull_request": {
            "number": 5722,
            "user": {"login": "miss-islington"},
            "merged": False,
            "merged_by": {"login": None},
            "head": {"ref": "backport-17ab8f0-3.7"},
            "url": pr_url,
        },
    }
    event = sansio.Event(data, event="pull_request", delivery_id="1")

    # Skip the real 60-second grace wait.
    monkeypatch.setattr(asyncio, "sleep", noop_sleep)
    gh = FakeGH(getitem={pr_url: {"state": "opened"}})
    await delete_branch.router.dispatch(event, gh)
    assert gh.post_data is None  # does not leave a comment
    assert not hasattr(gh, "delete_url")
142 |
143 |
async def test_ignore_non_miss_islingtons_prs():
    """PRs authored by anyone other than the bot are left alone."""
    data = {
        "action": "closed",
        "pull_request": {
            "number": 5722,
            "user": {"login": "Mariatta"},
            "merged": True,
            "merged_by": {"login": "Mariatta"},
            "head": {"ref": "backport-17ab8f0-3.7"},
        },
    }
    event = sansio.Event(data, event="pull_request", delivery_id="1")

    gh = FakeGH()
    await delete_branch.router.dispatch(event, gh)
    assert gh.post_data is None  # does not leave a comment
    assert not hasattr(gh, "delete_url")
160 |
--------------------------------------------------------------------------------
/tests/test_util.py:
--------------------------------------------------------------------------------
1 | import http
2 | import textwrap
3 |
4 | import pytest
5 | import gidgethub
6 |
7 | from unittest import mock
8 |
9 |
10 | from miss_islington import util
11 |
12 |
class FakeGH:
    """GitHub API stub recording URLs/payloads and returning canned values.

    ``post`` can be configured with an Exception instance to simulate a
    failing API call.
    """

    def __init__(self, *, getitem=None, post=None, patch=None):
        self._getitem_return = getitem
        self._post_return = post
        self._patch_return = patch
        self.getitem_url = None
        self.patch_url = self.patch_data = None
        self.post_url = self.post_data = None

    async def getitem(self, url):
        self.getitem_url = url
        return self._getitem_return[self.getitem_url]

    async def patch(self, url, *, data):
        self.patch_url = url
        self.patch_data = data
        return self._patch_return

    async def post(self, url, *, data):
        self.post_url = url
        self.post_data = data
        # Removed leftover debug print() calls that cluttered test output.
        if isinstance(self._post_return, Exception):
            raise self._post_return
        return self._post_return
40 |
41 |
def test_title_normalization():
    """normalize_title re-joins titles GitHub truncated into the body."""
    # A title that doesn't spill over is returned unchanged.
    assert util.normalize_title("abcd", "1234") == "abcd"

    expected = (
        "[2.7] bpo-29243: Fix Makefile with respect to --enable-optimizations (GH-1478)"
    )
    cases = [
        (
            "[2.7] bpo-29243: Fix Makefile with respect to --enable-optimizations …",
            "…(GH-1478)\r\n\r\nstuff",
        ),
        (
            "[2.7] bpo-29243: Fix Makefile with respect to --enable-optimizations …",
            "…(GH-1478)",
        ),
        (
            "[2.7] bpo-29243: Fix Makefile with respect to --enable-optimizations (GH-14…",
            "…78)",
        ),
    ]
    for title, body in cases:
        assert util.normalize_title(title, body) == expected
63 |
64 |
65 |
async def test_get_gh_participants_different_creator_and_committer():
    """Both author and merger are mentioned when they differ."""
    pr = {
        "user": {"login": "miss-islington"},
        "merged_by": {"login": "bedevere-bot"},
    }
    gh = FakeGH(getitem={"/repos/python/cpython/pulls/5544": pr})
    result = await util.get_gh_participants(gh, 5544)
    assert result == "@miss-islington and @bedevere-bot"
77 |
78 |
async def test_get_gh_participants_same_creator_and_committer():
    """Only one mention when the author merged their own PR."""
    pr = {
        "user": {"login": "bedevere-bot"},
        "merged_by": {"login": "bedevere-bot"},
    }
    gh = FakeGH(getitem={"/repos/python/cpython/pulls/5544": pr})
    result = await util.get_gh_participants(gh, 5544)
    assert result == "@bedevere-bot"
90 |
91 |
async def test_get_gh_participants_pr_not_merged():
    """Only the author is mentioned for an unmerged PR."""
    pr = {
        "user": {"login": "bedevere-bot"},
        "merged_by": None,
    }
    gh = FakeGH(getitem={"/repos/python/cpython/pulls/5544": pr})
    result = await util.get_gh_participants(gh, 5544)
    assert result == "@bedevere-bot"
103 |
104 |
async def test_get_gh_participants_merged_by_miss_islington():
    """The bot is never credited as merger."""
    pr = {
        "user": {"login": "bedevere-bot"},
        "merged_by": {"login": "miss-islington"},
    }
    gh = FakeGH(getitem={"/repos/python/cpython/pulls/5544": pr})
    result = await util.get_gh_participants(gh, 5544)
    assert result == "@bedevere-bot"
116 |
117 |
def test_get_participants_different_creator_and_committer():
    """Author and merger are both credited when they differ."""
    result = util.get_participants("miss-islington", "bedevere-bot")
    assert result == "@miss-islington and @bedevere-bot"
123 |
124 |
def test_get_participants_merged_by_miss_islington():
    """Only the author is credited when the bot itself merged."""
    result = util.get_participants("bedevere-bot", "miss-islington")
    assert result == "@bedevere-bot"
127 |
128 |
129 | @mock.patch("subprocess.check_output")
130 | def test_is_cpython_repo_contains_first_cpython_commit(subprocess_check_output):
131 | mock_output = b"""commit 7f777ed95a19224294949e1b4ce56bbffcb1fe9f
132 | Author: Guido van Rossum
133 | Date: Thu Aug 9 14:25:15 1990 +0000
134 |
135 | Initial revision"""
136 | subprocess_check_output.return_value = mock_output
137 | assert util.is_cpython_repo()
138 |
139 |
def test_is_not_cpython_repo():
    """is_cpython_repo() is False outside a CPython clone.

    The test suite's working directory is not a CPython checkout.
    """
    # Use `not` instead of `== False` (PEP 8 / E712).
    assert not util.is_cpython_repo()
142 |
143 |
144 |
145 |
async def test_comment_on_pr_success():
    """comment_on_pr posts the message to the issue's comments endpoint."""
    issue_number = 100
    message = "Thanks for the PR!"
    comment_url = (
        f"https://github.com/python/cpython/pull/{issue_number}#issuecomment-401309376"
    )

    gh = FakeGH(post={"html_url": comment_url})

    await util.comment_on_pr(gh, issue_number, message)
    assert gh.post_url == f"/repos/python/cpython/issues/{issue_number}/comments"
    assert gh.post_data == {"body": message}
159 |
160 |
async def test_comment_on_pr_failure():
    """Errors from the GitHub API propagate out of comment_on_pr."""
    gh = FakeGH(post=gidgethub.BadRequest(status_code=http.HTTPStatus(400)))

    with pytest.raises(gidgethub.BadRequest):
        await util.comment_on_pr(gh, 100, "Thanks for the PR!")
168 |
169 |
async def test_assign_pr_to_coredev():
    """assign_pr_to_core_dev patches the issue with the core dev as assignee."""
    issue_number = 100
    coredev_login = "Mariatta"
    gh = FakeGH()

    await util.assign_pr_to_core_dev(gh, issue_number, coredev_login)
    assert gh.patch_url == f"/repos/python/cpython/issues/{issue_number}"
    # Previously only the URL was checked — also verify the payload.
    assert gh.patch_data == {"assignees": [coredev_login]}
178 |
--------------------------------------------------------------------------------
/miss_islington/tasks.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import os
3 | import ssl
4 | import subprocess
5 |
6 | import aiohttp
7 | import cachetools
8 | import celery
9 | from cherry_picker import cherry_picker
10 | from celery import bootsteps
11 | from gidgethub import aiohttp as gh_aiohttp
12 | from gidgethub import apps
13 | import sentry_sdk
14 | from sentry_sdk.integrations.celery import CeleryIntegration
15 |
16 | from . import util
17 |
18 |
19 | app = celery.Celery("backport_cpython")
20 |
21 | app.conf.update(
22 | broker_url=os.environ["HEROKU_REDIS_MAROON_URL"],
23 | result_backend=os.environ["HEROKU_REDIS_MAROON_URL"],
24 | broker_connection_retry_on_startup=True,
25 | broker_use_ssl={"ssl_cert_reqs": ssl.CERT_NONE},
26 | redis_backend_use_ssl={"ssl_cert_reqs": ssl.CERT_NONE},
27 | )
28 |
29 | cache = cachetools.LRUCache(maxsize=500)
30 | sentry_sdk.init(os.environ.get("SENTRY_DSN"), integrations=[CeleryIntegration()])
31 |
32 | CHERRY_PICKER_CONFIG = {
33 | "team": "python",
34 | "repo": "cpython",
35 | "check_sha": "7f777ed95a19224294949e1b4ce56bbffcb1fe9f",
36 | "fix_commit_msg": True,
37 | "default_branch": "main",
38 | "require_version_in_branch_name": True,
39 | "draft_pr": False,
40 | }
41 |
42 |
@app.task()
def setup_cpython_repo():
    """One-time worker setup: clone miss-islington's CPython fork.

    Clones the fork into ./cpython, sets the global git identity, chdirs
    into the clone, and adds python/cpython as the "upstream" remote.
    Skips everything if a cpython directory already exists.
    """
    print("Setting up CPython repository") # pragma: nocover
    if "cpython" not in os.listdir("."):
        # Clone over HTTPS using the GH_AUTH token for authentication.
        subprocess.check_output(
            f"git clone https://{os.environ.get('GH_AUTH')}:x-oauth-basic@github.com/miss-islington/cpython.git".split()
        )
        subprocess.check_output(
            "git config --global user.email 'mariatta.wijaya+miss-islington@gmail.com'".split()
        )
        subprocess.check_output(
            ["git", "config", "--global", "user.name", "'Miss Islington (bot)'"]
        )
        # The worker stays chdir'd into the clone for subsequent tasks.
        os.chdir("./cpython")
        subprocess.check_output(
            f"git remote add upstream https://{os.environ.get('GH_AUTH')}:x-oauth-basic@github.com/python/cpython.git".split()
        )
        print("Finished setting up CPython Repo")
    else:
        print("cpython directory already exists")
63 |
64 |
@app.task()
def backport_task(commit_hash, branch, *, issue_number, created_by, merged_by, installation_id):
    """Celery task entry point: synchronously drive the async backport.

    Parameters are forwarded unchanged to backport_task_asyncio; see that
    coroutine for their meaning.
    """
    # asyncio.run() creates a fresh event loop for each task invocation and
    # closes it afterwards.  The previous get_event_loop()/run_until_complete
    # pattern is deprecated since Python 3.10 (the runtime here is 3.11) and
    # could reuse stale loop state between task runs.
    asyncio.run(
        backport_task_asyncio(
            commit_hash,
            branch,
            issue_number=issue_number,
            created_by=created_by,
            merged_by=merged_by,
            installation_id=installation_id,
        )
    )
78 |
79 |
async def backport_task_asyncio(
    commit_hash, branch, *, issue_number, created_by, merged_by, installation_id
):
    """Backport ``commit_hash`` into ``branch`` on python/cpython.

    Authenticates as the GitHub App installation, ensures the CWD is the
    CPython clone, cleans the working tree, then drives cherry_picker.  On
    any known cherry_picker failure the bot comments instructions on PR
    ``issue_number``, assigns the PR to ``merged_by``, and aborts the
    cherry-pick so the clone is left clean.
    """
    async with aiohttp.ClientSession() as session:
        gh = gh_aiohttp.GitHubAPI(
            session, "python/cpython", cache=cache
        )
        # This path only works on GitHub App
        installation_access_token = await apps.get_installation_access_token(
            gh,
            installation_id=installation_id,
            app_id=os.environ.get("GH_APP_ID"),
            private_key=os.environ.get("GH_PRIVATE_KEY")
        )
        gh.oauth_token = installation_access_token["token"]
        if not util.is_cpython_repo():
            # cd to cpython if we're not already in it
            if "cpython" in os.listdir("."):
                os.chdir("./cpython")
            else:
                # No clone available: report that we cannot backport and hand
                # the PR back to the core dev.
                # NOTE(review): execution still falls through to the
                # cherry-pick below — confirm whether an early return was
                # intended here.
                print(f"pwd: {os.getcwd()}, listdir: {os.listdir('.')}")

                await util.comment_on_pr(
                    gh,
                    issue_number,
                    f"""\
{util.get_participants(created_by, merged_by)}, I can't backport for now. Please try again later or
backport using [cherry_picker](https://pypi.org/project/cherry-picker/) on command line.
```
cherry_picker {commit_hash} {branch}
```
""",
                )
                await util.assign_pr_to_core_dev(gh, issue_number, merged_by)

        # Ensure that we don't have any changes lying around
        subprocess.check_output(['git', 'reset', '--hard'])
        subprocess.check_output(['git', 'clean', '-fxd'])

        cp = cherry_picker.CherryPicker(
            "origin",
            commit_hash,
            [branch],
            config=CHERRY_PICKER_CONFIG,
            prefix_commit=False,
        )
        try:
            cp.backport()
        except cherry_picker.BranchCheckoutException as bce:
            # Could not check out the backport branch: report, reassign,
            # log the cherry_picker state, and abort.
            await util.comment_on_pr(
                gh,
                issue_number,
                f"""\
Sorry {util.get_participants(created_by, merged_by)}, I had trouble checking out the `{branch}` backport branch.
Please retry by removing and re-adding the "needs backport to {branch}" label.
Alternatively, you can backport using [cherry_picker](https://pypi.org/project/cherry-picker/) on the command line.
```
cherry_picker {commit_hash} {branch}
```
""",
            )
            await util.assign_pr_to_core_dev(gh, issue_number, merged_by)
            bce_state = cp.get_state_and_verify()
            print(bce_state, bce)
            cp.abort_cherry_pick()
        except cherry_picker.CherryPickException as cpe:
            # Merge conflict: a human must do this backport by hand.
            await util.comment_on_pr(
                gh,
                issue_number,
                f"""\
Sorry, {util.get_participants(created_by, merged_by)}, I could not cleanly backport this to `{branch}` due to a conflict.
Please backport using [cherry_picker](https://pypi.org/project/cherry-picker/) on command line.
```
cherry_picker {commit_hash} {branch}
```
""",
            )
            await util.assign_pr_to_core_dev(gh, issue_number, merged_by)
            cpe_state = cp.get_state_and_verify()
            print(cpe_state, cpe)
            cp.abort_cherry_pick()
        except cherry_picker.GitHubException as ghe:
            # GitHub-side failure while completing the backport.
            # (Fixed message typo: was "Please backport backport using".)
            await util.comment_on_pr(
                gh,
                issue_number,
                f"""\
Sorry {util.get_participants(created_by, merged_by)}, I had trouble completing the backport.
Please retry by removing and re-adding the "needs backport to {branch}" label.
Please backport using [cherry_picker](https://pypi.org/project/cherry-picker/) on the command line.
```
cherry_picker {commit_hash} {branch}
```
""",
            )
            await util.assign_pr_to_core_dev(gh, issue_number, merged_by)
            ghe_state = cp.get_state_and_verify()
            print(ghe_state, ghe)
            cp.abort_cherry_pick()
179 |
180 |
class InitRepoStep(bootsteps.StartStopStep):
    """Celery worker bootstep: prepare the CPython clone before consuming."""

    def start(self, c):
        # Runs once as the worker comes up; ``c`` is the Celery consumer.
        print("Initialize the repository.")
        setup_cpython_repo()
185 |
186 |
187 | app.steps["worker"].add(InitRepoStep)
188 |
--------------------------------------------------------------------------------
/tests/test_backport_pr.py:
--------------------------------------------------------------------------------
1 | import os
2 | from unittest import mock
3 |
4 | from gidgethub import sansio
5 |
6 | import pytest
7 | import redis
8 | import kombu
9 |
# tasks.py reads HEROKU_REDIS_MAROON_URL at import time to configure Celery,
# so a dummy value must be in place before importing miss_islington.
os.environ["HEROKU_REDIS_MAROON_URL"] = "someurl"

from miss_islington import backport_pr
13 |
14 |
class FakeGH:
    """Minimal stand-in for gidgethub's GitHubAPI used by these tests.

    Records the URL of each call and returns canned responses supplied via
    the constructor.  ``post_url``/``post_data`` are deliberately NOT set in
    __init__ so tests can assert via hasattr() that no POST happened.
    """

    def __init__(self, *, getitem=None, post=None):
        self._getitem_return = getitem
        self.getitem_url = None
        self.getiter_url = None
        self._post_return = post

    async def getitem(self, url, url_vars=None):
        # url_vars defaults to None instead of {}: a mutable default
        # argument is shared across calls and is a classic Python pitfall.
        self.getitem_url = sansio.format_url(url, url_vars or {})
        return self._getitem_return[self.getitem_url]

    async def post(self, url, *, data):
        self.post_url = url
        self.post_data = data
        return self._post_return
30 |
31 |
async def test_unmerged_pr_is_ignored():
    """A closed event for an unmerged PR triggers no GitHub lookups."""
    event = sansio.Event(
        {"action": "closed", "pull_request": {"merged": False}},
        event="pull_request",
        delivery_id="1",
    )
    gh = FakeGH()
    await backport_pr.router.dispatch(event, gh)
    assert gh.getitem_url is None
38 |
39 |
async def test_labeled_on_unmerged_pr_is_ignored():
    """A labeled event on an unmerged PR triggers no GitHub lookups."""
    event = sansio.Event(
        {"action": "labeled", "pull_request": {"merged": False}},
        event="pull_request",
        delivery_id="1",
    )
    gh = FakeGH()
    await backport_pr.router.dispatch(event, gh)
    assert gh.getitem_url is None
46 |
47 |
async def test_labeled_on_merged_pr_no_backport_label():
    """Adding a non-backport label to a merged PR posts no comment."""
    data = {
        "action": "labeled",
        "pull_request": {
            "merged": True,
            "number": 1,
            "merged_by": {"login": "Mariatta"},
            "user": {"login": "Mariatta"},
            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
        },
        "repository": {
            "issues_url": "https://api.github.com/repos/python/cpython/issues{/number}"
        },
        "label": {"name": "skip news"},
        "installation": {"id": "123"},
    }
    gh = FakeGH()
    await backport_pr.router.dispatch(
        sansio.Event(data, event="pull_request", delivery_id="1"), gh
    )
    assert not hasattr(gh, "post_url")
    assert not hasattr(gh, "post_data")
70 |
71 |
async def test_merged_pr_no_backport_label():
    """Merging a PR whose only label is not a backport label posts nothing."""
    issue_url = "https://api.github.com/repos/python/cpython/issues/1"
    data = {
        "action": "closed",
        "pull_request": {
            "merged": True,
            "number": 1,
            "merged_by": {"login": "Mariatta"},
            "user": {"login": "Mariatta"},
            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
        },
        "repository": {"issues_url": issue_url},
        "installation": {"id": "123"},
    }
    getitem = {
        issue_url: {"labels_url": issue_url + "/labels{/name}"},
        issue_url + "/labels": [{"name": "skip news"}],
    }
    gh = FakeGH(getitem=getitem)
    await backport_pr.router.dispatch(
        sansio.Event(data, event="pull_request", delivery_id="1"), gh
    )
    assert not hasattr(gh, "post_url")
    assert not hasattr(gh, "post_data")
102 |
103 |
@pytest.mark.parametrize("branch", ["3.10", "3.11", "4.0", "3.7"])
async def test_merged_pr_with_backport_label(branch):
    """A merged PR carrying a backport label gets a status comment posted."""
    issue_url = "https://api.github.com/repos/python/cpython/issues/1"
    data = {
        "action": "closed",
        "pull_request": {
            "merged": True,
            "number": 1,
            "merged_by": {"login": "Mariatta"},
            "user": {"login": "Mariatta"},
            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
        },
        "repository": {"issues_url": issue_url},
        "installation": {"id": "123"},
    }
    getitem = {
        issue_url: {"labels_url": issue_url + "/labels{/name}"},
        issue_url + "/labels": [{"name": f"needs backport to {branch}"}],
    }
    gh = FakeGH(getitem=getitem)
    with mock.patch("miss_islington.tasks.backport_task.delay"):
        await backport_pr.router.dispatch(
            sansio.Event(data, event="pull_request", delivery_id="1"), gh
        )
    body = gh.post_data["body"]
    assert f"I'm working now to backport this PR to: {branch}" in body
    assert gh.post_url == "/repos/python/cpython/issues/1/comments"
138 |
139 |
async def test_merged_pr_with_backport_label_thank_pr_author():
    """When author and merger differ, the status comment thanks the author."""
    issue_url = "https://api.github.com/repos/python/cpython/issues/1"
    data = {
        "action": "closed",
        "pull_request": {
            "merged": True,
            "number": 1,
            "merged_by": {"login": "Mariatta"},
            "user": {"login": "gvanrossum"},
            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
        },
        "repository": {"issues_url": issue_url},
        "installation": {"id": "123"},
    }
    getitem = {
        issue_url: {"labels_url": issue_url + "/labels{/name}"},
        issue_url + "/labels": [{"name": "needs backport to 3.7"}],
    }
    gh = FakeGH(getitem=getitem)
    with mock.patch("miss_islington.tasks.backport_task.delay"):
        await backport_pr.router.dispatch(
            sansio.Event(data, event="pull_request", delivery_id="1"), gh
        )
    body = gh.post_data["body"]
    assert "I'm working now to backport this PR to: 3.7" in body
    assert "Thanks @gvanrossum for the PR" in body
    assert gh.post_url == "/repos/python/cpython/issues/1/comments"
172 |
173 |
async def test_easter_egg():
    """The Monty Python easter egg appears only when random() rolls low."""
    issue_url = "https://api.github.com/repos/python/cpython/issues/1"
    data = {
        "action": "closed",
        "pull_request": {
            "merged": True,
            "number": 1,
            "merged_by": {"login": "Mariatta"},
            "user": {"login": "gvanrossum"},
            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
        },
        "repository": {"issues_url": issue_url},
        "installation": {"id": "123"},
    }
    getitem = {
        issue_url: {"labels_url": issue_url + "/labels{/name}"},
        issue_url + "/labels": [{"name": "needs backport to 3.7"}],
    }
    event = sansio.Event(data, event="pull_request", delivery_id="1")
    gh = FakeGH(getitem=getitem)

    # 0.1 is just above the trigger threshold, 0.01 is below it.
    for roll, witch_expected in ((0.1, False), (0.01, True)):
        with mock.patch("miss_islington.tasks.backport_task.delay"), mock.patch(
            "random.random", return_value=roll
        ):
            await backport_pr.router.dispatch(event, gh)
        body = gh.post_data["body"]
        assert "I'm working now to backport this PR to: 3.7" in body
        assert "Thanks @gvanrossum for the PR" in body
        assert ("I'm not a witch" in body) is witch_expected
        assert gh.post_url == "/repos/python/cpython/issues/1/comments"
218 |
219 |
async def test_backport_pr_redis_connection_error():
    """If enqueueing the Celery task fails with a Redis error, the bot says so."""
    issue_url = "https://api.github.com/repos/python/cpython/issues/1"
    data = {
        "action": "closed",
        "pull_request": {
            "merged": True,
            "number": 1,
            "merged_by": {"login": "Mariatta"},
            "user": {"login": "gvanrossum"},
            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
        },
        "repository": {"issues_url": issue_url},
        "installation": {"id": "123"},
    }
    getitem = {
        issue_url: {"labels_url": issue_url + "/labels{/name}"},
        issue_url + "/labels": [{"name": "needs backport to 3.7"}],
    }
    gh = FakeGH(getitem=getitem)
    with mock.patch("miss_islington.tasks.backport_task.delay") as delay_mock:
        delay_mock.side_effect = redis.exceptions.ConnectionError
        await backport_pr.router.dispatch(
            sansio.Event(data, event="pull_request", delivery_id="1"), gh
        )
    assert "I'm having trouble backporting to `3.7`" in gh.post_data["body"]
251 |
252 |
async def test_backport_pr_kombu_operational_error():
    """If enqueueing fails with a kombu OperationalError, the bot says so."""
    issue_url = "https://api.github.com/repos/python/cpython/issues/1"
    data = {
        "action": "closed",
        "pull_request": {
            "merged": True,
            "number": 1,
            "merged_by": {"login": "Mariatta"},
            "user": {"login": "gvanrossum"},
            "merge_commit_sha": "f2393593c99dd2d3ab8bfab6fcc5ddee540518a9",
        },
        "repository": {"issues_url": issue_url},
        "installation": {"id": "123"},
    }
    getitem = {
        issue_url: {"labels_url": issue_url + "/labels{/name}"},
        issue_url + "/labels": [{"name": "needs backport to 3.7"}],
    }
    gh = FakeGH(getitem=getitem)
    with mock.patch("miss_islington.tasks.backport_task.delay") as delay_mock:
        delay_mock.side_effect = kombu.exceptions.OperationalError
        await backport_pr.router.dispatch(
            sansio.Event(data, event="pull_request", delivery_id="1"), gh
        )
    assert "I'm having trouble backporting to `3.7`" in gh.post_data["body"]
284 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2017 Python Software Foundation
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------