├── tests
├── __init__.py
├── test_time_to_merge.py
├── test_time_to_review.py
├── test_merge_rate.py
├── test_common.py
├── test_helpers.py
└── mocks.py
├── github_metrics
├── metrics
│ ├── __init__.py
│ ├── prs_count.py
│ ├── hotfixes_count.py
│ ├── all.py
│ ├── merge_rate.py
│ ├── pr_size.py
│ ├── open_to_merge.py
│ ├── time_to_open.py
│ ├── time_to_merge.py
│ └── time_to_review.py
├── __init__.py
├── settings.py
├── common.py
├── request.py
├── helpers.py
├── main.py
└── run.py
├── requirements-dev.txt
├── .env.example
├── pyproject.toml
├── requirements.txt
├── docker-compose.yml
├── .gitignore
├── Dockerfile
├── etc
└── release.sh
├── setup.cfg
├── .pre-commit-config.yaml
├── LICENSE
├── setup.py
└── README.md
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/github_metrics/metrics/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | bump2version==*
2 | pre-commit==*
3 |
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | GITHUB_LOGIN=""
2 | GITHUB_TOKEN=""
3 | ORG_NAME=""
4 | REPOSITORY_NAME=""
5 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools>=42",
4 | "wheel"
5 | ]
6 | build-backend = "setuptools.build_meta"
7 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | arrow==1.0.2
2 | requests==2.22.0
3 | python-decouple==3.4
4 | numpy==1.20.1
5 | click==7.1.2
6 | fastapi==0.68.1
7 | uvicorn==0.15.0
8 |
--------------------------------------------------------------------------------
/github_metrics/__init__.py:
--------------------------------------------------------------------------------
1 | """Top-level package for github-metrics."""
2 |
3 | __author__ = "Victoria Pantoja (Vinta Software)"
4 | __email__ = "victoria.pantoja@vinta.com.br"
5 | # Rewritten automatically by bump2version (see the
6 | # [bumpversion:file:github_metrics/__init__.py] section in setup.cfg);
7 | # do not edit by hand.
8 | __version__ = "0.0.10"
6 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | app:
5 | build: .
6 | working_dir: /home/backend/app
7 | user: backend
8 | stdin_open: true
9 | tty: true
10 | volumes:
11 | - .:/home/backend/app
12 |
--------------------------------------------------------------------------------
/github_metrics/settings.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from decouple import config
4 |
5 | # Absolute directory of the github_metrics package (location of this file).
6 | BASE_DIR = os.path.dirname(__file__)
7 |
8 | # Credentials and target repository, read from environment variables or a
9 | # .env file via python-decouple (see .env.example); each falls back to an
10 | # empty string when unset.
11 | GITHUB_LOGIN = config("GITHUB_LOGIN", default="")
12 | GITHUB_TOKEN = config("GITHUB_TOKEN", default="")
13 | ORG_NAME = config("ORG_NAME", default="")
14 | REPOSITORY_NAME = config("REPOSITORY_NAME", default="")
11 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | .DS_Store
3 | __pycache__/
4 | /.vscode/
5 | .idea/
6 |
7 | # coverage result
8 | .coverage
9 | /coverage/
10 |
11 | # data
12 | *.dump
13 |
14 | # Environments
15 | .env
16 | .venv
17 | env/
18 | venv/
19 | ENV/
20 | env.bak/
21 | venv.bak/
22 |
23 | # Distribution / packaging
24 | .Python
25 | build/
26 | develop-eggs/
27 | eggs/
28 | .eggs/
29 | lib/
30 | lib64/
31 | *.egg-info/
32 | *.egg
33 | dist/
34 | sdist/
35 |
36 | # Previous releases' directories
37 | release-backup-*/
38 |
--------------------------------------------------------------------------------
/github_metrics/metrics/prs_count.py:
--------------------------------------------------------------------------------
1 | from github_metrics.helpers import filter_valid_prs
2 |
3 |
def count_prs(pr_list, include_hotfixes, exclude_authors, filter_authors):
    """Print the total number of valid PRs in ``pr_list``.

    Args:
        pr_list: raw PR nodes to count.
        include_hotfixes: whether hotfix PRs count as valid.
        exclude_authors: author logins to skip (None treated as empty).
        filter_authors: restrict counting to these author logins.
    """
    if not exclude_authors:
        exclude_authors = []
    prs_list = filter_valid_prs(
        pr_list, include_hotfixes, exclude_authors, filter_authors
    )

    # Bug fix: the f-strings were missing the trailing "\n" that every other
    # metric report uses (see count_hotfixes), so the header, separator and
    # count all printed on a single line.
    print(
        f" \033[1mPRs Count\033[0m\n"
        f" ----------------------------------\n"
        f" Total PRs counted: {len(prs_list)}\n"
    )
14 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
FROM python:3.9
# Use the key=value ENV form; the space-separated form is legacy and
# discouraged by the Dockerfile reference.
ENV PYTHONUNBUFFERED=1
ARG UID=1000
ARG build_env
ENV BUILD_ENV=${build_env}
ENV VIRTUAL_ENV=/home/backend/venv

# Unprivileged user that owns the app directory and runs the container.
RUN adduser --disabled-password --uid $UID --gecos '' backend
RUN [ -d /home/backend/app ] || mkdir /home/backend/app
RUN chown -Rf $UID:$UID /home/backend/app

COPY --chown=$UID:$UID . /home/backend/src

WORKDIR /home/backend/app
USER backend

# Creates virtualenv and installs the package (editable) from the copied source
RUN python3 -m venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
RUN pip install --editable ../src
21 |
--------------------------------------------------------------------------------
/etc/release.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# NOTE: the shebang must be the very first bytes of the file; previously it
# was preceded by a blank line, so the kernel ignored it and the script ran
# under whatever shell invoked it.

# Backup previous release
if [[ (-d "dist") || (-d "build") ]]; then
    previous_release=release-backup-$(date --iso=seconds)
    mkdir "$previous_release"
    [[ (-d "dist") ]] && mv dist "$previous_release"
    [[ (-d "build") ]] && mv build "$previous_release"
fi

# Delete current build archives
rm -rf build dist

# Make sure you're running this inside a virtual environment
pip install -r ./requirements.txt

# Build the artifacts
python setup.py sdist bdist_wheel --universal

# This will upload the artifacts to PyPi (add your credentials to .pypirc for convenience)
twine upload dist/*
23 |
--------------------------------------------------------------------------------
/github_metrics/metrics/hotfixes_count.py:
--------------------------------------------------------------------------------
1 | from github_metrics.helpers import filter_hotfixes
2 |
3 |
def get_hotfixes_data(pr_list, exclude_authors, filter_authors):
    """Collect the hotfix PRs from ``pr_list``, honoring the author filters.

    Returns a dict with a single ``hotfix_list`` key.
    """
    exclude_authors = exclude_authors or []
    return {
        "hotfix_list": filter_hotfixes(pr_list, exclude_authors, filter_authors)
    }
9 |
10 |
def count_hotfixes(pr_list, exclude_authors, filter_authors):
    """Print how many hotfix PRs exist in ``pr_list``."""
    hotfixes = get_hotfixes_data(
        pr_list=pr_list,
        exclude_authors=exclude_authors,
        filter_authors=filter_authors,
    )["hotfix_list"]

    print(
        f" \033[1mHotfixes Count\033[0m\n"
        f" ----------------------------------\n"
        f" Total PRs counted: {len(hotfixes)}\n"
    )
20 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 0.0.10
3 | commit = True
4 | tag = True
5 |
6 | [metadata]
7 | name = github-metrics
8 | author = Victoria Pantoja (Vinta Software)
9 | author_email = victoria.pantoja@vinta.com.br
10 | description = Generate development metrics using github data for your project.
11 | long_description = file: README.md
12 | long_description_content_type = text/markdown
13 | url = https://github.com/vintasoftware/github-metrics
14 | classifiers =
15 | Programming Language :: Python :: 3.8
16 | Programming Language :: Python :: 3.9
17 | License :: OSI Approved :: MIT License
18 | Operating System :: OS Independent
19 |
20 | [bumpversion:file:setup.py]
21 | search = version="{current_version}"
22 | replace = version="{new_version}"
23 |
24 | [bumpversion:file:github_metrics/__init__.py]
25 | search = __version__ = "{current_version}"
26 | replace = __version__ = "{new_version}"
27 |
--------------------------------------------------------------------------------
/tests/test_time_to_merge.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import unittest
3 |
4 | from github_metrics.metrics.time_to_merge import (
5 | call_mean_time_to_merge_statistics,
6 | get_merged_prs,
7 | )
8 |
9 |
class TestPRsMTM(unittest.TestCase):
    """Unit tests for the time-to-merge metric helpers."""

    def test_get_merged_prs_successfully(self):
        unmerged = {"merged_at": None}
        merged = {"merged_at": datetime.datetime(2021, 3, 25, 14, 28, 52)}

        result = get_merged_prs([unmerged, merged])

        self.assertEqual(result, [merged])

    def test_no_prs_to_calculate_mtm(self):
        expected = (
            "There are no valid PRs to pull this data from,"
            " please select another timeframe"
        )
        self.assertEqual(call_mean_time_to_merge_statistics([]), expected)
26 |
27 |
28 | if __name__ == "__main__":
29 | unittest.main()
30 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v4.0.1
4 | hooks:
5 | - id: check-yaml
6 | - id: end-of-file-fixer
7 | - id: trailing-whitespace
8 | args: [--markdown-linebreak-ext=md]
9 | - id: check-added-large-files
10 | args: ['--maxkb=500']
11 | - id: check-byte-order-marker
12 | - id: check-case-conflict
13 | - id: check-merge-conflict
14 | - id: check-symlinks
15 | - id: debug-statements
16 | - id: detect-private-key
17 | - repo: https://github.com/pre-commit/mirrors-isort
18 | rev: v5.9.3
19 | hooks:
20 | - id: isort
21 | args: ["--profile", "black"]
22 | - repo: https://github.com/psf/black
23 | rev: 21.8b0
24 | hooks:
25 | - id: black
26 | - repo: https://gitlab.com/pycqa/flake8
27 | rev: 3.9.2
28 | hooks:
29 | - id: flake8
30 | args: [--max-line-length=100]
31 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Vinta Software
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/github_metrics/common.py:
--------------------------------------------------------------------------------
1 | import arrow
2 |
3 |
def extract_datetime_or_none(isoformat_str_date):
    """Parse an ISO-8601 string into a tz-aware datetime via arrow.

    Returns None when the string cannot be parsed.
    """
    try:
        arrow_date = arrow.get(isoformat_str_date)
    except Exception:
        return None
    else:
        return arrow_date.datetime
11 |
12 |
def get_author_login(pr_or_review):
    """Return the author's ``login`` from a PR or review node, or None.

    Handles both a missing/None ``author`` key and an author dict without
    a ``login`` key.
    """
    author = pr_or_review.get("author")
    return author.get("login") if author else None
19 |
20 |
def get_reviews_from_pr(pr):
    """Return the list of review nodes attached to ``pr``, or [] if none."""
    reviews_root = pr.get("reviews")
    if not reviews_root:
        return []
    # A falsy/missing "nodes" entry also means "no reviews".
    return reviews_root.get("nodes") or []
32 |
33 | def _get_raw_ready_datetime_from_pr(pr):
34 | timeline_root = pr.get("timelineItems", {})
35 | if not timeline_root:
36 | return pr["createdAt"]
37 |
38 | timeline = timeline_root.get("edges", [])
39 | if not timeline or not timeline[0]:
40 | return pr["createdAt"]
41 |
42 | timeline_node = timeline[0].get("node")
43 | if not timeline_node:
44 | return pr["createdAt"]
45 |
46 | return timeline_node.get("createdAt", pr["createdAt"])
47 |
def get_ready_datetime_from_pr(pr):
    """Return the PR's ready-for-review moment as a datetime, or None."""
    raw_timestamp = _get_raw_ready_datetime_from_pr(pr)
    return extract_datetime_or_none(raw_timestamp)
50 |
--------------------------------------------------------------------------------
/tests/test_time_to_review.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import unittest
3 |
4 | from github_metrics.metrics.time_to_review import (
5 | filter_prs_with_more_than_24h_before_review,
6 | filter_reviewed_prs,
7 | )
8 |
9 |
class TestPRsMTR(unittest.TestCase):
    """Unit tests for the time-to-review metric helpers."""

    def test_filter_reviewed_prs_successfully(self):
        pr_list = [
            {"first_review_at": None},
            {"first_review_at": datetime.datetime(2021, 3, 25, 14, 28, 52)},
        ]
        reviewed_prs = filter_reviewed_prs(pr_list)
        self.assertEqual(reviewed_prs, [pr_list[1]])

    def test_filter_prs_with_more_than_24h_before_review(self):
        # Renamed from ..._18h_...: the helper under test filters on a 24h
        # threshold and the fixtures below are built around 24h.
        pr_list = [
            {
                # never reviewed — expected to count as exceeding the threshold
                "ready_at": datetime.datetime(2021, 3, 25, 14, 28, 52),
                "first_review_at": None,
            },
            {
                # reviewed ~2 days after ready — exceeds 24h
                "ready_at": datetime.datetime(2021, 3, 2, 10, 10),
                "first_review_at": datetime.datetime(2021, 3, 4, 15, 12),
            },
            {
                # reviewed within the hour — should be filtered out
                "ready_at": datetime.datetime(2021, 3, 2, 10, 10),
                "first_review_at": datetime.datetime(2021, 3, 2, 11, 3),
            },
        ]

        pr_list = filter_prs_with_more_than_24h_before_review(pr_list)
        self.assertEqual(len(pr_list), 2)
37 |
38 |
39 | if __name__ == "__main__":
40 | unittest.main()
41 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from pkg_resources import parse_requirements
2 | from setuptools import find_packages, setup
3 |
4 | # Pin install_requires to the exact versions listed in requirements.txt.
5 | with open("requirements.txt") as requirements_file:
6 |     requirements = [
7 |         str(req) for req in parse_requirements(requirements_file.readlines())
8 |     ]
9 |
10 | # The README doubles as the PyPI long description.
11 | with open("README.md", "r", encoding="utf-8") as readme_file:
12 |     readme = readme_file.read()
13 |
14 |
15 | setup(
16 |     name="github_metrics",
17 |     # Version string is rewritten by bump2version (see setup.cfg).
18 |     version="0.0.10",
19 |     description="Generate development metrics using github data for your project.",
20 |     url="https://github.com/vintasoftware/github-metrics",
21 |     author="Victoria Pantoja (Vinta Software)",
22 |     author_email="victoria.pantoja@vinta.com.br",
23 |     python_requires=">=3.8",
24 |     py_modules=["github_metrics"],
25 |     install_requires=requirements,
26 |     include_package_data=True,
27 |     packages=find_packages(exclude=("tests*",)),
28 |     # Installs the `github_metrics` console command, dispatching to the
29 |     # `cli` object in github_metrics/run.py (presumably a click group —
30 |     # click is pinned in requirements.txt; run.py not shown here).
31 |     entry_points="""
32 |         [console_scripts]
33 |         github_metrics=github_metrics.run:cli
34 |     """,
35 |     classifiers=[
36 |         "Intended Audience :: Developers",
37 |         "License :: OSI Approved :: MIT License",
38 |         "Natural Language :: English",
39 |         "Programming Language :: Python :: 3.8",
40 |         "Programming Language :: Python :: 3.9",
41 |     ],
42 |     license="MIT license",
43 |     long_description=readme,
44 |     long_description_content_type="text/markdown",
45 | )
40 |
--------------------------------------------------------------------------------
/tests/test_merge_rate.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import unittest
3 |
4 | from github_metrics.metrics.merge_rate import (
5 | call_merge_rate_statistics,
6 | get_merged_prs,
7 | get_prs_authors,
8 | )
9 |
10 |
class TestPRsMTM(unittest.TestCase):
    """Unit tests for the merge-rate metric helpers."""

    def test_get_merged_prs_successfully(self):
        unmerged = {"merged_at": None}
        merged = {"merged_at": datetime.datetime(2021, 3, 25, 14, 28, 52)}

        result = get_merged_prs([unmerged, merged])

        self.assertEqual(result, [merged])

    def test_no_prs_to_calculate_mr(self):
        expected = (
            "There are no valid PRs to pull this data from,"
            " please select another timeframe"
        )
        self.assertEqual(call_merge_rate_statistics([]), expected)

    def test_get_authors_list(self):
        # Duplicate author should be reported only once, first-seen order kept.
        prs = [
            {"author": name}
            for name in ("ladygaga", "beyonce", "badgalriri", "badgalriri")
        ]

        self.assertEqual(
            get_prs_authors(prs),
            ["ladygaga", "beyonce", "badgalriri"],
        )
49 |
50 |
51 | if __name__ == "__main__":
52 | unittest.main()
53 |
--------------------------------------------------------------------------------
/tests/test_common.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from github_metrics.common import (
4 | extract_datetime_or_none,
5 | get_author_login,
6 | get_reviews_from_pr,
7 | )
8 | from tests.mocks import request_mock
9 |
10 |
class TestCommon(unittest.TestCase):
    """Unit tests for the shared PR-parsing helpers in github_metrics.common."""

    def setUp(self):
        # A populated PR node from the mocked GraphQL response...
        self.pr = request_mock[1]
        # ...and a bare-bones PR with every optional field blank.
        self.empty_pr = {
            "id": "",
            "title": "",
            "createdAt": "",
            "baseRefName": "",
            "headRefName": "",
            "reviews": {},
            "author": None,
            "mergedAt": None,
            "closedAt": None,
            "commits": {},
        }

    def test_extract_datetime_successfully(self):
        parsed = extract_datetime_or_none(self.pr.get("createdAt"))
        self.assertNotEqual(parsed, None)

    def test_fails_extracting_datetime(self):
        parsed = extract_datetime_or_none("2021-03-25T21!")
        self.assertEqual(parsed, None)

    def test_gets_author_login_successfully(self):
        expected_login = self.pr.get("author").get("login")
        self.assertEqual(get_author_login(self.pr), expected_login)

    def test_fails_getting_author_login(self):
        self.assertEqual(get_author_login(self.empty_pr), None)

    def test_get_reviews_from_pr_successfully(self):
        expected_nodes = self.pr.get("reviews").get("nodes")
        self.assertEqual(expected_nodes, get_reviews_from_pr(self.pr))

    def test_pr_without_review(self):
        self.assertEqual(get_reviews_from_pr(self.empty_pr), [])
53 |
54 |
55 | if __name__ == "__main__":
56 | unittest.main()
57 |
--------------------------------------------------------------------------------
/github_metrics/metrics/all.py:
--------------------------------------------------------------------------------
1 | from github_metrics.metrics.hotfixes_count import count_hotfixes
2 | from github_metrics.metrics.merge_rate import call_merge_rate_statistics
3 | from github_metrics.metrics.open_to_merge import (
4 | calulate_prs_open_to_merge_time_statistics,
5 | )
6 | from github_metrics.metrics.pr_size import call_pr_size_statistics
7 | from github_metrics.metrics.time_to_merge import call_mean_time_to_merge_statistics
8 | from github_metrics.metrics.time_to_open import call_time_to_open_statistics
9 | from github_metrics.metrics.time_to_review import calulate_prs_review_time_statistics
10 |
11 |
def call_all_metrics(
    pr_list, include_hotfixes, exclude_authors, filter_authors, exclude_weekends
):
    """Run every metric report against ``pr_list``, printing each in turn."""
    base_kwargs = {
        "pr_list": pr_list,
        "include_hotfixes": include_hotfixes,
        "exclude_authors": exclude_authors,
        "filter_authors": filter_authors,
    }
    weekend_kwargs = {**base_kwargs, "exclude_weekends": exclude_weekends}

    # Duration-based metrics honor the weekend-exclusion flag.
    call_mean_time_to_merge_statistics(**weekend_kwargs)
    calulate_prs_review_time_statistics(**weekend_kwargs)
    call_time_to_open_statistics(**weekend_kwargs)
    calulate_prs_open_to_merge_time_statistics(**weekend_kwargs)

    # Count/rate metrics do not take exclude_weekends.
    call_merge_rate_statistics(**base_kwargs)
    call_pr_size_statistics(**base_kwargs)
    count_hotfixes(
        pr_list=pr_list,
        exclude_authors=exclude_authors,
        filter_authors=filter_authors,
    )
58 |
--------------------------------------------------------------------------------
/github_metrics/metrics/merge_rate.py:
--------------------------------------------------------------------------------
1 | from github_metrics.common import extract_datetime_or_none, get_author_login
2 | from github_metrics.helpers import filter_valid_prs
3 |
4 |
def format_pr_list(pr_list):
    """Project raw PR nodes into {author, merged_at} dicts."""
    formatted = []
    for pr in pr_list:
        merged_raw = pr.get("mergedAt")
        formatted.append(
            {
                "author": get_author_login(pr),
                # Only parse truthy timestamps; unmerged PRs keep None.
                "merged_at": (
                    extract_datetime_or_none(merged_raw) if merged_raw else None
                ),
            }
        )
    return formatted
15 |
16 |
def get_merged_prs(formatted_pr_list):
    """Return only the PRs that carry a merge timestamp."""
    return [pr for pr in formatted_pr_list if pr["merged_at"] is not None]
24 |
25 |
def get_prs_authors(pr_list):
    """Return the unique PR authors, preserving first-seen order.

    dict.fromkeys keeps insertion order (guaranteed since Python 3.7) and
    deduplicates in O(n), replacing the original O(n^2) list-membership scan.
    """
    return list(dict.fromkeys(pr["author"] for pr in pr_list))
33 |
34 |
def get_merge_rate_data(pr_list, include_hotfixes, exclude_authors, filter_authors):
    """Compute merge-rate statistics (merged PRs per contributing author).

    Returns a dict with total_prs, prs_authors and merge_rate, or an
    error-message string when no merged PR matches the filters.
    """
    valid_pr_list = filter_valid_prs(
        pr_list, include_hotfixes, exclude_authors, filter_authors
    )
    merged_prs = get_merged_prs(format_pr_list(valid_pr_list))
    if not merged_prs:
        return "There are no valid PRs to pull this data from, please select another timeframe"

    prs_authors = get_prs_authors(merged_prs)
    return {
        "total_prs": merged_prs,
        "prs_authors": prs_authors,
        # merged_prs is non-empty here, so prs_authors has at least one entry.
        "merge_rate": len(merged_prs) / len(prs_authors),
    }
52 |
53 |
def call_merge_rate_statistics(
    pr_list, include_hotfixes=True, exclude_authors=None, filter_authors=None
):
    """Print merge-rate statistics for ``pr_list``.

    Defaults were added so the function can be called with only a PR list,
    as tests/test_merge_rate.py does. Returns the error-message string when
    there are no valid PRs (previously this path crashed with TypeError on
    ``data['total_prs']``), otherwise None.
    """
    data = get_merge_rate_data(
        pr_list=pr_list,
        include_hotfixes=include_hotfixes,
        exclude_authors=exclude_authors,
        filter_authors=filter_authors,
    )

    # get_merge_rate_data signals "nothing to measure" with a message string.
    if isinstance(data, str):
        print(data)
        return data

    print(
        f" \033[1mMerge Rate\033[0m\n"
        f" ----------------------------------\n"
        f" Total PRs calculated: {len(data['total_prs'])}\n"
        f" Total devs calculated: {len(data['prs_authors'])}\n"
        f" ----------------------------------\n"
        f" Merge Rate: {data['merge_rate']}\n"
    )
72 |
--------------------------------------------------------------------------------
/github_metrics/metrics/pr_size.py:
--------------------------------------------------------------------------------
1 | import numpy
2 |
3 | from github_metrics.helpers import filter_valid_prs
4 |
5 |
def format_pr_list(pr_list):
    """Keep only the line-count fields (additions/deletions) of each PR."""
    formatted = []
    for pr in pr_list:
        formatted.append(
            {"additions": pr["additions"], "deletions": pr["deletions"]}
        )
    return formatted
14 |
15 |
def get_pr_size_data(pr_list, include_hotfixes, exclude_authors, filter_authors):
    """Compute size statistics (line totals and additions-deletions rate).

    Returns a dict of means/medians/95th percentiles plus the PR list used,
    or an error-message string when no PR matches the filters (previously an
    empty list reached numpy.percentile, which raises on empty input).
    """
    valid_pr_list = filter_valid_prs(
        pr_list, include_hotfixes, exclude_authors, filter_authors
    )
    formatted_pr_list = format_pr_list(valid_pr_list)

    # Same sentinel message the other metric modules return for "no data".
    if not formatted_pr_list:
        return "There are no valid PRs to pull this data from, please select another timeframe"

    total_line_diff = []
    diff_line_rate = []

    for pr in formatted_pr_list:
        total_line_diff.append(pr["additions"] + pr["deletions"])
        diff_line_rate.append(pr["additions"] - pr["deletions"])

    total_mean = numpy.mean(total_line_diff)
    total_median = numpy.median(total_line_diff)
    total_percentile = numpy.percentile(total_line_diff, 95)

    rate_mean = numpy.mean(diff_line_rate)
    rate_median = numpy.median(diff_line_rate)
    rate_percentile = numpy.percentile(diff_line_rate, 95)

    return {
        "total_prs": formatted_pr_list,
        "total_mean": total_mean,
        "total_median": total_median,
        "total_percentile_95": total_percentile,
        "rate_mean": rate_mean,
        "rate_median": rate_median,
        "rate_percentile_95": rate_percentile,
    }
46 |
47 |
def call_pr_size_statistics(pr_list, include_hotfixes, exclude_authors, filter_authors):
    """Print PR size statistics for ``pr_list``.

    Returns the error-message string when there is no data to report
    (mirroring the other metric callers), otherwise None.
    """
    data = get_pr_size_data(
        pr_list=pr_list,
        include_hotfixes=include_hotfixes,
        exclude_authors=exclude_authors,
        filter_authors=filter_authors,
    )

    # A plain string means "no valid PRs"; indexing it like a dict would
    # raise TypeError.
    if isinstance(data, str):
        print(data)
        return data

    print(
        f" \033[1mPull Requests Size\033[0m\n"
        f" ----------------------------------\n"
        f" Total PRs calculated: {len(data['total_prs'])}\n"
        f" ----------------------------------\n"
        f" Total Lines Mean: {round(data['total_mean'], 2)} lines\n"
        f" Total Lines Median: {round(data['total_median'], 2)} lines\n"
        f" Total Lines 95 percentile: {round(data['total_percentile_95'], 2)} lines\n\n"
        f" Diff Rate Mean: {round(data['rate_mean'], 2)}\n"
        f" Diff Rate Median: {round(data['rate_median'], 2)}\n"
        f" Diff Rate 95 percentile: {round(data['rate_percentile_95'], 2)}\n"
    )
68 |
--------------------------------------------------------------------------------
/github_metrics/metrics/open_to_merge.py:
--------------------------------------------------------------------------------
1 | import numpy
2 |
3 | from github_metrics.common import extract_datetime_or_none, get_author_login, get_ready_datetime_from_pr
4 | from github_metrics.helpers import (
5 | filter_valid_prs,
6 | format_timedelta_to_hours,
7 | format_timedelta_to_text,
8 | get_time_without_weekend,
9 | )
10 |
11 |
def get_merged_prs(formatted_pr_list):
    """Return only the PRs that carry a merge timestamp."""
    return [pr for pr in formatted_pr_list if pr["merged_at"] is not None]
19 |
20 |
def format_pr_list(pr_list):
    """Normalize raw PR nodes into {title, author, ready_at, merged_at}."""
    formatted = []
    for pr in pr_list:
        merged_raw = pr.get("mergedAt")
        formatted.append(
            {
                "title": pr["title"],
                "author": get_author_login(pr),
                "ready_at": get_ready_datetime_from_pr(pr),
                # Only parse truthy timestamps; unmerged PRs keep None.
                "merged_at": (
                    extract_datetime_or_none(merged_raw) if merged_raw else None
                ),
            }
        )
    return formatted
35 |
36 |
def get_open_to_merge_time_data(
    pr_list, include_hotfixes, exclude_authors, filter_authors, exclude_weekends
):
    """Measure ready-for-review -> merge duration across merged PRs.

    Returns mean/median/95th-percentile durations, the merged PR list and
    the merged-PR rate, or an error-message string when nothing qualifies.
    Previously an empty result reached numpy.percentile (raises on empty)
    and, with an empty valid list, the rate division raised ZeroDivisionError.
    """
    valid_pr_list = filter_valid_prs(
        pr_list, include_hotfixes, exclude_authors, filter_authors
    )
    formatted_pr_list = format_pr_list(valid_pr_list)
    merged_pr_list = get_merged_prs(formatted_pr_list)

    # Same sentinel message the other metric modules return for "no data".
    # merged_pr_list non-empty implies valid_pr_list non-empty, so the rate
    # division below is safe.
    if not merged_pr_list:
        return "There are no valid PRs to pull this data from, please select another timeframe"

    review_time_list = []

    for pr in merged_pr_list:
        open_pr_duration = pr["merged_at"] - pr["ready_at"]
        if exclude_weekends:
            open_pr_duration = get_time_without_weekend(
                pr["ready_at"], pr["merged_at"]
            )
        review_time_list.append(open_pr_duration)

    mean = numpy.mean(review_time_list)
    median = numpy.median(review_time_list)
    percentile = numpy.percentile(review_time_list, 95)

    merged_pr_rate = round((len(merged_pr_list) * 100) / len(valid_pr_list), 2)

    return {
        "mean": mean,
        "median": median,
        "percentile_95": percentile,
        "total_prs": merged_pr_list,
        "merged_pr_rate": merged_pr_rate,
    }
69 |
70 |
def calulate_prs_open_to_merge_time_statistics(
    pr_list, include_hotfixes, exclude_authors, filter_authors, exclude_weekends
):
    """Print open-to-merge time statistics for ``pr_list``.

    (Function name keeps the historical "calulate" spelling — it is part of
    the public interface imported by all.py.) Returns the error-message
    string when there is no data, otherwise None.
    """
    data = get_open_to_merge_time_data(
        pr_list=pr_list,
        include_hotfixes=include_hotfixes,
        exclude_authors=exclude_authors,
        filter_authors=filter_authors,
        exclude_weekends=exclude_weekends,
    )

    # A plain string means "no valid PRs"; indexing it like a dict would
    # raise TypeError.
    if isinstance(data, str):
        print(data)
        return data

    print(
        f" \033[1mOpen to Merge\033[0m\n"
        f" ----------------------------------\n"
        f" Merged PRs count: {len(data['total_prs'])}\n"
        f" Valid Merged PRs rate: {data['merged_pr_rate']}%\n"
        f" ----------------------------------\n"
        f" Mean: {format_timedelta_to_text(data['mean'])}"
        f" ({format_timedelta_to_hours(data['mean'])} hours)\n"
        f" Median: {format_timedelta_to_text(data['median'])}"
        f" ({format_timedelta_to_hours(data['median'])} hours)\n"
        f" 95 percentile: {format_timedelta_to_text(data['percentile_95'])}"
        f" ({format_timedelta_to_hours(data['percentile_95'])} hours)\n"
    )
95 |
--------------------------------------------------------------------------------
/github_metrics/metrics/time_to_open.py:
--------------------------------------------------------------------------------
1 | import numpy
2 |
3 | from github_metrics.common import extract_datetime_or_none, get_author_login, get_ready_datetime_from_pr
4 | from github_metrics.helpers import (
5 | filter_valid_prs,
6 | format_timedelta_to_hours,
7 | format_timedelta_to_text,
8 | get_time_without_weekend,
9 | )
10 |
11 |
def get_formatted_list_of_commits(commit_data):
    """Flatten GraphQL commit edges into {message, commited_at} dicts.

    Returns [] when the edge list is missing or empty.
    """
    edges = commit_data.get("edges")
    if not edges:
        return []

    formatted = []
    for edge in edges:
        commit = edge.get("node").get("commit")
        formatted.append(
            {
                "message": commit.get("message"),
                "commited_at": extract_datetime_or_none(commit.get("committedDate")),
            }
        )
    return formatted
27 |
28 |
def format_pr_list(pr_list):
    """Normalize raw PR nodes into title/author/timestamps/commits dicts."""
    formatted = []
    for pr in pr_list:
        merged_raw = pr.get("mergedAt")
        closed_raw = pr.get("closedAt")
        formatted.append(
            {
                "title": pr["title"],
                "author": get_author_login(pr),
                "ready_at": get_ready_datetime_from_pr(pr),
                # Only parse truthy timestamps; absent ones stay None.
                "merged_at": (
                    extract_datetime_or_none(merged_raw) if merged_raw else None
                ),
                "closed_at": (
                    extract_datetime_or_none(closed_raw) if closed_raw else None
                ),
                "commits": get_formatted_list_of_commits(pr.get("commits")),
            }
        )
    return formatted
45 |
46 |
def get_time_to_open_data(
    pr_list, include_hotfixes, exclude_authors, filter_authors, exclude_weekends
):
    """Measure first-commit -> ready-for-review duration per PR.

    Returns mean/median/95th-percentile durations plus the PR list used,
    or an error-message string when nothing can be measured. PRs without
    commit data are skipped (previously ``pr["commits"][0]`` raised
    IndexError when get_formatted_list_of_commits returned []).
    """
    valid_pr_list = filter_valid_prs(
        pr_list, include_hotfixes, exclude_authors, filter_authors
    )
    formatted_pr_list = format_pr_list(valid_pr_list)

    if not formatted_pr_list:
        return "There are no valid PRs to pull this data from, please select another timeframe"

    time_to_open = []
    for pr in formatted_pr_list:
        # Skip PRs whose commit list is empty or whose timestamps failed to
        # parse — they cannot contribute a duration.
        if not pr["commits"]:
            continue
        first_commit_time = pr["commits"][0]["commited_at"]
        if first_commit_time is None or pr["ready_at"] is None:
            continue
        timedelta = pr["ready_at"] - first_commit_time
        if exclude_weekends:
            timedelta = get_time_without_weekend(first_commit_time, pr["ready_at"])
        time_to_open.append(timedelta)

    # Guard numpy.percentile, which raises on an empty sample.
    if not time_to_open:
        return "There are no valid PRs to pull this data from, please select another timeframe"

    mean = numpy.mean(time_to_open)
    median = numpy.median(time_to_open)
    percentile = numpy.percentile(time_to_open, 95)
    return {
        "mean": mean,
        "median": median,
        "percentile_95": percentile,
        "total_prs": formatted_pr_list,
    }
75 |
76 |
def call_time_to_open_statistics(
    pr_list, include_hotfixes, exclude_authors, filter_authors, exclude_weekends
):
    """Print time-to-open statistics for ``pr_list``.

    Returns the error-message string when there is no data, otherwise None.
    """
    data = get_time_to_open_data(
        pr_list=pr_list,
        include_hotfixes=include_hotfixes,
        exclude_authors=exclude_authors,
        filter_authors=filter_authors,
        exclude_weekends=exclude_weekends,
    )

    # get_time_to_open_data returns a plain message string when no valid PRs
    # exist; the original code then crashed on data['total_prs'] (TypeError:
    # string indices must be integers).
    if isinstance(data, str):
        print(data)
        return data

    print(
        f" \033[1mTime to open\033[0m\n"
        f" ----------------------------------\n"
        f" Total PRs calculated: {len(data['total_prs'])}\n"
        f" ----------------------------------\n"
        f" Mean: {format_timedelta_to_text(data['mean'])}"
        f" ({format_timedelta_to_hours(data['mean'])} hours)\n"
        f" Median: {format_timedelta_to_text(data['median'])}"
        f" ({format_timedelta_to_hours(data['median'])} hours)\n"
        f" 95 percentile: {format_timedelta_to_text(data['percentile_95'])}"
        f" ({format_timedelta_to_hours(data['percentile_95'])} hours)\n"
    )
100 |
--------------------------------------------------------------------------------
/github_metrics/metrics/time_to_merge.py:
--------------------------------------------------------------------------------
1 | import numpy
2 |
3 | from github_metrics.common import extract_datetime_or_none, get_author_login, get_ready_datetime_from_pr
4 | from github_metrics.helpers import (
5 | filter_valid_prs,
6 | format_timedelta_to_hours,
7 | format_timedelta_to_text,
8 | get_time_without_weekend,
9 | )
10 |
11 |
def get_formatted_list_of_commits(commit_data):
    """Flatten GraphQL commit edges into {message, commited_at} dicts."""
    edges = commit_data.get("edges")
    if not edges:
        return []

    formatted = []
    for edge in edges:
        commit_node = edge.get("node").get("commit")
        formatted.append(
            {
                "message": commit_node.get("message"),
                "commited_at": extract_datetime_or_none(
                    commit_node.get("committedDate")
                ),
            }
        )
    return formatted
27 |
28 |
def format_pr_list(pr_list):
    """Normalize raw GraphQL PR nodes into plain dicts with parsed datetimes."""
    formatted = []
    for pr in pr_list:
        merged_raw = pr.get("mergedAt")
        closed_raw = pr.get("closedAt")
        formatted.append(
            {
                "title": pr["title"],
                "author": get_author_login(pr),
                "ready_at": get_ready_datetime_from_pr(pr),
                "merged_at": extract_datetime_or_none(merged_raw) if merged_raw else None,
                "closed_at": extract_datetime_or_none(closed_raw) if closed_raw else None,
                "commits": get_formatted_list_of_commits(pr.get("commits")),
            }
        )
    return formatted
45 |
46 |
def get_merged_prs(formatted_pr_list):
    """Return only the PRs that have actually been merged."""
    return [pr for pr in formatted_pr_list if pr["merged_at"] is not None]
54 |
55 |
def get_time_to_merge_data(
    pr_list,
    include_hotfixes,
    exclude_authors,
    filter_authors,
    exclude_weekends,
):
    """Compute statistics on the time between the first commit and the merge.

    Args:
        pr_list: Raw PR nodes from the GitHub GraphQL API.
        include_hotfixes: When False, hotfix PRs are filtered out.
        exclude_authors / filter_authors: Author-login filters.
        exclude_weekends: When True, weekend time is subtracted from each delta.

    Returns:
        A dict with mean/median/percentile_95/merged_prs, or an explanatory
        string when no merged PRs match (callers must handle the string case).
    """
    valid_pr_list = filter_valid_prs(
        pr_list, include_hotfixes, exclude_authors, filter_authors
    )
    formatted_pr_list = format_pr_list(valid_pr_list)
    merged_prs = get_merged_prs(formatted_pr_list)

    # `not merged_prs` already covers the empty list; `== []` was redundant.
    if not merged_prs:
        return "There are no valid PRs to pull this data from, please select another timeframe"

    time_to_merge_list = []
    for pr in merged_prs:
        first_commit_time = pr["commits"][0]["commited_at"]
        # Compute the delta once in the appropriate branch instead of
        # computing it unconditionally and overwriting it.
        if exclude_weekends:
            delta = get_time_without_weekend(first_commit_time, pr["merged_at"])
        else:
            delta = pr["merged_at"] - first_commit_time
        time_to_merge_list.append(delta)

    return {
        "mean": numpy.mean(time_to_merge_list),
        "median": numpy.median(time_to_merge_list),
        "percentile_95": numpy.percentile(time_to_merge_list, 95),
        "merged_prs": merged_prs,
    }
90 |
91 |
def call_mean_time_to_merge_statistics(
    pr_list,
    include_hotfixes,
    exclude_authors,
    filter_authors,
    exclude_weekends,
):
    """Print time-to-merge statistics for the given PRs.

    Parameters are passed straight through to get_time_to_merge_data.
    """
    data = get_time_to_merge_data(
        pr_list=pr_list,
        include_hotfixes=include_hotfixes,
        exclude_authors=exclude_authors,
        filter_authors=filter_authors,
        exclude_weekends=exclude_weekends,
    )

    # Bug fix: get_time_to_merge_data returns a message string when no merged
    # PRs match; indexing it with data['merged_prs'] previously raised
    # TypeError.
    if isinstance(data, str):
        print(data)
        return

    print(
        f" \033[1mTime to merge\033[0m\n"
        f" ----------------------------------\n"
        f" Total PRs calculated: {len(data['merged_prs'])}\n"
        f" ----------------------------------\n"
        f" Mean: {format_timedelta_to_text(data['mean'])}"
        f" ({format_timedelta_to_hours(data['mean'])} hours)\n"
        f" Median: {format_timedelta_to_text(data['median'])}"
        f" ({format_timedelta_to_hours(data['median'])} hours)\n"
        f" 95 percentile: {format_timedelta_to_text(data['percentile_95'])}"
        f" ({format_timedelta_to_hours(data['percentile_95'])} hours)\n"
    )
119 |
--------------------------------------------------------------------------------
/github_metrics/metrics/time_to_review.py:
--------------------------------------------------------------------------------
1 | import arrow
2 | import numpy
3 |
4 | from github_metrics.common import extract_datetime_or_none, get_author_login, get_ready_datetime_from_pr
5 | from github_metrics.helpers import (
6 | filter_valid_prs,
7 | format_timedelta_to_hours,
8 | format_timedelta_to_text,
9 | get_time_without_weekend,
10 | )
11 |
12 |
def get_reviews_from_pr(pr):
    """Return the review nodes of *pr*, or an empty list when none exist."""
    reviews_root = pr.get("reviews")
    if not reviews_root:
        return []
    # `or []` maps both a missing and a falsy "nodes" value to an empty list.
    return reviews_root.get("nodes", []) or []
24 |
25 |
def get_first_review(pr):
    """Return the first review written by someone other than the PR author.

    Returns None when every review is a self-review or there are no reviews.
    """
    pr_author = get_author_login(pr)
    for review in get_reviews_from_pr(pr):
        if get_author_login(review) != pr_author:
            return review
    return None
38 |
39 |
def hours_without_review(pr):
    """Hours between the PR becoming ready and its first review.

    For still-unreviewed PRs, measures the wait up to the current moment.
    """
    open_date = extract_datetime_or_none(pr["ready_at"])

    if pr["first_review_at"] is None:
        elapsed = arrow.now() - open_date
    else:
        elapsed = extract_datetime_or_none(pr["first_review_at"]) - open_date
    return elapsed.total_seconds() / 3600
49 |
50 |
def filter_prs_with_more_than_24h_before_review(pr_list, use_time_before_review=False):
    """Return PRs that waited (or have been waiting) over 24h for a review.

    NOTE(review): ``use_time_before_review`` is currently unused; kept for
    backward compatibility with existing callers.
    """
    over_24h = []
    for pr in pr_list:
        if hours_without_review(pr) > 24:
            over_24h.append(pr)
    return over_24h
53 |
54 |
def format_pr_list(pr_list):
    """Normalize raw PR nodes, attaching the first cross-author review time.

    Returns a list of dicts with title/author/ready_at/first_review_at;
    first_review_at is None for unreviewed PRs.
    """
    formatted = []
    for pr in pr_list:
        # Compute once: the original evaluated get_first_review(pr) twice
        # per PR (once for the value, once for the condition).
        first_review = get_first_review(pr)
        formatted.append(
            {
                "title": pr["title"],
                "author": get_author_login(pr),
                "ready_at": get_ready_datetime_from_pr(pr),
                "first_review_at": extract_datetime_or_none(
                    first_review.get("createdAt")
                )
                if first_review
                else None,
            }
        )
    return formatted
71 |
72 |
def filter_reviewed_prs(pr_list):
    """Keep only PRs that have already received a first review."""
    reviewed = []
    for pr in pr_list:
        if pr["first_review_at"] is not None:
            reviewed.append(pr)
    return reviewed
75 |
76 |
def get_time_to_review_data(
    pr_list, include_hotfixes, exclude_authors, filter_authors, exclude_weekends
):
    """Compute statistics on the wait between PR readiness and first review.

    Returns a dict with mean/median/percentile_95 plus the formatted PR list,
    the count of unreviewed PRs, and the count of PRs that waited over 24h.

    NOTE(review): with an empty review_time_list, numpy.mean/median return
    NaN — confirm callers guard against an empty timeframe.
    """
    valid_pr_list = filter_valid_prs(
        pr_list, include_hotfixes, exclude_authors, filter_authors
    )
    formatted_pr_list = format_pr_list(valid_pr_list)
    reviewed_prs = filter_reviewed_prs(formatted_pr_list)
    prs_over_24h_list = filter_prs_with_more_than_24h_before_review(
        formatted_pr_list
    )

    review_time_list = []
    for pr in reviewed_prs:
        if exclude_weekends:
            delta = get_time_without_weekend(pr["ready_at"], pr["first_review_at"])
        else:
            delta = pr["first_review_at"] - pr["ready_at"]
        review_time_list.append(delta)

    return {
        "mean": numpy.mean(review_time_list),
        "median": numpy.median(review_time_list),
        "percentile_95": numpy.percentile(review_time_list, 95),
        "total_prs": formatted_pr_list,
        "unreviewed_prs": len(formatted_pr_list) - len(reviewed_prs),
        "prs_over_24h": len(prs_over_24h_list),
    }
113 |
114 |
def calulate_prs_review_time_statistics(
    pr_list, include_hotfixes, exclude_authors, filter_authors, exclude_weekends
):
    """Print time-to-review statistics for the given PRs.

    Parameters are passed straight through to get_time_to_review_data.
    """
    data = get_time_to_review_data(
        pr_list=pr_list,
        include_hotfixes=include_hotfixes,
        exclude_authors=exclude_authors,
        filter_authors=filter_authors,
        exclude_weekends=exclude_weekends,
    )

    total_prs = len(data["total_prs"])
    # Bug fix: an empty timeframe previously crashed with ZeroDivisionError
    # in the percentage calculations below (and NaN statistics).
    if total_prs == 0:
        print("There are no valid PRs to pull this data from, please select another timeframe")
        return

    print(
        f" \033[1mTime to review\033[0m\n"
        f" ----------------------------------\n"
        f" Total valid PRs: {total_prs}\n"
        f" Unreviewed PRs: {data['unreviewed_prs']}"
        f" ({round((data['unreviewed_prs'] * 100) / total_prs, 2)}%)\n"
        f" PRs with more than 24h waiting for review: {data['prs_over_24h']}"
        f" ({round(data['prs_over_24h'] * 100 / total_prs, 2)}%)\n"
        f" ----------------------------------\n"
        f" Mean: {format_timedelta_to_text(data['mean'])}"
        f" ({format_timedelta_to_hours(data['mean'])} hours)\n"
        f" Median: {format_timedelta_to_text(data['median'])}"
        f" ({format_timedelta_to_hours(data['median'])} hours)\n"
        f" 95 percentile: {format_timedelta_to_text(data['percentile_95'])}"
        f" ({format_timedelta_to_hours(data['percentile_95'])} hours)\n"
    )
142 |
--------------------------------------------------------------------------------
/github_metrics/request.py:
--------------------------------------------------------------------------------
1 | from time import sleep
2 |
3 | import requests
4 | from requests.auth import HTTPBasicAuth
5 |
6 | from github_metrics.common import extract_datetime_or_none
7 | from github_metrics.settings import (
8 | GITHUB_LOGIN,
9 | GITHUB_TOKEN,
10 | ORG_NAME,
11 | REPOSITORY_NAME,
12 | )
13 |
14 |
class ClientError(Exception):
    """Raised when the GitHub GraphQL API responds with an ``errors`` payload.

    Subclasses Exception (not BaseException): BaseException is reserved for
    system-exiting exceptions, and deriving from it made ``except Exception``
    handlers miss this error.
    """
17 |
18 |
def format_request_for_github(cursor=None):
    """Build the GraphQL query string for one page of pull requests.

    Fetches up to 100 PRs (newest first) for the configured org/repo, with
    reviews, commits, and the last READY_FOR_REVIEW_EVENT per PR.

    Args:
        cursor: endCursor of the previous page; when given, pagination
            resumes after it.

    Returns:
        The query as a string, ready to be posted to the GraphQL endpoint.
    """
    after = ""
    if cursor:
        # Inject an `after:` argument so the next request continues paging.
        after = f', after: "{cursor}"'

    # Doubled braces escape literal `{`/`}` for str.format below.
    return """{{
  organization(login: "{ORG_NAME}") {{
    repository(name: "{REPOSITORY_NAME}") {{
      pullRequests(
        first: 100,
        orderBy: {{
          field: CREATED_AT,
          direction: DESC
        }}{after}
      ) {{
        pageInfo {{
          endCursor
          startCursor
          hasNextPage
        }}
        nodes {{
          id
          title
          createdAt
          baseRefName
          headRefName
          additions
          deletions
          reviews(first: 10) {{
            nodes {{
              createdAt
              state
              author {{
                login
              }}
            }}
          }}
          author {{
            login
          }}
          mergedAt
          closedAt
          commits(first: 100) {{
            edges {{
              node {{
                commit {{
                  oid
                  message
                  committedDate
                }}
              }}
            }}
          }}
          timelineItems(last: 1, itemTypes: READY_FOR_REVIEW_EVENT) {{
            edges {{
              node {{
                ... on ReadyForReviewEvent {{
                  id
                  createdAt
                }}
              }}
            }}
          }}
        }}
      }}
    }}
  }}
}}""".format(
        after=after, ORG_NAME=ORG_NAME, REPOSITORY_NAME=REPOSITORY_NAME
    )
89 |
90 |
def pr_was_created_between(pr, start_date, end_date):
    """True when the PR's createdAt falls inside [start_date, end_date]."""
    created_at = extract_datetime_or_none(pr.get("createdAt"))
    return start_date <= created_at <= end_date
94 |
95 |
def fetch_prs_between(start_date, end_date):
    """Fetch every pull request created between start_date and end_date.

    Pages backwards through the repository's PRs (newest first) until a page
    older than start_date is reached or pagination ends.

    Raises:
        EnvironmentError: when the required settings are missing.
        ClientError: when the GraphQL API returns an ``errors`` payload.
    """
    if not all([GITHUB_LOGIN, GITHUB_TOKEN, ORG_NAME, REPOSITORY_NAME]):
        # Bug fix: "you .env" -> "your .env" in the user-facing message.
        raise EnvironmentError(
            "The environment is not properly configured. "
            "Please check if your .env file is created and has the proper variables."
        )
    pr_list = []
    current_date = None
    cursor = None
    has_next_page = True

    while has_next_page and (not current_date or current_date > start_date):
        response = requests.post(
            "https://api.github.com/graphql",
            auth=HTTPBasicAuth(GITHUB_LOGIN, GITHUB_TOKEN),
            json={"query": format_request_for_github(cursor)},
        )
        # Parse the body once instead of re-calling response.json() per access.
        payload = response.json()

        if payload.get("errors"):
            raise ClientError(payload.get("errors"))

        # Collapse the chain of None-checks: any missing level ends paging.
        data = payload.get("data") or {}
        organization = data.get("organization") or {}
        repository = organization.get("repository") or {}
        prs = repository.get("pullRequests") or {}

        page_info = prs.get("pageInfo")
        page_pr_list = prs.get("nodes")
        if not page_info or not page_pr_list:
            break

        has_next_page = page_info["hasNextPage"]
        cursor = page_info["endCursor"]

        # The page is ordered newest-first, so its last entry is the oldest;
        # once that drops below start_date there is nothing older to fetch.
        current_date = extract_datetime_or_none(page_pr_list[-1]["createdAt"])
        pr_list += [
            pr
            for pr in page_pr_list
            if pr_was_created_between(pr, start_date, end_date)
        ]
        sleep(1)  # be polite to GitHub's rate limits between pages
    return pr_list
159 |
--------------------------------------------------------------------------------
/github_metrics/helpers.py:
--------------------------------------------------------------------------------
1 | import datetime
2 |
3 | from github_metrics.common import get_author_login
4 |
5 |
def is_closed(pr):
    """A PR counts as closed when it was closed without being merged."""
    if pr.get("mergedAt"):
        return False
    return pr.get("closedAt") is not None
8 |
9 |
def is_release(pr):
    """Heuristic: a PR into production coming from master/main or marked 'release'."""
    base = pr["baseRefName"].lower()
    # Only PRs targeting production can be releases.
    if "production" not in base:
        return False

    head = pr["headRefName"].lower()
    title = pr["title"].lower()
    return (
        "release" in head
        or "master" in head
        or "main" in head
        or "release" in title
    )
22 |
23 |
def is_hotfix(pr):
    """Heuristic: a PR into production whose branch or title marks a hotfix."""
    # Only PRs targeting production can be hotfixes.
    if "production" not in pr["baseRefName"].lower():
        return False

    head = pr["headRefName"].lower()
    return "hotfix/" in head or "hf/" in head or "hotfix" in pr["title"].lower()
35 |
36 |
def is_merge_back_from_prod(pr):
    """True for PRs that merge production back into master/main."""
    if "production" not in pr["headRefName"].lower():
        return False
    base = pr["baseRefName"].lower()
    return "master" in base or "main" in base
43 |
44 |
def exclude_closeds(pr_list):
    """Drop PRs that were closed without being merged."""
    remaining = []
    for pr in pr_list:
        if not is_closed(pr):
            remaining.append(pr)
    return remaining
47 |
48 |
def exclude_releases(pr_list):
    """Drop release PRs from the list."""
    remaining = []
    for pr in pr_list:
        if not is_release(pr):
            remaining.append(pr)
    return remaining
51 |
52 |
def exclude_hotfixes(pr_list):
    """Drop hotfix PRs from the list."""
    remaining = []
    for pr in pr_list:
        if not is_hotfix(pr):
            remaining.append(pr)
    return remaining
55 |
56 |
def exclude_merge_backs_from_prod(pr_list):
    """Drop production-to-master/main merge-back PRs from the list."""
    remaining = []
    for pr in pr_list:
        if not is_merge_back_from_prod(pr):
            remaining.append(pr)
    return remaining
59 |
60 |
def exclude_authors_in_list(pr_list, authors):
    """Drop PRs whose author login appears in *authors*."""
    # Idiom fix: "x not in y" is the conventional (and equivalent)
    # spelling of "not x in y".
    return [pr for pr in pr_list if get_author_login(pr) not in authors]
63 |
64 |
def filter_authors_in_list(pr_list, authors):
    """Keep only PRs authored by someone listed in *authors*."""
    matching = []
    for pr in pr_list:
        if get_author_login(pr) in authors:
            matching.append(pr)
    return matching
67 |
68 |
def filter_valid_prs(pr_list, include_hotfixes, exclude_authors, filter_authors):
    """Apply the standard PR filters used by every metric.

    Always drops closed-unmerged PRs, releases, and merge-backs from
    production; optionally drops hotfixes and applies author filters.
    """
    prs = exclude_merge_backs_from_prod(exclude_releases(exclude_closeds(pr_list)))

    if not include_hotfixes:
        prs = exclude_hotfixes(prs)
    if exclude_authors:
        prs = exclude_authors_in_list(prs, exclude_authors)
    if filter_authors:
        prs = filter_authors_in_list(prs, filter_authors)
    return prs
81 |
82 |
def filter_hotfixes(pr_list, exclude_authors, filter_authors):
    """Return hotfix PRs, honoring the standard exclusions and author filters."""
    valid_pr_list = exclude_closeds(pr_list)
    valid_pr_list = exclude_releases(valid_pr_list)
    valid_pr_list = exclude_merge_backs_from_prod(valid_pr_list)
    # Bug fix: filter the already-cleaned list, not the raw pr_list —
    # previously the three exclusions above were silently discarded.
    valid_pr_list = [pr for pr in valid_pr_list if is_hotfix(pr)]

    if exclude_authors:
        valid_pr_list = exclude_authors_in_list(valid_pr_list, exclude_authors)
    if filter_authors:
        valid_pr_list = filter_authors_in_list(valid_pr_list, filter_authors)

    return valid_pr_list
95 |
96 |
def format_timedelta_to_text(timedelta):
    """Render a timedelta as 'D days H hours M minutes'.

    Negative durations yield the literal string "Invalid timeframe".
    """
    if timedelta.total_seconds() < 0:
        return "Invalid timeframe"

    # .seconds is the sub-day remainder (0..86399); split it into h/m.
    hours = timedelta.seconds // 3600
    minutes = (timedelta.seconds % 3600) // 60
    return f"{timedelta.days} days {hours} hours {minutes} minutes"
105 |
106 |
def format_timedelta_to_hours(timedelta):
    """Convert a timedelta to a float hour count rounded to two decimals."""
    seconds_per_hour = 3600
    return round(timedelta.total_seconds() / seconds_per_hour, 2)
109 |
110 |
def get_weekend_time(start_at, end_at):
    """Return the total timedelta of weekend time inside [start_at, end_at].

    Walks the range day by day and accumulates the portion of each
    Saturday/Sunday that overlaps the interval.

    NOTE(review): the first-day branch caps at 23:59:59, so one second per
    starting weekend day is never counted — confirm this is acceptable.
    NOTE(review): when the interval starts and ends on the same weekend day,
    only the ``i == 0`` branch runs, measuring start-to-end-of-day rather
    than start-to-``end_at`` — verify against callers.
    """
    # Number of calendar-day boundaries the interval spans.
    day_count = end_at.date() - start_at.date()
    weekends = datetime.timedelta()
    for i in range(0, day_count.days + 1):
        # `day` keeps start_at's time-of-day and tzinfo, shifted i days ahead.
        day = start_at + datetime.timedelta(days=i)

        # 5 represents Saturday and 6 represents Sunday
        if day.weekday() == 5 or day.weekday() == 6:
            # In a time period, if it's starting a time count,
            # count from the beginning until end of day
            if i == 0:
                weekends += (
                    datetime.datetime(day.year, day.month, day.day, 23, 59, 59).replace(
                        tzinfo=day.tzinfo
                    )
                    - day
                )

            # If it's the end of a time period, count from start of day until the end time
            elif i == day_count.days:
                weekends += end_at - datetime.datetime(
                    day.year, day.month, day.day
                ).replace(tzinfo=day.tzinfo)

            else:
                # A full weekend day strictly inside the interval.
                weekends += datetime.timedelta(days=1)
    return weekends
138 |
139 |
def get_time_without_weekend(start_at, end_at):
    """Elapsed time between start_at and end_at, minus weekend time."""
    total_elapsed = end_at - start_at
    return total_elapsed - get_weekend_time(start_at, end_at)
144 |
--------------------------------------------------------------------------------
/github_metrics/main.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | import arrow
4 | from fastapi import FastAPI, HTTPException
5 |
6 | from github_metrics.metrics.hotfixes_count import get_hotfixes_data
7 | from github_metrics.metrics.merge_rate import get_merge_rate_data
8 | from github_metrics.metrics.open_to_merge import get_open_to_merge_time_data
9 | from github_metrics.metrics.pr_size import get_pr_size_data
10 | from github_metrics.metrics.time_to_merge import get_time_to_merge_data
11 | from github_metrics.metrics.time_to_open import get_time_to_open_data
12 | from github_metrics.metrics.time_to_review import get_time_to_review_data
13 | from github_metrics.request import fetch_prs_between
14 |
15 | app = FastAPI()
16 |
17 |
@app.get("/api/metrics/{metric}")
def read_item(
    metric: str,
    start_date: str,
    end_date: str,
    include_hotfixes: Optional[bool] = False,
    exclude_weekends: Optional[bool] = True,
    exclude_author: Optional[str] = None,
    filter_author: Optional[str] = None,
):
    """Return the requested PR metric computed over [start_date, end_date].

    Args:
        metric: One of ttm, tto, ttr, pr_size, otm, mr, hotfixes_count.
        start_date / end_date: YYYY-mm-dd; end_date is extended to the end
            of that day.
        include_hotfixes: Include hotfix PRs in the calculation.
        exclude_weekends: Exclude weekend time from time-based metrics.
        exclude_author / filter_author: Comma-separated author logins to
            exclude from, or restrict to, the calculation.

    Raises:
        HTTPException: 403 when the range is not chronological, 404 for an
            unknown metric.
    """
    start = arrow.get(start_date)
    end = arrow.get(f"{end_date}T23:59:59")

    # Bug fix: the previous check ("end > start") rejected every valid
    # chronological range and accepted inverted ones.
    if start > end:
        raise HTTPException(
            status_code=403, detail="Date range is not in chronological order"
        )

    # The metric helpers expect author *lists*; split the comma-separated
    # query parameters the same way the CLI entry point (run.py) does.
    exclude_authors = exclude_author.split(",") if exclude_author else []
    filter_authors = filter_author.split(",") if filter_author else []

    pr_list = fetch_prs_between(start, end)

    if metric == "ttm":
        data = get_time_to_merge_data(
            pr_list=pr_list,
            include_hotfixes=include_hotfixes,
            exclude_authors=exclude_authors,
            filter_authors=filter_authors,
            exclude_weekends=exclude_weekends,
        )

        return {
            "metric": "Time to merge",
            "mean": data["mean"],
            "median": data["median"],
            "percentile_95": data["percentile_95"],
            "prs_list": data["merged_prs"],
        }

    elif metric == "tto":
        data = get_time_to_open_data(
            pr_list=pr_list,
            include_hotfixes=include_hotfixes,
            exclude_authors=exclude_authors,
            filter_authors=filter_authors,
            exclude_weekends=exclude_weekends,
        )

        return {
            "metric": "Time to open",
            "mean": data["mean"],
            "median": data["median"],
            "percentile_95": data["percentile_95"],
            "prs_list": data["total_prs"],
        }

    elif metric == "ttr":
        data = get_time_to_review_data(
            pr_list=pr_list,
            include_hotfixes=include_hotfixes,
            exclude_authors=exclude_authors,
            filter_authors=filter_authors,
            exclude_weekends=exclude_weekends,
        )

        return {
            "metric": "Time to review",
            "mean": data["mean"],
            "median": data["median"],
            "percentile_95": data["percentile_95"],
            "prs_list": data["total_prs"],
            "unreviewed_prs": data["unreviewed_prs"],
            "prs_over_24h": data["prs_over_24h"],
        }

    elif metric == "pr_size":
        data = get_pr_size_data(
            pr_list=pr_list,
            include_hotfixes=include_hotfixes,
            exclude_authors=exclude_authors,
            filter_authors=filter_authors,
        )

        return {
            "metric": "PR size",
            "prs_list": data["total_prs"],
            "total_mean": data["total_mean"],
            "total_median": data["total_median"],
            "total_percentile_95": data["total_percentile_95"],
            "rate_mean": data["rate_mean"],
            "rate_median": data["rate_median"],
            "rate_percentile_95": data["rate_percentile_95"],
        }

    elif metric == "otm":
        data = get_open_to_merge_time_data(
            pr_list=pr_list,
            include_hotfixes=include_hotfixes,
            exclude_authors=exclude_authors,
            filter_authors=filter_authors,
            exclude_weekends=exclude_weekends,
        )

        return {
            "metric": "Open to merge",
            "mean": data["mean"],
            "median": data["median"],
            "percentile_95": data["percentile_95"],
            "prs_list": data["total_prs"],
            "merged_pr_rate": data["merged_pr_rate"],
        }

    elif metric == "mr":
        data = get_merge_rate_data(
            pr_list=pr_list,
            include_hotfixes=include_hotfixes,
            exclude_authors=exclude_authors,
            filter_authors=filter_authors,
        )

        return {
            "metric": "Merge rate",
            "prs_list": data["total_prs"],
            "prs_authors": data["prs_authors"],
            "merge_rate": data["merge_rate"],
        }

    elif metric == "hotfixes_count":
        data = get_hotfixes_data(
            pr_list=pr_list,
            exclude_authors=exclude_authors,
            filter_authors=filter_authors,
        )

        return {
            "metric": "Hotfix count",
            "hotfix_list": data["hotfix_list"],
        }

    else:
        raise HTTPException(status_code=404, detail="Metric not found")
157 |
--------------------------------------------------------------------------------
/github_metrics/run.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import arrow
4 | import click
5 |
6 | from github_metrics.metrics.all import call_all_metrics
7 | from github_metrics.metrics.hotfixes_count import count_hotfixes
8 | from github_metrics.metrics.prs_count import count_prs
9 | from github_metrics.metrics.merge_rate import call_merge_rate_statistics
10 | from github_metrics.metrics.open_to_merge import (
11 | calulate_prs_open_to_merge_time_statistics,
12 | )
13 | from github_metrics.metrics.pr_size import call_pr_size_statistics
14 | from github_metrics.metrics.time_to_merge import call_mean_time_to_merge_statistics
15 | from github_metrics.metrics.time_to_open import call_time_to_open_statistics
16 | from github_metrics.metrics.time_to_review import calulate_prs_review_time_statistics
17 | from github_metrics.request import fetch_prs_between
18 | from github_metrics.settings import BASE_DIR
19 |
20 |
def setup_enviroment_variables():
    """Prompt for GitHub credentials/repo info and write them to BASE_DIR/.env.

    Overwrites any existing .env file at that location.
    """
    prompts = [
        ("GITHUB_LOGIN", "Github username: "),
        ("GITHUB_TOKEN", "Github token: "),
        ("ORG_NAME", "Name of the organization: "),
        ("REPOSITORY_NAME", "Repository name: "),
    ]
    # Dicts preserve insertion order, so the .env lines keep this ordering.
    values = {key: input(prompt) for key, prompt in prompts}

    env_path = os.path.join(BASE_DIR, ".env")
    with open(env_path, "w") as env_file:
        env_file.write("\n".join(f"{key}={value}" for key, value in values.items()))
37 |
38 |
@click.command()
@click.option(
    "--setup",
    is_flag=True,
    default=False,
    # Typo fix in user-facing help: "enviroment" -> "environment".
    help="Set up your environment variables",
)
@click.option(
    "--metric",
    type=str,
    help="""The reference of the metric you'd like to run:

\b
ttm - Time to Merge
ttr - Time to Review
tto - Time to Open
otm - Open To Merge Time
mr - Merge Rate
pr_size - Pull Request Size
hotfixes_count - Hotfixes Count
prs_count - Pull Requests Count
\b""",
)
@click.option(
    "--start-date",
    type=str,
    help="""The metric start date.

Date in format YYYY-mm-dd""",
)
@click.option(
    "--end-date",
    type=str,
    help="""The metric cutoff date.

Date in format YYYY-mm-dd""",
)
@click.option(
    "--include-hotfixes",
    is_flag=True,
    default=False,
    help="Will include all hotfixes in the metric calculation.",
)
@click.option(
    "--exclude-authors",
    type=str,
    help="""
List of PR authors separated by a comma to be removed from metric.

eg.: username,other_username""",
)
@click.option(
    "--filter-authors",
    type=str,
    help="""
Will calculate prs created only by the authors listed in here.

eg.: username,other_username""",
)
@click.option(
    "--exclude-weekends",
    is_flag=True,
    default=False,
    help="Will exclude weekends from time metric.",
)
def cli(
    metric,
    start_date,
    end_date,
    include_hotfixes,
    exclude_authors,
    filter_authors,
    exclude_weekends,
    setup,
):
    """
    Generates metrics from Github API.
    """

    # --setup short-circuits everything else: write the .env file and exit.
    if setup:
        setup_enviroment_variables()
        return

    start_date = arrow.get(start_date)
    # Extend the cutoff to the end of the given day.
    end_date = arrow.get(f"{end_date}T23:59:59")

    # Comma-separated CLI strings -> author-login lists for the helpers.
    exclude_user_list = []
    if exclude_authors:
        exclude_user_list = exclude_authors.split(",")

    filter_user_list = []
    if filter_authors:
        filter_user_list = filter_authors.split(",")

    pr_list = fetch_prs_between(start_date, end_date)
    if metric == "ttm":
        call_mean_time_to_merge_statistics(
            pr_list,
            include_hotfixes,
            exclude_user_list,
            filter_user_list,
            exclude_weekends,
        )
    elif metric == "ttr":
        calulate_prs_review_time_statistics(
            pr_list,
            include_hotfixes,
            exclude_user_list,
            filter_user_list,
            exclude_weekends,
        )
    elif metric == "tto":
        call_time_to_open_statistics(
            pr_list,
            include_hotfixes,
            exclude_user_list,
            filter_user_list,
            exclude_weekends,
        )
    elif metric == "otm":
        calulate_prs_open_to_merge_time_statistics(
            pr_list,
            include_hotfixes,
            exclude_user_list,
            filter_user_list,
            exclude_weekends,
        )
    elif metric == "mr":
        call_merge_rate_statistics(
            pr_list, include_hotfixes, exclude_user_list, filter_user_list
        )
    elif metric == "pr_size":
        call_pr_size_statistics(
            pr_list, include_hotfixes, exclude_user_list, filter_user_list
        )
    elif metric == "prs_count":
        count_prs(pr_list, include_hotfixes, exclude_user_list, filter_user_list)
    else:
        # No/unknown metric: run every available metric.
        call_all_metrics(
            pr_list,
            include_hotfixes,
            exclude_user_list,
            filter_user_list,
            exclude_weekends,
        )
184 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # github-metrics
2 |
3 | ## About
4 |
5 | A script for getting your team PRs metrics.
6 |
7 | ## Running
8 |
9 | The run command takes the following arguments:
10 |
11 | `--metric`: The reference for the metric you'd like to calculate. If no metric is provided, it will calculate all metrics available.
12 |
13 | `--start-date`: The metric start date. Date in format YYYY-mm-dd
14 |
15 | `--end-date`: The metric cutoff date. Date in format YYYY-mm-dd
16 |
`--exclude-authors`: Will exclude all PRs created by the listed authors. This argument must list usernames separated by a comma. Example: `--exclude-authors=pantoja,github_username,other_username`
18 |
`--filter-authors`: Will take into consideration only the PRs created by the authors in this list. This argument must list usernames separated by a comma. Example: `--filter-authors=pantoja,github_username,other_username`
20 |
21 | `--include-hotfixes`: Will include all hotfixes in the metric calculation. By default, this value is false.
22 |
23 | `--exclude-weekends`: Will exclude weekend time from time metrics (time to merge, time to open, time to review).
24 |
25 | #### [All]
26 | `github_metrics --start-date=2021-03-22 --end-date=2021-03-24`
27 |
28 | #### [Time To Merge](#ttm)
29 | `github_metrics --metric=ttm --start-date=2021-03-22 --end-date=2021-03-24`
30 |
31 | #### [Time to Review](#ttr)
32 | `github_metrics --metric=ttr --start-date=2020-11-10 --end-date=2020-11-18`
33 |
34 | #### [Time to Open](#tto)
35 | `github_metrics --metric=tto --start-date=2020-11-10 --end-date=2020-11-18`
36 |
37 | #### [Open to Merge Time](#otm)
38 | `github_metrics --metric=otm --start-date=2020-11-10 --end-date=2020-11-18`
39 |
40 | #### [Merge Rate](#mr)
41 | `github_metrics --metric=mr --start-date=2021-03-22 --end-date=2021-03-24`
42 |
43 | #### [Pull Request Size](#pr-size)
44 | `github_metrics --metric=pr_size --start-date=2020-11-10 --end-date=2020-11-18`
45 |
46 | #### [Pull Requests Count](#pr-size)
47 | `github_metrics --metric=prs_count --start-date=2020-11-10 --end-date=2020-11-18`
48 |
49 | #### [Hotfixes count](#hotfixes-count)
50 | `github_metrics --metric=hotfixes_count --start-date=2020-11-10 --end-date=2020-11-18`
51 |
52 | ## Metrics available
53 | - Time To Merge (ttm):
The Time to Merge metric calculates the time between the first commit of a given branch and the merge action of its pull request.
55 |
56 | - Time To Review (ttr):
57 | It calculates the time a PR waited for, or has been waiting for the first review since the PR opened.
58 |
59 | - Time To Open (tto):
60 | The time elapsed between a user's first commit to a branch and opening a pull request for that branch.
61 |
62 | - Open To Merge Time (otm):
63 | Returns statistics related to the time between opening and merging a pull request.
64 |
65 | - Merge Rate (mr):
It measures the ratio of the total number of merged pull requests to the total number of developers active in this time period (number of merged PRs / dev). A value closer to 1 indicates that each developer is merging a PR. A higher number indicates more merged PRs than devs, and vice versa.
67 |
68 | - Pull Request Size (pr_size):
It generates metrics related to the number of lines added and deleted in a PR. The output will include metrics for the sum of changed lines in a PR (lines added + lines deleted), and for the addition rate metric (lines added / lines deleted). In the latter case, a higher rate means more lines are being added than deleted.
70 |
71 | - Pull Request Count (prs_count):
72 | It generates metrics related to the number of PRs opened during the period.
73 |
74 | - Hotfixes Count (hotfixes_count):
75 | The number of hotfixes in the period.
76 |
77 | ## Installing the project
78 | 1. Install using `pip install github-metrics`
2. Set up settings variables by running `github_metrics --setup`. You'll be asked to give the following information:
80 |
81 | `Github username`: Your github account login
82 |
83 | `Enter a github token`: An access token generated with your github account. More information accessible through [this guide](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token). The lib requires full repo access for it to work
84 |
85 | `Name of the organization`: The name of the organization where the repository is located
86 |
87 | `Repository name`: The repository name of the project of your choice
88 |
89 | **NOTE:** Running the `--setup` flag will overwrite the existing environment settings.
90 |
91 | ## Project setup
92 |
93 | ### Dependencies setup
94 | 1. Create a virtual environment with `virtualenv venv`
95 | 2. Install dependencies with `pip install --editable .`
96 |
97 | ### Setup with docker
98 | 1. Run the command with `docker-compose run app {}`. e.g. `docker-compose run app github_metrics --metric=ttm --start-date=2021-03-22 --end-date=2021-03-24`
99 |
100 | ### Project variables setup
101 | 1. Create a .env file by copying from .env.example with `cp .env.example .env`
102 | 2. Fill settings variables:
103 |
104 | `REPOSITORY_NAME`: The name of the repository
105 | `ORG_NAME`: The name of the organization
106 | `GITHUB_LOGIN`: Your github account username
107 | `GITHUB_TOKEN`: The access token generated with your github account.
108 |
109 | 
110 |
111 |
112 | Release Process
113 | ===============
114 |
115 | For maintainers only:
116 |
117 | - Run `rm -rf build dist` to delete current build archives
118 | - Install dependencies with `pip install -r ./requirements.txt`. Make sure you are inside a virtual environment
119 | - Run ``bump2version `` to update the version number (pick one of the options)
120 |
121 | - Version number on ``github_metrics/__init__.py`` and ``setup.py`` will be updated automatically
122 | - You can specify the ``--new_version`` flag in case you wish to manually set the newest version (if not provided, it will be done automatically based on the chosen option)
123 | - Build the artifacts with `python setup.py sdist bdist_wheel --universal`
124 | - Upload the new version with `twine upload dist/*`
125 |
--------------------------------------------------------------------------------
/tests/test_helpers.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import unittest
3 |
4 | from github_metrics.helpers import (
5 | exclude_authors_in_list,
6 | exclude_closeds,
7 | exclude_hotfixes,
8 | exclude_merge_backs_from_prod,
9 | exclude_releases,
10 | filter_authors_in_list,
11 | format_timedelta_to_text,
12 | get_weekend_time,
13 | )
14 |
15 |
class TestHelpers(unittest.TestCase):
    """Tests for the PR-list filtering and timedelta formatting helpers."""

    @staticmethod
    def _prs_by_authors(logins):
        """Build one minimal PR dict per login, mirroring the API payload shape.

        Shared fixture builder for the author include/exclude tests, which
        previously duplicated the same six-entry list verbatim.
        """
        return [
            {"author": {"login": login}, "headRefName": "production"}
            for login in logins
        ]

    def test_exclude_closed_prs(self):
        """One of three PRs is dropped by exclude_closeds."""
        prs = [
            {
                "mergedAt": "2021-03-24T11:13:19Z",
                "closedAt": None,
            },
            {
                # Closed without being merged — presumably the one excluded.
                "mergedAt": None,
                "closedAt": "2021-03-24T11:13:19Z",
            },
            {
                "mergedAt": "2021-03-24T11:13:19Z",
                "closedAt": "2021-03-24T11:13:19Z",
            },
        ]

        closed_prs = exclude_closeds(prs)
        self.assertEqual(len(closed_prs), 2)

    def test_exclude_releases_prs(self):
        """Only the PR with no release title/branch markers survives."""
        prs = [
            {
                "title": "",
                "baseRefName": "production",
                "headRefName": "release/2021-03-24.0",
            },
            {
                "title": "Release 2021-03-24.0",
                "baseRefName": "production",
                "headRefName": "",
            },
            {
                "title": "",
                "baseRefName": "master",
                "headRefName": "",
            },
            {
                "title": "",
                "baseRefName": "production",
                "headRefName": "master",
            },
        ]
        releases_prs = exclude_releases(prs)
        self.assertEqual(len(releases_prs), 1)

    def test_exclude_merge_backs_from_prod(self):
        """The production -> master merge-back PR is excluded."""
        prs = [
            {
                "baseRefName": "master",
                "headRefName": "production",
            },
            {
                "baseRefName": "master",
                "headRefName": "",
            },
            {
                "baseRefName": "",
                "headRefName": "production",
            },
        ]
        pr_list = exclude_merge_backs_from_prod(prs)
        self.assertEqual(len(pr_list), 2)

    def test_format_time_string(self):
        """A positive timedelta is rendered as 'D days H hours M minutes'."""
        time = datetime.timedelta(seconds=1 * 24 * 60 * 60 + 48035)
        formatted_time = format_timedelta_to_text(time)

        hours, remainder = divmod(time.seconds, 3600)
        minutes, seconds = divmod(remainder, 60)
        self.assertEqual(
            formatted_time, f"{time.days} days {hours} hours {minutes} minutes"
        )

    def test_negative_timedelta_format_time_returns_invalid(self):
        """A negative timedelta yields the 'Invalid timeframe' sentinel."""
        time = datetime.timedelta(seconds=1 * 24 * 60 * 60 + 48035)
        formatted_time = format_timedelta_to_text(-time)

        self.assertEqual(formatted_time, "Invalid timeframe")

    def test_exclude_authors_from_pr(self):
        """PRs authored by the listed logins are removed (beyonce appears twice)."""
        prs = self._prs_by_authors(
            ["ladygaga", "beyonce", "beyonce", "grimes", "badgalriri", "katyperry"]
        )

        valid_prs = exclude_authors_in_list(prs, authors=["grimes", "katyperry"])
        self.assertEqual(len(valid_prs), 4)

    def test_filter_authors_from_pr(self):
        """Only PRs authored by the listed logins are kept."""
        prs = self._prs_by_authors(
            ["ladygaga", "beyonce", "beyonce", "grimes", "badgalriri", "katyperry"]
        )

        valid_prs = filter_authors_in_list(prs, authors=["grimes", "katyperry"])
        self.assertEqual(len(valid_prs), 2)

    def test_exclude_hotfixes(self):
        """The PR from the 'hf/' branch is excluded; 'hotfix' in a title is not."""
        prs = [
            {
                "baseRefName": "production",
                "headRefName": "feature/new",
                "title": "Open modal",
            },
            {
                "baseRefName": "production",
                "headRefName": "hf/page",
                "title": "page",
            },
            {
                # 'hotfix' only appears inside the branch/title text, not as
                # an 'hf/' prefix — expected to be kept.
                "baseRefName": "master",
                "headRefName": "feature/test-hotfix",
                "title": "adds test",
            },
        ]
        pr_list = exclude_hotfixes(prs)
        self.assertEqual(len(pr_list), 2)
178 |
179 |
class TestWeekendTime(unittest.TestCase):
    """Tests for get_weekend_time over ranges that do and don't span weekends."""

    def test_get_weekend_time_between_months_correctly(self):
        # Fri 2021-05-28 -> Fri 2021-06-04 spans one full weekend (2 days),
        # crossing a month boundary.
        start_at = datetime.datetime(2021, 5, 28, 14, 24)
        end_at = datetime.datetime(2021, 6, 4, 9, 47)
        weekend_time = get_weekend_time(start_at, end_at)

        self.assertEqual(weekend_time, datetime.timedelta(days=2))

    def test_get_weekend_time_of_pr_reviewed_through_weekend(self):
        # Tue 2021-06-01 -> Tue 2021-06-15 contains two full weekends (4 days).
        start_at = datetime.datetime(2021, 6, 1, 9, 35)
        end_at = datetime.datetime(2021, 6, 15, 18, 42)
        weekend_count = get_weekend_time(start_at, end_at)

        self.assertEqual(weekend_count, datetime.timedelta(days=4))

    def test_get_weekend_time_of_pr_reviewed_through_weekdays(self):
        # Mon 2021-05-31 -> Tue 2021-06-01: no weekend time at all.
        start_at = datetime.datetime(2021, 5, 31, 14, 12)
        end_at = datetime.datetime(2021, 6, 1, 9, 2)
        weekend_count = get_weekend_time(start_at, end_at)

        self.assertEqual(weekend_count, datetime.timedelta(days=0))

    def test_get_weekend_time_of_pr_reviewed_on_a_saturday(self):
        # Fri 2021-04-30 14:00 -> Sat 2021-05-01 09:00: 9 hours fall on Saturday.
        start_at = datetime.datetime(2021, 4, 30, 14)
        end_at = datetime.datetime(2021, 5, 1, 9)
        weekend_count = get_weekend_time(start_at, end_at)
        # 9 hours
        self.assertEqual(weekend_count, datetime.timedelta(seconds=32400))

    def test_get_weekend_time_of_pr_created_on_a_sunday(self):
        # Sun 2021-06-06 09:00 -> Tue 2021-06-08 12:00: remainder of Sunday.
        start_at = datetime.datetime(2021, 6, 6, 9)
        end_at = datetime.datetime(2021, 6, 8, 12)
        weekend_count = get_weekend_time(start_at, end_at)
        # 53999 s = 14:59:59 — one second short of a full 15 hours; this pins
        # the helper's existing day-boundary behavior (stray debug print removed).
        self.assertEqual(weekend_count, datetime.timedelta(seconds=53999))
216 |
217 |
218 | if __name__ == "__main__":
219 | unittest.main()
220 |
--------------------------------------------------------------------------------
/tests/mocks.py:
--------------------------------------------------------------------------------
# Shared test fixture: a list of five pull-request dicts with the nested
# GraphQL-style shape the metrics code consumes (`reviews.nodes`,
# `commits.edges[].node.commit`). IDs/oids are zeroed-out placeholders.
request_mock = [
    # PR 1: still open — mergedAt and closedAt are both None; feature branch
    # into master with six commits and one approving review.
    {
        "id": "00000000000000000000000",
        "title": "Adds new page",
        "createdAt": "2021-03-25T21:19:45Z",
        "baseRefName": "master",
        "headRefName": "feat/adds-new-page",
        "reviews": {
            "nodes": [
                {
                    "createdAt": "2021-03-26T17:55:59Z",
                    "state": "APPROVED",
                    "author": {"login": "mariah_carey"},
                }
            ]
        },
        "author": {"login": "britney_spears"},
        "mergedAt": None,
        "closedAt": None,
        "commits": {
            "edges": [
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Creates view",
                            "committedDate": "2021-03-24T17:06:18Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds section 1",
                            "committedDate": "2021-03-25T21:04:15Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds section 2",
                            "committedDate": "2021-03-25T21:05:14Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds responsiveness",
                            "committedDate": "2021-03-25T21:11:23Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Changes colors",
                            "committedDate": "2021-03-25T21:16:35Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds frontend tests",
                            "committedDate": "2021-03-26T17:50:36Z",
                        }
                    }
                },
            ]
        },
    },
    # PR 2: merged hotfix — 'hf/' branch into production, two reviews
    # (COMMENTED then APPROVED), single commit.
    {
        "id": "00000000000000000000000",
        "title": "Fixes navbar",
        "createdAt": "2021-03-25T14:11:48Z",
        "baseRefName": "production",
        "headRefName": "hf/fixes-navbar",
        "reviews": {
            "nodes": [
                {
                    "createdAt": "2021-03-25T14:20:59Z",
                    "state": "COMMENTED",
                    "author": {"login": "beyonce"},
                },
                {
                    "createdAt": "2021-03-26T00:26:24Z",
                    "state": "APPROVED",
                    "author": {"login": "beyonce"},
                },
            ]
        },
        "author": {"login": "rihanna"},
        "mergedAt": "2021-03-26T13:29:04Z",
        "closedAt": "2021-03-26T13:29:04Z",
        "commits": {
            "edges": [
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Fixes behavior of navbar",
                            "committedDate": "2021-03-25T12:53:49Z",
                        }
                    }
                }
            ]
        },
    },
    # PR 3: merged feature PR into master with one approval and two commits.
    {
        "id": "00000000000000000000000",
        "title": "New footer",
        "createdAt": "2021-03-25T15:09:22Z",
        "baseRefName": "master",
        "headRefName": "feat/new-footer",
        "reviews": {
            "nodes": [
                {
                    "createdAt": "2021-03-26T09:55:20Z",
                    "state": "APPROVED",
                    "author": {"login": "britney_spears"},
                }
            ]
        },
        "author": {"login": "mariah_carey"},
        "mergedAt": "2021-03-26T09:58:24Z",
        "closedAt": "2021-03-26T09:58:26Z",
        "commits": {
            "edges": [
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Creates base",
                            "committedDate": "2021-03-24T17:06:18Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds animation",
                            "committedDate": "2021-03-25T21:04:15Z",
                        }
                    }
                },
            ]
        },
    },
    # PR 4: production branch merged back into master ("merge-back"),
    # no reviews, commits are mostly merge commits.
    {
        "id": "00000000000000000000000",
        "title": "Production",
        "createdAt": "2021-03-25T12:03:56Z",
        "baseRefName": "master",
        "headRefName": "production",
        "reviews": {"nodes": []},
        "author": {"login": "mariah_carey"},
        "mergedAt": "2021-03-25T12:04:04Z",
        "closedAt": "2021-03-25T12:04:04Z",
        "commits": {
            "edges": [
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Merge pull request",
                            "committedDate": "2021-03-24T11:13:18Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Creates tests",
                            "committedDate": "2021-03-24T20:00:02Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adapt tests",
                            "committedDate": "2021-03-24T20:18:42Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Merge pull request",
                            "committedDate": "2021-03-24T23:01:08Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Merge pull request",
                            "committedDate": "2021-03-25T11:33:49Z",
                        }
                    }
                },
            ]
        },
    },
    # PR 5: merged PR with a full review cycle
    # (CHANGES_REQUESTED -> COMMENTED -> COMMENTED -> APPROVED) and nine commits.
    {
        "id": "00000000000000000000000",
        "title": "Adds api integration",
        "createdAt": "2021-03-25T00:13:09Z",
        "baseRefName": "master",
        "headRefName": "feat/upload-course-content-notify-part-final",
        "reviews": {
            "nodes": [
                {
                    "createdAt": "2021-03-25T12:23:52Z",
                    "state": "CHANGES_REQUESTED",
                    "author": {"login": "mariah_carey"},
                },
                {
                    "createdAt": "2021-03-25T12:51:48Z",
                    "state": "COMMENTED",
                    "author": {"login": "ladygaga"},
                },
                {
                    "createdAt": "2021-03-25T12:54:59Z",
                    "state": "COMMENTED",
                    "author": {"login": "mariah_carey"},
                },
                {
                    "createdAt": "2021-03-25T14:53:42Z",
                    "state": "APPROVED",
                    "author": {"login": "mariah_carey"},
                },
            ]
        },
        "author": {"login": "ladygaga"},
        "mergedAt": "2021-03-25T15:09:17Z",
        "closedAt": "2021-03-25T15:09:17Z",
        "commits": {
            "edges": [
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds courses to platform",
                            "committedDate": "2021-03-24T17:47:39Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds tests",
                            "committedDate": "2021-03-24T17:49:45Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Fix return string",
                            "committedDate": "2021-03-24T17:51:10Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds error message",
                            "committedDate": "2021-03-24T21:15:59Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds fixtures",
                            "committedDate": "2021-03-24T23:51:24Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Create constants",
                            "committedDate": "2021-03-24T23:59:14Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Creates constants on front end",
                            "committedDate": "2021-03-25T00:06:16Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds success message",
                            "committedDate": "2021-03-25T00:32:45Z",
                        }
                    }
                },
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Delete unnecessary arg keys",
                            "committedDate": "2021-03-25T14:17:22Z",
                        }
                    }
                },
            ]
        },
    },
    # PR 6: closed without being merged — mergedAt is None but closedAt is set.
    {
        "id": "00000000000000000000000",
        "title": "New functionality",
        "createdAt": "2021-03-25T10:19:45Z",
        "baseRefName": "master",
        "headRefName": "feat/adds-new-page",
        "reviews": {
            "nodes": [
                {
                    "createdAt": "2021-03-26T17:55:59Z",
                    "state": "APPROVED",
                    "author": {"login": "mariah_carey"},
                }
            ]
        },
        "author": {"login": "britney_spears"},
        "mergedAt": None,
        "closedAt": "2021-03-26T18:02:10Z",
        "commits": {
            "edges": [
                {
                    "node": {
                        "commit": {
                            "oid": "0000000000000000000000000000",
                            "message": "Adds frontend tests",
                            "committedDate": "2021-03-25T17:50:36Z",
                        }
                    }
                },
            ]
        },
    },
]
369 |
--------------------------------------------------------------------------------