├── tests ├── __init__.py ├── unit │ ├── __init__.py │ ├── test_reponse.py │ ├── test_requests.py │ └── test_github_status.py └── functional │ ├── __init__.py │ └── test_api.py ├── CLAUDE.md ├── acceptance ├── __init__.py └── test_basic.py ├── ghmirror ├── __init__.py ├── core │ ├── __init__.py │ ├── constants.py │ ├── mirror_response.py │ └── mirror_requests.py ├── utils │ ├── __init__.py │ ├── extensions.py │ └── wait.py ├── decorators │ ├── __init__.py │ ├── metrics.py │ └── checks.py ├── data_structures │ ├── __init__.py │ ├── requests_cache.py │ ├── redis_data_structures.py │ └── monostate.py └── app │ └── __init__.py ├── docs ├── images │ └── grafana_hits_misses.png ├── devel_guide.md └── redis_cache_backend.md ├── .dockerignore ├── .gitignore ├── renovate.json ├── Makefile ├── Dockerfile ├── .tekton ├── github-mirror-master-push.yaml └── github-mirror-master-pull-request.yaml ├── openshift ├── github-mirror-acceptance.yaml └── github-mirror.yaml ├── AGENTS.md ├── pyproject.toml ├── README.md ├── LICENSE └── uv.lock /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /CLAUDE.md: -------------------------------------------------------------------------------- 1 | @AGENTS.md -------------------------------------------------------------------------------- /acceptance/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ghmirror/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/ghmirror/core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ghmirror/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/functional/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ghmirror/decorators/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ghmirror/data_structures/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/images/grafana_hits_misses.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/app-sre/github-mirror/HEAD/docs/images/grafana_hits_misses.png -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .docker 2 | .git/ 3 | /.venv/ 4 | /venv/ 5 | /.idea/ 6 | **/__pycache__/ 7 | /*.egg-info/ 8 | /.coverage 9 | /.pytest_cache/ 10 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .venv/* 2 | venv/* 3 | .idea/* 4 | __pycache__ 5 | *.egg-info/* 6 | .coverage 7 | .pytest_cache/ 8 | build/ 9 | .github_client_token 10 | -------------------------------------------------------------------------------- /renovate.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": ["github>app-sre/shared-pipelines//renovate/default.json"] 4 | } 5 | -------------------------------------------------------------------------------- /ghmirror/utils/extensions.py: -------------------------------------------------------------------------------- 1 | """Module to create a requests session that will be used to make all the requests to the GitHub API.""" 2 | 3 | import requests 4 | 5 | session = requests.Session() 6 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | check: 2 | uv run ruff check --no-fix 3 | uv run ruff format --check 4 | uv run pytest -v --forked --cov=ghmirror --cov-report=term-missing tests/ 5 | 6 | accept: 7 | python3 acceptance/test_basic.py 8 | 9 | format: 10 | uv run ruff check 11 | uv run ruff format 12 | 13 | local-acceptance-test: 14 | docker build -t github-mirror-acceptance --target prod . 
15 | docker run --rm -it -d -p 8080:8080 --name github-mirror-test github-mirror-acceptance 16 | CLIENT_TOKEN=$$(cat .github_client_token) GITHUB_MIRROR_URL=http://localhost:8080 python3 acceptance/test_basic.py; docker stop github-mirror-test 17 | -------------------------------------------------------------------------------- /acceptance/test_basic.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import requests 4 | 5 | GITHUB_MIRROR_URL = os.environ.get("GITHUB_MIRROR_URL") 6 | CLIENT_TOKEN = os.environ.get("CLIENT_TOKEN") 7 | 8 | 9 | def test_get_repo(path, code, cache): 10 | if not GITHUB_MIRROR_URL or not CLIENT_TOKEN: 11 | raise ValueError("GITHUB_MIRROR_URL and CLIENT_TOKEN must be set") 12 | url = f"{GITHUB_MIRROR_URL}{path}" 13 | headers = {"Authorization": f"token {CLIENT_TOKEN}"} 14 | 15 | response = requests.get(url, headers=headers, timeout=60) 16 | 17 | assert response.status_code == code 18 | if cache is not None: 19 | assert response.headers["X-Cache"] == cache 20 | 21 | 22 | if __name__ == "__main__": 23 | test_get_repo("/repos/app-sre/github-mirror", 200, None) 24 | test_get_repo("/repos/app-sre/github-mirror", 200, "ONLINE_HIT") 25 | -------------------------------------------------------------------------------- /ghmirror/core/constants.py: -------------------------------------------------------------------------------- 1 | # This program is free software; you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation; either version 2 of the License, or 4 | # (at your option) any later version. 5 | # 6 | # This program is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 9 | # 10 | # See LICENSE for more details. 11 | # 12 | # Copyright: Red Hat Inc. 
2020 13 | # Author: Amador Pahim 14 | 15 | """System constants.""" 16 | 17 | GH_API = "https://api.github.com" 18 | GH_STATUS_API = "https://www.githubstatus.com/api/v2/components.json" 19 | REQUESTS_TIMEOUT = 10 20 | STATUS_MAX_RETRIES = 3 21 | STATUS_SLEEP_TIME = 1 22 | STATUS_TIMEOUT = 10 23 | PER_PAGE_ELEMENTS = 30 24 | -------------------------------------------------------------------------------- /ghmirror/utils/wait.py: -------------------------------------------------------------------------------- 1 | """Functions to help waiting for a given state""" 2 | 3 | import time 4 | 5 | 6 | def wait_for(func, timeout, first=0.0, step=1.0, args=None, kwargs=None): 7 | """Wait until func() evaluates to True. 8 | 9 | If func() evaluates to True before timeout expires, return the 10 | value of func(). Otherwise return None. 11 | 12 | :param timeout: Timeout in seconds 13 | :param first: Time to sleep before first attempt 14 | :param step: Time to sleep between attempts in seconds 15 | :param args: Positional arguments to func 16 | :param kwargs: Keyword arguments to func 17 | """ 18 | if args is None: 19 | args = [] 20 | if kwargs is None: 21 | kwargs = {} 22 | start_time = time.monotonic() 23 | end_time = start_time + timeout 24 | time.sleep(first) 25 | while time.monotonic() < end_time: 26 | result = func(*args, **kwargs) 27 | if result: 28 | return result 29 | time.sleep(step) 30 | return None 31 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM registry.access.redhat.com/ubi9/python-311:1-1765312003@sha256:7d2cdff2b42ef82de8980bc8bc7c052e3ece1adafa95bd6aaa4808a29a53094e AS builder 2 | COPY --from=ghcr.io/astral-sh/uv:0.9.17@sha256:5cb6b54d2bc3fe2eb9a8483db958a0b9eebf9edff68adedb369df8e7b98711a2 /uv /bin/uv 3 | WORKDIR /ghmirror 4 | COPY --chown=1001:0 pyproject.toml uv.lock ./ 5 | RUN uv lock --locked 6 | COPY --chown=1001:0 
ghmirror ./ghmirror 7 | RUN uv sync --frozen --no-cache --compile-bytecode --no-group dev --python /usr/bin/python3.11 8 | 9 | FROM registry.access.redhat.com/ubi9/ubi-minimal:9.7-1764794109@sha256:6fc28bcb6776e387d7a35a2056d9d2b985dc4e26031e98a2bd35a7137cd6fd71 AS prod 10 | RUN microdnf upgrade -y && \ 11 | microdnf install -y python3.11 && \ 12 | microdnf clean all 13 | COPY LICENSE /licenses/LICENSE 14 | WORKDIR /ghmirror 15 | RUN chown -R 1001:0 /ghmirror 16 | USER 1001 17 | ENV VIRTUAL_ENV=/ghmirror/.venv 18 | ENV PATH="$VIRTUAL_ENV/bin:$PATH" 19 | COPY --from=builder /ghmirror /ghmirror 20 | COPY acceptance ./acceptance 21 | ENTRYPOINT ["gunicorn", "ghmirror.app:APP"] 22 | CMD ["--workers", "1", "--threads", "8", "--bind", "0.0.0.0:8080"] 23 | 24 | FROM prod AS test 25 | COPY --from=ghcr.io/astral-sh/uv:0.9.17@sha256:5cb6b54d2bc3fe2eb9a8483db958a0b9eebf9edff68adedb369df8e7b98711a2 /uv /bin/uv 26 | USER root 27 | RUN microdnf install -y make 28 | USER 1001 29 | COPY --chown=1001:0 Makefile ./ 30 | COPY --chown=1001:0 tests ./tests 31 | ENV UV_NO_CACHE=true 32 | RUN uv sync --frozen 33 | RUN make check 34 | 35 | -------------------------------------------------------------------------------- /ghmirror/data_structures/requests_cache.py: -------------------------------------------------------------------------------- 1 | # This program is free software; you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation; either version 2 of the License, or 4 | # (at your option) any later version. 5 | # 6 | # This program is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 9 | # 10 | # See LICENSE for more details. 11 | # 12 | # Copyright: Red Hat Inc. 
2020 13 | # Author: Maha Ashour 14 | 15 | """Implements caching backend""" 16 | 17 | import os 18 | 19 | from ghmirror.data_structures.monostate import InMemoryCache 20 | from ghmirror.data_structures.redis_data_structures import RedisCache 21 | 22 | CACHE_TYPE = os.environ.get("CACHE_TYPE", "in-memory") 23 | 24 | 25 | class RequestsCache: 26 | """Instantiates either a InMemoryCache or a Redis Cache object""" 27 | 28 | def __new__(cls, *args, **kwargs): 29 | if CACHE_TYPE == "redis": 30 | return RedisCache(*args, **kwargs) 31 | return InMemoryCache(*args, **kwargs) 32 | 33 | def __init__(self): # pragma: no cover 34 | pass 35 | 36 | def __contains__(self, item): # pragma: no cover 37 | pass 38 | 39 | def __getitem__(self, item): # pragma: no cover 40 | pass 41 | 42 | def __setitem__(self, key, value): # pragma: no cover 43 | pass 44 | 45 | def __iter__(self): # pragma: no cover 46 | pass 47 | 48 | def __len__(self): # pragma: no cover 49 | pass 50 | 51 | def __sizeof__(self): # pragma: no cover 52 | pass 53 | -------------------------------------------------------------------------------- /.tekton/github-mirror-master-push.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: tekton.dev/v1 2 | kind: PipelineRun 3 | metadata: 4 | annotations: 5 | build.appstudio.openshift.io/repo: https://github.com/app-sre/github-mirror?rev={{revision}} 6 | build.appstudio.redhat.com/commit_sha: '{{revision}}' 7 | build.appstudio.redhat.com/target_branch: '{{target_branch}}' 8 | pipelinesascode.tekton.dev/max-keep-runs: "25" 9 | pipelinesascode.tekton.dev/on-cel-expression: event == "push" && target_branch 10 | == "master" 11 | creationTimestamp: null 12 | labels: 13 | appstudio.openshift.io/application: github-mirror-master 14 | appstudio.openshift.io/component: github-mirror-master 15 | pipelines.appstudio.openshift.io/type: build 16 | name: github-mirror-master-on-push 17 | namespace: app-sre-tenant 18 | spec: 19 | params: 20 | - 
name: git-url 21 | value: '{{source_url}}' 22 | - name: revision 23 | value: '{{revision}}' 24 | - name: output-image 25 | value: quay.io/redhat-user-workloads/app-sre-tenant/github-mirror-master/github-mirror-master:{{revision}} 26 | - name: dockerfile 27 | value: Dockerfile 28 | - name: path-context 29 | value: . 30 | - name: target-stage 31 | value: prod 32 | pipelineRef: 33 | params: 34 | - name: url 35 | value: https://github.com/app-sre/shared-pipelines 36 | - name: revision 37 | value: main 38 | - name: pathInRepo 39 | value: pipelines/multi-arch-build-pipeline.yaml 40 | resolver: git 41 | taskRunTemplate: 42 | serviceAccountName: build-pipeline-github-mirror-master 43 | workspaces: 44 | - name: workspace 45 | volumeClaimTemplate: 46 | metadata: 47 | creationTimestamp: null 48 | spec: 49 | accessModes: 50 | - ReadWriteOnce 51 | resources: 52 | requests: 53 | storage: 1Gi 54 | status: {} 55 | - name: git-auth 56 | secret: 57 | secretName: '{{ git_auth_secret }}' 58 | status: {} 59 | -------------------------------------------------------------------------------- /.tekton/github-mirror-master-pull-request.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: tekton.dev/v1 2 | kind: PipelineRun 3 | metadata: 4 | annotations: 5 | build.appstudio.openshift.io/repo: https://github.com/app-sre/github-mirror?rev={{revision}} 6 | build.appstudio.redhat.com/commit_sha: '{{revision}}' 7 | build.appstudio.redhat.com/pull_request_number: '{{pull_request_number}}' 8 | build.appstudio.redhat.com/target_branch: '{{target_branch}}' 9 | pipelinesascode.tekton.dev/max-keep-runs: "10" 10 | pipelinesascode.tekton.dev/on-cel-expression: event == "pull_request" && target_branch 11 | == "master" 12 | creationTimestamp: null 13 | labels: 14 | appstudio.openshift.io/application: github-mirror-master 15 | appstudio.openshift.io/component: github-mirror-master 16 | pipelines.appstudio.openshift.io/type: build 17 | name: 
github-mirror-master-on-pull-request 18 | namespace: app-sre-tenant 19 | spec: 20 | params: 21 | - name: git-url 22 | value: '{{source_url}}' 23 | - name: revision 24 | value: '{{revision}}' 25 | - name: output-image 26 | value: quay.io/redhat-user-workloads/app-sre-tenant/github-mirror-master/github-mirror-master:on-pr-{{revision}} 27 | - name: image-expires-after 28 | value: 5d 29 | - name: dockerfile 30 | value: Dockerfile 31 | - name: path-context 32 | value: . 33 | - name: target-stage 34 | value: test 35 | pipelineRef: 36 | params: 37 | - name: url 38 | value: https://github.com/app-sre/shared-pipelines 39 | - name: revision 40 | value: main 41 | - name: pathInRepo 42 | value: pipelines/multi-arch-build-pipeline.yaml 43 | resolver: git 44 | taskRunTemplate: 45 | serviceAccountName: build-pipeline-github-mirror-master 46 | workspaces: 47 | - name: workspace 48 | volumeClaimTemplate: 49 | metadata: 50 | creationTimestamp: null 51 | spec: 52 | accessModes: 53 | - ReadWriteOnce 54 | resources: 55 | requests: 56 | storage: 1Gi 57 | status: {} 58 | - name: git-auth 59 | secret: 60 | secretName: '{{ git_auth_secret }}' 61 | status: {} 62 | -------------------------------------------------------------------------------- /openshift/github-mirror-acceptance.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: template.openshift.io/v1 3 | kind: Template 4 | metadata: 5 | name: github-mirror-acceptance 6 | objects: 7 | - apiVersion: v1 8 | kind: ServiceAccount 9 | metadata: 10 | name: ${SERVICE_ACCOUNT} 11 | imagePullSecrets: "${{IMAGE_PULL_SECRETS}}" 12 | - apiVersion: batch/v1 13 | kind: Job 14 | metadata: 15 | annotations: 16 | ignore-check.kube-linter.io/unset-cpu-requirements: "no cpu limits" 17 | name: github-mirror-acceptance-${IMAGE_TAG} 18 | spec: 19 | backoffLimit: 5 20 | template: 21 | spec: 22 | restartPolicy: Never 23 | serviceAccountName: ${SERVICE_ACCOUNT} 24 | containers: 25 | - image: 
${IMAGE}:${IMAGE_TAG} 26 | imagePullPolicy: Always 27 | name: github-mirror-acceptance 28 | env: 29 | - name: GITHUB_MIRROR_URL 30 | value: ${GITHUB_MIRROR_URL} 31 | - name: CLIENT_TOKEN 32 | valueFrom: 33 | secretKeyRef: 34 | name: ${CLIENT_TOKEN_SECRET} 35 | key: CLIENT_TOKEN 36 | command: 37 | - python3 38 | args: 39 | - acceptance/test_basic.py 40 | resources: 41 | requests: 42 | memory: ${MEMORY_REQUESTS} 43 | cpu: ${CPU_REQUESTS} 44 | limits: 45 | memory: ${MEMORY_LIMIT} 46 | parameters: 47 | - name: IMAGE 48 | value: quay.io/redhat-services-prod/app-sre-tenant/github-mirror-master/github-mirror-master 49 | - name: IMAGE_TAG 50 | value: '' 51 | required: true 52 | - name: GITHUB_MIRROR_URL 53 | value: '' 54 | required: true 55 | - name: CLIENT_TOKEN_SECRET 56 | value: '' 57 | required: true 58 | - name: SERVICE_ACCOUNT 59 | value: "github-mirror-acceptance" 60 | deplayName: github-mirror-acceptance service account 61 | description: name of the service account to use when deploying the pod 62 | - name: MEMORY_REQUESTS 63 | value: 128Mi 64 | - name: MEMORY_LIMIT 65 | value: 128Mi 66 | - name: CPU_REQUESTS 67 | value: 100m 68 | - name: IMAGE_PULL_SECRETS 69 | value: '[]' 70 | -------------------------------------------------------------------------------- /ghmirror/decorators/metrics.py: -------------------------------------------------------------------------------- 1 | # This program is free software; you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation; either version 2 of the License, or 4 | # (at your option) any later version. 5 | # 6 | # This program is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 9 | # 10 | # See LICENSE for more details. 11 | # 12 | # Copyright: Red Hat Inc. 
2020 13 | # Author: Amador Pahim 14 | 15 | """Metrics decorators.""" 16 | 17 | import time 18 | from functools import wraps 19 | 20 | import flask 21 | 22 | from ghmirror.data_structures.monostate import ( 23 | StatsCache, 24 | UsersCache, 25 | ) 26 | 27 | STATS_CACHE = StatsCache() 28 | 29 | 30 | def requests_metrics(function): 31 | """Collect metrics from the request and populate the StatsCache object.""" 32 | 33 | @wraps(function) 34 | def wrapper(*args, **kwargs): 35 | start = time.time() 36 | response = function(*args, **kwargs) 37 | 38 | # This is the total time spent to process the request 39 | elapsed_time = time.time() - start 40 | 41 | # Incrementing the total requests couter 42 | STATS_CACHE.count() 43 | 44 | # The X-Cache header is added by the flask APP 45 | # and it contains either HIT or MISS 46 | cache = response.headers["X-Cache"] 47 | 48 | users_cache = UsersCache() 49 | authorization = flask.request.headers.get("Authorization") 50 | if authorization: 51 | user = users_cache.get(authorization) 52 | if not user: 53 | # This may be the first call to get /user 54 | # so users_cache is not yet updated 55 | # with the user to match the auth sha. 56 | # Try to get the user from the response. 57 | user = response.json().get("login") 58 | else: 59 | user = None 60 | 61 | # Adding the request metrics to the histogram 62 | STATS_CACHE.observe( 63 | cache=cache, 64 | status=response.status_code, 65 | value=elapsed_time, 66 | method=flask.request.method, 67 | user=user, 68 | ) 69 | 70 | return response 71 | 72 | return wrapper 73 | -------------------------------------------------------------------------------- /AGENTS.md: -------------------------------------------------------------------------------- 1 | # AGENTS.md 2 | 3 | This file provides guidance to AI agents when working with code in this repository. 4 | 5 | ## Project Overview 6 | 7 | GitHub Mirror is a Python Flask application that caches GitHub API responses and implements conditional requests. 
It serves as a proxy to reduce API quota consumption by serving cached responses when GitHub returns a 304 (Not Modified) status code. 8 | 9 | ## Setup 10 | - **Install dependencies**: `uv sync --dev` or use the legacy method described in `docs/devel_guide.md` 11 | - **Run development server**: `python ghmirror/app/__init__.py` (starts on http://127.0.0.1:8080) 12 | 13 | ## Formatting 14 | 15 | To format code, run `make format`. 16 | 17 | ## Unit and Linting Tests 18 | 19 | To run unit and linting tests, you can use `make check` 20 | 21 | ## Acceptance Test 22 | 23 | Make sure the file `.github_client_token` exists in the project. It must be provided by the user. 24 | 25 | Before running any acceptance test, make sure that the `.github_client_token` file was created by the user. If not, prompt the user to set it with the secret stored in our vault at `app-interface/app-sre-stage-01/github-mirror-stage/acceptance-tests`. 26 | 27 | Once `.github_client_token` file was created by the user, you can execute acceptance tests locally by using `make local-acceptance-test`. 28 | 29 | The acceptance test can be considered successful, if every command in the make target exits successfully. 
30 | 31 | ## Architecture 32 | 33 | ### Core Components 34 | - **`ghmirror/app/`**: Flask application entry point and route handlers 35 | - **`ghmirror/core/`**: Core business logic for mirror requests and responses 36 | - **`ghmirror/data_structures/`**: Cache implementations (in-memory and Redis) 37 | - **`ghmirror/decorators/`**: Request validation decorators (user authentication) 38 | - **`ghmirror/utils/`**: Utility modules including request session management 39 | 40 | ### Key Features 41 | - **Conditional Requests**: Implements GitHub's conditional request pattern using ETags 42 | - **Cache Backends**: Supports both in-memory and Redis caching (controlled by `CACHE_TYPE` env var) 43 | - **User Validation**: Optional user authorization via `GITHUB_USERS` environment variable 44 | - **Offline Mode**: Built-in detection for GitHub API outages with cache fallback 45 | - **Metrics**: Prometheus metrics endpoint at `/metrics` 46 | 47 | ### Configuration 48 | - Uses `pyproject.toml` for Python packaging and tool configuration 49 | - Ruff for linting/formatting with comprehensive rule set 50 | - Coverage threshold set to 98% 51 | - Python 3.11 required 52 | 53 | ### Cache Architecture 54 | The application uses a monostate pattern for cache management with separate implementations for in-memory and Redis backends. Cache keys are based on request URLs and ETags from GitHub responses. 
55 | 56 | ## Commit Standards 57 | - First, before adding or committing anything, always make sure that Unit, Format and Acceptance tests have been run successfully 58 | - Use `Assisted-by:` instead of `Co-Authored-By:` 59 | - Remove whitespace-only lines 60 | - Use double newlines for EOF 61 | -------------------------------------------------------------------------------- /tests/unit/test_reponse.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from ghmirror.core.mirror_response import MirrorResponse 4 | 5 | 6 | class MockResponse: 7 | def __init__(self, content, headers, status_code): 8 | if content is None: 9 | self.content = content 10 | else: 11 | self.content = content.encode() 12 | self.headers = headers 13 | self.status_code = status_code 14 | 15 | 16 | class TestResponse(TestCase): 17 | def test_no_headers(self): 18 | headers = {"Some-Other-Header": "foo"} 19 | mock_response = MockResponse(content="", headers=headers, status_code=200) 20 | response = MirrorResponse( 21 | original_response=mock_response, gh_api_url="foo", gh_mirror_url="bar" 22 | ) 23 | 24 | # That item should not be part of the response.headers 25 | self.assertFalse(response.headers) 26 | 27 | def test_headers(self): 28 | headers = { 29 | "Link": "foobar", 30 | "Content-Type": "foo", 31 | "Last-Modified": "foo", 32 | "ETag": "foo", 33 | "Some-Other-Header": "foo", 34 | } 35 | mock_response = MockResponse(content="", headers=headers, status_code=200) 36 | 37 | response = MirrorResponse( 38 | original_response=mock_response, gh_api_url="foo", gh_mirror_url="bar" 39 | ) 40 | 41 | response_headers = response.headers 42 | 43 | link = response_headers.pop("Link") 44 | # Link should have been modified, replacing the gh_api_url string 45 | # by the gh_mirror_url string. 
46 | self.assertEqual(link, "barbar") 47 | 48 | # Those headers should be there 49 | for item in ["Content-Type", "Last-Modified", "ETag"]: 50 | header = response_headers.pop(item) 51 | self.assertEqual(header, "foo") 52 | 53 | # No other headers should be there 54 | self.assertFalse(response_headers) 55 | 56 | def test_content(self): 57 | mock_response = MockResponse(content=None, headers={}, status_code=200) 58 | 59 | response = MirrorResponse( 60 | original_response=mock_response, gh_api_url="foo", gh_mirror_url="bar" 61 | ) 62 | # No content from the upstream response should stay the 63 | # same in the mirror response 64 | self.assertIsNone(response.content) 65 | 66 | mock_response = MockResponse(content="foobar", headers={}, status_code=200) 67 | response = MirrorResponse( 68 | original_response=mock_response, gh_api_url="foo", gh_mirror_url="bar" 69 | ) 70 | # content should have been modified, replacing the 71 | # gh_api_url string by the gh_mirror_url string. 72 | self.assertEqual(response.content, b"barbar") 73 | 74 | def test_status_code(self): 75 | mock_response = MockResponse(content="foobar", headers={}, status_code=200) 76 | 77 | response = MirrorResponse( 78 | original_response=mock_response, gh_api_url="foo", gh_mirror_url="bar" 79 | ) 80 | 81 | # No status code change 82 | self.assertEqual(response.status_code, 200) 83 | -------------------------------------------------------------------------------- /ghmirror/data_structures/redis_data_structures.py: -------------------------------------------------------------------------------- 1 | # This program is free software; you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation; either version 2 of the License, or 4 | # (at your option) any later version. 
5 | # 6 | # This program is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 9 | # 10 | # See LICENSE for more details. 11 | # 12 | # Copyright: Red Hat Inc. 2020 13 | # Author: Maha Ashour 14 | 15 | """Caching data in Redis.""" 16 | 17 | import os 18 | import pickle 19 | from random import randint 20 | 21 | import redis 22 | 23 | PRIMARY_ENDPOINT = os.environ.get("PRIMARY_ENDPOINT", "localhost") 24 | READER_ENDPOINT = os.environ.get("READER_ENDPOINT", PRIMARY_ENDPOINT) 25 | REDIS_PORT = int(os.environ.get("REDIS_PORT", "6379")) 26 | REDIS_TOKEN = os.environ.get("REDIS_TOKEN") 27 | REDIS_SSL = os.environ.get("REDIS_SSL") 28 | 29 | 30 | class RedisCache: 31 | """Dictionary-like implementation for caching requests in Redis.""" 32 | 33 | def __init__(self): 34 | self.wr_cache = self._get_connection(PRIMARY_ENDPOINT) 35 | self.ro_cache = self._get_connection(READER_ENDPOINT) 36 | 37 | def __contains__(self, item): 38 | sr_key = self._serialize(item) 39 | return self.ro_cache.exists(sr_key) 40 | 41 | def __getitem__(self, item): 42 | sr_key = self._serialize(item) 43 | sr_value = self.ro_cache.get(sr_key) 44 | if sr_value is None: 45 | raise KeyError(item) 46 | return self._deserialize(sr_value) 47 | 48 | def __setitem__(self, key, value): 49 | sr_key = self._serialize(key) 50 | sr_value = self._serialize(value) 51 | # randomize cache expiration time (1 hr increments) from 1 hr to 6 mon 52 | rand_val = randint(1, 4320) 53 | self.wr_cache.set(sr_key, sr_value, ex=3600 * rand_val) 54 | 55 | def __iter__(self): 56 | return self._scan_iter() 57 | 58 | def __len__(self): 59 | return self.ro_cache.dbsize() 60 | 61 | def __sizeof__(self): 62 | return self.ro_cache.info()["used_memory"] 63 | 64 | def _scan_iter(self): 65 | """Make an iterator so that the client doesn't need to remember the cursor position.""" 66 | cursor = "0" 67 | while cursor != 0: 68 
| cursor, data = self.wr_cache.scan(cursor) 69 | for item in data: 70 | yield self._deserialize(item) 71 | 72 | @staticmethod 73 | def _get_connection(host): 74 | parameters = {"host": host, "port": REDIS_PORT} 75 | if REDIS_TOKEN is not None: 76 | parameters["password"] = REDIS_TOKEN 77 | if REDIS_SSL is not None and REDIS_SSL.lower() == "true": 78 | parameters["ssl"] = True 79 | return redis.Redis(**parameters) 80 | 81 | @staticmethod 82 | def _serialize(item): 83 | """Serialize items for storage in Redis""" 84 | return pickle.dumps(item) 85 | 86 | @staticmethod 87 | def _deserialize(item): 88 | """Deserialize items stored in Redis""" 89 | return pickle.loads(item) # noqa: S301 90 | -------------------------------------------------------------------------------- /ghmirror/core/mirror_response.py: -------------------------------------------------------------------------------- 1 | # This program is free software; you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation; either version 2 of the License, or 4 | # (at your option) any later version. 5 | # 6 | # This program is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 9 | # 10 | # See LICENSE for more details. 11 | # 12 | # Copyright: Red Hat Inc. 2020 13 | # Author: Amador Pahim 14 | 15 | """Module containing all the abstractions around an HTTP response.""" 16 | 17 | 18 | class MirrorResponse: 19 | """Wrapper around the requests.Response. 20 | 21 | Implementing properties that replace the strings containing the 22 | GutHub API url by the mirror url where needed. 
23 | 24 | :param original_response: the return from the original request 25 | to the GitHub API 26 | :param gh_api_url: the GitHub API url (with the scheme) 27 | :param gh_mirror_url: the GitHub Mirror url (with the scheme) 28 | 29 | :type original_response: requests.Response 30 | :type gh_api_url: str 31 | :type gh_mirror_url: str 32 | """ 33 | 34 | def __init__(self, original_response, gh_api_url, gh_mirror_url): 35 | self._original_response = original_response 36 | self._gh_api_url = gh_api_url.rstrip("/") 37 | self._gh_mirror_url = gh_mirror_url.rstrip("/") 38 | 39 | @property 40 | def headers(self): 41 | """Sanitize headers. 42 | 43 | Retrieves the headers we are interested in from the original response and 44 | sanitizes them so we can impersonate the GitHub API. 45 | 46 | :return: the sanitized headers 47 | :rtype: dict 48 | """ 49 | sanitized_headers = {} 50 | 51 | x_cache = self._original_response.headers.get("X-Cache") 52 | if x_cache is not None: 53 | sanitized_headers["X-Cache"] = x_cache 54 | 55 | link = self._original_response.headers.get("Link") 56 | if link is not None: 57 | sanitized_headers["Link"] = link.replace( 58 | self._gh_api_url, self._gh_mirror_url 59 | ) 60 | 61 | content_type = self._original_response.headers.get("Content-Type") 62 | if content_type is not None: 63 | sanitized_headers["Content-Type"] = content_type 64 | 65 | last_modified = self._original_response.headers.get("Last-Modified") 66 | if last_modified is not None: 67 | sanitized_headers["Last-Modified"] = last_modified 68 | 69 | etag = self._original_response.headers.get("ETag") 70 | if etag is not None: 71 | sanitized_headers["ETag"] = etag 72 | 73 | return sanitized_headers 74 | 75 | @property 76 | def content(self): 77 | """Sanitize content. 78 | 79 | Retrieves the content from the original response and sanitizes 80 | them so we can impersonate the GitHub API. 
81 | 82 | :return: the sanitized content 83 | :rtype: bytes 84 | """ 85 | if self._original_response.content is None: 86 | return None 87 | 88 | return self._original_response.content.replace( 89 | self._gh_api_url.encode(), self._gh_mirror_url.encode() 90 | ) 91 | 92 | @property 93 | def status_code(self): 94 | """Convenience method to expose the original response HTTP status code. 95 | 96 | :return: the response status code 97 | """ 98 | return self._original_response.status_code 99 | -------------------------------------------------------------------------------- /ghmirror/app/__init__.py: -------------------------------------------------------------------------------- 1 | # This program is free software; you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation; either version 2 of the License, or 4 | # (at your option) any later version. 5 | # 6 | # This program is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 9 | # 10 | # See LICENSE for more details. 11 | # 12 | # Copyright: Red Hat Inc. 
2020 13 | # Author: Amador Pahim 14 | 15 | """The GitHub Mirror endpoints""" 16 | 17 | import logging 18 | import os 19 | import sys 20 | 21 | import flask 22 | from prometheus_client import generate_latest 23 | 24 | from ghmirror.core.constants import GH_API 25 | from ghmirror.core.mirror_requests import conditional_request 26 | from ghmirror.core.mirror_response import MirrorResponse 27 | from ghmirror.data_structures.monostate import StatsCache 28 | from ghmirror.data_structures.requests_cache import RequestsCache 29 | from ghmirror.decorators.checks import check_user 30 | from ghmirror.utils.extensions import session 31 | 32 | logging.basicConfig(level=logging.INFO, format="%(asctime)-15s %(message)s") 33 | 34 | APP = flask.Flask(__name__) 35 | 36 | 37 | def error_handler(exception): 38 | """Used when an exception happens in the flask app.""" 39 | return ( 40 | flask.jsonify( 41 | message=f"Error reaching {GH_API}: {exception.__class__.__name__!s}" 42 | ), 43 | 502, 44 | ) 45 | 46 | 47 | APP.config["TRAP_HTTP_EXCEPTIONS"] = True 48 | APP.register_error_handler(Exception, error_handler) 49 | 50 | 51 | @APP.route("/healthz", methods=["GET"]) 52 | def healthz(): 53 | """Health check endpoint for Kubernetes.""" 54 | return flask.Response("OK") 55 | 56 | 57 | @APP.route("/metrics", methods=["GET"]) 58 | def metrics(): 59 | """Prometheus metrics endpoint.""" 60 | headers = {"Content-type": "text/plain"} 61 | 62 | stats_cache = StatsCache() 63 | requests_cache = RequestsCache() 64 | 65 | stats_cache.set_cache_size(sys.getsizeof(requests_cache)) 66 | stats_cache.set_cached_objects(len(requests_cache)) 67 | 68 | return flask.Response(generate_latest(registry=stats_cache.registry), 200, headers) 69 | 70 | 71 | @APP.route("/", defaults={"path": ""}) 72 | @APP.route("/", methods=["GET", "POST", "PUT", "PATCH", "DELETE"]) 73 | @check_user 74 | def ghmirror(path): 75 | """Default endpoint, matching any url without a specific endpoint.""" 76 | url = f"{GH_API}/{path}" 77 | 78 
| if flask.request.args: 79 | url += "?" 80 | for key, value in flask.request.args.items(): 81 | url += f"{key}={value}&" 82 | url = url.rstrip("&") 83 | 84 | resp = conditional_request( 85 | session=session, 86 | method=flask.request.method, 87 | url=url, 88 | auth=flask.request.headers.get("Authorization"), 89 | data=flask.request.data, 90 | url_params=flask.request.args, 91 | ) 92 | 93 | gh_mirror_url = os.environ.get("GITHUB_MIRROR_URL", flask.request.host_url) 94 | mirror_response = MirrorResponse( 95 | original_response=resp, gh_api_url=GH_API, gh_mirror_url=gh_mirror_url 96 | ) 97 | 98 | return flask.Response( 99 | mirror_response.content, mirror_response.status_code, mirror_response.headers 100 | ) 101 | 102 | 103 | if __name__ == "__main__": # pragma: no cover 104 | APP.run( 105 | host="127.0.0.1", 106 | debug=bool(os.environ.get("GITHUB_MIRROR_DEBUG", "1")), 107 | port=8080, 108 | ) 109 | -------------------------------------------------------------------------------- /ghmirror/decorators/checks.py: -------------------------------------------------------------------------------- 1 | # This program is free software; you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation; either version 2 of the License, or 4 | # (at your option) any later version. 5 | # 6 | # This program is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 9 | # 10 | # See LICENSE for more details. 11 | # 12 | # Copyright: Red Hat Inc. 
2020 13 | # Author: Amador Pahim 14 | 15 | """Contains all the required verification""" 16 | 17 | import os 18 | from functools import wraps 19 | 20 | import flask 21 | 22 | from ghmirror.core.constants import GH_API 23 | from ghmirror.core.mirror_requests import conditional_request 24 | from ghmirror.data_structures.monostate import UsersCache 25 | from ghmirror.utils.extensions import session 26 | 27 | AUTHORIZED_USERS = os.environ.get("GITHUB_USERS") 28 | DOC_URL = "https://github.com/app-sre/github-mirror#user-validation" 29 | 30 | 31 | def check_user(function): 32 | """Check if the user is authorized to use the github-mirror. 33 | 34 | Checks whether the user is a member of one of the authorized users, if no 35 | authorized users set, only cache user info. 36 | """ 37 | 38 | @wraps(function) 39 | def wrapper(*args, **kwargs): 40 | # Need to check if the Authorization header is present 41 | # in the request to support anonymous user access 42 | authorization = flask.request.headers.get("Authorization") 43 | 44 | # When the GITHUB_USERS is not set and there's no Authorization header 45 | # we just return the decorated function to allow anonymous access 46 | if AUTHORIZED_USERS is None and authorization is None: 47 | return function(*args, **kwargs) 48 | 49 | # At this stage, Authorization header is mandatory 50 | if authorization is None: 51 | return ( 52 | flask.jsonify( 53 | message="Authorization header is required", 54 | documentation_url=DOC_URL, 55 | ), 56 | 401, 57 | ) 58 | 59 | users_cache = UsersCache() 60 | # Users in cache were already checked and authorized, 61 | # so we just keep serving them 62 | if authorization in users_cache: 63 | return function(*args, **kwargs) 64 | 65 | # Using the Authorization header to get the user information 66 | user_url = f"{GH_API}/user" 67 | resp = conditional_request( 68 | session=session, method="GET", url=user_url, auth=authorization 69 | ) 70 | 71 | # Fail early when Github API tells something is wrong 72 | if 
resp.status_code != 200: # noqa: PLR2004 73 | return flask.Response(resp.content, resp.status_code) 74 | 75 | user_login = resp.json()["login"] 76 | authorized_users = AUTHORIZED_USERS.split(":") if AUTHORIZED_USERS else [] 77 | 78 | # If GITHUB_USERS is not set or the user login from GitHub 79 | # is in the authorized_users list, we cache the user for 80 | # future use and return the decorated function 81 | if not authorized_users or user_login in authorized_users: 82 | users_cache.add(authorization, user_login) 83 | return function(*args, **kwargs) 84 | 85 | # No match means user is forbidden 86 | return ( 87 | flask.jsonify( 88 | message=f"User {user_login} has no permission to use the github-mirror", 89 | documentation_url=DOC_URL, 90 | ), 91 | 403, 92 | ) 93 | 94 | return wrapper 95 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "github-mirror" 3 | version = "0.1.0" 4 | description = "GitHub API mirror that caches the responses and implements conditional requests, serving the client with the cached responses when the GitHub API replies with a 304 HTTP code, reducing the number of API calls, making a more efficient use of the GitHub API rate limit." 
5 | authors = [ 6 | # Feel free to add or change authors 7 | { name = "Red Hat Application SRE Team", email = "sd-app-sre@redhat.com" }, 8 | ] 9 | license = { text = "GPLv2+" } 10 | readme = "README.md" 11 | requires-python = "~= 3.11.0" 12 | classifiers = [ 13 | "Development Status :: 2 - Pre-Alpha", 14 | "Environment :: Web Environment", 15 | "Framework :: Flask", 16 | "Intended Audience :: Developers", 17 | "License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)", 18 | "Natural Language :: English", 19 | "Operating System :: POSIX :: Linux", 20 | "Programming Language :: Python :: 3.6", 21 | "Programming Language :: Python :: 3.7", 22 | "Programming Language :: Python :: 3.8", 23 | "Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware", 24 | ] 25 | dependencies = [ 26 | "Flask==3.1.2", 27 | "requests==2.32.5", 28 | "prometheus_client==0.22.1", 29 | "gunicorn==23.0.0", 30 | "redis==6.4.0", 31 | ] 32 | 33 | [project.urls] 34 | homepage = "https://github.com/app-sre/github-mirror" 35 | repository = "https://github.com/app-sre/github-mirror" 36 | documentation = "https://github.com/app-sre/github-mirror" 37 | 38 | [dependency-groups] 39 | dev = [ 40 | # Development dependencies 41 | "ruff==0.12.12", 42 | "mypy==1.17.1", 43 | "pytest==9.0.1", 44 | "pytest-cov==6.3.0", 45 | "pytest-forked==1.6.0", 46 | "types-requests==2.32.4.20250809", 47 | ] 48 | 49 | # Ruff configuration 50 | [tool.ruff] 51 | line-length = 88 52 | src = ["ghmirror"] 53 | extend-exclude = [ 54 | # exclude some common cache and tmp directories 55 | ".local", 56 | ".cache", 57 | "tmp", 58 | ] 59 | fix = true 60 | 61 | [tool.ruff.lint] 62 | preview = true 63 | select = ["ALL"] 64 | ignore = [ 65 | "CPY", # Missing copyright header 66 | "D100", # Missing docstring in public module 67 | "D101", # Missing docstring in public class 68 | "D102", # Missing docstring in public method 69 | "D103", # Missing docstring in public function 70 | "D104", # Missing docstring in public package 71 | 
"D105", # Missing docstring in magic method 72 | "D107", # Missing docstring in __init__ 73 | "D203", # 1 blank line required before class docstring 74 | "D211", # No blank lines allowed before class docstring 75 | "D212", # multi-line-summary-first-line 76 | "D213", # multi-line-summary-second-line 77 | "D4", # Doc string style 78 | "E501", # Line too long 79 | "G004", # Logging statement uses f-string 80 | "PLR0904", # Too many public methods 81 | "PLR0913", # Too many arguments 82 | "PLR0917", # Too many positional arguments 83 | "S101", # Use of assert detected. Pytest uses assert 84 | "S404", # subprocess import 85 | "EM101", # Exception must not use a string literal, assign to variable first 86 | "EM102", # Exception must not use an f-string literal, assign to variable first 87 | "S311", # Standard pseudo-random generators are not suitable for security/cryptographic purposes 88 | "S324", # sha1 hash 89 | "S403", # pickle usage 90 | "TRY003", # Avoid specifying long messages outside the exception class 91 | "TRY300", # try-consider-else 92 | # pydoclint 93 | "DOC", 94 | # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules 95 | "W191", 96 | "E111", 97 | "E114", 98 | "E117", 99 | "D206", 100 | "D300", 101 | "Q", 102 | "COM812", 103 | "COM819", 104 | "ISC001", 105 | # Room for future improvements and refactoring 106 | "ANN", # Missing annotation 107 | "PT", # Use PyTest stuff instead unittest 108 | "RUF012", # need type annotations 109 | 110 | ] 111 | [tool.ruff.format] 112 | preview = true 113 | 114 | [tool.ruff.lint.isort] 115 | known-first-party = ["ghmirror"] 116 | 117 | # Coverage configuration 118 | [tool.coverage.run] 119 | branch = true 120 | omit = ["*/tests/*"] 121 | 122 | [tool.coverage.report] 123 | fail_under = 98 124 | -------------------------------------------------------------------------------- /openshift/github-mirror.yaml: 
-------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: template.openshift.io/v1 3 | kind: Template 4 | metadata: 5 | name: github-mirror 6 | objects: 7 | - apiVersion: policy/v1 8 | kind: PodDisruptionBudget 9 | metadata: 10 | name: github-mirror 11 | spec: 12 | minAvailable: 1 13 | selector: 14 | matchLabels: 15 | app: github-mirror 16 | - apiVersion: v1 17 | kind: ServiceAccount 18 | metadata: 19 | name: ${SERVICE_ACCOUNT} 20 | imagePullSecrets: "${{IMAGE_PULL_SECRETS}}" 21 | - apiVersion: apps/v1 22 | kind: Deployment 23 | metadata: 24 | annotations: 25 | ignore-check.kube-linter.io/unset-cpu-requirements: "no cpu limits" 26 | labels: 27 | app: github-mirror 28 | name: github-mirror 29 | spec: 30 | replicas: ${{REPLICAS}} 31 | strategy: 32 | type: RollingUpdate 33 | rollingUpdate: 34 | maxUnavailable: 0 35 | maxSurge: 1 36 | selector: 37 | matchLabels: 38 | app: github-mirror 39 | template: 40 | metadata: 41 | labels: 42 | app: github-mirror 43 | spec: 44 | affinity: 45 | podAntiAffinity: 46 | preferredDuringSchedulingIgnoredDuringExecution: 47 | - podAffinityTerm: 48 | labelSelector: 49 | matchExpressions: 50 | - key: app 51 | operator: In 52 | values: 53 | - github-mirror 54 | topologyKey: kubernetes.io/hostname 55 | weight: 90 56 | - podAffinityTerm: 57 | labelSelector: 58 | matchExpressions: 59 | - key: app 60 | operator: In 61 | values: 62 | - github-mirror 63 | topologyKey: topology.kubernetes.io/zone 64 | weight: 100 65 | serviceAccountName: ${SERVICE_ACCOUNT} 66 | containers: 67 | - image: ${IMAGE}:${IMAGE_TAG} 68 | imagePullPolicy: Always 69 | name: github-mirror 70 | env: 71 | - name: GITHUB_USERS 72 | value: ${GITHUB_USERS} 73 | - name: GITHUB_MIRROR_URL 74 | value: ${GITHUB_MIRROR_URL} 75 | - name: CACHE_TYPE 76 | value: ${CACHE_TYPE} 77 | - name: PRIMARY_ENDPOINT 78 | valueFrom: 79 | secretKeyRef: 80 | key: db.endpoint 81 | name: ${EC_SECRET_NAME} 82 | - name: READER_ENDPOINT 83 | value: 
${READER_ENDPOINT} 84 | - name: REDIS_PORT 85 | valueFrom: 86 | secretKeyRef: 87 | key: db.port 88 | name: ${EC_SECRET_NAME} 89 | - name: REDIS_TOKEN 90 | valueFrom: 91 | secretKeyRef: 92 | key: db.auth_token 93 | name: ${EC_SECRET_NAME} 94 | - name: REDIS_SSL 95 | value: ${REDIS_SSL} 96 | - name: GITHUB_STATUS_SLEEP_TIME 97 | value: "${GITHUB_STATUS_SLEEP_TIME}" 98 | - name: GITHUB_STATUS_TIMEOUT 99 | value: "${GITHUB_STATUS_TIMEOUT}" 100 | ports: 101 | - name: github-mirror 102 | containerPort: 8080 103 | livenessProbe: 104 | httpGet: 105 | path: /healthz 106 | port: 8080 107 | initialDelaySeconds: 30 108 | periodSeconds: 10 109 | timeoutSeconds: 3 110 | readinessProbe: 111 | httpGet: 112 | path: /healthz 113 | port: 8080 114 | initialDelaySeconds: 3 115 | periodSeconds: 10 116 | timeoutSeconds: 3 117 | resources: 118 | requests: 119 | memory: ${MEMORY_REQUESTS} 120 | cpu: ${CPU_REQUESTS} 121 | limits: 122 | memory: ${MEMORY_LIMIT} 123 | - apiVersion: v1 124 | kind: Service 125 | metadata: 126 | name: github-mirror 127 | labels: 128 | app: github-mirror 129 | spec: 130 | ports: 131 | - protocol: TCP 132 | port: 80 133 | targetPort: 8080 134 | name: github-mirror 135 | selector: 136 | app: github-mirror 137 | 138 | parameters: 139 | - name: IMAGE 140 | value: quay.io/redhat-services-prod/app-sre-tenant/github-mirror-master/github-mirror-master 141 | displayName: github mirror image 142 | description: github mirror docker image. Defaults to quay.io/redhat-services-prod/app-sre-tenant/github-mirror-master/github-mirror-master 143 | - name: IMAGE_TAG 144 | value: latest 145 | displayName: github mirror version 146 | description: github mirror version which defaults to latest 147 | # It's an in-memory cache service. It needs memory. 148 | - name: MEMORY_REQUESTS 149 | value: 800Mi 150 | - name: MEMORY_LIMIT 151 | value: 1Gi 152 | # It runs multiple threads, but only one process. 
If 153 | # we need more, we should probably increase the number 154 | # of replicas instead of touching it here. 155 | - name: CPU_REQUESTS 156 | value: 200m 157 | # These values are meant to be overridden by the 158 | # saas-herder parametrization 159 | - name: GITHUB_USERS 160 | value: app-sre-bot:cs-sre-bot 161 | - name: GITHUB_MIRROR_URL 162 | value: https://github-mirror.stage.devshift.net 163 | - name: CACHE_TYPE 164 | value: redis 165 | - name: EC_SECRET_NAME 166 | value: ghmirror-elasticache-stage 167 | - name: REDIS_SSL 168 | value: 'True' 169 | - name: READER_ENDPOINT 170 | value: '' 171 | required: true 172 | - name: REPLICAS 173 | value: '3' 174 | - name: SERVICE_ACCOUNT 175 | value: "github-mirror" 176 | displayName: github-mirror service account 177 | description: name of the service account to use when deploying the pod 178 | - name: GITHUB_STATUS_SLEEP_TIME 179 | value: '1' 180 | - name: GITHUB_STATUS_TIMEOUT 181 | value: '10' 182 | - name: IMAGE_PULL_SECRETS 183 | value: '[]' 184 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GitHub Mirror 2 | 3 | GitHub API mirror that caches the responses and implements 4 | [conditional requests](https://docs.github.com/en/rest/using-the-rest-api/best-practices-for-using-the-rest-api?apiVersion=2022-11-28#use-conditional-requests-if-appropriate). 5 | 6 | With conditional requests, all the calls are forwarded to the Github API, but 7 | when the GitHub API replies with a 304 HTTP code, meaning that the resource 8 | has not changed, we serve the client with the previously cached response. 9 | 10 | That reduces the number of API calls that consume quota, helping you not to 11 | hit the API GitHub API rate limit. 12 | 13 | The mirror acts only on GET requests, bypassing other HTTP methods. 14 | 15 | The default cache backend is in-memory, and we also support Redis. 
The 16 | in-memory cache is shared among all the threads, but not shared between 17 | processes. Every time the server is started, the cache is initialized empty. 18 | Using Redis prevents the cache from being lost when the github mirror server 19 | is restarted. 20 | 21 | ## Quick Start 22 | 23 | Run the Docker container: 24 | 25 | ``` 26 | ~$ docker run --rm -it -p 8080:8080 quay.io/redhat-services-prod/app-sre-tenant/github-mirror-master/github-mirror-master 27 | [2021-02-20 12:40:16 +0000] [1] [INFO] Starting gunicorn 20.0.4 28 | [2021-02-20 12:40:16 +0000] [1] [INFO] Listening at: http://0.0.0.0:8080 (1) 29 | [2021-02-20 12:40:16 +0000] [1] [INFO] Using worker: threads 30 | [2021-02-20 12:40:16 +0000] [8] [INFO] Booting worker with pid: 8 31 | ``` 32 | 33 | Use it as the Github API url: 34 | 35 | ``` 36 | >>> import requests 37 | >>> 38 | >>> requests.get('http://localhost:8080/repos/app-sre/github-mirror') 39 | 40 | >>> 41 | >>> requests.get('http://localhost:8080/repos/app-sre/github-mirror') 42 | 43 | >>> 44 | ``` 45 | 46 | After those two requests, the server log will show: 47 | 48 | ``` 49 | 2020-02-16 21:08:07,948 [GET] CACHE_MISS https://api.github.com/repos/app-sre/github-mirror 50 | 2020-02-16 21:08:13,585 [GET] CACHE_HIT https://api.github.com/repos/app-sre/github-mirror 51 | ``` 52 | 53 | The second request was served from the cache and it did not consume the API 54 | calls quota. 55 | 56 | If you're using PyGithub, you have to pass the `base_url` when creating the 57 | client instance: 58 | 59 | ``` 60 | >>> from github import Github 61 | >>> gh_cli = Github(base_url='http://localhost:8080') 62 | ``` 63 | 64 | ## Redis Cache Backend 65 | 66 | To enable the Redis backend, set the environment variable: 67 | 68 | ``` 69 | CACHE_TYPE=redis 70 | ``` 71 | 72 | In addition to that, you can provide the following optional configuration: 73 | 74 | - `PRIMARY_ENDPOINT` is the primary endpoint or host address of the Redis 75 | service. 
If not set, it defaults to `localhost`. 76 | - `READER_ENDPOINT` is the read-only replica endpoint and can be used to 77 | increase the read availability of the Redis service. If not set, it defaults 78 | to the same address as the primary endpoint. 79 | - `REDIS_PORT` is the port which the Redis service binds to. The default port 80 | is `6379`. 81 | - `REDIS_PASSWORD` is the authentication token to access a password protected 82 | Redis server. If not set, the default is no authentication. 83 | - `REDIS_SSL` should be set to `True` if you are encrypting the traffic to the 84 | Redis server. If not set, the default assumes no encryption. 85 | 86 | You will find more details about the Redis cache backend implementation in the 87 | [Redis Cache Backend doc](docs/redis_cache_backend.md). 88 | 89 | ## Metrics 90 | 91 | The service has a `/metrics` endpoint, exposing metrics in the Prometheus 92 | format: 93 | 94 | ``` 95 | >>> response = requests.get('http://localhost:8080/metrics') 96 | >>> print(response.content.decode()) 97 | ... 98 | http_request_total 2.0 99 | ... 100 | request_latency_seconds_count{cache="MISS",method="GET",status="200"} 1.0 101 | ... 102 | request_latency_seconds_count{cache="HIT",method="GET",status="200"} 1.0 103 | ... 104 | ``` 105 | 106 | With that, as the Github API rate limit is per hour, you can have the total 107 | HIT/MISS per hour with: 108 | 109 | ``` 110 | sum(increase(request_latency_seconds_count{endpoint="github-mirror",cache="ONLINE_HIT"}[1h])) 111 | ``` 112 | 113 | and 114 | 115 | ``` 116 | sum(increase(request_latency_seconds_count{endpoint="github-mirror",cache="ONLINE_MISS"}[1h])) 117 | ``` 118 | 119 | Plotting on Grafana, we have: 120 | 121 | ![](docs/images/grafana_hits_misses.png) 122 | 123 | Many more metrics are available. Check the `/metrics` endpoint for details. 124 | 125 | ## User Validation 126 | 127 | To enable the user validation, the `GITHUB_USERS` environment variable 128 | should be available to the server. 
The `GITHUB_USERS` is a colon-separated 129 | list of authorized users to have their requests to the Github Mirror served. 130 | Example: 131 | 132 | ``` 133 | GITHUB_USERS=app-sre-bot,quay-io-bot 134 | ``` 135 | 136 | The user validation, when enabled, will not allow unauthenticated requests 137 | to the Github Mirror. 138 | 139 | Please notice that, in order to validate the user, one additional get request 140 | is made to the Github API, to the `/user` endpoint, using the provided 141 | authorization token. That call will also go through the caching mechanism, so 142 | the rate limit will be preserved when possible. 143 | 144 | ## Offline Mode 145 | 146 | There's a built-in mechanism to detect when the Github API is offline. 147 | 148 | To do so, we have a separate thread that keeps checking the url 149 | `https://www.githubstatus.com/api/v2/components.json` every second. 150 | When we don't get a success response, or `API Requests` component is `major_outage`, 151 | we consider the Github API offline. 152 | 153 | When that happens, all the requests are served from the cache until we detect 154 | that the Github API is back online. 155 | 156 | Requests served from the cache when we are in offline mode will be accounted 157 | for in separate metrics: 158 | 159 | ``` 160 | request_latency_seconds_count{endpoint="github-mirror",cache="OFFLINE_HIT"} 161 | ``` 162 | 163 | and 164 | 165 | ``` 166 | request_latency_seconds_count{endpoint="github-mirror",cache="OFFLINE_MISS"} 167 | ``` 168 | 169 | ## Contributing 170 | 171 | For contributing to the project, please follow the 172 | [Development Guide](docs/devel_guide.md). 
173 | -------------------------------------------------------------------------------- /tests/unit/test_requests.py: -------------------------------------------------------------------------------- 1 | # ruff: noqa: SLF001 2 | from random import randint 3 | from unittest import ( 4 | TestCase, 5 | mock, 6 | ) 7 | 8 | import pytest 9 | 10 | from ghmirror.core.mirror_requests import ( 11 | _get_elements_per_page, # noqa: PLC2701 12 | _is_rate_limit_error, # noqa: PLC2701 13 | _should_error_response_be_served_from_cache, # noqa: PLC2701 14 | ) 15 | from ghmirror.data_structures.monostate import StatsCache 16 | from ghmirror.data_structures.requests_cache import RequestsCache 17 | 18 | RAND_CACHE_SIZE = randint(100, 1000) 19 | 20 | 21 | class TestStatsCache(TestCase): 22 | def test_shared_state(self): 23 | stats_cache_01 = StatsCache() 24 | with pytest.raises(AttributeError) as e_info: 25 | stats_cache_01.foo # noqa: B018 26 | self.assertIn("object has no attribute", e_info.message) 27 | self.assertEqual(stats_cache_01.counter._value._value, 0) 28 | 29 | stats_cache_01.count() 30 | stats_cache_01.count() 31 | 32 | self.assertEqual(stats_cache_01.counter._value._value, 2) 33 | 34 | stats_cache_02 = StatsCache() 35 | self.assertEqual(stats_cache_02.counter._value._value, 2) 36 | 37 | stats_cache_02.count() 38 | stats_cache_02.count() 39 | 40 | self.assertEqual(stats_cache_01.counter._value._value, 4) 41 | self.assertEqual(stats_cache_02.counter._value._value, 4) 42 | 43 | 44 | class MockResponse: 45 | def __init__(self, content, headers, status_code, text): 46 | self.content = content.encode() 47 | self.headers = headers 48 | self.status_code = status_code 49 | self.text = text 50 | 51 | 52 | class MockRedis: 53 | cache = {} 54 | 55 | def __init__(self, size=0): 56 | self.size = size 57 | 58 | def exists(self, item): 59 | return item in self.cache 60 | 61 | def get(self, item): 62 | if item in self.cache: 63 | return self.cache[item] 64 | return None 65 | 66 | def 
set(self, key, value, **_): 67 | self.cache[key] = value 68 | 69 | def _scan_iter(self): 70 | return iter(self.cache) 71 | 72 | def scan(self, *_args): 73 | return 0, iter(self.cache) 74 | 75 | def dbsize(self): 76 | return len(self.cache) 77 | 78 | def info(self): 79 | return {"used_memory": self.size} 80 | 81 | 82 | def mocked_redis_cache(*_args, **_kwargs): 83 | return MockRedis(size=RAND_CACHE_SIZE) 84 | 85 | 86 | class TestRequestsCache(TestCase): 87 | @mock.patch("ghmirror.data_structures.requests_cache.CACHE_TYPE", "redis") 88 | @mock.patch( 89 | "ghmirror.data_structures.redis_data_structures.REDIS_TOKEN", "mysecret" 90 | ) 91 | @mock.patch("ghmirror.data_structures.redis_data_structures.REDIS_SSL", "True") 92 | @mock.patch( 93 | "ghmirror.data_structures.redis_data_structures.redis.Redis", 94 | side_effect=mocked_redis_cache, 95 | ) 96 | def test_interface_redis(self, _mock_cache): 97 | requests_cache_01 = RequestsCache() 98 | requests_cache_01["foo"] = MockResponse( 99 | content="bar", headers={}, status_code=200, text="" 100 | ) 101 | self.assertTrue(list(requests_cache_01)) 102 | self.assertIn("foo", requests_cache_01) 103 | 104 | self.assertEqual(requests_cache_01["foo"].content, b"bar") 105 | self.assertEqual(requests_cache_01["foo"].status_code, 200) 106 | 107 | self.assertEqual(requests_cache_01.__sizeof__(), RAND_CACHE_SIZE) 108 | 109 | self.assertRaises(KeyError, lambda: requests_cache_01["bar"]) 110 | 111 | @mock.patch("ghmirror.data_structures.requests_cache.CACHE_TYPE", "in-memory") 112 | def test_interface_in_memory(self): 113 | requests_cache_01 = RequestsCache() 114 | requests_cache_01["foo"] = MockResponse( 115 | content="bar", headers={}, status_code=200, text="" 116 | ) 117 | self.assertTrue(list(requests_cache_01)) 118 | self.assertIn("foo", requests_cache_01) 119 | 120 | @mock.patch("ghmirror.data_structures.requests_cache.CACHE_TYPE", "in-memory") 121 | def test_shared_state(self): 122 | requests_cache_01 = RequestsCache() 123 | 
requests_cache_01["foo"] = MockResponse( 124 | content="bar", headers={}, status_code=200, text="" 125 | ) 126 | requests_cache_02 = RequestsCache() 127 | 128 | self.assertEqual(requests_cache_02["foo"].content, b"bar") 129 | self.assertEqual(requests_cache_02["foo"].status_code, 200) 130 | 131 | 132 | class TestParseUrlParameters(TestCase): 133 | def test_url_params_empty(self): 134 | url_params = None 135 | self.assertIsNone(_get_elements_per_page(url_params)) 136 | 137 | def test_url_params_no_per_page(self): 138 | url_params = {} 139 | self.assertIsNone(_get_elements_per_page(url_params)) 140 | 141 | def test_url_params_per_page(self): 142 | url_params = {"per_page": 2} 143 | self.assertEqual(_get_elements_per_page(url_params), 2) 144 | 145 | 146 | class TestIsRateLimitCondition(TestCase): 147 | def test_is_rate_limit_error_true(self): 148 | text = "You have triggered an abuse detection mechanism." 149 | resp = MockResponse(content="bar", headers={}, status_code=403, text=text) 150 | self.assertTrue(_is_rate_limit_error(resp)) 151 | 152 | def test_is_rate_limit_error_false(self): 153 | text = "it's fine." 154 | resp = MockResponse(content="bar", headers={}, status_code=403, text=text) 155 | self.assertFalse(_is_rate_limit_error(resp)) 156 | 157 | 158 | class TestServeFromCacheCondition(TestCase): 159 | def test_should_serve_from_cache_rate_limit(self): 160 | text = "You have triggered an abuse detection mechanism." 161 | resp = MockResponse(content="bar", headers={}, status_code=403, text=text) 162 | header = _should_error_response_be_served_from_cache(resp) 163 | self.assertEqual(header, "RATE_LIMITED") 164 | 165 | def test_should_serve_from_cache_api_error(self): 166 | text = "it's fine." 167 | resp = MockResponse(content="bar", headers={}, status_code=500, text=text) 168 | header = _should_error_response_be_served_from_cache(resp) 169 | self.assertEqual(header, "API_ERROR") 170 | 171 | def test_should_serve_from_cache_ok(self): 172 | text = "it's fine." 
173 | resp = MockResponse(content="bar", headers={}, status_code=200, text=text) 174 | header = _should_error_response_be_served_from_cache(resp) 175 | self.assertIsNone(header) 176 | -------------------------------------------------------------------------------- /tests/unit/test_github_status.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | import requests 5 | 6 | from ghmirror.data_structures.monostate import ( 7 | GithubStatus, 8 | _GithubStatus, # noqa: PLC2701 9 | ) 10 | 11 | EXPECTED_TIMEOUT = 10 12 | EXPECTED_SLEEP_TIME = 1 13 | 14 | 15 | @mock.patch("ghmirror.data_structures.monostate.threading.Thread") 16 | def test_create_github_status_singleton(_mock_thread): 17 | github_status = GithubStatus() 18 | github_status2 = GithubStatus() 19 | 20 | assert isinstance(github_status, _GithubStatus) 21 | assert github_status is github_status2 22 | 23 | 24 | @pytest.mark.parametrize( 25 | "env,expected_sleep_time,expected_timeout", 26 | [ 27 | ({}, 1, 10), 28 | ({"GITHUB_STATUS_SLEEP_TIME": "3"}, 3, 10), 29 | ({"GITHUB_STATUS_TIMEOUT": "2"}, 1, 2), 30 | ], 31 | ) 32 | @mock.patch("ghmirror.data_structures.monostate.HTTPAdapter") 33 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 34 | @mock.patch("ghmirror.data_structures.monostate.threading.Thread") 35 | def test_create_github_status_with_sleep_time( 36 | mock_thread, 37 | mock_session, 38 | mock_http_adapter, 39 | env, 40 | expected_sleep_time, 41 | expected_timeout, 42 | ): 43 | with mock.patch.dict("ghmirror.data_structures.monostate.os.environ", env): 44 | github_status = _GithubStatus.create() 45 | 46 | assert github_status.online is True 47 | assert github_status.sleep_time == expected_sleep_time 48 | assert github_status.timeout == expected_timeout 49 | assert github_status.session is mock_session.return_value 50 | mock_thread.assert_called_once_with(target=github_status.check, daemon=True) 51 | 
mock_thread.return_value.start.assert_called_once_with() 52 | mock_session.assert_called_once_with() 53 | mock_http_adapter.assert_called_once_with(max_retries=3) 54 | mock_session.return_value.mount.assert_called_once_with( 55 | "https://", mock_http_adapter.return_value 56 | ) 57 | 58 | 59 | def build_github_status_response_builder(status): 60 | return { 61 | "page": { 62 | "id": "kctbh9vrtdwd", 63 | "name": "GitHub", 64 | "url": "https://www.githubstatus.com", 65 | "updated_at": "2023-08-31T07:56:30Z", 66 | }, 67 | "components": [ 68 | { 69 | "created_at": "2014-05-03T01:22:07.274Z", 70 | "description": None, 71 | "group": False, 72 | "group_id": None, 73 | "id": "b13yz5g2cw10", 74 | "name": "API Requests", 75 | "only_show_if_degraded": False, 76 | "page_id": "kctbh9vrtdwd", 77 | "position": 1, 78 | "showcase": True, 79 | "start_date": None, 80 | "status": status, 81 | "updated_at": "2014-05-14T20:34:43.340Z", 82 | }, 83 | { 84 | "created_at": "2014-05-03T01:22:07.286Z", 85 | "description": None, 86 | "group": False, 87 | "group_id": None, 88 | "id": "9397cnvk62zn", 89 | "name": "Management Portal", 90 | "only_show_if_degraded": False, 91 | "page_id": "kctbh9vrtdwd", 92 | "position": 2, 93 | "showcase": True, 94 | "start_date": None, 95 | "status": "major_outage", 96 | "updated_at": "2014-05-14T20:34:44.470Z", 97 | }, 98 | ], 99 | } 100 | 101 | 102 | @pytest.mark.parametrize( 103 | "status", 104 | [ 105 | "operational", 106 | "degraded_performance", 107 | "partial_outage", 108 | ], 109 | ) 110 | @mock.patch( 111 | "ghmirror.data_structures.monostate.time.sleep", side_effect=InterruptedError 112 | ) 113 | @mock.patch("ghmirror.data_structures.monostate.threading.Thread") 114 | def test_github_status_check_success(_mock_thread, mock_sleep, status): 115 | mocked_response = mock.create_autospec(requests.Response) 116 | mocked_response.json.return_value = build_github_status_response_builder(status) 117 | session = mock.create_autospec(requests.Session) 118 | 
session.get.return_value = mocked_response 119 | github_status = _GithubStatus( 120 | sleep_time=EXPECTED_SLEEP_TIME, timeout=EXPECTED_TIMEOUT, session=session 121 | ) 122 | 123 | with pytest.raises(InterruptedError): 124 | github_status.check() 125 | 126 | assert github_status.online is True 127 | session.get.assert_called_once_with( 128 | "https://www.githubstatus.com/api/v2/components.json", timeout=EXPECTED_TIMEOUT 129 | ) 130 | mocked_response.raise_for_status.assert_called_once_with() 131 | mock_sleep.assert_called_once_with(EXPECTED_SLEEP_TIME) 132 | 133 | 134 | @mock.patch("ghmirror.data_structures.monostate.LOG") 135 | @mock.patch( 136 | "ghmirror.data_structures.monostate.time.sleep", side_effect=InterruptedError 137 | ) 138 | @mock.patch("ghmirror.data_structures.monostate.threading.Thread") 139 | def test_github_status_check_outage(_mock_thread, mock_sleep, mock_log): 140 | mocked_response = mock.create_autospec(requests.Response) 141 | mocked_response.json.return_value = build_github_status_response_builder( 142 | "major_outage" 143 | ) 144 | session = mock.create_autospec(requests.Session) 145 | session.get.return_value = mocked_response 146 | github_status = _GithubStatus( 147 | sleep_time=EXPECTED_SLEEP_TIME, timeout=EXPECTED_TIMEOUT, session=session 148 | ) 149 | 150 | with pytest.raises(InterruptedError): 151 | github_status.check() 152 | 153 | assert github_status.online is False 154 | mock_log.warning.assert_called_once_with( 155 | "Github API is offline, response: %s", mocked_response.text 156 | ) 157 | session.get.assert_called_once_with( 158 | "https://www.githubstatus.com/api/v2/components.json", timeout=EXPECTED_TIMEOUT 159 | ) 160 | mocked_response.raise_for_status.assert_called_once_with() 161 | mock_sleep.assert_called_once_with(EXPECTED_SLEEP_TIME) 162 | 163 | 164 | @pytest.mark.parametrize( 165 | "error", 166 | [ 167 | (requests.exceptions.ConnectionError("Connection error")), 168 | (requests.exceptions.HTTPError("429 Client Error: too 
many requests")), 169 | (requests.exceptions.Timeout("Timeout")), 170 | ], 171 | ) 172 | @mock.patch("ghmirror.data_structures.monostate.LOG") 173 | @mock.patch( 174 | "ghmirror.data_structures.monostate.time.sleep", side_effect=InterruptedError 175 | ) 176 | @mock.patch("ghmirror.data_structures.monostate.threading.Thread") 177 | def test_github_status_check_fail(_mock_thread, mock_sleep, mock_log, error): 178 | mocked_response = mock.create_autospec(requests.Response) 179 | session = mock.create_autospec(requests.Session) 180 | session.get.return_value = mocked_response 181 | mocked_response.raise_for_status.side_effect = error 182 | github_status = _GithubStatus( 183 | sleep_time=EXPECTED_SLEEP_TIME, timeout=EXPECTED_TIMEOUT, session=session 184 | ) 185 | 186 | with pytest.raises(InterruptedError): 187 | github_status.check() 188 | 189 | assert github_status.online is False 190 | mock_log.warning.assert_called_once_with("Github API is offline, reason: %s", error) 191 | session.get.assert_called_once_with( 192 | "https://www.githubstatus.com/api/v2/components.json", timeout=EXPECTED_TIMEOUT 193 | ) 194 | mocked_response.raise_for_status.assert_called_once_with() 195 | mock_sleep.assert_called_once_with(EXPECTED_SLEEP_TIME) 196 | -------------------------------------------------------------------------------- /docs/devel_guide.md: -------------------------------------------------------------------------------- 1 | # Development Guide 2 | 3 | ## Fork the repository 4 | 5 | Go to https://github.com/app-sre/github-mirror, on the top-right corner, 6 | click "Fork" and confirm the fork to your user. 7 | 8 | That will give you a copy of the repository under your user. The resulting 9 | repository url will be `https://github.com//github-mirror`. 10 | 11 | ## Clone from your fork 12 | 13 | The git repository on your local machine has to be cloned from your fork. 
14 | That's because you can create branches on your fork and propose Pull Requests 15 | from them, but you can't always create branches on the main repository. This is 16 | known as [fork workflow](https://dev.to/mathieuks/introduction-to-github-fork-workflow-why-is-it-so-complex-3ac8). 17 | 18 | Clone with: 19 | 20 | ``` 21 | user@localhost:~$ git clone git@github.com:/github-mirror.git 22 | ``` 23 | 24 | ## Setup your local repository 25 | 26 | Your local repository will have a `remote` already in place: 27 | 28 | ``` 29 | user@localhost:~$ cd github-mirror 30 | user@localhost:~/github-mirror$ git remote -v 31 | origin  git@github.com:/github-mirror.git (fetch) 32 | origin  git@github.com:/github-mirror.git (push) 33 | ``` 34 | 35 | `origin` points to your fork. Now you have to add the main repository as 36 | another `remote`. That is useful to sync the changes from the main repository 37 | into your local repository and into your fork. 38 | 39 | Add it with: 40 | 41 | ``` 42 | user@localhost:~/github-mirror$ git remote add upstream git@github.com:app-sre/github-mirror.git 43 | ``` 44 | 45 | After that command, you will see two remotes: 46 | 47 | ``` 48 | user@localhost:~/github-mirror$ git remote -v 49 | origin  git@github.com:/github-mirror.git (fetch) 50 | origin  git@github.com:/github-mirror.git (push) 51 | upstream git@github.com:app-sre/github-mirror.git (fetch) 52 | upstream git@github.com:app-sre/github-mirror.git (push) 53 | ``` 54 | 55 | Now `upstream` points to the main repository. 56 | 57 | ## Create a Python virtual environment: 58 | 59 | You need python - at least - v3.6. 
You can check the right version with: 60 | 61 | ``` 62 | user@localhost:~/github-mirror$ python --version 63 | ``` 64 | 65 | and 66 | 67 | ``` 68 | user@localhost:~/github-mirror$ python3 --version 69 | ``` 70 | 71 | Whatever is the right one, use it to create the virtual environment: 72 | 73 | ``` 74 | user@localhost:~/github-mirror$ python -m venv venv 75 | ``` 76 | 77 | That will use the python module `venv` to create a directory called `venv` 78 | with your virtual environment. 79 | 80 | Activate the virtual environment with: 81 | 82 | ``` 83 | user@localhost:~/github-mirror$ source venv/bin/activate 84 | ``` 85 | 86 | From now on, everything you install with `pip` will be installed in that 87 | directory and will be only available when it's activated. 88 | 89 | To exit the virtual environment, use: 90 | 91 | ``` 92 | (venv) user@localhost:~/github-mirror$ deactivate 93 | ``` 94 | 95 | ## Install the python package in development mode 96 | 97 | Activate the virtual environment: 98 | 99 | ``` 100 | user@localhost:~/github-mirror$ source venv/bin/activate 101 | ``` 102 | 103 | Install the package with: 104 | 105 | ``` 106 | (venv) user@localhost:~/github-mirror$ pip install --editable . 107 | ``` 108 | 109 | Install the check requirements with: 110 | 111 | ``` 112 | (venv) user@localhost:~/github-mirror$ pip install -r requirements-check.txt 113 | ``` 114 | 115 | ## Run the service 116 | 117 | To start the service in development mode, use: 118 | 119 | ``` 120 | (venv) user@localhost:~/github-mirror$ python ghmirror/app/__init__.py 121 | * Serving Flask app "__init__" (lazy loading) 122 | * Environment: production 123 | WARNING: This is a development server. Do not use it in a production deployment. 124 | Use a production WSGI server instead. 125 | * Debug mode: on 126 | 2020-06-08 15:08:58,704 * Running on http://127.0.0.1:8080/ (Press CTRL+C to quit) 127 | 2020-06-08 15:08:58,707 * Restarting with stat 128 | 2020-06-08 15:08:59,062 * Debugger is active! 
129 | 2020-06-08 15:08:59,062 * Debugger PIN: 279-042-762 130 | ``` 131 | 132 | ## Run your requests against the github-mirror 133 | 134 | From a different terminal, just replace `https://api.github.com` by 135 | `http://localhost:8080`: 136 | 137 | ``` 138 | user@localhost:~$ curl http://localhost:8080/repos/app-sre/sretoolbox 139 | ... 140 | ``` 141 | 142 | The service log will show: 143 | 144 | ``` 145 | 2020-06-08 15:12:39,475 [GET] CACHE_MISS https://api.github.com/repos/app-sre/sretoolbox 146 | 2020-06-08 15:12:39,478 127.0.0.1 - - [08/Jun/2020 15:12:39] "GET /repos/app-sre/sretoolbox HTTP/1.1" 200 - 147 | ``` 148 | 149 | ## Run github-mirror in Redis backed mode (optional) 150 | 151 | When running in Redis mode, github-mirror will cache all requests in a Redis server instead of locally in-memory. This enables multiple instances of github-mirror to access and maintain a single shared cache. 152 | 153 | First, you will need to tell github-mirror to run in Redis mode by setting the `CACHE_TYPE` environment variable. If the variable is not set, github mirror will run in the default in-memory cache mode. 154 | 155 | ``` 156 | user@localhost:~$ export CACHE_TYPE=redis 157 | ``` 158 | 159 | ### Running Redis 160 | If you already have Redis installed, use a different terminal to run the Redis server. By default Redis will run at address `localhost` and port `6379`: 161 | ``` 162 | user@localhost:~$ redis-server 163 | ``` 164 | Alternatively, you can run Redis in a Docker container (you may need to run docker as root): 165 | 166 | ``` 167 | user@localhost:~$ docker run --rm -it -p 6379:6379 redis 168 | ``` 169 | 170 | You can now test the Redis backed github mirror as [before](#run-your-requests-against-the-github-mirror). 
171 | 172 | ### Additional configurations 173 | 174 | You can optionally set the following environment variables, to configure the connection to the Redis server: 175 | ``` 176 | user@localhost:~$ export PRIMARY_ENDPOINT= 177 | user@localhost:~$ export READER_ENDPOINT= 178 | user@localhost:~$ export REDIS_PORT= 179 | user@localhost:~$ export REDIS_PASSWORD= 180 | user@localhost:~$ export REDIS_SSL=True 181 | ``` 182 | 183 | # Coding 184 | 185 | ## Create a new local branch 186 | 187 | You should never commit to your local master. 188 | 189 | Before you start coding, get the latest changes from upstream/master. First, make 190 | sure you're on your local master: 191 | 192 | ``` 193 | (venv) user@localhost:~/github-mirror$ git branch 194 | * master 195 | ``` 196 | 197 | Then get all new commits from upstream master: 198 | 199 | ``` 200 | (venv) user@localhost:~/github-mirror$ git pull upstream master 201 | ``` 202 | 203 | Then checkout to a new branch: 204 | 205 | ``` 206 | (venv) user@localhost:~/github-mirror$ git checkout -b new_feature_01 207 | ``` 208 | 209 | ## Running the checks and tests 210 | 211 | To run the code checks, use: 212 | 213 | ``` 214 | (venv) user@localhost:~/github-mirror$ make check 215 | ``` 216 | 217 | Or to run the tests only, use: 218 | 219 | ``` 220 | (venv) user@localhost:~/github-mirror$ pytest -v --forked --cov=ghmirror --cov-report=term-missing tests/ 221 | ``` 222 | 223 | Those are all the checks and tests executed by the CI, so if they pass on your 224 | local machine, they are expected to pass in the CI pipeline. 225 | 226 | ## Commit your changes 227 | 228 | When the time comes, commit your changes: 229 | 230 | ``` 231 | (venv) user@localhost:~/github-mirror$ git commit -a -m "Adding feature 01" 232 | ``` 233 | 234 | Create as many commits as you need. All commits are local up to this point. 235 | 236 | ## Push your changes 237 | 238 | You will now push your changes to your fork. 
To do that, run: 239 | 240 | ``` 241 | (venv) user@localhost:~/github-mirror$ git push 242 | ``` 243 | 244 | If this is the first time you push from this branch, you will see a message 245 | showing the actual push command you need to execute the first time: 246 | 247 | ``` 248 | fatal: The current branch demo has no upstream branch. 249 | To push the current branch and set the remote as upstream, use 250 | 251 | git push --set-upstream origin new_feature_01 252 | ``` 253 | 254 | Run that command: 255 | 256 | ``` 257 | (venv) user@localhost:~/github-mirror$ git push --set-upstream origin new_feature_01 258 | ``` 259 | 260 | Next time you want to push commits from this branch, just use `git push`. 261 | 262 | ## Pull Requests 263 | 264 | Create a Pull Request from your fork's feature branch to the main repository's 265 | master branch using the Web UI. 266 | 267 | Keep up with the review, adding new commits to your local branch and pushing 268 | them using `git push`. That will automatically update the pull request. 269 | -------------------------------------------------------------------------------- /docs/redis_cache_backend.md: -------------------------------------------------------------------------------- 1 | # Redis Cache Backend 2 | 3 | ## Context 4 | 5 | The github mirror acts as a proxy for the Github API and is used to limit the 6 | number of calls to the Github API by serving cached objects to the client 7 | whenever possible. This prevents clients from becoming rate limited by 8 | exceeding their hourly API quota too quickly. 9 | 10 | When a client requests a resource from the mirror, the mirror first looks up 11 | the resource in the cache to obtain its 12 | [etag](https://developer.github.com/v3/#conditional-requests) (which 13 | identifies the version of the resource). If it is found, the mirror includes 14 | this etag in a conditional request to the Github API. 
If the cache entry is 15 | stale, the mirror will receive the new version of the resource (along with a 16 | new etag) which it then caches and serves to the client. However, if the 17 | resource has not changed the mirror will receive a 18 | [304 Not Modified](https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html) 19 | response and will serve the cached object to the client. 20 | 21 | ## Goals 22 | 23 | * Re-design the Github [mirror](#mirror) to reduce the number of Github API 24 | requests required to build up the mirror cache with every pod restart (ex: 25 | pod failure, deployment, etc.). 26 | * Specifically, cache rebuilding after a restart should require only one API 27 | call per resource per [client](#client), so that the number of API calls 28 | does not grow linearly in the number of [mirror replicas](#mirror-replica). 29 | * Minimize latency in the redesign so that the average mirror 30 | [request time](#request-time) does not increase significantly. 31 | 32 | ## Current solution 33 | 34 | The github mirror is deployed in an OpenShift cluster with a replication 35 | factor of 3. 36 | 37 | Each replica of the github mirror server maintains its own 38 | [local in-memory cache](#local-in-memory-cache). The cache is a data structure 39 | (Python dictionary) with constant-time lookup. The cache may grow unbounded if 40 | the pod continues running and does not restart. 41 | 42 | The current design incurs a significant overhead during redeployment, as each 43 | replica has to build and maintain its own copy of the cache (meaning each 44 | replica makes one API call per resource per client). This design does not 45 | scale well if the number of replicas is increased, as the 46 | [hourly quota](https://developer.github.com/v3/#rate-limiting) for Github API 47 | requests will quickly be exhausted, causing a hold up for the client for the 48 | remainder of the hour. 
49 | 50 | ## Proposed solution 51 | 52 | ### Overview 53 | 54 | * Replace each pod's local in-memory cache with a single shared 55 | [cache server](#cache-server) for all pods, and periodically back up the 56 | cache contents in an object store. 57 | * A shared cache should reduce the number of unnecessary calls to the Github 58 | API since each pod will not maintain its own copy of the cache, and 59 | therefore will not have to request the same resource more than once per 60 | client. 61 | * When the cache server is restarted or redeployed, it can restore the cache 62 | state from backup to avoid exhausting the API quota due to a sharp increase 63 | in cache misses (if otherwise starting with an empty cache). 64 | * The cache server should be deployed within the same region as the Github 65 | mirror OpenShift cluster to reduce the latency associated with requests to 66 | the cache. 67 | * Use an in-memory key-value store as the shared cache server. 68 | * An in-memory store is important to minimize latency. 69 | * Each request by a client to the Github API returns a response containing a 70 | unique etag. Therefore a key-value store can be used to map each client 71 | request to the unique response object. 72 | * The key-value store should meet the following requirements. 73 | * Highly available, to prevent bottlenecks and minimize latency. 74 | * Persistent, to prevent loss of all cached information in the case of a pod 75 | restart. 76 | * Weakly consistent, since we can tolerate stale reads 77 | 78 | ### Implementation details 79 | 80 | Given the project requirements, the shared cache will be implemented using 81 | [AWS Elasticache](https://aws.amazon.com/elasticache/pricing/) for Redis. 82 | 83 | Why Redis? 84 | 85 | * Since the goal is to minimize response time, an in-memory key-value store is 86 | needed. There are several key-value stores that meet the criteria, 87 | notably [Redis](https://redis.io/) or [Memcached](https://memcached.org/). 
88 | * Memcached has a limit on the size of the object being stored (< 1 MB), 89 | however, most of the response objects returned by the API will be small 90 | and therefore Memcached or Redis will both work in this regard. 91 | * The main advantage that Redis has over Memcached is with regards to 92 | availability and persistence. 93 | [Memcached does not offer replication or persistence](https://docs.aws.amazon.com/AmazonElastiCache/latest/mem-ug/SelectEngine.html), 94 | so if the server fails or is restarted, the cache will be lost. 95 | * Redis offers [persistence](https://redis.io/topics/persistence) using 96 | RDB files which are snapshots of the data written to 97 | disk at specifiable intervals. These snapshots can be backed up in an object 98 | store like S3. Redis also provides more durable persistence using AOF logs 99 | that write each server request to disk, but this level of durability is not 100 | needed here. 101 | * Redis can be configured to be 102 | [highly available](https://redis.io/topics/sentinel) with automatic 103 | failover. 104 | * A Redis cluster provides 105 | [weak consistency](https://redis.io/topics/cluster-tutorial#redis-cluster-consistency-guarantees) 106 | which is acceptable since any stale cache reads will result in an 107 | additional request to the Github API (as well as unnecessarily replacing 108 | the cache entry), but the client will not receive stale data. 109 | * Other useful features of Redis include sharding the cache if it gets too 110 | big, as well as the ability to select from a number of cache eviction 111 | policies. 112 | 113 | Why AWS Elasticache? 114 | 115 | * Managing a persistent, highly-available and sharded Redis cluster can be 116 | greatly simplified using Elasticache, thereby reducing the maintenance 117 | burden of the github mirror. 
118 | * Since the github mirror OpenShift cluster is running on AWS nodes, the AWS 119 | Elasticache Redis cluster can be co-located in the same region to reduce 120 | network time. 121 | 122 | Measuring latency 123 | 124 | * A Prometheus metrics endpoint currently provides the latency of requests to 125 | the mirror, and is used to display the 90, 95 and 99 percentile tail 126 | latencies in Grafana. A separate metric can be added to show the additional 127 | latency created by introducing the Redis cache server. 128 | 129 | ### Architecture diagram 130 | 131 | ![](./images/arch.svg) 132 | 133 | ## Scope 134 | 135 | * Cache persistence is limited to occasionally backing up the cache contents 136 | to prevent the github mirror from starting with an empty cache after a 137 | restart. 138 | * Any additional work on metrics for Prometheus or Grafana that is necessary 139 | to analyze and evaluate the new design is within the scope of this project. 140 | 141 | ## Acceptance Criteria 142 | 143 | * A successful solution will provide a highly available, low latency mirror 144 | server, that can be easily scaled from 3 to 100 replicas without causing the 145 | client to be rate limited by the Github API. 146 | * Restarting the github mirror will not cause a large spike in cache misses, 147 | if a backup of the cache contents is available. It is expected that a few 148 | writes to the cache will be lost in the process, but that is an acceptable 149 | outcome and can be adjusted by changing the snapshotting frequency. 150 | * Additional latency due to redesign will not exceed 3 151 | [http request times](http://services.google.com/fh/files/blogs/google_delayexp.pdf). 152 | * Users can set up the mirror to use either the per-pod local in-memory cache 153 | or the Redis server simply by modifying a configuration file. 154 | * If the Redis cache is unavailable, the github mirror can default to using 155 | the local in-memory cache. 
156 | 157 | ## Glossary 158 | 159 | * **client**: any application making http requests to 160 | the mirror. 161 | * **mirror**: serves cached data to the client and makes 162 | conditional requests to the Github API. 163 | * **mirror replica**: replicas of the mirror 164 | server, increasing availability, fault tolerance and distributing the load. 165 | * **cache server**: server which caches results of 166 | Github API calls by the mirror. 167 | * **local in-memory cache**: *in this 168 | context*, it refers to each pod's ephemeral cache that is lost when a pod 169 | fails, is deleted, restarted or redeployed. Not to be confused with Redis 170 | which is often described as an in-memory cache! 171 | * **request time**: the time it takes for the 172 | client to receive a response to a request made to the mirror server, and 173 | this includes the time the mirror takes to make Github API calls and to 174 | make requests to the cache. 175 | -------------------------------------------------------------------------------- /ghmirror/core/mirror_requests.py: -------------------------------------------------------------------------------- 1 | # This program is free software; you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation; either version 2 of the License, or 4 | # (at your option) any later version. 5 | # 6 | # This program is distributed in the hope that it will be useful, 7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 9 | # 10 | # See LICENSE for more details. 11 | # 12 | # Copyright: Red Hat Inc. 
2020 13 | # Author: Amador Pahim 14 | 15 | """Implements conditional requests""" 16 | 17 | # ruff: noqa: PLR2004 18 | import hashlib 19 | import logging 20 | 21 | import requests 22 | 23 | from ghmirror.core.constants import ( 24 | PER_PAGE_ELEMENTS, 25 | REQUESTS_TIMEOUT, 26 | ) 27 | from ghmirror.data_structures.monostate import GithubStatus 28 | from ghmirror.data_structures.requests_cache import RequestsCache 29 | from ghmirror.decorators.metrics import requests_metrics 30 | 31 | logging.basicConfig(level=logging.INFO, format="%(asctime)-15s %(message)s") 32 | LOG = logging.getLogger(__name__) 33 | 34 | 35 | def _get_elements_per_page(url_params): 36 | """Get 'per_page' parameter if present in URL or return None if not present""" 37 | if url_params is not None: 38 | per_page = url_params.get("per_page") 39 | if per_page is not None: 40 | return int(per_page) 41 | 42 | return None 43 | 44 | 45 | def _cache_response(resp, cache, cache_key): 46 | """Cache response if it makes sense 47 | 48 | Implements the logic to decide whether or not whe should cache a request acording 49 | to the headers and content 50 | """ 51 | # Caching only makes sense when at least one 52 | # of those headers is present 53 | if resp.status_code == 200 and any([ 54 | "ETag" in resp.headers, 55 | "Last-Modified" in resp.headers, 56 | ]): 57 | cache[cache_key] = resp 58 | 59 | 60 | def _online_request( 61 | session, method, url, cached_response, headers=None, parameters=None 62 | ): 63 | """Handle API errors on conditional requests and try to serve contents from cache""" 64 | try: 65 | resp = session.request( 66 | method=method, 67 | url=url, 68 | headers=headers, 69 | timeout=REQUESTS_TIMEOUT, 70 | params=parameters, 71 | ) 72 | 73 | # When we hit the API limit, or there is a problem with the API 74 | # let's try to serve from cache 75 | error_resp_header = _should_error_response_be_served_from_cache(resp) 76 | 77 | # If we didn't find any error in the API request, we return the 78 | # 
response directly to the next layer 79 | if error_resp_header is None: 80 | return resp 81 | 82 | if cached_response is None: 83 | LOG.info("%s GET CACHE_MISS %s", error_resp_header, url) 84 | resp.headers["X-Cache"] = error_resp_header + "_MISS" 85 | return resp 86 | 87 | LOG.info("%s GET CACHE_HIT %s", error_resp_header, url) 88 | cached_response.headers["X-Cache"] = error_resp_header + "_HIT" 89 | return cached_response 90 | 91 | except requests.exceptions.Timeout: 92 | if cached_response is None: 93 | raise 94 | 95 | LOG.info("API_TIMEOUT GET CACHE_HIT %s", url) 96 | cached_response.headers["X-Cache"] = "API_TIMEOUT_HIT" 97 | return cached_response 98 | 99 | except requests.exceptions.ConnectionError: 100 | if cached_response is None: 101 | raise 102 | 103 | LOG.info("API_CONNECTION_ERROR GET CACHE_HIT %s", url) 104 | cached_response.headers["X-Cache"] = "API_CONNECTION_ERROR_HIT" 105 | return cached_response 106 | 107 | 108 | def _handle_not_changed( 109 | session, 110 | cached_response, 111 | per_page_elements, 112 | headers, 113 | method, 114 | url, 115 | parameters, 116 | cache, 117 | cache_key, 118 | ): 119 | if len(cached_response.json()) == per_page_elements and not cached_response.links: 120 | headers.pop("If-None-Match", None) 121 | headers.pop("If-Modified-Since", None) 122 | resp = session.request( 123 | method=method, 124 | url=url, 125 | headers=headers, 126 | timeout=REQUESTS_TIMEOUT, 127 | params=parameters, 128 | ) 129 | 130 | LOG.info("ONLINE GET CACHE_MISS %s", url) 131 | resp.headers["X-Cache"] = "ONLINE_MISS" 132 | _cache_response(resp, cache, cache_key) 133 | return resp 134 | 135 | LOG.info("ONLINE GET CACHE_HIT %s", url) 136 | cached_response.headers["X-Cache"] = "ONLINE_HIT" 137 | return cached_response 138 | 139 | 140 | @requests_metrics 141 | def conditional_request(session, method, url, auth, data=None, url_params=None): 142 | """Implements conditional requests. 
143 | 144 | Checking first whether the upstream API is online of offline to decide which 145 | request routine to call. 146 | """ 147 | if GithubStatus().online: 148 | return online_request(session, method, url, auth, data, url_params) 149 | return offline_request(method, url, auth) 150 | 151 | 152 | def online_request(session, method, url, auth, data=None, url_params=None): 153 | """Implements conditional requests.""" 154 | cache = RequestsCache() 155 | headers = {} 156 | parameters = url_params.to_dict() if url_params is not None else {} 157 | 158 | per_page_elements = _get_elements_per_page(url_params) 159 | 160 | if per_page_elements is None: 161 | per_page_elements = PER_PAGE_ELEMENTS 162 | parameters["per_page"] = PER_PAGE_ELEMENTS 163 | 164 | if auth is None: 165 | auth_sha = None 166 | else: 167 | auth_sha = hashlib.sha1(auth.encode()).hexdigest() 168 | headers["Authorization"] = auth 169 | 170 | # Special case for non-GET requests 171 | if method != "GET": 172 | # Just forward the request with the auth header 173 | resp = session.request( 174 | method=method, 175 | url=url, 176 | headers=headers, 177 | data=data, 178 | timeout=REQUESTS_TIMEOUT, 179 | params=parameters, 180 | ) 181 | 182 | LOG.info("ONLINE %s CACHE_MISS %s", method, url) 183 | # And just forward the response (with the 184 | # cache-miss header, for metrics) 185 | resp.headers["X-Cache"] = "ONLINE_MISS" 186 | return resp 187 | 188 | cache_key = (url, auth_sha) 189 | 190 | cached_response = None 191 | if cache_key in cache: 192 | cached_response = cache[cache_key] 193 | etag = cached_response.headers.get("ETag") 194 | if etag is not None: 195 | headers["If-None-Match"] = etag 196 | last_mod = cached_response.headers.get("Last-Modified") 197 | if last_mod is not None: 198 | headers["If-Modified-Since"] = last_mod 199 | 200 | resp = _online_request( 201 | session=session, 202 | method=method, 203 | url=url, 204 | headers=headers, 205 | parameters=parameters, 206 | 
cached_response=cached_response, 207 | ) 208 | 209 | if resp.status_code == 304: 210 | return _handle_not_changed( 211 | session, 212 | cached_response, 213 | per_page_elements, 214 | headers, 215 | method, 216 | url, 217 | parameters, 218 | cache, 219 | cache_key, 220 | ) 221 | 222 | # This section covers the log and the headers logic when we don't have 223 | # any error on the _online_request method, and the response from the 224 | # Github API is returned. 225 | if "X-Cache" not in resp.headers: 226 | LOG.info("ONLINE GET CACHE_MISS %s", url) 227 | resp.headers["X-Cache"] = "ONLINE_MISS" 228 | _cache_response(resp, cache, cache_key) 229 | 230 | return resp 231 | 232 | 233 | def _should_error_response_be_served_from_cache(response): 234 | """Parse a response to check if we should serve contents from cache 235 | 236 | :param response: requests module response 237 | :type response: requests.Response 238 | 239 | :return: The headers that we should return on the request if served 240 | from cache 241 | :rtype: str, optional 242 | """ 243 | if _is_rate_limit_error(response): 244 | return "RATE_LIMITED" 245 | 246 | if response.status_code >= 500 and response.status_code < 600: 247 | return "API_ERROR" 248 | 249 | return None 250 | 251 | 252 | def _is_rate_limit_error(response): 253 | """Try to serve response from the cache when we hit API limit 254 | 255 | :param response: requests module response 256 | :type response: requests.Response 257 | """ 258 | rate_limit_messages = { 259 | "API rate limit exceeded", 260 | "secondary rate limit", 261 | "abuse detection mechanism", 262 | } 263 | return response.status_code == 403 and any( 264 | m in response.text for m in rate_limit_messages 265 | ) 266 | 267 | 268 | def offline_request( 269 | method, url, auth, error_code=504, error_message=b'{"message": "gateway timeout"}\n' 270 | ): 271 | """Implements offline requests (serves content from cache, when possible).""" 272 | headers = {} 273 | if auth is None: 274 | auth_sha = 
None 275 | else: 276 | auth_sha = hashlib.sha1(auth.encode()).hexdigest() 277 | headers["Authorization"] = auth 278 | 279 | # Special case for non-GET requests 280 | if method != "GET": 281 | LOG.info("OFFLINE %s CACHE_MISS %s", method, url) 282 | # Not much to do here. We just build up a response 283 | # with a reasonable status code so users know that our 284 | # upstream is offline 285 | response = requests.models.Response() 286 | response.status_code = error_code 287 | response.headers["X-Cache"] = "OFFLINE_MISS" 288 | response._content = error_message # noqa: SLF001 289 | return response 290 | 291 | cache = RequestsCache() 292 | cache_key = (url, auth_sha) 293 | if cache_key in cache: 294 | LOG.info("OFFLINE GET CACHE_HIT %s", url) 295 | # This is the best case: upstream is offline 296 | # but we have the resource in cache for a given 297 | # user. We then serve from cache. 298 | cached_response = cache[cache_key] 299 | cached_response.headers["X-Cache"] = "OFFLINE_HIT" 300 | return cached_response 301 | 302 | LOG.info("OFFLINE GET CACHE_MISS %s", url) 303 | # GETs without cached content will receive an error 304 | # code so they know our upstream is offline. 305 | response = requests.models.Response() 306 | response.status_code = error_code 307 | response.headers["X-Cache"] = "OFFLINE_MISS" 308 | response._content = error_message # noqa: SLF001 309 | return response 310 | -------------------------------------------------------------------------------- /ghmirror/data_structures/monostate.py: -------------------------------------------------------------------------------- 1 | # This program is free software; you can redistribute it and/or modify 2 | # it under the terms of the GNU General Public License as published by 3 | # the Free Software Foundation; either version 2 of the License, or 4 | # (at your option) any later version. 
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: Red Hat Inc. 2020
# Author: Amador Pahim

"""Caching data structures."""

import hashlib
import logging
import os
import pickle
import sys
import threading
import time

import requests
from prometheus_client import (
    CollectorRegistry,
    Counter,
    Gauge,
    Histogram,
    ProcessCollector,
)
from prometheus_client.utils import INF
from requests.adapters import HTTPAdapter

from ghmirror.core.constants import (
    GH_STATUS_API,
    STATUS_MAX_RETRIES,
    STATUS_SLEEP_TIME,
    STATUS_TIMEOUT,
)

__all__ = ["GithubStatus", "InMemoryCache", "StatsCache", "UsersCache"]


logging.basicConfig(level=logging.INFO, format="%(asctime)-15s %(message)s")

LOG = logging.getLogger(__name__)


class _GithubStatus:
    """Background monitor tracking whether the GitHub API is online."""

    def __init__(self, sleep_time, timeout, session):
        self.sleep_time = sleep_time
        self.timeout = timeout
        self.session = session
        # Optimistically assume online until the first check completes.
        self.online = True
        self._start_check()

    def _start_check(self):
        """Starting a daemon thread to check the GitHub API status.

        daemon is required so the thread is killed when the main
        thread completes. This is also useful for the tests.
        """
        thread = threading.Thread(target=self.check, daemon=True)
        thread.start()

    @staticmethod
    def _is_github_online(response):
        """Check if the Github API is online based on the response.

        If API Requests component status is major_outage, then it's offline.
        If API Requests component status is one of operational,
        degraded_performance, or partial_outage, then it's online.
        """
        components = response.json()["components"]
        return any(
            c["name"] == "API Requests" and c["status"] != "major_outage"
            for c in components
        )

    @classmethod
    def create(cls):
        """Class method to create a new instance of _GithubStatus."""
        sleep_time = int(os.environ.get("GITHUB_STATUS_SLEEP_TIME", STATUS_SLEEP_TIME))
        timeout = int(os.environ.get("GITHUB_STATUS_TIMEOUT", STATUS_TIMEOUT))
        session = requests.Session()
        session.mount("https://", HTTPAdapter(max_retries=STATUS_MAX_RETRIES))
        return cls(sleep_time=sleep_time, timeout=timeout, session=session)

    def check(self):
        """Method to be called in a thread.

        It will check the Github API status every self.sleep_time seconds and
        set self.online accordingly.
        """
        while True:
            try:
                # Bug fix: honor the instance timeout (configurable through
                # the GITHUB_STATUS_TIMEOUT environment variable) instead of
                # always using the STATUS_TIMEOUT constant.
                response = self.session.get(GH_STATUS_API, timeout=self.timeout)
                response.raise_for_status()
                self.online = self._is_github_online(response)
                if not self.online:
                    LOG.warning("Github API is offline, response: %s", response.text)
            except (
                requests.exceptions.ConnectionError,
                requests.exceptions.Timeout,
                requests.exceptions.HTTPError,
            ) as error:
                LOG.warning("Github API is offline, reason: %s", error)
                self.online = False
            time.sleep(self.sleep_time)


class GithubStatus:
    """Monostate class for sharing the Github API Status."""

    _instance = None
    _lock = threading.Lock()

    @classmethod
    def __new__(cls, *args, **kwargs):
        # NOTE(review): decorating __new__ with @classmethod is unusual
        # (__new__ is already an implicit static method) but works and is
        # kept as-is to preserve existing call behavior.
        with cls._lock:
            if cls._instance is None:
                cls._instance = _GithubStatus.create()
            return cls._instance


class InMemoryCacheBorg:
    """Monostate class for sharing the in-memory requests cache."""

    _state = {}

    def __init__(self):
        self.__dict__ = self._state
"""Dictionary-like implementation for caching requests.""" 139 | 140 | def __getattr__(self, item): 141 | """Safe class argument initialization. 142 | 143 | We do it here (instead of in the __init__()) so we don't overwrite 144 | them on when a new instance is created. 145 | """ 146 | setattr(self, item, {}) 147 | return getattr(self, item) 148 | 149 | def __contains__(self, item): 150 | return item in self._data 151 | 152 | def __getitem__(self, item): 153 | return self._data[item]["data"] 154 | 155 | def __setitem__(self, key, value): 156 | """Set the key-value pair as well as their total size""" 157 | key_size = sys.getsizeof(pickle.dumps(key)) 158 | value_size = sys.getsizeof(pickle.dumps(value)) 159 | self._data[key] = {"data": value, "size": key_size + value_size} 160 | 161 | def __iter__(self): 162 | return iter(self._data) 163 | 164 | def __len__(self): 165 | return len(self._data) 166 | 167 | def __sizeof__(self): 168 | """Calculate the size of the dictionary and all its contents""" 169 | total_cache_size = sys.getsizeof(self._data) 170 | for value in self._data.values(): 171 | total_cache_size += value["size"] 172 | return total_cache_size 173 | 174 | 175 | class UsersCacheBorg: 176 | """Monostate class for sharing the users cache.""" 177 | 178 | _state = {} 179 | 180 | def __init__(self): 181 | self.__dict__ = self._state 182 | 183 | 184 | class UsersCache(UsersCacheBorg): 185 | """Dict-like implementation for caching users information.""" 186 | 187 | def __getattr__(self, item): 188 | """Safe class argument initialization. 189 | 190 | We do it here (instead of in the __init__()) so we don't overwrite 191 | them when a new instance is created. 
192 | """ 193 | setattr(self, item, {}) 194 | return getattr(self, item) 195 | 196 | @staticmethod 197 | def _sha(key): 198 | return hashlib.sha1(key.encode()).hexdigest() 199 | 200 | def __contains__(self, item): 201 | return self._sha(item) in self._data 202 | 203 | def add(self, key, value=None): 204 | """Adding the value to the backing dict""" 205 | self._data[self._sha(key)] = value 206 | 207 | def get(self, key): 208 | """Getting the value from the backing dict""" 209 | return self._data.get(self._sha(key)) 210 | 211 | 212 | class StatsCacheBorg: 213 | """Monostate class for sharing the Statistics.""" 214 | 215 | _state = {} 216 | 217 | def __init__(self): 218 | self.__dict__ = self._state 219 | 220 | 221 | class StatsCache(StatsCacheBorg): 222 | """Statistics cacher.""" 223 | 224 | def __getattr__(self, item): 225 | """Safe class argument initialization. 226 | 227 | We do it here (instead of in the __init__()) so we don't overwrite 228 | them when a new instance is created. 229 | """ 230 | if item == "registry": 231 | # This will create the self.registry attribute, which 232 | # contains an instance of the CollectorRegistry. 233 | setattr(self, item, CollectorRegistry()) 234 | # Adding a ProcessCollector to the registry. The 235 | # ProcessCollector does not have to be an attribute, 236 | # since it's never manipulated directly. 
class StatsCacheBorg:
    """Monostate class for sharing the Statistics."""

    _state = {}

    def __init__(self):
        self.__dict__ = self._state


class StatsCache(StatsCacheBorg):
    """Statistics cacher."""

    def __getattr__(self, item):
        """Safe class argument initialization.

        We do it here (instead of in the __init__()) so we don't overwrite
        the metrics objects when a new instance is created. Each known
        attribute is created lazily on first access and registered exactly
        once in the shared registry.
        """
        if item == "registry":
            # This will create the self.registry attribute, which
            # contains an instance of the CollectorRegistry.
            setattr(self, item, CollectorRegistry())
            # Adding a ProcessCollector to the registry. The
            # ProcessCollector does not have to be an attribute,
            # since it's never manipulated directly.
            ProcessCollector(registry=self.registry)

        elif item == "histogram":
            # Adding a Histogram to the registry and also making
            # the Histogram available as an attribute so we can
            # call its observe()
            setattr(
                self,
                item,
                Histogram(
                    name="request_latency_seconds",
                    labelnames=("cache", "status", "method", "user"),
                    documentation="request latency histogram",
                    registry=self.registry,
                    buckets=(
                        0.05,
                        0.075,
                        0.1,
                        0.2,
                        0.3,
                        0.4,
                        0.5,
                        0.75,
                        1.0,
                        2.5,
                        5.0,
                        10.0,
                        INF,
                    ),
                ),
            )
        elif item == "counter":
            # Adding a Counter to the registry and also making
            # the Counter available as an attribute so we can
            # call its inc()
            setattr(
                self,
                item,
                Counter(
                    name="http_request",
                    documentation="total requests",
                    registry=self.registry,
                ),
            )

        elif item == "gauge_cache_size":
            setattr(
                self,
                item,
                Gauge(
                    name="github_mirror_cache_size",
                    documentation="cache size in bytes",
                    registry=self.registry,
                ),
            )

        elif item == "gauge_cached_objects":
            setattr(
                self,
                item,
                Gauge(
                    name="github_mirror_cached_objects",
                    documentation="number of cached objects",
                    registry=self.registry,
                ),
            )

        else:
            # Bug fix: the message previously read "...attribute {item}'"
            # with an unbalanced trailing quote.
            raise AttributeError(f"object has no attribute '{item}'")

        return getattr(self, item)

    def count(self):
        """Convenience method to increment the counter."""
        self.counter.inc(1)

    def observe(self, cache, status, value, method, user):
        """Convenience method to populate the histogram."""
        self.histogram.labels(
            cache=cache, status=status, method=method, user=user
        ).observe(value)

    def set_cache_size(self, value):
        """Convenience method to set the cache-size Gauge."""
        self.gauge_cache_size.set(value)

    def set_cached_objects(self, value):
        """Convenience method to set the cached-objects Gauge."""
        self.gauge_cached_objects.set(value)
30 | These restrictions translate to certain responsibilities for you if you 31 | distribute copies of the software, or if you modify it. 32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. 
The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 
97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 113 | 114 | These requirements apply to the modified work as a whole. If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 
128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. (This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. 
However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. 
Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. 
Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. If you wish to incorporate parts of the Program into other free 251 | programs whose distribution conditions are different, write to the author 252 | to ask for permission. 
For software which is copyrighted by the Free 253 | Software Foundation, write to the Free Software Foundation; we sometimes 254 | make exceptions for this. Our decision will be guided by the two goals 255 | of preserving the free status of all derivatives of our free software and 256 | of promoting the sharing and reuse of software generally. 257 | 258 | NO WARRANTY 259 | 260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY 261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN 262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED 264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS 266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE 267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, 268 | REPAIR OR CORRECTION. 269 | 270 | 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR 272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, 273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING 274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED 275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY 276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER 277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 278 | POSSIBILITY OF SUCH DAMAGES. 
279 | 280 | END OF TERMS AND CONDITIONS 281 | 282 | How to Apply These Terms to Your New Programs 283 | 284 | If you develop a new program, and you want it to be of the greatest 285 | possible use to the public, the best way to achieve this is to make it 286 | free software which everyone can redistribute and change under these terms. 287 | 288 | To do so, attach the following notices to the program. It is safest 289 | to attach them to the start of each source file to most effectively 290 | convey the exclusion of warranty; and each file should have at least 291 | the "copyright" line and a pointer to where the full notice is found. 292 | 293 | 294 | Copyright (C) 295 | 296 | This program is free software; you can redistribute it and/or modify 297 | it under the terms of the GNU General Public License as published by 298 | the Free Software Foundation; either version 2 of the License, or 299 | (at your option) any later version. 300 | 301 | This program is distributed in the hope that it will be useful, 302 | but WITHOUT ANY WARRANTY; without even the implied warranty of 303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 304 | GNU General Public License for more details. 305 | 306 | You should have received a copy of the GNU General Public License along 307 | with this program; if not, write to the Free Software Foundation, Inc., 308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 309 | 310 | Also add information on how to contact you by electronic and paper mail. 311 | 312 | If the program is interactive, make it output a short notice like this 313 | when it starts in an interactive mode: 314 | 315 | Gnomovision version 69, Copyright (C) year name of author 316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 317 | This is free software, and you are welcome to redistribute it 318 | under certain conditions; type `show c' for details. 
319 | 320 | The hypothetical commands `show w' and `show c' should show the appropriate 321 | parts of the General Public License. Of course, the commands you use may 322 | be called something other than `show w' and `show c'; they could even be 323 | mouse-clicks or menu items--whatever suits your program. 324 | 325 | You should also get your employer (if you work as a programmer) or your 326 | school, if any, to sign a "copyright disclaimer" for the program, if 327 | necessary. Here is a sample; alter the names: 328 | 329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program 330 | `Gnomovision' (which makes passes at compilers) written by James Hacker. 331 | 332 | , 1 April 1989 333 | Ty Coon, President of Vice 334 | 335 | This General Public License does not permit incorporating your program into 336 | proprietary programs. If your program is a subroutine library, you may 337 | consider it more useful to permit linking proprietary applications with the 338 | library. If this is what you want to do, use the GNU Lesser General 339 | Public License instead of this License. 
class MockResponse:
    """Lightweight stand-in for requests.Response used by the mocks below."""

    def __init__(
        self, content, headers, status_code, user=None, links=None, json_content=None
    ):
        self.content = content.encode()
        self.text = content
        self.headers = headers
        self.status_code = status_code
        self.user = user
        self.links = links
        self.json_content = json_content

    def json(self):
        """Return the canned JSON payload, defaulting to a login document."""
        if self.json_content is None:
            return {"login": self.user}
        return self.json_content

    def raise_for_status(self):
        """Mimic requests' behavior of raising on 4xx/5xx status codes."""
        if self.status_code < 400:
            return
        raise requests.exceptions.HTTPError


def mocked_requests_get_etag(*_args, **kwargs):
    """Serve 304 for conditional requests, otherwise a 200 with an ETag."""
    conditional = ("If-Modified-Since", "If-None-Match")
    if any(header in kwargs["headers"] for header in conditional):
        return MockResponse("", {}, 304)

    return MockResponse("", {"ETag": "foo"}, 200)


def mocked_requests_get_last_modified(*_args, **kwargs):
    """Serve 304 for conditional requests, otherwise a 200 with Last-Modified."""
    conditional = ("If-Modified-Since", "If-None-Match")
    if any(header in kwargs["headers"] for header in conditional):
        return MockResponse("", {}, 304)

    return MockResponse("", {"Last-Modified": "bar"}, 200)


def mocked_requests_get_user_orgs_auth(*_args, **_kwargs):
    """Return a response whose login matches the authorized bot user."""
    return MockResponse("", {}, 200, "app-sre-bot")


def mocked_requests_get_user_orgs_unauth(*_args, **_kwargs):
    """Return a response whose login does not match the authorized user."""
    return MockResponse("", {}, 200, "other")
def mocked_requests_get_error(*_args, **_kwargs):
    """Simulate an upstream server error."""
    return MockResponse("", {}, 500)


def mocked_requests_monitor_good(*_args, **_kwargs):
    """Simulate the status endpoint reporting an operational API."""
    payload = {"components": [{"name": "API Requests", "status": "operational"}]}
    return MockResponse("", {}, 200, json_content=payload)


def mocked_requests_monitor_bad(*_args, **_kwargs):
    """Simulate the status endpoint rejecting the check."""
    return MockResponse("", {}, 403)


def setup_mocked_requests_session_get(mocked_session, side_effect):
    """Wire a side effect into the mocked Session's get()."""
    mocked_session.return_value.get.side_effect = side_effect


def mocked_requests_rate_limited(*_args, **_kwargs):
    """Simulate GitHub's 403 rate-limit rejection."""
    return MockResponse("API rate limit exceeded", {}, 403)


def mocked_requests_api_corner_case(*_args, **kwargs):
    """Serve a list payload, conditionally returning 304 like the API does."""
    payload = [{"a": "b"}, {"c", "d"}]
    headers = kwargs["headers"]
    if "If-None-Match" in headers or "If-Modified-Since" in headers:
        return MockResponse("", {}, 304, json_content=payload)

    return MockResponse(
        "",
        {"ETag": "foo", "Last-Modified": "bar"},
        200,
        json_content=payload,
    )


@pytest.fixture(name="client")
def fixture_client():
    APP.config["TESTING"] = True

    with APP.test_client() as test_client:
        yield test_client


def test_healthz(client):
    resp = client.get("/healthz", follow_redirects=True)
    assert resp.status_code == 200
    assert resp.data == b"OK"


@mock.patch(
    "ghmirror.utils.extensions.session.request",
    side_effect=mocked_requests_get_etag,
)
@mock.patch("ghmirror.data_structures.monostate.requests.Session")
def test_mirror_etag(mock_monitor_session, _mock_get, client):
    setup_mocked_requests_session_get(
        mock_monitor_session, mocked_requests_monitor_good
    )
    # Metric samples checked throughout; built by concatenation so they are
    # byte-identical to the strings prometheus_client renders.
    hit_metric = (
        'request_latency_seconds_count{cache="ONLINE_HIT",'
        'method="GET",status="200",user="None"}'
    )
    miss_metric = (
        'request_latency_seconds_count{cache="ONLINE_MISS",'
        'method="GET",status="200",user="None"}'
    )

    # Initially the stats are zeroed
    response = client.get("/metrics")
    assert response.status_code == 200
    assert hit_metric not in str(response.data)
    assert miss_metric not in str(response.data)

    response = client.get("/repos/app-sre/github-mirror", follow_redirects=True)
    assert response.status_code == 200

    # First get is a cache_miss
    response = client.get("/metrics", follow_redirects=True)

    assert response.status_code == 200
    assert hit_metric not in str(response.data)
    assert miss_metric + " 1.0" in str(response.data)

    response = client.get("/repos/app-sre/github-mirror", follow_redirects=True)
    assert response.status_code == 200

    # Second get is a cache_hit
    response = client.get("/metrics", follow_redirects=True)

    assert response.status_code == 200
    assert hit_metric + " 1.0" in str(response.data)
    assert miss_metric + " 1.0" in str(response.data)
( 184 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 185 | 'method="GET",status="200",user="None"}' 186 | ) not in str(response.data) 187 | assert ( 188 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 189 | 'method="GET",status="200",user="None"}' 190 | ) not in str(response.data) 191 | 192 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 193 | assert response.status_code == 200 194 | 195 | # First get is a cache_miss 196 | response = client.get("/metrics", follow_redirects=True) 197 | 198 | assert response.status_code == 200 199 | assert ( 200 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 201 | 'method="GET",status="200",user="None"}' 202 | ) not in str(response.data) 203 | assert ( 204 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 205 | 'method="GET",status="200",user="None"} 1.0' 206 | ) in str(response.data) 207 | 208 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 209 | assert response.status_code == 200 210 | 211 | # Second get is a cache_hit 212 | response = client.get("/metrics", follow_redirects=True) 213 | assert response.status_code == 200 214 | assert ( 215 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 216 | 'method="GET",status="200",user="None"} 1.0' 217 | ) in str(response.data) 218 | assert ( 219 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 220 | 'method="GET",status="200",user="None"} 1.0' 221 | ) in str(response.data) 222 | 223 | 224 | @mock.patch( 225 | "ghmirror.utils.extensions.session.request", 226 | side_effect=mocked_requests_get_last_modified, 227 | ) 228 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 229 | def test_mirror_upstream_call(mock_monitor_session, mocked_request, client): 230 | setup_mocked_requests_session_get( 231 | mock_monitor_session, mocked_requests_monitor_good 232 | ) 233 | client.get("/user/repos?page=2", headers={"Authorization": "foo"}) 234 | expected_url = 
"https://api.github.com/user/repos?page=2" 235 | mocked_request.assert_called_with( 236 | method="GET", 237 | headers={"Authorization": "foo"}, 238 | url=expected_url, 239 | timeout=REQUESTS_TIMEOUT, 240 | params={"page": "2", "per_page": PER_PAGE_ELEMENTS}, 241 | ) 242 | 243 | 244 | @mock.patch( 245 | "ghmirror.utils.extensions.session.request", 246 | side_effect=mocked_requests_get_last_modified, 247 | ) 248 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 249 | def test_mirror_non_get(mock_monitor_session, mocked_request, client): 250 | setup_mocked_requests_session_get( 251 | mock_monitor_session, mocked_requests_monitor_good 252 | ) 253 | client.patch("/repos/foo/bar", data=b"foo") 254 | expected_url = "https://api.github.com/repos/foo/bar" 255 | mocked_request.assert_called_with( 256 | method="PATCH", 257 | data=b"foo", 258 | headers={}, 259 | url=expected_url, 260 | timeout=REQUESTS_TIMEOUT, 261 | params={"per_page": PER_PAGE_ELEMENTS}, 262 | ) 263 | 264 | 265 | @mock.patch( 266 | "ghmirror.decorators.checks.conditional_request", 267 | side_effect=mocked_requests_get_user_orgs_auth, 268 | ) 269 | @mock.patch("ghmirror.utils.extensions.session.request") 270 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 271 | def test_mirror_no_authorized_user( 272 | mock_monitor_session, mocked_request, mocked_cond_request, client 273 | ): 274 | setup_mocked_requests_session_get( 275 | mock_monitor_session, mocked_requests_monitor_good 276 | ) 277 | client.get("/repos/app-sre/github-mirror", headers={"Authorization": "foo"}) 278 | mocked_cond_request.assert_called_with( 279 | session=ANY, auth="foo", method="GET", url="https://api.github.com/user" 280 | ) 281 | mocked_request.assert_called_with( 282 | method="GET", 283 | headers={"Authorization": "foo"}, 284 | url="https://api.github.com/repos/app-sre/github-mirror", 285 | timeout=REQUESTS_TIMEOUT, 286 | params={"per_page": PER_PAGE_ELEMENTS}, 287 | ) 288 | 289 | 290 | @mock.patch( 
291 | "ghmirror.decorators.checks.conditional_request", 292 | side_effect=mocked_requests_get_user_orgs_auth, 293 | ) 294 | @mock.patch("ghmirror.utils.extensions.session.request") 295 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 296 | def test_mirror_no_authorized_user_cached( 297 | mock_monitor_session, mocked_request, mocked_cond_request, client 298 | ): 299 | setup_mocked_requests_session_get( 300 | mock_monitor_session, mocked_requests_monitor_good 301 | ) 302 | users_cache = UsersCache() 303 | auth = "foo" 304 | users_cache.add(auth) 305 | 306 | client.get("/repos/app-sre/github-mirror", headers={"Authorization": auth}) 307 | assert not mocked_cond_request.called 308 | mocked_request.assert_called_with( 309 | method="GET", 310 | headers={"Authorization": auth}, 311 | url="https://api.github.com/repos/app-sre/github-mirror", 312 | timeout=REQUESTS_TIMEOUT, 313 | params={"per_page": PER_PAGE_ELEMENTS}, 314 | ) 315 | 316 | 317 | @mock.patch("ghmirror.decorators.checks.AUTHORIZED_USERS", "app-sre-bot") 318 | @mock.patch( 319 | "ghmirror.decorators.checks.conditional_request", 320 | side_effect=mocked_requests_get_user_orgs_auth, 321 | ) 322 | @mock.patch("ghmirror.utils.extensions.session.request") 323 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 324 | def test_mirror_authorized_user( 325 | mock_monitor_session, mocked_request, mocked_cond_request, client 326 | ): 327 | setup_mocked_requests_session_get( 328 | mock_monitor_session, mocked_requests_monitor_good 329 | ) 330 | client.get("/repos/app-sre/github-mirror", headers={"Authorization": "foo"}) 331 | mocked_cond_request.assert_called_with( 332 | session=ANY, auth="foo", method="GET", url="https://api.github.com/user" 333 | ) 334 | mocked_request.assert_called_with( 335 | method="GET", 336 | headers={"Authorization": "foo"}, 337 | url="https://api.github.com/repos/app-sre/github-mirror", 338 | timeout=REQUESTS_TIMEOUT, 339 | params={"per_page": 
PER_PAGE_ELEMENTS}, 340 | ) 341 | 342 | 343 | @mock.patch("ghmirror.decorators.checks.AUTHORIZED_USERS", "app-sre-bot") 344 | @mock.patch( 345 | "ghmirror.decorators.checks.conditional_request", 346 | side_effect=mocked_requests_get_user_orgs_auth, 347 | ) 348 | @mock.patch("ghmirror.utils.extensions.session.request") 349 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 350 | def test_mirror_authorized_user_cached( 351 | mock_monitor_session, mocked_request, mocked_cond_request, client 352 | ): 353 | setup_mocked_requests_session_get( 354 | mock_monitor_session, mocked_requests_monitor_good 355 | ) 356 | users_cache = UsersCache() 357 | auth = "foo" 358 | users_cache.add(auth) 359 | 360 | client.get("/repos/app-sre/github-mirror", headers={"Authorization": auth}) 361 | assert not mocked_cond_request.called 362 | mocked_request.assert_called_with( 363 | method="GET", 364 | headers={"Authorization": auth}, 365 | url="https://api.github.com/repos/app-sre/github-mirror", 366 | timeout=REQUESTS_TIMEOUT, 367 | params={"per_page": PER_PAGE_ELEMENTS}, 368 | ) 369 | 370 | 371 | @mock.patch("ghmirror.decorators.checks.AUTHORIZED_USERS", "app-sre-bot") 372 | @mock.patch( 373 | "ghmirror.decorators.checks.conditional_request", 374 | side_effect=mocked_requests_get_user_orgs_unauth, 375 | ) 376 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 377 | def test_mirror_user_forbidden(mock_monitor_session, _mocked_cond_request, client): 378 | setup_mocked_requests_session_get( 379 | mock_monitor_session, mocked_requests_monitor_good 380 | ) 381 | response = client.get( 382 | "/repos/app-sre/github-mirror", headers={"Authorization": "foo"} 383 | ) 384 | assert response.status_code == 403 385 | 386 | 387 | @mock.patch("ghmirror.decorators.checks.AUTHORIZED_USERS", "app-sre-bot") 388 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 389 | def test_mirror_no_auth(mock_monitor_session, client): 390 | 
setup_mocked_requests_session_get( 391 | mock_monitor_session, mocked_requests_monitor_good 392 | ) 393 | response = client.get("/repos/app-sre/github-mirror", headers={}) 394 | assert response.status_code == 401 395 | 396 | 397 | @mock.patch("ghmirror.decorators.checks.AUTHORIZED_USERS", "app-sre-bot") 398 | @mock.patch( 399 | "ghmirror.decorators.checks.conditional_request", 400 | side_effect=mocked_requests_get_error, 401 | ) 402 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 403 | def test_mirror_auth_error(mock_monitor_session, _mocked_cond_request, client): 404 | setup_mocked_requests_session_get( 405 | mock_monitor_session, mocked_requests_monitor_good 406 | ) 407 | response = client.get( 408 | "/repos/app-sre/github-mirror", headers={"Authorization": "foo"} 409 | ) 410 | assert response.status_code == 500 411 | 412 | 413 | @mock.patch( 414 | "ghmirror.utils.extensions.session.request", 415 | side_effect=mocked_requests_get_etag, 416 | ) 417 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 418 | def test_offline_mode(mock_monitor_session, _mock_request, client): 419 | setup_mocked_requests_session_get( 420 | mock_monitor_session, mocked_requests_monitor_good 421 | ) 422 | # Let's wait for the mirror to consider itself online 423 | assert wait_for(lambda: GithubStatus().online, timeout=5) 424 | 425 | # First request will be a 200, intended to build up the cache, and 426 | # it is also an ONLINE_MISS 427 | response = client.get("/repos/app-sre/github-mirror") 428 | assert response.status_code == 200 429 | response = client.get("/metrics") 430 | assert response.status_code == 200 431 | assert ( 432 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 433 | 'method="GET",status="200",user="None"} 1.0' 434 | ) in str(response.data) 435 | 436 | # Now make the mirror go offline for upstream timeout 437 | setup_mocked_requests_session_get(mock_monitor_session, requests.exceptions.Timeout) 438 | 439 | # Let's wait for the mirror to 
consider itself offline 440 | assert wait_for(lambda: not GithubStatus().online, timeout=5) 441 | 442 | # Second request, mirror went offline already but response is in the 443 | # cache due to the first request 444 | response = client.get("/repos/app-sre/github-mirror") 445 | assert response.status_code == 200 446 | response = client.get("/metrics") 447 | assert response.status_code == 200 448 | assert ( 449 | 'request_latency_seconds_count{cache="OFFLINE_HIT",' 450 | 'method="GET",status="200",user="None"} 1.0' 451 | ) in str(response.data) 452 | 453 | # Additional request including auth header. Should MISS since the 454 | # cache key is built from resource + user 455 | response = client.get( 456 | "/repos/app-sre/github-mirror", headers={"Authorization": "foo"} 457 | ) 458 | assert response.status_code == 504 459 | response = client.get("/metrics") 460 | assert response.status_code == 200 461 | assert ( 462 | 'request_latency_seconds_count{cache="OFFLINE_MISS",' 463 | 'method="GET",status="504",user="None"} 1.0' 464 | ) in str(response.data) 465 | 466 | # POST, just to check if we are behaving. 
467 | response = client.post("/repos/app-sre/github-mirror", data=b"foo") 468 | assert response.status_code == 504 469 | response = client.get("/metrics") 470 | assert response.status_code == 200 471 | assert ( 472 | 'request_latency_seconds_count{cache="OFFLINE_MISS",' 473 | 'method="POST",status="504",user="None"} 1.0' 474 | ) in str(response.data) 475 | 476 | 477 | @mock.patch( 478 | "ghmirror.utils.extensions.session.request", 479 | side_effect=mocked_requests_get_etag, 480 | ) 481 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 482 | def test_offline_mode_upstream_error(mock_monitor_session, _mock_request, client): 483 | setup_mocked_requests_session_get( 484 | mock_monitor_session, mocked_requests_monitor_good 485 | ) 486 | # Let's wait for the mirror to consider itself online 487 | assert wait_for(lambda: GithubStatus().online, timeout=5) 488 | 489 | # First request will be a 200, intended to build up the cache, and 490 | # it is also an ONLINE_MISS 491 | assert wait_for(lambda: GithubStatus().online, timeout=5) 492 | response = client.get("/repos/app-sre/github-mirror") 493 | assert response.status_code == 200 494 | response = client.get("/metrics") 495 | assert response.status_code == 200 496 | assert ( 497 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 498 | 'method="GET",status="200",user="None"} 1.0' 499 | ) in str(response.data) 500 | 501 | # Now make the mirror go offline for upstream error 502 | setup_mocked_requests_session_get(mock_monitor_session, mocked_requests_get_error) 503 | 504 | # Let's wait for the mirror to consider itself offline 505 | assert wait_for(lambda: not GithubStatus().online, timeout=5) 506 | 507 | # For the second request, mirror went offline already but 508 | # response is in the cache due to the first request, so 509 | # we get a 200 and an OFFLINE_HIT 510 | response = client.get("/repos/app-sre/github-mirror") 511 | assert response.status_code == 200 512 | response = client.get("/metrics") 513 | assert
response.status_code == 200 514 | assert ( 515 | 'request_latency_seconds_count{cache="OFFLINE_HIT",' 516 | 'method="GET",status="200",user="None"} 1.0' 517 | ) in str(response.data) 518 | 519 | 520 | @mock.patch( 521 | "ghmirror.utils.extensions.session.request", 522 | side_effect=mocked_requests_rate_limited, 523 | ) 524 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 525 | def test_rate_limited(_mock_monitor_session, mock_request, client): 526 | setup_mocked_requests_session_get( 527 | _mock_monitor_session, mocked_requests_monitor_good 528 | ) 529 | # First request will get a 403/rate-limited. Because it's not cached 530 | # yet, we receive the same 403 531 | response = client.get("/repos/app-sre/github-mirror") 532 | assert response.status_code == 403 533 | response = client.get("/metrics") 534 | # In the metrics, we see a RATE_LIMITED_MISS 535 | assert response.status_code == 200 536 | assert ( 537 | 'request_latency_seconds_count{cache="RATE_LIMITED_MISS",' 538 | 'method="GET",status="403",user="None"} 1.0' 539 | ) in str(response.data) 540 | 541 | # Second request will be a 200, intended to build up the cache, so 542 | # it is an ONLINE_MISS 543 | mock_request.side_effect = mocked_requests_get_etag 544 | response = client.get("/repos/app-sre/github-mirror") 545 | assert response.status_code == 200 546 | response = client.get("/metrics") 547 | assert response.status_code == 200 548 | assert ( 549 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 550 | 'method="GET",status="200",user="None"} 1.0' 551 | ) in str(response.data) 552 | 553 | # For the third request, the response is a 403/rate-limited, 554 | # but because the resource was cached we want to see a 200 555 | # with RATE_LIMITED_HIT 556 | mock_request.side_effect = mocked_requests_rate_limited 557 | response = client.get("/repos/app-sre/github-mirror") 558 | assert response.status_code == 200 559 | response = client.get("/metrics") 560 | assert response.status_code == 200 561 
| assert ( 562 | 'request_latency_seconds_count{cache="RATE_LIMITED_HIT",' 563 | 'method="GET",status="200",user="None"} 1.0' 564 | ) in str(response.data) 565 | 566 | 567 | @mock.patch( 568 | "ghmirror.utils.extensions.session.request", 569 | side_effect=mocked_requests_api_corner_case, 570 | ) 571 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 572 | def test_pagination_corner_case_custom_page_elements( 573 | mock_monitor_session, _mock_get, client 574 | ): 575 | setup_mocked_requests_session_get( 576 | mock_monitor_session, mocked_requests_monitor_good 577 | ) 578 | # Initially the stats are zeroed 579 | response = client.get("/metrics") 580 | assert response.status_code == 200 581 | assert ( 582 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 583 | 'method="GET",status="200",user="None"}' 584 | ) not in str(response.data) 585 | assert ( 586 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 587 | 'method="GET",status="200",user="None"}' 588 | ) not in str(response.data) 589 | 590 | response = client.get( 591 | "/repos/app-sre/github-mirror?per_page=2", follow_redirects=True 592 | ) 593 | assert response.status_code == 200 594 | 595 | # First get is a cache_miss 596 | response = client.get("/metrics", follow_redirects=True) 597 | 598 | assert response.status_code == 200 599 | assert ( 600 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 601 | 'method="GET",status="200",user="None"}' 602 | ) not in str(response.data) 603 | assert ( 604 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 605 | 'method="GET",status="200",user="None"} 1.0' 606 | ) in str(response.data) 607 | 608 | response = client.get( 609 | "/repos/app-sre/github-mirror?per_page=2", follow_redirects=True 610 | ) 611 | assert response.status_code == 200 612 | 613 | # Second get is a cache_miss as the request content has the same 614 | # number of elements as the PER_PAGE_ELEMENTS and links content 615 | # is empty 616 | response = client.get("/metrics", 
follow_redirects=True) 617 | 618 | assert response.status_code == 200 619 | assert ( 620 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 621 | 'method="GET",status="200",user="None"}' 622 | ) not in str(response.data) 623 | assert ( 624 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 625 | 'method="GET",status="200",user="None"} 2.0' 626 | ) in str(response.data) 627 | 628 | 629 | @mock.patch("ghmirror.core.mirror_requests.PER_PAGE_ELEMENTS", 2) 630 | @mock.patch( 631 | "ghmirror.utils.extensions.session.request", 632 | side_effect=mocked_requests_api_corner_case, 633 | ) 634 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 635 | def test_pagination_corner_case(mock_monitor_session, _mock_get, client): 636 | setup_mocked_requests_session_get( 637 | mock_monitor_session, mocked_requests_monitor_good 638 | ) 639 | # Initially the stats are zeroed 640 | response = client.get("/metrics") 641 | assert response.status_code == 200 642 | assert ( 643 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 644 | 'method="GET",status="200",user="None"}' 645 | ) not in str(response.data) 646 | assert ( 647 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 648 | 'method="GET",status="200",user="None"}' 649 | ) not in str(response.data) 650 | 651 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 652 | assert response.status_code == 200 653 | 654 | # First get is a cache_miss 655 | response = client.get("/metrics", follow_redirects=True) 656 | 657 | assert response.status_code == 200 658 | assert ( 659 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 660 | 'method="GET",status="200",user="None"}' 661 | ) not in str(response.data) 662 | assert ( 663 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 664 | 'method="GET",status="200",user="None"} 1.0' 665 | ) in str(response.data) 666 | 667 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 668 | assert response.status_code == 200 
669 | 670 | # Second get is a cache_miss as the request content has the same 671 | # number of elements as the PER_PAGE_ELEMENTS and links content 672 | # is empty 673 | response = client.get("/metrics", follow_redirects=True) 674 | 675 | assert response.status_code == 200 676 | assert ( 677 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 678 | 'method="GET",status="200",user="None"}' 679 | ) not in str(response.data) 680 | assert ( 681 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 682 | 'method="GET",status="200",user="None"} 2.0' 683 | ) in str(response.data) 684 | 685 | 686 | @mock.patch( 687 | "ghmirror.utils.extensions.session.request", 688 | side_effect=requests.exceptions.Timeout, 689 | ) 690 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 691 | def test_mirror_request_timeout(mock_monitor_session, _mock_get, client): 692 | setup_mocked_requests_session_get( 693 | mock_monitor_session, mocked_requests_monitor_good 694 | ) 695 | # Initially the stats are zeroed 696 | response = client.get("/metrics") 697 | assert response.status_code == 200 698 | assert ( 699 | 'request_latency_seconds_count{cache="ONLINE_HIT",method="GET",status="200"}' 700 | ) not in str(response.data) 701 | assert ( 702 | 'request_latency_seconds_count{cache="ONLINE_MISS",method="GET",status="200"}' 703 | ) not in str(response.data) 704 | 705 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 706 | assert response.status_code == 502 707 | assert "Timeout" in response.data.decode("utf-8") 708 | 709 | 710 | @mock.patch( 711 | "ghmirror.utils.extensions.session.request", 712 | side_effect=mocked_requests_get_etag, 713 | ) 714 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 715 | def test_mirror_request_timeout_hit(mock_monitor_session, mock_get, client): 716 | setup_mocked_requests_session_get( 717 | mock_monitor_session, mocked_requests_monitor_good 718 | ) 719 | # Initially the stats are zeroed 720 | 
response = client.get("/metrics") 721 | assert response.status_code == 200 722 | assert ( 723 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 724 | 'method="GET",status="200",user="None"}' 725 | ) not in str(response.data) 726 | assert ( 727 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 728 | 'method="GET",status="200",user="None"}' 729 | ) not in str(response.data) 730 | 731 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 732 | assert response.status_code == 200 733 | 734 | # First get is a cache_miss 735 | response = client.get("/metrics", follow_redirects=True) 736 | 737 | assert response.status_code == 200 738 | assert ( 739 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 740 | 'method="GET",status="200",user="None"}' 741 | ) not in str(response.data) 742 | assert ( 743 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 744 | 'method="GET",status="200",user="None"} 1.0' 745 | ) in str(response.data) 746 | 747 | mock_get.side_effect = requests.exceptions.Timeout 748 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 749 | assert response.status_code == 200 750 | 751 | # Second get is a cache_hit 752 | response = client.get("/metrics", follow_redirects=True) 753 | 754 | assert response.status_code == 200 755 | assert ( 756 | 'request_latency_seconds_count{cache="API_TIMEOUT_HIT",' 757 | 'method="GET",status="200",user="None"} 1.0' 758 | ) in str(response.data) 759 | assert ( 760 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 761 | 'method="GET",status="200",user="None"} 1.0' 762 | ) in str(response.data) 763 | 764 | 765 | @mock.patch( 766 | "ghmirror.utils.extensions.session.request", 767 | side_effect=mocked_requests_get_etag, 768 | ) 769 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 770 | def test_mirror_request_5xx(mock_monitor_session, mock_get, client): 771 | setup_mocked_requests_session_get( 772 | mock_monitor_session, 
mocked_requests_monitor_good 773 | ) 774 | # Initially the stats are zeroed 775 | response = client.get("/metrics") 776 | assert response.status_code == 200 777 | assert ( 778 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 779 | 'method="GET",status="200",user="None"}' 780 | ) not in str(response.data) 781 | assert ( 782 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 783 | 'method="GET",status="200",user="None"}' 784 | ) not in str(response.data) 785 | 786 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 787 | assert response.status_code == 200 788 | 789 | # First get is a cache_miss 790 | response = client.get("/metrics", follow_redirects=True) 791 | 792 | assert response.status_code == 200 793 | assert ( 794 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 795 | 'method="GET",status="200",user="None"}' 796 | ) not in str(response.data) 797 | assert ( 798 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 799 | 'method="GET",status="200",user="None"} 1.0' 800 | ) in str(response.data) 801 | 802 | mock_get.side_effect = mocked_requests_get_error 803 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 804 | assert response.status_code == 200 805 | 806 | # Second get is a cache_hit 807 | response = client.get("/metrics", follow_redirects=True) 808 | 809 | assert response.status_code == 200 810 | assert ( 811 | 'request_latency_seconds_count{cache="API_ERROR_HIT",' 812 | 'method="GET",status="200",user="None"} 1.0' 813 | ) in str(response.data) 814 | assert ( 815 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 816 | 'method="GET",status="200",user="None"} 1.0' 817 | ) in str(response.data) 818 | 819 | 820 | @mock.patch( 821 | "ghmirror.utils.extensions.session.request", 822 | side_effect=mocked_requests_get_etag, 823 | ) 824 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 825 | def test_mirror_request_5xx_miss(mock_monitor_session, mock_get, client): 826 | 
setup_mocked_requests_session_get( 827 | mock_monitor_session, mocked_requests_monitor_good 828 | ) 829 | # Initially the stats are zeroed 830 | response = client.get("/metrics") 831 | assert response.status_code == 200 832 | assert ( 833 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 834 | 'method="GET",status="200",user="None"}' 835 | ) not in str(response.data) 836 | assert ( 837 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 838 | 'method="GET",status="200",user="None"}' 839 | ) not in str(response.data) 840 | 841 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 842 | assert response.status_code == 200 843 | 844 | # First get is a cache_miss 845 | response = client.get("/metrics", follow_redirects=True) 846 | 847 | assert response.status_code == 200 848 | assert ( 849 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 850 | 'method="GET",status="200",user="None"}' 851 | ) not in str(response.data) 852 | assert ( 853 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 854 | 'method="GET",status="200",user="None"} 1.0' 855 | ) in str(response.data) 856 | 857 | mock_get.side_effect = mocked_requests_get_error 858 | response = client.get("/repos/app-sre/github-mirror/2", follow_redirects=True) 859 | assert response.status_code == 500 860 | 861 | # Second get is a cache_miss (uncached resource), so the upstream 500 is returned 862 | response = client.get("/metrics", follow_redirects=True) 863 | 864 | assert response.status_code == 200 865 | assert ( 866 | 'request_latency_seconds_count{cache="API_ERROR_MISS",' 867 | 'method="GET",status="500",user="None"} 1.0' 868 | ) in str(response.data) 869 | assert ( 870 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 871 | 'method="GET",status="200",user="None"} 1.0' 872 | ) in str(response.data) 873 | 874 | 875 | @mock.patch( 876 | "ghmirror.utils.extensions.session.request", 877 | side_effect=mocked_requests_get_etag, 878 | ) 879 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 880 | def 
test_mirror_request_connection_error_hit(mock_monitor_session, mock_get, client): 881 | setup_mocked_requests_session_get( 882 | mock_monitor_session, mocked_requests_monitor_good 883 | ) 884 | # Initially the stats are zeroed 885 | response = client.get("/metrics") 886 | assert response.status_code == 200 887 | assert ( 888 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 889 | 'method="GET",status="200",user="None"}' 890 | ) not in str(response.data) 891 | assert ( 892 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 893 | 'method="GET",status="200",user="None"}' 894 | ) not in str(response.data) 895 | 896 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 897 | assert response.status_code == 200 898 | 899 | # First get is a cache_miss 900 | response = client.get("/metrics", follow_redirects=True) 901 | 902 | assert response.status_code == 200 903 | assert ( 904 | 'request_latency_seconds_count{cache="ONLINE_HIT",' 905 | 'method="GET",status="200",user="None"}' 906 | ) not in str(response.data) 907 | assert ( 908 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 909 | 'method="GET",status="200",user="None"} 1.0' 910 | ) in str(response.data) 911 | 912 | mock_get.side_effect = requests.exceptions.ConnectionError 913 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 914 | assert response.status_code == 200 915 | 916 | # Second get is a cache_hit 917 | response = client.get("/metrics", follow_redirects=True) 918 | 919 | assert response.status_code == 200 920 | assert ( 921 | 'request_latency_seconds_count{cache="API_CONNECTION_ERROR_HIT",' 922 | 'method="GET",status="200",user="None"} 1.0' 923 | ) in str(response.data) 924 | assert ( 925 | 'request_latency_seconds_count{cache="ONLINE_MISS",' 926 | 'method="GET",status="200",user="None"} 1.0' 927 | ) in str(response.data) 928 | 929 | 930 | @mock.patch( 931 | "ghmirror.utils.extensions.session.request", 932 | side_effect=mocked_requests_get_etag, 
933 | ) 934 | @mock.patch("ghmirror.data_structures.monostate.requests.Session") 935 | def test_mirror_request_connection_error_miss(mock_monitor_session, mock_get, client): 936 | setup_mocked_requests_session_get( 937 | mock_monitor_session, mocked_requests_monitor_good 938 | ) 939 | mock_get.side_effect = requests.exceptions.ConnectionError 940 | response = client.get("/repos/app-sre/github-mirror", follow_redirects=True) 941 | assert response.status_code == 502 942 | -------------------------------------------------------------------------------- /uv.lock: -------------------------------------------------------------------------------- 1 | version = 1 2 | revision = 3 3 | requires-python = "==3.11.*" 4 | 5 | [[package]] 6 | name = "async-timeout" 7 | version = "5.0.1" 8 | source = { registry = "https://pypi.org/simple" } 9 | sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } 10 | wheels = [ 11 | { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, 12 | ] 13 | 14 | [[package]] 15 | name = "blinker" 16 | version = "1.9.0" 17 | source = { registry = "https://pypi.org/simple" } 18 | sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" } 19 | wheels = [ 20 | { url = 
"https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" }, 21 | ] 22 | 23 | [[package]] 24 | name = "certifi" 25 | version = "2025.11.12" 26 | source = { registry = "https://pypi.org/simple" } 27 | sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } 28 | wheels = [ 29 | { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, 30 | ] 31 | 32 | [[package]] 33 | name = "charset-normalizer" 34 | version = "3.4.4" 35 | source = { registry = "https://pypi.org/simple" } 36 | sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } 37 | wheels = [ 38 | { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, 39 | { url = 
"https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, 40 | { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, 41 | { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, 42 | { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, 43 | { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, 44 | { url = 
"https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, 45 | { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, 46 | { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, 47 | { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" }, 48 | { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, 49 | { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, 50 | { url = 
"https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, 51 | { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, 52 | { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, 53 | { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, 54 | { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, 55 | ] 56 | 57 | [[package]] 58 | name = "click" 59 | version = "8.3.1" 60 | source = { registry = "https://pypi.org/simple" } 61 | dependencies = [ 62 | { name = "colorama", marker = "sys_platform == 'win32'" }, 63 | ] 64 | sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = 
"sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } 65 | wheels = [ 66 | { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, 67 | ] 68 | 69 | [[package]] 70 | name = "colorama" 71 | version = "0.4.6" 72 | source = { registry = "https://pypi.org/simple" } 73 | sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } 74 | wheels = [ 75 | { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, 76 | ] 77 | 78 | [[package]] 79 | name = "coverage" 80 | version = "7.13.0" 81 | source = { registry = "https://pypi.org/simple" } 82 | sdist = { url = "https://files.pythonhosted.org/packages/b6/45/2c665ca77ec32ad67e25c77daf1cee28ee4558f3bc571cdbaf88a00b9f23/coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936", size = 820905, upload-time = "2025-12-08T13:14:38.055Z" } 83 | wheels = [ 84 | { url = "https://files.pythonhosted.org/packages/f1/dc/888bf90d8b1c3d0b4020a40e52b9f80957d75785931ec66c7dfaccc11c7d/coverage-7.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0dfa3855031070058add1a59fdfda0192fd3e8f97e7c81de0596c145dea51820", size = 218104, upload-time = "2025-12-08T13:12:33.333Z" }, 85 | { url = 
"https://files.pythonhosted.org/packages/8d/ea/069d51372ad9c380214e86717e40d1a743713a2af191cfba30a0911b0a4a/coverage-7.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fdb6f54f38e334db97f72fa0c701e66d8479af0bc3f9bfb5b90f1c30f54500f", size = 218606, upload-time = "2025-12-08T13:12:34.498Z" }, 86 | { url = "https://files.pythonhosted.org/packages/68/09/77b1c3a66c2aa91141b6c4471af98e5b1ed9b9e6d17255da5eb7992299e3/coverage-7.13.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7e442c013447d1d8d195be62852270b78b6e255b79b8675bad8479641e21fd96", size = 248999, upload-time = "2025-12-08T13:12:36.02Z" }, 87 | { url = "https://files.pythonhosted.org/packages/0a/32/2e2f96e9d5691eaf1181d9040f850b8b7ce165ea10810fd8e2afa534cef7/coverage-7.13.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ed5630d946859de835a85e9a43b721123a8a44ec26e2830b296d478c7fd4259", size = 250925, upload-time = "2025-12-08T13:12:37.221Z" }, 88 | { url = "https://files.pythonhosted.org/packages/7b/45/b88ddac1d7978859b9a39a8a50ab323186148f1d64bc068f86fc77706321/coverage-7.13.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f15a931a668e58087bc39d05d2b4bf4b14ff2875b49c994bbdb1c2217a8daeb", size = 253032, upload-time = "2025-12-08T13:12:38.763Z" }, 89 | { url = "https://files.pythonhosted.org/packages/71/cb/e15513f94c69d4820a34b6bf3d2b1f9f8755fa6021be97c7065442d7d653/coverage-7.13.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:30a3a201a127ea57f7e14ba43c93c9c4be8b7d17a26e03bb49e6966d019eede9", size = 249134, upload-time = "2025-12-08T13:12:40.382Z" }, 90 | { url = "https://files.pythonhosted.org/packages/09/61/d960ff7dc9e902af3310ce632a875aaa7860f36d2bc8fc8b37ee7c1b82a5/coverage-7.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7a485ff48fbd231efa32d58f479befce52dcb6bfb2a88bb7bf9a0b89b1bc8030", size = 250731, upload-time 
= "2025-12-08T13:12:41.992Z" }, 91 | { url = "https://files.pythonhosted.org/packages/98/34/c7c72821794afc7c7c2da1db8f00c2c98353078aa7fb6b5ff36aac834b52/coverage-7.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:22486cdafba4f9e471c816a2a5745337742a617fef68e890d8baf9f3036d7833", size = 248795, upload-time = "2025-12-08T13:12:43.331Z" }, 92 | { url = "https://files.pythonhosted.org/packages/0a/5b/e0f07107987a43b2def9aa041c614ddb38064cbf294a71ef8c67d43a0cdd/coverage-7.13.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:263c3dbccc78e2e331e59e90115941b5f53e85cfcc6b3b2fbff1fd4e3d2c6ea8", size = 248514, upload-time = "2025-12-08T13:12:44.546Z" }, 93 | { url = "https://files.pythonhosted.org/packages/71/c2/c949c5d3b5e9fc6dd79e1b73cdb86a59ef14f3709b1d72bf7668ae12e000/coverage-7.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5330fa0cc1f5c3c4c3bb8e101b742025933e7848989370a1d4c8c5e401ea753", size = 249424, upload-time = "2025-12-08T13:12:45.759Z" }, 94 | { url = "https://files.pythonhosted.org/packages/11/f1/bbc009abd6537cec0dffb2cc08c17a7f03de74c970e6302db4342a6e05af/coverage-7.13.0-cp311-cp311-win32.whl", hash = "sha256:0f4872f5d6c54419c94c25dd6ae1d015deeb337d06e448cd890a1e89a8ee7f3b", size = 220597, upload-time = "2025-12-08T13:12:47.378Z" }, 95 | { url = "https://files.pythonhosted.org/packages/c4/f6/d9977f2fb51c10fbaed0718ce3d0a8541185290b981f73b1d27276c12d91/coverage-7.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51a202e0f80f241ccb68e3e26e19ab5b3bf0f813314f2c967642f13ebcf1ddfe", size = 221536, upload-time = "2025-12-08T13:12:48.7Z" }, 96 | { url = "https://files.pythonhosted.org/packages/be/ad/3fcf43fd96fb43e337a3073dea63ff148dcc5c41ba7a14d4c7d34efb2216/coverage-7.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:d2a9d7f1c11487b1c69367ab3ac2d81b9b3721f097aa409a3191c3e90f8f3dd7", size = 220206, upload-time = "2025-12-08T13:12:50.365Z" }, 97 | { url = 
"https://files.pythonhosted.org/packages/8d/4c/1968f32fb9a2604645827e11ff84a31e59d532e01995f904723b4f5328b3/coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904", size = 210068, upload-time = "2025-12-08T13:14:36.236Z" }, 98 | ] 99 | 100 | [package.optional-dependencies] 101 | toml = [ 102 | { name = "tomli", marker = "python_full_version <= '3.11'" }, 103 | ] 104 | 105 | [[package]] 106 | name = "flask" 107 | version = "3.1.2" 108 | source = { registry = "https://pypi.org/simple" } 109 | dependencies = [ 110 | { name = "blinker" }, 111 | { name = "click" }, 112 | { name = "itsdangerous" }, 113 | { name = "jinja2" }, 114 | { name = "markupsafe" }, 115 | { name = "werkzeug" }, 116 | ] 117 | sdist = { url = "https://files.pythonhosted.org/packages/dc/6d/cfe3c0fcc5e477df242b98bfe186a4c34357b4847e87ecaef04507332dab/flask-3.1.2.tar.gz", hash = "sha256:bf656c15c80190ed628ad08cdfd3aaa35beb087855e2f494910aa3774cc4fd87", size = 720160, upload-time = "2025-08-19T21:03:21.205Z" } 118 | wheels = [ 119 | { url = "https://files.pythonhosted.org/packages/ec/f9/7f9263c5695f4bd0023734af91bedb2ff8209e8de6ead162f35d8dc762fd/flask-3.1.2-py3-none-any.whl", hash = "sha256:ca1d8112ec8a6158cc29ea4858963350011b5c846a414cdb7a954aa9e967d03c", size = 103308, upload-time = "2025-08-19T21:03:19.499Z" }, 120 | ] 121 | 122 | [[package]] 123 | name = "github-mirror" 124 | version = "0.1.0" 125 | source = { virtual = "." 
} 126 | dependencies = [ 127 | { name = "flask" }, 128 | { name = "gunicorn" }, 129 | { name = "prometheus-client" }, 130 | { name = "redis" }, 131 | { name = "requests" }, 132 | ] 133 | 134 | [package.dev-dependencies] 135 | dev = [ 136 | { name = "mypy" }, 137 | { name = "pytest" }, 138 | { name = "pytest-cov" }, 139 | { name = "pytest-forked" }, 140 | { name = "ruff" }, 141 | { name = "types-requests" }, 142 | ] 143 | 144 | [package.metadata] 145 | requires-dist = [ 146 | { name = "flask", specifier = "==3.1.2" }, 147 | { name = "gunicorn", specifier = "==23.0.0" }, 148 | { name = "prometheus-client", specifier = "==0.22.1" }, 149 | { name = "redis", specifier = "==6.4.0" }, 150 | { name = "requests", specifier = "==2.32.5" }, 151 | ] 152 | 153 | [package.metadata.requires-dev] 154 | dev = [ 155 | { name = "mypy", specifier = "==1.17.1" }, 156 | { name = "pytest", specifier = "==9.0.1" }, 157 | { name = "pytest-cov", specifier = "==6.3.0" }, 158 | { name = "pytest-forked", specifier = "==1.6.0" }, 159 | { name = "ruff", specifier = "==0.12.12" }, 160 | { name = "types-requests", specifier = "==2.32.4.20250809" }, 161 | ] 162 | 163 | [[package]] 164 | name = "gunicorn" 165 | version = "23.0.0" 166 | source = { registry = "https://pypi.org/simple" } 167 | dependencies = [ 168 | { name = "packaging" }, 169 | ] 170 | sdist = { url = "https://files.pythonhosted.org/packages/34/72/9614c465dc206155d93eff0ca20d42e1e35afc533971379482de953521a4/gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec", size = 375031, upload-time = "2024-08-10T20:25:27.378Z" } 171 | wheels = [ 172 | { url = "https://files.pythonhosted.org/packages/cb/7d/6dac2a6e1eba33ee43f318edbed4ff29151a49b5d37f080aad1e6469bca4/gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d", size = 85029, upload-time = "2024-08-10T20:25:24.996Z" }, 173 | ] 174 | 175 | [[package]] 176 | name = "idna" 177 | 
version = "3.11" 178 | source = { registry = "https://pypi.org/simple" } 179 | sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } 180 | wheels = [ 181 | { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, 182 | ] 183 | 184 | [[package]] 185 | name = "iniconfig" 186 | version = "2.3.0" 187 | source = { registry = "https://pypi.org/simple" } 188 | sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } 189 | wheels = [ 190 | { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, 191 | ] 192 | 193 | [[package]] 194 | name = "itsdangerous" 195 | version = "2.2.0" 196 | source = { registry = "https://pypi.org/simple" } 197 | sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } 198 | wheels = [ 199 | { url = 
"https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, 200 | ] 201 | 202 | [[package]] 203 | name = "jinja2" 204 | version = "3.1.6" 205 | source = { registry = "https://pypi.org/simple" } 206 | dependencies = [ 207 | { name = "markupsafe" }, 208 | ] 209 | sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } 210 | wheels = [ 211 | { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, 212 | ] 213 | 214 | [[package]] 215 | name = "markupsafe" 216 | version = "3.0.3" 217 | source = { registry = "https://pypi.org/simple" } 218 | sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } 219 | wheels = [ 220 | { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, 221 | { url = 
"https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, 222 | { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, 223 | { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, 224 | { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, 225 | { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, 226 | { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = 
"2025-09-27T18:36:25.95Z" }, 227 | { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, 228 | { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, 229 | { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, 230 | { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, 231 | ] 232 | 233 | [[package]] 234 | name = "mypy" 235 | version = "1.17.1" 236 | source = { registry = "https://pypi.org/simple" } 237 | dependencies = [ 238 | { name = "mypy-extensions" }, 239 | { name = "pathspec" }, 240 | { name = "typing-extensions" }, 241 | ] 242 | sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" } 243 | wheels = [ 244 | { url = 
"https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009, upload-time = "2025-07-31T07:53:23.037Z" }, 245 | { url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482, upload-time = "2025-07-31T07:53:26.151Z" }, 246 | { url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883, upload-time = "2025-07-31T07:53:47.948Z" }, 247 | { url = "https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215, upload-time = "2025-07-31T07:54:04.031Z" }, 248 | { url = "https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956, upload-time = "2025-07-31T07:53:36.263Z" }, 249 | { url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307, upload-time = "2025-07-31T07:53:59.734Z" }, 250 | { url = 
"https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" }, 251 | ] 252 | 253 | [[package]] 254 | name = "mypy-extensions" 255 | version = "1.1.0" 256 | source = { registry = "https://pypi.org/simple" } 257 | sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } 258 | wheels = [ 259 | { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, 260 | ] 261 | 262 | [[package]] 263 | name = "packaging" 264 | version = "25.0" 265 | source = { registry = "https://pypi.org/simple" } 266 | sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } 267 | wheels = [ 268 | { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, 269 | ] 270 | 271 | [[package]] 272 | name = "pathspec" 273 | version = "0.12.1" 274 | source = { registry = "https://pypi.org/simple" } 275 | sdist = { url = 
"https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } 276 | wheels = [ 277 | { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, 278 | ] 279 | 280 | [[package]] 281 | name = "pluggy" 282 | version = "1.6.0" 283 | source = { registry = "https://pypi.org/simple" } 284 | sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } 285 | wheels = [ 286 | { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, 287 | ] 288 | 289 | [[package]] 290 | name = "prometheus-client" 291 | version = "0.22.1" 292 | source = { registry = "https://pypi.org/simple" } 293 | sdist = { url = "https://files.pythonhosted.org/packages/5e/cf/40dde0a2be27cc1eb41e333d1a674a74ce8b8b0457269cc640fd42b07cf7/prometheus_client-0.22.1.tar.gz", hash = "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28", size = 69746, upload-time = "2025-06-02T14:29:01.152Z" } 294 | wheels = [ 295 | { url = "https://files.pythonhosted.org/packages/32/ae/ec06af4fe3ee72d16973474f122541746196aaa16cea6f66d18b963c6177/prometheus_client-0.22.1-py3-none-any.whl", hash = 
"sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094", size = 58694, upload-time = "2025-06-02T14:29:00.068Z" }, 296 | ] 297 | 298 | [[package]] 299 | name = "py" 300 | version = "1.11.0" 301 | source = { registry = "https://pypi.org/simple" } 302 | sdist = { url = "https://files.pythonhosted.org/packages/98/ff/fec109ceb715d2a6b4c4a85a61af3b40c723a961e8828319fbcb15b868dc/py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", size = 207796, upload-time = "2021-11-04T17:17:01.377Z" } 303 | wheels = [ 304 | { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708, upload-time = "2021-11-04T17:17:00.152Z" }, 305 | ] 306 | 307 | [[package]] 308 | name = "pygments" 309 | version = "2.19.2" 310 | source = { registry = "https://pypi.org/simple" } 311 | sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } 312 | wheels = [ 313 | { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, 314 | ] 315 | 316 | [[package]] 317 | name = "pytest" 318 | version = "9.0.1" 319 | source = { registry = "https://pypi.org/simple" } 320 | dependencies = [ 321 | { name = "colorama", marker = "sys_platform == 'win32'" }, 322 | { name = "iniconfig" }, 323 | { name = "packaging" }, 324 | { name = "pluggy" }, 325 | { name = "pygments" }, 326 | ] 327 | sdist = { url = 
"https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" } 328 | wheels = [ 329 | { url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" }, 330 | ] 331 | 332 | [[package]] 333 | name = "pytest-cov" 334 | version = "6.3.0" 335 | source = { registry = "https://pypi.org/simple" } 336 | dependencies = [ 337 | { name = "coverage", extra = ["toml"] }, 338 | { name = "pluggy" }, 339 | { name = "pytest" }, 340 | ] 341 | sdist = { url = "https://files.pythonhosted.org/packages/30/4c/f883ab8f0daad69f47efdf95f55a66b51a8b939c430dadce0611508d9e99/pytest_cov-6.3.0.tar.gz", hash = "sha256:35c580e7800f87ce892e687461166e1ac2bcb8fb9e13aea79032518d6e503ff2", size = 70398, upload-time = "2025-09-06T15:40:14.361Z" } 342 | wheels = [ 343 | { url = "https://files.pythonhosted.org/packages/80/b4/bb7263e12aade3842b938bc5c6958cae79c5ee18992f9b9349019579da0f/pytest_cov-6.3.0-py3-none-any.whl", hash = "sha256:440db28156d2468cafc0415b4f8e50856a0d11faefa38f30906048fe490f1749", size = 25115, upload-time = "2025-09-06T15:40:12.44Z" }, 344 | ] 345 | 346 | [[package]] 347 | name = "pytest-forked" 348 | version = "1.6.0" 349 | source = { registry = "https://pypi.org/simple" } 350 | dependencies = [ 351 | { name = "py" }, 352 | { name = "pytest" }, 353 | ] 354 | sdist = { url = "https://files.pythonhosted.org/packages/8c/c9/93ad2ba2413057ee694884b88cf7467a46c50c438977720aeac26e73fdb7/pytest-forked-1.6.0.tar.gz", hash = "sha256:4dafd46a9a600f65d822b8f605133ecf5b3e1941ebb3588e943b4e3eb71a5a3f", size = 9977, upload-time = "2023-02-12T23:22:27.544Z" } 355 | wheels = [ 
356 | { url = "https://files.pythonhosted.org/packages/f4/af/9c0bda43e486a3c9bf1e0f876d0f241bc3f229d7d65d09331a0868db9629/pytest_forked-1.6.0-py3-none-any.whl", hash = "sha256:810958f66a91afb1a1e2ae83089d8dc1cd2437ac96b12963042fbb9fb4d16af0", size = 4897, upload-time = "2023-02-12T23:22:26.022Z" }, 357 | ] 358 | 359 | [[package]] 360 | name = "redis" 361 | version = "6.4.0" 362 | source = { registry = "https://pypi.org/simple" } 363 | dependencies = [ 364 | { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, 365 | ] 366 | sdist = { url = "https://files.pythonhosted.org/packages/0d/d6/e8b92798a5bd67d659d51a18170e91c16ac3b59738d91894651ee255ed49/redis-6.4.0.tar.gz", hash = "sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010", size = 4647399, upload-time = "2025-08-07T08:10:11.441Z" } 367 | wheels = [ 368 | { url = "https://files.pythonhosted.org/packages/e8/02/89e2ed7e85db6c93dfa9e8f691c5087df4e3551ab39081a4d7c6d1f90e05/redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f", size = 279847, upload-time = "2025-08-07T08:10:09.84Z" }, 369 | ] 370 | 371 | [[package]] 372 | name = "requests" 373 | version = "2.32.5" 374 | source = { registry = "https://pypi.org/simple" } 375 | dependencies = [ 376 | { name = "certifi" }, 377 | { name = "charset-normalizer" }, 378 | { name = "idna" }, 379 | { name = "urllib3" }, 380 | ] 381 | sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } 382 | wheels = [ 383 | { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, 
upload-time = "2025-08-18T20:46:00.542Z" }, 384 | ] 385 | 386 | [[package]] 387 | name = "ruff" 388 | version = "0.12.12" 389 | source = { registry = "https://pypi.org/simple" } 390 | sdist = { url = "https://files.pythonhosted.org/packages/a8/f0/e0965dd709b8cabe6356811c0ee8c096806bb57d20b5019eb4e48a117410/ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6", size = 5359915, upload-time = "2025-09-04T16:50:18.273Z" } 391 | wheels = [ 392 | { url = "https://files.pythonhosted.org/packages/09/79/8d3d687224d88367b51c7974cec1040c4b015772bfbeffac95face14c04a/ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc", size = 12116602, upload-time = "2025-09-04T16:49:18.892Z" }, 393 | { url = "https://files.pythonhosted.org/packages/c3/c3/6e599657fe192462f94861a09aae935b869aea8a1da07f47d6eae471397c/ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727", size = 12868393, upload-time = "2025-09-04T16:49:23.043Z" }, 394 | { url = "https://files.pythonhosted.org/packages/e8/d2/9e3e40d399abc95336b1843f52fc0daaceb672d0e3c9290a28ff1a96f79d/ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb", size = 12036967, upload-time = "2025-09-04T16:49:26.04Z" }, 395 | { url = "https://files.pythonhosted.org/packages/e9/03/6816b2ed08836be272e87107d905f0908be5b4a40c14bfc91043e76631b8/ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577", size = 12276038, upload-time = "2025-09-04T16:49:29.056Z" }, 396 | { url = "https://files.pythonhosted.org/packages/9f/d5/707b92a61310edf358a389477eabd8af68f375c0ef858194be97ca5b6069/ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e", size = 11901110, upload-time = "2025-09-04T16:49:32.07Z" }, 397 | { url = "https://files.pythonhosted.org/packages/9d/3d/f8b1038f4b9822e26ec3d5b49cf2bc313e3c1564cceb4c1a42820bf74853/ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e", size = 13668352, upload-time = "2025-09-04T16:49:35.148Z" }, 398 | { url = "https://files.pythonhosted.org/packages/98/0e/91421368ae6c4f3765dd41a150f760c5f725516028a6be30e58255e3c668/ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8", size = 14638365, upload-time = "2025-09-04T16:49:38.892Z" }, 399 | { url = "https://files.pythonhosted.org/packages/74/5d/88f3f06a142f58ecc8ecb0c2fe0b82343e2a2b04dcd098809f717cf74b6c/ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5", size = 14060812, upload-time = "2025-09-04T16:49:42.732Z" }, 400 | { url = "https://files.pythonhosted.org/packages/13/fc/8962e7ddd2e81863d5c92400820f650b86f97ff919c59836fbc4c1a6d84c/ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92", size = 13050208, upload-time = "2025-09-04T16:49:46.434Z" }, 401 | { url = "https://files.pythonhosted.org/packages/53/06/8deb52d48a9a624fd37390555d9589e719eac568c020b27e96eed671f25f/ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45", size = 13311444, upload-time = "2025-09-04T16:49:49.931Z" }, 402 | { url = "https://files.pythonhosted.org/packages/2a/81/de5a29af7eb8f341f8140867ffb93f82e4fde7256dadee79016ac87c2716/ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = 
"sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5", size = 13279474, upload-time = "2025-09-04T16:49:53.465Z" }, 403 | { url = "https://files.pythonhosted.org/packages/7f/14/d9577fdeaf791737ada1b4f5c6b59c21c3326f3f683229096cccd7674e0c/ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4", size = 12070204, upload-time = "2025-09-04T16:49:56.882Z" }, 404 | { url = "https://files.pythonhosted.org/packages/77/04/a910078284b47fad54506dc0af13839c418ff704e341c176f64e1127e461/ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23", size = 11880347, upload-time = "2025-09-04T16:49:59.729Z" }, 405 | { url = "https://files.pythonhosted.org/packages/df/58/30185fcb0e89f05e7ea82e5817b47798f7fa7179863f9d9ba6fd4fe1b098/ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489", size = 12891844, upload-time = "2025-09-04T16:50:02.591Z" }, 406 | { url = "https://files.pythonhosted.org/packages/21/9c/28a8dacce4855e6703dcb8cdf6c1705d0b23dd01d60150786cd55aa93b16/ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee", size = 13360687, upload-time = "2025-09-04T16:50:05.8Z" }, 407 | { url = "https://files.pythonhosted.org/packages/c8/fa/05b6428a008e60f79546c943e54068316f32ec8ab5c4f73e4563934fbdc7/ruff-0.12.12-py3-none-win32.whl", hash = "sha256:173be2bfc142af07a01e3a759aba6f7791aa47acf3604f610b1c36db888df7b1", size = 12052870, upload-time = "2025-09-04T16:50:09.121Z" }, 408 | { url = "https://files.pythonhosted.org/packages/85/60/d1e335417804df452589271818749d061b22772b87efda88354cf35cdb7a/ruff-0.12.12-py3-none-win_amd64.whl", hash = "sha256:e99620bf01884e5f38611934c09dd194eb665b0109104acae3ba6102b600fd0d", size = 13178016, upload-time = "2025-09-04T16:50:12.559Z" }, 409 | { 
url = "https://files.pythonhosted.org/packages/28/7e/61c42657f6e4614a4258f1c3b0c5b93adc4d1f8575f5229d1906b483099b/ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093", size = 12256762, upload-time = "2025-09-04T16:50:15.737Z" }, 410 | ] 411 | 412 | [[package]] 413 | name = "tomli" 414 | version = "2.3.0" 415 | source = { registry = "https://pypi.org/simple" } 416 | sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } 417 | wheels = [ 418 | { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, 419 | { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, 420 | { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, 421 | { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, 422 | { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, 423 | { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, 424 | { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, 425 | { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, 426 | { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, 427 | ] 428 | 429 | [[package]] 430 | name = "types-requests" 431 | version = "2.32.4.20250809" 432 | source = { registry = "https://pypi.org/simple" } 433 | dependencies = [ 434 | { name = "urllib3" }, 435 | ] 436 | sdist = { url = 
"https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" } 437 | wheels = [ 438 | { url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" }, 439 | ] 440 | 441 | [[package]] 442 | name = "typing-extensions" 443 | version = "4.15.0" 444 | source = { registry = "https://pypi.org/simple" } 445 | sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } 446 | wheels = [ 447 | { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, 448 | ] 449 | 450 | [[package]] 451 | name = "urllib3" 452 | version = "2.6.2" 453 | source = { registry = "https://pypi.org/simple" } 454 | sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" } 455 | wheels = [ 456 | { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = 
"sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" }, 457 | ] 458 | 459 | [[package]] 460 | name = "werkzeug" 461 | version = "3.1.4" 462 | source = { registry = "https://pypi.org/simple" } 463 | dependencies = [ 464 | { name = "markupsafe" }, 465 | ] 466 | sdist = { url = "https://files.pythonhosted.org/packages/45/ea/b0f8eeb287f8df9066e56e831c7824ac6bab645dd6c7a8f4b2d767944f9b/werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e", size = 864687, upload-time = "2025-11-29T02:15:22.841Z" } 467 | wheels = [ 468 | { url = "https://files.pythonhosted.org/packages/2f/f9/9e082990c2585c744734f85bec79b5dae5df9c974ffee58fe421652c8e91/werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905", size = 224960, upload-time = "2025-11-29T02:15:21.13Z" }, 469 | ] 470 | --------------------------------------------------------------------------------