├── .coverage ├── .gitignore ├── NOTICE ├── coverage.report ├── git_remote_s3 ├── enums.py ├── __init__.py ├── common.py ├── git.py ├── lfs.py ├── manage.py └── remote.py ├── .flake8 ├── CODE_OF_CONDUCT.md ├── Config ├── pyproject.toml ├── .github └── workflows │ ├── python-pytest.yml │ ├── publish.yml │ ├── publish-test.yml │ └── auto-release.yaml ├── THIRD_PARTY_NOTICE.md ├── CONTRIBUTING.md ├── test ├── parse_url_test.py ├── parallel_fetch_test.py └── remote_test.py ├── LICENSE ├── README.md └── poetry.lock /.coverage: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/awslabs/git-remote-s3/HEAD/.coverage -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | .venv 3 | .mypy_cache 4 | __pycache__ 5 | *.tar.gz 6 | .DS_Store -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | git-remote-s3 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -------------------------------------------------------------------------------- /coverage.report: -------------------------------------------------------------------------------- 1 | No source for code: '/Users/angmas/Projects/git-remote-s3-python/test/lfs_test.py'. 2 | -------------------------------------------------------------------------------- /git_remote_s3/enums.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: Amazon.com, Inc. or its affiliates 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | from enum import Enum 6 | 7 | 8 | class UriScheme(Enum): 9 | S3 = "s3" 10 | S3_ZIP = "s3+zip" 11 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | max-complexity = 10 4 | select = C,E,F,W,B,B950 5 | ignore = E203,E501,W503 6 | exclude = 7 | .git, 8 | __pycache__, 9 | *.egg-info, 10 | .nox, 11 | .pytest_cache, 12 | .mypy_cache, 13 | .venv, 14 | dist -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | 3 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 4 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 5 | opensource-codeofconduct@amazon.com with any additional questions or comments. 6 | -------------------------------------------------------------------------------- /git_remote_s3/__init__.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2023-present Amazon.com, Inc. or its affiliates 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | from .remote import S3Remote 5 | from . 
import git 6 | from .common import parse_git_url 7 | from .manage import Doctor 8 | from .enums import UriScheme 9 | 10 | __all__ = ["S3Remote", "git", "parse_git_url", "Doctor", "UriScheme"] 11 | -------------------------------------------------------------------------------- /Config: -------------------------------------------------------------------------------- 1 | package.GitRemoteS3 = { 2 | interfaces = (1.0); 3 | 4 | # Use NoOpBuild. See https://w.amazon.com/index.php/BrazilBuildSystem/NoOpBuild 5 | build-system = no-op; 6 | build-tools = { 7 | 1.0 = { 8 | NoOpBuild = 1.0; 9 | }; 10 | }; 11 | 12 | # Use runtime-dependencies for when you want to bring in additional 13 | # packages when deploying. 14 | # Use dependencies instead if you intend for these dependencies to 15 | # be exported to other packages that build against you. 16 | dependencies = { 17 | 1.0 = { 18 | }; 19 | }; 20 | 21 | runtime-dependencies = { 22 | 1.0 = { 23 | }; 24 | }; 25 | 26 | }; 27 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "git-remote-s3" 3 | version = "0.3.0" 4 | description = "A git remote helper for Amazon S3" 5 | authors = ["Massimiliano Angelino "] 6 | repository = "https://github.com/awslabs/git-remote-s3" 7 | classifiers = [ 8 | "Topic :: Software Development :: Version Control", 9 | "Topic :: Software Development :: Version Control :: Git" 10 | ] 11 | license = "Apache-2.0" 12 | readme = "README.md" 13 | 14 | [tool.poetry.scripts] 15 | git-remote-s3 = "git_remote_s3.remote:main" 16 | "git-remote-s3+zip" = "git_remote_s3.remote:main" 17 | git-lfs-s3 = "git_remote_s3.lfs:main" 18 | git-s3 = "git_remote_s3.manage:main" 19 | 20 | 21 | [tool.poetry.dependencies] 22 | python = ">=3.9" 23 | boto3 = "^1.34.155" 24 | botocore = "^1.39.4" 25 | 26 | 27 | [tool.poetry.group.dev.dependencies] 28 | flake8 = "^7.1.1" 29 | flake8-bugbear = "^24.4.26" 30 | mypy = "^1.11.1" 31 | pytest = "^8.3.2" 32 | mock = "^5.1.0" 33 | black = "^24.8.0" 34 | coverage = "^7.6.1" 35 | 36 | [build-system] 37 | requires = ["poetry-core"] 38 | build-backend = "poetry.core.masonry.api" 39 | -------------------------------------------------------------------------------- /.github/workflows/python-pytest.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - "*" 7 | pull_request: 8 | branches: ["main"] 9 | 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | build: 15 | name: Test and build application with poetry and pytest 16 | runs-on: ubuntu-latest 17 | permissions: 18 | contents: read 19 | steps: 20 | - name: Checkout code 21 | uses: actions/checkout@v4 22 | with: 23 | token: ${{ github.token }} 24 | 25 | - name: Set up Python 3.10 26 | uses: actions/setup-python@v5 27 | with: 28 | python-version: "3.10" 29 | 30 | - name: Install Poetry 31 | run: | 32 | curl -sSL https://install.python-poetry.org | python - -y 33 | 34 | - name: Update PATH 35 | run: echo "$HOME/.local/bin" >> $GITHUB_PATH 36 | 37 | - name: Update Poetry configuration 38 | run: poetry config virtualenvs.create false 39 | 40 | - name: Install dependencies 41 | run: poetry install --sync --no-interaction 42 | 43 | - name: Run tests 44 | run: poetry run pytest 45 | 46 | - name: Package project 47 | run: poetry build 48 | --------------------------------------------------------------------------------
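A note on how the console scripts declared in `pyproject.toml` above are resolved: per the [gitremote-helpers](https://git-scm.com/docs/gitremote-helpers) protocol, git handles a remote URL of the form `<scheme>://...` by searching `PATH` for an executable named `git-remote-<scheme>` and spawning it with the remote name and URL. A minimal sketch of the resulting flow (bucket and repo names are illustrative):

```
pip install git-remote-s3                      # installs git-remote-s3 and git-remote-s3+zip on PATH
git remote add origin s3://my-bucket/my-repo   # any s3:// URL is now routed to the helper
git push origin main                           # git spawns: git-remote-s3 origin s3://my-bucket/my-repo
```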
/git_remote_s3/common.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2023-present Amazon.com, Inc. or its affiliates 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import re 6 | 7 | from .enums import UriScheme 8 | 9 | 10 | def parse_git_url(url: str) -> tuple[UriScheme, str, str, str]: 11 | """Parses the elements of an s3:// remote origin URI 12 | 13 | Args: 14 | url (str): the URI to parse 15 | 16 | Returns: 17 | tuple[UriScheme, str, str, str]: uri scheme, profile, bucket and prefix extracted 18 | from the URI, or None, None, None, None if the URI is invalid 19 | """ 20 | if url is None: 21 | return None, None, None, None 22 | m = re.match(r"(s3|s3\+zip)://([^@]+@)?([a-z0-9][a-z0-9\.-]{2,62})/?(.+)?", url) 23 | if m is None or len(m.groups()) != 4: 24 | return None, None, None, None 25 | uri_scheme, profile, bucket, prefix = m.groups() 26 | if profile is not None: 27 | profile = profile[:-1] 28 | if prefix is not None: 29 | prefix = prefix.strip("/") 30 | if uri_scheme is not None: 31 | if uri_scheme == "s3": 32 | uri_scheme = UriScheme.S3 33 | if uri_scheme == "s3+zip": 34 | uri_scheme = UriScheme.S3_ZIP 35 | 36 | return uri_scheme, profile, bucket, prefix 37 | -------------------------------------------------------------------------------- /THIRD_PARTY_NOTICE.md: -------------------------------------------------------------------------------- 1 | # Third-Party Notices 2 | 3 | This project may include or interact with third-party software components and services. The following is a list of attributions for these components and related technologies: 4 | 5 | ## boto3 6 | 7 | - Copyright: Amazon.com, Inc. or its affiliates 8 | - License: Apache License 2.0 9 | - Repository: https://github.com/boto/boto3 10 | 11 | ## setuptools 12 | 13 | - Copyright: Python Packaging Authority 14 | - License: MIT License 15 | - Repository: https://github.com/pypa/setuptools 16 | 17 | ## git 18 | 19 | - Copyright: Linus Torvalds and others 20 | - License: GNU General Public License version 2.0 21 | - Repository: https://github.com/git/git 22 | 23 | ## git-lfs 24 | 25 | - Copyright: GitHub, Inc. and Git LFS contributors 26 | - License: MIT License 27 | - Repository: https://github.com/git-lfs/git-lfs 28 | 29 | ## Amazon S3 30 | 31 | - Copyright: Amazon.com, Inc. or its affiliates 32 | - Service: This project interacts with Amazon S3, a web service offered by Amazon Web Services 33 | 34 | Note: This project implements protocols and interacts with the above-mentioned software and services, but may not include their source code directly. For the most up-to-date information about dependencies, please check the project's requirements or setup files. 
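For reference, a minimal usage sketch of the `parse_git_url` helper defined in `common.py` above (the URL is illustrative). It returns the tuple `(uri_scheme, profile, bucket, prefix)`, with all four values set to `None` when the URL is invalid:

```
from git_remote_s3 import parse_git_url, UriScheme

# "work" is an optional AWS profile; "team/repo" is the key prefix inside the bucket
uri_scheme, profile, bucket, prefix = parse_git_url("s3+zip://work@my-bucket/team/repo")
assert uri_scheme is UriScheme.S3_ZIP
assert (profile, bucket, prefix) == ("work", "my-bucket", "team/repo")
```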
-------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - v*.*.* 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | pypi-publish: 13 | name: Upload release to pypi.org 14 | runs-on: ubuntu-latest 15 | environment: 16 | name: pypi 17 | url: https://pypi.org/project/git-remote-s3/ 18 | permissions: 19 | contents: read 20 | steps: 21 | - name: Checkout code 22 | uses: actions/checkout@v4 23 | with: 24 | token: ${{ github.token }} 25 | 26 | - name: Set up Python 3.10 27 | uses: actions/setup-python@v5 28 | with: 29 | python-version: "3.10" 30 | 31 | - name: Install Poetry 32 | run: | 33 | curl -sSL https://install.python-poetry.org | python - -y 34 | 35 | - name: Update PATH 36 | run: echo "$HOME/.local/bin" >> $GITHUB_PATH 37 | 38 | - name: Update Poetry configuration 39 | run: poetry config virtualenvs.create false 40 | 41 | - name: Install dependencies 42 | run: poetry install --sync --no-interaction 43 | 44 | - name: Test 45 | run: poetry run pytest 46 | 47 | - name: Package project 48 | run: poetry build 49 | 50 | - name: Publish package distributions to pypi.org 51 | uses: pypa/gh-action-pypi-publish@release/v1 52 | with: 53 | password: ${{ secrets.PYPI_API_TOKEN }} 54 | -------------------------------------------------------------------------------- /.github/workflows/publish-test.yml: -------------------------------------------------------------------------------- 1 | name: Test_Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - test_v*.*.* 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | pypi-publish: 13 | name: Upload release to test.pypi.org 14 | runs-on: ubuntu-latest 15 | environment: 16 | name: testpypi 17 | url: https://test.pypi.org/project/git-remote-s3/ 18 | permissions: 19 | contents: read 20 | steps: 21 | - name: Checkout code 22 | uses: actions/checkout@v4 23 | with: 24 | token: ${{ github.token }} 25 | 26 | - name: Set up Python 3.10 27 | uses: actions/setup-python@v5 28 | with: 29 | python-version: "3.10" 30 | 31 | - name: Install Poetry 32 | run: | 33 | curl -sSL https://install.python-poetry.org | python - -y 34 | 35 | - name: Update PATH 36 | run: echo "$HOME/.local/bin" >> $GITHUB_PATH 37 | 38 | - name: Update Poetry configuration 39 | run: poetry config virtualenvs.create false 40 | 41 | - name: Install dependencies 42 | run: poetry install --sync --no-interaction 43 | 44 | - name: Package project 45 | run: poetry build 46 | 47 | - name: Publish package distributions to test.pypi.org 48 | uses: pypa/gh-action-pypi-publish@release/v1 49 | with: 50 | password: ${{ secrets.TEST_PYPI_API_TOKEN }} 51 | repository-url: https://test.pypi.org/legacy/ 52 | -------------------------------------------------------------------------------- /.github/workflows/auto-release.yaml: -------------------------------------------------------------------------------- 1 | name: Auto_Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - v*.*.* 7 | 8 | permissions: 9 | contents: write 10 | 11 | jobs: 12 | build: 13 | environment: 14 | name: pypi 15 | url: https://pypi.org/project/git-remote-s3/ 16 | runs-on: ubuntu-latest 17 | 18 | steps: 19 | - uses: actions/checkout@v3 20 | with: 21 | token: ${{ github.token }} 22 | fetch-depth: 0 # Fetch all history for all branches and tags 23 | 24 | - name: Check if tag is on main branch 25 | id: check_tag 26 | run: | 27 | # Check if the tag commit is on 
the main branch 28 | if git merge-base --is-ancestor ${GITHUB_SHA} origin/main; then 29 | echo "Tag is on main branch, proceeding with release" 30 | echo "on_main=true" >> $GITHUB_OUTPUT 31 | else 32 | echo "Tag is not on main branch, skipping release" 33 | echo "on_main=false" >> $GITHUB_OUTPUT 34 | fi 35 | - name: Set up Python 36 | if: steps.check_tag.outputs.on_main == 'true' 37 | uses: actions/setup-python@v5 38 | with: 39 | python-version: "3.10" 40 | 41 | - name: Install Poetry 42 | if: steps.check_tag.outputs.on_main == 'true' 43 | run: | 44 | python -m pip install --upgrade pip 45 | pip install poetry 46 | 47 | - name: Set Poetry version 48 | if: steps.check_tag.outputs.on_main == 'true' 49 | run: | 50 | # Remove 'v' prefix from tag name if present 51 | VERSION=${GITHUB_REF_NAME#v} 52 | poetry version $VERSION 53 | 54 | - name: Build package 55 | if: steps.check_tag.outputs.on_main == 'true' 56 | run: | 57 | poetry build 58 | 59 | - name: Publish to PyPI 60 | if: steps.check_tag.outputs.on_main == 'true' 61 | env: 62 | PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }} 63 | run: | 64 | poetry config pypi-token.pypi $PYPI_TOKEN 65 | poetry publish 66 | 67 | - name: Create Release 68 | if: steps.check_tag.outputs.on_main == 'true' 69 | env: 70 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 71 | run: | 72 | tag_name="${GITHUB_REF_NAME}" 73 | gh release create "${tag_name}" --generate-notes 74 | for package in $(ls dist/*.whl dist/*.gz); do 75 | gh release upload "${tag_name}" "$package" --clobber 76 | done 77 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guidelines 2 | 3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional 4 | documentation, we greatly value feedback and contributions from our community. 5 | 6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary 7 | information to effectively respond to your bug report or contribution. 8 | 9 | ## Reporting Bugs/Feature Requests 10 | 11 | We welcome you to use the GitHub issue tracker to report bugs or suggest features. 12 | 13 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already 14 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful: 15 | 16 | - A reproducible test case or series of steps 17 | - The version of our code being used 18 | - Any modifications you've made relevant to the bug 19 | - Anything unusual about your environment or deployment 20 | 21 | ## Contributing via Pull Requests 22 | 23 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that: 24 | 25 | 1. You are working against the latest source on the _main_ branch. 26 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already. 27 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted. 28 | 29 | To send us a pull request, please: 30 | 31 | 1. Fork the repository. 32 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change. 33 | 3. Ensure local tests pass. 34 | 4. 
Commit to your fork using clear commit messages. 35 | 5. Send us a pull request, answering any default questions in the pull request interface. 36 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation. 37 | 38 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and 39 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/). 40 | 41 | ## Finding contributions to work on 42 | 43 | Looking at the existing issues is a great way to find something to contribute to. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start. 44 | 45 | ## Code of Conduct 46 | 47 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 48 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 49 | opensource-codeofconduct@amazon.com with any additional questions or comments. 50 | 51 | ## Security issue notifications 52 | 53 | If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue. 54 | 55 | ## Licensing 56 | 57 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution. 58 | -------------------------------------------------------------------------------- /git_remote_s3/git.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2023-present Amazon.com, Inc. or its affiliates 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import subprocess 6 | import sys 7 | import re 8 | 9 | 10 | class GitError(Exception): 11 | pass 12 | 13 | 14 | def archive(*, folder: str, ref: str) -> str: 15 | """Archives the given ref into a repo.zip file 16 | 17 | Args: 18 | folder (str): the folder where the archive file is created 19 | ref (str): the ref to archive 20 | 21 | Returns: 22 | str: the path to the archive file 23 | """ 24 | 25 | file_path = f"{folder}/repo.zip" 26 | result = subprocess.run( 27 | ["git", "archive", "--format", "zip", "--output", file_path, ref], 28 | stdout=subprocess.PIPE, 29 | stderr=subprocess.PIPE, 30 | check=True, 31 | ) 32 | 33 | if result.returncode == 0: 34 | return file_path 35 | else: 36 | raise GitError(result.stderr.decode("utf8")) 37 | 38 | 39 | def bundle(*, folder: str, sha: str, ref: str) -> str: 40 | """Bundles the given ref into a sha.bundle file 41 | 42 | Args: 43 | folder (str): the folder where the bundle file is created 44 | sha (str): the sha of the bundle. 
A bundle is stored as sha.bundle 45 | ref (str): the ref to bundle 46 | 47 | Returns: 48 | str: the path to the bundle file 49 | """ 50 | file_path = f"{folder}/{sha}.bundle" 51 | result = subprocess.run( 52 | ["git", "bundle", "create", file_path, ref], 53 | stdout=subprocess.PIPE, 54 | stderr=subprocess.PIPE, 55 | check=True, 56 | ) 57 | 58 | if result.returncode == 0: 59 | return file_path 60 | else: 61 | raise GitError(result.stderr.decode("utf8")) 62 | 63 | 64 | def unbundle(*, folder: str, sha: str, ref: str): 65 | """Unbundles the content of the bundle referred by the sha 66 | 67 | Args: 68 | folder (str): the folder where the bundle is located 69 | sha (str): the sha of the bundle. A bundle is stored as sha.bundle 70 | ref (str): the ref to extract from the bundle 71 | """ 72 | subprocess.run( 73 | ["git", "bundle", "unbundle", f"{folder}/{sha}.bundle", ref], 74 | stdout=sys.stderr, 75 | check=True, 76 | ) 77 | 78 | 79 | def rev_parse(ref: str) -> str: 80 | """Gets the sha of a ref 81 | 82 | Args: 83 | ref (str): the ref to get the sha for 84 | 85 | Raises: 86 | GitError: if the ref is not found 87 | 88 | Returns: 89 | str: the sha of the ref 90 | """ 91 | 92 | result = subprocess.run(["git", "rev-parse", ref], stdout=subprocess.PIPE) 93 | if result.returncode != 0: 94 | raise GitError(f"fatal: {ref} not found") 95 | sha = result.stdout.decode("utf8").strip() 96 | return sha 97 | 98 | 99 | def is_ancestor(ancestor: str, descendant: str) -> bool: 100 | """Checks if the ancestor is an ancestor of the descendant 101 | 102 | Args: 103 | ancestor (str): the ancestor ref 104 | descendant (str): the descendant ref 105 | 106 | Returns: 107 | bool: true if the ancestor is an ancestor of the descendant 108 | """ 109 | result = subprocess.run( 110 | ["git", "merge-base", "--is-ancestor", ancestor, descendant], 111 | stderr=subprocess.DEVNULL, 112 | stdout=subprocess.DEVNULL, 113 | ) 114 | return result.returncode == 0 115 | 116 | 117 | def get_remote_url(remote: str) -> str: 118 | result = subprocess.run( 119 | ["git", "remote", "get-url", remote], stdout=subprocess.PIPE 120 | ) 121 | if result.returncode != 0: 122 | raise GitError(f"fatal: {remote} not found") 123 | url = result.stdout.decode("utf8").strip() 124 | return url 125 | 126 | 127 | # validate refname according to 128 | # https://github.com/git/git/blob/406f326d271e0bacecdb00425422c5fa3f314930/refs.c#L170 129 | def validate_ref_name(name: str) -> bool: 130 | return ( 131 | re.search( 132 | r"(^\.)|(\.\.)|([:\?\[\\\^\~\s\*\]])|(\.lock$)|(/$)|(@\{)|([\x00-\x1f])", 133 | name, 134 | ) 135 | is None 136 | ) 137 | 138 | 139 | def get_last_commit_message() -> str: 140 | result = subprocess.run( 141 | ["git", "log", "-1", "--pretty=%h %s"], stdout=subprocess.PIPE 142 | ) 143 | if result.returncode != 0: 144 | raise GitError("fatal: an error has occurred") 145 | message = result.stdout.decode("utf8").strip() 146 | return message 147 | -------------------------------------------------------------------------------- /test/parse_url_test.py: -------------------------------------------------------------------------------- 1 | from git_remote_s3 import parse_git_url, UriScheme 2 | 3 | 4 | def test_parse_url_trailing_slash_no_profile(): 5 | url = "s3://bucket-name/path/to/" 6 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 7 | assert uri_scheme == UriScheme.S3 8 | assert bucket == "bucket-name" 9 | assert profile is None 10 | assert prefix == "path/to" 11 | 12 | 13 | def test_parse_url_no_profile(): 14 | url = "s3://bucket-name/path/to" 
15 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 16 | assert uri_scheme == UriScheme.S3 17 | assert bucket == "bucket-name" 18 | assert profile is None 19 | assert prefix == "path/to" 20 | 21 | 22 | def test_parse_url(): 23 | url = "s3://profile-test@bucket-name/path/to" 24 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 25 | assert uri_scheme == UriScheme.S3 26 | assert bucket == "bucket-name" 27 | assert profile == "profile-test" 28 | assert prefix == "path/to" 29 | 30 | 31 | def test_parse_url_issue5(): 32 | url = "s3://er@bucket/path/" 33 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 34 | assert uri_scheme == UriScheme.S3 35 | assert bucket == "bucket" 36 | assert profile == "er" 37 | assert prefix == "path" 38 | 39 | 40 | def test_parse_url_1_char_profile(): 41 | url = "s3://A@bucket/path/" 42 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 43 | assert uri_scheme == UriScheme.S3 44 | assert bucket == "bucket" 45 | assert profile == "A" 46 | assert prefix == "path" 47 | 48 | 49 | def test_parse_url_all_supported_symbols_in_profile(): 50 | url = "s3://Ab-tr+54_quwww@bucket/path/" 51 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 52 | assert uri_scheme == UriScheme.S3 53 | assert bucket == "bucket" 54 | assert profile == "Ab-tr+54_quwww" 55 | assert prefix == "path" 56 | 57 | 58 | def test_parse_url_unsupported_symbols_in_profile(): 59 | url = "s3://A!@bucket/path/" 60 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 61 | assert uri_scheme == UriScheme.S3 62 | assert bucket == "bucket" 63 | assert profile == "A!" 64 | assert prefix == "path" 65 | 66 | 67 | def test_parse_url_empty_profile(): 68 | url = "s3://@bucket/path/" 69 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 70 | assert uri_scheme is None 71 | assert bucket is None 72 | assert profile is None 73 | assert prefix is None 74 | 75 | 76 | def test_parse_url_no_prefix_trailing_slash(): 77 | url = "s3://profile-test@bucket-name/" 78 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 79 | assert uri_scheme == UriScheme.S3 80 | assert bucket == "bucket-name" 81 | assert profile == "profile-test" 82 | assert prefix is None 83 | 84 | 85 | def test_parse_url_no_prefix(): 86 | url = "s3://profile-test@bucket-name" 87 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 88 | assert uri_scheme == UriScheme.S3 89 | assert bucket == "bucket-name" 90 | assert profile == "profile-test" 91 | assert prefix is None 92 | 93 | 94 | def test_parse_url_no_prefix_no_profile(): 95 | url = "s3://bucket-name" 96 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 97 | assert uri_scheme == UriScheme.S3 98 | assert bucket == "bucket-name" 99 | assert profile is None 100 | assert prefix is None 101 | 102 | 103 | def test_parse_url_not_valid(): 104 | url = "s4://bucket-name/path/to" 105 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 106 | assert uri_scheme is None 107 | assert bucket is None 108 | assert profile is None 109 | assert prefix is None 110 | 111 | 112 | def test_parse_url_none(): 113 | url = None 114 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 115 | assert uri_scheme is None 116 | assert bucket is None 117 | assert profile is None 118 | assert prefix is None 119 | 120 | 121 | def test_parse_url_uri_scheme_s3_zip_no_profile(): 122 | url = "s3+zip://bucket-name/path/to" 123 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 124 | assert uri_scheme == UriScheme.S3_ZIP 125 | assert bucket == "bucket-name" 126 | 
assert profile is None 127 | assert prefix == "path/to" 128 | 129 | 130 | def test_parse_url_uri_scheme_s3_zip(): 131 | url = "s3+zip://profile-test@bucket-name/path/to" 132 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 133 | assert uri_scheme == UriScheme.S3_ZIP 134 | assert bucket == "bucket-name" 135 | assert profile == "profile-test" 136 | assert prefix == "path/to" 137 | 138 | 139 | def test_parse_url_uri_scheme_not_valid(): 140 | url = "s3+foo://bucket-name/path/to" 141 | uri_scheme, profile, bucket, prefix = parse_git_url(url) 142 | assert uri_scheme is None 143 | assert bucket is None 144 | assert profile is None 145 | assert prefix is None 146 | -------------------------------------------------------------------------------- /test/parallel_fetch_test.py: -------------------------------------------------------------------------------- 1 | from mock import patch 2 | from io import BytesIO 3 | from git_remote_s3 import S3Remote, UriScheme 4 | 5 | import threading 6 | 7 | SHA1 = "c105d19ba64965d2c9d3d3246e7269059ef8bb8a" 8 | SHA2 = "c105d19ba64965d2c9d3d3246e7269059ef8bb8b" 9 | SHA3 = "c105d19ba64965d2c9d3d3246e7269059ef8bb8c" 10 | BRANCH = "pytest" 11 | MOCK_BUNDLE_CONTENT = b"MOCK_BUNDLE_CONTENT" 12 | 13 | 14 | @patch("boto3.Session.client") 15 | def test_process_fetch_cmds_empty_list(session_client_mock): 16 | """Test that process_fetch_cmds handles empty command list gracefully""" 17 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 18 | 19 | # Call with empty list 20 | s3_remote.process_fetch_cmds([]) 21 | 22 | # Verify no interactions with S3 23 | session_client_mock.return_value.get_object.assert_not_called() 24 | 25 | 26 | @patch("git_remote_s3.git.unbundle") 27 | @patch("boto3.Session.client") 28 | def test_process_fetch_cmds_single_command(session_client_mock, unbundle_mock): 29 | """Test processing a single fetch command""" 30 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 31 | session_client_mock.return_value.get_object.return_value = { 32 | "Body": BytesIO(MOCK_BUNDLE_CONTENT) 33 | } 34 | 35 | # Process a single fetch command 36 | s3_remote.process_fetch_cmds([f"fetch {SHA1} refs/heads/{BRANCH}"]) 37 | 38 | # Verify S3 download_file was called once 39 | session_client_mock.return_value.download_file.assert_called_once() 40 | unbundle_mock.assert_called_once() 41 | 42 | # Verify the fetched_refs list contains the SHA 43 | assert SHA1 in s3_remote.fetched_refs 44 | 45 | 46 | @patch("git_remote_s3.git.unbundle") 47 | @patch("boto3.Session.client") 48 | def test_process_fetch_cmds_multiple_commands(session_client_mock, unbundle_mock): 49 | """Test processing multiple fetch commands in parallel""" 50 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 51 | session_client_mock.return_value.get_object.return_value = { 52 | "Body": BytesIO(MOCK_BUNDLE_CONTENT) 53 | } 54 | 55 | # Process multiple fetch commands 56 | fetch_cmds = [ 57 | f"fetch {SHA1} refs/heads/{BRANCH}", 58 | f"fetch {SHA2} refs/heads/{BRANCH}", 59 | f"fetch {SHA3} refs/heads/{BRANCH}", 60 | ] 61 | s3_remote.process_fetch_cmds(fetch_cmds) 62 | 63 | # Verify S3 download_file was called for each command 64 | assert session_client_mock.return_value.download_file.call_count == 3 65 | assert unbundle_mock.call_count == 3 66 | 67 | # Verify all SHAs are in the fetched_refs list 68 | assert SHA1 in s3_remote.fetched_refs 69 | assert SHA2 in s3_remote.fetched_refs 70 | assert SHA3 in s3_remote.fetched_refs 71 | 72 | 73 | 
@patch("git_remote_s3.git.unbundle") 74 | @patch("boto3.Session.client") 75 | def test_process_fetch_cmds_uses_thread_pool(session_client_mock, unbundle_mock): 76 | """Test that process_fetch_cmds uses a thread pool for parallel execution""" 77 | # This test verifies that the ThreadPoolExecutor is used by checking that 78 | # multiple commands are processed in parallel 79 | 80 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 81 | session_client_mock.return_value.get_object.return_value = { 82 | "Body": BytesIO(MOCK_BUNDLE_CONTENT) 83 | } 84 | 85 | # Create fetch commands 86 | fetch_cmds = [ 87 | f"fetch {SHA1} refs/heads/{BRANCH}", 88 | f"fetch {SHA2} refs/heads/{BRANCH}", 89 | f"fetch {SHA3} refs/heads/{BRANCH}", 90 | ] 91 | 92 | # Process the commands 93 | s3_remote.process_fetch_cmds(fetch_cmds) 94 | 95 | # Verify all commands were processed 96 | assert session_client_mock.return_value.download_file.call_count == 3 97 | assert unbundle_mock.call_count == 3 98 | 99 | # Verify all SHAs are in the fetched_refs list 100 | assert SHA1 in s3_remote.fetched_refs 101 | assert SHA2 in s3_remote.fetched_refs 102 | assert SHA3 in s3_remote.fetched_refs 103 | 104 | 105 | @patch("sys.stdin") 106 | @patch("git_remote_s3.git.unbundle") 107 | @patch("boto3.Session.client") 108 | def test_process_cmd_batch_processing(session_client_mock, unbundle_mock, stdin_mock): 109 | """Test that fetch commands are collected and processed in batch""" 110 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 111 | session_client_mock.return_value.get_object.return_value = { 112 | "Body": BytesIO(MOCK_BUNDLE_CONTENT) 113 | } 114 | 115 | # Simulate processing multiple fetch commands followed by an empty line 116 | s3_remote.process_cmd(f"fetch {SHA1} refs/heads/{BRANCH}") 117 | s3_remote.process_cmd(f"fetch {SHA2} refs/heads/{BRANCH}") 118 | s3_remote.process_cmd(f"fetch {SHA3} refs/heads/{BRANCH}") 119 | 120 | # Verify commands are collected but not processed yet 121 | assert len(s3_remote.fetch_cmds) == 3 122 | unbundle_mock.assert_not_called() 123 | 124 | # Process the empty line to trigger batch processing 125 | with patch("git_remote_s3.remote.S3Remote.process_fetch_cmds") as mock_process: 126 | s3_remote.process_cmd("\n") 127 | 128 | # Verify process_fetch_cmds was called with all collected commands 129 | mock_process.assert_called_once() 130 | assert len(mock_process.call_args[0][0]) == 3 131 | 132 | # Verify fetch_cmds is cleared after processing 133 | assert len(s3_remote.fetch_cmds) == 0 134 | 135 | 136 | @patch("git_remote_s3.git.unbundle") 137 | @patch("boto3.Session.client") 138 | def test_thread_safety_of_fetched_refs(session_client_mock, unbundle_mock): 139 | """Test thread safety of the fetched_refs list using a real thread pool""" 140 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 141 | session_client_mock.return_value.get_object.return_value = { 142 | "Body": BytesIO(MOCK_BUNDLE_CONTENT) 143 | } 144 | 145 | # Create multiple fetch commands with different SHAs 146 | fetch_cmds = [f"fetch {SHA1} refs/heads/{BRANCH}"] * 20 147 | 148 | # Process commands using a real thread pool 149 | s3_remote.process_fetch_cmds(fetch_cmds) 150 | 151 | # Verify SHA1 appears in fetched_refs 152 | assert SHA1 in s3_remote.fetched_refs 153 | 154 | 155 | @patch("git_remote_s3.git.unbundle") 156 | @patch("boto3.Session.client") 157 | def test_cmd_fetch_thread_safety(session_client_mock, unbundle_mock): 158 | """Test that cmd_fetch is thread-safe when called 
concurrently""" 159 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 160 | session_client_mock.return_value.get_object.return_value = { 161 | "Body": BytesIO(MOCK_BUNDLE_CONTENT) 162 | } 163 | 164 | # Create a function that simulates concurrent access 165 | def concurrent_fetch(): 166 | s3_remote.cmd_fetch(f"fetch {SHA1} refs/heads/{BRANCH}") 167 | 168 | # Create and start multiple threads 169 | threads = [] 170 | for _ in range(5): 171 | thread = threading.Thread(target=concurrent_fetch) 172 | threads.append(thread) 173 | thread.start() 174 | 175 | # Wait for all threads to complete 176 | for thread in threads: 177 | thread.join() 178 | 179 | # Verify SHA1 appears in fetched_refs 180 | assert SHA1 in s3_remote.fetched_refs 181 | -------------------------------------------------------------------------------- /git_remote_s3/lfs.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2023-present Amazon.com, Inc. or its affiliates 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import sys 6 | import logging 7 | import json 8 | import subprocess 9 | import boto3 10 | import threading 11 | import os 12 | from .common import parse_git_url 13 | from .git import validate_ref_name 14 | 15 | if "lfs" in __name__: 16 | logging.basicConfig( 17 | level=logging.ERROR, 18 | format="%(asctime)s - %(levelname)s - %(process)d - %(message)s", 19 | filename=".git/lfs/tmp/git-lfs-s3.log", 20 | ) 21 | 22 | logger = logging.getLogger(__name__) 23 | 24 | 25 | class ProgressPercentage: 26 | def __init__(self, oid: str): 27 | self._seen_so_far = 0 28 | self._lock = threading.Lock() 29 | self.oid = oid 30 | 31 | def __call__(self, bytes_amount): 32 | with self._lock: 33 | self._seen_so_far += bytes_amount 34 | progress_event = { 35 | "event": "progress", 36 | "oid": self.oid, 37 | "bytesSoFar": self._seen_so_far, 38 | "bytesSinceLast": bytes_amount, 39 | } 40 | sys.stdout.write(f"{json.dumps(progress_event)}\n") 41 | sys.stdout.flush() 42 | 43 | 44 | def write_error_event(*, oid: str, error: str, flush=False): 45 | err_event = { 46 | "event": "complete", 47 | "oid": oid, 48 | "error": {"code": 2, "message": error}, 49 | } 50 | sys.stdout.write(f"{json.dumps(err_event)}\n") 51 | if flush: 52 | sys.stdout.flush() 53 | 54 | 55 | class LFSProcess: 56 | def __init__(self, s3uri: str): 57 | uri_scheme, profile, bucket, prefix = parse_git_url(s3uri) 58 | if bucket is None or prefix is None: 59 | logger.error(f"s3 uri {s3uri} is invalid") 60 | error_event = { 61 | "error": {"code": 32, "message": f"s3 uri {s3uri} is invalid"} 62 | } 63 | sys.stdout.write(f"{json.dumps(error_event)}\n") 64 | sys.stdout.flush() 65 | return 66 | self.prefix = prefix 67 | self.bucket = bucket 68 | self.profile = profile 69 | self.s3_bucket = None 70 | sys.stdout.write("{}\n") 71 | sys.stdout.flush() 72 | 73 | def init_s3_bucket(self): 74 | if self.s3_bucket is not None: 75 | return 76 | if self.profile is None: 77 | session = boto3.Session() 78 | else: 79 | session = boto3.Session(profile_name=self.profile) 80 | s3 = session.resource("s3") 81 | self.s3_bucket = s3.Bucket(self.bucket) 82 | 83 | def upload(self, event: dict): 84 | logger.debug("upload") 85 | try: 86 | self.init_s3_bucket() 87 | if list( 88 | self.s3_bucket.objects.filter( 89 | Prefix=f"{self.prefix}/lfs/{event['oid']}" 90 | ) 91 | ): 92 | logger.debug("object already exists") 93 | sys.stdout.write( 94 | f"{json.dumps({'event': 'complete', 'oid': event['oid']})}\n" 95 | ) 96 | 
sys.stdout.flush() 97 | return 98 | self.s3_bucket.upload_file( 99 | event["path"], 100 | f"{self.prefix}/lfs/{event['oid']}", 101 | Callback=ProgressPercentage(event["oid"]), 102 | ) 103 | sys.stdout.write( 104 | f"{json.dumps({'event': 'complete', 'oid': event['oid']})}\n" 105 | ) 106 | except Exception as e: 107 | logger.error(e) 108 | write_error_event(oid=event["oid"], error=str(e)) 109 | sys.stdout.flush() 110 | 111 | def download(self, event: dict): 112 | logger.debug("download") 113 | try: 114 | self.init_s3_bucket() 115 | temp_dir = os.path.abspath(".git/lfs/tmp") 116 | self.s3_bucket.download_file( 117 | Key=f"{self.prefix}/lfs/{event['oid']}", 118 | Filename=f"{temp_dir}/{event['oid']}", 119 | Callback=ProgressPercentage(event["oid"]), 120 | ) 121 | done_event = { 122 | "event": "complete", 123 | "oid": event["oid"], 124 | "path": f"{temp_dir}/{event['oid']}", 125 | } 126 | sys.stdout.write(f"{json.dumps(done_event)}\n") 127 | except Exception as e: 128 | logger.error(e) 129 | write_error_event(oid=event["oid"], error=str(e)) 130 | 131 | sys.stdout.flush() 132 | 133 | 134 | def install(): 135 | result = subprocess.run( 136 | ["git", "config", "--add", "lfs.customtransfer.git-lfs-s3.path", "git-lfs-s3"], 137 | stderr=subprocess.PIPE, 138 | ) 139 | if result.returncode != 0: 140 | sys.stderr.write(result.stderr.decode("utf-8").strip()) 141 | sys.stderr.flush() 142 | sys.exit(1) 143 | result = subprocess.run( 144 | ["git", "config", "--add", "lfs.standalonetransferagent", "git-lfs-s3"], 145 | stderr=subprocess.PIPE, 146 | ) 147 | if result.returncode != 0: 148 | sys.stderr.write(result.stderr.decode("utf-8").strip()) 149 | sys.stderr.flush() 150 | sys.exit(1) 151 | 152 | sys.stdout.write("git-lfs-s3 installed\n") 153 | sys.stdout.flush() 154 | 155 | 156 | def main(): # noqa: C901 157 | if len(sys.argv) > 1: 158 | if "install" == sys.argv[1]: 159 | install() 160 | sys.exit(0) 161 | elif "debug" == sys.argv[1]: 162 | logger.setLevel(logging.DEBUG) 163 | elif "enable-debug" == sys.argv[1]: 164 | subprocess.run( 165 | [ 166 | "git", 167 | "config", 168 | "--add", 169 | "lfs.customtransfer.git-lfs-s3.args", 170 | "debug", 171 | ] 172 | ) 173 | print("debug enabled") 174 | sys.exit(0) 175 | elif "disable-debug" == sys.argv[1]: 176 | subprocess.run( 177 | ["git", "config", "--unset", "lfs.customtransfer.git-lfs-s3.args"] 178 | ) 179 | print("debug disabled") 180 | sys.exit(0) 181 | else: 182 | print(f"unknown command {sys.argv[1]}") 183 | sys.exit(1) 184 | 185 | lfs_process = None 186 | while True: 187 | logger.debug("git-lfs-s3 starting") 188 | line = sys.stdin.readline() 189 | logger.debug(line) 190 | event = json.loads(line) 191 | if event["event"] == "init": 192 | # This is just another precaution but not strictly necessary since git would 193 | # already have validated the origin name 194 | if not validate_ref_name(event["remote"]): 195 | logger.error(f"invalid ref {event['remote']}") 196 | sys.stdout.write("{}\n") 197 | sys.stdout.flush() 198 | sys.exit(1) 199 | result = subprocess.run( 200 | ["git", "remote", "get-url", event["remote"]], 201 | stdout=subprocess.PIPE, 202 | stderr=subprocess.PIPE, 203 | ) 204 | if result.returncode != 0: 205 | logger.error(result.stderr.decode("utf-8").strip()) 206 | error_event = { 207 | "error": { 208 | "code": 2, 209 | "message": f"cannot resolve remote \"{event['remote']}\"", 210 | } 211 | } 212 | sys.stdout.write(f"{json.dumps(error_event)}") 213 | sys.stdout.flush() 214 | sys.exit(1) 215 | s3uri = result.stdout.decode("utf-8").strip() 216 | 
lfs_process = LFSProcess(s3uri=s3uri) 217 | 218 | elif event["event"] == "upload": 219 | lfs_process.upload(event) 220 | elif event["event"] == "download": 221 | lfs_process.download(event) 222 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2024 Massimiliano Angelino 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /git_remote_s3/manage.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2023-present Amazon.com, Inc. 
or its affiliates 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import boto3 6 | from .remote import parse_git_url, DEFAULT_LOCK_TTL_SECONDS 7 | import argparse 8 | import sys 9 | import uuid 10 | from botocore.exceptions import ( 11 | ClientError, 12 | ProfileNotFound, 13 | CredentialRetrievalError, 14 | NoCredentialsError, 15 | UnknownCredentialError, 16 | ) 17 | from .git import get_remote_url, GitError 18 | import datetime 19 | 20 | 21 | class Doctor: 22 | def __init__(self, profile, bucket, prefix, delete_bundle, lock_ttl_seconds=60, delete_stale_locks=False) -> None: 23 | self.bucket = bucket 24 | self.prefix = prefix 25 | self.delete_bundle = delete_bundle 26 | self.s3 = boto3.Session(profile_name=profile).client("s3") 27 | self.lock_ttl_seconds = lock_ttl_seconds 28 | self.delete_stale_locks = delete_stale_locks 29 | 30 | def run(self): 31 | repos = self.analyze_repo() 32 | for r in repos.keys(): 33 | print(f"{r}:") 34 | head_ref = "Invalid" 35 | for ref in repos[r]["refs"].keys(): 36 | if repos[r]["HEAD"] == ref: 37 | head_ref = ref 38 | ref_value = repos[r]["refs"][ref] 39 | part_1 = "*" if ref_value["protected"] else "" 40 | part_2 = "Ok" if len(ref_value["bundles"]) == 1 else "Multiple refs" 41 | print(f" {part_1} {ref}: {part_2}") 42 | if head_ref == "Invalid": 43 | repos[r]["HEAD"] = head_ref 44 | print(f" HEAD: {head_ref}") 45 | 46 | self.fix_issues(repos) 47 | 48 | def fix_issues(self, repos): 49 | for r in repos.keys(): 50 | for ref in repos[r]["refs"].keys(): 51 | if len(repos[r]["refs"][ref]["bundles"]) > 1: 52 | self.fix_multiple_bundles(repos, r, ref) 53 | 54 | if repos[r]["HEAD"] == "Invalid": 55 | self.fix_head(repos, r) 56 | 57 | # After fixing references, scan and handle stale locks 58 | self.list_and_handle_stale_locks() 59 | 60 | def list_and_handle_stale_locks(self): 61 | print("\nScanning for stale locks...") 62 | objs = self.s3.list_objects_v2( 63 | Bucket=self.bucket, Prefix=self.prefix + "/" 64 | ).get("Contents", []) 65 | 66 | now = datetime.datetime.now(tz=datetime.timezone.utc) 67 | stale = [] 68 | for o in objs: 69 | key = o["Key"] 70 | if key.endswith(".lock"): 71 | last_modified = o.get("LastModified") 72 | if last_modified is not None: 73 | age = (now - last_modified).total_seconds() 74 | if age > self.lock_ttl_seconds: 75 | stale.append((key, int(age))) 76 | 77 | if not stale: 78 | print("No stale locks found.") 79 | return 80 | 81 | print("Found stale locks:") 82 | for key, age in stale: 83 | print(f" - {key} (age: {age}s)") 84 | 85 | if self.delete_stale_locks: 86 | print("\nDeleting stale locks...") 87 | for key, _ in stale: 88 | try: 89 | self.s3.delete_object(Bucket=self.bucket, Key=key) 90 | print(f"Deleted {key}") 91 | except ClientError as e: 92 | print(f"Failed to delete {key}: {e}") 93 | else: 94 | print("\nRun with --delete-stale-locks to remove them automatically.") 95 | 96 | def analyze_repo(self): 97 | objs = self.s3.list_objects_v2( 98 | Bucket=self.bucket, Prefix=self.prefix + "/" 99 | ).get("Contents", []) 100 | 101 | repos = {} 102 | for o in objs: 103 | key = o["Key"] 104 | key_parts = key.split("/") 105 | repo_name = key_parts[0] 106 | if repo_name not in repos: 107 | repos[repo_name] = {"refs": {}, "HEAD": "Missing"} 108 | refs = "/".join(key_parts[1:-1]) 109 | if key_parts[1] == "HEAD": 110 | head_ref = ( 111 | self.s3.get_object(Bucket=self.bucket, Key=key) 112 | .get("Body") 113 | .read() 114 | .decode("utf-8") 115 | .strip() 116 | ) 117 | repos[repo_name]["HEAD"] = head_ref 118 | continue 119 | if not 
repos[repo_name]["refs"].get(refs, None): 120 | repos[repo_name]["refs"][refs] = {"protected": False, "bundles": []} 121 | if "PROTECTED#" == key_parts[-1]: 122 | repos[repo_name]["refs"][refs]["protected"] = True 123 | else: 124 | sha = key_parts[-1].split(".")[0] 125 | repos[repo_name]["refs"][refs]["bundles"].append( 126 | {"sha": sha, "lastModified": o["LastModified"]} 127 | ) 128 | return repos 129 | 130 | def fix_multiple_bundles(self, repos: dict, r: str, ref: str) -> None: 131 | print(f"\nFix multiple bundles for repo {r} and ref {ref}") 132 | bundles = repos[r]["refs"][ref]["bundles"] 133 | for i, sha in enumerate(bundles): 134 | print(f"{i + 1}. {sha['sha']} {sha['lastModified']}") 135 | while True: 136 | try: 137 | i = int(input("Enter the number of the bundle to keep: ")) 138 | if i > 0 and i <= len(bundles): 139 | sha = bundles[i - 1]["sha"] 140 | print(f"Keeping {sha}") 141 | input("Press enter to confirm or Ctrl+C to cancel") 142 | for s in [sha["sha"] for sha in bundles]: 143 | if s != sha: 144 | if self.delete_bundle: 145 | print(f"Removing {s}") 146 | self.s3.delete_object( 147 | Bucket=self.bucket, 148 | Key=f"{self.prefix}/{ref}/{s}.bundle", 149 | ) 150 | else: 151 | tmp_branch = f"{ref}_{str(uuid.uuid4())[:8]}" 152 | print(f"Moving {s} to new branch {tmp_branch}") 153 | self.s3.copy_object( 154 | CopySource={ 155 | "Bucket": self.bucket, 156 | "Key": f"{self.prefix}/{ref}/{s}.bundle", 157 | }, 158 | Bucket=self.bucket, 159 | Key=f"{self.prefix}/{tmp_branch}/{s}.bundle", 160 | ) 161 | self.s3.delete_object( 162 | Bucket=self.bucket, 163 | Key=f"{self.prefix}/{ref}/{s}.bundle", 164 | ) 165 | break 166 | except ValueError: 167 | print("Invalid input") 168 | 169 | def fix_head(self, repos: dict, r: str) -> None: 170 | print(f"\nFix invalid HEAD for repo {r}") 171 | heads = [k for k in repos[r]["refs"].keys() if "heads" in k] 172 | for i, head in enumerate(heads): 173 | print(f"{i + 1}. 
171 |     def fix_head(self, repos: dict, r: str) -> None:
172 |         print(f"\nFix invalid HEAD for repo {r}")
173 |         heads = [k for k in repos[r]["refs"].keys() if "heads" in k]
174 |         for i, head in enumerate(heads):
175 |             print(f"{i + 1}. {head.split('/')[-1]}")
176 |         while True:
177 |             try:
178 |                 i = int(input("Enter the number of the branch to use as head: "))
179 |                 if not 1 <= i <= len(heads):
180 |                     print("Invalid input")
181 |                     continue
182 |                 head = heads[i - 1]
183 |                 print(f"Setting {head} as HEAD")
184 |                 self.s3.put_object(
185 |                     Bucket=self.bucket,
186 |                     Key=f"{self.prefix}/HEAD",
187 |                     Body=head,
188 |                 )
189 |                 break
190 |             except ValueError:
191 |                 print("Invalid input")
192 | 
193 | 
194 | class ManageBranch:
195 |     def __init__(self, profile, bucket, prefix, branch) -> None:
196 |         self.bucket = bucket
197 |         self.prefix = prefix
198 |         self.s3 = boto3.Session(profile_name=profile).client("s3")
199 |         self.branch = branch
200 |         if not self.get_branch_content():
201 |             raise ValueError(f"Branch {self.branch} does not exist")
202 | 
203 |     def process_cmd(self, cmd):
204 |         if cmd == "delete-branch":
205 |             self.delete_branch()
206 |         elif cmd == "protect":
207 |             self.protect_branch()
208 |         elif cmd == "unprotect":
209 |             self.unprotect_branch()
210 | 
211 |     def delete_branch(self):
212 |         objs = self.get_branch_content()
213 |         resp = input(f"Delete {self.branch} branch [yes/no]: ")
214 |         if resp.lower() == "yes":
215 |             for o in objs:
216 |                 self.s3.delete_object(Bucket=self.bucket, Key=o["Key"])
217 |             print(f"Branch {self.branch} has been deleted")
218 |         else:
219 |             print("Aborted")
220 | 
221 |     def get_branch_content(self) -> list[dict]:
222 |         objs = self.s3.list_objects_v2(
223 |             Bucket=self.bucket, Prefix=f"{self.prefix}/refs/heads/{self.branch}/"
224 |         ).get("Contents", [])
225 |         return objs
226 | 
227 |     def protect_branch(self):
228 |         self.s3.put_object(
229 |             Bucket=self.bucket,
230 |             Key=f"{self.prefix}/refs/heads/{self.branch}/PROTECTED#",
231 |         )
232 |         print(f"Branch {self.branch} is now protected")
233 | 
234 |     def unprotect_branch(self):
235 |         self.s3.delete_object(
236 |             Bucket=self.bucket,
237 |             Key=f"{self.prefix}/refs/heads/{self.branch}/PROTECTED#",
238 |         )
239 |         print(f"Branch {self.branch} is now unprotected")
240 | 
241 | 
242 | def main():
243 |     parser = argparse.ArgumentParser()
244 |     parser.add_argument(
245 |         "command", help="Command to run: doctor, delete-branch, protect or unprotect"
246 |     )
247 |     parser.add_argument(
248 |         "remote", help="The remote s3 uri to analyze, including the AWS profile if used"
249 |     )
250 |     parser.add_argument(
251 |         "-d",
252 |         "--delete-bundle",
253 |         action="store_true",
254 |         help="Delete the bundle instead of creating a new branch",
255 |     )
256 |     parser.add_argument(
257 |         "--lock-ttl",
258 |         type=int,
259 |         default=DEFAULT_LOCK_TTL_SECONDS,
260 |         help=f"Seconds after which a lock is considered stale (default: {DEFAULT_LOCK_TTL_SECONDS})",
261 |     )
262 |     parser.add_argument(
263 |         "--delete-stale-locks",
264 |         action="store_true",
265 |         help="Delete stale lock files found during doctor run",
266 |     )
267 |     parser.add_argument(
268 |         "-b",
269 |         "--branch",
270 |         type=str,
271 |         help="Branch to operate on (required for delete-branch, protect and unprotect)",
272 |     )
273 |     args = parser.parse_args()
274 |     remote = args.remote
275 |     try:
276 |         remote_url = get_remote_url(remote)
277 |     except GitError as e:
278 |         sys.stderr.write(f"fatal: {e}\n")
279 |         sys.stderr.flush()
280 |         sys.exit(1)
281 | 
282 |     uri_scheme, profile, bucket, prefix = parse_git_url(remote_url)
283 |     try:
284 |         if args.command == "doctor":
285 |             doctor = Doctor(
286 |                 profile,
287 |                 bucket,
288 |                 prefix,
289 |                 args.delete_bundle,
290 |                 args.lock_ttl,
291 |                 args.delete_stale_locks,
292 |             )
293 |             doctor.run()
294 |         if args.command in ("delete-branch", "protect", "unprotect"):
295 |             if args.branch is None:
296 |                 sys.stderr.write("fatal: --branch is required\n")
297 |                 sys.stderr.flush()
298 |                 sys.exit(1)
299 |             try:
300 |                 manage_branch = ManageBranch(profile, bucket, prefix, args.branch)
301 |                 manage_branch.process_cmd(args.command)
302 |             except ValueError as e:
303 |                 sys.stderr.write(f"fatal: {e}\n")
304 |                 sys.stderr.flush()
305 |                 sys.exit(1)
306 | 
307 |         sys.exit(0)
308 | 
309 |     except (
310 |         ClientError,
311 |         ProfileNotFound,
312 |         CredentialRetrievalError,
313 |         NoCredentialsError,
314 |         UnknownCredentialError,
315 |     ) as e:
316 |         sys.stderr.write(f"fatal: invalid credentials {e}\n")
317 |         sys.stderr.flush()
318 |         sys.exit(1)
sys.stderr.write("fatal: --branch is required\n") 295 | sys.stderr.flush() 296 | sys.exit(1) 297 | try: 298 | manage_branch = ManageBranch(profile, bucket, prefix, args.branch) 299 | manage_branch.process_cmd(args.command) 300 | except ValueError as e: 301 | sys.stderr.write(f"fatal: {e}\n") 302 | sys.stderr.flush() 303 | sys.exit(1) 304 | 305 | sys.exit(0) 306 | 307 | except ( 308 | ClientError, 309 | ProfileNotFound, 310 | CredentialRetrievalError, 311 | NoCredentialsError, 312 | UnknownCredentialError, 313 | ) as e: 314 | sys.stderr.write(f"fatal: invalid credentials {e}\n") 315 | sys.stderr.flush() 316 | sys.exit(1) 317 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # git-remote-s3 2 | 3 | This library enables to use Amazon S3 as a git remote and LFS server. 4 | 5 | It provides an implementation of a [git remote helper](https://git-scm.com/docs/gitremote-helpers) to use S3 as a serverless Git server. 6 | 7 | It also provide an implementation of the [git-lfs custom transfer](https://github.com/git-lfs/git-lfs/blob/main/docs/custom-transfers.md) to enable pushing LFS managed files to the same S3 bucket used as remote. 8 | 9 | ## Table of Contents 10 | 11 | - [Installation](#installation) 12 | - [Prerequisites](#prerequisites) 13 | - [Security](#security) 14 | - [Data encryption](#data-encryption) 15 | - [Access control](#access-control) 16 | - [Use S3 remotes](#use-s3-remotes) 17 | - [Create a new repo](#create-a-new-repo) 18 | - [Clone a repo](#clone-a-repo) 19 | - [Branches, etc.](#branches-etc) 20 | - [Using S3 remotes for submodules](#using-s3-remotes-for-submodules) 21 | - [Repo as S3 Source for AWS CodePipeline](#repo-as-s3-source-for-aws-codepipeline) 22 | - [Archive file location](#archive-file-location) 23 | - [Example AWS CodePipeline source action config](#example-aws-codepipeline-source-action-config) 24 | - [LFS](#lfs) 25 | - [Creating the repo and pushing](#creating-the-repo-and-pushing) 26 | - [Clone the repo](#clone-the-repo) 27 | - [Notes about specific behaviors of Amazon S3 remotes](#notes-about-specific-behaviors-of-amazon-s3-remotes) 28 | - [Arbitrary Amazon S3 URIs](#arbitrary-amazon-s3-uris) 29 | - [Concurrent writes](#concurrent-writes) 30 | - [Manage the Amazon S3 remote](#manage-the-amazon-s3-remote) 31 | - [Delete branches](#delete-branches) 32 | - [Protected branches](#protected-branches) 33 | - [Under the hood](#under-the-hood) 34 | - [How S3 remote work](#how-s3-remote-work) 35 | - [How LFS work](#how-lfs-work) 36 | - [Debugging](#debugging) 37 | - [Credits](#credits) 38 | 39 | ## Installation 40 | 41 | `git-remote-s3` is a Python script and works with any Python version >= 3.9. 42 | 43 | Run: 44 | 45 | ``` 46 | pip install git-remote-s3 47 | ``` 48 | 49 | ## Prerequisites 50 | 51 | Before you can use `git-remote-s3`, you must: 52 | 53 | - Complete initial configuration: 54 | 55 | - Creating an AWS account 56 | - Configuring an IAM user or role 57 | 58 | - Create an AWS S3 bucket (or have one already) in your AWS account. 
 96 | ## Security
 97 | 
 98 | ### Data encryption
 99 | 
100 | All data is encrypted at rest and in transit by default. To add an additional layer of security you can use customer managed KMS keys to encrypt the data at rest on the S3 bucket. We recommend using [Bucket keys](https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucket-key.html) to minimize the KMS costs.
101 | 
102 | ### Access control
103 | 
104 | Access control to the remote is ensured via IAM permissions, and can be controlled at:
105 | 
106 | - bucket level
107 | - prefix level (you can use prefixes to store multiple repos in the same S3 bucket, thus minimizing the setup effort)
108 | - KMS key level
109 | 
110 | If you store multiple repos in a single bucket but would like to separate the permissions to access each repo, you can do so by modifying the resource definitions of the object-related actions to specify the repo prefix, and by adding a condition to the ListBucket action that restricts the operation to matching prefixes (and, by consequence, the corresponding repo):
111 | 
112 | ```json
113 | {
114 |   "Sid": "S3ObjectAccess",
115 |   "Effect": "Allow",
116 |   "Action": [
117 |     "s3:PutObject",
118 |     "s3:GetObject",
119 |     "s3:DeleteObject"
120 |   ],
121 |   "Resource": ["arn:aws:s3:::<bucket>/<repo>/*"]
122 | },
123 | {
124 |   "Sid": "S3ListObjects",
125 |   "Effect": "Allow",
126 |   "Action": [
127 |     "s3:ListBucket"
128 |   ],
129 |   "Condition": {
130 |     "StringEquals": {
131 |       "s3:prefix": "<repo>"
132 |     }
133 |   },
134 |   "Resource": ["arn:aws:s3:::<bucket>"]
135 | },
136 | ```
137 | 
138 | Using the condition key restricts the list operation to the content of the specific repo in the bucket.
139 | 
140 | ## Use S3 remotes
141 | 
142 | ### Create a new repo
143 | 
144 | S3 remotes are identified by the prefix `s3://` and at the bare minimum specify the name of the bucket. You can also provide a key prefix, as in `s3://my-git-bucket/my-repo`, and a profile, as in `s3://my-profile@my-git-bucket/my-repo`.
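
If you are unsure how a given URI will be interpreted, you can inspect it with the package's own parser. A small sketch, assuming `parse_git_url` returns the `(uri_scheme, profile, bucket, prefix)` tuple that `remote.py` and `manage.py` unpack:

```python
from git_remote_s3.common import parse_git_url

# Show how each URI form above is split into scheme/profile/bucket/prefix.
for uri in (
    "s3://my-git-bucket/my-repo",
    "s3://my-profile@my-git-bucket/my-repo",
    "s3+zip://my-git-bucket/my-repo",
):
    uri_scheme, profile, bucket, prefix = parse_git_url(uri)
    print(uri, "->", uri_scheme, profile, bucket, prefix)
```

To create a new repo: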
145 | 
146 | ```bash
147 | mkdir my-repo
148 | cd my-repo
149 | git init
150 | git remote add origin s3://my-git-bucket/my-repo
151 | ```
152 | 
153 | You can then add a file, commit and push the changes to the remote:
154 | 
155 | ```bash
156 | echo "Hello" > hello.txt
157 | git add -A
158 | git commit -a -m "hello"
159 | git push --set-upstream origin main
160 | ```
161 | 
162 | The remote HEAD is set to track the branch that has been pushed first to the remote repo. To change the remote HEAD branch, delete the HEAD object `s3://<bucket>/<prefix>/HEAD` and then run `git-s3 doctor s3://<bucket>/<prefix>`.
163 | 
164 | When you use `s3+zip://` instead of `s3://`, an additional zip archive named `repo.zip` is uploaded next to the `<sha>.bundle` file. This is useful, for example, if you want to use the repo as an S3 source for AWS CodePipeline, which expects a `.zip` file. When you push to the `main` branch, the archive is stored under `refs/heads/main/repo.zip` within the repo prefix. See [How S3 remotes work](#how-s3-remotes-work) for more details about the bundle file.
165 | 
166 | ### Clone a repo
167 | 
168 | To clone the repo to another folder just use the normal git syntax, using the s3 URI as remote:
169 | 
170 | ```bash
171 | git clone s3://my-git-bucket/my-repo my-repo-clone
172 | ```
173 | 
174 | ### Branches, etc.
175 | 
176 | Creating branches and pushing them works as normal:
177 | 
178 | ```bash
179 | cd my-repo
180 | git checkout -b new_branch
181 | touch new_file.txt
182 | git add -A
183 | git commit -a -m "new file"
184 | git push origin new_branch
185 | ```
186 | 
187 | All git operations that do not rely on communication with the server work as usual (e.g. `git merge`).
188 | 
189 | ### Using S3 remotes for submodules
190 | 
191 | If you have a repo that uses submodules also hosted on S3, you need to run the following command:
192 | 
193 | ```
194 | git config protocol.s3.allow always
195 | ```
196 | 
197 | Or, to enable it globally:
198 | 
199 | ```
200 | git config --global protocol.s3.allow always
201 | ```
202 | 
203 | ## Repo as S3 Source for AWS CodePipeline
204 | 
205 | [AWS CodePipeline](https://aws.amazon.com/codepipeline/) offers an [Amazon S3 source action](https://docs.aws.amazon.com/codepipeline/latest/userguide/integrations-action-type.html#integrations-source-s3)
206 | as the location for your source code and application files, but it requires the source files to be uploaded as a single ZIP file.
207 | As briefly mentioned in [Create a new repo](#create-a-new-repo), `git-remote-s3` can create and upload zip archives.
208 | When you use the `s3+zip` URI scheme when adding the remote, `git-remote-s3` will automatically upload an archive that can be used by AWS CodePipeline.
209 | 
210 | ### Archive file location
211 | 
212 | Let's assume your bucket name is `my-git-bucket` and the repo is called `my-repo`. Run `git remote add origin s3+zip://my-git-bucket/my-repo` to use it as remote.
213 | When you now commit your changes and push to the remote, an additional `repo.zip` file will be uploaded to the bucket.
214 | For example, if you push to the `main` branch (`git push origin main`), the file is available under `s3://my-git-bucket/my-repo/refs/heads/main/repo.zip`.
215 | When you push to a branch called `fix_a_bug`, it's available under `s3://my-git-bucket/my-repo/refs/heads/fix_a_bug/repo.zip`.
216 | And if you create and push a tag called `v1.0`, it will be `s3://my-git-bucket/my-repo/refs/tags/v1.0/repo.zip`.
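
If you want to retrieve that archive outside of CodePipeline, a plain `GetObject` is enough. A minimal sketch with boto3, using the example bucket and repo names above:

```python
import boto3

# Download the zip archive produced by an `s3+zip` push of the main branch.
s3 = boto3.client("s3")
s3.download_file(
    Bucket="my-git-bucket",
    Key="my-repo/refs/heads/main/repo.zip",
    Filename="repo.zip",
)
```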
217 | 
218 | ### Example AWS CodePipeline source action config
219 | 
220 | The AWS CodePipeline action configuration to trigger when you update your `main` branch:
221 | 
222 | - Action Provider: `Amazon S3`
223 | - Bucket: `my-git-bucket`
224 | - S3 object key: `my-repo/refs/heads/main/repo.zip`
225 | - Change detection options: `AWS CodePipeline`
226 | 
227 | Visit [Tutorial: Create a simple pipeline (S3 bucket)](https://docs.aws.amazon.com/codepipeline/latest/userguide/tutorials-simple-s3.html) to learn more about using an S3 bucket as a source action.
228 | 
229 | ## LFS
230 | 
231 | To use LFS you first need to install git-lfs. You can refer to the [official documentation](https://git-lfs.com/) on how to do this on your system.
232 | 
233 | Next, you need to enable the S3 integration by running the following command in the repo folder:
234 | 
235 | ```bash
236 | git-lfs-s3 install
237 | ```
238 | 
239 | which is a shortcut for:
240 | 
241 | ```bash
242 | git config --add lfs.customtransfer.git-lfs-s3.path git-lfs-s3
243 | git config --add lfs.standalonetransferagent git-lfs-s3
244 | ```
245 | 
246 | ### Creating the repo and pushing
247 | 
248 | Let's assume we want to store TIFF files in LFS.
249 | 
250 | ```bash
251 | mkdir lfs-repo
252 | cd lfs-repo
253 | git init
254 | git lfs install
255 | git-lfs-s3 install
256 | git lfs track "*.tiff"
257 | git add .gitattributes
258 | 
259 | git add file.tiff
260 | git commit -a -m "my first tiff file"
261 | git remote add origin s3://my-git-bucket/lfs-repo
262 | git push --set-upstream origin main
263 | ```
264 | 
265 | ### Clone the repo
266 | 
267 | When cloning a repo that uses the S3 remote for LFS, `git-lfs` cannot know how to fetch the files, since the custom transfer configuration has not been added yet.
268 | 
269 | It involves two extra steps.
270 | 
271 | ```bash
272 | % git clone s3://my-git-bucket/lfs-repo lfs-repo-clone
273 | Error downloading object: file.tiff (54238cf): Smudge error: Error downloading file.tiff (54238cfaaaa42dda05da0e12bf8ee3156763fa35296085ccdef63b13a87837c5): batch request: ssh: Could not resolve hostname s3: Name or service not known: exit status 255
274 | ...
275 | ```
276 | 
277 | To fix:
278 | 
279 | ```bash
280 | cd lfs-repo-clone
281 | git-lfs-s3 install
282 | git reset --hard main
283 | ```
284 | 
285 | ## Notes about specific behaviors of Amazon S3 remotes
286 | 
287 | ### Arbitrary Amazon S3 URIs
288 | 
289 | An Amazon S3 URI that points to a valid bucket with an arbitrary prefix, even one without the expected repository structure underneath, is considered valid.
290 | 
291 | `git ls-remote` returns an empty list and `git clone` clones an empty repository for which the S3 URI is set as remote origin.
292 | 
293 | ```
294 | % git clone s3://my-git-bucket/this-is-a-new-repo
295 | Cloning into 'this-is-a-new-repo'...
296 | warning: You appear to have cloned an empty repository.
297 | % cd this-is-a-new-repo
298 | % git remote -v
299 | origin  s3://my-git-bucket/this-is-a-new-repo (fetch)
300 | origin  s3://my-git-bucket/this-is-a-new-repo (push)
301 | ```
302 | 
303 | **Tip**: This behavior can be used to quickly create a new git repo.
304 | 
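Under the hood, listing the refs of such a URI is just a prefix listing that finds no `.bundle` keys. A rough sketch of the check, using the example bucket above:

```python
import boto3

# What `git ls-remote` effectively sees for an arbitrary prefix: no
# `refs/.../<sha>.bundle` keys, hence an empty ref list.
s3 = boto3.client("s3")
resp = s3.list_objects_v2(Bucket="my-git-bucket", Prefix="this-is-a-new-repo/")
refs = [
    o["Key"]
    for o in resp.get("Contents", [])
    if "/refs/" in o["Key"] and o["Key"].endswith(".bundle")
]
print(refs)  # [] for a fresh or arbitrary prefix
```
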
339 | ### Concurrent writes
340 | 
341 | Due to the distributed nature of `git`, there might be cases (albeit rare) where 2 or more `git push` are executed at the same time by different users, each with their own modification of the same branch. `git-remote-s3` implements **per-reference locking** to prevent concurrent write conflicts in those cases.
342 | 
343 | #### Per-reference locking
344 | The git command executes the push in 4 steps:
345 | 
346 | 1. first it checks if the remote reference is the correct ancestor for the commit being pushed
347 | 2. if that is correct, it invokes the `git-remote-s3` command, which then attempts to acquire a lock by creating the lock object `<prefix>/<ref>/LOCK#.lock` using S3 conditional writes (`IfNoneMatch="*"`; see the sketch below)
348 | 3. while holding the lock, `git-remote-s3` safely writes the bundle to the S3 bucket at the `refs/heads/<branch>` path
349 | 4. `git-remote-s3` deletes the lock object after the push succeeds, thereby releasing the lock for that ref
350 | 
351 | Clients that fail to acquire the lock will fail with the following error and can try to push again.
352 | 
353 | ```
354 | error refs/heads/main "failed to acquire ref lock at my-repo/refs/heads/main/LOCK#.lock.
355 | Another client may be pushing. If this persists beyond 60s,
356 | run git-s3 doctor --lock-ttl 60 to inspect and optionally clear stale locks."
357 | ```
358 | 
359 | The per-reference locks automatically expire after 60 seconds by default. This TTL is configurable via the `GIT_REMOTE_S3_LOCK_TTL_SECONDS` environment variable.
360 | If for some reason a reference's lock becomes stale, `git-remote-s3` automatically clears it when executing a `git push`.
361 | If you repeatedly run into lock acquisition failures or otherwise want to manually clean up stale locks, run `git-s3 doctor --lock-ttl <ttl>` to inspect and optionally remove those stale locks.
362 | 
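The conditional write at the heart of step 2 can be sketched as follows. This is an illustration only; the bucket and ref are example values, but `IfNoneMatch="*"` is the same condition `remote.py` uses:

```python
import boto3
from botocore.exceptions import ClientError

# Create the per-ref lock object only if it does not already exist.
s3 = boto3.client("s3")
lock_key = "my-repo/refs/heads/main/LOCK#.lock"
try:
    s3.put_object(Bucket="my-git-bucket", Key=lock_key, Body=b"", IfNoneMatch="*")
    print("lock acquired")
except ClientError as e:
    if e.response.get("Error", {}).get("Code") == "PreconditionFailed":
        print("another client holds the lock; retry later")  # lost the race
    else:
        raise
```
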
363 | #### Multiple branch heads
364 | In the (rare) case where multiple `git push` commands are simultaneously executed with one or more clients running an outdated version of `git-remote-s3` without locking protection, it is possible that multiple bundles will be written to S3 for the same branch head. All subsequent `git push` commands will fail with the following error:
365 | 
366 | ```
367 | error: dst refspec refs/heads/<branch> matches more than one
368 | error: failed to push some refs to 's3://<bucket>/<prefix>'
369 | ```
370 | 
371 | To fix this issue, run the `git-s3 doctor <remote s3 uri>` command. By default it will create a new branch for every bundle that should not be retained. The user can then check out the branch locally and merge it into the original branch. If you want instead to remove the bundle, specify `--delete-bundle`.
372 | 
373 | ## Manage the Amazon S3 remote
374 | 
375 | ### Delete branches
376 | 
377 | To remove remote branches that are not used anymore you can use the `git-s3 delete-branch <remote s3 uri> -b <branch>` command. This command deletes the bundle object(s) from Amazon S3 under the branch path.
378 | 
379 | ### Protected branches
380 | 
381 | To protect/unprotect a branch run `git s3 protect <remote s3 uri> -b <branch>` and `git s3 unprotect <remote s3 uri> -b <branch>` respectively.
382 | 
383 | ## Under the hood
384 | 
385 | ### How S3 remotes work
386 | 
387 | Bundles are stored in the S3 bucket as `<prefix>/<ref>/<sha>.bundle`.
388 | 
389 | When listing remote refs (e.g. explicitly via `git ls-remote`) we list all the keys present under the given `<prefix>`.
390 | 
391 | When pushing a new ref (e.g. a commit), we get the sha of the ref, we bundle the ref via `git bundle create <sha>.bundle <ref>` and store it to S3 according to the scheme above.
392 | 
393 | If the push is successful, the code removes the previous bundle associated with the ref.
394 | 
395 | If two users with clients that predate per-reference locking concurrently push a commit based on the same current branch head, both bundles would be written to the repo and the current bundle removed. No data is lost, but no further push will be possible until all bundles but one are removed.
396 | For this you can use the `git s3 doctor <remote s3 uri>` command.
397 | 
398 | ### How LFS works
399 | 
400 | The LFS integration stores the file in the bucket defined by the remote URI, under the key `<prefix>/lfs/<oid>`, where `oid` is the unique identifier assigned by git-lfs to the file.
401 | 
402 | If an object with the same key already exists, git-lfs-s3 does not upload it again.
403 | 
404 | ### Debugging
405 | 
406 | Use the `--verbose` flag or set `transfer.verbosity=2` to print debug information when performing git operations:
407 | 
408 | ```bash
409 | git -c transfer.verbosity=2 push origin main
410 | ```
411 | 
412 | For early errors (like credential issues), use the environment variable:
413 | 
414 | ```bash
415 | GIT_REMOTE_S3_VERBOSE=1 git push origin main
416 | ```
417 | 
418 | Logs are written to stderr.
419 | 
420 | For LFS operations you can enable and disable debug logging via `git-lfs-s3 enable-debug` and `git-lfs-s3 disable-debug` respectively. Logs are put in `.git/lfs/tmp/git-lfs-s3.log` in the repo.
421 | 
422 | ## Credits
423 | 
424 | The git S3 integration was inspired by the work of Bryan Gahagan on [git-remote-s3](https://github.com/bgahagan/git-remote-s3).
425 | 426 | The LFS implementation benefitted from [lfs-s3](https://github.com/nicolas-graves/lfs-s3) by [@nicolas-graves](https://github.com/nicolas-graves). If you do not need to use the git-remote-s3 transport you should use that project. 427 | -------------------------------------------------------------------------------- /git_remote_s3/remote.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2023-present Amazon.com, Inc. or its affiliates 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | import sys 6 | import logging 7 | import boto3 8 | import boto3.exceptions 9 | from botocore.exceptions import ( 10 | ClientError, 11 | ProfileNotFound, 12 | CredentialRetrievalError, 13 | NoCredentialsError, 14 | UnknownCredentialError, 15 | ) 16 | from boto3.s3.transfer import TransferConfig 17 | import re 18 | import tempfile 19 | import os 20 | import concurrent.futures 21 | from threading import Lock 22 | 23 | import botocore.exceptions 24 | from git_remote_s3 import git 25 | from .enums import UriScheme 26 | from .common import parse_git_url 27 | import botocore 28 | from typing import Optional 29 | 30 | logger = logging.getLogger(__name__) 31 | if "remote" in __name__: 32 | # Check for early verbosity via environment variable 33 | verbose_env = os.environ.get("GIT_REMOTE_S3_VERBOSE", "").lower() in ( 34 | "1", 35 | "true", 36 | "yes", 37 | ) 38 | log_level = logging.INFO if verbose_env else logging.ERROR 39 | logging.basicConfig( 40 | level=log_level, 41 | stream=sys.stderr, 42 | format="%(name)s: %(levelname)s: %(message)s", 43 | ) 44 | 45 | DEFAULT_LOCK_TTL_SECONDS = 60 46 | 47 | class BucketNotFoundError(Exception): 48 | def __init__(self, bucket: str): 49 | self.bucket = bucket 50 | super().__init__(f"Bucket {bucket} not found.") 51 | 52 | 53 | class NotAuthorizedError(Exception): 54 | def __init__(self, action: str, bucket: str): 55 | self.bucket = bucket 56 | self.action = action 57 | super().__init__( 58 | f"Not authorized to perform {action} on the S3 bucket {bucket}." 
 59 |         )
 60 | 
 61 | 
 62 | class Mode:
 63 |     FETCH = "fetch"
 64 |     PUSH = "push"
 65 | 
 66 | 
 67 | class S3Remote:
 68 |     def __init__(self, uri_scheme, profile, bucket, prefix):
 69 |         self.uri_scheme = uri_scheme
 70 |         self.profile = profile
 71 |         self.bucket = bucket
 72 |         self.prefix = prefix
 73 |         if profile:
 74 |             self.session = boto3.Session(profile_name=profile)
 75 |         else:
 76 |             self.session = boto3.Session()
 77 |         self.s3 = self.session.client("s3")
 78 |         try:
 79 |             self.s3.list_objects_v2(Bucket=bucket, Prefix=prefix)
 80 |         except ClientError as e:
 81 |             if e.response["Error"]["Code"] == "NoSuchBucket":
 82 |                 raise BucketNotFoundError(bucket)
 83 |             if e.response["Error"]["Code"] == "AccessDenied":
 84 |                 raise NotAuthorizedError("ListObjectsV2", bucket)
 85 |             raise e
 86 | 
 87 | 
 88 |         self.mode = None
 89 |         self.fetched_refs = []
 90 |         self.fetched_refs_lock = Lock()  # Lock for thread-safe access to fetched_refs
 91 |         self.push_cmds = []
 92 |         self.fetch_cmds = []  # Store fetch commands for batch processing
 93 |         # Lock TTL (seconds); can be configured via env var
 94 |         try:
 95 |             self.lock_ttl_seconds = int(os.environ.get("GIT_REMOTE_S3_LOCK_TTL_SECONDS", DEFAULT_LOCK_TTL_SECONDS))
 96 |         except ValueError:
 97 |             self.lock_ttl_seconds = DEFAULT_LOCK_TTL_SECONDS
 98 | 
 99 |     def list_refs(self, *, bucket: str, prefix: str) -> list:
100 |         res = self.s3.list_objects_v2(Bucket=bucket, Prefix=prefix)
101 |         contents = res.get("Contents", [])
102 |         next_token = res.get("NextContinuationToken", None)
103 | 
104 |         while next_token:
105 |             res = self.s3.list_objects_v2(
106 |                 Bucket=bucket, Prefix=prefix, ContinuationToken=next_token
107 |             )
108 |             contents.extend(res.get("Contents", []))
109 |             next_token = res.get("NextContinuationToken", None)
110 | 
111 |         contents.sort(key=lambda x: x["LastModified"])
112 |         contents.reverse()
113 | 
114 |         objs = [
115 |             o["Key"].removeprefix(prefix)[1:]
116 |             for o in contents
117 |             if o["Key"].startswith(prefix + "/refs") and o["Key"].endswith(".bundle")
118 |         ]
119 |         return objs
120 | 
121 |     def cmd_fetch(self, args: str):
122 |         sha, ref = args.split(" ")[1:]
123 |         with self.fetched_refs_lock:
124 |             if sha in self.fetched_refs:
125 |                 return
126 |         logger.info(f"fetch {sha} {ref}")
127 |         temp_dir: Optional[str] = None
128 |         try:
129 |             temp_dir = tempfile.mkdtemp(prefix="git_remote_s3_fetch_")
130 |             bundle_path = f"{temp_dir}/{sha}.bundle"
131 | 
132 |             # Use TransferConfig for multipart download
133 |             # Multipart threshold (25 MB):
134 |             # - Small enough to ensure multipart downloads are used when necessary
135 |             # - Allows parallel downloading to begin early
136 |             # - Good balance between overhead and parallelization benefits
137 |             # Chunk size (16 MB):
138 |             # - Large enough to minimize HTTP request overhead
139 |             # - Small enough to allow good parallelization (500 MB file = ~31 chunks)
140 |             # - Provides reasonable progress granularity for monitoring
141 |             # - Works well with typical network conditions
142 |             MB = 1024**2
143 |             config = TransferConfig(
144 |                 multipart_threshold=25 * MB,  # 25 MB threshold for multipart
145 |                 multipart_chunksize=16 * MB,  # Size of each part
146 |                 use_threads=True,  # Enable threading
147 |                 max_concurrency=8,  # Number of concurrent threads
148 |             )
149 | 
150 |             # Download file using the TransferConfig
151 |             self.s3.download_file(
152 |                 Bucket=self.bucket,
153 |                 Key=f"{self.prefix}/{ref}/{sha}.bundle",
154 |                 Filename=bundle_path,
155 |                 Config=config,
156 |             )
157 | 
158 |             logger.info(f"fetched {bundle_path} {ref}")
159 | 
160 |             git.unbundle(folder=temp_dir, 
sha=sha, ref=ref)
161 |             with self.fetched_refs_lock:
162 |                 self.fetched_refs.append(sha)
163 |         except ClientError as e:
164 |             if e.response["Error"]["Code"] == "AccessDenied":
165 |                 raise NotAuthorizedError("GetObject", self.bucket)
166 |             raise e
167 |         finally:
168 |             if temp_dir is not None:
169 |                 if os.path.exists(f"{temp_dir}/{sha}.bundle"):
170 |                     os.remove(f"{temp_dir}/{sha}.bundle")
171 | 
172 |     def remove_remote_ref(self, remote_ref: str) -> str:
173 |         logger.info(f"Removing remote ref {remote_ref}")
174 |         try:
175 |             objects_to_delete = self.s3.list_objects_v2(
176 |                 Bucket=self.bucket, Prefix=f"{self.prefix}/{remote_ref}/"
177 |             ).get("Contents", [])
178 |             if (
179 |                 self.uri_scheme == UriScheme.S3
180 |                 and len(objects_to_delete) == 1
181 |                 or self.uri_scheme == UriScheme.S3_ZIP
182 |                 and len(objects_to_delete) == 2
183 |             ):
184 |                 for obj in objects_to_delete:
185 |                     self.s3.delete_object(Bucket=self.bucket, Key=obj["Key"])
186 |                 return f"ok {remote_ref}\n"
187 |             elif len(objects_to_delete) == 0:
188 |                 return f"error {remote_ref} not found\n"
189 |             else:
190 |                 return f'error {remote_ref} "multiple bundles exist on the server. Run git-s3 doctor to fix."?\n'  # noqa: B950
191 | 
192 |         except ClientError as e:
193 |             if e.response["Error"]["Code"] == "404":
194 |                 logger.info(f"fatal: {remote_ref} not found\n")
195 |                 return f"error {remote_ref} not found\n"
196 |             raise e
197 | 
198 |     def cmd_push(self, args: str) -> str:
199 |         force_push = False
200 |         local_ref, remote_ref = args.split(" ")[1].split(":")
201 |         if not local_ref:
202 |             return self.remove_remote_ref(remote_ref)
203 |         if local_ref.startswith("+"):
204 |             force_push = not self.is_protected(remote_ref)
205 |             logger.info(f"Force push {force_push}")
206 |             local_ref = local_ref[1:]
207 | 
208 |         logger.info(f"push !{local_ref}! !{remote_ref}!")
209 |         temp_dir = tempfile.mkdtemp(prefix="git_remote_s3_push_")
210 | 
211 |         contents = self.get_bundles_for_ref(remote_ref)
212 |         if len(contents) > 1:
213 |             return f'error {remote_ref} "multiple bundles exist on the server. Run git-s3 doctor to fix."?\n'  # noqa: B950
214 | 
215 |         remote_to_remove = contents[0]["Key"] if len(contents) == 1 else None
216 |         sha: Optional[str] = None
217 |         lock_key: Optional[str] = None
218 |         try:
219 |             sha = git.rev_parse(local_ref)
220 |             if remote_to_remove:
221 |                 remote_sha = remote_to_remove.split("/")[-1].split(".")[0]
222 |                 if not force_push and not git.is_ancestor(remote_sha, sha):
223 |                     return f'error {remote_ref} "remote ref is not ancestor of {local_ref}."?\n'
224 | 
225 |             # Create the bundle before acquiring the lock (local operation)
226 |             temp_file = git.bundle(folder=temp_dir, sha=sha, ref=local_ref)
227 | 
228 |             # Acquire per-ref lock to avoid concurrent writes
229 |             lock_key = self.acquire_lock(remote_ref)
230 |             if not lock_key:
231 |                 # Provide clear guidance to the user; include lock path and TTL
232 |                 lock_path = f"{self.prefix}/{remote_ref}/LOCK#.lock"
233 |                 return (
234 |                     f'error {remote_ref} '
235 |                     f'"failed to acquire ref lock at {lock_path}. '
236 |                     f'Another client may be pushing. 
If this persists beyond {self.lock_ttl_seconds}s, '
237 |                     f'run git-s3 doctor --lock-ttl {self.lock_ttl_seconds} to inspect and optionally clear stale locks."?\n'
238 |                 )
239 | 
240 |             # If remote has multiple bundles for the ref, then reject push and notify client(s)
241 |             # to upgrade to new locking behavior
242 |             # Otherwise, proceed with pushing the new bundle
243 |             current_contents = self.get_bundles_for_ref(remote_ref)
244 |             if len(current_contents) > 1:
245 |                 return f'error {remote_ref} "multiple bundles exist for the same ref on the server. Run git-s3 doctor to fix. Upgrade git-remote-s3 to latest version to prevent this in the future."\n'
246 | 
247 |             current_remote_to_remove = (
248 |                 current_contents[0]["Key"] if len(current_contents) == 1 else None
249 |             )
250 |             if (
251 |                 remote_to_remove is not None
252 |                 and current_remote_to_remove is not None
253 |                 and current_remote_to_remove != remote_to_remove
254 |             ):
255 |                 return f'error {remote_ref} "stale remote. Please fetch and retry."?\n'
256 | 
257 |             with open(temp_file, "rb") as f:
258 |                 self.s3.put_object(
259 |                     Bucket=self.bucket,
260 |                     Key=f"{self.prefix}/{remote_ref}/{sha}.bundle",
261 |                     Body=f,
262 |                 )
263 | 
264 |             self.init_remote_head(remote_ref)
265 |             logger.info(f"pushed {temp_file} to {remote_ref}")
266 |             if remote_to_remove:
267 |                 self.s3.delete_object(Bucket=self.bucket, Key=remote_to_remove)
268 | 
269 |             if self.uri_scheme == UriScheme.S3_ZIP:
270 |                 # Create and push a zip archive next to the bundle file
271 |                 # Example use-case: Repo on S3 as Source for AWS CodePipeline
272 |                 commit_msg = git.get_last_commit_message()
273 |                 temp_file_archive = git.archive(folder=temp_dir, ref=local_ref)
274 |                 with open(temp_file_archive, "rb") as f:
275 |                     self.s3.put_object(
276 |                         Bucket=self.bucket,
277 |                         Key=f"{self.prefix}/{remote_ref}/repo.zip",
278 |                         Body=f,
279 |                         Metadata={"codepipeline-artifact-revision-summary": commit_msg},
280 |                         ContentDisposition=f"attachment; filename=repo-{sha[:8]}.zip",
281 |                     )
282 |                 logger.info(
283 |                     f"pushed {temp_file_archive} to "
284 |                     f"{self.prefix}/{remote_ref}/repo.zip with message {commit_msg}"
285 |                 )
286 | 
287 |             return f"ok {remote_ref}\n"
288 |         except git.GitError:
289 |             logger.info(f"fatal: {local_ref} not found\n")
290 |             return f'error {remote_ref} "{local_ref} not found"?\n'
291 |         except boto3.exceptions.S3UploadFailedError as e:
292 |             logger.info(f"fatal: {e}\n")
293 |             return f'error {remote_ref} "{e}"?\n'
294 |         except botocore.exceptions.ClientError as e:
295 |             logger.info(f"fatal: {e}\n")
296 |             return f'error {remote_ref} "{e}"?\n'
297 |         finally:
298 |             if lock_key:
299 |                 try:
300 |                     self.release_lock(remote_ref, lock_key)
301 |                 except Exception as e:
302 |                     logger.info(f"failed to release lock {lock_key} for {remote_ref}: {e}")
303 |                     return f'error {remote_ref} "failed to release lock. 
You may need to manually remove the lock {lock_key} from the server or use git-s3 doctor to fix."?\n' 304 | if sha and os.path.exists(f"{temp_dir}/{sha}.bundle"): 305 | os.remove(f"{temp_dir}/{sha}.bundle") 306 | 307 | def init_remote_head(self, ref: str) -> None: 308 | """Initialise the remote HEAD reference if it does not exist 309 | 310 | Args: 311 | ref (str): The ref to which the remote HEAD should point to 312 | """ 313 | 314 | try: 315 | self.s3.head_object(Bucket=self.bucket, Key=f"{self.prefix}/HEAD") 316 | except ClientError: 317 | self.s3.put_object( 318 | Bucket=self.bucket, 319 | Key=f"{self.prefix}/HEAD", 320 | Body=ref, 321 | ) 322 | 323 | def get_bundles_for_ref(self, remote_ref: str) -> list[dict]: 324 | """Lists all the bundles for a given ref on the remote 325 | 326 | Args: 327 | remote_ref (str): the remote ref 328 | 329 | Returns: 330 | list[dict]: the list of bundles objects 331 | """ 332 | 333 | # We are not implementing pagination since there can be few objects (bundles) 334 | # under a single Prefix 335 | return [ 336 | c 337 | for c in self.s3.list_objects_v2( 338 | Bucket=self.bucket, Prefix=f"{self.prefix}/{remote_ref}/" 339 | ).get("Contents", []) 340 | if "PROTECTED#" not in c["Key"] 341 | and ".zip" not in c["Key"] 342 | and "/LOCKS/" not in c["Key"] 343 | and not c["Key"].endswith(".lock") 344 | ] 345 | 346 | def is_protected(self, remote_ref): 347 | protected = self.s3.list_objects_v2( 348 | Bucket=self.bucket, Prefix=f"{self.prefix}/{remote_ref}/PROTECTED#" 349 | ).get("Contents", []) 350 | return protected 351 | 352 | def acquire_lock(self, remote_ref: str) -> Optional[str]: 353 | """Acquire a per-ref lock using S3 conditional writes. 354 | 355 | Client attempts to create a single lock object under // using 356 | S3's HTTP `If-None-Match` conditional header so that only one client can write the 357 | lock in case of acquisition races. 358 | If unable to acquire the lock, check for staleness of the lock and delete it if it is stale. 359 | Clients that lose the race will get a `412 PreconditionFailed` and should retry later. 360 | 361 | Returns the lock key if acquired, or None otherwise. 
362 | """ 363 | 364 | lock_key = f"{self.prefix}/{remote_ref}/LOCK#.lock" 365 | try: 366 | # Use conditional write to create the lock only if it does not exist 367 | self.s3.put_object( 368 | Bucket=self.bucket, 369 | Key=lock_key, 370 | Body=b"", 371 | IfNoneMatch="*", 372 | ) 373 | return lock_key 374 | except botocore.exceptions.ClientError as e: 375 | # 412 PreconditionFailed when the lock already exists 376 | if ( 377 | e.response.get("ResponseMetadata", {}).get("HTTPStatusCode") == 412 378 | or e.response.get("Error", {}).get("Code") in [ 379 | "PreconditionFailed", 380 | "412", 381 | ] 382 | ): 383 | # Check if the existing lock is stale; if so, try to clear and acquire 384 | try: 385 | head = self.s3.head_object(Bucket=self.bucket, Key=lock_key) 386 | last_modified = head.get("LastModified") 387 | if last_modified is not None: 388 | import datetime 389 | 390 | now = datetime.datetime.now(tz=last_modified.tzinfo) 391 | age = (now - last_modified).total_seconds() 392 | if age > self.lock_ttl_seconds: 393 | # Attempt to delete stale lock and re-acquire 394 | self.s3.delete_object(Bucket=self.bucket, Key=lock_key) 395 | # Retry conditional put 396 | self.s3.put_object( 397 | Bucket=self.bucket, 398 | Key=lock_key, 399 | Body=b"", 400 | IfNoneMatch="*", 401 | ) 402 | return lock_key 403 | except botocore.exceptions.ClientError as e: 404 | logger.info(f"failed to check staleness of {lock_key} for {remote_ref}: {e}") 405 | raise e 406 | raise 407 | 408 | def release_lock(self, remote_ref: str, lock_key: str) -> None: 409 | """Release a previously acquired lock for the given ref.""" 410 | try: 411 | self.s3.delete_object(Bucket=self.bucket, Key=lock_key) 412 | except botocore.exceptions.ClientError as e: 413 | if e.response.get("ResponseMetadata", {}).get("HTTPStatusCode") == 404: 414 | logger.info(f"lock {lock_key} already released") 415 | else: 416 | raise 417 | 418 | def cmd_option(self, arg: str): 419 | option, value = arg.split(" ")[1:] 420 | if option == "verbosity" and int(value) >= 2: 421 | # Set both root logger and module logger for complete verbosity 422 | logging.getLogger().setLevel(logging.INFO) 423 | logger.setLevel(logging.INFO) 424 | sys.stdout.write("ok\n") 425 | else: 426 | sys.stdout.write("unsupported\n") 427 | sys.stdout.flush() 428 | 429 | def cmd_list(self, *, for_push: bool = False): 430 | objs = self.list_refs(bucket=self.bucket, prefix=self.prefix) 431 | logger.info(objs) 432 | 433 | if not for_push: 434 | try: 435 | head = self.get_remote_head() 436 | logger.info(f"HEAD=[{head}]") 437 | for o in objs: 438 | ref = "/".join(o.split("/")[:-1]) 439 | if ref == head: 440 | logger.info(f"@{ref} HEAD\n") 441 | sys.stdout.write(f"@{ref} HEAD\n") 442 | except ClientError as e: 443 | if e.response["Error"]["Code"] == "NoSuchKey": 444 | pass # ignoring missing HEAD on remote 445 | 446 | for o in [x for x in objs if re.match(".+/.+/.+/[a-f0-9]{40}.bundle", x)]: 447 | elements = o.split("/") 448 | sha = elements[-1].split(".")[0] 449 | sys.stdout.write(f"{sha} {'/'.join(elements[:-1])}\n") 450 | 451 | sys.stdout.write("\n") 452 | sys.stdout.flush() 453 | 454 | def get_remote_head(self) -> str: 455 | """Gets the remote head ref 456 | 457 | Returns: 458 | str: the remote head ref 459 | """ 460 | head = ( 461 | self.s3.get_object(Bucket=self.bucket, Key=f"{self.prefix}/HEAD") 462 | .get("Body") 463 | .read() 464 | .decode("utf-8") 465 | .strip() 466 | ) 467 | 468 | return head 469 | 470 | def cmd_capabilities(self): 471 | sys.stdout.write("*push\n") 472 | 
sys.stdout.write("*fetch\n") 473 | sys.stdout.write("option\n") 474 | sys.stdout.write("\n") 475 | sys.stdout.flush() 476 | 477 | def process_fetch_cmds(self, cmds): 478 | """Process fetch commands in parallel using a thread pool. 479 | 480 | Args: 481 | cmds (list): List of fetch commands to process 482 | """ 483 | if not cmds: 484 | return 485 | 486 | logger.info(f"Processing {len(cmds)} fetch commands in parallel") 487 | 488 | # Use a thread pool to process fetch commands in parallel 489 | with concurrent.futures.ThreadPoolExecutor() as executor: 490 | # Submit all fetch commands to the thread pool 491 | futures = [executor.submit(self.cmd_fetch, cmd) for cmd in cmds] 492 | 493 | # Wait for all fetch commands to complete 494 | concurrent.futures.wait(futures) 495 | 496 | logger.info(f"Completed processing {len(cmds)} fetch commands in parallel") 497 | 498 | def process_cmd(self, cmd: str): # noqa: C901 499 | if cmd.startswith("fetch"): 500 | if self.mode != Mode.FETCH: 501 | self.mode = Mode.FETCH 502 | self.fetch_cmds = [] 503 | self.fetch_cmds.append(cmd.strip()) 504 | # Don't process fetch commands immediately, collect them for batch processing 505 | elif cmd.startswith("push"): 506 | if self.mode != Mode.PUSH: 507 | self.mode = Mode.PUSH 508 | self.push_cmds = [] 509 | self.push_cmds.append(cmd.strip()) 510 | # self.cmd_push(cmd.strip()) 511 | elif cmd.startswith("option"): 512 | self.cmd_option(cmd.strip()) 513 | elif cmd.startswith("list for-push"): 514 | self.cmd_list(for_push=True) 515 | elif cmd.startswith("list"): 516 | self.cmd_list() 517 | elif cmd.startswith("capabilities"): 518 | self.cmd_capabilities() 519 | elif cmd == "\n": 520 | logger.info("empty line") 521 | if self.mode == Mode.PUSH and self.push_cmds: 522 | logger.info(f"pushing {self.push_cmds}") 523 | push_res = [self.cmd_push(c) for c in self.push_cmds] 524 | for res in push_res: 525 | sys.stdout.write(res) 526 | self.push_cmds = [] 527 | elif self.mode == Mode.FETCH and self.fetch_cmds: 528 | logger.info(f"fetching {len(self.fetch_cmds)} refs in parallel") 529 | self.process_fetch_cmds(self.fetch_cmds) 530 | self.fetch_cmds = [] 531 | sys.stdout.write("\n") 532 | sys.stdout.flush() 533 | else: 534 | sys.stderr.write(f"fatal: invalid command '{cmd}'\n") 535 | sys.stderr.flush() 536 | sys.exit(1) 537 | 538 | 539 | def main(): 540 | logger.info(sys.argv) 541 | remote = sys.argv[2] 542 | uri_scheme, profile, bucket, prefix = parse_git_url(remote) 543 | if bucket is None or prefix is None: 544 | sys.stderr.write( 545 | f"fatal: invalid remote '{remote}'. 
You need to have a bucket and a prefix.\n" 546 | ) 547 | sys.exit(1) 548 | try: 549 | s3remote = S3Remote( 550 | uri_scheme=uri_scheme, profile=profile, bucket=bucket, prefix=prefix 551 | ) 552 | while True: 553 | line = sys.stdin.readline() 554 | if not line: 555 | break 556 | logger.info(f"cmd: {line}") 557 | s3remote.process_cmd(line) 558 | 559 | except BrokenPipeError: 560 | logger.info("BrokenPipeError") 561 | devnull = os.open(os.devnull, os.O_WRONLY) 562 | os.dup2(devnull, sys.stdout.fileno()) 563 | sys.exit(0) 564 | except OSError as err: 565 | # Broken pipe error on Windows 566 | # see https://stackoverflow.com/questions/23688492/oserror-errno-22-invalid-argument-in-subprocess # noqa: B950 567 | if err.errno == 22: 568 | logger.info("BrokenPipeError") 569 | devnull = os.open(os.devnull, os.O_WRONLY) 570 | os.dup2(devnull, sys.stdout.fileno()) 571 | sys.exit(0) 572 | else: 573 | raise err 574 | except ( 575 | ClientError, 576 | ProfileNotFound, 577 | CredentialRetrievalError, 578 | NoCredentialsError, 579 | UnknownCredentialError, 580 | ) as e: 581 | sys.stderr.write(f"fatal: invalid credentials {e}\n") 582 | sys.stderr.flush() 583 | sys.exit(1) 584 | except BucketNotFoundError as e: 585 | sys.stderr.write(f"fatal: bucket not found {e.bucket}\n") 586 | sys.stderr.flush() 587 | sys.exit(1) 588 | except NotAuthorizedError as e: 589 | sys.stderr.write( 590 | f"fatal: user not authorized to perform {e.action} on {e.bucket}\n" 591 | ) 592 | sys.stderr.flush() 593 | sys.exit(1) 594 | except Exception as e: 595 | logger.info(e) 596 | sys.stderr.write( 597 | "fatal: unknown error. Run with --verbose flag to get full log\n" 598 | ) 599 | sys.stderr.flush() 600 | sys.exit(1) 601 | -------------------------------------------------------------------------------- /test/remote_test.py: -------------------------------------------------------------------------------- 1 | import botocore.client 2 | from mock import patch 3 | from io import StringIO, BytesIO 4 | from git_remote_s3 import S3Remote, UriScheme 5 | from botocore.exceptions import ClientError 6 | import tempfile 7 | import datetime 8 | import botocore 9 | import threading 10 | from io import BytesIO 11 | 12 | SHA1 = "c105d19ba64965d2c9d3d3246e7269059ef8bb8a" 13 | SHA2 = "c105d19ba64965d2c9d3d3246e7269059ef8bb8b" 14 | INVALID_SHA = "z45" 15 | BUNDLE_SUFFIX = ".bundle" 16 | MOCK_BUNDLE_CONTENT = b"MOCK_BUNDLE_CONTENT" 17 | ARCHIVE_SUFFIX = ".zip" 18 | MOCK_ARCHIVE_CONTENT = b"MOCK_ARCHIVE_CONTENT" 19 | BRANCH = "pytest" 20 | 21 | 22 | def create_list_objects_v2_mock( 23 | *, 24 | protected=False, 25 | no_head=False, 26 | branch=BRANCH, 27 | shas, 28 | ): 29 | def s3_list_objects_v2_mock(Prefix, **kwargs): 30 | content = [] 31 | for s in shas: 32 | content.append( 33 | { 34 | "Key": f"test_prefix/refs/heads/{branch}/{s}.bundle", 35 | "LastModified": datetime.datetime.now(), 36 | } 37 | ) 38 | if protected: 39 | content.append( 40 | { 41 | "Key": f"test_prefix/refs/heads/{branch}/PROTECTED#", 42 | "LastModified": datetime.datetime.now(), 43 | } 44 | ) 45 | if not no_head: 46 | content.append( 47 | { 48 | "Key": "test_prefix/HEAD", 49 | "LastModified": datetime.datetime.now(), 50 | } 51 | ) 52 | return { 53 | "Contents": [c for c in content if c["Key"].startswith(Prefix)], 54 | "NextContinuationToken": None, 55 | } 56 | 57 | return s3_list_objects_v2_mock 58 | 59 | 60 | @patch("sys.stdout", new_callable=StringIO) 61 | @patch("boto3.Session.client") 62 | def test_cmd_list(session_client_mock, stdout_mock): 63 | s3_remote = 
S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 64 | 65 | session_client_mock.return_value.list_objects_v2.side_effect = ( 66 | create_list_objects_v2_mock(shas=[SHA1]) 67 | ) 68 | session_client_mock.assert_called_once_with("s3") 69 | assert s3_remote.bucket == "test_bucket" 70 | assert s3_remote.prefix == "test_prefix" 71 | assert s3_remote.s3 == session_client_mock.return_value 72 | session_client_mock.return_value.get_object.return_value = { 73 | "Body": BytesIO(b"refs/heads/%b" % str.encode(BRANCH)) 74 | } 75 | s3_remote.cmd_list() 76 | assert ( 77 | f"@refs/heads/{BRANCH} HEAD\n{SHA1} refs/heads/{BRANCH}\n\n" 78 | == stdout_mock.getvalue() 79 | ) 80 | 81 | 82 | @patch("sys.stdout", new_callable=StringIO) 83 | @patch("boto3.Session.client") 84 | def test_list_refs(session_client_mock, stdout_mock): 85 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "nested/test_prefix") 86 | 87 | session_client_mock.return_value.list_objects_v2.return_value = { 88 | "Contents": [ 89 | { 90 | "Key": f"nested/test_prefix/refs/heads/{BRANCH}/{SHA1}.bundle", 91 | "LastModified": datetime.datetime.now(), 92 | }, 93 | { 94 | "Key": f"nested/test_prefix/refs/tags/v1/{SHA1}.bundle", 95 | "LastModified": datetime.datetime.now(), 96 | }, 97 | ] 98 | } 99 | 100 | session_client_mock.assert_called_once_with("s3") 101 | assert s3_remote.bucket == "test_bucket" 102 | assert s3_remote.prefix == "nested/test_prefix" 103 | assert s3_remote.s3 == session_client_mock.return_value 104 | refs = s3_remote.list_refs(bucket=s3_remote.bucket, prefix=s3_remote.prefix) 105 | assert len(refs) == 2 106 | assert f"refs/heads/{BRANCH}/{SHA1}.bundle" in refs 107 | assert f"refs/tags/v1/{SHA1}.bundle" in refs 108 | 109 | 110 | @patch("sys.stdout", new_callable=StringIO) 111 | @patch("boto3.Session.client") 112 | def test_cmd_list_nested_prefix(session_client_mock, stdout_mock): 113 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "nested/test_prefix") 114 | 115 | session_client_mock.return_value.list_objects_v2.return_value = { 116 | "Contents": [ 117 | { 118 | "Key": f"nested/test_prefix/refs/heads/{BRANCH}/{SHA1}.bundle", 119 | "LastModified": datetime.datetime.now(), 120 | }, 121 | { 122 | "Key": "nested/test_prefix/HEAD", 123 | "LastModified": datetime.datetime.now(), 124 | }, 125 | ] 126 | } 127 | session_client_mock.assert_called_once_with("s3") 128 | assert s3_remote.bucket == "test_bucket" 129 | assert s3_remote.prefix == "nested/test_prefix" 130 | assert s3_remote.s3 == session_client_mock.return_value 131 | session_client_mock.return_value.get_object.return_value = { 132 | "Body": BytesIO(b"refs/heads/%b" % str.encode(BRANCH)) 133 | } 134 | s3_remote.cmd_list() 135 | assert ( 136 | f"@refs/heads/{BRANCH} HEAD\n{SHA1} refs/heads/{BRANCH}\n\n" 137 | == stdout_mock.getvalue() 138 | ) 139 | 140 | 141 | @patch("sys.stdout", new_callable=StringIO) 142 | @patch("boto3.Session.client") 143 | def test_cmd_list_no_head(session_client_mock, stdout_mock): 144 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 145 | 146 | session_client_mock.return_value.list_objects_v2.side_effect = ( 147 | create_list_objects_v2_mock(shas=[SHA1], no_head=True) 148 | ) 149 | 150 | def error(**kwargs): 151 | raise botocore.exceptions.ClientError( 152 | {"Error": {"Code": "NoSuchKey"}}, "get_object" 153 | ) 154 | 155 | session_client_mock.return_value.get_object.side_effect = error 156 | session_client_mock.assert_called_once_with("s3") 157 | assert s3_remote.bucket == "test_bucket" 158 | assert 
s3_remote.prefix == "test_prefix" 159 | assert s3_remote.s3 == session_client_mock.return_value 160 | s3_remote.cmd_list() 161 | assert f"{SHA1} refs/heads/{BRANCH}\n\n" == stdout_mock.getvalue() 162 | 163 | 164 | @patch("sys.stdout", new_callable=StringIO) 165 | @patch("boto3.Session.client") 166 | def test_cmd_list_with_head_not_exsting_ref(session_client_mock, stdout_mock): 167 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 168 | 169 | session_client_mock.return_value.list_objects_v2.side_effect = ( 170 | create_list_objects_v2_mock(shas=[SHA1]) 171 | ) 172 | session_client_mock.return_value.get_object.return_value = { 173 | "Body": BytesIO(b"refs/heads/master") 174 | } 175 | session_client_mock.assert_called_once_with("s3") 176 | assert s3_remote.bucket == "test_bucket" 177 | assert s3_remote.prefix == "test_prefix" 178 | assert s3_remote.s3 == session_client_mock.return_value 179 | s3_remote.cmd_list() 180 | assert f"{SHA1} refs/heads/{BRANCH}\n\n" == stdout_mock.getvalue() 181 | 182 | 183 | @patch("sys.stdout", new_callable=StringIO) 184 | @patch("boto3.Session.client") 185 | def test_cmd_list_protected_branch(session_client_mock, stdout_mock): 186 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 187 | 188 | session_client_mock.return_value.list_objects_v2.side_effect = ( 189 | create_list_objects_v2_mock(protected=True, shas=[SHA1]) 190 | ) 191 | 192 | session_client_mock.return_value.get_object.return_value = { 193 | "Body": BytesIO(b"refs/heads/%b" % str.encode(BRANCH)) 194 | } 195 | session_client_mock.assert_called_once_with("s3") 196 | assert s3_remote.bucket == "test_bucket" 197 | assert s3_remote.prefix == "test_prefix" 198 | assert s3_remote.s3 == session_client_mock.return_value 199 | s3_remote.cmd_list() 200 | assert ( 201 | f"@refs/heads/{BRANCH} HEAD\n{SHA1} refs/heads/{BRANCH}\n\n" 202 | == stdout_mock.getvalue() 203 | ) 204 | 205 | 206 | @patch("git_remote_s3.git.is_ancestor") 207 | @patch("git_remote_s3.git.rev_parse") 208 | @patch("git_remote_s3.git.bundle") 209 | @patch("boto3.Session.client") 210 | def test_cmd_push_no_force_unprotected_ancestor( 211 | session_client_mock, bundle_mock, rev_parse_mock, is_ancestor_mock 212 | ): 213 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 214 | rev_parse_mock.return_value = SHA1 215 | temp_dir = tempfile.mkdtemp("test_temp") 216 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 217 | with open(temp_file.name, "wb") as f: 218 | f.write(MOCK_BUNDLE_CONTENT) 219 | bundle_mock.return_value = temp_file.name 220 | session_client_mock.return_value.list_objects_v2.side_effect = ( 221 | create_list_objects_v2_mock(protected=True, shas=[SHA1]) 222 | ) 223 | is_ancestor_mock.return_value = True 224 | assert s3_remote.s3 == session_client_mock.return_value 225 | res = s3_remote.cmd_push(f"push refs/heads/{BRANCH}:refs/heads/{BRANCH}") 226 | put_calls = [c for c in session_client_mock.return_value.put_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 227 | assert len(put_calls) == 1 228 | del_calls = [c for c in session_client_mock.return_value.delete_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 229 | assert len(del_calls) == 1 230 | assert res == (f"ok refs/heads/{BRANCH}\n") 231 | 232 | 233 | @patch("git_remote_s3.git.archive") 234 | @patch("git_remote_s3.git.is_ancestor") 235 | @patch("git_remote_s3.git.rev_parse") 236 | @patch("git_remote_s3.git.bundle") 237 | @patch("boto3.Session.client") 238 | def 
test_cmd_push_no_force_unprotected_ancestor_s3_zip( 239 | session_client_mock, bundle_mock, rev_parse_mock, is_ancestor_mock, archive_mock 240 | ): 241 | s3_remote = S3Remote(UriScheme.S3_ZIP, None, "test_bucket", "test_prefix") 242 | rev_parse_mock.return_value = SHA1 243 | 244 | temp_dir = tempfile.mkdtemp("test_temp") 245 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 246 | with open(temp_file.name, "wb") as f: 247 | f.write(MOCK_BUNDLE_CONTENT) 248 | bundle_mock.return_value = temp_file.name 249 | 250 | temp_file_archive = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=ARCHIVE_SUFFIX) 251 | with open(temp_file_archive.name, "wb") as f: 252 | f.write(MOCK_ARCHIVE_CONTENT) 253 | archive_mock.return_value = temp_file_archive.name 254 | 255 | session_client_mock.return_value.list_objects_v2.side_effect = ( 256 | create_list_objects_v2_mock(protected=True, shas=[SHA1]) 257 | ) 258 | 259 | is_ancestor_mock.return_value = True 260 | 261 | assert s3_remote.s3 == session_client_mock.return_value 262 | 263 | res = s3_remote.cmd_push(f"push refs/heads/{BRANCH}:refs/heads/{BRANCH}") 264 | put_calls = [c for c in session_client_mock.return_value.put_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 265 | assert len(put_calls) == 2 266 | del_calls = [c for c in session_client_mock.return_value.delete_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 267 | assert len(del_calls) == 1 268 | assert res == (f"ok refs/heads/{BRANCH}\n") 269 | 270 | 271 | @patch("git_remote_s3.git.is_ancestor") 272 | @patch("git_remote_s3.git.rev_parse") 273 | @patch("git_remote_s3.git.bundle") 274 | @patch("boto3.Session.client") 275 | def test_cmd_push_no_force_unprotected_no_ancestor( 276 | session_client_mock, bundle_mock, rev_parse_mock, is_ancestor_mock 277 | ): 278 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 279 | rev_parse_mock.return_value = SHA1 280 | temp_dir = tempfile.mkdtemp("test_temp") 281 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 282 | with open(temp_file.name, "wb") as f: 283 | f.write(MOCK_BUNDLE_CONTENT) 284 | bundle_mock.return_value = temp_file.name 285 | session_client_mock.return_value.list_objects_v2.side_effect = ( 286 | create_list_objects_v2_mock(shas=[SHA2]) 287 | ) 288 | 289 | is_ancestor_mock.return_value = False 290 | assert s3_remote.s3 == session_client_mock.return_value 291 | res = s3_remote.cmd_push(f"push refs/heads/{BRANCH}:refs/heads/{BRANCH}") 292 | put_calls = [c for c in session_client_mock.return_value.put_object.call_args_list if not c.kwargs.get("Key", "").endswith(".lock")] 293 | assert len(put_calls) == 0 294 | assert session_client_mock.return_value.delete_object.call_count == 0 295 | assert res.startswith("error") 296 | 297 | 298 | @patch("git_remote_s3.git.is_ancestor") 299 | @patch("git_remote_s3.git.rev_parse") 300 | @patch("git_remote_s3.git.bundle") 301 | @patch("boto3.Session.client") 302 | def test_cmd_push_force_no_ancestor( 303 | session_client_mock, bundle_mock, rev_parse_mock, is_ancestor_mock 304 | ): 305 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 306 | rev_parse_mock.return_value = SHA1 307 | temp_dir = tempfile.mkdtemp("test_temp") 308 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 309 | with open(temp_file.name, "wb") as f: 310 | f.write(MOCK_BUNDLE_CONTENT) 311 | bundle_mock.return_value = temp_file.name 312 | session_client_mock.return_value.list_objects_v2.side_effect = ( 
313 | create_list_objects_v2_mock(shas=[SHA2]) 314 | ) 315 | is_ancestor_mock.return_value = False 316 | assert s3_remote.s3 == session_client_mock.return_value 317 | res = s3_remote.cmd_push(f"push +refs/heads/{BRANCH}:refs/heads/{BRANCH}") 318 | put_calls = [c for c in session_client_mock.return_value.put_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 319 | assert len(put_calls) == 1 320 | del_calls = [c for c in session_client_mock.return_value.delete_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 321 | assert len(del_calls) == 1 322 | assert res.startswith("ok") 323 | 324 | 325 | @patch("git_remote_s3.git.archive") 326 | @patch("git_remote_s3.git.is_ancestor") 327 | @patch("git_remote_s3.git.rev_parse") 328 | @patch("git_remote_s3.git.bundle") 329 | @patch("boto3.Session.client") 330 | def test_cmd_push_force_no_ancestor_s3_zip( 331 | session_client_mock, bundle_mock, rev_parse_mock, is_ancestor_mock, archive_mock 332 | ): 333 | s3_remote = S3Remote(UriScheme.S3_ZIP, None, "test_bucket", "test_prefix") 334 | 335 | rev_parse_mock.return_value = SHA1 336 | 337 | temp_dir = tempfile.mkdtemp("test_temp") 338 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 339 | with open(temp_file.name, "wb") as f: 340 | f.write(MOCK_BUNDLE_CONTENT) 341 | bundle_mock.return_value = temp_file.name 342 | 343 | temp_file_archive = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=ARCHIVE_SUFFIX) 344 | with open(temp_file_archive.name, "wb") as f: 345 | f.write(MOCK_ARCHIVE_CONTENT) 346 | archive_mock.return_value = temp_file_archive.name 347 | 348 | session_client_mock.return_value.list_objects_v2.side_effect = ( 349 | create_list_objects_v2_mock(shas=[SHA2]) 350 | ) 351 | 352 | is_ancestor_mock.return_value = False 353 | 354 | assert s3_remote.s3 == session_client_mock.return_value 355 | 356 | res = s3_remote.cmd_push(f"push +refs/heads/{BRANCH}:refs/heads/{BRANCH}") 357 | put_calls = [c for c in session_client_mock.return_value.put_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 358 | assert len(put_calls) == 2 359 | del_calls = [c for c in session_client_mock.return_value.delete_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 360 | assert len(del_calls) == 1 361 | assert res.startswith("ok") 362 | 363 | 364 | @patch("git_remote_s3.git.is_ancestor") 365 | @patch("git_remote_s3.git.rev_parse") 366 | @patch("git_remote_s3.git.bundle") 367 | @patch("boto3.Session.client") 368 | def test_cmd_push_force_no_ancestor_protected( 369 | session_client_mock, bundle_mock, rev_parse_mock, is_ancestor_mock 370 | ): 371 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 372 | rev_parse_mock.return_value = SHA1 373 | temp_dir = tempfile.mkdtemp("test_temp") 374 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 375 | with open(temp_file.name, "wb") as f: 376 | f.write(MOCK_BUNDLE_CONTENT) 377 | bundle_mock.return_value = temp_file.name 378 | session_client_mock.return_value.list_objects_v2.side_effect = ( 379 | create_list_objects_v2_mock(protected=True, shas=[SHA2]) 380 | ) 381 | is_ancestor_mock.return_value = False 382 | assert s3_remote.s3 == session_client_mock.return_value 383 | res = s3_remote.cmd_push(f"push +refs/heads/{BRANCH}:refs/heads/{BRANCH}") 384 | assert session_client_mock.return_value.put_object.call_count == 0 385 | assert session_client_mock.return_value.delete_object.call_count == 0 386 | assert res.startswith("error") 387 | 388 | 389 | 
@patch("git_remote_s3.git.is_ancestor") 390 | @patch("git_remote_s3.git.rev_parse") 391 | @patch("git_remote_s3.git.bundle") 392 | @patch("boto3.Session.client") 393 | def test_cmd_push_empty_bucket( 394 | session_client_mock, bundle_mock, rev_parse_mock, is_ancestor_mock 395 | ): 396 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 397 | rev_parse_mock.return_value = SHA1 398 | temp_dir = tempfile.mkdtemp("test_temp") 399 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 400 | with open(temp_file.name, "wb") as f: 401 | f.write(MOCK_BUNDLE_CONTENT) 402 | bundle_mock.return_value = temp_file.name 403 | 404 | session_client_mock.return_value.head_object.side_effect = ClientError( 405 | {"Error": {"Code": "NoSuchKey"}}, "head_object" 406 | ) 407 | 408 | is_ancestor_mock.return_value = False 409 | assert s3_remote.s3 == session_client_mock.return_value 410 | res = s3_remote.cmd_push(f"push refs/heads/{BRANCH}:refs/heads/{BRANCH}") 411 | put_calls = [c for c in session_client_mock.return_value.put_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 412 | assert len(put_calls) == 2 413 | del_calls = [c for c in session_client_mock.return_value.delete_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 414 | assert len(del_calls) == 0 415 | assert res.startswith("ok") 416 | 417 | 418 | @patch("git_remote_s3.git.archive") 419 | @patch("git_remote_s3.git.is_ancestor") 420 | @patch("git_remote_s3.git.rev_parse") 421 | @patch("git_remote_s3.git.bundle") 422 | @patch("boto3.Session.client") 423 | def test_cmd_push_empty_bucket_s3_zip( 424 | session_client_mock, 425 | bundle_mock, 426 | rev_parse_mock, 427 | is_ancestor_mock, 428 | archive_mock, 429 | ): 430 | s3_remote = S3Remote(UriScheme.S3_ZIP, None, "test_bucket", "test_prefix") 431 | 432 | rev_parse_mock.return_value = SHA1 433 | 434 | temp_dir = tempfile.mkdtemp("test_temp") 435 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 436 | with open(temp_file.name, "wb") as f: 437 | f.write(MOCK_BUNDLE_CONTENT) 438 | bundle_mock.return_value = temp_file.name 439 | 440 | temp_file_archive = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=ARCHIVE_SUFFIX) 441 | with open(temp_file_archive.name, "wb") as f: 442 | f.write(MOCK_ARCHIVE_CONTENT) 443 | archive_mock.return_value = temp_file_archive.name 444 | 445 | session_client_mock.return_value.head_object.side_effect = ClientError( 446 | {"Error": {"Code": "NoSuchKey"}}, "head_object" 447 | ) 448 | 449 | is_ancestor_mock.return_value = False 450 | 451 | assert s3_remote.s3 == session_client_mock.return_value 452 | 453 | res = s3_remote.cmd_push(f"push refs/heads/{BRANCH}:refs/heads/{BRANCH}") 454 | put_calls = [c for c in session_client_mock.return_value.put_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 455 | assert len(put_calls) == 3 456 | del_calls = [c for c in session_client_mock.return_value.delete_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 457 | assert len(del_calls) == 0 458 | assert res.startswith("ok") 459 | 460 | 461 | @patch("git_remote_s3.git.archive") 462 | @patch("git_remote_s3.git.is_ancestor") 463 | @patch("git_remote_s3.git.rev_parse") 464 | @patch("git_remote_s3.git.bundle") 465 | @patch("git_remote_s3.git.get_last_commit_message") 466 | @patch("boto3.Session.client") 467 | def test_cmd_push_s3_zip_put_object_params( 468 | session_client_mock, 469 | get_last_commit_message_mock, 470 | bundle_mock, 471 | rev_parse_mock, 472 | is_ancestor_mock, 
473 | archive_mock, 474 | ): 475 | s3_remote = S3Remote(UriScheme.S3_ZIP, None, "test_bucket", "test_prefix") 476 | rev_parse_mock.return_value = SHA1 477 | get_last_commit_message_mock.return_value = "test commit message" 478 | 479 | temp_dir = tempfile.mkdtemp("test_temp") 480 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 481 | with open(temp_file.name, "wb") as f: 482 | f.write(MOCK_BUNDLE_CONTENT) 483 | bundle_mock.return_value = temp_file.name 484 | 485 | temp_file_archive = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=ARCHIVE_SUFFIX) 486 | with open(temp_file_archive.name, "wb") as f: 487 | f.write(MOCK_ARCHIVE_CONTENT) 488 | archive_mock.return_value = temp_file_archive.name 489 | 490 | session_client_mock.return_value.list_objects_v2.side_effect = ( 491 | create_list_objects_v2_mock(shas=[SHA2]) 492 | ) 493 | 494 | is_ancestor_mock.return_value = True 495 | 496 | s3_remote.cmd_push(f"push refs/heads/{BRANCH}:refs/heads/{BRANCH}") 497 | 498 | put_object_calls = [c for c in session_client_mock.return_value.put_object.call_args_list if not c.kwargs["Key"].endswith(".lock")] 499 | assert len(put_object_calls) == 2 500 | 501 | # Check bundle upload 502 | bundle_call = put_object_calls[0] 503 | assert bundle_call.kwargs["Bucket"] == "test_bucket" 504 | assert bundle_call.kwargs["Key"].endswith(".bundle") 505 | 506 | # Check zip upload 507 | zip_call = put_object_calls[1] 508 | assert zip_call.kwargs["Bucket"] == "test_bucket" 509 | assert zip_call.kwargs["Key"].endswith("repo.zip") 510 | assert ( 511 | zip_call.kwargs["Metadata"]["codepipeline-artifact-revision-summary"] 512 | == "test commit message" 513 | ) 514 | 515 | 516 | @patch("git_remote_s3.git.is_ancestor") 517 | @patch("git_remote_s3.git.rev_parse") 518 | @patch("git_remote_s3.git.bundle") 519 | @patch("boto3.Session.client") 520 | def test_cmd_push_multiple_heads( 521 | session_client_mock, bundle_mock, rev_parse_mock, is_ancestor_mock 522 | ): 523 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 524 | rev_parse_mock.return_value = SHA1 525 | temp_dir = tempfile.mkdtemp("test_temp") 526 | temp_file = tempfile.NamedTemporaryFile(dir=temp_dir, suffix=BUNDLE_SUFFIX) 527 | with open(temp_file.name, "wb") as f: 528 | f.write(MOCK_BUNDLE_CONTENT) 529 | bundle_mock.return_value = temp_file.name 530 | session_client_mock.return_value.list_objects_v2.side_effect = ( 531 | create_list_objects_v2_mock(shas=[SHA1, SHA2]) 532 | ) 533 | is_ancestor_mock.return_value = False 534 | assert s3_remote.s3 == session_client_mock.return_value 535 | res = s3_remote.cmd_push(f"push refs/heads/{BRANCH}:refs/heads/{BRANCH}") 536 | assert session_client_mock.return_value.put_object.call_count == 0 537 | assert session_client_mock.return_value.delete_object.call_count == 0 538 | assert res.startswith("error") 539 | 540 | 541 | @patch("git_remote_s3.git.unbundle") 542 | @patch("boto3.Session.client") 543 | def test_cmd_fetch(session_client_mock, unbundle_mock): 544 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 545 | s3_remote.cmd_fetch(f"fetch {SHA1} refs/heads/{BRANCH}") 546 | 547 | unbundle_mock.assert_called_once() 548 | assert session_client_mock.return_value.download_file.call_count == 1 549 | 550 | 551 | @patch("git_remote_s3.git.unbundle") 552 | @patch("boto3.Session.client") 553 | def test_cmd_fetch_same_ref(session_client_mock, unbundle_mock): 554 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 555 | s3_remote.cmd_fetch(f"fetch {SHA1} 
refs/heads/{BRANCH}") 556 | s3_remote.cmd_fetch(f"fetch {SHA1} refs/heads/{BRANCH}") 557 | unbundle_mock.assert_called_once() 558 | assert session_client_mock.return_value.download_file.call_count == 1 559 | 560 | 561 | @patch("sys.stdout", new_callable=StringIO) 562 | @patch("boto3.Session.client") 563 | def test_cmd_option(session_client_mock, stdout_mock): 564 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 565 | s3_remote.cmd_option("option verbosity 2") 566 | assert stdout_mock.getvalue().startswith("ok\n") 567 | s3_remote.cmd_option("option concurrency 1") 568 | assert stdout_mock.getvalue().endswith("unsupported\n") 569 | 570 | 571 | @patch("sys.stdout", new_callable=StringIO) 572 | @patch("boto3.Session.client") 573 | def test_cmd_capabilities(session_client_mock, stdout_mock): 574 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 575 | s3_remote.cmd_capabilities() 576 | assert "fetch" in stdout_mock.getvalue() 577 | assert "option" in stdout_mock.getvalue() 578 | assert "push" in stdout_mock.getvalue() 579 | 580 | 581 | @patch("boto3.Session.client") 582 | def test_cmd_push_delete(session_client_mock): 583 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 584 | 585 | session_client_mock.return_value.list_objects_v2.return_value = { 586 | "Contents": [ 587 | { 588 | "Key": f"test_prefix/refs/heads/{BRANCH}/{SHA1}.bundle", 589 | "LastModified": datetime.datetime.now(), 590 | } 591 | ] 592 | } 593 | assert s3_remote.s3 == session_client_mock.return_value 594 | res = s3_remote.cmd_push(f"push :refs/heads/{BRANCH}") 595 | assert session_client_mock.return_value.delete_object.call_count == 1 596 | assert res == (f"ok refs/heads/{BRANCH}\n") 597 | 598 | 599 | @patch("boto3.Session.client") 600 | def test_cmd_push_delete_s3_zip(session_client_mock): 601 | s3_remote = S3Remote(UriScheme.S3_ZIP, None, "test_bucket", "test_prefix") 602 | 603 | session_client_mock.return_value.list_objects_v2.return_value = { 604 | "Contents": [ 605 | { 606 | "Key": f"test_prefix/refs/heads/{BRANCH}/{SHA1}.bundle", 607 | "LastModified": datetime.datetime.now(), 608 | }, 609 | { 610 | "Key": f"test_prefix/refs/heads/{BRANCH}/repo.zip", 611 | "LastModified": datetime.datetime.now(), 612 | }, 613 | ] 614 | } 615 | assert s3_remote.s3 == session_client_mock.return_value 616 | res = s3_remote.cmd_push(f"push :refs/heads/{BRANCH}") 617 | assert session_client_mock.return_value.delete_object.call_count == 2 618 | assert res == (f"ok refs/heads/{BRANCH}\n") 619 | 620 | 621 | @patch("boto3.Session.client") 622 | def test_cmd_push_delete_fails_with_multiple_heads(session_client_mock): 623 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 624 | 625 | session_client_mock.return_value.list_objects_v2.return_value = { 626 | "Contents": [ 627 | { 628 | "Key": f"test_prefix/refs/heads/{BRANCH}/{SHA1}.bundle", 629 | "LastModified": datetime.datetime.now(), 630 | }, 631 | { 632 | "Key": f"test_prefix/refs/heads/{BRANCH}/{SHA2}.bundle", 633 | "LastModified": datetime.datetime.now(), 634 | }, 635 | ] 636 | } 637 | assert s3_remote.s3 == session_client_mock.return_value 638 | res = s3_remote.cmd_push(f"push :refs/heads/{BRANCH}") 639 | assert session_client_mock.return_value.delete_object.call_count == 0 640 | assert res.startswith("error") 641 | 642 | 643 | @patch("boto3.Session.client") 644 | def test_cmd_push_delete_fails_with_multiple_heads_s3_zip(session_client_mock): 645 | s3_remote = S3Remote(UriScheme.S3_ZIP, None, "test_bucket", 
"test_prefix") 646 | 647 | session_client_mock.return_value.list_objects_v2.return_value = { 648 | "Contents": [ 649 | { 650 | "Key": f"test_prefix/refs/heads/{BRANCH}/{SHA1}.bundle", 651 | "LastModified": datetime.datetime.now(), 652 | }, 653 | { 654 | "Key": f"test_prefix/refs/heads/{BRANCH}/{SHA2}.bundle", 655 | "LastModified": datetime.datetime.now(), 656 | }, 657 | { 658 | "Key": f"test_prefix/refs/heads/{BRANCH}/repo.zip", 659 | "LastModified": datetime.datetime.now(), 660 | }, 661 | ] 662 | } 663 | assert s3_remote.s3 == session_client_mock.return_value 664 | res = s3_remote.cmd_push(f"push :refs/heads/{BRANCH}") 665 | assert session_client_mock.return_value.delete_object.call_count == 0 666 | assert res.startswith("error") 667 | 668 | 669 | @patch("git_remote_s3.git.bundle") 670 | @patch("git_remote_s3.git.rev_parse") 671 | @patch("boto3.Session.client") 672 | def test_simultaneous_pushes_single_bundle_remains( 673 | session_client_mock, rev_parse_mock, bundle_mock 674 | ): 675 | s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix") 676 | 677 | storage = {} 678 | lock_keys = [] 679 | storage_lock = threading.Lock() 680 | 681 | def list_objects_v2_side_effect(Bucket, Prefix, **kwargs): 682 | with storage_lock: 683 | if Prefix.endswith("/LOCKS/"): 684 | contents = [{"Key": k, "LastModified": datetime.datetime.now()} for k in lock_keys] 685 | else: 686 | contents = [ 687 | {"Key": k, "LastModified": datetime.datetime.now()} 688 | for k in storage.keys() 689 | if k.startswith(Prefix) 690 | ] 691 | return {"Contents": contents, "NextContinuationToken": None} 692 | 693 | def put_object_side_effect(Bucket, Key, Body=None, **kwargs): 694 | with storage_lock: 695 | # Simulate S3 conditional writes for lock creation using If-None-Match 696 | if Key.endswith(".lock"): 697 | if kwargs.get("IfNoneMatch") == "*": 698 | if Key in lock_keys: 699 | raise botocore.exceptions.ClientError( 700 | { 701 | "ResponseMetadata": {"HTTPStatusCode": 412}, 702 | "Error": {"Code": "PreconditionFailed"}, 703 | }, 704 | "put_object", 705 | ) 706 | lock_keys.append(Key) 707 | else: 708 | lock_keys.append(Key) 709 | else: 710 | data = Body.read() if hasattr(Body, "read") else Body or b"" 711 | storage[Key] = data 712 | return {} 713 | 714 | def delete_object_side_effect(Bucket, Key): 715 | with storage_lock: 716 | storage.pop(Key, None) 717 | try: 718 | lock_keys.remove(Key) 719 | except ValueError: 720 | pass 721 | return {} 722 | 723 | session_client_mock.return_value.list_objects_v2.side_effect = list_objects_v2_side_effect 724 | session_client_mock.return_value.put_object.side_effect = put_object_side_effect 725 | session_client_mock.return_value.delete_object.side_effect = delete_object_side_effect 726 | # Provide a concrete LastModified for lock head checks (non-stale) 727 | session_client_mock.return_value.head_object.side_effect = ( 728 | lambda Bucket, Key: {"LastModified": datetime.datetime.now()} 729 | ) 730 | 731 | def rev_parse_side_effect(local_ref: str): 732 | return SHA1 if "branch1" in local_ref else SHA2 733 | 734 | rev_parse_mock.side_effect = rev_parse_side_effect 735 | 736 | def bundle_side_effect(folder: str, sha: str, ref: str): 737 | temp_file = tempfile.NamedTemporaryFile(dir=folder, suffix=BUNDLE_SUFFIX, delete=False) 738 | with open(temp_file.name, "wb") as f: 739 | f.write(MOCK_BUNDLE_CONTENT) 740 | return temp_file.name 741 | 742 | bundle_mock.side_effect = bundle_side_effect 743 | 744 | remote_ref = f"refs/heads/{BRANCH}" 745 | 746 | t1 = threading.Thread( 747 | 
target=s3_remote.cmd_push, args=(f"push refs/heads/branch1:{remote_ref}",)
748 |     )
749 |     t2 = threading.Thread(
750 |         target=s3_remote.cmd_push, args=(f"push refs/heads/branch2:{remote_ref}",)
751 |     )
752 | 
753 |     t1.start()
754 |     t2.start()
755 |     t1.join()
756 |     t2.join()
757 | 
758 |     with storage_lock:
759 |         bundles = [
760 |             k
761 |             for k in storage.keys()
762 |             if k.startswith(f"test_prefix/{remote_ref}/") and k.endswith(".bundle")
763 |         ]
764 | 
765 |     # Only one push should succeed thanks to per-ref locking; the other fails to acquire the lock
766 |     assert len(bundles) == 1
767 |     assert bundles[0].endswith(f"/{SHA1}.bundle") or bundles[0].endswith(f"/{SHA2}.bundle")
768 | 
769 | 
770 | @patch("boto3.Session.client")
771 | def test_acquire_lock_deletes_stale_and_reacquires(session_client_mock):
772 |     s3_remote = S3Remote(UriScheme.S3, None, "test_bucket", "test_prefix")
773 | 
774 |     # Ensure the initial list call in the constructor succeeds
775 |     session_client_mock.return_value.list_objects_v2.return_value = {
776 |         "Contents": [],
777 |         "NextContinuationToken": None,
778 |     }
779 | 
780 |     # Simulate an existing lock: the first conditional put fails with 412, then succeeds after the stale lock is deleted
781 |     attempts = {"count": 0}
782 | 
783 |     def put_object_side_effect(Bucket, Key, Body=None, IfNoneMatch=None, **kwargs):
784 |         if Key.endswith(".lock") and IfNoneMatch == "*":
785 |             if attempts["count"] == 0:
786 |                 attempts["count"] += 1
787 |                 raise botocore.exceptions.ClientError(
788 |                     {
789 |                         "ResponseMetadata": {"HTTPStatusCode": 412},
790 |                         "Error": {"Code": "PreconditionFailed"},
791 |                     },
792 |                     "put_object",
793 |                 )
794 |         return {}
795 | 
796 |     # Stale lock: LastModified far enough in the past
797 |     def head_object_side_effect(Bucket, Key):
798 |         return {"LastModified": datetime.datetime.now() - datetime.timedelta(seconds=120)}
799 | 
800 |     session_client_mock.return_value.put_object.side_effect = put_object_side_effect
801 |     session_client_mock.return_value.head_object.side_effect = head_object_side_effect
802 |     session_client_mock.return_value.delete_object.return_value = {}
803 | 
804 |     # Make the TTL small enough that a lock 120 seconds old counts as stale
805 |     s3_remote.lock_ttl_seconds = 60
806 | 
807 |     remote_ref = f"refs/heads/{BRANCH}"
808 |     lock_key = s3_remote.acquire_lock(remote_ref)
809 | 
810 |     expected_lock_key = f"test_prefix/{remote_ref}/LOCK#.lock"
811 |     assert lock_key == expected_lock_key
812 | 
813 |     # Verify delete was called exactly once, for the stale lock
814 |     delete_calls = [
815 |         c for c in session_client_mock.return_value.delete_object.call_args_list if c.kwargs["Key"].endswith(".lock")
816 |     ]
817 |     assert len(delete_calls) == 1
818 | 
819 |     # Verify put was attempted at least twice (initial failure + reacquire)
820 |     put_lock_calls = [
821 |         c for c in session_client_mock.return_value.put_object.call_args_list if c.kwargs.get("Key", "").endswith(".lock")
822 |     ]
823 |     assert len(put_lock_calls) >= 2
824 | 
--------------------------------------------------------------------------------
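
Taken together, the last two tests pin down the push-locking protocol: a per-ref lock object is written with a conditional PUT (If-None-Match: "*"), a concurrent writer receives HTTP 412 and backs off, and a lock older than lock_ttl_seconds is treated as stale, deleted, and re-acquired. The sketch below is one acquire routine consistent with those assertions; the names acquire_lock, lock_ttl_seconds, and the LOCK#.lock key are taken from the tests, but the body is illustrative only (a standalone function with the client, bucket, and prefix passed explicitly), not the actual git_remote_s3.remote implementation.

import datetime

from botocore.exceptions import ClientError


def acquire_lock(s3, bucket, prefix, remote_ref, lock_ttl_seconds=60):
    """Illustrative per-ref lock via an S3 conditional write (not remote.py)."""
    lock_key = f"{prefix}/{remote_ref}/LOCK#.lock"
    for _ in range(2):  # first attempt, plus one retry after stale-lock cleanup
        try:
            # If-None-Match: "*" makes the PUT fail with HTTP 412 when the
            # lock object already exists, so at most one writer wins.
            s3.put_object(Bucket=bucket, Key=lock_key, Body=b"", IfNoneMatch="*")
            return lock_key
        except ClientError as e:
            if e.response.get("ResponseMetadata", {}).get("HTTPStatusCode") != 412:
                raise
            # The lock exists; reclaim it only if it is older than the TTL.
            # (The tests mock naive datetimes; live S3 returns tz-aware ones.)
            head = s3.head_object(Bucket=bucket, Key=lock_key)
            age = (datetime.datetime.now() - head["LastModified"]).total_seconds()
            if age <= lock_ttl_seconds:
                raise  # held by a live push; the caller reports an error
            s3.delete_object(Bucket=bucket, Key=lock_key)
    raise RuntimeError(f"could not acquire lock for {remote_ref}")

Under this scheme the losing thread in the concurrent-push test hits the 412, sees a fresh LastModified, and propagates the error, which is exactly why only one bundle survives; in the stale-lock test the TTL check fails, producing one delete_object call and a second, successful conditional PUT.

--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | # This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.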
2 | 3 | [[package]] 4 | name = "attrs" 5 | version = "24.2.0" 6 | description = "Classes Without Boilerplate" 7 | optional = false 8 | python-versions = ">=3.7" 9 | groups = ["dev"] 10 | files = [ 11 | {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, 12 | {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, 13 | ] 14 | 15 | [package.extras] 16 | benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] 17 | cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] 18 | dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] 19 | docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] 20 | tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] 21 | tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\""] 22 | 23 | [[package]] 24 | name = "black" 25 | version = "24.8.0" 26 | description = "The uncompromising code formatter." 
27 | optional = false 28 | python-versions = ">=3.8" 29 | groups = ["dev"] 30 | files = [ 31 | {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, 32 | {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, 33 | {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, 34 | {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, 35 | {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, 36 | {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, 37 | {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, 38 | {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, 39 | {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, 40 | {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, 41 | {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, 42 | {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, 43 | {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, 44 | {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, 45 | {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, 46 | {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, 47 | {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, 48 | {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, 49 | {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, 50 | {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, 51 | {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, 52 | {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, 53 | ] 54 | 55 | [package.dependencies] 56 | click = ">=8.0.0" 57 | mypy-extensions = ">=0.4.3" 58 | packaging = 
">=22.0" 59 | pathspec = ">=0.9.0" 60 | platformdirs = ">=2" 61 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 62 | typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} 63 | 64 | [package.extras] 65 | colorama = ["colorama (>=0.4.3)"] 66 | d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""] 67 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 68 | uvloop = ["uvloop (>=0.15.2)"] 69 | 70 | [[package]] 71 | name = "boto3" 72 | version = "1.39.4" 73 | description = "The AWS SDK for Python" 74 | optional = false 75 | python-versions = ">=3.9" 76 | groups = ["main"] 77 | files = [ 78 | {file = "boto3-1.39.4-py3-none-any.whl", hash = "sha256:f8e9534b429121aa5c5b7c685c6a94dd33edf14f87926e9a182d5b50220ba284"}, 79 | {file = "boto3-1.39.4.tar.gz", hash = "sha256:6c955729a1d70181bc8368e02a7d3f350884290def63815ebca8408ee6d47571"}, 80 | ] 81 | 82 | [package.dependencies] 83 | botocore = ">=1.39.4,<1.40.0" 84 | jmespath = ">=0.7.1,<2.0.0" 85 | s3transfer = ">=0.13.0,<0.14.0" 86 | 87 | [package.extras] 88 | crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] 89 | 90 | [[package]] 91 | name = "botocore" 92 | version = "1.39.4" 93 | description = "Low-level, data-driven core of boto 3." 94 | optional = false 95 | python-versions = ">=3.9" 96 | groups = ["main"] 97 | files = [ 98 | {file = "botocore-1.39.4-py3-none-any.whl", hash = "sha256:c41e167ce01cfd1973c3fa9856ef5244a51ddf9c82cb131120d8617913b6812a"}, 99 | {file = "botocore-1.39.4.tar.gz", hash = "sha256:e662ac35c681f7942a93f2ec7b4cde8f8b56dd399da47a79fa3e370338521a56"}, 100 | ] 101 | 102 | [package.dependencies] 103 | jmespath = ">=0.7.1,<2.0.0" 104 | python-dateutil = ">=2.1,<3.0.0" 105 | urllib3 = [ 106 | {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, 107 | {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, 108 | ] 109 | 110 | [package.extras] 111 | crt = ["awscrt (==0.23.8)"] 112 | 113 | [[package]] 114 | name = "click" 115 | version = "8.1.7" 116 | description = "Composable command line interface toolkit" 117 | optional = false 118 | python-versions = ">=3.7" 119 | groups = ["dev"] 120 | files = [ 121 | {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, 122 | {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, 123 | ] 124 | 125 | [package.dependencies] 126 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 127 | 128 | [[package]] 129 | name = "colorama" 130 | version = "0.4.6" 131 | description = "Cross-platform colored terminal text." 
132 | optional = false 133 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 134 | groups = ["dev"] 135 | markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" 136 | files = [ 137 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 138 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 139 | ] 140 | 141 | [[package]] 142 | name = "coverage" 143 | version = "7.6.1" 144 | description = "Code coverage measurement for Python" 145 | optional = false 146 | python-versions = ">=3.8" 147 | groups = ["dev"] 148 | files = [ 149 | {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, 150 | {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, 151 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, 152 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, 153 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, 154 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, 155 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, 156 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, 157 | {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, 158 | {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, 159 | {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, 160 | {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, 161 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, 162 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, 163 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, 164 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, 165 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, 166 | {file = 
"coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, 167 | {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, 168 | {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, 169 | {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, 170 | {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, 171 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, 172 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, 173 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, 174 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, 175 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, 176 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, 177 | {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, 178 | {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, 179 | {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, 180 | {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, 181 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, 182 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, 183 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, 184 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, 185 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, 186 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, 187 | {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, 188 | {file = 
"coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, 189 | {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, 190 | {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, 191 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, 192 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, 193 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, 194 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, 195 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, 196 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, 197 | {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, 198 | {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, 199 | {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, 200 | {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, 201 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, 202 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, 203 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, 204 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, 205 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, 206 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, 207 | {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, 208 | {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, 209 | {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, 210 | {file = 
"coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, 211 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, 212 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, 213 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, 214 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, 215 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, 216 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, 217 | {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, 218 | {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, 219 | {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, 220 | {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, 221 | ] 222 | 223 | [package.extras] 224 | toml = ["tomli ; python_full_version <= \"3.11.0a6\""] 225 | 226 | [[package]] 227 | name = "exceptiongroup" 228 | version = "1.2.2" 229 | description = "Backport of PEP 654 (exception groups)" 230 | optional = false 231 | python-versions = ">=3.7" 232 | groups = ["dev"] 233 | markers = "python_version < \"3.11\"" 234 | files = [ 235 | {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, 236 | {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, 237 | ] 238 | 239 | [package.extras] 240 | test = ["pytest (>=6)"] 241 | 242 | [[package]] 243 | name = "flake8" 244 | version = "7.1.1" 245 | description = "the modular source code checker: pep8 pyflakes and co" 246 | optional = false 247 | python-versions = ">=3.8.1" 248 | groups = ["dev"] 249 | files = [ 250 | {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, 251 | {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, 252 | ] 253 | 254 | [package.dependencies] 255 | mccabe = ">=0.7.0,<0.8.0" 256 | pycodestyle = ">=2.12.0,<2.13.0" 257 | pyflakes = ">=3.2.0,<3.3.0" 258 | 259 | [[package]] 260 | name = "flake8-bugbear" 261 | version = "24.4.26" 262 | description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
263 | optional = false 264 | python-versions = ">=3.8.1" 265 | groups = ["dev"] 266 | files = [ 267 | {file = "flake8_bugbear-24.4.26-py3-none-any.whl", hash = "sha256:cb430dd86bc821d79ccc0b030789a9c87a47a369667f12ba06e80f11305e8258"}, 268 | {file = "flake8_bugbear-24.4.26.tar.gz", hash = "sha256:ff8d4ba5719019ebf98e754624c30c05cef0dadcf18a65d91c7567300e52a130"}, 269 | ] 270 | 271 | [package.dependencies] 272 | attrs = ">=19.2.0" 273 | flake8 = ">=6.0.0" 274 | 275 | [package.extras] 276 | dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] 277 | 278 | [[package]] 279 | name = "iniconfig" 280 | version = "2.0.0" 281 | description = "brain-dead simple config-ini parsing" 282 | optional = false 283 | python-versions = ">=3.7" 284 | groups = ["dev"] 285 | files = [ 286 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 287 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 288 | ] 289 | 290 | [[package]] 291 | name = "jmespath" 292 | version = "1.0.1" 293 | description = "JSON Matching Expressions" 294 | optional = false 295 | python-versions = ">=3.7" 296 | groups = ["main"] 297 | files = [ 298 | {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, 299 | {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, 300 | ] 301 | 302 | [[package]] 303 | name = "mccabe" 304 | version = "0.7.0" 305 | description = "McCabe checker, plugin for flake8" 306 | optional = false 307 | python-versions = ">=3.6" 308 | groups = ["dev"] 309 | files = [ 310 | {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, 311 | {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, 312 | ] 313 | 314 | [[package]] 315 | name = "mock" 316 | version = "5.1.0" 317 | description = "Rolling backport of unittest.mock for all Pythons" 318 | optional = false 319 | python-versions = ">=3.6" 320 | groups = ["dev"] 321 | files = [ 322 | {file = "mock-5.1.0-py3-none-any.whl", hash = "sha256:18c694e5ae8a208cdb3d2c20a993ca1a7b0efa258c247a1e565150f477f83744"}, 323 | {file = "mock-5.1.0.tar.gz", hash = "sha256:5e96aad5ccda4718e0a229ed94b2024df75cc2d55575ba5762d31f5767b8767d"}, 324 | ] 325 | 326 | [package.extras] 327 | build = ["blurb", "twine", "wheel"] 328 | docs = ["sphinx"] 329 | test = ["pytest", "pytest-cov"] 330 | 331 | [[package]] 332 | name = "mypy" 333 | version = "1.11.1" 334 | description = "Optional static typing for Python" 335 | optional = false 336 | python-versions = ">=3.8" 337 | groups = ["dev"] 338 | files = [ 339 | {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, 340 | {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, 341 | {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, 342 | {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, 343 | {file = 
"mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, 344 | {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, 345 | {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, 346 | {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, 347 | {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, 348 | {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, 349 | {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, 350 | {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, 351 | {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, 352 | {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, 353 | {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, 354 | {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, 355 | {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, 356 | {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, 357 | {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, 358 | {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, 359 | {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, 360 | {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, 361 | {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, 362 | {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, 363 | {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, 364 | {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, 365 | {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, 366 | ] 367 | 368 | [package.dependencies] 369 | mypy-extensions = ">=1.0.0" 370 | tomli = {version = 
">=1.1.0", markers = "python_version < \"3.11\""} 371 | typing-extensions = ">=4.6.0" 372 | 373 | [package.extras] 374 | dmypy = ["psutil (>=4.0)"] 375 | install-types = ["pip"] 376 | mypyc = ["setuptools (>=50)"] 377 | reports = ["lxml"] 378 | 379 | [[package]] 380 | name = "mypy-extensions" 381 | version = "1.0.0" 382 | description = "Type system extensions for programs checked with the mypy type checker." 383 | optional = false 384 | python-versions = ">=3.5" 385 | groups = ["dev"] 386 | files = [ 387 | {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, 388 | {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, 389 | ] 390 | 391 | [[package]] 392 | name = "packaging" 393 | version = "24.1" 394 | description = "Core utilities for Python packages" 395 | optional = false 396 | python-versions = ">=3.8" 397 | groups = ["dev"] 398 | files = [ 399 | {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, 400 | {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, 401 | ] 402 | 403 | [[package]] 404 | name = "pathspec" 405 | version = "0.12.1" 406 | description = "Utility library for gitignore style pattern matching of file paths." 407 | optional = false 408 | python-versions = ">=3.8" 409 | groups = ["dev"] 410 | files = [ 411 | {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, 412 | {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, 413 | ] 414 | 415 | [[package]] 416 | name = "platformdirs" 417 | version = "4.2.2" 418 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
419 | optional = false 420 | python-versions = ">=3.8" 421 | groups = ["dev"] 422 | files = [ 423 | {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, 424 | {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, 425 | ] 426 | 427 | [package.extras] 428 | docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] 429 | test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] 430 | type = ["mypy (>=1.8)"] 431 | 432 | [[package]] 433 | name = "pluggy" 434 | version = "1.5.0" 435 | description = "plugin and hook calling mechanisms for python" 436 | optional = false 437 | python-versions = ">=3.8" 438 | groups = ["dev"] 439 | files = [ 440 | {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, 441 | {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, 442 | ] 443 | 444 | [package.extras] 445 | dev = ["pre-commit", "tox"] 446 | testing = ["pytest", "pytest-benchmark"] 447 | 448 | [[package]] 449 | name = "pycodestyle" 450 | version = "2.12.1" 451 | description = "Python style guide checker" 452 | optional = false 453 | python-versions = ">=3.8" 454 | groups = ["dev"] 455 | files = [ 456 | {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, 457 | {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, 458 | ] 459 | 460 | [[package]] 461 | name = "pyflakes" 462 | version = "3.2.0" 463 | description = "passive checker of Python programs" 464 | optional = false 465 | python-versions = ">=3.8" 466 | groups = ["dev"] 467 | files = [ 468 | {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, 469 | {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, 470 | ] 471 | 472 | [[package]] 473 | name = "pytest" 474 | version = "8.3.2" 475 | description = "pytest: simple powerful testing with Python" 476 | optional = false 477 | python-versions = ">=3.8" 478 | groups = ["dev"] 479 | files = [ 480 | {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, 481 | {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, 482 | ] 483 | 484 | [package.dependencies] 485 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 486 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} 487 | iniconfig = "*" 488 | packaging = "*" 489 | pluggy = ">=1.5,<2" 490 | tomli = {version = ">=1", markers = "python_version < \"3.11\""} 491 | 492 | [package.extras] 493 | dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] 494 | 495 | [[package]] 496 | name = "python-dateutil" 497 | version = "2.9.0.post0" 498 | description = "Extensions to the standard Python datetime module" 499 | optional = false 500 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 501 | groups = ["main"] 502 | files = [ 503 | {file = 
"python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, 504 | {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, 505 | ] 506 | 507 | [package.dependencies] 508 | six = ">=1.5" 509 | 510 | [[package]] 511 | name = "s3transfer" 512 | version = "0.13.0" 513 | description = "An Amazon S3 Transfer Manager" 514 | optional = false 515 | python-versions = ">=3.9" 516 | groups = ["main"] 517 | files = [ 518 | {file = "s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:0148ef34d6dd964d0d8cf4311b2b21c474693e57c2e069ec708ce043d2b527be"}, 519 | {file = "s3transfer-0.13.0.tar.gz", hash = "sha256:f5e6db74eb7776a37208001113ea7aa97695368242b364d73e91c981ac522177"}, 520 | ] 521 | 522 | [package.dependencies] 523 | botocore = ">=1.37.4,<2.0a.0" 524 | 525 | [package.extras] 526 | crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"] 527 | 528 | [[package]] 529 | name = "six" 530 | version = "1.16.0" 531 | description = "Python 2 and 3 compatibility utilities" 532 | optional = false 533 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 534 | groups = ["main"] 535 | files = [ 536 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 537 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 538 | ] 539 | 540 | [[package]] 541 | name = "tomli" 542 | version = "2.0.1" 543 | description = "A lil' TOML parser" 544 | optional = false 545 | python-versions = ">=3.7" 546 | groups = ["dev"] 547 | markers = "python_version < \"3.11\"" 548 | files = [ 549 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 550 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 551 | ] 552 | 553 | [[package]] 554 | name = "typing-extensions" 555 | version = "4.12.2" 556 | description = "Backported and Experimental Type Hints for Python 3.8+" 557 | optional = false 558 | python-versions = ">=3.8" 559 | groups = ["dev"] 560 | files = [ 561 | {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, 562 | {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, 563 | ] 564 | 565 | [[package]] 566 | name = "urllib3" 567 | version = "1.26.19" 568 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
569 | optional = false 570 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" 571 | groups = ["main"] 572 | markers = "python_version < \"3.10\"" 573 | files = [ 574 | {file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"}, 575 | {file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"}, 576 | ] 577 | 578 | [package.extras] 579 | brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] 580 | secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] 581 | socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] 582 | 583 | [[package]] 584 | name = "urllib3" 585 | version = "2.2.2" 586 | description = "HTTP library with thread-safe connection pooling, file post, and more." 587 | optional = false 588 | python-versions = ">=3.8" 589 | groups = ["main"] 590 | markers = "python_version >= \"3.10\"" 591 | files = [ 592 | {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, 593 | {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, 594 | ] 595 | 596 | [package.extras] 597 | brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] 598 | h2 = ["h2 (>=4,<5)"] 599 | socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] 600 | zstd = ["zstandard (>=0.18.0)"] 601 | 602 | [metadata] 603 | lock-version = "2.1" 604 | python-versions = ">3.9" 605 | content-hash = "04ecbb5fd60d3dcda4eea5a21434dc5c46a2b7ecad6b8aa8e65cc9b3378e49c6" 606 | --------------------------------------------------------------------------------