├── tests
│   ├── __init__.py
│   ├── auth
│   │   ├── __init__.py
│   │   └── data
│   │       ├── test-key.pub.pem
│   │       └── test-key.pem
│   ├── mocks
│   │   ├── __init__.py
│   │   └── google_cloud_storage.py
│   ├── transfer
│   │   ├── __init__.py
│   │   ├── conftest.py
│   │   └── test_module.py
│   ├── storage
│   │   ├── data
│   │   │   └── test.csv
│   │   ├── test_local.py
│   │   ├── cassettes
│   │   │   ├── TestAzureBlobStorageBackend.test_get_download_action.yaml
│   │   │   ├── TestAzureBlobStorageBackend.test_get_upload_action.yaml
│   │   │   ├── TestAzureBlobStorageBackend.test_exists_not_exists.yaml
│   │   │   ├── TestAzureBlobStorageBackend.test_get_size_not_existing.yaml
│   │   │   ├── TestAzureBlobStorageBackend.test_verify_object_not_there.yaml
│   │   │   ├── TestAmazonS3StorageBackend.test_get_upload_action.yaml
│   │   │   ├── TestAmazonS3StorageBackend.test_get_download_action.yaml
│   │   │   ├── TestAzureBlobStorageBackend.test_get_raises_if_not_found.yaml
│   │   │   ├── TestGoogleCloudStorageBackend.test_get_download_action.yaml
│   │   │   ├── TestGoogleCloudStorageBackend.test_get_upload_action.yaml
│   │   │   ├── TestAmazonS3StorageBackend.test_exists_not_exists.yaml
│   │   │   ├── TestAmazonS3StorageBackend.test_get_size_not_existing.yaml
│   │   │   ├── TestAmazonS3StorageBackend.test_get_raises_if_not_found.yaml
│   │   │   ├── TestAmazonS3StorageBackend.test_verify_object_not_there.yaml
│   │   │   ├── TestGoogleCloudStorageBackend.test_exists_not_exists.yaml
│   │   │   ├── TestGoogleCloudStorageBackend.test_get_size_not_existing.yaml
│   │   │   └── TestGoogleCloudStorageBackend.test_verify_object_not_there.yaml
│   │   ├── test_azure.py
│   │   ├── test_amazon_s3.py
│   │   ├── test_google_cloud.py
│   │   └── __init__.py
│   ├── test_error_responses.py
│   ├── helpers.py
│   ├── test_schema.py
│   ├── conftest.py
│   └── test_middleware.py
├── giftless
│   ├── __init__.py
│   ├── wsgi_entrypoint.py
│   ├── exc.py
│   ├── storage
│   │   ├── exc.py
│   │   ├── local_storage.py
│   │   ├── __init__.py
│   │   └── amazon_s3.py
│   ├── error_handling.py
│   ├── transfer
│   │   ├── types.py
│   │   ├── basic_external.py
│   │   ├── multipart.py
│   │   └── __init__.py
│   ├── representation.py
│   ├── auth
│   │   ├── allow_anon.py
│   │   └── identity.py
│   ├── schema.py
│   ├── app.py
│   ├── util.py
│   ├── config.py
│   └── view.py
├── MANIFEST.in
├── .dockerignore
├── changelog.d
│   └── _template.md.jinja
├── examples
│   └── github-lfs
│       └── .env
├── .gitignore
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── test.yaml
│       ├── periodic.yaml
│       └── ci.yaml
├── docs
│   ├── source
│   │   ├── guides.rst
│   │   ├── api.rst
│   │   ├── development.rst
│   │   ├── components.rst
│   │   ├── index.rst
│   │   ├── developer-guide.md
│   │   ├── installation.md
│   │   ├── conf.py
│   │   ├── github-lfs.md
│   │   ├── wsgi-middleware.md
│   │   └── transfer-adapters.md
│   ├── Makefile
│   └── make.bat
├── scripts
│   └── docker-tag.sh
├── .codeclimate.yml
├── CHANGELOG.md
├── requirements
│   ├── dev.in
│   ├── main.in
│   └── main.txt
├── .readthedocs.yaml
├── flask-develop.sh
├── .travis.yml
├── .pre-commit-config.yaml
├── LICENSE
├── tox.ini
├── README.md
├── Dockerfile
└── Makefile
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/giftless/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/auth/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/mocks/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/transfer/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.md
2 |
--------------------------------------------------------------------------------
/tests/storage/data/test.csv:
--------------------------------------------------------------------------------
1 | FID,Mkt-RF,SMB,HML,RF
2 | 192607, 2.96, -2.30, -2.87, 0.22
3 | 192608, 2.64, -1.40, 4.19, 0.25
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | /.venv*
2 | /build
3 | /dist
4 | /lfs-storage
5 | *.egg-info
6 | *.pyc
7 | .pytest_cache
8 | .mypy_cache
9 | /.idea
10 | .tox
11 | .make-cache
--------------------------------------------------------------------------------
/changelog.d/_template.md.jinja:
--------------------------------------------------------------------------------
1 |
2 | {%- for cat in config.categories %}
3 |
4 | ### {{ cat }}
5 |
6 | -
7 | {%- endfor %}
8 |
--------------------------------------------------------------------------------
/giftless/wsgi_entrypoint.py:
--------------------------------------------------------------------------------
1 | """Entry point module for WSGI.
2 |
3 | This is used when running the app using a WSGI server such as uWSGI.
4 | """
5 | from .app import init_app
6 |
7 | app = init_app()
8 |
--------------------------------------------------------------------------------
/examples/github-lfs/.env:
--------------------------------------------------------------------------------
1 | # listening (proxy) port on the host
2 | SERVICE_PORT=5000
3 | # inner port giftless listens on
4 | GIFTLESS_PORT=5000
5 | # inner port the reverse proxy listens on
6 | PROXY_PORT=8080
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.idea
2 | /.venv*
3 | .pytest_cache
4 | .mypy_cache
5 | /build
6 | /dist
7 | *.egg-info
8 | *.pyc
9 | /lfs-storage
10 | /*.local.yaml
11 | /.coverage
12 | /.make-cache
13 | /.tox
14 | /.DS_Store
15 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "docker"
4 | directory: "/"
5 | schedule:
6 | interval: "weekly"
7 |
8 | - package-ecosystem: "github-actions"
9 | directory: "/"
10 | schedule:
11 | interval: "weekly"
12 |
--------------------------------------------------------------------------------
/docs/source/guides.rst:
--------------------------------------------------------------------------------
1 | How-to Guides
2 | =============
3 |
4 | This section includes several how-to guides designed to get you started with Giftless quickly.
5 |
6 | .. toctree::
7 | :maxdepth: 1
8 |
9 | quickstart
10 | using-gcs
11 | jwt-auth-guide
12 | github-lfs
13 |
--------------------------------------------------------------------------------
/docs/source/api.rst:
--------------------------------------------------------------------------------
1 | Giftless APIs
2 | =============
3 |
4 | Transfer Adapters
5 | -----------------
6 |
7 | .. automodule:: giftless.transfer
8 | :members:
9 | .. .:undoc-members:
10 |
11 | Storage Backend Interfaces
12 | --------------------------
13 |
14 | .. automodule:: giftless.storage
15 | :members:
16 |
--------------------------------------------------------------------------------
/docs/source/development.rst:
--------------------------------------------------------------------------------
1 | Development
2 | ===========
3 | This section is intended for developers aiming to modify, extend or contribute to Giftless,
4 | or that are interested in more extensive technical information.
5 |
6 | .. toctree::
7 | :maxdepth: 1
8 | :caption: Contents:
9 |
10 | developer-guide
11 | multipart-spec
12 |
--------------------------------------------------------------------------------
/scripts/docker-tag.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Determine the tag for Docker images based on GitHub Actions environment
4 | # variables.
5 |
6 | set -eo pipefail
7 |
8 | if [ -n "$GITHUB_HEAD_REF" ]; then
9 | # For pull requests
10 | echo ${GITHUB_HEAD_REF} | sed -E 's,/,-,g'
11 | else
12 | # For push events
13 | echo ${GITHUB_REF} | sed -E 's,refs/(heads|tags)/,,' | sed -E 's,/,-,g'
14 | fi
15 |
--------------------------------------------------------------------------------
/.codeclimate.yml:
--------------------------------------------------------------------------------
1 | version: "2" # https://docs.codeclimate.com/docs/advanced-configuration
2 |
3 | checks:
4 | argument-count:
5 | config:
6 | threshold: 8
7 |
8 | plugins:
9 |
10 | bandit:
11 | enabled: true
12 |
13 | git-legal:
14 | enabled: true
15 | config:
16 | allow_affero_copyleft: false
17 | allow_strong_copyleft: false
18 |
19 | fixme:
20 | enabled: true
21 |
--------------------------------------------------------------------------------
/giftless/exc.py:
--------------------------------------------------------------------------------
1 | """Map Werkzueg exceptions to domain-specific exceptions.
2 |
3 | These exceptions should be used in all domain (non-Flask specific) code
4 | to avoid tying in to Flask / Werkzueg where it is not needed.
5 | """
6 |
7 | from werkzeug.exceptions import Forbidden, NotFound, UnprocessableEntity
8 |
9 | InvalidPayload = UnprocessableEntity
10 |
11 | __all__ = ["NotFound", "Forbidden", "InvalidPayload"]
12 |
--------------------------------------------------------------------------------
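A minimal usage sketch of these aliases (the `load_ref` helper below is hypothetical and not part of Giftless): domain code raises the re-exported names instead of importing from Werkzeug directly, so only this module touches the framework.

```python
from giftless.exc import InvalidPayload, NotFound


def load_ref(data: dict) -> str:
    """Hypothetical domain-level helper that validates part of a batch payload."""
    if "ref" not in data:
        # Rendered as an HTTP 422 by the Flask error handler
        raise InvalidPayload("Expected a 'ref' key in the request payload")
    if not data["ref"]:
        # Rendered as an HTTP 404
        raise NotFound("The requested ref does not exist")
    return data["ref"]
```
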
/docs/source/components.rst:
--------------------------------------------------------------------------------
1 | Architecture and Components
2 | ===========================
3 |
4 | Giftless is highly modular, and allows customization of most of its behavior
5 | through specialized classes, each responsible for a different aspect of the
6 | server's operation.
7 |
8 | .. toctree::
9 | :maxdepth: 1
10 | :caption: Contents:
11 |
12 | transfer-adapters
13 | storage-backends
14 | auth-providers
15 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## 0.6.2 (2024-12-16)
4 |
5 | ### Bug fixes
6 |
7 | - Correct typo in pyproject.toml
8 |
9 |
10 | ## v0.6.1 (2024-12-16)
11 |
12 | ### Bug fixes
13 |
14 | - Make CI attempt to upload on release
15 |
16 |
17 | ## v0.6.0 (2024-12-16)
18 |
19 | ### New features
20 |
21 | - Support Python 3.13
22 |
23 | - Work through PyPI publication
24 |
--------------------------------------------------------------------------------
/tests/transfer/conftest.py:
--------------------------------------------------------------------------------
1 | """Some global fixtures for transfer tests."""
2 | from collections.abc import Generator
3 |
4 | import pytest
5 |
6 | from giftless import transfer
7 |
8 |
9 | @pytest.fixture
10 | def _reset_registered_transfers() -> Generator:
11 | """Reset global registered transfer adapters for each module."""
12 | adapters = dict(transfer._registered_adapters)
13 | try:
14 | yield
15 | finally:
16 | transfer._registered_adapters = adapters
17 |
--------------------------------------------------------------------------------
/requirements/dev.in:
--------------------------------------------------------------------------------
1 | -c main.txt
2 |
3 | uv
4 |
5 | pip-tools
6 | tox
7 | flake8
8 | pytest
9 | pytest-isort
10 | pytest-mypy
11 | pytest-env
12 | pytest-cov
13 | pytest-vcr
14 | responses
15 |
16 | pytz
17 | types-pytz
18 | types-jwt
19 | types-python-dateutil
20 | types-PyYAML
21 | types-cachetools
22 |
23 | boto3-stubs
24 | botocore-stubs
25 | google-auth-stubs
26 |
27 | # Documentation Requirements
28 | recommonmark
29 | furo
30 | sphinx
31 | sphinx-autodoc-typehints
32 |
33 | # Internal tooling
34 | scriv
35 |
36 |
--------------------------------------------------------------------------------
/giftless/storage/exc.py:
--------------------------------------------------------------------------------
1 | """Storage related errors."""
2 |
3 |
4 | class StorageError(RuntimeError):
5 | """Base class for storage errors."""
6 |
7 | code: int | None = None
8 |
9 | def as_dict(self) -> dict[str, str | int | None]:
10 | return {"message": str(self), "code": self.code}
11 |
12 |
13 | class ObjectNotFoundError(StorageError):
14 | """No such object exists."""
15 |
16 | code = 404
17 |
18 |
19 | class InvalidObjectError(StorageError):
20 | """Request is syntactically OK, but invalid (wrong fields, usually)."""
21 |
22 | code = 422
23 |
--------------------------------------------------------------------------------
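A short illustrative sketch of how these errors become LFS-style JSON payloads; the object ID below is a placeholder value.

```python
from giftless.storage.exc import ObjectNotFoundError

try:
    raise ObjectNotFoundError("Object 12345678 was not found")
except ObjectNotFoundError as err:
    # as_dict() produces a JSON-friendly mapping, here:
    # {"message": "Object 12345678 was not found", "code": 404}
    payload = err.as_dict()
    assert payload["code"] == 404
```
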
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yaml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Build documentation in the docs/source directory with Sphinx
9 | sphinx:
10 | configuration: docs/source/conf.py
11 |
12 | # Optionally build your docs in additional formats such as PDF
13 | formats:
14 | - pdf
15 |
16 | # Optionally set the version of Python and requirements required to build your docs
17 | python:
18 | version: 3.10
19 | install:
20 | - requirements: requirements/dev.txt
21 |
--------------------------------------------------------------------------------
/requirements/main.in:
--------------------------------------------------------------------------------
1 | figcan==0.0.*
2 | flask~=2.3
3 | flask-marshmallow~=0.15
4 | pyyaml~=6.0
5 | PyJWT~=2.4
6 | webargs~=8.3
7 | typing-extensions~=4.9
8 | python-dotenv~=1.0
9 | python-dateutil~=2.8
10 | cryptography>=3.2 # not direct dependency, but pin to >=3.2 due to vulnerabilities
11 |
12 | flask-classful~=0.16
13 |
14 | werkzeug~=3.0
15 |
16 | # Storage backend dependencies
17 | # TODO: Split these out so users don't have to install all of them
18 | azure-storage-blob~=12.19
19 | google-cloud-storage~=2.14
20 | boto3~=1.34
21 |
22 | # GitHub AA Provider
23 | cachetools~=5.3
24 |
25 | importlib-metadata; python_version < '3.13'
26 |
--------------------------------------------------------------------------------
/flask-develop.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | export FLASK_ENV=development
4 | export FLASK_APP=giftless.wsgi_entrypoint
5 | export GIFTLESS_DEBUG=1
6 |
7 | DEFAULT_CONFIG_FILE=giftless.yaml
8 | if [ -z "$GIFTLESS_CONFIG_FILE" ]; then
9 | if [ -f "$DEFAULT_CONFIG_FILE" ]; then
10 | export GIFTLESS_CONFIG_FILE="$DEFAULT_CONFIG_FILE"
11 | echo "GIFTLESS_CONFIG_FILE not set, defaulting to local config file $DEFAULT_CONFIG_FILE" >&2
12 | else
13 | echo "GIFTLESS_CONFIG_FILE not set and $DEFAULT_CONFIG_FILE not found, running with default configuration" >&2
14 | fi
15 | else
16 | echo "Using configuration file: $GIFTLESS_CONFIG_FILE" >&2
17 | fi
18 |
19 | flask run "$@"
20 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 |
3 | python:
4 | - 3.7
5 | - 3.8
6 |
7 | env:
8 | global:
9 | - CC_TEST_REPORTER_ID=cca5a6743728de037cb47d4a845e35c682b4469c0f9c52851f4f3824dd471f87
10 |
11 | install:
12 | - pip install -r requirements/main.txt
13 |
14 | before_script:
15 | - curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter
16 | - chmod +x ./cc-test-reporter
17 | - ./cc-test-reporter before-build
18 |
19 | script: make test PYTEST_EXTRA_ARGS="--cov=giftless"
20 |
21 | after_script:
22 | - coverage xml
23 | - ./cc-test-reporter after-build --exit-code $TRAVIS_TEST_RESULT
24 |
25 | cache:
26 | directories:
27 | - $HOME/.cache/pip
28 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/tests/auth/data/test-key.pub.pem:
--------------------------------------------------------------------------------
1 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDJE/2GADf8UYkBVDGUMnMWApA6KQMlqXEwATTbDIr+JN4U1lOUHWT+gkOuS7B+b059wE6ObPLpoQyY+9xQkF+peYKpW7CsdmYkk3vDPJH4GDPne+ulL6FPXxECHFqDGsz8dKSGGsO4ZA5F2wxSpZ0nlFgWEPJZlHZxR9LzdHGQqcxncpLGdgQa763ZPsd7D3hJJsHTJEOp/KR36tMW8GbNrwirOjU1vsGOItZFJaRFq7hvDE8UeEaKHZlom76re7yOevCfQA5tVKP9bqmzxqaEG4wOxoTbDKOzy2jVsOl7O5mSie1oWijnqWXcVc47k3faLkemuwUhZetAWAbXEmn7t4NjuEqIrXF52diTySx7b9clFO0YGAU/qBt20J193sNBIYIbLpD/SwMImoAzfSoNVkM2D3jSF2nWT2fXOFNJ5b9ugVy/T0qOYWD5lWZlXHGpmJ090aoHdVvAFG8fuJ0UNr6dKDmAlszZhZo57oV5SQibv9GzET4x9T9MUe9b/irz2wer/ZX8RkfVVIHK/lHeLJazxwsWORCSotNh1fZL9BL6fC39pcqOfodeWPYfWF10zPtRNEEPLn8hdGRbXntgVYYg8hW+Oj/UemIA+0n0K/4wsI76Ti7v1lSNUqSluIZY3mhDAiB0vBfWY7JLVQtcVl+F/k8rS6PAsii89LKKlQ== shahar@skygig.local
2 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v4.5.0
4 | hooks:
5 | - id: check-merge-conflict
6 | - id: check-toml
7 | # FIXME: VCR is unhappy; address in test rewrite
8 | # - id: check-yaml
9 | # args: [--allow-multiple-documents]
10 | - id: trailing-whitespace
11 |
12 | - repo: https://github.com/astral-sh/ruff-pre-commit
13 | rev: v0.1.8
14 | hooks:
15 | - id: ruff
16 | args: [--fix, --exit-non-zero-on-fix]
17 | - id: ruff-format
18 |
19 | - repo: https://github.com/adamchainz/blacken-docs
20 | rev: 1.16.0
21 | hooks:
22 | - id: blacken-docs
23 | additional_dependencies: [black==23.12.1]
24 | args: [-l, '79', -t, 'py310']
25 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. giftless documentation master file, created by
2 | sphinx-quickstart on Wed Aug 19 12:32:30 2020.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Giftless
7 | ========
 8 | Giftless is a Python implementation of a `Git LFS <https://git-lfs.github.com/>`_ Server.
9 | It is designed with flexibility in mind, to allow pluggable storage backends, transfer
10 | methods and authentication methods.
11 |
12 | .. toctree::
13 | :maxdepth: 2
14 | :caption: Contents:
15 |
16 | installation
17 | guides
18 | configuration
19 | wsgi-middleware
20 | components
21 | development
22 | api
23 |
24 | Indices and Tables
25 | ------------------
26 | * :ref:`genindex`
27 | * :ref:`modindex`
28 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/.github/workflows/test.yaml:
--------------------------------------------------------------------------------
1 | name: Run Tests
2 | on:
3 | push:
4 | branches:
5 | - master
6 | pull_request:
7 | branches:
8 | - master
9 |
10 | jobs:
11 | test:
12 | runs-on: ubuntu-20.04
13 | strategy:
14 | matrix:
15 | python-version: [ "3.10", "3.11", "3.12" ]
16 | steps:
17 | - uses: actions/checkout@v4
18 | - name: Install Python 3
19 | uses: actions/setup-python@v5
20 | with:
21 | python-version: ${{ matrix.python-version }}
22 | - name: Set up the test environment
23 | run: |
24 | touch requirements.txt dev-requirements.txt
25 | make dev-setup
26 | - name: Run tests
27 | run: |
28 | make test PYTEST_EXTRA_ARGS="--cov=giftless"
29 | coverage xml
30 | - uses: paambaati/codeclimate-action@v6
31 | env:
32 | CC_TEST_REPORTER_ID: cca5a6743728de037cb47d4a845e35c682b4469c0f9c52851f4f3824dd471f87
33 |
--------------------------------------------------------------------------------
/giftless/error_handling.py:
--------------------------------------------------------------------------------
1 | """Handle errors according to the Git LFS spec.
2 |
3 | See https://github.com/git-lfs/git-lfs/blob/master/docs\
4 | /api/batch.md#response-errors
5 | """
6 | from flask import Flask, Response
7 | from werkzeug.exceptions import default_exceptions
8 |
9 | from .representation import output_git_lfs_json
10 |
11 |
12 | class ApiErrorHandler:
13 | """Handler to send JSON response for errors."""
14 |
15 | def __init__(self, app: Flask | None = None) -> None:
16 | if app:
17 | self.init_app(app)
18 |
19 | def init_app(self, app: Flask) -> None:
20 | for code in default_exceptions:
21 | app.errorhandler(code)(self.error_as_json)
22 |
23 | @classmethod
24 | def error_as_json(cls, ex: Exception) -> Response:
25 | """Handle errors by returning a JSON response."""
26 | code = ex.code if hasattr(ex, "code") else 500
27 | data = {"message": str(ex)}
28 |
29 | return output_git_lfs_json(data=data, code=code)
30 |
--------------------------------------------------------------------------------
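A minimal sketch of wiring the handler into a Flask app. The standalone `Flask(__name__)` instance and the `/boom` route are illustrative only; in Giftless this registration happens inside `init_app`.

```python
from flask import Flask, abort

from giftless.error_handling import ApiErrorHandler

app = Flask(__name__)
ApiErrorHandler(app)  # registers error_as_json for every default HTTP error code


@app.route("/boom")
def boom() -> str:
    abort(404)


# Any failing request now returns a Git LFS style JSON error body
with app.test_client() as client:
    resp = client.get("/boom")
    assert resp.status_code == 404
    assert resp.headers["Content-Type"] == "application/vnd.git-lfs+json"
```
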
/giftless/transfer/types.py:
--------------------------------------------------------------------------------
1 | """Some useful type definitions for transfer protocols."""
2 | from typing import Any, TypedDict
3 |
4 |
5 | class ObjectAttributes(TypedDict):
6 | """Type for object attributes sent in batch request."""
7 |
8 | oid: str
9 | size: int
10 |
11 |
12 | class BasicUploadActions(TypedDict, total=False):
13 | """Fundamental actions for upload."""
14 |
15 | upload: dict[str, Any]
16 | verify: dict[str, Any]
17 |
18 |
19 | class UploadObjectAttributes(ObjectAttributes, total=False):
20 | """Convert BasicUploadActions to object attributes."""
21 |
22 | actions: BasicUploadActions
23 |
24 |
25 | class MultipartUploadActions(TypedDict, total=False):
26 | """Additional actions to support multipart uploads."""
27 |
28 | init: dict[str, Any]
29 | commit: dict[str, Any]
30 | parts: list[dict[str, Any]]
31 | abort: dict[str, Any]
32 | verify: dict[str, Any]
33 |
34 |
35 | class MultipartUploadObjectAttributes(ObjectAttributes, total=False):
36 | """Convert MultipartUploadActions to object attributes."""
37 |
38 | actions: MultipartUploadActions
39 |
--------------------------------------------------------------------------------
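An illustrative construction of the typed dictionaries above, roughly as a transfer adapter might return them; all URLs, headers and sizes are made-up placeholder values.

```python
from giftless.transfer.types import BasicUploadActions, UploadObjectAttributes

# Placeholder action URLs for illustration only
actions = BasicUploadActions(
    upload={"href": "https://storage.example.com/upload/12345678", "header": {}},
    verify={"href": "https://lfs.example.com/verify", "header": {}},
)
upload_object = UploadObjectAttributes(oid="12345678", size=8, actions=actions)

assert upload_object["actions"]["upload"]["href"].startswith("https://")
```
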
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2020-2024 Datopian (Viderum, Inc.)
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is furnished
8 | to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 | SOFTWARE.
20 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | # tox (https://tox.readthedocs.io/) is a tool for running tests
2 | # in multiple virtualenvs. This configuration file will run the
3 | # test suite on all supported python versions. To use it, "pip install tox"
4 | # and then run "tox" from this directory.
5 |
6 | [tox]
7 | envlist = py,typing,lint,docs
8 | isolated_build=true
9 |
10 | [testenv]
11 | deps =
12 | -rrequirements/main.txt
13 | -rrequirements/dev.txt
14 |
15 | [testenv:lint]
16 | description = Lint codebase by running pre-commit (Black, isort, Flake8)
17 | skip_install = true
18 | deps =
19 | pre-commit
20 | commands = pre-commit run --all-files
21 |
22 | [testenv:py]
23 | description = Run pytest
24 | commands =
25 | pytest -vv {posargs} --cov=giftless
26 |
27 | [testenv:coverage-report]
28 | description = Compile coverage from each test run.
29 | skip_install = true
30 | deps = coverage[toml]>=5.0.2
31 | depends =
32 | py-coverage
33 | commands = coverage report
34 |
35 | [testenv:docs]
36 | description = Build documentation (HTML) with Sphinx
37 | allowlist_externals =
38 | make
39 | commands =
40 | make docs
41 |
42 | [testenv:typing]
43 | description = Run mypy
44 | commands =
45 | mypy giftless tests
46 |
--------------------------------------------------------------------------------
/tests/transfer/test_module.py:
--------------------------------------------------------------------------------
1 | """Test common transfer module functionality."""
2 |
3 | import pytest
4 |
5 | from giftless import transfer
6 |
7 |
8 | @pytest.mark.parametrize(
9 | ("register", "requested", "expected"),
10 | [
11 | (["basic"], ["basic"], "basic"),
12 | (["foobar", "basic", "bizbaz"], ["basic"], "basic"),
13 | (["foobar", "basic", "bizbaz"], ["foobar"], "foobar"),
14 | (["foobar", "basic", "bizbaz"], ["bizbaz", "basic"], "bizbaz"),
15 | ],
16 | )
17 | @pytest.mark.usefixtures("_reset_registered_transfers")
18 | def test_transfer_adapter_matching(
19 | register: list[str], requested: list[str], expected: str
20 | ) -> None:
21 | for adapter in register:
22 | transfer.register_adapter(adapter, transfer.TransferAdapter())
23 | actual = transfer.match_transfer_adapter(requested)
24 | assert expected == actual[0]
25 | assert isinstance(actual[1], transfer.TransferAdapter)
26 |
27 |
28 | def test_transfer_adapter_matching_nomatch() -> None:
29 | for adapter in ["foobar", "basic", "bizbaz"]:
30 | transfer.register_adapter(adapter, transfer.TransferAdapter())
31 | with pytest.raises(ValueError, match="Unable to match"):
32 | transfer.match_transfer_adapter(["complex", "even-better"])
33 |
--------------------------------------------------------------------------------
/tests/storage/test_local.py:
--------------------------------------------------------------------------------
1 | """Tests for the local storage backend."""
2 | import shutil
3 | from collections.abc import Generator
4 | from pathlib import Path
5 |
6 | import pytest
7 |
8 | from giftless.storage.local_storage import LocalStorage
9 |
10 | from . import StreamingStorageAbstractTests
11 |
12 |
13 | @pytest.fixture
14 | def storage_dir(tmp_path: Path) -> Generator[Path, None, None]:
15 | """Create a unique temp dir for testing storage."""
16 | tdir = None
17 | try:
18 | tdir = tmp_path / "giftless_tests"
19 | tdir.mkdir(parents=True)
20 | yield tdir
21 | finally:
22 | if tdir and tdir.is_dir():
23 | shutil.rmtree(tdir)
24 |
25 |
26 | @pytest.fixture
27 | def storage_backend(storage_dir: str) -> LocalStorage:
28 | """Provide a local storage backend for all local tests."""
29 | return LocalStorage(path=storage_dir)
30 |
31 |
32 | class TestLocalStorageBackend(StreamingStorageAbstractTests):
33 | def test_local_path_created_on_init(self, storage_dir: Path) -> None:
34 | """Test that the local storage path is created on module init."""
35 | storage_path = storage_dir / "here"
36 | assert not storage_path.exists()
37 | LocalStorage(path=str(storage_path))
38 | assert storage_path.exists()
39 |
--------------------------------------------------------------------------------
/.github/workflows/periodic.yaml:
--------------------------------------------------------------------------------
1 | # This is a separate run of the Python test suite that doesn't cache the tox
2 | # environment and runs from a schedule. The purpose is to test whether
3 | # updating pinned dependencies would cause any tests to fail.
4 |
5 | name: Periodic CI
6 |
7 | "on":
8 | schedule:
9 | - cron: "0 12 * * 1"
10 | workflow_dispatch: {}
11 |
12 | jobs:
13 | test:
14 | runs-on: ubuntu-latest
15 | timeout-minutes: 10
16 |
17 | strategy:
18 | matrix:
19 | python:
20 | - "3.11"
21 |
22 | steps:
23 | - uses: actions/checkout@v4
24 |
25 | - name: Run neophile
26 | uses: lsst-sqre/run-neophile@v1
27 | with:
28 | python-version: ${{ matrix.python }}
29 | mode: update
30 |
31 | - name: Run tox
32 | uses: lsst-sqre/run-tox@v1
33 | with:
34 | python-version: ${{ matrix.python }}
35 | tox-envs: "lint,typing,py"
36 |
37 | - name: Report status
38 | if: always()
39 | uses: ravsamhq/notify-slack-action@v2
40 | with:
41 | status: ${{ job.status }}
42 | notify_when: "failure"
43 | notification_title: "Periodic test for {repo} failed"
44 | env:
45 | SLACK_WEBHOOK_URL: ${{ secrets.SLACK_ALERT_WEBHOOK }}
46 |
--------------------------------------------------------------------------------
/tests/test_error_responses.py:
--------------------------------------------------------------------------------
1 | """Tests for schema definitions."""
2 | from flask.testing import FlaskClient
3 |
4 | from .helpers import batch_request_payload
5 |
6 |
7 | def test_error_response_422(test_client: FlaskClient) -> None:
8 | """Test an invalid payload error."""
9 | response = test_client.post(
10 | "/myorg/myrepo.git/info/lfs/objects/batch",
11 | json=batch_request_payload(delete_keys=["operation"]),
12 | )
13 |
14 | assert response.status_code == 422
15 | assert response.content_type == "application/vnd.git-lfs+json"
16 | assert "message" in response.json # type:ignore[operator]
17 |
18 |
19 | def test_error_response_404(test_client: FlaskClient) -> None:
20 | """Test a bad route error."""
21 | response = test_client.get("/now/for/something/completely/different")
22 |
23 | assert response.status_code == 404
24 | assert response.content_type == "application/vnd.git-lfs+json"
25 | assert "message" in response.json # type:ignore[operator]
26 |
27 |
28 | def test_error_response_403(test_client: FlaskClient) -> None:
29 | """Test that we get Forbidden when trying to upload with the default
30 | read-only setup.
31 | """
32 | response = test_client.post(
33 | "/myorg/myrepo.git/info/lfs/objects/batch",
34 | json=batch_request_payload(operation="upload"),
35 | )
36 |
37 | assert response.status_code == 403
38 | assert response.content_type == "application/vnd.git-lfs+json"
39 | assert "message" in response.json # type:ignore[operator]
40 |
--------------------------------------------------------------------------------
/giftless/representation.py:
--------------------------------------------------------------------------------
1 | """Representations define how to render a response for a given content-type.
2 |
3 | Most commonly this will convert data returned by views into JSON or a similar
4 | format.
5 |
6 | See http://flask-classful.teracy.org/\
7 | #adding-resource-representations-get-real-classy-and-put-on-a-top-hat
8 | """
9 | import json
10 | from datetime import datetime
11 | from functools import partial
12 | from typing import Any
13 |
14 | from flask import Response, make_response
15 |
16 | GIT_LFS_MIME_TYPE = "application/vnd.git-lfs+json"
17 |
18 | # TODO @athornton: this, like the schemas, seems like something Pydantic
19 | # does really well, but probably a big job.
20 |
21 |
22 | class CustomJsonEncoder(json.JSONEncoder):
23 | """Custom JSON encoder that supports some additional required types."""
24 |
25 | def default(self, o: Any) -> Any:
26 | if isinstance(o, datetime):
27 | return o.isoformat()
28 | return super().default(o)
29 |
30 |
31 | def output_json(
32 | data: Any,
33 | code: int | None,
34 | headers: dict[str, str] | None = None,
35 | content_type: str = "application/json",
36 | ) -> Response:
37 | """Set appropriate Content-Type header for JSON response."""
38 | dumped = json.dumps(data, cls=CustomJsonEncoder)
39 | if headers:
40 | headers.update({"Content-Type": content_type})
41 | else:
42 | headers = {"Content-Type": content_type}
43 | return make_response(dumped, code, headers)
44 |
45 |
46 | output_git_lfs_json = partial(output_json, content_type=GIT_LFS_MIME_TYPE)
47 |
--------------------------------------------------------------------------------
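A small sketch of the `output_git_lfs_json` partial in action; the test request context is only there because `make_response` needs an active Flask context, and the payload is a placeholder.

```python
from flask import Flask

from giftless.representation import GIT_LFS_MIME_TYPE, output_git_lfs_json

app = Flask(__name__)

with app.test_request_context():
    resp = output_git_lfs_json(data={"message": "Not Found"}, code=404)
    assert resp.status_code == 404
    assert resp.headers["Content-Type"] == GIT_LFS_MIME_TYPE
```
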
/giftless/auth/allow_anon.py:
--------------------------------------------------------------------------------
1 | """Dummy authentication module.
2 |
3 | Always returns an `AnonymousUser` identity object.
4 |
5 | Depending on whether "read only" or "read write" authentication was
6 | used, the user is going to have read-only or read-write permissions on
7 | all objects.
8 |
9 | Only use this in production if you want your Giftless server to allow
10 | anonymous access. Most likely, this is not what you want unless you
11 | are deploying in a closed, 100% trusted network, or your server is
12 | behind a proxy that handles authentication for the services it
13 | manages.
14 |
15 | If for some reason you want to allow anonymous users as a fallback
16 | (e.g. you want to allow read-only access to anyone), be sure to load
17 | this authenticator last.
18 | """
19 | from typing import Any
20 |
21 | from .identity import DefaultIdentity, Permission
22 |
23 |
24 | class AnonymousUser(DefaultIdentity):
25 | """An anonymous user object."""
26 |
27 | def __init__(self, *args: Any, **kwargs: Any) -> None:
28 | super().__init__(*args, **kwargs)
29 | if self.name is None:
30 | self.name = "anonymous"
31 |
32 |
33 | def read_only(_: Any) -> AnonymousUser:
34 | """Give read-only permissions to everyone via AnonymousUser."""
35 | user = AnonymousUser()
36 | user.allow(permissions={Permission.READ, Permission.READ_META})
37 | return user
38 |
39 |
40 | def read_write(_: Any) -> AnonymousUser:
41 | """Give full permissions to everyone via AnonymousUser."""
42 | user = AnonymousUser()
43 | user.allow(permissions=Permission.all())
44 | return user
45 |
--------------------------------------------------------------------------------
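A quick sketch of the two factory functions; the argument is ignored (in Giftless it would normally carry request-related state), so `None` is passed here purely for illustration.

```python
from giftless.auth import allow_anon

# read_only() returns an AnonymousUser granted only READ and READ_META
user = allow_anon.read_only(None)
assert user.name == "anonymous"

# read_write() grants the full Permission.all() set instead
rw_user = allow_anon.read_write(None)
assert rw_user.name == "anonymous"
```
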
/tests/storage/cassettes/TestAzureBlobStorageBackend.test_get_download_action.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | Accept:
6 | - application/xml
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | User-Agent:
12 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
13 | authorization:
14 | - fake-authz-header
15 | x-ms-client-request-id:
16 | - 73d89e84-e14f-11ea-94b4-a0999b18a477
17 | x-ms-date:
18 | - Tue, 18 Aug 2020 12:36:35 GMT
19 | x-ms-version:
20 | - '2019-07-07'
21 | method: GET
22 | uri: https://my-account.blob.core.windows.net/my-container?prefix=giftless-tests&restype=container&comp=list
23 | response:
24 | body:
25 | string: "\uFEFFgiftless-tests"
28 | headers:
29 | Content-Type:
30 | - application/xml
31 | Date:
32 | - Tue, 18 Aug 2020 12:36:35 GMT
33 | Server:
34 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
35 | Transfer-Encoding:
36 | - chunked
37 | Vary:
38 | - Origin
39 | x-ms-client-request-id:
40 | - 73d89e84-e14f-11ea-94b4-a0999b18a477
41 | x-ms-request-id:
42 | - 0e67090d-701e-00ef-3a5c-757a60000000
43 | x-ms-version:
44 | - '2019-07-07'
45 | status:
46 | code: 200
47 | message: OK
48 | version: 1
49 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAzureBlobStorageBackend.test_get_upload_action.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | Accept:
6 | - application/xml
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | User-Agent:
12 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
13 | authorization:
14 | - fake-authz-header
15 | x-ms-client-request-id:
16 | - 7347b31a-e14f-11ea-94b4-a0999b18a477
17 | x-ms-date:
18 | - Tue, 18 Aug 2020 12:36:34 GMT
19 | x-ms-version:
20 | - '2019-07-07'
21 | method: GET
22 | uri: https://my-account.blob.core.windows.net/my-container?prefix=giftless-tests&restype=container&comp=list
23 | response:
24 | body:
25 | string: "\uFEFFgiftless-tests"
28 | headers:
29 | Content-Type:
30 | - application/xml
31 | Date:
32 | - Tue, 18 Aug 2020 12:36:34 GMT
33 | Server:
34 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
35 | Transfer-Encoding:
36 | - chunked
37 | Vary:
38 | - Origin
39 | x-ms-client-request-id:
40 | - 7347b31a-e14f-11ea-94b4-a0999b18a477
41 | x-ms-request-id:
42 | - cd2f490c-e01e-0026-615c-75c78d000000
43 | x-ms-version:
44 | - '2019-07-07'
45 | status:
46 | code: 200
47 | message: OK
48 | version: 1
49 |
--------------------------------------------------------------------------------
/tests/helpers.py:
--------------------------------------------------------------------------------
1 | """Test helpers."""
2 | from collections.abc import Sequence
3 | from pathlib import Path
4 | from typing import Any
5 |
6 | import flask
7 |
8 |
9 | def batch_request_payload(
10 | delete_keys: Sequence[str] = (), **kwargs: Any
11 | ) -> dict[str, Any]:
12 | """Generate sample batch request payload."""
13 | payload = {
14 | "operation": "download",
15 | "transfers": ["basic"],
16 | "ref": {"name": "refs/heads/master"},
17 | "objects": [{"oid": "12345678", "size": 8}],
18 | }
19 |
20 | for key in delete_keys:
21 | del payload[key]
22 |
23 | payload.update(kwargs)
24 | return payload
25 |
26 |
27 | def create_file_in_storage(
28 | storage_path: str, org: str, repo: str, filename: str, size: int = 1
29 | ) -> None:
30 | """Put a dummy file in the storage path for a specific org / repo
31 | / oid combination.
32 |
33 | This is useful where we want to test download / verify actions
34 | without relying on 'put' actions to work.
35 |
36 | This assumes cleanup is done somewhere else (e.g. in the
37 | 'storage_path' fixture).
38 | """
39 | repo_path = Path(storage_path) / org / repo
40 | repo_path.mkdir(parents=True, exist_ok=True)
41 | with Path(repo_path / filename).open("wb") as f:
42 | for c in (b"0" for _ in range(size)):
43 | f.write(c)
44 |
45 |
46 | def legacy_endpoints_id(enabled: bool) -> str:
47 | return "legacy-ep" if enabled else "current-ep"
48 |
49 |
50 | def expected_uri_prefix(app: flask.Flask, *args: str) -> str:
51 | core_prefix = "/".join(args)
52 | if not app.config.get("LEGACY_ENDPOINTS"):
53 | return core_prefix + ".git/info/lfs"
54 | return core_prefix
55 |
--------------------------------------------------------------------------------
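For example, the tests in this suite build payloads like this (a small sketch of `batch_request_payload`):

```python
from tests.helpers import batch_request_payload

# Default payload: a "download" request for a single 8-byte object
payload = batch_request_payload()
assert payload["operation"] == "download"

# Keyword arguments override fields; delete_keys removes keys entirely
upload_payload = batch_request_payload(operation="upload", delete_keys=["ref"])
assert upload_payload["operation"] == "upload"
assert "ref" not in upload_payload
```
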
/giftless/schema.py:
--------------------------------------------------------------------------------
1 | """Schema for Git LFS APIs."""
2 | from enum import Enum
3 | from typing import Any
4 |
5 | import marshmallow
6 | from flask_marshmallow import Marshmallow
7 | from marshmallow import fields, pre_load, validate
8 |
9 | ma = Marshmallow()
10 |
11 | # TODO @athornton: probably a big job but it feels like this is what Pydantic
12 | # is for.
13 |
14 |
15 | class Operation(Enum):
16 | """Batch operations."""
17 |
18 | upload = "upload"
19 | download = "download"
20 |
21 |
22 | class RefSchema(ma.Schema): # type:ignore[name-defined]
23 | """ref field schema."""
24 |
25 | name = fields.String(required=True)
26 |
27 |
28 | class ObjectSchema(ma.Schema): # type:ignore[name-defined]
29 | """object field schema."""
30 |
31 | oid = fields.String(required=True)
32 | size = fields.Integer(required=True, validate=validate.Range(min=0))
33 |
34 | extra = fields.Dict(required=False, load_default=dict)
35 |
36 | @pre_load
37 | def set_extra_fields(
38 | self, data: dict[str, Any], **_: Any
39 | ) -> dict[str, Any]:
40 | extra = {}
41 | rest = {}
42 | for k, v in data.items():
43 | if k.startswith("x-"):
44 | extra[k[2:]] = v
45 | else:
46 | rest[k] = v
47 | return {"extra": extra, **rest}
48 |
49 |
50 | class BatchRequest(ma.Schema): # type:ignore[name-defined]
51 | """batch request schema."""
52 |
53 | operation = fields.Enum(Operation, required=True)
54 | transfers = fields.List(
55 | fields.String, required=False, load_default=["basic"]
56 | )
57 | ref = fields.Nested(RefSchema, required=False)
58 | objects = fields.Nested(
59 | ObjectSchema, validate=validate.Length(min=1), many=True, required=True
60 | )
61 |
62 |
63 | batch_request_schema = BatchRequest(unknown=marshmallow.EXCLUDE)
64 |
--------------------------------------------------------------------------------
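A short sketch of loading a batch request, mirroring the behavior exercised in `tests/test_schema.py`; the oid, size and filename values are placeholders.

```python
from giftless.schema import batch_request_schema

parsed = batch_request_schema.load(
    {
        "operation": "download",
        "objects": [{"oid": "123abc", "size": 8, "x-filename": "data.bin"}],
    }
)

# "transfers" falls back to ["basic"], and "x-" prefixed keys are folded
# into the per-object "extra" dict by ObjectSchema.set_extra_fields()
assert parsed["transfers"] == ["basic"]
assert parsed["objects"][0]["extra"]["filename"] == "data.bin"
```
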
/giftless/app.py:
--------------------------------------------------------------------------------
1 | """Main Flask application initialization code."""
2 | import logging
3 | import os
4 | from typing import Any
5 |
6 | from flask import Flask
7 | from flask_marshmallow import Marshmallow
8 |
9 | from giftless import config, transfer, view
10 | from giftless.auth import authentication
11 | from giftless.error_handling import ApiErrorHandler
12 | from giftless.util import get_callable
13 |
14 |
15 | def init_app(app: Flask | None = None, additional_config: Any = None) -> Flask:
16 | """Flask app initialization."""
17 | if app is None:
18 | app = Flask(__name__)
19 |
20 | config.configure(app, additional_config=additional_config)
21 |
22 | # Configure logging
23 | if os.environ.get("GIFTLESS_DEBUG"):
24 | level = logging.DEBUG
25 | else:
26 | level = logging.WARNING
27 | logging.basicConfig(
28 | format="%(asctime)-15s %(name)-15s %(levelname)s %(message)s",
29 | level=level,
30 | )
31 |
32 | # Load middleware
33 | _load_middleware(app)
34 |
35 | # Load all other Flask plugins
36 | ApiErrorHandler(app)
37 | Marshmallow(app)
38 |
39 | authentication.init_app(app)
40 |
41 | view.BatchView.register(app)
42 |
43 | # Load configured transfer adapters
44 | transfer.init_flask_app(app)
45 |
46 | return app
47 |
48 |
49 | def _load_middleware(flask_app: Flask) -> None:
50 | """Load WSGI middleware classes from configuration."""
51 | log = logging.getLogger(__name__)
52 | wsgi_app = flask_app.wsgi_app
53 | middleware_config = flask_app.config["MIDDLEWARE"]
54 |
55 | for spec in middleware_config:
56 | klass = get_callable(spec["class"])
57 | args = spec.get("args", [])
58 | kwargs = spec.get("kwargs", {})
59 | wsgi_app = klass(wsgi_app, *args, **kwargs)
60 | log.debug(f"Loaded middleware: {klass}(*{args}, **{kwargs}")
61 |
62 | flask_app.wsgi_app = wsgi_app # type:ignore[method-assign]
63 |
--------------------------------------------------------------------------------
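A minimal sketch of configuring WSGI middleware through `init_app`, mirroring the configuration used in `tests/test_middleware.py`.

```python
from giftless.app import init_app

# Wrap the WSGI app with werkzeug's ProxyFix so X-Forwarded-* headers set by a
# reverse proxy are honored when generating URLs
app = init_app(
    additional_config={
        "TESTING": True,
        "MIDDLEWARE": [
            {
                "class": "werkzeug.middleware.proxy_fix:ProxyFix",
                "kwargs": {"x_host": 1, "x_port": 1, "x_prefix": 1},
            }
        ],
    }
)
```
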
/docs/source/developer-guide.md:
--------------------------------------------------------------------------------
1 | Developer Guide
2 | ===============
3 | `giftless` is based on Flask, with the following additional libraries:
4 |
5 | * [Flask Classful](http://flask-classful.teracy.org/) for simplifying API
6 | endpoint implementation with Flask
7 | * [Marshmallow](https://marshmallow.readthedocs.io/en/stable/) for
8 | input / output serialization and validation
9 | * [figcan](https://github.com/shoppimon/figcan) for configuration handling
10 |
11 | You must have Python 3.10 or newer set up to run or develop `giftless`.
12 |
13 | ## Code Style
14 | We use the following tools and standards to write `giftless` code:
15 | * `flake8` to check your Python code for PEP8 compliance
16 | * `import` statements are checked by `isort` and should be organized
17 | accordingly
18 | * Type checking is done using `mypy`
19 |
20 | Maximum line length is set to 120 characters.
21 |
22 | ## Setting up a Virtual Environment
23 | You should develop `giftless` in a virtual environment. We use [`pip-tools`][1]
24 | to manage both development and runtime dependencies.
25 |
26 | The following snippet is an example of how to set up your virtual environment
27 | for development:
28 |
29 | $ python3 -m venv .venv
30 | $ . .venv/bin/activate
31 |
32 | (.venv) $ pip install -r dev-requirements.txt
33 | (.venv) $ pip-sync dev-requirements.txt requirements.txt
34 |
35 | ## Running the tests
36 | Once in a virtual environment, you can simply run `make test` to run all tests
37 | and code style checks:
38 |
39 | $ make test
40 |
41 | We use `pytest` for Python unit testing.
42 |
43 | In addition, simple functions can specify some `doctest` style tests in the
44 | function docstring. These tests will be tested automatically when unit tests
45 | are executed.
46 |
47 | ## Building a Docker image
48 | Simply run `make docker` to build a `uWSGI` wrapped Docker image for Giftless.
49 | The image will be named `datopian/giftless:latest` by default. You can change
50 | it, for example:
51 |
52 | $ make docker DOCKER_REPO=mycompany DOCKER_IMAGE_TAG=1.2.3
53 |
54 | This will build a Docker image tagged `mycompany/giftless:1.2.3`.
55 |
56 | [1]: https://github.com/jazzband/pip-tools
57 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Giftless - a Pluggable Git LFS Server
2 | =====================================
3 |
 4 | [Build Status](https://travis-ci.org/datopian/giftless)
 5 | [Maintainability](https://codeclimate.com/github/datopian/giftless/maintainability)
 6 | [Test Coverage](https://codeclimate.com/github/datopian/giftless/test_coverage)
7 |
8 | Giftless is a Python implementation of a [Git LFS][1] Server. It is designed
9 | with flexibility in mind, to allow pluggable storage backends, transfer
10 | methods and authentication methods.
11 |
12 | Giftless supports the *basic* Git LFS transfer mode with the following
13 | storage backends:
14 |
15 | * Local storage
16 | * [Google Cloud Storage](https://cloud.google.com/storage)
17 | * [Azure Blob Storage](https://azure.microsoft.com/en-us/services/storage/blobs/)
18 | with direct-to-cloud or streamed transfers
19 | * [Amazon S3 Storage](https://aws.amazon.com/s3/)
20 |
21 | In addition, Giftless implements a custom transfer mode called `multipart-basic`,
22 | which is designed to take advantage of many vendors' multipart upload
23 | capabilities. It requires a specialized Git LFS client to use, and is currently
24 | not supported by standard Git LFS.
25 |
26 | See the [giftless-client](https://github.com/datopian/giftless-client) project
27 | for a compatible Python Git LFS client.
28 |
29 | Additional transfer modes and storage backends could easily be added and
30 | configured.
31 |
32 | [1]: https://git-lfs.github.com/
33 |
34 | Documentation
35 | -------------
36 | * [Installation Guide](https://giftless.datopian.com/en/latest/installation.html)
37 | * [Getting Started](https://giftless.datopian.com/en/latest/quickstart.html)
38 | * [Full Documentation](https://giftless.datopian.com/en/latest/)
39 | * [Developer Guide](https://giftless.datopian.com/en/latest/development.html)
40 |
41 | License
42 | -------
43 | Copyright (C) 2020-2024, Datopian / Viderum, Inc.
44 |
45 | Giftless is free / open source software and is distributed under the terms of
46 | the MIT license. See [LICENSE](LICENSE) for details.
47 |
--------------------------------------------------------------------------------
/tests/test_schema.py:
--------------------------------------------------------------------------------
1 | """Tests for schema definitions."""
2 |
3 | import pytest
4 | from marshmallow import ValidationError
5 |
6 | from giftless import schema
7 |
8 | from .helpers import batch_request_payload
9 |
10 |
11 | @pytest.mark.parametrize(
12 | "inp",
13 | [
14 | (batch_request_payload()),
15 | (batch_request_payload(operation="upload")),
16 | (batch_request_payload(delete_keys=["ref", "transfers"])),
17 | ],
18 | )
19 | def test_batch_request_schema_valid(inp: dict) -> None:
20 | parsed = schema.BatchRequest().load(inp)
21 | assert parsed
22 |
23 |
24 | @pytest.mark.parametrize(
25 | "inp",
26 | [
27 | ({}),
28 | (batch_request_payload(operation="sneeze")),
29 | (batch_request_payload(objects=[])),
30 | (
31 | batch_request_payload(
32 | objects=[{"oid": 123456, "size": "large of course"}]
33 | )
34 | ),
35 | (batch_request_payload(objects=[{"oid": "123abc", "size": -12}])),
36 | ],
37 | )
38 | def test_batch_request_schema_invalid(inp: dict) -> None:
39 | with pytest.raises(ValidationError):
40 | schema.BatchRequest().load(inp)
41 |
42 |
43 | def test_batch_request_default_transfer() -> None:
44 | inp = batch_request_payload(delete_keys=["transfers"])
45 | parsed = schema.BatchRequest().load(inp)
46 | assert ["basic"] == parsed["transfers"]
47 |
48 |
49 | def test_object_schema_accepts_x_fields() -> None:
50 | payload = {
51 | "oid": "123abc",
52 | "size": 1212,
53 | "x-filename": "foobarbaz",
54 | "x-mtime": 123123123123,
55 | "x-disposition": "inline",
56 | }
57 | parsed = schema.ObjectSchema().load(payload)
58 | assert parsed["extra"]["filename"] == "foobarbaz"
59 | assert parsed["extra"]["mtime"] == 123123123123
60 | assert parsed["oid"] == "123abc"
61 | assert parsed["extra"]["disposition"] == "inline"
62 |
63 |
64 | def test_object_schema_rejects_unknown_fields() -> None:
65 | payload = {
66 | "oid": "123abc",
67 | "size": 1212,
68 | "x-filename": "foobarbaz",
69 | "more": "stuff",
70 | }
71 | with pytest.raises(ValidationError):
72 | schema.ObjectSchema().load(payload)
73 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """Fixtures for giftless testing."""
2 | import pathlib
3 | import shutil
4 | from collections.abc import Generator
5 | from typing import Any
6 |
7 | import flask
8 | import pytest
9 | from flask.ctx import AppContext
10 | from flask.testing import FlaskClient
11 |
12 | from giftless.app import init_app
13 | from giftless.auth import allow_anon, authentication
14 | from tests.helpers import legacy_endpoints_id
15 |
16 |
17 | @pytest.fixture
18 | def storage_path(tmp_path: pathlib.Path) -> Generator:
19 | path = tmp_path / "lfs-tests"
20 | path.mkdir()
21 | try:
22 | yield str(path)
23 | finally:
24 | shutil.rmtree(path)
25 |
26 |
27 | @pytest.fixture(params=[False], ids=legacy_endpoints_id)
28 | def app(storage_path: str, request: Any) -> flask.Flask:
29 | """Session fixture to configure the Flask app."""
30 | legacy_endpoints = request.param
31 | app = init_app(
32 | additional_config={
33 | "TESTING": True,
34 | "LEGACY_ENDPOINTS": legacy_endpoints,
35 | "TRANSFER_ADAPTERS": {
36 | "basic": {
37 | "options": {"storage_options": {"path": storage_path}}
38 | }
39 | },
40 | }
41 | )
42 | app.config.update({"SERVER_NAME": "giftless.local"})
43 | return app
44 |
45 |
46 | @pytest.fixture
47 | def app_context(app: flask.Flask) -> Generator:
48 | ctx = app.app_context()
49 | try:
50 | ctx.push()
51 | yield ctx
52 | finally:
53 | ctx.pop()
54 |
55 |
56 | @pytest.fixture
57 | def test_client(app_context: AppContext) -> FlaskClient:
58 | test_client: FlaskClient = app_context.app.test_client()
59 | return test_client
60 |
61 |
62 | @pytest.fixture
63 | def _authz_full_access(
64 | app_context: AppContext,
65 | ) -> Generator:
66 | """Fixture that enables full anonymous access to all actions for
67 | tests that use it. Try block needed to ensure we call
68 | init_authenticators before app context is destroyed.
69 | """
70 | try:
71 | authentication.push_authenticator(
72 | allow_anon.read_write # type:ignore[arg-type]
73 | )
74 | yield
75 | finally:
76 | authentication.init_authenticators(reload=True)
77 |
--------------------------------------------------------------------------------
/docs/source/installation.md:
--------------------------------------------------------------------------------
1 | Installation / Deployment
2 | =========================
3 |
4 | You can install and run Giftless in different ways, depending on your needs:
5 |
6 | ## Running from Docker image
7 | Giftless is available as a Docker image available from
8 | [Docker Hub](https://hub.docker.com/r/datopian/giftless)
9 |
10 | To run the latest version of Giftless in HTTP mode, listening
11 | on port 8080, run:
12 |
13 | ```
14 | $ docker run --rm -p 8080:8080 datopian/giftless \
15 | -M -T --threads 2 -p 2 --manage-script-name --callable app \
16 | --http 0.0.0.0:8080
17 | ```
18 |
19 | This will pull the image and run it.
20 |
21 | Alternatively, to run in `WSGI` mode you can run:
22 |
23 | ```
24 | $ docker run --rm -p 5000:5000 datopian/giftless
25 | ```
26 |
27 | This will require an HTTP server such as *nginx* to proxy HTTP requests to it.
28 |
29 | If you need to, you can also build the Docker image locally as described below.
30 |
31 | ## Running from PyPI package
32 | You can install Giftless into your Python environment of choice (3.10+) using pip.
33 | It is recommended to install Giftless into a virtual environment:
34 |
35 | ```shell
36 | (venv) $ pip install uwsgi
37 | (venv) $ pip install giftless
38 | ```
39 |
40 | Once installed, you can run Giftless locally with uWSGI:
41 |
42 | ```
43 | # Run uWSGI (see uWSGI's manual for help on all arguments)
44 | (venv) $ uwsgi -M -T --threads 2 -p 2 --manage-script-name \
45 | --module giftless.wsgi_entrypoint --callable app --http 127.0.0.1:8080
46 | ```
47 |
48 | This will listen on port `8080`.
49 |
50 | You should be able to replace `uwsgi` with any other WSGI server, such as `gunicorn`.
51 |
52 | ## Running from source installation
53 | You can install and run `giftless` from source:
54 |
55 | ```shell
56 | $ git clone https://github.com/datopian/giftless.git
57 |
58 | # Initialize a virtual environment
59 | $ cd giftless
60 | $ python3 -m venv venv
61 | $ source venv/bin/activate
62 | (venv) $ pip install -r requirements/main.txt
63 | ```
64 |
65 | You can then proceed to run Giftless with a WSGI server as
66 | described above.
67 |
68 | Note that for non-production use you may avoid using a WSGI server and rely
69 | on Flask's built-in development server. This should **never** be done in a
70 | production environment:
71 |
72 | ```shell
73 | (venv) $ ./flask-develop.sh
74 | ```
75 |
76 | In development mode, Giftless will be listening on `http://127.0.0.1:5000`
77 |
--------------------------------------------------------------------------------
/tests/storage/test_azure.py:
--------------------------------------------------------------------------------
1 | """Tests for the Azure storage backend."""
2 | import os
3 | from collections.abc import Generator
4 | from typing import Any
5 |
6 | import pytest
7 | from azure.core.exceptions import AzureError
8 | from azure.storage.blob import BlobServiceClient
9 |
10 | from giftless.storage.azure import AzureBlobsStorage
11 |
12 | from . import ExternalStorageAbstractTests, StreamingStorageAbstractTests
13 |
14 | MOCK_AZURE_ACCOUNT_NAME = "my-account"
15 | MOCK_AZURE_CONTAINER_NAME = "my-container"
16 |
17 |
18 | @pytest.fixture
19 | def storage_backend() -> Generator[AzureBlobsStorage, None, None]:
20 | """Provide an Azure Blob Storage backend for all Azure tests.
21 |
22 | For this to work against production Azure, you need to set
23 | ``AZURE_CONNECTION_STRING`` and ``AZURE_CONTAINER`` environment
24 | variables when running the tests.
25 |
26 | If these variables are not set, and pytest-vcr is not in use, the
27 | tests *will* fail.
28 | """
29 | connection_str = os.environ.get("AZURE_CONNECTION_STRING")
30 | container_name = os.environ.get("AZURE_CONTAINER")
31 | prefix = "giftless-tests"
32 |
33 | if container_name and connection_str:
34 | # We use a live Azure container to test
35 | client: BlobServiceClient = BlobServiceClient.from_connection_string(
36 | connection_str
37 | )
38 | try:
39 | yield AzureBlobsStorage(
40 | connection_str, container_name, path_prefix=prefix
41 | )
42 | finally:
43 | container = client.get_container_client(container_name)
44 | try:
45 | for blob in container.list_blob_names(name_starts_with=prefix):
46 | container.delete_blob(blob)
47 | except AzureError:
48 | pass
49 | else:
50 | connection_str = (
51 | f"DefaultEndpointsProtocol=https;AccountName={MOCK_AZURE_ACCOUNT_NAME};"
52 | "AccountKey=U29tZVJhbmRvbUNyYXBIZXJlCg==;EndpointSuffix=core.windows.net"
53 | )
54 | yield AzureBlobsStorage(
55 | connection_str, MOCK_AZURE_CONTAINER_NAME, path_prefix=prefix
56 | )
57 |
58 |
59 | @pytest.fixture(scope="module")
60 | def vcr_config() -> dict[str, Any]:
61 | live_tests = bool(
62 | os.environ.get("AZURE_CONNECTION_STRING")
63 | and os.environ.get("AZURE_CONTAINER")
64 | )
65 | mode = "once" if live_tests else "none"
66 | return {
67 | "filter_headers": [("authorization", "fake-authz-header")],
68 | "record_mode": mode,
69 | }
70 |
71 |
72 | @pytest.mark.vcr
73 | class TestAzureBlobStorageBackend(
74 | StreamingStorageAbstractTests, ExternalStorageAbstractTests
75 | ):
76 | pass
77 |
--------------------------------------------------------------------------------
/tests/test_middleware.py:
--------------------------------------------------------------------------------
1 | """Tests for using middleware and some specific middleware."""
2 | from typing import Any, cast
3 |
4 | import flask
5 | import pytest
6 | from flask.testing import FlaskClient
7 |
8 | from giftless.app import init_app
9 |
10 | from .helpers import (
11 | batch_request_payload,
12 | expected_uri_prefix,
13 | legacy_endpoints_id,
14 | )
15 |
16 |
17 | @pytest.fixture
18 | def app(storage_path: str) -> flask.Flask:
19 |     """Fixture to configure the Flask app with the ProxyFix middleware."""
20 | return init_app(
21 | additional_config={
22 | "TESTING": True,
23 | "TRANSFER_ADAPTERS": {
24 | "basic": {
25 | "options": {"storage_options": {"path": storage_path}}
26 | }
27 | },
28 | "MIDDLEWARE": [
29 | {
30 | "class": "werkzeug.middleware.proxy_fix:ProxyFix",
31 | "kwargs": {
32 | "x_host": 1,
33 | "x_port": 1,
34 | "x_prefix": 1,
35 | },
36 | }
37 | ],
38 | }
39 | )
40 |
41 |
42 | @pytest.mark.usefixtures("_authz_full_access")
43 | @pytest.mark.parametrize(
44 | "app", [False, True], ids=legacy_endpoints_id, indirect=True
45 | )
46 | def test_upload_request_with_x_forwarded_middleware(
47 | app: flask.Flask,
48 | test_client: FlaskClient,
49 | ) -> None:
50 | """Test the ProxyFix middleware generates correct URLs if
51 | X-Forwarded headers are set.
52 | """
53 | request_payload = batch_request_payload(operation="upload")
54 | response = test_client.post(
55 | "/myorg/myrepo.git/info/lfs/objects/batch", json=request_payload
56 | )
57 |
58 | assert response.status_code == 200
59 | json = cast(dict[str, Any], response.json)
60 | upload_action = json["objects"][0]["actions"]["upload"]
61 | href = upload_action["href"]
62 | exp_uri_prefix = expected_uri_prefix(app, "myorg", "myrepo")
63 | assert (
64 | href == f"http://localhost/{exp_uri_prefix}/objects/storage/12345678"
65 | )
66 |
67 | response = test_client.post(
68 | "/myorg/myrepo.git/info/lfs/objects/batch",
69 | json=request_payload,
70 | headers={
71 | "X-Forwarded-Host": "mycompany.xyz",
72 | "X-Forwarded-Port": "1234",
73 | "X-Forwarded-Prefix": "/lfs",
74 | "X-Forwarded-Proto": "https",
75 | },
76 | )
77 |
78 | assert response.status_code == 200
79 | json = cast(dict[str, Any], response.json)
80 | upload_action = json["objects"][0]["actions"]["upload"]
81 | href = upload_action["href"]
82 | assert (
83 | href
84 | == f"https://mycompany.xyz:1234/lfs/{exp_uri_prefix}/objects/storage/12345678"
85 | )
86 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import importlib.metadata
15 |
16 | from recommonmark.transform import AutoStructify
17 |
18 | # import sys
19 | # sys.path.insert(0, os.path.abspath('.'))
20 |
21 |
22 | # -- Project information -----------------------------------------------------
23 |
24 | project = "giftless"
25 | copyright = "2020, Datopian / Viderum Inc."
26 | author = "Shahar Evron"
27 |
28 | # The full version, including alpha/beta/rc tags
29 | release = importlib.metadata.version(project)
30 |
31 | # -- General configuration ---------------------------------------------------
32 |
33 | # Add any Sphinx extension module names here, as strings. They can be
34 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
35 | # ones.
36 | extensions = [
37 | "recommonmark",
38 | "sphinx.ext.autodoc",
39 | "sphinx.ext.autosectionlabel",
40 | "sphinx_autodoc_typehints",
41 | ]
42 |
43 | # Add any paths that contain templates here, relative to this directory.
44 | templates_path = ["_templates"]
45 |
46 | # List of patterns, relative to source directory, that match files and
47 | # directories to ignore when looking for source files.
48 | # This pattern also affects html_static_path and html_extra_path.
49 | exclude_patterns = []
50 |
51 |
52 | # -- Options for HTML output -------------------------------------------------
53 |
54 | # The theme to use for HTML and HTML Help pages. See the documentation for
55 | # a list of builtin themes.
56 | #
57 | html_theme = "furo"
58 |
59 | # Add any paths that contain custom static files (such as style sheets) here,
60 | # relative to this directory. They are copied after the builtin static files,
61 | # so a file named "default.css" will overwrite the builtin "default.css".
62 | html_static_path = ["_static"]
63 |
64 |
65 | # Prefix document path to section labels, otherwise autogenerated labels would look like 'heading'
66 | # rather than 'path/to/file:heading'
67 | autosectionlabel_prefix_document = True
68 |
69 |
70 | def setup(app):
71 | app.add_config_value(
72 | "recommonmark_config",
73 | {
74 | "known_url_schemes": ["http", "https", "mailto"],
75 | "auto_toc_tree_section": "Contents",
76 | },
77 | True,
78 | )
79 | app.add_transform(AutoStructify)
80 |
--------------------------------------------------------------------------------
/giftless/util.py:
--------------------------------------------------------------------------------
1 | """Miscellanea."""
2 | import importlib
3 | from collections.abc import Callable, Iterable
4 | from typing import Any, cast
5 | from urllib.parse import urlencode
6 |
7 |
8 | def get_callable(
9 | callable_str: str, base_package: str | None = None
10 | ) -> Callable:
11 | """Get a callable function / class constructor from a string of the form
12 | `package.subpackage.module:callable`.
13 |
14 | >>> type(get_callable('os.path:basename')).__name__
15 | 'function'
16 |
17 | >>> type(get_callable('basename', 'os.path')).__name__
18 | 'function'
19 | """
20 | if ":" in callable_str:
21 | module_name, callable_name = callable_str.split(":", 1)
22 | module = importlib.import_module(module_name, base_package)
23 | elif base_package:
24 | module = importlib.import_module(base_package)
25 | callable_name = callable_str
26 | else:
27 | raise ValueError(
28 | "Expecting base_package to be set if only class name is provided"
29 | )
30 |
31 | return cast(Callable, getattr(module, callable_name))
32 |
33 |
34 | def to_iterable(val: Any) -> Iterable:
35 | """Get something we can iterate over from an unknown type.
36 |
37 | >>> i = to_iterable([1, 2, 3])
38 | >>> next(iter(i))
39 | 1
40 |
41 | >>> i = to_iterable(1)
42 | >>> next(iter(i))
43 | 1
44 |
45 | >>> i = to_iterable(None)
46 | >>> next(iter(i)) is None
47 | True
48 |
49 | >>> i = to_iterable('foobar')
50 | >>> next(iter(i))
51 | 'foobar'
52 |
53 | >>> i = to_iterable((1, 2, 3))
54 | >>> next(iter(i))
55 | 1
56 | """
57 | if isinstance(val, Iterable) and not isinstance(val, str | bytes):
58 | return val
59 | return (val,)
60 |
61 |
62 | def add_query_params(url: str, params: dict[str, Any]) -> str:
63 | """Safely add query params to a url that may or may not already contain
64 | query params.
65 |
66 | >>> add_query_params('https://example.org', {'param1': 'value1', 'param2': 'value2'})
67 |     'https://example.org?param1=value1&param2=value2'
68 |
69 | >>> add_query_params('https://example.org?param1=value1', {'param2': 'value2'}) # noqa[E501]
70 |     'https://example.org?param1=value1&param2=value2'
71 | """ # noqa: E501
72 | urlencoded_params = urlencode(params)
73 | separator = "&" if "?" in url else "?"
74 | return f"{url}{separator}{urlencoded_params}"
75 |
76 |
77 | def safe_filename(original_filename: str) -> str:
78 | """Return a filename safe to use in HTTP headers, formed from the
79 | given original filename.
80 |
81 | >>> safe_filename("example(1).txt")
82 | 'example1.txt'
83 |
84 | >>> safe_filename("_ex@mple 2%.old.xlsx")
85 | '_exmple2.old.xlsx'
86 | """
87 | valid_chars = (
88 | "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_."
89 | )
90 | return "".join(c for c in original_filename if c in valid_chars)
91 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAzureBlobStorageBackend.test_exists_not_exists.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | Accept:
6 | - '*/*'
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | User-Agent:
12 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
13 | authorization:
14 | - fake-authz-header
15 | x-ms-client-request-id:
16 | - 2a638764-e136-11ea-830d-a0999b18a477
17 | x-ms-date:
18 | - Tue, 18 Aug 2020 09:35:34 GMT
19 | x-ms-version:
20 | - '2019-07-07'
21 | method: HEAD
22 | uri: https://my-account.blob.core.windows.net/my-container/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
23 | response:
24 | body:
25 | string: ''
26 | headers:
27 | Date:
28 | - Tue, 18 Aug 2020 09:35:34 GMT
29 | Server:
30 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
31 | Transfer-Encoding:
32 | - chunked
33 | Vary:
34 | - Origin
35 | x-ms-client-request-id:
36 | - 2a638764-e136-11ea-830d-a0999b18a477
37 | x-ms-error-code:
38 | - BlobNotFound
39 | x-ms-request-id:
40 | - 5910c63b-801e-00c4-5742-75faac000000
41 | x-ms-version:
42 | - '2019-07-07'
43 | status:
44 | code: 404
45 | message: The specified blob does not exist.
46 | - request:
47 | body: null
48 | headers:
49 | Accept:
50 | - application/xml
51 | Accept-Encoding:
52 | - gzip, deflate
53 | Connection:
54 | - keep-alive
55 | User-Agent:
56 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
57 | authorization:
58 | - fake-authz-header
59 | x-ms-client-request-id:
60 | - 2aec342e-e136-11ea-830d-a0999b18a477
61 | x-ms-date:
62 | - Tue, 18 Aug 2020 09:35:35 GMT
63 | x-ms-version:
64 | - '2019-07-07'
65 | method: GET
66 | uri: https://my-account.blob.core.windows.net/my-container?prefix=giftless-tests&restype=container&comp=list
67 | response:
68 | body:
69 | string: "\uFEFFgiftless-tests"
72 | headers:
73 | Content-Type:
74 | - application/xml
75 | Date:
76 | - Tue, 18 Aug 2020 09:35:35 GMT
77 | Server:
78 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
79 | Transfer-Encoding:
80 | - chunked
81 | Vary:
82 | - Origin
83 | x-ms-client-request-id:
84 | - 2aec342e-e136-11ea-830d-a0999b18a477
85 | x-ms-request-id:
86 | - a861dfc1-501e-00e8-2b42-751603000000
87 | x-ms-version:
88 | - '2019-07-07'
89 | status:
90 | code: 200
91 | message: OK
92 | version: 1
93 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAzureBlobStorageBackend.test_get_size_not_existing.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | Accept:
6 | - '*/*'
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | User-Agent:
12 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
13 | authorization:
14 | - fake-authz-header
15 | x-ms-client-request-id:
16 | - 2cd7debe-e136-11ea-830d-a0999b18a477
17 | x-ms-date:
18 | - Tue, 18 Aug 2020 09:35:38 GMT
19 | x-ms-version:
20 | - '2019-07-07'
21 | method: HEAD
22 | uri: https://my-account.blob.core.windows.net/my-container/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
23 | response:
24 | body:
25 | string: ''
26 | headers:
27 | Date:
28 | - Tue, 18 Aug 2020 09:35:38 GMT
29 | Server:
30 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
31 | Transfer-Encoding:
32 | - chunked
33 | Vary:
34 | - Origin
35 | x-ms-client-request-id:
36 | - 2cd7debe-e136-11ea-830d-a0999b18a477
37 | x-ms-error-code:
38 | - BlobNotFound
39 | x-ms-request-id:
40 | - b27ca3cf-e01e-00a0-0d42-750b34000000
41 | x-ms-version:
42 | - '2019-07-07'
43 | status:
44 | code: 404
45 | message: The specified blob does not exist.
46 | - request:
47 | body: null
48 | headers:
49 | Accept:
50 | - application/xml
51 | Accept-Encoding:
52 | - gzip, deflate
53 | Connection:
54 | - keep-alive
55 | User-Agent:
56 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
57 | authorization:
58 | - fake-authz-header
59 | x-ms-client-request-id:
60 | - 2d5f41ba-e136-11ea-830d-a0999b18a477
61 | x-ms-date:
62 | - Tue, 18 Aug 2020 09:35:39 GMT
63 | x-ms-version:
64 | - '2019-07-07'
65 | method: GET
66 | uri: https://my-account.blob.core.windows.net/my-container?prefix=giftless-tests&restype=container&comp=list
67 | response:
68 | body:
69 | string: "\uFEFFgiftless-tests"
72 | headers:
73 | Content-Type:
74 | - application/xml
75 | Date:
76 | - Tue, 18 Aug 2020 09:35:39 GMT
77 | Server:
78 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
79 | Transfer-Encoding:
80 | - chunked
81 | Vary:
82 | - Origin
83 | x-ms-client-request-id:
84 | - 2d5f41ba-e136-11ea-830d-a0999b18a477
85 | x-ms-request-id:
86 | - 6da4219a-401e-0000-2942-758f95000000
87 | x-ms-version:
88 | - '2019-07-07'
89 | status:
90 | code: 200
91 | message: OK
92 | version: 1
93 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAzureBlobStorageBackend.test_verify_object_not_there.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | Accept:
6 | - '*/*'
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | User-Agent:
12 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
13 | authorization:
14 | - fake-authz-header
15 | x-ms-client-request-id:
16 | - 7186912c-e14f-11ea-94b4-a0999b18a477
17 | x-ms-date:
18 | - Tue, 18 Aug 2020 12:36:31 GMT
19 | x-ms-version:
20 | - '2019-07-07'
21 | method: HEAD
22 | uri: https://my-account.blob.core.windows.net/my-container/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
23 | response:
24 | body:
25 | string: ''
26 | headers:
27 | Date:
28 | - Tue, 18 Aug 2020 12:36:31 GMT
29 | Server:
30 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
31 | Transfer-Encoding:
32 | - chunked
33 | Vary:
34 | - Origin
35 | x-ms-client-request-id:
36 | - 7186912c-e14f-11ea-94b4-a0999b18a477
37 | x-ms-error-code:
38 | - BlobNotFound
39 | x-ms-request-id:
40 | - c2829c72-701e-0024-105c-757935000000
41 | x-ms-version:
42 | - '2019-07-07'
43 | status:
44 | code: 404
45 | message: The specified blob does not exist.
46 | - request:
47 | body: null
48 | headers:
49 | Accept:
50 | - application/xml
51 | Accept-Encoding:
52 | - gzip, deflate
53 | Connection:
54 | - keep-alive
55 | User-Agent:
56 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
57 | authorization:
58 | - fake-authz-header
59 | x-ms-client-request-id:
60 | - 720ebb56-e14f-11ea-94b4-a0999b18a477
61 | x-ms-date:
62 | - Tue, 18 Aug 2020 12:36:32 GMT
63 | x-ms-version:
64 | - '2019-07-07'
65 | method: GET
66 | uri: https://my-account.blob.core.windows.net/my-container?prefix=giftless-tests&restype=container&comp=list
67 | response:
68 | body:
69 | string: "\uFEFFgiftless-tests"
72 | headers:
73 | Content-Type:
74 | - application/xml
75 | Date:
76 | - Tue, 18 Aug 2020 12:36:32 GMT
77 | Server:
78 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
79 | Transfer-Encoding:
80 | - chunked
81 | Vary:
82 | - Origin
83 | x-ms-client-request-id:
84 | - 720ebb56-e14f-11ea-94b4-a0999b18a477
85 | x-ms-request-id:
86 | - f603e678-d01e-00d9-7e5c-75f710000000
87 | x-ms-version:
88 | - '2019-07-07'
89 | status:
90 | code: 200
91 | message: OK
92 | version: 1
93 |
--------------------------------------------------------------------------------
/tests/mocks/google_cloud_storage.py:
--------------------------------------------------------------------------------
1 | """Mock for google_cloud_storage that just uses a temporary directory
2 | rather than talking to Google. This effectively makes it a LocalStorage
3 | implementation, of course.
4 | """
5 |
6 | import shutil
7 | from pathlib import Path
8 | from typing import Any, BinaryIO
9 |
10 | from giftless.storage.exc import ObjectNotFoundError
11 | from giftless.storage.google_cloud import GoogleCloudStorage
12 |
13 |
14 | class MockGoogleCloudStorage(GoogleCloudStorage):
15 | """Mocks a GoogleCloudStorage object by simulating it with a local
16 | directory.
17 | """
18 |
19 | def __init__(
20 | self,
21 | project_name: str,
22 | bucket_name: str,
23 | path: Path,
24 | account_key_file: str | None = None,
25 | account_key_base64: str | None = None,
26 | path_prefix: str | None = None,
27 | serviceaccount_email: str | None = None,
28 | **_: Any,
29 | ) -> None:
30 | super().__init__(
31 | project_name=project_name,
32 | bucket_name=bucket_name,
33 | account_key_file=account_key_file,
34 | account_key_base64=account_key_base64,
35 | serviceaccount_email=serviceaccount_email,
36 | )
37 | self._path = path
38 |
39 | def _get_blob_path(self, prefix: str, oid: str) -> str:
40 | return str(self._get_blob_pathlib_path(prefix, oid))
41 |
42 | def _get_blob_pathlib_path(self, prefix: str, oid: str) -> Path:
43 | return Path(self._path / Path(prefix) / oid)
44 |
45 | @staticmethod
46 | def _create_path(spath: str) -> None:
47 | path = Path(spath)
48 | if not path.is_dir():
49 | path.mkdir(parents=True)
50 |
51 | def _get_signed_url(
52 | self,
53 | prefix: str,
54 | oid: str,
55 | expires_in: int,
56 | http_method: str = "GET",
57 | filename: str | None = None,
58 | disposition: str | None = None,
59 | ) -> str:
60 | return f"https://example.com/signed_blob/{prefix}/{oid}"
61 |
62 | def get(self, prefix: str, oid: str) -> BinaryIO:
63 | obj = self._get_blob_pathlib_path(prefix, oid)
64 | if not obj.exists():
65 | raise ObjectNotFoundError("Object does not exist")
66 | return obj.open("rb")
67 |
68 | def put(self, prefix: str, oid: str, data_stream: BinaryIO) -> int:
69 | path = self._get_blob_pathlib_path(prefix, oid)
70 | directory = path.parent
71 | self._create_path(str(directory))
72 | with path.open("bw") as dest:
73 | shutil.copyfileobj(data_stream, dest)
74 | return dest.tell()
75 |
76 | def exists(self, prefix: str, oid: str) -> bool:
77 | return self._get_blob_pathlib_path(prefix, oid).is_file()
78 |
79 | def get_size(self, prefix: str, oid: str) -> int:
80 | if not self.exists(prefix, oid):
81 | raise ObjectNotFoundError("Object does not exist")
82 | path = self._get_blob_pathlib_path(prefix, oid)
83 | return path.stat().st_size
84 |
--------------------------------------------------------------------------------
/tests/storage/test_amazon_s3.py:
--------------------------------------------------------------------------------
1 | """Tests for the Amazon S3 storage backend."""
2 | import os
3 | from base64 import b64decode
4 | from binascii import unhexlify
5 | from collections.abc import Generator
6 | from typing import Any
7 |
8 | import pytest
9 |
10 | from giftless.storage import ExternalStorage
11 | from giftless.storage.amazon_s3 import AmazonS3Storage
12 |
13 | from . import ExternalStorageAbstractTests, StreamingStorageAbstractTests
14 |
15 | ARBITRARY_OID = (
16 | "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"
17 | )
18 | TEST_AWS_S3_BUCKET_NAME = "test-giftless"
19 |
20 |
21 | @pytest.fixture
22 | def storage_backend() -> Generator[AmazonS3Storage, None, None]:
23 |     """Provide an S3 storage backend for all AWS S3 tests.
24 |
25 | For this to work against production S3, you need to set boto3 auth:
26 | 1. AWS_ACCESS_KEY_ID
27 | 2. AWS_SECRET_ACCESS_KEY
28 |
29 | For more details please see:
30 | https://boto3.amazonaws.com/v1/documentation/api/latest/
31 | guide/credentials.html#environment-variables
32 |
33 | If these variables are not set, and pytest-vcr is not in use, the
34 | tests *will* fail.
35 | """
36 | prefix = "giftless-tests"
37 |
38 | # We use a live S3 bucket to test
39 | storage = AmazonS3Storage(
40 | bucket_name=TEST_AWS_S3_BUCKET_NAME, path_prefix=prefix
41 | )
42 | try:
43 | yield storage
44 | finally:
45 | bucket = storage.s3.Bucket(TEST_AWS_S3_BUCKET_NAME)
46 | try:
47 | bucket.objects.all().delete()
48 | except Exception as e:
49 | raise pytest.PytestWarning(
50 | f"Could not clean up after test: {e}"
51 | ) from None
52 |
53 |
54 | @pytest.fixture(scope="module")
55 | def vcr_config() -> dict[str, Any]:
56 | live_tests = bool(
57 | os.environ.get("AWS_ACCESS_KEY_ID")
58 | and os.environ.get("AWS_SECRET_ACCESS_KEY")
59 | )
60 | if live_tests:
61 | mode = "once"
62 | else:
63 | os.environ["AWS_ACCESS_KEY_ID"] = "fake"
64 | os.environ["AWS_SECRET_ACCESS_KEY"] = "fake"
65 | os.environ["AWS_DEFAULT_REGION"] = "us-east-1"
66 | mode = "none"
67 | return {
68 | "filter_headers": [("authorization", "fake-authz-header")],
69 | "record_mode": mode,
70 | }
71 |
72 |
73 | @pytest.mark.vcr
74 | class TestAmazonS3StorageBackend(
75 | StreamingStorageAbstractTests, ExternalStorageAbstractTests
76 | ):
77 | def test_get_upload_action(self, storage_backend: ExternalStorage) -> None:
78 | # A little duplication is better than a test that returns a value.
79 | action_spec = storage_backend.get_upload_action(
80 | "org/repo", ARBITRARY_OID, 100, 3600
81 | )
82 | upload = action_spec["actions"]["upload"]
83 | assert upload["href"][0:4] == "http"
84 | assert upload["expires_in"] == 3600
85 | assert upload["header"]["Content-Type"] == "application/octet-stream"
86 |
87 | b64_oid = upload["header"]["x-amz-checksum-sha256"]
88 | assert b64decode(b64_oid) == unhexlify(ARBITRARY_OID)
89 |
--------------------------------------------------------------------------------
/giftless/storage/local_storage.py:
--------------------------------------------------------------------------------
1 | """Local storage implementation, for development/testing or small-scale
2 | deployments.
3 | """
4 | import shutil
5 | from pathlib import Path
6 | from typing import Any, BinaryIO
7 |
8 | from flask import Flask
9 |
10 | from giftless.storage import MultipartStorage, StreamingStorage, exc
11 | from giftless.view import ViewProvider
12 |
13 |
14 | class LocalStorage(StreamingStorage, MultipartStorage, ViewProvider):
15 | """Local storage implementation.
16 |
17 | This storage backend works by storing files in the local file
18 | system. While it can be used in production, large scale
19 | deployment will most likely want to use a more scalable solution
20 | such as one of the cloud storage backends.
21 | """
22 |
23 | def __init__(self, path: str | None = None, **_: Any) -> None:
24 | if path is None:
25 | path = "lfs-storage"
26 | self.path = path
27 | self._create_path(self.path)
28 |
29 | def get(self, prefix: str, oid: str) -> BinaryIO:
30 | path = self._get_path(prefix, oid)
31 | if path.is_file():
32 | return path.open("br")
33 | else:
34 | raise exc.ObjectNotFoundError(f"Object {path} was not found")
35 |
36 | def put(self, prefix: str, oid: str, data_stream: BinaryIO) -> int:
37 | path = self._get_path(prefix, oid)
38 | directory = path.parent
39 | self._create_path(str(directory))
40 | with path.open("bw") as dest:
41 | shutil.copyfileobj(data_stream, dest)
42 | return dest.tell()
43 |
44 | def exists(self, prefix: str, oid: str) -> bool:
45 | path = self._get_path(prefix, oid)
46 | return path.is_file()
47 |
48 | def get_size(self, prefix: str, oid: str) -> int:
49 | if self.exists(prefix, oid):
50 | path = self._get_path(prefix, oid)
51 | return path.stat().st_size
52 | raise exc.ObjectNotFoundError("Object was not found")
53 |
54 | def get_mime_type(self, prefix: str, oid: str) -> str:
55 | if self.exists(prefix, oid):
56 | return "application/octet-stream"
57 | raise exc.ObjectNotFoundError("Object was not found")
58 |
59 | def get_multipart_actions(
60 | self,
61 | prefix: str,
62 | oid: str,
63 | size: int,
64 | part_size: int,
65 | expires_in: int,
66 | extra: dict[str, Any] | None = None,
67 | ) -> dict[str, Any]:
68 | return {}
69 |
70 | def get_download_action(
71 | self,
72 | prefix: str,
73 | oid: str,
74 | size: int,
75 | expires_in: int,
76 | extra: dict[str, Any] | None = None,
77 | ) -> dict[str, Any]:
78 | return {}
79 |
80 | def register_views(self, app: Flask) -> None:
81 | super().register_views(app)
82 |
83 | def _get_path(self, prefix: str, oid: str) -> Path:
84 | return Path(self.path) / prefix / oid
85 |
86 | @staticmethod
87 | def _create_path(spath: str) -> None:
88 | path = Path(spath)
89 | if not path.is_dir():
90 | path.mkdir(parents=True)
91 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAmazonS3StorageBackend.test_get_upload_action.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | User-Agent:
6 | - !!binary |
7 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
8 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
9 | X-Amz-Content-SHA256:
10 | - !!binary |
11 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
12 | ODUyYjg1NQ==
13 | X-Amz-Date:
14 | - !!binary |
15 | MjAyMTAzMTVUMTI0MjEzWg==
16 | authorization:
17 | - fake-authz-header
18 | method: GET
19 | uri: https://test-giftless.s3.amazonaws.com/?encoding-type=url
20 | response:
21 | body:
22 | string: '
23 |
24 | AuthorizationHeaderMalformedThe authorization
25 | header is malformed; the region ''us-east-1'' is wrong; expecting ''eu-west-1''eu-west-1X0FW6XS48TPBCYHCPqxxoVzCtvQQ9LYjcrs4pLeDlEHcscKUajE9PXma3B7ExIXyWng2aZRTPXJZyovQChJaD0hTVN4='
26 | headers:
27 | Connection:
28 | - close
29 | Content-Type:
30 | - application/xml
31 | Date:
32 | - Mon, 15 Mar 2021 12:42:12 GMT
33 | Server:
34 | - AmazonS3
35 | Transfer-Encoding:
36 | - chunked
37 | x-amz-bucket-region:
38 | - eu-west-1
39 | x-amz-id-2:
40 | - PqxxoVzCtvQQ9LYjcrs4pLeDlEHcscKUajE9PXma3B7ExIXyWng2aZRTPXJZyovQChJaD0hTVN4=
41 | x-amz-request-id:
42 | - X0FW6XS48TPBCYHC
43 | status:
44 | code: 400
45 | message: Bad Request
46 | - request:
47 | body: null
48 | headers:
49 | User-Agent:
50 | - !!binary |
51 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
52 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
53 | X-Amz-Content-SHA256:
54 | - !!binary |
55 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
56 | ODUyYjg1NQ==
57 | X-Amz-Date:
58 | - !!binary |
59 | MjAyMTAzMTVUMTI0MjEzWg==
60 | authorization:
61 | - fake-authz-header
62 | method: GET
63 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/?encoding-type=url
64 | response:
65 | body:
66 | string: '
67 |
68 | test-giftless1000urlfalse'
69 | headers:
70 | Content-Type:
71 | - application/xml
72 | Date:
73 | - Mon, 15 Mar 2021 12:42:14 GMT
74 | Server:
75 | - AmazonS3
76 | Transfer-Encoding:
77 | - chunked
78 | x-amz-bucket-region:
79 | - eu-west-1
80 | x-amz-id-2:
81 | - X4q1AzOcjbbI0ukWlSr32B3cqOw2acyaASorcCLLHRaTrGZlFSKrrplVllslwzO1U8+AE+pkQYw=
82 | x-amz-request-id:
83 | - X0FPE94FZZY0JD59
84 | status:
85 | code: 200
86 | message: OK
87 | version: 1
88 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAmazonS3StorageBackend.test_get_download_action.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | User-Agent:
6 | - !!binary |
7 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
8 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
9 | X-Amz-Content-SHA256:
10 | - !!binary |
11 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
12 | ODUyYjg1NQ==
13 | X-Amz-Date:
14 | - !!binary |
15 | MjAyMTAzMTVUMTI0MjEzWg==
16 | authorization:
17 | - fake-authz-header
18 | method: GET
19 | uri: https://test-giftless.s3.amazonaws.com/?encoding-type=url
20 | response:
21 | body:
22 | string: '
23 |
24 | AuthorizationHeaderMalformedThe authorization
25 | header is malformed; the region ''us-east-1'' is wrong; expecting ''eu-west-1''eu-west-1X0FQ7BTRP3FQ822GrnYglhB7vBiM794ay4pAVvZWg7gXk+OR0QOlVE7vAZleyE4hvVUnf8K6ZVMkhyMhTXia3GcRaSc='
26 | headers:
27 | Connection:
28 | - close
29 | Content-Type:
30 | - application/xml
31 | Date:
32 | - Mon, 15 Mar 2021 12:42:13 GMT
33 | Server:
34 | - AmazonS3
35 | Transfer-Encoding:
36 | - chunked
37 | x-amz-bucket-region:
38 | - eu-west-1
39 | x-amz-id-2:
40 | - rnYglhB7vBiM794ay4pAVvZWg7gXk+OR0QOlVE7vAZleyE4hvVUnf8K6ZVMkhyMhTXia3GcRaSc=
41 | x-amz-request-id:
42 | - X0FQ7BTRP3FQ822G
43 | status:
44 | code: 400
45 | message: Bad Request
46 | - request:
47 | body: null
48 | headers:
49 | User-Agent:
50 | - !!binary |
51 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
52 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
53 | X-Amz-Content-SHA256:
54 | - !!binary |
55 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
56 | ODUyYjg1NQ==
57 | X-Amz-Date:
58 | - !!binary |
59 | MjAyMTAzMTVUMTI0MjEzWg==
60 | authorization:
61 | - fake-authz-header
62 | method: GET
63 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/?encoding-type=url
64 | response:
65 | body:
66 | string: '
67 |
68 | test-giftless1000urlfalse'
69 | headers:
70 | Content-Type:
71 | - application/xml
72 | Date:
73 | - Mon, 15 Mar 2021 12:42:15 GMT
74 | Server:
75 | - AmazonS3
76 | Transfer-Encoding:
77 | - chunked
78 | x-amz-bucket-region:
79 | - eu-west-1
80 | x-amz-id-2:
81 | - 5efCJ/To3WTMGCNAwrvyyKIbs/y3DYttX++RgpZeXvxVtpQwPHFuZaHOObTgHZUU09P4uhnnWg4=
82 | x-amz-request-id:
83 | - 073S09AHP1866XBQ
84 | status:
85 | code: 200
86 | message: OK
87 | version: 1
88 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAzureBlobStorageBackend.test_get_raises_if_not_found.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | Accept:
6 | - application/xml
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | User-Agent:
12 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
13 | authorization:
14 | - fake-authz-header
15 | x-ms-client-request-id:
16 | - 27ebabf6-e136-11ea-830d-a0999b18a477
17 | x-ms-date:
18 | - Tue, 18 Aug 2020 09:35:30 GMT
19 | x-ms-range:
20 | - bytes=0-33554431
21 | x-ms-version:
22 | - '2019-07-07'
23 | method: GET
24 | uri: https://my-account.blob.core.windows.net/my-container/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
25 | response:
26 | body:
27 | string: "\uFEFFBlobNotFoundThe\
28 | \ specified blob does not exist.\nRequestId:25c5d527-101e-006f-5f42-758566000000\n\
29 | Time:2020-08-18T09:35:31.0516513Z"
30 | headers:
31 | Content-Length:
32 | - '215'
33 | Content-Type:
34 | - application/xml
35 | Date:
36 | - Tue, 18 Aug 2020 09:35:30 GMT
37 | Server:
38 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
39 | Vary:
40 | - Origin
41 | x-ms-client-request-id:
42 | - 27ebabf6-e136-11ea-830d-a0999b18a477
43 | x-ms-error-code:
44 | - BlobNotFound
45 | x-ms-request-id:
46 | - 25c5d527-101e-006f-5f42-758566000000
47 | x-ms-version:
48 | - '2019-07-07'
49 | status:
50 | code: 404
51 | message: The specified blob does not exist.
52 | - request:
53 | body: null
54 | headers:
55 | Accept:
56 | - application/xml
57 | Accept-Encoding:
58 | - gzip, deflate
59 | Connection:
60 | - keep-alive
61 | User-Agent:
62 | - azsdk-python-storage-blob/12.2.0 Python/3.7.3 (Darwin-19.6.0-x86_64-i386-64bit)
63 | authorization:
64 | - fake-authz-header
65 | x-ms-client-request-id:
66 | - 2875e4ce-e136-11ea-830d-a0999b18a477
67 | x-ms-date:
68 | - Tue, 18 Aug 2020 09:35:31 GMT
69 | x-ms-version:
70 | - '2019-07-07'
71 | method: GET
72 | uri: https://my-account.blob.core.windows.net/my-container?prefix=giftless-tests&restype=container&comp=list
73 | response:
74 | body:
75 | string: "\uFEFFgiftless-tests"
78 | headers:
79 | Content-Type:
80 | - application/xml
81 | Date:
82 | - Tue, 18 Aug 2020 09:35:31 GMT
83 | Server:
84 | - Windows-Azure-Blob/1.0 Microsoft-HTTPAPI/2.0
85 | Transfer-Encoding:
86 | - chunked
87 | Vary:
88 | - Origin
89 | x-ms-client-request-id:
90 | - 2875e4ce-e136-11ea-830d-a0999b18a477
91 | x-ms-request-id:
92 | - 2ebb0966-301e-0057-1f42-7521a6000000
93 | x-ms-version:
94 | - '2019-07-07'
95 | status:
96 | code: 200
97 | message: OK
98 | version: 1
99 |
--------------------------------------------------------------------------------
/tests/auth/data/test-key.pem:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | MIIJJQIBAAKCAgEAyRP9hgA3/FGJAVQxlDJzFgKQOikDJalxMAE02wyK/iTeFNZT
3 | lB1k/oJDrkuwfm9OfcBOjmzy6aEMmPvcUJBfqXmCqVuwrHZmJJN7wzyR+Bgz53vr
4 | pS+hT18RAhxagxrM/HSkhhrDuGQORdsMUqWdJ5RYFhDyWZR2cUfS83RxkKnMZ3KS
5 | xnYEGu+t2T7Hew94SSbB0yRDqfykd+rTFvBmza8Iqzo1Nb7BjiLWRSWkRau4bwxP
6 | FHhGih2ZaJu+q3u8jnrwn0AObVSj/W6ps8amhBuMDsaE2wyjs8to1bDpezuZkont
7 | aFoo56ll3FXOO5N32i5HprsFIWXrQFgG1xJp+7eDY7hKiK1xednYk8kse2/XJRTt
8 | GBgFP6gbdtCdfd7DQSGCGy6Q/0sDCJqAM30qDVZDNg940hdp1k9n1zhTSeW/boFc
9 | v09KjmFg+ZVmZVxxqZidPdGqB3VbwBRvH7idFDa+nSg5gJbM2YWaOe6FeUkIm7/R
10 | sxE+MfU/TFHvW/4q89sHq/2V/EZH1VSByv5R3iyWs8cLFjkQkqLTYdX2S/QS+nwt
11 | /aXKjn6HXlj2H1hddMz7UTRBDy5/IXRkW157YFWGIPIVvjo/1HpiAPtJ9Cv+MLCO
12 | +k4u79ZUjVKkpbiGWN5oQwIgdLwX1mOyS1ULXFZfhf5PK0ujwLIovPSyipUCAwEA
13 | AQKCAgBgRp0F0wb7XcK+iyRt87Xc3P9XiZmDJ90VAg7fruLCCr72tSQxb92Saqaj
14 | RoRMFVk8pp42XX31VPImMblsLaRQDej80/UErm/7pU4FzfOKGXSIqnHKlsKisT8A
15 | hrjjuypY2jSILTp3CBk5Y0dELx3O/J0se5K+R3JqjNw5SxCxFkvTYC1crtcKWqYf
16 | tYpnpqGOTOtTADMRqfDJpfrKKfLfGqaI7Ybao+oi9xUm6H3+Cdl1+ivG/kuwB7oT
17 | NzcU/lrq0Xxyk4YU3Qjj6F+Cv2OtL09hJ7Ym96dOB9+7KUixg3jli/rLExgwy5AE
18 | 7mSfa91f7vbJoxQyyIzaMwubYKqzTEJfXpwDCTAowz2o0HFZrRX0ReU9hAOkV5sf
19 | 0buy/P75WBV78MfjFtTisyioVxiLS2p1kpJjf31/6GdYE9x6aW1m5pXP5DBf9rtT
20 | /AhXKldkOuxULv2dS6TmJ8Fn84SVX+LdT0+cAUBVATCKeh2trNqkoqOMm+LW+a3h
21 | +0St7orNFoUO3Jnwh6nF0KWYGx+orqV5Pt/9zEo4OPYfX0mNRBl2c0hNlpkP+lXH
22 | S9xcWuT4nwBGKu+kJDSIGkE9XtaPTiQvlf+PxC4CyaUb332wl6/hQMHxJ6bim6kL
23 | ELXJISHc0etOP4qz70vPfkv5qT5ymFCLHUHfTjET4ruPTWnqoQKCAQEA9tAcKwE9
24 | BlRFDkJ5lmGLkkqMh+HOwYT5Q+K/48qPGUfFWyHBbdQhYr69Xfkz4a83T31cTwZT
25 | trSAG8GN7ytGCUNEyxeViU8nJYDXSPXtlnZeNZY3znEXDZJJgrv0zOcPUtXudgYE
26 | vBvplGNkNi+PBg+q4C84Pf5VjlSd9TDd4L8bbkW3gF8LZKUuFOm9lVubz1Im2vqM
27 | F5sQzzgKUcxa0wKgbK8V9yZ0CqK2bog+jrvwBDaRdgs/TZZkWUi2xos7+Sry+To0
28 | Gl91d4pSPEf/uI9jTJzqH9w5U6BApyc64T2Z2mSnDZNxZf4nHxompYT0GPoNclE8
29 | pop9rxHkjbo1TQKCAQEA0JASOrA3NNbAWSd9yUWKDd2lWkMzRH6PDj7OK1hZF9ld
30 | 38p7ZUjMp5y9+Yc9OJPSYUiFxQyBgvT+LJtbvPvPoejnDOn1GFaQJ7jandtS7teU
31 | sefbml+Q//iBdIVo9yUFqVafUwsIQpfCM9KMHD28SxRGhHB/+DP3nSu0qMY3iRSm
32 | +5ekkq5Hoc9yXvOCWoxRHLGBeSDThUrRkIa5Gk9qrs8MUFT457xeoOmtIcDrfUAD
33 | rdSj2RXUrUkzP5CSMiOagxRoigih7KkKqj50+N66AGm2Rc6fyXxkRS1Ul/fgBQUQ
34 | qnvQA8hm1+jTf4jMP2VqjwkbZZD7O5FRqnx5J25maQKCAQBO0KYuFXcnTkG/C/tH
35 | yl6HmMbHnZQouA3hX8dnNof7yq97KoxaRiNjJQXTvICU0R8Ygy+3DI4vCfTe6DAq
36 | 7+atjo2pTR6zelNF5p7Y7a+xHJt5AT9Q7UwvSUewN9U0mZSLH0XG6qUYbNFqUVLu
37 | +sDxbo9ih7GuGkm36c2HPtObsWtRC3JNzAwXV6gfhYTE7TwUOGewBdI2t43mnj6t
38 | YFSgjXYtPrwE8IKd4tfe9CMF8X3z2nkY+P5+CEiDOnUjcdL0/oIYlD2PLmyUf9dC
39 | 6LsAdMmA7ZrWEb08ty/uMNjclI9M7ldqPms35rEXfYJZ2NsaHr+sgcScah04ir4x
40 | hAUBAoH/L0B04NcLquz8pzZLSU53+Kw+yjRvaRVDU/bmvutFRCH7Iri9PDVCKLda
41 | Uy5QsIDNKT1FEV/D9+eJrpGEN0zVvdkGf+aBoQH7pwQ6g4ktQO+WwLfHCTKdp2Cl
42 | 88BGB7hbkAIQFsBpHq1DUnphRAS4pEnzsA3M8G7VqsY8KADAlE4ZhubUYmcZ048s
43 | WlbRMbKYreeJbegCBDDvSc88ILndcb13DLwzqE0FI2/NydfrgnT+YGPMu2I2zQvI
44 | kJ4wgSuP1iLJjHxoMzYZNHjyB3Bus6k/opEtoniMPyAGCmgeWQTMwdNLTZAZvNED
45 | GeHkZHiVq9JkYp52TQifeAC7cjORAoIBACEMiGLnnyCkqKSxaKwhLjIyw76jp+jZ
46 | 2BMzxZKMS1Gx/M0GMlv++gi2W4t0DB4gy8h4sulLzo0rqHw5OTswaFwKUiLPgt86
47 | 3BcwpuNQ/5rYH1C9ralrnkjd80YOFPAAzT0MQU0Ah3LJU+TdTT0bJxsNHRor7RUG
48 | ENTDKpyMcMf633xHm5ICXDjla7zL6Xp56ASe6h3MIL29p7Fr/emcRs4tR4P5+FE+
49 | JpcLyl83mlwY7gNVigtnX2acPBSesEJfbV56R6JAHtRwiCjmz0POUfYIphivABQ1
50 | QqkqNUsxGrw6c5XFBg7jXC+XfJvBcqcORsm2ZjsJzFc+JEKM3/pEmr4=
51 | -----END RSA PRIVATE KEY-----
52 |
--------------------------------------------------------------------------------
/giftless/auth/identity.py:
--------------------------------------------------------------------------------
1 | """Objects to support Giftless's concept of users and permissions."""
2 | from abc import ABC, abstractmethod
3 | from collections import defaultdict
4 | from enum import Enum
5 |
6 |
7 | class Permission(Enum):
8 | """System wide permissions."""
9 |
10 | READ = "read"
11 | READ_META = "read-meta"
12 | WRITE = "write"
13 |
14 | @classmethod
15 | def all(cls) -> set["Permission"]:
16 | return set(cls)
17 |
18 |
19 | PermissionTree = dict[
20 | str | None, dict[str | None, dict[str | None, set[Permission]]]
21 | ]
22 |
23 |
24 | class Identity(ABC):
25 | """Base user identity object.
26 |
27 | The goal of user objects is to contain some information about the
28 | user, and also to allow checking if the user is authorized to
29 | perform some actions.
30 | """
31 |
32 | def __init__(
33 | self,
34 | name: str | None = None,
35 | id: str | None = None,
36 | email: str | None = None,
37 | ) -> None:
38 | self.name = name
39 | self.id = id
40 | self.email = email
41 |
42 | @abstractmethod
43 | def is_authorized(
44 | self,
45 | organization: str,
46 | repo: str,
47 | permission: Permission,
48 | oid: str | None = None,
49 | ) -> bool:
50 | """Determine whether user is authorized to perform an operation
51 | on an object or repo.
52 | """
53 |
54 | def __repr__(self) -> str:
55 | return f"<{self.__class__.__name__} id:{self.id} name:{self.name}>"
56 |
57 |
58 | class DefaultIdentity(Identity):
59 | """Default instantiable user identity class."""
60 |
61 | def __init__(
62 | self,
63 | name: str | None = None,
64 | id: str | None = None,
65 | email: str | None = None,
66 | ) -> None:
67 | super().__init__(name, id, email)
68 | self._allowed: PermissionTree = defaultdict(
69 | lambda: defaultdict(lambda: defaultdict(set))
70 | )
71 |
72 | def allow(
73 | self,
74 | organization: str | None = None,
75 | repo: str | None = None,
76 | permissions: set[Permission] | None = None,
77 | oid: str | None = None,
78 | ) -> None:
79 | if permissions is None:
80 | self._allowed[organization][repo][oid] = set()
81 | else:
82 | self._allowed[organization][repo][oid].update(permissions)
83 |
84 | def is_authorized(
85 | self,
86 | organization: str,
87 | repo: str,
88 | permission: Permission,
89 | oid: str | None = None,
90 | ) -> bool:
91 | if organization in self._allowed:
92 | if repo in self._allowed[organization]:
93 | if oid in self._allowed[organization][repo]:
94 | return permission in self._allowed[organization][repo][oid]
95 | elif None in self._allowed[organization][repo]:
96 | return (
97 | permission in self._allowed[organization][repo][None]
98 | )
99 | elif None in self._allowed[organization]:
100 | return permission in self._allowed[organization][None][None]
101 | elif None in self._allowed and None in self._allowed[None]:
102 | return permission in self._allowed[None][None][oid]
103 |
104 | return False
105 |
--------------------------------------------------------------------------------
/giftless/config.py:
--------------------------------------------------------------------------------
1 | """Configuration handling helper functions and default configuration."""
2 | import os
3 | import warnings
4 | from pathlib import Path
5 | from typing import Any
6 |
7 | import yaml
8 | from dotenv import load_dotenv
9 | from figcan import Configuration, Extensible # type:ignore[attr-defined]
10 | from flask import Flask
11 |
12 | ENV_PREFIX = "GIFTLESS_"
13 | ENV_FILE = ".env"
14 |
15 | default_transfer_config = {
16 | "basic": Extensible(
17 | {
18 | "factory": "giftless.transfer.basic_streaming:factory",
19 | "options": Extensible(
20 | {
21 | "storage_class": (
22 | "giftless.storage.local_storage:LocalStorage"
23 | ),
24 | "storage_options": Extensible({"path": "lfs-storage"}),
25 | "action_lifetime": 900,
26 | }
27 | ),
28 | }
29 | ),
30 | }
31 |
32 | default_config = {
33 | "TESTING": False,
34 | "DEBUG": False,
35 | "LEGACY_ENDPOINTS": True,
36 | "TRANSFER_ADAPTERS": Extensible(default_transfer_config),
37 | "AUTH_PROVIDERS": ["giftless.auth.allow_anon:read_only"],
38 | "PRE_AUTHORIZED_ACTION_PROVIDER": {
39 | "factory": "giftless.auth.jwt:factory",
40 | "options": {
41 | "algorithm": "HS256",
42 | "private_key": "change-me",
43 | "private_key_file": None,
44 | "public_key": None,
45 | "public_key_file": None,
46 | "default_lifetime": 60, # 60 seconds for default actions
47 | "key_id": "giftless-internal-jwt-key",
48 | },
49 | },
50 | "MIDDLEWARE": [],
51 | }
52 |
53 | load_dotenv()
54 |
55 |
56 | def configure(app: Flask, additional_config: dict | None = None) -> Flask:
57 | """Configure a Flask app using Figcan managed configuration object."""
58 | config = _compose_config(additional_config)
59 | app.config.update(config)
60 | if app.config["LEGACY_ENDPOINTS"]:
61 | warnings.warn(
62 | FutureWarning(
63 | "LEGACY_ENDPOINTS (starting with '//') are enabled"
64 | " as the default. They will be eventually removed in favor of"
65 | " those starting with '/.git/info/lfs/')."
66 | " Switch your clients to them and set the configuration"
67 | " option to False to disable this warning."
68 | ),
69 | stacklevel=1,
70 | )
71 | return app
72 |
73 |
74 | def _compose_config(
75 | additional_config: dict[str, Any] | None = None,
76 | ) -> Configuration:
77 | """Compose configuration object from all available sources."""
78 | config = Configuration(default_config)
79 | environ = dict(
80 | os.environ
81 | ) # Copy the environment as we're going to change it
82 |
83 | if environ.get(f"{ENV_PREFIX}CONFIG_FILE"):
84 | with Path(environ[f"{ENV_PREFIX}CONFIG_FILE"]).open() as f:
85 | config_from_file = yaml.safe_load(f)
86 | config.apply(config_from_file)
87 | environ.pop(f"{ENV_PREFIX}CONFIG_FILE")
88 |
89 | if environ.get(f"{ENV_PREFIX}CONFIG_STR"):
90 | config_from_file = yaml.safe_load(environ[f"{ENV_PREFIX}CONFIG_STR"])
91 | config.apply(config_from_file)
92 | environ.pop(f"{ENV_PREFIX}CONFIG_STR")
93 |
94 | config.apply_flat(environ, prefix=ENV_PREFIX)
95 |
96 | if additional_config:
97 | config.apply(additional_config)
98 |
99 | return config
100 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yaml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | "on":
4 | push:
5 | branches-ignore:
6 | # These should always correspond to pull requests, so ignore them for
7 | # the push trigger and let them be triggered by the pull_request
8 | # trigger, avoiding running the workflow twice. This is a minor
9 | # optimization so there's no need to ensure this is comprehensive.
10 | - "dependabot/**"
11 | - "renovate/**"
12 | - "tickets/**"
13 | - "u/**"
14 | tags:
15 | - "*"
16 | pull_request: {}
17 | release:
18 | types: [published]
19 |
20 | jobs:
21 | lint:
22 | runs-on: ubuntu-latest
23 | timeout-minutes: 5
24 |
25 | steps:
26 | - uses: actions/checkout@v4
27 |
28 | - name: Set up Python
29 | uses: actions/setup-python@v5
30 | with:
31 | python-version: "3.12"
32 |
33 | - name: Run pre-commit
34 | uses: pre-commit/action@v3.0.1
35 |
36 | test:
37 | runs-on: ubuntu-latest
38 | timeout-minutes: 10
39 |
40 | strategy:
41 | matrix:
42 | python:
43 | - "3.10"
44 | - "3.11"
45 | - "3.12"
46 |
47 | steps:
48 | - uses: actions/checkout@v4
49 |
50 | - name: Run tox
51 | uses: lsst-sqre/run-tox@v1
52 | with:
53 | python-version: ${{ matrix.python }}
54 | tox-envs: "py,coverage-report,typing"
55 |
56 | build:
57 | runs-on: ubuntu-latest
58 | needs: [lint, test]
59 | timeout-minutes: 10
60 |
61 | # Only do Docker builds of tagged releases and pull requests from ticket
62 | # branches. This will still trigger on pull requests from untrusted
63 | # repositories whose branch names match our tickets/* branch convention,
64 | # but in this case the build will fail with an error since the secret
65 | # won't be set.
66 | if: >
67 | startsWith(github.ref, 'refs/tags/')
68 | || startsWith(github.head_ref, 'tickets/')
69 |
70 | steps:
71 | - uses: actions/checkout@v4
72 | with:
73 | fetch-depth: 0
74 |
75 | - uses: lsst-sqre/build-and-push-to-ghcr@v1
76 | id: build
77 | with:
78 | image: ${{ github.repository }}
79 | github_token: ${{ secrets.GITHUB_TOKEN }}
80 |
81 | test-packaging:
82 |
83 | name: Test packaging
84 | runs-on: ubuntu-latest
85 |
86 | steps:
87 | - uses: actions/checkout@v4
88 | with:
89 | fetch-depth: 0 # full history for setuptools_scm
90 |
91 | - name: Build and publish
92 | uses: lsst-sqre/build-and-publish-to-pypi@v3
93 | with:
94 | python-version: "3.12"
95 | upload: false
96 |
97 | pypi:
98 |
99 | # This job requires set up:
100 | # 1. Set up a trusted publisher for PyPI
101 | # 2. Set up a "pypi" environment in the repository
102 | # See https://github.com/lsst-sqre/build-and-publish-to-pypi
103 | name: Upload release to PyPI
104 | runs-on: ubuntu-latest
105 | needs: [lint, test, test-packaging]
106 | environment:
107 | name: pypi
108 | url: https://pypi.org/p/giftless
109 | permissions:
110 | id-token: write
111 | if: github.event_name == 'release' && github.event.action == 'published'
112 |
113 | steps:
114 | - uses: actions/checkout@v4
115 | with:
116 | fetch-depth: 0 # full history for setuptools_scm
117 |
118 | - name: Build and publish
119 | uses: lsst-sqre/build-and-publish-to-pypi@v3
120 | with:
121 | python-version: "3.12"
122 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Dockerfile for uWSGI wrapped Giftless Git LFS Server
2 | # Shared build ARGs among stages
3 | ARG WORKDIR=/app
4 | ARG VENV="$WORKDIR/.venv"
5 | ARG UV_VERSION=0.5.16
6 |
7 | ### Distroless uv version layer to be copied from (because COPY --from does not interpolate variables)
8 | FROM ghcr.io/astral-sh/uv:$UV_VERSION AS uv
9 |
10 | ### --- Build Dependencies ---
11 | FROM python:3.12 AS builder
12 | ARG UWSGI_VERSION=2.0.23
13 | # Common WSGI middleware modules to be pip-installed
14 | # These are not required in every Giftless installation but are common enough
15 | ARG EXTRA_PACKAGES="wsgi_cors_middleware"
16 | # expose shared ARGs
17 | ARG WORKDIR
18 | ARG VENV
19 |
20 | # Set WORKDIR (also creates the dir)
21 | WORKDIR $WORKDIR
22 |
23 | # Install packages to build wheels for uWSGI and other requirements
24 | RUN set -eux ;\
25 | export DEBIAN_FRONTEND=noninteractive ;\
26 | apt-get update ;\
27 | apt-get install -y --no-install-recommends build-essential libpcre3 libpcre3-dev git curl ;\
28 | rm -rf /var/lib/apt/lists/*
29 |
30 | # Install uv to replace pip & friends
31 | COPY --from=uv /uv /uvx /bin/
32 |
33 | # Set a couple uv-related settings
34 | # Wait a bit longer for slow connections
35 | ENV UV_HTTP_TIMEOUT=100
36 | # Don't cache packages
37 | ENV UV_NO_CACHE=1
38 |
39 | # Create virtual env to store dependencies, "activate" it
40 | RUN uv venv "$VENV"
41 | ENV VIRTUAL_ENV="$VENV" PATH="$VENV/bin:$PATH"
42 |
43 | # Install runtime dependencies
44 | RUN --mount=target=/build-ctx \
45 | uv pip install -r /build-ctx/requirements/main.txt
46 | RUN uv pip install uwsgi==$UWSGI_VERSION
47 | # Install extra packages into the virtual env
48 | RUN uv pip install ${EXTRA_PACKAGES}
49 |
50 | # Copy project contents necessary for an editable install
51 | COPY .git .git/
52 | COPY giftless giftless/
53 | COPY pyproject.toml .
54 | # Editable-install the giftless package (add a kind of a project path reference in site-packages)
55 | # To detect the package version dynamically, setuptools-scm needs the git binary
56 | RUN uv pip install -e .
57 |
58 | ### --- Build Final Image ---
59 | FROM python:3.12-slim AS final
60 | LABEL org.opencontainers.image.authors="Shahar Evron "
61 |
62 | ARG USER_NAME=giftless
63 | # Writable path for local LFS storage
64 | ARG STORAGE_DIR=/lfs-storage
65 | # expose shared ARGs
66 | ARG WORKDIR
67 | ARG VENV
68 |
69 | # Set WORKDIR (also creates the dir)
70 | WORKDIR $WORKDIR
71 |
72 | # Create a user and set local storage write permissions
73 | RUN set -eux ;\
74 | useradd -d "$WORKDIR" "$USER_NAME" ;\
75 | mkdir "$STORAGE_DIR" ;\
76 | chown "$USER_NAME" "$STORAGE_DIR"
77 |
78 | # Install runtime dependencies
79 | RUN set -eux ;\
80 | export DEBIAN_FRONTEND=noninteractive ;\
81 | apt-get update ;\
82 | apt-get install -y libpcre3 libxml2 tini curl ;\
83 | rm -rf /var/lib/apt/lists/*
84 |
85 | # Use the virtual env with dependencies from builder stage
86 | COPY --from=builder "$VENV" "$VENV"
87 | ENV VIRTUAL_ENV="$VENV" PATH="$VENV/bin:$PATH"
88 | # Copy project source back into the same path referenced by the editable install
89 | COPY --from=builder "$WORKDIR/giftless" "giftless"
90 |
91 | # Set runtime properties
92 | USER $USER_NAME
93 | ENV GIFTLESS_TRANSFER_ADAPTERS_basic_options_storage_options_path="$STORAGE_DIR"
94 | ENV UWSGI_MODULE="giftless.wsgi_entrypoint"
95 |
96 | ENTRYPOINT ["tini", "--", "uwsgi"]
97 | CMD ["-s", "127.0.0.1:5000", "-M", "-T", "--threads", "2", "-p", "2", \
98 | "--manage-script-name", "--callable", "app"]
99 |
100 | # TODO remove this STOPSIGNAL override after uwsgi>=2.1
101 | STOPSIGNAL SIGQUIT
102 |
--------------------------------------------------------------------------------
/docs/source/github-lfs.md:
--------------------------------------------------------------------------------
1 | Shadowing GitHub LFS
2 | ====================
3 |
4 | This guide shows how to use Giftless as the LFS server for an existing GitHub repository (not using GitHub LFS). Thanks to a handful of tricks, it also acts as a full remote HTTPS-based `git` repository, making this a zero-configuration setup for clients.
5 |
6 | This guide uses `docker compose`, so you need to [install it](https://docs.docker.com/compose/install/). It also relies on cloning GitHub repos over HTTPS; the SSH way is not supported.
7 |
8 | ### Running docker containers
9 | To run the setup, `git clone https://github.com/datopian/giftless`, step into the `examples/github-lfs` directory and run `docker compose up`.
10 |
11 | This will run two containers:
12 | - `giftless`: Locally built Giftless server configured to use solely the [GitHub authentication provider](auth-providers.md#github-authenticator) and a local docker compose volume as the storage backend.
13 | - `proxy`: An [Envoy reverse proxy](https://www.envoyproxy.io/) which acts as the frontend, listening on local port 5000 and configured to route LFS traffic to `giftless` and pretty much anything else to `[api.]github.com`. **The proxy listens on unencrypted HTTP**; configuring it to provide TLS termination is very much possible, but isn't covered yet (your turn, thanks for the contribution!).
14 |
15 | Feel free to explore the `compose.yaml`, which contains all the details.
16 |
17 | ### Cloning a GitHub repository via proxy
18 | The frontend proxy forwards the usual `git` traffic to GitHub, so go there and pick or create a test repository where you have write access, then clone it via the proxy hostname (just replace `github.com` with wherever you host the proxy):
19 | ```shell
20 | git clone http://localhost:5000/$YOUR_ORG/$YOUR_REPO
21 | ```
22 | If you don't use a credential helper, you might be asked for the same credentials several times before the call gets through. [Make sure to set one up](https://git-scm.com/doc/credential-helpers) before it drives you insane.
23 |
24 | Thanks to the [automatic LFS server discovery](https://github.com/git-lfs/git-lfs/blob/main/docs/api/server-discovery.md) this is all you should need to become LFS-enabled!
25 |
26 | ### Pushing binary blobs
27 | Let's try pushing some binary blobs then! See also [Quickstart](quickstart.md#create-a-local-repository-and-push-some-file).
28 | ```shell
29 | # create some blob
30 | dd if=/dev/urandom of=blob.bin bs=1M count=1
31 | # make it tracked by LFS
32 | git lfs track blob.bin
33 | # the LFS tracking is written in .gitattributes, which you also want committed
34 | git add .gitattributes blob.bin
35 | git commit -m 'Hello LFS!'
36 | # push it, assuming the local branch is main
37 | # this might fail for the 1st time, when git automatically runs 'git config lfs.locksverify false'
38 | git push -u origin main
39 | ```
40 |
41 | This should eventually succeed, and you will find the LFS digest in place of the blob on GitHub and the binary blob on your local storage:
42 | ```shell
43 | docker compose exec -it giftless find /lfs-storage
44 | /lfs-storage
45 | /lfs-storage/$YOUR_ORG
46 | /lfs-storage/$YOUR_ORG/$YOUR_REPO
47 | /lfs-storage/$YOUR_ORG/$YOUR_REPO/deadbeefb10bb10bad40beaa8c68c4863e8b00b7e929efbc6dcdb547084b01
48 | ```
49 |
50 | Next time anyone clones the repo (via the proxy), the binary blob will get properly downloaded. Failing to use the proxy hostname will make `git` use GitHub's own LFS, which is a paid service you are obviously trying to avoid.
51 |
52 | ### Service teardown
53 |
54 | Finally, to shut down your containers, interrupt (`^C`) the running compose session and clean up the stopped containers with:
55 | ```shell
56 | docker compose down [--volumes]
57 | ```
58 | Using `--volumes` tears down the `lfs-storage` volume too, so make sure that is what you want.
--------------------------------------------------------------------------------
/docs/source/wsgi-middleware.md:
--------------------------------------------------------------------------------
1 | # Using WSGI Middleware
2 |
3 | Another way to customize Giftless's behavior is with standard
4 | [WSGI middleware](https://en.wikipedia.org/wiki/Web_Server_Gateway_Interface#WSGI_middleware).
5 | This includes both publicly available middleware libraries and your own custom
6 | WSGI middleware code (a minimal custom middleware is sketched at the end of this page).
7 |
8 | ## Enabling Custom WSGI Middleware
9 |
10 | To enable a WSGI middleware, add it to the `MIDDLEWARE` config section like so:
11 |
12 | ```yaml
13 | MIDDLEWARE:
14 | - class: wsgi_package.wsgi_module:WSGICallable
15 | args: [] # List of ordered arguments to pass to callable
16 | kwargs: {} # key-value pairs of keyword arguments to pass to callable
17 | ```
18 |
19 | Where:
20 | * `class` is a `module.path:ClassName` style reference to the WSGI middleware
21 |   module and class, or to any callable that returns a WSGI object
22 | * `args` is a list of positional arguments to pass to the specified callable
23 | * `kwargs` is a key-value mapping of keyword arguments to pass to the specified callable.
24 |
25 | The middleware module must be installed in the same Python environment as Giftless
26 | for it to be loaded.
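
For reference, a minimal custom middleware compatible with this mechanism could look like the sketch below. The module path (`wsgi_package/wsgi_module.py`) and class name (`WSGICallable`) are just the placeholders from the example config above, and the sketch assumes Giftless passes the wrapped WSGI application as the first constructor argument, with `args` / `kwargs` following, as is conventional for WSGI middleware:

```python
"""wsgi_package/wsgi_module.py - placeholder names from the config example."""
from collections.abc import Callable, Iterable
from typing import Any


class WSGICallable:
    """Toy middleware that adds a fixed header to every response."""

    def __init__(self, app: Callable, header: str, value: str) -> None:
        # `app` is the wrapped WSGI application (Giftless itself); `header`
        # and `value` would come from `args` / `kwargs` in the config.
        self.app = app
        self.header = header
        self.value = value

    def __call__(
        self, environ: dict[str, Any], start_response: Callable
    ) -> Iterable[bytes]:
        def _start_response(
            status: str, headers: list, exc_info: Any = None
        ) -> Any:
            # Append our header before delegating to the real start_response
            headers.append((self.header, self.value))
            return start_response(status, headers, exc_info)

        return self.app(environ, _start_response)
```

Such a class would then be enabled with `class: wsgi_package.wsgi_module:WSGICallable` and, for example, `kwargs: {header: X-Hello, value: giftless}`.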
27 |
28 | ## Useful Middleware Examples
29 |
30 | Here are some examples of solving specific needs using WSGI middleware:
31 |
32 | ### HOWTO: Fixing Generated URLs when Running Behind a Proxy
33 | If you have Giftless running behind a reverse proxy, and available
34 | publicly at a custom hostname / port / path / scheme that is not known to
35 | Giftless, you might have an issue where generated URLs are not accessible.
36 |
37 | This can be fixed by enabling the `ProxyFix` Werkzeug middleware, which
38 | is already installed along with Giftless:
39 |
40 | ```yaml
41 | MIDDLEWARE:
42 | - class: werkzeug.middleware.proxy_fix:ProxyFix
43 | kwargs:
44 | x_host: 1
45 | x_port: 1
46 | x_prefix: 1
47 | ```
48 |
49 | In order for this to work, you must ensure your reverse proxy (e.g. nginx)
50 | sets the right `X-Forwarded-*` headers when passing requests.
51 |
52 | For example, if you have deployed Giftless at an endpoint that is available to
53 | clients at `https://example.com/lfs`, the following nginx configuration is
54 | expected, in addition to the `MIDDLEWARE` section shown above in the Giftless
55 | configuration:
56 |
57 | ```
58 | location /lfs/ {
59 | proxy_pass http://giftless.internal.host:5000/;
60 | proxy_set_header X-Forwarded-Prefix /lfs;
61 | }
62 | ```
63 |
64 | This example assumes Giftless is available to the reverse proxy at
65 | `giftless.internal.host` port 5000. It also assumes nginx sets
66 | `X-Forwarded-Host`, `X-Forwarded-Port` and `X-Forwarded-Proto`; add explicit
67 | `proxy_set_header` directives if your configuration does not already set them.
68 |
69 | ### HOWTO: CORS Support
70 |
71 | If you need to access Giftless from a browser, you may need to ensure
72 | Giftless sends proper [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS)
73 | headers, otherwise browsers may reject responses from Giftless.
74 |
75 | There are a number of CORS WSGI middleware implementations available on PyPI,
76 | and you can use any of them to add CORS header support to Giftless.
77 |
78 | For example, you can enable CORS support using
79 | [wsgi-cors-middleware](https://github.com/moritzmhmk/wsgi-cors-middleware):
80 |
81 | ```bash
82 | (.venv) $ pip install wsgi_cors_middleware
83 | ```
84 |
85 | > **Note:** when using the Giftless Docker image, there is no need to install
86 | > this middleware, as it is already installed.
88 |
89 | And then add the following to your config file:
90 |
91 | ```yaml
92 | MIDDLEWARE:
93 | - class: wsgi_cors_middleware:CorsMiddleware
94 | kwargs:
95 | origin: https://www.example.com
96 | headers: ['Content-type', 'Accept', 'Authorization']
97 | methods: ['GET', 'POST', 'PUT']
98 | ```
99 |
--------------------------------------------------------------------------------
/requirements/main.txt:
--------------------------------------------------------------------------------
1 | # This file was autogenerated by uv via the following command:
2 | # uv pip compile --no-emit-index-url -o requirements/main.txt requirements/main.in
3 | azure-core==1.30.1
4 | # via azure-storage-blob
5 | azure-storage-blob==12.19.1
6 | # via -r requirements/main.in
7 | blinker==1.7.0
8 | # via flask
9 | boto3==1.34.59
10 | # via -r requirements/main.in
11 | botocore==1.34.59
12 | # via
13 | # boto3
14 | # s3transfer
15 | cachetools==5.3.3
16 | # via
17 | # -r requirements/main.in
18 | # google-auth
19 | certifi==2024.2.2
20 | # via requests
21 | cffi==1.16.0
22 | # via cryptography
23 | charset-normalizer==3.3.2
24 | # via requests
25 | click==8.1.7
26 | # via flask
27 | cryptography==42.0.5
28 | # via
29 | # -r requirements/main.in
30 | # azure-storage-blob
31 | figcan==0.0.4
32 | # via -r requirements/main.in
33 | flask==2.3.3
34 | # via
35 | # -r requirements/main.in
36 | # flask-classful
37 | # flask-marshmallow
38 | flask-classful==0.16.0
39 | # via -r requirements/main.in
40 | flask-marshmallow==0.15.0
41 | # via -r requirements/main.in
42 | google-api-core==2.17.1
43 | # via
44 | # google-cloud-core
45 | # google-cloud-storage
46 | google-auth==2.28.2
47 | # via
48 | # google-api-core
49 | # google-cloud-core
50 | # google-cloud-storage
51 | google-cloud-core==2.4.1
52 | # via google-cloud-storage
53 | google-cloud-storage==2.15.0
54 | # via -r requirements/main.in
55 | google-crc32c==1.5.0
56 | # via
57 | # google-cloud-storage
58 | # google-resumable-media
59 | google-resumable-media==2.7.0
60 | # via google-cloud-storage
61 | googleapis-common-protos==1.63.0
62 | # via google-api-core
63 | idna==3.6
64 | # via requests
65 | importlib-metadata==7.0.2
66 | # via -r requirements/main.in
67 | isodate==0.6.1
68 | # via azure-storage-blob
69 | itsdangerous==2.1.2
70 | # via flask
71 | jinja2==3.1.3
72 | # via flask
73 | jmespath==1.0.1
74 | # via
75 | # boto3
76 | # botocore
77 | markupsafe==2.1.5
78 | # via
79 | # jinja2
80 | # werkzeug
81 | marshmallow==3.21.1
82 | # via
83 | # flask-marshmallow
84 | # webargs
85 | packaging==24.0
86 | # via
87 | # flask-marshmallow
88 | # marshmallow
89 | # webargs
90 | protobuf==4.25.3
91 | # via
92 | # google-api-core
93 | # googleapis-common-protos
94 | pyasn1==0.5.1
95 | # via
96 | # pyasn1-modules
97 | # rsa
98 | pyasn1-modules==0.3.0
99 | # via google-auth
100 | pycparser==2.21
101 | # via cffi
102 | pyjwt==2.8.0
103 | # via -r requirements/main.in
104 | python-dateutil==2.9.0.post0
105 | # via
106 | # -r requirements/main.in
107 | # botocore
108 | python-dotenv==1.0.1
109 | # via -r requirements/main.in
110 | pyyaml==6.0.1
111 | # via -r requirements/main.in
112 | requests==2.31.0
113 | # via
114 | # azure-core
115 | # google-api-core
116 | # google-cloud-storage
117 | rsa==4.9
118 | # via google-auth
119 | s3transfer==0.10.0
120 | # via boto3
121 | six==1.16.0
122 | # via
123 | # azure-core
124 | # isodate
125 | # python-dateutil
126 | typing-extensions==4.10.0
127 | # via
128 | # -r requirements/main.in
129 | # azure-core
130 | # azure-storage-blob
131 | urllib3==2.0.7
132 | # via
133 | # botocore
134 | # requests
135 | webargs==8.4.0
136 | # via -r requirements/main.in
137 | werkzeug==3.0.3
138 | # via
139 | # -r requirements/main.in
140 | # flask
141 | zipp==3.17.0
142 | # via importlib-metadata
143 |
--------------------------------------------------------------------------------
/docs/source/transfer-adapters.md:
--------------------------------------------------------------------------------
1 | Transfer Adapters
2 | =================
3 | Git LFS servers and clients can implement and negotiate different
4 | [transfer adapters](https://github.com/git-lfs/git-lfs/blob/master/docs/api/basic-transfers.md).
5 | Out of the box, Git LFS defines only a `basic` transfer mode, which all clients support. `basic` is simple
6 | and efficient for direct-to-storage uploads with backends that support uploading via
7 | a single `PUT` request.
8 |
9 | ## `basic` Transfer Mode
10 |
11 | ### External Storage `basic` transfer adapter
12 | The `basic_external` transfer adapter is designed to facilitate LFS `basic` mode transfers (the default transfer
13 | mode of Git LFS) for setups in which the storage backend supports communicating directly with the Git LFS client. That
14 | is, files are uploaded to or downloaded from a storage service that supports HTTP `PUT` / `GET` based access,
15 | without passing through Giftless. With this adapter, Giftless does not handle any file transfers - it is only
16 | responsible for authorizing the transfer and providing the client with direct access to storage (for example, via pre-signed URLs).
17 |
18 | This transfer adapter works with storage adapters implementing the `ExternalStorage` interface - typically, these
19 | are cloud storage service backends.
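
As a concrete sketch (not a drop-in configuration), enabling `basic_external` with the bundled S3 backend could look like this; the bucket name is a placeholder, and the full set of `storage_options` is documented with each storage backend:

```yaml
TRANSFER_ADAPTERS:
  basic:
    factory: giftless.transfer.basic_external:factory
    options:
      storage_class: giftless.storage.amazon_s3:AmazonS3Storage
      storage_options:
        bucket_name: my-lfs-bucket   # placeholder bucket name
      action_lifetime: 900           # lifetime of signed actions, in seconds
```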
20 |
21 | ### Streaming `basic` transfer adapter
22 | The `basic_streaming` transfer adapter facilitates LFS `basic` mode transfers in which Giftless also handles object
23 | upload, download and verification requests directly. This is less scalable and typically less performant than
24 | the `basic_external` adapter, as all data and potentially long-running HTTP requests must be passed through Giftless
25 | and its Python runtime. However, in some situations this may be preferable to direct-to-storage HTTP requests.
26 |
27 | `basic_streaming` supports local storage, as well as streaming from some cloud storage service backends such as
28 | Azure and Google Cloud - although those backends typically also support the `basic_external` transfer adapter.
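
A `basic_streaming` configuration with local storage follows the same pattern as the other adapters. The factory path, storage class name and `path` option below are assumptions based on the naming convention of the other adapters in this repository; check your Giftless version's default configuration for the exact names:

```yaml
TRANSFER_ADAPTERS:
  basic:
    factory: giftless.transfer.basic_streaming:factory            # assumed factory path
    options:
      storage_class: giftless.storage.local_storage:LocalStorage  # assumed class name
      storage_options:
        path: /lfs-storage   # local directory where objects are stored
```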
29 |
30 | ## Multipart Transfer Mode
31 | To support more complex uploads - especially multipart uploads, where a single large file is uploaded using more
32 | than one HTTP request, each carrying a different part of the file - directly to backends
33 | that support them, Giftless adds support for a non-standard `multipart-basic` transfer
34 | mode.
35 |
36 | **NOTE**: `multipart-basic` is a non-standard transfer mode and will not be supported
37 | by most Git LFS clients. For a Python implementation of a Git LFS client library that
38 | does support it, see [giftless-client](https://github.com/datopian/giftless-client).
39 |
40 | ### Enabling Multipart Transfer Mode
41 |
42 | You can enable multipart transfers by adding the following lines to your Giftless config
43 | file:
44 |
45 | ```yaml
46 | TRANSFER_ADAPTERS:
47 | # Add the following lines:
48 | multipart-basic:
49 | factory: giftless.transfer.multipart:factory
50 | options:
51 | storage_class: giftless.storage.azure:AzureBlobsStorage
52 | storage_options:
53 | connection_string: "somesecretconnectionstringhere"
54 | container_name: my-multipart-storage
55 | ```
56 |
57 | You must specify a `storage_class` that supports multipart transfers (implements the `MultipartStorage`
58 | interface). Currently, these are:
59 | * `giftless.storage.azure:AzureBlobsStorage` - Azure Blob Storage
60 |
61 | The following additional options are available for the `multipart-basic` transfer adapter (see the example below):
62 |
63 | * `action_lifetime` - the maximal lifetime, in seconds, of signed multipart actions. Because multipart
64 | uploads tend to involve very large files and can easily take hours to complete, we recommend setting this
65 | to a few hours; the default is 6 hours.
66 | * `max_part_size` - the maximal size, in bytes, of a single uploaded part. The default is 10MB.
67 |
68 | See the specific storage adapter for additional backend-specific configuration options to be added under
69 | `storage_options`.
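
For example, to give clients more time and allow larger parts, the configuration above could be extended like this (the values shown are illustrative, not recommendations):

```yaml
TRANSFER_ADAPTERS:
  multipart-basic:
    factory: giftless.transfer.multipart:factory
    options:
      storage_class: giftless.storage.azure:AzureBlobsStorage
      storage_options:
        connection_string: "somesecretconnectionstringhere"
        container_name: my-multipart-storage
      action_lifetime: 43200    # 12 hours, in seconds
      max_part_size: 50000000   # ~50MB per part
```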
70 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for giftless
2 | PACKAGE_NAME := giftless
3 | PACKAGE_DIRS := giftless
4 | TESTS_DIR := tests
5 |
6 | SHELL := bash
7 | PYTHON := python
8 | PIP := uv pip
9 | PIP_COMPILE := uv pip compile
10 | PYTEST := pytest
11 | DOCKER := docker
12 | GIT := git
13 |
14 | DOCKER_HOST := docker.io
15 | DOCKER_REPO := datopian
16 | DOCKER_IMAGE_NAME := giftless
17 | DOCKER_IMAGE_TAG := latest
18 | DOCKER_CACHE_FROM := docker.io/datopian/giftless:latest
19 |
20 | PYTEST_EXTRA_ARGS :=
21 |
22 | SOURCE_FILES := $(shell find $(PACKAGE_DIRS) $(TESTS_DIR) -type f -name "*.py")
23 | SENTINELS := .make-cache
24 | DIST_DIR := dist
25 |
26 | PYVER := $(shell $(PYTHON) -c "import sys;print(f'{sys.version_info[0]}{sys.version_info[1]}')")
27 | VERSION := $(shell $(PYTHON) -c "from importlib.metadata import version;print(version('$(PACKAGE_NAME)'))")
28 |
29 | default: help
30 |
31 | ## Install uv (fast pip replacement)
32 | init: $(SENTINELS)/uv
33 |
34 | ## Regenerate requirements files
35 | requirements: requirements/dev.txt requirements/dev.in requirements/main.txt requirements/main.in
36 |
37 | ## Set up the development environment
38 | dev-setup: $(SENTINELS)/dev-setup
39 |
40 | ## Run all linting checks
41 | lint: $(SENTINELS)
42 | pre-commit run --all-files
43 |
44 | ## Run all tests
45 | test: $(SENTINELS)
46 | $(PYTEST) $(PYTEST_EXTRA_ARGS) $(PACKAGE_DIRS) $(TESTS_DIR)
47 |
48 | ## Build a local Docker image
49 | docker: requirements/main.txt
50 | $(DOCKER) build --cache-from "$(DOCKER_CACHE_FROM)" -t $(DOCKER_HOST)/$(DOCKER_REPO)/$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_TAG) .
51 |
52 | ## Tag and push a release (disabled; done via GitHub Actions now)
53 | release:
54 | @echo "Package '$(PACKAGE_NAME)' releases are managed via GitHub"
55 |
56 | ## Clean all generated files
57 | distclean:
58 | rm -rf $(BUILD_DIR) $(DIST_DIR)
59 | rm -rf $(SENTINELS)/dist
60 |
61 | ## Create distribution files to upload to pypi
62 | dist: $(SENTINELS)/dist
63 |
64 | ## Build project documentation HTML files
65 | docs-html:
66 | @cd docs && $(MAKE) html
67 |
68 | .PHONY: test docker release dist distclean requirements docs-html init
69 |
70 | requirements/main.txt: requirements/main.in
71 | $(PIP_COMPILE) --no-emit-index-url -o requirements/main.txt requirements/main.in
72 |
73 | requirements/dev.txt: requirements/dev.in requirements/main.txt
74 | $(PIP_COMPILE) --no-emit-index-url -o requirements/dev.txt requirements/dev.in
75 |
76 | $(DIST_DIR)/$(PACKAGE_NAME)-$(VERSION).tar.gz $(DIST_DIR)/$(PACKAGE_NAME)-$(VERSION)-py3-none-any.whl: $(SOURCE_FILES) README.md | $(SENTINELS)/dist-setup
77 | $(PYTHON) -m build
78 |
79 | $(SENTINELS):
80 | mkdir $@
81 |
82 | $(SENTINELS)/dist-setup: | $(SENTINELS)
83 | $(PIP) install -U wheel twine
84 | @touch $@
85 |
86 | $(SENTINELS)/dist: $(SENTINELS)/dist-setup $(DIST_DIR)/$(PACKAGE_NAME)-$(VERSION).tar.gz $(DIST_DIR)/$(PACKAGE_NAME)-$(VERSION)-py3-none-any.whl | $(SENTINELS)
87 | @touch $@
88 |
89 | $(SENTINELS)/dev-setup: init requirements/main.txt requirements/dev.txt | $(SENTINELS)
90 | $(PIP) install -U pip pip-tools pre-commit tox
91 | $(PIP) install -r requirements/main.txt
92 | $(PIP) install -e .
93 | $(PIP) install -r requirements/dev.txt
94 | @touch $@
95 |
96 | $(SENTINELS)/uv: $(SENTINELS)
97 | pip install uv
98 | @touch $@
99 |
100 | # Help related variables and targets
101 |
102 | GREEN := $(shell tput -Txterm setaf 2)
103 | YELLOW := $(shell tput -Txterm setaf 3)
104 | WHITE := $(shell tput -Txterm setaf 7)
105 | RESET := $(shell tput -Txterm sgr0)
106 | TARGET_MAX_CHAR_NUM := 20
107 |
108 | ## Show help
109 | help:
110 | @echo ''
111 | @echo 'Usage:'
112 | 	@echo '  ${YELLOW}make${RESET} ${GREEN}<target>${RESET}'
113 | @echo ''
114 | @echo 'Targets:'
115 | @awk '/^[a-zA-Z\-\_0-9]+:/ { \
116 | helpMessage = match(lastLine, /^## (.*)/); \
117 | if (helpMessage) { \
118 | helpCommand = substr($$1, 0, index($$1, ":")-1); \
119 | helpMessage = substr(lastLine, RSTART + 3, RLENGTH); \
120 | printf " ${YELLOW}%-$(TARGET_MAX_CHAR_NUM)s${RESET} ${GREEN}%s${RESET}\n", helpCommand, helpMessage; \
121 | } \
122 | } \
123 | { lastLine = $$0 }' $(MAKEFILE_LIST)
124 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestGoogleCloudStorageBackend.test_get_download_action.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: assertion=eyJ0eXAiOiAiSldUIiwgImFsZyI6ICJSUzI1NiIsICJraWQiOiAiMjg5ODJmN2JiOTg5YmE2NjkzY2Q1OTllMzMxZWI2OWJkZjE1OGIzMCJ9.eyJpYXQiOiAxNjAzMjIxOTg0LCAiZXhwIjogMTYwMzIyNTU4NCwgImlzcyI6ICJzaGFoYXItdGVzdHNAZ2lmdGxlc3MtdGVzdHMuaWFtLmdzZXJ2aWNlYWNjb3VudC5jb20iLCAiYXVkIjogImh0dHBzOi8vb2F1dGgyLmdvb2dsZWFwaXMuY29tL3Rva2VuIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5mdWxsX2NvbnRyb2wgaHR0cHM6Ly93d3cuZ29vZ2xlYXBpcy5jb20vYXV0aC9kZXZzdG9yYWdlLnJlYWRfb25seSBodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9hdXRoL2RldnN0b3JhZ2UucmVhZF93cml0ZSJ9.IAC-8Y0u7gVIfK44-IUc9RTk_FbR3HXEeGx2bHJvmfdDu2oTnwQnwqHzwLtrsHDL45M4TeZS86DaGTHX9g7DceVDy_RrRLNZ7KOS0uhpa-iMdSoHrX8a2I-9vnSG4flQ83ABWulFmh-Bab_2rUfF-QA1CoYyY1Wgoq-Mn2Bp--nqPbwY-HdXEurmfM9BR4Lxr3ETTb29t2808cenXgdI-8lxJN79sRVVnrAmqpQjeB4uKMBvp4BUvtdynYpUrWIg6WvhAjlwtQ7BzruvdALzKdYlH-yvhBp75w_Ticr552SvyVIKQl4Qjn5IJj_oaN1dltYGWm4bIjrUCUi1mbX4Eg&grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer
4 | headers:
5 | Accept:
6 | - '*/*'
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | Content-Length:
12 | - '955'
13 | User-Agent:
14 | - python-requests/2.24.0
15 | content-type:
16 | - application/x-www-form-urlencoded
17 | method: POST
18 | uri: https://oauth2.googleapis.com/token
19 | response:
20 | body:
21 | string: !!binary |
22 | H4sIAAAAAAAC/x3P23JDQAAA0H/Z5zRj4xZ9cwkRVZF1qxeztmtcQlkq6PTfm+n5g/MDMCF0HLPp
23 | q6EdeAUrPih7snd6VxN8Ylak5MN26QrBFNcNs8GK6lNjqyYWRg2pvJfoUwN5xxijMsEwspzCyLut
24 | tHx6tufrWH1cffg+mC/n+IZr1WAdlCBqA66oZaHnTBSyk6ukLkmDXOKcLflGwXGQl8tMYzwlvfd2
25 | 1JlFhCIX8ziW2aFUIs+Pc5Wk9wXJzadQ36Hti3UrojCQ3FJ78Hoy0ZuCZorTC9gBuvQVo2NWPXu8
26 | qCg78H/NprWnz7BGMaMM/P4B7XstlQoBAAA=
27 | headers:
28 | Alt-Svc:
29 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-T051=":443"; ma=2592000,h3-T050=":443";
30 | ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443"; ma=2592000,quic=":443";
31 | ma=2592000; v="46,43"
32 | Cache-Control:
33 | - private
34 | Content-Encoding:
35 | - gzip
36 | Content-Type:
37 | - application/json; charset=UTF-8
38 | Date:
39 | - Tue, 20 Oct 2020 19:26:24 GMT
40 | Server:
41 | - scaffolding on HTTPServer2
42 | Transfer-Encoding:
43 | - chunked
44 | Vary:
45 | - Origin
46 | - X-Origin
47 | - Referer
48 | X-Content-Type-Options:
49 | - nosniff
50 | X-Frame-Options:
51 | - SAMEORIGIN
52 | X-XSS-Protection:
53 | - '0'
54 | status:
55 | code: 200
56 | message: OK
57 | - request:
58 | body: null
59 | headers:
60 | Accept:
61 | - '*/*'
62 | Accept-Encoding:
63 | - gzip
64 | Connection:
65 | - keep-alive
66 | User-Agent:
67 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
68 | X-Goog-API-Client:
69 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
70 | authorization:
71 | - fake-authz-header
72 | method: GET
73 | uri: https://storage.googleapis.com/storage/v1/b/giftless-tests-20200818/o?projection=noAcl&prefix=giftless-tests%2F&prettyPrint=false
74 | response:
75 | body:
76 | string: '{"kind":"storage#objects"}'
77 | headers:
78 | Alt-Svc:
79 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-27=":443"; ma=2592000,h3-T051=":443";
80 | ma=2592000,h3-T050=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443";
81 | ma=2592000,quic=":443"; ma=2592000; v="46,43"
82 | Cache-Control:
83 | - private, max-age=0, must-revalidate, no-transform
84 | Content-Length:
85 | - '26'
86 | Content-Type:
87 | - application/json; charset=UTF-8
88 | Date:
89 | - Tue, 20 Oct 2020 19:26:24 GMT
90 | Expires:
91 | - Tue, 20 Oct 2020 19:26:24 GMT
92 | Server:
93 | - UploadServer
94 | Vary:
95 | - Origin
96 | - X-Origin
97 | X-GUploader-UploadID:
98 | - ABg5-Uw_sNe33Mg4a0MKqPbXDbYa3BCudQH7M5Zx0snzS7naYEJOdSbnfafoj0zIg1N8j7QiBjh0fYZD5-McpGpbvw
99 | status:
100 | code: 200
101 | message: OK
102 | version: 1
103 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestGoogleCloudStorageBackend.test_get_upload_action.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: assertion=eyJ0eXAiOiAiSldUIiwgImFsZyI6ICJSUzI1NiIsICJraWQiOiAiMjg5ODJmN2JiOTg5YmE2NjkzY2Q1OTllMzMxZWI2OWJkZjE1OGIzMCJ9.eyJpYXQiOiAxNjAzMjIxOTgzLCAiZXhwIjogMTYwMzIyNTU4MywgImlzcyI6ICJzaGFoYXItdGVzdHNAZ2lmdGxlc3MtdGVzdHMuaWFtLmdzZXJ2aWNlYWNjb3VudC5jb20iLCAiYXVkIjogImh0dHBzOi8vb2F1dGgyLmdvb2dsZWFwaXMuY29tL3Rva2VuIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5mdWxsX2NvbnRyb2wgaHR0cHM6Ly93d3cuZ29vZ2xlYXBpcy5jb20vYXV0aC9kZXZzdG9yYWdlLnJlYWRfb25seSBodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9hdXRoL2RldnN0b3JhZ2UucmVhZF93cml0ZSJ9.Bo9KxG5TXS7p8vm2rDTvi9SZYw0j6_tR6ueCm-HlsEkhnIcX-XtJen51luILmAQfDWpSG8olEXDUPEdNo38-tFqgUJUsyMNywJekzlEba2FZiTneiw4P9VTDHLxTpcpHI1hJ9-1hE2PgnLwEc0loBKXfHCncZmHEjpLBFV0_aV1FcP7Bbd1ljxP7kTsHVmPnEXqUs2IlACkdeDeLWNBAPBWmMQw3eybhFIPq25htidLKVyB9TJGIS0nNdeaWjFyASNt-Bck_VDYOj-kLbyG0DB6XLGlqXoC8-vJXD73VLcuq78T5L-yojRGc7WLyYUya1iHfOayEaYpmWWRwtavz-g&grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer
4 | headers:
5 | Accept:
6 | - '*/*'
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | Content-Length:
12 | - '955'
13 | User-Agent:
14 | - python-requests/2.24.0
15 | content-type:
16 | - application/x-www-form-urlencoded
17 | method: POST
18 | uri: https://oauth2.googleapis.com/token
19 | response:
20 | body:
21 | string: !!binary |
22 | H4sIAAAAAAAC/x3P23JDQAAA0H/Z58gkQTf6xlJ0K7V1KftiNrIxLhPCEnT67830/MH5ASzP+TBk
23 | oq35DbyChR20bb7FnWcohK9QFEJgb/+9gyH8ek8/oBRTC0HzcJ1hgejbGixtNzPX1KhvBHtMzqQg
24 | ZvVIUqc51bkqRisal5fpPGp3zyntqtIfbqtNBvFPYk317A5tr+auD1e5tyhbUaTvbEUZ0ZXRCBmI
25 | c9OxYwfPedxLslQojskurv+ZBAmPGp9O0hTACzouYUPwEatpeasrY4zCIqSjCzaAz13Z8yErnz1Z
26 | 1bQN+L9mYun4M2xw1vMe/P4Bc6DnXgoBAAA=
27 | headers:
28 | Alt-Svc:
29 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-T051=":443"; ma=2592000,h3-T050=":443";
30 | ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443"; ma=2592000,quic=":443";
31 | ma=2592000; v="46,43"
32 | Cache-Control:
33 | - private
34 | Content-Encoding:
35 | - gzip
36 | Content-Type:
37 | - application/json; charset=UTF-8
38 | Date:
39 | - Tue, 20 Oct 2020 19:26:23 GMT
40 | Server:
41 | - scaffolding on HTTPServer2
42 | Transfer-Encoding:
43 | - chunked
44 | Vary:
45 | - Origin
46 | - X-Origin
47 | - Referer
48 | X-Content-Type-Options:
49 | - nosniff
50 | X-Frame-Options:
51 | - SAMEORIGIN
52 | X-XSS-Protection:
53 | - '0'
54 | status:
55 | code: 200
56 | message: OK
57 | - request:
58 | body: null
59 | headers:
60 | Accept:
61 | - '*/*'
62 | Accept-Encoding:
63 | - gzip
64 | Connection:
65 | - keep-alive
66 | User-Agent:
67 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
68 | X-Goog-API-Client:
69 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
70 | authorization:
71 | - fake-authz-header
72 | method: GET
73 | uri: https://storage.googleapis.com/storage/v1/b/giftless-tests-20200818/o?projection=noAcl&prefix=giftless-tests%2F&prettyPrint=false
74 | response:
75 | body:
76 | string: '{"kind":"storage#objects"}'
77 | headers:
78 | Alt-Svc:
79 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-27=":443"; ma=2592000,h3-T051=":443";
80 | ma=2592000,h3-T050=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443";
81 | ma=2592000,quic=":443"; ma=2592000; v="46,43"
82 | Cache-Control:
83 | - private, max-age=0, must-revalidate, no-transform
84 | Content-Length:
85 | - '26'
86 | Content-Type:
87 | - application/json; charset=UTF-8
88 | Date:
89 | - Tue, 20 Oct 2020 19:26:23 GMT
90 | Expires:
91 | - Tue, 20 Oct 2020 19:26:23 GMT
92 | Server:
93 | - UploadServer
94 | Vary:
95 | - Origin
96 | - X-Origin
97 | X-GUploader-UploadID:
98 | - ABg5-UzNr1xPjMq7s_exwdaqxlEqMFK7voilEcmVjie1V99bGIkK5jzAU9nXRmZeJaMnQLfr6UfkEOm9Z6klrzbHkQ
99 | status:
100 | code: 200
101 | message: OK
102 | version: 1
103 |
--------------------------------------------------------------------------------
/giftless/storage/__init__.py:
--------------------------------------------------------------------------------
1 | """Storage base classes."""
2 | import mimetypes
3 | from abc import ABC, abstractmethod
4 | from collections.abc import Iterable
5 | from typing import Any, BinaryIO
6 |
7 | from . import exc
8 |
9 | # TODO @athornton: Think about refactoring this; some deduplication of
10 | # `verify_object`, at least.
11 |
12 |
13 | class VerifiableStorage(ABC):
14 | """A storage backend that supports object verification API.
15 |
16 | All streaming backends should be 'verifiable'.
17 | """
18 |
19 | @abstractmethod
20 | def verify_object(self, prefix: str, oid: str, size: int) -> bool:
21 | """Check that object exists and has the right size.
22 |
23 | This method should not throw an error if the object does not
24 | exist, but return False.
25 | """
26 |
27 |
28 | class StreamingStorage(VerifiableStorage, ABC):
29 | """Interface for streaming storage adapters."""
30 |
31 | @abstractmethod
32 | def get(self, prefix: str, oid: str) -> Iterable[bytes]:
33 | pass
34 |
35 | @abstractmethod
36 | def put(self, prefix: str, oid: str, data_stream: BinaryIO) -> int:
37 | pass
38 |
39 | @abstractmethod
40 | def exists(self, prefix: str, oid: str) -> bool:
41 | pass
42 |
43 | @abstractmethod
44 | def get_size(self, prefix: str, oid: str) -> int:
45 | pass
46 |
47 | def get_mime_type(self, prefix: str, oid: str) -> str:
48 | return "application/octet-stream"
49 |
50 | def verify_object(self, prefix: str, oid: str, size: int) -> bool:
51 | """Verify that an object exists and has the right size."""
52 | try:
53 | return self.get_size(prefix, oid) == size
54 | except exc.ObjectNotFoundError:
55 | return False
56 |
57 |
58 | class ExternalStorage(VerifiableStorage, ABC):
59 |     """Interface for external storage adapters."""
60 |
61 | @abstractmethod
62 | def get_upload_action(
63 | self,
64 | prefix: str,
65 | oid: str,
66 | size: int,
67 | expires_in: int,
68 | extra: dict[str, Any] | None = None,
69 | ) -> dict[str, Any]:
70 | pass
71 |
72 | @abstractmethod
73 | def get_download_action(
74 | self,
75 | prefix: str,
76 | oid: str,
77 | size: int,
78 | expires_in: int,
79 | extra: dict[str, Any] | None = None,
80 | ) -> dict[str, Any]:
81 | pass
82 |
83 | @abstractmethod
84 | def exists(self, prefix: str, oid: str) -> bool:
85 | pass
86 |
87 | @abstractmethod
88 | def get_size(self, prefix: str, oid: str) -> int:
89 | pass
90 |
91 | def verify_object(self, prefix: str, oid: str, size: int) -> bool:
92 | """Verify that object exists and has the correct size."""
93 | try:
94 | return self.get_size(prefix, oid) == size
95 | except exc.ObjectNotFoundError:
96 | return False
97 |
98 |
99 | class MultipartStorage(VerifiableStorage, ABC):
100 | """Base class for storage that supports multipart uploads."""
101 |
102 | @abstractmethod
103 | def get_multipart_actions(
104 | self,
105 | prefix: str,
106 | oid: str,
107 | size: int,
108 | part_size: int,
109 | expires_in: int,
110 | extra: dict[str, Any] | None = None,
111 | ) -> dict[str, Any]:
112 | pass
113 |
114 | @abstractmethod
115 | def get_download_action(
116 | self,
117 | prefix: str,
118 | oid: str,
119 | size: int,
120 | expires_in: int,
121 | extra: dict[str, Any] | None = None,
122 | ) -> dict[str, Any]:
123 | pass
124 |
125 | @abstractmethod
126 | def exists(self, prefix: str, oid: str) -> bool:
127 | pass
128 |
129 | @abstractmethod
130 | def get_size(self, prefix: str, oid: str) -> int:
131 | pass
132 |
133 | def verify_object(self, prefix: str, oid: str, size: int) -> bool:
134 | """Verify that object exists and has the correct size."""
135 | try:
136 | return self.get_size(prefix, oid) == size
137 | except exc.ObjectNotFoundError:
138 | return False
139 |
140 |
141 | def guess_mime_type_from_filename(filename: str) -> str | None:
142 | """Based on the filename, guess what MIME type it is."""
143 | return mimetypes.guess_type(filename)[0]
144 |
--------------------------------------------------------------------------------
/tests/storage/test_google_cloud.py:
--------------------------------------------------------------------------------
1 | """Tests for the Google Cloud Storage storage backend."""
2 | from pathlib import Path
3 |
4 | import google.cloud.storage # noqa: F401 (used implicitly by storage backend)
5 | import pytest
6 |
7 | from ..mocks.google_cloud_storage import MockGoogleCloudStorage
8 | from . import ExternalStorageAbstractTests, StreamingStorageAbstractTests
9 |
10 | MOCK_GCP_PROJECT_NAME = "giftless-tests"
11 | MOCK_GCP_BUCKET_NAME = "giftless-tests-20240115"
12 |
13 | # This is a valid but revoked key that we use in testing
14 | MOCK_GCP_KEY_B64 = (
15 | "ewogICJ0eXBlIjogInNlcnZpY2VfYWNjb3VudCIsCiAgInByb2plY3RfaWQiOiAiZ2lmdGxl"
16 | "c3MtdGVzdHMiLAogICJwcml2YXRlX2tleV9pZCI6ICI4MWRhNDcxNzhiYzhmYjE1MDU1NTg3"
17 | "OWRjZTczZThmZDlmOWI4NmJkIiwKICAicHJpdmF0ZV9rZXkiOiAiLS0tLS1CRUdJTiBQUklW"
18 | "QVRFIEtFWS0tLS0tXG5NSUlFdkFJQkFEQU5CZ2txaGtpRzl3MEJBUUVGQUFTQ0JLWXdnZ1Np"
19 | "QWdFQUFvSUJBUUNsYXdDOUEvZHBnbVJWXG5kYVg2UW5xY1N6YW5ueTdCVlgwVklwUHVjNzl2"
20 | "aFR2NWRwZXRaa29SQmV6Uzg2ZStHUHVyTmJIMU9rWEZrL2tkXG5SNHFqMDV6SXlYeWxiQUVx"
21 | "Sk1BV24zZFY0VUVRVFlmRitPY0ltZUxpcjR3cW9pTldDZDNJaHErNHVVeU1WRDMxXG5wc1Fl"
22 | "cWVxcWV6bVoyNG1oTjBLK2NQczNuSXlIK0lzZXFsWjJob3U3bUU3U2JsYXdjc04ramcyNmQ5"
23 | "YzFUZlpoXG42eFozVkpndGFtcUZvdlZmbEZwNFVvLy9tVGo0cXEwUWRUYk9SS1NEeVkxTWhk"
24 | "Q24veSsyaForVm9IUitFM0Z4XG5XRmc2VGFwRGJhc29heEp5YjRoZEFFK0JhbW14bklTL09G"
25 | "bElaMGVoL2tsRmlBTlJRMEpQb2dXRjFjVE9NcVFxXG4wMlVFV2V5ckFnTUJBQUVDZ2dFQUJN"
26 | "OE5odGVQNElhTEUxd2haclN0cEp5NWltMGgxenFXTVlCTU85WDR4KzJUXG5PZmRUYStLbWtp"
27 | "cUV1c1UyanNJdks1VUJPakVBcncxVU1RYnBaaEtoTDhub2c3dGkyNjVoMG1Ba1pzWlZOWHU0"
28 | "XG5UKzQ4REZ4YzQ4THlzaktXM1RCQVBSb2RRbkJLTVA3MnY4QThKMU5BYlMwZ3IvTW1TbEVi"
29 | "dm1tT2FuTU9ONXAwXG43djlscm9GMzFOakMzT05OY25pUlRYY01xT2tEbWt5LyszeVc2RldM"
30 | "MkJZV3RwcGN3L0s1TnYxdGNMTG5iajVhXG5Hc3dVMENtQXgyTEVoWEo0bndJaWlFR3h6UGZY"
31 | "VXNLcVhLL2JEZENKbDUzMTgraU9aSHNrdXR1OFlqQVpsdktpXG5yckNFUkFXZitLeTZ0WGhn"
32 | "KzJIRzJJbUc5cG8wRnUwTGlIU0ZVUURKY1FLQmdRRFQ5RDJEYm9SNWFGWW0wQlVSXG5vNGd4"
33 | "OHZGc0NyTEx0a09EZGx3U2wrT20yblFvY0JXSTEyTmF5QXRlL2xhVFZNRlorVks1bU9vYXl2"
34 | "WnljTU1YXG5SdXZJYmdCTFdHYkdwSXdXZnlDOGxRZEJYM09xZTZZSzZTMUU2VnNYUVN0aHQ0"
35 | "YUx3ZGpGQ2J6VU1lc1ZzREV5XG5FYU85aXlTUVlFTmFTN2V3amFzNUFVU1F0d0tCZ1FESHl4"
36 | "WUp3bWxpQzE4NEVyZ3lZSEFwYm9weXQzSVkzVGFKXG5yV2MrSGw5WDNzVEJzaFVQYy85Smhj"
37 | "anZKYWVzMlhrcEEwYmY5cis1MEcxUndua3dMWHZsbDJSU0FBNE05TG4rXG45cVlsNEFXNU9Q"
38 | "VTVJS0tKYVk1c0kzSHdXTXd6elRya3FBV3hNallJME9OSnBaWUVnSTVKN09sek1jYnhLREZx"
39 | "XG51MmpjYkFubnJRS0JnRlUxaklGSkxmTE5FazE2Tys0aWF6K0Jack5EdmN1TjA2aUhMYzYv"
40 | "eDJLdDBpTHJwSXlsXG40cWg5WWF6bjNSQlA4NGRqWjNGNzJ5bTRUTW1ITWJjcTZPRmo3N1Jh"
41 | "cnI3UEtnNWxQMWp4Sk1DUVNpVFFudGttXG5FdS93VEpHVnZvWURUUkRrZG13SVZTU05pTy9v"
42 | "TEc3dmpuOUY4QVltM1F6eEFjRDF3MDhnaGxzVEFvR0FidUthXG4vNTJqeVdPUVhGbWZXMjVF"
43 | "c2VvRTh2ZzNYZTlnZG5jRUJ1anFkNlZPeEVYbkJHV1h1U0dFVEo0MGVtMVVubHVRXG5PWHNF"
44 | "RzhlKzlKS2ZtZ3FVYWU5bElWR2dlclpVaUZveUNuRlVHK0d0MEIvNXRaUWRGSTF6ampacVZ4"
45 | "Ry9idXFHXG5CanRjMi9XN1A4T2tDQ21sVHdncTVPRXFqZXVGeWJ2cnpmSTBhUjBDZ1lCdVlY"
46 | "Wm5MMm1xeVNma0FnaGswRVVmXG5XeElDb1FmRDdCQlJBV3lmL3VwRjQ2NlMvRmhONUVreG5v"
47 | "dkZ2RlZyQjU1SHVHRTh2Qk4vTEZNVXlPU0xXQ0lIXG5RUG9ZcytNM0NLdGJWTXMxY1h2Tm5t"
48 | "ZFRhMnRyYjQ0SlQ5ZlFLbkVwa2VsbUdPdXJMNEVMdmFyUEFyR0x4VllTXG5jWFo1a1FBUy9G"
49 | "eGhFSDZSbnFSalFnPT1cbi0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS1cbiIsCiAgImNsaWVu"
50 | "dF9lbWFpbCI6ICJzb21lLXNlcnZpY2UtYWNjb3VudEBnaWZ0bGVzcy10ZXN0cy5pYW0uZ3Nl"
51 | "cnZpY2VhY2NvdW50LmNvbSIsCiAgImNsaWVudF9pZCI6ICIxMDk4NTYwMjgzNDI5MDI4ODI3"
52 | "MTUiLAogICJhdXRoX3VyaSI6ICJodHRwczovL2FjY291bnRzLmdvb2dsZS5jb20vby9vYXV0"
53 | "aDIvYXV0aCIsCiAgInRva2VuX3VyaSI6ICJodHRwczovL29hdXRoMi5nb29nbGVhcGlzLmNv"
54 | "bS90b2tlbiIsCiAgImF1dGhfcHJvdmlkZXJfeDUwOV9jZXJ0X3VybCI6ICJodHRwczovL3d3"
55 | "dy5nb29nbGVhcGlzLmNvbS9vYXV0aDIvdjEvY2VydHMiLAogICJjbGllbnRfeDUwOV9jZXJ0"
56 | "X3VybCI6ICJodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9yb2JvdC92MS9tZXRhZGF0YS94"
57 | "NTA5L3NvbWUtc2VydmljZS1hY2NvdW50JTQwZ2lmdGxlc3MtdGVzdHMuaWFtLmdzZXJ2aWNl"
58 | "YWNjb3VudC5jb20iCn0K"
59 | )
60 |
61 |
62 | @pytest.fixture
63 | def storage_backend(
64 | storage_path: Path,
65 | ) -> MockGoogleCloudStorage:
66 | """Provide a mock Google Cloud Storage backend for all GCS tests."""
67 | return MockGoogleCloudStorage(
68 | project_name=MOCK_GCP_PROJECT_NAME,
69 | bucket_name=MOCK_GCP_BUCKET_NAME,
70 | account_key_base64=MOCK_GCP_KEY_B64,
71 | path=storage_path,
72 | )
73 |
74 |
75 | class TestGoogleCloudStorageBackend(
76 | StreamingStorageAbstractTests, ExternalStorageAbstractTests
77 | ):
78 | pass
79 |
--------------------------------------------------------------------------------
/giftless/transfer/basic_external.py:
--------------------------------------------------------------------------------
1 | """External Backend Transfer Adapter.
2 |
3 | This transfer adapter offers 'basic' transfers by directing clients to upload
4 | and download objects from an external storage service, such as AWS S3 or Azure
5 | Blobs.
6 |
7 | As long as external services support HTTP PUT / GET to do direct uploads /
8 | downloads, this transfer adapter can work with them.
9 |
10 | Different storage backends can be used with this adapter, as long as they
11 | implement the `ExternalStorage` interface defined in giftless.storage.
12 | """
13 |
14 | import posixpath
15 | from typing import Any
16 |
17 | from flask import Flask
18 |
19 | from giftless.storage import ExternalStorage, exc
20 | from giftless.transfer import PreAuthorizingTransferAdapter
21 | from giftless.transfer.basic_streaming import VerifyView
22 | from giftless.util import get_callable
23 | from giftless.view import ViewProvider
24 |
25 |
26 | class BasicExternalBackendTransferAdapter(
27 | PreAuthorizingTransferAdapter, ViewProvider
28 | ):
29 | """Provides External Transfer Adapter.
30 |
31 | TODO @athornton: inherently PreAuthorizing feels weird. Investigate
32 | whether there's refactoring/mixin work we can do here.
33 | """
34 |
35 | def __init__(
36 | self, storage: ExternalStorage, default_action_lifetime: int
37 | ) -> None:
38 | super().__init__()
39 | self.storage = storage
40 | self.action_lifetime = default_action_lifetime
41 |
42 | def upload(
43 | self,
44 | organization: str,
45 | repo: str,
46 | oid: str,
47 | size: int,
48 | extra: dict[str, Any] | None = None,
49 | ) -> dict:
50 | prefix = posixpath.join(organization, repo)
51 | response = {"oid": oid, "size": size}
52 |
53 | if self.storage.verify_object(prefix, oid, size):
54 | # No upload required, we already have this object
55 | return response
56 |
57 | response.update(
58 | self.storage.get_upload_action(
59 | prefix, oid, size, self.action_lifetime, extra
60 | )
61 | )
62 | if response.get("actions", {}).get("upload"): # type:ignore[attr-defined]
63 | response["authenticated"] = self._provides_preauth
64 | headers = self._preauth_headers(
65 | organization,
66 | repo,
67 | actions={"verify"},
68 | oid=oid,
69 | lifetime=self.VERIFY_LIFETIME,
70 | )
71 | response["actions"]["verify"] = { # type:ignore[index]
72 | "href": VerifyView.get_verify_url(organization, repo),
73 | "header": headers,
74 | "expires_in": self.VERIFY_LIFETIME,
75 | }
76 |
77 | return response
78 |
79 | def download(
80 | self,
81 | organization: str,
82 | repo: str,
83 | oid: str,
84 | size: int,
85 | extra: dict[str, Any] | None = None,
86 | ) -> dict:
87 | prefix = posixpath.join(organization, repo)
88 | response = {"oid": oid, "size": size}
89 |
90 | try:
91 | self._check_object(prefix, oid, size)
92 | response.update(
93 | self.storage.get_download_action(
94 | prefix, oid, size, self.action_lifetime, extra
95 | )
96 | )
97 | except exc.StorageError as e:
98 | response["error"] = e.as_dict()
99 |
100 | if response.get("actions", {}).get("download"): # type:ignore[attr-defined]
101 | response["authenticated"] = self._provides_preauth
102 |
103 | return response
104 |
105 | def register_views(self, app: Flask) -> None:
106 | VerifyView.register(app, init_argument=self.storage)
107 |
108 | def _check_object(self, prefix: str, oid: str, size: int) -> None:
109 | """Raise specific domain error if object is not valid.
110 |
111 | NOTE: this does not use storage.verify_object directly because
112 | we want ObjectNotFoundError errors to be propagated if raised
113 | """
114 | if self.storage.get_size(prefix, oid) != size:
115 | raise exc.InvalidObjectError("Object size does not match")
116 |
117 |
118 | def factory(
119 | storage_class: Any, storage_options: Any, action_lifetime: int
120 | ) -> BasicExternalBackendTransferAdapter:
121 | """Build a basic transfer adapter with external storage."""
122 | storage = get_callable(storage_class, __name__)
123 | return BasicExternalBackendTransferAdapter(
124 | storage(**storage_options), action_lifetime
125 | )
126 |
--------------------------------------------------------------------------------
/giftless/transfer/multipart.py:
--------------------------------------------------------------------------------
1 | """Multipart Transfer Adapter."""
2 |
3 | import posixpath
4 | from typing import Any
5 |
6 | from flask import Flask
7 |
8 | from giftless.storage import MultipartStorage, exc
9 | from giftless.transfer import PreAuthorizingTransferAdapter
10 | from giftless.transfer.basic_streaming import VerifyView
11 | from giftless.util import get_callable
12 | from giftless.view import ViewProvider
13 |
14 | DEFAULT_PART_SIZE = 10240000 # 10MB (-ish)
15 | DEFAULT_ACTION_LIFETIME = 6 * 60 * 60 # 6 hours
16 |
17 |
18 | class MultipartTransferAdapter(PreAuthorizingTransferAdapter, ViewProvider):
19 | """Transfer Adapter supporting multipart methods."""
20 |
21 | def __init__(
22 | self,
23 | storage: MultipartStorage,
24 | default_action_lifetime: int,
25 | max_part_size: int = DEFAULT_PART_SIZE,
26 | ) -> None:
27 | super().__init__()
28 | self.storage = storage
29 | self.max_part_size = max_part_size
30 | self.action_lifetime = default_action_lifetime
31 |
32 | def upload(
33 | self,
34 | organization: str,
35 | repo: str,
36 | oid: str,
37 | size: int,
38 | extra: dict[str, Any] | None = None,
39 | ) -> dict:
40 | prefix = posixpath.join(organization, repo)
41 | response = {"oid": oid, "size": size}
42 |
43 | if self.storage.verify_object(prefix, oid, size):
44 | # No upload required, we already have this object
45 | return response
46 |
47 | actions = self.storage.get_multipart_actions(
48 | prefix, oid, size, self.max_part_size, self.action_lifetime, extra
49 | )
50 | response.update(actions)
51 | if response.get("actions"):
52 | response["authenticated"] = True
53 | headers = self._preauth_headers(
54 | organization,
55 | repo,
56 | actions={"verify"},
57 | oid=oid,
58 | lifetime=self.VERIFY_LIFETIME,
59 | )
60 | response["actions"]["verify"] = { # type: ignore[index]
61 | "href": VerifyView.get_verify_url(organization, repo),
62 | "header": headers,
63 | "expires_in": self.VERIFY_LIFETIME,
64 | }
65 |
66 | return response
67 |
68 | def download(
69 | self,
70 | organization: str,
71 | repo: str,
72 | oid: str,
73 | size: int,
74 | extra: dict[str, Any] | None = None,
75 | ) -> dict:
76 | prefix = posixpath.join(organization, repo)
77 | response = {"oid": oid, "size": size}
78 |
79 | try:
80 | self._check_object(prefix, oid, size)
81 | response.update(
82 | self.storage.get_download_action(
83 | prefix, oid, size, self.action_lifetime, extra
84 | )
85 | )
86 | except exc.StorageError as e:
87 | response["error"] = e.as_dict()
88 |
89 | if response.get("actions", {}).get("download"): # type:ignore[attr-defined]
90 | response["authenticated"] = True
91 |
92 | return response
93 |
94 | def register_views(self, app: Flask) -> None:
95 | # TODO @rufuspollock: this is broken. Need to find a smarter
96 | # way for multiple transfer adapters to provide the same view
97 | # -- broken: VerifyView.register(app, init_argument=self.storage)
98 | # TODO @athornton: does this maybe indicate a classvar shadowing or
99 | # updating issue? Investigate that.
100 | if isinstance(self.storage, ViewProvider):
101 | self.storage.register_views(app)
102 |
103 | def _check_object(self, prefix: str, oid: str, size: int) -> None:
104 | """Raise specific domain error if object is not valid.
105 |
106 | NOTE: this does not use storage.verify_object directly because
107 | we want ObjectNotFoundError errors to be propagated if raised.
108 | """
109 | if self.storage.get_size(prefix, oid) != size:
110 | raise exc.InvalidObjectError("Object size does not match")
111 |
112 |
113 | def factory(
114 | storage_class: Any,
115 | storage_options: Any,
116 | action_lifetime: int = DEFAULT_ACTION_LIFETIME,
117 | max_part_size: int = DEFAULT_PART_SIZE,
118 | ) -> MultipartTransferAdapter:
119 | """Build a multipart transfer adapter with storage."""
120 | try:
121 | storage = get_callable(storage_class, __name__)
122 | except (AttributeError, ImportError):
123 | raise ValueError(
124 | f"Unable to load storage module: {storage_class}"
125 | ) from None
126 | return MultipartTransferAdapter(
127 | storage(**storage_options),
128 | action_lifetime,
129 | max_part_size=max_part_size,
130 | )
131 |
--------------------------------------------------------------------------------
/giftless/storage/amazon_s3.py:
--------------------------------------------------------------------------------
1 | """Amazon S3 backend."""
2 | import base64
3 | import binascii
4 | import posixpath
5 | from collections.abc import Iterable
6 | from typing import Any, BinaryIO
7 |
8 | import boto3
9 | import botocore
10 |
11 | from giftless.storage import ExternalStorage, StreamingStorage
12 | from giftless.storage.exc import ObjectNotFoundError
13 | from giftless.util import safe_filename
14 |
15 |
16 | class AmazonS3Storage(StreamingStorage, ExternalStorage):
17 | """AWS S3 Blob Storage backend."""
18 |
19 | def __init__(
20 | self,
21 | bucket_name: str,
22 | path_prefix: str | None = None,
23 | endpoint: str | None = None,
24 | **_: Any,
25 | ) -> None:
26 | self.bucket_name = bucket_name
27 | self.path_prefix = path_prefix
28 | self.s3 = boto3.resource("s3", endpoint_url=endpoint)
29 | self.s3_client = boto3.client("s3", endpoint_url=endpoint)
30 |
31 | def get(self, prefix: str, oid: str) -> Iterable[bytes]:
32 | if not self.exists(prefix, oid):
33 | raise ObjectNotFoundError
34 | result: Iterable[bytes] = self._s3_object(prefix, oid).get()["Body"]
35 | return result
36 |
37 | def put(self, prefix: str, oid: str, data_stream: BinaryIO) -> int:
38 | completed: list[int] = []
39 |
40 | def upload_callback(size: int) -> None:
41 | completed.append(size)
42 |
43 | bucket = self.s3.Bucket(self.bucket_name)
44 | bucket.upload_fileobj(
45 | data_stream,
46 | self._get_blob_path(prefix, oid),
47 | Callback=upload_callback,
48 | )
49 | return sum(completed)
50 |
51 | def exists(self, prefix: str, oid: str) -> bool:
52 | try:
53 | self.get_size(prefix, oid)
54 | except ObjectNotFoundError:
55 | return False
56 | return True
57 |
58 | def get_size(self, prefix: str, oid: str) -> int:
59 | try:
60 | result: int = self._s3_object(prefix, oid).content_length
61 | except botocore.exceptions.ClientError as e:
62 | if e.response["Error"]["Code"] == "404":
63 | raise ObjectNotFoundError from None
64 | raise
65 | return result
66 |
67 | def get_upload_action(
68 | self,
69 | prefix: str,
70 | oid: str,
71 | size: int,
72 | expires_in: int,
73 | extra: dict[str, Any] | None = None,
74 | ) -> dict[str, Any]:
75 | base64_oid = base64.b64encode(binascii.a2b_hex(oid)).decode("ascii")
76 | params = {
77 | "Bucket": self.bucket_name,
78 | "Key": self._get_blob_path(prefix, oid),
79 | "ContentType": "application/octet-stream",
80 | "ChecksumSHA256": base64_oid,
81 | }
82 | response = self.s3_client.generate_presigned_url(
83 | "put_object", Params=params, ExpiresIn=expires_in
84 | )
85 | return {
86 | "actions": {
87 | "upload": {
88 | "href": response,
89 | "header": {
90 | "Content-Type": "application/octet-stream",
91 | "x-amz-checksum-sha256": base64_oid,
92 | },
93 | "expires_in": expires_in,
94 | }
95 | }
96 | }
97 |
98 | def get_download_action(
99 | self,
100 | prefix: str,
101 | oid: str,
102 | size: int,
103 | expires_in: int,
104 | extra: dict[str, str] | None = None,
105 | ) -> dict[str, Any]:
106 | params = {
107 | "Bucket": self.bucket_name,
108 | "Key": self._get_blob_path(prefix, oid),
109 | }
110 |
111 | filename = extra.get("filename") if extra else None
112 | disposition = (
113 | extra.get("disposition", "attachment") if extra else "attachment"
114 | )
115 |
116 | if filename and disposition:
117 | filename = safe_filename(filename)
118 | params[
119 | "ResponseContentDisposition"
120 |             ] = f'{disposition}; filename="{filename}"'
121 | elif disposition:
122 | params["ResponseContentDisposition"] = disposition
123 |
124 | response = self.s3_client.generate_presigned_url(
125 | "get_object", Params=params, ExpiresIn=expires_in
126 | )
127 | return {
128 | "actions": {
129 | "download": {
130 | "href": response,
131 | "header": {},
132 | "expires_in": expires_in,
133 | }
134 | }
135 | }
136 |
137 | def _get_blob_path(self, prefix: str, oid: str) -> str:
138 | """Get the path to a blob in storage."""
139 | if not self.path_prefix:
140 | storage_prefix = ""
141 | elif self.path_prefix[0] == "/":
142 | storage_prefix = self.path_prefix[1:]
143 | else:
144 | storage_prefix = self.path_prefix
145 | return posixpath.join(storage_prefix, prefix, oid)
146 |
147 | def _s3_object(self, prefix: str, oid: str) -> Any:
148 | return self.s3.Object(
149 | self.bucket_name, self._get_blob_path(prefix, oid)
150 | )
151 |
--------------------------------------------------------------------------------
/tests/storage/__init__.py:
--------------------------------------------------------------------------------
1 | import io
2 | from abc import ABC
3 | from typing import Any, cast
4 |
5 | import pytest
6 |
7 | from giftless.storage import ExternalStorage, StreamingStorage
8 | from giftless.storage.exc import ObjectNotFoundError
9 |
10 | ARBITRARY_OID = (
11 | "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"
12 | )
13 |
14 | # The layering is bad in giftless.storage, leading to some strange choices
15 | # for storage classes here. That should be refactored sometime.
16 |
17 |
18 | class _CommonStorageAbstractTests(ABC): # noqa: B024
19 | """Common tests for all storage backend types and interfaces."""
20 |
21 | def test_get_size(self, storage_backend: StreamingStorage) -> None:
22 | """Test getting the size of a stored object."""
23 | content = b"The contents of a file-like object"
24 | storage_backend.put("org/repo", ARBITRARY_OID, io.BytesIO(content))
25 | assert len(content) == storage_backend.get_size(
26 | "org/repo", ARBITRARY_OID
27 | )
28 |
29 | def test_get_size_not_existing(
30 | self, storage_backend: StreamingStorage
31 | ) -> None:
32 | """Test getting the size of a non-existing object raises an
33 | exception.
34 | """
35 | with pytest.raises(ObjectNotFoundError):
36 | storage_backend.get_size("org/repo", ARBITRARY_OID)
37 |
38 | def test_exists_exists(self, storage_backend: StreamingStorage) -> None:
39 | """Test that calling exists on an existing object returns True."""
40 | content = b"The contents of a file-like object"
41 | storage_backend.put("org/repo", ARBITRARY_OID, io.BytesIO(content))
42 | assert storage_backend.exists("org/repo", ARBITRARY_OID)
43 |
44 | def test_exists_not_exists(
45 | self, storage_backend: StreamingStorage
46 | ) -> None:
47 | """Test that calling exists on a non-existing object returns False."""
48 | assert not storage_backend.exists("org/repo", ARBITRARY_OID)
49 |
50 |
51 | class _VerifiableStorageAbstractTests(ABC): # noqa: B024
52 | """Mixin class for other base storage adapter test classes that implement
53 | VerifiableStorage.
54 | """
55 |
56 | def test_verify_object_ok(self, storage_backend: StreamingStorage) -> None:
57 | content = b"The contents of a file-like object"
58 | # put is part of StreamingStorage, not VerifiableStorage...but
59 | # StreamingStorage implements VerifiableStorage
60 | storage_backend.put("org/repo", ARBITRARY_OID, io.BytesIO(content))
61 | assert storage_backend.verify_object(
62 | "org/repo", ARBITRARY_OID, len(content)
63 | )
64 |
65 | def test_verify_object_wrong_size(
66 | self, storage_backend: StreamingStorage
67 | ) -> None:
68 | content = b"The contents of a file-like object"
69 | storage_backend.put("org/repo", ARBITRARY_OID, io.BytesIO(content))
70 | assert not storage_backend.verify_object(
71 | "org/repo", ARBITRARY_OID, len(content) + 2
72 | )
73 |
74 | def test_verify_object_not_there(
75 | self, storage_backend: StreamingStorage
76 | ) -> None:
77 | assert not storage_backend.verify_object("org/repo", ARBITRARY_OID, 0)
78 |
79 |
80 | class StreamingStorageAbstractTests(
81 | _CommonStorageAbstractTests, _VerifiableStorageAbstractTests, ABC
82 | ):
83 | """Mixin for testing the StreamingStorage methods of a backend
84 | that implements StreamingStorage.
85 |
86 | To use, create a concrete test class mixing this class in, and
87 | define a fixture named ``storage_backend`` that returns an
88 | appropriate storage backend object.
89 | """
90 |
91 | def test_put_get_object(self, storage_backend: StreamingStorage) -> None:
92 | """Test a full put-then-get cycle."""
93 | content = b"The contents of a file-like object"
94 | written = storage_backend.put(
95 | "org/repo", ARBITRARY_OID, io.BytesIO(content)
96 | )
97 |
98 | assert len(content) == written
99 |
100 | fetched = storage_backend.get("org/repo", ARBITRARY_OID)
101 | fetched_content = b"".join(fetched)
102 | assert content == fetched_content
103 |
104 | def test_get_raises_if_not_found(
105 | self, storage_backend: StreamingStorage
106 | ) -> None:
107 | """Test that calling get for a non-existing object raises."""
108 | with pytest.raises(ObjectNotFoundError):
109 | storage_backend.get("org/repo", ARBITRARY_OID)
110 |
111 |
112 | class ExternalStorageAbstractTests(
113 | _CommonStorageAbstractTests, _VerifiableStorageAbstractTests
114 | ):
115 | """Mixin for testing the ExternalStorage methods of a backend that
116 | implements ExternalStorage.
117 |
118 | To use, create a concrete test class mixing this class in, and
119 | define a fixture named ``storage_backend`` that returns an
120 | appropriate storage backend object.
121 |
122 |
123 | Again, perhaps this should be defined as an ABC?
124 | """
125 |
126 | def test_get_upload_action(self, storage_backend: ExternalStorage) -> None:
127 | action_spec = storage_backend.get_upload_action(
128 | "org/repo", ARBITRARY_OID, 100, 3600
129 | )
130 | upload = cast(dict[str, Any], action_spec["actions"]["upload"])
131 | assert upload["href"][0:4] == "http"
132 | assert upload["expires_in"] == 3600
133 |
134 | def test_get_download_action(
135 | self, storage_backend: ExternalStorage
136 | ) -> None:
137 | action_spec = storage_backend.get_download_action(
138 | "org/repo", ARBITRARY_OID, 100, 7200
139 | )
140 | download = cast(dict[str, Any], action_spec["actions"]["download"])
141 | assert download["href"][0:4] == "http"
142 | assert download["expires_in"] == 7200
143 |
--------------------------------------------------------------------------------
/giftless/view.py:
--------------------------------------------------------------------------------
1 | """Flask-Classful View Classes."""
2 | from typing import Any, ClassVar, cast
3 |
4 | from flask import Flask
5 | from flask_classful import FlaskView
6 | from webargs.flaskparser import parser
7 |
8 | from giftless import exc, representation, schema, transfer
9 | from giftless.auth import authentication as authn
10 | from giftless.auth.identity import Permission
11 |
12 |
13 | class BaseView(FlaskView):
14 | """Extends Flask-Classful's base view class to add some common
15 | custom functionality.
16 | """
17 |
18 | decorators: ClassVar = [authn.login_required]
19 |
20 | representations: ClassVar = {
21 | "application/json": representation.output_json,
22 | representation.GIT_LFS_MIME_TYPE: representation.output_git_lfs_json,
23 | "flask-classful/default": representation.output_git_lfs_json,
24 | }
25 |
26 |     route_prefix: ClassVar = "/<organization>/<repo>.git/info/lfs/"
27 | # [flask-classful bug/feat?] Placeholders in route_prefix not skipped for
28 | # building the final rule for methods with them (FlaskView.build_rule).
29 | base_args: ClassVar = ["organization", "repo"]
30 |
31 | trailing_slash = False
32 |
33 | @classmethod
34 | def register(cls, app: Flask, *args: Any, **kwargs: Any) -> Any:
35 | if kwargs.get("base_class") is None:
36 | kwargs["base_class"] = BaseView
37 | if (
38 | app.config["LEGACY_ENDPOINTS"]
39 | and kwargs.get("route_prefix") is None
40 | and not hasattr(cls, "_legacy_") # break the cycle
41 | ):
42 | # To span any transition required for the switch to the current
43 | # endpoint URI, create a "Legacy" class "copy" of this view and
44 | # register it too, for both the views and their endpoints to
45 | # coexist.
46 | legacy_view = type(
47 | f"Legacy{cls.__name__}",
48 | (cls,),
49 | {
50 |                     "route_prefix": "/<organization>/<repo>/",
51 | "_legacy_": True,
52 | },
53 | )
54 | legacy_view = cast(BaseView, legacy_view)
55 | legacy_view.register(app, *args, **kwargs)
56 |
57 | return super().register(app, *args, **kwargs)
58 |
59 | @classmethod
60 | def _check_authorization(
61 | cls,
62 | organization: str,
63 | repo: str,
64 | permission: Permission,
65 | oid: str | None = None,
66 | ) -> None:
67 | """Check the current user is authorized to perform an action
68 | and raise an exception otherwise.
69 | """
70 | if not cls._is_authorized(organization, repo, permission, oid):
71 | raise exc.Forbidden(
72 | "You are not authorized to perform this action"
73 | )
74 |
75 | @staticmethod
76 | def _is_authorized(
77 | organization: str,
78 | repo: str,
79 | permission: Permission,
80 | oid: str | None = None,
81 | ) -> bool:
82 | """Check the current user is authorized to perform an action."""
83 | identity = authn.get_identity()
84 | return identity is not None and identity.is_authorized(
85 | organization, repo, permission, oid
86 | )
87 |
88 |
89 | class BatchView(BaseView):
90 | """Batch operations."""
91 |
92 | route_base = "objects/batch"
93 |
94 | def post(self, organization: str, repo: str) -> dict[str, Any]:
95 | """Batch operations."""
96 | payload = parser.parse(schema.batch_request_schema)
97 |
98 | try:
99 | transfer_type, adapter = transfer.match_transfer_adapter(
100 | payload["transfers"]
101 | )
102 | except ValueError as e:
103 | raise exc.InvalidPayload(str(e)) from None
104 |
105 | permission = (
106 | Permission.WRITE
107 | if payload["operation"] == schema.Operation.upload
108 | else Permission.READ
109 | )
110 | try:
111 | self._check_authorization(organization, repo, permission)
112 | except exc.Forbidden:
113 | # User doesn't have global permission to the entire namespace,
114 | # but may be authorized for all objects
115 | if not all(
116 | self._is_authorized(organization, repo, permission, o["oid"])
117 | for o in payload["objects"]
118 | ):
119 | raise
120 |
121 | response: dict[str, Any] = {"transfer": transfer_type}
122 | action = adapter.get_action(
123 | payload["operation"].value, organization, repo
124 | )
125 | response["objects"] = [action(**o) for o in payload["objects"]] # type: ignore[call-arg]
126 |
127 | if all(self._is_error(o, 404) for o in response["objects"]):
128 | raise exc.NotFound("Cannot find any of the requested objects")
129 |
130 | if all(self._is_error(o) for o in response["objects"]):
131 | raise exc.InvalidPayload(
132 | "Cannot validate any of the requested objects"
133 | )
134 |
135 | # TODO @rufuspollock: Check Accept header
136 | # TODO @athornton: do we need an output schema? If so...should
137 | # we just turn this into a Pydantic app?
138 |
139 | return response
140 |
141 | @staticmethod
142 | def _is_error(obj: dict[str, Any], code: int | None = None) -> bool:
143 | try:
144 | return obj["error"]["code"] == code or code is None
145 | except KeyError:
146 | return False
147 |
148 |
149 | class ViewProvider:
150 | """ViewProvider is a marker interface for storage and transfer
151 | adapters that can provide their own Flask views.
152 |
153 | This allows transfer and storage backends to register routes for
154 | accessing or verifying files, for example, directly from the
155 | Giftless HTTP server.
156 | """
157 |
158 | def register_views(self, app: Flask) -> None:
159 | pass
160 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAmazonS3StorageBackend.test_exists_not_exists.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | User-Agent:
6 | - !!binary |
7 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
8 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
9 | X-Amz-Content-SHA256:
10 | - !!binary |
11 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
12 | ODUyYjg1NQ==
13 | X-Amz-Date:
14 | - !!binary |
15 | MjAyMTAzMTVUMTI0MjA3Wg==
16 | authorization:
17 | - fake-authz-header
18 | method: HEAD
19 | uri: https://test-giftless.s3.amazonaws.com/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
20 | response:
21 | body:
22 | string: ''
23 | headers:
24 | Connection:
25 | - close
26 | Content-Type:
27 | - application/xml
28 | Date:
29 | - Mon, 15 Mar 2021 12:42:07 GMT
30 | Server:
31 | - AmazonS3
32 | x-amz-id-2:
33 | - zwW09n1vp1qS7C73dAt0hKAz4SjLvt6jyMXdLIUkGFy0aZ64SoHH/0dZ4j72wR5gnWENZuO6OM8=
34 | x-amz-request-id:
35 | - GBNA74VPQYRJKQK7
36 | status:
37 | code: 400
38 | message: Bad Request
39 | - request:
40 | body: null
41 | headers:
42 | User-Agent:
43 | - !!binary |
44 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
45 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
46 | X-Amz-Content-SHA256:
47 | - !!binary |
48 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
49 | ODUyYjg1NQ==
50 | X-Amz-Date:
51 | - !!binary |
52 | MjAyMTAzMTVUMTI0MjA4Wg==
53 | authorization:
54 | - fake-authz-header
55 | method: HEAD
56 | uri: https://test-giftless.s3.amazonaws.com/
57 | response:
58 | body:
59 | string: ''
60 | headers:
61 | Connection:
62 | - close
63 | Content-Type:
64 | - application/xml
65 | Date:
66 | - Mon, 15 Mar 2021 12:42:08 GMT
67 | Server:
68 | - AmazonS3
69 | x-amz-bucket-region:
70 | - eu-west-1
71 | x-amz-id-2:
72 | - rbKg7auIlah5f5BzubcpkcOOtut9a3AZHz/uMxuec5okIcMIQCP12Znoo83AG8VDQUteYcTHCps=
73 | x-amz-request-id:
74 | - GBNB3B523MFWJ2T4
75 | status:
76 | code: 400
77 | message: Bad Request
78 | - request:
79 | body: null
80 | headers:
81 | User-Agent:
82 | - !!binary |
83 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
84 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
85 | X-Amz-Content-SHA256:
86 | - !!binary |
87 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
88 | ODUyYjg1NQ==
89 | X-Amz-Date:
90 | - !!binary |
91 | MjAyMTAzMTVUMTI0MjA4Wg==
92 | authorization:
93 | - fake-authz-header
94 | method: HEAD
95 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/
96 | response:
97 | body:
98 | string: ''
99 | headers:
100 | Content-Type:
101 | - application/xml
102 | Date:
103 | - Mon, 15 Mar 2021 12:42:09 GMT
104 | Server:
105 | - AmazonS3
106 | x-amz-bucket-region:
107 | - eu-west-1
108 | x-amz-id-2:
109 | - R7R7PzsBxkUGIqNraLbKXMDbLZkqQSkT3ednYRwZm4w7sKBoo74Qh/Lysp2oxOFK6S9rZyho+14=
110 | x-amz-request-id:
111 | - GBNCBYJAXWZNM38D
112 | status:
113 | code: 200
114 | message: OK
115 | - request:
116 | body: null
117 | headers:
118 | User-Agent:
119 | - !!binary |
120 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
121 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
122 | X-Amz-Content-SHA256:
123 | - !!binary |
124 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
125 | ODUyYjg1NQ==
126 | X-Amz-Date:
127 | - !!binary |
128 | MjAyMTAzMTVUMTI0MjA4Wg==
129 | authorization:
130 | - fake-authz-header
131 | method: HEAD
132 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
133 | response:
134 | body:
135 | string: ''
136 | headers:
137 | Content-Type:
138 | - application/xml
139 | Date:
140 | - Mon, 15 Mar 2021 12:42:07 GMT
141 | Server:
142 | - AmazonS3
143 | x-amz-id-2:
144 | - qkq1Dw0Rwij1sYSAuOIZEnmY15Zqa5jvaWhT1e6BFChbUyG0sb4Hxx/FtJSofMDuPi+7Uz4OjOs=
145 | x-amz-request-id:
146 | - GBNDEVDHR876WNGX
147 | status:
148 | code: 404
149 | message: Not Found
150 | - request:
151 | body: null
152 | headers:
153 | User-Agent:
154 | - !!binary |
155 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
156 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
157 | X-Amz-Content-SHA256:
158 | - !!binary |
159 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
160 | ODUyYjg1NQ==
161 | X-Amz-Date:
162 | - !!binary |
163 | MjAyMTAzMTVUMTI0MjA4Wg==
164 | authorization:
165 | - fake-authz-header
166 | method: GET
167 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/?encoding-type=url
168 | response:
169 | body:
170 | string: '
171 |
172 | test-giftless1000urlfalse'
173 | headers:
174 | Content-Type:
175 | - application/xml
176 | Date:
177 | - Mon, 15 Mar 2021 12:42:09 GMT
178 | Server:
179 | - AmazonS3
180 | Transfer-Encoding:
181 | - chunked
182 | x-amz-bucket-region:
183 | - eu-west-1
184 | x-amz-id-2:
185 | - 8WI2POOkezWb0Krmgyz3qQVGThFP31WDb5G+DXpUraAKcIa2oX3zpoy7tn0N+B8MkfDKZd28Q+c=
186 | x-amz-request-id:
187 | - GBNEMNZX2HQVA6KG
188 | status:
189 | code: 200
190 | message: OK
191 | version: 1
192 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAmazonS3StorageBackend.test_get_size_not_existing.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | User-Agent:
6 | - !!binary |
7 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
8 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
9 | X-Amz-Content-SHA256:
10 | - !!binary |
11 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
12 | ODUyYjg1NQ==
13 | X-Amz-Date:
14 | - !!binary |
15 | MjAyMTAzMTVUMTI0MjA2Wg==
16 | authorization:
17 | - fake-authz-header
18 | method: HEAD
19 | uri: https://test-giftless.s3.amazonaws.com/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
20 | response:
21 | body:
22 | string: ''
23 | headers:
24 | Connection:
25 | - close
26 | Content-Type:
27 | - application/xml
28 | Date:
29 | - Mon, 15 Mar 2021 12:42:05 GMT
30 | Server:
31 | - AmazonS3
32 | x-amz-id-2:
33 | - /o9qssHk4eIGp3aqRbBA6zgVhOoLlUnyUZ7VKEX0kK66cUNa2HeCs4zafNKP+xQH1ZphJINdtyc=
34 | x-amz-request-id:
35 | - ZVBJVQ8F6539WM31
36 | status:
37 | code: 400
38 | message: Bad Request
39 | - request:
40 | body: null
41 | headers:
42 | User-Agent:
43 | - !!binary |
44 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
45 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
46 | X-Amz-Content-SHA256:
47 | - !!binary |
48 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
49 | ODUyYjg1NQ==
50 | X-Amz-Date:
51 | - !!binary |
52 | MjAyMTAzMTVUMTI0MjA2Wg==
53 | authorization:
54 | - fake-authz-header
55 | method: HEAD
56 | uri: https://test-giftless.s3.amazonaws.com/
57 | response:
58 | body:
59 | string: ''
60 | headers:
61 | Connection:
62 | - close
63 | Content-Type:
64 | - application/xml
65 | Date:
66 | - Mon, 15 Mar 2021 12:42:05 GMT
67 | Server:
68 | - AmazonS3
69 | x-amz-bucket-region:
70 | - eu-west-1
71 | x-amz-id-2:
72 | - WGTfKrgNkY3Za3VfYA/x/ceKf6Y0/sIWZPWkp9jJq/Wfu3OXkqYjHX7084yQm4bWWEzg83e4Wuk=
73 | x-amz-request-id:
74 | - ZVBPFH718FTE0GZY
75 | status:
76 | code: 400
77 | message: Bad Request
78 | - request:
79 | body: null
80 | headers:
81 | User-Agent:
82 | - !!binary |
83 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
84 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
85 | X-Amz-Content-SHA256:
86 | - !!binary |
87 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
88 | ODUyYjg1NQ==
89 | X-Amz-Date:
90 | - !!binary |
91 | MjAyMTAzMTVUMTI0MjA2Wg==
92 | authorization:
93 | - fake-authz-header
94 | method: HEAD
95 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/
96 | response:
97 | body:
98 | string: ''
99 | headers:
100 | Content-Type:
101 | - application/xml
102 | Date:
103 | - Mon, 15 Mar 2021 12:42:07 GMT
104 | Server:
105 | - AmazonS3
106 | x-amz-bucket-region:
107 | - eu-west-1
108 | x-amz-id-2:
109 | - HRm8iOrWQFc8TqXt/ZVtnpeOCB303kDm4t23ybUK8XYtmdthCrSQ5zOMn0Ywj1UWgpy04kdtGM8=
110 | x-amz-request-id:
111 | - ZVBMXG321D43B867
112 | status:
113 | code: 200
114 | message: OK
115 | - request:
116 | body: null
117 | headers:
118 | User-Agent:
119 | - !!binary |
120 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
121 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
122 | X-Amz-Content-SHA256:
123 | - !!binary |
124 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
125 | ODUyYjg1NQ==
126 | X-Amz-Date:
127 | - !!binary |
128 | MjAyMTAzMTVUMTI0MjA2Wg==
129 | authorization:
130 | - fake-authz-header
131 | method: HEAD
132 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
133 | response:
134 | body:
135 | string: ''
136 | headers:
137 | Content-Type:
138 | - application/xml
139 | Date:
140 | - Mon, 15 Mar 2021 12:42:06 GMT
141 | Server:
142 | - AmazonS3
143 | x-amz-id-2:
144 | - OiPHJKTcII364CUp8+1iJJPE/2BXfL4/V/mKrR8/0OOn+RqTScMTX7YUz2w4EqBU8oJnh5uB4GI=
145 | x-amz-request-id:
146 | - ZVBTTSV2Z67P1T40
147 | status:
148 | code: 404
149 | message: Not Found
150 | - request:
151 | body: null
152 | headers:
153 | User-Agent:
154 | - !!binary |
155 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
156 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
157 | X-Amz-Content-SHA256:
158 | - !!binary |
159 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
160 | ODUyYjg1NQ==
161 | X-Amz-Date:
162 | - !!binary |
163 | MjAyMTAzMTVUMTI0MjA2Wg==
164 | authorization:
165 | - fake-authz-header
166 | method: GET
167 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/?encoding-type=url
168 | response:
169 | body:
170 | string: '
171 |
172 | test-giftless1000urlfalse'
173 | headers:
174 | Content-Type:
175 | - application/xml
176 | Date:
177 | - Mon, 15 Mar 2021 12:42:07 GMT
178 | Server:
179 | - AmazonS3
180 | Transfer-Encoding:
181 | - chunked
182 | x-amz-bucket-region:
183 | - eu-west-1
184 | x-amz-id-2:
185 | - wzrFqx0IamtTlwkQ8pETRHGQUg825zUfNtWMbHTn58HYFXSsGQe8878XkFmTRnCfqLDKXweMcVo=
186 | x-amz-request-id:
187 | - ZVBXAHMAM7M1XBNP
188 | status:
189 | code: 200
190 | message: OK
191 | version: 1
192 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAmazonS3StorageBackend.test_get_raises_if_not_found.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | User-Agent:
6 | - !!binary |
7 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
8 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
9 | X-Amz-Content-SHA256:
10 | - !!binary |
11 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
12 | ODUyYjg1NQ==
13 | X-Amz-Date:
14 | - !!binary |
15 | MjAyMTAzMTVUMTI0MjEyWg==
16 | authorization:
17 | - fake-authz-header
18 | method: HEAD
19 | uri: https://test-giftless.s3.amazonaws.com/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
20 | response:
21 | body:
22 | string: ''
23 | headers:
24 | Connection:
25 | - close
26 | Content-Type:
27 | - application/xml
28 | Date:
29 | - Mon, 15 Mar 2021 12:42:11 GMT
30 | Server:
31 | - AmazonS3
32 | x-amz-id-2:
33 | - x3F0gGQ2vfo2Iva6e+Fpf5NE9gKkam5tYbibu/sauTLWYLs040dj1meE8RtiptAJjUda38/v9Ec=
34 | x-amz-request-id:
35 | - HCTBQK0PS5WAW60J
36 | status:
37 | code: 400
38 | message: Bad Request
39 | - request:
40 | body: null
41 | headers:
42 | User-Agent:
43 | - !!binary |
44 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
45 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
46 | X-Amz-Content-SHA256:
47 | - !!binary |
48 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
49 | ODUyYjg1NQ==
50 | X-Amz-Date:
51 | - !!binary |
52 | MjAyMTAzMTVUMTI0MjEyWg==
53 | authorization:
54 | - fake-authz-header
55 | method: HEAD
56 | uri: https://test-giftless.s3.amazonaws.com/
57 | response:
58 | body:
59 | string: ''
60 | headers:
61 | Connection:
62 | - close
63 | Content-Type:
64 | - application/xml
65 | Date:
66 | - Mon, 15 Mar 2021 12:42:12 GMT
67 | Server:
68 | - AmazonS3
69 | x-amz-bucket-region:
70 | - eu-west-1
71 | x-amz-id-2:
72 | - qZ6xvBZKl+n/qDQZbGWt4oDNx1Gp9uaFVFXwnhSUTcoM5VnT7FDIpss7vAVJA5zcAYvfbFTe/ko=
73 | x-amz-request-id:
74 | - HCT4RNRKT8S7FDD8
75 | status:
76 | code: 400
77 | message: Bad Request
78 | - request:
79 | body: null
80 | headers:
81 | User-Agent:
82 | - !!binary |
83 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
84 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
85 | X-Amz-Content-SHA256:
86 | - !!binary |
87 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
88 | ODUyYjg1NQ==
89 | X-Amz-Date:
90 | - !!binary |
91 | MjAyMTAzMTVUMTI0MjEyWg==
92 | authorization:
93 | - fake-authz-header
94 | method: HEAD
95 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/
96 | response:
97 | body:
98 | string: ''
99 | headers:
100 | Content-Type:
101 | - application/xml
102 | Date:
103 | - Mon, 15 Mar 2021 12:42:13 GMT
104 | Server:
105 | - AmazonS3
106 | x-amz-bucket-region:
107 | - eu-west-1
108 | x-amz-id-2:
109 | - 2E+aLBq79ysQnGrqAH3oc48iROWKmrz7MGgfQobGdnR49OZPIN5I0/HgCGtSwHhG4atX9wimIVQ=
110 | x-amz-request-id:
111 | - HCT3ZAPFPQTVW1ZJ
112 | status:
113 | code: 200
114 | message: OK
115 | - request:
116 | body: null
117 | headers:
118 | User-Agent:
119 | - !!binary |
120 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
121 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
122 | X-Amz-Content-SHA256:
123 | - !!binary |
124 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
125 | ODUyYjg1NQ==
126 | X-Amz-Date:
127 | - !!binary |
128 | MjAyMTAzMTVUMTI0MjEyWg==
129 | authorization:
130 | - fake-authz-header
131 | method: HEAD
132 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
133 | response:
134 | body:
135 | string: ''
136 | headers:
137 | Content-Type:
138 | - application/xml
139 | Date:
140 | - Mon, 15 Mar 2021 12:42:12 GMT
141 | Server:
142 | - AmazonS3
143 | x-amz-id-2:
144 | - xwpWeIUyeM7th0akRwmIwlcq7fUHrs+7KIHlQJ2hkcd4gJbRe7P1c5HecBlwMDLrsAyYaQnpaow=
145 | x-amz-request-id:
146 | - X0FVQBCVXEJ13NZ2
147 | status:
148 | code: 404
149 | message: Not Found
150 | - request:
151 | body: null
152 | headers:
153 | User-Agent:
154 | - !!binary |
155 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
156 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
157 | X-Amz-Content-SHA256:
158 | - !!binary |
159 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
160 | ODUyYjg1NQ==
161 | X-Amz-Date:
162 | - !!binary |
163 | MjAyMTAzMTVUMTI0MjEzWg==
164 | authorization:
165 | - fake-authz-header
166 | method: GET
167 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/?encoding-type=url
168 | response:
169 | body:
170 | string: '
171 |
172 | test-giftless1000urlfalse'
173 | headers:
174 | Content-Type:
175 | - application/xml
176 | Date:
177 | - Mon, 15 Mar 2021 12:42:14 GMT
178 | Server:
179 | - AmazonS3
180 | Transfer-Encoding:
181 | - chunked
182 | x-amz-bucket-region:
183 | - eu-west-1
184 | x-amz-id-2:
185 | - D6sVW8AHfNbSnU0VblxtELhhLyL+fLzyz+YPoRvnNXhJgTQzZrI+EQ7MLzO7iYhE60oeSFqaopQ=
186 | x-amz-request-id:
187 | - X0FHEWHQ0X7XWVZA
188 | status:
189 | code: 200
190 | message: OK
191 | version: 1
192 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestAmazonS3StorageBackend.test_verify_object_not_there.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: null
4 | headers:
5 | User-Agent:
6 | - !!binary |
7 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
8 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
9 | X-Amz-Content-SHA256:
10 | - !!binary |
11 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
12 | ODUyYjg1NQ==
13 | X-Amz-Date:
14 | - !!binary |
15 | MjAyMTAzMTVUMTI0MjEwWg==
16 | authorization:
17 | - fake-authz-header
18 | method: HEAD
19 | uri: https://test-giftless.s3.amazonaws.com/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
20 | response:
21 | body:
22 | string: ''
23 | headers:
24 | Connection:
25 | - close
26 | Content-Type:
27 | - application/xml
28 | Date:
29 | - Mon, 15 Mar 2021 12:42:09 GMT
30 | Server:
31 | - AmazonS3
32 | x-amz-id-2:
33 | - Uxbvn0DuolTa2AqkRwcceHqdCZVahFU0qw/45VLhnMfc82tYqHEzaroYuBl/1FZU8w83Pk410sg=
34 | x-amz-request-id:
35 | - 1DNK9WMRPW662AXZ
36 | status:
37 | code: 400
38 | message: Bad Request
39 | - request:
40 | body: null
41 | headers:
42 | User-Agent:
43 | - !!binary |
44 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
45 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
46 | X-Amz-Content-SHA256:
47 | - !!binary |
48 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
49 | ODUyYjg1NQ==
50 | X-Amz-Date:
51 | - !!binary |
52 | MjAyMTAzMTVUMTI0MjEwWg==
53 | authorization:
54 | - fake-authz-header
55 | method: HEAD
56 | uri: https://test-giftless.s3.amazonaws.com/
57 | response:
58 | body:
59 | string: ''
60 | headers:
61 | Connection:
62 | - close
63 | Content-Type:
64 | - application/xml
65 | Date:
66 | - Mon, 15 Mar 2021 12:42:10 GMT
67 | Server:
68 | - AmazonS3
69 | x-amz-bucket-region:
70 | - eu-west-1
71 | x-amz-id-2:
72 | - llYMjZJLtebzJ5KIpxY8PB9OY3soeQQSlzNaksO35vDjipKfD55zhgKPaufBOOq42QI0Qsfx0sM=
73 | x-amz-request-id:
74 | - 1DNRX1D6CPKYR0PA
75 | status:
76 | code: 400
77 | message: Bad Request
78 | - request:
79 | body: null
80 | headers:
81 | User-Agent:
82 | - !!binary |
83 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
84 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
85 | X-Amz-Content-SHA256:
86 | - !!binary |
87 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
88 | ODUyYjg1NQ==
89 | X-Amz-Date:
90 | - !!binary |
91 | MjAyMTAzMTVUMTI0MjEwWg==
92 | authorization:
93 | - fake-authz-header
94 | method: HEAD
95 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/
96 | response:
97 | body:
98 | string: ''
99 | headers:
100 | Content-Type:
101 | - application/xml
102 | Date:
103 | - Mon, 15 Mar 2021 12:42:12 GMT
104 | Server:
105 | - AmazonS3
106 | x-amz-bucket-region:
107 | - eu-west-1
108 | x-amz-id-2:
109 | - 9sd6A65lfW0zrZBRDFCd7Gv81B+wJhc7QzYEvhTPBPywNXgwxal0cU45JHPBx7ZjV1Q7oZEV89o=
110 | x-amz-request-id:
111 | - SVJ2WMB5620NM26A
112 | status:
113 | code: 200
114 | message: OK
115 | - request:
116 | body: null
117 | headers:
118 | User-Agent:
119 | - !!binary |
120 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
121 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
122 | X-Amz-Content-SHA256:
123 | - !!binary |
124 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
125 | ODUyYjg1NQ==
126 | X-Amz-Date:
127 | - !!binary |
128 | MjAyMTAzMTVUMTI0MjExWg==
129 | authorization:
130 | - fake-authz-header
131 | method: HEAD
132 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
133 | response:
134 | body:
135 | string: ''
136 | headers:
137 | Content-Type:
138 | - application/xml
139 | Date:
140 | - Mon, 15 Mar 2021 12:42:10 GMT
141 | Server:
142 | - AmazonS3
143 | x-amz-id-2:
144 | - hpalQeiZTQDZePyPhFgLikKwarRStCquyB5atNZOX2DWBQq6aZASPj3KPdTWaRDVUxqin1t564w=
145 | x-amz-request-id:
146 | - SVJFBMK519ZEJQY9
147 | status:
148 | code: 404
149 | message: Not Found
150 | - request:
151 | body: null
152 | headers:
153 | User-Agent:
154 | - !!binary |
155 | Qm90bzMvMS4xNy4yNiBQeXRob24vMy44LjUgTGludXgvNS4xMS41LTA1MTEwNS1nZW5lcmljIEJv
156 | dG9jb3JlLzEuMjAuMjYgUmVzb3VyY2U=
157 | X-Amz-Content-SHA256:
158 | - !!binary |
159 | ZTNiMGM0NDI5OGZjMWMxNDlhZmJmNGM4OTk2ZmI5MjQyN2FlNDFlNDY0OWI5MzRjYTQ5NTk5MWI3
160 | ODUyYjg1NQ==
161 | X-Amz-Date:
162 | - !!binary |
163 | MjAyMTAzMTVUMTI0MjExWg==
164 | authorization:
165 | - fake-authz-header
166 | method: GET
167 | uri: https://test-giftless.s3.eu-west-1.amazonaws.com/?encoding-type=url
168 | response:
169 | body:
170 | string: '
171 |
172 | test-giftless1000urlfalse'
173 | headers:
174 | Content-Type:
175 | - application/xml
176 | Date:
177 | - Mon, 15 Mar 2021 12:42:12 GMT
178 | Server:
179 | - AmazonS3
180 | Transfer-Encoding:
181 | - chunked
182 | x-amz-bucket-region:
183 | - eu-west-1
184 | x-amz-id-2:
185 | - YA75SF3OpUpq102HX+YTFBROyW4kXQUmA+2PEfaE4w8nd2KJNBMcOw8ytGd17KiwxathZvuCRS4=
186 | x-amz-request-id:
187 | - SVJ0HC1495K6NFHN
188 | status:
189 | code: 200
190 | message: OK
191 | version: 1
192 |
--------------------------------------------------------------------------------
/giftless/transfer/__init__.py:
--------------------------------------------------------------------------------
1 | """Transfer adapters.
2 |
3 | See
4 | https://github.com/git-lfs/git-lfs/blob/master/docs/api/basic-transfers.md
5 | for more information about what transfer APIs do in Git LFS.
6 | """
7 | from abc import ABC, abstractmethod
8 | from collections.abc import Callable
9 | from functools import partial
10 | from typing import Any, cast
11 |
12 | from flask import Flask
13 |
14 | from giftless.auth import (
15 | Authentication,
16 | PreAuthorizedActionAuthenticator,
17 | authentication,
18 | )
19 | from giftless.auth.identity import Identity
20 | from giftless.util import add_query_params, get_callable
21 | from giftless.view import ViewProvider
22 |
23 | _registered_adapters: dict[str, "TransferAdapter"] = {}
24 |
25 |
26 | class TransferAdapter(ABC): # noqa:B024
27 | """A transfer adapter tells Git LFS Server how to respond to batch
28 | API requests.
29 | """
30 |
31 | # We don't want these to be abstract methods because the test suite
32 | # actually instantiates a TransferAdapter, even though it's an ABC.
33 | def upload(
34 | self,
35 | organization: str,
36 | repo: str,
37 | oid: str,
38 | size: int,
39 | extra: dict[str, Any] | None = None,
40 | ) -> dict:
41 | raise NotImplementedError(
42 | "This transfer adapter is not fully implemented"
43 | )
44 |
45 | def download(
46 | self,
47 | organization: str,
48 | repo: str,
49 | oid: str,
50 | size: int,
51 | extra: dict[str, Any] | None = None,
52 | ) -> dict:
53 | raise NotImplementedError(
54 | "This transfer adapter is not fully implemented"
55 | )
56 |
57 | def get_action(
58 | self, name: str, organization: str, repo: str
59 | ) -> Callable[[str, int], dict]:
60 | """Shortcut for quickly getting an action callable for
61 | transfer adapter objects.
62 | """
63 | return partial(
64 | getattr(self, name), organization=organization, repo=repo
65 | )
66 |
67 |
68 | class PreAuthorizingTransferAdapter(TransferAdapter, ABC):
69 | """A transfer adapter that can pre-authorize one or more of the
70 | actions it supports.
71 | """
72 |
73 | @abstractmethod
74 | def __init__(self) -> None:
75 | #
76 | # These were class attributes, but at least _auth_module ought to
77 | # be an instance attribute instead.
78 | #
79 | self.VERIFY_LIFETIME = 12 * 60 * 60 # Can be quite a while
80 | if not hasattr(self, "_auth_module"):
81 | self._auth_module: Authentication | None = None
82 |
83 | def set_auth_module(self, auth_module: Authentication) -> None:
84 | self._auth_module = auth_module
85 |
86 | @property
87 | def _preauth_handler_and_identity(
88 | self,
89 | ) -> tuple[PreAuthorizedActionAuthenticator | None, Identity | None]:
90 | if (
91 | self._auth_module is None
92 | or self._auth_module.preauth_handler is None
93 | ):
94 | return None, None
95 | handler = cast(
96 | PreAuthorizedActionAuthenticator, self._auth_module.preauth_handler
97 | )
98 | identity = self._auth_module.get_identity()
99 | return handler, identity
100 |
101 | @property
102 | def _provides_preauth(self) -> bool:
103 | return None not in self._preauth_handler_and_identity
104 |
105 | def _preauth_url(
106 | self,
107 | original_url: str,
108 | org: str,
109 | repo: str,
110 | actions: set[str] | None = None,
111 | oid: str | None = None,
112 | lifetime: int | None = None,
113 | ) -> str:
114 | handler, identity = self._preauth_handler_and_identity
115 | if handler is None or identity is None:
116 | return original_url
117 |
118 | params = handler.get_authz_query_params(
119 | identity, org, repo, actions, oid, lifetime=lifetime
120 | )
121 |
122 | return add_query_params(original_url, params)
123 |
124 | def _preauth_headers(
125 | self,
126 | org: str,
127 | repo: str,
128 | actions: set[str] | None = None,
129 | oid: str | None = None,
130 | lifetime: int | None = None,
131 | ) -> dict[str, str]:
132 | handler, identity = self._preauth_handler_and_identity
133 | if handler is None or identity is None:
134 | return {}
135 |
136 | return handler.get_authz_header(
137 | identity, org, repo, actions, oid, lifetime=lifetime
138 | )
139 |
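# A minimal sketch (not part of the upstream module) of how a concrete
# subclass might use the helpers above: _preauth_url() (or
# _preauth_headers()) is called while building an action response so the
# returned href carries pre-authorization parameters. The class name,
# storage URL and expiry value below are hypothetical.
class _ExamplePreAuthAdapter(PreAuthorizingTransferAdapter):
    def __init__(self) -> None:
        super().__init__()

    def download(
        self,
        organization: str,
        repo: str,
        oid: str,
        size: int,
        extra: dict[str, Any] | None = None,
    ) -> dict:
        # Embed pre-authorization query params in the download URL
        href = self._preauth_url(
            f"https://storage.example.com/{organization}/{repo}/{oid}",
            organization,
            repo,
            actions={"download"},
            oid=oid,
        )
        return {
            "oid": oid,
            "size": size,
            "actions": {"download": {"href": href, "expires_in": 900}},
        }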
140 |
141 | def init_flask_app(app: Flask) -> None:
142 | """Initialize a flask app instance with transfer adapters.
143 |
144 | This will:
145 | - Instantiate all transfer adapters defined in config
146 | - Register any Flask views provided by these adapters
147 | """
148 | config = app.config.get("TRANSFER_ADAPTERS", {})
149 | adapters = {k: _init_adapter(v) for k, v in config.items()}
150 | for k, adapter in adapters.items():
151 | register_adapter(k, adapter)
152 |
153 | for adapter in (
154 | a for a in _registered_adapters.values() if isinstance(a, ViewProvider)
155 | ):
156 | adapter.register_views(app)
157 |
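# A minimal sketch (not part of the upstream module) of the shape of the
# TRANSFER_ADAPTERS config consumed by init_flask_app() / _init_adapter():
# each key becomes the registered transfer type, "factory" is a dotted
# path resolved by get_callable(), and "options" are passed to the factory
# as keyword arguments. The factory path and option names are placeholders.
_EXAMPLE_TRANSFER_ADAPTERS_CONFIG: dict[str, dict] = {
    "basic": {
        "factory": "some.module:some_factory",  # placeholder dotted path
        "options": {"some_option": "some-value"},  # placeholder kwargs
    },
}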
158 |
159 | def register_adapter(key: str, adapter: TransferAdapter) -> None:
160 | """Register a transfer adapter."""
161 | _registered_adapters[key] = adapter
162 |
163 |
164 | def match_transfer_adapter(
165 | transfers: list[str],
166 | ) -> tuple[str, TransferAdapter]:
167 |     """Select the first requested transfer type with a registered adapter."""
168 | for t in transfers:
169 | if t in _registered_adapters:
170 | return t, _registered_adapters[t]
171 | raise ValueError(f"Unable to match any transfer adapter: {transfers}")
172 |
173 |
174 | def _init_adapter(config: dict) -> TransferAdapter:
175 | """Call adapter factory to create a transfer adapter instance."""
176 | factory: Callable[..., TransferAdapter] = get_callable(config["factory"])
177 | adapter: TransferAdapter = factory(**config.get("options", {}))
178 | if isinstance(adapter, PreAuthorizingTransferAdapter):
179 | adapter.set_auth_module(authentication)
180 | return adapter
181 |
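# A minimal sketch (not part of the upstream module) of how the registry
# functions above fit together; the transfer type names are placeholders.
if __name__ == "__main__":
    # TransferAdapter is instantiable despite being an ABC (see the note in
    # the class body), so a bare instance is enough for the illustration.
    register_adapter("basic", TransferAdapter())

    # The first transfer type requested by the client that has a registered
    # adapter wins; an unmatched list raises ValueError, which the batch
    # view turns into an InvalidPayload error.
    name, adapter = match_transfer_adapter(["some-other-transfer", "basic"])
    assert name == "basic"
    assert adapter is _registered_adapters["basic"]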
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestGoogleCloudStorageBackend.test_exists_not_exists.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: assertion=eyJ0eXAiOiAiSldUIiwgImFsZyI6ICJSUzI1NiIsICJraWQiOiAiMjg5ODJmN2JiOTg5YmE2NjkzY2Q1OTllMzMxZWI2OWJkZjE1OGIzMCJ9.eyJpYXQiOiAxNjAzMjIxOTc1LCAiZXhwIjogMTYwMzIyNTU3NSwgImlzcyI6ICJzaGFoYXItdGVzdHNAZ2lmdGxlc3MtdGVzdHMuaWFtLmdzZXJ2aWNlYWNjb3VudC5jb20iLCAiYXVkIjogImh0dHBzOi8vb2F1dGgyLmdvb2dsZWFwaXMuY29tL3Rva2VuIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5mdWxsX2NvbnRyb2wgaHR0cHM6Ly93d3cuZ29vZ2xlYXBpcy5jb20vYXV0aC9kZXZzdG9yYWdlLnJlYWRfb25seSBodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9hdXRoL2RldnN0b3JhZ2UucmVhZF93cml0ZSJ9.T7Mi9qew2IiVxFv557uzJhep7wcX_vFrwDgzPOyNroASzaRKNO3XEiwEfy_2wk41RxwqL9k2uGMqlWVuPttlfMru8yOPAv8bDKCn3l2_vqLehpERKG-jBdP9WDtJMCJojEaRbAEA6LUxdV0Hw_9kzyA_rR7FyGyk60egKTIkGNT9g-CrssfDw3ew-UmTB7g1RQkVMTHvReAqdKOvx2mOjwVi8NJj0snaXQFkW0ET7nXYXuDvm4O3mk8xcNlMs1r_BYF2xuIb72k-qZ1b5EZJiebNHSmIUl_fwsjPpDdKBecHPJ7GVS3FvTescsJqZFg7vUVZ3p4Ozn_szgeXDoXZOA&grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer
4 | headers:
5 | Accept:
6 | - '*/*'
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | Content-Length:
12 | - '955'
13 | User-Agent:
14 | - python-requests/2.24.0
15 | content-type:
16 | - application/x-www-form-urlencoded
17 | method: POST
18 | uri: https://oauth2.googleapis.com/token
19 | response:
20 | body:
21 | string: !!binary |
22 | H4sIAAAAAAAC/x3P23JDQAAA0H/Z58iwRapvpIsSU3fiZUfsmroE2UUjnf57Mz1/cH5AWVWUczyP
23 | HR3AG9hKqO2rvTt5hhwQ3z+Z7wKWCcwOlhXfILNddpK4yoMsDiN7iSSvgKaUjmGHdAUpo+N2QlyP
24 | bScPXnR2i6N4MXVC4Ff+KItrPzl2HBEuhPUwp61m3DI1s1KxeS3KuXew0q7HEfYNU9e8Z1UVlmb6
25 | mfNkbe/jo/7uXF8RUJSgxhUDvTMGB2mkxRe4heF2NXCGURony3o20JIKH2Q9gB2g96lhlOPm2XtR
26 | NG0H/q943ib6DBu0ZJSB3z+ENKrZCgEAAA==
27 | headers:
28 | Alt-Svc:
29 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-T051=":443"; ma=2592000,h3-T050=":443";
30 | ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443"; ma=2592000,quic=":443";
31 | ma=2592000; v="46,43"
32 | Cache-Control:
33 | - private
34 | Content-Encoding:
35 | - gzip
36 | Content-Type:
37 | - application/json; charset=UTF-8
38 | Date:
39 | - Tue, 20 Oct 2020 19:26:16 GMT
40 | Server:
41 | - scaffolding on HTTPServer2
42 | Transfer-Encoding:
43 | - chunked
44 | Vary:
45 | - Origin
46 | - X-Origin
47 | - Referer
48 | X-Content-Type-Options:
49 | - nosniff
50 | X-Frame-Options:
51 | - SAMEORIGIN
52 | X-XSS-Protection:
53 | - '0'
54 | status:
55 | code: 200
56 | message: OK
57 | - request:
58 | body: null
59 | headers:
60 | Accept:
61 | - '*/*'
62 | Accept-Encoding:
63 | - gzip
64 | Connection:
65 | - keep-alive
66 | User-Agent:
67 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
68 | X-Goog-API-Client:
69 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
70 | authorization:
71 | - fake-authz-header
72 | method: GET
73 | uri: https://storage.googleapis.com/storage/v1/b/giftless-tests-20200818/o/giftless-tests%2Forg%2Frepo%2F2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824?fields=name&prettyPrint=false
74 | response:
75 | body:
76 | string: '{"error":{"code":404,"message":"No such object: giftless-tests-20200818/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824","errors":[{"message":"No
77 | such object: giftless-tests-20200818/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824","domain":"global","reason":"notFound"}]}}'
78 | headers:
79 | Alt-Svc:
80 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-27=":443"; ma=2592000,h3-T051=":443";
81 | ma=2592000,h3-T050=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443";
82 | ma=2592000,quic=":443"; ma=2592000; v="46,43"
83 | Cache-Control:
84 | - no-cache, no-store, max-age=0, must-revalidate
85 | Content-Length:
86 | - '355'
87 | Content-Type:
88 | - application/json; charset=UTF-8
89 | Date:
90 | - Tue, 20 Oct 2020 19:26:16 GMT
91 | Expires:
92 | - Mon, 01 Jan 1990 00:00:00 GMT
93 | Pragma:
94 | - no-cache
95 | Server:
96 | - UploadServer
97 | Vary:
98 | - Origin
99 | - X-Origin
100 | X-GUploader-UploadID:
101 | - ABg5-Ux-Mi2Gmq93VDO_OyJj9dw3bn8kA4cetiXMg9SavjrXJXvbs8dG7jCVBLgOb7ECUOIpastNonazGJ--3H1h6CM
102 | status:
103 | code: 404
104 | message: Not Found
105 | - request:
106 | body: null
107 | headers:
108 | Accept:
109 | - '*/*'
110 | Accept-Encoding:
111 | - gzip
112 | Connection:
113 | - keep-alive
114 | User-Agent:
115 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
116 | X-Goog-API-Client:
117 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
118 | authorization:
119 | - fake-authz-header
120 | method: GET
121 | uri: https://storage.googleapis.com/storage/v1/b/giftless-tests-20200818/o?projection=noAcl&prefix=giftless-tests%2F&prettyPrint=false
122 | response:
123 | body:
124 | string: '{"kind":"storage#objects"}'
125 | headers:
126 | Alt-Svc:
127 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-27=":443"; ma=2592000,h3-T051=":443";
128 | ma=2592000,h3-T050=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443";
129 | ma=2592000,quic=":443"; ma=2592000; v="46,43"
130 | Cache-Control:
131 | - private, max-age=0, must-revalidate, no-transform
132 | Content-Length:
133 | - '26'
134 | Content-Type:
135 | - application/json; charset=UTF-8
136 | Date:
137 | - Tue, 20 Oct 2020 19:26:16 GMT
138 | Expires:
139 | - Tue, 20 Oct 2020 19:26:16 GMT
140 | Server:
141 | - UploadServer
142 | Vary:
143 | - Origin
144 | - X-Origin
145 | X-GUploader-UploadID:
146 | - ABg5-Uy9mlel-ClMDNP5oYagDGfROmwegqLrZIz4qA5ao4vKZMNpoAPUvElAMCTeWKXTOsr9JdDp8P3yVpmcnTw80g
147 | status:
148 | code: 200
149 | message: OK
150 | version: 1
151 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestGoogleCloudStorageBackend.test_get_size_not_existing.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: assertion=eyJ0eXAiOiAiSldUIiwgImFsZyI6ICJSUzI1NiIsICJraWQiOiAiMjg5ODJmN2JiOTg5YmE2NjkzY2Q1OTllMzMxZWI2OWJkZjE1OGIzMCJ9.eyJpYXQiOiAxNjAzMjIxOTcyLCAiZXhwIjogMTYwMzIyNTU3MiwgImlzcyI6ICJzaGFoYXItdGVzdHNAZ2lmdGxlc3MtdGVzdHMuaWFtLmdzZXJ2aWNlYWNjb3VudC5jb20iLCAiYXVkIjogImh0dHBzOi8vb2F1dGgyLmdvb2dsZWFwaXMuY29tL3Rva2VuIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5mdWxsX2NvbnRyb2wgaHR0cHM6Ly93d3cuZ29vZ2xlYXBpcy5jb20vYXV0aC9kZXZzdG9yYWdlLnJlYWRfb25seSBodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9hdXRoL2RldnN0b3JhZ2UucmVhZF93cml0ZSJ9.BItqT1bRKOokaNvTcEP9b-a7cp7_GupDE4-y8gddRyj-ysOH7cIYHrsj5BEEYzgzCEueFdkOezsT2vgYopNH8CEsd-hgadRvZD6lqtPJtD4IzRvcrQBi9vYRepP19PmGRuKmq9eQCgxXlt0g090-NDgVl7WUaV6brnhqNodNVbHJHsQme-DZFOlmmWJorccRStlQEN0W2y2slW29cCVnO1YWKoE48b7fLi6CvO-ddSXYtSxlyEvXVQrjQLR9dknGJT2hSHmTSpkIyexjwwQpaAtbPLlv24yt7D1RBIqM4srcsHt4Sk2idQ6Qqsr5Ly_PuuLyIPaahHfFTqQ-xnuw1w&grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer
4 | headers:
5 | Accept:
6 | - '*/*'
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | Content-Length:
12 | - '955'
13 | User-Agent:
14 | - python-requests/2.24.0
15 | content-type:
16 | - application/x-www-form-urlencoded
17 | method: POST
18 | uri: https://oauth2.googleapis.com/token
19 | response:
20 | body:
21 | string: !!binary |
22 | H4sIAAAAAAAC/x3PyVKDMAAA0H/JuXSKBQVvbGGTFjps8cIECIsgpgk2ouO/2/H9wfsBuGkI59X6
23 | MZEFPIMNP+j7Zh/SyFSS7lAM2hLYVvtyllZRiNgVFCMs6thvzczZ4NU8B7HeeqOL3eqC6WB4poSQ
24 | mXJtLr9hVOHekMX8Xq62/5Zq8i1sT72KkGOpjtohJR0NvyuDOsoGlsz4iVi5k/rJBXdBlgnFGEKY
25 | F/Bzo5qQ0aMDS7LxqVg4zLiVw9Ky0agei5OkT4eeyZWEbs5V87BWrwrDTLyCHSBfdGSEV+O9d1R1
26 | fQf+r9W6UXIPmwQzwsDvH/K7NH4KAQAA
27 | headers:
28 | Alt-Svc:
29 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-T051=":443"; ma=2592000,h3-T050=":443";
30 | ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443"; ma=2592000,quic=":443";
31 | ma=2592000; v="46,43"
32 | Cache-Control:
33 | - private
34 | Content-Encoding:
35 | - gzip
36 | Content-Type:
37 | - application/json; charset=UTF-8
38 | Date:
39 | - Tue, 20 Oct 2020 19:26:12 GMT
40 | Server:
41 | - scaffolding on HTTPServer2
42 | Transfer-Encoding:
43 | - chunked
44 | Vary:
45 | - Origin
46 | - X-Origin
47 | - Referer
48 | X-Content-Type-Options:
49 | - nosniff
50 | X-Frame-Options:
51 | - SAMEORIGIN
52 | X-XSS-Protection:
53 | - '0'
54 | status:
55 | code: 200
56 | message: OK
57 | - request:
58 | body: null
59 | headers:
60 | Accept:
61 | - '*/*'
62 | Accept-Encoding:
63 | - gzip
64 | Connection:
65 | - keep-alive
66 | User-Agent:
67 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
68 | X-Goog-API-Client:
69 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
70 | authorization:
71 | - fake-authz-header
72 | method: GET
73 | uri: https://storage.googleapis.com/storage/v1/b/giftless-tests-20200818/o/giftless-tests%2Forg%2Frepo%2F2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824?projection=noAcl&prettyPrint=false
74 | response:
75 | body:
76 | string: '{"error":{"code":404,"message":"No such object: giftless-tests-20200818/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824","errors":[{"message":"No
77 | such object: giftless-tests-20200818/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824","domain":"global","reason":"notFound"}]}}'
78 | headers:
79 | Alt-Svc:
80 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-27=":443"; ma=2592000,h3-T051=":443";
81 | ma=2592000,h3-T050=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443";
82 | ma=2592000,quic=":443"; ma=2592000; v="46,43"
83 | Cache-Control:
84 | - no-cache, no-store, max-age=0, must-revalidate
85 | Content-Length:
86 | - '355'
87 | Content-Type:
88 | - application/json; charset=UTF-8
89 | Date:
90 | - Tue, 20 Oct 2020 19:26:13 GMT
91 | Expires:
92 | - Mon, 01 Jan 1990 00:00:00 GMT
93 | Pragma:
94 | - no-cache
95 | Server:
96 | - UploadServer
97 | Vary:
98 | - Origin
99 | - X-Origin
100 | X-GUploader-UploadID:
101 | - ABg5-UyC4P-hDUlwQHHjmqMufcew3WNb9VF0DqAFaXo7XGD0l-FyxQU9pb4CcdwnRUrbPBqaENK8Xk0-FQYhfRSp5g
102 | status:
103 | code: 404
104 | message: Not Found
105 | - request:
106 | body: null
107 | headers:
108 | Accept:
109 | - '*/*'
110 | Accept-Encoding:
111 | - gzip
112 | Connection:
113 | - keep-alive
114 | User-Agent:
115 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
116 | X-Goog-API-Client:
117 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
118 | authorization:
119 | - fake-authz-header
120 | method: GET
121 | uri: https://storage.googleapis.com/storage/v1/b/giftless-tests-20200818/o?projection=noAcl&prefix=giftless-tests%2F&prettyPrint=false
122 | response:
123 | body:
124 | string: '{"kind":"storage#objects"}'
125 | headers:
126 | Alt-Svc:
127 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-27=":443"; ma=2592000,h3-T051=":443";
128 | ma=2592000,h3-T050=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443";
129 | ma=2592000,quic=":443"; ma=2592000; v="46,43"
130 | Cache-Control:
131 | - private, max-age=0, must-revalidate, no-transform
132 | Content-Length:
133 | - '26'
134 | Content-Type:
135 | - application/json; charset=UTF-8
136 | Date:
137 | - Tue, 20 Oct 2020 19:26:13 GMT
138 | Expires:
139 | - Tue, 20 Oct 2020 19:26:13 GMT
140 | Server:
141 | - UploadServer
142 | Vary:
143 | - Origin
144 | - X-Origin
145 | X-GUploader-UploadID:
146 | - ABg5-UyCHZ6GhzzqGsF0CVg1mpvtUWS02h-k64mJnSWbvMcK4mvGxuvaT6nJjZhQmuXOk8JBhOOAUUwykPwh1KZ_Gg
147 | status:
148 | code: 200
149 | message: OK
150 | version: 1
151 |
--------------------------------------------------------------------------------
/tests/storage/cassettes/TestGoogleCloudStorageBackend.test_verify_object_not_there.yaml:
--------------------------------------------------------------------------------
1 | interactions:
2 | - request:
3 | body: assertion=eyJ0eXAiOiAiSldUIiwgImFsZyI6ICJSUzI1NiIsICJraWQiOiAiMjg5ODJmN2JiOTg5YmE2NjkzY2Q1OTllMzMxZWI2OWJkZjE1OGIzMCJ9.eyJpYXQiOiAxNjAzMjIxOTgwLCAiZXhwIjogMTYwMzIyNTU4MCwgImlzcyI6ICJzaGFoYXItdGVzdHNAZ2lmdGxlc3MtdGVzdHMuaWFtLmdzZXJ2aWNlYWNjb3VudC5jb20iLCAiYXVkIjogImh0dHBzOi8vb2F1dGgyLmdvb2dsZWFwaXMuY29tL3Rva2VuIiwgInNjb3BlIjogImh0dHBzOi8vd3d3Lmdvb2dsZWFwaXMuY29tL2F1dGgvZGV2c3RvcmFnZS5mdWxsX2NvbnRyb2wgaHR0cHM6Ly93d3cuZ29vZ2xlYXBpcy5jb20vYXV0aC9kZXZzdG9yYWdlLnJlYWRfb25seSBodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9hdXRoL2RldnN0b3JhZ2UucmVhZF93cml0ZSJ9.ks7AQ8cxY0i9zcrQj7kstgsImkzL3Pz2P4ov4qBLt_RaFizPrLlfzcMA6wmUAfpSotjAGWMDGVxcOZj4gLKZ6gGfJZz6RlJp5t8aKdFKVpxNwdmbdEbJpIuEX3wHJhyGjoNHSlRhsUKb4G6Fc5pkP7gS3xJrGiSwbtY8Y-3dMqTnXJl9Q9WMLsGvJMao7TvDwQTkVNFZHgO7i2v0zpbLIy5_jnwYmkcOxQo-1BVDefRc0TzN-j7FYoSldac44scHc8_R_Ner_-DbwLM_9mRnT7HePlbGmtSUvCRGxQCEHXw5Jnonl4eZjxNkLLy1zXlUyWNhJ38YaO6b3ltQeWl0dA&grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer
4 | headers:
5 | Accept:
6 | - '*/*'
7 | Accept-Encoding:
8 | - gzip, deflate
9 | Connection:
10 | - keep-alive
11 | Content-Length:
12 | - '955'
13 | User-Agent:
14 | - python-requests/2.24.0
15 | content-type:
16 | - application/x-www-form-urlencoded
17 | method: POST
18 | uri: https://oauth2.googleapis.com/token
19 | response:
20 | body:
21 | string: !!binary |
22 | H4sIAAAAAAAC/x3PyXKCMAAA0H/JWRyQvTcoUkmUzYLQC8OS2hSQLWKg03+v0/cH7wfkZYmnKaNd
23 | jW/gBSz5Tt+WW9SfTCnAZlrMrJNHYjXu2U1tIp7bK/Si11kz46Hnl8GjWhNi3ff0to0iSJLk2B/J
24 | w4fJt7deZh05b5ythcKwIEbvfPTlHLDCDQb+qCqRa9Pyk3YrRbElBvAUXhmKqpa5RSyMd06ZuHpC
25 | 5gM5yXyTURLUSqP4vNHaGXMv/J43lrWq3iW4Ckw9dIq026vEtpMCNiokyMo7FWwAZj0Z8ZSRZ0+U
26 | dX0D/q8ZXXr8DJs4H/EIfv8AbEfLTwoBAAA=
27 | headers:
28 | Alt-Svc:
29 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-T051=":443"; ma=2592000,h3-T050=":443";
30 | ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443"; ma=2592000,quic=":443";
31 | ma=2592000; v="46,43"
32 | Cache-Control:
33 | - private
34 | Content-Encoding:
35 | - gzip
36 | Content-Type:
37 | - application/json; charset=UTF-8
38 | Date:
39 | - Tue, 20 Oct 2020 19:26:20 GMT
40 | Server:
41 | - scaffolding on HTTPServer2
42 | Transfer-Encoding:
43 | - chunked
44 | Vary:
45 | - Origin
46 | - X-Origin
47 | - Referer
48 | X-Content-Type-Options:
49 | - nosniff
50 | X-Frame-Options:
51 | - SAMEORIGIN
52 | X-XSS-Protection:
53 | - '0'
54 | status:
55 | code: 200
56 | message: OK
57 | - request:
58 | body: null
59 | headers:
60 | Accept:
61 | - '*/*'
62 | Accept-Encoding:
63 | - gzip
64 | Connection:
65 | - keep-alive
66 | User-Agent:
67 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
68 | X-Goog-API-Client:
69 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
70 | authorization:
71 | - fake-authz-header
72 | method: GET
73 | uri: https://storage.googleapis.com/storage/v1/b/giftless-tests-20200818/o/giftless-tests%2Forg%2Frepo%2F2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824?projection=noAcl&prettyPrint=false
74 | response:
75 | body:
76 | string: '{"error":{"code":404,"message":"No such object: giftless-tests-20200818/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824","errors":[{"message":"No
77 | such object: giftless-tests-20200818/giftless-tests/org/repo/2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824","domain":"global","reason":"notFound"}]}}'
78 | headers:
79 | Alt-Svc:
80 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-27=":443"; ma=2592000,h3-T051=":443";
81 | ma=2592000,h3-T050=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443";
82 | ma=2592000,quic=":443"; ma=2592000; v="46,43"
83 | Cache-Control:
84 | - no-cache, no-store, max-age=0, must-revalidate
85 | Content-Length:
86 | - '355'
87 | Content-Type:
88 | - application/json; charset=UTF-8
89 | Date:
90 | - Tue, 20 Oct 2020 19:26:20 GMT
91 | Expires:
92 | - Mon, 01 Jan 1990 00:00:00 GMT
93 | Pragma:
94 | - no-cache
95 | Server:
96 | - UploadServer
97 | Vary:
98 | - Origin
99 | - X-Origin
100 | X-GUploader-UploadID:
101 | - ABg5-Ux2QROITmGWdO-rGMXVsPMRQ-avoCd-PDwIanuCd9imJkh5_O16jsGFe3O8izJQSzavD9cCm5juhSOhySql_g
102 | status:
103 | code: 404
104 | message: Not Found
105 | - request:
106 | body: null
107 | headers:
108 | Accept:
109 | - '*/*'
110 | Accept-Encoding:
111 | - gzip
112 | Connection:
113 | - keep-alive
114 | User-Agent:
115 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
116 | X-Goog-API-Client:
117 | - gcloud-python/1.28.1 gl-python/3.8.5 gax/1.22.3 gccl/1.28.1
118 | authorization:
119 | - fake-authz-header
120 | method: GET
121 | uri: https://storage.googleapis.com/storage/v1/b/giftless-tests-20200818/o?projection=noAcl&prefix=giftless-tests%2F&prettyPrint=false
122 | response:
123 | body:
124 | string: '{"kind":"storage#objects"}'
125 | headers:
126 | Alt-Svc:
127 | - h3-Q050=":443"; ma=2592000,h3-29=":443"; ma=2592000,h3-27=":443"; ma=2592000,h3-T051=":443";
128 | ma=2592000,h3-T050=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443";
129 | ma=2592000,quic=":443"; ma=2592000; v="46,43"
130 | Cache-Control:
131 | - private, max-age=0, must-revalidate, no-transform
132 | Content-Length:
133 | - '26'
134 | Content-Type:
135 | - application/json; charset=UTF-8
136 | Date:
137 | - Tue, 20 Oct 2020 19:26:20 GMT
138 | Expires:
139 | - Tue, 20 Oct 2020 19:26:20 GMT
140 | Server:
141 | - UploadServer
142 | Vary:
143 | - Origin
144 | - X-Origin
145 | X-GUploader-UploadID:
146 | - ABg5-Uz-M1F78nIXR3e9pz40q2gaECjzcF5MAzSrIWcLmmqPmqRICzr9IPGS8UOGsTs8PZDyf8bTiusKXSWxEqqIBw
147 | status:
148 | code: 200
149 | message: OK
150 | version: 1
151 |
--------------------------------------------------------------------------------