├── s3fs
├── tests
│ ├── __init__.py
│ ├── derived
│ │ ├── __init__.py
│ │ ├── s3fs_test.py
│ │ └── s3fs_fixtures.py
│ ├── test_utils.py
│ └── test_mapping.py
├── __init__.py
├── mapping.py
├── utils.py
├── errors.py
└── _version.py
├── .gitattributes
├── pytest.ini
├── requirements.txt
├── test_requirements.txt
├── docs
├── source
│ ├── _static
│ │ └── custom.css
│ ├── development.rst
│ ├── install.rst
│ ├── api.rst
│ ├── code-of-conduct.rst
│ ├── changelog.rst
│ ├── conf.py
│ └── index.rst
├── environment.yml
├── make.bat
└── Makefile
├── .gitignore
├── CONTRIBUTING.md
├── .coveragerc
├── MANIFEST.in
├── .readthedocs.yaml
├── ci
└── env.yaml
├── .pre-commit-config.yaml
├── release-procedure.md
├── README.md
├── setup.py
├── setup.cfg
├── LICENSE.txt
├── .github
└── workflows
│ └── ci.yml
└── versioneer.py
/s3fs/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/s3fs/tests/derived/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | s3fs/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | testpaths = s3fs
3 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | aiobotocore>=2.5.4,<3.0.0
2 | fsspec==2025.12.0
3 | aiohttp!=4.0.0a0, !=4.0.0a1
4 |
--------------------------------------------------------------------------------
/test_requirements.txt:
--------------------------------------------------------------------------------
1 | mock; python_version < '3.3'
2 | moto>=4
3 | flask
4 | flask_cors
5 | pytest>=4.2.0
6 | pytest-env
7 |
--------------------------------------------------------------------------------
/docs/source/_static/custom.css:
--------------------------------------------------------------------------------
1 | .classifier:before {
2 | font-style: normal;
3 | margin: 0.5em;
4 | content: ":";
5 | }
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | .cache/
3 | .pytest_cache/
4 | .python-version
5 | .idea/
6 | __pycache__
7 | dist/
8 | *.egg-info
9 | build/
10 | venv/
11 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | s3fs is a community maintained project. We welcome contributions in the form of bug reports, documentation, code, design proposals, and more.
2 |
--------------------------------------------------------------------------------
/docs/environment.yml:
--------------------------------------------------------------------------------
1 | name: s3fs
2 | channels:
3 | - defaults
4 | dependencies:
 5 |   - python=3.10
6 | - botocore
7 | - docutils<0.17
8 | - sphinx
9 | - sphinx_rtd_theme
10 |
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | include =
3 | s3fs/*
4 |
5 | omit =
6 | s3fs/tests/test*
7 |
8 | [report]
9 | show_missing = True
10 |
11 | [html]
12 | directory = coverage_html_report
13 |
--------------------------------------------------------------------------------
/s3fs/__init__.py:
--------------------------------------------------------------------------------
1 | from .core import S3FileSystem, S3File
2 | from .mapping import S3Map
3 |
4 | from ._version import get_versions
5 |
# Resolve the package version from versioneer, then drop the helper so it
# is not exposed as part of the s3fs public namespace.
__version__ = get_versions()["version"]
del get_versions
8 |
--------------------------------------------------------------------------------
/docs/source/development.rst:
--------------------------------------------------------------------------------
1 | Development
2 | ===========
3 |
4 | Create a development environment::
5 |
6 | $ pip install -r requirements.txt -r test_requirements.txt
7 |
8 | Run tests::
9 |
10 | $ pytest
11 |
--------------------------------------------------------------------------------
/s3fs/mapping.py:
--------------------------------------------------------------------------------
1 | from .core import S3FileSystem
2 |
3 |
def S3Map(root, s3=None, check=False, create=False):
    """Mirror previous class, not implemented in fsspec.

    Parameters
    ----------
    root : str
        Bucket/key prefix the mapper is rooted at.
    s3 : S3FileSystem, optional
        Filesystem to use; defaults to the current (most recently created)
        ``S3FileSystem`` instance.  The body always had this fallback via
        ``or``, so the parameter is now optional.
    check : bool
        Passed through to ``get_mapper``; verify the root is accessible.
    create : bool
        Passed through to ``get_mapper``; create the root if missing.

    Returns
    -------
    fsspec.mapping.FSMap
    """
    s3 = s3 or S3FileSystem.current()
    return s3.get_mapper(root, check=check, create=create)
8 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include s3fs *.py
2 | recursive-include docs *.rst
3 |
4 | include setup.py
 5 | include README.md
6 | include LICENSE.txt
7 | include MANIFEST.in
8 | include requirements.txt
9 |
10 | prune docs/_build
11 | include versioneer.py
12 | include s3fs/_version.py
13 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 | os: ubuntu-22.04
5 | tools:
6 | python: miniconda3-4.7
7 |
8 | conda:
9 | environment: docs/environment.yml
10 |
11 | python:
12 | install:
13 | - method: pip
14 | path: .
15 |
16 | sphinx:
17 | configuration: docs/source/conf.py
18 | fail_on_warning: true
19 |
--------------------------------------------------------------------------------
/ci/env.yaml:
--------------------------------------------------------------------------------
1 | name: test_env
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - pytest
6 | - pytest-asyncio
 7 |   - pip
9 | - ujson
10 | - requests
11 | - decorator
12 | - pytest-timeout
13 | - flake8
14 | - black
15 | - httpretty
16 | - aiobotocore
17 | - moto
18 | - flask
19 | - fsspec
20 |
--------------------------------------------------------------------------------
/s3fs/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import s3fs.utils as utils
2 |
3 |
def test_get_brange():
    # _get_brange yields inclusive (start, stop) pairs covering [0, size);
    # an uneven block size leaves a short final range.
    cases = {
        24: [(0, 23), (24, 47), (48, 71), (72, 95), (96, 99)],
        25: [(0, 24), (25, 49), (50, 74), (75, 99)],
        26: [(0, 25), (26, 51), (52, 77), (78, 99)],
    }
    for block, expected in cases.items():
        assert list(utils._get_brange(100, block)) == expected
14 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v6.0.0
4 | hooks:
5 | - id: check-yaml
6 | - id: end-of-file-fixer
7 | - id: trailing-whitespace
8 | - repo: https://github.com/psf/black-pre-commit-mirror
9 | rev: 25.9.0
10 | hooks:
11 | - id: black
12 | exclude: ^docs/
13 | - repo: https://github.com/pycqa/flake8
14 | rev: 7.3.0
15 | hooks:
16 | - id: flake8
17 | exclude: tests/|^docs/|__init__.py
18 |
--------------------------------------------------------------------------------
/docs/source/install.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | Conda
5 | -----
6 |
7 | The ``s3fs`` library and its dependencies can be installed from the
 8 | `conda-forge <https://conda-forge.org/>`_ repository using
 9 | `conda <https://docs.conda.io/>`_::
10 |
11 | $ conda install s3fs -c conda-forge
12 |
13 | PyPI
14 | ----
15 |
16 | You can install ``s3fs`` with pip::
17 |
18 | pip install s3fs
19 |
20 | Install from source
21 | -------------------
22 |
23 | You can also download the ``s3fs`` library from Github and install normally::
24 |
25 | git clone git@github.com:fsspec/s3fs
26 | cd s3fs
27 | python setup.py install
28 |
--------------------------------------------------------------------------------
/release-procedure.md:
--------------------------------------------------------------------------------
1 | 1. Verify tests on Linux, OS-X, and Windows
2 |
3 | 2. Complete entries in `docs/source/changelog.rst`.
4 |
5 | There's no need for changing version numbers in source files.
6 | The release version will be determined from the git tag (see below).
7 |
8 | 3. Tag the commit
9 |
10 | git tag 1.2.3 -m "Version 1.2.3"
11 |
12 | 4. Push new version bump commit and tag to github
13 |
14 | git push fsspec main --tags
15 |
16 | 5. Build source and wheel packages
17 |
18 | rm -rf dist/
19 | python setup.py sdist bdist_wheel --universal
20 |
21 | 6. Upload packages to PyPI
22 |
23 | twine upload dist/*
24 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | s3fs
2 | ====
3 |
 4 | [![Build Status](https://github.com/fsspec/s3fs/actions/workflows/ci.yml/badge.svg)](https://github.com/fsspec/s3fs/actions)
 5 | [![Documentation Status](https://readthedocs.org/projects/s3fs/badge/?version=latest)](https://s3fs.readthedocs.io/en/latest/?badge=latest)
6 |
7 | S3FS builds on [aiobotocore](https://aiobotocore.readthedocs.io/en/latest/)
8 | to provide a convenient Python filesystem interface for S3.
9 |
10 |
11 | Support
12 | -------
13 |
14 | Work on this repository is supported in part by:
15 |
16 | "Anaconda, Inc. - Advancing AI through open source."
17 |
18 |
19 |
--------------------------------------------------------------------------------
/docs/source/api.rst:
--------------------------------------------------------------------------------
1 | API
2 | ===
3 |
4 | .. currentmodule:: s3fs.core
5 |
6 | .. autosummary::
7 | S3FileSystem
8 | S3FileSystem.cat
9 | S3FileSystem.du
10 | S3FileSystem.exists
11 | S3FileSystem.find
12 | S3FileSystem.get
13 | S3FileSystem.glob
14 | S3FileSystem.info
15 | S3FileSystem.ls
16 | S3FileSystem.mkdir
17 | S3FileSystem.mv
18 | S3FileSystem.open
19 | S3FileSystem.put
20 | S3FileSystem.read_block
21 | S3FileSystem.rm
22 | S3FileSystem.tail
23 | S3FileSystem.touch
24 |
25 | .. autosummary::
26 | S3File
27 | S3File.close
28 | S3File.flush
29 | S3File.info
30 | S3File.read
31 | S3File.seek
32 | S3File.tell
33 | S3File.write
34 |
35 | .. currentmodule:: s3fs.mapping
36 |
37 | .. autosummary::
38 | S3Map
39 |
40 | .. currentmodule:: s3fs.core
41 |
42 | .. autoclass:: S3FileSystem
43 | :members:
44 | :inherited-members:
45 |
46 | .. autoclass:: S3File
47 | :members:
48 | :inherited-members:
49 |
50 | .. currentmodule:: s3fs.mapping
51 |
52 | .. autofunction:: S3Map
53 |
54 | .. currentmodule:: s3fs.utils
55 |
56 | .. autoclass:: ParamKwargsHelper
57 |
58 | .. autoclass:: SSEParams
59 |
--------------------------------------------------------------------------------
/s3fs/tests/derived/s3fs_test.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import fsspec.tests.abstract as abstract
4 | from s3fs.tests.derived.s3fs_fixtures import S3fsFixtures
5 |
6 |
class TestS3fsCopy(abstract.AbstractCopyTests, S3fsFixtures):
    """Run fsspec's abstract copy test-suite against the s3fs fixtures."""

    pass
9 |
10 |
class TestS3fsGet(abstract.AbstractGetTests, S3fsFixtures):
    """Run fsspec's abstract get (download) test-suite against the s3fs fixtures."""

    pass
13 |
14 |
class TestS3fsPut(abstract.AbstractPutTests, S3fsFixtures):
    """Run fsspec's abstract put (upload) test-suite against the s3fs fixtures."""

    pass
17 |
18 |
def botocore_too_old():
    """Return True when the installed botocore predates 1.33.2.

    Used below to skip tests relying on features (exclusive writes) that
    older botocore releases do not support.
    """
    import botocore
    from packaging.version import parse

    minimum = "1.33.2"
    return parse(botocore.__version__) < parse(minimum)
26 |
27 |
class TestS3fsPipe(abstract.AbstractPipeTests, S3fsFixtures):
    """fsspec's abstract pipe tests; exclusive-pipe is skipped on old botocore."""

    # Apply skipif to the inherited test rather than overriding its body.
    test_pipe_exclusive = pytest.mark.skipif(
        botocore_too_old(), reason="Older botocore doesn't support exclusive writes"
    )(abstract.AbstractPipeTests.test_pipe_exclusive)
33 |
34 |
class TestS3fsOpen(abstract.AbstractOpenTests, S3fsFixtures):
    """fsspec's abstract open tests; exclusive-open is expected to fail under moto."""

    # xfail (not skip): the server-side condition is unimplemented in moto.
    test_open_exclusive = pytest.mark.xfail(
        reason="complete_multipart_upload doesn't implement condition in moto"
    )(abstract.AbstractOpenTests.test_open_exclusive)
39 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

from setuptools import setup
import versioneer


def _read(path):
    """Return the stripped text content of *path* (used for metadata fields)."""
    with open(path, encoding="utf-8") as f:
        return f.read().strip()


setup(
    name="s3fs",
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.13",
        "Programming Language :: Python :: 3.14",
    ],
    description="Convenient Filesystem interface over S3",
    url="http://github.com/fsspec/s3fs/",
    maintainer="Martin Durant",
    maintainer_email="mdurant@continuum.io",
    license="BSD",
    keywords="s3, boto",
    packages=["s3fs"],
    python_requires=">= 3.10",
    # BUG FIX: the split() result was wrapped in an extra list, producing a
    # nested list ([["req1", "req2", ...]]) instead of a flat requirement list.
    install_requires=_read("requirements.txt").split("\n"),
    # BUG FIX: long_description was the literal string "README.md"; ship the
    # actual README content so PyPI renders the project page correctly.
    long_description=_read("README.md"),
    long_description_content_type="text/markdown",
    zip_safe=False,
)
34 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
 2 | long_description: file: README.md
3 |
4 | [versioneer]
5 | VCS = git
6 | style = pep440
7 | versionfile_source = s3fs/_version.py
8 | versionfile_build = s3fs/_version.py
9 | tag_prefix = ""
10 |
11 | [flake8]
12 | exclude = __init__.py,versioneer.py,s3fs/tests/
13 | max-line-length = 95
14 | ignore =
15 | # Extra space in brackets
16 | E20,
17 | # Multiple spaces around ","
18 | E231,E241,
19 | # Comments
20 | E26,
21 | # Import formatting
22 | E4,
23 | # Comparing types instead of isinstance
24 | E721,
25 | # Assigning lambda expression
26 | E731,
27 | # continuation line under-indented for hanging indent
28 | E121,
29 | # continuation line over-indented for hanging indent
30 | E126,
31 | # continuation line over-indented for visual indent
32 | E127,
33 | # E128 continuation line under-indented for visual indent
34 | E128,
35 | # multiple statements on one line (semicolon)
36 | E702,
37 | # line break before binary operator
38 | W503,
39 | # visually indented line with same indent as next logical line
40 | E129,
41 | # unexpected indentation
42 | E116,
43 | # redefinition of unused 'loop' from line 10
44 | F811,
45 | # local variable is assigned to but never used
46 | F841,
47 | # Ambiguous variable names
48 | E741,
49 | # line break after binary operator
50 | W504,
51 | # line too long (leave it to black!)
52 | E501,
53 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright (c) 2016, Continuum Analytics, Inc. and contributors
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without modification,
5 | are permitted provided that the following conditions are met:
6 |
7 | Redistributions of source code must retain the above copyright notice,
8 | this list of conditions and the following disclaimer.
9 |
10 | Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | Neither the name of Continuum Analytics nor the names of any contributors
15 | may be used to endorse or promote products derived from this software
16 | without specific prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
22 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
28 | THE POSSIBILITY OF SUCH DAMAGE.
29 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | test:
7 | name: Python ${{ matrix.python-version }} - AioBotocore ${{ matrix.aiobotocore-version }}
8 | runs-on: ubuntu-latest
9 | strategy:
10 | fail-fast: false
11 | matrix:
12 | python-version:
13 | - "3.10"
14 | - "3.11"
15 | - "3.12"
16 | - "3.13"
17 | - "3.14"
18 | aiobotocore-version: [">=2.5.4,<2.6.0", ">=2.7.0,<2.8.0", ">=2.8.0,<2.9.0", "<3.0.0"]
19 |
20 | env:
21 | BOTO_CONFIG: /dev/null
22 | AWS_ACCESS_KEY_ID: foobar_key
23 | AWS_SECRET_ACCESS_KEY: foobar_secret
24 |
25 | steps:
26 | - name: Checkout source
27 | uses: actions/checkout@v5
28 | with:
29 | fetch-depth: 0
30 |
31 | - name: Setup conda
32 | uses: conda-incubator/setup-miniconda@v3
33 | with:
34 | environment-file: ci/env.yaml
35 | python-version: ${{ matrix.python-version }}
36 |
37 | - name: Install
38 | shell: bash -l {0}
39 | run: |
40 | pip install git+https://github.com/fsspec/filesystem_spec
41 | pip install --upgrade "aiobotocore${{ matrix.aiobotocore-version }}"
42 | pip install . --no-deps
43 | pip list
44 |
45 | - name: Run Tests
46 | shell: bash -l {0}
47 | run: pytest -vv -s s3fs
48 |
49 |
50 | pre-commit:
51 | runs-on: ubuntu-latest
52 | steps:
53 | - uses: actions/checkout@v5
54 | - uses: actions/setup-python@v6
55 | with:
56 | python-version: "3.11"
57 | - uses: pre-commit/action@v3.0.1
58 |
--------------------------------------------------------------------------------
/s3fs/tests/derived/s3fs_fixtures.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import pytest
4 | import requests
5 | import time
6 |
7 | from fsspec.tests.abstract import AbstractFixtures
8 | from s3fs.core import S3FileSystem
9 |
10 |
# Buckets created by the class-scoped ``fs`` fixture below.
test_bucket_name = "test"
secure_bucket_name = "test-secure"
versioned_bucket_name = "test-versioned"
# Address of the local moto server started by the ``_s3_base`` fixture.
port = 5556
endpoint_uri = "http://127.0.0.1:%s/" % port
16 |
17 |
class S3fsFixtures(AbstractFixtures):
    """Fixture set that plugs s3fs into fsspec's abstract test-suite.

    Starts a local moto S3 server once per test class, creates the test
    buckets, and yields an ``S3FileSystem`` pointed at the local endpoint.
    """

    @pytest.fixture(scope="class")
    def fs(self, _s3_base, _get_boto3_client):
        """Class-scoped S3FileSystem backed by freshly-created moto buckets."""
        client = _get_boto3_client
        client.create_bucket(Bucket=test_bucket_name, ACL="public-read")

        client.create_bucket(Bucket=versioned_bucket_name, ACL="public-read")
        client.put_bucket_versioning(
            Bucket=versioned_bucket_name, VersioningConfiguration={"Status": "Enabled"}
        )

        # initialize secure bucket: the policy denies PutObject requests that
        # do not use aws:kms server-side encryption
        client.create_bucket(Bucket=secure_bucket_name, ACL="public-read")
        policy = json.dumps(
            {
                "Version": "2012-10-17",
                "Id": "PutObjPolicy",
                "Statement": [
                    {
                        "Sid": "DenyUnEncryptedObjectUploads",
                        "Effect": "Deny",
                        "Principal": "*",
                        "Action": "s3:PutObject",
                        "Resource": f"arn:aws:s3:::{secure_bucket_name}/*",
                        "Condition": {
                            "StringNotEquals": {
                                "s3:x-amz-server-side-encryption": "aws:kms"
                            }
                        },
                    }
                ],
            }
        )
        client.put_bucket_policy(Bucket=secure_bucket_name, Policy=policy)

        # Drop any cached filesystem instances so the endpoint override and
        # fresh buckets take effect for this test class.
        S3FileSystem.clear_instance_cache()
        s3 = S3FileSystem(anon=False, client_kwargs={"endpoint_url": endpoint_uri})
        s3.invalidate_cache()
        yield s3

    @pytest.fixture
    def fs_path(self):
        # Tests operate inside the plain (unversioned, unsecured) bucket.
        return test_bucket_name

    @pytest.fixture
    def supports_empty_directories(self):
        # Tells the abstract suite this backend has no empty directories.
        return False

    @pytest.fixture(scope="class")
    def _get_boto3_client(self):
        """Synchronous botocore client pointed at the local moto endpoint."""
        from botocore.session import Session

        # NB: we use the sync botocore client for setup
        session = Session()
        return session.create_client("s3", endpoint_url=endpoint_uri)

    @pytest.fixture(scope="class")
    def _s3_base(self):
        """Run a threaded moto S3 server for the duration of the test class."""
        # copy of s3_base in test_s3fs
        from moto.moto_server.threaded_moto_server import ThreadedMotoServer

        server = ThreadedMotoServer(ip_address="127.0.0.1", port=port)
        server.start()
        # Ensure some (dummy) AWS credentials are present in the environment.
        if "AWS_SECRET_ACCESS_KEY" not in os.environ:
            os.environ["AWS_SECRET_ACCESS_KEY"] = "foo"
        if "AWS_ACCESS_KEY_ID" not in os.environ:
            os.environ["AWS_ACCESS_KEY_ID"] = "foo"

        print("server up")
        yield
        print("moto done")
        server.stop()
90 |
--------------------------------------------------------------------------------
/s3fs/tests/test_mapping.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from s3fs.tests.test_s3fs import s3_base, s3, test_bucket_name
3 | from s3fs import S3Map, S3FileSystem
4 |
5 | root = test_bucket_name + "/mapping"
6 |
7 |
def test_simple(s3):
    """A mapper over an empty prefix behaves like an empty dict."""
    mapper = s3.get_mapper(root)
    assert not mapper

    assert list(mapper) == list(mapper.keys()) == []
    assert list(mapper.values()) == []
    assert list(mapper.items()) == []
    # building a second mapper over the same root must not error
    s3.get_mapper(root)
16 |
17 |
def test_default_s3filesystem(s3):
    """The mapper keeps a reference to the filesystem it was built from."""
    mapper = s3.get_mapper(root)
    assert mapper.fs is s3
21 |
22 |
def test_errors(s3):
    """Missing keys raise KeyError; check=True on a missing root must raise."""
    d = s3.get_mapper(root)
    with pytest.raises(KeyError):
        d["nonexistent"]

    # The previous try/except silently passed when nothing was raised at all;
    # pytest.raises makes both the raise and the message explicit.
    with pytest.raises(Exception, match="does-not-exist"):
        s3.get_mapper("does-not-exist", check=True)
32 |
33 |
def test_with_data(s3):
    """Values round-trip, overwrite in place, and clear() empties the mapper."""
    mapper = s3.get_mapper(root)
    mapper["x"] = b"123"
    assert list(mapper) == list(mapper.keys()) == ["x"]
    assert list(mapper.values()) == [b"123"]
    assert list(mapper.items()) == [("x", b"123")]
    assert mapper["x"] == b"123"
    assert bool(mapper)

    assert s3.find(root) == [test_bucket_name + "/mapping/x"]
    # overwriting a key replaces its stored value
    mapper["x"] = b"000"
    assert mapper["x"] == b"000"

    mapper["y"] = b"456"
    assert mapper["y"] == b"456"
    assert set(mapper) == {"x", "y"}

    mapper.clear()
    assert list(mapper) == []
53 |
54 |
def test_complex_keys(s3):
    """Non-string keys (ints, tuples) can be set, read, and deleted."""
    mapper = s3.get_mapper(root)

    mapper[1] = b"hello"
    assert mapper[1] == b"hello"
    del mapper[1]

    mapper[1, 2] = b"world"
    assert mapper[1, 2] == b"world"
    del mapper[1, 2]

    mapper["x", 1, 2] = b"hello world"
    assert mapper["x", 1, 2] == b"hello world"

    assert ("x", 1, 2) in mapper
69 |
70 |
def test_clear_empty(s3):
    """clear() leaves the mapper empty, before and after writes."""
    mapper = s3.get_mapper(root)
    mapper.clear()
    assert list(mapper) == []

    mapper[1] = b"1"
    # keys come back stringified on listing
    assert list(mapper) == ["1"]

    mapper.clear()
    assert list(mapper) == []
79 |
80 |
def test_no_dircache(s3):
    """The mapper works with directory-listing caching disabled."""
    import fsspec

    from s3fs.tests.test_s3fs import endpoint_uri

    mapper = fsspec.get_mapper(
        "s3://" + root,
        anon=False,
        client_kwargs={"endpoint_url": endpoint_uri},
        use_listings_cache=False,
    )
    mapper.clear()
    assert list(mapper) == []
    mapper[1] = b"1"
    assert list(mapper) == ["1"]
    mapper.clear()
    assert list(mapper) == []
97 |
98 |
def test_pickle(s3):
    """A mapper survives a pickle round-trip and still sees stored data."""
    import pickle

    mapper = s3.get_mapper(root)
    mapper["x"] = b"1"

    restored = pickle.loads(pickle.dumps(mapper))
    assert restored["x"] == b"1"
108 |
109 |
def test_array(s3):
    """Buffer-like values (array.array) are stored as their raw bytes."""
    from array import array

    mapper = s3.get_mapper(root)
    mapper["x"] = array("B", [65] * 1000)

    assert mapper["x"] == b"A" * 1000
117 |
118 |
def test_bytearray(s3):
    """bytearray values are stored and read back as bytes."""
    mapper = s3.get_mapper(root)
    mapper["x"] = bytearray(b"123")

    assert mapper["x"] == b"123"
124 |
125 |
def test_new_bucket(s3):
    """check=True on a missing bucket fails; create=True makes it usable."""
    # The previous try/`assert False`/except pattern is replaced with
    # pytest.raises, which reports a clean failure when nothing is raised
    # and checks the message in one step.
    with pytest.raises(ValueError, match="create"):
        s3.get_mapper("new-bucket", check=True)

    d = s3.get_mapper("new-bucket", create=True)
    assert not d

    d = s3.get_mapper("new-bucket/new-directory")
    assert not d
138 |
139 |
def test_old_api(s3):
    """The legacy S3Map wrapper still returns an fsspec FSMap."""
    import fsspec.mapping

    mapper = S3Map(root, s3)
    assert isinstance(mapper, fsspec.mapping.FSMap)
144 |
--------------------------------------------------------------------------------
/s3fs/utils.py:
--------------------------------------------------------------------------------
import errno
import logging
from contextlib import AsyncExitStack, contextmanager, suppress

from botocore.exceptions import ClientError
5 |
6 |
7 | logger = logging.getLogger("s3fs")
8 |
9 |
@contextmanager
def ignoring(*exceptions):
    """Context manager that silently ignores the given exception types.

    Kept for backward compatibility; delegates to the standard-library
    ``contextlib.suppress`` rather than re-implementing it by hand.
    """
    with suppress(*exceptions):
        yield
16 |
17 |
class S3BucketRegionCache:
    """Lazily create and cache one S3 client per bucket region.

    A general (region-less) client discovers each bucket's region via
    HEAD_BUCKET; regional clients are then created on demand and shared by
    buckets in the same region.  All clients are entered into a single
    AsyncExitStack so ``clear`` can close them together.
    """

    # See https://github.com/aio-libs/aiobotocore/issues/866
    # for details.

    def __init__(self, session, **client_kwargs):
        self._session = session
        self._stack = AsyncExitStack()
        self._client = None  # general (region-less) client, created lazily
        self._client_kwargs = client_kwargs
        self._buckets = {}  # bucket name -> client
        self._regions = {}  # region name -> client

    async def get_bucket_client(self, bucket_name=None):
        """Return a client for *bucket_name*, or the general client if None.

        Falls back to the general client when the bucket's region cannot
        be determined from the HEAD_BUCKET response.
        """
        if bucket_name in self._buckets:
            return self._buckets[bucket_name]

        general_client = await self.get_client()
        if bucket_name is None:
            return general_client

        try:
            response = await general_client.head_bucket(Bucket=bucket_name)
        except ClientError as e:
            # Even a failed HEAD_BUCKET may carry the x-amz-bucket-region
            # header, so keep inspecting the error response below.
            logger.debug("RC: HEAD_BUCKET call for %r has failed", bucket_name)
            response = e.response

        region = (
            response["ResponseMetadata"]
            .get("HTTPHeaders", {})
            .get("x-amz-bucket-region")
        )

        if not region:
            logger.debug(
                "RC: No region in HEAD_BUCKET call response for %r, returning the general client",
                bucket_name,
            )
            return general_client

        if region not in self._regions:
            logger.debug(
                "RC: Creating a new regional client for %r on the region %r",
                bucket_name,
                region,
            )
            self._regions[region] = await self._stack.enter_async_context(
                self._session.create_client(
                    "s3", region_name=region, **self._client_kwargs
                )
            )

        client = self._buckets[bucket_name] = self._regions[region]
        return client

    async def get_client(self):
        """Return the general client, creating it on first use."""
        if not self._client:
            self._client = await self._stack.enter_async_context(
                self._session.create_client("s3", **self._client_kwargs)
            )
        return self._client

    async def clear(self):
        """Forget all cached clients and close them via the exit stack."""
        logger.debug("RC: discarding all clients")
        self._buckets.clear()
        self._regions.clear()
        self._client = None
        await self._stack.aclose()

    async def __aenter__(self):
        return self

    async def __aexit__(self, *exc_args):
        await self.clear()
91 |
92 |
class FileExpired(IOError):
    """
    Raised when the remote file's content changed (ETag mismatch) between
    opening and reading, which would make reads invalid or inconsistent.
    Stale directory-cache information can also trigger this; in that case
    ``S3FileSystem.invalidate_cache`` forces a refresh of the file
    information when the file is reopened.
    """

    def __init__(self, filename: str, e_tag: str):
        # IOError(errno, message) keeps .errno populated for callers.
        message = (
            "The remote file corresponding to filename %s and Etag %s no longer exists."
            % (filename, e_tag)
        )
        super().__init__(errno.EBUSY, message)
108 |
109 |
def title_case(string):
    """Convert an underscore-separated string to TitleCase.

    Parameters
    ----------
    string : str
        Underscore-separated name, e.g. ``"put_object"`` -> ``"PutObject"``.
    """
    return "".join(map(str.capitalize, string.split("_")))
119 |
120 |
class ParamKwargsHelper:
    """Extract the subset of kwargs that a given s3 method actually accepts.

    Parameters
    ----------
    s3 : boto S3FileSystem
    """

    # operation model name -> set of accepted parameter names; shared at
    # class level so the lookup is done once per operation.
    _kwarg_cache = {}

    def __init__(self, s3):
        self.s3 = s3

    def _get_valid_keys(self, model_name):
        # Populate the cache lazily on first use of each operation model.
        cache = self._kwarg_cache
        if model_name not in cache:
            model = self.s3.meta.service_model.operation_model(model_name)
            shape = model.input_shape
            cache[model_name] = set() if shape is None else set(shape.members)
        return cache[model_name]

    def filter_dict(self, method_name, d):
        valid = self._get_valid_keys(title_case(method_name))
        if isinstance(d, SSEParams):
            d = d.to_kwargs()
        return {key: value for key, value in d.items() if key in valid}
153 |
154 |
class SSEParams:
    """Container for S3 server-side-encryption request parameters.

    Attribute names use the CamelCase form expected by the S3 API so the
    instance can be expanded straight into request kwargs.
    """

    def __init__(
        self,
        server_side_encryption=None,
        sse_customer_algorithm=None,
        sse_customer_key=None,
        sse_kms_key_id=None,
    ):
        self.ServerSideEncryption = server_side_encryption
        self.SSECustomerAlgorithm = sse_customer_algorithm
        self.SSECustomerKey = sse_customer_key
        self.SSEKMSKeyId = sse_kms_key_id

    def to_kwargs(self):
        """Return a dict of only the parameters that were actually set."""
        kwargs = {}
        for name, value in vars(self).items():
            if value is not None:
                kwargs[name] = value
        return kwargs
170 |
171 |
172 | def _get_brange(size, block):
173 | """
174 | Chunk up a file into zero-based byte ranges
175 |
176 | Parameters
177 | ----------
178 | size : file size
179 | block : block size
180 | """
181 | for offset in range(0, size, block):
182 | yield offset, min(offset + block - 1, size - 1)
183 |
--------------------------------------------------------------------------------
/docs/source/code-of-conduct.rst:
--------------------------------------------------------------------------------
1 | Code of Conduct
2 | ===============
3 |
4 | All participants in the fsspec community are expected to adhere to a Code of Conduct.
5 |
6 | As contributors and maintainers of this project, and in the interest of
7 | fostering an open and welcoming community, we pledge to respect all people who
8 | contribute through reporting issues, posting feature requests, updating
9 | documentation, submitting pull requests or patches, and other activities.
10 |
11 | We are committed to making participation in this project a harassment-free
12 | experience for everyone, treating everyone as unique humans deserving of
13 | respect.
14 |
15 | Examples of unacceptable behaviour by participants include:
16 |
17 | - The use of sexualized language or imagery
18 | - Personal attacks
19 | - Trolling or insulting/derogatory comments
20 | - Public or private harassment
21 | - Publishing others' private information, such as physical or electronic
22 |   addresses, without explicit permission
23 | - Other unethical or unprofessional conduct
24 |
25 | Project maintainers have the right and responsibility to remove, edit, or
26 | reject comments, commits, code, wiki edits, issues, and other contributions
27 | that are not aligned to this Code of Conduct, or to ban temporarily or
28 | permanently any contributor for other behaviours that they deem inappropriate,
29 | threatening, offensive, or harmful.
30 |
31 | By adopting this Code of Conduct, project maintainers commit themselves
32 | to fairly and consistently applying these principles to every aspect of
33 | managing this project. Project maintainers who do not follow or enforce
34 | the Code of Conduct may be permanently removed from the project team.
35 |
36 | This code of conduct applies both within project spaces and in public
37 | spaces when an individual is representing the project or its community.
38 |
39 | If you feel the code of conduct has been violated, please report the
40 | incident to the fsspec core team.
41 |
42 | Reporting
43 | ---------
44 |
If you believe someone is violating the Code of Conduct we ask that you report it
46 | to the Project by emailing community@anaconda.com. All reports will be kept
47 | confidential. In some cases we may determine that a public statement will need
48 | to be made. If that's the case, the identities of all victims and reporters
49 | will remain confidential unless those individuals instruct us otherwise.
50 | If you believe anyone is in physical danger, please notify appropriate law
51 | enforcement first.
52 |
53 | In your report please include:
54 |
55 | - Your contact info
56 | - Names (real, nicknames, or pseudonyms) of any individuals involved.
57 | If there were other witnesses besides you, please try to include them as well.
58 | - When and where the incident occurred. Please be as specific as possible.
59 | - Your account of what occurred. If there is a publicly available record
60 | please include a link.
61 | - Any extra context you believe existed for the incident.
62 | - If you believe this incident is ongoing.
63 | - If you believe any member of the core team has a conflict of interest
64 | in adjudicating the incident.
65 | - What, if any, corrective response you believe would be appropriate.
66 | - Any other information you believe we should have.
67 |
68 | Core team members are obligated to maintain confidentiality with regard
69 | to the reporter and details of an incident.
70 |
71 | What happens next?
72 | ~~~~~~~~~~~~~~~~~~
73 |
74 | You will receive an email acknowledging receipt of your complaint.
75 | The core team will immediately meet to review the incident and determine:
76 |
77 | - What happened.
78 | - Whether this event constitutes a code of conduct violation.
79 | - Who the bad actor was.
80 | - Whether this is an ongoing situation, or if there is a threat to anyone's
81 | physical safety.
82 | - If this is determined to be an ongoing incident or a threat to physical safety,
  the working group's immediate priority will be to protect everyone involved.
84 |
85 | If a member of the core team is one of the named parties, they will not be
86 | included in any discussions, and will not be provided with any confidential
87 | details from the reporter.
88 |
89 | If anyone on the core team believes they have a conflict of interest in
90 | adjudicating on a reported issue, they will inform the other core team
91 | members, and exempt themselves from any discussion about the issue.
92 | Following this declaration, they will not be provided with any confidential
93 | details from the reporter.
94 |
95 | Once the working group has a complete account of the events they will make a
decision as to how to respond. Responses may include:
97 |
98 | - Nothing (if we determine no violation occurred).
99 | - A private reprimand from the working group to the individual(s) involved.
100 | - A public reprimand.
- An imposed vacation.
102 | - A permanent or temporary ban from some or all spaces (GitHub repositories, etc.)
103 | - A request for a public or private apology.
104 |
105 | We'll respond within one week to the person who filed the report with either a
106 | resolution or an explanation of why the situation is not yet resolved.
107 |
108 | Once we've determined our final action, we'll contact the original reporter
109 | to let them know what action (if any) we'll be taking. We'll take into account
110 | feedback from the reporter on the appropriateness of our response, but we
111 | don't guarantee we'll act on it.
112 |
113 | Acknowledgement
114 | ---------------
115 |
116 | This CoC is modified from the one by `BeeWare`_, which in turn refers to
117 | the `Contributor Covenant`_ and the `Django`_ project.
118 |
119 | .. _BeeWare: https://beeware.org/community/behavior/code-of-conduct/
120 | .. _Contributor Covenant: https://www.contributor-covenant.org/version/1/3/0/code-of-conduct/
121 | .. _Django: https://www.djangoproject.com/conduct/reporting/
122 |
123 | .. raw:: html
124 |
125 |
127 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
10 | set I18NSPHINXOPTS=%SPHINXOPTS% source
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
REM Print the list of documented build targets (also reached via goto help).
if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and a HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  texinfo    to make Texinfo files
	echo.  gettext    to make PO message catalogs
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  xml        to make Docutils-native XML files
	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	echo.  coverage   to run coverage check of the documentation if enabled
	goto end
)
43 |
44 | if "%1" == "clean" (
45 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
46 | del /q /s %BUILDDIR%\*
47 | goto end
48 | )
49 |
50 |
51 | REM Check if sphinx-build is available and fallback to Python version if any
52 | %SPHINXBUILD% 1>NUL 2>NUL
53 | if errorlevel 9009 goto sphinx_python
54 | goto sphinx_ok
55 |
56 | :sphinx_python
57 |
58 | set SPHINXBUILD=python -m sphinx.__init__
59 | %SPHINXBUILD% 2> nul
60 | if errorlevel 9009 (
61 | echo.
62 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
63 | echo.installed, then set the SPHINXBUILD environment variable to point
64 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
65 | echo.may add the Sphinx directory to PATH.
66 | echo.
67 | echo.If you don't have Sphinx installed, grab it from
68 | echo.http://sphinx-doc.org/
69 | exit /b 1
70 | )
71 |
72 | :sphinx_ok
73 |
74 |
75 | if "%1" == "html" (
76 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
77 | if errorlevel 1 exit /b 1
78 | echo.
79 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
80 | goto end
81 | )
82 |
83 | if "%1" == "dirhtml" (
84 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
85 | if errorlevel 1 exit /b 1
86 | echo.
87 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
88 | goto end
89 | )
90 |
91 | if "%1" == "singlehtml" (
92 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
93 | if errorlevel 1 exit /b 1
94 | echo.
95 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
96 | goto end
97 | )
98 |
99 | if "%1" == "pickle" (
100 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
101 | if errorlevel 1 exit /b 1
102 | echo.
103 | echo.Build finished; now you can process the pickle files.
104 | goto end
105 | )
106 |
107 | if "%1" == "json" (
108 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
109 | if errorlevel 1 exit /b 1
110 | echo.
111 | echo.Build finished; now you can process the JSON files.
112 | goto end
113 | )
114 |
115 | if "%1" == "htmlhelp" (
116 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
117 | if errorlevel 1 exit /b 1
118 | echo.
119 | echo.Build finished; now you can run HTML Help Workshop with the ^
120 | .hhp project file in %BUILDDIR%/htmlhelp.
121 | goto end
122 | )
123 |
REM Build Qt help files; qcollectiongenerator turns the .qhcp project into a
REM .qhc collection file that Qt Assistant can open (".ghc" was a typo).
if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\S3Fs.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\S3Fs.qhc
	goto end
)
135 |
136 | if "%1" == "devhelp" (
137 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
138 | if errorlevel 1 exit /b 1
139 | echo.
140 | echo.Build finished.
141 | goto end
142 | )
143 |
144 | if "%1" == "epub" (
145 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
146 | if errorlevel 1 exit /b 1
147 | echo.
148 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
149 | goto end
150 | )
151 |
152 | if "%1" == "latex" (
153 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
154 | if errorlevel 1 exit /b 1
155 | echo.
156 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
157 | goto end
158 | )
159 |
160 | if "%1" == "latexpdf" (
161 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
162 | cd %BUILDDIR%/latex
163 | make all-pdf
164 | cd %~dp0
165 | echo.
166 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
167 | goto end
168 | )
169 |
170 | if "%1" == "latexpdfja" (
171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
172 | cd %BUILDDIR%/latex
173 | make all-pdf-ja
174 | cd %~dp0
175 | echo.
176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
177 | goto end
178 | )
179 |
180 | if "%1" == "text" (
181 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
182 | if errorlevel 1 exit /b 1
183 | echo.
184 | echo.Build finished. The text files are in %BUILDDIR%/text.
185 | goto end
186 | )
187 |
188 | if "%1" == "man" (
189 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
190 | if errorlevel 1 exit /b 1
191 | echo.
192 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
193 | goto end
194 | )
195 |
196 | if "%1" == "texinfo" (
197 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
198 | if errorlevel 1 exit /b 1
199 | echo.
200 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
201 | goto end
202 | )
203 |
204 | if "%1" == "gettext" (
205 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
206 | if errorlevel 1 exit /b 1
207 | echo.
208 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
209 | goto end
210 | )
211 |
212 | if "%1" == "changes" (
213 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
214 | if errorlevel 1 exit /b 1
215 | echo.
216 | echo.The overview file is in %BUILDDIR%/changes.
217 | goto end
218 | )
219 |
220 | if "%1" == "linkcheck" (
221 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
222 | if errorlevel 1 exit /b 1
223 | echo.
224 | echo.Link check complete; look for any errors in the above output ^
225 | or in %BUILDDIR%/linkcheck/output.txt.
226 | goto end
227 | )
228 |
229 | if "%1" == "doctest" (
230 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
231 | if errorlevel 1 exit /b 1
232 | echo.
233 | echo.Testing of doctests in the sources finished, look at the ^
234 | results in %BUILDDIR%/doctest/output.txt.
235 | goto end
236 | )
237 |
238 | if "%1" == "coverage" (
239 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
240 | if errorlevel 1 exit /b 1
241 | echo.
242 | echo.Testing of coverage in the sources finished, look at the ^
243 | results in %BUILDDIR%/coverage/python.txt.
244 | goto end
245 | )
246 |
247 | if "%1" == "xml" (
248 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
249 | if errorlevel 1 exit /b 1
250 | echo.
251 | echo.Build finished. The XML files are in %BUILDDIR%/xml.
252 | goto end
253 | )
254 |
255 | if "%1" == "pseudoxml" (
256 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
257 | if errorlevel 1 exit /b 1
258 | echo.
259 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
260 | goto end
261 | )
262 |
263 | :end
264 |
--------------------------------------------------------------------------------
/s3fs/errors.py:
--------------------------------------------------------------------------------
1 | """S3 error codes adapted into more natural Python ones.
2 |
3 | Adapted from: https://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
4 | """
5 |
6 | import errno
7 | import functools
8 |
9 |
# Fallback values since some systems might not have these.
# Each name resolves to the platform's errno constant when it exists and to a
# reasonable generic substitute otherwise, so the mapping below always works.
ENAMETOOLONG = getattr(errno, "ENAMETOOLONG", errno.EINVAL)
ENOTEMPTY = getattr(errno, "ENOTEMPTY", errno.EINVAL)
EMSGSIZE = getattr(errno, "EMSGSIZE", errno.EINVAL)
EREMOTEIO = getattr(errno, "EREMOTEIO", errno.EIO)  # remote I/O error
EREMCHG = getattr(errno, "EREMCHG", errno.ENOENT)  # remote address changed (used for redirects)
16 |
17 |
# Maps the S3 error response "Code" string (or a bare HTTP status code) to the
# exception used by ``translate_boto_error``. Each value is either an exception
# class taking a single message argument, or a ``functools.partial(IOError, <errno>)``
# callable that pre-binds an errno, so calling it with the message yields
# ``IOError(errno, message)``.
ERROR_CODE_TO_EXCEPTION = {
    "AccessDenied": PermissionError,
    "AccountProblem": PermissionError,
    "AllAccessDisabled": PermissionError,
    "AmbiguousGrantByEmailAddress": functools.partial(IOError, errno.EINVAL),
    "AuthorizationHeaderMalformed": functools.partial(IOError, errno.EINVAL),
    "BadDigest": functools.partial(IOError, errno.EINVAL),
    "BucketAlreadyExists": FileExistsError,
    "BucketAlreadyOwnedByYou": FileExistsError,
    "BucketNotEmpty": functools.partial(IOError, ENOTEMPTY),
    "CredentialsNotSupported": functools.partial(IOError, errno.EINVAL),
    "CrossLocationLoggingProhibited": PermissionError,
    "EntityTooSmall": functools.partial(IOError, errno.EINVAL),
    "EntityTooLarge": functools.partial(IOError, EMSGSIZE),
    "ExpiredToken": PermissionError,
    "IllegalLocationConstraintException": PermissionError,
    "IllegalVersioningConfigurationException": functools.partial(IOError, errno.EINVAL),
    "IncompleteBody": functools.partial(IOError, errno.EINVAL),
    "IncorrectNumberOfFilesInPostRequest": functools.partial(IOError, errno.EINVAL),
    "InlineDataTooLarge": functools.partial(IOError, EMSGSIZE),
    "InternalError": functools.partial(IOError, EREMOTEIO),
    "InvalidAccessKeyId": PermissionError,
    "InvalidAddressingHeader": functools.partial(IOError, errno.EINVAL),
    "InvalidArgument": functools.partial(IOError, errno.EINVAL),
    "InvalidBucketName": functools.partial(IOError, errno.EINVAL),
    "InvalidBucketState": functools.partial(IOError, errno.EPERM),
    "InvalidDigest": functools.partial(IOError, errno.EINVAL),
    "InvalidEncryptionAlgorithmError": functools.partial(IOError, errno.EINVAL),
    "InvalidLocationConstraint": functools.partial(IOError, errno.EINVAL),
    "InvalidObjectState": PermissionError,
    "InvalidPart": functools.partial(IOError, errno.EINVAL),
    "InvalidPartOrder": functools.partial(IOError, errno.EINVAL),
    "InvalidPayer": PermissionError,
    "InvalidPolicyDocument": functools.partial(IOError, errno.EINVAL),
    "InvalidRange": functools.partial(IOError, errno.EINVAL),
    "InvalidRequest": functools.partial(IOError, errno.EINVAL),
    "InvalidSecurity": PermissionError,
    "InvalidSOAPRequest": functools.partial(IOError, errno.EINVAL),
    "InvalidStorageClass": functools.partial(IOError, errno.EINVAL),
    "InvalidTargetBucketForLogging": functools.partial(IOError, errno.EINVAL),
    "InvalidToken": functools.partial(IOError, errno.EINVAL),
    "InvalidURI": functools.partial(IOError, errno.EINVAL),
    "KeyTooLongError": functools.partial(IOError, ENAMETOOLONG),
    "MalformedACLError": functools.partial(IOError, errno.EINVAL),
    "MalformedPOSTRequest": functools.partial(IOError, errno.EINVAL),
    "MalformedXML": functools.partial(IOError, errno.EINVAL),
    "MaxMessageLengthExceeded": functools.partial(IOError, EMSGSIZE),
    "MaxPostPreDataLengthExceededError": functools.partial(IOError, EMSGSIZE),
    "MetadataTooLarge": functools.partial(IOError, EMSGSIZE),
    "MethodNotAllowed": functools.partial(IOError, errno.EPERM),
    "MissingAttachment": functools.partial(IOError, errno.EINVAL),
    "MissingContentLength": functools.partial(IOError, errno.EINVAL),
    "MissingRequestBodyError": functools.partial(IOError, errno.EINVAL),
    "MissingSecurityElement": functools.partial(IOError, errno.EINVAL),
    "MissingSecurityHeader": functools.partial(IOError, errno.EINVAL),
    "NoLoggingStatusForKey": functools.partial(IOError, errno.EINVAL),
    "NoSuchBucket": FileNotFoundError,
    "NoSuchBucketPolicy": FileNotFoundError,
    "NoSuchKey": FileNotFoundError,
    "NoSuchLifecycleConfiguration": FileNotFoundError,
    "NoSuchUpload": FileNotFoundError,
    "NoSuchVersion": FileNotFoundError,
    "NotImplemented": functools.partial(IOError, errno.ENOSYS),
    "NotSignedUp": PermissionError,
    "OperationAborted": functools.partial(IOError, errno.EBUSY),
    "PermanentRedirect": functools.partial(IOError, EREMCHG),
    "PreconditionFailed": functools.partial(IOError, errno.EINVAL),
    "Redirect": functools.partial(IOError, EREMCHG),
    "RestoreAlreadyInProgress": functools.partial(IOError, errno.EBUSY),
    "RequestIsNotMultiPartContent": functools.partial(IOError, errno.EINVAL),
    "RequestTimeout": TimeoutError,
    "RequestTimeTooSkewed": PermissionError,
    "RequestTorrentOfBucketError": functools.partial(IOError, errno.EPERM),
    "SignatureDoesNotMatch": PermissionError,
    "ServiceUnavailable": functools.partial(IOError, errno.EBUSY),
    "SlowDown": functools.partial(IOError, errno.EBUSY),
    "TemporaryRedirect": functools.partial(IOError, EREMCHG),
    "TokenRefreshRequired": functools.partial(IOError, errno.EINVAL),
    "TooManyBuckets": functools.partial(IOError, errno.EINVAL),
    "UnexpectedContent": functools.partial(IOError, errno.EINVAL),
    "UnresolvableGrantByEmailAddress": functools.partial(IOError, errno.EINVAL),
    "UserKeyMustBeSpecified": functools.partial(IOError, errno.EINVAL),
    # Numeric HTTP status codes, mapped the same way as their symbolic
    # equivalents above (named in the trailing comments).
    "301": functools.partial(IOError, EREMCHG),  # PermanentRedirect
    "307": functools.partial(IOError, EREMCHG),  # Redirect
    "400": functools.partial(IOError, errno.EINVAL),
    "403": PermissionError,
    "404": FileNotFoundError,
    "405": functools.partial(IOError, errno.EPERM),
    "409": functools.partial(IOError, errno.EBUSY),
    "412": functools.partial(IOError, errno.EINVAL),  # PreconditionFailed
    "416": functools.partial(IOError, errno.EINVAL),  # InvalidRange
    "500": functools.partial(IOError, EREMOTEIO),  # InternalError
    "501": functools.partial(IOError, errno.ENOSYS),  # NotImplemented
    "503": functools.partial(IOError, errno.EBUSY),  # SlowDown
}
113 |
114 |
def translate_boto_error(error, message=None, set_cause=True, *args, **kwargs):
    """Convert a ClientError exception into a Python one.

    Parameters
    ----------

    error : botocore.exceptions.ClientError
        The exception returned by the boto API.
    message : str
        An error message to use for the returned exception. If not given, the
        error message returned by the server is used instead.
    set_cause : bool
        Whether to set the __cause__ attribute to the previous exception if the
        exception is translated.
    *args, **kwargs :
        Additional arguments to pass to the exception constructor, after the
        error message. Useful for passing the filename arguments to ``IOError``.

    Returns
    -------

    An instantiated exception ready to be thrown. If the error code isn't
    recognized, an IOError with the original error message is returned.
    """
    error_response = getattr(error, "response", None)

    if error_response is None:
        # Not an HTTP-level failure (e.g. a connection error): nothing to map.
        return error

    # Robustness fix: some malformed/unusual responses lack the "Error"
    # element entirely; fall back to an empty mapping instead of raising
    # KeyError from inside the translator itself.
    error_info = error_response.get("Error", {})
    code = error_info.get("Code")
    if code == "PreconditionFailed" and error_info.get("Condition", "") == "If-None-Match":
        # A conditional (exclusive) write failed because the key already
        # exists, so FileExistsError is more precise than the generic mapping.
        constructor = FileExistsError
    else:
        constructor = ERROR_CODE_TO_EXCEPTION.get(code)
    if constructor:
        if not message:
            message = error_info.get("Message", str(error))
        custom_exc = constructor(message, *args, **kwargs)
    else:
        # No match found: wrap in a generic I/O error. Only positional args
        # are forwarded because OSError accepts no keyword arguments.
        custom_exc = OSError(errno.EIO, message or str(error), *args)

    if set_cause:
        custom_exc.__cause__ = error
    return custom_exc
163 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
21 |
# Default first target: a bare `make` prints this usage text.
.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  applehelp  to make an Apple Help Book"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage   to run coverage check of the documentation (if enabled)"
49 |
50 | .PHONY: clean
51 | clean:
52 | rm -rf $(BUILDDIR)/*
53 |
54 | .PHONY: html
55 | html:
56 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
57 | @echo
58 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
59 |
60 | .PHONY: dirhtml
61 | dirhtml:
62 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
63 | @echo
64 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
65 |
66 | .PHONY: singlehtml
67 | singlehtml:
68 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
69 | @echo
70 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
71 |
72 | .PHONY: pickle
73 | pickle:
74 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
75 | @echo
76 | @echo "Build finished; now you can process the pickle files."
77 |
78 | .PHONY: json
79 | json:
80 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
81 | @echo
82 | @echo "Build finished; now you can process the JSON files."
83 |
84 | .PHONY: htmlhelp
85 | htmlhelp:
86 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
87 | @echo
88 | @echo "Build finished; now you can run HTML Help Workshop with the" \
89 | ".hhp project file in $(BUILDDIR)/htmlhelp."
90 |
91 | .PHONY: qthelp
92 | qthelp:
93 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
94 | @echo
95 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
96 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
97 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/S3Fs.qhcp"
98 | @echo "To view the help file:"
99 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/S3Fs.qhc"
100 |
101 | .PHONY: applehelp
102 | applehelp:
103 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
104 | @echo
105 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
106 | @echo "N.B. You won't be able to view it unless you put it in" \
107 | "~/Library/Documentation/Help or install it in your application" \
108 | "bundle."
109 |
110 | .PHONY: devhelp
111 | devhelp:
112 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
113 | @echo
114 | @echo "Build finished."
115 | @echo "To view the help file:"
116 | @echo "# mkdir -p $$HOME/.local/share/devhelp/S3Fs"
117 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/S3Fs"
118 | @echo "# devhelp"
119 |
120 | .PHONY: epub
121 | epub:
122 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
123 | @echo
124 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
125 |
126 | .PHONY: latex
127 | latex:
128 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
129 | @echo
130 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
131 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
132 | "(use \`make latexpdf' here to do that automatically)."
133 |
134 | .PHONY: latexpdf
135 | latexpdf:
136 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
137 | @echo "Running LaTeX files through pdflatex..."
138 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
139 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
140 |
141 | .PHONY: latexpdfja
142 | latexpdfja:
143 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
144 | @echo "Running LaTeX files through platex and dvipdfmx..."
145 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
146 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
147 |
148 | .PHONY: text
149 | text:
150 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
151 | @echo
152 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
153 |
154 | .PHONY: man
155 | man:
156 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
157 | @echo
158 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
159 |
160 | .PHONY: texinfo
161 | texinfo:
162 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
163 | @echo
164 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
165 | @echo "Run \`make' in that directory to run these through makeinfo" \
166 | "(use \`make info' here to do that automatically)."
167 |
168 | .PHONY: info
169 | info:
170 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
171 | @echo "Running Texinfo files through makeinfo..."
172 | make -C $(BUILDDIR)/texinfo info
173 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
174 |
175 | .PHONY: gettext
176 | gettext:
177 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
178 | @echo
179 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
180 |
181 | .PHONY: changes
182 | changes:
183 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
184 | @echo
185 | @echo "The overview file is in $(BUILDDIR)/changes."
186 |
187 | .PHONY: linkcheck
188 | linkcheck:
189 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
190 | @echo
191 | @echo "Link check complete; look for any errors in the above output " \
192 | "or in $(BUILDDIR)/linkcheck/output.txt."
193 |
194 | .PHONY: doctest
195 | doctest:
196 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
197 | @echo "Testing of doctests in the sources finished, look at the " \
198 | "results in $(BUILDDIR)/doctest/output.txt."
199 |
200 | .PHONY: coverage
201 | coverage:
202 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
203 | @echo "Testing of coverage in the sources finished, look at the " \
204 | "results in $(BUILDDIR)/coverage/python.txt."
205 |
206 | .PHONY: xml
207 | xml:
208 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
209 | @echo
210 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
211 |
212 | .PHONY: pseudoxml
213 | pseudoxml:
214 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
215 | @echo
216 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
217 |
--------------------------------------------------------------------------------
/docs/source/changelog.rst:
--------------------------------------------------------------------------------
1 | Changelog
2 | =========
3 |
4 | 2025.12.0
5 | ---------
6 |
7 | - remove optional dependencies (#995)
8 | - add support for py3.14 and remove 3.9 (#993)
9 | - add link docs->repo (#992)
10 |
11 | 2025.10.0
12 | ---------
13 |
14 | - get bucket info on demand (#987)
15 | - add CoC (#986)
16 | - add goatcounter tracker (#985)
17 |
18 | 2025.9.0
19 | --------
20 |
21 | - update README for distribution compliance (#977)
22 |
23 | 2025.7.0
24 | --------
25 |
26 | - fix exclusive write for small files (#974)
27 | - acknowledge Anaconda support (#972)
28 | - fix test typo (#970)
29 |
30 | 2025.5.1
31 | --------
32 |
33 | no changes
34 |
35 | 2025.5.0
36 | --------
37 |
38 | - simpler requirements syntax (#958)
39 | - use head_bucket for info(bucket) (#961)
40 |
41 | 2025.3.2
42 | --------
43 |
44 | no changes
45 |
46 | 2025.3.1
47 | --------
48 |
49 | - get_event_loop -> get_running_loop at shutdown (#954)
50 |
51 | 2025.3.0
52 | --------
53 |
- recreate session object on refresh (#939)
55 | - re-enable CI tests (#940)
56 |
57 | 2025.2.0
58 | --------
59 |
60 | - update docstrings to new default values (#934)
61 | - fix CI (#936)
62 |
63 | 2024.12.0
64 | ---------
65 |
66 | - CI fixes (#922)
67 | - smaller threshold for copy_managed (#921)
68 | - exclusive write (#917)
69 | - fix bug in _find (#913)
70 | - parse query without upstream infer_storage_options (#912)
71 | - bug in _upload_file_part_concurrent (#910)
72 |
73 | 2024.10.0
74 | ---------
75 |
76 | - invalidate cache in one-shot pipe file (#904)
77 | - make pipe() concurrent (#901)
78 | - add py3.13 (#898)
- support R2 multi-part uploads (#888)
80 |
81 | 2024.9.0
82 | --------
83 |
84 | no change
85 |
86 | 2024.6.1
87 | --------
88 |
89 | no changes
90 |
91 | 2024.6.0
92 | --------
93 |
94 | no changes
95 |
96 | 2024.5.0
97 | --------
98 |
99 | - widen fsspec req version (#869)
100 | - _bulk_delete must return list (#866)
101 | - retry on "reduce request rate" (#865)
102 |
103 | 2024.3.1
104 | --------
105 |
106 | - accept kwargs in get_file (#863)
107 |
108 | 2024.3.0
109 | --------
110 |
- don't fail ls if parent is inaccessible (#860)
112 | - allow checksum error to retry (#858)
113 | - don't lsbuckets for isdir(bucket) (#856)
- concurrent uploads of parts in put_file (#848)
115 |
116 | 2024.2.0
117 | --------
118 |
119 | - fix cache lookup in _info (#840)
120 |
121 | 2023.12.2
122 | ---------
123 |
124 | no changes
125 |
126 | 2023.12.1
127 | ---------
128 |
129 | - revert fallback to anon (#835)
130 |
131 | 2023.12.0
132 | ---------
133 |
134 | - fall back to anon if no creds are found or passed at all (#823)
135 | - **relax version bounds for aiobotocore** (#829)
136 | - avoid key error if LastModified missing (#828)
- add make_bucket_versioned method (#825)
138 | - retain TZ on modified time (#818)
139 |
140 | 2023.10.0
141 | ---------
142 |
143 | - make protocol attribute a tuple (#812)
144 | - update to aiobotocore 2.7.0 (#809)
145 | - fix in _get_file following failure after connect (#805)
146 | - test for du of nonexistent (#803)
147 |
148 | 2023.9.2
149 | --------
150 |
151 | - allow size= in fs.open() (#797)
- rmdir for non-bucket (#795)
- moto updates (#793)
154 | - fix CI warnings (#792)
155 | - dircache usage with depth (#791)
156 |
157 | 2023.9.1
158 | --------
159 |
160 | - retry ClientPayloadError while reading after initial connection (#787)
161 | - don't pass ACL if not specified (#785)
162 |
163 | 2023.9.0
164 | --------
165 |
166 | - aiobotocore to 2.5.4
167 | - better ** support in bulk ops/glob (#769)
168 | - default ACL to "private" rather than blank (#764)
169 | - invalidate cache in rm_file (#762)
170 | - closing client in running loop (#760)
171 |
172 | 2023.6.0
173 | --------
174 |
175 | - allow versions in info.exists (#746)
- streaming file to update its size for tell (#745, 741)
177 |
178 |
179 | 2023.5.0
180 | --------
181 |
182 | - Fix "_" in xattrs tests (#732)
183 | - Fix file pointer already at end of file when retrying put (#731)
184 | - Fix repeated find corrupting cache (#730)
185 | - Remove duplicate class definition (#727)
186 | - return list of deleted keys in bulk deleted (#726)
187 |
188 |
189 | 2023.4.0
190 | --------
191 |
192 | - Add streaming async read file (#722)
193 | - Doc fixes (#721)
194 | - aiobotocore to 2.5.0 (#710)
195 |
196 | 2023.3.0
197 | --------
198 |
199 | - Allow setting endpoint_url as top-level kwarg (#704)
200 | - minimum python version 3.8 (#702)
201 | - Update docs config (#697)
202 | - get/put/cp recursive extra tests (#691)
203 |
204 | 2023.1.0
205 | --------
206 |
207 | - parse lambda ARNs (#686)
208 | - recursive on chmod (#679)
209 | - default cache to be readahead (#678)
210 | - temporary redirects in headBucket (#676)
211 | - async iterator for listings (#670)
212 |
213 |
214 | 2022.11.0
215 | ---------
216 |
217 | - optionally listing versions with ls (#661)
218 |
219 | 2022.10.0
220 | ---------
221 |
222 | - directory cache race condition (#655)
223 | - version aware find (#654)
224 |
225 | 2022.8.1
226 | --------
227 |
228 | (no change)
229 |
230 | 2022.8.0
231 | --------
232 |
233 | - aiobotocore 2.4.0 (#643)
234 | - del/list multipart uploads (#645)
235 | - disallow prerelease aiohttp (#640)
236 | - docs syntax (#634)
237 |
238 |
239 | 2022.7.1
240 | --------
241 |
242 | No changes
243 |
244 | 2022.7.0
245 | --------
246 |
247 | - aiobotocore 2.3.4 (#633)
248 |
249 |
250 | 2022.5.0
251 | --------
252 |
253 | - aiobotocore 2.3 (#622, fixes #558)
254 | - rate limiting (#619, #620)
255 |
256 | 2022.3.0
257 | --------
258 |
259 | - pre-commit (#612)
260 | - aiobotocore 2.2 (#609)
261 | - empty ETag (#605)
262 | - HTTPClientError retry (#597)
263 | - new callbacks support (#590)
264 |
265 | 2022.02.0
266 | ---------
267 |
268 | - callbacks fixes (#594, 590)
269 | - drop py36 (#582)
270 | - metadata fixes (#575, 579)
271 |
272 | 2022.01.0
273 | ---------
274 |
275 | - aiobotocore dep to 2.1.0 (#564)
276 | - docs for non-aws (#567)
277 | - ContentType in info (#570)
278 | - small-file ACL (#574)
279 |
280 | 2021.11.1
281 | ---------
282 |
283 | - deal with missing ETag (#557)
284 | - ClientPayloadError to retryable (#556)
285 | - pin aiobotocore (#555)
286 |
287 | 2021.11.0
288 | ---------
289 |
290 | - move to fsspec org
291 | - doc tweaks (#546, 540)
- redundant argument in _rm_versioned_bucket_contents (#439)
293 | - allow client_method in url/sign (POST, etc) (#536)
294 | - revert list_v2->head for info (#545)
295 |
296 | 2021.10.1
297 | ---------
298 |
299 | - allow other methods than GET to url/sign (#536)
300 |
301 | 2021.10.0
302 | ---------
303 |
304 | No changes (just released to keep pin with fsspec)
305 |
306 | 2021.09.0
307 | ---------
308 |
309 | - check for bucket also with get_bucket_location (#533)
310 | - update versioneer (#531)
311 |
312 | 2021.08.1
313 | ---------
314 |
315 | - retry on IncompleteRead (#525)
316 | - fix isdir for missing bucket (#522)
- raise for glob("*") (#516)
318 |
319 | 2021.08.0
320 | ---------
321 |
322 | - fix for aiobotocore update (#510)
323 |
324 | 2021.07.0
325 | ---------
326 |
327 | - make bucket in put(recursive) (#496)
328 | - non-truthy prefixes (#497)
329 | - implement rm_file (#499)
330 |
331 | 2021.06.1
332 | ---------
333 |
334 | - bucket region caching (#495)
335 |
336 | 2021.06.0
337 | ---------
338 |
339 | - support "prefix" in directory listings (#486)
340 | - support negative index in cat_file (#487, 488)
- don't require ETag in file details (#480)
342 |
343 | 2021.05.0
344 | ---------
345 |
346 | - optimize ``info``,``exists`` (and related) calls for non-version aware mode
347 | - copy with entries without ETag (#480)
348 | - find not to corrupts parent listing (#476)
349 | - short listing to determine directory (#472, 471)
350 |
351 | Version 2021.04.0
352 | -----------------
353 |
354 | - switch to calver and fsspec pin
355 | - py36 (#462)
356 | - async fixes (#456, 452)
357 |
358 | Version 0.6.0
359 | -------------
360 |
361 | - update for fsspec 0.9.0 (#448)
362 | - better errors (#443)
363 | - cp to preserve ETAG (#441)
364 | - CI (#435, #427, #395)
365 | - 5GB PUT (#425)
366 | - partial cat (#389)
367 | - direct find (#360)
368 |
369 |
370 | Version 0.5.0
371 | -------------
372 |
373 | - Asynchronous filesystem based on ``aiobotocore``
374 |
375 |
376 | Version 0.4.0
377 | -------------
378 |
379 | - New instances no longer need reconnect (:pr:`244`) by `Martin Durant`_
380 | - Always use multipart uploads when not autocommitting (:pr:`243`) by `Marius van Niekerk`_
381 | - Create ``CONTRIBUTING.md`` (:pr:`248`) by `Jacob Tomlinson`_
382 | - Use autofunction for ``S3Map`` sphinx autosummary (:pr:`251`) by `James Bourbeau`_
383 | - Miscellaneous doc updates (:pr:`252`) by `James Bourbeau`_
384 | - Support for Python 3.8 (:pr:`264`) by `Tom Augspurger`_
385 | - Improved performance for ``isdir`` (:pr:`259`) by `Nate Yoder`_
386 | - Increased the minimum required version of fsspec to 0.6.0
387 |
388 | .. _`Martin Durant`: https://github.com/martindurant
389 | .. _`Marius van Niekerk`: https://github.com/mariusvniekerk
390 | .. _`Jacob Tomlinson`: https://github.com/jacobtomlinson
391 | .. _`James Bourbeau`: https://github.com/jrbourbeau
392 | .. _`Tom Augspurger`: https://github.com/TomAugspurger
393 | .. _`Nate Yoder`: https://github.com/nateyoder
394 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
#
# Sphinx configuration for the S3Fs documentation.
#
# Originally created by sphinx-quickstart on Mon Mar 21 15:20:01 2016;
# only the options this project actually sets are spelled out here.
# This file is execfile()d by Sphinx with the current directory set to
# its containing dir.

import os  # kept from the generated config; retained for compatibility

# -- General configuration -------------------------------------------------

# Sphinx extension modules used when building these docs.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
    'sphinx.ext.ifconfig',
    'sphinx.ext.viewcode',
    'sphinx.ext.autosummary',
    'sphinx.ext.extlinks',
    'sphinx.ext.napoleon',
]

# Paths (relative to this directory) that contain templates.
templates_path = ['_templates']

# Only reStructuredText sources are used.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'S3Fs'
copyright = '2016, Continuum Analytics'
author = 'Continuum Analytics'

# Both the short X.Y version and the full release string are taken
# straight from the installed package so the docs always match the code.
import s3fs
version = s3fs.__version__
release = version

# Patterns, relative to the source directory, matching files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# Do not render `todo` / `todoList` directives in the built output.
todo_include_todos = False

# Shorthand roles: :pr:`123` links to the matching GitHub pull request.
extlinks = {
    "pr": ("https://github.com/fsspec/s3fs/pull/%s", "PR #%s"),
}


# -- Options for HTML output -----------------------------------------------

html_theme = 'sphinx_rtd_theme'

# Custom static files (style sheets etc.), copied after the builtin ones.
html_static_path = ['_static']

# Custom CSS overriding the Read the Docs defaults.
# Contains workaround for issue #790.
html_css_files = ["custom.css"]

# Output file base name for HTML help builder.
htmlhelp_basename = 'S3Fsdoc'

# -- Options for LaTeX output ----------------------------------------------

# All LaTeX settings are left at their Sphinx defaults.
latex_elements = {}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass).
latex_documents = [
    (master_doc, 'S3Fs.tex', 'S3Fs Documentation',
     'Continuum Analytics', 'manual'),
]


# -- Options for manual page output ----------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 's3fs', 'S3Fs Documentation',
     [author], 1)
]


# -- Options for Texinfo output ---------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category).
texinfo_documents = [
    (master_doc, 'S3Fs', 'S3Fs Documentation',
     author, 'S3Fs', 'One line description of project.',
     'Miscellaneous'),
]
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | S3Fs
2 | ====
3 |
S3Fs is a Pythonic file interface to S3. It builds on top of botocore_. The project is hosted on `GitHub <https://github.com/fsspec/s3fs>`_ |github_stars|
5 |
6 | .. |github_stars| image:: https://img.shields.io/github/stars/fsspec/s3fs?style=social
7 | :target: https://github.com/fsspec/s3fs
8 | :alt: GitHub Repository
9 |
10 | The top-level class :py:class:`.S3FileSystem` holds connection information and allows
11 | typical file-system style operations like ``cp``, ``mv``, ``ls``, ``du``,
12 | ``glob``, etc., as well as put/get of local files to/from S3.
13 |
14 | The connection can be anonymous - in which case only publicly-available,
15 | read-only buckets are accessible - or via credentials explicitly supplied
16 | or in configuration files.
17 |
18 | Calling ``open()`` on a :py:class:`.S3FileSystem` (typically using a context manager)
19 | provides an :py:class:`.S3File` for read or write access to a particular key. The object
20 | emulates the standard ``File`` protocol (``read``, ``write``, ``tell``,
21 | ``seek``), such that functions expecting a file can access S3. Only binary read
22 | and write modes are implemented, with blocked caching.
23 |
24 | S3Fs uses and is based upon `fsspec`_.
25 |
26 | .. _fsspec: https://filesystem-spec.readthedocs.io/en/latest/
27 |
28 | Examples
29 | --------
30 |
31 | Simple locate and read a file:
32 |
33 | .. code-block:: python
34 |
35 | >>> import s3fs
36 | >>> s3 = s3fs.S3FileSystem(anon=True)
37 | >>> s3.ls('my-bucket')
38 | ['my-file.txt']
39 | >>> with s3.open('my-bucket/my-file.txt', 'rb') as f:
40 | ... print(f.read())
41 | b'Hello, world'
42 |
43 | (see also ``walk`` and ``glob``)
44 |
45 | Reading with delimited blocks:
46 |
47 | .. code-block:: python
48 |
49 | >>> s3.read_block(path, offset=1000, length=10, delimiter=b'\n')
50 | b'A whole line of text\n'
51 |
52 | Writing with blocked caching:
53 |
54 | .. code-block:: python
55 |
56 | >>> s3 = s3fs.S3FileSystem(anon=False) # uses default credentials
57 | >>> with s3.open('mybucket/new-file', 'wb') as f:
58 | ... f.write(2*2**20 * b'a')
59 | ... f.write(2*2**20 * b'a') # data is flushed and file closed
60 | >>> s3.du('mybucket/new-file')
61 | {'mybucket/new-file': 4194304}
62 |
63 | Because S3Fs faithfully copies the Python file interface it can be used
64 | smoothly with other projects that consume the file interface like ``gzip`` or
65 | ``pandas``.
66 |
67 | .. code-block:: python
68 |
69 | >>> with s3.open('mybucket/my-file.csv.gz', 'rb') as f:
70 | ... g = gzip.GzipFile(fileobj=f) # Decompress data with gzip
71 | ... df = pd.read_csv(g) # Read CSV file with Pandas
72 |
73 | Integration
74 | -----------
75 |
76 | The libraries ``intake``, ``pandas`` and ``dask`` accept URLs with the prefix
77 | "s3://", and will use s3fs to complete the IO operation in question. The
78 | IO functions take an argument ``storage_options``, which will be passed
79 | to :py:class:`.S3FileSystem`, for example:
80 |
81 | .. code-block:: python
82 |
83 | df = pd.read_excel("s3://bucket/path/file.xls",
84 | storage_options={"anon": True})
85 |
86 | This gives the chance to pass any credentials or other necessary
87 | arguments needed to s3fs.
88 |
89 |
90 | Async
91 | -----
92 |
93 | ``s3fs`` is implemented using ``aiobotocore``, and offers async functionality.
A number of methods of :py:class:`.S3FileSystem` are ``async``, and for each of these,
95 | there is also a synchronous version with the same name and lack of a ``_``
96 | prefix.
97 |
98 | If you wish to call ``s3fs`` from async code, then you should pass
99 | ``asynchronous=True, loop=`` to the constructor (the latter is optional,
100 | if you wish to use both async and sync methods). You must also explicitly
101 | await the client creation before making any S3 call.
102 |
103 | .. code-block:: python
104 |
105 | async def run_program():
106 | s3 = S3FileSystem(..., asynchronous=True)
107 | session = await s3.set_session()
108 | ... # perform work
109 | await session.close()
110 |
111 | asyncio.run(run_program()) # or call from your async code
112 |
113 | Concurrent async operations are also used internally for bulk operations
114 | such as ``pipe/cat``, ``get/put``, ``cp/mv/rm``. The async calls are
115 | hidden behind a synchronisation layer, so are designed to be called
116 | from normal code. If you are *not*
117 | using async-style programming, you do not need to know about how this
118 | works, but you might find the implementation interesting.
119 |
120 |
121 | Multiprocessing
122 | ---------------
123 |
124 | When using Python's `multiprocessing`_, the start method must be set to either
125 | ``spawn`` or ``forkserver``. ``fork`` is not safe to use because of the open sockets
126 | and async thread used by s3fs, and may lead to
127 | hard-to-find bugs and occasional deadlocks. Read more about the available
128 | `start methods`_.
129 |
130 | .. _multiprocessing: https://docs.python.org/3/library/multiprocessing.html
131 | .. _start methods: https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
132 |
133 | Limitations
134 | -----------
135 |
136 | This project is meant for convenience, rather than feature completeness.
137 | The following are known current omissions:
138 |
139 | - file access is always binary (although ``readline`` and iterating by line
140 | are possible)
141 |
142 | - no permissions/access-control (i.e., no ``chmod``/``chown`` methods)
143 |
144 |
145 | Logging
146 | -------
147 |
148 | The logger named ``s3fs`` provides information about the operations of the file
149 | system. To quickly see all messages, you can set the environment variable
150 | ``S3FS_LOGGING_LEVEL=DEBUG``. The presence of this environment variable will
151 | install a handler for the logger that prints messages to stderr and set the log
152 | level to the given value. More advance logging configuration is possible using
153 | Python's standard `logging framework`_.
154 |
155 | .. _logging framework: https://docs.python.org/3/library/logging.html
156 |
157 | Credentials
158 | -----------
159 |
160 | The AWS key and secret may be provided explicitly when creating an :py:class:`.S3FileSystem`.
161 | A more secure way, not including the credentials directly in code, is to allow
162 | boto to establish the credentials automatically. Boto will try the following
163 | methods, in order:
164 |
165 | - ``AWS_ACCESS_KEY_ID``, ``AWS_SECRET_ACCESS_KEY``, and ``AWS_SESSION_TOKEN``
166 | environment variables
167 |
168 | - configuration files such as ``~/.aws/credentials``
169 |
170 | - for nodes on EC2, the IAM metadata provider
171 |
172 | You can specify a profile using ``s3fs.S3FileSystem(profile='PROFILE')``.
Otherwise ``s3fs`` will use authentication via `boto environment variables`_.
174 |
175 | .. _boto environment variables: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#using-environment-variables
176 |
177 | In a distributed environment, it is not expected that raw credentials should
178 | be passed between machines. In the explicitly provided credentials case, the
179 | method :py:meth:`.S3FileSystem.get_delegated_s3pars` can be used to obtain temporary credentials.
180 | When not using explicit credentials, it should be expected that every machine
181 | also has the appropriate environment variables, config files or IAM roles
182 | available.
183 |
184 | If none of the credential methods are available, only anonymous access will
185 | work, and ``anon=True`` must be passed to the constructor.
186 |
187 | Furthermore, :py:meth:`.S3FileSystem.current` will return the most-recently created
188 | instance, so this method could be used in preference to the constructor in
189 | cases where the code must be agnostic of the credentials/config used.
190 |
191 | S3 Compatible Storage
192 | ---------------------
193 |
194 | To use ``s3fs`` against an S3 compatible storage, like `MinIO`_ or
195 | `Ceph Object Gateway`_, you'll probably need to pass extra parameters when
196 | creating the ``s3fs`` filesystem. Here are some sample configurations:
197 |
198 | For a self-hosted MinIO instance:
199 |
200 | .. code-block:: python
201 |
202 | # When relying on auto discovery for credentials
203 | >>> s3 = s3fs.S3FileSystem(
204 | anon=False,
205 | endpoint_url='https://...'
206 | )
207 | # Or passing the credentials directly
208 | >>> s3 = s3fs.S3FileSystem(
209 | key='miniokey...',
210 | secret='asecretkey...',
211 | endpoint_url='https://...'
212 | )
213 |
214 | It is also possible to set credentials through environment variables:
215 |
216 | .. code-block:: python
217 |
218 | # export FSSPEC_S3_ENDPOINT_URL=https://...
219 | # export FSSPEC_S3_KEY='miniokey...'
220 | # export FSSPEC_S3_SECRET='asecretkey...'
221 | >>> s3 = s3fs.S3FileSystem()
222 | # or ...
223 | >>> f = fsspec.open("s3://minio-bucket/...")
224 |
225 |
For Storj DCS via the `S3-compatible Gateway <https://docs.storj.io/dcs/api-reference/s3-compatible-gateway>`_:
227 |
228 | .. code-block:: python
229 |
230 | # When relying on auto discovery for credentials
231 | >>> s3 = s3fs.S3FileSystem(
232 | anon=False,
233 | endpoint_url='https://gateway.storjshare.io'
234 | )
235 | # Or passing the credentials directly
236 | >>> s3 = s3fs.S3FileSystem(
237 | key='accesskey...',
238 | secret='asecretkey...',
239 | endpoint_url='https://gateway.storjshare.io'
240 | )
241 |
242 | For a Scaleway s3-compatible storage in the ``fr-par`` zone:
243 |
244 | .. code-block:: python
245 |
246 | >>> s3 = s3fs.S3FileSystem(
247 | key='scaleway-api-key...',
248 | secret='scaleway-secretkey...',
249 | endpoint_url='https://s3.fr-par.scw.cloud',
250 | client_kwargs={
251 | 'region_name': 'fr-par'
252 | }
253 | )
254 |
255 | For an OVH s3-compatible storage in the ``GRA`` zone:
256 |
257 | .. code-block:: python
258 |
259 | >>> s3 = s3fs.S3FileSystem(
260 | key='ovh-s3-key...',
261 | secret='ovh-s3-secretkey...',
262 | endpoint_url='https://s3.GRA.cloud.ovh.net',
263 | client_kwargs={
264 | 'region_name': 'GRA'
265 | },
266 | config_kwargs={
267 | 'signature_version': 's3v4'
268 | }
269 | )
270 |
271 |
272 | .. _MinIO: https://min.io
273 | .. _Ceph Object Gateway: https://docs.ceph.com/docs/master/radosgw/
274 |
275 | Requester Pays Buckets
276 | ----------------------
277 |
278 | Some buckets, such as the `arXiv raw data
279 | `__, are configured so that the
280 | requester of the data pays any transfer fees. You must be
authenticated to access these buckets and (because these charges may be
282 | unexpected) amazon requires an additional key on many of the API
283 | calls. To enable ``RequesterPays`` create your file system as
284 |
285 |
286 | .. code-block:: python
287 |
288 | >>> s3 = s3fs.S3FileSystem(anon=False, requester_pays=True)
289 |
290 |
291 | Serverside Encryption
292 | ---------------------
293 |
294 | For some buckets/files you may want to use some of s3's server side encryption
295 | features. ``s3fs`` supports these in a few ways
296 |
297 |
298 | .. code-block:: python
299 |
300 | >>> s3 = s3fs.S3FileSystem(
301 | ... s3_additional_kwargs={'ServerSideEncryption': 'AES256'})
302 |
303 | This will create an s3 filesystem instance that will append the
304 | ServerSideEncryption argument to all s3 calls (where applicable).
305 |
306 | The same applies for ``s3.open``. Most of the methods on the filesystem object
307 | will also accept and forward keyword arguments to the underlying calls. The
308 | most recently specified argument is applied last in the case where both
309 | ``s3_additional_kwargs`` and a method's ``**kwargs`` are used.
310 |
311 | The ``s3.utils.SSEParams`` provides some convenient helpers for the serverside
312 | encryption parameters in particular. An instance can be passed instead of a
313 | regular python dictionary as the ``s3_additional_kwargs`` parameter.
314 |
315 |
316 | Bucket Version Awareness
317 | ------------------------
318 |
319 | If your bucket has object versioning enabled then you can add version-aware support
320 | to ``s3fs``. This ensures that if a file is opened at a particular point in time that
321 | version will be used for reading.
322 |
323 | This mitigates the issue where more than one user is concurrently reading and writing
324 | to the same object.
325 |
326 | .. code-block:: python
327 |
328 | >>> s3 = s3fs.S3FileSystem(version_aware=True)
329 | # Open the file at the latest version
330 | >>> fo = s3.open('versioned_bucket/object')
331 | >>> versions = s3.object_version_info('versioned_bucket/object')
332 | # Open the file at a particular version
333 | >>> fo_old_version = s3.open('versioned_bucket/object', version_id='SOMEVERSIONID')
334 |
335 | In order for this to function the user must have the necessary IAM permissions to perform
336 | a GetObjectVersion
337 |
338 |
339 | Contents
340 | ========
341 |
.. toctree::
   :maxdepth: 2

   install
   development
   api
   changelog
   code-of-conduct
349 |
350 |
351 | .. _botocore: https://botocore.readthedocs.io/en/latest/
352 |
353 | Indices and tables
354 | ==================
355 |
356 | * :ref:`genindex`
357 | * :ref:`modindex`
358 | * :ref:`search`
359 |
360 |
361 | These docs pages collect anonymous tracking data using goatcounter, and the
362 | dashboard is available to the public: https://s3fs.goatcounter.com/ .
363 |
.. raw:: html

   <script data-goatcounter="https://s3fs.goatcounter.com/count"
           async src="//gc.zgo.at/count.js"></script>
--------------------------------------------------------------------------------
/s3fs/_version.py:
--------------------------------------------------------------------------------
1 | # This file helps to compute a version number in source trees obtained from
2 | # git-archive tarball (such as those provided by githubs download-from-tag
3 | # feature). Distribution tarballs (built by setup.py sdist) and build
4 | # directories (produced by setup.py build) will contain a much shorter file
5 | # that just contains the computed version number.
6 |
7 | # This file is released into the public domain.
8 | # Generated by versioneer-0.29
9 | # https://github.com/python-versioneer/python-versioneer
10 |
11 | """Git implementation of _version.py."""
12 |
13 | import errno
14 | import os
15 | import re
16 | import subprocess
17 | import sys
18 | from typing import Any, Callable, Dict, List, Optional, Tuple
19 | import functools
20 |
21 |
def get_keywords() -> Dict[str, str]:
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own (do not rename or inline them).
    # _version.py will just call get_keywords().
    git_refnames = " (HEAD -> main, tag: 2025.12.0)"
    git_full = "65f394575b9667f33b59473dc28a8f1cf6708745"
    git_date = "2025-12-03 10:32:02 -0500"
    return {"refnames": git_refnames, "full": git_full, "date": git_date}
33 |
34 |
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Only declares the expected fields; instances are populated by
    ``get_config()`` below.
    """

    VCS: str  # version-control system name (always "git" in this file)
    style: str  # version-string rendering style, e.g. "pep440"
    tag_prefix: str  # prefix stripped from VCS tags to obtain the version
    parentdir_prefix: str  # expected prefix of an unpacked tarball directory
    versionfile_source: str  # path of this file relative to the project root
    verbose: bool  # whether to print diagnostics while computing the version
44 |
45 |
def get_config() -> VersioneerConfig:
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    config = VersioneerConfig()
    config.VCS = "git"
    config.style = "pep440"
    config.versionfile_source = "s3fs/_version.py"
    config.tag_prefix = ""
    # NOTE: this is the literal string "None" (truthy), not the None
    # singleton -- it is what the generator emits when no parentdir
    # prefix was configured.
    config.parentdir_prefix = "None"
    config.verbose = False
    return config
58 |
59 |
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Used as control flow: each version-discovery strategy raises this so
    that ``get_versions()`` falls through to the next strategy.
    """
62 |
63 |
# VCS name -> long _version.py source template; unused at runtime in the
# generated file (versioneer itself populates it when vendoring).
LONG_VERSION_PY: Dict[str, str] = {}
# VCS name -> {method name -> handler function}; filled in at import time
# by the @register_vcs_handler decorator below.
HANDLERS: Dict[str, Dict[str, Callable]] = {}
66 |
67 |
def register_vcs_handler(vcs: str, method: str) -> Callable:  # decorator
    """Create decorator to mark a method as the handler of a VCS."""

    def decorate(func: Callable) -> Callable:
        """Record *func* in HANDLERS[vcs][method] and return it unchanged."""
        HANDLERS.setdefault(vcs, {})[method] = func
        return func

    return decorate
79 |
80 |
def run_command(
    commands: List[str],
    args: List[str],
    cwd: Optional[str] = None,
    verbose: bool = False,
    hide_stderr: bool = False,
    env: Optional[Dict[str, str]] = None,
) -> Tuple[Optional[str], Optional[int]]:
    """Try each executable name in *commands* until one can be spawned.

    Returns ``(stdout, returncode)`` on success, ``(None, returncode)`` if
    the process exited non-zero, and ``(None, None)`` if no candidate
    executable could be launched at all.
    """
    assert isinstance(commands, list)

    popen_kwargs: Dict[str, Any] = {}
    if sys.platform == "win32":
        # This hides the console window if pythonw.exe is used
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        popen_kwargs["startupinfo"] = startupinfo

    process = None
    dispcmd = ""
    for candidate in commands:
        dispcmd = str([candidate] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [candidate] + args,
                cwd=cwd,
                env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None),
                **popen_kwargs,
            )
            break
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                # this candidate doesn't exist; try the next one
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(exc)
            return None, None
    if process is None:
        # every candidate raised ENOENT
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
131 |
132 |
def versions_from_parentdir(
    parentdir_prefix: str,
    root: str,
    verbose: bool,
) -> Dict[str, Any]:
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory.
    Raises NotThisMethod when no level matches.
    """
    tried = []
    level = root
    for _ in range(3):
        name = os.path.basename(level)
        if name.startswith(parentdir_prefix):
            return {
                "version": name[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        tried.append(level)
        level = os.path.dirname(level)  # up a level

    if verbose:
        print(
            "Tried directories %s but none started with prefix %s"
            % (str(tried), parentdir_prefix)
        )
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
165 |
166 |
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
    """Extract version information from the given file.

    The code embedded in _version.py can just fetch the value of these
    keywords. When used from setup.py, we don't want to import _version.py,
    so we do it with a regexp instead. This function is not used from
    _version.py.
    """
    # map of assignment prefixes to the keyword name each one carries
    wanted = {
        "git_refnames =": "refnames",
        "git_full =": "full",
        "git_date =": "date",
    }
    keywords: Dict[str, str] = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                for prefix, key in wanted.items():
                    if stripped.startswith(prefix):
                        mo = re.search(r'=\s*"(.*)"', line)
                        if mo:
                            keywords[key] = mo.group(1)
    except OSError:
        # unreadable/missing file: return whatever was collected (empty)
        pass
    return keywords
193 |
194 |
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(
    keywords: Dict[str, str],
    tag_prefix: str,
    verbose: bool,
) -> Dict[str, Any]:
    """Get version information from git keywords."""
    if "refnames" not in keywords:
        raise NotThisMethod("Short version file found")
    date = keywords.get("date")
    if date is not None:
        # Use only the last line. Previous lines may contain GPG signature
        # information.
        date = date.splitlines()[-1]
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {ref.strip() for ref in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {ref[len(TAG):] for ref in refs if ref.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {ref for ref in refs if re.search(r"\d", ref)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    # sorting will prefer e.g. "2.0" over "2.0rc1"
    for ref in sorted(tags):
        if not ref.startswith(tag_prefix):
            continue
        candidate = ref[len(tag_prefix):]
        # Filter out refs that exactly match prefix or that don't start
        # with a number once the prefix is stripped (mostly a concern
        # when prefix is '')
        if not re.match(r"\d", candidate):
            continue
        if verbose:
            print("picking %s" % candidate)
        return {
            "version": candidate,
            "full-revisionid": keywords["full"].strip(),
            "dirty": False,
            "error": None,
            "date": date,
        }
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {
        "version": "0+unknown",
        "full-revisionid": keywords["full"].strip(),
        "dirty": False,
        "error": "no suitable tags",
        "date": None,
    }
268 |
269 |
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(
    tag_prefix: str, root: str, verbose: bool, runner: Callable = run_command
) -> Dict[str, Any]:
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys ``long``, ``short``, ``error``,
    ``branch``, ``dirty``, ``date`` and, when a tag was found,
    ``closest-tag`` and ``distance``.  Raises NotThisMethod when the tree
    is not under git control or a required git call fails.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        # shell=False is used throughout, so the .cmd/.exe wrappers must be
        # named explicitly on Windows.
        GITS = ["git.cmd", "git.exe"]

    # GIT_DIR can interfere with correct operation of Versioneer.
    # It may be intended to be passed to the Versioneer-versioned project,
    # but that should not change where we get our version from.
    env = os.environ.copy()
    env.pop("GIT_DIR", None)
    runner = functools.partial(runner, env=env)

    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = runner(
        GITS,
        [
            "describe",
            "--tags",
            "--dirty",
            "--always",
            "--long",
            "--match",
            f"{tag_prefix}[[:digit:]]*",
        ],
        cwd=root,
    )
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces: Dict[str, Any] = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root)
    # --abbrev-ref was added in git-1.6.3
    if rc != 0 or branch_name is None:
        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
    branch_name = branch_name.strip()

    if branch_name == "HEAD":
        # If we aren't exactly on a branch, pick a branch which represents
        # the current commit. If all else fails, we are on a branchless
        # commit.
        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
        # --contains was added in git-1.5.4
        if rc != 0 or branches is None:
            raise NotThisMethod("'git branch --contains' returned error")
        branches = branches.split("\n")

        # Remove the first line if we're running detached
        if "(" in branches[0]:
            branches.pop(0)

        # Strip off the leading "* " from the list of branches.
        branches = [branch[2:] for branch in branches]
        if "master" in branches:
            branch_name = "master"
        elif not branches:
            branch_name = None
        else:
            # Pick the first branch that is returned. Good or bad.
            branch_name = branches[0]

    pieces["branch"] = branch_name

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[: git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
        if not mo:
            # unparsable. Maybe git-describe is misbehaving?
            pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
                full_tag,
                tag_prefix,
            )
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix) :]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        # NOTE(review): assumes this rev-list call succeeds; a failed runner
        # would return out=None and crash on .split() -- tolerated because
        # earlier git calls in this tree already succeeded.
        out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
        pieces["distance"] = len(out.split())  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    # NOTE(review): same assumption as above -- a failed 'git show' would
    # return (None, rc) and crash on .strip().
    date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
    # Use only the last line. Previous lines may contain GPG signature
    # information.
    date = date.splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
411 |
412 |
def plus_or_dot(pieces: Dict[str, Any]) -> str:
    """Return a "+" if we don't already have one, else return a ".".

    PEP 440 allows only one "+" (the local-version separator); if the
    closest tag already contains one, further segments are joined with ".".
    """
    # `pieces.get("closest-tag", "")` is not enough: the default is used only
    # when the key is absent, so a present-but-None value would raise
    # TypeError on the `in` test.  `or ""` covers both cases.
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
418 |
419 |
def render_pep440(pieces: Dict[str, Any]) -> str:
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    dirty_suffix = ".dirty" if pieces["dirty"] else ""
    local = "%d.g%s" % (pieces["distance"], pieces["short"])
    if tag:
        if not (pieces["distance"] or pieces["dirty"]):
            # exactly on a clean tag: the bare tag is the version
            return tag
        return tag + plus_or_dot(pieces) + local + dirty_suffix
    # exception #1
    return "0+untagged." + local + dirty_suffix
442 |
443 |
def render_pep440_branch(pieces: Dict[str, Any]) -> str:
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch. Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    dev = ".dev0" if pieces["branch"] != "master" else ""
    dirty_suffix = ".dirty" if pieces["dirty"] else ""
    tail = "%d.g%s" % (pieces["distance"], pieces["short"])
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += dev + plus_or_dot(pieces) + tail + dirty_suffix
        return rendered
    # exception #1
    return "0" + dev + "+untagged." + tail + dirty_suffix
471 |
472 |
def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
    """Split pep440 version string at the post-release segment.

    Returns the release segments before the post-release and the
    post-release version number (0 for a bare ".post"), or None when no
    post-release segment is present.
    """
    parts = ver.split(".post")
    if len(parts) == 2:
        # a bare trailing ".post" counts as post-release 0
        return parts[0], int(parts[1] or 0)
    return parts[0], None
481 |
482 |
def render_pep440_pre(pieces: Dict[str, Any]) -> str:
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post0.dev%d" % pieces["distance"]
    if not pieces["distance"]:
        # no commits since the tag: use the tag itself as the version
        return tag
    # update (or introduce) the post-release segment
    base, post = pep440_split_post(tag)
    if post is None:
        return base + ".post0.dev%d" % pieces["distance"]
    return base + ".post%d.dev%d" % (post + 1, pieces["distance"])
505 |
506 |
def render_pep440_post(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    dev = ".dev0" if pieces["dirty"] else ""
    post = ".post%d" % pieces["distance"]
    ghex = "g%s" % pieces["short"]
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += post + dev + plus_or_dot(pieces) + ghex
        return rendered
    # exception #1
    return "0" + post + dev + "+" + ghex
532 |
533 |
def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    dev = ".dev0" if pieces["branch"] != "master" else ""
    dirty_suffix = ".dirty" if pieces["dirty"] else ""
    post = ".post%d" % pieces["distance"]
    ghex = "g%s" % pieces["short"]
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += post + dev + plus_or_dot(pieces) + ghex + dirty_suffix
        return rendered
    # exception #1
    return "0" + post + dev + "+" + ghex + dirty_suffix
561 |
562 |
def render_pep440_old(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    dev = ".dev0" if pieces["dirty"] else ""
    post = ".post%d" % pieces["distance"]
    if pieces["closest-tag"]:
        if pieces["distance"] or pieces["dirty"]:
            return pieces["closest-tag"] + post + dev
        # exactly on a clean tag
        return pieces["closest-tag"]
    # exception #1
    return "0" + post + dev
583 |
584 |
def render_git_describe(pieces: Dict[str, Any]) -> str:
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered = "%s-%d-g%s" % (rendered, pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    return rendered + ("-dirty" if pieces["dirty"] else "")
603 |
604 |
def render_git_describe_long(pieces: Dict[str, Any]) -> str:
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = "%s-%d-g%s" % (
            pieces["closest-tag"],
            pieces["distance"],
            pieces["short"],
        )
    else:
        # exception #1
        rendered = pieces["short"]
    return rendered + ("-dirty" if pieces["dirty"] else "")
623 |
624 |
def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # a hard error short-circuits rendering entirely
        return {
            "version": "unknown",
            "full-revisionid": pieces.get("long"),
            "dirty": None,
            "error": pieces["error"],
            "date": None,
        }

    if not style or style == "default":
        style = "pep440"  # the default

    # dispatch table: style name -> renderer
    renderers = {
        "pep440": render_pep440,
        "pep440-branch": render_pep440_branch,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-post-branch": render_pep440_post_branch,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)

    return {
        "version": rendered,
        "full-revisionid": pieces["long"],
        "dirty": pieces["dirty"],
        "error": None,
        "date": pieces.get("date"),
    }
665 |
666 |
def get_versions() -> Dict[str, Any]:
    """Get version information or return default if unable to do so.

    Tries, in order: expanded git-archive keywords, 'git describe' on the
    checked-out tree, and finally the parent-directory name.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    cfg = get_config()
    verbose = cfg.verbose

    def _unknown(reason: str) -> Dict[str, Any]:
        """Build the fallback result dict for a failure *reason*."""
        return {
            "version": "0+unknown",
            "full-revisionid": None,
            "dirty": None,
            "error": reason,
            "date": None,
        }

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for _ in cfg.versionfile_source.split("/"):
            root = os.path.dirname(root)
    except NameError:
        return _unknown("unable to find root of source tree")

    try:
        return render(git_pieces_from_vcs(cfg.tag_prefix, root, verbose), cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return _unknown("unable to compute version")
717 |
--------------------------------------------------------------------------------
/versioneer.py:
--------------------------------------------------------------------------------
1 | # Version: 0.29
2 |
3 | """The Versioneer - like a rocketeer, but for versions.
4 |
5 | The Versioneer
6 | ==============
7 |
8 | * like a rocketeer, but for versions!
9 | * https://github.com/python-versioneer/python-versioneer
10 | * Brian Warner
11 | * License: Public Domain (Unlicense)
12 | * Compatible with: Python 3.7, 3.8, 3.9, 3.10, 3.11 and pypy3
13 | * [![Latest Version][pypi-image]][pypi-url]
14 | * [![Build Status][travis-image]][travis-url]
15 |
16 | This is a tool for managing a recorded version number in setuptools-based
17 | python projects. The goal is to remove the tedious and error-prone "update
18 | the embedded version string" step from your release process. Making a new
19 | release should be as easy as recording a new tag in your version-control
20 | system, and maybe making new tarballs.
21 |
22 |
23 | ## Quick Install
24 |
25 | Versioneer provides two installation modes. The "classic" vendored mode installs
26 | a copy of versioneer into your repository. The experimental build-time dependency mode
27 | is intended to allow you to skip this step and simplify the process of upgrading.
28 |
29 | ### Vendored mode
30 |
31 | * `pip install versioneer` to somewhere in your $PATH
32 | * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
33 | available, so you can also use `conda install -c conda-forge versioneer`
34 | * add a `[tool.versioneer]` section to your `pyproject.toml` or a
35 | `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
36 | * Note that you will need to add `tomli; python_version < "3.11"` to your
37 | build-time dependencies if you use `pyproject.toml`
38 | * run `versioneer install --vendor` in your source tree, commit the results
39 | * verify version information with `python setup.py version`
40 |
41 | ### Build-time dependency mode
42 |
43 | * `pip install versioneer` to somewhere in your $PATH
44 | * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
45 | available, so you can also use `conda install -c conda-forge versioneer`
46 | * add a `[tool.versioneer]` section to your `pyproject.toml` or a
47 | `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
48 | * add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`)
49 | to the `requires` key of the `build-system` table in `pyproject.toml`:
50 | ```toml
51 | [build-system]
52 | requires = ["setuptools", "versioneer[toml]"]
53 | build-backend = "setuptools.build_meta"
54 | ```
55 | * run `versioneer install --no-vendor` in your source tree, commit the results
56 | * verify version information with `python setup.py version`
57 |
58 | ## Version Identifiers
59 |
60 | Source trees come from a variety of places:
61 |
62 | * a version-control system checkout (mostly used by developers)
63 | * a nightly tarball, produced by build automation
64 | * a snapshot tarball, produced by a web-based VCS browser, like github's
65 | "tarball from tag" feature
66 | * a release tarball, produced by "setup.py sdist", distributed through PyPI
67 |
68 | Within each source tree, the version identifier (either a string or a number,
69 | this tool is format-agnostic) can come from a variety of places:
70 |
71 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
72 | about recent "tags" and an absolute revision-id
73 | * the name of the directory into which the tarball was unpacked
74 | * an expanded VCS keyword ($Id$, etc)
75 | * a `_version.py` created by some earlier build step
76 |
77 | For released software, the version identifier is closely related to a VCS
78 | tag. Some projects use tag names that include more than just the version
79 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
80 | needs to strip the tag prefix to extract the version identifier. For
81 | unreleased software (between tags), the version identifier should provide
82 | enough information to help developers recreate the same tree, while also
83 | giving them an idea of roughly how old the tree is (after version 1.2, before
84 | version 1.3). Many VCS systems can report a description that captures this,
85 | for example `git describe --tags --dirty --always` reports things like
86 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
87 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
88 | uncommitted changes).
89 |
90 | The version identifier is used for multiple purposes:
91 |
92 | * to allow the module to self-identify its version: `myproject.__version__`
93 | * to choose a name and prefix for a 'setup.py sdist' tarball
94 |
95 | ## Theory of Operation
96 |
97 | Versioneer works by adding a special `_version.py` file into your source
98 | tree, where your `__init__.py` can import it. This `_version.py` knows how to
99 | dynamically ask the VCS tool for version information at import time.
100 |
101 | `_version.py` also contains `$Revision$` markers, and the installation
102 | process marks `_version.py` to have this marker rewritten with a tag name
103 | during the `git archive` command. As a result, generated tarballs will
104 | contain enough information to get the proper version.
105 |
106 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to
107 | the top level of your source tree, next to `setup.py` and the `setup.cfg`
108 | that configures it. This overrides several distutils/setuptools commands to
109 | compute the version when invoked, and changes `setup.py build` and `setup.py
110 | sdist` to replace `_version.py` with a small static file that contains just
111 | the generated version data.
112 |
113 | ## Installation
114 |
115 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
116 |
117 | ## Version-String Flavors
118 |
119 | Code which uses Versioneer can learn about its version string at runtime by
120 | importing `_version` from your main `__init__.py` file and running the
121 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
122 | import the top-level `versioneer.py` and run `get_versions()`.
123 |
124 | Both functions return a dictionary with different flavors of version
125 | information:
126 |
127 | * `['version']`: A condensed version string, rendered using the selected
128 | style. This is the most commonly used value for the project's version
129 | string. The default "pep440" style yields strings like `0.11`,
130 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
131 | below for alternative styles.
132 |
133 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the
134 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
135 |
136 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
137 | commit date in ISO 8601 format. This will be None if the date is not
138 | available.
139 |
140 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
141 | this is only accurate if run in a VCS checkout, otherwise it is likely to
142 | be False or None
143 |
144 | * `['error']`: if the version string could not be computed, this will be set
145 | to a string describing the problem, otherwise it will be None. It may be
146 | useful to throw an exception in setup.py if this is set, to avoid e.g.
147 | creating tarballs with a version string of "unknown".
148 |
149 | Some variants are more useful than others. Including `full-revisionid` in a
150 | bug report should allow developers to reconstruct the exact code being tested
151 | (or indicate the presence of local changes that should be shared with the
152 | developers). `version` is suitable for display in an "about" box or a CLI
153 | `--version` output: it can be easily compared against release notes and lists
154 | of bugs fixed in various releases.
155 |
156 | The installer adds the following text to your `__init__.py` to place a basic
157 | version in `YOURPROJECT.__version__`:
158 |
159 | from ._version import get_versions
160 | __version__ = get_versions()['version']
161 | del get_versions
162 |
163 | ## Styles
164 |
165 | The setup.cfg `style=` configuration controls how the VCS information is
166 | rendered into a version string.
167 |
168 | The default style, "pep440", produces a PEP440-compliant string, equal to the
169 | un-prefixed tag name for actual releases, and containing an additional "local
170 | version" section with more detail for in-between builds. For Git, this is
171 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
172 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
173 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
174 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released
175 | software (exactly equal to a known tag), the identifier will only contain the
176 | stripped tag, e.g. "0.11".
177 |
178 | Other styles are available. See [details.md](details.md) in the Versioneer
179 | source tree for descriptions.
180 |
181 | ## Debugging
182 |
183 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
184 | to return a version of "0+unknown". To investigate the problem, run `setup.py
185 | version`, which will run the version-lookup code in a verbose mode, and will
186 | display the full contents of `get_versions()` (including the `error` string,
187 | which may help identify what went wrong).
188 |
189 | ## Known Limitations
190 |
191 | Some situations are known to cause problems for Versioneer. This details the
192 | most significant ones. More can be found on Github
193 | [issues page](https://github.com/python-versioneer/python-versioneer/issues).
194 |
195 | ### Subprojects
196 |
197 | Versioneer has limited support for source trees in which `setup.py` is not in
198 | the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
199 | two common reasons why `setup.py` might not be in the root:
200 |
201 | * Source trees which contain multiple subprojects, such as
202 | [Buildbot](https://github.com/buildbot/buildbot), which contains both
203 | "master" and "slave" subprojects, each with their own `setup.py`,
204 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
205 | distributions (and upload multiple independently-installable tarballs).
206 | * Source trees whose main purpose is to contain a C library, but which also
207 | provide bindings to Python (and perhaps other languages) in subdirectories.
208 |
209 | Versioneer will look for `.git` in parent directories, and most operations
210 | should get the right version string. However `pip` and `setuptools` have bugs
211 | and implementation details which frequently cause `pip install .` from a
212 | subproject directory to fail to find a correct version string (so it usually
213 | defaults to `0+unknown`).
214 |
215 | `pip install --editable .` should work correctly. `setup.py install` might
216 | work too.
217 |
218 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
219 | some later version.
220 |
221 | [Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
222 | this issue. The discussion in
223 | [PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
224 | issue from the Versioneer side in more detail.
225 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and
226 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
227 | pip to let Versioneer work correctly.
228 |
229 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the
230 | `setup.cfg`, so subprojects were completely unsupported with those releases.
231 |
232 | ### Editable installs with setuptools <= 18.5
233 |
234 | `setup.py develop` and `pip install --editable .` allow you to install a
235 | project into a virtualenv once, then continue editing the source code (and
236 | test) without re-installing after every change.
237 |
238 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
239 | convenient way to specify executable scripts that should be installed along
240 | with the python package.
241 |
242 | These both work as expected when using modern setuptools. When using
243 | setuptools-18.5 or earlier, however, certain operations will cause
244 | `pkg_resources.DistributionNotFound` errors when running the entrypoint
245 | script, which must be resolved by re-installing the package. This happens
246 | when the install happens with one version, then the egg_info data is
247 | regenerated while a different version is checked out. Many setup.py commands
248 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
249 | a different virtualenv), so this can be surprising.
250 |
251 | [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes
252 | this one, but upgrading to a newer version of setuptools should probably
253 | resolve it.
254 |
255 |
256 | ## Updating Versioneer
257 |
258 | To upgrade your project to a new release of Versioneer, do the following:
259 |
260 | * install the new Versioneer (`pip install -U versioneer` or equivalent)
261 | * edit `setup.cfg` and `pyproject.toml`, if necessary,
262 | to include any new configuration settings indicated by the release notes.
263 | See [UPGRADING](./UPGRADING.md) for details.
264 | * re-run `versioneer install --[no-]vendor` in your source tree, to replace
265 | `SRC/_version.py`
266 | * commit any changed files
267 |
268 | ## Future Directions
269 |
270 | This tool is designed to make it easily extended to other version-control
271 | systems: all VCS-specific components are in separate directories like
272 | src/git/ . The top-level `versioneer.py` script is assembled from these
273 | components by running make-versioneer.py . In the future, make-versioneer.py
274 | will take a VCS name as an argument, and will construct a version of
275 | `versioneer.py` that is specific to the given VCS. It might also take the
276 | configuration arguments that are currently provided manually during
277 | installation by editing setup.py . Alternatively, it might go the other
278 | direction and include code from all supported VCS systems, reducing the
279 | number of intermediate scripts.
280 |
281 | ## Similar projects
282 |
283 | * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time
284 | dependency
285 | * [miniver](https://github.com/jbweston/miniver) - a lightweight reimplementation of
286 | versioneer
287 | * [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools
288 | plugin
289 |
290 | ## License
291 |
292 | To make Versioneer easier to embed, all its code is dedicated to the public
293 | domain. The `_version.py` that it creates is also in the public domain.
294 | Specifically, both are released under the "Unlicense", as described in
295 | https://unlicense.org/.
296 |
297 | [pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
298 | [pypi-url]: https://pypi.python.org/pypi/versioneer/
299 | [travis-image]:
300 | https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
301 | [travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer
302 |
303 | """
304 | # pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring
305 | # pylint:disable=missing-class-docstring,too-many-branches,too-many-statements
306 | # pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error
307 | # pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with
308 | # pylint:disable=attribute-defined-outside-init,too-many-arguments
309 |
310 | import configparser
311 | import errno
312 | import json
313 | import os
314 | import re
315 | import subprocess
316 | import sys
317 | from pathlib import Path
318 | from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union
319 | from typing import NoReturn
320 | import functools
321 |
322 | have_tomllib = True
323 | if sys.version_info >= (3, 11):
324 | import tomllib
325 | else:
326 | try:
327 | import tomli as tomllib
328 | except ImportError:
329 | have_tomllib = False
330 |
331 |
332 | class VersioneerConfig:
333 | """Container for Versioneer configuration parameters."""
334 |
335 | VCS: str
336 | style: str
337 | tag_prefix: str
338 | versionfile_source: str
339 | versionfile_build: Optional[str]
340 | parentdir_prefix: Optional[str]
341 | verbose: Optional[bool]
342 |
343 |
344 | def get_root() -> str:
345 | """Get the project root directory.
346 |
347 | We require that all commands are run from the project root, i.e. the
348 | directory that contains setup.py, setup.cfg, and versioneer.py .
349 | """
350 | root = os.path.realpath(os.path.abspath(os.getcwd()))
351 | setup_py = os.path.join(root, "setup.py")
352 | pyproject_toml = os.path.join(root, "pyproject.toml")
353 | versioneer_py = os.path.join(root, "versioneer.py")
354 | if not (
355 | os.path.exists(setup_py)
356 | or os.path.exists(pyproject_toml)
357 | or os.path.exists(versioneer_py)
358 | ):
359 | # allow 'python path/to/setup.py COMMAND'
360 | root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
361 | setup_py = os.path.join(root, "setup.py")
362 | pyproject_toml = os.path.join(root, "pyproject.toml")
363 | versioneer_py = os.path.join(root, "versioneer.py")
364 | if not (
365 | os.path.exists(setup_py)
366 | or os.path.exists(pyproject_toml)
367 | or os.path.exists(versioneer_py)
368 | ):
369 | err = (
370 | "Versioneer was unable to run the project root directory. "
371 | "Versioneer requires setup.py to be executed from "
372 | "its immediate directory (like 'python setup.py COMMAND'), "
373 | "or in a way that lets it use sys.argv[0] to find the root "
374 | "(like 'python path/to/setup.py COMMAND')."
375 | )
376 | raise VersioneerBadRootError(err)
377 | try:
378 | # Certain runtime workflows (setup.py install/develop in a setuptools
379 | # tree) execute all dependencies in a single python process, so
380 | # "versioneer" may be imported multiple times, and python's shared
381 | # module-import table will cache the first one. So we can't use
382 | # os.path.dirname(__file__), as that will find whichever
383 | # versioneer.py was first imported, even in later projects.
384 | my_path = os.path.realpath(os.path.abspath(__file__))
385 | me_dir = os.path.normcase(os.path.splitext(my_path)[0])
386 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
387 | if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals():
388 | print(
389 | "Warning: build in %s is using versioneer.py from %s"
390 | % (os.path.dirname(my_path), versioneer_py)
391 | )
392 | except NameError:
393 | pass
394 | return root
395 |
396 |
397 | def get_config_from_root(root: str) -> VersioneerConfig:
398 | """Read the project setup.cfg file to determine Versioneer config."""
399 | # This might raise OSError (if setup.cfg is missing), or
400 | # configparser.NoSectionError (if it lacks a [versioneer] section), or
401 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
402 | # the top of versioneer.py for instructions on writing your setup.cfg .
403 | root_pth = Path(root)
404 | pyproject_toml = root_pth / "pyproject.toml"
405 | setup_cfg = root_pth / "setup.cfg"
406 | section: Union[Dict[str, Any], configparser.SectionProxy, None] = None
407 | if pyproject_toml.exists() and have_tomllib:
408 | try:
409 | with open(pyproject_toml, "rb") as fobj:
410 | pp = tomllib.load(fobj)
411 | section = pp["tool"]["versioneer"]
412 | except (tomllib.TOMLDecodeError, KeyError) as e:
413 | print(f"Failed to load config from {pyproject_toml}: {e}")
414 | print("Try to load it from setup.cfg")
415 | if not section:
416 | parser = configparser.ConfigParser()
417 | with open(setup_cfg) as cfg_file:
418 | parser.read_file(cfg_file)
419 | parser.get("versioneer", "VCS") # raise error if missing
420 |
421 | section = parser["versioneer"]
422 |
423 | # `cast`` really shouldn't be used, but its simplest for the
424 | # common VersioneerConfig users at the moment. We verify against
425 | # `None` values elsewhere where it matters
426 |
427 | cfg = VersioneerConfig()
428 | cfg.VCS = section["VCS"]
429 | cfg.style = section.get("style", "")
430 | cfg.versionfile_source = cast(str, section.get("versionfile_source"))
431 | cfg.versionfile_build = section.get("versionfile_build")
432 | cfg.tag_prefix = cast(str, section.get("tag_prefix"))
433 | if cfg.tag_prefix in ("''", '""', None):
434 | cfg.tag_prefix = ""
435 | cfg.parentdir_prefix = section.get("parentdir_prefix")
436 | if isinstance(section, configparser.SectionProxy):
437 | # Make sure configparser translates to bool
438 | cfg.verbose = section.getboolean("verbose")
439 | else:
440 | cfg.verbose = section.get("verbose")
441 |
442 | return cfg
443 |
444 |
445 | class NotThisMethod(Exception):
446 | """Exception raised if a method is not valid for the current scenario."""
447 |
448 |
449 | # these dictionaries contain VCS-specific tools
450 | LONG_VERSION_PY: Dict[str, str] = {}
451 | HANDLERS: Dict[str, Dict[str, Callable]] = {}
452 |
453 |
454 | def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator
455 | """Create decorator to mark a method as the handler of a VCS."""
456 |
457 | def decorate(f: Callable) -> Callable:
458 | """Store f in HANDLERS[vcs][method]."""
459 | HANDLERS.setdefault(vcs, {})[method] = f
460 | return f
461 |
462 | return decorate
463 |
464 |
465 | def run_command(
466 | commands: List[str],
467 | args: List[str],
468 | cwd: Optional[str] = None,
469 | verbose: bool = False,
470 | hide_stderr: bool = False,
471 | env: Optional[Dict[str, str]] = None,
472 | ) -> Tuple[Optional[str], Optional[int]]:
473 | """Call the given command(s)."""
474 | assert isinstance(commands, list)
475 | process = None
476 |
477 | popen_kwargs: Dict[str, Any] = {}
478 | if sys.platform == "win32":
479 | # This hides the console window if pythonw.exe is used
480 | startupinfo = subprocess.STARTUPINFO()
481 | startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
482 | popen_kwargs["startupinfo"] = startupinfo
483 |
484 | for command in commands:
485 | try:
486 | dispcmd = str([command] + args)
487 | # remember shell=False, so use git.cmd on windows, not just git
488 | process = subprocess.Popen(
489 | [command] + args,
490 | cwd=cwd,
491 | env=env,
492 | stdout=subprocess.PIPE,
493 | stderr=(subprocess.PIPE if hide_stderr else None),
494 | **popen_kwargs,
495 | )
496 | break
497 | except OSError as e:
498 | if e.errno == errno.ENOENT:
499 | continue
500 | if verbose:
501 | print("unable to run %s" % dispcmd)
502 | print(e)
503 | return None, None
504 | else:
505 | if verbose:
506 | print("unable to find command, tried %s" % (commands,))
507 | return None, None
508 | stdout = process.communicate()[0].strip().decode()
509 | if process.returncode != 0:
510 | if verbose:
511 | print("unable to run %s (error)" % dispcmd)
512 | print("stdout was %s" % stdout)
513 | return None, process.returncode
514 | return stdout, process.returncode
515 |
516 |
517 | LONG_VERSION_PY[
518 | "git"
519 | ] = r'''
520 | # This file helps to compute a version number in source trees obtained from
521 | # git-archive tarball (such as those provided by githubs download-from-tag
522 | # feature). Distribution tarballs (built by setup.py sdist) and build
523 | # directories (produced by setup.py build) will contain a much shorter file
524 | # that just contains the computed version number.
525 |
526 | # This file is released into the public domain.
527 | # Generated by versioneer-0.29
528 | # https://github.com/python-versioneer/python-versioneer
529 |
530 | """Git implementation of _version.py."""
531 |
532 | import errno
533 | import os
534 | import re
535 | import subprocess
536 | import sys
537 | from typing import Any, Callable, Dict, List, Optional, Tuple
538 | import functools
539 |
540 |
541 | def get_keywords() -> Dict[str, str]:
542 | """Get the keywords needed to look up the version information."""
543 | # these strings will be replaced by git during git-archive.
544 | # setup.py/versioneer.py will grep for the variable names, so they must
545 | # each be defined on a line of their own. _version.py will just call
546 | # get_keywords().
547 | git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
548 | git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
549 | git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
550 | keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
551 | return keywords
552 |
553 |
554 | class VersioneerConfig:
555 | """Container for Versioneer configuration parameters."""
556 |
557 | VCS: str
558 | style: str
559 | tag_prefix: str
560 | parentdir_prefix: str
561 | versionfile_source: str
562 | verbose: bool
563 |
564 |
565 | def get_config() -> VersioneerConfig:
566 | """Create, populate and return the VersioneerConfig() object."""
567 | # these strings are filled in when 'setup.py versioneer' creates
568 | # _version.py
569 | cfg = VersioneerConfig()
570 | cfg.VCS = "git"
571 | cfg.style = "%(STYLE)s"
572 | cfg.tag_prefix = "%(TAG_PREFIX)s"
573 | cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
574 | cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
575 | cfg.verbose = False
576 | return cfg
577 |
578 |
579 | class NotThisMethod(Exception):
580 | """Exception raised if a method is not valid for the current scenario."""
581 |
582 |
583 | LONG_VERSION_PY: Dict[str, str] = {}
584 | HANDLERS: Dict[str, Dict[str, Callable]] = {}
585 |
586 |
587 | def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator
588 | """Create decorator to mark a method as the handler of a VCS."""
589 | def decorate(f: Callable) -> Callable:
590 | """Store f in HANDLERS[vcs][method]."""
591 | if vcs not in HANDLERS:
592 | HANDLERS[vcs] = {}
593 | HANDLERS[vcs][method] = f
594 | return f
595 | return decorate
596 |
597 |
598 | def run_command(
599 | commands: List[str],
600 | args: List[str],
601 | cwd: Optional[str] = None,
602 | verbose: bool = False,
603 | hide_stderr: bool = False,
604 | env: Optional[Dict[str, str]] = None,
605 | ) -> Tuple[Optional[str], Optional[int]]:
606 | """Call the given command(s)."""
607 | assert isinstance(commands, list)
608 | process = None
609 |
610 | popen_kwargs: Dict[str, Any] = {}
611 | if sys.platform == "win32":
612 | # This hides the console window if pythonw.exe is used
613 | startupinfo = subprocess.STARTUPINFO()
614 | startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
615 | popen_kwargs["startupinfo"] = startupinfo
616 |
617 | for command in commands:
618 | try:
619 | dispcmd = str([command] + args)
620 | # remember shell=False, so use git.cmd on windows, not just git
621 | process = subprocess.Popen([command] + args, cwd=cwd, env=env,
622 | stdout=subprocess.PIPE,
623 | stderr=(subprocess.PIPE if hide_stderr
624 | else None), **popen_kwargs)
625 | break
626 | except OSError as e:
627 | if e.errno == errno.ENOENT:
628 | continue
629 | if verbose:
630 | print("unable to run %%s" %% dispcmd)
631 | print(e)
632 | return None, None
633 | else:
634 | if verbose:
635 | print("unable to find command, tried %%s" %% (commands,))
636 | return None, None
637 | stdout = process.communicate()[0].strip().decode()
638 | if process.returncode != 0:
639 | if verbose:
640 | print("unable to run %%s (error)" %% dispcmd)
641 | print("stdout was %%s" %% stdout)
642 | return None, process.returncode
643 | return stdout, process.returncode
644 |
645 |
646 | def versions_from_parentdir(
647 | parentdir_prefix: str,
648 | root: str,
649 | verbose: bool,
650 | ) -> Dict[str, Any]:
651 | """Try to determine the version from the parent directory name.
652 |
653 | Source tarballs conventionally unpack into a directory that includes both
654 | the project name and a version string. We will also support searching up
655 | two directory levels for an appropriately named parent directory
656 | """
657 | rootdirs = []
658 |
659 | for _ in range(3):
660 | dirname = os.path.basename(root)
661 | if dirname.startswith(parentdir_prefix):
662 | return {"version": dirname[len(parentdir_prefix):],
663 | "full-revisionid": None,
664 | "dirty": False, "error": None, "date": None}
665 | rootdirs.append(root)
666 | root = os.path.dirname(root) # up a level
667 |
668 | if verbose:
669 | print("Tried directories %%s but none started with prefix %%s" %%
670 | (str(rootdirs), parentdir_prefix))
671 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
672 |
673 |
674 | @register_vcs_handler("git", "get_keywords")
675 | def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
676 | """Extract version information from the given file."""
677 | # the code embedded in _version.py can just fetch the value of these
678 | # keywords. When used from setup.py, we don't want to import _version.py,
679 | # so we do it with a regexp instead. This function is not used from
680 | # _version.py.
681 | keywords: Dict[str, str] = {}
682 | try:
683 | with open(versionfile_abs, "r") as fobj:
684 | for line in fobj:
685 | if line.strip().startswith("git_refnames ="):
686 | mo = re.search(r'=\s*"(.*)"', line)
687 | if mo:
688 | keywords["refnames"] = mo.group(1)
689 | if line.strip().startswith("git_full ="):
690 | mo = re.search(r'=\s*"(.*)"', line)
691 | if mo:
692 | keywords["full"] = mo.group(1)
693 | if line.strip().startswith("git_date ="):
694 | mo = re.search(r'=\s*"(.*)"', line)
695 | if mo:
696 | keywords["date"] = mo.group(1)
697 | except OSError:
698 | pass
699 | return keywords
700 |
701 |
702 | @register_vcs_handler("git", "keywords")
703 | def git_versions_from_keywords(
704 | keywords: Dict[str, str],
705 | tag_prefix: str,
706 | verbose: bool,
707 | ) -> Dict[str, Any]:
708 | """Get version information from git keywords."""
709 | if "refnames" not in keywords:
710 | raise NotThisMethod("Short version file found")
711 | date = keywords.get("date")
712 | if date is not None:
713 | # Use only the last line. Previous lines may contain GPG signature
714 | # information.
715 | date = date.splitlines()[-1]
716 |
717 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
718 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
719 | # -like" string, which we must then edit to make compliant), because
720 | # it's been around since git-1.5.3, and it's too difficult to
721 | # discover which version we're using, or to work around using an
722 | # older one.
723 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
724 | refnames = keywords["refnames"].strip()
725 | if refnames.startswith("$Format"):
726 | if verbose:
727 | print("keywords are unexpanded, not using")
728 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
729 | refs = {r.strip() for r in refnames.strip("()").split(",")}
730 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
731 | # just "foo-1.0". If we see a "tag: " prefix, prefer those.
732 | TAG = "tag: "
733 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
734 | if not tags:
735 | # Either we're using git < 1.8.3, or there really are no tags. We use
736 | # a heuristic: assume all version tags have a digit. The old git %%d
737 | # expansion behaves like git log --decorate=short and strips out the
738 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish
739 | # between branches and tags. By ignoring refnames without digits, we
740 | # filter out many common branch names like "release" and
741 | # "stabilization", as well as "HEAD" and "master".
742 | tags = {r for r in refs if re.search(r'\d', r)}
743 | if verbose:
744 | print("discarding '%%s', no digits" %% ",".join(refs - tags))
745 | if verbose:
746 | print("likely tags: %%s" %% ",".join(sorted(tags)))
747 | for ref in sorted(tags):
748 | # sorting will prefer e.g. "2.0" over "2.0rc1"
749 | if ref.startswith(tag_prefix):
750 | r = ref[len(tag_prefix):]
751 | # Filter out refs that exactly match prefix or that don't start
752 | # with a number once the prefix is stripped (mostly a concern
753 | # when prefix is '')
754 | if not re.match(r'\d', r):
755 | continue
756 | if verbose:
757 | print("picking %%s" %% r)
758 | return {"version": r,
759 | "full-revisionid": keywords["full"].strip(),
760 | "dirty": False, "error": None,
761 | "date": date}
762 | # no suitable tags, so version is "0+unknown", but full hex is still there
763 | if verbose:
764 | print("no suitable tags, using unknown + full revision id")
765 | return {"version": "0+unknown",
766 | "full-revisionid": keywords["full"].strip(),
767 | "dirty": False, "error": "no suitable tags", "date": None}
768 |
769 |
770 | @register_vcs_handler("git", "pieces_from_vcs")
771 | def git_pieces_from_vcs(
772 | tag_prefix: str,
773 | root: str,
774 | verbose: bool,
775 | runner: Callable = run_command
776 | ) -> Dict[str, Any]:
777 | """Get version from 'git describe' in the root of the source tree.
778 |
779 | This only gets called if the git-archive 'subst' keywords were *not*
780 | expanded, and _version.py hasn't already been rewritten with a short
781 | version string, meaning we're inside a checked out source tree.
782 | """
783 | GITS = ["git"]
784 | if sys.platform == "win32":
785 | GITS = ["git.cmd", "git.exe"]
786 |
787 | # GIT_DIR can interfere with correct operation of Versioneer.
788 | # It may be intended to be passed to the Versioneer-versioned project,
789 | # but that should not change where we get our version from.
790 | env = os.environ.copy()
791 | env.pop("GIT_DIR", None)
792 | runner = functools.partial(runner, env=env)
793 |
794 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
795 | hide_stderr=not verbose)
796 | if rc != 0:
797 | if verbose:
798 | print("Directory %%s not under git control" %% root)
799 | raise NotThisMethod("'git rev-parse --git-dir' returned error")
800 |
801 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
802 | # if there isn't one, this yields HEX[-dirty] (no NUM)
803 | describe_out, rc = runner(GITS, [
804 | "describe", "--tags", "--dirty", "--always", "--long",
805 | "--match", f"{tag_prefix}[[:digit:]]*"
806 | ], cwd=root)
807 | # --long was added in git-1.5.5
808 | if describe_out is None:
809 | raise NotThisMethod("'git describe' failed")
810 | describe_out = describe_out.strip()
811 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
812 | if full_out is None:
813 | raise NotThisMethod("'git rev-parse' failed")
814 | full_out = full_out.strip()
815 |
816 | pieces: Dict[str, Any] = {}
817 | pieces["long"] = full_out
818 | pieces["short"] = full_out[:7] # maybe improved later
819 | pieces["error"] = None
820 |
821 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
822 | cwd=root)
823 | # --abbrev-ref was added in git-1.6.3
824 | if rc != 0 or branch_name is None:
825 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
826 | branch_name = branch_name.strip()
827 |
828 | if branch_name == "HEAD":
829 | # If we aren't exactly on a branch, pick a branch which represents
830 | # the current commit. If all else fails, we are on a branchless
831 | # commit.
832 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
833 | # --contains was added in git-1.5.4
834 | if rc != 0 or branches is None:
835 | raise NotThisMethod("'git branch --contains' returned error")
836 | branches = branches.split("\n")
837 |
838 | # Remove the first line if we're running detached
839 | if "(" in branches[0]:
840 | branches.pop(0)
841 |
842 | # Strip off the leading "* " from the list of branches.
843 | branches = [branch[2:] for branch in branches]
844 | if "master" in branches:
845 | branch_name = "master"
846 | elif not branches:
847 | branch_name = None
848 | else:
849 | # Pick the first branch that is returned. Good or bad.
850 | branch_name = branches[0]
851 |
852 | pieces["branch"] = branch_name
853 |
854 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
855 | # TAG might have hyphens.
856 | git_describe = describe_out
857 |
858 | # look for -dirty suffix
859 | dirty = git_describe.endswith("-dirty")
860 | pieces["dirty"] = dirty
861 | if dirty:
862 | git_describe = git_describe[:git_describe.rindex("-dirty")]
863 |
864 | # now we have TAG-NUM-gHEX or HEX
865 |
866 | if "-" in git_describe:
867 | # TAG-NUM-gHEX
868 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
869 | if not mo:
870 | # unparsable. Maybe git-describe is misbehaving?
871 | pieces["error"] = ("unable to parse git-describe output: '%%s'"
872 | %% describe_out)
873 | return pieces
874 |
875 | # tag
876 | full_tag = mo.group(1)
877 | if not full_tag.startswith(tag_prefix):
878 | if verbose:
879 | fmt = "tag '%%s' doesn't start with prefix '%%s'"
880 | print(fmt %% (full_tag, tag_prefix))
881 | pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
882 | %% (full_tag, tag_prefix))
883 | return pieces
884 | pieces["closest-tag"] = full_tag[len(tag_prefix):]
885 |
886 | # distance: number of commits since tag
887 | pieces["distance"] = int(mo.group(2))
888 |
889 | # commit: short hex revision ID
890 | pieces["short"] = mo.group(3)
891 |
892 | else:
893 | # HEX: no tags
894 | pieces["closest-tag"] = None
895 | out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
896 | pieces["distance"] = len(out.split()) # total number of commits
897 |
898 | # commit date: see ISO-8601 comment in git_versions_from_keywords()
899 | date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip()
900 | # Use only the last line. Previous lines may contain GPG signature
901 | # information.
902 | date = date.splitlines()[-1]
903 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
904 |
905 | return pieces
906 |
907 |
908 | def plus_or_dot(pieces: Dict[str, Any]) -> str:
909 | """Return a + if we don't already have one, else return a ."""
910 | if "+" in pieces.get("closest-tag", ""):
911 | return "."
912 | return "+"
913 |
914 |
915 | def render_pep440(pieces: Dict[str, Any]) -> str:
916 | """Build up version string, with post-release "local version identifier".
917 |
918 | Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
919 | get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
920 |
921 | Exceptions:
922 | 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
923 | """
924 | if pieces["closest-tag"]:
925 | rendered = pieces["closest-tag"]
926 | if pieces["distance"] or pieces["dirty"]:
927 | rendered += plus_or_dot(pieces)
928 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
929 | if pieces["dirty"]:
930 | rendered += ".dirty"
931 | else:
932 | # exception #1
933 | rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
934 | pieces["short"])
935 | if pieces["dirty"]:
936 | rendered += ".dirty"
937 | return rendered
938 |
939 |
940 | def render_pep440_branch(pieces: Dict[str, Any]) -> str:
941 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
942 |
943 | The ".dev0" means not master branch. Note that .dev0 sorts backwards
944 | (a feature branch will appear "older" than the master branch).
945 |
946 | Exceptions:
947 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
948 | """
949 | if pieces["closest-tag"]:
950 | rendered = pieces["closest-tag"]
951 | if pieces["distance"] or pieces["dirty"]:
952 | if pieces["branch"] != "master":
953 | rendered += ".dev0"
954 | rendered += plus_or_dot(pieces)
955 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
956 | if pieces["dirty"]:
957 | rendered += ".dirty"
958 | else:
959 | # exception #1
960 | rendered = "0"
961 | if pieces["branch"] != "master":
962 | rendered += ".dev0"
963 | rendered += "+untagged.%%d.g%%s" %% (pieces["distance"],
964 | pieces["short"])
965 | if pieces["dirty"]:
966 | rendered += ".dirty"
967 | return rendered
968 |
969 |
970 | def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
971 | """Split pep440 version string at the post-release segment.
972 |
973 | Returns the release segments before the post-release and the
974 | post-release version number (or -1 if no post-release segment is present).
975 | """
976 | vc = str.split(ver, ".post")
977 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
978 |
979 |
980 | def render_pep440_pre(pieces: Dict[str, Any]) -> str:
981 | """TAG[.postN.devDISTANCE] -- No -dirty.
982 |
983 | Exceptions:
984 | 1: no tags. 0.post0.devDISTANCE
985 | """
986 | if pieces["closest-tag"]:
987 | if pieces["distance"]:
988 | # update the post release segment
989 | tag_version, post_version = pep440_split_post(pieces["closest-tag"])
990 | rendered = tag_version
991 | if post_version is not None:
992 | rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"])
993 | else:
994 | rendered += ".post0.dev%%d" %% (pieces["distance"])
995 | else:
996 | # no commits, use the tag as the version
997 | rendered = pieces["closest-tag"]
998 | else:
999 | # exception #1
1000 | rendered = "0.post0.dev%%d" %% pieces["distance"]
1001 | return rendered
1002 |
1003 |
1004 | def render_pep440_post(pieces: Dict[str, Any]) -> str:
1005 | """TAG[.postDISTANCE[.dev0]+gHEX] .
1006 |
1007 | The ".dev0" means dirty. Note that .dev0 sorts backwards
1008 | (a dirty tree will appear "older" than the corresponding clean one),
1009 | but you shouldn't be releasing software with -dirty anyways.
1010 |
1011 | Exceptions:
1012 | 1: no tags. 0.postDISTANCE[.dev0]
1013 | """
1014 | if pieces["closest-tag"]:
1015 | rendered = pieces["closest-tag"]
1016 | if pieces["distance"] or pieces["dirty"]:
1017 | rendered += ".post%%d" %% pieces["distance"]
1018 | if pieces["dirty"]:
1019 | rendered += ".dev0"
1020 | rendered += plus_or_dot(pieces)
1021 | rendered += "g%%s" %% pieces["short"]
1022 | else:
1023 | # exception #1
1024 | rendered = "0.post%%d" %% pieces["distance"]
1025 | if pieces["dirty"]:
1026 | rendered += ".dev0"
1027 | rendered += "+g%%s" %% pieces["short"]
1028 | return rendered
1029 |
1030 |
1031 | def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
1032 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
1033 |
1034 | The ".dev0" means not master branch.
1035 |
1036 | Exceptions:
1037 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
1038 | """
1039 | if pieces["closest-tag"]:
1040 | rendered = pieces["closest-tag"]
1041 | if pieces["distance"] or pieces["dirty"]:
1042 | rendered += ".post%%d" %% pieces["distance"]
1043 | if pieces["branch"] != "master":
1044 | rendered += ".dev0"
1045 | rendered += plus_or_dot(pieces)
1046 | rendered += "g%%s" %% pieces["short"]
1047 | if pieces["dirty"]:
1048 | rendered += ".dirty"
1049 | else:
1050 | # exception #1
1051 | rendered = "0.post%%d" %% pieces["distance"]
1052 | if pieces["branch"] != "master":
1053 | rendered += ".dev0"
1054 | rendered += "+g%%s" %% pieces["short"]
1055 | if pieces["dirty"]:
1056 | rendered += ".dirty"
1057 | return rendered
1058 |
1059 |
1060 | def render_pep440_old(pieces: Dict[str, Any]) -> str:
1061 | """TAG[.postDISTANCE[.dev0]] .
1062 |
1063 | The ".dev0" means dirty.
1064 |
1065 | Exceptions:
1066 | 1: no tags. 0.postDISTANCE[.dev0]
1067 | """
1068 | if pieces["closest-tag"]:
1069 | rendered = pieces["closest-tag"]
1070 | if pieces["distance"] or pieces["dirty"]:
1071 | rendered += ".post%%d" %% pieces["distance"]
1072 | if pieces["dirty"]:
1073 | rendered += ".dev0"
1074 | else:
1075 | # exception #1
1076 | rendered = "0.post%%d" %% pieces["distance"]
1077 | if pieces["dirty"]:
1078 | rendered += ".dev0"
1079 | return rendered
1080 |
1081 |
1082 | def render_git_describe(pieces: Dict[str, Any]) -> str:
1083 | """TAG[-DISTANCE-gHEX][-dirty].
1084 |
1085 | Like 'git describe --tags --dirty --always'.
1086 |
1087 | Exceptions:
1088 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
1089 | """
1090 | if pieces["closest-tag"]:
1091 | rendered = pieces["closest-tag"]
1092 | if pieces["distance"]:
1093 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
1094 | else:
1095 | # exception #1
1096 | rendered = pieces["short"]
1097 | if pieces["dirty"]:
1098 | rendered += "-dirty"
1099 | return rendered
1100 |
1101 |
1102 | def render_git_describe_long(pieces: Dict[str, Any]) -> str:
1103 | """TAG-DISTANCE-gHEX[-dirty].
1104 |
1105 | Like 'git describe --tags --dirty --always -long'.
1106 | The distance/hash is unconditional.
1107 |
1108 | Exceptions:
1109 | 1: no tags. HEX[-dirty] (note: no 'g' prefix)
1110 | """
1111 | if pieces["closest-tag"]:
1112 | rendered = pieces["closest-tag"]
1113 | rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
1114 | else:
1115 | # exception #1
1116 | rendered = pieces["short"]
1117 | if pieces["dirty"]:
1118 | rendered += "-dirty"
1119 | return rendered
1120 |
1121 |
1122 | def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
1123 | """Render the given version pieces into the requested style."""
1124 | if pieces["error"]:
1125 | return {"version": "unknown",
1126 | "full-revisionid": pieces.get("long"),
1127 | "dirty": None,
1128 | "error": pieces["error"],
1129 | "date": None}
1130 |
1131 | if not style or style == "default":
1132 | style = "pep440" # the default
1133 |
1134 | if style == "pep440":
1135 | rendered = render_pep440(pieces)
1136 | elif style == "pep440-branch":
1137 | rendered = render_pep440_branch(pieces)
1138 | elif style == "pep440-pre":
1139 | rendered = render_pep440_pre(pieces)
1140 | elif style == "pep440-post":
1141 | rendered = render_pep440_post(pieces)
1142 | elif style == "pep440-post-branch":
1143 | rendered = render_pep440_post_branch(pieces)
1144 | elif style == "pep440-old":
1145 | rendered = render_pep440_old(pieces)
1146 | elif style == "git-describe":
1147 | rendered = render_git_describe(pieces)
1148 | elif style == "git-describe-long":
1149 | rendered = render_git_describe_long(pieces)
1150 | else:
1151 | raise ValueError("unknown style '%%s'" %% style)
1152 |
1153 | return {"version": rendered, "full-revisionid": pieces["long"],
1154 | "dirty": pieces["dirty"], "error": None,
1155 | "date": pieces.get("date")}
1156 |
1157 |
1158 | def get_versions() -> Dict[str, Any]:
1159 | """Get version information or return default if unable to do so."""
1160 | # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
1161 | # __file__, we can work backwards from there to the root. Some
1162 | # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
1163 | # case we can only use expanded keywords.
1164 |
1165 | cfg = get_config()
1166 | verbose = cfg.verbose
1167 |
1168 | try:
1169 | return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
1170 | verbose)
1171 | except NotThisMethod:
1172 | pass
1173 |
1174 | try:
1175 | root = os.path.realpath(__file__)
1176 | # versionfile_source is the relative path from the top of the source
1177 | # tree (where the .git directory might live) to this file. Invert
1178 | # this to find the root from __file__.
1179 | for _ in cfg.versionfile_source.split('/'):
1180 | root = os.path.dirname(root)
1181 | except NameError:
1182 | return {"version": "0+unknown", "full-revisionid": None,
1183 | "dirty": None,
1184 | "error": "unable to find root of source tree",
1185 | "date": None}
1186 |
1187 | try:
1188 | pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
1189 | return render(pieces, cfg.style)
1190 | except NotThisMethod:
1191 | pass
1192 |
1193 | try:
1194 | if cfg.parentdir_prefix:
1195 | return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
1196 | except NotThisMethod:
1197 | pass
1198 |
1199 | return {"version": "0+unknown", "full-revisionid": None,
1200 | "dirty": None,
1201 | "error": "unable to compute version", "date": None}
1202 | '''
1203 |
1204 |
1205 | @register_vcs_handler("git", "get_keywords")
1206 | def git_get_keywords(versionfile_abs: str) -> Dict[str, str]:
1207 | """Extract version information from the given file."""
1208 | # the code embedded in _version.py can just fetch the value of these
1209 | # keywords. When used from setup.py, we don't want to import _version.py,
1210 | # so we do it with a regexp instead. This function is not used from
1211 | # _version.py.
1212 | keywords: Dict[str, str] = {}
1213 | try:
1214 | with open(versionfile_abs, "r") as fobj:
1215 | for line in fobj:
1216 | if line.strip().startswith("git_refnames ="):
1217 | mo = re.search(r'=\s*"(.*)"', line)
1218 | if mo:
1219 | keywords["refnames"] = mo.group(1)
1220 | if line.strip().startswith("git_full ="):
1221 | mo = re.search(r'=\s*"(.*)"', line)
1222 | if mo:
1223 | keywords["full"] = mo.group(1)
1224 | if line.strip().startswith("git_date ="):
1225 | mo = re.search(r'=\s*"(.*)"', line)
1226 | if mo:
1227 | keywords["date"] = mo.group(1)
1228 | except OSError:
1229 | pass
1230 | return keywords
1231 |
1232 |
1233 | @register_vcs_handler("git", "keywords")
1234 | def git_versions_from_keywords(
1235 | keywords: Dict[str, str],
1236 | tag_prefix: str,
1237 | verbose: bool,
1238 | ) -> Dict[str, Any]:
1239 | """Get version information from git keywords."""
1240 | if "refnames" not in keywords:
1241 | raise NotThisMethod("Short version file found")
1242 | date = keywords.get("date")
1243 | if date is not None:
1244 | # Use only the last line. Previous lines may contain GPG signature
1245 | # information.
1246 | date = date.splitlines()[-1]
1247 |
1248 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
1249 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
1250 | # -like" string, which we must then edit to make compliant), because
1251 | # it's been around since git-1.5.3, and it's too difficult to
1252 | # discover which version we're using, or to work around using an
1253 | # older one.
1254 | date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
1255 | refnames = keywords["refnames"].strip()
1256 | if refnames.startswith("$Format"):
1257 | if verbose:
1258 | print("keywords are unexpanded, not using")
1259 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
1260 | refs = {r.strip() for r in refnames.strip("()").split(",")}
1261 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
1262 | # just "foo-1.0". If we see a "tag: " prefix, prefer those.
1263 | TAG = "tag: "
1264 | tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)}
1265 | if not tags:
1266 | # Either we're using git < 1.8.3, or there really are no tags. We use
1267 | # a heuristic: assume all version tags have a digit. The old git %d
1268 | # expansion behaves like git log --decorate=short and strips out the
1269 | # refs/heads/ and refs/tags/ prefixes that would let us distinguish
1270 | # between branches and tags. By ignoring refnames without digits, we
1271 | # filter out many common branch names like "release" and
1272 | # "stabilization", as well as "HEAD" and "master".
1273 | tags = {r for r in refs if re.search(r"\d", r)}
1274 | if verbose:
1275 | print("discarding '%s', no digits" % ",".join(refs - tags))
1276 | if verbose:
1277 | print("likely tags: %s" % ",".join(sorted(tags)))
1278 | for ref in sorted(tags):
1279 | # sorting will prefer e.g. "2.0" over "2.0rc1"
1280 | if ref.startswith(tag_prefix):
1281 | r = ref[len(tag_prefix) :]
1282 | # Filter out refs that exactly match prefix or that don't start
1283 | # with a number once the prefix is stripped (mostly a concern
1284 | # when prefix is '')
1285 | if not re.match(r"\d", r):
1286 | continue
1287 | if verbose:
1288 | print("picking %s" % r)
1289 | return {
1290 | "version": r,
1291 | "full-revisionid": keywords["full"].strip(),
1292 | "dirty": False,
1293 | "error": None,
1294 | "date": date,
1295 | }
1296 | # no suitable tags, so version is "0+unknown", but full hex is still there
1297 | if verbose:
1298 | print("no suitable tags, using unknown + full revision id")
1299 | return {
1300 | "version": "0+unknown",
1301 | "full-revisionid": keywords["full"].strip(),
1302 | "dirty": False,
1303 | "error": "no suitable tags",
1304 | "date": None,
1305 | }
1306 |
1307 |
1308 | @register_vcs_handler("git", "pieces_from_vcs")
1309 | def git_pieces_from_vcs(
1310 | tag_prefix: str, root: str, verbose: bool, runner: Callable = run_command
1311 | ) -> Dict[str, Any]:
1312 | """Get version from 'git describe' in the root of the source tree.
1313 |
1314 | This only gets called if the git-archive 'subst' keywords were *not*
1315 | expanded, and _version.py hasn't already been rewritten with a short
1316 | version string, meaning we're inside a checked out source tree.
1317 | """
1318 | GITS = ["git"]
1319 | if sys.platform == "win32":
1320 | GITS = ["git.cmd", "git.exe"]
1321 |
1322 | # GIT_DIR can interfere with correct operation of Versioneer.
1323 | # It may be intended to be passed to the Versioneer-versioned project,
1324 | # but that should not change where we get our version from.
1325 | env = os.environ.copy()
1326 | env.pop("GIT_DIR", None)
1327 | runner = functools.partial(runner, env=env)
1328 |
1329 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose)
1330 | if rc != 0:
1331 | if verbose:
1332 | print("Directory %s not under git control" % root)
1333 | raise NotThisMethod("'git rev-parse --git-dir' returned error")
1334 |
1335 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
1336 | # if there isn't one, this yields HEX[-dirty] (no NUM)
1337 | describe_out, rc = runner(
1338 | GITS,
1339 | [
1340 | "describe",
1341 | "--tags",
1342 | "--dirty",
1343 | "--always",
1344 | "--long",
1345 | "--match",
1346 | f"{tag_prefix}[[:digit:]]*",
1347 | ],
1348 | cwd=root,
1349 | )
1350 | # --long was added in git-1.5.5
1351 | if describe_out is None:
1352 | raise NotThisMethod("'git describe' failed")
1353 | describe_out = describe_out.strip()
1354 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
1355 | if full_out is None:
1356 | raise NotThisMethod("'git rev-parse' failed")
1357 | full_out = full_out.strip()
1358 |
1359 | pieces: Dict[str, Any] = {}
1360 | pieces["long"] = full_out
1361 | pieces["short"] = full_out[:7] # maybe improved later
1362 | pieces["error"] = None
1363 |
1364 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root)
1365 | # --abbrev-ref was added in git-1.6.3
1366 | if rc != 0 or branch_name is None:
1367 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
1368 | branch_name = branch_name.strip()
1369 |
1370 | if branch_name == "HEAD":
1371 | # If we aren't exactly on a branch, pick a branch which represents
1372 | # the current commit. If all else fails, we are on a branchless
1373 | # commit.
1374 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
1375 | # --contains was added in git-1.5.4
1376 | if rc != 0 or branches is None:
1377 | raise NotThisMethod("'git branch --contains' returned error")
1378 | branches = branches.split("\n")
1379 |
1380 | # Remove the first line if we're running detached
1381 | if "(" in branches[0]:
1382 | branches.pop(0)
1383 |
1384 | # Strip off the leading "* " from the list of branches.
1385 | branches = [branch[2:] for branch in branches]
1386 | if "master" in branches:
1387 | branch_name = "master"
1388 | elif not branches:
1389 | branch_name = None
1390 | else:
1391 | # Pick the first branch that is returned. Good or bad.
1392 | branch_name = branches[0]
1393 |
1394 | pieces["branch"] = branch_name
1395 |
1396 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
1397 | # TAG might have hyphens.
1398 | git_describe = describe_out
1399 |
1400 | # look for -dirty suffix
1401 | dirty = git_describe.endswith("-dirty")
1402 | pieces["dirty"] = dirty
1403 | if dirty:
1404 | git_describe = git_describe[: git_describe.rindex("-dirty")]
1405 |
1406 | # now we have TAG-NUM-gHEX or HEX
1407 |
1408 | if "-" in git_describe:
1409 | # TAG-NUM-gHEX
1410 | mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
1411 | if not mo:
1412 | # unparsable. Maybe git-describe is misbehaving?
1413 | pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
1414 | return pieces
1415 |
1416 | # tag
1417 | full_tag = mo.group(1)
1418 | if not full_tag.startswith(tag_prefix):
1419 | if verbose:
1420 | fmt = "tag '%s' doesn't start with prefix '%s'"
1421 | print(fmt % (full_tag, tag_prefix))
1422 | pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
1423 | full_tag,
1424 | tag_prefix,
1425 | )
1426 | return pieces
1427 | pieces["closest-tag"] = full_tag[len(tag_prefix) :]
1428 |
1429 | # distance: number of commits since tag
1430 | pieces["distance"] = int(mo.group(2))
1431 |
1432 | # commit: short hex revision ID
1433 | pieces["short"] = mo.group(3)
1434 |
1435 | else:
1436 | # HEX: no tags
1437 | pieces["closest-tag"] = None
1438 | out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
1439 | pieces["distance"] = len(out.split()) # total number of commits
1440 |
1441 | # commit date: see ISO-8601 comment in git_versions_from_keywords()
1442 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
1443 | # Use only the last line. Previous lines may contain GPG signature
1444 | # information.
1445 | date = date.splitlines()[-1]
1446 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
1447 |
1448 | return pieces
1449 |
1450 |
def do_vcs_install(versionfile_source: str, ipy: Optional[str]) -> None:
    """Git-specific installation logic for Versioneer.

    For Git, this means creating/changing .gitattributes to mark _version.py
    for export-subst keyword substitution, then 'git add'-ing the touched
    files so the change is staged.
    """
    git_argv = ["git.cmd", "git.exe"] if sys.platform == "win32" else ["git"]

    files = [versionfile_source]
    if ipy:
        files.append(ipy)
    if "VERSIONEER_PEP518" not in globals():
        # Outside a PEP 518 build, versioneer.py itself should be tracked too.
        try:
            my_path = __file__
            if my_path.endswith((".pyc", ".pyo")):
                my_path = os.path.splitext(my_path)[0] + ".py"
            versioneer_file = os.path.relpath(my_path)
        except NameError:
            # Frozen/embedded interpreters may not define __file__.
            versioneer_file = "versioneer.py"
        files.append(versioneer_file)

    # Check whether .gitattributes already marks the version file.
    present = False
    try:
        with open(".gitattributes", "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                if stripped.startswith(versionfile_source):
                    if "export-subst" in stripped.split()[1:]:
                        present = True
                        break
    except OSError:
        # No .gitattributes yet; it will be created below.
        pass
    if not present:
        with open(".gitattributes", "a+") as fobj:
            fobj.write(f"{versionfile_source} export-subst\n")
        files.append(".gitattributes")
    run_command(git_argv, ["add", "--"] + files)
1487 |
1488 |
def versions_from_parentdir(
    parentdir_prefix: str,
    root: str,
    verbose: bool,
) -> Dict[str, Any]:
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory named
    PROJECT-VERSION.  Starting at *root*, inspect up to three directory
    levels (root, its parent, its grandparent) for a basename beginning
    with *parentdir_prefix* and treat the remainder as the version.
    Raises NotThisMethod if no level matches.
    """
    tried = []
    current = root
    for _ in range(3):
        basename = os.path.basename(current)
        if basename.startswith(parentdir_prefix):
            return {
                "version": basename[len(parentdir_prefix) :],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        tried.append(current)
        current = os.path.dirname(current)  # up a level

    if verbose:
        print(
            "Tried directories %s but none started with prefix %s"
            % (str(tried), parentdir_prefix)
        )
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
1521 |
1522 |
1523 | SHORT_VERSION_PY = """
1524 | # This file was generated by 'versioneer.py' (0.29) from
1525 | # revision-control system data, or from the parent directory name of an
1526 | # unpacked source archive. Distribution tarballs contain a pre-generated copy
1527 | # of this file.
1528 |
1529 | import json
1530 |
1531 | version_json = '''
1532 | %s
1533 | ''' # END VERSION_JSON
1534 |
1535 |
1536 | def get_versions():
1537 | return json.loads(version_json)
1538 | """
1539 |
1540 |
def versions_from_file(filename: str) -> Dict[str, Any]:
    """Try to determine the version from _version.py if present.

    Looks for the JSON payload written by write_to_version_file and
    decodes it.  Raises NotThisMethod when the file is unreadable or does
    not contain a version_json block.
    """
    try:
        with open(filename) as f:
            contents = f.read()
    except OSError:
        raise NotThisMethod("unable to read _version.py")
    # Accept both Unix and Windows line endings in the generated file.
    for newline in ("\n", "\r\n"):
        mo = re.search(
            r"version_json = '''" + newline + r"(.*)''' # END VERSION_JSON",
            contents,
            re.M | re.S,
        )
        if mo:
            return json.loads(mo.group(1))
    raise NotThisMethod("no version_json in _version.py")
1558 |
1559 |
def write_to_version_file(filename: str, versions: Dict[str, Any]) -> None:
    """Write the given version number to the given _version.py file.

    Serializes *versions* as stable (sorted-key) JSON and embeds it into
    the SHORT_VERSION_PY template, overwriting *filename*.
    """
    payload = json.dumps(
        versions, sort_keys=True, indent=1, separators=(",", ": ")
    )
    with open(filename, "w") as f:
        f.write(SHORT_VERSION_PY % payload)

    print("set %s to '%s'" % (filename, versions["version"]))
1567 |
1568 |
def plus_or_dot(pieces: Dict[str, Any]) -> str:
    """Return a + if we don't already have one, else return a .

    A PEP 440 local-version separator: once the closest tag already
    contains a '+', further segments must be joined with '.'.
    """
    closest = pieces.get("closest-tag", "")
    return "." if "+" in closest else "+"
1574 |
1575 |
def render_pep440(pieces: Dict[str, Any]) -> str:
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if tag:
        parts = [tag]
        if pieces["distance"] or pieces["dirty"]:
            parts.append(plus_or_dot(pieces))
            parts.append("%d.g%s" % (pieces["distance"], pieces["short"]))
            if pieces["dirty"]:
                parts.append(".dirty")
    else:
        # exception #1: never tagged
        parts = ["0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])]
        if pieces["dirty"]:
            parts.append(".dirty")
    return "".join(parts)
1598 |
1599 |
def render_pep440_branch(pieces: Dict[str, Any]) -> str:
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch. Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if tag:
        parts = [tag]
        if pieces["distance"] or pieces["dirty"]:
            if pieces["branch"] != "master":
                parts.append(".dev0")
            parts.append(plus_or_dot(pieces))
            parts.append("%d.g%s" % (pieces["distance"], pieces["short"]))
            if pieces["dirty"]:
                parts.append(".dirty")
    else:
        # exception #1: never tagged
        parts = ["0"]
        if pieces["branch"] != "master":
            parts.append(".dev0")
        parts.append("+untagged.%d.g%s" % (pieces["distance"], pieces["short"]))
        if pieces["dirty"]:
            parts.append(".dirty")
    return "".join(parts)
1627 |
1628 |
def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
    """Split pep440 version string at the post-release segment.

    Returns the release segment before any ``.post`` marker together with
    the post-release number: ``None`` when *ver* has no post-release
    segment, and ``0`` when the segment is present but bare (``1.0.post``).
    (The previous docstring incorrectly said -1 was returned; callers such
    as render_pep440_pre test for ``None``.)
    """
    halves = ver.split(".post")
    # The conditional binds only to the second tuple element.
    return halves[0], int(halves[1] or 0) if len(halves) == 2 else None
1637 |
1638 |
def render_pep440_pre(pieces: Dict[str, Any]) -> str:
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post0.dev%d" % pieces["distance"]
    if not pieces["distance"]:
        # Sitting exactly on the tag: the tag itself is the version.
        return tag
    # Between tags: bump (or start) the post-release segment and mark the
    # distance as a dev release.
    base, post = pep440_split_post(tag)
    if post is not None:
        return base + ".post%d.dev%d" % (post + 1, pieces["distance"])
    return base + ".post0.dev%d" % pieces["distance"]
1661 |
1662 |
def render_pep440_post(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        out = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            out += ".dev0"
        return out + "+g%s" % pieces["short"]
    out = tag
    if pieces["distance"] or pieces["dirty"]:
        out += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            out += ".dev0"
        out += plus_or_dot(pieces)
        out += "g%s" % pieces["short"]
    return out
1688 |
1689 |
def render_pep440_post_branch(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["branch"] != "master":
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
            if pieces["dirty"]:
                rendered += ".dirty"
        return rendered
    # exception #1: never tagged
    rendered = "0.post%d" % pieces["distance"]
    if pieces["branch"] != "master":
        rendered += ".dev0"
    rendered += "+g%s" % pieces["short"]
    if pieces["dirty"]:
        rendered += ".dirty"
    return rendered
1717 |
1718 |
def render_pep440_old(pieces: Dict[str, Any]) -> str:
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        suffix = ""
        if pieces["distance"] or pieces["dirty"]:
            suffix = ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                suffix += ".dev0"
        return tag + suffix
    # exception #1: never tagged
    version = "0.post%d" % pieces["distance"]
    return version + ".dev0" if pieces["dirty"] else version
1739 |
1740 |
def render_git_describe(pieces: Dict[str, Any]) -> str:
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"]:
            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1: fall back to the bare short hash
        rendered = pieces["short"]
    return rendered + "-dirty" if pieces["dirty"] else rendered
1759 |
1760 |
def render_git_describe_long(pieces: Dict[str, Any]) -> str:
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: fall back to the bare short hash
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
1779 |
1780 |
def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]:
    """Render the given version pieces into the requested style.

    Dispatches to one of the render_* helpers; raises ValueError for an
    unrecognized style.  A pieces dict carrying an "error" is passed
    through untouched as an "unknown" version.
    """
    if pieces["error"]:
        # An upstream step already failed; propagate the error verbatim.
        return {
            "version": "unknown",
            "full-revisionid": pieces.get("long"),
            "dirty": None,
            "error": pieces["error"],
            "date": None,
        }

    if not style or style == "default":
        style = "pep440"  # the default

    renderers = {
        "pep440": render_pep440,
        "pep440-branch": render_pep440_branch,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-post-branch": render_pep440_post_branch,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)

    return {
        "version": rendered,
        "full-revisionid": pieces["long"],
        "dirty": pieces["dirty"],
        "error": None,
        "date": pieces.get("date"),
    }
1821 |
1822 |
class VersioneerBadRootError(Exception):
    """Raised when the project root is unknown or lacks the expected files."""
1826 |
def get_versions(verbose: bool = False) -> Dict[str, Any]:
    """Get the project version from whatever source is available.

    Returns dict with two keys: 'version' and 'full'.

    Consults, in order: expanded git-archive keywords, an already-written
    _version.py, the VCS itself, and finally the parent-directory name.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    # Sanity-check the configuration before consulting any handlers.
    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or bool(cfg.verbose)  # `bool()` used to avoid `None`
    assert (
        cfg.versionfile_source is not None
    ), "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # Extract the version from the first source that works. This is meant to
    # cover developers using a source checkout, users of a tarball created by
    # 'setup.py sdist', and users of a tarball/zipball created by
    # 'git archive' or github's download-from-tag feature (or the equivalent
    # in other VCSes).

    # Attempt 1: expanded keywords inside the version file.
    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            found = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % found)
            return found
        except NotThisMethod:
            pass

    # Attempt 2: a previously generated _version.py.
    try:
        found = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, found))
        return found
    except NotThisMethod:
        pass

    # Attempt 3: ask the VCS directly (e.g. 'git describe').
    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            found = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % found)
            return found
        except NotThisMethod:
            pass

    # Attempt 4: infer from the parent directory name.
    try:
        if cfg.parentdir_prefix:
            found = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % found)
            return found
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    return {
        "version": "0+unknown",
        "full-revisionid": None,
        "dirty": None,
        "error": "unable to compute version",
        "date": None,
    }
1906 |
1907 |
def get_version() -> str:
    """Get the short version string for this project."""
    info = get_versions()
    return info["version"]
1911 |
1912 |
def get_cmdclass(cmdclass: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Get the custom setuptools subclasses used by Versioneer.

    If the package uses a different cmdclass (e.g. one from numpy), it
    should be provided as an argument.
    """
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
    # this fixes the "python setup.py develop" case (also 'install' and
    # 'easy_install .'), in which subdependencies of the main project are
    # built (using setup.py bdist_egg) in the same python process. Assume
    # a main project A and a dependency B, which use different versions
    # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
    # sys.modules by the time B's setup.py is executed, causing B to run
    # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
    # sandbox that restores sys.modules to its pre-build state, so the
    # parent is protected against the child's "import versioneer". By
    # removing ourselves from sys.modules here, before the child build
    # happens, we protect the child from the parent's versioneer too.
    # Also see https://github.com/python-versioneer/python-versioneer/issues/52

    # Start from the caller's cmdclass (copied, so we never mutate theirs).
    cmds = {} if cmdclass is None else cmdclass.copy()

    # we add "version" to setuptools
    from setuptools import Command

    class cmd_version(Command):
        """``setup.py version``: print the computed version information."""

        description = "report generated version string"
        user_options: List[Tuple[str, str, str]] = []
        boolean_options: List[str] = []

        def initialize_options(self) -> None:
            pass

        def finalize_options(self) -> None:
            pass

        def run(self) -> None:
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            print(" date: %s" % vers.get("date"))
            if vers["error"]:
                print(" error: %s" % vers["error"])

    cmds["version"] = cmd_version

    # we override "build_py" in setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?
    #  pip install:
    #   copies source tree to a tempdir before running egg_info/etc
    #   if .git isn't copied too, 'git describe' will fail
    #   then does setup.py bdist_wheel, or sometimes setup.py install
    #  setup.py egg_info -> ?

    # pip install -e . and setuptool/editable_wheel will invoke build_py
    # but the build_py command is not expected to copy any files.

    # we override different "build_py" commands for both environments
    if "build_py" in cmds:
        _build_py: Any = cmds["build_py"]
    else:
        from setuptools.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        """build_py that rewrites the copied _version.py with frozen values."""

        def run(self) -> None:
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            if getattr(self, "editable_mode", False):
                # During editable installs `.py` and data files are
                # not copied to build_lib
                return
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

    cmds["build_py"] = cmd_build_py

    if "build_ext" in cmds:
        _build_ext: Any = cmds["build_ext"]
    else:
        from setuptools.command.build_ext import build_ext as _build_ext

    class cmd_build_ext(_build_ext):
        """build_ext that refreshes _version.py in build_lib, when it exists."""

        def run(self) -> None:
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_ext.run(self)
            if self.inplace:
                # build_ext --inplace will only build extensions in
                # build/lib<..> dir with no _version.py to write to.
                # As in place builds will already have a _version.py
                # in the module dir, we do not need to write one.
                return
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if not cfg.versionfile_build:
                return
            target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build)
            if not os.path.exists(target_versionfile):
                print(
                    f"Warning: {target_versionfile} does not exist, skipping "
                    "version update. This can happen if you are running build_ext "
                    "without first running build_py."
                )
                return
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile, versions)

    cmds["build_ext"] = cmd_build_ext

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe  # type: ignore

        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        #   "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
        #   "product_version": versioneer.get_version(),
        #   ...

        class cmd_build_exe(_build_exe):
            """build_exe that temporarily freezes _version.py to static values."""

            def run(self) -> None:
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _build_exe.run(self)
                # Restore the template-based _version.py after the build.
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(
                        LONG
                        % {
                            "DOLLAR": "$",
                            "STYLE": cfg.style,
                            "TAG_PREFIX": cfg.tag_prefix,
                            "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                            "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        }
                    )

        cmds["build_exe"] = cmd_build_exe
        del cmds["build_py"]

    if "py2exe" in sys.modules:  # py2exe enabled?
        try:
            from py2exe.setuptools_buildexe import py2exe as _py2exe  # type: ignore
        except ImportError:
            from py2exe.distutils_buildexe import py2exe as _py2exe  # type: ignore

        class cmd_py2exe(_py2exe):
            """py2exe that temporarily freezes _version.py to static values."""

            def run(self) -> None:
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _py2exe.run(self)
                # Restore the template-based _version.py after the build.
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(
                        LONG
                        % {
                            "DOLLAR": "$",
                            "STYLE": cfg.style,
                            "TAG_PREFIX": cfg.tag_prefix,
                            "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                            "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        }
                    )

        cmds["py2exe"] = cmd_py2exe

    # sdist farms its file list building out to egg_info
    if "egg_info" in cmds:
        _egg_info: Any = cmds["egg_info"]
    else:
        from setuptools.command.egg_info import egg_info as _egg_info

    class cmd_egg_info(_egg_info):
        """egg_info that forces versioneer.py and _version.py into SOURCES.txt."""

        def find_sources(self) -> None:
            # egg_info.find_sources builds the manifest list and writes it
            # in one shot
            super().find_sources()

            # Modify the filelist and normalize it
            root = get_root()
            cfg = get_config_from_root(root)
            self.filelist.append("versioneer.py")
            if cfg.versionfile_source:
                # There are rare cases where versionfile_source might not be
                # included by default, so we must be explicit
                self.filelist.append(cfg.versionfile_source)
            self.filelist.sort()
            self.filelist.remove_duplicates()

            # The write method is hidden in the manifest_maker instance that
            # generated the filelist and was thrown away
            # We will instead replicate their final normalization (to unicode,
            # and POSIX-style paths)
            from setuptools import unicode_utils

            normalized = [
                unicode_utils.filesys_decode(f).replace(os.sep, "/")
                for f in self.filelist.files
            ]

            manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
            with open(manifest_filename, "w") as fobj:
                fobj.write("\n".join(normalized))

    cmds["egg_info"] = cmd_egg_info

    # we override different "sdist" commands for both environments
    if "sdist" in cmds:
        _sdist: Any = cmds["sdist"]
    else:
        from setuptools.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        """sdist that rewrites _version.py in the release tree with frozen values."""

        def run(self) -> None:
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir: str, files: List[str]) -> None:
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(
                target_versionfile, self._versioneer_generated_versions
            )

    cmds["sdist"] = cmd_sdist

    return cmds
2178 |
2179 |
# Error text printed (to stderr) when setup.cfg lacks a usable
# [versioneer] section; see do_setup().
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix =
 parentdir_prefix = myproject-

You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
       cmdclass=versioneer.get_cmdclass(), ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

# Commented-out [versioneer] template appended to setup.cfg when no
# configuration is found, so the user has something to fill in.
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""

# __init__.py boilerplate emitted by older Versioneer releases; detected
# verbatim by do_setup() so it can be upgraded in place.
OLD_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""

# Current __init__.py boilerplate; {0} is the version module's name
# (the basename of versionfile_source, normally "_version").
INIT_PY_SNIPPET = """
from . import {0}
__version__ = {0}.get_versions()['version']
"""
2227 |
2228 |
def do_setup() -> int:
    """Do main VCS-independent setup function for installing Versioneer."""
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (OSError, configparser.NoSectionError, configparser.NoOptionError) as exc:
        # A missing file or a missing [versioneer] section gets a sample
        # config appended so the user has something concrete to edit.
        if isinstance(exc, (OSError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg", file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as cfg_file:
                cfg_file.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    # (Re)generate the project's _version.py from the embedded template.
    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as out:
        out.write(
            LONG_VERSION_PY[cfg.VCS]
            % {
                "DOLLAR": "$",
                "STYLE": cfg.style,
                "TAG_PREFIX": cfg.tag_prefix,
                "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                "VERSIONFILE_SOURCE": cfg.versionfile_source,
            }
        )

    # Wire __version__ into the package __init__.py, if one exists.
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
    maybe_ipy: Optional[str] = ipy
    if not os.path.exists(ipy):
        print(" %s doesn't exist, ok" % ipy)
        maybe_ipy = None
    else:
        try:
            with open(ipy, "r") as init_file:
                old = init_file.read()
        except OSError:
            old = ""
        module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0]
        snippet = INIT_PY_SNIPPET.format(module)
        if OLD_SNIPPET in old:
            # Upgrade boilerplate left behind by older Versioneer releases.
            print(" replacing boilerplate in %s" % ipy)
            with open(ipy, "w") as init_file:
                init_file.write(old.replace(OLD_SNIPPET, snippet))
        elif snippet not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as init_file:
                init_file.write(snippet)
        else:
            print(" %s unmodified" % ipy)

    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(cfg.versionfile_source, maybe_ipy)
    return 0
2285 |
2286 |
def scan_setup_py() -> int:
    """Validate the contents of setup.py against Versioneer's expectations."""
    # Substrings we expect to find somewhere in setup.py, keyed by a label.
    expected = {
        "import": "import versioneer",
        "cmdclass": "versioneer.get_cmdclass()",
        "get_version": "versioneer.get_version()",
    }
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as f:
        for line in f:
            for label, needle in expected.items():
                if needle in line:
                    found.add(label)
            # Old-style configuration assigned attributes on the module.
            if "versioneer.VCS" in line or "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print("        cmdclass=versioneer.get_cmdclass(),  ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors
2322 |
2323 |
def setup_command() -> NoReturn:
    """Set up Versioneer and exit with appropriate error code."""
    # Non-zero from either step means failure; collapse to exit status 1.
    failures = do_setup() + scan_setup_py()
    sys.exit(1 if failures else 0)
2329 |
2330 |
if __name__ == "__main__":
    # Guard against a bare "python versioneer.py" invocation, which used to
    # die with an uninformative IndexError on sys.argv[1].
    if len(sys.argv) < 2:
        print("usage: versioneer.py setup", file=sys.stderr)
        sys.exit(1)
    cmd = sys.argv[1]
    # "setup" is the only supported subcommand; anything else is a no-op,
    # matching the original behavior.
    if cmd == "setup":
        setup_command()
2335 |
--------------------------------------------------------------------------------