├── .clang-format ├── .github └── workflows │ ├── build-publish.yml │ └── python-tests.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.rst ├── LICENSE ├── MANIFEST.in ├── README.md ├── codecov.yml ├── docs ├── changelog.rst ├── conf.py ├── index.rst ├── maintaining.rst ├── requirements.txt └── zoneinfo.rst ├── lib └── zoneinfo_module.c ├── licenses └── LICENSE_APACHE ├── pyproject.toml ├── scripts ├── benchmark.py ├── build_manylinux_wheels.sh ├── check_tag.py ├── tag_release.sh └── update_test_data.py ├── setup.cfg ├── setup.py ├── src └── backports │ ├── __init__.py │ └── zoneinfo │ ├── __init__.py │ ├── __init__.pyi │ ├── _common.py │ ├── _tzpath.py │ ├── _version.py │ ├── _zoneinfo.py │ └── py.typed ├── tests ├── __init__.py ├── _support.py ├── data │ └── zoneinfo_data.json ├── test_zoneinfo.py ├── test_zoneinfo_property.py └── typing_example.py └── tox.ini /.clang-format: -------------------------------------------------------------------------------- 1 | # A clang-format style that approximates Python's PEP 7 2 | # Useful for IDE integration 3 | BasedOnStyle: Google 4 | AlwaysBreakAfterReturnType: All 5 | AllowShortIfStatementsOnASingleLine: false 6 | AlignAfterOpenBracket: Align 7 | BreakBeforeBraces: Stroustrup 8 | ColumnLimit: 79 9 | DerivePointerAlignment: false 10 | IndentWidth: 4 11 | Language: Cpp 12 | PointerAlignment: Right 13 | ReflowComments: true 14 | SortIncludes: false 15 | SpaceBeforeParens: ControlStatements 16 | SpacesInParentheses: false 17 | TabWidth: 4 18 | UseTab: Never 19 | -------------------------------------------------------------------------------- /.github/workflows/build-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow is used to build all the wheels and source distributions for 2 | # the project and, on tags and releases, upload them. 
It is enabled on every 3 | # commit to ensure that the wheels can be built on all platforms, but releases 4 | # are only triggered in two situations: 5 | # 6 | # 1. When a tag is created, the workflow will upload the package to 7 | # test.pypi.org. 8 | # 2. When a release is made, the workflow will upload the package to pypi.org. 9 | # 10 | # It is done this way until PyPI has draft reviews, to allow for a two-stage 11 | # upload with a chance for manual intervention before the final publication. 12 | name: Build and release 13 | 14 | on: 15 | push: 16 | release: 17 | types: [created] 18 | 19 | jobs: 20 | build_sdist: 21 | runs-on: 'ubuntu-22.04' 22 | name: Build sdist 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Setup python 26 | uses: actions/setup-python@v4 27 | with: 28 | python-version: '3.8' 29 | - name: Install dependencies 30 | run: | 31 | python3 -m pip install --upgrade pip 32 | pip3 install 'tox>=4.0' 33 | - name: Build sdist 34 | run: tox -e build -- -s 35 | - uses: actions/upload-artifact@v3 36 | with: 37 | name: dist 38 | path: dist 39 | 40 | build_manylinux_wheels: 41 | runs-on: 'ubuntu-latest' 42 | strategy: 43 | fail-fast: false 44 | matrix: 45 | platform: 46 | - 'manylinux1_x86_64' 47 | - 'manylinux1_i686' 48 | - 'manylinux2014_aarch64' 49 | name: Build a ${{ matrix.platform }} for ${{ matrix.python_tag }} 50 | steps: 51 | - uses: actions/checkout@v3 52 | - uses: docker/setup-qemu-action@v2 53 | if: ${{ matrix.platform == 'manylinux2014_aarch64' }} 54 | name: Set up QEMU 55 | - name: Install docker image 56 | run: | 57 | DOCKER_IMAGE="quay.io/pypa/${{ matrix.platform }}" 58 | echo "DOCKER_IMAGE=$DOCKER_IMAGE" >> $GITHUB_ENV 59 | docker pull $DOCKER_IMAGE 60 | - name: Build wheels 61 | env: 62 | PYTHON_TAGS: "cp36-cp36m cp37-cp37m cp38-cp38" 63 | PRE_CMD: ${{ matrix.platform == 'manylinux1_i686' && 'linux32' || '' }} 64 | run: | 65 | echo "$name" 66 | docker run --rm \ 67 | -e PLAT=${{ matrix.platform }} \ 68 | -e 
PYTHON_TAGS="$PYTHON_TAGS" \ 69 | -v `pwd`:/io "$DOCKER_IMAGE" \ 70 | $PRE_CMD \ 71 | /io/scripts/build_manylinux_wheels.sh 72 | - uses: actions/upload-artifact@v3 73 | with: 74 | name: dist 75 | path: dist 76 | 77 | build_wheel: 78 | runs-on: ${{ matrix.os }} 79 | strategy: 80 | fail-fast: false 81 | matrix: 82 | python_version: [ '3.7', '3.8' ] 83 | arch: [ 'x86', 'x64' ] 84 | os: 85 | - 'windows-2022' 86 | - 'macos-11' 87 | include: 88 | - { python_version: "3.6", os: "windows-2019", arch: "x86" } 89 | - { python_version: "3.6", os: "windows-2019", arch: "x64" } 90 | - { python_version: "3.6", os: "macos-10.15", arch: "x64" } 91 | exclude: 92 | - os: 'macos-11' 93 | arch: 'x86' 94 | 95 | name: 'Build wheel: ${{ matrix.os }} ${{ matrix.python_version }} (${{ matrix.arch }})' 96 | steps: 97 | - uses: actions/checkout@v3 98 | - name: Add msbuild to PATH 99 | uses: microsoft/setup-msbuild@v1.3.1 100 | if: startsWith(matrix.os, 'windows-') 101 | - name: Setup python 102 | uses: actions/setup-python@v4 103 | with: 104 | python-version: ${{ matrix.python_version }} 105 | architecture: ${{ matrix.arch }} 106 | - name: Install dependencies 107 | run: | 108 | python -m pip install -U pip 109 | pip install -U 'tox>=3.18' 110 | - name: Create tox environment 111 | run: tox -e build --notest 112 | - name: Build wheel 113 | env: 114 | CL: ${{ startsWith(matrix.os, 'windows-') && '/WX' || '' }} 115 | run: | 116 | tox -e build -- -w 117 | - uses: actions/upload-artifact@v3 118 | with: 119 | name: dist 120 | path: dist 121 | 122 | deploy: 123 | runs-on: 'ubuntu-22.04' 124 | needs: [build_sdist, build_wheel, build_manylinux_wheels] 125 | steps: 126 | - uses: actions/checkout@v3 127 | - name: Set up Python 128 | uses: actions/setup-python@v4 129 | with: 130 | python-version: '3.8' 131 | - uses: actions/download-artifact@v3 132 | with: 133 | name: dist 134 | path: dist 135 | - name: Install dependencies 136 | run: | 137 | python3 -m pip install --upgrade pip 138 | pip3 install 
'tox>=4.0' 139 | - name: Check that version and tag matches 140 | if: >- 141 | startsWith(github.ref, 'refs/tags') 142 | run: tox -e check-version-tag 143 | - name: Run twine check 144 | run: tox -e build-check 145 | - name: Publish package 146 | if: >- 147 | (github.event_name == 'push' && startsWith(github.ref, 'refs/tags')) || 148 | (github.event_name == 'release') 149 | env: 150 | TWINE_USERNAME: "__token__" 151 | run: | 152 | if [[ "$GITHUB_EVENT_NAME" == "push" ]]; then 153 | export TWINE_REPOSITORY_URL="https://test.pypi.org/legacy/" 154 | export TWINE_PASSWORD="${{ secrets.TEST_PYPI_UPLOAD_TOKEN }}" 155 | elif [[ "$GITHUB_EVENT_NAME" == "release" ]]; then 156 | export TWINE_REPOSITORY="pypi" 157 | export TWINE_PASSWORD="${{ secrets.PYPI_UPLOAD_TOKEN }}" 158 | else 159 | echo "Unknown event name: ${GITHUB_EVENT_NAME}" 160 | exit 1 161 | fi 162 | 163 | tox -e release 164 | -------------------------------------------------------------------------------- /.github/workflows/python-tests.yml: -------------------------------------------------------------------------------- 1 | name: Python package 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | tests: 7 | runs-on: ${{ matrix.os }} 8 | strategy: 9 | fail-fast: false 10 | matrix: 11 | python-version: ["3.7", "3.8", "pypy3.8"] 12 | os: ["ubuntu-22.04", "windows-2022", "macos-11"] 13 | tzdata_extras: ["", "tzdata"] 14 | include: 15 | - { python-version: "3.6", os: "windows-2019", tzdata_extras: "" } 16 | - { python-version: "3.6", os: "windows-2019", tzdata_extras: "tzdata" } 17 | - { python-version: "3.6", os: "macos-10.15", tzdata_extras: "" } 18 | - { python-version: "3.6", os: "macos-10.15", tzdata_extras: "tzdata" } 19 | - { python-version: "3.6", os: "ubuntu-20.04", tzdata_extras: "" } 20 | - { python-version: "3.6", os: "ubuntu-20.04", tzdata_extras: "tzdata" } 21 | env: 22 | TOXENV: py 23 | TEST_EXTRAS_TOX: ${{ matrix.tzdata_extras }} 24 | 25 | steps: 26 | - uses: actions/checkout@v3 27 | - name: ${{ 
matrix.python-version }} - ${{ matrix.os }} 28 | uses: actions/setup-python@v4 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | - name: Install dependencies 32 | run: | 33 | python -m pip install --upgrade pip 'tox>=3.18' 34 | - name: Run tests 35 | run: | 36 | python -m tox 37 | 38 | c_coverage: 39 | runs-on: ${{ matrix.os }} 40 | strategy: 41 | fail-fast: false 42 | matrix: 43 | python-version: ["3.7", "3.8"] 44 | os: ["ubuntu-22.04"] 45 | include: 46 | - { python-version: "3.6", os: "ubuntu-20.04" } 47 | env: 48 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 49 | steps: 50 | - uses: actions/checkout@v3 51 | - name: C coverage - ${{ matrix.os }} 52 | uses: actions/setup-python@v4 53 | with: 54 | python-version: ${{ matrix.python-version }} 55 | - name: Install dependencies 56 | run: | 57 | python -m pip install --upgrade pip 'tox>=3.18' 58 | - name: Run tests 59 | run: | 60 | tox -e gcov 61 | 62 | build: 63 | runs-on: "ubuntu-latest" 64 | strategy: 65 | fail-fast: false 66 | matrix: 67 | cc: ["gcc", "clang"] 68 | env: 69 | CC: ${{ matrix.cc }} 70 | CFLAGS: > 71 | -Wall 72 | -Werror 73 | -Wextra 74 | -Wno-unused-result 75 | -Wno-unused-parameter 76 | -Wno-missing-field-initializers 77 | steps: 78 | - uses: actions/checkout@v3 79 | - name: ${{ matrix.toxenv }} 80 | uses: actions/setup-python@v4 81 | with: 82 | python-version: 3.8 83 | - name: Install tox 84 | run: python -m pip install --upgrade pip 'tox>=3.18' 85 | - name: Build 86 | run: tox -e build,build-check 87 | 88 | other: 89 | runs-on: "ubuntu-latest" 90 | strategy: 91 | fail-fast: false 92 | matrix: 93 | toxenv: ["lint", "docs", "mypy"] 94 | env: 95 | TOXENV: ${{ matrix.toxenv }} 96 | 97 | steps: 98 | - uses: actions/checkout@v3 99 | - name: ${{ matrix.toxenv }} 100 | uses: actions/setup-python@v4 101 | with: 102 | python-version: 3.8 103 | - name: Install tox 104 | run: python -m pip install --upgrade pip 'tox>=4.0' 105 | - name: Run action 106 | run: | 107 | if [[ $TOXENV == "build" ]]; 
then 108 | CFLAGS="" 109 | CFLAGS+=" -Wall" 110 | CFLAGS+=" -Werror" 111 | CFLAGS+=" -Wextra" 112 | CFLAGS+=" -Wno-unused-result" 113 | CFLAGS+=" -Wno-unused-parameter" 114 | CFLAGS+=" -Wno-missing-field-initializers" 115 | export CFLAGS="${CFLAGS}" 116 | TOXENV="build,build-check" 117 | fi 118 | 119 | if [[ $TOXENV == "docs" ]]; then 120 | tox -- -j auto -bhtml -W -n -a --keep-going 121 | else 122 | tox 123 | fi 124 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *.so 5 | 6 | # Distribution / packaging 7 | build/ 8 | dist/ 9 | *.egg-info/ 10 | .eggs 11 | 12 | # Sphinx documentation 13 | docs/_build/ 14 | docs/_output/ 15 | 16 | # Testing and coverage 17 | .cache 18 | .hypothesis_cache 19 | .hypothesis 20 | .mypy_cache 21 | .pytest_cache 22 | .tox 23 | .pytype 24 | *.gcda 25 | *.gcno 26 | *.gcov 27 | 28 | # Virtual environments 29 | venv/ 30 | .venv/ 31 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/psf/black 3 | rev: 23.1.0 4 | hooks: 5 | - id: black 6 | language_version: python3.8 7 | 8 | - repo: https://github.com/pre-commit/mirrors-isort 9 | rev: v5.6.4 10 | hooks: 11 | - id: isort 12 | additional_dependencies: [toml] 13 | language_version: python3.8 14 | 15 | - repo: https://github.com/pycqa/pylint 16 | rev: "v2.16.3" 17 | hooks: 18 | - id: pylint 19 | 20 | 21 | - repo: https://github.com/pre-commit/pre-commit-hooks 22 | rev: v3.3.0 23 | hooks: 24 | - id: trailing-whitespace 25 | - id: end-of-file-fixer 26 | - id: debug-statements 27 | 28 | - repo: https://github.com/doublify/pre-commit-clang-format 29 | rev: 62302476d0da01515660132d76902359bed0f782 30 | hooks: 31 | - id: 
clang-format 32 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | Version 0.2.1 (2020-06-18) 2 | ========================== 3 | 4 | - Fixed an issue where the C implementation of ``ZoneInfo.__init_subclass__`` 5 | was not a classmethod, causing errors when attempting to subclass 6 | ``ZoneInfo`` (:gh:`82`, :gh-pr:`83`). 7 | 8 | 9 | Version 0.2.0 (2020-05-29) 10 | ========================== 11 | 12 | - Added support for PyPy 3.6 (:gh-pr:`74`); when installed on PyPy, the library 13 | will not use the C extension, since benchmarks indicate that the pure Python 14 | implementation is faster. 15 | 16 | 17 | Version 0.1.0 (2020-05-26) 18 | ========================== 19 | 20 | This is the first public release of ``backports.zoneinfo``. It contains all the 21 | features from the ``zoneinfo`` release in Python 3.9.0b1, with the following 22 | changes: 23 | 24 | - Added support for Python 3.6, 3.7 and 3.8 (:gh-pr:`69`, :gh-pr:`70`). 25 | - The module is in the ``backports`` namespace rather than ``zoneinfo``. 26 | - There is no support for compile-time configuration of ``TZPATH``. 27 | - Fixed use-after-free in the ``module_free`` function (:bpo:`40705`, 28 | :gh-pr:`69`). 29 | - Minor refactoring to the C extension to avoid compiler warnings 30 | (:bpo:`40686`, :bpo:`40714`, :cpython-pr:`20342`, :gh-pr:`72`). 31 | - Removed unused imports, unused variables and other minor de-linting 32 | (:gh-pr:`71`). 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache Software License 2.0 2 | 3 | Copyright (c) 2020, Paul Ganssle (Google) 4 | 5 | Licensed under the Apache License, Version 2.0 (the "License"); 6 | you may not use this file except in compliance with the License. 
7 | You may obtain a copy of the License at 8 | 9 | http://www.apache.org/licenses/LICENSE-2.0 10 | 11 | Unless required by applicable law or agreed to in writing, software 12 | distributed under the License is distributed on an "AS IS" BASIS, 13 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | See the License for the specific language governing permissions and 15 | limitations under the License. 16 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE *.rst *.toml *.yml *.yaml *.ini 2 | recursive-include licenses * 3 | graft .github 4 | 5 | # Stubs 6 | include src/backports/zoneinfo/py.typed 7 | recursive-include src *.pyi 8 | 9 | # Tests 10 | include tox.ini 11 | recursive-include tests *.py 12 | 13 | # Documentation 14 | recursive-include docs *.png 15 | recursive-include docs *.svg 16 | recursive-include docs *.py 17 | recursive-include docs *.rst 18 | prune docs/_build 19 | prune docs/_output 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `backports.zoneinfo`: Backport of the standard library module `zoneinfo` 2 | 3 | This package was originally the reference implementation for [PEP 615](https://www.python.org/dev/peps/pep-0615/), which proposes support for the IANA time zone database in the standard library, and now serves as a backport to Python 3.6+ (including PyPy). 4 | 5 | This exposes the `backports.zoneinfo` module, which is a backport of the [`zoneinfo`](https://docs.python.org/3.9/library/zoneinfo.html#module-zoneinfo) module. The backport's documentation can be found [on readthedocs](https://zoneinfo.readthedocs.io/en/latest/). 
6 | 7 | The module uses the system time zone data if available, and falls back to the [`tzdata`](https://tzdata.readthedocs.io/en/latest/) package (available [on PyPI](https://pypi.org/project/tzdata/)) if installed. 8 | 9 | ## Installation and depending on this library 10 | 11 | This module is called [`backports.zoneinfo`](https://pypi.org/project/backports.zoneinfo) on PyPI. To install it in your local environment, use: 12 | 13 | ``` 14 | pip install backports.zoneinfo 15 | ``` 16 | 17 | Or (particularly on Windows), you can also use the `tzdata` extra (which basically just declares a dependency on `tzdata`, so this doesn't actually save you any typing 😅): 18 | 19 | ``` 20 | pip install backports.zoneinfo[tzdata] 21 | ``` 22 | 23 | If you want to use this in your application, it is best to use [PEP 508 environment markers](https://www.python.org/dev/peps/pep-0508/#environment-markers) to declare a dependency *conditional on the Python version*: 24 | 25 | ``` 26 | backports.zoneinfo;python_version<"3.9" 27 | ``` 28 | 29 | Support for `backports.zoneinfo` in Python 3.9+ is currently minimal, since it is expected that you would use the standard library `zoneinfo` module instead. 30 | 31 | ## Use 32 | 33 | The `backports.zoneinfo` module should be a drop-in replacement for the Python 3.9 standard library module `zoneinfo`. 
If you do not support anything earlier than Python 3.9, **you do not need this library**; if you are supporting Python 3.6+, you may want to use this idiom to "fall back" to ``backports.zoneinfo``: 34 | 35 | ```python 36 | try: 37 | import zoneinfo 38 | except ImportError: 39 | from backports import zoneinfo 40 | ``` 41 | 42 | To get access to time zones with this module, construct a `ZoneInfo` object and attach it to your datetime: 43 | 44 | ```python 45 | >>> from backports.zoneinfo import ZoneInfo 46 | >>> from datetime import datetime, timedelta, timezone 47 | >>> dt = datetime(1992, 3, 1, tzinfo=ZoneInfo("Europe/Minsk")) 48 | >>> print(dt) 49 | 1992-03-01 00:00:00+02:00 50 | >>> print(dt.utcoffset()) 51 | 2:00:00 52 | >>> print(dt.tzname()) 53 | EET 54 | ``` 55 | 56 | Arithmetic works as expected without the need for a "normalization" step: 57 | 58 | ```python 59 | >>> dt += timedelta(days=90) 60 | >>> print(dt) 61 | 1992-05-30 00:00:00+03:00 62 | >>> dt.utcoffset() 63 | datetime.timedelta(seconds=10800) 64 | >>> dt.tzname() 65 | 'EEST' 66 | ``` 67 | 68 | Ambiguous and imaginary times are handled using the `fold` attribute added in [PEP 495](https://www.python.org/dev/peps/pep-0495/): 69 | 70 | ```python 71 | >>> dt = datetime(2020, 11, 1, 1, tzinfo=ZoneInfo("America/Chicago")) 72 | >>> print(dt) 73 | 2020-11-01 01:00:00-05:00 74 | >>> print(dt.replace(fold=1)) 75 | 2020-11-01 01:00:00-06:00 76 | 77 | >>> UTC = timezone.utc 78 | >>> print(dt.astimezone(UTC)) 79 | 2020-11-01 06:00:00+00:00 80 | >>> print(dt.replace(fold=1).astimezone(UTC)) 81 | 2020-11-01 07:00:00+00:00 82 | ``` 83 | 84 | # Contributing 85 | 86 | Currently we are not accepting contributions to this repository because we have not put the CLA in place and we would like to avoid complicating the process of adoption into the standard library. 
Contributions to [CPython](https://github.com/python/cpython) will eventually be backported to this repository — see [the Python developer's guide](https://devguide.python.org/) for more information on how to contribute to CPython. 87 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | --- 2 | comment: false 3 | coverage: 4 | status: 5 | patch: 6 | default: 7 | target: "100" 8 | paths: 9 | - "tests/" 10 | - "src/" 11 | project: 12 | default: 13 | target: "100" 14 | paths: 15 | - "tests/" 16 | - "src/" 17 | -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. Changelog transcluded from the changelog at the repo root 2 | 3 | ========= 4 | Changelog 5 | ========= 6 | 7 | .. include:: ../CHANGELOG.rst 8 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. 
For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Project information ----------------------------------------------------- 8 | 9 | project = "backports.zoneinfo" 10 | author = "Paul Ganssle" 11 | copyright = f"2020, {author}" 12 | 13 | 14 | # Read the version information from the _version.py file 15 | def get_version(): 16 | import ast 17 | 18 | version_line = None 19 | with open("../src/backports/zoneinfo/_version.py") as f: 20 | for line in f: 21 | if line.startswith("__version__ ="): 22 | version_line = line 23 | break 24 | 25 | if version_line is None: 26 | raise ValueError("Version not found!") 27 | 28 | version_str = version_line.split("=", 1)[1].strip() 29 | 30 | return ast.literal_eval(version_str) 31 | 32 | 33 | version = get_version() 34 | 35 | # -- General configuration --------------------------------------------------- 36 | 37 | # Add any Sphinx extension module names here, as strings. They can be 38 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 39 | # ones. 40 | extensions = [ 41 | "sphinx.ext.intersphinx", 42 | "sphinx.ext.extlinks", 43 | ] 44 | 45 | # Add any paths that contain templates here, relative to this directory. 46 | templates_path = ["_templates"] 47 | 48 | # List of patterns, relative to source directory, that match files and 49 | # directories to ignore when looking for source files. 50 | # This pattern also affects html_static_path and html_extra_path. 51 | exclude_patterns = ["_output", "_build", "Thumbs.db", ".DS_Store"] 52 | 53 | 54 | # -- Options for HTML output ------------------------------------------------- 55 | 56 | # The theme to use for HTML and HTML Help pages. See the documentation for 57 | # a list of builtin themes. 58 | # 59 | html_theme = "nature" 60 | 61 | # Add any paths that contain custom static files (such as style sheets) here, 62 | # relative to this directory. 
They are copied after the builtin static files, 63 | # so a file named "default.css" will overwrite the builtin "default.css". 64 | html_static_path = [] 65 | 66 | # For cross-links to other documentation 67 | intersphinx_mapping = {"python": ("https://docs.python.org/3.9", None)} 68 | 69 | # This config value must be a dictionary of external sites, mapping unique 70 | # short alias names to a base URL and a prefix. 71 | # See http://sphinx-doc.org/ext/extlinks.html 72 | _repo = "https://github.com/pganssle/zoneinfo/" 73 | extlinks = { 74 | "gh": (_repo + "issues/%s", "GH-%s"), 75 | "gh-pr": (_repo + "pull/%s", "GH-%s"), 76 | "pypi": ("https://pypi.org/project/%s", None), 77 | "bpo": ("https://bugs.python.org/issue%s", "bpo-%s"), 78 | "cpython-pr": ( 79 | "https://github.com/python/cpython/pull/%s", 80 | "CPython PR #%s", 81 | ), 82 | } 83 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | ``backports.zoneinfo``: A backport of the ``zoneinfo`` module 2 | ============================================================= 3 | 4 | This was originally the reference implementation for :pep:`615`, which adds 5 | support for the IANA time zone database to the Python standard library, but now 6 | serves as a backport of the module to Python 3.6+ (including PyPy). 7 | 8 | The upstream documentation can be found at :mod:`zoneinfo`. A mirror of the 9 | documentation pinned to the version supported in the backport can be found at 10 | :mod:`backports.zoneinfo`. 11 | 12 | This is the documentation for version |version|. 13 | 14 | Documentation 15 | ============= 16 | Contents: 17 | 18 | .. 
toctree:: 19 | :maxdepth: 1 20 | 21 | zoneinfo 22 | changelog 23 | maintaining 24 | 25 | 26 | Indices and tables 27 | ================== 28 | 29 | * :ref:`genindex` 30 | * :ref:`modindex` 31 | * :ref:`search` 32 | -------------------------------------------------------------------------------- /docs/maintaining.rst: -------------------------------------------------------------------------------- 1 | Maintainer's Guide 2 | ================== 3 | 4 | Although this was the original implementation of the ``zoneinfo`` module, after 5 | Python 3.9, it is now a backport, and to the extent that there is a "canonical" 6 | repository, the `CPython repository `_ has a 7 | stronger claim than this one. Accepting outside PRs against this repository is 8 | difficult because we are not set up to collect CLAs for CPython. It is easier 9 | to accept PRs against CPython and import them here if possible. 10 | 11 | The code layout is very different between the two, and unfortunately (partially 12 | because of the different layouts, and the different module names), the code has 13 | diverged, so keeping the two in sync is not as simple as copy-pasting one into 14 | the other. For now, the two will need to be kept in sync manually. 15 | 16 | 17 | Development environment 18 | ----------------------- 19 | 20 | Maintenance scripts, releases, and tests are orchestrated using |tox|_ 21 | environments to manage the requirements of each script. The details of each 22 | environment can be found in the ``tox.ini`` file in the repository root. 23 | 24 | The repository also has pre-commit configured to automatically enforce various 25 | code formatting rules on commit. To use it, install `pre-commit 26 | `_ and run ``pre-commit install`` in the repository 27 | root to install the git commit hooks. 28 | 29 | 30 | Making a release 31 | ---------------- 32 | 33 | Releases are automated via the ``build-release.yml`` GitHub Actions workflow. 
34 | The project is built on every push; whenever a *tag* is pushed, the build 35 | artifacts are released to `Test PyPI `_, and when a 36 | GitHub release is made, the project is built and released to `PyPI 37 | `_ (this is a workaround for the lack of "draft releases" 38 | on PyPI, and the two actions can be unified when that feature is added). 39 | 40 | To make a release: 41 | 42 | 1. Update the version number in ``src/backports/zoneinfo/_version.py`` and 43 | make a PR (if you want to be cautious, start with a ``.devN`` release 44 | intended only for PyPI). 45 | 2. Tag the repository with the current version – you can use the 46 | ``scripts/tag_release.sh`` script in the repository root to source the 47 | version automatically from the current module version. 48 | 3. Push the tag to GitHub (e.g. ``git push upstream 0.1.0.dev0``). This will 49 | trigger a release to Test PyPI. The PR does not need to be merged at this 50 | point if you are only planning to release to TestPyPI, but any "test only" 51 | tags should be deleted when the process is complete. 52 | 4. Wait for the GitHub action to succeed, then check the results on 53 | https://test.pypi.org/project/backports.zoneinfo . 54 | 5. If everything looks good, go into the GitHub repository's `"releases" tab 55 | `_ and click "Draft a new 56 | release"; type the name of the tag into the box, fill out the remainder of 57 | the form, and click "Publish". (Only do this step for non-dev releases). 58 | 6. Check that the release action has succeeded, then check that everything 59 | looks OK on https://pypi.org/project/backports.zoneinfo/ . 60 | 61 | If there's a problem with the release, make a post release by appending 62 | ``.postN`` to the current version, e.g. ``0.1.1`` → ``0.1.1.post0``. If the 63 | problem is sufficiently serious, yank the broken version. 64 | 65 | .. Links 66 | .. |tox| replace:: ``tox`` 67 | .. 
_tox: https://tox.readthedocs.io/en/latest/ 68 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx>=3.0.0 2 | -------------------------------------------------------------------------------- /docs/zoneinfo.rst: -------------------------------------------------------------------------------- 1 | :mod:`backports.zoneinfo` --- IANA time zone support 2 | ==================================================== 3 | 4 | .. module:: backports.zoneinfo 5 | :synopsis: IANA time zone support 6 | 7 | .. moduleauthor:: Paul Ganssle 8 | .. sectionauthor:: Paul Ganssle 9 | 10 | -------------- 11 | 12 | The :mod:`~backports.zoneinfo` module provides a concrete time zone 13 | implementation to support the IANA time zone database as originally specified 14 | in :pep:`615`. By default, :mod:`~backports.zoneinfo` uses the system's time 15 | zone data if available; if no system time zone data is available, the library 16 | will fall back to using the first-party `tzdata`_ package available on PyPI. 17 | 18 | .. seealso:: 19 | 20 | Module: :mod:`zoneinfo` 21 | The standard library module ``zoneinfo``, of which this is a backport. 22 | 23 | Module: :mod:`datetime` 24 | Provides the :class:`~datetime.time` and :class:`~datetime.datetime` 25 | types with which the :class:`ZoneInfo` class is designed to be used. 26 | 27 | Package `tzdata`_ 28 | First-party package maintained by the CPython core developers to supply 29 | time zone data via PyPI. 
30 | 31 | 32 | Using ``ZoneInfo`` 33 | ------------------ 34 | 35 | :class:`ZoneInfo` is a concrete implementation of the :class:`datetime.tzinfo` 36 | abstract base class, and is intended to be attached to ``tzinfo``, either via 37 | the constructor, the :meth:`datetime.replace ` 38 | method or :meth:`datetime.astimezone `:: 39 | 40 | >>> from backports.zoneinfo import ZoneInfo 41 | >>> from datetime import datetime, timedelta 42 | 43 | >>> dt = datetime(2020, 10, 31, 12, tzinfo=ZoneInfo("America/Los_Angeles")) 44 | >>> print(dt) 45 | 2020-10-31 12:00:00-07:00 46 | 47 | >>> dt.tzname() 48 | 'PDT' 49 | 50 | Datetimes constructed in this way are compatible with datetime arithmetic and 51 | handle daylight saving time transitions with no further intervention:: 52 | 53 | >>> dt_add = dt + timedelta(days=1) 54 | 55 | >>> print(dt_add) 56 | 2020-11-01 12:00:00-08:00 57 | 58 | >>> dt_add.tzname() 59 | 'PST' 60 | 61 | These time zones also support the :attr:`~datetime.datetime.fold` attribute 62 | introduced in :pep:`495`. 
During offset transitions which induce ambiguous 63 | times (such as a daylight saving time to standard time transition), the offset 64 | from *before* the transition is used when ``fold=0``, and the offset *after* 65 | the transition is used when ``fold=1``, for example:: 66 | 67 | >>> dt = datetime(2020, 11, 1, 1, tzinfo=ZoneInfo("America/Los_Angeles")) 68 | >>> print(dt) 69 | 2020-11-01 01:00:00-07:00 70 | 71 | >>> print(dt.replace(fold=1)) 72 | 2020-11-01 01:00:00-08:00 73 | 74 | When converting from another time zone, the fold will be set to the correct 75 | value:: 76 | 77 | >>> from datetime import timezone 78 | >>> LOS_ANGELES = ZoneInfo("America/Los_Angeles") 79 | >>> dt_utc = datetime(2020, 11, 1, 8, tzinfo=timezone.utc) 80 | 81 | >>> # Before the PDT -> PST transition 82 | >>> print(dt_utc.astimezone(LOS_ANGELES)) 83 | 2020-11-01 01:00:00-07:00 84 | 85 | >>> # After the PDT -> PST transition 86 | >>> print((dt_utc + timedelta(hours=1)).astimezone(LOS_ANGELES)) 87 | 2020-11-01 01:00:00-08:00 88 | 89 | Data sources 90 | ------------ 91 | 92 | The ``zoneinfo`` module does not directly provide time zone data, and instead 93 | pulls time zone information from the system time zone database or the 94 | first-party PyPI package `tzdata`_, if available. Some systems, including 95 | notably Windows systems, do not have an IANA database available, and so for 96 | projects targeting cross-platform compatibility that require time zone data, it 97 | is recommended to declare a dependency on tzdata. If neither system data nor 98 | tzdata are available, all calls to :class:`ZoneInfo` will raise 99 | :exc:`ZoneInfoNotFoundError`. 100 | 101 | .. _zoneinfo_data_configuration: 102 | 103 | Configuring the data sources 104 | **************************** 105 | 106 | When ``ZoneInfo(key)`` is called, the constructor first searches the 107 | directories specified in :data:`TZPATH` for a file matching ``key``, and on 108 | failure looks for a match in the tzdata package. 
This behavior can be 109 | configured in three ways: 110 | 111 | 1. The default :data:`TZPATH` when not otherwise specified can be configured at 112 | :ref:`compile time `. 113 | 2. :data:`TZPATH` can be configured using :ref:`an environment variable 114 | `. 115 | 3. At :ref:`runtime `, the search path can be 116 | manipulated using the :func:`reset_tzpath` function. 117 | 118 | .. _zoneinfo_data_compile_time_config: 119 | 120 | Compile-time configuration 121 | ^^^^^^^^^^^^^^^^^^^^^^^^^^ 122 | 123 | The default :data:`TZPATH` includes several common deployment locations for the 124 | time zone database (except on Windows, where there are no "well-known" 125 | locations for time zone data). On POSIX systems, downstream distributors and 126 | those building Python from source who know where their system 127 | time zone data is deployed may change the default time zone path by specifying 128 | the compile-time option ``TZPATH`` (or, more likely, the ``configure`` flag 129 | ``--with-tzpath``), which should be a string delimited by :data:`os.pathsep`. 130 | 131 | On all platforms, the configured value is available as the ``TZPATH`` key in 132 | :func:`sysconfig.get_config_var`. 133 | 134 | .. note:: 135 | 136 | This option is currently not available in the backport. 137 | 138 | .. _zoneinfo_data_environment_var: 139 | 140 | Environment configuration 141 | ^^^^^^^^^^^^^^^^^^^^^^^^^ 142 | 143 | When initializing :data:`TZPATH` (either at import time or whenever 144 | :func:`reset_tzpath` is called with no arguments), the ``zoneinfo`` module will 145 | use the environment variable ``PYTHONTZPATH``, if it exists, to set the search 146 | path. 147 | 148 | .. envvar:: PYTHONTZPATH 149 | 150 | This is an :data:`os.pathsep`-separated string containing the time zone 151 | search path to use. It must consist of only absolute rather than relative 152 | paths. 
Relative components specified in ``PYTHONTZPATH`` will not be used, 153 | but otherwise the behavior when a relative path is specified is 154 | implementation-defined; CPython will raise :exc:`InvalidTZPathWarning`, but 155 | other implementations are free to silently ignore the erroneous component 156 | or raise an exception. 157 | 158 | To set the system to ignore the system data and use the tzdata package 159 | instead, set ``PYTHONTZPATH=""``. 160 | 161 | .. _zoneinfo_data_runtime_config: 162 | 163 | Runtime configuration 164 | ^^^^^^^^^^^^^^^^^^^^^ 165 | 166 | The TZ search path can also be configured at runtime using the 167 | :func:`reset_tzpath` function. This is generally not an advisable operation, 168 | though it is reasonable to use it in test functions that require the use of a 169 | specific time zone path (or require disabling access to the system time zones). 170 | 171 | 172 | The ``ZoneInfo`` class 173 | ---------------------- 174 | 175 | .. class:: ZoneInfo(key) 176 | 177 | A concrete :class:`datetime.tzinfo` subclass that represents an IANA time 178 | zone specified by the string ``key``. Calls to the primary constructor will 179 | always return objects that compare identically; put another way, barring 180 | cache invalidation via :meth:`ZoneInfo.clear_cache`, for all values of 181 | ``key``, the following assertion will always be true: 182 | 183 | .. code-block:: python 184 | 185 | a = ZoneInfo(key) 186 | b = ZoneInfo(key) 187 | assert a is b 188 | 189 | ``key`` must be in the form of a relative, normalized POSIX path, with no 190 | up-level references. The constructor will raise :exc:`ValueError` if a 191 | non-conforming key is passed. 192 | 193 | If no file matching ``key`` is found, the constructor will raise 194 | :exc:`ZoneInfoNotFoundError`. 195 | 196 | 197 | The ``ZoneInfo`` class has two alternate constructors: 198 | 199 | .. 
classmethod:: ZoneInfo.from_file(fobj, /, key=None) 200 | 201 | Constructs a ``ZoneInfo`` object from a file-like object returning bytes 202 | (e.g. a file opened in binary mode or an :class:`io.BytesIO` object). 203 | Unlike the primary constructor, this always constructs a new object. 204 | 205 | The ``key`` parameter sets the name of the zone for the purposes of 206 | :py:meth:`~object.__str__` and :py:meth:`~object.__repr__`. 207 | 208 | Objects created via this constructor cannot be pickled (see `pickling`_). 209 | 210 | .. classmethod:: ZoneInfo.no_cache(key) 211 | 212 | An alternate constructor that bypasses the constructor's cache. It is 213 | identical to the primary constructor, but returns a new object on each 214 | call. This is most likely to be useful for testing or demonstration 215 | purposes, but it can also be used to create a system with a different cache 216 | invalidation strategy. 217 | 218 | Objects created via this constructor will also bypass the cache of a 219 | deserializing process when unpickled. 220 | 221 | .. TODO: Add "See `cache_behavior`_" reference when that section is ready. 222 | 223 | .. caution:: 224 | 225 | Using this constructor may change the semantics of your datetimes in 226 | surprising ways, only use it if you know that you need to. 227 | 228 | The following class methods are also available: 229 | 230 | .. classmethod:: ZoneInfo.clear_cache(*, only_keys=None) 231 | 232 | A method for invalidating the cache on the ``ZoneInfo`` class. If no 233 | arguments are passed, all caches are invalidated and the next call to 234 | the primary constructor for each key will return a new instance. 235 | 236 | If an iterable of key names is passed to the ``only_keys`` parameter, only 237 | the specified keys will be removed from the cache. Keys passed to 238 | ``only_keys`` but not found in the cache are ignored. 239 | 240 | .. TODO: Add "See `cache_behavior`_" reference when that section is ready. 241 | 242 | .. 
warning:: 243 | 244 | Invoking this function may change the semantics of datetimes using 245 | ``ZoneInfo`` in surprising ways; this modifies process-wide global state 246 | and thus may have wide-ranging effects. Only use it if you know that you 247 | need to. 248 | 249 | The class has one attribute: 250 | 251 | .. attribute:: ZoneInfo.key 252 | 253 | This is a read-only :term:`attribute` that returns the value of ``key`` 254 | passed to the constructor, which should be a lookup key in the IANA time 255 | zone database (e.g. ``America/New_York``, ``Europe/Paris`` or 256 | ``Asia/Tokyo``). 257 | 258 | For zones constructed from file without specifying a ``key`` parameter, 259 | this will be set to ``None``. 260 | 261 | .. note:: 262 | 263 | Although it is a somewhat common practice to expose these to end users, 264 | these values are designed to be primary keys for representing the 265 | relevant zones and not necessarily user-facing elements. Projects like 266 | CLDR (the Unicode Common Locale Data Repository) can be used to get 267 | more user-friendly strings from these keys. 268 | 269 | String representations 270 | ********************** 271 | 272 | The string representation returned when calling :py:class:`str` on a 273 | :class:`ZoneInfo` object defaults to using the :attr:`ZoneInfo.key` attribute (see 274 | the note on usage in the attribute documentation):: 275 | 276 | >>> zone = ZoneInfo("Pacific/Kwajalein") 277 | >>> str(zone) 278 | 'Pacific/Kwajalein' 279 | 280 | >>> dt = datetime(2020, 4, 1, 3, 15, tzinfo=zone) 281 | >>> f"{dt.isoformat()} [{dt.tzinfo}]" 282 | '2020-04-01T03:15:00+12:00 [Pacific/Kwajalein]' 283 | 284 | For objects constructed from a file without specifying a ``key`` parameter, 285 | ``str`` falls back to calling :func:`repr`. ``ZoneInfo``'s ``repr`` is 286 | implementation-defined and not necessarily stable between versions, but it is 287 | guaranteed not to be a valid ``ZoneInfo`` key. 288 | 289 | .. 
_pickling: 290 | 291 | Pickle serialization 292 | ******************** 293 | 294 | Rather than serializing all transition data, ``ZoneInfo`` objects are 295 | serialized by key, and ``ZoneInfo`` objects constructed from files (even those 296 | with a value for ``key`` specified) cannot be pickled. 297 | 298 | The behavior of a ``ZoneInfo`` file depends on how it was constructed: 299 | 300 | 1. ``ZoneInfo(key)``: When constructed with the primary constructor, a 301 | ``ZoneInfo`` object is serialized by key, and when deserialized, the 302 | deserializing process uses the primary constructor, and thus these 303 | are expected to be the same object as other references to the same time 304 | zone. For example, if ``europe_berlin_pkl`` is a string containing a pickle 305 | constructed from ``ZoneInfo("Europe/Berlin")``, one would expect the 306 | following behavior: 307 | 308 | .. code-block:: 309 | 310 | >>> a = ZoneInfo("Europe/Berlin") 311 | >>> b = pickle.loads(europe_berlin_pkl) 312 | >>> a is b 313 | True 314 | 315 | 2. ``ZoneInfo.no_cache(key)``: When constructed from the cache-bypassing 316 | constructor, the ``ZoneInfo`` object is also serialized by key, but when 317 | deserialized, the deserializing process uses the cache bypassing 318 | constructor. If ``europe_berlin_pkl_nc`` is a string containing a pickle 319 | constructed from ``ZoneInfo.no_cache("Europe/Berlin")``, one would expect 320 | the following behavior: 321 | 322 | .. code-block:: 323 | 324 | >>> a = ZoneInfo("Europe/Berlin") 325 | >>> b = pickle.loads(europe_berlin_pkl_nc) 326 | >>> a is b 327 | False 328 | 329 | 3. ``ZoneInfo.from_file(fobj, /, key=None)``: When constructed from a file, the 330 | ``ZoneInfo`` object raises an exception on pickling. 
If an end user wants to 331 | pickle a ``ZoneInfo`` constructed from a file, it is recommended that they 332 | use a wrapper type or a custom serialization function: either serializing by 333 | key or storing the contents of the file object and serializing that. 334 | 335 | This method of serialization requires that the time zone data for the required 336 | key be available on both the serializing and deserializing side, similar to the 337 | way that references to classes and functions are expected to exist in both the 338 | serializing and deserializing environments. It also means that no guarantees 339 | are made about the consistency of results when unpickling a ``ZoneInfo`` 340 | pickled in an environment with a different version of the time zone data. 341 | 342 | Functions 343 | --------- 344 | 345 | .. function:: available_timezones() 346 | 347 | Get a set containing all the valid keys for IANA time zones available 348 | anywhere on the time zone path. This is recalculated on every call to the 349 | function. 350 | 351 | This function only includes canonical zone names and does not include 352 | "special" zones such as those under the ``posix/`` and ``right/`` 353 | directories, or the ``posixrules`` zone. 354 | 355 | .. caution:: 356 | 357 | This function may open a large number of files, as the best way to 358 | determine if a file on the time zone path is a valid time zone is to 359 | read the "magic string" at the beginning. 360 | 361 | .. note:: 362 | 363 | These values are not designed to be exposed to end-users; for user 364 | facing elements, applications should use something like CLDR (the 365 | Unicode Common Locale Data Repository) to get more user-friendly 366 | strings. See also the cautionary note on :attr:`ZoneInfo.key`. 367 | 368 | .. function:: reset_tzpath(to=None) 369 | 370 | Sets or resets the time zone search path (:data:`TZPATH`) for the module. 371 | When called with no arguments, :data:`TZPATH` is set to the default value. 
372 | 373 | Calling ``reset_tzpath`` will not invalidate the :class:`ZoneInfo` cache, 374 | and so calls to the primary ``ZoneInfo`` constructor will only use the new 375 | ``TZPATH`` in the case of a cache miss. 376 | 377 | The ``to`` parameter must be a :term:`sequence` of strings or 378 | :class:`os.PathLike` and not a string, all of which must be absolute paths. 379 | :exc:`ValueError` will be raised if something other than an absolute path 380 | is passed. 381 | 382 | 383 | Globals 384 | ------- 385 | 386 | .. data:: TZPATH 387 | 388 | A read-only sequence representing the time zone search path -- when 389 | constructing a ``ZoneInfo`` from a key, the key is joined to each entry in 390 | the ``TZPATH``, and the first file found is used. 391 | 392 | ``TZPATH`` may contain only absolute paths, never relative paths, 393 | regardless of how it is configured. 394 | 395 | The object that ``zoneinfo.TZPATH`` points to may change in response to a 396 | call to :func:`reset_tzpath`, so it is recommended to use 397 | ``zoneinfo.TZPATH`` rather than importing ``TZPATH`` from ``zoneinfo`` or 398 | assigning a long-lived variable to ``zoneinfo.TZPATH``. 399 | 400 | For more information on configuring the time zone search path, see 401 | :ref:`zoneinfo_data_configuration`. 402 | 403 | Exceptions and warnings 404 | ----------------------- 405 | 406 | .. exception:: ZoneInfoNotFoundError 407 | 408 | Raised when construction of a :class:`ZoneInfo` object fails because the 409 | specified key could not be found on the system. This is a subclass of 410 | :exc:`KeyError`. 411 | 412 | .. exception:: InvalidTZPathWarning 413 | 414 | Raised when :envvar:`PYTHONTZPATH` contains an invalid component that will 415 | be filtered out, such as a relative path. 416 | 417 | .. Links and references: 418 | 419 | .. 
_tzdata: https://pypi.org/project/tzdata/ 420 | -------------------------------------------------------------------------------- /licenses/LICENSE_APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=40.8.0", "wheel"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.black] 6 | line-length = 80 7 | 8 | [tool.coverage.run] 9 | omit = ["tests/typing_example.py"] 10 | 11 | [tool.coverage.paths] 12 | source = ["src", ".tox/*/site-packages"] 13 | 14 | [tool.coverage.report] 15 | show_missing = true 16 | skip_covered = true 17 | 18 | [tool.isort] 19 | atomic=true 20 | force_grid_wrap=0 21 | include_trailing_comma=true 22 | known_first_party = ["backports.zoneinfo"] 23 | known_third_party=[ 24 | "click", 25 | "dateutil", 26 | "hypothesis", 27 | "pint", 28 | "pytest", 29 | "pytz", 30 | "requests", 31 | ] 32 | multi_line_output=3 33 | use_parentheses=true 34 | 35 | [tool.pylint.'MESSAGES CONTROL'] 36 | disable="all" 37 | enable=""" 38 | unused-import, 39 | unused-variable, 40 | unpacking-non-sequence, 41 | invalid-all-object, 42 | used-before-assignment, 43 | no-else-raise, 44 | 
bad-format-character, 45 | bad-format-string, 46 | bare-except, 47 | """ 48 | -------------------------------------------------------------------------------- /scripts/benchmark.py: -------------------------------------------------------------------------------- 1 | import statistics 2 | import sys 3 | import timeit 4 | from datetime import datetime, timezone 5 | 6 | import click 7 | import pint 8 | import pytz 9 | from dateutil import tz 10 | 11 | from backports.zoneinfo import ZoneInfo 12 | from backports.zoneinfo._zoneinfo import ZoneInfo as PyZoneInfo 13 | 14 | _PINT_REGISTRY = pint.UnitRegistry() 15 | S = _PINT_REGISTRY.s 16 | 17 | DATETIME = datetime(2020, 1, 1) 18 | ZONE_DEFAULT_CONSTRUCTOR = { 19 | "c_zoneinfo": ZoneInfo, 20 | "py_zoneinfo": PyZoneInfo, 21 | "dateutil": tz.gettz, 22 | "pytz": pytz.timezone, 23 | } 24 | 25 | ZONE_NO_CACHE_CONSTRUCTOR = { 26 | "c_zoneinfo": ZoneInfo.no_cache, 27 | "py_zoneinfo": PyZoneInfo.no_cache, 28 | "dateutil": tz.gettz.nocache, 29 | } 30 | 31 | SOURCES = ["dateutil", "pytz"] 32 | 33 | BENCHMARKS = { 34 | "to_utc": lambda *args, **kwargs: bench_astimezone( 35 | *args, **kwargs, from_utc=False 36 | ), 37 | "from_utc": lambda *args, **kwargs: bench_astimezone( 38 | *args, **kwargs, from_utc=True 39 | ), 40 | "utcoffset": lambda *args, **kwargs: bench_utcoffset(*args, **kwargs), 41 | "constructor": lambda *args, **kwargs: bench_constructor( 42 | *args, **kwargs, cache=True 43 | ), 44 | "no_cache_constructor": lambda *args, **kwargs: bench_constructor( 45 | *args, **kwargs, cache=False 46 | ), 47 | "constructor_strong_cache_miss": lambda *args, **kwargs: bench_constructor_strong_cache_miss( 48 | *args, **kwargs 49 | ), 50 | } 51 | 52 | 53 | def get_zone(source, key): 54 | return ZONE_DEFAULT_CONSTRUCTOR[source](key) 55 | 56 | 57 | def bench_astimezone(source, zone_key, from_utc=True): 58 | zone = get_zone(source, zone_key) 59 | tz_from = timezone.utc 60 | tz_to = zone 61 | 62 | if not from_utc: 63 | tz_to, tz_from = tz_from, 
tz_to 64 | 65 | dt_from = DATETIME.replace(tzinfo=tz_from) 66 | 67 | def func(dt_from=dt_from, tz_to=tz_to): 68 | return dt_from.astimezone(tz_to) 69 | 70 | return func 71 | 72 | 73 | def bench_utcoffset(source, zone_key): 74 | zone = get_zone(source, zone_key) 75 | base_dt = DATETIME 76 | if source != "pytz": 77 | dt = base_dt.replace(tzinfo=zone) 78 | else: 79 | dt = zone.localize(base_dt) 80 | 81 | def func(dt=dt): 82 | return dt.utcoffset() 83 | 84 | return func 85 | 86 | 87 | def bench_constructor(source, zone_key, cache=False): 88 | if cache: 89 | zone_cache = get_zone(source, zone_key) 90 | constructor = ZONE_DEFAULT_CONSTRUCTOR[source] 91 | else: 92 | if source not in ZONE_NO_CACHE_CONSTRUCTOR: 93 | raise UnsupportedOperation( 94 | f"Source {source} does not support no-cache construction." 95 | ) 96 | 97 | zone_cache = None 98 | constructor = ZONE_NO_CACHE_CONSTRUCTOR[source] 99 | 100 | def func(constructor=constructor, key=zone_key, _cache=zone_cache): 101 | constructor(key) 102 | 103 | return func 104 | 105 | 106 | def bench_constructor_strong_cache_miss(source, zone_key): 107 | if source == "pytz": 108 | raise UnsupportedOperation("pytz does not have a strong and weak cache") 109 | 110 | # Strong cache is 8 keys 111 | keys = [ 112 | "UTC", 113 | "America/New_York", 114 | "America/Los_Angeles", 115 | "America/Chicago", 116 | "Asia/Tokyo", 117 | "Europe/Lisbon", 118 | "Africa/Casablanca", 119 | "Australia/Sydney", 120 | "America/Denver", 121 | "Europe/London", 122 | ] 123 | 124 | if zone_key in keys: 125 | keys.append("Europe/Moscow") 126 | else: 127 | keys.append(zone_key) 128 | 129 | constructor = ZONE_DEFAULT_CONSTRUCTOR[source] 130 | 131 | def func(constructor=constructor, keys=keys): 132 | for key in keys: 133 | constructor(key) 134 | 135 | def setup(_cache=[]): 136 | _cache.clear() 137 | for key in keys: 138 | _cache.append(constructor(key)) 139 | 140 | func.setup = setup 141 | 142 | return func 143 | 144 | 145 | @click.command() 146 | @click.option( 
147 | "-b", 148 | "--benchmark", 149 | type=click.Choice(["all"] + list(BENCHMARKS.keys())), 150 | multiple=True, 151 | ) 152 | @click.option( 153 | "-c", "--compare", type=click.Choice(["all"] + SOURCES), multiple=True 154 | ) 155 | @click.option("--c_ext/--no_c_ext", default=True) 156 | @click.option("--py/--no_py", default=True) 157 | @click.option( 158 | "-z", "--zone", type=str, multiple=True, default=["America/New_York"] 159 | ) 160 | def cli(benchmark, zone, compare, c_ext, py): 161 | """Runner for the benchmark suite""" 162 | 163 | # Assemble sources 164 | sources = [] 165 | if c_ext: 166 | sources.append("c_zoneinfo") 167 | 168 | if py: 169 | sources.append("py_zoneinfo") 170 | 171 | if len(compare) == 1 and compare[0] == "all": 172 | sources.extend(SOURCES) 173 | else: 174 | for source in compare: 175 | if source == "all": 176 | raise InvalidInput( 177 | 'Cannot specify "all" along with other comparisons.' 178 | ) 179 | sources.append(source) 180 | 181 | if not sources: 182 | raise InvalidInput("Nothing to benchmark specified!") 183 | 184 | # Determine which benchmarks to run 185 | if not benchmark: 186 | raise InvalidInput("No benchmarks specified") 187 | 188 | if len(benchmark) == 1 and benchmark[0] == "all": 189 | benchmarks = sorted(BENCHMARKS.keys()) 190 | else: 191 | if "all" in benchmark: 192 | raise InvalidInput( 193 | '"all" cannot be specified with other benchmarks' 194 | ) 195 | 196 | benchmarks = sorted(set(benchmark)) 197 | 198 | zones = sorted(set(zone)) 199 | 200 | sys.argv = sys.argv[0:1] 201 | main(sources, zones, benchmarks) 202 | 203 | 204 | def run_benchmark(desc, func, k=5, N=None): 205 | timer = timeit.Timer(func, setup=getattr(func, "setup", lambda: None)) 206 | 207 | # Run for 0.2 seconds 208 | if N is None: 209 | N, time_taken = timer.autorange() # pylint: disable=unused-variable 210 | 211 | results = timer.repeat(repeat=k, number=N) 212 | results = [r / N for r in results] 213 | 214 | results_min = min(results) 215 | results_mean 
= statistics.mean(results) 216 | results_std = statistics.stdev(results, xbar=results_mean) 217 | 218 | results_mean *= S 219 | results_min *= S 220 | results_std *= S 221 | 222 | results_mean = results_mean.to_compact() 223 | results_min = results_min.to_compact() 224 | results_std = results_std.to_compact() 225 | 226 | print( 227 | f"{desc}: mean: {results_mean:.02f~P} ± {results_std:.02f~P}; " 228 | + f"min: {results_min:.02f~P} (k={k}, N={N})" 229 | ) 230 | 231 | 232 | def main(sources, zones, benchmarks): 233 | to_run = {} 234 | for benchmark in benchmarks: 235 | for zone in zones: 236 | to_run[(benchmark, zone)] = [] 237 | for source in sources: 238 | func_factory = BENCHMARKS[benchmark] 239 | try: 240 | func = func_factory(source, zone) 241 | except UnsupportedOperation: 242 | continue 243 | 244 | to_run[(benchmark, zone)].append((source, func)) 245 | 246 | for (benchmark, zone), funcs in to_run.items(): 247 | print(f"Running {benchmark} in zone {zone}") 248 | for source, func in funcs: 249 | run_benchmark(f"{source}", func) 250 | 251 | print() 252 | 253 | 254 | class InvalidInput(ValueError): 255 | """Raised for user input errors.""" 256 | 257 | 258 | class UnsupportedOperation(Exception): 259 | """Raised when a source doesn't support an operation.""" 260 | 261 | 262 | if __name__ == "__main__": 263 | try: 264 | cli() 265 | except InvalidInput as e: 266 | print(f"Invalid Input: {e}") 267 | sys.exit(1) 268 | -------------------------------------------------------------------------------- /scripts/build_manylinux_wheels.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e -x 3 | 4 | function repair_wheel { 5 | wheel=$1 6 | if ! 
auditwheel show "$wheel"; then 7 | echo "Skipping non-platform wheel $wheel" 8 | else 9 | auditwheel repair "$wheel" --plat "$PLAT" -w /io/wheelhouse 10 | fi 11 | } 12 | 13 | cd /io/ 14 | 15 | for tag in $PYTHON_TAGS; do 16 | PYBIN="/opt/python/$tag/bin/" 17 | ${PYBIN}/pip install tox 18 | CFLAGS="-std=c99 -O3" ${PYBIN}/tox -e build -- -w 19 | done 20 | 21 | mv dist/ raw_wheels 22 | 23 | for whl in raw_wheels/*.whl; do 24 | repair_wheel "$whl" 25 | done 26 | 27 | mkdir dist 28 | mv wheelhouse/*.whl dist/ 29 | -------------------------------------------------------------------------------- /scripts/check_tag.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import sys 3 | 4 | from backports.zoneinfo import __version__ as VERSION 5 | 6 | 7 | def get_current_tag(): 8 | p = subprocess.run( 9 | ["git", "describe", "--tag"], check=True, stdout=subprocess.PIPE 10 | ) 11 | 12 | return p.stdout.strip().decode() 13 | 14 | 15 | if __name__ == "__main__": 16 | tag = get_current_tag() 17 | if tag != VERSION: 18 | print(f"Tag does not match version: {tag!r} != {VERSION!r}") 19 | sys.exit(1) 20 | -------------------------------------------------------------------------------- /scripts/tag_release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/bash 2 | # 3 | # Script to tag the repository with the current version of the library 4 | set -e 5 | 6 | VERSION_LINE=$(grep '__version__ =' 'src/backports/zoneinfo/_version.py' -m 1) 7 | VERSION=$(echo "$VERSION_LINE" | sed 's/__version__ = "\([^"]\+\)"/\1/') 8 | echo "Found version: $VERSION" 9 | 10 | git tag -s -m "Version $VERSION" $VERSION || exit "Failed to tag!" 
11 | echo "Success" 12 | -------------------------------------------------------------------------------- /scripts/update_test_data.py: -------------------------------------------------------------------------------- 1 | """ 2 | Script to automatically generate a JSON file containing time zone information. 3 | 4 | This is done to allow "pinning" a small subset of the tzdata in the tests, 5 | since we are testing properties of a file that may be subject to change. For 6 | example, the behavior in the far future of any given zone is likely to change, 7 | but "does this give the right answer for this file in 2040" is still an 8 | important property to test. 9 | 10 | This must be run from a computer with zoneinfo data installed. 11 | """ 12 | from __future__ import annotations 13 | 14 | import base64 15 | import functools 16 | import json 17 | import lzma 18 | import pathlib 19 | import textwrap 20 | import typing 21 | 22 | try: 23 | import backports.zoneinfo as zoneinfo 24 | except ImportError: 25 | # Mypy can't handle this try/except definition in multiple blocks, 26 | # so we need to tell it to ignore this line for now. 
See: 27 | # https://github.com/python/mypy/issues/1153 28 | import zoneinfo # type: ignore 29 | 30 | 31 | KEYS = [ 32 | "Africa/Abidjan", 33 | "Africa/Casablanca", 34 | "America/Los_Angeles", 35 | "America/Santiago", 36 | "Asia/Tokyo", 37 | "Australia/Sydney", 38 | "Europe/Dublin", 39 | "Europe/Lisbon", 40 | "Europe/London", 41 | "Pacific/Kiritimati", 42 | "UTC", 43 | ] 44 | 45 | REPO_ROOT = pathlib.Path(__file__).parent.parent.absolute() 46 | TEST_DATA_LOC = REPO_ROOT / "tests" / "data" 47 | 48 | 49 | @functools.lru_cache(maxsize=None) 50 | def get_zoneinfo_path() -> pathlib.Path: 51 | """Get the first zoneinfo directory on TZPATH containing the "UTC" zone.""" 52 | key = "UTC" 53 | for path in map(pathlib.Path, zoneinfo.TZPATH): 54 | if (path / key).exists(): 55 | return path 56 | else: 57 | raise OSError("Cannot find time zone data.") 58 | 59 | 60 | def get_zoneinfo_metadata() -> typing.Dict[str, str]: 61 | path = get_zoneinfo_path() 62 | 63 | tzdata_zi = path / "tzdata.zi" 64 | if not tzdata_zi.exists(): 65 | # tzdata.zi is necessary to get the version information 66 | raise OSError("Time zone data does not include tzdata.zi.") 67 | 68 | with open(tzdata_zi, "r") as f: 69 | version_line = next(f) 70 | 71 | _, version = version_line.strip().rsplit(" ", 1) 72 | 73 | if ( 74 | not version[0:4].isdigit() 75 | or len(version) < 5 76 | or not version[4:].isalpha() 77 | ): 78 | raise ValueError( 79 | "Version string should be YYYYx, " 80 | + "where YYYY is the year and x is a letter; " 81 | + f"found: {version}" 82 | ) 83 | 84 | return {"version": version} 85 | 86 | 87 | def get_zoneinfo(key: str) -> bytes: 88 | path = get_zoneinfo_path() 89 | 90 | with open(path / key, "rb") as f: 91 | return f.read() 92 | 93 | 94 | def encode_compressed(data: bytes) -> typing.List[str]: 95 | compressed_zone = lzma.compress(data) 96 | raw = base64.b85encode(compressed_zone) 97 | 98 | raw_data_str = raw.decode("utf-8") 99 | 100 | data_str = textwrap.wrap(raw_data_str, width=70) 101 | 
return data_str 102 | 103 | 104 | def load_compressed_keys() -> typing.Dict[str, typing.List[str]]: 105 | output = {key: encode_compressed(get_zoneinfo(key)) for key in KEYS} 106 | 107 | return output 108 | 109 | 110 | def update_test_data(fname: str = "zoneinfo_data.json") -> None: 111 | TEST_DATA_LOC.mkdir(exist_ok=True, parents=True) 112 | 113 | # Annotation required: https://github.com/python/mypy/issues/8772 114 | json_kwargs: typing.Dict[str, typing.Any] = dict( 115 | indent=2, 116 | sort_keys=True, 117 | ) 118 | 119 | compressed_keys = load_compressed_keys() 120 | metadata = get_zoneinfo_metadata() 121 | output = { 122 | "metadata": metadata, 123 | "data": compressed_keys, 124 | } 125 | 126 | with open(TEST_DATA_LOC / fname, "w") as f: 127 | json.dump(output, f, **json_kwargs) 128 | 129 | 130 | if __name__ == "__main__": 131 | update_test_data() 132 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = backports.zoneinfo 3 | version = attr:backports.zoneinfo._version.__version__ 4 | description = Backport of the standard library zoneinfo module 5 | long_description = file: README.md 6 | long_description_content_type = text/markdown 7 | url = https://github.com/pganssle/zoneinfo 8 | author = Python Software Foundation 9 | author_email = datetime-sig@python.org 10 | license = Apache-2.0 11 | license_file = LICENSE 12 | license_files = 13 | LICENSE 14 | licenses/LICENSE_APACHE 15 | classifiers = 16 | Development Status :: 4 - Beta 17 | Intended Audience :: Developers 18 | License :: OSI Approved :: Apache Software License 19 | Programming Language :: Python :: 3 20 | Programming Language :: Python :: 3 :: Only 21 | Programming Language :: Python :: 3.6 22 | Programming Language :: Python :: 3.7 23 | Programming Language :: Python :: 3.8 24 | project_urls = 25 | Source = https://github.com/pganssle/zoneinfo 26 | 
Documentation = https://zoneinfo.readthedocs.io/en/latest/ 27 | Bug Reports = https://github.com/pganssle/zoneinfo/issues 28 | 29 | [options] 30 | packages = find: 31 | install_requires = 32 | importlib_resources;python_version<"3.7" 33 | python_requires = >=3.6 34 | include_package_data = True 35 | package_dir = 36 | =src 37 | 38 | [options.extras_require] 39 | tzdata = 40 | tzdata 41 | 42 | [options.packages.find] 43 | where = src 44 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import platform 3 | import sys 4 | 5 | import setuptools 6 | from setuptools import Extension 7 | 8 | if platform.python_implementation() != "PyPy": 9 | # We need to pass the -std=c99 to gcc and/or clang, but we shouldn't pass 10 | # it to MSVC. There doesn't seem to be a simple way of setting 11 | # compiler-specific compile arguments, but for practical purposes 12 | # conditionally adding this argument on non-Windows platforms should be 13 | # enough. If an edge case is found that prevents compilation on some 14 | # systems, the end user should be able to set CFLAGS="-std=c99". 
15 | if not sys.platform.startswith("win"): 16 | extra_compile_args = ["-std=c99"] 17 | else: 18 | extra_compile_args = [] 19 | 20 | c_extension = Extension( 21 | "backports.zoneinfo._czoneinfo", 22 | sources=["lib/zoneinfo_module.c"], 23 | extra_compile_args=extra_compile_args, 24 | ) 25 | 26 | setuptools.setup(ext_modules=[c_extension]) 27 | else: 28 | setuptools.setup() 29 | 30 | 31 | if "GCNO_TARGET_DIR" in os.environ: 32 | import glob 33 | 34 | gcno_files = glob.glob("**/*.gcno", recursive=True) 35 | 36 | if gcno_files: 37 | import shutil 38 | 39 | target_dir = os.environ["GCNO_TARGET_DIR"] 40 | os.makedirs(target_dir, exist_ok=True) 41 | for gcno_file in gcno_files: 42 | src = gcno_file 43 | src_dir, filename = os.path.split(gcno_file) 44 | new_target_dir = target_dir 45 | 46 | # When using gcc-9, the files are created in some flat location 47 | # with a naming convention where /path/to/file.gcda would be 48 | # represented as ${BASEDIR}/#path#to#file.gcda. In gcc-7, the input 49 | # directory is mirrored in the output directory, so the filename 50 | # would be ${BASEDIR}/path/to/file.gcda. The gcno files need to 51 | # have the same name and relative location as the gcda files, 52 | # apparently. 
53 | if not filename.startswith("#"): 54 | rel_src_dir = os.path.relpath(src_dir) 55 | new_target_dir = os.path.join(target_dir, rel_src_dir) 56 | os.makedirs(new_target_dir, exist_ok=True) 57 | 58 | dst = os.path.join(new_target_dir, filename) 59 | shutil.copy(src, dst) 60 | -------------------------------------------------------------------------------- /src/backports/__init__.py: -------------------------------------------------------------------------------- 1 | # A Python "namespace package" http://www.python.org/dev/peps/pep-0382/ 2 | # This always goes inside of a namespace package's __init__.py 3 | from pkgutil import extend_path 4 | 5 | __path__ = extend_path(__path__, __name__) # type: ignore 6 | -------------------------------------------------------------------------------- /src/backports/zoneinfo/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | "ZoneInfo", 3 | "reset_tzpath", 4 | "available_timezones", 5 | "TZPATH", 6 | "ZoneInfoNotFoundError", 7 | "InvalidTZPathWarning", 8 | ] 9 | import sys 10 | 11 | from . import _tzpath 12 | from ._common import ZoneInfoNotFoundError 13 | from ._version import __version__ 14 | 15 | try: 16 | from ._czoneinfo import ZoneInfo 17 | except ImportError: # pragma: nocover 18 | from ._zoneinfo import ZoneInfo 19 | 20 | reset_tzpath = _tzpath.reset_tzpath 21 | available_timezones = _tzpath.available_timezones 22 | InvalidTZPathWarning = _tzpath.InvalidTZPathWarning 23 | 24 | if sys.version_info < (3, 7): 25 | # Module-level __getattr__ was added in Python 3.7, so instead of lazily 26 | # populating TZPATH on every access, we will register a callback with 27 | # reset_tzpath to update the top-level tuple. 
28 | TZPATH = _tzpath.TZPATH 29 | 30 | def _tzpath_callback(new_tzpath): 31 | global TZPATH 32 | TZPATH = new_tzpath 33 | 34 | _tzpath.TZPATH_CALLBACKS.append(_tzpath_callback) 35 | del _tzpath_callback 36 | 37 | else: 38 | 39 | def __getattr__(name): 40 | if name == "TZPATH": 41 | return _tzpath.TZPATH 42 | else: 43 | raise AttributeError( 44 | f"module {__name__!r} has no attribute {name!r}" 45 | ) 46 | 47 | 48 | def __dir__(): 49 | return sorted(list(globals()) + ["TZPATH"]) 50 | -------------------------------------------------------------------------------- /src/backports/zoneinfo/__init__.pyi: -------------------------------------------------------------------------------- 1 | import os 2 | import typing 3 | from datetime import datetime, tzinfo 4 | from typing import ( 5 | Any, 6 | Iterable, 7 | Optional, 8 | Protocol, 9 | Sequence, 10 | Set, 11 | Type, 12 | Union, 13 | ) 14 | 15 | _T = typing.TypeVar("_T", bound="ZoneInfo") 16 | 17 | class _IOBytes(Protocol): 18 | def read(self, __size: int) -> bytes: ... 19 | def seek(self, __size: int, __whence: int = ...) -> Any: ... 20 | 21 | class ZoneInfo(tzinfo): 22 | @property 23 | def key(self) -> str: ... 24 | def __init__(self, key: str) -> None: ... 25 | @classmethod 26 | def no_cache(cls: Type[_T], key: str) -> _T: ... 27 | @classmethod 28 | def from_file( 29 | cls: Type[_T], __fobj: _IOBytes, key: Optional[str] = ... 30 | ) -> _T: ... 31 | @classmethod 32 | def clear_cache(cls, *, only_keys: Iterable[str] = ...) -> None: ... 33 | 34 | # Note: Both here and in clear_cache, the types allow the use of `str` where 35 | # a sequence of strings is required. This should be remedied if a solution 36 | # to this typing bug is found: https://github.com/python/typing/issues/256 37 | def reset_tzpath( 38 | to: Optional[Sequence[Union[os.PathLike, str]]] = ... 39 | ) -> None: ... 40 | def available_timezones() -> Set[str]: ... 41 | 42 | TZPATH: Sequence[str] 43 | 44 | class ZoneInfoNotFoundError(KeyError): ... 
45 | class InvalidTZPathWarning(RuntimeWarning): ... 46 | -------------------------------------------------------------------------------- /src/backports/zoneinfo/_common.py: -------------------------------------------------------------------------------- 1 | import struct 2 | 3 | 4 | def load_tzdata(key): 5 | try: 6 | import importlib.resources as importlib_resources 7 | except ImportError: 8 | import importlib_resources 9 | 10 | components = key.split("/") 11 | package_name = ".".join(["tzdata.zoneinfo"] + components[:-1]) 12 | resource_name = components[-1] 13 | 14 | try: 15 | return importlib_resources.open_binary(package_name, resource_name) 16 | except (ImportError, FileNotFoundError, UnicodeEncodeError): 17 | # There are three types of exception that can be raised that all amount 18 | # to "we cannot find this key": 19 | # 20 | # ImportError: If package_name doesn't exist (e.g. if tzdata is not 21 | # installed, or if there's an error in the folder name like 22 | # Amrica/New_York) 23 | # FileNotFoundError: If resource_name doesn't exist in the package 24 | # (e.g. Europe/Krasnoy) 25 | # UnicodeEncodeError: If package_name or resource_name are not UTF-8, 26 | # such as keys containing a surrogate character. 
27 | raise ZoneInfoNotFoundError(f"No time zone found with key {key}") 28 | 29 | 30 | def load_data(fobj): 31 | header = _TZifHeader.from_file(fobj) 32 | 33 | if header.version == 1: 34 | time_size = 4 35 | time_type = "l" 36 | else: 37 | # Version 2+ has 64-bit integer transition times 38 | time_size = 8 39 | time_type = "q" 40 | 41 | # Version 2+ also starts with a Version 1 header and data, which 42 | # we need to skip now 43 | skip_bytes = ( 44 | header.timecnt * 5 # Transition times and types 45 | + header.typecnt * 6 # Local time type records 46 | + header.charcnt # Time zone designations 47 | + header.leapcnt * 8 # Leap second records 48 | + header.isstdcnt # Standard/wall indicators 49 | + header.isutcnt # UT/local indicators 50 | ) 51 | 52 | fobj.seek(skip_bytes, 1) 53 | 54 | # Now we need to read the second header, which is not the same 55 | # as the first 56 | header = _TZifHeader.from_file(fobj) 57 | 58 | typecnt = header.typecnt 59 | timecnt = header.timecnt 60 | charcnt = header.charcnt 61 | 62 | # The data portion starts with timecnt transitions and indices 63 | if timecnt: 64 | trans_list_utc = struct.unpack( 65 | f">{timecnt}{time_type}", fobj.read(timecnt * time_size) 66 | ) 67 | trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt)) 68 | else: 69 | trans_list_utc = () 70 | trans_idx = () 71 | 72 | # Read the ttinfo struct, (utoff, isdst, abbrind) 73 | if typecnt: 74 | utcoff, isdst, abbrind = zip( 75 | *(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt)) 76 | ) 77 | else: 78 | utcoff = () 79 | isdst = () 80 | abbrind = () 81 | 82 | # Now read the abbreviations. They are null-terminated strings, indexed 83 | # not by position in the array but by position in the unsplit 84 | # abbreviation string. I suppose this makes more sense in C, which uses 85 | # null to terminate the strings, but it's inconvenient here... 
86 | abbr_vals = {} 87 | abbr_chars = fobj.read(charcnt) 88 | 89 | def get_abbr(idx): 90 | # Gets a string starting at idx and running until the next \x00 91 | # 92 | # We cannot pre-populate abbr_vals by splitting on \x00 because there 93 | # are some zones that use subsets of longer abbreviations, like so: 94 | # 95 | # LMT\x00AHST\x00HDT\x00 96 | # 97 | # Where the idx to abbr mapping should be: 98 | # 99 | # {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"} 100 | if idx not in abbr_vals: 101 | span_end = abbr_chars.find(b"\x00", idx) 102 | abbr_vals[idx] = abbr_chars[idx:span_end].decode() 103 | 104 | return abbr_vals[idx] 105 | 106 | abbr = tuple(get_abbr(idx) for idx in abbrind) 107 | 108 | # The remainder of the file consists of leap seconds (currently unused) and 109 | # the standard/wall and ut/local indicators, which are metadata we don't need. 110 | # In version 2 files, we need to skip the unnecessary data to get at the TZ string: 111 | if header.version >= 2: 112 | # Each leap second record has size (time_size + 4) 113 | skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12 114 | fobj.seek(skip_bytes, 1) 115 | 116 | c = fobj.read(1) # Should be \n 117 | assert c == b"\n", c 118 | 119 | tz_bytes = b"" 120 | while True: 121 | c = fobj.read(1) 122 | if c == b"\n": 123 | break 124 | tz_bytes += c 125 | 126 | tz_str = tz_bytes 127 | else: 128 | tz_str = None 129 | 130 | return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str 131 | 132 | 133 | class _TZifHeader: 134 | __slots__ = [ 135 | "version", 136 | "isutcnt", 137 | "isstdcnt", 138 | "leapcnt", 139 | "timecnt", 140 | "typecnt", 141 | "charcnt", 142 | ] 143 | 144 | def __init__(self, *args): 145 | assert len(self.__slots__) == len(args) 146 | for attr, val in zip(self.__slots__, args): 147 | setattr(self, attr, val) 148 | 149 | @classmethod 150 | def from_file(cls, stream): 151 | # The header starts with a 4-byte "magic" value 152 | if stream.read(4) != b"TZif": 153 | raise 
ValueError("Invalid TZif file: magic not found") 154 | 155 | _version = stream.read(1) 156 | if _version == b"\x00": 157 | version = 1 158 | else: 159 | version = int(_version) 160 | stream.read(15) 161 | 162 | args = (version,) 163 | 164 | # Slots are defined in the order that the bytes are arranged 165 | args = args + struct.unpack(">6l", stream.read(24)) 166 | 167 | return cls(*args) 168 | 169 | 170 | class ZoneInfoNotFoundError(KeyError): 171 | """Exception raised when a ZoneInfo key is not found.""" 172 | -------------------------------------------------------------------------------- /src/backports/zoneinfo/_tzpath.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | PY36 = sys.version_info < (3, 7) 5 | 6 | 7 | def reset_tzpath(to=None): 8 | global TZPATH 9 | 10 | tzpaths = to 11 | if tzpaths is not None: 12 | if isinstance(tzpaths, (str, bytes)): 13 | raise TypeError( 14 | f"tzpaths must be a list or tuple, " 15 | + f"not {type(tzpaths)}: {tzpaths!r}" 16 | ) 17 | 18 | if not all(map(os.path.isabs, tzpaths)): 19 | raise ValueError(_get_invalid_paths_message(tzpaths)) 20 | base_tzpath = tzpaths 21 | else: 22 | env_var = os.environ.get("PYTHONTZPATH", None) 23 | if env_var is not None: 24 | base_tzpath = _parse_python_tzpath(env_var) 25 | elif sys.platform != "win32": 26 | base_tzpath = [ 27 | "/usr/share/zoneinfo", 28 | "/usr/lib/zoneinfo", 29 | "/usr/share/lib/zoneinfo", 30 | "/etc/zoneinfo", 31 | ] 32 | 33 | base_tzpath.sort(key=lambda x: not os.path.exists(x)) 34 | else: 35 | base_tzpath = () 36 | 37 | TZPATH = tuple(base_tzpath) 38 | 39 | if TZPATH_CALLBACKS: 40 | for callback in TZPATH_CALLBACKS: 41 | callback(TZPATH) 42 | 43 | 44 | def _parse_python_tzpath(env_var): 45 | if not env_var: 46 | return () 47 | 48 | raw_tzpath = env_var.split(os.pathsep) 49 | new_tzpath = tuple(filter(os.path.isabs, raw_tzpath)) 50 | 51 | # If anything has been filtered out, we will warn about it 52 | if 
len(new_tzpath) != len(raw_tzpath): 53 | import warnings 54 | 55 | msg = _get_invalid_paths_message(raw_tzpath) 56 | 57 | warnings.warn( 58 | "Invalid paths specified in PYTHONTZPATH environment variable." 59 | + msg, 60 | InvalidTZPathWarning, 61 | ) 62 | 63 | return new_tzpath 64 | 65 | 66 | def _get_invalid_paths_message(tzpaths): 67 | invalid_paths = (path for path in tzpaths if not os.path.isabs(path)) 68 | 69 | prefix = "\n " 70 | indented_str = prefix + prefix.join(invalid_paths) 71 | 72 | return ( 73 | "Paths should be absolute but found the following relative paths:" 74 | + indented_str 75 | ) 76 | 77 | 78 | if sys.version_info < (3, 8): 79 | 80 | def _isfile(path): 81 | # bpo-33721: In Python 3.8 non-UTF8 paths return False rather than 82 | # raising an error. See https://bugs.python.org/issue33721 83 | try: 84 | return os.path.isfile(path) 85 | except ValueError: 86 | return False 87 | 88 | else: 89 | _isfile = os.path.isfile 90 | 91 | 92 | def find_tzfile(key): 93 | """Retrieve the path to a TZif file from a key.""" 94 | _validate_tzfile_path(key) 95 | for search_path in TZPATH: 96 | filepath = os.path.join(search_path, key) 97 | if _isfile(filepath): 98 | return filepath 99 | 100 | return None 101 | 102 | 103 | _TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1] 104 | 105 | 106 | def _validate_tzfile_path(path, _base=_TEST_PATH): 107 | if os.path.isabs(path): 108 | raise ValueError( 109 | f"ZoneInfo keys may not be absolute paths, got: {path}" 110 | ) 111 | 112 | # We only care about the kinds of path normalizations that would change the 113 | # length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows, 114 | # normpath will also change from a/b to a\b, but that would still preserve 115 | # the length. 
116 | new_path = os.path.normpath(path) 117 | if len(new_path) != len(path): 118 | raise ValueError( 119 | f"ZoneInfo keys must be normalized relative paths, got: {path}" 120 | ) 121 | 122 | resolved = os.path.normpath(os.path.join(_base, new_path)) 123 | if not resolved.startswith(_base): 124 | raise ValueError( 125 | f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}" 126 | ) 127 | 128 | 129 | del _TEST_PATH 130 | 131 | 132 | def available_timezones(): 133 | """Returns a set containing all available time zones. 134 | 135 | .. caution:: 136 | 137 | This may attempt to open a large number of files, since the best way to 138 | determine if a given file on the time zone search path is to open it 139 | and check for the "magic string" at the beginning. 140 | """ 141 | try: 142 | from importlib import resources 143 | except ImportError: 144 | import importlib_resources as resources 145 | 146 | valid_zones = set() 147 | 148 | # Start with loading from the tzdata package if it exists: this has a 149 | # pre-assembled list of zones that only requires opening one file. 
150 | try: 151 | with resources.open_text("tzdata", "zones") as f: 152 | for zone in f: 153 | zone = zone.strip() 154 | if zone: 155 | valid_zones.add(zone) 156 | except (ImportError, FileNotFoundError): 157 | pass 158 | 159 | def valid_key(fpath): 160 | try: 161 | with open(fpath, "rb") as f: 162 | return f.read(4) == b"TZif" 163 | except Exception: # pragma: nocover 164 | return False 165 | 166 | for tz_root in TZPATH: 167 | if not os.path.exists(tz_root): 168 | continue 169 | 170 | for root, dirnames, files in os.walk(tz_root): 171 | if root == tz_root: 172 | # right/ and posix/ are special directories and shouldn't be 173 | # included in the output of available zones 174 | if "right" in dirnames: 175 | dirnames.remove("right") 176 | if "posix" in dirnames: 177 | dirnames.remove("posix") 178 | 179 | for file in files: 180 | fpath = os.path.join(root, file) 181 | 182 | key = os.path.relpath(fpath, start=tz_root) 183 | if os.sep != "/": # pragma: nocover 184 | key = key.replace(os.sep, "/") 185 | 186 | if not key or key in valid_zones: 187 | continue 188 | 189 | if valid_key(fpath): 190 | valid_zones.add(key) 191 | 192 | if "posixrules" in valid_zones: 193 | # posixrules is a special symlink-only time zone where it exists, it 194 | # should not be included in the output 195 | valid_zones.remove("posixrules") 196 | 197 | return valid_zones 198 | 199 | 200 | class InvalidTZPathWarning(RuntimeWarning): 201 | """Warning raised if an invalid path is specified in PYTHONTZPATH.""" 202 | 203 | 204 | TZPATH = () 205 | TZPATH_CALLBACKS = [] 206 | reset_tzpath() 207 | -------------------------------------------------------------------------------- /src/backports/zoneinfo/_version.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.2.1" 2 | -------------------------------------------------------------------------------- /src/backports/zoneinfo/_zoneinfo.py: 
-------------------------------------------------------------------------------- 1 | import bisect 2 | import calendar 3 | import collections 4 | import functools 5 | import re 6 | import weakref 7 | from datetime import datetime, timedelta, tzinfo 8 | 9 | from . import _common, _tzpath 10 | 11 | EPOCH = datetime(1970, 1, 1) 12 | EPOCHORDINAL = datetime(1970, 1, 1).toordinal() 13 | 14 | 15 | # It is relatively expensive to construct new timedelta objects, and in most 16 | # cases we're looking at the same deltas, like integer numbers of hours, etc. 17 | # To improve speed and memory use, we'll keep a dictionary with references 18 | # to the ones we've already used so far. 19 | # 20 | # Loading every time zone in the 2020a version of the time zone database 21 | # requires 447 timedeltas, which requires approximately the amount of space 22 | # that ZoneInfo("America/New_York") with 236 transitions takes up, so we will 23 | # set the cache size to 512 so that in the common case we always get cache 24 | # hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts 25 | # of memory. 
26 | @functools.lru_cache(maxsize=512) 27 | def _load_timedelta(seconds): 28 | return timedelta(seconds=seconds) 29 | 30 | 31 | class ZoneInfo(tzinfo): 32 | _strong_cache_size = 8 33 | _strong_cache = collections.OrderedDict() 34 | _weak_cache = weakref.WeakValueDictionary() 35 | __module__ = "backports.zoneinfo" 36 | 37 | def __init_subclass__(cls): 38 | cls._strong_cache = collections.OrderedDict() 39 | cls._weak_cache = weakref.WeakValueDictionary() 40 | 41 | def __new__(cls, key): 42 | instance = cls._weak_cache.get(key, None) 43 | if instance is None: 44 | instance = cls._weak_cache.setdefault(key, cls._new_instance(key)) 45 | instance._from_cache = True 46 | 47 | # Update the "strong" cache 48 | cls._strong_cache[key] = cls._strong_cache.pop(key, instance) 49 | 50 | if len(cls._strong_cache) > cls._strong_cache_size: 51 | cls._strong_cache.popitem(last=False) 52 | 53 | return instance 54 | 55 | @classmethod 56 | def no_cache(cls, key): 57 | obj = cls._new_instance(key) 58 | obj._from_cache = False 59 | 60 | return obj 61 | 62 | @classmethod 63 | def _new_instance(cls, key): 64 | obj = super().__new__(cls) 65 | obj._key = key 66 | obj._file_path = obj._find_tzfile(key) 67 | 68 | if obj._file_path is not None: 69 | file_obj = open(obj._file_path, "rb") 70 | else: 71 | file_obj = _common.load_tzdata(key) 72 | 73 | with file_obj as f: 74 | obj._load_file(f) 75 | 76 | return obj 77 | 78 | @classmethod 79 | def from_file(cls, fobj, key=None): 80 | obj = super().__new__(cls) 81 | obj._key = key 82 | obj._file_path = None 83 | obj._load_file(fobj) 84 | obj._file_repr = repr(fobj) 85 | 86 | # Disable pickling for objects created from files 87 | obj.__reduce__ = obj._file_reduce 88 | 89 | return obj 90 | 91 | @classmethod 92 | def clear_cache(cls, *, only_keys=None): 93 | if only_keys is not None: 94 | for key in only_keys: 95 | cls._weak_cache.pop(key, None) 96 | cls._strong_cache.pop(key, None) 97 | 98 | else: 99 | cls._weak_cache.clear() 100 | 
cls._strong_cache.clear() 101 | 102 | @property 103 | def key(self): 104 | return self._key 105 | 106 | def utcoffset(self, dt): 107 | return self._find_trans(dt).utcoff 108 | 109 | def dst(self, dt): 110 | return self._find_trans(dt).dstoff 111 | 112 | def tzname(self, dt): 113 | return self._find_trans(dt).tzname 114 | 115 | def fromutc(self, dt): 116 | """Convert from datetime in UTC to datetime in local time""" 117 | 118 | if not isinstance(dt, datetime): 119 | raise TypeError("fromutc() requires a datetime argument") 120 | if dt.tzinfo is not self: 121 | raise ValueError("dt.tzinfo is not self") 122 | 123 | timestamp = self._get_local_timestamp(dt) 124 | num_trans = len(self._trans_utc) 125 | 126 | if num_trans >= 1 and timestamp < self._trans_utc[0]: 127 | tti = self._tti_before 128 | fold = 0 129 | elif ( 130 | num_trans == 0 or timestamp > self._trans_utc[-1] 131 | ) and not isinstance(self._tz_after, _ttinfo): 132 | tti, fold = self._tz_after.get_trans_info_fromutc( 133 | timestamp, dt.year 134 | ) 135 | elif num_trans == 0: 136 | tti = self._tz_after 137 | fold = 0 138 | else: 139 | idx = bisect.bisect_right(self._trans_utc, timestamp) 140 | 141 | if num_trans > 1 and timestamp >= self._trans_utc[1]: 142 | tti_prev, tti = self._ttinfos[idx - 2 : idx] 143 | elif timestamp > self._trans_utc[-1]: 144 | tti_prev = self._ttinfos[-1] 145 | tti = self._tz_after 146 | else: 147 | tti_prev = self._tti_before 148 | tti = self._ttinfos[0] 149 | 150 | # Detect fold 151 | shift = tti_prev.utcoff - tti.utcoff 152 | fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1] 153 | dt += tti.utcoff 154 | if fold: 155 | return dt.replace(fold=1) 156 | else: 157 | return dt 158 | 159 | def _find_trans(self, dt): 160 | if dt is None: 161 | if self._fixed_offset: 162 | return self._tz_after 163 | else: 164 | return _NO_TTINFO 165 | 166 | ts = self._get_local_timestamp(dt) 167 | 168 | lt = self._trans_local[dt.fold] 169 | 170 | num_trans = len(lt) 171 | 172 | if 
num_trans and ts < lt[0]: 173 | return self._tti_before 174 | elif not num_trans or ts > lt[-1]: 175 | if isinstance(self._tz_after, _TZStr): 176 | return self._tz_after.get_trans_info(ts, dt.year, dt.fold) 177 | else: 178 | return self._tz_after 179 | else: 180 | # idx is the transition that occurs after this timestamp, so we 181 | # subtract off 1 to get the current ttinfo 182 | idx = bisect.bisect_right(lt, ts) - 1 183 | assert idx >= 0 184 | return self._ttinfos[idx] 185 | 186 | def _get_local_timestamp(self, dt): 187 | return ( 188 | (dt.toordinal() - EPOCHORDINAL) * 86400 189 | + dt.hour * 3600 190 | + dt.minute * 60 191 | + dt.second 192 | ) 193 | 194 | def __str__(self): 195 | if self._key is not None: 196 | return f"{self._key}" 197 | else: 198 | return repr(self) 199 | 200 | def __repr__(self): 201 | if self._key is not None: 202 | return f"{self.__class__.__name__}(key={self._key!r})" 203 | else: 204 | return f"{self.__class__.__name__}.from_file({self._file_repr})" 205 | 206 | def __reduce__(self): 207 | return (self.__class__._unpickle, (self._key, self._from_cache)) 208 | 209 | def _file_reduce(self): 210 | import pickle 211 | 212 | raise pickle.PicklingError( 213 | "Cannot pickle a ZoneInfo file created from a file stream." 
    @classmethod
    def _unpickle(cls, key, from_cache):
        """Reconstruct a ZoneInfo during unpickling.

        ``from_cache`` records whether the pickled instance originally came
        from the per-key cache; preserving that distinction keeps pickle
        round-trips idempotent (cached stays cached, uncached stays uncached).
        """
        if from_cache:
            return cls(key)
        else:
            return cls.no_cache(key)

    def _find_tzfile(self, key):
        # Resolve ``key`` to a TZif file path on the search path (or None).
        return _tzpath.find_tzfile(key)

    def _load_file(self, fobj):
        """Populate this instance from an open TZif file object.

        Builds the transition tables consumed by utcoffset()/dst()/tzname():
        the UTC and local transition lists, the per-transition ``_ttinfo``
        objects, the pre-first-transition fallback (``_tti_before``), the
        post-last-transition rule (``_tz_after``) and the ``_fixed_offset``
        flag.
        """
        # Retrieve all the data as it exists in the zoneinfo file
        trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
            fobj
        )

        # Infer the DST offsets (needed for .dst()) from the data
        dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)

        # Convert all the transition times (UTC) into "seconds since
        # 1970-01-01 local time"
        trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)

        # Construct `_ttinfo` objects for each transition in the file
        _ttinfo_list = [
            _ttinfo(
                _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
            )
            for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
        ]

        self._trans_utc = trans_utc
        self._trans_local = trans_local
        self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]

        # Find the first non-DST transition
        for i in range(len(isdst)):
            if not isdst[i]:
                self._tti_before = _ttinfo_list[i]
                break
        else:
            # All types are DST (or there are none): fall back to the first
            # transition's info, or None for an empty table.
            if self._ttinfos:
                self._tti_before = self._ttinfos[0]
            else:
                self._tti_before = None

        # Set the "fallback" time zone
        if tz_str is not None and tz_str != b"":
            self._tz_after = _parse_tz_str(tz_str.decode())
        else:
            if not self._ttinfos and not _ttinfo_list:
                raise ValueError("No time zone information found.")

            # Without a TZ string, times after the last transition reuse the
            # last known type.
            if self._ttinfos:
                self._tz_after = self._ttinfos[-1]
            else:
                self._tz_after = _ttinfo_list[-1]

        # Determine if this is a "fixed offset" zone, meaning that the output
        # of the utcoffset, dst and tzname functions does not depend on the
        # specific datetime passed.
        #
        # We make three simplifying assumptions here:
        #
        # 1. If _tz_after is not a _ttinfo, it has transitions that might
        #    actually occur (it is possible to construct TZ strings that
        #    specify STD and DST but no transitions ever occur, such as
        #    AAA0BBB,0/0,J365/25).
        # 2. If _ttinfo_list contains more than one _ttinfo object, the objects
        #    represent different offsets.
        # 3. _ttinfo_list contains no unused _ttinfos (in which case an
        #    otherwise fixed-offset zone with extra _ttinfos defined may
        #    appear to *not* be a fixed offset zone).
        #
        # Violations to these assumptions would be fairly exotic, and exotic
        # zones should almost certainly not be used with datetime.time (the
        # only thing that would be affected by this).
        if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
            self._fixed_offset = False
        elif not _ttinfo_list:
            self._fixed_offset = True
        else:
            self._fixed_offset = _ttinfo_list[0] == self._tz_after
    @staticmethod
    def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
        """Infer the DST offset (in seconds) for each transition type.

        TZif files store only the total UTC offset and an ``isdst`` flag per
        type, but ``.dst()`` must report the *difference* from the standard
        offset, so that difference is reconstructed here.
        """
        # Now we must transform our ttis and abbrs into `_ttinfo` objects,
        # but there is an issue: .dst() must return a timedelta with the
        # difference between utcoffset() and the "standard" offset, but
        # the "base offset" and "DST offset" are not encoded in the file;
        # we can infer what they are from the isdst flag, but it is not
        # sufficient to just look at the last standard offset, because
        # occasionally countries will shift both DST offset and base offset.

        typecnt = len(isdsts)
        dstoffs = [0] * typecnt  # Provisionally assign all to 0.
        dst_cnt = sum(isdsts)
        dst_found = 0

        for i in range(1, len(trans_idx)):
            # Stop early once every DST type has been assigned an offset.
            if dst_cnt == dst_found:
                break

            idx = trans_idx[i]

            dst = isdsts[idx]

            # We're only going to look at daylight saving time
            if not dst:
                continue

            # Skip any offsets that have already been assigned
            if dstoffs[idx] != 0:
                continue

            dstoff = 0
            utcoff = utcoffsets[idx]

            # Compare against the type in effect *before* this transition;
            # if that was standard time, the difference is the DST offset.
            comp_idx = trans_idx[i - 1]

            if not isdsts[comp_idx]:
                dstoff = utcoff - utcoffsets[comp_idx]

            if not dstoff and idx < (typecnt - 1):
                # NOTE(review): this guard compares the *type* index ``idx``
                # against ``typecnt``, while ``i + 1`` indexes ``trans_idx``;
                # presumably safe for real TZif data, but an IndexError looks
                # possible if the final transition is an unassigned DST type —
                # confirm against upstream zoneinfo.
                comp_idx = trans_idx[i + 1]

                # If the following transition is also DST and we couldn't
                # find the DST offset by this point, we're going to have to
                # skip it and hope this transition gets assigned later
                if isdsts[comp_idx]:
                    continue

                dstoff = utcoff - utcoffsets[comp_idx]

            if dstoff:
                dst_found += 1
                dstoffs[idx] = dstoff
        else:
            # (for/else: only runs if the loop was not broken out of early.)
            # If we didn't find a valid value for a given index, we'll end up
            # with dstoff = 0 for something where `isdst=1`. This is obviously
            # wrong - one hour will be a much better guess than 0
            for idx in range(typecnt):
                if not dstoffs[idx] and isdsts[idx]:
                    dstoffs[idx] = 3600

        return dstoffs

    @staticmethod
    def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
        """Generate number of seconds since 1970 *in the local time*.

        This is necessary to easily find the transition times in local time"""
        if not trans_list_utc:
            return [[], []]

        # Start with the timestamps and modify in-place
        trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]

        if len(utcoffsets) > 1:
            offset_0 = utcoffsets[0]
            offset_1 = utcoffsets[trans_idx[0]]
            # Ensure offset_0 holds the larger of the two offsets.
            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1
        else:
            offset_0 = offset_1 = utcoffsets[0]

        trans_list_wall[0][0] += offset_0
        trans_list_wall[1][0] += offset_1

        for i in range(1, len(trans_idx)):
            # Each transition is shifted by the offsets on either side of it.
            offset_0 = utcoffsets[trans_idx[i - 1]]
            offset_1 = utcoffsets[trans_idx[i]]

            if offset_1 > offset_0:
                offset_1, offset_0 = offset_0, offset_1

            trans_list_wall[0][i] += offset_0
            trans_list_wall[1][i] += offset_1

        return trans_list_wall
365 | 366 | This is necessary to easily find the transition times in local time""" 367 | if not trans_list_utc: 368 | return [[], []] 369 | 370 | # Start with the timestamps and modify in-place 371 | trans_list_wall = [list(trans_list_utc), list(trans_list_utc)] 372 | 373 | if len(utcoffsets) > 1: 374 | offset_0 = utcoffsets[0] 375 | offset_1 = utcoffsets[trans_idx[0]] 376 | if offset_1 > offset_0: 377 | offset_1, offset_0 = offset_0, offset_1 378 | else: 379 | offset_0 = offset_1 = utcoffsets[0] 380 | 381 | trans_list_wall[0][0] += offset_0 382 | trans_list_wall[1][0] += offset_1 383 | 384 | for i in range(1, len(trans_idx)): 385 | offset_0 = utcoffsets[trans_idx[i - 1]] 386 | offset_1 = utcoffsets[trans_idx[i]] 387 | 388 | if offset_1 > offset_0: 389 | offset_1, offset_0 = offset_0, offset_1 390 | 391 | trans_list_wall[0][i] += offset_0 392 | trans_list_wall[1][i] += offset_1 393 | 394 | return trans_list_wall 395 | 396 | 397 | class _ttinfo: 398 | __slots__ = ["utcoff", "dstoff", "tzname"] 399 | 400 | def __init__(self, utcoff, dstoff, tzname): 401 | self.utcoff = utcoff 402 | self.dstoff = dstoff 403 | self.tzname = tzname 404 | 405 | def __eq__(self, other): 406 | return ( 407 | self.utcoff == other.utcoff 408 | and self.dstoff == other.dstoff 409 | and self.tzname == other.tzname 410 | ) 411 | 412 | def __repr__(self): # pragma: nocover 413 | return ( 414 | f"{self.__class__.__name__}" 415 | + f"({self.utcoff}, {self.dstoff}, {self.tzname})" 416 | ) 417 | 418 | 419 | _NO_TTINFO = _ttinfo(None, None, None) 420 | 421 | 422 | class _TZStr: 423 | __slots__ = ( 424 | "std", 425 | "dst", 426 | "start", 427 | "end", 428 | "get_trans_info", 429 | "get_trans_info_fromutc", 430 | "dst_diff", 431 | ) 432 | 433 | def __init__( 434 | self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None 435 | ): 436 | self.dst_diff = dst_offset - std_offset 437 | std_offset = _load_timedelta(std_offset) 438 | self.std = _ttinfo( 439 | utcoff=std_offset, 
class _TZStr:
    """Fallback rule parsed from a POSIX TZ string (STD/DST + transitions).

    Used to compute offsets for times after the last explicit transition
    in a TZif file.
    """

    __slots__ = (
        "std",
        "dst",
        "start",
        "end",
        "get_trans_info",
        "get_trans_info_fromutc",
        "dst_diff",
    )

    def __init__(
        self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
    ):
        # Offsets arrive as integer seconds; they are converted to cached
        # timedeltas via _load_timedelta below.
        self.dst_diff = dst_offset - std_offset
        std_offset = _load_timedelta(std_offset)
        self.std = _ttinfo(
            utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
        )

        self.start = start
        self.end = end

        dst_offset = _load_timedelta(dst_offset)
        delta = _load_timedelta(self.dst_diff)
        self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)

        # These are assertions because the constructor should only be called
        # by functions that would fail before passing start or end
        assert start is not None, "No transition start specified"
        assert end is not None, "No transition end specified"

        self.get_trans_info = self._get_trans_info
        self.get_trans_info_fromutc = self._get_trans_info_fromutc

    def transitions(self, year):
        # (start, end) transition times of DST for ``year``, as epoch
        # seconds expressed in local time.
        start = self.start.year_to_epoch(year)
        end = self.end.year_to_epoch(year)
        return start, end

    def _get_trans_info(self, ts, year, fold):
        """Get the information about the current transition - tti"""
        start, end = self.transitions(year)

        # With fold = 0, the period (denominated in local time) with the
        # smaller offset starts at the end of the gap and ends at the end of
        # the fold; with fold = 1, it runs from the start of the gap to the
        # beginning of the fold.
        #
        # So in order to determine the DST boundaries we need to know both
        # the fold and whether DST is positive or negative (rare), and it
        # turns out that this boils down to fold XOR is_positive.
        if fold == (self.dst_diff >= 0):
            end -= self.dst_diff
        else:
            start += self.dst_diff

        if start < end:
            isdst = start <= ts < end
        else:
            # DST spans the new year (e.g. southern hemisphere): DST is
            # everything *outside* [end, start).
            isdst = not (end <= ts < start)

        return self.dst if isdst else self.std

    def _get_trans_info_fromutc(self, ts, year):
        # Same as _get_trans_info, but ``ts`` is UTC-based; also infers and
        # returns the fold for ambiguous wall times.
        start, end = self.transitions(year)
        start -= self.std.utcoff.total_seconds()
        end -= self.dst.utcoff.total_seconds()

        if start < end:
            isdst = start <= ts < end
        else:
            isdst = not (end <= ts < start)

        # For positive DST, the ambiguous period is one dst_diff after the end
        # of DST; for negative DST, the ambiguous period is one dst_diff before
        # the start of DST.
        if self.dst_diff > 0:
            ambig_start = end
            ambig_end = end + self.dst_diff
        else:
            ambig_start = start
            ambig_end = start - self.dst_diff

        fold = ambig_start <= ts < ambig_end

        return (self.dst if isdst else self.std, fold)
| return epoch 542 | 543 | 544 | class _CalendarOffset: 545 | __slots__ = ["m", "w", "d", "hour", "minute", "second"] 546 | 547 | _DAYS_BEFORE_MONTH = ( 548 | -1, 549 | 0, 550 | 31, 551 | 59, 552 | 90, 553 | 120, 554 | 151, 555 | 181, 556 | 212, 557 | 243, 558 | 273, 559 | 304, 560 | 334, 561 | ) 562 | 563 | def __init__(self, m, w, d, hour=2, minute=0, second=0): 564 | if not 0 < m <= 12: 565 | raise ValueError("m must be in (0, 12]") 566 | 567 | if not 0 < w <= 5: 568 | raise ValueError("w must be in (0, 5]") 569 | 570 | if not 0 <= d <= 6: 571 | raise ValueError("d must be in [0, 6]") 572 | 573 | self.m = m 574 | self.w = w 575 | self.d = d 576 | self.hour = hour 577 | self.minute = minute 578 | self.second = second 579 | 580 | @classmethod 581 | def _ymd2ord(cls, year, month, day): 582 | return ( 583 | _post_epoch_days_before_year(year) 584 | + cls._DAYS_BEFORE_MONTH[month] 585 | + (month > 2 and calendar.isleap(year)) 586 | + day 587 | ) 588 | 589 | # TODO: These are not actually epoch dates as they are expressed in local time 590 | def year_to_epoch(self, year): 591 | """Calculates the datetime of the occurrence from the year""" 592 | # We know year and month, we need to convert w, d into day of month 593 | # 594 | # Week 1 is the first week in which day `d` (where 0 = Sunday) appears. 595 | # Week 5 represents the last occurrence of day `d`, so we need to know 596 | # the range of the month. 597 | first_day, days_in_month = calendar.monthrange(year, self.m) 598 | 599 | # This equation seems magical, so I'll break it down: 600 | # 1. calendar says 0 = Monday, POSIX says 0 = Sunday 601 | # so we need first_day + 1 to get 1 = Monday -> 7 = Sunday, 602 | # which is still equivalent because this math is mod 7 603 | # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need 604 | # to do anything to adjust negative numbers. 605 | # 3. Add 1 because month days are a 1-based index. 
class _CalendarOffset:
    """A TZ-string transition day given as ``Mm.w.d``: the ``w``-th
    occurrence of weekday ``d`` (0 = Sunday) in month ``m``."""

    __slots__ = ["m", "w", "d", "hour", "minute", "second"]

    # Cumulative day counts before each month for a non-leap year,
    # 1-indexed by month (index 0 is a -1 filler so that the 1-based
    # ``day`` in _ymd2ord works out).
    _DAYS_BEFORE_MONTH = (
        -1,
        0,
        31,
        59,
        90,
        120,
        151,
        181,
        212,
        243,
        273,
        304,
        334,
    )

    def __init__(self, m, w, d, hour=2, minute=0, second=0):
        if not 0 < m <= 12:
            raise ValueError("m must be in (0, 12]")

        if not 0 < w <= 5:
            raise ValueError("w must be in (0, 5]")

        if not 0 <= d <= 6:
            raise ValueError("d must be in [0, 6]")

        self.m = m
        self.w = w
        self.d = d
        self.hour = hour
        self.minute = minute
        self.second = second

    @classmethod
    def _ymd2ord(cls, year, month, day):
        # Days between 1970-01-01 and (year, month, day); the boolean
        # ``month > 2 and isleap`` adds the Feb 29 correction.
        return (
            _post_epoch_days_before_year(year)
            + cls._DAYS_BEFORE_MONTH[month]
            + (month > 2 and calendar.isleap(year))
            + day
        )

    # TODO: These are not actually epoch dates as they are expressed in local time
    def year_to_epoch(self, year):
        """Calculates the datetime of the occurrence from the year"""
        # We know year and month, we need to convert w, d into day of month
        #
        # Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
        # Week 5 represents the last occurrence of day `d`, so we need to know
        # the range of the month.
        first_day, days_in_month = calendar.monthrange(year, self.m)

        # This equation seems magical, so I'll break it down:
        # 1. calendar says 0 = Monday, POSIX says 0 = Sunday
        #    so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
        #    which is still equivalent because this math is mod 7
        # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
        #    to do anything to adjust negative numbers.
        # 3. Add 1 because month days are a 1-based index.
        month_day = (self.d - (first_day + 1)) % 7 + 1

        # Now use a 0-based index version of `w` to calculate the w-th
        # occurrence of `d`
        month_day += (self.w - 1) * 7

        # month_day will only be > days_in_month if w was 5, and `w` means
        # "last occurrence of `d`", so now we just check if we over-shot the
        # end of the month and if so knock off 1 week.
        if month_day > days_in_month:
            month_day -= 7

        ordinal = self._ymd2ord(year, self.m, month_day)
        epoch = ordinal * 86400
        epoch += self.hour * 3600 + self.minute * 60 + self.second
        return epoch
def _parse_tz_str(tz_str):
    """Parse a POSIX TZ string into a ``_TZStr`` (zone with DST rules) or a
    static ``_ttinfo`` (fixed-offset zone).

    Raises ValueError for malformed strings, for a DST zone with missing
    transition rules, and for transition rules given without a DST zone.
    """
    # The tz string has the format:
    #
    # std[offset[dst[offset],start[/time],end[/time]]]
    #
    # std and dst must be 3 or more characters long and must not contain
    # a leading colon, embedded digits, commas, nor a plus or minus signs;
    # The spaces between "std" and "offset" are only for display and are
    # not actually present in the string.
    #
    # The format of the offset is ``[+|-]hh[:mm[:ss]]``

    offset_str, *start_end_str = tz_str.split(",", 1)

    # fmt: off
    # NOTE: the named groups (?P<std>, ?P<stdoff>, ?P<dst>, ?P<dstoff>) are
    # required by the m.group(...) calls below; they had been lost from this
    # pattern, which left it unable to compile/match — restored here.
    parser_re = re.compile(
        r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
        r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" +
        r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
        r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" +
        r")?" +  # dst
        r")?$"  # stdoff
    )
    # fmt: on

    m = parser_re.match(offset_str)

    if m is None:
        raise ValueError(f"{tz_str} is not a valid TZ string")

    std_abbr = m.group("std")
    dst_abbr = m.group("dst")
    dst_offset = None

    # Quoted abbreviations (e.g. <UTC+3>) carry their angle brackets in the
    # match; strip them off.
    std_abbr = std_abbr.strip("<>")

    if dst_abbr:
        dst_abbr = dst_abbr.strip("<>")

    std_offset = m.group("stdoff")
    if std_offset:
        try:
            std_offset = _parse_tz_delta(std_offset)
        except ValueError as e:
            raise ValueError(f"Invalid STD offset in {tz_str}") from e
    else:
        std_offset = 0

    if dst_abbr is not None:
        dst_offset = m.group("dstoff")
        if dst_offset:
            try:
                dst_offset = _parse_tz_delta(dst_offset)
            except ValueError as e:
                raise ValueError(f"Invalid DST offset in {tz_str}") from e
        else:
            # Default DST offset is one hour ahead of standard time.
            dst_offset = std_offset + 3600

        if not start_end_str:
            raise ValueError(f"Missing transition rules: {tz_str}")

        start_end_strs = start_end_str[0].split(",", 1)
        try:
            start, end = (_parse_dst_start_end(x) for x in start_end_strs)
        except ValueError as e:
            raise ValueError(f"Invalid TZ string: {tz_str}") from e

        return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
    elif start_end_str:
        raise ValueError(f"Transition rule present without DST: {tz_str}")
    else:
        # This is a static ttinfo, don't return _TZStr
        return _ttinfo(
            _load_timedelta(std_offset), _load_timedelta(0), std_abbr
        )
== "J": 712 | n_is_julian = True 713 | date = date[1:] 714 | else: 715 | n_is_julian = False 716 | 717 | doy = int(date) 718 | offset = _DayOffset(doy, n_is_julian) 719 | 720 | if time: 721 | time_components = list(map(int, time[0].split(":"))) 722 | n_components = len(time_components) 723 | if n_components < 3: 724 | time_components.extend([0] * (3 - n_components)) 725 | offset.hour, offset.minute, offset.second = time_components 726 | 727 | return offset 728 | 729 | 730 | def _parse_tz_delta(tz_delta): 731 | match = re.match( 732 | r"(?P[+-])?(?P\d{1,2})(:(?P\d{2})(:(?P\d{2}))?)?", 733 | tz_delta, 734 | ) 735 | # Anything passed to this function should already have hit an equivalent 736 | # regular expression to find the section to parse. 737 | assert match is not None, tz_delta 738 | 739 | h, m, s = ( 740 | int(v) if v is not None else 0 741 | for v in map(match.group, ("h", "m", "s")) 742 | ) 743 | 744 | total = h * 3600 + m * 60 + s 745 | 746 | if not -86400 < total < 86400: 747 | raise ValueError( 748 | "Offset must be strictly between -24h and +24h:" + tz_delta 749 | ) 750 | 751 | # Yes, +5 maps to an offset of -5h 752 | if match.group("sign") != "-": 753 | total *= -1 754 | 755 | return total 756 | -------------------------------------------------------------------------------- /src/backports/zoneinfo/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pganssle/zoneinfo/666d80c27bda69541130758bbc7f1c9e035f79a0/src/backports/zoneinfo/py.typed -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pganssle/zoneinfo/666d80c27bda69541130758bbc7f1c9e035f79a0/tests/__init__.py -------------------------------------------------------------------------------- /tests/_support.py: 
def call_once(f):
    """Decorator that ensures a function is only ever called once."""
    guard = threading.Lock()
    # lru_cache memoizes the single (argument-less) result; note it does
    # not cache exceptions, so a failing first call may be retried.
    memoized = functools.lru_cache(None)(f)

    @functools.wraps(f)
    def wrapper():
        # Serialize callers so the first (real) invocation cannot race.
        with guard:
            return memoized()

    return wrapper
@contextlib.contextmanager
def set_zoneinfo_module(module):
    """Make sure sys.modules["backports.zoneinfo"] refers to `module`.

    This is necessary because `pickle` will refuse to serialize a type
    calling itself `zoneinfo.ZoneInfo` unless `zoneinfo.ZoneInfo` refers
    to the same object.
    """

    NOT_PRESENT = object()
    old_zoneinfo = sys.modules.get("backports.zoneinfo", NOT_PRESENT)
    sys.modules["backports.zoneinfo"] = module
    try:
        yield
    finally:
        # Restore the previous state even if the body raised; previously an
        # exception would leak ``module`` into sys.modules, and when no
        # module had been present beforehand the temporary entry was never
        # removed at all.
        if old_zoneinfo is not NOT_PRESENT:
            sys.modules["backports.zoneinfo"] = old_zoneinfo
        else:
            sys.modules.pop("backports.zoneinfo", None)
    @contextlib.contextmanager
    def tzpath_context(self, tzpath, block_tzdata=True, lock=TZPATH_LOCK):
        """Temporarily point the module under test at ``tzpath``.

        When ``block_tzdata`` is true, the ``tzdata`` package is made
        unimportable for the duration so that only ``tzpath`` supplies
        time zone data. ``lock`` serializes concurrent TZPATH changes.
        """

        def pop_tzdata_modules():
            # Remove every cached ``tzdata`` (sub)module from sys.modules,
            # returning them so they can be restored on exit.
            tzdata_modules = {}
            for modname in list(sys.modules):
                if modname.split(".", 1)[0] != "tzdata":  # pragma: nocover
                    continue

                tzdata_modules[modname] = sys.modules.pop(modname)

            return tzdata_modules

        with lock:
            if block_tzdata:
                # In order to fully exclude tzdata from the path, we need to
                # clear the sys.modules cache of all its contents — setting the
                # root package to None is not enough to block direct access of
                # already-imported submodules (though it will prevent new
                # imports of submodules).
                tzdata_modules = pop_tzdata_modules()
                sys.modules["tzdata"] = None

            old_path = self.module.TZPATH
            try:
                self.module.reset_tzpath(tzpath)
                yield
            finally:
                if block_tzdata:
                    # Drop the None blocker and restore the saved modules.
                    sys.modules.pop("tzdata")
                    for modname, module in tzdata_modules.items():
                        sys.modules[modname] = module

                self.module.reset_tzpath(old_path)
"lvM4AHYSMupWmp+x3&!laKyMcL!JwCSiq+!!I>B!SWa}rg~PbhxR7h#0Ti&W3GNY5&!@Iq", 29 | "pvg9FTIyA00GYj{Sp8G$Q@<+vBYQl0ssI200dcD" 30 | ], 31 | "America/Los_Angeles": [ 32 | "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0q%JPh9{i7KxBg5R*;#4G>H)lhRYu$%jt%!jv+I", 33 | "JxhE=%W1?wYb!37Rb?(rgwFIAQI{L#8r*zy!$TMtER_1(vn(Zix^{AVB1(jwr$iL6h0Z!2", 34 | "8Gb~UW@0~e512{Z%8}Qzdnjl~wJ1{c2>`Z@1A~t&lyL{p{eM{5)QGf7Mo5FW9==mlyXJt2", 35 | "UwpntR7H0eSq!(aYq#aqUz&RM*tvuMI)AsM?K3-dV3-", 36 | "TT{t)!Iy#JTo=tXkzAM9~j2YbiOls3(H8Dc>Y|D1aqL51vjLbpYG;GvGTQB4bXuJ%mA;(B", 37 | "4eUpu$$@zv2vVcq-", 38 | "Y)VKbzp^teiuzy}R{LuvDjpuVb`79O+CBmg{Wx!", 39 | "bvx$eu4zRE&PehMb=&G<9$>iZ|bFE)0=4I?KLFGBC0I(0_svgw0%FiMsT%koo*!nEYc6GY", 40 | "@QnU}&4Isg;l=|khi(!VaiSE2=Ny`&&tpi~~;{$uMiozmmC;M)^ZwTGGtj|3E=aVrD5Bqa^;RV?wWs=hsohk2ql1XcqaOW|", 45 | "E4urOCR0a_;Id)cJ-GU5w3iJ10Aox!FH|8SHoiD_{JMEEOV0O?MDsDaI;Y>v=$rgN_q~t`", 46 | ";bL0p0cr+3oR<~L*q!A1N=YxYRoqaQo%ua#07hqE;U_X@B)L3Jd;2oN%}66Gz+0zO^<`*Jt_si5v;Qg-", 49 | "3q})ae)#%Dl>20mc9;6%A4(VObs;0JpQAGD7ADLJ?Rqiv#V5ZljDomsklGi_^8gtLkmRF>EstlBz)cI_>MG*?Z%S", 51 | "T;72Q8^Ku7t$MI_A1{5SEE>mII;1T8kfJf_FgrXNwx!AelhM`I@+>J+gg!uCyW&Vjnd0r~", 52 | "xZpKPsHdS4@$h%w6Eq6>tdfnmQLWvk`|e2>DM-E4Qm9Nc-", 53 | "(V_Gh@)x^Nj3H9p`fLm1IA_eaBVtRg`l1J@5pcFi$-", 54 | "31XWMxJ!;}xqC^I};&9=Az*&VH*lfN0M|Gx-9!ypV67!-KSwt=VI-*jej4FC~WiMTU+-", 55 | "L<2+d_ew|{n~!VJuGlSJ0d_=`@?_jC3h)r_?|J_;aJ|`U^>F6N?3pcY069b8yy#(^>pP8L", 56 | "*@Vg?IIMosq$u-00HX=q!s`G%_Rg?vBYQl0ssI200dcD" 57 | ], 58 | "America/Santiago": [ 59 | "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0e+L++6@F7KxBg84(fsEAUJ$9Yj;p>Mq3>*@-", 60 | "8>razI(EFyOtYzTyCY`3kVX~k`hSJt^$bT@V;iGRbT`JI3V)lBnui#lA$wIsfE8sL=RFVi", 61 | "Wi5>;Y=+h5(#B8hW33CVCYO92MtEmXQtd@", 68 | "E+3y)$%o~XozOC|QgFl*-mp9+(`s0w4O4a$v!xt`G&ZI1uE4c`JE-t5WW%1*91J-", 69 | "tr^_>U@@l^R_i@%gJ)q{kYX|w-", 70 | "m{DbN=Z+Zey_(<=ZlU4e**~fkShDtX&9_W>qj>RuJZ-ko&-nY}Eq_?SFX8N<9wT-TH*xbF0XT;L+siI1gPQrnKc|85", 72 | "UZaq6@7JAVeu%)Aa6I>6D;F*N)|f6jhs01q_UU%gc_Ek#>SbTrf6i;OsAQ>8&urTeX|Ier", 73 | "uSV{o||MjbXV?)AHzh1+v~odHV<_+-", 
74 | "M0v=QR6~y%&K00{%grZ=+5)JNlFg!9Q}2rv{$OC*D{_yiUd$SL!S)f@K#pe<#CBb&}BH0p", 75 | "%w(SzGJjGX`q+ybM!e4GEP>b&S_4lnbBR!*5>ZIxYP?QK2DIny}eL_od7Ll|WdneS(sbh~", 76 | "!}|r_VlzIOB|lsf+rf*B~wqYWzu*", 78 | "k-", 79 | "19B1stj0gO@Kd^97k>cVnNvEEaMwZ6=ObszLNnyDGMW0?9>PhVq$r$BCx^ZdJ#fO^fC1JG", 80 | "oW>FS5oC!pr7=y}O@ecL6T+R$E20#GvlwVfswl?qnCEe|@!hI*Ooyk8~vfN}h@~NGp9$00", 81 | "H<1(-Qyy=a@C3vBYQl0ssI200dcD" 82 | ], 83 | "Asia/Tokyo": [ 84 | "{Wp48S^xk9=GL@E0stWa761SMbT8$j-", 85 | "~luMgIxeB7KxBg5R*;y?l4Rl4neXH3cv!OtfK@hKZzauI)S!FSDREPhhBS6Fb$&Vv#7%;?", 86 | "Te|>pF^0HBr&z_Tk<%vMW_QqjevRZOp8XVFgP<8TkT#`9H&0Ua;gT1#rZLV0HqbAKK;_z@", 87 | "nO;6t0L+w;XP~5#XzS!mmq>y(@${PQsHdkBYiJixTuCQ2N?55>HhxKTEufgeVl}_", 93 | "Sq_FUIXFF$kI6!-", 94 | "eKVzudw1Zyg8;a%MynCz&_3Cl@%Zs*_JRo#I#Ss92V)!~{AwVung!|wK?4JFyFv#=nfhNI", 95 | "6Czr|3VjINV7V6#eFr=Gl1UDdL9MQRWZ`RiZET+fl#=kxZ_-", 97 | "G@=#VCQEvCZVdj9;e?rY8ynpo&34DPa3", 98 | "KI_Sc_0#?r^44F}Rzb_ChM7PPrq3@FkqqjWqgvOzPq+vkb;DT&2YMC|tC3lu6Ihz2prrFP", 99 | "vOU@6I3$XU@68kFFWx8fx?jt@frXW`;AZso*plI73>*wENVoc+2cm`jgoe6(B`w`{BdxVOWD1rqVTCN+Y4kRvHLYXlr)vU4G7S$p~Ia_n|G>=3N5dDB8cS6IGd-ckfc7", 104 | "8#^71HtN_BO2#F%&%Ll+IhR1DlEUG@+#_Yl8AKa1US?{&+yOwA=WA=!z-", 105 | "B9t2>Tn9Wu2*8Ydrbage(I4dkD_vcP(|c-", 106 | "LhGRkSn{W*nk?RJQN!j1RSZ}L6e<&UGMisk>nLH5JiLNgOSC7c&Mn1+Pit*Gz;s=M?_q!v", 107 | "DZ@dJctlzl2>W(rvs{(CERM)RT0xuqDAl{)$*ztEhe=?|;3u&!6O1c88W}SFHDBBHh%rrh", 108 | "U6=f6kjYt!N19xk*s_9IpbeS;(cPoTPX!vr00FE9ju8L=r}(KdvBYQl0ssI200dcD" 109 | ], 110 | "Europe/Dublin": [ 111 | "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0>b$_gw%g7KxBg5R*;&J77#T_U2R5sleVWFDmK~", 112 | "Kzj5oh@`QKHvW^6V{jU-", 113 | "w>qg1tSt0c^vh;?qAqA0%t?;#S~6U8Qiv&f1s9IH#g$m1k1a#3+lylw4mwT4QnEUUQdwg+", 114 | "xnEcBlgu31bAVabn41OMZVLGz6NDwG%XuQar!b>GI{qSahE`AG}$kRWbuI~JCt;38)Xwbb", 115 | "~Qggs55t+MAHIxgDxzTJ;2xXx99+qCy445kC#v_l8fx|G&jlVvaciR<-", 116 | "wwf22l%4(t@S6tnX39#_K(4S0fu$FUs$isud9IKzCXB78NkARYq@9Dc0TGkhz);NtM_SSzEffNl{2^*CKGdp52h!52A)6q9fUSltXF{T", 124 | 
"*Ehc9Q7u8!W7pE(Fv$D$cKUAt6wY=DA1mGgxC*VXq_If3G#FY6-", 125 | "Voj`fIKk`0}Cc72_SD{v>468LV{pyBI33^p0E?}RwDA6Pkq--C~0jF&Z@Pv!dx_1SN_)jw", 126 | "z@P$(oK%P!Tk9?fRjK88yxhxlcFtTjjZ$DYssSsa#ufYrR+}}nKS+r384o~!Uw$nwTbF~q", 127 | "gRsgr0N#d@KIinx%hQB(SJyjJtDtIy(%mDm}ZBGN}dV6K~om|=UVGkbciQ=^$_", 128 | "14|gT21!YQ)@y*Rd0i_lS6gtPBE9+ah%WIJPwzUTjIr+J1XckkmA!6WE16%CVAl{Dj@)nt", 129 | "QW>5WCgG;uYjCg(j>qCs(*}h9IWV8w>LXsNq+C>vLWc", 130 | ">tZt7asixk*XDd^+o!O=O52B7aPqt&HfgmRCd4-+ky?P-", 131 | "XP?5Np6dxH29G;c3@torNWCs!ufT_DK<~j-", 132 | "UgqAdf5G4ZdP`bECDkw8*?qvrtnMfoJT^Wq|8_~4;n!EAt=*dv)-", 133 | "0yF8uKXAiZ~3d|7Z>ZO|t~YD2@;`6Qg{xlf8Q&;*xB-Ys_R2Cp{y((HBw7kN@@CtfA<<$B", 134 | "fGc2BhK{XBpaQiJGWmO!j9XN7n0YhjD8vjw0{?{0zZm5js?lNS*8*{rU&d|`Rl_i(p2nHf}VB&&P+m6k^", 137 | "y|9Wu}iApF2OdyoJC2VV0^9?>qd00ENq*;#~kL7KxBg5R*;*X|PN+bCLBKjepiN+t(&A", 141 | "khpF;7sO(r>1T)-$|__aBOdk_AHZjxHc6G@vMd>R0(2H&-", 142 | "lZStc_Mr1wJ{6w{DgJ07=uC+mjqe(^3LyjJl_5x7WnW!i)jW$*QxT7tFAUv{c;}UD", 144 | "0H`Dud)3CsV-", 145 | "B~WRZ4>5w)TOsq4qlzR0HC1?s(1MKs{Eogyi`;Y+znIqt&FWj{&tFy3%5Pps74N;*?>NDZ", 146 | "1k?qO?%Nh0<}nG&KfuzSe#dL89UrA4EnAX`-", 147 | "~qYbG>pM_u;k{K&mpC(>+L$#Lq7F=y060qa*{c!+o{}x9kVcEF8>KV?OV^14E315e", 148 | "&Z)vCbva(U*zrw8fa^*@04V!@Y@Tn~!bdv=_$LG2ZvKndFDcVJn{9=CY#%ki==CjzwbM;(", 150 | "MI%9)YfId|<7iL?msY(}yKy}A|wSQJ0{_&g_x6mCzWVKO(267&>g)Z8p@5W33?3MlNXAlv", 151 | "VpbJ3!}!`V^eT&vv$R#F-", 152 | "CBNtc7$fwmz+Ns6+a)pLiPZn6p%oT^DUw(PBEy>n+uqD2hAEumi`6oOL{5VgdB1Q`BRc&0", 153 | "8gNzq^95eYtWVM<&-z1;^I=(}1TzHnb+HN8pGK8M-", 155 | "hv)`R>C!Nj&*oBO8o*UJ)0VM+aR34raZbR*}39#J(N%Kktu^!EGp+TABM>g)CXGh4gq}v{ChM9Ce1euCqfg8aBi(ARet*G7wd-Vn?ZqL^NkNFF2G", 158 | "lEw_25P%r43AdRZ|obE7s>=5Vz)ZF}e#LMkCK$jm7${M$kfDZ4Yxhyk=>a>d7iCWWWyC)itD;qbYfi4%>JYC>5Nia6^n2", 160 | "~58^nPqZRsha00{xM=RQhFYcZQ9Y9uhOHyah{Hhj-VN@C*?Dwi", 161 | ";C0Q3x2B5Rp#I4?H#MRpJ", 164 | "iZNR1k7d?C=rtZX~@_o+Zdwm?LXBC~-kWZsv|6Z{oHzl-", 165 | ";#ZZ=>$djY*B0KQ=4Ejm*XH$7PJZZH+R{uCm5kYjw@y#@5`z{+;rf{~u6e9=S9U36;2)J#", 166 | "Qv>*afkHFNFsOfu^`s+YLAZM0E-", 167 
| "SI%+MqRwGa>(&6S^C#$IWf`c8`+CK^p`G|=!wXJ`Fi|qC^+%FR}|R>LaARaL_b0;QvKprS", 168 | "h>;6yMC%rGfe@ev&5|F^v6|rv4x0i00000QhlUMSRJNX00I69sT%+Q+tkg$vBYQl0ssI20", 169 | "0dcD" 170 | ], 171 | "Europe/London": [ 172 | "{Wp48S^xk9=GL@E0stWa761SMbT8$j;0{j(Ib8rM7KxBg5R*;%EAUJ=!kQs3DZt(=0_!m1", 173 | "4wvE`6N%Vj#u6PS_3S?~(2)&xn8}2}3Wr#kG8n2!x8>$E$lF&~Y#_H6bu6(BiwblJ>;-", 174 | "FsgA$Y$*?=X)n1pFkKn}F~`>=4)+LLQk?L*P!bhAm0;`N~z3QbUIyVrm%kOZ(n1JJsm0py", 175 | "b8!GV{d*C!9KXv;4vD4Q>-", 177 | "k#+x(!V5L@w5M>v2V5a`B>t(|B|Fqr4^-", 179 | "{S*%Ep~ojUtx_CRbSQ(uFwu2=KH)Q@EBs@ZqRXn4mU;B!68;;IQs3Ub=n&UU%*mk&zwD36", 180 | "&JSwsN(%k&x?H+tN^6)23c`I0=5^N_R0~1>tsFZ`^`3z~rXSXT&qcwa#n#k^5sFT7HKtyC", 181 | "9A0oluNf^P4yk-d_|mUd;T0;~y@2Cu@Xd;IRT#++Y*Z6*0%M4`z`3a2vnHc-", 183 | "B(#xp*94c8t_y=lV~#Q=Whn&p<+Qzs`pL<^`L`BrF9ohSDa^SnosMt{zpFXpUjqumClb$D", 184 | "^}s!&5JM`)L?H!Gzj6q@G0mVG?P#xu;u1DMoWN$14T", 185 | "b>^F1?&ymdJ4TS`!aAO1#Myz0fU6<#?jJoc&X-", 186 | "z&$D#Zj!Dg3w**{JNc(J;D0DrI1K1-", 188 | "iv3~i7vFT?5yqP3rSI@jsz*k!0bE*~Pkn+g>u6Z;Se-", 189 | ")c$&;tI=8t;#10nr-)VFCx@^bf}Ak2a$g+MN&Pv7{W+wBYjvrBy)wwx-", 190 | "|SlQynn=O3ULjr$jM*QQAezq5|DqO63Zf+jql$4rQ2m|lPqGyA<>cV6bIA", 192 | "o~*{`bgCh=A>WU}#b$t&5MPj$~|esT>8v-", 195 | "C$_n!^iqI5;Q?X=YtG#wS0iS@Z+K+>Pw@;#vN~2NVaNcNiR~iPI$HEp0CZblbg!9944qJf", 196 | "e^U%kyd74NW@4bR+8mK^RfSa}|2+8EsMDCnOSZ@D)kcJx`Cx~hD", 200 | "N7PV9xa~1y#{E$Hy#KX4zE=~k$7%Vm?00000LN&_Vg>>9w00Gqs&>R2&ZK8hovBYQl0ssI", 201 | "200dcD" 202 | ], 203 | "Pacific/Kiritimati": [ 204 | "{Wp48S^xk9=GL@E0stWa761SMbT8$j-~itMU0nbw7KxBg5R*{Ky3@w34e-", 205 | "emlEe%!1&HoVz@2)g91$Qvf%e=}%J+k5^yTOu{Q*OrV;`agAU{YYC1>Bq#Lo)Ct}OFqAaP", 206 | "SYvN>0#bjna^XrQl~da<70IbZ?+00000VC(mK^UzXX00DX60RR91ZYU6cvBYQl0ssI200d", 207 | "cD" 208 | ], 209 | "UTC": [ 210 | "{Wp48S^xk9=GL@E0stWa761SMbT8$j-", 211 | "~e#|9bEt_7KxBg5R*|3h1|xhHLji!C57qW6L*|HpEErm00000ygu;I+>V)?00B92fhY-", 212 | "(AGY&-0RR9100dcD" 213 | ] 214 | }, 215 | "metadata": { 216 | "version": "2022g" 217 | } 218 | } 219 | 
def _valid_keys():
    """Get available time zones, including posix/ and right/ directories."""
    available_zones = sorted(zoneinfo.available_timezones())
    TZPATH = zoneinfo.TZPATH

    def valid_key(key):
        # A key is usable if it resolves to a file on TZPATH or to a
        # resource inside the ``tzdata`` package.
        for root in TZPATH:
            key_file = os.path.join(root, key)
            if os.path.exists(key_file):
                return True

        components = key.split("/")
        package_name = ".".join(["tzdata.zoneinfo"] + components[:-1])
        resource_name = components[-1]

        try:
            return resources.is_resource(package_name, resource_name)
        except ModuleNotFoundError:
            # tzdata (or the subpackage) is not installed at all.
            return False

    # This relies on the fact that dictionaries maintain insertion order — for
    # shrinking purposes, it is preferable to start with the standard version,
    # then move to the posix/ version, then to the right/ version.
    out_zones = {"": available_zones}
    for prefix in ["posix", "right"]:
        prefix_out = []
        for key in available_zones:
            prefix_key = f"{prefix}/{key}"
            if valid_key(prefix_key):
                prefix_out.append(prefix_key)

        out_zones[prefix] = prefix_out

    output = []
    for keys in out_zones.values():
        output.extend(keys)

    return output
def valid_keys():
    # Hypothesis strategy drawing keys from the precomputed VALID_KEYS list.
    return hypothesis.strategies.sampled_from(VALID_KEYS)


class ZoneInfoTest(ZoneInfoTestBase):
    """Property tests for the pure-Python ZoneInfo implementation."""

    module = py_zoneinfo

    @hypothesis.given(key=valid_keys())
    def test_str(self, key):
        # str() of a ZoneInfo must be its key.
        zi = self.klass(key)
        self.assertEqual(str(zi), key)

    @hypothesis.given(key=valid_keys())
    def test_key(self, key):
        zi = self.klass(key)

        self.assertEqual(zi.key, key)

    @hypothesis.given(
        dt=hypothesis.strategies.one_of(
            hypothesis.strategies.datetimes(), hypothesis.strategies.times()
        )
    )
    def test_utc(self, dt):
        # UTC must report a zero utcoffset/dst and the name "UTC" for any
        # datetime or time value.
        zi = self.klass("UTC")
        dt_zi = dt.replace(tzinfo=zi)

        self.assertEqual(dt_zi.utcoffset(), ZERO)
        self.assertEqual(dt_zi.dst(), ZERO)
        self.assertEqual(dt_zi.tzname(), "UTC")


class CZoneInfoTest(ZoneInfoTest):
    """Same property tests, run against the C-accelerated implementation."""

    module = c_zoneinfo
@hypothesis.given(key=valid_keys()) 129 | def test_pickle_unpickle_no_cache(self, key): 130 | zi = self.klass.no_cache(key) 131 | pkl_str = pickle.dumps(zi) 132 | zi_rt = pickle.loads(pkl_str) 133 | 134 | self.assertIsNot(zi, zi_rt) 135 | self.assertEqual(str(zi), str(zi_rt)) 136 | 137 | @hypothesis.given(key=valid_keys()) 138 | def test_pickle_unpickle_cache_multiple_rounds(self, key): 139 | """Test that pickle/unpickle is idempotent.""" 140 | zi_0 = self.klass(key) 141 | pkl_str_0 = pickle.dumps(zi_0) 142 | zi_1 = pickle.loads(pkl_str_0) 143 | pkl_str_1 = pickle.dumps(zi_1) 144 | zi_2 = pickle.loads(pkl_str_1) 145 | pkl_str_2 = pickle.dumps(zi_2) 146 | 147 | self.assertEqual(pkl_str_0, pkl_str_1) 148 | self.assertEqual(pkl_str_1, pkl_str_2) 149 | 150 | self.assertIs(zi_0, zi_1) 151 | self.assertIs(zi_0, zi_2) 152 | self.assertIs(zi_1, zi_2) 153 | 154 | @hypothesis.given(key=valid_keys()) 155 | def test_pickle_unpickle_no_cache_multiple_rounds(self, key): 156 | """Test that pickle/unpickle is idempotent.""" 157 | zi_cache = self.klass(key) 158 | 159 | zi_0 = self.klass.no_cache(key) 160 | pkl_str_0 = pickle.dumps(zi_0) 161 | zi_1 = pickle.loads(pkl_str_0) 162 | pkl_str_1 = pickle.dumps(zi_1) 163 | zi_2 = pickle.loads(pkl_str_1) 164 | pkl_str_2 = pickle.dumps(zi_2) 165 | 166 | self.assertEqual(pkl_str_0, pkl_str_1) 167 | self.assertEqual(pkl_str_1, pkl_str_2) 168 | 169 | self.assertIsNot(zi_0, zi_1) 170 | self.assertIsNot(zi_0, zi_2) 171 | self.assertIsNot(zi_1, zi_2) 172 | 173 | self.assertIsNot(zi_0, zi_cache) 174 | self.assertIsNot(zi_1, zi_cache) 175 | self.assertIsNot(zi_2, zi_cache) 176 | 177 | 178 | class CZoneInfoPickleTest(ZoneInfoPickleTest): 179 | module = c_zoneinfo 180 | 181 | 182 | class ZoneInfoCacheTest(ZoneInfoTestBase): 183 | module = py_zoneinfo 184 | 185 | @hypothesis.given(key=valid_keys()) 186 | def test_cache(self, key): 187 | zi_0 = self.klass(key) 188 | zi_1 = self.klass(key) 189 | 190 | self.assertIs(zi_0, zi_1) 191 | 192 | 
@hypothesis.given(key=valid_keys()) 193 | def test_no_cache(self, key): 194 | zi_0 = self.klass.no_cache(key) 195 | zi_1 = self.klass.no_cache(key) 196 | 197 | self.assertIsNot(zi_0, zi_1) 198 | 199 | 200 | class CZoneInfoCacheTest(ZoneInfoCacheTest): 201 | klass = c_zoneinfo.ZoneInfo 202 | 203 | 204 | class PythonCConsistencyTest(unittest.TestCase): 205 | """Tests that the C and Python versions do the same thing.""" 206 | 207 | def _is_ambiguous(self, dt): 208 | return dt.replace(fold=not dt.fold).utcoffset() == dt.utcoffset() 209 | 210 | @hypothesis.given(dt=hypothesis.strategies.datetimes(), key=valid_keys()) 211 | def test_same_str(self, dt, key): 212 | py_dt = dt.replace(tzinfo=py_zoneinfo.ZoneInfo(key)) 213 | c_dt = dt.replace(tzinfo=c_zoneinfo.ZoneInfo(key)) 214 | 215 | self.assertEqual(str(py_dt), str(c_dt)) 216 | 217 | @hypothesis.given(dt=hypothesis.strategies.datetimes(), key=valid_keys()) 218 | def test_same_offsets_and_names(self, dt, key): 219 | py_dt = dt.replace(tzinfo=py_zoneinfo.ZoneInfo(key)) 220 | c_dt = dt.replace(tzinfo=c_zoneinfo.ZoneInfo(key)) 221 | 222 | self.assertEqual(py_dt.tzname(), c_dt.tzname()) 223 | self.assertEqual(py_dt.utcoffset(), c_dt.utcoffset()) 224 | self.assertEqual(py_dt.dst(), c_dt.dst()) 225 | 226 | @hypothesis.given( 227 | dt=hypothesis.strategies.datetimes( 228 | timezones=hypothesis.strategies.just(UTC) 229 | ), 230 | key=valid_keys(), 231 | ) 232 | @hypothesis.example(dt=MIN_UTC, key="Asia/Tokyo") 233 | @hypothesis.example(dt=MAX_UTC, key="Asia/Tokyo") 234 | @hypothesis.example(dt=MIN_UTC, key="America/New_York") 235 | @hypothesis.example(dt=MAX_UTC, key="America/New_York") 236 | @hypothesis.example( 237 | dt=datetime.datetime(2006, 10, 29, 5, 15, tzinfo=UTC), 238 | key="America/New_York", 239 | ) 240 | def test_same_from_utc(self, dt, key): 241 | py_zi = py_zoneinfo.ZoneInfo(key) 242 | c_zi = c_zoneinfo.ZoneInfo(key) 243 | 244 | # Convert to UTC: This can overflow, but we just care about consistency 245 | 
py_overflow_exc = None 246 | c_overflow_exc = None 247 | try: 248 | py_dt = dt.astimezone(py_zi) 249 | except OverflowError as e: 250 | py_overflow_exc = e 251 | 252 | try: 253 | c_dt = dt.astimezone(c_zi) 254 | except OverflowError as e: 255 | c_overflow_exc = e 256 | 257 | if (py_overflow_exc is not None) != (c_overflow_exc is not None): 258 | raise py_overflow_exc or c_overflow_exc # pragma: nocover 259 | 260 | if py_overflow_exc is not None: 261 | return # Consistently raises the same exception 262 | 263 | # PEP 495 says that an inter-zone comparison between ambiguous 264 | # datetimes is always False. 265 | if py_dt != c_dt: 266 | self.assertEqual( 267 | self._is_ambiguous(py_dt), 268 | self._is_ambiguous(c_dt), 269 | (py_dt, c_dt), 270 | ) 271 | 272 | self.assertEqual(py_dt.tzname(), c_dt.tzname()) 273 | self.assertEqual(py_dt.utcoffset(), c_dt.utcoffset()) 274 | self.assertEqual(py_dt.dst(), c_dt.dst()) 275 | 276 | @hypothesis.given(dt=hypothesis.strategies.datetimes(), key=valid_keys()) 277 | @hypothesis.example(dt=datetime.datetime.max, key="America/New_York") 278 | @hypothesis.example(dt=datetime.datetime.min, key="America/New_York") 279 | @hypothesis.example(dt=datetime.datetime.min, key="Asia/Tokyo") 280 | @hypothesis.example(dt=datetime.datetime.max, key="Asia/Tokyo") 281 | def test_same_to_utc(self, dt, key): 282 | py_dt = dt.replace(tzinfo=py_zoneinfo.ZoneInfo(key)) 283 | c_dt = dt.replace(tzinfo=c_zoneinfo.ZoneInfo(key)) 284 | 285 | # Convert from UTC: Overflow OK if it happens in both implementations 286 | py_overflow_exc = None 287 | c_overflow_exc = None 288 | try: 289 | py_utc = py_dt.astimezone(UTC) 290 | except OverflowError as e: 291 | py_overflow_exc = e 292 | 293 | try: 294 | c_utc = c_dt.astimezone(UTC) 295 | except OverflowError as e: 296 | c_overflow_exc = e 297 | 298 | if (py_overflow_exc is not None) != (c_overflow_exc is not None): 299 | raise py_overflow_exc or c_overflow_exc # pragma: nocover 300 | 301 | if py_overflow_exc is not 
None: 302 | return # Consistently raises the same exception 303 | 304 | self.assertEqual(py_utc, c_utc) 305 | 306 | @hypothesis.given(key=valid_keys()) 307 | def test_cross_module_pickle(self, key): 308 | py_zi = py_zoneinfo.ZoneInfo(key) 309 | c_zi = c_zoneinfo.ZoneInfo(key) 310 | 311 | with test_support.set_zoneinfo_module(py_zoneinfo): 312 | py_pkl = pickle.dumps(py_zi) 313 | 314 | with test_support.set_zoneinfo_module(c_zoneinfo): 315 | c_pkl = pickle.dumps(c_zi) 316 | 317 | with test_support.set_zoneinfo_module(c_zoneinfo): 318 | # Python → C 319 | py_to_c_zi = pickle.loads(py_pkl) 320 | self.assertIs(py_to_c_zi, c_zi) 321 | 322 | with test_support.set_zoneinfo_module(py_zoneinfo): 323 | # C → Python 324 | c_to_py_zi = pickle.loads(c_pkl) 325 | self.assertIs(c_to_py_zi, py_zi) 326 | -------------------------------------------------------------------------------- /tests/typing_example.py: -------------------------------------------------------------------------------- 1 | """Exercises the type stubs for the zoneinfo module.""" 2 | from __future__ import annotations 3 | 4 | import io 5 | import typing 6 | from datetime import datetime, time, timedelta, timezone 7 | from pathlib import Path 8 | from typing import Callable, Optional, Sequence, Set, Tuple 9 | 10 | import backports.zoneinfo as zoneinfo 11 | 12 | REGISTERED_FUNCTIONS = [] 13 | 14 | 15 | def register(f: Callable[[], typing.Any]) -> Callable[[], typing.Any]: 16 | REGISTERED_FUNCTIONS.append(f) 17 | return f 18 | 19 | 20 | @register 21 | def test_constructor() -> zoneinfo.ZoneInfo: 22 | return zoneinfo.ZoneInfo("America/Los_Angeles") 23 | 24 | 25 | def test_no_cache() -> zoneinfo.ZoneInfo: 26 | return zoneinfo.ZoneInfo.no_cache("America/Los_Angeles") 27 | 28 | 29 | def test_from_file_accepts_bytes_io() -> zoneinfo.ZoneInfo: 30 | x = io.BytesIO(b"TZif") 31 | 32 | try: 33 | y = zoneinfo.ZoneInfo.from_file(x, key="BadZone") 34 | except ValueError: 35 | pass 36 | else: 37 | assert False 38 | 39 | return y 
40 | 41 | 42 | def test_clear_cache() -> Sequence[None]: 43 | y = zoneinfo.ZoneInfo.clear_cache( 44 | only_keys=["America/Los_Angeles", "Europe/Lisbon"] 45 | ) 46 | assert y is None 47 | 48 | x = zoneinfo.ZoneInfo.clear_cache() 49 | assert x is None 50 | 51 | return (x, y) 52 | 53 | 54 | def test_reset_tzpath() -> None: 55 | zoneinfo.reset_tzpath(to=[Path("/path/to/blah")]) 56 | zoneinfo.reset_tzpath(to=["/path/to/blah"]) 57 | zoneinfo.reset_tzpath(to=[]) 58 | zoneinfo.reset_tzpath() 59 | 60 | 61 | def test_offset() -> ( 62 | Sequence[Tuple[Optional[str], Optional[timedelta], Optional[timedelta]]] 63 | ): 64 | LA: zoneinfo.ZoneInfo = zoneinfo.ZoneInfo("America/Los_Angeles") 65 | dt: datetime = datetime(2020, 1, 1, tzinfo=LA) 66 | 67 | offsets: typing.List[ 68 | Tuple[Optional[str], Optional[timedelta], Optional[timedelta]] 69 | ] = [] 70 | dt_offset = (dt.tzname(), dt.utcoffset(), dt.dst()) 71 | assert dt_offset == ("PST", timedelta(hours=-8), timedelta(hours=0)) 72 | offsets.append(dt_offset) 73 | 74 | t: time = time(0, tzinfo=LA) 75 | 76 | # TODO: Remove this cast when the bug in typeshed is fixed: 77 | # https://github.com/python/typeshed/pull/3964 78 | t_offset = ( 79 | t.tzname(), 80 | t.utcoffset(), 81 | typing.cast(Optional[timedelta], t.dst()), 82 | ) 83 | assert t_offset == (None, None, None) 84 | offsets.append(t_offset) 85 | 86 | return offsets 87 | 88 | 89 | def test_astimezone() -> Sequence[datetime]: 90 | LA: zoneinfo.ZoneInfo = zoneinfo.ZoneInfo("America/Los_Angeles") 91 | UTC: timezone = timezone.utc 92 | 93 | dt: datetime = datetime(2020, 1, 1, tzinfo=LA) 94 | dt_utc = dt.astimezone(UTC) 95 | dt_rt = dt_utc.astimezone(LA) 96 | 97 | assert dt == dt_rt 98 | assert dt == dt_utc 99 | 100 | return (dt, dt_rt, dt_utc) 101 | 102 | 103 | def test_available_timezones() -> Set[str]: 104 | valid_zones = zoneinfo.available_timezones() 105 | 106 | assert "America/Los_Angeles" in valid_zones 107 | 108 | return valid_zones 109 | 110 | 111 | def call_functions() 
-> None: 112 | for function in REGISTERED_FUNCTIONS: 113 | function() 114 | 115 | print("Success!") 116 | 117 | 118 | if __name__ == "__main__": 119 | call_functions() 120 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | minversion = 3.18.0 3 | isolated_build = True 4 | skip_missing_interpreters = true 5 | 6 | [testenv] 7 | description = Run the tests 8 | deps = 9 | coverage[toml] 10 | dataclasses; python_version<"3.7" 11 | hypothesis>=5.7.0,<5.43 12 | importlib_metadata<5.0.0; python_version<"3.8" 13 | pytest 14 | pytest-cov 15 | pytest-randomly 16 | pytest-subtests 17 | pytest-xdist 18 | extras = 19 | {env:TEST_EXTRAS_TOX:} 20 | setenv = 21 | COVERAGE_FILE={toxworkdir}/.coverage/.coverage.{envname} 22 | CFLAGS={env:CFLAGS:-UNDEBUG} 23 | commands = 24 | pytest {toxinidir} {posargs: --cov=backports.zoneinfo --cov=tests} 25 | 26 | # This should probably be integrated into the main testenv as an option if such 27 | # a thing is possible, but because it's more important that it runs on CI and 28 | # it's not necessarily very cross-platform, I've split it out for now. 
# Runs the test suite with the C extension compiled for gcov instrumentation,
# then converts the results to Cobertura XML for the coverage-report env.
[testenv:gcov]
description = Run the tests and collect C coverage stats
deps =
    gcovr
    dataclasses; python_version<"3.7"
    hypothesis>=5.7.0,<5.43
    importlib_metadata<5.0.0; python_version<"3.8"
    pytest
    pytest-subtests
extras =
    tzdata
passenv =
    CC
setenv =
    CFLAGS=--coverage -fprofile-dir={envdir}/coverage
    LDFLAGS=--coverage
    GCNO_TARGET_DIR={envdir}/coverage
commands =
    pytest {toxinidir}
    gcovr -r {toxinidir} -k
    python -c "import os; os.makedirs('{toxworkdir}/.coverage', exist_ok=True)"
    gcovr -r {toxinidir} --xml \
        -o {toxworkdir}/.coverage/.gcov_coverage.{envname}.xml

# Combines the per-env .coverage files written by [testenv] into one report.
[testenv:coverage-report]
skip_install = true
deps = coverage[toml]>=5.0.2
depends = py38
setenv=COVERAGE_FILE=.coverage
changedir = {toxworkdir}/.coverage
commands =
    coverage combine
    coverage report
    coverage xml

[testenv:format]
description = Run auto formatters
skip_install = True
allowlist_externals=
    bash
deps =
    black
    isort>=5.0.0
commands =
    black .
    isort scripts src tests docs
    bash -c 'clang-format --verbose -i lib/*.c'

[testenv:lint]
description = Run linting checks
skip_install = True
depends = format
deps =
    black
    isort>=5.0.0
    pylint
commands =
    black --check .
    isort --check-only scripts src tests docs
    pylint docs scripts src tests

[testenv:benchmark]
description = Run benchmarks
deps =
    click
    pytz
    python-dateutil
    tzdata
    pint[uncertainties]
commands =
    python scripts/benchmark.py {posargs}

[testenv:docs]
description = Build the documentation
skip_install = True
deps =
    -rdocs/requirements.txt
commands =
    sphinx-build -d "{toxworkdir}/docs_doctree" "{toxinidir}/docs" \
        "{toxinidir}/docs/_output" {posargs: -j auto --color -bhtml}

[testenv:mypy]
description = Run mypy on the testing example
basepython = 3.8
deps =
    mypy < 1
commands =
    mypy src/backports/zoneinfo/__init__.pyi
    mypy tests/typing_example.py
    mypy scripts/update_test_data.py

[testenv:build]
description = Build a wheel and source distribution
skip_install = True
passenv = *
deps =
    build
commands =
    python -m build {posargs} {toxinidir} -o {toxinidir}/dist

# NOTE(review): the description below duplicates [testenv:build]'s even
# though this env runs `twine check` — presumably it should read something
# like "Check the built distributions"; confirm before changing.
[testenv:build-check]
description = Build a wheel and source distribution
skip_install = True
deps =
    twine
depends = build
commands =
    twine check dist/*

[testenv:check-version-tag]
description = Ensure that the current version matches the current tag
deps =
commands =
    python scripts/check_tag.py

[testenv:release]
description = Make a release; must be called after "build"
skip_install = True
deps =
    twine
depends =
    build
    auditwheel
passenv =
    TWINE_*
commands =
    twine check {toxinidir}/dist/*
    twine upload {toxinidir}/dist/* \
        {posargs:-r {env:TWINE_REPOSITORY:testpypi} --non-interactive}