├── .coveragerc ├── .github └── workflows │ ├── codeql.yml │ ├── main.yml │ └── stale.yml ├── .gitignore ├── .readthedocs.yaml ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── README.rst ├── _python_utils_tests ├── __init__.py ├── requirements.txt ├── test_aio.py ├── test_containers.py ├── test_decorators.py ├── test_generators.py ├── test_import.py ├── test_logger.py ├── test_python_utils.py └── test_time.py ├── codecov.yml ├── coverage.rc ├── docs ├── Makefile ├── conf.py ├── index.rst ├── make.bat ├── python_utils.rst ├── requirements.txt └── usage.rst ├── pyproject.toml ├── pytest.ini ├── python_utils ├── __about__.py ├── __init__.py ├── aio.py ├── containers.py ├── converters.py ├── decorators.py ├── exceptions.py ├── formatters.py ├── generators.py ├── import_.py ├── logger.py ├── loguru.py ├── py.typed ├── terminal.py ├── time.py └── types.py ├── requirements.txt ├── ruff.toml ├── setup.cfg ├── setup.py └── tox.ini /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = 4 | python_utils 5 | _python_utils_tests 6 | omit = 7 | */mock/* 8 | */nose/* 9 | [paths] 10 | source = 11 | python_utils 12 | [report] 13 | fail_under = 100 14 | exclude_lines = 15 | pragma: no cover 16 | @abc.abstractmethod 17 | def __repr__ 18 | if self.debug: 19 | if settings.DEBUG 20 | raise AssertionError 21 | raise NotImplementedError 22 | if 0: 23 | if __name__ == .__main__.: 24 | if typing.TYPE_CHECKING: 25 | if types.TYPE_CHECKING: 26 | @overload 27 | @types.overload 28 | @typing.overload 29 | types.Protocol 30 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ "develop" ] 6 | pull_request: 7 | branches: [ "develop" ] 8 | schedule: 9 | - cron: "46 1 * * 3" 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze 
14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | language: [ python ] 24 | 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v3 28 | 29 | - name: Initialize CodeQL 30 | uses: github/codeql-action/init@v2 31 | with: 32 | languages: ${{ matrix.language }} 33 | queries: +security-and-quality 34 | 35 | - name: Autobuild 36 | uses: github/codeql-action/autobuild@v2 37 | 38 | - name: Perform CodeQL Analysis 39 | uses: github/codeql-action/analyze@v2 40 | with: 41 | category: "/language:${{ matrix.language }}" 42 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: pytest 2 | 3 | on: 4 | push: 5 | pull_request: 6 | workflow_dispatch: 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | timeout-minutes: 4 12 | strategy: 13 | matrix: 14 | python-version: ['pypy3.9', 'pypy3.10', '3.9', '3.10', '3.11', '3.12'] # Maybe soon?, '3.13'] 15 | 16 | steps: 17 | - uses: actions/checkout@v4 18 | with: 19 | fetch-depth: 1 20 | - name: Set up Python ${{ matrix.python-version }} 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: ${{ matrix.python-version }} 24 | - name: Install dependencies 25 | run: | 26 | python -m pip install --upgrade pip setuptools ruff 27 | pip install -e '.[tests]' 28 | - name: Get versions 29 | run: | 30 | python -V 31 | pip freeze 32 | - name: ruff 33 | run: ruff check --output-format=github 34 | - name: pytest 35 | run: py.test 36 | 37 | docs_and_lint: 38 | runs-on: ubuntu-latest 39 | timeout-minutes: 2 40 | steps: 41 | - uses: actions/checkout@v4 42 | - name: Set up Python 43 | uses: actions/setup-python@v5 44 | with: 45 | python-version: '3.10' 46 | - name: Install dependencies 47 | run: | 48 | python -m pip install --upgrade pip setuptools 49 | pip install 
-e '.[docs,tests]' pyright ruff mypy 50 | - name: build docs 51 | run: make html 52 | working-directory: docs/ 53 | - name: ruff 54 | run: ruff check --output-format=github 55 | - name: mypy 56 | run: mypy python_utils setup.py 57 | - name: pyright 58 | run: pyright 59 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: Close stale issues and pull requests 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | - cron: '0 0 * * *' # Run every day at midnight 7 | 8 | jobs: 9 | stale: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/stale@v8 13 | with: 14 | days-before-stale: 30 15 | exempt-issue-labels: in-progress,help-wanted,pinned,security,enhancement 16 | exempt-all-pr-assignees: true 17 | 18 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /build 2 | /dist 3 | /*.egg-info 4 | /docs/_build 5 | /cover 6 | /.eggs 7 | /.* -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file for Sphinx projects 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | # Set the OS, Python version and other tools you might need 8 | build: 9 | os: ubuntu-22.04 10 | tools: 11 | python: "3.12" 12 | # You can also specify other tool versions: 13 | # nodejs: "20" 14 | # rust: "1.70" 15 | # golang: "1.20" 16 | 17 | # Build documentation in the "docs/" directory with Sphinx 18 | sphinx: 19 | configuration: docs/conf.py 20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs 21 | # builder: "dirhtml" 22 | # Fail on all 
warnings to avoid broken references 23 | # fail_on_warning: true 24 | 25 | # Optionally build your docs in additional formats such as PDF and ePub 26 | formats: 27 | - pdf 28 | - epub 29 | 30 | # Optional but recommended, declare the Python requirements required 31 | # to build your documentation 32 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 33 | python: 34 | install: 35 | - requirements: docs/requirements.txt 36 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to python-utils 2 | 3 | Bug reports, code and documentation contributions are welcome. You can help this 4 | project also by using the development version and by reporting any bugs you might encounter 5 | 6 | ## 1. Reporting bugs 7 | It's important to provide following details when submitting a bug 8 | - Python version 9 | - python-utils version 10 | - OS details 11 | 12 | If possible also provide a minimum reproducible working code. 13 | ## 2. Contributing Code and Docs 14 | 15 | Before working on a new feature or a bug, please browse [existing issues](https://github.com/WoLpH/python-utils/issues) 16 | to see whether it has previously been discussed. 17 | 18 | If your change alters python-util's behaviour or interface, it's a good idea to 19 | discuss it before you start working on it. 20 | 21 | If you are fixing an issue, the first step should be to create a test case that 22 | reproduces the incorrect behaviour. That will also help you to build an 23 | understanding of the issue at hand. 24 | 25 | Make sure to add relevant tests and update documentation in order to get 26 | your PRs merged. We strictly adhere to 100% code coverage. 27 | 28 | ### Development Environment 29 | 30 | #### Getting the code 31 | 32 | Go to and fork the project repository. 
33 | 34 | ```bash 35 | # Clone your fork 36 | $ git clone git@github.com:/python-utils.git 37 | 38 | # Enter the project directory 39 | $ cd python-utils 40 | 41 | # Create a branch for your changes 42 | $ git checkout -b my_awesome_branch 43 | ``` 44 | 45 | #### Testing 46 | Before submitting any PR make sure your code passes all the tests. 47 | 48 | To run the full test-suite, make sure you have `tox` installed and run the following command: 49 | 50 | ```bash 51 | $ tox 52 | ``` 53 | 54 | Or to speed it up (replace 8 with your number of cores), run: 55 | 56 | ```bash 57 | $ tox -p8 58 | ``` 59 | 60 | During development I recommend using pytest directly and installing the package in development mode. 61 | 62 | Create virtual environment and activate 63 | ```bash 64 | $ python3 -m venv venv 65 | $ source venv/bin/activate 66 | ``` 67 | Install test requirements 68 | ```bash 69 | $ cd python-utils 70 | $ pip install -e ".[tests]" 71 | ``` 72 | Run tests 73 | ```bash 74 | $ py.test 75 | ``` 76 | 77 | Note that this won't run `ruff` yet, so once all the tests succeed you can run `ruff check` to check for code style errors. 78 | 79 | ```bash 80 | $ ruff check 81 | ``` 82 | 83 | Lastly we test the types using `pyright`: 84 | 85 | ```bash 86 | $ pyright 87 | ``` 88 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2016, Rick van Hattem 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 7 | 8 | 2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 9 | 10 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 11 | 12 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 13 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include coverage.rc 2 | include LICENSE 3 | include MANIFEST.in 4 | include pytest.ini 5 | include README.rst 6 | include requirements.txt 7 | include setup.cfg 8 | include setup.py 9 | include tox.ini 10 | include python_utils/py.typed 11 | recursive-include _python_utils_tests *.py *.txt 12 | recursive-exclude __pycache__ * 13 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Useful Python Utils 2 | ============================================================================== 3 | 4 | .. 
image:: https://github.com/WoLpH/python-utils/actions/workflows/main.yml/badge.svg?branch=master 5 | :target: https://github.com/WoLpH/python-utils/actions/workflows/main.yml 6 | 7 | .. image:: https://coveralls.io/repos/WoLpH/python-utils/badge.svg?branch=master 8 | :target: https://coveralls.io/r/WoLpH/python-utils?branch=master 9 | 10 | Python Utils is a collection of small Python functions and 11 | classes which make common patterns shorter and easier. It is by no means a 12 | complete collection but it has served me quite a bit in the past and I will 13 | keep extending it. 14 | 15 | One of the libraries using Python Utils is Django Utils. 16 | 17 | Documentation is available at: https://python-utils.readthedocs.org/en/latest/ 18 | 19 | Links 20 | ----- 21 | 22 | - The source: https://github.com/WoLpH/python-utils 23 | - Project page: https://pypi.python.org/pypi/python-utils 24 | - Reporting bugs: https://github.com/WoLpH/python-utils/issues 25 | - Documentation: https://python-utils.readthedocs.io/en/latest/ 26 | - My blog: https://wol.ph/ 27 | 28 | Security contact information 29 | ------------------------------------------------------------------------------ 30 | 31 | To report a security vulnerability, please use the 32 | `Tidelift security contact `_. 33 | Tidelift will coordinate the fix and disclosure. 34 | 35 | Requirements for installing: 36 | ------------------------------------------------------------------------------ 37 | 38 | For the Python 3+ release (i.e. v3.0.0 or higher) there are no requirements. 39 | For the Python 2 compatible version (v2.x.x) the `six` package is needed. 40 | 41 | Installation: 42 | ------------------------------------------------------------------------------ 43 | 44 | The package can be installed through `pip` (this is the recommended method): 45 | 46 | .. code-block:: bash 47 | 48 | pip install python-utils 49 | 50 | Or if `pip` is not available, `easy_install` should work as well: 51 | 52 | .. 
code-block:: bash 53 | 54 | easy_install python-utils 55 | 56 | Or download the latest release from Pypi (https://pypi.python.org/pypi/python-utils) or Github. 57 | 58 | Note that the releases on Pypi are signed with my GPG key (https://pgp.mit.edu/pks/lookup?op=vindex&search=0xE81444E9CE1F695D) and can be checked using GPG: 59 | 60 | .. code-block:: bash 61 | 62 | gpg --verify python-utils-.tar.gz.asc python-utils-.tar.gz 63 | 64 | Quickstart 65 | ------------------------------------------------------------------------------ 66 | 67 | This module makes it easy to execute common tasks in Python scripts such as 68 | converting text to numbers and making sure a string is in unicode or bytes 69 | format. 70 | 71 | Examples 72 | ------------------------------------------------------------------------------ 73 | 74 | Automatically converting a generator to a list, dict or other collections 75 | using a decorator: 76 | 77 | .. code-block:: pycon 78 | 79 | >>> @decorators.listify() 80 | ... def generate_list(): 81 | ... yield 1 82 | ... yield 2 83 | ... yield 3 84 | ... 85 | >>> generate_list() 86 | [1, 2, 3] 87 | 88 | >>> @listify(collection=dict) 89 | ... def dict_generator(): 90 | ... yield 'a', 1 91 | ... yield 'b', 2 92 | 93 | >>> dict_generator() 94 | {'a': 1, 'b': 2} 95 | 96 | Retrying until timeout 97 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 98 | 99 | To easily retry a block of code with a configurable timeout, you can use the 100 | `time.timeout_generator`: 101 | 102 | .. code-block:: pycon 103 | 104 | >>> for i in time.timeout_generator(10): 105 | ... try: 106 | ... # Run your code here 107 | ... except Exception as e: 108 | ... # Handle the exception 109 | 110 | Formatting of timestamps, dates and times 111 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 112 | 113 | Easy formatting of timestamps and calculating the time since: 114 | 115 | .. 
code-block:: pycon 116 | 117 | >>> time.format_time('1') 118 | '0:00:01' 119 | >>> time.format_time(1.234) 120 | '0:00:01' 121 | >>> time.format_time(1) 122 | '0:00:01' 123 | >>> time.format_time(datetime.datetime(2000, 1, 2, 3, 4, 5, 6)) 124 | '2000-01-02 03:04:05' 125 | >>> time.format_time(datetime.date(2000, 1, 2)) 126 | '2000-01-02' 127 | >>> time.format_time(datetime.timedelta(seconds=3661)) 128 | '1:01:01' 129 | >>> time.format_time(None) 130 | '--:--:--' 131 | 132 | >>> formatters.timesince(now) 133 | 'just now' 134 | >>> formatters.timesince(now - datetime.timedelta(seconds=1)) 135 | '1 second ago' 136 | >>> formatters.timesince(now - datetime.timedelta(seconds=2)) 137 | '2 seconds ago' 138 | >>> formatters.timesince(now - datetime.timedelta(seconds=60)) 139 | '1 minute ago' 140 | 141 | Converting your test from camel-case to underscores: 142 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 143 | 144 | .. code-block:: pycon 145 | 146 | >>> camel_to_underscore('SpamEggsAndBacon') 147 | 'spam_eggs_and_bacon' 148 | 149 | Attribute setting decorator. Very useful for the Django admin 150 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 151 | A convenient decorator to set function attributes using a decorator: 152 | 153 | .. code-block:: pycon 154 | 155 | You can use: 156 | >>> @decorators.set_attributes(short_description='Name') 157 | ... def upper_case_name(self, obj): 158 | ... return ("%s %s" % (obj.first_name, obj.last_name)).upper() 159 | 160 | Instead of: 161 | >>> def upper_case_name(obj): 162 | ... return ("%s %s" % (obj.first_name, obj.last_name)).upper() 163 | 164 | >>> upper_case_name.short_description = 'Name' 165 | 166 | This can be very useful for the Django admin as it allows you to have all 167 | metadata in one place. 168 | 169 | Scaling numbers between ranges 170 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 171 | 172 | .. 
code-block:: pycon 173 | 174 | >>> converters.remap(500, old_min=0, old_max=1000, new_min=0, new_max=100) 175 | 50 176 | 177 | # Or with decimals: 178 | >>> remap(decimal.Decimal('250.0'), 0.0, 1000.0, 0.0, 100.0) 179 | Decimal('25.0') 180 | 181 | Get the screen/window/terminal size in characters: 182 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 183 | 184 | .. code-block:: pycon 185 | 186 | >>> terminal.get_terminal_size() 187 | (80, 24) 188 | 189 | That method supports IPython and Jupyter as well as regular shells, using 190 | `blessings` and other modules depending on what is available. 191 | 192 | Extracting numbers from nearly every string: 193 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 194 | 195 | .. code-block:: pycon 196 | 197 | >>> converters.to_int('spam15eggs') 198 | 15 199 | >>> converters.to_int('spam') 200 | 0 201 | >>> number = converters.to_int('spam', default=1) 202 | 1 203 | 204 | Doing a global import of all the modules in a package programmatically: 205 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 206 | 207 | To do a global import programmatically you can use the `import_global` 208 | function. This effectively emulates a `from ... import *` 209 | 210 | .. code-block:: python 211 | 212 | from python_utils.import_ import import_global 213 | 214 | # The following is the equivalent of `from some_module import *` 215 | import_global('some_module') 216 | 217 | Automatically named logger for classes: 218 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 219 | 220 | Or add a correclty named logger to your classes which can be easily accessed: 221 | 222 | .. 
code-block:: python 223 | 224 | class MyClass(Logged): 225 | def __init__(self): 226 | Logged.__init__(self) 227 | 228 | my_class = MyClass() 229 | 230 | # Accessing the logging method: 231 | my_class.error('error') 232 | 233 | # With formatting: 234 | my_class.error('The logger supports %(formatting)s', 235 | formatting='named parameters') 236 | 237 | # Or to access the actual log function (overwriting the log formatting can 238 | # be done n the log method) 239 | import logging 240 | my_class.log(logging.ERROR, 'log') 241 | 242 | Alternatively loguru is also supported. It is largely a drop-in replacement for the logging module which is a bit more convenient to configure: 243 | 244 | First install the extra loguru package: 245 | 246 | .. code-block:: bash 247 | 248 | pip install 'python-utils[loguru]' 249 | 250 | .. code-block:: python 251 | 252 | class MyClass(Logurud): 253 | ... 254 | 255 | Now you can use the `Logurud` class to make functions such as `self.info()` 256 | available. The benefit of this approach is that you can add extra context or 257 | options to you specific loguru instance (i.e. `self.logger`): 258 | 259 | Convenient type aliases and some commonly used types: 260 | 261 | .. code-block:: python 262 | 263 | # For type hinting scopes such as locals/globals/vars 264 | Scope = Dict[str, Any] 265 | OptionalScope = O[Scope] 266 | 267 | # Note that Number is only useful for extra clarity since float 268 | # will work for both int and float in practice. 
269 | Number = U[int, float] 270 | DecimalNumber = U[Number, decimal.Decimal] 271 | 272 | # To accept an exception or list of exceptions 273 | ExceptionType = Type[Exception] 274 | ExceptionsType = U[Tuple[ExceptionType, ...], ExceptionType] 275 | 276 | # Matching string/bytes types: 277 | StringTypes = U[str, bytes] 278 | -------------------------------------------------------------------------------- /_python_utils_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wolph/python-utils/d36802f9c049744d210a454531d00e36fffda8c1/_python_utils_tests/__init__.py -------------------------------------------------------------------------------- /_python_utils_tests/requirements.txt: -------------------------------------------------------------------------------- 1 | -e .[tests] 2 | -------------------------------------------------------------------------------- /_python_utils_tests/test_aio.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from python_utils import types 6 | from python_utils.aio import acontainer, acount, adict 7 | 8 | 9 | @pytest.mark.asyncio 10 | async def test_acount(monkeypatch: pytest.MonkeyPatch) -> None: 11 | sleeps: types.List[float] = [] 12 | 13 | async def mock_sleep(delay: float) -> None: 14 | sleeps.append(delay) 15 | 16 | monkeypatch.setattr(asyncio, 'sleep', mock_sleep) 17 | 18 | async for _i in acount(delay=1, stop=3.5): 19 | pass 20 | 21 | assert len(sleeps) == 4 22 | assert sum(sleeps) == 4 23 | 24 | 25 | @pytest.mark.asyncio 26 | async def test_acontainer() -> None: 27 | async def async_gen() -> types.AsyncIterable[int]: 28 | yield 1 29 | yield 2 30 | yield 3 31 | 32 | async def empty_gen() -> types.AsyncIterable[int]: 33 | if False: 34 | yield 1 35 | 36 | assert await acontainer(async_gen) == [1, 2, 3] 37 | assert await acontainer(async_gen()) == [1, 2, 3] 38 | assert await 
acontainer(async_gen, set) == {1, 2, 3} 39 | assert await acontainer(async_gen(), set) == {1, 2, 3} 40 | assert await acontainer(async_gen, list) == [1, 2, 3] 41 | assert await acontainer(async_gen(), list) == [1, 2, 3] 42 | assert await acontainer(async_gen, tuple) == (1, 2, 3) 43 | assert await acontainer(async_gen(), tuple) == (1, 2, 3) 44 | assert await acontainer(empty_gen) == [] 45 | assert await acontainer(empty_gen()) == [] 46 | assert await acontainer(empty_gen, set) == set() 47 | assert await acontainer(empty_gen(), set) == set() 48 | assert await acontainer(empty_gen, list) == list() 49 | assert await acontainer(empty_gen(), list) == list() 50 | assert await acontainer(empty_gen, tuple) == tuple() 51 | assert await acontainer(empty_gen(), tuple) == tuple() 52 | 53 | 54 | @pytest.mark.asyncio 55 | async def test_adict() -> None: 56 | async def async_gen() -> types.AsyncIterable[types.Tuple[int, int]]: 57 | yield 1, 2 58 | yield 3, 4 59 | yield 5, 6 60 | 61 | async def empty_gen() -> types.AsyncIterable[types.Tuple[int, int]]: 62 | if False: 63 | yield 1, 2 64 | 65 | assert await adict(async_gen) == {1: 2, 3: 4, 5: 6} 66 | assert await adict(async_gen()) == {1: 2, 3: 4, 5: 6} 67 | assert await adict(empty_gen) == {} 68 | assert await adict(empty_gen()) == {} 69 | -------------------------------------------------------------------------------- /_python_utils_tests/test_containers.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from python_utils import containers 4 | 5 | 6 | def test_unique_list_ignore() -> None: 7 | a: containers.UniqueList[int] = containers.UniqueList() 8 | a.append(1) 9 | a.append(1) 10 | assert a == [1] 11 | 12 | a = containers.UniqueList(*range(20)) 13 | with pytest.raises(RuntimeError): 14 | a[10:20:2] = [1, 2, 3, 4, 5] 15 | 16 | a[3] = 5 17 | 18 | 19 | def test_unique_list_raise() -> None: 20 | a: containers.UniqueList[int] = containers.UniqueList( 21 | *range(20), 
on_duplicate='raise' 22 | ) 23 | with pytest.raises(ValueError): 24 | a[10:20:2] = [1, 2, 3, 4, 5] 25 | 26 | a[10:20:2] = [21, 22, 23, 24, 25] 27 | with pytest.raises(ValueError): 28 | a[3] = 5 29 | 30 | del a[10] 31 | del a[5:15] 32 | 33 | 34 | def test_sliceable_deque() -> None: 35 | d: containers.SliceableDeque[int] = containers.SliceableDeque(range(10)) 36 | assert d[0] == 0 37 | assert d[-1] == 9 38 | assert d[1:3] == [1, 2] 39 | assert d[1:3:2] == [1] 40 | assert d[1:3:-1] == [] 41 | assert d[3:1] == [] 42 | assert d[3:1:-1] == [3, 2] 43 | assert d[3:1:-2] == [3] 44 | with pytest.raises(ValueError): 45 | assert d[1:3:0] 46 | assert d[1:3:1] == [1, 2] 47 | assert d[1:3:2] == [1] 48 | assert d[1:3:-1] == [] 49 | 50 | 51 | def test_sliceable_deque_pop() -> None: 52 | d: containers.SliceableDeque[int] = containers.SliceableDeque(range(10)) 53 | 54 | assert d.pop() == 9 == 9 55 | assert d.pop(0) == 0 56 | 57 | with pytest.raises(IndexError): 58 | assert d.pop(100) 59 | 60 | with pytest.raises(IndexError): 61 | assert d.pop(2) 62 | 63 | with pytest.raises(IndexError): 64 | assert d.pop(-2) 65 | 66 | 67 | def test_sliceable_deque_eq() -> None: 68 | d: containers.SliceableDeque[int] = containers.SliceableDeque([1, 2, 3]) 69 | assert d == [1, 2, 3] 70 | assert d == (1, 2, 3) 71 | assert d == {1, 2, 3} 72 | assert d == d 73 | assert d == containers.SliceableDeque([1, 2, 3]) 74 | -------------------------------------------------------------------------------- /_python_utils_tests/test_decorators.py: -------------------------------------------------------------------------------- 1 | import typing 2 | from unittest.mock import MagicMock 3 | 4 | import pytest 5 | 6 | from python_utils.decorators import sample, wraps_classmethod 7 | 8 | T = typing.TypeVar('T') 9 | 10 | 11 | @pytest.fixture 12 | def random(monkeypatch: pytest.MonkeyPatch) -> MagicMock: 13 | mock = MagicMock() 14 | monkeypatch.setattr( 15 | 'python_utils.decorators.random.random', mock, raising=True 16 | ) 
17 | return mock 18 | 19 | 20 | def test_sample_called(random: MagicMock) -> None: 21 | demo_function = MagicMock() 22 | decorated = sample(0.5)(demo_function) 23 | random.return_value = 0.4 24 | decorated() 25 | random.return_value = 0.0 26 | decorated() 27 | args = [1, 2] 28 | kwargs = {'1': 1, '2': 2} 29 | decorated(*args, **kwargs) 30 | demo_function.assert_called_with(*args, **kwargs) 31 | assert demo_function.call_count == 3 32 | 33 | 34 | def test_sample_not_called(random: MagicMock) -> None: 35 | demo_function = MagicMock() 36 | decorated = sample(0.5)(demo_function) 37 | random.return_value = 0.5 38 | decorated() 39 | random.return_value = 1.0 40 | decorated() 41 | assert demo_function.call_count == 0 42 | 43 | 44 | class SomeClass: 45 | @classmethod 46 | def some_classmethod(cls, arg: T) -> T: 47 | return arg 48 | 49 | @classmethod 50 | def some_annotated_classmethod(cls, arg: int) -> int: 51 | return arg 52 | 53 | 54 | def test_wraps_classmethod() -> None: 55 | some_class = SomeClass() 56 | some_class.some_classmethod = MagicMock() # type: ignore[method-assign] 57 | wrapped_method = wraps_classmethod(SomeClass.some_classmethod)( 58 | some_class.some_classmethod 59 | ) 60 | wrapped_method(123) 61 | some_class.some_classmethod.assert_called_with(123) 62 | 63 | 64 | def test_wraps_annotated_classmethod() -> None: 65 | some_class = SomeClass() 66 | some_class.some_annotated_classmethod = MagicMock() # type: ignore[method-assign] 67 | wrapped_method = wraps_classmethod(SomeClass.some_annotated_classmethod)( 68 | some_class.some_annotated_classmethod 69 | ) 70 | wrapped_method(123) 71 | some_class.some_annotated_classmethod.assert_called_with(123) 72 | -------------------------------------------------------------------------------- /_python_utils_tests/test_generators.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | import python_utils 6 | from python_utils import types 7 | 8 
| 9 | @pytest.mark.asyncio 10 | async def test_abatcher() -> None: 11 | async for batch in python_utils.abatcher(python_utils.acount(stop=9), 3): 12 | assert len(batch) == 3 13 | 14 | async for batch in python_utils.abatcher(python_utils.acount(stop=2), 3): 15 | assert len(batch) == 2 16 | 17 | 18 | @pytest.mark.asyncio 19 | async def test_abatcher_timed() -> None: 20 | batches: types.List[types.List[int]] = [] 21 | async for batch in python_utils.abatcher( 22 | python_utils.acount(stop=10, delay=0.08), interval=0.1 23 | ): 24 | batches.append(batch) 25 | 26 | assert batches == [[0, 1, 2], [3, 4], [5, 6], [7, 8], [9]] 27 | assert len(batches) == 5 28 | 29 | 30 | @pytest.mark.asyncio 31 | async def test_abatcher_timed_with_timeout() -> None: 32 | async def generator() -> types.AsyncIterator[int]: 33 | # Test if the timeout is respected 34 | yield 0 35 | yield 1 36 | await asyncio.sleep(0.11) 37 | 38 | # Test if the timeout is respected 39 | yield 2 40 | yield 3 41 | await asyncio.sleep(0.11) 42 | 43 | # Test if exceptions are handled correctly 44 | await asyncio.wait_for(asyncio.sleep(1), timeout=0.05) 45 | 46 | # Test if StopAsyncIteration is handled correctly 47 | yield 4 48 | 49 | batcher = python_utils.abatcher(generator(), interval=0.1) 50 | assert await batcher.__anext__() == [0, 1] 51 | assert await batcher.__anext__() == [2, 3] 52 | 53 | with pytest.raises(asyncio.TimeoutError): 54 | await batcher.__anext__() 55 | 56 | with pytest.raises(StopAsyncIteration): 57 | await batcher.__anext__() 58 | 59 | 60 | def test_batcher() -> None: 61 | batch = [] 62 | for batch in python_utils.batcher(range(9), 3): 63 | assert len(batch) == 3 64 | 65 | for batch in python_utils.batcher(range(4), 3): 66 | assert batch is not None 67 | 68 | assert len(batch) == 1 69 | -------------------------------------------------------------------------------- /_python_utils_tests/test_import.py: -------------------------------------------------------------------------------- 1 | from 
python_utils import import_, types 2 | 3 | 4 | def test_import_globals_relative_import() -> None: 5 | for i in range(-1, 5): 6 | relative_import(i) 7 | 8 | 9 | def relative_import(level: int) -> None: 10 | locals_: types.Dict[str, types.Any] = {} 11 | globals_ = {'__name__': 'python_utils.import_'} 12 | import_.import_global('.formatters', locals_=locals_, globals_=globals_) 13 | assert 'camel_to_underscore' in globals_ 14 | 15 | 16 | def test_import_globals_without_inspection() -> None: 17 | locals_: types.Dict[str, types.Any] = {} 18 | globals_: types.Dict[str, types.Any] = {'__name__': __name__} 19 | import_.import_global( 20 | 'python_utils.formatters', locals_=locals_, globals_=globals_ 21 | ) 22 | assert 'camel_to_underscore' in globals_ 23 | 24 | 25 | def test_import_globals_single_method() -> None: 26 | locals_: types.Dict[str, types.Any] = {} 27 | globals_: types.Dict[str, types.Any] = {'__name__': __name__} 28 | import_.import_global( 29 | 'python_utils.formatters', 30 | ['camel_to_underscore'], 31 | locals_=locals_, 32 | globals_=globals_, 33 | ) 34 | assert 'camel_to_underscore' in globals_ 35 | 36 | 37 | def test_import_globals_with_inspection() -> None: 38 | import_.import_global('python_utils.formatters') 39 | assert 'camel_to_underscore' in globals() 40 | 41 | 42 | def test_import_globals_missing_module() -> None: 43 | import_.import_global( 44 | 'python_utils.spam', exceptions=ImportError, locals_=locals() 45 | ) 46 | assert 'camel_to_underscore' in globals() 47 | 48 | 49 | def test_import_locals_missing_module() -> None: 50 | import_.import_global( 51 | 'python_utils.spam', exceptions=ImportError, globals_=globals() 52 | ) 53 | assert 'camel_to_underscore' in globals() 54 | -------------------------------------------------------------------------------- /_python_utils_tests/test_logger.py: -------------------------------------------------------------------------------- 1 | # mypy: disable-error-code=misc 2 | import pytest 3 | 4 | from 
python_utils.loguru import Logurud 5 | 6 | loguru = pytest.importorskip('loguru') 7 | 8 | 9 | def test_logurud() -> None: 10 | class MyClass(Logurud): 11 | pass 12 | 13 | my_class = MyClass() 14 | my_class.debug('debug') 15 | my_class.info('info') 16 | my_class.warning('warning') 17 | my_class.error('error') 18 | my_class.critical('critical') 19 | my_class.exception('exception') 20 | my_class.log(0, 'log') 21 | -------------------------------------------------------------------------------- /_python_utils_tests/test_python_utils.py: -------------------------------------------------------------------------------- 1 | from python_utils import __about__ 2 | 3 | 4 | def test_definitions() -> None: 5 | # The setup.py requires this so we better make sure they exist :) 6 | assert __about__.__version__ 7 | assert __about__.__author__ 8 | assert __about__.__author_email__ 9 | assert __about__.__description__ 10 | -------------------------------------------------------------------------------- /_python_utils_tests/test_time.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import itertools 3 | from datetime import timedelta 4 | 5 | import pytest 6 | 7 | import python_utils 8 | from python_utils import types 9 | 10 | 11 | @pytest.mark.parametrize( 12 | 'timeout,interval,interval_multiplier,maximum_interval,iterable,result', 13 | [ 14 | (0.2, 0.1, 0.4, 0.2, python_utils.acount, 2), 15 | (0.3, 0.1, 0.4, 0.2, python_utils.acount(), 3), 16 | (0.3, 0.06, 1.0, None, python_utils.acount, 5), 17 | ( 18 | timedelta(seconds=0.1), 19 | timedelta(seconds=0.06), 20 | 2.0, 21 | timedelta(seconds=0.1), 22 | python_utils.acount, 23 | 2, 24 | ), 25 | ], 26 | ) 27 | @pytest.mark.asyncio 28 | async def test_aio_timeout_generator( 29 | timeout: float, 30 | interval: float, 31 | interval_multiplier: float, 32 | maximum_interval: float, 33 | iterable: types.AsyncIterable[types.Any], 34 | result: int, 35 | ) -> None: 36 | i = None 37 | async 
for i in python_utils.aio_timeout_generator( 38 | timeout, interval, iterable, maximum_interval=maximum_interval 39 | ): 40 | pass 41 | 42 | assert i == result 43 | 44 | 45 | @pytest.mark.parametrize( 46 | 'timeout,interval,interval_multiplier,maximum_interval,iterable,result', 47 | [ 48 | (0.1, 0.06, 0.5, 0.1, 'abc', 'c'), 49 | (0.1, 0.07, 0.5, 0.1, itertools.count, 2), 50 | (0.1, 0.07, 0.5, 0.1, itertools.count(), 2), 51 | (0.1, 0.06, 1.0, None, 'abc', 'c'), 52 | ( 53 | timedelta(seconds=0.1), 54 | timedelta(seconds=0.06), 55 | 2.0, 56 | timedelta(seconds=0.1), 57 | itertools.count, 58 | 2, 59 | ), 60 | ], 61 | ) 62 | def test_timeout_generator( 63 | timeout: float, 64 | interval: float, 65 | interval_multiplier: float, 66 | maximum_interval: float, 67 | iterable: types.Union[ 68 | str, 69 | types.Iterable[types.Any], 70 | types.Callable[..., types.Iterable[types.Any]], 71 | ], 72 | result: int, 73 | ) -> None: 74 | i = None 75 | for i in python_utils.timeout_generator( 76 | timeout=timeout, 77 | interval=interval, 78 | interval_multiplier=interval_multiplier, 79 | iterable=iterable, 80 | maximum_interval=maximum_interval, 81 | ): 82 | assert i is not None 83 | 84 | assert i == result 85 | 86 | 87 | @pytest.mark.asyncio 88 | async def test_aio_generator_timeout_detector() -> None: 89 | # Make pyright happy 90 | i = None 91 | 92 | async def generator() -> types.AsyncGenerator[int, None]: 93 | for i in range(10): 94 | await asyncio.sleep(i / 20.0) 95 | yield i 96 | 97 | detector = python_utils.aio_generator_timeout_detector 98 | # Test regular timeout with reraise 99 | with pytest.raises(asyncio.TimeoutError): 100 | async for i in detector(generator(), 0.25): 101 | pass 102 | 103 | # Test regular timeout with clean exit 104 | async for i in detector(generator(), 0.25, on_timeout=None): 105 | pass 106 | 107 | assert i == 4 108 | 109 | # Test total timeout with reraise 110 | with pytest.raises(asyncio.TimeoutError): 111 | async for i in detector(generator(), 
total_timeout=0.5): 112 | pass 113 | 114 | # Test total timeout with clean exit 115 | async for i in detector(generator(), total_timeout=0.5, on_timeout=None): 116 | pass 117 | 118 | assert i == 4 119 | 120 | # Test stop iteration 121 | async for i in detector(generator(), on_timeout=None): 122 | pass 123 | 124 | 125 | @pytest.mark.asyncio 126 | async def test_aio_generator_timeout_detector_decorator_reraise() -> None: 127 | # Test regular timeout with reraise 128 | @python_utils.aio_generator_timeout_detector_decorator(timeout=0.05) 129 | async def generator_timeout() -> types.AsyncGenerator[int, None]: 130 | for i in range(10): 131 | await asyncio.sleep(i / 100.0) 132 | yield i 133 | 134 | with pytest.raises(asyncio.TimeoutError): 135 | async for _ in generator_timeout(): 136 | pass 137 | 138 | 139 | @pytest.mark.asyncio 140 | async def test_aio_generator_timeout_detector_decorator_clean_exit() -> None: 141 | # Make pyright happy 142 | i = None 143 | 144 | # Test regular timeout with clean exit 145 | @python_utils.aio_generator_timeout_detector_decorator( 146 | timeout=0.05, on_timeout=None 147 | ) 148 | async def generator_clean() -> types.AsyncGenerator[int, None]: 149 | for i in range(10): 150 | await asyncio.sleep(i / 100.0) 151 | yield i 152 | 153 | async for i in generator_clean(): 154 | pass 155 | 156 | assert i == 4 157 | 158 | 159 | @pytest.mark.asyncio 160 | async def test_aio_generator_timeout_detector_decorator_reraise_total() -> ( 161 | None 162 | ): 163 | # Test total timeout with reraise 164 | @python_utils.aio_generator_timeout_detector_decorator(total_timeout=0.1) 165 | async def generator_reraise() -> types.AsyncGenerator[int, None]: 166 | for i in range(10): 167 | await asyncio.sleep(i / 100.0) 168 | yield i 169 | 170 | with pytest.raises(asyncio.TimeoutError): 171 | async for _ in generator_reraise(): 172 | pass 173 | 174 | 175 | @pytest.mark.asyncio 176 | async def test_aio_generator_timeout_detector_decorator_clean_total() -> None: 177 | # 
Make pyright happy 178 | i = None 179 | 180 | # Test total timeout with clean exit 181 | @python_utils.aio_generator_timeout_detector_decorator( 182 | total_timeout=0.1, on_timeout=None 183 | ) 184 | async def generator_clean_total() -> types.AsyncGenerator[int, None]: 185 | for i in range(10): 186 | await asyncio.sleep(i / 100.0) 187 | yield i 188 | 189 | async for i in generator_clean_total(): 190 | pass 191 | 192 | assert i == 4 193 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | token: 046054bc-5013-4e26-b93e-f2720c0e7b84 3 | -------------------------------------------------------------------------------- /coverage.rc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = python_utils,tests 3 | omit = */nose/* 4 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | """ 2 | Configuration file for the Sphinx documentation builder. 3 | # 4 | This file only contains a selection of the most common options. For a full 5 | list see the documentation: 6 | https://www.sphinx-doc.org/en/master/usage/configuration.html 7 | 8 | -- Path setup -------------------------------------------------------------- 9 | 10 | If extensions (or modules to document with autodoc) are in another directory, 11 | add these directories to sys.path here. If the directory is relative to the 12 | documentation root, use os.path.abspath to make it absolute, like shown here. 13 | # 14 | """ 15 | 16 | import os 17 | import sys 18 | from datetime import date 19 | 20 | sys.path.insert(0, os.path.abspath('..')) 21 | 22 | from python_utils import __about__ 23 | 24 | # -- Project information ----------------------------------------------------- 25 | 26 | project = 'Python Utils' 27 | author = __about__.__author__ 28 | copyright = f'{date.today().year}, {author}' 29 | 30 | # The full version, including alpha/beta/rc tags 31 | release = __about__.__version__ 32 | 33 | # -- General configuration --------------------------------------------------- 34 | 35 | # Add any Sphinx extension module names here, as strings. They can be 36 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 37 | # ones. 38 | extensions = [ 39 | 'sphinx.ext.autodoc', 40 | 'sphinx.ext.doctest', 41 | 'sphinx.ext.intersphinx', 42 | 'sphinx.ext.todo', 43 | 'sphinx.ext.coverage', 44 | 'sphinx.ext.viewcode', 45 | ] 46 | 47 | # Add any paths that contain templates here, relative to this directory. 
48 | templates_path = ['_templates'] 49 | 50 | # List of patterns, relative to source directory, that match files and 51 | # directories to ignore when looking for source files. 52 | # This pattern also affects html_static_path and html_extra_path. 53 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 54 | 55 | # -- Options for HTML output ------------------------------------------------- 56 | 57 | # The theme to use for HTML and HTML Help pages. See the documentation for 58 | # a list of builtin themes. 59 | # 60 | html_theme = 'alabaster' 61 | 62 | # Add any paths that contain custom static files (such as style sheets) here, 63 | # relative to this directory. They are copied after the builtin static files, 64 | # so a file named "default.css" will overwrite the builtin "default.css". 65 | # html_static_path = ['_static'] 66 | 67 | intersphinx_mapping = {'python': ('https://docs.python.org/3', None)} 68 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to Python Utils's documentation! 2 | ======================================== 3 | 4 | .. image:: https://github.com/WoLpH/python-utils/actions/workflows/main.yml/badge.svg?branch=master 5 | :target: https://github.com/WoLpH/python-utils/actions/workflows/main.yml/badge.svg?branch=master 6 | 7 | .. image:: https://coveralls.io/repos/WoLpH/python-utils/badge.svg?branch=master 8 | :target: https://coveralls.io/r/WoLpH/python-utils?branch=master 9 | 10 | Contents: 11 | 12 | .. 
toctree:: 13 | :maxdepth: 4 14 | 15 | usage 16 | python_utils 17 | 18 | Indices and tables 19 | ================== 20 | 21 | * :ref:`genindex` 22 | * :ref:`modindex` 23 | * :ref:`search` 24 | 25 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.https://www.sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/python_utils.rst: -------------------------------------------------------------------------------- 1 | python\_utils package 2 | ===================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | python\_utils\.decorators module 8 | -------------------------------- 9 | 10 | .. automodule:: python_utils.decorators 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | python\_utils\.converters module 16 | -------------------------------- 17 | 18 | .. 
automodule:: python_utils.converters 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | python\_utils\.formatters module 24 | -------------------------------- 25 | 26 | .. automodule:: python_utils.formatters 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | python\_utils\.import\_ module 32 | ------------------------------ 33 | 34 | .. automodule:: python_utils.import_ 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | python\_utils\.logger module 40 | ---------------------------- 41 | 42 | .. automodule:: python_utils.logger 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | python\_utils\.terminal module 48 | ------------------------------ 49 | 50 | .. automodule:: python_utils.terminal 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | python\_utils\.time module 56 | -------------------------- 57 | 58 | .. automodule:: python_utils.time 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | 64 | Module contents 65 | --------------- 66 | 67 | .. automodule:: python_utils 68 | :members: 69 | :undoc-members: 70 | :show-inheritance: 71 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | -e .[docs] 2 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | 2 | .. 
include:: ../README.rst 3 | 4 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 79 3 | target-version = ['py37', 'py38', 'py39', 'py310', 'py311'] 4 | skip-string-normalization = true 5 | 6 | [tool.pyright] 7 | # include = ['python_utils'] 8 | include = ['python_utils', '_python_utils_tests', 'setup.py'] 9 | strict = ['python_utils', '_python_utils_tests', 'setup.py'] 10 | # The terminal file is very OS specific and dependent on imports so we're skipping it from type checking 11 | ignore = ['python_utils/terminal.py'] 12 | pythonVersion = '3.9' 13 | 14 | [tool.mypy] 15 | strict = true 16 | check_untyped_defs = true 17 | files = ['python_utils', '_python_utils_tests', 'setup.py'] 18 | 19 | [[tool.mypy.overrides]] 20 | module = '_python_utils_tests.*' 21 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | python_files = 3 | python_utils/*.py 4 | _python_utils_tests/*.py 5 | 6 | addopts = 7 | --doctest-modules 8 | --cov python_utils 9 | --cov-report term-missing 10 | ; --mypy 11 | 12 | doctest_optionflags = 13 | ALLOW_UNICODE 14 | ALLOW_BYTES 15 | 16 | asyncio_mode = strict 17 | -------------------------------------------------------------------------------- /python_utils/__about__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module contains metadata about the `python-utils` package. 3 | 4 | Attributes: 5 | __package_name__ (str): The name of the package. 6 | __author__ (str): The author of the package. 7 | __author_email__ (str): The email of the author. 8 | __description__ (str): A brief description of the package. 9 | __url__ (str): The URL of the package's repository. 
10 | __version__ (str): The current version of the package. 11 | """ 12 | 13 | __package_name__: str = 'python-utils' 14 | __author__: str = 'Rick van Hattem' 15 | __author_email__: str = 'Wolph@wol.ph' 16 | __description__: str = ( 17 | 'Python Utils is a module with some convenient utilities not included ' 18 | 'with the standard Python install' 19 | ) 20 | __url__: str = 'https://github.com/WoLpH/python-utils' 21 | # Omit type info due to automatic versioning script 22 | __version__ = '3.9.1' 23 | -------------------------------------------------------------------------------- /python_utils/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module initializes the `python_utils` package by importing various 3 | submodules and functions. 4 | 5 | Submodules: 6 | aio 7 | converters 8 | decorators 9 | formatters 10 | generators 11 | import_ 12 | logger 13 | terminal 14 | time 15 | types 16 | 17 | Functions: 18 | acount 19 | remap 20 | scale_1024 21 | to_float 22 | to_int 23 | to_str 24 | to_unicode 25 | listify 26 | set_attributes 27 | raise_exception 28 | reraise 29 | camel_to_underscore 30 | timesince 31 | abatcher 32 | batcher 33 | import_global 34 | get_terminal_size 35 | aio_generator_timeout_detector 36 | aio_generator_timeout_detector_decorator 37 | aio_timeout_generator 38 | delta_to_seconds 39 | delta_to_seconds_or_none 40 | format_time 41 | timedelta_to_seconds 42 | timeout_generator 43 | 44 | Classes: 45 | CastedDict 46 | LazyCastedDict 47 | UniqueList 48 | Logged 49 | LoggerBase 50 | """ 51 | 52 | from . 
import ( 53 | aio, 54 | converters, 55 | decorators, 56 | formatters, 57 | generators, 58 | import_, 59 | logger, 60 | terminal, 61 | time, 62 | types, 63 | ) 64 | from .aio import acount 65 | from .containers import CastedDict, LazyCastedDict, UniqueList 66 | from .converters import remap, scale_1024, to_float, to_int, to_str, to_unicode 67 | from .decorators import listify, set_attributes 68 | from .exceptions import raise_exception, reraise 69 | from .formatters import camel_to_underscore, timesince 70 | from .generators import abatcher, batcher 71 | from .import_ import import_global 72 | from .logger import Logged, LoggerBase 73 | from .terminal import get_terminal_size 74 | from .time import ( 75 | aio_generator_timeout_detector, 76 | aio_generator_timeout_detector_decorator, 77 | aio_timeout_generator, 78 | delta_to_seconds, 79 | delta_to_seconds_or_none, 80 | format_time, 81 | timedelta_to_seconds, 82 | timeout_generator, 83 | ) 84 | 85 | __all__ = [ 86 | 'CastedDict', 87 | 'LazyCastedDict', 88 | 'Logged', 89 | 'LoggerBase', 90 | 'UniqueList', 91 | 'abatcher', 92 | 'acount', 93 | 'aio', 94 | 'aio_generator_timeout_detector', 95 | 'aio_generator_timeout_detector_decorator', 96 | 'aio_timeout_generator', 97 | 'batcher', 98 | 'camel_to_underscore', 99 | 'converters', 100 | 'decorators', 101 | 'delta_to_seconds', 102 | 'delta_to_seconds_or_none', 103 | 'format_time', 104 | 'formatters', 105 | 'generators', 106 | 'get_terminal_size', 107 | 'import_', 108 | 'import_global', 109 | 'listify', 110 | 'logger', 111 | 'raise_exception', 112 | 'remap', 113 | 'reraise', 114 | 'scale_1024', 115 | 'set_attributes', 116 | 'terminal', 117 | 'time', 118 | 'timedelta_to_seconds', 119 | 'timeout_generator', 120 | 'timesince', 121 | 'to_float', 122 | 'to_int', 123 | 'to_str', 124 | 'to_unicode', 125 | 'types', 126 | ] 127 | -------------------------------------------------------------------------------- /python_utils/aio.py: 
-------------------------------------------------------------------------------- 1 | """Asyncio equivalents to regular Python functions.""" 2 | 3 | import asyncio 4 | import itertools 5 | import typing 6 | 7 | from . import types 8 | 9 | _N = types.TypeVar('_N', int, float) 10 | _T = types.TypeVar('_T') 11 | _K = types.TypeVar('_K') 12 | _V = types.TypeVar('_V') 13 | 14 | 15 | async def acount( 16 | start: _N = 0, 17 | step: _N = 1, 18 | delay: float = 0, 19 | stop: types.Optional[_N] = None, 20 | ) -> types.AsyncIterator[_N]: 21 | """Asyncio version of itertools.count().""" 22 | for item in itertools.count(start, step): # pragma: no branch 23 | if stop is not None and item >= stop: 24 | break 25 | 26 | yield item 27 | await asyncio.sleep(delay) 28 | 29 | 30 | @typing.overload 31 | async def acontainer( 32 | iterable: types.Union[ 33 | types.AsyncIterable[_T], 34 | types.Callable[..., types.AsyncIterable[_T]], 35 | ], 36 | container: types.Type[types.Tuple[_T, ...]], 37 | ) -> types.Tuple[_T, ...]: ... 38 | 39 | 40 | @typing.overload 41 | async def acontainer( 42 | iterable: types.Union[ 43 | types.AsyncIterable[_T], 44 | types.Callable[..., types.AsyncIterable[_T]], 45 | ], 46 | container: types.Type[types.List[_T]] = list, 47 | ) -> types.List[_T]: ... 48 | 49 | 50 | @typing.overload 51 | async def acontainer( 52 | iterable: types.Union[ 53 | types.AsyncIterable[_T], 54 | types.Callable[..., types.AsyncIterable[_T]], 55 | ], 56 | container: types.Type[types.Set[_T]], 57 | ) -> types.Set[_T]: ... 58 | 59 | 60 | async def acontainer( 61 | iterable: types.Union[ 62 | types.AsyncIterable[_T], 63 | types.Callable[..., types.AsyncIterable[_T]], 64 | ], 65 | container: types.Callable[ 66 | [types.Iterable[_T]], types.Collection[_T] 67 | ] = list, 68 | ) -> types.Collection[_T]: 69 | """ 70 | Asyncio version of list()/set()/tuple()/etc() using an async for loop. 
71 | 72 | So instead of doing `[item async for item in iterable]` you can do 73 | `await acontainer(iterable)`. 74 | 75 | """ 76 | iterable_: types.AsyncIterable[_T] 77 | if callable(iterable): 78 | iterable_ = iterable() 79 | else: 80 | iterable_ = iterable 81 | 82 | item: _T 83 | items: types.List[_T] = [] 84 | async for item in iterable_: # pragma: no branch 85 | items.append(item) 86 | 87 | return container(items) 88 | 89 | 90 | async def adict( 91 | iterable: types.Union[ 92 | types.AsyncIterable[types.Tuple[_K, _V]], 93 | types.Callable[..., types.AsyncIterable[types.Tuple[_K, _V]]], 94 | ], 95 | container: types.Callable[ 96 | [types.Iterable[types.Tuple[_K, _V]]], types.Mapping[_K, _V] 97 | ] = dict, 98 | ) -> types.Mapping[_K, _V]: 99 | """ 100 | Asyncio version of dict() using an async for loop. 101 | 102 | So instead of doing `{key: value async for key, value in iterable}` you 103 | can do `await adict(iterable)`. 104 | 105 | """ 106 | iterable_: types.AsyncIterable[types.Tuple[_K, _V]] 107 | if callable(iterable): 108 | iterable_ = iterable() 109 | else: 110 | iterable_ = iterable 111 | 112 | item: types.Tuple[_K, _V] 113 | items: types.List[types.Tuple[_K, _V]] = [] 114 | async for item in iterable_: # pragma: no branch 115 | items.append(item) 116 | 117 | return container(items) 118 | -------------------------------------------------------------------------------- /python_utils/containers.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides custom container classes with enhanced functionality. 3 | 4 | Classes: 5 | CastedDictBase: Abstract base class for dictionaries that cast keys and 6 | values. 7 | CastedDict: Dictionary that casts keys and values to specified types. 8 | LazyCastedDict: Dictionary that lazily casts values to specified types upon 9 | access. 10 | UniqueList: List that only allows unique values, with configurable behavior 11 | on duplicates. 
12 | SliceableDeque: Deque that supports slicing and enhanced equality checks. 13 | 14 | Type Aliases: 15 | KT: Type variable for dictionary keys. 16 | VT: Type variable for dictionary values. 17 | DT: Type alias for a dictionary with keys of type KT and values of type VT. 18 | KT_cast: Type alias for a callable that casts dictionary keys. 19 | VT_cast: Type alias for a callable that casts dictionary values. 20 | HT: Type variable for hashable values in UniqueList. 21 | T: Type variable for generic types. 22 | DictUpdateArgs: Union type for arguments that can be used to update a 23 | dictionary. 24 | OnDuplicate: Literal type for handling duplicate values in UniqueList. 25 | 26 | Usage: 27 | - CastedDict and LazyCastedDict can be used to create dictionaries with 28 | automatic type casting. 29 | - UniqueList ensures all elements are unique and can raise an error on 30 | duplicates. 31 | - SliceableDeque extends deque with slicing support and enhanced equality 32 | checks. 33 | 34 | Examples: 35 | >>> d = CastedDict(int, int) 36 | >>> d[1] = 2 37 | >>> d['3'] = '4' 38 | >>> d.update({'5': '6'}) 39 | >>> d.update([('7', '8')]) 40 | >>> d 41 | {1: 2, 3: 4, 5: 6, 7: 8} 42 | 43 | >>> l = UniqueList(1, 2, 3) 44 | >>> l.append(4) 45 | >>> l.append(4) 46 | >>> l.insert(0, 4) 47 | >>> l.insert(0, 5) 48 | >>> l[1] = 10 49 | >>> l 50 | [5, 10, 2, 3, 4] 51 | 52 | >>> d = SliceableDeque([1, 2, 3, 4, 5]) 53 | >>> d[1:4] 54 | SliceableDeque([2, 3, 4]) 55 | """ 56 | 57 | # pyright: reportIncompatibleMethodOverride=false 58 | import abc 59 | import collections 60 | import typing 61 | 62 | from . import types 63 | 64 | if typing.TYPE_CHECKING: 65 | import _typeshed # noqa: F401 66 | 67 | #: A type alias for a type that can be used as a key in a dictionary. 68 | KT = types.TypeVar('KT') 69 | #: A type alias for a type that can be used as a value in a dictionary. 70 | VT = types.TypeVar('VT') 71 | #: A type alias for a dictionary with keys of type KT and values of type VT. 
72 | DT = types.Dict[KT, VT] 73 | #: A type alias for the casted type of a dictionary key. 74 | KT_cast = types.Optional[types.Callable[..., KT]] 75 | #: A type alias for the casted type of a dictionary value. 76 | VT_cast = types.Optional[types.Callable[..., VT]] 77 | #: A type alias for the hashable values of the `UniqueList` 78 | HT = types.TypeVar('HT', bound=types.Hashable) 79 | #: A type alias for a regular generic type 80 | T = types.TypeVar('T') 81 | 82 | # Using types.Union instead of | since Python 3.7 doesn't fully support it 83 | DictUpdateArgs = types.Union[ 84 | types.Mapping[KT, VT], 85 | types.Iterable[types.Tuple[KT, VT]], 86 | types.Iterable[types.Mapping[KT, VT]], 87 | '_typeshed.SupportsKeysAndGetItem[KT, VT]', 88 | ] 89 | 90 | OnDuplicate = types.Literal['ignore', 'raise'] 91 | 92 | 93 | class CastedDictBase(types.Dict[KT, VT], abc.ABC): 94 | """ 95 | Abstract base class for dictionaries that cast keys and values. 96 | 97 | Attributes: 98 | _key_cast (KT_cast[KT]): Callable to cast dictionary keys. 99 | _value_cast (VT_cast[VT]): Callable to cast dictionary values. 100 | 101 | Methods: 102 | __init__(key_cast: KT_cast[KT] = None, value_cast: VT_cast[VT] = None, 103 | *args: DictUpdateArgs[KT, VT], **kwargs: VT) -> None: 104 | Initializes the dictionary with optional key and value casting 105 | callables. 106 | update(*args: DictUpdateArgs[types.Any, types.Any], 107 | **kwargs: types.Any) -> None: 108 | Updates the dictionary with the given arguments. 109 | __setitem__(key: types.Any, value: types.Any) -> None: 110 | Sets the item in the dictionary, casting the key if a key cast 111 | callable is provided. 
112 | """ 113 | 114 | _key_cast: KT_cast[KT] 115 | _value_cast: VT_cast[VT] 116 | 117 | def __init__( 118 | self, 119 | key_cast: KT_cast[KT] = None, 120 | value_cast: VT_cast[VT] = None, 121 | *args: DictUpdateArgs[KT, VT], 122 | **kwargs: VT, 123 | ) -> None: 124 | """ 125 | Initializes the CastedDictBase with optional key and value 126 | casting callables. 127 | 128 | Args: 129 | key_cast (KT_cast[KT], optional): Callable to cast 130 | dictionary keys. Defaults to None. 131 | value_cast (VT_cast[VT], optional): Callable to cast 132 | dictionary values. Defaults to None. 133 | *args (DictUpdateArgs[KT, VT]): Arguments to initialize 134 | the dictionary. 135 | **kwargs (VT): Keyword arguments to initialize the 136 | dictionary. 137 | """ 138 | self._value_cast = value_cast 139 | self._key_cast = key_cast 140 | self.update(*args, **kwargs) 141 | 142 | def update( 143 | self, *args: DictUpdateArgs[types.Any, types.Any], **kwargs: types.Any 144 | ) -> None: 145 | """ 146 | Updates the dictionary with the given arguments. 147 | 148 | Args: 149 | *args (DictUpdateArgs[types.Any, types.Any]): Arguments to update 150 | the dictionary. 151 | **kwargs (types.Any): Keyword arguments to update the dictionary. 152 | """ 153 | if args: 154 | kwargs.update(*args) 155 | 156 | if kwargs: 157 | for key, value in kwargs.items(): 158 | self[key] = value 159 | 160 | def __setitem__(self, key: types.Any, value: types.Any) -> None: 161 | """ 162 | Sets the item in the dictionary, casting the key if a key cast 163 | callable is provided. 164 | 165 | Args: 166 | key (types.Any): The key to set in the dictionary. 167 | value (types.Any): The value to set in the dictionary. 168 | """ 169 | if self._key_cast is not None: 170 | key = self._key_cast(key) 171 | 172 | return super().__setitem__(key, value) 173 | 174 | 175 | class CastedDict(CastedDictBase[KT, VT]): 176 | """ 177 | Custom dictionary that casts keys and values to the specified typing. 
178 | 179 | Note that you can specify the types for mypy and type hinting with: 180 | CastedDict[int, int](int, int) 181 | 182 | >>> d: CastedDict[int, int] = CastedDict(int, int) 183 | >>> d[1] = 2 184 | >>> d['3'] = '4' 185 | >>> d.update({'5': '6'}) 186 | >>> d.update([('7', '8')]) 187 | >>> d 188 | {1: 2, 3: 4, 5: 6, 7: 8} 189 | >>> list(d.keys()) 190 | [1, 3, 5, 7] 191 | >>> list(d) 192 | [1, 3, 5, 7] 193 | >>> list(d.values()) 194 | [2, 4, 6, 8] 195 | >>> list(d.items()) 196 | [(1, 2), (3, 4), (5, 6), (7, 8)] 197 | >>> d[3] 198 | 4 199 | 200 | # Casts are optional and can be disabled by passing None as the cast 201 | >>> d = CastedDict() 202 | >>> d[1] = 2 203 | >>> d['3'] = '4' 204 | >>> d.update({'5': '6'}) 205 | >>> d.update([('7', '8')]) 206 | >>> d 207 | {1: 2, '3': '4', '5': '6', '7': '8'} 208 | """ 209 | 210 | def __setitem__(self, key: typing.Any, value: typing.Any) -> None: 211 | """Sets `key` to `cast(value)` in the dictionary.""" 212 | if self._value_cast is not None: 213 | value = self._value_cast(value) 214 | 215 | super().__setitem__(key, value) 216 | 217 | 218 | class LazyCastedDict(CastedDictBase[KT, VT]): 219 | """ 220 | Custom dictionary that casts keys and lazily casts values to the specified 221 | typing. Note that the values are cast only when they are accessed and 222 | are not cached between executions. 
223 | 224 | Note that you can specify the types for mypy and type hinting with: 225 | LazyCastedDict[int, int](int, int) 226 | 227 | >>> d: LazyCastedDict[int, int] = LazyCastedDict(int, int) 228 | >>> d[1] = 2 229 | >>> d['3'] = '4' 230 | >>> d.update({'5': '6'}) 231 | >>> d.update([('7', '8')]) 232 | >>> d 233 | {1: 2, 3: '4', 5: '6', 7: '8'} 234 | >>> list(d.keys()) 235 | [1, 3, 5, 7] 236 | >>> list(d) 237 | [1, 3, 5, 7] 238 | >>> list(d.values()) 239 | [2, 4, 6, 8] 240 | >>> list(d.items()) 241 | [(1, 2), (3, 4), (5, 6), (7, 8)] 242 | >>> d[3] 243 | 4 244 | 245 | # Casts are optional and can be disabled by passing None as the cast 246 | >>> d = LazyCastedDict() 247 | >>> d[1] = 2 248 | >>> d['3'] = '4' 249 | >>> d.update({'5': '6'}) 250 | >>> d.update([('7', '8')]) 251 | >>> d 252 | {1: 2, '3': '4', '5': '6', '7': '8'} 253 | >>> list(d.keys()) 254 | [1, '3', '5', '7'] 255 | >>> list(d.values()) 256 | [2, '4', '6', '8'] 257 | 258 | >>> list(d.items()) 259 | [(1, 2), ('3', '4'), ('5', '6'), ('7', '8')] 260 | >>> d['3'] 261 | '4' 262 | """ 263 | 264 | def __setitem__(self, key: types.Any, value: types.Any) -> None: 265 | """ 266 | Sets the item in the dictionary, casting the key if a key cast 267 | callable is provided. 268 | 269 | Args: 270 | key (types.Any): The key to set in the dictionary. 271 | value (types.Any): The value to set in the dictionary. 272 | """ 273 | if self._key_cast is not None: 274 | key = self._key_cast(key) 275 | 276 | super().__setitem__(key, value) 277 | 278 | def __getitem__(self, key: types.Any) -> VT: 279 | """ 280 | Gets the item from the dictionary, casting the value if a value cast 281 | callable is provided. 282 | 283 | Args: 284 | key (types.Any): The key to get from the dictionary. 285 | 286 | Returns: 287 | VT: The value from the dictionary. 
288 | """ 289 | if self._key_cast is not None: 290 | key = self._key_cast(key) 291 | 292 | value = super().__getitem__(key) 293 | 294 | if self._value_cast is not None: 295 | value = self._value_cast(value) 296 | 297 | return value 298 | 299 | def items( # type: ignore[override] 300 | self, 301 | ) -> types.Generator[types.Tuple[KT, VT], None, None]: 302 | """ 303 | Returns a generator of the dictionary's items, casting the values if a 304 | value cast callable is provided. 305 | 306 | Yields: 307 | types.Generator[types.Tuple[KT, VT], None, None]: A generator of 308 | the dictionary's items. 309 | """ 310 | if self._value_cast is None: 311 | yield from super().items() 312 | else: 313 | for key, value in super().items(): 314 | yield key, self._value_cast(value) 315 | 316 | def values(self) -> types.Generator[VT, None, None]: # type: ignore[override] 317 | """ 318 | Returns a generator of the dictionary's values, casting the values if a 319 | value cast callable is provided. 320 | 321 | Yields: 322 | types.Generator[VT, None, None]: A generator of the dictionary's 323 | values. 324 | """ 325 | if self._value_cast is None: 326 | yield from super().values() 327 | else: 328 | for value in super().values(): 329 | yield self._value_cast(value) 330 | 331 | 332 | class UniqueList(types.List[HT]): 333 | """ 334 | A list that only allows unique values. Duplicate values are ignored by 335 | default, but can be configured to raise an exception instead. 336 | 337 | >>> l = UniqueList(1, 2, 3) 338 | >>> l.append(4) 339 | >>> l.append(4) 340 | >>> l.insert(0, 4) 341 | >>> l.insert(0, 5) 342 | >>> l[1] = 10 343 | >>> l 344 | [5, 10, 2, 3, 4] 345 | 346 | >>> l = UniqueList(1, 2, 3, on_duplicate='raise') 347 | >>> l.append(4) 348 | >>> l.append(4) 349 | Traceback (most recent call last): 350 | ... 351 | ValueError: Duplicate value: 4 352 | >>> l.insert(0, 4) 353 | Traceback (most recent call last): 354 | ... 
355 | ValueError: Duplicate value: 4 356 | >>> 4 in l 357 | True 358 | >>> l[0] 359 | 1 360 | >>> l[1] = 4 361 | Traceback (most recent call last): 362 | ... 363 | ValueError: Duplicate value: 4 364 | """ 365 | 366 | _set: types.Set[HT] 367 | 368 | def __init__( 369 | self, 370 | *args: HT, 371 | on_duplicate: OnDuplicate = 'ignore', 372 | ): 373 | """ 374 | Initializes the UniqueList with optional duplicate handling behavior. 375 | 376 | Args: 377 | *args (HT): Initial values for the list. 378 | on_duplicate (OnDuplicate, optional): Behavior on duplicates. 379 | Defaults to 'ignore'. 380 | """ 381 | self.on_duplicate = on_duplicate 382 | self._set = set() 383 | super().__init__() 384 | for arg in args: 385 | self.append(arg) 386 | 387 | def insert(self, index: types.SupportsIndex, value: HT) -> None: 388 | """ 389 | Inserts a value at the specified index, ensuring uniqueness. 390 | 391 | Args: 392 | index (types.SupportsIndex): The index to insert the value at. 393 | value (HT): The value to insert. 394 | 395 | Raises: 396 | ValueError: If the value is a duplicate and `on_duplicate` is set 397 | to 'raise'. 398 | """ 399 | if value in self._set: 400 | if self.on_duplicate == 'raise': 401 | raise ValueError(f'Duplicate value: {value}') 402 | else: 403 | return 404 | 405 | self._set.add(value) 406 | super().insert(index, value) 407 | 408 | def append(self, value: HT) -> None: 409 | """ 410 | Appends a value to the list, ensuring uniqueness. 411 | 412 | Args: 413 | value (HT): The value to append. 414 | 415 | Raises: 416 | ValueError: If the value is a duplicate and `on_duplicate` is set 417 | to 'raise'. 418 | """ 419 | if value in self._set: 420 | if self.on_duplicate == 'raise': 421 | raise ValueError(f'Duplicate value: {value}') 422 | else: 423 | return 424 | 425 | self._set.add(value) 426 | super().append(value) 427 | 428 | def __contains__(self, item: HT) -> bool: # type: ignore[override] 429 | """ 430 | Checks if the list contains the specified item. 
431 | 432 | Args: 433 | item (HT): The item to check for. 434 | 435 | Returns: 436 | bool: True if the item is in the list, False otherwise. 437 | """ 438 | return item in self._set 439 | 440 | @typing.overload 441 | def __setitem__( 442 | self, indices: types.SupportsIndex, values: HT 443 | ) -> None: ... 444 | 445 | @typing.overload 446 | def __setitem__( 447 | self, indices: slice, values: types.Iterable[HT] 448 | ) -> None: ... 449 | 450 | def __setitem__( 451 | self, 452 | indices: types.Union[slice, types.SupportsIndex], 453 | values: types.Union[types.Iterable[HT], HT], 454 | ) -> None: 455 | """ 456 | Sets the item(s) at the specified index/indices, ensuring uniqueness. 457 | 458 | Args: 459 | indices (types.Union[slice, types.SupportsIndex]): The index or 460 | slice to set the value(s) at. 461 | values (types.Union[types.Iterable[HT], HT]): The value(s) to set. 462 | 463 | Raises: 464 | RuntimeError: If `on_duplicate` is 'ignore' and setting slices. 465 | ValueError: If the value(s) are duplicates and `on_duplicate` is 466 | set to 'raise'. 
467 | """ 468 | if isinstance(indices, slice): 469 | values = types.cast(types.Iterable[HT], values) 470 | if self.on_duplicate == 'ignore': 471 | raise RuntimeError( 472 | 'ignore mode while setting slices introduces ambiguous ' 473 | 'behaviour and is therefore not supported' 474 | ) 475 | 476 | duplicates: types.Set[HT] = set(values) & self._set 477 | if duplicates and values != list(self[indices]): 478 | raise ValueError(f'Duplicate values: {duplicates}') 479 | 480 | self._set.update(values) 481 | else: 482 | values = types.cast(HT, values) 483 | if values in self._set and values != self[indices]: 484 | if self.on_duplicate == 'raise': 485 | raise ValueError(f'Duplicate value: {values}') 486 | else: 487 | return 488 | 489 | self._set.add(values) 490 | 491 | super().__setitem__( 492 | types.cast(slice, indices), types.cast(types.List[HT], values) 493 | ) 494 | 495 | def __delitem__( 496 | self, index: types.Union[types.SupportsIndex, slice] 497 | ) -> None: 498 | """ 499 | Deletes the item(s) at the specified index/indices. 500 | 501 | Args: 502 | index (types.Union[types.SupportsIndex, slice]): The index or slice 503 | to delete the item(s) at. 504 | """ 505 | if isinstance(index, slice): 506 | for value in self[index]: 507 | self._set.remove(value) 508 | else: 509 | self._set.remove(self[index]) 510 | 511 | super().__delitem__(index) 512 | 513 | 514 | # Type hinting `collections.deque` does not work consistently between Python 515 | # runtime, mypy and pyright currently so we have to ignore the errors 516 | class SliceableDeque(types.Generic[T], collections.deque[T]): 517 | """ 518 | A deque that supports slicing and enhanced equality checks. 519 | 520 | Methods: 521 | __getitem__(index: types.Union[types.SupportsIndex, slice]) -> 522 | types.Union[T, 'SliceableDeque[T]']: 523 | Returns the item or slice at the given index. 
524 | __eq__(other: types.Any) -> bool: 525 | Checks equality with another object, allowing for comparison with 526 | lists, tuples, and sets. 527 | pop(index: int = -1) -> T: 528 | Removes and returns the item at the given index. Only supports 529 | index 0 and the last index. 530 | """ 531 | 532 | @typing.overload 533 | def __getitem__(self, index: types.SupportsIndex) -> T: ... 534 | 535 | @typing.overload 536 | def __getitem__(self, index: slice) -> 'SliceableDeque[T]': ... 537 | 538 | def __getitem__( 539 | self, index: types.Union[types.SupportsIndex, slice] 540 | ) -> types.Union[T, 'SliceableDeque[T]']: 541 | """ 542 | Return the item or slice at the given index. 543 | 544 | Args: 545 | index (types.Union[types.SupportsIndex, slice]): The index or 546 | slice to retrieve. 547 | 548 | Returns: 549 | types.Union[T, 'SliceableDeque[T]']: The item or slice at the 550 | given index. 551 | 552 | Examples: 553 | >>> d = SliceableDeque[int]([1, 2, 3, 4, 5]) 554 | >>> d[1:4] 555 | SliceableDeque([2, 3, 4]) 556 | 557 | >>> d = SliceableDeque[str](['a', 'b', 'c']) 558 | >>> d[-2:] 559 | SliceableDeque(['b', 'c']) 560 | """ 561 | if isinstance(index, slice): 562 | start, stop, step = index.indices(len(self)) 563 | return self.__class__(self[i] for i in range(start, stop, step)) 564 | else: 565 | return super().__getitem__(index) 566 | 567 | def __eq__(self, other: types.Any) -> bool: 568 | """ 569 | Checks equality with another object, allowing for comparison with 570 | lists, tuples, and sets. 571 | 572 | Args: 573 | other (types.Any): The object to compare with. 574 | 575 | Returns: 576 | bool: True if the objects are equal, False otherwise. 
577 | """ 578 | if isinstance(other, list): 579 | return list(self) == other 580 | elif isinstance(other, tuple): 581 | return tuple(self) == other 582 | elif isinstance(other, set): 583 | return set(self) == other 584 | else: 585 | return super().__eq__(other) 586 | 587 | def pop(self, index: int = -1) -> T: 588 | """ 589 | Removes and returns the item at the given index. Only supports index 0 590 | and the last index. 591 | 592 | Args: 593 | index (int, optional): The index of the item to remove. Defaults to 594 | -1. 595 | 596 | Returns: 597 | T: The removed item. 598 | 599 | Raises: 600 | IndexError: If the index is not 0 or the last index. 601 | 602 | Examples: 603 | >>> d = SliceableDeque([1, 2, 3]) 604 | >>> d.pop(0) 605 | 1 606 | >>> d.pop() 607 | 3 608 | """ 609 | if index == 0: 610 | return super().popleft() 611 | elif index in {-1, len(self) - 1}: 612 | return super().pop() 613 | else: 614 | raise IndexError( 615 | 'Only index 0 and the last index (`N-1` or `-1`) ' 616 | 'are supported' 617 | ) 618 | 619 | 620 | if __name__ == '__main__': 621 | import doctest 622 | 623 | doctest.testmod() 624 | -------------------------------------------------------------------------------- /python_utils/converters.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides utility functions for type conversion. 3 | 4 | Functions: 5 | - to_int: Convert a string to an integer with optional regular expression 6 | matching. 7 | - to_float: Convert a string to a float with optional regular expression 8 | matching. 9 | - to_unicode: Convert objects to Unicode strings. 10 | - to_str: Convert objects to byte strings. 11 | - scale_1024: Scale a number down to a suitable size based on powers of 12 | 1024. 13 | - remap: Remap a value from one range to another. 14 | """ 15 | 16 | # Ignoring all mypy errors because mypy doesn't understand many modern typing 17 | # constructs... please, use pyright instead if you can. 
18 | from __future__ import annotations 19 | 20 | import decimal 21 | import math 22 | import re 23 | import typing 24 | from typing import Union 25 | 26 | from . import types 27 | 28 | _TN = types.TypeVar('_TN', bound=types.DecimalNumber) 29 | 30 | _RegexpType: types.TypeAlias = Union[ 31 | types.Pattern[str], str, types.Literal[True], None 32 | ] 33 | 34 | 35 | def to_int( 36 | input_: str | None = None, 37 | default: int = 0, 38 | exception: types.ExceptionsType = (ValueError, TypeError), 39 | regexp: _RegexpType = None, 40 | ) -> int: 41 | r""" 42 | Convert the given input to an integer or return default. 43 | 44 | When trying to convert the exceptions given in the exception parameter 45 | are automatically catched and the default will be returned. 46 | 47 | The regexp parameter allows for a regular expression to find the digits 48 | in a string. 49 | When True it will automatically match any digit in the string. 50 | When a (regexp) object (has a search method) is given, that will be used. 
51 | WHen a string is given, re.compile will be run over it first 52 | 53 | The last group of the regexp will be used as value 54 | 55 | >>> to_int('abc') 56 | 0 57 | >>> to_int('1') 58 | 1 59 | >>> to_int('') 60 | 0 61 | >>> to_int() 62 | 0 63 | >>> to_int('abc123') 64 | 0 65 | >>> to_int('123abc') 66 | 0 67 | >>> to_int('abc123', regexp=True) 68 | 123 69 | >>> to_int('123abc', regexp=True) 70 | 123 71 | >>> to_int('abc123abc', regexp=True) 72 | 123 73 | >>> to_int('abc123abc456', regexp=True) 74 | 123 75 | >>> to_int('abc123', regexp=re.compile(r'(\d+)')) 76 | 123 77 | >>> to_int('123abc', regexp=re.compile(r'(\d+)')) 78 | 123 79 | >>> to_int('abc123abc', regexp=re.compile(r'(\d+)')) 80 | 123 81 | >>> to_int('abc123abc456', regexp=re.compile(r'(\d+)')) 82 | 123 83 | >>> to_int('abc123', regexp=r'(\d+)') 84 | 123 85 | >>> to_int('123abc', regexp=r'(\d+)') 86 | 123 87 | >>> to_int('abc', regexp=r'(\d+)') 88 | 0 89 | >>> to_int('abc123abc', regexp=r'(\d+)') 90 | 123 91 | >>> to_int('abc123abc456', regexp=r'(\d+)') 92 | 123 93 | >>> to_int('1234', default=1) 94 | 1234 95 | >>> to_int('abc', default=1) 96 | 1 97 | >>> to_int('abc', regexp=123) 98 | Traceback (most recent call last): 99 | ... 
100 | TypeError: unknown argument for regexp parameter: 123 101 | """ 102 | if regexp is True: 103 | regexp = re.compile(r'(\d+)') 104 | elif isinstance(regexp, str): 105 | regexp = re.compile(regexp) 106 | elif hasattr(regexp, 'search'): 107 | pass 108 | elif regexp is not None: 109 | raise TypeError(f'unknown argument for regexp parameter: {regexp!r}') 110 | 111 | try: 112 | if regexp and input_ and (match := regexp.search(input_)): 113 | input_ = match.groups()[-1] 114 | 115 | if input_ is None: 116 | return default 117 | else: 118 | return int(input_) 119 | except exception: 120 | return default 121 | 122 | 123 | def to_float( 124 | input_: str, 125 | default: int = 0, 126 | exception: types.ExceptionsType = (ValueError, TypeError), 127 | regexp: _RegexpType = None, 128 | ) -> types.Number: 129 | r""" 130 | Convert the given `input_` to an integer or return default. 131 | 132 | When trying to convert the exceptions given in the exception parameter 133 | are automatically catched and the default will be returned. 134 | 135 | The regexp parameter allows for a regular expression to find the digits 136 | in a string. 137 | When True it will automatically match any digit in the string. 138 | When a (regexp) object (has a search method) is given, that will be used. 
139 | When a string is given, re.compile will be run over it first 140 | 141 | The last group of the regexp will be used as value 142 | 143 | >>> '%.2f' % to_float('abc') 144 | '0.00' 145 | >>> '%.2f' % to_float('1') 146 | '1.00' 147 | >>> '%.2f' % to_float('abc123.456', regexp=True) 148 | '123.46' 149 | >>> '%.2f' % to_float('abc123', regexp=True) 150 | '123.00' 151 | >>> '%.2f' % to_float('abc0.456', regexp=True) 152 | '0.46' 153 | >>> '%.2f' % to_float('abc123.456', regexp=re.compile(r'(\d+\.\d+)')) 154 | '123.46' 155 | >>> '%.2f' % to_float('123.456abc', regexp=re.compile(r'(\d+\.\d+)')) 156 | '123.46' 157 | >>> '%.2f' % to_float('abc123.46abc', regexp=re.compile(r'(\d+\.\d+)')) 158 | '123.46' 159 | >>> '%.2f' % to_float('abc123abc456', regexp=re.compile(r'(\d+(\.\d+|))')) 160 | '123.00' 161 | >>> '%.2f' % to_float('abc', regexp=r'(\d+)') 162 | '0.00' 163 | >>> '%.2f' % to_float('abc123', regexp=r'(\d+)') 164 | '123.00' 165 | >>> '%.2f' % to_float('123abc', regexp=r'(\d+)') 166 | '123.00' 167 | >>> '%.2f' % to_float('abc123abc', regexp=r'(\d+)') 168 | '123.00' 169 | >>> '%.2f' % to_float('abc123abc456', regexp=r'(\d+)') 170 | '123.00' 171 | >>> '%.2f' % to_float('1234', default=1) 172 | '1234.00' 173 | >>> '%.2f' % to_float('abc', default=1) 174 | '1.00' 175 | >>> '%.2f' % to_float('abc', regexp=123) 176 | Traceback (most recent call last): 177 | ... 
178 | TypeError: unknown argument for regexp parameter 179 | """ 180 | if regexp is True: 181 | regexp = re.compile(r'(\d+(\.\d+|))') 182 | elif isinstance(regexp, str): 183 | regexp = re.compile(regexp) 184 | elif hasattr(regexp, 'search'): 185 | pass 186 | elif regexp is not None: 187 | raise TypeError('unknown argument for regexp parameter') 188 | 189 | try: 190 | if regexp and (match := regexp.search(input_)): 191 | input_ = match.group(1) 192 | return float(input_) 193 | except exception: 194 | return default 195 | 196 | 197 | def to_unicode( 198 | input_: types.StringTypes, 199 | encoding: str = 'utf-8', 200 | errors: str = 'replace', 201 | ) -> str: 202 | """Convert objects to unicode, if needed decodes string with the given 203 | encoding and errors settings. 204 | 205 | :rtype: str 206 | 207 | >>> to_unicode(b'a') 208 | 'a' 209 | >>> to_unicode('a') 210 | 'a' 211 | >>> to_unicode('a') 212 | 'a' 213 | >>> class Foo(object): 214 | ... __str__ = lambda s: 'a' 215 | >>> to_unicode(Foo()) 216 | 'a' 217 | >>> to_unicode(Foo) 218 | "" 219 | """ 220 | if isinstance(input_, bytes): 221 | input_ = input_.decode(encoding, errors) 222 | else: 223 | input_ = str(input_) 224 | return input_ 225 | 226 | 227 | def to_str( 228 | input_: types.StringTypes, 229 | encoding: str = 'utf-8', 230 | errors: str = 'replace', 231 | ) -> bytes: 232 | """Convert objects to string, encodes to the given encoding. 233 | 234 | :rtype: str 235 | 236 | >>> to_str('a') 237 | b'a' 238 | >>> to_str('a') 239 | b'a' 240 | >>> to_str(b'a') 241 | b'a' 242 | >>> class Foo(object): 243 | ... 
__str__ = lambda s: 'a' 244 | >>> to_str(Foo()) 245 | 'a' 246 | >>> to_str(Foo) 247 | "" 248 | """ 249 | if not isinstance(input_, bytes): 250 | if not hasattr(input_, 'encode'): 251 | input_ = str(input_) 252 | 253 | input_ = input_.encode(encoding, errors) 254 | return input_ 255 | 256 | 257 | def scale_1024( 258 | x: types.Number, 259 | n_prefixes: int, 260 | ) -> types.Tuple[types.Number, types.Number]: 261 | """Scale a number down to a suitable size, based on powers of 1024. 262 | 263 | Returns the scaled number and the power of 1024 used. 264 | 265 | Use to format numbers of bytes to KiB, MiB, etc. 266 | 267 | >>> scale_1024(310, 3) 268 | (310.0, 0) 269 | >>> scale_1024(2048, 3) 270 | (2.0, 1) 271 | >>> scale_1024(0, 2) 272 | (0.0, 0) 273 | >>> scale_1024(0.5, 2) 274 | (0.5, 0) 275 | >>> scale_1024(1, 2) 276 | (1.0, 0) 277 | """ 278 | if x <= 0: 279 | power = 0 280 | else: 281 | power = min(int(math.log(x, 2) / 10), n_prefixes - 1) 282 | scaled = float(x) / (2 ** (10 * power)) 283 | return scaled, power 284 | 285 | 286 | @typing.overload 287 | def remap( 288 | value: decimal.Decimal, 289 | old_min: decimal.Decimal | float, 290 | old_max: decimal.Decimal | float, 291 | new_min: decimal.Decimal | float, 292 | new_max: decimal.Decimal | float, 293 | ) -> decimal.Decimal: ... 294 | 295 | 296 | @typing.overload 297 | def remap( 298 | value: decimal.Decimal | float, 299 | old_min: decimal.Decimal, 300 | old_max: decimal.Decimal | float, 301 | new_min: decimal.Decimal | float, 302 | new_max: decimal.Decimal | float, 303 | ) -> decimal.Decimal: ... 304 | 305 | 306 | @typing.overload 307 | def remap( 308 | value: decimal.Decimal | float, 309 | old_min: decimal.Decimal | float, 310 | old_max: decimal.Decimal, 311 | new_min: decimal.Decimal | float, 312 | new_max: decimal.Decimal | float, 313 | ) -> decimal.Decimal: ... 


@typing.overload
def remap(
    value: decimal.Decimal | float,
    old_min: decimal.Decimal | float,
    old_max: decimal.Decimal | float,
    new_min: decimal.Decimal,
    new_max: decimal.Decimal | float,
) -> decimal.Decimal: ...


@typing.overload
def remap(
    value: decimal.Decimal | float,
    old_min: decimal.Decimal | float,
    old_max: decimal.Decimal | float,
    new_min: decimal.Decimal | float,
    new_max: decimal.Decimal,
) -> decimal.Decimal: ...


# Note that float captures both int and float types so we don't need to
# specify them separately
@typing.overload
def remap(
    value: float,
    old_min: float,
    old_max: float,
    new_min: float,
    new_max: float,
) -> float: ...


def remap(  # pyright: ignore[reportInconsistentOverload]
    value: _TN,
    old_min: _TN,
    old_max: _TN,
    new_min: _TN,
    new_max: _TN,
) -> _TN:
    """
    remap a value from one range into another.

    >>> remap(500, 0, 1000, 0, 100)
    50
    >>> remap(250.0, 0.0, 1000.0, 0.0, 100.0)
    25.0
    >>> remap(-75, -100, 0, -1000, 0)
    -750
    >>> remap(33, 0, 100, -500, 500)
    -170
    >>> remap(decimal.Decimal('250.0'), 0.0, 1000.0, 0.0, 100.0)
    Decimal('25.0')

    This is a great use case example. Take an AVR that has dB values the
    minimum being -80dB and the maximum being 10dB and you want to convert
    volume percent to the equivalent in that dB range

    >>> remap(46.0, 0.0, 100.0, -80.0, 10.0)
    -38.6

    I added using decimal.Decimal so floating point math errors can be avoided.
    Here is an example of a floating point math error
    >>> 0.1 + 0.1 + 0.1
    0.30000000000000004

    If floating point remaps need to be done my suggestion is to pass at least
    one parameter as a `decimal.Decimal`. This will ensure that the output
    from this function is accurate. I left passing `floats` for backwards
    compatibility and there is no conversion done from float to
    `decimal.Decimal` unless one of the passed parameters has a type of
    `decimal.Decimal`. This will ensure that any existing code that uses this
    function will work exactly how it has in the past.

    Some edge cases to test
    >>> remap(1, 0, 0, 1, 2)
    Traceback (most recent call last):
    ...
    ValueError: Input range (0-0) is empty

    >>> remap(1, 1, 2, 0, 0)
    Traceback (most recent call last):
    ...
    ValueError: Output range (0-0) is empty

    Args:
        value (int, float, decimal.Decimal): Value to be converted.
        old_min (int, float, decimal.Decimal): Minimum of the range for the
            value that has been passed.
        old_max (int, float, decimal.Decimal): Maximum of the range for the
            value that has been passed.
        new_min (int, float, decimal.Decimal): The minimum of the new range.
        new_max (int, float, decimal.Decimal): The maximum of the new range.

    Returns: int, float, decimal.Decimal: Value that has been re-ranged. If
        any of the parameters passed is a `decimal.Decimal`, all of the
        parameters will be converted to `decimal.Decimal`. The same thing also
        happens if one of the parameters is a `float`. Otherwise, all
        parameters will get converted into an `int`. Technically, you can pass
        a `str` of an integer and it will get converted. The returned value
        type will be `decimal.Decimal` if any of the passed parameters are
        `decimal.Decimal`, the return type will be `float` if any of the
        passed parameters are a `float`, otherwise the returned type will be
        `int`.
    """
    # Promote all five arguments to one common numeric type:
    # Decimal wins over float, float wins over int.
    arguments = (value, old_min, old_max, new_min, new_max)
    type_: types.Type[types.DecimalNumber]
    if any(isinstance(argument, decimal.Decimal) for argument in arguments):
        type_ = decimal.Decimal
    elif any(isinstance(argument, float) for argument in arguments):
        type_ = float
    else:
        type_ = int

    value = types.cast(_TN, type_(value))
    old_min = types.cast(_TN, type_(old_min))
    old_max = types.cast(_TN, type_(old_max))
    new_max = types.cast(_TN, type_(new_max))
    new_min = types.cast(_TN, type_(new_min))

    # These might not be floats but the Python type system doesn't understand
    # the generic type system in this case
    old_range = types.cast(float, old_max) - types.cast(float, old_min)
    new_range = types.cast(float, new_max) - types.cast(float, new_min)

    if old_range == 0:
        raise ValueError(f'Input range ({old_min}-{old_max}) is empty')

    if new_range == 0:
        raise ValueError(f'Output range ({new_min}-{new_max}) is empty')

    # The current state of Python typing makes it impossible to use the
    # generic type system in this case. Or so extremely verbose that it's not
    # worth it.
460 | new_value = (value - old_min) * new_range # type: ignore[operator] # pyright: ignore[reportOperatorIssue, reportUnknownVariableType] 461 | 462 | if type_ is int: 463 | new_value //= old_range # pyright: ignore[reportUnknownVariableType] 464 | else: 465 | new_value /= old_range # pyright: ignore[reportUnknownVariableType] 466 | 467 | new_value += new_min # type: ignore[operator] # pyright: ignore[reportOperatorIssue, reportUnknownVariableType] 468 | 469 | return types.cast(_TN, new_value) 470 | -------------------------------------------------------------------------------- /python_utils/decorators.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides various utility decorators for Python functions 3 | and methods. 4 | 5 | The decorators include: 6 | 7 | 1. `set_attributes`: Sets attributes on functions and classes. 8 | 2. `listify`: Converts any generator to a list or other collection. 9 | 3. `sample`: Limits calls to a function based on a sample rate. 10 | 4. `wraps_classmethod`: Wraps classmethods with type info from a 11 | regular method. 12 | 13 | Each decorator is designed to enhance the functionality of Python 14 | functions and methods in a simple and reusable manner. 15 | """ 16 | 17 | import contextlib 18 | import functools 19 | import logging 20 | import random 21 | 22 | from . import types 23 | 24 | _T = types.TypeVar('_T') 25 | _P = types.ParamSpec('_P') 26 | _S = types.TypeVar('_S', covariant=True) 27 | 28 | 29 | def set_attributes(**kwargs: types.Any) -> types.Callable[..., types.Any]: 30 | """Decorator to set attributes on functions and classes. 31 | 32 | A common usage for this pattern is the Django Admin where 33 | functions can get an optional short_description. 
To illustrate: 34 | 35 | Example from the Django admin using this decorator: 36 | https://docs.djangoproject.com/en/3.0/ref/contrib/admin/#django.contrib.admin.ModelAdmin.list_display 37 | 38 | Our simplified version: 39 | 40 | >>> @set_attributes(short_description='Name') 41 | ... def upper_case_name(self, obj): 42 | ... return ('%s %s' % (obj.first_name, obj.last_name)).upper() 43 | 44 | The standard Django version: 45 | 46 | >>> def upper_case_name(obj): 47 | ... return ('%s %s' % (obj.first_name, obj.last_name)).upper() 48 | 49 | >>> upper_case_name.short_description = 'Name' 50 | 51 | """ 52 | 53 | def _set_attributes( 54 | function: types.Callable[_P, _T], 55 | ) -> types.Callable[_P, _T]: 56 | for key, value in kwargs.items(): 57 | setattr(function, key, value) 58 | return function 59 | 60 | return _set_attributes 61 | 62 | 63 | def listify( 64 | collection: types.Callable[ 65 | [types.Iterable[_T]], types.Collection[_T] 66 | ] = list, 67 | allow_empty: bool = True, 68 | ) -> types.Callable[ 69 | [types.Callable[..., types.Optional[types.Iterable[_T]]]], 70 | types.Callable[..., types.Collection[_T]], 71 | ]: 72 | """ 73 | Convert any generator to a list or other type of collection. 74 | 75 | >>> @listify() 76 | ... def generator(): 77 | ... yield 1 78 | ... yield 2 79 | ... yield 3 80 | 81 | >>> generator() 82 | [1, 2, 3] 83 | 84 | >>> @listify() 85 | ... def empty_generator(): 86 | ... pass 87 | 88 | >>> empty_generator() 89 | [] 90 | 91 | >>> @listify(allow_empty=False) 92 | ... def empty_generator_not_allowed(): 93 | ... pass 94 | 95 | >>> empty_generator_not_allowed() # doctest: +ELLIPSIS 96 | Traceback (most recent call last): 97 | ... 98 | TypeError: ... `allow_empty` is `False` 99 | 100 | >>> @listify(collection=set) 101 | ... def set_generator(): 102 | ... yield 1 103 | ... yield 1 104 | ... yield 2 105 | 106 | >>> set_generator() 107 | {1, 2} 108 | 109 | >>> @listify(collection=dict) 110 | ... def dict_generator(): 111 | ... yield 'a', 1 112 | ... 
yield 'b', 2 113 | 114 | >>> dict_generator() 115 | {'a': 1, 'b': 2} 116 | """ 117 | 118 | def _listify( 119 | function: types.Callable[..., types.Optional[types.Iterable[_T]]], 120 | ) -> types.Callable[..., types.Collection[_T]]: 121 | def __listify( 122 | *args: types.Any, **kwargs: types.Any 123 | ) -> types.Collection[_T]: 124 | result: types.Optional[types.Iterable[_T]] = function( 125 | *args, **kwargs 126 | ) 127 | if result is None: 128 | if allow_empty: 129 | return collection(iter(())) 130 | else: 131 | raise TypeError( 132 | f'{function} returned `None` and `allow_empty` ' 133 | 'is `False`' 134 | ) 135 | else: 136 | return collection(result) 137 | 138 | return __listify 139 | 140 | return _listify 141 | 142 | 143 | def sample( 144 | sample_rate: float, 145 | ) -> types.Callable[ 146 | [types.Callable[_P, _T]], 147 | types.Callable[_P, types.Optional[_T]], 148 | ]: 149 | """ 150 | Limit calls to a function based on given sample rate. 151 | Number of calls to the function will be roughly equal to 152 | sample_rate percentage. 153 | 154 | Usage: 155 | 156 | >>> @sample(0.5) 157 | ... def demo_function(*args, **kwargs): 158 | ... return 1 159 | 160 | Calls to *demo_function* will be limited to 50% approximatly. 
161 | """ 162 | 163 | def _sample( 164 | function: types.Callable[_P, _T], 165 | ) -> types.Callable[_P, types.Optional[_T]]: 166 | @functools.wraps(function) 167 | def __sample( 168 | *args: _P.args, **kwargs: _P.kwargs 169 | ) -> types.Optional[_T]: 170 | if random.random() < sample_rate: 171 | return function(*args, **kwargs) 172 | else: 173 | logging.debug( 174 | 'Skipped execution of %r(%r, %r) due to sampling', 175 | function, 176 | args, 177 | kwargs, 178 | ) 179 | return None 180 | 181 | return __sample 182 | 183 | return _sample 184 | 185 | 186 | def wraps_classmethod( 187 | wrapped: types.Callable[types.Concatenate[_S, _P], _T], 188 | ) -> types.Callable[ 189 | [ 190 | types.Callable[types.Concatenate[types.Any, _P], _T], 191 | ], 192 | types.Callable[types.Concatenate[_S, _P], _T], 193 | ]: 194 | """ 195 | Like `functools.wraps`, but for wrapping classmethods with the type info 196 | from a regular method. 197 | """ 198 | 199 | def _wraps_classmethod( 200 | wrapper: types.Callable[types.Concatenate[types.Any, _P], _T], 201 | ) -> types.Callable[types.Concatenate[_S, _P], _T]: 202 | # For some reason `functools.update_wrapper` fails on some test 203 | # runs but not while running actual code 204 | with contextlib.suppress(AttributeError): 205 | wrapper = functools.update_wrapper( 206 | wrapper, 207 | wrapped, 208 | assigned=tuple( 209 | a 210 | for a in functools.WRAPPER_ASSIGNMENTS 211 | if a != '__annotations__' 212 | ), 213 | ) 214 | if annotations := getattr(wrapped, '__annotations__', {}): 215 | annotations.pop('self', None) 216 | wrapper.__annotations__ = annotations 217 | 218 | return wrapper 219 | 220 | return _wraps_classmethod 221 | -------------------------------------------------------------------------------- /python_utils/exceptions.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides utility functions for raising and reraising exceptions. 
3 | 4 | Functions: 5 | raise_exception(exception_class, *args, **kwargs): 6 | Returns a function that raises an exception of the given type with 7 | the given arguments. 8 | 9 | reraise(*args, **kwargs): 10 | Reraises the current exception. 11 | """ 12 | 13 | from . import types 14 | 15 | 16 | def raise_exception( 17 | exception_class: types.Type[Exception], 18 | *args: types.Any, 19 | **kwargs: types.Any, 20 | ) -> types.Callable[..., None]: 21 | """ 22 | Returns a function that raises an exception of the given type with the 23 | given arguments. 24 | 25 | >>> raise_exception(ValueError, 'spam')('eggs') 26 | Traceback (most recent call last): 27 | ... 28 | ValueError: spam 29 | """ 30 | 31 | def raise_(*args_: types.Any, **kwargs_: types.Any) -> types.Any: 32 | raise exception_class(*args, **kwargs) 33 | 34 | return raise_ 35 | 36 | 37 | def reraise(*args: types.Any, **kwargs: types.Any) -> types.Any: 38 | """ 39 | Reraises the current exception. 40 | 41 | This function seems useless, but it can be useful when you need to pass 42 | a callable to another function that raises an exception. 43 | """ 44 | raise 45 | -------------------------------------------------------------------------------- /python_utils/formatters.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides utility functions for formatting strings and dates. 3 | 4 | Functions: 5 | camel_to_underscore(name: str) -> str: 6 | Convert camel case style naming to underscore/snake case style naming. 7 | 8 | apply_recursive(function: Callable[[str], str], data: OptionalScope = None, 9 | **kwargs: Any) -> OptionalScope: 10 | Apply a function to all keys in a scope recursively. 11 | 12 | timesince(dt: Union[datetime.datetime, datetime.timedelta], 13 | default: str = 'just now') -> str: 14 | Returns string representing 'time since' e.g. 3 days ago, 5 hours ago. 
# pyright: reportUnnecessaryIsInstance=false
import datetime


def camel_to_underscore(name: str) -> str:
    """Convert camel case style naming to underscore/snake case style naming.

    Existing underscores are collapsed with the to-be-added underscores, and
    runs of consecutive capitals are kept together except for the last one.

    >>> camel_to_underscore('SpamEggsAndBacon')
    'spam_eggs_and_bacon'
    >>> camel_to_underscore('Spam_and_bacon')
    'spam_and_bacon'
    >>> camel_to_underscore('Spam_And_Bacon')
    'spam_and_bacon'
    >>> camel_to_underscore('__SpamAndBacon__')
    '__spam_and_bacon__'
    >>> camel_to_underscore('__SpamANDBacon__')
    '__spam_and_bacon__'
    """
    parts: list[str] = []
    for index, char in enumerate(name):
        if index:
            previous = name[index - 1]
            if char.isupper() and not previous.isupper() and previous != '_':
                # Transition from lower/other to upper: insert a separator
                # (unless one is already there).
                parts.append('_')
            elif index > 3 and not char.isupper():
                # Leaving a run of capitals: split off the last capital so
                # e.g. 'ANDBacon' becomes 'and_bacon'.
                window = name[index - 3 : index]
                if window.isalpha() and window.isupper():
                    parts.insert(len(parts) - 1, '_')

        parts.append(char.lower())

    return ''.join(parts)


def apply_recursive(
    function: 'types.Callable[[str], str]',
    data: 'types.OptionalScope' = None,
    **kwargs: 'types.Any',
) -> 'types.OptionalScope':
    """
    Apply a function to all keys in a scope recursively.

    >>> apply_recursive(camel_to_underscore, {'SpamEggsAndBacon': 'spam'})
    {'spam_eggs_and_bacon': 'spam'}
    >>> apply_recursive(camel_to_underscore, None)
    """
    # Guard clauses instead of an if/elif chain: None passes through,
    # non-dict values are returned untouched, dicts recurse on values.
    if data is None:
        return None

    if not isinstance(data, dict):
        return data

    return {
        function(key): apply_recursive(function, value, **kwargs)
        for key, value in data.items()
    }


def timesince(
    dt: 'types.Union[datetime.datetime, datetime.timedelta]',
    default: str = 'just now',
) -> str:
    """
    Returns string representing 'time since' e.g.
    3 days ago, 5 hours ago etc.

    >>> now = datetime.datetime.now()
    >>> timesince(now)
    'just now'
    >>> timesince(now - datetime.timedelta(seconds=1))
    '1 second ago'
    >>> timesince(now - datetime.timedelta(seconds=3661))
    '1 hour and 1 minute ago'
    >>> timesince(datetime.timedelta(seconds=3721))
    '1 hour and 2 minutes ago'
    """
    if isinstance(dt, datetime.timedelta):
        difference = dt
    else:
        difference = abs(datetime.datetime.now() - dt)

    days, seconds = difference.days, difference.seconds
    # Approximate calendar periods, largest first; each entry is
    # (amount, singular label, plural label).
    periods = (
        (days / 365, 'year', 'years'),
        (days % 365 / 30, 'month', 'months'),
        (days % 30 / 7, 'week', 'weeks'),
        (days % 7, 'day', 'days'),
        (seconds / 3600, 'hour', 'hours'),
        (seconds % 3600 / 60, 'minute', 'minutes'),
        (seconds % 60, 'second', 'seconds'),
    )

    fragments: list[str] = []
    for amount, singular, plural in periods:
        count = int(amount)
        if count:
            label = singular if count == 1 else plural
            fragments.append(f'{count} {label}')

    # Only the two most significant non-zero periods are reported.
    if fragments:
        return f'{" and ".join(fragments[:2])} ago'

    return default
import asyncio
import time


async def abatcher(
    generator: 'types.Union[types.AsyncGenerator[_T, None], types.AsyncIterator[_T]]',
    batch_size: 'types.Optional[int]' = None,
    interval: 'types.Optional[types.delta_type]' = None,
) -> 'types.AsyncGenerator[types.List[_T], None]':
    """
    Asyncio generator wrapper that returns items with a given batch size or
    interval (whichever is reached first).

    Args:
        generator: The async generator or iterator to batch.
        batch_size: The number of items per batch. Defaults to None.
        interval: The time interval to wait before yielding a batch.
            Defaults to None.

    Yields:
        Lists of items collected from ``generator``.
    """
    batch: 'list[_T]' = []

    assert batch_size or interval, 'Must specify either batch_size or interval'

    # `asyncio.wait` always needs a timeout; without an interval we use an
    # effectively infinite one (ten years) to keep the code path uniform.
    if interval:
        timeout = python_utils.delta_to_seconds(interval)
    else:
        timeout = 315_360_000.0  # 60 * 60 * 24 * 365 * 10

    deadline: float = time.perf_counter() + timeout
    waiting: 'set[asyncio.Task[_T]]' = set()

    while True:
        try:
            if not waiting:
                # Only schedule a new __anext__ when nothing is in flight.
                waiting = {asyncio.create_task(generator.__anext__())}

            finished, waiting = await asyncio.wait(
                waiting,
                timeout=timeout,
                return_when=asyncio.FIRST_COMPLETED,
            )
            for task in finished:
                # `.result()` re-raises `StopAsyncIteration` once the
                # source iterator is exhausted.
                batch.append(task.result())
        except StopAsyncIteration:
            if batch:
                yield batch

            break

        if batch_size is not None and len(batch) == batch_size:
            yield batch
            batch = []

        if interval and batch and time.perf_counter() > deadline:
            yield batch
            batch = []
            # Reset from *now* rather than the previous deadline: if the
            # loop runs slow due to blocking work we must not burst.
            deadline = time.perf_counter() + timeout


def batcher(
    iterable: 'types.Iterable[_T]',
    batch_size: int = 10,
) -> 'types.Generator[types.List[_T], None, None]':
    """
    Generator wrapper that returns items with a given batch size.

    Args:
        iterable: The iterable to batch.
        batch_size: The number of items per batch. Defaults to 10.

    Yields:
        Lists of up to ``batch_size`` items; the final batch may be smaller.
    """
    pending: 'list[_T]' = []
    for element in iterable:
        pending.append(element)
        if len(pending) == batch_size:
            yield pending
            pending = []

    if pending:
        yield pending
import types 16 | 17 | 18 | class DummyError(Exception): 19 | """A custom exception class used as a default for exception handling.""" 20 | 21 | 22 | # Legacy alias for DummyError 23 | DummyException = DummyError 24 | 25 | 26 | def import_global( # noqa: C901 27 | name: str, 28 | modules: types.Optional[types.List[str]] = None, 29 | exceptions: types.ExceptionsType = DummyError, 30 | locals_: types.OptionalScope = None, 31 | globals_: types.OptionalScope = None, 32 | level: int = -1, 33 | ) -> types.Any: # sourcery skip: hoist-if-from-if 34 | """Import the requested items into the global scope. 35 | 36 | WARNING! this method _will_ overwrite your global scope 37 | If you have a variable named `path` and you call `import_global('sys')` 38 | it will be overwritten with `sys.path` 39 | 40 | Args: 41 | name (str): the name of the module to import, e.g. sys 42 | modules (str): the modules to import, use None for everything 43 | exceptions (Exception): the exception to catch, e.g. ImportError 44 | locals_: the `locals()` method (in case you need a different scope) 45 | globals_: the `globals()` method (in case you need a different scope) 46 | level (int): the level to import from, this can be used for 47 | relative imports 48 | """ 49 | frame = None 50 | name_parts: types.List[str] = name.split('.') 51 | modules_set: types.Set[str] = set() 52 | try: 53 | # If locals_ or globals_ are not given, autodetect them by inspecting 54 | # the current stack 55 | if locals_ is None or globals_ is None: 56 | import inspect 57 | 58 | frame = inspect.stack()[1][0] 59 | 60 | if locals_ is None: 61 | locals_ = frame.f_locals 62 | 63 | if globals_ is None: 64 | globals_ = frame.f_globals 65 | 66 | try: 67 | # Relative imports are supported (from .spam import eggs) 68 | if not name_parts[0]: 69 | name_parts = name_parts[1:] 70 | level = 1 71 | 72 | # raise IOError((name, level)) 73 | module = __import__( 74 | name=name_parts[0] or '.', 75 | globals=globals_, 76 | locals=locals_, 77 | 
fromlist=name_parts[1:], 78 | level=max(level, 0), 79 | ) 80 | 81 | # Make sure we get the right part of a dotted import (i.e. 82 | # spam.eggs should return eggs, not spam) 83 | try: 84 | for attr in name_parts[1:]: 85 | module = getattr(module, attr) 86 | except AttributeError as e: 87 | raise ImportError( 88 | 'No module named ' + '.'.join(name_parts) 89 | ) from e 90 | 91 | # If no list of modules is given, autodetect from either __all__ 92 | # or a dir() of the module 93 | if not modules: 94 | modules_set = set(getattr(module, '__all__', dir(module))) 95 | else: 96 | modules_set = set(modules).intersection(dir(module)) 97 | 98 | # Add all items in modules to the global scope 99 | for k in set(dir(module)).intersection(modules_set): 100 | if k and k[0] != '_': 101 | globals_[k] = getattr(module, k) 102 | except exceptions as e: 103 | return e 104 | finally: 105 | # Clean up, just to be sure 106 | del ( 107 | name, 108 | name_parts, 109 | modules, 110 | modules_set, 111 | exceptions, 112 | locals_, 113 | globals_, 114 | frame, 115 | ) 116 | -------------------------------------------------------------------------------- /python_utils/logger.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides a base class `LoggerBase` and a derived class `Logged` 3 | for adding logging capabilities to classes. The `LoggerBase` class expects 4 | a `logger` attribute to be a `logging.Logger` or compatible instance and 5 | provides methods for logging at various levels. The `Logged` class 6 | automatically adds a named logger to the class. 7 | 8 | Classes: 9 | LoggerBase: 10 | A base class that adds logging utilities to a class. 11 | Logged: 12 | A derived class that automatically adds a named logger to a class. 13 | 14 | Example: 15 | >>> class MyClass(Logged): 16 | ... def __init__(self): 17 | ... 
import abc
import logging

from . import decorators

__all__ = ['Logged']

from . import types

# From the logging typeshed, converted to be compatible with Python 3.8
# https://github.com/python/typeshed/blob/main/stdlib/logging/__init__.pyi
_ExcInfoType: types.TypeAlias = types.Union[
    bool,
    types.Tuple[
        types.Type[BaseException],
        BaseException,
        types.Union[types.TracebackType, None],
    ],
    types.Tuple[None, None, None],
    BaseException,
    None,
]
_P = types.ParamSpec('_P')
_T = types.TypeVar('_T', covariant=True)


class LoggerProtocol(types.Protocol):
    # Structural description of the logging methods that a `logger`
    # attribute is expected to provide (mirrors `logging.Logger`). Currently
    # not used as the declared type of `LoggerBase.logger` (see the note
    # there), but kept as reference documentation of the expected interface.

    def debug(
        self,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None: ...

    def info(
        self,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None: ...

    def warning(
        self,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None: ...

    def error(
        self,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None: ...

    def critical(
        self,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None: ...

    def exception(
        self,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None: ...

    def log(
        self,
        level: int,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None: ...


class LoggerBase(abc.ABC):
    """Class which automatically adds logging utilities to your class when
    inheriting. Expects `logger` to be a logging.Logger or compatible
    instance.

    Adds easy access to debug, info, warning, error, exception and log methods

    >>> class MyClass(LoggerBase):
    ...     logger = logging.getLogger(__name__)
    ...
    ...     def __init__(self):
    ...         Logged.__init__(self)

    >>> my_class = MyClass()
    >>> my_class.debug('debug')
    >>> my_class.info('info')
    >>> my_class.warning('warning')
    >>> my_class.error('error')
    >>> my_class.exception('exception')
    >>> my_class.log(0, 'log')
    """

    # I've tried using a protocol to properly type the logger but it gave all
    # sorts of issues with mypy so we're using the lazy solution for now. The
    # actual classes define the correct type anyway
    logger: types.Any
    # logger: LoggerProtocol

    @classmethod
    def __get_name(  # pyright: ignore[reportUnusedFunction]
        cls, *name_parts: str
    ) -> str:
        # Joins the non-empty, stripped name parts with dots; used by
        # subclasses (via the name-mangled form) to build logger names.
        return '.'.join(n.strip() for n in name_parts if n.strip())

    # Each method below simply forwards to the class-level `logger`.
    # `decorators.wraps_classmethod` copies the signature/type information
    # from the corresponding `logging.Logger` method onto the classmethod.

    @decorators.wraps_classmethod(logging.Logger.debug)
    @classmethod
    def debug(
        cls,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None:
        return cls.logger.debug(  # type: ignore[no-any-return]
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    @decorators.wraps_classmethod(logging.Logger.info)
    @classmethod
    def info(
        cls,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None:
        return cls.logger.info(  # type: ignore[no-any-return]
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    @decorators.wraps_classmethod(logging.Logger.warning)
    @classmethod
    def warning(
        cls,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None:
        return cls.logger.warning(  # type: ignore[no-any-return]
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    @decorators.wraps_classmethod(logging.Logger.error)
    @classmethod
    def error(
        cls,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None:
        return cls.logger.error(  # type: ignore[no-any-return]
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    @decorators.wraps_classmethod(logging.Logger.critical)
    @classmethod
    def critical(
        cls,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None:
        return cls.logger.critical(  # type: ignore[no-any-return]
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    @decorators.wraps_classmethod(logging.Logger.exception)
    @classmethod
    def exception(
        cls,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None:
        return cls.logger.exception(  # type: ignore[no-any-return]
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )

    @decorators.wraps_classmethod(logging.Logger.log)
    @classmethod
    def log(
        cls,
        level: int,
        msg: object,
        *args: object,
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: types.Union[types.Mapping[str, object], None] = None,
    ) -> None:
        return cls.logger.log(  # type: ignore[no-any-return]
            level,
            msg,
            *args,
            exc_info=exc_info,
            stack_info=stack_info,
            stacklevel=stacklevel,
            extra=extra,
        )


class Logged(LoggerBase):
    """Class which automatically adds a named logger to your class when
    inheriting.

    Adds easy access to debug, info, warning, error, exception and log methods

    >>> class MyClass(Logged):
    ...     def __init__(self):
    ...         Logged.__init__(self)

    >>> my_class = MyClass()
    >>> my_class.debug('debug')
    >>> my_class.info('info')
    >>> my_class.warning('warning')
    >>> my_class.error('error')
    >>> my_class.exception('exception')
    >>> my_class.log(0, 'log')

    >>> my_class._Logged__get_name('spam')
    'spam'
    """

    logger: logging.Logger  # pragma: no cover

    @classmethod
    def __get_name(cls, *name_parts: str) -> str:
        # Name mangling makes `__get_name` class-local, so we delegate to
        # the mangled implementation on `LoggerBase` explicitly.
        return types.cast(
            str,
            LoggerBase._LoggerBase__get_name(*name_parts),  # type: ignore[attr-defined] # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType, reportAttributeAccessIssue]
        )

    def __new__(cls, *args: types.Any, **kwargs: types.Any) -> 'Logged':
        """
        Create a new instance of the class and initialize the logger.

        The logger is named using the module and class name.

        Args:
            *args: Variable length argument list.
            **kwargs: Arbitrary keyword arguments.

        Returns:
            An instance of the class.
        """
        # Note: assigned on the *class*, so all instances of `cls` share a
        # single logger named `<module>.<class name>`.
        cls.logger = logging.getLogger(
            cls.__get_name(cls.__module__, cls.__name__)
        )
        return super().__new__(cls)
4 | 5 | Classes: 6 | Logurud: A class that extends `LoggerBase` and uses `loguru` for logging. 7 | 8 | Usage example: 9 | >>> from python_utils.loguru import Logurud 10 | >>> class MyClass(Logurud): 11 | ... def __init__(self): 12 | ... Logurud.__init__(self) 13 | >>> my_class = MyClass() 14 | >>> my_class.logger.info('This is an info message') 15 | """ 16 | 17 | from __future__ import annotations 18 | 19 | import typing 20 | 21 | import loguru 22 | 23 | from . import logger as logger_module 24 | 25 | __all__ = ['Logurud'] 26 | 27 | 28 | class Logurud(logger_module.LoggerBase): 29 | """ 30 | A class that extends `LoggerBase` and uses `loguru` for logging. 31 | 32 | Attributes: 33 | logger (loguru.Logger): The `loguru` logger instance. 34 | """ 35 | 36 | logger: loguru.Logger 37 | 38 | def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Logurud: 39 | """ 40 | Creates a new instance of `Logurud` and initializes the `loguru` 41 | logger. 42 | 43 | Args: 44 | *args (typing.Any): Variable length argument list. 45 | **kwargs (typing.Any): Arbitrary keyword arguments. 46 | 47 | Returns: 48 | Logurud: A new instance of `Logurud`. 49 | """ 50 | cls.logger: loguru.Logger = loguru.logger.opt(depth=1) 51 | return super().__new__(cls) 52 | -------------------------------------------------------------------------------- /python_utils/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wolph/python-utils/d36802f9c049744d210a454531d00e36fffda8c1/python_utils/py.typed -------------------------------------------------------------------------------- /python_utils/terminal.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides functions to get the terminal size across different 3 | platforms. 4 | 5 | Functions: 6 | get_terminal_size: Get the current size of the terminal. 7 | _get_terminal_size_windows: Get terminal size on Windows. 
from __future__ import annotations

import contextlib
import os
import typing

from . import converters

# (width, height) in character cells
Dimensions = tuple[int, int]
OptionalDimensions = typing.Optional[Dimensions]
_StrDimensions = tuple[str, str]
_OptionalStrDimensions = typing.Optional[_StrDimensions]


def get_terminal_size() -> Dimensions:  # pragma: no cover
    """Get the current size of your terminal.

    Multiple returns are not always a good idea, but in this case it greatly
    simplifies the code so I believe it's justified. It's not the prettiest
    function but that's never really possible with cross-platform code.

    Detection strategies are tried in order; the first that yields a
    non-zero size wins, with a final fallback of 79x24.

    Returns:
        width, height: Two integers containing width and height
    """
    w: int | None
    h: int | None

    with contextlib.suppress(Exception):
        # Default to 79 characters for IPython notebooks
        from IPython import get_ipython  # type: ignore[attr-defined]

        ipython = get_ipython()  # type: ignore[no-untyped-call]
        from ipykernel import zmqshell  # type: ignore[import-not-found]

        if isinstance(ipython, zmqshell.ZMQInteractiveShell):
            return 79, 24
    with contextlib.suppress(Exception):
        # This works for Python 3, but not Pypy3. Probably the best method if
        # it's supported so let's always try
        import shutil

        w, h = shutil.get_terminal_size()
        if w and h:
            # The off by one is needed due to progressbars in some cases, for
            # safety we'll always substract it.
            return w - 1, h
    with contextlib.suppress(Exception):
        # Environment variables set by some shells
        w = converters.to_int(os.environ.get('COLUMNS'))
        h = converters.to_int(os.environ.get('LINES'))
        if w and h:
            return w, h
    with contextlib.suppress(Exception):
        import blessings  # type: ignore[import-untyped]

        terminal = blessings.Terminal()
        w = terminal.width
        h = terminal.height
        if w and h:
            return w, h
    with contextlib.suppress(Exception):
        # The method can return None so we don't unpack it
        wh = _get_terminal_size_linux()
        if wh is not None and all(wh):
            return wh

    with contextlib.suppress(Exception):
        # Windows detection doesn't always work, let's try anyhow
        wh = _get_terminal_size_windows()
        if wh is not None and all(wh):
            return wh

    with contextlib.suppress(Exception):
        # needed for window's python in cygwin's xterm!
        wh = _get_terminal_size_tput()
        if wh is not None and all(wh):
            return wh

    return 79, 24


def _get_terminal_size_windows() -> OptionalDimensions:  # pragma: no cover
    """Get terminal size on Windows via GetConsoleScreenBufferInfo.

    Returns (width, height) or None if detection fails.
    """
    res = None
    try:
        from ctypes import (  # type: ignore[attr-defined]
            create_string_buffer,
            windll,
        )

        # stdin handle is -10
        # stdout handle is -11
        # stderr handle is -12

        h = windll.kernel32.GetStdHandle(-12)
        csbi = create_string_buffer(22)
        res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
    except Exception:
        return None

    if res:
        import struct

        # Unpack CONSOLE_SCREEN_BUFFER_INFO; the window rect fields are
        # (left, top, right, bottom) in character cells.
        (_, _, _, _, _, left, top, right, bottom, _, _) = struct.unpack(
            'hhhhHhhhhhh', csbi.raw
        )
        # NOTE(review): the rect coordinates are inclusive, so most recipes
        # compute `right - left + 1` / `bottom - top + 1`; confirm the
        # missing +1 here is intentional (it may mirror the deliberate
        # off-by-one safety margin in get_terminal_size).
        w = right - left
        h = bottom - top
        return w, h
    else:
        return None


def _get_terminal_size_tput() -> OptionalDimensions:  # pragma: no cover
    """Get terminal size by shelling out to `tput cols` / `tput lines`.

    Returns (width, height) or None if either invocation fails.
    """
    # get terminal width src: http://stackoverflow.com/questions/263890/
    try:
        import subprocess

        proc = subprocess.Popen(
            ['tput', 'cols'],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        # communicate() returns (stdout, stderr); stdout holds the number
        output = proc.communicate(input=None)
        w = int(output[0])
        proc = subprocess.Popen(
            ['tput', 'lines'],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        output = proc.communicate(input=None)
        h = int(output[0])
    except Exception:
        return None
    else:
        return w, h


def _get_terminal_size_linux() -> OptionalDimensions:  # pragma: no cover
    """Get terminal size on Linux via TIOCGWINSZ ioctl, with fallbacks.

    Tries stdin/stdout/stderr, then the controlling terminal, then the
    LINES/COLUMNS environment variables. Returns (width, height) or None.
    """

    def ioctl_gwinsz(fd: int) -> tuple[str, str] | None:
        # The ioctl returns (rows, columns) — note the order.
        try:
            import fcntl
            import struct
            import termios

            return typing.cast(
                _OptionalStrDimensions,
                struct.unpack(
                    'hh',
                    fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'),  # type: ignore[call-overload]
                ),
            )
        except Exception:
            return None

    size: _OptionalStrDimensions
    size = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)

    if not size:
        with contextlib.suppress(Exception):
            # Fall back to the controlling terminal
            fd = os.open(os.ctermid(), os.O_RDONLY)
            size = ioctl_gwinsz(fd)
            os.close(fd)
    if not size:
        try:
            # Same (rows, columns) order as the ioctl result
            size = os.environ['LINES'], os.environ['COLUMNS']
        except Exception:
            return None

    # Swap to (width, height) for the caller
    return int(size[1]), int(size[0])
def timedelta_to_seconds(delta: datetime.timedelta) -> types.Number:
    """Return the number of seconds in a timedelta, microseconds included.

    Largely superseded by `timedelta.total_seconds()` (introduced in
    Python 2.7) but kept for backwards compatibility. Returns an `int`
    when the delta carries no microseconds and a `float` otherwise.

    >>> from datetime import timedelta
    >>> '%d' % timedelta_to_seconds(timedelta(days=1))
    '86400'
    >>> '%d' % timedelta_to_seconds(timedelta(seconds=1))
    '1'
    >>> '%.6f' % timedelta_to_seconds(timedelta(seconds=1, microseconds=1))
    '1.000001'
    >>> '%.6f' % timedelta_to_seconds(timedelta(microseconds=1))
    '0.000001'
    """
    # Start from the fractional part so the result stays an `int` when
    # there is no microsecond component.
    seconds: types.Number = (
        delta.microseconds * 1e-6 if delta.microseconds else 0
    )
    seconds += delta.seconds
    seconds += delta.days * 60 * 60 * 24
    return seconds


def delta_to_seconds(interval: types.delta_type) -> types.Number:
    """
    Return the given interval as seconds.

    Accepts a `datetime.timedelta` or a plain number, which is assumed to
    already be in seconds and is returned unchanged.

    >>> delta_to_seconds(datetime.timedelta(seconds=1))
    1
    >>> delta_to_seconds(datetime.timedelta(seconds=1, microseconds=1))
    1.000001
    >>> delta_to_seconds(1)
    1
    >>> delta_to_seconds('whatever') # doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    TypeError: Unknown type ...
    """
    if isinstance(interval, (int, float)):
        return interval
    if isinstance(interval, datetime.timedelta):
        return timedelta_to_seconds(interval)
    raise TypeError(f'Unknown type {type(interval)}: {interval!r}')


def delta_to_seconds_or_none(
    interval: types.Optional[types.delta_type],
) -> types.Optional[types.Number]:
    """Like `delta_to_seconds`, but passes `None` through untouched."""
    return None if interval is None else delta_to_seconds(interval)
104 | 105 | >>> format_time('1') 106 | '0:00:01' 107 | >>> format_time(1.234) 108 | '0:00:01' 109 | >>> format_time(1) 110 | '0:00:01' 111 | >>> format_time(datetime.datetime(2000, 1, 2, 3, 4, 5, 6)) 112 | '2000-01-02 03:04:05' 113 | >>> format_time(datetime.date(2000, 1, 2)) 114 | '2000-01-02' 115 | >>> format_time(datetime.timedelta(seconds=3661)) 116 | '1:01:01' 117 | >>> format_time(None) 118 | '--:--:--' 119 | >>> format_time(format_time) # doctest: +ELLIPSIS 120 | Traceback (most recent call last): 121 | ... 122 | TypeError: Unknown type ... 123 | 124 | """ 125 | precision_seconds = precision.total_seconds() 126 | 127 | if isinstance(timestamp, str): 128 | timestamp = float(timestamp) 129 | 130 | if isinstance(timestamp, (int, float)): 131 | try: 132 | timestamp = datetime.timedelta(seconds=timestamp) 133 | except OverflowError: # pragma: no cover 134 | timestamp = None 135 | 136 | if isinstance(timestamp, datetime.timedelta): 137 | seconds = timestamp.total_seconds() 138 | # Truncate the number to the given precision 139 | seconds = seconds - (seconds % precision_seconds) 140 | 141 | return str(datetime.timedelta(seconds=seconds)) 142 | elif isinstance(timestamp, datetime.datetime): # pragma: no cover 143 | # Python 2 doesn't have the timestamp method 144 | if hasattr(timestamp, 'timestamp'): 145 | seconds = timestamp.timestamp() 146 | else: 147 | seconds = timedelta_to_seconds(timestamp - epoch) 148 | 149 | # Truncate the number to the given precision 150 | seconds = seconds - (seconds % precision_seconds) 151 | 152 | try: # pragma: no cover 153 | dt = datetime.datetime.fromtimestamp(seconds) 154 | except (ValueError, OSError): # pragma: no cover 155 | dt = datetime.datetime.max 156 | return str(dt) 157 | elif isinstance(timestamp, datetime.date): 158 | return str(timestamp) 159 | elif timestamp is None: 160 | return '--:--:--' 161 | else: 162 | raise TypeError(f'Unknown type {type(timestamp)}: {timestamp!r}') 163 | 164 | 165 | @types.overload 166 | def 
_to_iterable( 167 | iterable: types.Union[ 168 | types.Callable[[], types.AsyncIterable[_T]], 169 | types.AsyncIterable[_T], 170 | ], 171 | ) -> types.AsyncIterable[_T]: ... 172 | 173 | 174 | @types.overload 175 | def _to_iterable( 176 | iterable: types.Union[ 177 | types.Callable[[], types.Iterable[_T]], types.Iterable[_T] 178 | ], 179 | ) -> types.Iterable[_T]: ... 180 | 181 | 182 | def _to_iterable( 183 | iterable: types.Union[ 184 | types.Iterable[_T], 185 | types.Callable[[], types.Iterable[_T]], 186 | types.AsyncIterable[_T], 187 | types.Callable[[], types.AsyncIterable[_T]], 188 | ], 189 | ) -> types.Union[types.Iterable[_T], types.AsyncIterable[_T]]: 190 | if callable(iterable): 191 | return iterable() 192 | else: 193 | return iterable 194 | 195 | 196 | def timeout_generator( 197 | timeout: types.delta_type, 198 | interval: types.delta_type = datetime.timedelta(seconds=1), 199 | iterable: types.Union[ 200 | types.Iterable[_T], types.Callable[[], types.Iterable[_T]] 201 | ] = itertools.count, # type: ignore[assignment] 202 | interval_multiplier: float = 1.0, 203 | maximum_interval: types.Optional[types.delta_type] = None, 204 | ) -> types.Iterable[_T]: 205 | """ 206 | Generator that walks through the given iterable (a counter by default) 207 | until the float_timeout is reached with a configurable float_interval 208 | between items. 209 | 210 | This can be used to limit the time spent on a slow operation. This can be 211 | useful for testing slow APIs so you get a small sample of the data in a 212 | reasonable amount of time. 213 | 214 | >>> for i in timeout_generator(0.1, 0.06): 215 | ... # Put your slow code here 216 | ... print(i) 217 | 0 218 | 1 219 | 2 220 | >>> timeout = datetime.timedelta(seconds=0.1) 221 | >>> interval = datetime.timedelta(seconds=0.06) 222 | >>> for i in timeout_generator(timeout, interval, itertools.count()): 223 | ... print(i) 224 | 0 225 | 1 226 | 2 227 | >>> for i in timeout_generator(1, interval=0.1, iterable='ab'): 228 | ... 
print(i) 229 | a 230 | b 231 | 232 | >>> timeout = datetime.timedelta(seconds=0.1) 233 | >>> interval = datetime.timedelta(seconds=0.06) 234 | >>> for i in timeout_generator(timeout, interval, interval_multiplier=2): 235 | ... print(i) 236 | 0 237 | 1 238 | 2 239 | """ 240 | float_interval: float = delta_to_seconds(interval) 241 | float_maximum_interval: types.Optional[float] = delta_to_seconds_or_none( 242 | maximum_interval 243 | ) 244 | iterable_ = _to_iterable(iterable) 245 | 246 | end = delta_to_seconds(timeout) + time.perf_counter() 247 | for item in iterable_: 248 | yield item 249 | 250 | if time.perf_counter() >= end: 251 | break 252 | 253 | time.sleep(float_interval) 254 | 255 | float_interval *= interval_multiplier 256 | if float_maximum_interval: 257 | float_interval = min(float_interval, float_maximum_interval) 258 | 259 | 260 | async def aio_timeout_generator( 261 | timeout: types.delta_type, # noqa: ASYNC109 262 | interval: types.delta_type = datetime.timedelta(seconds=1), 263 | iterable: types.Union[ 264 | types.AsyncIterable[_T], types.Callable[..., types.AsyncIterable[_T]] 265 | ] = aio.acount, 266 | interval_multiplier: float = 1.0, 267 | maximum_interval: types.Optional[types.delta_type] = None, 268 | ) -> types.AsyncGenerator[_T, None]: 269 | """ 270 | Async generator that walks through the given async iterable (a counter by 271 | default) until the float_timeout is reached with a configurable 272 | float_interval between items. 273 | 274 | The interval_exponent automatically increases the float_timeout with each 275 | run. Note that if the float_interval is less than 1, 1/interval_exponent 276 | will be used so the float_interval is always growing. To double the 277 | float_interval with each run, specify 2. 278 | 279 | Doctests and asyncio are not friends, so no examples. But this function is 280 | effectively the same as the `timeout_generator` but it uses `async for` 281 | instead. 
282 | """ 283 | float_interval: float = delta_to_seconds(interval) 284 | float_maximum_interval: types.Optional[float] = delta_to_seconds_or_none( 285 | maximum_interval 286 | ) 287 | iterable_ = _to_iterable(iterable) 288 | 289 | end = delta_to_seconds(timeout) + time.perf_counter() 290 | async for item in iterable_: # pragma: no branch 291 | yield item 292 | 293 | if time.perf_counter() >= end: 294 | break 295 | 296 | await asyncio.sleep(float_interval) 297 | 298 | float_interval *= interval_multiplier 299 | if float_maximum_interval: # pragma: no branch 300 | float_interval = min(float_interval, float_maximum_interval) 301 | 302 | 303 | async def aio_generator_timeout_detector( 304 | generator: types.AsyncGenerator[_T, None], 305 | timeout: types.Optional[types.delta_type] = None, # noqa: ASYNC109 306 | total_timeout: types.Optional[types.delta_type] = None, 307 | on_timeout: types.Optional[ 308 | types.Callable[ 309 | [ 310 | types.AsyncGenerator[_T, None], 311 | types.Optional[types.delta_type], 312 | types.Optional[types.delta_type], 313 | BaseException, 314 | ], 315 | types.Any, 316 | ] 317 | ] = exceptions.reraise, 318 | **on_timeout_kwargs: types.Mapping[types.Text, types.Any], 319 | ) -> types.AsyncGenerator[_T, None]: 320 | """ 321 | This function is used to detect if an asyncio generator has not yielded 322 | an element for a set amount of time. 323 | 324 | The `on_timeout` argument is called with the `generator`, `timeout`, 325 | `total_timeout`, `exception` and the extra `**kwargs` to this function as 326 | arguments. 327 | If `on_timeout` is not specified, the exception is reraised. 328 | If `on_timeout` is `None`, the exception is silently ignored and the 329 | generator will finish as normal. 
330 | """ 331 | if total_timeout is None: 332 | total_timeout_end = None 333 | else: 334 | total_timeout_end = time.perf_counter() + delta_to_seconds( 335 | total_timeout 336 | ) 337 | 338 | timeout_s = python_utils.delta_to_seconds_or_none(timeout) 339 | 340 | while True: 341 | try: 342 | if total_timeout_end and time.perf_counter() >= total_timeout_end: 343 | raise asyncio.TimeoutError( # noqa: TRY301 344 | 'Total timeout reached' 345 | ) 346 | 347 | if timeout_s: 348 | yield await asyncio.wait_for(generator.__anext__(), timeout_s) 349 | else: 350 | yield await generator.__anext__() 351 | 352 | except asyncio.TimeoutError as exception: # noqa: PERF203 353 | if on_timeout is not None: 354 | await on_timeout( 355 | generator, 356 | timeout, 357 | total_timeout, 358 | exception, 359 | **on_timeout_kwargs, 360 | ) 361 | break 362 | 363 | except StopAsyncIteration: 364 | break 365 | 366 | 367 | def aio_generator_timeout_detector_decorator( 368 | timeout: types.Optional[types.delta_type] = None, 369 | total_timeout: types.Optional[types.delta_type] = None, 370 | on_timeout: types.Optional[ 371 | types.Callable[ 372 | [ 373 | types.AsyncGenerator[types.Any, None], 374 | types.Optional[types.delta_type], 375 | types.Optional[types.delta_type], 376 | BaseException, 377 | ], 378 | types.Any, 379 | ] 380 | ] = exceptions.reraise, 381 | **on_timeout_kwargs: types.Mapping[types.Text, types.Any], 382 | ) -> types.Callable[ 383 | [types.Callable[_P, types.AsyncGenerator[_T, None]]], 384 | types.Callable[_P, types.AsyncGenerator[_T, None]], 385 | ]: 386 | """A decorator wrapper for aio_generator_timeout_detector.""" 387 | 388 | def _timeout_detector_decorator( 389 | generator: types.Callable[_P, types.AsyncGenerator[_T, None]], 390 | ) -> types.Callable[_P, types.AsyncGenerator[_T, None]]: 391 | """The decorator itself.""" 392 | 393 | @functools.wraps(generator) 394 | def wrapper( 395 | *args: _P.args, 396 | **kwargs: _P.kwargs, 397 | ) -> types.AsyncGenerator[_T, None]: 398 | 
return aio_generator_timeout_detector( 399 | generator(*args, **kwargs), 400 | timeout, 401 | total_timeout, 402 | on_timeout, 403 | **on_timeout_kwargs, 404 | ) 405 | 406 | return wrapper 407 | 408 | return _timeout_detector_decorator 409 | -------------------------------------------------------------------------------- /python_utils/types.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides type definitions and utility functions for type hinting. 3 | 4 | It includes: 5 | - Shorthand for commonly used types such as Optional and Union. 6 | - Type aliases for various data structures and common types. 7 | - Importing all types from the `typing` and `typing_extensions` modules. 8 | - Importing specific types from the `types` module. 9 | 10 | The module also configures Pyright to ignore wildcard import warnings. 11 | """ 12 | # pyright: reportWildcardImportFromLibrary=false 13 | # ruff: noqa: F405 14 | 15 | import datetime 16 | import decimal 17 | from re import Match, Pattern 18 | from types import * # pragma: no cover # noqa: F403 19 | from typing import * # pragma: no cover # noqa: F403 20 | 21 | # import * does not import these in all Python versions 22 | # Quickhand for optional because it gets so much use. If only Python had 23 | # support for an optional type shorthand such as `SomeType?` instead of 24 | # `Optional[SomeType]`. 25 | # Since the Union operator is only supported for Python 3.10, we'll create a 26 | # shorthand for it. 
27 | from typing import ( 28 | IO, 29 | BinaryIO, 30 | Optional as O, # noqa: N817 31 | TextIO, 32 | Union as U, # noqa: N817 33 | ) 34 | 35 | from typing_extensions import * # type: ignore[no-redef,assignment] # noqa: F403 36 | 37 | Scope = Dict[str, Any] 38 | OptionalScope = O[Scope] 39 | Number = U[int, float] 40 | DecimalNumber = U[Number, decimal.Decimal] 41 | ExceptionType = Type[Exception] 42 | ExceptionsType = U[Tuple[ExceptionType, ...], ExceptionType] 43 | StringTypes = U[str, bytes] 44 | 45 | delta_type = U[datetime.timedelta, int, float] 46 | timestamp_type = U[ 47 | datetime.timedelta, 48 | datetime.date, 49 | datetime.datetime, 50 | str, 51 | int, 52 | float, 53 | None, 54 | ] 55 | 56 | __all__ = [ 57 | 'IO', 58 | 'TYPE_CHECKING', 59 | # ABCs (from collections.abc). 60 | 'AbstractSet', 61 | # The types from the typing module. 62 | # Super-special typing primitives. 63 | 'Annotated', 64 | 'Any', 65 | # One-off things. 66 | 'AnyStr', 67 | 'AsyncContextManager', 68 | 'AsyncGenerator', 69 | 'AsyncGeneratorType', 70 | 'AsyncIterable', 71 | 'AsyncIterator', 72 | 'Awaitable', 73 | # Other concrete types. 74 | 'BinaryIO', 75 | 'BuiltinFunctionType', 76 | 'BuiltinMethodType', 77 | 'ByteString', 78 | 'Callable', 79 | # Concrete collection types. 80 | 'ChainMap', 81 | 'ClassMethodDescriptorType', 82 | 'ClassVar', 83 | 'CodeType', 84 | 'Collection', 85 | 'Concatenate', 86 | 'Container', 87 | 'ContextManager', 88 | 'Coroutine', 89 | 'CoroutineType', 90 | 'Counter', 91 | 'DecimalNumber', 92 | 'DefaultDict', 93 | 'Deque', 94 | 'Dict', 95 | 'DynamicClassAttribute', 96 | 'Final', 97 | 'ForwardRef', 98 | 'FrameType', 99 | 'FrozenSet', 100 | # Types from the `types` module. 
101 | 'FunctionType', 102 | 'Generator', 103 | 'GeneratorType', 104 | 'Generic', 105 | 'GetSetDescriptorType', 106 | 'Hashable', 107 | 'ItemsView', 108 | 'Iterable', 109 | 'Iterator', 110 | 'KeysView', 111 | 'LambdaType', 112 | 'List', 113 | 'Literal', 114 | 'Mapping', 115 | 'MappingProxyType', 116 | 'MappingView', 117 | 'Match', 118 | 'MemberDescriptorType', 119 | 'MethodDescriptorType', 120 | 'MethodType', 121 | 'MethodWrapperType', 122 | 'ModuleType', 123 | 'MutableMapping', 124 | 'MutableSequence', 125 | 'MutableSet', 126 | 'NamedTuple', # Not really a type. 127 | 'NewType', 128 | 'NoReturn', 129 | 'Number', 130 | 'Optional', 131 | 'OptionalScope', 132 | 'OrderedDict', 133 | 'ParamSpec', 134 | 'ParamSpecArgs', 135 | 'ParamSpecKwargs', 136 | 'Pattern', 137 | 'Protocol', 138 | # Structural checks, a.k.a. protocols. 139 | 'Reversible', 140 | 'Sequence', 141 | 'Set', 142 | 'SimpleNamespace', 143 | 'Sized', 144 | 'SupportsAbs', 145 | 'SupportsBytes', 146 | 'SupportsComplex', 147 | 'SupportsFloat', 148 | 'SupportsIndex', 149 | 'SupportsIndex', 150 | 'SupportsInt', 151 | 'SupportsRound', 152 | 'Text', 153 | 'TextIO', 154 | 'TracebackType', 155 | 'TracebackType', 156 | 'Tuple', 157 | 'Type', 158 | 'TypeAlias', 159 | 'TypeGuard', 160 | 'TypeVar', 161 | 'TypedDict', # Not really a type. 162 | 'Union', 163 | 'ValuesView', 164 | 'WrapperDescriptorType', 165 | 'cast', 166 | 'coroutine', 167 | 'delta_type', 168 | 'final', 169 | 'get_args', 170 | 'get_origin', 171 | 'get_type_hints', 172 | 'is_typeddict', 173 | 'new_class', 174 | 'no_type_check', 175 | 'no_type_check_decorator', 176 | 'overload', 177 | 'prepare_class', 178 | 'resolve_bases', 179 | 'runtime_checkable', 180 | 'timestamp_type', 181 | ] 182 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | . 
2 | -------------------------------------------------------------------------------- /ruff.toml: -------------------------------------------------------------------------------- 1 | # We keep the ruff configuration separate so it can easily be shared across 2 | # all projects 3 | 4 | target-version = 'py39' 5 | 6 | exclude = [ 7 | '.venv', 8 | '.tox', 9 | # Ignore local test files/directories/old-stuff 10 | 'test.py', 11 | '*_old.py', 12 | ] 13 | 14 | line-length = 79 15 | 16 | [lint] 17 | ignore = [ 18 | 'A001', # Variable {name} is shadowing a Python builtin 19 | 'A002', # Argument {name} is shadowing a Python builtin 20 | 'A003', # Class attribute {name} is shadowing a Python builtin 21 | 'B023', # function-uses-loop-variable 22 | 'B024', # `FormatWidgetMixin` is an abstract base class, but it has no abstract methods 23 | 'D205', # blank-line-after-summary 24 | 'D212', # multi-line-summary-first-line 25 | 'RET505', # Unnecessary `else` after `return` statement 26 | 'TRY003', # Avoid specifying long messages outside the exception class 27 | 'RET507', # Unnecessary `elif` after `continue` statement 28 | 'C405', # Unnecessary {obj_type} literal (rewrite as a set literal) 29 | 'C406', # Unnecessary {obj_type} literal (rewrite as a dict literal) 30 | 'C408', # Unnecessary {obj_type} call (rewrite as a literal) 31 | 'SIM114', # Combine `if` branches using logical `or` operator 32 | 'RET506', # Unnecessary `else` after `raise` statement 33 | 'Q001', # Remove bad quotes 34 | 'Q002', # Remove bad quotes 35 | 'FA100', # Missing `from __future__ import annotations`, but uses `typing.Optional` 36 | 'COM812', # Missing trailing comma in a list 37 | 'ISC001', # String concatenation with implicit str conversion 38 | 'SIM108', # Ternary operators are not always more readable 39 | 'RUF100', # Unused noqa directives. 
Due to multiple Python versions, we need to keep them 40 | ] 41 | 42 | select = [ 43 | 'A', # flake8-builtins 44 | 'ASYNC', # flake8 async checker 45 | 'B', # flake8-bugbear 46 | 'C4', # flake8-comprehensions 47 | 'C90', # mccabe 48 | 'COM', # flake8-commas 49 | 50 | ## Require docstrings for all public methods, would be good to enable at some point 51 | 'D', # pydocstyle 52 | 53 | 'E', # pycodestyle error ('W' for warning) 54 | 'F', # pyflakes 55 | 'FA', # flake8-future-annotations 56 | 'I', # isort 57 | 'ICN', # flake8-import-conventions 58 | 'INP', # flake8-no-pep420 59 | 'ISC', # flake8-implicit-str-concat 60 | 'N', # pep8-naming 61 | 'NPY', # NumPy-specific rules 62 | 'PERF', # perflint, 63 | 'PIE', # flake8-pie 64 | 'Q', # flake8-quotes 65 | 66 | 'RET', # flake8-return 67 | 'RUF', # Ruff-specific rules 68 | 'SIM', # flake8-simplify 69 | 'T20', # flake8-print 70 | 'TD', # flake8-todos 71 | 'TRY', # tryceratops 72 | 'UP', # pyupgrade 73 | ] 74 | 75 | [lint.per-file-ignores] 76 | '*tests/*' = ['INP001', 'T201', 'T203', 'ASYNC109', 'B007'] 77 | 'examples.py' = ['T201', 'N806'] 78 | 'docs/conf.py' = ['E501', 'INP001'] 79 | 'docs/_theme/flask_theme_support.py' = ['RUF012', 'INP001'] 80 | '*/types.py' = ['F405'] 81 | 82 | [lint.pydocstyle] 83 | convention = 'google' 84 | ignore-decorators = [ 85 | 'typing.overload', 86 | 'typing.override', 87 | ] 88 | 89 | [lint.isort] 90 | case-sensitive = true 91 | combine-as-imports = true 92 | force-wrap-aliases = true 93 | 94 | [lint.flake8-quotes] 95 | docstring-quotes = 'single' 96 | inline-quotes = 'single' 97 | multiline-quotes = 'single' 98 | 99 | [format] 100 | line-ending = 'lf' 101 | indent-style = 'space' 102 | quote-style = 'single' 103 | docstring-code-format = true 104 | skip-magic-trailing-comma = false 105 | exclude = [ 106 | '__init__.py', 107 | ] 108 | 109 | [lint.pycodestyle] 110 | max-line-length = 79 111 | 112 | [lint.flake8-pytest-style] 113 | mark-parentheses = true 114 | 115 | 
-------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [aliases] 2 | test=pytest 3 | 4 | [metadata] 5 | description-file = README.rst 6 | 7 | [nosetests] 8 | verbosity=3 9 | with-doctest=1 10 | with-coverage=1 11 | cover-package=python_utils 12 | cover-min-percentage=100 13 | detailed-errors=1 14 | debug=nose.loader 15 | pdb=1 16 | # pdb-failures=1 17 | 18 | [build_sphinx] 19 | source-dir = docs/ 20 | build-dir = docs/_build 21 | all_files = 1 22 | 23 | [upload_sphinx] 24 | upload-dir = docs/_build/html 25 | 26 | [bdist_wheel] 27 | universal = 1 28 | 29 | [upload] 30 | sign = 1 31 | 32 | [flake8] 33 | per-file-ignores = 34 | python_utils/types.py: F403,F405 35 | ignore = W391, W503, E741, E203, F811 36 | exclude = 37 | docs 38 | 39 | [mypy] 40 | files = 41 | python_utils, 42 | _python_utils_tests 43 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """ 2 | Setup script for the python-utils package. 3 | 4 | This script uses setuptools to package the python-utils library. It reads 5 | metadata from the `python_utils/__about__.py` file and the `README.rst` file to 6 | populate the package information. The script also defines the package 7 | requirements and optional dependencies for different use cases such as logging, 8 | documentation, and testing. 
9 | """ 10 | 11 | import pathlib 12 | 13 | import setuptools 14 | 15 | # pyright: reportUnknownMemberType=false 16 | 17 | # To prevent importing about and thereby breaking the coverage info we use this 18 | # exec hack 19 | about: dict[str, str] = {} 20 | with open('python_utils/__about__.py') as fp: 21 | exec(fp.read(), about) 22 | 23 | _readme_path = pathlib.Path(__file__).parent / 'README.rst' 24 | if _readme_path.exists() and _readme_path.is_file(): 25 | long_description = _readme_path.read_text() 26 | else: 27 | long_description = 'See http://pypi.python.org/pypi/python-utils/' 28 | 29 | if __name__ == '__main__': 30 | setuptools.setup( 31 | python_requires='>=3.9.0', 32 | name='python-utils', 33 | version=about['__version__'], 34 | author=about['__author__'], 35 | author_email=about['__author_email__'], 36 | description=about['__description__'], 37 | url=about['__url__'], 38 | license='BSD', 39 | packages=setuptools.find_packages( 40 | exclude=['_python_utils_tests', '*.__pycache__'], 41 | ), 42 | package_data={'python_utils': ['py.typed']}, 43 | long_description=long_description, 44 | install_requires=['typing_extensions>3.10.0.2'], 45 | extras_require={ 46 | 'loguru': [ 47 | 'loguru', 48 | ], 49 | 'docs': [ 50 | 'mock', 51 | 'sphinx', 52 | 'python-utils', 53 | ], 54 | 'tests': [ 55 | 'ruff', 56 | 'pyright', 57 | 'pytest', 58 | 'pytest-cov', 59 | 'pytest-mypy', 60 | 'pytest-asyncio', 61 | 'sphinx', 62 | 'types-setuptools', 63 | 'loguru', 64 | 'loguru-mypy', 65 | 'mypy-ipython', 66 | 'blessings', 67 | ], 68 | }, 69 | classifiers=['License :: OSI Approved :: BSD License'], 70 | ) 71 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = ruff, black, pypy3, py39, py310, py311, py312, py313, docs, mypy, pyright 3 | skip_missing_interpreters = True 4 | 5 | [testenv] 6 | basepython = 7 | py39: python3.9 8 | py310: 
python3.10 9 | py311: python3.11 10 | py312: python3.12 11 | py313: python3.13 12 | pypy3: pypy3 13 | 14 | setenv = PY_IGNORE_IMPORTMISMATCH=1 15 | deps = 16 | mypy 17 | pyright 18 | -r{toxinidir}/_python_utils_tests/requirements.txt 19 | commands = 20 | mypy 21 | pyright 22 | py.test --basetemp="{envtmpdir}" --confcutdir=.. {posargs} python_utils _python_utils_tests 23 | 24 | [testenv:ruff] 25 | basepython = python3 26 | deps = ruff 27 | commands = ruff check {toxinidir}/setup.py {toxinidir}/_python_utils_tests {toxinidir}/python_utils 28 | 29 | [testenv:black] 30 | basepython = python3 31 | deps = black 32 | commands = black --skip-string-normalization --line-length 79 {toxinidir}/setup.py {toxinidir}/_python_utils_tests {toxinidir}/python_utils 33 | 34 | [testenv:pyright] 35 | basepython = python3 36 | deps = 37 | pyright 38 | -r{toxinidir}/_python_utils_tests/requirements.txt 39 | commands = pyright {posargs} 40 | 41 | [testenv:mypy] 42 | basepython = python3 43 | deps = -r{toxinidir}/_python_utils_tests/requirements.txt 44 | commands = mypy {posargs} 45 | 46 | [testenv:docs] 47 | changedir = 48 | basepython = python3 49 | deps = -r{toxinidir}/docs/requirements.txt 50 | allowlist_externals = 51 | rm 52 | mkdir 53 | whitelist_externals = 54 | rm 55 | cd 56 | mkdir 57 | commands = 58 | rm -f docs/project_name.rst 59 | rm -f docs/modules.rst 60 | mkdir -p docs/_static 61 | sphinx-apidoc -o docs/ python_utils 62 | rm -f docs/modules.rst 63 | sphinx-build -W -b html -d docs/_build/doctrees docs docs/_build/html {posargs} 64 | 65 | --------------------------------------------------------------------------------