├── .github └── workflows │ └── python-publish.yml ├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── CONTRIBUTORS.txt ├── LICENSE ├── README.md ├── conf.py ├── easypy ├── __init__.py ├── _multithreading_init.py ├── aliasing.py ├── bunch.py ├── caching.py ├── collections.py ├── colors.py ├── concurrency.py ├── contexts.py ├── decorations.py ├── deprecation.py ├── exceptions.py ├── fixtures.py ├── gevent.py ├── humanize.py ├── interaction.py ├── lockstep.py ├── logging │ ├── __init__.py │ ├── _logbook.py │ ├── _logging.py │ ├── heartbeats.py │ └── progressbar.py ├── meta.py ├── misc.py ├── predicates.py ├── properties.py ├── random.py ├── resilience.py ├── semver.py ├── signals.py ├── sync.py ├── tables.py ├── threadtree.py ├── timing.py ├── tokens.py ├── typed_struct.py ├── units.py ├── words.py └── ziplog.py ├── examples ├── logbook_init.py └── logging_init.py ├── index.rst ├── pytest.ini ├── setup.py ├── test_package.py └── tests ├── __init__.py ├── conftest.py ├── indentable_buffer1.txt ├── indentable_buffer2.txt ├── indentable_buffer3.txt ├── test_aliasing.py ├── test_bunch.py ├── test_caching.py ├── test_collections.py ├── test_colors.py ├── test_concurrency.py ├── test_contexts.py ├── test_decorations.py ├── test_deprecation.py ├── test_exceptions.py ├── test_humanize.py ├── test_lockstep.py ├── test_logging.py ├── test_meta.py ├── test_misc.py ├── test_randutils.py ├── test_resilience.py ├── test_rwlock.py ├── test_semver.py ├── test_signals.py ├── test_sync.py ├── test_timing.py ├── test_typed_struct.py ├── test_units.py └── test_ziplog.py /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that 
are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v4 25 | - name: Set up Python 26 | uses: actions/setup-python@v4 27 | with: 28 | python-version: '3.x' 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install build 33 | - name: Build package 34 | run: python -m build 35 | - name: Publish package 36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 37 | with: 38 | user: __token__ 39 | password: ${{ secrets.PYPI_API_TOKEN }} 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | .pytest_cache 4 | *.py[cod] 5 | *$py.class 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | env/ 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # IPython Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # dotenv 80 | .env 81 | 82 | # virtualenv 83 | venv/ 84 | ENV/ 85 | 86 | # Spyder project settings 87 | .spyderproject 88 | 89 | # Rope project settings 90 | .ropeproject 91 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | stages: 2 | - name: test 3 | if: tag IS blank 4 | - name: deploy 5 | if: tag IS present 6 | language: python 7 | python: 8 | - '3.8' 9 | - '3.7' 10 | - '3.6' 11 | - 3.9-dev 12 | - 3.8-dev 13 | - 3.7-dev 14 | - 3.6-dev 15 | env: 16 | - EASYPY_AUTO_PATCH_GEVENT=true 17 | - EASYPY_AUTO_PATCH_GEVENT=false 18 | - EASYPY_AUTO_PATCH_LOGGING=logging 19 | - EASYPY_AUTO_PATCH_LOGGING=logbook 20 | install: 21 | - pip install PyYaml gevent pytest-random-order logbook 22 | before_script: 23 | - export TZ=Asia/Jerusalem 24 | script: 25 | - python -m pytest --random-order-bucket=package -vv test_package.py 26 | - python -m pytest --random-order-bucket=package -vv tests 27 | - python -m pytest --random-order-bucket=package -vv --doctest-modules easypy 28 | notifications: 29 | email: 30 | on_success: change 31 | on_failure: change 32 | jobs: 33 | include: 34 | - stage: deploy 35 | python: '3.8' 36 | env: 
GEVENT=false 37 | script: skip 38 | install: skip 39 | deploy: 40 | provider: pypi 41 | user: __token__ 42 | on: 43 | tags: true 44 | branch: master 45 | skip_existing: true 46 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## Unreleased 8 | 9 | 10 | ## [0.5.0] - 2020-08-25 11 | 12 | ### Changed 13 | - Logging Initialization 14 | - a run-once `initialize` method takes care of it all 15 | - removed `set_width`, `set_coloring`, `set_graphics` 16 | - the env-var `TERM_COLOR_SUPPORT` is no longer checked 17 | - the env-var `EASYPY_LOG_INDENTATION` is no longer checked 18 | - DataSize - return 'byte' and 'MiB' instead of '1 byte' and '1 MiB', except in `.render(humanize=True)` 19 | 20 | ### Added 21 | - logging: support for `logbook` as the underlying logging framework 22 | - logging: auto-initialize via env-var `EASYPY_AUTO_PATCH_LOGGING=logging|logbook` 23 | - logging: env-vars for configuring logging - `EASYPY_AUTO_GRAPHICAL_LOGGING`, `EASYPY_AUTO_COLORED_LOGGING` 24 | - gevent: auto-patch via env-var `EASYPY_AUTO_PATCH_GEVENT=yes` 25 | - resilience: support returning exceptions captured by `resilient` decorator 26 | - Examples 27 | - a skeleton for initializing logging with easypy 28 | 29 | ## [0.4.3] - 2020-04-28 30 | 31 | ### Added 32 | - collections: make `separate`, `grouped`, `chunkify` and `takesome` methods of the ObjectCollection classes 33 | 34 | ### Fixed 35 | - support for gevent 1.5 36 | 37 | ## [0.4.2] - 2020-04-28 38 | 39 | ### Added 40 | - `ensure_same_defaults` decorator for setting one function's defaults as source of truth for other function 41 | 
- contexts: added `breakable_section()`, which allows breaking out of a context manager as if it were a loop 42 | - gevent: use gevent's native-threads instead of easypy's 43 | 44 | ### Fixed 45 | - aliasing: Improve `RecursionError` for aliasing-induced infinite recursion 46 | - concurrency: don't swallow `KeyboardInterrupt` when running under gevent 47 | - colors: bug fix in handling markups with no text 48 | - units: fix `__rfloordiv__` handling of unsupported operands 49 | 50 | ## [0.4.1] - 2020-04-27 51 | 52 | Project renamed as 'real-easypy' 53 | 54 | ## [0.4.0] - 2019-11-14 55 | 56 | ### Changed 57 | - units: DataSize - return 'byte' and 'MiB' instead of '1 byte' and '1 MiB' 58 | - units: Duration(inf) is now 'Eternity' instead of 'Never' 59 | - timing: The Timer class now renders the duration using the Duration's repr, instead of as a float 60 | - aliasing: Improved `RecursionError` for aliasing-induced infinite recursion 61 | 62 | ### Added 63 | - CLI: expose 'colorize' and 'ziplog' as clis 64 | - units: Added a `Percentage` class 65 | - timing: Added a `TimeInterval` class, for use for timestamp comparisons. Can be converted to/from the `Timer` class 66 | - colors: Added '@[..]@' as coloring markup delimiter 67 | - concurrency: Add parent's name to new thread's name, to show hierarchy 68 | - concurrency: Add `done()` function to `concurrent` to check if the thread is finished. 
69 | - collections: Add `.menu()` to `ObjectCollectionBase`, integrating with `termenu` 70 | - semver: Added a module and class for parsing and comparing semver-styled version strings 71 | - random: Added 'perchance', for a common random use-case 72 | - bunch: Added `.render()` to render the data cleanly 73 | - exceptions: support pickling 74 | - CI: Add python 3.7 and 3.8 75 | 76 | ### Fixed 77 | - sync: prevent an import cycle 78 | - colors/ziplog: swallow BrokenPipeError when parsing from cmdline 79 | - aliasing: bug fix for 'Cannot create a consistent method resolution', add unittest 80 | - collections: workaround python bug (3.6.8+) importing 'test.support' 81 | 82 | ### Changed 83 | - Logging Initialization 84 | - a run-once `initialize` method takes care of it all 85 | - removed `set_width`, `set_coloring`, `set_graphics` 86 | - the env-var `TERM_COLOR_SUPPORT` is no longer checked 87 | - the env-var `EASYPY_LOG_INDENTATION` is no longer checked 88 | 89 | ## [0.3.1] - 2019-07-30 90 | 91 | ### Added 92 | - ci: run tests in random order 93 | - collections: added unittest 94 | 95 | ### Fixed 96 | - sync: Don't release unacquired lock 97 | - caching: Don't leak open db, as it is concurrent access which has undefined behavior 98 | - concurrency 99 | - refactor to fix stalling of main-thread after Futures.executor breaks on exception 100 | - Update max number of threads 101 | - logging: set default host in thread logging context 102 | - resilience: match ``retry`` and ``retrying`` function signature 103 | 104 | 105 | ## [0.3.0] - 2019-06-10 106 | 107 | ### Added 108 | - Support for suppressing and soloing logging to console per thread. 109 | - `TypedStruct`: Support for inheritance. 110 | - `EasyMeta`: the `before_subclass_init` hook. 111 | - `wait` and `iter_wait` support `log_interval` and `log_level` for printing 112 | the thrown `PredicateNotSatisfied` to the log. 
113 | - `takesome`: a new generator that partially yields a sequence 114 | - `repr` and `hash` to typed struct fields. 115 | - `PersistentCache`: allow disabling persistence via env-var (`DISABLE_CACHING_PERSISTENCE`) 116 | - collections: raising user-friendly exceptions for failed object queries (too many, too few) 117 | 118 | ### Fixed 119 | - `ExponentialBackoff`: return the value **before** the incrementation. 120 | - `concurrent`: capture `KeyboardInterrupt` exceptions like any other. 121 | - doctests in various functions and classes. 122 | - `SynchronizedSingleton` on `contextmanager` deadlock when some (but not all) 123 | of the CMs throw. 124 | - `resilient` between `timecache`s bug. 125 | - `FilterCollection`: deal with missing attributes on objects (#163) 126 | - `PersistentCache`: don't clear old version when changing cache version 127 | - concurrency: documentation 128 | - `SynchronizedSingleton`: deadlock condition when used with `contextmanager` (#150) 129 | - concurrency: make 'async' available only in python < 3.7, where it became reserved 130 | 131 | ### Changed 132 | - Reorganization: 133 | - Moved tokens to a proper module. 134 | - Moved function from `easypy.concurrency` and `easypy.timing` to new module 135 | `easypy.sync` 136 | - Moved `throttled` from `easypy.concurrency` to `easypy.timing`. 137 | - `easypy.signals`: Async handlers are invoked first, then the sequential handlers. 138 | - `async` -> `asynchronous`: to support python 3.7, where this word is reserved 139 | - `concurrency.concurrent`: `.result` property changed into method `.result()`, which also waits on the thread 140 | - `easypy.colors`: clean-up, documentation, and reverse-parsing from ansi to markup 141 | 142 | ### Removed 143 | - `Bunch`: The rigid `KEYS` feature. 144 | - `synchronized_on_first_call`. 145 | - `ExponentialBackoff`: The unused `iteration` argument. 
146 | - `easypy.cartesian` 147 | - `easypy.selective_queue` 148 | - `easypy.timezone` 149 | 150 | ### Deprecated 151 | - `locking_lru_cache`. 152 | 153 | ## [0.2.0] - 2018-11-15 154 | ### Added 155 | - Add the `easypy.aliasing` module. 156 | - Add the `easypy.bunch` module. 157 | - Add the `easypy.caching` module. 158 | - Add the `easypy.cartesian` module. 159 | - Add the `easypy.collections` module. 160 | - Add the `easypy.colors` module. 161 | - Add the `easypy.concurrency` module. 162 | - Add the `easypy.contexts` module. 163 | - Add the `easypy.decorations` module. 164 | - Add the `easypy.exceptions` module. 165 | - Add the `easypy.fixtures` module. 166 | - Add the `easypy.gevent` module. 167 | - Add the `easypy.humanize` module. 168 | - Add the `easypy.interaction` module. 169 | - Add the `easypy.lockstep` module. 170 | - Add the `easypy.logging` module. 171 | - Add the `easypy.meta` module. 172 | - Add the `easypy.misc` module. 173 | - Add the `easypy.mocking` module. 174 | - Add the `easypy.predicates` module. 175 | - Add the `easypy.properties` module. 176 | - Add the `easypy.randutils` module. 177 | - Add the `easypy.resilience` module. 178 | - Add the `easypy.selective_queue` module. 179 | - Add the `easypy.signals` module. 180 | - Add the `easypy.tables` module. 181 | - Add the `easypy.threadtree` module. 182 | - Add the `easypy.timezone` module. 183 | - Add the `easypy.timing` module. 184 | - Add the `easypy.typed_struct` module. 185 | - Add the `easypy.units` module. 186 | - Add the `easypy.words` module. 187 | - Add the `easypy.ziplog` module. 188 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | Docstring Format 5 | ---------------- 6 | 7 | The documentation style is [Sphinx' reStructuredText](http://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html). 
Here is an example demonstrating the most common syntax: 8 | 9 | ```python 10 | def foo(a, b): 11 | """ 12 | Short description of the function. 13 | 14 | :param a: description of the parameter 15 | :type a: the type of a parameter 16 | :param int b: another syntax, for specifying the parameter's type and description together 17 | 18 | :return: what the function returns 19 | :rtype: the type the function returns 20 | 21 | :raises TypeError: possible exception and when is it thrown 22 | 23 | Longer description of the function, may contain multiple lines. 24 | 25 | Italic is formatted with a single asterisk - *italic*. 26 | 27 | Bold is formatted with a double asterisks - **bold**. 28 | 29 | Inline code style is formatted with double backticks: ``identifier``. 30 | 31 | Code blocks are formatted with the code-block directive and an indentation: 32 | 33 | code-block:: 34 | 35 | assert foo(1, 1) == True 36 | 37 | Or, you can add double colon at the end of a line to include a code block after that line:: 38 | 39 | assert foo(1, 2) == False 40 | 41 | Example of interactive Python session that demonstrate the function are formatted with three greater-thans: 42 | 43 | >>> foo(2, 2) 44 | True 45 | 46 | The interactive Python session formatting ends after one blank line 47 | """ 48 | if type(a) is not type(b): 49 | raise TypeError('a and b are not of the same type') 50 | return a == b 51 | ``` 52 | -------------------------------------------------------------------------------- /CONTRIBUTORS.txt: -------------------------------------------------------------------------------- 1 | Ofer Koren 2 | Anton Bykov 3 | Idan Arye 4 | Navot Silberstein 5 | Roi Padan 6 | Tomer Filiba 7 | David Baum 8 | Lee Keren 9 | Roy Zvibel 10 | Or Dahan 11 | Amir Tadmor 12 | Ariel Saar 13 | Zohar Zilberman 14 | Hillel Costeff 15 | Yotam Rubin 16 | Alex Goltman 17 | Tomer Godinger 18 | Alon Horev -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | Copyright 2017 Weka.IO 2 | 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 6 | 7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
12 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://api.travis-ci.org/real-easypy/easypy.svg?branch=master)](https://travis-ci.org/real-easypy/easypy) 2 | [![Documentation](https://img.shields.io/badge/api-sphinx-blue.svg)](https://real-easypy.github.io/easypy/) 3 | 4 | # easypy 5 | easypy makes python even easier! 6 | 7 | Many boiler-plate patterns are reduced to simple yet richly featured constructs: 8 | - concurrency - running asynchronous code easily and manageably 9 | - resiliency - retrying or swallowing exceptions 10 | - synchronization - poll-and-wait for long-running process to complete 11 | - collections - filter and choose objects using keywords and predicates 12 | 13 | In addition, there are humanization modules that help both code and logs more user-friendly 14 | - represting data-size and time units 15 | - randomizing strings 16 | - coloring and structured logging 17 | - tokens 18 | 19 | ... And much more! 20 | 21 | Contributers please read [CONTRIBUTING.md](CONTRIBUTING.md). 22 | -------------------------------------------------------------------------------- /conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/stable/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
14 | # 15 | # import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'easypy' 23 | copyright = '2017, Weka.IO' 24 | author = 'Weka.IO' 25 | 26 | # The short X.Y version 27 | version = '' 28 | # The full version, including alpha/beta/rc tags 29 | release = '' 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # If your documentation needs a minimal Sphinx version, state it here. 35 | # 36 | # needs_sphinx = '1.0' 37 | 38 | # Add any Sphinx extension module names here, as strings. They can be 39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 40 | # ones. 41 | extensions = [ 42 | 'sphinx.ext.autodoc', 43 | # 'sphinx.ext.autosummary', 44 | # 'sphinxcontrib.fulltoc', 45 | ] 46 | 47 | # autodoc_default_flags = ['members'] 48 | # autosummary_generate = True 49 | 50 | # Add any paths that contain templates here, relative to this directory. 51 | templates_path = ['_templates'] 52 | 53 | # The suffix(es) of source filenames. 54 | # You can specify multiple suffix as a list of string: 55 | # 56 | # source_suffix = ['.rst', '.md'] 57 | source_suffix = '.rst' 58 | 59 | # The master toctree document. 60 | master_doc = 'index' 61 | 62 | # The language for content autogenerated by Sphinx. Refer to documentation 63 | # for a list of supported languages. 64 | # 65 | # This is also used if you do content translation via gettext catalogs. 66 | # Usually you set "language" from the command line for these cases. 67 | language = None 68 | 69 | # List of patterns, relative to source directory, that match files and 70 | # directories to ignore when looking for source files. 71 | # This pattern also affects html_static_path and html_extra_path . 72 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 73 | 74 | # The name of the Pygments (syntax highlighting) style to use. 
75 | pygments_style = 'sphinx' 76 | 77 | 78 | modindex_common_prefix = ['easypy.'] 79 | 80 | 81 | # -- Options for HTML output ------------------------------------------------- 82 | 83 | # The theme to use for HTML and HTML Help pages. See the documentation for 84 | # a list of builtin themes. 85 | # 86 | html_theme = 'classic' 87 | 88 | # Theme options are theme-specific and customize the look and feel of a theme 89 | # further. For a list of options available for each theme, see the 90 | # documentation. 91 | # 92 | # html_theme_options = {} 93 | html_theme_options = { 94 | 'stickysidebar': True, 95 | } 96 | 97 | # Add any paths that contain custom static files (such as style sheets) here, 98 | # relative to this directory. They are copied after the builtin static files, 99 | # so a file named "default.css" will overwrite the builtin "default.css". 100 | html_static_path = ['_static'] 101 | 102 | # Custom sidebar templates, must be a dictionary that maps document names 103 | # to template names. 104 | # 105 | # The default sidebars (for documents that don't match any pattern) are 106 | # defined by theme itself. Builtin themes are using these templates by 107 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 108 | # 'searchbox.html']``. 109 | # 110 | # html_sidebars = {} 111 | html_sidebars = { 112 | '**': ['localtoc.html', 'globaltoc.html', 'searchbox.html'], 113 | } 114 | 115 | 116 | # -- Options for HTMLHelp output --------------------------------------------- 117 | 118 | # Output file base name for HTML help builder. 119 | htmlhelp_basename = 'easypydoc' 120 | 121 | 122 | # -- Options for LaTeX output ------------------------------------------------ 123 | 124 | latex_elements = { 125 | # The paper size ('letterpaper' or 'a4paper'). 126 | # 127 | # 'papersize': 'letterpaper', 128 | 129 | # The font size ('10pt', '11pt' or '12pt'). 130 | # 131 | # 'pointsize': '10pt', 132 | 133 | # Additional stuff for the LaTeX preamble. 
134 | # 135 | # 'preamble': '', 136 | 137 | # Latex figure (float) alignment 138 | # 139 | # 'figure_align': 'htbp', 140 | } 141 | 142 | # Grouping the document tree into LaTeX files. List of tuples 143 | # (source start file, target name, title, 144 | # author, documentclass [howto, manual, or own class]). 145 | latex_documents = [ 146 | (master_doc, 'easypy.tex', 'easypy Documentation', 147 | 'Weka.IO', 'manual'), 148 | ] 149 | 150 | 151 | # -- Options for manual page output ------------------------------------------ 152 | 153 | # One entry per manual page. List of tuples 154 | # (source start file, name, description, authors, manual section). 155 | man_pages = [ 156 | (master_doc, 'easypy', 'easypy Documentation', 157 | [author], 1) 158 | ] 159 | 160 | 161 | # -- Options for Texinfo output ---------------------------------------------- 162 | 163 | # Grouping the document tree into Texinfo files. List of tuples 164 | # (source start file, target name, title, author, 165 | # dir menu entry, description, category) 166 | texinfo_documents = [ 167 | (master_doc, 'easypy', 'easypy Documentation', 168 | author, 'easypy', 'One line description of project.', 169 | 'Miscellaneous'), 170 | ] 171 | 172 | 173 | # -- Extension configuration ------------------------------------------------- 174 | -------------------------------------------------------------------------------- /easypy/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | # this is used internally, so we define it here, and copy it into .humanize 4 | def yesno_to_bool(s): 5 | s = s.lower() 6 | if s not in ("yes", "no", "true", "false", "1", "0"): 7 | raise ValueError("Unrecognized boolean value: %r" % (s,)) 8 | return s in ("yes", "true", "1") 9 | 10 | 11 | gevent = os.getenv('EASYPY_AUTO_PATCH_GEVENT', '') 12 | if gevent and yesno_to_bool(gevent): 13 | from easypy.gevent import apply_patch 14 | apply_patch() 15 | 16 | 17 | framework = 
os.getenv('EASYPY_AUTO_PATCH_LOGGING', '') 18 | if framework: 19 | from easypy.logging import initialize 20 | initialize(framework=framework, patch=True) 21 | -------------------------------------------------------------------------------- /easypy/_multithreading_init.py: -------------------------------------------------------------------------------- 1 | """ 2 | Common initialization that needs to be called before any easypy module that deals with multithreading 3 | """ 4 | 5 | from uuid import uuid4, UUID 6 | from weakref import WeakKeyDictionary 7 | 8 | 9 | MAIN_UUID = UUID(int=0) 10 | UUIDS_TREE = WeakKeyDictionary() 11 | UUID_TO_IDENT = WeakKeyDictionary() 12 | IDENT_TO_UUID = {} 13 | _BOOTSTRAPPERS = set() 14 | 15 | 16 | def _set_thread_uuid(ident, parent_uuid=MAIN_UUID): 17 | uuid = uuid4() 18 | IDENT_TO_UUID[ident] = uuid 19 | UUIDS_TREE[uuid] = parent_uuid 20 | 21 | 22 | def _set_main_uuid(): 23 | import threading 24 | IDENT_TO_UUID[threading.main_thread().ident] = MAIN_UUID 25 | UUID_TO_IDENT[MAIN_UUID] = threading.main_thread().ident 26 | 27 | 28 | def get_thread_uuid(thread=None): 29 | """ 30 | Assigns and returns a UUID to our thread, since thread.ident can be recycled. 31 | The UUID is used in mapping child threads to their parent threads. 32 | """ 33 | import threading 34 | if not thread: 35 | thread = threading.current_thread() 36 | 37 | ident = thread.ident 38 | try: 39 | uuid = IDENT_TO_UUID[ident] 40 | except KeyError: 41 | uuid = IDENT_TO_UUID.setdefault(ident, uuid4()) 42 | UUID_TO_IDENT[uuid] = ident 43 | return uuid 44 | 45 | 46 | _set_main_uuid() 47 | -------------------------------------------------------------------------------- /easypy/aliasing.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module is all about its 'aliases' (v.) 
class AliasingMixin():
    """Mixin that delegates unknown attribute lookups to a contained object.

    The hosting class sets ``_ALIAS`` to the name of the attribute holding
    the aliased object; non-underscore attributes missing on the host are
    then looked up on that object (see the ``aliases`` class decorator).
    """

    @property
    def _aliased(self):
        # The object we delegate to, as named by ``_ALIAS``
        try:
            return getattr(self, self._ALIAS)
        except AttributeError:
            # fixed typo: "does no contain" -> "does not contain"
            raise RuntimeError("object %r does not contain aliased object %r" % (self, self._ALIAS))

    def __dir__(self):
        # our own members plus the aliased object's public members
        members = set(super_dir(self))
        members.update(n for n in dir(self._aliased) if not n.startswith("_"))
        return sorted(members)

    def __getattr__(self, attr):
        if attr.startswith("_"):
            # never delegate private/dunder lookups
            raise AttributeError(attr)
        try:
            return getattr(self._aliased, attr)
        except AttributeError:
            raise AttributeError("'%s' object has no attribute '%s'" % (type(self).__name__, attr)) from None
        except _RecursionError as e:
            # pre-3.5 compatibility: only treat RuntimeError as recursion when it really is one
            if type(e) is RuntimeError and str(e) != 'maximum recursion depth exceeded':
                raise
            raise _RecursionError('Infinite recursion trying to access {attr!r} on {obj!r} (via {type_name}.{alias}.{attr})'.format(
                attr=attr,
                obj=self,
                type_name=type(self).__name__,
                alias=self._ALIAS))
# TODO: some methods like fromkeys/update can insert keys which are not strings - and that causes problems.
# need to either convert keys to strings (like in _convert), or raise ValueError.
class Bunch(dict):
    """A dict whose items are also accessible as attributes.

    Integer-like keys get extra sugar: ``b[7]`` falls back to ``b['7']``,
    and ``b._7`` reads ``b['7']`` (attribute names can't start with a digit).
    """

    # the only instance slot is the recursion guard used by __repr__
    __slots__ = ("__stop_recursing__",)

    def __getattr__(self, name):
        # attribute access falls back to item access; '_<digits>' maps to the
        # string key without the underscore (e.g. b._7 -> b['7'])
        try:
            return self[name]
        except KeyError:
            if name[0] == "_" and name[1:].isdigit():
                return self[name[1:]]
            raise AttributeError("%s has no attribute %r" % (self.__class__, name))

    def __getitem__(self, key):
        # integral keys fall back to their string form (b[7] -> b['7'])
        try:
            return super(Bunch, self).__getitem__(key)
        except KeyError:
            from numbers import Integral
            if isinstance(key, Integral):
                return self[str(key)]
            raise

    def __setattr__(self, name, value):
        # attribute assignment writes through to the dict
        self[name] = value

    def __delattr__(self, name):
        try:
            del self[name]
        except KeyError:
            raise AttributeError("%s has no attribute %r" % (self.__class__, name))

    def __getstate__(self):
        # pickle the mapping itself (restored via __setstate__)
        return self

    def __setstate__(self, dict):
        # NOTE(review): parameter shadows the builtin `dict`; kept as-is
        self.update(dict)

    def __repr__(self):
        # the guard flag prevents infinite recursion when the bunch
        # (directly or indirectly) contains itself
        if getattr(self, "__stop_recursing__", False):
            items = sorted("%s" % k for k in self if isinstance(k, str) and not k.startswith("__"))
            attrs = ", ".join(items)
        else:
            dict.__setattr__(self, "__stop_recursing__", True)
            try:
                attrs = self.render()
            finally:
                dict.__delattr__(self, "__stop_recursing__")
        return "%s(%s)" % (self.__class__.__name__, attrs)

    def render(self):
        """Render the public (string, non-dunder) items as 'k=v' pairs."""
        items = sorted("%s=%r" % (k, v) for k, v in self.items() if isinstance(k, str) and not k.startswith("__"))
        return ", ".join(items)

    def _repr_pretty_(self, *args, **kwargs):
        # IPython pretty-printing hook
        from easypy.humanize import ipython_mapping_repr
        return ipython_mapping_repr(self, *args, **kwargs)

    def to_dict(self):
        """Recursively convert this bunch (and nested bunches) to plain dicts."""
        return unbunchify(self)

    def to_json(self):
        import json
        return json.dumps(self.to_dict())

    def to_yaml(self):
        import yaml
        return yaml.dump(self.to_dict())

    def copy(self, deep=False):
        """Return a copy; ``deep=True`` also copies nested containers."""
        if deep:
            return _convert(self, self.__class__)
        else:
            return self.__class__(self)

    @classmethod
    def from_dict(cls, d):
        """Recursively convert a plain dict (and nested dicts) into bunches."""
        return _convert(d, cls)

    @classmethod
    def from_json(cls, d):
        import json
        return cls.from_dict(json.loads(d))

    @classmethod
    def from_yaml(cls, d):
        import yaml
        return cls.from_dict(yaml.safe_load(d))

    @classmethod
    def from_xml(cls, d):
        import xmltodict
        return cls.from_dict(xmltodict.parse(d))

    def __dir__(self):
        # expose identifier-like keys as attribute completions
        members = set(k for k in self if isinstance(k, str) and (k[0] == "_" or k.replace("_", "").isalnum()))
        members.update(dict.__dir__(self))
        return sorted(members)

    def without(self, *keys):
        "Return a shallow copy of the bunch without the specified keys"
        return Bunch((k, v) for k, v in self.items() if k not in keys)

    def but_with(self, **kw):
        "Return a shallow copy of the bunch with the specified keys"
        return Bunch(self, **kw)
class KeyedStack(ExitStack):
    """An ``ExitStack`` whose context managers come from a factory and are
    addressable by key, so individual ones can be exited before the stack closes.
    """

    def __init__(self, context_factory):
        super().__init__()
        # called with the key to create each new context manager
        self.context_factory = context_factory
        # key -> stack (list) of live context managers for that key
        self.contexts_dict = defaultdict(list)

    def enter_context(self, *key):
        # build a fresh CM for this key, remember it, and register it with the stack
        manager = self.context_factory(*key)
        self.contexts_dict[key].append(manager)
        super().enter_context(manager)

    def exit_context(self, *key):
        # exit the most recently entered CM for this key (LIFO per key)
        manager = self.contexts_dict[key].pop(-1)
        manager.__exit__(None, None, None)
def is_contextmanager(func):
    """Tell whether ``func`` was produced by our (or stdlib's) ``@contextmanager``."""
    # functions decorated by 'contextmanager' share the helper's code object
    code = getattr(func, "__code__", None)
    return code in _ctxm_code_samples
def parametrizeable_decorator(deco):
    """Make ``deco`` usable both bare (``@deco``) and parametrized (``@deco(...)``).

    When called without a function, the parameters are captured and decoration
    is deferred until the function arrives.
    """
    @wraps(deco)
    def dispatcher(func=None, *args, **kwargs):
        if func is None:
            # parameters only - delay decoration until the function is supplied
            return partial(deco, *args, **kwargs)
        decorated = deco(func, *args, **kwargs)
        return wraps(func)(decorated)
    return dispatcher
class DecoratingDescriptor(metaclass=ABCMeta):
    """
    Base class for descriptors that decorate a function.

    :param func: The function to be decorated.
    :param bool cached: If ``True``, the decoration will only be done once per instance.

    Use this as a base class for other descriptors. When accessed on the class
    (``instance is None``), this returns the underlying function undecorated;
    when accessed on an instance, it calls ``_decorate`` on the method created
    by binding ``func``.
    """

    def __init__(self, *, func, cached: bool):
        self._func = func
        self._cached = cached
        # unique per-descriptor cache key, so several descriptors can cache on one instance
        self.__property_name = '__property_%s' % id(self)
        # copy __name__/__doc__/etc from func; updated=() avoids merging func.__dict__
        update_wrapper(self, func, updated=())

    @abstractmethod
    def _decorate(self, method, instance, owner):
        """
        Override to perform the actual decoration.

        :param method: The method from binding ``func``.
        :param instance: The binding instance (same as in ``__get__``)
        :param owner: The owner class (same as in ``__get__``)
        """
        pass

    def __get__(self, instance, owner):
        method = self._func.__get__(instance, owner)
        if instance is None:
            # class-level access: expose the underlying function untouched
            return method
        else:
            if self._cached:
                try:
                    return instance.__dict__[self.__property_name]
                except KeyError:
                    # first access on this instance - decorate once and memoize
                    bound = self._decorate(method, instance, owner)
                    instance.__dict__[self.__property_name] = bound
                    return bound
            else:
                # re-decorate on every access
                return self._decorate(method, instance, owner)


class LazyDecoratorDescriptor(DecoratingDescriptor):
    # Descriptor used by ``lazy_decorator``: the decorator itself is built
    # from the instance only when the attribute is accessed.
    def __init__(self, decorator_factory, func, cached):
        super().__init__(func=func, cached=cached)
        self.decorator_factory = decorator_factory

    def _decorate(self, method, instance, owner):
        decorator = self.decorator_factory(instance)
        return decorator(method)
class DefaultsMismatch(TException):
    # Raised by ``ensure_same_defaults`` when a decorated function's default
    # for a shared optional parameter differs from the source-of-truth's.
    template = 'The defaults of {func} differ from those of {source_of_truth} in params {param_names}'
__KEYWORD_PARAMS = (inspect.Parameter.POSITIONAL_OR_KEYWORD, inspect.Parameter.KEYWORD_ONLY)


def kwargs_from(*functions, exclude=()):
    """
    Edits the decorated function's signature to expand the variadic keyword
    arguments parameter to the possible keywords from the wrapped functions.
    This allows better completions inside interactive tools such as IPython.

    :param functions: The functions to get the keywords from.
    :param exclude: A list of parameters to exclude from the new signature.
    :raises TypeError: When the decorated function does not have a variadic
                       keyword argument.

    >>> def foo(*, a, b, c):
    ...     ...
    >>> @kwargs_from(foo)
    ... def bar(**kwargs):
    ...     ...
    >>> help(bar)
    Help on function bar in module easypy.decorations:

    bar(*, a, b, c)

    """
    exclude = set(exclude or ())
    # BUGFIX: this used to be a generator expression, which was exhausted after
    # decorating the first function - reusing the returned decorator on a second
    # function silently produced an empty signature. A list can be re-iterated.
    all_original_params = [inspect.signature(func).parameters for func in functions]

    def _decorator(func):
        signature = inspect.signature(func)

        # split the decorated function's params into regular ones and the **kwargs slot
        kws_param = None
        params = OrderedDict()
        for param in signature.parameters.values():
            if param.kind != inspect.Parameter.VAR_KEYWORD:
                params[param.name] = param
            else:
                kws_param = param
        if kws_param is None:
            raise TypeError("kwargs_from can only wrap functions with variadic keyword arguments")

        # pull keyword-capable params from the source functions into the signature
        keep_kwargs = False
        for param in chain.from_iterable(original_params.values() for original_params in all_original_params):
            if param.name in exclude:
                pass
            elif param.kind in __KEYWORD_PARAMS and param.name not in params:
                params[param.name] = param.replace(kind=inspect.Parameter.KEYWORD_ONLY)
            elif param.kind == inspect.Parameter.VAR_KEYWORD:
                # a source function itself takes **kwargs, so keep ours too
                keep_kwargs = True

        if keep_kwargs:
            params['**'] = kws_param

        func.__signature__ = signature.replace(parameters=params.values())
        return func
    return _decorator
def deprecated_arguments(**argmap):
    """
    Renames arguments while emitting deprecation warning::

        @deprecated_arguments(old_name='new_name')
        def func(new_name):
            # ...

        func(old_name='value meant for new name')
    """

    def wrapper(func):
        @wraps(func)
        def inner(*args, **kwargs):
            renamed = []
            for old_name, new_name in argmap.items():
                if old_name not in kwargs:
                    continue  # deprecated alias was not used in this call
                if new_name in kwargs:
                    # caller passed both the old and the new spelling
                    raise TypeError("%s is deprecated for %s - can't use both in %s()" % (
                        old_name, new_name, func.__name__))
                kwargs[new_name] = kwargs.pop(old_name)
                renamed.append('%s is deprecated - use %s instead' % (old_name, new_name))

            if renamed:
                warnings.warn('Hey! In %s, %s' % (func.__name__, ', '.join(renamed)),
                              DeprecationWarning, stacklevel=2)

            return func(*args, **kwargs)
        return inner
    return wrapper
(self.__class__.__name__, self.message, ", ".join(kw)) 50 | else: 51 | return "%s(%r)" % (self.__class__.__name__, self.message) 52 | 53 | def __str__(self): 54 | return self.render(traceback=False, color=False) 55 | 56 | def render(self, params=True, context=True, traceback=True, timestamp=True, color=True): 57 | text = "" 58 | 59 | if self.message: 60 | text += "".join("WHITE<<%s>>\n" % line for line in self.message.splitlines()) 61 | 62 | if params and self._params: 63 | tip = self._params.pop('tip', None) 64 | text += indent("".join(make_block(self._params)), " " * 4) 65 | if tip: 66 | tip = tip.format(**self._params) 67 | lines = tip.splitlines() 68 | text += indent("GREEN(BLUE)@{tip = %s}@\n" % lines[0], " " * 4) 69 | for line in lines[1:]: 70 | text += indent("GREEN(BLUE)@{ %s}@\n" % line, " " * 4) 71 | self._params['tip'] = tip # put it back in params, even though it might've been on the class 72 | 73 | if timestamp and self.timestamp: 74 | ts = datetime.fromtimestamp(self.timestamp).isoformat() 75 | text += indent("MAGENTA<>\n" % ts, " " * 4) 76 | 77 | if context and self.context: 78 | text += "Context:\n" + indent("".join(make_block(self.context, skip={"indentation"})), " " * 4) 79 | 80 | if traceback and self.traceback: 81 | fmt = "DARK_GRAY@{{{}}}@" 82 | text += "\n".join(map(fmt.format, self.traceback.splitlines())) 83 | 84 | if not color: 85 | from easypy.colors import uncolored 86 | text = uncolored(text) 87 | 88 | return text 89 | 90 | @classmethod 91 | def make(cls, name): 92 | return type(name, (cls,), {}) 93 | 94 | @classmethod 95 | @contextmanager 96 | def on_exception(cls, acceptable=Exception, **kwargs): 97 | from easypy.logging import _get_logger # noqa 98 | logger = _get_logger(name=__name__) 99 | 100 | try: 101 | yield 102 | except cls: 103 | # don't mess with exceptions of this type 104 | raise 105 | except acceptable as exc: 106 | exc_info = sys.exc_info() 107 | logger.debug("'%s' raised; Raising as '%s'" % (type(exc), cls), 
def make_block(d, skip={}):
    """Yield one aligned ``"key = value\\n"`` line per public entry of ``d``.

    Keys starting with ``_`` (or listed in ``skip``) are omitted; a leading
    ``~`` marks the entry to be rendered dark (wrapped in DARK_GRAY markup).
    Multi-line values are indented to line up under the first line.
    """
    # NOTE: the mutable default `skip` is never modified, so sharing it is safe
    for key in sorted(d):
        if key.startswith("_") or key in skip:
            continue
        value = d[key]
        if isinstance(value, datetime):
            value = value.isoformat()
        elif not isinstance(value, str):
            value = repr(value)
        dark = key.startswith("~")
        if dark:
            key = key[1:]
        head = "%s = " % key
        # indent continuation lines so they align under the value's first line
        block = head + indent(value, " " * len(head))[len(head):]
        if dark:
            block = "DARK_GRAY@{%s}@" % block
        yield block + "\n"
class FixturesNamespace(object):
    """A registry of named fixtures, keyed by their function name."""

    def __init__(self):
        # fixture name -> Fixture object
        self.fixtures = {}

    def register(self, func=None, *, cached=True):
        """
        Register ``func`` as a fixture, usable as ``@ns.register`` or
        ``@ns.register(cached=False)``.

        :param bool cached: If ``True``, the fixture's value is memoized per assembly.
        :returns: the registered function (so the decorated name stays usable).
        """
        # NOTE: the previous version applied ``@wraps(func)`` to this inner
        # registrar - a no-op on the wrong target (and meaningless when
        # ``func`` is None) - and returned None, which made ``@ns.register``
        # rebind the decorated name to None. Both are fixed here.
        def inner(func):
            assert func.__name__ not in self.fixtures, 'Fixture %s already exists' % func.__name__
            fixture = Fixture(func.__name__, func, cached=cached)
            self.fixtures[fixture.name] = fixture
            return func

        if func is None:
            # parametrized usage: @ns.register(cached=False)
            return inner
        else:
            return inner(func)

    def assemble(self, **manual_fixture_values):
        """Create a FixturesAssembly, pre-seeding the given fixture values."""
        assembly = FixturesAssembly(self)
        for k, v in manual_fixture_values.items():
            assembly._set_fixture_value(k, v)
        return assembly

    def get(self, name):
        # 'name__variant' resolves to the fixture registered as 'name'
        return self.fixtures[name.split('__')[0]]
    def _set_fixture_value(self, name, value):
        # Record a resolved fixture value; each fixture may be set at most
        # once per assembly (re-setting indicates a caching/seeding bug).
        assert name not in self._fixture_values, 'Value for fixture %s already set' % (name,)
        self._fixture_values[name] = value
import yesno_to_bool 19 | 20 | # can't use easypy's logging since this has to be run before everything, 21 | # hence the name '_basic_logger', to remind that easypy features are not available 22 | from logging import getLogger 23 | _basic_logger = getLogger(name='gevent') 24 | 25 | 26 | main_thread_ident_before_patching = threading.main_thread().ident 27 | 28 | HUB = None 29 | 30 | HOGGING_TIMEOUT = int(os.getenv('EASYPY_GEVENT_HOGGING_DETECTOR_INTERVAL', 0)) 31 | _HOGGING_DETECTION_RUNNING = False 32 | 33 | 34 | def apply_patch(hogging_detection=None, real_threads=None): 35 | if real_threads is None: 36 | real_threads = int(os.getenv('EASYPY_GEVENT_REAL_THREADS', 1)) 37 | if hogging_detection is None: 38 | hogging_detection = bool(HOGGING_TIMEOUT) 39 | 40 | _basic_logger.info('applying gevent patch (%s real threads)', real_threads) 41 | 42 | # real_threads is 1 by default so it will be possible to run watch_threads concurrently 43 | if hogging_detection: 44 | real_threads += 1 45 | 46 | import gevent 47 | import gevent.monkey 48 | 49 | for m in ["easypy.threadtree", "easypy.concurrency"]: 50 | assert m not in sys.modules, "Must apply the gevent patch before importing %s" % m 51 | 52 | gevent.monkey.patch_all(Event=True, sys=True) 53 | 54 | _patch_module_locks() 55 | _unpatch_logging_handlers_lock() 56 | 57 | global HUB 58 | HUB = gevent.get_hub() 59 | 60 | global threading 61 | import threading 62 | for thread in threading.enumerate(): 63 | _set_thread_uuid(thread.ident) 64 | _set_main_uuid() # the patched threading has a new ident for the main thread 65 | 66 | # this will declutter the thread dumps from gevent/greenlet frames 67 | import gevent, gevent.threading, gevent.greenlet 68 | _BOOTSTRAPPERS.update([gevent, gevent.threading, gevent.greenlet]) 69 | 70 | if hogging_detection: 71 | import greenlet 72 | greenlet.settrace(lambda *args: _greenlet_trace_func(*args)) 73 | global _HOGGING_DETECTION_RUNNING 74 | _HOGGING_DETECTION_RUNNING = True 75 | wait = 
def detect_hogging():
    """
    Background watchdog: warn when a single greenlet hogs the event loop.

    Runs in a real (non-gevent) thread; relies on a greenlet trace function
    (installed by ``apply_patch``) to observe greenlet switches, and flags the
    current greenlet when no switch happened for a full HOGGING_TIMEOUT window.
    """
    did_switch = True

    # the greenlet observed as currently running; starts at the hub
    current_running_greenlet = HUB

    def mark_switch(event, args):
        # greenlet trace callback - records that a switch happened and who ran
        nonlocal did_switch
        nonlocal current_running_greenlet
        if event != 'switch':
            return
        did_switch = True
        current_running_greenlet = args[1]  # args = (origin_greenlet, target_greenlet)

    # swap in our tracer; apply_patch installed a lambda that forwards to this global
    global _greenlet_trace_func
    _greenlet_trace_func = mark_switch

    current_blocker_time = 0
    last_warning_time = 0

    while _HOGGING_DETECTION_RUNNING:
        # sleep with the *unpatched* sleep so this real thread doesn't go through gevent
        non_gevent_sleep(HOGGING_TIMEOUT)
        if did_switch:
            # all good
            pass
        elif current_running_greenlet == HUB:
            # it's ok for the hub to block if all greenlet wait on async io
            pass
        else:
            current_blocker_time += HOGGING_TIMEOUT
            if current_blocker_time < last_warning_time * 2:
                continue  # dont dump too much warnings - decay exponentialy until exploding after FAIL_BLOCK_TIME_SEC
            # try to map the hogging greenlet back to a known thread object
            for thread in threading.enumerate():
                if getattr(thread, '_greenlet', None) == current_running_greenlet:
                    # NOTE(review): format string has no placeholder for the extra
                    # argument - looks garbled/truncated; confirm against upstream
                    _basic_logger.info('RED<>', current_blocker_time)
                    _basic_logger.debug('thread stuck: %s', thread)
                    break
            else:
                # NOTE(review): same garbled format string as above - confirm
                _basic_logger.info('RED<>', current_blocker_time)
                _basic_logger.debug('greenlet stuck (no corresponding thread found): %s', current_running_greenlet)
                _basic_logger.debug('hub is: %s', HUB)
            # this is needed by `detect_hogging`, but we must'nt import it
            # there since it leads to a gevent/native-thread deadlock,
            # and can't import it at the top since thing must wait for
            # the gevent patching
            from easypy.humanize import format_thread_stack
            # escalate from debug to info once the hog persists long enough
            func = _basic_logger.debug if current_blocker_time < 5 * HOGGING_TIMEOUT else _basic_logger.info
            func("Stack:\n%s", format_thread_stack(sys._current_frames()[main_thread_ident_before_patching]))
            last_warning_time = current_blocker_time
            continue

        # a switch occurred (or only the hub ran) - reset the accumulated counters
        current_blocker_time = 0
        last_warning_time = 0
        did_switch = False
def get_input(prompt, default=NotImplemented):
    """
    Prompt the user for one line of input.

    :param default: value returned on an empty answer; when omitted, an empty
                    answer raises ``CancelledException``.
    """
    label = prompt.strip(": ")
    if default is not NotImplemented:
        label = "%s DARK_CYAN<<[%s]>>" % (label, default)
    answer = input(Colorized(label + ": "))
    if answer:
        return answer
    if default is NotImplemented:
        raise CancelledException()
    return default
while True: 49 | answer = get_input(msg, default=default).strip().lower() 50 | if answer not in all_options: 51 | print("Invalid answer ('%s')" % answer) 52 | continue 53 | return all_options[answer] 54 | 55 | 56 | def ask(question, default=NotImplemented): 57 | if default is NotImplemented: 58 | options = [(("y", "yes"), True), (("n", "no"), False)] # no default 59 | elif default: 60 | default = "Y" 61 | options = [(("Y", "yes"), True), (("n", "no"), False)] # 'Y' is default 62 | else: 63 | default = "N" 64 | options = [(("y", "yes"), True), (("N", "no"), False)] # 'N' is default 65 | 66 | try: 67 | return choose(question, options, default=default) 68 | except EOFError: 69 | return False # EOF == no 70 | -------------------------------------------------------------------------------- /easypy/lockstep.py: -------------------------------------------------------------------------------- 1 | from contextlib import contextmanager 2 | from functools import wraps, update_wrapper 3 | 4 | from .exceptions import TException 5 | 6 | 7 | class LockstepSyncMismatch(TException): 8 | template = "Expected {lockstep_name} to be {expected_step}, but it is {actual_step}" 9 | 10 | 11 | class _LockstepInvocation(object): 12 | def __init__(self, name, generator): 13 | self._name = name 14 | self._generator = generator 15 | self._current_step = 'just-started' 16 | 17 | def step_next(self, step): 18 | """Progress to the next step, confirming it's the specified one""" 19 | 20 | try: 21 | self._current_step, value = self._next_step_and_value() 22 | except StopIteration: 23 | raise LockstepSyncMismatch(lockstep_name=self._name, 24 | expected_step=step, 25 | actual_step='finished') 26 | if self._current_step != step: 27 | raise LockstepSyncMismatch(lockstep_name=self._name, 28 | expected_step=step, 29 | actual_step=self._current_step) 30 | return value 31 | 32 | def step_until(self, step): 33 | """Progress until we get to the specified step, confirming that it exist""" 34 | 35 | while 
self._current_step != step: 36 | try: 37 | self._current_step, value = self._next_step_and_value() 38 | except StopIteration: 39 | raise LockstepSyncMismatch(lockstep_name=self._name, 40 | expected_step=step, 41 | actual_step='finished') 42 | return value 43 | 44 | def step_all(self): 45 | """Progress through all the remaining steps""" 46 | 47 | for self._current_step in self._generator: 48 | pass 49 | 50 | def __iter__(self): 51 | yield from self._generator 52 | self._current_step = 'finished' 53 | 54 | def _next_step_and_value(self): 55 | yield_result = next(self._generator) 56 | if isinstance(yield_result, tuple) and len(yield_result) == 2: 57 | return yield_result 58 | else: 59 | return yield_result, None 60 | 61 | def __str__(self): 62 | return '%s<%s>' % (self._name, self._current_step) 63 | 64 | 65 | class lockstep(object): 66 | """ 67 | Synchronize a coroutine that runs a process step-by-step. 68 | 69 | Decorate a generator that yields step names to create a context manager. 70 | 71 | * Use like a regular method(that returns `None`). 72 | * Use ``.lockstep(...)`` on the method to get a context manager. The context 73 | object has a ``.step_next``/``.step_until`` methods that must be called, in 74 | order, with all expected step names, to make the generator progress to 75 | each step. 76 | * Yield from the context object to embed the lockstep inside a bigger 77 | lockstep function. 
78 | 79 | Example:: 80 | 81 | @lockstep 82 | def my_process(): 83 | # things before step A 84 | yield 'A' 85 | # things between step A and step B 86 | yield 'B' 87 | # things between step B and step C 88 | yield 'C' 89 | # things between step C and step D 90 | yield 'D' 91 | # things after step D 92 | 93 | my_process() # just run it like a normal function, ignoring the lockstep 94 | 95 | with my_process.lockstep() as process: 96 | process.step_next('A') # go to next step - A 97 | process.step_until('C') # go through steps until you reach C 98 | process.step_all() # go through all remaining steps until the end 99 | 100 | @lockstep 101 | def bigger_process(): 102 | yield 'X' 103 | 104 | # Embed `my_process`'s steps inside `bigger_process`: 105 | with my_process.lockstep() as process: 106 | yield from process 107 | 108 | yield 'Y' 109 | 110 | :note: When used class methods, put the ``@classmethod`` decorator **below** 111 | the ``@lockstep`` decorator:: 112 | 113 | class Foo: 114 | @lockstep 115 | @classmethod 116 | def process(cls): 117 | # ... 
118 | """ 119 | 120 | def __init__(self, generator_func): 121 | self.generator_func = generator_func 122 | update_wrapper(self, generator_func) 123 | 124 | @contextmanager 125 | def lockstep(self, *args, **kwargs): 126 | generator = self.generator_func(*args, **kwargs) 127 | invocation = _LockstepInvocation(self.generator_func.__name__, generator) 128 | 129 | yield invocation 130 | 131 | try: 132 | step_not_taken, _ = invocation._next_step_and_value() 133 | except StopIteration: 134 | # all is well - all steps were exhausted 135 | invocation._current_step = 'finished' 136 | else: 137 | raise LockstepSyncMismatch(lockstep_name=invocation._name, 138 | expected_step='finished', 139 | actual_step=step_not_taken) 140 | 141 | def __call__(self, *args, **kwargs): 142 | with self.lockstep(*args, **kwargs) as process: 143 | process.step_all() 144 | 145 | def __get__(self, instance, owner=None): 146 | func = self.generator_func.__get__(instance, owner) 147 | if func is self.generator_func: 148 | return self 149 | else: 150 | return lockstep(func) 151 | -------------------------------------------------------------------------------- /easypy/logging/_logbook.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | from __future__ import absolute_import 3 | 4 | import logbook 5 | import re 6 | import sys 7 | import threading 8 | import logging 9 | from itertools import cycle, chain, repeat, count 10 | from collections import OrderedDict 11 | 12 | from easypy.colors import colorize, uncolored 13 | from easypy.threadtree import ThreadContexts 14 | from easypy.contexts import contextmanager 15 | 16 | from easypy.logging import G, get_level_color, THREAD_LOGGING_CONTEXT 17 | 18 | 19 | CLEAR_EOL = '\x1b[0K' 20 | 21 | 22 | LEVEL_COLORS = { 23 | logbook.DEBUG: "DARK_GRAY", 24 | logbook.INFO: "GRAY", 25 | logbook.WARNING: "YELLOW", 26 | logbook.ERROR: "RED", 27 | logbook.NOTICE: "WHITE", 28 | } 29 | 30 | 31 | class 
RE_OLD_STRING_FORMATTING = re.compile(r'%(?:\((\w+)\))?([ \-+#0]*)(\d*\.?\d*)([diouxXeEfFgGcrs])')


def convert_string_template(string):
    """
    Translate a %-style format string into the equivalent {}-style one.

    Named specifiers such as ``%(key)s`` are left untouched; positional
    specifiers are numbered in order of appearance.
    """
    counter = count()

    def _translate(matched):
        keyword, flags, width, conversion = matched.groups()
        if keyword:
            return matched.group(0)  # named specifier - keep the %-style form

        position = next(counter)
        align = "<" if "-" in flags else ""
        if "+" in flags:
            sign = "+"
        elif " " in flags:
            sign = " "
        else:
            sign = ""
        zero = "0" if "0" in flags else ""

        spec_type = ""
        if conversion in "asr":
            conversion = "!" + conversion            # str.format conversion flag
        elif conversion in "cdoxXeEfFgG":
            conversion, spec_type = "", conversion   # becomes a format-spec type
        elif conversion == "i":
            conversion, spec_type = "", "d"          # %i has no {}-style analog

        return "{%d%s:%s%s%s%s%s}" % (position, conversion, align, sign, zero, width, spec_type)

    return RE_OLD_STRING_FORMATTING.sub(_translate, string)
    def process(self, record):
        """
        Annotate ``record.extra['silenced']`` based on solo/suppress state.

        Downstream console handlers (see ConsoleHandlerMixin) drop records
        whose extra is marked silenced.
        """
        # Snapshot the most recently solo-ed thread. Another thread may pop
        # its SELECTED entry between reversed() and .get(), so retry (loop)
        # until a consistent read is obtained; no lock is taken here since
        # this runs for every log record.
        selected = False
        while selected is False:
            idx = next(reversed(self.SELECTED), None)
            if idx is None:
                # nobody is solo-ed
                selected = None
                break
            selected = self.SELECTED.get(idx, False)

        if selected:
            # a thread is solo-ed: silence every other thread
            record.extra['silenced'] = selected != threading.current_thread()
        elif self.CONTEXT.silenced is False:
            # not suppressed at all - leave the record untouched
            pass
        elif self.CONTEXT.silenced is True:
            record.extra['silenced'] = True
        else:
            # CONTEXT.silenced holds a level (name or number): silence records
            # at or below that level
            record.extra['silenced'] = record.level <= logbook.lookup_level(self.CONTEXT.silenced)
class LogLevelClamp(logging.Filterer):
    """
    Log-records with a log-level that is too high are clamped down.
    Used internally by the ``ProgressBar``
    """

    def __init__(self, level=logging.DEBUG, *, logger=None):
        # Fix: Filterer.__init__ was never called, so ``self.filters`` did not
        # exist and ``addFilter`` in __enter__ raised AttributeError.
        super().__init__()
        # ``logger`` is accepted (keyword-only, ignored) for signature parity
        # with the logbook variant - ProgressBarLoggerMixin passes logger=self.
        self.level = level
        self.name = logging.getLevelName(level)

    def filter(self, record):
        """Clamp `record` down to `self.level`; always let the record through."""
        if record.levelno > self.level:
            record.levelname, record.levelno = self.name, self.level
        return True

    def __enter__(self):
        # NOTE(review): this attaches the clamp to *itself*; it only has an
        # effect if this object is used as a handler/filterer - possibly the
        # intended target was a logger. Confirm against upstream usage.
        self.addFilter(self)

    def __exit__(self, *args):
        self.removeFilter(self)
selected: 110 | return selected == threading.current_thread() 111 | 112 | if self.CONTEXT.silenced is False: 113 | return False 114 | if self.CONTEXT.silenced is True: 115 | return True 116 | return record.levelno <= logging._nameToLevel.get(self.CONTEXT.silenced, self.CONTEXT.silenced) 117 | 118 | 119 | class ColorizingFormatter(logging.Formatter): 120 | 121 | def formatMessage(self, record): 122 | if not hasattr(record, "levelcolor"): 123 | record.levelcolor = get_level_color(record.levelno) 124 | msg = super().formatMessage(record) 125 | return colorize(msg) if G.COLORING else uncolored(msg) 126 | 127 | 128 | class ConsoleFormatter(ColorizingFormatter): 129 | 130 | def formatMessage(self, record): 131 | msg = super().formatMessage(record) 132 | if G.IS_A_TTY: 133 | msg = "\n".join('\r{0}{1}'.format(line, CLEAR_EOL) for line in msg.splitlines()) 134 | return msg 135 | 136 | 137 | try: 138 | import yaml 139 | except ImportError: 140 | pass 141 | else: 142 | try: 143 | from yaml import CDumper as Dumper 144 | except ImportError: 145 | from yaml import Dumper 146 | 147 | class YAMLFormatter(logging.Formatter): 148 | 149 | def __init__(self, **kw): 150 | self.dumper_params = kw 151 | 152 | def format(self, record): 153 | return yaml.dump(vars(record), Dumper=Dumper) + "\n---\n" 154 | 155 | #=====================#=====================#=====================# 156 | # This monkey-patch tricks logging's findCaller into skipping over 157 | # this module when looking for the caller of a logger.log function 158 | class _SrcFiles: 159 | _srcfiles = {logging._srcfile, __file__} 160 | def __eq__(self, fname): 161 | return fname in self.__class__._srcfiles 162 | logging._srcfile = _SrcFiles() 163 | #=====================#=====================#=====================# 164 | 165 | 166 | _root = __file__[:__file__.find(os.sep.join(__name__.split(".")))] 167 | 168 | 169 | def _trim(pathname, modname, cache={}): 170 | try: 171 | return cache[(pathname, modname)] 172 | except KeyError: 
173 | pass 174 | 175 | elems = pathname.replace(_root, "").strip(".").split(os.sep)[:-1] 176 | if modname != "__init__": 177 | elems.append(modname) 178 | 179 | ret = cache[(pathname, modname)] = filter(None, elems) 180 | return ret 181 | 182 | 183 | # ================================================================= 184 | 185 | def patched_makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None): 186 | decoration = G.graphics.INDENT_SEGMENT 187 | 188 | rv = self._makeRecord(name, level, fn, lno, msg, args, exc_info, func=func, sinfo=sinfo) 189 | if extra is not None: 190 | decoration = extra.pop('decoration', decoration) 191 | for key in extra: 192 | if (key in ["message", "asctime"]) or (key in rv.__dict__): 193 | raise KeyError("Attempt to overwrite %r in LogRecord" % key) 194 | rv.__dict__[key] = extra[key] 195 | 196 | contexts = THREAD_LOGGING_CONTEXT.context 197 | extra = THREAD_LOGGING_CONTEXT.flatten() 198 | extra['context'] = "[%s]" % ";".join(contexts) if contexts else "" 199 | rv.__dict__.update(dict(extra, **rv.__dict__)) 200 | 201 | indents = chain(repeat(G.graphics.INDENT_SEGMENT, rv.indentation), repeat(decoration, 1)) 202 | rv.decoration = "".join(color(segment) for color, segment in zip(cycle(G.INDENT_COLORS), indents)) 203 | return rv 204 | -------------------------------------------------------------------------------- /easypy/logging/heartbeats.py: -------------------------------------------------------------------------------- 1 | class HeartbeatHandlerMixin(): 2 | "Heartbeat notifications based on the application's logging activity" 3 | 4 | def __init__(self, beat_func, min_interval=1, **kw): 5 | """ 6 | @param beat_func: calls this function when a heartbeat is due 7 | @param min_interval: minimum time interval between heartbeats 8 | """ 9 | super(HeartbeatHandlerMixin, self).__init__(**kw) 10 | self.min_interval = min_interval 11 | self.last_beat = 0 12 | self.beat = beat_func 13 | self._emitting = False 
14 | 15 | def emit(self, record): 16 | if self._emitting: 17 | # prevent reenterance 18 | return 19 | 20 | try: 21 | self._emitting = True 22 | if (record.created - self.last_beat) > self.min_interval: 23 | try: 24 | log_message = self.format(record) 25 | except: # noqa 26 | log_message = "Log record formatting error (%s:#%s)" % (record.filename, record.lineno) 27 | self.beat(log_message=log_message, heartbeat=record.created) 28 | self.last_beat = record.created 29 | finally: 30 | self._emitting = False 31 | -------------------------------------------------------------------------------- /easypy/logging/progressbar.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | from __future__ import absolute_import 3 | 4 | import logging 5 | import os 6 | import threading 7 | import time 8 | from contextlib import ExitStack 9 | from itertools import cycle 10 | 11 | from easypy.colors import uncolored 12 | from easypy.humanize import compact 13 | from easypy.timing import Timer 14 | from easypy.contexts import contextmanager 15 | from easypy.misc import at_least 16 | from easypy.logging import G 17 | 18 | CLEAR_EOL = '\x1b[0K' 19 | 20 | 21 | def _progress(): 22 | from random import randint 23 | while True: 24 | yield chr(randint(0x2800, 0x28FF)) 25 | 26 | 27 | class ProgressBar: 28 | 29 | WAITING = "▅▇▆▃ ▆▇▆▅▃_ " 30 | # PROGRESSING = "⣾⣽⣻⢿⡿⣟⣯⣷" #"◴◷◶◵◐◓◑◒" 31 | SPF = 1.0 / 15 32 | 33 | def __init__(self): 34 | self._event = threading.Event() 35 | self._thread = None 36 | self._lock = threading.RLock() 37 | self._depth = 0 38 | self._term_width, _ = os.get_terminal_size() if G.IS_A_TTY else [0, 0] 39 | self._term_width = at_least(120, self._term_width) 40 | 41 | def loop(self): 42 | wait_seq = cycle(self.WAITING) 43 | prog_seq = _progress() 44 | wait_symb, progress_symb = map(next, (wait_seq, prog_seq)) 45 | last_time = hanging = 0 46 | while True: 47 | progressed = self._event.wait(self.SPF) 48 | if self._stop: 49 | 
    @contextmanager
    def __call__(self):
        """
        Context manager: run the progress-bar animation thread while the
        body executes. Nestable - only the outermost entry starts the thread
        and only the outermost exit stops it (tracked via ``_depth``).
        """
        if not G.GRAPHICAL:
            # non-graphical mode: no animation, just yield
            yield self
            return

        # imported lazily to avoid a circular import at module load time
        from . import get_console_handler
        handler = get_console_handler()

        if not isinstance(handler, logging.Handler):
            # not a stdlib-logging console handler (e.g. logbook) - not supported
            yield self
            return

        with self._lock:
            self._depth += 1
            if self._depth == 1:
                # outermost entry - initialize state and start the animation thread
                self.set_message("Waiting...")
                self._stop = False
                self._timer = Timer()
                self._format = handler.formatter.format if handler else lambda record: record.getMessage()
                self._thread = threading.Thread(target=self.loop, name="ProgressBar", daemon=True)
                self._thread.start()
        try:
            yield self
        finally:
            with self._lock:
                self._depth -= 1
                if self._depth <= 0:
                    # outermost exit - signal the loop to stop and wait for it
                    self._stop = True
                    self._event.set()
                    self._thread.join()
                    self._thread = None
class EasyMeta(ABCMeta):
    """
    This class helps in implementing various metaclass-based magic.
    Implement the ``before_subclass_init`` and/or ``after_subclass_init`` (see defined in :class:``EasyMetaHooks``)
    to modify the class spec, register subclasses, etc.

    Each hook method can be defined more than once (with the same name), and methods will all be invoked sequentially.

    Important: the hooks are not invoked on the class that implements the hooks - only on its subclasses.
    """

    class EasyMetaHooks:
        """
        The class defines the available EasyMeta hooks (slots), and registers handlers for EasyMeta derivatives
        """

        # populated at class-definition time by the ``_define_hook`` decorator below
        HOOK_NAMES = []

        def _define_hook(dlg, HOOK_NAMES=HOOK_NAMES):
            # Decorator: registers ``dlg``'s name as a hook slot and returns a
            # dispatcher that invokes all handlers registered under that name.
            # (HOOK_NAMES is deliberately bound as a default to capture the list.)
            HOOK_NAMES.append(dlg.__name__)

            @wraps(dlg)
            def hook(self, *args, **kwargs):
                # handlers may not accept all class kwargs - tolerate the ones they omit
                kwargs_resilience = kwargs_resilient(negligible=self.class_kwargs.keys())
                # forward class kwargs without clobbering explicitly-passed arguments
                kwargs.update((k, v) for k, v in self.class_kwargs.items() if k not in kwargs)

                for hook in self._em_hooks[dlg.__name__]:
                    kwargs_resilience(hook)(*args, **kwargs)

            return hook

        @_define_hook
        def after_subclass_init(self, cls):
            """
            Invoked after a subclass is being initialized

            >>> class PrintTheName(metaclass=EasyMeta):
            ...     @EasyMeta.Hook
            ...     def after_subclass_init(cls):
            ...         print('Declared', cls.__name__)
            ...
            ...
            >>> class Foo(PrintTheName):
            ...     pass
            Declared Foo
            """

        @_define_hook
        def before_subclass_init(self, name, bases, dct):
            """
            Invoked before a subclass is initialized

            >>> class AddMember(metaclass=EasyMeta):
            ...     @EasyMeta.Hook
            ...     def before_subclass_init(name, bases, dct):
            ...         dct['foo'] = 'bar'
            ...
            ...
            >>> class Foo(AddMember):
            ...     pass
            >>> Foo.foo
            'bar'
            """

        def __init__(self, class_kwargs={}):
            # NOTE(review): mutable default - safe here only because class_kwargs
            # is read, never mutated; confirm if handlers are ever allowed to mutate it
            self._em_hooks = {name: [] for name in self.HOOK_NAMES}
            self.class_kwargs = class_kwargs

        def add(self, hook):
            # register a single handler under its slot (keyed by function name)
            self._em_hooks[hook.__name__].append(hook)

        def extend(self, other):
            # merge another hook registry into this one, skipping duplicates
            for k, v in other._em_hooks.items():
                slot = self._em_hooks[k]
                slot.extend(hook for hook in v if hook not in slot)

    class EasyMetaDslDict(OrderedDict):
        """
        This class is used as the namespace for the user's class.
        Any member decorated as an EasyMeta hook gets removed from the namespace and
        put into a special 'hooks' collection. The EasyMeta metaclass then invokes the
        delegates registered under those hooks
        """
        def __init__(self, class_name):
            super().__init__()
            self._class_name = class_name
            self._em_hooks = EasyMeta.EasyMetaHooks()

        def __setitem__(self, name, value):
            if isinstance(value, EasyMeta.Hook):
                # intercept hook declarations: they become handlers, not class members
                self._em_hooks.add(value.dlg)
            else:
                return super().__setitem__(name, value)

    @classmethod
    def __prepare__(metacls, name, bases, **kwds):
        # give the class body our intercepting namespace (PEP 3115)
        dsl = metacls.EasyMetaDslDict(class_name=name)
        return dsl

    class Hook(object):
        # decorator marker wrapping a hook delegate defined in a class body
        def __init__(self, dlg):
            self.dlg = dlg

    def __init__(cls, name, bases, dct, **kwargs):
        # swallow class keyword arguments so ABCMeta.__init__ doesn't receive them
        super().__init__(name, bases, dct)

    def __new__(mcs, name, bases, dct, **kwargs):
        # collect hooks declared on all EasyMeta-based ancestors (MRO order, oldest first)
        aggregated_hooks = mcs.EasyMetaHooks(class_kwargs=kwargs)

        bases = list(bases)  # allow the hook to modify the base class list

        for base in bases:
            for sub_base in reversed(inspect.getmro(base)):
                if isinstance(sub_base, EasyMeta):
                    aggregated_hooks.extend(sub_base._em_hooks)

        ns = dict(dct)
        aggregated_hooks.before_subclass_init(name, bases, ns)
        new_type = super().__new__(mcs, name, tuple(bases), ns)
        aggregated_hooks.after_subclass_init(new_type)

        # hooks declared on THIS class only fire for its future subclasses
        new_type._em_hooks = dct._em_hooks

        return new_type


class GetAllSubclasses(metaclass=EasyMeta):
    """
    Meta-magic mixin for registering subclasses

    The ``get_all_subclasses`` class method will return a list of all subclasses
    of the class it was called on. The class it was called on is not included in
    the list.

    >>> class Foo(GetAllSubclasses):
    ...     pass
    >>> class Bar(Foo):
    ...     pass
    >>> class Baz(Foo):
    ...     pass
    >>> class Qux(Bar):
    ...     pass
    >>> [c.__name__ for c in Foo.get_all_subclasses()]
    ['Bar', 'Qux', 'Baz']
    >>> [c.__name__ for c in Bar.get_all_subclasses()]
    ['Qux']
    >>> Baz.get_all_subclasses()
    []
    >>> Qux.get_all_subclasses()
    []
    """

    @EasyMeta.Hook
    def after_subclass_init(cls):
        # name-mangled to _GetAllSubclasses__direct_subclasses; each new subclass
        # gets a fresh list, then registers itself with its registered ancestors
        cls.__direct_subclasses = []
        for base in cls.__bases__:
            if base is not GetAllSubclasses and issubclass(base, GetAllSubclasses):
                base.__direct_subclasses.append(cls)

    @classmethod
    @as_list
    def get_subclasses(cls):
        """
        List immediate subclasses of this class
        """
        yield from cls.__direct_subclasses

    @classmethod
    def iter_all_subclasses(cls, level=0) -> (int, type):
        """
        walk all subclasses of this class, depth-first,
        yielding (nesting-level, subclass) pairs
        """
        for subclass in cls.__direct_subclasses:
            yield level, subclass
            yield from subclass.iter_all_subclasses(level=level + 1)

    @classmethod
    @as_list
    def get_all_subclasses(cls):
        """
        List all subclasses of this class
        """
        for level, subclass in cls.iter_all_subclasses():
            yield subclass
def clamp(val, at_least, at_most):
    """
    Clamps a value so it doesn't exceed specified limits.
    If one of the edges is not needed, it should be passed as None (consider using at_most / at_least functions).

    :param at_least: Minimum possible value, or None for no lower bound.
    :param at_most: Maximum possible value, or None for no upper bound.
    :return: The clamped value.
    :raises ValueError: if both bounds are given and the minimum exceeds the maximum.
    """

    # BUGFIX: only sanity-check the bounds when both are given - previously a None
    # bound crashed with a TypeError here, despite being documented as allowed
    if at_least is not None and at_most is not None and at_least > at_most:
        raise ValueError("Min value cannot be higher than max value.")

    if at_most is not None:
        val = min(at_most, val)
    if at_least is not None:
        val = max(at_least, val)
    return val
class Predicate(object):
    """
    Base class for composable predicates.

    A predicate is tested against an object either by calling it or by comparing
    with ``==``; predicates compose with ``&`` (And), ``|`` (Or) and :class:`Not`.
    """

    def test(self, obj):
        raise NotImplementedError()

    def __call__(self, obj):
        return self.test(obj)

    def __eq__(self, obj):
        # `pred == obj` runs the test, enabling match/filter-style usage
        return self.test(obj)

    def __ne__(self, obj):
        return not self == obj

    def __and__(self, other):
        return And(self, other)

    def __or__(self, other):
        return Or(self, other)

    def __str__(self):
        return self.describe()

    def __repr__(self):
        return "<%s>" % self

    def describe(self, variable="X"):
        """Return a human-readable description of the predicate, e.g. ``X==5?``"""
        return self._describe(variable) + "?"

    def _describe(self, variable):
        raise NotImplementedError()


class FunctionPredicate(Predicate):
    """Predicate delegating to an arbitrary callable (identity-matched against other FunctionPredicates)."""

    def __init__(self, func, description=None):
        super(FunctionPredicate, self).__init__()
        self.func = func
        if description is None:
            description = func.__doc__
        self.description = description

    def test(self, obj):
        if isinstance(obj, FunctionPredicate):
            # comparing two function-predicates compares the wrapped functions
            return obj.func is self.func
        else:
            return self.func(obj)

    def _describe(self, variable):
        if self.description:
            return self.description % dict(var=variable)
        else:
            return "%s(%s)" % (self.func, variable)


class Equality(Predicate):
    """Predicate matching objects equal to a given value."""

    def __init__(self, value):
        super(Equality, self).__init__()
        self.value = value

    def test(self, obj):
        if isinstance(obj, Equality):
            return obj.value == self.value
        else:
            return obj == self.value

    def _describe(self, variable):
        return "%s==%s" % (variable, str(self.value))


def Inequality(value):
    """Predicate matching objects NOT equal to the given value."""
    return Not(Equality(value))


class Or(Predicate):
    """Predicate matching if ANY of the given predicates match."""

    def __init__(self, *preds):
        super(Or, self).__init__()
        # BUGFIX: materialize into a tuple - the previous bare `map` iterator was
        # exhausted by the first test, making every subsequent test vacuously False
        self.preds = tuple(make_predicate(p) for p in preds)

    def test(self, obj):
        return any(p == obj for p in self.preds)

    def _describe(self, variable):
        return " OR ".join("(%s)" % pred._describe(variable) for pred in self.preds)


class And(Predicate):
    """Predicate matching if ALL of the given predicates match."""

    def __init__(self, *preds):
        super(And, self).__init__()
        # BUGFIX: materialize into a tuple (same iterator-exhaustion bug as Or)
        self.preds = tuple(make_predicate(p) for p in preds)

    def test(self, obj):
        return all(p == obj for p in self.preds)

    def _describe(self, variable):
        return " AND ".join("(%s)" % pred._describe(variable) for pred in self.preds)


class Not(Predicate):
    """Predicate inverting another predicate."""

    def __init__(self, pred):
        super(Not, self).__init__()
        self.pred = make_predicate(pred)

    def test(self, obj):
        return not self.pred == obj

    def _describe(self, variable):
        return "NOT(%s)" % (self.pred._describe(variable),)


class _Dummy(Predicate):
    """Constant predicate - always (IGNORE) or never (FAIL) matches."""

    def __init__(self, retval, description=""):
        self.retval = retval
        self.description = description

    def test(self, other):
        return self.retval

    def _describe(self, variable):
        return self.description


IGNORE = _Dummy(True, "ANYTHING")
FAIL = _Dummy(False, "NOTHING")


def make_predicate(expr):
    """
    common utility for making various expressions into predicates

    - a Predicate is returned as-is
    - a type becomes an isinstance check
    - any other callable becomes a FunctionPredicate
    - anything else becomes an Equality check
    """
    if isinstance(expr, Predicate):
        return expr
    elif isinstance(expr, type):
        return FunctionPredicate(lambda obj, type=expr: isinstance(obj, type))
    elif callable(expr):
        return FunctionPredicate(expr)
    else:
        return Equality(expr)


P = make_predicate
def some_prop(self): 25 | ... return i_raise_an_exception() 26 | ... def __getattr__(self, attr): 27 | ... assert False 28 | ... prop = property(some_prop) 29 | ... safe_prop = safe_property(some_prop) 30 | >>> t = Test() 31 | >>> t.prop 32 | Traceback (most recent call last): 33 | ... 34 | AssertionError 35 | >>> t.safe_prop 36 | Traceback (most recent call last): 37 | ... 38 | AttributeError: blap 39 | ... 40 | During handling of the above exception, another exception occurred: 41 | ... 42 | Traceback (most recent call last): 43 | ... 44 | RuntimeError: Attribute error within a property (blap) 45 | """ 46 | if fget is not None: 47 | @wraps(fget) 48 | def callable(*args, **kwargs): 49 | try: 50 | return fget(*args, **kwargs) 51 | except AttributeError: 52 | _, exc, tb = sys.exc_info() 53 | raise RuntimeError("Attribute error within a property (%s)" % exc).with_traceback(tb) 54 | return _builtin_property(callable, fset, fdel, doc) 55 | else: 56 | return _builtin_property(fget, fset, fdel, doc) 57 | -------------------------------------------------------------------------------- /easypy/random.py: -------------------------------------------------------------------------------- 1 | import string 2 | import random 3 | from random import choice, sample 4 | 5 | choose = choice 6 | 7 | 8 | def random_nice_name(max_length=64, entropy=2, sep='-'): 9 | """Generates a nice random name from the dictionaries in words 10 | 11 | :param max_length: max length for the name. 12 | :type max_length: int, optional 13 | :param entropy: how unique th name will be, currently entropy - 1 adjectives are joined with one noun. 14 | :type entropy: int, optional 15 | :param sep: seperator between name parts. 16 | :type sep: str, optional 17 | 18 | :return: the generated name 19 | :rtype: str 20 | 21 | :raises ValueError: if ``param2`` is equal to ``param1``. 
class SemVerParseException(ValueError):
    """Raised by :meth:`SemVer.loads` when a string cannot be parsed as a semantic version."""
    pass


class SemVer(namedtuple("SemVer", "major minor patch build tag")):
    """ Semantic Version object

    From https://semver.org:
    Given a version number MAJOR.MINOR.PATCH, increment the:

    MAJOR version when you make incompatible API changes,
    MINOR version when you add functionality in a backwards-compatible manner, and
    PATCH version when you make backwards-compatible bug fixes.
    Additional labels for pre-release and build metadata are available as extensions to the MAJOR.MINOR.PATCH format.

    We use a fourth part, let's call it BUILD and define it is incremented sporadically.
    """

    @classmethod
    def loads(cls, string, *, separator='.', tag_separator='-', raise_on_failure=True):
        """
        Load a string into a SemVer object.

        :param string: The string representing the semantic version. Must adhere to semver format.
        :type string: str
        :param separator: Use a different version part separator.
        :type separator: str, optional
        :param tag_separator: Use a different tag separator.
        :type tag_separator: str, optional
        :param raise_on_failure: Whether to raise on failure or just return None
        :type raise_on_failure: bool, optional
        :raises SemVerParseException: when parsing fails and ``raise_on_failure`` is set
        """

        version, _, tag = string.partition(tag_separator)
        parts = version.split(separator)
        try:
            return cls(*parts, tag=tag)
        except (ValueError, TypeError) as e:
            # BUGFIX: also catch TypeError, raised when there are too many version
            # parts (e.g. "1.2.3.4.5") - it used to escape this handler even with
            # raise_on_failure=False. Also report the full original string, not
            # just the portion before the tag separator.
            if raise_on_failure:
                raise SemVerParseException("Error parsing %s: %s" % (string, e)) from None
            else:
                return None

    @classmethod
    def loads_fuzzy(cls, string):
        """
        Load a string into a SemVer without enforcing semver format.

        :param string: The string representing the semantic version.
        :type string: str
        """

        regex = re.compile(r"((?:\d+[.-])+)(.*)")
        # NOTE(review): an entirely non-matching string makes fullmatch() return None,
        # so this raises AttributeError rather than SemVerParseException - confirm intended
        string, tag = regex.fullmatch(string).groups()
        parts = re.split("[-.]", string)
        return cls(*filter(None, parts), tag=tag)

    def __new__(cls, major=0, minor=0, patch=None, build=None, *, tag=None):
        # parts may arrive as strings (from loads) - normalize to ints;
        # patch/build stay None when absent so dumps() can omit them
        return super().__new__(
            cls,
            major=int(major),
            minor=int(minor),
            patch=int(patch) if patch is not None else None,
            build=int(build) if build is not None else None,
            tag="" if tag is None else str(tag),
        )

    def __str__(self):
        return self.dumps()

    def __repr__(self):
        return "<{} {}>".format(self.__class__.__name__, self)

    def _to_tuple(self):
        # missing patch/build compare as 0 ("1.2" == "1.2.0")
        return (self.major, self.minor, self.patch or 0, self.build or 0)

    def __eq__(self, other):
        assert isinstance(other, self.__class__)
        return self._to_tuple() == other._to_tuple() and self.tag == other.tag

    def __hash__(self):
        # BUGFIX: overriding __eq__ implicitly set __hash__ to None, making SemVer
        # unhashable; restore a hash consistent with __eq__
        return hash((self._to_tuple(), self.tag))

    def __lt__(self, other):
        # NOTE(review): a tagged (pre-release) version sorts *after* the untagged one
        # ("" < "beta"), which differs from semver.org precedence rules - confirm intended
        assert isinstance(other, self.__class__)
        return (self._to_tuple(), self.tag) < (other._to_tuple(), other.tag)

    def __gt__(self, other):
        assert isinstance(other, self.__class__)
        return (self._to_tuple(), self.tag) > (other._to_tuple(), other.tag)

    def __ge__(self, other):
        return not self.__lt__(other)

    def __le__(self, other):
        return not self.__gt__(other)

    def dumps(self, *, separator='.', tag_separator='-'):
        """
        Dump SemVer into a string representation.

        :param separator: Use a different version part separator.
        :type separator: str, optional
        :param tag_separator: Use a different tag separator.
        :type tag_separator: str, optional
        """

        template = "{self.major}{separator}{self.minor}"
        if self.patch is not None:
            template += "{separator}{self.patch}"
        if self.build:
            template += "{separator}{self.build}"
        if self.tag:
            template += "{tag_separator}{self.tag}"

        return template.format(**locals())

    def copy(self, **kw):
        """
        Copy this SemVer object to a new one with optional changes.

        :param kw: Change some of the object's attributes
            (Any of major, minor, patch, build, tag).
        """

        return self.__class__(**dict(self._asdict(), **kw))

    def bump_build(self, clear_tag=True):
        """
        Return a copy of this object with the build part incremented by one

        :param clear_tag: Whether to clear the SemVer tag part
        :type clear_tag: bool, optional
        """

        return self.copy(
            build=(0 if self.build is None else self.build) + 1,
            tag='' if clear_tag else self.tag)

    def bump_patch(self, clear_tag=True):
        """
        Return a copy of this object with the patch part incremented by one
        Bumping patch resets build part.

        :param clear_tag: Whether to clear the SemVer tag part
        :type clear_tag: bool, optional
        """

        return self.copy(
            build=0, patch=self.patch + 1,
            tag='' if clear_tag else self.tag)

    def bump_minor(self, clear_tag=True):
        """
        Return a copy of this object with the minor part incremented by one
        Bumping minor resets build and patch parts.

        :param clear_tag: Whether to clear the SemVer tag part
        :type clear_tag: bool, optional
        """

        return self.copy(
            build=0, patch=0, minor=self.minor + 1,
            tag='' if clear_tag else self.tag)

    def bump_major(self, clear_tag=True):
        """
        Return a copy of this object with the major part incremented by one
        Bumping major resets build, patch and minor parts.

        :param clear_tag: Whether to clear the SemVer tag part
        :type clear_tag: bool, optional
        """

        return self.copy(
            build=0, patch=0, minor=0, major=self.major + 1,
            tag='' if clear_tag else self.tag)


SMV = SemVer.loads
padding 40 | 41 | for column in columns: 42 | self.add_column(column) 43 | 44 | _ALIGN_MAP = dict(left='<', right='>', center='^') 45 | 46 | def add_column(self, column: Column): 47 | self.columns.append(column) 48 | 49 | def add_row(self, **row): 50 | self.data.append(row) 51 | 52 | def render(self): 53 | rendered = defaultlist(list) 54 | columns = [] 55 | 56 | def _get_value(data, value): 57 | ret = data.get(value) 58 | if ret is None: 59 | ret = '' 60 | return ret 61 | 62 | for column in self.columns: 63 | if not column.visible: 64 | continue 65 | rows = [_get_value(data, column.name) for data in self.data] 66 | if not any(filter(lambda i: i != '', rows)) and column.drop_if_empty: 67 | continue 68 | columns.append(column) 69 | 70 | if column.max_width is None: 71 | column.max_width = self.max_col_width 72 | if column.align is None: 73 | column.align = self.align 74 | if column.header_align is None: 75 | column.header_align = self.header_align 76 | if column.padding is None: 77 | column.padding = self.padding 78 | 79 | raw_data = [column.title] + rows 80 | colored_data = [colorize(str(data)) for data in raw_data] 81 | uncolored_data = [uncolored(data) for data in colored_data] 82 | max_width = column.max_width or max(len(data) for data in uncolored_data) 83 | for i, data in enumerate(colored_data): 84 | align = column.header_align if i == 0 else column.align 85 | coloring_spacing = len(colored_data[i]) - len(uncolored_data[i]) 86 | spacing = max_width + coloring_spacing 87 | format_string = "{{data:{align}{spacing}}}".format(align=self._ALIGN_MAP[align], spacing=spacing) 88 | rendered[i].append(format_string.format(data=data)) 89 | 90 | output = StringIO() 91 | for r_i, row in enumerate(rendered): 92 | r_parts = [] 93 | 94 | sep = self.HEADER_SEP if r_i == 0 else self.SEPARATORS[r_i % len(self.SEPARATORS)] 95 | 96 | for col_i, col in enumerate(row): 97 | column = columns[col_i] 98 | padding = column.padding * " " 99 | if column.max_width and r_i > 0: 100 | col = 
class Token(str):
    """
    When ``None`` is not enough, and ``Enum`` is too much.
    Use to indicate a desired behavior, instead of some specific value:

        from easypy.tokens import AUTO, MAX

        def create_file(fname=AUTO, size=AUTO):

            if size is AUTO:
                size = get_capacity() / 2
            elif size is MAX:
                size = get_capacity()

            if fname is AUTO:
                from .random import random_nice_name
                fname = random_nice_name()
            os.truncate(fname, size)

    Also, to support use as cli options, a token can be compared with an str:

        AUTO == ''
        AUTO == 'AUTO'
        AUTO == ''
        AUTO == 'auto'

    """

    # interning registry: one Token instance per (exact) stripped name
    _all = {}

    def __new__(cls, name):
        name = name.strip("<>")
        cached = cls._all.get(name)
        if cached is not None:
            return cached
        self = super().__new__(cls, "<%s>" % name)
        cls._all[name] = self
        return self

    def __repr__(self):
        return self

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            # tokens are interned, so identity is equality
            return self is other
        if isinstance(other, str):
            # we allows this so that cli flags can be easily transformed into tokens (AUTO == 'auto')
            return self.strip("<>").lower() == other.strip("<>").lower()
        return False

    # we're already case insensitive when comparing
    def lower(self):
        return self

    def upper(self):
        return self

    def __hash__(self):
        return super().__hash__()
TIMESTAMP_PATTERN = "%Y-%m-%d %H:%M:%S"


def to_timestamp(t):
    """Render epoch-seconds ``t`` as a 19-char local-time string; a centered dash when ``t`` is None."""
    if t is None:
        return "-".center(19)
    return time.strftime(TIMESTAMP_PATTERN, time.localtime(t))
class TimestampedStream(object):
    """
    Wraps a line-iterable log stream, extracting a timestamp from each line.

    :param stream: an iterable of log lines (e.g. an open file)
    :param prefix: prepended to lines in which a timestamp was found; lines
                   without one get whitespace of the same width, keeping the
                   merged output aligned
    """

    def __init__(self, stream, prefix="> "):
        self.name = stream.name if hasattr(stream, "name") else repr(stream)
        self.stream = iter(stream)
        self.prefix = prefix
        self.filler = " " * len(self.prefix)  # pads un-timestamped lines to match the prefix
        self._untimestamp = None  # cached (converter, regex) once this stream's format is detected

    def __gt__(self, other):
        # arbitrary but consistent ordering, so (ts, line, stream) tuples stay
        # comparable inside the PriorityQueue when ts and line compare equal
        return id(self) > id(other)

    def get_next(self):
        "Get next line in the stream and the stream itself, or None if stream ended"
        try:
            line = next(self.stream)
        except StopIteration:
            return
        else:
            ts = self.get_timestamp(uncolored(line))
            return ts, (self.prefix if ts else self.filler) + line, self

    def get_timestamp(self, line):
        """
        Find the timestamp if exists, and return as a float.
        Returns 0 when no timestamp was found.
        """
        if not line.startswith(" "):  # indented lines are continuations - never timestamped
            if not self._untimestamp:
                for regex, converter in TIMESTAMP_GETTERS:
                    match = regex.search(line)
                    if match:
                        # cache the regex and conversion funcs for later
                        self._untimestamp = converter, regex
                        break
            else:
                converter, regex = self._untimestamp
                match = regex.search(line)
            if match:
                return converter(*match.groups())
        # BUGFIX: indented lines previously fell off the end and returned None,
        # which is not orderable against the float/0 timestamps of other lines
        # and could raise TypeError inside the PriorityQueue's tuple comparison.
        # 0 is falsy like None, so all downstream truthiness checks are unchanged.
        return 0
def iter_zipped_logs(*log_streams, prefix="> ", show_intervals=None, show_timestamp=False):
    """
    Line iterator that merges lines from different log streams based on their timestamp.
    Timestamp patterns are found in the TIMESTAMP_GETTERS list in this module.

    :param prefix: Prepend this prefix to each line where a timestamp was identified.
        To give a stream its own prefix, pass that stream as a ``(stream, prefix)`` tuple.
    :param show_intervals: `s` or `ms` - Prepend the duration (in secs or msecs) since the previous log line
    :param show_timestamp: Prepend the full timestamp of each line to the output
    """

    # A sorted queue of (timestamp, line, stream) tuples (lowest-timestamp first)
    queue = PriorityQueue()
    stream_names = []
    for source in log_streams:
        tstream = TimestampedStream(*source) if isinstance(source, tuple) else TimestampedStream(source, prefix)
        entry = tstream.get_next()
        if entry:
            stream_names.append(tstream.name)
            queue.put(entry)

    if show_intervals:
        from easypy.units import Duration

        def formatted(line, current_ts, last_ts):
            if current_ts and last_ts:
                gap = Duration(current_ts - last_ts).render(show_intervals)
            else:
                gap = ""
            return "{:>7}{}".format(gap, line)
    else:
        def formatted(line, current_ts, last_ts):
            return line

    if show_timestamp:
        _inner = formatted

        def formatted(line, current_ts, last_ts):
            stamped = _inner(line, current_ts, last_ts)
            return "{:%Y-%m-%d %H:%M:%S.%f} {}".format(datetime.fromtimestamp(current_ts), stamped)

    last_ts = None
    while not queue.empty():
        current_ts, line, stream = queue.get()
        yield formatted(line, current_ts, last_ts)
        last_ts = current_ts
        # drain this stream's continuation lines until its timestamp advances
        while True:
            entry = stream.get_next()
            if not entry:
                break  # this stream is exhausted
            ts, line, stream = entry
            if ts and ts > current_ts:
                queue.put((ts, line, stream))
                break  # jumped ahead in time - let the queue re-arbitrate
            yield formatted(line, ts, last_ts)
            if ts:
                last_ts = ts


def main():
    """CLI entry point: merge the given log files (or STDIN) by timestamp onto stdout."""
    import sys
    import argparse
    parser = argparse.ArgumentParser(description='ZipLog - merge logs by timestamps')
    parser.add_argument(
        'logs', metavar='N', type=str, nargs='+',
        help='Log files; Use "-" for STDIN')
    parser.add_argument(
        '-i', '--interval', dest='interval', default=None,
        help="Show interval by seconds (s), or milliseconds (ms)")
    parser.add_argument(
        '-p', '--prefix', dest='prefix', default="> ",
        help="A prefix to prepend to timestamped lines")
    opts = parser.parse_args(sys.argv[1:])

    files = [sys.stdin if f == "-" else open(f) for f in opts.logs]
    try:
        for line in iter_zipped_logs(*files, show_intervals=opts.interval, prefix=opts.prefix):
            print(line, end="")
    except BrokenPipeError:
        pass  # downstream pager/pipe closed early - not an error
def handle_usr_signal(sig, frame):
    """
    SIGUSR2 handler that cycles through debug actions, one per delivery:
    dump all thread stacks, switch console logging to DEBUG, switch back to INFO.
    """
    extra = dict(host="---")

    def dump_stacks():
        from easypy.threadtree import get_thread_stacks
        stacks = get_thread_stacks()
        logbook.info("\n%s", stacks, extra=extra)

    actions = {0: dump_stacks, 1: set_verbose, 2: set_info}

    global _action
    func = actions[_action % len(actions)]
    _action += 1
    # BUGFIX: the format string had a single placeholder for two arguments
    # (_action and func), which makes the log-record formatting fail
    logbook.info("YELLOW<<%s>> -> CYAN<<%s>>", _action, func, extra=extra)
    try:
        func()
    except Exception:  # best-effort: a failed debug action must not kill the process
        pass
set_level("DEBUG") 80 | 81 | 82 | def set_info(): 83 | set_level("INFO") 84 | 85 | 86 | if __name__ == "__main__": 87 | configure() 88 | 89 | from easypy.logging import EasypyLogger 90 | from easypy.concurrency import MultiObject 91 | from time import sleep 92 | import random 93 | 94 | def log(_, depth=0): 95 | logger = EasypyLogger("easypy.test.%s" % depth) 96 | for level in range(logbook.TRACE, logbook.CRITICAL + 1): 97 | with logger.indented("Level is: %s", level): 98 | logger.info("hey") 99 | logger.info("GREEN<>") 100 | logger.warning("YELLOW<>") 101 | logger.error("error!") 102 | sleep(random.random() * .2) 103 | logger.trace("ping") 104 | if random.random() > 0.97: 105 | log(depth + 1) 106 | 107 | set_level(level) 108 | 109 | MultiObject("abcde", log_ctx=lambda x: dict(domain=x)).call(log) 110 | -------------------------------------------------------------------------------- /examples/logging_init.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | import logging.config 4 | import logging.handlers 5 | 6 | from easypy.logging import initialize 7 | from easypy.logging import get_console_handler 8 | 9 | logging.addLevelName(logging.WARN, "WARN") # instead of "WARNING", so that it takes less space... 10 | logging.addLevelName(logging.NOTSET, "NA") # instead of "NOTSET", so that it takes less space... 11 | 12 | 13 | LOG_PATH = "." 
14 | PRIMARY_LOG_FILE = os.path.join(LOG_PATH, 'easypy.log') 15 | LOG_LEVEL = "INFO" 16 | 17 | if os.getenv('TERM_LOG_STDOUT'): 18 | console_stream = 'stdout' 19 | else: 20 | console_stream = 'stderr' 21 | 22 | 23 | CONFIG = { 24 | 'version': 1, 25 | 'disable_existing_loggers': False, 26 | 'formatters': { 27 | 'detailed': { 28 | 'format': '%(asctime)s|%(process)2s:%(threadName)-25s|%(name)-40s|%(levelname)-5s|' 29 | '%(funcName)-30s |%(host)-35s|%(message)s', 30 | }, 31 | 'console': { 32 | '()': 'easypy.logging.ConsoleFormatter', 33 | 'fmt': '%(levelcolor)s<<%(asctime)s|%(levelname)-5s|%(host)-40s>>|%(decoration)s%(message)s', 34 | 'datefmt': '%H:%M:%S' 35 | }, 36 | 'yaml': { 37 | '()': 'easypy.logging.YAMLFormatter', 38 | 'allow_unicode': True, 39 | 'explicit_start': True, 40 | 'explicit_end': True, 41 | 'encoding': 'utf-8', 42 | }, 43 | }, 44 | 'filters': { 45 | 'thread_control': { 46 | '()': 'easypy.logging.ThreadControl' 47 | } 48 | }, 49 | 'handlers': { 50 | 'console': { 51 | 'class': 'logging.StreamHandler', 52 | 'formatter': "console", # if sys.stdout.isatty() else "detailed", 53 | 'filters': ['thread_control'], 54 | 'level': LOG_LEVEL, 55 | 'stream': 'ext://sys.%s' % console_stream 56 | }, 57 | 'main_file': { 58 | 'class': 'logging.handlers.RotatingFileHandler', 59 | 'filename': PRIMARY_LOG_FILE, 60 | 'mode': 'w', 61 | 'formatter': 'detailed', 62 | 'level': 'DEBUG', 63 | 'maxBytes': 2**20 * 10, 64 | 'backupCount': 5, 65 | 'delay': True, 66 | 'encoding': 'utf-8', 67 | }, 68 | 'aux': { 69 | 'class': 'logging.handlers.RotatingFileHandler', 70 | 'filename': os.path.join(LOG_PATH, 'aux.log'), 71 | 'mode': 'w', 72 | 'formatter': 'detailed', 73 | 'level': 'DEBUG', 74 | 'maxBytes': 2**20 * 10, 75 | 'backupCount': 5, 76 | 'delay': True, 77 | 'encoding': 'utf-8', 78 | }, 79 | 'boto': { 80 | 'class': 'logging.handlers.RotatingFileHandler', 81 | 'filename': os.path.join(LOG_PATH, 'boto.log'), 82 | 'mode': 'w', 83 | 'formatter': 'detailed', 84 | 'level': 'DEBUG', 85 | 
def handle_usr_signal(sig, frame):
    """
    SIGUSR2 handler that cycles through debug actions, one per delivery:
    dump all thread stacks, switch console logging to DEBUG, switch back to INFO.
    """
    extra = dict(host="---")

    def dump_stacks():
        from easypy.threadtree import get_thread_stacks
        stacks = get_thread_stacks()
        logging.info("\n%s", stacks, extra=extra)

    actions = {0: dump_stacks, 1: set_verbose, 2: set_info}

    global _action
    func = actions[_action % len(actions)]
    _action += 1
    # BUGFIX: the format string had a single placeholder for two arguments
    # (_action and func), which makes the logging call fail to format the record
    logging.info("YELLOW<<%s>> -> CYAN<<%s>>", _action, func, extra=extra)
    try:
        func()
    except Exception:  # best-effort: a failed debug action must not kill the process
        pass
toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | 14 | 15 | Indices and tables 16 | ================== 17 | 18 | * :ref:`genindex` 19 | * :ref:`modindex` 20 | * :ref:`search` 21 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | filterwarnings = 3 | ignore:'async':DeprecationWarning 4 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import setuptools 4 | 5 | with open("README.md", "r") as fh: 6 | long_description = fh.read() 7 | 8 | setuptools.setup( 9 | name='real-easypy', 10 | version='0.5.0', 11 | description='easypy is a collection of python modules that makes developers happy', 12 | author='Ofer Koren', 13 | author_email='koreno@gmail.com', 14 | url='https://github.com/real-easypy/easypy', 15 | license='BSD', 16 | long_description=long_description, 17 | long_description_content_type="text/markdown", 18 | packages=setuptools.find_packages(), 19 | classifiers=[ 20 | "Programming Language :: Python :: 3", 21 | "License :: OSI Approved :: BSD License", 22 | "Operating System :: OS Independent", 23 | ], 24 | entry_points={ 25 | 'console_scripts': [ 26 | 'eziplog=easypy.ziplog:main', 27 | 'ezcolorize=easypy.colors:main', 28 | ] 29 | }, 30 | ) 31 | 32 | 33 | # how to upload a package: 34 | # 0. increment the version above 35 | # 1. python3 setup.py sdist bdist_wheel 36 | # 2. 
@pytest.fixture
def clean_modules():
    """
    Unload all easypy/logbook/logging modules after each test, so that every
    import test in this file starts from a clean interpreter state.
    """
    yield
    # BUGFIX: @pytest.yield_fixture is deprecated and removed in modern pytest;
    # plain @pytest.fixture has supported yield-style fixtures since pytest 3.0
    roots = "easypy", "logbook", "logging"
    for n in sorted(sys.modules):
        if any(n.startswith(root) for root in roots):
            sys.modules.pop(n)
THREAD_LOGGING_CONTEXT.update_defaults(domain='domain') 16 | 17 | if use_logbook: 18 | import logbook 19 | from logbook.handlers import StderrHandler 20 | from easypy.logging import ConsoleFormatter 21 | handler = StderrHandler(level=logbook.DEBUG) 22 | handler.formatter = ConsoleFormatter( 23 | "{record.extra[levelcolor]}<<" 24 | "{record.time:%Y-%m-%d %H:%M:%S}|" 25 | "{record.filename}:{record.lineno}|" 26 | "{record.level_name:8}>>| " 27 | "{record.extra[domain]:15}| " 28 | "{record.extra[decoration]}{record.message}" 29 | ) 30 | handler.push_application() 31 | else: 32 | import logging 33 | from logging import StreamHandler 34 | from easypy.logging import ConsoleFormatter 35 | logging.addLevelName(logging.WARN, "WARN") # instead of "WARNING", so that it takes less space... 36 | logging.addLevelName(logging.NOTSET, "NA") # instead of "NOTSET", so that it takes less space... 37 | formatter = ConsoleFormatter('%(levelcolor)s<<%(asctime)s|%(levelname)-5s|%(domain)-15s>>|%(decoration)s%(message)s', datefmt='%H:%M:%S') 38 | handler = StreamHandler() 39 | handler.setFormatter(formatter) 40 | logging.root.addHandler(handler) 41 | 42 | 43 | import pytest 44 | 45 | 46 | @pytest.fixture 47 | def is_logbook(): 48 | return use_logbook 49 | -------------------------------------------------------------------------------- /tests/indentable_buffer1.txt: -------------------------------------------------------------------------------- 1 | .- Exc ----------------------------------------------------------------------------------------------------------------. 2 | | a | 3 | | b | 4 | +---.- Header2 --------------------------------------------------------------------------------------------------------. 5 | | | 0x....+ 00 1 2 3 4 5 6 7 8 9 A B C D E F 10 11 12 13 14 15 16 17 | -------- -------- -------- | 6 | | | 0x0000: 4a 9c e8 5a 21 c2 e6 8b a0 01 cb c3 2e 78 5d 11 9b 73 43 1c b2 cd b3 9e | J..Z!... .....x]. .sC..... 
| 7 | | | 0x0018: 4d f7 13 60 c8 ce f8 67 31 48 26 e2 9b d1 a8 fd 14 08 55 17 35 c7 03 71 | M..`...g 1H&..... ..U.5..q | 8 | | | 0x0030: ac da e6 29 71 7d 7d 54 34 34 9e b5 3b f1 2e f6 2a 16 ba e0 7e 6d 96 6f | ...)q}}T 44..;... *...~m.o | 9 | | | 0x0048: b8 a4 54 6c 96 8a c7 9a c9 c4 f2 b1 9e 13 0b e2 69 c6 d8 92 de fa 62 6e | ..Tl.... ........ i.....bn | 10 | | | 0x0060: 36 ea f5 5f 79 3e 15 c5 d5 a0 05 bd ea b8 ba 80 2b 50 a7 d8 ad bf 91 3c | 6.._y>.. ........ +P.....< | 11 | | | 0x0078: ca c5 94 e6 fc 2d ab 34 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 | .....-.4 ABABABAB ABABABAB | 12 | | | 0x0090: 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 | ABABABAB ABABABAB ABABABAB | 13 | | | * | | 14 | | | 0x0150: 41 42 41 42 41 42 41 42 41 42 41 42 43 41 42 41 42 41 42 41 42 41 42 41 | ABABABAB ABABCABA BABABABA | 15 | | | 0x0168: 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 | BABABABA BABABABA BABABABA | 16 | | | * | | 17 | | | 0x0240: 42 dc 29 6e 2f 9a 4e 79 9f 03 c7 6a 14 08 1a 08 91 40 ad ac a9 28 1a 8b | B.)n/.Ny ...j.... .@...(.. | 18 | | | 0x0258: 9f 81 b0 75 73 87 9e 34 f9 99 31 77 33 39 d0 98 58 6f 6b 48 a6 c9 52 76 | ...us..4 ..1w39.. XokH..Rv | 19 | | | 0x0270: bc ac 90 3b ac 83 c8 ba 60 56 a9 c3 75 b2 cc 56 9d 06 b3 f0 1e b4 4b 10 | ...;.... `V..u..V ......K. | 20 | | | 0x0288: 9c 83 dc e7 cb 0c 9a 8c 80 01 30 8c 61 f8 35 5a 9c | ........ ..0.a.5Z . | 21 | | `-------------------------------------------------------------------------------------------------------- Header2 -* 22 | | hello | 23 | | world | 24 | +---.- Header2 --------------------------------------------------------------------------------------------------------. 25 | | +---.- Header3 ----------------------------------------------------------------------------------------------------. 
26 | | | | text3 | 27 | | | `---------------------------------------------------------------------------------------------------- Header3 -* 28 | | | text2 | 29 | | `-------------------------------------------------------------------------------------------------------- Header2 -* 30 | `---------------------------------------------------------------------------------------------------------------- Exc -* 31 | -------------------------------------------------------------------------------- /tests/indentable_buffer2.txt: -------------------------------------------------------------------------------- 1 | ┬╼ Exc ╾───────────────────────────────╮ 2 | │ a │ 3 | │ b │ 4 | ├───┬╼ Header2 ╾───────────────────────╮ 5 | │ │ 0x....+ 00 1 2 3 4 5 6 7 8 9 A B C D E F 10 11 12 13 14 15 16 17 | -------- -------- -------- 6 | │ │ 0x0000: 4a 9c e8 5a 21 c2 e6 8b a0 01 cb c3 2e 78 5d 11 9b 73 43 1c b2 cd b3 9e | J..Z!... .....x]. .sC..... 7 | │ │ 0x0018: 4d f7 13 60 c8 ce f8 67 31 48 26 e2 9b d1 a8 fd 14 08 55 17 35 c7 03 71 | M..`...g 1H&..... ..U.5..q 8 | │ │ 0x0030: ac da e6 29 71 7d 7d 54 34 34 9e b5 3b f1 2e f6 2a 16 ba e0 7e 6d 96 6f | ...)q}}T 44..;... *...~m.o 9 | │ │ 0x0048: b8 a4 54 6c 96 8a c7 9a c9 c4 f2 b1 9e 13 0b e2 69 c6 d8 92 de fa 62 6e | ..Tl.... ........ i.....bn 10 | │ │ 0x0060: 36 ea f5 5f 79 3e 15 c5 d5 a0 05 bd ea b8 ba 80 2b 50 a7 d8 ad bf 91 3c | 6.._y>.. ........ 
+P.....< 11 | │ │ 0x0078: ca c5 94 e6 fc 2d ab 34 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 | .....-.4 ABABABAB ABABABAB 12 | │ │ 0x0090: 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 | ABABABAB ABABABAB ABABABAB 13 | │ │ * | 14 | │ │ 0x0150: 41 42 41 42 41 42 41 42 41 42 41 42 43 41 42 41 42 41 42 41 42 41 42 41 | ABABABAB ABABCABA BABABABA 15 | │ │ 0x0168: 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 | BABABABA BABABABA BABABABA 16 | │ │ * | 17 | │ │ 0x0240: 42 dc 29 6e 2f 9a 4e 79 9f 03 c7 6a 14 08 1a 08 91 40 ad ac a9 28 1a 8b | B.)n/.Ny ...j.... .@...(.. 18 | │ │ 0x0258: 9f 81 b0 75 73 87 9e 34 f9 99 31 77 33 39 d0 98 58 6f 6b 48 a6 c9 52 76 | ...us..4 ..1w39.. XokH..Rv 19 | │ │ 0x0270: bc ac 90 3b ac 83 c8 ba 60 56 a9 c3 75 b2 cc 56 9d 06 b3 f0 1e b4 4b 10 | ...;.... `V..u..V ......K. 20 | │ │ 0x0288: 9c 83 dc e7 cb 0c 9a 8c 80 01 30 8c 61 f8 35 5a 9c | ........ ..0.a.5Z . 21 | │ ╰───────────────────────╼ Header2 ╾╯ 22 | │ hello │ 23 | │ world │ 24 | ├───┬╼ Header2 ╾───────────────────────╮ 25 | │ ├───┬╼ Header3 ╾───────────────────╮ 26 | │ │ │ text3 │ 27 | │ │ ╰───────────────────╼ Header3 ╾╯ 28 | │ │ text2 │ 29 | │ ╰───────────────────────╼ Header2 ╾╯ 30 | ╰───────────────────────────────╼ Exc ╾╯ 31 | -------------------------------------------------------------------------------- /tests/indentable_buffer3.txt: -------------------------------------------------------------------------------- 1 | ┬╼ Exc ╾───────────────────────────────╮ 2 | │ a 3 | │ b 4 | ├───┬╼ Header2 ╾───────────────────────╮ 5 | │ │ 0x....+ 00 1 2 3 4 5 6 7 8 9 A B C D E F 10 11 12 13 14 15 16 17 | -------- -------- -------- 6 | │ │ 0x0000: 4a 9c e8 5a 21 c2 e6 8b a0 01 cb c3 2e 78 5d 11 9b 73 43 1c b2 cd b3 9e | J..Z!... .....x]. .sC..... 7 | │ │ 0x0018: 4d f7 13 60 c8 ce f8 67 31 48 26 e2 9b d1 a8 fd 14 08 55 17 35 c7 03 71 | M..`...g 1H&..... 
..U.5..q 8 | │ │ 0x0030: ac da e6 29 71 7d 7d 54 34 34 9e b5 3b f1 2e f6 2a 16 ba e0 7e 6d 96 6f | ...)q}}T 44..;... *...~m.o 9 | │ │ 0x0048: b8 a4 54 6c 96 8a c7 9a c9 c4 f2 b1 9e 13 0b e2 69 c6 d8 92 de fa 62 6e | ..Tl.... ........ i.....bn 10 | │ │ 0x0060: 36 ea f5 5f 79 3e 15 c5 d5 a0 05 bd ea b8 ba 80 2b 50 a7 d8 ad bf 91 3c | 6.._y>.. ........ +P.....< 11 | │ │ 0x0078: ca c5 94 e6 fc 2d ab 34 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 | .....-.4 ABABABAB ABABABAB 12 | │ │ 0x0090: 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 | ABABABAB ABABABAB ABABABAB 13 | │ │ * | 14 | │ │ 0x0150: 41 42 41 42 41 42 41 42 41 42 41 42 43 41 42 41 42 41 42 41 42 41 42 41 | ABABABAB ABABCABA BABABABA 15 | │ │ 0x0168: 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 42 41 | BABABABA BABABABA BABABABA 16 | │ │ * | 17 | │ │ 0x0240: 42 dc 29 6e 2f 9a 4e 79 9f 03 c7 6a 14 08 1a 08 91 40 ad ac a9 28 1a 8b | B.)n/.Ny ...j.... .@...(.. 18 | │ │ 0x0258: 9f 81 b0 75 73 87 9e 34 f9 99 31 77 33 39 d0 98 58 6f 6b 48 a6 c9 52 76 | ...us..4 ..1w39.. XokH..Rv 19 | │ │ 0x0270: bc ac 90 3b ac 83 c8 ba 60 56 a9 c3 75 b2 cc 56 9d 06 b3 f0 1e b4 4b 10 | ...;.... `V..u..V ......K. 20 | │ │ 0x0288: 9c 83 dc e7 cb 0c 9a 8c 80 01 30 8c 61 f8 35 5a 9c | ........ ..0.a.5Z . 
21 | │ ╰───────────────────────╼ Header2 ╾╯ 22 | │ hello 23 | │ world 24 | ├───┬╼ Header2 ╾───────────────────────╮ 25 | │ ├───┬╼ Header3 ╾───────────────────╮ 26 | │ │ │ text3 27 | │ │ ╰───────────────────╼ Header3 ╾╯ 28 | │ │ text2 29 | │ ╰───────────────────────╼ Header2 ╾╯ 30 | ╰───────────────────────────────╼ Exc ╾╯ 31 | -------------------------------------------------------------------------------- /tests/test_aliasing.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from easypy.aliasing import aliases 3 | 4 | 5 | def test_aliasing_static(): 6 | 7 | @aliases("this") 8 | class Foo(): 9 | this = dict(a=1) 10 | 11 | f = Foo() 12 | assert f.get("a") == 1 13 | 14 | 15 | def test_aliasing_dynamic(): 16 | 17 | class Foo(): 18 | def __init__(self): 19 | self.this = dict(a=1) 20 | 21 | with pytest.raises(AssertionError): 22 | Foo = aliases("this")(Foo) 23 | 24 | Foo = aliases("this", static=False)(Foo) 25 | 26 | f = Foo() 27 | assert f.get("a") == 1 28 | 29 | 30 | def test_aliasing_inherit(): 31 | 32 | class Foo(int): 33 | def __init__(self, x): 34 | self.this = dict(a=1) 35 | 36 | with pytest.raises(AssertionError): 37 | Foo = aliases("this")(Foo) 38 | 39 | Foo = aliases("this", static=False)(Foo) 40 | 41 | f = Foo("5") 42 | assert f.get("a") == 1 43 | assert f == 5 44 | 45 | 46 | def test_aliasing_infinite_recursion_exception(): 47 | @aliases('bar', static=False) 48 | class Foo: 49 | def __init__(self): 50 | self.bar = Bar(self) 51 | 52 | def __repr__(self): 53 | return 'Foo()' 54 | 55 | class Bar: 56 | def __init__(self, foo): 57 | self.foo = foo 58 | 59 | def baz(self): 60 | return self.foo.baz() 61 | 62 | with pytest.raises(getattr(__builtins__, 'RecursionError', RuntimeError)) as e: 63 | Foo().baz() 64 | assert str(e.value) == "Infinite recursion trying to access 'baz' on Foo() (via Foo.bar.baz)" 65 | -------------------------------------------------------------------------------- /tests/test_bunch.py: 
def test_bunchify():
    """bunchify should recurse into dicts and lists, but leave tuples and scalars intact."""
    bunch = bunchify(dict(a=[dict(b=5), 9, (1, 2)], c=8))
    assert bunch.a[0].b == 5
    assert bunch.a[1] == 9
    assert isinstance(bunch.a[2], tuple)
    assert bunch.c == 8
    assert bunch.pop("c") == 8
inc('b', x=random.random()) 60 | assert (data.a, data.b) == (3, 3) 61 | 62 | inc.cache_clear() 63 | inc('a', x=random.random()) 64 | inc('b', x=random.random()) 65 | inc('a', x=random.random()) 66 | inc('b', x=random.random()) 67 | assert (data.a, data.b) == (4, 4) 68 | inc.cache_pop('a', x=random.random()) 69 | inc('a', x=random.random()) 70 | inc('b', x=random.random()) 71 | 72 | 73 | def test_timecache_method(): 74 | ts = 0 75 | 76 | def get_ts(): 77 | return ts 78 | 79 | class Foo: 80 | def __init__(self, prefix): 81 | self.prefix = prefix 82 | 83 | @timecache(expiration=1, get_ts_func=get_ts) 84 | def foo(self, *args): 85 | return [self.prefix] + list(args) 86 | 87 | @foo.key_func 88 | def _(args): 89 | return args 90 | 91 | foo1 = Foo(1) 92 | foo2 = Foo(2) 93 | 94 | assert foo1.foo(1, 2, 3) == foo1.foo(1, 2, 3) 95 | assert foo1.foo(1, 2, 3) != foo1.foo(1, 2, 4) 96 | assert foo1.foo(1, 2, 3) != foo2.foo(1, 2, 3) 97 | 98 | foo1_1 = foo1.foo(1) 99 | foo1_2 = foo1.foo(2) 100 | foo2_1 = foo2.foo(1) 101 | foo2_2 = foo2.foo(2) 102 | 103 | assert foo1_1 == [1, 1] 104 | assert foo1_2 == [1, 2] 105 | assert foo2_1 == [2, 1] 106 | assert foo2_2 == [2, 2] 107 | 108 | assert foo1_1 is foo1.foo(1) 109 | assert foo1_2 is foo1.foo(2) 110 | assert foo2_1 is foo2.foo(1) 111 | assert foo2_2 is foo2.foo(2) 112 | 113 | assert foo1_1 is foo1.foo(1) 114 | assert foo1_2 is foo1.foo(2) 115 | assert foo2_1 is foo2.foo(1) 116 | assert foo2_2 is foo2.foo(2) 117 | 118 | foo1.foo.cache_clear() 119 | foo2.foo.cache_pop(1) 120 | 121 | assert foo1_1 is not foo1.foo(1) 122 | assert foo1_2 is not foo1.foo(2) 123 | assert foo2_1 is not foo2.foo(1) 124 | assert foo2_2 is foo2.foo(2) 125 | 126 | 127 | def test_timecache_getattr(): 128 | ts = 0 129 | 130 | def get_ts(): 131 | return ts 132 | 133 | class Foo: 134 | def __init__(self): 135 | self.count = 0 136 | 137 | @timecache(expiration=1, get_ts_func=get_ts) 138 | def __getattr__(self, name): 139 | self.count += 1 140 | return [self.count, name] 
141 | 142 | foo = Foo() 143 | 144 | assert foo.bar == [1, 'bar'] 145 | assert foo.bar == [1, 'bar'] 146 | assert foo.baz == [2, 'baz'] 147 | 148 | ts += 1 149 | 150 | assert foo.baz == [3, 'baz'] 151 | assert foo.bar == [4, 'bar'] 152 | 153 | 154 | @pytest.yield_fixture() 155 | def persistent_cache_path(): 156 | cache_path = '/tmp/test_pcache_%s' % uuid4() 157 | try: 158 | yield cache_path 159 | finally: 160 | try: 161 | os.unlink("%s.db" % cache_path) 162 | except: # noqa 163 | pass 164 | 165 | 166 | def test_persistent_cache(persistent_cache_path): 167 | ps = PersistentCache(persistent_cache_path, version=1) 168 | TEST_KEY = "test_key" 169 | TEST_VALUE = "test_value" 170 | ps.set(TEST_KEY, TEST_VALUE) 171 | assert ps.get(TEST_KEY) == TEST_VALUE, "Value does not match set value" 172 | 173 | ps = PersistentCache(persistent_cache_path, version=1) 174 | assert ps.get(TEST_KEY) == TEST_VALUE, "Value does not match set value after reopen" 175 | 176 | ps = PersistentCache(persistent_cache_path, version=2) 177 | with pytest.raises(KeyError): # Changed version should invalidate cache 178 | ps.get(TEST_KEY) 179 | 180 | # Default values 181 | assert ps.get(TEST_KEY, default=None) is None, "Wrong default value returnen(not None)" 182 | assert ps.get(TEST_KEY, default="1") == "1", "Wrong default value returned" 183 | 184 | # Cached func should be called only once 185 | value_generated = False 186 | use_cache = True 187 | 188 | class UnnecessaryFunctionCall(Exception): 189 | pass 190 | 191 | ps = PersistentCache(persistent_cache_path, version=2, ignored_keywords="x") 192 | 193 | @ps(validator=lambda _, **__: use_cache) 194 | def cached_func(x): 195 | nonlocal value_generated 196 | if value_generated: 197 | raise UnnecessaryFunctionCall() 198 | value_generated = True 199 | return True 200 | 201 | assert cached_func(x=random.random()) is cached_func(x=random.random()) 202 | assert value_generated 203 | 204 | # Testing validator 205 | use_cache = False 206 | with 
pytest.raises(UnnecessaryFunctionCall): 207 | cached_func(x=random.random()) 208 | 209 | # Removing data 210 | ps.clear() 211 | assert ps.get(TEST_KEY, default=None) is None, "Database was not cleared properly" 212 | 213 | # Expiration 214 | ps = PersistentCache(persistent_cache_path, version=3, expiration=.01) 215 | ps.set(TEST_KEY, TEST_VALUE) 216 | time.sleep(0.011) 217 | assert ps.get(TEST_KEY, None) is None, "Database was not cleaned up on expiration" 218 | 219 | 220 | def test_locking_timecache(): 221 | from easypy.concurrency import MultiObject 222 | 223 | # Cached func should be called only once 224 | value_generated = False 225 | 226 | class UnnecessaryFunctionCall(Exception): 227 | pass 228 | 229 | @timecache() 230 | def test(x): 231 | nonlocal value_generated 232 | if value_generated: 233 | raise UnnecessaryFunctionCall() 234 | value_generated = True 235 | return True 236 | 237 | @test.key_func 238 | def test_key(x): 239 | return () 240 | 241 | MultiObject(range(10)).call(lambda x: test(x=x)) 242 | 243 | 244 | @pytest.mark.parametrize('cache_decorator', [cached_property, timecache()]) 245 | def test_caching_gc_leaks(cache_decorator): 246 | """ 247 | Make sure that the cache does not prevent GC collection once the original objects die 248 | """ 249 | 250 | class Leaked(): 251 | pass 252 | 253 | class Foo: 254 | @cache_decorator 255 | def cached_method(self): 256 | return Leaked() 257 | 258 | def get(self): 259 | """Generalize property type and function type caches""" 260 | result = self.cached_method 261 | if callable(result): 262 | result = result() 263 | assert isinstance(result, Leaked), 'cache not used properly - got wrong value %s' % (result,) 264 | return result 265 | 266 | foo = Foo() 267 | leaked = weakref.ref(foo.get()) 268 | 269 | gc.collect() 270 | assert leaked() == foo.get() 271 | 272 | del foo 273 | gc.collect() 274 | assert leaked() is None 275 | 276 | 277 | def test_cached_property(): 278 | 279 | num = 0 280 | 281 | class Foo: 282 | 
@cached_property 283 | def num(self): 284 | nonlocal num 285 | num += 1 286 | return num 287 | 288 | f = Foo() 289 | assert f.num == 1 290 | assert f.num == 1 291 | del f.num 292 | assert f.num == 2 293 | 294 | 295 | def test_cached_exception_property(): 296 | 297 | num = 0 298 | 299 | class Foo: 300 | @cached_property(cacheable_exceptions=ZeroDivisionError) 301 | def num(self): 302 | nonlocal num 303 | num += 1 304 | 1 / 0 305 | 306 | f = Foo() 307 | 308 | with pytest.raises(ZeroDivisionError): 309 | f.num 310 | assert num == 1 311 | 312 | with pytest.raises(ZeroDivisionError): 313 | f.num 314 | assert num == 1 315 | 316 | 317 | def test_locking_exception_timecache(): 318 | 319 | values = [] 320 | 321 | @timecache(cacheable_exceptions=ZeroDivisionError) 322 | def test(x): 323 | values.append(x) 324 | return 1 / x 325 | 326 | test(1) 327 | test(1) 328 | test(2) 329 | test(2) 330 | 331 | with pytest.raises(ZeroDivisionError): 332 | test(0) 333 | test(0) 334 | 335 | assert values == [1, 2, 0] 336 | 337 | 338 | def test_resilient_between_timecaches(): 339 | class ExceptionLeakedThroughResilient(Exception): 340 | pass 341 | 342 | @timecache(1) 343 | @resilient(acceptable=ExceptionLeakedThroughResilient, default='default') 344 | @timecache(1) 345 | def foo(): 346 | raise ExceptionLeakedThroughResilient() 347 | 348 | assert foo() == 'default' 349 | -------------------------------------------------------------------------------- /tests/test_collections.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from easypy.collections import separate 3 | from easypy.collections import ListCollection, SimpleObjectCollection, partial_dict, UNIQUE, ObjectNotFound, TooManyObjectsFound 4 | from easypy.bunch import Bunch 5 | from collections import Counter 6 | 7 | 8 | class Obj(Bunch): 9 | def __repr__(self): 10 | return "%(name)s:%(id)s:%(v)s" % self 11 | 12 | 13 | L = ListCollection(Obj(name=n, id=i, v=v) for n, i, v in 
zip("aabcdddeff", "1234567890", "xxxyyyxxyz") for _ in range(100)) 14 | 15 | 16 | def test_collection_filter(): 17 | lst = ListCollection("abcdef") 18 | assert lst.filtered(lambda c: c == 'a').sample(1) == ['a'] 19 | 20 | 21 | def test_collection_reprs(): 22 | 23 | def check(lst): 24 | str(lst) 25 | repr(lst) 26 | 27 | check(L) 28 | check(L.filtered(id=5)) 29 | 30 | NL = ListCollection(L, name='lst') 31 | 32 | check(NL) 33 | check(NL.filtered(id=5)) 34 | 35 | O = SimpleObjectCollection(L, ID_ATTRIBUTE='name') 36 | 37 | check(O) 38 | check(O.filtered(id=5)) 39 | 40 | NO = SimpleObjectCollection(L, ID_ATTRIBUTE='name', name='objs') 41 | 42 | check(NO) 43 | check(NO.filtered(id=5)) 44 | 45 | 46 | def test_partial_dict(): 47 | assert partial_dict({'a': 1, 'b': 2, 'c': 3}, ['a', 'b']) == {'a': 1, 'b': 2} 48 | 49 | 50 | def test_separate1(): 51 | a, b = separate(range(5), key=lambda n: n < 3) 52 | assert a == [0, 1, 2] 53 | assert b == [3, 4] 54 | 55 | 56 | def test_separate2(): 57 | a, b = separate(range(5)) 58 | assert a == [1, 2, 3, 4] 59 | assert b == [0] 60 | 61 | 62 | def test_collection_sample(): 63 | l = ListCollection("abcdef") 64 | assert len(l.sample(2.0)) == 2 65 | 66 | with pytest.raises(AssertionError): 67 | l.sample(1.5) 68 | 69 | 70 | def test_collection_select(): 71 | assert len(L.select(name='a', id='1')) == 100 72 | 73 | 74 | def test_collection_select_no_unique(): 75 | with pytest.raises(AssertionError): 76 | L.select(name=UNIQUE) 77 | 78 | 79 | def test_collection_sample_too_much(): 80 | len(L.select(name='a', id='2').sample(100)) == 100 81 | with pytest.raises(ObjectNotFound): 82 | L.select(name='a', id='2').sample(101) 83 | 84 | 85 | def test_collection_sample_too_many(): 86 | len(L.select(name='a', id='2').sample(100)) == 100 87 | with pytest.raises(TooManyObjectsFound): 88 | L.get(name='a', id='2') 89 | 90 | 91 | def test_collection_sample_unique0(): 92 | assert not L.sample(0, name=UNIQUE) 93 | 94 | 95 | def test_collection_sample_unique1(): 96 
| s = L.sample(3, name=UNIQUE) 97 | assert len({b.name for b in s}) == 3 98 | 99 | 100 | def test_collection_sample_unique2(): 101 | x, = L.sample(1, name=UNIQUE, id='1') 102 | assert x.id == '1' 103 | 104 | 105 | def test_collection_sample_unique3(): 106 | s = L.sample(6, name=UNIQUE, id=UNIQUE) 107 | assert len({(b.name, b.id) for b in s}) == 6 108 | 109 | 110 | def test_collection_sample_unique4(): 111 | with pytest.raises(ObjectNotFound): 112 | L.sample(7, name=UNIQUE) # too many 113 | 114 | 115 | def test_collection_sample_unique5(): 116 | s = L.sample(3, name=UNIQUE, id=UNIQUE, v=UNIQUE) 117 | assert len({(b.name, b.id) for b in s}) == 3 118 | 119 | 120 | def test_collection_sample_unique_diverse(): 121 | x = Counter(repr(x) for _ in range(100) for x in L.sample(1, name=UNIQUE)) 122 | assert len(x) == 10 123 | 124 | 125 | def test_collections_slicing(): 126 | L = ListCollection("abcdef") 127 | assert L[0] == 'a' 128 | assert L[-1] == 'f' 129 | assert L[:2] == list('ab') 130 | assert L[-2:] == list('ef') 131 | assert L[::2] == list('ace') 132 | assert L[::-2] == list('fdb') 133 | 134 | 135 | def test_filters_order(): 136 | l = ListCollection([Bunch(a='b', b=5), Bunch(a='c', c='c')]) 137 | filterd_l = l.filtered(a='b').select(lambda o: o.b > 4) 138 | assert len(filterd_l) == 1 139 | 140 | with pytest.raises(AttributeError): 141 | filterd_l = l.select(lambda o: o.b > 4) 142 | 143 | 144 | def test_simple_object_collection(): 145 | S = SimpleObjectCollection(L, ID_ATTRIBUTE='id') 146 | assert S.get_next(S['5']) == S['6'] 147 | assert S.get_prev(S['5']) == S['4'] 148 | -------------------------------------------------------------------------------- /tests/test_colors.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from easypy.colors import Colorized, uncolored, colorize, register_colorizers 3 | register_colorizers(bad=("red", "blue")) 4 | 5 | 6 | @pytest.mark.parametrize("content", ["XXX", ""]) 7 | def 
test_colors(content): 8 | opts = { 9 | str(Colorized("RED(BLUE)<<%s>>" % content)), 10 | str(Colorized("RED(BLUE)@[%s]@" % content)), 11 | str(Colorized("RED(BLUE)@{%s}@" % content)), 12 | 13 | str(colorize("RED(BLUE)<<%s>>" % content)), 14 | str(colorize("RED(BLUE)@[%s]@" % content)), 15 | str(colorize("RED(BLUE)@{%s}@" % content)), 16 | 17 | str(colorize("BAD<<%s>>" % content)), 18 | str(colorize("BAD@[%s]@" % content)), 19 | str(colorize("BAD@{%s}@" % content)), 20 | } 21 | 22 | assert len(opts) == 1 23 | [ret] = opts 24 | assert ret == ("\x1b[1;44;31m%s\x1b[0m" % content if content else '') 25 | assert uncolored(ret) == content 26 | 27 | 28 | @pytest.mark.parametrize("content", ["XXX", ""]) 29 | def test_uncolored(content): 30 | uncolored(str(Colorized("RED(BLUE)<<%s>>" % content))) 31 | uncolored(str(Colorized("RED(BLUE)@[%s]@" % content))) 32 | uncolored(str(Colorized("RED(BLUE)@{%s}@" % content))) 33 | 34 | uncolored(str(colorize("RED(BLUE)<<%s>>" % content))) 35 | uncolored(str(colorize("RED(BLUE)@[%s]@" % content))) 36 | uncolored(str(colorize("RED(BLUE)@{%s}@" % content))) 37 | 38 | uncolored(str(colorize("BAD<<%s>>" % content))) 39 | uncolored(str(colorize("BAD@[%s]@" % content))) 40 | uncolored(str(colorize("BAD@{%s}@" % content))) 41 | 42 | uncolored("RED(BLUE)<<%s>>" % content) 43 | uncolored("RED(BLUE)@[%s]@" % content) 44 | uncolored("RED(BLUE)@{%s}@" % content) 45 | 46 | uncolored("RED(BLUE)<<%s>>" % content) 47 | uncolored("RED(BLUE)@[%s]@" % content) 48 | uncolored("RED(BLUE)@{%s}@" % content) 49 | 50 | uncolored("BAD<<%s>>" % content) 51 | uncolored("BAD@[%s]@" % content) 52 | uncolored("BAD@{%s}@" % content) 53 | -------------------------------------------------------------------------------- /tests/test_contexts.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from easypy.contexts import contextmanager, breakable_section 3 | 4 | 5 | X = [] 6 | 7 | 8 | @contextmanager 9 | def ctx(): 10 | 
X.append(1) 11 | yield 2 12 | X.pop(0) 13 | 14 | 15 | def test_simple(): 16 | with ctx() as i: 17 | assert i == 2 18 | assert X == [1] 19 | assert not X 20 | 21 | 22 | def test_function(): 23 | @ctx() 24 | def foo(): 25 | assert X == [1] 26 | foo() 27 | assert not X 28 | 29 | 30 | def test_generator(): 31 | @ctx() 32 | def foo(): 33 | yield from range(5) 34 | 35 | for i in foo(): 36 | assert X == [1] 37 | assert not X 38 | 39 | 40 | def test_ctx(): 41 | @ctx() 42 | @contextmanager 43 | def foo(): 44 | yield 45 | 46 | with foo(): 47 | assert X == [1] 48 | 49 | assert not X 50 | 51 | 52 | def test_breakable_section(): 53 | 54 | a = [] 55 | with breakable_section() as Break1: 56 | with breakable_section() as Break2: 57 | with breakable_section() as Break3: 58 | raise Break2() 59 | a += [1] # this will be skipped 60 | a += [2] # this will be skipped 61 | a += [3] # landing here 62 | a += [4] 63 | 64 | assert Break1 is not Break2 65 | assert Break2 is not Break3 66 | assert Break3 is not Break1 67 | assert a == [3, 4] 68 | -------------------------------------------------------------------------------- /tests/test_decorations.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from functools import wraps 4 | from io import StringIO 5 | from pydoc import doc 6 | 7 | from easypy.decorations import lazy_decorator 8 | from easypy.decorations import ensure_same_defaults, DefaultsMismatch 9 | from easypy.decorations import kwargs_from 10 | from easypy.misc import kwargs_resilient 11 | 12 | 13 | def test_kwargs_resilient(): 14 | @kwargs_resilient 15 | def foo(a, b): 16 | return [a, b] 17 | 18 | assert foo(1, b=2, c=3, d=4) == [1, 2] 19 | 20 | @kwargs_resilient 21 | def bar(a, b, **kwargs): 22 | return [a, b, kwargs] 23 | 24 | assert bar(1, b=2, c=3, d=4) == [1, 2, {'c': 3, 'd': 4}] 25 | 26 | @kwargs_resilient(negligible='d') 27 | def baz(a, b): 28 | return [a, b] 29 | 30 | # Should only be neglect `d` - not to `c` 31 | 
with pytest.raises(TypeError): 32 | baz(1, b=2, c=3, d=4) 33 | assert baz(1, b=2, d=4) == [1, 2] 34 | 35 | @kwargs_resilient(negligible=['b', 'd']) 36 | def qux(a, b, **kwargs): 37 | return [a, b, kwargs] 38 | 39 | # Should be passing b because it's in the function signature 40 | # Should be passing c because it's not in `negligible` 41 | # Should not be passing d because it's in `negligible` and not in the function signature 42 | assert qux(1, b=2, c=3, d=4) == [1, 2, {'c': 3}] 43 | 44 | 45 | def test_lazy_decorator_lambda(): 46 | def add_to_result(num): 47 | def inner(func): 48 | @wraps(func) 49 | def wrapper(*args, **kwargs): 50 | return func(*args, **kwargs) + num 51 | 52 | wrapper.__name__ = '%s + %s' % (func.__name__, num) 53 | 54 | return wrapper 55 | return inner 56 | 57 | class Foo: 58 | def __init__(self, num): 59 | self.num = num 60 | 61 | @lazy_decorator(lambda self: add_to_result(num=self.num)) 62 | def foo(self): 63 | """foo doc""" 64 | return 1 65 | 66 | foo = Foo(10) 67 | assert foo.foo() == 11 68 | 69 | assert Foo.foo.__name__ == 'foo' 70 | assert foo.foo.__name__ == 'foo + 10' 71 | 72 | assert Foo.foo.__doc__ == foo.foo.__doc__ == 'foo doc' 73 | 74 | 75 | def test_lazy_decorator_attribute(): 76 | class Foo: 77 | def add_to_result(self, func): 78 | @wraps(func) 79 | def wrapper(*args, **kwargs): 80 | return func(*args, **kwargs) + self.num 81 | 82 | wrapper.__name__ = '%s + %s' % (func.__name__, self.num) 83 | 84 | return wrapper 85 | 86 | @lazy_decorator('add_to_result') 87 | def foo(self): 88 | """foo doc""" 89 | return 1 90 | 91 | foo = Foo() 92 | 93 | with pytest.raises(AttributeError): 94 | # We did not set foo.num yet, so the decorator will fail trying to set the name 95 | foo.foo 96 | 97 | foo.num = 10 98 | assert foo.foo() == 11 99 | assert foo.foo.__name__ == 'foo + 10' 100 | assert Foo.foo.__doc__ == foo.foo.__doc__ == 'foo doc' 101 | 102 | foo.num = 20 103 | assert foo.foo() == 21 104 | assert foo.foo.__name__ == 'foo + 20' 105 | 106 | 
def test_lazy_decorator_with_timecache():
    # Each instance gets its own timecache decorator, driven by that
    # instance's own fake clock (self.ts) — caches must not leak between
    # instances, and must expire independently.
    from easypy.caching import timecache

    class Foo:
        def __init__(self):
            self.ts = 0
            self._counter = 0

        @property
        def timecache(self):
            # fresh timecache bound to this instance's timestamp
            return timecache(expiration=1, get_ts_func=lambda: self.ts)

        @lazy_decorator('timecache', cached=True)
        def inc(self):
            self._counter += 1
            return self._counter

        # outer lazy_decorator immediately calls the (cached) method,
        # turning `counter` into a property-like cached read
        @lazy_decorator(lambda self: lambda method: method())
        @lazy_decorator('timecache', cached=True)
        def counter(self):
            return self._counter

    foo1 = Foo()
    foo2 = Foo()

    assert [foo1.inc(), foo2.inc()] == [1, 1]
    assert [foo1.inc(), foo2.inc()] == [1, 1]  # cached — counters unchanged
    assert [foo1.counter, foo2.counter] == [1, 1]

    foo1.ts += 1  # expire foo1's caches only
    assert [foo1.counter, foo2.counter] == [1, 1]
    assert [foo1.inc(), foo2.inc()] == [2, 1]
    assert [foo1.counter, foo2.counter] == [1, 1]
    foo2.ts += 1  # now expire foo2's caches
    assert [foo1.inc(), foo2.inc()] == [2, 2]
    assert [foo1.counter, foo2.counter] == [1, 2]  # foo1 was not updated since last sync - only foo2


def test_ensure_same_defaults():
    # `bar` declares the same defaults as `foo`, so decoration must succeed
    def foo(a=1, b=2, c=3):
        return a, b, c


    @ensure_same_defaults(foo)
    def bar(a=1, b=2, c=3):
        return a, b, c

    # Test we did not change the actual function
    assert foo() == bar()
    assert foo(4, 5, 6) == bar(4, 5, 6)


    # mismatched defaults (b and c) must be reported at decoration time
    with pytest.raises(DefaultsMismatch) as exc:
        @ensure_same_defaults(foo)
        def baz(a=1, b=3, c=2):
            pass
    assert exc.value.param_names == ['b', 'c']


def test_ensure_same_defaults_skipping_params_with_no_default():
    # a parameter lacking a default on either side is skipped by the check
    @ensure_same_defaults(lambda a=1, b=2: ...)
    def foo(a, b=2):
        pass

    @ensure_same_defaults(lambda a, b=2: ...)
    def foo(a=1, b=2):
        pass

    @ensure_same_defaults(lambda a, b=2: ...)
176 | def foo(a=1, c=3): 177 | pass 178 | 179 | with pytest.raises(DefaultsMismatch) as exc: 180 | @ensure_same_defaults(lambda a, b=2: ...) 181 | def foo(a, b=4): 182 | pass 183 | assert exc.value.param_names == ['b'] 184 | 185 | 186 | def test_ensure_same_defaults_ignore(): 187 | @ensure_same_defaults(lambda a=1, b=2: ..., ignore=('b',)) 188 | def foo(a=1, b=3): 189 | pass 190 | 191 | with pytest.raises(DefaultsMismatch) as exc: 192 | @ensure_same_defaults(lambda a=1, b=2: ..., ignore=('b',)) 193 | def foo(a=2, b=3): 194 | pass 195 | assert exc.value.param_names == ['a'] 196 | 197 | 198 | def signature_line(thing): 199 | with StringIO() as capture: 200 | doc(thing, output=capture) 201 | return capture.getvalue().splitlines()[2] 202 | 203 | 204 | def test_kwargs_from(): 205 | import sys 206 | if sys.version_info < (3, 7): 207 | fix_annotations = lambda sign: sign.replace(': ', ':') 208 | else: 209 | fix_annotations = lambda sign: sign 210 | 211 | def foo(*, a, b: int, c=3): 212 | pass 213 | 214 | @kwargs_from(foo) 215 | def bar(a=1, **kwargs) -> bool: 216 | return False 217 | 218 | assert signature_line(bar) == fix_annotations('bar(a=1, *, b: int, c=3) -> bool') 219 | 220 | @kwargs_from(bar) 221 | def baz(c=4, **kwargs): 222 | pass 223 | 224 | assert signature_line(baz) == fix_annotations('baz(c=4, *, a=1, b: int)') 225 | 226 | 227 | def test_kwargs_from_no_kwargs(): 228 | with pytest.raises(TypeError): 229 | @kwargs_from(lambda *, x, y: ...) 230 | def foo(a): 231 | pass 232 | 233 | with pytest.raises(TypeError): 234 | @kwargs_from(lambda *, x, y: ...) 235 | def bar(x, y): 236 | pass 237 | 238 | @kwargs_from(lambda *args: ...) 
239 | def baz(a, **kwargs): 240 | pass 241 | 242 | assert signature_line(baz) == 'baz(a)' 243 | 244 | 245 | def test_kwargs_from_keep_kwargs(): 246 | def foo(*, a, b, **foos): 247 | pass 248 | 249 | @kwargs_from(foo) 250 | def bar(x, **bars): 251 | pass 252 | 253 | assert signature_line(bar) == 'bar(x, *, a, b, **bars)' 254 | 255 | 256 | def test_kwargs_from_multiple(): 257 | @kwargs_from(lambda a, b: ..., lambda x, y, a: ...) 258 | def bar(**kwargs): 259 | pass 260 | 261 | assert signature_line(bar) == 'bar(*, a, b, x, y)' 262 | 263 | 264 | def test_kwargs_from_exclude(): 265 | @kwargs_from(lambda a, b, c, w: ..., exclude=['w']) 266 | def bar(**kwargs): 267 | pass 268 | 269 | assert signature_line(bar) == 'bar(*, a, b, c)' 270 | -------------------------------------------------------------------------------- /tests/test_deprecation.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from easypy.deprecation import deprecated_arguments 3 | 4 | 5 | @pytest.mark.filterwarnings("ignore::DeprecationWarning") 6 | def test_deprecated_arguments(): 7 | @deprecated_arguments(foo='bar') 8 | def func(bar): 9 | return 'bar is %s' % (bar,) 10 | 11 | assert func(1) == func(foo=1) == func(bar=1) == 'bar is 1' 12 | 13 | with pytest.raises(TypeError): 14 | func(foo=1, bar=2) 15 | 16 | with pytest.raises(TypeError): 17 | func(1, foo=2) 18 | -------------------------------------------------------------------------------- /tests/test_exceptions.py: -------------------------------------------------------------------------------- 1 | from easypy.exceptions import TException 2 | from easypy.bunch import Bunch 3 | 4 | 5 | class T(TException): 6 | template = "The happened: {what}" 7 | 8 | 9 | def test_pickle_texception(): 10 | import pickle 11 | 12 | t1 = T(what="happened", a=1, b=Bunch(x=[1, 2, 3], y=range(5))) 13 | t2 = pickle.loads(pickle.dumps(t1)) 14 | 15 | assert t1.render() == t2.render() 16 | assert t1._params == t2._params 17 | 
-------------------------------------------------------------------------------- /tests/test_humanize.py: -------------------------------------------------------------------------------- 1 | from easypy.humanize import from_hexdump, hexdump, IndentableTextBuffer, format_table, easy_repr 2 | 3 | 4 | _SAMPLE_DATA = b'J\x9c\xe8Z!\xc2\xe6\x8b\xa0\x01\xcb\xc3.x]\x11\x9bsC\x1c\xb2\xcd\xb3\x9eM\xf7\x13`\xc8\xce\xf8g1H&\xe2\x9b' \ 5 | b'\xd1\xa8\xfd\x14\x08U\x175\xc7\x03q\xac\xda\xe6)q}}T44\x9e\xb5;\xf1.\xf6*\x16\xba\xe0~m\x96o\xb8\xa4Tl\x96\x8a\xc7' \ 6 | b'\x9a\xc9\xc4\xf2\xb1\x9e\x13\x0b\xe2i\xc6\xd8\x92\xde\xfabn6\xea\xf5_y>\x15\xc5\xd5\xa0\x05\xbd\xea\xb8\xba\x80+P' \ 7 | b'\xa7\xd8\xad\xbf\x91<\xca\xc5\x94\xe6\xfc-\xab4ABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABAB' \ 8 | b'ABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABA' \ 9 | b'BABABABABABABABABABABABABABABABABABABABABCABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABAB' \ 10 | b'ABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABABA' \ 11 | b'BABABABABABABABABABABABABABABABABABABABABAB\xdc)n/\x9aNy\x9f\x03\xc7j\x14\x08\x1a\x08\x91@\xad\xac\xa9(\x1a\x8b\x9f' \ 12 | b'\x81\xb0us\x87\x9e4\xf9\x991w39\xd0\x98XokH\xa6\xc9Rv\xbc\xac\x90;\xac\x83\xc8\xba`V\xa9\xc3u\xb2\xccV\x9d\x06\xb3' \ 13 | b'\xf0\x1e\xb4K\x10\x9c\x83\xdc\xe7\xcb\x0c\x9a\x8c\x80\x010\x8ca\xf85Z\x9c' 14 | 15 | 16 | def test_hexdump_functions(): 17 | assert from_hexdump(hexdump(_SAMPLE_DATA)) == _SAMPLE_DATA 18 | assert from_hexdump(hexdump(_SAMPLE_DATA, 24, 2)) == _SAMPLE_DATA 19 | assert from_hexdump(hexdump(_SAMPLE_DATA, 16, 1, False)) == _SAMPLE_DATA 20 | assert from_hexdump(hexdump(_SAMPLE_DATA, 4, 4)) == _SAMPLE_DATA 21 | 22 | assert _SAMPLE_DATA.decode("hexdump_24_2") == hexdump(_SAMPLE_DATA, 24, 2) 23 | assert hexdump(_SAMPLE_DATA, 24, 2).encode("hexdump") == _SAMPLE_DATA 24 | 25 | 
26 | def test_indentable_text_buffer(): 27 | from io import StringIO 28 | 29 | buff = IndentableTextBuffer("Exc") 30 | buff.write("a") 31 | buff.write("b") 32 | with buff.indent("Header2"): 33 | buff.write(hexdump(_SAMPLE_DATA, 24, 8)) 34 | buff.write("hello") 35 | buff.write("world") 36 | with buff.indent("Header2"): 37 | # buff.write(format_in_columns([str(i) for i in range(100)], 50)) 38 | with buff.indent("This should be pruned away"): 39 | with buff.indent("This should be pruned away"): 40 | pass 41 | with buff.indent("Header3"): 42 | buff.write("text3") 43 | buff.write("text2") 44 | 45 | f = StringIO() 46 | buff.render(prune=True, textual=True, width=120, file=f) 47 | assert open("tests/indentable_buffer1.txt", "r").read() == f.getvalue() 48 | 49 | f = StringIO() 50 | buff.render(prune=True, textual=False, width=40, overflow="ignore", file=f) 51 | assert open("tests/indentable_buffer2.txt", "r").read() == f.getvalue() 52 | 53 | f = StringIO() 54 | buff.render(prune=True, textual=False, width=40, edges=False, file=f) 55 | assert open("tests/indentable_buffer3.txt", "r").read() == f.getvalue() 56 | 57 | 58 | def test_format_table_with_titles(): 59 | table = [ 60 | 'abc', 61 | range(3), 62 | [None, True, False], 63 | [dict(x='x'), b'bytes', 'string'] 64 | ] 65 | 66 | output = ( 67 | "a |b |c \n" 68 | "--------------------------\n" 69 | " 0| 1| 2\n" 70 | "None |True |False \n" 71 | "{'x': 'x'}|b'bytes'|string\n") 72 | 73 | assert output == format_table(table) 74 | 75 | 76 | def test_format_table_without_titles(): 77 | table = [ 78 | 'abc', 79 | range(3), 80 | [None, True, False], 81 | [dict(x='x'), b'bytes', 'string'] 82 | ] 83 | 84 | output = ( 85 | "a |b |c \n" 86 | " 0| 1| 2\n" 87 | "None |True |False \n" 88 | "{'x': 'x'}|b'bytes'|string\n") 89 | 90 | assert output == format_table(table, titles=False) 91 | 92 | 93 | def test_easy_repr(): 94 | @easy_repr('a', 'b', 'c') 95 | class Class1: 96 | def __init__(self, a, b, c, d): 97 | self.a = a 98 | self.b = b 99 | 
self.c = c 100 | self.d = d 101 | a = Class1('a', 'b', 1, 2) 102 | assert repr(a) == "" 103 | 104 | # change order 105 | @easy_repr('c', 'a', 'd') 106 | class Class2: 107 | def __init__(self, a, b, c, d): 108 | self.a = a 109 | self.b = b 110 | self.c = c 111 | self.d = d 112 | a = Class2('a', 'b', 1, 2) 113 | assert repr(a) == "" 114 | 115 | try: 116 | @easy_repr() 117 | class Class3: 118 | def __init__(self, a, b, c, d): 119 | self.a = a 120 | self.b = b 121 | self.c = c 122 | self.d = d 123 | except AssertionError: 124 | pass 125 | else: 126 | assert False, 'easy_repr with no attributes should not be allowed' 127 | 128 | -------------------------------------------------------------------------------- /tests/test_lockstep.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from easypy.lockstep import lockstep, LockstepSyncMismatch 4 | 5 | 6 | def test_lockstep_side_effects(): 7 | calculation_result = 0 8 | 9 | @lockstep 10 | def simple_calculation(number): 11 | nonlocal calculation_result 12 | 13 | calculation_result = number 14 | yield 'SET_NUMBER' 15 | 16 | calculation_result *= 2 17 | yield 'MULTIPLY_IT_BY_TWO' 18 | 19 | calculation_result += 5 20 | yield 'ADD_FIVE' 21 | 22 | with simple_calculation.lockstep(5) as calculation: 23 | calculation.step_next('SET_NUMBER') 24 | assert calculation_result == 5 25 | 26 | calculation.step_next('MULTIPLY_IT_BY_TWO') 27 | assert calculation_result == 10 28 | 29 | calculation.step_next('ADD_FIVE') 30 | assert calculation_result == 15 31 | 32 | 33 | def test_lockstep_run_as_function(): 34 | calculation_result = 0 35 | 36 | @lockstep 37 | def simple_calculation(number): 38 | nonlocal calculation_result 39 | 40 | calculation_result = number 41 | yield 'SET_NUMBER' 42 | 43 | calculation_result *= 2 44 | yield 'MULTIPLY_IT_BY_TWO' 45 | 46 | calculation_result += 5 47 | yield 'ADD_FIVE' 48 | 49 | simple_calculation(10) 50 | assert calculation_result == 25 51 | 52 | 53 | 
def test_lockstep_class_method(): 54 | class SimpleCalculation(): 55 | def __init__(self, number): 56 | self.calculation_result = number 57 | 58 | @lockstep 59 | def calculation(self): 60 | self.calculation_result *= 2 61 | yield 'MULTIPLY_IT_BY_TWO' 62 | 63 | self.calculation_result += 5 64 | yield 'ADD_FIVE' 65 | 66 | simple_calculation = SimpleCalculation(5) 67 | with simple_calculation.calculation.lockstep() as calculation: 68 | assert simple_calculation.calculation_result == 5 69 | 70 | calculation.step_next('MULTIPLY_IT_BY_TWO') 71 | assert simple_calculation.calculation_result == 10 72 | 73 | calculation.step_next('ADD_FIVE') 74 | assert simple_calculation.calculation_result == 15 75 | assert simple_calculation.calculation_result == 15 76 | 77 | # run as function 78 | simple_calculation2 = SimpleCalculation(10) 79 | assert simple_calculation2.calculation_result == 10 80 | simple_calculation2.calculation() 81 | assert simple_calculation2.calculation_result == 25 82 | 83 | 84 | def test_lockstep_wrong_step_name(): 85 | @lockstep 86 | def process(): 87 | yield 'STEP_1' 88 | yield 'STEP_2' 89 | yield 'STEP_3' 90 | 91 | with pytest.raises(LockstepSyncMismatch) as excinfo: 92 | with process.lockstep() as process: 93 | process.step_next('STEP_1') 94 | process.step_next('STEP_TWO') 95 | process.step_next('STEP_3') 96 | 97 | assert excinfo.value.expected_step == 'STEP_TWO' 98 | assert excinfo.value.actual_step == 'STEP_2' 99 | 100 | 101 | def test_lockstep_not_exhausted(): 102 | @lockstep 103 | def process(): 104 | yield 'STEP_1' 105 | yield 'STEP_2' 106 | yield 'STEP_3' 107 | 108 | with pytest.raises(LockstepSyncMismatch) as excinfo: 109 | with process.lockstep() as process: 110 | process.step_next('STEP_1') 111 | process.step_next('STEP_2') 112 | 113 | assert excinfo.value.expected_step == 'finished' 114 | assert excinfo.value.actual_step == 'STEP_3' 115 | 116 | 117 | def test_lockstep_exhausted_prematurely(): 118 | @lockstep 119 | def process(): 120 | yield 
'STEP_1' 121 | yield 'STEP_2' 122 | 123 | with pytest.raises(LockstepSyncMismatch) as excinfo: 124 | with process.lockstep() as process: 125 | process.step_next('STEP_1') 126 | process.step_next('STEP_2') 127 | process.step_next('STEP_3') 128 | 129 | assert excinfo.value.expected_step == 'STEP_3' 130 | assert excinfo.value.actual_step == 'finished' 131 | 132 | 133 | def test_lockstep_exhaust(): 134 | finished = False 135 | 136 | @lockstep 137 | def process(): 138 | nonlocal finished 139 | 140 | yield 'STEP_1' 141 | yield 'STEP_2' 142 | yield 'STEP_3' 143 | 144 | finished = True 145 | 146 | assert not finished 147 | with process.lockstep() as process: 148 | assert not finished 149 | process.step_all() 150 | assert finished 151 | assert finished 152 | 153 | 154 | def test_lockstep_yielded_values(): 155 | @lockstep 156 | def process(): 157 | yield 'STEP_1', 1 158 | yield 'STEP_2' 159 | yield 'STEP_3', 3 160 | 161 | with process.lockstep() as process: 162 | assert process.step_next('STEP_1') == 1 163 | assert process.step_next('STEP_2') is None 164 | assert process.step_next('STEP_3') == 3 165 | 166 | 167 | def test_lockstep_nested(): 168 | @lockstep 169 | def internal_process(): 170 | yield 'INTERNAL_1' 171 | yield 'INTERNAL_2' 172 | 173 | @lockstep 174 | def external_process(): 175 | yield 'EXTERNAL_1' 176 | with internal_process.lockstep() as process: 177 | yield from process 178 | yield 'EXTERNAL_2' 179 | 180 | with external_process.lockstep() as process: 181 | process.step_next('EXTERNAL_1') 182 | process.step_next('INTERNAL_1') 183 | process.step_next('INTERNAL_2') 184 | process.step_next('EXTERNAL_2') 185 | 186 | 187 | def test_lockstep_step_util(): 188 | @lockstep 189 | def process(): 190 | yield 'STEP_1' 191 | yield 'STEP_2' 192 | yield 'STEP_3' 193 | 194 | with process.lockstep() as process: 195 | process.step_until('STEP_3') 196 | 197 | 198 | def test_lockstep_step_util_wrong_order(): 199 | @lockstep 200 | def process(): 201 | yield 'STEP_1' 202 | yield 
'STEP_2' 203 | yield 'STEP_3' 204 | 205 | with pytest.raises(LockstepSyncMismatch) as excinfo: 206 | with process.lockstep() as process: 207 | process.step_until('STEP_2') 208 | process.step_until('STEP_1') 209 | 210 | assert excinfo.value.expected_step == 'STEP_1' 211 | assert excinfo.value.actual_step == 'finished' 212 | 213 | 214 | def test_lockstep_as_static_and_class_methods(): 215 | class Foo: 216 | @lockstep 217 | def process1(self, out): 218 | out.append(1) 219 | yield 'STEP' 220 | out.append(2) 221 | 222 | @lockstep 223 | @classmethod 224 | def process2(cls, out): 225 | out.append(1) 226 | yield 'STEP' 227 | out.append(2) 228 | 229 | @lockstep 230 | @staticmethod 231 | def process3(out): 232 | out.append(1) 233 | yield 'STEP' 234 | out.append(2) 235 | 236 | print() 237 | 238 | def check_method_call(method): 239 | out = [] 240 | method(out) 241 | assert out == [1, 2] 242 | 243 | check_method_call(Foo().process1) 244 | check_method_call(Foo().process2) 245 | check_method_call(Foo().process3) 246 | 247 | def check_method_lockstep(method): 248 | method.lockstep 249 | out = [] 250 | with method.lockstep(out) as process: 251 | assert out == [] 252 | process.step_until('STEP') 253 | assert out == [1] 254 | assert out == [1, 2] 255 | 256 | check_method_lockstep(Foo().process1) 257 | check_method_lockstep(Foo.process2) 258 | check_method_lockstep(Foo.process3) 259 | -------------------------------------------------------------------------------- /tests/test_logging.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from contextlib import contextmanager 3 | from io import StringIO 4 | from easypy.colors import uncolored 5 | 6 | 7 | @pytest.yield_fixture 8 | def get_log(is_logbook): 9 | stream = StringIO() 10 | 11 | if is_logbook: 12 | import logbook 13 | handler = logbook.StreamHandler(stream, format_string="{record.message}") 14 | handler.push_application() 15 | else: 16 | import logging 17 | orig_level = 
logging.root.level 18 | handler = logging.StreamHandler(stream) 19 | handler.setFormatter(logging.Formatter(fmt="%(message)s")) 20 | logging.root.addHandler(handler) 21 | logging.root.setLevel(0) 22 | 23 | def get(): 24 | return uncolored(stream.getvalue()) 25 | yield get 26 | 27 | if is_logbook: 28 | handler.pop_application() 29 | else: 30 | logging.root.setLevel(orig_level) 31 | logging.root.removeHandler(handler) 32 | 33 | 34 | @pytest.fixture 35 | def logger(request): 36 | from easypy.logging import _get_logger 37 | return _get_logger(name=request.function.__name__) 38 | 39 | 40 | def test_indent_around_generator(get_log, logger): 41 | 42 | @logger.indented("hey") 43 | def gen(): 44 | logger.info("000") 45 | yield 1 46 | yield 2 47 | 48 | for i in gen(): 49 | logger.info("%03d" % i) 50 | break 51 | 52 | assert get_log() == "hey\n000\n001\nDONE in no-time (hey)\n" 53 | 54 | 55 | def test_indent_around_function(get_log, logger): 56 | 57 | @logger.indented("hey") 58 | def foo(): 59 | logger.info("001") 60 | 61 | foo() 62 | 63 | assert get_log() == "hey\n001\nDONE in no-time (hey)\n" 64 | 65 | 66 | def test_indent_around_ctx(get_log, logger): 67 | 68 | @logger.indented("hey") 69 | @contextmanager 70 | def ctx(): 71 | logger.info("001") 72 | yield 73 | logger.info("003") 74 | 75 | with ctx(): 76 | logger.info("002") 77 | 78 | assert get_log() == "hey\n001\n002\n003\nDONE in no-time (hey)\n" 79 | -------------------------------------------------------------------------------- /tests/test_meta.py: -------------------------------------------------------------------------------- 1 | from easypy.meta import EasyMeta, GetAllSubclasses 2 | 3 | 4 | def test_easy_meta_before_cls_init(): 5 | 6 | class FooMaker(metaclass=EasyMeta): 7 | @EasyMeta.Hook 8 | def before_subclass_init(name, bases, dct): 9 | dct[name] = "foo" 10 | 11 | class BarMaker(metaclass=EasyMeta): 12 | @EasyMeta.Hook 13 | def before_subclass_init(name, bases, dct): 14 | dct[name] = "bar" 15 | 16 | class 
Foo(FooMaker): 17 | ... 18 | assert Foo.Foo == "foo" 19 | 20 | class Bar(BarMaker): 21 | ... 22 | assert Bar.Bar == "bar" 23 | 24 | class Baz1(FooMaker, BarMaker): 25 | ... 26 | assert Baz1.Baz1 == "bar" 27 | 28 | class Baz2(BarMaker, FooMaker): 29 | ... 30 | assert Baz2.Baz2 == "foo" 31 | 32 | 33 | def test_easy_meta_after_cls_init(): 34 | class Foo(metaclass=EasyMeta): 35 | @EasyMeta.Hook 36 | def after_subclass_init(cls): 37 | cls.foo_init = cls.__name__ 38 | 39 | class Bar(metaclass=EasyMeta): 40 | @EasyMeta.Hook 41 | def after_subclass_init(cls): 42 | cls.bar_init = cls.__name__ 43 | 44 | class Baz(Foo, Bar): 45 | @EasyMeta.Hook 46 | def after_subclass_init(cls): 47 | cls.baz_init = cls.__name__ 48 | 49 | assert not hasattr(Foo, 'foo_init'), 'after_subclass_init declared in Foo invoked on Foo' 50 | assert not hasattr(Bar, 'bar_init'), 'after_subclass_init declared in Bar invoked on Bar' 51 | 52 | assert Baz.foo_init == 'Baz' 53 | assert Baz.bar_init == 'Baz' 54 | assert not hasattr(Baz, 'baz_init'), 'after_subclass_init declared in Baz invoked on Baz' 55 | 56 | 57 | def test_easy_meta_get_all_subclasses(): 58 | class Foo(GetAllSubclasses): 59 | pass 60 | 61 | class Bar(Foo): 62 | pass 63 | 64 | class Baz(Foo): 65 | pass 66 | 67 | class Qux(Bar): 68 | pass 69 | 70 | assert set(Foo.get_all_subclasses()) == {Bar, Baz, Qux} 71 | assert set(Bar.get_all_subclasses()) == {Qux} 72 | assert set(Baz.get_all_subclasses()) == set() 73 | assert set(Qux.get_all_subclasses()) == set() 74 | 75 | 76 | def test_easy_meta_multi_inheritance(): 77 | 78 | class A(metaclass=EasyMeta): 79 | @EasyMeta.Hook 80 | def before_subclass_init(name, bases, dct): 81 | dct.setdefault('name', []).append("A") 82 | 83 | class B(metaclass=EasyMeta): 84 | @EasyMeta.Hook 85 | def before_subclass_init(name, bases, dct): 86 | dct.setdefault('name', []).append("B") 87 | 88 | class AA(A): 89 | @EasyMeta.Hook 90 | def before_subclass_init(name, bases, dct): 91 | dct.setdefault('name', []).append("AA") 92 
| 93 | class BB(B): 94 | @EasyMeta.Hook 95 | def before_subclass_init(name, bases, dct): 96 | dct.setdefault('name', []).append("BB") 97 | 98 | class A_B(A, B): ... 99 | class AA_BB(AA, BB): ... 100 | class A_BB(A, BB): ... 101 | class B_AA(B, AA): ... 102 | 103 | # reminder - hooks aren't invoked on the classes on which they're defined, only subclasses 104 | assert not hasattr(A, "name") 105 | assert not hasattr(B, "name") 106 | assert AA.name == ["A"] 107 | assert BB.name == ["B"] 108 | assert A_B.name == ["A", "B"] 109 | assert AA_BB.name == ["A", "AA", "B", "BB"] 110 | assert A_BB.name == ["A", "B", "BB"] 111 | -------------------------------------------------------------------------------- /tests/test_misc.py: -------------------------------------------------------------------------------- 1 | def test_tokens(): 2 | from easypy.tokens import AUTO, if_auto, MAX 3 | 4 | def foo(p=AUTO): 5 | return if_auto(p, 100) 6 | 7 | assert foo() == 100 8 | assert foo(5) == 5 9 | assert foo(MAX) == MAX 10 | 11 | assert MAX == "MAX" 12 | assert MAX == "" 13 | assert MAX == "max" 14 | assert MAX == "" 15 | 16 | d = {AUTO: AUTO, MAX: MAX} 17 | assert d[AUTO] == AUTO 18 | assert d[MAX] == MAX 19 | assert d[''] is MAX 20 | assert 'AUTO' not in d 21 | -------------------------------------------------------------------------------- /tests/test_randutils.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import random 3 | 4 | from easypy.random import random_nice_name, random_filename 5 | 6 | 7 | def test_random_nice_name(): 8 | for _ in range(20): 9 | length = random.randint(64, 85) 10 | entropy = random.randint(1, 3) 11 | sep = random.choice(['_', '..']) 12 | name = random_nice_name(max_length=length, entropy=entropy, sep=sep) 13 | assert len(name) <= length 14 | 15 | 16 | def test_random_nice_name_raises(): 17 | with pytest.raises(ValueError): 18 | random_nice_name(max_length=10, entropy=3) 19 | 20 | 21 | def 
test_random_filename(): 22 | fn = random_filename(10) 23 | assert len(fn) == 10 24 | 25 | fn = random_filename((10, 11)) 26 | assert 10 <= len(fn) <= 11 27 | 28 | fn = random_filename((11, 11)) 29 | assert len(fn) == 11 30 | -------------------------------------------------------------------------------- /tests/test_resilience.py: -------------------------------------------------------------------------------- 1 | def test_resilience(): 2 | from easypy.resilience import resilient 3 | 4 | @resilient(default=resilient.CAPTURE) 5 | def foo(a): 6 | return 1 / a 7 | 8 | ret = foo(1) 9 | assert ret == 1 10 | 11 | exc = foo(0) 12 | assert isinstance(exc, ZeroDivisionError) 13 | -------------------------------------------------------------------------------- /tests/test_rwlock.py: -------------------------------------------------------------------------------- 1 | import threading 2 | import pytest 3 | import logging 4 | from time import sleep 5 | from easypy.concurrency import concurrent 6 | from easypy.sync import RWLock, TimeoutException 7 | from easypy.bunch import Bunch 8 | from easypy.sync import RWLock 9 | 10 | 11 | def test_rwlock(): 12 | 13 | main_ctrl = threading.Event() 14 | reader_ctrl = threading.Event() 15 | writer_ctrl = threading.Event() 16 | lock = RWLock("test") 17 | 18 | state = Bunch(reading=False, writing=False) 19 | 20 | def read(): 21 | logging.info("Before read") 22 | reader_ctrl.wait() 23 | reader_ctrl.clear() 24 | 25 | with lock: 26 | logging.info("Reading...") 27 | state.reading = True 28 | main_ctrl.set() 29 | reader_ctrl.wait() 30 | reader_ctrl.clear() 31 | state.reading = False 32 | logging.info("Done reading") 33 | 34 | logging.info("After read") 35 | 36 | def write(): 37 | logging.info("Before write") 38 | writer_ctrl.wait() 39 | writer_ctrl.clear() 40 | 41 | with lock.exclusive(): 42 | logging.info("Writing...") 43 | state.writing = True 44 | main_ctrl.set() 45 | writer_ctrl.wait() 46 | writer_ctrl.clear() 47 | state.writing = False 48 | 
logging.info("Done writing") 49 | 50 | logging.info("After write") 51 | main_ctrl.set() 52 | 53 | reader = concurrent(read, threadname='read') 54 | writer = concurrent(write, threadname='write') 55 | 56 | with reader, writer: 57 | assert not state.reading and not state.writing 58 | 59 | reader_ctrl.set() 60 | main_ctrl.wait() 61 | logging.info("lock acquired non-exclusively") 62 | main_ctrl.clear() 63 | assert state.reading and not state.writing 64 | 65 | writer_ctrl.set() 66 | logging.info("writer awaits exclusivity") 67 | with lock: 68 | assert state.reading and not state.writing 69 | 70 | reader_ctrl.set() 71 | main_ctrl.wait() 72 | main_ctrl.clear() 73 | logging.info("read lock released") 74 | assert not state.reading and state.writing 75 | 76 | writer_ctrl.set() 77 | main_ctrl.wait() 78 | main_ctrl.clear() 79 | logging.info("write lock released") 80 | assert not state.reading and not state.writing 81 | 82 | 83 | def test_rwlock_different_threads(): 84 | lock = RWLock("test") 85 | ea = threading.Event() 86 | eb = threading.Event() 87 | 88 | def a(id=None): 89 | lock.acquire(id) 90 | ea.set() 91 | eb.wait() 92 | 93 | def b(id=None): 94 | lock.release(id) 95 | eb.set() 96 | 97 | with concurrent(a): 98 | ea.wait() 99 | assert lock.owners 100 | with pytest.raises(RuntimeError): 101 | b() 102 | eb.set() 103 | assert lock.owners 104 | 105 | with concurrent(a, "same"): 106 | assert lock.owners 107 | with concurrent(b, "same"): 108 | pass 109 | assert lock.owners 110 | 111 | 112 | def test_wrlock_exclusive_timeout(): 113 | wrlock = RWLock() 114 | 115 | def acquire_lock(): 116 | nonlocal wrlock 117 | wrlock.acquire() 118 | sleep(1) 119 | wrlock.release() 120 | 121 | t1 = threading.Thread(target=acquire_lock) 122 | t1.start() 123 | 124 | sleep(0.01) 125 | with pytest.raises(TimeoutException): 126 | with wrlock.exclusive(timeout=0.5): 127 | pass 128 | 129 | t1.join() 130 | -------------------------------------------------------------------------------- 
/tests/test_semver.py: -------------------------------------------------------------------------------- 1 | from easypy.semver import SemVer, SMV 2 | 3 | 4 | def test_loads(): 5 | semver = SemVer.loads('3.4.5.6-hello') 6 | assert semver.build == 6 7 | assert semver.patch == 5 8 | assert semver.minor == 4 9 | assert semver.major == 3 10 | assert semver.tag == 'hello' 11 | 12 | semver = SemVer.loads('3_4_5:hello', separator='_', tag_separator=':') 13 | assert semver.build is None 14 | assert semver.patch == 5 15 | assert semver.minor == 4 16 | assert semver.major == 3 17 | assert semver.tag == 'hello' 18 | 19 | 20 | def test_dumps(): 21 | version = '3.4.5.6-hello' 22 | assert SemVer.loads(version).dumps() == version 23 | 24 | string = SemVer.loads(version).dumps(separator='_', tag_separator=':') 25 | assert string == version.replace('.', '_').replace('-', ':') 26 | 27 | 28 | def test_copy(): 29 | semver = SemVer.loads('1000.2000.3000.5000-10102010') 30 | assert semver.copy() == semver 31 | 32 | assert semver.copy(major=1500).major == 1500 33 | 34 | 35 | def test_comparison(): 36 | semver1 = SemVer.loads('2.1-aaa') 37 | semver2 = SemVer.loads('2.1-bbb') 38 | semver3 = SemVer.loads('2.1.2-a') 39 | semver4 = SemVer.loads('2.2') 40 | assert semver1 < semver2 < semver3 < semver4 41 | 42 | assert semver1 == semver2.copy(tag='aaa') 43 | 44 | 45 | def test_bumping(): 46 | semver = SemVer.loads('1.1.1.1-aaa') 47 | assert semver.bump_build().dumps() == '1.1.1.2' 48 | 49 | # Build part is only printed when it is set 50 | assert semver.bump_patch().dumps() == '1.1.2' 51 | assert semver.bump_minor().dumps() == '1.2.0' 52 | assert semver.bump_major().dumps() == '2.0.0' 53 | assert semver.bump_major(clear_tag=False).dumps() == '2.0.0-aaa' 54 | -------------------------------------------------------------------------------- /tests/test_timing.py: -------------------------------------------------------------------------------- 1 | from easypy.timing import Timer, TimeInterval 2 | 3 | 
4 | def test_time_interval1(): 5 | 6 | st = 150000000 7 | t = Timer(st) 8 | t.t1 = t.t0 + 1 9 | 10 | ti = t.to_interval() 11 | 12 | assert t in ti 13 | 14 | assert t.t0 in ti 15 | assert t.t1 in ti 16 | 17 | assert t.t0 - 1 not in ti 18 | assert t.t1 + 1 not in ti 19 | 20 | assert ti.duration == t.duration 21 | assert ti.duration == 1 22 | assert ti.duration_delta.total_seconds() == 1 23 | 24 | assert str(ti) == '' 25 | 26 | assert str(t.to_interval().to_timer()) == str(t) 27 | 28 | 29 | def test_time_interval2(): 30 | st = 150000000 31 | ti = TimeInterval() 32 | assert str(ti) == '' 33 | 34 | ti = TimeInterval(from_time=st) 35 | assert str(ti) == '' 36 | 37 | ti = TimeInterval(from_time=st, to_time=st) 38 | assert str(ti) == '' 39 | 40 | ti = TimeInterval(to_time=st) 41 | assert str(ti) == '' 42 | -------------------------------------------------------------------------------- /tests/test_units.py: -------------------------------------------------------------------------------- 1 | from easypy.units import byte, KiB, SECOND 2 | 3 | 4 | def test_data_sizes(): 5 | assert "{0!s}, {0!r}, {0:10text}".format(byte) == "byte, byte, 1 byte" 6 | assert "{0!s}, {0!r}, {0:10text}".format(1000 * byte) == "KB, KB, 1 KB" 7 | assert "{0!s}, {0!r}, {0:10text}".format(1020 * byte) == "1020bytes, 1020*bytes, 1020bytes" 8 | assert "{0!s}, {0!r}, {0:10text}".format(1024 * byte) == "KiB, KiB, 1 KiB" 9 | assert "{0!s}, {0!r}, {0:10text}".format(2**20 * byte) == "MiB, MiB, 1 MiB" 10 | assert "{0!s}, {0!r}, {0:10text}".format(2**21 * byte) == "2MiB, 2*MiB, 2MiB" 11 | assert "{0!s}, {0!r}, {0:10text}".format(2**21 * byte + 100) == "~2.0MiB, 2097252*bytes, ~2.0MiB" 12 | assert "{0!s}, {0!r}, {0:10text}".format(2**41 * byte + 100) == "~2.0TiB, 2199023255652*bytes, ~2.0TiB" 13 | 14 | 15 | def test_durations(): 16 | assert "{0!s}, {0!r}, {0:10text}".format(SECOND) == "1.0, 1s, no-time " 17 | assert "{0!s}, {0!r}, {0:10text}".format(50 * SECOND) == "50.0, 50s, almost a minute" 18 | assert 
"{0!s}, {0!r}, {0:10text}".format(60 * SECOND) == "60.0, 01:00m, a minute " 19 | assert "{0!s}, {0!r}, {0:10text}".format(60**2 * SECOND) == "3600.0, 01:00h, an hour " 20 | assert "{0!s}, {0!r}, {0:10text}".format(25 * 60**2 * SECOND) == "90000.0, 1d, 01:00h, a day " 21 | assert "{0!s}, {0!r}, {0:10text}".format(8 * 24 * 60**2 * SECOND) == "691200.0, 8d, 00:00h, 8 days " 22 | assert "{0!s}, {0!r}, {0:10text}".format(32 * 24 * 60**2 * SECOND) == "2764800.0, 32d, 00:00h, 32 days " 23 | assert "{0!s}, {0!r}, {0:10text}".format(400 * 24 * 60**2 * SECOND) == "34560000.0, 400d, 00:00h, 400 days " 24 | 25 | 26 | def test_operators(): 27 | 28 | assert (byte * 1024) == KiB 29 | assert KiB / 1024 == byte 30 | assert KiB / KiB == 1 31 | assert KiB / 7 == 146.28571428571428 32 | assert KiB // 7 == 146 33 | assert 2050 // KiB == (2 * byte) 34 | 35 | # check that __r*__ overloads are used when the unit doesn't support the right-hand operand 36 | class Foo(): 37 | def __rfloordiv__(self, div): 38 | return self 39 | 40 | foo = Foo() 41 | 42 | assert KiB // foo is foo 43 | -------------------------------------------------------------------------------- /tests/test_ziplog.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | def test_ziplog(): 4 | from io import StringIO 5 | from easypy import ziplog 6 | from textwrap import dedent 7 | 8 | streams = dedent(""" 9 | 01:21:27 - 2 10 | 05:41:27 - 4 11 | ; 12 | 15:08:52.554223| - 5 13 | 16155 19:08:52.554223| - 11 14 | ; 15 | 2018-04-01 04:48:11,811| - 1 16 | 2018-04-06 17:13:40,966 - 8 17 | ; 18 | 2018-04-06T02:11:06+0200 - 3 19 | 2018-04-07T02:11:06+0200 - 12 20 | ; 21 | 2018-04-06 18:13:40,966 - 10 22 | 2018-04-23 04:48:11,811| - 14 23 | ; 24 | [2018/04/06 17:13:40.955356 - 7 25 | [2018/04/06 17:13:41.955356 - 9 26 | ; 27 | Apr 6 17:13:40 - 6 28 | Apr 7 17:13:40 - 13 29 | ; 30 | """) 31 | 32 | ziplog.YEAR = 2018 33 | ziplog.MONTH = 4 34 | ziplog.DAY = 6 35 | 36 | streams = 
[StringIO(line.lstrip()) for line in streams.split(";")] 37 | lines = ziplog.iter_zipped_logs(*streams, prefix="> ", show_timestamp=True) 38 | prev = 0 39 | print() 40 | for line in lines: 41 | print(line, end="") 42 | cur = int(line.rpartition(" ")[-1]) 43 | try: 44 | assert cur == prev + 1, "line %s is out of place" % cur 45 | except AssertionError: 46 | for line in lines: 47 | print(line, end="") 48 | raise 49 | prev = cur 50 | --------------------------------------------------------------------------------